blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
771095473e3066ad09df7493a6581d7bb5843b18 | 78b01cc5249504f067ed9a53063037818c4a2663 | /main.py | 79c4c87baff9be781702595bc7afbc06ad51483e | [
"MIT"
] | permissive | toshikurauchi/chopro_book | 4fae592b261a74b2e97782346348bba7df809879 | 73e7d1e4cd540457a1609959a4e912eba5e8278e | refs/heads/master | 2021-06-24T15:20:18.442808 | 2019-03-17T03:30:41 | 2019-03-17T03:30:41 | 135,749,123 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,004 | py | #!/usr/bin/python3
import sys
import re
from unicodedata import normalize
from pathlib import Path
from flask import Flask, render_template, request, redirect, url_for
from flask_sqlalchemy import SQLAlchemy
from chopro import ChoPro
from config import *
# Maximum length (in characters) of the lyrics preview shown in song lists.
SHORT_LIMIT = 100

# Flask application and its SQLite-backed SQLAlchemy session, shared by all
# models and routes below.
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///chopro_book.db'
db = SQLAlchemy(app)
class Playlist(db.Model):
    """A named, ordered collection of chopro song files.

    Ordering lives on the PlaylistSong association rows (their ``index``
    column); ``new_songs``/``deleted_songs`` collect pending additions and
    removals so the view layer can persist them explicitly.
    """

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80), unique=True, nullable=False)
    # Association rows; removed together with the playlist.
    songs = db.relationship('PlaylistSong', backref='playlist', cascade="all,delete")

    def __init__(self, id=None, name='', song_files=None):
        # ``song_files=None`` (not ``[]``) avoids the shared
        # mutable-default-argument pitfall.
        self.id = id
        self.name = name
        self.song_files = [] if song_files is None else song_files
        # Reset change tracking: songs given to the constructor are persisted
        # through the ``songs`` relationship cascade, not via ``new_songs``.
        self.new_songs = []
        self.deleted_songs = []

    def __repr__(self):
        return '<Playlist %r>' % self.name

    @property
    def sorted_songs(self):
        """Association rows ordered by their stored playlist position."""
        return sorted(self.songs, key=lambda s: s.index)

    @property
    def song_files(self):
        """Filenames of the playlist's songs, in playlist order."""
        return [s.filename for s in self.sorted_songs]

    @song_files.setter
    def song_files(self, new_songs):
        """Replace the playlist contents with ``new_songs`` (filename list).

        Existing association rows are kept (re-indexed) when their filename
        is still present; brand-new filenames are collected in
        ``self.new_songs`` and rows for removed filenames in
        ``self.deleted_songs`` so the caller can persist the changes.
        """
        current_songs = {s.filename: s for s in self.songs}
        self.new_songs = []
        self.songs = []
        for index, f in enumerate(new_songs):
            if f in current_songs:
                song = current_songs.pop(f)
                song.index = index
            else:
                song = PlaylistSong(self, f, index)
                self.new_songs.append(song)
            self.songs.append(song)
        # Whatever was not re-used has been removed from the playlist.
        self.deleted_songs = list(current_songs.values())

    @property
    def song_list(self):
        """Song objects (with transpose applied) in playlist order.

        A leftover debug ``print`` of (filename, index) pairs was removed.
        """
        return [s.song for s in self.sorted_songs]
class PlaylistSong(db.Model):
    """Association row linking a Playlist to one song file.

    Stores the song's position within the playlist and a per-playlist
    transpose offset.
    """
    id = db.Column(db.Integer, primary_key=True)
    # Chopro file name (relative to CHOPRO_DIR).
    filename = db.Column(db.String(120), nullable=False)
    # Semitone offset applied when rendering this song in this playlist.
    transpose = db.Column(db.Integer)
    # Zero-based position of the song within the playlist.
    index = db.Column(db.Integer)
    playlist_id = db.Column(db.Integer, db.ForeignKey('playlist.id'), nullable=False)

    def __init__(self, playlist, filename, index, transpose=0):
        self.playlist = playlist
        self.filename = filename
        self.index = index
        self.transpose = transpose

    @property
    def song(self):
        """Build a Song value object for rendering this playlist entry."""
        return Song(self.filename, self.transpose, self.id)
class Song:
    """A single chopro song file, optionally transposed.

    Wraps a file under ``CHOPRO_DIR`` and lazily renders it with ChoPro.
    """

    # Characters treated as word separators when building a URL slug.
    # Hoisted to a class attribute so it is compiled once, not per call.
    _PUNCT_RE = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')

    def __init__(self, filename, transpose=0, playlist_song_id=None):
        self.filename = filename
        self.transpose = transpose
        # Id of the PlaylistSong row this Song was built from, if any.
        self.playlist_song_id = playlist_song_id
        self._lyrics = None  # lazy cache for the ``lyrics`` property
        self.name = clean_name(self.filename)

    def __eq__(self, other):
        # Return NotImplemented for non-Song operands instead of raising
        # AttributeError on ``other.filename``.
        if not isinstance(other, Song):
            return NotImplemented
        return self.filename == other.filename

    def chopro(self):
        """Read the song file and return a ChoPro renderer for it."""
        full_filename = (Path(CHOPRO_DIR) / self.filename).absolute()
        with open(full_filename) as cpfile:
            cpstr = cpfile.read()
        return ChoPro(cpstr, self.transpose)

    @property
    def html(self):
        """Chords-and-lyrics HTML rendering of the song."""
        return self.chopro().get_html()

    @property
    def lyrics(self):
        """Plain lyrics text, parsed once and cached."""
        if self._lyrics is None:
            try:
                self._lyrics = self.chopro().get_lyrics()
            except Exception as e:
                # Re-raise with the song name so the failing file is
                # identifiable, keeping the original error as the cause
                # (the old bare ``except:`` discarded it).
                raise Exception(self.name) from e
        return self._lyrics

    @property
    def short_lyrics(self):
        """First ~SHORT_LIMIT characters of the lyrics as HTML lines.

        Leading blank lines are skipped, and the first non-blank line is
        dropped when it looks like an "intro" marker.
        """
        lyrics = self.lyrics
        short = ''
        song_started = False
        for line in lyrics.split('\n'):
            clean = line.strip()
            if not song_started and clean:
                song_started = True
                if 'intro' in clean.lower():
                    continue
            if clean:
                short += clean + '<br>'
            if len(short) > SHORT_LIMIT:
                break
        return short

    @property
    def next_transpose(self):
        """Transpose one semitone up, as a string for URL building."""
        return str(self.transpose + 1)

    @property
    def prev_transpose(self):
        """Transpose one semitone down, as a string for URL building."""
        return str(self.transpose - 1)

    @property
    def slug(self):
        """URL-safe slug derived from the filename.

        Based on: http://flask.pocoo.org/snippets/5/
        """
        result = []
        for word in self._PUNCT_RE.split(self.filename.lower()):
            # Strip accents; 'ignore' guarantees pure-ASCII bytes, so the
            # decode below cannot fail.
            word = normalize('NFKD', word).encode('ascii', 'ignore')
            if word:
                result.append(word.decode('ascii'))
        return '-'.join(result)
def clean_name(name):
    """Strip a trailing ``.chopro``/``.chordpro`` extension (any case).

    The previous pattern used an unescaped ``.``, which matched *any*
    character before the extension (e.g. ``'songXchopro'`` -> ``'song'``);
    the dot is now escaped so only a real extension is removed.
    """
    regex = re.compile(r"\.(chopro|chordpro)$", re.IGNORECASE)
    return regex.sub('', name)
def list_songs(ignore=None):
    """Return a Song for every file in CHOPRO_DIR, sorted by filename.

    Args:
        ignore: optional iterable of Songs to exclude (compared by
            filename via ``Song.__eq__``). ``None`` (the default) replaces
            the old mutable ``[]`` default argument.
    """
    if ignore is None:
        ignore = []
    path = Path(CHOPRO_DIR)
    songfiles = sorted([Song(f.name) for f in path.iterdir()], key=lambda s: s.filename)
    return [s for s in songfiles if s not in ignore]
@app.route('/')
def index():
    """Home page: list every song available in CHOPRO_DIR."""
    return render_template('index.html', songs=list_songs())
@app.route('/playlists')
def playlists():
    """List all stored playlists."""
    all_playlists = Playlist.query.all()
    return render_template('playlists.html', playlists=all_playlists)
@app.route('/song')
def chords():
    """Render one song; ``filename`` is required, ``transpose`` optional.

    ``transpose`` is the semitone offset, defaulting to 0.
    """
    transpose = int(request.args.get('transpose', 0))
    song = Song(request.args['filename'], transpose)
    return render_template('chords.html', song=song)
@app.route('/playlist', methods=['GET', 'POST'])
@app.route('/playlist/<pid>/edit', methods=['GET', 'POST'], endpoint='playlist_edit')
def playlist_form(pid=None):
    """Create (no ``pid``) or edit (with ``pid``) a playlist.

    POST expects ``name`` and ``songs`` (filenames joined by ``;;``) and
    redirects to the playlist view; GET renders the form pre-filled with
    the playlist's current songs.
    """
    if request.method == 'POST':
        name = request.form.get('name')
        # Guard against a missing/empty field: ``''.split(';;')`` yields
        # [''] which would create a bogus entry with an empty filename,
        # and a missing key would crash on ``None.split``.
        songs = [s for s in request.form.get('songs', '').split(';;') if s]
        if pid is not None:
            playlist = Playlist.query.get(pid)
            playlist.name = name
            playlist.song_files = songs
        else:
            playlist = Playlist(name=name, song_files=songs)
        # Persist the additions/removals collected by the song_files setter.
        for s in playlist.new_songs:
            db.session.add(s)
        for s in playlist.deleted_songs:
            db.session.delete(s)
        db.session.add(playlist)
        db.session.commit()
        return redirect(url_for('playlist_view', pid=playlist.id))
    # GET
    playlist = Playlist()
    form_action = url_for('playlist_form')
    if pid is not None:
        playlist = Playlist.query.get(pid)
        form_action = url_for('playlist_edit', pid=pid)
    selected_songs = playlist.song_list
    available_songs = list_songs(ignore=selected_songs)
    return render_template('playlist_form.html', playlist=playlist,
                           selected_songs=selected_songs,
                           available_songs=available_songs,
                           form_action=form_action)
@app.route('/playlist/<pid>', methods=['GET'])
def playlist_view(pid):
    """Display one playlist with its songs in playlist order."""
    playlist = Playlist.query.get(pid)
    return render_template('playlist_view.html', playlist=playlist, songs=playlist.song_list)
@app.route('/playlist-song/<pid>', methods=['GET'])
def playlist_song(pid):
    """Render one playlist entry, persisting a new transpose value.

    NOTE(review): this mutates the database on a GET request; a POST
    endpoint would be more appropriate -- confirm before changing callers.
    """
    playlist_song = PlaylistSong.query.get(pid)
    transpose = int(request.args.get('transpose', 0))
    playlist_song.transpose = transpose
    # Save
    db.session.add(playlist_song)
    db.session.commit()
    song = playlist_song.song
    return render_template('playlist_song.html', song=song)
if __name__=='__main__':
app.run(debug=True) | [
"andrew.kurauchi@gmail.com"
] | andrew.kurauchi@gmail.com |
9d3f971d92a6f6e1cd18b6d14d0e39c203776983 | a8c76e503df527445b14163f74faa8aca7e90ecd | /examples/cardless_credit_example.py | 9b76216fa16aa9281bbd570ee1371176ada29324 | [
"MIT"
] | permissive | baseup/xendit-python | c2a4f0766886f6124a86810e0831653e1ca1e1f4 | 8b677fbbad5fe3bbcd0a2b93e30e8040543b8f61 | refs/heads/master | 2023-03-09T02:56:53.513101 | 2021-03-04T08:06:44 | 2021-03-04T08:06:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,442 | py | import time
from print_running_function import print_running_function
# Hackish method to import from another directory
# Useful while xendit-python isn't released yet to the public
import importlib.machinery
loader = importlib.machinery.SourceFileLoader("xendit", "../xendit/__init__.py")
xendit = loader.load_module("xendit")
class CreateCardlessCreditPayment:
    """Example: create a cardless-credit payment and print the result."""

    @staticmethod
    def run(xendit_instance, **kwargs):
        """Call CardlessCredit.create_payment; print the result or the error."""
        try:
            payment = xendit_instance.CardlessCredit.create_payment(**kwargs)
        except xendit.XenditError as err:
            print("Error status code:", err.status_code)
            print("Error message:", err)
        else:
            print(payment)

    @staticmethod
    def example(xendit_instance):
        """Build sample request data and run the create-payment example."""
        sample_items = [
            {
                "id": "item-123",
                "name": "Phone Case",
                "price": 200000,
                "type": "Smartphone",
                "url": "http://example.com/phone/phone_case",
                "quantity": 2,
            }
        ]
        args = {
            "cardless_credit_type": xendit.CardlessCreditType.KREDIVO,
            "external_id": f"id-{int(time.time())}",
            "amount": 10000,
            "payment_type": "3_months",
            "items": sample_items,
            "customer_details": {
                "first_name": "customer first name",
                "last_name": "customer last name",
                "email": "customer@email.com",
                "phone": "0812332145",
            },
            "shipping_address": {
                "first_name": "first name",
                "last_name": "last name",
                "address": "Jl Teknologi No. 12",
                "city": "Jakarta",
                "postal_code": "12345",
                "phone": "081513114262",
                "country_code": "IDN",
            },
            "redirect_url": "https://my-shop.com/home",
            "callback_url": "https://my-shop.com/callback",
        }
        print_running_function("xendit.CardlessCredit.create_payment", args)
        CreateCardlessCreditPayment.run(xendit_instance, **args)
class CalculatePaymentType:
    """Example: list the available cardless-credit payment types."""

    @staticmethod
    def run(xendit_instance, **kwargs):
        """Call CardlessCredit.calculate_payment_type; print result or error."""
        try:
            payment_types = xendit_instance.CardlessCredit.calculate_payment_type(**kwargs)
        except xendit.XenditError as err:
            print("Error status code:", err.status_code)
            print("Error message:", err)
        else:
            print(payment_types)

    @staticmethod
    def example(xendit_instance):
        """Build sample request data and run the calculate-payment-type example."""
        sample_items = [
            {
                "id": "item-123",
                "name": "Phone Case",
                "price": 200000,
                "type": "Smartphone",
                "url": "http://example.com/phone/phone_case",
                "quantity": 2,
            }
        ]
        args = {
            "cardless_credit_type": xendit.CardlessCreditType.KREDIVO,
            "amount": 10000,
            "items": sample_items,
        }
        print_running_function("xendit.CardlessCredit.calculate_payment_type", args)
        CalculatePaymentType.run(xendit_instance, **args)
def ask_cardless_credit_input():
    """Prompt for a menu choice and return it as an int.

    Re-prompts iteratively (instead of the original unbounded recursion,
    which could exhaust the stack on repeated bad input) until the user
    types a valid number. The menu is re-printed on every attempt, exactly
    as the recursive version did.
    """
    while True:
        print("Input the action that you want to use")
        print("0. Exit")
        print("1. Create Payment / Generate Checkout URL")
        print("2. Calculate Payment Types")
        try:
            return int(input())
        except ValueError:
            print("Invalid input. Please type a number")
def cardless_credit_example(xendit_instance):
    """Interactive loop dispatching the cardless-credit example actions.

    Keeps prompting until the user chooses 0 (exit).
    """
    choice = ask_cardless_credit_input()
    while choice != 0:
        if choice == 1:
            print("Running example of Create Payment / Generate Checkout URL")
            CreateCardlessCreditPayment.example(xendit_instance)
        elif choice == 2:
            print("Running example of Calculate Payment Types")
            CalculatePaymentType.example(xendit_instance)
        choice = ask_cardless_credit_input()
| [
"adyaksa.wisanggeni@gmail.com"
] | adyaksa.wisanggeni@gmail.com |
3c4bf638f60768a4555ee438fa275702572a60bd | fd6af2839be96a7966925d2d4c47a5af6dffd938 | /ratatosk/pipeline/__init__.py | 35d6ef55b865e5f3ca00702f832ebc8b84f18c4c | [
"Apache-2.0"
] | permissive | percyfal/ratatosk | 429ce423100ddbf4008b51df66db99c5205f8def | 71ee4741952219c1fccf9cb6c172a354610d499c | refs/heads/master | 2020-05-16T22:43:33.212506 | 2013-09-24T11:24:13 | 2013-09-24T11:24:13 | 8,706,433 | 7 | 4 | null | 2013-04-08T16:27:07 | 2013-03-11T15:25:44 | Python | UTF-8 | Python | false | false | 1,231 | py | import os
import ratatosk
from ratatosk.pipeline import align, seqcap, haloplex
# Define configuration file locations and classes for predefined workflows.
# Each entry maps a workflow name to the YAML config shipped under the
# package's top-level "config" directory and to the pipeline class that
# drives it ('ratatosk' is the shared base config only, hence cls=None).
config_dict = {
    'ratatosk' : {'config':os.path.join(ratatosk.__path__[0], os.pardir, "config", "ratatosk.yaml"),
                  'cls':None},
    'Align' : {'config' : os.path.join(ratatosk.__path__[0], os.pardir, "config", "align.yaml"),
               'cls' : align.Align},
    'Seqcap' : {'config' : os.path.join(ratatosk.__path__[0], os.pardir, "config", "seqcap.yaml"),
                'cls' : seqcap.SeqCap},
    'SeqcapSummary' : {'config' : os.path.join(ratatosk.__path__[0], os.pardir, "config", "seqcap.yaml"),
                       'cls' : seqcap.SeqCapSummary},
    'HaloPlex' : {'config' : os.path.join(ratatosk.__path__[0], os.pardir, "config", "haloplex.yaml"),
                  'cls' : haloplex.HaloPlex},
    'HaloPlexSummary' : {'config' : os.path.join(ratatosk.__path__[0],os.pardir, "config", "haloplex.yaml"),
                         'cls' : haloplex.HaloPlexSummary},
    'HaloPlexCombine' : {'config' : os.path.join(ratatosk.__path__[0],os.pardir, "config", "haloplex.yaml"),
                         'cls' : haloplex.HaloPlexCombine},
}
| [
"per.unneberg@scilifelab.se"
] | per.unneberg@scilifelab.se |
fed848079573db2088199105182feb85233e4b34 | 6923f79f1eaaba0ab28b25337ba6cb56be97d32d | /Numerical_Eng_Python/example10_1.py | 865e519667c1c0de4e2117143d9072094cfc3225 | [] | no_license | burakbayramli/books | 9fe7ba0cabf06e113eb125d62fe16d4946f4a4f0 | 5e9a0e03aa7ddf5e5ddf89943ccc68d94b539e95 | refs/heads/master | 2023-08-17T05:31:08.885134 | 2023-08-14T10:05:37 | 2023-08-14T10:05:37 | 72,460,321 | 223 | 174 | null | 2022-10-24T12:15:06 | 2016-10-31T17:24:00 | Jupyter Notebook | UTF-8 | Python | false | false | 355 | py | ## example10_1
from goldSearch import *
def f(x):
    """Penalized objective for the golden-section search example.

    A cubic polynomial plus a quadratic penalty (multiplier 1.0) that is
    active only where the constraint x >= 0 is violated.
    """
    penalty_weight = 1.0   # Constraint multiplier
    violation = min(0.0, x)   # Constraint function (0 when x >= 0)
    return 1.6*x**3 + 3.0*x**2 - 2.0*x + penalty_weight*violation**2
# Python 2 driver script (print statements, raw_input).
xStart = 1.0   # starting point for the bracketing search
h = 0.01       # initial step size
# Bracket the minimum of f, then locate it with golden-section search
# (bracket/search come from goldSearch via the star import above).
x1,x2 = bracket(f,xStart,h)
x,fMin = search(f,x1,x2)
print "x =",x
print "f(x) =",fMin
raw_input ("\nPress return to exit")
| [
"bb@b.om"
] | bb@b.om |
63c121a05dfeb948f6e5c1fa84cafc4740c8d4dd | 1b4abb5e310c7ae1b2928f9ea80a6b3a8c2fb8ed | /model/ml/infrastructure/parameter_search.py | 065cd4b1f932cd37ca710d1d220b8003732a721f | [] | no_license | zhang-198/ExampleDrivenErrorDetection | 2e2c708665f2b57b6ac7c785604a2ac6234f7ba9 | ae8bc24fc441957d9a29e5fa4cc247f1805d8b4d | refs/heads/master | 2023-05-23T14:49:29.628520 | 2020-04-09T14:02:28 | 2020-04-09T14:02:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,479 | py | import os
import glob
import shutil
import json
import numpy as np
import sys
def evaluate_check_point_json(checkpoint_json_file):
    """Read a torch-rnn checkpoint JSON and return the best result.

    Returns a (validation loss, iteration) pair for the checkpoint with
    the lowest validation loss (first occurrence on ties).
    """
    with open(checkpoint_json_file) as fh:
        summary = json.load(fh)
    losses = summary['val_loss_history']
    iterations = summary['val_loss_history_it']
    best = min(range(len(losses)), key=losses.__getitem__)
    return losses[best], iterations[best]
def get_latest_checkpoint(path):
    """Return the most recently created .json checkpoint in *path*.

    Returns None when the directory contains no checkpoint files.
    """
    candidates = glob.glob(path + '/*.json')
    if not candidates:
        return None
    return max(candidates, key=os.path.getctime)
def delete_folder_content(folder):
    """Remove every file and sub-directory inside *folder*.

    The folder itself is kept; failures on individual entries are printed
    and skipped so the cleanup continues.
    """
    for entry in os.listdir(folder):
        entry_path = os.path.join(folder, entry)
        try:
            if os.path.isdir(entry_path):
                shutil.rmtree(entry_path)
            elif os.path.isfile(entry_path):
                os.unlink(entry_path)
        except Exception as e:
            print(e)
def run_command(command, run):
    # Echo the shell command, then execute it only when *run* is true --
    # flipping the module-level ``run`` flag turns the whole script into a
    # dry run that just prints what it would do. (Python 2 print statement.)
    print command
    if run:
        os.system(command)
def get_best_loss(best_loss, json_file):
    """Return the best loss recorded in *json_file*.

    Falls back to the supplied *best_loss* when the file does not exist
    (no best checkpoint has been saved yet).
    """
    if not os.path.exists(json_file):
        return best_loss
    recorded_best, _ = evaluate_check_point_json(json_file)
    return recorded_best
# Hyper-parameter grid-search driver: for every data column, preprocess the
# raw text, train torch-rnn across a grid of settings, and keep only the
# checkpoint with the lowest validation loss. (Python 2 script.)
run = True  # set False to dry-run: commands are printed but not executed
data_name = 'BlackOakUppercase'
num_columns = 12

for column_id in range(num_columns):
    # Convert this column's raw text into the h5/json pair torch-rnn expects.
    command = 'python scripts/preprocess.py \\\n' + \
              '--input_txt /root/torch-rnn/storage/' + data_name + '/column_' + str(column_id) + '/orig_input/column_' + str(column_id) + '.txt \\\n' + \
              '--output_h5 /root/torch-rnn/storage/' + data_name + '/column_' + str(column_id) + '/input/my_data.h5 \\\n' + \
              '--output_json /root/torch-rnn/storage/' + data_name + '/column_' + str(column_id) + '/input/my_data.json\n\n'
    run_command(command, run)

    # Directory holding the single best checkpoint found for this column.
    directory = '/root/torch-rnn/storage/' + data_name + '/column_' + str(column_id) + '/best'
    if not os.path.exists(directory):
        os.makedirs(directory)

    best_loss = sys.float_info.max

    # check whether we need batch size of 50
    # check whether seq_length was important
    for units in [128]:
        for num_layers in [1]:
            for batch_size in [5, 10]:
                for learning_rate in [0.001, 0.002, 0.003]:
                    for dropout in [0.0, 0.1, 0.3]:
                        for seq_length in [15, 25, 50]:
                            # Train one configuration; checkpoints land in <column>/cv.
                            command = 'th train.lua ' + \
                                      '-input_h5 /root/torch-rnn/storage/' + data_name + '/column_' + str(column_id) + '/input/my_data.h5 ' + \
                                      '-input_json /root/torch-rnn/storage/' + data_name + '/column_' + str(column_id) + '/input/my_data.json ' + \
                                      '-checkpoint_name /root/torch-rnn/storage/' + data_name + '/column_' + str(column_id) + '/cv/checkpoint ' + \
                                      '-rnn_size ' + str(units) + ' ' + \
                                      '-checkpoint_every 50 ' + \
                                      '-num_layers ' + str(num_layers) + ' ' + \
                                      '-dropout ' + str(dropout) + ' ' + \
                                      '-seq_length ' + str(seq_length) + ' ' + \
                                      '-max_epochs 100 ' + \
                                      '-batch_size ' + str(batch_size) + ' ' + \
                                      '-learning_rate ' + str(learning_rate) + \
                                      '\n\n'
                            run_command(command, run)

                            checkpoint_path = '/root/torch-rnn/storage/' + data_name + '/column_' + str(column_id) + '/cv'
                            latest_checkpoint_file = get_latest_checkpoint(checkpoint_path)
                            if latest_checkpoint_file == None:
                                # Training produced no checkpoint: log the failed config.
                                with open(directory + "/log.txt", "a") as myfile:
                                    myfile.write("rnn_size: " + str(units) + ", " + \
                                                 "num_layers: " + str(num_layers) + ", " + \
                                                 "dropout: " + str(dropout) + ", " + \
                                                 "seq_length: " + str(seq_length) + ", " + \
                                                 "batch_size: " + str(batch_size) + ", " + \
                                                 "learning_rate: " + str(learning_rate) + ", " + \
                                                 "best checkpoint id: " + "none" + ", " + \
                                                 "loss: " + "none" + "\n"
                                                 )
                            else:
                                loss, checkpoint_index = evaluate_check_point_json(latest_checkpoint_file)
                                # Re-read the persisted best in case best.json changed on disk.
                                best_loss = get_best_loss(best_loss, directory + "/best.json")
                                if best_loss > loss:
                                    # found a better parameter config
                                    best_loss = loss
                                    # save this checkpoint
                                    shutil.copy(checkpoint_path + "/checkpoint_" + str(checkpoint_index) + ".t7", directory + "/best.t7")
                                    shutil.copy(checkpoint_path + "/checkpoint_" + str(checkpoint_index) + ".json", directory + "/best.json")
                                # log everything
                                with open(directory + "/log.txt", "a") as myfile:
                                    myfile.write("rnn_size: " + str(units) + ", " + \
                                                 "num_layers: " + str(num_layers) + ", " + \
                                                 "dropout: " + str(dropout) + ", " + \
                                                 "seq_length: " + str(seq_length) + ", " + \
                                                 "batch_size: " + str(batch_size) + ", " + \
                                                 "learning_rate: " + str(learning_rate) + ", " + \
                                                 "best checkpoint id: " + str(checkpoint_index) + ", " + \
                                                 "loss: " + str(loss) + "\n"
                                                 )
                                # clean up old checkpoints
                                # NOTE(review): indentation was lost in this copy; the cleanup
                                # plausibly runs only when a checkpoint exists -- confirm.
                                delete_folder_content(checkpoint_path)
| [
"neutatz@googlemail.com"
] | neutatz@googlemail.com |
cf142b9f2fd1fb5e8cf3857fad308b0fa5003c56 | dd256415176fc8ab4b63ce06d616c153dffb729f | /aditya-works-feature-python_programming (1)/aditya-works-feature-python_programming/24-Jul-2019/nested_class_example/inner_class_1.py | ff91fb3904a8d4af0d622839083cff7ca1336443 | [] | no_license | adityapatel329/python_works | 6d9c6b4a64cccbe2717231a7cfd07cb350553df3 | 6cb8b2e7f691401b1d2b980f6d1def848b0a71eb | refs/heads/master | 2020-07-24T17:15:39.839826 | 2019-09-12T07:53:28 | 2019-09-12T07:53:28 | 207,993,516 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | class Outer:
    def __init__(self):
        # Each Outer instance owns one instance of its nested Inner class.
        self.inner = self.Inner()
    def reveal(self):
        # Delegate to the nested Inner instance's display method.
        self.inner.display("Calling Inner class function from outer class")
    class Inner:
        # Nested helper class demonstrating Python inner classes.
        def display(self,msg):
            print(msg)
# Demo: call Inner indirectly through Outer, then directly.
outer = Outer()
outer.reveal()

## both method is right
# An Inner instance can be created via the class attribute on the outer
# instance, or the one built in Outer.__init__ can be reused.
inner = outer.Inner() ## inner =Outer().Inner() or inner = outer.inner
inner.display("Just print it!")
| [
"aditya.patel@1rivet.local"
] | aditya.patel@1rivet.local |
18def03de92ac213dcf488baf0b20ef2ea65a3b1 | b475baab9cdc73b104c077d48ab7053094040068 | /torchbiggraph/converters/export_to_tsv.py | 49cbff0b999d5de9eac4e65970863799a581535a | [
"BSD-3-Clause"
] | permissive | king2727/PyTorch-BigGraph | 91008349eb92d32283ced6a29d60b39229b0d276 | e3de4a3df84e4d7994477bbaa76d828592110a87 | refs/heads/main | 2023-08-26T00:32:43.912575 | 2021-10-27T14:32:52 | 2021-10-27T14:34:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,783 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE.txt file in the root directory of this source tree.
import argparse
from typing import Iterable, TextIO
from torchbiggraph.checkpoint_manager import CheckpointManager
from torchbiggraph.config import ConfigFileLoader, ConfigSchema
from torchbiggraph.graph_storages import (
ENTITY_STORAGES,
RELATION_TYPE_STORAGES,
AbstractEntityStorage,
AbstractRelationTypeStorage,
)
from torchbiggraph.model import MultiRelationEmbedder, make_model
def write(outf: TextIO, key: Iterable[str], value: Iterable[float]) -> None:
outf.write("%s\t%s\n" % ("\t".join(key), "\t".join("%.9f" % x for x in value)))
def make_tsv(
config: ConfigSchema, entities_tf: TextIO, relation_types_tf: TextIO
) -> None:
print("Loading relation types and entities...")
entity_storage = ENTITY_STORAGES.make_instance(config.entity_path)
relation_type_storage = RELATION_TYPE_STORAGES.make_instance(config.entity_path)
print("Initializing model...")
model = make_model(config)
print("Loading model check point...")
checkpoint_manager = CheckpointManager(config.checkpoint_path)
state_dict, _ = checkpoint_manager.read_model()
if state_dict is not None:
model.load_state_dict(state_dict, strict=False)
make_tsv_for_entities(model, checkpoint_manager, entity_storage, entities_tf)
make_tsv_for_relation_types(model, relation_type_storage, relation_types_tf)
def make_tsv_for_entities(
model: MultiRelationEmbedder,
checkpoint_manager: CheckpointManager,
entity_storage: AbstractEntityStorage,
entities_tf: TextIO,
) -> None:
print("Writing entity embeddings...")
for ent_t_name, ent_t_config in model.entities.items():
for partition in range(ent_t_config.num_partitions):
print(
f"Reading embeddings for entity type {ent_t_name} partition "
f"{partition} from checkpoint..."
)
entities = entity_storage.load_names(ent_t_name, partition)
embeddings, _ = checkpoint_manager.read(ent_t_name, partition)
if model.global_embs is not None:
embeddings += model.global_embs[model.EMB_PREFIX + ent_t_name]
print(
f"Writing embeddings for entity type {ent_t_name} partition "
f"{partition} to output file..."
)
for ix in range(len(embeddings)):
write(entities_tf, (entities[ix],), embeddings[ix])
if (ix + 1) % 5000 == 0:
print(f"- Processed {ix+1}/{len(embeddings)} entities so far...")
print(f"- Processed all {len(embeddings)} entities")
entities_output_filename = getattr(entities_tf, "name", "the output file")
print(f"Done exporting entity data to {entities_output_filename}")
def make_tsv_for_relation_types(
model: MultiRelationEmbedder,
relation_type_storage: AbstractRelationTypeStorage,
relation_types_tf: TextIO,
) -> None:
print("Writing relation type parameters...")
relation_types = relation_type_storage.load_names()
if model.num_dynamic_rels > 0:
(rel_t_config,) = model.relations
op_name = rel_t_config.operator
(lhs_operator,) = model.lhs_operators
(rhs_operator,) = model.rhs_operators
for side, operator in [("lhs", lhs_operator), ("rhs", rhs_operator)]:
for param_name, all_params in operator.named_parameters():
for rel_t_name, param in zip(relation_types, all_params):
shape = "x".join(f"{d}" for d in param.shape)
write(
relation_types_tf,
(rel_t_name, side, op_name, param_name, shape),
param.flatten(),
)
else:
for rel_t_name, rel_t_config, operator in zip(
relation_types, model.relations, model.rhs_operators
):
if rel_t_name != rel_t_config.name:
raise ValueError(
f"Mismatch in relations names: got {rel_t_name} in the "
f"dictionary and {rel_t_config.name} in the config."
)
op_name = rel_t_config.operator
for param_name, param in operator.named_parameters():
shape = "x".join(f"{d}" for d in param.shape)
write(
relation_types_tf,
(rel_t_name, "rhs", op_name, param_name, shape),
param.flatten(),
)
relation_types_output_filename = getattr(
relation_types_tf, "name", "the output file"
)
print(f"Done exporting relation type data to {relation_types_output_filename}")
def main():
config_help = "\n\nConfig parameters:\n\n" + "\n".join(ConfigSchema.help())
parser = argparse.ArgumentParser(
epilog=config_help,
# Needed to preserve line wraps in epilog.
formatter_class=argparse.RawDescriptionHelpFormatter,
)
parser.add_argument("config", help="Path to config file")
parser.add_argument("-p", "--param", action="append", nargs="*")
parser.add_argument("--entities-output", required=True)
parser.add_argument("--relation-types-output", required=True)
opt = parser.parse_args()
loader = ConfigFileLoader()
config = loader.load_config(opt.config, opt.param)
with open(opt.entities_output, "xt") as entities_tf, open(
opt.relation_types_output, "xt"
) as relation_types_tf:
make_tsv(config, entities_tf, relation_types_tf)
if __name__ == "__main__":
main()
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
64a76e03201f2dd54989a72856ef55fb03fd7f1c | e197cbe92104a328ac87c201569724b6142c34ce | /workshop1/settings.py | 2353d21f77d7fe92e0b3303f1ff34fb19057af63 | [] | no_license | chepe4pi/workshop1 | 877654782dee2f0a0d5d26aabd19356225fad59a | e744ec0520071beba3549519a04d3d728b61c0a0 | refs/heads/master | 2021-07-14T02:53:52.473856 | 2017-10-18T15:04:47 | 2017-10-18T15:04:47 | 105,813,167 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,485 | py | """
Django settings for workshop1 project.
Generated by 'django-admin startproject' using Django 1.11.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'fi!p06auio$kb6(50g5*y1g*8#*3aojig=jn2pjsq72et6+6e6'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'orders',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'workshop1.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'workshop1.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'workshop_1',
'USER': 'workshop_1',
'PASSWORD': '123456',
'HOST': 'localhost',
'PORT': '',
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
REST_FRAMEWORK = {
'DEFAULT_THROTTLE_CLASSES': (
'rest_framework.throttling.AnonRateThrottle',
'rest_framework.throttling.UserRateThrottle'
),
'DEFAULT_THROTTLE_RATES': {
'anon': '100/day',
'user': '5000/day'
}
}
| [
"chepe4pi@gmail.com"
] | chepe4pi@gmail.com |
550dd7c6a02182f25d9f4efb8462999eb0161fe3 | 3ad8887aca54daa74b1fe446cb35cd0902e1e9bd | /jackdaw/gatherer/ldap/agent/common.py | 9dbba6051d5737ff4460fb9c6620d25d4d1ef4b7 | [] | no_license | huangzccn/jackdaw | 6ea5f3f7901c1c64b469ea4c25de0e77a3fc49a2 | 1a9800152fb8f19d5db43fcd235f45f6db2e3878 | refs/heads/master | 2023-08-29T11:44:46.692776 | 2021-10-23T20:00:36 | 2021-10-23T20:00:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,288 | py | import enum
class LDAPAgentCommand(enum.Enum):
SPNSERVICE = 0
SPNSERVICES = 1
USER = 2
USERS = 3
MACHINE = 4
MACHINES = 5
OU = 6
OUS = 7
DOMAININFO = 8
GROUP = 9
GROUPS = 10
MEMBERSHIP = 11
MEMBERSHIPS = 12
SD = 13
SDS = 14
GPO = 15
GPOS = 16
TRUSTS = 17
SCHEMA = 18
EXCEPTION = 99
SPNSERVICES_FINISHED = 31
USERS_FINISHED = 32
MACHINES_FINISHED = 33
OUS_FINISHED = 34
GROUPS_FINISHED = 35
MEMBERSHIPS_FINISHED = 36
SDS_FINISHED = 37
DOMAININFO_FINISHED = 38
GPOS_FINISHED = 39
TRUSTS_FINISHED = 40
MEMBERSHIP_FINISHED = 41
SCHEMA_FINISHED = 42
MSLDAP_JOB_TYPES = {
'users' : LDAPAgentCommand.USERS_FINISHED ,
'machines' : LDAPAgentCommand.MACHINES_FINISHED ,
'sds' : LDAPAgentCommand.SDS_FINISHED ,
'memberships' : LDAPAgentCommand.MEMBERSHIPS_FINISHED ,
'ous' : LDAPAgentCommand.OUS_FINISHED ,
'gpos' : LDAPAgentCommand.GPOS_FINISHED ,
'groups' : LDAPAgentCommand.GROUPS_FINISHED ,
'spns' : LDAPAgentCommand.SPNSERVICES_FINISHED ,
'adinfo' : LDAPAgentCommand.DOMAININFO_FINISHED,
'trusts' : LDAPAgentCommand.TRUSTS_FINISHED,
'schema' : LDAPAgentCommand.SCHEMA_FINISHED,
}
MSLDAP_JOB_TYPES_INV = {v: k for k, v in MSLDAP_JOB_TYPES.items()}
class LDAPAgentJob:
def __init__(self, command, data):
self.command = command
self.data = data | [
"info@skelsec.com"
] | info@skelsec.com |
816c4461ef4bd6c6665cc240da911eecc02460b1 | bca9c2fa3c4c3d06dd612280ce39090a9dfab9bd | /neekanee/job_scrapers/plugins/com/icims/oasispetroleum.py | 02b50cd3371124e8785267ca1f23c53926380bbc | [] | no_license | thayton/neekanee | 0890dd5e5cf5bf855d4867ae02de6554291dc349 | f2b2a13e584469d982f7cc20b49a9b19fed8942d | refs/heads/master | 2021-03-27T11:10:07.633264 | 2018-07-13T14:19:30 | 2018-07-13T14:19:30 | 11,584,212 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 584 | py | from neekanee.jobscrapers.icims.icims2 import IcimsJobScraper
COMPANY = {
'name': 'Oasis Petroleum',
'hq': 'Houston, TX',
'home_page_url': 'http://www.oasispetroleum.com',
'jobs_page_url': 'https://jobs-oasispetroleum.icims.com/jobs/intro?in_iframe=1',
'empcnt': [201,500]
}
class OasisPetroleumJobScraper(IcimsJobScraper):
    """iCIMS-based job scraper preconfigured for Oasis Petroleum."""
    def __init__(self):
        super(OasisPetroleumJobScraper, self).__init__(COMPANY)
def get_scraper():
    """Factory entry point used by the scraper framework."""
    return OasisPetroleumJobScraper()
# Allow running this scraper standalone from the command line.
if __name__ == '__main__':
    job_scraper = get_scraper()
    job_scraper.scrape_jobs()
| [
"thayton@neekanee.com"
] | thayton@neekanee.com |
863046733c83dda48b2b4afcc90be17c2f9b0841 | 7ff0077a55f6bf4a74704e430f354aeabaae3e0b | /tensorflow_probability/python/bijectors/weibull_test.py | c1289dbf4e865ef6940993c305076a246fcf7033 | [
"Apache-2.0"
] | permissive | markaduol/probability | 50a1d97810d11c747bd9546f977b2937c9e04d78 | 8af21dff96502a5bdc01b1be2c595043a3efc5d1 | refs/heads/master | 2020-03-29T20:50:26.001297 | 2018-09-25T21:51:10 | 2018-09-25T21:51:50 | 150,333,784 | 0 | 1 | Apache-2.0 | 2018-09-25T21:54:49 | 2018-09-25T21:54:49 | null | UTF-8 | Python | false | false | 2,768 | py | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from scipy import stats
import tensorflow as tf
from tensorflow_probability.python import bijectors as tfb
from tensorflow_probability.python.bijectors import bijector_test_util
class WeibullBijectorTest(tf.test.TestCase):
  """Tests correctness of the weibull bijector."""

  def testBijector(self):
    """Forward/inverse round-trip and log-det-jacobian against scipy."""
    with self.test_session():
      scale = 5.
      concentration = 0.3
      bijector = tfb.Weibull(
          scale=scale, concentration=concentration, validate_args=True)
      self.assertEqual("weibull", bijector.name)
      x = np.array([[[0.], [1.], [14.], [20.], [100.]]], dtype=np.float32)
      # Reference Weibull distribution.  scipy.stats.frechet_r was an
      # alias of weibull_min; it was deprecated in SciPy 1.0 and removed
      # in SciPy 1.6, so use weibull_min directly (same distribution,
      # same parameterization).
      weibull_dist = stats.weibull_min(c=concentration, scale=scale)
      y = weibull_dist.cdf(x).astype(np.float32)
      # forward is the CDF, inverse is the quantile function.
      self.assertAllClose(y, self.evaluate(bijector.forward(x)))
      self.assertAllClose(x, self.evaluate(bijector.inverse(y)))
      # The forward log-det-jacobian of a CDF bijector is the log-pdf.
      self.assertAllClose(
          weibull_dist.logpdf(x),
          self.evaluate(bijector.forward_log_det_jacobian(x, event_ndims=0)))
      # Inverse log-det-jacobian must be the negation of the forward one.
      self.assertAllClose(
          self.evaluate(-bijector.inverse_log_det_jacobian(y, event_ndims=0)),
          self.evaluate(bijector.forward_log_det_jacobian(x, event_ndims=0)),
          rtol=1e-4,
          atol=0.)

  def testScalarCongruency(self):
    # Statistical sanity check that the bijector is a monotonic,
    # well-behaved map on [1, 100].
    bijector_test_util.assert_scalar_congruency(
        tfb.Weibull(scale=20., concentration=0.3),
        lower_x=1.,
        upper_x=100.,
        eval_func=self.evaluate,
        rtol=0.02)

  def testBijectiveAndFinite(self):
    # Check forward/inverse are exact inverses of each other and stay
    # finite on a grid of matched (x, y) points.
    bijector = tfb.Weibull(scale=20., concentration=2., validate_args=True)
    x = np.linspace(1., 8., num=10).astype(np.float32)
    y = np.linspace(
        -np.expm1(-1 / 400.),
        -np.expm1(-16), num=10).astype(np.float32)
    bijector_test_util.assert_bijective_and_finite(
        bijector, x, y, eval_func=self.evaluate, event_ndims=0, rtol=1e-3)
if __name__ == "__main__":
tf.test.main()
| [
"copybara-piper@google.com"
] | copybara-piper@google.com |
2854d57257d3db636fb50901650e1de302aeb079 | 562522946c03d168569cd79f43140c8326441fb4 | /nn.py | c6f15358a68cd1305fd4f5a439e748f625d64abd | [] | no_license | hackingmath/puzzles | 3d5f17d037f97767e14d1c1be51e352dc35928c7 | a3fd8cc976759b655b9eb13db173c461d8ced5ca | refs/heads/master | 2021-06-08T12:55:03.290076 | 2021-06-07T16:43:55 | 2021-06-07T16:43:55 | 159,741,819 | 9 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,596 | py | """DIY Neural Net
Sept. 26, 2019"""
import math
from matrix import Matrix,transpose,multiply
def sigmoid(x):
    """Logistic sigmoid 1 / (1 + e^-x), computed in an overflow-safe way.

    The naive form overflows inside math.exp for large negative x
    (math.exp raises OverflowError once the argument exceeds ~709).
    Branching on the sign keeps the exponent non-positive, so exp()
    can never overflow, while the values agree with the naive formula
    for in-range inputs.
    """
    if x >= 0:
        return 1 / (1 + math.exp(-x))
    # For x < 0: sigmoid(x) = e^x / (1 + e^x), algebraically identical.
    z = math.exp(x)
    return z / (1 + z)
def dsigmoid(y):
    """Derivative of the sigmoid, expressed in terms of its output.

    If y = sigmoid(x), then d(sigmoid)/dx = y * (1 - y); callers pass
    the already-activated value, so no exp() is needed here.
    """
    complement = 1 - y
    return y * complement
class NeuralNetwork(object):
    def __init__(self, input_nodes,hidden_nodes,output_nodes,
                 learningrate):
        """Build a 3-layer (input / hidden / output) network.

        input_nodes, hidden_nodes, output_nodes: layer sizes (ints).
        learningrate: step size applied to the weight updates in train().
        """
        self.input_nodes = input_nodes #int
        self.hidden_nodes = hidden_nodes #int
        self.output_nodes = output_nodes #int
        # Weight matrices: input->hidden and hidden->output, randomly
        # initialised via the project's Matrix helper.
        self.weights_ih = Matrix(self.hidden_nodes,self.input_nodes)
        self.weights_ho = Matrix(self.output_nodes,self.hidden_nodes)
        self.weights_ih.randomize()
        self.weights_ho.randomize()
        self.lr = learningrate
    def activation_function(self,x):
        """Apply the sigmoid element-wise to a column of values.

        Reads the first entry of each row (x[i][0]), so ``x`` is expected
        to be a column-like 2-d structure -- presumably the Matrix produced
        by multiply(); TODO confirm.

        Returns a plain Python list, not a Matrix.  NOTE(review): train()
        feeds this result back into multiply()/transpose(); verify those
        helpers accept plain lists.
        """
        out = [0]*len(x)
        for i, element in enumerate(x):
            out[i] = sigmoid(x[i][0])
        return out
    def train(self,inputs_list,targets_list):
        """One training step: forward pass, then backpropagation update.

        inputs_list / targets_list: transposable containers (Matrix-like);
        both are transposed into column vectors before use.
        """
        inputs = inputs_list.transpose()
        targets = targets_list.transpose()
        # Forward pass: input -> hidden.
        hidden_inputs = multiply(self.weights_ih,inputs)
        hidden_outputs = self.activation_function(hidden_inputs)
        # Forward pass: hidden -> output.
        final_inputs = multiply(self.weights_ho,hidden_outputs)
        final_outputs = self.activation_function(final_inputs)
        # Output-layer error is target minus actual output.
        output_errors = targets - final_outputs
        # Hidden-layer error: output error propagated back through the
        # hidden->output weights.
        hidden_errors = multiply(transpose(self.weights_ho),output_errors)
        # Gradient step on the hidden->output weights.
        # NOTE(review): the factor below uses final_inputs where the usual
        # sigmoid-derivative term is final_outputs * (1 - final_outputs);
        # this looks like a typo -- confirm before relying on it.
        self.weights_ho += self.lr * multiply((output_errors*final_inputs *\
                                               (1.0 - final_outputs)),
                                              transpose(hidden_outputs))
        # Gradient step on the input->hidden weights.
        self.weights_ih += self.lr * multiply((hidden_errors * hidden_outputs *\
                                               (1.0 - hidden_outputs)),
                                              transpose(inputs))
def query(self,inputs_list):
#convert inputs list to 2d array
| [
"noreply@github.com"
] | hackingmath.noreply@github.com |
93e56287fee314f5e72515d1053e8119aadf4c05 | 96e38b89fa057fa0c1cf34e498b4624041dfc6e2 | /BOJ/DFS/Python/16946.py | 9a1a8a8cdd3697ecee32131f3f4d4f30a7f49492 | [] | no_license | malkoG/polyglot-cp | 66059246b01766da3c359dbd16f04348d3c7ecd2 | 584763144afe40d73e72dd55f90ee1206029ca8f | refs/heads/master | 2021-11-24T13:33:49.625237 | 2019-10-06T07:42:49 | 2019-10-06T07:42:49 | 176,255,722 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,357 | py | dx = [-1, 0, 1, 0]
dy = [0, -1, 0, 1]
area = [0] * 1010101
def dfs(field, component, x, y, max_x, max_y, num_of_components):
    """Flood-fill the connected region of '0' cells containing (x, y).

    Every reachable empty cell is labelled ``num_of_components`` in
    ``component`` and the region size is accumulated in the module-level
    ``area`` table.  Coordinates are 1-based; only cells with
    1 <= x <= max_x and 1 <= y <= max_y are visited.

    Implemented with an explicit stack instead of recursion: the grid can
    be large enough that the recursive version exceeds CPython's default
    recursion limit (roughly 1000 frames).
    """
    component[y][x] = num_of_components
    area[num_of_components] += 1
    stack = [(x, y)]
    while stack:
        cx, cy = stack.pop()
        for i in range(4):
            nx = cx + dx[i]
            ny = cy + dy[i]
            if 0 < nx <= max_x and 0 < ny <= max_y and component[ny][nx] == 0 and field[ny][nx] == '0':
                # Mark when pushing so a cell is never queued twice.
                component[ny][nx] = num_of_components
                area[num_of_components] += 1
                stack.append((nx, ny))
# Read the N x M grid and pad it with a one-cell border so neighbour
# lookups never need bounds special-casing.
N,M=map(int, input().split())
component = [ ([0] * (M+2)) for i in range(N+2) ]
field = ["0" * (M+2)]
for i in range(N):
    field.append("0" + input() + "0")
field.append("0" * (M+2))
# Label every connected region of empty ('0') cells; dfs() records each
# region's size in the module-level `area` table.
number_of_components = 1
for y in range(1, N+1):
    for x in range(1, M+1):
        if component[y][x] == 0 and field[y][x] == "0":
            dfs(field, component, x, y, M, N, number_of_components)
            number_of_components += 1
# For each wall cell the answer is 1 (the wall itself) plus the sizes of
# the distinct adjacent regions; empty cells print 0.
for y in range(1, N+1):
    for x in range(1, M+1):
        if field[y][x] == "0":
            print("0", end="")
        else:
            acc = 1
            set_of_components = set()
            for i in range(4):
                set_of_components.add(component[y + dy[i]][x + dx[i]])
            acc += sum([ area[s] for s in set_of_components ])
            # BOJ 16946 requires each count printed modulo 10; without
            # the modulo, counts >= 10 emit multiple digits and corrupt
            # the output grid.
            print(str(acc % 10), end="")
    print()
| [
"rijgndqw012@gmail.com"
] | rijgndqw012@gmail.com |
74a377f3bcaf26e2e0c06cefce6c53b6c4ade061 | 42186fa6507999ce60d334a0f04d9ae2127579cd | /安恒杯10月/ezshop/payment/urls.py | 6c40abef29cdd96038a297c075c72985a8be4cf7 | [] | no_license | Imtinmin/CTF_Challenge | ef8b62b3a4a1741d814d989f795a243257ff6f2b | ea276596f9effdbe0cf9ef4457e2e676e652bb74 | refs/heads/master | 2022-12-21T12:40:40.625562 | 2020-04-30T03:27:56 | 2020-04-30T03:27:56 | 158,999,004 | 18 | 3 | null | 2022-12-10T04:34:27 | 2018-11-25T04:53:04 | PHP | UTF-8 | Python | false | false | 140 | py | from django.urls import path
from .views import checkPayment
# URL namespace for this app, used when reversing ('payment:check').
app_name='payment'
# Routes: /check -> checkPayment view.
urlpatterns = [
    path('check', checkPayment, name='check')
]
"954093370@qq.com"
] | 954093370@qq.com |
7dde5a888030c37e55eb6f400d27220026f4bf54 | 9d16c9badcc3d30ec7eb1c3caa73f3ecd2d8161e | /blog_project/settings.py | 6466a803a5d69312122aebf75c641987bc947d10 | [] | no_license | Jordan-Rob/dj-blog | aca34eb2cd737ee3fd806eb360b615ceceedc9c4 | 81a1414c1213dd3c8731c1195a11f7c456b973d6 | refs/heads/master | 2021-09-27T18:55:48.685682 | 2020-02-19T07:57:49 | 2020-02-19T07:57:49 | 241,149,581 | 0 | 1 | null | 2021-09-22T18:38:44 | 2020-02-17T16:04:21 | Python | UTF-8 | Python | false | false | 3,540 | py | """
Django settings for blog_project project.
Generated by 'django-admin startproject' using Django 3.0.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '+q*^r!04#6#g*pgmf#m_&)4p^v-x#hf^g4a44f$45e$mhsn5m9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'blog.apps.BlogConfig',
'accounts.apps.AccountsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'whitenoise.runserver_nostatic',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'blog_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blog_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
LOGIN_REDIRECT_URL = 'home'
LOGOUT_REDIRECT_URL = 'home'
| [
"jordanrob709@gmail.com"
] | jordanrob709@gmail.com |
7a46a14e687412faa1f5cc1ed42e79d9948cbd85 | db69daa1b517b539d78e4ab79691c33fdb410e8f | /check_friend.py | d31cd515ec1cdaad92e1d1894ae6181aa6afd5ee | [
"MIT"
] | permissive | team55/vkbot | 4cd49faefda81db4aae7db6c0bb6d2204097494f | 34705106560dbf6d96eee8b21cfd6d78e05646ef | refs/heads/master | 2021-01-21T01:39:28.846429 | 2016-06-26T11:16:15 | 2016-06-26T11:16:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,694 | py | import time
import config
import accounts
fields = 'photo_50,country,last_seen'
# allowed.txt: first line is the whitelist of characters permitted in a
# user's name, second line is a space-separated list of banned name
# substrings.  noadd.txt holds the numeric ids of ignored users.
# NOTE(review): these file handles are never closed explicitly.
s = open(accounts.getFile('allowed.txt'), encoding='utf-8').readlines()
noadd = set(map(int, open(accounts.getFile('noadd.txt')).read().split()))
allowed = set(s[0] + ' ')
s = s[1].split()
# Max days since last_seen before a user counts as "offline too long".
offline_allowed = config.get('check_friend.offline_allowed', 'i')
def writeNoadd():
    """Rewrite noadd.txt from scratch with the current ``noadd`` id set."""
    with open(accounts.getFile('noadd.txt'), 'w') as f:
        f.write('\n'.join(map(str, sorted(noadd))))
def appendNoadd(users):
    """Add ``users`` (iterable of ids) to the in-memory ``noadd`` set and
    append them to noadd.txt without rewriting the whole file."""
    noadd.update(users)
    with open(accounts.getFile('noadd.txt'), 'a') as f:
        f.write('\n' + '\n'.join(map(str, sorted(users))))
def check_char(c):
    """True if character ``c`` is in the whitelist loaded from allowed.txt."""
    return c in allowed
# Predicate table used by is_good(): each entry is (test, reason reported
# when the test fails).  ``fr`` is a user dict with the fields requested
# in ``fields`` above (photo_50, country, last_seen, ...).
checks = [
    (lambda fr:'deactivated' not in fr, 'Account is deactivated'),
    (lambda fr:fr['photo_50'] and not fr['photo_50'].endswith('camera_50.png'), 'No avatar'),
    (lambda fr:fr.get('country', {'id':0})['id'] in [0, 1, 2, 3], 'Bad country'),
    (lambda fr:all(check_char(i) for i in fr['first_name'] + fr['last_name']), 'Bad characters in name'),
    (lambda fr:'last_seen' in fr and time.time() - fr['last_seen']['time'] < 3600 * 24 * offline_allowed, 'Offline too long'),
    (lambda fr:not any(i in (fr['first_name'] + ' ' + fr['last_name']).lower() for i in s), 'Bad substring in name'),
    (lambda fr:fr['id'] not in noadd, 'Ignored'),
    (lambda fr:fr['first_name'] != fr['last_name'], 'First name equal to last name'),
]
def is_good(fr, need_reason=False):
    """Run user dict ``fr`` through every predicate in ``checks``.

    With need_reason=False, returns True/False and short-circuits on the
    first failed check.  With need_reason=True, returns a comma-separated
    string of all failure reasons, or None when every check passes.
    """
    reasons = []
    for fun, msg in checks:
        if not fun(fr):
            if need_reason:
                reasons.append(msg)
            else:
                return False
    if need_reason:
        return ', '.join(reasons) or None
    else:
        return True
| [
"kalinochkind@gmail.com"
] | kalinochkind@gmail.com |
97b226cbbe0e31725743a250e4d4740cda6c9572 | 5c3ae39ce3964fab73959052cdece57c263ad52c | /tests/unit/test_payment_method_gateway.py | 786d846ab0f5ab21b7ff22d5d911bae42ad6b95f | [
"MIT"
] | permissive | maneeshd/braintree_python | bb072f8db300797338cf3ccbfa755a45eabb0db2 | 4aa3f4b8a376ea81bf16a053d840efe55ae13675 | refs/heads/master | 2023-08-31T12:04:24.420050 | 2020-02-29T06:40:34 | 2020-02-29T06:48:49 | 243,717,955 | 0 | 0 | MIT | 2020-10-02T20:21:05 | 2020-02-28T08:50:21 | Python | UTF-8 | Python | false | false | 6,835 | py | from tests.test_helper import *
from braintree.payment_method_gateway import PaymentMethodGateway
if sys.version_info[0] == 2:
from mock import MagicMock
else:
from unittest.mock import MagicMock
class TestPaymentMethodGateway(unittest.TestCase):
def test_create_signature(self):
actual_signature = PaymentMethod.signature("create")
expected_signature = [
"billing_address_id",
"cardholder_name",
"customer_id",
"cvv",
"device_data",
"device_session_id",
"expiration_date",
"expiration_month",
"expiration_year",
"number",
"payment_method_nonce",
"paypal_refresh_token",
# NEXT_MAJOR_VERSION remove this parameter as it's been ignored in the gateway
"paypal_vault_without_upgrade",
"token",
{
"billing_address": Address.create_signature()},
{
"options": [
"fail_on_duplicate_payment_method",
"make_default",
"us_bank_account_verification_method",
"verification_merchant_account_id",
"verify_card",
"verification_amount",
"verification_account_type",
{
"adyen":[
"overwrite_brand",
"selected_brand"
]
},
{
"paypal":[
"payee_email",
"order_id",
"custom_field",
"description",
"amount",
{
"shipping":[
"company",
"country_code_alpha2",
"country_code_alpha3",
"country_code_numeric",
"country_name",
"customer_id",
"extended_address",
"first_name",
"last_name",
"locality",
"postal_code",
"region",
"street_address"
]
},
]
},
]
}
]
self.assertEqual(expected_signature, actual_signature)
def test_update_signature(self):
actual_signature = PaymentMethod.update_signature()
expected_signature = [
"billing_address_id",
"cardholder_name",
"cvv",
"device_session_id",
"expiration_date",
"expiration_month",
"expiration_year",
"number",
"token",
"venmo_sdk_payment_method_code",
"device_data",
"fraud_merchant_id",
"payment_method_nonce",
{
"options": [
"make_default",
"us_bank_account_verification_method",
"verify_card",
"verification_amount",
"verification_merchant_account_id",
"verification_account_type",
"venmo_sdk_session",
{
"adyen":[
"overwrite_brand",
"selected_brand"
]
}
]
},
{
"billing_address" : Address.update_signature() + [{"options": ["update_existing"]}]
}
]
self.assertEqual(expected_signature, actual_signature)
def test_nonce_grant_params(self):
"""
We validate parameters to PaymentMethod.grant properly
"""
payment_method_gateway = PaymentMethodGateway(BraintreeGateway(None))
options = { "include_billing_postal_code": True }
with self.assertRaises(ValueError):
payment_method_gateway.grant("", options)
with self.assertRaises(ValueError):
payment_method_gateway.grant("\t", False)
with self.assertRaises(ValueError):
payment_method_gateway.grant(None, True)
def test_nonce_revoke_params(self):
payment_method_gateway = PaymentMethodGateway(BraintreeGateway(None))
with self.assertRaises(ValueError):
payment_method_gateway.revoke("")
with self.assertRaises(ValueError):
payment_method_gateway.revoke("\t")
with self.assertRaises(ValueError):
payment_method_gateway.revoke(None)
def test_delete_with_revoke_all_grants_value_as_true(self):
payment_method_gateway, http_mock = self.setup_payment_method_gateway_and_mock_http()
payment_method_gateway.delete("some_token", {"revoke_all_grants": True})
self.assertTrue("delete('/merchants/integration_merchant_id/payment_methods/any/some_token?revoke_all_grants=true')" in str(http_mock.mock_calls))
def test_delete_with_revoke_all_grants_value_as_false(self):
payment_method_gateway, http_mock = self.setup_payment_method_gateway_and_mock_http()
payment_method_gateway.delete("some_token", {"revoke_all_grants": False})
self.assertTrue("delete('/merchants/integration_merchant_id/payment_methods/any/some_token?revoke_all_grants=false')" in str(http_mock.mock_calls))
def test_delete_without_revoke_all_grants(self):
payment_method_gateway, http_mock = self.setup_payment_method_gateway_and_mock_http()
payment_method_gateway.delete("some_token")
self.assertTrue("delete('/merchants/integration_merchant_id/payment_methods/any/some_token')" in str(http_mock.mock_calls))
def test_delete_with_invalid_keys_to_raise_error(self):
payment_method_gateway, http_mock = self.setup_payment_method_gateway_and_mock_http()
with self.assertRaises(KeyError):
payment_method_gateway.delete("some_token", {"invalid_keys": False})
def setup_payment_method_gateway_and_mock_http(self):
braintree_gateway = BraintreeGateway(Configuration.instantiate())
payment_method_gateway = PaymentMethodGateway(braintree_gateway)
http_mock = MagicMock(name='config.http.delete')
braintree_gateway.config.http = http_mock
return payment_method_gateway, http_mock
| [
"code@getbraintree.com"
] | code@getbraintree.com |
6408cc3a3d782a6b3d7e64ff7926e7380008c136 | 16631cf7cd4a70f2cd2750851649d3eff5e17724 | /2019/day02/part2.py | ec6da2e434b6d2de88016e4776ea6087928eb9f4 | [] | no_license | kynax/AdventOfCode | 1dd609a3308d733f2dd7d4ea00508d2da73180b9 | 36a339241dd7a31ebe08a73e5efa599e5faeea1a | refs/heads/master | 2022-12-21T13:32:52.591068 | 2022-12-16T22:41:30 | 2022-12-16T22:41:30 | 48,439,585 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 348 | py | import sys
from intcode import *
# Program (comma-separated intcode) read from stdin.
m = [int(l) for l in sys.stdin.readline().split(',')]
# Brute-force every (noun, verb) pair in 0..99 until the program
# produces the puzzle's target output.
for verb in range(100):
    for noun in range(100):
        c = IntCode()
        c.mem_init(m)
        c.mem_set(1, noun)
        c.mem_set(2, verb)
        c.run()
        if c.result() == 19690720:
            print(100 * noun + verb)
| [
"guilemay@gmail.com"
] | guilemay@gmail.com |
a91390aea6a8dc9e7b2c54cfb9d54260053f026d | 3a784e3d612cfc58d73eb017b2f1b068a570d55c | /z3/fancy.py | 1bad9d13f5828658b4960d33dcd6715690aa6461 | [] | no_license | DialloMamadou/PPC | 23f527671007952b0b7707716a367aa47b709842 | 03e508a2b2f3314fbb40eab345506b62fcb8e1da | refs/heads/master | 2020-05-23T15:06:43.588228 | 2019-05-05T19:33:09 | 2019-05-05T19:33:09 | 186,819,656 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,837 | py | #!/usr/bin/python -u
# -*- coding: latin-1 -*-
#
# Mr Greenguest puzzle (a.k.a fancy dress problem) in Z3
#
# Problem (and LPL) code in
#
# http://diuflx71.unifr.ch/lpl/GetModel?name=/demo/demo2
#
# """
# (** Mr. Greenfan wants to give a dress party where the male guests
# * must wear green dresses. The following rules are given:
# * 1 If someone wears a green tie he has to wear a green shirt.
# * 2 A guest may only wear green socks and a green shirt
# * if he wears a green tie or a green hat.
# * 3 A guest wearing a green shirt or a green hat or who does
# * not wear green socks must wear a green tie.
# * 4 A guest who is not dressed according to rules 1-3 must
# * pay a $11 entrance fee.
# * Mr Greenguest wants to participate but owns only a green shirt
# * (otherwise he would have to pay one for $9). He could buy
# * a green tie for $10, a green hat (used) for $2 and green socks
# * for $12.
# * What is the cheapest solution for Mr Greenguest to participate?
# *)
# """
#
# This Z3 model was written by Hakan Kjellerstrand (hakank@gmail.com)
# See also my Z3 page: http://hakank.org/z3/
#
from z3_utils_hakank import *
sol = Solver()

# Decision variables (each boolean means "wears the green item"):
# t: tie, h: hat, r: shirt, s: socks; n: pays the $11 entrance fee.
[t,h,r,s,n] = Bools('t h r s n')
# Total money spent: tie $10, hat $2, socks $12, fee $11.
cost = makeIntVar(sol,"cost",0,100)

# constraints (each rule may instead be bought off by paying the fee n)
# Rule 1: a green tie requires a green shirt.
sol.add( Or(Implies(t,r), n))
# Rule 2: green socks or shirt only together with a green tie or hat.
sol.add( Or(Implies(Or(s,r), Or(t,h)), n))
# Rule 3: shirt, hat, or missing socks forces a green tie.
sol.add(Or( Implies(Or(r, h, Not(s)), t), n))
sol.add(cost == 10*t + 2*h + 12*s + 11*n)

# Enumerate solutions with strictly decreasing cost until unsat; the
# last solution printed is the cheapest.
num_solutions = 0
while sol.check() == sat:
  num_solutions += 1
  mod = sol.model()
  print "cost:", mod.eval(cost),
  print [(x, mod.eval(x)) for x in [t,h,r,s,n]]
  getLessSolution(sol,mod,cost)
| [
"hakank@gmail.com"
] | hakank@gmail.com |
1b3d67978f6b4b17f7e6bd2541a0308600324518 | aa9aa0868d857d64603e7b0e9e8cff0e2bbdf189 | /server/migrations/0059_machine_report_format.py | dd178bc844c43e713743a8291a09c6e2fc397df5 | [
"Apache-2.0"
] | permissive | haribert/sal | 2ecafa7d5b7c812f860465f7f4511209d8916526 | 9c80cf9b063ba1cb1fb7649dc6aba7f032de261b | refs/heads/master | 2021-09-06T05:07:46.868175 | 2018-02-02T16:12:44 | 2018-02-02T16:12:44 | 112,587,211 | 0 | 0 | null | 2017-11-30T08:44:43 | 2017-11-30T08:44:43 | null | UTF-8 | Python | false | false | 557 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-09-30 12:03
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add Machine.report_format: which encoding the client used when
    submitting its report (base64 or base64+bz2, defaulting to the
    latter).  Not editable through the admin."""

    dependencies = [
        ('server', '0058_auto_20170822_1430'),
    ]

    operations = [
        migrations.AddField(
            model_name='machine',
            name='report_format',
            field=models.CharField(choices=[(b'base64', b'base64'), (b'base64bz2', b'base64bz2')], default=b'base64bz2', editable=False, max_length=256),
        ),
    ]
| [
"graham@grahamgilbert.com"
] | graham@grahamgilbert.com |
a84b044c4ec14bb86cb2f2cf4cafa93abd776f37 | bc6b561958649c391c159d4dd3363c60eeabc7e4 | /mayan/apps/file_caching/tests/test_events.py | f64952560df1b1a38aab02077d4856121e0f301a | [
"Apache-2.0"
] | permissive | chrisranjana/Mayan-EDMS | 37deb105cda268768fea502491ae875ff905e0e9 | 34b414ce49a2eb156e27dc1a2915e52121c9d1b7 | refs/heads/master | 2020-12-22T13:50:41.263625 | 2020-01-28T18:45:24 | 2020-01-28T18:45:24 | 236,804,825 | 0 | 1 | NOASSERTION | 2020-01-28T18:12:53 | 2020-01-28T18:12:52 | null | UTF-8 | Python | false | false | 1,096 | py | from __future__ import unicode_literals
from actstream.models import Action
from mayan.apps.common.tests.base import BaseTestCase
from ..events import event_cache_created, event_cache_purged
from ..models import Cache
from .mixins import CacheTestMixin
class CacheEventsTestCase(CacheTestMixin, BaseTestCase):
    """Verify Cache model operations emit the expected actstream events."""

    def test_cache_create_event(self):
        # Creating a cache must add exactly one Action, with the
        # cache-created verb and the new cache as its target.
        action_count = Action.objects.count()
        self._create_test_cache()
        self.assertEqual(Action.objects.count(), action_count + 1)
        event = Action.objects.first()
        cache = Cache.objects.last()
        self.assertEqual(event.verb, event_cache_created.id)
        self.assertEqual(event.target, cache)

    def test_cache_purge_event(self):
        # Purging must add exactly one Action, with the cache-purged
        # verb and the purged cache as its target.
        self._create_test_cache()
        action_count = Action.objects.count()
        self.test_cache.purge()
        self.assertEqual(Action.objects.count(), action_count + 1)
        event = Action.objects.first()
        cache = Cache.objects.last()
        self.assertEqual(event.verb, event_cache_purged.id)
        self.assertEqual(event.target, cache)
| [
"roberto.rosario@mayan-edms.com"
] | roberto.rosario@mayan-edms.com |
a346ab92b72621f5e8966b5d8b20365ea9816590 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02854/s614017761.py | 93c7fca03d5483bad4cda9f6e10005518c14e660 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 166 | py | n = int(input())
a = list(map(int,input().split()))
r = ans = sum(a)
l = 0
ans = float('inf')
for i in a:
r -= i
l += i
ans = min(ans,abs(r-l))
print(ans) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
b3ca48aa937f678d46460cb290f303d6d3d024ae | d11a77c7230880973fc48cb64f86f621b642b3e9 | /solutions/video_similarity_search/quick_deploy/server/src/operations/search.py | 3087edb4814e19e9ab7cc1f6b9f0f15189c46203 | [
"Apache-2.0"
] | permissive | parsa-ra/bootcamp | 22bd6082948aad13b14537582b321ae570b4dc7a | f881a562751c673f45eaba063803adcb472fc1da | refs/heads/master | 2023-08-10T19:50:15.334127 | 2021-09-18T07:16:16 | 2021-09-18T07:16:16 | 407,830,782 | 0 | 0 | Apache-2.0 | 2021-09-18T10:36:49 | 2021-09-18T10:36:48 | null | UTF-8 | Python | false | false | 838 | py | import sys
from logs import LOGGER
sys.path.append("..")
from config import TOP_K
from config import DEFAULT_TABLE
def do_search(host, table_name, img_path, model, milvus_client, mysql_cli):
    """Find the TOP_K stored items most similar to the query image.

    Extracts a ResNet-50 feature from ``img_path``, searches it in the
    Milvus collection ``table_name`` (falling back to DEFAULT_TABLE),
    resolves the hit ids to file paths via MySQL and rewrites each path
    into a download URL on ``host``.

    Returns (urls, distances); on any failure the error is logged and the
    process exits (sys.exit on the line that follows the handler).
    """
    if not table_name:
        table_name = DEFAULT_TABLE
    try:
        feat = model.resnet50_extract_feat(img_path)
        vectors = milvus_client.search_vectors(table_name, [feat], TOP_K)
        vids = [str(x.id) for x in vectors[0]]
        paths = mysql_cli.search_by_milvus_ids(vids, table_name)
        distances = [x.distance for x in vectors[0]]
        # Rewrite each stored path into a URL served by this API host.
        for i in range(len(paths)):
            tmp = "http://" + str(host) + "/data?gif_path=" + str(paths[i])
            paths[i] = tmp
        return paths, distances
    except Exception as e:
        LOGGER.error(" Error with search : {}".format(e))
sys.exit(1) | [
"shiyu.chen@zilliz.com"
] | shiyu.chen@zilliz.com |
e1bfb943e312a2e57db0c78c33fa8d0db2f45f44 | 81f999d6f8e622542212e6fc2b5e328b06ced75d | /admin/post.py | b02de7c9f90fe7b52e9071d0c01a5cb86bcc7e90 | [] | no_license | lujinda/zjypan | 37beab246b1ceb84ae24330b742d3a9bf7a635a5 | fcc2a8ff221eeaebaced84735b3e12b3584efc8c | refs/heads/master | 2021-01-01T20:49:04.984315 | 2015-07-12T08:25:33 | 2015-07-12T08:25:33 | 29,416,929 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,806 | py | #!/usr/bin/env python
#coding:utf8
# Author : tuxpy
# Email : q8886888@qq.com
# Last modified : 2015-02-22 21:56:00
# Filename : admin/post.py
# Description :
from .base import AdminHandler
from page.do import write_post, get_post_list, get_post, del_post
from public.do import swith_time
from lib.wrap import auth_log_save
class AdminListPostHandler(AdminHandler):
    """Admin page listing all posts, with bulk actions (currently: delete)."""
    def get(self):
        # Render the full post list; swith_time formats timestamps in the
        # template.
        self.render('post/list.html', post_list = get_post_list(),
                swith_time = swith_time)

    @auth_log_save
    def post(self):
        """Dispatch a bulk action over the checked posts.

        NOTE(review): uses assert for request validation, which is
        stripped under ``python -O`` -- consider raising an HTTP error
        instead.
        """
        action = self.get_query_argument('action')
        assert action in ('del', )
        post_uuid_list = self.get_arguments('checked_post')
        assert post_uuid_list
        # Resolve the handler method by name, e.g. 'del' -> self.do_del.
        do_func = getattr(self, 'do_' + action, None)
        self.redirect(self.referer or 'list')
        return do_func(post_uuid_list)

    def do_del(self, post_uuid_list):
        # Delete every selected post; the returned string is the action
        # label recorded by @auth_log_save on post().
        for post_uuid in post_uuid_list:
            del_post(post_uuid)
        return '删除通告'
class AdminWritePostHandler(AdminHandler):
    """Admin page for creating a new post or editing an existing one."""
    def get(self):
        # With a post_uuid query argument this edits that post; an empty
        # uuid presumably yields a blank post object -- verify get_post('').
        post_uuid = self.get_query_argument('post_uuid', '')
        post = get_post(post_uuid)
        self.render('post/write.html', post = post)

    @auth_log_save
    def post(self):
        post_title = self.get_argument('post_title')
        post_content = self.get_argument('post_content')
        # Checkbox field: true only when the form sent 'yes'.
        post_important = self.get_argument('post_important', None) == 'yes'
        post_uuid = self.get_argument('post_uuid', None)
        write_post(post_title = post_title, post_content = post_content,
                post_important = post_important, post_uuid = post_uuid)
        self.redirect('list')
        # Return the action label logged by @auth_log_save: edit when a
        # uuid was supplied, publish otherwise.
        if post_uuid:
            return '编辑通告'
        else:
            return '发布通告'
| [
"q8886888@gmail.com"
] | q8886888@gmail.com |
4a5864386c1107faa4a2a4a9a3af41fecc137e9c | 82cba93ed3339150dcbccc1e3a245f7284edb8ed | /t.py | 7c820b15876f323d1c459455ba089bdd030e14be | [] | no_license | boyi880322/python | c10ca906cb6db7e143071f7c830eb9d9741ee05b | 375e6d532f229f5a508ca8396b6cd1aa77aa6c46 | refs/heads/main | 2023-02-12T13:42:29.393617 | 2021-01-12T02:24:35 | 2021-01-12T02:24:35 | 325,202,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | name = input("姓名:")
while name != "":
h = float(input("身高(公尺):"))
w = int(input("體重(公斤):"))
BMI = w / h ** 2
print("{}的BMI是:{}".format(name, BMI))
name = input("姓名:") | [
"skynet.tw@gmail.com"
] | skynet.tw@gmail.com |
a0545f0e647e3e695ae19d66971e2bec4f38d109 | 832b4121b7d49b56ce9446cb1437b401a037b43f | /python/part03-scripts/save_the_world_v1.py | a19df8f23e84963a54b2391eedf291aad34aa769 | [] | no_license | dleehr/gcb-academy-python | 407ad2fef3f73d7e2485c7839b20c0cf53050e07 | 3a2d191ac33b8e08c2380856e01bbc65b5fd5eec | refs/heads/master | 2021-01-18T14:01:53.320240 | 2015-02-03T19:05:02 | 2015-02-03T19:05:02 | 29,843,584 | 0 | 0 | null | 2015-02-02T18:56:08 | 2015-01-26T03:38:36 | Python | UTF-8 | Python | false | false | 479 | py | import fileinput
import re
for line in fileinput.input():
match = re.search('^(.*)\t(20\d\d)-(\d\d)-(\d\d)\t(\d+\.?\d*)$', line)
if match:
fields = [
match.group(2), # year
match.group(3), # month
match.group(4), # day
match.group(1), # site
match.group(5) # value
]
print ','.join(fields)
else:
print "Line {} did not match!".format(fileinput.lineno())
| [
"whitews@gmail.com"
] | whitews@gmail.com |
c38311c336acbeb73379afa6ba0e49ecee97a5c4 | 04875545151aa1ef547c2c47ae36b9c90254317b | /example/SyntheticParameterized/basis.py | 0b99aa22b0201ff5e38f543e5a7992c74b0cc8a5 | [
"MIT"
] | permissive | treverhines/PSGI | 5a49da0a24e2460350b280f2437229de51ea3364 | 356ca2208fc24d51062417126807d79dd79df73c | refs/heads/master | 2021-03-12T19:57:37.642196 | 2015-11-14T02:08:20 | 2015-11-14T02:08:20 | 39,227,339 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,683 | py | #!/usr/bin/env python
#
# This script defines variables and functions which are needed for
# plotting purposes and for using WriteRegularization.py which forms a
# regularization matrix by collocation
#
# Variables in all caps are required for other scripts to run and
# this script must also define the slip and fluidity basis functions
#
from __future__ import division
from rbf.bspline import augmented_knots
from rbf.bspline import natural_knots
from rbf.bspline import bspnd as bspline_nd
from modest import linear_to_array_index
from modest import Perturb
import transform as trans
import pickle
import numpy as np
## Define parameters for slip basis function geometry
######################################################################
# One entry per fault segment: anchor point (lon, lat), extent, orientation
# in degrees, number of B-spline basis functions along length/width, and the
# spline order in each direction.
FAULT_ANCHOR = [[-116.0,32.0]]
FAULT_LENGTH = [50000.0]
FAULT_WIDTH = [20000.0]
FAULT_STRIKE = [0.0]
FAULT_DIP = [60.0]
FAULT_NLENGTH = [10]
FAULT_NWIDTH = [4]
FAULT_ORDER = [[0,0]]
# Single rectangular fluidity volume: anchor (lon, lat), orientation, extent,
# and the B-spline discretization/order in each of the three directions.
FLUIDITY_ANCHOR = [-119.25,35.0]
FLUIDITY_STRIKE = 90.0
FLUIDITY_LENGTH = 600000.0
FLUIDITY_WIDTH = 600000.0
FLUIDITY_THICKNESS = 150000.0
FLUIDITY_NLENGTH = 1
FLUIDITY_NWIDTH = 1
FLUIDITY_NTHICKNESS = 5
FLUIDITY_ORDER = [0,0,3]
######################################################################
# total number of slip basis functions summed over all fault segments
FAULT_N = sum(l*w for l,w in zip(FAULT_NLENGTH,FAULT_NWIDTH))
# total number of fluidity basis functions
FLUIDITY_N = FLUIDITY_NLENGTH*FLUIDITY_NWIDTH*FLUIDITY_NTHICKNESS
FAULT_SEGMENTS = len(FAULT_ANCHOR)
FAULT_TRANSFORMS = []
FAULT_KNOTS = []
# NOTE(review): the file handle is opened in text mode ('r') and never
# closed; pickle.load requires binary mode ('rb') on Python 3 -- confirm
# this module is Python 2 only, otherwise use `with open(..., 'rb')`.
BASEMAP = pickle.load(open('basemap.pkl','r'))
# find knots for faults
for d in range(FAULT_SEGMENTS):
    xc,yc = BASEMAP(*FAULT_ANCHOR[d])
    # maps the unit rectangle onto the dipping fault plane in map coordinates
    t = trans.point_stretch([FAULT_LENGTH[d],FAULT_WIDTH[d],1.0])
    t += trans.point_rotation_x(FAULT_DIP[d]*np.pi/180)
    t += trans.point_rotation_z(np.pi/2.0 - FAULT_STRIKE[d]*np.pi/180)
    t += trans.point_translation([xc,yc,0.0])
    # create knots defining B-splines for slip on a rectangle x = [0,1]
    # and y = [-1,0]
    fault_knots_x = natural_knots(FAULT_NLENGTH[d],
                                  FAULT_ORDER[d][0],side='both')
    fault_knots_y = natural_knots(FAULT_NWIDTH[d],
                                  FAULT_ORDER[d][1],side='both') - 1.0
    FAULT_TRANSFORMS += [t]
    FAULT_KNOTS += [(fault_knots_x,fault_knots_y)]
# find knots for fluidity
xc,yc = BASEMAP(*FLUIDITY_ANCHOR)
t = trans.point_stretch([FLUIDITY_LENGTH,FLUIDITY_WIDTH,FLUIDITY_THICKNESS])
t += trans.point_rotation_z(np.pi/2.0 - FLUIDITY_STRIKE*np.pi/180)
t += trans.point_translation([xc,yc,0.0])
fluidity_knots_x = natural_knots(FLUIDITY_NLENGTH,
                                 FLUIDITY_ORDER[0],side='both')
fluidity_knots_y = natural_knots(FLUIDITY_NWIDTH,
                                 FLUIDITY_ORDER[1],side='both') - 1.0
fluidity_knots_z = natural_knots(FLUIDITY_NTHICKNESS,
                                 FLUIDITY_ORDER[2],side='none') - 1.0
FLUIDITY_TRANSFORM = t
FLUIDITY_KNOTS = (fluidity_knots_x,fluidity_knots_y,fluidity_knots_z)
def slip(x,coeff,segment=None,diff=None):
    '''
    Takes positions, x, and slip coefficients, coeff, and returns the
    values for slip.

    Parameters
    ----------
    x : array of positions; transformed into each fault's local frame.
    coeff : sequence of length FAULT_N, one coefficient per basis function,
        ordered segment by segment.
    segment : int, optional
        If given, only the coefficients belonging to that fault segment are
        used; otherwise all segments contribute.
    diff : optional derivative specification passed through to bspline_nd.

    Returns
    -------
    array of length len(x) with the evaluated slip.
    '''
    out = np.zeros(len(x))
    assert len(coeff) == FAULT_N, (
        'coefficient list must have length %s' % FAULT_N)
    if segment is None:
        # accumulate contributions from every segment; minN is the offset of
        # the current segment's coefficients within coeff
        minN = 0
        for d in range(FAULT_SEGMENTS):
            t = FAULT_TRANSFORMS[d].inverse()
            # only the in-plane coordinates are needed on the fault surface
            fx = t(x)[:,[0,1]]
            shape = FAULT_NLENGTH[d],FAULT_NWIDTH[d]
            order = FAULT_ORDER[d]
            maxN = minN + np.prod(shape)
            for n in range(minN,maxN):
                idx = linear_to_array_index(n-minN,shape)
                out += coeff[n]*bspline_nd(fx,FAULT_KNOTS[d],idx,order,diff=diff)
            minN += np.prod(shape)
    else:
        s = segment
        # offset of segment s's coefficients; replaces the original loop that
        # also computed an unused maxN on every iteration (dead code)
        minN = sum(FAULT_NLENGTH[d]*FAULT_NWIDTH[d] for d in range(s))
        shape = FAULT_NLENGTH[s],FAULT_NWIDTH[s]
        maxN = minN + np.prod(shape)
        t = FAULT_TRANSFORMS[s].inverse()
        fx = t(x)[:,[0,1]]
        order = FAULT_ORDER[s]
        for n in range(minN,maxN):
            idx = linear_to_array_index(n-minN,shape)
            out += coeff[n]*bspline_nd(fx,FAULT_KNOTS[s],idx,order,diff=diff)
    return out
def fluidity(x,coeff,diff=None):
    '''Evaluate the fluidity B-spline expansion at the positions x.

    coeff holds FLUIDITY_N coefficients, one per 3-D basis function; diff is
    forwarded to bspline_nd to request derivatives.
    '''
    # map positions into the fluidity volume's local coordinates
    fx = FLUIDITY_TRANSFORM.inverse()(x)
    shape = (FLUIDITY_NLENGTH,FLUIDITY_NWIDTH,FLUIDITY_NTHICKNESS)
    result = np.zeros(len(x))
    for n in range(FLUIDITY_N):
        idx = linear_to_array_index(n,shape)
        basis = bspline_nd(fx,FLUIDITY_KNOTS,idx,FLUIDITY_ORDER,diff=diff)
        result += coeff[n]*basis
    return result
if __name__ == '__main__':
    # Visual sanity check: draw two cross sections through the fluidity
    # volume, the fault patches, and the station locations, all with random
    # basis coefficients.
    from myplot.xsection import XSection
    import mayavi.mlab
    bm = BASEMAP
    # stations.txt columns: name, lon, lat (lon/lat converted via the basemap)
    sta_array = np.loadtxt('stations.txt',dtype=str)
    sta_pos = np.array(sta_array[:,[1,2]],dtype=float)
    sta_pos_x,sta_pos_y = bm(sta_pos[:,0],sta_pos[:,1])
    fluidity_transforms = []
    x,y = bm(*FLUIDITY_ANCHOR[:2])
    length = FLUIDITY_LENGTH
    width = FLUIDITY_WIDTH
    thickness = FLUIDITY_THICKNESS
    # first cross section: vertical plane along the volume's length
    t = trans.point_stretch([FLUIDITY_LENGTH,
                             FLUIDITY_THICKNESS,
                             1.0])
    t += trans.point_rotation_x(np.pi/2.0)
    t += trans.point_translation([0.0,-width/2.0,0.0])
    t += trans.point_rotation_z(np.pi/2.0 - FLUIDITY_STRIKE*np.pi/180)
    t += trans.point_translation([x,y,0.0])
    fluidity_transforms += [t]
    # second cross section: vertical plane perpendicular to the first
    t = trans.point_stretch([FLUIDITY_WIDTH,
                             FLUIDITY_THICKNESS,
                             1.0])
    t += trans.point_rotation_x(np.pi/2.0)
    t += trans.point_rotation_z(-np.pi/2.0)
    t += trans.point_translation([FLUIDITY_LENGTH/2.0,
                                  0.0,
                                  0.0])
    t += trans.point_rotation_z(np.pi/2.0 - FLUIDITY_STRIKE*np.pi/180)
    t += trans.point_translation([x,y,0.0])
    fluidity_transforms += [t]
    xs1 = XSection(fluidity,
                   f_args=(np.random.random(FLUIDITY_N),),
                   base_square_y=(-1,0),
                   transforms = fluidity_transforms,
                   clim = (0,1))
    xs2 = XSection(fluidity,
                   f_args=(np.random.random(FLUIDITY_N),),
                   base_square_y=(-1,0),
                   transforms = FAULT_TRANSFORMS)
    xs1.draw()
    xs2.draw(color=(0.2,0.2,0.2),opacity=0.5)
    mayavi.mlab.points3d(sta_pos_x,sta_pos_y,0*sta_pos[:,1],scale_factor=10000)
    xs1.view()
    # second figure: random slip distribution on the fault patches
    coeff = np.random.random(FAULT_N)
    xs1 = XSection(slip,
                   f_args=(coeff,),
                   base_square_y=(-1,0),
                   transforms = FAULT_TRANSFORMS,
                   clim=(0,1))
    xs1.draw()
    xs1.view()
    # NOTE(review): this trailing assignment is never used
    coeff = np.random.random(FLUIDITY_N)
| [
"treverhines@gmail.com"
] | treverhines@gmail.com |
27369ce439c746edb215195782810276fff77a6f | e389ca9d52230140038082e3111ce41db1c00514 | /SocialMedia_API/settings.py | ee05062973c3aadfdaccfa3fcdaaf56a19f68cc0 | [] | no_license | DharmendraB/SocialMedia_API | dbd94af3fb71ae1db6dbdaf25bb429bc4648399f | 9cc134525b90f3fbba31b7442688fc7baf41c0e1 | refs/heads/main | 2023-03-29T04:15:25.286277 | 2021-04-04T04:59:08 | 2021-04-04T04:59:08 | 354,458,714 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,197 | py | """
Django settings for SocialMedia_API project.
Generated by 'django-admin startproject' using Django 3.0.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before deploying.
SECRET_KEY = '2@!h#6ku@5g%d+e$0bjq%(nm%%7_%x*^ofgtkl97q6bj2cvngf'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'social.apps.SocialConfig',
    'registration',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'crispy_forms',
    'social_django', # <-- Here social-auth-app-django
]
CRISPY_TEMPLATE_PACK = 'bootstrap4'
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'social_django.middleware.SocialAuthExceptionMiddleware', # <-- Here Social
]
ROOT_URLCONF = 'SocialMedia_API.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'social_django.context_processors.backends', # <-- Here
                'social_django.context_processors.login_redirect', # <-- Here
            ],
        },
    },
]
# Social logins (Facebook/Twitter/GitHub) are tried first, then the normal
# Django username/password backend.
AUTHENTICATION_BACKENDS = (
    'social_core.backends.facebook.FacebookOAuth2',
    'social_core.backends.twitter.TwitterOAuth',
    'social_core.backends.github.GithubOAuth2',
    'django.contrib.auth.backends.ModelBackend',
)
WSGI_APPLICATION = 'SocialMedia_API.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
# Managing Media Code here
MEDIA_ROOT = os.path.join(BASE_DIR,'media')
MEDIA_URL = '/media/'
# Registration (django-registration) settings: activation window and the
# SMTP account used for activation mails.
# NOTE(review): real SMTP credentials must not be hard-coded here; read them
# from the environment.
ACCOUNT_ACTIVATION_DAYS=3
EMAIL_HOST= 'smtp.gmail.com'
EMAIL_HOST_USER= 'XXXXXX@gmail.com'
EMAIL_HOST_PASSWORD= 'XXXXXXXX'
EMAIL_PORT= 587
EMAIL_USE_TLS= True
# Direct Login Redirect Code here
LOGIN_REDIRECT_URL = "/"
SOCIAL_AUTH_FACEBOOK_KEY = 'XXXXXXX' # App ID
SOCIAL_AUTH_FACEBOOK_SECRET = 'XXXXXXX' # App Secret | [
"ghldharmendra@gmail.com"
] | ghldharmendra@gmail.com |
ef7fdd81b5556be3d11abd7c8ad2872d3efcf5dc | 713f9168a7ba68740bb9b4ea6994e853a56d2d5c | /python/2019-10-07/shout.py | 075131c1205615b5f917df669e9b52537c71c00b | [] | no_license | marko-knoebl/courses-code | ba7723c9a61861b037422670b98276fed41060e2 | faeaa31c9a156a02e4e9169bc16f229cdaee085d | refs/heads/master | 2022-12-29T02:13:12.653745 | 2022-12-16T09:21:18 | 2022-12-16T09:21:18 | 142,756,698 | 16 | 10 | null | 2022-03-08T22:30:11 | 2018-07-29T11:51:04 | Jupyter Notebook | UTF-8 | Python | false | false | 321 | py | def shout(phrase, end="!"):
"""Prints a phrase in capital letters.
A second optional parameter can mark the end.
"""
# .upper() is a string function that converts the
# string to uppercase
upper = phrase.upper()
print(upper + end)
# demo calls: the default end marker is "!", the last call overrides it with "."
shout("hello") # HELLO!
shout("hey")
shout("hi", ".") # HI.
| [
"marko.kn@gmail.com"
] | marko.kn@gmail.com |
0fc4a06e03869a0850aefd5b5e8684092faaa024 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_108/ch31_2020_03_23_21_09_21_287563.py | ae11e5f03385199948261d80790e123dffe6af98 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 182 | py | def eh_primo(n):
if n < 2:
return False
elif n == 2:
return True
for x in range(2,n)
if n % x == 0:
return False
return True | [
"you@example.com"
] | you@example.com |
feb9dd83d3dcf1efadf822ae27bb091c2b222382 | b8c373c2265e894c2f6097457051290152caedc3 | /project_ini/questionnaire/migrations/0004_auto_20170312_2344.py | 52f1cb82be650eeb6d908c2d59af33d73dc22b12 | [] | no_license | alikhundmiri/ini_alpha | b17b8d4c7e6064a5c538d279453e53bbae2c042a | ef3a558287e9de8d01ddda354d850d03226a9bf4 | refs/heads/master | 2021-01-19T09:59:54.817258 | 2017-04-10T12:34:17 | 2017-04-10T12:34:17 | 87,807,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-12 23:44
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    # Auto-generated by Django; must run after migration 0003 of this app.
    dependencies = [
        ('questionnaire', '0003_auto_20170312_2327'),
    ]
    operations = [
        migrations.AlterField(
            model_name='questions_info',
            name='publish',
            # NOTE(review): the default is the fixed timestamp captured when
            # makemigrations ran, not "now" at runtime -- typical artifact of
            # declaring the model default as timezone.now() (called) instead
            # of timezone.now (callable).
            field=models.DateField(default=datetime.datetime(2017, 3, 12, 23, 44, 25, 275866, tzinfo=utc)),
        ),
    ]
"salikhundmiri@gmail.com"
] | salikhundmiri@gmail.com |
9e45a9a79b2f82434136ae93f6a234460d3ce2c8 | 816232db2f21e193612eaa60eda0d5897d31caaf | /Inflearn/2일차-코드구현 능력/5.py | 75ab790ecc133625aaf2085bb608b959feb78934 | [] | no_license | Juyoung4/StudyAlgorithm | a60bfa7657eac57f59200bfa204aff1ad27c79f8 | 4b190e0bfeb268bef4be00ae9bedd9ca8946fbd6 | refs/heads/master | 2023-08-31T04:37:07.422641 | 2021-09-27T08:38:09 | 2021-09-27T08:38:09 | 282,757,306 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,529 | py | # 점수 계산
"""
[Problem]
To award bonus points for answering consecutively on a test made of several
O/X questions, the score is computed as follows:
- A correct answer to question 1 counts as 1 point.
- The first correct answer after one or more wrong answers counts as 1 point.
- Within a run of consecutive correct answers the 2nd counts 2 points, the
  3rd 3 points, ..., the K-th K points.
- A wrong answer counts 0 points.
(ex)
For 10 O/X questions where a correct answer is marked 1 and a wrong one 0,
the sequence 1011100110 is scored per question as
1012300120 => 1+1+2+3+1+2 = 10 points.
[INPUT EX]
10
1 0 1 1 1 0 0 1 1 0
[OUTPUT EX]
10
"""
def solution(T):
score = list(map(int, input().split()))
if len(score) != T: return -1
add_ = 1 if score[0] else 0
total = 1 if score[0] else 0
for i in range(1, len(score)):
if not score[i]:
add_ = 0
print(add_, total)
continue
if not score[i-1] and score[i]:
add_ = 1
total += add_
print(add_, total)
continue
if score[i-1] and score[i]:
add_ += 1
total += add_
print(add_, total)
continue
return total
if __name__ == "__main__":
    # first stdin line: number of questions T; second line: the T results
    print(solution(int(input())))
"vallot7@naver.com"
] | vallot7@naver.com |
bac92da3627b53e8162cbf94250ccf181fff620b | 21e177a4d828f4e0a003e9424c4952dbc0b47d29 | /lints/lint_sub_cert_province_must_appear.py | 0232e7991a8e0921aca00c46582fb7e3dd8ac4e6 | [] | no_license | 846468230/Plint | 1071277a55144bb3185347a58dd9787562fc0538 | c7e7ca27e5d04bbaa4e7ad71d8e86ec5c9388987 | refs/heads/master | 2020-05-15T12:11:22.358000 | 2019-04-19T11:46:05 | 2019-04-19T11:46:05 | 182,255,941 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,332 | py | from lints import base
from cryptography import x509
from cryptography.x509.oid import NameOID
from util.time import Time
from util import ca
'''
'''
class subCertProvinceMustAppear(base.LintInterface):
    """BRs 7.1.4.2.2: if subject:organizationName, subject:givenName or
    subject:surname is present and subject:localityName is absent, then
    subject:stateOrProvinceName must appear."""

    def Initialize(self):
        return 0

    def CheckApplies(self, c):
        # This rule only applies to subscriber (leaf) certificates.
        return ca.IsSubscriberCert(c)

    def Execute(self, c):
        try:
            subject = c.subject
            # The requirement is triggered by any of these three fields.
            triggering = (
                subject.get_attributes_for_oid(NameOID.ORGANIZATION_NAME)
                or subject.get_attributes_for_oid(NameOID.GIVEN_NAME)
                or subject.get_attributes_for_oid(NameOID.SURNAME)
            )
            if (triggering
                    and not subject.get_attributes_for_oid(NameOID.LOCALITY_NAME)
                    and not subject.get_attributes_for_oid(NameOID.STATE_OR_PROVINCE_NAME)):
                return base.LintResult(base.LintStatus.Error)
            return base.LintResult(base.LintStatus.Pass)
        except ValueError:
            # A malformed subject makes the certificate unlintable.
            return base.LintResult(base.LintStatus.Fatal)
def init():
    # Register this lint with the framework; it becomes effective at
    # Time.CABGivenNameDate per the CAB Forum Baseline Requirements.
    base.RegisterLint(base.Lint("e_sub_cert_province_must_appear","Subscriber Certificate: subject:stateOrProvinceName MUST appear if the subject:organizationName, subject:givenName, or subject:surname fields are present and subject:localityName is absent.","BRs: 7.1.4.2.2",base.LintSource.CABFBaselineRequirements,Time.CABGivenNameDate,subCertProvinceMustAppear()))
"846468230@qq.com"
] | 846468230@qq.com |
af39a1bf1eb073451ed5e06bcf042d850a88ea85 | edcc2f90e91cc781ed6e305daa5f6cb539533897 | /dataset/py150/utils/ast/child_only.py | b412d1e5a9d1e1bc4ae53d064ec6d2eb17c75efd | [
"MIT"
] | permissive | keiouok/naturalcc | d364639700d137720242a32b74b3ac48d0e94b76 | 7bab9a97331fafac1235fb32de829ff8d572320f | refs/heads/master | 2023-06-03T10:38:42.481107 | 2021-06-20T00:52:52 | 2021-06-20T00:52:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,323 | py | import sys
from ncc.data.constants import (
PAD,
SBT_LEFT_PARENTHESE,
SBT_RIGHT_PARENTHESE,
)
from ncc.data import tokenizer_funcs
from ..constants import (
RECURSION_DEPTH,
MAX_SUBTOKEN_LEN,
NODE_TMP,
)
from copy import deepcopy
# ignore those ast whose size is too large. Therefore set it as a small number
sys.setrecursionlimit(RECURSION_DEPTH) # recursion depth
def child_value2child_only(ast):
    """Move each node's 'value' field into a single-element 'children' list.

    Mutates ``ast`` in place and returns it.  Nodes without a (truthy)
    'value' are left untouched.
    """
    # The node indices are not needed, so iterate over the nodes directly
    # (avoids the unused-key .items() pattern of the original).
    for node in ast.values():
        value = node.get('value', None)
        if value:
            node.pop('value')
            node['children'] = [value]
    return ast
def pad_leaf_nodes(ast, max_len=MAX_SUBTOKEN_LEN):
    '''
    Append a fixed-length subtoken list to every leaf node.

    A leaf is a node whose 'children' holds exactly one string.  That string
    is sub-tokenized, truncated to max_len and right-padded with PAD, e.g.
    VariableName -> [VariableName, [Variable, Name, PAD, PAD, ...]].
    The tree is modified in place and returned.
    '''
    for node in ast.values():
        children = node['children']
        if len(children) == 1 and isinstance(children[0], str):
            pieces = tokenizer_funcs._space_dpu_sub_tokenizer(children[0])[:max_len]
            pieces += [PAD] * (max_len - len(pieces))
            children.append(pieces)
    return ast
def ast2sbt(ast, idx):
    '''
    build structure-based traversal (SBT) sequence of the subtree rooted at
    ``idx``.
    ref: Deep Code Comment Generation

    A leaf (after pad_leaf_nodes children == [token, [subtokens]]) is emitted
    as "( type_token ) type_token"; an interior node wraps the recursively
    emitted children in "( type ... ) type".
    '''
    node = ast[idx]
    # idiom fix: isinstance(...) instead of the original `type(...) == list`
    if len(node['children']) == 2 and isinstance(node['children'][1], list):
        token = node['type'] + '_' + node['children'][0]
        seq = [SBT_LEFT_PARENTHESE, token, SBT_RIGHT_PARENTHESE, token]
    else:
        token = node['type']
        seq = [SBT_LEFT_PARENTHESE, token]
        for child_idx in node['children']:
            seq += ast2sbt(ast, str(child_idx))
        seq += [SBT_RIGHT_PARENTHESE, token]
    return seq
def get_root(ast):
    """Return the index of the first node whose 'parent' is None, or None."""
    return next((idx for idx, node in ast.items() if node['parent'] is None), None)
def delete_root_with_unichild(ast):
    """
    delete root nodes that have only a single child,
    because in such way the head node might be Program/Function/Error and its
    child is the code's AST.

    Walks indices in ascending numeric order; while the current root has
    exactly one child, that child is promoted to root and the old root is
    removed.  Mutates ``ast`` in place and returns it.
    """
    # idiom fix: `sorted(ast, key=int)` replaces the redundant
    # `sorted([idx for idx in ast.keys()], key=int)` of the original.
    for idx in sorted(ast, key=int):
        node = ast[idx]
        if node['parent'] is None and len(node['children']) == 1:
            child_idx = node['children'][0]
            ast[str(child_idx)]['parent'] = None
            ast.pop(idx)
        else:
            break
    return ast
def delete_nodes_with_unichild(ast):
    '''
    delete nodes with single child node
    e.g. [1*NODEFIX1] -> [1*NODEFIX2] -> ['void'] => [1*NODEFIX1] -> ['void']

    The tree is rewritten in place by a depth-first walk from the root:
    every interior node with exactly one child is spliced out, linking its
    parent directly to its child.  The root itself is never removed.
    '''
    def _dfs(idx):
        node = ast[idx]
        # get current node's children indices; if it's a leaf node, stop here
        if not (len(node['children']) == 1 and isinstance(node['children'][0], str)):
            child_ids = node['children']
        else:
            return # reached a leaf node
        # each ast tree generally is parsed from a method, so it has a "program" root node and a "method" node;
        # therefore, if the current node is the root node with a single child, we do not delete it
        while (len(child_ids) == 1) and (node['parent'] is not None):
            # detach this node from its parent's child list
            parent_node = ast[str(node['parent'])]
            del_idx = parent_node['children'].index(int(idx))
            parent_node['children'].pop(del_idx)
            child_idx = child_ids[0]
            # re-parent the single child to this node's parent
            ast[str(child_idx)]['parent'] = node['parent']
            # and splice it into the parent's children at the same position
            parent_node['children'].insert(del_idx, child_idx)
            # delete itself
            ast.pop(idx)
            # continue the collapse from the promoted child
            idx = str(child_idx)
            node = ast[idx]
            # get current node's children indices; if it's a leaf node, stop here
            if not (len(node['children']) == 1 and isinstance(node['children'][0], str)):
                child_ids = node['children']
            else:
                return # reached a leaf node
        for idx in child_ids:
            _dfs(str(idx))
    idx = get_root(ast)
    _dfs(idx)
    return ast
def ast2bin_ast(ast):
    '''ast tree -> binary ast tree

    Any node with more than two children keeps its first child and gets a
    new NODE_TMP node (appended with the next free index) holding all the
    remaining children.  Applied recursively, every interior node ends up
    with at most two children.  Mutates ``ast`` in place and returns it.
    '''
    last_node_idx = sorted(ast.keys(), key=int)[-1]
    def _dfs(idx):
        node = ast[idx]
        # get current node's children indices; if it's a leaf node, stop here
        if not (len(node['children']) == 1 and isinstance(node['children'][0], str)):
            child_ids = node['children']
        else:
            return # reached a leaf node
        if len(child_ids) > 2:
            # add a new temporary node absorbing children 2..n
            nonlocal last_node_idx
            last_node_idx = str(int(last_node_idx) + 1)
            ast[last_node_idx] = {'type': NODE_TMP, 'parent': idx, 'children': child_ids[1:]}
            # this node now has exactly two children
            node['children'] = [child_ids[0], int(last_node_idx)]
            # re-parent the absorbed children onto the new node
            for child_idx in child_ids[1:]:
                ast[str(child_idx)]['parent'] = last_node_idx
            # update current node's children info
            # get current node's children indices; if it's a leaf node, stop here
        if not (len(node['children']) == 1 and isinstance(node['children'][0], str)):
            child_ids = node['children']
        else:
            return # reached a leaf node
        for idx in child_ids:
            _dfs(str(idx))
    idx = get_root(ast)
    _dfs(idx)
    return ast
def reset_indices(ast):
    '''rename ast tree's node indices with consecutive indices

    Nodes are renumbered 0..len(ast)-1 in depth-first order in two passes:
    first every node is re-keyed with a "_<n>" prefix (so new names never
    collide with old ones), then the prefix is stripped, leaving integer
    keys.  Mutates ``ast`` in place and returns it.
    '''
    # NOTE(review): elsewhere in this module the keys are strings, so this
    # early-out comparison of sorted string keys against a list of ints can
    # never be True (and on Python 3 sorting would fail for int keys mixed
    # with str) -- confirm which key type is expected here.
    if sorted(list(ast.keys())) == list(range(len(ast))):
        return ast
    # firstly, resort node index with a prefix "_", e.g. 0 => "_0"
    _idx = 0
    def _dfs(idx, _parent_idx):
        nonlocal _idx
        _new_idx, _idx = f'_{_idx}', _idx + 1 # update for next node
        node = ast.pop(str(idx))
        ast[_new_idx] = node
        # update its parent's children
        if node['parent'] is None:
            pass # current node is the root node; it has no parent entry to fix
        else:
            parent_node = ast[_parent_idx]
            # update its index in its parent node
            parent_node['children'][parent_node['children'].index(idx)] = _new_idx
            # update parent index
            node['parent'] = _parent_idx
        if isinstance(node['children'][0], int): # non-leaf node, traverse its children
            # renumber the children depth-first under the new parent index
            for child_idx in node['children']:
                _dfs(child_idx, _parent_idx=_new_idx)
        else:
            return
    root_idx = get_root(ast)
    _dfs(root_idx, _parent_idx=None)
    # recover name: from _* => *
    node_ids = deepcopy(list(ast.keys()))
    for idx in node_ids:
        node = ast.pop(idx)
        # update children index ("_n" -> n); leaf children are left alone
        if len(node['children']) > 1:
            node['children'] = [int(child_idx[1:]) for child_idx in node['children']]
        # update parent index
        if node['parent'] == None:
            pass
        else:
            node['parent'] = int(node['parent'][1:])
        ast[int(idx[1:])] = node # _idx => idx
    return ast
| [
"yanghece96@gmail.com"
] | yanghece96@gmail.com |
73f09faf8526f34d6d9fda58789407a5e7cc3123 | 5e255ad1360c90478393744586663741a9569c21 | /linebot/v3/audience/models/audience_group_type.py | 02893782b733c181cd30b2473e4ee4459e95ae99 | [
"Apache-2.0"
] | permissive | line/line-bot-sdk-python | d76268e8b542060d6eccbacc5dbfab16960ecc35 | cffd35948238ae24982173e30b1ea1e595bbefd9 | refs/heads/master | 2023-08-31T22:12:31.698183 | 2023-08-28T01:10:09 | 2023-08-28T01:10:09 | 70,553,423 | 1,898 | 1,181 | Apache-2.0 | 2023-09-11T05:14:07 | 2016-10-11T03:42:26 | Python | UTF-8 | Python | false | false | 1,017 | py | # coding: utf-8
"""
LINE Messaging API
This document describes LINE Messaging API. # noqa: E501
The version of the OpenAPI document: 0.0.1
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
import json
import pprint
import re # noqa: F401
from aenum import Enum, no_arg
class AudienceGroupType(str, Enum):
    """
    Audience group type (generated from the LINE Messaging API OpenAPI spec;
    do not edit manually)
    """
    """
    allowed enum values
    """
    UPLOAD = 'UPLOAD'
    CLICK = 'CLICK'
    IMP = 'IMP'
    CHAT_TAG = 'CHAT_TAG'
    FRIEND_PATH = 'FRIEND_PATH'
    RESERVATION = 'RESERVATION'
    APP_EVENT = 'APP_EVENT'
    VIDEO_VIEW = 'VIDEO_VIEW'
    WEBTRAFFIC = 'WEBTRAFFIC'
    IMAGE_CLICK = 'IMAGE_CLICK'
    RICHMENU_IMP = 'RICHMENU_IMP'
    RICHMENU_CLICK = 'RICHMENU_CLICK'
    @classmethod
    # return annotation quoted: the class name is not bound yet when this
    # def is evaluated, and no `from __future__ import annotations` is
    # visible in this file
    def from_json(cls, json_str: str) -> "AudienceGroupType":
        """Create an instance of AudienceGroupType from a JSON string"""
        return AudienceGroupType(json.loads(json_str))
| [
"noreply@github.com"
] | line.noreply@github.com |
21ea2e613c180d34f365b8fba3bcd8715f8abe8c | e7f814227f64aae9ea30dd7c878a9406d0c2380f | /optuna_dashboard/search_space.py | 1f740fde7e59881d31fdf4846bb86acbb835e07e | [
"MIT"
] | permissive | tktran/optuna-dashboard | 8bf5c106cc3c96470c9e281bcb28bd6c92138627 | 38e56010bf9230f8b27c7eeeb7f01a2f65cda7ac | refs/heads/main | 2023-03-31T09:54:14.289445 | 2021-04-04T06:26:46 | 2021-04-04T06:26:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,230 | py | import copy
import threading
from typing import Dict, List, Optional, Set, Tuple
from optuna.distributions import BaseDistribution
from optuna.trial import TrialState, FrozenTrial
# Type aliases: a search space is a collection of (param_name, distribution).
SearchSpaceSetT = Set[Tuple[str, BaseDistribution]]
SearchSpaceListT = List[Tuple[str, BaseDistribution]]
# In-memory search space cache, one _SearchSpace per study id, guarded by
# the lock for concurrent request handlers.
search_space_cache_lock = threading.Lock()
search_space_cache: Dict[int, "_SearchSpace"] = {}
# Only finished-successfully or pruned trials contribute to the search space.
states_of_interest = [TrialState.COMPLETE, TrialState.PRUNED]
def get_search_space(
    study_id: int, trials: List[FrozenTrial]
) -> Tuple[SearchSpaceListT, SearchSpaceListT]:
    """Return the (intersection, union) search space of a study's trials,
    maintaining the per-study cache under the module lock."""
    with search_space_cache_lock:
        if study_id in search_space_cache:
            space = search_space_cache[study_id]
        else:
            space = _SearchSpace()
            search_space_cache[study_id] = space
        space.update(trials)
        return space.intersection, space.union
class _SearchSpace:
    """Incrementally tracks the intersection and union of the parameter
    distributions seen across a study's finished trials."""
    def __init__(self) -> None:
        # Trial number acting as a resume point: trials older than this were
        # already folded in and are skipped on the next update().
        self._cursor: int = -1
        self._intersection: Optional[SearchSpaceSetT] = None
        self._union: SearchSpaceSetT = set()
    @property
    def intersection(self) -> SearchSpaceListT:
        """Parameters common to every scanned trial, sorted by name."""
        if self._intersection is None:
            return []
        intersection = list(self._intersection)
        intersection.sort(key=lambda x: x[0])
        return intersection
    @property
    def union(self) -> SearchSpaceListT:
        """Parameters seen in any scanned trial, sorted by name."""
        union = list(self._union)
        union.sort(key=lambda x: x[0])
        return union
    def update(self, trials: List[FrozenTrial]) -> None:
        """Fold newly finished trials into the cached sets.

        Scans from the newest trial backwards and stops once it reaches
        trials older than the cursor, so finished trials are not rescanned
        on subsequent calls.
        """
        next_cursor = self._cursor
        for trial in reversed(trials):
            if self._cursor > trial.number:
                break
            # A still-running trial pins the cursor so it is revisited later.
            if not trial.state.is_finished():
                next_cursor = trial.number
            if trial.state not in states_of_interest:
                continue
            current = set([(n, d) for n, d in trial.distributions.items()])
            self._union = self._union.union(current)
            if self._intersection is None:
                self._intersection = copy.copy(current)
            else:
                self._intersection = self._intersection.intersection(current)
        self._cursor = next_cursor
| [
"contact@c-bata.link"
] | contact@c-bata.link |
9f706d9d26452ede0c1df501a6d6ac04541e0c77 | ef29c31ef26815a237445b9359da00c4323717d0 | /django/orm/manytomany/models.py | 56c410dff6f951f6196132a5d64b33a8807bbe6a | [] | no_license | gvg4991/TIL-c9 | 3fe59bfe114e3af7f56a9163fa9e7ec83b3f97f0 | 2e4d6b99de2523ac4540cac2acd40342bbd6f9e3 | refs/heads/master | 2020-04-17T17:18:04.146013 | 2019-05-09T06:59:19 | 2019-05-09T06:59:19 | 166,777,138 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,754 | py | from django.db import models
# Create your models here.
# We are going to build a system that records the people who visit a hospital.
# The essential models are Patient and Doctor.
# What kind of relation can express this?
class Doctor(models.Model):
    name = models.TextField()
    # patients = models.ManyToManyField(Patient, through='Reservation') # define the M2M on only one side -- either here or as `doctors` on Patient below!
    # Reverse access from a patient without the explicit field: patient1.doctor_set.all()
class Patient(models.Model):
    name = models.TextField()
    # Forms the N:N relation between Doctor and Patient via Reservation
    # (Doctor:Reservation = 1:N and Patient:Reservation = 1:M, hence
    # Doctor:Patient = M:N).
    doctors = models.ManyToManyField(Doctor, related_name='patients')#, through='Reservation')
    # doctor1.patients.all(): fetches a doctor's patients directly, without going through Reservation
    # related_name renames the default reverse accessor patient_set to patients
# 중계자 역할
# class Reservation(models.Model):
# # Doctor:Reservation = 1:N 관계
# # Patient:Reservation = 1:N 관계
# doctor = models.ForeignKey(Doctor, on_delete=models.CASCADE)
# patient = models.ForeignKey(Patient, on_delete=models.CASCADE)
# doctor1 = Doctor.objects.create(name='kim')
# doctor2 = Doctor.objects.create(name='kang')
# patient1 = Patient.objects.create(name='tom')
# patient2 = Patient.objects.create(name='jhon')
# Reservation.objects.create(doctor=doctor1, patient=patient2)
# Reservation.objects.create(doctor=doctor1, patient=patient1)
# Reservation.objects.create(doctor=doctor2, patient=patient1)
# doctor1.patients.add(patient2)
# >>> doctor1.patients.all()
# >>> patient2.doctors.all()
# doctor1.patients.remove(patient2) == patient2.doctors.remove(doctor1) | [
"14.73oo6o19@gmail.com"
] | 14.73oo6o19@gmail.com |
f73e6a719077834333f26688c0cefb3ca7a0773e | d6aed520d16b5c6d1b36ef4e21e4c0d895b751fe | /blog/models.py | 9a14f374ba058b002c2f20a38cc4888f3bd28990 | [
"MIT"
] | permissive | CoderLambert/DjangoBlog | 22bd71ed29af37847cd17542d21e1f2253975469 | a29290aadc5ace070976dd934a530c9e6fe3bb56 | refs/heads/master | 2021-05-06T02:58:57.162849 | 2017-12-16T18:11:38 | 2017-12-16T18:11:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,636 | py | from django.db import models
from django.core.urlresolvers import reverse
from django.conf import settings
from uuslug import slugify
from django.contrib.sites.models import Site
from DjangoBlog.utils import cache_decorator, logger, cache
from django.utils.functional import cached_property
from django.utils.timezone import now
class BaseModel(models.Model):
    """Abstract base for blog models: adds a slug plus creation/modification
    timestamps, and emits article_save_signal on every save."""
    slug = models.SlugField(default='no-slug', max_length=60, blank=True)
    created_time = models.DateTimeField('创建时间', default=now)
    last_mod_time = models.DateTimeField('修改时间', default=now)
    def save(self, *args, **kwargs):
        # imported here (not at module level) to avoid a circular import
        from DjangoBlog.blog_signals import article_save_signal
        # (Re)build the slug from `title` (articles) or `name` (categories,
        # tags) when it is missing, still the default, or the object is new.
        if not self.slug or self.slug == 'no-slug' or not self.id:
            slug = self.title if 'title' in self.__dict__ else self.name
            self.slug = slugify(slug)
        super().save(*args, **kwargs)
        # type = self.__class__.__name__
        # True exactly when the save was issued only to bump `views`
        # (see Article.viewed), letting listeners skip such saves.
        is_update_views = 'update_fields' in kwargs and len(kwargs['update_fields']) == 1 and kwargs['update_fields'][
            0] == 'views'
        article_save_signal.send(sender=self.__class__, is_update_views=is_update_views, id=self.id)
    def get_full_url(self):
        """Return the absolute https URL of this object on the current site."""
        site = Site.objects.get_current().domain
        url = "https://{site}{path}".format(site=site, path=self.get_absolute_url())
        return url
    class Meta:
        abstract = True
class Article(BaseModel):
    """Blog article (or standalone page, depending on `type`)."""
    STATUS_CHOICES = (
        ('d', '草稿'),
        ('p', '发表'),
    )
    COMMENT_STATUS = (
        ('o', '打开'),
        ('c', '关闭'),
    )
    TYPE = (
        ('a', '文章'),
        ('p', '页面'),
    )
    title = models.CharField('标题', max_length=200, unique=True)
    body = models.TextField('正文')
    pub_time = models.DateTimeField('发布时间', blank=True, null=True)
    status = models.CharField('文章状态', max_length=1, choices=STATUS_CHOICES, default='p')
    comment_status = models.CharField('评论状态', max_length=1, choices=COMMENT_STATUS, default='o')
    type = models.CharField('类型', max_length=1, choices=TYPE, default='a')
    views = models.PositiveIntegerField('浏览量', default=0)
    author = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name='作者', on_delete=models.CASCADE)
    category = models.ForeignKey('Category', verbose_name='分类', on_delete=models.CASCADE, blank=True, null=True)
    tags = models.ManyToManyField('Tag', verbose_name='标签集合', blank=True)
    def __str__(self):
        return self.title
    class Meta:
        ordering = ['-pub_time']
        verbose_name = "文章"
        verbose_name_plural = verbose_name
        get_latest_by = 'created_time'
    def get_absolute_url(self):
        # URL is keyed by id plus the creation date components
        return reverse('blog:detailbyid', kwargs={
            'article_id': self.id,
            'year': self.created_time.year,
            'month': self.created_time.month,
            'day': self.created_time.day
        })
    @cache_decorator(60 * 60 * 10)
    def get_category_tree(self):
        """Return [(name, url), ...] for this article's category ancestry."""
        tree = self.category.get_category_tree()
        names = list(map(lambda c: (c.name, c.get_absolute_url()), tree))
        return names
    def save(self, *args, **kwargs):
        if not self.slug or self.slug == 'no-slug' or not self.id:
            # Only set the slug when the object is created.
            self.slug = slugify(self.title)
        super().save(*args, **kwargs)
    def viewed(self):
        """Increment the view counter; update_fields keeps the save minimal
        and lets BaseModel.save flag it as a views-only update."""
        self.views += 1
        self.save(update_fields=['views'])
    def comment_list(self):
        """Return this article's comments, cached for reuse (a hand-rolled
        equivalent of cache_decorator, keyed per article)."""
        cache_key = 'article_comments_{id}'.format(id=self.id)
        value = cache.get(cache_key)
        if value:
            logger.info('get article comments:{id}'.format(id=self.id))
            return value
        else:
            comments = self.comment_set.all()
            cache.set(cache_key, comments)
            logger.info('set article comments:{id}'.format(id=self.id))
            return comments
    def get_admin_url(self):
        """Return the Django admin change-page URL for this article."""
        info = (self._meta.app_label, self._meta.model_name)
        return reverse('admin:%s_%s_change' % info, args=(self.pk,))
    @cached_property
    def next_article(self):
        # next article: smallest published id greater than this one
        return Article.objects.filter(id__gt=self.id, status='p').order_by('id').first()
    @cached_property
    def prev_article(self):
        # previous article
        # NOTE(review): no explicit order_by here, so Meta.ordering
        # ['-pub_time'] applies, unlike next_article which orders by id --
        # confirm the asymmetry is intended.
        return Article.objects.filter(id__lt=self.id, status='p').first()
class Category(BaseModel):
    """Article category; categories form a tree via parent_category."""
    name = models.CharField('分类名', max_length=30, unique=True)
    parent_category = models.ForeignKey('self', verbose_name="父级分类", blank=True, null=True)
    class Meta:
        ordering = ['name']
        verbose_name = "分类"
        verbose_name_plural = verbose_name
    def get_absolute_url(self):
        return reverse('blog:category_detail', kwargs={'category_name': self.slug})
    def __str__(self):
        return self.name
    @cache_decorator(60 * 60 * 10)
    def get_category_tree(self):
        """
        Recursively collect this category and all of its ancestors
        (self first, root last).
        :return: list of Category
        """
        categorys = []
        def parse(category):
            categorys.append(category)
            if category.parent_category:
                parse(category.parent_category)
        parse(self)
        return categorys
    @cache_decorator(60 * 60 * 10)
    def get_sub_categorys(self):
        """
        Collect this category and all of its descendants.
        :return: list of Category
        """
        categorys = []
        all_categorys = Category.objects.all()
        def parse(category):
            if category not in categorys:
                categorys.append(category)
            childs = all_categorys.filter(parent_category=category)
            for child in childs:
                # NOTE(review): this condition tests `category` (appended just
                # above), so it is always False and `child` is never appended
                # here; `child not in categorys` was probably intended.
                # Harmless in practice because parse(child) appends it anyway.
                if category not in categorys:
                    categorys.append(child)
                parse(child)
        parse(self)
        return categorys
class Tag(BaseModel):
    """Article tag."""
    name = models.CharField('标签名', max_length=30, unique=True)
    def __str__(self):
        return self.name
    def get_absolute_url(self):
        return reverse('blog:tag_detail', kwargs={'tag_name': self.slug})
    @cache_decorator(60 * 60 * 10)
    def get_article_count(self):
        # Number of distinct articles carrying this tag (result cached).
        return Article.objects.filter(tags__name=self.name).distinct().count()
    class Meta:
        ordering = ['name']
        verbose_name = "标签"
        verbose_name_plural = verbose_name
class Links(models.Model):
    """Blogroll (friendly) link."""
    name = models.CharField('链接名称', max_length=30, unique=True)
    link = models.URLField('链接地址')
    # Unique manual ordering position.
    sequence = models.IntegerField('排序', unique=True)
    created_time = models.DateTimeField('创建时间', default=now)
    last_mod_time = models.DateTimeField('修改时间', default=now)
    class Meta:
        ordering = ['sequence']
        verbose_name = '友情链接'
        verbose_name_plural = verbose_name
    def __str__(self):
        return self.name
class SideBar(models.Model):
    """Sidebar widget: displays a block of raw HTML content."""
    name = models.CharField('标题', max_length=100)
    content = models.TextField("内容")
    # Unique manual ordering position.
    sequence = models.IntegerField('排序', unique=True)
    is_enable = models.BooleanField('是否启用', default=True)
    created_time = models.DateTimeField('创建时间', default=now)
    last_mod_time = models.DateTimeField('修改时间', default=now)
    class Meta:
        ordering = ['sequence']
        verbose_name = '侧边栏'
        verbose_name_plural = verbose_name
    def __str__(self):
        return self.name
| [
"liangliangyy@gmail.com"
] | liangliangyy@gmail.com |
f0e798f0316e955e18f9b5b9ff48d942cad9ac7e | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/bob/218dae7f9abb4ae5803d0c739a05c8b6.py | 44d656f33ebed779c4fc9b02201c0d8f51e2d108 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 245 | py | #
# Skeleton file for the Python "Bob" exercise.
#
def hey(input):
    """Answer a remark the way Bob the teenager would.

    Rules, checked in order:
      * silence (empty / whitespace-only) -> "Fine. Be that way!"
      * shouting (all upper case)         -> "Whoa, chill out!"
      * a question (ends with '?')        -> "Sure."
      * anything else                     -> "Whatever."

    The parameter is named ``input`` (shadowing the builtin) to keep the
    exercise's original call interface.
    """
    # Normalize once: surrounding whitespace is ignored everywhere, so a
    # trailing space no longer hides the question mark ("Hi? " -> "Sure.").
    remark = input.strip()
    if remark == "":
        return "Fine. Be that way!"
    if remark.isupper():
        return "Whoa, chill out!"
    if remark.endswith('?'):
        return "Sure."
    return "Whatever."
| [
"rrc@berkeley.edu"
] | rrc@berkeley.edu |
93e8ea6d2bca6946873359d86dfb0118ce399dff | 9095c1a0da8c6ffe914ee6dd9c4708062fd95c9a | /vtpl_api/models/gender.py | c4712ee17882c6e19c587c76af25f03479537928 | [
"MIT"
] | permissive | vtpl1/vtpl_api_py | 2e5338bd08677f12fc7304fb6ac7a32f32af1c93 | d289c92254deb040de925205c583de69802a1c6b | refs/heads/master | 2020-09-10T23:34:21.828350 | 2019-11-15T07:26:53 | 2019-11-15T07:26:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,340 | py | # coding: utf-8
"""
Engine api
Engine APIs # noqa: E501
The version of the OpenAPI document: 1.0.4
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class Gender(object):
    """Enumeration of gender values exposed by the Engine API.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """

    # Allowed enum values.
    MALE = "Male"
    FEMALE = "Female"
    OTHER = "Other"
    NA = "NA"

    # openapi_types maps attribute name -> attribute type;
    # attribute_map maps attribute name -> JSON key in the definition.
    # Both are empty for a plain string enum.
    openapi_types = {
    }
    attribute_map = {
    }

    def __init__(self):  # noqa: E501
        """Gender - a model defined in OpenAPI"""  # noqa: E501
        self.discriminator = None

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [item.to_dict() if hasattr(item, "to_dict") else item
                                for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: val.to_dict() if hasattr(val, "to_dict") else val
                                for key, val in value.items()}
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return isinstance(other, Gender) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"monotosh.das@videonetics.com"
] | monotosh.das@videonetics.com |
3df4a905da8f0e35b240d9567e50007b3ff14528 | ef1bf421aca35681574c03014e0c2b92da1e7dca | /examples/modes/extended_selections.py | 9609014cb98132afba108e9cc0cccf699b92feb4 | [
"MIT"
] | permissive | pyQode/pyqode.core | 74e67f038455ea8cde2bbc5bd628652c35aff6eb | 0ffabebe4f0397d53429024f6f44db3fe97b0828 | refs/heads/master | 2020-04-12T06:36:33.483459 | 2020-01-18T14:16:08 | 2020-01-18T14:16:08 | 7,739,074 | 24 | 25 | MIT | 2020-01-18T14:16:10 | 2013-01-21T19:46:41 | Python | UTF-8 | Python | false | false | 631 | py | """
Minimal example showing the use of the ExtendedSelectionMode.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.qt import QtWidgets
from pyqode.core.api import CodeEdit
from pyqode.core.backend import server
from pyqode.core.modes import ExtendedSelectionMode
if __name__ == '__main__':
    app = QtWidgets.QApplication(sys.argv)
    editor = CodeEdit()
    # Start the pyqode backend process that services this editor.
    editor.backend.start(server.__file__)
    editor.resize(800, 600)
    # modes.append() returns the installed mode instance, printed here.
    print(editor.modes.append(ExtendedSelectionMode()))
    # Open this very script in the editor.
    editor.file.open(__file__)
    editor.show()
    app.exec_()
    editor.close()
    del editor
    del app
| [
"colin.duquesnoy@gmail.com"
] | colin.duquesnoy@gmail.com |
15e97e0097131cf3461a92612e34e80735c8233d | 556e88a954cf031460ea7fdf3791eb968ca4fbdd | /fluent_python/chapter_16/ch16_coroaverager0.py | 4d18e28584350ad23568d896475d11f306ac2cae | [] | no_license | feng-hui/python_books_examples | c696243fcb8305be495f44d1a88a02e7f906b7bd | e38542db7be927cdaa5d85317a58a13b3a13ae25 | refs/heads/master | 2022-03-07T00:37:29.311687 | 2019-09-28T15:15:20 | 2019-09-28T15:15:20 | 122,941,867 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 541 | py | #!/usr/bin/python3
# -*- coding:utf-8 -*-
# @author FH
# @email: capricorn12032126.com
# @time: 2018/12/24 20:55
def averager():
    """Coroutine that yields the running average of the values sent to it.

    Prime it with next() (yields None), then send() numbers; every send()
    returns the average of everything received so far.

    Example (at a terminal):
        coro_avg = averager()
        next(coro_avg)
        coro_avg.send(10) -> 10.0
        coro_avg.send(30) -> 20.0
        coro_avg.send(5)  -> 15.0
    """
    running_total = 0.0
    received = 0
    current_average = None
    while True:
        value = yield current_average
        running_total += value
        received += 1
        current_average = running_total / received
| [
"982698913@qq.com"
] | 982698913@qq.com |
1acf4cb9ad9106bd17384777aa04d54b258c868f | 5016d6d2eb0b66b5d1417001a40a9ec10c983e32 | /python-puka/rpc_client.py | 3ddbf03e07844cb48c3088e911794f338bf83afe | [
"Apache-2.0"
] | permissive | PoeBlu/rabbitmq-tutorials | 6466e463d5ed4e3de8ef9c3ef19ba0768d0857bd | be8f5dd34be04bb31c30d17598bb1cc0b458a2d6 | refs/heads/master | 2023-05-02T03:21:49.824907 | 2015-04-28T17:03:41 | 2015-04-28T17:03:41 | 70,755,760 | 0 | 0 | Apache-2.0 | 2023-04-14T15:25:35 | 2016-10-13T01:11:04 | C# | UTF-8 | Python | false | false | 1,291 | py | #!/usr/bin/env python
import puka
import uuid
class FibonacciRpcClient(object):
    """AMQP RPC client: publishes n to 'rpc_queue' and waits for fib(n)."""
    def __init__(self):
        # Connect and block until the broker connection is established.
        self.client = client = puka.Client("amqp://localhost/")
        promise = client.connect()
        client.wait(promise)
        # Exclusive, server-named reply queue for this client's responses.
        promise = client.queue_declare(exclusive=True)
        self.callback_queue = client.wait(promise)['queue']
        # Start consuming replies; individual calls match on correlation id.
        self.consume_promise = client.basic_consume(queue=self.callback_queue,
                                                    no_ack=True)
    def call(self, n):
        # Unique id to pair this request with its eventual reply.
        correlation_id = str(uuid.uuid4())
        # We don't need to wait on promise from publish, let it happen async.
        self.client.basic_publish(exchange='',
                                  routing_key='rpc_queue',
                                  headers={'reply_to': self.callback_queue,
                                           'correlation_id': correlation_id},
                                  body=str(n))
        # Discard unrelated messages until the correlated reply arrives.
        while True:
            msg_result = self.client.wait(self.consume_promise)
            if msg_result['headers']['correlation_id'] == correlation_id:
                return int(msg_result['body'])
# Demo: issue a single RPC call (Python 2 print statements).
fibonacci_rpc = FibonacciRpcClient()
print " [x] Requesting fib(30)"
response = fibonacci_rpc.call(30)
print " [.] Got %r" % (response,)
| [
"majek04@gmail.com"
] | majek04@gmail.com |
79a64e807ffc1a3e3cf0a52eb76fe385806de058 | a8b37bd399dd0bad27d3abd386ace85a6b70ef28 | /airbyte-ci/connectors/pipelines/pipelines/__init__.py | 371bafaa1370e03bd1b7464db2f095b34966388c | [
"LicenseRef-scancode-free-unknown",
"MIT",
"Elastic-2.0"
] | permissive | thomas-vl/airbyte | 5da2ba9d189ba0b202feb952cadfb550c5050871 | 258a8eb683634a9f9b7821c9a92d1b70c5389a10 | refs/heads/master | 2023-09-01T17:49:23.761569 | 2023-08-25T13:13:11 | 2023-08-25T13:13:11 | 327,604,451 | 1 | 0 | MIT | 2021-01-07T12:24:20 | 2021-01-07T12:24:19 | null | UTF-8 | Python | false | false | 727 | py | #
# Copyright (c) 2023 Airbyte, Inc., all rights reserved.
#
"""The pipelines package."""
import logging
import os
from rich.logging import RichHandler
from . import sentry_utils
# Initialize error reporting before any other pipeline code runs.
sentry_utils.initialize()
# Third-party HTTP clients are noisy at INFO; keep only warnings and above.
logging.getLogger("requests").setLevel(logging.WARNING)
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("httpx").setLevel(logging.WARNING)
logging_handlers = [RichHandler(rich_tracebacks=True)]
if "CI" in os.environ:
    # RichHandler does not work great in the CI
    logging_handlers = [logging.StreamHandler()]
logging.basicConfig(
    level=logging.INFO,
    format="%(name)s: %(message)s",
    datefmt="[%X]",
    handlers=logging_handlers,
)
# Shared logger for the pipelines package.
main_logger = logging.getLogger(__name__)
| [
"noreply@github.com"
] | thomas-vl.noreply@github.com |
1889a20824f9d1897607d0c56bd84f23b17c9ae4 | 8a2c6e45cd64ee04d8c02ea579e3396cc21f7309 | /PartitionList.py | 325957f56423ab9f55cad10c360a65764aee48f5 | [] | no_license | Danyshman/Leetcode | e665ece38cb0b22d6b5b19f15d6a73e73da1710e | aa2799e7512ea389f6dc18448da7c2b4cda75e47 | refs/heads/master | 2021-07-06T17:44:45.243915 | 2020-10-05T04:55:03 | 2020-10-05T04:55:03 | 191,697,327 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 692 | py | class ListNode:
    def __init__(self, x):
        # Store the payload; a new node starts detached (next is None).
        self.val = x
        self.next = None
class Solution:
    def partition(self, head: ListNode, x: int):
        """Stable-partition a linked list around x.

        Builds a fresh list: all values < x first (original order kept),
        then all values >= x. Lists of length 0 or 1 are returned as-is.
        """
        if head is None or head.next is None:
            return head
        small_dummy = ListNode(0)
        large_dummy = ListNode(0)
        small_tail, large_tail = small_dummy, large_dummy
        current = head
        while current:
            copy = ListNode(current.val)
            if current.val < x:
                small_tail.next = copy
                small_tail = copy
            else:
                large_tail.next = copy
                large_tail = copy
            current = current.next
        # Splice the two partitions together.
        small_tail.next = large_dummy.next
        return small_dummy.next
| [
"danyshman.azamatov@gmail.com"
] | danyshman.azamatov@gmail.com |
f3c58bb4f225970c8017b3ca0bf7fc03919b3db9 | fd529ba6ade52cd2a3dab94da01252d7ea90398d | /0528/foddddddppdpdoodd.py | 5014c1cbe98b6709533621c6e604f53676c551df | [] | no_license | fjfhfjfjgishbrk/AE401-Python | 4a984deb0281542c205d72695285b35c7413338f | ee80fa4588b127cff2402fd81e732ede28a66411 | refs/heads/master | 2022-06-13T13:49:39.875567 | 2022-05-28T12:40:51 | 2022-05-28T12:40:51 | 251,178,163 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,757 | py | from selenium import webdriver
from bs4 import BeautifulSoup
from selenium.webdriver.chrome.options import Options
import time
import datetime
import requests
# Timestamp of this run (dateNow is computed but unused below).
now = datetime.datetime.now()
dateNow = now.strftime("%Y-%m-%d")
chrome_options = Options()
#chrome_options.add_argument('--headless')
chrome = webdriver.Chrome(options=chrome_options)
# EZTABLE search results page for the target restaurant.
chrome.get("https://tw.eztable.com/search?country=tw&date=2020-05-30&people=2&q=%E4%B8%80%E8%B5%B7%E5%B0%8F%E9%A3%9F%E9%A4%A8&searchTab=restaurant&source=mobile.eztable.com&utm_campaign=branding_keyword&utm_medium=cpc&utm_source=marketing")
time.sleep(2)
#for i in range(5):
    #chrome.execute_script("window.scrollTo(0,document.body.scrollHeight);")
    #time.sleep(1)
# Parse the rendered results page.
pageSource = chrome.page_source
soup = BeautifulSoup(pageSource, "html.parser")
space = soup.find_all("div", class_="sc-gzVnrw")
restaurants = soup.find_all("h4", class_="sc-gpHHfC")
# Notification text: first restaurant's name plus its time-slot text.
value1 = restaurants[0].text + " Time: " + space[0].text
# Open the first restaurant's page to read its image URL.
chrome.find_elements_by_class_name("sc-fgfRvd")[0].click()
time.sleep(2)
pageSource = chrome.page_source
soup = BeautifulSoup(pageSource, "html.parser")
image = soup.find_all("div", class_="sc-ESoVU")[0]['style']
# The image URL sits inside the inline style attribute, between quotes.
img_url = image.split("\"")[1]
chrome.back()
time.sleep(2)
# Open the first time slot; the detail text presumably contains the
# party-size information — confirm against the live page.
chrome.find_elements_by_class_name("sc-gzVnrw")[0].click()
time.sleep(5)
pageSource = chrome.page_source
soup = BeautifulSoup(pageSource, "html.parser")
people = soup.find_all("div", class_="sc-keVrkP")
value1 += " " + people[0].text.split(",")[1].strip()
chrome.find_elements_by_class_name("sc-fHxwqH")[0].click()
# NOTE(review): hard-coded IFTTT webhook key committed to source control —
# treat it as compromised and move it to an environment variable.
webhook_key = "PI8b5ouDPVMzfDrEQlHyP"
trigger_name = "abc"
# Fire the IFTTT webhook, passing the text and the image URL as values.
url = 'https://maker.ifttt.com/trigger/'+trigger_name+'/with/key/'+webhook_key+'?value1=' + value1 + "&value2=" + img_url
requests.get(url)
chrome.close()
"59891511+fjfhfjfjgishbrk@users.noreply.github.com"
] | 59891511+fjfhfjfjgishbrk@users.noreply.github.com |
352ce6bbbcce97cc0658ecd29193cba7bd06a0c6 | 111bb07459d59e16fe4ccff773c51426fdc4e3bc | /public/pages/qdsIndexPage.py | cf4cd6b2c543e872931a38346c4424ca547d9c60 | [] | no_license | Pactortester/UItestframework-master | 65486d2708a26fdd78d009bab4bdef0334a98d22 | 3f41da16e62a1ea181eca45ed33120842a324a69 | refs/heads/master | 2020-03-26T15:48:13.752371 | 2018-08-17T10:30:00 | 2018-08-17T10:30:00 | 145,065,346 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,853 | py | #coding=utf-8
from public.common import basepage
from public.common import mytest
import time
class DDSIndexPage(basepage.Page):
    """Page object for the QDS (quandashi.com) order flow."""
    def into_qds_page(self):
        """Open the QDS home page."""
        self.dr.open('https://new.quandashi.com/')
    def return_title(self):
        """Return this page's title."""
        return self.dr.get_title()
    def cookies(self):
        """Log in by injecting a pre-baked session cookie."""
        self.dr.add_cookie({'name': 'QDS_COOKIE',
                            'value': '4cee3ae144733628cc3ce396a7713a2cfe720901',
                            'Domain': '.quandashi.com'})
    def order_info(self):
        """Fill in the order contact information."""
        self.dr.clear_type("name->ownerContactPerson","全大师")
        self.dr.clear_type("css->body > div.myOrder-wrap > div.section-myorder.width1200 > div > table:nth-child(2) > tbody > tr:nth-child(2) > td.td-2 > input", "15624992498")
        self.dr.clear_type("css->body > div.myOrder-wrap > div.section-myorder.width1200 > div > table:nth-child(2) > tbody > tr:nth-child(3) > td.td-2 > input","4564564@qq.com")
        self.dr.clear_type("css->#remark","test")
    def pay_check(self):
        """Verify the payable amount, submit the order and start payment."""
        for i in self.dr.get_elements(
                "css->body > div.myOrder-wrap > div.section-myorder.width1200 > div > div > ul > li.row-sense > em > i"):
            print("总价:" + i.text)
            ii = i.text
            # NOTE(review): `aa` is undefined here (NameError at runtime) —
            # it looks like an expected-price value that was never assigned.
            self.assertIn(aa, ii)
            print("价格一致")
        self.dr.click(
            "css->body > div.myOrder-wrap > div.section-myorder.width1200 > div > div > ul > li.row-step > a.btn-next.submitOrder")
        time.sleep(2)
        # NOTE(review): `ii` below leaks out of the previous loop; if the
        # first loop matched nothing this also raises NameError.
        for o in self.dr.get_elements("class->payable"):
            print("订单提交成功,应付金额:" + o.text)
            oo = o.text
            time.sleep(2)
            self.assertIn(oo, ii)
            print("测试通过")
        self.dr.click("id->alisubmit")
| [
"1456470136@qq.com"
] | 1456470136@qq.com |
df54030576bca2ec55e311a0961ecbcba5fed0a7 | d5dd2ade4671b237c747b592d3635c1bc2852ca8 | /0x16-api_advanced/1-top_ten.py | 47611769b7e33e0060ef3c1b8b44200d25381a9d | [] | no_license | emna7/holberton-system_engineering-devops | ce14780a6d091ca1da37fbe26e70534081195bcb | ed4bee21409f12c12afd8d28acd121de67643789 | refs/heads/master | 2020-07-24T02:07:42.844728 | 2020-07-07T21:58:19 | 2020-07-07T21:58:19 | 207,769,072 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | #!/usr/bin/python3
"""contains 1 function: top_ten"""
import requests
def top_ten(subreddit):
    """Print the titles of the first 10 hot posts of a subreddit.

    Prints "None" when the subreddit is invalid or the payload does not
    have the expected shape.
    """
    user = {"User-Agent": "custom"}
    request = requests.get("https://www.reddit.com/r/{}/hot.json"
                           .format(subreddit), headers=user)
    try:
        # Parse the body once instead of re-parsing it on every iteration.
        children = request.json().get("data").get("children")
        for i in range(10):
            print(children[i].get("data").get("title"))
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate; any payload problem reports "None".
        print("None")
| [
"bhmemna7@gmail.com"
] | bhmemna7@gmail.com |
c657cb2b615bc1a8d1d93d8440095c8dc3bab44a | 57aed4644e21ec53e9b3c577bc4faa6b7610d79c | /test_duration.py | 7ba8f3fab4d55ca9fedf7fc9e0c21758297860ea | [
"MIT"
] | permissive | ec500-software-engineering/exercise-2-ffmpeg-JiaruiJin | 1b9277a4ac8c0b4d09236946c0d127f07016b262 | 1b2f99a1b530c3db54c70e44f2cf09bc6c9e0c5f | refs/heads/master | 2020-04-24T21:56:37.027254 | 2019-02-28T21:40:13 | 2019-02-28T21:40:13 | 172,295,010 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 795 | py | from pytest import approx
import subprocess
import json
def ffprobe(file) -> dict:
    """Run ffprobe on *file* and return its stream/format metadata as a dict."""
    command = [
        'ffprobe', '-v', 'warning',
        '-print_format', 'json',
        '-show_streams',
        '-show_format',
        str(file),
    ]
    raw = subprocess.check_output(command, universal_newlines=True)
    return json.loads(raw)
def test_duration():
    """The 480p transcode must keep the original duration (whole seconds)."""
    source = "./videoplayback.mp4"
    transcoded = "./videoplayback.mp4_480.mp4"
    source_duration = float(ffprobe(source)['streams'][0]['duration'])
    transcoded_duration = float(ffprobe(transcoded)['streams'][0]['duration'])
    assert round(source_duration) == approx(round(transcoded_duration))
| [
"noreply@github.com"
] | ec500-software-engineering.noreply@github.com |
c06a9369d360069947fe1625aa7aab2e998bbc6f | 0aa3b2d2146e6754f80a8fcfbde799104c4372d9 | /scripts/dataset_processing/tts/audio_processing/preprocess_audio.py | 128d311e04c0115c83b1e49a54af26bcc4775433 | [
"Apache-2.0"
] | permissive | shalevy1/NeMo | 22d231d15e56aac09704f8d9fb5059da84314641 | 5e07ed39f317fc03de2bb90c5ed218304bf88602 | refs/heads/master | 2023-06-26T18:09:16.776952 | 2023-01-12T22:42:28 | 2023-01-12T22:42:28 | 209,153,028 | 0 | 0 | Apache-2.0 | 2023-06-09T22:53:07 | 2019-09-17T20:43:57 | Python | UTF-8 | Python | false | false | 6,608 | py | # Copyright (c) 2022, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This script is used to preprocess audio before TTS model training.
It can be configured to do several processing steps such as silence trimming, volume normalization,
and duration filtering.
These can be done separately through multiple executions of the script, or all at once to avoid saving
too many copies of the same audio.
Most of these can also be done by the TTS data loader at training time, but doing them ahead of time
lets us implement more complex processing, validate the corectness of the output, and save on compute time.
$ HYDRA_FULL_ERROR=1 python <nemo_root_path>/scripts/dataset_processing/tts/audio_processing/preprocess_audio.py \
--config-path=<nemo_root_path>/scripts/dataset_processing/tts/audio_processing/config \
--config-name=preprocessing.yaml \
data_base_dir="/home/data" \
config.num_workers=1
"""
import os
from dataclasses import dataclass
from pathlib import Path
from typing import Tuple
import librosa
import soundfile as sf
from hydra.utils import instantiate
from joblib import Parallel, delayed
from tqdm import tqdm
from nemo.collections.tts.data.audio_trimming import AudioTrimmer
from nemo.collections.tts.data.data_utils import normalize_volume, read_manifest, write_manifest
from nemo.collections.tts.torch.helpers import get_base_dir
from nemo.core.config import hydra_runner
from nemo.utils import logging
@dataclass
class AudioPreprocessingConfig:
    """Configuration for the audio preprocessing script (hydra-instantiated)."""
    # Input training manifest.
    input_manifest: Path
    # New training manifest after processing audio.
    output_manifest: Path
    # Directory to save processed audio to.
    output_dir: Path
    # Number of threads to use. -1 will use all available CPUs.
    num_workers: int = -1
    # If provided, maximum number of entries in the manifest to process.
    max_entries: int = 0
    # If provided, rate to resample the audio to. 0 means keep original rate.
    output_sample_rate: int = 0
    # If provided, peak volume to normalize audio to.
    volume_level: float = 0.0
    # If provided, filter out utterances shorter than min_duration.
    min_duration: float = 0.0
    # If provided, filter out utterances longer than max_duration.
    max_duration: float = float("inf")
    # If provided, output filter_file will contain list of utterances filtered out.
    # NOTE(review): defaults to None although annotated as Path (effectively
    # Optional[Path]); consumers must treat a falsy value as "not provided".
    filter_file: Path = None
def _process_entry(
    entry: dict,
    base_dir: Path,
    output_dir: Path,
    audio_trimmer: AudioTrimmer,
    output_sample_rate: int,
    volume_level: float,
) -> Tuple[dict, float, float]:
    """Process one manifest entry: trim, resample and volume-normalize it.

    Returns the updated entry plus the (original, processed) durations
    in seconds. The processed file mirrors the input layout under
    output_dir.
    """
    audio_filepath = Path(entry["audio_filepath"])
    rel_audio_path = audio_filepath.relative_to(base_dir)
    input_path = os.path.join(base_dir, rel_audio_path)
    output_path = os.path.join(output_dir, rel_audio_path)

    audio, sample_rate = librosa.load(input_path, sr=None)

    if audio_trimmer is not None:
        audio_id = str(audio_filepath)
        audio, start_i, end_i = audio_trimmer.trim_audio(audio=audio, sample_rate=sample_rate, audio_id=audio_id)

    # Bug fix: the config uses 0 to mean "keep the original rate", so test
    # truthiness here — `is not None` resampled everything to 0 Hz when
    # output_sample_rate was left unset.
    if output_sample_rate:
        audio = librosa.resample(y=audio, orig_sr=sample_rate, target_sr=output_sample_rate)
        sample_rate = output_sample_rate

    if volume_level:
        audio = normalize_volume(audio, volume_level=volume_level)

    sf.write(file=output_path, data=audio, samplerate=sample_rate)

    original_duration = librosa.get_duration(filename=str(audio_filepath))
    output_duration = librosa.get_duration(filename=str(output_path))

    entry["audio_filepath"] = output_path
    entry["duration"] = output_duration
    return entry, original_duration, output_duration
@hydra_runner(config_path='config', config_name='preprocessing')
def main(cfg):
    """Entry point: read the manifest, process every audio file in parallel,
    filter by duration, and write the output (and optional filter) manifests."""
    config = instantiate(cfg.config)
    logging.info(f"Running audio preprocessing with config: {config}")

    input_manifest_path = Path(config.input_manifest)
    output_manifest_path = Path(config.output_manifest)
    output_dir = Path(config.output_dir)
    num_workers = config.num_workers
    max_entries = config.max_entries
    output_sample_rate = config.output_sample_rate
    volume_level = config.volume_level
    min_duration = config.min_duration
    max_duration = config.max_duration
    # Bug fix: filter_file defaults to None and Path(None) raises TypeError.
    filter_file = Path(config.filter_file) if config.filter_file else None

    if cfg.trim:
        audio_trimmer = instantiate(cfg.trim)
    else:
        audio_trimmer = None

    output_dir.mkdir(exist_ok=True, parents=True)

    entries = read_manifest(input_manifest_path)
    if max_entries:
        entries = entries[:max_entries]
    audio_paths = [entry["audio_filepath"] for entry in entries]
    base_dir = get_base_dir(audio_paths)

    # 'threading' backend is required when parallelizing torch models.
    job_outputs = Parallel(n_jobs=num_workers, backend='threading')(
        delayed(_process_entry)(
            entry=entry,
            base_dir=base_dir,
            output_dir=output_dir,
            audio_trimmer=audio_trimmer,
            output_sample_rate=output_sample_rate,
            volume_level=volume_level,
        )
        for entry in tqdm(entries)
    )

    output_entries = []
    filtered_entries = []
    original_durations = 0.0
    output_durations = 0.0
    for output_entry, original_duration, output_duration in job_outputs:
        # Drop utterances outside [min_duration, max_duration].
        if not min_duration <= output_duration <= max_duration:
            if output_duration != original_duration:
                output_entry["original_duration"] = original_duration
            filtered_entries.append(output_entry)
            continue
        original_durations += original_duration
        output_durations += output_duration
        output_entries.append(output_entry)

    write_manifest(manifest_path=output_manifest_path, entries=output_entries)
    if filter_file:
        write_manifest(manifest_path=filter_file, entries=filtered_entries)

    # Totals cover only the entries that were kept.
    logging.info(f"Duration of original audio: {original_durations / 3600} hours")
    logging.info(f"Duration of processed audio: {output_durations / 3600} hours")
if __name__ == "__main__":
main()
| [
"noreply@github.com"
] | shalevy1.noreply@github.com |
07ef11d4fa19032f13b947b8fcb93a347ebf61ec | 57a1b4048643a4a68f4e07116d7a9f4222a5ec34 | /ga.py | dc25e9099ee0e25e8a70f3c7e8c3c27069d3f375 | [] | no_license | LauraDiosan-CS/lab04-gatsp-serbancalin | 29fd193353ff7d6c38b5d4fd279929e4f2d4b92a | 188b2a99bf02d172f02e05515ab7e55f653c25ba | refs/heads/master | 2021-04-02T03:45:16.373896 | 2020-03-31T13:15:20 | 2020-03-31T13:15:20 | 248,240,155 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,236 | py | import random
def mainGA(input):
    """Run the genetic algorithm for TSP and return (best tour, its cost).

    The parameter is named `input` (shadowing the builtin) to keep the
    original call interface. Expected keys: n, dimensiunePopulatie,
    startNode, nrGeneratii, matrix.
    """
    # Build the initial population of valid tours.
    population = Populatie()
    population.populeaza(input["n"], input["dimensiunePopulatie"], input["startNode"])
    for _ in range(input["nrGeneratii"]):
        # Pick two parents, recombine them, then mutate both children.
        father = selectieReproducere(population, input["matrix"])
        mother = selectieReproducere(population, input["matrix"])
        child_a, child_b = incrucisare(father, mother)
        child_a.mutatie()
        child_b.mutatie()
        # Keep mutating until each child starts at the required node.
        while not child_a.eValid(input["startNode"]):
            child_a.mutatie()
        while not child_b.eValid(input["startNode"]):
            child_b.mutatie()
        # Keep only the fitter (lower-cost) child for survivor selection.
        if child_a.fitness(input["matrix"]) > child_b.fitness(input["matrix"]):
            child_a = child_b
        population.selectieSupravietuire(child_a, input["matrix"])
    # Chromosome with the minimum fitness and its value.
    return fitnessMinim(population, input["matrix"])
class Cromozom:
    """A chromosome: a tour encoded as a permutation of the nodes 1..n."""

    def __init__(self, n):
        self.__n = n
        # Start from the identity permutation [1, 2, ..., n].
        self.__permutare = generarePermutareIdentica(n)

    def __eq__(self, other):
        # Equal when the first n genes match element-wise.
        return all(self.__permutare[k] == other.__permutare[k]
                   for k in range(self.__n))

    def getPermutare(self):
        return self.__permutare

    def setPermutare(self, permutare):
        self.__permutare = permutare

    def getN(self):
        return self.__n

    def mutatie(self):
        """Swap two (possibly identical) randomly chosen positions."""
        first = random.randrange(self.__n)
        second = random.randrange(self.__n)
        perm = self.__permutare
        perm[first], perm[second] = perm[second], perm[first]

    def eValid(self, startNode):
        """A tour is valid only when it starts in the required start node."""
        return self.__permutare[0] == startNode

    def fitness(self, matrix):
        """Total cost of the closed tour, including the edge back to the
        first node (nodes are 1-based, matrix is 0-based)."""
        perm = self.__permutare
        cost = 0
        current = perm[0]
        for i in range(self.__n - 1):
            nxt = perm[i + 1]
            cost += matrix[current - 1][nxt - 1]
            current = nxt
        cost += matrix[current - 1][perm[0] - 1]
        return cost
class Populatie:
    """The GA population: a list of chromosomes."""

    def __init__(self):
        self.__cromozomi = []

    def addCromozom(self, cromozom):
        self.__cromozomi.append(cromozom)

    def getCromozomi(self):
        return self.__cromozomi

    def populeaza(self, n, dimesiunePopulatie, startNode):
        """Fill the population with `dimesiunePopulatie` valid chromosomes.

        Bug fix: the counter must advance once per *accepted* chromosome,
        not inside the mutation retry loop — otherwise the population size
        is wrong and the outer loop never terminates when a chromosome is
        born already valid (startNode == 1).
        """
        dim = 0
        while dim < dimesiunePopulatie:
            cromozom = Cromozom(n)
            # Mutate until the tour starts in the required start node.
            while not cromozom.eValid(startNode):
                cromozom.mutatie()
            dim += 1
            self.__cromozomi.append(cromozom)

    def selectieSupravietuire(self, copil, matrix):
        """Replace the worst (highest-cost) chromosome if the child beats it."""
        fitnessMaxim = 0
        indexFitnessMaxim = -1
        index = -1
        for cromozom in self.__cromozomi:
            index += 1
            fitness = cromozom.fitness(matrix)
            if fitness > fitnessMaxim:
                fitnessMaxim = fitness
                indexFitnessMaxim = index
        if copil.fitness(matrix) < fitnessMaxim and indexFitnessMaxim > -1:
            self.__cromozomi[indexFitnessMaxim] = copil
def generarePermutareIdentica(n):
    """Return the identity permutation [1, 2, ..., n] as a list."""
    return list(range(1, n + 1))
def incrucisare(parinte1, parinte2):
    """Crossover: each child keeps its own parent's first two genes and is
    then filled with the other parent's genes (duplicates skipped)."""
    p1 = parinte1.getPermutare()
    p2 = parinte2.getPermutare()

    def _completeaza(prefix, donor, marime):
        # Start from the 2-gene prefix, append donor[2:marime] genes that
        # are new, then any remaining donor genes (including its first two).
        gene = prefix[:2]
        for poz in range(2, marime):
            if donor[poz] not in gene:
                gene.append(donor[poz])
        for g in donor:
            if g not in gene:
                gene.append(g)
        return gene

    copil1 = Cromozom(parinte1.getN())
    copil1.setPermutare(_completeaza(p1, p2, parinte2.getN()))
    copil2 = Cromozom(parinte2.getN())
    copil2.setPermutare(_completeaza(p2, p1, parinte1.getN()))
    return copil1, copil2
def selectieReproducere(populatie, matrix):
    """Roulette-wheel parent selection, weighted by 1/fitness so that
    cheaper (better) tours are more likely to be picked."""
    candidates = populatie.getCromozomi()
    weights = [1 / c.fitness(matrix) for c in candidates]
    total = sum(weights)
    # Cumulative probability edges: [0, p1, p1+p2, ..., 1.0].
    cumulative = [0.0]
    acc = 0.0
    for w in weights:
        acc += w / total
        cumulative.append(acc)
    pick = random.random()
    for idx in range(len(candidates)):
        if cumulative[idx] <= pick < cumulative[idx + 1]:
            return candidates[idx]
    # Unreachable in practice (pick < 1.0); kept as a safe fallback.
    return None
def fitnessMinim(populatie, matrix):
    """Return (permutation, fitness) of the lowest-cost chromosome.

    Ties are broken in favor of the earliest chromosome in the population.
    """
    cromozomi = populatie.getCromozomi()
    best = cromozomi[0]
    best_fitness = best.fitness(matrix)
    for candidate in cromozomi[1:]:
        candidate_fitness = candidate.fitness(matrix)
        if candidate_fitness < best_fitness:
            best, best_fitness = candidate, candidate_fitness
    return best.getPermutare(), best_fitness
"noreply@github.com"
] | LauraDiosan-CS.noreply@github.com |
12edb5f89ba6f2c8dd47d128a4f8ef0560c8ef34 | a02ccb5dff094fad8bcd691dda234d50ff768299 | /tools/Polygraphy/polygraphy/tools/surgeon/subtool/__init__.py | cc6034ccc8df9c408f916506516b9a1284fb6fba | [
"Apache-2.0",
"BSD-3-Clause",
"MIT",
"ISC",
"BSD-2-Clause"
] | permissive | NVIDIA/TensorRT | 5520d5a6a5926a2b30dbdd2c5b2e4dfe6d1b429b | a167852705d74bcc619d8fad0af4b9e4d84472fc | refs/heads/release/8.6 | 2023-07-29T05:39:45.688091 | 2023-06-09T22:29:09 | 2023-06-09T23:04:18 | 184,657,328 | 8,026 | 2,096 | Apache-2.0 | 2023-09-13T17:30:16 | 2019-05-02T22:02:08 | C++ | UTF-8 | Python | false | false | 183 | py | from polygraphy.tools.surgeon.subtool.extract import Extract
from polygraphy.tools.surgeon.subtool.insert import Insert
from polygraphy.tools.surgeon.subtool.sanitize import Sanitize
| [
"rajeevsrao@users.noreply.github.com"
] | rajeevsrao@users.noreply.github.com |
7e4a35ad33a0da28823a458e6da9c48e4536cb0f | 2da6b95fe4237cc00014f80c45d268ab62fc90cd | /DFRep/V_normweight/DFPNet.py | aa5602f8847531e9cab77d9278a61b59fd0b7a0d | [] | no_license | lvzongyao/Open-Set-Recognition-1 | 7e26cd1d97f67b6c075f4e64296ce7a82d479168 | 26a8a1cca199f4e23df98abca6893e3eef3307da | refs/heads/master | 2023-08-19T09:15:16.119377 | 2021-09-13T04:21:18 | 2021-09-13T04:21:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,939 | py | """
Version2: includes centroids into model, and shares embedding layers.
"""
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import backbones.cifar as models
from Distance import Similarity, Distance
class DFPNet(nn.Module):
    """Backbone + embedding head with learnable per-class centroids.

    forward() returns the pooled backbone feature, the embedding, and three
    feature-to-centroid similarity matrices (raw dot product, cosine, and
    dot product against L2-normalized centroids).
    """
    def __init__(self, backbone='ResNet18', num_classes=1000, embed_dim=512):
        super(DFPNet, self).__init__()
        self.num_classes = num_classes
        self.backbone_name = backbone
        # Backbone is built without its own fully-connected classifier head.
        self.backbone = models.__dict__[backbone](num_classes=num_classes, backbone_fc=False)
        self.feat_dim = self.get_backbone_last_layer_out_channel()  # get the channel number of backbone output
        self.embed_dim = embed_dim
        # Two-layer bottleneck: feat_dim -> feat_dim//16 -> embed_dim.
        self.embeddingLayer = nn.Sequential(
            nn.PReLU(),
            nn.Linear(self.feat_dim, self.feat_dim // 16, bias=False),
            nn.PReLU(),
            nn.Linear(self.feat_dim // 16, embed_dim, bias=False)
        )
        # One learnable centroid per class, in the embedding space.
        self.centroids = nn.Parameter(torch.randn(num_classes, embed_dim))
        nn.init.xavier_uniform_(self.centroids)
    def get_backbone_last_layer_out_channel(self):
        """Infer the channel count of the backbone's final feature map by
        descending into its last child until a Conv2d/Linear/BatchNorm2d
        leaf is found."""
        if self.backbone_name.startswith("LeNet"):
            # LeNet-style backbones use a fixed 128*3*3 feature size.
            return 128 * 3 * 3
        last_layer = list(self.backbone.children())[-1]
        while (not isinstance(last_layer, nn.Conv2d)) and \
                (not isinstance(last_layer, nn.Linear)) and \
                (not isinstance(last_layer, nn.BatchNorm2d)):
            temp_layer = list(last_layer.children())[-1]
            # Step past empty trailing nn.Sequential containers.
            if isinstance(temp_layer, nn.Sequential) and len(list(temp_layer.children())) == 0:
                temp_layer = list(last_layer.children())[-2]
            last_layer = temp_layer
        if isinstance(last_layer, nn.BatchNorm2d):
            return last_layer.num_features
        else:
            return last_layer.out_channels
    def forward(self, x):
        x = self.backbone(x)
        # Global average pooling, flattened to [n, feat_dim].
        gap = (F.adaptive_avg_pool2d(x, 1)).view(x.size(0), -1)
        embed_fea = self.embeddingLayer(gap)
        embed_fea_norm = F.normalize(embed_fea, dim=1, p=2)
        centroids = self.centroids
        centroids_norm = F.normalize(centroids, dim=1, p=2)
        SIMI = Similarity()
        dotproduct_fea2cen = getattr(SIMI, "dotproduct")(embed_fea, centroids)
        cosine_fea2cen = getattr(SIMI, "dotproduct")(embed_fea_norm, centroids_norm)
        normweight_fea2cen = getattr(SIMI, "dotproduct")(embed_fea, centroids_norm)
        return {
            "gap": gap,  # [n,self.feat_dim]
            "embed_fea": embed_fea,  # [n,embed_dim]
            "dotproduct_fea2cen": dotproduct_fea2cen,  # [n,num_classes]
            "cosine_fea2cen": cosine_fea2cen,  # [n,num_classes]
            "normweight_fea2cen": normweight_fea2cen
        }
def demo():
    """Instantiate a small DFPNet and print the outputs of one forward pass."""
    x = torch.rand([3, 3, 32, 32])  # batch of 3 CIFAR-sized RGB images
    # (the original also allocated an unused [6, 3, 32, 32] tensor; removed)
    net = DFPNet('ResNet18', num_classes=10, embed_dim=64)
    output = net(x)
    print(output)
# demo()
| [
"xuma@my.unt.edu"
] | xuma@my.unt.edu |
1260c5bfe4eb0011b6dd92b047da4841adfa4331 | 50008b3b7fb7e14f793e92f5b27bf302112a3cb4 | /recipes/Python/541081_Thread_Safe_Any_Object/recipe-541081.py | 7342c9a7b3d0e8a02f39b737502226164403f75d | [
"MIT",
"Python-2.0"
] | permissive | betty29/code-1 | db56807e19ac9cfe711b41d475a322c168cfdca6 | d097ca0ad6a6aee2180d32dce6a3322621f655fd | refs/heads/master | 2023-03-14T08:15:47.492844 | 2021-02-24T15:39:59 | 2021-02-24T15:39:59 | 341,878,663 | 0 | 0 | MIT | 2021-02-24T15:40:00 | 2021-02-24T11:31:15 | Python | UTF-8 | Python | false | false | 557 | py | class ThreadSafeObject:
"""
A class that makes any object thread safe.
"""
    def __init__(self, obj):
        """
        Initialize the wrapper with the object to make thread safe.
        """
        # Re-entrant lock: a proxied call may trigger another proxied call
        # on the same thread without deadlocking.
        self.lock = threading.RLock()
        self.object = obj
def __getattr__(self, attr):
self.lock.acquire()
def _proxy(*args, **kargs):
self.lock.acquire()
answer = getattr(self.object, attr)(*args, **kargs)
self.lock.release()
return answer
return _proxy
| [
"betty@qburst.com"
] | betty@qburst.com |
520e49c53958f4f5cfc0c6f3582a05760870c8d6 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /065_serialization_and_deserialization/002_json/_exercises/_templates/Working With JSON Data in Python/002_Deserializing JSON.py | a9dc2d0a331d0f6f6e26d6388f706549a71f8fcd | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 2,865 | py | # # -*- coding: utf-8 -*-
#
# # Deserializing JSON
# # Great, looks like you’ve captured yourself some wild JSON! Now it’s time to whip it into shape. In the json library,
# # you’ll find load() and loads() for turning JSON encoded data into Python objects.
# # Just like serialization, there is a simple conversion table for deserialization, though you can probably guess what it
# # looks like already.
# #
# # JSON Python
# # object dict
# # array list
# # string str
# # number (int) int
# # number (real) float
# # true True
# # false False
# # null None
#
# # Technically, this conversion isn’t a perfect inverse to the serialization table. That basically means that if you
# # encode an object now and then decode it again later, you may not get exactly the same object back. I imagine it’s
# # a bit like teleportation: break my molecules down over here and put them back together over there. Am I still
# # the same person?
# # In reality, it’s probably more like getting one friend to translate something into Japanese and another friend to
# # translate it back into English. Regardless, the simplest example would be encoding a tuple and getting back a list
# # after decoding, like so:
# #
# ____ ____
# blackjack_hand = (8, "Q")
# encoded_hand = ____.d.. ?
# decoded_hand = ____.l.. ?
#
# print(b.. __ d..
# # False
# print(ty.. b..
# # <class 'tuple'>
# print(ty.. d..
# # <class 'list'>
# print(b.. __ tu.. d..
# # True
#
# # A Simple Deserialization Example
# # This time, imagine you’ve got some data stored on disk that you’d like to manipulate in memory. You’ll still use
# # the context manager, but this time you’ll open up the existing data_file.json in read mode.
# #
# w___ o.. data_file.json _ __ read_file
# data _ ____.l.. ?
# # Things are pretty straightforward here, but keep in mind that the result of this method could return any of
# # the allowed data types from the conversion table. This is only important if you’re loading in data you haven’t
# # seen before. In most cases, the root object will be a dict or a list.
# # If you’ve pulled JSON data in from another program or have otherwise obtained a string of JSON formatted data
# # in Python, you can easily deserialize that with loads(), which naturally loads from a string:
#
# json_string = """
# {
# "researcher": {
# "name": "Ford Prefect",
# "species": "Betelgeusian",
# "relatives": [
# {
# "name": "Zaphod Beeblebrox",
# "species": "Betelgeusian"
# }
# ]
# }
# }
# """
# data _ ____.l.. ?
# # Voilà! You’ve tamed the wild JSON, and now it’s under your control. But what you do with that power is up to you.
# # You could feed it, nurture it, and even teach it tricks. It’s not that I don’t trust you…but keep it on a leash, okay?
| [
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
27af581a2079c7dda5b2c4d90d320ba7794f88d6 | 47dbfa8fe684142f88eed391bab1c8049984acda | /tests/TestUtils.py | 00c638d09e4fe6032802dd3333be759e324df36a | [] | no_license | fortyMiles/StanfordAlgorithmCourse | e90dea54ae12b12fbb875e1dd14a33c27af45d46 | b54b0c0b8a044842bfec26f9223e5345d1176964 | refs/heads/master | 2021-09-03T21:34:40.598877 | 2018-01-12T06:14:39 | 2018-01-12T06:14:39 | 109,659,323 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,294 | py | from utils.utils import replace_element_quickly
from utils.profiler import get_running_time
import random
import copy
running_time = 10000
@get_running_time(running_time=running_time)
def original_replace_way(L, old_e, new_e):
    """Baseline: build a new list, swapping old_e for new_e element-wise."""
    result = []
    for item in L:
        result.append(new_e if item == old_e else item)
    return result
@get_running_time(running_time=running_time)
def replace_by_list_comprehension(L, old_e, new_e):
    """Same replacement expressed as a single list comprehension."""
    return [item if item != old_e else new_e for item in L]
@get_running_time(running_time=running_time)
def replace_by_map(L, old_e, new_e):
    """Same replacement routed through map() with a named swapper."""
    def swap(value):
        return new_e if value == old_e else value
    return list(map(swap, L))
# Wrap the project implementation with the same timing decorator so its
# numbers are comparable with the three local variants above.
replace_element_quickly = get_running_time(running_time=running_time)(replace_element_quickly)
# Time the three local variants on a random 100-element list.
LIST = [random.randrange(10) for _ in range(100)]
LIST = original_replace_way(LIST, 2, 3)
LIST = replace_by_list_comprehension(LIST, 2, 3)
LIST = replace_by_map(LIST, 2, 3)
assert 2 not in LIST
# Fresh random data for the correctness comparison below.
LIST = [random.randrange(10) for _ in range(100)]
# NOTE(review): probabilistic assertion -- 100 draws from range(10) almost
# surely contain a 2, but not with certainty.
assert 2 in LIST
list1 = replace_element_quickly(LIST[:], 2, 3)
list2 = original_replace_way(LIST, 2, 3)
assert 2 not in list1
assert len(list1) == 100, len(list1)
assert sorted(list1) == sorted(list2)
# Demo: LIST[:] passes a copy, so L itself is unchanged by the call.
L = [1, 2, 2, 31, 42, 12, 13, 2, 1, 2, 32, 1]
print(replace_element_quickly(L[:], 2, 0))
print(L)
| [
"mqgao@outlook.com"
] | mqgao@outlook.com |
def record_factory(cls_name, field_names):
    """Build and return a simple record class named *cls_name*.

    *field_names* may be a single string of space- or comma-separated
    names, or any iterable of name strings.  The produced class uses
    __slots__, positional/keyword construction, iteration over field
    values, and a readable repr.
    """
    try:
        names = field_names.replace(',', ' ').split()
    except AttributeError:  # not a string: assume an iterable of names
        names = field_names
    names = tuple(names)

    def __init__(self, *args, **kwargs):
        bound = dict(zip(self.__slots__, args))
        bound.update(kwargs)
        for field, value in bound.items():
            setattr(self, field, value)

    def __iter__(self):
        return (getattr(self, field) for field in self.__slots__)

    def __repr__(self):
        pairs = ('{}={!r}'.format(field, value)
                 for field, value in zip(self.__slots__, self))
        return '{}({})'.format(self.__class__.__name__, ', '.join(pairs))

    cls_attrs = {
        '__slots__': names,
        '__init__': __init__,
        '__iter__': __iter__,
        '__repr__': __repr__,
    }
    return type(cls_name, (object,), cls_attrs)
"1558255789@qq.com"
] | 1558255789@qq.com |
c9b993bd57669777535c69f78395ce1a9830a834 | 9b4de05054f37a65dce49857fb6a809a370b23ca | /person/migrations/0006_person_p_pic.py | 83d8c588ec4bf9ffcdb1f63503bcbb531df5df3a | [] | no_license | susahe/gis | f6b03b8f23abf7ca22c0069a4cdf603bfe879808 | 6b8d433cd5f672994ac138c1b656136425d0c345 | refs/heads/master | 2021-05-12T01:50:12.862559 | 2018-01-27T02:25:31 | 2018-01-27T02:25:31 | 117,569,888 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | # Generated by Django 2.0 on 2017-12-24 02:54
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds an optional profile-image field to Person.

    dependencies = [
        ('person', '0005_auto_20171223_1730'),
    ]

    operations = [
        migrations.AddField(
            model_name='person',
            name='p_pic',
            # blank=True makes the image optional in forms; files land under
            # MEDIA_ROOT/profile_image.
            field=models.ImageField(blank=True, upload_to='profile_image'),
        ),
    ]
| [
"sumudu.susahe@gmail.com"
] | sumudu.susahe@gmail.com |
b06929bd407a91e7ab68e1dc6adc7fd2c187e252 | c5744c2fda48ae6a79c155c641fe98021a0cb7f3 | /PP4E/GUI/ShellGui/shellgui.py | 4ed6046c52f0842dac1c08fd92c495f59e463026 | [] | no_license | skinkie/Scripts | e0fd3d3f767612ade111f28bc7af3e1b25fc2947 | 80a1ba71ddf9a0c5ff33866832cb5c42aca0c0b1 | refs/heads/master | 2021-05-31T16:57:21.100919 | 2016-05-23T09:58:59 | 2016-05-23T09:58:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,378 | py | #!/usr/local/bin/python
"""
e.g. 10-5
##########################################################################
工具启动器, 使用guimaker模板, guimixin标准quit对话框; 本程序只是一个类库, 要显示
图形界面, 请运行mytools脚本
##########################################################################
"""
from tkinter import *
from PP4E.GUI.Tools.guimixin import GuiMixin
from PP4E.GUI.Tools.guimaker import *
class ShellGui(GuiMixin, GuiMakerWindowMenu):
    """Generic shell-tool launcher window: a scrollable listbox of commands
    plus a menu and toolbar built from the subclass-supplied command set.

    Subclasses must provide fetchCommands() -> iterable of (label, action)
    pairs and runCommand(label).
    """

    def start(self):
        # Called during GuiMaker construction: build menus and set titles.
        self.setMenuBar()
        self.setToolBar()
        self.master.title('Shell Tools Listbox')
        self.master.iconname('Shell Tools')

    def handleList(self, event):
        # Double-click callback: run the command currently selected.
        label = self.listbox.get(ACTIVE)
        self.runCommand(label)

    def makeWidgets(self):
        # Scrollable listbox showing one row per command label.
        sbar = Scrollbar(self)
        lbox = Listbox(self, bg='white')  # renamed: avoid shadowing builtin 'list'
        sbar.config(command=lbox.yview)
        lbox.config(yscrollcommand=sbar.set)
        sbar.pack(side=RIGHT, fill=Y)
        lbox.pack(side=LEFT, expand=YES, fill=BOTH)
        for (label, action) in self.fetchCommands():
            lbox.insert(END, label)
        lbox.bind('<Double-1>', self.handleList)
        self.listbox = lbox

    def forToolBar(self, label):
        # Hook: subclasses may filter which commands appear on the toolbar.
        return True

    def setToolBar(self):
        # BUGFIX: the original appended a ('Quit', ...) button once per
        # command and never added the commands themselves; add each tool
        # entry, then a single Quit button on the right.
        self.toolBar = []
        for (label, action) in self.fetchCommands():
            if self.forToolBar(label):
                self.toolBar.append((label, action, dict(side=LEFT)))
        self.toolBar.append(('Quit', self.quit, dict(side=RIGHT)))

    def setMenuBar(self):
        # BUGFIX: list.append takes exactly one argument -- the original
        # passed three, raising TypeError; wrap each entry in a tuple.
        # NOTE(review): this keeps the original structure that nests the
        # 'Tools' submenu inside the 'File' menu; the canonical PP4E version
        # makes 'Tools' a separate top-level menu -- confirm intent.
        toolEntries = []
        self.menuBar = [('File', 0, [('Quit', 0, self.quit),
                                     ('Tools', 0, toolEntries)])]
        for (label, action) in self.fetchCommands():
            toolEntries.append((label, -1, action))
##########################################################################
# Designed for subclassing by template-specific types, which are in turn
# subclassed for application-specific tool sets
##########################################################################
class ListMenuGui(ShellGui):
    """ShellGui variant whose command set is a list of (label, action)
    pairs stored on self.myMenu."""

    def fetchCommands(self):
        return self.myMenu

    def runCommand(self, cmd):
        # Fire the action of every entry whose label matches cmd.
        for entry in self.myMenu:
            label, action = entry
            if cmd == label:
                action()
class DictMenuGui(ShellGui):
    """ShellGui variant whose command set is a {label: action} dict stored
    on self.myMenu."""

    def fetchCommands(self):
        return self.myMenu.items()

    def runCommand(self, cmd):
        # BUGFIX: the original signature omitted 'cmd', so the call from
        # ShellGui.handleList raised TypeError (and the body referenced an
        # undefined name).  Matches the sibling ListMenuGui.runCommand.
        self.myMenu[cmd]()
| [
"death_finger@sina.com"
] | death_finger@sina.com |
53d67ff92bd610c4d6a221c9495e62e5de801d5a | 55eda01bdcbda99f72cfdf0b29afb5ea36756873 | /arxiv/kdgan/trials/nobatch/metric.py | 26dbe61e3471dd57fefde1c7ec66fffc0b6eedfc | [] | no_license | yyht/KDGAN | 7489a0ca1a2f044b6bcb7cd8bb0d6f2dae1da5e7 | 8f1367d242d7d174bf5bb2740aa18e3846d7b521 | refs/heads/master | 2020-05-16T08:36:18.872239 | 2019-01-12T04:17:31 | 2019-01-12T04:17:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,632 | py | import numpy as np
def compute_score(logits, labels, cutoff, normalize):
  """Average per-example overlap between the top-`cutoff` predictions and
  the nonzero label indices, scaled by 1/normalize(cutoff, num_labels).

  Examples with no overlap (or no labels) contribute 0.
  """
  top_k = np.argsort(-logits, axis=1)[:, :cutoff]
  num_examples = labels.shape[0]
  per_example = []
  for idx in range(num_examples):
    true_indices = np.nonzero(labels[idx, :])[0]
    ranked = top_k[idx, :]
    hits = sum(1 for t in true_indices if t in ranked)
    value = hits
    if hits > 0:
      value = hits * (1.0 / normalize(cutoff, len(true_indices)))
    per_example.append(value)
  return np.mean(per_example)
def compute_hit(logits, labels, cutoff):
  """hit@cutoff: overlap normalized by min(cutoff, number of labels)."""
  return compute_score(logits, labels, cutoff,
                       lambda cutoff, num_label: min(cutoff, num_label))
def compute_rec(logits, labels, cutoff):
  """recall@cutoff: overlap normalized by the number of labels."""
  return compute_score(logits, labels, cutoff,
                       lambda cutoff, num_label: num_label)
def main():
  """Smoke test: print hit/rec at cutoff 2 on a tiny hand-built batch."""
  probs = [[0.1, 0.2, 0.3, 0.4] for _ in range(4)]
  logits = np.log(probs)
  labels = np.asarray([
      [0, 0, 0, 0], [0, 0, 0, 1],
      [0, 0, 1, 1], [1, 1, 1, 0],
  ], dtype=np.int32)
  cutoff = 2
  hit = compute_hit(logits, labels, cutoff)
  rec = compute_rec(logits, labels, cutoff)
  print('hit={0:.4f} rec={1:.4f}'.format(hit, rec))
if __name__ == '__main__':
main() | [
"xiaojiew1@student.unimelb.edu.au"
] | xiaojiew1@student.unimelb.edu.au |
83dd947d41a89af655f1f3fb6aa74965019bf8c2 | 9cc76b1b1dd0064ab6613cbca6ce93bc179db355 | /ros_ws/build/learning_ros/Part_2/lidar_alarm/catkin_generated/pkg.develspace.context.pc.py | e0b46158886e263efb8a7494036a7a51db3b4004 | [] | no_license | ABCaps35/learning_ros_ready_ws | 1131c32b2ecadffa8dd186c9ebcfdba7284f30ad | 1aa9c512d5006584e8bc84101a715e16a222a47d | refs/heads/main | 2023-04-03T20:32:58.671255 | 2021-04-13T23:41:13 | 2021-04-13T23:41:13 | 357,715,306 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated by catkin from pkg.context.pc.in; do not edit by hand --
# change package.xml / CMakeLists.txt and rebuild instead.
CATKIN_PACKAGE_PREFIX = ""
# No extra include dirs or libraries are exported by this package.
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
# Catkin dependencies, space-separated for the .pc file.
PROJECT_CATKIN_DEPENDS = "roscpp;sensor_msgs;std_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "lidar_alarm"
# Devel-space path baked in at configure time (machine-specific).
PROJECT_SPACE_DIR = "/home/abcaps35/ros_ws_nogit/devel"
PROJECT_VERSION = "0.0.0"
| [
"acapelli345@gmail.com"
] | acapelli345@gmail.com |
55d3863597c6b206e5cdb3d151e7c33a8e4cc987 | 41dc19883789f45b6086399a1ae23995f53b4b2c | /bayesian-stats-modelling-tutorial/notebooks/utils.py | 2b57dbc6659cd9441d8abca762dae681d5171176 | [
"MIT"
] | permissive | sunny2309/scipy_conf_notebooks | f86179ddcd67168b709c755cc01862ed7c9ab2bd | 30a85d5137db95e01461ad21519bc1bdf294044b | refs/heads/master | 2022-10-28T17:27:42.717171 | 2021-01-25T02:24:05 | 2021-01-25T02:24:05 | 221,385,814 | 2 | 0 | MIT | 2022-10-20T02:55:20 | 2019-11-13T06:12:07 | Jupyter Notebook | UTF-8 | Python | false | false | 492 | py | import numpy as np
def ECDF(data):
    """Return (x, y) arrays tracing the empirical CDF of *data*.

    x holds the measurements in ascending order; y holds the cumulative
    fraction of points at or below each x value.
    """
    n = len(data)
    return np.sort(data), np.arange(1, n + 1) / n
def despine(ax):
    """Hide the right and top spines of the given Axes."""
    for side in ('right', 'top'):
        ax.spines[side].set_visible(False)
def despine_traceplot(traceplot):
    """Apply despine() to every Axes in a 2-D grid of traceplot axes."""
    for axes_row in traceplot:
        for axes in axes_row:
            despine(axes)
| [
"sunny.2309@yahoo.in"
] | sunny.2309@yahoo.in |
e96a9e2d544fe3df65de440760a2510d46864deb | a8042cb7f6a4daec26b8cea6b7da2cb7cb880a84 | /997_FindtheTownJudge.py | 3d35d0d8a85d7fad9c59f1adab8826c33b289454 | [] | no_license | renukadeshmukh/Leetcode_Solutions | 0108edf6c5849946623a75c2dfd57cbf9bb338e4 | 1211eac167f33084f536007468ea10c1a0ceab08 | refs/heads/master | 2022-11-10T20:48:42.108834 | 2022-10-18T07:24:36 | 2022-10-18T07:24:36 | 80,702,452 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,824 | py | '''
997. Find the Town Judge
In a town, there are N people labelled from 1 to N. There is a rumor that one
of these people is secretly the town judge.
If the town judge exists, then:
The town judge trusts nobody.
Everybody (except for the town judge) trusts the town judge.
There is exactly one person that satisfies properties 1 and 2.
You are given trust, an array of pairs trust[i] = [a, b] representing that the
person labelled a trusts the person labelled b.
If the town judge exists and can be identified, return the label of the town
judge. Otherwise, return -1.
Example 1:
Input: N = 2, trust = [[1,2]]
Output: 2
Example 2:
Input: N = 3, trust = [[1,3],[2,3]]
Output: 3
Example 3:
Input: N = 3, trust = [[1,3],[2,3],[3,1]]
Output: -1
Example 4:
Input: N = 3, trust = [[1,2],[2,3]]
Output: -1
Example 5:
Input: N = 4, trust = [[1,3],[1,4],[2,3],[2,4],[4,3]]
Output: 3
Note:
1 <= N <= 1000
trust.length <= 10000
trust[i] are all different
trust[i][0] != trust[i][1]
1 <= trust[i][0], trust[i][1] <= N
'''
'''
ALGORITHM:
1. Maintain 2 lists degree and flag
2. for each pair <trustee, trusted>,
mark flag[trustee] = -1 as he cannot be the judge
inc count of degree[trusted]
3. If for some i, flag[i] != -1 and degree[i] == N-1, return i as judge
4. else return -1
RUNTIME COMPLEXITY: O(N)
SPACE COMPLEXITY: O(N)
'''
from collections import defaultdict
class Solution(object):
    """Identify the town judge: trusted by everyone else, trusts nobody."""

    def findJudge(self, N, trust):
        """Return the judge's label among people 1..N, or -1 if none exists.

        :type N: int
        :type trust: List[List[int]]  -- [a, b] means person a trusts b
        :rtype: int
        """
        indegree = [0] * (N + 1)            # how many people trust each person
        trusts_someone = [False] * (N + 1)  # True once a person trusts anyone
        for truster, trusted in trust:
            trusts_someone[truster] = True
            indegree[trusted] += 1
        # The judge is trusted by all N-1 others and trusts nobody.
        for person in range(1, N + 1):
            if indegree[person] == N - 1 and not trusts_someone[person]:
                return person
        return -1
| [
"renud1988@gmail.com"
] | renud1988@gmail.com |
b7fd323fcc803b1f893482e9a3dfdda684488089 | 35a2a3f5fa6573c32e411d399a60e6f67ae51556 | /example/fcn-xs/solver.py | cf7298b83c8c69283950847701c0c82c30e9b383 | [
"Apache-2.0",
"BSD-2-Clause-Views",
"Zlib",
"BSD-2-Clause",
"BSD-3-Clause",
"Intel",
"LicenseRef-scancode-proprietary-license",
"CC-BY-NC-4.0"
] | permissive | TuSimple/mxnet | 21c1b8fedd1a626cb57189f33ee5c4b2b382fd79 | 4cb69b85b4db8e1492e378c6d1a0a0a07bd737fb | refs/heads/master | 2021-01-09T07:59:24.301512 | 2019-07-27T00:56:52 | 2019-07-27T00:56:52 | 53,660,918 | 33 | 47 | Apache-2.0 | 2019-07-27T01:09:17 | 2016-03-11T10:56:36 | Python | UTF-8 | Python | false | false | 7,190 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
import numpy as np
import mxnet as mx
import time
import logging
from collections import namedtuple
from mxnet import optimizer as opt
from mxnet.optimizer import get_updater
from mxnet import metric
# Parameter to pass to batch_end_callback
BatchEndParam = namedtuple('BatchEndParams', ['epoch', 'nbatch', 'eval_metric'])
class Solver(object):
    """Minimal training/evaluation driver for a symbolic MXNet network.

    Binds the symbol batch by batch, runs forward/backward, applies the
    optimizer updates, and tracks an evaluation metric.
    """

    def __init__(self, symbol, ctx=None,
                 begin_epoch=0, num_epoch=None,
                 arg_params=None, aux_params=None,
                 optimizer='sgd', **kwargs):
        """Store the symbol, device context, epoch range, parameter dicts and
        optimizer name; extra kwargs are forwarded to opt.create() in fit().
        """
        self.symbol = symbol
        if ctx is None:
            ctx = mx.cpu()
        self.ctx = ctx
        self.begin_epoch = begin_epoch
        self.num_epoch = num_epoch
        self.arg_params = arg_params
        self.aux_params = aux_params
        self.optimizer = optimizer
        self.kwargs = kwargs.copy()

    def fit(self, train_data, eval_data=None,
            eval_metric='acc',
            grad_req='write',
            epoch_end_callback=None,
            batch_end_callback=None,
            kvstore='local',
            logger=None):
        """Train for epochs [begin_epoch, num_epoch), optionally running a
        validation pass over eval_data at the end of each epoch.
        """
        if logger is None:
            logger = logging
        logging.info('Start training with %s', str(self.ctx))
        arg_shapes, out_shapes, aux_shapes = self.symbol.infer_shape(data=train_data.provide_data[0][1])
        arg_names = self.symbol.list_arguments()
        if grad_req != 'null':
            # Allocate gradient buffers for every learnable argument
            # (the data/label inputs carry no gradients).
            self.grad_params = {}
            for name, shape in zip(arg_names, arg_shapes):
                if not (name.endswith('data') or name.endswith('label')):
                    self.grad_params[name] = mx.nd.zeros(shape, self.ctx)
        else:
            self.grad_params = None
        aux_names = self.symbol.list_auxiliary_states()
        # BUGFIX: the original called nd.zeros(), but only `mx` is imported
        # at module level -- that raised NameError on the first fit() call.
        self.aux_params = {k: mx.nd.zeros(s) for k, s in zip(aux_names, aux_shapes)}
        # (the unused input_names/output_names locals were dropped)
        data_name = train_data.data_name
        label_name = train_data.label_name
        self.optimizer = opt.create(self.optimizer, rescale_grad=(1.0 / train_data.get_batch_size()), **(self.kwargs))
        self.updater = get_updater(self.optimizer)
        eval_metric = metric.create(eval_metric)
        # begin training
        for epoch in range(self.begin_epoch, self.num_epoch):
            nbatch = 0
            train_data.reset()
            eval_metric.reset()
            for data in train_data:
                nbatch += 1
                label_shape = data[label_name].shape
                self.arg_params[data_name] = mx.nd.array(data[data_name], self.ctx)
                # Flatten the (H, W) label plane into one axis per sample.
                self.arg_params[label_name] = mx.nd.array(
                    data[label_name].reshape(label_shape[0], label_shape[1] * label_shape[2]), self.ctx)
                # NOTE(review): rebinding the executor every batch is costly;
                # kept as-is to preserve the original behavior.
                self.exector = self.symbol.bind(self.ctx, self.arg_params,
                                                args_grad=self.grad_params,
                                                grad_req=grad_req,
                                                aux_states=self.aux_params)
                assert len(self.symbol.list_arguments()) == len(self.exector.grad_arrays)
                update_dict = {name: nd for name, nd in zip(self.symbol.list_arguments(),
                                                            self.exector.grad_arrays) if nd is not None}
                output_dict = {}
                output_buff = {}
                for key, arr in zip(self.symbol.list_outputs(), self.exector.outputs):
                    output_dict[key] = arr
                    output_buff[key] = mx.nd.empty(arr.shape, ctx=mx.cpu())
                self.exector.forward(is_train=True)
                for key in output_dict:
                    output_dict[key].copyto(output_buff[key])
                self.exector.backward()
                for key, arr in update_dict.items():
                    if key != "bigscore_weight":
                        self.updater(key, arr, self.arg_params[key])
                pred_shape = self.exector.outputs[0].shape
                label = mx.nd.array(data[label_name].reshape(label_shape[0], label_shape[1] * label_shape[2]))
                pred = mx.nd.array(output_buff["softmax_output"].asnumpy().reshape(
                    pred_shape[0], pred_shape[1], pred_shape[2] * pred_shape[3]))
                eval_metric.update([label], [pred])
                self.exector.outputs[0].wait_to_read()
                # BUGFIX: batch_end_callback defaults to None; guard the call
                # the same way epoch_end_callback is guarded below.
                if batch_end_callback is not None:
                    batch_end_params = BatchEndParam(epoch=epoch, nbatch=nbatch, eval_metric=eval_metric)
                    batch_end_callback(batch_end_params)
            if epoch_end_callback is not None:
                epoch_end_callback(epoch, self.symbol, self.arg_params, self.aux_params)
            name, value = eval_metric.get()
            logger.info(" --->Epoch[%d] Train-%s=%f", epoch, name, value)
            # evaluation
            if eval_data:
                logger.info(" in eval process...")
                nbatch = 0
                eval_data.reset()
                eval_metric.reset()
                for data in eval_data:
                    nbatch += 1
                    label_shape = data[label_name].shape
                    self.arg_params[data_name] = mx.nd.array(data[data_name], self.ctx)
                    self.arg_params[label_name] = mx.nd.array(
                        data[label_name].reshape(label_shape[0], label_shape[1] * label_shape[2]), self.ctx)
                    exector = self.symbol.bind(self.ctx, self.arg_params,
                                               args_grad=self.grad_params,
                                               grad_req=grad_req,
                                               aux_states=self.aux_params)
                    cpu_output_array = mx.nd.zeros(exector.outputs[0].shape)
                    exector.forward(is_train=False)
                    exector.outputs[0].copyto(cpu_output_array)
                    pred_shape = cpu_output_array.shape
                    label = mx.nd.array(data[label_name].reshape(label_shape[0],
                                                                 label_shape[1] * label_shape[2]))
                    pred = mx.nd.array(cpu_output_array.asnumpy().reshape(
                        pred_shape[0], pred_shape[1], pred_shape[2] * pred_shape[3]))
                    eval_metric.update([label], [pred])
                    exector.outputs[0].wait_to_read()
                    name, value = eval_metric.get()
                    logger.info('batch[%d] Validation-%s=%f', nbatch, name, value)
| [
"piiswrong@users.noreply.github.com"
] | piiswrong@users.noreply.github.com |
416a5be967979ccd729bd346dbadfa91fc98de62 | bebba3fb1dfc13a2220f06997c4bc8da42ef8e87 | /smashlib/ipy3x/nbconvert/exporters/pdf.py | 873282d02c0808d0c87c8868da6db44206a5ad4e | [
"MIT"
] | permissive | mattvonrocketstein/smash | b48b93c3419637f615c7ac3386b04ae756e1fadc | 98acdc27ab72ca80d9a7f63a54c0d52f126a8009 | refs/heads/master | 2021-01-18T23:23:59.340206 | 2016-07-14T01:28:17 | 2016-07-14T01:28:17 | 2,813,958 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,514 | py | """Export to PDF via latex"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import subprocess
import os
import sys
from IPython.utils.traitlets import Integer, List, Bool, Instance
from IPython.utils.tempdir import TemporaryWorkingDirectory
from .latex import LatexExporter
class PDFExporter(LatexExporter):
    """Export a notebook to PDF by generating LaTeX and compiling it."""

    latex_count = Integer(3, config=True,
        help="How many times latex will be called."
    )

    # BUGFIX: the second element must be the '{filename}' placeholder that
    # run_command() fills in via str.format(filename=...); it had been
    # replaced by a literal "(unknown)" string, so pdflatex/bibtex were
    # invoked on a file that does not exist.
    latex_command = List([u"pdflatex", u"{filename}"], config=True,
        help="Shell command used to compile latex."
    )

    bib_command = List([u"bibtex", u"{filename}"], config=True,
        help="Shell command used to run bibtex."
    )

    verbose = Bool(False, config=True,
        help="Whether to display the output of latex commands."
    )

    temp_file_exts = List(['.aux', '.bbl', '.blg', '.idx', '.log', '.out'], config=True,
        help="File extensions of temp files to remove after running."
    )

    writer = Instance("IPython.nbconvert.writers.FilesWriter", args=())

    def run_command(self, command_list, filename, count, log_function):
        """Run command_list count times.

        Parameters
        ----------
        command_list : list
            A list of args to provide to Popen. Each element of this
            list will be interpolated with the filename to convert.
        filename : unicode
            The name of the file to convert.
        count : int
            How many times to run the command.

        Returns
        -------
        success : bool
            A boolean indicating if the command was successful (True)
            or failed (False).
        """
        command = [c.format(filename=filename) for c in command_list]
        # In windows and python 2.x there is a bug in subprocess.Popen and
        # unicode commands are not supported
        if sys.platform == 'win32' and sys.version_info < (3, 0):
            # We must use cp1252 encoding for calling subprocess.Popen
            # Note that sys.stdin.encoding and encoding.DEFAULT_ENCODING
            # could be different (cp437 in case of dos console)
            command = [c.encode('cp1252') for c in command]
        times = 'time' if count == 1 else 'times'
        self.log.info(
            "Running %s %i %s: %s", command_list[0], count, times, command)
        with open(os.devnull, 'rb') as null:
            stdout = subprocess.PIPE if not self.verbose else None
            for index in range(count):
                p = subprocess.Popen(command, stdout=stdout, stdin=null)
                out, err = p.communicate()
                if p.returncode:
                    if self.verbose:
                        # verbose means I didn't capture stdout with PIPE,
                        # so it's already been displayed and `out` is None.
                        out = u''
                    else:
                        out = out.decode('utf-8', 'replace')
                    log_function(command, out)
                    return False  # failure
        return True  # success

    def run_latex(self, filename):
        """Run pdflatex self.latex_count times."""
        def log_error(command, out):
            self.log.critical(u"%s failed: %s\n%s", command[0], command, out)
        return self.run_command(self.latex_command, filename,
                                self.latex_count, log_error)

    def run_bib(self, filename):
        """Run bibtex once on the file's basename."""
        filename = os.path.splitext(filename)[0]

        def log_error(command, out):
            self.log.warn('%s had problems, most likely because there were no citations',
                          command[0])
            self.log.debug(u"%s output: %s\n%s", command[0], command, out)
        return self.run_command(self.bib_command, filename, 1, log_error)

    def clean_temp_files(self, filename):
        """Remove temporary files created by pdflatex/bibtex."""
        self.log.info("Removing temporary LaTeX files")
        filename = os.path.splitext(filename)[0]
        for ext in self.temp_file_exts:
            try:
                os.remove(filename + ext)
            except OSError:
                # The temp file may never have been produced; ignore.
                pass

    def from_notebook_node(self, nb, resources=None, **kw):
        """Convert a notebook node to (pdf_bytes, resources)."""
        latex, resources = super(PDFExporter, self).from_notebook_node(
            nb, resources=resources, **kw
        )
        with TemporaryWorkingDirectory() as td:
            notebook_name = "notebook"
            tex_file = self.writer.write(
                latex, resources, notebook_name=notebook_name)
            self.log.info("Building PDF")
            rc = self.run_latex(tex_file)
            if not rc:
                rc = self.run_bib(tex_file)
            if not rc:
                rc = self.run_latex(tex_file)
            pdf_file = notebook_name + '.pdf'
            if not os.path.isfile(pdf_file):
                raise RuntimeError("PDF creating failed")
            self.log.info('PDF successfully created')
            with open(pdf_file, 'rb') as f:
                pdf_data = f.read()

        # convert output extension to pdf
        # the writer above required it to be tex
        resources['output_extension'] = '.pdf'
        return pdf_data, resources
| [
"matthewvonrocketstein@gmail-dot-com"
] | matthewvonrocketstein@gmail-dot-com |
baad1ea61ddcc627ef3a4d33fe866a8b0fce5db7 | a11aa2be20ccc0c814153e7f17813f412c8a3d45 | /tests/testapp/settings.py | d96ff9bf0456cd192716dbc6f613ad51f00dd48c | [
"BSD-3-Clause"
] | permissive | Jiaming1999/django-beam | 32e80ab3472fc5aa25b79dabdb21080e89804bdc | cba5874bfef414e65051c2534cf03c772a4da98c | refs/heads/master | 2023-08-16T04:59:20.479734 | 2021-10-25T13:18:30 | 2021-10-25T13:18:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,282 | py | DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}}
# Applications under test: beam, its bootstrap4 theme, the test app, and the
# Django/third-party apps the test-suite features depend on.
INSTALLED_APPS = [
    "beam",
    "beam.themes.bootstrap4",
    "testapp",
    "crispy_forms",
    "django.contrib.contenttypes",
    # "django.contrib.admin",
    "django.contrib.sessions",
    "django.contrib.auth",
    # contrib.reversion
    "reversion",
    "beam.contrib.reversion",
    # contrib.autocomplete_light
    "dal",
    "beam.contrib.autocomplete_light",
    "dal_select2",
    "django_filters",
]
# NOTE: test-only secret; never reuse outside the test suite.
SECRET_KEY = "secret_key_for_testing"
ROOT_URLCONF = "testapp.urls"
# Minimal middleware stack: sessions + auth + messages are enough for the
# views exercised by the tests.
MIDDLEWARE = [
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
]
# Standard Django template engine with app-directory loading enabled.
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ]
        },
    }
]
STATIC_URL = "/static/"
| [
"raphael.kimmig@ampad.de"
] | raphael.kimmig@ampad.de |
cc17a773ede83ac3e9cf349ec624f77af64bc738 | b53141494618cd6c1bc96960f9a6026257f9fbb3 | /packaging/setup/plugins/ovirt-engine-setup/apache/selinux.py | 6cd9eb057f0264cc12be97b40dcb50745ffdbcb0 | [
"Apache-2.0"
] | permissive | SunOfShine/ovirt-engine | b97454017c86e7729265dc70bbf58f1d0319c560 | 7684597e2d38ff854e629e5cbcbb9f21888cb498 | refs/heads/master | 2020-12-30T19:23:27.311186 | 2013-05-13T20:36:58 | 2013-06-09T09:42:14 | 10,784,897 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,213 | py | #
# ovirt-engine-setup -- ovirt engine setup
# Copyright (C) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Selinux plugin."""
import gettext
_ = lambda m: gettext.dgettext(message=m, domain='ovirt-engine-setup')
from otopi import util
from otopi import plugin
from ovirt_engine_setup import constants as osetupcons
@util.export
class Plugin(plugin.PluginBase):
    """Apache SELinux plugin.

    Enables the httpd_can_network_connect SELinux boolean via semanage.
    """

    def __init__(self, context):
        super(Plugin, self).__init__(context=context)
        # Assume enabled; setup/validation may turn this off.
        self._enabled = True

    @plugin.event(
        stage=plugin.Stages.STAGE_SETUP,
    )
    def _setup(self):
        # Register detection of the semanage binary; skip entirely in
        # developer mode.
        self.command.detect('semanage')
        self._enabled = not self.environment[
            osetupcons.CoreEnv.DEVELOPER_MODE
        ]

    @plugin.event(
        stage=plugin.Stages.STAGE_VALIDATION,
        condition=lambda self: self._enabled,
    )
    def _validation(self):
        # Without semanage we cannot change the boolean -- disable the
        # misc stage instead of failing.
        if self.command.get('semanage', optional=True) is None:
            self._enabled = False

    @plugin.event(
        stage=plugin.Stages.STAGE_MISC,
        condition=lambda self: self._enabled,
    )
    def _misc(self):
        # Turn the boolean on; failure is non-fatal (warn only) since the
        # administrator can set it manually.
        command = (
            self.command.get('semanage'),
            'boolean',
            '--modify',
            '--on',
            'httpd_can_network_connect',
        )
        rc, stdout, stderr = self.execute(
            command,
            raiseOnError=False,
        )
        if rc != 0:
            self.logger.warning(
                _(
                    'Failed to modify httpd selinux context, please make '
                    'sure httpd_can_network_connect is set.'
                )
            )
# vim: expandtab tabstop=4 shiftwidth=4
| [
"gerrit2@gerrit.ovirt.org"
] | gerrit2@gerrit.ovirt.org |
fd2e282016d51faca7f03108af89b284f4cec396 | 72764f01185c872daa7519f919511f6a49ecf94e | /Basic_App/models.py | 767c729f46a33c3a0bfbb4f0dbfc8704c8a39866 | [] | no_license | Alan-thapa98/Online-Shopping-master | 9df018293d42e08060f72f0c3d566fb2de5ce30a | 6de6552edc67495e9902154f83a73b2a7c759f7b | refs/heads/master | 2023-06-25T06:10:39.141935 | 2021-07-30T16:06:20 | 2021-07-30T16:06:20 | 391,121,088 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,104 | py | from django.db import models
# Create your models here.
class home(models.Model):
author=models.ForeignKey('auth.User',on_delete=models.CASCADE)
title=models.CharField(max_length=200)
price=models.FloatField()
image=models.ImageField(upload_to='media/home')
details=models.TextField()
posted_on=models.DateTimeField(null=True,blank=True)
def get_absolute_url(self):
return reverse('Basic_App:product', kwargs={'pk': self.pk})
def image_url(self):
if self.image and hasattr(self.image,'url'):
return self.image.url
def __str__(self):
return self.title
class Categori(models.Model):
Category=models.CharField(max_length=200)
def __str__(self):
return self.Category
class Shop(models.Model):
Category=models.CharField(max_length=200)
title=models.ForeignKey('home',on_delete=models.CASCADE)
price=models.FloatField()
image=models.ImageField(upload_to='media/shop')
details=models.TextField()
posted_on=models.DateTimeField(null=True,blank=True)
def get_absolute_url(self):
return reverse('Basic_App:shopproduct', kwargs={'pk': self.pk})
def image_url(self):
if self.image and hasattr(self.image,'url'):
return self.image.url
def __str__(self):
return self.Category
#class Product(models.Model):
class Checkout(models.Model):
fname=models.CharField(max_length=100)
lname=models.CharField(max_length=100)
c_name=models.CharField(max_length=100)
email=models.EmailField()
d=[("1","Bangladesh"),
("2","India"),
("3","Mayanmar"),
("4","Soudi Arabia"),
("5","Pakistan"),
("6","Canada"),
("7","Malaysia"),
("8","China"),
("9","US"),
("10","UK"),
('11','Other'),
]
states=models.CharField(max_length=200,choices=d)
address=models.CharField(max_length=200)
town=models.CharField(max_length=200)
zipcode=models.PositiveIntegerField()
phone_number=models.IntegerField()
comment=models.TextField()
def __str__(self):
return self.email
| [
"alanthapa98.gmail.com"
] | alanthapa98.gmail.com |
5909d464cdf9b56d9f67865e413a18d52700f581 | 56ca0c81e6f8f984737f57c43ad8d44a84f0e6cf | /src/raport_slotow/tests/test_views/test_raport_slotow_autor.py | b3c12f85003d3d7324d8ec69c4f9edd627a7b676 | [
"MIT"
] | permissive | iplweb/bpp | c40f64c78c0da9f21c1bd5cf35d56274a491f840 | a3d36a8d76733a479e6b580ba6ea57034574e14a | refs/heads/dev | 2023-08-09T22:10:49.509079 | 2023-07-25T04:55:54 | 2023-07-25T04:55:54 | 87,017,024 | 2 | 0 | NOASSERTION | 2023-03-04T04:02:36 | 2017-04-02T21:22:20 | Python | UTF-8 | Python | false | false | 439 | py | from django.urls import reverse
def test_RaportSlotow_get_pdf(admin_app, autor_jan_kowalski):
url = reverse("raport_slotow:index")
form_page = admin_app.get(url)
form_page.forms[0]["obiekt"].force_value(autor_jan_kowalski.pk)
raport_page = form_page.forms[0].submit().maybe_follow()
pdf_page = raport_page.click("pobierz PDF")
assert pdf_page.status_code == 200
assert pdf_page.content[:8] == b"%PDF-1.7"
| [
"michal.dtz@gmail.com"
] | michal.dtz@gmail.com |
366ba960d296aa971689f7a44844442e3718aa6e | 5f58ceeffe080fabda0ad7ef30e2f04acc233271 | /demo/plugins/file_ops/template.py | 24bbac6eaa6a0961698fbf8c68a54cb32ca40595 | [] | no_license | EDRN/jpl.pipedreams | a9c9f2b25260ffe5a18b1ae12f6a5f08894f1533 | ecbdb280d25d123e9a7a0dcb505ceec40eaf047e | refs/heads/main | 2023-08-25T08:48:14.265406 | 2021-10-26T18:45:13 | 2021-10-26T18:45:13 | 380,337,383 | 0 | 0 | null | 2021-07-02T19:06:51 | 2021-06-25T19:35:26 | Python | UTF-8 | Python | false | false | 876 | py | # encoding: utf-8
from jpl.pipedreams.plugins_ops import Plugin
class Template(Plugin):
def __init__(self):
super().__init__()
self.description = 'for basic file ops'
def get_bytes(self, *args, **kwargs):
raise NotImplementedError
def read_str(self, *args, **kwargs):
raise NotImplementedError
def get_file_size(self, *args, **kwargs):
raise NotImplementedError
def isfile(self, *args, **kwargs):
raise NotImplementedError
def isdir(self, *args, **kwargs):
raise NotImplementedError
def exists(self, *args, **kwargs):
raise NotImplementedError
def dir_walk(self, *args, **kwargs):
raise NotImplementedError
def download(self, *args, **kwargs):
raise NotImplementedError
def search_file(self, *args, **kwargs):
raise NotImplementedError
| [
"kelly@seankelly.biz"
] | kelly@seankelly.biz |
e8d31cd5c24ee547db03c38c29d1f4d6cc81f24e | b8062e01860960131b37e27298b6b755b4191f5f | /cplusplus/contrib/HandWrite/presenterserver/hand_write/src/config_parser.py | d047e6c703f5801d764c9cb6b4588929bc61f180 | [
"Apache-2.0"
] | permissive | RomanGaraev/samples | 4071fcbe6bf95cf274576665eb72588568d8bcf2 | 757aac75a0f3921c6d1b4d98599bd7d4ffda936b | refs/heads/master | 2023-07-16T02:17:36.640036 | 2021-08-30T15:14:05 | 2021-08-30T15:14:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,434 | py | # =======================================================================
#
# Copyright (C) 2018, Hisilicon Technologies Co., Ltd. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1 Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2 Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3 Neither the names of the copyright holders nor the names of the
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# =======================================================================
#
import os
import configparser
import common.parameter_validation as validate
class ConfigParser():
""" parse configuration from the config.conf"""
__instance = None
def __init__(self):
"""init"""
def __new__(cls):
"""ensure class object is a single instance"""
if cls.__instance is None:
cls.__instance = object.__new__(cls)
cls.config_parser()
return cls.__instance
def config_verify(self):
'''Verify configuration Parameters '''
if not validate.validate_ip(ConfigParser.web_server_ip) or \
not validate.validate_ip(ConfigParser.presenter_server_ip) or \
not validate.validate_port(ConfigParser.web_server_port) or \
not validate.validate_port(ConfigParser.presenter_server_port):
return False
return True
@classmethod
def config_parser(cls):
"""parser config from config.conf"""
config_parser = configparser.ConfigParser()
cls.root_path = ConfigParser.get_rootpath()
config_file = os.path.join(cls.root_path, "config/config.conf")
config_parser.read(config_file)
cls.web_server_ip = config_parser.get('baseconf', 'web_server_ip')
cls.presenter_server_ip = \
config_parser.get('baseconf', 'presenter_server_ip')
cls.web_server_port = config_parser.get('baseconf', 'web_server_port')
cls.presenter_server_port = \
config_parser.get('baseconf', 'presenter_server_port')
@staticmethod
def get_rootpath():
"""get presenter server's root directory."""
path = __file__
idx = path.rfind("src")
return path[0:idx]
| [
"derek.qian.wang@huawei.com"
] | derek.qian.wang@huawei.com |
2af78ce0a40eda8b75a944402af2b657dc3a437d | 8c45e306209f4560309374828042819e6e5ddfc5 | /Visual-IDE/Model/models.py | dc8c4b9ff539aa4560a8ea76ad88bb2c4f1463cc | [] | no_license | franklingu/Visual-IDE | 2ee2640c2c98ecf076d7b925d1095ff565433673 | 9f2acc4283044aa3ea77a3fd01e5edad79e4ea10 | refs/heads/master | 2020-05-18T11:52:53.978165 | 2014-11-10T03:51:40 | 2014-11-10T03:51:40 | 24,674,470 | 0 | 1 | null | 2014-10-26T07:09:07 | 2014-10-01T10:33:16 | JavaScript | UTF-8 | Python | false | false | 2,289 | py | from google.appengine.ext import db
class SavedProject(db.Model):
user_email = db.EmailProperty()
project_title = db.StringProperty()
project_content = db.TextProperty()
created_at = db.DateTimeProperty(auto_now_add=True)
class DbManager(object):
def __init__(self):
super(DbManager, self).__init__()
@classmethod
def get_saved_project_titles_for_user(cls, user_email):
if user_email is None:
return None
query_results = db.GqlQuery("SELECT * FROM SavedProject WHERE user_email = :1", user_email)
titles_list = []
for item in query_results:
titles_list.append(item.project_title)
return titles_list
@classmethod
def get_saved_project_for_user(cls, user_email, project_title):
if user_email is None or project_title is None:
return None
query_results = db.GqlQuery("SELECT * FROM SavedProject WHERE user_email = :1 AND project_title = :2",
user_email, project_title).get()
return query_results
@classmethod
def save_project(cls, user_email, project_title, project_content):
if user_email is None or project_title is None or project_content is None:
return False
query_result = db.GqlQuery("SELECT * FROM SavedProject WHERE user_email = :1 AND project_title = :2",
user_email, project_title).get()
if query_result:
query_result.project_content = project_content
query_result.put()
else:
project = SavedProject()
project.user_email = user_email
project.project_title = project_title
project.project_content = project_content
project.put()
return True
@classmethod
def delete_project(cls, user_email, project_title):
if user_email is None or project_title is None:
return False
query_results = db.GqlQuery("SELECT * FROM SavedProject WHERE user_email = :1 AND project_title = :2",
user_email, project_title)
if query_results:
for item in query_results:
item.delete()
else:
pass
return True
| [
"franklingujunchao@gmail.com"
] | franklingujunchao@gmail.com |
da20ae6db24880030b45ad77229b59f4fa7c4a86 | e1a7d00dbe27403427078c627ccebe1562a6049d | /mercury/model/clients/client/shortcut.py | 1a7b319d0fa32f71dae07a4b75a8087cd3ed2d88 | [
"Apache-2.0"
] | permissive | greenlsi/mercury_mso_framework | f24fc167230057bb07b7de5dc9fbb10490293fee | cb425605de3341d27ce43fb326b300cb8ac781f6 | refs/heads/master | 2023-04-28T02:18:16.362823 | 2023-04-18T12:03:23 | 2023-04-18T12:03:23 | 212,610,400 | 2 | 1 | Apache-2.0 | 2023-03-02T14:36:56 | 2019-10-03T15:12:32 | Python | UTF-8 | Python | false | false | 2,301 | py | from mercury.msg.packet import AppPacket, NetworkPacket, PhysicalPacket, PacketInterface
from mercury.msg.packet.app_packet.acc_packet import AccessPacket
from mercury.msg.packet.app_packet.srv_packet import SrvPacket
from typing import Generic, Type
from ...common import ExtendedAtomic
from xdevs.models import Port
class Shortcut(ExtendedAtomic, Generic[PacketInterface]):
def __init__(self, p_type: Type[PacketInterface], client_id: str):
if p_type not in [AppPacket, NetworkPacket]:
raise ValueError(f'Invalid value for p_type ({p_type})')
super().__init__(f'client_{client_id}_shortcut')
self.p_type: Type[PacketInterface] = p_type
self.input_data: Port[PacketInterface] = Port(p_type, 'input_data')
self.input_phys_pss: Port[PhysicalPacket] = Port(PhysicalPacket, 'input_phys_pss')
self.output_data_acc: Port[PacketInterface] = Port(p_type, 'output_data_acc')
self.output_data_srv: Port[PacketInterface] = Port(p_type, 'output_data_srv')
self.output_app_pss: Port[AppPacket] = Port(AppPacket, 'output_app_pss')
self.add_in_port(self.input_data)
self.add_in_port(self.input_phys_pss)
self.add_out_port(self.output_data_acc)
self.add_out_port(self.output_data_srv)
self.add_out_port(self.output_app_pss)
def deltint_extension(self):
self.passivate()
def deltext_extension(self, e):
for msg in self.input_data.values:
app = msg if isinstance(msg, AppPacket) else msg.data if isinstance(msg.data, AppPacket) else msg.data.data
if isinstance(app, AccessPacket):
self.add_msg_to_queue(self.output_data_acc, msg)
elif isinstance(app, SrvPacket):
self.add_msg_to_queue(self.output_data_srv, msg)
for phys_msg in self.input_phys_pss.values:
phys_msg.receive(self._clock)
phys_msg.data.receive(self._clock)
app_msg = phys_msg.data.data
app_msg.snr = phys_msg.snr
self.add_msg_to_queue(self.output_app_pss, app_msg)
self.passivate() if self.msg_queue_empty() else self.activate()
def lambdaf_extension(self):
pass
def initialize(self):
self.passivate()
def exit(self):
pass
| [
"rcardenas.rod@gmail.com"
] | rcardenas.rod@gmail.com |
c3b35eec240bedc549d923ed1619303e89db9956 | bd75c7ec55b78ef189f57596520744f82ec73073 | /Restore IP Addresses.py | 982bfe59424d2b68d11b0afcfdc62100bfe8524f | [] | no_license | GaoLF/LeetCode-PY | 17058ac0743403292559f9b83a20bf79d89e33f6 | ccd294cfe0c228a21518d077d1aa01e510930ea3 | refs/heads/master | 2021-01-23T02:24:05.940132 | 2015-07-22T13:44:01 | 2015-07-22T13:44:01 | 38,248,471 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,458 | py | #coding=utf-8
class Solution:
# @param {string} s
# @return {string[]}
def restoreIpAddresses(self, s):
res = []
self.solve(s,0,0,res,"")
return res
# 几点注意事项:当位数太多或者太少,都不能组成
# 当第一个数是0的时候,只能当0
def solve(self,s,i,begin,res,string):
size = len(s)
if (size - begin) < 1*(4 - i):
return
if (size - begin) > 3*(4 - i):
return
if i == 3:
num = int(s[begin:size])
if num > 255:
return
else:
if s[begin:begin+1] == '0' and begin + 1 < size:
return
res.append(string + s[begin:size])
else:
if s[begin:begin+1] == '0':
#string = string + '0.'
self.solve(s,i+1,begin+1,res,string + '0.')
#string = string[0:begin]
else:
for j in range(3):
if begin + j + 1 < size:
num = int(s[begin:begin+j+1])
# print num
if num < 256:
#string = string + (str(num)+'.')
self.solve(s,i+1,begin+j+1,res,string + (str(num)+'.'))
#string = string[0:begin]
A = Solution()
print A.restoreIpAddresses("25525511135")
print A.restoreIpAddresses("010010") | [
"gaolongfei@pku.edu.cn"
] | gaolongfei@pku.edu.cn |
7adc594346e952c66be110d2560704c21d224bf0 | acd41dc7e684eb2e58b6bef2b3e86950b8064945 | /res/packages/scripts/scripts/common/Lib/plat-irix5/panel.py | c46a1d28e7c386cfb3ca4cdfbf0af379ca3e165d | [] | no_license | webiumsk/WoT-0.9.18.0 | e07acd08b33bfe7c73c910f5cb2a054a58a9beea | 89979c1ad547f1a1bbb2189f5ee3b10685e9a216 | refs/heads/master | 2021-01-20T09:37:10.323406 | 2017-05-04T13:51:43 | 2017-05-04T13:51:43 | 90,268,530 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 5,364 | py | # 2017.05.04 15:33:49 Střední Evropa (letní čas)
# Embedded file name: scripts/common/Lib/plat-irix5/panel.py
from warnings import warnpy3k
warnpy3k('the panel module has been removed in Python 3.0', stacklevel=2)
del warnpy3k
import pnl
debug = 0
def is_list(x):
return type(x) == type([])
def reverse(list):
res = []
for item in list:
res.insert(0, item)
return res
def getattrlist(list, name):
for item in list:
if item and is_list(item) and item[0] == name:
return item[1:]
return []
def getproplist(list, name):
for item in list:
if item and is_list(item) and item[0] == 'prop':
if len(item) > 1 and item[1] == name:
return item[2:]
return []
def is_endgroup(list):
x = getproplist(list, 'end-of-group')
return x and x[0] == '#t'
def show_actuator(prefix, a):
for item in a:
if not is_list(item):
print prefix, item
elif item and item[0] == 'al':
print prefix, 'Subactuator list:'
for a in item[1:]:
show_actuator(prefix + ' ', a)
elif len(item) == 2:
print prefix, item[0], '=>', item[1]
elif len(item) == 3 and item[0] == 'prop':
print prefix, 'Prop', item[1], '=>',
print item[2]
else:
print prefix, '?', item
def show_panel(prefix, p):
for item in p:
if not is_list(item):
print prefix, item
elif item and item[0] == 'al':
print prefix, 'Actuator list:'
for a in item[1:]:
show_actuator(prefix + ' ', a)
elif len(item) == 2:
print prefix, item[0], '=>', item[1]
elif len(item) == 3 and item[0] == 'prop':
print prefix, 'Prop', item[1], '=>',
print item[2]
else:
print prefix, '?', item
panel_error = 'panel error'
def dummy_callback(arg):
pass
def assign_members(target, attrlist, exclist, prefix):
for item in attrlist:
if is_list(item) and len(item) == 2 and item[0] not in exclist:
name, value = item[0], item[1]
ok = 1
if value[0] in '-0123456789':
value = eval(value)
elif value[0] == '"':
value = value[1:-1]
elif value == 'move-then-resize':
ok = 0
else:
print 'unknown value', value, 'for', name
ok = 0
if ok:
lhs = 'target.' + prefix + name
stmt = lhs + '=' + repr(value)
if debug:
print 'exec', stmt
try:
exec stmt + '\n'
except KeyboardInterrupt:
raise KeyboardInterrupt
except:
print 'assign failed:', stmt
def build_actuator(descr):
namelist = getattrlist(descr, 'name')
if namelist:
actuatorname = namelist[0][1:-1]
else:
actuatorname = ''
type = descr[0]
if type[:4] == 'pnl_':
type = type[4:]
act = pnl.mkact(type)
act.downfunc = act.activefunc = act.upfunc = dummy_callback
assign_members(act, descr[1:], ['al', 'data', 'name'], '')
datalist = getattrlist(descr, 'data')
prefix = ''
if type[-4:] == 'puck':
prefix = 'puck_'
elif type == 'mouse':
prefix = 'mouse_'
assign_members(act, datalist, [], prefix)
return (act, actuatorname)
def build_subactuators(panel, super_act, al):
for a in al:
act, name = build_actuator(a)
act.addsubact(super_act)
if name:
stmt = 'panel.' + name + ' = act'
if debug:
print 'exec', stmt
exec stmt + '\n'
if is_endgroup(a):
panel.endgroup()
sub_al = getattrlist(a, 'al')
if sub_al:
build_subactuators(panel, act, sub_al)
super_act.fixact()
def build_panel(descr):
if not descr or descr[0] != 'panel':
raise panel_error, 'panel description must start with "panel"'
if debug:
show_panel('', descr)
panel = pnl.mkpanel()
assign_members(panel, descr[1:], ['al'], '')
al = getattrlist(descr, 'al')
al = reverse(al)
for a in al:
act, name = build_actuator(a)
act.addact(panel)
if name:
stmt = 'panel.' + name + ' = act'
exec stmt + '\n'
if is_endgroup(a):
panel.endgroup()
sub_al = getattrlist(a, 'al')
if sub_al:
build_subactuators(panel, act, sub_al)
return panel
def my_dopanel():
a, down, active, up = pnl.dopanel()[:4]
if down:
down.downfunc(down)
if active:
active.activefunc(active)
if up:
up.upfunc(up)
return a
def defpanellist(file):
import panelparser
descrlist = panelparser.parse_file(open(file, 'r'))
panellist = []
for descr in descrlist:
panellist.append(build_panel(descr))
return panellist
from pnl import *
dopanel = my_dopanel
# okay decompyling C:\Users\PC\wotmods\files\originals\res\packages\scripts\scripts\common\Lib\plat-irix5\panel.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.05.04 15:33:49 Střední Evropa (letní čas)
| [
"info@webium.sk"
] | info@webium.sk |
5bf80d863d186f3a1ae4e7423084819ebb3e22d2 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/277/85229/submittedfiles/testes.py | d4013f6e86810a783d2c7133b2a748973ae519a1 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,023 | py | from minha_bib import *
nota1 = ler_inteiro()
nota2 = ler_inteiro_msg('minha msg de input aqui: ')
print (media(nota1,nota2))
"""
while(True):
while(True):
n = int(input("Digite um numero inteiro positivo: "))
if (n >= 0) :
break
f = 1
for i in range(2,n+1,1) :
f *= i
print("%d! = %d" % (n,f))
opt = input("Deseja continuar? [S ou N] ")
if (opt == 'N') :
print("\n\nATE BREVE!")
break
"""
"""
# DEFINI A FUNCAO
def fatorial(n) :
f = 1
for i in range(2,n+1,1) :
f *= i
return f
# VOU USAR
while(True):
while(True):
n = int(input("Digite um numero inteiro positivo: "))
if (n >= 0) :
break
fat = fatorial(n)
print("%d! = %d" % (n,f))
opt = input("Deseja continuar? [S ou N] ")
if (opt == 'N') :
print("\n\nATE BREVE!")
break
"""
| [
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
b27fbb843467b18be113187788bfa1fff7bf50fa | 786de89be635eb21295070a6a3452f3a7fe6712c | /InterfaceCtlr/tags/V01-00-25/web/icws/config/middleware.py | 1c5036406ef9ec4f50474a412cfbaa7cf56be1dc | [] | no_license | connectthefuture/psdmrepo | 85267cfe8d54564f99e17035efe931077c8f7a37 | f32870a987a7493e7bf0f0a5c1712a5a030ef199 | refs/heads/master | 2021-01-13T03:26:35.494026 | 2015-09-03T22:22:11 | 2015-09-03T22:22:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,060 | py | """Pylons middleware initialization"""
from beaker.middleware import CacheMiddleware, SessionMiddleware
from paste.cascade import Cascade
from paste.registry import RegistryManager
from paste.urlparser import StaticURLParser
from paste.deploy.converters import asbool
from pylons import config
from pylons.middleware import ErrorHandler, StatusCodeRedirect
from pylons.wsgiapp import PylonsApp
from routes.middleware import RoutesMiddleware
from icws.config.environment import load_environment
class _MyApp(PylonsApp):
"""
Subclass of PylonsApp which returns text/plain message when resource is
not found instead of html document.
"""
def dispatch(self, controller, environ, start_response):
if not controller:
body = "Resource '%s' does not exist" % environ.get('REQUEST_URI')
headers = [('Content-Type', 'text/plain; charset=utf8'),
('Content-Length', str(len(body)))]
start_response("404 Not Found", headers)
return [body]
else:
return PylonsApp.dispatch(self, controller, environ, start_response)
def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
"""Create a Pylons WSGI application and return it
``global_conf``
The inherited configuration for this application. Normally from
the [DEFAULT] section of the Paste ini file.
``full_stack``
Whether this application provides a full WSGI stack (by default,
meaning it handles its own exceptions and errors). Disable
full_stack when this application is "managed" by another WSGI
middleware.
``static_files``
Whether this application serves its own static files; disable
when another web server is responsible for serving them.
``app_conf``
The application's local configuration. Normally specified in
the [app:<name>] section of the Paste ini file (where <name>
defaults to main).
"""
# Configure the Pylons environment
load_environment(global_conf, app_conf)
# The Pylons WSGI app
app = _MyApp()
# Routing/Session/Cache Middleware
app = RoutesMiddleware(app, config['routes.map'])
app = SessionMiddleware(app, config)
app = CacheMiddleware(app, config)
# CUSTOM MIDDLEWARE HERE (filtered by error handling middlewares)
if asbool(full_stack):
# Handle Python exceptions
app = ErrorHandler(app, global_conf, **config['pylons.errorware'])
# Display error documents for 401, 403, 404 status codes (and
# 500 when debug is disabled)
if asbool(config['debug']):
app = StatusCodeRedirect(app)
else:
app = StatusCodeRedirect(app, [400, 401, 403, 404, 500])
# Establish the Registry for this application
app = RegistryManager(app)
if asbool(static_files):
# Serve static files
static_app = StaticURLParser(config['pylons.paths']['static_files'])
app = Cascade([static_app, app])
return app
| [
"salnikov@SLAC.STANFORD.EDU@b967ad99-d558-0410-b138-e0f6c56caec7"
] | salnikov@SLAC.STANFORD.EDU@b967ad99-d558-0410-b138-e0f6c56caec7 |
3282930d830eed41d2bc689235584f254e71ee95 | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/smartfault/ruleinst.py | d20ec0c3006f9d3a6d4fdad3c8dd8f61fa79d5e6 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,289 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class RuleInst(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = ClassMeta("cobra.model.smartfault.RuleInst")
meta.moClassName = "smartfaultRuleInst"
meta.rnFormat = "ruleinst-[%(key)s]"
meta.category = MoCategory.REGULAR
meta.label = "Rule Instance"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.smartfault.RuleSubject")
meta.childNamesAndRnPrefix.append(("cobra.model.smartfault.RuleSubject", "rulesubject-"))
meta.parentClasses.add("cobra.model.smartfault.RuleDef")
meta.rnPrefixes = [
('ruleinst-', True),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "createdTs", "createdTs", 50200, PropCategory.REGULAR)
prop.label = "Creation Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("createdTs", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "key", "key", 50196, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
prop.range = [(1, 512)]
meta.props.add("key", prop)
prop = PropMeta("str", "lastTransitionTs", "lastTransitionTs", 50199, PropCategory.REGULAR)
prop.label = "Last Transition Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastTransitionTs", prop)
prop = PropMeta("str", "lc", "lc", 50201, PropCategory.REGULAR)
prop.label = "Lifecycle"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "unknown"
prop._addConstant("raised", "raised", 2)
prop._addConstant("raised-clearing", "raised,-clearing", 8)
prop._addConstant("retaining", "retaining", 16)
prop._addConstant("soaking", "soaking", 1)
prop._addConstant("soaking-clearing", "soaking,-clearing", 4)
prop._addConstant("unknown", "unknown", 0)
meta.props.add("lc", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "message", "message", 50197, PropCategory.REGULAR)
prop.label = "Name"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("message", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "name", "name", 50195, PropCategory.REGULAR)
prop.label = "Name"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 512)]
meta.props.add("name", prop)
prop = PropMeta("str", "occur", "occur", 50202, PropCategory.REGULAR)
prop.label = "Occurance"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("occur", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "severity", "severity", 50198, PropCategory.REGULAR)
prop.label = "Severity"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 1
prop.defaultValueStr = "info"
prop._addConstant("cleared", "cleared", 0)
prop._addConstant("critical", "critical", 5)
prop._addConstant("info", "info", 1)
prop._addConstant("major", "major", 4)
prop._addConstant("minor", "minor", 3)
prop._addConstant("warning", "warning", 2)
meta.props.add("severity", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
meta.namingProps.append(getattr(meta.props, "key"))
getattr(meta.props, "key").needDelimiter = True
def __init__(self, parentMoOrDn, key, markDirty=True, **creationProps):
namingVals = [key]
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"bkhoward@live.com"
] | bkhoward@live.com |
cc84edf7c53698b41332e6856bf4369aa93221e1 | d04d3eec289376e7682403af2f32044b3991d27b | /10 - Exams/ExamPrepF-7.py | 50cd7a3f8f1c1ea3e3e759d4d225b6f5c7855296 | [] | no_license | m-evtimov96/softUni-python-fundamentals | 190002dbc6196211340126814e8ed4fce3b8a07f | 817a44a3d78130d37e58facfc7bcfdc8af5f4051 | refs/heads/master | 2020-12-10T12:45:27.847764 | 2020-06-23T13:09:43 | 2020-06-23T13:09:43 | 233,598,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,297 | py | username = input()
while True:
input_line = input()
if input_line == "Sign up":
break
input_line = input_line.split()
command = input_line[0]
if command == "Case":
case_type = input_line[1]
if case_type == "lower":
username = username.lower()
elif case_type == "upper":
username = username.upper()
print(username)
elif command == "Reverse":
start = int(input_line[1])
end = int(input_line[2])
if 0 <= start <= len(username) and 0 <= end <= len(username) and end > start:
print("".join(reversed(username[start:end+1])))
elif command == "Cut":
string_to_check = input_line[1]
if string_to_check in username:
username = username.replace(string_to_check, "")
print(username)
else:
print(f"The word {username} doesn't contain {string_to_check}.")
elif command == "Replace":
char_to_replace = input_line[1]
username = username.replace(char_to_replace, "*")
print(username)
elif command == "Check":
char_to_check = input_line[1]
if char_to_check in username:
print("Valid")
else:
print(f"Your username must contain {char_to_check}.")
| [
"m.evtimov196@gmail.com"
] | m.evtimov196@gmail.com |
9972224be353e26764302da53393a31f7934be5e | 09934eefaada5f1b8048215f0cda4083a60d9091 | /lib/chy506r/api/__init__.py | 3b37fd3b4360536c2fc1b0a68f5cbaae8cb8685b | [
"MIT"
] | permissive | ztmir/chy506r | 2b531b0858fb8b6dc43b873a19d998e29598eac5 | 10121baa70765b1a53b0d576dc11660bb3dd725a | refs/heads/master | 2021-04-06T06:41:32.448764 | 2018-03-14T22:03:02 | 2018-03-14T22:03:02 | 125,049,662 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | # -*- coding: utf8 -*-
from .. import util
# Re-export the contents of the sibling submodules into this package's
# namespace; import_all_from is presumably a star-import helper in
# `util` — TODO confirm against chy506r/util.
util.import_all_from(__package__, [
    '.chy506r_',
    '.plotter_',
])
# vim: set ft=python et ts=4 sw=4:
| [
"ptomulik@meil.pw.edu.pl"
] | ptomulik@meil.pw.edu.pl |
5a16a541e0ff8f2ea18cba1a2dc332b762134f5e | 2ecca33f7b934c69ab5383a24dd820b01bdc3617 | /tests/people_test.py | 50681da4ac429468e8b1b544ab0567a9b6677255 | [] | no_license | mentalclear/tau-api-testing-python | b2ff6e1e541e351385ea813faba8f0528749bca9 | 5648a70e4089c455b7d7322fd3d4a8324d7d1dcb | refs/heads/master | 2023-07-15T00:21:44.093476 | 2021-08-31T12:08:50 | 2021-08-31T12:08:50 | 397,907,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,976 | py | import requests
from assertpy.assertpy import assert_that
from json import dumps, loads
from config import BASE_URI
from utils.print_helpers import pretty_print
from uuid import uuid4
import pytest
import random
from utils.file_reader import read_file
from jsonpath_ng import parse
def test_read_all_has_kent():
    """GET-ing the people collection returns 200 and includes a 'Kent'."""
    response_text, response = get_all_users()
    pretty_print(response_text)
    assert_that(response.status_code).is_equal_to(200)
    # assertpy's extracting() pulls 'fname' from every record, so the
    # manual list comprehension (previously an unused local) is not needed.
    assert_that(response_text).extracting('fname').contains('Kent')
def test_new_person_can_be_added():
    """Creating a person makes it appear in the collection."""
    unique_last_name = create_new_unique_user()
    people = requests.get(BASE_URI).json()
    # BUG FIX: the original asserted on a `filter` object, which is always
    # truthy, so the test could never fail. Evaluate the match explicitly.
    is_new_user_created = any(
        person['lname'] == unique_last_name for person in people
    )
    assert_that(is_new_user_created).is_true()
def test_person_can_be_deleted():
new_user_last_name = create_new_unique_user()
all_users, _ = get_all_users()
new_user = search_user_by_last_name(all_users, new_user_last_name)[0]
print(new_user)
person_to_be_deleted = new_user['person_id']
url = f'{BASE_URI}/{person_to_be_deleted}'
response= requests.delete(url)
assert_that(response.status_code).is_equal_to(200)
def create_new_unique_user():
    """POST a new person with a unique last name; return that last name."""
    last_name = f'User {str(uuid4())}'
    body = dumps({'fname': 'New', 'lname': last_name})
    json_headers = {
        'Content-Type': 'application/json',
        'Accept': 'application/json',
    }
    response = requests.post(url=BASE_URI, data=body, headers=json_headers)
    # The API answers 204 No Content on successful creation.
    assert_that(response.status_code).is_equal_to(204)
    return last_name
def get_all_users():
    """GET the people collection; return (parsed JSON, raw response)."""
    response = requests.get(BASE_URI)
    return response.json(), response
def search_user_by_last_name(response_text, last_name):
    """Return every person record whose 'lname' equals last_name."""
    return list(
        filter(lambda person: person['lname'] == last_name, response_text)
    )
@pytest.fixture
def create_data():
payload = read_file('create_person.json')
random_no = random.randint(0, 1000)
last_name = f'Olabini{random_no}'
payload['lname'] = last_name
yield payload
def test_person_can_be_added_with_a_json_template(create_data):
create_person_with_unique_last_name(create_data)
response = requests.get(BASE_URI)
peoples = loads(response.text)
# Get all last names for any object in the root array
# Here $ = root, [*] represents any element in the array
# Read full syntax: https://pypi.org/project/jsonpath-ng/
jsonpath_expr = parse("$.[*].lname")
result = [match.value for match in jsonpath_expr.find(peoples)]
expected_last_name = create_data['lname']
assert_that(result).contains(expected_last_name)
def create_person_with_unique_last_name(body=None):
if body is None:
# Ensure a user with a unique last name is created everytime the test runs
# Note: json.dumps() is used to convert python dict to json string
unique_last_name = f'User {str(uuid4())}'
payload = dumps({
'fname': 'New',
'lname': unique_last_name
})
else:
unique_last_name = body['lname']
payload = dumps(body)
# Setting default headers to show that the client accepts json
# And will send json in the headers
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json'
}
# We use requests.post method with keyword params to make the request more readable
response = requests.post(url=BASE_URI, data=payload, headers=headers)
assert_that(response.status_code, description='Person not created').is_equal_to(requests.codes.no_content)
return unique_last_name
def search_created_user_in(peoples, last_name):
return [person for person in peoples if person['lname'] == last_name] | [
"mentalclear@gmail.com"
] | mentalclear@gmail.com |
d6c7d16d2e4f6624fb2f38f6d4de4b2af9ff0a13 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/138/usersdata/164/53382/submittedfiles/volumeTV.py | 3b3527168f49c2e6a7820c671bc7ad0039a79cef | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 263 | py | v=int(input('Digite o volume inicial: '))
t=int(input('Digite quantas vezes houve a troca de volume: '))
# NOTE(review): volume starts at 0, so the `while (0<volume<100)` condition
# below is false on entry and its body never runs — each change `a` is read
# and then discarded. Presumably `volume` should start from the initial
# volume `v` and be updated per change — confirm against the assignment spec.
volume=0
for i in range (1, t+1, 1):
    a=int(input('Digite a modificação do volume: '))
    # NOTE(review): `volume=v=a` overwrites both names with the change value
    # instead of accumulating (`volume += a`?) — confirm intended behavior.
    while (0<volume<100):
        volume=v=a
    print(volume)
"rafael.mota@ufca.edu.br"
] | rafael.mota@ufca.edu.br |
ddfb388db7ce08f39b58b80d878e30f9f5ea59aa | a22ca8cd5e434a28677e8f21c2afc6f32359a675 | /rentomatic-master/rentomatic/rest/storageroom.py | 34d8627fd4dff69ead4f716952960338c877d344 | [
"MIT"
] | permissive | wangdan25/rentomatic | 34cc78b73a894bae8e6a990cdf7d9a839d57980d | 2931a4f5ff8727b4e20c89004609aacd181f161c | refs/heads/master | 2022-06-21T05:35:37.306044 | 2020-05-13T04:05:26 | 2020-05-13T04:05:26 | 263,348,089 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,783 | py | import json
from flask import Blueprint, request, Response
from rentomatic.use_cases import request_objects as req
from rentomatic.shared import response_object as res
from rentomatic.repository import memrepo as mr
from rentomatic.use_cases import storageroom_use_cases as uc
from rentomatic.serializers import storageroom_serializer as ser
blueprint = Blueprint('storageroom', __name__)
STATUS_CODES = {
res.ResponseSuccess.SUCCESS: 200,
res.ResponseFailure.RESOURCE_ERROR: 404,
res.ResponseFailure.PARAMETERS_ERROR: 400,
res.ResponseFailure.SYSTEM_ERROR: 500
}
storageroom1 = {
'code': 'f853578c-fc0f-4e65-81b8-566c5dffa35a',
'size': 215,
'price': 39,
'longitude': -0.09998975,
'latitude': 51.75436293,
}
storageroom2 = {
'code': 'fe2c3195-aeff-487a-a08f-e0bdc0ec6e9a',
'size': 405,
'price': 66.5,
'longitude': 0.18228006,
'latitude': 51.74640997,
}
storageroom3 = {
'code': '913694c6-435a-4366-ba0d-da5334a611b2',
'size': 56,
'price': 60,
'longitude': 0.27891577,
'latitude': 51.45994069,
}
@blueprint.route('/storagerooms', methods=['GET'])
def storageroom():
    """List storage rooms, honouring ?filter_<field>=<value> query params."""
    # Collect every query-string argument carrying the 'filter_' prefix.
    filters = {
        arg.replace('filter_', ''): value
        for arg, value in request.args.items()
        if arg.startswith('filter_')
    }
    request_object = req.StorageRoomListRequestObject.from_dict(
        {'filters': filters}
    )
    repo = mr.MemRepo([storageroom1, storageroom2, storageroom3])
    use_case = uc.StorageRoomListUseCase(repo)
    response = use_case.execute(request_object)
    body = json.dumps(response.value, cls=ser.StorageRoomEncoder)
    return Response(body,
                    mimetype='application/json',
                    status=STATUS_CODES[response.type])
| [
"you@example.com"
] | you@example.com |
fb742151862b0bda3389cc7b8b8bd9e6948ccb69 | 8630c0a928f1e112b7cb67f2cab8ce212a231063 | /unhelpful/robinhood.py | 1cd653f36e7debfa6620df557d62510dfc6a0a56 | [
"MIT"
] | permissive | lockefox/unhelpful-stockbot | 4958201bc925ff6e00dfe0c170574aa27fb07329 | d0d8a282788843c6a5b4065681a1542be432e8bb | refs/heads/master | 2023-05-28T08:00:45.646039 | 2019-09-22T19:37:58 | 2019-09-22T19:37:58 | 197,688,810 | 0 | 0 | MIT | 2023-05-22T22:28:14 | 2019-07-19T02:36:41 | Python | UTF-8 | Python | false | false | 4,253 | py | """utilities for collecting stock quotes from Robinhood"""
import os
import pkgutil
import logging
import requests
from . import exceptions
from .utilities import get_config
class RobinhoodConnection:
    """Context manager for handling authenticated feeds from Robinhood.

    Args:
        username (str): Robinhood username
        password (str): Robinhood password
        client_id (str): Robinhood client_id for oAuth (a built-in public
            client id is used when empty)
    """
    def __init__(self, username, password, client_id=''):
        self._client_id = client_id
        if not self._client_id:
            self._client_id = 'c82SH0WZOsabOXGP2sxqcj34FxkvfnWRZBKlBjFS'
        self._username = username
        self._password = password
        self.auth_token = ''
        self.refresh_token = ''

    def get(self, endpoint, params=None, headers=None):
        """fetch data from endpoint

        Args:
            endpoint (str): what endpoint to fetch data from
            params (dict): params for requests
            headers (dict): headers for requests

        Returns:
            dict: requests.json() output

        Raises:
            requests.RequestException: connection/http errors
            RobinhoodNoLogin: lacking login credentials
        """
        # FIX: mutable default arguments (`params=dict(), headers=dict()`)
        # are shared across calls; use None sentinels instead.
        params = {} if params is None else params
        headers = {} if headers is None else headers
        if not any([self.auth_token, self.refresh_token]):
            raise exceptions.RobinhoodNoLogin
        headers = {**headers, 'Authorization': f'Bearer {self.auth_token}'}
        req = requests.get(
            f'{get_config("ROBINHOOD", "root")}/{endpoint}', params=params, headers=headers
        )
        req.raise_for_status()
        return req.json()

    def __enter__(self):
        # Password-grant oAuth login; keep both tokens for later calls.
        req = requests.post(
            f'{get_config("ROBINHOOD", "root")}/{get_config("ROBINHOOD", "oauth_endpoint")}',
            params=dict(
                grant_type='password',
                client_id=self._client_id,
                username=self._username,
                password=self._password,
            ),
        )
        req.raise_for_status()
        self.auth_token = req.json()['access_token']
        self.refresh_token = req.json()['refresh_token']
        return self

    def __exit__(self, *exc):
        # Revoke the refresh token on exit so the session ends server-side.
        req = requests.post(
            f'{get_config("ROBINHOOD", "root")}/{get_config("ROBINHOOD", "logout_endpoint")}',
            data=dict(client_id=self._client_id, token=self.refresh_token),
        )
        req.raise_for_status()
def get_price(ticker, client, endpoint=get_config("ROBINHOOD", "quotes_endpoint")):
    """Fetch the latest price for a stock ticker from Robinhood.

    Args:
        ticker (str): stock ticker
        client (:obj:`RobinhoodConnection`): connection context
        endpoint (str): quotes endpoint path

    Returns:
        float: last extended-hours trade price when present, otherwise
            the last trade price

    Raises:
        requests.RequestException: unable to connect to Robinhood
    """
    payload = client.get(endpoint, params={'symbols': ticker})
    result = payload['results'][0]
    after_hours = result['last_extended_hours_trade_price']
    if after_hours:
        return float(after_hours)
    return float(result['last_trade_price'])
def get_name(ticker, client, endpoint=get_config("ROBINHOOD", "instruments_endpoint")):
    """Fetch the `simple name` of a company given its ticker.

    Walks the paginated instruments endpoint until the ticker is found.

    Args:
        ticker (str): stock ticker
        client (:obj:`RobinhoodConnection`): connection context
        endpoint (str): endpoint for `instruments` / company trading metadata

    Returns:
        str: simple name of the company

    Raises:
        requests.RequestException: unable to connect to Robinhood
        TickerNotFound: ticker absent from every result page
    """
    ticker = ticker.upper()
    page = client.get(endpoint, params={'query': ticker})
    while True:
        match = next(
            (record for record in page['results'] if record['symbol'] == ticker),
            None,
        )
        if match is not None:
            return match['simple_name']
        if not page['next']:
            raise exceptions.TickerNotFound
        page = client.get(page['next'], params={'query': ticker})
| [
"locke.renard@gmail.com"
] | locke.renard@gmail.com |
d46197609d8e954b8cb87398f8a03cc13f5b8022 | 675a6ed1aa824ac801783471e634e538d11acc8d | /examples/example_flask/example_formatter.py | 6edf6309f9d3a0cca24cee0b6a42698f229fee93 | [
"MIT"
] | permissive | Eastwu5788/pre-request | 7ea50b3930252b5a0f99bf9588d0fdd8f4ae4562 | 42da2bf5edc6690983188e1ee013c810ef8985db | refs/heads/master | 2023-05-24T22:53:04.353491 | 2022-01-26T02:03:57 | 2022-01-26T02:03:57 | 100,257,925 | 102 | 9 | MIT | 2023-05-23T03:10:20 | 2017-08-14T10:56:59 | Python | UTF-8 | Python | false | false | 966 | py | # !/usr/local/python/bin/python
# -*- coding: utf-8 -*-
# (C) Wu Dong, 2020
# All rights reserved
# @Author: 'Wu Dong <wudong@eastwu.cn>'
# @Time: '2020-04-09 14:43'
# 3p
from flask import Flask
from pre_request import pre, Rule
def custom_formatter(code, msg):
    """Custom response formatter for pre-request.

    :param code: response code
    :param msg: response message (ignored; a fixed text is returned)
    """
    return {"code": code, "msg": "hello", "sss": "tt"}
app = Flask(__name__)
app.config["TESTING"] = True
filter_params = {
"email": Rule(email=True)
}
@app.route("/email", methods=['get', 'post'])
@pre.catch(filter_params)
def email_resp_handler():
    """Echo the validated request params (email-validation demo)."""
    from flask import g
    return str(g.params)
if __name__ == "__main__":
pre.add_formatter(custom_formatter)
resp = app.test_client().get("/email", data={
"email": "wudong@eastwu.cn"
})
print(resp.get_data(as_text=True))
| [
"wudong@eastwu.cn"
] | wudong@eastwu.cn |
352e2af0fc2126431efc05449e46c709820dc0f9 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /jecvfH5eyGLrSwzNh_17.py | 530d24676f42b48dd2c4961e0b3a2513ec1f1914 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 308 | py |
import re
animals = ["muggercrocodile","one-hornedrhino","python","moth","monitorlizard","bengaltiger"]

def fauna_number(txt):
    """Pair each animal found in txt (in order of appearance) with the
    numbers appearing in txt (in order of appearance)."""
    digits = re.findall(r'\d+', txt)
    present = sorted((animal for animal in animals if animal in txt),
                     key=txt.index)
    return list(zip(present, digits))
| [
"daniel.reich@danielreichs-MacBook-Pro.local"
] | daniel.reich@danielreichs-MacBook-Pro.local |
9c2ea26cbc755a4f53cb515c72985b5c247d054c | 8787b2fbb5017b61dcf6075a5261071b403847bf | /SWExpert/5607. [Professional] 조합.py | fcdb69ef676bc6db7fed89d771db093ff9716b60 | [] | no_license | khw5123/Algorithm | a6fe0009e33289813959553c2366d77c93d7b4b9 | 323a829f17a10276ab6f1aec719c496a3e76b974 | refs/heads/master | 2023-01-02T00:12:21.848924 | 2020-10-23T06:37:41 | 2020-10-23T06:37:41 | 282,162,235 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 562 | py | mod = 1234567891
def solve(n, x):
global mod
if x == 0:
return 1
num = solve(n, x//2)
ret = (num * num) % mod
if x % 2 == 0:
return ret
else:
return (ret * n) % mod
# For each test case, compute C(n, r) modulo `mod` using factorials and a
# modular inverse: solve(x, mod-2) is Fermat's-little-theorem inversion,
# which assumes `mod` is prime — TODO confirm 1234567891 is prime.
for t in range(int(input())):
    n, r = map(int, input().split())
    # factorial[i] = i! % mod, built incrementally.
    factorial = [1]*(n+1)
    for i in range(1, n+1):
        factorial[i] = (factorial[i-1] * i) % mod
    # denominator = (r! * (n-r)!)^(mod-2)  ==  modular inverse of r!(n-r)!.
    denominator = solve((factorial[r] * factorial[n-r]) % mod, mod-2)
    answer = (factorial[n] * denominator) % mod
    print('#' + str(t+1), str(answer))
"5123khw@hknu.ac.kr"
] | 5123khw@hknu.ac.kr |
566b75c70f81b310ab1705e6cd84012d7fcc8f98 | ad0910142f3e0a5cb0c17ec3ef96c68ee9e63798 | /qt/lineedit/le_2.py | 32acaa46b0693d343d1ac12eebf13789bbb8c9aa | [] | no_license | notmikeb/workspace | 94cce9d4c39e78b0471ff75f8af03576d93410e5 | be350091237dc7c318e4d9f1c9ac6a2f10356b82 | refs/heads/master | 2021-01-19T04:16:06.049253 | 2019-09-24T14:50:00 | 2019-09-24T14:50:00 | 43,929,782 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,599 | py | import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
# http://www.saltycrane.com/blog/2008/01/python-pyqt-tab-completion-example/
LIST_DATA = ['a', 'aardvark', 'aardvarks', 'aardwolf', 'aardwolves',
'abacus', 'babel', 'bach', 'cache',
'daggle', 'facet', 'kabob', 'kansas']
####################################################################
def main():
    """Build the Qt application, show the demo window, run the event loop."""
    app = QApplication(sys.argv)
    w = MyWindow()
    w.show()
    # exec_() blocks until the window closes; its return code goes to exit.
    sys.exit(app.exec_())
####################################################################
class MyWindow(QWidget):
def __init__(self, *args):
QWidget.__init__(self, *args)
# create objects
self.la = QLabel("Start typing to match items in list:")
self.le = MyLineEdit()
self.lm = MyListModel(LIST_DATA, self)
self.lv = QListView()
self.lv.setModel(self.lm)
# layout
layout = QVBoxLayout()
layout.addWidget(self.la)
layout.addWidget(self.le)
layout.addWidget(self.lv)
self.setLayout(layout)
# connections
self.connect(self.le, SIGNAL("textChanged(QString)"),
self.text_changed)
self.connect(self.le, SIGNAL("tabPressed"),
self.tab_pressed)
def text_changed(self):
""" updates the list of possible completions each time a key is
pressed """
pattern = str(self.le.text())
self.new_list = [item for item in LIST_DATA if item.find(pattern) == 0]
self.lm.setAllData(self.new_list)
def tab_pressed(self):
""" completes the word to the longest matching string
when the tab key is pressed """
# only one item in the completion list
if len(self.new_list) == 1:
newtext = self.new_list[0] + " "
self.le.setText(newtext)
# more than one remaining matches
elif len(self.new_list) > 1:
match = self.new_list.pop(0)
for word in self.new_list:
match = string_intersect(word, match)
self.le.setText(match)
####################################################################
class MyLineEdit(QLineEdit):
    """QLineEdit that emits a custom "tabPressed" signal instead of letting
    Qt use the Tab key for focus traversal."""
    def __init__(self, *args):
        QLineEdit.__init__(self, *args)
    def event(self, event):
        # Intercept Tab key-presses before Qt's default handling.
        if (event.type() == QEvent.KeyPress) and (event.key() == Qt.Key_Tab):
            self.emit(SIGNAL("tabPressed"))
            # Returning True marks the event as handled.
            return True
        return QLineEdit.event(self, event)
####################################################################
class MyListModel(QAbstractListModel):
    """Read-only list model backed by a plain Python list."""
    def __init__(self, datain, parent=None, *args):
        """ datain: a list where each item is a row
        """
        # NOTE(review): this calls QAbstractTableModel.__init__ although the
        # class derives from QAbstractListModel — confirm this is intentional.
        QAbstractTableModel.__init__(self, parent, *args)
        self.listdata = datain
    def rowCount(self, parent=QModelIndex()):
        return len(self.listdata)
    def data(self, index, role):
        # Only the display role is served; anything else gets a null QVariant.
        if index.isValid() and role == Qt.DisplayRole:
            return QVariant(self.listdata[index.row()])
        else:
            return QVariant()
    def setAllData(self, newdata):
        """ replace all data with new data """
        self.listdata = newdata
        # reset() tells attached views to refetch everything (PyQt4-era API).
        self.reset()
self.reset()
####################################################################
def string_intersect(str1, str2):
    """Return the longest common prefix of str1 and str2."""
    common = []
    for a, b in zip(str1, str2):
        if a != b:
            break
        common.append(a)
    return ''.join(common)
####################################################################
if __name__ == "__main__":
main() | [
"notmikeb@gmail.com"
] | notmikeb@gmail.com |
219986d7e18dee3ec7e5c8aa82e15af9ecc87ab4 | 9bb01fa882e713aa59345051fec07f4e3d3478b0 | /examples/memory.py | fe52ab2be578f504f2cd96d164193a9464fee3c4 | [] | no_license | syarra/cysparse | f1169c496b54d61761fdecbde716328fd0fb131b | 7654f7267ab139d0564d3aa3b21c75b364bcfe72 | refs/heads/master | 2020-05-25T16:15:38.160443 | 2017-03-14T21:17:39 | 2017-03-14T21:17:39 | 84,944,993 | 0 | 0 | null | 2017-03-14T12:11:48 | 2017-03-14T12:11:48 | null | UTF-8 | Python | false | false | 592 | py | from cysparse.sparse.ll_mat import *
A = ArrowheadLLSparseMatrix(nrow=50, ncol=800, itype=INT32_T, dtype=COMPLEX128_T)
print A
print "In bytes:"
print A.memory_real_in_bytes()
print A.memory_virtual_in_bytes()
print A.memory_element_in_bytes()
print "In bits:"
print A.memory_real_in_bits()
print A.memory_virtual_in_bits()
print A.memory_element_in_bits()
A.compress()
print A.memory_real_in_bytes()
print A.memory_real_in_bits()
print A
print "=" * 80
print A.memory_element_in_bits()
print A.memory_element_in_bytes()
print A.memory_index_in_bits()
print A.memory_index_in_bytes() | [
"nikolaj.van.omme@gmail.com"
] | nikolaj.van.omme@gmail.com |
a5beb4f64c56e337ed39593290ee686d6f416492 | 8f1c3c76bf8514818b733ba29fe575d8a5243add | /eduerp_health/models/health.py | 9f3a9a81be4161a4552cb29af6366a8395a72087 | [
"Apache-2.0"
] | permissive | westlyou/eduerp | 27f1c7dcd0d2badf50cb6c69f5e761d7f0c6a898 | 968d79b5adc729bc81192604f1fc223517d38ccf | refs/heads/master | 2021-06-04T05:11:13.858246 | 2016-09-12T07:21:17 | 2016-09-12T07:21:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,837 | py | # -*- coding: utf-8 -*-
###############################################################################
#
###############################################################################
from openerp import models, fields, api
from openerp.exceptions import ValidationError
class OpHealth(models.Model):
_name = 'op.health'
_rec_name = 'student_id'
_description = """Health Detail for Students and Faculties"""
type = fields.Selection(
[('student', 'Student'), ('faculty', 'Faculty')],
'Type', default='student', required=True)
student_id = fields.Many2one('op.student', 'Student')
faculty_id = fields.Many2one('op.faculty', 'Faculty')
height = fields.Float('Height(C.M.)', required=True)
weight = fields.Float('Weight', required=True)
blood_group = fields.Selection(
[('A+', 'A+ve'), ('B+', 'B+ve'), ('O+', 'O+ve'), ('AB+', 'AB+ve'),
('A-', 'A-ve'), ('B-', 'B-ve'), ('O-', 'O-ve'), ('AB-', 'AB-ve')],
'Blood Group', required=True)
physical_challenges = fields.Boolean('Physical Challenge?', default=False)
physical_challenges_note = fields.Text('Physical Challenge')
major_diseases = fields.Boolean('Major Diseases?', default=False)
major_diseases_note = fields.Text('Major Diseases')
eyeglasses = fields.Boolean('Eye Glasses?')
eyeglasses_no = fields.Char('Eye Glasses', size=64)
regular_checkup = fields.Boolean(
'Any Regular Checkup Required?', default=False)
health_line = fields.One2many(
'op.health.line', 'health_id', 'Checkup Lines')
@api.constrains('height', 'weight')
def check_height_weight(self):
if self.height <= 0.0 or self.weight <= 0.0:
raise ValidationError("Enter proper height and weight!")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"huysamdua@yahoo.com"
] | huysamdua@yahoo.com |
193729fe025f3c1a9a71925ec6b0f7dae9c8690c | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adjectives/_bananas.py | 5da6d16ede25e387d2fefa2b110d677e35f5187a | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 415 | py |
#calss header
class _BANANAS():
def __init__(self,):
self.name = "BANANAS"
self.definitions = [u'very silly: ', u'to become extremely angry or excited: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'adjectives'
def run(self, obj1, obj2):
self.jsondata[obj2] = {}
self.jsondata[obj2]['properties'] = self.name.lower()
return self.jsondata
| [
"xingwang1991@gmail.com"
] | xingwang1991@gmail.com |
4c7d34cab8e0809dd3e4b61828e891a790c80f34 | 90f7253957e25105950c2a0305033c14302237bb | /encapsulation/exercise/pizza_calories/project/topping.py | 652473d33ba12823fdc496424478646c1607281e | [] | no_license | DeanDupalov/Softuni-Python-OOP | a4f010a7a218d4cdd466e2158dcea2e861304627 | 72fb3f00991419ef55f6c53101edba7fbb84746b | refs/heads/master | 2023-04-01T01:59:50.441918 | 2021-04-12T18:47:56 | 2021-04-12T18:47:56 | 340,356,051 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 492 | py | class Topping:
topping_type: str
weight: float
def __init__(self, topping_type, weight):
self.topping_type = topping_type
self.weight = weight
@property
def topping_type(self):
return self.__topping_type
@topping_type.setter
def topping_type(self, value):
self.__topping_type = value
@property
def weight(self):
return self.__weight
@weight.setter
def weight(self, value):
self.__weight = value
| [
"75751527+DeanDupalov@users.noreply.github.com"
] | 75751527+DeanDupalov@users.noreply.github.com |
bbf11456a422f642b88c670ac1315b8d736a4d2d | d57b51ec207002e333b8655a8f5832ed143aa28c | /.history/gos_20200614060327.py | 43b8bc11b0159debb65da784dff5c6138f57f685 | [] | no_license | yevheniir/python_course_2020 | b42766c4278a08b8b79fec77e036a1b987accf51 | a152d400ab4f45d9d98d8ad8b2560d6f0b408c0b | refs/heads/master | 2022-11-15T07:13:24.193173 | 2020-07-11T15:43:26 | 2020-07-11T15:43:26 | 278,890,802 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,895 | py | # # Імпорт фажливих бібліотек
# from BeautifulSoup import BeautifulSoup
# import urllib2
# import re
# # Створення функції пошуку силок
# def getLinks(url):
# # отримання та присвоєння контенту сторінки в змінну
# html_page = urllib2.urlopen(url)
# # Перетворення контенту в обєкт бібліотеки BeautifulSoup
# soup = BeautifulSoup(html_page)
# # створення пустого масиву для лінків
# links = []
# # ЗА ДОПОМОГОЮ ЧИКЛУ ПРОХЛДИМСЯ ПО ВСІХ ЕЛЕМЕНТАХ ДЕ Є СИЛКА
# for link in soup.findAll('a', attrs={'href': re.compile("^http://")}):
# # Додаємо всі силки в список
# links.append(link.get('href'))
# # повертаємо список
# return links
# -----------------------------------------------------------------------------------------------------------
# # # Імпорт фажливих бібліотек
# import subprocess
# # Створення циклу та використання функції range для генерації послідовних чисел
# for ping in range(1,10):
# # генерування IP адреси базуючись на номері ітерації
# address = "127.0.0." + str(ping)
# # виклик функції call яка робить запит на IP адрес та запис відповіді в змінну
# res = subprocess.call(['ping', '-c', '3', address])
# # За допомогою умовних операторів перевіряємо відповідь та виводимо результат
# if res == 0:
# print "ping to", address, "OK"
# elif res == 2:
# print "no response from", address
# else:
# print "ping to", address, "failed!"
# -----------------------------------------------------------------------------------------------------------
# # Імпорт фажливих бібліотек
# import requests
# # Ітеруємося по масиву з адресами зображень
# for i, pic_url in enumerate(["http://x.com/nanachi.jpg", "http://x.com/nezuko.jpg"]):
# # Відкриваємо файл базуючись на номері ітерації
# with open('pic{0}.jpg'.format(i), 'wb') as handle:
# # Отримуємо картинку
# response = requests.get(pic_url, stream=True)
# # Використовуючи умовний оператор перевіряємо чи успішно виконався запит
# if not response.ok:
# print(response)
# # Ітеруємося по байтах картинки та записуємо батчаси в 1024 до файлу
# for block in response.iter_content(1024):
# # Якщо байти закінчилися, завершуємо алгоритм
# if not block:
# break
# # Записуємо байти в файл
# handle.write(block)
# -----------------------------------------------------------------------------------------------------------
# Створюємо клас для рахунку
class Bank_Account:
    """Simple interactive bank account: deposit/withdraw amounts via stdin."""
    # The constructor initialises the balance to 0.
    def __init__(self):
        self.balance=0
        print("Hello!!! Welcome to the Deposit & Withdrawal Machine")
    # deposit(): read an amount from stdin and add it to the balance.
    def deposit(self):
        amount=float(input("Enter amount to be Deposited: "))
        self.balance += amount
        print("\n Amount Deposited:",amount)
    # withdraw(): read an amount from stdin and subtract it if funds allow.
    def withdraw(self):
        amount = float(input("Enter amount to be Withdrawn: "))
        # Only withdraw when the balance covers the requested amount.
        if self.balance>=amount:
            self.balance-=amount
            print("\n You Withdrew:", amount)
        else:
            print("\n Insufficient balance ")
        # (translated) print the balance to the screen
    def display(self):
        print("\n Net Available Balance=",self.balance)
# Driver code
# creating an object of class
s = Bank_Account()
# Calling functions with that class object
s.deposit()
s.withdraw()
s.display() | [
"yevheniira@intelink-ua.com"
] | yevheniira@intelink-ua.com |
18ba6200a6df7e620bdb37d52e470e7c9cb06db9 | 3cf66bb76a6d8bef1c1945c1bdef0b16254e3470 | /windows/lng_window.py | 465c49a000104327cab11d89164f44f5319f7d1b | [] | no_license | DmitryChitalov/OpenFOAM_decompose_GUI | f055b6d24c90dab07140713960003107d72aea1c | fd614b2c77df327588809fccbc7a233ce59c1688 | refs/heads/master | 2020-04-17T17:08:38.821276 | 2019-01-21T07:57:50 | 2019-01-21T07:57:50 | 166,770,899 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,683 | py | # -*- coding: utf-8 -*-
# -------------------------------Импорт модулей----------------------------------#
from PyQt5 import QtCore
from PyQt5 import QtSql
from PyQt5 import QtGui
import shutil
import sys
import re
import os
import os.path
from PyQt5.QtWidgets import QWidget, QFileDialog, QLineEdit, QLabel, \
QHBoxLayout, QLineEdit, QPushButton, QGridLayout, \
QFrame, QVBoxLayout, QFormLayout, QRadioButton, QDockWidget
from windows.bMD_window import bmd_window_class
# ---------------------------Главная форма проекта-------------------------------#
class lng_form_class(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.setWindowFlags(QtCore.Qt.Dialog | QtCore.Qt.WindowSystemMenuHint)
self.setWindowModality(QtCore.Qt.WindowModal)
global par
par = parent
global int_lng
int_lng = par.interface_lng_val
# ------------------------------------Первый блок формы--------------------------------------#
self.lng_label = QLabel()
self.lng_lbl_hbox = QHBoxLayout()
self.lng_lbl_hbox.addWidget(self.lng_label)
self.ru_radio = QRadioButton("Ru")
self.en_radio = QRadioButton("En")
self.lng_grid = QGridLayout()
self.lng_grid.addWidget(self.ru_radio, 0, 0)
self.lng_grid.addWidget(self.en_radio, 1, 0)
self.lng_frame = QFrame()
self.lng_frame.setFrameShape(QFrame.Panel)
self.lng_frame.setFrameShadow(QFrame.Sunken)
self.lng_frame.setLayout(self.lng_grid)
self.lng_hbox = QVBoxLayout()
self.lng_hbox.addWidget(self.lng_frame)
# ---------------------Кнопки сохранения и отмены и их блок-------------------------#
self.save_button = QPushButton()
self.save_button.setFixedSize(80, 25)
self.save_button.clicked.connect(self.on_save_clicked)
self.cancel_button = QPushButton()
self.cancel_button.setFixedSize(80, 25)
self.cancel_button.clicked.connect(self.on_cancel_clicked)
self.buttons_hbox = QHBoxLayout()
self.buttons_hbox.addWidget(self.save_button)
self.buttons_hbox.addWidget(self.cancel_button)
# -------------------------Фрейм формы---------------------------#
self.form_grid = QGridLayout()
self.form_grid.addLayout(self.lng_lbl_hbox, 0, 0, alignment=QtCore.Qt.AlignCenter)
self.form_grid.addLayout(self.lng_hbox, 1, 0, alignment=QtCore.Qt.AlignCenter)
self.form_grid.addLayout(self.buttons_hbox, 2, 0, alignment=QtCore.Qt.AlignCenter)
self.form_frame = QFrame()
self.form_frame.setStyleSheet(open("./styles/properties_form_style.qss","r").read())
self.form_frame.setLayout(self.form_grid)
self.form_vbox = QVBoxLayout()
self.form_vbox.addWidget(self.form_frame)
# --------------------Размещение на форме всех компонентов---------#
self.form = QFormLayout()
self.form.addRow(self.form_vbox)
self.setLayout(self.form)
# --------------------Определяем параметры интерфейса окна---------#
if int_lng == 'Russian':
self.lng_label.setText("Выберите язык интерфейса программы")
self.save_button.setText("Сохранить")
self.cancel_button.setText("Отмена")
self.ru_radio.setChecked(True)
elif int_lng == 'English':
self.lng_label.setText("Select the interface language for the program")
self.save_button.setText("Save")
self.cancel_button.setText("Cancel")
self.en_radio.setChecked(True)
# ------------------------Функции связанные с формой-----------------------------#
# ....................Функция, запускаемая при нажатии кнопки "сохранить"....................#
def on_save_clicked(self):
if self.ru_radio.isChecked() == True:
interface_lng = 'Russian'
elif self.en_radio.isChecked() == True:
interface_lng = 'English'
while par.tdw_grid.count():
item = par.tdw_grid.takeAt(0)
widget = item.widget()
widget.deleteLater()
fsw_default = QLabel()
par.fsw.setWidget(fsw_default)
par.fsw.setTitleBarWidget(fsw_default)
ffw_default = QLabel()
par.ffw.setWidget(ffw_default)
par.ffw.setTitleBarWidget(ffw_default)
serv_mes_default = QLabel()
par.serv_mes.setWidget(serv_mes_default)
par.serv_mes.setTitleBarWidget(serv_mes_default)
cdw_default = QLabel()
par.cdw.setWidget(cdw_default)
par.cdw.setTitleBarWidget(cdw_default)
par.on_lng_get(interface_lng)
self.close()
# .....................Функция, запускаемая при нажатии кнопки "отмена"......................#
def on_cancel_clicked(self):
self.close()
| [
"cdi9@yandex.ru"
] | cdi9@yandex.ru |
ed215734cf4c04b73afad4971cafa935f9826513 | 3a533d1503f9a1c767ecd3a29885add49fff4f18 | /saleor/graphql/checkout/tests/deprecated/test_checkout_shipping_method_update.py | fefe034680a4ce74d3b921d68f0758228e883437 | [
"BSD-3-Clause"
] | permissive | jonserna/saleor | 0c1e4297e10e0a0ce530b5296f6b4488f524c145 | b7d1b320e096d99567d3fa7bc4780862809d19ac | refs/heads/master | 2023-06-25T17:25:17.459739 | 2023-06-19T14:05:41 | 2023-06-19T14:05:41 | 186,167,599 | 0 | 0 | BSD-3-Clause | 2019-12-29T15:46:40 | 2019-05-11T18:21:31 | TypeScript | UTF-8 | Python | false | false | 7,594 | py | from unittest.mock import patch
import graphene
from .....checkout.error_codes import CheckoutErrorCode
from .....checkout.fetch import (
fetch_checkout_info,
fetch_checkout_lines,
get_delivery_method_info,
)
from .....plugins.manager import get_plugins_manager
from .....shipping.utils import convert_to_shipping_method_data
from ....tests.utils import get_graphql_content
MUTATION_UPDATE_SHIPPING_METHOD = """
mutation checkoutShippingMethodUpdate(
$checkoutId: ID, $token: UUID, $shippingMethodId: ID!){
checkoutShippingMethodUpdate(
checkoutId: $checkoutId,
token: $token,
shippingMethodId: $shippingMethodId
) {
errors {
field
message
code
}
checkout {
id
token
}
}
}
"""
@patch(
    "saleor.graphql.checkout.mutations.checkout_shipping_method_update."
    "clean_delivery_method"
)
def test_checkout_shipping_method_update_by_id(
    mock_clean_shipping,
    staff_api_client,
    shipping_method,
    checkout_with_item_and_shipping_method,
):
    # given: a checkout that already has a (different) shipping method set
    checkout = checkout_with_item_and_shipping_method
    old_shipping_method = checkout.shipping_method
    query = MUTATION_UPDATE_SHIPPING_METHOD
    # clean_delivery_method is patched so any method is considered valid
    mock_clean_shipping.return_value = True
    checkout_id = graphene.Node.to_global_id("Checkout", checkout.pk)
    method_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
    # when: the checkout is identified via the deprecated `checkoutId` argument
    response = staff_api_client.post_graphql(
        query, {"checkoutId": checkout_id, "shippingMethodId": method_id}
    )
    data = get_graphql_content(response)["data"]["checkoutShippingMethodUpdate"]
    checkout.refresh_from_db()
    # then: rebuild the checkout_info the mutation should have handed to
    # clean_delivery_method — its delivery info must still describe the OLD
    # method, while the method under validation is the NEW one.
    manager = get_plugins_manager()
    lines, _ = fetch_checkout_lines(checkout)
    checkout_info = fetch_checkout_info(checkout, lines, manager)
    checkout_info.delivery_method_info = get_delivery_method_info(
        convert_to_shipping_method_data(
            old_shipping_method, old_shipping_method.channel_listings.first()
        ),
        None,
    )
    mock_clean_shipping.assert_called_once_with(
        checkout_info=checkout_info,
        lines=lines,
        method=convert_to_shipping_method_data(
            shipping_method, shipping_method.channel_listings.first()
        ),
    )
    errors = data["errors"]
    assert not errors
    assert data["checkout"]["id"] == checkout_id
    # the new shipping method was persisted on the checkout
    assert checkout.shipping_method == shipping_method
@patch(
    "saleor.graphql.checkout.mutations.checkout_shipping_method_update."
    "clean_delivery_method"
)
def test_checkout_shipping_method_update_by_token(
    mock_clean_shipping,
    staff_api_client,
    shipping_method,
    checkout_with_item_and_shipping_method,
):
    # given: a checkout that already has a (different) shipping method set
    checkout = checkout_with_item_and_shipping_method
    old_shipping_method = checkout.shipping_method
    query = MUTATION_UPDATE_SHIPPING_METHOD
    # clean_delivery_method is patched so any method is considered valid
    mock_clean_shipping.return_value = True
    checkout_id = graphene.Node.to_global_id("Checkout", checkout.pk)
    method_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)
    # when: the checkout is identified via its token instead of checkoutId
    response = staff_api_client.post_graphql(
        query, {"token": checkout.token, "shippingMethodId": method_id}
    )
    data = get_graphql_content(response)["data"]["checkoutShippingMethodUpdate"]
    checkout.refresh_from_db()
    # then: rebuild the checkout_info the mutation should have handed to
    # clean_delivery_method — its delivery info must still describe the OLD
    # method, while the method under validation is the NEW one.
    manager = get_plugins_manager()
    lines, _ = fetch_checkout_lines(checkout)
    checkout_info = fetch_checkout_info(checkout, lines, manager)
    checkout_info.delivery_method_info = get_delivery_method_info(
        convert_to_shipping_method_data(
            old_shipping_method, old_shipping_method.channel_listings.first()
        ),
        None,
    )
    mock_clean_shipping.assert_called_once_with(
        checkout_info=checkout_info,
        lines=lines,
        method=convert_to_shipping_method_data(
            shipping_method, shipping_method.channel_listings.first()
        ),
    )
    errors = data["errors"]
    assert not errors
    assert data["checkout"]["id"] == checkout_id
    # the new shipping method was persisted on the checkout
    assert checkout.shipping_method == shipping_method
@patch(
    "saleor.graphql.checkout.mutations.checkout_shipping_method_update."
    "clean_delivery_method"
)
def test_checkout_shipping_method_update_neither_token_and_id_given(
    mock_clean_shipping, staff_api_client, checkout_with_item, shipping_method
):
    # given: a valid shipping method, but no checkout identifier at all
    mock_clean_shipping.return_value = True
    shipping_method_gid = graphene.Node.to_global_id(
        "ShippingMethod", shipping_method.id
    )

    # when: the mutation is sent without checkoutId and without token
    response = staff_api_client.post_graphql(
        MUTATION_UPDATE_SHIPPING_METHOD,
        {"shippingMethodId": shipping_method_gid},
    )

    # then: a single GRAPHQL_ERROR is reported and no checkout is returned
    payload = get_graphql_content(response)["data"]["checkoutShippingMethodUpdate"]
    errors = payload["errors"]
    assert not payload["checkout"]
    assert len(errors) == 1
    assert errors[0]["code"] == CheckoutErrorCode.GRAPHQL_ERROR.name
@patch(
    "saleor.graphql.checkout.mutations.checkout_shipping_method_update."
    "clean_delivery_method"
)
def test_checkout_shipping_method_update_both_token_and_id_given(
    mock_clean_shipping, staff_api_client, checkout_with_item, shipping_method
):
    # given: a checkout referenced by BOTH its deprecated ID and its token
    checkout = checkout_with_item
    mock_clean_shipping.return_value = True
    checkout_gid = graphene.Node.to_global_id("Checkout", checkout.pk)
    shipping_method_gid = graphene.Node.to_global_id(
        "ShippingMethod", shipping_method.id
    )

    # when: both identifiers are supplied in a single request
    response = staff_api_client.post_graphql(
        MUTATION_UPDATE_SHIPPING_METHOD,
        {
            "checkoutId": checkout_gid,
            "token": checkout_with_item.token,
            "shippingMethodId": shipping_method_gid,
        },
    )

    # then: a single GRAPHQL_ERROR is reported and no checkout is returned
    payload = get_graphql_content(response)["data"]["checkoutShippingMethodUpdate"]
    errors = payload["errors"]
    assert not payload["checkout"]
    assert len(errors) == 1
    assert errors[0]["code"] == CheckoutErrorCode.GRAPHQL_ERROR.name
# Same happy path as test_checkout_shipping_method_update_by_id, but with the
# checkout's metadata storage deleted first — the mutation must not rely on it.
@patch(
    "saleor.graphql.checkout.mutations.checkout_shipping_method_update."
    "clean_delivery_method"
)
def test_checkout_shipping_method_update_by_id_no_checkout_metadata(
    mock_clean_shipping,
    staff_api_client,
    shipping_method,
    checkout_with_item_and_shipping_method,
):
    # given
    checkout = checkout_with_item_and_shipping_method
    old_shipping_method = checkout.shipping_method
    query = MUTATION_UPDATE_SHIPPING_METHOD
    mock_clean_shipping.return_value = True
    # remove the metadata storage entirely to prove it is not required
    checkout.metadata_storage.delete()
    checkout_id = graphene.Node.to_global_id("Checkout", checkout.pk)
    method_id = graphene.Node.to_global_id("ShippingMethod", shipping_method.id)

    # when
    response = staff_api_client.post_graphql(
        query, {"checkoutId": checkout_id, "shippingMethodId": method_id}
    )

    # then
    data = get_graphql_content(response)["data"]["checkoutShippingMethodUpdate"]
    checkout.refresh_from_db()
    # rebuild the checkout_info the mutation should have handed to
    # clean_delivery_method: old method in delivery info, new method validated
    manager = get_plugins_manager()
    lines, _ = fetch_checkout_lines(checkout)
    checkout_info = fetch_checkout_info(checkout, lines, manager)
    checkout_info.delivery_method_info = get_delivery_method_info(
        convert_to_shipping_method_data(
            old_shipping_method, old_shipping_method.channel_listings.first()
        ),
        None,
    )
    mock_clean_shipping.assert_called_once_with(
        checkout_info=checkout_info,
        lines=lines,
        method=convert_to_shipping_method_data(
            shipping_method, shipping_method.channel_listings.first()
        ),
    )
    errors = data["errors"]
    assert not errors
    assert data["checkout"]["id"] == checkout_id
    assert checkout.shipping_method == shipping_method
| [
"noreply@github.com"
] | jonserna.noreply@github.com |
baaf820ec0fcbd83caa164ba770029f08a8807d2 | cdc91518212d84f3f9a8cd3516a9a7d6a1ef8268 | /python/datetime_challenge.py | bc3a461215b59792b81ebf39cce8d3f3e6e83ede | [] | no_license | paulfranco/code | 1a1a316fdbe697107396b98f4dfe8250b74b3d25 | 10a5b60c44934d5d2788d9898f46886b99bd32eb | refs/heads/master | 2021-09-20T14:00:35.213810 | 2018-08-10T06:38:40 | 2018-08-10T06:38:40 | 112,060,914 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | # naive is a datetime with no timezone.
# Create a new timezone for US/Pacific, which is 8 hours behind UTC (UTC-08:00).
# Then make a new variable named hill_valley that is naive with its tzinfo attribute replaced with the US/Pacific timezone you made.
import datetime
# The naive datetime the challenge starts from (no tzinfo attached).
naive = datetime.datetime(2015, 10, 21, 4, 29)
# US/Pacific as required by the challenge: fixed offset UTC-08:00.
usPacific = datetime.timezone(datetime.timedelta(hours=-8))
# Per the challenge text, hill_valley must be *naive* with its tzinfo
# replaced by US/Pacific.  The original code instead constructed an
# unrelated datetime (2014-04-21 09:00), which does not satisfy the task.
hill_valley = naive.replace(tzinfo=usPacific)
"paulfranco@me.com"
] | paulfranco@me.com |
06548eeec187cb35b04530c6468cfc46ff894a36 | 2b2db19b4f60c9367313c6325f62d52713a6e304 | /src/main.py | ebf3a826e17d11621ff2581a64905ba4a08eab3b | [] | no_license | movefast/ComparingTileCoders | 853f640d2fdca9ced9afe07a972cd8d8a2acc01b | 3d3e4c02dea3d5c8092229d3e83ee4bb77594fdd | refs/heads/master | 2023-01-28T18:18:45.165099 | 2020-12-11T18:19:33 | 2020-12-11T18:19:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,651 | py | import numpy as np
import time
import sys
import os
sys.path.append(os.getcwd())
from RlGlue import RlGlue
from src.experiment import ExperimentModel
from src.problems.registry import getProblem
from PyExpUtils.utils.Collector import Collector
from src.utils.rlglue import OneStepWrapper
# Command line: argv[1] = experiment description JSON, argv[2] = permutation
# index.  (The original usage text advertised a <runs> argument that the
# script never reads — the message now matches the actual parsing.)
if len(sys.argv) < 3:
    print('run again with:')
    print('python3 src/main.py <path/to/description.json> <idx>')
    sys.exit(1)

exp = ExperimentModel.load(sys.argv[1])
idx = int(sys.argv[2])

max_steps = exp.max_steps
run = exp.getRun(idx)

collector = Collector()

# set random seeds accordingly so each run index is reproducible
np.random.seed(run)

# Build the problem, agent, and environment for this parameter setting.
Problem = getProblem(exp.problem)
problem = Problem(exp, idx)

agent = problem.getAgent()
env = problem.getEnvironment()

wrapper = OneStepWrapper(agent, problem.getGamma(), problem.rep)

glue = RlGlue(wrapper, env)

# Run the experiment
glue.start()
start_time = time.time()
episode = 0
for step in range(exp.max_steps):
    _, _, _, t = glue.step()

    if t:
        episode += 1
        glue.start()

        # collect an array of rewards that is the length of the number of steps in episode
        # effectively we count the whole episode as having received the same final reward
        collector.concat('step_return', [glue.total_reward] * glue.num_steps)

        # compute the average time-per-step in ms.
        # guard with max(step, 1): the original divided by zero when the very
        # first step of the experiment was terminal (step == 0).
        avg_time = 1000 * (time.time() - start_time) / max(step, 1)
        print(episode, step, glue.total_reward, f'{avg_time:.4}ms')

        glue.total_reward = 0
        glue.num_steps = 0

# pad the return series to max_steps so every run has equal length
collector.fillRest('step_return', exp.max_steps)
collector.collect('time', time.time() - start_time)
# fraction of weights that ended up non-zero (np.product was removed in
# NumPy 2.0; np.prod is the supported spelling)
collector.collect('feature_utilization', np.count_nonzero(agent.w) / np.prod(agent.w.shape))

# import matplotlib.pyplot as plt
# from src.utils.plotting import plot

# fig, ax1 = plt.subplots(1)

# collector.reset()

# return_data = collector.getStats('step_return')
# plot(ax1, return_data)
# ax1.set_title('Return')

# plt.show()
# exit()

from PyExpUtils.results.backends.csv import saveResults
from PyExpUtils.utils.arrays import downsample

for key in collector.run_data:
    data = collector.run_data[key]
    # heavily downsample the data to reduce storage costs
    # we don't need all of the data-points for plotting anyways
    # method='window' returns a window average
    # method='subsample' returns evenly spaced samples from array
    # num=1000 makes sure final array is of length 1000
    # percent=0.1 makes sure final array is 10% of the original length (only one of `num` or `percent` can be specified)
    data = downsample(data, num=500, method='window')
    saveResults(exp, idx, key, data, precision=2)
| [
"andnpatterson@gmail.com"
] | andnpatterson@gmail.com |
0d6e53f857240c591ca7b8c99819c53941741926 | f015ba9945ef2035d219cedc0992063f0b5ea10c | /src/project/settings.py | 9e64a4e20192856b5ee5db823e8937ff24d95e57 | [] | no_license | JacobSima/trydjango | 9e4bbdceb26c73ef9ced3e77ab7b21c891f1b5e9 | 2694a0239b69fc4ec81a2e1aee7df1e1e6892a46 | refs/heads/master | 2021-09-29T22:46:07.172924 | 2020-04-18T15:59:37 | 2020-04-18T15:59:37 | 252,255,176 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,322 | py | """
Django settings for project project.
Generated by 'django-admin startproject' using Django 3.0.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# Read from the environment when available so production deployments don't
# ship the hard-coded development key; the fallback keeps local behavior
# identical to before.
SECRET_KEY = os.environ.get(
    'DJANGO_SECRET_KEY',
    '$9a(!!h1g*rlisgvm7d-g$*ruzvs=+7gm184b0%k9+kbrqd*jp',
)

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',

    # Third party

    # Own
    'products.apps.ProductsConfig',
    'pages.apps.PagesConfig',
    'articles.apps.ArticlesConfig',
    'blogs.apps.BlogsConfig',
    'courses.apps.CoursesConfig'
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'project.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        # project-level templates live in <repo>/templates
        'DIRS': [os.path.join(BASE_DIR, 'templates')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'project.wsgi.application'


# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db2.sqlite3'),
    }
}


# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/

STATIC_URL = '/static/'
| [
"simajacob2011@gmail.com"
] | simajacob2011@gmail.com |
4e73792b01bc0fbf380a261a81ca7945cd2daf94 | 32eeb97dff5b1bf18cf5be2926b70bb322e5c1bd | /benchmark/websms/testcase/firstcases/testcase6_024.py | 83901a5a80dd6d8457a042e27d8685e19f4e9600 | [] | no_license | Prefest2018/Prefest | c374d0441d714fb90fca40226fe2875b41cf37fc | ac236987512889e822ea6686c5d2e5b66b295648 | refs/heads/master | 2021-12-09T19:36:24.554864 | 2021-12-06T12:46:14 | 2021-12-06T12:46:14 | 173,225,161 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,667 | py | #coding=utf-8
import os
import subprocess
import time
import traceback
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.common.exceptions import NoSuchElementException, WebDriverException
# Appium capabilities: drive the WebSMS app (de.ub0r.android.websms) on an
# Android 4.4 emulator, routing coverage through the Jacoco instrumentation
# runner; noReset keeps app state between generated test cases.
desired_caps = {
    'platformName' : 'Android',
    'deviceName' : 'Android Emulator',
    'platformVersion' : '4.4',
    'appPackage' : 'de.ub0r.android.websms',
    'appActivity' : 'de.ub0r.android.websms.WebSMS',
    'resetKeyboard' : True,
    'androidCoverage' : 'de.ub0r.android.websms/de.ub0r.android.websms.JacocoInstrumentation',
    'noReset' : True
    }
def command(cmd, timeout=5):
    """Run *cmd* in a shell, wait *timeout* seconds, then kill it.

    NOTE(review): the unconditional sleep looks like deliberate pacing so
    the device-side work (e.g. adb broadcast handling) can finish — the
    process is terminated afterwards regardless of whether it exited.
    """
    proc = subprocess.Popen(cmd, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, shell=True)
    time.sleep(timeout)
    proc.terminate()
def getElememt(driver, str) :
    """Find an element by UiAutomator selector, retrying up to five times.

    After five failed attempts (1 s apart) a screen tap at (50, 50) is
    issued to dismiss any overlay, then one final lookup is made (which
    raises if the element is still missing).
    """
    for _ in range(5):
        try:
            return driver.find_element_by_android_uiautomator(str)
        except NoSuchElementException:
            time.sleep(1)
    os.popen("adb shell input tap 50 50")
    return driver.find_element_by_android_uiautomator(str)
def getElememtBack(driver, str1, str2) :
    """Find an element, preferring selector *str1* and falling back to *str2*.

    The primary selector gets two attempts, the fallback five (1 s apart);
    as a last resort a screen tap at (50, 50) is issued and the fallback
    selector is tried once more (raising if still missing).
    """
    for _ in range(2):
        try:
            return driver.find_element_by_android_uiautomator(str1)
        except NoSuchElementException:
            time.sleep(1)
    for _ in range(5):
        try:
            return driver.find_element_by_android_uiautomator(str2)
        except NoSuchElementException:
            time.sleep(1)
    os.popen("adb shell input tap 50 50")
    return driver.find_element_by_android_uiautomator(str2)
def swipe(driver, startxper, startyper, endxper, endyper) :
    """Swipe across the screen using fractional coordinates (0..1).

    Converts the fractions to absolute pixels for the current window size
    and retries once after a 1 s pause if the driver throws.
    """
    size = driver.get_window_size()
    kwargs = dict(
        start_x=int(size["width"] * startxper),
        start_y=int(size["height"] * startyper),
        end_x=int(size["width"] * endxper),
        end_y=int(size["height"] * endyper),
        duration=1000,
    )
    try:
        driver.swipe(**kwargs)
    except WebDriverException:
        # flaky driver call: wait a moment and retry once
        time.sleep(1)
        driver.swipe(**kwargs)
    return
def scrollToFindElement(driver, str) :
    """Scroll until an element matching UiAutomator selector *str* appears.

    Swipes from 55% to 20% of screen height for up to five attempts, then
    reverses direction (20% to 55%) for four more.  Returns the matched
    element, or None implicitly if it never appears.  When the selector
    matches several elements, the last enabled one is preferred.
    """
    for i in range(0, 5, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str)
            elements = driver.find_elements_by_android_uiautomator(str)
            # prefer an enabled element when several match
            if (len(elements) > 1) :
                for temp in elements :
                    if temp.get_attribute("enabled") == "true" :
                        element = temp
                        break
        except NoSuchElementException:
            # not visible yet: scroll one screen and retry
            swipe(driver, 0.5, 0.55, 0.5, 0.2)
        else :
            return element
    for i in range(0, 4, 1):
        try:
            element = driver.find_element_by_android_uiautomator(str)
            elements = driver.find_elements_by_android_uiautomator(str)
            if (len(elements) > 1):
                for temp in elements:
                    if temp.get_attribute("enabled") == "true":
                        element = temp
                        break
        except NoSuchElementException:
            # still missing: scroll back the other way and retry
            swipe(driver, 0.5, 0.2, 0.5, 0.55)
        else :
            return element
    return
def scrollToClickElement(driver, str) :
    """Scroll to the element matching *str* and click it; no-op if absent."""
    element = scrollToFindElement(driver, str)
    if element is not None :
        element.click()
def clickInList(driver, str) :
    """Click an entry in a list dialog.

    With str=None, clicks the last CheckedTextView candidate when a dialog
    window is open; otherwise scrolls to the element matching *str*.  If
    nothing clickable is found and a dialog is still open, presses BACK.
    """
    element = None
    if (str is None) :
        candidates = driver.find_elements_by_class_name("android.widget.CheckedTextView")
        # only pick from the candidates when a dialog window is open
        if len(candidates) >= 1 and checkWindow(driver):
            element = candidates[len(candidates)-1]
    else :
        element = scrollToFindElement(driver, str)
    if element is not None :
        element.click()
    else :
        if checkWindow(driver) :
            # nothing to click: dismiss the dialog with BACK (keycode 4)
            driver.press_keycode(4)
def clickOnCheckable(driver, str, value = "true") :
    """Set the checkable row labelled by *str* to *value* ("true"/"false").

    Scans LinearLayout rows for the one containing the label; in a row
    with exactly one nested layout, toggles its checkable child only when
    its current "checked" state differs from the requested value.
    """
    parents = driver.find_elements_by_class_name("android.widget.LinearLayout")
    for parent in parents:
        try :
            # raises NoSuchElementException when this row lacks the label
            parent.find_element_by_android_uiautomator(str)
            lists = parent.find_elements_by_class_name("android.widget.LinearLayout")
            if len(lists) == 1 :
                innere = parent.find_element_by_android_uiautomator("new UiSelector().checkable(true)")
                nowvalue = innere.get_attribute("checked")
                # toggle only when the state differs from the target
                if (nowvalue != value) :
                    innere.click()
                break
        except NoSuchElementException:
            continue
def typeText(driver, value) :
    """Type *value* into the first EditText on screen and confirm.

    Clicks the "OK" button when one is found; otherwise presses BACK if a
    dialog window is still open.
    """
    element = getElememt(driver, "new UiSelector().className(\"android.widget.EditText\")")
    element.clear()
    element.send_keys(value)
    enterelement = getElememt(driver, "new UiSelector().text(\"OK\")")
    if (enterelement is None) :
        if checkWindow(driver):
            driver.press_keycode(4)
    else :
        enterelement.click()
def checkWindow(driver) :
    """Return True when a dialog window is open.

    Heuristic: the top FrameLayout is shorter than the full screen while a
    dialog overlays the activity.
    """
    screen = driver.get_window_size()
    frame = driver.find_element_by_class_name("android.widget.FrameLayout").size
    return screen['height'] > frame['height']
def testingSeekBar(driver, str, value):
    """Set a seek bar to fraction *value* (0..1) and confirm with "OK".

    Outside a dialog, looks for the SeekBar in the row labelled by *str*;
    inside a dialog, uses the dialog's own SeekBar.  A missing element is
    silently ignored (just waits 1 s).
    """
    try :
        if(not checkWindow(driver)) :
            element = seekForNearestSeekBar(driver, str)
        else :
            element = driver.find_element_by_class_name("android.widget.SeekBar")
        if (None != element):
            settingSeekBar(driver, element, value)
            driver.find_element_by_android_uiautomator("new UiSelector().text(\"OK\")").click()
    except NoSuchElementException:
        time.sleep(1)
def seekForNearestSeekBar(driver, str):
    """Return the SeekBar in the LinearLayout row labelled by *str*.

    Returns None implicitly when no row contains both the label and a
    single nested layout with a SeekBar.
    """
    parents = driver.find_elements_by_class_name("android.widget.LinearLayout")
    for parent in parents:
        try :
            # raises NoSuchElementException when this row lacks the label
            parent.find_element_by_android_uiautomator(str)
            lists = parent.find_elements_by_class_name("android.widget.LinearLayout")
            if len(lists) == 1 :
                innere = parent.find_element_by_class_name("android.widget.SeekBar")
                return innere
                break  # unreachable after return; kept from the generated original
        except NoSuchElementException:
            continue
def settingSeekBar(driver, element, value) :
    """Drag *element* (a SeekBar) so its handle sits at fraction *value*.

    Presses just inside the bar's left edge at mid-height and drags to
    x + width * value at the same height.
    """
    x = element.rect.get("x")
    y = element.rect.get("y")
    width = element.rect.get("width")
    height = element.rect.get("height")
    TouchAction(driver).press(None, x + 10, y + height/2).move_to(None, x + width * value,y + height/2).release().perform()
    # (removed the original's dead trailing assignment `y = value`, which
    # wrote to a local variable immediately before it went out of scope)
def clickInMultiList(driver, str) :
    """Check an entry in a multi-choice list dialog and confirm with "OK".

    With str=None, targets the last CheckedTextView candidate when a
    dialog is open; otherwise scrolls to the element matching *str*.  The
    entry is clicked only when not already checked.
    """
    element = None
    if (str is None) :
        candidates = driver.find_elements_by_class_name("android.widget.CheckedTextView")
        # only pick from the candidates when a dialog window is open
        if len(candidates) >= 1 and checkWindow(driver):
            element = candidates[len(candidates)-1]
    else :
        element = scrollToFindElement(driver, str)
    if element is not None :
        nowvalue = element.get_attribute("checked")
        # avoid toggling an already-checked entry off
        if (nowvalue != "true") :
            element.click()
        if checkWindow(driver) :
            driver.find_element_by_android_uiautomator("new UiSelector().text(\"OK\")").click()
# testcase6_024
# Generated UI scenario (Python 2 syntax): connect to the local Appium
# server, type text into the WebSMS message field three times, tap the
# contact-select button, then dump Jacoco coverage and clean up.
try :
    starttime = time.time()
    driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)

    element = getElememt(driver, "new UiSelector().resourceId(\"de.ub0r.android.websms:id/text\").className(\"android.widget.EditText\")")
    element.clear()
    element.send_keys("Text");
    element = getElememt(driver, "new UiSelector().resourceId(\"de.ub0r.android.websms:id/text\").className(\"android.widget.EditText\")")
    element.clear()
    element.send_keys("12st)D");
    element = getElememt(driver, "new UiSelector().resourceId(\"de.ub0r.android.websms:id/text\").className(\"android.widget.EditText\")")
    element.clear()
    element.send_keys("12st)D");
    element = getElememt(driver, "new UiSelector().resourceId(\"de.ub0r.android.websms:id/select\").className(\"android.widget.ImageButton\")")
    TouchAction(driver).tap(element).perform()

except Exception, e:
    print 'FAIL'
    print 'str(e):\t\t', str(e)
    print 'repr(e):\t', repr(e)
    print traceback.format_exc()
else:
    print 'OK'
finally:
    # always report timing, flush coverage, and stop any foreign foreground app
    cpackage = driver.current_package
    endtime = time.time()
    print 'consumed time:', str(endtime - starttime), 's'
    # broadcast tells the instrumented app to write its Jacoco coverage file
    command("adb shell am broadcast -a com.example.pkg.END_EMMA --es name \"6_024\"")
    jacocotime = time.time()
    print 'jacoco time:', str(jacocotime - endtime), 's'
    driver.quit()
    if (cpackage != 'de.ub0r.android.websms'):
        cpackage = "adb shell am force-stop " + cpackage
        os.popen(cpackage)
"prefest2018@gmail.com"
] | prefest2018@gmail.com |
be812a85bf9bd10893cafd3bae114e721cd334ea | 62ac17070b927ee4249baa3b3cf65b39b9e8fafa | /Karrigell-2.3.5/modules/mod_py.py | cd23034cf66e064be12b95ff353171f3d77f46b8 | [
"BSD-3-Clause"
] | permissive | qinguan/infolist | 67eb0da25f4abc2f7214388d33448e8bb8ceb8c1 | b8738ea374819266ed0b90458a5e7270e94fb030 | refs/heads/master | 2021-01-10T19:58:06.316866 | 2012-05-01T08:05:42 | 2012-05-01T08:05:42 | 4,190,526 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 282 | py | from k_script import BaseScript
class Script(BaseScript):
    """A plain Python script loaded from disk.

    Reads the source file, strips trailing whitespace, normalizes Windows
    (CRLF) line endings to LF, and hands the code to BaseScript.
    """

    def __init__(self, fileName):
        # Use a context manager so the file handle is closed promptly;
        # the original relied on garbage collection to close it.
        with open(fileName) as src:
            pc = src.read().rstrip()
        pc = pc.replace('\r\n', '\n')  # normalize line separator
        BaseScript.__init__(self, fileName, pc, None)
| [
"qinguan0619@gmail.com"
] | qinguan0619@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.