blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 777 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3 10.2M | extension stringclasses 188 values | content stringlengths 3 10.2M | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7bee13d0cb73112e145bfacd1f65772a9339d4a8 | 44b389338c12b0dc2018d8022031b58090c58a63 | /Byte_of_Python/str_format.py | 37d384f0a73528d7d6a49621979ddd9af022ee12 | [] | no_license | llcawthorne/old-python-learning-play | cbe71b414d6fafacec7bad681b91976648b230d3 | 5241613a5536cd5c086ec56acbc9d825935ab292 | refs/heads/master | 2016-09-05T17:47:47.985814 | 2015-07-13T01:25:44 | 2015-07-13T01:25:44 | 38,983,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 178 | py | #!/usr/bin/python
#Filename: str_format.py
age = 25
name = 'Swaroop'
print('{0} is {1} years old'.format(name,age))
print('Why is {0} playing with that python?'.format(name))
| [
"LLC@acm.org"
] | LLC@acm.org |
ae669605ec9b63f7de4aded534b4943cd4f76a68 | a9a9e19b1f24033d7141dd6da6572cb425acc09b | /elsie/ext/__init__.py | ab05e9c4d013b72e18d7676980310c548fcf875b | [
"MIT"
] | permissive | spirali/elsie | 88be594fec51cfa12f9e5c69bea7b1fd539da9fe | fd95c841d03e453aaac4acd27939ca614cd5ac79 | refs/heads/master | 2023-06-09T05:59:18.051813 | 2023-05-25T12:08:50 | 2023-05-25T12:09:00 | 134,177,408 | 49 | 6 | MIT | 2023-05-25T12:04:21 | 2018-05-20T18:48:53 | Python | UTF-8 | Python | false | false | 298 | py | """
This module contains extension and helper classes which are not part of the Elsie core.
Items from this module may experience backwards-incompatible changes even in non-major releases of
Elsie.
"""
from .list import ordered_list, unordered_list # noqa
from .markdown import markdown # noqa
| [
"stanislav.bohm@vsb.cz"
] | stanislav.bohm@vsb.cz |
58ba36f83dde33f4fc01a00d8a1c5e0a650a051c | 04d954f7734a48bb00d510f58793cb516ef03891 | /5_CNN对CIFAR-10进行分类/cnn_lrn.py | b2441337d55e1eee197fbbfc96c8632a59b4a516 | [] | no_license | MrKingJM/TensorFlow | 5ef17a47128ed0b9897d0ffc166edf51d2fc9575 | 703b6446402c2f15d90b08a9593bcc0517775555 | refs/heads/master | 2020-03-10T18:53:00.867240 | 2017-06-13T08:27:58 | 2017-06-13T08:27:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,982 | py | # coding=utf-8
# cnn_lrn
from __future__ import division
import cifar10, cifar10_input
import tensorflow as tf
import numpy as np
import math
import time
data_dir = 'cifar10_data/cifar-10-batches-bin' # 下载 CIFAR-10 的默认路径
cifar10.maybe_download_and_extract() # 下载数据集,并解压、展开到其默认位置
batch_size = 128
images_train, labels_train = cifar10_input.distorted_inputs(data_dir=data_dir, batch_size=batch_size)
images_test, labels_test = cifar10_input.inputs(eval_data=True, data_dir=data_dir, batch_size=batch_size)
def weight_variable(shape, stddev):
var = tf.Variable(tf.truncated_normal(shape, stddev=stddev)) # stddev=stddev!!!
return var
def bias_variable(cons, shape):
initial = tf.constant(cons, shape=shape) # 必须是 shape=shape
return tf.Variable(initial)
def conv(x, W):
return tf.nn.conv2d(x, W, [1, 1, 1, 1], padding='SAME')
def max_pool_3x3(x):
return tf.nn.max_pool(x, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1], padding='SAME')
image_holder = tf.placeholder(tf.float32, [batch_size, 24, 24, 3])
label_holder = tf.placeholder(tf.int32, [batch_size])
# 第一层
weight1 = weight_variable([5, 5, 3, 64], 5e-2)
bias1 = bias_variable(0.0, [64])
conv1 = tf.nn.relu(conv(image_holder, weight1) + bias1)
pool1 = max_pool_3x3(conv1)
norm1 = tf.nn.lrn(pool1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75)
# 第二层
weight2 = weight_variable([5, 5, 64, 64], 5e-2)
bias2 = bias_variable(0.1, [64])
conv2 = tf.nn.relu(conv(norm1, weight2) + bias2)
norm2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75)
pool2 = max_pool_3x3(norm2)
reshape = tf.reshape(pool2, [batch_size, -1])
dim = reshape.get_shape()[1].value
# 全连接层
weight3 = weight_variable([dim, 384], 0.04)
bias3 = bias_variable(0.1, [384])
local3 = tf.nn.relu(tf.matmul(reshape, weight3) + bias3)
# 全连接层
weight4 = weight_variable([384, 192], 0.04)
bias4 = bias_variable(0.1, [192])
local4 = tf.nn.relu(tf.matmul(local3, weight4) + bias4)
# 输出
weight5 = weight_variable([192, 10], 1 / 192.0)
bias5 = bias_variable(0.0, [10])
logits = tf.matmul(local4, weight5) + bias5
# 损失函数
def loss(logits, labels):
labels = tf.cast(labels, tf.int64)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=labels,
name='cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
tf.add_to_collection('losses', cross_entropy_mean)
return tf.add_n(tf.get_collection('losses'), name='total_loss')
loss = loss(logits, label_holder)
train_op = tf.train.AdamOptimizer(1e-3).minimize(loss)
top_k_op = tf.nn.in_top_k(logits, label_holder, 1)
sess = tf.InteractiveSession()
tf.global_variables_initializer().run()
tf.train.start_queue_runners()
max_steps = 3000
for step in range(max_steps):
start_time = time.time()
image_batch, label_batch = sess.run([images_train, labels_train])
_, loss_value = sess.run([train_op, loss], feed_dict={image_holder: image_batch, label_holder: label_batch})
duration = time.time() - start_time
if step % 10 == 0:
examples_per_sec = batch_size / duration
sec_per_batch = float(duration)
print 'step {},loss={},({} examples/sec; {} sec/batch)'.format(step, loss_value, examples_per_sec,
sec_per_batch)
num_examples = 10000
num_iter = int(math.ceil(num_examples / batch_size)) # 计算一共有多少组
true_count = 0
total_sample_count = num_iter * batch_size
step = 0
while step < num_iter:
image_batch, label_batch = sess.run([images_test, labels_test])
predictions = sess.run([top_k_op], feed_dict={image_holder: image_batch, label_holder: label_batch})
true_count += np.sum(predictions)
step += 1
precision = true_count / total_sample_count
print 'precision = ', precision | [
"ywtail@gmail.com"
] | ywtail@gmail.com |
f9f753802fda005a7df3d8e87f1684460ae38ba1 | 7e5dd510284f6944995f1c8586309e6e3acecb84 | /main.py | c674ccbbc2802acc706c683b667d6d27caceb6ae | [] | no_license | Namenaro/frog_demo | b3a630d7f8a4139954ebb92c0c83dd76b49f6fb6 | b74da899f949233aed1fb07bd48314e961ba99e7 | refs/heads/master | 2020-03-31T10:52:55.876607 | 2018-10-08T22:20:56 | 2018-10-08T22:20:56 | 152,153,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,425 | py | import os
from pprint import pprint
import tkinter as tk
class MainScreen(tk.Frame):
def __init__(self, parent):
self.frame = tk.Frame(parent)
self.frame.pack()
self.parent = parent
folder = "frog_muskle"
self.image_0 = tk.PhotoImage(file=os.path.join(folder, "zero_freq.png"))
self.image_1 = tk.PhotoImage(file=os.path.join(folder, "one_freq.png"))
self.image_27 = tk.PhotoImage(file=os.path.join(folder, "two_7_freq.png"))
self.image_8 = tk.PhotoImage(file=os.path.join(folder, "tetanus.png"))
self.pic_freq = {0: self.image_0,
1: self.image_1,
2: self.image_27,
3: self.image_27,
4: self.image_27,
5: self.image_27,
6: self.image_27,
7: self.image_27,
8: self.image_8}
self.panel_frame = tk.Frame(self.frame)
self.panel_frame.grid(row=0, column=0, padx=25, pady=25)
# статическая подпись про герцы
self.freq_static_var = tk.StringVar()
self.freq_static_label = tk.Label(self.panel_frame, textvariable=self.freq_static_var, relief=tk.RAISED)
self.freq_static_var.set(" Гц ")
self.freq_static_label.grid(row=0, column=2, padx=5)
# статическая подпись про милливольты
self.voltage_static_var = tk.StringVar()
self.voltage_static_label = tk.Label(self.panel_frame, textvariable=self.voltage_static_var, relief=tk.RAISED)
self.voltage_static_var.set(" мВ ")
self.voltage_static_label.grid(row=3, column=2, padx=5)
# поле ввода про милливольты
self.voltage_var = tk.StringVar()
self.voltage_var.set("1.5")
self.voltage_entry = tk.Entry(self.panel_frame, width=4, state=tk.DISABLED, textvariable=self.voltage_var)
self.voltage_entry.grid(row=3, column=1, padx=5)
# статическая картинка лягушки
frog_pic = os.path.join(folder, "frog.png")
self.frog_img = tk.PhotoImage(file=frog_pic)
self.label_frog = tk.Label(self.frame, image=self.frog_img, relief=tk.RAISED)
self.label_frog.grid(row=0, column=2, padx=25, pady=25)
# меняющаяся картика с графиком
self.label_graph = tk.Label(self.frame, image=self.pic_freq[0], relief=tk.RAISED)
self.label_graph.grid(row=0, column=1, padx=25, pady=25)
# ползунок
self.freq_var = tk.IntVar()
self.scale = tk.Scale(self.panel_frame, variable=self.freq_var, to=8, showvalue=False, command=self.slider_moved)
self.scale.grid(row=0, column=0, rowspan=4,
sticky=tk.W, padx=5)
# поле ввода про герцы
self.freq_entry = tk.Entry(self.panel_frame, width=4, textvariable=self.freq_var)
self.freq_entry.grid(row=0, column=1, padx=5)
def slider_moved(self,value_slider):
# сменим картинку
self.label_graph.configure(image=self.pic_freq[int(value_slider)])
self.label_graph.image = self.pic_freq[int(value_slider)]
def main():
root = tk.Tk()
app = MainScreen(root)
app.parent.title("Лягушка - демо")
root.mainloop()
if __name__ == '__main__':
main()
| [
"nanenaro@gmail.com"
] | nanenaro@gmail.com |
fe09ad947154e129b17bad8aec3e3e9b70958b77 | 03ee6ba65cd8e21bea257b1bf4dfd4dffed3d01e | /lib/python2.7/site-packages/IPython/core/oinspect.py | 6e9951d456cf2334c29c40c7e57cd009d2dbe39e | [] | no_license | BlackgateResearch/gmr_django | 310db12140e324ab0c477a2f436366dd943e4d26 | 527439b54a1e17f9423c10c386ab03ad73d13759 | refs/heads/master | 2016-09-05T23:01:21.683037 | 2011-11-24T17:25:07 | 2011-11-24T17:25:07 | 1,075,730 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,936 | py | # -*- coding: utf-8 -*-
"""Tools for inspecting Python objects.
Uses syntax highlighting for presenting the various information elements.
Similar in spirit to the inspect module, but all calls take a name argument to
reference the name under which an object is being read.
"""
#*****************************************************************************
# Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu>
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#*****************************************************************************
__all__ = ['Inspector','InspectColors']
# stdlib modules
import __builtin__
import inspect
import linecache
import os
import sys
import types
from collections import namedtuple
from itertools import izip_longest
# IPython's own
from IPython.core import page
from IPython.utils import PyColorize
from IPython.utils import io
from IPython.utils.text import indent
from IPython.utils.wildcard import list_namespace
from IPython.utils.coloransi import *
#****************************************************************************
# Builtin color schemes
Colors = TermColors # just a shorthand
# Build a few color schemes
NoColor = ColorScheme(
'NoColor',{
'header' : Colors.NoColor,
'normal' : Colors.NoColor # color off (usu. Colors.Normal)
} )
LinuxColors = ColorScheme(
'Linux',{
'header' : Colors.LightRed,
'normal' : Colors.Normal # color off (usu. Colors.Normal)
} )
LightBGColors = ColorScheme(
'LightBG',{
'header' : Colors.Red,
'normal' : Colors.Normal # color off (usu. Colors.Normal)
} )
# Build table of color schemes (needed by the parser)
InspectColors = ColorSchemeTable([NoColor,LinuxColors,LightBGColors],
'Linux')
#****************************************************************************
# Auxiliary functions and objects
# See the messaging spec for the definition of all these fields. This list
# effectively defines the order of display
info_fields = ['type_name', 'base_class', 'string_form', 'namespace',
'length', 'file', 'definition', 'docstring', 'source',
'init_definition', 'class_docstring', 'init_docstring',
'call_def', 'call_docstring',
# These won't be printed but will be used to determine how to
# format the object
'ismagic', 'isalias', 'isclass', 'argspec', 'found', 'name'
]
def object_info(**kw):
"""Make an object info dict with all fields present."""
infodict = dict(izip_longest(info_fields, [None]))
infodict.update(kw)
return infodict
def getdoc(obj):
"""Stable wrapper around inspect.getdoc.
This can't crash because of attribute problems.
It also attempts to call a getdoc() method on the given object. This
allows objects which provide their docstrings via non-standard mechanisms
(like Pyro proxies) to still be inspected by ipython's ? system."""
ds = None # default return value
try:
ds = inspect.getdoc(obj)
except:
# Harden against an inspect failure, which can occur with
# SWIG-wrapped extensions.
pass
# Allow objects to offer customized documentation via a getdoc method:
try:
ds2 = obj.getdoc()
except:
pass
else:
# if we get extra info, we add it to the normal docstring.
if ds is None:
ds = ds2
else:
ds = '%s\n%s' % (ds,ds2)
return ds
def getsource(obj,is_binary=False):
"""Wrapper around inspect.getsource.
This can be modified by other projects to provide customized source
extraction.
Inputs:
- obj: an object whose source code we will attempt to extract.
Optional inputs:
- is_binary: whether the object is known to come from a binary source.
This implementation will skip returning any output for binary objects, but
custom extractors may know how to meaningfully process them."""
if is_binary:
return None
else:
# get source if obj was decorated with @decorator
if hasattr(obj,"__wrapped__"):
obj = obj.__wrapped__
try:
src = inspect.getsource(obj)
except TypeError:
if hasattr(obj,'__class__'):
src = inspect.getsource(obj.__class__)
return src
def getargspec(obj):
"""Get the names and default values of a function's arguments.
A tuple of four things is returned: (args, varargs, varkw, defaults).
'args' is a list of the argument names (it may contain nested lists).
'varargs' and 'varkw' are the names of the * and ** arguments or None.
'defaults' is an n-tuple of the default values of the last n arguments.
Modified version of inspect.getargspec from the Python Standard
Library."""
if inspect.isfunction(obj):
func_obj = obj
elif inspect.ismethod(obj):
func_obj = obj.im_func
elif hasattr(obj, '__call__'):
func_obj = obj.__call__
else:
raise TypeError('arg is not a Python function')
args, varargs, varkw = inspect.getargs(func_obj.func_code)
return args, varargs, varkw, func_obj.func_defaults
def format_argspec(argspec):
"""Format argspect, convenience wrapper around inspect's.
This takes a dict instead of ordered arguments and calls
inspect.format_argspec with the arguments in the necessary order.
"""
return inspect.formatargspec(argspec['args'], argspec['varargs'],
argspec['varkw'], argspec['defaults'])
def call_tip(oinfo, format_call=True):
"""Extract call tip data from an oinfo dict.
Parameters
----------
oinfo : dict
format_call : bool, optional
If True, the call line is formatted and returned as a string. If not, a
tuple of (name, argspec) is returned.
Returns
-------
call_info : None, str or (str, dict) tuple.
When format_call is True, the whole call information is formattted as a
single string. Otherwise, the object's name and its argspec dict are
returned. If no call information is available, None is returned.
docstring : str or None
The most relevant docstring for calling purposes is returned, if
available. The priority is: call docstring for callable instances, then
constructor docstring for classes, then main object's docstring otherwise
(regular functions).
"""
# Get call definition
argspec = oinfo.get('argspec')
if argspec is None:
call_line = None
else:
# Callable objects will have 'self' as their first argument, prune
# it out if it's there for clarity (since users do *not* pass an
# extra first argument explicitly).
try:
has_self = argspec['args'][0] == 'self'
except (KeyError, IndexError):
pass
else:
if has_self:
argspec['args'] = argspec['args'][1:]
call_line = oinfo['name']+format_argspec(argspec)
# Now get docstring.
# The priority is: call docstring, constructor docstring, main one.
doc = oinfo.get('call_docstring')
if doc is None:
doc = oinfo.get('init_docstring')
if doc is None:
doc = oinfo.get('docstring','')
return call_line, doc
class Inspector:
def __init__(self, color_table=InspectColors,
code_color_table=PyColorize.ANSICodeColors,
scheme='NoColor',
str_detail_level=0):
self.color_table = color_table
self.parser = PyColorize.Parser(code_color_table,out='str')
self.format = self.parser.format
self.str_detail_level = str_detail_level
self.set_active_scheme(scheme)
def _getdef(self,obj,oname=''):
"""Return the definition header for any callable object.
If any exception is generated, None is returned instead and the
exception is suppressed."""
try:
# We need a plain string here, NOT unicode!
hdef = oname + inspect.formatargspec(*getargspec(obj))
return hdef.encode('ascii')
except:
return None
def __head(self,h):
"""Return a header string with proper colors."""
return '%s%s%s' % (self.color_table.active_colors.header,h,
self.color_table.active_colors.normal)
def set_active_scheme(self,scheme):
self.color_table.set_active_scheme(scheme)
self.parser.color_table.set_active_scheme(scheme)
def noinfo(self,msg,oname):
"""Generic message when no information is found."""
print 'No %s found' % msg,
if oname:
print 'for %s' % oname
else:
print
def pdef(self,obj,oname=''):
"""Print the definition header for any callable object.
If the object is a class, print the constructor information."""
if not callable(obj):
print 'Object is not callable.'
return
header = ''
if inspect.isclass(obj):
header = self.__head('Class constructor information:\n')
obj = obj.__init__
elif type(obj) is types.InstanceType:
obj = obj.__call__
output = self._getdef(obj,oname)
if output is None:
self.noinfo('definition header',oname)
else:
print >>io.stdout, header,self.format(output),
def pdoc(self,obj,oname='',formatter = None):
"""Print the docstring for any object.
Optional:
-formatter: a function to run the docstring through for specially
formatted docstrings.
Examples
--------
In [1]: class NoInit:
...: pass
In [2]: class NoDoc:
...: def __init__(self):
...: pass
In [3]: %pdoc NoDoc
No documentation found for NoDoc
In [4]: %pdoc NoInit
No documentation found for NoInit
In [5]: obj = NoInit()
In [6]: %pdoc obj
No documentation found for obj
In [5]: obj2 = NoDoc()
In [6]: %pdoc obj2
No documentation found for obj2
"""
head = self.__head # For convenience
lines = []
ds = getdoc(obj)
if formatter:
ds = formatter(ds)
if ds:
lines.append(head("Class Docstring:"))
lines.append(indent(ds))
if inspect.isclass(obj) and hasattr(obj, '__init__'):
init_ds = getdoc(obj.__init__)
if init_ds is not None:
lines.append(head("Constructor Docstring:"))
lines.append(indent(init_ds))
elif (type(obj) is types.InstanceType or isinstance(obj,object)) \
and hasattr(obj,'__call__'):
call_ds = getdoc(obj.__call__)
if call_ds:
lines.append(head("Calling Docstring:"))
lines.append(indent(call_ds))
if not lines:
self.noinfo('documentation',oname)
else:
page.page('\n'.join(lines))
def psource(self,obj,oname=''):
"""Print the source code for an object."""
# Flush the source cache because inspect can return out-of-date source
linecache.checkcache()
try:
src = getsource(obj)
except:
self.noinfo('source',oname)
else:
page.page(self.format(src))
def pfile(self,obj,oname=''):
"""Show the whole file where an object was defined."""
try:
try:
lineno = inspect.getsourcelines(obj)[1]
except TypeError:
# For instances, try the class object like getsource() does
if hasattr(obj,'__class__'):
lineno = inspect.getsourcelines(obj.__class__)[1]
# Adjust the inspected object so getabsfile() below works
obj = obj.__class__
except:
self.noinfo('file',oname)
return
# We only reach this point if object was successfully queried
# run contents of file through pager starting at line
# where the object is defined
ofile = inspect.getabsfile(obj)
if ofile.endswith(('.so', '.dll', '.pyd')):
print 'File %r is binary, not printing.' % ofile
elif not os.path.isfile(ofile):
print 'File %r does not exist, not printing.' % ofile
else:
# Print only text files, not extension binaries. Note that
# getsourcelines returns lineno with 1-offset and page() uses
# 0-offset, so we must adjust.
page.page(self.format(open(ofile).read()),lineno-1)
def _format_fields(self, fields, title_width=12):
"""Formats a list of fields for display.
Parameters
----------
fields : list
A list of 2-tuples: (field_title, field_content)
title_width : int
How many characters to pad titles to. Default 12.
"""
out = []
header = self.__head
for title, content in fields:
if len(content.splitlines()) > 1:
title = header(title + ":") + "\n"
else:
title = header((title+":").ljust(title_width))
out.append(title + content)
return "\n".join(out)
# The fields to be displayed by pinfo: (fancy_name, key_in_info_dict)
pinfo_fields1 = [("Type", "type_name"),
("Base Class", "base_class"),
("String Form", "string_form"),
("Namespace", "namespace"),
("Length", "length"),
("File", "file"),
("Definition", "definition")]
pinfo_fields_obj = [("Class Docstring", "class_docstring"),
("Constructor Docstring","init_docstring"),
("Call def", "call_def"),
("Call docstring", "call_docstring")]
def pinfo(self,obj,oname='',formatter=None,info=None,detail_level=0):
"""Show detailed information about an object.
Optional arguments:
- oname: name of the variable pointing to the object.
- formatter: special formatter for docstrings (see pdoc)
- info: a structure with some information fields which may have been
precomputed already.
- detail_level: if set to 1, more information is given.
"""
info = self.info(obj, oname=oname, formatter=formatter,
info=info, detail_level=detail_level)
displayfields = []
for title, key in self.pinfo_fields1:
field = info[key]
if field is not None:
displayfields.append((title, field.rstrip()))
# Source or docstring, depending on detail level and whether
# source found.
if detail_level > 0 and info['source'] is not None:
displayfields.append(("Source", info['source']))
elif info['docstring'] is not None:
displayfields.append(("Docstring", info["docstring"]))
# Constructor info for classes
if info['isclass']:
if info['init_definition'] or info['init_docstring']:
displayfields.append(("Constructor information", ""))
if info['init_definition'] is not None:
displayfields.append((" Definition",
info['init_definition'].rstrip()))
if info['init_docstring'] is not None:
displayfields.append((" Docstring",
indent(info['init_docstring'])))
# Info for objects:
else:
for title, key in self.pinfo_fields_obj:
field = info[key]
if field is not None:
displayfields.append((title, field.rstrip()))
# Finally send to printer/pager:
if displayfields:
page.page(self._format_fields(displayfields))
def info(self, obj, oname='', formatter=None, info=None, detail_level=0):
"""Compute a dict with detailed information about an object.
Optional arguments:
- oname: name of the variable pointing to the object.
- formatter: special formatter for docstrings (see pdoc)
- info: a structure with some information fields which may have been
precomputed already.
- detail_level: if set to 1, more information is given.
"""
obj_type = type(obj)
header = self.__head
if info is None:
ismagic = 0
isalias = 0
ospace = ''
else:
ismagic = info.ismagic
isalias = info.isalias
ospace = info.namespace
# Get docstring, special-casing aliases:
if isalias:
if not callable(obj):
try:
ds = "Alias to the system command:\n %s" % obj[1]
except:
ds = "Alias: " + str(obj)
else:
ds = "Alias to " + str(obj)
if obj.__doc__:
ds += "\nDocstring:\n" + obj.__doc__
else:
ds = getdoc(obj)
if ds is None:
ds = '<no docstring>'
if formatter is not None:
ds = formatter(ds)
# store output in a dict, we initialize it here and fill it as we go
out = dict(name=oname, found=True, isalias=isalias, ismagic=ismagic)
string_max = 200 # max size of strings to show (snipped if longer)
shalf = int((string_max -5)/2)
if ismagic:
obj_type_name = 'Magic function'
elif isalias:
obj_type_name = 'System alias'
else:
obj_type_name = obj_type.__name__
out['type_name'] = obj_type_name
try:
bclass = obj.__class__
out['base_class'] = str(bclass)
except: pass
# String form, but snip if too long in ? form (full in ??)
if detail_level >= self.str_detail_level:
try:
ostr = str(obj)
str_head = 'string_form'
if not detail_level and len(ostr)>string_max:
ostr = ostr[:shalf] + ' <...> ' + ostr[-shalf:]
ostr = ("\n" + " " * len(str_head.expandtabs())).\
join(q.strip() for q in ostr.split("\n"))
out[str_head] = ostr
except:
pass
if ospace:
out['namespace'] = ospace
# Length (for strings and lists)
try:
out['length'] = str(len(obj))
except: pass
# Filename where object was defined
binary_file = False
try:
try:
fname = inspect.getabsfile(obj)
except TypeError:
# For an instance, the file that matters is where its class was
# declared.
if hasattr(obj,'__class__'):
fname = inspect.getabsfile(obj.__class__)
if fname.endswith('<string>'):
fname = 'Dynamically generated function. No source code available.'
if fname.endswith(('.so', '.dll', '.pyd')):
binary_file = True
out['file'] = fname
except:
# if anything goes wrong, we don't want to show source, so it's as
# if the file was binary
binary_file = True
# reconstruct the function definition and print it:
defln = self._getdef(obj, oname)
if defln:
out['definition'] = self.format(defln)
# Docstrings only in detail 0 mode, since source contains them (we
# avoid repetitions). If source fails, we add them back, see below.
if ds and detail_level == 0:
out['docstring'] = ds
# Original source code for any callable
if detail_level:
# Flush the source cache because inspect can return out-of-date
# source
linecache.checkcache()
source = None
try:
try:
src = getsource(obj,binary_file)
except TypeError:
if hasattr(obj,'__class__'):
src = getsource(obj.__class__,binary_file)
if src is not None:
source = self.format(src)
out['source'] = source.rstrip()
except Exception:
pass
if ds and source is None:
out['docstring'] = ds
# Constructor docstring for classes
if inspect.isclass(obj):
out['isclass'] = True
# reconstruct the function definition and print it:
try:
obj_init = obj.__init__
except AttributeError:
init_def = init_ds = None
else:
init_def = self._getdef(obj_init,oname)
init_ds = getdoc(obj_init)
# Skip Python's auto-generated docstrings
if init_ds and \
init_ds.startswith('x.__init__(...) initializes'):
init_ds = None
if init_def or init_ds:
if init_def:
out['init_definition'] = self.format(init_def)
if init_ds:
out['init_docstring'] = init_ds
# and class docstring for instances:
else:
# First, check whether the instance docstring is identical to the
# class one, and print it separately if they don't coincide. In
# most cases they will, but it's nice to print all the info for
# objects which use instance-customized docstrings.
if ds:
try:
cls = getattr(obj,'__class__')
except:
class_ds = None
else:
class_ds = getdoc(cls)
# Skip Python's auto-generated docstrings
if class_ds and \
(class_ds.startswith('function(code, globals[,') or \
class_ds.startswith('instancemethod(function, instance,') or \
class_ds.startswith('module(name[,') ):
class_ds = None
if class_ds and ds != class_ds:
out['class_docstring'] = class_ds
# Next, try to show constructor docstrings
try:
init_ds = getdoc(obj.__init__)
# Skip Python's auto-generated docstrings
if init_ds and \
init_ds.startswith('x.__init__(...) initializes'):
init_ds = None
except AttributeError:
init_ds = None
if init_ds:
out['init_docstring'] = init_ds
# Call form docstring for callable instances
if hasattr(obj, '__call__'):
call_def = self._getdef(obj.__call__, oname)
if call_def is not None:
out['call_def'] = self.format(call_def)
call_ds = getdoc(obj.__call__)
# Skip Python's auto-generated docstrings
if call_ds and call_ds.startswith('x.__call__(...) <==> x(...)'):
call_ds = None
if call_ds:
out['call_docstring'] = call_ds
# Compute the object's argspec as a callable. The key is to decide
# whether to pull it from the object itself, from its __init__ or
# from its __call__ method.
if inspect.isclass(obj):
# Old-style classes need not have an __init__
callable_obj = getattr(obj, "__init__", None)
elif callable(obj):
callable_obj = obj
else:
callable_obj = None
if callable_obj:
try:
args, varargs, varkw, defaults = getargspec(callable_obj)
except (TypeError, AttributeError):
# For extensions/builtins we can't retrieve the argspec
pass
else:
out['argspec'] = dict(args=args, varargs=varargs,
varkw=varkw, defaults=defaults)
return object_info(**out)
def psearch(self,pattern,ns_table,ns_search=[],
ignore_case=False,show_all=False):
"""Search namespaces with wildcards for objects.
Arguments:
- pattern: string containing shell-like wildcards to use in namespace
searches and optionally a type specification to narrow the search to
objects of that type.
- ns_table: dict of name->namespaces for search.
Optional arguments:
- ns_search: list of namespace names to include in search.
- ignore_case(False): make the search case-insensitive.
- show_all(False): show all names, including those starting with
underscores.
"""
#print 'ps pattern:<%r>' % pattern # dbg
# defaults
type_pattern = 'all'
filter = ''
cmds = pattern.split()
len_cmds = len(cmds)
if len_cmds == 1:
# Only filter pattern given
filter = cmds[0]
elif len_cmds == 2:
# Both filter and type specified
filter,type_pattern = cmds
else:
raise ValueError('invalid argument string for psearch: <%s>' %
pattern)
# filter search namespaces
for name in ns_search:
if name not in ns_table:
raise ValueError('invalid namespace <%s>. Valid names: %s' %
(name,ns_table.keys()))
#print 'type_pattern:',type_pattern # dbg
search_result = []
for ns_name in ns_search:
ns = ns_table[ns_name]
tmp_res = list(list_namespace(ns,type_pattern,filter,
ignore_case=ignore_case,
show_all=show_all))
search_result.extend(tmp_res)
search_result.sort()
page.page('\n'.join(search_result))
| [
"tristram@oaten.name"
] | tristram@oaten.name |
51033624394c62c952b6c453f0d94e4a26252199 | 9fb2139bf41e2301f9ee9069d649c5afe8e7735c | /python/En19cs306027_Lovesh_Kumrawat_Self_created_function_similar_to_'.astype'_in_numpy.py | 48ce29a748e1c7cd8b8b7bf5eabfc24d5bf3a64b | [] | no_license | codewithgauri/HacktoberFest | 9bc23289b4d93f7832271644a2ded2a83aa22c87 | 8ce8f687a4fb7c3953d1e0a5b314e21e4553366e | refs/heads/master | 2023-01-02T07:20:51.634263 | 2020-10-26T07:02:34 | 2020-10-26T07:02:34 | 307,285,210 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 838 | py | # In 'numpy' we can convert values of matrix into 'integer' by 'np.matrix('1.4 0.6;1.8 3.2;1.4 4.2',dtype=int)' or 'matrix_variable.astype('int32')' but i created a function for this.
import numpy as np
def matrix_integer_values_any_shape(inp): #* For any shape of matrix.
    """Return a copy of matrix *inp* with every element truncated to int.

    Equivalent to ``inp.astype(int)`` / ``inp.astype('int32')``: ``int()``
    truncates toward zero, matching numpy's float->int cast.  Works for any
    2-D matrix shape.
    """
    # Convert row by row.  A nested comprehension replaces the original
    # flatten-then-regroup logic, which abused a list comprehension purely
    # for its append side effects and hand-computed flat indices.
    return np.matrix([[int(value) for value in row] for row in np.array(inp)])
# Demo: compare the hand-rolled conversion with numpy's built-in astype.
print('Real Matrix with Floating Values:')
z_4_3=np.matrix('1.4 3.6 5.7 4.3;1.8 3.2 34.64 235.77;1.4 34.213 4.2 653.567')
print(z_4_3,z_4_3.shape,'\n')
print('With Self Created Function:')
z=matrix_integer_values_any_shape(z_4_3)
print(z,z.shape,'\n')
print('With Inbuilt Function:')
# Bug fix: convert the ORIGINAL float matrix, not the already-converted z;
# previously astype was applied to an all-int matrix, making the
# "inbuilt function" comparison a no-op.
z=z_4_3.astype('int32')
print(z,z.shape,'\n')
| [
"noreply@github.com"
] | codewithgauri.noreply@github.com |
43a34c8971cd8b4b94a667b95d9f50b718825d12 | 8a8974f433ed4c86eec3960ef953ddd21464448b | /doi_request/__init__.py | 99c59af476fba1092d357b903fbdaa02df3f9bef | [
"BSD-2-Clause"
] | permissive | scieloorg/doi_request | 70c533afb135ebd11a61b3c782750e4938033260 | 52da64d8e5cc1782cd91968f2a8da3d4c9c736dc | refs/heads/master | 2023-08-19T02:55:27.466086 | 2021-03-08T18:36:09 | 2021-03-08T18:36:09 | 87,971,952 | 1 | 4 | BSD-2-Clause | 2023-07-27T00:01:41 | 2017-04-11T19:33:40 | JavaScript | UTF-8 | Python | false | false | 2,233 | py | import os
from pyramid.config import Configurator
from pyramid.session import SignedCookieSessionFactory
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from doi_request.models import initialize_sql
VERSION = '1.2.0'
def version(request):
    """Return the package version string (Pyramid request-method signature;
    the request argument is unused)."""
    return VERSION
def db(request):
    """Create a per-request SQLAlchemy session.

    The session is committed when the request finishes without an
    exception, rolled back otherwise, and always closed afterwards.
    """
    sess = request.registry.dbmaker()

    def _finish(req):
        # Persist the request's work on success, discard it on error.
        if req.exception is None:
            sess.commit()
        else:
            sess.rollback()
        sess.close()

    request.add_finished_callback(_finish)
    return sess
def main(global_config, **settings):
    """Build and return the Pyramid WSGI application.

    Wires the database session factory, static views, routes, i18n
    subscribers and the cookie session, then produces the WSGI app.
    NOTE: configuration order matters in Pyramid (route matching follows
    registration order), so keep the statement sequence as-is.
    """
    config = Configurator(settings=settings)
    # Database Config
    # Engine URL comes from the environment; in-memory sqlite by default.
    engine = create_engine(os.environ.get('SQL_ENGINE', 'sqlite:///:memory:'))
    static_assets = os.environ.get('STATIC_MEDIA', 'media')
    # Session factory shared via the registry; consumed by db() above.
    config.registry.dbmaker = sessionmaker(bind=engine)
    config.scan('doi_request.models') # the "important" line
    initialize_sql(engine)
    # request.db (cached per request) and request.version().
    config.add_request_method(db, reify=True)
    config.add_request_method(version)
    config.include('pyramid_mako')
    config.add_static_view('static', 'static', cache_max_age=3600)
    config.add_static_view('media', static_assets, cache_max_age=3600)
    config.add_route('list_deposits', '/')
    config.add_route('help', '/help')
    config.add_route('deposit_request', '/deposit/request')
    config.add_route('expenses', '/expenses')
    config.add_route('expenses_details', '/expenses/details')
    config.add_route('deposit_post', '/deposit/post')
    config.add_route('deposit', '/deposit')
    config.add_route('downloads', '/downloads')
    # i18n: inject renderer globals and a localizer per request.
    config.add_subscriber('doi_request.subscribers.add_renderer_globals',
                          'pyramid.events.BeforeRender')
    config.add_subscriber('doi_request.subscribers.add_localizer',
                          'pyramid.events.NewRequest')
    config.add_translation_dirs('doi_request:locale')
    # Session config
    # NOTE(review): 'sses_navegation' looks like a typo for
    # 'sess_navegation', but it is the signed-cookie secret/name in
    # production -- do not rename without a migration plan.
    navegation_session_factory = SignedCookieSessionFactory('sses_navegation')
    config.set_session_factory(navegation_session_factory)
    config.scan()
    return config.make_wsgi_app()
| [
"fabiobatalha@gmail.com"
] | fabiobatalha@gmail.com |
58e052ed808c502f500414a25bdc06bdbdc8c904 | 46ac0965941d06fde419a6f216db2a653a245dbd | /sdks/python/test/test_NotificationConfigApple.py | 39490853d098bca2c7862b7c84916113eb5abb7c | [
"MIT",
"Unlicense"
] | permissive | b3nab/appcenter-sdks | 11f0bab00d020abb30ee951f7656a3d7ed783eac | bcc19c998b5f648a147f0d6a593dd0324e2ab1ea | refs/heads/master | 2022-01-27T15:06:07.202852 | 2019-05-19T00:12:43 | 2019-05-19T00:12:43 | 187,386,747 | 0 | 3 | MIT | 2022-01-22T07:57:59 | 2019-05-18T17:29:21 | Python | UTF-8 | Python | false | false | 985 | py | # coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: benedetto.abbenanti@gmail.com
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
from __future__ import absolute_import
import unittest
import appcenter_sdk
from NotificationConfigApple.clsNotificationConfigApple import NotificationConfigApple # noqa: E501
from appcenter_sdk.rest import ApiException
class TestNotificationConfigApple(unittest.TestCase):
    """NotificationConfigApple unit test stubs"""
    def setUp(self):
        # No fixtures needed yet; stub kept for future setup code.
        pass
    def tearDown(self):
        # Nothing to clean up; stub kept symmetrical with setUp.
        pass
    def testNotificationConfigApple(self):
        """Test NotificationConfigApple"""
        # FIXME: construct object with mandatory attributes with example values
        # model = appcenter_sdk.models.clsNotificationConfigApple.NotificationConfigApple()  # noqa: E501
        pass
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| [
"b3nab@users.noreply.github.com"
] | b3nab@users.noreply.github.com |
84eff5f9a5d610e7b13f9042ec2ffc0f4bff3810 | e81fabdd6988c787524755fac73aa9d3631fc64c | /polyaxon_schemas/ml/models.py | 14b1701cd6d611b5dabc5348fbf5a80c03d2c950 | [
"MIT"
] | permissive | granularai/polyaxon-schemas | 0aa06f15b7353ceb6d31f1e5cf63c269ab0e2ce4 | 017ae74701f21f12f0b25e75379681ea5d8baa9e | refs/heads/master | 2022-08-30T00:05:40.888476 | 2020-05-19T17:22:46 | 2020-05-19T17:22:46 | 265,312,701 | 0 | 0 | MIT | 2020-05-19T17:16:38 | 2020-05-19T17:16:37 | null | UTF-8 | Python | false | false | 14,986 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from marshmallow import EXCLUDE, fields
from polyaxon_schemas.base import BaseConfig, BaseMultiSchema, BaseSchema
from polyaxon_schemas.fields import ObjectOrListObject
from polyaxon_schemas.ml.bridges import BridgeSchema
from polyaxon_schemas.ml.graph import GraphSchema
from polyaxon_schemas.ml.losses import LossSchema
from polyaxon_schemas.ml.metrics import MetricSchema
from polyaxon_schemas.ml.optimizers import OptimizerSchema
class BaseModelSchema(BaseSchema):
    """Marshmallow schema shared by every model configuration."""
    # Network definition; the only field without allow_none.
    graph = fields.Nested(GraphSchema)
    loss = fields.Nested(LossSchema, allow_none=True)
    optimizer = fields.Nested(OptimizerSchema, allow_none=True)
    metrics = fields.Nested(MetricSchema, many=True, allow_none=True)
    # Tensorboard summary selection: a single name or a list of names.
    summaries = ObjectOrListObject(fields.Str, allow_none=True)
    clip_gradients = fields.Float(allow_none=True)
    clip_embed_gradients = fields.Float(allow_none=True)
    name = fields.Str(allow_none=True)
    @staticmethod
    def schema_config():
        # Config class this schema (de)serializes.
        return BaseModelConfig
class BaseModelConfig(BaseConfig):
    """Base configuration shared by all model types.

    Args:
        graph: Graph definition of the network.
        loss: optional loss configuration.
        optimizer: optional optimizer configuration.
        metrics: optional list of metric configurations.
        summaries: optional tensorboard summary selection (str or list).
        clip_gradients: gradients clipping by global norm.
        clip_embed_gradients: embedding gradients clipping value.
        name: optional scope name for the model.
    """
    SCHEMA = BaseModelSchema
    IDENTIFIER = 'Model'
    # NOTE(review): presumably attributes omitted from serialized output
    # when unset -- confirm against BaseConfig's handling.
    REDUCED_ATTRIBUTES = ['name', 'summaries']
    def __init__(self,
                 graph,
                 loss=None,
                 optimizer=None,
                 metrics=None,
                 summaries=None,
                 clip_gradients=0.5,
                 clip_embed_gradients=0.,
                 name=None):
        self.graph = graph
        self.loss = loss
        self.optimizer = optimizer
        self.metrics = metrics
        self.summaries = summaries
        self.clip_gradients = clip_gradients
        self.clip_embed_gradients = clip_embed_gradients
        self.name = name
class ClassifierSchema(BaseModelSchema):
    """Schema for ClassifierConfig: base fields plus one-hot options."""
    one_hot_encode = fields.Bool(allow_none=True)
    n_classes = fields.Int(allow_none=True)
    @staticmethod
    def schema_config():
        return ClassifierConfig
class ClassifierConfig(BaseModelConfig):
    """Configuration for a classification model.

    Extends the base model configuration with classification-specific
    options (one-hot encoding of the outputs).

    Args:
        graph: Graph definition. See `Graph`.
        loss: `LossConfig` instance. Default `SigmoidCrossEntropyConfig`.
        optimizer: `OptimizerConfig` instance. Default
            `AdamConfig(learning_rate=0.001)`.
        metrics: list of `MetricConfig` instances.
        summaries: `str` or `list`; verbosity of the tensorboard
            visualization. Possible values: [`all`, `activations`, `loss`,
            `learning_rate`, `variables`, `gradients`].
        clip_gradients: `float`; gradients clipping by global norm.
        clip_embed_gradients: `float`; embedding gradients clipping value.
        one_hot_encode: `bool`; whether to one-hot encode the outputs.
        n_classes: `int`; number of classes used in the one-hot encoding.
        name: `str`; scope name encapsulating the whole model.

    Returns:
        `EstimatorSpec`
    """
    SCHEMA = ClassifierSchema
    IDENTIFIER = 'Classifier'
    def __init__(self, graph, loss=None, optimizer=None, metrics=None,
                 summaries=None, clip_gradients=0.5, clip_embed_gradients=0.,
                 one_hot_encode=None, n_classes=None, name=None):
        # Delegate all shared options to the base configuration.
        super(ClassifierConfig, self).__init__(
            graph=graph, loss=loss, optimizer=optimizer, metrics=metrics,
            summaries=summaries, clip_gradients=clip_gradients,
            clip_embed_gradients=clip_embed_gradients, name=name)
        # Classification-specific settings.
        self.one_hot_encode = one_hot_encode
        self.n_classes = n_classes
class RegressorSchema(BaseModelSchema):
    """Schema for RegressorConfig; adds no fields over the base schema."""
    @staticmethod
    def schema_config():
        return RegressorConfig
class RegressorConfig(BaseModelConfig):
    """Configuration for a regression model.

    Identical to the base model configuration apart from its defaults and
    identifier; no extra options are introduced.

    Args:
        graph: Graph definition. See `Graph`.
        loss: `LossConfig` instance. Default `MeanSquaredErrorConfig`.
        optimizer: `OptimizerConfig` instance. Default
            `AdamConfig(learning_rate=0.001)`.
        metrics: list of `MetricConfig` instances.
        summaries: `str` or `list`; verbosity of the tensorboard
            visualization. Possible values: [`all`, `activations`, `loss`,
            `learning_rate`, `variables`, `gradients`].
        clip_gradients: `float`; gradients clipping by global norm.
        clip_embed_gradients: `float`; embedding gradients clipping value.
        name: `str`; scope name encapsulating the whole model.

    Returns:
        `EstimatorSpec`
    """
    SCHEMA = RegressorSchema
    IDENTIFIER = 'Regressor'
class GeneratorSchema(BaseModelSchema):
    """Schema for GeneratorConfig: encoder/decoder/bridge replace graph."""
    encoder = fields.Nested(GraphSchema)
    decoder = fields.Nested(GraphSchema)
    bridge = fields.Nested(BridgeSchema)
    class Meta:
        ordered = True
        # The generator is defined by encoder/decoder/bridge, so drop the
        # inherited single-graph field from (de)serialization.
        exclude = ('graph',)
    @staticmethod
    def schema_config():
        return GeneratorConfig
class GeneratorConfig(BaseModelConfig):
    """Configuration for an encoder/decoder (generator) model.

    Instead of a single graph, the model is assembled from an encoder
    graph, a decoder graph and a bridge connecting the two.

    Args:
        encoder: Graph definition of the encoder. See `Graph`.
        decoder: Graph definition of the decoder. See `Graph`.
        bridge: Bridge definition connecting encoder and decoder.
        loss: `LossConfig` instance. Default `SigmoidCrossEntropyConfig`.
        optimizer: `OptimizerConfig` instance. Default
            `AdadeltaConfig(learning_rate=0.4)`.
        metrics: list of `MetricConfig` instances.
        summaries: `str` or `list`; verbosity of the tensorboard
            visualization. Possible values: [`all`, `activations`, `loss`,
            `learning_rate`, `variables`, `gradients`].
        clip_gradients: `float`; gradients clipping by global norm.
        clip_embed_gradients: `float`; embedding gradients clipping value.
        name: `str`; scope name encapsulating the whole model.

    Returns:
        `EstimatorSpec`
    """
    SCHEMA = GeneratorSchema
    IDENTIFIER = 'Generator'
    def __init__(self, encoder, decoder, bridge, loss=None, optimizer=None,
                 metrics=None, summaries=None, clip_gradients=0.5,
                 clip_embed_gradients=0., name=None):
        # There is no single graph for a generator; pass None to the base.
        super(GeneratorConfig, self).__init__(
            graph=None, loss=loss, optimizer=optimizer, metrics=metrics,
            summaries=summaries, clip_gradients=clip_gradients,
            clip_embed_gradients=clip_embed_gradients, name=name)
        # Generator-specific structure.
        self.encoder = encoder
        self.decoder = decoder
        self.bridge = bridge
class ModelSchema(BaseMultiSchema):
    """Polymorphic schema dispatching on the model identifier."""
    __multi_schema_name__ = 'model'
    # Identifier -> concrete config class used for (de)serialization.
    __configs__ = {
        BaseModelConfig.IDENTIFIER: BaseModelConfig,
        ClassifierConfig.IDENTIFIER: ClassifierConfig,
        RegressorConfig.IDENTIFIER: RegressorConfig,
        GeneratorConfig.IDENTIFIER: GeneratorConfig,
    }
    # Accept snake_case identifiers (e.g. `model_type: classifier`).
    __support_snake_case__ = True
class ModelConfig(BaseConfig):
    """Top-level model configuration entry."""
    SCHEMA = ModelSchema
    IDENTIFIER = 'model'
    # Silently drop unrecognized keys on load instead of raising.
    UNKNOWN_BEHAVIOUR = EXCLUDE
| [
"mouradmourafiq@gmail.com"
] | mouradmourafiq@gmail.com |
a125ff9ec9ac7d4c723caf648f66e55462f31b62 | ee974d693ca4c4156121f8cb385328b52eaac07c | /env/lib/python3.6/site-packages/pip/_vendor/urllib3/filepost.py | e01a1a0d1044c86119534bb92e511348ffac1217 | [] | no_license | ngonhi/Attendance_Check_System_with_Face_Recognition | f4531cc4dee565d0e45c02217f73f3eda412b414 | 92ff88cbc0c740ad48e149033efd38137c9be88d | refs/heads/main | 2023-03-12T07:03:25.302649 | 2021-02-26T15:37:33 | 2021-02-26T15:37:33 | 341,493,686 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:27650543085246b415561217c916ce5c820588dd89fe70824bb6db3e60d439ae
size 2534
| [
"Nqk180998!"
] | Nqk180998! |
53e98abf1cde068fde4788a1920f72b9e87e3665 | 11334e46d3575968de5062c7b0e8578af228265b | /Examples/voltagereadspeed/orig/orig_batt_test.py | 45a6743f58a9ee101f6baf28fd3738d5bc9aad21 | [] | no_license | slowrunner/Carl | 99262f16eaf6d53423778448dee5e5186c2aaa1e | 1a3cfb16701b9a3798cd950e653506774c2df25e | refs/heads/master | 2023-06-08T05:55:55.338828 | 2023-06-04T02:39:18 | 2023-06-04T02:39:18 | 145,750,624 | 19 | 2 | null | 2023-06-04T02:39:20 | 2018-08-22T18:59:34 | Roff | UTF-8 | Python | false | false | 2,419 | py | #!/usr/bin/python3
import sys
from easygopigo3 import EasyGoPiGo3
from time import sleep
mybot = EasyGoPiGo3()  # handle to the GoPiGo3 robot board
# Running sum of (reference - measured) voltage differentials.
value = 0
# Number of samples taken so far.
count = 0
# Nominal supply voltage the battery reading is compared against.
# NOTE(review): assumes a 12 V input reference -- confirm for your pack.
Reference_Input_Voltage = 12.00
# Append mode so repeated runs accumulate in one log file.
file1 = open("./voltage_test.txt", "a")
def round_up(x, decimal_precision=2):
    """Round ``x`` half-away-from-zero to ``decimal_precision`` digits.

    Negative precisions are clamped to 0.  When the (clamped) precision is
    0, an ``int`` is returned; otherwise a float with at most the requested
    number of decimal digits.  Digits beyond the requested precision that
    the value does not have are simply not invented (e.g. 9.87 at
    precision 3 stays 9.87).
    """
    if decimal_precision < 0:
        decimal_precision = 0
    scale = 10 ** decimal_precision
    scaled = scale * x
    # Shift by half a unit away from zero, then truncate: classic 4/5
    # rounding that never rounds toward zero.
    if scaled > 0:
        result = int(scaled + 0.5) / scale
    elif scaled < 0:
        result = int(scaled - 0.5) / scale
    else:
        result = 0
    return int(result) if decimal_precision <= 0 else result
try:
    # Sample the board once per second until the user presses Ctrl-C.
    while True:
        Measured_Battery_Voltage = round_up(mybot.get_voltage_battery(), 3)
        Five_v_System_Voltage = round_up(mybot.get_voltage_5v(), 3)
        # How far the battery reading sits below the reference voltage.
        Measured_voltage_differential = round_up((Reference_Input_Voltage - Measured_Battery_Voltage),3)
        value = value + Measured_voltage_differential
        count = count+1
        print("Measured Battery Voltage =", Measured_Battery_Voltage)
        print("Measured voltage differential = ", Measured_voltage_differential)
        print("5v system voltage =", Five_v_System_Voltage, "\n")
        print("Total number of measurements so far is ", count)
        sleep(1.00)
except KeyboardInterrupt:
    # Ctrl-C ends the run: log and print the average differential.
    # NOTE(review): value/count raises ZeroDivisionError if interrupted
    # before the first sample completes -- consider guarding count == 0.
    print("\nThat's All Folks!\n")
    data = ["\nWe took ", str(count), " measurements and the average differential was ", str(round_up(value/count, 3)), "\n(based on an input reference voltage of ", str(Reference_Input_Voltage), ")\n"]
    file1.writelines(data)
    print("We took ", str(count), " measurements and the average differential was ", str(round_up(value/count, 3)), "\n(based on an input reference voltage of ", str(Reference_Input_Voltage), ")\n")
    file1.close()
    sys.exit(0)
| [
"slowrunner@users.noreply.github.com"
] | slowrunner@users.noreply.github.com |
a6ffeeae418a936374b98c0f082f122766428944 | 60d5b5b1f1c912d1655de3884efc09dfddd8d132 | /sites/kotourism/events/feeds.py | 27d81038a0eeef22fe3d06582bed6104baaf1b6b | [] | no_license | alexgula/django_sites | 15033c739401f24603e957c5a034d63652f0d21f | 038834c0f544d6997613d61d593a7d5abf673c70 | refs/heads/master | 2016-09-05T11:02:43.838095 | 2014-07-07T11:36:07 | 2014-07-07T11:36:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,464 | py | # coding=utf-8
from datetime import timedelta
from collections import namedtuple
from django.utils.timezone import now
from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext, pgettext, get_language
from picassoft.utils.templatetags.markup import restructuredtext
from .models import Event, EVENT_TYPES_SOURCE
EVENT_TYPES = dict(EVENT_TYPES_SOURCE)
EventType = namedtuple('EventType', ['code', 'desc'])
class EventFeed(Feed):
feed_type = Atom1Feed
description = _("Updates on changes and additions to ko-tourism.gov.ua.")
subtitle = description
def get_object(self, request, type_slug):
return EventType(type_slug, pgettext('plural', EVENT_TYPES[type_slug]))
def title(self, obj):
return ugettext("Latest tourists {} of Kyiv oblast.").format(obj.desc.lower())
def link(self, obj):
from django.core import urlresolvers
return urlresolvers.reverse('typed_event_list', kwargs=dict(type_slug=obj.code))
def items(self, obj):
return Event.objects.active().filter(type=obj.code, post_date__gte=now()-timedelta(weeks=13)).order_by('-post_date')
def item_title(self, item):
return item.name
def item_description(self, item):
return restructuredtext(item.desc)
def item_pubdate(self, item):
return item.post_date
| [
"alexgula@gmail.com"
] | alexgula@gmail.com |
cbc4a88ee7d1c9890db6f01b5bc2df4ac1869912 | 44064ed79f173ddca96174913910c1610992b7cb | /Second_Processing_app/temboo/Library/Facebook/Actions/Video/WantsToWatch/UpdateWantsToWatch.py | bff67d21bb448deb53c6b84900556134e8237aeb | [] | no_license | dattasaurabh82/Final_thesis | 440fb5e29ebc28dd64fe59ecd87f01494ed6d4e5 | 8edaea62f5987db026adfffb6b52b59b119f6375 | refs/heads/master | 2021-01-20T22:25:48.999100 | 2014-10-14T18:58:00 | 2014-10-14T18:58:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,766 | py | # -*- coding: utf-8 -*-
###############################################################################
#
# UpdateWantsToWatch
# Updates an existing wants_to_watch action.
#
# Python version 2.6
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class UpdateWantsToWatch(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the UpdateWantsToWatch Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
Choreography.__init__(self, temboo_session, '/Library/Facebook/Actions/Video/WantsToWatch/UpdateWantsToWatch')
def new_input_set(self):
return UpdateWantsToWatchInputSet()
def _make_result_set(self, result, path):
return UpdateWantsToWatchResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return UpdateWantsToWatchChoreographyExecution(session, exec_id, path)
class UpdateWantsToWatchInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the UpdateWantsToWatch
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((required, string) The access token retrieved from the final step of the OAuth process.)
"""
InputSet._set_input(self, 'AccessToken', value)
def set_ActionID(self, value):
"""
Set the value of the ActionID input for this Choreo. ((required, string) The id of the action to update.)
"""
InputSet._set_input(self, 'ActionID', value)
def set_AiringEndTime(self, value):
"""
Set the value of the AiringEndTime input for this Choreo. ((optional, date) The time that the airing ends.)
"""
InputSet._set_input(self, 'AiringEndTime', value)
def set_AiringID(self, value):
"""
Set the value of the AiringID input for this Choreo. ((optional, string) The id of the video airing.)
"""
InputSet._set_input(self, 'AiringID', value)
def set_AiringStartTime(self, value):
"""
Set the value of the AiringStartTime input for this Choreo. ((optional, date) The time that the airing begins.)
"""
InputSet._set_input(self, 'AiringStartTime', value)
def set_EndTime(self, value):
"""
Set the value of the EndTime input for this Choreo. ((optional, date) The time that the user ended the action (e.g. 2013-06-24T18:53:35+0000).)
"""
InputSet._set_input(self, 'EndTime', value)
def set_Episode(self, value):
"""
Set the value of the Episode input for this Choreo. ((optional, string) The URL or ID for an Open Graph object representing an episode of a show.)
"""
InputSet._set_input(self, 'Episode', value)
def set_ExpiresIn(self, value):
"""
Set the value of the ExpiresIn input for this Choreo. ((optional, integer) The amount of time (in milliseconds) from the publish_time that the action will expire.)
"""
InputSet._set_input(self, 'ExpiresIn', value)
def set_Message(self, value):
"""
Set the value of the Message input for this Choreo. ((optional, string) A message attached to this action. Setting this parameter requires enabling of message capabilities.)
"""
InputSet._set_input(self, 'Message', value)
def set_Movie(self, value):
"""
Set the value of the Movie input for this Choreo. ((optional, string) The URL or ID for an Open Graph object representing a movie.)
"""
InputSet._set_input(self, 'Movie', value)
def set_Other(self, value):
"""
Set the value of the Other input for this Choreo. ((optional, string) The URL or ID for an Open Graph object representing any general video content.)
"""
InputSet._set_input(self, 'Other', value)
def set_Place(self, value):
"""
Set the value of the Place input for this Choreo. ((optional, string) The URL or ID for an Open Graph object representing the location associated with this action.)
"""
InputSet._set_input(self, 'Place', value)
def set_TVShow(self, value):
"""
Set the value of the TVShow input for this Choreo. ((optional, string) The URL or ID for an Open Graph object representing a TV show.)
"""
InputSet._set_input(self, 'TVShow', value)
def set_Tags(self, value):
"""
Set the value of the Tags input for this Choreo. ((optional, string) A comma separated list of other profile IDs that also performed this action.)
"""
InputSet._set_input(self, 'Tags', value)
class UpdateWantsToWatchResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the UpdateWantsToWatch Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Facebook.)
"""
return self._output.get('Response', None)
class UpdateWantsToWatchChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return UpdateWantsToWatchResultSet(response, path)
| [
"dattasaurabh82@gmail.com"
] | dattasaurabh82@gmail.com |
4c049858fdeff016c62349c76078e6d0a75dc918 | 10d98fecb882d4c84595364f715f4e8b8309a66f | /aloe/aloe/rfill/utils/rfill_consts.py | 18131c6e49d3a1e196701d0718b8cdc9deb7edbe | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | afcarl/google-research | 51c7b70d176c0d70a5ee31ea1d87590f3d6c6f42 | 320a49f768cea27200044c0d12f394aa6c795feb | refs/heads/master | 2021-12-02T18:36:03.760434 | 2021-09-30T20:59:01 | 2021-09-30T21:07:02 | 156,725,548 | 1 | 0 | Apache-2.0 | 2018-11-08T15:13:53 | 2018-11-08T15:13:52 | null | UTF-8 | Python | false | false | 2,663 | py | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Robustfill program graph representation.
Constants are predefined here.
"""
# pylint: skip-file
import string
# Constant tokens available to RobustFill programs; index 0 is a dummy
# placeholder so real constants start at index 1.
CONSTANTS = ['dummy', ']', ',', '-', '.', '@', "'", '\"', '(', ')', ':', '%']

# Character-class regexes indexed 1..9 (slot 0 is unused).
REGEXES = [
    None,
    '[A-Z]([a-z])+',   # ProperCase
    '[A-Z]+',          # CAPS
    '[a-z]+',          # lowercase
    r'\d+',            # Digits
    '[a-zA-Z]+',       # Alphabets
    '[a-zA-Z0-9]+',    # Alphanumeric
    r'\s+',            # Whitespace
    '^',
    '$',
]

# Character vocabulary: the non-dummy constants, letters, digits and space.
STR_VOCAB = (''.join(CONSTANTS[1:]) + string.ascii_lowercase
             + string.ascii_uppercase + string.digits + ' ')

# Node label -> integer id for the program-graph representation.  Ids are
# assigned by position, reproducing the original explicit table (0..55).
RFILL_NODE_TYPES = {
    name: idx
    for idx, name in enumerate([
        'ConstPos--1', 'ConstPos--2', 'ConstPos--3', 'ConstPos--4',
        'ConstPos-0', 'ConstPos-1', 'ConstPos-2', 'ConstPos-3', 'ConstPos-4',
        'ConstStr-1', 'ConstStr-10', 'ConstStr-11', 'ConstStr-2',
        'ConstStr-3', 'ConstStr-4', 'ConstStr-5', 'ConstStr-6', 'ConstStr-7',
        'ConstStr-8', 'ConstStr-9', 'ConstTok', 'RegPos', 'RegexTok',
        'SubStr', 'c1-1', 'c1-10', 'c1-11', 'c1-2', 'c1-3', 'c1-4', 'c1-5',
        'c1-6', 'c1-7', 'c1-8', 'c1-9', 'direct-End', 'direct-Start',
        'expr_root', 'idx--1', 'idx--2', 'idx--3', 'idx--4', 'idx-0',
        'idx-1', 'idx-2', 'idx-3', 'idx-4', 'r1-1', 'r1-2', 'r1-3', 'r1-4',
        'r1-5', 'r1-6', 'r1-7', 'r1-8', 'r1-9',
    ])
}

# Edge label -> integer id: the forward labels (0..8) followed by their
# 'rev-' counterparts (9..17), exactly as in the original table.
_EDGE_NAMES = [
    'c1', 'direct', 'idx', 'p1', 'p2', 'pos_param', 'r1', 'subexpr', 'succ',
]
RFILL_EDGE_TYPES = {
    name: idx
    for idx, name in enumerate(_EDGE_NAMES + ['rev-' + n for n in _EDGE_NAMES])
}
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
d63074174c227a34a29aa659680afcb60b7d057f | bf3049a786140bca8d6f5dfdd754086d9395a5e2 | /src/main/management/commands/correctdb.py | 8f1b309b5cbd2e24488be7108872853bd7e6a558 | [] | no_license | nvbn/djang0byte | ecefccc60f622c8aa55315ab478aacddbd8fc3b5 | 39deb1dc046c80edd6bfdfbef8391842eda35dd2 | refs/heads/reload | 2016-09-05T17:03:44.323217 | 2014-10-01T17:20:12 | 2014-10-01T17:20:12 | 2,582,402 | 26 | 7 | null | 2014-10-01T17:20:12 | 2011-10-15T16:06:06 | Python | UTF-8 | Python | false | false | 549 | py | # -*- coding: utf-8 -*-
from django.core.management.base import BaseCommand
from main.models import Post
from django.db.models import Q
class Command(BaseCommand):
def handle(self, **options):
qs = Post.objects.filter(
Q(title__icontains=u'решен') | Q(title__icontains=u'решён') | Q(title__icontains=u'solv'),
blog__type__is_qa=True,
)
print 'fix %d posts' % qs.count()
for post in qs: #Shit, update not work
post.solved = True
post.save(convert=True)
| [
"nvbn.rm@gmail.com"
] | nvbn.rm@gmail.com |
a716c01559400043f512cdc990ebfc7b5cb59da6 | 92949cad6725d61d4a40717bb3d859911e152664 | /ytree/frontends/rockstar/arbor.py | 6da1a94c02cf0fd0962d4e695a8e85b42ee0ae1d | [
"BSD-3-Clause"
] | permissive | jwise77/ytree | 662fca1df8358d7d1c13103bf86de97ddb16de2a | f179c07ae4696cce7ae6376417e814dabd6c9d1b | refs/heads/master | 2023-06-13T06:22:55.513112 | 2020-01-06T19:14:45 | 2020-01-06T19:14:45 | 238,534,101 | 0 | 0 | NOASSERTION | 2020-02-05T19:44:23 | 2020-02-05T19:44:22 | null | UTF-8 | Python | false | false | 4,186 | py | """
RockstarArbor class and member functions
"""
#-----------------------------------------------------------------------------
# Copyright (c) ytree development team. All rights reserved.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import glob
from yt.units.yt_array import \
UnitParseError
from ytree.data_structures.arbor import \
CatalogArbor
from ytree.frontends.rockstar.fields import \
RockstarFieldInfo, \
setup_field_groups
from ytree.frontends.rockstar.io import \
RockstarDataFile
class RockstarArbor(CatalogArbor):
    """
    Class for Arbors created from Rockstar out_*.list files.
    Use only descendent IDs to determine tree relationship.
    """
    _suffix = ".list"
    _field_info_class = RockstarFieldInfo
    _data_file_class = RockstarDataFile

    def _parse_parameter_file(self):
        """
        Read the catalog header: field names, cosmological parameters,
        box size, and the units associated with each field group.
        """
        fgroups = setup_field_groups()
        # Unit qualifiers to strip from header unit strings, both with and
        # without parentheses, e.g. "(comoving)" or "physical, peculiar".
        rems = ["%s%s%s" % (s[0], t, s[1])
                for s in [("(", ")"), ("", "")]
                for t in ["physical, peculiar",
                          "comoving", "physical"]]
        # Use a context manager so the file is closed even if parsing raises
        # (the original opened/closed manually and leaked on error).
        with open(self.filename, "r") as f:
            # The first line lists all fields, prefixed with "#".
            fields = f.readline()[1:].strip().split()
            # Parse the remaining "#" header lines for box size,
            # cosmological parameters, and units.
            while True:
                line = f.readline()
                if not line or not line.startswith("#"):
                    # EOF (readline returns "") or the first data line.
                    break
                elif line.startswith("#Om = "):
                    pars = line[1:].split(";")
                    for j, par in enumerate(["omega_matter",
                                             "omega_lambda",
                                             "hubble_constant"]):
                        v = float(pars[j].split(" = ")[1])
                        setattr(self, par, v)
                elif line.startswith("#Box size:"):
                    pars = line.split(":")[1].strip().split()
                    self.box_size = self.quan(float(pars[0]), pars[1])
                elif line.startswith("#Units:"):
                    # Looking for "<quantities> in <units>".
                    if " in " not in line:
                        continue
                    quan, punits = line[8:].strip().split(" in ", 2)
                    for rem in rems:
                        while rem in punits:
                            pre, _, pos = punits.partition(rem)
                            punits = pre + pos
                    try:
                        self.quan(1, punits)
                    except UnitParseError:
                        # Unparseable unit string: treat as dimensionless.
                        punits = ""
                    for group in fgroups:
                        if group.in_group(quan):
                            group.units = punits
                            break
        fi = {}
        for i, field in enumerate(fields):
            # Hoisted default so 'units' is defined even if fgroups is empty.
            units = ""
            for group in fgroups:
                if group.in_group(field):
                    units = getattr(group, "units", "")
                    break
            fi[field] = {"column": i, "units": units}

        # the scale factor comes from the catalog file header
        fields.append("scale_factor")
        fi["scale_factor"] = {"source": "header", "units": ""}

        self.field_list = fields
        self.field_info.update(fi)

    def _get_data_files(self):
        """
        Get all out_*.list files and sort them in reverse order.
        """
        prefix = self.filename.rsplit("_", 1)[0]
        suffix = self._suffix
        my_files = glob.glob("%s_*%s" % (prefix, suffix))
        # sort by catalog number
        my_files.sort(
            key=lambda x: self._get_file_index(x, prefix, suffix),
            reverse=True)
        self.data_files = \
            [self._data_file_class(f, self) for f in my_files]

    def _get_file_index(self, f, prefix, suffix):
        """Return the integer catalog number embedded in filename *f*."""
        # FIX: a stray trailing comma previously made this return a 1-tuple
        # instead of an int (sorting still worked only by accident, since
        # 1-tuples compare element-wise).
        return int(f[f.find(prefix) + len(prefix) + 1:f.rfind(suffix)])

    @classmethod
    def _is_valid(cls, *args, **kwargs):
        """
        File should end in .list.
        """
        fn = args[0]
        return fn.endswith(".list")
| [
"brittonsmith@gmail.com"
] | brittonsmith@gmail.com |
b11951b8ce3f201f333ca12180beaa1dba34b567 | f0b75bd94f133a13f469f429a696f26be3be9862 | /week 2/.history/utils_20200204173854.py | 2e69e8e8be516eac983005ccf56b3881f8eceb20 | [] | no_license | dechavez4/Python_handin_assignments | 023350fabd212cdf2a4ee9cd301306dc5fd6bea0 | 82fd8c991e560c18ecb2152ea5a8fc35dfc3c608 | refs/heads/master | 2023-01-11T23:31:27.220757 | 2020-05-22T10:33:56 | 2020-05-22T10:33:56 | 237,179,899 | 0 | 0 | null | 2022-12-30T20:14:04 | 2020-01-30T09:30:16 | Python | UTF-8 | Python | false | false | 400 | py | import os.path
from os import path
from sys import argv
import python_second_assignment as myList
# A. first function takes a path to a folder and writes all filenames in the folder to a specified output file
folderpath = "/Users/robin/Desktop/semester_4/python/myPythonCode/week 2"
def read_folder(path=None):
    """Print the name of every entry found directly inside *path*.

    *path* defaults to the module-level ``folderpath`` so the original
    no-argument ``read_folder()`` callers keep their exact behavior;
    passing an explicit path generalizes the helper to any folder.
    """
    if path is None:
        path = folderpath
    for entry in os.listdir(path):
        print(entry)
read_folder() | [
"chavezgamingv2@hotmail.com"
] | chavezgamingv2@hotmail.com |
a8f873115c95b3d8eecb33fcdafaac877d78ad6a | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/KoubeiMarketingCampaignMerchantActivityOfflineModel.py | 3cbd886eb6742480b12b1db2f30f6611b7c11127 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 2,955 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class KoubeiMarketingCampaignMerchantActivityOfflineModel(object):
    """Request model for taking a Koubei merchant marketing activity offline.

    Plain data holder: each field is exposed through a property, and the
    model converts to/from the dict shape used by the Alipay gateway.
    """

    # Fields serialized to/from the gateway payload.
    _FIELDS = ('activity_id', 'memo', 'operator', 'operator_type',
               'out_request_no')

    def __init__(self):
        self._activity_id = None
        self._memo = None
        self._operator = None
        self._operator_type = None
        self._out_request_no = None

    @property
    def activity_id(self):
        return self._activity_id

    @activity_id.setter
    def activity_id(self, value):
        self._activity_id = value

    @property
    def memo(self):
        return self._memo

    @memo.setter
    def memo(self, value):
        self._memo = value

    @property
    def operator(self):
        return self._operator

    @operator.setter
    def operator(self, value):
        self._operator = value

    @property
    def operator_type(self):
        return self._operator_type

    @operator_type.setter
    def operator_type(self, value):
        self._operator_type = value

    @property
    def out_request_no(self):
        return self._out_request_no

    @out_request_no.setter
    def out_request_no(self, value):
        self._out_request_no = value

    def to_alipay_dict(self):
        """Serialize the set (truthy) fields into the gateway dict format.

        Values that themselves expose ``to_alipay_dict`` are serialized
        recursively, exactly as the per-field boilerplate used to do.
        """
        params = dict()
        for name in self._FIELDS:
            value = getattr(self, name)
            if not value:
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a gateway dict; returns None for empty input."""
        if not d:
            return None
        o = KoubeiMarketingCampaignMerchantActivityOfflineModel()
        for name in KoubeiMarketingCampaignMerchantActivityOfflineModel._FIELDS:
            if name in d:
                setattr(o, name, d[name])
        return o
| [
"liuqun.lq@alibaba-inc.com"
] | liuqun.lq@alibaba-inc.com |
7384f79ed839dd47ca168b9eca226acf619fa1ef | bf769a3a3935a8e08f11fdf606f2e2e2bc6a5307 | /PyQtGui/stickyNotes/Ui_mainwindow.py | b82b0e288c448dca5912a3ce5bb22d61848d9bbb | [] | no_license | metanoia1989/QTStudy | b71f2c8cf6fd001a14db3f1b5ece82c1cc7f7a93 | 29465c6bb9fc0ef2e50a9bf2f66d996ecbd086c0 | refs/heads/master | 2021-12-25T16:50:26.915441 | 2021-10-10T01:26:14 | 2021-10-10T01:26:14 | 193,919,811 | 3 | 2 | null | 2021-01-25T09:23:30 | 2019-06-26T14:22:41 | HTML | UTF-8 | Python | false | false | 3,005 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'd:\WorkSpace\QT\QTStudy\PyQtGui\stickyNotes\mainwindow.ui'
#
# Created by: PyQt5 UI code generator 5.14.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """Auto-generated (pyuic5) UI class for the sticky-note main window:
    a close button and a "more" button in a top row, above a frameless
    QTextEdit.  Regenerate from mainwindow.ui rather than editing the
    widget-construction code by hand."""

    def setupUi(self, MainWindow):
        # Build the widget tree and apply all geometry/font/style settings.
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(378, 333)
        MainWindow.setStyleSheet("QPushButton {\n"
"    border: 0px;\n"
"}")
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.centralwidget)
        self.verticalLayout.setObjectName("verticalLayout")
        # Top row: close button, stretch spacer, "more" button.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.closeButton = QtWidgets.QPushButton(self.centralwidget)
        self.closeButton.setMinimumSize(QtCore.QSize(25, 20))
        self.closeButton.setMaximumSize(QtCore.QSize(25, 20))
        self.closeButton.setBaseSize(QtCore.QSize(2, 0))
        font = QtGui.QFont()
        font.setPointSize(30)
        font.setBold(True)
        font.setWeight(75)
        self.closeButton.setFont(font)
        self.closeButton.setObjectName("closeButton")
        self.horizontalLayout.addWidget(self.closeButton)
        spacerItem = QtWidgets.QSpacerItem(228, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.moreButton = QtWidgets.QPushButton(self.centralwidget)
        self.moreButton.setMinimumSize(QtCore.QSize(25, 25))
        self.moreButton.setMaximumSize(QtCore.QSize(25, 25))
        self.moreButton.setBaseSize(QtCore.QSize(0, 0))
        font = QtGui.QFont()
        font.setPointSize(30)
        font.setBold(True)
        font.setWeight(75)
        self.moreButton.setFont(font)
        self.moreButton.setObjectName("moreButton")
        self.horizontalLayout.addWidget(self.moreButton)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # Main note area: borderless text editor.
        self.textEdit = QtWidgets.QTextEdit(self.centralwidget)
        font = QtGui.QFont()
        font.setPointSize(11)
        self.textEdit.setFont(font)
        self.textEdit.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.textEdit.setFrameShadow(QtWidgets.QFrame.Plain)
        self.textEdit.setLineWidth(0)
        self.textEdit.setObjectName("textEdit")
        self.verticalLayout.addWidget(self.textEdit)
        MainWindow.setCentralWidget(self.centralwidget)

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        # Install user-visible (translatable) strings.
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.closeButton.setText(_translate("MainWindow", "×"))
        self.moreButton.setText(_translate("MainWindow", "+"))
| [
"sogaxili@gmail.com"
] | sogaxili@gmail.com |
6c9b556dd08e9ff9e09b54f91e034188ecff94ee | 9b0b2029225c680f978ae041552cf68d977144ac | /test_singleton.py | 99dedd8f298421b143d9fe46cae0786cf26e5f3d | [] | no_license | 1151332702/flask_rest_api | 59547189d5928a16f7cee7b36a99f365627f4814 | ad5df495157e12858c2869c8899ded10650c2e88 | refs/heads/master | 2021-06-23T10:47:01.280030 | 2019-10-24T10:31:25 | 2019-10-24T10:31:25 | 208,002,649 | 0 | 0 | null | 2021-03-20T01:59:21 | 2019-09-12T08:34:33 | Python | UTF-8 | Python | false | false | 985 | py | # -*- coding: utf-8 -*-
# @Time : 2019/1/31 16:37
# @Author : lilong
# @File : test_singleton.py
# @Description: Singleton pattern
def singletom(cls):
    """Class decorator implementing the singleton pattern.

    The first call to the decorated class creates the single instance
    (with whatever arguments were passed); every later call returns that
    same instance and ignores its arguments.
    """
    import functools
    _instance = {}
    # functools.wraps preserves the decorated class's metadata: without it,
    # A.__name__ would become 'decorator' and A.__doc__ would be lost.
    @functools.wraps(cls)
    def decorator(*args, **kwargs):
        if cls not in _instance:
            _instance[cls] = cls(*args, **kwargs)
        return _instance[cls]
    return decorator
@singletom
class A(object):
    # Singleton demo class: A(1) creates the instance; later calls such as
    # A(2) return that same object (their constructor args are ignored).
    a = 100
    def __init__(self, x):
        self.x = x

class B(object):
    # Plain (non-singleton) counterpart to A, kept for comparison.
    a = 100
    def __init__(self, x):
        self.x = x
# Thread-based implementation (thread-safe singleton)
import threading
class Singleton(object):
    """Thread-safe singleton implemented in ``__new__`` with
    double-checked locking: the lock is only taken while the single
    instance has not yet been created."""

    _instance_lock = threading.Lock()

    def __init__(self):
        pass

    def __new__(cls, *args, **kwargs):
        # Fast path: instance already exists, no locking needed.
        if getattr(Singleton, '_instance', None) is None:
            with Singleton._instance_lock:
                # Re-check under the lock: another thread may have won
                # the race while we were waiting.
                if getattr(Singleton, '_instance', None) is None:
                    Singleton._instance = object.__new__(cls)
        return Singleton._instance
o1 = Singleton()
o2 = Singleton()
print(o1)
print(o2) | [
"12345678"
] | 12345678 |
4d0b47e8c26352d4a9d3a1017e02824ffa12e6bd | 704976ea552111c6a5af9cd7cb62b9d9abaf3996 | /rpython/rlib/rminiz_oxide.py | fb41ad9798b926b34e8fc7f80070d1dabfdca434 | [
"LicenseRef-scancode-unicode",
"BSD-3-Clause"
] | permissive | mesalock-linux/mesapy | 4f02c5819ce7f2f6e249d34840f1aa097577645d | ed546d59a21b36feb93e2309d5c6b75aa0ad95c9 | refs/heads/mesapy2.7 | 2023-08-16T21:33:02.239581 | 2019-08-13T10:29:43 | 2019-08-13T18:06:45 | 136,080,721 | 396 | 33 | NOASSERTION | 2020-04-01T03:05:18 | 2018-06-04T20:45:17 | Python | UTF-8 | Python | false | false | 14,758 | py | from __future__ import with_statement
import sys
from rpython.rlib import rgc
from rpython.rlib.rstring import StringBuilder
from rpython.rtyper.annlowlevel import llstr
from rpython.rtyper.lltypesystem import rffi, lltype
from rpython.rtyper.lltypesystem.rstr import copy_string_to_raw
from rpython.rtyper.tool import rffi_platform
from rpython.translator.platform import platform as compiler, CompilationError
from rpython.translator.tool.cbuild import ExternalCompilationInfo
from rpython.tool.version import rpythonroot
import py
libname = 'miniz_oxide_c_api'
ROOT = rpythonroot + '/lib_rust/miniz_oxide'
testonly_libraries = []
eci = ExternalCompilationInfo(
libraries=[libname],
library_dirs=[ROOT + '/target/release'],
includes=[ROOT + '/miniz.h'],
include_dirs=[ROOT],
testonly_libraries = testonly_libraries
)
eci = rffi_platform.configure_external_library(
libname, eci,
[dict(prefix='miniz_oxide-'),
])
constantnames = '''
Z_OK Z_STREAM_ERROR Z_BUF_ERROR Z_MEM_ERROR Z_STREAM_END Z_DATA_ERROR
Z_DEFLATED Z_DEFAULT_STRATEGY Z_DEFAULT_COMPRESSION
Z_NO_FLUSH Z_FINISH Z_SYNC_FLUSH Z_FULL_FLUSH
MAX_WBITS MAX_MEM_LEVEL
Z_BEST_SPEED Z_BEST_COMPRESSION Z_DEFAULT_COMPRESSION
Z_FILTERED Z_HUFFMAN_ONLY Z_DEFAULT_STRATEGY Z_NEED_DICT
'''.split()
class SimpleCConfig:
    """
    Definitions for basic types defined by zlib.
    """
    _compilation_info_ = eci

    # XXX If Z_PREFIX was defined for the libz build, then these types are
    # named z_uInt, z_uLong, and z_Bytef instead.
    uInt = rffi_platform.SimpleType('uInt', rffi.UINT)
    uLong = rffi_platform.SimpleType('uLong', rffi.ULONG)
    Bytef = rffi_platform.SimpleType('Bytef', rffi.UCHAR)
    voidpf = rffi_platform.SimpleType('voidpf', rffi.VOIDP)

    # Version information exported by the zlib-compatible (miniz) header.
    ZLIB_VERSION = rffi_platform.DefinedConstantString('ZLIB_VERSION')
    ZLIB_VERNUM = rffi_platform.DefinedConstantInteger('ZLIB_VERNUM')
for _name in constantnames:
setattr(SimpleCConfig, _name, rffi_platform.ConstantInteger(_name))
config = rffi_platform.configure(SimpleCConfig)
voidpf = config['voidpf']
uInt = config['uInt']
uLong = config['uLong']
Bytef = config['Bytef']
Bytefp = lltype.Ptr(lltype.Array(Bytef, hints={'nolength': True}))
ZLIB_VERSION = config['ZLIB_VERSION']
ZLIB_VERNUM = config['ZLIB_VERNUM']
for _name in constantnames:
globals()[_name] = config[_name]
# The following parameter is copied from zutil.h, version 0.95,
# according to CPython's zlibmodule.c
DEFLATED = Z_DEFLATED
if MAX_MEM_LEVEL >= 8:
DEF_MEM_LEVEL = 8
else:
DEF_MEM_LEVEL = MAX_MEM_LEVEL
OUTPUT_BUFFER_SIZE = 32*1024
INPUT_BUFFER_MAX = 2047*1024*1024
# Note: we assume that zlib never outputs less than OUTPUT_BUFFER_SIZE
# from an input of INPUT_BUFFER_MAX bytes. This should be true by a
# large margin (I think zlib never compresses by more than ~1000x).
class ComplexCConfig:
    """
    Definitions of structure types defined by zlib and based on SimpleCConfig
    definitions.
    """
    _compilation_info_ = eci

    # Field layout mirrors the z_stream struct declared in miniz.h / zlib.h.
    z_stream = rffi_platform.Struct(
        'z_stream',
        [('next_in', Bytefp),
         ('avail_in', uInt),
         ('total_in', uLong),
         ('next_out', Bytefp),
         ('avail_out', uInt),
         ('total_out', uLong),
         ('msg', rffi.CCHARP),
         ('zalloc', lltype.Ptr(
            lltype.FuncType([voidpf, uInt, uInt], voidpf))),
         ('zfree', lltype.Ptr(
            lltype.FuncType([voidpf, voidpf], lltype.Void))),
         ('opaque', voidpf),
         ('data_type', rffi.INT),
         ('adler', uLong),
         ('reserved', uLong)
         ])
config = rffi_platform.configure(ComplexCConfig)
z_stream = config['z_stream']
z_stream_p = lltype.Ptr(z_stream)
def zlib_external(*a, **kw):
    # All FFI declarations in this module share the miniz_oxide
    # compilation info (library name, include path, etc.).
    kw['compilation_info'] = eci
    return rffi.llexternal(*a, **kw)

# Checksum entry points; the "mz_" prefix is miniz_oxide's zlib-compatible API.
_crc32 = zlib_external('mz_crc32', [uLong, Bytefp, uInt], uLong)
_adler32 = zlib_external('mz_adler32', [uLong, Bytefp, uInt], uLong)

# XXX I want to call deflateInit2, not deflateInit2_
_deflateInit2_ = zlib_external(
    'mz_deflateInit2',
    [z_stream_p, # stream
     rffi.INT, # level
     rffi.INT, # method
     rffi.INT, # window bits
     rffi.INT, # mem level
     rffi.INT], # strategy
    rffi.INT)
_deflate = zlib_external('mz_deflate', [z_stream_p, rffi.INT], rffi.INT)

# releasegil=False: the End functions are called during cleanup and must
# not release the GIL.
_deflateEnd = zlib_external('mz_deflateEnd', [z_stream_p], rffi.INT,
                            releasegil=False)

def _deflateInit2(stream, level, method, wbits, memlevel, strategy):
    # Thin wrapper so callers do not depend on the exact C entry point name.
    # size = rffi.sizeof(z_stream)
    result = _deflateInit2_(
        stream, level, method, wbits, memlevel, strategy)
    return result

# XXX I also want to call inflateInit2 instead of inflateInit2_
_inflateInit2_ = zlib_external(
    'mz_inflateInit2',
    [z_stream_p, # stream
     rffi.INT], # window bits
    rffi.INT)
_inflate = zlib_external('mz_inflate', [z_stream_p, rffi.INT], rffi.INT)

_inflateEnd = zlib_external('mz_inflateEnd', [z_stream_p], rffi.INT,
                            releasegil=False)

def _inflateInit2(stream, wbits):
    # Thin wrapper, mirroring _deflateInit2 above.
    result = _inflateInit2_(stream, wbits)
    return result

_zlibVersion = zlib_external('mz_version', [], rffi.CCHARP)
# ____________________________________________________________
def _crc_or_adler(string, start, function):
    """Run a zlib-style rolling checksum over *string*.

    *function* is either _crc32 or _adler32; the data is fed to it in
    chunks of at most 32 MiB, threading the running checksum through.
    """
    with rffi.scoped_nonmovingbuffer(string) as bytes:
        remaining = len(string)
        checksum = start
        ptr = rffi.cast(Bytefp, bytes)
        while remaining > 0:
            count = min(remaining, 32*1024*1024)
            checksum = function(checksum, ptr, count)
            ptr = rffi.ptradd(ptr, count)
            remaining -= count
    return checksum

CRC32_DEFAULT_START = 0

def crc32(string, start=CRC32_DEFAULT_START):
    """
    Compute the CRC32 checksum of the string, possibly with the given
    start value, and return it as a unsigned 32 bit integer.
    """
    return _crc_or_adler(string, start, _crc32)

ADLER32_DEFAULT_START = 1

def adler32(string, start=ADLER32_DEFAULT_START):
    """
    Compute the Adler-32 checksum of the string, possibly with the given
    start value, and return it as a unsigned 32 bit integer.
    """
    return _crc_or_adler(string, start, _adler32)

def zlibVersion():
    """Return the runtime version of zlib library"""
    return rffi.charp2str(_zlibVersion())
# ____________________________________________________________
class RZlibError(Exception):
    """Exception raised by failing operations in rpython.rlib.rzlib."""

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg

    def fromstream(stream, err, while_doing):
        """Return a RZlibError with a message formatted from a zlib error
        code and stream.
        """
        # Prefer the library's own message; otherwise map the well-known
        # error codes to human-readable text.
        if stream.c_msg:
            reason = rffi.charp2str(stream.c_msg)
        elif err == Z_MEM_ERROR:
            reason = "out of memory"
        elif err == Z_BUF_ERROR:
            reason = "incomplete or truncated stream"
        elif err == Z_STREAM_ERROR:
            reason = "inconsistent stream state"
        elif err == Z_DATA_ERROR:
            reason = "invalid input data"
        else:
            reason = ""
        if reason:
            delim = ": "
        else:
            delim = ""
        msg = "Error %d %s%s%s" % (err, while_doing, delim, reason)
        return RZlibError(msg)
    # Assignment form (rather than a decorator) makes fromstream callable
    # as RZlibError.fromstream(...) without an instance.
    fromstream = staticmethod(fromstream)
null_stream = lltype.nullptr(z_stream)
def deflateInit(level=Z_DEFAULT_COMPRESSION, method=Z_DEFLATED,
                wbits=MAX_WBITS, memLevel=DEF_MEM_LEVEL,
                strategy=Z_DEFAULT_STRATEGY):
    """
    Allocate and return an opaque 'stream' object that can be used to
    compress data.  The caller owns the stream and must eventually pass
    it to deflateEnd().
    """
    stream = lltype.malloc(z_stream, flavor='raw', zero=True)
    # Tell the GC about the raw allocation it cannot see.
    rgc.add_memory_pressure(rffi.sizeof(z_stream))
    err = _deflateInit2(stream, level, method, wbits, memLevel, strategy)
    if err == Z_OK:
        return stream
    else:
        try:
            if err == Z_STREAM_ERROR:
                raise ValueError("Invalid initialization option")
            else:
                raise RZlibError.fromstream(stream, err,
                    "while creating compression object")
        finally:
            # Do not leak the raw struct when initialization fails.
            lltype.free(stream, flavor='raw')

def deflateEnd(stream):
    """
    Free the resources associated with the deflate stream.
    """
    _deflateEnd(stream)
    lltype.free(stream, flavor='raw')

def inflateInit(wbits=MAX_WBITS):
    """
    Allocate and return an opaque 'stream' object that can be used to
    decompress data.  The caller owns the stream and must eventually pass
    it to inflateEnd().
    """
    stream = lltype.malloc(z_stream, flavor='raw', zero=True)
    rgc.add_memory_pressure(rffi.sizeof(z_stream))
    err = _inflateInit2(stream, wbits)
    if err == Z_OK:
        return stream
    else:
        try:
            if err == Z_STREAM_ERROR:
                raise ValueError("Invalid initialization option")
            else:
                raise RZlibError.fromstream(stream, err,
                    "while creating decompression object")
        finally:
            # Do not leak the raw struct when initialization fails.
            lltype.free(stream, flavor='raw')

def inflateEnd(stream):
    """
    Free the resources associated with the inflate stream.
    Note that this may raise RZlibError.
    """
    _inflateEnd(stream)
    lltype.free(stream, flavor='raw')
def compress(stream, data, flush=Z_NO_FLUSH):
    """
    Feed more data into a deflate stream.  Returns a string containing
    (a part of) the compressed data.  If flush != Z_NO_FLUSH, this also
    flushes the output data; see zlib.h or the documentation of the
    zlib module for the possible values of 'flush'.
    """
    # Warning, reentrant calls to the zlib with a given stream can cause it
    # to crash.  The caller of rpython.rlib.rzlib should use locks if needed.
    data, _, avail_in = _operate(stream, data, flush, sys.maxint, _deflate,
                                 "while compressing")
    # deflate is documented to consume all input when given enough output
    # space, which _operate guarantees by looping.
    assert not avail_in, "not all input consumed by deflate"
    return data
def decompress(stream, data, flush=Z_SYNC_FLUSH, max_length=sys.maxint):
    """
    Feed more data into an inflate stream.  Returns a tuple (string,
    finished, unused_data_length).  The string contains (a part of) the
    decompressed data.  If flush != Z_NO_FLUSH, this also flushes the
    output data; see zlib.h or the documentation of the zlib module for
    the possible values of 'flush'.

    The 'string' is never longer than 'max_length'.  The
    'unused_data_length' is the number of unprocessed input characters,
    either because they are after the end of the compressed stream or
    because processing it would cause the 'max_length' to be exceeded.
    """
    # Warning, reentrant calls to the zlib with a given stream can cause it
    # to crash.  The caller of rpython.rlib.rzlib should use locks if needed.

    # _operate() does not support the Z_FINISH method of decompressing.
    # We can use Z_SYNC_FLUSH instead and manually check that we got to
    # the end of the data.
    if flush == Z_FINISH:
        flush = Z_SYNC_FLUSH
        should_finish = True
    else:
        should_finish = False
    while_doing = "while decompressing data"
    data, err, avail_in = _operate(stream, data, flush, max_length, _inflate,
                                   while_doing)
    if should_finish:
        # detect incomplete input: a final inflate(Z_FINISH) with no more
        # input must return Z_STREAM_END for a complete stream.
        rffi.setintfield(stream, 'c_avail_in', 0)
        err = _inflate(stream, Z_FINISH)
        if err < 0:
            raise RZlibError.fromstream(stream, err, while_doing)
    finished = (err == Z_STREAM_END)
    return data, finished, avail_in
def _operate(stream, data, flush, max_length, cfunc, while_doing):
    """Common code for compress() and decompress().

    *cfunc* is either _deflate or _inflate; *while_doing* is only used in
    error messages.  Returns (output_string, last_error_code,
    unconsumed_input_length).
    """
    # Prepare the input buffer for the stream
    assert data is not None
    with rffi.scoped_nonmovingbuffer(data) as inbuf:
        stream.c_next_in = rffi.cast(Bytefp, inbuf)
        end_inbuf = rffi.ptradd(stream.c_next_in, len(data))

        # Prepare the output buffer
        with lltype.scoped_alloc(rffi.CCHARP.TO, OUTPUT_BUFFER_SIZE) as outbuf:
            # Strategy: we call deflate() to get as much output data as fits in
            # the buffer, then accumulate all output into a StringBuffer
            # 'result'.
            result = StringBuilder()

            while True:
                # Feed at most INPUT_BUFFER_MAX bytes per C call so the
                # avail_in field (a C uInt) cannot overflow.
                avail_in = ptrdiff(end_inbuf, stream.c_next_in)
                if avail_in > INPUT_BUFFER_MAX:
                    avail_in = INPUT_BUFFER_MAX
                rffi.setintfield(stream, 'c_avail_in', avail_in)
                stream.c_next_out = rffi.cast(Bytefp, outbuf)
                bufsize = OUTPUT_BUFFER_SIZE
                if max_length < bufsize:
                    if max_length <= 0:
                        # Output budget exhausted: stop with a benign code.
                        err = Z_OK
                        break
                    bufsize = max_length
                max_length -= bufsize
                rffi.setintfield(stream, 'c_avail_out', bufsize)
                err = cfunc(stream, flush)
                if err == Z_OK or err == Z_STREAM_END:
                    # accumulate data into 'result'
                    avail_out = rffi.cast(lltype.Signed, stream.c_avail_out)
                    result.append_charpsize(outbuf, bufsize - avail_out)
                    # if the output buffer is full, there might be more data
                    # so we need to try again.  Otherwise, we're done.
                    if avail_out > 0:
                        break
                    # We're also done if we got a Z_STREAM_END (which should
                    # only occur when flush == Z_FINISH).
                    if err == Z_STREAM_END:
                        break
                    else:
                        continue
                elif err == Z_BUF_ERROR:
                    avail_out = rffi.cast(lltype.Signed, stream.c_avail_out)
                    # When compressing, we will only get Z_BUF_ERROR if
                    # the output buffer was full but there wasn't more
                    # output when we tried again, so it is not an error
                    # condition.
                    if avail_out == bufsize:
                        break

                # fallback case: report this error
                raise RZlibError.fromstream(stream, err, while_doing)

            # When decompressing, if the compressed stream of data was
            # truncated, then the zlib simply returns Z_OK and waits for
            # more.  If it is complete it returns Z_STREAM_END.
            avail_in = ptrdiff(end_inbuf, stream.c_next_in)
            return (result.build(), err, avail_in)

def ptrdiff(p, q):
    # Pointer subtraction done in unsigned space, then reinterpreted as a
    # signed machine word.
    x = rffi.cast(lltype.Unsigned, p) - rffi.cast(lltype.Unsigned, q)
    return rffi.cast(lltype.Signed, x)
| [
"mssun@mesalock-linux.org"
] | mssun@mesalock-linux.org |
7297fcfd6947026a80a94e2d7b7df9700597adbe | df1ce92b397080896848c02d6e0300be7191f201 | /build/vision_msgs/cmake/vision_msgs-genmsg-context.py | 0827fdd31f5c0f97216f5d96d6c9c4bb5e4e978a | [] | no_license | predator4hack/ROS-projects | 4310f43292f3d41f85588041969a480041f74e22 | 30ef277c11ce210bc5d5724a95960e7d82674277 | refs/heads/master | 2023-03-27T23:06:25.688732 | 2021-03-17T20:43:16 | 2021-03-17T20:43:16 | 348,831,974 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,432 | py | # generated from genmsg/cmake/pkg-genmsg.context.in
messages_str = "/home/chandan/catkin_ws/src/vision_msgs/msg/BoundingBox2D.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/BoundingBox3D.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/BoundingBox3DArray.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/Classification2D.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/Classification3D.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/Detection2DArray.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/Detection2D.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/Detection3DArray.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/Detection3D.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/ObjectHypothesis.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/ObjectHypothesisWithPose.msg;/home/chandan/catkin_ws/src/vision_msgs/msg/VisionInfo.msg"
services_str = ""
pkg_name = "vision_msgs"
dependencies_str = "std_msgs;sensor_msgs;geometry_msgs"
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
dep_include_paths_str = "vision_msgs;/home/chandan/catkin_ws/src/vision_msgs/msg;std_msgs;/opt/ros/melodic/share/std_msgs/cmake/../msg;sensor_msgs;/opt/ros/melodic/share/sensor_msgs/cmake/../msg;geometry_msgs;/opt/ros/melodic/share/geometry_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python2"
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/melodic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
| [
"chandankum2440@gmail.com"
] | chandankum2440@gmail.com |
4dcff4ef9ca464f45f83c30736510b8ad3878da4 | 0e0ce88c886370df9af51855115c99dfc003e5da | /2019/04_Django_Curso_Gileno/venv/bin/django-admin | 7ee5fca0670f9c1080996902fba9af6a8a12d14c | [] | no_license | miguelzeph/Python_Git | ed80db9a4f060836203df8cc2e42e003b0df6afd | 79d3b00236e7f4194d2a23fb016b43e9d09311e6 | refs/heads/master | 2021-07-08T18:43:45.855023 | 2021-04-01T14:12:23 | 2021-04-01T14:12:23 | 232,007,012 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 315 | #!/home/miguel/PycharmProjects/Django_Curso_Gileno/venv/bin/python
# -*- coding: utf-8 -*-
# Console-script shim generated by pip/setuptools for the "django-admin"
# entry point: it normalizes argv[0] (stripping the -script.pyw / .exe
# suffixes used on Windows) and delegates to Django's command-line
# dispatcher.
import re
import sys

from django.core.management import execute_from_command_line

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(execute_from_command_line())
| [
"miguel.junior.mat@hotmail.com"
] | miguel.junior.mat@hotmail.com | |
8b8dce1c9245308e1dfb6e30ce3d4510f9f9fcd4 | 64bf39b96a014b5d3f69b3311430185c64a7ff0e | /intro-ansible/venv3/lib/python3.8/site-packages/ansible/module_utils/compat/importlib.py | eee0ddf7bcfc9b2fe9f587bbecf72611eb19c6a3 | [
"MIT"
] | permissive | SimonFangCisco/dne-dna-code | 7072eba7da0389e37507b7a2aa5f7d0c0735a220 | 2ea7d4f00212f502bc684ac257371ada73da1ca9 | refs/heads/master | 2023-03-10T23:10:31.392558 | 2021-02-25T15:04:36 | 2021-02-25T15:04:36 | 342,274,373 | 0 | 0 | MIT | 2021-02-25T14:39:22 | 2021-02-25T14:39:22 | null | UTF-8 | Python | false | false | 580 | py | # Copyright (c) 2020 Matt Martz <matt@sivel.net>
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
try:
from importlib import import_module
except ImportError:
# importlib.import_module returns the tail
# whereas __import__ returns the head
# compat to work like importlib.import_module
def import_module(name):
__import__(name)
return sys.modules[name]
| [
"sifang@cisco.com"
] | sifang@cisco.com |
d22ae5e1ce4798f74ede4ea8c8c34e9499269554 | ab8187626aa68c1f92301db78e9f8b0c4b088554 | /Greedy/1561_h.py | 249aef34c00cf89b7823a2d1498b920884dffe79 | [] | no_license | khj68/algorithm | 2818f87671019f9f2305ec761fd226e737f12025 | efebe142b9b52e966e0436be3b87fb32b4f7ea32 | refs/heads/master | 2023-04-25T02:33:13.403943 | 2021-05-04T03:09:38 | 2021-05-04T03:09:38 | 287,733,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 347 | py | class Solution:
def maxCoins(self, piles: List[int]) -> int:
if len(piles) == 3:
return piles[1]
# print(len(piles)%2)
# print(list((i, a) for i, a in enumerate(sorted(piles)[len(piles)//3:]) if i % 2 == len(piles)%2))
return sum(a for i, a in enumerate(sorted(piles)[len(piles)//3:]) if i % 2 == 0) | [
"maga40@naver.com"
] | maga40@naver.com |
ee5061ff77da5e4d545b401ec0ba8bd3739049c2 | 4f74e6d72b98cd1da2190313e4a7eb9d342cc93d | /organizations_ext/migrations/0003_auto_20200516_1724.py | 57927d172cf2467165b5a315fade75348b42c88c | [
"BSD-3-Clause",
"MIT"
] | permissive | adamgogogo/glitchtip-backend | ef0c529b71d5a4632a235b40a10e0b428a1cee3a | ee71d1b732d92868189d520aa111c09b116b7b22 | refs/heads/master | 2023-02-01T23:10:53.734450 | 2020-12-19T19:32:10 | 2020-12-19T19:32:10 | 323,588,534 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 477 | py | # Generated by Django 3.0.6 on 2020-05-16 17:24
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: alters organization.is_accepting_events (introduced
    # in 0002) to attach operator-facing help_text; default stays True.
    dependencies = [
        ('organizations_ext', '0002_organization_is_accepting_events'),
    ]

    operations = [
        migrations.AlterField(
            model_name='organization',
            name='is_accepting_events',
            field=models.BooleanField(default=True, help_text='Used for throttling at org level'),
        ),
    ]
| [
"david@burkesoftware.com"
] | david@burkesoftware.com |
8ffa10b8e5334346b910ca9a0e0357a52d68c971 | a2f6e449e6ec6bf54dda5e4bef82ba75e7af262c | /venv/Lib/site-packages/pandas/core/common.py | 395464a24bdf2b741b5354fd0af8e443c99af55b | [] | no_license | mylonabusiness28/Final-Year-Project- | e4b79ccce6c19a371cac63c7a4ff431d6e26e38f | 68455795be7902b4032ee1f145258232212cc639 | refs/heads/main | 2023-07-08T21:43:49.300370 | 2021-06-05T12:34:16 | 2021-06-05T12:34:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:b63cf39ef1bd3fc626c028f158f4a70e0c602beae099dd24576a97afa69d6250
size 14109
| [
"chuksajeh1@gmail.com"
] | chuksajeh1@gmail.com |
4af6ec7be882335ee26e155579371264b81c08ac | 5c2f520dde0cf8077facc0fcd9a92bc1a96d168b | /test/tests/75.py | 9893fe565375817373abc338de7167d39ba5b529 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"Python-2.0"
] | permissive | nagyist/pyston | b613337a030ef21a3f03708febebe76cedf34c61 | 14ba2e6e6fb5c7316f66ccca86e6c6a836d96cab | refs/heads/master | 2022-12-24T03:56:12.885732 | 2015-02-25T11:11:08 | 2015-02-25T11:28:13 | 31,314,596 | 0 | 0 | NOASSERTION | 2022-12-17T08:15:11 | 2015-02-25T13:24:41 | Python | UTF-8 | Python | false | false | 53 | py | def f():
    # `str = 0` in the dead branch below still makes `str` a local name
    # for the whole function, so the `print str` line fails at runtime
    # (UnboundLocalError in CPython) instead of printing the builtin --
    # this scoping behavior is what the test exercises.  Do not "fix" it.
    if 0:
        str = 0
    print str
f()
| [
"kevmod@gmail.com"
] | kevmod@gmail.com |
fa68752af1398fdb45ba073bfa4cb5cbff503c27 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02576/s333433925.py | 598e6062e70a9510941cdb0d6beeb5f5be5bc532 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 107 | py | n, x, t = map(int, input().split(' '))
count = int(n / x)
if(n % x != 0):
count += 1
print(count * t) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
a8897543440a99c7f65add70fcdc2ec5e1552753 | 829fbf5717b902f48ca6e748a51cf4febb59451e | /test/functional/p2p_pos_fakestake.py | ada5258fb6080a031ddfe53a533cba6cbf4f6ad7 | [
"MIT"
] | permissive | suprnurd/KuboCoin | bb03614814c2112f7745a15e9774639e280f2aff | d77bae8cc5fe4efdbd16a384554b7829a704291f | refs/heads/master | 2022-11-06T09:53:34.348555 | 2020-05-24T22:27:08 | 2020-05-24T22:27:08 | 273,895,805 | 0 | 0 | MIT | 2020-06-21T12:02:38 | 2020-06-21T12:02:37 | null | UTF-8 | Python | false | false | 2,299 | py | #!/usr/bin/env python3
# Copyright (c) 2019 The KuboCoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Covers the scenario of a PoS block where the coinstake input prevout is already spent.
'''
from time import sleep
from fake_stake.base_test import kubocoin_FakeStakeTest
class PoSFakeStake(kubocoin_FakeStakeTest):
    """Functional test: submit PoS blocks whose coinstake input prevout is
    already spent and verify the node rejects all of them."""

    def run_test(self):
        # Scenario description shown by the test framework.
        self.description = "Covers the scenario of a PoS block where the coinstake input prevout is already spent."
        self.init_test()

        INITAL_MINED_BLOCKS = 150 # First mined blocks (rewards collected to spend)
        MORE_MINED_BLOCKS = 100 # Blocks mined after spending
        STAKE_AMPL_ROUNDS = 2 # Rounds of stake amplification
        self.NUM_BLOCKS = 3 # Number of spammed blocks

        # 1) Starting mining blocks
        self.log.info("Mining %d blocks.." % INITAL_MINED_BLOCKS)
        self.node.generate(INITAL_MINED_BLOCKS)

        # 2) Collect the possible prevouts
        self.log.info("Collecting all unspent coins which we generated from mining...")

        # 3) Create 10 addresses - Do the stake amplification
        # (splits the mined rewards across many UTXOs so there are plenty
        # of candidate prevouts to build fake stakes from)
        self.log.info("Performing the stake amplification (%d rounds)..." % STAKE_AMPL_ROUNDS)
        utxo_list = self.node.listunspent()
        address_list = []
        for i in range(10):
            address_list.append(self.node.getnewaddress())
        utxo_list = self.stake_amplification(utxo_list, STAKE_AMPL_ROUNDS, address_list)
        self.log.info("Done. Utxo list has %d elements." % len(utxo_list))
        sleep(2)

        # 4) Start mining again so that spent prevouts get confirmed in a block.
        self.log.info("Mining %d more blocks..." % MORE_MINED_BLOCKS)
        self.node.generate(MORE_MINED_BLOCKS)
        sleep(2)

        # 5) Create "Fake Stake" blocks and send them.
        # test_spam() returns a list of error messages; any entry means a
        # fake block was accepted and the test must fail.
        self.log.info("Creating Fake stake blocks")
        err_msgs = self.test_spam("Main", utxo_list)
        if not len(err_msgs) == 0:
            self.log.error("result: " + " | ".join(err_msgs))
            raise AssertionError("TEST FAILED")

        self.log.info("%s PASSED" % self.__class__.__name__)
# Entry point: the framework's main() drives node setup and run_test().
if __name__ == '__main__':
    PoSFakeStake().main()
| [
"ultrapoolcom@gmail.com"
] | ultrapoolcom@gmail.com |
d48cd1dfcd8d6ec6180938eff41489abb81ac7f5 | 9f1b8a1ada57198e2a06d88ddcdc0eda0c683df7 | /submission - lab9/set 2/AMANDA C NAGLE_19369_assignsubmission_file_lab9/lab9/F.py | d3445accd6f6b54d58b32508c8790ff3da0e9d33 | [] | no_license | sendurr/spring-grading | 90dfdced6327ddfb5c311ae8f42ae1a582768b63 | 2cc280ee3e0fba02e95b6e9f45ad7e13bc7fad54 | refs/heads/master | 2020-04-15T17:42:10.781884 | 2016-08-29T20:38:17 | 2016-08-29T20:38:17 | 50,084,068 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 203 | py | from math import *
class F:
    """Exponentially damped sine wave: f(x) = e**(-a*x) * sin(w*x)."""

    def __init__(self, a, w):
        """Remember the damping coefficient ``a`` and angular frequency ``w``."""
        self.a = a
        self.w = w

    def value(self, x):
        """Evaluate the damped sine at ``x``."""
        damping = e ** (-self.a * x)
        return damping * sin(self.w * x)
f= F(a=1.0, w=0.1)
print f.value(x=pi)
f.a=2
print f.value(pi)
| [
"sendurr@hotmail.com"
] | sendurr@hotmail.com |
9f416affbe20c12f10bd3ac826e901fa23ecbeeb | 96a34a048c783a75736bf0ec775df22142f9ee53 | /packages/postgres-database/src/simcore_postgres_database/migration/versions/0208f6b32f32_adds_version_control_tables.py | d87b9322a22d297d87280f446fbcade581643aa2 | [
"MIT"
] | permissive | ITISFoundation/osparc-simcore | 77e5b9f7eb549c907f6ba2abb14862154cc7bb66 | f4c57ffc7b494ac06a2692cb5539d3acfd3d1d63 | refs/heads/master | 2023-08-31T17:39:48.466163 | 2023-08-31T15:03:56 | 2023-08-31T15:03:56 | 118,596,920 | 39 | 29 | MIT | 2023-09-14T20:23:09 | 2018-01-23T10:48:05 | Python | UTF-8 | Python | false | false | 6,043 | py | """Adds version control tables
Revision ID: 0208f6b32f32
Revises: d10c53a5bea6
Create Date: 2021-09-06 14:19:42.599645+00:00
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "0208f6b32f32"  # unique id of this migration
down_revision = "d10c53a5bea6"  # migration this one is applied on top of
branch_labels = None  # no named alembic branches
depends_on = None  # no cross-branch dependencies
def upgrade():
    """Create the project version-control tables.

    Tables are created parents-first (snapshots, repos, commits, branches,
    tags, heads) so that every foreign key can resolve at creation time.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Content-addressed project snapshots, keyed by checksum.
    op.create_table(
        "projects_vc_snapshots",
        sa.Column("checksum", sa.String(), nullable=False),
        sa.Column(
            "content",
            postgresql.JSONB(astext_type=sa.Text()),
            server_default=sa.text("'{}'::jsonb"),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("checksum"),
    )
    # One VC repo per project (project_uuid is unique).
    op.create_table(
        "projects_vc_repos",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("project_uuid", sa.String(), nullable=False),
        sa.Column("project_checksum", sa.String(), nullable=True),
        sa.Column(
            "created", sa.DateTime(), server_default=sa.text("now()"), nullable=False
        ),
        sa.Column(
            "modified", sa.DateTime(), server_default=sa.text("now()"), nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["project_uuid"],
            ["projects.uuid"],
            name="fk_projects_vc_repos_project_uuid",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("project_uuid"),
    )
    # Commits form a tree via parent_commit_id and point at a snapshot;
    # snapshots referenced by a commit cannot be deleted (RESTRICT).
    op.create_table(
        "projects_vc_commits",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("repo_id", sa.BigInteger(), nullable=False),
        sa.Column("parent_commit_id", sa.BigInteger(), nullable=True),
        sa.Column("snapshot_checksum", sa.String(), nullable=False),
        sa.Column("message", sa.String(), nullable=True),
        sa.Column(
            "created", sa.DateTime(), server_default=sa.text("now()"), nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["parent_commit_id"],
            ["projects_vc_commits.id"],
            name="fk_projects_vc_commits_parent_commit_id",
            onupdate="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["repo_id"],
            ["projects_vc_repos.id"],
            name="fk_projects_vc_commits_repo_id",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["snapshot_checksum"],
            ["projects_vc_snapshots.checksum"],
            name="fk_projects_vc_commits_snapshot_checksum",
            ondelete="RESTRICT",
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Branch names are unique per repo; a branch's head commit cannot be
    # deleted while the branch points at it (RESTRICT).
    op.create_table(
        "projects_vc_branches",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("repo_id", sa.BigInteger(), nullable=False),
        sa.Column("head_commit_id", sa.BigInteger(), nullable=True),
        sa.Column("name", sa.String(), nullable=True),
        sa.Column(
            "created", sa.DateTime(), server_default=sa.text("now()"), nullable=False
        ),
        sa.Column(
            "modified", sa.DateTime(), server_default=sa.text("now()"), nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["head_commit_id"],
            ["projects_vc_commits.id"],
            name="fk_projects_vc_branches_head_commit_id",
            ondelete="RESTRICT",
        ),
        sa.ForeignKeyConstraint(
            ["repo_id"],
            ["projects_vc_repos.id"],
            name="projects_vc_branches_repo_id",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name", "repo_id", name="repo_branch_uniqueness"),
    )
    # Tag names are unique per repo; tags disappear with their commit/repo.
    op.create_table(
        "projects_vc_tags",
        sa.Column("id", sa.BigInteger(), nullable=False),
        sa.Column("repo_id", sa.BigInteger(), nullable=False),
        sa.Column("commit_id", sa.BigInteger(), nullable=False),
        sa.Column("name", sa.String(), nullable=True),
        sa.Column("message", sa.String(), nullable=True),
        sa.Column("hidden", sa.Boolean(), nullable=True),
        sa.Column(
            "created", sa.DateTime(), server_default=sa.text("now()"), nullable=False
        ),
        sa.Column(
            "modified", sa.DateTime(), server_default=sa.text("now()"), nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["commit_id"],
            ["projects_vc_commits.id"],
            name="fk_projects_vc_tags_commit_id",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["repo_id"],
            ["projects_vc_repos.id"],
            name="fk_projects_vc_tags_repo_id",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("name", "repo_id", name="repo_tag_uniqueness"),
    )
    # One HEAD row per repo pointing at the currently checked-out branch.
    op.create_table(
        "projects_vc_heads",
        sa.Column("repo_id", sa.BigInteger(), nullable=False),
        sa.Column("head_branch_id", sa.BigInteger(), nullable=True),
        sa.Column(
            "modified", sa.DateTime(), server_default=sa.text("now()"), nullable=False
        ),
        sa.ForeignKeyConstraint(
            ["head_branch_id"],
            ["projects_vc_branches.id"],
            name="fk_projects_vc_heads_head_branch_id",
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(
            ["repo_id"],
            ["projects_vc_repos.id"],
            name="projects_vc_branches_repo_id",
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("repo_id"),
        sa.UniqueConstraint("head_branch_id"),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop all version-control tables created by upgrade().

    Tables are removed children-first so no foreign-key constraint ever
    references a table that is already gone.
    """
    for table_name in (
        "projects_vc_heads",
        "projects_vc_tags",
        "projects_vc_branches",
        "projects_vc_commits",
        "projects_vc_repos",
        "projects_vc_snapshots",
    ):
        op.drop_table(table_name)
| [
"noreply@github.com"
] | ITISFoundation.noreply@github.com |
41df34a8c957aeb8225ae2d002e455b6999da7bb | c9ab605cdd2dbf92c9de05768ade0ecf1718be02 | /SW_Expert_Arcademy/Programming_Beginner/2-20.py | 174bde1133fa02c170f9ee6f159b87cef6a84d92 | [] | no_license | PyeongGang-Kim/TIL | 42d69308cf99d2e07644b51d7636e1b64551a697 | 8711501d131ee7d78fdaac544dda2008adf820a1 | refs/heads/master | 2023-01-12T21:10:38.027946 | 2021-10-23T07:19:48 | 2021-10-23T07:19:48 | 195,937,990 | 10 | 1 | null | 2023-01-07T11:25:30 | 2019-07-09T05:22:45 | HTML | UTF-8 | Python | false | false | 56 | py | print([i for i in [5, 6, 77, 45, 22, 12, 24] if i%2!=0]) | [
"pyeonggangkim@gmail.com"
] | pyeonggangkim@gmail.com |
cb7273d729baf1a6dfb7f10dcb88a7cee2620eb4 | bf000a932237a790770227e48a529a31d167e94e | /flash_es.py | 1a7d9a6224c607194385a037b0a23d66fe7afd90 | [] | no_license | o7s8r6/machineLearning | bf38b91d00def60e554384853b6f6b5640bc0929 | 5842dad6a1b12e0a02b3a8ecff0f610dd0fca0cb | refs/heads/master | 2021-05-27T15:56:57.559788 | 2014-07-22T05:57:44 | 2014-07-22T05:57:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 635 | py | import flask
import rawes
import json
from flask import Flask
# Search term used for the Elasticsearch wildcard query below.
query = "python"

# NOTE(review): the Elasticsearch query runs at import time; the web app
# only ever serves this single, pre-fetched result.
es = rawes.Elastic('localhost:9200')
response = es.get('dns/nyc/_search', data={
    "query":{
        "bool":{
            "must":[{
                "wildcard":{
                    "answer":query
                }}],
            "must_not":[],
            "should":[]}},
    "from":0,
    "size":50,
    "sort":[],
    "facets":{}
    })
# Fix: dict.values() is not subscriptable on Python 3 (dict_values view),
# so materialize it as a list before indexing with back[3] below.
# NOTE(review): relying on dict value ordering here is fragile — the field
# at index 3 depends on the _source key order; verify against the mapping.
back = list(response['hits']['hits'][0]['_source'].values())

app = Flask(__name__)


@app.route('/')
def hello_world():
    # Serve the fourth field of the first matching document.
    return back[3]

if __name__ == '__main__':
    app.run()
| [
"ohprecio@gmail.com"
] | ohprecio@gmail.com |
ebe046ea0714150417fd9ff3a977d7327a8ab8d1 | a2fe6e6d6e91331efda6a9647f40d2c1ec5044e9 | /backend/util/response/error/__init__.py | 40e20635908ac703a44041c856bb8bfe54e618b3 | [
"MIT"
] | permissive | willrp/willbuyer | a1b66a805e8f5d1f00cbe6bf48baff9fe334782d | 069836a91c777ede6f62a16daa9f26e555d66bcb | refs/heads/master | 2022-12-13T08:52:29.408246 | 2021-07-20T04:40:24 | 2021-07-20T04:40:24 | 216,706,110 | 5 | 0 | MIT | 2022-12-08T03:17:44 | 2019-10-22T02:26:29 | Python | UTF-8 | Python | false | false | 80 | py | from .error_response import ErrorResponse
from .error_schema import ErrorSchema
| [
"willrogerpereira@hotmail.com"
] | willrogerpereira@hotmail.com |
f72c6047f22259d6bc991d05e3e8eb0bf3bc6385 | 677a3a76807d8585f65ec0e0839bb3a8b833e2fb | /2.Classes and Objects/Lab/Exercise/To-Do List/task.py | e16400b24b6a440e72139113289a663db3e02166 | [] | no_license | negative0101/Python-OOP | 0d531a1b72beb3e58f9486df88d457ecd59be10e | b5825e66a909c947a46458712d683e8a38035912 | refs/heads/main | 2023-07-14T11:27:34.841594 | 2021-08-20T08:49:04 | 2021-08-20T08:49:04 | 381,475,313 | 0 | 0 | null | 2021-07-25T19:52:38 | 2021-06-29T19:26:42 | Python | UTF-8 | Python | false | false | 984 | py |
class Task:
def __init__(self, name, due_date):
self.name = name
self.due_date = due_date
self.comments = []
self.completed = False
def change_name(self, new_name):
if new_name == self.name:
return 'Name cannot be the same.'
self.name = new_name
return self.name
def change_due_date(self, new_date):
if new_date == self.due_date:
return 'Date cannot be the same.'
self.due_date = new_date
return self.due_date
def add_comment(self, comment):
self.comments.append(comment)
def edit_comment(self, comment_number, new_comment):
try:
self.comments[comment_number] = new_comment
return f'{", ".join([str(i) for i in self.comments])}'
except IndexError:
return f'Cannot find comment.'
def details(self):
return f"Name: {self.name} - Due date: {self.due_date}"
| [
"noreply@github.com"
] | negative0101.noreply@github.com |
26e2bb321b86356992457a81f13187c17629437e | 2aace9bb170363e181eb7520e93def25f38dbe5c | /build/idea-sandbox/system/python_stubs/-57053121/ruamel_yaml/ext/_ruamel_yaml/SerializerError.py | e83d1c3946c75b93d44acb32132de12201751af3 | [] | no_license | qkpqkp/PlagCheck | 13cb66fd2b2caa2451690bb72a2634bdaa07f1e6 | d229904674a5a6e46738179c7494488ca930045e | refs/heads/master | 2023-05-28T15:06:08.723143 | 2021-06-09T05:36:34 | 2021-06-09T05:36:34 | 375,235,940 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 605 | py | # encoding: utf-8
# module ruamel_yaml.ext._ruamel_yaml
# from C:\Users\Doly\Anaconda3\lib\site-packages\ruamel_yaml\ext\_ruamel_yaml.cp37-win_amd64.pyd
# by generator 1.147
# no doc
# imports
import builtins as __builtins__ # <module 'builtins' (built-in)>
import ruamel_yaml.error as __ruamel_yaml_error
import ruamel_yaml.events as __ruamel_yaml_events
import ruamel_yaml.nodes as __ruamel_yaml_nodes
import ruamel_yaml.tokens as __ruamel_yaml_tokens
class SerializerError(__ruamel_yaml_error.YAMLError):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
| [
"qinkunpeng2015@163.com"
] | qinkunpeng2015@163.com |
9d8f8fdd93316ade60c63318be40b169f5a7f2f8 | 8cf633e92a0671c8201268620a0372f250c8aeb2 | /35.搜索插入位置.py | 064363a1fe9b0bcd0030c245e5a0fdad177a2b06 | [
"Unlicense"
] | permissive | SprintGhost/LeetCode | 76da5c785009d474542e5f2cdac275675b8e60b8 | cdf1a86c83f2daedf674a871c4161da7e8fad17c | refs/heads/develop | 2021-06-06T04:04:28.883692 | 2021-01-01T14:09:26 | 2021-01-01T14:09:26 | 230,635,046 | 0 | 0 | Unlicense | 2020-12-11T14:55:36 | 2019-12-28T16:34:39 | Python | UTF-8 | Python | false | false | 2,189 | py | #
# @lc app=leetcode.cn id=35 lang=python3
#
# [35] 搜索插入位置
#
# Accepted
# 62/62 cases passed (60 ms)
# Your runtime beats 78.63 % of python3 submissions
# Your memory usage beats 95.67 % of python3 submissions (13.5 MB)
# @lc code=start
class Solution:
    """LeetCode 35: search insert position in a sorted list."""

    def searchInsert(self, nums, target: int) -> int:
        """Return the index of ``target`` in the ascending list ``nums``,
        or the index at which it should be inserted to keep the order.

        Replaces the original hand-rolled mid/start/end bookkeeping (with
        its hard-to-verify post-loop special cases) by the standard
        library's lower-bound binary search.  O(log n); returns
        ``len(nums)`` when target is greater than every element and 0 for
        an empty list.
        """
        import bisect  # local import keeps the snippet self-contained
        return bisect.bisect_left(nums, target)
# A = Solution()
# print (A.searchInsert([3,4,7,9,10],8))
# print (A.searchInsert([1,3,5,6],7)
# Accepted
# 62/62 cases passed (52 ms)
# Your runtime beats 95.91 % of python3 submissions
# Your memory usage beats 97.58 % of python3 submissions (13.5 MB)
class Solution:
    """LeetCode 35 (alternative): explicit lower-bound binary search.

    Fixes the original, which annotated ``nums: List[int]`` without ever
    importing ``typing.List`` (a NameError at class-definition time) and
    used a bare ``assert`` as commentary (stripped under ``python -O``).
    """

    def searchInsert(self, nums: list, target: int) -> int:
        """Return the first index i with nums[i] >= target.

        ``nums`` must be sorted ascending; the result may equal
        ``len(nums)`` when target is larger than every element.  The
        empty-list case falls out of the loop naturally.  O(log n).
        """
        # Half-open search interval [left, right); right starts one past
        # the end so that inserting after the last element is expressible.
        left, right = 0, len(nums)
        while left < right:
            mid = left + (right - left) // 2
            if nums[mid] < target:
                # Target lies strictly to the right of mid.
                left = mid + 1
            else:
                # nums[mid] >= target: mid stays a candidate answer.
                right = mid
        return left
# @lc code=end
| [
"864047435@qq.com"
] | 864047435@qq.com |
5193ea9938e1348f0d38402741fadf0dcc544c3b | fbbe424559f64e9a94116a07eaaa555a01b0a7bb | /LightGBM_sklearn_scipy_numpy/source/sklearn/manifold/tests/test_mds.py | 7b432dea1370e22b8590a106c29161e9b1ee7eea | [
"MIT"
] | permissive | ryfeus/lambda-packs | 6544adb4dec19b8e71d75c24d8ed789b785b0369 | cabf6e4f1970dc14302f87414f170de19944bac2 | refs/heads/master | 2022-12-07T16:18:52.475504 | 2022-11-29T13:35:35 | 2022-11-29T13:35:35 | 71,386,735 | 1,283 | 263 | MIT | 2022-11-26T05:02:14 | 2016-10-19T18:22:39 | Python | UTF-8 | Python | false | false | 1,873 | py | import numpy as np
from numpy.testing import assert_array_almost_equal
from sklearn.manifold import mds
from sklearn.utils.testing import assert_raises
def test_smacof():
    # test metric smacof using the data of "Modern Multidimensional Scaling",
    # Borg & Groenen, p 154
    # sim: 4x4 symmetric dissimilarity matrix; Z: the book's initial
    # configuration; X_true: expected embedding after one iteration.
    sim = np.array([[0, 5, 3, 4],
                    [5, 0, 2, 2],
                    [3, 2, 0, 1],
                    [4, 2, 1, 0]])
    Z = np.array([[-.266, -.539],
                  [.451, .252],
                  [.016, -.238],
                  [-.200, .524]])
    X, _ = mds.smacof(sim, init=Z, n_components=2, max_iter=1, n_init=1)
    X_true = np.array([[-1.415, -2.471],
                       [1.633, 1.107],
                       [.249, -.067],
                       [-.468, 1.431]])
    # Reference values are given to 3 decimals in the book.
    assert_array_almost_equal(X, X_true, decimal=3)
def test_smacof_error():
    """smacof must raise ValueError for malformed input."""
    # Not symmetric similarity matrix:
    asymmetric = np.array([[0, 5, 9, 4],
                           [5, 0, 2, 2],
                           [3, 2, 0, 1],
                           [4, 2, 1, 0]])
    assert_raises(ValueError, mds.smacof, asymmetric)

    # Not squared similarity matrix:
    non_square = np.array([[0, 5, 9, 4],
                           [5, 0, 2, 2],
                           [4, 2, 1, 0]])
    assert_raises(ValueError, mds.smacof, non_square)

    # init not None and not correct format:
    sim = np.array([[0, 5, 3, 4],
                    [5, 0, 2, 2],
                    [3, 2, 0, 1],
                    [4, 2, 1, 0]])
    bad_init = np.array([[-.266, -.539],
                         [.016, -.238],
                         [-.200, .524]])
    assert_raises(ValueError, mds.smacof, sim, init=bad_init, n_init=1)
def test_MDS():
    """Smoke test: non-metric MDS fits a precomputed dissimilarity matrix."""
    dissimilarities = np.array([[0, 5, 3, 4],
                                [5, 0, 2, 2],
                                [3, 2, 0, 1],
                                [4, 2, 1, 0]])
    estimator = mds.MDS(metric=False, n_jobs=3, dissimilarity="precomputed")
    estimator.fit(dissimilarities)
| [
"ryfeus@gmail.com"
] | ryfeus@gmail.com |
502738773bd47c5de6fe95d9b3588481c2cd2f96 | f4afb11d9d6b8f391a270fb9309285d0fa9acd1a | /push-branches.py | ec4387ca14bcce4bec8d48849b9ff6242339ef5a | [
"CC-BY-4.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Vasilov345/book | 2fdf886ab1dfe7c799d3797c8ed4d0e72bc4e5fb | bed2853f0ceaf3f868832ece860d566a7b43f870 | refs/heads/master | 2020-08-29T13:50:17.804015 | 2019-10-28T09:53:28 | 2019-10-28T09:53:28 | 218,051,897 | 0 | 1 | NOASSERTION | 2019-10-28T13:23:42 | 2019-10-28T13:23:41 | null | UTF-8 | Python | false | false | 704 | py | #!/usr/bin/env python
import subprocess
from pathlib import Path
from chapters import CHAPTERS, NO_EXERCISE
# Every push targets the "origin" remote of the book's code checkout,
# which lives in ./code next to this script.
_code_repo = Path(__file__).parent / 'code'


def _push(branch):
    """Force-push (with lease) one branch of the code repository."""
    subprocess.run(
        ['git', 'push', '--force-with-lease', 'origin', branch],
        cwd=_code_repo
    )


for chapter in CHAPTERS:
    print('pushing', chapter, end=': ')
    _push(chapter)
    if chapter in NO_EXERCISE:
        continue
    exercise_branch = f'{chapter}_exercise'
    print('pushing', exercise_branch)
    _push(exercise_branch)

_push('master')
| [
"hjwp2@cantab.net"
] | hjwp2@cantab.net |
60eeee618441244ab474ad546c351024f2852928 | 93579502c2a51e39f8a55fbb30e5b6696c05c785 | /tests/hwsim/test_sae.py | 8824ad4c6f550a5f4d232d508c54f9b5c3074deb | [
"BSD-3-Clause"
] | permissive | pstratem/hostapd | 14950770d0202df2de3724b523e7a10412072633 | c404cd8c7d108314ec927b335c4c7b11ceb3bf9a | refs/heads/master | 2020-07-31T04:18:31.387996 | 2019-09-16T15:05:44 | 2019-09-16T15:09:54 | 210,481,241 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75,349 | py | # Test cases for SAE
# Copyright (c) 2013-2016, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
from remotehost import remote_compatible
import binascii
import os
import time
import logging
logger = logging.getLogger()
import socket
import struct
import subprocess
import hwsim_utils
import hostapd
from wpasupplicant import WpaSupplicant
from utils import HwsimSkip, alloc_fail, fail_test, wait_fail_trigger, start_monitor, stop_monitor, radiotap_build
from test_ap_psk import find_wpas_process, read_process_memory, verify_not_present, get_key_locations
@remote_compatible
def test_sae(dev, apdev):
    """SAE with default group"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev[0], params)
    key_mgmt = hapd.get_config()['key_mgmt']
    if key_mgmt.split(' ')[0] != "SAE":
        raise Exception("Unexpected GET_CONFIG(key_mgmt): " + key_mgmt)

    # Empty value clears the sae_groups override -> wpa_supplicant default.
    dev[0].request("SET sae_groups ")
    id = dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                        scan_freq="2412")
    # Group 19 is expected as the default SAE group here.
    if dev[0].get_status_field('sae_group') != '19':
        raise Exception("Expected default SAE group not used")
    bss = dev[0].get_bss(apdev[0]['bssid'])
    if 'flags' not in bss:
        raise Exception("Could not get BSS flags from BSS table")
    if "[WPA2-SAE-CCMP]" not in bss['flags']:
        raise Exception("Unexpected BSS flags: " + bss['flags'])

    # Verify the AP side also reports the negotiated group for the STA.
    res = hapd.request("STA-FIRST")
    if "sae_group=19" not in res.splitlines():
        raise Exception("hostapd STA output did not specify SAE group")
@remote_compatible
def test_sae_password_ecc(dev, apdev):
    """SAE with number of different passwords (ECC)"""
    sta = dev[0]
    if "SAE" not in sta.get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev[0], ap_params)

    # Force ECC group 19 on the station side.
    sta.request("SET sae_groups 19")

    # Rotate through ten distinct passphrases against the same BSS.
    for idx in range(10):
        password = "12345678-%d" % idx
        hapd.set("wpa_passphrase", password)
        sta.connect("test-sae", psk=password, key_mgmt="SAE",
                    scan_freq="2412")
        sta.request("REMOVE_NETWORK all")
        sta.wait_disconnected()
@remote_compatible
def test_sae_password_ffc(dev, apdev):
    """SAE with number of different passwords (FFC)"""
    sta = dev[0]
    if "SAE" not in sta.get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    ap_params['sae_groups'] = '15'
    hapd = hostapd.add_ap(apdev[0], ap_params)

    # Force FFC group 15 on the station side to match the AP.
    sta.request("SET sae_groups 15")

    # Rotate through ten distinct passphrases against the same BSS.
    for idx in range(10):
        password = "12345678-%d" % idx
        hapd.set("wpa_passphrase", password)
        sta.connect("test-sae", psk=password, key_mgmt="SAE",
                    scan_freq="2412")
        sta.request("REMOVE_NETWORK all")
        sta.wait_disconnected()
@remote_compatible
def test_sae_pmksa_caching(dev, apdev):
    """SAE and PMKSA caching"""
    # Shared scenario implemented in run_sae_pmksa_caching() below.
    run_sae_pmksa_caching(dev, apdev)
@remote_compatible
def test_sae_pmksa_caching_pmkid(dev, apdev):
    """SAE and PMKSA caching (PMKID in AssocReq after SAE)"""
    try:
        dev[0].set("sae_pmkid_in_assoc", "1")
        run_sae_pmksa_caching(dev, apdev)
    finally:
        # Restore the default so later test cases are unaffected.
        dev[0].set("sae_pmkid_in_assoc", "0")
def run_sae_pmksa_caching(dev, apdev):
    """Connect with SAE, reconnect, and verify PMKSA caching was used.

    On the second association no SAE exchange should occur (sae_group
    unset) while the AP still reports the SAE AKM (00-0f-ac-8).
    """
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev[0], params)

    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                   scan_freq="2412")
    ev = hapd.wait_event(["AP-STA-CONNECTED"], timeout=5)
    if ev is None:
        raise Exception("No connection event received from hostapd")
    sta0 = hapd.get_sta(dev[0].own_addr())
    if sta0['wpa'] != '2' or sta0['AKMSuiteSelector'] != '00-0f-ac-8':
        raise Exception("SAE STA(0) AKM suite selector reported incorrectly")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    dev[0].request("RECONNECT")
    dev[0].wait_connected(timeout=15, error="Reconnect timed out")
    # No sae_group in STATUS -> PMKSA cache hit, no new SAE exchange.
    if dev[0].get_status_field('sae_group') is not None:
        raise Exception("SAE group claimed to have been used")
    sta0 = hapd.get_sta(dev[0].own_addr())
    if sta0['wpa'] != '2' or sta0['AKMSuiteSelector'] != '00-0f-ac-8':
        raise Exception("SAE STA(0) AKM suite selector reported incorrectly after PMKSA caching")
@remote_compatible
def test_sae_pmksa_caching_disabled(dev, apdev):
    """SAE and PMKSA caching disabled"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    params['disable_pmksa_caching'] = '1'
    hapd = hostapd.add_ap(apdev[0], params)

    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                   scan_freq="2412")
    ev = hapd.wait_event(["AP-STA-CONNECTED"], timeout=5)
    if ev is None:
        raise Exception("No connection event received from hostapd")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    dev[0].request("RECONNECT")
    dev[0].wait_connected(timeout=15, error="Reconnect timed out")
    # With caching disabled on the AP, the reconnect must run a fresh SAE
    # exchange, so the default group (19) shows up in STATUS again.
    if dev[0].get_status_field('sae_group') != '19':
        raise Exception("Expected default SAE group not used")
def test_sae_groups(dev, apdev):
    """SAE with all supported groups"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    # This is the full list of supported groups, but groups 14-16 (2048-4096 bit
    # MODP) and group 21 (521-bit random ECP group) are a bit too slow on some
    # VMs and can result in hitting the mac80211 authentication timeout, so
    # allow them to fail and just report such failures in the debug log.
    sae_groups = [19, 25, 26, 20, 21, 1, 2, 5, 14, 15, 16, 22, 23, 24]
    tls = dev[0].request("GET tls_library")
    if tls.startswith("OpenSSL") and "run=OpenSSL 1." in tls:
        logger.info("Add Brainpool EC groups since OpenSSL is new enough")
        sae_groups += [27, 28, 29, 30]
    heavy_groups = [14, 15, 16]
    suitable_groups = [15, 16, 17, 18, 19, 20, 21]
    groups = [str(g) for g in sae_groups]
    params = hostapd.wpa2_params(ssid="test-sae-groups",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_groups'] = ' '.join(groups)
    hapd = hostapd.add_ap(apdev[0], params)

    for g in groups:
        logger.info("Testing SAE group " + g)
        dev[0].request("SET sae_groups " + g)
        id = dev[0].connect("test-sae-groups", psk="12345678", key_mgmt="SAE",
                            scan_freq="2412", wait_connect=False)
        if int(g) in heavy_groups:
            # Heavy groups get a short timeout and failures are tolerated.
            ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=5)
            if ev is None:
                logger.info("No connection with heavy SAE group %s did not connect - likely hitting timeout in mac80211" % g)
                dev[0].remove_network(id)
                time.sleep(0.1)
                dev[0].dump_monitor()
                continue
            logger.info("Connection with heavy SAE group " + g)
        else:
            ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=10)
            if ev is None:
                if "BoringSSL" in tls and int(g) in [25]:
                    logger.info("Ignore connection failure with group " + g + " with BoringSSL")
                    dev[0].remove_network(id)
                    dev[0].dump_monitor()
                    continue
                if int(g) not in suitable_groups:
                    logger.info("Ignore connection failure with unsuitable group " + g)
                    dev[0].remove_network(id)
                    dev[0].dump_monitor()
                    continue
                raise Exception("Connection timed out with group " + g)
        if dev[0].get_status_field('sae_group') != g:
            raise Exception("Expected SAE group not used")
        # Each successful group must leave a usable (non-zero) PMKID.
        pmksa = dev[0].get_pmksa(hapd.own_addr())
        if not pmksa:
            raise Exception("No PMKSA cache entry added")
        if pmksa['pmkid'] == '00000000000000000000000000000000':
            raise Exception("All zeros PMKID derived for group %s" % g)
        dev[0].remove_network(id)
        dev[0].wait_disconnected()
        dev[0].dump_monitor()
@remote_compatible
def test_sae_group_nego(dev, apdev):
    """SAE group negotiation"""
    sta = dev[0]
    if "SAE" not in sta.get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae-group-nego",
                                    passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    ap_params['sae_groups'] = '19'
    hostapd.add_ap(apdev[0], ap_params)

    # The station prefers 25/26/20 but must fall back to 19, the only
    # group the AP allows.
    sta.request("SET sae_groups 25 26 20 19")
    sta.connect("test-sae-group-nego", psk="12345678", key_mgmt="SAE",
                scan_freq="2412")
    negotiated = sta.get_status_field('sae_group')
    if negotiated != '19':
        raise Exception("Expected SAE group not used")
def test_sae_group_nego_no_match(dev, apdev):
    """SAE group negotiation (no match)"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae-group-nego",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    # None-existing SAE group to force all attempts to be rejected
    params['sae_groups'] = '0'
    hostapd.add_ap(apdev[0], params)

    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae-group-nego", psk="12345678", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    # With no common group, wpa_supplicant should give up and temporarily
    # disable the network profile.
    ev = dev[0].wait_event(["CTRL-EVENT-SSID-TEMP-DISABLED"], timeout=10)
    dev[0].request("REMOVE_NETWORK all")
    if ev is None:
        raise Exception("Network profile disabling not reported")
@remote_compatible
def test_sae_anti_clogging(dev, apdev):
    """SAE anti clogging"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    # Threshold of one station triggers anti-clogging tokens as soon as a
    # second SAE exchange is in progress.
    ap_params['sae_anti_clogging_threshold'] = '1'
    hostapd.add_ap(apdev[0], ap_params)

    dev[0].request("SET sae_groups ")
    dev[1].request("SET sae_groups ")
    # Queue both stations first, then kick off their connections together
    # so the AP sees concurrent SAE exchanges.
    net_ids = {}
    for sta_idx in range(2):
        dev[sta_idx].scan(freq="2412")
        net_ids[sta_idx] = dev[sta_idx].connect("test-sae", psk="12345678",
                                                key_mgmt="SAE",
                                                scan_freq="2412",
                                                only_add_network=True)
    for sta_idx in range(2):
        dev[sta_idx].select_network(net_ids[sta_idx])
    for sta_idx in range(2):
        dev[sta_idx].wait_connected(timeout=10)
def test_sae_forced_anti_clogging(dev, apdev):
    """SAE anti clogging (forced)"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE WPA-PSK'
    # Threshold 0 forces anti-clogging tokens for every SAE exchange.
    params['sae_anti_clogging_threshold'] = '0'
    hostapd.add_ap(apdev[0], params)
    # One legacy PSK station plus two SAE stations on the same BSS.
    dev[2].connect("test-sae", psk="12345678", scan_freq="2412")
    for i in range(0, 2):
        dev[i].request("SET sae_groups ")
        dev[i].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412")
def test_sae_mixed(dev, apdev):
    """Mixed SAE and non-SAE network"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE WPA-PSK'
    params['sae_anti_clogging_threshold'] = '0'
    hapd = hostapd.add_ap(apdev[0], params)

    dev[2].connect("test-sae", psk="12345678", scan_freq="2412")
    for i in range(0, 2):
        dev[i].request("SET sae_groups ")
        dev[i].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412")
    # AP must report the SAE AKM (00-0f-ac-8) for the SAE station and the
    # PSK AKM (00-0f-ac-2) for the legacy station.
    sta0 = hapd.get_sta(dev[0].own_addr())
    sta2 = hapd.get_sta(dev[2].own_addr())
    if sta0['wpa'] != '2' or sta0['AKMSuiteSelector'] != '00-0f-ac-8':
        raise Exception("SAE STA(0) AKM suite selector reported incorrectly")
    if sta2['wpa'] != '2' or sta2['AKMSuiteSelector'] != '00-0f-ac-2':
        raise Exception("PSK STA(2) AKM suite selector reported incorrectly")
def test_sae_and_psk(dev, apdev):
    """SAE and PSK enabled in network profile"""
    sta = dev[0]
    if "SAE" not in sta.get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    hostapd.add_ap(apdev[0], ap_params)

    # The profile allows both AKMs; the SAE-only AP should pick SAE.
    sta.request("SET sae_groups ")
    sta.connect("test-sae", psk="12345678", key_mgmt="SAE WPA-PSK",
                scan_freq="2412")
def test_sae_and_psk2(dev, apdev):
    """SAE and PSK enabled in network profile (use PSK)"""
    sta = dev[0]
    if "SAE" not in sta.get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    # Plain WPA2-PSK AP; the dual-AKM profile should fall back to PSK.
    ap_params = hostapd.wpa2_params(ssid="test-psk", passphrase="12345678")
    hostapd.add_ap(apdev[0], ap_params)

    sta.request("SET sae_groups ")
    sta.connect("test-psk", psk="12345678", key_mgmt="SAE WPA-PSK",
                scan_freq="2412")
def test_sae_mixed_mfp(dev, apdev):
    """Mixed SAE and non-SAE network and MFP required with SAE"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE WPA-PSK'
    params["ieee80211w"] = "1"
    params['sae_require_mfp'] = '1'
    hostapd.add_ap(apdev[0], params)

    # SAE with MFP enabled: must connect.
    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE", ieee80211w="2",
                   scan_freq="2412")
    dev[0].dump_monitor()

    # SAE without MFP: must be rejected with status code 31
    # (robust management frame policy violation).
    dev[1].request("SET sae_groups ")
    dev[1].connect("test-sae", psk="12345678", key_mgmt="SAE", ieee80211w="0",
                   scan_freq="2412", wait_connect=False)
    ev = dev[1].wait_event(["CTRL-EVENT-CONNECTED",
                            "CTRL-EVENT-ASSOC-REJECT"], timeout=10)
    if ev is None:
        raise Exception("No connection result reported")
    if "CTRL-EVENT-ASSOC-REJECT" not in ev:
        raise Exception("SAE connection without MFP was not rejected")
    if "status_code=31" not in ev:
        raise Exception("Unexpected status code in rejection: " + ev)
    dev[1].request("DISCONNECT")
    dev[1].dump_monitor()

    # Legacy PSK without MFP is still allowed on this mixed BSS.
    dev[2].connect("test-sae", psk="12345678", ieee80211w="0", scan_freq="2412")
    dev[2].dump_monitor()
def test_sae_mfp(dev, apdev):
    """SAE and MFP enabled without sae_require_mfp"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    ap_params["ieee80211w"] = "1"
    hostapd.add_ap(apdev[0], ap_params)

    # Without sae_require_mfp, both an MFP-required station and an
    # MFP-disabled station must be able to connect.
    for sta, pmf in ((dev[0], "2"), (dev[1], "0")):
        sta.request("SET sae_groups ")
        sta.connect("test-sae", psk="12345678", key_mgmt="SAE",
                    ieee80211w=pmf, scan_freq="2412")
@remote_compatible
def test_sae_missing_password(dev, apdev):
    """SAE and missing password"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].request("SET sae_groups ")
    # Configure the network with only a raw PSK; SAE needs the passphrase
    # itself, so authentication cannot succeed and the network profile is
    # expected to get temporarily disabled. (The network id returned by
    # connect() was previously bound to the builtin-shadowing name 'id'
    # and never used, so the binding is dropped.)
    dev[0].connect("test-sae",
                   raw_psk="46b4a73b8a951ad53ebd2e0afdb9c5483257edd4c21d12b7710759da70945858",
                   key_mgmt="SAE", scan_freq="2412", wait_connect=False)
    ev = dev[0].wait_event(['CTRL-EVENT-SSID-TEMP-DISABLED'], timeout=10)
    if ev is None:
        raise Exception("Invalid network not temporarily disabled")
def test_sae_key_lifetime_in_memory(dev, apdev, params):
    """SAE and key lifetime in memory"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    password = "5ad144a7c1f5a5503baa6fa01dabc15b1843e8c01662d78d16b70b5cd23cf8b"
    p = hostapd.wpa2_params(ssid="test-sae", passphrase=password)
    p['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev[0], p)
    # PID of the wpa_supplicant process whose memory is scanned below for
    # leftover key material.
    pid = find_wpas_process(dev[0])
    dev[0].request("SET sae_groups ")
    id = dev[0].connect("test-sae", psk=password, key_mgmt="SAE",
                        scan_freq="2412")
    # The decrypted copy of GTK is freed only after the CTRL-EVENT-CONNECTED
    # event has been delivered, so verify that wpa_supplicant has returned to
    # eloop before reading process memory.
    time.sleep(1)
    dev[0].ping()
    password = password.encode()
    buf = read_process_memory(pid, password)
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    dev[0].relog()
    # Recover the SAE transient values and the PMK/PTK/GTK from the debug
    # log so their (non-)presence in process memory can be checked at each
    # stage below.
    sae_k = None
    sae_keyseed = None
    sae_kck = None
    pmk = None
    ptk = None
    gtk = None
    with open(os.path.join(params['logdir'], 'log0'), 'r') as f:
        for l in f.readlines():
            if "SAE: k - hexdump" in l:
                val = l.strip().split(':')[3].replace(' ', '')
                sae_k = binascii.unhexlify(val)
            if "SAE: keyseed - hexdump" in l:
                val = l.strip().split(':')[3].replace(' ', '')
                sae_keyseed = binascii.unhexlify(val)
            if "SAE: KCK - hexdump" in l:
                val = l.strip().split(':')[3].replace(' ', '')
                sae_kck = binascii.unhexlify(val)
            if "SAE: PMK - hexdump" in l:
                val = l.strip().split(':')[3].replace(' ', '')
                pmk = binascii.unhexlify(val)
            if "WPA: PTK - hexdump" in l:
                val = l.strip().split(':')[3].replace(' ', '')
                ptk = binascii.unhexlify(val)
            if "WPA: Group Key - hexdump" in l:
                val = l.strip().split(':')[3].replace(' ', '')
                gtk = binascii.unhexlify(val)
    if not sae_k or not sae_keyseed or not sae_kck or not pmk or not ptk or not gtk:
        raise Exception("Could not find keys from debug log")
    if len(gtk) != 16:
        raise Exception("Unexpected GTK length")
    # PTK layout: KCK (16 octets) | KEK (16 octets) | TK (16 octets)
    kck = ptk[0:16]
    kek = ptk[16:32]
    tk = ptk[32:48]
    fname = os.path.join(params['logdir'],
                         'sae_key_lifetime_in_memory.memctx-')
    # While associated: password, PMK, KCK, and KEK are legitimately in
    # memory, but the SAE transients (k, keyseed, KCK) must already have
    # been cleared.
    logger.info("Checking keys in memory while associated")
    get_key_locations(buf, password, "Password")
    get_key_locations(buf, pmk, "PMK")
    if password not in buf:
        raise HwsimSkip("Password not found while associated")
    if pmk not in buf:
        raise HwsimSkip("PMK not found while associated")
    if kck not in buf:
        raise Exception("KCK not found while associated")
    if kek not in buf:
        raise Exception("KEK not found while associated")
    # NOTE(review): TK check left disabled as in the original code; the
    # reason is not stated here.
    #if tk in buf:
    #    raise Exception("TK found from memory")
    verify_not_present(buf, sae_k, fname, "SAE(k)")
    verify_not_present(buf, sae_keyseed, fname, "SAE(keyseed)")
    verify_not_present(buf, sae_kck, fname, "SAE(KCK)")
    # After disassociation: all session keys must be gone from memory.
    logger.info("Checking keys in memory after disassociation")
    buf = read_process_memory(pid, password)
    # Note: Password is still present in network configuration
    # Note: PMK is in PMKSA cache
    get_key_locations(buf, password, "Password")
    get_key_locations(buf, pmk, "PMK")
    verify_not_present(buf, kck, fname, "KCK")
    verify_not_present(buf, kek, fname, "KEK")
    verify_not_present(buf, tk, fname, "TK")
    if gtk in buf:
        get_key_locations(buf, gtk, "GTK")
    verify_not_present(buf, gtk, fname, "GTK")
    verify_not_present(buf, sae_k, fname, "SAE(k)")
    verify_not_present(buf, sae_keyseed, fname, "SAE(keyseed)")
    verify_not_present(buf, sae_kck, fname, "SAE(KCK)")
    # After flushing the PMKSA cache the PMK must be gone as well.
    dev[0].request("PMKSA_FLUSH")
    logger.info("Checking keys in memory after PMKSA cache flush")
    buf = read_process_memory(pid, password)
    get_key_locations(buf, password, "Password")
    get_key_locations(buf, pmk, "PMK")
    verify_not_present(buf, pmk, fname, "PMK")
    # After removing the network profile no key material (including the
    # password itself) may remain in process memory.
    dev[0].request("REMOVE_NETWORK all")
    logger.info("Checking keys in memory after network profile removal")
    buf = read_process_memory(pid, password)
    get_key_locations(buf, password, "Password")
    get_key_locations(buf, pmk, "PMK")
    verify_not_present(buf, password, fname, "password")
    verify_not_present(buf, pmk, fname, "PMK")
    verify_not_present(buf, kck, fname, "KCK")
    verify_not_present(buf, kek, fname, "KEK")
    verify_not_present(buf, tk, fname, "TK")
    verify_not_present(buf, gtk, fname, "GTK")
    verify_not_present(buf, sae_k, fname, "SAE(k)")
    verify_not_present(buf, sae_keyseed, fname, "SAE(keyseed)")
    verify_not_present(buf, sae_kck, fname, "SAE(KCK)")
@remote_compatible
def test_sae_oom_wpas(dev, apdev):
    """SAE and OOM in wpa_supplicant"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_groups'] = '19 25 26 20'
    hapd = hostapd.add_ap(apdev[0], params)
    # Allocation failure in sae_set_group(); the connection is still
    # expected to complete (no wait_connect=False below).
    dev[0].request("SET sae_groups 20")
    with alloc_fail(dev[0], 1, "sae_set_group"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412")
        dev[0].request("REMOVE_NETWORK all")
    # Same with the default group list and the second allocation failing.
    dev[0].request("SET sae_groups ")
    with alloc_fail(dev[0], 2, "sae_set_group"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412")
        dev[0].request("REMOVE_NETWORK all")
    # OOM while building the SAE commit message buffer.
    with alloc_fail(dev[0], 1, "wpabuf_alloc;sme_auth_build_sae_commit"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412")
        dev[0].request("REMOVE_NETWORK all")
    # OOM while building the SAE confirm message; here only the failure
    # trigger is waited for, not a completed connection.
    with alloc_fail(dev[0], 1, "wpabuf_alloc;sme_auth_build_sae_confirm"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412", wait_connect=False)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
        dev[0].request("REMOVE_NETWORK all")
    # OOM at the start of sme_authenticate().
    with alloc_fail(dev[0], 1, "=sme_authenticate"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412", wait_connect=False)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
        dev[0].request("REMOVE_NETWORK all")
    # OOM when queueing the radio work for authentication.
    with alloc_fail(dev[0], 1, "radio_add_work;sme_authenticate"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412", wait_connect=False)
        wait_fail_trigger(dev[0], "GET_ALLOC_FAIL")
        dev[0].request("REMOVE_NETWORK all")
@remote_compatible
def test_sae_proto_ecc(dev, apdev):
    """SAE protocol testing (ECC)"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = apdev[0]['bssid']
    dev[0].request("SET sae_groups 19")
    # Each tuple: (description, commit payload hex, confirm payload hex or
    # None when no confirm frame is to be injected). The payloads are fed
    # to the station as if they came from the AP; the station is expected
    # to reject each of them.
    tests = [("Confirm mismatch",
              "1300" + "033d3635b39666ed427fd4a3e7d37acec2810afeaf1687f746a14163ff0e6d03" + "559cb8928db4ce4e3cbd6555e837591995e5ebe503ef36b503d9ca519d63728dd3c7c676b8e8081831b6bc3a64bdf136061a7de175e17d1965bfa41983ed02f8",
              "0000800edebc3f260dc1fe7e0b20888af2b8a3316252ec37388a8504e25b73dc4240"),
             ("Commit without even full cyclic group field",
              "13",
              None),
             ("Too short commit",
              "1300" + "033d3635b39666ed427fd4a3e7d37acec2810afeaf1687f746a14163ff0e6d03" + "559cb8928db4ce4e3cbd6555e837591995e5ebe503ef36b503d9ca519d63728dd3c7c676b8e8081831b6bc3a64bdf136061a7de175e17d1965bfa41983ed02",
              None),
             ("Invalid commit scalar (0)",
              "1300" + "0000000000000000000000000000000000000000000000000000000000000000" + "559cb8928db4ce4e3cbd6555e837591995e5ebe503ef36b503d9ca519d63728dd3c7c676b8e8081831b6bc3a64bdf136061a7de175e17d1965bfa41983ed02f8",
              None),
             ("Invalid commit scalar (1)",
              "1300" + "0000000000000000000000000000000000000000000000000000000000000001" + "559cb8928db4ce4e3cbd6555e837591995e5ebe503ef36b503d9ca519d63728dd3c7c676b8e8081831b6bc3a64bdf136061a7de175e17d1965bfa41983ed02f8",
              None),
             ("Invalid commit scalar (> r)",
              "1300" + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + "559cb8928db4ce4e3cbd6555e837591995e5ebe503ef36b503d9ca519d63728dd3c7c676b8e8081831b6bc3a64bdf136061a7de175e17d1965bfa41983ed02f8",
              None),
             ("Commit element not on curve",
              "1300" + "033d3635b39666ed427fd4a3e7d37acec2810afeaf1687f746a14163ff0e6d03" + "559cb8928db4ce4e3cbd6555e837591995e5ebe503ef36b503d9ca519d63728d0000000000000000000000000000000000000000000000000000000000000000",
              None),
             ("Invalid commit element (y coordinate > P)",
              "1300" + "033d3635b39666ed427fd4a3e7d37acec2810afeaf1687f746a14163ff0e6d03" + "559cb8928db4ce4e3cbd6555e837591995e5ebe503ef36b503d9ca519d63728dffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
              None),
             ("Invalid commit element (x coordinate > P)",
              "1300" + "033d3635b39666ed427fd4a3e7d37acec2810afeaf1687f746a14163ff0e6d03" + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd3c7c676b8e8081831b6bc3a64bdf136061a7de175e17d1965bfa41983ed02f8",
              None),
             ("Different group in commit",
              "1400" + "033d3635b39666ed427fd4a3e7d37acec2810afeaf1687f746a14163ff0e6d03" + "559cb8928db4ce4e3cbd6555e837591995e5ebe503ef36b503d9ca519d63728dd3c7c676b8e8081831b6bc3a64bdf136061a7de175e17d1965bfa41983ed02f8",
              None),
             ("Too short confirm",
              "1300" + "033d3635b39666ed427fd4a3e7d37acec2810afeaf1687f746a14163ff0e6d03" + "559cb8928db4ce4e3cbd6555e837591995e5ebe503ef36b503d9ca519d63728dd3c7c676b8e8081831b6bc3a64bdf136061a7de175e17d1965bfa41983ed02f8",
              "0000800edebc3f260dc1fe7e0b20888af2b8a3316252ec37388a8504e25b73dc42")]
    for (note, commit, confirm) in tests:
        logger.info(note)
        dev[0].scan_for_bss(bssid, freq=2412)
        # Take over management frame handling so crafted Authentication
        # frames can be injected towards the station.
        hapd.set("ext_mgmt_frame_handling", "1")
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412", wait_connect=False)
        logger.info("Commit")
        # Wait for the station's commit (Authentication frame, subtype 11).
        for i in range(0, 10):
            req = hapd.mgmt_rx()
            if req is None:
                raise Exception("MGMT RX wait timed out (commit)")
            if req['subtype'] == 11:
                break
            req = None
        if not req:
            raise Exception("Authentication frame (commit) not received")
        hapd.dump_monitor()
        # Inject the crafted commit ("030001000000" = auth alg 3 (SAE),
        # transaction 1, status 0) in place of a real AP response.
        resp = {}
        resp['fc'] = req['fc']
        resp['da'] = req['sa']
        resp['sa'] = req['da']
        resp['bssid'] = req['bssid']
        resp['payload'] = binascii.unhexlify("030001000000" + commit)
        hapd.mgmt_tx(resp)
        if confirm:
            logger.info("Confirm")
            for i in range(0, 10):
                req = hapd.mgmt_rx()
                if req is None:
                    raise Exception("MGMT RX wait timed out (confirm)")
                if req['subtype'] == 11:
                    break
                req = None
            if not req:
                raise Exception("Authentication frame (confirm) not received")
            hapd.dump_monitor()
            # Inject the crafted confirm (transaction 2).
            resp = {}
            resp['fc'] = req['fc']
            resp['da'] = req['sa']
            resp['sa'] = req['da']
            resp['bssid'] = req['bssid']
            resp['payload'] = binascii.unhexlify("030002000000" + confirm)
            hapd.mgmt_tx(resp)
        time.sleep(0.1)
        dev[0].request("REMOVE_NETWORK all")
        hapd.set("ext_mgmt_frame_handling", "0")
        hapd.dump_monitor()
@remote_compatible
def test_sae_proto_ffc(dev, apdev):
    """SAE protocol testing (FFC)"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = apdev[0]['bssid']
    dev[0].request("SET sae_groups 2")
    # Each tuple: (description, commit payload hex, confirm payload hex or
    # None when no confirm frame is to be injected). Payloads use FFC
    # group 2; the station is expected to reject each case.
    tests = [("Confirm mismatch",
              "0200" + "0c70519d874e3e4930a917cc5e17ea7a26028211159f217bab28b8d6c56691805e49f03249b2c6e22c7c9f86b30e04ccad2deedd5e5108ae07b737c00001c59cd0eb08b1dfc7f1b06a1542e2b6601a963c066e0c65940983a03917ae57a101ce84b5cbbc76ff33ebb990aac2e54aa0f0ab6ec0a58113d927683502b2cb2347d2" + "a8c00117493cdffa5dd671e934bc9cb1a69f39e25e9dd9cd9afd3aea2441a0f5491211c7ba50a753563f9ce943b043557cb71193b28e86ed9544f4289c471bf91b70af5c018cf4663e004165b0fd0bc1d8f3f78adf42eee92bcbc55246fd3ee9f107ab965dc7d4986f23eb71d616ebfe6bfe0a6c1ac5dc1718acee17c9a17486",
              "0000f3116a9731f1259622e3eb55d4b3b50ba16f8c5f5565b28e609b180c51460251"),
             ("Too short commit",
              "0200" + "0c70519d874e3e4930a917cc5e17ea7a26028211159f217bab28b8d6c56691805e49f03249b2c6e22c7c9f86b30e04ccad2deedd5e5108ae07b737c00001c59cd0eb08b1dfc7f1b06a1542e2b6601a963c066e0c65940983a03917ae57a101ce84b5cbbc76ff33ebb990aac2e54aa0f0ab6ec0a58113d927683502b2cb2347d2" + "a8c00117493cdffa5dd671e934bc9cb1a69f39e25e9dd9cd9afd3aea2441a0f5491211c7ba50a753563f9ce943b043557cb71193b28e86ed9544f4289c471bf91b70af5c018cf4663e004165b0fd0bc1d8f3f78adf42eee92bcbc55246fd3ee9f107ab965dc7d4986f23eb71d616ebfe6bfe0a6c1ac5dc1718acee17c9a174",
              None),
             ("Invalid element (0) in commit",
              "0200" + "0c70519d874e3e4930a917cc5e17ea7a26028211159f217bab28b8d6c56691805e49f03249b2c6e22c7c9f86b30e04ccad2deedd5e5108ae07b737c00001c59cd0eb08b1dfc7f1b06a1542e2b6601a963c066e0c65940983a03917ae57a101ce84b5cbbc76ff33ebb990aac2e54aa0f0ab6ec0a58113d927683502b2cb2347d2" + "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
              None),
             ("Invalid element (1) in commit",
              "0200" + "0c70519d874e3e4930a917cc5e17ea7a26028211159f217bab28b8d6c56691805e49f03249b2c6e22c7c9f86b30e04ccad2deedd5e5108ae07b737c00001c59cd0eb08b1dfc7f1b06a1542e2b6601a963c066e0c65940983a03917ae57a101ce84b5cbbc76ff33ebb990aac2e54aa0f0ab6ec0a58113d927683502b2cb2347d2" + "0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001",
              None),
             ("Invalid element (> P) in commit",
              "0200" + "0c70519d874e3e4930a917cc5e17ea7a26028211159f217bab28b8d6c56691805e49f03249b2c6e22c7c9f86b30e04ccad2deedd5e5108ae07b737c00001c59cd0eb08b1dfc7f1b06a1542e2b6601a963c066e0c65940983a03917ae57a101ce84b5cbbc76ff33ebb990aac2e54aa0f0ab6ec0a58113d927683502b2cb2347d2" + "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
              None)]
    for (note, commit, confirm) in tests:
        logger.info(note)
        dev[0].scan_for_bss(bssid, freq=2412)
        # Take over management frame handling so crafted Authentication
        # frames can be injected towards the station.
        hapd.set("ext_mgmt_frame_handling", "1")
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412", wait_connect=False)
        logger.info("Commit")
        # Wait for the station's commit (Authentication frame, subtype 11).
        for i in range(0, 10):
            req = hapd.mgmt_rx()
            if req is None:
                raise Exception("MGMT RX wait timed out (commit)")
            if req['subtype'] == 11:
                break
            req = None
        if not req:
            raise Exception("Authentication frame (commit) not received")
        hapd.dump_monitor()
        # Inject the crafted commit ("030001000000" = auth alg 3 (SAE),
        # transaction 1, status 0) in place of a real AP response.
        resp = {}
        resp['fc'] = req['fc']
        resp['da'] = req['sa']
        resp['sa'] = req['da']
        resp['bssid'] = req['bssid']
        resp['payload'] = binascii.unhexlify("030001000000" + commit)
        hapd.mgmt_tx(resp)
        if confirm:
            logger.info("Confirm")
            for i in range(0, 10):
                req = hapd.mgmt_rx()
                if req is None:
                    raise Exception("MGMT RX wait timed out (confirm)")
                if req['subtype'] == 11:
                    break
                req = None
            if not req:
                raise Exception("Authentication frame (confirm) not received")
            hapd.dump_monitor()
            # Inject the crafted confirm (transaction 2).
            resp = {}
            resp['fc'] = req['fc']
            resp['da'] = req['sa']
            resp['sa'] = req['da']
            resp['bssid'] = req['bssid']
            resp['payload'] = binascii.unhexlify("030002000000" + confirm)
            hapd.mgmt_tx(resp)
        time.sleep(0.1)
        dev[0].request("REMOVE_NETWORK all")
        hapd.set("ext_mgmt_frame_handling", "0")
        hapd.dump_monitor()
def test_sae_proto_confirm_replay(dev, apdev):
    """SAE protocol testing - Confirm replay"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = apdev[0]['bssid']
    dev[0].request("SET sae_groups 19")
    dev[0].scan_for_bss(bssid, freq=2412)
    # Take over management frame handling so the station's frames can be
    # fed to hostapd manually (and the confirm replayed).
    hapd.set("ext_mgmt_frame_handling", "1")
    dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    logger.info("Commit")
    # Wait for the station's commit (Authentication frame, subtype 11).
    for i in range(0, 10):
        req = hapd.mgmt_rx()
        if req is None:
            raise Exception("MGMT RX wait timed out (commit)")
        if req['subtype'] == 11:
            break
        req = None
    if not req:
        raise Exception("Authentication frame (commit) not received")
    hapd.dump_monitor()
    # Feed the commit into hostapd's SAE implementation.
    # (An unused locally built auth header was removed here; the received
    # frame is processed as-is.)
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + binascii.hexlify(req['frame']).decode())
    logger.info("Confirm")
    for i in range(0, 10):
        req = hapd.mgmt_rx()
        if req is None:
            raise Exception("MGMT RX wait timed out (confirm)")
        if req['subtype'] == 11:
            break
        req = None
    if not req:
        raise Exception("Authentication frame (confirm) not received")
    hapd.dump_monitor()
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + binascii.hexlify(req['frame']).decode())
    # Process the same confirm frame a second time; the connection is
    # still expected to complete below.
    logger.info("Replay Confirm")
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + binascii.hexlify(req['frame']).decode())
    logger.info("Association Request")
    # Wait for the station's Association Request (subtype 0).
    for i in range(0, 10):
        req = hapd.mgmt_rx()
        if req is None:
            raise Exception("MGMT RX wait timed out (AssocReq)")
        if req['subtype'] == 0:
            break
        req = None
    if not req:
        raise Exception("Association Request frame not received")
    hapd.dump_monitor()
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + binascii.hexlify(req['frame']).decode())
    ev = hapd.wait_event(["MGMT-TX-STATUS"], timeout=5)
    if ev is None:
        raise Exception("Management frame TX status not reported (1)")
    if "stype=1 ok=1" not in ev:
        raise Exception("Unexpected management frame TX status (1): " + ev)
    # Acknowledge the Association Response TX status so hostapd completes
    # the association, then return frame handling to the driver.
    cmd = "MGMT_TX_STATUS_PROCESS %s" % (" ".join(ev.split(' ')[1:4]))
    if "OK" not in hapd.request(cmd):
        raise Exception("MGMT_TX_STATUS_PROCESS failed")
    hapd.set("ext_mgmt_frame_handling", "0")
    dev[0].wait_connected()
def test_sae_proto_hostapd(dev, apdev):
    """SAE protocol testing with hostapd"""
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    # 65535 is included to exercise the unsupported-group path below.
    params['sae_groups'] = "19 65535"
    hapd = hostapd.add_ap(apdev[0], params)
    hapd.set("ext_mgmt_frame_handling", "1")
    bssid = hapd.own_addr().replace(':', '')
    addr = "020000000000"
    addr2 = "020000000001"
    # 802.11 Authentication frame header (FC=0xb0, duration, DA, SA,
    # BSSID, sequence) for two simulated stations.
    hdr = "b0003a01" + bssid + addr + bssid + "1000"
    hdr2 = "b0003a01" + bssid + addr2 + bssid + "1000"
    group = "1300"
    scalar = "f7df19f4a7fef1d3b895ea1de150b7c5a7a705c8ebb31a52b623e0057908bd93"
    element_x = "21931572027f2e953e2a49fab3d992944102cc95aa19515fc068b394fb25ae3c"
    element_y = "cb4eeb94d7b0b789abfdb73a67ab9d6d5efa94dd553e0e724a6289821cbce530"
    # Well-formed group 19 commit ("030001000000" = auth alg 3 (SAE),
    # transaction 1, status 0).
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + hdr + "030001000000" + group + scalar + element_x + element_y)
    # "SAE: Not enough data for scalar"
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + hdr + "030001000000" + group + scalar[:-2])
    # "SAE: Do not allow group to be changed"
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + hdr + "030001000000" + "ffff" + scalar[:-2])
    # "SAE: Unsupported Finite Cyclic Group 65535"
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + hdr2 + "030001000000" + "ffff" + scalar[:-2])
def test_sae_proto_hostapd_ecc(dev, apdev):
    """SAE protocol testing with hostapd (ECC)"""
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="foofoofoo")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_groups'] = "19"
    hapd = hostapd.add_ap(apdev[0], params)
    hapd.set("ext_mgmt_frame_handling", "1")
    bssid = hapd.own_addr().replace(':', '')
    addr = "020000000000"
    # 802.11 Authentication frame header (FC=0xb0, duration, DA, SA,
    # BSSID, sequence). (Unused addr2/hdr2 locals removed.)
    hdr = "b0003a01" + bssid + addr + bssid + "1000"
    group = "1300"
    scalar = "9e9a959bf2dda875a4a29ce9b2afef46f2d83060930124cd9e39ddce798cd69a"
    element_x = "dfc55fd8622b91d362f4d1fc9646474d7fba0ff7cce6ca58b8e96a931e070220"
    element_y = "dac8a4e80724f167c1349cc9e1f9dd82a7c77b29d49789b63b72b4c849301a28"
    # sae_parse_commit_element_ecc() failure to parse peer element
    # (depending on crypto library, either crypto_ec_point_from_bin() failure
    # or crypto_ec_point_is_on_curve() returning 0)
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + hdr + "030001000000" + group + scalar + element_x + element_y)
    # Unexpected continuation of the connection attempt with confirm
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + hdr + "030002000000" + "0000" + "fd7b081ff4e8676f03612a4140eedcd3c179ab3a13b93863c6f7ca451340b9ae")
def test_sae_proto_hostapd_ffc(dev, apdev):
    """SAE protocol testing with hostapd (FFC)"""
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="foofoofoo")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_groups'] = "22"
    hapd = hostapd.add_ap(apdev[0], params)
    hapd.set("ext_mgmt_frame_handling", "1")
    bssid = hapd.own_addr().replace(':', '')
    addr = "020000000000"
    # 802.11 Authentication frame header (FC=0xb0, duration, DA, SA,
    # BSSID, sequence). (Unused addr2/hdr2 locals removed.)
    hdr = "b0003a01" + bssid + addr + bssid + "1000"
    group = "1600"
    scalar = "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000044cc46a73c07ef479dc66ec1f5e8ccf25131fa40"
    element = "0f1d67025e12fc874cf718c35b19d1ab2db858215623f1ce661cbd1d7b1d7a09ceda7dba46866cf37044259b5cac4db15e7feb778edc8098854b93a84347c1850c02ee4d7dac46db79c477c731085d5b39f56803cda1eeac4a2fbbccb9a546379e258c00ebe93dfdd0a34cf8ce5c55cf905a89564a590b7e159fb89198e9d5cd"
    # sae_parse_commit_element_ffc() failure to parse peer element
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + hdr + "030001000000" + group + scalar + element)
    # Unexpected continuation of the connection attempt with confirm
    hapd.request("MGMT_RX_PROCESS freq=2412 datarate=0 ssi_signal=-30 frame=" + hdr + "030002000000" + "0000" + "fd7b081ff4e8676f03612a4140eedcd3c179ab3a13b93863c6f7ca451340b9ae")
@remote_compatible
def test_sae_no_ffc_by_default(dev, apdev):
    """SAE and default groups rejecting FFC"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    hostapd.add_ap(apdev[0], ap_params)
    # Offer only FFC group 15 from the station; with the AP using the
    # default groups this keeps retrying authentication without ever
    # connecting, so just check two authentication attempts are seen.
    dev[0].request("SET sae_groups 15")
    dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE", scan_freq="2412",
                   wait_connect=False)
    for err in ("Did not try to authenticate",
                "Did not try to authenticate (2)"):
        if dev[0].wait_event(["SME: Trying to authenticate"], timeout=3) is None:
            raise Exception(err)
    dev[0].request("REMOVE_NETWORK all")
def sae_reflection_attack(apdev, dev, group):
    # Helper: attempt an SAE reflection attack against the station by
    # echoing its own commit payload back from an AP under external
    # management frame control. The station must not continue to the
    # confirm phase.
    if "SAE" not in dev.get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="no-knowledge-of-passphrase")
    params['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev, params)
    bssid = apdev['bssid']
    dev.scan_for_bss(bssid, freq=2412)
    hapd.set("ext_mgmt_frame_handling", "1")
    dev.request("SET sae_groups %d" % group)
    dev.connect("test-sae", psk="reflection-attack", key_mgmt="SAE",
                scan_freq="2412", wait_connect=False)
    # Commit
    # Wait for the station's commit (Authentication frame, subtype 11).
    for i in range(0, 10):
        req = hapd.mgmt_rx()
        if req is None:
            raise Exception("MGMT RX wait timed out")
        if req['subtype'] == 11:
            break
        req = None
    if not req:
        raise Exception("Authentication frame not received")
    # Reflect the station's own commit payload back at it.
    resp = {}
    resp['fc'] = req['fc']
    resp['da'] = req['sa']
    resp['sa'] = req['da']
    resp['bssid'] = req['bssid']
    resp['payload'] = req['payload']
    hapd.mgmt_tx(resp)
    # Confirm
    # The station must not answer the reflected commit with another
    # Authentication frame.
    req = hapd.mgmt_rx(timeout=0.5)
    if req is not None:
        if req['subtype'] == 11:
            raise Exception("Unexpected Authentication frame seen")
@remote_compatible
def test_sae_reflection_attack_ecc(dev, apdev):
    """SAE reflection attack (ECC)"""
    # Group 19 = ECC (NIST P-256)
    sae_reflection_attack(apdev[0], dev[0], 19)
@remote_compatible
def test_sae_reflection_attack_ffc(dev, apdev):
    """SAE reflection attack (FFC)"""
    # Group 15 = FFC (3072-bit MODP)
    sae_reflection_attack(apdev[0], dev[0], 15)
def sae_reflection_attack_internal(apdev, dev, group):
    # Helper: the AP mirrors the station's commit back at it
    # (sae_reflection_attack=1); the station must notice the reflection
    # and never complete the connection.
    if "SAE" not in dev.get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae",
                                    passphrase="no-knowledge-of-passphrase")
    ap_params.update({'wpa_key_mgmt': 'SAE', 'sae_reflection_attack': '1'})
    hostapd.add_ap(apdev, ap_params)
    dev.scan_for_bss(apdev['bssid'], freq=2412)
    dev.request("SET sae_groups %d" % group)
    dev.connect("test-sae", psk="reflection-attack", key_mgmt="SAE",
                scan_freq="2412", wait_connect=False)
    if dev.wait_event(["SME: Trying to authenticate"], timeout=10) is None:
        raise Exception("No authentication attempt seen")
    if dev.wait_event(["CTRL-EVENT-CONNECTED"], timeout=1) is not None:
        raise Exception("Unexpected connection")
@remote_compatible
def test_sae_reflection_attack_ecc_internal(dev, apdev):
    """SAE reflection attack (ECC) - internal"""
    # Group 19 = ECC (NIST P-256)
    sae_reflection_attack_internal(apdev[0], dev[0], 19)
@remote_compatible
def test_sae_reflection_attack_ffc_internal(dev, apdev):
    """SAE reflection attack (FFC) - internal"""
    # Group 15 = FFC (3072-bit MODP)
    sae_reflection_attack_internal(apdev[0], dev[0], 15)
@remote_compatible
def test_sae_commit_override(dev, apdev):
    """SAE commit override (hostapd)"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    # Force hostapd to send a canned (bogus) SAE commit; the exchange must
    # never reach a completed connection.
    ap_params = hostapd.wpa2_params(ssid="test-sae",
                                    passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    ap_params['sae_commit_override'] = '13ffbad00d215867a7c5ff37d87bb9bdb7cb116e520f71e8d7a794ca2606d537ddc6c099c40e7a25372b80a8fd443cd7dd222c8ea21b8ef372d4b3e316c26a73fd999cc79ad483eb826e7b3893ea332da68fa13224bcdeb4fb18b0584dd100a2c514'
    hostapd.add_ap(apdev[0], ap_params)
    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", psk="test-sae", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    if dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=1) is not None:
        raise Exception("Unexpected connection")
@remote_compatible
def test_sae_commit_override2(dev, apdev):
    """SAE commit override (wpa_supplicant)"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae",
                                    passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    hostapd.add_ap(apdev[0], ap_params)
    dev[0].request("SET sae_groups ")
    # Force wpa_supplicant to send a canned (bogus) SAE commit; the
    # exchange must never reach a completed connection.
    dev[0].set('sae_commit_override', '13ffbad00d215867a7c5ff37d87bb9bdb7cb116e520f71e8d7a794ca2606d537ddc6c099c40e7a25372b80a8fd443cd7dd222c8ea21b8ef372d4b3e316c26a73fd999cc79ad483eb826e7b3893ea332da68fa13224bcdeb4fb18b0584dd100a2c514')
    dev[0].connect("test-sae", psk="test-sae", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    if dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=1) is not None:
        raise Exception("Unexpected connection")
def test_sae_commit_invalid_scalar_element_ap(dev, apdev):
    """SAE commit invalid scalar/element from AP"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    # AP sends a group 19 commit with all-zero scalar/element; the station
    # must reject it and never connect.
    ap_params = hostapd.wpa2_params(ssid="test-sae",
                                    passphrase="12345678")
    ap_params.update({'wpa_key_mgmt': 'SAE',
                      'sae_commit_override': '1300' + 96*'00'})
    hostapd.add_ap(apdev[0], ap_params)
    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", psk="test-sae", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    if dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=1) is not None:
        raise Exception("Unexpected connection")
def test_sae_commit_invalid_element_ap(dev, apdev):
    """SAE commit invalid element from AP"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    # AP sends a group 19 commit with an invalid element; the station must
    # reject it and never connect.
    ap_params = hostapd.wpa2_params(ssid="test-sae",
                                    passphrase="12345678")
    ap_params.update({'wpa_key_mgmt': 'SAE',
                      'sae_commit_override': '1300' + 31*'00' + '02' + 64*'00'})
    hostapd.add_ap(apdev[0], ap_params)
    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", psk="test-sae", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    if dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=1) is not None:
        raise Exception("Unexpected connection")
def test_sae_commit_invalid_scalar_element_sta(dev, apdev):
    """SAE commit invalid scalar/element from STA"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae",
                                    passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    hostapd.add_ap(apdev[0], ap_params)
    # Station sends a group 19 commit with all-zero scalar/element; the AP
    # must reject it, so no connection is expected.
    dev[0].request("SET sae_groups ")
    dev[0].set('sae_commit_override', '1300' + 96*'00')
    dev[0].connect("test-sae", psk="test-sae", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    if dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=1) is not None:
        raise Exception("Unexpected connection")
def test_sae_commit_invalid_element_sta(dev, apdev):
    """SAE commit invalid element from STA"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae",
                                    passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    hostapd.add_ap(apdev[0], ap_params)
    # Station sends a group 19 commit with an invalid element; the AP must
    # reject it, so no connection is expected.
    dev[0].request("SET sae_groups ")
    dev[0].set('sae_commit_override', '1300' + 31*'00' + '02' + 64*'00')
    dev[0].connect("test-sae", psk="test-sae", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    if dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=1) is not None:
        raise Exception("Unexpected connection")
@remote_compatible
def test_sae_anti_clogging_proto(dev, apdev):
    """SAE anti clogging protocol testing"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="no-knowledge-of-passphrase")
    params['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = apdev[0]['bssid']
    dev[0].scan_for_bss(bssid, freq=2412)
    # Take over management frame handling so a crafted anti-clogging
    # response can be injected towards the station.
    hapd.set("ext_mgmt_frame_handling", "1")
    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", psk="anti-cloggign", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    # Commit
    # Wait for the station's commit (Authentication frame, subtype 11).
    for i in range(0, 10):
        req = hapd.mgmt_rx()
        if req is None:
            raise Exception("MGMT RX wait timed out")
        if req['subtype'] == 11:
            break
        req = None
    if not req:
        raise Exception("Authentication frame not received")
    # Respond with status 0x004c (76, anti-clogging token request) whose
    # token starts with an unknown group 0xffff; the station is expected
    # to reject this.
    resp = {}
    resp['fc'] = req['fc']
    resp['da'] = req['sa']
    resp['sa'] = req['da']
    resp['bssid'] = req['bssid']
    resp['payload'] = binascii.unhexlify("030001004c00" + "ffff00")
    hapd.mgmt_tx(resp)
    # Confirm (not received due to DH group being rejected)
    req = hapd.mgmt_rx(timeout=0.5)
    if req is not None:
        if req['subtype'] == 11:
            raise Exception("Unexpected Authentication frame seen")
@remote_compatible
def test_sae_no_random(dev, apdev):
    """SAE and no random numbers available"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    ap_params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    ap_params['wpa_key_mgmt'] = 'SAE'
    hostapd.add_ap(apdev[0], ap_params)
    dev[0].request("SET sae_groups ")
    # A random-number fetch failure during PWE derivation must not prevent
    # the connection from completing.
    for fail_count, fail_func in [(1, "os_get_random;sae_derive_pwe_ecc")]:
        with fail_test(dev[0], fail_count, fail_func):
            dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                           scan_freq="2412")
            dev[0].request("REMOVE_NETWORK all")
            dev[0].wait_disconnected()
@remote_compatible
def test_sae_pwe_failure(dev, apdev):
    """SAE and pwe failure"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_groups'] = '19 15'
    hapd = hostapd.add_ap(apdev[0], params)
    # ECC group 19: injected failures in PWE derivation must not prevent
    # the connection from completing (no wait_connect=False below).
    dev[0].request("SET sae_groups 19")
    with fail_test(dev[0], 1, "hmac_sha256_vector;sae_derive_pwe_ecc"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
    with fail_test(dev[0], 1, "sae_test_pwd_seed_ecc"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
    # FFC group 15: same checks for the FFC PWE derivation path.
    dev[0].request("SET sae_groups 15")
    with fail_test(dev[0], 1, "hmac_sha256_vector;sae_derive_pwe_ffc"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
    # NOTE(review): this repeats the value already configured above;
    # harmless redundancy.
    dev[0].request("SET sae_groups 15")
    with fail_test(dev[0], 1, "sae_test_pwd_seed_ffc"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
    with fail_test(dev[0], 2, "sae_test_pwd_seed_ffc"):
        dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                       scan_freq="2412")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()
@remote_compatible
def test_sae_bignum_failure(dev, apdev):
    """SAE and bignum failure"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_groups'] = '19 15 22'
    hapd = hostapd.add_ap(apdev[0], params)

    def run_cases(cases):
        # For each (count, func) pair, make the count-th call of func fail
        # and verify the station aborts the attempt cleanly.
        for count, func in cases:
            with fail_test(dev[0], count, func):
                hapd.request("NOTE STA failure testing %d:%s" % (count, func))
                dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                               scan_freq="2412", wait_connect=False)
                wait_fail_trigger(dev[0], "GET_FAIL")
                dev[0].request("REMOVE_NETWORK all")
            dev[0].dump_monitor()
            hapd.dump_monitor()

    # ECC group (19) failure cases
    dev[0].request("SET sae_groups 19")
    run_cases([(1, "crypto_bignum_init_set;dragonfly_get_rand_1_to_p_1"),
               (1, "crypto_bignum_init;dragonfly_is_quadratic_residue_blind"),
               (1, "crypto_bignum_mulmod;dragonfly_is_quadratic_residue_blind"),
               (2, "crypto_bignum_mulmod;dragonfly_is_quadratic_residue_blind"),
               (3, "crypto_bignum_mulmod;dragonfly_is_quadratic_residue_blind"),
               (1, "crypto_bignum_legendre;dragonfly_is_quadratic_residue_blind"),
               (1, "crypto_bignum_init_set;sae_test_pwd_seed_ecc"),
               (1, "crypto_ec_point_compute_y_sqr;sae_test_pwd_seed_ecc"),
               (1, "crypto_bignum_to_bin;sae_derive_pwe_ecc"),
               (1, "crypto_ec_point_init;sae_derive_pwe_ecc"),
               (1, "crypto_ec_point_solve_y_coord;sae_derive_pwe_ecc"),
               (1, "crypto_ec_point_init;sae_derive_commit_element_ecc"),
               (1, "crypto_ec_point_mul;sae_derive_commit_element_ecc"),
               (1, "crypto_ec_point_invert;sae_derive_commit_element_ecc"),
               (1, "crypto_bignum_init;=sae_derive_commit"),
               (1, "crypto_ec_point_init;sae_derive_k_ecc"),
               (1, "crypto_ec_point_mul;sae_derive_k_ecc"),
               (1, "crypto_ec_point_add;sae_derive_k_ecc"),
               (2, "crypto_ec_point_mul;sae_derive_k_ecc"),
               (1, "crypto_ec_point_to_bin;sae_derive_k_ecc"),
               (1, "crypto_bignum_legendre;dragonfly_get_random_qr_qnr"),
               (1, "sha256_prf;sae_derive_keys"),
               (1, "crypto_bignum_init;sae_derive_keys"),
               (1, "crypto_bignum_init_set;sae_parse_commit_scalar"),
               (1, "crypto_bignum_to_bin;sae_parse_commit_element_ecc"),
               (1, "crypto_ec_point_from_bin;sae_parse_commit_element_ecc")])

    # FFC group (15) failure cases
    dev[0].request("SET sae_groups 15")
    run_cases([(1, "crypto_bignum_init_set;sae_set_group"),
               (2, "crypto_bignum_init_set;sae_set_group"),
               (1, "crypto_bignum_init;sae_derive_commit"),
               (2, "crypto_bignum_init;sae_derive_commit"),
               (1, "crypto_bignum_init_set;sae_test_pwd_seed_ffc"),
               (1, "crypto_bignum_exptmod;sae_test_pwd_seed_ffc"),
               (1, "crypto_bignum_init;sae_derive_pwe_ffc"),
               (1, "crypto_bignum_init;sae_derive_commit_element_ffc"),
               (1, "crypto_bignum_exptmod;sae_derive_commit_element_ffc"),
               (1, "crypto_bignum_inverse;sae_derive_commit_element_ffc"),
               (1, "crypto_bignum_init;sae_derive_k_ffc"),
               (1, "crypto_bignum_exptmod;sae_derive_k_ffc"),
               (1, "crypto_bignum_mulmod;sae_derive_k_ffc"),
               (2, "crypto_bignum_exptmod;sae_derive_k_ffc"),
               (1, "crypto_bignum_to_bin;sae_derive_k_ffc"),
               (1, "crypto_bignum_init_set;sae_parse_commit_element_ffc"),
               (1, "crypto_bignum_init;sae_parse_commit_element_ffc"),
               (2, "crypto_bignum_init_set;sae_parse_commit_element_ffc"),
               (1, "crypto_bignum_exptmod;sae_parse_commit_element_ffc")])
def test_sae_bignum_failure_unsafe_group(dev, apdev):
    """SAE and bignum failure unsafe group"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_groups'] = '22'
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].request("SET sae_groups 22")
    # Every case fails the first invocation of the listed function.
    failing_funcs = ["crypto_bignum_init_set;sae_test_pwd_seed_ffc",
                     "crypto_bignum_sub;sae_test_pwd_seed_ffc",
                     "crypto_bignum_div;sae_test_pwd_seed_ffc"]
    for func in failing_funcs:
        with fail_test(dev[0], 1, func):
            hapd.request("NOTE STA failure testing %d:%s" % (1, func))
            dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                           scan_freq="2412", wait_connect=False)
            wait_fail_trigger(dev[0], "GET_FAIL")
            dev[0].request("REMOVE_NETWORK all")
        dev[0].dump_monitor()
        hapd.dump_monitor()
def test_sae_invalid_anti_clogging_token_req(dev, apdev):
    """SAE and invalid anti-clogging token request"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    # Beacon more frequently since Probe Request frames are practically ignored
    # in this test setup (ext_mgmt_frame_handled=1 on hostapd side) and
    # wpa_supplicant scans may end up getting ignored if no new results are
    # available due to the missing Probe Response frames.
    params['beacon_int'] = '20'
    hapd = hostapd.add_ap(apdev[0], params)
    bssid = apdev[0]['bssid']

    def wait_auth_attempt(seq):
        # Wait for the station to (re)start SAE authentication; seq is used
        # only to label failures so it is clear which round went wrong.
        ev = dev[0].wait_event(["SME: Trying to authenticate"])
        if ev is None:
            raise Exception("No authentication attempt seen (%d)" % seq)
        dev[0].dump_monitor()

    def rx_commit(seq):
        # Receive externally handled management frames until the SAE commit
        # Authentication frame (subtype 11) is seen.
        for i in range(0, 10):
            req = hapd.mgmt_rx()
            if req is None:
                raise Exception("MGMT RX wait timed out (commit) (%d)" % seq)
            if req['subtype'] == 11:
                return req
        raise Exception("Authentication frame (commit) not received (%d)" % seq)

    def reply_with(payload_hex, seq):
        # Answer the station's SAE commit with a crafted Authentication frame
        # body and verify that the injected frame was transmitted.
        req = rx_commit(seq)
        hapd.dump_monitor()
        resp = {}
        resp['fc'] = req['fc']
        resp['da'] = req['sa']
        resp['sa'] = req['da']
        resp['bssid'] = req['bssid']
        resp['payload'] = binascii.unhexlify(payload_hex)
        hapd.mgmt_tx(resp)
        ev = hapd.wait_event(["MGMT-TX-STATUS"], timeout=5)
        if ev is None:
            raise Exception("Management frame TX status not reported (%d)" % seq)
        if "stype=11 ok=1" not in ev:
            raise Exception("Unexpected management frame TX status (%d): %s"
                            % (seq, ev))

    dev[0].request("SET sae_groups 19")
    dev[0].scan_for_bss(bssid, freq=2412)
    hapd.set("ext_mgmt_frame_handling", "1")
    dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    wait_auth_attempt(1)
    # Anti-clogging token request (status code 76) with an invalid,
    # too-short token field (single octet 0x13)
    reply_with("030001004c0013", 1)
    wait_auth_attempt(2)
    # Commit response with a non-zero (failure) status code and no
    # additional fields
    reply_with("030001000100", 2)
    wait_auth_attempt(3)
    dev[0].request("DISCONNECT")
def test_sae_password(dev, apdev):
    """SAE and sae_password in hostapd configuration"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    # Mixed-mode AP: SAE stations use the dedicated sae_password while
    # WPA-PSK stations use the WPA passphrase.
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="12345678")
    params.update({'wpa_key_mgmt': 'SAE WPA-PSK',
                   'sae_password': "sae-password"})
    hapd = hostapd.add_ap(apdev[0], params)
    # SAE station carrying the SAE password in the psk parameter
    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", psk="sae-password", key_mgmt="SAE",
                   scan_freq="2412")
    # Legacy WPA-PSK station using the passphrase
    dev[1].connect("test-sae", psk="12345678", scan_freq="2412")
    # SAE station using the explicit sae_password network parameter
    dev[2].request("SET sae_groups ")
    dev[2].connect("test-sae", sae_password="sae-password", key_mgmt="SAE",
                   scan_freq="2412")
def test_sae_password_short(dev, apdev):
    """SAE and short password"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    # A six-character secret: shorter than a WPA2 passphrase would allow
    secret = "secret"
    params = hostapd.wpa2_params(ssid="test-sae")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_password'] = secret
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", sae_password=secret, key_mgmt="SAE",
                   scan_freq="2412")
def test_sae_password_long(dev, apdev):
    """SAE and long password"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    # A 100-character password: longer than a WPA2 passphrase would allow
    secret = 100*"A"
    params = hostapd.wpa2_params(ssid="test-sae")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_password'] = secret
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", sae_password=secret, key_mgmt="SAE",
                   scan_freq="2412")
def test_sae_connect_cmd(dev, apdev):
    """SAE with connect command"""
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    if "SAE" not in wpas.get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    hapd = hostapd.add_ap(apdev[0], params)
    wpas.request("SET sae_groups ")
    wpas.connect("test-sae", psk="12345678", key_mgmt="SAE",
                 scan_freq="2412", wait_connect=False)
    # mac80211_hwsim does not support SAE offload, so accept both a successful
    # connection and association rejection.
    outcomes = ["CTRL-EVENT-CONNECTED", "CTRL-EVENT-ASSOC-REJECT",
                "Association request to the driver failed"]
    if wpas.wait_event(outcomes, timeout=15) is None:
        raise Exception("No connection result reported")
def run_sae_password_id(dev, apdev, groups=None):
    # Shared helper for the SAE password identifier test cases; groups
    # selects the sae_groups configuration ("" or None means defaults).
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae")
    params['wpa_key_mgmt'] = 'SAE'
    if groups:
        params['sae_groups'] = groups
    params['sae_password'] = ['secret|mac=ff:ff:ff:ff:ff:ff|id=pw id',
                              'foo|mac=02:02:02:02:02:02',
                              'another secret|mac=ff:ff:ff:ff:ff:ff|id=' + 29*'A']
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].request("SET sae_groups " + (groups or ""))

    def connect_cycle(password, pw_id):
        # Connect with the given password/identifier pair and tear the
        # network down again.
        dev[0].connect("test-sae", sae_password=password,
                       sae_password_id=pw_id,
                       key_mgmt="SAE", scan_freq="2412")
        dev[0].request("REMOVE_NETWORK all")
        dev[0].wait_disconnected()

    connect_cycle("secret", "pw id")
    # SAE Password Identifier element with the exact same length as the
    # optional Anti-Clogging Token field
    connect_cycle("another secret", 29*'A')
    # An unknown identifier must be reported and must not connect
    dev[0].connect("test-sae", sae_password="secret", sae_password_id="unknown",
                   key_mgmt="SAE", scan_freq="2412", wait_connect=False)
    ev = dev[0].wait_event(["CTRL-EVENT-SAE-UNKNOWN-PASSWORD-IDENTIFIER"],
                           timeout=10)
    if ev is None:
        raise Exception("Unknown password identifier not reported")
    dev[0].request("REMOVE_NETWORK all")
def test_sae_password_id(dev, apdev):
    """SAE and password identifier"""
    # Default group configuration
    return run_sae_password_id(dev, apdev, groups="")
def test_sae_password_id_ecc(dev, apdev):
    """SAE and password identifier (ECC)"""
    # Group 19: NIST P-256 (ECC)
    return run_sae_password_id(dev, apdev, groups="19")
def test_sae_password_id_ffc(dev, apdev):
    """SAE and password identifier (FFC)"""
    # Group 15: 3072-bit MODP (FFC)
    return run_sae_password_id(dev, apdev, groups="15")
def test_sae_password_id_only(dev, apdev):
    """SAE and password identifier (exclusively)"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    # The only configured password requires an identifier
    params = hostapd.wpa2_params(ssid="test-sae")
    params.update({'wpa_key_mgmt': 'SAE',
                   'sae_password': 'secret|id=pw id'})
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].request("SET sae_groups ")
    dev[0].connect("test-sae", sae_password="secret", sae_password_id="pw id",
                   key_mgmt="SAE", scan_freq="2412")
def test_sae_forced_anti_clogging_pw_id(dev, apdev):
    """SAE anti clogging (forced and Password Identifier)"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae")
    params['wpa_key_mgmt'] = 'SAE'
    # Threshold 0 forces the anti-clogging token exchange for every station
    params['sae_anti_clogging_threshold'] = '0'
    params['sae_password'] = 'secret|id=' + 29*'A'
    hostapd.add_ap(apdev[0], params)
    for sta in (dev[0], dev[1]):
        sta.request("SET sae_groups ")
        sta.connect("test-sae", sae_password="secret",
                    sae_password_id=29*'A', key_mgmt="SAE", scan_freq="2412")
def test_sae_reauth(dev, apdev):
    """SAE reauthentication"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae",
                                 passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    params["ieee80211w"] = "2"
    hapd = hostapd.add_ap(apdev[0], params)
    dev[0].request("SET sae_groups ")
    netid = dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                           ieee80211w="2", scan_freq="2412")
    # Divert AP management frame processing so the station-side disconnect
    # is not processed normally by the AP, then reconnect from scratch.
    hapd.set("ext_mgmt_frame_handling", "1")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=10)
    hapd.set("ext_mgmt_frame_handling", "0")
    dev[0].request("PMKSA_FLUSH")
    dev[0].request("REASSOCIATE")
    dev[0].wait_connected(timeout=10, error="Timeout on re-connection")
def test_sae_anti_clogging_during_attack(dev, apdev):
    """SAE anti clogging during an attack"""
    monitor_ifname = apdev[1]["ifname"]
    try:
        run_sae_anti_clogging_during_attack(dev, apdev)
    finally:
        # Always tear down the monitor interface set up by the helper
        stop_monitor(monitor_ifname)
def build_sae_commit(bssid, addr, group=21, token=None):
    """Construct an IEEE 802.11 Authentication frame carrying an SAE commit.

    bssid and addr are raw 6-octet addresses. group selects which
    precomputed (scalar, element) pair is embedded; only groups 19 and 21
    are supported. token, if given, is inserted as the Anti-Clogging Token
    between the fixed Authentication fields and the scalar.
    """
    if group == 19:
        scalar = binascii.unhexlify("7332d3ebff24804005ccd8c56141e3ed8d84f40638aa31cd2fac11d4d2e89e7b")
        element = binascii.unhexlify("954d0f4457066bff3168376a1d7174f4e66620d1792406f613055b98513a7f03a538c13dfbaf2029e2adc6aa96aa0ddcf08ac44887b02f004b7f29b9dbf4b7d9")
    elif group == 21:
        scalar = binascii.unhexlify("001eec673111b902f5c8a61c8cb4c1c4793031aeea8c8c319410903bc64bcbaea134ab01c4e016d51436f5b5426f7e2af635759a3033fb4031ea79f89a62a3e2f828")
        element = binascii.unhexlify("00580eb4b448ea600ea277d5e66e4ed37db82bb04ac90442e9c3727489f366ba4b82f0a472d02caf4cdd142e96baea5915d71374660ee23acbaca38cf3fe8c5fb94b01abbc5278121635d7c06911c5dad8f18d516e1fbe296c179b7c87a1dddfab393337d3d215ed333dd396da6d8f20f798c60d054f1093c24d9c2d98e15c030cc375f0")
    else:
        # Previously this fell through (a stray "pass") and crashed later
        # with a NameError; fail explicitly instead.
        raise ValueError("Unsupported SAE group: %d" % group)
    # 802.11 header: fc=0x00b0 (Authentication), duration, DA/SA/BSSID,
    # sequence control
    frame = binascii.unhexlify("b0003a01")
    frame += bssid + addr + bssid
    frame += binascii.unhexlify("1000")
    auth_alg = 3  # SAE
    transact = 1  # commit message
    status = 0    # success
    frame += struct.pack("<HHHH", auth_alg, transact, status, group)
    if token:
        frame += token
    frame += scalar + element
    return frame
def sae_rx_commit_token_req(sock, radiotap, send_two=False):
    """Process one frame from the monitor socket.

    If the frame is an SAE anti-clogging token request addressed to one of
    the spoofed (f2:...) stations, reply with a commit that echoes the
    token (twice when send_two is set) and return True. Any other frame
    returns False.
    """
    msg = sock.recv(1500)
    # Radiotap header: version, pad, header length, fields-present bitmap.
    # (Renamed the length field; it previously shadowed the builtin len().)
    ver, pad, rt_len, present = struct.unpack('<BBHL', msg[0:8])
    frame = msg[rt_len:]
    fc, duration = struct.unpack('<HH', frame[0:4])
    if fc != 0xb0:  # only Authentication frames are of interest
        return False
    frame = frame[4:]
    da = frame[0:6]
    if da[0] != 0xf2:  # only react for the locally spoofed stations
        return False
    sa = frame[6:12]
    bssid = frame[12:18]
    body = frame[20:]
    alg, seq, status, group = struct.unpack('<HHHH', body[0:8])
    # Expect SAE (3), commit (1), ANTI_CLOGGING_TOKEN_REQ status (76)
    if alg != 3 or seq != 1 or status != 76:
        return False
    token = body[8:]
    frame = build_sae_commit(bssid, da, token=token)
    sock.send(radiotap + frame)
    if send_two:
        sock.send(radiotap + frame)
    return True
def run_sae_anti_clogging_during_attack(dev, apdev):
    # Helper for test_sae_anti_clogging_during_attack(); the caller is
    # responsible for stopping the monitor interface on apdev[1].
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_groups'] = '21'
    hapd = hostapd.add_ap(apdev[0], params)
    # Prepare two real stations that will try to connect during the attack
    dev[0].scan_for_bss(hapd.own_addr(), freq=2412)
    dev[0].request("SET sae_groups 21")
    dev[1].scan_for_bss(hapd.own_addr(), freq=2412)
    dev[1].request("SET sae_groups 21")
    # Monitor interface used to inject spoofed SAE commit frames
    sock = start_monitor(apdev[1]["ifname"])
    radiotap = radiotap_build()
    bssid = binascii.unhexlify(hapd.own_addr().replace(':', ''))
    # First wave: 16 spoofed f2:... stations, each commit sent twice, to
    # push the AP into requesting anti-clogging tokens
    for i in range(16):
        addr = binascii.unhexlify("f2%010x" % i)
        frame = build_sae_commit(bssid, addr)
        sock.send(radiotap + frame)
        sock.send(radiotap + frame)
    count = 0
    for i in range(150):
        if sae_rx_commit_token_req(sock, radiotap, send_two=True):
            count += 1
    logger.info("Number of token responses sent: %d" % count)
    if count < 10:
        raise Exception("Too few token responses seen: %d" % count)
    # Second wave of spoofed stations with a different address prefix
    for i in range(16):
        addr = binascii.unhexlify("f201%08x" % i)
        frame = build_sae_commit(bssid, addr)
        sock.send(radiotap + frame)
    count = 0
    for i in range(150):
        if sae_rx_commit_token_req(sock, radiotap):
            count += 1
            if count == 10:
                break
    if count < 5:
        raise Exception("Too few token responses in second round: %d" % count)
    # While continuing the flood, verify that the two legitimate stations
    # can still complete SAE authentication
    dev[0].connect("test-sae", psk="12345678", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    dev[1].connect("test-sae", psk="12345678", key_mgmt="SAE",
                   scan_freq="2412", wait_connect=False)
    count = 0
    connected0 = False
    connected1 = False
    for i in range(1000):
        if sae_rx_commit_token_req(sock, radiotap):
            count += 1
        addr = binascii.unhexlify("f202%08x" % i)
        frame = build_sae_commit(bssid, addr)
        sock.send(radiotap + frame)
        while dev[0].mon.pending():
            ev = dev[0].mon.recv()
            logger.debug("EV0: " + ev)
            if "CTRL-EVENT-CONNECTED" in ev:
                connected0 = True
        while dev[1].mon.pending():
            ev = dev[1].mon.recv()
            logger.debug("EV1: " + ev)
            if "CTRL-EVENT-CONNECTED" in ev:
                connected1 = True
        if connected0 and connected1:
            break
        # Brief yield between injection rounds (effectively a busy loop)
        time.sleep(0.00000001)
    if not connected0:
        raise Exception("Real station(0) did not get connected")
    if not connected1:
        raise Exception("Real station(1) did not get connected")
    if count < 1:
        raise Exception("Too few token responses in third round: %d" % count)
def test_sae_sync(dev, apdev):
    """SAE dot11RSNASAESync"""
    if "SAE" not in dev[0].get_capability("auth_alg"):
        raise HwsimSkip("SAE not supported")
    params = hostapd.wpa2_params(ssid="test-sae", passphrase="12345678")
    params['wpa_key_mgmt'] = 'SAE'
    params['sae_sync'] = '1'
    hostapd.add_ap(apdev[0], params)

    # TODO: More complete dot11RSNASAESync testing. For now, this is really only
    # checking that sae_sync config parameter is accepted.
    stations = (dev[0], dev[1])
    for sta in stations:
        sta.request("SET sae_groups ")
    netids = []
    for sta in stations:
        sta.scan(freq="2412")
        netids.append(sta.connect("test-sae", psk="12345678", key_mgmt="SAE",
                                  scan_freq="2412", only_add_network=True))
    # Start both connections before waiting so the attempts overlap
    for sta, netid in zip(stations, netids):
        sta.select_network(netid)
    for sta in stations:
        sta.wait_connected(timeout=10)
| [
"j@w1.fi"
] | j@w1.fi |
7ba9897c2ad10cc4193fe28eb298bb750d1d3d92 | e164451209c4f5ff5f433a40f0b784ed4b7bfe25 | /harvesting_2Dscan.py | abc7672bb4d289720e0316be80acb9c7ae45a004 | [] | no_license | longlivedpeople/Galapago-Framework | bd556a71d1baf2c95d36786f24b33fbe2ee2446c | e4966b5f280b8c72cd53eb3215f4ab11e87ed9a0 | refs/heads/master | 2023-08-19T00:19:26.740105 | 2023-08-11T14:57:41 | 2023-08-11T14:59:33 | 196,342,305 | 1 | 0 | null | 2023-04-12T09:01:03 | 2019-07-11T07:26:38 | Python | UTF-8 | Python | false | false | 18,265 | py | import ROOT as r
from ROOT import gROOT, TCanvas, TFile, TGraphErrors, SetOwnership
import math, sys, optparse, array, copy, os
import gc, inspect, __main__
import numpy as np
import time
import shutil
import include.Sample as Sample
import include.Launcher as Launcher
import include.helper as helper
import include.Canvas as Canvas
import include.CutManager as CutManager
#from include.Utils import *
### Cross-sections used for signal normalization
xsecs = {}
xsecs['400'] = 5040.  # presumably pb (original comment said "pb-1") -- verify

### Define palettes
# Register two 255-step linear gradients with ROOT: white -> teal for the
# background and white -> red for the signal. The returned value is the
# index of the first allocated color.
bkgc = r.TColor.CreateGradientColorTable(2, np.array([0.00, 1.00]),
                                         np.array([1.00, 0.00]),
                                         np.array([1.00, 153/255.]),
                                         np.array([1.00, 153./255.]), 255)
sigc = r.TColor.CreateGradientColorTable(2, np.array([0.00, 1.00]),
                                         np.array([1.00, 204./255.]),
                                         np.array([1.00, 0.00]),
                                         np.array([1.00, 0.00]), 255)
# Consecutive color indices making up each palette
bkgpalette_ = [bkgc + i for i in range(255)]
sigpalette_ = [sigc + i for i in range(255)]
bkgpalette = np.array(bkgpalette_, dtype=np.int32)
sigpalette = np.array(sigpalette_, dtype=np.int32)
#####################
#####
###
### Function to plot a 2D histogram of the estimated background on its own
### (could accept either a data-driven or a Monte Carlo estimate)
###
### - Plots are drawn without a ratio panel and without data in the signal
###   region (by the moment)
###
#####
#####################
def makeBackgroundPlot2D(lumi, hname_bkg, zlog, treeDATA, inputdir, rebin = False, lines = None, xlabel = '', outtag = '', outdir = '', LLlabel = '', extralabel = '', xlog = False, ylog = False):
    """Draw the predicted-background 2D histogram hname_bkg from treeDATA.

    lumi: luminosity label (fb-1); zlog/xlog/ylog: axis log scales;
    lines: optional list of (x1, y1, x2, y2) lines drawn in red;
    outdir: output directory (defaults to 2DPlots_<outtag>/ next to the
    main script); rebin/xlabel/LLlabel are currently unused but kept for
    interface compatibility.
    """
    luminosity = lumi
    hbkg = treeDATA.getLoopTH2F(inputdir, hname_bkg)
    hbkg.GetXaxis().SetTitleSize(0.045)
    hbkg.GetYaxis().SetTitleSize(0.045)
    hbkg.SetMinimum(0.1)

    # Use the module-level background palette. (The previous version also
    # re-registered a new gradient color table on every call and discarded
    # the result, needlessly allocating ROOT colors.)
    r.gStyle.SetPalette(255, bkgpalette)

    ### Canvas object
    plot = Canvas.Canvas(outtag+hname_bkg, 'png,pdf', 0.35, 0.65, 0.7, 0.89, 1, ww = 610, hh = 600, lsize = 0.028)
    plot.addHisto(hbkg, 'COLZ,TEXT', '', '', '', 1, 0)
    # Guard against a None default instead of a shared mutable default list
    for line in (lines or []):
        plot.addLine(line[0], line[1], line[2], line[3], r.kRed, 2)

    ### Extralabel
    plot.addLatex(0.13, 0.93, 'Background (predicted)', font = 42)
    plot.addLatex(0.4, 0.85, extralabel, font = 42, size = 0.03)

    ### Save it
    if not outdir:
        outdir = os.path.dirname(os.path.abspath(__main__.__file__)) + '/2DPlots_' + outtag + '/'
    plot.save(1, 1, ylog, luminosity, '', outputDir = outdir, zlog = zlog, is2d = False, xlog = xlog)
#####################
#####
###
### Function to plot a 2D histogram of the signal simulation on its own
### (no background overlay)
###
### - Plots are drawn without a ratio panel and without data (by the moment)
### - xsec to normalize the signal given in fb
###
#####
#####################
def makeSignalPlot2D(name, lumi, hname_sig, zlog, treeSI, inputdir, rebin = False, lines = None, legend = '', xlabel = '', outtag = '', outdir = '', LLlabel = '', extralabel = '', xlog = False, ylog = False):
    """Draw the signal-simulation 2D histogram hname_sig from treeSI.

    name: suffix of the output canvas name ('SIOnly_' is prepended);
    legend: text label identifying the signal point; lines: optional list
    of (x1, y1, x2, y2) lines drawn in red; outdir defaults to
    2DPlots_<outtag>/ next to the main script; rebin/xlabel/LLlabel are
    currently unused but kept for interface compatibility.
    """
    luminosity = lumi
    hsig = treeSI.getLoopTH2F(inputdir, hname_sig)
    hsig.GetXaxis().SetTitleSize(0.045)
    hsig.GetYaxis().SetTitleSize(0.045)
    r.gStyle.SetPalette(r.kBird)

    ### Canvas object
    plot = Canvas.Canvas('SIOnly_'+name, 'png,pdf', 0.35, 0.65, 0.7, 0.89, 1, ww = 610, hh = 600, lsize = 0.028)
    plot.addHisto(hsig, 'COLZ', '', '', '', 1, 0)

    ### Extralabel
    plot.addLatex(0.13, 0.93, legend, font = 42, size = 0.032)
    plot.addLatex(0.4, 0.85, extralabel, font = 42, size = 0.03)
    # Guard against a None default instead of a shared mutable default list
    for line in (lines or []):
        plot.addLine(line[0], line[1], line[2], line[3], r.kRed, 2)

    ### Save it
    if not outdir:
        outdir = os.path.dirname(os.path.abspath(__main__.__file__)) + '/2DPlots_' + outtag + '/'
    plot.save(1, 1, ylog, luminosity, '', outputDir = outdir, zlog = zlog, xlog = xlog, is2d = False)
def countJointYields2D(hname_bkg, hname_sig, treeDATA, treeSI, inputdir, xmins, ymins):
    """Print background and signal yields integrated from each (xmin, ymin)
    threshold pair up to and including the overflow bins."""
    h_bkg = treeDATA.getLoopTH2F(inputdir, hname_bkg)
    h_sig = treeSI.getLoopTH2F(inputdir, hname_sig)
    # Upper integration limits: the overflow bin on each axis
    xmax_bkg, ymax_bkg = h_bkg.GetNbinsX() + 1, h_bkg.GetNbinsY() + 1
    xmax_sig, ymax_sig = h_sig.GetNbinsX() + 1, h_sig.GetNbinsY() + 1
    # Lower integration limits: the bins holding the threshold values
    xlo_bkg = [h_bkg.GetXaxis().FindBin(x) for x in xmins]
    ylo_bkg = [h_bkg.GetYaxis().FindBin(y) for y in ymins]
    xlo_sig = [h_sig.GetXaxis().FindBin(x) for x in xmins]
    ylo_sig = [h_sig.GetYaxis().FindBin(y) for y in ymins]
    for i, xval in enumerate(xmins):
        for j, yval in enumerate(ymins):
            sig_yield = h_sig.Integral(xlo_sig[i], xmax_sig, ylo_sig[j], ymax_sig)
            bkg_yield = h_bkg.Integral(xlo_bkg[i], xmax_bkg, ylo_bkg[j], ymax_bkg)
            print(">> For (%f, %f): " % (xval, yval), " Bkg: %f" % bkg_yield, " Sig: %f" % sig_yield)
def countYields2D(hname, tree, inputdir, xedges, yedges):
    """Print the histogram yield in each (x, y) region delimited by the
    given edge values; the last region on each axis extends into overflow."""
    histo = tree.getLoopTH2F(inputdir, hname)
    # Translate edge values into bin numbers and append the overflow bin so
    # the last region is closed on the high side.
    xbins = [histo.GetXaxis().FindBin(x) for x in xedges] + [histo.GetNbinsX() + 1]
    ybins = [histo.GetYaxis().FindBin(y) for y in yedges] + [histo.GetNbinsY() + 1]
    # NOTE(review): TH2.Integral() is inclusive of both bin arguments, so
    # adjacent regions share (i.e. double count) their boundary bin --
    # confirm this is intended before summing regions.
    for i in range(len(xbins) - 1):
        for j in range(len(ybins) - 1):
            print('Bin in [{0},{1},{2},{3}] : '.format(xbins[i], xbins[i+1], ybins[j], ybins[j+1]), histo.Integral(xbins[i], xbins[i+1], ybins[j], ybins[j+1]))
################################# GLOBAL VARIABLES DEFINITION ####################################
runningfile = os.path.abspath(__file__)
# Absolute path of the directory containing this script, with trailing '/'
WORKPATH = '/'.join(runningfile.split('/')[:-1]) + '/'
if __name__ == "__main__":
    # Command-line interface: -i/--input points at the directory holding the
    # histograms produced by the earlier Galapago loop step.
    parser = optparse.OptionParser(usage='usage: %prog [opts] FilenameWithSamples', version='%prog 1.0')
    parser.add_option('-i', '--input', action='store', type=str, dest='input', default='', help='Target directory')
    (opts, args) = parser.parse_args()
    ############# Set the TDR plot style
    r.gROOT.LoadMacro(WORKPATH + 'include/tdrstyle.C')
    r.gROOT.SetBatch(1)
    r.setTDRStyle()
    r.gStyle.SetPadRightMargin(0.12)
    ############# Dat file
    filename = 'dat/Samples_cern_UltraLegacy.dat'
    ############# EG data definition
    # 2016 DoubleEG: only the non-HIPM (G, H) eras are enabled here; the
    # HIPM eras are kept commented out.
    DoubleEG2016 = []
    #DoubleEG2016.append('DoubleEG_Run2016B_HIPM')
    #DoubleEG2016.append('DoubleEG_Run2016C_HIPM')
    #DoubleEG2016.append('DoubleEG_Run2016D_HIPM')
    #DoubleEG2016.append('DoubleEG_Run2016E_HIPM')
    #DoubleEG2016.append('DoubleEG_Run2016F_HIPM')
    #DoubleEG2016.append('DoubleEG_Run2016F_noHIPM')
    DoubleEG2016.append('DoubleEG_Run2016G_noHIPM')
    DoubleEG2016.append('DoubleEG_Run2016H_noHIPM')
    DoubleEG2017 = []
    DoubleEG2017.append('DoubleEG_Run2017B')
    DoubleEG2017.append('DoubleEG_Run2017C')
    DoubleEG2017.append('DoubleEG_Run2017D')
    DoubleEG2017.append('DoubleEG_Run2017E')
    DoubleEG2017.append('DoubleEG_Run2017F')
    EGamma2018 = []
    EGamma2018.append('EGamma_Run2018A')
    EGamma2018.append('EGamma_Run2018B')
    EGamma2018.append('EGamma_Run2018C')
    EGamma2018.append('EGamma_Run2018D')
    ############# Muon data definition
    DoubleMuon2016 = []
    DoubleMuon2016.append('DoubleMuon_Run2016B_HIPM')
    DoubleMuon2016.append('DoubleMuon_Run2016C_HIPM')
    DoubleMuon2016.append('DoubleMuon_Run2016D_HIPM')
    DoubleMuon2016.append('DoubleMuon_Run2016E_HIPM')
    DoubleMuon2016.append('DoubleMuon_Run2016F_HIPM')
    DoubleMuon2016.append('DoubleMuon_Run2016F_noHIPM')
    DoubleMuon2016.append('DoubleMuon_Run2016G_noHIPM')
    DoubleMuon2016.append('DoubleMuon_Run2016H_noHIPM')
    DoubleMuon2018 = []
    DoubleMuon2018.append('DoubleMuon_Run2018A')
    DoubleMuon2018.append('DoubleMuon_Run2018B')
    DoubleMuon2018.append('DoubleMuon_Run2018C')
    DoubleMuon2018.append('DoubleMuon_Run2018D')
    ############# Luminosity definition
    lumi2016_MM = 35.9 # fb-1
    lumi2016_EE = 16.2 # fb-1
    lumi2017 = 41.5 # fb-1
    lumi2018 = 59.7 # fb-1
    ############# Galapago Tree definitions
    # NOTE(review): this EE_SRIa_lines is unused leftover -- the line lists
    # are (re)defined only inside the dead string block below.
    EE_SRIa_lines = []
    treeDATA_EG2016 = Sample.Tree( fileName = helper.selectSamples(WORKPATH + filename, DoubleEG2016, 'DATA'), name = 'DATA', isdata = 1 )
    treeDATA_EG2017 = Sample.Tree( fileName = helper.selectSamples(WORKPATH + filename, DoubleEG2017, 'DATA'), name = 'DATA', isdata = 1 )
    treeDATA_EG2018 = Sample.Tree( fileName = helper.selectSamples(WORKPATH + filename, EGamma2018, 'DATA'), name = 'DATA', isdata = 1 )
    treeDATA_Mu2016 = Sample.Tree( fileName = helper.selectSamples(WORKPATH + filename, DoubleMuon2016, 'DATA'), name = 'DATA', isdata = 1 )
    treeDATA_Mu2018 = Sample.Tree( fileName = helper.selectSamples(WORKPATH + filename, DoubleMuon2018, 'DATA'), name = 'DATA', isdata = 1 )
    ########################################################
    ######## Background optimization (Lxy/dxy bins) ########
    ########################################################
    # Output directory (EOS web area) for the produced plots
    www = '/eos/user/f/fernance/www/DisplacedLeptons-analysis/SignalRegionOptimization/2DPlots_nLL1_forMassWindows_plusLxy_160223/'
    #www = '/eos/user/f/fernance/www/DisplacedLeptons-analysis/SignalRegionOptimization/2DPlots_nLL1_forMassWindows_140223/'
    #### -> Electron plots
    makeBackgroundPlot2D(lumi = lumi2016_EE, hname_bkg = 'hEEBCRI_mass_trackIxy', zlog = True, treeDATA = treeDATA_EG2016, inputdir = opts.input, lines = [], xlabel = '', outtag = '2016', LLlabel = 'EE', extralabel = 'm_{ee} < 81 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = False, xlog = False)
    makeBackgroundPlot2D(lumi = lumi2017, hname_bkg = 'hEEBCRI_mass_trackIxy', zlog = True, treeDATA = treeDATA_EG2017, inputdir = opts.input, lines = [], xlabel = '', outtag = '2017', LLlabel = 'EE', extralabel = 'm_{ee} < 81 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = False, xlog = False)
    makeBackgroundPlot2D(lumi = lumi2018, hname_bkg = 'hEEBCRI_mass_trackIxy', zlog = True, treeDATA = treeDATA_EG2018, inputdir = opts.input, lines = [], xlabel = '', outtag = '2018', LLlabel = 'EE', extralabel = 'm_{ee} < 81 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = False, xlog = False)
    #### -> Muon plots
    makeBackgroundPlot2D(lumi = lumi2016_MM, hname_bkg = 'hMMBCRI_mass_trackIxy', zlog = True, treeDATA = treeDATA_Mu2016, inputdir = opts.input, lines = [], xlabel = '', outtag = '2016', LLlabel = 'MM', extralabel = 'm_{#mu#mu} < 81 GeV, N_{#mu#mu} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = False, xlog = False)
    makeBackgroundPlot2D(lumi = lumi2018, hname_bkg = 'hMMBCRI_mass_trackIxy', zlog = True, treeDATA = treeDATA_Mu2018, inputdir = opts.input, lines = [], xlabel = '', outtag = '2018', LLlabel = 'MM', extralabel = 'm_{#mu#mu} < 81 GeV, N_{#mu#mu} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = False, xlog = False)
    # NOTE: the triple-quoted string below is dead code kept for reference
    # (earlier Lxy/dxy signal-region optimization plots); it is never
    # executed.
    """
    #### -> Electron plots
    EE_SRIa_lines = []
    EE_SRIa_lines.append([3e-2, 20, 1e2, 20])
    EE_SRIa_lines.append([3e-2, 6, 1e2, 6])
    EE_SRIa_lines.append([3e-2, 3, 1e2, 3])
    EE_SRIa_lines.append([3e-2, 3, 3e-2, 1e4])
    makeBackgroundPlot2D(lumi = lumi2016_EE, hname_bkg = 'hEEBCRIa_Lxy_trackIxy_log', zlog = True, treeDATA = treeDATA_EG2016, inputdir = opts.input, lines = EE_SRIa_lines, xlabel = '', outtag = '2016', LLlabel = 'EE', extralabel = 'm_{ee} < 81 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = True, xlog = True)
    makeBackgroundPlot2D(lumi = lumi2017, hname_bkg = 'hEEBCRIa_Lxy_trackIxy_log', zlog = True, treeDATA = treeDATA_EG2017, inputdir = opts.input, lines = EE_SRIa_lines, xlabel = '', outtag = '2017', LLlabel = 'EE', extralabel = 'm_{ee} < 81 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = True, xlog = True)
    makeBackgroundPlot2D(lumi = lumi2018, hname_bkg = 'hEEBCRIa_Lxy_trackIxy_log', zlog = True, treeDATA = treeDATA_EG2018, inputdir = opts.input, lines = EE_SRIa_lines, xlabel = '', outtag = '2018', LLlabel = 'EE', extralabel = 'm_{ee} < 81 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = True, xlog = True)
    EE_SRIb_lines = []
    EE_SRIb_lines.append([2e-2, 15, 1e2, 15])
    EE_SRIb_lines.append([2e-2, 6, 1e2, 6])
    EE_SRIb_lines.append([2e-2, 3, 1e2, 3])
    EE_SRIb_lines.append([2e-2, 3, 2e-2, 1e4])
    makeBackgroundPlot2D(lumi = lumi2016_EE, hname_bkg = 'hEEBCRIb_Lxy_trackIxy_log', zlog = True, treeDATA = treeDATA_EG2016, inputdir = opts.input, lines = EE_SRIb_lines, xlabel = '', outtag = '2016', LLlabel = 'EE', extralabel = 'm_{ee} > 101 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = True, xlog = True)
    makeBackgroundPlot2D(lumi = lumi2017, hname_bkg = 'hEEBCRIb_Lxy_trackIxy_log', zlog = True, treeDATA = treeDATA_EG2017, inputdir = opts.input, lines = EE_SRIb_lines, xlabel = '', outtag = '2017', LLlabel = 'EE', extralabel = 'm_{ee} > 101 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = True, xlog = True)
    makeBackgroundPlot2D(lumi = lumi2018, hname_bkg = 'hEEBCRIb_Lxy_trackIxy_log', zlog = True, treeDATA = treeDATA_EG2018, inputdir = opts.input, lines = EE_SRIb_lines, xlabel = '', outtag = '2018', LLlabel = 'EE', extralabel = 'm_{ee} > 101 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = True, xlog = True)
    #### -> Muon plots
    MM_SRIa_lines = []
    MM_SRIa_lines.append([2e-2, 3, 1e2, 3])
    MM_SRIa_lines.append([2e-2, 9, 1e2, 9])
    MM_SRIa_lines.append([0.2, 3, 0.2, 1e4])
    MM_SRIa_lines.append([2e-2, 3, 2e-2, 1e4])
    makeBackgroundPlot2D(lumi = lumi2018, hname_bkg = 'hMMBCRIa_Lxy_trackIxy_log', zlog = True, treeDATA = treeDATA_Mu2018, inputdir = opts.input, lines = MM_SRIa_lines, xlabel = '', outtag = '2018', LLlabel = 'MM', extralabel = 'm_{#mu#mu} < 81 GeV, N_{#mu#mu} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = True, xlog = True)
    makeBackgroundPlot2D(lumi = lumi2016_MM, hname_bkg = 'hMMBCRIa_Lxy_trackIxy_log', zlog = True, treeDATA = treeDATA_Mu2016, inputdir = opts.input, lines = MM_SRIa_lines, xlabel = '', outtag = '2016', LLlabel = 'MM', extralabel = 'm_{#mu#mu} < 81 GeV, N_{#mu#mu} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = True, xlog = True)
    MM_SRIb_lines = []
    MM_SRIb_lines.append([2e-2, 3, 1e2, 3])
    MM_SRIb_lines.append([2e-2, 9, 1e2, 9])
    MM_SRIb_lines.append([7e-2, 3, 7e-2, 1e4])
    MM_SRIb_lines.append([2e-2, 3, 2e-2, 1e4])
    makeBackgroundPlot2D(lumi = lumi2018, hname_bkg = 'hMMBCRIb_Lxy_trackIxy_log', zlog = True, treeDATA = treeDATA_Mu2018, inputdir = opts.input, lines = MM_SRIb_lines, xlabel = '', outtag = '2018', LLlabel = 'MM', extralabel = 'm_{#mu#mu} < 101 GeV, N_{#mu#mu} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = True, xlog = True)
    makeBackgroundPlot2D(lumi = lumi2016_MM, hname_bkg = 'hMMBCRIb_Lxy_trackIxy_log', zlog = True, treeDATA = treeDATA_Mu2016, inputdir = opts.input, lines = MM_SRIb_lines, xlabel = '', outtag = '2016', LLlabel = 'MM', extralabel = 'm_{#mu#mu} > 101 GeV, N_{#mu#mu} = 1, |#Delta#Phi| < #pi/4', outdir = www, ylog = True, xlog = True)
    ####################################################
    ######## Signal optimization (Lxy/dxy bins) ########
    ####################################################
    Signals = []
    Signals.append('HSS_500_50_1')
    Signals.append('HSS_500_50_10')
    Signals.append('HSS_500_50_100')
    Signals.append('HSS_500_50_1000')
    Signals.append('HSS_500_150_1')
    Signals.append('HSS_500_150_10')
    Signals.append('HSS_500_150_100')
    Signals.append('HSS_500_150_1000')
    #Signals.append('RPV_350_148_1')
    #Signals.append('RPV_350_148_10')
    #Signals.append('RPV_350_148_100')
    #Signals.append('RPV_350_148_1000')
    for signal in Signals:
        label = signal + '_2018'
        values = signal.split('_')
        point = '({0} GeV, {1} GeV, {2} mm)'.format(values[1], values[2], values[3])
        llegend = 'H#rightarrowSS ' + point if 'HSS' in signal else 'RPV ' + point
        treeSI_2018 = Sample.Tree( fileName = helper.selectSamples(WORKPATH + 'dat/CombSignal_2018UL_Fall22.dat', [label], 'SI'), name = 'SI', isdata = 0 )
        makeSignalPlot2D(name = 'hMMSRIa_'+label, lumi = lumi2018, hname_sig = 'hMMSRIa_Lxy_trackIxy_log', zlog = True, treeSI = treeSI_2018, inputdir = opts.input, legend = llegend, lines = MM_SRIa_lines, xlabel = '', outtag = '2018', LLlabel = 'MM', extralabel = 'm_{#mu#mu} < 81 GeV, N_{#mu#mu} = 1, |#Delta#Phi| < #pi/4', outdir = www, xlog = True, ylog = True)
        makeSignalPlot2D(name = 'hEESRIa_'+label, lumi = lumi2018, hname_sig = 'hEESRIa_Lxy_trackIxy_log', zlog = True, treeSI = treeSI_2018, inputdir = opts.input, legend = llegend, lines = EE_SRIa_lines, xlabel = '', outtag = '2018', LLlabel = 'EE', extralabel = 'm_{ee} < 81 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, xlog = True, ylog = True)
        makeSignalPlot2D(name = 'hMMSRIb_'+label, lumi = lumi2018, hname_sig = 'hMMSRIb_Lxy_trackIxy_log', zlog = True, treeSI = treeSI_2018, inputdir = opts.input, legend = llegend, lines = MM_SRIb_lines, xlabel = '', outtag = '2018', LLlabel = 'MM', extralabel = 'm_{#mu#mu} > 101 GeV, N_{#mu#mu} = 1, |#Delta#Phi| < #pi/4', outdir = www, xlog = True, ylog = True)
        makeSignalPlot2D(name = 'hEESRIb_'+label, lumi = lumi2018, hname_sig = 'hEESRIb_Lxy_trackIxy_log', zlog = True, treeSI = treeSI_2018, inputdir = opts.input, legend = llegend, lines = EE_SRIb_lines, xlabel = '', outtag = '2018', LLlabel = 'EE', extralabel = 'm_{ee} > 101 GeV, N_{ee} = 1, |#Delta#Phi| < #pi/4', outdir = www, xlog = True, ylog = True)
    """
| [
"celia.fernandez.madrazo@cern.ch"
] | celia.fernandez.madrazo@cern.ch |
f7afb7aabe368850e011393d0592ecf4deedc093 | b7e1d227d41542bf20f92d08bb0d453058cf6d19 | /orders/migrations/0004_auto_20191013_1723.py | 67a3105db42b46792574796f27d8aa582c0765e5 | [] | no_license | rusrom/django-ecommerce | dfa35bdb2832abf4077dd0883ec0e5e79ffa9662 | aebef77713ab7c1c2118d5c190deee5ccfbd3cb9 | refs/heads/master | 2020-08-04T23:36:09.610480 | 2019-10-22T14:00:04 | 2019-10-22T14:00:04 | 212,315,359 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 909 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2019-10-13 17:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Link each Order to optional billing and shipping Address records."""

    dependencies = [
        ('addresses', '0001_initial'),
        ('orders', '0003_auto_20191004_1111'),
    ]

    operations = [
        # Both FKs are nullable so pre-existing orders remain valid; deleting
        # the referenced Address cascades to the Order (CASCADE).
        migrations.AddField(
            model_name='order',
            name='billing_address',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='billing_address', to='addresses.Address'),
        ),
        migrations.AddField(
            model_name='order',
            name='shipping_address',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='shipping_address', to='addresses.Address'),
        ),
    ]
| [
"rusrom@guyfawkes.33mail.com"
] | rusrom@guyfawkes.33mail.com |
66998a2b2dbaa169192f4819c93e0ab1ea9968bf | 038e6e41d117431869edad4952a5b1463d5131bc | /users/migrations/0002_auto_20210129_1642.py | db97d69b8e56b712626a94b937e9838ae9e2dd5d | [
"MIT"
] | permissive | MikaelSantilio/aprepi-django | c49290855b7c83ecaf08de82ee9eedf8e8baa15a | 5e2b5ecffb287eab929c0759ea35ab073cc19d96 | refs/heads/master | 2023-06-19T00:18:15.986920 | 2021-06-15T20:15:59 | 2021-06-15T20:15:59 | 329,428,268 | 0 | 1 | MIT | 2021-02-05T16:21:45 | 2021-01-13T20:50:18 | Python | UTF-8 | Python | false | false | 566 | py | # Generated by Django 3.1.5 on 2021-01-29 19:42
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make Benefactor.user a one-to-one primary key on the user model."""

    dependencies = [
        ('users', '0001_initial'),
    ]

    operations = [
        # The Benefactor row now shares its PK with the auth user and is
        # removed together with it (CASCADE).
        migrations.AlterField(
            model_name='benefactor',
            name='user',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, related_name='benefactor', serialize=False, to=settings.AUTH_USER_MODEL),
        ),
    ]
| [
"mikael.santilio@gmail.com"
] | mikael.santilio@gmail.com |
c7baff98c23d9891b7074ee474eaeecbc0782f9c | 52c2ccb6fb55126a65bff2b4b7f653e4b0805759 | /tibiawikisql/__init__.py | 39d6370c269f15c32186c0bb8b11a6b6e8f306b6 | [
"Apache-2.0"
] | permissive | Galarzaa90/tibiawiki-sql | 4907236d518cdc6a53f32645efa3b22517e91f90 | 982be5ebd7905354b6c6a31c4247b2ee21bbe943 | refs/heads/master | 2022-08-09T09:18:46.533611 | 2022-07-23T13:56:07 | 2022-07-23T13:56:07 | 108,594,636 | 22 | 11 | Apache-2.0 | 2022-06-28T16:46:13 | 2017-10-27T20:52:55 | Python | UTF-8 | Python | false | false | 905 | py | # Copyright 2021 Allan Galarza
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API that reads and parses information from `TibiaWiki <https://tibiawiki.fandom.com>`_."""
# Distribution metadata, exposed at package level for introspection.
__author__ = "Allan Galarza"
__copyright__ = "Copyright 2021 Allan Galarza"
__license__ = "Apache 2.0"
__version__ = "6.2.0"
from tibiawikisql import models
from tibiawikisql.api import Article, Image, WikiClient, WikiEntry
| [
"allan.galarza@gmail.com"
] | allan.galarza@gmail.com |
3d5c3143ec6fcbc98bae2f7ffda05e8e64398b8e | 2049bda43e392d5f5981fbfdb70090ba226e4ef8 | /apps/catalogue/migrations/0102_auto__chg_field_product_is_contain_lithium_battery.py | 103f764ace511541fb5b60a8fa1813b13bad19ea | [] | no_license | embedded1/django-package-forwarding | 2ef84a1fde5ba6817d42d89f983512bdc3d77bc3 | 8c3286e9a7da8f4ae0401a81c8037585b3bb7ba6 | refs/heads/master | 2020-06-22T17:05:36.637695 | 2019-07-26T09:34:40 | 2019-07-26T09:34:40 | 197,738,052 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,921 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: widen Product.is_contain_lithium_battery to a CharField."""

    def forwards(self, orm):
        """Apply: store the lithium-battery flag as a 32-char choice string."""

        # Changing field 'Product.is_contain_lithium_battery'
        db.alter_column('catalogue_product', 'is_contain_lithium_battery', self.gf('django.db.models.fields.CharField')(max_length=32))

    def backwards(self, orm):
        """Revert: restore the original nullable boolean column."""

        # Changing field 'Product.is_contain_lithium_battery'
        db.alter_column('catalogue_product', 'is_contain_lithium_battery', self.gf('django.db.models.fields.NullBooleanField')(null=True))

    # Frozen ORM snapshot generated by South; do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'catalogue.additionalpackagereceiver': {
            'Meta': {'unique_together': "(('package_owner', 'first_name', 'last_name'),)", 'object_name': 'AdditionalPackageReceiver'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'db_index': 'True'}),
            'package_owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'additional_receivers'", 'to': "orm['auth.User']"}),
            'verification_status': ('django.db.models.fields.CharField', [], {'default': "'Unverified'", 'max_length': '128', 'db_index': 'True'})
        },
        'catalogue.attributeentity': {
            'Meta': {'object_name': 'AttributeEntity'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
            'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
        },
        'catalogue.attributeentitytype': {
            'Meta': {'object_name': 'AttributeEntityType'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
        },
        'catalogue.attributeoption': {
            'Meta': {'object_name': 'AttributeOption'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        'catalogue.attributeoptiongroup': {
            'Meta': {'object_name': 'AttributeOptionGroup'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
        },
        'catalogue.category': {
            'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
            'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
            'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
        },
        'catalogue.contributor': {
            'Meta': {'object_name': 'Contributor'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
        },
        'catalogue.contributorrole': {
            'Meta': {'object_name': 'ContributorRole'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'name_plural': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
        },
        'catalogue.customsformitem': {
            'Meta': {'object_name': 'CustomsFormItem'},
            'customs_form': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'items'", 'to': "orm['catalogue.ProductCustomsForm']"}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'quantity': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'value': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '2'})
        },
        'catalogue.option': {
            'Meta': {'object_name': 'Option'},
            'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
        },
        'catalogue.packagelocation': {
            'Meta': {'object_name': 'PackageLocation'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'loc1': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'loc2': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
            'loc3': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
            'package': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'location'", 'unique': 'True', 'to': "orm['catalogue.Product']"}),
            'warehouse': ('django.db.models.fields.CharField', [], {'default': "'USH'", 'max_length': '64'})
        },
        'catalogue.packagereceiverdocument': {
            'Meta': {'object_name': 'PackageReceiverDocument'},
            'caption': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'category': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'original': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'receiver': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'documents'", 'null': 'True', 'to': "orm['catalogue.AdditionalPackageReceiver']"})
        },
        'catalogue.product': {
            'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
            'additional_receiver': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'receiver_packages'", 'null': 'True', 'to': "orm['catalogue.AdditionalPackageReceiver']"}),
            'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
            'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
            'combined_products': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'master'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['catalogue.Product']"}),
            'condition': ('django.db.models.fields.CharField', [], {'default': "'Perfect'", 'max_length': '128'}),
            'date_consolidated': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_client_id_missing': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
            'is_contain_lithium_battery': ('django.db.models.fields.CharField', [], {'default': "'No Battery'", 'max_length': '32'}),
            'is_contain_prohibited_items': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
            'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_sent_outside_usa': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'packages'", 'null': 'True', 'to': "orm['auth.User']"}),
            'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
            'product_class': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'products'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
            'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
            'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
            'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
            'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
            'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
            'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
        },
        'catalogue.productattribute': {
            'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
            'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
            'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
            'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
            'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
        },
        'catalogue.productattributevalue': {
            'Meta': {'object_name': 'ProductAttributeValue'},
            'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
            'value_boolean': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
            'value_file': ('django.db.models.fields.files.FileField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'value_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
            'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
        },
        'catalogue.productcategory': {
            'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
        },
        'catalogue.productclass': {
            'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
            'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
            'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
        },
        'catalogue.productconsolidationrequests': {
            'Meta': {'ordering': "['date_created']", 'object_name': 'ProductConsolidationRequests'},
            'container': ('django.db.models.fields.CharField', [], {'default': "'One box'", 'max_length': '64'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'item_packing': ('django.db.models.fields.CharField', [], {'default': "'Keep'", 'max_length': '64'}),
            'package': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'consolidation_requests'", 'unique': 'True', 'to': "orm['catalogue.Product']"})
        },
        'catalogue.productcontributor': {
            'Meta': {'object_name': 'ProductContributor'},
            'contributor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Contributor']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"}),
            'role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ContributorRole']", 'null': 'True', 'blank': 'True'})
        },
        'catalogue.productcustomsform': {
            'Meta': {'object_name': 'ProductCustomsForm'},
            'content_type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'package': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'customs_form'", 'unique': 'True', 'null': 'True', 'to': "orm['catalogue.Product']"})
        },
        'catalogue.productimage': {
            'Meta': {'ordering': "['display_order']", 'unique_together': "(('product', 'display_order'),)", 'object_name': 'ProductImage'},
            'caption': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'display_order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'original': ('django.db.models.fields.files.ImageField', [], {'max_length': '255'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'images'", 'to': "orm['catalogue.Product']"})
        },
        'catalogue.productpackagingimage': {
            'Meta': {'ordering': "['display_order']", 'unique_together': "(('product', 'display_order'),)", 'object_name': 'ProductPackagingImage'},
            'caption': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'display_order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'original': ('django.db.models.fields.files.ImageField', [], {'max_length': '255'}),
            'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packaging_images'", 'to': "orm['catalogue.Product']"})
        },
        'catalogue.productrecommendation': {
            'Meta': {'object_name': 'ProductRecommendation'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
            'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
            'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
        },
        'catalogue.productspecialrequests': {
            'Meta': {'object_name': 'ProductSpecialRequests'},
            'custom_requests_details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'custom_requests_done': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
            'express_checkout_done': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'extra_protection_done': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'filling_customs_declaration_done': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_custom_requests': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'is_express_checkout': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'is_extra_protection': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'is_filling_customs_declaration': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_photos': ('django.db.models.fields.CharField', [], {'default': "'Zero'", 'max_length': '16'}),
            'is_remove_invoice': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
            'is_repackaging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'package': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'special_requests'", 'unique': 'True', 'null': 'True', 'to': "orm['catalogue.Product']"}),
            'photos_done': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'remove_invoice_done': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'repackaging_done': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['catalogue']
"asili@usendhome.com"
] | asili@usendhome.com |
6ab61b38006dc25fb6f347f5d1870d9bd6cbebf7 | cb46ad4fedaf1dd6fad71c3ec3766604578fb340 | /tests.py | 22eb08230b9927699a2f32fd13466bbc6eb70b2e | [] | no_license | whitneybelba/Flask-Testing | 2ae14ef04988ce3b1ee8f65bbdd20ff4a0c4a8c1 | 2e2e5be334f1d7a9331f63fd9a9d33aabf1b10db | refs/heads/master | 2021-01-20T18:24:27.019968 | 2016-08-05T20:02:18 | 2016-08-05T20:02:18 | 65,045,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,776 | py | import unittest
from party import app
from model import db, example_data, connect_to_db
class PartyTests(unittest.TestCase):
    """Tests for my party site."""

    def setUp(self):
        """Create a Flask test client and enable testing mode."""
        self.client = app.test_client()
        app.config['TESTING'] = True

    def test_homepage(self):
        """The homepage should advertise the party."""
        result = self.client.get("/")
        self.assertIn("board games, rainbows, and ice cream sundaes", result.data)

    def test_no_rsvp_yet(self):
        """Before RSVPing, show the RSVP form but hide the party details."""
        result = self.client.get("/")
        # Completes the old FIXME: assert the RSVP form is rendered.
        # NOTE(review): assumes the homepage template renders a <form> tag
        # for the RSVP — confirm against the template.
        self.assertIn("<form", result.data)
        self.assertNotIn("Party Details", result.data)

    def test_rsvp(self):
        """RSVPing via POST /rsvp should reveal the party details."""
        result = self.client.post("/rsvp",
                                  data={"name": "Jane",
                                        "email": "jane@jane.com"},
                                  follow_redirects=True)
        self.assertIn("Party Details", result.data)
class PartyTestsDatabase(unittest.TestCase):
    """Flask tests that use the database."""

    def setUp(self):
        """Stuff to do before every test."""
        self.client = app.test_client()
        app.config['TESTING'] = True

        # Connect to test database (uncomment when testing database)
        # connect_to_db(app, "postgresql:///testdb")

        # Create tables and add sample data (uncomment when testing database)
        # db.create_all()
        # example_data()

    def tearDown(self):
        """Do at end of every test."""

        # (uncomment when testing database)
        # db.session.close()
        # db.drop_all()

    def test_games(self):
        #FIXME: test that the games page displays the game from example_data()
        # NOTE(review): placeholder only — should GET /games and assert the
        # seeded game's name appears; needs example_data()'s contents confirmed.
        print "FIXME"
if __name__ == "__main__":
    # Allow running the suite directly: `python tests.py`.
    unittest.main()
| [
"info@hackbrightacademy.com"
] | info@hackbrightacademy.com |
f45604ccd387ede76cedb96bfd1ca2278d507cf0 | 534d24cd509640dc598b0e385752d81ea59307a7 | /vueSuit/vueAdmin/widgets.py | 0068b50474a5f839750b7c5425778fb5ebc44430 | [
"BSD-3-Clause"
] | permissive | Ryuchen/django-vue-suit | 612cb7496cf9424182bf31655432df86d77df417 | 28c08a157cd243b475673ca7486aedb1719759ea | refs/heads/master | 2020-07-04T05:11:45.888119 | 2019-12-04T10:35:45 | 2019-12-04T10:35:45 | 202,167,517 | 24 | 3 | null | null | null | null | UTF-8 | Python | false | false | 17,183 | py | """
Form Widget classes specific to the Django admin site.
"""
import copy
import json
from django import forms
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
from django.db.models.deletion import CASCADE
from django.urls import reverse
from django.urls.exceptions import NoReverseMatch
from django.utils.html import smart_urlquote
from django.utils.safestring import mark_safe
from django.utils.text import Truncator
from django.utils.translation import get_language, gettext as _
class FilteredSelectMultiple(forms.SelectMultiple):
    """
    A SelectMultiple with a JavaScript filter interface.

    Note that the resulting JavaScript assumes that the jsi18n
    catalog has been loaded in the page
    """

    @property
    def media(self):
        # Serve unminified jQuery only when DEBUG is on.
        extra = '' if settings.DEBUG else '.min'
        js = [
            'vendor/jquery/jquery%s.js' % extra,
            'jquery.init.js',
            'core.js',
            'SelectBox.js',
            'SelectFilter2.js',
        ]
        return forms.Media(js=["admin/js/%s" % path for path in js])

    def __init__(self, verbose_name, is_stacked, attrs=None, choices=()):
        # verbose_name: human-readable field name shown in the JS filter UI.
        # is_stacked: render the chooser boxes vertically instead of side by side.
        self.verbose_name = verbose_name
        self.is_stacked = is_stacked
        super().__init__(attrs, choices)

    def get_context(self, name, value, attrs):
        context = super().get_context(name, value, attrs)
        # SelectFilter2.js looks for these class/data attributes to replace
        # the plain multi-select with the two-box filter interface.
        context['widget']['attrs']['class'] = 'selectfilter'
        if self.is_stacked:
            context['widget']['attrs']['class'] += 'stacked'
        context['widget']['attrs']['data-field-name'] = self.verbose_name
        context['widget']['attrs']['data-is-stacked'] = int(self.is_stacked)
        return context
class AdminDateWidget(forms.DateInput):
    """Date input that loads the admin calendar popup and shortcuts."""

    class Media:
        js = [
            'admin/js/calendar.js',
            'admin/js/admin/DateTimeShortcuts.js',
        ]

    def __init__(self, attrs=None, format=None):
        # Defaults first, so caller-supplied attrs win on conflict.
        final_attrs = {'class': 'vDateField', 'size': '10'}
        final_attrs.update(attrs or {})
        super().__init__(attrs=final_attrs, format=format)
class AdminTimeWidget(forms.TimeInput):
    """Time input that loads the admin clock popup and shortcuts."""

    class Media:
        js = [
            'admin/js/calendar.js',
            'admin/js/admin/DateTimeShortcuts.js',
        ]

    def __init__(self, attrs=None, format=None):
        # Defaults first, so caller-supplied attrs win on conflict.
        final_attrs = {'class': 'vTimeField', 'size': '8'}
        final_attrs.update(attrs or {})
        super().__init__(attrs=final_attrs, format=format)
class AdminSplitDateTime(forms.SplitDateTimeWidget):
    """
    A SplitDateTime Widget that has some admin-specific styling.
    """
    template_name = 'admin/widgets/split_datetime.html'

    def __init__(self, attrs=None):
        widgets = [AdminDateWidget, AdminTimeWidget]
        # Note that we're calling MultiWidget, not SplitDateTimeWidget, because
        # we want to define widgets.
        forms.MultiWidget.__init__(self, widgets, attrs)

    def get_context(self, name, value, attrs):
        context = super().get_context(name, value, attrs)
        # Labels rendered next to the two sub-widgets by the template.
        context['date_label'] = _('Date:')
        context['time_label'] = _('Time:')
        return context
class AdminRadioSelect(forms.RadioSelect):
    # Radio select rendered with the admin-specific template.
    template_name = 'admin/widgets/radio.html'
class AdminFileWidget(forms.ClearableFileInput):
    # File input rendered with the admin-specific template.
    template_name = 'admin/widgets/clearable_file_input.html'
def url_params_from_lookup_dict(lookups):
    """
    Convert the type of lookups specified in a ForeignKey limit_choices_to
    attribute to a dictionary of query parameters
    """
    params = {}
    if not (lookups and hasattr(lookups, 'items')):
        # Nothing usable (None, empty, or not dict-like): no parameters.
        return params
    for key, raw_value in lookups.items():
        # Callables are evaluated first; the result is then encoded below.
        value = raw_value() if callable(raw_value) else raw_value
        if isinstance(value, (tuple, list)):
            encoded = ','.join(str(item) for item in value)
        elif isinstance(value, bool):
            # Booleans become '1'/'0' rather than 'True'/'False'.
            encoded = '1' if value else '0'
        else:
            encoded = str(value)
        params[key] = encoded
    return params
class ForeignKeyRawIdWidget(forms.TextInput):
    """
    A Widget for displaying ForeignKeys in the "raw_id" interface rather than
    in a <select> box.
    """
    template_name = 'admin/widgets/foreign_key_raw_id.html'

    def __init__(self, rel, admin_site, attrs=None, using=None):
        # rel: the ForeignKey's relation descriptor (rel.model is the target model).
        # using: optional database alias used for lookups.
        self.rel = rel
        self.admin_site = admin_site
        self.db = using
        super().__init__(attrs)

    def get_context(self, name, value, attrs):
        context = super().get_context(name, value, attrs)
        rel_to = self.rel.model
        if rel_to in self.admin_site._registry:
            # The related object is registered with the same AdminSite
            related_url = reverse(
                'admin:%s_%s_changelist' % (
                    rel_to._meta.app_label,
                    rel_to._meta.model_name,
                ),
                current_app=self.admin_site.name,
            )
            # Append limit_choices_to / to_field constraints as a querystring.
            params = self.url_parameters()
            if params:
                related_url += '?' + '&'.join('%s=%s' % (k, v) for k, v in params.items())
            context['related_url'] = mark_safe(related_url)
            context['link_title'] = _('Lookup')
            # The JavaScript code looks for this class.
            context['widget']['attrs'].setdefault('class', 'vForeignKeyRawIdAdminField')
        else:
            context['related_url'] = None
        if context['widget']['value']:
            context['link_label'], context['link_url'] = self.label_and_url_for_value(value)
        else:
            context['link_label'] = None
        return context

    def base_url_parameters(self):
        # Translate limit_choices_to (dict or callable) into query parameters.
        limit_choices_to = self.rel.limit_choices_to
        if callable(limit_choices_to):
            limit_choices_to = limit_choices_to()
        return url_params_from_lookup_dict(limit_choices_to)

    def url_parameters(self):
        from .views.main import TO_FIELD_VAR
        params = self.base_url_parameters()
        params.update({TO_FIELD_VAR: self.rel.get_related_field().name})
        return params

    def label_and_url_for_value(self, value):
        # Resolve the raw id to (display label, admin change URL);
        # returns empty strings when the id does not resolve.
        key = self.rel.get_related_field().name
        try:
            obj = self.rel.model._default_manager.using(self.db).get(**{key: value})
        except (ValueError, self.rel.model.DoesNotExist, ValidationError):
            return '', ''
        try:
            url = reverse(
                '%s:%s_%s_change' % (
                    self.admin_site.name,
                    obj._meta.app_label,
                    obj._meta.object_name.lower(),
                ),
                args=(obj.pk,)
            )
        except NoReverseMatch:
            url = ''  # Admin not registered for target model.
        return Truncator(obj).words(14), url
class ManyToManyRawIdWidget(ForeignKeyRawIdWidget):
    """
    A Widget for displaying ManyToMany ids in the "raw_id" interface rather than
    in a <select multiple> box.
    """
    template_name = 'admin/widgets/many_to_many_raw_id.html'

    def get_context(self, name, value, attrs):
        context = super().get_context(name, value, attrs)
        if self.rel.model in self.admin_site._registry:
            # The related object is registered with the same AdminSite
            context['widget']['attrs']['class'] = 'vManyToManyRawIdAdminField'
        return context

    def url_parameters(self):
        # Unlike the FK widget, no to_field parameter is added.
        return self.base_url_parameters()

    def label_and_url_for_value(self, value):
        # No single object to link to for a many-to-many value.
        return '', ''

    def value_from_datadict(self, data, files, name):
        # The raw input holds comma-separated ids; returns None when absent/empty.
        value = data.get(name)
        if value:
            return value.split(',')

    def format_value(self, value):
        return ','.join(str(v) for v in value) if value else ''
class RelatedFieldWidgetWrapper(forms.Widget):
    """
    This class is a wrapper to a given widget to add the add icon for the
    admin interface.
    """
    template_name = 'admin/widgets/related_widget_wrapper.html'

    def __init__(self, widget, rel, admin_site, can_add_related=None,
                 can_change_related=False, can_delete_related=False,
                 can_view_related=False):
        # Mirror the wrapped widget's public attributes so this wrapper can
        # stand in for it transparently.
        self.needs_multipart_form = widget.needs_multipart_form
        self.attrs = widget.attrs
        self.choices = widget.choices
        self.widget = widget
        self.rel = rel
        # Backwards compatible check for whether a user can add related
        # objects.
        if can_add_related is None:
            can_add_related = rel.model in admin_site._registry
        self.can_add_related = can_add_related
        # XXX: The UX does not support multiple selected values.
        multiple = getattr(widget, 'allow_multiple_selected', False)
        self.can_change_related = not multiple and can_change_related
        # XXX: The deletion UX can be confusing when dealing with cascading deletion.
        cascade = getattr(rel, 'on_delete', None) is CASCADE
        self.can_delete_related = not multiple and not cascade and can_delete_related
        self.can_view_related = not multiple and can_view_related
        # so we can check if the related object is registered with this AdminSite
        self.admin_site = admin_site

    def __deepcopy__(self, memo):
        # Deep-copy only the wrapped widget; other attributes are shared.
        obj = copy.copy(self)
        obj.widget = copy.deepcopy(self.widget, memo)
        obj.attrs = self.widget.attrs
        memo[id(self)] = obj
        return obj

    @property
    def is_hidden(self):
        return self.widget.is_hidden

    @property
    def media(self):
        return self.widget.media

    def get_related_url(self, info, action, *args):
        # info is (app_label, model_name); action is 'add'/'change'/'delete'.
        return reverse("admin:%s_%s_%s" % (info + (action,)),
                       current_app=self.admin_site.name, args=args)

    def get_context(self, name, value, attrs):
        from .views.main import IS_POPUP_VAR, TO_FIELD_VAR
        rel_opts = self.rel.model._meta
        info = (rel_opts.app_label, rel_opts.model_name)
        self.widget.choices = self.choices
        # Querystring that makes the related-object popup report back to us.
        url_params = '&'.join("%s=%s" % param for param in [
            (TO_FIELD_VAR, self.rel.get_related_field().name),
            (IS_POPUP_VAR, 1),
        ])
        context = {
            'rendered_widget': self.widget.render(name, value, attrs),
            'is_hidden': self.is_hidden,
            'name': name,
            'url_params': url_params,
            'model': rel_opts.verbose_name,
            'can_add_related': self.can_add_related,
            'can_change_related': self.can_change_related,
            'can_delete_related': self.can_delete_related,
            'can_view_related': self.can_view_related,
        }
        # '__fk__' is a placeholder the admin JS replaces with the actual pk.
        if self.can_add_related:
            context['add_related_url'] = self.get_related_url(info, 'add')
        if self.can_delete_related:
            context['delete_related_template_url'] = self.get_related_url(info, 'delete', '__fk__')
        if self.can_view_related or self.can_change_related:
            context['change_related_template_url'] = self.get_related_url(info, 'change', '__fk__')
        return context

    def value_from_datadict(self, data, files, name):
        return self.widget.value_from_datadict(data, files, name)

    def value_omitted_from_data(self, data, files, name):
        return self.widget.value_omitted_from_data(data, files, name)

    def id_for_label(self, id_):
        return self.widget.id_for_label(id_)
class AdminTextareaWidget(forms.Textarea):
    """Textarea carrying the admin's vLargeTextField CSS class."""

    def __init__(self, attrs=None):
        merged_attrs = {'class': 'vLargeTextField'}
        merged_attrs.update(attrs or {})
        super().__init__(attrs=merged_attrs)
class AdminTextInputWidget(forms.TextInput):
    """Text input carrying the admin's vTextField CSS class."""

    def __init__(self, attrs=None):
        merged_attrs = {'class': 'vTextField'}
        merged_attrs.update(attrs or {})
        super().__init__(attrs=merged_attrs)
class AdminEmailInputWidget(forms.EmailInput):
    """Email input carrying the admin's vTextField CSS class."""

    def __init__(self, attrs=None):
        merged_attrs = {'class': 'vTextField'}
        merged_attrs.update(attrs or {})
        super().__init__(attrs=merged_attrs)
class AdminURLFieldWidget(forms.URLInput):
    # URL input that also renders a clickable link when the value validates.
    template_name = 'admin/widgets/url.html'

    def __init__(self, attrs=None, validator_class=URLValidator):
        super().__init__(attrs={'class': 'vURLField', **(attrs or {})})
        self.validator = validator_class()

    def get_context(self, name, value, attrs):
        # Only render an <a href> in the template when the value is a valid URL.
        try:
            self.validator(value if value else '')
            url_valid = True
        except ValidationError:
            url_valid = False
        context = super().get_context(name, value, attrs)
        context['current_label'] = _('Currently:')
        context['change_label'] = _('Change:')
        context['widget']['href'] = smart_urlquote(context['widget']['value']) if value else ''
        context['url_valid'] = url_valid
        return context
class AdminIntegerFieldWidget(forms.NumberInput):
    """Number input styled for admin integer fields."""

    class_name = 'vIntegerField'

    def __init__(self, attrs=None):
        # class_name is a class attribute so subclasses can restyle themselves.
        merged_attrs = {'class': self.class_name}
        merged_attrs.update(attrs or {})
        super().__init__(attrs=merged_attrs)
class AdminBigIntegerFieldWidget(AdminIntegerFieldWidget):
    # Same widget, different CSS hook for big-integer fields.
    class_name = 'vBigIntegerField'
class AdminUUIDInputWidget(forms.TextInput):
    """Text input carrying the admin's vUUIDField CSS class."""

    def __init__(self, attrs=None):
        merged_attrs = {'class': 'vUUIDField'}
        merged_attrs.update(attrs or {})
        super().__init__(attrs=merged_attrs)
# Mapping of lowercase language codes [returned by Django's get_language()] to
# language codes supported by select2.
# See django/contrib/admin/static/admin/js/vendor/select2/i18n/*
SELECT2_TRANSLATIONS = {x.lower(): x for x in [
    'ar', 'az', 'bg', 'ca', 'cs', 'da', 'de', 'el', 'en', 'es', 'et',
    'eu', 'fa', 'fi', 'fr', 'gl', 'he', 'hi', 'hr', 'hu', 'id', 'is',
    'it', 'ja', 'km', 'ko', 'lt', 'lv', 'mk', 'ms', 'nb', 'nl', 'pl',
    'pt-BR', 'pt', 'ro', 'ru', 'sk', 'sr-Cyrl', 'sr', 'sv', 'th',
    'tr', 'uk', 'vi',
]}
# Chinese variants do not follow the lowercase pattern; map them explicitly.
SELECT2_TRANSLATIONS.update({'zh-hans': 'zh-CN', 'zh-hant': 'zh-TW'})
class AutocompleteMixin:
    """
    Select widget mixin that loads options from AutocompleteJsonView via AJAX.

    Renders the necessary data attributes for select2 and adds the static form
    media.
    """
    url_name = '%s:%s_%s_autocomplete'

    def __init__(self, rel, admin_site, attrs=None, choices=(), using=None):
        # rel: relation descriptor for the related model; using: db alias.
        self.rel = rel
        self.admin_site = admin_site
        self.db = using
        self.choices = choices
        self.attrs = {} if attrs is None else attrs.copy()

    def get_url(self):
        model = self.rel.model
        return reverse(self.url_name % (self.admin_site.name, model._meta.app_label, model._meta.model_name))

    def build_attrs(self, base_attrs, extra_attrs=None):
        """
        Set select2's AJAX attributes.

        Attributes can be set using the html5 data attribute.
        Nested attributes require a double dash as per
        https://select2.org/configuration/data-attributes#nested-subkey-options
        """
        attrs = super().build_attrs(base_attrs, extra_attrs=extra_attrs)
        attrs.setdefault('class', '')
        attrs.update({
            'data-ajax--cache': 'true',
            'data-ajax--type': 'GET',
            'data-ajax--url': self.get_url(),
            'data-theme': 'admin-autocomplete',
            'data-allow-clear': json.dumps(not self.is_required),
            'data-placeholder': '',  # Allows clearing of the input.
            'class': attrs['class'] + (' ' if attrs['class'] else '') + 'admin-autocomplete',
        })
        return attrs

    def optgroups(self, name, value, attr=None):
        """Return selected options based on the ModelChoiceIterator."""
        default = (None, [], 0)
        groups = [default]
        has_selected = False
        selected_choices = {
            str(v) for v in value
            if str(v) not in self.choices.field.empty_values
        }
        if not self.is_required and not self.allow_multiple_selected:
            default[1].append(self.create_option(name, '', '', False, 0))
        # Only the currently selected objects are rendered server-side;
        # all other options are fetched over AJAX by select2.
        choices = (
            (obj.pk, self.choices.field.label_from_instance(obj))
            for obj in self.choices.queryset.using(self.db).filter(pk__in=selected_choices)
        )
        for option_value, option_label in choices:
            selected = (
                str(option_value) in value and
                (has_selected is False or self.allow_multiple_selected)
            )
            has_selected |= selected
            index = len(default[1])
            subgroup = default[1]
            # NOTE(review): the truthy `selected_choices` set is passed as the
            # `selected` flag — this mirrors upstream Django; every rendered
            # option here is, by construction, a selected one.
            subgroup.append(self.create_option(name, option_value, option_label, selected_choices, index))
        return groups

    @property
    def media(self):
        extra = '' if settings.DEBUG else '.min'
        # Add a select2 i18n file when a translation exists for the active language.
        i18n_name = SELECT2_TRANSLATIONS.get(get_language())
        i18n_file = ('admin/js/vendor/select2/i18n/%s.js' % i18n_name,) if i18n_name else ()
        return forms.Media(
            js=(
                'admin/js/vendor/jquery/jquery%s.js' % extra,
                'admin/js/vendor/select2/select2.full%s.js' % extra,
            ) + i18n_file + (
                'admin/js/jquery.init.js',
                'admin/js/autocomplete.js',
            ),
            css={
                'screen': (
                    'admin/css/vendor/select2/select2%s.css' % extra,
                    'admin/css/autocomplete.css',
                ),
            },
        )
class AutocompleteSelect(AutocompleteMixin, forms.Select):
    # Single-value select2 autocomplete widget.
    pass
class AutocompleteSelectMultiple(AutocompleteMixin, forms.SelectMultiple):
    # Multi-value select2 autocomplete widget.
    pass
| [
"chenhao@geyecloud.com"
] | chenhao@geyecloud.com |
bbf1ede6f241dd795910462f1b9f83ffd97a1947 | d2189145e7be2c836017bea0d09a473bf1bc5a63 | /20 Clases CBLUE (RVT)/Identificador.py | e9dd400cd7a6736317231a0f4c9e5294b27ecb62 | [] | no_license | emilianoNM/Tecnicas3 | 12d10ce8d78803c8d2cd6a721786a68f7ee2809d | 6ad7f0427ab9e23643a28ac16889bca8791421d0 | refs/heads/master | 2020-03-25T18:06:34.126165 | 2018-11-24T04:42:14 | 2018-11-24T04:42:14 | 144,013,045 | 3 | 5 | null | 2018-09-14T10:47:26 | 2018-08-08T12:49:57 | Python | UTF-8 | Python | false | false | 249 | py |
# coding: utf-8
# In[ ]:
#20 Clases Empresa
#Por Cblue (RVT)
class Identificador(object):
    """Corporate visual-identity data for a company (logo and brand color)."""

    # Fixed: the original had `def__init__` (missing space after `def`) and
    # no trailing colon on the signature line — both SyntaxErrors.
    def __init__(self, LogotipoEMP, ColorCaracteristico):
        self.LogotipoEMP = LogotipoEMP
        self.ColorCaracteristico = ColorCaracteristico
| [
"noreply@github.com"
] | emilianoNM.noreply@github.com |
afb52d927ca3b977196758f7985e91c241b07721 | 2e74c7339c63385172629eaa84680a85a4731ee9 | /sdg/data_prep/compileNTDs.py | 26c89bfdf2636badf5e8fa2d616af8f4faa2ea46 | [] | no_license | zhusui/ihme-modeling | 04545182d0359adacd22984cb11c584c86e889c2 | dfd2fe2a23bd4a0799b49881cb9785f5c0512db3 | refs/heads/master | 2021-01-20T12:30:52.254363 | 2016-10-11T00:33:36 | 2016-10-11T00:33:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,898 | py | import pandas as pd
import sys
from scipy.stats import gmean
from getpass import getuser
sys.path.append(SDG_REPO)
import sdg_utils.draw_files as dw
# Columns that identify one indicator observation (location/year pair).
INDICATOR_ID_COLS = ['location_id', 'year_id']


def compile_df_using_dict(input_file_dict):
    """Compile dataframe together using dictionary indicator_id-->file

    Reads each HDF file, keeps the id columns plus the draw columns, and
    stacks the results row-wise.  NOTE(review): Python 2 module — see the
    bare `print` statement below.
    """
    dfs = []
    for indicator_id in input_file_dict.keys():
        print indicator_id
        df = pd.read_hdf(input_file_dict[indicator_id])
        df = df[INDICATOR_ID_COLS + dw.DRAW_COLS]
        dfs.append(df)
    df = pd.concat(dfs, ignore_index=True)
    return df
def get_child_indicators(indic_table, parent_stamp):
    """Use indicator table filepaths to pull ntd data

    Selects level-3 rows whose indicator_stamp starts with parent_stamp,
    then loads and concatenates their clean input files.
    NOTE(review): DataFrame.ix is deprecated in modern pandas; this module
    targets the older pandas available at the time.
    """
    tbl_rows = indic_table.ix[
        (indic_table['indicator_stamp'].str.startswith(parent_stamp)) & \
        (indic_table['indicator_level']==3)
    ]
    # Build an indicator_id -> clean_input_data_file mapping for those rows.
    input_file_dict = tbl_rows[
        ['indicator_id', 'clean_input_data_file']
    ].set_index('indicator_id').to_dict()['clean_input_data_file']
    df = compile_df_using_dict(input_file_dict)
    return df
def compile_sum(indic_table, parent_stamp,
                assert_0_1=False):
    """Sum together the children of the given parent.

    Optionally assert that values are between 0 and 1.
    """
    df = get_child_indicators(indic_table, parent_stamp)
    df = df.groupby(INDICATOR_ID_COLS)[dw.DRAW_COLS].sum()
    if assert_0_1:
        # Summed rates escaping (0, 1) would indicate overlapping or
        # mis-scaled child indicators.
        assert df.applymap(lambda x: x>0 and x<1).values.all(), \
            'sum produced rates outside of realistic bounds'
    df = df.reset_index()
    return df
def compile_ncds(indic_table):
    """Compile together aggregate indicators for NTDs and NCDs"""
    print 'NCDS'
    # NCD rates must be valid proportions, hence the 0-1 assertion.
    ncds = compile_sum(indic_table, 'i_341', assert_0_1=True)
    out_path = "/ihme/scratch/projects/sdg/input_data/dalynator/{}/ncds.h5".format(dw.DALY_VERS)
    ncds.to_hdf(out_path, key="data", format="table",
                data_columns=['location_id', 'year_id'])
def compile_ntds(indic_table):
    """Sum NTD child-indicator prevalences and write them to HDF."""
    print 'NTDs'
    ntds = compile_sum(indic_table, 'i_335', assert_0_1=False)
    # cant assert that prevalence is below 1 because it might be above
    assert (ntds[dw.DRAW_COLS] > 0).values.all(), 'values below 0 in ntds'
    out_path = "/ihme/scratch/projects/sdg/input_data/como_prev/{}/ntds.h5".format(dw.COMO_VERS)
    ntds.to_hdf(out_path, key="data", format="table",
                data_columns=['location_id', 'year_id'])
def compile_tb(indic_table):
    """Sum TB child-indicator incidence and write it to HDF."""
    print 'TB'
    tb = compile_sum(indic_table, 'i_332')
    out_path = "/ihme/scratch/projects/sdg/input_data/como_inc/{}/tb.h5".format(dw.COMO_VERS)
    tb.to_hdf(out_path, key="data", format="table",
              data_columns=['location_id', 'year_id'])
# Script entry point: load the indicator metadata table and run the compiles.
indic_table = pd.read_csv(
    "/home/j/WORK/10_gbd/04_journals/"
    "gbd2015_capstone_lancet_SDG/02_inputs/indicator_ids.csv"
)
# NOTE(review): compile_ncds() is defined above but never invoked here —
# confirm whether NCD aggregation is intentionally skipped.
compile_ntds(indic_table)
compile_tb(indic_table)
"nsidles@uw.edu"
] | nsidles@uw.edu |
c187f26acf02adbbc468f5bf96a6b6af692a8d02 | a1c6caa9ff52d4377529c7727eb5517041b04311 | /Exe16_Loja_de_tinta.py | adb1f3b81505e3857f7a47487058062e967cf7f7 | [
"MIT"
] | permissive | lucaslk122/Exercicios-em-python | b83b5abd5da7a7cf15ac50a213bad708501c8863 | 2daa92dcb19296f580d673376af375d11c9c041b | refs/heads/main | 2022-12-26T04:22:40.365925 | 2020-10-09T18:05:31 | 2020-10-09T18:05:31 | 302,362,419 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 398 | py | import math
print("1L de tinta pinta 3 metros quadrados, cada galão possui 18L e custam R$80.00 cada")
Area = float(input("Digite o tamanho da area em metros quadrados a ser pintada: "))
Litros = float(Area/3)
Latas = float(Litros/18)
Preço = float(round(Latas,0)*80)
print(f"Voce vai precisar de {math.ceil(Latas)} lata de tinta para pintar {Area}m^2, portanto, pagará R${round(Preço,2)}")
| [
"71664028+lucaslk122@users.noreply.github.com"
] | 71664028+lucaslk122@users.noreply.github.com |
6508bfa49a20c5e33ddd9d77cb81eb6289502300 | c8a131d6f9d58f54a8736cae939529741b3858f8 | /mysite/blog/models.py | 8685f23c28416b661749aa9d952e46b42f882ddd | [] | no_license | aliensmart/simple_web | 0e5d7fd1205bfed3145cce45dbcf374a9e63f7bc | 9b948de933ff5304f92ca80d5c556507f3000095 | refs/heads/master | 2022-06-28T17:28:33.325312 | 2020-05-10T13:28:47 | 2020-05-10T13:28:47 | 261,683,101 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,278 | py | from django.db import models
from django.utils import timezone
from django.urls import reverse
# Create your models here.
class Post(models.Model):
    """A blog post written by a Django auth user."""

    author = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    title = models.CharField(max_length=200)
    text = models.TextField()
    create_date = models.DateTimeField(default=timezone.now)
    published_date = models.DateTimeField(blank=True, null=True)

    def publish(self):
        """Stamp the post as published now and persist it."""
        # Fixed: `timezone.now` was assigned without being called, storing the
        # function object instead of a datetime.
        self.published_date = timezone.now()
        self.save()

    def approve_comments(self):
        """Return only the approved comments attached to this post."""
        # Fixed: the Comment model's field is `approved_comment`; filtering on
        # `approve_comment` raised FieldError at runtime.
        return self.comments.filter(approved_comment=True)

    def get_absolute_url(self):
        return reverse("post_detail", kwargs={'pk': self.pk})

    def __str__(self):
        return self.title
class Comment(models.Model):
    """A reader comment on a Post; hidden until approved."""

    # NOTE(review): `port` looks like a typo for `post` (its related_name is
    # 'comments', which Post.approve_comments relies on). Renaming would
    # require a schema migration, so it is left unchanged here.
    port = models.ForeignKey('blog.Post', related_name='comments', on_delete=models.CASCADE)
    author = models.CharField(max_length=200)
    text = models.TextField()
    create_date = models.DateTimeField(default=timezone.now)
    approved_comment = models.BooleanField(default = False)

    def approve(self):
        # Mark the comment visible and persist immediately.
        self.approved_comment = True
        self.save()

    def get_absolute_url(self):
        # After creating a comment, send the user back to the post list.
        return reverse('post_list')

    def __str__(self):
        return self.text
"kaoua17@gmail.com"
] | kaoua17@gmail.com |
e3282eb1cfcf9fe7aac2622513c0d4ec3a83ef22 | ff23e5c890216a1a63278ecb40cd7ac79ab7a4cd | /clients/hydra/python/test/test_is_ready200_response.py | 703e166a1c68f6cddf8085ea803a197d52a0a45b | [
"Apache-2.0"
] | permissive | ory/sdk | fcc212166a92de9d27b2dc8ff587dcd6919e53a0 | 7184e13464948d68964f9b605834e56e402ec78a | refs/heads/master | 2023-09-01T10:04:39.547228 | 2023-08-31T08:46:23 | 2023-08-31T08:46:23 | 230,928,630 | 130 | 85 | Apache-2.0 | 2023-08-14T11:09:31 | 2019-12-30T14:21:17 | C# | UTF-8 | Python | false | false | 785 | py | """
Ory Hydra API
Documentation for all of Ory Hydra's APIs. # noqa: E501
The version of the OpenAPI document: v2.2.0-rc.3
Contact: hi@ory.sh
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import ory_hydra_client
from ory_hydra_client.model.is_ready200_response import IsReady200Response
class TestIsReady200Response(unittest.TestCase):
    """IsReady200Response unit test stubs"""

    def setUp(self):
        # Generated stub: no fixtures required yet.
        pass

    def tearDown(self):
        pass

    def testIsReady200Response(self):
        """Test IsReady200Response"""
        # FIXME: construct object with mandatory attributes with example values
        # model = IsReady200Response()  # noqa: E501
        pass


if __name__ == '__main__':
    unittest.main()
| [
"3372410+aeneasr@users.noreply.github.com"
] | 3372410+aeneasr@users.noreply.github.com |
11cbf11fd837f408935a4f30b359536ebafaf26d | d7249238cf7c42ec7ee4dd8dfa7c1ba80b0d93d6 | /01_sklearn/02_weight_height_test.py | 9d612682a720ea8065bdf9a44692f105330ce222 | [] | no_license | happyquokkka/TIL_AI | eb47e9e419d7344e8a5fac27dc00ccaf16962368 | 4d13716867dfd9d938f866f769a74ea03b93c88a | refs/heads/master | 2023-07-18T14:24:45.290908 | 2021-08-27T13:34:44 | 2021-08-27T13:34:44 | 399,787,222 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,053 | py | import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt
# pip install pandas
# Data source: Korean public-data portal — download the 2015 Seoul raw data of
# the Ministry of Education "student health check" analysis, rename the file
# to weight_height.csv.
# Problem: given a girl's weight, predict her height.

# Prepare the data (widen pandas display so prints are readable).
pd.set_option("display.width", 300)
pd.set_option("display.max_rows",1000)
pd.set_option("display.max_columns",30)
df = pd.read_csv("weight_height.csv", encoding="euc-kr")
df = df[['학교명', '학년', '성별', '키', '몸무게']]
df.dropna(inplace=True)
# Re-code the grade as 1..12: add 0/6/9 depending on the school-name suffix
# (elementary / middle / high school).
df['grade'] = list(map(lambda x:0 if x[-4:] == "초등학교" else (6 if x[-3:] == "중학교" else 9), df["학교명"])) + df["학년"]
# NOTE(review): the map() is wrapped in list() so the offsets can be added
# element-wise to the grade column — confirm this was the intent.
df.drop(["학교명", "학년"], axis="columns", inplace=True)
# Rename the columns to English.
df.columns = ["gender", "height", "weight", "grade"]
# Encode df['gender']: male -> 0, female -> 1.
df['gender'] = df['gender'].map(lambda x:0 if x=="남" else 1)
# print(df)
# Keep only the rows for girls.
is_girl = df["gender"] == 1
girl_df = df[is_girl]
# print(girl_df)
# Split into feature (weight) and target (height).
X = girl_df['weight']
y = girl_df['height']
# Split into train / test sets.
train_X, test_X, train_y, test_y = train_test_split(X, y, test_size=0.3, random_state=1)
# Reshape to 2-D arrays as required by scikit-learn estimators.
train_X = train_X.values.reshape(-1, 1)
test_X = test_X.values.reshape(-1, 1)
# Prepare the model (linear regression).
linear = LinearRegression()
# Train.
linear.fit(train_X, train_y)
# Predict and evaluate.
predict = linear.predict(test_X)
# Plot actual (blue) vs predicted (red) heights.
plt.plot(test_X, test_y, "b.")
plt.plot(test_X, predict, "r.")
plt.xlim(10, 140)
plt.ylim(100, 220)
plt.grid()
# plt.show()
# What is the predicted height of a 60 kg girl?
pred_grd = linear.predict([[60]])
print("여학생 키 예측 :", pred_grd)
"yjmooon96@gmail.com"
] | yjmooon96@gmail.com |
cc5830bd8d5626e4aad789d1ad95107800ba6894 | a177699f24936458b9fe9eb73d9af51668601d20 | /src/zojax/subscription/subscription.py | 0c39a99d0d13a4ec928e717e197664cdebc47436 | [
"ZPL-2.1"
] | permissive | Zojax/zojax.subscription | 868dfbcc07f0abcc3f0e92ebf8051de0a544bfba | f72fa6cf7ad885519d4da23dc5cd5b6b1ae0d92c | refs/heads/master | 2020-05-17T03:36:52.743843 | 2014-01-31T11:06:15 | 2014-01-31T11:06:15 | 2,018,696 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,665 | py | ##############################################################################
#
# Copyright (c) 2009 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
$Id$
"""
from persistent import Persistent
from zope import interface
from zope.component import getUtility, queryUtility
from zope.security.management import queryInteraction
from zope.app.intid.interfaces import IIntIds
from interfaces import ISubscription, ISubscriptionDescription
class Subscription(Persistent):
    """Persistent record linking a principal to an object it subscribes to."""
    interface.implements(ISubscription)  # zope.interface class advice (Python 2 era)

    # Class-level defaults, overridden per instance via __init__ kwargs.
    id = None
    oid = 0          # int id of the subscribed object (resolved via IIntIds)
    principal = u''  # principal (user) id
    type = u''       # name of the ISubscriptionDescription utility

    def __init__(self, principal=None, **kw):
        if principal is None:
            # Fall back to the principal of the current security interaction.
            interaction = queryInteraction()
            if interaction is not None:
                for participation in interaction.participations:
                    principal = participation.principal.id
        self.principal = principal
        # Remaining keyword arguments become instance attributes (e.g. oid, type).
        for attr, value in kw.items():
            setattr(self, attr, value)

    @property
    def object(self):
        # Resolve the stored int id back to the subscribed object (or None).
        return getUtility(IIntIds).queryObject(self.oid)

    @property
    def description(self):
        return getUtility(ISubscriptionDescription, self.type)
| [
"andrey.fedoseev@gmail.com"
] | andrey.fedoseev@gmail.com |
34fc4c1264be99808dc65ed55068f734bfeeeb41 | 00472c7df0835c6c1870f4c92fcca3922b74727e | /2장.py | 4658bc93bb69d81ac7f918b85661035f6c3f6a42 | [] | no_license | ljg95924/Machine_Learning_Study | cd46374aab46ca67909f4fc7c3c7b488beb5480f | a31db06bed8dc95530f094a98de2b9bfa3714fd3 | refs/heads/master | 2021-04-15T18:22:19.684733 | 2018-03-28T12:11:01 | 2018-03-28T12:11:01 | 126,568,461 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29,226 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Mar 24 15:03:07 2018
@author: ljg
"""
import mglearn
import matplotlib.pyplot as plt
import numpy as np
# Scatter the two-class "forge" toy dataset.
X,y=mglearn.datasets.make_forge()
mglearn.discrete_scatter(X[:,0],X[:,1],y)
plt.legend(['Class 0','Class 1'],loc=4)
plt.xlabel('First feature')
plt.ylabel('Second feature')
print('X.shape: {}'.format(X.shape))
# Plot the one-feature "wave" regression dataset.
X,y=mglearn.datasets.make_wave(n_samples=40)
plt.plot(X,y,'o')
plt.ylim(-3,3)
plt.xlabel('feature')
plt.ylabel('Target')
# Inspect the breast-cancer dataset bundled with scikit-learn.
from sklearn.datasets import load_breast_cancer
cancer=load_breast_cancer()
print('cancer.keys():\n{}'.format(cancer.keys()))
print('Shape of cancer data: {}'.format(cancer.data.shape))
print('Sample counts per class:\n{}'.format({n: v for n,v in zip(cancer.target_names,np.bincount(cancer.target))}))
#cancer.target_names
#np.bincount(cancer.target)
#cancer.target
print('Feature names:\n{}'.format(cancer.feature_names))
# Inspect the Boston housing data and mglearn's extended-feature variant.
from sklearn.datasets import load_boston
boston=load_boston()
print('Data shape: {}'.format(boston.data.shape))
X,y = mglearn.datasets.load_extended_boston()
print('X.shape: {}'.format(X.shape))
# Illustration plots for k-NN classification with k=1 and k=3.
mglearn.plots.plot_knn_classification(n_neighbors=1)
mglearn.plots.plot_knn_classification(n_neighbors=3)
# Fit a 3-NN classifier on a train/test split of the forge data.
from sklearn.model_selection import train_test_split
X,y =mglearn.datasets.make_forge()
X_train,X_test,y_train,y_test=train_test_split(X,y,random_state=0)
from sklearn.neighbors import KNeighborsClassifier
clf=KNeighborsClassifier(n_neighbors=3)
clf.fit(X_train,y_train)
print('Test set predictions:{}'.format(clf.predict(X_test)))
print('Test set accuracy: {:.2f}'.format(clf.score(X_test,y_test)))
# Draw the decision boundaries for k = 1, 3, 9 side by side.
fig,axes=plt.subplots(1,3,figsize=(10,3))
for n_neighbors,ax in zip([1,3,9],axes):
    clf = KNeighborsClassifier(n_neighbors=n_neighbors).fit(X,y)
    mglearn.plots.plot_2d_separator(clf,X,fill=True,eps=0.5,ax=ax,alpha=.4)
    mglearn.discrete_scatter(X[:,0],X[:,1],y,ax=ax)
    ax.set_title('{} neighbors(s)'.format(n_neighbors))
    ax.set_xlabel('feature 0')
    ax.set_ylabel('feature 1')
axes[0].legend(loc=3)
#In[18]:
# Accuracy on train vs. test as k grows, on the breast-cancer data.
from sklearn.datasets import load_breast_cancer
cancer = load_breast_cancer()
X_train,X_test,y_train,y_test=train_test_split(
    cancer.data,cancer.target,stratify=cancer.target,random_state=66)
training_accuracy=[]
test_accuracy=[]
neighbors_settings=range(1,11)
for n_neighbors in neighbors_settings:
    clf=KNeighborsClassifier(n_neighbors=n_neighbors)
    clf.fit(X_train,y_train)
    training_accuracy.append(clf.score(X_train,y_train))
    test_accuracy.append(clf.score(X_test,y_test))
# Fixed typo in the legend label: 'training accurary' -> 'training accuracy'.
plt.plot(neighbors_settings,training_accuracy,label='training accuracy')
plt.plot(neighbors_settings,test_accuracy,label='test accuracy')
plt.ylabel('Accuracy')
plt.xlabel('n_neighbors')
plt.legend()
#In[19]:
# Illustration plots for k-NN regression with k=1 and k=3.
mglearn.plots.plot_knn_regression(n_neighbors=1)
#In[20]
mglearn.plots.plot_knn_regression(n_neighbors=3)
#In[21]
# Fit a 3-NN regressor on a train/test split of the wave data.
from sklearn.neighbors import KNeighborsRegressor
X,y=mglearn.datasets.make_wave(n_samples=40)
X_train,X_test,y_train,y_test=train_test_split(X,y,random_state=0)
reg=KNeighborsRegressor(n_neighbors=3)
reg.fit(X_train,y_train)
#In[22]
print('Test set predictions:\n{}'.format(reg.predict(X_test)))
#In[23]
# For regressors, score() returns R^2 (1.0 = perfect fit).
print('Test set R^2: {:.2f}'.format(reg.score(X_test,y_test)))
#In[24]
# Compare k = 1, 3, 9 fits on a dense grid of inputs.
fig,axes=plt.subplots(1,3,figsize=(15,4))
line=np.linspace(-3,3,1000).reshape(-1,1)
for n_neighbors,ax in zip([1,3,9],axes):
    reg=KNeighborsRegressor(n_neighbors=n_neighbors)
    reg.fit(X_train,y_train)
    ax.plot(line,reg.predict(line))
    ax.plot(X_train,y_train,'^',c=mglearn.cm2(0),markersize=8)
    ax.plot(X_test,y_test,'v',c=mglearn.cm2(1),markersize=8)
    ax.set_title(
        '{} neighbors(s)\n train score: {:.2f} test score: {:.2f}'.format(
            n_neighbors, reg.score(X_train,y_train),
            reg.score(X_test,y_test)))
    ax.set_xlabel('Feature')
    ax.set_ylabel('Target')
axes[0].legend(['Model predictions','Training data/target',
                'Test data/target'],loc='best')
#In[25]
mglearn.plots.plot_linear_regression_wave()
#In[26]
# Ordinary least squares on the wave data.
from sklearn.linear_model import LinearRegression
X,y=mglearn.datasets.make_wave(n_samples=60)
X_train,X_test,y_train,y_test=train_test_split(X,y,random_state=42)
lr=LinearRegression().fit(X_train,y_train)
#In[27]
# Learned slope (weights) and intercept.
print('lr.coef_:{}'.format(lr.coef_))
print('lr.intercept_:{}'.format(lr.intercept_))
#In[28]
# Fixed: the "Training set score" line evaluated on the TEST set, and the
# second label was garbled ('Test set scireL{:.2f}').
print('Training set score: {:.2f}'.format(lr.score(X_train,y_train)))
print('Test set score: {:.2f}'.format(lr.score(X_test,y_test)))
#In[29]
# Plain OLS on the extended Boston dataset (many derived features).
X,y=mglearn.datasets.load_extended_boston()
X_train,X_test,y_train,y_test=train_test_split(X,y,random_state=0)
lr=LinearRegression().fit(X_train,y_train)
#In[30]
print("Training set score: {:.2f}".format(lr.score(X_train, y_train)))
print("Test set score: {:.2f}".format(lr.score(X_test, y_test)))
#In[31]
# Ridge = linear regression with an L2 penalty; alpha controls shrinkage.
from sklearn.linear_model import Ridge
ridge=Ridge().fit(X_train,y_train)
print("Training set score: {:.2f}".format(ridge.score(X_train, y_train)))
print("Test set score: {:.2f}".format(ridge.score(X_test, y_test)))
#In[32]
ridge10 = Ridge(alpha=10).fit(X_train, y_train)
print("Training set score: {:.2f}".format(ridge10.score(X_train, y_train)))
print("Test set score: {:.2f}".format(ridge10.score(X_test, y_test)))
#In[33]
ridge01 = Ridge(alpha=0.1).fit(X_train, y_train)
print("Training set score: {:.2f}".format(ridge01.score(X_train, y_train)))
print("Test set score: {:.2f}".format(ridge01.score(X_test, y_test)))
#In[34]
# Compare coefficient magnitudes across alphas and against plain OLS.
plt.plot(ridge.coef_, 's', label="Ridge alpha=1")
plt.plot(ridge10.coef_, '^', label="Ridge alpha=10")
plt.plot(ridge01.coef_, 'v', label="Ridge alpha=0.1")
plt.plot(lr.coef_, 'o', label="LinearRegression")
plt.xlabel("Coefficient index")
plt.ylabel("Coefficient magnitude")
plt.hlines(0, 0, len(lr.coef_))
plt.ylim(-25, 25)
plt.legend()
#In[35]
mglearn.plots.plot_ridge_n_samples()
#In[36]
from sklearn.linear_model import Lasso
lasso = Lasso().fit(X_train, y_train)
print("Training set score: {:.2f}".format(lasso.score(X_train, y_train)))
print("Test set score: {:.2f}".format(lasso.score(X_test, y_test)))
print("Number of features used: {}".format(np.sum(lasso.coef_ != 0)))
#In[37]
lasso001 = Lasso(alpha=0.01, max_iter=100000).fit(X_train, y_train)
print("Training set score: {:.2f}".format(lasso001.score(X_train, y_train)))
print("Test set score: {:.2f}".format(lasso001.score(X_test, y_test)))
print("Number of features used: {}".format(np.sum(lasso001.coef_ != 0)))
#In[38]
lasso00001 = Lasso(alpha=0.0001, max_iter=100000).fit(X_train, y_train)
print("Training set score: {:.2f}".format(lasso00001.score(X_train, y_train)))
print("Test set score: {:.2f}".format(lasso00001.score(X_test, y_test)))
print("Number of features used: {}".format(np.sum(lasso00001.coef_ != 0)))
#In[39]
plt.plot(lasso.coef_, 's', label="Lasso alpha=1")
plt.plot(lasso001.coef_, '^', label="Lasso alpha=0.01")
plt.plot(lasso00001.coef_, 'v', label="Lasso alpha=0.0001")
plt.plot(ridge01.coef_, 'o', label="Ridge alpha=0.1")
plt.legend(ncol=2, loc=(0, 1.05))
plt.ylim(-25, 25)
plt.xlabel("Coefficient index")
plt.ylabel("Coefficient magnitude")
#In[40]
# Two linear classifiers (hinge loss vs. log loss) on the toy forge data.
from sklearn.linear_model import LogisticRegression
from sklearn.svm import LinearSVC
X, y = mglearn.datasets.make_forge()
fig, axes = plt.subplots(1, 2, figsize=(10, 3))
for model, ax in zip([LinearSVC(), LogisticRegression()], axes):
    clf = model.fit(X, y)
    mglearn.plots.plot_2d_separator(clf, X, fill=False, eps=0.5,
                                    ax=ax, alpha=.7)
    mglearn.discrete_scatter(X[:, 0], X[:, 1], y, ax=ax)
    ax.set_title("{}".format(clf.__class__.__name__))
    ax.set_xlabel("Feature 0")
    ax.set_ylabel("Feature 1")
axes[0].legend()
#In[41]
# Effect of the regularization parameter C on the LinearSVC boundary.
mglearn.plots.plot_linear_svc_regularization()
#In[42]
# Logistic regression on breast cancer; stratify keeps class balance.
from sklearn.datasets import load_breast_cancer
cancer = load_breast_cancer()
X_train, X_test, y_train, y_test = train_test_split(
    cancer.data, cancer.target, stratify=cancer.target, random_state=42)
logreg = LogisticRegression().fit(X_train, y_train)
print("Training set score: {:.3f}".format(logreg.score(X_train, y_train)))
print("Test set score: {:.3f}".format(logreg.score(X_test, y_test)))
#In[43]
# Higher C = weaker regularization (more flexible model).
logreg100 = LogisticRegression(C=100).fit(X_train, y_train)
print("Training set score: {:.3f}".format(logreg100.score(X_train, y_train)))
print("Test set score: {:.3f}".format(logreg100.score(X_test, y_test)))
#In[44]
# Lower C = stronger regularization.
logreg001 = LogisticRegression(C=0.01).fit(X_train, y_train)
print("Training set score: {:.3f}".format(logreg001.score(X_train, y_train)))
print("Test set score: {:.3f}".format(logreg001.score(X_test, y_test)))
#In[45]
# Compare logistic-regression coefficients across regularization strengths.
# Bug fix: logreg001 was trained with C=0.01 (cell In[44]) but its legend
# entry said "C=0.001"; the label now matches the model actually plotted.
plt.plot(logreg.coef_.T, 'o', label="C=1")
plt.plot(logreg100.coef_.T, '^', label="C=100")
plt.plot(logreg001.coef_.T, 'v', label="C=0.01")
plt.xticks(range(cancer.data.shape[1]), cancer.feature_names, rotation=90)
plt.hlines(0, 0, cancer.data.shape[1])
plt.ylim(-5, 5)
plt.xlabel("Coefficient index")
plt.ylabel("Coefficient magnitude")
plt.legend()
#In[46]
# L1-penalized logistic regression: sparse coefficients, like Lasso.
# NOTE(review): newer scikit-learn requires a solver that supports L1
# (e.g. solver='liblinear' or 'saga') with penalty="l1" -- this code
# presumably targets an older release; verify against the installed version.
for C, marker in zip([0.001, 1, 100], ['o', '^', 'v']):
    lr_l1 = LogisticRegression(C=C, penalty="l1").fit(X_train, y_train)
    print("Training accuracy of l1 logreg with C={:.3f}: {:.2f}".format(
        C, lr_l1.score(X_train, y_train)))
    print("Test accuracy of l1 logreg with C={:.3f}: {:.2f}".format(
        C, lr_l1.score(X_test, y_test)))
    plt.plot(lr_l1.coef_.T, marker, label="C={:.3f}".format(C))
plt.xticks(range(cancer.data.shape[1]), cancer.feature_names, rotation=90)
plt.hlines(0, 0, cancer.data.shape[1])
plt.xlabel("Coefficient index")
plt.ylabel("Coefficient magnitude")
plt.ylim(-5, 5)
plt.legend(loc=3)
#In[47]
# Three-class blob data for the multiclass linear-model discussion below.
from sklearn.datasets import make_blobs
X, y = make_blobs(random_state=42)
mglearn.discrete_scatter(X[:, 0], X[:, 1], y)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
plt.legend(["Class 0", "Class 1", "Class 2"])
#In[48]
# One-vs-rest multiclass LinearSVC: one coefficient row and one intercept
# per class (3 binary classifiers for 3 classes).
linear_svm = LinearSVC().fit(X, y)
print("Coefficient shape: ", linear_svm.coef_.shape)
print("Intercept shape: ", linear_svm.intercept_.shape)
#In[49]
# Draw each class's binary decision line over the scatter plot.
mglearn.discrete_scatter(X[:, 0], X[:, 1], y)
line = np.linspace(-15, 15)
for coef, intercept, color in zip(linear_svm.coef_, linear_svm.intercept_,
                                  ['b', 'r', 'g']):
    # Solve coef[0]*x + coef[1]*y + intercept = 0 for y.
    plt.plot(line, -(line * coef[0] + intercept) / coef[1], c=color)
plt.ylim(-10, 15)
plt.xlim(-10, 8)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
plt.legend(['Class 0', 'Class 1', 'Class 2', 'Line class 0', 'Line class 1',
            'Line class 2'], loc=(1.01, 0.3))
#In[50]
# Same lines plus the filled multiclass regions chosen by argmax.
mglearn.plots.plot_2d_classification(linear_svm, X, fill=True, alpha=.7)
mglearn.discrete_scatter(X[:, 0], X[:, 1], y)
line = np.linspace(-15, 15)
for coef, intercept, color in zip(linear_svm.coef_, linear_svm.intercept_,
                                  ['b', 'r', 'g']):
    plt.plot(line, -(line * coef[0] + intercept) / coef[1], c=color)
plt.legend(['Class 0', 'Class 1', 'Class 2', 'Line class 0', 'Line class 1',
            'Line class 2'], loc=(1.01, 0.3))
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
#In[51]:
# Three equivalent ways of fitting/predicting. X_train/y_train here are
# still the breast-cancer splits created in cell In[42] above.
logreg = LogisticRegression().fit(X_train, y_train)
#In[52]
# Instantiate first, then chain fit().predict() (fit returns the model).
logreg = LogisticRegression()
y_pred = logreg.fit(X_train, y_train).predict(X_test)
#In[53]
# Fully chained one-liner; the fitted model itself is not kept around.
y_pred = LogisticRegression().fit(X_train, y_train).predict(X_test)
#In[54]:
# Toy binary dataset for illustrating Bernoulli-naive-Bayes style counting.
X = np.array([[0, 1, 0, 1],
              [1, 0, 1, 1],
              [0, 0, 0, 1],
              [1, 0, 1, 0]])
y = np.array([0, 1, 0, 1])
#In[55]:
# For every class label, sum the binary feature columns over the samples
# belonging to that class: the number of ones per feature, per class.
counts = {label: X[y == label].sum(axis=0) for label in np.unique(y)}
print("Feature counts:\n{}".format(counts))
#In[56]:
import os
# Make the Graphviz binaries discoverable by graphviz/pydot. This is a
# Windows- and machine-specific install path -- adjust or remove elsewhere.
os.environ["PATH"] += os.pathsep + 'C:/Program Files (x86)/Graphviz2.38/bin/'
mglearn.plots.plot_animal_tree()
#In[58]
# Unpruned decision tree: memorizes the training set (train accuracy 1.0).
from sklearn.tree import DecisionTreeClassifier
cancer = load_breast_cancer()
X_train, X_test, y_train, y_test = train_test_split(
    cancer.data, cancer.target, stratify=cancer.target, random_state=42)
tree = DecisionTreeClassifier(random_state=0)
tree.fit(X_train, y_train)
print("Accuracy on training set: {:.3f}".format(tree.score(X_train, y_train)))
print("Accuracy on test set: {:.3f}".format(tree.score(X_test, y_test)))
#In[59]
# Pre-pruning: limiting depth trades training accuracy for generalization.
tree = DecisionTreeClassifier(max_depth=4, random_state=0)
tree.fit(X_train, y_train)
print("Accuracy on training set: {:.3f}".format(tree.score(X_train, y_train)))
print("Accuracy on test set: {:.3f}".format(tree.score(X_test, y_test)))
#In[60]
# Export the fitted tree as a GraphViz .dot file for visualization.
from sklearn.tree import export_graphviz
export_graphviz(tree, out_file="tree.dot", class_names=["malignant", "benign"],
                feature_names=cancer.feature_names, impurity=False, filled=True)
#In[61]
# Read the .dot file back and render it (requires the graphviz binaries).
import graphviz
with open("tree.dot") as f:
    dot_graph = f.read()
graphviz.Source(dot_graph)
#In[62]
# Importance of each feature in the tree's splits; sums to 1.
print("Feature importances:\n{}".format(tree.feature_importances_))
#In[63]
def plot_feature_importances_cancer(model):
    """Horizontal bar chart of a fitted model's ``feature_importances_``
    against the breast-cancer feature names (uses the module-level
    ``cancer`` bunch and the current matplotlib figure)."""
    feature_count = cancer.data.shape[1]
    bar_positions = range(feature_count)
    plt.barh(bar_positions, model.feature_importances_, align='center')
    plt.yticks(np.arange(feature_count), cancer.feature_names)
    plt.xlabel("Feature importance")
    plt.ylabel("Feature")
plot_feature_importances_cancer(tree)
#In[64]
# A tree can capture non-monotone relationships a linear model cannot.
tree = mglearn.plots.plot_tree_not_monotone()
display(tree)
#In[65]
import os
os.getcwd()
from os import chdir
# Hard-coded, machine-specific working directory -- adjust on other machines.
chdir("C:\\Users\\ljg\\Documents\\GitHub\\Machine_Learning_Study")
import pandas as pd
ram_prices = pd.read_csv("data\\ram_price.csv")
plt.semilogy(ram_prices.date, ram_prices.price)
plt.xlabel("Year")
plt.ylabel("Price in $/Mbyte")
#In[66]
from sklearn.tree import DecisionTreeRegressor
# use historical data to forecast prices after the year 2000
data_train = ram_prices[ram_prices.date < 2000]
data_test = ram_prices[ram_prices.date >= 2000]
# predict prices based on date
# NOTE(review): slicing a Series with [:, np.newaxis] relies on older pandas
# behavior; newer pandas requires .values[:, np.newaxis] -- verify version.
X_train = data_train.date[:, np.newaxis]
# we use a log-transform to get a simpler relationship of data to target
y_train = np.log(data_train.price)
tree = DecisionTreeRegressor().fit(X_train, y_train)
linear_reg = LinearRegression().fit(X_train, y_train)
# predict on all data
X_all = ram_prices.date[:, np.newaxis]
pred_tree = tree.predict(X_all)
pred_lr = linear_reg.predict(X_all)
# undo log-transform
price_tree = np.exp(pred_tree)
price_lr = np.exp(pred_lr)
#In[67]
# Tree predictions go flat outside the training range; the line extrapolates.
plt.semilogy(data_train.date, data_train.price, label="Training data")
plt.semilogy(data_test.date, data_test.price, label="Test data")
plt.semilogy(ram_prices.date, price_tree, label="Tree prediction")
plt.semilogy(ram_prices.date, price_lr, label="Linear prediction")
plt.legend()
#In[68]
# Small random forest on two-moons to visualize the individual trees.
from sklearn.ensemble import RandomForestClassifier
from sklearn.datasets import make_moons
X, y = make_moons(n_samples=100, noise=0.25, random_state=3)
X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y,
                                                    random_state=42)
forest = RandomForestClassifier(n_estimators=5, random_state=2)
forest.fit(X_train, y_train)
#In[69]
# Each of the 5 trees gets its own panel; last panel is the averaged forest.
fig, axes = plt.subplots(2, 3, figsize=(20, 10))
for i, (ax, tree) in enumerate(zip(axes.ravel(), forest.estimators_)):
    ax.set_title("Tree {}".format(i))
    mglearn.plots.plot_tree_partition(X_train, y_train, tree, ax=ax)
mglearn.plots.plot_2d_separator(forest, X_train, fill=True, ax=axes[-1, -1],
                                alpha=.4)
axes[-1, -1].set_title("Random Forest")
mglearn.discrete_scatter(X_train[:, 0], X_train[:, 1], y_train)
#In[70]
# Full-size forest on the breast-cancer data.
X_train, X_test, y_train, y_test = train_test_split(
    cancer.data, cancer.target, random_state=0)
forest = RandomForestClassifier(n_estimators=100, random_state=0)
forest.fit(X_train, y_train)
print("Accuracy on training set: {:.3f}".format(forest.score(X_train, y_train)))
print("Accuracy on test set: {:.3f}".format(forest.score(X_test, y_test)))
#In[71]:
plot_feature_importances_cancer(forest)
#In[72]:
# Gradient boosting with default settings (100 trees, depth 3, lr 0.1).
from sklearn.ensemble import GradientBoostingClassifier
X_train, X_test, y_train, y_test = train_test_split(
    cancer.data, cancer.target, random_state=0)
gbrt = GradientBoostingClassifier(random_state=0)
gbrt.fit(X_train, y_train)
print("Accuracy on training set: {:.3f}".format(gbrt.score(X_train, y_train)))
print("Accuracy on test set: {:.3f}".format(gbrt.score(X_test, y_test)))
#In[73]:
# Reduce overfitting by shrinking the trees (stumps, max_depth=1)...
gbrt = GradientBoostingClassifier(random_state=0, max_depth=1)
gbrt.fit(X_train, y_train)
print("Accuracy on training set: {:.3f}".format(gbrt.score(X_train, y_train)))
print("Accuracy on test set: {:.3f}".format(gbrt.score(X_test, y_test)))
#In[74]:
# ...or by lowering the learning rate.
gbrt = GradientBoostingClassifier(random_state=0, learning_rate=0.01)
gbrt.fit(X_train, y_train)
print("Accuracy on training set: {:.3f}".format(gbrt.score(X_train, y_train)))
print("Accuracy on test set: {:.3f}".format(gbrt.score(X_test, y_test)))
#In[75]:
gbrt = GradientBoostingClassifier(random_state=0, max_depth=1)
gbrt.fit(X_train, y_train)
plot_feature_importances_cancer(gbrt)
#In[76]:
# Four blobs collapsed to two classes -> not linearly separable in 2-D.
X, y = make_blobs(centers=4, random_state=8)
y = y % 2
mglearn.discrete_scatter(X[:, 0], X[:, 1], y)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
#In[77]:
# A linear SVM in the original 2-D space does poorly on this data.
from sklearn.svm import LinearSVC
linear_svm = LinearSVC().fit(X, y)
mglearn.plots.plot_2d_separator(linear_svm, X)
mglearn.discrete_scatter(X[:, 0], X[:, 1], y)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
#In[78]:
# add the squared second feature (X[:, 1] ** 2) as a third dimension
X_new = np.hstack([X, X[:, 1:] ** 2])
from mpl_toolkits.mplot3d import Axes3D, axes3d
figure = plt.figure()
# visualize in 3D
ax = Axes3D(figure, elev=-152, azim=-26)
# plot first all the points with y == 0, then all with y == 1
mask = y == 0
ax.scatter(X_new[mask, 0], X_new[mask, 1], X_new[mask, 2], c='b',
           cmap=mglearn.cm2, s=60)
ax.scatter(X_new[~mask, 0], X_new[~mask, 1], X_new[~mask, 2], c='r', marker='^',
           cmap=mglearn.cm2, s=60)
ax.set_xlabel("feature0")
ax.set_ylabel("feature1")
ax.set_zlabel("feature1 ** 2")
#In[79]
# In the expanded 3-D feature space the two classes become separable by a
# plane, which a linear SVM can find.
linear_svm_3d = LinearSVC().fit(X_new, y)
coef, intercept = linear_svm_3d.coef_.ravel(), linear_svm_3d.intercept_
# show linear decision boundary
figure = plt.figure()
ax = Axes3D(figure, elev=-152, azim=-26)
xx = np.linspace(X_new[:, 0].min() - 2, X_new[:, 0].max() + 2, 50)
yy = np.linspace(X_new[:, 1].min() - 2, X_new[:, 1].max() + 2, 50)
XX, YY = np.meshgrid(xx, yy)
# Decision plane: coef . (x, y, z) + intercept = 0, solved for z.
ZZ = (coef[0] * XX + coef[1] * YY + intercept) / -coef[2]
ax.plot_surface(XX, YY, ZZ, rstride=8, cstride=8, alpha=0.3)
ax.scatter(X_new[mask, 0], X_new[mask, 1], X_new[mask, 2], c='b',
           cmap=mglearn.cm2, s=60)
ax.scatter(X_new[~mask, 0], X_new[~mask, 1], X_new[~mask, 2], c='r', marker='^',
           cmap=mglearn.cm2, s=60)
ax.set_xlabel("feature0")
ax.set_ylabel("feature1")
# Bug fix: the third axis holds X[:, 1] ** 2 (see the hstack in cell In[78]),
# so the z label is "feature1 ** 2" -- it previously said "feature0 ** 2".
ax.set_zlabel("feature1 ** 2")
#In[80]:
# Project the 3-D plane back to 2-D by substituting z = feature1 ** 2 into
# the learned decision function. Reuses XX/YY and linear_svm_3d from above.
ZZ = YY ** 2
dec = linear_svm_3d.decision_function(np.c_[XX.ravel(), YY.ravel(), ZZ.ravel()])
plt.contourf(XX, YY, dec.reshape(XX.shape), levels=[dec.min(), 0, dec.max()],
             cmap=mglearn.cm2, alpha=0.5)
mglearn.discrete_scatter(X[:, 0], X[:, 1], y)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
#In[81]:
# RBF-kernel SVM; highlight the support vectors that define the boundary.
from sklearn.svm import SVC
X, y = mglearn.tools.make_handcrafted_dataset()
svm = SVC(kernel='rbf', C=10, gamma=0.1).fit(X, y)
mglearn.plots.plot_2d_separator(svm, X, eps=.5)
mglearn.discrete_scatter(X[:, 0], X[:, 1], y)
# plot support vectors
sv = svm.support_vectors_
# class labels of support vectors are given by the sign of the dual coefficients
sv_labels = svm.dual_coef_.ravel() > 0
mglearn.discrete_scatter(sv[:, 0], sv[:, 1], sv_labels, s=15, markeredgewidth=3)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
#In[82]:
# 3x3 grid of fits over log10(C) in {-1, 0, 3} x log10(gamma) in {-1, 0, 1}.
fig, axes = plt.subplots(3, 3, figsize=(15, 10))
for ax, C in zip(axes, [-1, 0, 3]):
    for a, gamma in zip(ax, range(-1, 2)):
        mglearn.plots.plot_svm(log_C=C, log_gamma=gamma, ax=a)
axes[0, 0].legend(["class 0", "class 1", "sv class 0", "sv class 1"],
                  ncol=4, loc=(.9, 1.2))
#In[83]:
# SVC on raw (unscaled) breast-cancer features.
X_train, X_test, y_train, y_test = train_test_split(
    cancer.data, cancer.target, random_state=0)
svc = SVC()
svc.fit(X_train, y_train)
print("Accuracy on training set: {:.2f}".format(svc.score(X_train, y_train)))
print("Accuracy on test set: {:.2f}".format(svc.score(X_test, y_test)))
#In[84]:
# Per-feature ranges span orders of magnitude -- bad for kernel SVMs.
plt.plot(X_train.min(axis=0), 'o', label="min")
plt.plot(X_train.max(axis=0), '^', label="max")
plt.legend(loc=4)
plt.xlabel("Feature index")
plt.ylabel("Feature magnitude")
plt.yscale("log")
#In[85]:
# Manual min-max scaling to [0, 1], equivalent to MinMaxScaler.
# compute the minimum value per feature on the training set
min_on_training = X_train.min(axis=0)
# compute the range of each feature (max - min) on the training set
range_on_training = (X_train - min_on_training).max(axis=0)
# subtract the min, and divide by range
# afterward, min=0 and max=1 for each feature
X_train_scaled = (X_train - min_on_training) / range_on_training
print("Minimum for each feature\n{}".format(X_train_scaled.min(axis=0)))
print("Maximum for each feature\n {}".format(X_train_scaled.max(axis=0)))
#In[86]:
# Scale the test set with the TRAINING statistics to avoid leakage.
X_test_scaled = (X_test - min_on_training) / range_on_training
#In[87]:
svc = SVC()
svc.fit(X_train_scaled, y_train)
print("Accuracy on training set: {:.3f}".format(
    svc.score(X_train_scaled, y_train)))
print("Accuracy on test set: {:.3f}".format(svc.score(X_test_scaled, y_test)))
#In[88]:
# With scaled data a much larger C improves the fit further.
svc = SVC(C=1000)
svc.fit(X_train_scaled, y_train)
print("Accuracy on training set: {:.3f}".format(
    svc.score(X_train_scaled, y_train)))
print("Accuracy on test set: {:.3f}".format(svc.score(X_test_scaled, y_test)))
#In[89]:
# Network diagrams: logistic regression vs. one- and two-hidden-layer MLPs.
display(mglearn.plots.plot_logistic_regression_graph())
#In[90]:
display(mglearn.plots.plot_single_hidden_layer_graph())
#In[91]:
# The two common hidden-layer nonlinearities.
line = np.linspace(-3, 3, 100)
plt.plot(line, np.tanh(line), label="tanh")
plt.plot(line, np.maximum(line, 0), label="relu")
plt.legend(loc="best")
plt.xlabel("x")
plt.ylabel("relu(x), tanh(x)")
#In[92]:
mglearn.plots.plot_two_hidden_layer_graph()
#In[93]:
# Default MLP (100 hidden units) on two-moons; lbfgs converges well on
# small datasets.
from sklearn.neural_network import MLPClassifier
from sklearn.datasets import make_moons
X, y = make_moons(n_samples=100, noise=0.25, random_state=3)
X_train, X_test, y_train, y_test = train_test_split(X, y, stratify=y,random_state=42)
mlp = MLPClassifier(solver='lbfgs', random_state=0).fit(X_train, y_train)
mglearn.plots.plot_2d_separator(mlp, X_train, fill=True, alpha=.3)
mglearn.discrete_scatter(X_train[:, 0], X_train[:, 1], y_train)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
#In[94]
# Only 10 hidden units: a more jagged, lower-capacity boundary.
mlp = MLPClassifier(solver='lbfgs', random_state=0, hidden_layer_sizes=[10])
mlp.fit(X_train, y_train)
mglearn.plots.plot_2d_separator(mlp, X_train, fill=True, alpha=.3)
mglearn.discrete_scatter(X_train[:, 0], X_train[:, 1], y_train)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
#In[95]:
# Two hidden layers of 10 units each.
mlp = MLPClassifier(solver='lbfgs', random_state=0,
                    hidden_layer_sizes=[10, 10])
mlp.fit(X_train, y_train)
mglearn.plots.plot_2d_separator(mlp, X_train, fill=True, alpha=.3)
mglearn.discrete_scatter(X_train[:, 0], X_train[:, 1], y_train)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
#In[96]:
# Same architecture with tanh instead of relu: smoother boundary.
mlp = MLPClassifier(solver='lbfgs', activation='tanh',
                    random_state=0, hidden_layer_sizes=[10, 10])
mlp.fit(X_train, y_train)
mglearn.plots.plot_2d_separator(mlp, X_train, fill=True, alpha=.3)
mglearn.discrete_scatter(X_train[:, 0], X_train[:, 1], y_train)
plt.xlabel("Feature 0")
plt.ylabel("Feature 1")
#In[97]:
# Grid over hidden-layer size x L2 penalty (alpha).
fig, axes = plt.subplots(2, 4, figsize=(20, 8))
for axx, n_hidden_nodes in zip(axes, [10, 100]):
    for ax, alpha in zip(axx, [0.0001, 0.01, 0.1, 1]):
        mlp = MLPClassifier(solver='lbfgs', random_state=0,
                            hidden_layer_sizes=[n_hidden_nodes, n_hidden_nodes],
                            alpha=alpha)
        mlp.fit(X_train, y_train)
        mglearn.plots.plot_2d_separator(mlp, X_train, fill=True, alpha=.3, ax=ax)
        mglearn.discrete_scatter(X_train[:, 0], X_train[:, 1], y_train, ax=ax)
        ax.set_title("n_hidden=[{}, {}]\nalpha={:.4f}".format(
            n_hidden_nodes, n_hidden_nodes, alpha))
#In[98]:
# Same architecture, different random initializations: solutions differ.
fig, axes = plt.subplots(2, 4, figsize=(20, 8))
for i, ax in enumerate(axes.ravel()):
    mlp = MLPClassifier(solver='lbfgs', random_state=i,
                        hidden_layer_sizes=[100, 100])
    mlp.fit(X_train, y_train)
    mglearn.plots.plot_2d_separator(mlp, X_train, fill=True, alpha=.3, ax=ax)
    mglearn.discrete_scatter(X_train[:, 0], X_train[:, 1], y_train, ax=ax)
print("Cancer data per-feature maxima:\n{}".format(cancer.data.max(axis=0)))
#In[100]:
X_train, X_test, y_train, y_test = train_test_split(
cancer.data, cancer.target, random_state=0)
mlp = MLPClassifier(random_state=42)
mlp.fit(X_train, y_train)
print("Accuracy on training set: {:.2f}".format(mlp.score(X_train, y_train)))
print("Accuracy on test set: {:.2f}".format(mlp.score(X_test, y_test)))
#In[101]:
mean_on_train = X_train.mean(axis=0)
std_on_train = X_train.std(axis=0)
X_train_scaled = (X_train - mean_on_train) / std_on_train
X_test_scaled = (X_test - mean_on_train) / std_on_train
mlp = MLPClassifier(random_state=0)
mlp.fit(X_train_scaled, y_train)
print("Accuracy on training set: {:.3f}".format(
mlp.score(X_train_scaled, y_train)))
print("Accuracy on test set: {:.3f}".format(mlp.score(X_test_scaled, y_test)))
#In[102]:
mlp = MLPClassifier(max_iter=1000, random_state=0)
mlp.fit(X_train_scaled, y_train)
print("Accuracy on training set: {:.3f}".format(
mlp.score(X_train_scaled, y_train)))
print("Accuracy on test set: {:.3f}".format(mlp.score(X_test_scaled, y_test)))
#In[103]:
mlp = MLPClassifier(max_iter=1000, alpha=1, random_state=0)
mlp.fit(X_train_scaled, y_train)
print("Accuracy on training set: {:.3f}".format(
mlp.score(X_train_scaled, y_train)))
print("Accuracy on test set: {:.3f}".format(mlp.score(X_test_scaled, y_test)))
#In[104]:
plt.figure(figsize=(20, 5))
plt.imshow(mlp.coefs_[0], interpolation='none', cmap='viridis')
plt.yticks(range(30), cancer.feature_names)
plt.xlabel("Columns in weight matrix")
plt.ylabel("Input feature")
plt.colorbar()
#In[105]:
# Uncertainty estimates: train a GBRT on circles data with string labels
# so that classes_ / decision_function semantics are easy to inspect.
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.datasets import make_blobs, make_circles
X, y = make_circles(noise=0.25, factor=0.5, random_state=1)
y_named = np.array(["blue", "red"])[y]
X_train, X_test, y_train_named, y_test_named, y_train, y_test = \
    train_test_split(X, y_named, y, random_state=0)
gbrt = GradientBoostingClassifier(random_state=0)
gbrt.fit(X_train, y_train_named)
#In[106]:
# For binary problems decision_function returns one score per sample.
print("X_test.shape: {}".format(X_test.shape))
print("Decision function shape: {}".format(
    gbrt.decision_function(X_test).shape))
#In[107]:
print("Decision function:\n{}".format(gbrt.decision_function(X_test)[:6]))
#In[108]:
# Positive score -> second class in classes_; matches predict().
print("Thresholded decision function:\n{}".format(
    gbrt.decision_function(X_test) > 0))
print("Predictions:\n{}".format(gbrt.predict(X_test)))
#In[109]:
# Reproduce predict() manually: threshold, then index into classes_.
greater_zero = (gbrt.decision_function(X_test) > 0).astype(int)
pred = gbrt.classes_[greater_zero]
print("pred is equal to predictions: {}".format(
    np.all(pred == gbrt.predict(X_test))))
#In[110]:
# The score range is arbitrary (not bounded like probabilities).
decision_function = gbrt.decision_function(X_test)
print("Decision function minimum: {:.2f} maximum: {:.2f}".format(
    np.min(decision_function), np.max(decision_function)))
#In[111]:
# Left: decision boundary; right: continuous decision-function scores.
fig, axes = plt.subplots(1, 2, figsize=(13, 5))
mglearn.tools.plot_2d_separator(gbrt, X, ax=axes[0], alpha=.4,
                                fill=True, cm=mglearn.cm2)
scores_image = mglearn.tools.plot_2d_scores(gbrt, X, ax=axes[1],
                                            alpha=.4, cm=mglearn.ReBl)
for ax in axes:
    # plot training and test points
    mglearn.discrete_scatter(X_test[:, 0], X_test[:, 1], y_test,
                             markers='^', ax=ax)
    mglearn.discrete_scatter(X_train[:, 0], X_train[:, 1], y_train,
                             markers='o', ax=ax)
    ax.set_xlabel("Feature 0")
    ax.set_ylabel("Feature 1")
cbar = plt.colorbar(scores_image, ax=axes.tolist())
axes[0].legend(["Test class 0", "Test class 1", "Train class 0",
                "Train class 1"], ncol=4, loc=(.1, 1.1))
#In[112]:
# predict_proba returns one column per class; rows sum to 1.
print("Shape of probabilities: {}".format(gbrt.predict_proba(X_test).shape))
#In[113]:
print("Predicted probabilities:\n{}".format(
    gbrt.predict_proba(X_test[:6])))
#In[114]:
# Same plots as In[111] but colored by class-1 probability.
fig, axes = plt.subplots(1, 2, figsize=(13, 5))
mglearn.tools.plot_2d_separator(
    gbrt, X, ax=axes[0], alpha=.4, fill=True, cm=mglearn.cm2)
scores_image = mglearn.tools.plot_2d_scores(
    gbrt, X, ax=axes[1], alpha=.5, cm=mglearn.ReBl, function='predict_proba')
for ax in axes:
    # plot training and test points
    mglearn.discrete_scatter(X_test[:, 0], X_test[:, 1], y_test,
                             markers='^', ax=ax)
    mglearn.discrete_scatter(X_train[:, 0], X_train[:, 1], y_train,
                             markers='o', ax=ax)
    ax.set_xlabel("Feature 0")
    ax.set_ylabel("Feature 1")
cbar = plt.colorbar(scores_image, ax=axes.tolist())
axes[0].legend(["Test class 0", "Test class 1", "Train class 0",
                "Train class 1"], ncol=4, loc=(.1, 1.1))
#In[115]:
# Multiclass uncertainty: GBRT on the 3-class iris dataset.
from sklearn.datasets import load_iris
iris = load_iris()
X_train, X_test, y_train, y_test = train_test_split(
    iris.data, iris.target, random_state=42)
gbrt = GradientBoostingClassifier(learning_rate=0.01, random_state=0)
gbrt.fit(X_train, y_train)
#In[116]:
# With >2 classes decision_function has one column per class.
print("Decision function shape: {}".format(gbrt.decision_function(X_test).shape))
print("Decision function:\n{}".format(gbrt.decision_function(X_test)[:6, :]))
#In[117]:
# argmax over the per-class scores reproduces predict().
print("Argmax of decision function:\n{}".format(
    np.argmax(gbrt.decision_function(X_test), axis=1)))
print("Predictions:\n{}".format(gbrt.predict(X_test)))
#In[118]:
print("Predicted probabilities:\n{}".format(gbrt.predict_proba(X_test)[:6]))
print("Sums: {}".format(gbrt.predict_proba(X_test)[:6].sum(axis=1)))
#In[119]:
print("Argmax of predicted probabilities:\n{}".format(
    np.argmax(gbrt.predict_proba(X_test), axis=1)))
print("Predictions:\n{}".format(gbrt.predict(X_test)))
#In[120]:
# With string targets, map argmax back through classes_ to recover labels.
logreg = LogisticRegression()
# represent each target by its class name in the iris dataset
named_target = iris.target_names[y_train]
logreg.fit(X_train, named_target)
print("unique classes in training data: {}".format(logreg.classes_))
print("predictions: {}".format(logreg.predict(X_test)[:10]))
argmax_dec_func = np.argmax(logreg.decision_function(X_test), axis=1)
print("argmax of decision function: {}".format(argmax_dec_func[:10]))
print("argmax combined with classes_: {}".format(
    logreg.classes_[argmax_dec_func][:10]))
| [
"ljg95924@naver.com"
] | ljg95924@naver.com |
754a9e6ac4ad159d561e3bc49223a97c83cfe3d5 | 5b04b47b9498890ef95fa6e78fe7135c570a3f43 | /try_tables.py | 01c7612337c5cd00e0169b6d9038e2becabec074 | [] | no_license | donboyd5/puf_analysis_frozen | 8ca3ea7e899a7db60fe7b3ed681cbd80ef0f59e8 | 3aa2b1cb680aad16987ed776bcfdc878418e7a64 | refs/heads/main | 2023-04-12T12:55:48.495699 | 2020-12-16T09:48:39 | 2020-12-16T09:48:39 | 361,897,853 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,398 | py | # -*- coding: utf-8 -*-
"""
Created on Fri Oct 23 03:14:39 2020
@author: donbo
"""
# %% notes
# https://pandas.pydata.org/pandas-docs/stable/user_guide/style.html
# https://pbpython.com/styling-pandas.html
# https://mkaz.blog/code/python-string-format-cookbook/
# https://www.youtube.com/watch?v=Sj42rqym9lk
# https://mode.com/example-gallery/python_dataframe_styling/
# https://github.com/spyder-ide/spyder-notebook
# https://groups.google.com/g/spyderlib
# https://plotly.com/python/table/
# https://mode.com/example-gallery/python_dataframe_styling/
# %% imports
import pandas as pd
import plotly.graph_objects as go
import matplotlib
# %% check 1
# Build a small numeric frame, render it with a background colour gradient,
# and append the resulting HTML to a file on disk.
df = pd.DataFrame([[3, 2, 10, 4], [20, 1, 3, 2], [5, 4, 6, 1]])
df.style.background_gradient()
pathfn = r'c:\temp\fn.html'
# open() modes: 'x' creates a new file (fails if it exists), 'w' overwrites,
# 'a' appends. The directory must already exist.
# Bug fix: the handle was previously opened/closed by hand; a context
# manager guarantees the file is closed even if render() raises.
with open(pathfn, mode='a') as f:
    f.write(df.style.background_gradient().render())
# %% data
# Download a sample states dataset from plotly's public GitHub (requires
# network access) and render it as an interactive plotly Table figure.
df = pd.read_csv('https://raw.githubusercontent.com/plotly/datasets/master/2014_usa_states.csv')
fig = go.Figure(data=[go.Table(
    header=dict(values=list(df.columns),
                fill_color='paleturquoise',
                align='left'),
    cells=dict(values=[df.Rank, df.State, df.Postal, df.Population],
               fill_color='lavender',
               align='left'))
])
fig.show()
| [
"donboyd5@gmail.com"
] | donboyd5@gmail.com |
3c4fa1ec3cbb17b076c82c304c1c922f7a61ff2b | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/Gio/ConverterFlags.py | a973af808109d91b5dc90dbdf029b6c6f0d35170 | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 12,853 | py | # encoding: utf-8
# module gi.repository.Gio
# from /usr/lib64/girepository-1.0/Gio-2.0.typelib
# by generator 1.147
# no doc
# imports
import gi as __gi
import gi.overrides as __gi_overrides
import gi.overrides.Gio as __gi_overrides_Gio
import gi.overrides.GObject as __gi_overrides_GObject
import gi.repository.GObject as __gi_repository_GObject
import gobject as __gobject
class ConverterFlags(__gobject.GFlags):
# no doc
def as_integer_ratio(self): # real signature unknown; restored from __doc__
"""
Return integer ratio.
Return a pair of integers, whose ratio is exactly equal to the original int
and with a positive denominator.
>>> (10).as_integer_ratio()
(10, 1)
>>> (-10).as_integer_ratio()
(-10, 1)
>>> (0).as_integer_ratio()
(0, 1)
"""
pass
def bit_length(self): # real signature unknown; restored from __doc__
"""
Number of bits necessary to represent self in binary.
>>> bin(37)
'0b100101'
>>> (37).bit_length()
6
"""
pass
def conjugate(self, *args, **kwargs): # real signature unknown
""" Returns self, the complex conjugate of any int. """
pass
def from_bytes(self, *args, **kwargs): # real signature unknown
"""
Return the integer represented by the given array of bytes.
bytes
Holds the array of bytes to convert. The argument must either
support the buffer protocol or be an iterable object producing bytes.
Bytes and bytearray are examples of built-in objects that support the
buffer protocol.
byteorder
The byte order used to represent the integer. If byteorder is 'big',
the most significant byte is at the beginning of the byte array. If
byteorder is 'little', the most significant byte is at the end of the
byte array. To request the native byte order of the host system, use
`sys.byteorder' as the byte order value.
signed
Indicates whether two's complement is used to represent the integer.
"""
pass
def to_bytes(self, *args, **kwargs): # real signature unknown
"""
Return an array of bytes representing an integer.
length
Length of bytes object to use. An OverflowError is raised if the
integer is not representable with the given number of bytes.
byteorder
The byte order used to represent the integer. If byteorder is 'big',
the most significant byte is at the beginning of the byte array. If
byteorder is 'little', the most significant byte is at the end of the
byte array. To request the native byte order of the host system, use
`sys.byteorder' as the byte order value.
signed
Determines whether two's complement is used to represent the integer.
If signed is False and a negative integer is given, an OverflowError
is raised.
"""
pass
def __abs__(self, *args, **kwargs): # real signature unknown
""" abs(self) """
pass
def __add__(self, *args, **kwargs): # real signature unknown
""" Return self+value. """
pass
def __and__(self, *args, **kwargs): # real signature unknown
""" Return self&value. """
pass
def __bool__(self, *args, **kwargs): # real signature unknown
""" self != 0 """
pass
def __ceil__(self, *args, **kwargs): # real signature unknown
""" Ceiling of an Integral returns itself. """
pass
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __divmod__(self, *args, **kwargs): # real signature unknown
""" Return divmod(self, value). """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __float__(self, *args, **kwargs): # real signature unknown
""" float(self) """
pass
def __floordiv__(self, *args, **kwargs): # real signature unknown
""" Return self//value. """
pass
def __floor__(self, *args, **kwargs): # real signature unknown
""" Flooring an Integral returns itself. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __getnewargs__(self, *args, **kwargs): # real signature unknown
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __index__(self, *args, **kwargs): # real signature unknown
""" Return self converted to an integer, if self is suitable for use as an index into a list. """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __int__(self, *args, **kwargs): # real signature unknown
""" int(self) """
pass
def __invert__(self, *args, **kwargs): # real signature unknown
""" ~self """
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lshift__(self, *args, **kwargs): # real signature unknown
""" Return self<<value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
def __mod__(self, *args, **kwargs): # real signature unknown
""" Return self%value. """
pass
def __mul__(self, *args, **kwargs): # real signature unknown
""" Return self*value. """
pass
def __neg__(self, *args, **kwargs): # real signature unknown
""" -self """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __or__(self, *args, **kwargs): # real signature unknown
""" Return self|value. """
pass
def __pos__(self, *args, **kwargs): # real signature unknown
""" +self """
pass
def __pow__(self, *args, **kwargs): # real signature unknown
""" Return pow(self, value, mod). """
pass
def __radd__(self, *args, **kwargs): # real signature unknown
""" Return value+self. """
pass
def __rand__(self, *args, **kwargs): # real signature unknown
""" Return value&self. """
pass
def __rdivmod__(self, *args, **kwargs): # real signature unknown
""" Return divmod(value, self). """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __rfloordiv__(self, *args, **kwargs): # real signature unknown
""" Return value//self. """
pass
def __rlshift__(self, *args, **kwargs): # real signature unknown
""" Return value<<self. """
pass
def __rmod__(self, *args, **kwargs): # real signature unknown
""" Return value%self. """
pass
def __rmul__(self, *args, **kwargs): # real signature unknown
""" Return value*self. """
pass
def __ror__(self, *args, **kwargs): # real signature unknown
""" Return value|self. """
pass
def __round__(self, *args, **kwargs): # real signature unknown
"""
Rounding an Integral returns itself.
Rounding with an ndigits argument also returns an integer.
"""
pass
def __rpow__(self, *args, **kwargs): # real signature unknown
""" Return pow(value, self, mod). """
pass
def __rrshift__(self, *args, **kwargs): # real signature unknown
""" Return value>>self. """
pass
def __rshift__(self, *args, **kwargs): # real signature unknown
""" Return self>>value. """
pass
def __rsub__(self, *args, **kwargs): # real signature unknown
""" Return value-self. """
pass
def __rtruediv__(self, *args, **kwargs): # real signature unknown
""" Return value/self. """
pass
def __rxor__(self, *args, **kwargs): # real signature unknown
""" Return value^self. """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Returns size in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
def __sub__(self, *args, **kwargs): # real signature unknown
""" Return self-value. """
pass
def __truediv__(self, *args, **kwargs): # real signature unknown
""" Return self/value. """
pass
def __trunc__(self, *args, **kwargs): # real signature unknown
""" Truncating an Integral returns itself. """
pass
def __xor__(self, *args, **kwargs): # real signature unknown
""" Return self^value. """
pass
denominator = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""the denominator of a rational number in lowest terms"""
first_value_name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
first_value_nick = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
imag = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""the imaginary part of a complex number"""
numerator = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""the numerator of a rational number in lowest terms"""
real = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""the real part of a complex number"""
value_names = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
value_nicks = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
FLUSH = 2
INPUT_AT_END = 1
NONE = 0
__class__ = type
__dict__ = None # (!) real value is "mappingproxy({'__module__': 'gi.repository.Gio', '__dict__': <attribute '__dict__' of 'ConverterFlags' objects>, '__doc__': None, '__gtype__': <GType GConverterFlags (94269256688720)>, '__flags_values__': {0: <flags 0 of type Gio.ConverterFlags>, 1: <flags G_CONVERTER_INPUT_AT_END of type Gio.ConverterFlags>, 2: <flags G_CONVERTER_FLUSH of type Gio.ConverterFlags>}, '__info__': gi.EnumInfo(ConverterFlags), 'NONE': <flags 0 of type Gio.ConverterFlags>, 'INPUT_AT_END': <flags G_CONVERTER_INPUT_AT_END of type Gio.ConverterFlags>, 'FLUSH': <flags G_CONVERTER_FLUSH of type Gio.ConverterFlags>})"
__flags_values__ = {
0: 0,
1: 1,
2: 2,
}
__gtype__ = None # (!) real value is '<GType GConverterFlags (94269256688720)>'
__info__ = gi.EnumInfo(ConverterFlags)
| [
"ttys3@outlook.com"
] | ttys3@outlook.com |
2168445a69fd58027a1d22b32431a5f3569f52c3 | 1c39d98a7f4be6939bcbacbf3b4f7d9610bf2ea9 | /0.Basis/0.pythonbook/25.RegularExpressions.py | c8cad87c86ebaf715bf911029b2cde0a21867e3e | [] | no_license | vlong638/VL.Python | 03ae0da96164d9cd2de521faea2cb86e68152bc8 | 06499fa1f7f7e4b4ae9f9e470c04f78bce86a7b1 | refs/heads/master | 2021-01-13T11:01:29.137801 | 2016-11-04T05:47:09 | 2016-11-04T05:47:09 | 69,540,586 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,928 | py | import PrintHelper
# Tutorial script: prints an annotated walkthrough of Python regular
# expressions. The PrintHelper.* calls only render narration/sample text
# (mixed English/Chinese); the plain statements in between actually run
# each demo so its real output appears under the narrated code.
PrintHelper.PrintTitle('Regular Expressions With Python')
print('适用于数据挖掘,数据处理,抓取网页数据,大量的数据集合的模式化匹配')
print('正则表达式是字符串的模式化匹配,本章介绍如何使用正则表达式进行匹配')
PrintHelper.PrintSubtitle('The re Module')
PrintHelper.PrintHint('re,正则表达式模块')
PrintHelper.PrintHint('@pattern=re.compile(r@pattenString)','编译pattern')
PrintHelper.PrintHint('@pattern.findall(@stringForMatch)','使用pattern对象进行字符串匹配')
PrintHelper.PrintCode('import re')
PrintHelper.PrintCode('pAplus = re.compile( r\"a+\" )')
PrintHelper.PrintCode('lAplus = pAplus.findall( \"aardvark\" )')
PrintHelper.PrintCode('print( lAplus )')
# Live demo: findall returns every non-overlapping match as a list.
import re
pAplus = re.compile( r"a+" )
lAplus = pAplus.findall( "aardvark" )
print( lAplus )
PrintHelper.PrintSubtitle('Shorthand,简写')
PrintHelper.PrintHint('re.findall(r@patternString,@stringForMatch)','直接使用patternString进行字符串匹配')
PrintHelper.PrintSubtitle('Match Objects')
print('当你需要更多的匹配信息的时候,re模块提供了一种称为MatchObjects的对象类型')
print('这种对象包含了更多的关于匹配的信息')
PrintHelper.PrintHint('@match=re.search(r@patternString,@stringForMatch)')
PrintHelper.PrintHint('@match.group()','匹配的内容')
PrintHelper.PrintHint('@match.start()','匹配的起始位置')
PrintHelper.PrintHint('@match.end()','匹配的结束位置')
PrintHelper.PrintCode('m = re.search( r\"a+\", \"Look out for the aardvark!\" )')
PrintHelper.PrintCode('print( \"{} is found at index {}\".format( m.group(), m.start() ) )')
# Live demo: search returns a match object (or None) carrying the matched
# text plus its position in the subject string.
m = re.search( r"a+", "Look out for the aardvark!" )
print( "{} is found at index {}".format( m.group(), m.start() ) )
PrintHelper.PrintSubtitle('Lists Of Matches',"多项匹配结果")
PrintHelper.PrintCode('import re')
PrintHelper.PrintCode('mlist = re.finditer( r\"a+\", \"Look out! A dangerous aardvark is on the loose!\" )')
PrintHelper.PrintCode('for m in mlist:')
PrintHelper.PrintCode(' print( \"{} is found at index {} and ends at index {}.\".format( m.group(), m.start(), m.end())) ')
# Live demo: finditer yields one match object per occurrence.
import re
mlist = re.finditer( r"a+", "Look out! A dangerous aardvark is on the loose!" )
for m in mlist:
    print( "{} is found at index {} and ends at index {}.".format( m.group(), m.start(), m.end()))
PrintHelper.PrintTitle('Writing Regular Expressions')
PrintHelper.PrintSubtitle('Regular Expressions With Square Brackets')
PrintHelper.PrintCode('import re')
PrintHelper.PrintCode('slist = re.findall( r\"b[aeiou]ll\", \"Bill Gates and Uwe Boll \\ drank Red Bull at a football match in Campbell.\" )')
PrintHelper.PrintCode('print( slist )')
# Live demo: [aeiou] is a character class matching exactly one vowel.
import re
slist = re.findall( r"b[aeiou]ll", "Bill Gates and Uwe Boll \ drank Red Bull at a football match in Campbell." )
print( slist )
# Reference table of regex escape sequences (printed, not executed).
PrintHelper.PrintSubtitle('Special Sequences,特殊序列标记')
PrintHelper.PrintSampleWithDescription('\\b','word boundary,单词边界')
PrintHelper.PrintSampleWithDescription('\\B','not a word boundary,非单次边界')
PrintHelper.PrintSampleWithDescription('\\d','digit,数字')
PrintHelper.PrintSampleWithDescription('\\D','not a digit,非数字')
PrintHelper.PrintSampleWithDescription('\\n','newline,换行')
PrintHelper.PrintSampleWithDescription('\\r','carriage return,回车')
PrintHelper.PrintSampleWithDescription('\\s','whitespace,空格')
PrintHelper.PrintSampleWithDescription('\\S','not a whitespace,非空格')
PrintHelper.PrintSampleWithDescription('\\t','tabulation,缩进')
PrintHelper.PrintSampleWithDescription('\\w','alphanumeric character,数字+字母')
PrintHelper.PrintSampleWithDescription('\\W','not an alphanumeric character,非数字和字母')
PrintHelper.PrintSampleWithDescription('\\/','forward slash,斜杠,左下斜杠')
PrintHelper.PrintSampleWithDescription('\\\\','backslash,反斜杠,右下反斜杠')
PrintHelper.PrintSampleWithDescription('\\\"','double quote,双眼号')
PrintHelper.PrintSampleWithDescription('\\\'','single quote,单眼号')
# NOTE(review): the next sample shows '-' but describes the start-of-string
# anchor, which is normally '^' — looks like a typo in the narration text.
# Left untouched: it is runtime output, not a comment.
PrintHelper.PrintSampleWithDescription('-','start of a string,字符串头')
PrintHelper.PrintSampleWithDescription('$','end of a string,字符串尾')
PrintHelper.PrintSampleWithDescription('.','any character,任意字符')
PrintHelper.PrintSubtitle('Repetition,重复标记')
PrintHelper.PrintSampleWithDescription('*','zero or more,0次或更多')
PrintHelper.PrintSampleWithDescription('+','one or more,至少一次')
PrintHelper.PrintSampleWithDescription('?','zero or one,可能出现一次')
PrintHelper.PrintSampleWithDescription('{n,m}','at least n and at most m,至少n次,最多m次')
PrintHelper.PrintSampleWithDescription('{n,}','at least n,至少n次')
PrintHelper.PrintSampleWithDescription('{n}','exactly n,正好n次')
PrintHelper.PrintTitle('Grouping,匹配分组')
PrintHelper.PrintCode('import re')
PrintHelper.PrintCode('pDate = re.compile( r\"(\\d{1,2})-(\\d{1,2})-(\\d{4})\" )')
PrintHelper.PrintCode('m = pDate.search( \"In response to your letter of 25-3-2015, \\ I decided to hire a hitman to get you.\" )')
PrintHelper.PrintCode('if m:')
PrintHelper.PrintCode(' print( \"Date {}; day {}; month {}; year {}\"')
PrintHelper.PrintCode(' .format( m.group(0), m.group(1), m.group(2), m.group(3) ) )')
# Live demo: parentheses create capture groups; group(0) is the whole
# match, groups 1..n are the captured sub-matches in order.
import re
pDate = re.compile( r"(\d{1,2})-(\d{1,2})-(\d{4})" )
m = pDate.search( "In response to your letter of 25-3-2015, \ I decided to hire a hitman to get you." )
if m:
    print( "Date {}; day {}; month {}; year {}"
        .format( m.group(0), m.group(1), m.group(2), m.group(3) ) )
PrintHelper.PrintSubtitle('findall() and Groups')
PrintHelper.PrintHint('findall(@stringForMatch)')
PrintHelper.PrintCode('import re')
PrintHelper.PrintCode('pDate = re.compile( r\"(\\d{1,2})-(\\d{1,2})-(\\d{4})\" )')
PrintHelper.PrintCode('datelist = pDate.findall( \"In response to your letter of \\ 25-3-2015, on 27-3-2015 I decided to hire a hitman to get you.\" )')
PrintHelper.PrintCode('for date in datelist:')
PrintHelper.PrintCode(' print( date )')
# Live demo: when the pattern has capture groups, findall returns a
# tuple of the captured groups for each match instead of the full match.
import re
pDate = re.compile( r"(\d{1,2})-(\d{1,2})-(\d{4})" )
datelist = pDate.findall( "In response to your letter of \ 25-3-2015, on 27-3-2015 I decided to hire a hitman to get you." )
for date in datelist:
    print( date )
PrintHelper.PrintSubtitle('Named Groups,组命名')
PrintHelper.PrintHint('(?P<@name>...)')
PrintHelper.PrintCode('pDate = re.compile( r\"(?P<day>\\d{1,2})-(?P<month>\\d{1,2})-(?P<year>\\d{4})\")')
PrintHelper.PrintCode('m = pDate.search( \"In response to your letter of 25-3-2015, \\ I curse you.\" )')
PrintHelper.PrintCode('if m:')
PrintHelper.PrintCode(' print( \"day is {}\".format( m.group( \'day\') ) )')
PrintHelper.PrintCode(' print( \"month is {}\".format( m.group( \'month\') ) )')
PrintHelper.PrintCode(' print( \"year is {}\".format( m.group( \'year\') ) )')
# Live demo: (?P<name>...) names a group so it can be read back with
# m.group('name') instead of a positional index.
pDate = re.compile( r"(?P<day>\d{1,2})-(?P<month>\d{1,2})-(?P<year>\d{4})")
m = pDate.search( "In response to your letter of 25-3-2015, \ I curse you." )
if m:
    print( "day is {}".format( m.group( 'day') ) )
    print( "month is {}".format( m.group( 'month') ) )
    print( "year is {}".format( m.group( 'year') ) )
PrintHelper.PrintSubtitle('Referring Within A Regular Expression,匹配内引用')
PrintHelper.PrintHint('\\n,如(\\S).*\\1指代任意非空格的字符,且重复两次,\\1指第一个匹配项')
PrintHelper.PrintTitle('Replacing,替换')
print('正则通常用以匹配内容,当然你也可以使用正则进行替换处理')
PrintHelper.PrintHint('re.sub(r@pattern,@replace,@stringForMatch)')
PrintHelper.PrintCode('import re')
PrintHelper.PrintCode('s = re.sub( r\"([iy])se\", \"\\g<1>ze\", \"Whether you categorise, \\ emphasise, or analyse, you should use American spelling!\" )')
PrintHelper.PrintCode('print( s )')
# Live demo: \g<1> in the replacement string re-inserts capture group 1.
import re
s = re.sub( r"([iy])se", "\g<1>ze", "Whether you categorise, \ emphasise, or analyse, you should use American spelling!" )
print( s )
print()
| [
"vlong638@163.com"
] | vlong638@163.com |
2ee3baa013cf6894c49b26fbbcd9f1e7e01a7640 | 6ff51e18e843e07fb9a08f299d6cd90c17ec54f0 | /softwares/base16/output/prompt-toolkit/base16/base16-codeschool.py | 979d22ad9c27e68a5a3835d2c224fab69e871df6 | [] | no_license | xzdandy/Configs | 79258716f658a38dbcf6483acd206b747d445fe2 | cc6f32462f49998fac69327ec6983de8067352ae | refs/heads/master | 2022-05-05T14:32:16.011938 | 2022-03-31T23:03:07 | 2022-03-31T23:03:07 | 173,981,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,365 | py | # -*- coding: utf-8 -*-
# base16-prompt-toolkit (https://github.com/memeplex/base16-prompt-toolkit)
# Base16 Prompt Toolkit template by Carlos Pita (carlosjosepita@gmail.com
# Codeschool scheme by blockloop
from prompt_toolkit.terminal.vt100_output import _256_colors
from pygments.style import Style
from pygments.token import (Keyword, Name, Comment, String, Error, Text,
Number, Operator, Literal, Token)
# See http://chriskempson.com/projects/base16/ for a description of the role
# of the different colors in the base16 palette.
# The sixteen base16 palette slots for the "Codeschool" scheme, as hex RGB
# strings. Per the base16 convention referenced above, base00-07 form the
# background/foreground ramp and base08-0F the accent colors.
base00 = '#232c31'
base01 = '#1c3657'
base02 = '#2a343a'
base03 = '#3f4944'
base04 = '#84898c'
base05 = '#9ea7a6'
base06 = '#a7cfa3'
base07 = '#b5d8f6'
base08 = '#2a5491'
base09 = '#43820d'
base0A = '#a03b1e'
base0B = '#237986'
base0C = '#b02f30'
base0D = '#484d79'
base0E = '#c59820'
base0F = '#c98344'
# See https://github.com/jonathanslenders/python-prompt-toolkit/issues/355
# Register the palette in prompt_toolkit's RGB -> terminal-color table.
# The ordering string picks which baseXX goes to which color slot; indexes
# above 8 are shifted by 6, presumably to land on the slots the vt100
# output table expects (see the linked issue) — TODO confirm.
colors = (globals()['base0' + d] for d in '08BADEC5379F1246')
for i, color in enumerate(colors):
    r, g, b = int(color[1:3], 16), int(color[3:5], 16), int(color[5:], 16)
    _256_colors[r, g, b] = i + 6 if i > 8 else i
# See http://pygments.org/docs/tokens/ for a description of the different
# pygments tokens.
class Base16Style(Style):
    """Pygments style mapping token categories onto the base16 palette
    defined above (see http://pygments.org/docs/tokens/ for the token
    taxonomy)."""
    background_color = base00
    highlight_color = base02
    default_style = base05
    # Token -> style-string map; '%s bold' % color yields e.g. '#2a5491 bold'.
    styles = {
        Text: base05,
        Error: '%s bold' % base08,
        Comment: base03,
        Keyword: base0E,
        Keyword.Constant: base09,
        Keyword.Namespace: base0D,
        Name.Builtin: base0D,
        Name.Function: base0D,
        Name.Class: base0D,
        Name.Decorator: base0E,
        Name.Exception: base08,
        Number: base09,
        Operator: base0E,
        Literal: base0B,
        String: base0B
    }
# See https://github.com/jonathanslenders/python-prompt-toolkit/blob/master/prompt_toolkit/styles/defaults.py
# for a description of prompt_toolkit related pseudo-tokens.
overrides = {
Token.Prompt: base0B,
Token.PromptNum: '%s bold' % base0B,
Token.OutPrompt: base08,
Token.OutPromptNum: '%s bold' % base08,
Token.Menu.Completions.Completion: 'bg:%s %s' % (base01, base04),
Token.Menu.Completions.Completion.Current: 'bg:%s %s' % (base04, base01),
Token.MatchingBracket.Other: 'bg:%s %s' % (base03, base00)
}
| [
"xzdandy@gmail.com"
] | xzdandy@gmail.com |
5b7e1f7fad59a55adecd2e085f93e88b7e02d4c3 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2653/60698/274540.py | b57d02b4c40b0bf4962611a78c22b05da38bef51 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | def test():
t = int(input())
for _ in range(0, t):
nx=input().split()
n=int(nx[0])
x=int(nx[1])
if x>=10:
print(0)
return
else:
time=10-x
num=n-1
print (num*time)
test() | [
"1069583789@qq.com"
] | 1069583789@qq.com |
45afbd57726537a58057dcd1c6c67e4566de6212 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/SYMMNTPCLIENT.py | 991e6c1fb91a1ddbc390746576c20fd10ad69885 | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 11,788 | py | #
# PySNMP MIB module SYMMNTPCLIENT (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/neermitt/Dev/kusanagi/mibs.snmplabs.com/asn1/SYMMNTPCLIENT
# Produced by pysmi-0.3.4 at Tue Jul 30 11:35:06 2019
# On host NEERMITT-M-J0NV platform Darwin version 18.6.0 by user neermitt
# Using Python version 3.7.4 (default, Jul 9 2019, 18:13:23)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion")
InetAddressType, = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType")
ModuleCompliance, NotificationGroup, ObjectGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup", "ObjectGroup")
TimeTicks, ObjectIdentity, Unsigned32, iso, NotificationType, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Counter64, IpAddress, Integer32, Bits, ModuleIdentity, Gauge32 = mibBuilder.importSymbols("SNMPv2-SMI", "TimeTicks", "ObjectIdentity", "Unsigned32", "iso", "NotificationType", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Counter64", "IpAddress", "Integer32", "Bits", "ModuleIdentity", "Gauge32")
TextualConvention, DisplayString, RowStatus = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "RowStatus")
EnableValue, symmPacketService = mibBuilder.importSymbols("SYMM-COMMON-SMI", "EnableValue", "symmPacketService")
symmNTPClient = ModuleIdentity((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3))
symmNTPClient.setRevisions(('2018-03-21 11:07',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: symmNTPClient.setRevisionsDescriptions((' Symmetricom NTP Client ',))
if mibBuilder.loadTexts: symmNTPClient.setLastUpdated('201806280521Z')
if mibBuilder.loadTexts: symmNTPClient.setOrganization('Symmetricom')
if mibBuilder.loadTexts: symmNTPClient.setContactInfo('Symmetricom Technical Support 1-888-367-7966 toll free USA 1-408-428-7907 worldwide Support@symmetricom.com ')
if mibBuilder.loadTexts: symmNTPClient.setDescription('This is the Symmetricom NTP Client MIB. It has two main nodes: NTPClient status and NTPClient configuration.')
class DateAndTime(TextualConvention, OctetString):
description = "A date-time specification. field octets contents range ----- ------ -------- ----- 1 1-2 year* 0..65536 2 3 month 1..12 3 4 day 1..31 4 5 hour 0..23 5 6 minutes 0..59 6 7 seconds 0..60 (use 60 for leap-second) 7 8 deci-seconds 0..9 8 9 direction from UTC '+' / '-' 9 10 hours from UTC* 0..13 10 11 minutes from UTC 0..59 * Notes: - the value of year is in network-byte order - daylight saving time in New Zealand is +13 For example, Tuesday May 26, 1992 at 1:30:15 PM EDT would be displayed as: 1992-5-26,13:30:15.0,-4:0 Note that if only local time is known, then timezone information (fields 8-10) is not present."
status = 'current'
displayHint = '2d-1d-1d,1d:1d:1d.1d,1a1d:1d'
subtypeSpec = OctetString.subtypeSpec + ConstraintsUnion(ValueSizeConstraint(8, 8), ValueSizeConstraint(11, 11), )
class TLocalTimeOffset(TextualConvention, OctetString):
description = "A local time offset specification. field octets contents range ----- ------ -------- ----- 1 1 direction from UTC '+' / '-' 2 2 hours from UTC* 0..13 3 3 minutes from UTC 0..59 * Notes: - the value of year is in network-byte order - The hours range is 0..13 For example, the -6 local time offset would be displayed as: -6:0 "
status = 'current'
displayHint = '1a1d:1d'
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(3, 3)
fixedLength = 3
class NTPCLIENTTIME(Integer32):
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("syncNow", 1), ("writeOnlyObject", 2))
ntpClientStatusInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 1))
ntpcTimeOffset = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 1, 1), Integer32()).setUnits('seconds').setMaxAccess("readonly")
if mibBuilder.loadTexts: ntpcTimeOffset.setStatus('current')
if mibBuilder.loadTexts: ntpcTimeOffset.setDescription('NTP client Time Offset')
ntpcLastUpdate = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntpcLastUpdate.setStatus('current')
if mibBuilder.loadTexts: ntpcLastUpdate.setDescription('NTP client Last Update')
ntpcStatus = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntpcStatus.setStatus('current')
if mibBuilder.loadTexts: ntpcStatus.setDescription('NTP client Status')
ntpcServerIP = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntpcServerIP.setStatus('current')
if mibBuilder.loadTexts: ntpcServerIP.setDescription('NTP client ServerIP')
ntpcServerLeapIndicator = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntpcServerLeapIndicator.setStatus('current')
if mibBuilder.loadTexts: ntpcServerLeapIndicator.setDescription('NTP client Server Leap Indicator')
ntpcServerStratum = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntpcServerStratum.setStatus('current')
if mibBuilder.loadTexts: ntpcServerStratum.setDescription('NTP client Server Leap Indicator')
ntpcServerRefID = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ntpcServerRefID.setStatus('current')
if mibBuilder.loadTexts: ntpcServerRefID.setDescription('NTP client Server Reference ID')
ntpClientConfigInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 2))
ntpcServerIPAddrTable = MibTable((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 2, 1), )
if mibBuilder.loadTexts: ntpcServerIPAddrTable.setStatus('current')
if mibBuilder.loadTexts: ntpcServerIPAddrTable.setDescription("The NTP-Client Server IP Table. This table's row be added or deleted")
ntpcServerIPAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 2, 1, 1), ).setIndexNames((0, "SYMMNTPCLIENT", "ntpcServerIPAddrIndex"))
if mibBuilder.loadTexts: ntpcServerIPAddrEntry.setStatus('current')
if mibBuilder.loadTexts: ntpcServerIPAddrEntry.setDescription('NTP-Client Server entry')
ntpcServerIPAddrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 3)))
if mibBuilder.loadTexts: ntpcServerIPAddrIndex.setStatus('current')
if mibBuilder.loadTexts: ntpcServerIPAddrIndex.setDescription('Local index of the NTP Client Server IP table.')
ntpcServerIPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 2, 1, 1, 2), DisplayString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ntpcServerIPAddress.setStatus('current')
if mibBuilder.loadTexts: ntpcServerIPAddress.setDescription('NTP-Client Server IP Address. IPv4 or IPv6 Address')
ntpClientState = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 2, 2), EnableValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ntpClientState.setStatus('current')
if mibBuilder.loadTexts: ntpClientState.setDescription('The NTP-Client State ')
ntpClientSyncOnBoot = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 2, 3), EnableValue()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ntpClientSyncOnBoot.setStatus('current')
if mibBuilder.loadTexts: ntpClientSyncOnBoot.setDescription('The NTP-Client Sync-On-Boot ')
ntpClientPollInterval = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(4, 17))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ntpClientPollInterval.setStatus('current')
if mibBuilder.loadTexts: ntpClientPollInterval.setDescription('The NTP-Client Poll Interval ')
ntpClientTime = MibScalar((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 2, 5), NTPCLIENTTIME()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ntpClientTime.setStatus('current')
if mibBuilder.loadTexts: ntpClientTime.setDescription('The NTP-Client Time. This is write-only object. Valid ')
ntpClientConformance = ObjectIdentity((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 3))
if mibBuilder.loadTexts: ntpClientConformance.setStatus('current')
if mibBuilder.loadTexts: ntpClientConformance.setDescription('This subtree contains conformance statements for the SYMMNTPCLIENT.mib . ')
ntpClientCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 3, 1))
ntpClientBasicCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 3, 1, 1)).setObjects(("SYMMNTPCLIENT", "ntpClientStatusInfoGroup"), ("SYMMNTPCLIENT", "ntpClientConfigInfoGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ntpClientBasicCompliance = ntpClientBasicCompliance.setStatus('current')
if mibBuilder.loadTexts: ntpClientBasicCompliance.setDescription('The compliance statement for SNMP entities which have NTP packet service.')
ntpClientUocGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 3, 2))
ntpClientStatusInfoGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 3, 2, 1)).setObjects(("SYMMNTPCLIENT", "ntpcTimeOffset"), ("SYMMNTPCLIENT", "ntpcLastUpdate"), ("SYMMNTPCLIENT", "ntpcStatus"), ("SYMMNTPCLIENT", "ntpcServerIP"), ("SYMMNTPCLIENT", "ntpcServerLeapIndicator"), ("SYMMNTPCLIENT", "ntpcServerStratum"), ("SYMMNTPCLIENT", "ntpcServerRefID"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ntpClientStatusInfoGroup = ntpClientStatusInfoGroup.setStatus('current')
if mibBuilder.loadTexts: ntpClientStatusInfoGroup.setDescription('A collection of objects providing information applicable to NTP-Client status group.')
ntpClientConfigInfoGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9070, 1, 2, 5, 1, 3, 3, 2, 2)).setObjects(("SYMMNTPCLIENT", "ntpcServerIPAddress"), ("SYMMNTPCLIENT", "ntpClientState"), ("SYMMNTPCLIENT", "ntpClientSyncOnBoot"), ("SYMMNTPCLIENT", "ntpClientPollInterval"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ntpClientConfigInfoGroup = ntpClientConfigInfoGroup.setStatus('current')
if mibBuilder.loadTexts: ntpClientConfigInfoGroup.setDescription('A collection of objects providing information applicable to NTP-Client configuration group.')
mibBuilder.exportSymbols("SYMMNTPCLIENT", ntpcServerLeapIndicator=ntpcServerLeapIndicator, ntpcServerStratum=ntpcServerStratum, ntpcServerIPAddress=ntpcServerIPAddress, symmNTPClient=symmNTPClient, ntpcServerIPAddrIndex=ntpcServerIPAddrIndex, ntpcTimeOffset=ntpcTimeOffset, ntpClientConfigInfo=ntpClientConfigInfo, TLocalTimeOffset=TLocalTimeOffset, ntpClientConfigInfoGroup=ntpClientConfigInfoGroup, ntpClientSyncOnBoot=ntpClientSyncOnBoot, ntpClientConformance=ntpClientConformance, ntpcLastUpdate=ntpcLastUpdate, PYSNMP_MODULE_ID=symmNTPClient, ntpClientPollInterval=ntpClientPollInterval, ntpcStatus=ntpcStatus, NTPCLIENTTIME=NTPCLIENTTIME, DateAndTime=DateAndTime, ntpClientState=ntpClientState, ntpClientBasicCompliance=ntpClientBasicCompliance, ntpcServerIPAddrTable=ntpcServerIPAddrTable, ntpcServerIPAddrEntry=ntpcServerIPAddrEntry, ntpClientTime=ntpClientTime, ntpClientStatusInfo=ntpClientStatusInfo, ntpcServerIP=ntpcServerIP, ntpClientStatusInfoGroup=ntpClientStatusInfoGroup, ntpClientCompliances=ntpClientCompliances, ntpcServerRefID=ntpcServerRefID, ntpClientUocGroups=ntpClientUocGroups)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
fc414bd6e874eb9170259bd11f5421cd9647564f | 393a9ce7e465d211f99926afbc20f8e72fa6ce4d | /venv/bin/pip3 | 114101786a2c6dd465e49a127d0fda760e887e51 | [] | no_license | GarrettMatthews/Artemia | 210079f50c94c09c732c25efc6554b009e339856 | 008f8c46abdf5dbf270b057a820665e796d4a213 | refs/heads/master | 2020-07-29T05:29:45.979474 | 2019-12-10T17:52:38 | 2019-12-10T17:52:38 | 209,684,936 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 413 | #!/home/garrett/Desktop/Git_Repositories/Artemia/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
# setuptools-generated console-script wrapper: resolves the 'pip3' entry
# point of the pinned pip distribution and exits with its return value.
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Normalize argv[0]: strip the '-script.py(w)'/'.exe' suffixes that
    # Windows launcher shims append, so the command reports a plain name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
    )
| [
"garrettemathews@gmail.com"
] | garrettemathews@gmail.com | |
cc1186b87d73c60fd1286cb040e3c7be0884f0c9 | 7b9f0f9be9d7422546c300b0d4dead3b10fb7ee7 | /ariaml/createInput.py | df341539f8702167c06251b98a295bc65b24a1fa | [
"LicenseRef-scancode-proprietary-license",
"Apache-2.0"
] | permissive | aria-jpl/ariamh | 73f163f0c80da949f6ea88469fd2c67ab229ef20 | b6a06330cb5a592022070724b34eb6a011e7cbdc | refs/heads/develop | 2022-12-26T13:45:29.287226 | 2022-02-10T05:08:43 | 2022-02-10T05:08:43 | 124,922,867 | 6 | 7 | Apache-2.0 | 2022-12-13T13:53:45 | 2018-03-12T17:02:52 | Python | UTF-8 | Python | false | false | 330 | py | #!/usr/bin/env python
import sys, json
def write_input(ctx_file, in_file):
    """Extract the first product URL from a job context file and write it
    as the localize-input JSON.

    ctx_file -- path to a context JSON file whose rule hit carries the
                product URLs under rule_hit._source.urls
    in_file  -- path of the JSON file to create; it receives a single-key
                object: {"url": <first url>}
    """
    with open(ctx_file) as f:
        ctx = json.load(f)
    # Only the first URL of the hit is consumed downstream. (Renamed the
    # local from `input`, which shadowed the builtin of the same name.)
    payload = {"url": ctx['rule_hit']['_source']['urls'][0]}
    with open(in_file, 'w') as f:
        json.dump(payload, f, indent=2)


if __name__ == "__main__": write_input(sys.argv[1], sys.argv[2])
| [
"pymonger@gmail.com"
] | pymonger@gmail.com |
ea5bb42024088a52e5faddafce1ae405b2c755af | c6d389f085c683f33cc0d0ab6497b3f042f7c905 | /distanceBetweenPointAndSegment.py | ed8315fdfa93c2689eee34f35a3b9d99e47aff07 | [] | no_license | irhadSaric/computer-geometry | 0d23fbafbedb18b22df30cc8071f4103237eef2d | 25a73c756472896c316d685ca6792c8c94f31361 | refs/heads/master | 2020-04-04T08:01:38.501815 | 2019-02-26T20:05:08 | 2019-02-26T20:05:08 | 155,768,457 | 0 | 0 | null | 2019-02-26T20:10:33 | 2018-11-01T19:56:17 | Python | UTF-8 | Python | false | false | 635 | py | from math import sqrt
def distancePtoS(point, segment):
    """Return the shortest Euclidean distance from `point` (x, y) to the
    line segment `segment` ((x1, y1), (x2, y2)).

    Projects the point onto the segment's supporting line, clamps the
    projection parameter to [0, 1] so the closest point stays on the
    segment, then measures the distance to that closest point.
    """
    x = point[0]
    y = point[1]
    x1 = segment[0][0]
    y1 = segment[0][1]
    x2 = segment[1][0]
    y2 = segment[1][1]
    # (A, B): vector from the segment start to the point.
    # (C, D): vector along the segment.
    A = x - x1
    B = y - y1
    C = x2 - x1
    D = y2 - y1
    # Projection parameter param = dot / |segment|^2; param stays -1 for a
    # zero-length (degenerate) segment so the start point is used below.
    dot = A * C + B * D
    len_sq = C * C + D * D
    param = -1
    if (len_sq != 0): # guard against division by zero on a 0-length segment
        param = dot / len_sq
    # Clamp: before the start, past the end, or properly on the segment;
    # (xx, yy) becomes the closest point of the segment to `point`.
    if (param < 0):
        xx = x1
        yy = y1
    elif param > 1:
        xx = x2
        yy = y2
    else:
        xx = x1 + param * C
        yy = y1 + param * D
    dx = x - xx
    dy = y - yy
return sqrt(dx * dx + dy * dy) | [
"irhad.saric@hotmail.com"
] | irhad.saric@hotmail.com |
205750ec03969e29011a40ca0bb85064d668571e | 678e374616008bd3d72e2c399ece7e219601bebc | /src/billing/models.py | fd28f1b45c723ff87727da6e38eda558ca08bff0 | [
"MIT"
] | permissive | LABETE/srvup_and_drf | 74d454f7fc2aa1f7869b2a40f15f18cfdecc9835 | e6b09ce8f8b01bbcbdce291efbbba16c2837b38f | refs/heads/master | 2016-08-12T19:04:05.411849 | 2016-01-26T17:16:26 | 2016-01-26T17:16:26 | 50,445,171 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,892 | py | import datetime
import random
from django.conf import settings
from django.contrib.auth.signals import user_logged_in
from django.db import models
from django.db.models.signals import post_save
from django.utils import timezone
from .signals import membership_dates_update
from .utils import update_braintree_membership
def user_logged_in_receiver(sender, user, **kwargs):
    # On every login, refresh this user's local membership record from the
    # Braintree merchant data (helper imported from .utils).
    update_braintree_membership(user)
# Wire the receiver to Django's built-in user_logged_in signal.
user_logged_in.connect(user_logged_in_receiver)
class UserMerchantId(models.Model):
    """Maps a local user to their identifiers at the payment merchant.

    One row per user; ``merchant_name`` defaults to "Braintree".
    """
    user = models.OneToOneField(settings.AUTH_USER_MODEL)
    customer_id = models.CharField(max_length=120)
    # Subscription/plan ids are optional until the user actually subscribes.
    subscription_id = models.CharField(max_length=120, null=True, blank=True)
    plan_id = models.CharField(max_length=120, null=True, blank=True)
    merchant_name = models.CharField(max_length=120, default="Braintree")
    def __str__(self):
        return self.customer_id
class Membership(models.Model):
    """Tracks the membership window (start/end dates) for a single user."""
    user = models.OneToOneField(settings.AUTH_USER_MODEL)
    start_date = models.DateTimeField(default=timezone.now)
    end_date = models.DateTimeField(default=timezone.now)
    def __str__(self):
        return str(self.user.username)
    def update_status(self):
        # Keep the denormalized ``user.is_member`` flag in sync with the
        # membership end date; only saves when the flag actually changes.
        if self.end_date >= timezone.now() and not self.user.is_member:
            self.user.is_member = True
            self.user.save()
        elif self.end_date <= timezone.now() and self.user.is_member:
            self.user.is_member = False
            self.user.save()
def update_membership_status(sender, instance, created, *args, **kwargs):
    """post_save hook: re-evaluate the membership flag on updates only.

    Newly created rows are skipped — their status is set elsewhere.
    """
    if created:
        return
    instance.update_status()
post_save.connect(update_membership_status, sender=Membership)
def update_membership_dates(sender, new_start_date, **kwargs):
    """Extend or restart a membership billing period.

    ``sender`` is the Membership instance.  If the current period is still
    running at ``new_start_date``, the end date is pushed back by one cycle
    (30 days + 10 hours grace).  Otherwise the membership restarts: the
    start date becomes ``new_start_date`` and a fresh cycle begins there.
    """
    membership = sender
    cycle = datetime.timedelta(days=30, hours=10)
    if membership.end_date >= new_start_date:
        # Still active: stack the new cycle on top of the existing end date.
        membership.end_date = membership.end_date + cycle
    else:
        # Lapsed: restart the clock at the new start date.
        membership.start_date = new_start_date
        membership.end_date = new_start_date + cycle
    membership.save()
membership_dates_update.connect(update_membership_dates)
class TransactionManager(models.Manager):
    """Manager that records Transaction rows from payment-gateway results."""

    def create_new(self, user, transaction_id, amount, card_type,
                   success=None, last_four=None, transaction_status=None):
        """Validate inputs, derive a local order id and persist a Transaction."""
        if not user:
            raise ValueError("Must be a user.")
        if not transaction_id:
            raise ValueError("Must complete a transaction to add new.")
        # Order id = first two chars of the gateway id, one random digit,
        # then the remainder of the gateway id.
        order_id = "{0}{1}{2}".format(
            transaction_id[:2], random.randint(1, 9), transaction_id[2:])
        trans = self.model(
            user=user,
            transaction_id=transaction_id,
            amount=amount,
            order_id=order_id,
            card_type=card_type,
        )
        # Optional gateway details are stored only when supplied.
        if success:
            trans.success = success
        if last_four:
            trans.last_four = last_four
        if transaction_status:
            trans.transaction_status = transaction_status
        trans.save(using=self._db)
        return trans
class Transaction(models.Model):
    """Record of a single payment-gateway transaction for a user."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL)
    transaction_id = models.CharField(max_length=120)  # gateway-side id
    order_id = models.CharField(max_length=120)  # local id derived in the manager
    amount = models.DecimalField(max_digits=100, decimal_places=2)
    success = models.BooleanField(default=True)
    transaction_status = models.CharField(
        max_length=220, null=True, blank=True)
    card_type = models.CharField(max_length=120)
    last_four = models.PositiveIntegerField(null=True, blank=True)
    timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
    objects = TransactionManager()
    class Meta:
        # Newest transactions first.
        ordering = ["-timestamp"]
    def __str__(self):
        return self.order_id
| [
"eddie.valv@gmail.com"
] | eddie.valv@gmail.com |
4e87a4a33512bdf4d49b79da2a44373a7c62dab8 | e8bf00dba3e81081adb37f53a0192bb0ea2ca309 | /domains/nav/problems/training/problem1032_SD.py | bce3f354d7fa46d2c39311b6b2bf4b2ab138740f | [
"BSD-3-Clause"
] | permissive | patras91/rae_release | 1e6585ee34fe7dbb117b084df982ca8a8aed6795 | 0e5faffb7eb732fdb8e3bbf2c6d2f2cbd520aa30 | refs/heads/master | 2023-07-13T20:09:41.762982 | 2021-08-11T17:02:58 | 2021-08-11T17:02:58 | 394,797,515 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,203 | py | __author__ = 'patras'
from domain_springDoor import *
from timer import DURATION
from state import state, rv
DURATION.TIME = {
'unlatch1': 5,
'unlatch2': 5,
'holdDoor': 2,
'passDoor': 3,
'releaseDoor': 2,
'closeDoors': 3,
'move': 7,
'take': 2,
'put': 2,
}
DURATION.COUNTER = {
'unlatch1': 5,
'unlatch2': 5,
'holdDoor': 2,
'passDoor': 3,
'releaseDoor': 2,
'closeDoors': 3,
'move': 7,
'take': 2,
'put': 2,
}
rv.LOCATIONS = [1, 2, 3, 4, 5, 6]
rv.EDGES = {1: [4], 2: [5], 3: [6], 4: [1, 5], 5: [2, 4, 6], 6: [3, 5]}
rv.DOORS = ['d1', 'd2', 'd3']
rv.DOORLOCATIONS = {(1, 4): 'd3', (2, 5): 'd2', (3, 6): 'd1'}
rv.DOORTYPES = {'d1': 'ordinary', 'd2': 'spring', 'd3': 'spring'}
rv.ROBOTS = ['r1', 'r2', 'r3']
def ResetState():
    """Reset the shared simulation state to this problem's initial layout."""
    state.load = {'r1': NIL, 'r2': NIL, 'r3': NIL}  # no robot carries anything
    state.status = {'r1': 'free', 'r2': 'free', 'r3': 'free'}
    state.loc = {'r1': 1, 'r2': 5, 'r3': 3}  # robot locations
    state.pos = {'o1': 3}  # object positions
    state.doorStatus = {'d1': 'closed', 'd2': 'closed', 'd3': 'closed', }
    # Door types start unknown (UNK) and must be discovered during execution.
    state.doorType = {'d1': UNK, 'd2': UNK, 'd3': UNK, }
tasks = {
5: [['fetch', 'r1', 'o1', 3]],
9: [['collision', 'r1']],
}
eventsEnv = {
} | [
"patras@umd.edu"
] | patras@umd.edu |
14b61fbbc57d056ce76adcf743c6de1c24e1669e | ca27df9a42fdba2fb9c42ced68b1b3e734e0fb76 | /src/modu/editable/datatypes/boolean.py | b695dbf72fee4b17b8ff56ca82ed177cbfd26c68 | [
"MIT"
] | permissive | philchristensen/modu | 1696ecf36908367b0358b06c3bee02552fc76651 | 795f3bc413956b98522ac514dafe35cbab0d57a3 | refs/heads/master | 2016-09-06T10:54:59.286492 | 2015-03-29T21:22:47 | 2015-03-29T21:22:47 | 829,469 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,285 | py | # modu
# Copyright (c) 2006-2010 Phil Christensen
# http://modu.bubblehouse.org
#
#
# See LICENSE for details
"""
Datatypes to manage boolean-type fields.
"""
from zope.interface import implements
from modu.persist import sql
from modu.editable import IDatatype, define
from modu.util import form
from modu import persist
class CheckboxField(define.definition):
    """
    Displays a field as an HTML checkbox.

    Provides modified update behavior to deal with the fact that checkboxes
    only submit form data when checked.
    """
    implements(IDatatype)

    # Radio options offered when this field is rendered in a search form.
    search_list = ['unchecked', 'checked', 'no search']

    def get_element(self, req, style, storable):
        """
        @see: L{modu.editable.define.definition.get_element()}
        """
        frm = form.FormNode(self.name)
        if(style == 'search'):
            # '2' indexes 'no search' in search_list, i.e. no constraint.
            search_value = getattr(storable, self.get_column_name(), '2')
            frm(type='radiogroup', options=self.search_list, value=search_value)
        else:
            frm(type='checkbox', value=self.get('checked_value', 1))
            # Compare as strings so int/str storage both mark the box checked.
            if(str(getattr(storable, self.get_column_name(), None)) == str(self.get('checked_value', 1))):
                frm(checked=True)
            default_value = self.get('default_value', None)
            # Unsaved storables fall back to the configured default state.
            if(not storable.get_id() and default_value is not None):
                frm(checked=bool(default_value))
        if(style == 'listing' or self.get('read_only', False)):
            frm(disabled=True)
        return frm

    def get_search_value(self, value, req, frm):
        # Translate the search-radio selection into a SQL fragment.  The
        # doubled '%%s' survives the Python %-format below so the SQL layer
        # can later substitute the column value into it.
        if(value is not None):
            value = value.value
        if(value == '0'):
            return sql.RAW('COALESCE(%%s, 0) <> %s' % self.get('checked_value', 1))
        elif(value == '1'):
            return sql.RAW('COALESCE(%%s, 0) = %s' % self.get('checked_value', 1))
        # a trick - return a statement that is always true
        return sql.RAW('COALESCE(%s = %%s, 1) = 1' % sql.escape_dot_syntax(self.get_column_name()))

    def update_storable(self, req, form, storable):
        """
        @see: L{modu.editable.define.definition.update_storable()}
        """
        form_name = '%s-form' % storable.get_table()
        if(form_name in req.data):
            form_data = req.data[form_name]
            if(self.name in form_data):
                setattr(storable, self.get_column_name(), form_data[self.name].value)
            else:
                # Unchecked boxes are absent from the POST data entirely, so
                # their absence means "write the unchecked value".
                setattr(storable, self.get_column_name(), self.get('unchecked_value', 0))
        return True
class NonNullSearchField(define.definition):
    """
    Search-only pseudo-field: constrains a column to NULL / NOT NULL.

    Renders a plain label outside of search forms and never writes
    anything back to the storable.
    """
    search_list = ['empty', 'not empty', 'no search']

    def get_element(self, req, style, storable):
        if(style != 'search'):
            return form.FormNode(self.name)(type='label', value='n/a - Search Use Only')
        else:
            # '2' indexes 'no search' in search_list, i.e. no constraint.
            search_value = getattr(storable, self.get_column_name(), '2')
            frm = form.FormNode(self.name)
            frm(type='radiogroup', options=self.search_list, value=search_value)
            return frm

    def get_search_value(self, value, req, frm):
        # '%s' here is a placeholder for the column name, filled in later
        # by the SQL layer.
        if(value is not None):
            if(value.value == '0'):
                return sql.RAW('ISNULL(%s)')
            elif(value.value == '1'):
                return sql.RAW('NOT(ISNULL(%s))')
        # a trick
        return sql.RAW('IF(%s, 1, 1)')

    def update_storable(self, req, form, storable):
        # Search-only field: deliberately never modifies the storable.
        pass
class NonBlankSearchField(NonNullSearchField):
    """
    Like NonNullSearchField, but 'not empty' also excludes blank strings.
    """
    def get_search_value(self, value, req, frm):
        if(value is not None):
            if(value.value == '0'):
                return sql.RAW('ISNULL(%s)')
            elif(value.value == '1'):
                # Matches rows whose value is neither NULL nor ''.
                return sql.RAW("IFNULL(%s, '') <> ''")
        # a trick
        return sql.RAW('IF(%s, 1, 1)')
| [
"phil@bubblehouse.org"
] | phil@bubblehouse.org |
1081ea17c0124f9ba46398a111ac9aa9c1c6bc52 | 3ac0923505e1e03a07742355edec43f23ead82b7 | /Daily/PY/LCP7-传递信息.py | 47dde08f7b68165b4b97ab8cafc5df622c6a5329 | [] | no_license | lock19960613/SCL | 5055c940d2529eef981a29698c7ea04212a8b9de | 3ea28fd8f5d5233411341283c85667e4b9fc64d5 | refs/heads/main | 2023-08-03T04:36:33.555296 | 2021-09-11T06:48:49 | 2021-09-11T06:48:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 502 | py | from typing import List
class Solution:
    def numWays(self, n: int, relation: List[List[int]], k: int) -> int:
        """Count distinct ways to pass a message from player 0 to player n-1
        in exactly k rounds (LCP 07).

        n        -- number of players, labelled 0..n-1
        relation -- directed edges [src, dst] a message may travel along
        k        -- exact number of rounds (edge traversals)

        Replaces the original exponential DFS (which threaded a mutable
        one-element list as an accumulator) with round-by-round dynamic
        programming: O(k * len(relation)) time, O(n) space, same results.
        """
        # ways[i] == number of paths of the current length from 0 to i.
        ways = [0] * n
        ways[0] = 1
        for _ in range(k):
            nxt = [0] * n
            for src, dst in relation:
                nxt[dst] += ways[src]
            ways = nxt
        return ways[n - 1]
"597494370@qq.com"
] | 597494370@qq.com |
e39b7a772ddc094a8023faf4ec83d011face4a00 | 82aee3211216f55392d5a757eb57f02c859e9a28 | /Easy/141_linkedListCycle.py | d23cc846dcdedbbb257bffebfd2150b7ee2dddbb | [] | no_license | Yucheng7713/CodingPracticeByYuch | 505d18095d4b9a35c1f3b23632a90a76d811b64a | 1461b10b8910fa90a311939c6df9082a8526f9b1 | refs/heads/master | 2022-05-01T11:51:00.612603 | 2022-04-18T09:46:55 | 2022-04-18T09:46:55 | 198,961,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 490 | py | # Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def hasCycle(self, head):
        """Return True iff the singly linked list at ``head`` contains a
        cycle, using Floyd's tortoise-and-hare in O(1) space."""
        slow = head
        fast = head
        # The hare moves two nodes per step.  If a cycle exists, both
        # pointers eventually land on the same node inside it; otherwise
        # the hare reaches the end of the list.
        while fast is not None and fast.next is not None:
            slow = slow.next
            fast = fast.next.next
            if slow is fast:
                return True
        return False
"yuchengh@usc.edu"
] | yuchengh@usc.edu |
7bb147fb781245b85941bb53ade305b787c1f256 | 7ba9ba1570ef44ced18bf7689329d5f5d4bcc350 | /src/fracx/api/models.py | f1a9f5909798a2fd90a16181348846977c0a88d7 | [
"MIT"
] | permissive | la-mar/permian-frac-exchange | 90992393cdcdb6c6a8b697a5c7d8fc64a4bff2f2 | a7ba410c02b49d05c5ad28eff0619a3c198d3fd0 | refs/heads/master | 2020-06-12T04:40:52.642629 | 2020-04-14T23:50:07 | 2020-04-14T23:50:07 | 194,196,884 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,102 | py | import logging
from sqlalchemy.sql import func
from api.mixins import CoreMixin
from config import get_active_config
from fracx import db
conf = get_active_config()
logger = logging.getLogger(__name__)
class FracSchedule(CoreMixin, db.Model):
    """Frac schedule entry: one well identified by api14 with a frac window.

    The composite primary key (api14, frac_start_date, frac_end_date) lets
    the same well appear once per scheduled frac window.
    """
    __tablename__ = conf.FRAC_SCHEDULE_TABLE_NAME
    api14 = db.Column(db.String(14), nullable=False, primary_key=True)
    api10 = db.Column(db.String(10), nullable=False)
    wellname = db.Column(db.String(), nullable=True)
    operator = db.Column(db.String())
    frac_start_date = db.Column(db.Date(), primary_key=True)
    frac_end_date = db.Column(db.Date(), primary_key=True)
    status = db.Column(db.String())
    tvd = db.Column(db.Integer())  # presumably total vertical depth — confirm
    target_formation = db.Column(db.String())
    # Surface-hole (shl*) and bottom-hole (bhl*) coordinates.
    shllat = db.Column(db.Float())
    shllon = db.Column(db.Float())
    bhllat = db.Column(db.Float())
    bhllon = db.Column(db.Float())
    created_at = db.Column(
        db.DateTime(timezone=True), default=func.now(), nullable=False
    )
    updated_at = db.Column(
        db.DateTime(timezone=True), default=func.now(), nullable=False
    )
| [
"brocklfriedrich@gmail.com"
] | brocklfriedrich@gmail.com |
435534ff95ee6c8f8f98473eb45892defd916779 | 2789e0e7eeec008af8dcceeea454fd9cf9f6ca56 | /LSDPlottingTools/LSDMap_BasicPlotting.py | 5222d87284c4b3f4d854a56c4feabbd319eaf2e6 | [] | no_license | dongjwOU/LSDMappingTools | 52c17234344a3d3f9c50d66dea36a580603a8988 | 6e6820cbca555941a2b7f3f64a48ed1a6f11905e | refs/heads/master | 2021-01-13T15:37:47.723562 | 2016-12-19T18:04:49 | 2016-12-19T18:04:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,406 | py | ## LSDMap_BasicPlotting.py
##=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
## These functions are tools to deal with rasters
##=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
## SMM
## 26/07/2014
##=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
import osgeo.gdal as gdal
import numpy as np
import numpy.ma as ma
from osgeo import osr
from os.path import exists
from osgeo.gdalconst import GA_ReadOnly
from numpy import uint8
from matplotlib import rcParams
import LSDMap_GDALIO as LSDMap_IO
import LSDMap_BasicManipulation as LSDMap_BM
import LSDOSystemTools as LSDOst
#==============================================================================
# Formats ticks for an imshow plot in UTM
# Filename is the name of the file with full path
# x_max, x_min, y_max, y_min are the extent of the plotting area (NOT the DEM)
# n_target ticks are the number of ticks for plotting
#------------------------------------------------------------------------------
def GetTicksForUTM(FileName,x_max,x_min,y_max,y_min,n_target_tics):
    """Compute nicely rounded UTM tick locations and labels for a map plot.

    FileName       -- raster whose UTM extent anchors the ticks
    x_max..y_min   -- extent of the plotting area (NOT necessarily the DEM)
    n_target_tics  -- approximate number of ticks wanted per axis

    Returns (xlocs, ylocs, x_labels, y_labels), restricted to ticks that
    fall inside the raster extent.  Rounding of the tick spacing and
    origin is done via string slicing of the decimal representation.
    """
    CellSize,XMin,XMax,YMin,YMax = LSDMap_IO.GetUTMMaxMin(FileName)
    NDV, xsize, ysize, GeoT, Projection, DataType = LSDMap_IO.GetGeoInfo(FileName)
    xmax_UTM = XMax
    xmin_UTM = XMin
    ymax_UTM = YMax
    ymin_UTM = YMin
    #print "now UTM, xmax: " +str(xmax_UTM)+" x_min: " +str(xmin_UTM)+" y_maxb: " +str(ymax_UTM)+" y_minb: " +str(ymin_UTM)
    # Use the larger of the two candidate spacings so x and y share one spacing.
    dy_fig = ymax_UTM-ymin_UTM
    dx_fig = xmax_UTM-xmin_UTM
    dx_spacing = dx_fig/n_target_tics
    dy_spacing = dy_fig/n_target_tics
    if (dx_spacing>dy_spacing):
        dy_spacing = dx_spacing
    # Round the spacing down to one significant digit (string-based).
    str_dy = str(dy_spacing)
    str_dy = str_dy.split('.')[0]
    n_digits = str_dy.__len__()
    nd = int(n_digits)
    first_digit = float(str_dy[0])
    #print "str_dy: " +str_dy+ " n_digits: " +str(nd)+" first_digit: " + str(first_digit)
    dy_spacing_rounded = first_digit*pow(10,(nd-1))
    #print "n_digits: "+str(n_digits)+" dy_spacing: " +str(dy_spacing) + " and rounded: "+str(dy_spacing_rounded)
    # Truncate the minimum coordinates to whole metres.
    str_xmin = str(xmin_UTM)
    str_ymin = str(ymin_UTM)
    #print "before split str_xmin: "+ str_xmin + " str ymin: " + str_ymin
    str_xmin = str_xmin.split('.')[0]
    str_ymin = str_ymin.split('.')[0]
    #print "after split str_xmin: "+ str_xmin + " str ymin: " + str_ymin
    xmin_UTM = float(str_xmin)
    ymin_UTM = float(str_ymin)
    #print "UTM: "+ str(xmin_UTM) + " str ymin: " + str(ymin_UTM)
    n_digx = str_xmin.__len__()
    n_digy = str_ymin.__len__()
    #print "n_dig_x: " + str(n_digx)+ " nd: " + str(nd)
    # Keep only the leading digits so the tick origin is a round number
    # at the same order of magnitude as the spacing.
    if (n_digx-nd+1) >= 1:
        front_x = str_xmin[:(n_digx-nd+1)]
    else:
        front_x = str_xmin
    if (n_digy-nd+1) >= 1:
        front_y = str_ymin[:(n_digy-nd+1)]
    else:
        front_y = str_ymin
    #print "xmin: " + str_xmin + " ymin: " + str_ymin + " n_digx: " + str(n_digx)+ " n_digy: " + str(n_digy)
    #print "frontx: " +front_x+" and fronty: "+ front_y
    round_xmin = float(front_x)*pow(10,nd-1)
    round_ymin = float(front_y)*pow(10,nd-1)
    #print "x_min: " +str(xmin_UTM)+ " round xmin: " +str(round_xmin)+ " y_min: " +str(ymin_UTM)+" round y_min: " + str(round_ymin)
    # now we need to figure out where the xllocs and ylocs are
    xUTMlocs = np.zeros(2*n_target_tics)
    yUTMlocs = np.zeros(2*n_target_tics)
    xlocs = np.zeros(2*n_target_tics)
    ylocs = np.zeros(2*n_target_tics)
    new_x_labels = []
    new_y_labels = []
    round_ymax = round_ymin+dy_spacing_rounded*(2*n_target_tics-1)
    #print "n_target_tics: " + str(n_target_tics) + " round_ymax: " +str(round_ymax)
    # Generate twice as many candidate ticks as requested; out-of-range
    # candidates are filtered below.
    for i in range(0,2*n_target_tics):
        xUTMlocs[i] = round_xmin+(i)*dy_spacing_rounded
        yUTMlocs[i] = round_ymin+(i)*dy_spacing_rounded
        #xlocs[i] = (xUTMlocs[i]-XMin)
        xlocs[i] = xUTMlocs[i]
        # need to account for the rows starting at the upper boundary
        ylocs[i] = round_ymax-(yUTMlocs[i]-round_ymin)
        #print "i: " + str(i) +" yUTM: " + str(yUTMlocs[i])+ " rounded, reversed: " +str( ylocs[i])
        new_x_labels.append( str(xUTMlocs[i]).split(".")[0] )
        new_y_labels.append( str(yUTMlocs[i]).split(".")[0] )
    #print xUTMlocs
    #print xlocs
    #print yUTMlocs
    #print ylocs
    #print new_x_labels
    #print new_y_labels
    new_xlocs = []
    new_xUTMlocs = []
    x_labels = []
    # Now loop through these to get rid of those not in range
    for index,xloc in enumerate(xlocs):
        #print xloc
        if (xloc < XMax and xloc > XMin):
            new_xlocs.append(xloc)
            new_xUTMlocs.append(xUTMlocs[index])
            x_labels.append(new_x_labels[index])
    new_ylocs = []
    new_yUTMlocs = []
    y_labels = []
    # Now loop through these to get rid of those not in range
    for index,yloc in enumerate(ylocs):
        #print yloc
        if (yloc < YMax and yloc > YMin):
            new_ylocs.append(yloc)
            new_yUTMlocs.append(yUTMlocs[index])
            y_labels.append(new_y_labels[index])
    #print "======================================="
    #print "I am getting the tick marks now"
    #print "X extent: " + str(XMin)+ " " +str(XMax)
    #print "Y extent: " + str(YMin)+ " " +str(YMax)
    #print "x ticks: "
    #print new_xlocs
    #print "y ticks: "
    #print new_ylocs
    #return xlocs,ylocs,new_x_labels,new_y_labels
    return new_xlocs,new_ylocs,x_labels,y_labels
#==============================================================================
#==============================================================================
def LogStretchDensityPlot(FileName, thiscmap='gray',colorbarlabel='Elevation in meters',clim_val = (0,0)):
    """Display a raster with its values log10-stretched, with UTM ticks.

    FileName      -- path to the raster
    thiscmap      -- matplotlib colormap name
    colorbarlabel -- label for the horizontal colorbar
    clim_val      -- (min, max) colour limits; (0,0) means autoscale
    Shows the figure interactively (plt.show); returns nothing.
    NOTE: Python 2 print statements throughout this module.
    """
    import matplotlib.pyplot as plt
    import matplotlib.lines as mpllines
    label_size = 20
    #title_size = 30
    axis_size = 28
    # Set up fonts for plots
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['arial']
    rcParams['font.size'] = label_size
    # get the data
    raster = LSDMap_IO.ReadRasterArrayBlocks(FileName)
    # get the log of the raster
    raster = np.log10(raster)
    # now get the extent
    extent_raster = LSDMap_IO.GetRasterExtent(FileName)
    x_min = extent_raster[0]
    x_max = extent_raster[1]
    y_min = extent_raster[2]
    y_max = extent_raster[3]
    # make a figure, sized for a ppt slide
    fig = plt.figure(1, facecolor='white',figsize=(10,7.5))
    # make room for the colorbar
    fig.subplots_adjust(bottom=0.2)
    fig.subplots_adjust(top=0.9)
    #fig.subplots_adjust(left=0.2)
    #fig.subplots_adjust(right=0.8)
    ax1 = fig.add_subplot(1,1,1)
    # Rows are flipped so the array's first row sits at the top of the map.
    im = ax1.imshow(raster[::-1], thiscmap, extent = extent_raster)
    print "The is the extent raster data element"
    print extent_raster
    print "now I am in the mapping routine"
    print "x_min: " + str(x_min)
    print "x_max: " + str(x_max)
    print "y_min: " + str(y_min)
    print "y_max: " + str(y_max)
    # now get the tick marks
    n_target_tics = 5
    xlocs,ylocs,new_x_labels,new_y_labels = GetTicksForUTM(FileName,x_max,x_min,y_max,y_min,n_target_tics)
    plt.xticks(xlocs, new_x_labels, rotation=60)  #[1:-1] skips ticks where we have no data
    plt.yticks(ylocs, new_y_labels)
    print "The x locs are: "
    print xlocs
    print "The x labels are: "
    print new_x_labels
    # some formatting to make some of the ticks point outward
    for line in ax1.get_xticklines():
        line.set_marker(mpllines.TICKDOWN)
        #line.set_markeredgewidth(3)
    for line in ax1.get_yticklines():
        line.set_marker(mpllines.TICKLEFT)
        #line.set_markeredgewidth(3)
    #plt.xlim(x_min,x_max)
    #plt.ylim(y_max,y_min)
    plt.xlabel('Easting (m)',fontsize = axis_size)
    plt.ylabel('Northing (m)', fontsize = axis_size)
    ax1.set_xlabel("Easting (m)")
    ax1.set_ylabel("Northing (m)")
    # set the colour limits
    print "Setting colour limits to "+str(clim_val[0])+" and "+str(clim_val[1])
    if (clim_val == (0,0)):
        print "I don't think I should be here"
        im.set_clim(0, np.max(raster))
    else:
        print "Now setting colour limits to "+str(clim_val[0])+" and "+str(clim_val[1])
        im.set_clim(clim_val[0],clim_val[1])
    cbar = fig.colorbar(im, orientation='horizontal')
    cbar.set_label(colorbarlabel)
    #plt.tight_layout()
    plt.show()
#==============================================================================
#==============================================================================
def BasicDensityPlot(FileName, thiscmap='gray',colorbarlabel='Elevation in meters',clim_val = (0,0)):
    """Display a raster as an image with UTM tick labels and a colorbar.

    FileName      -- path to the raster
    thiscmap      -- matplotlib colormap name
    colorbarlabel -- label for the vertical colorbar
    clim_val      -- (min, max) colour limits; (0,0) means autoscale
    Shows the figure interactively (plt.show); returns nothing.
    """
    import matplotlib.pyplot as plt
    import matplotlib.lines as mpllines
    label_size = 20
    #title_size = 30
    axis_size = 28
    # Set up fonts for plots
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['arial']
    rcParams['font.size'] = label_size
    # get the data
    raster = LSDMap_IO.ReadRasterArrayBlocks(FileName)
    # now get the extent
    extent_raster = LSDMap_IO.GetRasterExtent(FileName)
    x_min = extent_raster[0]
    x_max = extent_raster[1]
    y_min = extent_raster[2]
    y_max = extent_raster[3]
    # make a figure, sized for a ppt slide
    fig = plt.figure(1, facecolor='white',figsize=(10,7.5))
    # make room for the colorbar
    #fig.subplots_adjust(bottom=0.1)
    #fig.subplots_adjust(top=0.9)
    #fig.subplots_adjust(left=0.2)
    #fig.subplots_adjust(right=0.8)
    ax1 = fig.add_subplot(1,1,1)
    # Rows are flipped so the array's first row sits at the top of the map.
    im = ax1.imshow(raster[::-1], thiscmap, extent = extent_raster)
    print "The is the extent raster data element"
    print extent_raster
    print "now I am in the mapping routine"
    print "x_min: " + str(x_min)
    print "x_max: " + str(x_max)
    print "y_min: " + str(y_min)
    print "y_max: " + str(y_max)
    # now get the tick marks
    n_target_tics = 5
    xlocs,ylocs,new_x_labels,new_y_labels = GetTicksForUTM(FileName,x_max,x_min,y_max,y_min,n_target_tics)
    plt.xticks(xlocs, new_x_labels, rotation=60)  #[1:-1] skips ticks where we have no data
    plt.yticks(ylocs, new_y_labels)
    print "The x locs are: "
    print xlocs
    print "The x labels are: "
    print new_x_labels
    # some formatting to make some of the ticks point outward
    for line in ax1.get_xticklines():
        line.set_marker(mpllines.TICKDOWN)
        #line.set_markeredgewidth(3)
    for line in ax1.get_yticklines():
        line.set_marker(mpllines.TICKLEFT)
        #line.set_markeredgewidth(3)
    plt.xlim(x_min,x_max)
    plt.ylim(y_max,y_min)
    plt.xlabel('Easting (m)',fontsize = axis_size)
    plt.ylabel('Northing (m)', fontsize = axis_size)
    ax1.set_xlabel("Easting (m)")
    ax1.set_ylabel("Northing (m)")
    # set the colour limits
    print "Setting colour limits to "+str(clim_val[0])+" and "+str(clim_val[1])
    if (clim_val == (0,0)):
        print "I don't think I should be here"
        im.set_clim(0, np.max(raster))
    else:
        print "Now setting colour limits to "+str(clim_val[0])+" and "+str(clim_val[1])
        im.set_clim(clim_val[0],clim_val[1])
    cbar = fig.colorbar(im, orientation='vertical')
    cbar.set_label(colorbarlabel)
    #plt.tight_layout()
    plt.show()
#==============================================================================
#==============================================================================
def BasicDensityPlotGridPlot(FileName, thiscmap='gray',colorbarlabel='Elevation in meters',
                             clim_val = (0,0),FigFileName = 'Image.pdf', FigFormat = 'show'):
    """Plot a raster on a GridSpec layout with UTM ticks and a colorbar.

    FileName      -- path to the raster
    thiscmap      -- matplotlib colormap name
    colorbarlabel -- label for the colorbar
    clim_val      -- (min, max) colour limits; (0,0) means autoscale
    FigFileName   -- output file name used when saving
    FigFormat     -- 'show' displays, 'return' returns the figure object,
                     any other value is passed to plt.savefig as the format
    """
    print "======================================"
    print "Yo, I'm doing a draped plot"
    print FigFileName
    print FigFormat
    print "======================================"
    import matplotlib.pyplot as plt
    import matplotlib.lines as mpllines
    from mpl_toolkits.axes_grid1 import AxesGrid
    label_size = 20
    #title_size = 30
    axis_size = 28
    # Set up fonts for plots
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['arial']
    rcParams['font.size'] = label_size
    # get the data
    raster = LSDMap_IO.ReadRasterArrayBlocks(FileName)
    # now get the extent
    extent_raster = LSDMap_IO.GetRasterExtent(FileName)
    x_min = extent_raster[0]
    x_max = extent_raster[1]
    y_min = extent_raster[2]
    y_max = extent_raster[3]
    # make a figure, sized for a ppt slide
    fig = plt.figure(1, facecolor='white',figsize=(10,7.5))
    gs = plt.GridSpec(100,75,bottom=0.1,left=0.1,right=0.9,top=1.0)
    ax = fig.add_subplot(gs[10:100,10:75])
    #grid = AxesGrid(fig, 111, # similar to subplot(144)
    #                nrows_ncols=(1, 1),
    #                axes_pad=(0.8, 0.3),
    #                label_mode="1",
    #                share_all=True,
    #                cbar_location="right",
    #                cbar_mode="each",
    #                cbar_size="7%",
    #                cbar_pad="2%",
    #                )
    # now get the tick marks
    n_target_tics = 5
    xlocs,ylocs,new_x_labels,new_y_labels = GetTicksForUTM(FileName,x_max,x_min,y_max,y_min,n_target_tics)
    #print "xmax: " + str(x_max)
    #print "xmin: " + str(x_min)
    #print "ymax: " + str(y_max)
    #print "ymin: " + str(y_min)
    # Rows are flipped so the array's first row sits at the top of the map.
    im = ax.imshow(raster[::-1], thiscmap, extent = extent_raster, interpolation="nearest")
    #im = grid[0].imshow(raster, thiscmap, interpolation="nearest")
    cbar = plt.colorbar(im)
    cbar.set_label(colorbarlabel)
    # set the colour limits
    #print "Setting colour limits to "+str(clim_val[0])+" and "+str(clim_val[1])
    if (clim_val == (0,0)):
        #print "I don't think I should be here"
        im.set_clim(0, np.max(raster))
    else:
        print "Now setting colour limits to "+str(clim_val[0])+" and "+str(clim_val[1])
        im.set_clim(clim_val[0],clim_val[1])
    # go through the ticks
    ax.spines['top'].set_linewidth(2.5)
    ax.spines['left'].set_linewidth(2.5)
    ax.spines['right'].set_linewidth(2.5)
    ax.spines['bottom'].set_linewidth(2.5)
    # This affects all axes because we set share_all = True.
    ax.set_xlim(x_min,x_max)
    ax.set_ylim(y_max,y_min)
    ax.set_xticks(xlocs)
    ax.set_yticks(ylocs)
    ax.set_xticklabels(new_x_labels,rotation=60)
    ax.set_yticklabels(new_y_labels)
    ax.set_xlabel("Easting (m)")
    ax.set_ylabel("Northing (m)")
    # This gets all the ticks, and pads them away from the axis so that the corners don't overlap
    ax.tick_params(axis='both', width=2.5, pad = 10)
    for tick in ax.xaxis.get_major_ticks():
        tick.set_pad(10)
    print "The figure format is: " + FigFormat
    if FigFormat == 'show':
        plt.show()
    elif FigFormat == 'return':
        return fig
    else:
        plt.savefig(FigFileName,format=FigFormat)
        fig.clf()
#==============================================================================
#==============================================================================
def BasicDrapedPlotGridPlot(FileName, DrapeName, thiscmap='gray',drape_cmap='gray',
                            colorbarlabel='Elevation in meters',clim_val = (0,0),
                            drape_alpha = 0.6,FigFileName = 'Image.pdf',FigFormat = 'show'):
    """Plot a base raster with a second raster draped on top at partial alpha.

    FileName      -- base raster (e.g. elevation)
    DrapeName     -- raster drawn over the base with transparency
    thiscmap      -- colormap of the base raster
    drape_cmap    -- colormap of the drape raster
    colorbarlabel -- label for the base raster's colorbar
    clim_val      -- (min, max) base colour limits; (0,0) means autoscale
    drape_alpha   -- transparency of the drape layer
    FigFileName   -- output file name used when saving
    FigFormat     -- 'show' displays, 'return' returns the figure object,
                     any other value is passed to plt.savefig as the format
    """
    print "======================================"
    print "Yo, I'm doing a draped plot"
    print FigFileName
    print FigFormat
    print "======================================"
    import matplotlib.pyplot as plt
    import matplotlib.lines as mpllines
    from mpl_toolkits.axes_grid1 import AxesGrid
    label_size = 20
    #title_size = 30
    axis_size = 28
    # Set up fonts for plots
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['arial']
    rcParams['font.size'] = label_size
    # get the data
    raster = LSDMap_IO.ReadRasterArrayBlocks(FileName)
    raster_drape = LSDMap_IO.ReadRasterArrayBlocks(DrapeName)
    # now get the extent
    extent_raster = LSDMap_IO.GetRasterExtent(FileName)
    x_min = extent_raster[0]
    x_max = extent_raster[1]
    y_min = extent_raster[2]
    y_max = extent_raster[3]
    # make a figure, sized for a ppt slide
    fig = plt.figure(1, facecolor='white',figsize=(10,7.5))
    gs = plt.GridSpec(100,75,bottom=0.1,left=0.1,right=0.9,top=1.0)
    ax = fig.add_subplot(gs[10:100,10:75])
    #grid = AxesGrid(fig, 111,
    #                nrows_ncols=(1, 1),
    #                axes_pad=(0.45, 0.15),
    #                label_mode="1",
    #                share_all=True,
    #                cbar_location="right",
    #                cbar_mode="each",
    #                cbar_size="7%",
    #                cbar_pad="2%",
    #                )
    # now get the tick marks
    n_target_tics = 5
    xlocs,ylocs,new_x_labels,new_y_labels = GetTicksForUTM(FileName,x_max,x_min,y_max,y_min,n_target_tics)
    print "xmax: " + str(x_max)
    print "xmin: " + str(x_min)
    print "ymax: " + str(y_max)
    print "ymin: " + str(y_min)
    #Z1 = np.array(([0, 1]*4 + [1, 0]*4)*4)
    #Z1.shape = (8, 8) # chessboard
    #im2 = ax.imshow(Z1, cmap=plt.cm.gray, interpolation='nearest',
    #                 extent=extent_raster)
    #plt.hold(True)
    # Base layer, rows flipped so the first array row is at the map top.
    im1 = ax.imshow(raster[::-1], thiscmap, extent = extent_raster, interpolation="nearest")
    cbar = plt.colorbar(im1)
    cbar.set_label(colorbarlabel)
    # set the colour limits
    print "Setting colour limits to "+str(clim_val[0])+" and "+str(clim_val[1])
    if (clim_val == (0,0)):
        print "Im setting colour limits based on minimum and maximum values"
        im1.set_clim(0, np.max(raster))
    else:
        print "Now setting colour limits to "+str(clim_val[0])+" and "+str(clim_val[1])
        im1.set_clim(clim_val[0],clim_val[1])
    plt.hold(True)
    # Now for the drape: it is in grayscale
    #print "drape_cmap is: "+drape_cmap
    im3 = ax.imshow(raster_drape[::-1], drape_cmap, extent = extent_raster, alpha = drape_alpha, interpolation="nearest")
    # Set the colour limits of the drape
    im3.set_clim(0,np.max(raster_drape))
    ax.spines['top'].set_linewidth(1.5)
    ax.spines['left'].set_linewidth(1.5)
    ax.spines['right'].set_linewidth(1.5)
    ax.spines['bottom'].set_linewidth(1.5)
    #ax.spines['bottom'].set_capstyle('projecting')
    #for spine in ax.spines.values():
    #    spine.set_capstyle('projecting')
    # This affects all axes because we set share_all = True.
    ax.set_xlim(x_min,x_max)
    ax.set_ylim(y_max,y_min)
    ax.set_xticks(xlocs)
    ax.set_yticks(ylocs)
    ax.set_xticklabels(new_x_labels,rotation=60)
    ax.set_yticklabels(new_y_labels)
    ax.set_xlabel("Easting (m)")
    ax.set_ylabel("Northing (m)")
    # This gets all the ticks, and pads them away from the axis so that the corners don't overlap
    ax.tick_params(axis='both', width=1.5, pad = 10)
    for tick in ax.xaxis.get_major_ticks():
        tick.set_pad(10)
    print "The figure format is: " + FigFormat
    if FigFormat == 'show':
        plt.show()
    elif FigFormat == 'return':
        return fig
    else:
        plt.savefig(FigFileName,format=FigFormat,dpi=250)
        fig.clf()
#==============================================================================
#==============================================================================
def DrapedOverHillshade(FileName, DrapeName, thiscmap='gray',drape_cmap='gray',
                        colorbarlabel='Elevation in meters',clim_val = (0,0),
                        drape_alpha = 0.6, ShowColorbar = False):
    """Plot a raster draped over a hillshade computed from FileName.

    FileName      -- raster used both for the hillshade and the map extent
    DrapeName     -- raster drawn over the hillshade with transparency
    thiscmap      -- colormap of the hillshade layer
    drape_cmap    -- colormap of the drape layer
    colorbarlabel -- colorbar label (used only when ShowColorbar is True)
    clim_val      -- (min, max) hillshade colour limits; (0,0) autoscales
    drape_alpha   -- transparency of the drape layer
    ShowColorbar  -- attach a colorbar to the AxesGrid when True
    Shows the figure interactively (plt.show); returns nothing.
    """
    import matplotlib.pyplot as plt
    import matplotlib.lines as mpllines
    from mpl_toolkits.axes_grid1 import AxesGrid
    label_size = 20
    #title_size = 30
    axis_size = 28
    # Set up fonts for plots
    rcParams['font.family'] = 'sans-serif'
    rcParams['font.sans-serif'] = ['arial']
    rcParams['font.size'] = label_size
    hillshade = Hillshade(FileName)
    #hillshade = LSDMap_IO.ReadRasterArrayBlocks(DrapeName)
    raster_drape = LSDMap_IO.ReadRasterArrayBlocks(DrapeName)
    # now get the extent
    extent_raster = LSDMap_IO.GetRasterExtent(FileName)
    x_min = extent_raster[0]
    x_max = extent_raster[1]
    y_min = extent_raster[2]
    y_max = extent_raster[3]
    # make a figure, sized for a ppt slide
    fig = plt.figure(1, facecolor='white',figsize=(10,7.5))
    # The two AxesGrid variants differ only in whether colorbar axes exist.
    if ShowColorbar:
        grid = AxesGrid(fig, 111,
                        nrows_ncols=(1, 1),
                        axes_pad=(0.45, 0.15),
                        label_mode="1",
                        share_all=True,
                        cbar_location="right",
                        cbar_mode="each",
                        cbar_size="7%",
                        cbar_pad="2%",
                        )
    else:
        grid = AxesGrid(fig, 111,
                        nrows_ncols=(1, 1),
                        axes_pad=(0.45, 0.15),
                        label_mode="1",
                        share_all=True,
                        )
    # now get the tick marks
    n_target_tics = 5
    xlocs,ylocs,new_x_labels,new_y_labels = GetTicksForUTM(FileName,x_max,x_min,y_max,y_min,n_target_tics)
    print "xmax: " + str(x_max)
    print "xmin: " + str(x_min)
    print "ymax: " + str(y_max)
    print "ymin: " + str(y_min)
    # Hillshade layer, rows flipped so the first array row is at the map top.
    im1 = grid[0].imshow(hillshade[::-1], thiscmap, extent = extent_raster, interpolation="nearest")
    #im = grid[0].imshow(raster, thiscmap, interpolation="nearest")
    if ShowColorbar:
        cbar = grid.cbar_axes[0].colorbar(im1)
        cbar.set_label_text(colorbarlabel)
    # set the colour limits
    print "Setting colour limits to "+str(clim_val[0])+" and "+str(clim_val[1])
    if (clim_val == (0,0)):
        print "I don't think I should be here"
        im1.set_clim(0, np.max(hillshade))
    else:
        print "Now setting colour limits to "+str(clim_val[0])+" and "+str(clim_val[1])
        im1.set_clim(clim_val[0],clim_val[1])
    # Now for the drape: it is in grayscape
    im2 = grid[0].imshow(raster_drape[::-1], drape_cmap, extent = extent_raster, alpha = drape_alpha, interpolation="nearest")
    # Set the colour limits of the drape
    im2.set_clim(0,np.max(raster_drape))
    # This affects all axes because we set share_all = True.
    grid.axes_llc.set_xlim(x_min,x_max)
    grid.axes_llc.set_ylim(y_max,y_min)
    grid.axes_llc.set_xticks(xlocs)
    grid.axes_llc.set_yticks(ylocs)
    grid.axes_llc.set_xticklabels(new_x_labels,rotation=60)
    grid.axes_llc.set_yticklabels(new_y_labels)
    grid.axes_llc.set_xlabel("Easting (m)")
    grid.axes_llc.set_ylabel("Northing (m)")
    plt.show()
#==============================================================================
#==============================================================================
# Make a simple hillshade plot
def Hillshade(raster_file, azimuth = 315, angle_altitude = 45, NoDataValue = -9999):
array = LSDMap_IO.ReadRasterArrayBlocks(raster_file,raster_band=1)
x, y = np.gradient(array)
slope = np.pi/2. - np.arctan(np.sqrt(x*x + y*y))
aspect = np.arctan2(-x, y)
azimuthrad = azimuth*np.pi / 180.
altituderad = angle_altitude*np.pi / 180.
shaded = np.sin(altituderad) * np.sin(slope)\
+ np.cos(altituderad) * np.cos(slope)\
* np.cos(azimuthrad - aspect)
#this_array = 255*(shaded + 1)/2
return 255*(shaded + 1)/2
#==============================================================================
#==============================================================================
def SwathPlot(path, filename, axis):
# get the path to the raster file
NewPath = LSDOst.AppendSepToDirectoryPath(path)
FileName = NewPath+filename
# get the data vectors
means,medians,std_deviations,twentyfifth_percentile,seventyfifth_percentile = LSDMap_BM.SimpleSwath(path, filename, axis)
print "Means shape is: "
print means.shape
x_vec,y_vec = LSDMap_IO.GetLocationVectors(FileName)
print "X shape is: "
print x_vec.shape
print "Y shape is: "
print y_vec.shape
import matplotlib.pyplot as plt
import matplotlib.lines as mpllines
from mpl_toolkits.axes_grid1 import AxesGrid
label_size = 20
#title_size = 30
axis_size = 28
# Set up fonts for plots
rcParams['font.family'] = 'sans-serif'
rcParams['font.sans-serif'] = ['arial']
rcParams['font.size'] = label_size
# make a figure, sized for a ppt slide
fig = plt.figure(1, facecolor='white',figsize=(10,7.5))
gs = plt.GridSpec(100,75,bottom=0.1,left=0.1,right=0.9,top=1.0)
ax = fig.add_subplot(gs[10:100,10:75])
if axis == 0:
dir_vec = x_vec
else:
dir_vec = y_vec
min_sd = np.subtract(means,std_deviations)
plus_sd = np.add(means,std_deviations)
ax.plot(dir_vec,means, linewidth = 2, color = "red")
#ax.fill_between(dir_vec, twentyfifth_percentile, seventyfifth_percentile, facecolor='green', alpha = 0.7, interpolate=True)
ax.fill_between(dir_vec, min_sd, plus_sd, facecolor='blue', alpha = 0.5, interpolate=True)
ax.set_xlim(dir_vec[0],dir_vec[-1])
plt.show()
#==============================================================================
#==============================================================================
#==============================================================================
def round_to_n(x, n):
if n < 1:
raise ValueError("number of significant digits must be >= 1")
# Use %e format to get the n most significant digits, as a string.
format = "%." + str(n-1) + "e"
as_string = format % x
return float(as_string)
#==============================================================================
| [
"simon.m.mudd@ed.ac.uk"
] | simon.m.mudd@ed.ac.uk |
383074dc0b73c2d849f90519a9ec6f5795dc935c | e92a3d0fb77120be99de6040cb6cd34eda0a95f4 | /urllib, requests, re, webcrawler - усиленная работа со скрапперами сайтов/code/filter_example_10.py | 7ec48ce5918ab84d7e1f5f1e182bb92b97a14424 | [] | no_license | Python18Academy/python_first_level | 495f85631f5afc737aa156ef8ca0ea307340c322 | 9ce490da3108474b135a17086f4d11f2a3bbbe55 | refs/heads/master | 2023-09-04T17:00:36.920987 | 2021-03-31T18:44:37 | 2021-03-31T18:44:37 | 331,934,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 134 | py | a = [1, -4, 6, 8, -10]
def func(x):
if x > 0:
return 1
else:
return 0
b = filter(func, a)
b = list(b)
print(b) | [
"isakura313@gmail.com"
] | isakura313@gmail.com |
5a4fca19914b7786c3fc10ad8986966a961fd341 | 5585352909cb26420ec3f4b54df2253a2112c5c9 | /0925/isLongPressedName.py | c6c189d7901cc900138377cabcd1a1d06b6031af | [] | no_license | minuso/leetcode | fd05472a782463b27575c9149081bcd38f03a7c5 | 56cafa52a6a3534efc2c32db4acf516b2a285a46 | refs/heads/master | 2020-04-25T17:18:56.299245 | 2019-06-16T16:19:09 | 2019-06-16T16:19:09 | 172,943,397 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 239 | py | def isLongPressedName(self, name: str, typed: str) -> bool:
i, j = 0, 0
while i < len(name) and j < len(typed):
if name[i] == typed[j]:
i, j = i+1, j+1
else:
j += 1
return i == len(name) | [
"minuscholar@gmail.com"
] | minuscholar@gmail.com |
319efc41188b3161c74b9492ad912388557f640e | 91b2fb1fb6df216f2e365c3366bab66a567fc70d | /Week10/每日一题/面试题 02.05. 链表求和.py | adeaf8a3b372d2228ba46b48489634b0acdac3df | [] | no_license | hrz123/algorithm010 | d17aee642f03f607a7984beb099eec18f2de1c8e | 817911d4282d2e226518b3533dff28282a91b3d4 | refs/heads/master | 2022-12-20T14:09:26.365781 | 2020-10-11T04:15:57 | 2020-10-11T04:15:57 | 270,178,423 | 1 | 0 | null | 2020-06-07T03:21:09 | 2020-06-07T03:21:09 | null | UTF-8 | Python | false | false | 2,392 | py | # 面试题 02.05. 链表求和.py
# Definition for singly-linked list.
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
carry = 0
dummy = ListNode(0)
cur = dummy
while l1 or l2:
n1 = l1.val if l1 else 0
n2 = l2.val if l2 else 0
carry, val = divmod(n1 + n2 + carry, 10)
cur.next = ListNode(val)
cur = cur.next
l1 = l1.next if l1 else None
l2 = l2.next if l2 else None
if carry:
cur.next = ListNode(1)
return dummy.next
# 进阶
class Solution:
def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
v1, v2 = 0, 0
while l1:
v1 = v1 * 10 + l1.val
l1 = l1.next
while l2:
v2 = v2 * 10 + l2.val
l2 = l2.next
val = v1 + v2
pre, cur = None, None
while val:
val, mod = divmod(val, 10)
cur = ListNode(mod)
cur.next = pre
pre = cur
return cur
class Solution:
def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
dummy = ListNode(0)
cur = dummy
carry = 0
while l1 or l2:
n1, l1 = (l1.val, l1.next) if l1 else (0, l1)
n2, l2 = (l2.val, l2.next) if l2 else (0, l2)
carry, mod = divmod(n1 + n2 + carry, 10)
cur.next = ListNode(mod)
cur = cur.next
if carry:
cur.next = ListNode(1)
return dummy.next
class Solution:
def addTwoNumbers(self, l1: ListNode, l2: ListNode) -> ListNode:
v1, v2 = 0, 0
while l1:
v1 = v1 * 10 + l1.val
l1 = l1.next
while l2:
v2 = v2 * 10 + l2.val
l2 = l2.next
val = v1 + v2
pre = cur = None
while val:
val, mod = divmod(val, 10)
cur = ListNode(val)
cur.next = pre
pre = cur
return cur
def main():
sol = Solution()
l1 = ListNode(1)
l1.next = ListNode(2)
l2 = ListNode(2)
l2.next = ListNode(8)
l3 = sol.addTwoNumbers(l1, l2)
while l3:
print(l3.val)
l3 = l3.next
if __name__ == '__main__':
main()
| [
"2403076194@qq.com"
] | 2403076194@qq.com |
d687316af88ab599c537375566f06965c12be41d | e522dc3b8ae16fb6adf8c679c2fcd61e06979f29 | /example/gpio_example.py | 389b5f9b1201efa4304a5a4758bf4f01ffb70ffb | [
"MIT"
] | permissive | amaork/raspi-io | 96e92330555e7700f54633f582efbc7620f8b10b | aaea4532569010a64f3c54036b9db7eb81515d1a | refs/heads/master | 2021-09-17T15:27:43.853195 | 2021-08-27T08:51:24 | 2021-08-27T08:51:24 | 94,192,125 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | #!/usr/bin/env python3.5
from raspi_io import GPIO
import raspi_io.utility as utility
if __name__ == "__main__":
io = [20, 21]
gpio = GPIO(utility.scan_server()[0])
gpio.setmode(GPIO.BCM)
gpio.setup(io, GPIO.OUT)
gpio.output(io, 1)
gpio.output(io, 0)
gpio.output(io, [1, 0])
gpio.output(io, [0, 1])
gpio.setup(21, GPIO.IN, GPIO.PUD_DOWN)
print(gpio.input(21))
print(gpio.input(21))
print(gpio.input(21))
| [
"amaork@gmail.com"
] | amaork@gmail.com |
40d8d8ed9cbea9b92a4400832f01caf2cd5d0677 | 897871d09b8b1e86c5a48599839ba9534260f2c9 | /aromawine3-new_update__with_checkout/admin_manage_setting/views.py | 5a329af7a0a1959caaf73fde9e879a8106fc268a | [] | no_license | sidkushwah123/wine | 0b8b8fdf44068b4488d5f1ae5d34a24d3fff19a9 | bb29e84fb4a0709aca36e819ae6191147a9691b5 | refs/heads/main | 2023-07-27T14:03:06.814484 | 2021-09-11T15:25:39 | 2021-09-11T15:25:39 | 405,354,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,252 | py | from django.shortcuts import render,HttpResponseRedirect
from django.views import generic
from django.contrib.messages.views import SuccessMessageMixin
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from .models import AwAdminSetting,AwManageShipping
from .forms import AwAdminSettingForm,AwManageShippingForm
from .serializers import ApiWineProjectInfoSerializers
from django.urls import reverse
from django.urls import reverse_lazy
from django.contrib import messages
from django.conf import settings
from django.contrib.auth.mixins import UserPassesTestMixin
from django.shortcuts import redirect
from django.template.defaulttags import register
from django.template.loader import render_to_string
from rest_framework import generics
from rest_framework.views import APIView
from rest_framework.response import Response
# Create your views here.
# API Start================
class ApiWineProjectInfo(APIView):
def get(self,request):
project_info = {}
if AwAdminSetting.objects.all().exists():
get_data = AwAdminSetting.objects.all().first()
get_res_sri = ApiWineProjectInfoSerializers(get_data,context={"request": request})
project_info = get_res_sri.data
return Response({"data":project_info},status=200)
# API END================
@register.filter(name='get_logo')
def get_logo(demo):
logo_image = settings.BASE_URL+"static/web/assets/image/wine-logo.svg"
if AwAdminSetting.objects.all().exists():
get_data = AwAdminSetting.objects.all().first()
if get_data.Logo:
logo_image = get_data.Logo.url
return str(logo_image)
@register.filter(name='get_favicon')
def get_favicon(demo):
logo_image = settings.BASE_URL+"static/web/assets/image/logo.png"
if AwAdminSetting.objects.all().exists():
get_data = AwAdminSetting.objects.all().first()
if get_data.favicon:
logo_image = get_data.favicon.url
return str(logo_image)
@register.filter(name='get_project_name')
def get_project_name(demo):
project_name = "AROMA OF WINE"
if AwAdminSetting.objects.all().exists():
get_data = AwAdminSetting.objects.all().first()
if get_data.Project_Name:
project_name = get_data.Project_Name
return str(project_name)
@register.filter(name='get_tag_line')
def get_tag_line(demo):
project_tag_line = "Wine inspired by your palate"
if AwAdminSetting.objects.all().exists():
get_data = AwAdminSetting.objects.all().first()
if get_data.Project_Tag_Line:
project_tag_line = get_data.Project_Tag_Line
return str(project_tag_line)
@register.filter(name='get_GST')
def get_GST(demo):
get_GST_amount = "0"
if AwAdminSetting.objects.all().exists():
get_data = AwAdminSetting.objects.all().first()
if get_data.GST:
get_GST_amount = get_data.GST
return str(get_GST_amount)
@register.filter(name='get_duty')
def get_duty(demo):
get_duty_amount = "0"
if AwAdminSetting.objects.all().exists():
get_data = AwAdminSetting.objects.all().first()
if get_data.Duty:
get_duty_amount = get_data.Duty
return str(get_duty_amount)
@register.filter(name='get_analytics')
def get_analytics(demo):
get_analytics_data = ""
if AwAdminSetting.objects.all().exists():
get_data = AwAdminSetting.objects.all().first()
if get_data.Analytics:
get_analytics_data = get_data.Analytics
return str(get_analytics_data)
@register.filter(name='get_social_media_info')
def get_social_media_info(demo):
get_data = ""
if AwAdminSetting.objects.all().exists():
get_data = AwAdminSetting.objects.all().first()
return render_to_string('web/home/social_media_link.html', {"get_data":get_data})
@method_decorator(login_required , name="dispatch")
class ManageGeneralSettingView(SuccessMessageMixin,generic.CreateView):
form_class = AwAdminSettingForm
template_name = 'admin/setting/general.html'
def get_success_message(self, cleaned_data):
print(cleaned_data)
return "General add successfully."
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['Page_title'] = "General Setting"
return context
def dispatch(self, request, *args, **kwargs):
if AwAdminSetting.objects.all().exists():
get_data = AwAdminSetting.objects.all().first()
return HttpResponseRedirect(reverse('admin_manage_setting:update_general',args=(get_data.id,)))
else:
return super(ManageGeneralSettingView, self).dispatch(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save()
messages.info(self.request, 'Update successfully.')
# success_url = reverse_lazy('admin_manage_setting:update_general' self.kwargs['pk'])
return HttpResponseRedirect(reverse('admin_manage_setting:update_general',args=(self.object.id,)))
@method_decorator(login_required, name="dispatch")
class GeneralUpdateView(SuccessMessageMixin, generic.UpdateView):
form_class = AwAdminSettingForm
template_name = 'admin/setting/general.html'
queryset = AwAdminSetting.objects.all()
def get_success_message(self, cleaned_data):
print(cleaned_data)
return "update successfully."
def get_context_data(self, **kwargs):
# Call the base implementation first to get a context
context = super().get_context_data(**kwargs)
# Add in a QuerySet of all the books
context['Page_title'] = "General Setting"
return context
def form_valid(self, form):
self.object = form.save()
messages.info(self.request, 'Update successfully.')
# success_url = reverse_lazy('admin_manage_setting:update_general' self.kwargs['pk'])
return HttpResponseRedirect(reverse('admin_manage_setting:update_general',args=(self.kwargs['pk'],)))
@method_decorator(login_required , name="dispatch")
class ManageShippingSettingView(SuccessMessageMixin,generic.TemplateView):
# queryset = AwProducers.objects.all().order_by("-id")
form_class = AwManageShippingForm
template_name = 'admin/setting/shipping.html'
def get(self, request, *args, **kwargs):
form = self.form_class
queryset = AwManageShipping.objects.all().order_by("-id")
return render(request, self.template_name,{'form_class': form, 'Page_title': "Manage Shipping", "object": queryset})
def post(self, request, *args, **kwargs):
form = self.form_class(request.POST)
queryset = AwManageShipping.objects.all().order_by("-id")
if form.is_valid():
Shipping_ins = form.save(commit=False)
Shipping_ins.Created_by = request.user
Shipping_ins.save()
form.save_m2m()
messages.info(request, "Shipping add successfully.")
return render(request, self.template_name,
{'form_class': form, "object": queryset, 'Page_title': "Manage Shipping"})
class ShippingDeleteView(SuccessMessageMixin,generic.DeleteView):
model = AwManageShipping
template_name = 'admin/setting/shipping_delete.html'
success_url = reverse_lazy('admin_manage_setting:shipping')
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context['Page_title'] = "Delete shipping"
return context
def get_success_message(self, cleaned_data):
print(cleaned_data)
return "shipping remove successfully."
def update_shipping(request):
if request.method == 'POST':
ids = request.POST.getlist('ids[]')
Free_Shipping_Amount = request.POST.getlist('Free_Shipping_Amount[]')
Free_Flat_Shipping = request.POST.getlist('Free_Flat_Shipping[]')
for i in range(0,len(ids)):
AwManageShipping.objects.filter(id=ids[i]).update(Free_Shipping_Amount=Free_Shipping_Amount[i],Free_Flat_Shipping=Free_Flat_Shipping[i])
messages.info(request, "Shipping update successfully.")
return redirect(settings.BASE_URL+"admin/settings/fee-shipping")
| [
"sachinkushwah0007@gmail.com"
] | sachinkushwah0007@gmail.com |
92f985f89dee5987ed7ece4d443d4638d9a09a6e | 711756b796d68035dc6a39060515200d1d37a274 | /output_exocyst_tags/optimized_3617_sml.py | ee0308f6fdb401a92bd718d4bf86136600712fb3 | [] | no_license | batxes/exocyst_scripts | 8b109c279c93dd68c1d55ed64ad3cca93e3c95ca | a6c487d5053b9b67db22c59865e4ef2417e53030 | refs/heads/master | 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,722 | py | import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "Sec3_GFPN" not in marker_sets:
s=new_marker_set('Sec3_GFPN')
marker_sets["Sec3_GFPN"]=s
s= marker_sets["Sec3_GFPN"]
mark=s.place_marker((405.721, 519.261, 370.977), (0.15, 0.78, 0.66), 2)
if "Sec3_GFPC" not in marker_sets:
s=new_marker_set('Sec3_GFPC')
marker_sets["Sec3_GFPC"]=s
s= marker_sets["Sec3_GFPC"]
mark=s.place_marker((391.015, 482.144, 340.67), (0.15, 0.78, 0.66), 2)
if "Sec3_Anch" not in marker_sets:
s=new_marker_set('Sec3_Anch')
marker_sets["Sec3_Anch"]=s
s= marker_sets["Sec3_Anch"]
mark=s.place_marker((544.978, 316.762, 358.566), (0.15, 0.58, 0.66), 2)
if "Sec5_GFPN" not in marker_sets:
s=new_marker_set('Sec5_GFPN')
marker_sets["Sec5_GFPN"]=s
s= marker_sets["Sec5_GFPN"]
mark=s.place_marker((440.321, 499.369, 427.91), (0.38, 0.24, 0.37), 2)
if "Sec5_GFPC" not in marker_sets:
s=new_marker_set('Sec5_GFPC')
marker_sets["Sec5_GFPC"]=s
s= marker_sets["Sec5_GFPC"]
mark=s.place_marker((408.241, 464.21, 314.908), (0.38, 0.24, 0.37), 2)
if "Sec6_GFPN" not in marker_sets:
s=new_marker_set('Sec6_GFPN')
marker_sets["Sec6_GFPN"]=s
s= marker_sets["Sec6_GFPN"]
mark=s.place_marker((399.76, 448.081, 410.929), (0.84, 0.98, 0.24), 2)
if "Sec6_GFPC" not in marker_sets:
s=new_marker_set('Sec6_GFPC')
marker_sets["Sec6_GFPC"]=s
s= marker_sets["Sec6_GFPC"]
mark=s.place_marker((580.779, 491.271, 431.133), (0.84, 0.98, 0.24), 2)
if "Sec6_Anch" not in marker_sets:
s=new_marker_set('Sec6_Anch')
marker_sets["Sec6_Anch"]=s
s= marker_sets["Sec6_Anch"]
mark=s.place_marker((565.705, 663.093, 336.353), (0.84, 0.78, 0.24), 2)
if "Sec8_GFPC" not in marker_sets:
s=new_marker_set('Sec8_GFPC')
marker_sets["Sec8_GFPC"]=s
s= marker_sets["Sec8_GFPC"]
mark=s.place_marker((615.651, 489.717, 370.16), (0.62, 0.67, 0.45), 2)
if "Sec8_Anch" not in marker_sets:
s=new_marker_set('Sec8_Anch')
marker_sets["Sec8_Anch"]=s
s= marker_sets["Sec8_Anch"]
mark=s.place_marker((491.39, 296.306, 340.713), (0.62, 0.47, 0.45), 2)
if "Sec10_GFPN" not in marker_sets:
s=new_marker_set('Sec10_GFPN')
marker_sets["Sec10_GFPN"]=s
s= marker_sets["Sec10_GFPN"]
mark=s.place_marker((650.698, 503.069, 401.115), (0, 0.91, 0), 2)
if "Sec10_GFPC" not in marker_sets:
s=new_marker_set('Sec10_GFPC')
marker_sets["Sec10_GFPC"]=s
s= marker_sets["Sec10_GFPC"]
mark=s.place_marker((432.995, 465.737, 224.378), (0, 0.91, 0), 2)
if "Sec10_Anch" not in marker_sets:
s=new_marker_set('Sec10_Anch')
marker_sets["Sec10_Anch"]=s
s= marker_sets["Sec10_Anch"]
mark=s.place_marker((514.351, 652.063, 435.66), (0, 0.71, 0), 2)
if "Sec15_GFPN" not in marker_sets:
s=new_marker_set('Sec15_GFPN')
marker_sets["Sec15_GFPN"]=s
s= marker_sets["Sec15_GFPN"]
mark=s.place_marker((471.642, 498.911, 488.563), (0.11, 0.51, 0.86), 2)
if "Sec15_GFPC" not in marker_sets:
s=new_marker_set('Sec15_GFPC')
marker_sets["Sec15_GFPC"]=s
s= marker_sets["Sec15_GFPC"]
mark=s.place_marker((658.697, 497.172, 336.078), (0.11, 0.51, 0.86), 2)
if "Sec15_Anch" not in marker_sets:
s=new_marker_set('Sec15_Anch')
marker_sets["Sec15_Anch"]=s
s= marker_sets["Sec15_Anch"]
mark=s.place_marker((565.639, 553.271, 211.433), (0.11, 0.31, 0.86), 2)
if "Exo70_GFPN" not in marker_sets:
s=new_marker_set('Exo70_GFPN')
marker_sets["Exo70_GFPN"]=s
s= marker_sets["Exo70_GFPN"]
mark=s.place_marker((398.709, 504.236, 326.959), (0.89, 0.47, 0.4), 2)
if "Exo70_GFPC" not in marker_sets:
s=new_marker_set('Exo70_GFPC')
marker_sets["Exo70_GFPC"]=s
s= marker_sets["Exo70_GFPC"]
mark=s.place_marker((637.387, 452.385, 390.377), (0.89, 0.47, 0.4), 2)
if "Exo70_Anch" not in marker_sets:
s=new_marker_set('Exo70_Anch')
marker_sets["Exo70_Anch"]=s
s= marker_sets["Exo70_Anch"]
mark=s.place_marker((424.742, 698.993, 443.663), (0.89, 0.27, 0.4), 2)
if "Exo84_GFPN" not in marker_sets:
s=new_marker_set('Exo84_GFPN')
marker_sets["Exo84_GFPN"]=s
s= marker_sets["Exo84_GFPN"]
mark=s.place_marker((451.255, 515.043, 419.368), (0.5, 0.7, 0), 2)
if "Exo84_GFPC" not in marker_sets:
s=new_marker_set('Exo84_GFPC')
marker_sets["Exo84_GFPC"]=s
s= marker_sets["Exo84_GFPC"]
mark=s.place_marker((425.023, 454.16, 296.551), (0.5, 0.7, 0), 2)
if "Exo84_Anch" not in marker_sets:
s=new_marker_set('Exo84_Anch')
marker_sets["Exo84_Anch"]=s
s= marker_sets["Exo84_Anch"]
mark=s.place_marker((520.301, 618.746, 241.537), (0.5, 0.5, 0), 2)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| [
"batxes@gmail.com"
] | batxes@gmail.com |
b72f8e277d42961c941209ec8450b8269aa663e5 | 240dc81851dd0243c0b14511f6d8b563ab91c890 | /admin/backup_views.py | 76ae812e8d20d8a67fe351feec027d7f34568084 | [] | no_license | prcek/TSReg | 0aac7ffc7992b731d12dc3959d661bc8c3639744 | ea6eac514d8e783ddaeeed6181b9ab45d5673c05 | refs/heads/master | 2020-05-30T03:19:46.737202 | 2017-06-08T08:14:00 | 2017-06-08T08:14:00 | 2,208,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,578 | py | # -*- coding: utf-8 -*-
from django import forms
from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.shortcuts import render_to_response, redirect, get_object_or_404
from django.template import RequestContext,Context, loader
import utils.config as cfg
from utils.data import UnicodeReader
from utils.mail import valid_email
from enroll.models import Course,Student,Season
from admin.models import FileBlob,CourseBackup
from google.appengine.api import taskqueue
import urllib
import logging
import cStringIO
import datetime
from utils.locale import local_timezone
ERROR_MESSAGES={'required': 'Položka musí být vyplněna', 'invalid': 'Neplatná hodnota'}
class SeasonField(forms.ChoiceField):
def valid_value(self, value):
self._set_choices(Season.get_SEASON_CHOICES())
return super(SeasonField,self).valid_value(value)
class SeasonFilterForm(forms.Form):
season_key = SeasonField(label='sezóna', error_messages=ERROR_MESSAGES)
def __init__(self,data = None, **kwargs):
super(self.__class__,self).__init__(data, **kwargs)
self.fields['season_key']._set_choices(Season.get_SEASON_CHOICES())
def index(request):
season = None
if request.method == 'POST':
filter_form = SeasonFilterForm(request.POST)
if filter_form.is_valid():
season = Season.get(str(filter_form.cleaned_data['season_key']))
if not season is None:
request.session['backup_season_key']=str(season.key())
else:
bskey = request.session.get('backup_season_key',None)
if not bskey is None:
season = Season.get(str(bskey))
if season is None:
filter_form = SeasonFilterForm()
else:
filter_form = SeasonFilterForm({'season_key':str(season.key())})
if season is None:
course_list = None
else:
course_list = Course.list_season(str(season.key()))
return render_to_response('admin/backup_index.html', RequestContext(request, { 'filter_form':filter_form, 'course_list': course_list}))
def plan_backup(request,course_id):
course = Course.get_by_id(int(course_id))
if course is None:
raise Http404
logging.info('course: %s'%course)
taskqueue.add(url='/task/course_backup/', params={'course_id':course.key().id()})
return HttpResponseRedirect('../..')
def plan_fullsync(request,course_id):
course = Course.get_by_id(int(course_id))
if course is None:
raise Http404
logging.info('course: %s'%course)
taskqueue.add(url='/task/course_fullsync/', params={'course_id':course.key().id()})
return HttpResponseRedirect('../..')
def index_course(request, course_id):
course = Course.get_by_id(int(course_id))
if course is None:
raise Http404
logging.info('course: %s'%course)
backup_list = CourseBackup.list_for_course(str(course.key()))
return render_to_response('admin/backup_list.html', RequestContext(request, { 'backup_list': backup_list, 'course':course}))
def get_backup(request, course_id, course_backup_id):
course = Course.get_by_id(int(course_id))
if course is None:
raise Http404
course_backup = CourseBackup.get_by_id(int(course_backup_id))
if course_backup is None:
raise Http404
r = HttpResponse(course_backup.data,mimetype='application/vnd.ms-excel')
file_name = urllib.quote(course_backup.filename)
logging.info(file_name)
r['Content-Disposition'] = "attachment; filename*=UTF-8''%s"%file_name
return r
| [
"tomas.hluchan@gmail.com"
] | tomas.hluchan@gmail.com |
f172ee20a98e4007c2809b15d762cb8e1c58a40a | 2ca07aecfa6ff25b0baae6dc9a707a284c2d1b6d | /common/sectools/sectools/common/crypto/openssl_binary_implementation.py | 2ddc52ade7e20fcb1109fc2c986ff934e6ecc241 | [
"BSD-3-Clause",
"OpenSSL",
"MIT",
"LicenseRef-scancode-openssl",
"LicenseRef-scancode-ssleay-windows",
"BSL-1.0",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | zhilangtaosha/msm8996-wp-1-0_test_device | ef05af263ba7955263ff91eb81d45b2437bc492e | 6af9b44abbc4a367a9aaae26707079974c535f08 | refs/heads/master | 2023-03-19T02:42:09.581740 | 2021-02-21T01:20:19 | 2021-02-21T01:20:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 50,300 | py | # ===============================================================================
#
# Copyright (c) 2013-2016 Qualcomm Technologies, Inc.
# All Rights Reserved.
# Confidential and Proprietary - Qualcomm Technologies, Inc.
#
# ===============================================================================
import subprocess
import tempfile
import hashlib
import os
import array
import re
import utility_functions
import binascii
from sectools.common.utils.c_logging import logger
from common_classes import HmacParams
from sectools.common.utils.c_misc import hexdump
# Paths to external crypto executables; each must be set (overridden) by the
# service provider before any function in this module is called.
openssl_binary_path = None # This field must be overridden by a service provider
ccm_crypto_path = None # This field must be overridden by a service provider
cbc_crypto_path = None # This field must be overridden by a service provider
# OU-field strings used to tag the hash algorithm in attestation certificates.
SHA1_OU_STRING = '07 0000 SHA1'
SHA256_OU_STRING = '07 0001 SHA256'
# RSA-PSS options handed to "openssl pkeyutl" (per OpenSSL rsa_pss_saltlen
# semantics, '-1' means the salt length equals the digest length).
PSS_SALT_LENGTH = '-1'
PSS_HASH_ALGORITHM = 'sha256'
@utility_functions.logwrap
def gen_rsa_key_pair(key_size_in_bits, key_exponent, priv_key_output_file, pub_key_output_file):
    """ Generate an RSA key pair via the OpenSSL binary.

    input:
        key_size_in_bits: Size of the key in bits.
        key_exponent: [3, 257, 65537]
                      Public exponent used in key generation. Any other
                      value falls back to 65537 with a warning.
        priv_key_output_file: File name for storing private key, or None to
                      use (and afterwards delete) a temporary file.
        pub_key_output_file: File name for storing public key, or None to
                      skip writing it to disk.

    output:
        returned value: {"public_key": [Generated public key],
                         "private_key": [Generated private key] }
                        Dictionary holding the PEM-encoded public and
                        private keys.

    raises:
        RuntimeError if the OpenSSL binary exits with a non-zero status.
    """
    logger.debug("the openssl_binary is:{0}".format(openssl_binary_path))

    # Only well-known RSA public exponents are accepted; anything else is
    # replaced by the common default of 65537.
    if key_exponent in (3, 257, 65537):
        exponent_str = str(key_exponent)
    else:
        logger.warning("Exponent value supplied is INVALID! going with default exponent of 65537.")
        exponent_str = "65537"

    key_size_str = str(key_size_in_bits)

    if priv_key_output_file is not None:
        pk_file = open(priv_key_output_file, "wb")
    else:
        pk_file = tempfile.NamedTemporaryFile(delete=False)
        logger.debug("No output file specified for private key storage, so creating temp file: " + pk_file.name)

    try:
        private_key = utility_functions.system_command_logged([openssl_binary_path, "genpkey",
                                        "-algorithm", "RSA",
                                        "-outform", "PEM",
                                        "-pkeyopt", "rsa_keygen_bits:" + key_size_str,
                                        "-pkeyopt", "rsa_keygen_pubexp:" + exponent_str], stderr_to_temp=True)
    except subprocess.CalledProcessError as e:
        # Close before unlinking: removing a still-open file fails on Windows.
        pk_file.close()
        os.unlink(pk_file.name)
        logger.critical("gen_rsa_key_pair: OPENSSL Errored out on generation of RSA key.")
        logger.critical("call to OpenSSL binary returned an error!: retval = " + str(e.returncode) + " Command = " + str(e.cmd))
        raise RuntimeError("call to OpenSSL binary returned an error!: retval = " + str(e.returncode) + " Command = " + str(e.cmd))

    logger.debug("Writing generated private key to PEM file:" + pk_file.name)
    pk_file.write(private_key)
    pk_file.close()

    try:
        public_key = utility_functions.system_command_logged([openssl_binary_path, "rsa", "-in", pk_file.name, "-pubout"], stderr_to_temp=True)
    except subprocess.CalledProcessError as e:
        os.unlink(pk_file.name)
        logger.critical("gen_rsa_key_pair: OPENSSL could not get public key")
        logger.critical("call to OpenSSL binary returned an error!: retval = " + str(e.returncode) + " Command = " + str(e.cmd))
        raise RuntimeError("call to OpenSSL binary returned an error!: retval = " + str(e.returncode) + " Command = " + str(e.cmd))

    if pub_key_output_file is not None:
        logger.debug("Writing public key to file: " + pub_key_output_file)
        pubk_file = open(pub_key_output_file, "wb")
        pubk_file.write(public_key)
        pubk_file.close()

    if priv_key_output_file is None:
        logger.debug("Since private key file is temporary, deleting: " + pk_file.name)
        os.unlink(pk_file.name)

    return {'private_key': private_key, 'public_key': public_key}
def generate_hash(hashing_algorithm, file_to_hash):
    """ Generate the hash of a file using the OpenSSL dgst command.

    input:
        hashing_algorithm: ["SHA1"|"SHA256"] (case-insensitive). Any other
            value falls back to SHA256 with a warning.
        file_to_hash: The file to calculate the hash of.
    output:
        string representing the hex digest of file_to_hash, or the empty
        string if OpenSSL fails.
    """
    generated_hash = ''
    if hashing_algorithm.lower() not in ["sha1", "sha256"]:
        logger.warning("The algorithm specified is invalid! Using SHA256 as default.")
        hashing_algorithm = "-sha256"
    else:
        # OpenSSL expects the digest name as a command-line switch.
        hashing_algorithm = "-" + hashing_algorithm
    try:
        # dgst output looks like "SHA256(file)= <hex>"; keep the hex part.
        generated_hash = utility_functions.system_command_logged([openssl_binary_path, "dgst", hashing_algorithm, file_to_hash]).rstrip().split("= ")[1]
    except Exception:
        # Was a bare except: narrowed so SystemExit/KeyboardInterrupt propagate.
        logger.critical("generate_hash: OPENSSL Hash generation failed")
    return generated_hash
def privkey_der_to_pem(der_privkey):
    """ Convert binary DER format PRIVATE key into base64 coded ASCII PEM format

    input:
        der_privkey: String containing binary PRIVATE KEY
    output:
        pem_privkey: String containing base64 PEM PRIVATE KEY, or the empty
            string if the conversion fails.
    """
    pem_privkey = ''
    der_tempfile_name = utility_functions.store_data_to_temp_file(der_privkey)
    try:
        pem_privkey = utility_functions.system_command_logged([openssl_binary_path, 'rsa', '-in', der_tempfile_name, '-inform', 'DER', '-outform', 'PEM'], stderr_to_temp=True)
        logger.debug2("PEM Format Private Key: " + pem_privkey)
    except Exception:
        # Was a bare except: narrowed so SystemExit/KeyboardInterrupt propagate.
        logger.critical("privkey_der_to_pem: OPENSSL Could not convert DER key to PEM key")
    finally:
        # Always remove the temp file holding key material.
        os.unlink(der_tempfile_name)
        logger.debug("Deleting temporary file: " + der_tempfile_name)
    return pem_privkey
def privkey_pem_to_der(pem_privkey):
    """ Convert PEM format PRIVATE key into DER format

    input:
        pem_privkey: String containing base64 PEM Private key
    output:
        der_privkey: String containing binary Private key, or the empty
            string if the conversion fails.
    """
    der_privkey = ''
    pem_tempfile_name = utility_functions.store_data_to_temp_file(pem_privkey)
    try:
        der_privkey = utility_functions.system_command_logged([openssl_binary_path, 'rsa', '-in', pem_tempfile_name, '-inform', 'PEM', '-outform', 'DER'], stderr_to_temp=True)
        logger.debug2("PEM Format private key: " + hexdump(der_privkey))
    except Exception:
        # Was a bare except: narrowed so SystemExit/KeyboardInterrupt propagate.
        logger.critical("privkey_pem_to_der: OPENSSL Could not convert PEM key to DER key")
    finally:
        # Always remove the temp file holding key material.
        os.unlink(pem_tempfile_name)
        logger.debug("Deleting temporary file: " + pem_tempfile_name)
    return der_privkey
def cert_der_to_pem(der_certificate):
    """ Convert binary DER format certificate into base64 coded ASCII PEM format

    input:
        der_certificate: String containing binary certificate
    output:
        pem_certificate: String containing base64 PEM certificate, or the
            empty string if the conversion fails.
    """
    pem_certificate = ''
    der_tempfile_name = utility_functions.store_data_to_temp_file(der_certificate)
    try:
        pem_certificate = utility_functions.system_command_logged([openssl_binary_path, 'x509', '-in', der_tempfile_name, '-inform', 'DER', '-outform', 'PEM'], stderr_to_temp=True)
        logger.debug2("PEM Format certificate: " + pem_certificate)
    except Exception:
        # Was a bare except: narrowed so SystemExit/KeyboardInterrupt propagate.
        logger.critical("cert_der_to_pem: OPENSSL could not convert DER cert to PEM")
    finally:
        # Always remove the temporary certificate file.
        os.unlink(der_tempfile_name)
        logger.debug("Deleting temporary file: " + der_tempfile_name)
    return pem_certificate
def cert_pem_to_der(pem_certificate):
    """ Convert PEM format certificate into DER format

    input:
        pem_certificate: String containing base64 PEM certificate
    output:
        der_certificate: String containing binary certificate, or the empty
            string if the conversion fails.
    """
    der_certificate = ''
    pem_tempfile_name = utility_functions.store_data_to_temp_file(pem_certificate)
    try:
        der_certificate = utility_functions.system_command_logged([openssl_binary_path, 'x509', '-in', pem_tempfile_name, '-inform', 'PEM', '-outform', 'DER'], stderr_to_temp=True)
        logger.debug2("PEM Format certificate: " + hexdump(der_certificate))
    except Exception:
        # Was a bare except: narrowed so SystemExit/KeyboardInterrupt propagate.
        logger.critical("cert_pem_to_der: OPENSSL could not convert PEM cert to DER")
    finally:
        # Always remove the temporary certificate file.
        os.unlink(pem_tempfile_name)
        logger.debug("Deleting temporary file: " + pem_tempfile_name)
    return der_certificate
def encrypt_with_private_key(message, private_key, use_pss=False):
    """Sign (RSA "encrypt") a message with a private key via openssl pkeyutl.

    input:
        message: string with the data to sign
        private_key: string with the private key
        use_pss: when True, use RSA-PSS padding instead of PKCS
    output:
        encrypted_message: string with the signature, '' on failure
    """
    result = ''
    key_file = utility_functions.store_data_to_temp_file(private_key)
    msg_file = utility_functions.store_data_to_temp_file(message)
    cmd = [openssl_binary_path, "pkeyutl", "-sign", "-inkey", key_file, '-in', msg_file, ]
    if use_pss:
        # PSS needs explicit padding-mode, salt-length and digest options.
        logger.debug("Signing with PSS")
        cmd.extend(["-pkeyopt", "rsa_padding_mode:pss", "-pkeyopt", "rsa_pss_saltlen:"+PSS_SALT_LENGTH, "-pkeyopt", "digest:"+PSS_HASH_ALGORITHM])
    try:
        result = utility_functions.system_command_logged(cmd, stderr_to_temp=True)
    except:
        logger.critical("encrypt_with_private_key: OPENSSL could not perform RSA sign operation")
    finally:
        os.unlink(key_file)
        os.unlink(msg_file)
    return result
def _decrypt_with_public_key_pss(encrypted_message, public_key, image_hash):
    """ Verify a PSS signature against image_hash with a public key.
    input:
        encrypted_message = String representation of the signature blob
        public_key = String representation of public key
        image_hash = message to verify against
    output:
        message = image_hash when openssl reports the signature verified,
            '' otherwise
    """
    message = ''
    public_key_tempfile_name = utility_functions.store_data_to_temp_file(public_key)
    encrypted_message_tempfile_name = utility_functions.store_data_to_temp_file(encrypted_message)
    image_hash_tempfile_name = utility_functions.store_data_to_temp_file(image_hash)
    command_list = [openssl_binary_path, 'pkeyutl', '-verify', '-pubin', '-inkey', public_key_tempfile_name, '-in', image_hash_tempfile_name, "-sigfile", encrypted_message_tempfile_name,"-pkeyopt", "rsa_padding_mode:pss", "-pkeyopt", "rsa_pss_saltlen:"+PSS_SALT_LENGTH, "-pkeyopt", "digest:"+PSS_HASH_ALGORITHM]
    logger.debug("Attempt to verify cert with PSS")
    # execute command
    try:
        process = subprocess.Popen(command_list, stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        # communicate() drains both pipes before waiting for exit; the old
        # wait()-then-read sequence could deadlock if a pipe filled up.
        output, _ = process.communicate()
        if output.strip() == 'Signature Verified Successfully':
            message = image_hash
    except:
        logger.critical("decrypt_with_public_key_pss: OPENSSL could not decrypt with public key")
    finally:
        os.unlink(public_key_tempfile_name)
        os.unlink(encrypted_message_tempfile_name)
        # The image-hash temp file was previously never deleted (leak).
        os.unlink(image_hash_tempfile_name)
    return message
def _decrypt_with_public_key_pkcs(encrypted_message, public_key):
    # Recover the PKCS#1 v1.5 signed payload with openssl rsautl.
    # Returns '' when openssl fails; temp files are always cleaned up.
    recovered = ''
    pub_file = utility_functions.store_data_to_temp_file(public_key)
    sig_file = utility_functions.store_data_to_temp_file(encrypted_message)
    cmd = [openssl_binary_path, 'rsautl', '-inkey', pub_file, '-pubin', '-in', sig_file]
    try:
        recovered = utility_functions.system_command_logged(cmd, stderr_to_temp=True)
    except:
        logger.critical("decrypt_with_public_key_pkcs: OPENSSL could not decrypt with public key")
    finally:
        os.unlink(pub_file)
        os.unlink(sig_file)
    return recovered
def decrypt_with_public_key(encrypted_message, public_key, image_hash=None, use_pss=False):
    """ Decrypt an encrypted message with a public key
    input:
        encrypted_message = String representation of encrypted message
        public_key = String representation of public key
        image_hash = message to verify against (required when use_pss is True)
        use_pss: Use PSS instead of PKCS
    output:
        message = String representing decrypted message
    raises:
        RuntimeError when use_pss is True but no image_hash is supplied
    """
    # PSS verification compares the signature against the supplied hash,
    # so the hash is mandatory in PSS mode.
    if not image_hash and use_pss:
        # Fixed wording: the old message read "Can decrypt ...", the
        # opposite of what it meant.
        raise RuntimeError('Cannot decrypt without image_hash while using PSS')
    if use_pss:
        return _decrypt_with_public_key_pss(encrypted_message, public_key, image_hash)
    else:
        return _decrypt_with_public_key_pkcs(encrypted_message, public_key)
def decrypt_with_private_der_key(encrypted_message, private_key):
    """ Decrypt an encrypted message with a private key
    input:
        encrypted_message = String representation of encrypted message
        private_key = String representation of private key (DER format)
    output:
        message = String representing decrypted message, '' on failure
    """
    message = ''
    private_key_tempfile_name = utility_functions.store_data_to_temp_file(private_key)
    encrypted_message_tempfile_name = utility_functions.store_data_to_temp_file(encrypted_message)
    # NOTE: a third, empty "plaintext" temp file used to be created and
    # immediately deleted here; it was never referenced and was removed.
    command_list = [openssl_binary_path, 'rsautl', '-decrypt', '-inkey', private_key_tempfile_name, '-in', encrypted_message_tempfile_name, '-keyform', 'DER']
    try:
        message = utility_functions.system_command_logged(command_list, stderr_to_temp=True)
    except:
        # Failure is non-fatal by design: callers probe keys and expect ''.
        logger.debug2("decrypt_with_private_der_key: OPENSSL could not decrypt with private key")
    finally:
        os.unlink(private_key_tempfile_name)
        os.unlink(encrypted_message_tempfile_name)
    return message
def create_root_certficate(root_cert_params, root_key_pair, days, configfile, serial_num, use_pss):
    """Create a self-signed root CA certificate.

    Thin wrapper over _create_self_signed_certificate.

    input:
        root_cert_params: dict of subject fields, e.g.
            {'C': "US", 'ST': "California", 'L': "San Diego", 'O': "ASIC",
             'CN': "Qualcomm",
             'OU': [r"General Use Test Key (for testing only)",
                    r"CDMA Technologies"]}
        root_key_pair: {"public_key": ..., "private_key": ...} or None to
            have a key pair generated
        days: validity period in days
        configfile: openssl configuration file
        serial_num: certificate serial number
        use_pss: sign with RSA-PSS padding when True
    output:
        (certificate, key_pair) tuple
    """
    return _create_self_signed_certificate(root_cert_params, root_key_pair,
                                           days, configfile, serial_num, use_pss)
def create_certificate(certificate_params, certificate_key_pair, CACertificate, CA_key_pair, days, configfile, serial_num, extfile_name, use_pss=False):
    """Generate a certificate signed by the given CA.

    input:
        certificate_params: dict of subject fields, e.g.
            {'C': "US", 'ST': "California", 'L': "San Diego", 'O': "ASIC",
             'CN': "Qualcomm", 'OU': [...]}; list values yield repeated
            subject fields
        certificate_key_pair: {"public_key": ..., "private_key": ...} or
            None to have a key pair generated
        CACertificate: PEM text of the signing CA certificate
        CA_key_pair: key pair dict of the CA
        days: validity period in days
        configfile: openssl configuration file
        serial_num: certificate serial number
        extfile_name: openssl extensions file
        use_pss: sign with RSA-PSS padding when True
    output:
        (certificate, certificate_key_pair) tuple — the PEM certificate and
        the key pair belonging to it
    """
    # Build a CSR first (this also generates a key pair when none was given),
    # then have the CA sign it.
    csr, csr_key_pair = _create_sigining_request(certificate_params, certificate_key_pair, days, configfile)
    logger.debug2(csr + repr(csr_key_pair))
    signed_cert, _unused_ca_pair = _sign_csr_with_CA_certificate(csr, CACertificate, CA_key_pair, days, serial_num, extfile_name, use_pss=use_pss)
    return (signed_cert, csr_key_pair)
def get_public_key_from_cert_chain(certificate_chain_list):
    """Validate a certificate chain, then return the leaf cert's public key.

    Raises RuntimeError when the chain does not verify.
    """
    if not verify_certificate_chain(certificate_chain_list):
        logger.critical("get_public_key_from_cert_chain: The certificate chain could not be verified!")
        raise RuntimeError("The certificate chain could not be verified!")
    # Chain is ordered leaf-first; extract from the first entry.
    return _extract_public_key_from_certificate(certificate_chain_list[0])
def verify_certificate_chain(certificate_chain):
    """Check that a PEM certificate chain verifies with 'openssl verify'.

    input:
        certificate_chain: [cert1, cert2, cert3] list of PEM certificates;
            the last one is assumed to be the Root CA certificate
    output:
        True when the leaf certificate verifies against the rest of the
        chain, False otherwise
    raises:
        RuntimeError when the openssl command itself fails
    """
    ca_bundle_file = utility_functions.store_data_to_temp_file(
        _create_CAfile_contents_from_cert_chain(certificate_chain))
    leaf_cert_file = utility_functions.store_data_to_temp_file(certificate_chain[0])
    try:
        verify_output = utility_functions.system_command_logged([openssl_binary_path, 'verify', '-CAfile', ca_bundle_file, leaf_cert_file])
    except:
        logger.critical("verify_certificate_chain: OPENSSL could not verify cert chain")
        raise RuntimeError("verify_certificate_chain: OPENSSL could not verify cert chain")
    finally:
        os.unlink(ca_bundle_file)
        os.unlink(leaf_cert_file)
    logger.debug("OpenSSL verify command output: " + verify_output)
    logger.debug("Deleting temporary files: " + ca_bundle_file + ", " + leaf_cert_file)
    # openssl prints "<filename>: OK" on success.
    if verify_output.rstrip() == leaf_cert_file + ": OK":
        logger.debug("The certificate chain is verified")
        return True
    logger.debug("The certificate chain is not verified")
    return False
def get_der_certificate_text(der_certificate_path):
    """Dump a DER certificate's human-readable text via 'openssl x509'.

    input:
        der_certificate_path: path to a DER-encoded certificate file
    output:
        certificate_text: string with the openssl text dump
    raises:
        RuntimeError when openssl writes anything to stderr
    """
    cmd = [openssl_binary_path, 'x509', '-in', der_certificate_path, '-inform', 'DER', '-noout', '-text', '-certopt', 'ext_dump']
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    certificate_text, error = proc.communicate()
    if error:
        raise RuntimeError('Openssl command failed to get cert text.\n' + error)
    return certificate_text
def get_certificate_text_from_binary(certificate_blob):
    """ Return the openssl text dump of a certificate blob
    input:
        certificate_blob: PEM/DER format BLOB of certificate
    output:
        certificate_text: String representation certificate contents
    """
    # Accept either encoding: convert PEM input to DER first.
    if 'BEGIN CERTIFICATE' in certificate_blob:
        certificate_der = cert_pem_to_der(certificate_blob)
    else:
        certificate_der = certificate_blob
    der_certificate_path = utility_functions.store_data_to_temp_file(certificate_der)
    try:
        certificate_text = get_der_certificate_text(der_certificate_path)
    finally:
        # Delete the temp file even when text extraction raises, so it
        # never leaks.
        os.unlink(der_certificate_path)
    return certificate_text
def get_asn1_text(pem_certificate_path):
    """Run 'openssl asn1parse' on a PEM certificate and return its output.

    input:
        pem_certificate_path: path to a PEM-encoded certificate file
    output:
        certificate_text: string with the asn1parse dump
    """
    # asn1parse fails on DER input with:
    # 110140:error:0D07207B:asn1 encoding routines:ASN1_get_object:header too long:.\crypto\asn1\asn1_lib.c:150:
    # so callers must supply PEM.
    cmd = [openssl_binary_path, 'asn1parse', '-in', pem_certificate_path, '-inform', 'PEM']
    return subprocess.check_output(cmd)
def get_asn1_text_from_binary(certificate_blob):
    """ Return the asn1parse dump of a certificate blob
    input:
        certificate_blob: DER/PEM format BLOB of certificate
    output:
        certificate_text: String representation asn1 contents
    """
    # asn1parse needs PEM input; convert DER blobs first.
    if 'BEGIN CERTIFICATE' not in certificate_blob:
        certificate_pem = cert_der_to_pem(certificate_blob)
    else:
        certificate_pem = certificate_blob
    certificate_path = utility_functions.store_data_to_temp_file(certificate_pem)
    try:
        certificate_text = get_asn1_text(certificate_path)
    finally:
        # Delete the temp file even when asn1parse raises, so it never leaks.
        os.unlink(certificate_path)
    return certificate_text
def get_hmacparams_from_certificate_chain(certificate_chain_blob):
    """ Return the HMAC parameters encoded in a certificate's subject
    input:
        certificate_chain_blob: DER format BLOB of certificate chain
    output:
        hmac_params: HmacParams object built from the SW_ID/HW_ID fields
    """
    der_certificate_path = utility_functions.store_data_to_temp_file(certificate_chain_blob)
    try:
        certificate_text = get_der_certificate_text(der_certificate_path)
        certificate_subject_dictionary = _extract_certificate_subject_params(certificate_text)
        hmac_params = _get_hmacparams_from_certificate_subject_dictionary(certificate_subject_dictionary)
    finally:
        # Delete the temp file even when extraction/parsing raises, so it
        # never leaks.
        os.unlink(der_certificate_path)
    return hmac_params
def _get_hmacparams_from_certificate_subject_dictionary(certificate_subject_dictionary):
    """ Return the HMAC params from the certificate subject dictionary
    input:
        certificate_subject_dictionary = dictionary of subject params from certificate
    output:
        hmac_params = HmacParams built from the SW_ID/HW_ID OU fields
    raises:
        RuntimeError when the OU list or the SW_ID/HW_ID fields are missing
    """
    sw_id_re = re.compile(r'01 ([0-9A-F]{16}) SW_ID')
    hw_id_re = re.compile(r'02 ([0-9A-F]{16}) HW_ID')
    # Fail loudly when the subject has no usable OU list: the previous code
    # silently skipped the whole body and then crashed with a NameError on
    # sw_id_element.
    if 'OU' not in certificate_subject_dictionary or \
            type(certificate_subject_dictionary['OU']) != list or \
            len(certificate_subject_dictionary['OU']) < 2:
        logger.critical("Error in certificate subject. OU fields not found. Not a valid certificate. Exiting")
        raise RuntimeError("Error in certificate subject. OU fields not found. Not a valid certificate. Exiting")
    # Sorting orders the numerically-prefixed OU entries, putting
    # '01 ... SW_ID' and '02 ... HW_ID' at indexes 0 and 1.
    certificate_subject_dictionary['OU'].sort()
    sw_id_element = sw_id_re.match(certificate_subject_dictionary['OU'][0])
    hw_id_element = hw_id_re.match(certificate_subject_dictionary['OU'][1])
    if sw_id_element == None:
        logger.critical("Error in certificate subject. SW_ID field not found. Not a valid certificate. Exiting")
        raise RuntimeError("Error in certificate subject. SW_ID field not found. Not a valid certificate. Exiting")
    elif hw_id_element == None:
        logger.critical("Error in certificate subject. HW_ID field not found. Not a valid certificate. Exiting")
        raise RuntimeError("Error in certificate subject. HW_ID field not found. Not a valid certificate. Exiting")
    else:
        logger.debug("Valid certificate: Found SW_ID and HW_ID")
        sw_id_text = sw_id_element.group(1)
        hw_id_text = hw_id_element.group(1)
        logger.debug('SW_ID = ' + sw_id_text)
        logger.debug('HW_ID = ' + hw_id_text)
        hw_id_int = int(hw_id_text, 16)
        sw_id_int = int(sw_id_text, 16)
        hmac_params = HmacParams(hw_id_int, sw_id_int)
    return hmac_params
def get_hash_algorithm_from_certicate_chain(certificate_chain_blob):
    """ Return the hash algorithm name encoded in a certificate's subject
    input:
        certificate_chain_blob: DER format BLOB of certificate chain
    output:
        hash_algo: 'sha1' or 'sha256' (or None when no OU list is present)
    """
    der_certificate_path = utility_functions.store_data_to_temp_file(certificate_chain_blob)
    try:
        certificate_text = get_der_certificate_text(der_certificate_path)
        certificate_subject_dictionary = _extract_certificate_subject_params(certificate_text)
        hash_algo = _get_hash_algorithm_from_certificate_subject_dictionary(certificate_subject_dictionary)
    finally:
        # Delete the temp file even when extraction/parsing raises, so it
        # never leaks.
        os.unlink(der_certificate_path)
    return hash_algo
def _get_hash_algorithm_from_certificate_subject_dictionary(certificate_subject_dictionary):
""" Return a string of SHA algorithm from the certificate subject dictionary
input:
certificate_subject_dictionary = dictionary of subject params from certificate
output:
hash_algo: sha1 or sha256
"""
sha_algo = None
if 'OU' in certificate_subject_dictionary.keys() and type(certificate_subject_dictionary['OU']) == list:
certificate_subject_dictionary['OU'].sort()
import fnmatch
sha_algo_field = fnmatch.filter(certificate_subject_dictionary['OU'], '07*SHA*')
if sha_algo_field is None:
raise RuntimeError("Error in certificate subject. SHA algorithm (OU 7) field not found. Not a valid certificate. Exiting")
else:
sha_algo_field = sha_algo_field[0]
logger.debug('SHA Algorithm = ' + sha_algo_field)
if sha_algo_field == SHA1_OU_STRING:
sha_algo = 'sha1'
elif sha_algo_field == SHA256_OU_STRING:
sha_algo = 'sha256'
else:
raise RuntimeError("Unknown SHA algorithm string: " + sha_algo_field)
return sha_algo
def _extract_public_key_from_certificate(certificate):
    """ Pulls out public key stored in certificate.
    input:
        certificate = String representation of PEM certificate
    output:
        public_key = String representation of public key in certificate
    raises:
        re-raises the openssl failure (the old code unlinked the temp file
        twice on that path, masking the real error with an OSError, and
        left public_key unbound)
    """
    certificate_tempfile_name = utility_functions.store_data_to_temp_file(certificate)
    command_list = [openssl_binary_path, 'x509', '-in', certificate_tempfile_name, '-pubkey', '-noout', '-inform', 'PEM']
    try:
        public_key = utility_functions.system_command_logged(command_list)
    except:
        logger.critical("_extract_public_key_from_certificate: OPENSSL could not extract public key from cert")
        raise
    finally:
        # Single cleanup point: runs exactly once on both paths.
        os.unlink(certificate_tempfile_name)
    return public_key
def split_certificate_blob_into_certs(certificate_blob):
    """Split a binary certificate-chain blob into individual certificates.

    input:
        certificate_blob: string containing the entire certificate blob
    output:
        [cert1, cert2, ...] list of the individual certificates found

    The pattern '0\\x82.{2}0\\x82' marks the start of each DER certificate;
    the blob is cut at every occurrence.
    """
    cert_start_pattern = r'0\x82.{2}0\x82'
    starts = [m.start() for m in re.finditer(cert_start_pattern, certificate_blob, re.DOTALL)]
    logger.debug("Offsets:" + repr(starts))
    pieces = []
    for idx, begin in enumerate(starts):
        # Each cert runs up to the next marker; the last one to the end.
        end = starts[idx + 1] if idx + 1 < len(starts) else None
        pieces.append(certificate_blob[begin:end])
    return pieces
def _create_CAfile_contents_from_cert_chain(certificate_chain):
""" Creates a CAfile that is used as input to the openssl verify command.
input:
certificate_chain: [cert1,cert2,cert3,...]
Certificate list of certs in *PEM format*.
output:
CAfile_contents: String representing the concatenation of cert2 to certn
The openssl verify function needs a CAfile to verify the certchain. This file contains
a chain of all the higher level certs that issue the lowest level cert1.
"""
CAfile_contents = "\n".join(certificate_chain[-(len(certificate_chain) - 1):])
return CAfile_contents
def _create_list_of_CAfile_contents_from_cert_chain(certificate_chain,):
""" Creates list of CAfile that is used as input to the openssl verify command.
input:
certificate_chain: [cert1,cert2,cert3,...]
Certificate list of certs in *PEM format*.
output:
CAfile_contents: String representing the concatenation of cert2 and certi
The openssl verify function needs a CAfile to verify the certchain. This file contains
a chain of all the higher level certs that issue the lowest level cert1.
"""
CAfile_list = []
if len(certificate_chain) > 3:
# mutliple root cert case
pass
return CAfile_list
def _get_subject_string_from_certificate_params(certificate_params):
    """Flatten a subject-parameter dict into an openssl '-subj' string.

    List-valued entries (e.g. multiple OU fields) become repeated
    'KEY=value' segments. The result is '/'-prefixed and '/'-joined, e.g.
    '/C=US/O=ASIC/OU=first/OU=second'.
    """
    segments = []
    for field in certificate_params:
        value = certificate_params[field]
        if type(value) == list:
            for single in value:
                segments.append(field + '=' + single)
        else:
            segments.append(field + "=" + value)
    logger.debug("Subject List = " + repr(segments))
    subject_string = r'/' + r"/".join(segments)
    logger.debug("Subject String = " + subject_string)
    return subject_string
def _execute_openssl_certificate_command(command_list, key_pair, key_tempfile_name, shell=False):
    """Run an openssl req/x509 command and collect the resulting key pair.

    input:
        command_list: argv list for openssl
        key_pair: existing {"public_key": ..., "private_key": ...} dict, or
            None when openssl was asked to write a fresh private key to
            key_tempfile_name
        key_tempfile_name: file holding the private key; always deleted
        shell: passed through to the command runner
    output:
        (certificate_request, key_pair) tuple
    raises:
        RuntimeError when openssl exits non-zero
    """
    try:
        cert_output = utility_functions.system_command_logged(command_list, stderr_to_temp=True, shell=shell)
    except subprocess.CalledProcessError as e:
        logger.critical("call to OpenSSL binary returned an error!: retval = " + str(e.returncode) + " Command = " + str(e.cmd))
        raise RuntimeError("call to OpenSSL binary returned an error!: retval = " + str(e.returncode) +
                           " \nCommand = " + ' '.join(e.cmd))
    logger.debug("Generated Output of openssl certificate command: " + cert_output)
    if key_pair is None:
        # openssl generated a new key: read it back and derive the public half.
        with open(key_tempfile_name, "rb") as key_file:
            private_key = key_file.read()
        key_pair = {'public_key': get_public_key_from_private_key(private_key),
                    'private_key': private_key}
    os.unlink(key_tempfile_name)
    return (cert_output, key_pair)
def _create_self_signed_certificate(certificate_params, key_pair, days, configfile, serial_num, use_pss):
    """Create a self-signed certificate with 'openssl req -x509'.

    input:
        certificate_params: dict of subject fields, e.g.
            {'C': "US", 'ST': "California", 'L': "San Diego", 'O': "ASIC",
             'CN': "Qualcomm"}
        key_pair: {"public_key": ..., "private_key": ...} or None to have
            openssl generate a fresh RSA 2048-bit key
        days: validity period in days
        configfile: openssl configuration file
        serial_num: certificate serial number
        use_pss: sign with RSA-PSS padding when True
    output:
        (certificate, key_pair) tuple; certificate is PEM text
    """
    subject = _get_subject_string_from_certificate_params(certificate_params)
    if key_pair is None:
        logger.debug("No key pair provided, will generate a key RSA 2048 bits")
        key_file = "certificate.key"
        cmd = [openssl_binary_path, "req", "-new", "-x509", "-keyout", key_file, "-subj", subject, "-newkey", "rsa:2048", "-days", str(days), "-config", configfile, "-set_serial", str(serial_num), "-sha256"]
    else:
        key_file = utility_functions.store_data_to_temp_file(key_pair['private_key'])
        cmd = [openssl_binary_path, "req", "-new", "-key", key_file, "-x509", "-subj", subject, "-days", str(days), "-config", configfile, "-set_serial", str(serial_num), "-sha256"]
    if use_pss:
        # Append the RSA-PSS signing options.
        cmd += ("-sigopt rsa_padding_mode:pss -sigopt rsa_pss_saltlen:"+PSS_SALT_LENGTH+" -sigopt digest:"+PSS_HASH_ALGORITHM).split()
    return _execute_openssl_certificate_command(cmd, key_pair, key_file)
def _sign_csr_with_CA_certificate(certificate_signing_request, CA_certificate, CA_key_pair, days, serial_num, extfile_name, use_pss=False):
    """Sign a CSR with a higher-level CA certificate.

    input:
        certificate_signing_request: PEM CSR text
        CA_certificate: PEM text of the issuing CA certificate
        CA_key_pair: {"public_key": ..., "private_key": ...} of the CA
        days: validity period in days
        serial_num: serial number for the new certificate
        extfile_name: openssl extensions file to apply
        use_pss: sign with RSA-PSS padding when True
    output:
        (CA_signed_certificate, CA_key_pair) tuple; cert is PEM text
    """
    ca_cert_file = utility_functions.store_data_to_temp_file(CA_certificate)
    ca_key_file = utility_functions.store_data_to_temp_file(CA_key_pair['private_key'])
    csr_file = utility_functions.store_data_to_temp_file(certificate_signing_request)
    command_list = [openssl_binary_path, "x509", "-req", "-in", csr_file, "-CAkey",
                    ca_key_file, "-CA", ca_cert_file, "-days", str(days),
                    "-set_serial", str(serial_num), "-extfile", extfile_name, "-sha256"]
    if use_pss:
        # Append the RSA-PSS signing options.
        command_list += ("-sigopt rsa_padding_mode:pss -sigopt rsa_pss_saltlen:"+PSS_SALT_LENGTH+ " -sigopt digest:"+PSS_HASH_ALGORITHM).split()
    logger.debug("Command_list = " + repr(command_list))
    signed_cert = utility_functions.system_command_logged(command_list, stderr_to_temp=True)
    logger.debug("Generated Output of openssl certificate command: " + signed_cert)
    for temp_path in (ca_cert_file, ca_key_file, csr_file):
        os.unlink(temp_path)
    return (signed_cert, CA_key_pair)
def _create_sigining_request(certificate_params, key_pair, days=7300, configfile="opensslroot.cfg"):
    """Build a CSR (Certificate Signing Request).

    input:
        certificate_params: dict of subject fields, e.g.
            {'C': "US", 'ST': "California", 'L': "San Diego", 'O': "ASIC",
             'CN': "Qualcomm"}; list values produce repeated fields
        key_pair: {"public_key": ..., "private_key": ...} or None to have
            openssl generate a fresh RSA 2048-bit key
        days: validity period in days
        configfile: openssl configuration file
    output:
        (certificate_request, key_pair) tuple; CSR is PEM text
    """
    subject = _get_subject_string_from_certificate_params(certificate_params)
    if key_pair is None:
        logger.debug("No key pair provided, will generate a key RSA 2048 bits")
        key_file = "certificate.key"
        cmd = [openssl_binary_path, "req", "-new", "-nodes", "-keyout", key_file, "-subj", subject, "-newkey", "rsa:2048", "-days", str(days), "-config", configfile]
    else:
        key_file = utility_functions.store_data_to_temp_file(key_pair['private_key'])
        cmd = [openssl_binary_path, "req", "-new", "-key", key_file, "-subj", subject, "-days", str(days), "-config", configfile]
    logger.debug("Command List: " + repr(cmd))
    return _execute_openssl_certificate_command(cmd, key_pair, key_file)
def get_public_key_from_private_key(private_key):
    """ Extracts public key from provided private key
    input:
        private_key: String representation of private key
    output:
        public key: String representation of public key
    raises:
        RuntimeError when the openssl command fails
    """
    privkey_tempfile = utility_functions.store_data_to_temp_file(private_key)
    try:
        public_key = utility_functions.system_command_logged([openssl_binary_path, "rsa", "-in", privkey_tempfile, "-pubout"], stderr_to_temp=True)
    except subprocess.CalledProcessError as e:
        logger.critical("call to OpenSSL binary returned an error!: retval = " + str(e.returncode) + " Command = " + str(e.cmd))
        raise RuntimeError("call to OpenSSL binary returned an error!: retval = " + str(e.returncode) + " Command = " + str(e.cmd))
    finally:
        # Previously the raise path skipped cleanup and leaked the key file;
        # unlink in finally so it runs on both paths.
        os.unlink(privkey_tempfile)
    return public_key
def _extract_certificate_subject_params(certificate_text):
    """Parse the 'Subject:' line out of an openssl certificate text dump.

    input:
        certificate_text: String representation of Certificate
    output:
        dict of subject parameters (multi-valued fields become lists)
    """
    subject_line = re.search(r'Subject:.*\n?', certificate_text).group(0)
    # Drop the 'Subject' label; everything after the first colon is the
    # comma-separated parameter list.
    param_csv = subject_line.split(':')[1]
    return utility_functions.normalize_param_list_into_dict(param_csv.split(','))
def cbc_cts_encrypt_binary(binary_blob, hex_preexisting_128_bit_key, hex_preexisting_iv):
    """AES-128 CBC-CTS encrypt a blob with the external crypto tool.

    input:
        binary_blob: data to encrypt
        hex_preexisting_128_bit_key: hex 128-bit key, or None to generate one
        hex_preexisting_iv: hex IV, or None to generate one
    output:
        (encrypted_binary, encryption_key, image_iv) tuple; key and IV are
        hex strings
    """
    key = hex_preexisting_128_bit_key
    if key is None:
        key = binascii.hexlify(os.urandom(16))
    iv = hex_preexisting_iv
    if iv is None:
        iv = binascii.hexlify(os.urandom(16))
    plain_file = utility_functions.store_data_to_temp_file(binary_blob)
    cipher_file = utility_functions.store_data_to_temp_file('')
    cmd = [cbc_crypto_path, "--input-file", plain_file, '--key', key, '--iv', iv, '--output', cipher_file, '--operation=encrypt']
    utility_functions.system_command_logged(cmd)
    with open(cipher_file, 'rb') as cipher_handle:
        ciphertext = cipher_handle.read()
    os.unlink(plain_file)
    os.unlink(cipher_file)
    return (ciphertext, key, iv)
def cbc_cts_decrypt_binary(encrypted_blob, hex_preexisting_128_bit_key, hex_preexisting_iv):
    """AES-128 CBC-CTS decrypt a blob with the external crypto tool.

    input:
        encrypted_blob: data to decrypt
        hex_preexisting_128_bit_key: hex 128-bit key (mandatory)
        hex_preexisting_iv: hex IV (mandatory)
    output:
        plaintext_binary: decrypted data
    raises:
        RuntimeError when the key or IV is missing
    """
    if hex_preexisting_128_bit_key is None:
        raise RuntimeError('Key must be supplied')
    if hex_preexisting_iv is None:
        raise RuntimeError('IV must be supplied')
    key = hex_preexisting_128_bit_key
    iv = hex_preexisting_iv
    cipher_file = utility_functions.store_data_to_temp_file(encrypted_blob)
    plain_file = utility_functions.store_data_to_temp_file('')
    cmd = [cbc_crypto_path, "--input-file", cipher_file, '--key', key, '--iv', iv, '--output', plain_file, '--operation=decrypt']
    utility_functions.system_command_logged(cmd)
    with open(plain_file, 'rb') as plain_handle:
        plaintext = plain_handle.read()
    os.unlink(cipher_file)
    os.unlink(plain_file)
    return plaintext
def ccm_encrypt_binary(binary_blob, hex_preexisting_128_bit_key, hex_preexisting_104_bit_iv, hex_preexisting_aad):
    """AES-CCM encrypt a blob with the external crypto tool.

    input:
        binary_blob: data to encrypt
        hex_preexisting_128_bit_key: hex 128-bit key, or None to generate one
        hex_preexisting_104_bit_iv: hex 104-bit (13-byte) IV, or None to
            generate one
        hex_preexisting_aad: hex Additional Authentication Data (mandatory)
    output:
        (encrypted_binary, encryption_key, image_iv, hex_preexisting_aad)
        tuple
    raises:
        RuntimeError when no AAD is supplied
    """
    if hex_preexisting_aad is None:
        raise RuntimeError('AAD must be supplied')
    key = hex_preexisting_128_bit_key
    if key is None:
        key = binascii.hexlify(os.urandom(16))
    iv = hex_preexisting_104_bit_iv
    if iv is None:
        # CCM here uses a 13-byte (104-bit) nonce.
        iv = binascii.hexlify(os.urandom(13))
    plain_file = utility_functions.store_data_to_temp_file(binary_blob)
    cipher_file = utility_functions.store_data_to_temp_file('')
    cmd = [ccm_crypto_path, "--input-file", plain_file, '--key', key, '--iv', iv, '--output', cipher_file, '--operation=encrypt', '--aad', hex_preexisting_aad]
    utility_functions.system_command_logged(cmd)
    with open(cipher_file, 'rb') as cipher_handle:
        ciphertext = cipher_handle.read()
    os.unlink(plain_file)
    os.unlink(cipher_file)
    return (ciphertext, key, iv, hex_preexisting_aad)
def ccm_decrypt_binary(encrypted_blob, hex_preexisting_128_bit_key, hex_preexisting_104_bit_iv, hex_preexisting_aad):
    """AES-CCM decrypt a blob with the external crypto tool.

    input:
        encrypted_blob: data to decrypt
        hex_preexisting_128_bit_key: hex 128-bit key (mandatory)
        hex_preexisting_104_bit_iv: hex 104-bit IV (mandatory)
        hex_preexisting_aad: hex Additional Authentication Data (mandatory)
    output:
        plaintext_binary: decrypted data
    raises:
        RuntimeError when key/IV/AAD is missing; CalledProcessError when
        the tool reports an authentication failure
    """
    if hex_preexisting_aad is None:
        raise RuntimeError('AAD must be supplied')
    if hex_preexisting_128_bit_key is None:
        raise RuntimeError('Key must be supplied')
    if hex_preexisting_104_bit_iv is None:
        raise RuntimeError('IV must be supplied')
    key = hex_preexisting_128_bit_key
    iv = hex_preexisting_104_bit_iv
    cipher_file = utility_functions.store_data_to_temp_file(encrypted_blob)
    plain_file = utility_functions.store_data_to_temp_file('')
    cmd = [ccm_crypto_path, "--input-file", cipher_file, '--key', key, '--iv', iv, '--output', plain_file, '--operation=decrypt', '--aad', hex_preexisting_aad]
    tool_output = utility_functions.system_command_logged(cmd)
    # The crypto tool reports authentication failure on stdout rather than
    # through its exit code, so surface it as a CalledProcessError.
    if "Caught HashVerificationFailed..." in tool_output:
        raise subprocess.CalledProcessError(-1, tool_output)
    with open(plain_file, 'rb') as plain_handle:
        plaintext = plain_handle.read()
    os.unlink(cipher_file)
    os.unlink(plain_file)
    return plaintext
def cbc_encrypt_binary(binary_blob, hex_preexisting_128_bit_key, hex_preexisting_iv):
    """ Function to encrypt binary with a CBC 128 bit cipher.
    input:
        binary_blob: Binary blob to encrypt
        hex_preexisting_128_bit_key: hex representation of 128bit key | None,
                                     if None, the key is generated
        hex_preexisting_iv: hex representation of image IV | None,
                            if None, the IV is generated
    output:
        (encrypted_binary, encryption_key, image_iv): Tuple with the encrypted binary, the key, and the IV
    """
    # Identity comparison with None per PEP 8 (was "== None").
    if hex_preexisting_128_bit_key is None:
        # Generate a random 128-bit key and use its hex form.
        # NOTE(review): binascii.hexlify returns bytes on Python 3 -- confirm
        # downstream consumers accept bytes as well as str.
        encryption_key = binascii.hexlify(os.urandom(16))
    else:
        encryption_key = hex_preexisting_128_bit_key
    if hex_preexisting_iv is None:
        image_iv = binascii.hexlify(os.urandom(16))
    else:
        image_iv = hex_preexisting_iv
    # openssl works on files, so stage the plaintext in a temp file.
    binary_blob_file = utility_functions.store_data_to_temp_file(binary_blob)
    encrypted_image_file_name = utility_functions.store_data_to_temp_file('')
    command_list = [openssl_binary_path, "enc", "-aes-128-cbc", "-in", binary_blob_file,
                    "-K", encryption_key, "-iv", image_iv,
                    "-out", encrypted_image_file_name, "-nopad"]
    utility_functions.system_command_logged(command_list)
    try:
        with open(encrypted_image_file_name, 'rb') as encrypted_output_file:
            encrypted_binary = encrypted_output_file.read()
    finally:
        # BUGFIX: clean up the temp files even if reading the output fails.
        os.unlink(binary_blob_file)
        os.unlink(encrypted_image_file_name)
    return (encrypted_binary, encryption_key, image_iv)
def cbc_decrypt_binary(encrypted_blob, hex_preexisting_128_bit_key, hex_preexisting_iv):
    """ Function to decrypt a CBC encrypted binary.
    input:
        encrypted_blob: Encrypted Binary blob to decrypt
        hex_preexisting_128_bit_key: hex representation of the 128-bit key (required)
        hex_preexisting_iv: hex representation of the image IV (required)
    output:
        plaintext_binary: Decrypted plaintext binary
    Raises RuntimeError when a required parameter is missing.
    """
    # Identity comparison with None per PEP 8 (was "== None").
    if hex_preexisting_128_bit_key is None:
        raise RuntimeError('Key must be supplied')
    encryption_key = hex_preexisting_128_bit_key
    if hex_preexisting_iv is None:
        raise RuntimeError('IV must be supplied')
    image_iv = hex_preexisting_iv
    # openssl works on files, so stage the ciphertext in a temp file.
    encrypted_blob_file = utility_functions.store_data_to_temp_file(encrypted_blob)
    plaintext_binary_file_name = utility_functions.store_data_to_temp_file('')
    command_list = [openssl_binary_path, "enc", "-aes-128-cbc", "-d", "-in", encrypted_blob_file,
                    "-K", encryption_key, "-iv", image_iv,
                    "-out", plaintext_binary_file_name, "-nopad"]
    utility_functions.system_command_logged(command_list)
    try:
        with open(plaintext_binary_file_name, 'rb') as plaintext_output_file:
            plaintext_binary = plaintext_output_file.read()
    finally:
        # BUGFIX: clean up the temp files even if reading the output fails.
        os.unlink(encrypted_blob_file)
        os.unlink(plaintext_binary_file_name)
    return plaintext_binary
def get_public_key_from_certificate(certificate):
    """Extract the public key from a certificate and return openssl's text dump of it.

    certificate may be DER or PEM; DER input is converted to PEM first.
    """
    if 'BEGIN CERTIFICATE' not in certificate:
        certificate = cert_der_to_pem(certificate)
    pubkey = _extract_public_key_from_certificate(certificate)
    pubkey_file_name = utility_functions.store_data_to_temp_file(pubkey)
    # BUGFIX: the previous '<' argv element relied on shell redirection, which
    # has no effect in an argument list; pass the file via openssl's -in flag.
    command_list = [openssl_binary_path, 'rsa', '-pubin', '-inform', 'PEM', '-text', '-noout', '-in', pubkey_file_name]
    logger.debug("Command_list = " + repr(command_list))
    pubkey_text = utility_functions.system_command_logged(command_list, stderr_to_temp=True)
    logger.debug2("Pubkey text: " + pubkey_text)
    os.unlink(pubkey_file_name)
    # BUGFIX: the computed text was previously discarded (implicit None return).
    return pubkey_text
def cert_uses_pss(certificate):
    """Return True if the certificate's signature algorithm is RSASSA-PSS.

    certificate must be in PEM form.
    """
    cert_tempfile_name = utility_functions.store_data_to_temp_file(certificate)
    command_list = [openssl_binary_path, "x509", "-text", "-inform", "PEM", "-in", cert_tempfile_name, ] + "-noout -certopt no_header -certopt no_version -certopt no_serial -certopt no_validity -certopt no_subject -certopt no_issuer -certopt no_pubkey -certopt no_sigdump -certopt no_sigdump -certopt no_aux -certopt no_extensions".split()
    try:
        output = utility_functions.system_command_logged(command_list, stderr_to_temp=True)
        # BUGFIX: the old code stored str.index()'s return value, so a match at
        # position 0 was falsy; a membership test yields a proper bool.
        uses_pss = "Signature Algorithm: rsassaPss" in output
    except Exception:
        # Narrowed from a bare "except:" so KeyboardInterrupt/SystemExit escape.
        logger.critical("cert_uses_pss: OPENSSL could not parse certificate")
        uses_pss = False
    finally:
        os.unlink(cert_tempfile_name)
    return uses_pss
def cert_version_check(certificate, version):
    """
    certificate must be in DER form
    current version is 3 (i.e. v3)
    Returns true if cert version match
    """
    der_cert_text = get_der_certificate_text(certificate)
    # BUGFIX: raw string avoids the invalid "\d" escape (DeprecationWarning,
    # SyntaxError on future Python versions).
    match = re.search(r'Version: (\d+) \(.*?\)\n', der_cert_text, re.IGNORECASE)
    if match is None:
        raise RuntimeError('Could not get the Version info from certificate')
    cert_version = match.group(1)
    if int(cert_version, 10) == version:
        return True
    logger.critical('Certificate version is incorrect: ' + str(cert_version))
    return False
| [
"lonelyjskj@gmail.com"
] | lonelyjskj@gmail.com |
a8aded7aeb17247f05860c2b32054adfb9a56e9f | 6b77ae897567d391780efa7457368301077e2330 | /tcpssh/venv/Scripts/pip3-script.py | ebf45c3346cbc964e3ded4837c79c1dbb2c4cfe5 | [] | no_license | wapjin/python-code | 03ba079ebc551941c3d0d1122db5fb944070e397 | d82f7e64b6c62bf8eb04f960c8fa03b02495b2fc | refs/heads/master | 2020-12-13T17:59:25.265569 | 2020-03-21T08:12:16 | 2020-03-21T08:12:16 | 234,477,075 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 398 | py | #!"D:\python code\tcpssh\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
# Auto-generated setuptools console-script wrapper for the "pip3" entry point.
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point

if __name__ == '__main__':
    # Strip the "-script.py(w)"/".exe" suffix so argv[0] looks like "pip3".
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
    )
"1045833538@qq.com"
] | 1045833538@qq.com |
ea8ce920973c4519f0a07fbdcfc341110505353f | bf9b4ff0145381084e11b2df64a8399b46328b36 | /libby/main.py | ba1af7b19c80ad799f69e55ca89c5b4114208fd1 | [] | no_license | kwarwp/grete | f6fbf7e21464536cb27253aa02de4b3292179883 | e5f02660c57ba3d271b059d8b7972099226a0987 | refs/heads/master | 2023-04-06T21:11:37.811621 | 2023-03-16T21:32:04 | 2023-03-16T21:32:04 | 128,999,048 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,438 | py | # grete.libby.main.py
#REVISADO PRONTO
from _spy.vitollino.main import Cena,Elemento,Texto, Sala, INVENTARIO
#CAMPO = "https://image.freepik.com/fotos-gratis/paisagem-de-campo-de-golfe_1388-96.jpg"
#CASAL = "https://images.vexels.com/media/users/3/129903/isolated/preview/c996f5193090b3a642ffc069bc81da0c-silhueta-do-casal-andando-12-by-vexels.png
'''
def boyfriendsatthecamp():
campo = Cena (img = CAMPO)
casal = Elemento(img = CASAL, tit = "casal", style = dict(left = 150, top = 60, height = 200))
txtcasal = Texto(casal,"let's eat something!")
casal.entra(campo)
casal.vai = txtcasal.vai
campo.vai()
loversatthecamp()
'''
# Wall images for the room -- presumably norte/leste/sul/oeste
# (north/east/south/west); confirm against the Sala constructor.
sn = "https://i.imgur.com/evlSZig.jpg"
sl = "https://i.imgur.com/Ax1XDBU.jpg"
ss = "https://i.imgur.com/9Vg7DzJ.jpg"
so = "https://i.imgur.com/haPQ4rZ.jpg"
#OBJECTS
# Draggable object sprites (Portuguese names: tomada=socket,
# interruptor=switch, interfone=intercom, extintor=fire extinguisher,
# garrafa_termica=thermal bottle, bebedouro=drinking fountain).
tomada = "https://i.imgur.com/l6INRuQ.jpg"
interruptor = "https://i.imgur.com/olpkjL0.jpg"
interfone = "https://i.imgur.com/4s1Pbpv.jpg"
extintor = "https://i.imgur.com/AJzKYaE.jpg"
garrafa_termica = "https://i.imgur.com/M9oUgH6.jpg"
bebedouro = "https://i.imgur.com/GDRYgs3.jpg"
# grete.amanda.main.py
from _spy.vitollino.main import STYLE, INVENTARIO, Sala, Texto, Cena
# Stage dimensions for the Vitollino engine.
STYLE["width"] = 800
STYLE["height"] = "600px"
# Sprite URLs used elsewhere in the game.
children = "https://i.imgur.com/4fTrn8X.jpg"
toy = "https://i.imgur.com/57cOaZ9.jpg"
sckoolhouse = "https://i.imgur.com/oXsdN2c.jpg"
leyden = "https://i.imgur.com/abeXKwL.jpg"
volcano = "https://i.imgur.com/4Y5aie8.jpg"
globe = "https://i.imgur.com/EQtHzod.jpg"
ball = "https://i.imgur.com/rBbRsFU.jpg"
# Comic strip shown when entering the secretary room.
TIRINHA_DA_SECRETARIA= "https://i.imgur.com/555hVt2.png"
# Memoized Sala instance built by secretary(); None until first call.
SECRETARY = None
def secretary():
    """Build (once) and return the secretary room.

    Uses the module-level SECRETARY global as a memoization cache, so
    repeated calls return the same Sala instance.
    """
    global SECRETARY
    if SECRETARY:
        # Room already built -- reuse the cached Sala instance.
        return SECRETARY
    def _gone_secretary():
        # Jump to the gymnasium's south wall; the import is retried inside
        # the except to dodge a circular import at module-load time.
        try:
            gimnasium().sul.vai()
        except:
            from anastasia.main import gimnasium
            gimnasium().sul.vai()
    def _go_kitchen():
        # Jump to the kitchen's west wall; same lazy-import trick as above.
        try:
            kitchen().oeste.vai()
        except:
            from callie.main import kitchen
            kitchen().oeste.vai()
    def _go_secretary():
        # Show the comic-strip intro, then rebind the west wall so the next
        # click goes straight to the gymnasium.
        SECRETARY.oeste.meio= Cena(vai = _gone_secretary)
        _vai = Cena()
        def redir():
            # Once the story text has been shown, clicking advances to the gym.
            _vai.vai = _gone_secretary
        historia = Cena(TIRINHA_DA_SECRETARIA, _vai, _vai, _vai)
        texto = """Cleison enteres the school excitedly and sees the three secretaries. Boring,
so boring that you would even want to die just to look at them.
He went to speak to them. Getting closer, he saw on their shirts the following sentence :
'Become Claudemilson'. He said hello and they answerd:
-Become Claudemillson! Where is your shirt?
He said:
-I am a former student, I just came to visit the school. See you!
He left and the three of them looked at him leaving, just turning their heads.
"""
        _vai.vai = Texto(historia, '', texto, foi=redir).vai
        historia.vai()
    def go_secretary():
        _go_secretary()
    # Cache the room before wiring it up so the nested closures can see it.
    SECRETARY = _sala = Sala(sn,sl,ss,so, "trig")
    from naomi.main import Elemento
    _sala.oeste.meio = Cena(TIRINHA_DA_SECRETARIA, vai = go_secretary)
    _sala.sul.meio= Cena(vai = _go_kitchen)
    # Draggable props: each object's title ("tit") is deliberately the wrong
    # name and "drop" is where it belongs -- presumably the puzzle is to fix
    # the names; confirm against the game design.
    bebedouro_ = Elemento(bebedouro, tit = "switch", drag=True,
                          x = 460, y = 192, w = 80, h = 90, drop="drinking fountain",
                          cena=_sala.sul, texto="Please help me, fix my name.")
    tomada_ = Elemento(tomada, tit = "thermal bottle", drag=True,
                       x = 185, y = 30, w = 80, h = 100, drop="socket",
                       cena=_sala.leste, texto="Please help me, fix my name.")
    extintor_ = Elemento(extintor, tit = "socket", drag=True,
                         x = 30, y = 500, w = 100, h = 120,drop="fire extinguisher",
                         cena=_sala.leste, texto="Please help me, fix my name.")
    garrafa_termica_ = Elemento(garrafa_termica, tit = "fire extinguisher", drag=True,
                                x = 520, y = 220, w = 90, h = 60, drop="thermal bottle",
                                cena=_sala.sul, texto="Please help me, fix my name.")
    interfone_ = Elemento(interfone, tit = "drinking fountain", drag=True,
                          x = 700, y = 220, w = 90, h = 60, drop="communicator",
                          cena=_sala.sul, texto="Please help me, fix my name.")
    interruptor_ = Elemento(interruptor, tit = "communicator", drag=True,
                            x = 100, y = 220, w = 90, h = 60, drop="switch",
                            cena=_sala.oeste, texto="Please help me, fix my name.")
    return _sala
if __name__ == "__main__":
    # Start the inventory UI and open the secretary room at its north wall.
    INVENTARIO.inicia()
    secretary().norte.vai()
| [
"38007182+kwarwp@users.noreply.github.com"
] | 38007182+kwarwp@users.noreply.github.com |
55f4c8eb3f284879551872c0d056599a50209d67 | 6643bd4ecd44a21944debc75d79c4616bdac7868 | /datawarehouse/opertaion/script/ETL_P2P.py | e41ca1fb1dac6f879da61768c24a2d36d4b7928b | [] | no_license | smartbrandnew/vobileETLCode | d14fb9f34eb1f76e6e60253557c8b964137f6cb4 | 210e9060ed9fc1d278373910cfe203808b1afb04 | refs/heads/master | 2021-07-14T07:30:08.018828 | 2016-07-04T08:40:57 | 2016-07-04T08:40:57 | 62,543,866 | 0 | 2 | null | 2020-07-22T19:56:50 | 2016-07-04T08:12:01 | Python | UTF-8 | Python | false | false | 2,050 | py | #!/usr/bin/python
import MySQLdb
import sys
import time
import datetime
# downloads
# Pull daily P2P download counts from the replica DB and insert one row per
# day into the DW_VTMetrics warehouse.
# NOTE(review): database credentials are hard-coded in this script.
conn=MySQLdb.connect(host='p2p-1-replica.c85gtgxi0qgc.us-west-1.rds.amazonaws.com',user='kettle',passwd='k3UTLe',port=3306)
conn.select_db('hubble_stat')
dlcur=conn.cursor()
dlcur.execute('select date_format(finished_at, "%Y-%m-%d"), count(*) from finishedDNAIdentifyTask where error_code = 0 and download_time > 0 and finished_at >="2015-07-01" and finished_at <"2016-03-01" and dna_generate_time > 0 group by 1')
dlrows = dlcur.fetchall()
# Reconnect to the warehouse and write the download counts.
conn=MySQLdb.connect(host='192.168.110.114',user='kettle',passwd='k3UTLe',port=3306)
dlcur=conn.cursor()
conn.select_db('DW_VTMetrics')
for e in dlrows:
    # e = (date, download_count); company_id 14 / website_type 'P2P' are fixed.
    downloads_insert = "insert into VTMetricsReport(company_id,website_type,reportedDate,downloads) values('%s','%s','%s','%s')" %(14,'P2P',e[0],e[1])
    dlcur.execute(downloads_insert)
conn.commit()
dlcur.close()
conn.close()
#matched seed/ matches/ Ips of matches / matches with notices sent
conn=MySQLdb.connect(host='eqx-vtweb-slave-db',user='kettle',passwd='k3UTLe',port=3306)
conn.select_db('tracker2')
mtcur=conn.cursor()
mtcur.execute('select date(a.created_at), count(distinct a.key_id) Matched_Seed, count(a.id) Matches, sum(a.view_count), sum(if(a.count_send_notice > 0,1,0)) send_Notices from matchedVideo a, mddb.trackingWebsite b where a.company_id = 14 and a.trackingWebsite_id = b.id and a.hide_flag = 2 and b.website_type in ("p2p") and a.created_at >= "2015-07-01" and a.created_at < "2016-03-01" group by 1')
mtrows = mtcur.fetchall()
# Reconnect to the warehouse and update each day's row with match statistics.
conn=MySQLdb.connect(host='192.168.110.114',user='kettle',passwd='k3UTLe',port=3306)
mtcur=conn.cursor()
conn.select_db('DW_VTMetrics')
for e in mtrows:
    # e = (date, matched_seeds, matches, view_count_sum, notices_sent).
    matches_insert = "update VTMetricsReport set matchedSeeds = '%s', matches = '%s', matchedURLs_IPs = '%s', matchedWithNoticeSent = '%s' where company_id = 14 and website_type='%s' and reportedDate in ('%s')" %(e[1],e[2],e[3],e[4],'P2P',e[0])
    mtcur.execute(matches_insert)
conn.commit()
mtcur.close()
conn.close()
| [
"smartbrandnew@163.com"
] | smartbrandnew@163.com |
6c882c2b99ed74a91f891acc54ce5e7717911120 | d94b6845aeeb412aac6850b70e22628bc84d1d6d | /yoto/problems/base.py | 28a7ba39f965d68d325b980d3acc1a0d43a249af | [
"CC-BY-4.0",
"Apache-2.0"
] | permissive | ishine/google-research | 541aea114a68ced68736340e037fc0f8257d1ea2 | c1ae273841592fce4c993bf35cdd0a6424e73da4 | refs/heads/master | 2023-06-08T23:02:25.502203 | 2023-05-31T01:00:56 | 2023-05-31T01:06:45 | 242,478,569 | 0 | 0 | Apache-2.0 | 2020-06-23T01:55:11 | 2020-02-23T07:59:42 | Jupyter Notebook | UTF-8 | Python | false | false | 1,804 | py | # coding=utf-8
# Copyright 2023 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common interfaces for multi-objective problems."""
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class Problem(object):
  """Abstract class for multi-loss optimization problems."""

  @abc.abstractmethod
  def losses_and_metrics(self, inputs, inputs_extra=None, training=False):
    """Compute the losses and some additional metrics.

    Args:
      inputs: Dict[ Str: tf.Tensor]. Maps input names (for instance, "image" or
        "label") to their values.
      inputs_extra: tf.Tensor. Additional conditioning inputs.
      training: Bool. Whether to run the model in the training mode (mainly
        important for models with batch normalization).

    Returns:
      losses: Dict[ Str: tf.Tensor]. A dictionary mapping loss names to tensors
        of their per-sample values.
      metrics: Dict[ Str: tf.Tensor]. A dictionary mapping metric names to
        tensors of their per-sample values.
    """

  @abc.abstractmethod
  def initialize_model(self):
    """Build/initialize the underlying model."""
    pass

  # Modernization: abc.abstractproperty is deprecated since Python 3.3; the
  # documented replacement is stacking @property over @abc.abstractmethod.
  @property
  @abc.abstractmethod
  def losses_keys(self):
    """Names of the losses used in the problem (keys in the dict of losses)."""

  @property
  @abc.abstractmethod
  def module_spec(self):
    """TF Hub Module spec."""
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
c308f7c1b5031043658b922ec5cf81a010636bb4 | 1647dd424edd275a4b078bcc5a0cba7312e81fdc | /common_utils/data_types/correlation_matrix.py | f117d3adb988b301dc88494b9d433ea3ee02523a | [
"MIT"
] | permissive | CroP-BioDiv/zcitools | a72646bec8795bd24aca2dc7fa91c116be6abd7a | 3340a92f710f4acb5d3507bec639c40a17dfb5f2 | refs/heads/master | 2022-09-23T07:09:16.671748 | 2022-09-17T11:09:21 | 2022-09-17T11:09:21 | 222,237,739 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,334 | py | from common_utils.import_method import import_pandas
from common_utils.exceptions import ZCItoolsValueError
class CorrelationMatrix:
    """Symmetric pairwise-value matrix addressed by case-insensitive column names.

    Values are stored once per unordered column pair (keyed by the sorted
    lowercase name pair); pairs with no value simply have no entry.
    """

    def __init__(self, columns, list_values=None):
        """Create a matrix for *columns*.

        list_values, when given, is the strict lower triangle row by row:
        row i holds the values pairing column i with columns i+1 .. n-1.
        None entries are skipped.
        """
        # Column names are normalized: spaces become underscores; a lowercase
        # copy is kept for case-insensitive lookups.
        # NOTE(review): check_column() lowercases but does not replace spaces,
        # so lookups must already use the underscore form -- confirm callers.
        self._columns = [c.replace(' ', '_') for c in columns]
        self._columns_lower = [c.lower().replace(' ', '_') for c in columns]
        self._values = dict()  # tuple(sorted(c1, c2)) -> value. Note: value can be missing
        if list_values:
            assert len(columns) - 1 == len(list_values), (len(columns), len(list_values))
            for i, (c1, vs) in enumerate(zip(self._columns, list_values)):
                c2s = self._columns[i+1:]
                assert len(c2s) == len(vs), (i, c1, len(c2s), len(vs))
                for c2, v in zip(c2s, vs):
                    if v is not None:
                        self.set(c1, c2, v)

    def num_columns(self):
        """Return the number of columns."""
        return len(self._columns)

    def check_column(self, c, to_assert=False):
        """Return the normalized (lowercase) name of column *c*.

        Returns None for an unknown column unless to_assert is set, in which
        case an AssertionError is raised.
        """
        c = c.lower()
        if c in self._columns_lower:
            return c
        if to_assert:
            assert False, (c, self._columns)

    #
    def set(self, c1, c2, v):
        """Set the value for the unordered pair (c1, c2); v=None removes it."""
        c1 = self.check_column(c1, to_assert=True)
        c2 = self.check_column(c2, to_assert=True)
        k = (c1, c2) if c1 < c2 else (c2, c1)
        if v is None:
            # BUGFIX: pop with a default so clearing a pair that was never set
            # is a no-op instead of raising KeyError.
            self._values.pop(k, None)
        else:
            self._values[k] = v

    def get(self, c1, c2):
        """Return the value for the unordered pair (c1, c2), or None if unset."""
        c1 = self.check_column(c1, to_assert=True)
        c2 = self.check_column(c2, to_assert=True)
        k = (c1, c2) if c1 < c2 else (c2, c1)
        return self._values.get(k)

    @staticmethod
    def from_excel(filename, triangular='L'):
        """Read a matrix from Sheet1 of an Excel file (lower triangular only)."""
        df = import_pandas().read_excel(filename, sheet_name='Sheet1')
        columns = list(df.columns[1:])
        if triangular.upper() == 'L':
            # Row i of the lower triangle: values below the diagonal in column i.
            list_values = [list(df[c1][i+1:]) for i, c1 in enumerate(columns[:-1])]
        else:
            raise NotImplementedError('Upper triangular')
        return CorrelationMatrix(columns, list_values=list_values)

    @staticmethod
    def from_file(filename, triangular='L'):  # Lower/Upper triangular
        """Dispatch on file extension; currently only .xlsx is supported."""
        if filename.endswith('.xlsx'):
            return CorrelationMatrix.from_excel(filename, triangular=triangular)
        raise ZCItoolsValueError(f"Can't import correlation data from file (unknown)!")
| [
"ante.turudic@gmail.com"
] | ante.turudic@gmail.com |
50fb251c0bd246cad074f7cca232173dd0b7c5ed | f998a574343292d050777f616b408a74fde05738 | /eshop_docker/eshop/apps/trade/adminx.py | e70a39d7f2b85434781c6c7f8b3084dfc5e3750e | [] | no_license | Boomshakal/Django | 7987e0572fc902bd56360affea0b5087a4cb04a7 | a149691c472eab3440028bf2460cd992acec0f8a | refs/heads/master | 2023-01-11T06:16:29.283428 | 2022-12-23T08:00:05 | 2022-12-23T08:00:05 | 199,360,433 | 0 | 0 | null | 2020-06-06T09:37:02 | 2019-07-29T02:01:09 | Python | UTF-8 | Python | false | false | 716 | py | # -*- coding:utf-8 -*-
__author__ = 'xojisi'
__date__ = '2018/2/1 13:42'
import xadmin
from .models import ShoppingCart, OrderInfo, OrderGoods
class ShoppingCartAdmin(object):
    # Columns shown in xadmin's shopping-cart list view.
    list_display = ["user", "goods", "nums", ]
class OrderInfoAdmin(object):
    # Columns shown in xadmin's order list view.
    # NOTE(review): "order_mount" appears twice -- possibly one occurrence
    # should be a different field; confirm against the OrderInfo model.
    list_display = ["user", "order_sn", "trade_no", "pay_status", "post_script", "order_mount",
                    "order_mount", "pay_time", "add_time"]

    class OrderGoodsInline(object):
        # Inline editor for the goods rows belonging to an order ("tab" layout).
        model = OrderGoods
        exclude = ['add_time', ]
        extra = 1
        style = 'tab'

    inlines = [OrderGoodsInline, ]
# Register the admin option classes with xadmin.
xadmin.site.register(ShoppingCart, ShoppingCartAdmin)
xadmin.site.register(OrderInfo, OrderInfoAdmin)
| [
"362169885@qq.com"
] | 362169885@qq.com |
0c9aadc2d0cb47adbdf5eb01b41759191fbb4b89 | 5a391bc46a3649d22e4f928ff995daf55caebf9d | /Day2/AsyncHttp2.py | db9e05a3dc416a83571e0943db548571c01b008a | [] | no_license | artheadsweden/Python_Adv_Nov_19 | d4d4c45b34394bf3b64ba3c64c91213bd5d21594 | edae48c6cdab2e3c2090394fc499f4cc500df13a | refs/heads/master | 2020-09-20T05:41:35.817957 | 2019-11-27T21:34:31 | 2019-11-27T21:34:31 | 224,390,626 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,021 | py | from aiohttp import ClientSession
import asyncio
async def get(session, url):
    """Fetch *url* with *session* and return the response body as text."""
    async with session.get(url) as resp:
        body = await resp.text()
    return body
async def fetch(url):
    """Open a fresh ClientSession, download *url*, and return its HTML."""
    async with ClientSession() as http:
        print("Fetching", url)
        page = await get(http, url)
        print(url, "is done")
        return page
async def print_when_done(tasks):
    """Await *tasks* as they complete, then print each page's <title> text."""
    result = [await res for res in asyncio.as_completed(tasks)]
    print("Got", len(result), "pages")
    print("They got the titles")
    for page in result:
        # BUGFIX: str.index raised ValueError for pages without a <title>
        # tag; use find() and skip such pages instead of crashing.
        title_index = page.find("<title>")
        title_end_index = page.find("</title>")
        if title_index == -1 or title_end_index == -1:
            continue
        title = page[title_index+7: title_end_index]
        print(title.strip())
async def get_urls():
    """Kick off downloads of the hard-coded news sites and report their titles."""
    urls = ["http://cnn.com", "http://bbc.com", "http://aljazeera.com"]
    await print_when_done([fetch(u) for u in urls])
def main():
    """Program entry point: run the async crawl to completion."""
    # Modernization: asyncio.run replaces the deprecated
    # get_event_loop()/run_until_complete() pair and closes the loop cleanly.
    asyncio.run(get_urls())

if __name__ == '__main__':
    main()
| [
"joakim@arthead.se"
] | joakim@arthead.se |
fb3497f271ad83c243e1b67f2e965dad71f998f4 | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/helpers/typeshed/stubs/Pillow/PIL/SgiImagePlugin.pyi | e7f4ff96e1b777c06448cd814b82d888da0b87e3 | [
"Apache-2.0",
"MIT"
] | permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 301 | pyi | from typing import Any, ClassVar
from typing_extensions import Literal
from .ImageFile import ImageFile, PyDecoder
# Lookup of SGI header parameters to PIL mode strings -- typed Any in the stub.
MODES: Any

class SgiImageFile(ImageFile):
    # ImageFile subclass for the SGI image format (.sgi/.rgb/.bw).
    format: ClassVar[Literal["SGI"]]
    format_description: ClassVar[str]
class SGI16Decoder(PyDecoder):
    # Python-level decoder (PyDecoder) for 16-bit SGI pixel data.
    def decode(self, buffer): ...
| [
"intellij-monorepo-bot-no-reply@jetbrains.com"
] | intellij-monorepo-bot-no-reply@jetbrains.com |
cd2837c6567f62732024d941943feef047ced534 | 662422157827bfa076cde1369117b04646bc09af | /DNS/bruteforce_ipv6.py | 2ed8ed208fa1325a6cffa48b16d7209ecee4d5ea | [] | no_license | macavalcantep/MBA_IMPACTA | d1d1e37de6bc1a2d02a8bb9a0e5112f9b9fd8ef5 | 9228d7f55a23b51c8e4b908333eb6e89431c76a2 | refs/heads/main | 2023-08-04T03:06:54.290331 | 2021-09-27T21:20:11 | 2021-09-27T21:20:11 | 402,221,762 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 680 | py | #!/usr/bin/python
import dns.resolver
# Shared resolver for all lookups; the target domain is fixed here.
myquery = dns.resolver.Resolver()
domain = "yahoo.com"
#host ="www"
#target = host + "." + domain
def func_a(_target):
    """Resolve *_target*'s AAAA (IPv6) records and print each address found."""
    answers = myquery.query(_target, 'AAAA')
    for answer in answers:
        print(f'[+] - {_target}---> {answer}')
def bruteforce_dns_ipv6(_wordlist):
    """Try an AAAA lookup of "<word>.<domain>" for each word in *_wordlist*.

    Stops at the first blank line (or end of file). Hostnames that fail to
    resolve are silently skipped (best-effort brute force).
    """
    with open(_wordlist, 'r') as machines:
        for line in machines:
            machine = line.strip("\n")
            if not machine:
                # Preserve the original behavior: a blank line ends the scan.
                break
            try:
                target = machine + "." + domain
                func_a(target)
            except Exception:
                # BUGFIX: narrowed from a bare "except:" so that
                # KeyboardInterrupt/SystemExit are no longer swallowed.
                pass
bruteforce_dns_ipv6("file.txt")
| [
"yellow@battosai.yellow.dojo"
] | yellow@battosai.yellow.dojo |
734cf2f816bfa171b109a2ecd2be7c9fe690212b | decea024dde21a9e4847992c53c67da2bc3f365c | /Lectures/Intro to Frameworks/Lab/ORMs/app/models.py | 2f5358169b9efc5eaf88d1be4ea29ac2cdbeceda | [] | no_license | rjglushko/IOLab | bd0d2fd941a0ab592ca84ff9c679b24cad9dbf7d | 08e08691ecf488ef35faf3d431681756c7d7995a | refs/heads/master | 2021-01-10T04:21:44.095280 | 2016-03-19T05:03:22 | 2016-03-19T05:03:22 | 47,841,381 | 0 | 3 | null | 2016-03-19T04:28:29 | 2015-12-11T17:36:30 | HTML | UTF-8 | Python | false | false | 472 | py | from app import db
class Customer(db.Model):
    """SQLAlchemy model: one row per customer."""
    id = db.Column(db.Integer, primary_key=True)
    # company is explicitly non-unique: several customers may share one.
    company = db.Column(db.String(120), unique=False)
    email = db.Column(db.String(120))
    # You need to add a relationship to the Address table here
    # see http://flask-sqlalchemy.pocoo.org/2.1/models/#one-to-many-relationships
    def __repr__(self):
        return '<Customer %r>' % self.email
# Your Address code should go here
# class Address(db.Model):
| [
"="
] | = |
5b06e6d0a8f9dec3336df8b334522c5d6177989a | 3ca57c41e909849729a6be0a9cbfd0f09d8c23cc | /plot_compound_length_thesis.py | d53d1d4da7ad778f3e5e5953e26eb96fa44c5cec | [
"Apache-2.0"
] | permissive | evanmiltenburg/MeasureDiversity | ec060132e51a4203daa725eeca2f0d3ca4ba6e38 | 8b79349ac339d949156cdc90d3cab9abc332c978 | refs/heads/master | 2021-03-27T06:27:27.749091 | 2019-12-29T12:40:13 | 2019-12-29T12:40:13 | 117,252,509 | 12 | 3 | Apache-2.0 | 2019-07-11T10:51:06 | 2018-01-12T14:46:43 | Python | UTF-8 | Python | false | false | 2,827 | py | import json
from matplotlib import pyplot as plt
from matplotlib.lines import Line2D
import seaborn as sns
import pandas as pd
# Global seaborn/matplotlib styling for the thesis figure.
sns.set_style("white")
sns.set_context('paper', font_scale=7)
my_palette = sns.cubehelix_palette(10, start=.8, rot=-.95)#sns.color_palette("cubehelix", 10)
sns.set_palette(my_palette)
# Maps dataset system keys to human-readable legend labels.
system2label = {'Dai-et-al-2017': 'Dai et al. 2017',
                'Liu-et-al-2017': 'Liu et al. 2017',
                'Mun-et-al-2017': 'Mun et al. 2017',
                'Shetty-et-al-2016': 'Shetty et al. 2016',
                'Shetty-et-al-2017': 'Shetty et al. 2017',
                'Tavakoli-et-al-2017': 'Tavakoli et al. 2017',
                'Vinyals-et-al-2017': 'Vinyals et al. 2017',
                'Wu-et-al-2016': 'Wu et al. 2016',
                'Zhou-et-al-2017': 'Zhou et al. 2017'}
# Pre-computed noun/compound statistics for all systems + validation data.
with open('Data/Output/nouns_pps.json') as f:
    data = json.load(f)
def get_val(data):
    """Average the validation runs' compound-length counts.

    Returns {2: mean, 3: mean, 4: mean}, each mean rounded to the nearest
    integer (JSON keys are the string forms '2', '3', '4').
    """
    collected = {length: [] for length in (2, 3, 4)}
    for run in data['val']['compound_data']:
        counts = run['compound_lengths']
        for length in (2, 3, 4):
            collected[length].append(counts[str(length)])
    return {length: round(sum(vals) / len(vals))
            for length, vals in collected.items()}
# Assemble a long-format table: one row per (system, compound length).
to_plot = dict(system=[],
               length=[],
               number=[])
for system, label in system2label.items():
    to_plot['system'].extend([label] * 3)
    to_plot['length'].extend(['len2','len3','len4'])
    to_plot['number'].extend([data[system]['compound_data']['compound_lengths'].get(i,0)
                              for i in ['2','3','4']])
# Append the averaged validation-data counts; 'zzzval' sorts after all
# system labels so the validation bars come last in the hue order.
val = get_val(data)
to_plot['system'].extend(['zzzval'] * 3)
to_plot['length'].extend(['len2','len3','len4'])
to_plot['number'].extend([val[i] for i in [2,3,4]])
df = pd.DataFrame(to_plot)
# NOTE(review): this first barplot draws into the default figure, which is
# replaced on the next line -- the call appears redundant.
ax = sns.barplot(x='number', y='length', hue='system', data=df)
fig, ax = plt.subplots(figsize=(45,28))
ax = sns.barplot(x='number', y='length', hue='system', data=df)
ax.set_xscale('log')
# Build a custom marker-based legend matching the palette order.
labels = list(system2label.values()) + ['Validation data']
legend_markers = [Line2D(range(1), range(1),
                         linewidth=0, # Invisible line
                         marker='o',
                         markersize=40,
                         markerfacecolor=my_palette[i]) for i, name in enumerate(labels)]
plt.legend(legend_markers, labels, numpoints=1, loc='center left', bbox_to_anchor=(1, 0.5), ncol=1, handletextpad=-0.3, columnspacing=0)
sns.despine()
plt.tick_params(direction='in', length=10, width=4, bottom=True, left=True)
plt.tight_layout()
plt.ylabel('Compound length',labelpad=50)
ax.set_yticklabels(['2','3','4'])
ax.tick_params(axis='both', which='major', pad=15)
plt.xlabel('Number of tokens',labelpad=50)
plt.savefig('Data/Output/compound_lengths_thesis.pdf')
| [
"emielonline@gmail.com"
] | emielonline@gmail.com |
313d24d9e21f7f163d1bf7b7e6dcd9aa369c6860 | 24aa1d8455a923ce64b6066438742c6d85945c80 | /medi_app/migrations/0002_alter_employeebank_added_on.py | 432e68209ebc2e94f5102be737dc8ab10a0a1a19 | [] | no_license | babor99/react_django_medical_store | f81c5e9f436817b16e2c508a6dd6de3d895d511f | e8ab6d7801b6d1c2be86e78ad0165e6df436688f | refs/heads/main | 2023-07-04T04:33:12.048265 | 2021-08-10T20:32:19 | 2021-08-10T20:32:19 | 394,774,083 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | # Generated by Django 3.2 on 2021-05-06 11:15
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make EmployeeBank.added_on auto-set at creation time."""

    dependencies = [
        ('medi_app', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='employeebank',
            name='added_on',
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]
| [
"islambabor947@gmail.com"
] | islambabor947@gmail.com |
3787d9fc4d0fa05467d40d0804ef8861a48641e2 | 56503bd3bd81541764333f19382488e2bd98cc9c | /a_path.py | b8b48f49c1916a29140ca64ee3d776af2744466e | [] | no_license | idrissabanli/algoritms | ea58ea5f9fcfd9051efc1d216cad950f459d2ea7 | b614750237e16e2de4d9565582ceb67f2c7bc150 | refs/heads/master | 2020-12-27T03:19:59.964091 | 2020-02-02T09:29:49 | 2020-02-02T09:29:49 | 237,747,480 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,935 | py | import random
# a = [[gedilecek_yol, oldugu_yerden_mesafe, cemi, yerlesme]]
a = [[0,0,0,1], [0,0,0,0], [0,0,0,0], [0,0,0,0]]
a = []
array_count = 10
f_i, f_j = random.randint(0, array_count-1), random.randint(0, array_count-1)
l_i, l_j = random.randint(0, array_count-1), random.randint(0, array_count-1)
print(f_i, f_j)
print(l_i, l_j)
c_i, c_j = f_i, f_j
for i in range(array_count):
a.append([])
for j in range(array_count):
if i == 0 and j ==0:
a[i].append([0,0,0,1])
else:
a[i].append([0,0,0,0])
# Greedy best-first walk: score every unvisited neighbour of the current cell
# (h = 10 * Manhattan distance to the goal, g += step cost, f = g + h) and
# move to the neighbour with the smallest f.
#
# BUGFIX: the original eight copy-pasted neighbour blocks contained two typos:
# the NE block computed abs(l_j-c_j+1) instead of abs(l_j-(c_j+1)), and the SW
# block bounded the row index with len(a[c_i]) instead of len(a). Iterating
# over the eight offsets removes the duplication and both typos.
while (c_i, c_j) != (l_i, l_j):
    min_element = 1000  # sentinel kept from the original code
    min_element_ci = 0
    min_element_cj = 0
    for d_i in (-1, 0, 1):
        for d_j in (-1, 0, 1):
            if d_i == 0 and d_j == 0:
                continue  # skip the current cell itself
            n_i, n_j = c_i + d_i, c_j + d_j
            if not (0 <= n_i < len(a) and 0 <= n_j < len(a[n_i])):
                continue  # neighbour lies outside the grid
            cell = a[n_i][n_j]
            if cell[2] != 0:
                continue  # already scored on a previous step
            # Diagonal steps cost 14 (~10*sqrt(2)); straight steps cost 10.
            step_cost = 14 if (d_i != 0 and d_j != 0) else 10
            cell[0] = 10 * (abs(l_i - n_i) + abs(l_j - n_j))  # heuristic h
            cell[1] += step_cost                              # path cost g
            cell[2] = cell[0] + cell[1]                       # f = g + h
            if min_element > cell[2]:
                min_element = cell[2]
                min_element_ci = n_i
                min_element_cj = n_j
    # NOTE(review): if every neighbour is already scored the walk jumps to the
    # (0, 0) defaults -- same quirk as the original code.
    print(min_element, min_element_ci, min_element_cj)
    c_i = min_element_ci
    c_j = min_element_cj
# for i, el in enumerate (a):
# print()
# # print(i)
# for j in el:
# print(j, end=',')
# input()
| [
"idris@labrin.net"
] | idris@labrin.net |
4681a6c8e7bf85efe88951c9576d5b58a74d27d8 | 87e5a0b52ee4ce63d8713867408cfe86c935cb72 | /src/engineer/kenya_non_crop.py | e2d2556d2493341dae33f57a8db9e9665c3ead7d | [] | no_license | BatmanDao/crop-maml | 593cb04f886658bd577782e0564ac3949c9e6203 | 54296a6f1431a6d729013b5761a296566f03de1b | refs/heads/main | 2023-06-02T18:28:50.423061 | 2021-06-17T18:28:23 | 2021-06-17T18:28:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,752 | py | from dataclasses import dataclass
import pandas as pd
from pathlib import Path
import geopandas
from datetime import datetime
import json
from typing import Optional
from src.processors import KenyaNonCropProcessor
from src.exporters import KenyaNonCropSentinelExporter
from .base import BaseEngineer, BaseDataInstance
from .pv_kenya import PVKenyaEngineer
@dataclass
class KenyaNonCropDataInstance(BaseDataInstance):
    # Integer class label assigned to "non-crop", or None when the crop-type
    # engineer's class index has not been generated yet.
    crop_int: Optional[int]
    # All instances from this dataset are non-crop by construction.
    is_crop: bool = False
    crop_label: str = "non_crop"
class KenyaNonCropEngineer(BaseEngineer):
    # Dataset names tying this engineer to its exporter/processor outputs.
    sentinel_dataset = KenyaNonCropSentinelExporter.dataset
    dataset = KenyaNonCropProcessor.dataset
def __init__(self, data_folder: Path) -> None:
super().__init__(data_folder)
classes_to_index_path = (
data_folder / "features" / PVKenyaEngineer.dataset / "classes_to_index.json"
)
if classes_to_index_path.exists():
with classes_to_index_path.open("r") as f:
c2i = json.load(f)
max_idx = max(c2i.values())
self.non_crop_index: Optional[int] = max_idx + 1
else:
self.non_crop_index = None
print(
f"Using crop_int value: {self.non_crop_index if self.non_crop_index is not None else 'None'}"
)
@staticmethod
def read_labels(data_folder: Path) -> pd.DataFrame:
pv_kenya = data_folder / "processed" / KenyaNonCropProcessor.dataset / "data.geojson"
assert pv_kenya.exists(), "Kenya Non Crop processor must be run to load labels"
return geopandas.read_file(pv_kenya)
def process_single_file(
self,
path_to_file: Path,
nan_fill: float,
max_nan_ratio: float,
add_ndvi: bool,
add_ndwi: bool,
calculate_normalizing_dict: bool,
start_date: datetime,
days_per_timestep: int,
is_test: bool,
) -> Optional[KenyaNonCropDataInstance]:
da = self.load_tif(path_to_file, days_per_timestep=days_per_timestep, start_date=start_date)
# first, we find the label encompassed within the da
min_lon, min_lat = float(da.x.min()), float(da.y.min())
max_lon, max_lat = float(da.x.max()), float(da.y.max())
overlap = self.labels[
(
(self.labels.lon <= max_lon)
& (self.labels.lon >= min_lon)
& (self.labels.lat <= max_lat)
& (self.labels.lat >= min_lat)
)
]
if len(overlap) == 0:
return None
else:
label_lat = overlap.iloc[0].lat
label_lon = overlap.iloc[0].lon
closest_lon = self.find_nearest(da.x, label_lon)
closest_lat = self.find_nearest(da.y, label_lat)
labelled_np = da.sel(x=closest_lon).sel(y=closest_lat).values
if add_ndvi:
labelled_np = self.calculate_ndvi(labelled_np)
if add_ndwi:
labelled_np = self.calculate_ndwi(labelled_np)
labelled_array = self.maxed_nan_to_num(
labelled_np, nan=nan_fill, max_ratio=max_nan_ratio
)
if (not is_test) and calculate_normalizing_dict:
self.update_normalizing_values(self.normalizing_dict_interim, labelled_array)
if labelled_array is not None:
return KenyaNonCropDataInstance(
label_lat=label_lat,
label_lon=label_lon,
instance_lat=closest_lat,
instance_lon=closest_lon,
labelled_array=labelled_array,
crop_int=self.non_crop_index,
)
else:
return None
| [
"gabriel.tseng@mail.mcgill.ca"
] | gabriel.tseng@mail.mcgill.ca |
bf818d5614f87f280643af66ba624c44ea3b74c5 | 7d9d1a6060f42be44e0e45483a16b00eb0b96abf | /mtefdweb/migrations/0001_initial.py | f81055474ddd6c611b2f863723af86512c9f4195 | [] | no_license | aaugustin/mtefd | 88e50fadb936957721f6dd1f4e2424a2ab4a50b4 | a859fbcd9aa8e5169733f8518fde1e251c45f9fe | refs/heads/master | 2023-07-06T04:24:30.114023 | 2015-10-02T13:34:39 | 2015-10-02T13:34:39 | 24,799,959 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 999 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Funder',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=100)),
('email', models.CharField(max_length=100)),
('perk', models.PositiveSmallIntegerField(choices=[(0, b''), (1, b'Thanks!'), (2, b'Double thanks!'), (3, b'Bronze sponsor'), (4, b'Silver sponsor'), (5, b'Gold sponsor'), (6, b'Platinum sponsor'), (7, b'Diamond sponsor')])),
('appearance', models.CharField(max_length=1, choices=[(b'V', b'Visible'), (b'I', b'Identity-Only'), (b'A', b'Anonymous')])),
],
options={
},
bases=(models.Model,),
),
]
| [
"aymeric.augustin@m4x.org"
] | aymeric.augustin@m4x.org |
709ada493cfba766e485de824e84804f68da4de4 | 09a6d8dbad5b92f93791948b5bf9b75f5cb2e5ce | /tests/pulse/test_pwc_functions.py | b56faa67bad6d475eee2b67e8138544469e64a97 | [
"Apache-2.0"
] | permissive | PennyLaneAI/pennylane | 458efd5d9457e90ada31ca2ef0fb6bb96a24e9a7 | 0843183ff15a013c2622af5e61fea431d18076d3 | refs/heads/master | 2023-09-03T17:00:43.105784 | 2023-09-01T16:15:07 | 2023-09-01T16:15:07 | 129,936,360 | 1,431 | 410 | Apache-2.0 | 2023-09-14T21:30:56 | 2018-04-17T16:45:42 | Python | UTF-8 | Python | false | false | 18,275 | py | # Copyright 2018-2023 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for the convenience functions used in pulsed programming.
"""
# pylint: disable=import-outside-toplevel
import inspect
from functools import partial
import numpy as np
import pytest
import pennylane as qml
from pennylane.pulse.parametrized_hamiltonian import ParametrizedHamiltonian
# error expected to be raised locally - test will pass in CI, where it will be run without jax installed
def test_error_raised_if_jax_not_installed():
"""Test that an error is raised if a convenience function is called without jax installed"""
try:
import jax # pylint: disable=unused-import
pytest.skip()
except ImportError:
with pytest.raises(ImportError, match="Module jax is required"):
qml.pulse.pwc(10)
with pytest.raises(ImportError, match="Module jax is required"):
qml.pulse.pwc_from_function(10, 10)
@pytest.mark.jax
class TestPWC:
"""Unit tests for the pwc function"""
def test_pwc_returns_callable(self):
"""Tests that the pwc function returns a callable with arguments (params, t)"""
c = qml.pulse.pwc(10)
argspec = inspect.getfullargspec(c)
assert callable(c)
assert argspec.args == ["params", "t"]
def test_t_out_of_bounds_returns_0(self):
"""Tests that requesting a value for the pwc function outside the defined window returns 0"""
f_pwc = qml.pulse.pwc(timespan=(1, 3))
constants = np.linspace(0, 12, 13)
assert f_pwc(constants, 1.5) != 0
assert f_pwc(constants, 0) == 0
assert f_pwc(constants, 4) == 0
def test_bins_match_params_array(self):
"""Test the pwc function contains bins matching the array of constants passed as params"""
f_pwc = qml.pulse.pwc(timespan=(1, 3))
constants = np.linspace(0, 12, 13)
y = [float(f_pwc(constants, i)) for i in np.linspace(1, 3, 100)]
assert set(y) == set(constants)
def test_t_input_types(self):
"""Tests that both shapes for input t work when creating a pwc function"""
constants = np.linspace(0, 12, 13)
# should be identical
f1 = qml.pulse.pwc(10)
f2 = qml.pulse.pwc((0, 10))
assert np.all([f1(constants, t) == f2(constants, t) for t in np.linspace(-2, 12, 200)])
assert f1(constants, 10) == 0
assert f2(constants, 10) == 0
assert f1(constants, 0) == constants[0]
assert f2(constants, 0) == constants[0]
# should set t1=1 instead of default t1=0
f3 = qml.pulse.pwc((1, 3))
assert f3(constants, 3) == 0
assert f3(constants, 1) == constants[0]
assert f3(constants, 0) == 0
def test_function_call_is_jittable(self):
"""Test that jax.jit can be used on the callable produced by pwc_from_function"""
import jax
f = qml.pulse.pwc(10)
assert jax.jit(f)([1.2, 2.3], 2) != 0
assert jax.jit(f)([1.2, 2.3], 13) == 0
@pytest.mark.jax
class TestPWC_from_function:
"""Unit tests for the pwc_from_function decorator"""
def test_pwc_from_function_returns_callable(self):
"""Tests that the pwc function returns a callable with arguments (fn), which if
passed in turn returns a callable with arguments (params, t)"""
def f(x):
return x**2
c1 = qml.pulse.pwc_from_function(10, 10)
c2 = c1(f)
argspec1 = inspect.getfullargspec(c1)
argspec2 = inspect.getfullargspec(c2)
assert callable(c1)
assert callable(c2)
assert argspec1.args == ["fn"]
assert argspec2.args == ["params", "t"]
def test_use_as_decorator_returns_callable(self):
"""Test that decorating a function with pwc_from_function returns a callable with arguments (params, t)"""
@qml.pulse.pwc_from_function(9, 10)
def f(param, t):
return t**2 + param
argspec = inspect.getfullargspec(f)
assert callable(f)
assert argspec.args == ["params", "t"]
def test_expected_values_are_returned(self):
"""Test that output values for the pwc version functions and the inital function match each other
when t is one of the input time_bins"""
def f_initial(param, t):
return t**2 + param
f_pwc = qml.pulse.pwc_from_function(timespan=9, num_bins=10)(f_initial)
@qml.pulse.pwc_from_function(timespan=9, num_bins=10)
def f_decorated(param, t):
return t**2 + param
time_bins = np.linspace(0, 9, 10)
for i in time_bins:
# pwc functions should match initial function between times i-1 and i,
# i.e. points just before time i should always match
assert f_initial(2, i) == f_pwc(2, i * 0.99)
assert f_initial(2, i) == f_decorated(
2, i * 0.99
) # 0.99*i because for edge point, f_pwc(i) is 0
@pytest.mark.parametrize("num_bins", [10, 15, 21])
def test_num_bins_is_correct(self, num_bins):
"""Test the pwc function has been divided into the expected number of bins"""
def f_initial(param, t):
return t + param
f_pwc = qml.pulse.pwc_from_function(timespan=9, num_bins=num_bins)(f_initial)
# check that there are only a limited number of unique output values for the pwc function
y = [float(f_pwc(2, i)) for i in np.linspace(0, 9, 1000)]
assert len(set(y)) == num_bins + 1 # all bins plus 0 at the edges
def test_t_out_of_bounds_returns_0(self):
"""Tests that requesting a value for the pwc function outside the defined window returns 0"""
def f_initial(param, t):
return t + param
f_pwc = qml.pulse.pwc_from_function(timespan=(1, 3), num_bins=10)(f_initial)
assert f_pwc(3, 1.5) != 0
assert f_pwc(3, 0) == 0
assert f_pwc(3, 4) == 0
def test_t_input_types(self):
"""Tests that both shapes for input t work when creating a pwc function"""
def f(params, t):
return params[1] * t**2 + params[0]
params = [1.2, 2.3]
# should be identical, t1=0, t2=10
f1 = qml.pulse.pwc_from_function(10, 12)(f)
f2 = qml.pulse.pwc_from_function((0, 10), 12)(f)
assert np.all([f1(params, t) == f2(params, t) for t in np.linspace(-2, 12, 200)])
assert f1(params, 10) == 0
assert f2(params, 10) == 0
assert f1(params, 0) == params[0]
assert f2(params, 0) == params[0]
# should set t1=1 instead of default t1=0
f3 = qml.pulse.pwc_from_function((1, 3), 12)(f)
assert f3(params, 3) == 0
assert f3(params, 1) == f(params, 1)
assert f3(params, 0) == 0
def test_function_call_is_jittable(self):
"""Test that jax.jit can be used on the callable produced by pwc_from_function"""
import jax
@qml.pulse.pwc_from_function((1, 3), 12)
def f(params, t):
return params[1] * t**2 + params[0]
assert jax.jit(f)([1.2, 2.3], 0) == 0
assert jax.jit(f)([1.2, 2.3], 2) != 0
assert jax.jit(f)([1.2, 2.3], 4) == 0
@pytest.mark.jax
class TestIntegration:
"""Test integration of pwc functions with the pulse module."""
def integral_pwc( # pylint:disable = too-many-arguments
self, t1, t2, num_bins, integration_bounds, fn, params, pwc_from_function=False
):
"""Helper function that integrates a pwc function."""
from jax import numpy as jnp
# constants set by array if pwc, constants must be found if pwc_from_function
constants = jnp.array(params)
if pwc_from_function:
time_bins = np.linspace(t1, t2, num_bins)
constants = jnp.array(list(fn(params, time_bins)) + [0])
# get start and end point as indicies, without casting to int
start = num_bins / (t2 - t1) * (integration_bounds[0] - t1)
end = num_bins / (t2 - t1) * (integration_bounds[1] - t1)
# get indices of bins that are completely within the integration window
complete_indices = np.linspace(
int(start) + 1, int(end) - 1, int(end) - int(start) - 1, dtype=int
)
relevant_indices = np.array([i for i in complete_indices if -1 < i < num_bins])
# find area contributed by bins that are completely within the integration window
bin_width = (t2 - t1) / num_bins
main_area = np.sum(constants[relevant_indices] * bin_width)
# if start index is not out of range, add contribution from partial bin at start
if start >= 0:
width = bin_width * 1 - (start - int(start))
main_area += constants[int(start)] * width
# if end index is not out of range, add contribution from partial bin at end
if end < num_bins:
width = bin_width * (end - int(end))
main_area += constants[int(end)] * width
return main_area
def test_parametrized_hamiltonian_with_pwc(self):
"""Test that a pwc function can be used to create a ParametrizedHamiltonian"""
f1 = qml.pulse.pwc((1, 6))
f2 = qml.pulse.pwc((0.5, 3))
H = f1 * qml.PauliX(0) + f2 * qml.PauliY(1)
constants = np.linspace(0, 9, 10)
assert isinstance(H, ParametrizedHamiltonian)
# at t=7 and t=0.2, both terms are 0
assert qml.math.allequal(qml.matrix(H(params=[constants, constants], t=7)), 0)
assert qml.math.allequal(qml.matrix(H(params=[constants, constants], t=0.2)), 0)
# at t=4, only term 1 is non-zero
true_mat = qml.matrix(f1(constants, 4) * qml.PauliX(0), wire_order=[0, 1])
assert qml.math.allequal(qml.matrix(H(params=[constants, constants], t=4)), true_mat)
# at t=0.7, only term 2 is non-zero
true_mat = qml.matrix(f2(constants, 0.7) * qml.PauliY(1), wire_order=[0, 1])
assert qml.math.allequal(qml.matrix(H(params=[constants, constants], t=0.7)), true_mat)
# at t=1.5, both are non-zero and output is as expected
true_mat = qml.matrix(f1(constants, 1.5) * qml.PauliX(0), wire_order=[0, 1]) + qml.matrix(
f2(constants, 1.5) * qml.PauliY(1), wire_order=[0, 1]
)
assert qml.math.allequal(qml.matrix(H(params=[constants, constants], t=1.5)), true_mat)
@pytest.mark.slow
def test_qnode_pwc(self):
"""Test that the evolution of a parametrized hamiltonian defined with a pwc function be executed on a QNode."""
import jax
f1 = qml.pulse.pwc((1, 6))
f2 = qml.pulse.pwc((0.5, 3))
H = f1 * qml.PauliX(0) + f2 * qml.PauliY(1)
t = (0, 4)
dev = qml.device("default.qubit", wires=2)
@qml.qnode(dev, interface="jax")
def circuit(params):
qml.evolve(H)(params=params, t=t)
return qml.expval(qml.PauliZ(0) @ qml.PauliZ(1))
@qml.qnode(dev, interface="jax")
def true_circuit(params):
# ops X0 and Y1 are commuting - time evolution of f1*X0 + f2*X1 is exp(-i*F1*X0)exp(-i*F2*Y1)
# Where Fj = integral of fj(p,t)dt over evolution time t
coeff1 = partial(self.integral_pwc, 1, 6, 10, (0, 4), f1)
coeff2 = partial(self.integral_pwc, 0.5, 3, 10, (0, 4), f2)
qml.prod(
qml.exp(qml.PauliX(0), -1j * coeff1(params[0])),
qml.exp(qml.PauliY(1), -1j * coeff2(params[1])),
)
return qml.expval(qml.PauliZ(0) @ qml.PauliZ(1))
constants = np.linspace(0, 9, 10)
params = [constants, constants]
assert qml.math.allclose(circuit(params), true_circuit(params), atol=5e-3)
assert qml.math.allclose(
jax.grad(circuit)(params), jax.grad(true_circuit)(params), atol=5e-3
)
def test_qnode_pwc_jit(self):
"""Test that the evolution of a parametrized hamiltonian defined with a pwc function can executed on
a QNode using jax-jit, and the results don't differ from execution without jitting."""
import jax
f1 = qml.pulse.pwc((1, 6))
f2 = qml.pulse.pwc((0.5, 3))
H = f1 * qml.PauliX(0) + f2 * qml.PauliY(1)
t = (0, 4)
dev = qml.device("default.qubit", wires=2)
@qml.qnode(dev, interface="jax")
def circuit(params):
qml.evolve(H)(params=params, t=t)
return qml.expval(qml.PauliZ(0) @ qml.PauliZ(1))
@jax.jit
@qml.qnode(dev, interface="jax")
def jitted_circuit(params):
qml.evolve(H)(params=params, t=t)
return qml.expval(qml.PauliZ(0) @ qml.PauliZ(1))
constants = np.linspace(0, 9, 10)
params = [constants, constants]
assert qml.math.allclose(jitted_circuit(params), circuit(params), atol=5e-3)
assert qml.math.allclose(
jax.grad(jitted_circuit)(params), jax.grad(circuit)(params), atol=5e-3
)
def test_parametrized_hamiltonian_with_pwc_from_function(self):
"""Test that a function decorated by pwc_from_function can be used to create a ParametrizedHamiltonian"""
@qml.pulse.pwc_from_function((2, 5), 20)
def f1(params, t):
return params + t
@qml.pulse.pwc_from_function((3, 7), 10)
def f2(params, t):
return params[0] + params[1] * t**2
H = f1 * qml.PauliX(0) + f2 * qml.PauliY(1)
params = [1.2, [2.3, 3.4]]
assert isinstance(H, ParametrizedHamiltonian)
# at t=8 and t=1, both terms are 0
assert qml.math.allequal(qml.matrix(H(params, t=8)), 0)
assert qml.math.allequal(qml.matrix(H(params, t=1)), 0)
# at t=2.5, only term 1 is non-zero
true_mat = qml.matrix(f1(params[0], 2.5) * qml.PauliX(0), wire_order=[0, 1])
assert qml.math.allequal(qml.matrix(H(params, t=2.5)), true_mat)
# # at t=6, only term 2 is non-zero
true_mat = qml.matrix(f2(params[1], 6) * qml.PauliY(1), wire_order=[0, 1])
assert qml.math.allequal(qml.matrix(H(params, t=6)), true_mat)
#
# # at t=4, both are non-zero and output is as expected
true_mat = qml.matrix(f1(params[0], 4) * qml.PauliX(0), wire_order=[0, 1]) + qml.matrix(
f2(params[1], 4) * qml.PauliY(1), wire_order=[0, 1]
)
assert qml.math.allequal(qml.matrix(H(params, t=4)), true_mat)
def test_qnode_pwc_from_function(self):
"""Test that the evolution of a ParametrizedHamiltonian defined with a function decorated by pwc_from_function
can be executed on a QNode."""
import jax
@qml.pulse.pwc_from_function((2, 5), 20)
def f1(params, t):
return params + t
@qml.pulse.pwc_from_function((3, 7), 10)
def f2(params, t):
return params[0] + params[1] * t**2
H = f1 * qml.PauliX(0) + f2 * qml.PauliY(1)
t = (1, 4)
dev = qml.device("default.qubit", wires=2)
@qml.qnode(dev, interface="jax")
def circuit(params):
qml.evolve(H)(params=params, t=t)
return qml.expval(qml.PauliZ(0) @ qml.PauliZ(1))
@qml.qnode(dev, interface="jax")
def true_circuit(params):
# ops X0 and Y1 are commuting - time evolution of f1*X0 + f2*X1 is exp(-i*F1*X0)exp(-i*F2*Y1)
# Where Fj = integral of fj(p,t)dt over evolution time t
coeff1 = partial(self.integral_pwc, 2, 5, 20, (1, 4), f1)
coeff2 = partial(self.integral_pwc, 3, 7, 10, (1, 4), f2)
qml.prod(
qml.exp(qml.PauliX(0), -1j * coeff1(params[0], pwc_from_function=True)),
qml.exp(qml.PauliY(1), -1j * coeff2(params[1], pwc_from_function=True)),
)
return qml.expval(qml.PauliZ(0) @ qml.PauliZ(1))
params = [1.2, [2.3, 3.4]]
circuit_grad_flattened, _ = jax.flatten_util.ravel_pytree(jax.grad(circuit)(params))
true_grad_flattened, _ = jax.flatten_util.ravel_pytree(jax.grad(true_circuit)(params))
assert qml.math.allclose(circuit(params), true_circuit(params), atol=5e-2)
assert qml.math.allclose(circuit_grad_flattened, true_grad_flattened, atol=5e-2)
def test_qnode_pwc_from_function_jit(self):
"""Test that the evolution of a ParametrizedHamiltonian defined with a function decorated by pwc_from_function
can be executed on a QNode using jax-jit, and the results don't differ from an execution without jitting.
"""
import jax
@qml.pulse.pwc_from_function((2, 5), 20)
def f1(params, t):
return params + t
@qml.pulse.pwc_from_function((3, 7), 10)
def f2(params, t):
return params[0] + params[1] * t**2
H = f1 * qml.PauliX(0) + f2 * qml.PauliY(1)
t = (1, 4)
dev = qml.device("default.qubit", wires=2)
@qml.qnode(dev, interface="jax")
def circuit(params):
qml.evolve(H)(params=params, t=t)
return qml.expval(qml.PauliZ(0) @ qml.PauliZ(1))
@jax.jit
@qml.qnode(dev, interface="jax")
def jitted_circuit(params):
qml.evolve(H)(params=params, t=t)
return qml.expval(qml.PauliZ(0) @ qml.PauliZ(1))
params = [1.2, [2.3, 3.4]]
circuit_grad_flattened, _ = jax.flatten_util.ravel_pytree(jax.grad(circuit)(params))
jitted_grad_flattened, _ = jax.flatten_util.ravel_pytree(jax.grad(jitted_circuit)(params))
assert qml.math.allclose(jitted_circuit(params), circuit(params), atol=5e-2)
assert qml.math.allclose(circuit_grad_flattened, jitted_grad_flattened, atol=5e-2)
| [
"noreply@github.com"
] | PennyLaneAI.noreply@github.com |
772a363f4897ff0d1fb383ba03270c69caf9beef | 23611933f0faba84fc82a1bc0a85d97cf45aba99 | /google-cloud-sdk/lib/surface/sql/flags/list.py | 7002af2f8b9fb14f62ee5bb4319d675f8b877141 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | KaranToor/MA450 | 1f112d1caccebdc04702a77d5a6cee867c15f75c | c98b58aeb0994e011df960163541e9379ae7ea06 | refs/heads/master | 2021-06-21T06:17:42.585908 | 2020-12-24T00:36:28 | 2020-12-24T00:36:28 | 79,285,433 | 1 | 1 | Apache-2.0 | 2020-12-24T00:38:09 | 2017-01-18T00:05:44 | Python | UTF-8 | Python | false | false | 1,913 | py | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Lists customizable MySQL flags for Google Cloud SQL instances."""
from googlecloudsdk.calliope import base
class _BaseList(object):
"""Lists customizable MySQL flags for Google Cloud SQL instances."""
def Collection(self):
return 'sql.flags'
def Run(self, unused_args):
"""Lists customizable MySQL flags for Google Cloud SQL instances.
Args:
unused_args: argparse.Namespace, The arguments that this command was
invoked with.
Returns:
A dict object that has the list of flag resources if the command ran
successfully.
Raises:
HttpException: A http error response was received while executing api
request.
ToolException: An error other than http error occured while executing the
command.
"""
sql_client = self.context['sql_client']
sql_messages = self.context['sql_messages']
result = sql_client.flags.List(sql_messages.SqlFlagsListRequest())
return iter(result.items)
@base.ReleaseTracks(base.ReleaseTrack.GA)
class List(_BaseList, base.ListCommand):
"""Lists customizable MySQL flags for Google Cloud SQL instances."""
pass
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class ListBeta(_BaseList, base.ListCommand):
"""Lists customizable MySQL flags for Google Cloud SQL instances."""
pass
| [
"toork@uw.edu"
] | toork@uw.edu |
6a0caeb63c63789f851099fc1d4eaa48c2e5701d | b429842319179d4df8586d5cb8287edd8e0bd3c8 | /escuela/core/migrations/0001_initial.py | 484e9f1903757b29db1cbd31413c523d96563c5f | [] | no_license | escuelagithub/escuelagithub | c5bd1be8bcb5b5fd7c1720415dd6d10d7816e0bf | 1e98619935ca3d772f0b77fdcc1de426f6a84729 | refs/heads/master | 2023-04-08T17:18:54.529244 | 2021-04-20T18:55:38 | 2021-04-20T18:55:38 | 359,922,424 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,080 | py | # Generated by Django 3.2 on 2021-04-08 19:21
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='curso',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('titulo', models.CharField(max_length=50)),
('descripcion', models.TextField()),
('disciplina', models.CharField(max_length=20)),
('avatar', models.ImageField(blank=True, null=True, upload_to='core')),
('precio', models.DecimalField(decimal_places=2, default=0, max_digits=5)),
('calificacion', models.DecimalField(decimal_places=1, default=0, max_digits=2)),
('alumnos', models.IntegerField(default=0)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
),
]
| [
"gjangoinminutes@gmail.com"
] | gjangoinminutes@gmail.com |
5f0228e362c4758a1fdd7c1d13f7c08a4b579228 | 3f83a4411545140ef01e73ec037fdc94fdcb8fa0 | /mwbase/about.py | 77e81dc0ba13a40349274c1f5178b074611307ea | [
"MIT"
] | permissive | mediawiki-utilities/python-mwbase | 3a3759deb171e5b29589ff873e3c3c972a8daf78 | 5a7255fdbd2e9c5d844b48e7b6a293f5ffb17e6e | refs/heads/master | 2021-01-22T23:24:08.677862 | 2020-06-01T15:23:17 | 2020-06-01T15:23:17 | 85,629,682 | 2 | 1 | MIT | 2020-06-01T15:23:23 | 2017-03-20T21:34:42 | Python | UTF-8 | Python | false | false | 384 | py | __name__ = "mwbase"
__version__ = "0.1.4"
__author__ = "Aaron Halfaker"
__author_email__ = "aaron.halfaker@gmail.com"
__description__ = "Data structure normalization for MediaWiki's Wikibase"
__license__ = "MIT"
__url__ = "https://github.com/mediawiki-utilities/python-mwbase"
all = [__name__, __version__, __author__, __author_email__, __description__,
__license__, __url__]
| [
"aaron.halfaker@gmail.com"
] | aaron.halfaker@gmail.com |
9ee0bd4abc310a7ff56057247faa835681db1e26 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/9/wid.py | 466d948d44bf1b86b08a23550d7cda72df0e2b60 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'wID':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"juliettaylorswift@gmail.com"
] | juliettaylorswift@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.