| code (string, lengths 2–1.05M) | repo_name (string, lengths 5–104) | path (string, lengths 4–251) | language (string, 1 class) | license (string, 15 classes) | size (int32, 2–1.05M) |
|---|---|---|---|---|---|
#!/usr/bin/env python
from pysnmp.entity.rfc3413.oneliner import cmdgen
import shlex
import subprocess
import re
import os
import sys
import smtplib
from devices.models import AP as AccessPoint
## This file is used to update the access point inventory data. Use the
## updateAccessPoints function to run the update. The function
## updateStatus will only check if the APs are up or down, and send an
## email report on APs that are currently down or that have recovered.
## Do an snmpwalk using cmdgen from PySNMP to get data about each AP.
## Takes a dictionary of OIDs and a list of controller IPs.
def snmpwalk(OIDs, controllers):
APs = dict()
cmdGen = cmdgen.CommandGenerator()
for key in OIDs:
for controller in controllers:
errorIndication, errorStatus, errorIndex, varBindTable = cmdGen.bulkCmd(
cmdgen.CommunityData('spa'),
cmdgen.UdpTransportTarget((controller, 161)),
0, 1000, OIDs[key]
)
for varBindTableRow in varBindTable:
for name, val in varBindTableRow:
## make a unique identifier for each AP
num = str(name)[len(OIDs["mac"]):].strip('.')
                    try:
                        if key not in APs[num]:
                            APs[num][key] = str(val.prettyPrint())
                    except KeyError:
                        # first time seeing this AP
                        APs[num] = {key: str(val.prettyPrint())}
return APs
## Add or update all access points using Django.
def updateAccessPoints(path, AP_OIDs, controller_IPs):
APs = snmpwalk(AP_OIDs, controller_IPs)
for AP in APs:
if APs[AP]["serial"] != "0":
            ## make a new AP object if necessary
            try:
                new_AP = AccessPoint(serialno=APs[AP]["serial"], ip=APs[AP]["ip"])
                new_AP.save()
            except Exception:
                # the AP already exists; fall through to the update below
                pass
## Update the AP's data
update_AP = AccessPoint.objects.get(serialno=APs[AP]["serial"], autoupdate=1)
update_AP.ip = APs[AP]["ip"]
update_AP.mac = APs[AP]["mac"].lower()[2:]
update_AP.name = APs[AP]["name"]
update_AP.model = APs[AP]["model"]
update_AP.save()
## Get the names of all the access points which are currently up and connected to
## a controller. Compare to the names in the database to find the APs that are down.
def updateStatus(controller_IPs, status_oid, email):
AP_command = []
for controller in controller_IPs:
AP_command.append("snmpwalk -v2c -c spa " + controller + " " + status_oid)
# Get the names of the APs connected to each controller.
# Compare to APs stored in the database to determine which are down and
# which have recovered.
upAPs = []
for cmd in AP_command:
upAPs.extend(runCommand(cmd))
storedAPs = AccessPoint.objects.all()
downAPs = []
recoveredAPs = []
for ap in storedAPs:
if ap.name not in upAPs:
ap.laststatus = "down"
            if ap.checkstatus:
downAPs.append(ap)
else:
            if ap.laststatus == "down" and ap.checkstatus:
recoveredAPs.append(ap)
ap.laststatus = "up"
ap.save()
# Send emails about down or recovered access points.
if len(downAPs) > 0:
message = '\nThe following access points are not responding:\n'
subject = 'APs are not responding'
sendEmail(message, subject, downAPs, email)
if len(recoveredAPs) > 0:
message = '\nThe following access points were down but have recovered:\n'
subject = 'APs have recovered'
sendEmail(message, subject, recoveredAPs, email)
## Takes a command string "com", runs it, and returns a list of AP names.
def runCommand(com):
    args = shlex.split(com)  # split com into individual arguments
    p = subprocess.Popen(args, stdout=subprocess.PIPE)  # run command, capture stdout
    # communicate() returns a tuple (stdout, stderr); we only want stdout
    stdout = p.communicate()[0]
#clean the data
stdout = stdout.replace("SNMPv2-SMI::enterprises.","")
stdout = stdout.replace("Hex-STRING:","")
stdout = stdout.replace("STRING:","")
stdout = stdout.replace("IpAddress:","")
stdout = stdout.replace("\"", "")
stdout = stdout.replace(" ", "")
#split stdout into lines
stdoutLines = stdout.split("\n")
stdoutLines = stdoutLines[:-1] #removes last empty row
#parse stdout into list
names = []
for line in stdoutLines:
names.append(line.split("=")[1])
return names
## Send an email report on access point status.
def sendEmail(messageBody, subject, APs, email):
for ap in APs:
messageBody += "\t" + ap.ip + "\t" + ap.name + "\n"
toHeaderBuild = []
for to in email["to"]:
toHeaderBuild.append("<" + to + ">")
msg = "From: <" + email["from"] + "> \nTo: " + ', '.join(toHeaderBuild) + " \nSubject: " + subject + " \n" + messageBody
s = smtplib.SMTP(email["server"])
s.sendmail(email["from"], email["to"], msg)
s.quit()
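## A hedged usage sketch (not part of the original module). The OID strings
## below are placeholders that depend on the controller vendor's MIB; the
## email dict keys ("from", "to", "server") match what sendEmail() reads above.
##
## AP_OIDs = {"mac": "<mac OID>", "ip": "<ip OID>", "name": "<name OID>",
##            "serial": "<serial OID>", "model": "<model OID>"}
## controller_IPs = ["192.0.2.10", "192.0.2.11"]
## email = {"from": "noc@example.com", "to": ["admin@example.com"],
##          "server": "localhost"}
## updateAccessPoints("", AP_OIDs, controller_IPs)
## updateStatus(controller_IPs, "<status OID>", email)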
| lindseypack/NIM | code/ap/apInv.py | Python | mit | 5,106 |
import json
from rest_framework.renderers import JSONRenderer
class CustomJSONRenderer(JSONRenderer):
charset = 'utf-8'
object_label = 'object'
pagination_object_label = 'objects'
pagination_count_label = 'count'
def render(self, data, media_type=None, renderer_context=None):
if data.get('results', None) is not None:
return json.dumps({
self.pagination_object_label: data['results'],
self.pagination_count_label: data['count']
})
# If the view throws an error (such as the user can't be authenticated
# or something similar), `data` will contain an `errors` key. We want
# the default JSONRenderer to handle rendering errors, so we need to
# check for this case.
elif data.get('errors', None) is not None:
return super(CustomJSONRenderer, self).render(data)
return json.dumps({
self.object_label: data
})
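# A minimal wiring sketch (an assumption, not from this file): register the
# renderer globally in Django settings; the dotted path follows this file's
# location at apps/core/renderers.py.
#
# REST_FRAMEWORK = {
#     'DEFAULT_RENDERER_CLASSES': ('apps.core.renderers.CustomJSONRenderer',),
# }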
| jsbUSMC/django-edge-api | apps/core/renderers.py | Python | mit | 977 |
"""Support for Melnor RainCloud sprinkler water timer."""
from __future__ import annotations
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_MONITORED_CONDITIONS
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.icon import icon_for_battery_level
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import (
DATA_RAINCLOUD,
ICON_MAP,
SENSORS,
UNIT_OF_MEASUREMENT_MAP,
RainCloudEntity,
)
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_MONITORED_CONDITIONS, default=list(SENSORS)): vol.All(
cv.ensure_list, [vol.In(SENSORS)]
)
}
)
def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up a sensor for a raincloud device."""
raincloud = hass.data[DATA_RAINCLOUD].data
sensors = []
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
if sensor_type == "battery":
sensors.append(RainCloudSensor(raincloud.controller.faucet, sensor_type))
else:
# create a sensor for each zone managed by a faucet
for zone in raincloud.controller.faucet.zones:
sensors.append(RainCloudSensor(zone, sensor_type))
add_entities(sensors, True)
class RainCloudSensor(RainCloudEntity, SensorEntity):
"""A sensor implementation for raincloud device."""
@property
def native_value(self):
"""Return the state of the sensor."""
return self._state
@property
def native_unit_of_measurement(self):
"""Return the units of measurement."""
return UNIT_OF_MEASUREMENT_MAP.get(self._sensor_type)
def update(self):
"""Get the latest data and updates the states."""
_LOGGER.debug("Updating RainCloud sensor: %s", self._name)
if self._sensor_type == "battery":
self._state = self.data.battery
else:
self._state = getattr(self.data, self._sensor_type)
@property
def icon(self):
"""Icon to use in the frontend, if any."""
if self._sensor_type == "battery" and self._state is not None:
return icon_for_battery_level(
battery_level=int(self._state), charging=False
)
return ICON_MAP.get(self._sensor_type)
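# A hedged configuration sketch (YAML for configuration.yaml, not part of this
# module); "battery" is the one condition name visible in this file, the rest
# of the valid names come from SENSORS in the package __init__.
#
# sensor:
#   - platform: raincloud
#     monitored_conditions:
#       - battery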
| rohitranjan1991/home-assistant | homeassistant/components/raincloud/sensor.py | Python | mit | 2,656 |
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.core.plot.memory Contains the MemoryPlotter class, used for creating plots of the memory consumption
# of a SKIRT simulation as a function of time.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import standard modules
import numpy as np
import matplotlib.pyplot as plt
# Import the relevant PTS classes and modules
from ..basics.map import Map
from .plotter import Plotter
from ..tools.logging import log
from ..tools import filesystem as fs
# -----------------------------------------------------------------
class MemoryPlotter(Plotter):
"""
This class ...
"""
def __init__(self):
"""
The constructor ...
:return:
"""
# Call the constructor of the base class
super(MemoryPlotter, self).__init__()
# -- Attributes --
# A data structure to store the memory (de)allocation information
self.allocation = None
# -----------------------------------------------------------------
@staticmethod
def default_input():
"""
This function ...
:return:
"""
return "memory.dat"
# -----------------------------------------------------------------
def prepare_data(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Preparing the input data into plottable format...")
# Get the number of processes
ranks = np.unique(self.table["Process rank"])
assert len(ranks) == max(ranks) + 1
processes = len(ranks)
# Initialize the data structure to contain the memory usage information in plottable format
self.data = [Map({"times": [], "memory": []}) for i in range(processes)]
# Loop over the different entries in the memory table
for i in range(len(self.table)):
# Get the process rank
rank = self.table["Process rank"][i]
# Get the time and memory usage
time = self.table["Simulation time"][i]
memory = self.table["Memory usage"][i]
# Add the data point to the data structure
self.data[rank].times.append(time)
self.data[rank].memory.append(memory)
# Check whether (de)allocation information is present in the memory table
if "Array (de)allocation" in self.table.colnames:
# Initialize the data structure for plotting the memory usage of the root process and the memory
# allocation curve
self.allocation = Map({"times": [], "allocation": [], "cumulative": []})
# Get the mask covering entries that do not contain array (de)allocation information
mask = self.table["Array (de)allocation"].mask
# Check whether the first entry of the table corresponds to the root process
assert self.table["Process rank"][0] == 0
# Create a variable to store the cumulative sum of allocated memory
cumulative_sum = 0.0
# Loop over the different entries in the memory table
for i in range(len(self.table)):
# Get the process rank
rank = self.table["Process rank"][i]
# Only add the contributions from the root process
if rank > 0: break
# If the entry is masked because it does not contain memory allocation information, skip it
if mask[i]: continue
# Get the time and the amount of (de)allocated memory
time = self.table["Simulation time"][i]
allocation = self.table["Array (de)allocation"][i]
# Add the allocated memory to the sum
cumulative_sum += allocation
# Add the data point to the data structure
self.allocation.times.append(time)
self.allocation.allocation.append(allocation)
self.allocation.cumulative.append(cumulative_sum)
# -----------------------------------------------------------------
def plot(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Making the plots...")
# Make a plot of the memory usage as a function of time
self.plot_memory()
# Make a plot of the memory (de)allocation information, if present
if self.allocation is not None: self.plot_allocation()
# -----------------------------------------------------------------
def plot_memory(self):
"""
This function ...
:return:
"""
# Determine the path to the plot file
plot_path = fs.join(self.output_path, "memory.pdf")
# Initialize figure
plt.figure()
plt.clf()
# Loop over the different processes
for rank in range(len(self.data)):
# Name of the current process
process = "P" + str(rank)
# Plot the memory usage
plt.plot(self.data[rank].times, self.data[rank].memory, label=process)
# Set the axis labels
plt.xlabel("Time (s)", fontsize='large')
plt.ylabel("Memory usage (GB)", fontsize='large')
# Set the plot title
plt.title("Memory consumption")
# Set the legend
if len(self.data) > 16: plt.legend(loc='upper center', ncol=8, bbox_to_anchor=(0.5, -0.1), prop={'size': 8})
else: plt.legend(loc='lower right', ncol=4, prop={'size': 8})
# Save the figure
plt.savefig(plot_path, bbox_inches='tight', pad_inches=0.25)
plt.close()
# -----------------------------------------------------------------
def plot_allocation(self):
"""
This function ...
:return:
"""
# Determine the path to the plot file
plot_path = fs.join(self.output_path, "allocation.pdf")
# Initialize figure
plt.figure()
plt.clf()
# Plot the memory usage of the root process
plt.plot(self.data[0].times, self.data[0].memory, label="total memory usage")
# Plot the memory allocation of the root process
plt.step(self.allocation.times, self.allocation.cumulative, where="post", linestyle="--", label="allocated array memory")
# Set the axis labels
plt.xlabel("Time (s)", fontsize='large')
plt.ylabel("Memory usage (GB)", fontsize='large')
# Set the plot title
plt.title("Memory (de)allocation")
# Set the legend
plt.legend(loc='lower right', prop={'size': 8})
# Save the figure
plt.savefig(plot_path, bbox_inches='tight', pad_inches=0.25)
plt.close()
# -----------------------------------------------------------------
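# A hedged usage sketch: the run/entry-point signature belongs to the Plotter
# base class (not shown here) and is assumed; conceptually the plotter loads
# the "memory.dat" table, then calls prepare_data() and plot().
#
# plotter = MemoryPlotter()
# plotter.run(input_path="memory.dat", output_path="plots")  # hypothetical signature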
| Stargrazer82301/CAAPR | CAAPR/CAAPR_AstroMagic/PTS/pts/core/plot/memory.py | Python | mit | 7,242 |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_propeller_demo.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
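# Typical invocations from the project root (standard Django management
# commands; nothing here is specific to this project):
#   python manage.py migrate
#   python manage.py runserver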
| tfroehlich82/django-propeller | manage.py | Python | mit | 819 |
'''
A condition
'''
from base import Base
from compares import const
class ComparisonMixin(object):
'''
Compare two values with a comparison utility
to denote if a change has validated.
'''
def compare(self, a, b, ctype=None):
'''
        compare `a` against `b` for a comparison of `ctype`;
        by default ctype will compare for an exact match
'''
if ctype is None:
ctype = const.EXACT
# internal importer for core.compares.simple.
Comp = self.get_comparison_class(ctype)
# new class of
comp = Comp(self)
# perform comparison
        return comp.match(a, b)
def get_comparison_class(self, compare):
'''
Return the compare class by string
'''
m = __import__('core.compares.simple', fromlist=[compare])
# print 'module', m
# print 'compare', compare
k = getattr(m, compare)
return k
class Condition(Base, ComparisonMixin):
'''
    A condition perpetuates changes of an object based upon
    rules applied at configuration.
'''
def __init__(self, node, attr, value=None, valid=None):
'''
A condition requires
a node (Node|String|iterable),
the attribute to monitor (String),
a value to validate condition.
Optionally `valid` callback when the condition is met
'''
self.watch = node
self.field = attr
self.target = value
self._valid_cb = valid
def valid(self):
'''
Is this condition valid
'''
vs = self._validate()
for node in vs:
val = vs[node]
            if val is False: return False
return True
def get_nodes(self):
'''
return a list of Nodes retrieved from the machine using the
`watch` attr. Each item in the `watch` iterable will be
parsed into a Node type.
'''
if isinstance(self.watch, (tuple, list,) ) is not True:
# create iterable
return [self.watch]
# is iterable
return self.watch
def _validate(self, nodes=None, field=None, ctype=None):
'''
validate the condition against the assigned node.
Returns boolean
        Provide nodes as a node, a list of nodes or a string for
        network acquisition.
        ctype defines the compare utility to use for validation
'''
nodes = nodes or self.get_nodes()
# attr of the node to inspect
field = field or self.field
# the value to target.
value = self.target
        if len(nodes) == 0:
            # report the watch target as failed so valid() sees a False
            # entry (keeps the return type a dict, as the callers expect)
            return {self.watch: False}
        r = {}
# print 'nodes', nodes
for node in nodes:
# current value
v = node.get(field)
# print 'node:', v, 'cache', cv, 'ctype', ctype
c = self.compare(v, value, ctype)
r.update({ node: c })
# import pdb;pdb.set_trace()
return r
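# A minimal usage sketch (not from this file). Any object exposing a
# get(field) method can stand in for a Node here; the real Node type lives
# elsewhere in this package.
#
# class FakeNode(object):
#     def get(self, field):
#         return 42
#
# cond = Condition(FakeNode(), 'level', 42)
# cond.valid()  # True: the node's 'level' matches the target exactly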
| Strangemother/python-state-machine | scratch/machine_2/core/conditions.py | Python | mit | 3,066 |
# -*- coding: utf-8 -*-
#
# b3j0f.sync documentation build configuration file, created by
# sphinx-quickstart on Sun Jun 14 17:29:14 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
from b3j0f.sync import __version__
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.pngmath',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
source_suffix = ['.rst']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'b3j0f.sync'
copyright = u'2015, b3j0f'
author = u'b3j0f'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'b3j0fsyncdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'b3j0fsync.tex', u'b3j0f.sync Documentation',
u'b3j0f', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'b3j0fsync', u'b3j0f.sync Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'b3j0fsync', u'b3j0f.sync Documentation',
author, 'b3j0fsync', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
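# To build the HTML docs with this configuration (standard Sphinx usage; the
# build output directory is an assumption):
#   sphinx-build -b html docs/source docs/build/html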
| b3j0f/sync | docs/source/conf.py | Python | mit | 11,487 |
from google.appengine.api import mail
import json
import webapp2
class SendEmail(webapp2.RequestHandler):
def post(self):
to = self.request.get("to")
user_name = self.request.get("name")
user_address = self.request.get("email")
subject = self.request.get("subject")
body = self.request.get("message")
        behalf_of = user_name + " <" + user_address + ">"
        sender_address = "CB Contact <contact@codingbarrier.appspotmail.com>"
if "<anthony@codebarrier.com>" not in to:
if "<mail@anthonybarrera.com>" not in to:
return
        mail.send_mail(sender=sender_address,
                       to=to,
                       # cc=behalf_of,
                       reply_to=behalf_of,
                       subject=subject + " | " + user_name + " | " + user_address,
                       body=body,
                       headers={"On-Behalf-Of": behalf_of})
self.response.out.write(json.dumps({"done":"true"}))
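# A hedged wiring sketch (an assumption, not from this file): the handler is
# presumably mounted on a webapp2 WSGI application, e.g.
#
# app = webapp2.WSGIApplication([('/send', SendEmail)], debug=False)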
| ZenBarrier/Coding-Barrier | contact.py | Python | mit | 976 |
from django.shortcuts import render
from django.conf.urls import patterns, url
from django.core.urlresolvers import reverse_lazy
from django.views.generic import TemplateView
from django.contrib.auth.decorators import login_required
from edamame import base, utils, generic
from . import models
class SiteViews(base.Views):
def index(self, request):
"""view function
"""
return render(request, 'index.html')
test_page = utils.to_method(
TemplateView.as_view(template_name='test_page.html'))
def get_urls(self):
urlpatterns = patterns(
'',
url(r'^$', self.wrap_view(self.index), name='index'),
url(r'^test_page$',
self.wrap_view(self.test_page), name='test_page'),
)
return urlpatterns
site_views = SiteViews()
class NoteViews(generic.ModelViews):
model = models.Note
success_url = reverse_lazy('note:index')
note_views = NoteViews()
class MembersOnlyViews(base.Views):
members_only = utils.to_method(render, template_name='members_only.html')
view_decorators = (
(login_required, (), {'login_url': 'auth:login'}),
)
def get_urls(self):
urlpatterns = patterns(
'',
url(r'^$', self.wrap_view(self.members_only), name='members_only'),
)
return urlpatterns
members_only_views = MembersOnlyViews()
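# A hedged URLconf sketch (an assumption, not from this file): the view
# collections expose get_urls(), so the project urls.py presumably includes
# them along these lines (old-style include(), matching the Django version
# this code targets):
#
# urlpatterns = patterns(
#     '',
#     url(r'^', include(site_views.get_urls())),
#     url(r'^note/', include(note_views.get_urls(), namespace='note')),
# )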
| tokibito/django-edamame | example/note/views.py | Python | mit | 1,407 |
import matplotlib.pyplot as plt
import numpy as np
from phuzzy.mpl import TruncNorm
def test_truncnorm():
alpha0 = [0, 2]
# alpha1 = [2]
p = TruncNorm(alpha0=alpha0, alpha1=None, number_of_alpha_levels=7)
print(p)
print(p.df)
print(p.df.values.tolist())
ref = [[0.0, 0.0, 2.0], [0.16666666666666666, 0.36898774621220265, 1.6310122537877976],
[0.3333333333333333, 0.505893899432985, 1.4941061005670149], [0.5, 0.6075291624853785, 1.3924708375146215],
[0.6666666666666666, 0.6998279866511387, 1.3001720133488615],
[0.8333333333333333, 0.7987198538648325, 1.2012801461351676], [1.0, 1.0, 1.0]]
assert np.allclose(p.df.values.tolist(), ref)
def plot():
alpha0 = [0, 2]
p = TruncNorm(alpha0=alpha0, alpha1=None, number_of_alpha_levels=17, std=(alpha0[1] - alpha0[0]) / 6.)
p.plot(show=True)
plt.show()
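# These checks are usually collected by pytest from the tests/ directory:
#   pytest tests/test_truncnorm.py
# plot() is a manual helper and is not picked up as a test (no "test_" prefix).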
| lepy/phuzzy | tests/test_truncnorm.py | Python | mit | 885 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_get_request(
location: str,
publisher_name: str,
offer: str,
skus: str,
version: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions/{version}')
path_format_arguments = {
"location": _SERIALIZER.url("location", location, 'str'),
"publisherName": _SERIALIZER.url("publisher_name", publisher_name, 'str'),
"offer": _SERIALIZER.url("offer", offer, 'str'),
"skus": _SERIALIZER.url("skus", skus, 'str'),
"version": _SERIALIZER.url("version", version, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_request(
location: str,
publisher_name: str,
offer: str,
skus: str,
subscription_id: str,
*,
expand: Optional[str] = None,
top: Optional[int] = None,
orderby: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions')
path_format_arguments = {
"location": _SERIALIZER.url("location", location, 'str'),
"publisherName": _SERIALIZER.url("publisher_name", publisher_name, 'str'),
"offer": _SERIALIZER.url("offer", offer, 'str'),
"skus": _SERIALIZER.url("skus", skus, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
if expand is not None:
query_parameters['$expand'] = _SERIALIZER.query("expand", expand, 'str')
if top is not None:
query_parameters['$top'] = _SERIALIZER.query("top", top, 'int')
if orderby is not None:
query_parameters['$orderby'] = _SERIALIZER.query("orderby", orderby, 'str')
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_offers_request(
location: str,
publisher_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers')
path_format_arguments = {
"location": _SERIALIZER.url("location", location, 'str'),
"publisherName": _SERIALIZER.url("publisher_name", publisher_name, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_publishers_request(
location: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers')
path_format_arguments = {
"location": _SERIALIZER.url("location", location, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_list_skus_request(
location: str,
publisher_name: str,
offer: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-12-01"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus')
path_format_arguments = {
"location": _SERIALIZER.url("location", location, 'str'),
"publisherName": _SERIALIZER.url("publisher_name", publisher_name, 'str'),
"offer": _SERIALIZER.url("offer", offer, 'str'),
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class VirtualMachineImagesOperations(object):
"""VirtualMachineImagesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2020_12_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def get(
self,
location: str,
publisher_name: str,
offer: str,
skus: str,
version: str,
**kwargs: Any
) -> "_models.VirtualMachineImage":
"""Gets a virtual machine image.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name: A valid image publisher.
:type publisher_name: str
:param offer: A valid image publisher offer.
:type offer: str
:param skus: A valid image SKU.
:type skus: str
:param version: A valid image SKU version.
:type version: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualMachineImage, or the result of cls(response)
:rtype: ~azure.mgmt.compute.v2020_12_01.models.VirtualMachineImage
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualMachineImage"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
location=location,
publisher_name=publisher_name,
offer=offer,
skus=skus,
version=version,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualMachineImage', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions/{version}'} # type: ignore
@distributed_trace
def list(
self,
location: str,
publisher_name: str,
offer: str,
skus: str,
expand: Optional[str] = None,
top: Optional[int] = None,
orderby: Optional[str] = None,
**kwargs: Any
) -> List["_models.VirtualMachineImageResource"]:
"""Gets a list of all virtual machine image versions for the specified location, publisher, offer,
and SKU.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name: A valid image publisher.
:type publisher_name: str
:param offer: A valid image publisher offer.
:type offer: str
:param skus: A valid image SKU.
:type skus: str
:param expand: The expand expression to apply on the operation.
:type expand: str
        :param top: The maximum number of results to return.
        :type top: int
        :param orderby: The ordering expression to apply to the results.
        :type orderby: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VirtualMachineImageResource, or the result of cls(response)
:rtype: list[~azure.mgmt.compute.v2020_12_01.models.VirtualMachineImageResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VirtualMachineImageResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_request(
location=location,
publisher_name=publisher_name,
offer=offer,
skus=skus,
subscription_id=self._config.subscription_id,
expand=expand,
top=top,
orderby=orderby,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus/{skus}/versions'} # type: ignore
@distributed_trace
def list_offers(
self,
location: str,
publisher_name: str,
**kwargs: Any
) -> List["_models.VirtualMachineImageResource"]:
"""Gets a list of virtual machine image offers for the specified location and publisher.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name: A valid image publisher.
:type publisher_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VirtualMachineImageResource, or the result of cls(response)
:rtype: list[~azure.mgmt.compute.v2020_12_01.models.VirtualMachineImageResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VirtualMachineImageResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_offers_request(
location=location,
publisher_name=publisher_name,
subscription_id=self._config.subscription_id,
template_url=self.list_offers.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_offers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers'} # type: ignore
@distributed_trace
def list_publishers(
self,
location: str,
**kwargs: Any
) -> List["_models.VirtualMachineImageResource"]:
"""Gets a list of virtual machine image publishers for the specified Azure location.
:param location: The name of a supported Azure region.
:type location: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VirtualMachineImageResource, or the result of cls(response)
:rtype: list[~azure.mgmt.compute.v2020_12_01.models.VirtualMachineImageResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VirtualMachineImageResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_publishers_request(
location=location,
subscription_id=self._config.subscription_id,
template_url=self.list_publishers.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_publishers.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers'} # type: ignore
@distributed_trace
def list_skus(
self,
location: str,
publisher_name: str,
offer: str,
**kwargs: Any
) -> List["_models.VirtualMachineImageResource"]:
"""Gets a list of virtual machine image SKUs for the specified location, publisher, and offer.
:param location: The name of a supported Azure region.
:type location: str
:param publisher_name: A valid image publisher.
:type publisher_name: str
:param offer: A valid image publisher offer.
:type offer: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VirtualMachineImageResource, or the result of cls(response)
:rtype: list[~azure.mgmt.compute.v2020_12_01.models.VirtualMachineImageResource]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VirtualMachineImageResource"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_skus_request(
location=location,
publisher_name=publisher_name,
offer=offer,
subscription_id=self._config.subscription_id,
template_url=self.list_skus.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VirtualMachineImageResource]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_skus.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/publishers/{publisherName}/artifacttypes/vmimage/offers/{offer}/skus'} # type: ignore
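# A hedged usage sketch through the generated management client (standard
# azure-identity + azure-mgmt-compute usage; the region and image names below
# are placeholders):
#
# from azure.identity import DefaultAzureCredential
# from azure.mgmt.compute import ComputeManagementClient
#
# client = ComputeManagementClient(DefaultAzureCredential(), "<subscription-id>")
# image = client.virtual_machine_images.get(
#     "westus2", "<publisher>", "<offer>", "<sku>", "<version>")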
| Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_12_01/operations/_virtual_machine_images_operations.py | Python | mit | 21,016 |
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the listtransactions API
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, COIN
from io import BytesIO
def txFromHex(hexstring):
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(hexstring))
tx.deserialize(f)
return tx
class ListTransactionsTest(BitcoinTestFramework):
def setup_nodes(self):
        # This test requires mocktime
enable_mocktime()
return start_nodes(4, self.options.tmpdir)
def run_test(self):
# Simple send, 0 to 1:
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":0})
assert_array_result(self.nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":0})
# mine a block, confirmations should change:
self.nodes[0].generate(1)
self.sync_all()
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid},
{"category":"send","account":"","amount":Decimal("-0.1"),"confirmations":1})
assert_array_result(self.nodes[1].listtransactions(),
{"txid":txid},
{"category":"receive","account":"","amount":Decimal("0.1"),"confirmations":1})
# send-to-self:
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 0.2)
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid, "category":"send"},
{"amount":Decimal("-0.2")})
assert_array_result(self.nodes[0].listtransactions(),
{"txid":txid, "category":"receive"},
{"amount":Decimal("0.2")})
# sendmany from node1: twice to self, twice to node2:
send_to = { self.nodes[0].getnewaddress() : 0.11,
self.nodes[1].getnewaddress() : 0.22,
self.nodes[0].getaccountaddress("from1") : 0.33,
self.nodes[1].getaccountaddress("toself") : 0.44 }
txid = self.nodes[1].sendmany("", send_to)
self.sync_all()
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.11")},
{"txid":txid} )
assert_array_result(self.nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.11")},
{"txid":txid} )
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.22")},
{"txid":txid} )
assert_array_result(self.nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.22")},
{"txid":txid} )
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.33")},
{"txid":txid} )
assert_array_result(self.nodes[0].listtransactions(),
{"category":"receive","amount":Decimal("0.33")},
{"txid":txid, "account" : "from1"} )
assert_array_result(self.nodes[1].listtransactions(),
{"category":"send","amount":Decimal("-0.44")},
{"txid":txid, "account" : ""} )
assert_array_result(self.nodes[1].listtransactions(),
{"category":"receive","amount":Decimal("0.44")},
{"txid":txid, "account" : "toself"} )
multisig = self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
self.nodes[0].importaddress(multisig["redeemScript"], "watchonly", False, True)
txid = self.nodes[1].sendtoaddress(multisig["address"], 0.1)
self.nodes[1].generate(1)
self.sync_all()
assert(len(self.nodes[0].listtransactions("watchonly", 100, 0, False)) == 0)
assert_array_result(self.nodes[0].listtransactions("watchonly", 100, 0, True),
{"category":"receive","amount":Decimal("0.1")},
{"txid":txid, "account" : "watchonly"} )
# rbf is disabled in Sprint Core
# self.run_rbf_opt_in_test()
# Check that the opt-in-rbf flag works properly, for sent and received
# transactions.
def run_rbf_opt_in_test(self):
# Check whether a transaction signals opt-in RBF itself
def is_opt_in(node, txid):
rawtx = node.getrawtransaction(txid, 1)
for x in rawtx["vin"]:
if x["sequence"] < 0xfffffffe:
return True
return False
# Find an unconfirmed output matching a certain txid
def get_unconfirmed_utxo_entry(node, txid_to_match):
utxo = node.listunspent(0, 0)
for i in utxo:
if i["txid"] == txid_to_match:
return i
return None
# 1. Chain a few transactions that don't opt-in.
txid_1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 1)
assert(not is_opt_in(self.nodes[0], txid_1))
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_1}, {"bip125-replaceable":"no"})
# Tx2 will build off txid_1, still not opting in to RBF.
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_1)
# Create tx2 using createrawtransaction
inputs = [{"txid":utxo_to_use["txid"], "vout":utxo_to_use["vout"]}]
outputs = {self.nodes[0].getnewaddress(): 0.999}
tx2 = self.nodes[1].createrawtransaction(inputs, outputs)
tx2_signed = self.nodes[1].signrawtransaction(tx2)["hex"]
txid_2 = self.nodes[1].sendrawtransaction(tx2_signed)
# ...and check the result
assert(not is_opt_in(self.nodes[1], txid_2))
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_2}, {"bip125-replaceable":"no"})
# Tx3 will opt-in to RBF
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[0], txid_2)
inputs = [{"txid": txid_2, "vout":utxo_to_use["vout"]}]
outputs = {self.nodes[1].getnewaddress(): 0.998}
tx3 = self.nodes[0].createrawtransaction(inputs, outputs)
tx3_modified = txFromHex(tx3)
tx3_modified.vin[0].nSequence = 0
tx3 = bytes_to_hex_str(tx3_modified.serialize())
tx3_signed = self.nodes[0].signrawtransaction(tx3)['hex']
txid_3 = self.nodes[0].sendrawtransaction(tx3_signed)
assert(is_opt_in(self.nodes[0], txid_3))
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_3}, {"bip125-replaceable":"yes"})
# Tx4 will chain off tx3. Doesn't signal itself, but depends on one
# that does.
utxo_to_use = get_unconfirmed_utxo_entry(self.nodes[1], txid_3)
inputs = [{"txid": txid_3, "vout":utxo_to_use["vout"]}]
outputs = {self.nodes[0].getnewaddress(): 0.997}
tx4 = self.nodes[1].createrawtransaction(inputs, outputs)
tx4_signed = self.nodes[1].signrawtransaction(tx4)["hex"]
txid_4 = self.nodes[1].sendrawtransaction(tx4_signed)
assert(not is_opt_in(self.nodes[1], txid_4))
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"yes"})
# Replace tx3, and check that tx4 becomes unknown
tx3_b = tx3_modified
tx3_b.vout[0].nValue -= int(Decimal("0.004") * COIN) # bump the fee
tx3_b = bytes_to_hex_str(tx3_b.serialize())
tx3_b_signed = self.nodes[0].signrawtransaction(tx3_b)['hex']
txid_3b = self.nodes[0].sendrawtransaction(tx3_b_signed, True)
assert(is_opt_in(self.nodes[0], txid_3b))
assert_array_result(self.nodes[0].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})
sync_mempools(self.nodes)
assert_array_result(self.nodes[1].listtransactions(), {"txid": txid_4}, {"bip125-replaceable":"unknown"})
# Check gettransaction as well:
for n in self.nodes[0:2]:
assert_equal(n.gettransaction(txid_1)["bip125-replaceable"], "no")
assert_equal(n.gettransaction(txid_2)["bip125-replaceable"], "no")
assert_equal(n.gettransaction(txid_3)["bip125-replaceable"], "yes")
assert_equal(n.gettransaction(txid_3b)["bip125-replaceable"], "yes")
assert_equal(n.gettransaction(txid_4)["bip125-replaceable"], "unknown")
# After mining a transaction, it's no longer BIP125-replaceable
self.nodes[0].generate(1)
assert(txid_3b not in self.nodes[0].getrawmempool())
assert_equal(self.nodes[0].gettransaction(txid_3b)["bip125-replaceable"], "no")
assert_equal(self.nodes[0].gettransaction(txid_4)["bip125-replaceable"], "unknown")
if __name__ == '__main__':
ListTransactionsTest().main()
|
BurningMan44/SprintCoin
|
qa/rpc-tests/listtransactions.py
|
Python
|
mit
| 10,132
|
#!/usr/bin/env python
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
import aaf2
import traceback
import subprocess
import json
import os
import datetime
import sys
import tempfile
import shutil
import time
import fractions
from aaf2 import auid
from pprint import pprint
FFMPEG_EXEC = "ffmpeg"
FFPROBE_EXEC = "ffprobe"
Audio_Profiles = aaf2.audio.pcm_profiles
Video_Profiles = aaf2.video.dnx_profiles
# FFMPEG_EXEC = "/Users/mark/Dev/ffmpeg/ffmpeg_g"
# FFPROBE_EXEC = "/Users/mark/Dev/ffmpeg/ffprobe_g"
def probe(path, show_packets=False):
cmd = [FFPROBE_EXEC, '-of','json','-show_format','-show_streams', path]
if show_packets:
cmd.extend(['-show_packets',])
print(subprocess.list2cmdline(cmd))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout,stderr = p.communicate()
if p.returncode != 0:
raise subprocess.CalledProcessError(p.returncode, subprocess.list2cmdline(cmd), stderr)
return json.loads(stdout)
def timecode_to_seconds(time_string):
try:
return float(time_string)
    except (TypeError, ValueError):
pass
for format in ("%H:%M:%S.%f", "%H:%M:%S", "%M:%S.%f","%M:%S"):
try:
t = datetime.datetime.strptime(time_string, format)
seconds = 0
if t.minute:
seconds += 60*t.minute
if t.hour:
seconds += 60 * 60 * t.hour
seconds += t.second
seconds += float(t.strftime(".%f"))
return seconds
        except ValueError:
#print traceback.format_exc()
pass
raise ValueError("invalid time format: %s" % time_string)
def seconds_to_timecode(seconds):
    t = datetime.timedelta(seconds=float(seconds))
    return str(t)
def has_alpha(stream):
if stream['pix_fmt'] in ('yuva444p10le','rgba'):
return True
return False
def conform_media(path,
output_dir,
start=None,
end=None,
duration=None,
width=None,
height=None,
frame_rate=None,
video_profile_name=None,
audio_profile_name=None,
ignore_alpha=False):
if not video_profile_name:
video_profile_name = 'dnx_1080p_36_23.97'
if not audio_profile_name:
audio_profile_name = 'pcm_48000_s16le'
video_profile = Video_Profiles[video_profile_name]
audio_profile = Audio_Profiles[audio_profile_name]
format = probe(path)
out_files = []
cmd = [FFMPEG_EXEC,'-y', '-nostdin']
# cmd.extend(['-loglevel', 'debug'])
if end:
        duration = timecode_to_seconds(end) - timecode_to_seconds(start or 0)  # tolerate a missing start
duration = seconds_to_timecode(duration)
end = None
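    # Seek in two stages: a coarse -ss placed before -i (added below) jumps
    # near the target quickly on keyframes, and the precise -ss added after
    # the input trims the remaining offset exactly.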
if start:
start_seconds = timecode_to_seconds(start)
fast_start = max(0,int(start_seconds-30))
if fast_start:
start = seconds_to_timecode(start_seconds - fast_start)
cmd.extend(['-ss', seconds_to_timecode(fast_start)])
frame_rate = video_profile['frame_rate']
pix_fmt = video_profile['pix_fmt']
bitrate = video_profile['bitrate']
dnxhd_profile = video_profile.get("video_profile", None)
if format['format']['format_name'] == "image2":
frame_rate = frame_rate or "24000/1001"
cmd.extend([ '-framerate', frame_rate])
cmd.extend(['-i', path,])
if video_profile['size']:
width, height = video_profile['size']
else:
width = None
height = None
interlaced = video_profile['interlaced']
#sample_rate =44100
sample_rate = audio_profile['sample_rate']
for stream in format['streams']:
#pprint(stream)
stream_index = stream['index']
if stream['codec_type'] == 'video':
out_meta = {}
# pprint(stream)
alpha = has_alpha(stream)
passes = 1
if alpha and not ignore_alpha:
passes = 2
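            # Two output passes are appended to the same ffmpeg invocation:
            # pass 0 encodes the color channels to DNxHD, pass 1 extracts the
            # alpha channel as 8-bit grayscale rawvideo via alphaextract.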
for i in range(passes):
if i == 1:
cmd.extend(['-an', '-f', 'rawvideo', '-pix_fmt', 'gray'])
if frame_rate:
cmd.extend(['-r', frame_rate])
else:
cmd.extend(['-an','-vcodec', 'dnxhd', '-pix_fmt', pix_fmt])
if dnxhd_profile:
cmd.extend(['-profile:v', dnxhd_profile])
if bitrate:
cmd.extend(['-vb', '%dM' % bitrate])
if frame_rate:
cmd.extend(['-r', frame_rate])
                if start is not None:
cmd.extend(['-ss', str(start)])
                if duration is not None:
cmd.extend(['-t', str(duration)])
vfilter = []
if i == 1:
vfilter.append("alphaextract")
if width and height:
out_width = width
out_height = height
input_width = stream['width']
input_height = stream['height']
max_width = width
max_height = height
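                    # Fit the source inside the target box while preserving its
                    # aspect ratio, then pad symmetrically (letterbox/pillarbox)
                    # to the exact requested dimensions.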
                    scale = min(max_width / float(input_width), max_height / float(input_height))
scale_width = int(input_width*scale)
scale_height = int(input_height*scale)
padding_ofs_x = (max_width - scale_width)//2
padding_ofs_y = (max_height - scale_height)//2
vfilter.append("scale=%d:%d,pad=%d:%d:%d:%d" % (scale_width,scale_height,
max_width,max_height, padding_ofs_x,padding_ofs_y))
else:
out_width = stream['width']
out_height = stream['height']
if vfilter:
cmd.extend(['-vf', ','.join(vfilter)])
# cmd.extend(['-s', "%dx%d" % (width, height)])
if i == 1:
out_file = os.path.join(output_dir, 'out_%d.alpha' % (stream_index))
out_meta['path_alpha'] = out_file
else:
out_rate = frame_rate or str(stream['avg_frame_rate'])
out_file = os.path.join(output_dir, 'out_%d.dnxhd' % (stream_index))
out_meta = {'path':out_file, 'frame_rate':out_rate, 'type': 'video', 'profile':video_profile_name}
out_meta['width'] = out_width
out_meta['height'] = out_height
cmd.extend([out_file])
#pprint(stream)
print("USING FRAMREATE", out_rate, str(stream['avg_frame_rate']))
out_files.append(out_meta)
elif stream['codec_type'] == 'audio':
input_sample_rate = int(stream['sample_rate'])
channels = stream['channels']
cmd.extend(['-vn', '-acodec', 'pcm_s16le', '-ar', str(sample_rate)])
# afilter = ['-af', "aresample=async=1:first_pts=0"]
# cmd.extend(afilter)
            if start is not None:
cmd.extend(['-ss', str(start)])
            if duration is not None:
cmd.extend(['-t', str(duration)])
out_file = os.path.join(output_dir, 'out_%d_%d_%d.wav' % (stream_index, sample_rate, channels))
cmd.extend([out_file])
out_files.append({'path':out_file, 'sample_rate':sample_rate, 'channels':channels,'type': 'audio'})
print(subprocess.list2cmdline(cmd))
subprocess.check_call(cmd)
return out_files
def create_matte_key_definition(f):
opdef = f.create.OperationDef(auid.AUID("0c864774-e428-3b2d-8115-1c736806191a"), 'MatteKey_2')
opdef['IsTimeWarp'].value = False
opdef['OperationCategory'].value = 'OperationCategory_Effect'
opdef['NumberInputs'].value = 3
opdef['Bypass'].value = 2
opdef.media_kind = "picture"
f.dictionary.register_def(opdef)
return opdef
def import_video_essence(f, mastermob, stream, compmob=None, tapemob=None):
tape = None
edit_rate = stream['frame_rate']
if tapemob:
        timecode_fps = int(round(float(fractions.Fraction(edit_rate))))
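        # Start the tape timecode at 01:00:00:00 (one hour of frames), a
        # common convention for tape sources.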
start_time = timecode_fps * 60 * 60
tape = tapemob.create_source_clip(1, start=start_time)
alpha_path = stream.get("path_alpha", None)
color_slot = mastermob.import_dnxhd_essence(stream['path'], edit_rate, tape=tape)
if alpha_path:
pixel_layout = [{u'Code': u'CompAlpha', u'Size': 8}]
width = stream['width']
height = stream['height']
source_mob = f.create.SourceMob()
f.content.mobs.append(source_mob)
if tapemob:
tape = tapemob.create_source_clip(1, start=start_time)
source_slot = source_mob.import_rawvideo_essence(alpha_path, edit_rate, width, height, pixel_layout, tape=tape)
length = source_slot.segment.length
essence_group = f.create.EssenceGroup()
alpha_slot = mastermob.create_picture_slot(edit_rate)
alpha_slot.segment = essence_group
source_clip = source_mob.create_source_clip(source_slot.slot_id)
source_clip.length = length
essence_group['Choices'].append(source_clip)
essence_group.length = length
opdef = create_matte_key_definition(f)
slot = compmob.create_picture_slot(edit_rate)
op_group = f.create.OperationGroup(opdef)
slot.segment = op_group
scope = f.create.ScopeReference()
scope['RelativeScope'].value = 1
scope['RelativeSlot'].value = 1
scope.length = length
sequence = f.create.Sequence(length=length)
sequence.components.append(scope)
op_group.segments.append(sequence)
op_group.segments.append(mastermob.create_source_clip(color_slot.slot_id, length=length))
op_group.segments.append(mastermob.create_source_clip(alpha_slot.slot_id, length=length))
def create_aaf(path, media_streams, mobname, tape_name=None, start_timecode=None):
with aaf2.open(path, 'w') as f:
mastermob = f.create.MasterMob(mobname)
f.content.mobs.append(mastermob)
edit_rate = None
for stream in media_streams:
if stream['type'] == 'video':
                edit_rate = fractions.Fraction(stream['frame_rate'])
break
alpha = False
compmob = None
for stream in media_streams:
if stream.get('path_alpha', False):
alpha = True
compmob = f.create.CompositionMob(mastermob.name)
compmob.usage = 'Usage_Template'
f.content.mobs.append(compmob)
# this hides the mastermob in avid bin
mastermob['AppCode'].value = 1
mastermob.usage = "Usage_LowerLevel"
break
tapemob = None
        timecode_fps = int(round(float(edit_rate)))
if tape_name:
tapemob = f.create.SourceMob()
tapemob.create_tape_slots(tape_name, edit_rate, timecode_fps)
f.content.mobs.append(tapemob)
for stream in media_streams:
if stream['type'] == 'video':
print("importing video...")
start = time.time()
import_video_essence(f, mastermob, stream, compmob, tapemob)
print("imported video in %f secs" % (time.time()- start))
for stream in media_streams:
if stream['type'] == 'audio':
print("importing audio...")
start = time.time()
sample_rate = stream['sample_rate']
slot = mastermob.import_audio_essence(stream['path'], edit_rate)
if compmob:
sound_slot = compmob.create_sound_slot(edit_rate)
sound_slot.segment = mastermob.create_source_clip(slot.slot_id, length = slot.segment.length)
print("imported audio in %f secs" % (time.time()- start))
if __name__ == "__main__":
from optparse import OptionParser
usage = "usage: %prog [options] output_aaf_file media_file"
parser = OptionParser(usage=usage)
parser.add_option('-s', '--start', type="string", dest="start",default=None,
help = "start recording at, in timecode or seconds")
parser.add_option('-e', '--end', type="string", dest='end',default=None,
help = "end recording at in timecode or seconds")
parser.add_option('-d', '--duration', type="string", dest='duration',default=None,
help = "record duration in timecode or seconds")
parser.add_option('--tape', type="string", dest="tape_name",default=None,
help = "tape name")
parser.add_option('--start_timecode', type="string", dest="start_timecode", default=None,
help = "start timecode [default 01:00:00:00]")
parser.add_option('--ignore_alpha', action='store_true', dest="ignore_alpha", default=False,
help = "ignore alpha channel if present")
parser.add_option("-v", '--video-profile', type='string', dest = 'video_profile', default="dnx_1080p_36_23.97",
help = "encoding profile for video [default: 1080p_36_23.97]")
parser.add_option("-a", '--audio-profile', type='string', dest = 'audio_profile',default='pcm_48000_s16le',
help = 'encoding profile for audio [default: pcm_48000]')
parser.add_option("--size", type='string', dest='size', default=None,
help = "video resolution for dnxhr [default: src size]")
parser.add_option("--framerate", type='string', dest='framerate',
help = "video framerate for dnxhr [default: use src rate]")
parser.add_option('--list-profiles', dest='list_profiles',
action="store_true",default=False,
help = "lists profiles")
(options, args) = parser.parse_args()
if options.list_profiles:
titles = ['Audio Profile', 'Sample Rate', 'Sample Fmt']
row_format ="{:<25}{:<15}{:<15}"
print("")
print(row_format.format( *titles))
print("")
for key,value in sorted(Audio_Profiles.items()):
print(row_format.format(key, value['sample_rate'], value['sample_format']))
titles = ['Video Profile', "Size", 'Frame Rate', "Bitrate", "Pix Fmt", "Codec"]
row_format ="{:<25}{:<15}{:<15}{:<10}{:<12}{:<10}"
print("")
print(row_format.format( *titles))
print("")
for key, value in sorted(Video_Profiles.items()):
codec = 'dnxhd'
if key.startswith("dnxhr"):
codec = 'dnxhr'
print(row_format.format(key, value['size'],
value['frame_rate'], value['bitrate'], value['pix_fmt'], codec))
sys.exit()
if len(args) < 2:
parser.error("not enough args")
details = probe(args[1])
#if not os.path.exists(args[1]):
#parser.error("No such file or directory: %s" % args[1])
if options.end and options.duration:
parser.error("Can only use --duration or --end not both")
print(options.audio_profile)
    if options.audio_profile not in Audio_Profiles:
        parser.error("No such audio profile: %s" % options.audio_profile)
    if options.video_profile.lower() not in Video_Profiles:
        parser.error("No such video profile: %s" % options.video_profile)
aaf_file = args[0]
# tempdir = os.path.join(os.getcwd(), 'samples', 'convert')
# if not os.path.exists(tempdir):
# os.makedirs(tempdir)
tempdir = tempfile.mkdtemp("-aaf_import")
print(tempdir)
media_streams = []
width = None
height = None
if options.size and options.video_profile.lower().startswith("dnxhr"):
width,height = options.size.split("x")
width = int(width)
height = int(height)
try:
for src in args[1:]:
media_streams.extend(conform_media(src,
output_dir=tempdir,
start=options.start,
end=options.end,
duration=options.duration,
width=width,
height=height,
frame_rate=options.framerate,
video_profile_name = options.video_profile.lower(),
audio_profile_name = options.audio_profile.lower(),
ignore_alpha = options.ignore_alpha)
)
except:
print(traceback.format_exc())
shutil.rmtree(tempdir)
parser.error("error conforming media")
try:
basename = os.path.basename(args[1])
name,ext = os.path.splitext(basename)
if details['format']['format_name'] == 'image2':
name, padding = os.path.splitext(name)
create_aaf(aaf_file, media_streams, name, options.tape_name, options.start_timecode)
    finally:
        shutil.rmtree(tempdir)
|
markreidvfx/pyaaf2
|
examples/import_media.py
|
Python
|
mit
| 17,403
|
from helper import greeting
greeting("hello world...")
|
brentbaum/cs3240-labdemo
|
hello.py
|
Python
|
mit
| 55
|
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(
self, plotly_name="color", parent_name="histogram2dcontour.textfont", **kwargs
):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "style"),
**kwargs
)
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/histogram2dcontour/textfont/_color.py
|
Python
|
mit
| 424
|
"""
Entry point to the API application, used for running simple checks on the application.
"""
from flask import jsonify
from . import home
from ..__meta__ import __version__, __project__, __copyright__
@home.route("")
@home.route("home")
@home.route("index")
def index():
"""
Entry point into the app
:return: renders the api information
"""
return jsonify({
"version": __version__,
"project": __project__,
"copyright": __copyright__
})
|
BrianLusina/Arco
|
server/app/mod_home/views.py
|
Python
|
mit
| 557
|
# coding: UTF-8
import unittest
from usig_normalizador_amba.Callejero import Callejero
from usig_normalizador_amba.Partido import Partido
from usig_normalizador_amba.Calle import Calle
from tests.test_commons import cargarCallejeroEstatico
class CallejeroTestCase(unittest.TestCase):
p = Partido('jose_c_paz', 'José C. Paz', 'Partido de José C. Paz', 2430431)
c = Callejero(p)
cargarCallejeroEstatico(c)
p = Partido('general_san_martin', 'General San Martin', 'Partido de General San Martin', 1719022)
c_san_martin = Callejero(p)
cargarCallejeroEstatico(c_san_martin)
def _checkCalle(self, calle, codigo, nombre, codigo_partido, localidad):
self.assertTrue(isinstance(calle, Calle))
self.assertEqual(calle.codigo, codigo)
self.assertEqual(calle.nombre, nombre)
self.assertEqual(calle.partido.codigo, codigo_partido)
self.assertEqual(calle.localidad, localidad)
def testCallejero_callejero_inexistent(self):
p = Partido('jose_paz', 'José C. Paz', 'Partido de José C. Paz', 2430431)
self.assertRaises(ValueError, Callejero, p)
def testCallejero_buscarCalle_calle_inexistente(self):
res = self.c.buscarCalle('kokusai dori')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 0, 'There should be no matches.')
def testCallejero_buscarCalle_unica_calle_existente(self):
res = self.c.buscarCalle('Santiago de Compostela')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be exactly 1 match.')
self._checkCalle(res[0], 53658, 'Santiago de Compostela', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_nombre_permutado(self):
res = self.c.buscarCalle('Compostela Santiago de')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be exactly 1 match.')
self._checkCalle(res[0], 53658, 'Santiago de Compostela', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_nombre_incompleto(self):
res = self.c.buscarCalle('Compos Santi')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be exactly 1 match.')
self._checkCalle(res[0], 53658, 'Santiago de Compostela', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_nombre_con_acento_y_case(self):
res = self.c.buscarCalle('PoToSÍ')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be exactly 1 match.')
self._checkCalle(res[0], 341221, 'Potosí', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_nombre_con_enie(self):
res = self.c.buscarCalle('Roque Saenz Peña')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be 1 match.')
self._checkCalle(res[0], 77440, 'Roque Sáenz Peña', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_multiples_calles_existentes(self):
res = self.c.buscarCalle('San')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 16, 'There should be 16 matches.')
resCalles = ['San Lorenzo', 'San Nicolás', 'San Blas', 'San Salvador', 'San Luis', 'San Marino', 'San Agustín',
'Santiago del Estero', 'Santiago de Compostela', 'Santiago L. Copello', 'Santa Marta', 'Santo Domingo',
'Santa Ana', 'Santiago de Liniers', 'Santa María', 'Santiago Davobe']
for calle in res:
self.assertTrue(isinstance(calle, Calle))
self.assertTrue(calle.nombre in resCalles)
def testCallejero_buscarCalle_calles_con_y_01(self):
res = self.c.buscarCalle('Gelly y Obes')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be 1 match.')
self._checkCalle(res[0], 77481, 'Gelly y Obes', 'jose_c_paz', 'José C. Paz')
res = self.c.buscarCalle('g y o')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be 1 match.')
self._checkCalle(res[0], 77481, 'Gelly y Obes', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_calles_con_y_02(self):
res = self.c.buscarCalle('Vicente López y Planes')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be 1 match.')
self._checkCalle(res[0], 11702, 'Vicente López y Planes', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_calles_con_e_01(self):
res = self.c.buscarCalle('Jose e Rodo')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be 1 match.')
self._checkCalle(res[0], 78817, 'José E. Rodó', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCodigo_codigo_valido(self):
res = self.c.buscarCodigo(314724)
self.assertTrue(isinstance(res, list))
self.assertTrue(res[0][0] == 314724)
self.assertTrue(res[0][1] == 'Avenida Derqui (M) / Fray Antonio Marchena (JCP)')
def testCallejero_buscarCodigo_codigo_invalido(self):
res = self.c.buscarCodigo(666)
self.assertTrue(res == [])
def testCallejero_buscarCalle_sinonimos_01(self):
res1 = self.c.buscarCalle('11')
self.assertTrue(isinstance(res1, list))
        self.assertEqual(len(res1), 1, 'There should be 1 match.')
res2 = self.c.buscarCalle('once')
self.assertTrue(isinstance(res2, list))
        self.assertEqual(len(res2), 1, 'There should be 1 match.')
self.assertEqual(res1[0].codigo, res2[0].codigo)
def testCallejero_buscarCalle_sinonimos_02(self):
        res1 = self.c.buscarCalle('3')  # 3 de Febrero, Tres Sargentos, and Las Tres Marías
self.assertTrue(isinstance(res1, list))
        self.assertEqual(len(res1), 3, 'There should be 3 matches.')
self.assertTrue(res1[0].codigo in [78879, 53341, 237007])
self.assertTrue(res1[1].codigo in [78879, 53341, 237007])
self.assertTrue(res1[2].codigo in [78879, 53341, 237007])
def testCallejero_buscarCalle_muchos_espacios(self):
res = self.c.buscarCalle(' puerto principe ')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be 1 match.')
self._checkCalle(res[0], 183044, 'Puerto Príncipe', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_calle_con_parentesis(self):
res = self.c.buscarCalle('Coliqueo (JCP)')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be 1 match.')
self._checkCalle(res[0], 186501, 'Intendente Arricau (SM) / Cacique Coliqueo (JCP)', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_caracteres_raros(self):
res = self.c.buscarCalle('puerto principe |°¬!#$%&/()=?\¿¡*¸+~{[^}]\'`-_.:,;<>·@')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be 1 match.')
self._checkCalle(res[0], 183044, 'Puerto Príncipe', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_calle_con_acente_escrito_sin_acento(self):
res = self.c.buscarCalle('potosi')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be 1 match.')
self._checkCalle(res[0], 341221, 'Potosí', 'jose_c_paz', 'José C. Paz')
def testCallejero_buscarCalle_calle_con_numeros(self):
res = self.c_san_martin.buscarCalle('26 de Julio de 1890')
self.assertTrue(isinstance(res, list))
        self.assertEqual(len(res), 1, 'There should be 1 match.')
self._checkCalle(res[0], 70996, '103 - 26 de Julio de 1890', 'general_san_martin', 'General San Martín')
|
usig/normalizador-amba
|
tests/CallejeroTestCase.py
|
Python
|
mit
| 7,895
|
from huzzer.function_generator import generate_expression, generate_unary_expr
from huzzer.expressions import VariableExpression, FunctionExpression, BRANCH_EXPRESSIONS
from huzzer.namers import DefaultNamer
from huzzer import INT, BOOL
empty_variables = {
INT: [],
BOOL: []
}
def test_generate_unary_expr():
ints = [generate_unary_expr(INT, empty_variables, 0) for i in range(50)]
assert all([
x.type_signiature == (INT, INT) and len(x.args) == 1 and type(x.args[0]) == int
for x in ints
])
bools = [generate_unary_expr(BOOL, empty_variables, 0) for i in range(10)]
assert all([
x.type_signiature == (BOOL, BOOL) and len(x.args) == 1 and type(x.args[0]) == bool
for x in bools
])
bool_variable = VariableExpression(BOOL, 1)
just_bools = {
INT: [],
BOOL: [bool_variable]
}
var_expr = generate_unary_expr(BOOL, just_bools, 1)
assert var_expr is bool_variable
int_expr = generate_unary_expr(INT, just_bools, 1)
assert int_expr is not bool_variable
# For reference, generate_expression's signature is:
#   generate_expression(haskell_type, variables, functions,
#                       branch_expressions, tree_depth,
#                       branching_probability=0.4,
#                       variable_probability=0.7,
#                       function_call_probability=0.5)
def test_generate_expression():
int_function = FunctionExpression([BOOL, INT, INT], 1)
bool_function = FunctionExpression([BOOL, BOOL, BOOL, BOOL], 2)
functions = {
INT: [int_function],
BOOL: [bool_function]
}
# this should definitely start with the bool func, as the probabilities are one
bool_expr = generate_expression(
BOOL,
empty_variables,
functions,
BRANCH_EXPRESSIONS,
2,
branching_probability=1.0,
function_call_probability=1.0
)
assert type(bool_expr) == type(bool_function) and bool_expr.function_id == 2
expr = generate_expression(
BOOL,
empty_variables,
functions,
BRANCH_EXPRESSIONS,
1,
branching_probability=1.0,
function_call_probability=1.0
)
assert expr.type_signiature == (BOOL, BOOL)
assert type(expr) != type(bool_function)
bool_variable = VariableExpression(BOOL, 1)
int_variable = VariableExpression(INT, 2)
variables = {
INT: [int_variable],
BOOL: [bool_variable]
}
var_expr = generate_expression(
BOOL,
variables,
functions,
BRANCH_EXPRESSIONS,
1,
branching_probability=1.0,
function_call_probability=1.0,
variable_probability=1.0
)
assert type(var_expr) is type(bool_variable) and var_expr.var_id == bool_variable.var_id
func_expr_with_only_vars = generate_expression(
BOOL,
variables,
functions,
BRANCH_EXPRESSIONS,
2,
branching_probability=1.0,
function_call_probability=1.0,
variable_probability=1.0
)
assert type(func_expr_with_only_vars) == type(bool_function) and \
all([arg is bool_variable for arg in func_expr_with_only_vars.args])
|
coopie/huzzer
|
test/test_function_generator.py
|
Python
|
mit
| 3,071
|
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 21 22:48:37 2016
@author: burger
"""
import numpy as np
from matplotlib import pyplot as plt
def sigma(x, a=1, b=0):
return 1/(1+np.exp(-(a*x+b)))
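# sigma is the logistic function 1/(1 + exp(-(a*x + b))): a sets the slope,
# b shifts the midpoint. Below, the four XOR points are projected onto a
# line and pushed through this squashing function, and the warped
# coordinate grid is drawn for context.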
x = np.asarray([[0.0, .1], [0, 1], [.9, .05], [.9, .95]])
markers = 'v<>^'
a = .5*np.ones((2,))
proj = np.dot(x, a)
def trafo(x, y):
return sigma(x, 2, -2), sigma(y, 5, 0)
proj_line = np.arange(-50, 50, .02)
proj_transformed_x, proj_transformed_y = trafo(proj_line, proj_line)
proj_x, proj_y = trafo(proj, proj)
a = (x[0] + x[3])/2
b = (x[1] + x[2])/2
c = (a + b)/2
m = (proj_y[3] - proj_y[0])/(proj_x[3] - proj_x[0])
X = np.mean(proj_x) + proj_line
Y = np.mean(proj_y) + m*proj_line
plt.figure()
# plt.hold(True) is the implicit default since matplotlib 2.0; the call was removed in 3.0
ms = 10
for i in range(len(x)):
    plt.plot(x[i, 0], x[i, 1], 'g'+markers[i], markersize=ms)
    plt.plot(proj[i], proj[i], 'b'+markers[i], markersize=ms)
    plt.plot(proj_x[i], proj_y[i], 'r'+markers[i], markersize=ms)
dots = 3
plt.plot(proj_line, proj_line, 'k.', markersize=dots)
plt.plot(proj_transformed_x, proj_transformed_y, 'r.', markersize=dots)
plt.plot(X, Y, 'k')
for t in proj_line[::4]:
    a, b = trafo(proj_line, t*np.ones_like(proj_line))
    plt.plot(a, b, 'k')
    a, b = trafo(t*np.ones_like(proj_line), proj_line)
    plt.plot(a, b, 'k')
#plot(proj_line, y*np.ones_like(proj_line), 'k')
plt.xlim([-.05, 1.05])
plt.ylim([-.05, 1.05])
plt.show()
|
burgerdev/hostload
|
visualization/xor2.py
|
Python
|
mit
| 1,409
|
"""Utility methods for flake8."""
import collections
import fnmatch as _fnmatch
import inspect
import io
import logging
import os
import platform
import re
import sys
import tokenize
from typing import Callable, Dict, Generator, List, Optional, Pattern
from typing import Sequence, Set, Tuple, Union
from flake8 import exceptions
from flake8._compat import lru_cache
if False: # `typing.TYPE_CHECKING` was introduced in 3.5.2
from flake8.plugins.manager import Plugin
DIFF_HUNK_REGEXP = re.compile(r"^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@.*$")
COMMA_SEPARATED_LIST_RE = re.compile(r"[,\s]")
LOCAL_PLUGIN_LIST_RE = re.compile(r"[,\t\n\r\f\v]")
string_types = (str, type(u""))
def parse_comma_separated_list(value, regexp=COMMA_SEPARATED_LIST_RE):
# type: (str, Pattern[str]) -> List[str]
"""Parse a comma-separated list.
:param value:
String to be parsed and normalized.
:param regexp:
Compiled regular expression used to split the value when it is a
string.
:type regexp:
_sre.SRE_Pattern
:returns:
List of values with whitespace stripped.
:rtype:
list
"""
assert isinstance(value, string_types), value
separated = regexp.split(value)
item_gen = (item.strip() for item in separated)
return [item for item in item_gen if item]
_Token = collections.namedtuple("Token", ("tp", "src"))
_CODE, _FILE, _COLON, _COMMA, _WS = "code", "file", "colon", "comma", "ws"
_EOF = "eof"
_FILE_LIST_TOKEN_TYPES = [
(re.compile(r"[A-Z]+[0-9]*(?=$|\s|,)"), _CODE),
(re.compile(r"[^\s:,]+"), _FILE),
(re.compile(r"\s*:\s*"), _COLON),
(re.compile(r"\s*,\s*"), _COMMA),
(re.compile(r"\s+"), _WS),
]
def _tokenize_files_to_codes_mapping(value):
# type: (str) -> List[_Token]
tokens = []
i = 0
while i < len(value):
for token_re, token_name in _FILE_LIST_TOKEN_TYPES:
match = token_re.match(value, i)
if match:
tokens.append(_Token(token_name, match.group().strip()))
i = match.end()
break
else:
raise AssertionError("unreachable", value, i)
tokens.append(_Token(_EOF, ""))
return tokens
def parse_files_to_codes_mapping(value_): # noqa: C901
# type: (Union[Sequence[str], str]) -> List[Tuple[str, List[str]]]
"""Parse a files-to-codes mapping.
    A files-to-codes mapping is a sequence of values specified as
    `filenames list:codes list ...`. Each of the lists may be separated by
    either comma or whitespace tokens.
    :param value_: String to be parsed and normalized.
    :type value_: str
"""
if not isinstance(value_, string_types):
value = "\n".join(value_)
else:
value = value_
ret = [] # type: List[Tuple[str, List[str]]]
if not value.strip():
return ret
class State:
seen_sep = True
seen_colon = False
filenames = [] # type: List[str]
codes = [] # type: List[str]
def _reset(): # type: () -> None
if State.codes:
for filename in State.filenames:
ret.append((filename, State.codes))
State.seen_sep = True
State.seen_colon = False
State.filenames = []
State.codes = []
def _unexpected_token(): # type: () -> exceptions.ExecutionError
def _indent(s): # type: (str) -> str
return " " + s.strip().replace("\n", "\n ")
return exceptions.ExecutionError(
"Expected `per-file-ignores` to be a mapping from file exclude "
"patterns to ignore codes.\n\n"
"Configured `per-file-ignores` setting:\n\n{}".format(
_indent(value)
)
)
for token in _tokenize_files_to_codes_mapping(value):
# legal in any state: separator sets the sep bit
if token.tp in {_COMMA, _WS}:
State.seen_sep = True
# looking for filenames
elif not State.seen_colon:
if token.tp == _COLON:
State.seen_colon = True
State.seen_sep = True
elif State.seen_sep and token.tp == _FILE:
State.filenames.append(token.src)
State.seen_sep = False
else:
raise _unexpected_token()
# looking for codes
else:
if token.tp == _EOF:
_reset()
elif State.seen_sep and token.tp == _CODE:
State.codes.append(token.src)
State.seen_sep = False
elif State.seen_sep and token.tp == _FILE:
_reset()
State.filenames.append(token.src)
State.seen_sep = False
else:
raise _unexpected_token()
return ret
def normalize_paths(paths, parent=os.curdir):
# type: (Sequence[str], str) -> List[str]
"""Normalize a list of paths relative to a parent directory.
:returns:
The normalized paths.
:rtype:
[str]
"""
assert isinstance(paths, list), paths
return [normalize_path(p, parent) for p in paths]
def normalize_path(path, parent=os.curdir):
# type: (str, str) -> str
"""Normalize a single-path.
:returns:
The normalized path.
:rtype:
str
"""
# NOTE(sigmavirus24): Using os.path.sep and os.path.altsep allow for
# Windows compatibility with both Windows-style paths (c:\\foo\bar) and
# Unix style paths (/foo/bar).
separator = os.path.sep
# NOTE(sigmavirus24): os.path.altsep may be None
alternate_separator = os.path.altsep or ""
if separator in path or (
alternate_separator and alternate_separator in path
):
path = os.path.abspath(os.path.join(parent, path))
return path.rstrip(separator + alternate_separator)
def _stdin_get_value_py3(): # type: () -> str
stdin_value = sys.stdin.buffer.read()
fd = io.BytesIO(stdin_value)
try:
coding, _ = tokenize.detect_encoding(fd.readline)
return stdin_value.decode(coding)
except (LookupError, SyntaxError, UnicodeError):
return stdin_value.decode("utf-8")
@lru_cache(maxsize=1)
def stdin_get_value(): # type: () -> str
"""Get and cache it so plugins can use it."""
if sys.version_info < (3,):
return sys.stdin.read()
else:
return _stdin_get_value_py3()
def stdin_get_lines(): # type: () -> List[str]
"""Return lines of stdin split according to file splitting."""
if sys.version_info < (3,):
return list(io.BytesIO(stdin_get_value()))
else:
return list(io.StringIO(stdin_get_value()))
def parse_unified_diff(diff=None):
# type: (Optional[str]) -> Dict[str, Set[int]]
"""Parse the unified diff passed on stdin.
:returns:
dictionary mapping file names to sets of line numbers
:rtype:
dict
"""
# Allow us to not have to patch out stdin_get_value
if diff is None:
diff = stdin_get_value()
number_of_rows = None
current_path = None
parsed_paths = collections.defaultdict(set) # type: Dict[str, Set[int]]
for line in diff.splitlines():
if number_of_rows:
# NOTE(sigmavirus24): Below we use a slice because stdin may be
# bytes instead of text on Python 3.
if line[:1] != "-":
number_of_rows -= 1
# We're in the part of the diff that has lines starting with +, -,
# and ' ' to show context and the changes made. We skip these
# because the information we care about is the filename and the
# range within it.
# When number_of_rows reaches 0, we will once again start
# searching for filenames and ranges.
continue
# NOTE(sigmavirus24): Diffs that we support look roughly like:
# diff a/file.py b/file.py
# ...
# --- a/file.py
# +++ b/file.py
# Below we're looking for that last line. Every diff tool that
# gives us this output may have additional information after
# ``b/file.py`` which it will separate with a \t, e.g.,
# +++ b/file.py\t100644
# Which is an example that has the new file permissions/mode.
# In this case we only care about the file name.
if line[:3] == "+++":
current_path = line[4:].split("\t", 1)[0]
# NOTE(sigmavirus24): This check is for diff output from git.
if current_path[:2] == "b/":
current_path = current_path[2:]
# We don't need to do anything else. We have set up our local
# ``current_path`` variable. We can skip the rest of this loop.
            # The next line we will see will give us the hunk information
# which is in the next section of logic.
continue
hunk_match = DIFF_HUNK_REGEXP.match(line)
# NOTE(sigmavirus24): pep8/pycodestyle check for:
# line[:3] == '@@ '
# But the DIFF_HUNK_REGEXP enforces that the line start with that
# So we can more simply check for a match instead of slicing and
# comparing.
if hunk_match:
(row, number_of_rows) = [
1 if not group else int(group)
for group in hunk_match.groups()
]
assert current_path is not None
parsed_paths[current_path].update(
range(row, row + number_of_rows)
)
# We have now parsed our diff into a dictionary that looks like:
# {'file.py': set(range(10, 16), range(18, 20)), ...}
return parsed_paths
def is_windows():
# type: () -> bool
"""Determine if we're running on Windows.
:returns:
True if running on Windows, otherwise False
:rtype:
bool
"""
return os.name == "nt"
def is_using_stdin(paths):
# type: (List[str]) -> bool
"""Determine if we're going to read from stdin.
:param list paths:
The paths that we're going to check.
:returns:
True if stdin (-) is in the path, otherwise False
:rtype:
bool
"""
return "-" in paths
def _default_predicate(*args): # type: (*str) -> bool
return False
def filenames_from(arg, predicate=None):
# type: (str, Optional[Callable[[str], bool]]) -> Generator[str, None, None] # noqa: E501
"""Generate filenames from an argument.
:param str arg:
Parameter from the command-line.
:param callable predicate:
Predicate to use to filter out filenames. If the predicate
returns ``True`` we will exclude the filename, otherwise we
will yield it. By default, we include every filename
generated.
:returns:
Generator of paths
"""
if predicate is None:
predicate = _default_predicate
if predicate(arg):
return
if os.path.isdir(arg):
for root, sub_directories, files in os.walk(arg):
if predicate(root):
sub_directories[:] = []
continue
# NOTE(sigmavirus24): os.walk() will skip a directory if you
# remove it from the list of sub-directories.
for directory in sub_directories:
joined = os.path.join(root, directory)
if predicate(joined):
sub_directories.remove(directory)
for filename in files:
joined = os.path.join(root, filename)
if not predicate(joined):
yield joined
else:
yield arg
def fnmatch(filename, patterns):
# type: (str, Sequence[str]) -> bool
"""Wrap :func:`fnmatch.fnmatch` to add some functionality.
:param str filename:
Name of the file we're trying to match.
:param list patterns:
Patterns we're using to try to match the filename.
    :returns:
        True if a pattern matches the filename or if ``patterns`` is empty,
        False otherwise.
"""
if not patterns:
return True
return any(_fnmatch.fnmatch(filename, pattern) for pattern in patterns)
def parameters_for(plugin):
# type: (Plugin) -> Dict[str, bool]
"""Return the parameters for the plugin.
This will inspect the plugin and return either the function parameters
if the plugin is a function or the parameters for ``__init__`` after
``self`` if the plugin is a class.
:param plugin:
The internal plugin object.
:type plugin:
flake8.plugins.manager.Plugin
:returns:
A dictionary mapping the parameter name to whether or not it is
required (a.k.a., is positional only/does not have a default).
:rtype:
dict([(str, bool)])
"""
func = plugin.plugin
is_class = not inspect.isfunction(func)
if is_class: # The plugin is a class
func = plugin.plugin.__init__
if sys.version_info < (3, 3):
argspec = inspect.getargspec(func)
start_of_optional_args = len(argspec[0]) - len(argspec[-1] or [])
parameter_names = argspec[0]
parameters = collections.OrderedDict(
[
(name, position < start_of_optional_args)
for position, name in enumerate(parameter_names)
]
)
else:
parameters = collections.OrderedDict(
[
(parameter.name, parameter.default is parameter.empty)
for parameter in inspect.signature(func).parameters.values()
if parameter.kind == parameter.POSITIONAL_OR_KEYWORD
]
)
if is_class:
parameters.pop("self", None)
return parameters
def matches_filename(path, patterns, log_message, logger):
# type: (str, Sequence[str], str, logging.Logger) -> bool
"""Use fnmatch to discern if a path exists in patterns.
:param str path:
The path to the file under question
:param patterns:
The patterns to match the path against.
:type patterns:
list[str]
    :param str log_message:
        The message used for logging purposes.
    :param logging.Logger logger:
        The logger used to emit ``log_message``.
:returns:
True if path matches patterns, False otherwise
:rtype:
bool
"""
if not patterns:
return False
basename = os.path.basename(path)
if basename not in {".", ".."} and fnmatch(basename, patterns):
logger.debug(log_message, {"path": basename, "whether": ""})
return True
absolute_path = os.path.abspath(path)
match = fnmatch(absolute_path, patterns)
logger.debug(
log_message,
{"path": absolute_path, "whether": "" if match else "not "},
)
return match
def get_python_version(): # type: () -> str
"""Find and format the python implementation and version.
:returns:
Implementation name, version, and platform as a string.
:rtype:
str
"""
return "%s %s on %s" % (
platform.python_implementation(),
platform.python_version(),
platform.system(),
)
|
TeamSPoon/logicmoo_workspace
|
packs_web/butterfly/lib/python3.7/site-packages/flake8/utils.py
|
Python
|
mit
| 15,155
|
import unittest
"""
Test for the local and the web HTML page for table generation
"""
class TestNET(unittest.TestCase):
def test_net(self):
pass
if __name__ == "__main__":
unittest.main()
|
shashankg7/pynet
|
tests/test_net.py
|
Python
|
mit
| 225
|
# gunicorn configuration
bind = '0.0.0.0:8000'
workers = 3
# These log settings assume that gunicorn log config will be included in the django base.py logging configuration
accesslog = '-'
errorlog = '-'
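# Access-log field reference (gunicorn): %(r)s request line, %(s)s status,
# %(U)s URL path, %(q)s query string, %(m)s method, %(H)s protocol,
# %(f)s referer, %(h)s remote address, %(D)s request time in microseconds,
# %(L)s request time in decimal seconds.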
access_log_format = '{"request": "%(r)s", "http_status_code": "%(s)s", "http_request_url": "%(U)s", "http_query_string": "%(q)s", "http_verb": "%(m)s", "http_version": "%(H)s", "http_referer": "%(f)s", "x_forwarded_for": "%({x-forwarded-for}i)s", "remote_address": "%(h)s", "request_usec": "%(D)s", "request_sec": "%(L)s"}'
|
penzance/hdt_monitor
|
hdt_monitor/settings/gunicorn_config.py
|
Python
|
mit
| 530
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-03 11:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Team',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('description', models.TextField()),
('company_link', models.CharField(max_length=500)),
('image_link', models.CharField(max_length=500)),
],
),
]
|
OKThess/website
|
main/migrations/0001_initial.py
|
Python
|
mit
| 736
|
"""Hello World API implemented using Google Cloud Endpoints.
Defined here are the ProtoRPC messages needed to define Schemas for methods
as well as those methods defined in an API.
"""
import endpoints
from protorpc import messages, message_types, remote # TODO remove messages and message types when possible
from google.appengine.ext import ndb
from libs.endpoints_proto_datastore.ndb import EndpointsModel
WEB_CLIENT_ID = '' # TODO make this secure
ANDROID_CLIENT_ID = '' # TODO figure out android at some point
IOS_CLIENT_ID = '' # probably not, unless I get some help
ANDROID_AUDIENCE = WEB_CLIENT_ID
class Note(EndpointsModel):
title = ndb.StringProperty()
content = ndb.StringProperty()
date_created = ndb.DateTimeProperty(auto_now_add=True)
owner = ndb.UserProperty()
"""
class NoteMessage(messages.Message):
title = messages.StringField(1)
content = messages.StringField(2)
# Hotness
hotness = messages.IntegerField(3, default=0)
# Color
color = messages.BytesField(4, default='#ffffff')
# Attach
# Due Date
due_date = message_types.DateTimeField(5)
# Reminder
reminders = message_types.DateTimeField(6, repeated=True)
# Task : is a note
# Note
sub_notes = messages.MessageField('Note', 7, repeated=True)
# Image : is a file
# File
# A URL to a file. The file can probably be served by the datastore.
files = messages.BytesField(8, repeated=True)
#dsid = messages.BytesField(12, required=True)
#date_created = message_types.DateTimeField(9, required=True)
#date_updated = message_types.DateTimeField(10, required=True)
#date_accessed = message_types.DateTimeField(11, required=True)
class NoteCollectionMessage(messages.Message):
items = messages.MessageField(Note, 1, repeated=True)
FOO_NOTES = NoteCollection(items=[
Note(#dsid='esgihsel',
title='my first note!',
content='this is my very first note'),
Note(#dsid='3f2o02hg',
title='my second note!',
content='i have more notes'),
Note(#dsid='0evwhfwf',
title='my third note!',
content='',
color=None,
hotness=2),
Note(#dsid='rkbn31ha',
title='my first note'),
])
"""
@endpoints.api(name='helloendpoints', version='v1',
allowed_client_ids=[WEB_CLIENT_ID, ANDROID_CLIENT_ID,
endpoints.API_EXPLORER_CLIENT_ID],
audiences=[ANDROID_AUDIENCE],
scopes=[endpoints.EMAIL_SCOPE])
class EndpointsTestAPI(remote.Service):
"""This is a test for Endpoints, a learning experience."""
#@endpoints.method(message_types.VoidMessage, NoteCollection,
# path='notes', http_method='GET',
# name='notes.list')
#def notes_list(self, request):
# return FOO_NOTES
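    # Note.method and Note.query_method come from endpoints-proto-datastore's
    # EndpointsModel: they derive the ProtoRPC request/response messages from
    # the model itself, so no hand-written message classes are needed here
    # (hence the disabled draft messages above).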
@Note.method(user_required=True,
path='note', http_method='PUT', name='note.add')
def note_add(self, note):
note.owner = endpoints.get_current_user()
note.put()
return note
@Note.query_method(user_required=True,
query_fields=('limit', 'order', 'pageToken'),
path='notes', http_method='GET', name='notes.list')
def notes_list(self, query):
return query.filter(Note.owner == endpoints.get_current_user())
application = endpoints.api_server([EndpointsTestAPI])
|
AlexanderOtavka/hello-endpoints
|
api/__init__.py
|
Python
|
mit
| 3,467
|
__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
'ConstructorError']
from error import *
from nodes import *
import datetime
import binascii, re, sys, types
class ConstructorError(MarkedYAMLError):
pass
class BaseConstructor(object):
yaml_constructors = {}
yaml_multi_constructors = {}
def __init__(self):
self.constructed_objects = {}
self.recursive_objects = {}
self.state_generators = []
self.deep_construct = False
def check_data(self):
        # Are there more documents available?
return self.check_node()
def get_data(self):
# Construct and return the next document.
if self.check_node():
return self.construct_document(self.get_node())
def get_single_data(self):
# Ensure that the stream contains a single document and construct it.
node = self.get_single_node()
if node is not None:
return self.construct_document(node)
return None
def construct_document(self, node):
data = self.construct_object(node)
while self.state_generators:
state_generators = self.state_generators
self.state_generators = []
for generator in state_generators:
for dummy in generator:
pass
self.constructed_objects = {}
self.recursive_objects = {}
self.deep_construct = False
return data
def construct_object(self, node, deep=False):
if node in self.constructed_objects:
return self.constructed_objects[node]
if deep:
old_deep = self.deep_construct
self.deep_construct = True
if node in self.recursive_objects:
raise ConstructorError(None, None,
"found unconstructable recursive node", node.start_mark)
self.recursive_objects[node] = None
constructor = None
tag_suffix = None
if node.tag in self.yaml_constructors:
constructor = self.yaml_constructors[node.tag]
else:
for tag_prefix in self.yaml_multi_constructors:
if node.tag.startswith(tag_prefix):
tag_suffix = node.tag[len(tag_prefix):]
constructor = self.yaml_multi_constructors[tag_prefix]
break
else:
if None in self.yaml_multi_constructors:
tag_suffix = node.tag
constructor = self.yaml_multi_constructors[None]
elif None in self.yaml_constructors:
constructor = self.yaml_constructors[None]
elif isinstance(node, ScalarNode):
constructor = self.__class__.construct_scalar
elif isinstance(node, SequenceNode):
constructor = self.__class__.construct_sequence
elif isinstance(node, MappingNode):
constructor = self.__class__.construct_mapping
if tag_suffix is None:
data = constructor(self, node)
else:
data = constructor(self, tag_suffix, node)
if isinstance(data, types.GeneratorType):
generator = data
data = generator.next()
if self.deep_construct:
for dummy in generator:
pass
else:
self.state_generators.append(generator)
self.constructed_objects[node] = data
del self.recursive_objects[node]
if deep:
self.deep_construct = old_deep
return data
def construct_scalar(self, node):
if not isinstance(node, ScalarNode):
raise ConstructorError(None, None,
"expected a scalar node, but found %s" % node.id,
node.start_mark)
return node.value
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
raise ConstructorError(None, None,
"expected a sequence node, but found %s" % node.id,
node.start_mark)
return [self.construct_object(child, deep=deep)
for child in node.value]
def construct_mapping(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
mapping = {}
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
except TypeError, exc:
raise ConstructorError("while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
mapping[key] = value
return mapping
def construct_pairs(self, node, deep=False):
if not isinstance(node, MappingNode):
raise ConstructorError(None, None,
"expected a mapping node, but found %s" % node.id,
node.start_mark)
pairs = []
for key_node, value_node in node.value:
key = self.construct_object(key_node, deep=deep)
value = self.construct_object(value_node, deep=deep)
pairs.append((key, value))
return pairs
def add_constructor(cls, tag, constructor):
if not 'yaml_constructors' in cls.__dict__:
cls.yaml_constructors = cls.yaml_constructors.copy()
cls.yaml_constructors[tag] = constructor
add_constructor = classmethod(add_constructor)
def add_multi_constructor(cls, tag_prefix, multi_constructor):
if not 'yaml_multi_constructors' in cls.__dict__:
cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
cls.yaml_multi_constructors[tag_prefix] = multi_constructor
add_multi_constructor = classmethod(add_multi_constructor)
class SafeConstructor(BaseConstructor):
def construct_scalar(self, node):
if isinstance(node, MappingNode):
for key_node, value_node in node.value:
if key_node.tag == u'tag:yaml.org,2002:value':
return self.construct_scalar(value_node)
return BaseConstructor.construct_scalar(self, node)
def flatten_mapping(self, node):
merge = []
index = 0
while index < len(node.value):
key_node, value_node = node.value[index]
if key_node.tag == u'tag:yaml.org,2002:merge':
del node.value[index]
if isinstance(value_node, MappingNode):
self.flatten_mapping(value_node)
merge.extend(value_node.value)
elif isinstance(value_node, SequenceNode):
submerge = []
for subnode in value_node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing a mapping",
node.start_mark,
"expected a mapping for merging, but found %s"
% subnode.id, subnode.start_mark)
self.flatten_mapping(subnode)
submerge.append(subnode.value)
submerge.reverse()
for value in submerge:
merge.extend(value)
else:
raise ConstructorError("while constructing a mapping", node.start_mark,
"expected a mapping or list of mappings for merging, but found %s"
% value_node.id, value_node.start_mark)
elif key_node.tag == u'tag:yaml.org,2002:value':
key_node.tag = u'tag:yaml.org,2002:str'
index += 1
else:
index += 1
if merge:
node.value = merge + node.value
def construct_mapping(self, node, deep=False):
if isinstance(node, MappingNode):
self.flatten_mapping(node)
return BaseConstructor.construct_mapping(self, node, deep=deep)
def construct_yaml_null(self, node):
self.construct_scalar(node)
return None
bool_values = {
u'yes': True,
u'no': False,
u'true': True,
u'false': False,
u'on': True,
u'off': False,
}
def construct_yaml_bool(self, node):
value = self.construct_scalar(node)
return self.bool_values[value.lower()]
def construct_yaml_int(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '')
sign = +1
if value[0] == '-':
sign = -1
if value[0] in '+-':
value = value[1:]
if value == '0':
return 0
elif value.startswith('0b'):
return sign*int(value[2:], 2)
elif value.startswith('0x'):
return sign*int(value[2:], 16)
elif value[0] == '0':
return sign*int(value, 8)
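        # YAML 1.1 also allows base-60 (sexagesimal) integers such as
        # "190:20:30"; the branch below accumulates the parts in ascending
        # powers of 60.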
elif ':' in value:
digits = [int(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0
for digit in digits:
value += digit*base
base *= 60
return sign*value
else:
return sign*int(value)
inf_value = 1e300
while inf_value != inf_value*inf_value:
inf_value *= inf_value
nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99).
def construct_yaml_float(self, node):
value = str(self.construct_scalar(node))
value = value.replace('_', '').lower()
sign = +1
if value[0] == '-':
sign = -1
if value[0] in '+-':
value = value[1:]
if value == '.inf':
return sign*self.inf_value
elif value == '.nan':
return self.nan_value
elif ':' in value:
digits = [float(part) for part in value.split(':')]
digits.reverse()
base = 1
value = 0.0
for digit in digits:
value += digit*base
base *= 60
return sign*value
else:
return sign*float(value)
def construct_yaml_binary(self, node):
value = self.construct_scalar(node)
try:
return str(value).decode('base64')
except (binascii.Error, UnicodeEncodeError), exc:
raise ConstructorError(None, None,
"failed to decode base64 data: %s" % exc, node.start_mark)
timestamp_regexp = re.compile(
ur'''^(?P<year>[0-9][0-9][0-9][0-9])
-(?P<month>[0-9][0-9]?)
-(?P<day>[0-9][0-9]?)
(?:(?:[Tt]|[ \t]+)
(?P<hour>[0-9][0-9]?)
:(?P<minute>[0-9][0-9])
:(?P<second>[0-9][0-9])
(?:\.(?P<fraction>[0-9]*))?
(?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
(?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)
def construct_yaml_timestamp(self, node):
value = self.construct_scalar(node)
match = self.timestamp_regexp.match(node.value)
values = match.groupdict()
year = int(values['year'])
month = int(values['month'])
day = int(values['day'])
if not values['hour']:
return datetime.date(year, month, day)
hour = int(values['hour'])
minute = int(values['minute'])
second = int(values['second'])
fraction = 0
if values['fraction']:
fraction = values['fraction'][:6]
while len(fraction) < 6:
fraction += '0'
fraction = int(fraction)
delta = None
if values['tz_sign']:
tz_hour = int(values['tz_hour'])
tz_minute = int(values['tz_minute'] or 0)
delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
if values['tz_sign'] == '-':
delta = -delta
data = datetime.datetime(year, month, day, hour, minute, second, fraction)
if delta:
data -= delta
return data
def construct_yaml_omap(self, node):
# Note: we do not check for duplicate keys, because it's too
# CPU-expensive.
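        # Yielding the still-empty omap first registers it with the
        # constructor, so aliases to this node can be resolved before the
        # sequence is filled in.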
omap = []
yield omap
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a mapping of length 1, but found %s" % subnode.id,
subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing an ordered map", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
omap.append((key, value))
def construct_yaml_pairs(self, node):
# Note: the same code as `construct_yaml_omap`.
pairs = []
yield pairs
if not isinstance(node, SequenceNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a sequence, but found %s" % node.id, node.start_mark)
for subnode in node.value:
if not isinstance(subnode, MappingNode):
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a mapping of length 1, but found %s" % subnode.id,
subnode.start_mark)
if len(subnode.value) != 1:
raise ConstructorError("while constructing pairs", node.start_mark,
"expected a single mapping item, but found %d items" % len(subnode.value),
subnode.start_mark)
key_node, value_node = subnode.value[0]
key = self.construct_object(key_node)
value = self.construct_object(value_node)
pairs.append((key, value))
def construct_yaml_set(self, node):
data = set()
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_str(self, node):
value = self.construct_scalar(node)
try:
return value.encode('ascii')
except UnicodeEncodeError:
return value
def construct_yaml_seq(self, node):
data = []
yield data
data.extend(self.construct_sequence(node))
def construct_yaml_map(self, node):
data = {}
yield data
value = self.construct_mapping(node)
data.update(value)
def construct_yaml_object(self, node, cls):
data = cls.__new__(cls)
yield data
if hasattr(data, '__setstate__'):
state = self.construct_mapping(node, deep=True)
data.__setstate__(state)
else:
state = self.construct_mapping(node)
data.__dict__.update(state)
def construct_undefined(self, node):
raise ConstructorError(None, None,
"could not determine a constructor for the tag %r" % node.tag.encode('utf-8'),
node.start_mark)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:null',
SafeConstructor.construct_yaml_null)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:bool',
SafeConstructor.construct_yaml_bool)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:int',
SafeConstructor.construct_yaml_int)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:float',
SafeConstructor.construct_yaml_float)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:binary',
SafeConstructor.construct_yaml_binary)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:timestamp',
SafeConstructor.construct_yaml_timestamp)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:omap',
SafeConstructor.construct_yaml_omap)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:pairs',
SafeConstructor.construct_yaml_pairs)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:set',
SafeConstructor.construct_yaml_set)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:str',
SafeConstructor.construct_yaml_str)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:seq',
SafeConstructor.construct_yaml_seq)
SafeConstructor.add_constructor(
u'tag:yaml.org,2002:map',
SafeConstructor.construct_yaml_map)
SafeConstructor.add_constructor(None,
SafeConstructor.construct_undefined)
class Constructor(SafeConstructor):
def construct_python_str(self, node):
return self.construct_scalar(node).encode('utf-8')
def construct_python_unicode(self, node):
return self.construct_scalar(node)
def construct_python_long(self, node):
return long(self.construct_yaml_int(node))
def construct_python_complex(self, node):
return complex(self.construct_scalar(node))
def construct_python_tuple(self, node):
return tuple(self.construct_sequence(node))
def find_python_module(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python module", mark,
"expected non-empty name appended to the tag", mark)
try:
__import__(name)
except ImportError, exc:
raise ConstructorError("while constructing a Python module", mark,
"cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark)
return sys.modules[name]
def find_python_name(self, name, mark):
if not name:
raise ConstructorError("while constructing a Python object", mark,
"expected non-empty name appended to the tag", mark)
if u'.' in name:
module_name, object_name = name.rsplit('.', 1)
else:
module_name = '__builtin__'
object_name = name
try:
__import__(module_name)
except ImportError, exc:
raise ConstructorError("while constructing a Python object", mark,
"cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark)
module = sys.modules[module_name]
if not hasattr(module, object_name):
raise ConstructorError("while constructing a Python object", mark,
"cannot find %r in the module %r" % (object_name.encode('utf-8'),
module.__name__), mark)
return getattr(module, object_name)
def construct_python_name(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python name", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_name(suffix, node.start_mark)
def construct_python_module(self, suffix, node):
value = self.construct_scalar(node)
if value:
raise ConstructorError("while constructing a Python module", node.start_mark,
"expected the empty value, but found %r" % value.encode('utf-8'),
node.start_mark)
return self.find_python_module(suffix, node.start_mark)
class classobj: pass
def make_python_instance(self, suffix, node,
args=None, kwds=None, newobj=False):
if not args:
args = []
if not kwds:
kwds = {}
cls = self.find_python_name(suffix, node.start_mark)
if newobj and isinstance(cls, type(self.classobj)) \
and not args and not kwds:
instance = self.classobj()
instance.__class__ = cls
return instance
elif newobj and isinstance(cls, type):
return cls.__new__(cls, *args, **kwds)
else:
return cls(*args, **kwds)
def set_python_instance_state(self, instance, state):
if hasattr(instance, '__setstate__'):
instance.__setstate__(state)
else:
slotstate = {}
if isinstance(state, tuple) and len(state) == 2:
state, slotstate = state
if hasattr(instance, '__dict__'):
instance.__dict__.update(state)
elif state:
slotstate.update(state)
for key, value in slotstate.items():
                setattr(instance, key, value)
def construct_python_object(self, suffix, node):
# Format:
# !!python/object:module.name { ... state ... }
instance = self.make_python_instance(suffix, node, newobj=True)
yield instance
deep = hasattr(instance, '__setstate__')
state = self.construct_mapping(node, deep=deep)
self.set_python_instance_state(instance, state)
def construct_python_object_apply(self, suffix, node, newobj=False):
# Format:
# !!python/object/apply # (or !!python/object/new)
# args: [ ... arguments ... ]
# kwds: { ... keywords ... }
# state: ... state ...
# listitems: [ ... listitems ... ]
# dictitems: { ... dictitems ... }
# or short format:
# !!python/object/apply [ ... arguments ... ]
# The difference between !!python/object/apply and !!python/object/new
# is how an object is created, check make_python_instance for details.
if isinstance(node, SequenceNode):
args = self.construct_sequence(node, deep=True)
kwds = {}
state = {}
listitems = []
dictitems = {}
else:
value = self.construct_mapping(node, deep=True)
args = value.get('args', [])
kwds = value.get('kwds', {})
state = value.get('state', {})
listitems = value.get('listitems', [])
dictitems = value.get('dictitems', {})
instance = self.make_python_instance(suffix, node, args, kwds, newobj)
if state:
self.set_python_instance_state(instance, state)
if listitems:
instance.extend(listitems)
if dictitems:
for key in dictitems:
instance[key] = dictitems[key]
return instance
def construct_python_object_new(self, suffix, node):
return self.construct_python_object_apply(suffix, node, newobj=True)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/none',
Constructor.construct_yaml_null)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/bool',
Constructor.construct_yaml_bool)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/str',
Constructor.construct_python_str)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/unicode',
Constructor.construct_python_unicode)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/int',
Constructor.construct_yaml_int)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/long',
Constructor.construct_python_long)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/float',
Constructor.construct_yaml_float)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/complex',
Constructor.construct_python_complex)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/list',
Constructor.construct_yaml_seq)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/tuple',
Constructor.construct_python_tuple)
Constructor.add_constructor(
u'tag:yaml.org,2002:python/dict',
Constructor.construct_yaml_map)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/name:',
Constructor.construct_python_name)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/module:',
Constructor.construct_python_module)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object:',
Constructor.construct_python_object)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/apply:',
Constructor.construct_python_object_apply)
Constructor.add_multi_constructor(
u'tag:yaml.org,2002:python/object/new:',
Constructor.construct_python_object_new)
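if __name__ == '__main__':
    # A minimal demo, not part of the original module. It assumes a
    # PyYAML-style package importable as `yaml` whose default Loader is
    # backed by the Constructor class above (older PyYAML behaviour);
    # SafeConstructor only resolves the plain tag:yaml.org,2002 types.
    import yaml
    print yaml.safe_load('!!set {spam: null, eggs: null}')  # -> a Python set
    print yaml.load('!!python/tuple [1, 2, 3]')             # -> (1, 2, 3)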
|
croxis/SpaceDrive
|
spacedrive/renderpipeline/rplibs/yaml/yaml_py2/constructor.py
|
Python
|
mit
| 25,820
|
"""
WSGI config for Texas LAN Web project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from django.core.wsgi import get_wsgi_application
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
TexasLAN/texaslan.org
|
config/wsgi.py
|
Python
|
mit
| 1,450
|
def unique_elems(data):
    """Return True if all elements of data are distinct, False otherwise."""
    seen = set()
    for number in data:
        if number in seen:
            return False
        seen.add(number)
    return True
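if __name__ == '__main__':
    # Quick sanity checks (not part of the original exercise file):
    assert unique_elems([1, 2, 3, 4]) is True
    assert unique_elems([1, 2, 2, 4]) is False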
|
GeorgeGkas/Data_Structures_and_Algorithms_in_Python
|
Chapter1/C-1/15.py
|
Python
|
mit
| 210
|
from typing import Any, Collection, Dict, List, Optional, overload
from ..language import Node, OperationType
from ..pyutils import is_iterable
__all__ = ["ast_to_dict"]
@overload
def ast_to_dict(
node: Node, locations: bool = False, cache: Optional[Dict[Node, Any]] = None
) -> Dict:
...
@overload
def ast_to_dict(
node: Collection[Node],
locations: bool = False,
cache: Optional[Dict[Node, Any]] = None,
) -> List[Node]:
...
@overload
def ast_to_dict(
node: OperationType,
locations: bool = False,
cache: Optional[Dict[Node, Any]] = None,
) -> str:
...
def ast_to_dict(
node: Any, locations: bool = False, cache: Optional[Dict[Node, Any]] = None
) -> Any:
"""Convert a language AST to a nested Python dictionary.
Set `location` to True in order to get the locations as well.
"""
"""Convert a node to a nested Python dictionary."""
if isinstance(node, Node):
if cache is None:
cache = {}
elif node in cache:
return cache[node]
cache[node] = res = {}
res.update(
{
key: ast_to_dict(getattr(node, key), locations, cache)
for key in ("kind",) + node.keys[1:]
}
)
if locations:
loc = node.loc
if loc:
res["loc"] = dict(start=loc.start, end=loc.end)
return res
if is_iterable(node):
return [ast_to_dict(sub_node, locations, cache) for sub_node in node]
if isinstance(node, OperationType):
return node.value
return node
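if __name__ == "__main__":
    # A small usage sketch (assumes graphql-core is installed, so that
    # graphql.language.parse is importable):
    from graphql.language import parse

    document = parse("{ me { name } }")
    as_dict = ast_to_dict(document, locations=True)
    print(as_dict["kind"])  # -> 'document'
    print(as_dict["loc"])   # e.g. {'start': 0, 'end': 15}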
|
graphql-python/graphql-core
|
src/graphql/utilities/ast_to_dict.py
|
Python
|
mit
| 1,596
|
import time
from socketio import packet
def test():
p = packet.Packet(packet.EVENT, 'hello')
start = time.time()
count = 0
while True:
p = packet.Packet(encoded_packet=p.encode())
count += 1
if time.time() - start >= 5:
break
return count
if __name__ == '__main__':
count = test()
print('text_packet:', count, 'packets processed.')
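    # Round-trip sanity check (a sketch; attribute names follow
    # python-socketio's Packet class):
    q = packet.Packet(encoded_packet=packet.Packet(packet.EVENT, 'hello').encode())
    assert q.packet_type == packet.EVENT and q.data == 'hello'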
|
miguelgrinberg/python-socketio
|
tests/performance/text_packet.py
|
Python
|
mit
| 400
|
from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
import logging
import re
import xmlrpc.client
from flexget import plugin
from flexget.event import event
from flexget.utils.template import RenderError
from flexget.plugin import get_plugin_by_name
from socket import error as socket_error
log = logging.getLogger('aria2')
# TODO: stop using torrent_info_hash[0:16] as the GID
# for RENAME_CONTENT_FILES:
# to rename TV episodes, content_is_episodes must be set to yes
class OutputAria2(object):
"""
aria2 output plugin
Version 1.0.0
Configuration:
server: Where aria2 daemon is running. default 'localhost'
port: Port of that server. default '6800'
username: XML-RPC username set in aria2. default ''
password: XML-RPC password set in aria2. default ''
do: [add-new|remove-completed] What action to take with incoming
entries.
    uri: URI of file to download. Can include inline Basic Auth
         parameters and use jinja2 templating with any fields available
in the entry. If you are using any of the dynamic renaming
options below, the filename can be included in this setting
using {{filename}}.
exclude_samples:
[yes|no] Exclude any files that include the word 'sample' in
their name. default 'no'
exclude_non_content:
[yes|no] Exclude any non-content files, as defined by filename
extensions not listed in file_exts. (See below.) default 'no'
rename_content_files:
[yes|no] If set, rename all content files (as defined by
extensions listed in file_exts). default 'no'
rename_template:
If set, and rename_content_files is yes, all content files
will be renamed using the value of this field as a template.
Will be parsed with jinja2 and can include any fields
available in the entry. default ''
parse_filename:
[yes|no] If yes, filenames will be parsed with either the
series parser (if content_is_episodes is set to yes) or the
movie parser. default: 'no'
content_is_episodes:
[yes|no] If yes, files will be parsed by the series plugin
parser to attempt to determine series name and series_id. If
no, files will be treated as movies. Note this has no effect
unless parse_filename is set to yes. default 'no'
keep_parent_folders:
[yes|no] If yes, any parent folders within the torrent itself
will be kept and created within the download directory.
For example, if a torrent has this structure:
MyTorrent/
MyFile.mkv
If this is set to yes, the MyTorrent folder will be created in
the download directory. If set to no, the folder will be
ignored and the file will be downloaded directly into the
download directory. default: 'no'
fix_year: [yes|no] If yes, and the last four characters of the series
              name are numbers, enclose them in parentheses as they are
likely a year. Example: Show Name 1995 S01E01.mkv would become
Show Name (1995) S01E01.mkv. default 'yes'
file_exts: [list] File extensions of all files considered to be content
files. Used to determine which files to rename or which files
to exclude from download, with appropriate options set. (See
above.)
default: ['.mkv', '.avi', '.mp4', '.wmv', '.asf', '.divx',
'.mov', '.mpg', '.rm']
aria_config:
"Parent folder" for any options to be passed directly to aria.
Any command line option listed at
http://aria2.sourceforge.net/manual/en/html/aria2c.html#options
can be used by removing the two dashes (--) in front of the
command name, and changing key=value to key: value. All
options will be treated as jinja2 templates and rendered prior
to passing to aria2. default ''
Sample configuration:
aria2:
server: myserver
port: 6802
do: add-new
exclude_samples: yes
exclude_non_content: yes
parse_filename: yes
content_is_episodes: yes
rename_content_files: yes
        rename_template: '{{series_name}} - {{series_id|lower}}'
aria_config:
max-connection-per-server: 4
max-concurrent-downloads: 4
split: 4
file-allocation: none
dir: "/Volumes/all_my_tv/{{series_name}}"
"""
schema = {
'type': 'object',
'properties': {
'server': {'type': 'string', 'default': 'localhost'},
'port': {'type': 'integer', 'default': 6800},
'username': {'type': 'string', 'default': ''},
'password': {'type': 'string', 'default': ''},
'do': {'type': 'string', 'enum': ['add-new', 'remove-completed']},
'uri': {'type': 'string'},
'exclude_samples': {'type': 'boolean', 'default': False},
'exclude_non_content': {'type': 'boolean', 'default': True},
'rename_content_files': {'type': 'boolean', 'default': False},
'content_is_episodes': {'type': 'boolean', 'default': False},
'keep_parent_folders': {'type': 'boolean', 'default': False},
'parse_filename': {'type': 'boolean', 'default': False},
'fix_year': {'type': 'boolean', 'default': True},
'rename_template': {'type': 'string', 'default': ''},
'file_exts': {
'type': 'array',
'items': {'type': 'string'},
'default': ['.mkv', '.avi', '.mp4', '.wmv', '.asf', '.divx', '.mov', '.mpg', '.rm']
},
'aria_config': {
'type': 'object',
'additionalProperties': {'oneOf': [{'type': 'string'}, {'type': 'integer'}]}
}
},
'required': ['do'],
'additionalProperties': False
}
def on_task_output(self, task, config):
if 'aria_config' not in config:
config['aria_config'] = {}
if 'uri' not in config and config['do'] == 'add-new':
raise plugin.PluginError('uri (path to folder containing file(s) on server) is required when adding new '
'downloads.', log)
if 'dir' not in config['aria_config']:
if config['do'] == 'add-new':
raise plugin.PluginError('dir (destination directory) is required.', log)
else:
config['aria_config']['dir'] = ''
if config['keep_parent_folders'] and config['aria_config']['dir'].find('{{parent_folders}}') == -1:
raise plugin.PluginError('When using keep_parent_folders, you must specify {{parent_folders}} in the dir '
'option to show where it goes.', log)
if config['rename_content_files'] and not config['rename_template']:
raise plugin.PluginError('When using rename_content_files, you must specify a rename_template.', log)
if config['username'] and not config['password']:
raise plugin.PluginError('If you specify an aria2 username, you must specify a password.')
try:
userpass = ''
if config['username']:
userpass = '%s:%s@' % (config['username'], config['password'])
baseurl = 'http://%s%s:%s/rpc' % (userpass, config['server'], config['port'])
log.debug('base url: %s' % baseurl)
s = xmlrpc.client.ServerProxy(baseurl)
log.info('Connected to daemon at ' + baseurl + '.')
except xmlrpc.client.ProtocolError as err:
raise plugin.PluginError('Could not connect to aria2 at %s. Protocol error %s: %s'
% (baseurl, err.errcode, err.errmsg), log)
except xmlrpc.client.Fault as err:
raise plugin.PluginError('XML-RPC fault: Unable to connect to aria2 daemon at %s: %s'
% (baseurl, err.faultString), log)
except socket_error as e:
(error, msg) = e.args
raise plugin.PluginError('Socket connection issue with aria2 daemon at %s: %s'
% (baseurl, msg), log)
except:
raise plugin.PluginError('Unidentified error during connection to aria2 daemon at %s' % baseurl, log)
# loop entries
for entry in task.accepted:
config['aria_dir'] = config['aria_config']['dir']
if 'aria_gid' in entry:
config['aria_config']['gid'] = entry['aria_gid']
elif 'torrent_info_hash' in entry:
config['aria_config']['gid'] = entry['torrent_info_hash'][0:16]
elif 'gid' in config['aria_config']:
del(config['aria_config']['gid'])
if 'content_files' not in entry:
if entry['url']:
entry['content_files'] = [entry['url']]
else:
entry['content_files'] = [entry['title']]
else:
if not isinstance(entry['content_files'], list):
entry['content_files'] = [entry['content_files']]
counter = 0
for cur_file in entry['content_files']:
entry['parent_folders'] = ''
# reset the 'dir' or it will only be rendered on the first loop
config['aria_config']['dir'] = config['aria_dir']
cur_filename = cur_file.split('/')[-1]
if cur_file.split('/')[0] != cur_filename and config['keep_parent_folders']:
lastSlash = cur_file.rfind('/')
cur_path = cur_file[:lastSlash]
if cur_path[0:1] == '/':
cur_path = cur_path[1:]
entry['parent_folders'] = cur_path
log.debug('parent folders: %s' % entry['parent_folders'])
file_dot = cur_filename.rfind(".")
file_ext = cur_filename[file_dot:]
if len(entry['content_files']) > 1 and 'gid' in config['aria_config']:
# if there is more than 1 file, need to give unique gids, this will work up to 999 files
counter += 1
strCounter = str(counter)
if len(entry['content_files']) > 99:
# sorry not sorry if you have more than 999 files
config['aria_config']['gid'] = ''.join([config['aria_config']['gid'][0:-3],
strCounter.rjust(3, str('0'))])
else:
config['aria_config']['gid'] = ''.join([config['aria_config']['gid'][0:-2],
strCounter.rjust(2, str('0'))])
                if config['exclude_samples']:
                    # remove sample files from the download list
                    if 'sample' in cur_filename.lower():
                        continue
                if file_ext not in config['file_exts']:
                    if config['exclude_non_content']:
                        # don't download non-content files, like nfos - definable in file_exts
                        continue
if config['parse_filename']:
if config['content_is_episodes']:
metainfo_series = plugin.get_plugin_by_name('metainfo_series')
guess_series = metainfo_series.instance.guess_series
if guess_series(cur_filename):
parser = guess_series(cur_filename)
entry['series_name'] = parser.name
# if the last four chars are numbers, REALLY good chance it's actually a year...
# fix it if so desired
log.verbose(entry['series_name'])
if re.search(r'\d{4}', entry['series_name'][-4:]) is not None and config['fix_year']:
entry['series_name'] = ''.join([entry['series_name'][0:-4], '(',
entry['series_name'][-4:], ')'])
log.verbose(entry['series_name'])
                            parser.data = cur_filename
                            parser.parse()
log.debug(parser.id_type)
if parser.id_type == 'ep':
entry['series_id'] = ''.join(['S', str(parser.season).rjust(2, str('0')), 'E',
str(parser.episode).rjust(2, str('0'))])
elif parser.id_type == 'sequence':
entry['series_id'] = parser.episode
elif parser.id_type and parser.id:
entry['series_id'] = parser.id
else:
parser = get_plugin_by_name('parsing').instance.parse_movie(cur_filename)
parser.parse()
log.info(parser)
testname = parser.name
testyear = parser.year
parser.data = entry['title']
parser.parse()
log.info(parser)
if len(parser.name) > len(testname):
entry['name'] = parser.name
entry['movie_name'] = parser.name
else:
entry['name'] = testname
entry['movie_name'] = testname
if parser.year:
entry['year'] = parser.year
entry['movie_year'] = parser.year
else:
entry['year'] = testyear
entry['movie_year'] = testyear
if config['rename_content_files']:
if config['content_is_episodes']:
try:
config['aria_config']['out'] = entry.render(config['rename_template']) + file_ext
log.verbose(config['aria_config']['out'])
except RenderError as e:
log.error('Could not rename file %s: %s.' % (cur_filename, e))
continue
else:
try:
config['aria_config']['out'] = entry.render(config['rename_template']) + file_ext
log.verbose(config['aria_config']['out'])
except RenderError as e:
log.error('Could not rename file %s: %s. Try enabling imdb_lookup in this task'
' to assist.' % (cur_filename, e))
continue
elif 'torrent_info_hash' not in entry:
config['aria_config']['out'] = cur_filename
if config['do'] == 'add-new':
log.debug('Adding new file')
new_download = 0
if 'gid' in config['aria_config']:
try:
r = s.aria2.tellStatus(config['aria_config']['gid'], ['gid', 'status'])
log.info('Download status for %s (gid %s): %s' % (
config['aria_config'].get('out', config['uri']), r['gid'],
r['status']))
if r['status'] == 'paused':
try:
if not task.manager.options.test:
s.aria2.unpause(r['gid'])
log.info(' Unpaused download.')
except xmlrpc.client.Fault as err:
raise plugin.PluginError(
'aria2 response to unpause request: %s' % err.faultString, log)
else:
log.info(' Therefore, not re-adding.')
except xmlrpc.client.Fault as err:
if err.faultString[-12:] == 'is not found':
new_download = 1
else:
raise plugin.PluginError('aria2 response to download status request: %s'
% err.faultString, log)
except xmlrpc.client.ProtocolError as err:
raise plugin.PluginError('Could not connect to aria2 at %s. Protocol error %s: %s'
% (baseurl, err.errcode, err.errmsg), log)
except socket_error as e:
(error, msg) = e.args
raise plugin.PluginError('Socket connection issue with aria2 daemon at %s: %s'
% (baseurl, msg), log)
else:
new_download = 1
if new_download == 1:
try:
entry['filename'] = cur_file
cur_uri = entry.render(config['uri'])
log.verbose('uri: %s' % cur_uri)
except RenderError as e:
raise plugin.PluginError('Unable to render uri: %s' % e)
try:
for key, value in config['aria_config'].items():
log.trace('rendering %s: %s' % (key, value))
config['aria_config'][key] = entry.render(str(value))
log.debug('dir: %s' % config['aria_config']['dir'])
if not task.manager.options.test:
r = s.aria2.addUri([cur_uri], config['aria_config'])
else:
if 'gid' not in config['aria_config']:
r = '1234567890123456'
else:
r = config['aria_config']['gid']
log.info('%s successfully added to aria2 with gid %s.' % (
config['aria_config'].get('out', config['uri']),
r))
except xmlrpc.client.Fault as err:
raise plugin.PluginError('aria2 response to add URI request: %s' % err.faultString, log)
except socket_error as e:
(error, msg) = e.args
raise plugin.PluginError('Socket connection issue with aria2 daemon at %s: %s'
% (baseurl, msg), log)
except RenderError as e:
                        raise plugin.PluginError('Unable to render one of the fields being passed '
                                                 'to aria2: %s' % e)
elif config['do'] == 'remove-completed':
try:
r = s.aria2.tellStatus(config['aria_config']['gid'], ['gid', 'status'])
log.info('Status of download with gid %s: %s' % (r['gid'], r['status']))
if r['status'] in ['complete', 'removed']:
if not task.manager.options.test:
try:
a = s.aria2.removeDownloadResult(r['gid'])
if a == 'OK':
log.info('Download with gid %s removed from memory' % r['gid'])
except xmlrpc.client.Fault as err:
raise plugin.PluginError('aria2 response to remove request: %s'
% err.faultString, log)
except socket_error as e:
(error, msg) = e.args
raise plugin.PluginError('Socket connection issue with aria2 daemon at %s: %s'
% (baseurl, msg), log)
else:
log.info('Download with gid %s could not be removed because of its status: %s'
% (r['gid'], r['status']))
except xmlrpc.client.Fault as err:
if err.faultString[-12:] == 'is not found':
log.warning('Download with gid %s could not be removed because it was not found. It was '
'possibly previously removed or never added.' % config['aria_config']['gid'])
else:
raise plugin.PluginError('aria2 response to status request: %s' % err.faultString, log)
except socket_error as e:
(error, msg) = e.args
raise plugin.PluginError('Socket connection issue with aria2 daemon at %s: %s'
% (baseurl, msg), log)
@event('plugin.register')
def register_plugin():
plugin.register(OutputAria2, 'aria2', api_ver=2)
|
qvazzler/Flexget
|
flexget/plugins/plugin_aria2.py
|
Python
|
mit
| 22,207
|
import unittest
import os
from latexbuild.utils import (
random_str_uuid,
random_name_filepath,
list_filepathes_with_predicate,
read_file,
recursive_apply,
)
PATH_FILE = os.path.abspath(__file__)
PATH_TEST = os.path.dirname(PATH_FILE)
class TestRandomStrUuid(unittest.TestCase):
'''Test class for random_str_uuid'''
def test_correct_length(self):
l1, l2 = 4, 7
val1, val2 = random_str_uuid(l1), random_str_uuid(l2)
len1, len2 = len(val1), len(val2)
self.assertEqual(l1, len1)
self.assertEqual(l2, len2)
def test_random(self):
l = 7
v1, v2 = random_str_uuid(l), random_str_uuid(l)
self.assertNotEqual(v1, v2)
def test_below_1(self):
self.assertEqual(1, len(random_str_uuid(1)))
self.assertRaises(ValueError, random_str_uuid, 0)
def test_above_32(self):
self.assertEqual(32, len(random_str_uuid(32)))
self.assertRaises(ValueError, random_str_uuid, 33)
class TestRandomNameFilepath(unittest.TestCase):
'''Test class for random_name_filepath'''
PATH = "/hello/world/test.txt"
def test_correct_length(self):
len_random = 5
path_finish = random_name_filepath(self.PATH, len_random)
self.assertEqual(len(self.PATH) + len_random, len(path_finish))
def test_extension_still_there(self):
path_finish = random_name_filepath(self.PATH, 7)
ext_path_start = os.path.splitext(self.PATH)[-1]
ext_path_finish = os.path.splitext(path_finish)[-1]
self.assertEqual(ext_path_start, ext_path_finish)
def test_beginning_still_there(self):
len_random = 5
path_finish = random_name_filepath(self.PATH, len_random)
beg_start = os.path.splitext(self.PATH)[0]
beg_finish = os.path.splitext(path_finish)[0]
beg_finish_same = beg_finish[:-len_random]
self.assertEqual(beg_start, beg_finish_same)
def test_middle_is_random(self):
len_random = 5
path_1 = random_name_filepath(self.PATH, len_random)
path_2 = random_name_filepath(self.PATH, len_random)
beg_1 = os.path.splitext(path_1)[0][-len_random:]
beg_2 = os.path.splitext(path_2)[0][-len_random:]
self.assertNotEqual(beg_1, beg_2)
class TestListFilepathesWithPredicate(unittest.TestCase):
'''Test class for list_filepathes_with_predicate'''
def test_this_file(self):
most_of_this_file = PATH_FILE[:-2]
files = list_filepathes_with_predicate(PATH_TEST, most_of_this_file)
self.assertEqual(files, [PATH_FILE])
def test_not_a_match(self):
impossible_prefix = "no root therefore impossible"
files = list_filepathes_with_predicate(PATH_TEST, impossible_prefix)
self.assertEqual(files, [])
def test_invalid_directory(self):
self.assertRaises(ValueError, list_filepathes_with_predicate,
"notadirectory", "anything")
class TestReadFile(unittest.TestCase):
    '''Test class for read_file.

    This function is too simple to warrant testing at this time.
    '''
    pass
class TestRecursiveApply(unittest.TestCase):
'''Test class for recursive_apply'''
def test_nested_objects(self):
inval = {
'hello': {'man': 'woman', 'dog': 'cat'},
'world': 'smartiepants',
'brownie': [
'flower',
{'sugar': 'bad'},
'chocolate',
]
}
expected_outval = {
'hello': {'man': 'womanTEST', 'dog': 'catTEST'},
'world': 'smartiepantsTEST',
'brownie': [
'flowerTEST',
{'sugar': 'badTEST'},
'chocolateTEST',
]
}
        func = lambda s: s + 'TEST'
        actual_outval = recursive_apply(inval, func)
self.assertEqual(actual_outval, expected_outval)
if __name__ == '__main__':
unittest.main()
|
pappasam/latexbuild
|
tests/test_utils.py
|
Python
|
mit
| 4,066
|
__author__ = 'oier'
import json
from flask import Flask, make_response
app = Flask(__name__)
import seaborn as sns
import numpy as np
import pandas as pd
import os
from datetime import datetime
import matplotlib.pyplot as plt
import sys
from matplotlib.figure import Figure
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from io import StringIO
from sklearn import linear_model
from models import InputForm, ValueSelector
from flask import render_template, request
from compute import compute, load_data, line_plot
@app.route('/')
def index():
return 'Hello World!'
def form_values(request):
    data = load_data()
    form = ValueSelector(request)
    form.value.choices = [(k, i) for k, i in enumerate(data.columns)]
    return form
@app.route('/blood', methods=['GET', 'POST'])
def blood():
form = form_values(request.form)
    if request.method == 'POST':  # and form.validate():
result = line_plot(form.value.data)
else:
print("False")
result = None
return render_template('plot.html',
form=form, result=result)
@app.route('/vib1', methods=['GET', 'POST'])
def vib1():
#form = InputForm(request.form)
form = form_values(request.form)
if request.method == 'POST' and form.validate():
result = compute(form.A.data, form.b.data,
form.w.data, form.T.data)
else:
result = None
return render_template('view_plain.html',
form=form, result=result)
if __name__ == '__main__':
app.run()
|
oiertwo/vampyr
|
flask/index.py
|
Python
|
mit
| 1,592
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"""Write your forwards methods here."""
# Replace all null values with blanks
orm.TweetChunk.objects.filter(tz_country__isnull=True).update(tz_country='')
def backwards(self, orm):
"""Write your backwards methods here."""
# Nothing to do -- blanks are still ok in the previous version
models = {
u'map.maptimeframe': {
'Meta': {'object_name': 'MapTimeFrame'},
'analysis_time': ('django.db.models.fields.FloatField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'calculated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'chunks_added': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'missing_data': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'node_cache_hits': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'node_cache_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'nodes_added': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'tweet_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'map.treenode': {
'Meta': {'object_name': 'TreeNode', 'index_together': "[['parent', 'word']]"},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['map.TreeNode']"}),
'word': ('django.db.models.fields.CharField', [], {'max_length': '150'})
},
u'map.tweetchunk': {
'Meta': {'object_name': 'TweetChunk'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'node': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['map.TreeNode']"}),
'tweet': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['twitter_stream.Tweet']"}),
'tz_country': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'})
},
u'map.tz_country': {
'Meta': {'object_name': 'Tz_Country'},
'country': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user_time_zone': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
u'twitter_stream.tweet': {
'Meta': {'object_name': 'Tweet'},
'analyzed_by': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'favorite_count': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'filter_level': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '6', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_reply_to_status_id': ('django.db.models.fields.BigIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'lang': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '9', 'null': 'True', 'blank': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'retweet_count': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'retweeted_status_id': ('django.db.models.fields.BigIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'truncated': ('django.db.models.fields.BooleanField', [], {}),
'tweet_id': ('django.db.models.fields.BigIntegerField', [], {}),
'user_followers_count': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'user_friends_count': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'user_geo_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user_id': ('django.db.models.fields.BigIntegerField', [], {}),
'user_location': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '150', 'null': 'True', 'blank': 'True'}),
'user_name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'user_screen_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'user_time_zone': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '150', 'null': 'True', 'blank': 'True'}),
'user_utc_offset': ('django.db.models.fields.IntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'user_verified': ('django.db.models.fields.BooleanField', [], {})
}
}
complete_apps = ['map']
symmetrical = True
|
michaelbrooks/twitter-feels
|
twitter_feels/apps/map/migrations/0003_replace_null_tweetchunk_tz_country.py
|
Python
|
mit
| 5,898
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Dict, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._dev_spaces_management_client_enums import *
class ContainerHostMapping(msrest.serialization.Model):
"""Container host mapping object specifying the Container host resource ID and its associated Controller resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param container_host_resource_id: ARM ID of the Container Host resource.
:type container_host_resource_id: str
:ivar mapped_controller_resource_id: ARM ID of the mapped Controller resource.
:vartype mapped_controller_resource_id: str
"""
_validation = {
'mapped_controller_resource_id': {'readonly': True},
}
_attribute_map = {
'container_host_resource_id': {'key': 'containerHostResourceId', 'type': 'str'},
'mapped_controller_resource_id': {'key': 'mappedControllerResourceId', 'type': 'str'},
}
def __init__(
self,
*,
container_host_resource_id: Optional[str] = None,
**kwargs
):
super(ContainerHostMapping, self).__init__(**kwargs)
self.container_host_resource_id = container_host_resource_id
self.mapped_controller_resource_id = None
class Resource(msrest.serialization.Model):
"""An Azure resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Fully qualified resource Id for the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.type = None
class TrackedResource(Resource):
"""The resource model definition for a ARM tracked top level resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Fully qualified resource Id for the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:param tags: A set of tags. Tags for the Azure resource.
:type tags: dict[str, str]
:param location: Region where the Azure resource is located.
:type location: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
location: Optional[str] = None,
**kwargs
):
super(TrackedResource, self).__init__(**kwargs)
self.tags = tags
self.location = location
class Controller(TrackedResource):
"""Controller.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Fully qualified resource Id for the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:param tags: A set of tags. Tags for the Azure resource.
:type tags: dict[str, str]
:param location: Region where the Azure resource is located.
:type location: str
:param sku: Required. Model representing SKU for Azure Dev Spaces Controller.
:type sku: ~dev_spaces_management_client.models.Sku
:ivar provisioning_state: Provisioning state of the Azure Dev Spaces Controller. Possible
values include: "Succeeded", "Failed", "Canceled", "Updating", "Creating", "Deleting",
"Deleted".
:vartype provisioning_state: str or ~dev_spaces_management_client.models.ProvisioningState
:ivar host_suffix: DNS suffix for public endpoints running in the Azure Dev Spaces Controller.
:vartype host_suffix: str
:ivar data_plane_fqdn: DNS name for accessing DataPlane services.
:vartype data_plane_fqdn: str
:ivar target_container_host_api_server_fqdn: DNS of the target container host's API server.
:vartype target_container_host_api_server_fqdn: str
:param target_container_host_resource_id: Required. Resource ID of the target container host.
:type target_container_host_resource_id: str
:param target_container_host_credentials_base64: Required. Credentials of the target container
host (base64).
:type target_container_host_credentials_base64: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'sku': {'required': True},
'provisioning_state': {'readonly': True},
'host_suffix': {'readonly': True},
'data_plane_fqdn': {'readonly': True},
'target_container_host_api_server_fqdn': {'readonly': True},
'target_container_host_resource_id': {'required': True},
'target_container_host_credentials_base64': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'location': {'key': 'location', 'type': 'str'},
'sku': {'key': 'sku', 'type': 'Sku'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'host_suffix': {'key': 'properties.hostSuffix', 'type': 'str'},
'data_plane_fqdn': {'key': 'properties.dataPlaneFqdn', 'type': 'str'},
'target_container_host_api_server_fqdn': {'key': 'properties.targetContainerHostApiServerFqdn', 'type': 'str'},
'target_container_host_resource_id': {'key': 'properties.targetContainerHostResourceId', 'type': 'str'},
'target_container_host_credentials_base64': {'key': 'properties.targetContainerHostCredentialsBase64', 'type': 'str'},
}
def __init__(
self,
*,
sku: "Sku",
target_container_host_resource_id: str,
target_container_host_credentials_base64: str,
tags: Optional[Dict[str, str]] = None,
location: Optional[str] = None,
**kwargs
):
super(Controller, self).__init__(tags=tags, location=location, **kwargs)
self.sku = sku
self.provisioning_state = None
self.host_suffix = None
self.data_plane_fqdn = None
self.target_container_host_api_server_fqdn = None
self.target_container_host_resource_id = target_container_host_resource_id
self.target_container_host_credentials_base64 = target_container_host_credentials_base64
class ControllerConnectionDetails(msrest.serialization.Model):
"""ControllerConnectionDetails.
:param orchestrator_specific_connection_details: Base class for types that supply values used
to connect to container orchestrators.
:type orchestrator_specific_connection_details:
~dev_spaces_management_client.models.OrchestratorSpecificConnectionDetails
"""
_attribute_map = {
'orchestrator_specific_connection_details': {'key': 'orchestratorSpecificConnectionDetails', 'type': 'OrchestratorSpecificConnectionDetails'},
}
def __init__(
self,
*,
orchestrator_specific_connection_details: Optional["OrchestratorSpecificConnectionDetails"] = None,
**kwargs
):
super(ControllerConnectionDetails, self).__init__(**kwargs)
self.orchestrator_specific_connection_details = orchestrator_specific_connection_details
class ControllerConnectionDetailsList(msrest.serialization.Model):
"""ControllerConnectionDetailsList.
:param connection_details_list: List of Azure Dev Spaces Controller connection details.
:type connection_details_list:
list[~dev_spaces_management_client.models.ControllerConnectionDetails]
"""
_attribute_map = {
'connection_details_list': {'key': 'connectionDetailsList', 'type': '[ControllerConnectionDetails]'},
}
def __init__(
self,
*,
connection_details_list: Optional[List["ControllerConnectionDetails"]] = None,
**kwargs
):
super(ControllerConnectionDetailsList, self).__init__(**kwargs)
self.connection_details_list = connection_details_list
class ControllerList(msrest.serialization.Model):
"""ControllerList.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: List of Azure Dev Spaces Controllers.
:type value: list[~dev_spaces_management_client.models.Controller]
:ivar next_link: The URI that can be used to request the next page for list of Azure Dev Spaces
Controllers.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Controller]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["Controller"]] = None,
**kwargs
):
super(ControllerList, self).__init__(**kwargs)
self.value = value
self.next_link = None
class ControllerUpdateParameters(msrest.serialization.Model):
"""Parameters for updating an Azure Dev Spaces Controller.
:param tags: A set of tags. Tags for the Azure Dev Spaces Controller.
:type tags: dict[str, str]
:param target_container_host_credentials_base64: Credentials of the target container host
(base64).
:type target_container_host_credentials_base64: str
"""
_attribute_map = {
'tags': {'key': 'tags', 'type': '{str}'},
'target_container_host_credentials_base64': {'key': 'properties.targetContainerHostCredentialsBase64', 'type': 'str'},
}
def __init__(
self,
*,
tags: Optional[Dict[str, str]] = None,
target_container_host_credentials_base64: Optional[str] = None,
**kwargs
):
super(ControllerUpdateParameters, self).__init__(**kwargs)
self.tags = tags
self.target_container_host_credentials_base64 = target_container_host_credentials_base64
class DevSpacesErrorResponse(msrest.serialization.Model):
"""Error response indicates that the service is not able to process the incoming request. The reason is provided in the error message.
:param error: The details of the error.
:type error: ~dev_spaces_management_client.models.ErrorDetails
"""
_attribute_map = {
'error': {'key': 'error', 'type': 'ErrorDetails'},
}
def __init__(
self,
*,
error: Optional["ErrorDetails"] = None,
**kwargs
):
super(DevSpacesErrorResponse, self).__init__(**kwargs)
self.error = error
class ErrorDetails(msrest.serialization.Model):
"""ErrorDetails.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar code: Status code for the error.
:vartype code: str
:ivar message: Error message describing the error in detail.
:vartype message: str
:ivar target: The target of the particular error.
:vartype target: str
"""
_validation = {
'code': {'readonly': True},
'message': {'readonly': True},
'target': {'readonly': True},
}
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'target': {'key': 'target', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ErrorDetails, self).__init__(**kwargs)
self.code = None
self.message = None
self.target = None
class OrchestratorSpecificConnectionDetails(msrest.serialization.Model):
"""Base class for types that supply values used to connect to container orchestrators.
You probably want to use the sub-classes and not this class directly. Known
sub-classes are: KubernetesConnectionDetails.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
    :ivar instance_type: Required. Gets the Instance type. Constant filled by server.
:vartype instance_type: str
"""
_validation = {
'instance_type': {'required': True, 'readonly': True},
}
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
}
_subtype_map = {
'instance_type': {'Kubernetes': 'KubernetesConnectionDetails'}
}
def __init__(
self,
**kwargs
):
super(OrchestratorSpecificConnectionDetails, self).__init__(**kwargs)
self.instance_type = None # type: Optional[str]
class KubernetesConnectionDetails(OrchestratorSpecificConnectionDetails):
"""Contains information used to connect to a Kubernetes cluster.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
    :ivar instance_type: Required. Gets the Instance type. Constant filled by server.
:vartype instance_type: str
:param kube_config: Gets the kubeconfig for the cluster.
:type kube_config: str
"""
_validation = {
'instance_type': {'required': True, 'readonly': True},
}
_attribute_map = {
'instance_type': {'key': 'instanceType', 'type': 'str'},
'kube_config': {'key': 'kubeConfig', 'type': 'str'},
}
def __init__(
self,
*,
kube_config: Optional[str] = None,
**kwargs
):
super(KubernetesConnectionDetails, self).__init__(**kwargs)
self.instance_type = 'Kubernetes' # type: str
self.kube_config = kube_config
class ListConnectionDetailsParameters(msrest.serialization.Model):
"""Parameters for listing connection details of an Azure Dev Spaces Controller.
All required parameters must be populated in order to send to Azure.
:param target_container_host_resource_id: Required. Resource ID of the target container host
mapped to the Azure Dev Spaces Controller.
:type target_container_host_resource_id: str
"""
_validation = {
'target_container_host_resource_id': {'required': True},
}
_attribute_map = {
'target_container_host_resource_id': {'key': 'targetContainerHostResourceId', 'type': 'str'},
}
def __init__(
self,
*,
target_container_host_resource_id: str,
**kwargs
):
super(ListConnectionDetailsParameters, self).__init__(**kwargs)
self.target_container_host_resource_id = target_container_host_resource_id
class ResourceProviderOperationDefinition(msrest.serialization.Model):
"""ResourceProviderOperationDefinition.
:param name: Resource provider operation name.
:type name: str
:param display:
:type display: ~dev_spaces_management_client.models.ResourceProviderOperationDisplay
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'display': {'key': 'display', 'type': 'ResourceProviderOperationDisplay'},
}
def __init__(
self,
*,
name: Optional[str] = None,
display: Optional["ResourceProviderOperationDisplay"] = None,
**kwargs
):
super(ResourceProviderOperationDefinition, self).__init__(**kwargs)
self.name = name
self.display = display
class ResourceProviderOperationDisplay(msrest.serialization.Model):
"""ResourceProviderOperationDisplay.
:param provider: Name of the resource provider.
:type provider: str
:param resource: Name of the resource type.
:type resource: str
:param operation: Name of the resource provider operation.
:type operation: str
:param description: Description of the resource provider operation.
:type description: str
"""
_attribute_map = {
'provider': {'key': 'provider', 'type': 'str'},
'resource': {'key': 'resource', 'type': 'str'},
'operation': {'key': 'operation', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
*,
provider: Optional[str] = None,
resource: Optional[str] = None,
operation: Optional[str] = None,
description: Optional[str] = None,
**kwargs
):
super(ResourceProviderOperationDisplay, self).__init__(**kwargs)
self.provider = provider
self.resource = resource
self.operation = operation
self.description = description
class ResourceProviderOperationList(msrest.serialization.Model):
"""ResourceProviderOperationList.
Variables are only populated by the server, and will be ignored when sending a request.
:param value: Resource provider operations list.
:type value: list[~dev_spaces_management_client.models.ResourceProviderOperationDefinition]
:ivar next_link: The URI that can be used to request the next page for list of Azure
operations.
:vartype next_link: str
"""
_validation = {
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ResourceProviderOperationDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[List["ResourceProviderOperationDefinition"]] = None,
**kwargs
):
super(ResourceProviderOperationList, self).__init__(**kwargs)
self.value = value
self.next_link = None
class Sku(msrest.serialization.Model):
"""Model representing SKU for Azure Dev Spaces Controller.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the SKU for Azure Dev Spaces Controller. Possible values
include: "S1".
:type name: str or ~dev_spaces_management_client.models.SkuName
:param tier: The tier of the SKU for Azure Dev Spaces Controller. Possible values include:
"Standard".
:type tier: str or ~dev_spaces_management_client.models.SkuTier
"""
_validation = {
'name': {'required': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'tier': {'key': 'tier', 'type': 'str'},
}
def __init__(
self,
*,
name: Union[str, "SkuName"],
tier: Optional[Union[str, "SkuTier"]] = None,
**kwargs
):
super(Sku, self).__init__(**kwargs)
self.name = name
self.tier = tier
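if __name__ == "__main__":
    # A minimal sketch (not part of the generated file) of building and
    # serializing a Controller; the resource ID and credentials below are
    # hypothetical placeholders.
    controller = Controller(
        sku=Sku(name="S1", tier="Standard"),
        target_container_host_resource_id=(
            "/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg"
            "/providers/Microsoft.ContainerService/managedClusters/demo"
        ),
        target_container_host_credentials_base64="ZGVtbw==",
        location="eastus",
        tags={"env": "dev"},
    )
    print(controller.serialize())  # msrest models expose serialize() / as_dict()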
|
Azure/azure-sdk-for-python
|
sdk/aks/azure-mgmt-devspaces/azure/mgmt/devspaces/models/_models_py3.py
|
Python
|
mit
| 19,905
|
"""
.. module:: CreateProfileForm
:synopsis: A form for completing a user's profile.
.. moduleauthor:: Dan Schlosser <dan@schlosser.io>
"""
from flask.ext.wtf import Form
from wtforms import StringField, HiddenField
from wtforms.validators import URL, Email, Required
EMAIL_ERROR = 'Please provide a valid email address.'
class CreateProfileForm(Form):
"""A form for completing a :class:`~app.models.User` profile after they
login to Eventum for the first time.
:ivar email: :class:`wtforms.fields.StringField` - The user's email
address.
:ivar name: :class:`wtforms.fields.StringField` - The user's name.
:ivar next: :class:`wtforms.fields.HiddenField` - The URL that they should
be redirected to after completing their profile.
"""
name = StringField('Full Name')
email = StringField('Email Address', [Email(message=EMAIL_ERROR),
Required(message=EMAIL_ERROR)])
next = HiddenField('hidden', [URL(require_tld=False)])
|
danrschlosser/eventum
|
eventum/forms/CreateProfileForm.py
|
Python
|
mit
| 1,023
|
# __author__ = MelissaChan
# -*- coding: utf-8 -*-
# 16-2-14 4:37 PM
# Convert word lists to vectors
# Build the vocabulary list
def createVocabList(dataSet):
vocabSet = set([])
for document in dataSet:
vocabSet = vocabSet | set(document)
return list(vocabSet)
# Convert a document into a vector
def setOfWords2Vec(vocablist, inputset):
    returnVec = [0] * len(vocablist)
    for word in inputset:
        if word in vocablist:
            returnVec[vocablist.index(word)] = 1  # mark each word as present at most once (set-of-words model)
        else:
            print "the word: %s is not in my vocabulary!" % word
    return returnVec
def bagOfWord2Vec(vocablist, inputset):
    returnVec = [0] * len(vocablist)  # count every occurrence of each word (bag-of-words model)
for word in inputset:
returnVec[vocablist.index(word)] += 1
return returnVec
# Test data set
def loadDataSet():
postingList = [['my','dog','has','flea','problems','please','help'],
['maybe','not','take','him','to','dog','park','stupid'],
['my','dalmatian','is','so','cute','I','love','him'],
['stop','posting','stupid','worthless','garbage'],
['mr','licks','ate','my','steak','how','to','stop','him'],
['quit','buying','worthless','stupid','dog','food']]
classVec = [0,1,0,1,0,1]
return postingList,classVec
# Training function
from numpy import *
def trainNB0(trainMatrix,trainCategory):
numTrainDocs = len(trainMatrix)
numWords = len(trainMatrix[0])
pAbusive = sum(trainCategory)/float(numTrainDocs)
    p0num = ones(numWords); p1num = ones(numWords)  # init counts to 1 and denominators to 2 (Laplace smoothing) to avoid zero factors
p0denom = 2.0; p1denom = 2.0
for i in range(numTrainDocs):
if trainCategory[i] == 1:
p1num += trainMatrix[i]
p1denom += sum(trainMatrix[i])
else:
p0num += trainMatrix[i]
p0denom += sum(trainMatrix[i])
    p1vec = log(p1num/p1denom)  # take the natural log to avoid underflow
p0vec = log(p0num/p0denom)
return p0vec,p1vec,pAbusive
# Classification function
def classify(vec2Classify,p0vec,p1vec,pClass1):
p1 = sum(vec2Classify * p1vec) + log(pClass1)
p0 = sum(vec2Classify * p0vec) + log(1.0 - pClass1)
if p1 > p0:
return 1
    else:
        return 0
# Quick test harness
postlist,classvec = loadDataSet()
myVocabList = createVocabList(postlist)
# print myVocabList
trainMat = []
for postinDoc in postlist:
trainMat.append(setOfWords2Vec(myVocabList,postinDoc))
p0v,p1v,pAb = trainNB0(array(trainMat),array(classvec))
testEntry = ['love','my','dalmatian']
testEntry2 = ['stupid','my','my']
thisDoc = array(setOfWords2Vec(myVocabList,testEntry))
thisDoc2 = array(setOfWords2Vec(myVocabList,testEntry2))
# print testEntry2,'classified as: ',classify(thisDoc2,p0v,p1v,pAb)
# print testEntry,'classified as: ',classify(thisDoc,p0v,p1v,pAb)
# print p0v
# print p1v
# Spam filter
# Text parsing
def textParse(bigString):
    import re
    listOfTokens = re.split(r'\W+', bigString)  # split on runs of non-word characters
    return [tok.lower() for tok in listOfTokens if len(tok) > 2]
# The filter itself
def spamTest():
    # Load and parse the text files
docList=[]; classList = []; fullText =[]
for i in range(1,26):
wordList = textParse(open('/home/melissa/桌面/email/spam/%d.txt' % i).read())
docList.append(wordList)
fullText.extend(wordList)
classList.append(1)
wordList = textParse(open('/home/melissa/桌面/email/ham/%d.txt' % i).read())
docList.append(wordList)
fullText.extend(wordList)
classList.append(0)
    # Randomly build the training set (hold out 10 docs for testing)
vocabList = createVocabList(docList)
trainingSet = range(50); testSet=[]
for i in range(10):
randIndex = int(random.uniform(0,len(trainingSet)))
testSet.append(trainingSet[randIndex])
del(trainingSet[randIndex])
trainMat=[]; trainClasses = []
for docIndex in trainingSet:
trainMat.append(bagOfWord2Vec(vocabList, docList[docIndex]))
trainClasses.append(classList[docIndex])
p0V,p1V,pSpam = trainNB0(array(trainMat),array(trainClasses))
    # Classify the held-out test set
errorCount = 0
for docIndex in testSet:
wordVector = bagOfWord2Vec(vocabList, docList[docIndex])
if classify(array(wordVector),p0V,p1V,pSpam) != classList[docIndex]:
errorCount += 1
print "classification error",docList[docIndex]
print 'the error rate is: ',float(errorCount)/len(testSet)
for i in range(10):
spamTest()
|
MelissaChan/MachineLearning
|
naiveBayes/bayes.py
|
Python
|
mit
| 4,473
|
import unittest
class UnitParsingTest(unittest.TestCase):
def _assert_meters(self, tag_value, expected):
from vectordatasource.meta.function import mz_to_float_meters
parsed = mz_to_float_meters(tag_value)
if parsed is None and expected is not None:
self.fail("Failed to parse %r, but expected %r."
% (tag_value, expected))
elif parsed is not None and expected is None:
self.fail("Parsed %r as %r, but expected parsing to fail."
% (tag_value, parsed))
elif parsed != expected and abs(parsed - expected) > 0.001:
self.fail("Expected %r from %r, but got %r instead."
% (expected, tag_value, parsed))
def test_parse_miles(self):
self._assert_meters('1mi', 1609.3440)
def test_parse_kilometers(self):
self._assert_meters('1km', 1000.0)
def test_parse_meters(self):
self._assert_meters('1m', 1.0)
def test_parse_nautical_miles(self):
self._assert_meters('1nmi', 1852.0)
def test_parse_feet(self):
self._assert_meters('1ft', 0.3048)
def test_parse_space_variations(self):
self._assert_meters('1.0 m', 1.0)
self._assert_meters('10.0m', 10.0)
self._assert_meters('1 m', 1.0)
self._assert_meters('1m', 1.0)
def test_imperial(self):
self._assert_meters('1\'', 0.3048)
self._assert_meters('1.5\'', 0.3048 * 1.5)
self._assert_meters('1\'6"', 0.3048 * 1.5)
# this is technically allowed by the regex, so it should be parsed
# properly, but doesn't make any sense.
self._assert_meters('1.5\'6"', 0.3048 * 2)
def test_numeric(self):
# just a number on its own is assumed to be in meters
self._assert_meters('1234', 1234.0)
def test_junk_units(self):
# we shouldn't parse anything that's not a unit that we can convert.
self._assert_meters('1nm', None)
self._assert_meters('1foo', None)
self._assert_meters('1 foo', None)
self._assert_meters('not 1', None)
self._assert_meters('1mm', None)
def test_none(self):
# missing tags will be passed through as None, so we have to handle
# that by returning None.
self._assert_meters(None, None)
def test_finite(self):
# should return a finite number or None
self._assert_meters('NaN', None)
self._assert_meters('Inf', None)
self._assert_meters('-Inf', None)
class ToFloatTest(unittest.TestCase):
def test_finite(self):
# to_float should return a finite number or None. technically, both
# Inf and NaN are valid values for floats, but they do strange things
# and may raise unexpected exceptions during arithmetic. in general,
# we do not expect to see valid uses of NaN or Inf in input data.
from vectordatasource.util import to_float
self.assertIsNone(to_float('NaN'))
self.assertIsNone(to_float('Inf'))
self.assertIsNone(to_float('-Inf'))
|
mapzen/vector-datasource
|
test/test_meta_functions.py
|
Python
|
mit
| 3,085
|
#!/usr/bin/env python
# encoding: utf-8
import glob
import os
import subprocess
'''
Convert 23andMe files to
PLINK format
'''
def twenty3_and_me_files():
"""Return the opensnp files that are 23 and me format"""
all_twenty3_and_me_files= glob.glob('../opensnp_datadump.current/*.23andme.txt')
fifteen_mb = 15 * 1000 * 1000
non_junk_files = [path for path in all_twenty3_and_me_files if os.path.getsize(path) > fifteen_mb]
return non_junk_files
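# Hedged note: the 15 MB cutoff assumes that smaller files are truncated or
# partial uploads; a complete 23andMe raw-data export is typically a few tens
# of megabytes of text.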
def run_plink_format(usable_files):
"""Reformat the 23andMe files into plink binary stuff"""
for f in usable_files:
gid = f.split("/")[-1].split("_")[1].replace("file","")
call = "../plink_v190/plink --23file "+ f + " F" + gid + "ID" + gid + "I 1"
call += " --out ../plink_binaries/" + gid
print "convert gid " + gid
subprocess.call(call,shell=True)
usable_files = twenty3_and_me_files()
run_plink_format(usable_files)
|
ciyer/opensnp-fun
|
run_plink_reformat.py
|
Python
|
mit
| 886
|
import _plotly_utils.basevalidators
class NticksValidator(_plotly_utils.basevalidators.IntegerValidator):
def __init__(
self, plotly_name="nticks", parent_name="layout.ternary.baxis", **kwargs
):
super(NticksValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
min=kwargs.pop("min", 1),
**kwargs
)
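# Hedged usage sketch (not part of the generated file): plotly validators are
# normally exercised via validate_coerce, e.g.
#   v = NticksValidator()
#   v.validate_coerce(5)   # -> 5
#   v.validate_coerce(0)   # raises ValueError, since min is 1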
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/layout/ternary/baxis/_nticks.py
|
Python
|
mit
| 459
|
import unittest
import warnings
from collections import OrderedDict
import numpy as np
import numpy.testing as np_test
from pgmpy.extern.six.moves import range
from pgmpy.factors.discrete import DiscreteFactor
from pgmpy.factors.discrete import JointProbabilityDistribution as JPD
from pgmpy.factors import factor_divide
from pgmpy.factors import factor_product
from pgmpy.factors.discrete.CPD import TabularCPD
from pgmpy.independencies import Independencies
from pgmpy.models import BayesianModel
from pgmpy.models import MarkovModel
class TestFactorInit(unittest.TestCase):
def test_class_init(self):
phi = DiscreteFactor(['x1', 'x2', 'x3'], [2, 2, 2], np.ones(8))
self.assertEqual(phi.variables, ['x1', 'x2', 'x3'])
np_test.assert_array_equal(phi.cardinality, np.array([2, 2, 2]))
np_test.assert_array_equal(phi.values, np.ones(8).reshape(2, 2, 2))
def test_class_init1(self):
phi = DiscreteFactor([1, 2, 3], [2, 3, 2], np.arange(12))
self.assertEqual(phi.variables, [1, 2, 3])
np_test.assert_array_equal(phi.cardinality, np.array([2, 3, 2]))
np_test.assert_array_equal(phi.values, np.arange(12).reshape(2, 3, 2))
def test_class_init_sizeerror(self):
self.assertRaises(ValueError, DiscreteFactor, ['x1', 'x2', 'x3'], [2, 2, 2], np.ones(9))
def test_class_init_typeerror(self):
self.assertRaises(TypeError, DiscreteFactor, 'x1', [3], [1, 2, 3])
self.assertRaises(ValueError, DiscreteFactor, ['x1', 'x1', 'x3'], [2, 3, 2], range(12))
def test_init_size_var_card_not_equal(self):
self.assertRaises(ValueError, DiscreteFactor, ['x1', 'x2'], [2], np.ones(2))
class TestFactorMethods(unittest.TestCase):
def setUp(self):
self.phi = DiscreteFactor(['x1', 'x2', 'x3'], [2, 2, 2], np.random.uniform(5, 10, size=8))
self.phi1 = DiscreteFactor(['x1', 'x2', 'x3'], [2, 3, 2], range(12))
self.phi2 = DiscreteFactor([('x1', 0), ('x2', 0), ('x3', 0)], [2, 3, 2], range(12))
# This larger factor (phi3) caused a bug in reduce
card3 = [3, 3, 3, 2, 2, 2, 2, 2, 2]
self.phi3 = DiscreteFactor(['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I'],
card3, np.arange(np.prod(card3), dtype=np.float))
self.tup1 = ('x1', 'x2')
self.tup2 = ('x2', 'x3')
self.tup3 = ('x3', (1, 'x4'))
self.phi4 = DiscreteFactor([self.tup1, self.tup2, self.tup3], [2, 3, 4], np.random.uniform(3, 10, size=24))
self.phi5 = DiscreteFactor([self.tup1, self.tup2, self.tup3], [2, 3, 4], range(24))
self.card6 = [4, 2, 1, 3, 5, 6]
self.phi6 = DiscreteFactor([self.tup1, self.tup2, self.tup3, self.tup1 + self.tup2,
self.tup2 + self.tup3, self.tup3 + self.tup1], self.card6,
np.arange(np.prod(self.card6), dtype=np.float))
self.var1 = 'x1'
self.var2 = ('x2', 1)
self.var3 = frozenset(['x1', 'x2'])
self.phi7 = DiscreteFactor([self.var1, self.var2], [3, 2], [3, 2, 4, 5, 9, 8])
self.phi8 = DiscreteFactor([self.var2, self.var3], [2, 2], [2, 1, 5, 6])
self.phi9 = DiscreteFactor([self.var1, self.var3], [3, 2], [3, 2, 4, 5, 9, 8])
self.phi10 = DiscreteFactor([self.var3], [2], [3, 6])
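    # Note on layout: DiscreteFactor stores values in row-major (C) order over
    # the variables as listed, so phi1's flat index 4 unravels over
    # cardinalities [2, 3, 2] to the assignment x1=0, x2=2, x3=0 (the last
    # variable varies fastest), as test_assignment below verifies.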
def test_scope(self):
self.assertListEqual(self.phi.scope(), ['x1', 'x2', 'x3'])
self.assertListEqual(self.phi1.scope(), ['x1', 'x2', 'x3'])
self.assertListEqual(self.phi4.scope(), [self.tup1, self.tup2, self.tup3])
def test_assignment(self):
self.assertListEqual(self.phi.assignment([0]), [[('x1', 0), ('x2', 0), ('x3', 0)]])
self.assertListEqual(self.phi.assignment([4, 5, 6]), [[('x1', 1), ('x2', 0), ('x3', 0)],
[('x1', 1), ('x2', 0), ('x3', 1)],
[('x1', 1), ('x2', 1), ('x3', 0)]])
self.assertListEqual(self.phi1.assignment(np.array([4, 5, 6])), [[('x1', 0), ('x2', 2), ('x3', 0)],
[('x1', 0), ('x2', 2), ('x3', 1)],
[('x1', 1), ('x2', 0), ('x3', 0)]])
self.assertListEqual(self.phi4.assignment(np.array([11, 12, 23])),
[[(self.tup1, 0), (self.tup2, 2), (self.tup3, 3)],
[(self.tup1, 1), (self.tup2, 0), (self.tup3, 0)],
[(self.tup1, 1), (self.tup2, 2), (self.tup3, 3)]])
def test_assignment_indexerror(self):
self.assertRaises(IndexError, self.phi.assignment, [10])
self.assertRaises(IndexError, self.phi.assignment, [1, 3, 10, 5])
self.assertRaises(IndexError, self.phi.assignment, np.array([1, 3, 10, 5]))
self.assertRaises(IndexError, self.phi4.assignment, [2, 24])
self.assertRaises(IndexError, self.phi4.assignment, np.array([24, 2, 4, 30]))
def test_get_cardinality(self):
self.assertEqual(self.phi.get_cardinality(['x1']), {'x1': 2})
self.assertEqual(self.phi.get_cardinality(['x2']), {'x2': 2})
self.assertEqual(self.phi.get_cardinality(['x3']), {'x3': 2})
self.assertEqual(self.phi.get_cardinality(['x1', 'x2']), {'x1': 2, 'x2': 2})
self.assertEqual(self.phi.get_cardinality(['x1', 'x3']), {'x1': 2, 'x3': 2})
self.assertEqual(self.phi.get_cardinality(['x1', 'x2', 'x3']), {'x1': 2, 'x2': 2, 'x3': 2})
self.assertEqual(self.phi4.get_cardinality([self.tup1, self.tup3]),
{self.tup1: 2, self.tup3: 4})
def test_get_cardinality_scopeerror(self):
self.assertRaises(ValueError, self.phi.get_cardinality, ['x4'])
self.assertRaises(ValueError, self.phi4.get_cardinality, [('x1', 'x4')])
self.assertRaises(ValueError, self.phi4.get_cardinality, [('x3', (2, 'x4'))])
def test_get_cardinality_typeerror(self):
self.assertRaises(TypeError, self.phi.get_cardinality, 'x1')
def test_marginalize(self):
self.phi1.marginalize(['x1'])
np_test.assert_array_equal(self.phi1.values, np.array([[6, 8],
[10, 12],
[14, 16]]))
self.phi1.marginalize(['x2'])
np_test.assert_array_equal(self.phi1.values, np.array([30, 36]))
self.phi1.marginalize(['x3'])
np_test.assert_array_equal(self.phi1.values, np.array(66))
self.phi5.marginalize([self.tup1])
np_test.assert_array_equal(self.phi5.values, np.array([[12, 14, 16, 18],
[20, 22, 24, 26],
[28, 30, 32, 34]]))
self.phi5.marginalize([self.tup2])
np_test.assert_array_equal(self.phi5.values, np.array([60, 66, 72, 78]))
self.phi5.marginalize([self.tup3])
np_test.assert_array_equal(self.phi5.values, np.array([276]))
def test_marginalize_scopeerror(self):
self.assertRaises(ValueError, self.phi.marginalize, ['x4'])
self.phi.marginalize(['x1'])
self.assertRaises(ValueError, self.phi.marginalize, ['x1'])
self.assertRaises(ValueError, self.phi4.marginalize, [('x1', 'x3')])
self.phi4.marginalize([self.tup2])
self.assertRaises(ValueError, self.phi4.marginalize, [self.tup2])
def test_marginalize_typeerror(self):
self.assertRaises(TypeError, self.phi.marginalize, 'x1')
def test_marginalize_shape(self):
values = ['A', 'D', 'F', 'H']
phi3_mar = self.phi3.marginalize(values, inplace=False)
# Previously a sorting error caused these to be different
np_test.assert_array_equal(phi3_mar.values.shape, phi3_mar.cardinality)
phi6_mar = self.phi6.marginalize([self.tup1, self.tup2], inplace=False)
np_test.assert_array_equal(phi6_mar.values.shape, phi6_mar.cardinality)
self.phi6.marginalize([self.tup1, self.tup3 + self.tup1], inplace=True)
np_test.assert_array_equal(self.phi6.values.shape, self.phi6.cardinality)
def test_normalize(self):
self.phi1.normalize()
np_test.assert_almost_equal(self.phi1.values,
np.array([[[0, 0.01515152],
[0.03030303, 0.04545455],
[0.06060606, 0.07575758]],
[[0.09090909, 0.10606061],
[0.12121212, 0.13636364],
[0.15151515, 0.16666667]]]))
self.phi5.normalize()
np_test.assert_almost_equal(self.phi5.values,
[[[0., 0.00362319, 0.00724638, 0.01086957],
[0.01449275, 0.01811594, 0.02173913, 0.02536232],
[0.02898551, 0.0326087, 0.03623188, 0.03985507]],
[[0.04347826, 0.04710145, 0.05072464, 0.05434783],
[0.05797101, 0.0615942, 0.06521739, 0.06884058],
[0.07246377, 0.07608696, 0.07971014, 0.08333333]]])
def test_reduce(self):
self.phi1.reduce([('x1', 0), ('x2', 0)])
np_test.assert_array_equal(self.phi1.values, np.array([0, 1]))
self.phi5.reduce([(self.tup1, 0), (self.tup3, 1)])
np_test.assert_array_equal(self.phi5.values, np.array([1, 5, 9]))
def test_reduce1(self):
self.phi1.reduce([('x2', 0), ('x1', 0)])
np_test.assert_array_equal(self.phi1.values, np.array([0, 1]))
self.phi5.reduce([(self.tup3, 1), (self.tup1, 0)])
np_test.assert_array_equal(self.phi5.values, np.array([1, 5, 9]))
def test_reduce_shape(self):
values = [('A', 0), ('D', 0), ('F', 0), ('H', 1)]
phi3_reduced = self.phi3.reduce(values, inplace=False)
# Previously a sorting error caused these to be different
np_test.assert_array_equal(phi3_reduced.values.shape, phi3_reduced.cardinality)
values = [(self.tup1, 2), (self.tup3, 0)]
phi6_reduced = self.phi6.reduce(values, inplace=False)
np_test.assert_array_equal(phi6_reduced.values.shape, phi6_reduced.cardinality)
self.phi6.reduce(values, inplace=True)
np_test.assert_array_equal(self.phi6.values.shape, self.phi6.cardinality)
def test_complete_reduce(self):
self.phi1.reduce([('x1', 0), ('x2', 0), ('x3', 1)])
np_test.assert_array_equal(self.phi1.values, np.array([1]))
np_test.assert_array_equal(self.phi1.cardinality, np.array([]))
np_test.assert_array_equal(self.phi1.variables, OrderedDict())
self.phi5.reduce([(('x1', 'x2'), 1), (('x2', 'x3'), 0), (('x3', (1, 'x4')), 3)])
np_test.assert_array_equal(self.phi5.values, np.array([15]))
np_test.assert_array_equal(self.phi5.cardinality, np.array([]))
np_test.assert_array_equal(self.phi5.variables, OrderedDict())
def test_reduce_typeerror(self):
self.assertRaises(TypeError, self.phi1.reduce, 'x10')
self.assertRaises(TypeError, self.phi1.reduce, ['x10'])
self.assertRaises(TypeError, self.phi1.reduce, [('x1', 'x2')])
self.assertRaises(TypeError, self.phi1.reduce, [(0, 'x1')])
self.assertRaises(TypeError, self.phi1.reduce, [(0.1, 'x1')])
self.assertRaises(TypeError, self.phi1.reduce, [(0.1, 0.1)])
self.assertRaises(TypeError, self.phi1.reduce, [('x1', 0.1)])
self.assertRaises(TypeError, self.phi5.reduce, [(('x1', 'x2'), 0), (('x2', 'x3'), 0.2)])
def test_reduce_scopeerror(self):
self.assertRaises(ValueError, self.phi1.reduce, [('x4', 1)])
self.assertRaises(ValueError, self.phi5.reduce, [((('x1', 0.1), 0))])
def test_reduce_sizeerror(self):
self.assertRaises(IndexError, self.phi1.reduce, [('x3', 5)])
self.assertRaises(IndexError, self.phi5.reduce, [(('x2', 'x3'), 3)])
def test_identity_factor(self):
identity_factor = self.phi.identity_factor()
self.assertEqual(list(identity_factor.variables), ['x1', 'x2', 'x3'])
np_test.assert_array_equal(identity_factor.cardinality, [2, 2, 2])
np_test.assert_array_equal(identity_factor.values, np.ones(8).reshape(2, 2, 2))
identity_factor1 = self.phi5.identity_factor()
self.assertEqual(list(identity_factor1.variables), [self.tup1, self.tup2, self.tup3])
np_test.assert_array_equal(identity_factor1.cardinality, [2, 3, 4])
np_test.assert_array_equal(identity_factor1.values, np.ones(24).reshape(2, 3, 4))
def test_factor_product(self):
phi = DiscreteFactor(['x1', 'x2'], [2, 2], range(4))
phi1 = DiscreteFactor(['x3', 'x4'], [2, 2], range(4))
prod = factor_product(phi, phi1)
expected_factor = DiscreteFactor(['x1', 'x2', 'x3', 'x4'], [2, 2, 2, 2],
[0, 0, 0, 0, 0, 1, 2, 3, 0, 2, 4, 6, 0, 3, 6, 9])
self.assertEqual(prod, expected_factor)
self.assertEqual(sorted(prod.variables), ['x1', 'x2', 'x3', 'x4'])
phi = DiscreteFactor(['x1', 'x2'], [3, 2], range(6))
phi1 = DiscreteFactor(['x2', 'x3'], [2, 2], range(4))
prod = factor_product(phi, phi1)
expected_factor = DiscreteFactor(['x1', 'x2', 'x3'], [3, 2, 2],
[0, 0, 2, 3, 0, 2, 6, 9, 0, 4, 10, 15])
self.assertEqual(prod, expected_factor)
self.assertEqual(prod.variables, expected_factor.variables)
prod = factor_product(self.phi7, self.phi8)
expected_factor = DiscreteFactor([self.var1, self.var2, self.var3], [3, 2, 2],
[6, 3, 10, 12, 8, 4, 25, 30, 18, 9, 40, 48])
self.assertEqual(prod, expected_factor)
self.assertEqual(prod.variables, expected_factor.variables)
def test_product(self):
phi = DiscreteFactor(['x1', 'x2'], [2, 2], range(4))
phi1 = DiscreteFactor(['x3', 'x4'], [2, 2], range(4))
prod = phi.product(phi1, inplace=False)
expected_factor = DiscreteFactor(['x1', 'x2', 'x3', 'x4'], [2, 2, 2, 2],
[0, 0, 0, 0, 0, 1, 2, 3, 0, 2, 4, 6, 0, 3, 6, 9])
self.assertEqual(prod, expected_factor)
self.assertEqual(sorted(prod.variables), ['x1', 'x2', 'x3', 'x4'])
phi = DiscreteFactor(['x1', 'x2'], [3, 2], range(6))
phi1 = DiscreteFactor(['x2', 'x3'], [2, 2], range(4))
prod = phi.product(phi1, inplace=False)
expected_factor = DiscreteFactor(['x1', 'x2', 'x3'], [3, 2, 2],
[0, 0, 2, 3, 0, 2, 6, 9, 0, 4, 10, 15])
self.assertEqual(prod, expected_factor)
self.assertEqual(sorted(prod.variables), ['x1', 'x2', 'x3'])
phi7_copy = self.phi7
phi7_copy.product(self.phi8, inplace=True)
expected_factor = DiscreteFactor([self.var1, self.var2, self.var3], [3, 2, 2],
[6, 3, 10, 12, 8, 4, 25, 30, 18, 9, 40, 48])
self.assertEqual(expected_factor, phi7_copy)
self.assertEqual(phi7_copy.variables, [self.var1, self.var2, self.var3])
def test_factor_product_non_factor_arg(self):
self.assertRaises(TypeError, factor_product, 1, 2)
def test_factor_mul(self):
phi = DiscreteFactor(['x1', 'x2'], [2, 2], range(4))
phi1 = DiscreteFactor(['x3', 'x4'], [2, 2], range(4))
prod = phi * phi1
sorted_vars = ['x1', 'x2', 'x3', 'x4']
for axis in range(prod.values.ndim):
exchange_index = prod.variables.index(sorted_vars[axis])
prod.variables[axis], prod.variables[exchange_index] = prod.variables[exchange_index], prod.variables[axis]
prod.values = prod.values.swapaxes(axis, exchange_index)
np_test.assert_almost_equal(prod.values.ravel(),
np.array([0, 0, 0, 0, 0, 1, 2, 3,
0, 2, 4, 6, 0, 3, 6, 9]))
self.assertEqual(prod.variables, ['x1', 'x2', 'x3', 'x4'])
def test_factor_divide(self):
phi1 = DiscreteFactor(['x1', 'x2'], [2, 2], [1, 2, 2, 4])
phi2 = DiscreteFactor(['x1'], [2], [1, 2])
expected_factor = phi1.divide(phi2, inplace=False)
phi3 = DiscreteFactor(['x1', 'x2'], [2, 2], [1, 2, 1, 2])
self.assertEqual(phi3, expected_factor)
self.phi9.divide(self.phi10, inplace=True)
np_test.assert_array_almost_equal(self.phi9.values, np.array([1.000000, 0.333333, 1.333333,
0.833333, 3.000000, 1.333333]).reshape(3, 2))
self.assertEqual(self.phi9.variables, [self.var1, self.var3])
def test_factor_divide_truediv(self):
phi1 = DiscreteFactor(['x1', 'x2'], [2, 2], [1, 2, 2, 4])
phi2 = DiscreteFactor(['x1'], [2], [1, 2])
div = phi1 / phi2
phi3 = DiscreteFactor(['x1', 'x2'], [2, 2], [1, 2, 1, 2])
self.assertEqual(phi3, div)
self.phi9 = self.phi9 / self.phi10
np_test.assert_array_almost_equal(self.phi9.values, np.array([1.000000, 0.333333, 1.333333,
0.833333, 3.000000, 1.333333]).reshape(3, 2))
self.assertEqual(self.phi9.variables, [self.var1, self.var3])
def test_factor_divide_invalid(self):
phi1 = DiscreteFactor(['x1', 'x2'], [2, 2], [1, 2, 3, 4])
phi2 = DiscreteFactor(['x1'], [2], [0, 2])
div = phi1.divide(phi2, inplace=False)
np_test.assert_array_equal(div.values.ravel(), np.array([np.inf, np.inf, 1.5, 2]))
def test_factor_divide_no_common_scope(self):
phi1 = DiscreteFactor(['x1', 'x2'], [2, 2], [1, 2, 3, 4])
phi2 = DiscreteFactor(['x3'], [2], [0, 2])
self.assertRaises(ValueError, factor_divide, phi1, phi2)
phi2 = DiscreteFactor([self.var3], [2], [2, 1])
self.assertRaises(ValueError, factor_divide, self.phi7, phi2)
def test_factor_divide_non_factor_arg(self):
self.assertRaises(TypeError, factor_divide, 1, 1)
def test_eq(self):
self.assertFalse(self.phi == self.phi1)
self.assertTrue(self.phi == self.phi)
self.assertTrue(self.phi1 == self.phi1)
self.assertTrue(self.phi5 == self.phi5)
self.assertFalse(self.phi5 == self.phi6)
self.assertTrue(self.phi6 == self.phi6)
def test_eq1(self):
phi1 = DiscreteFactor(['x1', 'x2', 'x3'], [2, 4, 3], range(24))
phi2 = DiscreteFactor(['x2', 'x1', 'x3'], [4, 2, 3],
[0, 1, 2, 12, 13, 14, 3, 4, 5, 15, 16, 17, 6, 7,
8, 18, 19, 20, 9, 10, 11, 21, 22, 23])
self.assertTrue(phi1 == phi2)
self.assertEqual(phi2.variables, ['x2', 'x1', 'x3'])
phi3 = DiscreteFactor([self.tup1, self.tup2, self.tup3], [2, 4, 3], range(24))
phi4 = DiscreteFactor([self.tup2, self.tup1, self.tup3], [4, 2, 3],
[0, 1, 2, 12, 13, 14, 3, 4, 5, 15, 16, 17,
6, 7, 8, 18, 19, 20, 9, 10, 11, 21, 22, 23])
self.assertTrue(phi3 == phi4)
def test_hash(self):
phi1 = DiscreteFactor(['x1', 'x2'], [2, 2], [1, 2, 3, 4])
phi2 = DiscreteFactor(['x2', 'x1'], [2, 2], [1, 3, 2, 4])
self.assertEqual(hash(phi1), hash(phi2))
phi1 = DiscreteFactor(['x1', 'x2', 'x3'], [2, 2, 2], range(8))
phi2 = DiscreteFactor(['x3', 'x1', 'x2'], [2, 2, 2], [0, 2, 4, 6, 1, 3, 5, 7])
self.assertEqual(hash(phi1), hash(phi2))
var1 = TestHash(1, 2)
phi3 = DiscreteFactor([var1, self.var2, self.var3], [2, 4, 3], range(24))
phi4 = DiscreteFactor([self.var2, var1, self.var3], [4, 2, 3],
[0, 1, 2, 12, 13, 14, 3, 4, 5, 15, 16, 17,
6, 7, 8, 18, 19, 20, 9, 10, 11, 21, 22, 23])
self.assertEqual(hash(phi3), hash(phi4))
var1 = TestHash(2, 3)
var2 = TestHash('x2', 1)
phi3 = DiscreteFactor([var1, var2, self.var3], [2, 2, 2], range(8))
phi4 = DiscreteFactor([self.var3, var1, var2], [2, 2, 2], [0, 2, 4, 6, 1, 3, 5, 7])
self.assertEqual(hash(phi3), hash(phi4))
def test_maximize_single(self):
self.phi1.maximize(['x1'])
self.assertEqual(self.phi1, DiscreteFactor(['x2', 'x3'], [3, 2], [6, 7, 8, 9, 10, 11]))
self.phi1.maximize(['x2'])
self.assertEqual(self.phi1, DiscreteFactor(['x3'], [2], [10, 11]))
self.phi2 = DiscreteFactor(['x1', 'x2', 'x3'], [3, 2, 2], [0.25, 0.35, 0.08, 0.16, 0.05, 0.07,
0.00, 0.00, 0.15, 0.21, 0.08, 0.18])
self.phi2.maximize(['x2'])
self.assertEqual(self.phi2, DiscreteFactor(['x1', 'x3'], [3, 2], [0.25, 0.35, 0.05,
0.07, 0.15, 0.21]))
self.phi5.maximize([('x1', 'x2')])
self.assertEqual(self.phi5, DiscreteFactor([('x2', 'x3'), ('x3', (1, 'x4'))], [3, 4],
[12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23]))
self.phi5.maximize([('x2', 'x3')])
self.assertEqual(self.phi5, DiscreteFactor([('x3', (1, 'x4'))], [4], [20, 21, 22, 23]))
def test_maximize_list(self):
self.phi1.maximize(['x1', 'x2'])
self.assertEqual(self.phi1, DiscreteFactor(['x3'], [2], [10, 11]))
self.phi5.maximize([('x1', 'x2'), ('x2', 'x3')])
self.assertEqual(self.phi5, DiscreteFactor([('x3', (1, 'x4'))], [4], [20, 21, 22, 23]))
def test_maximize_shape(self):
values = ['A', 'D', 'F', 'H']
phi3_max = self.phi3.maximize(values, inplace=False)
# Previously a sorting error caused these to be different
np_test.assert_array_equal(phi3_max.values.shape, phi3_max.cardinality)
phi = DiscreteFactor([self.var1, self.var2, self.var3], [3, 2, 2], [3, 2, 4, 5, 9, 8, 3, 2, 4, 5, 9, 8])
        phi_max = phi.maximize([self.var1, self.var2], inplace=False)
np_test.assert_array_equal(phi_max.values.shape, phi_max.cardinality)
def test_maximize_scopeerror(self):
self.assertRaises(ValueError, self.phi.maximize, ['x10'])
def test_maximize_typeerror(self):
self.assertRaises(TypeError, self.phi.maximize, 'x1')
def tearDown(self):
del self.phi
del self.phi1
del self.phi2
del self.phi3
del self.phi4
del self.phi5
del self.phi6
del self.phi7
del self.phi8
del self.phi9
del self.phi10
class TestHash:
# Used to check the hash function of DiscreteFactor class.
def __init__(self, x, y):
self.x = x
self.y = y
def __hash__(self):
return hash(str(self.x) + str(self.y))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.x == other.x and self.y == other.y
class TestTabularCPDInit(unittest.TestCase):
def test_cpd_init(self):
cpd = TabularCPD('grade', 3, [[0.1, 0.1, 0.1]])
self.assertEqual(cpd.variable, 'grade')
self.assertEqual(cpd.variable_card, 3)
self.assertEqual(list(cpd.variables), ['grade'])
np_test.assert_array_equal(cpd.cardinality, np.array([3]))
np_test.assert_array_almost_equal(cpd.values, np.array([0.1, 0.1, 0.1]))
values = [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]]
evidence = ['intel', 'diff']
evidence_card = [3, 2]
valid_value_inputs = [values, np.asarray(values)]
valid_evidence_inputs = [evidence, set(evidence), np.asarray(evidence)]
valid_evidence_card_inputs = [evidence_card, np.asarray(evidence_card)]
for value in valid_value_inputs:
for evidence in valid_evidence_inputs:
for evidence_card in valid_evidence_card_inputs:
cpd = TabularCPD('grade', 3, values, evidence=['intel', 'diff'], evidence_card=[3, 2])
self.assertEqual(cpd.variable, 'grade')
self.assertEqual(cpd.variable_card, 3)
np_test.assert_array_equal(cpd.cardinality, np.array([3, 3, 2]))
self.assertListEqual(list(cpd.variables), ['grade', 'intel', 'diff'])
np_test.assert_array_equal(cpd.values, np.array([0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.1, 0.1, 0.1, 0.1, 0.1, 0.1,
0.8, 0.8, 0.8, 0.8, 0.8, 0.8]).reshape(3, 3, 2))
cpd = TabularCPD('grade', 3, [[0.1, 0.1],
[0.1, 0.1],
[0.8, 0.8]],
evidence=['evi1'], evidence_card=[2.0])
self.assertEqual(cpd.variable, 'grade')
self.assertEqual(cpd.variable_card, 3)
np_test.assert_array_equal(cpd.cardinality, np.array([3, 2]))
self.assertListEqual(list(cpd.variables), ['grade', 'evi1'])
np_test.assert_array_equal(cpd.values, np.array([0.1, 0.1,
0.1, 0.1,
0.8, 0.8]).reshape(3, 2))
def test_cpd_init_event_card_not_int(self):
self.assertRaises(TypeError, TabularCPD, 'event', '2', [[0.1, 0.9]])
def test_cpd_init_cardinality_not_specified(self):
self.assertRaises(ValueError, TabularCPD, 'event', 3, [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]],
['evi1', 'evi2'], [5])
self.assertRaises(ValueError, TabularCPD, 'event', 3, [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]],
['evi1', 'evi2'], [5.0])
self.assertRaises(ValueError, TabularCPD, 'event', 3, [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]],
['evi1'], [5, 6])
self.assertRaises(TypeError, TabularCPD, 'event', 3, [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]],
'evi1', [5, 6])
def test_cpd_init_value_not_2d(self):
self.assertRaises(TypeError, TabularCPD, 'event', 3, [[[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]]],
['evi1', 'evi2'], [5, 6])
class TestTabularCPDMethods(unittest.TestCase):
def setUp(self):
self.cpd = TabularCPD('grade', 3, [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]],
evidence=['intel', 'diff'], evidence_card=[3, 2])
self.cpd2 = TabularCPD('J', 2, [[0.9, 0.3, 0.9, 0.3, 0.8, 0.8, 0.4, 0.4],
[0.1, 0.7, 0.1, 0.7, 0.2, 0.2, 0.6, 0.6]],
evidence=['A', 'B', 'C'], evidence_card=[2, 2, 2])
def test_marginalize_1(self):
self.cpd.marginalize(['diff'])
self.assertEqual(self.cpd.variable, 'grade')
self.assertEqual(self.cpd.variable_card, 3)
self.assertListEqual(list(self.cpd.variables), ['grade', 'intel'])
np_test.assert_array_equal(self.cpd.cardinality, np.array([3, 3]))
np_test.assert_array_equal(self.cpd.values.ravel(), np.array([0.1, 0.1, 0.1,
0.1, 0.1, 0.1,
0.8, 0.8, 0.8]))
def test_marginalize_2(self):
self.assertRaises(ValueError, self.cpd.marginalize, ['grade'])
def test_marginalize_3(self):
copy_cpd = self.cpd.copy()
copy_cpd.marginalize(['intel', 'diff'])
self.cpd.marginalize(['intel'])
self.cpd.marginalize(['diff'])
np_test.assert_array_almost_equal(self.cpd.values, copy_cpd.values)
def test_normalize(self):
cpd_un_normalized = TabularCPD('grade', 2, [[0.7, 0.2, 0.6, 0.2], [0.4, 0.4, 0.4, 0.8]],
['intel', 'diff'], [2, 2])
cpd_un_normalized.normalize()
np_test.assert_array_almost_equal(cpd_un_normalized.values, np.array([[[0.63636364, 0.33333333],
[0.6, 0.2]],
[[0.36363636, 0.66666667],
[0.4, 0.8]]]))
def test_normalize_not_in_place(self):
cpd_un_normalized = TabularCPD('grade', 2, [[0.7, 0.2, 0.6, 0.2], [0.4, 0.4, 0.4, 0.8]],
['intel', 'diff'], [2, 2])
np_test.assert_array_almost_equal(cpd_un_normalized.normalize(inplace=False).values,
np.array([[[0.63636364, 0.33333333],
[0.6, 0.2]],
[[0.36363636, 0.66666667],
[0.4, 0.8]]]))
def test_normalize_original_safe(self):
cpd_un_normalized = TabularCPD('grade', 2, [[0.7, 0.2, 0.6, 0.2], [0.4, 0.4, 0.4, 0.8]],
['intel', 'diff'], [2, 2])
cpd_un_normalized.normalize(inplace=False)
np_test.assert_array_almost_equal(cpd_un_normalized.values, np.array([[[0.7, 0.2], [0.6, 0.2]],
[[0.4, 0.4], [0.4, 0.8]]]))
def test__repr__(self):
grade_cpd = TabularCPD('grade', 3, [[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]],
evidence=['intel', 'diff'], evidence_card=[3, 2])
intel_cpd = TabularCPD('intel', 3, [[0.5], [0.3], [0.2]])
diff_cpd = TabularCPD('grade', 3, [[0.1, 0.1], [0.1, 0.1], [0.8, 0.8]], evidence=['diff'], evidence_card=[2])
self.assertEqual(repr(grade_cpd), '<TabularCPD representing P(grade:3 | intel:3, diff:2) at {address}>'
.format(address=hex(id(grade_cpd))))
self.assertEqual(repr(intel_cpd), '<TabularCPD representing P(intel:3) at {address}>'
.format(address=hex(id(intel_cpd))))
self.assertEqual(repr(diff_cpd), '<TabularCPD representing P(grade:3 | diff:2) at {address}>'
.format(address=hex(id(diff_cpd))))
def test_copy(self):
copy_cpd = self.cpd.copy()
np_test.assert_array_equal(self.cpd.get_values(), copy_cpd.get_values())
def test_copy_original_safe(self):
copy_cpd = self.cpd.copy()
copy_cpd.reorder_parents(['diff', 'intel'])
np_test.assert_array_equal(self.cpd.get_values(),
np.array([[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]]))
def test_reduce_1(self):
self.cpd.reduce([('diff', 0)])
np_test.assert_array_equal(self.cpd.get_values(), np.array([[0.1, 0.1, 0.1],
[0.1, 0.1, 0.1],
[0.8, 0.8, 0.8]]))
def test_reduce_2(self):
self.cpd.reduce([('intel', 0)])
np_test.assert_array_equal(self.cpd.get_values(), np.array([[0.1, 0.1],
[0.1, 0.1],
[0.8, 0.8]]))
def test_reduce_3(self):
self.cpd.reduce([('intel', 0), ('diff', 0)])
np_test.assert_array_equal(self.cpd.get_values(), np.array([[0.1],
[0.1],
[0.8]]))
def test_reduce_4(self):
self.assertRaises(ValueError, self.cpd.reduce, [('grade', 0)])
def test_reduce_5(self):
copy_cpd = self.cpd.copy()
copy_cpd.reduce([('intel', 2), ('diff', 1)])
self.cpd.reduce([('intel', 2)])
self.cpd.reduce([('diff', 1)])
np_test.assert_array_almost_equal(self.cpd.values, copy_cpd.values)
def test_get_values(self):
np_test.assert_array_equal(self.cpd.get_values(),
np.array([[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]]))
def test_reorder_parents_inplace(self):
new_vals = self.cpd2.reorder_parents(['B', 'A', 'C'])
np_test.assert_array_equal(new_vals, np.array([[0.9, 0.3, 0.8, 0.8, 0.9, 0.3, 0.4, 0.4],
[0.1, 0.7, 0.2, 0.2, 0.1, 0.7, 0.6, 0.6]]))
np_test.assert_array_equal(self.cpd2.get_values(),
np.array([[0.9, 0.3, 0.8, 0.8, 0.9, 0.3, 0.4, 0.4],
[0.1, 0.7, 0.2, 0.2, 0.1, 0.7, 0.6, 0.6]]))
def test_reorder_parents(self):
new_vals = self.cpd2.reorder_parents(['B', 'A', 'C'])
np_test.assert_array_equal(new_vals, np.array([[0.9, 0.3, 0.8, 0.8, 0.9, 0.3, 0.4, 0.4],
[0.1, 0.7, 0.2, 0.2, 0.1, 0.7, 0.6, 0.6]]))
def test_reorder_parents_no_effect(self):
self.cpd2.reorder_parents(['C', 'A', 'B'], inplace=False)
np_test.assert_array_equal(self.cpd2.get_values(),
np.array([[0.9, 0.3, 0.9, 0.3, 0.8, 0.8, 0.4, 0.4],
[0.1, 0.7, 0.1, 0.7, 0.2, 0.2, 0.6, 0.6]]))
def test_reorder_parents_warning(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("always")
self.cpd2.reorder_parents(['A', 'B', 'C'], inplace=False)
assert("Same ordering provided as current" in str(w[-1].message))
np_test.assert_array_equal(self.cpd2.get_values(),
np.array([[0.9, 0.3, 0.9, 0.3, 0.8, 0.8, 0.4, 0.4],
[0.1, 0.7, 0.1, 0.7, 0.2, 0.2, 0.6, 0.6]]))
def tearDown(self):
del self.cpd
class TestJointProbabilityDistributionInit(unittest.TestCase):
def test_jpd_init(self):
jpd = JPD(['x1', 'x2', 'x3'], [2, 3, 2], np.ones(12) / 12)
np_test.assert_array_equal(jpd.cardinality, np.array([2, 3, 2]))
np_test.assert_array_equal(jpd.values, np.ones(12).reshape(2, 3, 2) / 12)
self.assertEqual(jpd.get_cardinality(['x1', 'x2', 'x3']), {'x1': 2, 'x2': 3, 'x3': 2})
def test_jpd_init_exception(self):
self.assertRaises(ValueError, JPD, ['x1', 'x2', 'x3'], [2, 2, 2], np.ones(8))
class TestJointProbabilityDistributionMethods(unittest.TestCase):
def setUp(self):
self.jpd = JPD(['x1', 'x2', 'x3'], [2, 3, 2], values=np.ones(12) / 12)
self.jpd1 = JPD(['x1', 'x2', 'x3'], [2, 3, 2], values=np.ones(12) / 12)
self.jpd2 = JPD(['x1', 'x2', 'x3'], [2, 2, 3],
[0.126, 0.168, 0.126, 0.009, 0.045, 0.126, 0.252, 0.0224, 0.0056, 0.06, 0.036, 0.024])
self.jpd3 = JPD(['x1', 'x2', 'x3'], [2, 2, 2],
[5.0e-04, 5.225e-04, 0.00, 8.9775e-03, 9.9e-03, 5.39055e-02, 0.00, 9.261945e-01])
def test_jpd_marginal_distribution_list(self):
self.jpd.marginal_distribution(['x1', 'x2'])
np_test.assert_array_almost_equal(self.jpd.values,
np.array([[0.16666667, 0.16666667, 0.16666667],
[0.16666667, 0.16666667, 0.16666667]]))
np_test.assert_array_equal(self.jpd.cardinality, np.array([2, 3]))
dic = {'x1': 2, 'x2': 3}
self.assertEqual(self.jpd.get_cardinality(['x1', 'x2']), dic)
self.assertEqual(self.jpd.scope(), ['x1', 'x2'])
np_test.assert_almost_equal(np.sum(self.jpd.values), 1)
new_jpd = self.jpd1.marginal_distribution(['x1', 'x2'], inplace=False)
self.assertTrue(self.jpd1 != self.jpd)
self.assertTrue(new_jpd == self.jpd)
def test_marginal_distribution_str(self):
self.jpd.marginal_distribution('x1')
np_test.assert_array_almost_equal(self.jpd.values, np.array([0.5, 0.5]))
np_test.assert_array_equal(self.jpd.cardinality, np.array([2]))
self.assertEqual(self.jpd.scope(), ['x1'])
np_test.assert_almost_equal(np.sum(self.jpd.values), 1)
new_jpd = self.jpd1.marginal_distribution('x1', inplace=False)
self.assertTrue(self.jpd1 != self.jpd)
self.assertTrue(self.jpd == new_jpd)
def test_conditional_distribution_list(self):
self.jpd = self.jpd1.copy()
self.jpd.conditional_distribution([('x1', 1), ('x2', 0)])
np_test.assert_array_almost_equal(self.jpd.values, np.array([0.5, 0.5]))
np_test.assert_array_equal(self.jpd.cardinality, np.array([2]))
self.assertEqual(self.jpd.scope(), ['x3'])
np_test.assert_almost_equal(np.sum(self.jpd.values), 1)
new_jpd = self.jpd1.conditional_distribution([('x1', 1), ('x2', 0)], inplace=False)
self.assertTrue(self.jpd1 != self.jpd)
self.assertTrue(self.jpd == new_jpd)
def test_check_independence(self):
self.assertTrue(self.jpd2.check_independence(['x1'], ['x2']))
self.assertRaises(TypeError, self.jpd2.check_independence, 'x1', ['x2'])
self.assertRaises(TypeError, self.jpd2.check_independence, ['x1'], 'x2')
self.assertRaises(TypeError, self.jpd2.check_independence, ['x1'], ['x2'], 'x3')
self.assertFalse(self.jpd2.check_independence(['x1'], ['x2'], ('x3',), condition_random_variable=True))
self.assertFalse(self.jpd2.check_independence(['x1'], ['x2'], [('x3', 0)]))
self.assertTrue(self.jpd1.check_independence(['x1'], ['x2'], ('x3',), condition_random_variable=True))
self.assertTrue(self.jpd1.check_independence(['x1'], ['x2'], [('x3', 1)]))
self.assertTrue(self.jpd3.check_independence(['x1'], ['x2'], ('x3',), condition_random_variable=True))
def test_get_independencies(self):
independencies = Independencies(['x1', 'x2'], ['x2', 'x3'], ['x3', 'x1'])
independencies1 = Independencies(['x1', 'x2'])
self.assertEqual(self.jpd1.get_independencies(), independencies)
self.assertEqual(self.jpd2.get_independencies(), independencies1)
self.assertEqual(self.jpd1.get_independencies([('x3', 0)]), independencies1)
self.assertEqual(self.jpd2.get_independencies([('x3', 0)]), Independencies())
def test_minimal_imap(self):
bm = self.jpd1.minimal_imap(order=['x1', 'x2', 'x3'])
self.assertEqual(sorted(bm.edges()), sorted([('x1', 'x3'), ('x2', 'x3')]))
bm = self.jpd1.minimal_imap(order=['x2', 'x3', 'x1'])
self.assertEqual(sorted(bm.edges()), sorted([('x2', 'x1'), ('x3', 'x1')]))
bm = self.jpd2.minimal_imap(order=['x1', 'x2', 'x3'])
self.assertEqual(list(bm.edges()), [])
bm = self.jpd2.minimal_imap(order=['x1', 'x2'])
self.assertEqual(list(bm.edges()), [])
def test_repr(self):
self.assertEqual(repr(self.jpd1), '<Joint Distribution representing P(x1:2, x2:3, x3:2) at {address}>'.format(
address=hex(id(self.jpd1))))
def test_is_imap(self):
G1 = BayesianModel([('diff', 'grade'), ('intel', 'grade')])
diff_cpd = TabularCPD('diff', 2, [[0.2], [0.8]])
intel_cpd = TabularCPD('intel', 3, [[0.5], [0.3], [0.2]])
grade_cpd = TabularCPD('grade', 3,
[[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1, 0.1, 0.1],
[0.8, 0.8, 0.8, 0.8, 0.8, 0.8]],
evidence=['diff', 'intel'],
evidence_card=[2, 3])
G1.add_cpds(diff_cpd, intel_cpd, grade_cpd)
val = [0.01, 0.01, 0.08, 0.006, 0.006, 0.048, 0.004, 0.004, 0.032,
0.04, 0.04, 0.32, 0.024, 0.024, 0.192, 0.016, 0.016, 0.128]
jpd = JPD(['diff', 'intel', 'grade'], [2, 3, 3], val)
self.assertTrue(jpd.is_imap(G1))
self.assertRaises(TypeError, jpd.is_imap, MarkovModel())
def tearDown(self):
del self.jpd
del self.jpd1
del self.jpd2
del self.jpd3
#
# class TestTreeCPDInit(unittest.TestCase):
# def test_init_single_variable_nodes(self):
# tree = TreeCPD([('B', DiscreteFactor(['A'], [2], [0.8, 0.2]), 0),
# ('B', 'C', 1),
# ('C', DiscreteFactor(['A'], [2], [0.1, 0.9]), 0),
# ('C', 'D', 1),
# ('D', DiscreteFactor(['A'], [2], [0.9, 0.1]), 0),
# ('D', DiscreteFactor(['A'], [2], [0.4, 0.6]), 1)])
#
# self.assertTrue('B' in tree.nodes())
# self.assertTrue('C' in tree.nodes())
# self.assertTrue('D' in tree.nodes())
# self.assertTrue(DiscreteFactor(['A'], [2], [0.8, 0.2]) in tree.nodes())
# self.assertTrue(DiscreteFactor(['A'], [2], [0.1, 0.9]) in tree.nodes())
# self.assertTrue(DiscreteFactor(['A'], [2], [0.9, 0.1]) in tree.nodes())
# self.assertTrue(DiscreteFactor(['A'], [2], [0.4, 0.6]) in tree.nodes())
#
# self.assertTrue(('B', DiscreteFactor(['A'], [2], [0.8, 0.2]) in tree.edges()))
# self.assertTrue(('B', DiscreteFactor(['A'], [2], [0.1, 0.9]) in tree.edges()))
# self.assertTrue(('B', DiscreteFactor(['A'], [2], [0.9, 0.1]) in tree.edges()))
# self.assertTrue(('B', DiscreteFactor(['A'], [2], [0.4, 0.6]) in tree.edges()))
# self.assertTrue(('C', 'D') in tree.edges())
# self.assertTrue(('B', 'C') in tree.edges())
#
# self.assertEqual(tree['B'][DiscreteFactor(['A'], [2], [0.8, 0.2])]['label'], 0)
# self.assertEqual(tree['B']['C']['label'], 1)
# self.assertEqual(tree['C'][DiscreteFactor(['A'], [2], [0.1, 0.9])]['label'], 0)
# self.assertEqual(tree['C']['D']['label'], 1)
# self.assertEqual(tree['D'][DiscreteFactor(['A'], [2], [0.9, 0.1])]['label'], 0)
# self.assertEqual(tree['D'][DiscreteFactor(['A'], [2], [0.4, 0.6])]['label'], 1)
#
# self.assertRaises(ValueError, tree.add_edges_from, [('F', 'G')])
#
# def test_init_self_loop(self):
# self.assertRaises(ValueError, TreeCPD, [('B', 'B', 0)])
#
# def test_init_cycle(self):
# self.assertRaises(ValueError, TreeCPD, [('A', 'B', 0), ('B', 'C', 1), ('C', 'A', 0)])
#
# def test_init_multi_variable_nodes(self):
# tree = TreeCPD([(('B', 'C'), DiscreteFactor(['A'], [2], [0.8, 0.2]), (0, 0)),
# (('B', 'C'), 'D', (0, 1)),
# (('B', 'C'), DiscreteFactor(['A'], [2], [0.1, 0.9]), (1, 0)),
# (('B', 'C'), 'E', (1, 1)),
# ('D', DiscreteFactor(['A'], [2], [0.9, 0.1]), 0),
# ('D', DiscreteFactor(['A'], [2], [0.4, 0.6]), 1),
# ('E', DiscreteFactor(['A'], [2], [0.3, 0.7]), 0),
# ('E', DiscreteFactor(['A'], [2], [0.8, 0.2]), 1)
# ])
#
# self.assertTrue(('B', 'C') in tree.nodes())
# self.assertTrue('D' in tree.nodes())
# self.assertTrue('E' in tree.nodes())
# self.assertTrue(DiscreteFactor(['A'], [2], [0.8, 0.2]) in tree.nodes())
# self.assertTrue(DiscreteFactor(['A'], [2], [0.9, 0.1]) in tree.nodes())
#
# self.assertTrue((('B', 'C'), DiscreteFactor(['A'], [2], [0.8, 0.2]) in tree.edges()))
# self.assertTrue((('B', 'C'), 'E') in tree.edges())
# self.assertTrue(('D', DiscreteFactor(['A'], [2], [0.4, 0.6])) in tree.edges())
# self.assertTrue(('E', DiscreteFactor(['A'], [2], [0.8, 0.2])) in tree.edges())
#
# self.assertEqual(tree[('B', 'C')][DiscreteFactor(['A'], [2], [0.8, 0.2])]['label'], (0, 0))
# self.assertEqual(tree[('B', 'C')]['D']['label'], (0, 1))
# self.assertEqual(tree['D'][DiscreteFactor(['A'], [2], [0.9, 0.1])]['label'], 0)
# self.assertEqual(tree['E'][DiscreteFactor(['A'], [2], [0.3, 0.7])]['label'], 0)
#
#
# class TestTreeCPD(unittest.TestCase):
# def setUp(self):
# self.tree1 = TreeCPD([('B', DiscreteFactor(['A'], [2], [0.8, 0.2]), '0'),
# ('B', 'C', '1'),
# ('C', DiscreteFactor(['A'], [2], [0.1, 0.9]), '0'),
# ('C', 'D', '1'),
# ('D', DiscreteFactor(['A'], [2], [0.9, 0.1]), '0'),
# ('D', DiscreteFactor(['A'], [2], [0.4, 0.6]), '1')])
#
# self.tree2 = TreeCPD([('C','A','0'),('C','B','1'),
# ('A', DiscreteFactor(['J'], [2], [0.9, 0.1]), '0'),
# ('A', DiscreteFactor(['J'], [2], [0.3, 0.7]), '1'),
# ('B', DiscreteFactor(['J'], [2], [0.8, 0.2]), '0'),
# ('B', DiscreteFactor(['J'], [2], [0.4, 0.6]), '1')])
#
# def test_add_edge(self):
# self.tree1.add_edge('yolo', 'yo', 0)
# self.assertTrue('yolo' in self.tree1.nodes() and 'yo' in self.tree1.nodes())
# self.assertTrue(('yolo', 'yo') in self.tree1.edges())
# self.assertEqual(self.tree1['yolo']['yo']['label'], 0)
#
# def test_add_edges_from(self):
# self.tree1.add_edges_from([('yolo', 'yo', 0), ('hello', 'world', 1)])
# self.assertTrue('yolo' in self.tree1.nodes() and 'yo' in self.tree1.nodes() and
# 'hello' in self.tree1.nodes() and 'world' in self.tree1.nodes())
# self.assertTrue(('yolo', 'yo') in self.tree1.edges())
# self.assertTrue(('hello', 'world') in self.tree1.edges())
# self.assertEqual(self.tree1['yolo']['yo']['label'], 0)
# self.assertEqual(self.tree1['hello']['world']['label'], 1)
#
# def test_to_tabular_cpd(self):
# tabular_cpd = self.tree1.to_tabular_cpd()
# self.assertEqual(tabular_cpd.evidence, ['D', 'C', 'B'])
# self.assertEqual(tabular_cpd.evidence_card, [2, 2, 2])
# self.assertEqual(list(tabular_cpd.variables), ['A', 'B', 'C', 'D'])
# np_test.assert_array_equal(tabular_cpd.values,
# np.array([0.8, 0.8, 0.8, 0.8, 0.1, 0.1, 0.9, 0.4,
# 0.2, 0.2, 0.2, 0.2, 0.9, 0.9, 0.1, 0.6]))
#
# tabular_cpd = self.tree2.to_tabular_cpd()
# self.assertEqual(tabular_cpd.evidence, ['A', 'B', 'C'])
# self.assertEqual(tabular_cpd.evidence_card, [2, 2, 2])
# self.assertEqual(list(tabular_cpd.variables), ['J', 'C', 'B', 'A'])
# np_test.assert_array_equal(tabular_cpd.values,
# np.array([ 0.9, 0.3, 0.9, 0.3, 0.8, 0.8, 0.4, 0.4,
# 0.1, 0.7, 0.1, 0.7, 0.2, 0.2, 0.6, 0.6]))
#
# @unittest.skip('Not implemented yet')
# def test_to_tabular_cpd_parent_order(self):
# tabular_cpd = self.tree1.to_tabular_cpd('A', parents_order=['D', 'C', 'B'])
# self.assertEqual(tabular_cpd.evidence, ['D', 'C', 'B'])
# self.assertEqual(tabular_cpd.evidence_card, [2, 2, 2])
# self.assertEqual(list(tabular_cpd.variables), ['A', 'D', 'C', 'B'])
# np_test.assert_array_equal(tabular_cpd.values,
# np.array([0.8, 0.1, 0.8, 0.9, 0.8, 0.1, 0.8, 0.4,
# 0.2, 0.9, 0.2, 0.1, 0.2, 0.9, 0.2, 0.6]))
#
# tabular_cpd = self.tree2.to_tabular_cpd('A', parents_order=['E', 'D', 'C', 'B'])
#
# @unittest.skip('Not implemented yet')
# def test_to_rule_cpd(self):
# rule_cpd = self.tree1.to_rule_cpd()
# self.assertEqual(rule_cpd.cardinality(), {'A': 2, 'B': 2, 'C': 2, 'D': 2})
# self.assertEqual(rule_cpd.scope(), {'A', 'B', 'C', 'D'})
# self.assertEqual(rule_cpd.variable, 'A')
# self.assertEqual(rule_cpd.rules, {('A_0', 'B_0'): 0.8,
# ('A_1', 'B_0'): 0.2,
# ('A_0', 'B_1', 'C_0'): 0.1,
# ('A_0', 'B_1', 'C_1', 'D_0'): 0.9,
# ('A_1', 'B_1', 'C_1', 'D_0'): 0.1,
# ('A_0', 'B_1', 'C_1', 'D_1'): 0.4,
# ('A_1', 'B_1', 'C_1', 'D_1'): 0.6})
#
# rule_cpd = self.tree2.to_rule_cpd()
# self.assertEqual(rule_cpd.cardinality(), {'A': 2, 'B': 2, 'C': 2, 'D': 2, 'E': 2})
# self.assertEqual(rule_cpd.scope(), {'A', 'B', 'C', 'D', 'E'})
# self.assertEqual(rule_cpd.variable, 'A')
# self.assertEqual(rule_cpd.rules, {('A_0', 'B_0', 'C_0'): 0.8,
# ('A_1', 'B_0', 'C_0'): 0.2,
# ('A_0', 'B_0', 'C_1', 'D_0'): 0.9,
# ('A_1', 'B_0', 'C_1', 'D_0'): 0.1,
# ('A_0', 'B_0', 'C_1', 'D_1'): 0.4,
# ('A_1', 'B_0', 'C_1', 'D_1'): 0.6,
# ('A_0', 'B_1', 'C_0'): 0.1,
# ('A_1', 'B_1', 'C_0'): 0.9,
# ('A_0', 'B_1', 'C_1', 'E_0'): 0.3,
# ('A_1', 'B_1', 'C_1', 'E_0'): 0.7,
# ('A_0', 'B_1', 'C_1', 'E_1'): 0.8,
# ('A_1', 'B_1', 'C_1', 'E_1'): 0.2})
#
#
# class TestRuleCPDInit(unittest.TestCase):
# def test_init_without_errors_rules_none(self):
# rule_cpd = RuleCPD('A')
# self.assertEqual(rule_cpd.variable, 'A')
#
# def test_init_without_errors_rules_not_none(self):
# rule_cpd = RuleCPD('A', {('A_0', 'B_0'): 0.8,
# ('A_1', 'B_0'): 0.2,
# ('A_0', 'B_1', 'C_0'): 0.4,
# ('A_1', 'B_1', 'C_0'): 0.6,
# ('A_0', 'B_1', 'C_1'): 0.9,
# ('A_1', 'B_1', 'C_1'): 0.1})
# self.assertEqual(rule_cpd.variable, 'A')
# self.assertEqual(rule_cpd.rules, {('A_0', 'B_0'): 0.8,
# ('A_1', 'B_0'): 0.2,
# ('A_0', 'B_1', 'C_0'): 0.4,
# ('A_1', 'B_1', 'C_0'): 0.6,
# ('A_0', 'B_1', 'C_1'): 0.9,
# ('A_1', 'B_1', 'C_1'): 0.1})
#
# def test_init_with_errors(self):
# self.assertRaises(ValueError, RuleCPD, 'A', {('A_0',): 0.5,
# ('A_0', 'B_0'): 0.8,
# ('A_1', 'B_0'): 0.2,
# ('A_0', 'B_1', 'C_0'): 0.4,
# ('A_1', 'B_1', 'C_0'): 0.6,
# ('A_0', 'B_1', 'C_1'): 0.9,
# ('A_1', 'B_1', 'C_1'): 0.1})
#
#
# class TestRuleCPDMethods(unittest.TestCase):
# def setUp(self):
# self.rule_cpd_with_rules = RuleCPD('A', {('A_0', 'B_0'): 0.8,
# ('A_1', 'B_0'): 0.2,
# ('A_0', 'B_1', 'C_0'): 0.4,
# ('A_1', 'B_1', 'C_0'): 0.6})
# self.rule_cpd_without_rules = RuleCPD('A')
#
# def test_add_rules_single(self):
# self.rule_cpd_with_rules.add_rules({('A_0', 'B_1', 'C_1'): 0.9})
# self.assertEqual(self.rule_cpd_with_rules.rules, {('A_0', 'B_0'): 0.8,
# ('A_1', 'B_0'): 0.2,
# ('A_0', 'B_1', 'C_0'): 0.4,
# ('A_1', 'B_1', 'C_0'): 0.6,
# ('A_0', 'B_1', 'C_1'): 0.9})
# self.assertEqual(self.rule_cpd_with_rules.variable, 'A')
# self.rule_cpd_without_rules.add_rules({('A_0', 'B_1', 'C_1'): 0.9})
# self.assertEqual(self.rule_cpd_without_rules.rules, {('A_0', 'B_1', 'C_1'): 0.9})
# self.assertEqual(self.rule_cpd_without_rules.variable, 'A')
#
# def test_add_rules_multiple(self):
# self.rule_cpd_with_rules.add_rules({('A_0', 'B_1', 'C_1'): 0.9,
# ('A_1', 'B_1', 'C_1'): 0.1})
# self.assertEqual(self.rule_cpd_with_rules.rules, {('A_0', 'B_0'): 0.8,
# ('A_1', 'B_0'): 0.2,
# ('A_0', 'B_1', 'C_0'): 0.4,
# ('A_1', 'B_1', 'C_0'): 0.6,
# ('A_0', 'B_1', 'C_1'): 0.9,
# ('A_1', 'B_1', 'C_1'): 0.1})
# self.assertEqual(self.rule_cpd_with_rules.variable, 'A')
# self.rule_cpd_without_rules.add_rules({('A_0', 'B_1', 'C_1'): 0.9,
# ('A_1', 'B_1', 'C_1'): 0.1})
# self.assertEqual(self.rule_cpd_without_rules.rules, {('A_0', 'B_1', 'C_1'): 0.9,
# ('A_1', 'B_1', 'C_1'): 0.1})
# self.assertEqual(self.rule_cpd_without_rules.variable, 'A')
#
# def test_add_rules_error(self):
# self.assertRaises(ValueError, self.rule_cpd_with_rules.add_rules, {('A_0',): 0.8})
#
# def test_scope(self):
# self.assertEqual(self.rule_cpd_with_rules.scope(), {'A', 'B', 'C'})
# self.assertEqual(self.rule_cpd_without_rules.scope(), set())
#
# def test_cardinality(self):
# self.assertEqual(self.rule_cpd_with_rules.cardinality(), {'A': 2, 'B': 2, 'C': 1})
# self.assertEqual(self.rule_cpd_without_rules.cardinality(), {})
#
# def tearDown(self):
# del self.rule_cpd_without_rules
#
|
khalibartan/pgmpy
|
pgmpy/tests/test_factors/test_discrete/test_Factor.py
|
Python
|
mit
| 56,244
|
# CIS 410/510pm
# Homework 5 beta 0.0.1
# Cameron Palk
# May 2016
#
# Special thanks to Daniel Lowd for the skeletor code
import sys
import tokenize
from functools import reduce
global_card = []
num_vars = 0
''' Calc Strides
'''
def calcStrides( scope ):
rev_scope = list( reversed( scope ) )
res = [ 1 ] + [ 0 ] * ( len( scope ) - 1 )
for idx in range( 1, len( rev_scope ) ):
res[ idx ] = res[ idx - 1 ] * global_card[ rev_scope[ idx - 1 ] ]
stride = list( reversed( res ) )
return { scope[i] : stride[i] for i in range( len( scope ) ) }
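''' Worked example (illustrative, not from the assignment): with
global_card = [2, 3, 2], calcStrides( [0, 1, 2] ) returns
{ 0: 6, 1: 2, 2: 1 }, i.e. row-major strides where the last variable
in the scope varies fastest.
'''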
# FACTOR CLASS DEFINITION
class Factor( dict ):
# Constructor
def __init__(self, scope_, vals_):
self.scope = scope_
self.vals = vals_
self.stride = calcStrides( scope_ )
#
    # Are two objects equal, True or False
def __eq__(self, other):
return (self.scope == other.scope and
self.vals == other.vals and
self.stride == other.stride )
#
# A string used for printing the Factor Objects
def __repr__( self ):
style = "\n{0}\nScope: {1}\nStride: {2}\nCard: {3}\nVals:\n{4}\n{0}\n"
vertBar = ''.join( ['-'] * 50 )
return style.format( vertBar, self.scope, self.stride,
{ v : global_card[v] for v in self.scope },
'\n'.join( [ str( round( e, 3 ) ) for e in self.vals ] ) )
#
# What the '*' character does between our objects
def __mul__( self, other ):
new_scope = list( set( self.scope ).union( set( other.scope ) ) )
assignment = { e : 0 for e in new_scope }
card = { u : global_card[ u ] for u in new_scope }
val_count = reduce( lambda agg, x: agg * global_card[x], new_scope, 1 )
new_vals = [ 0 ] * val_count
idx1 = idx2 = 0
for i in range( 0, val_count ):
new_vals[ i ] = self.vals[ idx1 ] * other.vals[ idx2 ]
for rv in reversed( new_scope ):
if assignment[ rv ] == card[ rv ] - 1:
idx1 -= assignment[ rv ] * self.stride [ rv ] if rv in self.stride else 0
idx2 -= assignment[ rv ] * other.stride[ rv ] if rv in other.stride else 0
assignment[ rv ] = 0
else:
idx1 += self.stride [ rv ] if rv in self.scope else 0
idx2 += other.stride[ rv ] if rv in other.scope else 0
assignment[ rv ] += 1
break
#
return Factor( new_scope, new_vals )
#
# Sum out the variable and return a new Factor
def sumOut( self ):
# TODO Sum out a RV
return
#
# Helper Functions:
def containsRV( self, rv ):
return rv in self.scope
#
# END FACTOR CLASS DEFINITION
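''' Sum-Out Sketch
A hedged, minimal sketch (not part of the original skeleton) of the
elimination step that Factor.sumOut leaves as a TODO: recover each
variable's assignment from the flat index via the strides, then
accumulate entries that agree on everything except rv.
'''
def sumOutVar( factor, rv ):
    new_scope = [ v for v in factor.scope if v != rv ]
    new_stride = calcStrides( new_scope )
    new_size = reduce( lambda agg, x: agg * global_card[x], new_scope, 1 )
    new_vals = [ 0.0 ] * new_size
    for i in range( len( factor.vals ) ):
        # map flat index i of the old table to flat index j of the new one
        j = sum( ( i // factor.stride[v] % global_card[v] ) * new_stride[v]
                 for v in new_scope )
        new_vals[ j ] += factor.vals[ i ]
    return Factor( new_scope, new_vals )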
# IGNORE DANIELS READER BELOW
#
# Read in all tokens from stdin. Save it to a (global) buf that we use
# later. (Is there a better way to do this? Almost certainly.)
curr_token = 0
token_buf = []
def read_tokens():
global token_buf
for line in sys.stdin:
token_buf.extend(line.strip().split())
#
def next_token():
global curr_token
global token_buf
curr_token += 1
return token_buf[ curr_token - 1 ]
#
def next_int():
return int( next_token() )
#
def next_float():
return float( next_token() )
#
def read_model():
# Read in all tokens and throw away the first (expected to be "MARKOV")
read_tokens()
s = next_token()
# Get number of vars, followed by their ranges
global num_vars
num_vars = next_int()
global global_card
global_card = [ next_int() for i in range( num_vars ) ]
# Get number and scopes of factors
num_factors = int(next_token())
factor_scopes = []
for i in range(num_factors):
factor_scopes.append( [ next_int() for i in range( next_int() ) ] )
# Read in all factor values
factor_vals = []
for i in range(num_factors):
factor_vals.append( [ next_float() for i in range( next_int() ) ] )
return [ Factor(s,v) for (s,v) in zip( factor_scopes, factor_vals ) ]
#
# IGNORE DANIELS READER ABOVE
''' Factor Count With Var
@input factors Factors we want to look through
@input rv A RV
@return [int] The number of times the rv occurs in the factors' scopes
'''
def factorCountWithVar( factors, rv ):
return sum( [ 1 if f.containsRV( rv ) else 0 for f in factors ] )
''' Factor Stats
'''
def factorStats( factors, possibleVariables ):
return { v: factorCountWithVar(factors,v) for v in range( num_vars ) if v in possibleVariables }
''' Compute Partition Function
@input factors An array of Factor objects representing the graph
@return [float] The partition function ( why is it called a function? )
'''
def computePartitionFunction( factors ):
    # TODO: Implement a faster way to compute the partition function by summing out variables
f = reduce( Factor.__mul__, factors )
z = sum( f.vals )
return z
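# Hedged note: the faster approach the TODO above refers to is variable
# elimination: repeatedly pick a variable that appears in few factors
# (factorStats can rank candidates), multiply only the factors containing it,
# and sum it out (cf. sumOutVar above) before taking the final sum.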
#
''' Main '''
def main():
# Read file
factors = read_model()
    # Compute the partition function
z = computePartitionFunction( factors )
# Print results
print( "Z =", z )
return
# Run main if this module is being run directly
if __name__ == '__main__':
main()
|
CKPalk/ProbabilisticMethods
|
A5/hw5_start.py
|
Python
|
mit
| 4,767
|
# -*- coding: utf-8 -*-
import unittest
import json
from api_health.verifier import Verifier
class JsonVerifier(unittest.TestCase):
def test_constructor_should_be_smart_about_params(self):
simple_json = u'{ "foo": "bar" }'
json_dict = json.loads(simple_json)
try:
v1 = Verifier(simple_json)
v2 = Verifier(json_dict)
        except Exception:
self.fail('Verifier() constructor should deal with both '
'string and object json')
self.assertTrue(v1.has_property('foo'))
self.assertTrue(v2.has_property('foo'))
def test_should_check_for_json_property(self):
simple_json = u'{ "foo": "bar" }'
verifier = Verifier(simple_json)
self.assertTrue(verifier.has_property('foo'))
self.assertTrue(verifier.does_not_have_property('bu'))
self.assertFalse(verifier.has_property('bleh'))
def test_should_check_arrays(self):
array_json = u'{ "foo": "bar", "baz": [ 1, 2, 3] }'
verifier = Verifier(array_json)
self.assertTrue(verifier.has_property("baz[1]"))
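        # Hedged note: "baz[1]" appears to use the library's path syntax for
        # indexing into JSON arrays; the assertion only checks that the second
        # element of "baz" exists.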
|
bigodines/api-health
|
tests/test_verifier.py
|
Python
|
mit
| 1,105
|
from props.graph_representation.word import Word,NO_INDEX, strip_punctuations
import cgi
from copy import deepcopy
from props.dependency_tree.definitions import time_prep, definite_label,\
adjectival_mod_dependencies
COPULA = "SAME AS" # the textual value of a copula node
PROP = "PROP" # the textual value of a property node
RCMOD_PROP = "PROP" # the textual value of a property for rcmod node
POSSESSIVE = "POSSESS" # the textual value of a possessive node
APPOSITION = "appos" # the textual value of an appositio n node
PREP = "PREP" # the textual value of a preposition node
PREP_TYPE = "TYPE" # the textual value of a preposition node's type
COND = "COND" # the textual value of a conditional node
TIME = "TIME" # the textual value of a time node
LOCATION = "LOCATION" # the textual value of a location node
CONJUNCTION = "CONJ -" # the textual value of a conjunction node
ADVERB = "ADV" # the textual value of a conjunction node
EXISTENSIAL = "EXISTS" # the textual value of a conjunction node
COND_TYPE= PREP_TYPE # the textual value of a conditional node's type
## Node shapes
RECT_NODE_SHAPE = "rect"
DEFAULT_NODE_SHAPE = "ellipse"
PRINT_FEATURES = [("Tense",lambda t:t),
("Determiner",lambda t:"det: "+t["Value"]),
("Time Value",lambda t:"date: "+t),
("Negation", lambda t:"negated"),
("Passive Voice", lambda t:"passive"),
("Modal",lambda t:"modal: "+ " ".join(t["Value"])),
("Definite",lambda t:t),
("Modifier",lambda t:"modifer: "+t)]
global nodeCounter
nodeCounter =0
class Node:
"""
node class
represents a single node in the representation graph.
@type isPredicate: bool
@var isPredicate: denotes if this node is a predicate
@type text: list of Word object
@var text: the text contained within this node
@type features: dict
@var features: syntactic features of this node (e.g., definiteness)
@type propagateTo: list
    @var propagateTo: list of Node objects onto which the properties of this node should propagate
@type span: list
@var span: list of indices in the original sentence which this node spans
@todo think if this is needed, or consumed by Word
@type valid: bool
@var valid: debug variable, indicates if this node should be converted
@type uid: int
@var uid: unique id for this node, to be able to distinguish nodes with identical features
"""
def __init__(self,isPredicate,text,features,valid,orderText = True):
"""
initialize a node object
@type orderText: boolean
@param orderText: defines if text elements should be sorted by indices upon printing in the __str__ function
"""
self.isPredicate = isPredicate
self.text = text
self.features = features
self.valid = valid
global nodeCounter
self.uid = nodeCounter
nodeCounter +=1
self.propagateTo = []
self.orderText = orderText
self.nodeShape = DEFAULT_NODE_SHAPE
self.__str__() # calculate variables in str
def get_text(self,gr):
return self.text
def copy(self):
"""
'copy constructor'
"""
# get proper type and new uid
ret = self.__class__(isPredicate = self.isPredicate,
text = self.text,
features = self.features,
valid = self.valid)
# copy propagations
for curNode in self.propagateTo:
addSymmetricPropogation(ret, curNode)
return ret
def addPropogation(self,node):
"""
        Add a node onto which this node's properties should propagate.
@type node: Node
        @param node: The node onto which to propagate
"""
if node not in self.propagateTo:
self.propagateTo.append(node)
def minIndex(self):
"""
Minimum index covered by this node
@rtype: int
"""
if not self.text:
            return NO_INDEX # TODO: why is this happening?
return min([w.index for w in self.text])
def maxIndex(self):
"""
        Maximum index covered by this node
@rtype: int
"""
if not self.text:
            return NO_INDEX # TODO: why is this happening?
return max([w.index for w in self.text])
def __str__(self):
ret = '<TABLE BORDER="0" CELLSPACING="0"><TR><TD>'
filtered_spans = []
for feat,_ in PRINT_FEATURES:
if (feat in self.features) and (isinstance(self.features[feat], dict)) and ("Span" in self.features[feat]):
filtered_spans.extend(self.features[feat]["Span"])
if 'Lemma' in self.features and len(self.text)==1:
self.str = [Word(index = self.text[0].index,word=self.features['Lemma'])]
else:
ls = self.text
if self.orderText:
ls = sorted(self.text,key=lambda word:word.index)
# self.str stores the words as displayed in the node
self.str = [w for w in ls if w.index not in filtered_spans]
self.str = strip_punctuations(self.str)
ret+= " ".join([str(x) for x in self.str])
ret+="</TD></TR>"
for feat, printFunc in PRINT_FEATURES:
if feat in self.features:
if self.isPredicate and feat =="Definite":
continue
ret += "<TR><TD>"
ret+= '<FONT POINT-SIZE="10">{0}</FONT>'.format(cgi.escape(str(printFunc(self.features[feat]))))
ret+="</TD></TR>"
ret +="</TABLE>"
return ret
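    # The markup built above is a Graphviz HTML-like label: a one-column table
    # whose first row holds the node's filtered words and whose remaining rows
    # show the formatted features in a smaller font.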
def __hash__(self):
return self.__str__().__hash__()
class CopularNode(Node):
"""
A class representing a copular head "BE" node.
"""
@classmethod
def init(cls,index,features,valid):
if "Lemma" in features:
del(features["Lemma"])
return cls(isPredicate=True,
text=[Word(index,COPULA)],
features=features,
valid=valid)
class PossessiveNode(Node):
"""
A class representing a copular head "HAS" node.
"""
@classmethod
def init(cls,index,features,valid):
return cls(isPredicate=True,
text=[Word(index,POSSESSIVE)],
features=features,
valid=valid)
class PropNode(Node):
"""
A class representing a prop head node
"""
@classmethod
def init(cls,features,valid,index,parent_relation):
if "Lemma" in features:
del(features["Lemma"])
ret = cls(isPredicate=True,
text=[Word(index,PROP)],
features=features,
valid=valid)
ret.parent_relation = parent_relation
return ret
def copy(self):
ret = Node.copy(self)
ret.parent_relation = self.parent_relation
if hasattr(self, 'str'):
ret.str = self.str
return ret
def is_relative(self):
if "relative" not in self.features:
return False
return self.features["relative"]
def is_prenominal(self):
# TODO: this should be a property of the edge and not the node
return (self.parent_relation == "amod")
def get_text(self,gr):
return []
class RCMODPropNode(Node):
"""
A class representing a prop head for rcmod node
"""
@classmethod
def init(cls,features,valid):
return cls(isPredicate=True,
text=[Word(NO_INDEX,RCMOD_PROP)],
features=features,
valid=valid)
def is_prenominal(self):
return False
class TimeNode(Node):
"""
A class representing a time head node
"""
@classmethod
def init(cls,features):
        ret = cls(isPredicate=False,
                  text=[Word(NO_INDEX,TIME)],
                  features=features,
                  valid=True)
        ret.nodeShape = RECT_NODE_SHAPE
        return ret
def get_text(self,gr):
neighbors = gr.neighbors(self)
ret = []
for n in neighbors:
ret.extend(n.get_text(gr))
return sorted(ret,key=lambda x:x.index)
class LocationNode(Node):
"""
A class representing a location head node
"""
@classmethod
def init(cls,features):
return cls(isPredicate=True,
text=[Word(NO_INDEX,LOCATION)],
features=features,
valid=True)
def get_text(self,gr):
neighbors = gr.neighbors(self)
ret = []
for n in neighbors:
ret.extend(n.get_text(gr))
return sorted(ret,key=lambda x:x.index)
class PrepNode(Node):
"""
A class representing a preposition head node
"""
@classmethod
def init(cls,index,prepType,features,valid):
prepType = prepType.lower()
ret = cls(isPredicate=True,
text=[Word(index,"{0}-{1}".format(PREP,prepType))],
features=features,
valid=valid)
ret.prepType = prepType
return ret
def copy(self):
ret = Node.copy(self)
ret.prepType = self.prepType
if hasattr(self, 'str'):
ret.str = self.str
return ret
def get_text(self,gr):
return [Word(index = self.text[0].index,
word = self.prepType)]
def is_time_prep(self):
return self.prepType in time_prep
class CondNode(Node):
"""
A class representing a conditional/temporal head node
"""
@classmethod
def init(cls,index,condType,features,valid):
condType = condType.lower()
ret= cls(isPredicate=True,
text=[Word(index,"{0}-{1}".format(COND,condType))],
features=features,
valid=valid)
ret.condType = condType
ret.nodeShape = RECT_NODE_SHAPE
return ret
def get_text(self,gr):
return [Word(index = self.text[0].index,
word = self.condType)]
def copy(self):
ret = Node.copy(self)
ret.condType = self.condType
return ret
class AppositionNode(Node):
"""
A class representing an apposition head node
"""
@classmethod
def init(cls,index,features):
return cls(isPredicate=False,
text=[Word(index,APPOSITION)],
features=features,
valid=False)
class ConjunctionNode(Node):
"""
    A class representing a conjunction head node
"""
@classmethod
def init(cls,text,features):
"""
initialize a conjunction head node
"""
conjType = " ".join([x.word for x in sorted(text,
key=lambda word:word.index)])
text = [Word(NO_INDEX,CONJUNCTION)] + text
ret = cls(isPredicate=True,
text=text,
features=features,
valid=True)
ret.conjType = conjType
ret.__str__()
return ret
def copy(self):
ret = Node.copy(self)
ret.conjType = self.conjType
return ret
def get_text(self,gr):
neighbors = gr.neighbors(self)
ret = []
for n in neighbors:
ret.extend(n.get_text(gr))
return sorted(ret,key=lambda x:x.index)
class advNode(Node):
"""
A class representing an adverb head node
"""
@classmethod
def init(cls,features):
"""
initialize an adverb head node
"""
return cls(isPredicate=True,
text=[Word(NO_INDEX,ADVERB)],
features=features,
valid=True)
def isCopular(node):
"""
    check if this node is a copular instance
@type node: Node
@param node: node to examine
@rtype bool
    @return True iff this node is a copular instance
"""
return isinstance(node,CopularNode)
def isApposition(node):
"""
check if this node is an apposition instance
@type node: Node
@param node: node to examine
@rtype bool
@return True iff this node is an apposition instance
"""
return isinstance(node,AppositionNode)
def isProp(node):
"""
check if this node is a prop instance
@type node: Node
@param node: node to examine
@rtype bool
    @return True iff this node is a prop node instance
"""
#TODO: efficiency
return isinstance(node,PropNode)
def isRcmodProp(node):
"""
    check if this node is an rcmod prop instance
    @type node: Node
    @param node: node to examine
    @rtype bool
    @return True iff this node is an rcmod prop node instance
"""
#TODO: efficiency
return isinstance(node,RCMODPropNode)
def isConjunction(node):
"""
check if this node is a conjunction instance
@type node: Node
@param node: node to examine
@rtype bool
    @return True iff this node is a conjunction node instance
"""
#TODO: efficiency
return isinstance(node,ConjunctionNode)
def isPreposition(node):
"""
check if this node is a preposition instance
@type node: Node
@param node: node to examine
@rtype bool
    @return True iff this node is a preposition node instance
"""
#TODO: efficiency
return isinstance(node,PrepNode)
def isTime(node):
"""
check if this node is a time instance
@type node: Node
@param node: node to examine
@rtype bool
    @return True iff this node is a time node instance
"""
#TODO: efficiency
return isinstance(node,TimeNode)
def isLocation(node):
"""
check if this node is a location instance
@type node: Node
@param node: node to examine
@rtype bool
    @return True iff this node is a location node instance
"""
#TODO: efficiency
return isinstance(node,LocationNode)
def isAdverb(node):
"""
    check if this node is an adverb instance
    @type node: Node
    @param node: node to examine
    @rtype bool
    @return True iff this node is an adverb node instance
"""
#TODO: efficiency
return isinstance(node,advNode)
def isCondition(node):
"""
check if this node is a Cond instance
@type node: Node
@param node: node to examine
@rtype bool
    @return True iff this node is a condition node instance
"""
#TODO: efficiency
return isinstance(node,CondNode)
def isDefinite(node):
return node.features.get("Definite",False) == definite_label
def isNominal(node,gr):
if node.isPredicate: #predicate
return False
if [father for father in gr.incidents(node) if isProp(father)]: #prop
return False
return True
def isPossessive(node):
"""
check if this node is a Possessive instance
@type node: Node
@param node: node to examine
@rtype bool
    @return True iff this node is a possessive node instance
"""
#TODO: efficiency
return isinstance(node,PossessiveNode)
def join(node1,node2,gr):
"""
Returns a node which is the concatenation of two nodes
    Raises an error if they have contradicting features
@type node1: Node
@param node1: first node to be joined
@type node2: Node
@param node2: second node to be joined
@rtype Node
@return a node representing the union of both nodes
"""
# make sure everything is ok
if node1.isPredicate != node2.isPredicate:
#raise Exception("Contradicting isPredicate value")
print "Contradicting isPredicate value"
if (not node1.valid) or (not node2.valid):
raise Exception("Invalid node cannot be joined")
# join all values
isPredicate = (node1.isPredicate and node2.isPredicate)
text = list(set(node1.get_text(gr)).union(node2.get_text(gr)))
features = {}
features.update(node1.features)
features.update(node2.features)
valid = node1.valid
# remove contradicting features
for k in set(node1.features).intersection(node2.features):
if node1.features[k]!=node2.features[k]:
del(features[k])
print("Contradicting features")
# return new node
return Node(isPredicate = isPredicate,
text = text,
features = features,
valid = valid)
def addSymmetricPropogation(node1,node2):
"""
    Add two nodes onto each other's propagation lists
    @type node1: Node
    @param node1: the node onto which to propagate node2
    @type node2: Node
    @param node2: the node onto which to propagate node1
"""
node1.addPropogation(node2)
node2.addPropogation(node1)
if __name__ == "__main__":
    copNode = CopularNode.init(index = 1,
                               features={"tense":"past"},
                               valid=True)
n = copNode.copy()
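    # A short usage sketch (not part of the original demo; hypothetical values):
    # propagation links are symmetric, and copy() re-creates them on the clone.
    other = CopularNode.init(index=2, features={"tense": "present"}, valid=True)
    addSymmetricPropogation(copNode, other)
    assert copNode in other.propagateTo and other in copNode.propagateTo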
|
gabrielStanovsky/props
|
props/graph_representation/node.py
|
Python
|
mit
| 18,324
|
#!/usr/bin/python
"Assignment 5 - This defines a topology for running a firewall. It is not \
necessarily the topology that will be used for grading, so feel free to \
edit and create new topologies and share them."
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.node import CPULimitedHost, RemoteController
from mininet.util import custom
from mininet.link import TCLink
from mininet.cli import CLI
class FWTopo(Topo):
    ''' Creates the following topology:
e1 e2 e3
| | |
\ | /
            firewall (s1)
/ | \
| | |
w1 w2 w3
'''
def __init__(self, cpu=.1, bw=10, delay=None, **params):
super(FWTopo,self).__init__()
        # Host and link configuration
hconfig = {'cpu': cpu}
lconfig = {'bw': bw, 'delay': delay}
# Create the firewall switch
s1 = self.addSwitch('s1')
        # Create East hosts and links
e1 = self.addHost('e1', **hconfig)
e2 = self.addHost('e2', **hconfig)
e3 = self.addHost('e3', **hconfig)
self.addLink(s1, e1, port1=1, port2=1, **lconfig)
self.addLink(s1, e2, port1=2, port2=1, **lconfig)
self.addLink(s1, e3, port1=3, port2=1, **lconfig)
        # Create West hosts and links
w1 = self.addHost('w1', **hconfig)
w2 = self.addHost('w2', **hconfig)
w3 = self.addHost('w3', **hconfig)
self.addLink(s1, w1, port1=4, port2=1, **lconfig)
self.addLink(s1, w2, port1=5, port2=1, **lconfig)
self.addLink(s1, w3, port1=6, port2=1, **lconfig)
def main():
print "Starting topology"
topo = FWTopo()
net = Mininet(topo=topo, link=TCLink, controller=RemoteController, autoSetMacs=True)
net.start()
try:
from unit_tests import run_tests
raw_input('Unit tests to be run next. Make sure your firewall is running, then press a key')
run_tests(net)
    except ImportError:
        print "unit_tests module not found; skipping tests"
CLI(net)
if __name__ == '__main__':
main()
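# Usage sketch (not part of the original file): Mininet requires root and a
# remote controller already listening on the default OpenFlow port, e.g.
#   sudo python testing-topo.py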
|
PicoGeyer/CS-6250-A5_firewall_test
|
testing-topo.py
|
Python
|
mit
| 2,069
|
from application import CONFIG, app
from .models import *
from flask import current_app, session
from flask.ext.login import LoginManager, login_user, logout_user, current_user
from flask.ext.principal import Principal, Identity, AnonymousIdentity, identity_changed, identity_loaded, RoleNeed
import bcrypt
import re
import sendgrid
import time
from itsdangerous import URLSafeTimedSerializer
AuthenticationError = Exception("AuthenticationError", "Invalid credentials.")
UserExistsError = Exception("UserExistsError", "Email already exists in database.")
UserDoesNotExistError = Exception("UserDoesNotExistError", "Account with given email does not exist.")
login_manager = LoginManager()
login_manager.init_app(app)
principals = Principal(app)
sg = sendgrid.SendGridClient(CONFIG["SENDGRID_API_KEY"])
ts = URLSafeTimedSerializer(CONFIG["SECRET_KEY"])
@login_manager.user_loader
def load_user(user_id):
user_entries = StaffUserEntry.objects(id = user_id)
if user_entries.count() != 1:
return None
currUser = user_entries[0]
user = User(currUser.id, currUser.email, currUser.firstname, currUser.lastname, currUser.roles)
return user
@identity_loaded.connect_via(app)
def on_identity_loaded(sender, identity):
identity.user = current_user
if hasattr(current_user, 'roles'):
for role in current_user.roles:
identity.provides.add(RoleNeed(role))
def get_user(email):
entries = StaffUserEntry.objects(email = email)
if entries.count() == 1:
return entries[0]
return None
def verify_user(email, password):
currUser = get_user(email)
if currUser is None:
return None
hashed = currUser.hashed
if bcrypt.hashpw(password.encode("utf-8"), hashed.encode("utf-8")) == hashed.encode("utf-8"):
return load_user(currUser.id)
else:
return None
def login(email):
user = load_user(get_user(email).id)
    if user is not None:
login_user(user)
identity_changed.send(current_app._get_current_object(), identity = Identity(user.uid))
else:
raise UserDoesNotExistError
def logout():
logout_user()
for key in ('identity.name', 'identity.auth_type'):
session.pop(key, None)
identity_changed.send(current_app._get_current_object(), identity = AnonymousIdentity())
def tokenize_email(email):
return ts.dumps(email, salt = CONFIG["EMAIL_TOKENIZER_SALT"])
def detokenize_email(token):
return ts.loads(token, salt = CONFIG["EMAIL_TOKENIZER_SALT"], max_age = 86400)
def send_recovery_email(email):
user = get_user(email)
if user is None:
raise UserDoesNotExistError
token = tokenize_email(email)
message = sendgrid.Mail()
message.add_to(email)
message.set_from("noreply@hackbca.com")
message.set_subject("hackBCA III - Account Recovery")
message.set_html("<p></p>")
message.add_filter("templates", "enable", "1")
message.add_filter("templates", "template_id", CONFIG["SENDGRID_ACCOUNT_RECOVERY_TEMPLATE"])
message.add_substitution("prefix", "staff")
message.add_substitution("token", token)
status, msg = sg.send(message)
def change_name(email, firstname, lastname):
account = get_user(email)
if account is None:
raise UserDoesNotExistError
account.firstname = firstname
account.lastname = lastname
account.save()
login(email) #To update navbar
def change_password(email, password):
account = get_user(email)
if account is None:
raise UserDoesNotExistError
hashed = str(bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt()))[2:-1]
account.hashed = hashed
account.save()
def get_user_attr(email, attr):
user = get_user(email)
if user is None:
raise UserDoesNotExistError
return getattr(user, attr)
def set_user_attr(email, attr, value):
user = get_user(email)
if user is None:
raise UserDoesNotExistError
setattr(user, attr, value)
user.save()
|
hackBCA/missioncontrol
|
application/mod_user/controllers.py
|
Python
|
mit
| 3,822
|
"""
WSGI config for first_app project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "first_app.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
IlyaSergeev/taxi_service
|
first_app/wsgi.py
|
Python
|
mit
| 393
|
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
import matplotlib.ticker as ticker
r = mlab.csv2rec('data/imdb.csv')
r.sort()
r = r[-30:] # get the last 30 days
N = len(r)
ind = np.arange(N) # the evenly spaced plot indices
def format_date(x, pos=None):
thisind = np.clip(int(x+0.5), 0, N-1)
return r.date[thisind].strftime('%Y-%m-%d')
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(ind, r.adj_close, 'o-')
ax.xaxis.set_major_formatter(ticker.FuncFormatter(format_date))
fig.autofmt_xdate()
plt.show()
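# Note: plotting against the integer index and mapping ticks back to dates via
# FuncFormatter avoids gaps on dates with no rows in the CSV, which plotting
# directly against r.date would leave.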
|
janusnic/21v-python
|
unit_20/matplotlib/pyplot_index_formatter.py
|
Python
|
mit
| 556
|
# WARNING: Do not edit by hand, this file was generated by Crank:
#
# https://github.com/gocardless/crank
#
from . import base_service
from .. import resources
from ..paginator import Paginator
from .. import errors
class MandatePdfsService(base_service.BaseService):
"""Service class that provides access to the mandate_pdfs
endpoints of the GoCardless Pro API.
"""
RESOURCE_CLASS = resources.MandatePdf
RESOURCE_NAME = 'mandate_pdfs'
def create(self,params=None, headers=None):
"""Create a mandate PDF.
Generates a PDF mandate and returns its temporary URL.
Customer and bank account details can be left blank (for a blank
mandate), provided manually, or inferred from the ID of an existing
[mandate](#core-endpoints-mandates).
By default, we'll generate PDF mandates in English.
To generate a PDF mandate in another language, set the
`Accept-Language` header when creating the PDF mandate to the relevant
[ISO 639-1](http://en.wikipedia.org/wiki/List_of_ISO_639-1_codes)
language code supported for the scheme.
        | Scheme           | Supported languages                           |
        | :--------------- | :-------------------------------------------- |
        | ACH              | English (`en`)                                |
        | Autogiro         | English (`en`), Swedish (`sv`)                |
        | Bacs             | English (`en`)                                |
        | BECS             | English (`en`)                                |
        | BECS NZ          | English (`en`)                                |
        | Betalingsservice | Danish (`da`), English (`en`)                 |
        | PAD              | English (`en`)                                |
        | SEPA Core        | Danish (`da`), Dutch (`nl`), English (`en`), French (`fr`), German (`de`), Italian (`it`), Portuguese (`pt`), Spanish (`es`), Swedish (`sv`) |
Args:
params (dict, optional): Request body.
Returns:
MandatePdf
"""
path = '/mandate_pdfs'
if params is not None:
params = {self._envelope_key(): params}
response = self._perform_request('POST', path, params, headers,
retry_failures=True)
return self._resource_for(response)
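    # Usage sketch (not from the generated file; assumes a configured
    # gocardless_pro.Client called `client`, and a hypothetical mandate ID):
    #   pdf = client.mandate_pdfs.create(params={"links": {"mandate": "MD000123"}})
    #   pdf.url  # temporary URL of the generated PDF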
|
gocardless/gocardless-pro-python
|
gocardless_pro/services/mandate_pdfs_service.py
|
Python
|
mit
| 3,470
|
# -*- coding: utf-8 -*-
"""
@file
@brief image and synthesis
"""
from .image_synthese_facette import Rectangle
from .image_synthese_base import Rayon, Couleur
from .image_synthese_sphere import Sphere
class RectangleImage(Rectangle):
"""définit un rectangle contenant un portrait"""
def __init__(self, a, b, c, d, nom_image, pygame, invertx=False):
"""initialisation, si d == None, d est calculé comme étant
le symétrique de b par rapport au milieu du segment [ac],
la texture est une image,
si invertx == True, inverse l'image selon l'axe des x"""
Rectangle.__init__(self, a, b, c, d, Couleur(0, 0, 0))
self.image = pygame.image.load(nom_image)
self.nom_image = nom_image
self.invertx = invertx
def __str__(self):
"""affichage"""
s = "rectangle image --- a : " + str(self.a)
s += " b : " + str(self.b)
s += " c : " + str(self.c)
s += " d : " + str(self.d)
s += " image : " + self.nom_image
return s
def couleur_point(self, p):
"""retourne la couleur au point de coordonnée p"""
ap = p - self.a
ab = self.b - self.a
ad = self.d - self.a
abn = ab.norme2()
adn = ad.norme2()
x = ab.scalaire(ap) / abn
y = ad.scalaire(ap) / adn
sx, sy = self.image.get_size()
k, li = int(x * sx), int(y * sy)
k = min(k, sx - 1)
li = min(li, sy - 1)
li = sy - li - 1
if not self.invertx:
c = self.image.get_at((k, li))
else:
c = self.image.get_at((sx - k - 1, li))
cl = Couleur(float(c[0]) / 255, float(c[1]) / 255, float(c[2]) / 255)
return cl
class SphereReflet (Sphere):
"""implémente une sphère avec un reflet"""
def __init__(self, centre, rayon, couleur, reflet):
"""initialisation, reflet est un coefficient de réflexion"""
Sphere.__init__(self, centre, rayon, couleur)
self.reflet = reflet
def __str__(self):
"""affichage"""
s = "sphere reflet --- centre : " + str(self.centre)
s += " rayon : " + str(self.rayon)
s += " couleur : " + str(self.couleur)
return s
def rayon_reflechi(self, rayon, p):
"""retourne le rayon réfléchi au point p de la surface,
si aucune, retourne None"""
if p == rayon.origine:
return None
n = self.normale(p, rayon)
n = n.renorme()
y = n.scalaire(rayon.direction)
d = rayon.direction - n * y * 2
r = Rayon(p, d, rayon.pixel, rayon.couleur * self.reflet)
return r
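# Note on rayon_reflechi above: it applies the standard mirror-reflection
# formula d = v - 2 (n . v) n, with v the incident direction and n the unit
# surface normal, and attenuates the ray colour by the `reflet` coefficient.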
|
sdpython/ensae_teaching_cs
|
src/ensae_teaching_cs/special/image/image_synthese_facette_image.py
|
Python
|
mit
| 2,656
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2014 Jonathan F. Donges
# Author: Jonathan F. Donges <donges@pik-potsdam.de>
# URL: <http://www.pik-potsdam.de/members/donges/software>
"""
Performs recurrence analysis of paleoclimate proxy records.
This script provides analyses for this publication:
J.F. Donges, R.V. Donner, N. Marwan, S.F.M. Breitenbach, K. Rehfeld, and J. Kurths,
Nonlinear regime shifts in Holocene Asian monsoon variability: Potential impacts on cultural change and migratory patterns,
Climate of the Past 11, 709-741 (2015),
DOI: 10.5194/cp-11-709-2015
"""
#
# Imports
#
# Import cPickle for loading and saving data
import cPickle
# Import np for fast numerics
import numpy as np
# Import progress bar for easy progress bar handling
import progressbar
# Import class for recurrence network analysis
from pyunicorn.timeseries import RecurrenceNetwork
#
# Settings
#
# Name of data directory
DATA_DIR = "../../data/raw_proxy_data/"
# List of data FILENAMES
FILENAMES = ["Dimarshim_D1.dat", "Qunf_Q5_orig.dat", "Hoti.dat",
"Mawmluh.dat", "Tianmen_TM18_older.dat", "Dongge_DA.dat",
"Lianhua_d18O_d13C.dat", "Heshang_HS4.dat", "Jiuxian.dat",
"Liang-Luar.dat"]
# Names of proxy records / caves
NAMES = ["Dimarshim", "Qunf", "Hoti", "Mawmluh", "Tianmen", "Dongge",
"Lianhua", "Heshang", "Jiuxian", "Liang-Luar"]
# Specify symbol used for commenting in data file
COMMENT_SYMBOL = "%"
# Settings for the time dependent recurrence plot
# Window length [a] / [ka]
T_TIME = [750., 750., 750., 750., 750., 750., 750., 750., 750., 750.]
# Step size [a] / [ka]
DELTA_TIME = 50.
# Settings for the embedding
DIM = 3
TAU = 2 # Only used if ADAPT_DELAY == False
ADAPT_DELAY = True # If true, the delay in units of data points is estimated to match the given DELAY_TIME
# Explicitly set delay times for time-delay embedding
DELAY_TIMES = [100., 216., 57., 146., 90., 185., 193., 60., 73., 135.] # in years [a]
# Settings for the recurrence plot
METRIC = "supremum" # metric for recurrence definition
RR = 0.05 # prescribed recurrence rate
# Settings for significance testing
# Ensemble size
N_ENSEMBLE = 1000
# Choose whether whole embedded state vectors or the scalar time series should be shuffled (Different null-hypothesis!)
SHUFFLE_EMBEDDED = True
# Settings for detrending
DETREND = True
DETRENDING_WINDOW_SIZE = 1000. # measured in [a] / [ka]
#
# Functions
#
def detrend_time_series(data, window_size):
"""
"""
# Get length of data array
n = data.shape[0]
# Initialize a local copy of data array
detrended_data = np.empty(n)
# Detrend data
for j in xrange(n):
# Get distance of sample from boundaries of time series
dist = min(j, n - 1 - j)
        if window_size / 2 > dist:
            half_size = dist
        else:
            half_size = int(window_size // 2)  # ensure integer slice bounds
detrended_data[j] = data[j] - data[j - half_size:j + half_size + 1].mean()
return detrended_data
def autocorrelation(data, lag):
"""Return autocorrelation of data at specified lag."""
return np.corrcoef(data[lag:], data[:-lag])[0,1]
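# Quick sanity check (a sketch, hypothetical data):
#   x = np.sin(np.linspace(0, 20 * np.pi, 1000))
#   autocorrelation(x, lag=1)  # close to 1 for a smooth, slowly varying signal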
#
# The main script
#
print "Recurrence network analysis of paleoclimate records"
print "---------------------------------------------------"
#
# Import data
#
time_list = []
data_list = []
sampling_time_list = []
delay_list = []
# Get number of time series
n_time_series = len(FILENAMES)
# Load data
for i in xrange(n_time_series):
time, data = np.loadtxt(DATA_DIR + FILENAMES[i], comments=COMMENT_SYMBOL,
unpack=True, usecols=(0,1,))
average_sampling_time = np.diff(time).mean()
time_list.append(time)
if DETREND:
# Detrend data!
detrended_data = detrend_time_series(data=data,
window_size=DETRENDING_WINDOW_SIZE / average_sampling_time)
data_list.append(detrended_data)
else:
data_list.append(data)
# Get average sampling times
sampling_time_list.append(average_sampling_time)
# Get delay time
delay_list.append(int(DELAY_TIMES[i] / average_sampling_time))
    # Temporary: get length of time series
n = len(time)
#
# Print some statistics
#
print "Average sampling time:"
for i in xrange(n_time_series):
print FILENAMES[i], ": (", np.diff(time_list[i]).mean(), "pm", np.diff(time_list[i]).std(), ") ka"
#
# Analyze time dependent recurrence networks by moving a window over
# the time series
#
# Initialize list of window mid-points used for estimating time scale of windowed measures
step_sequence = []
# Create dictionary of symbols for each windowed measure to be calculated
symbols = {"Average path length": "$\mathcal{L}$",
"Transitivity": "$\mathcal{T}$"}
#symbols = {"Average path length": "$\mathcal{L}$",
# "n.s.i. average path length": "$\mathcal{L}^*$",
# "Clustering": "$\mathcal{C}$",
# "n.s.i. clustering": "$\mathcal{C}^*$"}
#symbols = {"Determinism": "$DET$",
# "Laminarity": "$LAM$",
# "Mean diagonal line length": "$L_{mean}$",
# "Trapping time": "$TT$",
# "Diagonal line entropy": "$ENTR$",
# "Autocorrelation": "$ACF(1)$",
# "Mean": "Mean",
# "Standard deviation": "STD"}
# Initialize dictionaries
results = {}
surrogate_results = {}
for measure in symbols.keys():
results[measure] = []
surrogate_results[measure] = []
# Run analysis for each time series separately
for i in xrange(n_time_series):
print "Analyzing original data from", FILENAMES[i]
# Get time and data arrays
time = time_list[i]
data = data_list[i]
sampling_time = sampling_time_list[i]
# Set delay
if ADAPT_DELAY:
TAU = delay_list[i]
# Get window and step size in units of samples
T = int(T_TIME[i] / sampling_time)
delta = int(DELTA_TIME / sampling_time)
# Get length of time series
t_max = len(time)
    # Get required time series length before embedding to achieve window length T in the recurrence plot
T_embedded = T + (DIM - 1) * TAU
# Get number of steps
t_steps = int((t_max - T_embedded) / float(delta) + 1)
print "Length of record:", t_max
print "Size of moving window:", T
print "Step size:", delta
print "Number of steps for moving window:", t_steps
print "Embedding dimension:", DIM
print "Embedding delay:", TAU
print "Prescribed link density / recurrence rate:", RR
# Initializations
local_step_sequence = np.empty((t_steps), dtype=int)
local_result = {}
for measure in symbols.keys():
local_result[measure] = np.empty(t_steps)
# Initialize progress bar
progress = progressbar.ProgressBar().start()
# Loop over moving windows
for j in xrange(t_steps):
# Get time series section for current window
time_series = data[j * delta:j * delta + T_embedded]
local_step_sequence[j] = j * delta + T_embedded / 2
# Prepare recurrence network from original data
rec_net = RecurrenceNetwork(time_series.flatten(), dim=DIM, tau=TAU,
metric=METRIC, normalize=False,
silence_level=2, recurrence_rate=RR)
# Calculations for original recurrence network
local_result["Average path length"][j] = rec_net.average_path_length()
local_result["Transitivity"][j] = rec_net.transitivity()
#local_result["Assortativity"][j] = rec_net.assortativity()
#local_result["Diameter"][j] = rec_net.diameter()
# Calculate RQA measures
#local_result["Determinism"][j] = rec_net.determinism()
#local_result["Laminarity"][j] = rec_net.laminarity()
#local_result["Mean diagonal line length"][j] = rec_net.average_diaglength()
#local_result["Trapping time"][j] = rec_net.trapping_time()
#local_result["Diagonal line entropy"][j] = rec_net.diag_entropy()
#local_result["Autocorrelation"][j] = autocorrelation(time_series, lag=1)
#local_result["Mean"][j] = time_series.mean()
#local_result["Standard deviation"][j] = time_series.std()
# Update progress bar every step
progress.update(int(100 * j / float(t_steps)))
# Terminate progress bar
progress.finish()
# Store window mid-point
step_sequence.append(local_step_sequence)
# Store results
for measure in symbols.keys():
results[measure].append(local_result[measure])
#
# Calculate significance levels for network measures
#
print "Calculating significance levels based on", N_ENSEMBLE, "surrogates..."
# Initialize progress bar
progress = progressbar.ProgressBar().start()
# Create a copy of data for generating surrogates from
surrogate_data = data.copy()
if SHUFFLE_EMBEDDED:
# Get embedding of full time series
surrogate_embedding = rec_net.embed_time_series(surrogate_data,
DIM, TAU)
# Prepare stuff
local_surrogate_result = {}
for measure in symbols.keys():
local_surrogate_result[measure] = np.empty(N_ENSEMBLE)
for j in xrange(N_ENSEMBLE):
if SHUFFLE_EMBEDDED:
# Shuffle embedded time series along time axis, that is, whole
# embedded state vectors are shuffled around.
permuted_indices = np.random.permutation(surrogate_embedding.shape[0])
# Use the first T state vectors from the shuffled and embedded
# time series as a surrogate for one window
surrogate_series = surrogate_embedding[permuted_indices[:T],:]
# Prepare recurrence network from surrogate data for shuffled
# embedded time series
rec_net = RecurrenceNetwork(surrogate_series.copy(),
metric=METRIC, normalize=False,
silence_level=2, recurrence_rate=RR)
else:
# Shuffle dust time series
permuted_indices = np.random.permutation(surrogate_data.shape[0])
# Use the first T_embedded states from the shuffled dust time series as a surrogate for one window
surrogate_series = surrogate_data[permuted_indices[:T_embedded]]
# Prepare recurrence network from surrogate data for shuffled time series
rec_net = RecurrenceNetwork(surrogate_series.copy(), dim=DIM,
tau=TAU, metric=METRIC,
normalize=False, silence_level=2,
recurrence_rate=RR)
# Calculate measures for surrogate network
local_surrogate_result["Average path length"][j] = rec_net.average_path_length()
local_surrogate_result["Transitivity"][j] = rec_net.transitivity()
#local_surrogate_result["Assortativity"][j] = rec_net.assortativity()
#local_surrogate_result["Diameter"][j] = rec_net.diameter()
# Calculate RQA measures
#local_surrogate_result["Determinism"][j] = rec_net.determinism()
#local_surrogate_result["Laminarity"][j] = rec_net.laminarity()
#local_surrogate_result["Mean diagonal line length"][j] = rec_net.average_diaglength()
#local_surrogate_result["Trapping time"][j] = rec_net.trapping_time()
#local_surrogate_result["Diagonal line entropy"][j] = rec_net.diag_entropy()
#local_surrogate_result["Autocorrelation"][j] = autocorrelation(data, lag=1)
#local_surrogate_result["Mean"][j] = data.mean()
#local_surrogate_result["Standard deviation"][j] = data.std()
# Update progress bar every step
progress.update(int(100 * j / float(N_ENSEMBLE)))
# Store results
for measure in symbols.keys():
surrogate_results[measure].append(local_surrogate_result[measure])
# Terminate progress bar
progress.finish()
#
# Save results
#
print "Saving results..."
# Initialize storage dictionary
storage = {}
# Store parameters
storage["FILENAMES"] = FILENAMES
storage["NAMES"] = NAMES
storage["T_TIME"] = T_TIME
storage["DELTA_TIME"] = DELTA_TIME
storage["DETRENDING_WINDOW_SIZE"] = DETRENDING_WINDOW_SIZE
storage["DIM"] = DIM
storage["TAU"] = TAU
storage["ADAPT_DELAY"] = ADAPT_DELAY
storage["DELAY_TIMES"] = DELAY_TIMES
storage["METRIC"] = METRIC
storage["RR"] = RR
storage["N_ENSEMBLE"] = N_ENSEMBLE
storage["SHUFFLE_EMBEDDED"] = SHUFFLE_EMBEDDED
# Store symbols
storage["symbols"] = symbols
# Store raw input data
storage["time_list"] = time_list
storage["data_list"] = data_list
# Store axes
storage["step_sequence"] = step_sequence
# Store results
storage["results"] = results
storage["surrogate_results"] = surrogate_results
# Save to file
filename = "results_speleo_comparison_W_" + str(T_TIME[0]) + "y_M_" + str(N_ENSEMBLE) + "_DETREND_" + str(DETREND) + ".pickle"
f = open(filename, 'wb')  # binary mode; avoids shadowing the built-in `file`
cPickle.dump(storage, f)
f.close()
|
pik-copan/pyregimeshifts
|
scripts/raw_proxy_records_analysis/recurrence_analysis_speleo_raw.py
|
Python
|
mit
| 13,113
|
"""
http://coreygoldberg.blogspot.com/2013/01/python-matrix-in-your-terminal.html
Create "The Matrix" of binary numbers scrolling vertically in your terminal.
original code adapted from juancarlospaco:
- http://ubuntuforums.org/showpost.php?p=10306676
Inspired by the movie: The Matrix
- Corey Goldberg (2013)
Requires:
- Linux
- Python 2.7 or 3+
"""
import fcntl
import time
import random
import struct
import sys
import termios
class message(str):
def __new__(cls, text, speed):
self = super(message, cls).__new__(cls, text)
self.speed = speed
self.y = -1 * len(text)
self.x = random.randint(0, display().width)
self.skip = 0
return self
def move(self):
if self.speed > self.skip:
self.skip += 1
else:
self.skip = 0
self.y += 1
class display(list):
def __init__(self):
self.height, self.width = struct.unpack('hh', fcntl.ioctl(1, termios.TIOCGWINSZ, '1234'))
self[:] = [' ' for y in range(self.height) for x in range(self.width)]
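    # The screen is one flat list of width * height characters: cell (x, y)
    # lives at flat index y * width + x, which set_vertical() below exploits
    # by assigning to a strided slice (step = width).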
def set_vertical(self, x, y, string):
string = string[::-1]
if x < 0:
            x = self.width + x  # wrap negative columns around the right edge
if x >= self.width:
x = self.width - 1
if y < 0:
string = string[abs(y):]
y = 0
if y + len(string) > self.height:
string = string[0:self.height - y]
if y >= self.height:
return
start = y * self.width + x
length = self.width * (y + len(string))
step = self.width
self[start:length:step] = string
def __str__(self):
return ''.join(self)
def matrix(iterations, sleep_time=.08):
messages = []
d = display()
for _ in range(iterations):
messages.append(message('10' * 16, random.randint(1, 5)))
for text in messages:
d.set_vertical(text.x, text.y, text)
text.move()
sys.stdout.write('\033[1m\033[32m%s\033[0m\r' % d)
sys.stdout.flush()
time.sleep(sleep_time)
if __name__ == '__main__':
while True:
try:
matrix(150)
except KeyboardInterrupt:
sys.stdout.write('\n\033[1m\033[32m=== Matrix Stopped ====\033[0m\n')
sys.exit()
|
Pyroseza/Random
|
matrix_cool.py
|
Python
|
mit
| 2,355
|
from __future__ import division
import math
import numpy as np
from time import time
import sympy as sp
import mpmath as mp
from mpmath.ctx_mp_python import mpf
from scipy.misc import factorial
from scipy.special import gamma
precision = 53
mp.prec = precision
mp.pretty = True
def calculate_factorial_ratio(n, i):
# This function calculates (n + i - 1)! / (n - i)!
mp.dps = 50
k = (n - i)
result = 1
for j in range(k + 2*i - 1, k, -1):
result = mp.fmul(result, j)
return result
def n_choose_k(n, k):
j = n - k
numerator = 1
for i in range(1, k + 1):
numerator *= (j + i)
denominator = factorial(k)
return numerator / denominator
def dirichlet_eta(s, N):
def calculate_d_n(n):
total = 0.0
for k in range(n + 1):
if k % 2 == 0:
alternating_factor = 1
else:
alternating_factor = -1
total += alternating_factor * n_choose_k(n, k) / ( k + 1)**s
return total
eta = 0.0
for n in range(N + 1):
d_n = calculate_d_n(n)
eta += d_n / (2**(n + 1))
return eta
def alternating_series(s, N):
eta = dirichlet_eta(s, N)
denominator = 1 - 2**(1 - s)
zeta = eta / denominator
return zeta
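# Background for the two functions above (a sketch of the math they implement):
#   eta(s) = sum_{n>=0} d_n / 2^(n+1),  d_n = sum_{k=0}^{n} (-1)^k C(n,k) / (k+1)^s
# is a globally convergent series for the Dirichlet eta function, and
#   zeta(s) = eta(s) / (1 - 2^(1-s)),  s != 1
# recovers the Riemann zeta function from it.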
def riemann_siegel_theta(t):
first_term = np.angle( gamma( (2.j*t + 1) / 4) )
second_term = t * np.log(np.pi) / 2
return first_term - second_term
def zeta_function(s, N):
z = alternating_series(s, N)
return z
def z_function(t, N=100000):
zeta = zeta_function(1/2 + (1.j)*t, N)
return mp.re( np.exp( 1.j * riemann_siegel_theta(t) ) * zeta )
def calculate_z(t): # Convenient wrapper to use for roots.py
return z_function(t, N=25)
if __name__ == '__main__':
# print zeta_function(s=1/2 + 25.j, N=1000)
# print z_function(t=18, N=100)
start = time()
eta = dirichlet_eta(1, N=25)
print eta
print abs(eta - np.log(2))
end = time()
print "Calculated using alternating series in {:.4f} seconds.".format(float(end - start))
|
tripatheea/Riemann-Zeta
|
python/dirichlet.py
|
Python
|
mit
| 1,897
|
"""Contains the :code:`Address` class, representing a collection of reverse
geocoding results. Primarily, this functions as a container for a set of
:code:`errorgeopy.Location` objects after a successful reverse geocode, and
exposes methods that operate on this set of results, including:
- de-duplication
- extracting the results that best match a pre-expected outcome
- finding the longest common substring of candidate addresses
.. moduleauthor Richard Law <richard.m.law@gmail.com>
"""
# import usaddress
from fuzzywuzzy import process as fuzzyprocess
from errorgeopy.utils import (long_substr, check_location_type,
check_addresses_exist)
from functools import wraps
class Address(object):
"""Represents a collection of parsed reverse geocoder responses (parsed with
geopy). Each member of the :code:`address` property (which is iterable) is a
:code:`geopy.address` object. The raw respones can therefore be obtained
with:
    >>> [a.raw for a in address.addresses]
:code:`errorgeopy` adds methods that operate on the collection of addresses
that consider the set of addresses as a related set.
Attributes:
:code:`addresses` (:code:`list`): Collection of reverse geocoding
responses from as many services that were capable of returning a
response to a query. Each member of the array is a
:code:`geopy.location.Location` object.
"""
@check_location_type
def __init__(self, addresses):
self._addresses = addresses or None
def __unicode__(self):
return '\n'.join([str(a) for a in self.addresses])
def __str__(self):
return self.__unicode__()
@property
def addresses(self):
"""A list of reverse geocoding results from all configured providers.
The single central property of the Address object.
Notes:
Depending on configuration, a provider may return more than one
result for a given query. All results from all providers are
available in this property, in a *flat* (not nested) structure.
The list may be empty if no provider could match an address.
"""
return self._addresses if self._addresses else []
@check_addresses_exist
def dedupe(self, threshold=95):
"""dedupe(threshold=95)
Produces a fuzzily de-duplicated version of the candidate addresses,
using :code:`fuzzywuzzy.proccess.dedupe`.
Note:
See https://github.com/seatgeek/fuzzywuzzy/blob/master/fuzzywuzzy/process.py
for detail on the deduplication algorithm implementation. This
            method does not modify the :code:`Address.addresses` property.
Kwargs:
threshold (int): the numerical value (0,100) point at which you
expect to find duplicates. Defaults to 95 out of 100, which is
higher than the fuzzywuzzy default (70); this higher threshold is
            used by default since addresses are more sensitive to small changes
(e.g. "250 Main Street" and "150 Main Street" have a small edit
distance when considered as strings, but may have a reasonably large
physical distance when considered as physical addresses).
Returns:
A list of :code:`geopy.location.Location` objects (essentially a
filtered list of the original set).
"""
return fuzzyprocess.dedupe([str(a) for a in self.addresses], threshold)
@check_addresses_exist
def longest_common_substring(self, dedupe=False):
"""longest_common_substring(dedupe=False)
Returns the longest common substring of the reverse geocoded
addresses. Note that if there is no common substring, a string of length
zero is returned. If the longest common substring is whitespace, that is
stripped, and a string of length zero is returned.
Kwargs:
dedupe (bool): whether to first perform a deduplication operation on
the set of addresses. Defaults to False.
Returns:
str
"""
addresses = self.addresses if not dedupe else self.dedupe()
return long_substr([str(a) for a in addresses])
@check_addresses_exist
def longest_common_sequence(self, separator=' '):
"""longest_common_sequence(separator='')
Returns the longest common sequence of the reverse geocoded
addresses... or it would, if I had written this code.
Raises:
NotImplementedError
"""
# return utils.longest_common_sequence([str(a) for a in self.addresses],
# separator)
raise NotImplementedError
@check_addresses_exist
def regex(self):
"""regex()
Returns a regular expression that matches all of the reverse geocoded
addresses... well it would if I had written this code.
Raises:
NotImplementedError
"""
raise NotImplementedError
@check_addresses_exist
def extract(self, expectation, limit=4):
"""extract(extraction, limit=4)
Returns the address or addresses within the set of the reverse
geocoded addresses that best match an expected result. Uses fuzzywuzzy
under the hood for matching.
Args:
expectation (str): The string indicating your expected result for a
reverse geocoding operation. It should probably look like an
address. Results are returned in the order that best meets this
expected address.
Kwargs:
limit (int): The maximum number of match candidates to retrieve
from fuzzywuzzy. The length of the returned array may be longer, if
the set of addresses has identical addresses that are good matches
for the expected address (i.e. if two geocoders resolve to the same
string address).
Returns:
list. Return value is a list of tuples, where each tuple contains a
geopy Location, and a matching score based on an extension of the
            Levenshtein distance between the expectation and the Location's
address (a higher score is a better match). The algorithm is
implemented by SeatGeek's fuzzywuzzy, and you can read more here:
http://chairnerd.seatgeek.com/fuzzywuzzy-fuzzy-string-matching-in-python/
"""
extractions = fuzzyprocess.extractBests(
expectation, [str(a) for a in self.addresses],
limit=limit)
result = []
for extraction in extractions:
result.extend([(x, extraction[1]) for x in self.addresses
if str(x) == extraction[0]])
return result
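    # Usage sketch (not part of the original class; hypothetical data):
    #   matches = address.extract("250 Main Street, Springfield", limit=2)
    #   for location, score in matches:
    #       print(location, score)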
@check_addresses_exist
def parse(self):
"""parse()
Raises:
NotImplementedError
"""
# return [usaddress.parse(str(a)) for a in self.addresses]
raise NotImplementedError
@check_addresses_exist
def tag(self, summarise=True):
"""tag(summarise=True)
Raises:
NotImplementedError
"""
# tagged_addresses = [usaddress.tag(str(a)) for a in self.addresses]
# if not summarise:
# return tags
# summarised_tags = OrderedDict()
# for address in tagged_addresses[0]:
# for k, v in address.items():
# if k not in summarised_tags:
# summarised_tags[k] = set([v])
# else:
# summarised_tags[k] = summarised_tags[k].add(v)
# return summarised_tags, set([a[1] for a in tagged_addresses])
raise NotImplementedError
|
alpha-beta-soup/errorgeopy
|
errorgeopy/address.py
|
Python
|
mit
| 7,782
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import os
from core.base_processor import xBaseProcessor
from utilities.export_helper import xExportHelper
from utilities.file_utility import xFileUtility
from definitions.constant_data import xConstantData
class xProcessorPhp(xBaseProcessor) :
    def __init__(self, p_strSuffix, p_strConfig) :
        super(xProcessorPhp, self).__init__('PHP', p_strSuffix, p_strConfig)
def ProcessExport(self, p_strWorkbookName, p_cWorkbook, p_cWorkSheet, p_mapExportConfigs, p_mapDatabaseConfigs, p_mapIndexSheetConfigs, p_mapDataSheetConfigs, p_mapPreloadDataMaps, p_nCategoryLevel) :
        print('>>>>> Processing worksheet [{0}] => [{1}]'.format(p_mapIndexSheetConfigs['DATA_SHEET'], self.Type.lower()))
strExportDirectory = self.GetExportDirectory(p_mapExportConfigs)
self.PrepareExportDirectory(strExportDirectory)
lstCategoryLevelColumnIndexIndexs = self.GetCategoryLevelColumnIndexList(p_nCategoryLevel, self.Config, p_mapExportConfigs, p_mapDataSheetConfigs)
mapGenerateControl = { }
mapGenerateControl['level_index'] = 0
mapGenerateControl['ident'] = '\t'
strContent = ''
strContent += '<?php\n'
strContent += '\n'
strContent += '// ////////////////////////////////////////////////////////////////////////////////////////////\n'
strContent += '// \n'
strContent += '// {0}\n'.format(self.GetCopyrightString(p_mapExportConfigs['COPYRIGHT']['ORGANIZATION'], p_mapExportConfigs['COPYRIGHT']['SINCE_YEAR']))
strContent += '// \n'
strContent += '// Create By : {0}\n'.format(self.GetAuthorString())
strContent += '// \n'
strContent += '// Description : {0}\n'.format(p_cWorkSheet.title)
strContent += '// \n'
strContent += '// ////////////////////////////////////////////////////////////////////////////////////////////\n'
strContent += '\n'
strContent += 'return array('
strContent += self.__ConvertPHPContent(p_mapExportConfigs, p_mapDataSheetConfigs, p_mapPreloadDataMaps, lstCategoryLevelColumnIndexIndexs, p_nCategoryLevel, mapGenerateControl)
strContent += '\n'
strContent += ');\n'
strContent += '\n'
strContent += '// end\n'
strFileName = '{0}.{1}'.format(p_mapIndexSheetConfigs['DATA_FILE_NAME'], self.Suffix.lower())
strFilePath = os.path.join(strExportDirectory, strFileName)
xFileUtility.DeleteFile(strFilePath)
bSuccess = xFileUtility.WriteDataToFile(strFilePath, 'w', strContent)
if bSuccess :
            print('>>>>> Worksheet [{0}] => [{1}] processed successfully!'.format(p_mapIndexSheetConfigs['DATA_SHEET'], self.Type.lower()))
else :
            print('>>>>> Worksheet [{0}] => [{1}] processing failed!'.format(p_mapIndexSheetConfigs['DATA_SHEET'], self.Type.lower()))
return bSuccess
def __ConvertPHPContent(self, p_mapExportConfigs, p_mapDataSheetConfigs, p_mixPreloadDatas, p_lstCategoryLevelColumnIndexIndexs, p_nCategoryLevel, p_mapGenerateControl) :
if type(p_mixPreloadDatas) == dict and p_mixPreloadDatas.has_key('datas') :
return self.__ConvertPHPContent(p_mapExportConfigs, p_mapDataSheetConfigs, p_mixPreloadDatas['datas'], p_lstCategoryLevelColumnIndexIndexs, p_nCategoryLevel, p_mapGenerateControl)
if type(p_mixPreloadDatas) == dict :
strContent = ''
p_mapGenerateControl['level_index'] += 1
for mixKey in p_mixPreloadDatas :
if mixKey is None :
continue
strContent += '\n{0}'.format(self.GenerateIdentIdentifier(p_mapGenerateControl['level_index'], p_mapGenerateControl['ident']))
strKey = '{0}'.format(mixKey)
strKey = strKey.replace('\'', '\\\\\'')
if xConstantData.MYSQL_DATA_DEFINITIONS[p_mapDataSheetConfigs[p_lstCategoryLevelColumnIndexIndexs[p_mapGenerateControl['level_index'] - 1]][xConstantData.DATA_SHEET_ROW_DATA_TYPE].upper()]['IS_STRING'] :
strContent += '\'{0}\' => array('.format(strKey)
else :
strContent += '{0} => array('.format(strKey)
strContent += self.__ConvertPHPContent(p_mapExportConfigs, p_mapDataSheetConfigs, p_mixPreloadDatas[mixKey], p_lstCategoryLevelColumnIndexIndexs, p_nCategoryLevel, p_mapGenerateControl)
if p_mapGenerateControl['level_index'] < len(p_lstCategoryLevelColumnIndexIndexs) :
strContent += '\n{0}'.format(self.GenerateIdentIdentifier(p_mapGenerateControl['level_index'], p_mapGenerateControl['ident']))
if type(p_mixPreloadDatas[mixKey]) == list and len(p_mixPreloadDatas[mixKey]) > 1 :
strContent += '\n{0}'.format(self.GenerateIdentIdentifier(p_mapGenerateControl['level_index'], p_mapGenerateControl['ident']))
strContent += '),'
p_mapGenerateControl['level_index'] -= 1
return strContent
if type(p_mixPreloadDatas) == list :
nPreloadDataSize = len(p_mixPreloadDatas)
strContent = ''
for mapLineDatas in p_mixPreloadDatas :
nDataColumnIndex = 0
if self.IsEmptyLine(mapLineDatas) :
nPreloadDataSize -= 1
continue
if nPreloadDataSize > 1 :
strContent += '\n{0}array('.format(self.GenerateIdentIdentifier(p_mapGenerateControl['level_index'] + 1, p_mapGenerateControl['ident']))
for nColumnIndex in p_mapDataSheetConfigs :
if not xExportHelper.IsDataSheetColumnLanguageAvailable(p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_LANGUAGE_CODE], self.Config, p_mapExportConfigs) :
continue
if not xExportHelper.IsDataSheetColumnExportTypeAvailable(p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_EXPORT_IDENTIFIER], self.Config, p_mapExportConfigs) :
continue
# if p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_AUTO_INCREMENT_IDENTIFIER] is not None :
# continue
strCellValue = ''
strFieldName = xExportHelper.GetFieldNameAsI18N(p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_FIELD], p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_LANGUAGE_CODE], self.Config, p_mapExportConfigs)
if mapLineDatas[strFieldName] is None :
if p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_DEFAULT_VALUE] is not None :
strCellValue = '{0}'.format(p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_DEFAULT_VALUE])
else :
if xConstantData.MYSQL_DATA_DEFINITIONS[p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_DATA_TYPE].upper()]['IS_STRING'] :
strCellValue = ''
else :
strCellValue = '0'
else :
strCellValue = '{0}'.format(mapLineDatas[strFieldName])
strCellValue = strCellValue.replace('\'', '\\\\\'')
if nDataColumnIndex > 0 :
strContent += ' '
if xConstantData.MYSQL_DATA_DEFINITIONS[p_mapDataSheetConfigs[nColumnIndex][xConstantData.DATA_SHEET_ROW_DATA_TYPE].upper()]['IS_STRING'] :
strContent += '\'{0}\' => \'{1}\','.format(strFieldName, strCellValue)
else :
strContent += '\'{0}\' => {1},'.format(strFieldName, strCellValue)
nDataColumnIndex += 1
if nPreloadDataSize > 1 :
strContent += '),'
return strContent
|
xLemon/xExcelConvertor
|
excel_convertor/processors/processor_php.py
|
Python
|
mit
| 7,054
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import datetime
from frappe import _
import frappe
import frappe.database
import frappe.utils
from frappe.utils import cint, flt, get_datetime, datetime, date_diff, today
import frappe.utils.user
from frappe import conf
from frappe.sessions import Session, clear_sessions, delete_session
from frappe.modules.patch_handler import check_session_stopped
from frappe.translate import get_lang_code
from frappe.utils.password import check_password, delete_login_failed_cache
from frappe.core.doctype.activity_log.activity_log import add_authentication_log
from frappe.twofactor import (should_run_2fa, authenticate_for_2factor,
confirm_otp_token, get_cached_user_pass)
from frappe.website.utils import get_home_page
from six.moves.urllib.parse import quote
class HTTPRequest:
def __init__(self):
# Get Environment variables
self.domain = frappe.request.host
if self.domain and self.domain.startswith('www.'):
self.domain = self.domain[4:]
if frappe.get_request_header('X-Forwarded-For'):
frappe.local.request_ip = (frappe.get_request_header('X-Forwarded-For').split(",")[0]).strip()
elif frappe.get_request_header('REMOTE_ADDR'):
frappe.local.request_ip = frappe.get_request_header('REMOTE_ADDR')
else:
frappe.local.request_ip = '127.0.0.1'
# language
self.set_lang()
# load cookies
frappe.local.cookie_manager = CookieManager()
# set db
self.connect()
# login
frappe.local.login_manager = LoginManager()
if frappe.form_dict._lang:
lang = get_lang_code(frappe.form_dict._lang)
if lang:
frappe.local.lang = lang
self.validate_csrf_token()
# write out latest cookies
frappe.local.cookie_manager.init_cookies()
# check status
check_session_stopped()
def validate_csrf_token(self):
if frappe.local.request and frappe.local.request.method in ("POST", "PUT", "DELETE"):
if not frappe.local.session: return
if not frappe.local.session.data.csrf_token \
or frappe.local.session.data.device=="mobile" \
or frappe.conf.get('ignore_csrf', None):
# not via boot
return
csrf_token = frappe.get_request_header("X-Frappe-CSRF-Token")
if not csrf_token and "csrf_token" in frappe.local.form_dict:
csrf_token = frappe.local.form_dict.csrf_token
del frappe.local.form_dict["csrf_token"]
if frappe.local.session.data.csrf_token != csrf_token:
frappe.local.flags.disable_traceback = True
frappe.throw(_("Invalid Request"), frappe.CSRFTokenError)
def set_lang(self):
from frappe.translate import guess_language
frappe.local.lang = guess_language()
def get_db_name(self):
"""get database name from conf"""
return conf.db_name
def connect(self, ac_name = None):
"""connect to db, from ac_name or db_name"""
frappe.local.db = frappe.database.get_db(user = self.get_db_name(), \
password = getattr(conf, 'db_password', ''))
class LoginManager:
def __init__(self):
self.user = None
self.info = None
self.full_name = None
self.user_type = None
if frappe.local.form_dict.get('cmd')=='login' or frappe.local.request.path=="/api/method/login":
if self.login()==False: return
self.resume = False
# run login triggers
self.run_trigger('on_session_creation')
else:
try:
self.resume = True
self.make_session(resume=True)
self.get_user_info()
self.set_user_info(resume=True)
except AttributeError:
self.user = "Guest"
self.get_user_info()
self.make_session()
self.set_user_info()
def login(self):
# clear cache
frappe.clear_cache(user = frappe.form_dict.get('usr'))
user, pwd = get_cached_user_pass()
self.authenticate(user=user, pwd=pwd)
if self.force_user_to_reset_password():
doc = frappe.get_doc("User", self.user)
frappe.local.response["redirect_to"] = doc.reset_password(send_email=False, password_expired=True)
frappe.local.response["message"] = "Password Reset"
return False
if should_run_2fa(self.user):
authenticate_for_2factor(self.user)
if not confirm_otp_token(self):
return False
self.post_login()
def post_login(self):
self.run_trigger('on_login')
validate_ip_address(self.user)
self.validate_hour()
self.get_user_info()
self.make_session()
self.setup_boot_cache()
self.set_user_info()
def get_user_info(self, resume=False):
self.info = frappe.db.get_value("User", self.user,
["user_type", "first_name", "last_name", "user_image"], as_dict=1)
self.user_type = self.info.user_type
def setup_boot_cache(self):
frappe.cache_manager.build_table_count_cache()
frappe.cache_manager.build_domain_restriced_doctype_cache()
frappe.cache_manager.build_domain_restriced_page_cache()
def set_user_info(self, resume=False):
# set sid again
frappe.local.cookie_manager.init_cookies()
self.full_name = " ".join(filter(None, [self.info.first_name,
self.info.last_name]))
if self.info.user_type=="Website User":
frappe.local.cookie_manager.set_cookie("system_user", "no")
if not resume:
frappe.local.response["message"] = "No App"
frappe.local.response["home_page"] = '/' + get_home_page()
else:
frappe.local.cookie_manager.set_cookie("system_user", "yes")
if not resume:
frappe.local.response['message'] = 'Logged In'
frappe.local.response["home_page"] = "/desk"
if not resume:
frappe.response["full_name"] = self.full_name
# redirect information
redirect_to = frappe.cache().hget('redirect_after_login', self.user)
if redirect_to:
frappe.local.response["redirect_to"] = redirect_to
frappe.cache().hdel('redirect_after_login', self.user)
frappe.local.cookie_manager.set_cookie("full_name", self.full_name)
frappe.local.cookie_manager.set_cookie("user_id", self.user)
frappe.local.cookie_manager.set_cookie("user_image", self.info.user_image or "")
def make_session(self, resume=False):
# start session
frappe.local.session_obj = Session(user=self.user, resume=resume,
full_name=self.full_name, user_type=self.user_type)
# reset user if changed to Guest
self.user = frappe.local.session_obj.user
frappe.local.session = frappe.local.session_obj.data
self.clear_active_sessions()
def clear_active_sessions(self):
"""Clear other sessions of the current user if `deny_multiple_sessions` is not set"""
if not (cint(frappe.conf.get("deny_multiple_sessions")) or cint(frappe.db.get_system_setting('deny_multiple_sessions'))):
return
if frappe.session.user != "Guest":
clear_sessions(frappe.session.user, keep_current=True)
def authenticate(self, user=None, pwd=None):
if not (user and pwd):
user, pwd = frappe.form_dict.get('usr'), frappe.form_dict.get('pwd')
if not (user and pwd):
self.fail(_('Incomplete login details'), user=user)
if cint(frappe.db.get_value("System Settings", "System Settings", "allow_login_using_mobile_number")):
user = frappe.db.get_value("User", filters={"mobile_no": user}, fieldname="name") or user
if cint(frappe.db.get_value("System Settings", "System Settings", "allow_login_using_user_name")):
user = frappe.db.get_value("User", filters={"username": user}, fieldname="name") or user
self.check_if_enabled(user)
if not frappe.form_dict.get('tmp_id'):
self.user = self.check_password(user, pwd)
else:
self.user = user
def force_user_to_reset_password(self):
if not self.user:
return
from frappe.core.doctype.user.user import STANDARD_USERS
if self.user in STANDARD_USERS:
return False
reset_pwd_after_days = cint(frappe.db.get_single_value("System Settings",
"force_user_to_reset_password"))
if reset_pwd_after_days:
last_password_reset_date = frappe.db.get_value("User",
self.user, "last_password_reset_date") or today()
last_pwd_reset_days = date_diff(today(), last_password_reset_date)
if last_pwd_reset_days > reset_pwd_after_days:
return True
def check_if_enabled(self, user):
"""raise exception if user not enabled"""
doc = frappe.get_doc("System Settings")
if cint(doc.allow_consecutive_login_attempts) > 0:
check_consecutive_login_attempts(user, doc)
if user=='Administrator': return
if not cint(frappe.db.get_value('User', user, 'enabled')):
self.fail('User disabled or missing', user=user)
def check_password(self, user, pwd):
"""check password"""
try:
# returns user in correct case
return check_password(user, pwd)
except frappe.AuthenticationError:
self.update_invalid_login(user)
self.fail('Incorrect password', user=user)
def fail(self, message, user=None):
if not user:
user = _('Unknown User')
frappe.local.response['message'] = message
add_authentication_log(message, user, status="Failed")
frappe.db.commit()
raise frappe.AuthenticationError
def update_invalid_login(self, user):
last_login_tried = get_last_tried_login_data(user)
failed_count = 0
if last_login_tried > get_datetime():
failed_count = get_login_failed_count(user)
frappe.cache().hset('login_failed_count', user, failed_count + 1)
def run_trigger(self, event='on_login'):
for method in frappe.get_hooks().get(event, []):
frappe.call(frappe.get_attr(method), login_manager=self)
def validate_hour(self):
"""check if user is logging in during restricted hours"""
login_before = int(frappe.db.get_value('User', self.user, 'login_before', ignore=True) or 0)
login_after = int(frappe.db.get_value('User', self.user, 'login_after', ignore=True) or 0)
if not (login_before or login_after):
return
from frappe.utils import now_datetime
current_hour = int(now_datetime().strftime('%H'))
if login_before and current_hour > login_before:
frappe.throw(_("Login not allowed at this time"), frappe.AuthenticationError)
if login_after and current_hour < login_after:
frappe.throw(_("Login not allowed at this time"), frappe.AuthenticationError)
def login_as_guest(self):
"""login as guest"""
self.login_as("Guest")
def login_as(self, user):
self.user = user
self.post_login()
def logout(self, arg='', user=None):
if not user: user = frappe.session.user
self.run_trigger('on_logout')
if user == frappe.session.user:
delete_session(frappe.session.sid, user=user, reason="User Manually Logged Out")
self.clear_cookies()
else:
clear_sessions(user)
def clear_cookies(self):
clear_cookies()
class CookieManager:
def __init__(self):
self.cookies = {}
self.to_delete = []
def init_cookies(self):
if not frappe.local.session.get('sid'): return
# sid expires in 3 days
expires = datetime.datetime.now() + datetime.timedelta(days=3)
if frappe.session.sid:
self.set_cookie("sid", frappe.session.sid, expires=expires, httponly=True)
if frappe.session.session_country:
self.set_cookie("country", frappe.session.session_country)
def set_cookie(self, key, value, expires=None, secure=False, httponly=False, samesite="Lax"):
if not secure and hasattr(frappe.local, 'request'):
secure = frappe.local.request.scheme == "https"
# Cordova does not work with Lax
if frappe.local.session.data.device == "mobile":
samesite = None
self.cookies[key] = {
"value": value,
"expires": expires,
"secure": secure,
"httponly": httponly,
"samesite": samesite
}
def delete_cookie(self, to_delete):
if not isinstance(to_delete, (list, tuple)):
to_delete = [to_delete]
self.to_delete.extend(to_delete)
def flush_cookies(self, response):
for key, opts in self.cookies.items():
response.set_cookie(key, quote((opts.get("value") or "").encode('utf-8')),
expires=opts.get("expires"),
secure=opts.get("secure"),
httponly=opts.get("httponly"),
samesite=opts.get("samesite"))
# expires yesterday!
expires = datetime.datetime.now() + datetime.timedelta(days=-1)
for key in set(self.to_delete):
response.set_cookie(key, "", expires=expires)
@frappe.whitelist()
def get_logged_user():
return frappe.session.user
def clear_cookies():
if hasattr(frappe.local, "session"):
frappe.session.sid = ""
frappe.local.cookie_manager.delete_cookie(["full_name", "user_id", "sid", "user_image", "system_user"])
def get_last_tried_login_data(user, get_last_login=False):
locked_account_time = frappe.cache().hget('locked_account_time', user)
if get_last_login and locked_account_time:
return locked_account_time
last_login_tried = frappe.cache().hget('last_login_tried', user)
if not last_login_tried or last_login_tried < get_datetime():
last_login_tried = get_datetime() + datetime.timedelta(seconds=60)
frappe.cache().hset('last_login_tried', user, last_login_tried)
return last_login_tried
def get_login_failed_count(user):
return cint(frappe.cache().hget('login_failed_count', user)) or 0
def check_consecutive_login_attempts(user, doc):
login_failed_count = get_login_failed_count(user)
last_login_tried = (get_last_tried_login_data(user, True)
+ datetime.timedelta(seconds=doc.allow_login_after_fail))
if login_failed_count >= cint(doc.allow_consecutive_login_attempts):
locked_account_time = frappe.cache().hget('locked_account_time', user)
if not locked_account_time:
frappe.cache().hset('locked_account_time', user, get_datetime())
if last_login_tried > get_datetime():
frappe.throw(_("Your account has been locked and will resume after {0} seconds")
.format(doc.allow_login_after_fail), frappe.SecurityException)
else:
delete_login_failed_cache(user)
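# Illustrative timeline (editor's note): with allow_consecutive_login_attempts = 3
# and allow_login_after_fail = 60, the third failed attempt pushes the cached
# 'login_failed_count' to the limit; the next attempt caches 'locked_account_time'
# and raises frappe.SecurityException while the 60 s window is still open, and
# once the window has elapsed delete_login_failed_cache(user) resets the counters.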
def validate_ip_address(user):
"""check if IP Address is valid"""
user = frappe.get_cached_doc("User", user) if not frappe.flags.in_test else frappe.get_doc("User", user)
ip_list = user.get_restricted_ip_list()
if not ip_list:
return
system_settings = frappe.get_cached_doc("System Settings") if not frappe.flags.in_test else frappe.get_single("System Settings")
# check if bypass restrict ip is enabled for all users
bypass_restrict_ip_check = system_settings.bypass_restrict_ip_check_if_2fa_enabled
# check if two factor auth is enabled
if system_settings.enable_two_factor_auth and not bypass_restrict_ip_check:
# check if bypass restrict ip is enabled for login user
bypass_restrict_ip_check = user.bypass_restrict_ip_check_if_2fa_enabled
for ip in ip_list:
if frappe.local.request_ip.startswith(ip) or bypass_restrict_ip_check:
return
frappe.throw(_("Access not allowed from this IP Address"), frappe.AuthenticationError)
|
adityahase/frappe
|
frappe/auth.py
|
Python
|
mit
| 14,424
|
from .ui import *
from .windows_views import *
|
Saldenisov/QY_itegrating_sphere
|
views/__init__.py
|
Python
|
mit
| 46
|
from collections import OrderedDict
import logging
import json
import re
import itertools
import sublime
import sublime_plugin
from ..lib import inhibit_word_completions
from .commandinfo import (
get_command_name,
get_builtin_command_meta_data,
get_builtin_commands,
iter_python_command_classes,
get_args_from_command_name
)
__all__ = (
"SublimeTextCommandCompletionPythonListener",
"SublimeTextCommandArgsCompletionListener",
"SublimeTextCommandArgsCompletionPythonListener",
"SublimeTextCommandCompletionListener",
)
KIND_APPLICATION = (sublime.KIND_ID_FUNCTION, "A", "Application Command")
KIND_WINDOW = (sublime.KIND_ID_FUNCTION, "W", "Window Command")
KIND_TEXT = (sublime.KIND_ID_FUNCTION, "T", "Text Command")
KIND_MAP = {
'application': KIND_APPLICATION,
'window': KIND_WINDOW,
'text': KIND_TEXT,
}
KIND_COMMAND = (sublime.KIND_ID_FUNCTION, "C", "Command") # fallback
KIND_SNIPPET = sublime.KIND_SNIPPET
logger = logging.getLogger(__name__)
def _escape_in_snippet(v):
return v.replace("}", "\\}").replace("$", "\\$")
def is_plugin(view):
"""Use some heuristics to determine whether a Python view shows a plugin.
Or the console input widget, should it be using the Python syntax.
"""
return (view.find("import sublime", 0, sublime.LITERAL) is not None
or sublime.packages_path() in (view.file_name() or "")
or view.settings().get('is_widget'))
def create_args_snippet_from_command_args(command_args, quote_char='"', for_json=True):
"""Create an argument snippet to insert from the arguments to run a command.
Parameters:
command_args (dict)
The arguments with their default value.
quote_char (str)
Which char should be used for string quoting.
for_json (bool)
Whether it should be done for a json or a python file.
Returns (str)
The formatted entry to insert into the sublime text package
file.
"""
counter = itertools.count(1)
def make_snippet_item(k, v):
if v is not None:
if isinstance(v, str):
v = '{q}${{{i}:{v}}}{q}'.format(i=next(counter),
v=_escape_in_snippet(v),
q=quote_char)
else:
if for_json:
dumps = json.dumps(v)
else: # python
dumps = repr(v)
v = '${{{i}:{v}}}'.format(i=next(counter), v=_escape_in_snippet(dumps))
else:
v = '${i}'.format(i=next(counter))
return '{q}{k}{q}: {v}'.format(k=k, v=v, q=quote_char)
keys = iter(command_args)
if not isinstance(command_args, OrderedDict):
keys = sorted(keys)
snippet_items = (make_snippet_item(k, command_args[k]) for k in keys)
if for_json:
args_content = ",\n\t".join(snippet_items)
args_snippet = '"args": {{\n\t{0}\n}},$0'.format(args_content)
else:
args_content = ", ".join(snippet_items)
args_snippet = '{{{0}}}'.format(args_content)
return args_snippet
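# Example (editor's illustration, not from the original module):
#
#   >>> create_args_snippet_from_command_args({"characters": "abc"})
#   '"args": {\n\t"characters": "${1:abc}"\n},$0'
#
# i.e. a JSON "args" mapping whose values are numbered snippet tab stops.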
def _builtin_completions(names):
_, data = get_builtin_command_meta_data()
for name in names:
yield sublime.CompletionItem(
trigger=name,
annotation="built-in",
completion=name,
kind=KIND_MAP.get(data[name].get("command_type"), KIND_COMMAND),
details=data[name].get('doc_string') or "",
# TODO link to show full description
)
def _plugin_completions(cmd_classes):
for cmd_class in cmd_classes:
name = get_command_name(cmd_class)
module = cmd_class.__module__
package_name = module.split(".")[0]
if issubclass(cmd_class, sublime_plugin.TextCommand):
kind = KIND_TEXT
elif issubclass(cmd_class, sublime_plugin.WindowCommand):
kind = KIND_WINDOW
elif issubclass(cmd_class, sublime_plugin.ApplicationCommand):
kind = KIND_APPLICATION
else:
kind = KIND_COMMAND
yield sublime.CompletionItem(
trigger=name,
annotation=package_name,
completion=name,
kind=kind,
details=(cmd_class.__doc__ or "").strip(),
# TODO link to show full description
)
def _create_completions(command_type=""):
completions = []
completions.extend(_builtin_completions(get_builtin_commands(command_type)))
completions.extend(_plugin_completions(iter_python_command_classes(command_type)))
logger.debug("Collected %d command completions", len(completions))
return completions
class SublimeTextCommandCompletionListener(sublime_plugin.EventListener):
@inhibit_word_completions
def on_query_completions(self, view, prefix, locations):
keymap_scope = "source.json.sublime meta.command-name"
loc = locations[0]
if not view.score_selector(loc, keymap_scope):
return
return _create_completions()
class SublimeTextCommandCompletionPythonListener(sublime_plugin.EventListener):
_RE_LINE_BEFORE = re.compile(
r"(?P<callervar>\w+)\s*\.\s*run_command\s*\("
r"\s*['\"]\w*$",
re.MULTILINE
)
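    # e.g. matches the tail of `view.run_command("ins`, capturing
    # callervar == "view" (editor's note)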
@inhibit_word_completions
def on_query_completions(self, view, prefix, locations):
loc = locations[0]
python_arg_scope = ("source.python meta.function-call.arguments.python string.quoted")
if not view.score_selector(loc, python_arg_scope) or not is_plugin(view):
return None
        # grab the current line plus the line above it, up to the caret
        lines = view.line(sublime.Region(view.line(loc).a - 1, loc))
        before_region = sublime.Region(lines.a, loc)
before = view.substr(before_region)
m = self._RE_LINE_BEFORE.search(before)
if not m:
return None
# get the command type
caller_var = m.group('callervar')
logger.debug("caller_var: %s", caller_var)
if "view" in caller_var or caller_var == "v":
command_type = 'text'
elif caller_var == "sublime":
command_type = 'app'
else:
# window.run_command allows all command types
command_type = ''
return _create_completions(command_type)
class SublimeTextCommandArgsCompletionListener(sublime_plugin.EventListener):
_default_args = [("args\targuments", '"args": {\n\t"$1": "$2"$0\n},')]
_st_insert_arg_scope = (
"("
" ("
+ ", ".join("source.json.sublime.{}".format(suffix)
for suffix in ("commands", "keymap", "macro", "menu", "mousemap"))
+ ")"
" & "
" meta.sequence meta.mapping"
" - meta.sequence meta.mapping meta.mapping"
")"
"- string "
"- comment "
"- ("
" meta.value.json "
" | meta.mapping.json meta.mapping.json "
" | meta.sequence.json meta.sequence.json "
" - meta.menu.collection.sublime-menu"
")"
)
_RE_COMMAND_SEARCH = re.compile(r'\"command\"\s*\:\s*\"(\w+)\"')
def on_query_completions(self, view, prefix, locations):
if not view.score_selector(locations[0], self._st_insert_arg_scope):
return
# extract the line and the line above to search for the command
lines_reg = view.line(sublime.Region(view.line(locations[0]).a - 1, locations[0]))
lines = view.substr(lines_reg)
results = self._RE_COMMAND_SEARCH.findall(lines)
if not results:
return self._default_args
command_name = results[-1]
logger.debug("building args completions for command %r", command_name)
command_args = get_args_from_command_name(command_name)
if not command_args:
return self._default_args
completion = create_args_snippet_from_command_args(command_args, for_json=True)
return [sublime.CompletionItem(
trigger="args",
annotation="auto-detected",
completion=completion,
completion_format=sublime.COMPLETION_FORMAT_SNIPPET,
kind=KIND_SNIPPET,
)]
class SublimeTextCommandArgsCompletionPythonListener(sublime_plugin.EventListener):
_default_args_dict = {
c: sublime.CompletionItem(
trigger="args",
completion="{{{q}$1{q}: $0}}".format(q=c),
completion_format=sublime.COMPLETION_FORMAT_SNIPPET,
kind=KIND_SNIPPET,
)
for c in "'\""
}
_RE_LINE_BEFORE = re.compile(
r"\w+\s*\.\s*run_command\s*\("
r"\s*(['\"])(\w+)\1,\s*\w*$"
)
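    # e.g. matches the tail of `view.run_command("insert", `, capturing
    # quote_char == '"' and command_name == "insert" (editor's note)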
def on_query_completions(self, view, prefix, locations):
loc = locations[0]
        python_arg_scope = "source.python meta.function-call.arguments.python"
if not view.score_selector(loc, python_arg_scope) or not is_plugin(view):
return
before_region = sublime.Region(view.line(loc).a, loc)
before = view.substr(before_region)
m = self._RE_LINE_BEFORE.search(before)
if not m:
return
quote_char, command_name = m.groups()
logger.debug("building args completions for command %r", command_name)
command_args = get_args_from_command_name(command_name)
if command_args is None:
            return [self._default_args_dict[quote_char]]
completion = create_args_snippet_from_command_args(command_args, quote_char,
for_json=False)
return [sublime.CompletionItem(
trigger="args",
annotation="auto-detected",
completion=completion,
completion_format=sublime.COMPLETION_FORMAT_SNIPPET,
kind=KIND_SNIPPET,
)]
|
SublimeText/PackageDev
|
plugins/command_completions/__init__.py
|
Python
|
mit
| 9,912
|
import re
from bs4 import BeautifulSoup
from nose.tools import assert_equal, assert_in, assert_true
import pages
import requests
APP_URL = 'https://www.github.com'
ADMIN_CREDENTIALS = {'username': 'admin@example.com', 'password': 'pk$321'}
ROOT_CREDENTIALS = {'username': 'root', 'password': '123456'}
API_URLS_MAP = {
'login': '/api/login',
'logout': '/api/logout'
}
_admin_session = None
def get_requests_app_cookies(credentials):
s = _get_logged_session(credentials)
return s.cookies
def get_url(url_path, app_url=APP_URL):
return ''.join([app_url, url_path])
def _get_logged_session(credentials):
url = get_url(API_URLS_MAP['login'])
s = requests.Session()
payload = {
'email': credentials['username'],
'password': credentials['password']
}
r = s.post(url, data=payload, verify=False)
assert_equal(r.status_code, 200)
assert_true(r.json()['data']['isAuthenticated'])
return s
def get_csrf_token(response, on_form=False):
response_content = response.text
csrf_pattern = re.compile('<meta name="csrf-token" content="(.*?)">')
if on_form:
csrf_pattern = re.compile("<input type='hidden' name='csrfmiddlewaretoken' value='(.*?)'")
return csrf_pattern.findall(response_content)[0]
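# Example (editor's illustration): for a response body containing
#   <meta name="csrf-token" content="abc123">
# get_csrf_token(response) returns 'abc123'; with on_form=True the token is read
# from the hidden 'csrfmiddlewaretoken' form input instead.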
def _get_data_key(source_name, payload, response):
name_key_source_map = {
'page': '[name]',
'partner': '[name]',
'product': '[title]',
}
key_part = name_key_source_map[source_name]
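    # pick the payload value whose key contains the marker above,
    # e.g. a form field named like 'Page[name]' (editor's assumption)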
name = payload[[k for k in payload.keys() if key_part in k][0]]
    soup = BeautifulSoup(response.text, 'html.parser')  # explicit parser silences bs4's warning
trs = soup.findAll(lambda tag: tag.name == 'tr' and 'data-key' in tag.attrs)
tr = [tr for tr in trs if name in tr.text][0]
return tr['data-key']
|
KorolevskyMax/TestFrameworkTemplate
|
helpers/app_helpers/app_session.py
|
Python
|
mit
| 1,768
|
import numpy as np
import lxml.etree as ET
from ...dates import timedelta
from ...orbits import StateVector
from ...orbits.man import ImpulsiveMan, ContinuousMan
from ...utils import units
from ...frames.orient import G50, EME2000, GCRF, MOD, TEME, TOD, CIRF
from .cov import load_cov, dump_cov
from .commons import (
parse_date,
dump_kvn_meta_odm,
dump_kvn_header,
dump_xml_header,
dump_xml_meta_odm,
decode_unit,
CcsdsError,
DATE_FMT_DEFAULT,
kvn2dict,
xml2dict,
get_format,
)
def loads(string, fmt):
"""Read of OPM string
Args:
string (str): Text containing the OPM in KVN or XML format
fmt (str): format of the file to read
Return:
StateVector:
"""
if fmt == "kvn":
orb = _loads_kvn(string)
elif fmt == "xml":
orb = _loads_xml(string)
else: # pragma: no cover
raise CcsdsError(f"Unknown format '{fmt}'")
return orb
def dumps(data, **kwargs):
# Inject a default format if it is not provided, either by argument or by configuration
fmt = get_format(**kwargs)
if fmt == "kvn":
return _dumps_kvn(data, **kwargs)
elif fmt == "xml":
return _dumps_xml(data, **kwargs)
else: # pragma: no cover
raise CcsdsError(f"Unknown format '{fmt}'")
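# Minimal usage sketch (editor's note; the exact set of mandatory KVN header and
# metadata fields is dictated by the CCSDS OPM standard and by kvn2dict/parse_date,
# so treat this fragment as illustrative only):
#
#   opm_text = """CCSDS_OPM_VERS = 2.0
#   ...header and metadata fields...
#   EPOCH = 2020-01-01T00:00:00.000
#   X = 6524.834 [km]
#   ...
#   """
#   sv = loads(opm_text, fmt="kvn")   # -> StateVector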
def _loads_kvn(string):
data = kvn2dict(string)
try:
name = data["OBJECT_NAME"].text
cospar_id = data["OBJECT_ID"].text
scale = data["TIME_SYSTEM"].text
frame = data["REF_FRAME"].text
center = data["CENTER_NAME"].text
# Convert the frame and center into a beyond frame name
# compatible with beyond.env.jpl
if center.lower() != "earth":
frame = center.title().replace(" ", "")
date = parse_date(data["EPOCH"].text, scale)
vx = decode_unit(data, "X_DOT", "km/s")
vy = decode_unit(data, "Y_DOT", "km/s")
vz = decode_unit(data, "Z_DOT", "km/s")
x = decode_unit(data, "X", "km")
y = decode_unit(data, "Y", "km")
z = decode_unit(data, "Z", "km")
except KeyError as e:
raise CcsdsError(f"Missing mandatory parameter {e}")
orb = StateVector(
[x, y, z, vx, vy, vz], date, "cartesian", frame, name=name, cospar_id=cospar_id
)
for raw_man in data.get("maneuvers", []):
man = {}
man["date"] = parse_date(raw_man["MAN_EPOCH_IGNITION"].text, scale)
man["duration"] = timedelta(seconds=decode_unit(raw_man, "MAN_DURATION", "s"))
man["frame"] = (
raw_man["MAN_REF_FRAME"].text
if raw_man["MAN_REF_FRAME"].text != frame
else None
)
man["delta_mass"] = raw_man["MAN_DELTA_MASS"].text
man["comment"] = raw_man["COMMENT"].text if "COMMENT" in raw_man else None
for i in range(1, 4):
f_name = f"MAN_DV_{i}"
man.setdefault("dv", []).append(decode_unit(raw_man, f_name, "km/s"))
if man["duration"].total_seconds() == 0:
orb.maneuvers.append(
ImpulsiveMan(
man["date"],
man["dv"],
frame=man["frame"],
comment=man["comment"],
)
)
else:
orb.maneuvers.append(
ContinuousMan(
man["date"],
man["duration"],
dv=man["dv"],
frame=man["frame"],
comment=man["comment"],
date_pos="start",
)
)
if "CX_X" in data:
orb.cov = load_cov(orb, data)
for k in data.keys():
if k.startswith("USER_DEFINED"):
ud = orb._data.setdefault("ccsds_user_defined", {})
ud[k[13:]] = data[k].text
return orb
def _loads_xml(string):
data = xml2dict(string.encode())
metadata = data["body"]["segment"]["metadata"]
statevector = data["body"]["segment"]["data"]["stateVector"]
maneuvers = data["body"]["segment"]["data"].get("maneuverParameters")
if isinstance(maneuvers, dict):
maneuvers = [maneuvers]
cov = data["body"]["segment"]["data"].get("covarianceMatrix")
try:
name = metadata["OBJECT_NAME"].text
cospar_id = metadata["OBJECT_ID"].text
scale = metadata["TIME_SYSTEM"].text
frame = metadata["REF_FRAME"].text
center = metadata["CENTER_NAME"].text
# Convert the frame and center into a beyond frame name
# compatible with beyond.env.jpl
if center.lower() != "earth":
frame = center.title().replace(" ", "")
date = parse_date(statevector["EPOCH"].text, scale)
vx = decode_unit(statevector, "X_DOT", "km/s")
vy = decode_unit(statevector, "Y_DOT", "km/s")
vz = decode_unit(statevector, "Z_DOT", "km/s")
x = decode_unit(statevector, "X", "km")
y = decode_unit(statevector, "Y", "km")
z = decode_unit(statevector, "Z", "km")
except KeyError as e:
raise CcsdsError(f"Missing mandatory parameter {e}")
orb = StateVector(
[x, y, z, vx, vy, vz], date, "cartesian", frame, name=name, cospar_id=cospar_id
)
if maneuvers:
for raw_man in maneuvers:
man = {}
man["date"] = parse_date(raw_man["MAN_EPOCH_IGNITION"].text, scale)
man["duration"] = timedelta(
seconds=decode_unit(raw_man, "MAN_DURATION", "s")
)
man["frame"] = (
raw_man["MAN_REF_FRAME"].text
if raw_man["MAN_REF_FRAME"].text != frame
else None
)
man["delta_mass"] = raw_man["MAN_DELTA_MASS"].text
man["comment"] = raw_man["COMMENT"].text if "COMMENT" in raw_man else None
for i in range(1, 4):
f_name = f"MAN_DV_{i}"
man.setdefault("dv", []).append(decode_unit(raw_man, f_name, "km/s"))
if man["duration"].total_seconds() == 0:
orb.maneuvers.append(
ImpulsiveMan(
man["date"],
man["dv"],
frame=man["frame"],
comment=man["comment"],
)
)
else:
orb.maneuvers.append(
ContinuousMan(
man["date"],
man["duration"],
dv=man["dv"],
frame=man["frame"],
comment=man["comment"],
date_pos="start",
)
)
if cov:
orb.cov = load_cov(orb, cov)
ud_dict = data["body"]["segment"]["data"].get("userDefinedParameters", {})
for field in ud_dict.get("USER_DEFINED", []):
ud = orb._data.setdefault("ccsds_user_defined", {})
ud[field.attrib["parameter"]] = field.text
return orb
def _dumps_kvn(data, *, kep=True, **kwargs):
cart = data.copy(form="cartesian")
header = dump_kvn_header(data, "OPM", version="2.0", **kwargs)
meta = dump_kvn_meta_odm(data, **kwargs)
text = """COMMENT State Vector
EPOCH = {cartesian.date:{dfmt}}
X = {cartesian.x: 12.6f} [km]
Y = {cartesian.y: 12.6f} [km]
Z = {cartesian.z: 12.6f} [km]
X_DOT = {cartesian.vx: 12.6f} [km/s]
Y_DOT = {cartesian.vy: 12.6f} [km/s]
Z_DOT = {cartesian.vz: 12.6f} [km/s]
""".format(
cartesian=cart / units.km,
dfmt=DATE_FMT_DEFAULT,
)
if kep and cart.frame.orientation in (G50, EME2000, GCRF, MOD, TOD, TEME, CIRF):
kep = data.copy(form="keplerian")
text += """
COMMENT Keplerian elements
SEMI_MAJOR_AXIS = {kep_a: 12.6f} [km]
ECCENTRICITY = {kep_e: 12.6f}
INCLINATION = {angles[0]: 12.6f} [deg]
RA_OF_ASC_NODE = {angles[1]: 12.6f} [deg]
ARG_OF_PERICENTER = {angles[2]: 12.6f} [deg]
TRUE_ANOMALY = {angles[3]: 12.6f} [deg]
GM = {gm:11.4f} [km**3/s**2]
""".format(
kep_a=kep.a / units.km,
kep_e=kep.e,
angles=np.degrees(kep[2:]),
gm=kep.frame.center.body.mu / (units.km ** 3),
)
# Covariance handling
if cart.cov is not None:
text += dump_cov(cart.cov)
if cart.maneuvers:
for i, man in enumerate(cart.maneuvers):
comment = f"\nCOMMENT {man.comment}" if man.comment else ""
if man.frame is None:
frame = cart.frame
elif man.frame == "QSW":
frame = "RSW"
else:
frame = man.frame
if isinstance(man, ContinuousMan):
date = man.start
duration = man.duration.total_seconds()
else:
date = man.date
duration = 0
text += """{comment}
MAN_EPOCH_IGNITION = {date:{dfmt}}
MAN_DURATION = {duration:0.3f} [s]
MAN_DELTA_MASS = 0.000 [kg]
MAN_REF_FRAME = {frame}
MAN_DV_1 = {dv[0]:.6f} [km/s]
MAN_DV_2 = {dv[1]:.6f} [km/s]
MAN_DV_3 = {dv[2]:.6f} [km/s]
""".format(
i=i + 1,
date=date,
duration=duration,
man=man,
dv=man._dv / units.km,
frame=frame,
comment=comment,
dfmt=DATE_FMT_DEFAULT,
)
if "ccsds_user_defined" in data._data:
text += "\n"
for k, v in data._data["ccsds_user_defined"].items():
text += f"USER_DEFINED_{k} = {v}\n"
return header + "\n" + meta + text
def _dumps_xml(data, *, kep=True, **kwargs):
cart = data.copy(form="cartesian")
    # Build the XML tree: header first, then metadata, then the data section
top = dump_xml_header(data, "OPM", version="2.0", **kwargs)
body = ET.SubElement(top, "body")
segment = ET.SubElement(body, "segment")
dump_xml_meta_odm(segment, data, **kwargs)
data_tag = ET.SubElement(segment, "data")
statevector = ET.SubElement(data_tag, "stateVector")
epoch = ET.SubElement(statevector, "EPOCH")
epoch.text = data.date.strftime(DATE_FMT_DEFAULT)
elems = {
"X": "x",
"Y": "y",
"Z": "z",
"X_DOT": "vx",
"Y_DOT": "vy",
"Z_DOT": "vz",
}
for k, v in elems.items():
x = ET.SubElement(statevector, k, units="km" if "DOT" not in k else "km/s")
x.text = f"{getattr(cart, v) / units.km:0.6f}"
if kep and cart.frame.orientation in (G50, EME2000, GCRF, MOD, TOD, TEME, CIRF):
kep = data.copy(form="keplerian")
keplerian = ET.SubElement(data_tag, "keplerianElements")
sma = ET.SubElement(keplerian, "SEMI_MAJOR_AXIS", units="km")
sma.text = f"{kep.a / units.km:0.6}"
ecc = ET.SubElement(keplerian, "ECCENTRICITY")
ecc.text = f"{kep.e:0.6}"
elems = {
"INCLINATION": "i",
"RA_OF_ASC_NODE": "Omega",
"ARG_OF_PERICENTER": "omega",
"TRUE_ANOMALY": "nu",
}
for k, v in elems.items():
x = ET.SubElement(keplerian, k, units="deg")
x.text = f"{np.degrees(getattr(kep, v)):0.6}"
gm = ET.SubElement(keplerian, "GM", units="km**3/s**2")
gm.text = f"{kep.frame.center.body.mu / units.km ** 3:11.4f}"
if cart.cov is not None:
cov = ET.SubElement(data_tag, "covarianceMatrix")
if cart.cov.frame != cart.frame:
frame = cart.cov.frame
if frame == "QSW":
frame = "RSW"
cov_frame = ET.SubElement(cov, "COV_REF_FRAME")
cov_frame.text = f"{frame}"
elems = ["X", "Y", "Z", "X_DOT", "Y_DOT", "Z_DOT"]
for i, a in enumerate(elems):
for j, b in enumerate(elems[: i + 1]):
x = ET.SubElement(cov, f"C{a}_{b}")
x.text = f"{cart.cov[i, j] / 1000000.0:0.12e}"
if cart.maneuvers:
for man in cart.maneuvers:
mans = ET.SubElement(data_tag, "maneuverParameters")
if man.comment:
com = ET.SubElement(mans, "COMMENT")
com.text = man.comment
if man.frame is None:
frame = cart.frame
elif man.frame == "QSW":
frame = "RSW"
else:
frame = man.frame
if isinstance(man, ContinuousMan):
date = man.start
duration = man.duration.total_seconds()
else:
date = man.date
duration = 0
man_epoch = ET.SubElement(mans, "MAN_EPOCH_IGNITION")
man_epoch.text = date.strftime(DATE_FMT_DEFAULT)
man_dur = ET.SubElement(mans, "MAN_DURATION", units="s")
man_dur.text = f"{duration:0.3f}"
man_mass = ET.SubElement(mans, "MAN_DELTA_MASS", units="kg")
man_mass.text = "-0.001"
man_frame = ET.SubElement(mans, "MAN_REF_FRAME")
man_frame.text = f"{frame}"
for i in range(3):
x = ET.SubElement(mans, f"MAN_DV_{i + 1}", units="km/s")
x.text = f"{man._dv[i] / units.km:.6f}"
if "ccsds_user_defined" in data._data:
ud = ET.SubElement(data_tag, "userDefinedParameters")
for k, v in data._data["ccsds_user_defined"].items():
el = ET.SubElement(ud, "USER_DEFINED", parameter=k)
el.text = v
return ET.tostring(
top, pretty_print=True, xml_declaration=True, encoding="UTF-8"
).decode()
|
galactics/beyond
|
beyond/io/ccsds/opm.py
|
Python
|
mit
| 13,862
|
# -*- coding: utf-8 -*-
"""Startup utilities"""
# pylint:skip-file
import os
import sys
from functools import partial
import paste.script.command
import werkzeug.script
etc = partial(os.path.join, 'parts', 'etc')
DEPLOY_INI = etc('deploy.ini')
DEPLOY_CFG = etc('deploy.cfg')
DEBUG_INI = etc('debug.ini')
DEBUG_CFG = etc('debug.cfg')
_buildout_path = __file__
for i in range(2 + __name__.count('.')):
_buildout_path = os.path.dirname(_buildout_path)
abspath = partial(os.path.join, _buildout_path)
del _buildout_path
# bin/paster serve parts/etc/deploy.ini
def make_app(global_conf={}, config=DEPLOY_CFG, debug=False):
from presence_analyzer import app
app.config.from_pyfile(abspath(config))
app.debug = debug
return app
# bin/paster serve parts/etc/debug.ini
def make_debug(global_conf={}, **conf):
from werkzeug.debug import DebuggedApplication
app = make_app(global_conf, config=DEBUG_CFG, debug=True)
return DebuggedApplication(app, evalex=True)
# bin/flask-ctl shell
def make_shell():
"""
Interactive Flask Shell.
"""
from flask import request
app = make_app()
http = app.test_client()
reqctx = app.test_request_context
return locals()
def _serve(action, debug=False, dry_run=False):
"""
Build paster command from 'action' and 'debug' flag.
"""
if debug:
config = DEBUG_INI
else:
config = DEPLOY_INI
argv = ['bin/paster', 'serve', config]
if action in ('start', 'restart'):
argv += [action, '--daemon']
elif action in ('', 'fg', 'foreground'):
argv += ['--reload']
else:
argv += [action]
# Print the 'paster' command
print ' '.join(argv)
if dry_run:
return
# Configure logging and lock file
if action in ('start', 'stop', 'restart', 'status'):
argv += [
'--log-file', abspath('var', 'log', 'paster.log'),
'--pid-file', abspath('var', 'log', '.paster.pid'),
]
sys.argv = argv[:2] + [abspath(config)] + argv[3:]
# Run the 'paster' command
paste.script.command.run()
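# Editor's note: a dry run prints the assembled command and exits, e.g. on a
# POSIX layout _serve('fg', dry_run=True) prints:
#   bin/paster serve parts/etc/deploy.ini --reload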
# bin/flask-ctl ...
def run():
action_shell = werkzeug.script.make_shell(make_shell, make_shell.__doc__)
# bin/flask-ctl serve [fg|start|stop|restart|status]
def action_serve(action=('a', 'start'), dry_run=False):
"""Serve the application.
This command serves a web application that uses a paste.deploy
configuration file for the server and application.
Options:
- 'action' is one of [fg|start|stop|restart|status]
- '--dry-run' print the paster command and exit
"""
_serve(action, debug=False, dry_run=dry_run)
# bin/flask-ctl debug [fg|start|stop|restart|status]
def action_debug(action=('a', 'start'), dry_run=False):
"""
Serve the debugging application.
"""
_serve(action, debug=True, dry_run=dry_run)
# bin/flask-ctl status
def action_status(dry_run=False):
"""
Status of the application.
"""
_serve('status', dry_run=dry_run)
# bin/flask-ctl stop
def action_stop(dry_run=False):
"""
Stop the application.
"""
_serve('stop', dry_run=dry_run)
werkzeug.script.run()
def download_xml():
"""
Download xml files from stx website.
"""
import urllib
url = 'http://sargo.bolt.stxnext.pl/users.xml'
urllib.urlretrieve(url, 'runtime/data/users.xml')
|
stxnext-kindergarten/presence-analyzer-kjagodzinski
|
src/presence_analyzer/script.py
|
Python
|
mit
| 3,487
|
import time
def import_grid(file_to_open):
grid = []
print(file_to_open)
with open(file_to_open) as file:
for i, line in enumerate(file):
if i == 0:
iterations = int(line.split(" ")[0])
delay = float(line.split(" ")[1])
else:
grid.append([])
line = line.strip()
for item in line:
grid[i-1].append(int(item))
return grid, iterations, delay
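# Example input file (editor's illustration): the first line holds
# "<iterations> <delay>", each following line is a row of cell digits:
#   100 0.5
#   010
#   111
#   010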
def save_grid(file, grid):
with open(file, 'w') as file:
for line in grid:
file.write(line + "\n")
def check_time(prev_time, freq):
    """Return True once more than `freq` seconds have elapsed since `prev_time`."""
    return time.time() - prev_time > freq
|
tburrows13/Game-of-Life
|
tools.py
|
Python
|
mit
| 771
|
# =============================================================================
# COPYRIGHT 2013 Brain Corporation.
# License under MIT license (see LICENSE file)
# =============================================================================
import pytest
from robustus.detail import perform_standard_test
def test_bullet_installation(tmpdir):
tmpdir.chdir()
bullet_versions = ['bc2']
for ver in bullet_versions:
bullet_files = ['lib/bullet-%s/lib/libBulletCollision.a' % ver,
'lib/bullet-%s/lib/libBulletDynamics.a' % ver,
'lib/bullet-%s/lib/libLinearMath.a' % ver]
perform_standard_test('bullet==%s' % ver, [], bullet_files)
if __name__ == '__main__':
pytest.main('-s %s -n0' % __file__)
|
braincorp/robustus
|
robustus/tests/test_bullet.py
|
Python
|
mit
| 775
|
"""Instruction descriptions for the "SPIR-V Extended Instructions for GLSL"
version 1.00, revision 2.
"""
INST_FORMAT = {
1 : {
'name' : 'Round',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
2 : {
'name' : 'RoundEven',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
3 : {
'name' : 'Trunc',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
4 : {
'name': 'FAbs',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
5 : {
'name' : 'SAbs',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
6 : {
'name' : 'FSign',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
7 : {
'name' : 'SSign',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
8 : {
'name' : 'Floor',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
9 : {
'name' : 'Ceil',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
10 : {
'name' : 'Fract',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
11 : {
'name' : 'Radians',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
12 : {
'name' : 'Degrees',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
13 : {
'name' : 'Sin',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
14 : {
'name' : 'Cos',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
15 : {
'name' : 'Tan',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
16 : {
'name' : 'Asin',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
17 : {
'name' : 'Acos',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
18 : {
'name' : 'Atan',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
19 : {
'name' : 'Sinh',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
20 : {
'name' : 'Cosh',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
21 : {
'name' : 'Tanh',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
22 : {
'name' : 'Asinh',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
23 : {
'name' : 'Acosh',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
24 : {
'name' : 'Atanh',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
25 : {
'name' : 'Atan2',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
26 : {
'name' : 'Pow',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
27 : {
'name' : 'Exp',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
28 : {
'name' : 'Log',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
29 : {
'name' : 'Exp2',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
30: {
'name' : 'Log2',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
31 : {
'name' : 'Sqrt',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
32 : {
'name' : 'Inversesqrt',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
33 : {
'name' : 'Determinant',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
34 : {
'name' : 'MatrixInverse',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
35 : {
'name' : 'Modf',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
36 : {
'name' : 'ModfStruct',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
37 : {
'name' : 'FMin',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : True
},
38 : {
'name' : 'UMin',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : True
},
39 : {
'name' : 'SMin',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : True
},
40 : {
'name' : 'FMax',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : True
},
41 : {
'name' : 'UMax',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : True
},
42 : {
'name' : 'SMax',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : True
},
43 : {
'name' : 'FClamp',
'operands' : ['Id', 'Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
44 : {
'name' : 'UClamp',
'operands' : ['Id', 'Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
45 : {
'name' : 'SClamp',
'operands' : ['Id', 'Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
46 : {
'name' : 'FMix',
'operands' : ['Id', 'Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
48 : {
'name' : 'Step',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
49 : {
'name' : 'Smoothstep',
'operands' : ['Id', 'Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
50 : {
'name' : 'Fma',
'operands' : ['Id', 'Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
51 : {
'name' : 'Frexp',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
52 : {
'name' : 'FrexpStruct',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
53 : {
'name' : 'Ldexp',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
54 : {
'name' : 'PackSnorm4x8',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
55 : {
'name' : 'PackUnorm4x8',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
56 : {
'name' : 'PackSnorm2x16',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
57 : {
        'name' : 'PackUnorm2x16',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
58 : {
'name' : 'PackHalf2x16',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
59 : {
'name' : 'PackDouble2x32',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
60 : {
        'name' : 'UnpackSnorm2x16',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
61 : {
'name' : 'UnpackUnorm2x16',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
62 : {
'name' : 'UnpackHalf2x16',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
63 : {
'name' : 'UnpackSnorm4x8',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
64 : {
'name' : 'UnpackUnorm4x8',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
65 : {
'name' : 'UnpackDouble2x32',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
66 : {
'name' : 'Length',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
67 : {
'name' : 'Distance',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : True
},
68 : {
'name' : 'Cross',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
69 : {
'name' : 'Normalize',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
70 : {
'name' : 'FaceForward',
'operands' : ['Id', 'Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
71 : {
'name' : 'Reflect',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
72 : {
'name' : 'Refract',
'operands' : ['Id', 'Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
73 : {
'name' : 'FindILsb',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
74 : {
'name' : 'FindSMsb',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
75 : {
'name' : 'FindUMsb',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
76 : {
'name' : 'InterpolateAtCentroid',
'operands' : ['Id'],
'has_side_effects' : False,
'is_commutative' : False
},
77 : {
'name' : 'InterpolateAtSample',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
78 : {
'name' : 'InterpolateAtOffset',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
79 : {
'name' : 'NMin',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
80 : {
'name' : 'NMax',
'operands' : ['Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
81 : {
'name' : 'NClamp',
'operands' : ['Id', 'Id', 'Id'],
'has_side_effects' : False,
'is_commutative' : False
},
}
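# Example lookup (editor's note): extended instruction 26 decodes as
#   INST_FORMAT[26]['name']           == 'Pow'
#   INST_FORMAT[26]['is_commutative'] is False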
|
kristerw/spirv-tools
|
spirv_tools/ext_inst/glsl_std_450.py
|
Python
|
mit
| 11,873
|
from unittest import TestCase
from dirty_validators.basic import (BaseValidator, EqualTo, NotEqualTo, StringNotContaining, Length, NumberRange,
Regexp, Email, IPAddress, MacAddress, URL, UUID, AnyOf, NoneOf,
IsEmpty, NotEmpty, NotEmptyString, IsNone, NotNone)
import re
class TestBaseValidator(TestCase):
def setUp(self):
self.validator = BaseValidator()
def tearDown(self):
pass
def test_validate_any(self):
self.assertTrue(self.validator.is_valid(None))
self.assertDictEqual(self.validator.messages, {})
self.assertTrue(self.validator.is_valid(3))
self.assertDictEqual(self.validator.messages, {})
self.assertTrue(self.validator.is_valid('aaa'))
self.assertDictEqual(self.validator.messages, {})
self.assertTrue(self.validator.is_valid({}))
self.assertDictEqual(self.validator.messages, {})
def test_error_not_hidden_behaviour(self):
error_key = 'Test key'
error_message = "'$value' is the value error to test hidden feature"
self.validator.error_messages = {error_key: error_message}
self.validator.error(error_key, 'Not hidden')
self.assertEqual(self.validator.messages,
{error_key: "'Not hidden' is the value error to test hidden feature"})
def test_error_hidden_behaviour(self):
hidden_validator = BaseValidator(hidden=True)
error_key = 'Test key'
error_message = "'$value' is the value error to test hidden feature"
hidden_validator.error_messages = {error_key: error_message}
hidden_validator.error(error_key, 'Will it be hidden?')
self.assertEqual(hidden_validator.messages,
{error_key: "'**Hidden**' is the value error to test hidden feature"})
class TestEqualTo(TestCase):
def setUp(self):
self.validator = EqualTo(comp_value="aaa")
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aaa"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("aqaa"))
self.assertDictEqual(self.validator.messages, {EqualTo.NOT_EQUAL: "'aqaa' is not equal to 'aaa'"})
def test_validate_int_success(self):
self.validator = EqualTo(comp_value=3)
self.assertTrue(self.validator.is_valid(3))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_fail(self):
self.validator = EqualTo(comp_value=3)
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {EqualTo.NOT_EQUAL: "'4' is not equal to '3'"})
def test_validate_int_fail_custom_error_message(self):
self.validator = EqualTo(comp_value=3, error_messages={EqualTo.NOT_EQUAL: "$value $value aaa $comp_value"})
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {EqualTo.NOT_EQUAL: "4 4 aaa 3"})
def test_validate_int_fail_custom_error_code(self):
self.validator = EqualTo(comp_value=3, error_code_map={EqualTo.NOT_EQUAL: "newError"})
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {"newError": "'4' is not equal to '3'"})
def test_validate_int_fail_custom_error_code_and_error_message(self):
self.validator = EqualTo(comp_value=3,
error_code_map={EqualTo.NOT_EQUAL: "newError"},
error_messages={EqualTo.NOT_EQUAL: "$value $value aaa $comp_value"})
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {"newError": "4 4 aaa 3"})
def test_validate_int_fail_custom_error_code_error_message_and_custom_value(self):
self.validator = EqualTo(comp_value=3,
error_code_map={EqualTo.NOT_EQUAL: "newError"},
error_messages={EqualTo.NOT_EQUAL: "$value $value aaa $comp_value $value1 $value2"},
message_values={"value1": "aaaaaa1", "value2": "eeeeee1"})
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {"newError": "4 4 aaa 3 aaaaaa1 eeeeee1"})
class TestNotEqualTo(TestCase):
def setUp(self):
self.validator = NotEqualTo(comp_value="aaa")
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aqaa"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("aaa"))
self.assertDictEqual(self.validator.messages, {NotEqualTo.IS_EQUAL: "'aaa' is equal to 'aaa'"})
def test_validate_int_success(self):
self.validator = NotEqualTo(comp_value=3)
self.assertTrue(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_fail(self):
self.validator = NotEqualTo(comp_value=3)
self.assertFalse(self.validator.is_valid(3))
self.assertDictEqual(self.validator.messages, {NotEqualTo.IS_EQUAL: "'3' is equal to '3'"})
class TestStringNotContaining(TestCase):
def setUp(self):
self.validator = StringNotContaining(token='Test_TOKEN')
def test_validate_string_contains(self):
self.assertFalse(self.validator.is_valid('This string contains Test_TOKEN for sure'))
self.assertDictEqual(self.validator.messages,
{StringNotContaining.NOT_CONTAINS:
"'This string contains Test_TOKEN for sure' contains 'Test_TOKEN'"})
def test_validate_string_not_contains(self):
self.assertTrue(self.validator.is_valid('This string does not contain TESt_TOKEN for sensitive cases'))
def test_validate_string_contains_not_sensitive(self):
self.validator.case_sensitive = False
self.assertFalse(self.validator.is_valid('This string contains TESt_TOKEN for sensitive cases'))
class TestLength(TestCase):
def setUp(self):
self.validator = Length(min=3, max=6)
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aqaa"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail_short(self):
self.assertFalse(self.validator.is_valid("aa"))
self.assertDictEqual(self.validator.messages, {Length.TOO_SHORT: "'aa' is less than 3 unit length"})
def test_validate_str_fail_long(self):
self.assertFalse(self.validator.is_valid("aabbnnmm"))
self.assertDictEqual(self.validator.messages, {Length.TOO_LONG: "'aabbnnmm' is more than 6 unit length"})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(5))
self.assertDictEqual(self.validator.messages, {Length.INVALID_TYPE: "'5' has no length"})
def test_validate_list_success(self):
self.assertTrue(self.validator.is_valid(["1a", "32d", "tr", "wq"]))
self.assertDictEqual(self.validator.messages, {})
def test_validate_list_fail_short(self):
self.assertFalse(self.validator.is_valid(["1a"]))
self.assertDictEqual(self.validator.messages, {Length.TOO_SHORT: "'['1a']' is less than 3 unit length"})
def test_validate_list_fail_long(self):
self.assertFalse(self.validator.is_valid(["1a", "32d", "tr", "wq", "qwqw", "dd", "as", "er"]))
self.assertDictEqual(self.validator.messages,
{Length.TOO_LONG:
"'['1a', '32d', 'tr', 'wq', 'qwqw', 'dd', 'as', 'er']' is more than 6 unit length"})
class TestNumberRange(TestCase):
def setUp(self):
self.validator = NumberRange(min=3, max=4)
def tearDown(self):
pass
def test_validate_int_success(self):
self.assertTrue(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(5))
self.assertDictEqual(self.validator.messages, {NumberRange.OUT_OF_RANGE: "'5' is out of range (3, 4)"})
def test_validate_int_no_min_success(self):
self.validator = NumberRange(max=4)
self.assertTrue(self.validator.is_valid(1))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_no_min_fail(self):
self.validator = NumberRange(max=4)
self.assertFalse(self.validator.is_valid(5))
self.assertDictEqual(self.validator.messages, {NumberRange.OUT_OF_RANGE: "'5' is out of range (None, 4)"})
def test_validate_int_no_max_success(self):
self.validator = NumberRange(min=4)
self.assertTrue(self.validator.is_valid(5))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_no_max_fail(self):
self.validator = NumberRange(min=4)
self.assertFalse(self.validator.is_valid(1))
self.assertDictEqual(self.validator.messages, {NumberRange.OUT_OF_RANGE: "'1' is out of range (4, None)"})
class TestRegexp(TestCase):
def setUp(self):
self.validator = Regexp(regex="^aa.+bb$")
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aarrbb"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("aarrbbcc"))
self.assertDictEqual(self.validator.messages,
{Regexp.NOT_MATCH: "'aarrbbcc' does not match against pattern '^aa.+bb$'"})
def test_validate_str_case_sensitive_fail(self):
self.assertFalse(self.validator.is_valid("Aarrbb"))
self.assertDictEqual(self.validator.messages,
{Regexp.NOT_MATCH: "'Aarrbb' does not match against pattern '^aa.+bb$'"})
def test_validate_str_case_insensitive_success(self):
self.validator = Regexp(regex="^aa.+bb$", flags=re.IGNORECASE)
self.assertTrue(self.validator.is_valid("Aarrbb"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(6))
self.assertDictEqual(self.validator.messages,
{Regexp.NOT_MATCH: "'6' does not match against pattern '^aa.+bb$'"})
class TestEmail(TestCase):
def setUp(self):
self.validator = Email()
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aarrbb@aaaa.com"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("aarrbbaaaa@sas.c"))
self.assertDictEqual(self.validator.messages,
{Email.NOT_MAIL: "'aarrbbaaaa@sas.c' is not a valid email address."})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {Email.NOT_MAIL: "'4' is not a valid email address."})
class TestIPAddress(TestCase):
def setUp(self):
self.validator = IPAddress()
def tearDown(self):
pass
def test_validate_str_ipv4_success(self):
self.assertTrue(self.validator.is_valid("192.168.2.2"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv4_fail(self):
self.assertFalse(self.validator.is_valid("192.168.2.277"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'192.168.2.277' does not appear to be a valid IP address. Allowed ipv4"})
def test_validate_str_ipv6_not_allowed_fail(self):
self.assertFalse(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7334"))
self.assertDictEqual(self.validator.messages,
{IPAddress.IPV6_NOT_ALLOWED:
"'2001:0db8:85a3:08d3:1319:8a2e:0370:7334' is " +
"an ipv6 address that is not allowed. Allowed ipv4"})
def test_validate_str_ipv6_success(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertTrue(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7334"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv6_reduced_success(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertTrue(self.validator.is_valid("2001:0db8:85a3::8a2e:0370:7334"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv6_reduced_localhost_success(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertTrue(self.validator.is_valid("::1"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv6_fail(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertFalse(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:733T"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'2001:0db8:85a3:08d3:1319:8a2e:0370:733T' does " +
"not appear to be a valid IP address. Allowed ipv6"})
def test_validate_str_ipv6_too_large_fail(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertFalse(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7333:3333:3333"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'2001:0db8:85a3:08d3:1319:8a2e:0370:7333:3333:3333' does " +
"not appear to be a valid IP address. Allowed ipv6"})
def test_validate_str_ipv6_too_big_fail(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertFalse(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7333FFF"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'2001:0db8:85a3:08d3:1319:8a2e:0370:7333FFF' does " +
"not appear to be a valid IP address. Allowed ipv6"})
def test_validate_str_ipv6_bad_white_spaces_fail(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertFalse(self.validator.is_valid(":0db8:"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"':0db8:' does " +
"not appear to be a valid IP address. Allowed ipv6"})
def test_validate_str_ipv4_not_allowed_fail(self):
self.validator = IPAddress(ipv4=False, ipv6=True)
self.assertFalse(self.validator.is_valid("192.168.2.233"))
self.assertDictEqual(self.validator.messages,
{IPAddress.IPV4_NOT_ALLOWED:
"'192.168.2.233' is an ipv4 address that is not allowed. Allowed ipv6"})
def test_validate_str_ipv4_ipv6_using_ipv4_success(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertTrue(self.validator.is_valid("192.168.2.2"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv4_ipv6_using_ipv6_success(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertTrue(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:7334"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv4_ipv6_using_ipv6_reduced_success(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertTrue(self.validator.is_valid("2001:0db8:85a3::8a2e:0370:7334"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_ipv4_ipv6_using_wrong_ipv4_fail(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertFalse(self.validator.is_valid("192.168.2.277"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'192.168.2.277' does not appear to be a valid IP address. Allowed ipv4 and ipv6"})
def test_validate_str_ipv4_ipv6_using_wrong_ipv6_fail(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertFalse(self.validator.is_valid("2001:0db8:85a3:08d3:1319:8a2e:0370:733T"))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'2001:0db8:85a3:08d3:1319:8a2e:0370:733T' does not " +
"appear to be a valid IP address. Allowed ipv4 and ipv6"})
def test_validate_int_fail(self):
self.validator = IPAddress(ipv4=True, ipv6=True)
self.assertFalse(self.validator.is_valid(2323))
self.assertDictEqual(self.validator.messages,
{IPAddress.NOT_IP_ADDRESS:
"'2323' does not appear to be a valid IP address. Allowed ipv4 and ipv6"})
def test_bad_definition(self):
with self.assertRaises(ValueError):
self.validator = IPAddress(ipv4=False, ipv6=False)
class TestMacAddress(TestCase):
def setUp(self):
self.validator = MacAddress()
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("01:23:45:67:89:ab"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("aarrbba@sas.c"))
self.assertDictEqual(self.validator.messages,
{MacAddress.INVALID_MAC_ADDRESS: "'aarrbba@sas.c' is not a valid mac address."})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages,
{MacAddress.INVALID_MAC_ADDRESS: "'4' is not a valid mac address."})
class TestURL(TestCase):
def setUp(self):
self.validator = URL()
def tearDown(self):
pass
def test_validate_str_required_tld_http_success(self):
self.assertTrue(self.validator.is_valid("http://www.google.com"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_required_tld_git_success(self):
self.assertTrue(self.validator.is_valid("git://github.com"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_no_protocol_fail(self):
self.assertFalse(self.validator.is_valid("google.com"))
self.assertDictEqual(self.validator.messages, {URL.INVALID_URL: "'google.com' is not a valid url."})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {URL.INVALID_URL: "'4' is not a valid url."})
def test_validate_str_not_required_tld_http_success(self):
self.validator = URL(require_tld=False)
self.assertTrue(self.validator.is_valid("http://google"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_not_required_tld_git_success(self):
self.validator = URL(require_tld=False)
self.assertTrue(self.validator.is_valid("git://github"))
self.assertDictEqual(self.validator.messages, {})
class TestUUID(TestCase):
def setUp(self):
self.validator = UUID()
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("550e8400-e29b-41d4-a716-446655440000"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("aarrbbaaaa@sas.c"))
self.assertDictEqual(self.validator.messages, {UUID.INVALID_UUID: "'aarrbbaaaa@sas.c' is not a valid UUID."})
def test_validate_int_fail(self):
self.assertFalse(self.validator.is_valid(4))
self.assertDictEqual(self.validator.messages, {UUID.INVALID_UUID: "'4' is not a valid UUID."})
class TestAnyOf(TestCase):
def setUp(self):
self.validator = AnyOf(values=[1, "2", "aaas", "ouch"])
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aaas"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_success(self):
self.assertTrue(self.validator.is_valid(1))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("lass"))
self.assertDictEqual(self.validator.messages, {AnyOf.NOT_IN_LIST: "'lass' is none of 1, '2', 'aaas', 'ouch'."})
def test_validate_int_as_str_fail(self):
self.assertFalse(self.validator.is_valid(2))
self.assertDictEqual(self.validator.messages, {AnyOf.NOT_IN_LIST: "'2' is none of 1, '2', 'aaas', 'ouch'."})
class TestNoneOf(TestCase):
def setUp(self):
self.validator = NoneOf(values=[1, "2", "aaas", "ouch"])
def tearDown(self):
pass
def test_validate_str_success(self):
self.assertTrue(self.validator.is_valid("aaaaaas"))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_success(self):
self.assertTrue(self.validator.is_valid(9))
self.assertDictEqual(self.validator.messages, {})
def test_validate_int_as_str_success(self):
self.assertTrue(self.validator.is_valid(2))
self.assertDictEqual(self.validator.messages, {})
def test_validate_str_fail(self):
self.assertFalse(self.validator.is_valid("ouch"))
self.assertDictEqual(self.validator.messages, {NoneOf.IN_LIST: "'ouch' is one of 1, '2', 'aaas', 'ouch'."})
class TestEmpty(TestCase):
def setUp(self):
self.validator = IsEmpty()
def test_validate_str_empty(self):
self.assertTrue(self.validator.is_valid(""))
self.assertDictEqual(self.validator.messages, {})
def test_validate_class_empty(self):
class EmptyClass:
def __len__(self):
return 0
self.assertTrue(self.validator.is_valid(EmptyClass()))
self.assertDictEqual(self.validator.messages, {})
def test_validate_not_empty_class(self):
class NotEmptyClass:
def __repr__(self):
return "NotEmptyClass"
self.assertFalse(self.validator.is_valid(NotEmptyClass()))
self.assertDictEqual(self.validator.messages, {IsEmpty.EMPTY: "'NotEmptyClass' must be empty"})
def test_validate_none_ok(self):
self.assertTrue(self.validator.is_valid(None))
self.assertDictEqual(self.validator.messages, {})
def test_float_ok(self):
self.assertTrue(self.validator.is_valid(0.0))
class TestNotEmpty(TestCase):
def setUp(self):
self.validator = NotEmpty()
def test_validate_str_empty(self):
self.assertFalse(self.validator.is_valid(""))
self.assertDictEqual(self.validator.messages,
{NotEmpty.NOT_EMPTY: "Value can not be empty"})
def test_validate_class_empty(self):
class EmptyClass:
def __len__(self):
return 0
self.assertFalse(self.validator.is_valid(EmptyClass()))
def test_validate_not_empty_class(self):
class NotEmptyClass:
pass
self.assertTrue(self.validator.is_valid(NotEmptyClass()))
self.assertDictEqual(self.validator.messages, {})
def test_validate_none_raises(self):
self.assertFalse(self.validator.is_valid(None))
def test_float_raises(self):
self.assertFalse(self.validator.is_valid(0.0))
class TestNotEmptyString(TestCase):
def setUp(self):
self.validator = NotEmptyString()
def test_validate_str_empty(self):
self.assertFalse(self.validator.is_valid(""))
self.assertDictEqual(self.validator.messages,
{NotEmptyString.NOT_EMPTY: "Value can not be empty"})
def test_validate_str_more_whites_empty(self):
self.assertFalse(self.validator.is_valid(" "))
self.assertDictEqual(self.validator.messages,
{NotEmptyString.NOT_EMPTY: "Value can not be empty"})
def test_validate_not_str(self):
self.assertFalse(self.validator.is_valid(3))
self.assertDictEqual(self.validator.messages,
{NotEmptyString.NOT_STRING: "Value must be a string"})
def test_validate_not_empty(self):
self.assertTrue(self.validator.is_valid("Batman"))
class TestIsNone(TestCase):
def setUp(self):
self.validator = IsNone()
def test_validate_str_empty(self):
self.assertFalse(self.validator.is_valid(""))
self.assertDictEqual(self.validator.messages,
{IsNone.NONE: "'' must be None"})
def test_validate_class_empty(self):
class EmptyClass:
def __len__(self):
return 0
self.assertFalse(self.validator.is_valid(EmptyClass()))
def test_validate_none(self):
self.assertTrue(self.validator.is_valid(None))
def test_float_raises(self):
self.assertFalse(self.validator.is_valid(0.0))
class TestIsNotNone(TestCase):
def setUp(self):
self.validator = NotNone()
def test_validate_none_raises(self):
self.assertFalse(self.validator.is_valid(None))
self.assertDictEqual(self.validator.messages,
{NotNone.NOT_NONE: NotNone.error_messages[NotNone.NOT_NONE]})
def test_empty_class_ok(self):
class EmptyClass:
def __len__(self):
return 0
self.assertTrue(self.validator.is_valid(EmptyClass()))
self.assertDictEqual(self.validator.messages, {})
|
alfred82santa/dirty-validators
|
tests/dirty_validators/tests_basic.py
|
Python
|
mit
| 26,469
|
from models.sampler import DynamicBlockGibbsSampler
from models.distribution import DynamicBernoulli
from models.optimizer import DynamicSGD
from utils.utils import prepare_frames
from scipy import io as matio
from data.gwtaylor.path import *
import ipdb
import numpy as np
SIZE_BATCH = 10
EPOCHS = 100
SIZE_HIDDEN = 50
SIZE_VISIBLE = 150
# CRBM Constants
M_LAG_VISIBLE = 2
N_LAG_HIDDEN = 2
SIZE_LAG = max(M_LAG_VISIBLE, N_LAG_HIDDEN)+1
# load and prepare dataset from .mat
mat = matio.loadmat(MOCAP_SAMPLE)
dataset = mat['batchdatabinary']
# generate batches
batch_idx_list = prepare_frames(len(dataset), SIZE_LAG, SIZE_BATCH)
# load distribution
bernoulli = DynamicBernoulli(SIZE_VISIBLE, SIZE_HIDDEN, m_lag_visible=M_LAG_VISIBLE, n_lag_hidden=N_LAG_HIDDEN)
gibbs_sampler = DynamicBlockGibbsSampler(bernoulli, sampling_steps=1)
sgd = DynamicSGD(bernoulli)
for epoch in range(EPOCHS):
error = 0.0
for chunk_idx_list in batch_idx_list:
# get batch data set
data = np.zeros(shape=(SIZE_BATCH, SIZE_VISIBLE, SIZE_LAG))
for idx, (start, end) in enumerate(chunk_idx_list):
data[idx, :, :] = dataset[start:end, :].T
hidden_0_probs, hidden_0_states, \
hidden_k_probs, hidden_k_states, \
visible_k_probs, visible_k_states = gibbs_sampler.sample(data[:, :, 0], data[:, :, 1:])
# compute deltas
d_weight_update, d_bias_hidden_update, \
d_bias_visible_update, d_vis_vis, d_vis_hid = sgd.optimize(data[:, :, 0], hidden_0_states, hidden_0_probs, hidden_k_probs,
hidden_k_states, visible_k_probs, visible_k_states, data[:, :, 1:])
# update model values
bernoulli.weights += d_weight_update
bernoulli.bias_hidden += d_bias_hidden_update
bernoulli.bias_visible += d_bias_visible_update
bernoulli.vis_vis_weights += d_vis_vis
bernoulli.vis_hid_weights += d_vis_hid
# compute reconstruction error
_, _, \
_, _, \
_, visible_k_states = gibbs_sampler.sample(data[:, :, 0], data[:, :, 1:])
error += np.mean(np.abs(visible_k_states - data[:, :, 0]))
    error /= len(batch_idx_list)
    print(error)
|
deprofundis/deprofundis
|
models/scripts/example_crbm.py
|
Python
|
mit
| 2,313
|
#!/usr/bin/python
# build_native.py
# Build native codes
import sys
import os, os.path
import shutil
from optparse import OptionParser
import cocos
from MultiLanguage import MultiLanguage
import cocos_project
import json
import re
from xml.dom import minidom
import project_compile
BUILD_CFIG_FILE="build-cfg.json"
class AndroidBuilder(object):
CFG_KEY_COPY_TO_ASSETS = "copy_to_assets"
CFG_KEY_MUST_COPY_TO_ASSERTS = "must_copy_to_assets"
CFG_KEY_STORE = "key_store"
CFG_KEY_STORE_PASS = "key_store_pass"
CFG_KEY_ALIAS = "alias"
CFG_KEY_ALIAS_PASS = "alias_pass"
ANT_KEY_STORE = "key.store"
ANT_KEY_ALIAS = "key.alias"
ANT_KEY_STORE_PASS = "key.store.password"
ANT_KEY_ALIAS_PASS = "key.alias.password"
GRADLE_KEY_STORE = "RELEASE_STORE_FILE"
GRADLE_KEY_ALIAS = "RELEASE_KEY_ALIAS"
GRADLE_KEY_STORE_PASS = "RELEASE_STORE_PASSWORD"
GRADLE_KEY_ALIAS_PASS = "RELEASE_KEY_PASSWORD"
def __init__(self, verbose, app_android_root, no_res, proj_obj, use_studio=False):
self._verbose = verbose
self.app_android_root = app_android_root
self._no_res = no_res
self._project = proj_obj
self.use_studio = use_studio
# check environment variable
if self.use_studio:
self.ant_root = None
self.sign_prop_file = os.path.join(self.app_android_root, 'app', "gradle.properties")
else:
self.ant_root = cocos.check_environment_variable('ANT_ROOT')
self.sign_prop_file = os.path.join(self.app_android_root, "ant.properties")
self.sdk_root = cocos.check_environment_variable('ANDROID_SDK_ROOT')
self._parse_cfg()
def _run_cmd(self, command, cwd=None):
cocos.CMDRunner.run_cmd(command, self._verbose, cwd=cwd)
def _parse_cfg(self):
self.cfg_path = os.path.join(self.app_android_root, BUILD_CFIG_FILE)
try:
f = open(self.cfg_path)
cfg = json.load(f, encoding='utf8')
f.close()
except Exception:
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_PARSE_CFG_FAILED_FMT', self.cfg_path),
cocos.CCPluginError.ERROR_PARSE_FILE)
        if project_compile.CCPluginCompile.CFG_KEY_MUST_COPY_RESOURCES in cfg:
if self._no_res:
self.res_files = cfg[project_compile.CCPluginCompile.CFG_KEY_MUST_COPY_RESOURCES]
else:
self.res_files = cfg[project_compile.CCPluginCompile.CFG_KEY_MUST_COPY_RESOURCES] + cfg[project_compile.CCPluginCompile.CFG_KEY_COPY_RESOURCES]
else:
self.res_files = cfg[project_compile.CCPluginCompile.CFG_KEY_COPY_RESOURCES]
self.ndk_module_paths = cfg['ndk_module_path']
# get the properties for sign release apk
if self.use_studio:
self.key_store_str = AndroidBuilder.GRADLE_KEY_STORE
self.key_alias_str = AndroidBuilder.GRADLE_KEY_ALIAS
self.key_store_pass_str = AndroidBuilder.GRADLE_KEY_STORE_PASS
self.key_alias_pass_str = AndroidBuilder.GRADLE_KEY_ALIAS_PASS
else:
self.key_store_str = AndroidBuilder.ANT_KEY_STORE
self.key_alias_str = AndroidBuilder.ANT_KEY_ALIAS
self.key_store_pass_str = AndroidBuilder.ANT_KEY_STORE_PASS
self.key_alias_pass_str = AndroidBuilder.ANT_KEY_ALIAS_PASS
move_cfg = {}
self.key_store = None
        if AndroidBuilder.CFG_KEY_STORE in cfg:
self.key_store = cfg[AndroidBuilder.CFG_KEY_STORE]
move_cfg[self.key_store_str] = self.key_store
del cfg[AndroidBuilder.CFG_KEY_STORE]
self.key_store_pass = None
        if AndroidBuilder.CFG_KEY_STORE_PASS in cfg:
self.key_store_pass = cfg[AndroidBuilder.CFG_KEY_STORE_PASS]
move_cfg[self.key_store_pass_str] = self.key_store_pass
del cfg[AndroidBuilder.CFG_KEY_STORE_PASS]
self.alias = None
        if AndroidBuilder.CFG_KEY_ALIAS in cfg:
self.alias = cfg[AndroidBuilder.CFG_KEY_ALIAS]
move_cfg[self.key_alias_str] = self.alias
del cfg[AndroidBuilder.CFG_KEY_ALIAS]
self.alias_pass = None
        if AndroidBuilder.CFG_KEY_ALIAS_PASS in cfg:
self.alias_pass = cfg[AndroidBuilder.CFG_KEY_ALIAS_PASS]
move_cfg[self.key_alias_pass_str] = self.alias_pass
del cfg[AndroidBuilder.CFG_KEY_ALIAS_PASS]
if len(move_cfg) > 0:
# move the config into ant.properties
self._move_cfg(move_cfg)
            with open(self.cfg_path, 'w') as outfile:
                json.dump(cfg, outfile, sort_keys=True, indent=4)
def has_keystore_in_signprops(self):
keystore = None
if self.use_studio:
pattern = re.compile(r"^RELEASE_STORE_FILE=(.+)")
else:
pattern = re.compile(r"^key\.store=(.+)")
try:
file_obj = open(self.sign_prop_file)
for line in file_obj:
str1 = line.replace(' ', '')
str2 = str1.replace('\t', '')
match = pattern.match(str2)
if match is not None:
keystore = match.group(1)
break
file_obj.close()
except:
pass
if keystore is None:
return False
else:
return True
def _write_sign_properties(self, cfg):
file_obj = open(self.sign_prop_file, "a+")
for key in cfg.keys():
str_cfg = "%s=%s\n" % (key, cfg[key])
file_obj.write(str_cfg)
file_obj.close()
def _move_cfg(self, cfg):
if not self.has_keystore_in_signprops():
self._write_sign_properties(cfg)
def remove_c_libs(self, libs_dir):
for file_name in os.listdir(libs_dir):
lib_file = os.path.join(libs_dir, file_name)
if os.path.isfile(lib_file):
ext = os.path.splitext(lib_file)[1]
if ext == ".a" or ext == ".so":
os.remove(lib_file)
def update_project(self, android_platform):
if self.use_studio:
manifest_path = os.path.join(self.app_android_root, 'app')
else:
manifest_path = self.app_android_root
sdk_tool_path = os.path.join(self.sdk_root, "tools", "android")
# check the android platform
target_str = self.check_android_platform(self.sdk_root, android_platform, manifest_path, False)
# update project
command = "%s update project -t %s -p %s" % (cocos.CMDRunner.convert_path_to_cmd(sdk_tool_path), target_str, manifest_path)
self._run_cmd(command)
# update lib-projects
property_path = manifest_path
self.update_lib_projects(self.sdk_root, sdk_tool_path, android_platform, property_path)
if self.use_studio:
# copy the local.properties to the app_android_root
file_name = 'local.properties'
src_path = os.path.normpath(os.path.join(manifest_path, file_name))
dst_path = os.path.normpath(os.path.join(self.app_android_root, file_name))
if src_path != dst_path:
if os.path.isfile(dst_path):
os.remove(dst_path)
shutil.copy(src_path, dst_path)
def get_toolchain_version(self, ndk_root, compile_obj):
ret_version = "4.8"
version_file_path = os.path.join(ndk_root, "RELEASE.TXT")
try:
versionFile = open(version_file_path)
lines = versionFile.readlines()
versionFile.close()
version_num = None
version_char = None
pattern = r'^[a-zA-Z]+(\d+)(\w)'
for line in lines:
str_line = line.lstrip()
match = re.match(pattern, str_line)
if match:
version_num = int(match.group(1))
version_char = match.group(2)
break
if version_num is None:
cocos.Logging.warning(MultiLanguage.get_string('COMPILE_WARNING_GET_NDK_VER_FAILED_FMT',
version_file_path))
else:
version_char = version_char.lower()
if version_num > 10 or (version_num == 10 and cmp(version_char, 'c') >= 0):
ret_version = "4.9"
else:
compile_obj.add_warning_at_end(MultiLanguage.get_string('COMPILE_WARNING_NDK_VERSION'))
except:
cocos.Logging.warning(MultiLanguage.get_string('COMPILE_WARNING_GET_NDK_VER_FAILED_FMT', version_file_path))
cocos.Logging.info(MultiLanguage.get_string('COMPILE_INFO_NDK_TOOLCHAIN_VER_FMT', ret_version))
if ret_version == "4.8":
compile_obj.add_warning_at_end(MultiLanguage.get_string('COMPILE_WARNING_TOOLCHAIN_FMT', ret_version))
return ret_version
def do_ndk_build(self, ndk_build_param, build_mode, compile_obj):
cocos.Logging.info(MultiLanguage.get_string('COMPILE_INFO_NDK_MODE', build_mode))
ndk_root = cocos.check_environment_variable('NDK_ROOT')
toolchain_version = self.get_toolchain_version(ndk_root, compile_obj)
if self.use_studio:
ndk_work_dir = os.path.join(self.app_android_root, 'app')
else:
ndk_work_dir = self.app_android_root
reload(sys)
sys.setdefaultencoding('utf8')
ndk_path = cocos.CMDRunner.convert_path_to_cmd(os.path.join(ndk_root, "ndk-build"))
module_paths = []
for cfg_path in self.ndk_module_paths:
if cfg_path.find("${ENGINE_ROOT}") >= 0:
cocos_root = cocos.check_environment_variable("COCOS_X_ROOT")
module_paths.append(cfg_path.replace("${ENGINE_ROOT}", cocos_root))
elif cfg_path.find("${COCOS_FRAMEWORKS}") >= 0:
cocos_frameworks = cocos.check_environment_variable("COCOS_FRAMEWORKS")
module_paths.append(cfg_path.replace("${COCOS_FRAMEWORKS}", cocos_frameworks))
else:
module_paths.append(os.path.join(self.app_android_root, cfg_path))
# delete template static and dynamic files
obj_local_dir = os.path.join(ndk_work_dir, "obj", "local")
if os.path.isdir(obj_local_dir):
for abi_dir in os.listdir(obj_local_dir):
static_file_path = os.path.join(ndk_work_dir, "obj", "local", abi_dir)
if os.path.isdir(static_file_path):
self.remove_c_libs(static_file_path)
        # windows should use ";" to separate module paths
if cocos.os_is_win32():
ndk_module_path = ';'.join(module_paths)
else:
ndk_module_path = ':'.join(module_paths)
        ndk_module_path = 'NDK_MODULE_PATH=' + ndk_module_path
if ndk_build_param is None:
ndk_build_cmd = '%s -C %s %s' % (ndk_path, ndk_work_dir, ndk_module_path)
else:
ndk_build_cmd = '%s -C %s %s %s' % (ndk_path, ndk_work_dir, ' '.join(ndk_build_param), ndk_module_path)
ndk_build_cmd = '%s NDK_TOOLCHAIN_VERSION=%s' % (ndk_build_cmd, toolchain_version)
if build_mode == 'debug':
ndk_build_cmd = '%s NDK_DEBUG=1' % ndk_build_cmd
self._run_cmd(ndk_build_cmd)
def _xml_attr(self, dir, file_name, node_name, attr):
doc = minidom.parse(os.path.join(dir, file_name))
return doc.getElementsByTagName(node_name)[0].getAttribute(attr)
def update_lib_projects(self, sdk_root, sdk_tool_path, android_platform, property_path):
property_file = os.path.join(property_path, "project.properties")
if not os.path.isfile(property_file):
return
        pattern = re.compile(r'^android\.library\.reference\.[\d]+=(.+)')
        for line in open(property_file):
            str1 = line.replace(' ', '')
            str2 = str1.replace('\t', '')
            match = pattern.match(str2)
if match is not None:
# a lib project is found
lib_path = match.group(1)
abs_lib_path = os.path.join(property_path, lib_path)
abs_lib_path = os.path.normpath(abs_lib_path)
if os.path.isdir(abs_lib_path):
target_str = self.check_android_platform(sdk_root, android_platform, abs_lib_path, True)
command = "%s update lib-project -p %s -t %s" % (cocos.CMDRunner.convert_path_to_cmd(sdk_tool_path), abs_lib_path, target_str)
self._run_cmd(command)
self.update_lib_projects(sdk_root, sdk_tool_path, android_platform, abs_lib_path)
def select_default_android_platform(self, min_api_level):
''' select a default android platform in SDK_ROOT
'''
sdk_root = cocos.check_environment_variable('ANDROID_SDK_ROOT')
platforms_dir = os.path.join(sdk_root, "platforms")
ret_num = -1
ret_platform = ""
if os.path.isdir(platforms_dir):
for dir_name in os.listdir(platforms_dir):
if not os.path.isdir(os.path.join(platforms_dir, dir_name)):
continue
num = self.get_api_level(dir_name, raise_error=False)
if num >= min_api_level:
if ret_num == -1 or ret_num > num:
ret_num = num
ret_platform = dir_name
if ret_num != -1:
return ret_platform
else:
return None
def get_api_level(self, target_str, raise_error=True):
        special_targets_info = {
            "android-4.2" : 17,
            "android-L" : 20
        }
        if target_str in special_targets_info:
            ret = special_targets_info[target_str]
else:
match = re.match(r'android-(\d+)', target_str)
if match is not None:
ret = int(match.group(1))
else:
if raise_error:
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_NOT_VALID_AP_FMT', target_str),
cocos.CCPluginError.ERROR_PARSE_FILE)
else:
ret = -1
return ret
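    # --- Added note (illustration, not part of the original plugin): with the
    # logic above, e.g. get_api_level("android-19") returns 19 via the regex,
    # while get_api_level("android-L") maps to 20 via the special-case table.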
def get_target_config(self, proj_path):
property_file = os.path.join(proj_path, "project.properties")
if not os.path.isfile(property_file):
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_FILE_NOT_FOUND_FMT', property_file),
cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
        pattern = re.compile(r'^target=(.+)')
        for line in open(property_file):
            str1 = line.replace(' ', '')
            str2 = str1.replace('\t', '')
            match = pattern.match(str2)
if match is not None:
target = match.group(1)
target_num = self.get_api_level(target)
if target_num > 0:
return target_num
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_TARGET_NOT_FOUND_FMT', property_file),
cocos.CCPluginError.ERROR_PARSE_FILE)
# check the selected android platform
def check_android_platform(self, sdk_root, android_platform, proj_path, auto_select):
ret = android_platform
min_platform = self.get_target_config(proj_path)
if android_platform is None:
# not specified platform, found one
cocos.Logging.info(MultiLanguage.get_string('COMPILE_INFO_AUTO_SELECT_AP'))
ret = self.select_default_android_platform(min_platform)
else:
# check whether it's larger than min_platform
select_api_level = self.get_api_level(android_platform)
if select_api_level < min_platform:
if auto_select:
# select one for project
ret = self.select_default_android_platform(min_platform)
else:
# raise error
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_AP_TOO_LOW_FMT',
(proj_path, min_platform, select_api_level)),
cocos.CCPluginError.ERROR_WRONG_ARGS)
if ret is None:
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_AP_NOT_FOUND_FMT',
(proj_path, min_platform)),
cocos.CCPluginError.ERROR_PARSE_FILE)
ret_path = os.path.join(cocos.CMDRunner.convert_path_to_python(sdk_root), "platforms", ret)
if not os.path.isdir(ret_path):
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_NO_AP_IN_SDK_FMT', ret),
cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
special_platforms_info = {
"android-4.2" : "android-17"
}
        if ret in special_platforms_info:
ret = special_platforms_info[ret]
return ret
def ant_build_apk(self, build_mode, custom_step_args):
app_android_root = self.app_android_root
# run ant build
ant_path = os.path.join(self.ant_root, 'ant')
buildfile_path = os.path.join(app_android_root, "build.xml")
        # generate parameters for custom step
args_ant_copy = custom_step_args.copy()
target_platform = cocos_project.Platforms.ANDROID
# invoke custom step: pre-ant-build
self._project.invoke_custom_step_script(cocos_project.Project.CUSTOM_STEP_PRE_ANT_BUILD,
target_platform, args_ant_copy)
command = "%s clean %s -f %s -Dsdk.dir=%s" % (cocos.CMDRunner.convert_path_to_cmd(ant_path),
build_mode, buildfile_path,
cocos.CMDRunner.convert_path_to_cmd(self.sdk_root))
self._run_cmd(command)
# invoke custom step: post-ant-build
self._project.invoke_custom_step_script(cocos_project.Project.CUSTOM_STEP_POST_ANT_BUILD,
target_platform, args_ant_copy)
def gradle_build_apk(self, build_mode):
# check the compileSdkVersion & buildToolsVersion
check_file = os.path.join(self.app_android_root, 'app', 'build.gradle')
f = open(check_file)
lines = f.readlines()
f.close()
compile_sdk_ver = None
build_tools_ver = None
compile_sdk_pattern = r'compileSdkVersion[ \t]+([\d]+)'
build_tools_pattern = r'buildToolsVersion[ \t]+"(.+)"'
for line in lines:
line_str = line.strip()
match1 = re.match(compile_sdk_pattern, line_str)
if match1:
compile_sdk_ver = match1.group(1)
match2 = re.match(build_tools_pattern, line_str)
if match2:
build_tools_ver = match2.group(1)
if compile_sdk_ver is not None:
# check the compileSdkVersion
check_folder_name = 'android-%s' % compile_sdk_ver
check_path = os.path.join(self.sdk_root, 'platforms', check_folder_name)
if not os.path.isdir(check_path):
cocos.Logging.warning(MultiLanguage.get_string('COMPILE_WARNING_COMPILE_SDK_FMT',
(compile_sdk_ver, check_path)))
if build_tools_ver is not None:
# check the buildToolsVersion
check_path = os.path.join(self.sdk_root, 'build-tools', build_tools_ver)
if not os.path.isdir(check_path):
cocos.Logging.warning(MultiLanguage.get_string('COMPILE_WARNING_BUILD_TOOLS_FMT',
(build_tools_ver, check_path)))
# invoke gradlew for gradle building
if cocos.os_is_win32():
gradle_path = os.path.join(self.app_android_root, 'gradlew.bat')
else:
gradle_path = os.path.join(self.app_android_root, 'gradlew')
if not os.path.isfile(gradle_path):
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_GRALEW_NOT_EXIST_FMT', gradle_path),
cocos.CCPluginError.ERROR_PATH_NOT_FOUND)
mode_str = 'Debug' if build_mode == 'debug' else 'Release'
cmd = '"%s" --parallel --info assemble%s' % (gradle_path, mode_str)
self._run_cmd(cmd, cwd=self.app_android_root)
def do_build_apk(self, build_mode, no_apk, output_dir, custom_step_args, compile_obj):
if self.use_studio:
assets_dir = os.path.join(self.app_android_root, "app", "assets")
project_name = None
setting_file = os.path.join(self.app_android_root, 'settings.gradle')
if os.path.isfile(setting_file):
# get project name from settings.gradle
f = open(setting_file)
lines = f.readlines()
f.close()
pattern = r"project\(':(.*)'\)\.projectDir[ \t]*=[ \t]*new[ \t]*File\(settingsDir, 'app'\)"
for line in lines:
line_str = line.strip()
match = re.match(pattern, line_str)
if match:
project_name = match.group(1)
break
if project_name is None:
# use default project name
project_name = 'app'
gen_apk_folder = os.path.join(self.app_android_root, 'app/build/outputs/apk')
else:
assets_dir = os.path.join(self.app_android_root, "assets")
project_name = self._xml_attr(self.app_android_root, 'build.xml', 'project', 'name')
gen_apk_folder = os.path.join(self.app_android_root, 'bin')
# copy resources
self._copy_resources(custom_step_args, assets_dir)
# check the project config & compile the script files
if self._project._is_lua_project():
compile_obj.compile_lua_scripts(assets_dir, assets_dir)
if self._project._is_js_project():
compile_obj.compile_js_scripts(assets_dir, assets_dir)
if not no_apk:
# gather the sign info if necessary
if build_mode == "release" and not self.has_keystore_in_signprops():
self._gather_sign_info()
# build apk
if self.use_studio:
self.gradle_build_apk(build_mode)
else:
self.ant_build_apk(build_mode, custom_step_args)
# copy the apk to output dir
if output_dir:
apk_name = '%s-%s.apk' % (project_name, build_mode)
gen_apk_path = os.path.join(gen_apk_folder, apk_name)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
shutil.copy(gen_apk_path, output_dir)
cocos.Logging.info(MultiLanguage.get_string('COMPILE_INFO_MOVE_APK_FMT', output_dir))
if build_mode == "release":
signed_name = "%s-%s-signed.apk" % (project_name, build_mode)
apk_path = os.path.join(output_dir, signed_name)
if os.path.exists(apk_path):
os.remove(apk_path)
os.rename(os.path.join(output_dir, apk_name), apk_path)
else:
apk_path = os.path.join(output_dir, apk_name)
return apk_path
else:
raise cocos.CCPluginError(MultiLanguage.get_string('COMPILE_ERROR_NOT_SPECIFY_OUTPUT'),
cocos.CCPluginError.ERROR_WRONG_ARGS)
def _gather_sign_info(self):
user_cfg = {}
# get the path of keystore file
while True:
inputed = self._get_user_input(MultiLanguage.get_string('COMPILE_TIP_INPUT_KEYSTORE'))
inputed = inputed.strip()
if not os.path.isabs(inputed):
if self.use_studio:
start_path = os.path.join(self.app_android_root, 'app')
else:
start_path = self.app_android_root
abs_path = os.path.join(start_path, inputed)
else:
abs_path = inputed
if os.path.isfile(abs_path):
user_cfg[self.key_store_str] = inputed.replace('\\', '/')
break
else:
cocos.Logging.warning(MultiLanguage.get_string('COMPILE_INFO_NOT_A_FILE'))
# get the alias of keystore file
user_cfg[self.key_alias_str] = self._get_user_input(MultiLanguage.get_string('COMPILE_TIP_INPUT_ALIAS'))
# get the keystore password
user_cfg[self.key_store_pass_str] = self._get_user_input(MultiLanguage.get_string('COMPILE_TIP_INPUT_KEY_PASS'))
# get the alias password
user_cfg[self.key_alias_pass_str] = self._get_user_input(MultiLanguage.get_string('COMPILE_TIP_INPUT_ALIAS_PASS'))
# write the config into ant.properties
self._write_sign_properties(user_cfg)
def _get_user_input(self, tip_msg):
cocos.Logging.warning(tip_msg)
ret = None
while True:
ret = raw_input()
break
return ret
def _copy_resources(self, custom_step_args, assets_dir):
app_android_root = self.app_android_root
res_files = self.res_files
# remove app_android_root/assets if it exists
if os.path.isdir(assets_dir):
shutil.rmtree(assets_dir)
# generate parameters for custom steps
target_platform = cocos_project.Platforms.ANDROID
cur_custom_step_args = custom_step_args.copy()
cur_custom_step_args["assets-dir"] = assets_dir
# make dir
os.mkdir(assets_dir)
# invoke custom step : pre copy assets
self._project.invoke_custom_step_script(cocos_project.Project.CUSTOM_STEP_PRE_COPY_ASSETS, target_platform, cur_custom_step_args)
# copy resources
for cfg in res_files:
cocos.copy_files_with_config(cfg, app_android_root, assets_dir)
# invoke custom step : post copy assets
self._project.invoke_custom_step_script(cocos_project.Project.CUSTOM_STEP_POST_COPY_ASSETS, target_platform, cur_custom_step_args)
def get_apk_info(self):
if self.use_studio:
manifest_path = os.path.join(self.app_android_root, 'app')
gradle_cfg_path = os.path.join(manifest_path, 'build.gradle')
package = None
if os.path.isfile(gradle_cfg_path):
# get package name from build.gradle
f = open(gradle_cfg_path)
for line in f.readlines():
line_str = line.strip()
pattern = r'applicationId[ \t]+"(.*)"'
match = re.match(pattern, line_str)
if match:
package = match.group(1)
break
if package is None:
# get package name from AndroidManifest.xml
package = self._xml_attr(manifest_path, 'AndroidManifest.xml', 'manifest', 'package')
else:
manifest_path = self.app_android_root
package = self._xml_attr(manifest_path, 'AndroidManifest.xml', 'manifest', 'package')
activity_name = self._xml_attr(manifest_path, 'AndroidManifest.xml', 'activity', 'android:name')
if activity_name.startswith('.'):
activity = package + activity_name
else:
activity = activity_name
ret = (package, activity)
return ret
|
dios-game/dios-cocos
|
src/oslibs/cocos/cocos-src/tools/cocos2d-console/plugins/project_compile/build_android.py
|
Python
|
mit
| 28,191
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import math
import hyperopt
from hyperopt import fmin, tpe, hp
from hyperopt.mongoexp import MongoTrials
def get_space():
space = (hp.quniform('numTrees', 1, 10, 1),
hp.quniform('samplesPerImage', 10, 7500, 1),
hp.quniform('featureCount', 10, 7500, 1),
hp.quniform('minSampleCount', 1, 1000, 1),
hp.quniform('maxDepth', 5, 25, 1),
hp.quniform('boxRadius', 1, 127, 1),
hp.quniform('regionSize', 1, 127, 1),
hp.quniform('thresholds', 10, 60, 1),
hp.uniform('histogramBias', 0.0, 0.6),
)
return space
def get_exp(mongodb_url, database, exp_key):
trials = MongoTrials('mongo://%s/%s/jobs' % (mongodb_url, database), exp_key=exp_key)
space = get_space()
return trials, space
def show(mongodb_url, db, exp_key):
    print("Get trials, space...")
    trials, space = get_exp(mongodb_url, db, exp_key)
    print("Get bandit...")
    bandit = hyperopt.Bandit(expr=space, do_checks=False)
    print("Plotting...")
# from IPython.core.debugger import Tracer; Tracer()()
best_trial = trials.best_trial
values = best_trial['misc']['vals']
loss = best_trial['result']['loss']
true_loss = best_trial['result']['true_loss']
print ("values: ", values)
print ("loss: ", loss)
print ("true_loss: ", true_loss)
hyperopt.plotting.main_plot_history(trials)
hyperopt.plotting.main_plot_vars(trials, bandit, colorize_best=3)
if __name__ == "__main__":
if len(sys.argv) < 4:
print("usage: %s <mongodb-url> <database> <experiment> [show]" % sys.argv[0], file=sys.stderr)
sys.exit(1)
mongodb_url, database, exp_key = sys.argv[1:4]
if len(sys.argv) == 5:
show(mongodb_url, database, exp_key)
sys.exit(0)
trials, space = get_exp(mongodb_url, database, exp_key)
best = fmin(fn=math.sin, space=space, trials=trials, algo=tpe.suggest, max_evals=1000)
print("best: %s" % (repr(best)))
|
ferasha/curfil
|
scripts/hyperopt/hyperopt_search.py
|
Python
|
mit
| 2,103
|
'''
Defines the base class of an electric potential grid.
'''
import numpy as np
import matplotlib as mpl
import matplotlib.pylab as plt
from numba import jit
# Global dimensions (used for plots)
sqc_x = (2., 'cm') # unit length for SquareCable
sqc_u = (10., 'V') # unit potential for SquareCable
edm_x = (10., 'mm') # unit length for Edm
edm_u = (2., 'kV') # unit potential for Edm
# Plot parameters
font = {'family' : 'normal',
'weight' : 'normal'}
mpl.rc('font', **font)
mpl.rcParams['lines.linewidth'] = 5.
# Functions compiled just-in-time
@jit
def gen_sc_grid(b, t, u):
'''
Generates SquareCable grid.
'''
grid = np.full((b,b), u)
fix = np.ones((b,b))
grid = np.pad(grid, ((t-b)/2,), 'constant', constant_values=(0,))
fix = np.pad(fix, ((t-b)/2,), 'constant', constant_values=(0,))
grid = np.pad(grid, 1, 'constant', constant_values=(0,))
fix = np.pad(fix, 1, 'constant', constant_values=(1,))
return grid, fix
@jit
def gen_edm_grid(tube_dist, scale=1):
'''
Generates Edm grid.
'''
small_plate = np.full(2,1, dtype='float64')
big_plate = np.full(20,4, dtype='float64')
gap = np.zeros(1, dtype='float64')
row_one = np.concatenate((small_plate, gap, big_plate, gap, small_plate))
row_two = np.zeros(row_one.size)
row_three = -row_one
grid = np.vstack((row_one, row_two, row_three))
grid = np.pad(grid, tube_dist, 'constant', constant_values=(0,))
fix = np.where(grid==0, 0, 1)
if scale != 1:
scale = np.ones((scale, scale))
grid = np.kron(grid, scale)
fix = np.kron(fix, scale)
grid = np.pad(grid, 1, 'constant', constant_values=(0,))
fix = np.pad(fix, 1, 'constant', constant_values=(1,))
return grid, fix
@jit
def update(grid, fix, scale, w=-1):
'''
Updates SquareCable or Edm grid.
Relaxation parameter w (0 < w < 2) affects the speed of convergence.
- w = 'j': solves with Jacobi method
- w = -1: solves with estimated optimal w
'''
if w=='j' or w=='J':
new_grid=np.copy(grid)
for index, fixed in np.ndenumerate(fix):
if fixed: continue
new_grid[index] = 0.25*( grid[index[0]-1, index[1]] +
grid[index[0]+1, index[1]] +
grid[index[0], index[1]-1] +
grid[index[0], index[1]+1] )
return new_grid
if w==-1:
coef = float(grid.shape[1])/grid.shape[0]
const = 2.0 if coef==1. else 5.5
w = 2./(1+const/(coef*scale))
for index, fixed in np.ndenumerate(fix):
if fixed: continue
grid[index] = ((1-w) * grid[index] + 0.25 * w *
( grid[index[0]-1, index[1]] +
grid[index[0]+1, index[1]] +
grid[index[0], index[1]-1] +
grid[index[0], index[1]+1] ))
return grid
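# --- Added illustration (hypothetical usage, not part of the original
# module): relax a small SquareCable grid with the estimated optimal omega.
#
#   grid, fix = gen_sc_grid(4, 10, 1.)    # 4x4 core at u=1 inside a 10x10 box
#   for _ in range(200):
#       grid = update(grid, fix, scale=1) # SOR step; w=-1 picks omega itself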
# Base class
class PotentialGrid(object):
def update_grid(self, w=-1):
'''
Updates grid once.
'''
self.grid = update(self.grid, self.fix, self.scale, w)
def converge_grid(self, w=-1, accuracy=0.05):
'''
Updates grid until convergence.
'''
temporal_spread = 1.
spatial_spread = 0.
updates = 0
while temporal_spread > accuracy*spatial_spread:
horizontal_spread = np.absolute(np.diff(self.grid, axis=-1)).max()
vertical_spread = np.absolute(np.diff(self.grid, axis=0)).max()
spatial_spread = max(horizontal_spread, vertical_spread)
old_grid = np.copy(self.grid)
self.update_grid(w)
temporal_spread = np.linalg.norm( (self.grid - old_grid) )
updates += 1
if updates%1000==0:
print '\nspatial spread = ', spatial_spread
print 'temporal spread = ', temporal_spread
print 'updates = ', updates
return temporal_spread, spatial_spread, updates
def plot_grid(self, title=None):
'''
Plots grid's potential field. Parameter title sets the title of the
plot.
'''
if self.grid.shape[0] == self.grid.shape[1]:
colour, shrink, aspect = 'YlOrRd', 1, (1, 10)
else:
colour, shrink, aspect = 'RdYlBu', 0.5, (1.2, 8)
grid = self.dim['u'][0]*self.grid
xedge = (grid.shape[1]-2.)*self.dim['x'][0]/self.scale/2.
yedge = (grid.shape[0]-2.)*self.dim['x'][0]/self.scale/2.
fig = plt.figure()
ax = fig.add_subplot(111)
if title=='intro':
ax.set_title(r'EDM experiment plate assembly', fontsize=45)
elif title=='results':
ax.set_title(r'Electric Potential Field', fontsize=45)
axx = ax.imshow(grid, extent= [-xedge, xedge, -yedge, yedge],
aspect=aspect[0], interpolation='None',
cmap=plt.cm.get_cmap(colour))
ax.set_xlabel(r'$system\ size\ ({0})$'.format(self.dim['x'][1]),
fontsize=45)
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
ax.tick_params(axis='both', labelsize=40)
cbar = fig.colorbar(axx, shrink=shrink, aspect=aspect[1])
cbar.ax.tick_params(labelsize=40)
cbar.set_label(r'$Potential\ \phi\ ({0})$'.format(self.dim['u'][1]),
size=50)
def analyse_scale(self, w=-1, datapoints=20, accuracy=0.05, plot=True):
'''
Plots number of updates against scale for given relaxation parameter w,
number of datapoints and accuracy of convergence. If plot=False,
returns computed updates and scales.
Plots also maximum spatial spread of potential against scale.
'''
scales = np.linspace(10, 10*datapoints, datapoints)
mesh, updates = [], []
for s in scales:
print s
self.set_scale(s, silent=True)
data = self.converge_grid(w, accuracy)
updates.append(data[2])
mesh.append(data[1]*self.dim['u'][0])
if not plot: return scales, updates
if w=='j':
xaxis = scales*scales
lab= r'$scale^2\ \left(\frac{1}{(%g%s)^2}\right)$'% (
self.dim['x'][0], self.dim['x'][1])
else:
xaxis = scales
lab= r'$scale\ \left(\frac{1}{%g%s}\right)$'% (self.dim['x'][0],
self.dim['x'][1])
slope = updates[-1]/xaxis[-1]
fit = slope*xaxis
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_title(r'Number of updates against Scale', fontsize=45)
ax.plot(xaxis, updates, label=r'Numerical data')
ax.plot(xaxis, fit, label=r'Linear fit ($slope=%.2f$)'% (slope))
ax.set_xlabel(lab, fontsize=35)
ax.set_ylabel(r'$temporal\ updates$', fontsize=35)
ax.tick_params(axis='both', labelsize=25)
ax.legend(loc='upper left', prop={'size':40})
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_title(r'Spatial spread against Scale', fontsize=45)
ax.plot(scales, mesh)
ax.set_xlabel(r'$scale\ \left(\frac{1}{%g%s}\right)$'%
(self.dim['x'][0], self.dim['x'][1]), fontsize=40)
ax.set_ylabel(r'$spatial\ spread\ (%s)$'% (self.dim['u'][1]),
fontsize=40)
ax.tick_params(axis='both', labelsize=25)
def analyse_spread(self, w=-1, datapoints=10):
'''
Plots spatial spread of potential against accuracy of convergence for
given relaxation parameter w and number of datapoints.
'''
fig = plt.figure()
ax = fig.add_subplot(111)
#ax.set_title(r'Spatial spread against Accuracy of convergence',
# fontsize=75)
ax.set_xlabel(r'$fraction\ of\ spatial\ spread$', fontsize=40)
ax.invert_xaxis()
ax.set_ylabel(r'$spatial\ spread\ (%s)$'% (self.dim['u'][1]),
fontsize=40)
ax.tick_params(axis='both', labelsize=30)
accuracies = np.logspace(-1,-10,datapoints)
for scale in np.linspace(10,10*datapoints,datapoints):
self.set_scale(scale, silent=True)
spreads = []
for acc in accuracies:
t,s,u = self.converge_grid(w, acc)
spreads.append(s*self.dim['u'][0])
ax.plot(accuracies, spreads, label='Scale={0}'.format(scale))
return accuracies, spreads
def analyse_omega(self, guess, scales=-1, datapoints=20,
accuracy=0.05, plot=True):
'''
Plots number of updates against relaxation parameter for given initial
guess, system scales, number of datapoints and accuracy of convergence.
If plot=False, returns computed updates and relaxation parameters.
'''
if plot:
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_title(r'Optimal omega search at different scales',
fontsize=55)
ax.set_xlabel('$relaxation\ parameter\ \omega$', fontsize=37)
ax.set_ylabel('$temporal\ updates$', fontsize=37)
ax.tick_params(axis='both', labelsize=30)
ws = np.pad(np.array([guess]), datapoints/2, 'linear_ramp',
end_values=(guess-0.05, 1.99))
if scales==-1: scales = [self.scale]
for scale in scales:
updates = []
for w in ws:
self.set_scale(scale, silent=True)
data = self.converge_grid(w, accuracy)
updates.append(data[-1])
if plot: ax.plot(ws, updates, label=r'Scale ${0}$'.format(scale))
else: return ws, updates
if plot: ax.legend(loc='upper center', prop={'size':40})
def plot_omega_vs_scale(self, const=2., datapoints=20):
'''
Plots relaxation parameter against scale along with approximate fit for
given number of datapoints.
        The fitting is approximated by the user with the constant const which
        appears in the formula: 2/(1+const/(coef*scale)), where coef is the
        ratio of x and y dimensions of the system.
'''
coef = float(self.grid.shape[1]-2)/(self.grid.shape[0]-2)
scales = np.linspace(10, 50, datapoints)
fit = 2./(1+const/(coef*scales))
ws = []
for scale in scales:
self.set_scale(scale, silent=True)
guess = 2./(1+const/(coef*self.scale))
w, update = self.analyse_omega(guess, plot=False)
w = w[update.index(min(update))]
ws.append(w)
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_title(r'Relaxation parameter against scale', fontsize=55)
ax.set_xlabel(r'$scale\ \left(\frac{1}{%g%s}\right)$'%
(self.dim['x'][0], self.dim['x'][1]), fontsize=37)
ax.set_ylabel('$relaxation\ parameter\ \omega$', fontsize=37)
ax.tick_params(axis='both', labelsize=30)
ax.plot(scales, ws, label=r'Numerical data')
ax.plot(scales, fit, label=r'Approximate fit ($C=%.1f$)'% (const))
ax.legend(loc='upper left', prop={'size':40})
return scales, ws
|
nkoukou/University_Projects_Year_3
|
EDM_Assembly/base_class.py
|
Python
|
mit
| 11,366
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Very basic tool to generate a binary font from a TTF. Currently
# hardcodes a lot of things, so it's only really suitable for this demo.
#
# Assumes every glyph fits in an 8x8 box. Each glyph is encoded as
# an uncompressed 8-byte bitmap, preceded by a one-byte escapement.
#
import ImageFont, ImageDraw, Image
FONT = "04B_03__.TTF"
font = ImageFont.truetype(FONT, 8)
glyphs = map(chr, range(ord(' '), ord('z')+1))
print """/*
* 8x8 variable-width binary font, AUTOMATICALLY GENERATED
* by fontgen.py from %s
*/
static const uint8_t font_data[] = {""" % FONT
for g in glyphs:
width, height = font.getsize(g)
assert width <= 8
assert height <= 8
im = Image.new("RGB", (8,8))
draw = ImageDraw.Draw(im)
draw.text((0, 0), g, font=font)
bytes = [width]
for y in range(8):
byte = 0
for x in range(8):
if im.getpixel((x,y))[0]:
byte = byte | (1 << x)
bytes.append(byte)
print " %s" % "".join(["0x%02x," % x for x in bytes])
print "};"
|
Qusic/Companionship
|
fontgen.py
|
Python
|
mit
| 1,083
|
# Created by PyCharm Pro Edition
# User: Kaushik Talukdar
# Date: 27-03-2017
# Time: 05:25 PM
fastfood = ["momo", "roll", "chow", "pizza"]
print(fastfood)
print("\n")
# pop() removes and returns the last element
# print the popped element
print(fastfood.pop() + "\n")
# print the updated list, which now has fewer elements
print(fastfood)
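# --- Added note (illustration): pop() also accepts an index, e.g.
# fastfood.pop(0) would remove and return the first element, "momo".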
|
KT26/PythonCourse
|
2. Introducing Lists/6.py
|
Python
|
mit
| 380
|
"""
Given an array nums, partition it into two (contiguous) subarrays left and right so that:
Every element in left is less than or equal to every element in right.
left and right are non-empty.
left has the smallest possible size.
Return the length of left after such a partitioning. It is guaranteed that such a partitioning exists.
Example 1:
Input: nums = [5,0,3,8,6]
Output: 3
Explanation: left = [5,0,3], right = [8,6]
Example 2:
Input: nums = [1,1,1,0,6,12]
Output: 4
Explanation: left = [1,1,1,0], right = [6,12]
Note:
2 <= nums.length <= 30000
0 <= nums[i] <= 10^6
It is guaranteed there is at least one way to partition nums as described.
"""
class Solution(object):
def partitionDisjoint(self, A):
"""
:type A: List[int]
:rtype: int
"""
mx, ms = [], []
for n in A:
if not mx:
mx.append(n)
else:
mx.append(max(mx[-1], n))
for n in reversed(A):
if not ms:
ms.append(n)
else:
ms.append(min(ms[-1], n))
ms = list(reversed(ms))
for i, n in enumerate(mx):
if i >= len(A) - 1:
continue
n2 = ms[i + 1]
if n2 >= n:
return i + 1
return len(A)
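# --- Added illustration (not part of the original solution): a quick check
# against the two examples from the problem statement.
if __name__ == "__main__":
    sol = Solution()
    assert sol.partitionDisjoint([5, 0, 3, 8, 6]) == 3      # left = [5,0,3]
    assert sol.partitionDisjoint([1, 1, 1, 0, 6, 12]) == 4  # left = [1,1,1,0]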
|
franklingu/leetcode-solutions
|
questions/partition-array-into-disjoint-intervals/Solution.py
|
Python
|
mit
| 1,320
|
import re
from functools import reduce
from typing import Optional, Callable, Any, Type, Union
import wx # type: ignore
from gooey.gui import formatters, events
from gooey.gui.util import wx_util
from gooey.python_bindings.types import FormField
from gooey.util.functional import getin, ifPresent
from gooey.gui.validators import runValidator
from gooey.gui.components.util.wrapped_static_text import AutoWrappedStaticText
from gooey.gui.components.mouse import notifyMouseEvent
from gooey.python_bindings import types as t
class BaseWidget(wx.Panel):
widget_class: Any
def arrange(self, label, text):
raise NotImplementedError
def getWidget(self, parent: wx.Window, **options):
return self.widget_class(parent, **options)
def connectSignal(self):
raise NotImplementedError
def getSublayout(self, *args, **kwargs):
raise NotImplementedError
def setValue(self, value):
raise NotImplementedError
def setPlaceholder(self, value):
raise NotImplementedError
def receiveChange(self, *args, **kwargs):
raise NotImplementedError
def dispatchChange(self, value, **kwargs):
raise NotImplementedError
    def formatOutput(self, metadata, value):
        raise NotImplementedError
class TextContainer(BaseWidget):
# TODO: fix this busted-ass inheritance hierarchy.
    # Cracking at the seams for more advanced widgets
# problems:
# - all the usual textbook problems of inheritance
# - assumes there will only ever be ONE widget created
# - assumes those widgets are all created in `getWidget`
# - all the above make for extremely awkward lifecycle management
# - no clear point at which binding is correct.
# - I think the core problem here is that I couple the interface
# for shared presentation layout with the specification of
# a behavioral interface
# - This should be broken apart.
# - presentation can be ad-hoc or composed
# - behavioral just needs a typeclass of get/set/format for Gooey's purposes
widget_class = None # type: ignore
def __init__(self, parent, widgetInfo, *args, **kwargs):
super(TextContainer, self).__init__(parent, *args, **kwargs)
self.info = widgetInfo
self._id = widgetInfo['id']
self.widgetInfo = widgetInfo
self._meta = widgetInfo['data']
self._options = widgetInfo['options']
self.label = wx.StaticText(self, label=widgetInfo['data']['display_name'])
self.help_text = AutoWrappedStaticText(self, label=widgetInfo['data']['help'] or '')
self.error = AutoWrappedStaticText(self, label='')
self.error.Hide()
self.widget = self.getWidget(self)
self.layout = self.arrange(*args, **kwargs)
self.setColors()
self.SetSizer(self.layout)
self.bindMouseEvents()
self.Bind(wx.EVT_SIZE, self.onSize)
# 1.0.7 initial_value should supersede default when both are present
if self._options.get('initial_value') is not None:
self.setValue(self._options['initial_value'])
        # Checking for None instead of truthiness means False-evaluated defaults can be used.
elif self._meta['default'] is not None:
self.setValue(self._meta['default'])
if self._options.get('placeholder'):
self.setPlaceholder(self._options.get('placeholder'))
self.onComponentInitialized()
def onComponentInitialized(self):
pass
def bindMouseEvents(self):
"""
Send any LEFT DOWN mouse events to interested
listeners via pubsub. see: gooey.gui.mouse for background.
"""
self.Bind(wx.EVT_LEFT_DOWN, notifyMouseEvent)
self.label.Bind(wx.EVT_LEFT_DOWN, notifyMouseEvent)
self.help_text.Bind(wx.EVT_LEFT_DOWN, notifyMouseEvent)
self.error.Bind(wx.EVT_LEFT_DOWN, notifyMouseEvent)
self.widget.Bind(wx.EVT_LEFT_DOWN, notifyMouseEvent)
def arrange(self, *args, **kwargs):
wx_util.make_bold(self.label)
wx_util.withColor(self.label, self._options['label_color'])
wx_util.withColor(self.help_text, self._options['help_color'])
wx_util.withColor(self.error, self._options['error_color'])
self.help_text.SetMinSize((0,-1))
layout = wx.BoxSizer(wx.VERTICAL)
if self._options.get('show_label', True):
layout.Add(self.label, 0, wx.EXPAND)
else:
self.label.Show(False)
layout.AddStretchSpacer(1)
layout.AddSpacer(2)
if self.help_text and self._options.get('show_help', True):
layout.Add(self.help_text, 1, wx.EXPAND)
layout.AddSpacer(2)
else:
self.help_text.Show(False)
layout.AddStretchSpacer(1)
layout.Add(self.getSublayout(), 0, wx.EXPAND)
layout.Add(self.error, 1, wx.EXPAND)
# self.error.SetLabel("HELLOOOOO??")
# self.error.Show()
# print(self.error.Shown)
return layout
def setColors(self):
wx_util.make_bold(self.label)
wx_util.withColor(self.label, self._options['label_color'])
wx_util.withColor(self.help_text, self._options['help_color'])
wx_util.withColor(self.error, self._options['error_color'])
if self._options.get('label_bg_color'):
self.label.SetBackgroundColour(self._options.get('label_bg_color'))
if self._options.get('help_bg_color'):
self.help_text.SetBackgroundColour(self._options.get('help_bg_color'))
if self._options.get('error_bg_color'):
self.error.SetBackgroundColour(self._options.get('error_bg_color'))
def getWidget(self, *args, **options):
return self.widget_class(*args, **options)
def getWidgetValue(self):
raise NotImplementedError
def getSublayout(self, *args, **kwargs):
layout = wx.BoxSizer(wx.HORIZONTAL)
layout.Add(self.widget, 1, wx.EXPAND)
return layout
def onSize(self, event):
# print(self.GetSize())
# self.error.Wrap(self.GetSize().width)
# self.help_text.Wrap(500)
# self.Layout()
event.Skip()
def getUiState(self) -> t.FormField:
return t.TextField(
id=self._id,
type=self.widgetInfo['type'],
value=self.getWidgetValue(),
placeholder=self.widget.widget.GetHint(),
error=self.error.GetLabel().replace('\n', ' '),
enabled=self.IsEnabled(),
visible=self.IsShown()
)
def syncUiState(self, state: FormField): # type: ignore
self.widget.setValue(state['value']) # type: ignore
self.error.SetLabel(state['error'] or '')
        self.error.Show(state['error'] is not None and state['error'] != '')
def getValue(self) -> t.FieldValue:
regexFunc: Callable[[str], bool] = lambda x: bool(re.match(userValidator, x))
userValidator = getin(self._options, ['validator', 'test'], 'True')
message = getin(self._options, ['validator', 'message'], '')
testFunc = regexFunc \
if getin(self._options, ['validator', 'type'], None) == 'RegexValidator'\
else eval('lambda user_input: bool(%s)' % userValidator)
satisfies = testFunc if self._meta['required'] else ifPresent(testFunc)
value = self.getWidgetValue()
return t.FieldValue( # type: ignore
id=self._id,
cmd=self.formatOutput(self._meta, value),
meta=self._meta,
            rawValue=value,
# type=self.info['type'],
enabled=self.IsEnabled(),
visible=self.IsShown(),
test= runValidator(satisfies, value),
error=None if runValidator(satisfies, value) else message,
clitype=('positional'
if self._meta['required'] and not self._meta['commands']
else 'optional')
)
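    # --- Added note (assumed shape, for illustration only): the 'validator'
    # entry consumed by getValue above looks roughly like
    #   {'validator': {'type': 'RegexValidator',
    #                  'test': '^[0-9]+$',
    #                  'message': 'digits only'}}
    # Non-regex validators supply a Python expression over `user_input`, e.g.
    #   {'validator': {'test': 'int(user_input) > 0', 'message': '...'}}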
def setValue(self, value):
self.widget.SetValue(value)
def setPlaceholder(self, value):
if getattr(self.widget, 'SetHint', None):
self.widget.SetHint(value)
def setErrorString(self, message):
self.error.SetLabel(message)
self.error.Wrap(self.Size.width)
self.Layout()
def showErrorString(self, b):
self.error.Wrap(self.Size.width)
self.error.Show(b)
def setOptions(self, values):
return None
    def receiveChange(self, metadata, value):
        raise NotImplementedError
def dispatchChange(self, value, **kwargs):
raise NotImplementedError
def formatOutput(self, metadata, value) -> str:
raise NotImplementedError
class BaseChooser(TextContainer):
""" Base Class for the Chooser widget types """
def setValue(self, value):
self.widget.setValue(value)
def setPlaceholder(self, value):
self.widget.SetHint(value)
def getWidgetValue(self):
return self.widget.getValue()
    def formatOutput(self, metadata, value):
        return formatters.general(metadata, value)
def getUiState(self) -> t.FormField:
btn: wx.Button = self.widget.button # type: ignore
return t.Chooser(
id=self._id,
type=self.widgetInfo['type'],
value=self.widget.getValue(),
btn_label=btn.GetLabel(),
error=self.error.GetLabel() or None,
enabled=self.IsEnabled(),
visible=self.IsShown()
)
|
chriskiehl/Gooey
|
gooey/gui/components/widgets/bases.py
|
Python
|
mit
| 9,873
|
from ubluepy import Scanner, constants
def bytes_to_str(bytes):
string = ""
for b in bytes:
string += chr(b)
return string
def get_device_names(scan_entries):
dev_names = []
for e in scan_entries:
scan = e.getScanData()
if scan:
for s in scan:
if s[0] == constants.ad_types.AD_TYPE_COMPLETE_LOCAL_NAME:
dev_names.append((e, bytes_to_str(s[2])))
return dev_names
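# --- Added note (illustration, inferred from the usage above): each item of
# ScanEntry.getScanData() is an (ad_type, description, value) tuple, so s[0]
# is the advertising-data type and s[2] carries the raw name bytes.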
def find_device_by_name(name):
s = Scanner()
scan_res = s.scan(100)
device_names = get_device_names(scan_res)
for dev in device_names:
if name == dev[1]:
return dev[0]
# >>> res = find_device_by_name("micr")
# >>> if res:
# ... print("address:", res.addr())
# ... print("address type:", res.addr_type())
# ... print("rssi:", res.rssi())
# ...
# ...
# ...
# address: c2:73:61:89:24:45
# address type: 1
# rssi: -26
|
adafruit/micropython
|
ports/nrf/examples/ubluepy_scan.py
|
Python
|
mit
| 923
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Feed.feed_type'
db.delete_column('syndication_feed', 'feed_type_id')
def backwards(self, orm):
# Adding field 'Feed.feed_type'
db.add_column('syndication_feed', 'feed_type',
self.gf('django.db.models.fields.related.ForeignKey')(default='', to=orm['syndication.FeedType']),
keep_default=False)
models = {
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'syndication.feed': {
'Meta': {'object_name': 'Feed'},
'content_type': ('django.db.models.fields.CharField', [], {'default': "'application/xml'", 'max_length': '100'}),
'feed_url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'site': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False'}),
'template': ('django.db.models.fields.files.FileField', [], {'max_length': '100'})
},
'syndication.feedtype': {
'Meta': {'object_name': 'FeedType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['syndication']
|
wtrevino/django-listings
|
listings/syndication/migrations/0007_auto__del_field_feed_feed_type.py
|
Python
|
mit
| 1,955
|
#!/usr/bin/python
r"""
PYTHONRC
========
Initialization script for the interactive Python interpreter. Its main purpose
is to enhance the overall user experience when working in such an environment
by adding some niceties to the standard console.
It also works with IPython and BPython, although its utility in those kinds
of scenarios can be argued.
Tested in GNU/Linux with Python versions 2.7 and 3.4.
Please read the Installation section below.
Features
--------
- User input completion
+ Introduces a completion mechanism for inputted commands in Python 2.
+ In Python 3, where the standard console is a lot nicer, it just
impersonates the default completion machinery to keep the consistency with
the behavior in Python 2 (and so it's still possible to adapt it to the
user's needs).
- Command History
+ Creates a callable, singleton object called `history`, placing it into
the `__builtins__` object to make it easily available, which enables the
handling of the command history (saving some input lines to a file of your
choice, listing the commands introduced so far, etc.). Try simply
`history()` on the Python prompt to see it in action; inspect its members
(with `dir(history)` or `help(history.write)`) for more information.
- Color prompt
+ Puts a colorful prompt in place, if the terminal supports it.
- Implementation of a bash's "operate-and-get-next" clone
+ Enables a quick re-edition of a code block from the history by
successive keypresses of the `Ctrl-o` hotkey.
Installation
------------
- You must define in your environment (in GNU/Linux and MacOS X that usually
means your `~/.bashrc` file) the variable 'PYTHONSTARTUP' containing the path
to `pythonrc.py`.
- It is also highly recommended to define the variable 'PYTHON_HISTORY_FILE'.
Remember that BPython (unlike the standard interpreter or IPython) ignores that
variable, so you'll have to configure it as well by other means to be able to
use the same history file there (for instance, in Linux, the file
`~/.config/bpython/config` is a good place to start, but please read BPython's
documentation).
### Example configurations
- Extract of `~/.bashrc`
```sh
# python
export PYTHONSTARTUP=~/.python/pythonrc.py
export PYTHON_HISTORY_FILE=~/.python/.python_history
## You may want to also uncomment some of these lines if using an old
## version of virtualenvwrapper
# export VIRTUALENVWRAPPER_PYTHON=/usr/bin/python3.4
# export WORKON_HOME=~/.python/virtualenvs
# source $(which virtualenvwrapper.sh)
```
- Extract of `~/.config/bpython/config`
```
[general]
color_scheme = default
hist_file = ~/.python/.python_history
hist_length = 1000
```
Bugs / Caveats / Future enhancements
------------------------------------
- No module/package introspection for the last argument in commands of the form
`from <package> import <not_completing_this>` (this, in fact, may not be such
a bad thing, because it avoids executing side effects, e.g. modules' init code).
- Depending on the user's system, the compilation of the packages' and modules'
list for completing `import ...` and `from ... import ...` commands can take a
long time, especially the first time it is invoked.
- When completing things like a method's name, the default is to also include
the closing parenthesis along with the opening one, but the cursor is placed
after it no matter what, instead of between them. This is because of the
python module `readline`'s limitations.
You can turn off the inclusion of the closing parenthesis; if you do so, you
might be also interested in modifying the variable called
`dict_keywords_postfix` (especially the strings that act as that dictionary's
indexes).
- IPython has its own `%history` magic. I did my best to not interfere with
it, but I don't know the actual consequences. Also, it's debatable if it
even makes sense to use this file with IPython and/or BPython (though having
a unified history for all the environments is really nice).
You could define some bash aliases like
```sh
alias ipython='PYTHONSTARTUP="" ipython'
alias bpython='PYTHONSTARTUP="" bpython'
```
to be on the safer side.
- Could have used the module `six` for better clarity. Right now it uses my own
made up stubs to work on both Python 2 and 3.
- Needs better comments and documentation, especially the part on history
handling.
- Probably a lot more. Feel free to file bug reports ;-)
"""
def init():
# color prompt
import sys
import os
term_with_colors = ['xterm', 'xterm-color', 'xterm-256color', 'linux',
'screen', 'screen-256color', 'screen-bce']
red = ''
green = ''
reset = ''
if os.environ.get('TERM') in term_with_colors:
escapes_pattern = '\001\033[%sm\002' # \001 and \002 mark non-printing
red = escapes_pattern % '31'
green = escapes_pattern % '32'
reset = escapes_pattern % '0'
sys.ps1 = red + '>>> ' + reset
sys.ps2 = green + '... ' + reset
red = red.strip('\001\002')
green = green.strip('\001\002')
reset = reset.strip('\001\002')
# readline (tab-completion, history)
try:
import readline
except ImportError:
print(red + "Module 'readline' not available. Skipping user customizations." + reset)
return
import rlcompleter
import atexit
from pwd import getpwall
from os.path import isfile, isdir, expanduser, \
join as joinpath, split as splitpath, sep as pathsep
default_history_file = '~/.pythonhist'
majver = sys.version_info[0]
# Both BPython and Django shell change the nature of the __builtins__
# object. This hack workarounds that:
def builtin_setattr(attr, value):
if hasattr(__builtins__, '__dict__'):
setattr(__builtins__, attr, value)
else:
__builtins__[attr] = value
def builtin_getattr(attr):
if hasattr(__builtins__, '__dict__'):
return getattr(__builtins__, attr)
else:
return __builtins__[attr]
# My own "six" library, where I define the following stubs:
# * myrange for xrange() (python2) / range() (python3)
# * exec_stub for exec()
# * iteritems for dict.iteritems() (python2) / list(dict.items()) (python3)
# I could have done "from six import iteritems" and such instead of this
if majver == 2:
myrange = xrange
def exec_stub(textcode, globalz=None, localz=None):
            # the parentheses make this line valid Python 3 syntax; in Python 2 they do nothing
exec (textcode) in globalz, localz
def iteritems(d):
return d.iteritems()
elif majver == 3:
myrange = range
# def exec_stub(textcode, globalz=None, localz=None):
# # the "in" & "," make it valid python2 syntax, do nothing useful
# exec(textcode, globalz, localz) in globalz #, localz
# the three previous lines work, but this is better
exec_stub = builtin_getattr('exec')
def iteritems(d):
return list(d.items())
# AUXILIARY CLASSES
# History management
class History:
set_length = readline.set_history_length
get_length = readline.get_history_length
get_current_length = readline.get_current_history_length
get_item = readline.get_history_item
write = readline.write_history_file
def __init__(self, path=default_history_file, length=500):
self.path = path
self.reload(path)
self.set_length(length)
def __exit__(self):
print("Saving history (%s)..." % self.path)
self.write(expanduser(self.path))
def __repr__(self):
"""print out current history information"""
# length = self.get_current_length()
# command = self.get_item(length)
# if command == 'history':
# return "\n".join(self.get_item(i)
# for i in myrange(1, length+1))
# else:
# return '<%s instance>' % str(self.__class__)
return '<%s instance>' % str(self.__class__)
def __call__(self, pos=None, end=None):
"""print out current history information with line number"""
if not pos:
pos = 1
elif not end:
end = pos
for i, item in self.iterator(pos, end, enumerate_it=True):
print('%i:\t%s' % (i, item))
def iterator(self, pos, end, enumerate_it=False):
length = self.get_current_length()
if not pos:
pos = 1
if not end:
end = length
pos = min(pos, length)
if pos < 0:
pos = max(1, pos + length + 1)
end = min(end, length)
if end < 0:
end = max(1, end + length + 1)
if enumerate_it:
return ((i, self.get_item(i)) for i in myrange(pos, end + 1))
else:
return (self.get_item(i) for i in myrange(pos, end + 1))
def reload(self, path=""):
"""clear the current history and reload it from saved"""
readline.clear_history()
if isfile(path):
self.path = path
readline.read_history_file(expanduser(self.path))
def save(self, filename, pos=None, end=None):
"""write history number from pos to end into filename file"""
with open(filename, 'w') as f:
for item in self.iterator(pos, end):
f.write(item)
f.write('\n')
def execute(self, pos, end=None):
"""execute history number from pos to end"""
if not end:
end = pos
commands = []
for item in self.iterator(pos, end):
commands.append(item)
readline.add_history(item)
exec_stub("\n".join(commands), globals())
# comment the previous two lines and uncomment those below
# if you prefer to re-add to history just the commands that
# executed without problems
# try:
# exec_stub("\n".join(commands), globals())
# except:
# raise
# else:
# for item in commands:
            #         readline.add_history(item)
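    # --- Editor's note (hedged usage sketch, not part of the original file):
    # init() installs an instance of this class as the builtin `history`
    # further below. With the defaults above:
    #   history()                 # print the whole history with line numbers
    #   history(-5)               # print the 5th-from-last entry
    #   history(10, 20)           # print entries 10 through 20
    #   history.execute(42)       # re-execute entry 42 (and re-add it)
    #   history.save('snip.py', 10, 20)  # write entries 10..20 to 'snip.py'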
# Activate completion and make it smarter
class Irlcompleter(rlcompleter.Completer):
"""
This class enables the insertion of "indentation" if there's no text
for completion.
The default "indentation" is four spaces. You can initialize with '\t'
as the tab if you wish to use a genuine tab.
Also, compared to the default rlcompleter, this one performs some
additional useful things, like file completion for string constants
and addition of some decorations to keywords (namely, closing
parenthesis, and whatever you've defined in dict_keywords_postfix --
spaces, colons, etc.)
"""
def __init__(
self,
indent_str=' ',
delims=readline.get_completer_delims(),
binds=('tab: complete', ),
dict_keywords_postfix={" ": ["import", "from"], },
add_closing_parenthesis=True
):
rlcompleter.Completer.__init__(self, namespace=globals())
readline.set_completer_delims(delims)
self.indent_str_list = [indent_str, None]
for bind in binds:
readline.parse_and_bind(bind)
self.dict_keywords_postfix = dict_keywords_postfix
self.add_closing_parenthesis = add_closing_parenthesis
def complete(self, text, state):
line = readline.get_line_buffer()
stripped_line = line.lstrip()
# libraries
if stripped_line.startswith('import '):
value = self.complete_libs(text, state)
elif stripped_line.startswith('from '):
pos = readline.get_begidx()
# end = readline.get_endidx()
if line[:pos].strip() == 'from':
value = self.complete_libs(text, state) + " "
elif state == 0 and line.find(' import ') == -1:
value = 'import '
else:
# Here we could do module introspection (ugh)
value = None
# indentation, files and keywords/identifiers
elif text == '':
value = self.indent_str_list[state]
elif text[0] in ('"', "'"):
value = self.complete_files(text, state)
else:
value = self.complete_keywords(text, state)
return value
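        # --- Editor's note (hedged summary of the dispatch above, using
        # hypothetical buffers): "import o<TAB>" goes to complete_libs();
        # "from os import pa<TAB>" deliberately completes to nothing (no
        # module introspection, see the caveats in the module docstring);
        # "'~/Doc<TAB>" goes to complete_files(); TAB on an empty prefix
        # inserts indent_str.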
def complete_keywords(self, text, state):
txt = rlcompleter.Completer.complete(self, text, state)
if txt is None:
return None
if txt.endswith('('):
if self.add_closing_parenthesis:
return txt + ')'
else:
return txt
for postfix, words in iteritems(self.dict_keywords_postfix):
if txt in words:
return txt + postfix
return txt
def complete_files(self, text, state):
str_delim = text[0]
path = text[1:]
if path.startswith("~/"):
path = expanduser("~/") + path[2:]
elif path.startswith("~"):
i = path.find(pathsep)
if i > 0:
path = expanduser(path[:i]) + path[i:]
else:
return [
str_delim + "~" + i[0] + pathsep
for i in getpwall()
if i[0].startswith(path[1:])
][state]
dir, fname = splitpath(path)
if not dir:
dir = os.curdir
return [
str_delim + joinpath(dir, i)
for i in os.listdir(dir)
if i.startswith(fname)
][state]
def complete_libs(self, text, state):
libs = {}
for i in sys.path:
try:
if i == '':
i = os.curdir
files = os.listdir(i)
for j in files:
filename = joinpath(i, j)
if isfile(filename):
for s in [".py", ".pyc", ".so"]:
if j.endswith(s):
j = j[:-len(s)]
pos = j.find(".")
if pos > 0:
j = j[:pos]
libs[j] = None
break
elif isdir(filename):
for s in ["__init__.py", "__init__.pyc"]:
if isfile(joinpath(filename, s)):
libs[j] = None
except OSError:
pass
for j in sys.builtin_module_names:
libs[j] = None
libs = sorted(j for j in libs.keys() if j.startswith(text))
return libs[state]
# DEFINITIONS:
# history file path and length
history_length = 1000
history_path = os.getenv("PYTHON_HISTORY_FILE", default_history_file)
# bindings for readline (assign completion key, etc.)
# readline_binds = (
# 'tab: tab_complete',
# '"\C-o": operate-and-get-next', # exists in bash but not in readline
# )
# completion delimiters
# we erase ", ', ~ and / so file completion works
# readline_delims = ' \t\n`!@#$%^&*()-=+[{]}\\|;:,<>?'
readline_delims = readline.get_completer_delims()\
.replace("~", "", 1)\
.replace("/", "", 1)\
.replace("'", "", 1)\
.replace('"', '', 1)
# dictionary of keywords to be postfixed by a string
dict_keywords_postfix = {
":": ["else", "try", "finally", ],
" ": ["import", "from", "or", "and", "not", "if", "elif", ],
" ():": ["def", ] # "class", ]
}
# DO IT
completer = Irlcompleter(delims=readline_delims, # binds=readline_binds,
dict_keywords_postfix=dict_keywords_postfix)
readline.set_completer(completer.complete)
if not os.access(history_path, os.F_OK):
print(green + 'History file %s does not exist. Creating it...' % history_path + reset)
with open(history_path, 'w') as f:
pass
elif not os.access(history_path, os.R_OK|os.W_OK):
print(red + 'History file %s has wrong permissions!' % history_path + reset)
history = History(history_path, history_length)
#
# Hack: Implementation of bash-like "operate-and-get-next" (Ctrl-o)
#
try:
# We'll hook the C functions that we need from the underlying
# libreadline implementation that aren't exposed by the readline
# python module.
from ctypes import CDLL, CFUNCTYPE, c_int
librl = CDLL(readline.__file__)
rl_callback = CFUNCTYPE(c_int, c_int, c_int)
rl_int_void = CFUNCTYPE(c_int)
readline.add_defun = librl.rl_add_defun # didn't bother to define args
readline.accept_line = rl_callback(librl.rl_newline)
readline.previous_history = rl_callback(librl.rl_get_previous_history)
readline.where_history = rl_int_void(librl.where_history)
def pre_input_hook_factory(offset, char):
def rewind_history_pre_input_hook():
# Uninstall this hook, rewind history and redisplay
readline.set_pre_input_hook(None)
result = readline.previous_history(offset, char)
readline.redisplay()
return result
return rewind_history_pre_input_hook
@rl_callback
def operate_and_get_next(count, char):
current_line = readline.where_history()
offset = readline.get_current_history_length() - current_line
# Accept the current line and set the hook to rewind history
result = readline.accept_line(1, char)
readline.set_pre_input_hook(pre_input_hook_factory(offset, char))
return result
# Hook our function to Ctrl-o, and hold a reference to it to avoid GC
readline.add_defun('operate-and-get-next', operate_and_get_next, ord("O") & 0x1f)
history._readline_functions = [operate_and_get_next]
except (ImportError, OSError, AttributeError) as e:
print(red + """
        Couldn't bridge the needed methods from the binary 'readline' module,
        nor properly install our implementation of 'operate-and-get-next'.
Skipping the hack. Underlying error:
""" + reset + repr(e))
builtin_setattr('history', history)
atexit.register(history.__exit__)
# run the initialization and clean up the environment afterwards
init()
del init
|
0xf4/pythonrc
|
pythonrc.py
|
Python
|
mit
| 19,310
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-02 15:02
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('talks', '0004_auto_20170326_1755'),
]
operations = [
migrations.AlterField(
model_name='talk',
name='fav_count',
field=models.PositiveIntegerField(default=0, verbose_name='favorite count'),
),
migrations.AlterField(
model_name='talk',
name='view_count',
field=models.PositiveIntegerField(default=0, verbose_name='view count'),
),
migrations.AlterField(
model_name='talk',
name='vote_count',
field=models.PositiveIntegerField(default=0, verbose_name='vote count'),
),
]
|
tlksio/tlksio
|
talks/migrations/0005_auto_20170402_1502.py
|
Python
|
mit
| 866
|
"""
Django settings for blog project.
Generated by 'django-admin startproject' using Django 1.9.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# BASE_DIR = "/Users/jmitch/desktop/blog/src/"
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'sm@g)(fbwdh5wc*xe@j++m9rh^uza5se9a57c5ptwkg*b@ki0x'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['tienduong.pythonanywhere.com', '127.0.0.1', '10.169.3.13', '172.20.10.5', '172.20.10.10']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# third party
'crispy_forms',
'markdown_deux',
'pagedown',
'rest_framework',
'django_tables2',
# local apps
'comments',
'posts',
'pingow_api',
]
CRISPY_TEMPLATE_PACK = 'bootstrap3'
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
LOGIN_URL = "/login/"
ROOT_URLCONF = 'blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Singapore'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
#'/var/www/static/',
]
STATIC_ROOT = os.path.join(os.path.dirname(BASE_DIR), "static_cdn")
MEDIA_URL = "/media/"
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), "media_cdn")
|
our-iot-project-org/pingow-web-service
|
src/blog/settings.py
|
Python
|
mit
| 3,829
|
from axiom.test.historic.stubloader import StubbedTest
from xquotient.mail import MailTransferAgent
from axiom.userbase import LoginSystem
class MTAUpgraderTest(StubbedTest):
def testMTA2to3(self):
"""
Make sure MailTransferAgent upgraded OK and that its
"userbase" attribute refers to the store's userbase.
"""
mta = self.store.findUnique(MailTransferAgent)
self.assertIdentical(mta.userbase,
self.store.findUnique(LoginSystem))
|
twisted/quotient
|
xquotient/test/historic/test_mta2to3.py
|
Python
|
mit
| 512
|
from sys import stdout
from collections import defaultdict
from .parse import main, HTMLTag
def maybe_call(f, *args, **kwargs):
if callable(f):
return f(*args, **kwargs)
return f
class Compiler(object):
def __init__(self, stream):
self.stream = stream
self.blocks = []
self.deferred_endif = ()
self.tmpvar_count = 0
def start(self, parser):
"""
Called by the parser to start compiling.
"""
self.parser = parser
def put_tmpvar(self, val):
"""
Allocate a temporary variable, output assignment, and return the
variable name.
"""
name = '_jade_%d' % self.tmpvar_count
self.tmpvar_count += 1
self.stream.write(u'{%% set %s = %s %%}' % (name, val))
return name
def dismiss_endif(self):
"""
Dismiss an endif, only outputting the newlines.
The parser doesn't take care of if-elif-else matching. Instead, it
will try to close the if block before opening a new elif or else
block. Thus the endif block needs to be deferred, along with the
newlines after it. When non-empty, self.deferred_endif is a list
[endif, newlines].
"""
if self.deferred_endif:
self.stream.write(self.deferred_endif[1])
self.deferred_endif = ()
def put_endif(self):
"""
Output an endif.
"""
if self.deferred_endif:
self.stream.write(''.join(self.deferred_endif))
self.deferred_endif = ()
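    # --- Editor's note (hedged illustration of the deferred-endif dance):
    # end_block() on an 'if'/'elif' defers '{% endif %}' instead of writing
    # it. A following 'elif'/'else' block dismisses it (keeping only the
    # newlines), while any other output flushes it via put_endif(). Thus
    # 'if x' ... 'else' compiles to '{% if x %}...{% else %}...{% endif %}'
    # with no stray endif between the branches.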
def start_block(self, tag):
"""
Called by the parser to start a block. `tag` can be either an HTMLTag
or a ControlTag.
"""
if tag.name in ('elif', 'else'):
self.dismiss_endif()
else:
self.put_endif()
self.blocks.append(tag)
if isinstance(tag, HTMLTag):
self.stream.write(u'<%s' % tag.name)
for a in tag.attr:
if isinstance(a, basestring):
self.literal(a)
continue
k, v = a
if k == 'id':
# tag(id=xxx) takes precedence over tag#xxx
tag.id_ = None
elif k == 'class':
# merge tag(class=xxx) with tag.xxx
self.stream.write(
u' class="%s{{ _jade_class(%s) |escape}}"' %
(tag.class_ and tag.class_ + u' ' or u'', v))
tag.class_ = None
continue
self.stream.write(u' %s="{{ %s |escape}}"' % (k, v))
if tag.id_:
self.stream.write(u' id="%s"' % tag.id_)
if tag.class_:
self.stream.write(u' class="%s"' % tag.class_)
self.stream.write('>')
elif tag.name == 'case':
tag.var = self.put_tmpvar(tag.head)
tag.seen_when = tag.seen_default = False
elif tag.name in ('when', 'default'):
case_tag = len(self.blocks) >= 2 and self.blocks[-2]
if not case_tag or case_tag.name != 'case':
raise self.parser.error(
'%s tag not child of case tag' % tag.name)
if tag.name == 'when':
if case_tag.seen_default:
raise self.parser.error('when tag after default tag')
self.stream.write(u'{%% %s %s == %s %%}' % (
'elif' if case_tag.seen_when else 'if',
case_tag.var, tag.head))
case_tag.seen_when = True
else:
if case_tag.seen_default:
raise self.parser.error('duplicate default tag')
if not case_tag.seen_when:
raise self.parser.error('default tag before when tag')
self.stream.write(u'{% else %}')
case_tag.seen_default = True
else:
self.stream.write(maybe_call(control_blocks[tag.name][0], tag))
def end_block(self):
"""
Called by the parser to end a block. The parser doesn't keep track of
active blocks.
"""
tag = self.blocks.pop()
if isinstance(tag, HTMLTag):
self.stream.write('</%s>' % tag.name)
elif tag.name in ('if', 'elif'):
self.deferred_endif = [u'{% endif %}', '']
elif tag.name == 'case':
if not tag.seen_when:
raise self.parser.error('case tag has no when child')
self.stream.write('{% endif %}')
elif tag.name in ('when', 'default'):
pass
else:
self.stream.write(maybe_call(control_blocks[tag.name][1], tag))
def literal(self, text):
"""
        Called by the parser to output literal text.
"""
self.put_endif()
self.stream.write(text)
def newlines(self, text):
"""
Called by the parser to output newlines that are part of the indent.
"""
if self.deferred_endif:
self.deferred_endif[1] = text
else:
self.literal(text)
def end(self):
"""
Called by the parser to terminate compiling.
"""
self.put_endif()
doctypes = {
'5': '<!DOCTYPE html>',
'default': '<!DOCTYPE html>',
'xml': '<?xml version="1.0" encoding="utf-8" ?>',
'transitional': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 '
'Transitional//EN" "http://www.w3.org/TR/xhtml1/'
'DTD/xhtml1-transitional.dtd">',
'strict': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 '
'Strict//EN" "http://www.w3.org/TR/xhtml1/'
'DTD/xhtml1-strict.dtd">',
'frameset': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 '
'Frameset//EN" "http://www.w3.org/TR/xhtml1/'
'DTD/xhtml1-frameset.dtd">',
'1.1': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" '
'"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">',
'basic': '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML Basic '
'1.1//EN" "http://www.w3.org/TR/xhtml-basic/xhtml-basic11.dtd">',
'mobile': '<!DOCTYPE html PUBLIC "-//WAPFORUM//DTD XHTML Mobile 1.2//EN" '
'"http://www.openmobilealliance.org/tech/DTD/'
'xhtml-mobile12.dtd">'
}
def default_start(tag):
return '{%% %s %s %%}' % (tag.name, tag.head)
def default_end(tag):
return '{%% end%s %%}' % tag.name
def doctype(tag):
return doctypes.get(tag.head.lower() or 'default',
'<!DOCTYPE %s>' % tag.head)
control_blocks = defaultdict(
lambda: (default_start, default_end),
{
'=': ('{{ ', ' }}'),
'!=': ('{{ ', ' |safe}}'),
'-': ('{% ', ' %}'),
'|': ('', ''),
'//': (lambda tag: '<!--%s' % tag.head,
'-->'),
'//-': ('{#', '#}'),
':': (lambda tag: '{%% filter %s %%}' % tag.head,
'{% endfilter %}'),
'mixin': (lambda tag: '{%% macro %s %%}' % tag.head,
'{% endmacro %}'),
'prepend': (lambda tag: '{%% block %s %%}' % tag.head,
'{{ super() }} {% endblock %}'),
'append': (lambda tag: '{%% block %s %%} {{ super() }}' % tag.head,
'{% endblock %}'),
'extends': (default_start, ''),
'doctype': (doctype, ''),
'else': ('{% else %}', '{% endif %}'),
})
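# --- Editor's note (hedged illustration of the control_blocks table above):
# tags not listed fall back to (default_start, default_end), so a 'for' tag
# with head 'x in xs' opens '{% for x in xs %}' and closes '{% endfor %}'.
# Entries may be plain strings or callables taking the tag, e.g. ':markdown'
# compiles to '{% filter markdown %}' ... '{% endfilter %}'.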
if __name__ == '__main__':
main(Compiler(stdout))
|
xiaq/jadepy
|
jade/compile.py
|
Python
|
mit
| 7,708
|
"""
airPy is a flight controller based on pyboard and written in micropython.
The MIT License (MIT)
Copyright (c) 2016 Fabrizio Scimia, fabrizio.scimia@gmail.com
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import struct
class SaveTxCalibration:
MESSAGE_TYPE_ID = 110
def __init__(self):
pass
@staticmethod
def decode_payload(payload):
"""
Decode message payload
:param payload: byte stream representing the message payload
        :return: a list of 3 lists of floats representing the PWM threshold values for each of the N active channels
[[min threshold values],[max threshold values], [center threshold values]]
"""
        # 4 bytes per float * 3 sets of thresholds
byte_per_thd_set = int(len(payload)/3)
min_thd_vals = [0.0 for i in range(0, int(byte_per_thd_set/4))]
max_thd_vals = [0.0 for i in range(0, int(byte_per_thd_set/4))]
center_thd_vals = [0.0 for i in range(0, int(byte_per_thd_set/4))]
for i in range(0, int(byte_per_thd_set/4)):
min_thd_vals[i] = struct.unpack('>f', payload[i*4:i*4 + 4])[0]
for i in range(0, int(byte_per_thd_set/4)):
max_thd_vals[i] = struct.unpack('>f', payload[byte_per_thd_set + i*4:i*4 + 4 + byte_per_thd_set])[0]
for i in range(0, int(byte_per_thd_set/4)):
center_thd_vals[i] = struct.unpack('>f', payload[2*byte_per_thd_set + i*4:i*4 + 2*byte_per_thd_set + 4])[0]
return [min_thd_vals, max_thd_vals, center_thd_vals]
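# --- Editor's note: minimal round-trip sketch (synthetic values, not taken
# from the aplink protocol docs). Two channels' min/max/center thresholds are
# packed as big-endian floats and decoded back.
if __name__ == '__main__':
    min_thd = [1000.0, 1100.0]
    max_thd = [2000.0, 2100.0]
    center_thd = [1500.0, 1600.0]
    payload = b''.join(struct.pack('>f', v) for v in min_thd + max_thd + center_thd)
    # -> [[1000.0, 1100.0], [2000.0, 2100.0], [1500.0, 1600.0]]
    print(SaveTxCalibration.decode_payload(payload))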
|
Sokrates80/air-py
|
aplink/messages/ap_save_tx_calibration.py
|
Python
|
mit
| 2,515
|
import datetime
import unittest2
from mock import Mock, ANY
import svb
from svb.six.moves.urllib import parse
from svb.test.helper import SvbUnitTestCase
VALID_API_METHODS = ('get', 'post', 'delete', 'patch')
class GMT1(datetime.tzinfo):
def utcoffset(self, dt):
return datetime.timedelta(hours=1)
def dst(self, dt):
return datetime.timedelta(0)
def tzname(self, dt):
return "Europe/Prague"
class APIHeaderMatcher(object):
EXP_KEYS = [
'Authorization',
'SVB-Version',
'User-Agent',
'X-SVB-Client-User-Agent',
'X-Timestamp',
'X-Signature',
]
METHOD_EXTRA_KEYS = {
"post": ["Content-Type"],
"patch": ["Content-Type"],
}
def __init__(self, api_key=None, extra={}, request_method=None,
user_agent=None, app_info=None):
self.request_method = request_method
self.api_key = api_key or svb.api_key
self.extra = extra
self.user_agent = user_agent
self.app_info = app_info
def __eq__(self, other):
return (self._keys_match(other) and
self._auth_match(other) and
self._user_agent_match(other) and
self._x_svb_ua_contains_app_info(other) and
self._extra_match(other))
def _keys_match(self, other):
expected_keys = list(set(self.EXP_KEYS + self.extra.keys()))
if self.request_method is not None and self.request_method in \
self.METHOD_EXTRA_KEYS:
expected_keys.extend(self.METHOD_EXTRA_KEYS[self.request_method])
return (sorted(other.keys()) == sorted(expected_keys))
def _auth_match(self, other):
return other['Authorization'] == "Bearer %s" % (self.api_key,)
def _user_agent_match(self, other):
if self.user_agent is not None:
return other['User-Agent'] == self.user_agent
return True
def _x_svb_ua_contains_app_info(self, other):
if self.app_info:
ua = svb.util.json.loads(other['X-SVB-Client-User-Agent'])
if 'application' not in ua:
return False
return ua['application'] == self.app_info
return True
def _extra_match(self, other):
for k, v in self.extra.iteritems():
if other[k] != v:
return False
return True
class JSONMatcher(object):
def ordered(self, obj):
if isinstance(obj, dict):
return sorted((k, self.ordered(str(v))) for k, v in obj.items())
if isinstance(obj, list):
return sorted(self.ordered(str(x)) for x in obj)
else:
return obj
def __init__(self, expected):
if isinstance(expected, dict):
self.expected = self.ordered(expected)
elif isinstance(expected, svb.six.text_type):
self.expected = self.ordered(svb.util.json.loads(expected))
def __eq__(self, other):
return self.expected == self.ordered(svb.util.json.loads(other))
class QueryMatcher(object):
def __init__(self, expected):
self.expected = sorted(expected)
def __eq__(self, other):
query = parse.urlsplit(other).query or other
parsed = svb.util.parse_qsl(query)
return self.expected == sorted(parsed)
class UrlMatcher(object):
def __init__(self, expected):
self.exp_parts = parse.urlsplit(expected)
def __eq__(self, other):
other_parts = parse.urlsplit(other)
for part in ('scheme', 'netloc', 'path', 'fragment'):
expected = getattr(self.exp_parts, part)
actual = getattr(other_parts, part)
if expected != actual:
print 'Expected %s "%s" but got "%s"' % (
part, expected, actual)
return False
q_matcher = QueryMatcher(svb.util.parse_qsl(self.exp_parts.query))
return q_matcher == other
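# --- Editor's note: the matcher classes above are meant to be handed to
# Mock.assert_called_with(); their __eq__ compares canonical forms rather
# than raw strings. Hedged example (made-up values, assuming svb.util's
# parse_qsl behaves like the stdlib's):
#   QueryMatcher([('a', '1'), ('b', '2')]) == 'b=2&a=1'   # True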
class APIRequestorRequestTests(SvbUnitTestCase):
ENCODE_INPUTS = {
'dict': {
'astring': 'bar',
'anint': 5,
'anull': None,
'adatetime': datetime.datetime(2013, 1, 1, tzinfo=GMT1()),
'atuple': (1, 2),
'adict': {'foo': 'bar', 'boz': 5},
'alist': ['foo', 'bar'],
},
'list': [1, 'foo', 'baz'],
'string': 'boo',
'unicode': u'\u1234',
'datetime': datetime.datetime(2013, 1, 1, second=1, tzinfo=GMT1()),
'none': None,
}
ENCODE_EXPECTATIONS = {
'dict': [
('%s[astring]', 'bar'),
('%s[anint]', 5),
('%s[adatetime]', 1356994800),
('%s[adict][foo]', 'bar'),
('%s[adict][boz]', 5),
('%s[alist][]', 'foo'),
('%s[alist][]', 'bar'),
('%s[atuple][]', 1),
('%s[atuple][]', 2),
],
'list': [
('%s[]', 1),
('%s[]', 'foo'),
('%s[]', 'baz'),
],
'string': [('%s', 'boo')],
'unicode': [('%s', svb.util.utf8(u'\u1234'))],
'datetime': [('%s', 1356994801)],
'none': [],
}
def setUp(self):
super(APIRequestorRequestTests, self).setUp()
self.http_client = Mock(svb.http_client.HTTPClient)
self.http_client._verify_ssl_certs = True
self.http_client.name = 'mockclient'
self.requestor = svb.api_requestor.APIRequestor(
client=self.http_client)
def mock_response(self, return_body, return_code, requestor=None,
headers=None):
if not requestor:
requestor = self.requestor
self.http_client.request = Mock(
return_value=(return_body, return_code, headers or {}))
def check_call(self, meth, abs_url=None, headers=None,
post_data=None, requestor=None):
if not abs_url:
abs_url = 'https://api.svb.com%s' % (self.valid_path,)
if not requestor:
requestor = self.requestor
if not headers:
headers = APIHeaderMatcher(request_method=meth)
self.http_client.request.assert_called_with(
meth, abs_url, headers, post_data)
@property
def valid_path(self):
return '/foo'
def encoder_check(self, key):
stk_key = "my%s" % (key,)
value = self.ENCODE_INPUTS[key]
expectation = [(k % (stk_key,), v) for k, v in
self.ENCODE_EXPECTATIONS[key]]
stk = []
fn = getattr(svb.api_requestor.APIRequestor, "encode_%s" % (key,))
fn(stk, stk_key, value)
if isinstance(value, dict):
expectation.sort()
stk.sort()
self.assertEqual(expectation, stk)
def _test_encode_naive_datetime(self):
stk = []
svb.api_requestor.APIRequestor.encode_datetime(
stk, 'test', datetime.datetime(2013, 1, 1))
# Naive datetimes will encode differently depending on your system
# local time. Since we don't know the local time of your system,
# we just check that naive encodings are within 24 hours of correct.
self.assertTrue(60 * 60 * 24 > abs(stk[0][1] - 1356994800))
def test_param_encoding(self):
self.mock_response('{}', 200)
self.requestor.request('get', '', self.ENCODE_INPUTS)
expectation = []
for type_, values in self.ENCODE_EXPECTATIONS.iteritems():
expectation.extend([(k % (type_,), str(v)) for k, v in values])
self.check_call('get', QueryMatcher(expectation))
def test_dictionary_list_encoding(self):
params = {
'foo': {
'0': {
'bar': 'bat',
}
}
}
encoded = list(svb.api_requestor._api_encode(params))
key, value = encoded[0]
self.assertEqual('foo[0][bar]', key)
self.assertEqual('bat', value)
def test_url_construction(self):
CASES = (
('https://api.svb.com?foo=bar', '', {'foo': 'bar'}),
('https://api.svb.com?foo=bar', '?', {'foo': 'bar'}),
('https://api.svb.com', '', {}),
(
'https://api.svb.com/%20spaced?foo=bar%24&baz=5',
'/%20spaced?foo=bar%24',
{'baz': '5'}
),
(
'https://api.svb.com?foo=bar&foo=bar',
'?foo=bar',
{'foo': 'bar'}
),
)
for expected, url, params in CASES:
self.mock_response('{}', 200)
self.requestor.request('get', url, params)
self.check_call('get', expected)
def test_empty_methods(self):
for meth in VALID_API_METHODS:
self.mock_response('{}', 200)
body, key = self.requestor.request(meth, self.valid_path, {})
if meth == 'post' or meth == 'patch':
post_data = svb.util.json.dumps({'data': {}})
else:
post_data = None
self.check_call(meth, post_data=post_data)
self.assertEqual({}, body)
def test_methods_with_params_and_response(self):
for meth in VALID_API_METHODS:
self.mock_response('{"foo": "bar", "baz": 6}', 200)
params = {
'alist': [1, 2, 3],
'adict': {'frobble': 'bits'},
'adatetime': datetime.datetime(2013, 1, 1, tzinfo=GMT1())
}
encoded = ('adict%5Bfrobble%5D=bits&adatetime=1356994800&'
'alist%5B%5D=1&alist%5B%5D=2&alist%5B%5D=3')
body, key = self.requestor.request(meth, self.valid_path,
params)
self.assertEqual({'foo': 'bar', 'baz': 6}, body)
if meth == 'post' or meth == 'patch':
self.check_call(
meth,
post_data=JSONMatcher(svb.util.json.dumps(
{
"data": dict(svb.util.parse_qsl(encoded))
})))
else:
abs_url = "https://api.svb.com%s?%s" % (
self.valid_path, encoded)
self.check_call(meth, abs_url=UrlMatcher(abs_url))
def test_uses_headers(self):
self.mock_response('{}', 200)
self.requestor.request('get', self.valid_path, {}, {'foo': 'bar'})
self.check_call('get', headers=APIHeaderMatcher(extra={'foo': 'bar'}))
def test_uses_instance_key(self):
key = 'fookey'
requestor = svb.api_requestor.APIRequestor(key,
client=self.http_client)
self.mock_response('{}', 200, requestor=requestor)
body, used_key = requestor.request('get', self.valid_path, {})
self.check_call('get', headers=APIHeaderMatcher(
key, request_method='get'), requestor=requestor)
self.assertEqual(key, used_key)
def test_uses_instance_api_version(self):
api_version = 'fooversion'
requestor = svb.api_requestor.APIRequestor(api_version=api_version,
client=self.http_client)
self.mock_response('{}', 200, requestor=requestor)
requestor.request('get', self.valid_path, {})
self.check_call('get', headers=APIHeaderMatcher(
extra={'SVB-Version': 'fooversion'}, request_method='get'),
requestor=requestor)
def test_uses_instance_account(self):
account = 'acct_foo'
requestor = svb.api_requestor.APIRequestor(account=account,
client=self.http_client)
self.mock_response('{}', 200, requestor=requestor)
requestor.request('get', self.valid_path, {})
self.check_call(
'get',
requestor=requestor,
headers=APIHeaderMatcher(
extra={'SVB-Account': account},
request_method='get'
),
)
def test_uses_app_info(self):
try:
old = svb.app_info
svb.set_app_info(
'MyAwesomePlugin',
url='https://myawesomeplugin.info',
version='1.2.34'
)
self.mock_response('{}', 200)
self.requestor.request('get', self.valid_path, {})
ua = "SVB/v1 PythonBindings/%s" % (svb.version.VERSION,)
ua += " MyAwesomePlugin/1.2.34 (https://myawesomeplugin.info)"
header_matcher = APIHeaderMatcher(
user_agent=ua,
app_info={
'name': 'MyAwesomePlugin',
'url': 'https://myawesomeplugin.info',
'version': '1.2.34',
}
)
self.check_call('get', headers=header_matcher)
finally:
svb.app_info = old
def test_fails_without_api_key(self):
svb.api_key = None
self.assertRaises(svb.error.AuthenticationError,
self.requestor.request,
'get', self.valid_path, {})
def test_not_found(self):
self.mock_response('{"error": {}}', 404)
self.assertRaises(svb.error.InvalidRequestError,
self.requestor.request,
'get', self.valid_path, {})
def test_authentication_error(self):
self.mock_response('{"error": {}}', 401)
self.assertRaises(svb.error.AuthenticationError,
self.requestor.request,
'get', self.valid_path, {})
def test_permissions_error(self):
self.mock_response('{"error": {}}', 403)
self.assertRaises(svb.error.PermissionError,
self.requestor.request,
'get', self.valid_path, {})
def test_card_error(self):
self.mock_response('{"error": {}}', 402)
self.assertRaises(svb.error.CardError,
self.requestor.request,
'get', self.valid_path, {})
def test_rate_limit_error(self):
self.mock_response('{"error": {}}', 429)
self.assertRaises(svb.error.RateLimitError,
self.requestor.request,
'get', self.valid_path, {})
def test_server_error(self):
self.mock_response('{"error": {}}', 500)
self.assertRaises(svb.error.APIError,
self.requestor.request,
'get', self.valid_path, {})
def test_invalid_json(self):
self.mock_response('{', 200)
self.assertRaises(svb.error.APIError,
self.requestor.request,
'get', self.valid_path, {})
def test_invalid_method(self):
self.assertRaises(svb.error.APIConnectionError,
self.requestor.request,
'foo', 'bar')
class DefaultClientTests(unittest2.TestCase):
def setUp(self):
svb.default_http_client = None
svb.api_key = 'foo'
def test_default_http_client_called(self):
hc = Mock(svb.http_client.HTTPClient)
hc._verify_ssl_certs = True
hc.name = 'mockclient'
hc.request = Mock(return_value=("{}", 200, {}))
svb.default_http_client = hc
svb.ACH.list(limit=3)
hc.request.assert_called_with(
'get', 'https://api.svb.com/v1/ach?limit=3', ANY, None)
def tearDown(self):
svb.api_key = None
svb.default_http_client = None
if __name__ == '__main__':
unittest2.main()
|
fuziontech/svb
|
svb/test/test_requestor.py
|
Python
|
mit
| 15,897
|
from django.test import TestCase
from django.contrib.auth.models import User
from dixit import settings
from dixit.game.models.game import Game
from dixit.game.models.player import Player
from dixit.game.models.round import Round, RoundStatus, Play
from dixit.game.models.card import Card
from dixit.game.exceptions import GameInvalidPlay, GameRoundIncomplete, GameDeckExhausted
class PlayTest(TestCase):
fixtures = ['game_testcards.json', ]
def setUp(self):
self.user = User.objects.create(username='test', email='test@localhost', password='test')
self.user2 = User.objects.create(username='test2', email='test2@localhost', password='test')
self.user3 = User.objects.create(username='test3', email='test3@localhost', password='test')
self.game = Game.new_game(name='test', user=self.user, player_name='storyteller')
self.current = self.game.current_round
self.player2 = self.game.add_player(self.user2, 'player2')
self.player3 = self.game.add_player(self.user3, 'player3')
def test_play_can_be_performed_for_round(self):
story_card = self.game.storyteller._pick_card()
Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
self.assertEqual(self.current.plays.count(), 1)
def test_storyteller_can_provide_card(self):
story_play = Play(game_round=self.current, player=self.game.storyteller)
story_play.provide_card(self.game.storyteller._pick_card(), 'story')
self.assertEqual(self.current.plays.count(), 1)
def test_players_cant_provide_card_before_storyteller(self):
with self.assertRaises(GameInvalidPlay):
Play.play_for_round(self.current, self.player2, self.player2._pick_card())
def test_players_can_provide_card_after_storyteller(self):
Play.play_for_round(self.current, self.game.storyteller, self.game.storyteller._pick_card(), 'story')
Play.play_for_round(self.current, self.player2, self.player2._pick_card())
self.assertEqual(self.current.plays.count(), 2)
def test_players_can_not_provide_card_after_voting(self):
# TODO
pass
def test_players_can_choose_played_card(self):
story_card = self.game.storyteller._pick_card()
story_play = Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
play2 = Play.play_for_round(self.current, self.player2, self.player2._pick_card())
play3 = Play.play_for_round(self.current, self.player3, self.player3._pick_card())
self.assertEqual(self.current.status, RoundStatus.VOTING)
play2.vote_card(story_card)
def test_players_can_not_choose_unplayed_card(self):
story_card = self.game.storyteller._pick_card()
story_play = Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
card2 = self.player2._pick_card()
play2 = Play.play_for_round(self.current, self.player2, card2)
with self.assertRaises(GameInvalidPlay):
other_card = Card.objects.available_for_game(self.game)[0]
play2.vote_card(other_card)
def test_players_can_not_choose_own_card(self):
story_card = self.game.storyteller._pick_card()
story_play = Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
card2 = self.player2._pick_card()
play2 = Play.play_for_round(self.current, self.player2, card2)
with self.assertRaises(GameInvalidPlay):
play2.vote_card(card2)
def test_storytellers_cant_vote_card(self):
story_card = self.game.storyteller._pick_card()
story_play = Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
card2 = self.player2._pick_card()
play2 = Play.play_for_round(self.current, self.player2, card2)
with self.assertRaises(GameInvalidPlay):
story_play.vote_card(card2)
class RoundTest(TestCase):
fixtures = ['game_testcards.json', ]
def setUp(self):
self.user = User.objects.create(username='test', email='test@localhost', password='test')
self.user2 = User.objects.create(username='test2', email='test2@localhost', password='test')
self.user3 = User.objects.create(username='test3', email='test3@localhost', password='test')
self.user4 = User.objects.create(username='test4', email='test4@localhost', password='test')
self.game = Game.new_game(name='test', user=self.user, player_name='storyteller')
self.current = self.game.current_round
self.player2 = self.game.add_player(self.user2, 'player2')
self.player3 = self.game.add_player(self.user3, 'player3')
def test_round_starts_new(self):
self.assertEqual(self.current.status, RoundStatus.NEW)
def test_round_is_new_when_only_storyteller_has_played(self):
story_card = self.game.storyteller._pick_card()
Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
self.assertEqual(self.current.status, RoundStatus.NEW)
def test_round_is_providing_until_all_players_have_provided(self):
story_card = self.game.storyteller._pick_card()
Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
players = self.game.players.exclude(id=self.game.storyteller.id)
for player in players[1:]:
Play.play_for_round(self.current, player, player._pick_card())
self.assertEqual(self.current.status, RoundStatus.PROVIDING)
def test_round_is_voting_when_all_players_have_provided_a_card(self):
Play.play_for_round(self.current, self.game.storyteller, self.game.storyteller._pick_card(), 'story')
players = self.game.players.all().exclude(id=self.game.storyteller.id)
for player in players:
Play.play_for_round(self.current, player, player._pick_card())
self.assertEqual(self.current.status, RoundStatus.VOTING)
def test_round_is_voting_until_all_players_have_voted(self):
story_card = self.current.turn._pick_card()
Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
players = self.game.players.all().exclude(id=self.game.storyteller.id)
for player in players:
Play.play_for_round(self.current, player, player._pick_card())
plays = self.current.plays.all().exclude(player=self.game.storyteller)
for play in plays[1:]:
play.vote_card(story_card)
self.assertEqual(self.current.status, RoundStatus.VOTING)
def test_round_is_complete_when_all_players_have_voted(self):
story_card = self.current.turn._pick_card()
Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
players = self.game.players.all().exclude(id=self.game.storyteller.id)
for player in players:
Play.play_for_round(self.current, player, player._pick_card())
plays = self.current.plays.all().exclude(player=self.game.storyteller)
for play in plays:
play.vote_card(story_card)
self.assertEqual(self.current.status, RoundStatus.COMPLETE)
def test_round_deals_hands_once_to_players(self):
game_round = Round(game=self.game, number=self.current.number + 1, turn=self.current.turn)
game_round.deal()
game_round.deal()
game_round.deal()
hand_sizes = (p.cards.count() for p in self.game.players.all())
self.assertTrue(all(s == settings.GAME_HAND_SIZE for s in hand_sizes))
def test_round_deals_system_card(self):
game_round = Round(game=self.game, number=self.current.number + 1, turn=self.current.turn)
game_round.deal()
self.assertTrue(game_round.card is not None)
def test_round_deals_system_card_once(self):
game_round = Round(game=self.game, number=self.current.number + 1, turn=self.current.turn)
game_round.deal()
system_card = game_round.card
game_round.deal()
self.assertEqual(system_card, game_round.card)
def test_deal_fails_when_not_enough_cards_available(self):
max_players = Card.objects.count() // (settings.GAME_HAND_SIZE + 1)
for i in range(max_players + 1):
test_username = 'test_n_{}'.format(i)
test_email = '{}@localhost'.format(test_username)
user = User.objects.create(username=test_username, email=test_email, password='test')
Player.objects.create(game=self.game, user=user, name='player_{}'.format(i))
new_round = Round(game=self.game, number=self.current.number + 1, turn=self.current.turn)
with self.assertRaises(GameDeckExhausted):
new_round.deal()
def test_new_round_can_not_be_closed(self):
self.assertEqual(self.current.status, RoundStatus.NEW)
self.assertRaises(GameRoundIncomplete, self.current.close)
def test_providing_round_can_not_be_closed(self):
story_card = self.current.turn._pick_card()
story_play = Play.play_for_round(self.current, self.current.turn, story_card, 'test')
Play.play_for_round(self.current, self.player2, self.player2._pick_card())
self.assertEqual(self.current.status, RoundStatus.PROVIDING)
self.assertRaises(GameRoundIncomplete, self.current.close)
def test_voting_round_can_not_be_closed(self):
story_card = self.current.turn._pick_card()
Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
players = self.game.players.all().exclude(id=self.game.storyteller.id)
for player in players:
Play.play_for_round(self.current, player, player._pick_card())
plays = self.current.plays.all().exclude(player=self.game.storyteller)
for play in plays[1:]:
play.vote_card(story_card)
self.assertEqual(self.current.status, RoundStatus.VOTING)
self.assertRaises(GameRoundIncomplete, self.current.close)
def test_complete_round_can_be_closed(self):
story_card = self.current.turn._pick_card()
Play.play_for_round(self.current, self.game.storyteller, story_card, 'story')
players = self.game.players.all().exclude(id=self.game.storyteller.id)
for player in players:
Play.play_for_round(self.current, player, player._pick_card())
plays = self.current.plays.all().exclude(player=self.game.storyteller)
for play in plays:
play.vote_card(story_card)
self.assertEqual(self.current.status, RoundStatus.COMPLETE)
self.current.close()
def test_storyteller_scores_when_player_guessed(self):
story_card = self.current.turn._pick_card()
story_play = Play.play_for_round(self.current, self.current.turn, story_card, 'test')
card2 = self.player2._pick_card()
play2 = Play.play_for_round(self.current, self.player2, card2)
card3 = self.player3._pick_card()
play3 = Play.play_for_round(self.current, self.player3, card3)
play2.vote_card(story_card)
play3.vote_card(card2)
self.current.close()
self.current.turn.refresh_from_db()
self.assertEqual(self.current.turn.score, settings.GAME_STORY_SCORE)
def test_storyteller_doesnt_score_when_all_players_guess(self):
story_card = self.current.turn._pick_card()
story_play = Play.play_for_round(self.current, self.current.turn, story_card, 'test')
card2 = self.player2._pick_card()
play2 = Play.play_for_round(self.current, self.player2, card2)
card3 = self.player3._pick_card()
play3 = Play.play_for_round(self.current, self.player3, card3)
play2.vote_card(story_card)
play3.vote_card(story_card)
self.current.close()
self.current.turn.refresh_from_db()
self.assertEqual(self.current.turn.score, 0)
def test_players_score_when_their_card_is_chosen(self):
story_card = self.current.turn._pick_card()
story_play = Play.play_for_round(self.current, self.current.turn, story_card, 'test')
card2 = self.player2._pick_card()
play2 = Play.play_for_round(self.current, self.player2, card2)
card3 = self.player3._pick_card()
play3 = Play.play_for_round(self.current, self.player3, card3)
play2.vote_card(card3)
play3.vote_card(card2)
self.current.close()
self.player2.refresh_from_db()
self.assertEqual(self.player2.score, settings.GAME_CONFUSED_GUESS_SCORE)
def test_players_score_max_bound(self):
player4 = self.game.add_player(self.user4, 'player4')
story_card = self.current.turn._pick_card()
story_play = Play.play_for_round(self.current, self.current.turn, story_card, 'test')
card2 = self.player2._pick_card()
play2 = Play.play_for_round(self.current, self.player2, card2)
card3 = self.player3._pick_card()
play3 = Play.play_for_round(self.current, self.player3, card3)
card4 = player4._pick_card()
play4 = Play.play_for_round(self.current, player4, card4)
play2.vote_card(story_card)
play3.vote_card(card2)
play4.vote_card(card2)
self.current.close()
self.player2.refresh_from_db()
self.assertEqual(self.player2.score, settings.GAME_MAX_ROUND_SCORE)
|
jminuscula/dixit-online
|
server/src/dixit/game/test/round.py
|
Python
|
mit
| 13,429
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('layout_page', '0007_auto_20170509_1148'),
]
operations = [
migrations.AddField(
model_name='layoutpage',
name='admin_notes',
field=models.TextField(help_text=b"Administrator's notes about this content", blank=True),
),
migrations.AddField(
model_name='layoutpage',
name='brief',
field=models.TextField(help_text=b'A document brief describing the purpose of this content', blank=True),
),
]
|
ic-labs/django-icekit
|
icekit/page_types/layout_page/migrations/0008_auto_20170518_1629.py
|
Python
|
mit
| 686
|
# -*- coding: utf-8 -*-
__title__ = 'pywebtask'
__version__ = '0.1.8'
__build__ = 0x000108
__author__ = 'Sebastián José Seba'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Sebastián José Seba'
from .webtasks import run, run_file
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
|
ssebastianj/pywebtasks
|
pywebtasks/__init__.py
|
Python
|
mit
| 546
|
import traceback
import BigWorld
from gui.Scaleform.daapi.view.lobby.messengerBar.NotificationListButton import NotificationListButton
from xfw import *
import xvm_main.python.config as config
from xvm_main.python.logger import *
###
@overrideMethod(NotificationListButton, 'as_setStateS')
def _NotificationListButton_as_setStateS(base, self, isBlinking, counterValue):
notificationsButtonType = config.get('hangar/notificationsButtonType', 'full').lower()
if notificationsButtonType == 'none':
isBlinking = False
counterValue = ''
elif notificationsButtonType == 'blink':
counterValue = ''
base(self, isBlinking, counterValue)
|
peterbartha/ImmunoMod
|
res_mods/mods/packages/xvm_hangar/python/svcmsg.py
|
Python
|
mit
| 675
|
#
# Retrieved from: https://svn.code.sf.net/p/p2tk/code/python/syllabify/syllabifier.py
# on 2014-09-05.
#
# According to https://www.ling.upenn.edu/phonetics/p2tk/, this is licensed
# under MIT.
#
# This is the P2TK automated syllabifier. Given a string of phonemes,
# it automatically divides the phonemes into syllables.
#
# By Joshua Tauberer, based on code originally written by Charles Yang.
#
# The syllabifier requires a language configuration which specifies
# the set of phonemes which are consonants and vowels (syllable nuclei),
# as well as the set of permissible onsets.
#
# Then call syllabify with a language configuration object and a word
# represented as a string (or list) of phonemes.
#
# Returned is a data structure representing the syllabification.
# What you get is a list of syllables. Each syllable is a tuple
# of (stress, onset, nucleus, coda). stress is None or an integer stress
# level attached to the nucleus phoneme on input. onset, nucleus,
# and coda are lists of phonemes.
#
# Example:
#
# import syllabifier
# language = syllabifier.English # or: syllabifier.loadLanguage("english.cfg")
# syllables = syllabifier.syllabify(language, "AO2 R G AH0 N AH0 Z EY1 SH AH0 N Z")
#
# The syllables variable then holds the following:
# [ (2, [], ['AO'], ['R']),
# (0, ['G'], ['AH'], []),
# (0, ['N'], ['AH'], []),
# (1, ['Z'], ['EY'], []),
# (0, ['SH'], ['AH'], ['N', 'Z'])]
#
# You could process that result with this type of loop:
#
# for stress, onset, nucleus, coda in syllables :
# print " ".join(onset), " ".join(nucleus), " ".join(coda)
#
# You can also pass the result to stringify to get a nice printable
# representation of the syllables, with periods separating syllables:
#
# print syllabify.stringify(syllables)
#
#########################################################################
English = {
'consonants': ['B', 'CH', 'D', 'DH', 'F', 'G', 'HH', 'JH', 'K', 'L', 'M', 'N',
'NG', 'P', 'R', 'S', 'SH', 'T', 'TH', 'V', 'W', 'Y', 'Z', 'ZH'],
'vowels': [ 'AA', 'AE', 'AH', 'AO', 'AW', 'AY', 'EH', 'ER', 'EY', 'IH', 'IY', 'OW', 'OY', 'UH', 'UW'],
'onsets': ['P', 'T', 'K', 'B', 'D', 'G', 'F', 'V', 'TH', 'DH', 'S', 'Z', 'SH', 'CH', 'JH', 'M',
'N', 'R', 'L', 'HH', 'W', 'Y', 'P R', 'T R', 'K R', 'B R', 'D R', 'G R', 'F R',
'TH R', 'SH R', 'P L', 'K L', 'B L', 'G L', 'F L', 'S L', 'T W', 'K W', 'D W',
'S W', 'S P', 'S T', 'S K', 'S F', 'S M', 'S N', 'G W', 'SH W', 'S P R', 'S P L',
'S T R', 'S K R', 'S K W', 'S K L', 'TH W', 'ZH', 'P Y', 'K Y', 'B Y', 'F Y',
'HH Y', 'V Y', 'TH Y', 'M Y', 'S P Y', 'S K Y', 'G Y', 'HH W', '']
}
def loadLanguage(filename) :
'''This function loads up a language configuration file and returns
the configuration to be passed to the syllabify function.'''
L = { "consonants" : [], "vowels" : [], "onsets" : [] }
f = open(filename, "r")
section = None
for line in f :
line = line.strip()
if line in ("[consonants]", "[vowels]", "[onsets]") :
section = line[1:-1]
elif section is None :
raise ValueError("File must start with a section header such as [consonants].")
elif not section in L :
raise ValueError("Invalid section: " + section)
else :
L[section].append(line)
for section in "consonants", "vowels", "onsets" :
if len(L[section]) == 0 :
raise ValueError("File does not contain any consonants, vowels, or onsets.")
return L
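# --- Editor's note (hedged sketch of the config layout loadLanguage expects,
# for a hypothetical "english.cfg"): section headers, then one item per line,
# with multi-phoneme onsets written space-separated:
#
#   [consonants]
#   B
#   CH
#   [vowels]
#   AA
#   AE
#   [onsets]
#   P
#   P R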
def syllabify(language, word) :
'''Syllabifies the word, given a language configuration loaded with loadLanguage.
word is either a string of phonemes from the CMU pronouncing dictionary set
(with optional stress numbers after vowels), or a Python list of phonemes,
e.g. "B AE1 T" or ["B", "AE1", "T"]'''
if type(word) == str :
word = word.split()
syllables = [] # This is the returned data structure.
internuclei = [] # This maintains a list of phonemes between nuclei.
for phoneme in word :
phoneme = phoneme.strip()
if phoneme == "" :
continue
stress = None
if phoneme[-1].isdigit() :
stress = int(phoneme[-1])
phoneme = phoneme[0:-1]
if phoneme in language["vowels"] :
# Split the consonants seen since the last nucleus into coda and onset.
coda = None
onset = None
# If there is a period in the input, split there.
if "." in internuclei :
period = internuclei.index(".")
coda = internuclei[:period]
onset = internuclei[period+1:]
else :
# Make the largest onset we can. The 'split' variable marks the break point.
for split in range(0, len(internuclei)+1) :
coda = internuclei[:split]
onset = internuclei[split:]
# If we are looking at a valid onset, or if we're at the start of the word
# (in which case an invalid onset is better than a coda that doesn't follow
# a nucleus), or if we've gone through all of the onsets and we didn't find
# any that are valid, then split the nonvowels we've seen at this location.
if " ".join(onset) in language["onsets"] \
or len(syllables) == 0 \
or len(onset) == 0 :
break
# Tack the coda onto the coda of the last syllable. Can't do it if this
# is the first syllable.
if len(syllables) > 0 :
syllables[-1][3].extend(coda)
# Make a new syllable out of the onset and nucleus.
syllables.append( (stress, onset, [phoneme], []) )
# At this point we've processed the internuclei list.
internuclei = []
elif not phoneme in language["consonants"] and phoneme != "." :
raise ValueError("Invalid phoneme: " + phoneme)
else : # a consonant
internuclei.append(phoneme)
# Done looping through phonemes. We may have consonants left at the end.
# We may have even not found a nucleus.
if len(internuclei) > 0 :
if len(syllables) == 0 :
syllables.append( (None, internuclei, [], []) )
else :
syllables[-1][3].extend(internuclei)
return syllables
def stringify(syllables) :
'''This function takes a syllabification returned by syllabify and
    turns it into a string, with phonemes separated by spaces and
    syllables separated by periods.'''
ret = []
for syl in syllables :
stress, onset, nucleus, coda = syl
if stress != None and len(nucleus) != 0 :
nucleus[0] += str(stress)
ret.append(" ".join(onset + nucleus + coda))
return " . ".join(ret)
# If this module was run directly, syllabify the words on standard input
# into standard output. Hashed lines are printed back untouched.
if __name__ == "__main__" :
import sys
if len(sys.argv) != 2 :
print("Usage: python syllabifier.py english.cfg < textfile.txt > outfile.txt")
else :
L = loadLanguage(sys.argv[1])
for line in sys.stdin :
if line[0] == "#" :
sys.stdout.write(line)
continue
line = line.strip()
s = stringify(syllabify(L, line))
sys.stdout.write(s + "\n")
|
dfm/twitterick
|
twitterick/syllabifier.py
|
Python
|
mit
| 6,769
|
import re
from copy import copy
from random import randint
class Server(object):
def __init__(self, ip, port, hostname):
self.ip = ip
self.port = port
self.hostname = hostname
self.weight = 500
self.maxconn = None
def __cmp__(self, other):
if not isinstance(other, Server):
return -1
return cmp((self.ip, self.port, self.weight, self.maxconn), (other.ip, other.port, other.weight, other.maxconn))
def __hash__(self):
return hash((self.ip, self.port, self.weight, self.maxconn))
def __str__(self):
extra = []
if self.weight != 500:
extra.append("weight=%d" % self.weight)
if self.maxconn:
extra.append("maxconn=%d" % self.maxconn)
result = '%s:%s' % (self.ip, self.port)
if extra:
result += '(%s)' % ','.join(extra)
return result
def __repr__(self):
return 'Server(%s, %s, %s, %s)' % (repr(self.ip), repr(self.port), repr(self.weight), repr(self.maxconn))
def clone(self):
return copy(self)
def setWeight(self, weight):
clone = self.clone()
clone.weight = weight
return clone
def setMaxconn(self, maxconn):
clone = self.clone()
clone.maxconn = maxconn
return clone
class Service(object):
def __init__(self, name, source, port, protocol, application='binary', healthcheck=False, healthcheckurl='/', timeoutclient=None, timeoutserver=None):
self.name = name
self.source = source
self.port = port
self.protocol = protocol
self.application = application
self.healthcheck = healthcheck
self.healthcheckurl = healthcheckurl
self.timeoutclient = timeoutclient
self.timeoutserver = timeoutserver
self.servers = set()
self.slots = []
# Check if there's a port override
        match = re.search(r'.@(\d+)$', self.name)
if match:
self.name = self.name[0:-(len(match.group(1))+1)]
self.port = int(match.group(1))
def clone(self):
clone = Service(self.name, self.source, self.port, self.protocol, self.application, self.healthcheck, self.healthcheckurl, self.timeoutclient,
self.timeoutserver)
clone.servers = set(self.servers)
clone.slots = list(self.slots)
return clone
def __str__(self):
# Represent misc. service attributes as k=v pairs, but only if their value is not None
service_attributes = ['timeoutclient', 'timeoutserver']
service_options = ['%s=%s' % (attr, getattr(self, attr)) for attr in service_attributes if getattr(self, attr) is not None]
        # Only use healthcheckurl if healthcheck has a meaningful value
        if self.healthcheck:
            service_options.append('healthcheck=%s' % self.healthcheck)
service_options.append('healthcheckurl=%s' % self.healthcheckurl)
return '%s:%s/%s%s -> [%s]' % (
self.name, self.port, self.application if self.application != 'binary' else self.protocol,
'(%s)' % ','.join(service_options) if service_options else '',
', '.join([str(s) for s in sorted(self.servers)]))
def __repr__(self):
return 'Service(%s, %s, %s, %s, %s)' % (repr(self.name), repr(self.port), repr(self.protocol), repr(self.application), repr(sorted(self.servers)))
def __cmp__(self, other):
if not isinstance(other, Service):
return -1
return cmp((self.name, self.port, self.protocol, self.servers), (other.name, other.port, other.protocol, other.servers))
def __hash__(self):
return hash((self.name, self.port, self.protocol, self.servers))
@property
def portname(self):
return re.sub('[^a-zA-Z0-9]', '_', str(self.port))
@property
def marathonpath(self):
ret = ''
for s in self.name.split('.'):
            if ret != '':
ret = s + '.' + ret
else:
ret = s
return ret
def update(self, other):
"""
        Returns a new updated Service object
"""
clone = self.clone()
clone.name = other.name
clone.source = other.source
clone.port = other.port
clone.protocol = other.protocol
clone.timeoutclient = other.timeoutclient
clone.timeoutserver = other.timeoutserver
for server in clone.servers - other.servers:
clone._remove(server)
for server in other.servers - clone.servers:
clone._add(server)
return clone
def addServer(self, server):
clone = self.clone()
clone._add(server)
return clone
def setApplication(self, application):
clone = self.clone()
clone.application = application
return clone
def _add(self, server):
self.servers.add(server)
# Keep servers in the same index when they're added
for i in range(len(self.slots)):
if not self.slots[i]:
self.slots[i] = server
return
# Not present in list, just insert randomly
self.slots.insert(randint(0, len(self.slots)), server)
def _remove(self, server):
self.servers.remove(server)
        # Set the server slot to None so a later _add() can reuse the index
        for i in range(len(self.slots)):
            if self.slots[i] == server:
                self.slots[i] = None
                return
raise KeyError(str(server))
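# --- Hedged usage sketch (added for illustration; not part of the original module) ---
# Server and Service are used as value objects: setWeight(), setMaxconn(),
# addServer() and update() return modified clones instead of mutating the
# receiver, which makes diffing an old and a new configuration safe. The
# names and addresses below are hypothetical.
if __name__ == '__main__':
    web = Service('web', 'example-source', 80, 'tcp')
    web = web.addServer(Server('10.0.0.1', 8080, 'node-1'))
    web = web.addServer(Server('10.0.0.2', 8080, 'node-2').setWeight(250))
    print str(web)  # web:80/tcp -> [10.0.0.1:8080, 10.0.0.2:8080(weight=250)]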
|
meltwater/proxymatic
|
src/proxymatic/services.py
|
Python
|
mit
| 5,559
|
__author__ = "Christian Kongsgaard"
__license__ = 'MIT'
# -------------------------------------------------------------------------------------------------------------------- #
# IMPORTS
# Modules
# RiBuild Modules
from delphin_6_automation.database_interactions.db_templates import delphin_entry
from delphin_6_automation.database_interactions.db_templates import sample_entry
from delphin_6_automation.database_interactions import mongo_setup
from delphin_6_automation.database_interactions.auth import auth_dict
# -------------------------------------------------------------------------------------------------------------------- #
# RIBuild
def correct_delphin():
samples = sample_entry.Sample.objects().only('delphin_docs')
    print(f'There are {samples.count()} samples in the DB')
sample_projects = []
for sample in samples:
if len(sample.delphin_docs) == 0:
print(f'Sample {sample.id} has no delphin projects. Deleting!')
sample.delete()
else:
for delphin in sample.delphin_docs:
sample_projects.append(delphin.id)
    print(f'There are {len(sample_projects)} projects connected to a sample')
projects = delphin_entry.Delphin.objects().only('id')
print(f'There are currently {len(projects)} projects in the database')
print('Starting')
for proj in projects:
if proj.id not in sample_projects:
#print(f'Project with ID: {proj.id} is not part of a sample!')
proj.delete()
def correct_sample():
samples = sample_entry.Sample.objects()
for sample in samples:
docs = []
for ref in sample.delphin_docs:
delphin_projects = delphin_entry.Delphin.objects(id=ref.id)
if delphin_projects:
docs.append(delphin_projects.first())
else:
                print(f'Found non-existent project: {ref.id}')
sample.delphin_docs = docs
sample.save()
def correct_strategy():
strategy = sample_entry.Strategy.objects().first()
keep = []
for sample in strategy.samples:
found_sample = sample_entry.Sample.objects(id=sample.id)
if found_sample:
keep.append(found_sample.first().id)
else:
print(f"Sample {sample.id} was not in the DB")
print(f"Found samples {len(keep)} to keep: {keep}")
strategy.samples = keep
strategy.save()
def modify_sample():
id_ = "5e7878ce582e3e000172996d"
sample = sample_entry.Sample.objects(id=id_).first()
print('Got sample')
sample.mean = {}
sample.standard_deviation = {}
sample.save()
def correct_sample2():
samples = sample_entry.Sample.objects().only('id')
print(f"There is {samples.count()} samples in DB")
for i in range(samples.count()):
samples = sample_entry.Sample.objects(iteration=i).only('id')
print(f'There is {samples.count()} with iteration {i}')
if samples.count() > 1:
print(f"There is {samples.count()} samples with iteration {i}")
for j, sample in enumerate(samples):
if j == 0:
pass
else:
print(f'Deleting: {sample.id}')
#sample.delete()
if __name__ == '__main__':
server = mongo_setup.global_init(auth_dict)
#modify_sample()
#correct_sample()
#correct_sample2()
correct_delphin()
correct_strategy()
mongo_setup.global_end_ssh(server)
|
thp44/delphin_6_automation
|
data_process/wp6_v2/not_in_sample.py
|
Python
|
mit
| 3,478
|
def pe0001(upto):
total = 0
for i in range(upto):
if i % 3 == 0 or i % 5 == 0:
total += i
return total
print(pe0001(1000))
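# A constant-time alternative (added for illustration): the sum of the
# multiples of k below n is an arithmetic series, so inclusion-exclusion over
# 3, 5 and 15 gives the same answer without looping.
def pe0001_closed_form(upto):
    def series(k):
        m = (upto - 1) // k          # number of multiples of k below upto
        return k * m * (m + 1) // 2  # k * (1 + 2 + ... + m)
    return series(3) + series(5) - series(15)

print(pe0001_closed_form(1000))  # 233168, same as pe0001(1000)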
|
guandalf/projecteuler
|
pe0001.py
|
Python
|
mit
| 155
|
from setuptools import setup, find_packages
setup(name='MODEL1201230000',
version=20140916,
description='MODEL1201230000 from BioModels',
url='http://www.ebi.ac.uk/biomodels-main/MODEL1201230000',
maintainer='Stanley Gu',
      maintainer_email='stanleygu@gmail.com',
packages=find_packages(),
package_data={'': ['*.xml', 'README.md']},
)
|
biomodels/MODEL1201230000
|
setup.py
|
Python
|
cc0-1.0
| 377
|
import os
def replace_temp(inputfile_folder):
os.chdir(inputfile_folder)
home_dir = os.getcwd()
for i in os.listdir(os.getcwd()):
if os.path.isdir(i):
os.chdir(i)
print("In folder: {}".format(os.getcwd()))
for z in os.listdir(os.getcwd()):
if '.txt' in z:
                with open(z, 'r') as infile:
                    with open("temp.txt", 'w') as outfile:
                        print("\nChanging string in file: {}".format(z))
                        infile_text = infile.read()
                        s = infile_text.replace(",20,80,1200,0,-2,0", "0,20,80,1600,0,-2,0")
                        outfile.write(s)
                # both handles are closed here, so the remove/rename also works on Windows
                os.remove(z)
                os.rename("temp.txt", z)
                print("Success! Replaced string in file: {}".format(z))
os.chdir(home_dir)
def initialization():
print("\n\n\n\nPlease specify your HeFESTo input file folder (in Exoplanet Pocketknife format):")
in1 = input("\n>>> ")
if in1 in os.listdir(os.getcwd()):
replace_temp(inputfile_folder=in1)
else:
initialization()
initialization()
|
ScottHull/Exoplanet-Pocketknife
|
old/hefesto_temp_fix.py
|
Python
|
cc0-1.0
| 1,250
|
## This file is part of Invenio.
## Copyright (C) 2008, 2009, 2010, 2011, 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
Invenio utilities to run SQL queries.
The main API functions are:
- run_sql()
- run_sql_many()
- run_sql_with_limit()
but see the others as well.
"""
__revision__ = "$Id$"
# dbquery clients can import these from here:
# pylint: disable=W0611
from MySQLdb import Warning, Error, InterfaceError, DataError, \
DatabaseError, OperationalError, IntegrityError, \
InternalError, NotSupportedError, \
ProgrammingError
import gc
import os
import string
import time
import re
from thread import get_ident
from flask import current_app
from werkzeug.utils import cached_property
from invenio.base.globals import cfg
from invenio.utils.datastructures import LazyDict
from invenio.utils.serializers import serialize_via_marshal, \
deserialize_via_marshal
class DBConnect(object):
def __call__(self, *args, **kwargs):
return self._connect(*args, **kwargs)
@cached_property
def _connect(self):
if cfg['CFG_MISCUTIL_SQL_USE_SQLALCHEMY']:
try:
import sqlalchemy.pool as pool
import MySQLdb as mysqldb
mysqldb = pool.manage(mysqldb, use_threadlocal=True)
connect = mysqldb.connect
except ImportError:
cfg['CFG_MISCUTIL_SQL_USE_SQLALCHEMY'] = False
from MySQLdb import connect
else:
from MySQLdb import connect
return connect
def unlock_all(app):
for dbhost in _DB_CONN.keys():
for db in _DB_CONN[dbhost].values():
try:
                cur = db.cursor()
cur.execute("UNLOCK TABLES")
except:
pass
return app
def _db_conn():
current_app.teardown_appcontext_funcs.append(unlock_all)
out = {}
out[cfg['CFG_DATABASE_HOST']] = {}
out[cfg['CFG_DATABASE_SLAVE']] = {}
return out
connect = DBConnect()
_DB_CONN = LazyDict(_db_conn)
class InvenioDbQueryWildcardLimitError(Exception):
"""Exception raised when query limit reached."""
def __init__(self, res):
"""Initialization."""
self.res = res
def _db_login(dbhost=None, relogin=0):
"""Login to the database."""
## Note: we are using "use_unicode=False", because we want to
## receive strings from MySQL as Python UTF-8 binary string
## objects, not as Python Unicode string objects, as of yet.
## Note: "charset='utf8'" is needed for recent MySQLdb versions
## (such as 1.2.1_p2 and above). For older MySQLdb versions such
## as 1.2.0, an explicit "init_command='SET NAMES utf8'" parameter
## would constitute an equivalent. But we are not bothering with
## older MySQLdb versions here, since we are recommending to
## upgrade to more recent versions anyway.
if dbhost is None:
dbhost = cfg['CFG_DATABASE_HOST']
if cfg['CFG_MISCUTIL_SQL_USE_SQLALCHEMY']:
return connect(host=dbhost, port=int(cfg['CFG_DATABASE_PORT']),
db=cfg['CFG_DATABASE_NAME'], user=cfg['CFG_DATABASE_USER'],
passwd=cfg['CFG_DATABASE_PASS'],
use_unicode=False, charset='utf8')
else:
thread_ident = (os.getpid(), get_ident())
if relogin:
connection = _DB_CONN[dbhost][thread_ident] = connect(host=dbhost,
port=int(cfg['CFG_DATABASE_PORT']),
db=cfg['CFG_DATABASE_NAME'],
user=cfg['CFG_DATABASE_USER'],
passwd=cfg['CFG_DATABASE_PASS'],
use_unicode=False, charset='utf8')
connection.autocommit(True)
return connection
else:
if thread_ident in _DB_CONN[dbhost]:
return _DB_CONN[dbhost][thread_ident]
else:
connection = _DB_CONN[dbhost][thread_ident] = connect(host=dbhost,
port=int(cfg['CFG_DATABASE_PORT']),
db=cfg['CFG_DATABASE_NAME'],
user=cfg['CFG_DATABASE_USER'],
passwd=cfg['CFG_DATABASE_PASS'],
use_unicode=False, charset='utf8')
connection.autocommit(True)
return connection
def _db_logout(dbhost=None):
"""Close a connection."""
if dbhost is None:
dbhost = cfg['CFG_DATABASE_HOST']
try:
del _DB_CONN[dbhost][(os.getpid(), get_ident())]
except KeyError:
pass
def close_connection(dbhost=None):
"""
Enforce the closing of a connection
Highly relevant in multi-processing and multi-threaded modules
"""
if dbhost is None:
dbhost = cfg['CFG_DATABASE_HOST']
try:
db = _DB_CONN[dbhost][(os.getpid(), get_ident())]
cur = db.cursor()
cur.execute("UNLOCK TABLES")
db.close()
del _DB_CONN[dbhost][(os.getpid(), get_ident())]
except KeyError:
pass
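# --- Illustrative example (added; not part of the original API) ---
# In forked workers, a connection inherited from the parent process must not
# be shared; closing it first forces _db_login() to open a fresh one.
def _example_forked_worker():
    close_connection()
    return run_sql("SELECT 1")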
def run_sql(sql, param=None, n=0, with_desc=False, with_dict=False, run_on_slave=False):
"""Run SQL on the server with PARAM and return result.
@param param: tuple of string params to insert in the query (see
notes below)
@param n: number of tuples in result (0 for unbounded)
@param with_desc: if True, will return a DB API 7-tuple describing
columns in query.
@param with_dict: if True, will return a list of dictionaries
composed of column-value pairs
@return: If SELECT, SHOW, DESCRIBE statements, return tuples of data,
followed by description if parameter with_desc is
provided.
If SELECT and with_dict=True, return a list of dictionaries
composed of column-value pairs, followed by description
if parameter with_desc is provided.
If INSERT, return last row id.
Otherwise return SQL result as provided by database.
@note: When the site is closed for maintenance (as governed by the
config variable CFG_ACCESS_CONTROL_LEVEL_SITE), do not attempt
to run any SQL queries but return empty list immediately.
Useful to be able to have the website up while MySQL database
is down for maintenance, hot copies, table repairs, etc.
@note: In case of problems, exceptions are returned according to
the Python DB API 2.0. The client code can import them from
this file and catch them.
"""
if cfg['CFG_ACCESS_CONTROL_LEVEL_SITE'] == 3:
# do not connect to the database as the site is closed for maintenance:
return []
if param:
param = tuple(param)
dbhost = cfg['CFG_DATABASE_HOST']
if run_on_slave and cfg['CFG_DATABASE_SLAVE']:
dbhost = cfg['CFG_DATABASE_SLAVE']
if 'sql-logger' in cfg.get('CFG_DEVEL_TOOLS', []):
log_sql_query(dbhost, sql, param)
try:
db = _db_login(dbhost)
cur = db.cursor()
gc.disable()
rc = cur.execute(sql, param)
gc.enable()
except (OperationalError, InterfaceError): # unexpected disconnect, bad malloc error, etc
# FIXME: now reconnect is always forced, we may perhaps want to ping() first?
try:
db = _db_login(dbhost, relogin=1)
cur = db.cursor()
gc.disable()
rc = cur.execute(sql, param)
gc.enable()
except (OperationalError, InterfaceError): # unexpected disconnect, bad malloc error, etc
raise
if string.upper(string.split(sql)[0]) in ("SELECT", "SHOW", "DESC", "DESCRIBE"):
if n:
recset = cur.fetchmany(n)
else:
recset = cur.fetchall()
if with_dict: # return list of dictionaries
# let's extract column names
keys = [row[0] for row in cur.description]
# let's construct a list of dictionaries
            list_dict_results = [dict(zip(keys, values)) for values in recset]
if with_desc:
return list_dict_results, cur.description
else:
return list_dict_results
else:
if with_desc:
return recset, cur.description
else:
return recset
else:
if string.upper(string.split(sql)[0]) == "INSERT":
rc = cur.lastrowid
return rc
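# --- Illustrative example (added; table and column names are hypothetical) ---
# run_sql() takes '%s' placeholders plus a tuple of parameters, so values are
# escaped by the MySQL driver rather than interpolated into the query string.
def _example_run_sql():
    rows = run_sql("SELECT id, name FROM collection WHERE name LIKE %s",
                   ('Preprint%',), n=10)
    for row in rows:
        print row  # tuples by default; pass with_dict=True to get dicts
    # for INSERT statements, run_sql() returns the last inserted row id
    return run_sql("INSERT INTO collection (name) VALUES (%s)", ('Books',))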
def run_sql_many(query, params, limit=None, run_on_slave=False):
"""Run SQL on the server with PARAM.
This method does executemany and is therefore more efficient than execute
but it has sense only with queries that affect state of a database
(INSERT, UPDATE). That is why the results just count number of affected rows
@param params: tuple of tuple of string params to insert in the query
@param limit: query will be executed in parts when number of
parameters is greater than limit (each iteration runs at most
`limit' parameters)
@return: SQL result as provided by database
"""
if limit is None:
limit = cfg['CFG_MISCUTIL_SQL_RUN_SQL_MANY_LIMIT']
dbhost = cfg['CFG_DATABASE_HOST']
if run_on_slave and cfg['CFG_DATABASE_SLAVE']:
dbhost = cfg['CFG_DATABASE_SLAVE']
i = 0
r = None
while i < len(params):
## make partial query safely (mimicking procedure from run_sql())
try:
db = _db_login(dbhost)
cur = db.cursor()
gc.disable()
rc = cur.executemany(query, params[i:i + limit])
gc.enable()
except (OperationalError, InterfaceError):
try:
db = _db_login(dbhost, relogin=1)
cur = db.cursor()
gc.disable()
rc = cur.executemany(query, params[i:i + limit])
gc.enable()
except (OperationalError, InterfaceError):
raise
## collect its result:
if r is None:
r = rc
else:
r += rc
i += limit
return r
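# --- Illustrative example (added; table name is hypothetical) ---
# run_sql_many() wraps executemany(): one query string, many parameter tuples,
# executed in batches of at most `limit` tuples per round trip. The return
# value is the summed affected-row count.
def _example_run_sql_many():
    params = [('rec-%d' % i,) for i in range(1000)]
    return run_sql_many("INSERT INTO collection (name) VALUES (%s)",
                        params, limit=200)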
def run_sql_with_limit(query, param=None, n=0, with_desc=False, wildcard_limit=0, run_on_slave=False):
"""This function should be used in some cases, instead of run_sql function, in order
    to protect the db from queries that might take a long time to respond
Ex: search queries like [a-z]+ ; cern*; a->z;
The parameters are exactly the ones for run_sql function.
In case the query limit is reached, an InvenioDbQueryWildcardLimitError will be raised.
"""
try:
dummy = int(wildcard_limit)
except ValueError:
raise
    if wildcard_limit < 1:  # no limit on the wildcard queries
        return run_sql(query, param, n, with_desc, run_on_slave=run_on_slave)
    safe_query = query + " limit %s" % wildcard_limit
res = run_sql(safe_query, param, n, with_desc, run_on_slave=run_on_slave)
if len(res) == wildcard_limit:
raise InvenioDbQueryWildcardLimitError(res)
return res
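# --- Illustrative example (added; table name is hypothetical) ---
# When the limit is hit, the partial result is still available on the raised
# exception's `res` attribute, so callers can degrade gracefully.
def _example_run_sql_with_limit():
    try:
        return run_sql_with_limit("SELECT id FROM collection WHERE name LIKE %s",
                                  ('%',), wildcard_limit=100)
    except InvenioDbQueryWildcardLimitError as exc:
        return exc.res  # truncated result set, wildcard_limit rows long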
def blob_to_string(ablob):
"""Return string representation of ABLOB. Useful to treat MySQL
BLOBs in the same way for both recent and old MySQLdb versions.
"""
if ablob:
if type(ablob) is str:
# BLOB is already a string in MySQLdb 0.9.2
return ablob
else:
# BLOB is array.array in MySQLdb 1.0.0 and later
return ablob.tostring()
else:
return ablob
def log_sql_query(dbhost, sql, param=None):
"""Log SQL query into prefix/var/log/dbquery.log log file. In order
to enable logging of all SQL queries, please uncomment one line
in run_sql() above. Useful for fine-level debugging only!
"""
from flask import current_app
from invenio.utils.date import convert_datestruct_to_datetext
from invenio.utils.text import indent_text
date_of_log = convert_datestruct_to_datetext(time.localtime())
message = date_of_log + '-->\n'
message += indent_text('Host:\n' + indent_text(str(dbhost), 2, wrap=True), 2)
message += indent_text('Query:\n' + indent_text(str(sql), 2, wrap=True), 2)
message += indent_text('Params:\n' + indent_text(str(param), 2, wrap=True), 2)
message += '-----------------------------\n\n'
try:
current_app.logger.info(message)
except:
pass
def get_table_update_time(tablename, run_on_slave=False):
"""Return update time of TABLENAME. TABLENAME can contain
wildcard `%' in which case we return the maximum update time
value.
"""
# Note: in order to work with all of MySQL 4.0, 4.1, 5.0, this
# function uses SHOW TABLE STATUS technique with a dirty column
# position lookup to return the correct value. (Making use of
# Index_Length column that is either of type long (when there are
# some indexes defined) or of type None (when there are no indexes
# defined, e.g. table is empty). When we shall use solely
# MySQL-5.0, we can employ a much cleaner technique of using
# SELECT UPDATE_TIME FROM INFORMATION_SCHEMA.TABLES WHERE
# table_name='collection'.
res = run_sql("SHOW TABLE STATUS LIKE %s", (tablename,),
run_on_slave=run_on_slave)
update_times = [] # store all update times
for row in res:
if type(row[10]) is long or \
row[10] is None:
# MySQL-4.1 and 5.0 have creation_time in 11th position,
# so return next column:
update_times.append(str(row[12]))
else:
# MySQL-4.0 has creation_time in 10th position, which is
# of type datetime.datetime or str (depending on the
# version of MySQLdb), so return next column:
update_times.append(str(row[11]))
return max(update_times)
def get_table_status_info(tablename, run_on_slave=False):
"""Return table status information on TABLENAME. Returned is a
dict with keys like Name, Rows, Data_length, Max_data_length,
etc. If TABLENAME does not exist, return empty dict.
"""
# Note: again a hack so that it works on all MySQL 4.0, 4.1, 5.0
res = run_sql("SHOW TABLE STATUS LIKE %s", (tablename,),
run_on_slave=run_on_slave)
    table_status_info = {}  # store the table status info
for row in res:
if type(row[10]) is long or \
row[10] is None:
# MySQL-4.1 and 5.0 have creation time in 11th position:
table_status_info['Name'] = row[0]
table_status_info['Rows'] = row[4]
table_status_info['Data_length'] = row[6]
table_status_info['Max_data_length'] = row[8]
table_status_info['Create_time'] = row[11]
table_status_info['Update_time'] = row[12]
else:
# MySQL-4.0 has creation_time in 10th position, which is
# of type datetime.datetime or str (depending on the
# version of MySQLdb):
table_status_info['Name'] = row[0]
table_status_info['Rows'] = row[3]
table_status_info['Data_length'] = row[5]
table_status_info['Max_data_length'] = row[7]
table_status_info['Create_time'] = row[10]
table_status_info['Update_time'] = row[11]
return table_status_info
def wash_table_column_name(colname):
"""
Evaluate table-column name to see if it is clean.
This function accepts only names containing [a-zA-Z0-9_].
@param colname: The string to be checked
@type colname: str
@return: colname if test passed
@rtype: str
@raise Exception: Raises an exception if colname is invalid.
"""
    if re.search(r'[^\w]', colname):
raise Exception('The table column %s is not valid.' % repr(colname))
return colname
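# --- Illustrative example (added; table name is hypothetical) ---
# Identifiers such as column names cannot be bound as '%s' parameters, so they
# must be validated before being spliced into the query text.
def _example_ordered_query(colname):
    return run_sql("SELECT id FROM collection ORDER BY %s"
                   % wash_table_column_name(colname))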
def real_escape_string(unescaped_string, run_on_slave=False):
"""
Escapes special characters in the unescaped string for use in a DB query.
@param unescaped_string: The string to be escaped
@type unescaped_string: str
@return: Returns the escaped string
@rtype: str
"""
dbhost = cfg['CFG_DATABASE_HOST']
if run_on_slave and cfg['CFG_DATABASE_SLAVE']:
dbhost = cfg['CFG_DATABASE_SLAVE']
connection_object = _db_login(dbhost)
escaped_string = connection_object.escape_string(unescaped_string)
return escaped_string
|
PXke/invenio
|
invenio/legacy/dbquery.py
|
Python
|
gpl-2.0
| 17,135
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Subscription.paid'
db.add_column(u'subscriptions_subscription', 'paid',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Subscription.paid'
db.delete_column(u'subscriptions_subscription', 'paid')
models = {
u'subscriptions.subscription': {
'Meta': {'ordering': "['created_at']", 'object_name': 'Subscription'},
'cpf': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '11'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'paid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
}
}
complete_apps = ['subscriptions']
|
xiru/xiru.wttd
|
eventex/subscriptions/migrations/0002_auto__add_field_subscription_paid.py
|
Python
|
gpl-2.0
| 1,450
|
import re
from stgit.compat import environ_get
from stgit.config import config
from .base import Immutable
from .date import Date
class Person(Immutable):
"""Represents an author or committer in a Git commit object.
Contains :attr:`name`, :attr:`email` and :attr:`timestamp`.
"""
def __init__(self, name, email, date):
self.name = name
self.email = email
self.date = date
@property
def name_email(self):
return '%s <%s>' % (self.name, self.email)
def set_name(self, name):
return self._replace(name=name)
def set_email(self, email):
return self._replace(email=email)
def set_date(self, date):
return self._replace(date=date)
def _replace(self, **kws):
return type(self)(
kws.get('name', self.name),
kws.get('email', self.email),
kws.get('date', self.date),
)
def __repr__(self):
return '%s %s' % (self.name_email, self.date)
@classmethod
def parse(cls, s):
m = re.match(r'^([^<]*)<([^>]*)>\s+(\d+\s+[+-]\d{4})$', s)
name = m.group(1).strip()
email = m.group(2)
date = Date(m.group(3))
return cls(name, email, date)
@classmethod
def user(cls):
if not hasattr(cls, '_user'):
cls._user = cls(
config.get('user.name'), config.get('user.email'), date=None
)
return cls._user
@classmethod
def author(cls):
if not hasattr(cls, '_author'):
user = cls.user()
cls._author = cls(
environ_get('GIT_AUTHOR_NAME', user.name),
environ_get('GIT_AUTHOR_EMAIL', user.email),
Date.maybe(environ_get('GIT_AUTHOR_DATE')),
)
return cls._author
@classmethod
def committer(cls):
if not hasattr(cls, '_committer'):
user = cls.user()
cls._committer = cls(
environ_get('GIT_COMMITTER_NAME', user.name),
environ_get('GIT_COMMITTER_EMAIL', user.email),
Date.maybe(environ_get('GIT_COMMITTER_DATE')),
)
return cls._committer
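# --- Hedged usage sketch (added; the sample values below are illustrative) ---
# Person.parse() expects the "Name <email> timestamp offset" form that Git
# stores in commit objects; because Person is Immutable, the set_* methods
# return new instances rather than modifying the original.
if __name__ == '__main__':
    p = Person.parse('Jane Doe <jane@example.com> 1234567890 +0000')
    print(p.set_email('jane.doe@example.com').name_email)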
|
emacsmirror/stgit
|
stgit/lib/git/person.py
|
Python
|
gpl-2.0
| 2,200
|
# -*- coding: utf-8 -*-
"""Called from datakortet\dkcoverage.bat to record regression test
coverage data in dashboard.
"""
import re
import os
# import sys
# import time
import glob
# from datakortet.dkdash.status import send_status
# from datakortet.utils import root
from coverage import coverage, misc
from coverage.files import find_python_files
from coverage.parser import CodeParser
from coverage.config import CoverageConfig
from . import dkenv
def linecount(fname, excludes):
"""Return the number of lines in ``fname``, counting the same way that
coverage does.
"""
cp = CodeParser(filename=fname,
exclude=re.compile(misc.join_regex(excludes)))
lines, excluded = cp.parse_source()
return len(lines), len(excluded)
def skiplist():
cov = coverage(config_file=os.path.join(dkenv.DKROOT, '.coveragerc'))
cwd = os.getcwd()
skippatterns = [os.path.normpath(p.replace(cwd, dkenv.DKROOT)) for p in cov.omit]
_skiplist = []
for pat in skippatterns:
_skiplist += glob.glob(pat)
return set(_skiplist)
def abspath(fname):
# cwd = os.getcwd()
res = os.path.normcase(
os.path.normpath(
os.path.abspath(fname))) #.replace(cwd, root()))))
return res
def valid_file(fname, _skiplist=None):
_skiplist = _skiplist or skiplist()
if fname.endswith('.py'):
absfname = abspath(fname)
if absfname not in _skiplist:
fpath, name = os.path.split(fname)
if name != '__init__.py' or os.stat(absfname).st_size > 0:
return absfname
return False
def python_files(folder):
_skiplist = skiplist()
for fname in find_python_files(folder):
f = valid_file(fname, _skiplist)
if f:
yield f
def pylinecount(rt=None, verbose=False):
"""Count Python lines the same way that coverage does.
"""
res = 0
cov = coverage(config_file=os.path.join(dkenv.DKROOT, '.coveragerc'))
rt = rt or dkenv.DKROOT
_skiplist = skiplist()
exclude_lines = cov.get_exclude_list()
for fname in python_files(rt):
if os.path.normpath(fname) not in _skiplist:
lcount, excount = linecount(fname, exclude_lines)
if verbose:
print '%5d %5d %s' % (lcount, excount, fname)
res += lcount
else:
if verbose:
print '-----', fname
return res
# def report_test_coverage(reportline, dashboard=True):
# start = time.time()
# parts = reportline.split()
#
# stmts = int(parts[1])
# skipped = int(parts[2])
# covered = stmts - skipped
# print >> sys.stderr, "COVERED:", covered
#
# linecount = pylinecount()
# print >> sys.stderr, "TOTAL: ", linecount
#
# coverage = 100.0 * covered / linecount
# severity = 'green'
# if coverage < 85:
# severity = 'yellow'
# if coverage < 60:
# severity = 'red'
#
# sys.stdout.write("Coverage: " + str(coverage) + '\n')
#
# if dashboard:
# send_status(tag='code.testcov',
# value=coverage,
# duration=time.time() - start,
# server='appsrv')
# if __name__ == "__main__":
# intxt = sys.stdin.read()
# report_test_coverage(intxt)
# sys.exit(0)
|
thebjorn/dkcoverage
|
dkcoverage/rtestcover.py
|
Python
|
gpl-2.0
| 3,325
|
#!/usr/bin/env python2
from GSettingsWidgets import *
from ChooserButtonWidgets import TweenChooserButton, EffectChooserButton
EFFECT_SETS = {
"cinnamon": ("traditional", "traditional", "traditional", "none", "none", "none"),
"scale": ("scale", "scale", "scale", "scale", "scale", "scale"),
"fade": ("fade", "fade", "fade", "scale", "scale", "scale"),
"blend": ("blend", "blend", "blend", "scale", "scale", "scale"),
"move": ("move", "move", "move", "scale", "scale", "scale"),
"flyUp": ("flyUp", "flyDown", "flyDown", "scale", "scale", "scale"),
"flyDown": ("flyDown", "flyUp", "flyUp", "scale", "scale", "scale"),
"default": ("scale", "scale", "none", "none", "none", "none")
}
TRANSITIONS_SETS = {
"cinnamon": ("easeOutQuad", "easeOutQuad", "easeInQuad", "easeInExpo", "easeNone", "easeInQuad"),
"normal": ("easeOutSine", "easeInBack", "easeInSine", "easeInBack", "easeOutBounce", "easeInBack"),
"extra": ("easeOutElastic", "easeOutBounce", "easeOutExpo", "easeInExpo", "easeOutElastic", "easeInExpo"),
"fade": ("easeOutQuart", "easeInQuart", "easeInQuart", "easeInBack", "easeOutBounce", "easeInBack")
}
TIME_SETS = {
"cinnamon": (175, 175, 200, 100, 100, 100),
"slow": (400, 400, 400, 100, 100, 100),
"normal": (250, 250, 250, 100, 100, 100),
"fast": (100, 100, 100, 100, 100, 100),
"default": (250, 250, 150, 400, 400, 400)
}
COMBINATIONS = {
# name effect transition time
"cinnamon": ("cinnamon", "cinnamon", "cinnamon"),
"scale": ("scale", "normal", "normal"),
"fancyScale": ("scale", "extra", "slow"),
"fade": ("fade", "fade", "normal"),
"blend": ("blend", "fade", "normal"),
"move": ("move", "normal", "fast"),
"flyUp": ("flyUp", "normal", "fast"),
"flyDown": ("flyDown", "normal", "fast"),
#for previous versions
"default": ("default", "normal", "default")
}
OPTIONS = (
("cinnamon", _("Cinnamon")),
("scale", _("Scale")),
("fancyScale", _("Fancy Scale")),
("fade", _("Fade")),
("blend", _("Blend")),
("move", _("Move")),
("flyUp", _("Fly up, down")),
("flyDown", _("Fly down, up")),
#for previous versions
("default", _("Default"))
)
TYPES = ("map", "close", "minimize", "maximize", "unmaximize", "tile")
SCHEMA = "org.cinnamon"
DEP_PATH = "org.cinnamon/desktop-effects"
KEY_TEMPLATE = "desktop-effects-%s-%s"
class GSettingsTweenChooserButton(TweenChooserButton, CSGSettingsBackend):
def __init__(self, schema, key, dep_key):
self.key = key
self.bind_prop = "tween"
self.bind_dir = Gio.SettingsBindFlags.DEFAULT
self.bind_object = self
if schema not in settings_objects.keys():
settings_objects[schema] = Gio.Settings.new(schema)
self.settings = settings_objects[schema]
super(GSettingsTweenChooserButton, self).__init__()
self.bind_settings()
class GSettingsEffectChooserButton(EffectChooserButton, CSGSettingsBackend):
def __init__(self, schema, key, dep_key, options):
self.key = key
self.bind_prop = "effect"
self.bind_dir = Gio.SettingsBindFlags.DEFAULT
self.bind_object = self
if schema not in settings_objects.keys():
settings_objects[schema] = Gio.Settings.new(schema)
self.settings = settings_objects[schema]
super(GSettingsEffectChooserButton, self).__init__(options)
self.bind_settings()
class Module:
name = "effects"
category = "appear"
comment = _("Control Cinnamon visual effects.")
def __init__(self, content_box):
keywords = _("effects, fancy, window")
sidePage = SidePage(_("Effects"), "cs-desktop-effects", keywords, content_box, module=self)
self.sidePage = sidePage
def on_module_selected(self):
if not self.loaded:
print "Loading Effects module"
self.sidePage.stack = SettingsStack()
self.sidePage.add_widget(self.sidePage.stack)
self.schema = Gio.Settings(SCHEMA)
self.effect_sets = {}
for name, sets in COMBINATIONS.items():
self.effect_sets[name] = (EFFECT_SETS[sets[0]], TRANSITIONS_SETS[sets[1]], TIME_SETS[sets[2]])
# Enable effects
page = SettingsPage()
self.sidePage.stack.add_titled(page, "effects", _("Enable effects"))
settings = page.add_section(_("Enable Effects"))
widget = GSettingsSwitch(_("Window effects"), "org.cinnamon", "desktop-effects")
settings.add_row(widget)
widget = GSettingsSwitch(_("Effects on dialog boxes"), "org.cinnamon", "desktop-effects-on-dialogs")
settings.add_reveal_row(widget, "org.cinnamon", "desktop-effects")
widget = GSettingsSwitch(_("Effects on menus"), "org.cinnamon", "desktop-effects-on-menus")
settings.add_reveal_row(widget, "org.cinnamon", "desktop-effects")
self.chooser = GSettingsComboBox(_("Effects style"), "org.cinnamon", "desktop-effects-style", OPTIONS)
self.chooser.content_widget.connect("changed", self.on_value_changed)
settings.add_reveal_row(self.chooser, "org.cinnamon", "desktop-effects")
widget = GSettingsSwitch(_("Fade effect on Cinnamon scrollboxes (like the Menu application list)"), "org.cinnamon", "enable-vfade")
settings.add_row(widget)
widget = GSettingsSwitch(_("Session startup animation"), "org.cinnamon", "startup-animation")
settings.add_row(widget)
if Gtk.get_major_version() == 3 and Gtk.get_minor_version() >= 16:
widget = GSettingsSwitch(_("Overlay scroll bars (logout required)"), "org.cinnamon.desktop.interface", "gtk-overlay-scrollbars")
settings.add_row(widget)
self.schema.connect("changed::desktop-effects", self.on_desktop_effects_enabled_changed)
# Customize
page = SettingsPage()
self.sidePage.stack.add_titled(page, "customize", _("Customize"))
box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
label = Gtk.Label()
label.set_markup("<b>%s</b>" % _("Customize settings"))
box.pack_start(label, False, False, 0)
self.custom_switch = Gtk.Switch(active = self.is_custom())
box.pack_end(self.custom_switch, False, False, 0)
self.custom_switch.connect("notify::active", self.update_effects)
page.add(box)
self.revealer = Gtk.Revealer()
self.revealer.set_transition_type(Gtk.RevealerTransitionType.SLIDE_DOWN)
self.revealer.set_transition_duration(150)
page.add(self.revealer)
settings = SettingsBox(_("Effect"))
self.revealer.add(settings)
self.size_group = Gtk.SizeGroup.new(Gtk.SizeGroupMode.HORIZONTAL)
effects = ["none", "scale", "fade", "blend", "move", "flyUp", "flyDown", "traditional"]
# MAPPING WINDOWS
widget = self.make_effect_group(_("Mapping windows"), "map", effects)
settings.add_row(widget)
# CLOSING WINDOWS
widget = self.make_effect_group(_("Closing windows"), "close", effects)
settings.add_row(widget)
# MINIMIZING WINDOWS
widget = self.make_effect_group(_("Minimizing windows"), "minimize", effects)
settings.add_row(widget)
# MAXIMIZING WINDOWS
# effects = ["none", _("None")], ["scale", _("Scale")]]
widget = self.make_effect_group(_("Maximizing windows"), "maximize")
settings.add_row(widget)
# UNMAXIMIZING WINDOWS
widget = self.make_effect_group(_("Unmaximizing windows"), "unmaximize")
settings.add_row(widget)
# TILING WINDOWS
widget = self.make_effect_group(_("Tiling and snapping windows"), "tile")
settings.add_row(widget)
self.update_effects(self.custom_switch, None)
def make_effect_group(self, group_label, key, effects=None):
tmin, tmax, tstep, tdefault = (0, 2000, 50, 200)
        row = SettingsWidget()
row.set_spacing(5)
label = Gtk.Label()
label.set_markup(group_label)
label.props.xalign = 0.0
row.pack_start(label, False, False, 0)
label = Gtk.Label(_("ms"))
row.pack_end(label, False, False, 0)
effect = GSettingsEffectChooserButton(SCHEMA, KEY_TEMPLATE % (key, "effect"), DEP_PATH, effects)
self.size_group.add_widget(effect)
tween = GSettingsTweenChooserButton(SCHEMA, KEY_TEMPLATE % (key, "transition"), DEP_PATH)
self.size_group.add_widget(tween)
time = GSettingsSpinButton("", SCHEMA, KEY_TEMPLATE % (key, "time"), dep_key=DEP_PATH, mini=tmin, maxi=tmax, step=tstep, page=tdefault)
time.set_border_width(0)
time.set_margin_right(0)
time.set_margin_left(0)
time.set_spacing(0)
row.pack_end(time, False, False, 0)
row.pack_end(tween, False, False, 0)
row.pack_end(effect, False, False, 0)
return row
def is_custom(self):
effects = []
transitions = []
times = []
for i in TYPES:
effects.append(self.schema.get_string(KEY_TEMPLATE % (i, "effect")))
transitions.append(self.schema.get_string(KEY_TEMPLATE % (i, "transition")))
times.append(self.schema.get_int(KEY_TEMPLATE % (i, "time")))
value = (tuple(effects), tuple(transitions), tuple(times))
return value != self.effect_sets[self.chooser.value]
def on_value_changed(self, widget):
value = self.effect_sets[self.schema.get_string("desktop-effects-style")]
j = 0
for i in TYPES:
self.schema.set_string(KEY_TEMPLATE % (i, "effect"), value[0][j])
self.schema.set_string(KEY_TEMPLATE % (i, "transition"), value[1][j])
self.schema.set_int(KEY_TEMPLATE % (i, "time"), value[2][j])
j += 1
def update_effects(self, switch, gparam):
active = switch.get_active()
self.revealer.set_reveal_child(active)
#when unchecking the checkbutton, reset the values
if not active:
self.on_value_changed(self.chooser)
def on_desktop_effects_enabled_changed(self, schema, key):
active = schema.get_boolean(key)
if not active and schema.get_boolean("desktop-effects-on-dialogs"):
schema.set_boolean("desktop-effects-on-dialogs", False)
self.update_effects(self.custom_switch, None)
|
pixunil/Cinnamon
|
files/usr/share/cinnamon/cinnamon-settings/modules/cs_effects.py
|
Python
|
gpl-2.0
| 10,983
|
#!/usr/bin/env python
import os
import sys
import MySQLdb
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "academicControl.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
hyms/academicControl
|
manage.py
|
Python
|
gpl-2.0
| 273
|
# -*- Mode: Python; test-case-name: flumotion.test.test_pb -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
import crypt
from twisted.cred import portal
from twisted.internet import defer, reactor
from twisted.python import log as tlog
from twisted.spread import pb as tpb
from twisted.trial import unittest
from zope.interface import implements
from flumotion.common import testsuite
from flumotion.common import keycards, log, errors
from flumotion.component.bouncers import htpasswdcrypt, saltsha256
from flumotion.twisted import checkers, pb
from flumotion.twisted import portal as fportal
from flumotion.test.common import haveTwisted
htpasswdcryptConf = {
'name': 'testbouncer',
'plugs': {},
'properties': {'data': "user:qi1Lftt0GZC0o"}}
saltsha256Conf = {
'name': 'testbouncer',
'plugs': {},
'properties': {
'data':
("user:"
"iamsalt:"
"2f826124ada2b2cdf11f4fd427c9ca48de0ed49b41476266d8df08d2cf86120e")}}
### lots of fake objects to have fun with
class FakePortalPlaintext(portal.Portal):
# a fake portal with a checker that lets username, password in
def __init__(self):
checker = checkers.FlexibleCredentialsChecker()
checker.addUser("username", "password")
portal.Portal.__init__(self, FakeTRealm(), (checker, ))
class FakePortalWrapperPlaintext:
# a fake wrapper with a checker that lets username, password in
def __init__(self):
self.broker = FakeBroker()
        self.portal = FakePortalPlaintext()
class FakePortalWrapperCrypt:
# a fake wrapper with a checker that lets username, crypt(password, iq) in
def __init__(self):
self.checker = checkers.CryptChecker()
cryptPassword = crypt.crypt('password', 'iq')
self.checker.addUser("username", cryptPassword)
self.portal = portal.Portal(FakeTRealm(), (self.checker, ))
# FIXME: using real portal
class FakeBouncerPortal:
# a fake wrapper implementing BouncerPortal lookalike
def __init__(self, bouncer):
self.bouncer = bouncer
def login(self, keycard, mind, interfaces):
return self.bouncer.authenticate(keycard)
class FakeAvatar(tpb.Avatar):
implements(tpb.IPerspective)
loggedIn = loggedOut = False
def __init__(self):
pass
def logout(self):
self.loggedOut = True
class FakeTRealm:
def __init__(self):
self.avatar = FakeAvatar()
def requestAvatar(self, avatarId, mind, *ifaces):
interface = ifaces[0]
assert interface == tpb.IPerspective, (
"interface is %r and not IPerspective" % interface)
self.avatar.loggedIn = True
# we can return a deferred, or return directly
return defer.succeed((tpb.IPerspective,
self.avatar, self.avatar.logout))
class FakeFRealm(FakeTRealm):
def requestAvatar(self, avatarId, keycard, mind, *interfaces):
return FakeTRealm.requestAvatar(self, avatarId, mind, *interfaces)
class FakeMind(tpb.Referenceable):
pass
class FakeBroker(tpb.Broker):
pass
# our test for twisted's challenger
# this is done for comparison with our challenger
class TestTwisted_PortalAuthChallenger(testsuite.TestCase):
def setUp(self):
root = tpb.IPBRoot(FakePortalPlaintext()).rootObject(
broker=FakeBroker())
# PB server creates a challenge and a challenger to send to the client
self.challenge, self.challenger = root.remote_login('username')
def testRightPassword(self):
# client is asked to respond, so generate the response
response = tpb.respond(self.challenge, 'password')
self.challenger.remote_respond(response, None)
def testWrongPassword(self):
# client is asked to respond, so generate the response
response = tpb.respond(self.challenge, 'wrong')
d = self.challenger.remote_respond(response, None)
def wrongPasswordErrback(wrongpasserror):
self.assert_(isinstance(wrongpasserror.type(),
errors.NotAuthenticatedError))
d.addErrback(wrongPasswordErrback)
return d
### SHINY NEW FPB
class Test_BouncerWrapper(testsuite.TestCase):
def setUp(self):
broker = FakeBroker()
self.bouncer = htpasswdcrypt.HTPasswdCrypt(htpasswdcryptConf)
self.bouncerPortal = fportal.BouncerPortal(FakeFRealm(), self.bouncer)
self.wrapper = pb._BouncerWrapper(self.bouncerPortal, broker)
def tearDown(self):
self.bouncer.stop()
def testUACPPOk(self):
mind = FakeMind()
keycard = keycards.KeycardUACPP('user', 'test', '127.0.0.1')
d = self.wrapper.remote_login(keycard, mind,
'twisted.spread.pb.IPerspective')
def uacppOkCallback(result):
self.assert_(isinstance(result, tpb.AsReferenceable))
return result
d.addCallback(uacppOkCallback)
return d
def testUACPPWrongPassword(self):
keycard = keycards.KeycardUACPP('user', 'tes', '127.0.0.1')
d = self.wrapper.remote_login(keycard, "avatarId",
'twisted.spread.pb.IPerspective')
def uacppWrongPasswordErrback(wrongpasserror):
self.assert_(isinstance(wrongpasserror.type(),
errors.NotAuthenticatedError))
d.addErrback(uacppWrongPasswordErrback)
return d
def testUACPCCOk(self):
# create
keycard = keycards.KeycardUACPCC('user', '127.0.0.1')
# send
d = self.wrapper.remote_login(keycard, None,
'twisted.spread.pb.IPerspective')
def uacpccOkCallback(keycard):
self.assertEquals(keycard.state, keycards.REQUESTING)
# respond to challenge
keycard.setPassword('test')
d = self.wrapper.remote_login(keycard, None,
'twisted.spread.pb.IPerspective')
def uacpccOkCallback2(result):
self.assert_(isinstance(result, tpb.AsReferenceable))
return result
d.addCallback(uacpccOkCallback2)
return d
d.addCallback(uacpccOkCallback)
return d
def testUACPCCWrongUser(self):
# create
keycard = keycards.KeycardUACPCC('wronguser', '127.0.0.1')
# send
d = self.wrapper.remote_login(keycard, "avatarId",
'twisted.spread.pb.IPerspective')
def uacpccWrongUserCallback(keycard):
self.assertEquals(keycard.state, keycards.REQUESTING)
# respond to challenge
keycard.setPassword('test')
d = self.wrapper.remote_login(keycard, "avatarId",
'twisted.spread.pb.IPerspective')
def uacpccWrongUserErrback(failure):
self.assert_(isinstance(failure.type(),
errors.NotAuthenticatedError))
return True
d.addErrback(uacpccWrongUserErrback)
return d
d.addCallback(uacpccWrongUserCallback)
return d
def testUACPCCWrongPassword(self):
# create
keycard = keycards.KeycardUACPCC('user', '127.0.0.1')
# send
d = self.wrapper.remote_login(keycard, "avatarId",
'twisted.spread.pb.IPerspective')
def uacpccWrongPasswordCallback(keycard):
self.assertEquals(keycard.state, keycards.REQUESTING)
# respond to challenge
keycard.setPassword('wrong')
d = self.wrapper.remote_login(keycard, "avatarId",
'twisted.spread.pb.IPerspective')
def uacpccWrongPasswordErrback(failure):
self.assert_(isinstance(failure.type(),
errors.NotAuthenticatedError))
return True
d.addErrback(uacpccWrongPasswordErrback)
return d
d.addCallback(uacpccWrongPasswordCallback)
return d
def testUACPCCTamperWithChallenge(self):
# create challenger
keycard = keycards.KeycardUACPCC('user', '127.0.0.1')
self.assert_(keycard)
self.assertEquals(keycard.state, keycards.REQUESTING)
# submit for auth
d = self.wrapper.remote_login(keycard, "avatarId",
'twisted.spread.pb.IPerspective')
def uacpccTamperCallback(keycard):
self.assertEquals(keycard.state, keycards.REQUESTING)
# mess with challenge, respond to challenge and resubmit
keycard.challenge = "I am a h4x0r"
keycard.setPassword('test')
d = self.wrapper.remote_login(keycard, "avatarId",
'twisted.spread.pb.IPerspective')
def uacpccTamperErrback(failure):
self.assert_(isinstance(failure.type(),
errors.NotAuthenticatedError))
d.addErrback(uacpccTamperErrback)
return d
d.addCallback(uacpccTamperCallback)
return d
class Test_FPortalRoot(testsuite.TestCase):
def setUp(self):
self.bouncerPortal = fportal.BouncerPortal(FakeFRealm(), 'bouncer')
self.root = pb._FPortalRoot(self.bouncerPortal)
def testRootObject(self):
root = self.root.rootObject('a')
self.failUnless(isinstance(root, pb._BouncerWrapper))
self.assertEquals(root.broker, 'a')
class TestAuthenticator(testsuite.TestCase):
def testIssueNoInfo(self):
# not setting any useful auth info on the authenticator does not
# allow us to issue a keycard
a = pb.Authenticator(username="tarzan")
d = a.issue([
"flumotion.common.keycards.KeycardUACPP", ])
d.addCallback(lambda r: self.failIf(r))
return d
def testIssueUACPP(self):
# our authenticator by default only does challenge-based keycards
a = pb.Authenticator(username="tarzan", password="jane",
address="localhost")
d = a.issue([
"flumotion.common.keycards.KeycardUACPP", ])
        d.addCallback(lambda r: self.failIf(r))
        return d
def testIssueUACPCC(self):
a = pb.Authenticator(username="tarzan", password="jane",
address="localhost")
d = a.issue([
"flumotion.common.keycards.KeycardUACPCC", ])
d.addCallback(lambda r: self.failUnless(isinstance(r,
keycards.KeycardUACPCC)))
return d
# time for the big kahuna
# base class so we can use different bouncers
class Test_FPBClientFactory(testsuite.TestCase):
def setUp(self):
self.realm = FakeFRealm()
self.bouncer = self.bouncerClass(self.bouncerConf)
self.portal = fportal.BouncerPortal(self.realm, self.bouncer)
self.serverFactory = tpb.PBServerFactory(self.portal,
unsafeTracebacks=0)
self.port = reactor.listenTCP(0, self.serverFactory,
interface="127.0.0.1")
self.portno = self.port.getHost().port
def flushNotAuthenticatedError(self):
try:
self.flushLoggedErrors(errors.NotAuthenticatedError)
except AttributeError:
tlog.flushErrors(errors.NotAuthenticatedError)
def tearDown(self):
self.bouncer.stop()
self.flushNotAuthenticatedError()
self.port.stopListening()
def clientDisconnect(self, factory, reference):
# clean up broker by waiting on Disconnect notify
d = defer.Deferred()
if reference:
reference.notifyOnDisconnect(lambda r: d.callback(None))
factory.disconnect()
if reference:
return d
# test with htpasswdcrypt bouncer first
class Test_FPBClientFactoryHTPasswdCrypt(Test_FPBClientFactory):
bouncerClass = htpasswdcrypt.HTPasswdCrypt
bouncerConf = htpasswdcryptConf
def testOk(self):
factory = pb.FPBClientFactory()
a = pb.Authenticator(username="user", password="test",
address="127.0.0.1")
# send
d = factory.login(a)
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
def OkCallback(result):
# make sure we really used challenge/response keycard
self.failUnless(isinstance(factory.keycard,
keycards.KeycardUACPCC))
self.assert_(isinstance(result, tpb.RemoteReference))
return self.clientDisconnect(factory, result)
d.addCallback(OkCallback)
return d
def testWrongPassword(self):
factory = pb.FPBClientFactory()
        a = pb.Authenticator(username="user", password="wrong",
                             address="127.0.0.1")
d = factory.login(a)
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
log.debug("trial", "wait for result")
def WrongPasswordErrback(failure):
self.failUnless(isinstance(factory.keycard,
keycards.KeycardUACPCC))
# This is a CopiedFailure
self.assert_(failure.check(
"flumotion.common.errors.NotAuthenticatedError"))
log.debug("trial", "got failure %r" % failure)
c.disconnect()
return True
d.addErrback(WrongPasswordErrback)
return d
# FIXME: rewrite such that we can enforce a challenger, possibly
# by setting a property on the bouncer
def notestUACPCCOk(self):
factory = pb.FPBClientFactory()
# send
d = factory.login(self.authenticator, 'MIND')
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
def uacpccOkCallback(keycard):
# get result
self.assertEquals(keycard.state, keycards.REQUESTING)
# respond to challenge
keycard.setPassword('test')
d = factory.login(keycard, 'MIND')
# check if we have a remote reference
def uacpccOkCallback2(result):
self.assert_(isinstance(result, tpb.RemoteReference))
return self.clientDisconnect(factory, result)
d.addCallback(uacpccOkCallback2)
return d
d.addCallback(uacpccOkCallback)
return d
def testWrongUser(self):
factory = pb.FPBClientFactory()
# create
a = pb.Authenticator(username="wronguser", password="test",
address="127.0.0.1")
# send
d = factory.login(a)
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
def WrongUserCb(keycard):
self.fail("Should have returned NotAuthenticatedError")
def WrongUserEb(failure):
# find copied failure
self.failUnless(failure.check(
"flumotion.common.errors.NotAuthenticatedError"))
return self.clientDisconnect(factory, None)
d.addCallback(WrongUserCb)
d.addErrback(WrongUserEb)
return d
def notestUACPCCWrongPassword(self):
factory = pb.FPBClientFactory()
# create
keycard = keycards.KeycardUACPCC('user', '127.0.0.1')
# send
d = factory.login(keycard, 'MIND')
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
def uacpccWrongPasswordCallback(keycard):
self.assertEquals(keycard.state, keycards.REQUESTING)
# respond to challenge
keycard.setPassword('wrongpass')
d = factory.login(keycard, 'MIND')
def uacpccWrongPasswordErrback(failure):
# find copied failure
self.failUnless(failure.check(
"flumotion.common.errors.NotAuthenticatedError"))
return self.clientDisconnect(factory, None)
d.addErrback(uacpccWrongPasswordErrback)
return d
d.addCallback(uacpccWrongPasswordCallback)
return d
def notestUACPCCTamperWithChallenge(self):
factory = pb.FPBClientFactory()
# create
keycard = keycards.KeycardUACPCC('user', '127.0.0.1')
self.assert_(keycard)
self.assertEquals(keycard.state, keycards.REQUESTING)
# send
d = factory.login(keycard, 'MIND')
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
def uacpccTamperCallback(keycard):
self.assertEquals(keycard.state, keycards.REQUESTING)
# mess with challenge, respond to challenge and resubmit
keycard.challenge = "I am a h4x0r"
keycard.setPassword('test')
d = factory.login(keycard, 'MIND')
def uacpccTamperErrback(failure):
# find copied failure
self.failUnless(failure.check(
"flumotion.common.errors.NotAuthenticatedError"))
return self.clientDisconnect(factory, None)
d.addErrback(uacpccTamperErrback)
return d
d.addCallback(uacpccTamperCallback)
return d
# test with sha256 bouncer
class Test_FPBClientFactorySaltSha256(Test_FPBClientFactory):
bouncerClass = saltsha256.SaltSha256
bouncerConf = saltsha256Conf
def testOk(self):
factory = pb.FPBClientFactory()
a = pb.Authenticator(username="user", password="test",
address="127.0.0.1")
# send
d = factory.login(a)
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
def OkCallback(result):
# make sure we really used an SHA256 challenge/response keycard
self.failUnless(isinstance(factory.keycard,
keycards.KeycardUASPCC))
self.assert_(isinstance(result, tpb.RemoteReference))
return self.clientDisconnect(factory, result)
d.addCallback(OkCallback)
return d
def testWrongPassword(self):
factory = pb.FPBClientFactory()
a = pb.Authenticator(username="user", password="wrong",
address="127.0.0.1")
d = factory.login(a)
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
log.debug("trial", "wait for result")
def WrongPasswordErrback(failure):
# make sure we really used an SHA256 challenge/response keycard
self.failUnless(isinstance(factory.keycard,
keycards.KeycardUASPCC))
# This is a CopiedFailure
self.assert_(failure.check(
"flumotion.common.errors.NotAuthenticatedError"))
log.debug("trial", "got failure %r" % failure)
c.disconnect()
return True
d.addErrback(WrongPasswordErrback)
return d
def testWrongUser(self):
factory = pb.FPBClientFactory()
# create
a = pb.Authenticator(username="wronguser", password="test",
address="127.0.0.1")
# send
d = factory.login(a)
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
def WrongUserCb(keycard):
self.fail("Should have returned NotAuthenticatedError")
def WrongUserEb(failure):
# find copied failure
self.failUnless(failure.check(
"flumotion.common.errors.NotAuthenticatedError"))
return self.clientDisconnect(factory, None)
d.addCallback(WrongUserCb)
d.addErrback(WrongUserEb)
return d
# FIXME: do this with a fake authenticator that tampers with the challenge
def notestUACPCCTamperWithChallenge(self):
factory = pb.FPBClientFactory()
# create
keycard = keycards.KeycardUACPCC('user', '127.0.0.1')
self.assert_(keycard)
self.assertEquals(keycard.state, keycards.REQUESTING)
# send
d = factory.login(keycard, 'MIND')
c = reactor.connectTCP("127.0.0.1", self.portno, factory)
def uacpccTamperCallback(keycard):
self.assertEquals(keycard.state, keycards.REQUESTING)
# mess with challenge, respond to challenge and resubmit
keycard.challenge = "I am a h4x0r"
keycard.setPassword('test')
d = factory.login(keycard, 'MIND')
def uacpccTamperErrback(failure):
# find copied failure
self.failUnless(failure.check(
"flumotion.common.errors.NotAuthenticatedError"))
return self.clientDisconnect(factory, None)
d.addErrback(uacpccTamperErrback)
return d
d.addCallback(uacpccTamperCallback)
return d
if __name__ == '__main__':
unittest.main()
|
ylatuya/Flumotion
|
flumotion/test/test_pb.py
|
Python
|
gpl-2.0
| 21,383
|
import numpy as np
import re
import os
def GetBlocks(filename):
'''
    Takes the filename and returns the list [[[pt1.x,pt1.y],...],[block2],...]
'''
f=open(filename)
block=False
lastlist=[]
listone=[]
for i in f:
print(i)
if(re.match(r"[\d\.]+e[\+\-][\d]+\t[\d\.]+e[\+\-][\d]+", i)):
print("entrato")
block=True
slast=re.findall(r"[\d\.]+e[\+\-][\d]+", i)
lastlist.append(list(map(float, slast)))
        else:
            if(block):
                listone.append(lastlist)
                lastlist=[]
                block=False
    if(block):  # flush the last block when the file ends inside a block
        listone.append(lastlist)
    f.close()
    return listone
print("Test...")
l=GetBlocks(r'C:\Users\silvanamorreale\Documents\GitHub\Lab3.2\parser test.txt')
print(l)
|
AleCandido/Lab3.2
|
Utilities/parser.py
|
Python
|
gpl-2.0
| 755
|
#! /usr/bin/python3
import sys
MAXLINELEN = 80
TABLEN = 8
# 3 for two quotes and a comma
FIRSTLINELEN = MAXLINELEN - TABLEN - 3
OTHERLINELEN = FIRSTLINELEN - 2 * TABLEN
FIRSTLINEBYTES = FIRSTLINELEN // 2
OTHERLINEBYTES = OTHERLINELEN // 2
def fix_line(line):
return "".join("\\x{}".format(line[i:i + 2].decode()) for i in range(0, len(line), 2))
def main():
with open(sys.argv[1], "rb") as f:
data = f.read().strip().splitlines()
with sys.stdout as f:
f.write("#define INPUTLEN {}\n".format(len(data[0]) // 2))
f.write("\n")
f.write("static const unsigned char input[][INPUTLEN + 1] = {\n")
for line in data:
f.write("\t\"{}\"".format(fix_line(line[:FIRSTLINEBYTES])))
if len(line) > FIRSTLINEBYTES:
line = line[FIRSTLINEBYTES:]
while line:
f.write("\n\t\t\t\"{}\"".format(
fix_line(line[:OTHERLINEBYTES])))
line = line[OTHERLINEBYTES:]
f.write(",\n")
f.write("};\n")
if __name__ == "__main__":
    main()
|
talshorer/cryptopals
|
scripts/transform_hex_multine.py
|
Python
|
gpl-2.0
| 960
|