| code (stringlengths 2-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int32, 2-1.05M) |
|---|---|---|---|---|---|
from unittest import skip
import requests
from django.core.urlresolvers import reverse
from django.utils import timezone
from django.core.management import call_command
from django.test import TestCase
from django.utils.six import StringIO
from django.contrib.auth import get_user_model
from .models import Run, RunkeeperToken
from .runkeeper import create_runs_from_runkeeper
from datetime import timedelta
class TestSyncAllRunsCommand(TestCase):
pass
#
# @mock('runkeeper....')
# def test_syn_all_runs_output(self):
# out = StringIO()
# call_command('syncallruns', stdout=out)
# self.assertIn('XXXXX', out.getvalue())
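    # A runnable version could patch the HTTP layer instead (sketch; the
    # patch target below is hypothetical and depends on how runkeeper.py
    # issues its requests):
    #
    # from unittest import mock
    #
    # @mock.patch('runs.runkeeper.requests.get')
    # def test_sync_all_runs_output(self, mock_get):
    #     mock_get.return_value.json.return_value = {'items': []}
    #     out = StringIO()
    #     call_command('syncallruns', stdout=out)
    #     self.assertIn('XXXXX', out.getvalue())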
class TestInputRun(TestCase):
def setUp(self):
self.test_user = get_user_model().objects.create(username='test')
self.test_user.set_password('pass123')
self.test_user.is_active = True
self.test_user.save()
def test_run_model(self):
yesterday = timezone.now() - timedelta(days=1)
new_run = Run.objects.create(runner=self.test_user, distance=10,
recorded_time=timedelta(1),
start_date=yesterday)
        self.assertEqual(new_run.start_date, new_run.end_date)
# TODO: test save too... ;-)
def test_input_run_post(self):
self.client.login(username='test', password='pass123')
data = {
'distance': '10',
'start_date': timezone.now().strftime('%Y-%m-%d'),
'end_date': timezone.now().strftime('%Y-%m-%d'),
'recorded_time': '00:01:00',
}
resp = self.client.post(reverse('runs:input_run'), data,
follow=True)
self.assertRedirects(resp,
reverse('runs:user_runs',
kwargs=dict(user_id=self.test_user.id)),
302, 200)
msgs = list(resp.context['messages'])
self.assertTrue(msgs)
self.assertEqual('Your run has been added', str(msgs[0]))
class TestRunkeeper(TestCase):
def setUp(self):
self.test_user = get_user_model().objects.create(username='test')
self.test_user.set_password('pass123')
self.test_user.is_active = True
self.test_user.save()
self.token = 'XXXXXXXXXXXXXXXXXXXXXXXXXXX'
RunkeeperToken.objects.create(runner=self.test_user,
access_token=self.token)
@skip("WIP")
def test_create_runs_from_runkeeper(self):
"""
WIP!
"""
self.assertEqual(0, Run.objects.count())
# create_runs_from_runkeeper(user_id=self.test_user.id)
# self.assertEqual(1, Run.objects.count())
headers = {
'Authorization': 'Bearer %s' % (self.token, ),
'Accept': '',
}
url = 'https://api.runkeeper.com/user'
headers['Accept'] = 'application/vnd.com.runkeeper.User+json'
resp = requests.get(url, headers=headers)
        print(resp.status_code)
        print(resp.headers)
        print(resp.json())
user = resp.json()
url = 'https://api.runkeeper.com' + user['fitness_activities']
headers['Accept'] = 'application/vnd.com.runkeeper.FitnessActivityFeed+json'
resp = requests.get(url, headers=headers)
        print(resp.status_code)
        print(resp.headers)
        print(resp.text)
self.assertDictEqual({}, resp.json())
| Socialsquare/RunningCause | runs/tests.py | Python | mit | 3,502 |
import FWCore.ParameterSet.Config as cms
#---------------------------------------------------------------------------------------------------
# M A I N
#---------------------------------------------------------------------------------------------------
# create the process
process = cms.Process('FILEFI')
# say how many events to process (-1 means no limit)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
#>> input source
process.source = cms.Source(
"PoolSource",
# make sure this is for the right version
fileNames = cms.untracked.vstring('file:/mnt/hadoop/cmsprod/00165B45-82E6-E311-B68D-002590AC4FEC.root')
)
process.source.inputCommands = cms.untracked.vstring(
"keep *",
"drop *_MEtoEDMConverter_*_*",
"drop L1GlobalTriggerObjectMapRecord_hltL1GtObjectMap__HLT"
)
#>> configurations
# determine the global tag to use
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_condDBv2_cff')
process.GlobalTag.globaltag = 'MCRUN2_74_V9'
# define meta data for this production
process.configurationMetadata = cms.untracked.PSet(
name = cms.untracked.string('BambuProd'),
version = cms.untracked.string('Mit_042'),
annotation = cms.untracked.string('AODSIM')
)
#>> standard sequences
# load some standard sequences we will need
process.load('Configuration.StandardSequences.Services_cff')
process.load('Configuration.StandardSequences.GeometryDB_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_cff')
process.load('Configuration.EventContent.EventContent_cff')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('RecoVertex.PrimaryVertexProducer.OfflinePrimaryVertices_cfi')
process.load('TrackingTools.TransientTrack.TransientTrackBuilder_cfi')
# define sequence for ProductNotFound
process.options = cms.untracked.PSet(
Rethrow = cms.untracked.vstring('ProductNotFound'),
fileMode = cms.untracked.string('NOMERGE'),
wantSummary = cms.untracked.bool(False)
)
# Import/Load the filler so all is already available for config changes
from MitProd.TreeFiller.MitTreeFiller_cfi import MitTreeFiller
process.load('MitProd.TreeFiller.MitTreeFiller_cfi')
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
#
# R E C O S E Q U E N C E
#
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
## Load stablePart producers
from MitEdm.Producers.conversionElectronsStable_cfi import electronsStable
process.load('MitEdm.Producers.conversionElectronsStable_cfi')
# Load Mit Mvf Conversion producer
# MultiVertexFitter is currently broken
#from MitProd.TreeFiller.conversionProducer_cff import conversionProducer, addConversionFiller
#process.load('MitProd.TreeFiller.conversionProducer_cff')
#addConversionFiller(MitTreeFiller)
# Electron likelihood-based id
from RecoEgamma.ElectronIdentification.electronIdLikelihoodExt_cfi import eidLikelihoodExt
process.load('RecoEgamma.ElectronIdentification.electronIdLikelihoodExt_cfi')
MitTreeFiller.Electrons.eIDLikelihoodName = 'eidLikelihoodExt'
# Load FastJet L1 corrections
from MitProd.TreeFiller.FastJetCorrection_cff import l1FastJetSequence, l1FastJetSequenceCHS
process.load('MitProd.TreeFiller.FastJetCorrection_cff')
# Load btagging
from MitProd.TreeFiller.utils.setupBTag import setupBTag
ak4PFBTagSequence = setupBTag(process, 'ak4PFJets', 'AKt4PF')
ak4PFCHSBTagSequence = setupBTag(process, 'ak4PFJetsCHS', 'AKt4PFCHS')
# Load basic particle flow collections
# Used for rho calculation
from CommonTools.ParticleFlow.goodOfflinePrimaryVertices_cfi import goodOfflinePrimaryVertices
from CommonTools.ParticleFlow.pfParticleSelection_cff import pfParticleSelectionSequence, pfPileUp, pfNoPileUp, pfPileUpIso, pfNoPileUpIso
from CommonTools.ParticleFlow.pfPhotons_cff import pfPhotonSequence
from CommonTools.ParticleFlow.pfElectrons_cff import pfElectronSequence
from CommonTools.ParticleFlow.pfMuons_cff import pfMuonSequence
from CommonTools.ParticleFlow.pfJets_cff import pfJets
from CommonTools.ParticleFlow.TopProjectors.pfNoMuon_cfi import pfNoMuon
from CommonTools.ParticleFlow.TopProjectors.pfNoElectron_cfi import pfNoElectron
process.load('CommonTools.ParticleFlow.goodOfflinePrimaryVertices_cfi')
process.load('CommonTools.ParticleFlow.pfParticleSelection_cff')
process.load('CommonTools.ParticleFlow.pfPhotons_cff')
process.load('CommonTools.ParticleFlow.pfElectrons_cff')
process.load('CommonTools.ParticleFlow.pfMuons_cff')
process.load('CommonTools.ParticleFlow.TopProjectors.pfNoMuon_cfi')
process.load('CommonTools.ParticleFlow.TopProjectors.pfNoElectron_cfi')
# Loading PFProducer to get the ptrs
from RecoParticleFlow.PFProducer.pfLinker_cff import particleFlowPtrs
process.load('RecoParticleFlow.PFProducer.pfLinker_cff')
# Load btagging
# recluster fat jets, subjets, btagging
from MitProd.TreeFiller.utils.makeFatJets import makeFatJets
fatjetSequence = makeFatJets(process, isData = False)
pfPileUp.PFCandidates = 'particleFlowPtrs'
pfNoPileUp.bottomCollection = 'particleFlowPtrs'
pfPileUpIso.PFCandidates = 'particleFlowPtrs'
pfNoPileUpIso.bottomCollection='particleFlowPtrs'
pfPileUp.Enable = True
pfPileUp.Vertices = 'goodOfflinePrimaryVertices'
pfPileUp.checkClosestZVertex = cms.bool(False)
#> Setup jet corrections
process.load('JetMETCorrections.Configuration.JetCorrectionServices_cff')
#> Setup the met filters
from MitProd.TreeFiller.metFilters_cff import metFilters
process.load('MitProd.TreeFiller.metFilters_cff')
#> The bambu reco sequence
recoSequence = cms.Sequence(
electronsStable *
eidLikelihoodExt *
# conversionProducer *
goodOfflinePrimaryVertices *
particleFlowPtrs *
pfParticleSelectionSequence *
pfPhotonSequence *
pfMuonSequence *
pfNoMuon *
pfElectronSequence *
pfNoElectron *
l1FastJetSequence *
l1FastJetSequenceCHS *
ak4PFBTagSequence *
ak4PFCHSBTagSequence *
fatjetSequence *
metFilters
)
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
#
# G E N S E Q U E N C E
#
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# Import/Load genjets
from RecoJets.Configuration.GenJetParticles_cff import genJetParticles
process.load('RecoJets.Configuration.GenJetParticles_cff')
from RecoJets.Configuration.RecoGenJets_cff import ak4GenJets, ak8GenJets
process.load('RecoJets.Configuration.RecoGenJets_cff')
genSequence = cms.Sequence(
genJetParticles *
ak4GenJets *
ak8GenJets
)
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
#
# B A M B U S E Q U E N C E
#
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
# remember the bambu sequence has been imported and loaded already in the beginning
# configure the filler
MitTreeFiller.TreeWriter.fileName = 'bambu-output-file-tmp'
MitTreeFiller.PileupInfo.active = True
MitTreeFiller.MCParticles.active = True
MitTreeFiller.MCEventInfo.active = True
MitTreeFiller.MCVertexes.active = True
# NOTRIG - special
MitTreeFiller.Trigger.active = False
# define fill bambu filler sequence
bambuFillerSequence = cms.Sequence(
MitTreeFiller
)
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
#
# C M S S W P A T H
#
#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#-#
process.path = cms.Path(
recoSequence *
genSequence *
bambuFillerSequence
)
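# Run interactively with the standard CMSSW driver:
#   cmsRun mc-notrig.py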
| cpausmit/Kraken | filefi/042/mc-notrig.py | Python | mit | 7,750 |
__author__ = 'cenk'
def get_flags(*args, **kwargs):
"""
:param args:
:param kwargs:
:return: my, player, free flag respectively
"""
return kwargs.get('my_flag', 'X'), kwargs.get('player_flag', 'O'), kwargs.get('free', 0)
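# Example usage (the defaults apply whenever a kwarg is omitted):
#   get_flags()                        -> ('X', 'O', 0)
#   get_flags(my_flag='A', free=None)  -> ('A', 'O', None)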
| cenkbircanoglu/tic-tac-toe | game/utils/flags.py | Python | mit | 247 |
import click
from photomanager.commands.index import CommandIndex
from photomanager.commands.display import CommandList
from photomanager.commands.remove_dup import CommandRemoveDuplicate
from photomanager.commands.config import CommandConfig
from photomanager.commands.update import CommandUpdate
@click.group()
def photo_manager_cli():
pass
@click.command()
@click.option('--force', is_flag=True, help="force update index of folder")
@click.option('--skip_existed', is_flag=True, help="skip existing index entries")
@click.option('--clean', is_flag=True, help="delete obsolete records")
@click.argument('folder')
def index(folder, force, skip_existed, clean):
""" Index the photos in folder """
command_index = CommandIndex(folder, {"force": force, "skip_existed": skip_existed, "clean": clean})
command_index.do()
@click.command()
@click.option('--geoinfo', is_flag=False, help="update address by geoinfo")
@click.argument('folder')
def update(folder, geoinfo):
"""update some info, such as adress by GPS info"""
command_update = CommandUpdate(folder, {"geoinfo": geoinfo})
command_update.do()
@click.command()
@click.argument('folder', type=click.Path(exists=True), nargs=1)
@click.argument('filters', nargs=-1)
@click.option('--limit', default=0, help="image limit for one search, 0 is unlimited")
def display(folder, filters, limit):
""" List the images by condition"""
command_index = CommandList(folder, {"filters": filters})
command_index.do()
@click.command()
@click.argument('folder', default="./")
def remove_dup(folder):
""" remove duplicate files"""
command_remove_dup = CommandRemoveDuplicate(folder, {})
command_remove_dup.do()
@click.command()
@click.argument('folder', default="./")
@click.option('--backup_dir', default="", help="backup directory for the configuration")
def config(folder, backup_dir):
    """ set or list the configuration """
    command_config = CommandConfig(folder, {"backup_dir": backup_dir})
    command_config.do()
@click.command()
@click.argument('files')
def tag(files):
    """ add tags to some photos"""
    pass
if __name__ == "__main__":
photo_manager_cli.add_command(index)
photo_manager_cli.add_command(display)
photo_manager_cli.add_command(tag)
photo_manager_cli.add_command(config)
photo_manager_cli.add_command(update)
photo_manager_cli.add_command(remove_dup)
photo_manager_cli()
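# Example invocations (sketch, assuming the script is run directly):
#   python photo_manager.py index --force ~/Pictures
#   python photo_manager.py display ~/Pictures --limit 10
#   python photo_manager.py remove_dup ~/Pictures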
| wrenchzc/photomanager | photomanager/photo_manager.py | Python | mit | 2,330 |
# -*- coding: utf-8 -*-
from __future__ import division, print_function
import pytest
import numpy as np
from ... import Ensemble
from ..common import NormalWalker, UniformWalker
__all__ = ["test_invalid_coords_init", "test_invalid_dims_init",
"test_valid_init"]
def test_invalid_coords_init(nwalkers=32, ndim=5, seed=1234):
# Check for invalid coordinates.
np.random.seed(seed)
coords = \
np.ones(nwalkers)[:, None] + 0.001 * np.random.randn(nwalkers, ndim)
with pytest.raises(ValueError):
Ensemble(UniformWalker(), coords)
def test_invalid_dims_init(nwalkers=32, ndim=5, seed=1234):
# Check for invalid coordinate dimensions.
np.random.seed(seed)
coords = np.ones((nwalkers, ndim, 3))
coords += 0.001 * np.random.randn(*(coords.shape))
with pytest.raises(ValueError):
Ensemble(UniformWalker(), coords)
def test_valid_init(nwalkers=32, ndim=5, seed=1234):
# Check to make sure that valid coordinates work too.
np.random.seed(seed)
ivar = np.random.rand(ndim)
coords = 0.002 * np.random.rand(nwalkers, ndim) - 0.001
ens = Ensemble(NormalWalker(ivar), coords)
repr(ens.walkers[0])
assert np.all(np.isfinite(ens.log_probability))
| dfm/emcee3 | emcee3/tests/unit/test_ensemble.py | Python | mit | 1,237 |
# Whether we're simply testing the site and no webcam should be used.
DEBUG = False
# Minimum area to consider a contour as valid.
AREA_THREASHOLD = 100
# Write debug images to disk.
SAVE_TO_DISK = False
# Size of the area used to learn color profile.
SAMPLE_SIZE = 15
class RectArea(object):
def __init__(self, command, x, y, w, h):
self.command = command
self.x = x
self.y = y
self.w = w
self.h = h
self.ex = x + w
self.ey = y + h
# List of area coords used to learn color profile.
SAMPLE_COORDS = [
RectArea('', 640/2 - 250, 100, SAMPLE_SIZE, SAMPLE_SIZE),
RectArea('', 640/2 + 250, 100, SAMPLE_SIZE, SAMPLE_SIZE),
RectArea('', 640/2 - 220, 150, SAMPLE_SIZE, SAMPLE_SIZE),
RectArea('', 640/2 + 220, 150, SAMPLE_SIZE, SAMPLE_SIZE),
]
# List of areas used to trigger actions.
ACTION_AREAS = [
RectArea('upper_left', 0, 0, 50, 50),
RectArea('upper_right', 590, 0, 50, 50),
RectArea('upper_left2', 110, 0, 50, 50),
RectArea('upper_right2', 480, 0, 50, 50),
]
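# The precomputed far corner (ex, ey) makes point-in-area tests trivial; a
# hypothetical helper could read:
#   def contains(area, px, py):
#       return area.x <= px <= area.ex and area.y <= py <= area.ey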
| Plezito/rpgesture | config.py | Python | mit | 1,099 |
import math
import tensorflow as tf
#from .utils import base_name
def base_name(var):
"""Extracts value passed to name= when creating a variable"""
return var.name.split('/')[-1].split(':')[0]
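# e.g. a variable created as "target_net/W_0:0" gives base_name(...) == "W_0"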
def copy_variables(variables):
res = {}
for v in variables:
name = base_name(v)
copied_var = tf.Variable(v.initialized_value(), name=name)
res[name] = copied_var
return res
class Layer(object):
def __init__(self, input_sizes, output_size, scope):
"""Cretes a neural network layer."""
if type(input_sizes) != list:
input_sizes = [input_sizes]
self.input_sizes = input_sizes
self.output_size = output_size
self.scope = scope or "Layer"
with tf.variable_scope(self.scope):
self.Ws = []
for input_idx, input_size in enumerate(input_sizes):
W_name = "W_%d" % (input_idx,)
W_initializer = tf.random_uniform_initializer(
-1.0 / math.sqrt(input_size), 1.0 / math.sqrt(input_size))
W_var = tf.get_variable(W_name, (input_size, output_size), initializer=W_initializer)
self.Ws.append(W_var)
self.b = tf.get_variable("b", (output_size,), initializer=tf.constant_initializer(0))
def __call__(self, xs):
if type(xs) != list:
xs = [xs]
assert len(xs) == len(self.Ws), \
"Expected %d input vectors, got %d" % (len(self.Ws), len(xs))
with tf.variable_scope(self.scope):
return sum([tf.matmul(x, W) for x, W in zip(xs, self.Ws)]) + self.b
def variables(self):
return [self.b] + self.Ws
    def copy(self, scope=None):
        scope = scope or self.scope + "_copy"
        with tf.variable_scope(scope) as sc:
            # Pre-create variables in the new scope, initialized from the
            # originals' current values; reuse_variables() then lets the
            # Layer constructor pick them up instead of creating fresh ones.
            for v in self.variables():
                tf.get_variable(base_name(v), v.get_shape(),
                        initializer=lambda x,dtype=tf.float32,partition_info=None: v.initialized_value())
            sc.reuse_variables()
            return Layer(self.input_sizes, self.output_size, scope=sc)
class MLP(object):
def __init__(self, input_sizes, hiddens, nonlinearities, scope=None, given_layers=None):
self.input_sizes = input_sizes
self.hiddens = hiddens
self.input_nonlinearity, self.layer_nonlinearities = nonlinearities[0], nonlinearities[1:]
self.scope = scope or "MLP"
assert len(hiddens) == len(nonlinearities), \
"Number of hiddens must be equal to number of nonlinearities"
with tf.variable_scope(self.scope):
if given_layers is not None:
self.input_layer = given_layers[0]
self.layers = given_layers[1:]
else:
self.input_layer = Layer(input_sizes, hiddens[0], scope="input_layer")
self.layers = []
for l_idx, (h_from, h_to) in enumerate(zip(hiddens[:-1], hiddens[1:])):
self.layers.append(Layer(h_from, h_to, scope="hidden_layer_%d" % (l_idx,)))
def __call__(self, xs):
if type(xs) != list:
xs = [xs]
with tf.variable_scope(self.scope):
hidden = self.input_nonlinearity(self.input_layer(xs))
for layer, nonlinearity in zip(self.layers, self.layer_nonlinearities):
hidden = nonlinearity(layer(hidden))
return hidden
def variables(self):
res = self.input_layer.variables()
for layer in self.layers:
res.extend(layer.variables())
return res
def copy(self, scope=None):
scope = scope or self.scope + "_copy"
nonlinearities = [self.input_nonlinearity] + self.layer_nonlinearities
given_layers = [self.input_layer.copy()] + [layer.copy() for layer in self.layers]
return MLP(self.input_sizes, self.hiddens, nonlinearities, scope=scope,
given_layers=given_layers)
class ConvLayer(object):
def __init__(self, filter_H, filter_W,
in_C, out_C,
stride=(1,1),
scope="Convolution"):
self.filter_H, self.filter_W = filter_H, filter_W
self.in_C, self.out_C = in_C, out_C
self.stride = stride
self.scope = scope
with tf.variable_scope(self.scope):
input_size = filter_H * filter_W * in_C
W_initializer = tf.random_uniform_initializer(
-1.0 / math.sqrt(input_size),
1.0 / math.sqrt(input_size))
self.W = tf.get_variable('W',
(filter_H, filter_W, in_C, out_C),
initializer=W_initializer)
self.b = tf.get_variable('b',
(out_C),
initializer=tf.constant_initializer(0))
def __call__(self, X):
with tf.variable_scope(self.scope):
return tf.nn.conv2d(X, self.W,
strides=[1] + list(self.stride) + [1],
padding='SAME') + self.b
def variables(self):
return [self.W, self.b]
def copy(self, scope=None):
scope = scope or self.scope + "_copy"
with tf.variable_scope(scope) as sc:
for v in self.variables():
tf.get_variable(base_name(v), v.get_shape(),
initializer=lambda x,dtype=tf.float32,partition_info=None: v.initialized_value())
sc.reuse_variables()
return ConvLayer(self.filter_H, self.filter_W, self.in_C, self.out_C, self.stride, scope=sc)
class SeqLayer(object):
def __init__(self, layers, scope='seq_layer'):
self.scope = scope
self.layers = layers
def __call__(self, x):
for l in self.layers:
x = l(x)
return x
def variables(self):
return sum([l.variables() for l in self.layers], [])
    def copy(self, scope=None):
        scope = scope or self.scope + "_copy"
        # Build the copies under the new scope, mirroring Layer.copy and
        # ConvLayer.copy.
        with tf.variable_scope(scope):
            copied_layers = [layer.copy() for layer in self.layers]
            return SeqLayer(copied_layers, scope=scope)
class LambdaLayer(object):
def __init__(self, f):
self.f = f
def __call__(self, x):
return self.f(x)
def variables(self):
return []
def copy(self):
return LambdaLayer(self.f)
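# Minimal usage sketch (TF1 graph mode; shapes and names are illustrative):
#   x = tf.placeholder(tf.float32, (None, 4))
#   mlp = MLP([4], [16, 2], [tf.tanh, tf.identity])
#   q_values = mlp(x)
#   target_mlp = mlp.copy()  # e.g. a frozen target network for DQN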
| mburq/Deep_rl_stochastic_opt | src/agent/dqn/models.py | Python | mit | 6,469 |
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme
master_doc = 'index'
# -- Project information -----------------------------------------------------
project = 'Tree Collections'
copyright = '© 2019 David West'
author = 'David West'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
| davidwest/TreeCollections | docs/conf.py | Python | mit | 1,682 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from nltk.corpus import wordnet
def Levenshtein_Dist(s1,s2):
m = [[0 for col in range(len(s2)+1)] for row in range(len(s1)+1)]
for i in range(len(s1)+1):
m[i][0] = i
for j in range(len(s2)+1):
m[0][j] = j
for i in range(len(s1)):
for j in range(len(s2)):
            if s1[i] == s2[j]:
                # A match is free only along the diagonal; insertions and
                # deletions still cost 1.
                m[i+1][j+1] = min(m[i][j+1]+1, m[i+1][j]+1, m[i][j])
            else:
                m[i+1][j+1] = min(m[i][j+1]+1, m[i+1][j]+1, m[i][j]+1)
return m[len(s1)][len(s2)]
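# Quick sanity checks for Levenshtein_Dist:
#   Levenshtein_Dist("systm", "system") -> 1  (one insertion)
#   Levenshtein_Dist("aa", "a")         -> 1  (one deletion)
#   Levenshtein_Dist("abc", "abc")      -> 0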
def Revised(s,word2id):
#import a dictionay as correct_dictionary
if s in word2id:
return s
sys = wordnet.synsets(s)
distance = 100
word = ""
correct_dictionary = {}
#correct_dictionary["systm"] = "system"
if len(sys) == 0:
if (s in correct_dictionary):
return correct_dictionary[s]
else:
for item in word2id:
dist = Levenshtein_Dist(s,item)
if dist<distance:
distance = dist
word = item
return word
else:
return s
#if not (s in word2id):
# net
| Impavidity/SearchEngine | OldCode/revised.py | Python | mit | 1,014 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import requests
import time
from spider163.utils import encrypt
from spider163.spider import public as uapi
from spider163.spider import music
from spider163.utils import pysql
from spider163.utils import pylog
from spider163.utils import tools
from spider163 import settings
class Command():
def __init__(self):
modulus = uapi.comment_module
pubKey = uapi.pubKey
secKey = uapi.secKey
self.__encSecKey = self.rsaEncrypt(secKey, pubKey, modulus)
self.session = requests.session()
self.session.headers = uapi.header
    def createPlaylistParams(self, ids, playlist_id, cmd, csrf_token):
        # Assemble the raw JSON payload, then apply the NetEase web API's
        # double-AES scheme: encrypt with the fixed nonce, then encrypt the
        # result again with a 16-byte key of 'F' characters.
        text = '{"trackIds": [' + ",".join(ids) + '],"pid": "{}","op": "{}","csrf_token": "{}"'.format(playlist_id, cmd, csrf_token) + '}'
        nonce = '0CoJUm6Qyw8W8jud'
        nonce2 = 16 * 'F'
        encText = encrypt.aes(
            encrypt.aes(text, nonce).decode("utf-8"), nonce2
        )
        return encText
def createPlaylistRemoveParams(self):
pass
def createLoginParams(self,username,password):
psw = tools.md5(password)
text = '{' + '"phone": "{}","password": "{}","rememberLogin": "true"'.format(username,psw)+'}'
nonce = '0CoJUm6Qyw8W8jud'
nonce2 = 16 * 'F'
encText = encrypt.aes(
encrypt.aes(text, nonce).decode("utf-8"), nonce2
)
return encText
    def rsaEncrypt(self, text, pubKey, modulus):
        # Textbook RSA without padding: reverse the text, interpret it as a
        # hex integer, raise it to the public exponent modulo the modulus,
        # and zero-pad the hex result to 256 characters.
        text = text[::-1]
        rs = int(tools.hex(text), 16)**int(pubKey, 16) % int(modulus, 16)
        return format(rs, 'x').zfill(256)
def createSecretKey(self, size):
return (
''.join(map(lambda xx: (hex(ord(xx))[2:]), os.urandom(size)))
)[0:16]
def post_playlist_add(self,ids, playlist_id=2098905487, csrf_token="da2216e4b4ca4efcfab94d8d4920ef9"):
data = {
'params': self.createPlaylistParams(ids,playlist_id,"add",csrf_token),
'encSecKey': self.__encSecKey
}
url = uapi.playlist_add_api.format(csrf_token)
req = self.session.post(
url, data=data, timeout=100
)
return req.json()
def post_playlist_delete(self, ids, playlist_id=2098905487, csrf_token="da2216e4b4ca4efcfab94d8d4920ef9"):
data = {
'params': self.createPlaylistParams(ids, playlist_id, "delete", csrf_token),
'encSecKey': self.__encSecKey
}
url = uapi.playlist_add_api.format(csrf_token)
req = self.session.post(
url, data=data, timeout=10
)
return req.json()
def do_login(self,username,password):
data = {
'params': self.createLoginParams(username,password),
'encSecKey': self.__encSecKey
}
url = uapi.login_api
res = self.session.post(url, data=data, timeout=10).json()
        # TODO: handle the response info
        if res["code"] != 200:
            if res["code"] == 400:
                raise Exception("Invalid username!")
            raise Exception(res["msg"])
return res
def clear_playlist(self,playlist_id=2098905487):
m = music.Music()
data = m.curl_playlist(playlist_id)
for d in data["tracks"]:
res = self.post_playlist_delete([str(d["id"]),],playlist_id)
if res["code"] == 200:
pylog.print_info("成功删除《{}》到指定歌单,歌单目前包含歌曲 {} 首".format(d["name"],res["count"]))
else:
time.sleep(5)
pylog.print_warn("歌曲《{}》不存在于歌单中!".format(d["name"]))
pylog.print_warn("删除歌单歌曲任务完成,请检查!")
def create_playlist_comment_top100(self,playlist_id=2098905487):
data = settings.Session.query(pysql.Music163.song_name, pysql.Music163.song_id,pysql.Music163.comment.label("count")).order_by(
pysql.Music163.comment.label("count").desc()).limit(200).all()
for d in data:
res = self.post_playlist_add([str(d[1]),],playlist_id)
if res["code"] == 502:
pylog.print_warn("歌曲《{}》已经存在于歌单中!".format(d[0]))
elif res["code"] == 200:
pylog.print_info("成功添加《{}》到指定歌单,歌单目前包含歌曲 {} 首".format(d[0],res["count"]))
else:
time.sleep(5)
pylog.print_warn("歌曲《{}》没有添加成功!".format(d[0]))
pylog.print_warn("增加歌单歌曲任务完成,请检查!")
| Chengyumeng/spider163 | spider163/spider/authorize.py | Python | mit | 4,612 |
from crash_hound import CrashHound, CommonChecks, SenderMail
# helper lambda for gracefully falling back to defaults for environment variables
from os import environ
env = lambda e, d: environ.get(e, d)
# load configuration from environment variables
monitor_url = env('MONITOR_URL', '')
smtp_host = env('SMTP_HOST', '')
smtp_user = env('SMTP_USER', '')
smtp_password = env('SMTP_PASSWORD', '')
email_to = env('EMAIL_TO', '')
crash_sender = SenderMail(email_to, email_to, smtp_host, smtp_user, smtp_password)
crash_checker = CrashHound(crash_sender)
# check URL and send notifications if it does not return a status of 200 or 302
message_subject = '[Webmonitor] {0} Status Check'.format(monitor_url)
crash_checker.register_check(message_subject,
lambda: CommonChecks.website_check(monitor_url), notify_every=60*60)
# run checks every 30 minutes (indefinitely)
crash_checker.run_checks(check_interval=30*60)
| kblum/webmonitor | worker.py | Python | mit | 923 |
import os
from flask import Blueprint
from flask import json, url_for, current_app
from flask import g, request, abort, send_file
from flaskext.uploads import UploadSet
from ..utils import as_resource, as_collection
from ..db import sim as sim_db
parameters_page = Blueprint('parameters', __name__)
| mcflugen/wmt-rest | wmt/flask/views/parameters.py | Python | mit | 303 |
#!/usr/bin/env python
# encoding: utf-8
# http://axe.g0v.tw/level/3
import urllib2, re, cookielib
lines = []
# The hint is that we need to accept cookies.
jar = cookielib.FileCookieJar("cookies")
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(jar))
for index in range(1, 77):
url = "http://axe-level-1.herokuapp.com/lv3/" if index == 1 \
else "http://axe-level-1.herokuapp.com/lv3/?page=next"
html = opener.open(url).read()
pattern = r"<tr>\s*<td>(.*)</td>\s*<td>(.*)</td>\s*<td>(.*)</td>\s*</tr>"
results = re.findall(pattern, html, re.MULTILINE)[1:]
format = '{"town": "%s", "village": "%s", "name" : "%s"}'
for result in results:
lines.append(format % tuple(result))
with open("test.txt", "w") as f:
f.write("[%s]" % ",\n".join(lines))
| zonble/axe_py | axe3.py | Python | mit | 766 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import time
import datetime
import urllib2
import json
from requests_oauthlib import OAuth1Session
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.events import EVENT_JOB_EXECUTED
import logging
logging.basicConfig()
class Configuration(object):
FILENAME = 'conf.json'
def __init__(self):
with open(self.FILENAME, 'r') as f:
conf = json.load(f)
self.CK = conf['conf']['account']['CK']
self.CS = conf['conf']['account']['CS']
self.AT = conf['conf']['account']['AT']
self.AS = conf['conf']['account']['AS']
self.lat = conf['conf']['location']['lat']
self.lon = conf['conf']['location']['lon']
self.locname = conf['conf']['location']['name']
print('%s %s %s' % (self.lat, self.lon, self.locname))
class Chunchun(object):
next_sunrise = None
def __init__(self, conf):
self.conf = conf
def get_sunrise(self, url):
r = urllib2.urlopen(url)
js = r.read()
return self.parse(js)
def parse(self, js):
data = json.loads(js)
for event in data['result']['event']:
t = event['type']
b = event['boundary']
if t == u'daytime' and b == u'start':
time = event['time'].split('+')[0]
return datetime.datetime.strptime(time, '%Y-%m-%dT%H:%M')
def tweet(self):
url = 'https://api.twitter.com/1.1/statuses/update.json'
time = self.next_sunrise.strftime('%Y/%m/%d %H:%M')
params = {'status': '(・8・)チュンチュン 南ことりが日の出をお知らせします♪ - %s %s' % (time, self.conf.locname.encode('utf-8'))}
CK = self.conf.CK
CS = self.conf.CS
AT = self.conf.AT
AS = self.conf.AS
tw = OAuth1Session(CK, CS, AT, AS)
req = tw.post(url, params=params)
if req.status_code == 200:
print('OK')
else:
print('Error: %s' % req)
def update_sunrise(self):
URL = 'http://www.finds.jp/ws/movesun.php?jsonp=&y=%s&m=%s&d=%s&lat=%s&lon=%s&tz=9'
dt = datetime.date.today() + datetime.timedelta(days=1)
request_url = URL % (dt.year, dt.month, dt.day, self.conf.lat, self.conf.lon)
self.next_sunrise = self.get_sunrise(request_url)
print('next_sunrise:' + self.next_sunrise.strftime('%Y/%m/%d %H:%M'))
def do_post(self):
self.tweet()
self.update_sunrise()
running = False
    def start_schedule(self):
        sched = BackgroundScheduler()
        # Schedule the first post for the coming sunrise.
        job = sched.add_job(self.do_post, 'date', run_date=self.next_sunrise)
        sched.add_listener(self.job_executed_listener, EVENT_JOB_EXECUTED)
        sched.start()
        try:
            while True:
                # Block until the listener clears the flag (the job has run),
                # then re-schedule for the next sunrise.
                self.running = True
                while self.running:
                    time.sleep(10)
                job.remove()
                job = sched.add_job(self.do_post, 'date', run_date=self.next_sunrise, misfire_grace_time=120)
except (KeyboardInterrupt, SystemExit):
sched.shutdown()
def job_executed_listener(self, event):
self.running = False
if __name__ == '__main__':
chun = Chunchun(Configuration())
chun.update_sunrise()
chun.start_schedule()
| kotori-sonoda/chunchun | chunchun.py | Python | mit | 3,382 |
"""
Wrapper for timedelta class to provide nicer access for alarms
"""
class TimeSpan:
def __init__(self, seconds):
self._seconds = seconds
@staticmethod
def fromTimeDelta(delta):
return TimeSpan(delta.total_seconds())
@property
def seconds(self):
return self._seconds
@property
def minutes(self):
return self._seconds / 60.0
@property
def hours(self):
return self._seconds / 60.0 / 60.0
@property
def days(self):
return self._seconds / 60.0 / 60.0 / 24.0
def __json__(self):
return self._seconds
def __int__(self):
return int(self._seconds)
def __str__(self):
if self._seconds < 60:
return "%i seconds" % (self._seconds)
elif self._seconds < 60 * 60:
return "%i minutes" % (self.minutes)
elif self._seconds < 60 * 60 * 24:
return "%i hours" % (self.hours)
else:
return "%i days" % (self.days)
def __lt__(self, other): return int(self) < int(other)
def __le__(self, other): return int(self) <= int(other)
def __eq__(self, other): return int(self) == int(other)
def __ne__(self, other): return int(self) != int(other)
def __gt__(self, other): return int(self) > int(other)
def __ge__(self, other): return int(self) >= int(other)
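# Example usage (datetime would need to be imported for the fromTimeDelta case):
#   str(TimeSpan(90))                                         -> "1 minutes"
#   TimeSpan(90) > TimeSpan(30)                               -> True
#   TimeSpan.fromTimeDelta(datetime.timedelta(hours=36)).days -> 1.5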
| zix99/sshsysmon | sshsysmon/lib/util/timespan.py | Python | mit | 1,196 |
# this file is for interactive tests of the ppi_networkit cython API
# thus it just loads the configuration and
# database connection and enables tab completion
import os
import re
import itertools
import numpy
import time
import sys
import pappi.id_mapping
import pappi.sql
from pappi.data_config import *
#############################
# get database connection #
#############################
# get new database connection
con = pappi.sql.get_conn(DATABASE)
#################################
# Import PPI NetworKit module #
#################################
sys.path.append(os.path.join(os.path.dirname(__file__), "../ppi_networkit/cython"))
#import NetworKit
import ppi_networkit
# set the log-level to "ERROR", this will ignore the [INFO] and [WARN] logs
ppi_networkit.setLogLevel("ERROR")
sqlio = ppi_networkit.SQLiteIO(DATABASE)
#ppi = "string"
#expr = "gene_atlas"
ppi = "ccsb"
ppi = "bossi"
expr = "hpa"
expr = "hpa_all"
tsppi = sqlio.load_tsppi_graph(ppi, expr)
##############################
# enable tab completion
##############################
import readline
import rlcompleter
readline.parse_and_bind("tab: complete")
| patflick/tsppi | src/networkit_test.py | Python | mit | 1,149 |
# coding: utf-8
"""
DocuSign REST API
The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. # noqa: E501
OpenAPI spec version: v2.1
Contact: devcenter@docusign.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class EnvelopeAttachment(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'access_control': 'str',
'attachment_id': 'str',
'attachment_type': 'str',
'error_details': 'ErrorDetails',
'label': 'str',
'name': 'str'
}
attribute_map = {
'access_control': 'accessControl',
'attachment_id': 'attachmentId',
'attachment_type': 'attachmentType',
'error_details': 'errorDetails',
'label': 'label',
'name': 'name'
}
def __init__(self, access_control=None, attachment_id=None, attachment_type=None, error_details=None, label=None, name=None): # noqa: E501
"""EnvelopeAttachment - a model defined in Swagger""" # noqa: E501
self._access_control = None
self._attachment_id = None
self._attachment_type = None
self._error_details = None
self._label = None
self._name = None
self.discriminator = None
if access_control is not None:
self.access_control = access_control
if attachment_id is not None:
self.attachment_id = attachment_id
if attachment_type is not None:
self.attachment_type = attachment_type
if error_details is not None:
self.error_details = error_details
if label is not None:
self.label = label
if name is not None:
self.name = name
@property
def access_control(self):
"""Gets the access_control of this EnvelopeAttachment. # noqa: E501
# noqa: E501
:return: The access_control of this EnvelopeAttachment. # noqa: E501
:rtype: str
"""
return self._access_control
@access_control.setter
def access_control(self, access_control):
"""Sets the access_control of this EnvelopeAttachment.
# noqa: E501
:param access_control: The access_control of this EnvelopeAttachment. # noqa: E501
:type: str
"""
self._access_control = access_control
@property
def attachment_id(self):
"""Gets the attachment_id of this EnvelopeAttachment. # noqa: E501
# noqa: E501
:return: The attachment_id of this EnvelopeAttachment. # noqa: E501
:rtype: str
"""
return self._attachment_id
@attachment_id.setter
def attachment_id(self, attachment_id):
"""Sets the attachment_id of this EnvelopeAttachment.
# noqa: E501
:param attachment_id: The attachment_id of this EnvelopeAttachment. # noqa: E501
:type: str
"""
self._attachment_id = attachment_id
@property
def attachment_type(self):
"""Gets the attachment_type of this EnvelopeAttachment. # noqa: E501
# noqa: E501
:return: The attachment_type of this EnvelopeAttachment. # noqa: E501
:rtype: str
"""
return self._attachment_type
@attachment_type.setter
def attachment_type(self, attachment_type):
"""Sets the attachment_type of this EnvelopeAttachment.
# noqa: E501
:param attachment_type: The attachment_type of this EnvelopeAttachment. # noqa: E501
:type: str
"""
self._attachment_type = attachment_type
@property
def error_details(self):
"""Gets the error_details of this EnvelopeAttachment. # noqa: E501
:return: The error_details of this EnvelopeAttachment. # noqa: E501
:rtype: ErrorDetails
"""
return self._error_details
@error_details.setter
def error_details(self, error_details):
"""Sets the error_details of this EnvelopeAttachment.
:param error_details: The error_details of this EnvelopeAttachment. # noqa: E501
:type: ErrorDetails
"""
self._error_details = error_details
@property
def label(self):
"""Gets the label of this EnvelopeAttachment. # noqa: E501
# noqa: E501
:return: The label of this EnvelopeAttachment. # noqa: E501
:rtype: str
"""
return self._label
@label.setter
def label(self, label):
"""Sets the label of this EnvelopeAttachment.
# noqa: E501
:param label: The label of this EnvelopeAttachment. # noqa: E501
:type: str
"""
self._label = label
@property
def name(self):
"""Gets the name of this EnvelopeAttachment. # noqa: E501
# noqa: E501
:return: The name of this EnvelopeAttachment. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this EnvelopeAttachment.
# noqa: E501
:param name: The name of this EnvelopeAttachment. # noqa: E501
:type: str
"""
self._name = name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(EnvelopeAttachment, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, EnvelopeAttachment):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| docusign/docusign-python-client | docusign_esign/models/envelope_attachment.py | Python | mit | 7,184 |
#!/usr/bin/env python
#
# Copyright under the latest Apache License 2.0
'''A class the inherits everything from python-twitter and allows oauth based access
Requires:
python-twitter
simplejson
oauth
'''
__author__ = "Hameedullah Khan <hameed@hameedkhan.net>"
__version__ = "0.2"
from jsb.contrib import twitter
from jsb.imports import getjson, getoauth
import time
json = getjson()
oauth = getoauth()
# Taken from oauth implementation at: http://github.com/harperreed/twitteroauth-python/tree/master
REQUEST_TOKEN_URL = 'https://twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'http://twitter.com/oauth/authorize'
SIGNIN_URL = 'http://twitter.com/oauth/authenticate'
class OAuthApi(twitter.Api):
    def __init__(self, consumer_key, consumer_secret, access_token=None):
        if access_token:
            twitter.Api.__init__(self, access_token.key, access_token.secret)
        else:
            twitter.Api.__init__(self)
self._Consumer = oauth.OAuthConsumer(consumer_key, consumer_secret)
self._signature_method = oauth.OAuthSignatureMethod_HMAC_SHA1()
self._access_token = access_token
def _GetOpener(self):
opener = self._urllib.build_opener()
return opener
def _FetchUrl(self,
url,
post_data=None,
parameters=None,
no_cache=None, verifier=""):
'''Fetch a URL, optionally caching for a specified time.
Args:
url: The URL to retrieve
post_data:
A dict of (str, unicode) key/value pairs. If set, POST will be used.
parameters:
A dict whose key/value pairs should encoded and added
to the query string. [OPTIONAL]
no_cache: If true, overrides the cache on the current request
Returns:
A string containing the body of the response.
'''
# Build the extra parameters dict
extra_params = {}
if self._default_params:
extra_params.update(self._default_params)
if parameters:
extra_params.update(parameters)
# Add key/value parameters to the query string of the url
#url = self._BuildUrl(url, extra_params=extra_params)
if post_data:
http_method = "POST"
extra_params.update(post_data)
else:
http_method = "GET"
req = self._makeOAuthRequest(url, parameters=extra_params,
http_method=http_method, verifier=verifier)
self._signRequest(req, self._signature_method)
# Get a url opener that can handle Oauth basic auth
opener = self._GetOpener()
#encoded_post_data = self._EncodePostData(post_data)
if post_data:
encoded_post_data = req.to_postdata()
url = req.get_normalized_http_url()
else:
url = req.to_url()
encoded_post_data = ""
no_cache=True
# Open and return the URL immediately if we're not going to cache
# OR we are posting data
if encoded_post_data or no_cache:
if encoded_post_data:
url_data = opener.open(url, encoded_post_data).read()
else:
url_data = opener.open(url).read()
opener.close()
else:
# Unique keys are a combination of the url and the username
if self._username:
key = self._username + ':' + url
else:
key = url
# See if it has been cached before
last_cached = self._cache.GetCachedTime(key)
# If the cached version is outdated then fetch another and store it
if not last_cached or time.time() >= last_cached + self._cache_timeout:
url_data = opener.open(url).read()
opener.close()
self._cache.Set(key, url_data)
else:
url_data = self._cache.Get(key)
# Always return the latest version
return url_data
def _makeOAuthRequest(self, url, token=None,
parameters=None, http_method="GET", verifier=""):
'''Make a OAuth request from url and parameters
Args:
url: The Url to use for creating OAuth Request
parameters:
The URL parameters
http_method:
The HTTP method to use
Returns:
A OAauthRequest object
'''
if not token:
token = self._access_token
request = oauth.OAuthRequest.from_consumer_and_token(
self._Consumer, token=token,
http_url=url, parameters=parameters,
http_method=http_method, verifier=verifier)
return request
def _signRequest(self, req, signature_method=oauth.OAuthSignatureMethod_HMAC_SHA1()):
        '''Sign a request.
        Reminder: created as a separate function in case anything needs to be
        added to the request before signing.
        Args:
          req: The OAuth request created via _makeOAuthRequest
          signature_method:
            The oauth signature method to use
        '''
req.sign_request(signature_method, self._Consumer, self._access_token)
def getAuthorizationURL(self, token, url=AUTHORIZATION_URL):
'''Create a signed authorization URL
Returns:
A signed OAuthRequest authorization URL
'''
req = self._makeOAuthRequest(url, token=token)
self._signRequest(req)
return req.to_url()
def getSigninURL(self, token, url=SIGNIN_URL):
'''Create a signed Sign-in URL
Returns:
A signed OAuthRequest Sign-in URL
'''
signin_url = self.getAuthorizationURL(token, url)
return signin_url
def getAccessToken(self, url=ACCESS_TOKEN_URL, verifier=""):
token = self._FetchUrl(url, no_cache=True, verifier=verifier)
return oauth.OAuthToken.from_string(token)
def getRequestToken(self, url=REQUEST_TOKEN_URL):
'''Get a Request Token from Twitter
Returns:
A OAuthToken object containing a request token
'''
resp = self._FetchUrl(url, no_cache=True)
token = oauth.OAuthToken.from_string(resp)
return token
def GetUserInfo(self, url='https://twitter.com/account/verify_credentials.json'):
'''Get user information from twitter
Returns:
Returns the twitter.User object
'''
        body = self._FetchUrl(url)
        data = json.loads(body)
self._CheckForTwitterError(data)
return twitter.User.NewFromJsonDict(data)
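# Typical three-legged OAuth flow with this class (sketch; CONSUMER_KEY,
# CONSUMER_SECRET and PIN are placeholders):
#   api = OAuthApi(CONSUMER_KEY, CONSUMER_SECRET)
#   request_token = api.getRequestToken()
#   print api.getAuthorizationURL(request_token)
#   # ...the user authorizes in a browser and reports the verifier/PIN...
#   access_token = api.getAccessToken(verifier=PIN)
#   api = OAuthApi(CONSUMER_KEY, CONSUMER_SECRET, access_token)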
| Petraea/jsonbot | jsb/contrib/oauthtwitter.py | Python | mit | 6,745 |
# Copyright 2019 by Sergio Valqui. All rights reserved.
def SendEmail(fs_smtpsvr, fs_user, fs_pass, fs_emailcontent):
import smtplib
import socket
fs_FromAddr = ''
fs_ToAddr = ''
    fs_emailcontent = 'Hello'  # NOTE: overrides the fs_emailcontent argument with placeholder text
# Delta test
s = socket.socket()
s.bind(('', 50007))
s.listen(1)
s.close()
# Send Mail
server = smtplib.SMTP(fs_smtpsvr)
# server.login(fs_user,fs_pass)
server.sendmail(fs_FromAddr, fs_ToAddr, fs_emailcontent)
server.quit()
return ()
| svalqui/sysad | coms/mail.py | Python | mit | 505 |
# SteamFootBridge
# Copyright (c) 2016 Bryan DeGrendel
from . import config
import steam
# TODO: Does capitalization matter?
__root_userconfig_key__ = 'UserLocalConfigStore'
__friends_userconfig_key__ = 'friends'
__autologin_friends_key__ = 'AutoSignIntoFriends'
__system_userconfig_key__ = 'system'
__enable_game_overlay_key__ = 'EnableGameOverlay'
# TODO: Option to disable setting friends autologin
# TODO: Option to disable setting overlay disabled
# TODO: Option to avoid touching the userconfig at all
def do():
with config.Configuration() as c:
path = c.get_wine_steam_userconfig_filename()
print("Reading {}".format(path))
with open(path, 'r') as f:
userconfig = steam.vdf.load(f)
_base_setup_userconfig(userconfig)
_set_disable_friends_auto_login(userconfig)
_set_disable_overlay(userconfig)
print("Writing updated {}".format(path))
with open(path, 'w') as f:
steam.vdf.dump(userconfig, f)
def _base_setup_userconfig(userconfig):
    if __root_userconfig_key__ not in userconfig:
        userconfig[__root_userconfig_key__] = {}
    root = userconfig[__root_userconfig_key__]
    if __friends_userconfig_key__ not in root:
        root[__friends_userconfig_key__] = {}
    if __system_userconfig_key__ not in root:
        root[__system_userconfig_key__] = {}
def _set_disable_friends_auto_login(userconfig):
userconfig[__root_userconfig_key__][__friends_userconfig_key__][__autologin_friends_key__] = '0'
def _set_disable_overlay(userconfig):
userconfig[__root_userconfig_key__][__system_userconfig_key__][__enable_game_overlay_key__] = '0'
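# After do() runs, the touched part of the userconfig VDF looks like (sketch):
#   "UserLocalConfigStore" {
#     "friends" { "AutoSignIntoFriends" "0" }
#     "system"  { "EnableGameOverlay"  "0" }
#   }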
| sirnuke/steamfootbridge | steamfootbridge/setup.py | Python | mit | 1,601 |
import datetime
import logging
import math
from pylons import cache, config, request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect
from pylons.decorators.secure import authenticate_form
from routes import request_config
from sqlalchemy.orm import aliased, contains_eager, joinedload
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.sql import func
import wtforms
from wtforms import fields
from spline.model import meta
from spline.lib import helpers as h
from spline.lib.base import BaseController, render
import spline.lib.markdown
from splinext.forum import model as forum_model
log = logging.getLogger(__name__)
def forum_activity_score(forum):
"""Returns a number representing how active a forum is, based on the past
week.
The calculation is arbitrary, but 0 is supposed to mean "dead" and 1 is
supposed to mean "healthy".
"""
cutoff = datetime.datetime.now() - datetime.timedelta(days=7)
post_count = meta.Session.query(forum_model.Post) \
.join(forum_model.Post.thread) \
.filter(forum_model.Thread.forum == forum) \
.filter(forum_model.Post.posted_time >= cutoff) \
.count()
# Avoid domain errors!
if not post_count:
return 0.0
# The log is to scale 0 posts to 0.0, and 168 posts to 1.0.
# The square is really just to take the edge off the log curve; it
# accelerates to 1 very quickly, then slows considerably after that.
# Squaring helps both of these problems.
score = (math.log(post_count) / math.log(168)) ** 2
# TODO more threads and more new threads should boost the score slightly
return score
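# For a concrete feel for the scaling: 13 posts in the past week gives
# (log 13 / log 168) ** 2 ~= 0.25, while 168 posts (one per hour) yields 1.0.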
def get_forum_activity():
"""Returns a hash mapping forum ids to their level of 'activity'."""
forums_q = meta.Session.query(forum_model.Forum)
activity = {}
for forum in forums_q:
activity[forum.id] = forum_activity_score(forum)
return activity
def get_forum_volume():
"""Returns a hash mapping forum ids to the percentage of all posts that
reside in that forum.
"""
# Do a complicated-ass subquery to get a list of forums and postcounts
volume_q = meta.Session.query(
forum_model.Forum.id.label('forum_id'),
func.count(forum_model.Post.id).label('post_count'),
) \
.outerjoin(forum_model.Thread) \
.outerjoin(forum_model.Post) \
.group_by(forum_model.Forum.id)
# Stick this into a hash, and count the number of total posts
total_posts = 0
volume = {}
for forum_id, post_count in volume_q:
post_count = float(post_count or 0)
volume[forum_id] = post_count
total_posts += post_count
# Divide, to get a percentage
if total_posts:
for forum_id, post_count in volume.iteritems():
volume[forum_id] /= total_posts
return volume
class WritePostForm(wtforms.Form):
content = fields.TextAreaField('Content')
class WriteThreadForm(WritePostForm):
subject = fields.StringField('Subject')
class ForumController(BaseController):
def forums(self):
c.forums = meta.Session.query(forum_model.Forum) \
.order_by(forum_model.Forum.id.asc()) \
.all()
# Get some forum stats. Cache them because they're a bit expensive to
# compute. Expire after an hour.
# XXX when there are admin controls, they'll need to nuke this cache
# when messing with the forum list
forum_cache = cache.get_cache('spline-forum', expiretime=3600)
c.forum_activity = forum_cache.get_value(
key='forum_activity', createfunc=get_forum_activity)
c.forum_volume = forum_cache.get_value(
key='forum_volume', createfunc=get_forum_volume)
try:
c.max_volume = max(c.forum_volume.itervalues()) or 1
except ValueError:
# Empty database
c.max_volume = 1
# Need to know the last post for each forum, in realtime
c.last_post = {}
last_post_subq = meta.Session.query(
forum_model.Forum.id.label('forum_id'),
func.max(forum_model.Post.posted_time).label('posted_time'),
) \
.outerjoin(forum_model.Thread) \
.outerjoin(forum_model.Post) \
.group_by(forum_model.Forum.id) \
.subquery()
last_post_q = meta.Session.query(
forum_model.Post,
last_post_subq.c.forum_id,
) \
.join((
last_post_subq,
forum_model.Post.posted_time == last_post_subq.c.posted_time,
)) \
.options(
joinedload('thread'),
joinedload('author'),
)
for post, forum_id in last_post_q:
c.last_post[forum_id] = post
return render('/forum/forums.mako')
def threads(self, forum_id):
c.forum = meta.Session.query(forum_model.Forum).get(forum_id)
if not c.forum:
abort(404)
c.write_thread_form = WriteThreadForm()
# nb: This will never show post-less threads. Oh well!
last_post = aliased(forum_model.Post)
threads_q = c.forum.threads \
.join((last_post, forum_model.Thread.last_post)) \
.order_by(last_post.posted_time.desc()) \
.options(
contains_eager(forum_model.Thread.last_post, alias=last_post),
joinedload('last_post.author'),
)
c.num_threads = threads_q.count()
try:
c.skip = int(request.params.get('skip', 0))
except ValueError:
abort(404)
c.per_page = 89
c.threads = threads_q.offset(c.skip).limit(c.per_page)
return render('/forum/threads.mako')
def posts(self, forum_id, thread_id):
try:
c.thread = meta.Session.query(forum_model.Thread) \
.filter_by(id=thread_id, forum_id=forum_id).one()
except NoResultFound:
abort(404)
c.write_post_form = WritePostForm()
posts_q = c.thread.posts \
.order_by(forum_model.Post.position.asc()) \
.options(joinedload('author'))
c.num_posts = c.thread.post_count
try:
c.skip = int(request.params.get('skip', 0))
except ValueError:
abort(404)
c.per_page = 89
c.posts = posts_q.offset(c.skip).limit(c.per_page)
return render('/forum/posts.mako')
def write_thread(self, forum_id):
"""Provides a form for posting a new thread."""
if not c.user.can('forum:create-thread'):
abort(403)
try:
c.forum = meta.Session.query(forum_model.Forum) \
.filter_by(id=forum_id).one()
except NoResultFound:
abort(404)
c.write_thread_form = WriteThreadForm(request.params)
return render('/forum/write_thread.mako')
@authenticate_form
def write_thread_commit(self, forum_id):
"""Posts a new thread."""
if not c.user.can('forum:create-thread'):
abort(403)
try:
c.forum = meta.Session.query(forum_model.Forum) \
.filter_by(id=forum_id).one()
except NoResultFound:
abort(404)
c.write_thread_form = WriteThreadForm(request.params)
# Reshow the form on failure
if not c.write_thread_form.validate():
return render('/forum/write_thread.mako')
# Otherwise, add the post.
c.forum = meta.Session.query(forum_model.Forum) \
.with_lockmode('update') \
.get(c.forum.id)
thread = forum_model.Thread(
forum_id = c.forum.id,
subject = c.write_thread_form.subject.data,
post_count = 1,
)
source = c.write_thread_form.content.data
post = forum_model.Post(
position = 1,
author_user_id = c.user.id,
raw_content = source,
content = spline.lib.markdown.translate(source),
)
thread.posts.append(post)
c.forum.threads.append(thread)
meta.Session.commit()
# Redirect to the new thread
h.flash("Contribution to the collective knowledge of the species successfully recorded.")
redirect(
url(controller='forum', action='posts',
forum_id=forum_id, thread_id=thread.id),
code=303,
)
def write(self, forum_id, thread_id):
"""Provides a form for posting to a thread."""
if not c.user.can('forum:create-post'):
abort(403)
try:
c.thread = meta.Session.query(forum_model.Thread) \
.filter_by(id=thread_id, forum_id=forum_id).one()
except NoResultFound:
abort(404)
c.write_post_form = WritePostForm(request.params)
return render('/forum/write.mako')
@authenticate_form
def write_commit(self, forum_id, thread_id):
"""Post to a thread."""
if not c.user.can('forum:create-post'):
abort(403)
try:
c.thread = meta.Session.query(forum_model.Thread) \
.filter_by(id=thread_id, forum_id=forum_id).one()
except NoResultFound:
abort(404)
c.write_post_form = WritePostForm(request.params)
# Reshow the form on failure
if not c.write_post_form.validate():
return render('/forum/write.mako')
# Otherwise, add the post.
c.thread = meta.Session.query(forum_model.Thread) \
.with_lockmode('update') \
.get(c.thread.id)
source = c.write_post_form.content.data
post = forum_model.Post(
position = c.thread.post_count + 1,
author_user_id = c.user.id,
raw_content = source,
content = spline.lib.markdown.translate(source),
)
c.thread.posts.append(post)
c.thread.post_count += 1
meta.Session.commit()
# Redirect to the thread
# XXX probably to the post instead; anchor? depends on paging scheme
h.flash('Your uniqueness has been added to our own.')
redirect(
url(controller='forum', action='posts',
forum_id=forum_id, thread_id=thread_id),
code=303,
)
| veekun/spline | splinext/forum/controllers/forum.py | Python | mit | 10,448 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import division
from quant import config
from .broker import Broker
from quant.api.bitfinex import PrivateClient as BfxClient
from quant.common import constant
import logging
# python -m quant.cli -m Bitfinex_BCH_BTC get-balance
class Bitfinex(Broker):
def __init__(self, pair_code, api_key=None, api_secret=None):
base_currency, market_currency = self.get_available_pairs(pair_code)
super(Bitfinex, self).__init__(base_currency, market_currency, pair_code)
self.client = BfxClient(
api_key if api_key else config.Bitfinex_API_KEY,
api_secret if api_secret else config.Bitfinex_SECRET_TOKEN)
# self.get_balances()
self.bt1_available = 0.0
self.bt1_balance = 0.0
self.bt2_available = 0.0
self.bt2_balance = 0.0
def _buy_limit(self, amount, price):
"""Create a buy limit order"""
res = self.client.place_order(
str(amount),
str(price),
'buy',
'exchange limit',
symbol=self.pair_code)
if res:
if 'order_id' in res:
return res['order_id']
else:
                error_msg = 'unknown error: order_id not present in response'
if 'message' in res:
error_msg = res['message']
raise ValueError(error_msg)
else:
raise ValueError('response is None')
def _sell_limit(self, amount, price):
"""Create a sell limit order"""
res = self.client.place_order(
str(amount),
str(price),
'sell',
'exchange limit',
symbol=self.pair_code)
if res:
if 'order_id' in res:
return res['order_id']
else:
                error_msg = 'unknown error: order_id not present in response'
if 'message' in res:
error_msg = res['message']
raise ValueError(error_msg)
else:
raise ValueError('response is None')
@classmethod
def _order_status(cls, res):
resp = {
'order_id': res['id'],
'amount': float(res['original_amount']),
'price': float(res['price']),
'deal_amount': float(res['executed_amount']),
'avg_price': float(res['avg_execution_price']),
'symbol': res['symbol']
}
is_cancelled = res['is_cancelled']
is_completed = (resp['amount'] == resp['deal_amount'])
if is_completed:
resp['status'] = constant.ORDER_STATE_CLOSED
else:
if is_cancelled:
resp['status'] = constant.ORDER_STATE_CANCELED
else:
resp['status'] = constant.ORDER_STATE_PENDING
return resp
def _get_order(self, order_id, order_type=None):
res = self.client.get_order(int(order_id))
if not res:
raise ValueError('response is None')
if 'message' in res:
raise ValueError(res['message'])
logging.debug('get_order id: %s, res: %s' % (order_id, res))
assert str(res['id']) == str(order_id)
return self._order_status(res)
def _get_active_orders(self):
res = self.client.active_orders()
# if not res:
# raise ValueError('response is None')
# empty is normal condition
if res is None:
raise ValueError('response is None')
# if len(res) == 0:
# raise ValueError('response is empty')
if 'message' in res:
raise ValueError(res['message'])
logging.debug('_get_active_orders res: %s' % res)
orders = []
for item in res:
orders.append(self._order_status(item))
return orders
def _cancel_order(self, order_id, order_type=None):
res = self.client.cancel_order(int(order_id))
if not res:
raise ValueError('response is None')
if 'message' in res:
raise ValueError(res['message'])
assert str(res['id']) == str(order_id)
resp = self._order_status(res)
if resp:
return True
else:
return False
def _cancel_all(self):
return self.client.cancel_all_orders()
def _cancel_orders(self):
try:
orders = self._get_active_orders()
if len(orders) == 0:
return
for order in orders:
if order['symbol'] != self.pair_code:
continue
try:
self._cancel_order(order_id=order['order_id'])
logging.info('_cancel_orders cancel %s success' % order['order_id'])
except Exception as e:
raise Exception('_cancel_orders %s failed : %s' % (order['order_id'], e))
except Exception as e:
raise Exception('_cancel_orders failed when get active orders, error: %s' % e)
def _get_balances(self):
"""Get balance"""
res = self.client.balances()
logging.debug("bitfinex get_balances response: %s" % res)
if not res:
raise ValueError('response is None')
if 'message' in res:
raise ValueError(res['message'])
if 'error' in res:
raise ValueError(res['error'])
for entry in res:
if entry['type'] != 'exchange':
continue
currency = entry['currency']
if currency not in (
'btc', 'bch', 'eth', 'zrx', 'usd', 'bt1', 'bt2', 'zec'):
continue
if currency == 'bch':
self.bch_available = float(entry['available'])
self.bch_balance = float(entry['amount'])
elif currency == 'btc':
self.btc_available = float(entry['available'])
self.btc_balance = float(entry['amount'])
elif currency == 'zec':
self.zec_available = float(entry['available'])
self.zec_balance = float(entry['amount'])
elif currency == 'usd':
self.usd_available = float(entry['available'])
self.usd_balance = float(entry['amount'])
elif currency == 'bt1':
self.bt1_available = float(entry['available'])
self.bt1_balance = float(entry['amount'])
elif currency == 'bt2':
self.bt2_available = float(entry['available'])
self.bt2_balance = float(entry['amount'])
elif currency == 'eth':
self.eth_available = float(entry['available'])
self.eth_balance = float(entry['amount'])
elif currency == 'zrx':
self.zrx_available = float(entry['available'])
self.zrx_balance = float(entry['amount'])
return res
def _ticker(self):
resp = self.client.ticker(self.pair_code)
if resp:
return {
'bid': float(resp['bid']),
'ask': float(resp['ask'])
}
    @classmethod
    def get_available_pairs(cls, pair_code):
        """Tradable pairs: map pair_code to (base_currency, market_currency)."""
        pairs = {
            'ethusd': ('USD', 'ETH'),
            'ethbtc': ('BTC', 'ETH'),
            'btcusd': ('USD', 'BTC'),
            'bt1usd': ('USD', 'BT1'),
            'bt2usd': ('USD', 'BT2'),
            'bt1btc': ('BTC', 'BT1'),
            'bt2btc': ('BTC', 'BT2'),
            'bchusd': ('USD', 'BCH'),
            'bchbtc': ('BTC', 'BCH'),
            'zecusd': ('USD', 'ZEC'),
            'neousd': ('USD', 'NEO'),
            'neobtc': ('BTC', 'NEO'),
            'neoeth': ('ETH', 'NEO'),
            'ioteth': ('ETH', 'IOT'),
            'iotbtc': ('BTC', 'IOT'),
            'iotusd': ('USD', 'IOT'),
            'zrxeth': ('ETH', 'ZRX'),
            'zrxbtc': ('BTC', 'ZRX'),
        }
        assert pair_code in pairs, 'unsupported pair_code: %s' % pair_code
        return pairs[pair_code]
def get_min_stock(self):
resp = self.client.symbols_details()
if resp:
for item in resp:
if item['pair'] == self.pair_code:
return float(item['minimum_order_size'])
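# Illustrative usage sketch (hypothetical credentials, not part of the
# original module); pair codes are those accepted by get_available_pairs:
#   broker = Bitfinex('ethusd', api_key='...', api_secret='...')
#   order_id = broker._buy_limit(amount=0.1, price=250.0)
#   print(broker._get_order(order_id))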
|
doubleDragon/QuantBot
|
quant/brokers/_bitfinex.py
|
Python
|
mit
| 9,514
|
#!/usr/bin/env python
import unittest
import math
import mastermind
class TestMasterMind(unittest.TestCase):
def test_code(self):
code = mastermind.gencode()
self.assertTrue(code)
def test_duplicates(self):
'''Duplicate guesses are not counted'''
code = (1,1,2,2)
guess = (1,1,1,2)
fb = mastermind.feedback(code, guess)
self.assertTrue(fb == (3,0), "{} {} {}".format(code, guess, fb))
def test_duplicates2(self):
'''Middle 2's do not give white key pegs'''
code = (1,1,1,2)
guess = (1,2,2,2)
fb = mastermind.feedback(code, guess)
self.assertTrue(fb == (2,0), "{} {} {}".format(code, guess, fb))
def test_duplicates3(self):
        '''More duplicates in code than guesses'''
code = [5, 0, 5, 5]
guess = [5, 5, 3, 2]
fb = mastermind.feedback(code, guess)
self.assertTrue(fb == (1,1), "{} {} {}".format(code, guess, fb))
def test_duplicates4(self):
code = [1, 3, 2, 3]
guess = [0, 3, 1, 1]
fb = mastermind.feedback(code, guess)
self.assertTrue(fb == (1,1), "{} {} {}".format(code, guess, fb))
def test_duplicates5(self):
code = [2, 2, 5, 3]
guess = [1, 1, 2, 2]
fb = mastermind.feedback(code, guess)
self.assertTrue(fb == (0,2), "{} {} {}".format(code, guess, fb))
def test_duplicates6(self):
code = [5, 5, 0, 3]
guess = [1, 1, 2, 2]
fb = mastermind.feedback(code, guess)
self.assertTrue(fb == (0,0), "{} {} {}".format(code, guess, fb))
def test_correct(self):
code = (1,2,3,4)
guess = (1,2,3,4)
fb = mastermind.feedback(code, guess)
self.assertTrue(fb == (4,0))
def test_wrong(self):
code = (2,2,2,2)
guess = (1,1,1,1)
fb = mastermind.feedback(code, guess)
self.assertTrue(fb == (0,0))
def test_misc(self):
code = (1,2,3,4)
guess = (4,3,2,1)
fb = mastermind.feedback(code, guess)
self.assertTrue(fb == (0,4))
def test_calc(self):
p = mastermind.calcperm(0,0,6,4)
self.assertEqual(p, math.pow(6,4))
def test_calc2(self):
p = mastermind.calcperm(6,0,6,4)
self.assertEqual(p, 1)
def test_calc3(self):
p = mastermind.calcperm(1,1,6,4)
self.assertEqual(p, 180)
def test_calc4(self):
p = mastermind.calcperm(3,1,6,4)
self.assertEqual(p, 5)
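# A minimal reference sketch (an assumption, not the project's actual
# mastermind.feedback) of the semantics the tests above encode: black pegs
# count exact positional matches; white pegs count the per-colour overlap of
# the remaining, non-exact positions.
from collections import Counter as _Counter
def feedback_sketch(code, guess):
    # Black pegs: positions where code and guess agree exactly.
    black = sum(c == g for c, g in zip(code, guess))
    # White pegs: colour overlap among the non-exact positions only.
    rest_code = _Counter(c for c, g in zip(code, guess) if c != g)
    rest_guess = _Counter(g for c, g in zip(code, guess) if c != g)
    white = sum((rest_code & rest_guess).values())
    return (black, white)
assert feedback_sketch((1, 1, 2, 2), (1, 1, 1, 2)) == (3, 0)
assert feedback_sketch((2, 2, 5, 3), (1, 1, 2, 2)) == (0, 2)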
if __name__ == '__main__':
unittest.main()
|
peterstark72/mastermind
|
test.py
|
Python
|
mit
| 2,224
|
#! /usr/bin/python2
# vim: set fileencoding=utf-8
# from http://code.activestate.com/recipes/576611/
from operator import itemgetter
from heapq import nlargest
from itertools import repeat, ifilter
class Counter(dict):
'''Dict subclass for counting hashable objects. Sometimes called a bag
or multiset. Elements are stored as dictionary keys and their counts
are stored as dictionary values.
>>> Counter('zyzygy')
Counter({'y': 3, 'z': 2, 'g': 1})
'''
def __init__(self, iterable=None, **kwds):
'''Create a new, empty Counter object. And if given, count elements
from an input iterable. Or, initialize the count from another mapping
of elements to their counts.
>>> c = Counter() # a new, empty counter
>>> c = Counter('gallahad') # a new counter from an iterable
>>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping
>>> c = Counter(a=4, b=2) # a new counter from keyword args
'''
self.update(iterable, **kwds)
def __missing__(self, key):
return 0
def most_common(self, n=None):
'''List the n most common elements and their counts from the most
common to the least. If n is None, then list all element counts.
>>> Counter('abracadabra').most_common(3)
[('a', 5), ('r', 2), ('b', 2)]
'''
if n is None:
return sorted(self.iteritems(), key=itemgetter(1), reverse=True)
return nlargest(n, self.iteritems(), key=itemgetter(1))
def elements(self):
'''Iterator over elements repeating each as many times as its count.
>>> c = Counter('ABCABC')
>>> sorted(c.elements())
['A', 'A', 'B', 'B', 'C', 'C']
If an element's count has been set to zero or is a negative number,
elements() will ignore it.
'''
for elem, count in self.iteritems():
for _ in repeat(None, count):
yield elem
# Override dict methods where the meaning changes for Counter objects.
@classmethod
def fromkeys(cls, iterable, v=None):
raise NotImplementedError(
'Counter.fromkeys() is undefined. Use Counter(iterable) instead.')
def update(self, iterable=None, **kwds):
'''Like dict.update() but add counts instead of replacing them.
Source can be an iterable, a dictionary, or another Counter instance.
>>> c = Counter('which')
>>> c.update('witch') # add elements from another iterable
>>> d = Counter('watch')
>>> c.update(d) # add elements from another counter
>>> c['h'] # four 'h' in which, witch, and watch
4
'''
if iterable is not None:
if hasattr(iterable, 'iteritems'):
if self:
self_get = self.get
for elem, count in iterable.iteritems():
self[elem] = self_get(elem, 0) + count
else:
dict.update(self, iterable) # fast path when counter is empty
else:
self_get = self.get
for elem in iterable:
self[elem] = self_get(elem, 0) + 1
if kwds:
self.update(kwds)
def copy(self):
'Like dict.copy() but returns a Counter instance instead of a dict.'
return Counter(self)
def __delitem__(self, elem):
'Like dict.__delitem__() but does not raise KeyError for missing values.'
if elem in self:
dict.__delitem__(self, elem)
def __repr__(self):
if not self:
return '%s()' % self.__class__.__name__
items = ', '.join(map('%r: %r'.__mod__, self.most_common()))
return '%s({%s})' % (self.__class__.__name__, items)
# Multiset-style mathematical operations discussed in:
# Knuth TAOCP Volume II section 4.6.3 exercise 19
# and at http://en.wikipedia.org/wiki/Multiset
#
# Outputs guaranteed to only include positive counts.
#
# To strip negative and zero counts, add-in an empty counter:
# c += Counter()
def __add__(self, other):
'''Add counts from two counters.
>>> Counter('abbb') + Counter('bcc')
Counter({'b': 4, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem in set(self) | set(other):
newcount = self[elem] + other[elem]
if newcount > 0:
result[elem] = newcount
return result
def __sub__(self, other):
''' Subtract count, but keep only results with positive counts.
>>> Counter('abbbc') - Counter('bccd')
Counter({'b': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
result = Counter()
for elem in set(self) | set(other):
newcount = self[elem] - other[elem]
if newcount > 0:
result[elem] = newcount
return result
def __or__(self, other):
'''Union is the maximum of value in either of the input counters.
>>> Counter('abbb') | Counter('bcc')
Counter({'b': 3, 'c': 2, 'a': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
_max = max
result = Counter()
for elem in set(self) | set(other):
newcount = _max(self[elem], other[elem])
if newcount > 0:
result[elem] = newcount
return result
def __and__(self, other):
''' Intersection is the minimum of corresponding counts.
>>> Counter('abbb') & Counter('bcc')
Counter({'b': 1})
'''
if not isinstance(other, Counter):
return NotImplemented
_min = min
result = Counter()
if len(self) < len(other):
self, other = other, self
for elem in ifilter(self.__contains__, other):
newcount = _min(self[elem], other[elem])
if newcount > 0:
result[elem] = newcount
return result
if __name__ == '__main__':
import doctest
print doctest.testmod()
|
daureg/illalla
|
Counter.py
|
Python
|
mit
| 6,401
|
# coding: utf-8
"""
Generated by: https://openapi-generator.tech
"""
from dataclasses import dataclass
import re # noqa: F401
import sys # noqa: F401
import typing
import urllib3
from urllib3._collections import HTTPHeaderDict
from openapi_client import api_client, exceptions
import decimal # noqa: F401
from datetime import date, datetime # noqa: F401
from frozendict import frozendict # noqa: F401
from openapi_client.schemas import ( # noqa: F401
AnyTypeSchema,
ComposedSchema,
DictSchema,
ListSchema,
StrSchema,
IntSchema,
Int32Schema,
Int64Schema,
Float32Schema,
Float64Schema,
NumberSchema,
DateSchema,
DateTimeSchema,
DecimalSchema,
BoolSchema,
BinarySchema,
NoneSchema,
none_type,
InstantiationMetadata,
Unset,
unset,
ComposedBase,
ListBase,
DictBase,
NoneBase,
StrBase,
IntBase,
NumberBase,
DateBase,
DateTimeBase,
BoolBase,
BinaryBase,
Schema,
_SchemaValidator,
_SchemaTypeChecker,
_SchemaEnumMaker
)
from openapi_client.model.list_view import ListView
# path params
NameSchema = StrSchema
RequestRequiredPathParams = typing.TypedDict(
'RequestRequiredPathParams',
{
'name': NameSchema,
}
)
RequestOptionalPathParams = typing.TypedDict(
'RequestOptionalPathParams',
{
},
total=False
)
class RequestPathParams(RequestRequiredPathParams, RequestOptionalPathParams):
pass
request_path_name = api_client.PathParameter(
name="name",
style=api_client.ParameterStyle.SIMPLE,
schema=NameSchema,
required=True,
)
_path = '/view/{name}/api/json'
_method = 'GET'
_auth = [
'jenkins_auth',
]
SchemaFor200ResponseBodyApplicationJson = ListView
@dataclass
class ApiResponseFor200(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: typing.Union[
SchemaFor200ResponseBodyApplicationJson,
]
headers: Unset = unset
_response_for_200 = api_client.OpenApiResponse(
response_cls=ApiResponseFor200,
content={
'application/json': api_client.MediaType(
schema=SchemaFor200ResponseBodyApplicationJson),
},
)
@dataclass
class ApiResponseFor401(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: Unset = unset
headers: Unset = unset
_response_for_401 = api_client.OpenApiResponse(
response_cls=ApiResponseFor401,
)
@dataclass
class ApiResponseFor403(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: Unset = unset
headers: Unset = unset
_response_for_403 = api_client.OpenApiResponse(
response_cls=ApiResponseFor403,
)
@dataclass
class ApiResponseFor404(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: Unset = unset
headers: Unset = unset
_response_for_404 = api_client.OpenApiResponse(
response_cls=ApiResponseFor404,
)
_status_code_to_response = {
'200': _response_for_200,
'401': _response_for_401,
'403': _response_for_403,
'404': _response_for_404,
}
_all_accept_content_types = (
'application/json',
)
class GetView(api_client.Api):
def get_view(
self: api_client.Api,
path_params: RequestPathParams = frozendict(),
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization
]:
"""
:param skip_deserialization: If true then api_response.response will be set but
api_response.body and api_response.headers will not be deserialized into schema
class instances
"""
self._verify_typed_dict_inputs(RequestPathParams, path_params)
_path_params = {}
for parameter in (
request_path_name,
):
parameter_data = path_params.get(parameter.name, unset)
if parameter_data is unset:
continue
serialized_data = parameter.serialize(parameter_data)
_path_params.update(serialized_data)
_headers = HTTPHeaderDict()
# TODO add cookie handling
if accept_content_types:
for accept_content_type in accept_content_types:
_headers.add('Accept', accept_content_type)
response = self.api_client.call_api(
resource_path=_path,
method=_method,
path_params=_path_params,
headers=_headers,
auth_settings=_auth,
stream=stream,
timeout=timeout,
)
if skip_deserialization:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
else:
response_for_status = _status_code_to_response.get(str(response.status))
if response_for_status:
api_response = response_for_status.deserialize(response, self.api_client.configuration)
else:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
if not 200 <= response.status <= 299:
raise exceptions.ApiException(api_response=api_response)
return api_response
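# Illustrative call pattern (hypothetical configuration object; endpoint,
# params and response schema as generated above):
#   api = GetView(api_client.ApiClient(configuration))
#   response = api.get_view(path_params={'name': 'my-view'})
#   list_view = response.body  # deserialized ListView on HTTP 200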
|
cliffano/swaggy-jenkins
|
clients/python-experimental/generated/openapi_client/api/remote_access_api_endpoints/get_view.py
|
Python
|
mit
| 5,347
|
"""Mongo osid profile elements for proxy service packages"""
# -*- coding: utf-8 -*-
# pylint: disable=unused-import
# importing common values to be used by proxy.ProfileManger implementation
from ..profile import ID
from ..profile import LANGUAGETYPE
from ..profile import SCRIPTTYPE
from ..profile import FORMATTYPE
from ..profile import VERSIONSCHEME
from ..profile import LOCALES
from ..profile import LICENSE
from ..profile import PROVIDERID
from ..profile import OSIDVERSION
DISPLAYNAME = 'Mongo proxy'
DESCRIPTION = 'MongoDB based proxy implementation'
VERSIONCOMPONENTS = [0, 1, 45]
RELEASEDATE = "2018-03-08"
SUPPORTS = [ # 'Remove the # when implementations exist:'
# 'supports_journal_rollback',
# 'supports_journal_branching',
'supports_proxy',
]
|
mitsei/dlkit
|
dlkit/json_/proxy/profile.py
|
Python
|
mit
| 781
|
from keypad import Keypad
def parse(lines):
return [x.strip() for x in lines]
def step1(input):
keypad_look = """ 1 \n 234 \n56789\n ABC \n D """
keys = Keypad.parse_keys(keypad_look)
keypad = Keypad(keys)
keypad.follow(input)
return keypad.code
|
erikedin/aoc2016
|
aoc2016/day2.py
|
Python
|
mit
| 275
|
#!/usr/bin/env python
# Try to use setuptools from http://peak.telecommunity.com/DevCenter/setuptools
import os
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from distutils.core import Extension
try:
from Pyrex.Distutils import build_ext
except ImportError:
build_ext = None
VERSION = '0.1.0'
BUILD_C_EXTS = False
extensions = []
cmdclass = {}
if BUILD_C_EXTS and build_ext:
particles_extension = Extension('ext.particles',
[os.path.join('miru', 'ext', 'particles.pyx')])
extensions = [particles_extension]
cmdclass = { 'build_ext' : build_ext}
if BUILD_C_EXTS and not build_ext:
from warnings import warn
warn("Pyrex is required to build C-based modules in miru.\n"
"Get Pyrex at: http://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/")
setup(
author='Drew Smathers',
author_email='drew dot smathers at gmail dot com',
name='Miru',
version=VERSION,
install_requires=['zope.interface','pyglet','Twisted==8.2.0'],
description="""Higher-level graphics abstractions for pyglet""",
license='MIT License',
url='http://enterthefoo.com:8001/Miru',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Multimedia :: Graphics :: 3D Rendering'],
packages=[ 'miru', 'miru.tools', 'miru.test' ],
py_modules=[ 'euclid' ],
package_dir={'miru': 'miru'},
package_data={'miru': ['test/*.obj', 'test/*.mtl']},
ext_package='miru',
ext_modules = extensions,
cmdclass = cmdclass
)
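# Typical invocations (illustrative): `python setup.py install` to install the
# package, or `python setup.py build_ext` to compile the optional Pyrex
# extensions when BUILD_C_EXTS is enabled.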
|
Knio/miru
|
setup.py
|
Python
|
mit
| 1,904
|
# from sqlalchemy import Table
from sqlalchemy import Column, String, Integer, event, ForeignKey
from sqlalchemy.dialects.sqlite import DATETIME
from sqlalchemy.orm import relationship, backref
import uuid
from datetime import datetime
from ..db import db
custom_datetime = DATETIME(
timezone="UTC",
)
# TODO Why doesn't custom datetime without milliseconds work?
# custom_datetime = DATETIME(
# timezone="UTC",
# storage_format="%(year)04d/%(month)02d/%(day)02d %(hour)02d:%(min)02d:%(second)02d",
# regexp=r"(\d+)-(\d+)-(\d+) (\d+):(\d+):(\d+)"
# )
# ..............................................................................
# Example of reflecting existing database table
# class client(db.MyBase, db.Base):
# __table__ = Table('client', db.metadata, autoload=True)
# ..............................................................................
class entry(db.MyBase, db.Base):
__tablename__ = 'entry'
uuid = Column(String, primary_key=True)
sheet = Column(String, index=True)
start_time = Column(custom_datetime)
end_time = Column(custom_datetime)
modified = Column(custom_datetime)
task_uuid = Column(String, ForeignKey("task.uuid"), nullable=True,
default=None)
task = relationship("task",
backref=backref("_entries", order_by=start_time))
# ..............................................................................
class task(db.MyBase, db.Base):
__tablename__ = 'task'
uuid = Column(String, primary_key=True)
project = Column(String)
urgency = Column(String)
description = Column(String)
modified = Column(custom_datetime)
completed = Column(custom_datetime)
# ..............................................................................
# The pointer table is used to assign short IDs to pending tasks.
# Once a task is marked as completed it opens up a slot in the pointer table.
# If the pointer table is full when a new task is created we add a new row,
# otherwise an existing pointer row with null task_uuid is used.
class pointer(db.MyBase, db.Base):
__tablename__ = 'pointer'
# http://stackoverflow.com/a/4567698/639133
__table_args__ = {"sqlite_autoincrement": True}
id = Column(Integer, primary_key=True)
task_uuid = Column(String, ForeignKey("task.uuid"), nullable=True,
default=None)
task = relationship("task", backref=backref("_pointer", order_by=id))
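# A minimal sketch of the slot-reuse scheme described above (hypothetical
# helper, not part of the original module): reuse a free pointer row when one
# exists, otherwise let sqlite_autoincrement mint a new id.
def _allocate_pointer_sketch(session, new_task):
    slot = session.query(pointer).filter(pointer.task_uuid.is_(None)).first()
    if slot is None:
        slot = pointer()  # appends a new row with the next autoincrement id
        session.add(slot)
    slot.task = new_task
    return slot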
# ..............................................................................
class task_tag(db.MyBase, db.Base):
__tablename__ = 'task_tag'
uuid = Column(String, primary_key=True)
modified = Column(custom_datetime)
tag_uuid = Column(String, ForeignKey("tag.uuid"), nullable=True,
default=None)
tag = relationship("tag", backref=backref("_task_tags"))
task_uuid = Column(String, ForeignKey("task.uuid"), nullable=True,
default=None)
task = relationship("task", backref=backref("_task_tags"))
# ..............................................................................
class tag(db.MyBase, db.Base):
__tablename__ = 'tag'
uuid = Column(String, primary_key=True)
tag = Column(String)
modified = Column(custom_datetime)
# ..............................................................................
# Create listener
def update_created_modified_on_create_listener(mapper, connection, target):
if hasattr(target, "uuid") and target.uuid is None:
# For all tables except task we want to generate UUIDs
        target.uuid = str(uuid.uuid4())
target.modified = datetime.utcnow()
# Update listener
def update_modified_on_update_listener(mapper, connection, target):
target.modified = datetime.utcnow()
for my_class in db.MyBase._all_subclasses():
event.listen(my_class, 'before_insert',
update_created_modified_on_create_listener)
event.listen(my_class, 'before_update', update_modified_on_update_listener)
# ..............................................................................
# Allowed keywords for commands
commands = ["add", "done", "ls", "mod", "remove", "start", "stop", "timesheet"]
# Allowed keywords for mods
mods = ["description", "project", ["modified", ["today"]], "sheet", "tag",
"urgency"]
# Allowed values for urgency
urgency = ["h", "m", "l"]
# ..............................................................................
def create_all():
db.Base.metadata.create_all(db.engine)
|
mozey/taskmage
|
taskmage/db/models.py
|
Python
|
mit
| 4,552
|
import argparse
import logging
import math
import random
import re
import string
import warnings
from abc import ABC, abstractmethod
from enum import Enum
from functools import lru_cache, reduce
from itertools import groupby, product
from pathlib import Path
from typing import Any, Callable, Dict, FrozenSet, Iterable, List, Optional, Set, Tuple, Union, cast
import graphviz
from pudzu.utils import first, merge, merge_with
from pyparsing import ParseException
from pyparsing import printables as ascii_printables
from pyparsing import pyparsing_unicode as ppu
from pyparsing import srange
State = Any # really it's Union[str, Tuple['State']]
Move = Enum("Move", "EMPTY ALL")
Input = Union[str, Move]
Transitions = Dict[Tuple[State, Input], Set[State]]
CaptureGroup = str
Captures = Dict[Tuple[State, Input], Set[CaptureGroup]]
CaptureOutput = Dict[CaptureGroup, str]
logger = logging.getLogger("patterns")
DEBUG = False
DICTIONARY_FSM = None
EXPLICIT_FSM = None
SUBPATTERNS = {}
EXTRA_PRINTABLES = ""
SLOW_SIMPLIFICATION = True
class NFA:
"""Nondeterministic Finite Automata with
- single start state (with no inbounds) and end state (with no outbounds)
- ε-moves (including potential ε loops)
- *-moves (only used if there is no other matching move)
"""
def __init__(self, start: State, end: State, transitions: Transitions, captures: Optional[Captures] = None):
self.start = start
self.end = end
self.transitions = transitions
self.captures = captures or {}
self.states = {self.start, self.end} | {s for s, _ in self.transitions.keys()} | {t for ts in self.transitions.values() for t in ts}
def __repr__(self) -> str:
return f"NFA(start={self.start}, end={self.end}, transitions={self.transitions})"
def match(self, string: str) -> Optional[CaptureOutput]:
"""Match the NFA against a string input. Returns a CaptureOutput if found, or None otherwise."""
old_states: Dict[State, CaptureOutput] = {self.start: {}}
for c in string:
new_states: Dict[State, CaptureOutput] = {}
for s, co in old_states.items():
for se in self.expand_epsilons({s}):
for t in self.transitions.get((se, c), self.transitions.get((se, Move.ALL), set())):
if t not in new_states:
cgs = self.captures.get((se, c), set()) if (se, c) in self.transitions else self.captures.get((se, Move.ALL), set())
tco = merge(co, {cg: co.get(cg, "") + c for cg in cgs})
new_states[t] = tco
old_states = new_states
for s, co in old_states.items():
if self.end in self.expand_epsilons({s}):
return co
return None
def expand_epsilons(self, states: Iterable[State]) -> Set[State]:
"""Expand a collection of states along all ε-moves"""
old: Set[State] = set()
new = states
while new:
old.update(new)
new = {t for s in new for t in self.transitions.get((s, Move.EMPTY), set()) if t not in old}
return old
def remove_redundant_states(self, aggressive: bool = False) -> None:
"""Trim the NFA, removing unnecessary states and transitions."""
# remove states not reachable from the start
reachable, new = set(), {self.start}
while new:
reachable.update(new)
new = {t for (s, i), ts in self.transitions.items() if s in new for t in ts if t not in reachable}
self.states = reachable | {self.start, self.end}
self.transitions = {(s, i): ts for (s, i), ts in self.transitions.items() if s in reachable}
# remove states that can't reach the end (and any transitions to those states)
acceptable, new = set(), {self.end}
while new:
acceptable.update(new)
new = {s for (s, i), ts in self.transitions.items() if any(t in new for t in ts) and s not in acceptable}
self.states = acceptable | {self.start, self.end}
self.transitions = {
(s, i): {t for t in ts if t in acceptable}
for (s, i), ts in self.transitions.items()
if s in acceptable and (any(t in acceptable for t in ts) or (s, Move.ALL) in self.transitions)
}
# remove transitions that are equivalent to *
unnecessary: List[Tuple[State, Input]] = []
for (s, i), t in self.transitions.items():
if not isinstance(i, Move) and t == self.transitions.get((s, Move.ALL), set()):
unnecessary.append((s, i))
for k in unnecessary:
del self.transitions[k]
# remove capture information for trimmed transitions
self.captures = {(s, i): cs for (s, i), cs in self.captures.items() if (s, i) in self.transitions and i != Move.EMPTY}
if aggressive:
# remove states that only go via empty to self.end
# (don't call this from MatchBoth as it would break assumptions of some calling functions)
removable = set()
not_removable = set()
for (s, i), t in self.transitions.items():
if s != self.start and i == Move.EMPTY and t == {self.end}:
removable.add(s)
else:
not_removable.add(s)
removable = removable - not_removable
if removable:
unnecessary = []
for (s, i), ts in self.transitions.items():
if s in removable:
unnecessary.append((s, i))
elif any(t in removable for t in ts):
self.transitions[(s, i)] = {t for t in ts if t not in removable} | {self.end}
for k in unnecessary:
del self.transitions[k]
self.states -= removable
def render(self, name: str, console: bool = False, compact: bool = False) -> None:
"""Render the NFA as a dot.svg file."""
bg = "transparent" if console else "white"
fg = "white" if console else "black"
g = graphviz.Digraph(format="svg")
g.attr(rankdir="LR", bgcolor=bg)
states = set(self.states)
start = self.start
ends = {self.end}
if compact:
if {i for (s, i) in self.transitions if s == self.start} == {Move.EMPTY} and len(self.transitions.get((self.start, Move.EMPTY), set())) == 1:
states.remove(self.start)
start = first(self.transitions[(self.start, Move.EMPTY)])
if {i for (s, i), ts in self.transitions.items() if self.end in ts} == {Move.EMPTY}:
states.remove(self.end)
ends = {s for (s, i), ts in self.transitions.items() if self.end in ts}
# states
for s in states:
shape = "doublecircle" if s in ends else "ellipse"
root = "true" if s == start else "false"
g.node(str(s), root=root, shape=shape, label="", color=fg)
if s == start:
g.node("prestart", style="invisible")
g.edge("prestart", str(s), style="bold", color=fg)
# transitions
reverse_dict: Dict[State, Dict[Tuple[State, FrozenSet[CaptureGroup]], Set[Input]]] = {}
for (s, i), ts in self.transitions.items():
if s not in states:
continue
for t in ts:
if t not in states:
continue
c = frozenset(self.captures.get((s, i), set()))
reverse_dict.setdefault(s, {}).setdefault((t, c), set()).add(i)
for s, d in reverse_dict.items():
for (t, c), ii in d.items():
for move in (i for i in ii if isinstance(i, Move)):
if move == Move.EMPTY:
label = "ε"
else:
label = char_class("".join(j for u, j in self.transitions if u == s and isinstance(j, str)), negated=True)
if c:
label += f" {{{','.join(c)}}}"
g.edge(str(s), str(t), label=label, color=fg, fontcolor=fg)
input = "".join(sorted(i for i in ii if isinstance(i, str)))
if len(input) >= 1:
label = "SPACE" if input == " " else char_class(input)
if c:
label += f" {{{','.join(c)}}}"
g.edge(str(s), str(t), label=label, color=fg, fontcolor=fg)
g.render(filename=name + ".dot")
def save(self, name: str, renumber_states: bool = True) -> None:
"""Save FSM as a .fsm file."""
# TODO: save and load capture groups
def sort_key(s):
# Q: is there a better state ordering?
return "" if s == self.start else ")" if s == self.end else str(s)
sorted_states = sorted(self.states, key=sort_key)
def label(s):
return (
"START"
if s == self.start
else "END"
if s == self.end
else str(sorted_states.index(s))
if renumber_states
else str(s).replace("'", "").replace(" ", "")
)
with open(name + ".fsm", "w", encoding="utf-8") as f:
reverse_dict: Dict[State, Dict[FrozenSet[State], Set[Input]]] = {}
for (s, i), ts in self.transitions.items():
reverse_dict.setdefault(s, {}).setdefault(frozenset(ts), set()).add(i)
for state in sorted_states:
from_label = label(state)
for fts, ii in reverse_dict.get(state, {}).items():
to_labels = " ".join(label(t) for t in fts)
for move in (i for i in ii if isinstance(i, Move)):
print(f"{from_label} {str(move).replace('Move.','')} {to_labels}", file=f)
input = "".join(sorted(i for i in ii if isinstance(i, str)))
if len(input) >= 1:
input_label = "SPACE" if input == " " else char_class(input)
print(f"{from_label} {input_label} {to_labels}", file=f)
def example(self, min_length: int = 0, max_length: Optional[int] = None) -> Optional[str]:
"""Generate a random matching string. Assumes NFA has been trimmed of states that can't reach the end."""
nfa = MatchBoth(self, MatchLength(min_length, max_length)) if min_length or max_length is not None else self
output = ""
state = nfa.start
try:
while state != nfa.end:
choices = [i for (s, i) in nfa.transitions if s == state]
non_empty = [i for i in choices if nfa.transitions[(state, i)]]
i = random.choice(non_empty)
if i == Move.ALL:
# TODO: match with supported scripts?
options = list(set(string.ascii_letters + string.digits + " '") - set(i for i in choices if isinstance(i, str)))
output += random.choice(options)
elif isinstance(i, str):
output += i
state = random.choice(list(nfa.transitions[(state, i)]))
except IndexError:
return None
return output
def bound(self, lower_bound: bool, max_length: int) -> Optional[str]:
"""Generate a lower/upper lexicographic bound for the FSM."""
bound = ""
minmax = min if lower_bound else max
states = self.expand_epsilons({self.start})
while (not lower_bound or self.end not in states) and len(bound) < max_length:
least_char = None
next_state = None
for state in states:
least_trans = minmax([i for (s, i), ts in self.transitions.items() if s == state and ts and isinstance(i, str)], default=None)
if least_trans and (not least_char or least_trans == minmax((least_trans, least_char))):
least_char, next_state = least_trans, first(self.transitions[(state, least_trans)])
if self.transitions.get((state, Move.ALL)):
# TODO: match with supported scripts?
least_any = minmax(set(ascii_printables + " ") - {i for (s, i) in self.transitions if s == state})
if not least_char or least_any == minmax((least_any, least_char)):
least_char, next_state = least_any, first(self.transitions[(state, Move.ALL)])
if not least_char:
break
bound += least_char
states = self.expand_epsilons({next_state})
else:
if not lower_bound:
bound = bound[:-1] + chr(ord(bound[-1]) + 1)
return bound
def regex(self) -> "Regex":
"""Generate a regex corresponding to the NFA."""
L = {(i, j): RegexConcat() if i == j else RegexUnion() for i in self.states for j in self.states}
for (i, a), js in self.transitions.items():
for j in js:
if a == Move.ALL:
L[i, j] |= RegexNegatedChars("".join(b for k, b in self.transitions if i == k and isinstance(b, str)))
elif a == Move.EMPTY:
L[i, j] |= RegexConcat()
else:
L[i, j] |= RegexChars(a)
remaining = set(self.states)
for k in self.states:
if k == self.start or k == self.end:
continue
remaining.remove(k)
for i in remaining:
for j in remaining:
L[i, i] |= RegexConcat((L[i, k], RegexStar(L[k, k]), L[k, i]))
L[j, j] |= RegexConcat((L[j, k], RegexStar(L[k, k]), L[k, j]))
L[i, j] |= RegexConcat((L[i, k], RegexStar(L[k, k]), L[k, j]))
L[j, i] |= RegexConcat((L[j, k], RegexStar(L[k, k]), L[k, i]))
return L[self.start, self.end]
def min_length(self) -> Optional[int]:
""" The minimum possible length match. """
# use Dijkstra to find shortest path
unvisited = set(self.states)
distances = {s: 0 if s == self.start else math.inf for s in self.states}
current = self.start
while current in unvisited:
base = distances[current]
for (r, i), ss in self.transitions.items():
if r == current:
weight = base + (i != Move.EMPTY)
for s in ss:
if s in unvisited:
distances[s] = min(distances[s], weight)
unvisited.remove(current)
if current == self.end:
return int(distances[self.end])
current, distance = min(distances.items(), key=lambda x: math.inf if x[0] not in unvisited else x[1])
if distance == math.inf:
return None
return None
def max_length(self) -> Optional[int]:
""" The maximum possible length match. """
# converts to a regex, though there's probably a more efficient way
max_length = self.regex().max_length()
return int(max_length) if math.isfinite(max_length) else None
def char_class(chars: str, negated: bool = False) -> str:
"""Generate a character class description of the given characters"""
if len(chars) == 0 and negated:
return "."
elif len(chars) == 1:
if negated or chars in Pattern.literal_exclude:
return f"[{'^'*negated}{chars}]"
return chars
# find runs of length 4+
ordered = sorted(set(chars))
runs, i = [], 0
ords = [ord(c) - i for i, c in enumerate(ordered)]
for _, g in groupby(ords):
n = len([*g])
runs += ordered[i : i + n] if n < 4 else [ordered[i] + "-" + ordered[i + n - 1]]
i += n
# order things to minimise likelihood of having to escape anything
def sort_key(r):
if "]" in r:
return 0
if "-" in r[::2]:
return 1 if r[0] == "-" and "]" not in ordered else 4
if "^" in r:
return 3
else:
return 2
    runs.sort(key=sort_key)
# TODO: escape -^\] when needed once we can parse that
return f"[{'^'*negated}{''.join(runs)}]"
# pylint: disable=unbalanced-tuple-unpacking
def new_states(*names: str) -> List[Callable[..., State]]:
"""Return functions for generating new state names using the given labels.
Note that names are sorted alphabetically when generating FSM descriptions."""
generators = []
for name in names:
generators.append((lambda name: (lambda *args: (name, *args) if args else name))(name))
return generators
# NFA constructors
def merge_trans(*args):
"""Merge multiple transitions, unioning target states."""
return merge_with(lambda x: set.union(*x), *args)
def MatchEmpty() -> NFA:
"""Empty match"""
return NFA("1", "2", {("1", Move.EMPTY): {"2"}})
def MatchIn(characters: str) -> NFA:
"""Handles: a, [abc]"""
return NFA("1", "2", {("1", c): {"2"} for c in characters})
def MatchNotIn(characters: str) -> NFA:
"""Handles: [^abc], ."""
return NFA("1", "2", merge_trans({("1", Move.ALL): {"2"}}, {("1", c): set() for c in characters}))
def MatchWords(words: Iterable[str]) -> NFA:
# generate a prefix tree
start, end = ("0",), ("1",)
transitions: Transitions = {}
for word in words:
for i in range(len(word)):
transitions.setdefault((word[:i] or start, word[i]), set()).add(word[: i + 1])
transitions[(word, Move.EMPTY)] = {end}
return NFA(start, end, transitions)
def MatchDictionary(path: Path) -> NFA:
r"""Handles: \w"""
with open(str(path), "r", encoding="utf-8") as f:
return MatchWords(w.rstrip("\n") for w in f)
def ExplicitFSM(path: Path) -> NFA:
r"""Handles: \f"""
transitions: Transitions = {}
with open(str(path), "r", encoding="utf-8") as f:
for line in f:
args = line.split()
if args:
x: Input
start, x, *end = args
if start == "END":
raise ValueError("END state should have no outbound arrows")
elif "START" in end:
raise ValueError("START state should have no inbound arrows")
elif x == "SPACE":
x = " "
if re.match(r"^\[\^.+]$", x):
transitions.setdefault((start, Move.ALL), set()).update(end)
for x in srange(x):
transitions.setdefault((start, x), set())
elif re.match(r"^\[.+]$", x):
for x in srange(x):
transitions.setdefault((start, x), set()).update(end)
elif x in {"EMPTY", "ALL"}:
x = {"EMPTY": Move.EMPTY, "ALL": Move.ALL}[x]
transitions.setdefault((start, x), set()).update(end)
elif len(x) > 1:
raise ValueError(f"Unexpected FSM input `{x}`: should be character, class, ALL or EMPTY")
else:
transitions.setdefault((start, x), set()).update(end)
return NFA("START", "END", transitions)
def MatchCapture(nfa: NFA, id: CaptureGroup) -> NFA:
"""Handles: (?<id>A)"""
captures = {(s, i): {id} for (s, i) in nfa.transitions if i != Move.EMPTY}
return NFA(nfa.start, nfa.end, nfa.transitions, merge_trans(nfa.captures, captures))
def MatchAfter(nfa1: NFA, nfa2: NFA) -> NFA:
"""Handles: AB"""
First, Second = new_states("a", "b")
t1 = {(First(s), i): {First(t) for t in ts} for (s, i), ts in nfa1.transitions.items()}
c1 = {(First(s), i): cs for (s, i), cs in nfa1.captures.items()}
t2 = {
(First(nfa1.end) if s == nfa2.start else Second(s), i): {First(nfa1.end) if t == nfa2.start else Second(t) for t in ts}
for (s, i), ts in nfa2.transitions.items()
}
c2 = {(First(nfa1.end) if s == nfa2.start else Second(s), i): cs for (s, i), cs in nfa2.captures.items()}
return NFA(First(nfa1.start), Second(nfa2.end), merge_trans(t1, t2), merge_trans(c1, c2))
def MatchEither(*nfas: NFA) -> NFA:
"""Handles: A|B (and arbitrary alternation too)"""
Start, End, *Option = new_states("a", "z", *[str(n) for n in range(1, len(nfas) + 1)])
tis, cis = [], []
for n, nfa in enumerate(nfas):
tis.append({(Option[n](s), i): {Option[n](t) for t in ts} for (s, i), ts in nfa.transitions.items()})
cis.append({(Option[n](s), i): cs for (s, i), cs in nfa.captures.items()})
tstart = {(Start(), Move.EMPTY): {Option[n](nfa.start) for n, nfa in enumerate(nfas)}}
tend = {(Option[n](nfa.end), Move.EMPTY): {End()} for n, nfa in enumerate(nfas)}
return NFA(Start(), End(), merge_trans(tstart, tend, *tis), merge_trans(*cis))
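# Illustrative combination of the constructors above (a sketch, not part of
# the original module): an NFA for "a|bc" accepts "bc" but rejects "b".
#   nfa = MatchEither(MatchIn("a"), MatchAfter(MatchIn("b"), MatchIn("c")))
#   assert nfa.match("bc") is not None
#   assert nfa.match("b") is None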
def MatchRepeated(nfa: NFA, repeat: bool = False, optional: bool = False) -> NFA:
"""Handles: A*, A+, A?"""
Start, End, Star = new_states("a", "z", "*")
transitions: Transitions = {(Star(s), i): {Star(t) for t in ts} for (s, i), ts in nfa.transitions.items()}
captures: Captures = {(Star(s), i): cs for (s, i), cs in nfa.captures.items()}
transitions[(Start(), Move.EMPTY)] = {Star(nfa.start)}
if optional:
transitions[(Start(), Move.EMPTY)].add(End())
transitions[(Star(nfa.end), Move.EMPTY)] = {End()}
if repeat:
transitions[(Star(nfa.end), Move.EMPTY)].add(Star(nfa.start))
return NFA(Start(), End(), transitions, captures)
def MatchRepeatedN(nfa: NFA, minimum: int, maximum: int) -> NFA:
"""Handles: A{2,5}"""
if minimum == maximum == 0:
return MatchEmpty()
elif minimum == maximum == 1:
return nfa
elif minimum > 0:
return MatchAfter(nfa, MatchRepeatedN(nfa, minimum - 1, maximum - 1))
elif maximum == 1:
return MatchRepeated(nfa, optional=True)
else:
return MatchRepeated(MatchAfter(nfa, MatchRepeatedN(nfa, 0, maximum - 1)), optional=True)
def MatchRepeatedNplus(nfa: NFA, minimum: int) -> NFA:
"""Handles: A{2,}"""
if minimum == 0:
return MatchRepeated(nfa, repeat=True, optional=True)
elif minimum == 1:
return MatchRepeated(nfa, repeat=True)
else:
return MatchAfter(nfa, MatchRepeatedNplus(nfa, minimum - 1))
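# Worked expansion of the recursions above (illustrative):
#   MatchRepeatedN(A, 2, 3)  == A · A · A?   (two required, one optional)
#   MatchRepeatedNplus(A, 2) == A · A+       (two required, then unbounded)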
def MatchLength(minimum: int = 0, maximum: Optional[int] = None) -> NFA:
if maximum is None:
return MatchRepeatedNplus(MatchNotIn(""), minimum)
else:
return MatchRepeatedN(MatchNotIn(""), minimum, maximum)
def MatchDFA(nfa: NFA, negate: bool) -> NFA:
"""Handles: (?D:A), ¬A"""
if nfa.captures and not negate:
raise NotImplementedError("Cannot convert NFA with submatch captures to a DFA")
# convert to DFA via powerset construction (and optionally invert accepted/rejected states)
start_state = tuple(sorted(nfa.expand_epsilons({nfa.start}), key=str))
to_process = [start_state]
processed_states = set()
accepting_states = set()
transitions: Transitions = {}
while to_process:
current_state = to_process.pop()
processed_states.add(current_state)
if any(s == nfa.end for s in current_state):
accepting_states.add(current_state)
moves = {i for (s, i) in nfa.transitions if s in current_state and i != Move.EMPTY}
for i in moves:
next_state = {t for s in current_state for t in nfa.transitions.get((s, i), nfa.transitions.get((s, Move.ALL), set()))}
next_state_sorted = tuple(sorted(nfa.expand_epsilons(next_state), key=str))
transitions[(current_state, i)] = {next_state_sorted}
if next_state_sorted not in processed_states:
to_process.append(next_state_sorted)
# transition accepting/non-accepting states to a single final state
for final_state in (processed_states - accepting_states) if negate else accepting_states:
transitions.setdefault((final_state, Move.EMPTY), set()).add("2")
# if negating, transition non-moves to a new accepting, consuming state
if negate:
for state in processed_states:
if (state, Move.ALL) not in transitions:
transitions[(state, Move.ALL)] = {"1"}
transitions.setdefault(("1", Move.ALL), {"1"})
transitions.setdefault(("1", Move.EMPTY), {"2"})
nfa = NFA(start_state, "2", transitions)
nfa.remove_redundant_states(aggressive=True)
return nfa
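# e.g. MatchDFA(MatchIn("a"), negate=True) determinises the single-character
# matcher and inverts its accepting states, yielding an automaton that
# accepts any string except "a" (illustrative).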
def MatchBoth(nfa1: NFA, nfa2: NFA, start_from: Optional[Set[State]] = None, stop_at: Optional[Set[State]] = None) -> NFA:
"""Handles: A&B"""
# generate transitions on cartesian product (with special handling for *-transitions)
# warning: some of the other methods currently depend on the implementation of this (which is naughty)
transitions: Transitions = {}
captures: Captures = {}
for (s1, i), ts1 in nfa1.transitions.items():
for s2 in nfa2.states:
if i == Move.EMPTY:
transitions = merge_trans(transitions, {((s1, s2), i): set(product(ts1, {s2}))})
else:
ts2 = nfa2.transitions.get((s2, i), nfa2.transitions.get((s2, Move.ALL)))
if ts2 is not None:
transitions = merge_trans(transitions, {((s1, s2), i): set(product(ts1, ts2))})
cs2 = nfa1.captures.get((s1, i), set()) | nfa2.captures.get((s2, i), nfa2.captures.get((s2, Move.ALL), set()))
if cs2:
captures = merge_trans(captures, {((s1, s2), i): cs2})
for (s2, i), ts2 in nfa2.transitions.items():
for s1 in nfa1.states:
if i == Move.EMPTY:
transitions = merge_trans(transitions, {((s1, s2), i): set(product({s1}, ts2))})
elif (s1, i) not in nfa1.transitions: # (as we've done those already!)
ts1o = nfa1.transitions.get((s1, Move.ALL))
if ts1o is not None:
transitions = merge_trans(transitions, {((s1, s2), i): set(product(ts1o, ts2))})
cs1o = nfa2.captures.get((s2, i), set()) | nfa1.captures.get((s1, Move.ALL), set())
if cs1o:
captures = merge_trans(captures, {((s1, s2), i): cs1o})
if start_from:
transitions[("1", Move.EMPTY)] = start_from
if stop_at:
transitions = merge_trans(transitions, {(s, Move.EMPTY): {"2"} for s in stop_at})
nfa = NFA("1" if start_from else (nfa1.start, nfa2.start), "2" if stop_at else (nfa1.end, nfa2.end), transitions, captures)
nfa.remove_redundant_states()
return nfa
def MatchContains(nfa1: NFA, nfa2: NFA, proper: bool) -> NFA:
"""Handles: A<B, A<<B, A>B, A>>B"""
# transition from (2) A to (3) AxB to (5) A states
# for proper containment, also use (1) A and (4) A states
LeftFirst, Left, Middle, RightFirst, Right = new_states("<l1", "<l2", "<m", "<r1", "<r2")
t1, t1e, c1, t4, t4e, c4 = {}, {}, {}, {}, {}, {}
if proper:
t1 = {(LeftFirst(s), i): {LeftFirst(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i == Move.EMPTY}
t1e = {(LeftFirst(s), i): {Left(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i != Move.EMPTY}
c1 = {(LeftFirst(s), i): cs for (s, i), cs in nfa1.captures.items()}
t2 = {(Left(s), i): {Left(t) for t in ts} for (s, i), ts in nfa1.transitions.items()}
c2 = {(Left(s), i): cs for (s, i), cs in nfa1.captures.items()}
t2e = {(Left(s), Move.EMPTY): {Middle(s, nfa2.start)} for s in nfa1.states}
t3 = {(Middle(s, q), i): {Middle(s, t) for t in ts} for (q, i), ts in nfa2.transitions.items() for s in nfa1.states}
c3 = {(Middle(s, q), i): cs for (q, i), cs in nfa2.captures.items() for s in nfa1.states}
t3e = {(Middle(s, nfa2.end), Move.EMPTY): {(RightFirst(s) if proper else Right(s))} for s in nfa1.states}
if proper:
t4 = {(RightFirst(s), i): {RightFirst(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i == Move.EMPTY}
t4e = {(RightFirst(s), i): {Right(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i != Move.EMPTY}
c4 = {(RightFirst(s), i): cs for (s, i), cs in nfa1.captures.items()}
t5 = {(Right(s), i): {Right(t) for t in ts} for (s, i), ts in nfa1.transitions.items()}
c5 = {(Right(s), i): cs for (s, i), cs in nfa1.captures.items()}
transitions = merge_trans(t1, t1e, t2, t2e, t3, t3e, t4, t4e, t5)
captures = merge_trans(c1, c2, c3, c4, c5)
nfa = NFA(LeftFirst(nfa1.start) if proper else Left(nfa1.start), Right(nfa1.end), transitions, captures)
nfa.remove_redundant_states()
return nfa
def MatchInterleaved(nfa1: NFA, nfa2: NFA, proper: bool) -> NFA:
"""Handles: A^B, A^^B"""
# transition between (2) AxB and (3) AxB states
# for proper interleaving, also use (1) A and (4) A states
First, Left, Right, Last = new_states("^a", "^l", "^r", "^z")
t1, t1e, c1, t4, t4e, c4 = {}, {}, {}, {}, {}, {}
if proper:
t1 = {(First(s), i): {First(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i == Move.EMPTY}
t1e = {(First(s), i): {Left(t, nfa2.start) for t in ts} for (s, i), ts in nfa1.transitions.items() if i != Move.EMPTY}
c1 = {(First(s), i): cs for (s, i), cs in nfa1.captures.items()}
t2 = {(Left(s, q), i): {Left(t, q) for t in ts} for (s, i), ts in nfa1.transitions.items() for q in nfa2.states}
c2 = {(Left(s, q), i): cs for (s, i), cs in nfa1.captures.items() for q in nfa2.states}
t2e = {(Left(q, s), Move.EMPTY): {Right(q, s)} for q in nfa1.states for s in nfa2.states}
t3 = {(Right(q, s), i): {Right(q, t) for t in ts} for (s, i), ts in nfa2.transitions.items() for q in nfa1.states}
c3 = {(Right(q, s), i): cs for (s, i), cs in nfa2.captures.items() for q in nfa1.states}
t3e = {(Right(q, s), Move.EMPTY): {Left(q, s)} for q in nfa1.states for s in nfa2.states}
if proper:
t4 = {(Left(s, nfa2.end), i): {Last(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i != Move.EMPTY}
c4 = {(Left(s, nfa2.end), i): cs for (s, i), cs in nfa1.captures.items()}
t4e = {(Last(s), i): {Last(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i == Move.EMPTY}
transitions = merge_trans(t1, t1e, t2, t2e, t3, t3e, t4, t4e)
captures = merge_trans(c1, c2, c3, c4)
nfa = NFA(First(nfa1.start) if proper else Left(nfa1.start, nfa2.start), Last(nfa1.end) if proper else Right(nfa1.end, nfa2.end), transitions, captures)
nfa.remove_redundant_states()
return nfa
def MatchAlternating(nfa1: NFA, nfa2: NFA, ordered: bool) -> NFA:
"""Handles: A#B, A##B"""
# transition between (1) AxB and (2) AxB states
# for order agnostic alternation, also use an additional (0) start state
Start, Left, Right = new_states("#a", "#l", "#r")
t0 = {(Start(), Move.EMPTY): {Left(nfa1.start, nfa2.start), Right(nfa1.start, nfa2.start)}} if not ordered else {}
t1 = {(Left(s, q), i): {(Left if i == Move.EMPTY else Right)(t, q) for t in ts} for (s, i), ts in nfa1.transitions.items() for q in nfa2.states}
c1 = {(Left(s, q), i): cs for (s, i), cs in nfa1.captures.items() for q in nfa2.states}
t2 = {(Right(q, s), i): {(Right if i == Move.EMPTY else Left)(q, t) for t in ts} for (s, i), ts in nfa2.transitions.items() for q in nfa1.states}
c2 = {(Right(q, s), i): cs for (s, i), cs in nfa2.captures.items() for q in nfa1.states}
# handle final transitions
t1e = {(Left(nfa1.end, s), Move.EMPTY): {Left(nfa1.end, t) for t in ts} for (s, i), ts in nfa2.transitions.items() if i == Move.EMPTY}
t2e = {(Right(s, nfa2.end), Move.EMPTY): {Right(t, nfa2.end) for t in ts} for (s, i), ts in nfa1.transitions.items() if i == Move.EMPTY}
t21 = {(Right(nfa1.end, nfa2.end), Move.EMPTY): {Left(nfa1.end, nfa2.end)}}
transitions = merge_trans(t0, t1, t1e, t2, t2e, t21)
captures = merge_trans(c1, c2)
nfa = NFA(Start() if not ordered else Left(nfa1.start, nfa2.start), Left(nfa1.end, nfa2.end), transitions, captures)
nfa.remove_redundant_states()
return nfa
def MatchSubtract(nfa1: NFA, nfa2: NFA, from_right: bool, negate: bool) -> NFA:
"""Handles: A-B, A_-B (and used in slicing)"""
# rewire end/start state of nfa1 based on partial intersection with nfa2
Start, Middle, End = new_states("-a", "-m", "-e")
if from_right:
both = MatchBoth(nfa1, nfa2, start_from={(a, nfa2.start) for a in nfa1.states})
else:
both = MatchBoth(nfa1, nfa2, stop_at={(a, nfa2.end) for a in nfa1.states})
if negate:
return both
transitions: Transitions = {(Middle(s), i): {Middle(t) for t in ts} for (s, i), ts in nfa1.transitions.items()}
captures: Captures = {(Middle(s), i): cs for (s, i), cs in nfa1.captures.items()}
if from_right:
midpoints = {a for a, _ in both.transitions.get((both.start, Move.EMPTY), set())}
transitions = merge_trans(transitions, {(Middle(s), Move.EMPTY): {End()} for s in midpoints})
nfa = NFA(Middle(nfa1.start), End(), transitions, captures)
else:
midpoints = {a for ((a, b), i), cs in both.transitions.items() if i == Move.EMPTY and both.end in cs}
transitions[(Start(), Move.EMPTY)] = {Middle(s) for s in midpoints}
nfa = NFA(Start(), Middle(nfa1.end), transitions, captures)
nfa.remove_redundant_states()
return nfa
def MatchSubtractInside(nfa1: NFA, nfa2: NFA, proper: bool, replace: Optional[NFA] = None) -> NFA:
"""Handles: A->B, A->>B"""
# like MatchContains, but link (2) and (4)/(5) using partial intersection
LeftFirst, Left, Replace, RightFirst, Right = new_states("->l1", "->l2", "->m", "->r1", "->r2")
t1, t1e, c1, t3, c3, t3e, t4, t4e, c4 = {}, {}, {}, {}, {}, {}, {}, {}, {}
if proper:
t1 = {(LeftFirst(s), i): {LeftFirst(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i == Move.EMPTY}
t1e = {(LeftFirst(s), i): {Left(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i != Move.EMPTY}
c1 = {(LeftFirst(s), i): cs for (s, i), cs in nfa1.captures.items()}
t2 = {(Left(s), i): {Left(t) for t in ts} for (s, i), ts in nfa1.transitions.items()}
c2 = {(Left(s), i): cs for (s, i), cs in nfa1.captures.items()}
t2es = []
if replace:
t3 = {(Replace(s, q), i): {Replace(s, t) for t in ts} for (q, i), ts in replace.transitions.items() for s in nfa1.states}
c3 = {(Replace(s, q), i): cs for (q, i), cs in replace.captures.items() for s in nfa1.states}
t3e = {(Replace(s, replace.end), Move.EMPTY): {(RightFirst(s) if proper else Right(s))} for s in nfa1.states}
for s in nfa1.states:
both = MatchBoth(nfa1, nfa2, start_from={(s, nfa2.start)}, stop_at={(a, nfa2.end) for a in nfa1.states})
new_end = {a for a, _ in both.transitions.get((both.start, Move.EMPTY), set())}
new_start = {a[0] for (a, i), cs in both.transitions.items() if i == Move.EMPTY and both.end in cs}
t2es.append(
{(Left(e), Move.EMPTY): {(Replace(s, replace.start) if replace else RightFirst(s) if proper else Right(s)) for s in new_start} for e in new_end}
)
if proper:
t4 = {(RightFirst(s), i): {RightFirst(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i == Move.EMPTY}
t4e = {(RightFirst(s), i): {Right(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i != Move.EMPTY}
c4 = {(RightFirst(s), i): cs for (s, i), cs in nfa1.captures.items()}
t5 = {(Right(s), i): {Right(t) for t in ts} for (s, i), ts in nfa1.transitions.items()}
c5 = {(Right(s), i): cs for (s, i), cs in nfa1.captures.items()}
transitions = merge_trans(t1, t1e, t2, *t2es, t3, t3e, t4, t4e, t5)
captures = merge_trans(c1, c2, c3, c4, c5)
nfa = NFA(LeftFirst(nfa1.start) if proper else Left(nfa1.start), Right(nfa1.end), transitions, captures)
nfa.remove_redundant_states()
return nfa
def MatchSubtractOutside(nfa1: NFA, nfa2: NFA, proper: bool) -> NFA:
"""Handles: A-<B, A-<<B"""
# Use partial intersections to generate collections of alternatives.
both_start = MatchBoth(nfa1, nfa2, stop_at={(a, b) for a in nfa1.states for b in nfa2.states})
both_end = MatchBoth(nfa1, nfa2, start_from={(a, b) for a in nfa1.states for b in nfa2.states})
both_start_end = {s for (s, i), cs in both_start.transitions.items() if i == Move.EMPTY and both_start.end in cs}
both_end_start = both_end.transitions.get((both_end.start, Move.EMPTY), set())
if proper:
# ensure partial intersections are (potentially) non-empty
both_start_proper = MatchBoth(both_start, MatchLength(1))
both_start_end = {
s[0] for (s, i), cs in both_start_proper.transitions.items() if i == Move.EMPTY and both_start_proper.end in cs and s[0] != both_start.end
}
both_end_proper = MatchBoth(both_end, MatchLength(1))
both_end_start = {s[0] for s in both_end_proper.transitions.get((both_end_proper.start, Move.EMPTY), set()) if s[0] != both_end.start}
nfas: List[NFA] = []
midpoints = {b for a, b in both_start_end if any(b == b2 for a2, b2 in both_end_start)}
for m in midpoints:
Start, Middle, End = new_states("-<a", "-<m", "-<z")
transitions: Transitions = {(Middle(s), i): {Middle(t) for t in ts} for (s, i), ts in nfa1.transitions.items()}
captures: Captures = {(Middle(s), i): cs for (s, i), cs in nfa1.captures.items()}
transitions[Start(), Move.EMPTY] = {Middle(a) for a, b in both_start_end if b == m}
for a in {a for a, b in both_end_start if b == m}:
transitions.setdefault((Middle(a), Move.EMPTY), set()).add(End())
nfa = NFA(Start(), End(), transitions, captures)
nfa.remove_redundant_states()
nfas.append(nfa)
return MatchEither(*nfas)
def MatchSubtractAlternating(nfa1: NFA, nfa2: NFA, ordered: bool, from_right: bool = True) -> NFA:
"""Handles: A-#B, A_-#B, A-##B"""
# Expand transitions in A with one from A&B (tracking both A and B states)
both = MatchBoth(nfa1, nfa2, stop_at={(a, b) for a in nfa1.states for b in nfa2.states}, start_from={(a, b) for a in nfa1.states for b in nfa2.states})
transitions: Transitions = {}
captures: Captures = {}
for (s, i), ts in nfa1.transitions.items():
for b in nfa2.states:
if i == Move.EMPTY:
states = {(t, b) for t in ts}
else:
ts = nfa1.expand_epsilons(ts)
states = {u for (r, i), us in both.transitions.items() for t in ts if r == (t, b) and i != Move.EMPTY for u in us}
transitions[((s, b), i)] = states
if (s, i) in nfa1.captures:
captures[((s, b), i)] = nfa1.captures[(s, i)]
if b == nfa2.end and nfa1.end in ts:
transitions.setdefault(((s, b), i), set()).add((nfa1.end, nfa2.end))
for (b, i), cs in nfa2.transitions.items():
for s in nfa1.states:
if i == Move.EMPTY:
transitions[((s, b), i)] = {(s, c) for c in cs}
start_state = set()
if not ordered or not from_right:
ts = {(nfa1.start, nfa2.start)}
ts = both.expand_epsilons(ts)
start_state |= {u for (s, i), us in both.transitions.items() if s in ts and i != Move.EMPTY for u in us}
if not ordered or from_right:
start_state |= {(nfa1.start, nfa2.start)}
if len(start_state) == 1:
nfa = NFA(first(start_state), (nfa1.end, nfa2.end), transitions, captures)
else:
transitions[("a", Move.EMPTY)] = start_state
nfa = NFA("a", (nfa1.end, nfa2.end), transitions, captures)
nfa.remove_redundant_states()
return nfa
def MatchSubtractInterleaved(nfa1: NFA, nfa2: NFA, proper: bool, from_right: bool = True) -> NFA:
"""Handles: A-^B, A-^^B, A_-^^B"""
# Combine transitions from A with empty transitions from A&B (tracking both A and B states)
both = MatchBoth(nfa1, nfa2, stop_at={(a, b) for a in nfa1.states for b in nfa2.states}, start_from={(a, b) for a in nfa1.states for b in nfa2.states})
transitions: Transitions = {}
captures: Captures = {}
for (a, i), ts in nfa1.transitions.items():
for b in nfa2.states:
transitions[(a, b), i] = {(t, b) for t in ts}
if (a, i) in nfa1.captures:
captures[(a, b), i] = nfa1.captures[(a, i)]
for (ab, i), tus in both.transitions.items():
if ab != both.start:
transitions.setdefault((ab, Move.EMPTY), set()).update(tus - {both.end})
if not proper:
transitions[((nfa1.end, nfa2.end), Move.EMPTY)] = {"z"}
nfa = NFA((nfa1.start, nfa2.start), "z", transitions, captures)
elif from_right:
First, Middle, Last = new_states("-^a", "-^m", "-^z")
t1 = {(First(s), i): {First(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i == Move.EMPTY}
t1e = {(First(s), i): {Middle((t, nfa2.start)) for t in ts} for (s, i), ts in nfa1.transitions.items() if i != Move.EMPTY}
c1 = {(First(s), i): cs for (s, i), cs in nfa1.captures.items()}
t2 = {(Middle(s), i): {Middle(t) for t in ts} for (s, i), ts in transitions.items()}
c2 = {(Middle(s), i): cs for (s, i), cs in captures.items()}
t2e = {(Middle((s, nfa2.end)), i): {Last(t) for t in ts} for (s, i), ts in nfa1.transitions.items() if i != Move.EMPTY}
t3 = {(Last(s), i): {Last(t) for t in ts} for (s, i), ts in nfa1.transitions.items()}
c3 = {(Last(s), i): cs for (s, i), cs in nfa1.captures.items()}
transitions = merge_trans(t1, t1e, t2, t2e, t3)
captures = merge_trans(c1, c2, c3)
nfa = NFA(First(nfa1.start), Last(nfa1.end), transitions, captures)
else:
ts = both.expand_epsilons({(nfa1.start, nfa2.start)})
start_states = {u for (s, i), us in both.transitions.items() if s in ts and i != Move.EMPTY for u in us}
ts = set()
for t in both.states:
if (nfa1.end, nfa2.end) in both.expand_epsilons({t}):
ts.add(t)
end_states = {s for (s, i), us in both.transitions.items() if any(u in ts for u in us) and i != Move.EMPTY}
transitions[("a", Move.EMPTY)] = start_states
for s in end_states:
transitions[(s, Move.EMPTY)] = {"z"}
nfa = NFA("a", "z", transitions, captures)
nfa.remove_redundant_states()
return nfa
def MatchReversed(nfa: NFA) -> NFA:
"""Handles: (?r:A)"""
# just reverse the edges (with special handling for *-transitions)
transitions: Transitions = {}
captures: Captures = {}
(Extra,) = new_states("r")
for (s, i), ts in nfa.transitions.items():
for t in ts:
if i == Move.ALL:
if any(r != s and t in vs for (r, j), vs in nfa.transitions.items()):
extra_state = Extra(s, t)
transitions.setdefault((t, Move.EMPTY), set()).add(extra_state)
t = extra_state
for (r, j), _ in nfa.transitions.items():
if r == s and not isinstance(j, Move):
transitions.setdefault((t, j), set())
transitions.setdefault((t, i), set()).add(s)
if (s, i) in nfa.captures:
captures.setdefault((t, i), set()).update(nfa.captures[(s, i)])
nfa = NFA(nfa.end, nfa.start, transitions, captures)
nfa.remove_redundant_states()
return nfa
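# Illustrative sketch (not part of the original module): (?r:A) accepts exactly
# the reverses of the strings A accepts. Pattern is defined further down, so
# this helper is only callable once the module has finished importing.
def _match_reversed_demo():
    assert Pattern("(?r:abc)").match("cba") is not None
    assert Pattern("(?r:abc)").match("abc") is None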
def MatchInsensitively(nfa: NFA) -> NFA:
"""Handles: (?i:A)"""
transitions: Transitions = {}
captures: Captures = {}
for (s, i), ts in nfa.transitions.items():
if isinstance(i, str):
transitions.setdefault((s, i.lower()), set()).update(ts)
transitions.setdefault((s, i.upper()), set()).update(ts)
if nfa.captures.get((s, i), set()):
captures.setdefault((s, i.lower()), set()).update(nfa.captures.get((s, i), set()))
captures.setdefault((s, i.upper()), set()).update(nfa.captures.get((s, i), set()))
else:
transitions[(s, i)] = ts
return NFA(nfa.start, nfa.end, transitions, captures)
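# Illustrative sketch (not part of the original module): (?i:A) doubles every
# character transition into its lower- and upper-case forms.
def _match_insensitively_demo():
    assert Pattern("(?i:CaT)").match("cAt") is not None
    assert Pattern("CaT").match("cAt") is None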
def MatchShifted(nfa: NFA, shift: int) -> NFA:
"""Handles: (?sn:A)"""
transitions: Transitions = {}
captures: Captures = {}
for (s, i), ts in nfa.transitions.items():
c = nfa.captures.get((s, i), None)
for alphabet in (string.ascii_lowercase, string.ascii_uppercase):
if isinstance(i, str) and i in alphabet:
i = alphabet[(alphabet.index(i) + shift) % 26]
break
transitions[(s, i)] = ts
if c is not None:
captures[(s, i)] = c
return NFA(nfa.start, nfa.end, transitions, captures)
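# Illustrative sketch (not part of the original module): (?sn:A) Caesar-shifts
# every letter transition by n within its alphabet, so (?s1:abc) matches "bcd".
def _match_shifted_demo():
    assert Pattern("(?s1:abc)").match("bcd") is not None
    assert Pattern("(?s1:xyz)").match("yza") is not None  # wraps around mod 26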
def MatchRotated(nfa: NFA, shift: int) -> NFA:
"""Handles (?Rn:A)"""
# slice off start/end and for each possibility move it to the other side
if shift == 0:
return nfa
rotations: List[NFA] = []
if shift < 0:
window = MatchLength(-shift, -shift)
intersection = MatchBoth(nfa, window, stop_at={(a, window.end) for a in nfa.states})
intersection_ends = {s[0] for (s, i), cs in intersection.transitions.items() if i == Move.EMPTY and intersection.end in cs and s[0] != nfa.end}
for middle in intersection_ends:
move = MatchBoth(nfa, window, stop_at={(middle, window.end)})
keep = NFA(middle, nfa.end, nfa.transitions, nfa.captures)
rotated = MatchAfter(keep, move)
rotated.remove_redundant_states()
rotations.append(rotated)
else:
window = MatchLength(shift, shift)
intersection = MatchBoth(nfa, window, start_from={(a, window.start) for a in nfa.states})
intersection_starts = {s[0] for s in intersection.transitions.get((intersection.start, Move.EMPTY), set()) if s[0] != nfa.start}
for middle in intersection_starts:
move = MatchBoth(nfa, window, start_from={(middle, window.start)})
keep = NFA(nfa.start, middle, nfa.transitions, nfa.captures)
rotated = MatchAfter(move, keep)
rotated.remove_redundant_states()
rotations.append(rotated)
rotation = MatchEither(*rotations)
rotation.remove_redundant_states()
return rotation
def MatchSlice(nfa: NFA, start: Optional[int], end: Optional[int], step: int) -> NFA:
"""Handles: (?S:A)[3:5], (?S:A)[-1::-2]"""
# reverse slice is equivalent to slice of reverse
if step < 0:
return MatchSlice(MatchReversed(nfa), None if end is None else end + 1, None if start is None else start + 1, -step)
assert step != 0
# slice off start
start = start or 0
if start > 0:
nfa = MatchSubtract(nfa, MatchLength(start, start), from_right=False, negate=False)
elif start < 0:
nfa = MatchSubtract(nfa, MatchLength(-start, -start), from_right=True, negate=True)
# slice off end
if end is not None:
if end >= 0:
assert end >= start >= 0
nfa = MatchSubtract(nfa, MatchLength(end - start, end - start), from_right=False, negate=True)
else:
assert start >= 0 or end >= start
nfa = MatchSubtract(nfa, MatchLength(-end, -end), from_right=True, negate=False)
# expand transitions by step-count-minus-one
if step > 1:
def expand_steps(nfa: NFA, states: Set[State], n: int) -> Tuple[Set[State], bool]:
hit_end = False
for _ in range(n):
states = nfa.expand_epsilons(states)
hit_end |= nfa.end in states
states = {t for s in states for (r, i), ts in nfa.transitions.items() if r == s and i != Move.EMPTY for t in ts}
return states, hit_end
transitions: Transitions = {}
captures: Captures = {}
for (s, i), ts in nfa.transitions.items():
if i == Move.EMPTY:
transitions[(s, i)] = ts
else:
next_states, hit_end = expand_steps(nfa, ts, step - 1)
transitions[(s, i)] = next_states
if (s, i) in nfa.captures:
captures[(s, i)] = nfa.captures[(s, i)]
if hit_end:
transitions[(s, i)].add(nfa.end)
nfa = NFA(nfa.start, nfa.end, transitions, captures)
nfa.remove_redundant_states()
return nfa
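# Illustrative sketch (not part of the original module): (?S:A)[m:n:s] appears
# to follow Python slice semantics over A's matches, as the syntax suggests.
# Pattern is defined further down, so call this only after import completes.
def _match_slice_demo():
    assert Pattern("(?S:abcde)[1:3]").match("bc") is not None   # "abcde"[1:3]
    assert Pattern("(?S:abcde)[::2]").match("ace") is not None  # "abcde"[::2]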
# Patterns
def op_reduce(l):
if len(l) == 1:
return l[0]
else:
return op_reduce([l[1](l[0], l[2]), *l[3:]])
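# Illustrative sketch (not part of the original module): op_reduce left-folds
# the flat [operand, op, operand, op, ...] lists produced by infixNotation.
def _op_reduce_demo():
    add = lambda x, y: x + y
    mul = lambda x, y: x * y
    assert op_reduce([1, add, 2, mul, 3]) == 9  # folds as (1 + 2) * 3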
class Pattern:
"""Regex-style pattern supporting novel spatial operators and modifiers."""
def __init__(self, pattern: str):
self.pattern = pattern
self.nfa = self.expr.parseString(pattern, parseAll=True)[0]
def __repr__(self):
return f"Pattern({self.pattern!r})"
def match(self, string: str) -> Optional[CaptureOutput]:
return self.nfa.match(string)
def example(self, min_length: int = 0, max_length: Optional[int] = None) -> str:
return self.nfa.example(min_length, max_length)
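# Illustrative sketch (not part of the original module): typical Pattern usage,
# assuming the grammar below is wired up by the time match() is called. The ^
# operator is taken to be the shuffle (interleaving) product per the CLI help.
def _pattern_demo():
    assert Pattern("(a|b)+").match("abba") is not None
    assert Pattern("(a|b)+").match("abc") is None
    assert Pattern("ab^cd").match("acbd") is not None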
# parsing (should really go via an AST here)
from pyparsing import Forward, Group, Literal, OneOrMore
from pyparsing import Optional as Option
from pyparsing import ParserElement, Word, alphanums, alphas, infixNotation, nums, oneOf, opAssoc
ParserElement.setDefaultWhitespaceChars("")
ParserElement.enablePackrat()
# TODO: character escaping, supported scripts
_0_to_99 = Word(nums, min=1, max=2).setParseAction(lambda t: int("".join(t[0])))
_m99_to_99 = (Option("-") + _0_to_99).setParseAction(lambda t: t[-1] * (-1 if len(t) == 2 else 1))
_id = Word(alphas + "_", alphanums + "_")
printables = ppu.Latin1.printables + " " + EXTRA_PRINTABLES
literal_exclude = r"()+*.?<>#{}^_&|$\[]-"
set_exclude = r"\]"
literal = Word(printables, excludeChars=literal_exclude, exact=1).setParseAction(lambda t: MatchIn(t[0]))
dot = Literal(".").setParseAction(lambda t: MatchNotIn(""))
nset = ("[^" + Word(printables, excludeChars=set_exclude, min=1) + "]").setParseAction(lambda t: MatchNotIn(srange(f"[{t[1]}]")))
set = ("[" + Word(printables, excludeChars=set_exclude, min=1) + "]").setParseAction(lambda t: MatchIn(srange(f"[{t[1]}]")))
words = Literal(r"\w").setParseAction(lambda t: DICTIONARY_FSM)
fsm = Literal(r"\f").setParseAction(lambda t: EXPLICIT_FSM)
expr = Forward()
group = (
("(" + expr + ")").setParseAction(lambda t: t[1])
| ("(?D:" + expr + ")").setParseAction(lambda t: MatchDFA(t[1], negate=False))
| ("(?M:" + expr + ")").setParseAction(lambda t: MatchDFA(MatchReversed(MatchDFA(MatchReversed(t[1]), negate=False)), negate=False))
| ("(?i:" + expr + ")").setParseAction(lambda t: MatchInsensitively(t[1]))
| ("(?r:" + expr + ")").setParseAction(lambda t: MatchReversed(t[1]))
| ("(?<" + _id + ">" + expr + ")").setParseAction(lambda t: MatchCapture(t[3], t[1]))
| ("(?s" + _m99_to_99 + ":" + expr + ")").setParseAction(lambda t: MatchShifted(t[3], t[1]))
| ("(?s:" + expr + ")").setParseAction(lambda t: MatchEither(*[MatchShifted(t[1], i) for i in range(1, 26)]))
| ("(?R" + _m99_to_99 + ":" + expr + ")").setParseAction(lambda t: MatchRotated(t[3], t[1]))
| ("(?R<=" + _0_to_99 + ":" + expr + ")").setParseAction(lambda t: MatchEither(*[MatchRotated(t[3], i) for i in range(-t[1], t[1] + 1) if i != 0]))
| ("(?S:" + expr + ")[" + Option(_m99_to_99, None) + ":" + Option(_m99_to_99, None) + Option(":" + Option(_m99_to_99, 1), 1) + "]").setParseAction(
lambda t: MatchSlice(t[1], t[3], t[5], t[-2])
)
| ("(?/" + expr + "/" + expr + "/" + expr + "/" + Option("s") + ")").setParseAction(
lambda t: MatchSubtractInside(t[1], t[3], proper=(t[7] == "s"), replace=t[5])
)
| ("(?&" + _id + "=" + expr + ")").setParseAction(lambda t: SUBPATTERNS.update({t[1]: t[3]}) or MatchEmpty())
| ("(?&" + _id + ")").setParseAction(lambda t: SUBPATTERNS[t[1]])
)
atom = literal | dot | nset | set | words | fsm | group
item = (
(atom + "+").setParseAction(
lambda t: MatchRepeated(
t[0],
repeat=True,
)
)
| (atom + "*").setParseAction(lambda t: MatchRepeated(t[0], repeat=True, optional=True))
| (atom + "?").setParseAction(lambda t: MatchRepeated(t[0], optional=True))
| (atom + "{" + _0_to_99 + "}").setParseAction(lambda t: MatchRepeatedN(t[0], t[2], t[2]))
| (atom + "{" + _0_to_99 + ",}").setParseAction(lambda t: MatchRepeatedNplus(t[0], t[2]))
| (atom + "{" + _0_to_99 + "," + _0_to_99 + "}").setParseAction(lambda t: MatchRepeatedN(t[0], t[2], t[4]))
| ("¬" + atom).setParseAction(lambda t: MatchDFA(t[1], negate=True))
| atom
)
items = OneOrMore(item).setParseAction(lambda t: reduce(MatchAfter, t))
spatial_ops = (
# conjunction
Literal(">>").setParseAction(lambda _: lambda x, y: MatchContains(x, y, proper=True))
| Literal(">").setParseAction(lambda _: lambda x, y: MatchContains(x, y, proper=False))
| Literal("<<").setParseAction(lambda _: lambda x, y: MatchContains(y, x, proper=True))
| Literal("<").setParseAction(lambda _: lambda x, y: MatchContains(y, x, proper=False))
| Literal("^^").setParseAction(lambda _: lambda x, y: MatchInterleaved(x, y, proper=True))
| Literal("^").setParseAction(lambda _: lambda x, y: MatchInterleaved(x, y, proper=False))
| Literal("##").setParseAction(lambda _: lambda x, y: MatchAlternating(x, y, ordered=True))
| Literal("#").setParseAction(lambda _: lambda x, y: MatchAlternating(x, y, ordered=False))
|
# subtraction
Literal("->>").setParseAction(lambda _: lambda x, y: MatchSubtractInside(x, y, proper=True))
| Literal("->").setParseAction(lambda _: lambda x, y: MatchSubtractInside(x, y, proper=False))
| Literal("-<<").setParseAction(lambda _: lambda x, y: MatchSubtractOutside(x, y, proper=True))
| Literal("-<").setParseAction(lambda _: lambda x, y: MatchSubtractOutside(x, y, proper=False))
| Literal("-##").setParseAction(lambda _: lambda x, y: MatchSubtractAlternating(x, y, ordered=True, from_right=True))
| Literal("_-##").setParseAction(lambda _: lambda x, y: MatchSubtractAlternating(x, y, ordered=True, from_right=False))
| Literal("-#").setParseAction(lambda _: lambda x, y: MatchSubtractAlternating(x, y, ordered=False))
| Literal("-^^").setParseAction(lambda _: lambda x, y: MatchSubtractInterleaved(x, y, proper=True, from_right=True))
| Literal("_-^^").setParseAction(lambda _: lambda x, y: MatchSubtractInterleaved(x, y, proper=True, from_right=False))
| Literal("-^").setParseAction(lambda _: lambda x, y: MatchSubtractInterleaved(x, y, proper=False))
| Literal("-").setParseAction(lambda _: lambda x, y: MatchSubtract(x, y, from_right=True, negate=False))
| Literal("_-").setParseAction(lambda _: lambda x, y: MatchSubtract(x, y, from_right=False, negate=False))
)
expr <<= infixNotation(
items,
[
("&", 2, opAssoc.LEFT, lambda t: reduce(MatchBoth, t[0][::2])),
(spatial_ops, 2, opAssoc.LEFT, lambda t: op_reduce(t[0])),
("|", 2, opAssoc.LEFT, lambda t: MatchEither(*t[0][::2])),
],
)
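# Illustrative note (not part of the original module): infixNotation lists
# operator levels from highest to lowest precedence, so `&` binds tighter than
# the spatial/subtraction operators, which in turn bind tighter than `|`;
# e.g. "a|b&c" parses as a|(b&c) and "ab>c|d" as (ab>c)|d.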
# Regex reconstructions
class Regex(ABC):
@abstractmethod
def members(self) -> Any:
"""Members, used for equality testing and hashing."""
@abstractmethod
def to_string(self) -> str:
"""Regex string representation."""
@abstractmethod
def to_repr(self) -> str:
"""Debug string representation."""
@abstractmethod
def min_length(self) -> float:
"""Minimum match length (-inf for no match)."""
@abstractmethod
def max_length(self) -> float:
"""Maximum match length (-inf for no match, inf for infinite)."""
@abstractmethod
def first_character(self, from_end: bool = False) -> "Regex":
"""A Regex describing the first (or last) matching character."""
def __repr__(self):
return f"{self.to_string()}"
def __eq__(self, other):
if type(other) is type(self):
return self.members() == other.members()
elif isinstance(other, Regex):
return False
else:
return NotImplemented
def __hash__(self):
return hash((type(self), self.members()))
def __add__(self, other):
if isinstance(other, Regex):
return RegexConcat((self, other))
else:
return NotImplemented
def __or__(self, other):
if isinstance(other, Regex):
return RegexUnion((self, other))
else:
return NotImplemented
class RegexChars(Regex):
chars: str
def __new__(cls, chars):
# [] = ∅
if not chars:
return RegexUnion()
obj = super().__new__(cls)
obj.chars = "".join(sorted(set(chars)))
return obj
def members(self):
return self.chars
def to_string(self):
return char_class(self.chars, negated=False)
def to_repr(self):
return f"Chars[{self.chars}]"
def min_length(self):
return 1
def max_length(self):
return 1
def first_character(self, from_end: bool = False) -> Regex:
return self
class RegexNegatedChars(Regex):
def __init__(self, chars):
self.chars = "".join(sorted(set(chars)))
def members(self):
return self.chars
def to_string(self):
return char_class(self.chars, negated=True)
def to_repr(self):
return f"NChars[{self.chars}]"
def min_length(self):
return 1
def max_length(self):
return 1
def first_character(self, from_end: bool = False) -> Regex:
return self
class RegexStar(Regex):
regex: Regex
def __new__(cls, regex: Regex):
# ε* = ε
if regex == RegexConcat():
return regex
# (A*)* = A*
elif isinstance(regex, RegexStar):
return regex
# (ε|A|B|C)* = (A|B|C)*
if isinstance(regex, RegexUnion) and RegexConcat() in regex.regexes:
regex = RegexUnion(r for r in regex.regexes if r != RegexConcat())
# (A*B*C*)* = (A|B|C)*
if isinstance(regex, RegexConcat) and all(isinstance(r, RegexStar) for r in regex.regexes):
regex = RegexUnion(cast(RegexStar, r).regex for r in regex.regexes)
if isinstance(regex, RegexUnion):
regexes = set(regex.regexes)
for r in list(regexes):
# (A*|B|C)* = (A|B|C)*
if isinstance(r, RegexStar):
regexes.remove(r)
r = r.regex
regexes.add(r)
# (A|B|C)* = (B|C)* if A => B*
if any(regex_implies(r, RegexStar(s)) for s in regexes - {r}):
regexes.remove(r)
regex = RegexUnion(regexes)
# (ABC)* = B* if all(ABC => B*)
if isinstance(regex, RegexConcat):
for r in regex.regexes:
star = RegexStar(r)
if all(regex_implies(s, star) for s in regex.regexes):
regex = r
break
obj = super().__new__(cls)
obj.regex = regex
return obj
def members(self):
return self.regex
def to_string(self):
return f"{self.regex}*"
def to_repr(self):
return f"Star[{self.regex.to_repr()}]"
def min_length(self):
return 0
def max_length(self):
return 0 if self.regex.max_length() == 0 else math.inf
def first_character(self, from_end: bool = False) -> Regex:
return RegexConcat() | self.regex.first_character(from_end)
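# Illustrative sketch (not part of the original module): the algebraic
# rewrites above in action, using the class's own __eq__ on members().
def _regex_star_demo():
    a = RegexChars("a")
    assert RegexStar(RegexStar(a)) == RegexStar(a)                    # (A*)* = A*
    assert RegexStar(RegexUnion([RegexConcat(), a])) == RegexStar(a)  # (ε|A)* = A*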
class RegexUnion(Regex):
regexes: FrozenSet[Regex]
def __new__(cls, regexes: Iterable[Regex] = ()):
# (A|(B|C)|D)=(A|B|C|D)
regexes = {s for x in (r.regexes if isinstance(r, RegexUnion) else [r] for r in regexes) for s in x}
# A|B|C = B|C if A=>B
for r in list(regexes):
if any(regex_implies(r, s) for s in regexes - {r}):
regexes.remove(r)
# ([^ab]|[ac]|C) = ([^b]|C)
if any(isinstance(r, RegexNegatedChars) for r in regexes):
nchars = RegexNegatedChars(
set.intersection(*[set(r.chars) for r in regexes if isinstance(r, RegexNegatedChars)])
- {c for r in regexes if isinstance(r, RegexChars) for c in r.chars}
)
regexes -= {r for r in regexes if isinstance(r, RegexNegatedChars) or isinstance(r, RegexChars)}
if not any(regex_implies(nchars, s) for s in regexes):
regexes.add(nchars)
# ([ab]|[ac]|C) = ([abc]|C)
elif any(isinstance(r, RegexChars) for r in regexes):
chars = RegexChars({c for r in regexes if isinstance(r, RegexChars) for c in r.chars})
regexes -= {r for r in regexes if isinstance(r, RegexChars)}
if not any(regex_implies(chars, s) for s in regexes):
regexes.add(chars)
# AB|AC|D = A(B|C|ε)
prefix = first(first(r.regexes) for r in regexes if isinstance(r, RegexConcat))
if prefix and all(isinstance(r, RegexConcat) and first(r.regexes) == prefix or r == prefix for r in regexes):
stripped = {RegexConcat(r.regexes[1:]) if isinstance(r, RegexConcat) else RegexConcat() for r in regexes}
return RegexConcat((prefix, RegexUnion(stripped)))
# BA|CA|DA = (B|C|ε)A
suffix = first(r.regexes[-1] for r in regexes if isinstance(r, RegexConcat) and r.regexes)
if suffix and all(isinstance(r, RegexConcat) and r.regexes and r.regexes[-1] == suffix or r == suffix for r in regexes):
stripped = {RegexConcat(r.regexes[:-1]) if isinstance(r, RegexConcat) else RegexConcat() for r in regexes}
return RegexConcat((RegexUnion(stripped), suffix))
# AA*|ε = A*|ε
if any(r.min_length() == 0 for r in regexes):
for r in {
r
for r in regexes
if isinstance(r, RegexConcat) and len(r.regexes) == 2 and isinstance(r.regexes[-1], RegexStar) and r.regexes[0] == r.regexes[-1].regex
}:
regexes.remove(r)
regexes.add(r.regexes[-1])
if RegexConcat() in regexes:
regexes.remove(RegexConcat())
# (A)=A
if len(regexes) == 1:
return first(regexes)
obj = super().__new__(cls)
obj.regexes = frozenset(regexes)
return obj
def members(self):
return self.regexes
def to_string(self):
if not self.regexes:
return "∅"
rs = [r for r in self.regexes if r != RegexConcat()]
ss = [re.sub(r"^\((.*)\)$", r"\1", str(r)) for r in rs]
unbracketed = len(rs) == 1 and (isinstance(rs[0], RegexChars) or isinstance(rs[0], RegexNegatedChars))
return ("{}{}" if unbracketed else "({}){}").format("|".join(ss), "?" * (RegexConcat() in self.regexes))
def to_repr(self):
subrepr = ", ".join(r.to_repr() for r in self.regexes)
return f"Union[{subrepr}]"
def min_length(self):
return -math.inf if not self.regexes else min([r.min_length() for r in self.regexes])
def max_length(self):
return -math.inf if not self.regexes else max([r.max_length() for r in self.regexes])
def first_character(self, from_end: bool = False) -> Regex:
return RegexUnion(r.first_character(from_end) for r in self.regexes)
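# Illustrative sketch (not part of the original module): character classes are
# merged and implied members dropped during construction.
def _regex_union_demo():
    assert RegexChars("ab") | RegexChars("bc") == RegexChars("abc")  # [ab]|[bc] = [abc]
    assert RegexChars("a") | RegexChars("abc") == RegexChars("abc")  # [a] => [abc]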
class RegexConcat(Regex):
regexes: Tuple[Regex, ...]
def __new__(cls, regexes: Iterable[Regex] = ()):
# (A∅B) = ∅
if any(r == RegexUnion() for r in regexes):
return RegexUnion()
# (A(BC)D) = (ABCD)
regexes = [s for x in (r.regexes if isinstance(r, RegexConcat) else [r] for r in regexes) for s in x]
# peephole optimizer
while True:
# A* A = A A* (canonical form)
i = first(i for i in range(len(regexes) - 1) if isinstance(regexes[i], RegexStar) and cast(RegexStar, regexes[i]).regex == regexes[i + 1])
if i is not None:
regexes[i], regexes[i + 1] = regexes[i + 1], regexes[i]
continue
# A* B* = A* if A => B
i = first(
i
for i in range(len(regexes) - 1)
if isinstance(regexes[i], RegexStar)
and isinstance(regexes[i + 1], RegexStar)
and regex_implies(cast(RegexStar, regexes[i]).regex, cast(RegexStar, regexes[i + 1]).regex)
)
if i is not None:
del regexes[i]
continue
# A* B* = B* if B => A
i = first(
i
for i in range(len(regexes) - 1)
if isinstance(regexes[i], RegexStar)
and isinstance(regexes[i + 1], RegexStar)
and regex_implies(cast(RegexStar, regexes[i + 1]).regex, cast(RegexStar, regexes[i]).regex)
)
if i is not None:
del regexes[i + 1]
continue
# nothing left to optimize
break
# (A) = A
if len(regexes) == 1:
return first(regexes)
obj = super().__new__(cls)
obj.regexes = tuple(regexes)
return obj
def members(self):
return self.regexes
def to_string(self):
ss = [str(r) for r in self.regexes]
while True:
# replace A A* with A+
i = first(i for i in range(len(ss) - 1) if ss[i] + "*" == ss[i + 1])
if i is not None:
ss[i + 1] = ss[i + 1][:-1] + "+"
del ss[i]
continue
break
return ".{0}" if not self.regexes else "({})".format("".join(ss))
def to_repr(self):
subrepr = ", ".join(r.to_repr() for r in self.regexes)
return f"Concat[{subrepr}]"
def min_length(self):
return sum(r.min_length() for r in self.regexes)
def max_length(self):
return sum(r.max_length() for r in self.regexes)
def first_character(self, from_end: bool = False) -> Regex:
fc = RegexUnion()
for r in self.regexes[:: -1 if from_end else 1]:
fcr = r.first_character(from_end)
if isinstance(fcr, RegexUnion) and RegexConcat() in fcr.regexes:
fc = RegexUnion([fc, *[r for r in fcr.regexes if r != RegexConcat()]])
else:
fc |= fcr
break
else:
fc |= RegexConcat()
return fc
@lru_cache(maxsize=None)
def regex_implies(a: Regex, b: Regex) -> bool:
"""Whether one regex implies the other."""
# A < B
if a == b:
return True
# [ab] < [abc]
if isinstance(a, RegexChars) and isinstance(b, RegexChars):
return set(a.chars) <= set(b.chars)
# [ab] < [^cd]
elif isinstance(a, RegexChars) and isinstance(b, RegexNegatedChars):
return not (set(a.chars) & set(b.chars))
# [^ab] < [^a]
elif isinstance(a, RegexNegatedChars) and isinstance(b, RegexNegatedChars):
return set(a.chars) >= set(b.chars)
# [^...] !< [...]
elif isinstance(a, RegexNegatedChars) and isinstance(b, RegexChars):
return False
# ε < A*
elif a == RegexConcat() and isinstance(b, RegexStar):
return True
# A* < B* iff A < B
elif isinstance(a, RegexStar) and isinstance(b, RegexStar):
return regex_implies(a.regex, b.regex)
# A|B|C < D iff all(ABC < D)
elif isinstance(a, RegexUnion):
return all(regex_implies(r, b) for r in a.regexes)
# A < B|C|D iff any(A < BCD)
elif isinstance(b, RegexUnion):
return any(regex_implies(a, r) for r in b.regexes)
# A < B* if A < B
elif isinstance(b, RegexStar) and regex_implies(a, b.regex):
return True
# ABC < D* if all(A < D)
elif isinstance(a, RegexConcat) and isinstance(b, RegexStar) and all(regex_implies(r, b) for r in a.regexes):
return True
# incompatible length
elif a.min_length() < b.min_length() or a.max_length() > b.max_length():
return False
# incompatible first characters
elif not regex_implies(a.first_character(), b.first_character()):
return False
# incompatible last characters
elif not regex_implies(a.first_character(from_end=True), b.first_character(from_end=True)):
return False
# the slow way using FSMs
if SLOW_SIMPLIFICATION:
try:
ans = Pattern(f"¬(¬({a})|{b})").nfa.min_length() is None
logger.debug("%s =%s=> %s", a, "=" if ans else "/", b)
return ans
except ParseException:
# currently doesn't work with e.g. emoji injected via \f or \w 🙁
warnings.warn("Cannot fully simplify regular expression due to non-Latin characters", UnicodeWarning)
return False
return False
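# Illustrative sketch (not part of the original module): a few implications
# decidable by the structural rules alone, without the slow FSM fallback.
def _regex_implies_demo():
    assert regex_implies(RegexChars("ab"), RegexChars("abc"))        # [ab] => [abc]
    assert regex_implies(RegexChars("ab"), RegexNegatedChars("cd"))  # [ab] => [^cd]
    assert not regex_implies(RegexNegatedChars("a"), RegexChars("bc"))
    assert regex_implies(regex("aa*"), RegexStar(RegexChars("a")))   # a+ => a*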
def regex(pattern: str) -> Regex:
"""Generate a Regex object directly from basic regular expression syntax. Useful for testing."""
from pyparsing import Forward, Literal, OneOrMore, ParserElement, Word, infixNotation, nums, opAssoc
ParserElement.setDefaultWhitespaceChars("")
ParserElement.enablePackrat()
_0_to_99 = Word(nums, min=1, max=2).setParseAction(lambda t: int("".join(t[0])))
printables = ppu.Latin1.printables + " " + EXTRA_PRINTABLES
literal_exclude = r"()+*.?{}^|$\[]"
set_exclude = r"\]"
literal = Word(printables, excludeChars=literal_exclude, exact=1).setParseAction(lambda t: RegexChars(t[0]))
dot = Literal(".").setParseAction(lambda t: RegexNegatedChars(""))
nset = ("[^" + Word(printables, excludeChars=set_exclude, min=1) + "]").setParseAction(lambda t: RegexNegatedChars(srange(f"[{t[1]}]")))
set = ("[" + Word(printables, excludeChars=set_exclude, min=1) + "]").setParseAction(lambda t: RegexChars(srange(f"[{t[1]}]")))
expr = Forward()
group = ("(" + expr + ")").setParseAction(lambda t: t[1])
atom = literal | dot | nset | set | group
item = (
(atom + "*").setParseAction(lambda t: RegexStar(t[0]))
| (atom + "+").setParseAction(lambda t: t[0] + RegexStar(t[0]))
| (atom + "?").setParseAction(lambda t: RegexConcat() | t[0])
| (atom + "{" + _0_to_99 + "}").setParseAction(lambda t: RegexConcat([t[0]] * t[2]))
| (atom + "{" + _0_to_99 + ",}").setParseAction(lambda t: RegexConcat([t[0]] * t[2]) + RegexStar(t[0]))
| atom
)
items = OneOrMore(item).setParseAction(lambda t: RegexConcat(t))
expr <<= infixNotation(items, [("|", 2, opAssoc.LEFT, lambda t: RegexUnion(t[0][::2]))])
return expr.parseString(pattern, parseAll=True)[0]
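# Illustrative sketch (not part of the original module): building Regex values
# from plain syntax and inspecting them (assuming char_class renders a single
# character bare, so that Chars["a"] prints as "a").
def _regex_demo():
    r = regex("a(b|c)*")
    assert r.min_length() == 1 and r.max_length() == math.inf
    assert regex("a|a") == regex("a")          # unions deduplicate
    assert regex("aa*").to_string() == "(a+)"  # A A* prints as A+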
def main() -> None:
parser = argparse.ArgumentParser(
description=r"""NFA-based pattern matcher supporting novel spatial conjunction and modifiers.
Supported syntax:
CHARACTERS
- a character literal
- . wildcard character
- [abc] character class
- [a-z] character range
- [^abc] negated character class
LOGICAL OPERATORS
- P|Q P or Q
- ¬P not P
- P&Q P and Q
- (P) scope and precedence
QUANTIFIERS
- P? 0 or 1 occurrences
- P* 0 or more occurrences
- P+ 1 or more occurrences
- P{n} n occurrences
- P{n,} n or more occurrences
- P{m,n} m to n occurrences
SEPARATING OPERATORS
- PQ concatenation
- P<Q P inside Q
- P<<Q P strictly inside Q
- P>Q P outside Q
- P>>Q P strictly outside Q
- P^Q P interleaved with Q
- P^^Q P interleaved inside Q
- P#Q P alternating with Q
- P##Q P alternating before Q
SUBTRACTION OPERATORS
- P-Q subtraction on right
- P_-Q subtraction on left
- P->Q subtraction inside
- P->>Q subtraction strictly inside
- P-<Q subtraction outside
- P-<<Q subtraction strictly outside
- P-#Q subtraction alternating
- P-##Q subtraction alternating after
- P_-##Q subtraction alternating before
- P-^Q subtraction interleaved
- P-^^Q subtraction interleaved inside
- P_-^^Q subtraction interleaved outside
OTHER MODIFIERS
- (?i:P) case-insensitive match
- (?r:P) reversed match
- (?sn:P) cipher-shifted by n characters
- (?s:P) cipher-shifted by 1 to 25 characters
- (?Rn:P) rotated by n characters right
- (?R<=n:P) rotated by 1 to n characters left or right
- (?S:P)[m:n] sliced match
- (?S:P)[m:n:s] sliced match with step
- (?/P/Q/R/) replace Q inside P by R
- (?/P/Q/R/s) replace Q strictly inside P by R
- (?D:P) convert NFA to DFA
- (?M:P) convert NFA to minimal DFA
REFERENCES
- (?<ID>P) define submatch capture group
- (?&ID=P) define subpattern for subsequent use
- (?&ID) use subpattern
- \w match word from dictionary file
- \f match FSM from external file
""",
formatter_class=argparse.RawTextHelpFormatter,
)
parser.add_argument("pattern", type=str, help="pattern to compile")
parser.add_argument("files", type=str, nargs="*", help="filenames to search")
parser.add_argument("-d", dest="dict", metavar="PATH", type=str, help="dictionary file to use for \\w", default=None)
parser.add_argument("-f", dest="fsm", metavar="PATH", type=str, help="FSM file to use for \\f", default=None)
parser.add_argument("-D", dest="DFA", action="store_true", help="convert NFA to DFA", default=None)
parser.add_argument("-M", dest="min", action="store_true", help="convert NFA to minimal DFA ", default=None)
parser.add_argument("-i", dest="case_insensitive", action="store_true", help="case insensitive match")
parser.add_argument("-v", dest="invert", action="store_true", help="invert match")
parser.add_argument("-s", dest="svg", metavar="NAME", default=None, help="save FSM image and description")
parser.add_argument("-c", dest="console", action="store_true", help="save FSM image for console")
parser.add_argument("-C", dest="compact", action="store_true", help="compact start/end nodes in FSM image")
parser.add_argument("-x", dest="example", action="store_true", help="generate an example matching string")
parser.add_argument("-r", dest="regex", action="store_true", help="generate a standard equivalent regex")
parser.add_argument("-b", dest="bounds", action="store_true", help="generate lexicographic match bounds")
group = parser.add_mutually_exclusive_group()
group.add_argument("-X", dest="examples_only", metavar="N", type=int, help="output N example matches and quit")
group.add_argument("-R", dest="regex_only", action="store_true", help="output a standard equivalent regex and quit")
args = parser.parse_args()
global DICTIONARY_FSM, EXPLICIT_FSM, SLOW_SIMPLIFICATION
if args.examples_only is not None or args.regex_only:
logger.setLevel(logging.ERROR)
warnings.simplefilter("ignore")
SLOW_SIMPLIFICATION = False
if args.dict:
logger.info(f"Compiling dictionary from '{args.dict}'")
DICTIONARY_FSM = MatchDictionary(Path(args.dict))
if args.fsm:
logger.info(f"Compiling FSM from '{args.fsm}'")
EXPLICIT_FSM = ExplicitFSM(Path(args.fsm))
pattern = args.pattern
if args.case_insensitive:
pattern = f"(?i:{pattern})"
if args.invert:
pattern = f"!({pattern})"
if args.min:
pattern = f"(?M:{pattern})"
elif args.DFA:
pattern = f"(?D:{pattern})"
logger.info(f"Compiling pattern '{pattern}'")
pattern = Pattern(pattern)
if args.examples_only is not None:
for _ in range(args.examples_only):
print(pattern.example())
return
if args.regex_only:
regex = pattern.nfa.regex()
regex_repr = "$." if regex == RegexUnion() else "^$" if regex == RegexConcat() else f"^{regex}$"
print(regex_repr)
return
if args.svg:
logger.info(f"Rendering NFA diagram to '{args.svg}.dot.svg'")
pattern.nfa.render(args.svg, compact=args.compact)
pattern.nfa.save(args.svg, renumber_states=not DEBUG)
if args.console:
logger.info(f"Rendering NFA console diagram to 'console.dot.svg'")
pattern.nfa.render("fsm_console", console=True, compact=args.compact)
if args.example:
logger.info(f"Example match: {pattern.example()!r}")
if args.bounds:
logger.info(f"Match bounds: {pattern.nfa.bound(True, 10)!r} to {pattern.nfa.bound(False, 10)!r}")
if args.regex:
regex = pattern.nfa.regex()
regex_repr = "$." if regex == RegexUnion() else "^$" if regex == RegexConcat() else f"^{regex}$"
logger.info(f"Equivalent regex: {regex_repr}")
min_length = regex.min_length()
max_length = regex.max_length()
if min_length == -math.inf:
lengths = None
elif min_length == max_length:
lengths = min_length
elif max_length == math.inf:
lengths = f"{min_length}+"
else:
lengths = f"{min_length}-{max_length}"
logger.info(f"Match lengths: {lengths}")
for file in args.files:
logger.info(f"Matching pattern against '{file}'")
with open(file, "r", encoding="utf-8") as f:
for w in f:
word = w.rstrip("\n")
match = pattern.match(word)
if match is not None:
if match:
print(f"{word} ({', '.join(f'{k}={v}' for k,v in sorted(match.items()))})", flush=True)
else:
print(word, flush=True)
if __name__ == "__main__":
main()
|
Udzu/pudzu
|
pudzu/sandbox/patterns.py
|
Python
|
mit
| 78,270
|
import subprocess
import importlib
from SALib.util import avail_approaches
def test_cli_usage():
cmd = ["salib"]
try:
out = subprocess.check_output(cmd,
stderr=subprocess.STDOUT,
shell=True,
universal_newlines=True)
except subprocess.CalledProcessError as e:
pass
else:
# if no error raised, check the returned string
assert len(out) > 0 and "usage" in out.lower(), \
"Incorrect message returned from utility"
def test_cli_avail_methods():
method_types = ['sample', 'analyze']
for method in method_types:
module = importlib.import_module('.'.join(['SALib', method]))
actions = avail_approaches(module)
for act in actions:
approach = importlib.import_module('.'.join(
['SALib', method, act]))
# Just try to access the functions - raises error on failure
approach.cli_parse
approach.cli_action
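# Illustrative note (not part of the original tests): avail_approaches returns
# the submodule names available under SALib.sample / SALib.analyze (the exact
# set depends on the installed version), and each such module is expected to
# expose the cli_parse and cli_action hooks checked above.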
if __name__ == '__main__':
test_cli_usage()
test_cli_avail_methods()
|
willu47/SALib
|
tests/test_cli.py
|
Python
|
mit
| 1,147
|
#!/usr/bin/python
# coding: utf-8
# Copyright (c) 2013 Mountainstorm
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from MobileDevice import *
import socket
import select
import os
import glob
class AMDevice(object):
u'''Represents a Apple Mobile Device; providing a wrapping around the raw
MobileDeviceAPI.
'''
# XXX add recovery mode features - move them into another file
INTERFACE_USB = 1
INTERFACE_WIFI = 2
BUDDY_SETID = 0x1
BUDDY_WIFI = 0x2
value_domains = [
u'com.apple.mobile.battery',
u'com.apple.mobile.iTunes',
u'com.apple.mobile.data_sync',
u'com.apple.mobile.sync_data_class',
u'com.apple.mobile.wireless_lockdown',
u'com.apple.mobile.internal',
u'com.apple.mobile.chaperone'
]
def __init__(self, dev):
u'''Initializes a AMDevice object
Arguments:
dev -- the device returned by MobileDeviceAPI
'''
self.dev = dev
def activate(self, activation_record):
u'''Sends the activation record to the device - activating it for use
Arguments:
activation_record -- the activation record, this will be converted to
a CFType
Error:
Raises RuntimeError on error
'''
record = CFTypeFrom(activation_record)
retval = AMDeviceActivate(self.dev, record)
CFRelease(record)
if retval != MDERR_OK:
raise RuntimeError(u'Unable to activate the device')
def connect(self, advanced=False):
u'''Connects to the device, and starts a session
Arguments:
advanced -- if not set, this will create a pairing record if required
(default: false)
Error:
Raises RuntimeError describing the error condition
'''
if AMDeviceConnect(self.dev) != MDERR_OK:
raise RuntimeError(u'Unable to connect to device')
if not advanced:
self.pair()
if AMDeviceStartSession(self.dev) != MDERR_OK:
if not advanced:
raise RuntimeError(u'Unable to start session')
def relay(self, args):
class Relay(object):
def __init__(self, dev, src, dst):
self.dev = dev
self.src = src
self.dst = dst
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.bind((u'localhost', src))
self.server.listen(5)
def accept(self):
s, addr = self.server.accept()
print(u'connection to device on port: %u' % self.dst)
retval = None
try:
retval = (s, self.dev.connect_to_port(self.dst))
except:
print(u'dev relay: error: unable to connect to port %u on device' % self.dst)
return retval
def close(self):
self.server.close()
def close_connection(endpoints, ins, outs, errs, s):
# remove src and dst
other = endpoints[s][0]
rem = [
(ins, s), (ins, other),
(outs, s), (outs, other),
(errs, s), (errs, other)
]
for rset, robj in rem:
try:
rset.remove(robj)
except:
pass
del endpoints[s]
del endpoints[other]
try:
s.shutdown(socket.SHUT_RDWR)
except:
pass
try:
other.shutdown(socket.SHUT_RDWR)
except:
pass
s.close()
other.close()
dev = self
pairs = args["ports"]
relays = {}
endpoints = {}
import sys
# check arguments
try:
for pair in pairs:
src, dst = pair.split(u':')
src = int(src)
dst = int(dst)
# create and register server
relay = Relay(dev, src, dst)
relays[relay.server] = relay
except socket.error:
print(u'dev relay: error: unable to bind to local port - %u' % src)
except:
print(u'dev relay: error: invalid port pair - %s' % pair)
print(sys.exc_info())
# do relaying
if len(relays.keys()) == len(pairs):
for relay in relays.values():
print(u'relaying traffic from local port %u to device port %u' % (relay.src, relay.dst))
ins = relays.keys()
outs = []
errs = []
while len(ins) > 0 or len(outs) > 0:
ready = select.select(ins, outs, errs)
for s in ready[0]: # inputs
if s in relays:
# accept a new connection
e = relays[s].accept()
if e is not None:
a, b = e
endpoints[a] = (b, '')
endpoints[b] = (a, '')
# add both a and b to recv
ins.append(a)
ins.append(b)
errs.append(a)
errs.append(b)
elif s in endpoints:
# recv data and store data against opposite socket
data = s.recv(4096)
if len(data) > 0:
dst = endpoints[s][0]
endpoints[dst] = (s, data)
ins.remove(s)
outs.append(dst)
else:
close_connection(endpoints, ins, outs, errs, s)
for s in ready[1]: # output
if s in endpoints:
# write data
src, data = endpoints[s]
bs = s.send(data)
endpoints[s] = (src, data[bs:])
if len(data) == bs:
# sent everything - put src back in the recv list
outs.remove(s)
ins.append(src)
for s in ready[2]: # errors
if s in endpoints:
close_connection(endpoints, ins, outs, errs, s)
# cleanup
for endp in endpoints.keys():
try:
endp.shutdown(socket.SHUT_RDWR)
except:
pass
endp.close()
for relay in relays.values():
relay.close()
def get_deviceid(self):
u'''Retrieves the device identifier; labeled "Identifier" in the XCode
organiser; a 10 byte value as a string in hex
Return:
On success, the name as a string
Error:
Raises RuntimeError on error
'''
# AMDeviceGetName and AMDeviceCopyDeviceIdentifier return the same value
# AMDeviceRef + 20
cf = AMDeviceGetName(self.dev)
if cf is None:
raise RuntimeError(u'Unable to get device id')
return CFTypeTo(cf)
def get_location(self):
u'''Retrieves the location of the device; the address on the interface
(see get_interface_type)
Return:
On success, a location value e.g. the USB location ID
Error:
Raises RuntimeError on error
'''
# AMDeviceCopyDeviceLocation and AMDeviceUSBLocationID both return
# same value
# AMDeviceRef + 12
retval = AMDeviceCopyDeviceLocation(self.dev)
if retval is None:
raise RuntimeError(u'Unable to get device location')
return retval
def get_value(self, domain=None, name=None):
u'''Retrieves a value from the device
Arguments:
domain -- the domain to retrieve, or None to retrieve default domain
(default None)
name -- the name of the value to retrieve, or None to retrieve all
(default None)
Return:
On success the requested value
Error:
Raises RuntimeError on error
Domains:
AMDevice.value_domains
'''
retval = None
cfdomain = None
cfname = None
if domain is not None:
cfdomain = CFTypeFrom(domain)
if name is not None:
cfname = CFTypeFrom(name)
value = AMDeviceCopyValue(self.dev, cfdomain, cfname)
if cfdomain is not None:
CFRelease(cfdomain)
if cfname is not None:
CFRelease(cfname)
if value is None:
raise RuntimeError(u'Unable to retrieve value', domain, name)
retval = CFTypeTo(value)
CFRelease(value)
return retval
def deactivate(self):
u'''Deactivates the device - removing it from the network. WARNING:
you probably don't want to do this.
Error:
Raises RuntimeError on error
'''
if AMDeviceDeactivate(self.dev) != MDERR_OK:
raise RuntimeError(u'Unable to deactivate the device')
def disconnect(self):
u'''Disconnects from the device, ending the session'''
if self.dev is not None:
AMDeviceStopSession(self.dev)
AMDeviceDisconnect(self.dev)
AMDeviceRelease(self.dev)
self.dev = None
def enter_recovery_mode(self):
u'''Puts the device into recovery mode
Error:
Raises RuntimeError on error'''
if AMDeviceEnterRecovery(self.dev) != MDERR_OK:
raise RuntimeError(u'Unable to put device in recovery mode')
def get_interface_speed(self):
u'''Retrieves the interface speed'''
return AMDeviceGetInterfaceSpeed(self.dev)
def get_interface_type(self):
u'''Retrieves the interface type
Return:
None or error, else a AMDevice.INTERFACE_* value on success
'''
# AMDeviceRef + 24
retval = AMDeviceGetInterfaceType(self.dev)
if retval == -1:
retval = None
return retval
def get_wireless_buddyflags(self):
u'''Retrieve the wireless buddy flags; Probably used to do wifi sync
Error:
Raises a RuntimeError on error
'''
retval = None
obj = c_long()
if AMDeviceGetWirelessBuddyFlags(self.dev, byref(obj)) != MDERR_OK:
raise RuntimeError(u'Unable to get wireless buddy flags')
if obj is not None:
retval = obj.value
return retval
def remove_value(self, domain, name):
u'''Removes a value from the device
Arguments:
domain -- the domain to work in, or None to use the default domain
(default None)
name -- the name of the value to delete
Error:
Raises RuntimeError on error
'''
cfdomain = None
cfname = None
if domain is not None:
cfdomain = CFTypeFrom(domain)
if name is not None:
cfname = CFTypeFrom(name)
retval = AMDeviceRemoveValue(self.dev, cfdomain, cfname)
if cfdomain is not None:
CFRelease(cfdomain)
if cfname is not None:
CFRelease(cfname)
if retval != MDERR_OK:
raise RuntimeError(u'Unable to remove value %s/%s' % (domain, name))
def set_value(self, domain, name, value):
u'''Sets a value on the device
Arguments:
domain -- the domain to set in, or None to use the default domain
(default None)
name -- the name of the value to set
value -- the value to set
Error:
Raises RuntimeError on error
'''
cfdomain = None
cfname = None
cfvalue = None
if domain is not None:
cfdomain = CFTypeFrom(domain)
if name is not None:
cfname = CFTypeFrom(name)
if value is not None:
cfvalue = CFTypeFrom(value)
retval = AMDeviceSetValue(self.dev, cfdomain, cfname, cfvalue)
if cfdomain is not None:
CFRelease(cfdomain)
if cfname is not None:
CFRelease(cfname)
if cfvalue is not None:
CFRelease(cfvalue)
if retval != MDERR_OK:
raise RuntimeError(u'Unable to set value %s/%s=%s' % (domain, name, value))
def set_wireless_buddyflags(self, enable_wifi=True, setid=True):
u'''Sets the wireless buddy flags, and optionally enables wifi
Arguments:
enable_wifi -- turns on/off wifi (default True)
setid -- if true, sets buddy id (default True)
Error:
Raises RuntimeError on error
'''
flags = 0
if enable_wifi:
flags |= AMDevice.BUDDY_WIFI
if setid:
flags |= AMDevice.BUDDY_SETID
if AMDeviceSetWirelessBuddyFlags(self.dev, flags) != MDERR_OK:
raise RuntimeError(u'Unable to set buddy id flags', enable_wifi, setid)
# XXX change api so start_service takes a python string and convert
def start_service(self, service_name, options=None):
u'''Starts the service and returns the socket
Argument:
service_name -- the reverse domain name for the service
options -- a dict of options, or None (default None)
Return:
The OS socket associated with the connection to the service
Error:
Raises RuntimeError on error
'''
sock = c_int32()
cfsvc_name = CFStringCreateWithCString(
None,
service_name,
kCFStringEncodingUTF8
)
err = False
if AMDeviceStartServiceWithOptions(
self.dev,
cfsvc_name,
options,
byref(sock)
) != MDERR_OK:
err = True
CFRelease(cfsvc_name)
if err:
raise RuntimeError(u'Unable to start service %s' % service_name)
return sock.value
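# Illustrative sketch (not part of the original module): a typical call,
# using the AFC service name purely as an example:
#   dev.connect()
#   sock = dev.start_service(u'com.apple.afc')
#   ... talk to the service over sock ...
#   dev.disconnect()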
def get_usb_deviceid(self):
u'''Retrieves the USB device id
Return:
The usb device id
Error:
Raises RuntimeError if there's no usb device id
'''
# AMDeviceRef + 8
retval = AMDeviceUSBDeviceID(self.dev)
if retval == 0:
raise RuntimeError(u'No usb device id')
return retval
def get_usb_productid(self):
u'''Retrieves the USB product id
Return:
The usb product id
Error:
Raises RuntimeError if there's no usb product id
'''
# AMDeviceRef + 16
retval = AMDeviceUSBProductID(self.dev)
if retval == 0:
raise RuntimeError(u'No usb product id')
return retval
def pair(self):
u'''Pairs the device to the host
Error:
Raises RuntimeError on error
'''
if AMDeviceIsPaired(self.dev) != 1:
if AMDevicePair(self.dev) != MDERR_OK:
raise RuntimeError(u'If your phone is locked with a passcode, unlock then reconnect it')
if AMDeviceValidatePairing(self.dev) != MDERR_OK:
raise RuntimeError(u'Unable to validate pairing')
def unpair(self):
u'''Unpairs device from host WARNING: you probably dont want to call
this
Error:
Raises RuntimeError on error
'''
if AMDeviceUnpair(self.dev) != MDERR_OK:
raise RuntimeError(u'Unable to unpair device')
def connect_to_port(self, port):
u'''Connects to a listening TCP port on the device.
Error:
Raises RuntimeError on error
'''
sock = c_int()
# logic taken from _connect_to_port
if self.get_interface_type() == AMDevice.INTERFACE_USB:
if USBMuxConnectByPort(
AMDeviceGetConnectionID(self.dev),
socket.htons(port),
byref(sock)
) != MDERR_OK:
raise RuntimeError(u'Unable to connect to socket via usb')
else:
# XXX test!
raise NotImplementedError(u'WiFi sync connect')
#if AMDeviceConnectByAddressAndPort(
# self.dev,
# port,
# byref(sock)
# ) != MDERR_OK:
# raise RuntimeError(u'Unable to connect to socket')
return socket.fromfd(sock.value, socket.AF_INET, socket.SOCK_STREAM)
def find_device_support_path(self):
u'''Returns the best device support path for this device
Returns:
the path
Error:
Raises RuntimeError if a suitable path can't be found
'''
# XXX: Windows version
support_paths = glob.glob(
u'/Applications/Xcode*.app/Contents/Developer/Platforms/iPhoneOS.platform/DeviceSupport/*'
)
# process all the support paths to extract all the components
support = []
for path in support_paths:
name = os.path.split(path)[1]
parts = name.split(u' ')
version = parts[0]
build = None
if len(parts) != 1:
build = parts[1].replace(u'(', '').replace(u')', '')
version_parts = version.split(u'.')
major_version = version_parts[0]
minor_version = version_parts[1]
support.append({
u'version': version,
u'major_version': major_version,
u'minor_version': minor_version,
u'build': build,
u'path': path
})
# get the device info
version = self.get_value(name=u'ProductVersion')
version_parts = version.split(u'.')
major_version = version_parts[0]
minor_version = version_parts[1]
build = self.get_value(name=u'BuildVersion')
# lets find the best support path.
support_path = None
for s in support:
# version match is more important than build
if s[u'major_version'] == major_version:
if support_path is None:
support_path = s
else:
# is this better than the last match?
if s[u'minor_version'] == minor_version:
if s[u'build'] == build:
# perfect match
support_path = s
else:
if support_path[u'build'] != build:
# we're still better than existing match
support_path = s
if support_path is None:
raise RuntimeError(u'Unable to find device support path')
return support_path[u'path']
def find_developer_disk_image_path(self, device_support_path=None):
u'''Returns the best debug disk image for the device
Returns:
the path of the .dmg
Error:
Raises RuntimeError if a suitable disk image can't be found
'''
if device_support_path is None:
device_support_path = self.find_device_support_path()
path = os.path.join(
device_support_path,
u'DeveloperDiskImage.dmg'
)
if not os.path.exists(path):
# bum - that shouldn't happen
raise RuntimeError(u'Unable to find developer disk image')
return path
def handle_devices(factory):
u'''Waits indefinitely, handling device arrival/removal events.
Upon arrival the factory function will be called; providing the device as
a param. This method should return an object on success, None on error.
When the device is removed your object will have 'disconnect' called upon it
Typical Example:
def factory(dev):
dev.connect()
pprint.pprint(dev.get_value())
dev.disconnect()
Arguments:
factory -- the callback function, called on device arrival
Error:
Raises a RuntimeError on error
'''
# XXX: what do I need to release
devices = {}
def cbFunc(info, cookie):
info = info.contents
if info.message == ADNCI_MSG_CONNECTED:
devices[info.device] = AMDevice(info.device)
factory(devices[info.device])
elif info.message == ADNCI_MSG_DISCONNECTED:
devices[info.device].disconnect()
del devices[info.device]
notify = AMDeviceNotificationRef()
notifyFunc = AMDeviceNotificationCallback(cbFunc)
err = AMDeviceNotificationSubscribe(notifyFunc, 0, 0, 0, byref(notify))
if err != MDERR_OK:
raise RuntimeError(u'Unable to subscribe for notifications')
# loop so we can exit easily
while CFRunLoopRunInMode(kCFRunLoopDefaultMode, 0.1, False) == kCFRunLoopRunTimedOut:
pass
AMDeviceNotificationUnsubscribe(notify)
def list_devices(waittime=0.1):
u'''Returns a dictionary of AMDevice objects, indexed by device id,
currently connected; waiting at least waittime for them to be discovered.
Arguments:
waittime -- time to wait for devices to be discovered (default 0.1 seconds)
'''
# XXX: what do I need to release
devices = {}
def cbFunc(info, cookie):
info = info.contents
if info.message == ADNCI_MSG_CONNECTED:
dev = AMDevice(info.device)
devices[dev.get_deviceid()] = dev
notify = AMDeviceNotificationRef()
notifyFunc = AMDeviceNotificationCallback(cbFunc)
err = AMDeviceNotificationSubscribe(notifyFunc, 0, 0, 0, byref(notify))
if err != MDERR_OK:
raise RuntimeError(u'Unable to subscribe for notifications')
CFRunLoopRunInMode(kCFRunLoopDefaultMode, waittime, False)
AMDeviceNotificationUnsubscribe(notify)
return devices
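# Illustrative sketch (not part of the original module): enumerating attached
# devices; u'DeviceName' is the same lockdown key used by argparse_parse below.
def _list_devices_demo():
    for device_id, dev in list_devices(waittime=0.5).items():
        dev.connect()
        try:
            print(u'%s: %s' % (device_id, dev.get_value(name=u'DeviceName')))
        finally:
            dev.disconnect()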
def argparse_parse(scope):
u'''Provides basic argument parsing functionality (listing and selection of
devices). Will call any methods in scope whose keys start with
"register_argparse_" and call them with the argument parser
Arguments:
scope -- a dictionary of name -> functions
'''
import os
import sys
import argparse
class CmdArguments(object):
def __init__(self):
self._devs = list_devices()
self._parser = argparse.ArgumentParser()
self._parser.add_argument(
u'-x',
dest=u'advanced',
action=u'store_true',
help=u'''enables advanced mode; where helpful tasks are not done
automatically; e.g. pairing if you're not already paired'''
)
group = self._parser.add_mutually_exclusive_group()
group.add_argument(
u'-d',
metavar=u'devid',
dest=u'device_idx',
choices=range(len(self._devs.keys())),
type=int,
action=u'store',
help=u'operate on the specified device'
)
group.add_argument(
u'-i',
metavar=u'identifier',
dest=u'device_id',
choices=self._devs.keys(),
type=str,
action=u'store',
help=u'operate on the specified device'
)
# add subparsers for commands
self._subparsers = self._parser.add_subparsers(
help=u'sub-command help; use <cmd> -h for help on sub commands'
)
# add listing command
listparser = self._subparsers.add_parser(
u'list',
help=u'list all attached devices'
)
listparser.set_defaults(listing=True)
def add_parser(self, *args, **kwargs):
return self._subparsers.add_parser(*args, **kwargs)
def parse_args(self):
args = self._parser.parse_args(namespace=self)
i = 0
if u'listing' in dir(self):
sys.stdout.write(self._print_devices())
else:
if len(self._devs) > 0:
devs = sorted(self._devs.keys())
if self.device_id is not None:
identifier = self.device_id.decode(u'utf-8')
for i in range(len(devs)):
if devs[i] == identifier:
self.device_idx = i
if self.device_idx is None:
self.device_idx = 0 # default to first device
k = devs[self.device_idx]
v = self._devs[k]
# connect before trying to get device name
v.connect(args.advanced)
name = u''
try:
name = v.get_value(name=u'DeviceName')
except:
pass
print(u'%u: %s - "%s"' % (
self.device_idx,
v.get_deviceid(),
name.decode(u'utf-8')
))
args.func(args, v)
v.disconnect()
def _print_devices(self):
retval = u'device list:\n'
i = 0
for k in sorted(self._devs.keys()):
v = self._devs[k]
try:
v.connect()
name = v.get_value(name=u'DeviceName')
retval += u' %u: %s - "%s"\n' % (
i,
v.get_deviceid(),
name.decode(u'utf-8')
)
except:
retval += u' %u: %s\n' % (i, k)
finally:
v.disconnect()
i = i + 1
return retval
cmdargs = CmdArguments()
# register any command line arguments from the modules
for member in scope.keys():
if member.startswith(u'register_argparse_'):
scope[member](cmdargs)
cmdargs.parse_args()
def register_argparse_dev(cmdargs):
import argparse
import pprint
def get_number_in_units(size, precision=2):
suffixes = [u'b', u'kb', u'mb', u'gb']
suffixIndex = 0
while size > 1024:
suffixIndex += 1  # increment the index of the suffix
size = size / 1024.0  # apply the division
return u'%.*f%s' % (precision, size, suffixes[suffixIndex])
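# e.g. get_number_in_units(480000000) -> u'457.76mb'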
def cmd_info(args, dev):
iface_types = {
AMDevice.INTERFACE_USB: u'USB',
AMDevice.INTERFACE_WIFI: u'WIFI'
}
device_type = dev.get_interface_type()
print(u' identifier: %s' % dev.get_deviceid())
print(u' interface type: %s' % iface_types[device_type])
print(u' interface speed: %sps' %
get_number_in_units(int(dev.get_interface_speed()))
)
print(u' location: 0x%x' % dev.get_location())
if device_type is AMDevice.INTERFACE_USB:
print(u' usb device id: 0x%x' % dev.get_usb_deviceid())
print(u' usb product id: 0x%x' % dev.get_usb_productid())
def cmd_get(args, dev):
if args.domain is not None or args.key is not None:
key = None
if args.key is not None:
key = args.key.decode(u'utf-8')
domain = None
if args.domain is not None:
domain = args.domain.decode(u'utf-8')
try:
pprint.pprint(dev.get_value(domain, key))
except:
pass
else:
# enumerate all the value_domains
output = {}
output[None] = dev.get_value()
for domain in AMDevice.value_domains:
output[domain] = dev.get_value(domain)
pprint.pprint(output)
def cmd_set(args, dev):
domain = None
if args.domain is not None:
domain = args.domain.decode(u'utf-8')
# XXX add support for non-string types; bool next
dev.set_value(domain, args.key.decode(u'utf-8'), args.value.decode(u'utf-8'))
def cmd_del(args, dev):
domain = None
if args.domain is not None:
domain = args.domain.decode(u'utf-8')
dev.remove_value(domain, args.key.decode(u'utf-8'))
def cmd_pair(args, dev):
dev.pair()
def cmd_unpair(args, dev):
dev.unpair()
def cmd_buddy(args, dev):
if args.wifi is not None or args.setid is not None:
dev.set_wireless_buddyflags(args.wifi, args.setid)
else:
flags = dev.get_wireless_buddyflags()
s = u''
if flags & AMDevice.BUDDY_WIFI:
s += u'BUDDY_WIFI'
if flags & AMDevice.BUDDY_SETID:
if len(s) != 0:
s += u' | '
s += u'BUDDY_SETID'
s += u' (0x%x)' % flags
print(u' wireless buddy flags: %s' % s)
def cmd_relay(args, dev):
am_device = AMDevice(dev)
am_device.relay({"ports":getattr(args, u'port:pair')})
# standard dev commands
devparser = cmdargs.add_parser(
u'dev',
help=u'commands related to the device'
)
# device info
devcmds = devparser.add_subparsers()
infocmd = devcmds.add_parser(
u'info',
help=u'display basic info about the device'
)
infocmd.set_defaults(func=cmd_info)
# get value
getcmd = devcmds.add_parser(
u'get',
help=u'display key/value info about the device'
)
getcmd.add_argument(
u'key',
nargs=u'?',
help=u'the key of the value to get'
)
getcmd.add_argument(
u'-d',
metavar=u'domain',
dest=u'domain',
help=u'the domain of the key to get'
)
getcmd.set_defaults(func=cmd_get)
# set value
setcmd = devcmds.add_parser(
u'set',
help=u'set key/value info about the device'
)
setcmd.add_argument(
u'key',
help=u'the key to set'
)
# XXX how do we support complex (dict) settings?
setcmd.add_argument(
u'value',
help=u'the value of the key to apply (only able to set simple values at present)'
)
setcmd.add_argument(
u'-d',
metavar=u'domain',
dest=u'domain',
help=u'the domain the key to set lives in'
)
setcmd.set_defaults(func=cmd_set)
# delete value
delcmd = devcmds.add_parser(
u'del',
help=u'delete key/value info from the device - DANGEROUS'
)
delcmd.add_argument(
u'key',
help=u'the key of the value to delete'
)
delcmd.add_argument(
u'-d',
metavar=u'domain',
dest=u'domain',
help=u'the domain of the key to delete'
)
delcmd.set_defaults(func=cmd_del)
# pair
paircmd = devcmds.add_parser(
u'pair',
help=u'pair the device to this host'
)
paircmd.set_defaults(func=cmd_pair)
# unpair
unpaircmd = devcmds.add_parser(
u'unpair',
help=u'unpair the device from this host'
)
unpaircmd.set_defaults(func=cmd_unpair)
# set buddy id
buddycmd = devcmds.add_parser(
u'buddy',
help=u'get/set wireless buddy parameters'
)
buddycmd.add_argument(
u'-w',
help=u'enable wifi (0 or 1)',
dest=u'wifi',
type=int,
choices=(0, 1)
)
buddycmd.add_argument(
u'-s',
help=u'sets buddy id (0 or 1)',
dest=u'setid',
type=int,
choices=(0, 1)
)
buddycmd.set_defaults(func=cmd_buddy)
# relay ports from localhost to device (tcprelay style)
relaycmd = devcmds.add_parser(
u'relay',
        help=u'relay tcp ports from localhost to device'
)
relaycmd.add_argument(
u'port:pair',
nargs=u'+',
help=u'a pair of ports to relay <local>:<device>'
)
relaycmd.set_defaults(func=cmd_relay)
# XXX activate, deactivate - do we really want to be able to do these?
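    # Illustrative command-line usage of the subcommands registered above
    # (a sketch; the entry-point name and the domain value are assumptions):
    #
    #     python amdevice.py dev info
    #     python amdevice.py dev get -d com.apple.mobile.battery
    #     python amdevice.py dev relay 8080:80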
|
RoshanNindrai/MobileDevice
|
amdevice.py
|
Python
|
mit
| 33,356
|
from django.contrib.auth import get_user_model
from rest_framework import serializers
from djoser.conf import settings
from djoser.serializers import UserCreateMixin
from .models import CredentialOptions
from .utils import create_challenge, create_ukey
User = get_user_model()
class WebauthnSignupSerializer(serializers.ModelSerializer):
class Meta:
model = CredentialOptions
fields = ("username", "display_name")
def create(self, validated_data):
validated_data.update(
{
"challenge": create_challenge(
length=settings.WEBAUTHN["CHALLENGE_LENGTH"]
),
"ukey": create_ukey(length=settings.WEBAUTHN["UKEY_LENGTH"]),
}
)
return super().create(validated_data)
def validate_username(self, username):
if User.objects.filter(username=username).exists():
raise serializers.ValidationError(f"User {username} already exists.")
return username
class WebauthnCreateUserSerializer(UserCreateMixin, serializers.ModelSerializer):
class Meta:
model = User
fields = tuple(User.REQUIRED_FIELDS) + (
settings.LOGIN_FIELD,
User._meta.pk.name,
)
class WebauthnLoginSerializer(serializers.Serializer):
default_error_messages = {
"invalid_credentials": settings.CONSTANTS.messages.INVALID_CREDENTIALS_ERROR
}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields[settings.LOGIN_FIELD] = serializers.CharField(required=True)
def validate_username(self, username):
try:
search_kwargs = {
settings.LOGIN_FIELD: username,
"credential_options__isnull": False,
}
self.user = user = User.objects.get(**search_kwargs)
except User.DoesNotExist:
self.fail("invalid_credentials")
if not user.is_active:
self.fail("invalid_credentials")
return username
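# Illustrative usage sketch (not part of the original module): registering
# credential options for a new user via the signup serializer above.
#
#     serializer = WebauthnSignupSerializer(
#         data={"username": "alice", "display_name": "Alice"})
#     serializer.is_valid(raise_exception=True)
#     options = serializer.save()  # CredentialOptions with a fresh
#                                  # challenge and ukey (see create())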
|
sunscrapers/djoser
|
djoser/webauthn/serializers.py
|
Python
|
mit
| 2,050
|
import datetime
import os
env = os.getenv('BOT_ENV', 'staging')
stages = {
1: datetime.timedelta(minutes=0),
2: datetime.timedelta(minutes=30),
3: datetime.timedelta(hours=8),
4: datetime.timedelta(hours=24),
5: datetime.timedelta(days=3),
6: datetime.timedelta(weeks=1),
7: datetime.timedelta(weeks=4),
8: datetime.timedelta(weeks=12),
}
min_stage = 1
max_stage = 8
if env == 'debug':
min_translation_stage = 1
else:
min_translation_stage = 3
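# Illustrative sketch (this helper is not in the original file): a consumer of
# the stage table above could compute the next review time as
#
#     def next_review(last_seen, stage):
#         stage = min(max(stage, min_stage), max_stage)
#         return last_seen + stages[stage]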
|
Omrigan/shiny-language-learner
|
study_settings.py
|
Python
|
mit
| 481
|
import os
from lxml import etree
from flask.ext.script import Command, Option
from flask import current_app
class Readfeed(Command):
"""
Reads the given feed.xml and upserts the data
"""
EVE_RESOURCE = 'product'
XML_ELEM_PRODUCT = 'product'
XML_ELEM_FEATURE = 'feature'
XML_ELEM_LABEL = 'label'
XML_ELEM_VALUE = 'value'
XML_ELEM_ID = 'code'
option_list = (
Option('--file', '-f', dest='file', default='../data/feed.xml'),
)
def run(self, file):
logger = current_app.logger
file = os.path.abspath(os.path.join(os.getcwd(), file))
for p in self.readFile(file):
existing = current_app.data.find_one(Readfeed.EVE_RESOURCE, None, code=p[Readfeed.XML_ELEM_ID])
doc = {'title':p['name'], 'description_long': p['description'], 'description_short': p['shortDescription'],
'price': float(p['price']), 'code': p['code']}
if not self.validator.validate(doc):
logger.critical("Could not validate new document (%s)" % (self.validator.errors))
continue
if not existing:
obj = current_app.data.insert(Readfeed.EVE_RESOURCE, doc)
logger.info("Inserted new Document %s" % (str(obj)))
else:
current_app.data.update(Readfeed.EVE_RESOURCE, existing['_id'], doc)
logger.info("Updated Document %s" % (existing['_id']))
def readFile(self, file):
parser = etree.XMLParser(recover=True) # recovers from bad characters.
tree = etree.parse(file, parser)
for elem in tree.iter(Readfeed.XML_ELEM_PRODUCT):
product = {'name':None, 'description':None, 'shortDescription':None, 'price':None}
product['productfeatures'] = {}
for f in elem.iter(Readfeed.XML_ELEM_FEATURE):
label = f.findtext(Readfeed.XML_ELEM_LABEL, default=None)
value = f.findtext(Readfeed.XML_ELEM_VALUE, default=None)
product['productfeatures'][label] = value
for c in elem.getchildren():
product[c.tag] = c.text
yield product
_validator = None
@property
def validator(self):
if not self._validator:
resource_def = current_app.config['DOMAIN'][Readfeed.EVE_RESOURCE]
schema = resource_def['schema']
self._validator = current_app.validator(schema, Readfeed.EVE_RESOURCE)
return self._validator
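# Illustrative feed.xml fragment matching the element names parsed above (the
# root element name and the sample values are assumptions):
#
#     <products>
#       <product>
#         <code>SKU-001</code>
#         <name>Example product</name>
#         <description>Long description</description>
#         <shortDescription>Short description</shortDescription>
#         <price>9.99</price>
#         <feature><label>colour</label><value>red</value></feature>
#       </product>
#     </products>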
|
tspycher/python-productdb
|
productdb/app/commands/readfeed.py
|
Python
|
mit
| 2,508
|
from flask import Blueprint
url_prefix = '/{{cookiecutter.project_slug}}'
main = Blueprint('main', __name__, url_prefix=url_prefix)
from . import views
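# Usage sketch (the application object below is an assumption; only the
# blueprint itself comes from this template):
#
#     from app.main import main
#     app.register_blueprint(main)  # routes are served under url_prefix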
|
by46/recipe
|
templates/python.flask/{{cookiecutter.project_safe_name}}/app/main/__init__.py
|
Python
|
mit
| 160
|
'''
>>> c = Conf('climon.conf.test')
>>> list(c.iter_ids('sensor'))
['sine', 'web']
>>> list(c.iter_ids('toggle'))
['fake']
>>> c.get_element('sensor', 'sine') # doctest: +ELLIPSIS
<function sine.<locals>.sine_sensor at ...>
>>> c.get_element('toggle', 'fake') # doctest: +ELLIPSIS
<toggles.FakeToggle object at ...>
>>> list(c.iter_elements('sensor')) # doctest: +ELLIPSIS
[('sine', <function sine... at ...>)]
>>> list(c.iter_elements('toggle')) # doctest: +ELLIPSIS
[('fake', <toggles.FakeToggle object at...>)]
'''
from functools import lru_cache
from toggles import TOGGLES, InvertedToggle
from sensors import SENSORS
ELEMENTS = {
'toggle': TOGGLES,
'sensor': SENSORS,
}
def new_element(element_type, element_conf):
make_element = ELEMENTS[element_type][element_conf['type'].upper()]
element = make_element(element_conf['source'])
if element_type == 'toggle' and element_conf.get('invert', None) == 'true':
element = InvertedToggle(element)
element.conf = element_conf
element.conf['element_type'] = element_type
return element
class Conf(object):
def __init__(self, fname):
import configparser
self.raw = configparser.ConfigParser()
self.raw.read(fname)
def get_section(self, element_type, element_id):
s = self.raw['%s:%s' % (element_type, element_id)]
s['id'] = element_id
s['element_type'] = element_type
return s
@lru_cache(maxsize=None)
def get_element(self, element_type, element_id):
return new_element(element_type, self.get_section(element_type, element_id))
def iter_ids(self, element_type):
for section in self.raw.sections():
if section.startswith('%s:' % element_type):
_, element_id = section.split(':', 1)
yield element_id
def iter_sections(self, element_type):
for element_id in self.iter_ids(element_type):
yield element_id, self.get_section(element_type, element_id)
def iter_elements(self, element_type):
for element_id in self.iter_ids(element_type):
yield element_id, self.get_element(element_type, element_id)
@staticmethod
def section_type_id(section):
try:
s_type, s_id = section.split(':', 1)
return s_type, s_id
except ValueError:
return None, None
def iter_group_elements(self, group):
for section in self.raw.sections():
element_type, element_id = self.section_type_id(section)
if element_type in ELEMENTS and self.raw[section].get('group', '') == group:
yield section, self.get_section(element_type, element_id)
def iter_groups(self):
groups = [group for group_id, group in self.iter_sections('group')]
for group in sorted(groups, key=lambda g: g['order']):
yield group, list(self.iter_group_elements(group['id']))
class ParsedConf(object):
def __init__(self, fname):
self.conf = Conf(fname)
self.groups = list(self.conf.iter_groups())
self.sensors = dict(self.conf.iter_sections('sensor'))
self.toggles = dict(self.conf.iter_sections('toggle'))
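# Illustrative climon.conf layout (a sketch inferred from the parsing code
# above and the doctest fixture; the 'source' values are assumptions):
#
#     [sensor:sine]
#     type = sine
#     source = dummy
#     group = demo
#
#     [toggle:fake]
#     type = fake
#     source = dummy
#     invert = true
#
#     [group:demo]
#     order = 1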
|
jwustrack/climon
|
conf.py
|
Python
|
mit
| 3,204
|
import numpy as np
import decimal
import spectra
class Smear(object):
""" This class smears the energy and radius of a spectra.
The class can recieve energy and radius as individual data points or a
1 dimensional numpy array to smear which is then returned. 2d and 3d
arrays with linked energy, radius and time information is yet to be
implemented.
Attributes:
_light_yield (float): Number of PMT hits expected for a
MeV energy deposit in NHit/MeV
_position_resolution (float): Sigma in mm
"""
_light_yield = 200. # NHit per MeV
_position_resolution = 100. # mm
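    # Quick usage sketch (not part of the original source): smearing single
    # data points with the defaults above.
    #
    #     smear = Smear()
    #     e = smear.smear_energy_0d(2.5)    # |N(2.5, sqrt(2.5/200))|
    #     r = smear.smear_radius_0d(1000.)  # |N(1000, 100)| in mm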
def __init__(self):
""" Initialise the Smear class by seeding the random number generator
"""
np.random.seed()
def bin_1d_array(self, array, bins):
""" Sorts a 1 dimensional array and bins it
Args:
array (:class:`numpy.array`): To sort and bin
bins (list): Upper limit of bins
Returns:
A 1 dimensional numpy array, sorted and binned.
"""
array = np.sort(array)
split_at = array.searchsorted(bins)
return np.split(array, split_at)
def calc_gaussian(self, x, mean, sigma):
""" Calculates the value of a gaussian whose integral is equal to
one at position x with a given mean and sigma.
Args:
x : Position to calculate the gaussian
mean : Mean of the gaussian
sigma : Sigma of the gaussian
Returns:
Value of the gaussian at the given position
"""
return np.exp(-(x-mean)**2/(2*sigma**2))/(sigma*np.sqrt(2*np.pi))
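    # For example, calc_gaussian(0., 0., 1.) evaluates the standard normal
    # pdf at its mean, 1/sqrt(2*pi), roughly 0.3989.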
def floor_to_bin(self, x, bin_size):
""" Rounds down value bin content to lower edge of nearest bin.
Args:
x (float): Value to round down
bin_size (float): Width of a bin
Returns:
Value of nearest lower bin edge
"""
dp = abs(decimal.Decimal(str(bin_size)).as_tuple().exponent)
coef = np.power(10, dp)
return np.floor(coef*(x//bin_size)*bin_size)/coef
def ceil_to_bin(self, x, bin_size):
""" Rounds up value bin content to upper edge of nearest bin.
Args:
x (float): Value to round down
bin_size (float): Width of a bin
Returns:
Value of nearest lower bin edge
"""
dp = abs(decimal.Decimal(str(bin_size)).as_tuple().exponent)
coef = np.power(10, dp)
return np.ceil(coef*(bin_size+(x//bin_size)*bin_size))/coef
def get_energy_sigma(self, energy):
""" Calculates sigma at a given energy.
Args:
energy (float): Energy value of data point(s)
Returns:
Sigma equivalent to sqrt(energy/_light_yield)
"""
return np.sqrt(energy/self._light_yield)
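    # E.g. with the default light yield of 200 NHit/MeV, a 1 MeV deposit
    # gives sigma = sqrt(1/200), roughly 0.071 MeV.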
def smear_energy_0d(self, energy):
""" Smears a single energy value
Args:
energy (float): Value to smear
Returns:
Smeared energy value
"""
sigma = self.get_energy_sigma(energy)
return np.fabs(np.random.normal(energy, sigma))
def smear_energy_1d(self, energies, bins, binned=False):
""" Smears a 1 dimensional array of energy values
Args:
energies (:class:`numpy.array`): Values to smear
bins (list): Upper edge of bins for array
binned (bool): Is the array already binned? (True or False)
Returns:
Smeared and sorted 1 dimensional numpy array of energy values
"""
if binned is False:
energies = self.bin_1d_array(energies, bins)
bin_size = bins[1]-bins[0]
smeared_energies = []
for energy in energies:
if energy.any():
energy_bin = self.floor_to_bin(energy[0], bin_size)+0.5*bin_size
num_entries = len(energy)
smeared_energies += self.smear_energy_bin(energy_bin,
num_entries)
return np.array(smeared_energies)
def smear_energy_bin(self, energy, entries):
""" Smears one energy bin.
Args:
energy (float): Central value of energy of bin
entries (int): Number of entries in the bin
Returns:
A list of smeared energies corresponding to the input bin.
"""
sigma = self.get_energy_sigma(energy)
smeared_energies = []
for i in range(entries):
smeared_energies.append(np.fabs(np.random.normal(energy, sigma)))
return smeared_energies
def smear_radius_0d(self, radius):
""" Smears a single radius value
Args:
radius (float): Value to smear
Returns:
Smeared radius value
"""
return np.fabs(np.random.normal(radius, self._position_resolution))
def smear_radii_1d(self, radii, bins, binned=False):
""" Smears a 1 dimensional array of radius values
Args:
radii (:class:`numpy.array`): Values to smear
bins (list): Upper edge of bins for array
binned (bool): Is the array already binned? (True or False)
Returns:
Smeared and sorted 1 dimensional numpy array of radius values
"""
if binned is False:
radii = self.bin_1d_array(radii, bins)
bin_size = bins[1]-bins[0]
smeared_radii = []
for radius in radii:
if radius.any():
radius_bin = self.floor_to_bin(radius[0], bin_size)+0.5*bin_size
num_entries = len(radius)
smeared_radii += self.smear_radius_bin(radius_bin, num_entries)
return np.array(smeared_radii)
def smear_radius_bin(self, radius, entries):
""" Smears one energy bin.
Args:
radius (float): Central value of radius of bin
entries (int): Number of entries in the bin
Returns:
A list of smeared radii corresponding to the input bin.
"""
smeared_radii = []
for i in range(entries):
smeared_radii.append(np.fabs(np.random.normal(radius,
self._position_resolution)))
return smeared_radii
def random_gaussian_energy_spectra(self, true_spectrum):
""" Smears the energy of a spectra object by generating
a number of random points from a Gaussian pdf generated
for that bin. The number of points generated is equivalent
to the number of entries in that bin.
Args:
true_spectrum (spectra): spectrum to be smeared
Returns:
A smeared spectra object.
"""
raw_events = true_spectrum._raw_events
energy_step = (true_spectrum._energy_high-true_spectrum._energy_low)/true_spectrum._energy_bins
time_step = (true_spectrum._time_high-true_spectrum._time_low)/true_spectrum._time_bins
radial_step = (true_spectrum._radial_high-true_spectrum._radial_low)/true_spectrum._radial_bins
smeared_spectrum = spectra.Spectra(true_spectrum._name+str(self._light_yield)+"_light_yield",
true_spectrum._num_decays)
for time_bin in range(true_spectrum._time_bins):
mean_time = time_bin*time_step+0.5*time_step
for radial_bin in range(true_spectrum._radial_bins):
mean_radius = radial_bin*radial_step+0.5*radial_step
for energy_bin in range(true_spectrum._energy_bins):
mean_energy = energy_bin*energy_step+0.5*energy_step
sigma = self.get_energy_sigma(mean_energy)
entries = true_spectrum._data[energy_bin,
radial_bin,
time_bin]
for i in range(int(entries)):
try:
smeared_spectrum.fill(np.fabs(np.random.normal(mean_energy,
sigma)),
mean_radius,
mean_time)
except ValueError:
# Occurs when smeared energy is outside bin range
print "Warning: Smeared energy out of bounds. Skipping."
continue
smeared_spectrum._raw_events = raw_events
return smeared_spectrum
def weight_gaussian_energy_spectra(self, true_spectrum, num_sigma=5.):
""" Smears the energy of a spectra object by calculating a Gaussian pdf
for each bin and applying a weight to the bin and corresponding bins
a default 5 sigma apart.
Args:
true_spectrum (spectra): spectrum to be smeared
num_sigma (float): Width of window to apply the weight method.
Default is 5.
Returns:
A smeared spectra object.
"""
raw_events = true_spectrum._raw_events
energy_step = (true_spectrum._energy_high-true_spectrum._energy_low)/true_spectrum._energy_bins
time_step = (true_spectrum._time_high-true_spectrum._time_low)/true_spectrum._time_bins
radial_step = (true_spectrum._radial_high-true_spectrum._radial_low)/true_spectrum._radial_bins
smeared_spectrum = spectra.Spectra(true_spectrum._name+str(self._light_yield)+"_light_yield",
true_spectrum._num_decays)
for time_bin in range(true_spectrum._time_bins):
mean_time = time_bin*time_step+0.5*time_step
for radial_bin in range(true_spectrum._radial_bins):
mean_radius = radial_bin*radial_step+0.5*radial_step
for energy_bin in range(true_spectrum._energy_bins):
mean_energy = energy_bin*energy_step+0.5*energy_step
sigma = self.get_energy_sigma(mean_energy)
entries = float(true_spectrum._data[energy_bin,
radial_bin,
time_bin])
if entries == 0:
continue # Bin Empty
lower_bin = self.floor_to_bin(mean_energy-num_sigma*sigma,
energy_step)+0.5*energy_step
upper_bin = self.ceil_to_bin(mean_energy+num_sigma*sigma,
energy_step)-0.5*energy_step
if upper_bin > true_spectrum._energy_high:
upper_bin = true_spectrum._energy_high-0.5*energy_step
if lower_bin < true_spectrum._energy_low:
lower_bin = true_spectrum._energy_low+0.5*energy_step
weights = []
for energy in np.arange(lower_bin, upper_bin, energy_step):
weights.append(self.calc_gaussian(energy,
mean_energy,
sigma))
i = 0
tot_weight = np.array(weights).sum()
for energy in np.arange(lower_bin, upper_bin, energy_step):
try:
smeared_spectrum.fill(energy,
mean_radius,
mean_time,
entries*weights[i]/tot_weight)
except ValueError:
# Occurs when smeared energy is outside bin range
print "Warning: Smeared energy out of bounds. Skipping."
continue
i += 1
smeared_spectrum._raw_events = raw_events
return smeared_spectrum
def random_gaussian_radius_spectra(self, true_spectrum):
""" Smears the radius of a spectra object by generating a
number of random points from a Gaussian pdf generated for
that bin. The number of points generated is equivalent
to the number of entries in that bin.
Args:
true_spectrum (spectra): spectrum to be smeared
Returns:
A smeared spectra object.
"""
raw_events = true_spectrum._raw_events
energy_step = (true_spectrum._energy_high-true_spectrum._energy_low)/true_spectrum._energy_bins
time_step = (true_spectrum._time_high-true_spectrum._time_low)/true_spectrum._time_bins
radial_step = (true_spectrum._radial_high-true_spectrum._radial_low)/true_spectrum._radial_bins
smeared_spectrum = spectra.Spectra(true_spectrum._name+str(self._position_resolution)+"_position_resolution",
true_spectrum._num_decays)
for time_bin in range(true_spectrum._time_bins):
mean_time = time_bin*time_step+0.5*time_step
for energy_bin in range(true_spectrum._energy_bins):
mean_energy = energy_bin*energy_step+0.5*energy_step
for radial_bin in range(true_spectrum._radial_bins):
mean_radius = radial_bin*radial_step+0.5*radial_step
entries = true_spectrum._data[energy_bin,
radial_bin,
time_bin]
for i in range(int(entries)):
try:
smeared_spectrum.fill(mean_energy,
np.fabs(np.random.normal(mean_radius,
self._position_resolution)),
mean_time)
except ValueError:
# Occurs when smeared radius is outside bin range
print "Warning: Smeared radius out of bounds. Skipping."
continue
smeared_spectrum._raw_events = raw_events
return smeared_spectrum
def weight_gaussian_radius_spectra(self, true_spectrum, num_sigma=5.):
""" Smears the radius of a spectra object by calculating a Gaussian pdf
for each bin and applies a weight to the bin and corresponding bins a
default 5 sigma apart.
Args:
true_spectrum (spectra): spectrum to be smeared
num_sigma (float): Width of window to apply the weight method.
Default is 5.
Returns:
A smeared spectra object.
"""
raw_events = true_spectrum._raw_events
energy_step = (true_spectrum._energy_high-true_spectrum._energy_low)/true_spectrum._energy_bins
time_step = (true_spectrum._time_high-true_spectrum._time_low)/true_spectrum._time_bins
radial_step = (true_spectrum._radial_high-true_spectrum._radial_low)/true_spectrum._radial_bins
smeared_spectrum = spectra.Spectra(true_spectrum._name+str(self._position_resolution)+"_position_resolution",
true_spectrum._num_decays)
for time_bin in range(true_spectrum._time_bins):
mean_time = time_bin*time_step+0.5*time_step
for energy_bin in range(true_spectrum._energy_bins):
mean_energy = energy_bin*energy_step+0.5*energy_step
for radial_bin in range(true_spectrum._radial_bins):
mean_radius = radial_bin*radial_step+0.5*radial_step
entries = float(true_spectrum._data[energy_bin,
radial_bin,
time_bin])
if entries == 0:
continue # Bin Empty
lower_bin = self.floor_to_bin(mean_radius-num_sigma*self._position_resolution,
radial_step)+0.5*radial_step
upper_bin = self.ceil_to_bin(mean_radius+num_sigma*self._position_resolution,
radial_step)-0.5*radial_step
if upper_bin > true_spectrum._radial_high:
                        upper_bin = true_spectrum._radial_high-0.5*radial_step
if lower_bin < true_spectrum._radial_low:
                        lower_bin = true_spectrum._radial_low+0.5*radial_step
weights = []
for radius in np.arange(lower_bin, upper_bin, radial_step):
weights.append(self.calc_gaussian(radius,
mean_radius,
self._position_resolution))
weight_tot = np.array(weights).sum()
i = 0
for radius in np.arange(lower_bin, upper_bin, radial_step):
try:
smeared_spectrum.fill(mean_energy,
radius,
mean_time,
entries*weights[i]/weight_tot)
except ValueError:
# Occurs when smeared radius is outside bin range
print "Warning: Smeared radius out of bounds. Skipping."
continue
i += 1
smeared_spectrum._raw_events = raw_events
return smeared_spectrum
|
EdLeming/echidna
|
echidna/core/smear.py
|
Python
|
mit
| 17,880
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
from copy import deepcopy
import os
from .file_utils import read_yaml_file, read_yaml_string
from collections import MutableMapping
import logging
log = logging.getLogger('configmate.config')
CONFIG_INCLUDE_KEY = 'include'
CONFIG_INCLUDE_OPTIONAL_KEY = 'include_optional'
class ConfigException(Exception):
pass
class ConfigLoadException(ConfigException):
pass
class ConfigLoadFormatException(ConfigLoadException):
pass
class ConfigFileLoader(object):
def __init__(self, file_name, path_list = None, initial_config = False):
self._file_name = file_name
self._path_list = path_list if path_list else ['']
self._initial_config = initial_config
self._loaded_file_list = []
@property
def name_list(self):
return self._loaded_file_list or [self._file_name]
@property
def names(self):
return ', '.join(["'{}'".format(name) for name in self.name_list])
@property
def path_list(self):
return self._path_list
@property
def initial_config(self):
return self._initial_config
def get_data(self):
config_data_list = []
path_list = self._path_list
if self._initial_config:
path_list = path_list[:1]
self._loaded_file_list = []
for path in reversed(path_list):
file_name = os.path.join(path, self._file_name)
try:
config_data = read_yaml_file(file_name)
except IOError:
continue
if not isinstance(config_data, dict):
raise ConfigLoadFormatException(
"Config file should contain a valid YAML dictionary: '{}'".format(
file_name
)
)
config_data_list.append(config_data)
self._loaded_file_list.append(file_name)
if not config_data_list:
raise ConfigLoadException(
"Unable to load config: '{}'".format(
self._file_name
)
)
return config_data_list
class ConfigStringLoader(object):
CONFIG_NAME = '<string>'
def __init__(self, config_string, path_list = None, initial_config = False):
self._config_string = config_string
self._path_list = path_list
self._initial_config = initial_config
@property
def name_list(self):
return [self.CONFIG_NAME]
@property
def names(self):
return self.CONFIG_NAME
@property
def path_list(self):
return self._path_list
@property
def initial_config(self):
return self._initial_config
def get_data(self):
config_data = read_yaml_string(self._config_string)
if config_data is None:
raise ConfigLoadException(
"Unable to load config: {}".format(
self.CONFIG_NAME
)
)
if not isinstance(config_data, dict):
raise ConfigLoadFormatException(
"Config file should contain a valid YAML dictionary: {}".format(
self.CONFIG_NAME
)
)
return [config_data]
class Config(MutableMapping):
def __init__(
self,
from_file = None,
from_string = None,
path_list = None,
defaults = None
):
self._path_list = path_list
if defaults and not isinstance(defaults, dict):
raise ConfigException('Config defaults must be a dict')
self._config = deepcopy(defaults) if defaults else {}
self._uuid_cache = {}
source_list = []
if from_file is not None:
config_loader = ConfigFileLoader(from_file, path_list = path_list, initial_config = True)
self._read_config(config_loader)
source_list.append(config_loader.names)
if from_string is not None:
config_loader = ConfigStringLoader(from_string, path_list = path_list, initial_config = True)
self._read_config(config_loader)
source_list.append(config_loader.names)
self._sources = ','.join(source_list)
@property
def sources(self):
return self._sources
def expand_parameter(self, value):
# TODO process values
return value
def __getitem__(self, key):
if key in self._config:
return self.expand_parameter(self._config[key])
raise KeyError("'" + key + "'")
def __setitem__(self, key, value):
self._config[key] = value
def __delitem__(self, key):
if key in self._config:
del self._config[key]
else:
raise KeyError("'" + key + "'")
def __iter__(self):
return iter(self._config)
def __len__(self):
return len(self._config)
def _read_config(self, config_loader):
self._read_config_core(config_loader)
if config_loader.initial_config:
log.info("Loaded config: %s", config_loader.names)
for config_data in config_loader.get_data():
self._read_config_includes(CONFIG_INCLUDE_KEY, config_data, config_loader)
self._read_config_includes(CONFIG_INCLUDE_OPTIONAL_KEY, config_data, config_loader, optional = True)
def _read_config_core(self, config_loader):
config_data_list = config_loader.get_data()
        for config_data in config_data_list:
            config_data.pop(CONFIG_INCLUDE_KEY, None)
            config_data.pop(CONFIG_INCLUDE_OPTIONAL_KEY, None)
            self._config.update(config_data)
def _read_config_includes(self, include_key, config_data, config_loader, optional = False):
if include_key in config_data:
if not isinstance(config_data[include_key], list):
raise ConfigLoadFormatException(
"Config file {}includes should contain a valid YAML list: {}".format(
'optional ' if optional else '',
config_loader.names
)
)
for include_file_name in config_data[include_key]:
include_file_name_full = self.expand_parameter(include_file_name)
try:
include_config_loader = ConfigFileLoader(
include_file_name_full,
path_list = config_loader.path_list
)
self._read_config(include_config_loader)
except ConfigLoadFormatException:
raise
except ConfigLoadException:
if optional:
log.debug("Skipped optional config: '%s'", include_file_name_full)
else:
raise
else:
if optional:
log.info(
"Included optional config: %s into %s",
include_config_loader.names,
config_loader.names
)
def __str__(self):
return unicode(self).decode('utf-8')
def __unicode__(self):
return unicode(self._config)
def __repr__(self):
return "{}(\n{}\n)".format(
self.__class__.__name__,
str(self)
)
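# Illustrative usage sketch (file names and keys below are assumptions; only
# the include/include_optional keys come from this module):
#
#     # app.yaml
#     include:
#       - base.yaml
#     include_optional:
#       - local.yaml
#     log_level: debug
#
#     config = Config(from_file='app.yaml', path_list=['/etc/app', '.'])
#     print(config['log_level'])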
|
wamonite/configmate
|
configmate/config.py
|
Python
|
mit
| 7,565
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from wagtail.wagtailimages.utils import get_fill_filter_spec_migrations
class Migration(migrations.Migration):
dependencies = [
('images', '0005_auto_20161108_2120'),
]
forward, reverse = get_fill_filter_spec_migrations('images', 'AttributedRendition')
operations = [
migrations.RunPython(forward, reverse),
]
|
CIGIHub/greyjay
|
greyjay/images/migrations/0006_auto_20161108_2123.py
|
Python
|
mit
| 457
|
# ===The application coordinator: it starts the application and lets it run===
"""
It instantiates two objects:
1. a **tester** to run the unit tests
2. an **app**, whose run() method is then called
"""
__author__ = 'cosmin'
import model
import tester
import ui
from tester.tester import Tester
from ui.calculator import Calculator
if __name__ == '__main__':
t = Tester()
t.test()
app = Calculator()
app.run()
|
rusucosmin/courses
|
ubb/logic/BaseCalculator/app_coordinator.py
|
Python
|
mit
| 424
|
# Copyright (c) 2008, Aldo Cortesi. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import division
import array
import contextlib
import inspect
import traceback
import warnings
from xcffib.xproto import EventMask, StackMode, SetMode
import xcffib.xproto
from . import command
from . import utils
from . import hook
from .log_utils import logger
# ICCM Constants
NoValue = 0x0000
XValue = 0x0001
YValue = 0x0002
WidthValue = 0x0004
HeightValue = 0x0008
AllValues = 0x000F
XNegative = 0x0010
YNegative = 0x0020
USPosition = (1 << 0)
USSize = (1 << 1)
PPosition = (1 << 2)
PSize = (1 << 3)
PMinSize = (1 << 4)
PMaxSize = (1 << 5)
PResizeInc = (1 << 6)
PAspect = (1 << 7)
PBaseSize = (1 << 8)
PWinGravity = (1 << 9)
PAllHints = (PPosition | PSize | PMinSize | PMaxSize | PResizeInc | PAspect)
InputHint = (1 << 0)
StateHint = (1 << 1)
IconPixmapHint = (1 << 2)
IconWindowHint = (1 << 3)
IconPositionHint = (1 << 4)
IconMaskHint = (1 << 5)
WindowGroupHint = (1 << 6)
MessageHint = (1 << 7)
UrgencyHint = (1 << 8)
AllHints = (InputHint | StateHint | IconPixmapHint | IconWindowHint |
IconPositionHint | IconMaskHint | WindowGroupHint | MessageHint |
UrgencyHint)
WithdrawnState = 0
DontCareState = 0
NormalState = 1
ZoomState = 2
IconicState = 3
InactiveState = 4
RectangleOut = 0
RectangleIn = 1
RectanglePart = 2
VisualNoMask = 0x0
VisualIDMask = 0x1
VisualScreenMask = 0x2
VisualDepthMask = 0x4
VisualClassMask = 0x8
VisualRedMaskMask = 0x10
VisualGreenMaskMask = 0x20
VisualBlueMaskMask = 0x40
VisualColormapSizeMask = 0x80
VisualBitsPerRGBMask = 0x100
VisualAllMask = 0x1FF
ReleaseByFreeingColormap = 1
BitmapSuccess = 0
BitmapOpenFailed = 1
BitmapFileInvalid = 2
BitmapNoMemory = 3
XCSUCCESS = 0
XCNOMEM = 1
XCNOENT = 2
# float states
NOT_FLOATING = 1 # not floating
FLOATING = 2
MAXIMIZED = 3
FULLSCREEN = 4
TOP = 5
MINIMIZED = 6
_NET_WM_STATE_REMOVE = 0
_NET_WM_STATE_ADD = 1
_NET_WM_STATE_TOGGLE = 2
def _geometry_getter(attr):
def get_attr(self):
if getattr(self, "_" + attr) is None:
g = self.window.get_geometry()
# trigger the geometry setter on all these
self.x = g.x
self.y = g.y
self.width = g.width
self.height = g.height
return getattr(self, "_" + attr)
return get_attr
def _geometry_setter(attr):
def f(self, value):
if not isinstance(value, int):
frame = inspect.currentframe()
stack_trace = traceback.format_stack(frame)
logger.error("!!!! setting %s to a non-int %s; please report this!", attr, value)
logger.error(''.join(stack_trace[:-1]))
value = int(value)
setattr(self, "_" + attr, value)
return f
def _float_getter(attr):
def getter(self):
if self._float_info[attr] is not None:
return self._float_info[attr]
# we don't care so much about width or height, if not set, default to the window width/height
if attr in ('width', 'height'):
return getattr(self, attr)
raise AttributeError("Floating not yet configured yet")
return getter
def _float_setter(attr):
def setter(self, value):
self._float_info[attr] = value
return setter
class _Window(command.CommandObject):
_windowMask = None # override in child class
def __init__(self, window, qtile):
self.window, self.qtile = window, qtile
self.hidden = True
self.group = None
self.icons = {}
window.set_attribute(eventmask=self._windowMask)
self._float_info = {
'x': None,
'y': None,
'width': None,
'height': None,
}
try:
g = self.window.get_geometry()
self._x = g.x
self._y = g.y
self._width = g.width
self._height = g.height
self._float_info['width'] = g.width
self._float_info['height'] = g.height
except xcffib.xproto.DrawableError:
# Whoops, we were too early, so let's ignore it for now and get the
# values on demand.
self._x = None
self._y = None
self._width = None
self._height = None
self.borderwidth = 0
self.bordercolor = None
self.name = "<no name>"
self.strut = None
self.state = NormalState
self.window_type = "normal"
self._float_state = NOT_FLOATING
self._demands_attention = False
self.hints = {
'input': True,
'icon_pixmap': None,
'icon_window': None,
'icon_x': 0,
'icon_y': 0,
'icon_mask': 0,
'window_group': None,
'urgent': False,
# normal or size hints
'width_inc': None,
'height_inc': None,
'base_width': 0,
'base_height': 0,
}
self.updateHints()
x = property(fset=_geometry_setter("x"), fget=_geometry_getter("x"))
y = property(fset=_geometry_setter("y"), fget=_geometry_getter("y"))
width = property(
fset=_geometry_setter("width"),
fget=_geometry_getter("width")
)
height = property(
fset=_geometry_setter("height"),
fget=_geometry_getter("height")
)
float_x = property(
fset=_float_setter("x"),
fget=_float_getter("x")
)
float_y = property(
fset=_float_setter("y"),
fget=_float_getter("y")
)
float_width = property(
fset=_float_setter("width"),
fget=_float_getter("width")
)
float_height = property(
fset=_float_setter("height"),
fget=_float_getter("height")
)
@property
def has_focus(self):
return self == self.qtile.currentWindow
def updateName(self):
try:
self.name = self.window.get_name()
except (xcffib.xproto.WindowError, xcffib.xproto.AccessError):
return
hook.fire('client_name_updated', self)
def updateHints(self):
"""Update the local copy of the window's WM_HINTS
See http://tronche.com/gui/x/icccm/sec-4.html#WM_HINTS
"""
try:
h = self.window.get_wm_hints()
normh = self.window.get_wm_normal_hints()
except (xcffib.xproto.WindowError, xcffib.xproto.AccessError):
return
# FIXME
# h values
# {
# 'icon_pixmap': 4194337,
# 'icon_window': 0,
# 'icon_mask': 4194340,
# 'icon_y': 0,
# 'input': 1,
# 'icon_x': 0,
# 'window_group': 4194305
# 'initial_state': 1,
# 'flags': set(['StateHint',
# 'IconMaskHint',
# 'WindowGroupHint',
# 'InputHint',
# 'UrgencyHint',
# 'IconPixmapHint']),
# }
if normh:
normh.pop('flags')
normh['min_width'] = max(0, normh.get('min_width', 0))
normh['min_height'] = max(0, normh.get('min_height', 0))
if not normh['base_width'] and \
normh['min_width'] and \
normh['width_inc']:
# seems xcffib does ignore base width :(
normh['base_width'] = (
normh['min_width'] % normh['width_inc']
)
if not normh['base_height'] and \
normh['min_height'] and \
normh['height_inc']:
# seems xcffib does ignore base height :(
normh['base_height'] = (
normh['min_height'] % normh['height_inc']
)
self.hints.update(normh)
if h and 'UrgencyHint' in h['flags']:
if self.qtile.currentWindow != self:
self.hints['urgent'] = True
hook.fire('client_urgent_hint_changed', self)
elif self.urgent:
self.hints['urgent'] = False
hook.fire('client_urgent_hint_changed', self)
if getattr(self, 'group', None):
self.group.layoutAll()
return
def updateState(self):
triggered = ['urgent']
if self.qtile.config.auto_fullscreen:
triggered.append('fullscreen')
state = self.window.get_net_wm_state()
logger.debug('_NET_WM_STATE: %s', state)
for s in triggered:
setattr(self, s, (s in state))
@property
def urgent(self):
return self.hints['urgent'] or self._demands_attention
@urgent.setter
def urgent(self, val):
self._demands_attention = val
# TODO unset window hint as well?
if not val:
self.hints['urgent'] = False
def info(self):
if self.group:
group = self.group.name
else:
group = None
return dict(
name=self.name,
x=self.x,
y=self.y,
width=self.width,
height=self.height,
group=group,
id=self.window.wid,
floating=self._float_state != NOT_FLOATING,
float_info=self._float_info,
maximized=self._float_state == MAXIMIZED,
minimized=self._float_state == MINIMIZED,
fullscreen=self._float_state == FULLSCREEN
)
@property
def state(self):
return self.window.get_wm_state()[0]
@state.setter
def state(self, val):
if val in (WithdrawnState, NormalState, IconicState):
self.window.set_property('WM_STATE', [val, 0])
def setOpacity(self, opacity):
if 0.0 <= opacity <= 1.0:
real_opacity = int(opacity * 0xffffffff)
self.window.set_property('_NET_WM_WINDOW_OPACITY', real_opacity)
else:
return
def getOpacity(self):
opacity = self.window.get_property(
"_NET_WM_WINDOW_OPACITY", unpack=int
)
if not opacity:
return 1.0
else:
value = opacity[0]
# 2 decimal places
as_float = round(value / 0xffffffff, 2)
return as_float
opacity = property(getOpacity, setOpacity)
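    # Worked example of the mapping above: setOpacity(0.5) stores
    # int(0.5 * 0xffffffff) == 0x7fffffff in _NET_WM_WINDOW_OPACITY, and
    # getOpacity() rounds that back to 0.5 (two decimal places).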
def kill(self):
if "WM_DELETE_WINDOW" in self.window.get_wm_protocols():
data = [
self.qtile.conn.atoms["WM_DELETE_WINDOW"],
xcffib.xproto.Time.CurrentTime,
0,
0,
0
]
u = xcffib.xproto.ClientMessageData.synthetic(data, "I" * 5)
e = xcffib.xproto.ClientMessageEvent.synthetic(
format=32,
window=self.window.wid,
type=self.qtile.conn.atoms["WM_PROTOCOLS"],
data=u
)
self.window.send_event(e)
else:
self.window.kill_client()
self.qtile.conn.flush()
def hide(self):
# We don't want to get the UnmapNotify for this unmap
with self.disableMask(xcffib.xproto.EventMask.StructureNotify):
self.window.unmap()
self.hidden = True
def unhide(self):
self.window.map()
self.state = NormalState
self.hidden = False
@contextlib.contextmanager
def disableMask(self, mask):
self._disableMask(mask)
yield
self._resetMask()
def _disableMask(self, mask):
self.window.set_attribute(
eventmask=self._windowMask & (~mask)
)
def _resetMask(self):
self.window.set_attribute(
eventmask=self._windowMask
)
def place(self, x, y, width, height, borderwidth, bordercolor,
above=False, force=False, margin=None):
"""Places the window at the specified location with the given size.
        If force is False, then it tries to obey hints
"""
# TODO: self.x/y/height/width are updated BEFORE
# place is called, so there's no way to know if only
# the position is changed, so we are sending
# the ConfigureNotify every time place is called
#
# # if position change and size don't
# # send a configure notify. See ICCCM 4.2.3
# send_notify = False
# if (self.x != x or self.y != y) and \
# (self.width == width and self.height == height):
# send_notify = True
# #for now, we just:
send_notify = True
# Adjust the placement to account for layout margins, if there are any.
if margin is not None:
x += margin
y += margin
width -= margin * 2
height -= margin * 2
# save x and y float offset
if self.group is not None and self.group.screen is not None:
self.float_x = x - self.group.screen.x
self.float_y = y - self.group.screen.y
self.x = x
self.y = y
self.width = width
self.height = height
self.borderwidth = borderwidth
self.bordercolor = bordercolor
kwarg = dict(
x=x,
y=y,
width=width,
height=height,
borderwidth=borderwidth,
)
if above:
kwarg['stackmode'] = StackMode.Above
self.window.configure(**kwarg)
if send_notify:
self.send_configure_notify(x, y, width, height)
if bordercolor is not None:
self.window.set_attribute(borderpixel=bordercolor)
def send_configure_notify(self, x, y, width, height):
"""Send a synthetic ConfigureNotify"""
window = self.window.wid
above_sibling = False
override_redirect = False
event = xcffib.xproto.ConfigureNotifyEvent.synthetic(
event=window,
window=window,
above_sibling=above_sibling,
x=x,
y=y,
width=width,
height=height,
border_width=self.borderwidth,
override_redirect=override_redirect
)
self.window.send_event(event, mask=EventMask.StructureNotify)
def can_steal_focus(self):
return self.window.get_wm_type() != 'notification'
def focus(self, warp):
# Workaround for misbehaving java applications (actually it might be
# qtile who misbehaves by not implementing some X11 protocol correctly)
#
# See this xmonad issue for more information on the problem:
# http://code.google.com/p/xmonad/issues/detail?id=177
#
# 'sun-awt-X11-XFramePeer' is a main window of a java application.
# Only send WM_TAKE_FOCUS not FocusIn
# 'sun-awt-X11-XDialogPeer' is a dialog of a java application. Do not
# send any event.
cls = self.window.get_wm_class() or ''
is_java_main = 'sun-awt-X11-XFramePeer' in cls
is_java_dialog = 'sun-awt-X11-XDialogPeer' in cls
is_java = is_java_main or is_java_dialog
if not self.hidden:
# Never send TAKE_FOCUS on java *dialogs*
if not is_java_dialog and \
"WM_TAKE_FOCUS" in self.window.get_wm_protocols():
data = [
self.qtile.conn.atoms["WM_TAKE_FOCUS"],
xcffib.xproto.Time.CurrentTime,
0,
0,
0
]
u = xcffib.xproto.ClientMessageData.synthetic(data, "I" * 5)
e = xcffib.xproto.ClientMessageEvent.synthetic(
format=32,
window=self.window.wid,
type=self.qtile.conn.atoms["WM_PROTOCOLS"],
data=u
)
self.window.send_event(e)
# Never send FocusIn to java windows
if not is_java and self.hints['input']:
self.window.set_input_focus()
try:
if warp and self.qtile.config.cursor_warp:
self.window.warp_pointer(self.width // 2, self.height // 2)
except AttributeError:
pass
if self.urgent:
self.urgent = False
atom = self.qtile.conn.atoms["_NET_WM_STATE_DEMANDS_ATTENTION"]
state = list(self.window.get_property('_NET_WM_STATE', 'ATOM',
unpack=int))
if atom in state:
state.remove(atom)
self.window.set_property('_NET_WM_STATE', state)
self.qtile.root.set_property("_NET_ACTIVE_WINDOW", self.window.wid)
hook.fire("client_focus", self)
def _items(self, name, sel):
return None
def _select(self, name, sel):
return None
def cmd_focus(self, warp=None):
"""Focuses the window."""
if warp is None:
warp = self.qtile.config.cursor_warp
self.focus(warp=warp)
def cmd_info(self):
"""Returns a dictionary of info for this object"""
return self.info()
def cmd_inspect(self):
"""Tells you more than you ever wanted to know about a window"""
a = self.window.get_attributes()
attrs = {
"backing_store": a.backing_store,
"visual": a.visual,
"class": a._class,
"bit_gravity": a.bit_gravity,
"win_gravity": a.win_gravity,
"backing_planes": a.backing_planes,
"backing_pixel": a.backing_pixel,
"save_under": a.save_under,
"map_is_installed": a.map_is_installed,
"map_state": a.map_state,
"override_redirect": a.override_redirect,
# "colormap": a.colormap,
"all_event_masks": a.all_event_masks,
"your_event_mask": a.your_event_mask,
"do_not_propagate_mask": a.do_not_propagate_mask
}
props = self.window.list_properties()
normalhints = self.window.get_wm_normal_hints()
hints = self.window.get_wm_hints()
protocols = []
for i in self.window.get_wm_protocols():
protocols.append(i)
state = self.window.get_wm_state()
return dict(
attributes=attrs,
properties=props,
name=self.window.get_name(),
wm_class=self.window.get_wm_class(),
wm_window_role=self.window.get_wm_window_role(),
wm_type=self.window.get_wm_type(),
wm_transient_for=self.window.get_wm_transient_for(),
protocols=protocols,
wm_icon_name=self.window.get_wm_icon_name(),
wm_client_machine=self.window.get_wm_client_machine(),
normalhints=normalhints,
hints=hints,
state=state,
float_info=self._float_info
)
class Internal(_Window):
"""An internal window, that should not be managed by qtile"""
_windowMask = EventMask.StructureNotify | \
EventMask.PropertyChange | \
EventMask.EnterWindow | \
EventMask.FocusChange | \
EventMask.Exposure | \
EventMask.ButtonPress | \
EventMask.ButtonRelease | \
EventMask.KeyPress
@classmethod
def create(cls, qtile, x, y, width, height, opacity=1.0):
win = qtile.conn.create_window(x, y, width, height)
win.set_property("QTILE_INTERNAL", 1)
i = Internal(win, qtile)
i.place(x, y, width, height, 0, None)
i.opacity = opacity
return i
def __repr__(self):
return "Internal(%r, %s)" % (self.name, self.window.wid)
def kill(self):
self.qtile.conn.conn.core.DestroyWindow(self.window.wid)
def cmd_kill(self):
self.kill()
class Static(_Window):
"""An internal window, that should not be managed by qtile"""
_windowMask = EventMask.StructureNotify | \
EventMask.PropertyChange | \
EventMask.EnterWindow | \
EventMask.FocusChange | \
EventMask.Exposure
def __init__(self, win, qtile, screen,
x=None, y=None, width=None, height=None):
_Window.__init__(self, win, qtile)
self.updateName()
self.conf_x = x
self.conf_y = y
self.conf_width = width
self.conf_height = height
self.x = x or 0
self.y = y or 0
self.width = width or 0
self.height = height or 0
self.screen = screen
if None not in (x, y, width, height):
self.place(x, y, width, height, 0, 0)
self.update_strut()
def handle_ConfigureRequest(self, e):
cw = xcffib.xproto.ConfigWindow
if self.conf_x is None and e.value_mask & cw.X:
self.x = e.x
if self.conf_y is None and e.value_mask & cw.Y:
self.y = e.y
if self.conf_width is None and e.value_mask & cw.Width:
self.width = e.width
if self.conf_height is None and e.value_mask & cw.Height:
self.height = e.height
self.place(
self.screen.x + self.x,
self.screen.y + self.y,
self.width,
self.height,
self.borderwidth,
self.bordercolor
)
return False
def update_strut(self):
strut = self.window.get_property(
"_NET_WM_STRUT_PARTIAL",
unpack=int
)
strut = strut or self.window.get_property(
"_NET_WM_STRUT",
unpack=int
)
strut = strut or (0, 0, 0, 0)
self.qtile.update_gaps(strut, self.strut)
self.strut = strut
def handle_PropertyNotify(self, e):
name = self.qtile.conn.atoms.get_name(e.atom)
if name in ("_NET_WM_STRUT_PARTIAL", "_NET_WM_STRUT"):
self.update_strut()
def __repr__(self):
return "Static(%r)" % self.name
class Window(_Window):
_windowMask = EventMask.StructureNotify | \
EventMask.PropertyChange | \
EventMask.EnterWindow | \
EventMask.FocusChange
# Set when this object is being retired.
defunct = False
def __init__(self, window, qtile):
_Window.__init__(self, window, qtile)
self._group = None
self.updateName()
# add to group by position according to _NET_WM_DESKTOP property
group = None
index = window.get_wm_desktop()
if index is not None and index < len(qtile.groups):
group = qtile.groups[index]
elif index is None:
transient_for = window.get_wm_transient_for()
win = qtile.windowMap.get(transient_for)
if win is not None:
group = win._group
if group is not None:
group.add(self)
self._group = group
if group != qtile.currentScreen.group:
self.hide()
# add window to the save-set, so it gets mapped when qtile dies
qtile.conn.conn.core.ChangeSaveSet(SetMode.Insert, self.window.wid)
self.update_wm_net_icon()
@property
def group(self):
return self._group
@group.setter
def group(self, group):
if group:
try:
self.window.set_property(
"_NET_WM_DESKTOP",
self.qtile.groups.index(group)
)
except xcffib.xproto.WindowError:
logger.exception("whoops, got error setting _NET_WM_DESKTOP, too early?")
self._group = group
@property
def edges(self):
return (self.x, self.y, self.x + self.width, self.y + self.height)
@property
def floating(self):
return self._float_state != NOT_FLOATING
@floating.setter
def floating(self, do_float):
if do_float and self._float_state == NOT_FLOATING:
if self.group and self.group.screen:
screen = self.group.screen
self._enablefloating(
screen.x + self.float_x, screen.y + self.float_y, self.float_width, self.float_height
)
else:
# if we are setting floating early, e.g. from a hook, we don't have a screen yet
self._float_state = FLOATING
elif (not do_float) and self._float_state != NOT_FLOATING:
if self._float_state == FLOATING:
# store last size
self.float_width = self.width
self.float_height = self.height
self._float_state = NOT_FLOATING
self.group.mark_floating(self, False)
hook.fire('float_change')
def toggle_floating(self):
self.floating = not self.floating
def togglefloating(self):
warnings.warn("togglefloating is deprecated, use toggle_floating", DeprecationWarning)
self.toggle_floating()
def enablefloating(self):
warnings.warn("enablefloating is deprecated, use floating=True", DeprecationWarning)
self.floating = True
def disablefloating(self):
warnings.warn("disablefloating is deprecated, use floating=False", DeprecationWarning)
self.floating = False
@property
def fullscreen(self):
return self._float_state == FULLSCREEN
@fullscreen.setter
def fullscreen(self, do_full):
atom = set([self.qtile.conn.atoms["_NET_WM_STATE_FULLSCREEN"]])
prev_state = set(self.window.get_property('_NET_WM_STATE', 'ATOM', unpack=int))
def set_state(old_state, new_state):
if new_state != old_state:
self.window.set_property('_NET_WM_STATE', list(new_state))
if do_full:
screen = self.group.screen or \
self.qtile.find_closest_screen(self.x, self.y)
self._enablefloating(
screen.x,
screen.y,
screen.width,
screen.height,
new_float_state=FULLSCREEN
)
set_state(prev_state, prev_state | atom)
return
if self._float_state == FULLSCREEN:
# The order of calling set_state() and then
# setting self.floating = False is important
set_state(prev_state, prev_state - atom)
self.floating = False
return
def toggle_fullscreen(self):
self.fullscreen = not self.fullscreen
def togglefullscreen(self):
warnings.warn("togglefullscreen is deprecated, use toggle_fullscreen", DeprecationWarning)
self.toggle_fullscreen()
@property
def maximized(self):
return self._float_state == MAXIMIZED
@maximized.setter
def maximized(self, do_maximize):
if do_maximize:
screen = self.group.screen or \
self.qtile.find_closest_screen(self.x, self.y)
self._enablefloating(
screen.dx,
screen.dy,
screen.dwidth,
screen.dheight,
new_float_state=MAXIMIZED
)
else:
if self._float_state == MAXIMIZED:
self.floating = False
def enablemaximize(self, state=MAXIMIZED):
warnings.warn("enablemaximize is deprecated, use maximized=True", DeprecationWarning)
self.maximized = True
def toggle_maximize(self, state=MAXIMIZED):
self.maximized = not self.maximized
def togglemaximize(self):
warnings.warn("togglemaximize is deprecated, use toggle_maximize", DeprecationWarning)
self.toggle_maximize()
@property
def minimized(self):
return self._float_state == MINIMIZED
@minimized.setter
def minimized(self, do_minimize):
if do_minimize:
if self._float_state != MINIMIZED:
self._enablefloating(new_float_state=MINIMIZED)
else:
if self._float_state == MINIMIZED:
self.floating = False
def enableminimize(self):
warnings.warn("enableminimized is deprecated, use minimized=True", DeprecationWarning)
self.minimized = True
def toggle_minimize(self):
self.minimized = not self.minimized
def toggleminimize(self):
warnings.warn("toggleminimize is deprecated, use toggle_minimize", DeprecationWarning)
self.toggle_minimize()
def static(self, screen, x=None, y=None, width=None, height=None):
"""Makes this window a static window, attached to a Screen
If any of the arguments are left unspecified, the values given by the
window itself are used instead. So, for a window that's aware of its
appropriate size and location (like dzen), you don't have to specify
anything.
"""
self.defunct = True
screen = self.qtile.screens[screen]
if self.group:
self.group.remove(self)
s = Static(self.window, self.qtile, screen, x, y, width, height)
self.qtile.windowMap[self.window.wid] = s
hook.fire("client_managed", s)
return s
def tweak_float(self, x=None, y=None, dx=0, dy=0,
w=None, h=None, dw=0, dh=0):
        logger.debug("tweak_float: x=%s y=%s", x, y)
if x is not None:
self.x = x
self.x += dx
if y is not None:
self.y = y
self.y += dy
if w is not None:
self.width = w
self.width += dw
if h is not None:
self.height = h
self.height += dh
if self.height < 0:
self.height = 0
if self.width < 0:
self.width = 0
screen = self.qtile.find_closest_screen(self.x, self.y)
if self.group and screen is not None and screen != self.group.screen:
self.group.remove(self, force=True)
screen.group.add(self, force=True)
self.qtile.toScreen(screen.index)
self._reconfigure_floating()
def getsize(self):
return (self.width, self.height)
def getposition(self):
return (self.x, self.y)
def _reconfigure_floating(self, new_float_state=FLOATING):
if new_float_state == MINIMIZED:
self.state = IconicState
self.hide()
else:
width = max(self.width, self.hints.get('min_width', 0))
height = max(self.height, self.hints.get('min_height', 0))
if self.hints['base_width'] and self.hints['width_inc']:
width -= (width - self.hints['base_width']) % self.hints['width_inc']
if self.hints['base_height'] and self.hints['height_inc']:
height -= (height - self.hints['base_height']) % self.hints['height_inc']
print("placing", self.x, self.y, width, height)
self.place(
self.x, self.y,
width, height,
self.borderwidth,
self.bordercolor,
above=True,
)
if self._float_state != new_float_state:
self._float_state = new_float_state
if self.group: # may be not, if it's called from hook
self.group.mark_floating(self, True)
hook.fire('float_change')
def _enablefloating(self, x=None, y=None, w=None, h=None,
new_float_state=FLOATING):
if new_float_state != MINIMIZED:
self.x = x
self.y = y
self.width = w
self.height = h
self._reconfigure_floating(new_float_state=new_float_state)
def togroup(self, groupName=None):
"""Move window to a specified group"""
if groupName is None:
group = self.qtile.currentGroup
else:
group = self.qtile.groupMap.get(groupName)
if group is None:
raise command.CommandError("No such group: %s" % groupName)
if self.group is not group:
self.hide()
if self.group:
if self.group.screen:
# for floats remove window offset
self.x -= self.group.screen.x
self.group.remove(self)
if group.screen and self.x < group.screen.x:
self.x += group.screen.x
group.add(self)
def toscreen(self, index=None):
""" Move window to a specified screen, or the current screen. """
if index is None:
screen = self.qtile.currentScreen
else:
try:
screen = self.qtile.screens[index]
except IndexError:
raise command.CommandError('No such screen: %d' % index)
self.togroup(screen.group.name)
def match(self, wname=None, wmclass=None, role=None):
"""Match window against given attributes.
Parameters
==========
wname :
            matches against the window name or title, that is, any of
            ``_NET_WM_VISIBLE_NAME``, ``_NET_WM_NAME``, or ``WM_NAME``.
wmclass :
matches against any of the two values in the ``WM_CLASS`` property
role :
matches against the ``WM_WINDOW_ROLE`` property
"""
if not (wname or wmclass or role):
raise TypeError(
"Either a name, a wmclass or a role must be specified"
)
if wname and wname == self.name:
return True
try:
cliclass = self.window.get_wm_class()
if wmclass and cliclass and wmclass in cliclass:
return True
clirole = self.window.get_wm_window_role()
if role and clirole and role == clirole:
return True
except (xcffib.xproto.WindowError, xcffib.xproto.AccessError):
return False
return False
def handle_EnterNotify(self, e):
hook.fire("client_mouse_enter", self)
if self.qtile.config.follow_mouse_focus and \
self.group.currentWindow != self:
self.group.focus(self, False)
if self.group.screen and \
self.qtile.currentScreen != self.group.screen and \
self.qtile.config.follow_mouse_focus:
self.qtile.toScreen(self.group.screen.index, False)
return True
def handle_ConfigureRequest(self, e):
if self.qtile._drag and self.qtile.currentWindow == self:
# ignore requests while user is dragging window
return
if getattr(self, 'floating', False):
# only obey resize for floating windows
cw = xcffib.xproto.ConfigWindow
width = e.width if e.value_mask & cw.Width else self.width
height = e.height if e.value_mask & cw.Height else self.height
x = e.x if e.value_mask & cw.X else self.x
y = e.y if e.value_mask & cw.Y else self.y
else:
width, height, x, y = self.width, self.height, self.x, self.y
if self.group and self.group.screen:
self.place(
x, y,
width, height,
self.borderwidth, self.bordercolor,
)
self.updateState()
return False
def update_wm_net_icon(self):
"""Set a dict with the icons of the window"""
icon = self.window.get_property('_NET_WM_ICON', 'CARDINAL')
if not icon:
return
icon = list(map(ord, icon.value))
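        # _NET_WM_ICON is a flat array of 32-bit cardinals: width, height,
        # then width*height ARGB pixels, repeated for each icon size. The
        # property arrives here as raw bytes, so size[0]/size[4] are the low
        # bytes of width and height, and each pixel spans 4 bytes (with the
        # alpha channel at offset 3 on little-endian, used below to
        # premultiply the colour channels).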
icons = {}
while True:
if not icon:
break
size = icon[:8]
if len(size) != 8 or not size[0] or not size[4]:
break
icon = icon[8:]
width = size[0]
height = size[4]
next_pix = width * height * 4
data = icon[:next_pix]
arr = array.array("B", data)
for i in range(0, len(arr), 4):
mult = arr[i + 3] / 255.
arr[i + 0] = int(arr[i + 0] * mult)
arr[i + 1] = int(arr[i + 1] * mult)
arr[i + 2] = int(arr[i + 2] * mult)
icon = icon[next_pix:]
icons["%sx%s" % (width, height)] = arr
self.icons = icons
hook.fire("net_wm_icon_change", self)
def handle_ClientMessage(self, event):
atoms = self.qtile.conn.atoms
opcode = event.type
data = event.data
if atoms["_NET_WM_STATE"] == opcode:
prev_state = self.window.get_property(
'_NET_WM_STATE',
'ATOM',
unpack=int
)
current_state = set(prev_state)
action = data.data32[0]
for prop in (data.data32[1], data.data32[2]):
if not prop:
# skip 0
continue
if action == _NET_WM_STATE_REMOVE:
current_state.discard(prop)
elif action == _NET_WM_STATE_ADD:
current_state.add(prop)
elif action == _NET_WM_STATE_TOGGLE:
current_state ^= set([prop]) # toggle :D
self.window.set_property('_NET_WM_STATE', list(current_state))
elif atoms["_NET_ACTIVE_WINDOW"] == opcode:
source = data.data32[0]
if source == 2: # XCB_EWMH_CLIENT_SOURCE_TYPE_NORMAL
logger.info("Focusing window by pager")
self.qtile.currentScreen.setGroup(self.group)
self.group.focus(self)
else: # XCB_EWMH_CLIENT_SOURCE_TYPE_OTHER
focus_behavior = self.qtile.config.focus_on_window_activation
if focus_behavior == "focus" or (focus_behavior == "smart" and self.group.screen and self.group.screen == self.qtile.currentScreen):
logger.info("Focusing window")
self.qtile.currentScreen.setGroup(self.group)
self.group.focus(self)
elif focus_behavior == "urgent" or (focus_behavior == "smart" and not self.group.screen):
logger.info("Setting urgent flag for window")
self.urgent = True
else:
logger.info("Ignoring focus request")
def handle_PropertyNotify(self, e):
name = self.qtile.conn.atoms.get_name(e.atom)
logger.debug("PropertyNotifyEvent: %s", name)
if name == "WM_TRANSIENT_FOR":
pass
elif name == "WM_HINTS":
self.updateHints()
elif name == "WM_NORMAL_HINTS":
self.updateHints()
elif name == "WM_NAME":
self.updateName()
elif name == "_NET_WM_NAME":
self.updateName()
elif name == "_NET_WM_VISIBLE_NAME":
self.updateName()
elif name == "WM_ICON_NAME":
pass
elif name == "_NET_WM_ICON_NAME":
pass
elif name == "_NET_WM_ICON":
self.update_wm_net_icon()
elif name == "ZOOM":
pass
elif name == "_NET_WM_WINDOW_OPACITY":
pass
elif name == "WM_STATE":
pass
elif name == "_NET_WM_STATE":
self.updateState()
elif name == "WM_PROTOCOLS":
pass
elif name == "_NET_WM_DESKTOP":
# Some windows set the state(fullscreen) when starts,
# updateState is here because the group and the screen
# are set when the property is emitted
# self.updateState()
self.updateState()
elif name == "_NET_WM_USER_TIME":
if not self.qtile.config.follow_mouse_focus and \
self.group.currentWindow != self:
self.group.focus(self, False)
else:
logger.info("Unknown window property: %s", name)
return False
def _items(self, name):
if name == "group":
return (True, None)
elif name == "layout":
return (True, list(range(len(self.group.layouts))))
elif name == "screen":
return (True, None)
def _select(self, name, sel):
if name == "group":
return self.group
elif name == "layout":
if sel is None:
return self.group.layout
else:
return utils.lget(self.group.layouts, sel)
elif name == "screen":
return self.group.screen
def __repr__(self):
return "Window(%r)" % self.name
def cmd_static(self, screen, x, y, width, height):
self.static(screen, x, y, width, height)
def cmd_kill(self):
"""Kill this window
Try to do this politely if the client support
this, otherwise be brutal.
"""
self.kill()
def cmd_togroup(self, groupName=None):
"""Move window to a specified group.
If groupName is not specified, we assume the current group
Examples
========
Move window to current group::
togroup()
Move window to group "a"::
togroup("a")
"""
self.togroup(groupName)
def cmd_toscreen(self, index=None):
"""Move window to a specified screen.
If index is not specified, we assume the current screen
Examples
========
Move window to current screen::
toscreen()
Move window to screen 0::
toscreen(0)
"""
self.toscreen(index)
def cmd_move_floating(self, dx, dy, curx, cury):
"""Move window by dx and dy"""
self.tweak_float(dx=dx, dy=dy)
def cmd_resize_floating(self, dw, dh, curx, cury):
"""Add dw and dh to size of window"""
self.tweak_float(dw=dw, dh=dh)
def cmd_set_position_floating(self, x, y, curx, cury):
"""Move window to x and y"""
self.tweak_float(x=x, y=y)
def cmd_set_size_floating(self, w, h, curx, cury):
"""Set window dimensions to w and h"""
self.tweak_float(w=w, h=h)
def cmd_get_position(self):
return self.getposition()
def cmd_get_size(self):
return self.getsize()
def cmd_toggle_floating(self):
self.toggle_floating()
def cmd_enable_floating(self):
self.floating = True
def cmd_disable_floating(self):
self.floating = False
def cmd_toggle_maximize(self):
self.toggle_maximize()
def cmd_enable_maximize(self):
self.maximize = True
def cmd_disable_maximize(self):
self.maximize = False
def cmd_toggle_fullscreen(self):
self.toggle_fullscreen()
def cmd_enable_fullscreen(self):
self.fullscreen = True
def cmd_disable_fullscreen(self):
self.fullscreen = False
def cmd_toggle_minimize(self):
self.toggle_minimize()
def cmd_enable_minimize(self):
self.minimize = True
def cmd_disable_minimize(self):
self.minimize = False
def cmd_bring_to_front(self):
if self.floating:
self.window.configure(stackmode=StackMode.Above)
else:
            self._reconfigure_floating()  # automatically above
def cmd_match(self, *args, **kwargs):
return self.match(*args, **kwargs)
def cmd_opacity(self, opacity):
if opacity < .1:
self.opacity = .1
elif opacity > 1:
self.opacity = 1
else:
self.opacity = opacity
def cmd_down_opacity(self):
if self.opacity > .2:
# don't go completely clear
self.opacity -= .1
else:
self.opacity = .1
def cmd_up_opacity(self):
if self.opacity < .9:
self.opacity += .1
else:
self.opacity = 1
def _is_in_window(self, x, y, window):
return (window.edges[0] <= x <= window.edges[2] and
window.edges[1] <= y <= window.edges[3])
def cmd_set_position(self, dx, dy, curx, cury):
if self.floating:
self.tweak_float(dx, dy)
return
for window in self.group.windows:
if window == self or window.floating:
continue
if self._is_in_window(curx, cury, window):
clients = self.group.layout.clients
index1 = clients.index(self)
index2 = clients.index(window)
clients[index1], clients[index2] = clients[index2], clients[index1]
self.group.layout.focused = index2
self.group.layoutAll()
break
|
kynikos/qtile
|
libqtile/window.py
|
Python
|
mit
| 45,570
|
def safe_pawns(pawns):
safePawns = 0
for pawn in pawns:
col = pawn[0]
row = pawn[1]
defenseRow = str(int(row)-1)
defenseLeft = chr(ord(col)-1) + defenseRow
defenseRight = chr(ord(col)+1) + defenseRow
if defenseLeft in pawns or defenseRight in pawns:
safePawns += 1
return safePawns
|
Pouf/CodingCompetition
|
CiO/pawn-brotherhood.py
|
Python
|
mit
| 395
|
import unittest
from bpython import inspection
class TestInspection(unittest.TestCase):
def test_is_callable(self):
class OldCallable:
def __call__(self):
pass
class Callable(object):
def __call__(self):
pass
class OldNoncallable:
pass
class Noncallable(object):
pass
def spam():
pass
self.assertTrue(inspection.is_callable(spam))
self.assertTrue(inspection.is_callable(Callable))
self.assertTrue(inspection.is_callable(Callable()))
self.assertTrue(inspection.is_callable(OldCallable))
self.assertTrue(inspection.is_callable(OldCallable()))
self.assertFalse(inspection.is_callable(Noncallable()))
self.assertFalse(inspection.is_callable(OldNoncallable()))
self.assertFalse(inspection.is_callable(None))
def test_parsekeywordpairs(self):
def fails(spam=['-a', '-b']):
pass
default_arg_repr = "['-a', '-b']"
self.assertEqual(str(['-a', '-b']), default_arg_repr,
'This test is broken (repr does not match), fix me.')
argspec = inspection.getargspec('fails', fails)
defaults = argspec[1][3]
self.assertEqual(str(defaults[0]), default_arg_repr)
if __name__ == '__main__':
unittest.main()
|
5monkeys/bpython
|
bpython/test/test_inspection.py
|
Python
|
mit
| 1,387
|
# -*- coding: utf-8 -*-
"""
pyyp.client
~~~~~~~~~~~
This module provides the http client for Yunpian.
"""
import logging
import requests
from .exceptions import RequestException, Timeout
from .utils import encode_params
logger = logging.getLogger(__name__)
class Client(object):
"""Client to send requests"""
def __init__(self, timeout):
self._session = requests.session()
self._timeout = timeout
def get(self, request):
return self._request(
'get',
request.url,
params=request.params,
timeout=self._timeout)
def _request(self, method, url, **kwargs):
params = encode_params(kwargs.get('params'))
data = encode_params(kwargs.get('data'))
logger.info('sending request. url=%s method=%s params=%r data=%r '
'timeout=%s', url, method, params, data, kwargs['timeout'])
try:
r = self._session.request(method, url, **kwargs)
logger.info('received response. url=%s method=%s status_code=%s '
'text=%s', url, method, r.status_code, r.text)
return r
except requests.exceptions.Timeout as e:
logger.error('request timeout. %s', e)
raise Timeout(e)
except requests.exceptions.RequestException as e:
logger.error('request error. %s', e)
raise RequestException(e)
def post(self, request):
return self._request(
'post',
request.url,
data=request.params,
timeout=self._timeout)
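# Usage sketch (hypothetical request object; anything exposing `url` and
# `params` attributes works):
#
#   client = Client(timeout=5)
#   response = client.get(request)   # returns a requests.Response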
|
pragkent/pyyp
|
pyyp/client.py
|
Python
|
mit
| 1,631
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from ebstall.deployers.openvpn import OpenVpnConfig
__author__ = 'dusanklinec'
test1 = """client-to-client
server 10.8.0.0 255.255.255.0
;server 10.7.0.0 255.255.255.0
key server.key # This file should be kept secret
;key server.key # This file should be kept secret
# test"""
test2 = """;persist-tun"""
test3 = """persist-tun"""
test4 = """;key server.key # This file should be kept secret"""
test5 = """push alpha
push beta
push gamma
push delta
push zetta"""
test6 = """remote [(${vpn_hostname})] 1194
resolv-retry infinite"""
test7 = """remote [(${vpn_hostname})] 1194
resolv-retry infinite
<ca>
line1
line2
line3
</ca>
persist-tun"""
test8 = 'proto udp'
class OpenVpnParserTest(unittest.TestCase):
"""Simple test from the readme"""
def __init__(self, *args, **kwargs):
super(OpenVpnParserTest, self).__init__(*args, **kwargs)
def setUp(self):
pass
def tearDown(self):
pass
def test1(self):
parser = OpenVpnConfig(static_config=test1)
parser.load()
data = parser.config_data
# Simple parser test
self.assertEqual(len(data), 6, 'Number of parsed lines does not match')
self.assertEqual(data[0].ltype, 3, 'Parsed command has invalid type')
self.assertEqual(data[0].cmd, 'client-to-client')
self.assertEqual(data[0].params, None)
self.assertEqual(data[0].comment, None)
self.assertEqual(data[1].ltype, 3)
self.assertEqual(data[1].cmd, 'server')
self.assertEqual(data[1].params, '10.8.0.0 255.255.255.0')
self.assertEqual(data[1].comment, None)
self.assertEqual(data[2].ltype, 2)
self.assertEqual(data[2].cmd, 'server')
self.assertEqual(data[2].params, '10.7.0.0 255.255.255.0')
self.assertEqual(data[2].comment, None)
self.assertEqual(data[3].ltype, 3)
self.assertEqual(data[3].cmd, 'key')
self.assertEqual(data[3].params, 'server.key')
self.assertEqual(data[3].comment, '# This file should be kept secret')
self.assertEqual(data[4].ltype, 2)
self.assertEqual(data[4].cmd, 'key')
self.assertEqual(data[4].params, 'server.key')
self.assertEqual(data[4].comment, '# This file should be kept secret')
self.assertEqual(data[5].ltype, 1)
test1x = parser.dump()
parser2 = OpenVpnConfig(static_config=test1x)
parser2.load()
data2 = parser.config_data
self.assertEqual(data2, data, 'Parser did not return the same data')
def test1_remove_single(self):
parser = OpenVpnConfig(static_config=test1)
parser.load()
parser.set_config_value('client-to-client', remove=True)
ctr_comm = 0
for rec in parser.config_data:
if rec.cmd == 'client-to-client':
self.assertEqual(rec.ltype, 2, 'Directive is still active')
if rec.ltype == 2 and rec.cmd == 'client-to-client':
ctr_comm += 1
self.assertLessEqual(ctr_comm, 1, 'Commented out value should be max 1')
def test1_remove_key(self):
parser = OpenVpnConfig(static_config=test1)
parser.load()
parser.set_config_value('key', remove=True)
ctr_comm = 0
for rec in parser.config_data:
if rec.cmd == 'key':
self.assertEqual(rec.ltype, 2, 'Directive is still active')
if rec.ltype == 2 and rec.cmd == 'key':
ctr_comm += 1
self.assertLessEqual(ctr_comm, 2, 'Commented out value should be max 2')
def test2_remove_removed(self):
parser = OpenVpnConfig(static_config=test2)
parser.load()
parser.set_config_value('persist-tun', remove=True)
data = parser.config_data
self.assertEqual(len(data), 1)
self.assertEqual(data[0].ltype, 2)
def test2_add_removed_single(self):
parser = OpenVpnConfig(static_config=test2)
parser.load()
parser.set_config_value('persist-tun')
data = parser.config_data
self.assertEqual(len(data), 1)
self.assertEqual(data[0].ltype, 3)
def test3_add_added(self):
parser = OpenVpnConfig(static_config=test3)
parser.load()
parser.set_config_value('persist-tun')
data = parser.config_data
self.assertEqual(len(data), 1)
self.assertEqual(data[0].ltype, 3)
def test3_remove_added(self):
parser = OpenVpnConfig(static_config=test3)
parser.load()
parser.set_config_value('persist-tun', remove=True)
data = parser.config_data
self.assertEqual(len(data), 1)
self.assertEqual(data[0].ltype, 2)
def test4_add_key(self):
parser = OpenVpnConfig(static_config=test4)
parser.load()
parser.set_config_value('key', 'server.key')
data = parser.config_data
self.assertEqual(len(data), 1)
self.assertEqual(data[0].ltype, 3)
def test5_push(self):
parser = OpenVpnConfig(static_config=test5)
parser.load()
vals = ['alpha', 'beta', 'delta', 'secret']
parser.set_config_value('push', vals)
data = parser.config_data
self.assertEqual(len(data), 6)
vals_present = [False] * len(vals)
for cur in data:
if cur.ltype == 3:
self.assertTrue(cur.params in vals)
vals_present[vals.index(cur.params)] = True
self.assertEqual(vals_present, [True] * len(vals))
def test5_push_remove(self):
parser = OpenVpnConfig(static_config=test5)
parser.load()
vals = ['alpha', 'secret']
parser.set_config_value('push', vals, remove=True)
data = parser.config_data
self.assertEqual(len(data), 5)
vals_present = [False] * len(vals)
for cur in data:
if cur.ltype == 3 and cur.params in vals:
vals_present[vals.index(cur.params)] = True
self.assertEqual(vals_present, [False] * len(vals))
def test6(self):
parser = OpenVpnConfig(static_config=test6)
parser.load()
data = parser.config_data
self.assertEqual(len(data), 2, 'Number of parsed lines does not match')
self.assertEqual(data[0].ltype, 3)
self.assertEqual(data[0].cmd, 'remote')
self.assertEqual(data[1].ltype, 3)
self.assertEqual(parser.dump(), test6, 'Parser did not return the same data')
def test7(self):
parser = OpenVpnConfig(static_config=test7)
parser.load()
data = parser.config_data
self.assertEqual(parser.dump().strip(), test7.strip(), 'Parser did not return the same data')
testx = parser.dump()
parser2 = OpenVpnConfig(static_config=testx)
parser2.load()
data2 = parser.config_data
self.assertEqual(data2, data, 'Parser did not return the same data')
def test8(self):
parser = OpenVpnConfig(static_config=test8)
parser.load()
data = parser.config_data
self.assertEqual(parser.dump().strip(), test8.strip(), 'Parser did not return the same data')
testx = parser.dump()
parser2 = OpenVpnConfig(static_config=testx)
parser2.load()
data2 = parser.config_data
self.assertEqual(data2, data, 'Parser did not return the same data')
parser.set_config_value('proto', 'tcp')
data = parser.config_data
self.assertEqual(len(data), 2)
self.assertEqual(data[0].ltype, 2)
self.assertEqual(data[1].ltype, 3)
if __name__ == "__main__":
unittest.main() # pragma: no cover
|
EnigmaBridge/ebstall.py
|
ebstall/tests/test_openvpnparser.py
|
Python
|
mit
| 7,696
|
import logging
import numpy as np
from src.iBeatles.fitting.kropff.kropff_automatic_threshold_algorithms import Algorithms
from src.iBeatles.utilities.table_handler import TableHandler
class KropffBraggPeakThresholdCalculator:
def __init__(self, parent=None, grand_parent=None):
self.parent = parent
self.grand_parent = grand_parent
def run_automatic_mode(self):
logging.info(f"Automatic Bragg peak threshold calculator")
kropff_table_dictionary = self.grand_parent.kropff_table_dictionary
algorithm_selected = self.parent.kropff_automatic_threshold_finder_algorithm
logging.info(f"-> algorithm selected: {algorithm_selected}")
progress_bar_ui = self.parent.eventProgress
o_algo = Algorithms(kropff_table_dictionary=kropff_table_dictionary,
algorithm_selected=algorithm_selected,
progress_bar_ui=progress_bar_ui)
list_of_threshold_calculated = o_algo.get_peak_value_array(algorithm_selected)
logging.info(f"-> list of threshold found: {list_of_threshold_calculated}")
        # np.int was removed in NumPy 1.24; the builtin int is equivalent here
        threshold_width = int(self.parent.ui.kropff_threshold_width_slider.value())
        for _row_index, _row in enumerate(kropff_table_dictionary.keys()):
            x_axis = kropff_table_dictionary[_row]['xaxis']
            left_index = list_of_threshold_calculated[_row_index] - threshold_width
            right_index = list_of_threshold_calculated[_row_index] + threshold_width
            # clamp both ends so the window never wraps around the axis
            if left_index < 0:
                left_index = 0
            if right_index >= len(x_axis):
                right_index = len(x_axis) - 1
            kropff_table_dictionary[_row]['bragg peak threshold']['left'] = x_axis[left_index]
            kropff_table_dictionary[_row]['bragg peak threshold']['right'] = x_axis[right_index]
self.grand_parent.kropff_table_dictionary = kropff_table_dictionary
def save_all_profiles(self, force=False):
o_table = TableHandler(table_ui=self.parent.ui.high_lda_tableWidget)
nbr_row = o_table.row_count()
table_dictionary = self.grand_parent.kropff_table_dictionary
data_2d = self.grand_parent.data_metadata['normalized']['data']
# index of selection in bragg edge plot
[left_index, right_index] = self.grand_parent.fitting_bragg_edge_linear_selection
run_calculation = False
for _row in np.arange(nbr_row):
_bin_entry = table_dictionary[str(_row)]
if force:
run_calculation = True
elif _bin_entry['yaxis'] is None:
run_calculation = True
if run_calculation:
_bin_x0 = _bin_entry['bin_coordinates']['x0']
_bin_x1 = _bin_entry['bin_coordinates']['x1']
_bin_y0 = _bin_entry['bin_coordinates']['y0']
_bin_y1 = _bin_entry['bin_coordinates']['y1']
yaxis = data_2d[left_index: right_index,
_bin_x0: _bin_x1,
_bin_y0: _bin_y1,
] # noqa: E124
yaxis = np.nanmean(yaxis, axis=1)
yaxis = np.array(np.nanmean(yaxis, axis=1), dtype=float)
_bin_entry['yaxis'] = yaxis
self.grand_parent.kropff_table_dictionary[str(_row)] = _bin_entry
# index of selection in bragg edge plot
[left_index, right_index] = self.grand_parent.fitting_bragg_edge_linear_selection
full_x_axis = self.parent.bragg_edge_data['x_axis']
xaxis = np.array(full_x_axis[left_index: right_index], dtype=float)
_bin_entry['xaxis'] = xaxis
|
ornlneutronimaging/iBeatles
|
src/iBeatles/fitting/kropff/kropff_bragg_peak_threshold_calculator.py
|
Python
|
mit
| 3,672
|
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
from pgmult.distributions import PGMultinomial
from pgmult.utils import compute_uniform_mean_psi, pi_to_psi, psi_to_pi
def test_psi_pi_conversion():
K = 10
pi = np.ones(K) / float(K)
psi = pi_to_psi(pi)
pi2 = psi_to_pi(psi)
print("pi: ", pi)
print("psi: ", psi)
print("pi2: ", pi2)
assert np.allclose(pi, pi2), "Mapping is not invertible."
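# A worked instance of the mapping (under the stick-breaking parameterisation
# assumed by pgmult.utils): for K = 2, pi = [0.5, 0.5] gives the single
# logit psi = [0.0], and psi_to_pi([0.0]) recovers [0.5, 0.5].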
def test_pgm_rvs():
K = 10
mu, sig = compute_uniform_mean_psi(K, sigma=2)
# mu = np.zeros(K-1)
# sig = np.ones(K-1)
print("mu: ", mu)
print("sig: ", sig)
Sigma = np.diag(sig)
# Add some covariance
# Sigma[:5,:5] = 1.0 + 1e-3*np.random.randn(5,5)
# Sample a bunch of pis and look at the marginals
pgm = PGMultinomial(K, mu=mu, Sigma=Sigma)
samples = 10000
pis = []
for smpl in xrange(samples):
pgm.resample()
pis.append(pgm.pi)
pis = np.array(pis)
print("E[pi]: ", pis.mean(axis=0))
print("var[pi]: ", pis.var(axis=0))
plt.figure()
plt.subplot(121)
plt.boxplot(pis)
plt.xlabel("k")
plt.ylabel("$p(\pi_k)$")
# Plot the covariance
cov = np.cov(pis.T)
plt.subplot(122)
plt.imshow(cov, interpolation="None", cmap="cool")
plt.colorbar()
plt.title("Cov($\pi$)")
plt.show()
def test_correlated_pgm_rvs(Sigma):
K = Sigma.shape[0] + 1
mu, _ = compute_uniform_mean_psi(K)
print("mu: ", mu)
# Sample a bunch of pis and look at the marginals
samples = 10000
psis = np.random.multivariate_normal(mu, Sigma, size=samples)
pis = []
for smpl in xrange(samples):
pis.append(psi_to_pi(psis[smpl]))
pis = np.array(pis)
print("E[pi]: ", pis.mean(axis=0))
print("var[pi]: ", pis.var(axis=0))
plt.figure()
plt.subplot(311)
plt.boxplot(pis)
plt.xlabel("k")
plt.ylabel("$p(\pi_k)$")
# Plot the covariance
cov = np.cov(pis.T)
plt.subplot(323)
plt.imshow(cov[:-1,:-1], interpolation="None", cmap="cool")
plt.colorbar()
plt.title("Cov($\pi$)")
plt.subplot(324)
invcov = np.linalg.inv(cov[:-1,:-1] + np.diag(1e-6 * np.ones(K-1)))
# good = np.delete(np.arange(K), np.arange(0,K,3))
# invcov = np.linalg.inv(cov[np.ix_(good,good)])
plt.imshow(invcov, interpolation="None", cmap="cool")
plt.colorbar()
plt.title("Cov$(\pi)^{-1}$")
plt.subplot(325)
plt.imshow(Sigma, interpolation="None", cmap="cool")
plt.colorbar()
plt.title("$\Sigma$")
plt.subplot(326)
plt.imshow(np.linalg.inv(Sigma), interpolation="None", cmap="cool")
plt.colorbar()
plt.title("$\Sigma^{-1}$")
plt.savefig("correlated_psi_pi.png")
plt.show()
def test_chain_correlated_pgm_rvs(K=10):
Sigma = np.linalg.inv(np.eye(K) + np.diag(np.repeat(0.5,K-1),k=1) + np.diag(np.repeat(0.5,K-1),k=-1))
test_correlated_pgm_rvs(Sigma)
def test_wishart_correlated_pgm_rvs(K=10):
# Randomly generate a covariance matrix
from pybasicbayes.util.stats import sample_invwishart
Sigma = sample_invwishart(np.eye(K-1), nu=K)
test_correlated_pgm_rvs(Sigma)
def test_block_correlated_pgm_rvs():
n = 3
Sblocks = 2.0 * np.eye(n) + np.diag(np.repeat(0.5,n-1),k=1) + np.diag(np.repeat(0.5,n-1),k=-1)
Sigma = np.kron(Sblocks,np.eye(3))
test_correlated_pgm_rvs(Sigma)
# test_psi_pi_conversion()
# test_pgm_rvs()
# test_chain_correlated_pgm_rvs()
test_wishart_correlated_pgm_rvs(K=10)
# test_block_correlated_pgm_rvs()
|
fivejjs/pgmult
|
test/test_psi_pi.py
|
Python
|
mit
| 3,554
|
'''oscaar v2.0
Module for differential photometry
Extends dataBank functionality to output the graph to the filesystem.
'''
from matplotlib import pyplot as plt
from dataBank import dataBank
class dataBankFileOutput(dataBank):
def __init__(self, initParFilePath=None, imageFilePath=None):
self.imageFilePath = imageFilePath
dataBank.__init__(self, initParFilePath)
def plotLightCurve_multirad_output(self):
print "Saving plot... %s" % self.imageFilePath
plt.savefig(self.imageFilePath)
|
bluegod/OSCAAR
|
oscaar/dataBankFileOutput.py
|
Python
|
mit
| 541
|
# Maximum value of the prime
n = int(input("What number should I go up to? "))
# The prime
p = 2
# The number of the prime
x = 0
for p in range(2, n+1):
for i in range(2, p):
if p % i == 0:
break
else:
        x += 1
        print("#", x, "->", p)
print ("Done")
|
Praenomen/My-Project-Euler
|
Python/Problem007.py
|
Python
|
mit
| 296
|
"""
This file is part of the everest project.
See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information.
Created on Feb 21, 2012.
"""
from everest.resources.storing import build_resource_dependency_graph
from everest.resources.utils import get_member_class
from everest.testing import TestCaseWithConfiguration
from everest.tests.complete_app.interfaces import IMyEntity
from everest.tests.complete_app.interfaces import IMyEntityChild
from everest.tests.complete_app.interfaces import IMyEntityGrandchild
from everest.tests.complete_app.interfaces import IMyEntityParent
__docformat__ = 'reStructuredText en'
__all__ = ['ResourceGraphTestCase',
]
class ResourceGraphTestCase(TestCaseWithConfiguration):
package_name = 'everest.tests.complete_app'
def set_up(self):
TestCaseWithConfiguration.set_up(self)
self.config.load_zcml('configure_no_rdb.zcml')
self._interfaces = [IMyEntityParent, IMyEntity, IMyEntityChild,
IMyEntityGrandchild]
def test_dependency_graph(self):
grph = build_resource_dependency_graph(self._interfaces)
self.assert_equal(len(grph.nodes()), 4)
entity_mb_cls = get_member_class(IMyEntity)
entity_parent_mb_cls = get_member_class(IMyEntityParent)
entity_child_mb_cls = get_member_class(IMyEntityChild)
entity_grandchild_mb_cls = get_member_class(IMyEntityGrandchild)
# Entity Parent resource deps should be empty.
self.assert_equal(grph.neighbors(entity_parent_mb_cls), [])
# Entity Child has Grandchild.
self.assert_equal(grph.neighbors(entity_child_mb_cls),
[entity_grandchild_mb_cls])
# Entity Grandchild has Child, but we ignore backreferences.
self.assert_equal(grph.neighbors(entity_grandchild_mb_cls),
[])
# Entity has Parent and Child.
self.assert_equal(set(grph.neighbors(entity_mb_cls)),
set([entity_parent_mb_cls, entity_child_mb_cls]))
|
helixyte/everest
|
everest/tests/test_resourcegraphs.py
|
Python
|
mit
| 2,059
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10a1 on 2016-06-23 11:18
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('discussions', '0013_auto_20160622_1341'),
]
operations = [
migrations.AlterField(
model_name='post',
name='author',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
|
Udayraj123/dashboard_IITG
|
Binder/discussions/migrations/0014_auto_20160623_1648.py
|
Python
|
mit
| 590
|
#!/usr/bin/env python
from __future__ import print_function
from os.path import expanduser
import syslog
import urllib2
import json
from user_data import get_user_data
# Logs in to docker index if auth is given (builds ~/.dockercfg)
# docker.email: Email address to login in as
# docker.auth: Base64-encoded login (username/password)
def login():
syslog.syslog(syslog.LOG_WARNING, 'Logging in to docker..')
user_data = get_user_data()
docker = user_data['docker']
auth = docker.get('auth')
email = docker.get('email')
if auth and email:
home = expanduser('~')
docker_cfg_file = open(home + '/.dockercfg','w+')
# TODO: Right now this truncates any other login info-- consider refactoring
auth_hash = {
'https://index.docker.io/v1/': { 'auth': auth, 'email': email }
}
print(json.dumps(auth_hash), file=docker_cfg_file)
syslog.syslog(syslog.LOG_WARNING, 'Successfully logged into docker as %s' % email)
|
hayesgm/cerberus
|
scripts/docker_login.py
|
Python
|
mit
| 956
|
# Copyright (c) 2015 Xilinx Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os, sys
import urllib2
from hopper.utils.logger import *
class TaskBase:
def __init__(self, environment):
self.environment = environment
def execute(self, handler = None):
if self.environment:
if not self.environment.prepare():
return False
return True
class Environment:
def __init__(self, basepath = os.getcwd(),
logger = None, mirrorpath = None, proxy = None,
threads = None, locallayers = None):
self.workingPath = basepath
self.threadlimit = threads
self.mirrorpath = os.path.expanduser(mirrorpath) if mirrorpath else None
self.proxy = proxy
self.locallayers = locallayers
self.logger = logger
self.allowbuildtools = True
def getMirrorPath(self):
return self.mirrorpath
def getSourceMirror(self):
if not self.getMirrorPath():
return None
return os.path.join(self.getMirrorPath(), "repo-mirror")
def getDownloadMirror(self):
if not self.getMirrorPath():
return None
return os.path.join(self.getMirrorPath(), "downloads-mirror")
def getDownloadMirrorUri(self):
if not self.getDownloadMirror():
return None
return "file://" + self.getDownloadMirror()
def getLocalLayerPath(self):
return self.locallayers
def getWorkingPath(self):
return self.workingPath
def getWorkingBuildPath(self):
return os.path.join(self.getWorkingPath(), "build")
def getWorkingTmpPath(self):
return os.path.join(self.getWorkingBuildPath(), "tmp")
def getWorkingDownloadPath(self):
return os.path.join(self.getWorkingBuildPath(), "downloads")
def getWorkingToolsPath(self):
return os.path.join(self.getWorkingBuildPath(), "tools")
def getWorkingSourcesPath(self):
return os.path.join(self.getWorkingPath(), "repos")
def getMaxThreads(self):
return self.threadlimit
def getProxy(self):
return self.proxy
def prepare(self):
if not(os.path.exists(self.getWorkingPath())):
self.note("Preparing environment at '%s'" % self.getWorkingPath())
os.makedirs(self.getWorkingPath())
if not(os.path.exists(self.getWorkingSourcesPath())):
self.debug(" * create repo directory")
os.makedirs(self.getWorkingSourcesPath())
if not(os.path.exists(self.getWorkingBuildPath())):
self.debug(" * create build directory")
os.makedirs(self.getWorkingBuildPath())
if not(os.path.exists(self.getWorkingTmpPath())):
self.debug(" * create working tmp directory")
os.makedirs(self.getWorkingTmpPath())
return True
def downloadFile(self, url):
filename = url.split("/")[-1]
# create downloads directory
if not(os.path.exists(self.getWorkingDownloadPath())):
os.makedirs(self.getWorkingDownloadPath())
mirrorpath = None
if self.getDownloadMirror():
mirrorpath = os.path.join(self.getDownloadMirror(), filename)
localpath = os.path.join(self.getWorkingDownloadPath(), filename)
if os.path.isfile(localpath):
return localpath
elif mirrorpath and os.path.isfile(mirrorpath):
import shutil
shutil.copyfile(mirrorpath, localpath)
return localpath
else:
try:
result = Environment.__download__(url, localpath)
if result:
return localpath
except urllib2.HTTPError as e:
self.error(e)
self.error("Failed to download file '%s'" % url)
return None
# Pass-through logging calls
def getLogger(self):
if self.logger:
return self.logger
return getDefaultLogger()
def log(self, message, level = LoggerLevel.Normal, severity = LoggerSeverity.Info):
self.getLogger().log(message, level, severity)
def verbose(self, message):
self.getLogger().verbose(message)
def debug(self, message):
self.getLogger().debug(message)
def fdebug(self, message):
self.getLogger().fdebug(message)
def error(self, message):
self.getLogger().error(message)
def warning(self, message):
self.getLogger().warning(message)
def note(self, message):
self.getLogger().note(message)
@staticmethod
def __download__(url, filepath):
resp = urllib2.urlopen(url)
size = int(resp.info()["Content-Length"])
with open(filepath, "wb") as f:
while True:
data = resp.read(8192)
if not data:
break
f.write(data)
return resp
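# Usage sketch (hypothetical paths and URL):
#
#   env = Environment(basepath="/tmp/hopper-work", mirrorpath="~/mirror")
#   env.prepare()
#   localfile = env.downloadFile("http://example.com/tool.tar.gz")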
|
Xilinx/hopper
|
hopper/utils/tasks.py
|
Python
|
mit
| 5,205
|
import pafy
def dl_audiostream(url, path=''):
"""download audiostream from youtube
:url: the youtube url
:returns: filename of the downloaded file
"""
video = pafy.new(url)
audio = video.getbestaudio()
if path != '':
path = "%s/%s" %(path, audio.filename)
name = audio.download(filepath=path, quiet=True)
print "downloaded file %s" %name
return name
def validate(url):
try:
v = pafy.new(url)
if len(v.audiostreams) > 0:
return True
except Exception, e:
print e
return False
return False # should not happen
|
bastinat0r/webaudio
|
youtube.py
|
Python
|
mit
| 615
|
from .messages import *
class BaseAction(object):
"""
This class represents a base action
"""
def __init__(self, shdr):
self.header = shdr
self.messages = []
@classmethod
def get_message_class(cls):
"""
This method returns the message class associated with the
action (cls.message_class by default)
If cls.message_class is not provided, this method MUST be
overriden
"""
return cls.message_class
@classmethod
def from_data(cls, data, shdr):
"""
This method extracts the messages from data and return
a populated Action class and the rest of the data
"""
msg_class = cls.get_message_class()
action = cls(shdr)
for i in range(shdr.count):
(msg, data) = msg_class.from_data(data)
action.messages.append(msg)
return (action, data)
class ActionInsertState(BaseAction):
"""Action class related to inserting states"""
message_class = MessageState
class ActionDeleteState(BaseAction):
"""Action class related to deleting states"""
message_class = MessageState
class ActionDeleteCompressedState(BaseAction):
"""Action class related to pfsync_del_c action"""
message_class = MessageDeleteCompressed
class ActionClearStates(BaseAction):
"""Action class related to pfsync_clr action"""
message_class = MessageClear
def build_from_header(shdr, data):
"""
This function returns an instance of the class corresponding to the
action type supplied in the header
shdr is of type pfsync.headers.SubHeader
If no action class is supplied below, this function extract the
needed amount of data in order to not pollute the rest of the program
Actions ID corresponds to these defines:
#define PFSYNC_ACT_CLR 0 /* clear all states */
#define PFSYNC_ACT_OINS 1 /* old insert state */
#define PFSYNC_ACT_INS_ACK 2 /* ack of insterted state */
#define PFSYNC_ACT_OUPD 3 /* old update state */
#define PFSYNC_ACT_UPD_C 4 /* "compressed" update state */
#define PFSYNC_ACT_UPD_REQ 5 /* request "uncompressed" state */
#define PFSYNC_ACT_DEL 6 /* delete state */
#define PFSYNC_ACT_DEL_C 7 /* "compressed" delete state */
#define PFSYNC_ACT_INS_F 8 /* insert fragment */
#define PFSYNC_ACT_DEL_F 9 /* delete fragments */
#define PFSYNC_ACT_BUS 10 /* bulk update status */
#define PFSYNC_ACT_OTDB 11 /* old TDB replay counter update */
#define PFSYNC_ACT_EOF 12 /* end of frame - DEPRECATED */
#define PFSYNC_ACT_INS 13 /* insert state */
#define PFSYNC_ACT_UPD 14 /* update state */
#define PFSYNC_ACT_TDB 15 /* TDB replay counter update */
#define PFSYNC_ACT_MAX 16
See OpenBSD sources sys/net/if_pfsync.h
"""
actions = [
ActionClearStates,
None,
None,
None,
None,
None,
ActionDeleteState,
ActionDeleteCompressedState,
None,
None,
None,
None,
None,
ActionInsertState,
None,
None,
]
if shdr.action_id >= 0 and shdr.action_id < len(actions) and actions[shdr.action_id]:
return actions[shdr.action_id].from_data(data, shdr)
else:
next_offset = shdr.length * shdr.count
data = data[next_offset:]
return (None, data)
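# Usage sketch (assumes `shdr` is a parsed pfsync.headers.SubHeader and
# `data` the remaining packet bytes):
#
#   (action, data) = build_from_header(shdr, data)
#   if action is not None:
#       for message in action.messages:
#           handle(message)   # hypothetical handler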
|
Korrigan/pfstatelogger
|
pfsync/actions.py
|
Python
|
mit
| 3,637
|
#!/usr/bin/env python
"""
Predict firing rates from calcium traces.
Examples:
c2s predict -p 1 data.pck predictions.xpck
c2s predict -m model.xpck data.preprocessed.pck predictions.xpck
c2s predict -t mat data.preprocessed.pck predictions.mat
"""
import sys
from argparse import ArgumentParser
from pickle import dump
from scipy.io import savemat
from numpy import corrcoef, mean
from c2s import predict, preprocess, load_data
from c2s.experiment import Experiment
from c2s.utils import convert
def main(argv):
parser = ArgumentParser(argv[0], description=__doc__)
parser.add_argument('dataset', type=str)
parser.add_argument('output', type=str, nargs='+')
parser.add_argument('--model', '-m', type=str, default='')
parser.add_argument('--preprocess', '-p', type=int, default=0,
help='If you haven\'t already applied `preprocess` to the data, set to 1 (default: 0).')
parser.add_argument('--verbosity', '-v', type=int, default=1)
args = parser.parse_args(argv[1:])
experiment = Experiment()
# load data
data = load_data(args.dataset)
if args.preprocess:
# preprocess data
data = preprocess(data, args.verbosity)
if args.model:
# load training results
results = Experiment(args.model)['models']
else:
# use default model
results = None
# predict firing rates
data = predict(data, results, verbosity=args.verbosity)
# remove data except predictions
for entry in data:
if 'spikes' in entry:
del entry['spikes']
if 'spike_times' in entry:
del entry['spike_times']
del entry['calcium']
for filepath in args.output:
if filepath.lower().endswith('.mat'):
# store in MATLAB format
savemat(filepath, convert({'data': data}))
else:
with open(filepath, 'w') as handle:
dump(data, handle, protocol=2)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
lucastheis/c2s
|
scripts/c2s-predict.py
|
Python
|
mit
| 1,863
|
from random import randint
print('Welcome!')
secreta = randint(1, 100)
tentativa = 0
while True:
    chute = int(input('Guess a number: '))
    tentativa += 1
    if chute == secreta:
        print('Congratulations! You guessed the number %d on attempt number %d'
              % (secreta, tentativa))
        break
    else:
        print('Too high' if chute > secreta else 'Too low')
print('Game over!')
|
outrofelipe/Python-para-zumbis
|
exercicios-e-exemplos/chute2.py
|
Python
|
mit
| 402
|
# -*- coding: utf-8 -*-
from selenium import webdriver
class tweetshot:
def __init__(self):
self.driver = webdriver.PhantomJS()
def __exit__(self):
self.driver.quit()
def take_screenshot(self, url, output):
self.driver.get(url)
e = self.driver.find_element_by_class_name("permalink-tweet-container")
width = e.size['width']
height = e.size['height']
self.driver.set_window_size(width, height)
self.driver.save_screenshot(output)
if __name__ == '__main__':
import argparse
import datetime
format = "%Y-%m-%d-%H%M%S"
today = datetime.datetime.today()
timestamp = today.strftime(format)
parser = argparse.ArgumentParser(
description='This script takes a screenshot of a tweet for posterity.')
parser.add_argument('url', action='store',
help='URL of the tweet that you want to screenshot.')
parser.add_argument('-o','--output', action='store', dest='output',
default='{}.png'.format(timestamp),
help='Output path of the image. Default is the current directory.')
args = parser.parse_args()
url = args.url
output = args.output
tweet = tweetshot()
tweet.take_screenshot(url, output)
print( 'Screenshot saved for {}'.format(url) )
|
todd64/screenshot-tweet
|
screenshot-tweet.py
|
Python
|
mit
| 1,406
|
from flask import render_template, redirect, url_for, request, flash, Blueprint
from app import forms, models, db, login_manager
from flask.ext.login import login_user, login_required, logout_user
import datetime
bp = Blueprint('app', __name__)
@login_manager.user_loader
def load_user(username):
return models.User.query.get(username)
@bp.route('/', methods=['GET', 'POST'])
def index():
form = forms.LoginForm()
if request.method == 'POST':
if form.validate_on_submit():
login_user(form.user)
return redirect(request.args.get('next') or url_for('app.create'))
return render_template('login.html', form=form)
@bp.route('/logout')
@login_required
def logout():
logout_user()
return redirect(url_for('app.index'))
@bp.route('/create', methods=['GET', 'POST'])
@login_required
def create():
form = forms.CreateForm()
if form.validate_on_submit():
# Create a patient from user input
patient = models.Patient(forename=form.forename.data,
surname=form.surname.data,
dob=int(datetime.datetime.strptime(
form.dob.data, "%d/%m/%Y").strftime("%s")),
mobile=form.mobile.data.replace('07', '447'))
# Add patient data to database
db.session.add(patient)
db.session.commit()
# Reset the form & redirect to self.
flash('The patient data has been saved successfully.')
form.reset()
return render_template('create.html', form=form)
@bp.app_errorhandler(404)
def page_not_found(e):
    flash('Whoops, something went wrong. '
          'You have been taken back to the login screen.')
return render_template('404.html'), 404
|
jawrainey/atc
|
app/views.py
|
Python
|
mit
| 1,790
|
from __future__ import absolute_import
from django import forms
from django.utils.translation import ugettext_lazy as _
from django.core.validators import MinValueValidator
from fobi.base import BaseFormFieldPluginForm, get_theme
from fobi.settings import DEFAULT_MAX_LENGTH, DEFAULT_MIN_LENGTH
from fobi.widgets import NumberInput
__title__ = 'fobi.contrib.plugins.form_elements.email.forms'
__author__ = 'Artur Barseghyan <artur.barseghyan@gmail.com>'
__copyright__ = '2014-2017 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('EmailInputForm',)
theme = get_theme(request=None, as_instance=True)
class EmailInputForm(forms.Form, BaseFormFieldPluginForm):
"""Form for ``EmailInputPlugin``."""
plugin_data_fields = [
("label", ""),
("name", ""),
("help_text", ""),
("initial", ""),
("max_length", str(DEFAULT_MAX_LENGTH)),
("required", False),
("placeholder", ""),
]
label = forms.CharField(
label=_("Label"),
required=True,
widget=forms.widgets.TextInput(
attrs={'class': theme.form_element_html_class}
)
)
name = forms.CharField(
label=_("Name"),
required=True,
widget=forms.widgets.TextInput(
attrs={'class': theme.form_element_html_class}
)
)
help_text = forms.CharField(
label=_("Help text"),
required=False,
widget=forms.widgets.Textarea(
attrs={'class': theme.form_element_html_class}
)
)
initial = forms.EmailField(
label=_("Initial"),
required=False,
widget=forms.widgets.TextInput(
attrs={'class': theme.form_element_html_class}
)
)
max_length = forms.IntegerField(
label=_("Max length"),
required=True,
widget=NumberInput(attrs={'class': theme.form_element_html_class,
'min': str(DEFAULT_MIN_LENGTH)}),
initial=DEFAULT_MAX_LENGTH,
validators=[MinValueValidator(DEFAULT_MIN_LENGTH)]
)
required = forms.BooleanField(
label=_("Required"),
required=False,
widget=forms.widgets.CheckboxInput(
attrs={'class': theme.form_element_checkbox_html_class}
)
)
placeholder = forms.CharField(
label=_("Placeholder"),
required=False,
widget=forms.widgets.TextInput(
attrs={'class': theme.form_element_html_class}
)
)
def clean(self):
super(EmailInputForm, self).clean()
max_length = self.cleaned_data.get('max_length', DEFAULT_MAX_LENGTH)
if self.cleaned_data['initial']:
len_initial = len(self.cleaned_data['initial'])
if len_initial > max_length:
self.add_error(
'initial',
_("Ensure this value has at most {0} characters "
"(it has {1}).".format(max_length, len_initial))
)
# For backwards compatibility
EmailForm = EmailInputForm
|
mansonul/events
|
events/contrib/plugins/form_elements/fields/email/forms.py
|
Python
|
mit
| 3,065
|
""""
Contains code to:
- hit the Sierra request-API
"""
import datetime, json, logging, os, pprint
import requests
from requests.auth import HTTPBasicAuth
log = logging.getLogger(__name__)
class SierraHelper( object ):
""" Gets item_id and places hold. """
    def __init__( self ):
        self.SIERRA_API_ROOT_URL = os.environ['EZRQST__SIERRA_API_ROOT_URL']
        self.SIERRA_API_KEY = os.environ['EZRQST__SIERRA_API_KEY']
        self.SIERRA_API_SECRET = os.environ['EZRQST__SIERRA_API_SECRET']
        # self.item_request = None
        # self.item_dct = {} # populated by prep_item_data(); used by views.processor(), which passes this to aeon.build_aeon_url()
        # self.item_bib = ''
        # self.item_barcode = ''
        # self.item_id = None # populated by get_item_id(); _no_ trailing check-digit
        # self.item_title = ''
        # self.patron_barcode = ''
        # # self.patron_login_name = ''
        # self.patron_sierra_id = ''
        self.hold_status = 'problem'  # updated in place_hold(); must exist, since it is logged there even when the request fails
def build_data( self, item_id, pickup_location_code ):
""" Preps item-data -- and some patron-data -- from item_request.
Called by models.Processor.place_request() """
payload_dct = {
'recordType': 'i',
'recordNumber': int( item_id[1:] ), # removes initial 'i'
'pickupLocation': pickup_location_code,
'note': 'source: easyRequest'
}
log.debug( f'payload_dct, ``{pprint.pformat(payload_dct)}``' )
return payload_dct
def manage_place_hold( self, data_dct, patron_sierra_id ):
""" Gets token and places hold.
Called by models.Processor.place_request() """
token = self.get_token()
self.place_hold( token, data_dct, patron_sierra_id )
log.debug( 'manage_place_hold() done.' )
return
def get_token( self ):
""" Gets token.
Called by manage_place_hold() """
token = 'init'
token_url = f'{self.SIERRA_API_ROOT_URL}/token'
log.debug( 'token_url, ```%s```' % token_url )
try:
r = requests.post( token_url,
auth=HTTPBasicAuth( self.SIERRA_API_KEY, self.SIERRA_API_SECRET ),
timeout=20 )
log.debug( 'token r.content, ```%s```' % r.content )
token = r.json()['access_token']
log.debug( 'token, ```%s```' % token )
return token
except:
log.exception( 'problem getting token; traceback follows' )
raise Exception( 'exception getting token' )
def place_hold( self, token, payload_dct, patron_sierra_id ):
""" Attempts to place hold via sierra api.
Called by manage_place_hold() """
log.info( 'placing hold' )
request_url = f'{self.SIERRA_API_ROOT_URL}/patrons/{patron_sierra_id}/holds/requests'
custom_headers = {'Authorization': f'Bearer {token}' }
log.debug( f'custom_headers, ```{custom_headers}```' )
log.debug( f'payload_dct, ```{pprint.pformat(payload_dct)}```' )
payload = json.dumps( payload_dct )
log.debug( f'payload-json-string, ```{payload}```' )
try:
r = requests.post( request_url, headers=custom_headers, data=payload, timeout=30 )
log.info( f'r.status_code, `{r.status_code}`' )
log.info( f'r.url, `{r.url}`' )
log.info( f'r.content, `{r.content}`' )
if r.status_code in [ 200, 204 ]:
self.hold_status = 'hold_placed'
except:
log.exception( 'problem hitting api to request item; traceback follows; processing will continue' )
log.debug( f'hold_status, `{self.hold_status}`' )
return
## end class SierraHelper()
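# Usage sketch (hypothetical item id, pickup code and patron id; the
# EZRQST__SIERRA_API_* environment variables must be set):
#
#   helper = SierraHelper()
#   payload = helper.build_data('i1234567', 'ROCK')
#   helper.manage_place_hold(payload, '12345')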
|
birkin/easyrequest_project
|
easyrequest_app/lib/sierra.py
|
Python
|
mit
| 3,821
|
#Exercise A.1.1: Interest rates
#Author: Andreas Solberg Sagen - University of Oslo
|
exTerEX/PrimeOnScientificProgramming
|
Chapter A/module1.py
|
Python
|
mit
| 84
|
import sys
import traceback
import os
import logging
from six.moves import range
from six.moves import zip
from io import open
from datetime import datetime
from .version import __version__
from .plugins import default_plugin_manager
logger = logging.getLogger('honeybadger.payload')
def error_payload(exception, exc_traceback, config):
def _filename(name):
return name.replace(config.project_root, '[PROJECT_ROOT]')
def is_not_honeybadger_frame(frame):
# TODO: is there a better way to do this?
# simply looking for 'honeybadger' in the path doesn't seem
# specific enough but this approach seems too specific and
# would need to be updated if we re-factored the call stack
# for building a payload.
return not ('honeybadger' in frame[0] and frame[2] in ['notify', '_send_notice', 'create_payload', 'error_payload'])
if exc_traceback:
tb = traceback.extract_tb(exc_traceback)
else:
tb = [f for f in traceback.extract_stack() if is_not_honeybadger_frame(f)]
logger.debug(tb)
payload = {
'class': type(exception) is dict and exception['error_class'] or exception.__class__.__name__,
'message': type(exception) is dict and exception['error_message'] or str(exception),
'backtrace': [dict(number=f[1], file=_filename(f[0]), method=f[2], source=read_source(f)) for f in reversed(tb)]
}
return payload
def read_source(frame, source_radius=3):
if os.path.isfile(frame[0]):
with open(frame[0], 'rt', encoding='utf-8') as f:
contents = f.readlines()
start = max(1, frame[1] - source_radius)
end = min(len(contents), frame[1] + source_radius)
return dict(zip(range(start, end+1), contents[start-1:end]))
return {}
def server_payload(config):
return {
'project_root': config.project_root,
'environment_name': config.environment,
'hostname': config.hostname,
'time': datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
'pid': os.getpid(),
'stats': stats_payload()
}
def stats_payload():
try:
import psutil
except ImportError:
return {}
else:
s = psutil.virtual_memory()
loadavg = psutil.getloadavg()
free = float(s.free) / 1048576.0
buffers = hasattr(s, 'buffers') and float(s.buffers) / 1048576.0 or 0.0
cached = hasattr(s, 'cached') and float(s.cached) / 1048576.0 or 0.0
total_free = free + buffers + cached
payload = {}
payload['mem'] = {
'total': float(s.total) / 1048576.0, # bytes -> megabytes
'free': free,
'buffers': buffers,
'cached': cached,
'total_free': total_free
}
payload['load'] = dict(zip(('one', 'five', 'fifteen'), loadavg))
return payload
def create_payload(exception, exc_traceback=None, config=None, context=None):
if exc_traceback is None:
exc_traceback = sys.exc_info()[2]
    # if context is None, initialize it as an empty dict
if not context:
context = {}
payload = {
'notifier': {
'name': "Honeybadger for Python",
'url': "https://github.com/honeybadger-io/honeybadger-python",
'version': __version__
},
'error': error_payload(exception, exc_traceback, config),
'server': server_payload(config),
'request': {'context':context}
}
return default_plugin_manager.generate_payload(payload, config, context)
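# Usage sketch (with a configured `config` object, inside an except block):
#
#   try:
#       risky_operation()   # hypothetical
#   except Exception as exc:
#       payload = create_payload(exc, config=config, context={'user_id': 42})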
|
honeybadger-io/honeybadger-python
|
honeybadger/payload.py
|
Python
|
mit
| 3,579
|
from ctypes import c_char, c_ubyte, c_short, c_ushort, c_int, c_uint, c_ulong, c_long, POINTER, Structure
dsInt8_t = c_char
dsUint8_t = c_ubyte
dsInt16_t = c_short
dsUint16_t = c_ushort
dsInt32_t = c_int
dsUint32_t = c_uint
dsULong_t = c_ulong
dsLong_t = c_long
dsChar_t = c_char
def dsTEXT(x):
return x
dsString_t = POINTER(dsChar_t)  # POINTER builds a pointer type; pointer() expects an instance
class dsUint160_t(Structure):
    _fields_ = [
        ('top', dsUint32_t),
        ('hi_hi', dsUint32_t),
        ('hi_lo', dsUint32_t),  # comma was missing here, which made Python "call" the next tuple
        ('lo_hi', dsUint32_t),
        ('lo_lo', dsUint32_t)
    ]
class dsmBool_t(Structure):
_fields_ = [
('dsmFalse', c_int),
('dsmTrue', c_int)
]
dsmFalse = 0
dsmTrue = 1
uint8 = dsUint8_t
int8 = dsInt8_t
uint16 = dsUint16_t
int16 = dsInt16_t
uint32 = dsUint32_t
int32 = dsInt32_t
class dsStruct64_t(Structure):
_fields_ = [
('hi', dsUint32_t),
('lo', dsUint32_t)
]
uint64 = dsStruct64_t
dsBool_t = dsmBool_t
bool_t = dsBool_t
bTrue = dsmTrue
bFalse = dsmFalse
dsInt64_t = c_long
dsUint64_t = c_ulong
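# Usage sketch: a 64-bit value split into two 32-bit halves.
#
#   v = dsStruct64_t(hi=0x1, lo=0xFFFFFFFF)   # represents 0x1FFFFFFFF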
|
DenisKuzin/python-tsm
|
dsmapips.py
|
Python
|
mit
| 1,044
|
# Python - 3.6.0
test.assert_equals(char_freq('I like cats'), {'a': 1, ' ': 2, 'c': 1, 'e': 1, 'I': 1, 'k': 1, 'l': 1, 'i': 1, 's': 1, 't': 1})
|
RevansChen/online-judge
|
Codewars/8kyu/character-frequency-2/Python/test.py
|
Python
|
mit
| 145
|
#!/usr/bin/python
import socket
import fcntl
import struct
import argparse
from json import load
from urllib2 import urlopen
parser = argparse.ArgumentParser()
parser.add_argument ("-s","--server", help="add server url")
parser.add_argument ("-i","--ip", help="add parameter for ip in url")
parser.add_argument ("-n","--name", help="add machine paramet in url")
parser.add_argument ("-t","--interfaces", help="change intefaces")
args = parser.parse_args()
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
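# For example (interface name assumed): get_ip_address('eth0') returns the
# IPv4 address bound to eth0, e.g. '192.168.1.10'.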
if args.server and args.ip and args.name:
name = socket.gethostname()
my_ip = 'external%20ip%20'+ load(urlopen('http://jsonip.com'))['ip']+ '%20and%20local%20ip%20' + get_ip_address(args.interfaces)
url = args.server+'?'+args.name+'='+name+'&'+args.ip+'='+ my_ip
urlopen(url)
else:
print "error, you did not put arguments"
|
tsevillano/ip.py
|
ip.py
|
Python
|
mit
| 1,018
|
import asyncio
import sys
import settings
if __name__ == "__main__":
user_settings = settings.load_user_settings()
settings.set_settings(user_settings)
if sys.platform == 'win32':
loop = asyncio.ProactorEventLoop()
asyncio.set_event_loop(loop)
from communication import server, pubsub, topics
from interfaces.cli import menu
from engine.handler import EngineProcessHandler
from engine.client import RemoteEngineClient
import threading
import atexit
def main():
engine = asyncio.get_event_loop().run_until_complete(initialize_speech_engine_connector())
if settings.settings['network'] != 'server':
from recognition.commands import monitor
monitor.start_watching_user_state()
threading.Thread(target=get_cli_loop(), daemon=True).start()
engine_server = engine.server if isinstance(engine, EngineProcessHandler) else None
server.loop.run_forever()
@atexit.register
def shutdown():
pubsub.publish(topics.STOP_MAIN_PROCESS)
async def initialize_speech_engine_connector():
network = settings.settings['network']
if network == 'remote':
return RemoteEngineClient()
else:
is_server = network == 'server'
return await EngineProcessHandler.create(remote=is_server)
def get_cli_loop():
no_cli = settings.settings['network'] == 'server'
if no_cli:
input_blocker = lambda: input('')
else:
main_menu = menu.MainMenu()
input_blocker = main_menu.start
def loop_func():
input_blocker()
server.loop.call_soon_threadsafe(sys.exit)
return loop_func
if __name__ == "__main__":
main()
|
osspeak/osspeak
|
osspeak/main.py
|
Python
|
mit
| 1,630
|
import sys
import telnetlib
import logging
logger = logging.getLogger(__name__)
class adbConsole:
def __init__(self, host="localhost",port=5554):
self.HOST = host
self.PORT = port
self.tn = telnetlib.Telnet(self.HOST,self.PORT)
#read all the adb stuff
self.tn.read_until("OK")
def __del__(self):
self.tn.close()
def close(self):
self.tn.close()
#note the semantics of connect are that a repeated connect
#does NOT try to connect again, so we should always connect
def connect(self):
self.tn.open(self.HOST,self.PORT)
def send (self,string):
self.connect()
if not string.endswith("\n"):
string = string + "\n"
logger.debug("adb sending: %s" % string)
self.tn.write(string)
return self.isOK()
def isOK (self):
(idx,match,text) = self.tn.expect(["OK","KO"],30) # 30 second timeout; should really be instant
if idx == 0:
#logger.debug("text:%s" % text)
text2 = self.tn.read_until("OK")
logger.debug("adb OK: %s" % text2)
return text2;
elif idx == 1:
text2 = self.tn.read_until("KO")
logger.debug("adb KO: %s" % text2)
raise Exception("adb", "not OK")
else:
logger.error("text" + text)
raise Exception("adb", "unk err")
|
tvidas/a5
|
scripts/bin/adbconsole.py
|
Python
|
mit
| 1,278
|
# -*- coding: utf8 -*-
import sys
import os
import logging
import argparse
from datetime import datetime
from collections import defaultdict
from common_tools import parent_parser
from parser import Request_Parser, Response_Parser
from config import settings
from image import image_type_detection, compress_image_by_webp, get_image_info, convert_webp_to_png, ziprxoy_zip
from model import Image_Model
reload(sys)
sys.setdefaultencoding('utf-8')
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
date_tag = datetime.now().strftime("%Y-%m-%d")
logFormatter = logging.Formatter("%(asctime)s [%(threadName)s] [%(levelname)s] %(message)s")
fileHandler = logging.FileHandler("../logs/Main%s.log" % date_tag)
fileHandler.setFormatter(logFormatter)
logger.addHandler(fileHandler)
consoleHandler = logging.StreamHandler()
consoleHandler.setFormatter(logFormatter)
logger.addHandler(consoleHandler)
def main():
parser = argparse.ArgumentParser(parents=[parent_parser])
parser.add_argument('-v', '--version', action='version', version='%(prog)s 1.0')
options = parser.parse_args()
config = settings[options.config]
logging.info("Setting {}".format(options.config))
LEN_ORI_INPUT_TERMS = 5
REQUEST_HEADER_KEYS = ['X-QB', 'Accept', 'Accept-Encoding', ]
RESPONSE_HEADER_KEYS = ['Content-Length', 'Content-Type', ]
COMPRESS_WEBP_QUALITY = 70
ori_input_file = os.path.join(config['data_dir'], config['ori_input_file'])
base_output_file = os.path.join(config['output_dir'],
datetime.now().strftime("%Y%m%d%H%M%S") + "_" + config['base_output_file'])
image_output_file = os.path.join(config['output_dir'],
datetime.now().strftime("%Y%m%d%H%M%S") + "_" + config['image_output_file'])
if not os.path.isfile(ori_input_file):
logging.error("input file: %s is not exist!", ori_input_file)
sys.exit(-1)
overall_statistic = defaultdict(int)
real_image_type_statistic = defaultdict(int)
request_head_keys = set()
response_head_key = set()
with open(ori_input_file) as r_handler, \
open(base_output_file, 'w') as w_base_handler, \
open(image_output_file, 'w') as w_image_hanlder:
for line in r_handler:
try:
if line and line.strip():
line = line.strip()
overall_statistic['all'] += 1
# file format : req_time|rep_time|key|base64(req)|base64(rep)
terms = line.split('\t')
if len(terms) != LEN_ORI_INPUT_TERMS:
                        overall_statistic['format_wrong'] += 1
continue
# time
req_time = datetime.fromtimestamp(float(terms[0])).second * 10 ** 6 + datetime.fromtimestamp(
float(terms[0])).microsecond
rep_time = datetime.fromtimestamp(float(terms[1])).second * 10 ** 6 + datetime.fromtimestamp(
float(terms[1])).microsecond
# request
http_request_parser = Request_Parser()
http_request_parser.parse(terms[3].decode('base64'))
http_request_model = http_request_parser.get_request(*REQUEST_HEADER_KEYS)
# response
http_response_parser = Response_Parser()
http_response_parser.parse(terms[4].decode('base64'))
http_response_model = http_response_parser.get_reponse(*RESPONSE_HEADER_KEYS)
# base writer
base_str = "{}\t{}\t{}\t{}".format(req_time, rep_time, http_request_model, http_response_model)
w_base_handler.write("{}\n".format(base_str))
# reponse_body
reponse_body = http_response_parser.get_body()
real_image_type = image_type_detection(reponse_body)
real_image_type_statistic[real_image_type] += 1
# image
# TODO label image
if real_image_type and real_image_type not in ['unknown', '-']:
md5_code, width, height, image_pix_count = get_image_info(real_image_type, reponse_body)
image_model = Image_Model(real_image_type, md5_code, width, height, image_pix_count)
cwebp_run_time, dwebp_run_time, ziproxy_run_time = '-', '-', '-'
if real_image_type == 'webp':
image_model.set_zip(image_model.md5, len(reponse_body))
else:
compress_md5, compress_size, cwebp_run_time = compress_image_by_webp(reponse_body,
quality=COMPRESS_WEBP_QUALITY)
image_model.set_zip(compress_md5, compress_size)
dwebp_run_time = convert_webp_to_png()
ziproxy_run_time = ziprxoy_zip()
                        w_image_handler.write("{}\t{}\t{}\t{}\t{}\n".format(base_str, image_model,
                                                                            cwebp_run_time, dwebp_run_time,
                                                                            ziproxy_run_time))
# if options.filter_image:
except Exception as e:
overall_statistic['error'] += 1
logging.error("error:{0} ".format(e))
logging.info("[Stat] overall_statistic: {}".format(overall_statistic))
logging.info("[Stat] image_type_statistic: {}, total:{}".format(real_image_type_statistic,
sum(real_image_type_statistic.values())))
if __name__ == "__main__": main()
|
CharlesZhong/Mobile-Celluar-Measure
|
http_parser/main.py
|
Python
|
mit
| 5,954
|
import re
import time
lowest_cost = 9999999999999
best_spells = []
# MUCH faster than deepcopy
def spell_copy(spells):
return {'recharge' : [x for x in spells['recharge']],
'poison' : [x for x in spells['poison']],
'shield' : [x for x in spells['shield']],
'magic missile' : [x for x in spells['magic missile']],
'drain' : [x for x in spells['drain']]}
def cast(player, boss, spells, spell):
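    # a spell cannot be cast while its effect is still active or if it costs more mana than the player has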
if spells[spell][2] > 0 or spells[spell][0] > player[1]:
return -1
if spell == 'magic missile':
boss[0] -= 4
player[1] -= spells[spell][0]
return spells[spell][0]
elif spell == 'drain':
boss[0] -= 2
player[0] += 2
player[1] -= spells[spell][0]
return spells[spell][0]
elif spell == 'shield':
player[1] -= spells[spell][0]
spells[spell][2] = spells[spell][1]
return spells[spell][0]
elif spell == 'poison':
player[1] -= spells[spell][0]
spells[spell][2] = spells[spell][1]
return spells[spell][0]
elif spell == 'recharge':
player[1] -= spells[spell][0]
spells[spell][2] = spells[spell][1]
return spells[spell][0]
def apply_effects(player, boss, spells):
if spells['shield'][2] > 0:
player[2] = 7
spells['shield'][2] -= 1
if spells['poison'][2] > 0:
boss[0] -= 3
spells['poison'][2] -= 1
if spells['recharge'][2] > 0:
player[1] += 101
spells['recharge'][2] -= 1
def remove_effects(player, spells):
if spells['shield'][2] == 0:
player[2] = 0
def play(player, boss, spells, hard_game=False, mana_used=0, is_player_turn=True, used_spells=[]):
global lowest_cost
global best_spells
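    # depth-first search over all spell orderings, pruning branches that already cost at least as much as the best game found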
if mana_used >= lowest_cost or player[0] <= 0: #check for win/lose or if the current game is worse than one done in the past
return
elif boss[0] <= 0:
lowest_cost = mana_used
best_spells = used_spells
return
if hard_game and is_player_turn: #health penalty for playing on hard
player[0] -= 1
apply_effects(player, boss, spells) #apply passive effects if applicable
if player[0] <= 0: #check for win/lose again
return
elif boss[0] <= 0:
lowest_cost = mana_used
best_spells = used_spells
return
if is_player_turn:
for spell in ['poison', 'recharge', 'shield', 'drain', 'magic missile']: # try every spell
new_player = [x for x in player]
new_boss = [x for x in boss]
new_spells = spell_copy(spells)
cost = cast(new_player, new_boss, new_spells, spell)
if cost == -1:
continue
remove_effects(new_player, spells) #remove the effect of shield, quick and dirty implementation
play(new_player, new_boss, new_spells, hard_game, cost + mana_used, False, used_spells + [spell]) #next turn -> boss
else: #boss turn
new_player = [x for x in player]
new_player[0] -= max(boss[1] - player[2], 1)
remove_effects(new_player, spells)
play(new_player, boss, spells, hard_game, mana_used, True, used_spells) #next turn -> player
with open('input.txt') as input_file:
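    # each spell maps to [mana cost, effect duration, remaining active timer]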
spells = {'recharge' : [229, 5, 0], 'poison' : [173, 6, 0], 'shield' : [113, 6, 0], 'magic missile' : [53, 0, 0], 'drain' : [73, 0, 0]}
boss_stats = [] #hitpoint, damage
player_stats = [50, 500, 0] #hitpoint, mana, armor
for line in input_file:
boss_stats.append(int(re.search('(\d+)', line).group(1)))
start_time = time.time()
play([x for x in player_stats], [x for x in boss_stats], spell_copy(spells), False)
print 'To beat the boss on normal, it took this much mana:', lowest_cost
print 'These are the spells used, in order:', best_spells
print 'It took this many seconds to figure all of this out:', time.time() - start_time
start_time = time.time()
lowest_cost = 9999999999999
best_spells = []
play([x for x in player_stats], [x for x in boss_stats], spell_copy(spells), True)
print 'To beat the boss on hard, it took this much mana:', lowest_cost
print 'These are the spells used, in order:', best_spells
print 'It took this many seconds to figure all of this out:', time.time() - start_time
|
JulienCote/advent_of_code_2015
|
day22/wizard_battle.py
|
Python
|
mit
| 3,890
|
"""
Milestones
[x] display a diff of two files
[x] color code the diff
[x] populate output area after 'compare button' is pressed
[x] accept pasted data
[x] track download button event
[x] make diff downloadable
[x] autofill from gist combo ( webdiff/gist_id1-gist_id2 )
[x] deal with multifile gists somehow ( must gather examples )
[ ] accept dragged data
[ ] permalink, for sharing
[ ] implement additional origins beside gists
"""
import re
import webapp2
import difflib
import json
import urllib
from utils import PageHandler
from utils import logger
def get_raw_url_from_gist_id(gist_id, gist_name_proper=None):
gist_id = str(gist_id)
url = 'https://api.github.com/gists/' + gist_id
found_json = urllib.urlopen(url).read()
    wjson = json.loads(found_json)
files_flag = 'files'
file_names = wjson[files_flag].keys()
logger(file_names)
    logger(gist_name_proper)
    if not gist_name_proper:
        file_name = file_names[0]
    else:
        # this is a little crude.
        if gist_name_proper.startswith('file_'):
            gist_name_proper = gist_name_proper[5:]
        file_name = gist_name_proper
return wjson[files_flag][file_name]['raw_url']
def get_file(gist_id, gist_name_proper=None):
    url = get_raw_url_from_gist_id(gist_id, gist_name_proper)
conn = urllib.urlopen(url)
return conn.read()
# -----------------------
def make_diffstring(content_ab, separator):
raw_text_input_a, raw_text_input_b = content_ab
text_input_a = raw_text_input_a.split(separator)
text_input_b = raw_text_input_b.split(separator)
# http://docs.python.org/library/difflib.html
diff_object = difflib.HtmlDiff(wrapcolumn=87)
diff_string = diff_object.make_table( text_input_a, text_input_b)
    if not isinstance(diff_string, unicode):
logger('make_table failed')
return
return ''.join(diff_string)
def make_unified_diff(content_ab, separator):
a, b = content_ab
diff_generator = difflib.unified_diff( a=a.split(separator),
b=b.split(separator))
return '\n'.join(diff_generator)
# -----------------------
def perform_compare_or_download(self):
passed_args = self.request.arguments()
a = self.request.get('from').strip()
b = self.request.get('to').strip()
content_ab = a, b
    if ('comparer_button' in passed_args) and '' not in content_ab:
diff_string = make_diffstring(content_ab, '\r\n')
# if make_diffstring can't resolve the content
        if diff_string is None:
# perhaps pass an error message.. not needed for now
self.render('webdiff.html')
return
self.render('diff_into_base.html', diff=diff_string,
content_a=a,
content_b=b)
return
elif 'download_button' in passed_args:
file_name = self.request.get('filename')
CD_MESSAGE = "attachment; filename={}".format(file_name)
self.response.headers['Content-Disposition'] = CD_MESSAGE
self.response.headers['Content-Type'] = 'text/diff'
diff_wad = make_unified_diff(content_ab, '\r\n')
self.response.out.write(diff_wad)
return
self.render('webdiff.html', content_a=a, content_b=b)
# -----------------------
class _404(PageHandler):
def get(self):
self.redirect('/webdiff/')
class Welcome(PageHandler):
def get(self):
self.render('webdiff.html')
def post(self):
perform_compare_or_download(self)
class DiffGist(PageHandler):
"""Handles the matched regex of two numbers separated by a dash.
/webdiff/gistid1-gistid2
"""
def get(self, gist_ids):
gist_id_a, gist_id_b = gist_ids.split('-')
a = get_file(gist_id_a)
b = get_file(gist_id_b)
self.render('webdiff.html', content_a=a,
content_b=b)
def post(self, gist_ids):
perform_compare_or_download(self)
class MultiFileGist(PageHandler):
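    """Handles the matched regex for multi-file gists.
    /webdiff/gistid1>filename1&gistid2>filename2
    """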
def get(self, matched_string):
gist_a, gist_b = matched_string.split('&')
gist_a_id, gist_a_name = gist_a.split('>')
gist_b_id, gist_b_name = gist_b.split('>')
a = get_file(gist_a_id, gist_a_name)
b = get_file(gist_b_id, gist_b_name)
self.render('webdiff.html', content_a=a,
content_b=b)
def post(self, matched_string):
perform_compare_or_download(self)
GIST_ID_RE = r'(\d{3,}-\d{3,})'
MULTIFILE_RE = r'(\d{3,}>.*?&\d{3,}>.*?$)'
app = webapp2.WSGIApplication([ ('/webdiff/' + MULTIFILE_RE, MultiFileGist),
('/webdiff/' + GIST_ID_RE, DiffGist),
('/webdiff/?', Welcome),
('/?', _404)], debug=True)
|
zeffii/GAE-webdiff
|
webdiff.py
|
Python
|
mit
| 4,959
|
# -*- coding: utf-8 -*-
from bs4 import BeautifulSoup
import requests
import pickle
import os
import time
import re
import sys
WJN_BASE_URL = 'http://wjn.jp'
WJN_URL = 'http://wjn.jp/article/category/7/'
index_list_pkl = 'wjn_cat7_index_link_list.pkl'
sys.setrecursionlimit(1000000)
class WjnContents():
def __init__(self):
ksk_url_list = download_ksk_url_list()
ksk_contents = download_article_contents(ksk_url_list)
self.contents, self.labels = label_author(ksk_contents)
def download_ksk_url_list():
    # Collect from the wjn site the URLs of the articles written by 奈倉氏 and
    # 柏木氏, i.e. those whose titles contain 官能小説作家書き下ろし[実録].
    # TODO: increase the amount of collected data.
    # The wjn site only lists the most recent ~1000 article URLs, but older
    # articles themselves are still online, so their URLs must be obtained
    # some other way. A Google search for
    # site:wjn.jp/article/detail/ "官能小説作家書き下ろし"
    # returns roughly 2000 hits, so that could be used as a source.
wjn_cat7_index_link_list = []
if os.path.exists(index_list_pkl):
with open(index_list_pkl, 'rb') as f:
wjn_cat7_index_link_list = pickle.load(f)
else:
for i in range(1, 101):
if i == 1:
url = WJN_URL
else:
url = WJN_URL + str(i)
print(url)
            # the wjn site requires a User-Agent header when scraping
html = requests.get(url, headers={'User-Agent': 'test'})
soup = BeautifulSoup(html.content, "html5lib",
from_encoding='Shift-JIS')
wjn_cat7_index_link_list.extend(soup.find_all('a',
{"class": "title"}))
            # a delay between requests is required
time.sleep(1)
with open(index_list_pkl, 'wb') as f:
pickle.dump(wjn_cat7_index_link_list, f)
ksk_url_list = [WJN_BASE_URL+a.get('href') for a in
wjn_cat7_index_link_list
if re.search(r"官能小説作家書き下ろし", a.text)]
return ksk_url_list
def download_article_contents(ksk_url_list, single=False):
    # extract the body text from each article and pickle the result
if single:
contents = []
for url in ksk_url_list:
html = requests.get(url, headers={'User-Agent': 'test'})
soup = BeautifulSoup(html.text,
"html5lib")
content = soup.find("p", {"class": "desc"})
contents.append(content)
time.sleep(1)
else:
contents = []
if os.path.exists('wjn_content.pkl'):
with open('wjn_content.pkl', 'rb') as f:
contents = pickle.load(f)
else:
for url in ksk_url_list:
html = requests.get(url, headers={'User-Agent': 'test'})
soup = BeautifulSoup(html.text,
"html5lib", from_encoding="Shift-JIS")
content = soup.find("p", {"class": "desc"})
# print(content.text)
contents.append(content)
time.sleep(1)
with open('wjn_content.pkl', 'wb') as f:
pickle.dump(contents, f)
return contents
def label_author(ksk_contents):
author_removed_contents = []
author_labels = []
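    # label 1 -> articles by 柏木春人, label 0 -> articles by 奈倉清孝; the author's name is stripped from the text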
for content in ksk_contents:
# print(content.text)
# print(str(content))
if re.search(r"柏木春人", content.text):
author_removed_content = re.sub(r"柏木春人", "", content.text)
author_removed_contents.append(author_removed_content)
author_labels.append(1)
elif re.search(r"奈倉清孝", content.text):
author_removed_content = re.sub(r"奈倉清孝", "", content.text)
author_removed_contents.append(author_removed_content)
author_labels.append(0)
assert len(author_removed_contents) == len(author_labels)
return author_removed_contents, author_labels
|
hanarchy/DetectWjnAuthor
|
get_wjn_content.py
|
Python
|
mit
| 4,180
|
from __future__ import print_function, division, absolute_import
# Local imports
from rapid.common.spectrum import spectrum, SpectrumError, ZMat
from rapid.common.utils import normalize, clip, numerics, write_data
from rapid.common.save_script import save_script
from rapid.common.read_input import read_input
__all__ = ['spectrum',
'ZMat',
'SpectrumError',
'normalize',
'clip',
'numerics',
'write_data',
'save_script',
'read_input',
]
|
jensengrouppsu/rapid
|
rapid/common/__init__.py
|
Python
|
mit
| 537
|
#!/usr/bin/python
from controller.client import ClientController
from controller.movie import MovieController
from controller.rent import RentController
from domain.session import Session
from repository.fileclient import FileClientRepository
from repository.filemovie import FileMovieRepository
from repository.filerent import FileRentRepository
from ui.ui import UI
class App(object):
def __init__(self, first_message=""):
"""
The construction for the app. Used for reading the bloc from the file
"""
self.client_repository = FileClientRepository("clients_new.dat")
self.movie_repository = FileMovieRepository("movies_new.dat")
self.rent_repository = FileRentRepository("rents_new.dat")
self.client_controller = ClientController(self.client_repository)
self.movie_controller = MovieController(self.movie_repository)
self.rent_controller = RentController(rent_repository=self.rent_repository,
client_controller=self.client_controller,
movie_controller=self.movie_controller)
self.ui = UI(self.client_controller, self.movie_controller, self.rent_controller)
Session.set_message(first_message)
def run(self):
"""
Main loop of the application
"""
# self.load()
self.ui.display_message()
self.ui.run()
# self.save()
self.exit()
# def load(self):
# """
# Load the previous state
# """
# try:
# with open("clients.dat", "r") as fp:
# self.client_repository._repository = cPickle.load(fp)
# except cPickle.UnpicklingError:
# pass
# except IOError:
# pass
#
# try:
# with open("movies.dat", "r") as fp:
# self.movie_repository._repository = cPickle.load(fp)
# except cPickle.UnpicklingError:
# pass
# except IOError:
# pass
#
# try:
# with open("rents.dat", "r") as fp:
# self.rent_repository._repository = cPickle.load(fp)
# except cPickle.UnpicklingError:
# pass
# except IOError:
# pass
#
# def save(self):
# """
# Saves the current state
# """
# with open("clients.dat", "w") as fp:
# cPickle.dump(self.client_repository._repository, fp)
# with open("movies.dat", "w") as fp:
# cPickle.dump(self.movie_repository._repository, fp)
# with open("rents.dat", "w") as fp:
# cPickle.dump(self.rent_repository._repository, fp)
@staticmethod
def exit():
"""
Quits the app, called on 'quit' command
"""
exit("\nBye Bye :)")
|
leyyin/university
|
fundamentals-of-programming/labs/lab_5-11/app.py
|
Python
|
mit
| 2,835
|
# vim: set fileencoding=utf8 :
from __future__ import with_statement
from __future__ import unicode_literals
import os
import random
import string
import codecs
from six import StringIO, BytesIO
import logging
import socket
from pytds.tds_types import TimeType, DateTime2Type, DateType, DateTimeOffsetType, BitType, TinyIntType, SmallIntType, \
IntType, BigIntType, RealType, FloatType, NVarCharType, VarBinaryType, SmallDateTimeType, DateTimeType, DecimalType, \
MoneyType, UniqueIdentifierType, VariantType, ImageType, VarBinaryMaxType, VarCharType, TextType, NTextType, \
NVarCharMaxType, VarCharMaxType, XmlType
try:
    import unittest2 as unittest
except ImportError:
    import unittest
import sys
from decimal import Decimal, getcontext
from time import sleep
from datetime import datetime, date, time
import uuid
import pytest
import pytds.tz
import pytds.login
import pytds.smp
tzoffset = pytds.tz.FixedOffsetTimezone
utc = pytds.tz.utc
import pytds.extensions
import six
from six.moves import xrange
from pytds import (
connect, ProgrammingError, TimeoutError, Time,
Error, IntegrityError, Timestamp, DataError, Date, Binary,
output, default,
STRING, BINARY, NUMBER, DATETIME, DECIMAL, INTEGER, REAL, XML)
from pytds.tds_types import (DateTimeSerializer, SmallMoneyType)
from pytds.tds_base import (
Column,
IS_TDS73_PLUS, IS_TDS71_PLUS,
)
import dbapi20
import pytds
import settings
logger = logging.getLogger(__name__)
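# LIVE_TEST gates the tests that need a reachable SQL Server; connection details come from the settings module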
LIVE_TEST = getattr(settings, 'LIVE_TEST', True)
def create_test_database():
if not LIVE_TEST:
return
logger.info('in setup class')
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
kwargs['autocommit'] = True
with connect(**kwargs) as conn:
with conn.cursor() as cur:
            try:
                cur.execute('drop database [{0}]'.format(settings.DATABASE))
            except:
                logger.exception('Failed to drop database')
try:
cur.execute('create database [{0}]'.format(settings.DATABASE))
except:
pass
try:
cur.execute('create schema myschema')
except:
pass
try:
cur.execute('create table myschema.bulk_insert_table(num int, data varchar(100))')
except:
pass
try:
cur.execute('''
create procedure testproc (@param int, @add int = 2, @outparam int output)
as
begin
set nocount on
--select @param
set @outparam = @param + @add
return @outparam
end
''')
except:
pass
create_test_database()
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
def test_connection_timeout_with_mars():
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
kwargs['timeout'] = 1
kwargs['use_mars'] = True
with connect(*settings.CONNECT_ARGS, **kwargs) as conn:
cur = conn.cursor()
with pytest.raises(TimeoutError):
cur.execute("waitfor delay '00:00:05'")
cur.execute('select 1')
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
def test_connection_no_mars_autocommit():
kwargs = settings.CONNECT_KWARGS.copy()
kwargs.update({
'use_mars': False,
'timeout': 1,
'pooling': True,
'autocommit': True,
})
with connect(**kwargs) as conn:
with conn.cursor() as cur:
# test execute scalar with empty response
cur.execute_scalar('declare @tbl table(f int); select * from @tbl')
cur.execute("print 'hello'")
messages = cur.messages
assert len(messages) == 1
assert len(messages[0]) == 2
            # in the following assert the exception class does not have to match exactly
assert messages[0][0] == pytds.OperationalError
assert messages[0][1].text == 'hello'
assert messages[0][1].line == 1
assert messages[0][1].severity == 0
assert messages[0][1].number == 0
assert messages[0][1].state == 1
assert 'hello' in messages[0][1].message
# test cursor usage after close, should raise exception
cur = conn.cursor()
cur.execute_scalar('select 1')
cur.close()
with pytest.raises(Error) as ex:
cur.execute('select 1')
assert 'Cursor is closed' in str(ex)
# calling get_proc_return_status on closed cursor works
# this test does not have to pass
assert cur.get_proc_return_status() is None
# calling rowcount on closed cursor works
# this test does not have to pass
assert cur.rowcount == -1
# calling description on closed cursor works
# this test does not have to pass
assert cur.description is None
# calling messages on closed cursor works
# this test does not have to pass
assert cur.messages is None
# calling description on closed cursor works
# this test does not have to pass
assert cur.native_description is None
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
def test_connection_timeout_no_mars():
kwargs = settings.CONNECT_KWARGS.copy()
kwargs.update({
'use_mars': False,
'timeout': 1,
'pooling': True,
})
with connect(**kwargs) as conn:
with conn.cursor() as cur:
with pytest.raises(TimeoutError):
cur.execute("waitfor delay '00:00:05'")
with conn.cursor() as cur:
cur.execute("select 1")
cur.fetchall()
# test cancelling
with conn.cursor() as cur:
cur.execute('select 1')
cur.cancel()
assert cur.fetchall() == []
cur.execute('select 2')
assert cur.fetchall() == [(2,)]
# test rollback
conn.rollback()
# test callproc on non-mars connection
with conn.cursor() as cur:
cur.callproc('sp_reset_connection')
with conn.cursor() as cur:
# test spid property on non-mars cursor
assert cur.spid is not None
# test tzinfo_factory property r/w
cur.tzinfo_factory = cur.tzinfo_factory
# test non-mars cursor with connection pool enabled
with connect(**kwargs) as conn:
with conn.cursor() as cur:
cur.execute('select 1')
assert cur.fetchall() == [(1,)]
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
def test_connection_no_mars_no_pooling():
kwargs = settings.CONNECT_KWARGS.copy()
kwargs.update({
'use_mars': False,
'pooling': False,
})
with connect(**kwargs) as conn:
with conn.cursor() as cur:
cur.execute("select 1")
assert cur.fetchall() == [(1,)]
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
def test_row_strategies():
kwargs = settings.CONNECT_KWARGS.copy()
kwargs.update({
'row_strategy': pytds.list_row_strategy,
})
with connect(**kwargs) as conn:
with conn.cursor() as cur:
cur.execute("select 1")
assert cur.fetchall() == [[1]]
kwargs.update({
'row_strategy': pytds.namedtuple_row_strategy,
})
import collections
with connect(**kwargs) as conn:
with conn.cursor() as cur:
cur.execute("select 1 as f")
assert cur.fetchall() == [collections.namedtuple('Row', ['f'])(1)]
kwargs.update({
'row_strategy': pytds.recordtype_row_strategy,
})
with connect(**kwargs) as conn:
with conn.cursor() as cur:
cur.execute("select 1 as e, 2 as f")
row, = cur.fetchall()
assert row.e == 1
assert row.f == 2
assert row[0] == 1
assert row[:] == (1, 2)
row[0] = 3
assert row[:] == (3, 2)
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
def test_get_instances():
if not hasattr(settings, 'BROWSER_ADDRESS'):
return unittest.skip('BROWSER_ADDRESS setting is not defined')
pytds.tds.tds7_get_instances(settings.BROWSER_ADDRESS)
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class ConnectionTestCase(unittest.TestCase):
def setUp(self):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
self.conn = connect(*settings.CONNECT_ARGS, **kwargs)
def tearDown(self):
self.conn.close()
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class NoMarsTestCase(unittest.TestCase):
def setUp(self):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
kwargs['use_mars'] = False
self.conn = connect(*settings.CONNECT_ARGS, **kwargs)
def tearDown(self):
self.conn.close()
def test_commit(self):
cursor = self.conn.cursor()
cursor.execute('select 1')
cursor.fetchall()
self.conn.commit()
class TestCaseWithCursor(ConnectionTestCase):
def setUp(self):
super(TestCaseWithCursor, self).setUp()
self.cursor = self.conn.cursor()
#def test_mars_sessions_recycle_ids(self):
# if not self.conn.mars_enabled:
# self.skipTest('Only relevant to mars')
# for _ in xrange(2 ** 16 + 1):
# cur = self.conn.cursor()
# cur.close()
def test_parameters_ll(self):
_params_tests(self)
class TestVariant(ConnectionTestCase):
def _t(self, result, sql):
with self.conn.cursor() as cur:
cur.execute("select cast({0} as sql_variant)".format(sql))
val, = cur.fetchone()
self.assertEqual(result, val)
def test_new_datetime(self):
if not IS_TDS73_PLUS(self.conn):
self.skipTest('Requires TDS7.3+')
import pytds.tz
self._t(datetime(2011, 2, 3, 10, 11, 12, 3000), "cast('2011-02-03T10:11:12.003000' as datetime2)")
self._t(time(10, 11, 12, 3000), "cast('10:11:12.003000' as time)")
self._t(date(2011, 2, 3), "cast('2011-02-03' as date)")
self._t(datetime(2011, 2, 3, 10, 11, 12, 3000, pytds.tz.FixedOffsetTimezone(3 * 60)), "cast('2011-02-03T10:11:12.003000+03:00' as datetimeoffset)")
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class BadConnection(unittest.TestCase):
def test_invalid_parameters(self):
with self.assertRaises(Error):
with connect(server=settings.HOST + 'bad', database='master', user='baduser', password=settings.PASSWORD, login_timeout=1) as conn:
with conn.cursor() as cur:
cur.execute('select 1')
with self.assertRaises(Error):
with connect(server=settings.HOST, database='doesnotexist', user=settings.USER, password=settings.PASSWORD) as conn:
with conn.cursor() as cur:
cur.execute('select 1')
with self.assertRaises(Error):
with connect(server=settings.HOST, database='master', user='baduser', password=None) as conn:
with conn.cursor() as cur:
cur.execute('select 1')
def test_instance_and_port(self):
host = settings.HOST
if '\\' in host:
host, _ = host.split('\\')
with self.assertRaisesRegexp(ValueError, 'Both instance and port shouldn\'t be specified'):
with connect(server=host + '\\badinstancename', database='master', user=settings.USER, password=settings.PASSWORD, port=1212) as conn:
with conn.cursor() as cur:
cur.execute('select 1')
def get_spid(conn):
with conn.cursor() as cur:
return cur.spid
def kill(conn, spid):
with conn.cursor() as cur:
cur.execute('kill {0}'.format(spid))
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class ConnectionClosing(unittest.TestCase):
def test_open_close(self):
for x in xrange(3):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
connect(**kwargs).close()
def test_closing_after_closed_by_server(self):
"""
You should be able to call close on connection closed by server
"""
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
kwargs['autocommit'] = True
with connect(**kwargs) as master_conn:
kwargs['autocommit'] = False
with connect(**kwargs) as conn:
with conn.cursor() as cur:
cur.execute('select 1')
conn.commit()
kill(master_conn, get_spid(conn))
sleep(0.2)
conn.close()
def test_connection_closed_by_server(self):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
with connect(**kwargs) as master_conn:
master_conn.autocommit = True
with connect(**kwargs) as conn:
conn.autocommit = False
# test overall recovery
with conn.cursor() as cur:
cur.execute('select 1')
conn.commit()
kill(master_conn, get_spid(conn))
sleep(0.2)
cur.execute('select 1')
cur.fetchall()
kill(master_conn, get_spid(conn))
sleep(0.2)
with conn.cursor() as cur:
cur.execute('select 1')
# test cursor opening in a transaction, it should raise exception
# make transaction dirty
with conn.cursor() as cur:
cur.execute('select 1')
kill(master_conn, get_spid(conn))
sleep(0.2)
# it does not have to raise this specific exception
with pytest.raises(socket.error):
with conn.cursor() as cur:
cur.execute('select 1')
# test recovery on transaction
with conn.cursor() as cur:
cur.execute('create table ##testtable3 (fld int)')
kill(master_conn, get_spid(conn))
sleep(0.2)
with self.assertRaises(Exception):
cur.execute('select * from ##testtable2')
cur.fetchall()
conn.rollback()
cur.execute('select 1')
# test server closed connection on rollback
with conn.cursor() as cur:
cur.execute('select 1')
kill(master_conn, get_spid(conn))
sleep(0.2)
conn.rollback()
#with connect(server=settings.HOST, database='master', user=settings.USER, password=settings.PASSWORD) as conn:
# spid = get_spid(conn)
# with conn.cursor() as cur:
# # test recovery of specific lowlevel methods
# tds_submit_query(cur._session, "waitfor delay '00:00:05'; select 1")
# kill(master_conn, spid)
# self.assertTrue(cur._session.is_connected())
# with self.assertRaises(Exception):
# tds_process_tokens(cur._session, TDS_TOKEN_RESULTS)
# self.assertFalse(cur._session.is_connected())
#class EncryptionTest(unittest.TestCase):
# def runTest(self):
# conn = connect(server=settings.HOST, database='master', user=settings.USER, password=settings.PASSWORD, encryption_level=TDS_ENCRYPTION_REQUIRE)
# cur = conn.cursor()
# cur.execute('select 1')
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class SmallDateTimeTest(ConnectionTestCase):
def _testval(self, val):
with self.conn.cursor() as cur:
cur.execute('select cast(%s as smalldatetime)', (val,))
self.assertEqual(cur.fetchall(), [(val,)])
def runTest(self):
self._testval(Timestamp(2010, 2, 1, 10, 12, 0))
self._testval(Timestamp(1900, 1, 1, 0, 0, 0))
self._testval(Timestamp(2079, 6, 6, 23, 59, 0))
with self.assertRaises(Error):
self._testval(Timestamp(1899, 1, 1, 0, 0, 0))
with self.assertRaises(Error):
self._testval(Timestamp(2080, 1, 1, 0, 0, 0))
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class DateTimeTest(ConnectionTestCase):
def _testencdec(self, val):
self.assertEqual(val, DateTimeSerializer.decode(*DateTimeSerializer._struct.unpack(DateTimeSerializer.encode(val))))
def _testval(self, val):
with self.conn.cursor() as cur:
cur.execute('select cast(%s as datetime)', (val,))
self.assertEqual(cur.fetchall(), [(val,)])
def runTest(self):
with self.conn.cursor() as cur:
cur.execute("select cast('9999-12-31T23:59:59.997' as datetime)")
self.assertEqual(cur.fetchall(), [(Timestamp(9999, 12, 31, 23, 59, 59, 997000),)])
self._testencdec(Timestamp(2010, 1, 2, 10, 11, 12))
self._testval(Timestamp(2010, 1, 2, 0, 0, 0))
self._testval(Timestamp(2010, 1, 2, 10, 11, 12))
self._testval(Timestamp(1753, 1, 1, 0, 0, 0))
self._testval(Timestamp(9999, 12, 31, 0, 0, 0))
with self.conn.cursor() as cur:
cur.execute("select cast(null as datetime)")
self.assertEqual(cur.fetchall(), [(None,)])
self._testval(Timestamp(9999, 12, 31, 23, 59, 59, 997000))
with self.assertRaises(Error):
self._testval(Timestamp(1752, 1, 1, 0, 0, 0))
with self.conn.cursor() as cur:
cur.execute('''
if object_id('testtable') is not null
drop table testtable
''')
cur.execute('create table testtable (col datetime not null)')
dt = Timestamp(2010, 1, 2, 20, 21, 22, 123000)
cur.execute('insert into testtable values (%s)', (dt,))
cur.execute('select col from testtable')
self.assertEqual(cur.fetchone(), (dt,))
class NewDateTimeTest(ConnectionTestCase):
def test_datetimeoffset(self):
if not IS_TDS73_PLUS(self.conn):
self.skipTest('Requires TDS7.3+')
def _testval(val):
with self.conn.cursor() as cur:
import pytds.tz
cur.tzinfo_factory = pytds.tz.FixedOffsetTimezone
cur.execute('select cast(%s as datetimeoffset)', (val,))
self.assertEqual(cur.fetchall(), [(val,)])
with self.conn.cursor() as cur:
import pytds.tz
cur.tzinfo_factory = pytds.tz.FixedOffsetTimezone
cur.execute("select cast('2010-01-02T20:21:22.1234567+05:00' as datetimeoffset)")
self.assertEqual(datetime(2010, 1, 2, 20, 21, 22, 123456, tzoffset(5 * 60)), cur.fetchone()[0])
_testval(Timestamp(2010, 1, 2, 0, 0, 0, 0, utc))
_testval(Timestamp(2010, 1, 2, 0, 0, 0, 0, tzoffset(5 * 60)))
_testval(Timestamp(1, 1, 1, 0, 0, 0, 0, utc))
_testval(Timestamp(9999, 12, 31, 23, 59, 59, 999999, utc))
_testval(Timestamp(2010, 1, 2, 0, 0, 0, 0, tzoffset(14)))
_testval(Timestamp(2010, 1, 2, 0, 0, 0, 0, tzoffset(-14)))
_testval(Timestamp(2010, 1, 2, 0, 0, 0, 0, tzoffset(-15)))
def test_time(self):
if not IS_TDS73_PLUS(self.conn):
self.skipTest('Requires TDS7.3+')
def testval(val):
with self.conn.cursor() as cur:
cur.execute('select cast(%s as time)', (val,))
self.assertEqual(cur.fetchall(), [(val,)])
testval(Time(14, 16, 18, 123456))
testval(Time(0, 0, 0, 0))
testval(Time(0, 0, 0, 0))
testval(Time(0, 0, 0, 0))
testval(Time(23, 59, 59, 999999))
testval(Time(0, 0, 0, 0))
testval(Time(0, 0, 0, 0))
testval(Time(0, 0, 0, 0))
def test_datetime2(self):
if not IS_TDS73_PLUS(self.conn):
self.skipTest('Requires TDS7.3+')
def testval(val):
with self.conn.cursor() as cur:
cur.execute('select cast(%s as datetime2)', (val,))
self.assertEqual(cur.fetchall(), [(val,)])
testval(Timestamp(2010, 1, 2, 20, 21, 22, 345678))
testval(Timestamp(2010, 1, 2, 0, 0, 0))
testval(Timestamp(1, 1, 1, 0, 0, 0))
testval(Timestamp(9999, 12, 31, 23, 59, 59, 999999))
def test_date(self):
if not IS_TDS73_PLUS(self.conn):
self.skipTest('Requires TDS7.3+')
def testval(val):
with self.conn.cursor() as cur:
cur.execute('select cast(%s as date)', (val,))
self.assertEqual(cur.fetchall(), [(val,)])
testval(Date(2010, 1, 2))
testval(Date(2010, 1, 2))
testval(Date(1, 1, 1))
testval(Date(9999, 12, 31))
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class Auth(unittest.TestCase):
@unittest.skipUnless(os.getenv('NTLM_USER') and os.getenv('NTLM_PASSWORD'), "requires NTLM_USER and NTLM_PASSWORD environment variables to be set")
def test_ntlm(self):
conn = connect(settings.HOST, auth=pytds.login.NtlmAuth(user_name=os.getenv('NTLM_USER'), password=os.getenv('NTLM_PASSWORD')))
with conn.cursor() as cursor:
cursor.execute('select 1')
cursor.fetchall()
@unittest.skipUnless(sys.platform.startswith("win"), "requires Windows")
def test_sspi(self):
from pytds.login import SspiAuth
with connect(settings.HOST, auth=SspiAuth()) as conn:
with conn.cursor() as cursor:
cursor.execute('select 1')
cursor.fetchall()
@unittest.skipIf(getattr(settings, 'SKIP_SQL_AUTH', False), 'SKIP_SQL_AUTH is set')
def test_sqlauth(self):
with connect(settings.HOST, user=settings.USER, password=settings.PASSWORD) as conn:
with conn.cursor() as cursor:
cursor.execute('select 1')
cursor.fetchall()
class CloseCursorTwice(ConnectionTestCase):
def runTest(self):
cursor = self.conn.cursor()
cursor.close()
cursor.close()
class RegressionSuite(ConnectionTestCase):
def test_cancel(self):
self.conn.cursor().cancel()
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class TimezoneTests(unittest.TestCase):
def check_val(self, conn, sql, input, output):
with conn.cursor() as cur:
cur.execute('select ' + sql, (input,))
rows = cur.fetchall()
self.assertEqual(rows[0][0], output)
def runTest(self):
kwargs = settings.CONNECT_KWARGS.copy()
use_tz = utc
kwargs['use_tz'] = use_tz
kwargs['database'] = 'master'
with connect(*settings.CONNECT_ARGS, **kwargs) as conn:
# Naive time should be interpreted as use_tz
self.check_val(conn, '%s',
datetime(2011, 2, 3, 10, 11, 12, 3000),
datetime(2011, 2, 3, 10, 11, 12, 3000, utc))
            # Aware time should be passed as-is
dt = datetime(2011, 2, 3, 10, 11, 12, 3000, tzoffset(1))
self.check_val(conn, '%s', dt, dt)
# Aware time should be converted to use_tz if not using datetimeoffset type
dt = datetime(2011, 2, 3, 10, 11, 12, 3000, tzoffset(1))
if IS_TDS73_PLUS(conn):
self.check_val(conn, 'cast(%s as datetime2)', dt, dt.astimezone(use_tz))
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class DbapiTestSuite(dbapi20.DatabaseAPI20Test, ConnectionTestCase):
driver = pytds
connect_args = settings.CONNECT_ARGS
connect_kw_args = settings.CONNECT_KWARGS
# def _connect(self):
# return connection
def _try_run(self, *args):
with self._connect() as con:
with con.cursor() as cur:
for arg in args:
cur.execute(arg)
def _try_run2(self, cur, *args):
for arg in args:
cur.execute(arg)
# This should create the "lower" sproc.
def _callproc_setup(self, cur):
self._try_run2(
cur,
"""IF OBJECT_ID(N'[dbo].[to_lower]', N'P') IS NOT NULL DROP PROCEDURE [dbo].[to_lower]""",
"""
CREATE PROCEDURE to_lower
@input nvarchar(max)
AS
BEGIN
select LOWER(@input)
END
""",
)
# This should create a sproc with a return value.
def _retval_setup(self, cur):
self._try_run2(
cur,
"""IF OBJECT_ID(N'[dbo].[add_one]', N'P') IS NOT NULL DROP PROCEDURE [dbo].[add_one]""",
"""
CREATE PROCEDURE add_one (@input int)
AS
BEGIN
return @input+1
END
""",
)
def test_retval(self):
with self._connect() as con:
cur = con.cursor()
self._retval_setup(cur)
values = cur.callproc('add_one', (1,))
self.assertEqual(values[0], 1, 'input parameter should be left unchanged: %s' % (values[0],))
self.assertEqual(cur.description, None, "No resultset was expected.")
self.assertEqual(cur.return_value, 2, "Invalid return value: %s" % (cur.return_value,))
# This should create a sproc with a return value.
def _retval_select_setup(self, cur):
self._try_run2(
cur,
"""IF OBJECT_ID(N'[dbo].[add_one_select]', N'P') IS NOT NULL DROP PROCEDURE [dbo].[add_one_select]""",
"""
CREATE PROCEDURE add_one_select (@input int)
AS
BEGIN
select 'a' as a
select 'b' as b
return @input+1
END
""",
)
def test_retval_select(self):
with self._connect() as con:
cur = con.cursor()
self._retval_select_setup(cur)
values = cur.callproc('add_one_select', (1,))
self.assertEqual(values[0], 1, 'input parameter should be left unchanged: %s' % (values[0],))
self.assertEqual(len(cur.description), 1, "Unexpected resultset.")
self.assertEqual(cur.description[0][0], 'a', "Unexpected resultset.")
self.assertEqual(cur.fetchall(), [('a',)], 'Unexpected resultset.')
self.assertTrue(cur.nextset(), 'No second resultset found.')
self.assertEqual(len(cur.description), 1, "Unexpected resultset.")
self.assertEqual(cur.description[0][0], 'b', "Unexpected resultset.")
self.assertEqual(cur.return_value, 2, "Invalid return value: %s" % (cur.return_value,))
with self.assertRaises(Error):
cur.fetchall()
# This should create a sproc with an output parameter.
def _outparam_setup(self, cur):
self._try_run2(
cur,
"""IF OBJECT_ID(N'[dbo].[add_one_out]', N'P') IS NOT NULL DROP PROCEDURE [dbo].[add_one_out]""",
"""
CREATE PROCEDURE add_one_out (@input int, @output int OUTPUT)
AS
BEGIN
SET @output = @input+1
END
""",
)
def test_outparam(self):
with self._connect() as con:
cur = con.cursor()
self._outparam_setup(cur)
values = cur.callproc('add_one_out', (1, output(value=1)))
self.assertEqual(len(values), 2, 'expected 2 parameters')
self.assertEqual(values[0], 1, 'input parameter should be unchanged')
self.assertEqual(values[1], 2, 'output parameter should get new values')
values = cur.callproc('add_one_out', (None, output(value=1)))
self.assertEqual(len(values), 2, 'expected 2 parameters')
self.assertEqual(values[0], None, 'input parameter should be unchanged')
self.assertEqual(values[1], None, 'output parameter should get new values')
def test_assigning_select(self):
# test that assigning select does not interfere with result sets
with self._connect() as con:
cur = con.cursor()
cur.execute("""
declare @var1 int
select @var1 = 1
select @var1 = 2
select 'value'
""")
self.assertFalse(cur.description)
self.assertTrue(cur.nextset())
self.assertFalse(cur.description)
self.assertTrue(cur.nextset())
self.assertTrue(cur.description)
self.assertEqual([(u'value',)], cur.fetchall())
self.assertFalse(cur.nextset())
cur.execute("""
set nocount on
declare @var1 int
select @var1 = 1
select @var1 = 2
select 'value'
""")
self.assertTrue(cur.description)
self.assertEqual([(u'value',)], cur.fetchall())
self.assertFalse(cur.nextset())
# Don't need setoutputsize tests.
def test_setoutputsize(self):
pass
def help_nextset_setUp(self, cur):
self._try_run2(
cur,
"""IF OBJECT_ID(N'[dbo].[deleteme]', N'P') IS NOT NULL DROP PROCEDURE [dbo].[deleteme]""",
"""
create procedure deleteme
as
begin
select count(*) from %sbooze
select name from %sbooze
end
""" % (self.table_prefix, self.table_prefix),
)
def help_nextset_tearDown(self, cur):
cur.execute("drop procedure deleteme")
def test_ExceptionsAsConnectionAttributes(self):
pass
def test_select_decimal_zero(self):
with self._connect() as con:
expected = (
Decimal('0.00'),
Decimal('0.0'),
Decimal('-0.00'))
cur = con.cursor()
cur.execute("SELECT %s as A, %s as B, %s as C", expected)
result = cur.fetchall()
self.assertEqual(result[0], expected)
def test_type_objects(self):
with self._connect() as con:
cur = con.cursor()
cur.execute("""
select cast(0 as varchar),
cast(1 as binary),
cast(2 as int),
cast(3 as real),
cast(4 as decimal),
cast('2005' as datetime),
cast('6' as xml)
""")
self.assertTrue(cur.description)
col_types = [col[1] for col in cur.description]
self.assertEqual(col_types[0], STRING)
self.assertEqual(col_types[1], BINARY)
self.assertEqual(col_types[2], NUMBER)
self.assertEqual(col_types[2], INTEGER)
self.assertEqual(col_types[3], NUMBER)
self.assertEqual(col_types[3], REAL)
# self.assertEqual(col_types[4], NUMBER) ?
self.assertEqual(col_types[4], DECIMAL)
self.assertEqual(col_types[5], DATETIME)
self.assertEqual(col_types[6], XML)
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class TestBug4(unittest.TestCase):
def test_as_dict(self):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
with connect(*settings.CONNECT_ARGS, **kwargs) as conn:
conn.as_dict = True
with conn.cursor() as cur:
cur.execute('select 1 as a, 2 as b')
self.assertDictEqual({'a': 1, 'b': 2}, cur.fetchone())
def _params_tests(self):
def test_val(typ, val):
with self.conn.cursor() as cur:
param = Column(type=typ, value=val)
logger.info("Testing with %s", repr(param))
cur.execute('select %s', [param])
self.assertTupleEqual(cur.fetchone(), (val,))
self.assertIs(cur.fetchone(), None)
test_val(BitType(), True)
test_val(BitType(), False)
test_val(BitType(), None)
test_val(TinyIntType(), 255)
test_val(SmallIntType(), 2 ** 15 - 1)
test_val(IntType(), 2 ** 31 - 1)
test_val(BigIntType(), 2 ** 63 - 1)
test_val(IntType(), None)
test_val(RealType(), 0.25)
test_val(FloatType(), 0.25)
test_val(RealType(), None)
test_val(SmallDateTimeType(), datetime(1900, 1, 1, 0, 0, 0))
test_val(SmallDateTimeType(), datetime(2079, 6, 6, 23, 59, 0))
test_val(DateTimeType(), datetime(1753, 1, 1, 0, 0, 0))
test_val(DateTimeType(), datetime(9999, 12, 31, 23, 59, 59, 990000))
test_val(DateTimeType(), None)
if pytds.tds_base.IS_TDS73_PLUS(self.conn._conn):
test_val(DateType(), date(1, 1, 1))
test_val(DateType(), date(9999, 12, 31))
test_val(DateType(), None)
test_val(TimeType(precision=0), time(0, 0, 0))
test_val(TimeType(precision=6), time(23, 59, 59, 999999))
test_val(TimeType(precision=0), None)
test_val(DateTime2Type(precision=0), datetime(1, 1, 1, 0, 0, 0))
test_val(DateTime2Type(precision=6), datetime(9999, 12, 31, 23, 59, 59, 999999))
test_val(DateTime2Type(precision=0), None)
test_val(DateTimeOffsetType(precision=6), datetime(9999, 12, 31, 23, 59, 59, 999999, utc))
test_val(DateTimeOffsetType(precision=6), datetime(9999, 12, 31, 23, 59, 59, 999999, tzoffset(14)))
test_val(DateTimeOffsetType(precision=0), datetime(1, 1, 1, 0, 0, 0, tzinfo=tzoffset(-14)))
#test_val(DateTimeOffsetType(precision=0), datetime(1, 1, 1, 0, 0, 0, tzinfo=tzoffset(14)))
test_val(DateTimeOffsetType(precision=6), None)
test_val(DecimalType(scale=6, precision=38), Decimal('123.456789'))
test_val(DecimalType(scale=6, precision=38), None)
test_val(SmallMoneyType(), Decimal('-214748.3648'))
test_val(SmallMoneyType(), Decimal('214748.3647'))
test_val(MoneyType(), Decimal('922337203685477.5807'))
test_val(MoneyType(), Decimal('-922337203685477.5808'))
test_val(MoneyType(), None)
test_val(UniqueIdentifierType(), None)
test_val(UniqueIdentifierType(), uuid.uuid4())
if pytds.tds_base.IS_TDS71_PLUS(self.conn._conn):
test_val(VariantType(), None)
#test_val(self.conn._conn.type_factory.SqlVariant(10), 100)
test_val(VarBinaryType(size=10), b'')
test_val(VarBinaryType(size=10), b'testtest12')
test_val(VarBinaryType(size=10), None)
test_val(VarBinaryType(size=8000), b'x' * 8000)
test_val(VarCharType(size=10), None)
test_val(VarCharType(size=10), '')
test_val(VarCharType(size=10), 'test')
test_val(VarCharType(size=8000), 'x' * 8000)
test_val(NVarCharType(size=10), u'')
test_val(NVarCharType(size=10), u'testtest12')
test_val(NVarCharType(size=10), None)
test_val(NVarCharType(size=4000), u'x' * 4000)
test_val(TextType(), None)
test_val(TextType(), '')
test_val(TextType(), 'hello')
test_val(NTextType(), None)
test_val(NTextType(), '')
test_val(NTextType(), 'hello')
test_val(ImageType(), None)
test_val(ImageType(), b'')
test_val(ImageType(), b'test')
if pytds.tds_base.IS_TDS72_PLUS(self.conn._conn):
test_val(VarBinaryMaxType(), None)
test_val(VarBinaryMaxType(), b'')
test_val(VarBinaryMaxType(), b'testtest12')
test_val(VarBinaryMaxType(), b'x' * (10 ** 6))
test_val(NVarCharMaxType(), None)
test_val(NVarCharMaxType(), 'test')
test_val(NVarCharMaxType(), 'x' * (10 ** 6))
test_val(VarCharMaxType(), None)
test_val(VarCharMaxType(), 'test')
test_val(VarCharMaxType(), 'x' * (10 ** 6))
test_val(XmlType(), '<root/>')
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class TestTds70(unittest.TestCase):
def setUp(self):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
kwargs['tds_version'] = pytds.tds_base.TDS70
self.conn = connect(*settings.CONNECT_ARGS, **kwargs)
def test_parsing(self):
_params_tests(self)
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class TestTds71(unittest.TestCase):
def setUp(self):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
kwargs['tds_version'] = pytds.tds_base.TDS71
self.conn = connect(*settings.CONNECT_ARGS, **kwargs)
def test_parsing(self):
_params_tests(self)
def test_transaction(self):
self.conn.rollback()
self.conn.commit()
def test_bulk(self):
f = StringIO("42\tfoo\n74\tbar\n")
with self.conn.cursor() as cur:
cur.copy_to(f, 'bulk_insert_table', schema='myschema', columns=('num', 'data'))
cur.execute('select num, data from myschema.bulk_insert_table')
self.assertListEqual(cur.fetchall(), [(42, 'foo'), (74, 'bar')])
def test_call_proc(self):
with self.conn.cursor() as cur:
val = 45
values = cur.callproc('testproc', (val, default, output(value=1)))
#self.assertEqual(cur.fetchall(), [(val,)])
self.assertEqual(val + 2, values[2])
self.assertEqual(val + 2, cur.get_proc_return_status())
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class TestTds72(unittest.TestCase):
def setUp(self):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
kwargs['tds_version'] = pytds.tds_base.TDS72
self.conn = connect(*settings.CONNECT_ARGS, **kwargs)
def test_parsing(self):
_params_tests(self)
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class TestTds73A(unittest.TestCase):
def setUp(self):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
kwargs['tds_version'] = pytds.tds_base.TDS73A
self.conn = connect(*settings.CONNECT_ARGS, **kwargs)
def test_parsing(self):
_params_tests(self)
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class TestTds73B(unittest.TestCase):
def setUp(self):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['database'] = 'master'
kwargs['tds_version'] = pytds.tds_base.TDS73B
self.conn = connect(*settings.CONNECT_ARGS, **kwargs)
def test_parsing(self):
_params_tests(self)
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
class TestRawBytes(unittest.TestCase):
def setUp(self):
kwargs = settings.CONNECT_KWARGS.copy()
kwargs['bytes_to_unicode'] = False
kwargs['database'] = 'master'
self.conn = connect(*settings.CONNECT_ARGS, **kwargs)
def test_fetch(self):
cur = self.conn.cursor()
self.assertIsInstance(cur.execute_scalar("select cast('abc' as nvarchar(max))"), six.text_type)
self.assertIsInstance(cur.execute_scalar("select cast('abc' as varchar(max))"), six.binary_type)
self.assertIsInstance(cur.execute_scalar("select cast('abc' as text)"), six.binary_type)
self.assertIsInstance(cur.execute_scalar("select %s", ['abc']), six.text_type)
self.assertIsInstance(cur.execute_scalar("select %s", [b'abc']), six.binary_type)
rawBytes = six.b('\x01\x02\x03')
        self.assertEqual(rawBytes, cur.execute_scalar("select cast(0x010203 as varchar(max))"))
        self.assertEqual(rawBytes, cur.execute_scalar("select %s", [rawBytes]))
        utf8char = six.b('\xee\xb4\xba')
        self.assertEqual(utf8char, cur.execute_scalar("select %s", [utf8char]))
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
def test_invalid_block_size():
kwargs = settings.CONNECT_KWARGS.copy()
kwargs.update({
'blocksize': 4000,
})
with connect(**kwargs) as conn:
with conn.cursor() as cur:
cur.execute_scalar("select '{}'".format('x' * 8000))
@unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set")
def test_readonly_connection():
kwargs = settings.CONNECT_KWARGS.copy()
kwargs.update({
'readonly': True,
})
with connect(**kwargs) as conn:
with conn.cursor() as cur:
cur.execute_scalar("select 1")
|
tpow/pytds
|
tests/all_test.py
|
Python
|
mit
| 40,325
|
import pytest
from parglare import Grammar, GrammarError
def test_terminal_exists_noerror_on_terminal_definition_before():
"""
Test the situation where we have inline terminal used but the
same terminal is defined in the `terminals` section.
"""
grammar = """
Program: "begin"
statements=Statement*
ProgramEnd;
Statement: "end" "transaction" // here we are using inline terminal `end`
// instead of using a name reference
| "command";
terminals
ProgramEnd: "end";
"""
with pytest.raises(GrammarError) as e:
Grammar.from_string(grammar)
assert 'match the same string' in str(e.value)
|
igordejanovic/parglare
|
tests/func/regressions/test_terminal_exists_noerror_on_terminal_definition_before.py
|
Python
|
mit
| 720
|
class LRUCache(object):
def __init__(self, size):
self.size = size
self.cache = {}
self.priorities = []
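        # priorities[0] is the most recently used key; the last element is the eviction candidate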
    def adjust(self, key):
        # move an existing key to the front (most recently used position)
        self.priorities.remove(key)
        self.priorities.insert(0, key)
    def push(self, key, value):
        if key in self.cache:
            self.adjust(key)
        else:
            # evict the least recently used key once the cache is full
            if len(self.priorities) >= self.size:
                self.remove(self.priorities[-1])
            self.priorities.insert(0, key)
        self.cache[key] = value
        print(key, value)
def get(self, key):
if key in self.cache:
to_get = self.cache[key]
self.adjust(key)
return to_get
else:
print("Key not found.")
    def remove(self, key):
        try:
            del self.cache[key]
            self.priorities.remove(key)
        except (KeyError, ValueError):
            pass
def test():
cache = LRUCache(size=3)
cache.push(1, 'one')
cache.push(2, 'two')
cache.push(1, 'one_a')
cache.push(3, 'three')
print (cache.get(1))
print (cache.get(2))
cache.remove(2)
print (cache.get(2))
print (cache.get(3))
print (cache.get(0))
test()
|
mmayet/mmayet.github.io
|
projects/lru_cache_mud_mayet.py
|
Python
|
mit
| 1,373
|
def kwo(*a, c):
print(a, c)
kwo(1, 2, 3, c=7) # prints: (1, 2, 3) 7
kwo(c=4) # prints: () 4
# kwo(1, 2) # breaks at call time (the syntax is fine) with the following error
# TypeError: kwo() missing 1 required keyword-only argument: 'c'
def kwo2(a, b=42, *, c):
print(a, b, c)
kwo2(3, b=7, c=99) # prints: 3 7 99
kwo2(3, c=13) # prints: 3 42 13
# kwo2(3, 23) # breaks at call time (the syntax is fine) with the following error
# TypeError: kwo2() missing 1 required keyword-only argument: 'c'
|
mkhuthir/learnPython
|
Book_learning-python-r1.1/ch4/arguments.keyword.only.py
|
Python
|
mit
| 490
|
"""
WSGI config for RealBack project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "RealBack.settings")
application = get_wsgi_application()
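# in production, wrap the app with WhiteNoise so static files are served directly by the WSGI application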
if os.getenv('DJANGO_PRODUCTION') is not None:
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(application)
|
martinlunde/RealBack
|
RealBack/wsgi.py
|
Python
|
mit
| 541
|
# -*- coding: utf-8 -*-
#
# WebAlerts documentation build configuration file, created by
# sphinx-quickstart on Thu Jan 31 11:29:44 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from webalerts.version import __version__
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
intersphinx_mapping = {
'python': ('http://docs.python.org/2.7', None),
'requests': ('http://docs.python-requests.org/en/v1.1.0', None),
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'WebAlerts'
copyright = u'2013, Choongmin Lee'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'WebAlertsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'WebAlerts.tex', u'WebAlerts Documentation',
u'Choongmin Lee', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'webalerts', u'WebAlerts Documentation',
[u'Choongmin Lee'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'WebAlerts', u'WebAlerts Documentation',
u'Choongmin Lee', 'WebAlerts', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
clee704/WebAlerts
|
docs/conf.py
|
Python
|
mit
| 8,018
|
import typing
import requests
from stampman.services import base
from stampman.helpers import config_, mail_, exceptions_
class MailgunEmailService(base.AbstractEmailService):
def __init__(self, config: typing.NamedTuple = None,
failure_mode: bool = False, domain: str = None):
self._failure_mode = failure_mode
if not config or not isinstance(config, config_.ServiceConfig):
raise TypeError("Unexpected type for config; Expected "
"ServiceConfig")
self._config = config
self._api_endpoint = "https://api.mailgun.net/v3/{}".format(domain)
self._domain = domain
@property
def name(self):
return self._config.name
@property
def config(self):
return self._config
@property
def domain(self):
return self._domain
@property
def failure_mode(self):
return self._failure_mode
def send_email(self, email: mail_.Email):
payload = {
"from": "{} <{}>".format(email.sender[0], email.sender[1]),
"to": ",".join(email.recipients),
"subject": email.subject,
"text": email.content,
"cc": email.cc,
"bcc": email.bcc
}
response = requests.post("{}/messages".format(self._api_endpoint),
data=payload,
auth=("api", self._config.api_key))
if response.status_code == 200:
return True
elif response.status_code == 401:
raise exceptions_.InvalidServiceApiKeyException(self.name)
elif response.status_code in [400, 402, 404, 500, 502, 503, 504]:
raise exceptions_.GenericEmailServiceException(self.name)
return False
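
# --- Usage sketch (editor's addition, hypothetical values) ---
# A minimal, standalone version of the Mailgun call that send_email() above
# performs, using only the public `requests` API already imported in this
# module. The domain, API key, and addresses are placeholders.
def send_plain_text_email(domain, api_key, sender, recipients, subject, text):
    """Post a plain-text message to Mailgun's v3 messages endpoint."""
    payload = {
        "from": sender,                  # e.g. "App <noreply@example.com>"
        "to": ",".join(recipients),      # Mailgun accepts a comma-joined list
        "subject": subject,
        "text": text,
    }
    # Mailgun uses HTTP basic auth with the literal user "api"
    response = requests.post(
        "https://api.mailgun.net/v3/{}/messages".format(domain),
        data=payload,
        auth=("api", api_key),
    )
    return response.status_code == 200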
|
thunderboltsid/stampman
|
stampman/services/mailgun/mailgun.py
|
Python
|
mit
| 1,801
|
"""Utility functions used by projects.
"""
import fnmatch
import os
import re
import subprocess
import traceback
import logging
from httplib2 import Http
from django.conf import settings
from distutils2.version import NormalizedVersion, suggest_normalized_version
import redis
log = logging.getLogger(__name__)
def find_file(file):
"""Find matching filenames in the current directory and its subdirectories,
and return a list of matching filenames.
"""
matches = []
for root, dirnames, filenames in os.walk('.'):
for filename in fnmatch.filter(filenames, file):
matches.append(os.path.join(root, filename))
return matches
def run(*commands, **kwargs):
"""
Run one or more commands, and return ``(status, out, err)``.
If more than one command is given, then this is equivalent to
chaining them together with ``&&``; if all commands succeed, then
``(status, out, err)`` will represent the last successful command.
If one command failed, then ``(status, out, err)`` will represent
the failed command.
"""
environment = os.environ.copy()
environment['READTHEDOCS'] = 'True'
if 'DJANGO_SETTINGS_MODULE' in environment:
del environment['DJANGO_SETTINGS_MODULE']
if 'PYTHONPATH' in environment:
del environment['PYTHONPATH']
cwd = os.getcwd()
if not commands:
raise ValueError("run() requires one or more command-line strings")
shell = kwargs.get('shell', False)
for command in commands:
if shell:
log.info("Running commands in a shell")
run_command = command
else:
run_command = command.split()
log.info("Running: '%s'" % command)
try:
p = subprocess.Popen(run_command, shell=shell, cwd=cwd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE, env=environment)
out, err = p.communicate()
ret = p.returncode
        except Exception:
out = ''
err = traceback.format_exc()
ret = -1
log.error("Command failed", exc_info=True)
return (ret, out, err)
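
# --- Usage sketch (editor's addition) ---
# Demonstrates the chaining semantics documented on run(): on success the
# returned tuple reflects the last command. The echo commands are
# placeholders chosen only so the sketch runs anywhere.
def run_example():
    status, out, err = run('echo step-one', 'echo step-two')
    if status != 0:
        log.error("Command chain failed: %s" % err)
    return (status, out, err)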
def safe_write(filename, contents):
"""Write ``contents`` to the given ``filename``. If the filename's
directory does not exist, it is created. Contents are written as UTF-8,
ignoring any characters that cannot be encoded as UTF-8.
"""
dirname = os.path.dirname(filename)
if not os.path.exists(dirname):
os.makedirs(dirname)
with open(filename, 'w') as fh:
fh.write(contents.encode('utf-8', 'ignore'))
CUSTOM_SLUG_RE = re.compile(r'[^-._\w]+$')
def _custom_slugify(data):
return CUSTOM_SLUG_RE.sub('', data)
def slugify_uniquely(model, initial, field, max_length, **filters):
slug = _custom_slugify(initial)[:max_length]
current = slug
base_qs = model.objects.filter(**filters)
index = 0
while base_qs.filter(**{field: current}).exists():
suffix = '-%s' % index
current = '%s%s' % (slug, suffix)
index += 1
return current
def mkversion(version_obj):
try:
if hasattr(version_obj, 'slug'):
ver = NormalizedVersion(
suggest_normalized_version(version_obj.slug)
)
else:
ver = NormalizedVersion(
suggest_normalized_version(version_obj['slug'])
)
return ver
except TypeError:
return None
def highest_version(version_list):
highest = [None, None]
for version in version_list:
ver = mkversion(version)
if not ver:
continue
elif highest[1] and ver:
            # A highest version already exists; only replace it if this
            # version is greater
if ver > highest[1]:
highest = [version, ver]
else:
highest = [version, ver]
return highest
def purge_version(version, mainsite=False, subdomain=False, cname=False):
varnish_servers = getattr(settings, 'VARNISH_SERVERS', None)
h = Http()
if varnish_servers:
for server in varnish_servers:
if subdomain:
#Send a request to the Server, to purge the URL of the Host.
host = "%s.readthedocs.org" % version.project.slug
headers = {'Host': host}
url = "/en/%s/*" % version.slug
to_purge = "http://%s%s" % (server, url)
log.info("Purging %s on %s" % (url, host))
h.request(to_purge, method="PURGE", headers=headers)
if mainsite:
headers = {'Host': "readthedocs.org"}
url = "/docs/%s/en/%s/*" % (version.project.slug, version.slug)
to_purge = "http://%s%s" % (server, url)
log.info("Purging %s on readthedocs.org" % url)
h.request(to_purge, method="PURGE", headers=headers)
root_url = "/docs/%s/" % version.project.slug
to_purge = "http://%s%s" % (server, root_url)
log.info("Purging %s on readthedocs.org" % root_url)
h.request(to_purge, method="PURGE", headers=headers)
if cname:
redis_conn = redis.Redis(**settings.REDIS)
for cnamed in redis_conn.smembers('rtd_slug:v1:%s'
% version.project.slug):
headers = {'Host': cnamed}
url = "/en/%s/*" % version.slug
to_purge = "http://%s%s" % (server, url)
log.info("Purging %s on %s" % (url, cnamed))
h.request(to_purge, method="PURGE", headers=headers)
root_url = "/"
to_purge = "http://%s%s" % (server, root_url)
log.info("Purging %s on %s" % (root_url, cnamed))
h.request(to_purge, method="PURGE", headers=headers)
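
# --- Sketch (editor's addition) ---
# The minimal Varnish purge that purge_version() issues repeatedly above:
# an HTTP request with method PURGE, where the Host header selects the
# vhost and the path selects what to invalidate. All arguments are
# placeholders supplied by the caller.
def purge_path(server, host, path):
    h = Http()
    return h.request("http://%s%s" % (server, path), method="PURGE",
                     headers={'Host': host})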
class DictObj(object):
def __getattr__(self, attr):
return self.__dict__.get(attr)
def make_api_version(version_data):
from builds.models import Version
del version_data['resource_uri']
project_data = version_data['project']
project = make_api_project(project_data)
version_data['project'] = project
ver = Version(**version_data)
return ver
def make_api_project(project_data):
from projects.models import Project
for key in ['users', 'resource_uri', 'absolute_url', 'downloads']:
if key in project_data:
del project_data[key]
project = Project(**project_data)
return project
|
phalcon/readthedocs.org
|
readthedocs/projects/utils.py
|
Python
|
mit
| 6,659
|
#!/usr/bin/env python
import sys
from setuptools import setup, find_packages
from setuptools.command.test import test
class Tox(test):
def finalize_options(self):
test.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
import tox
errno = tox.cmdline(self.test_args)
sys.exit(errno)
setup(
name='interstat',
description='An HTML formatter for IRC log files',
version='0.2.2',
author='Kevin Xiwei Zheng',
author_email='blankplacement+interstat@gmail.com',
url='https://github.com/kxz/interstat',
license='X11',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Topic :: Communications :: Chat :: Internet Relay Chat',
'Topic :: Text Processing :: Markup :: HTML'],
keywords='irc html log',
packages=find_packages(),
package_data={
'interstat': [
'templates/*.css',
'templates/*.html',
'templates/message/*.html']},
entry_points={
'console_scripts': ['interstat=interstat.__main__:main']},
install_requires=[
'future',
'Jinja2'],
tests_require=[
'tox'],
cmdclass={
'test': Tox})
|
kxz/interstat
|
setup.py
|
Python
|
mit
| 1,618
|
"""Service Client"""
# pylint: disable=too-many-arguments
import warnings
from json import loads
from launchkey.exceptions import InvalidParameters, \
UnableToDecryptWebhookRequest, UnexpectedAuthorizationResponse, \
UnexpectedAPIResponse, UnexpectedWebhookRequest, XiovJWTValidationFailure,\
XiovJWTDecryptionFailure
from launchkey.utils.shared import XiovJWTService, deprecated
from launchkey.entities.validation import AuthorizationResponseValidator, \
AuthorizeSSEValidator, AuthorizeValidator, ServiceTOTPVerificationValidator
from launchkey.entities.service import AuthPolicy, AuthorizationResponse, \
SessionEndRequest, AuthorizationRequest, AdvancedAuthorizationResponse, \
DenialReason
from .base import BaseClient, api_call
class ServiceClient(BaseClient):
"""Service Client for interacting with Serive endpoints"""
def __init__(self, subject_id, transport):
super().__init__('svc', subject_id, transport)
self.x_iov_jwt_service = XiovJWTService(self._transport, self._subject)
@api_call
def authorize(self, user, context=None, policy=None, title=None, ttl=None,
push_title=None, push_body=None):
"""
        Authorize a transaction for the provided user. This method would be
        utilized if you are using this as a secondary factor for user login
        or authorizing a single transaction within your application.
        This will NOT begin a user session.
:param user: LaunchKey Username, User Push ID, or Directory User ID
for the End User
:param context: Arbitrary string of data up to 400 characters to be
presented to the End User during
authorization to provide context regarding the individual request
        :param policy: Authorization policy override for this authorization.
        The policy can only increase the security level of any existing
        policy in the Service Profile. It can never reduce the security
        level of the Service Profile's policy.
:param title: String of data up to 200 characters to be presented to
the End User during authorization as the title of the individual
authorization request
:param ttl: Time for this authorization request to be valid. If no
value is provided, the system default will be used.
:param push_title: Title that will appear in the mobile authenticator's
push message. This feature is only available for Directory Services
that have push credentials configured.
:param push_body: Body that will appear in the mobile authenticator's
push message. This feature is only available for Directory Services
that have push credentials configured.
:raise: launchkey.exceptions.InvalidParameters - Input parameters were
not correct
:raise: launchkey.exceptions.InvalidPolicyInput - Input policy was not
valid
:raise: launchkey.exceptions.PolicyFailure - Auth creation failed due
to user not passing policy
:raise: launchkey.exceptions.EntityNotFound - Username was invalid or
the user does not have any valid devices
        :raise: launchkey.exceptions.RateLimited - Too many authorization
        requests have been created for this user. Please wait and try again.
        :raise: launchkey.exceptions.InvalidPolicy - The input policy is not
        valid. It should be a launchkey.entities.service.AuthPolicy.
:return: String - Unique identifier for tracking status of the
authorization request
"""
warnings.warn('This method has been deprecated and will be removed'
' in a future major release!', DeprecationWarning)
auth = self.authorization_request(user, context, policy, title, ttl,
push_title, push_body)
return auth.auth_request
@api_call
def authorization_request(self, user, context=None, policy=None,
title=None, ttl=None, push_title=None,
push_body=None, denial_reasons=None):
"""
        Authorize a transaction for the provided user. This method would be
        utilized if you are using this as a secondary factor for user login
        or authorizing a single transaction within your application. This
        will NOT begin a user session.
:param user: LaunchKey Username, User Push ID, or Directory User ID
for the End User
:param context: Arbitrary string of data up to 400 characters to be
presented to the End User during authorization to provide context
regarding the individual request
        :param policy: Authorization policy override for this authorization.
        The policy can only increase the security level of any existing
        policy in the Service Profile. It can never reduce the security
        level of the Service Profile's policy.
:param title: String of data up to 200 characters to be presented to
the End User during authorization as the title of the individual
authorization request
:param ttl: Time for this authorization request to be valid. If no
value is provided, the system default will be used.
:param push_title: Title that will appear in the mobile authenticator's
push message. This feature is only available for Directory Services
that have push credentials configured.
:param push_body: Body that will appear in the mobile authenticator's
push message. This feature is only available for Directory Services
that have push credentials configured.
:param denial_reasons: List of denial reasons to present to the user if
they deny the request. This list must include at least two items. At
least one of the items must have a fraud value of false and at least
one of the items must have a fraud value of true. If no denial_reasons
are given the defaults will be used. If a list is provided and denial
context inquiry is not enabled for the Directory, this request will
error. This feature is only available for Directory Services.
:raise: launchkey.exceptions.InvalidParameters - Input parameters were
not correct
:raise: launchkey.exceptions.InvalidPolicyInput - Input policy was not
valid
:raise: launchkey.exceptions.PolicyFailure - Auth creation failed due
to user not passing policy
:raise: launchkey.exceptions.EntityNotFound - Username was invalid or
the user does not have any valid devices
        :raise: launchkey.exceptions.RateLimited - Too many authorization
        requests have been created for this user. Please wait and try again.
        :raise: launchkey.exceptions.InvalidPolicy - The input policy is not
        valid. It should be a launchkey.entities.service.AuthPolicy.
:raise: launchkey.exceptions.AuthorizationInProgress - Authorization
request already exists for the requesting user. That request either
needs to be responded to, expire out, or be canceled with
cancel_authorization_request().
        :return AuthorizationRequest: Contains the unique identifier for
        tracking status of the authorization request
"""
kwargs = {'username': user}
if context is not None:
kwargs['context'] = context
if title is not None:
kwargs['title'] = title
if ttl is not None:
kwargs['ttl'] = ttl
if push_title is not None:
kwargs['push_title'] = push_title
if push_body is not None:
kwargs['push_body'] = push_body
if policy is not None:
if not isinstance(policy, AuthPolicy):
raise InvalidParameters(
"Please verify the input policy is a "
"launchkey.entities.service.AuthPolicy class")
kwargs['policy'] = policy.get_policy()
if denial_reasons is not None:
if not isinstance(denial_reasons, (list, set)):
raise InvalidParameters(
"Please ensure that input denial_reasons are a list of "
"launchkey.entities.service.DenialReason classes.")
parsed_reasons = []
for reason in denial_reasons:
if not isinstance(reason, DenialReason):
raise InvalidParameters(
"Please verify that denial_reasons are "
"launchkey.entities.service.DenialReason classes.")
parsed_reasons.append(
{"id": reason.denial_id, "reason": reason.reason,
"fraud": reason.fraud}
)
kwargs['denial_reasons'] = parsed_reasons
response = self._transport.post("/service/v3/auths",
self._subject, **kwargs)
data = self._validate_response(response, AuthorizeValidator)
return AuthorizationRequest(data.get('auth_request'),
data.get('push_package'),
data.get('device_ids'))
@api_call
def get_advanced_authorization_response(self, authorization_request_id):
"""
Request the response for a previous authorization call.
:param authorization_request_id: Unique identifier returned by
authorization_request()
:raise: launchkey.exceptions.InvalidParameters - Input parameters were
not correct
:raise: launchkey.exceptions.RequestTimedOut - The authorization
request has not been responded to before the
timeout period (5 minutes)
:raise: launchkey.exceptions.AuthorizationRequestCanceled - The
authorization request has been canceled so a response cannot be
retrieved.
        :return: None if the user has not responded; otherwise a
        launchkey.entities.service.AdvancedAuthorizationResponse object
        with the user's response in it
"""
response = self._transport.get(
"/service/v3/auths/%s" % authorization_request_id,
self._subject)
if response.status_code == 204:
authorization_response = None
else:
data = self._validate_response(
response,
AuthorizationResponseValidator)
authorization_response = AdvancedAuthorizationResponse(
data,
self._transport
)
return authorization_response
@deprecated
def get_authorization_response(self, authorization_request_id):
"""
NOTE: This method is being deprecated. Use
`get_advanced_authorization_response` instead!
Request the response for a previous authorization call.
:param authorization_request_id: Unique identifier returned by
authorization_request()
:raise: launchkey.exceptions.InvalidParameters - Input parameters were
not correct
:raise: launchkey.exceptions.RequestTimedOut - The authorization
request has not been responded to before the
timeout period (5 minutes)
:raise: launchkey.exceptions.AuthorizationRequestCanceled - The
authorization request has been canceled so a response cannot be
retrieved.
        :return: None if the user has not responded; otherwise a
        launchkey.entities.service.AuthorizationResponse object
        with the user's response in it
"""
advanced_authorization_response = \
self.get_advanced_authorization_response(authorization_request_id)
if not advanced_authorization_response:
return None
return AuthorizationResponse(
advanced_authorization_response.data,
advanced_authorization_response.transport)
@api_call
def cancel_authorization_request(self, authorization_request_id):
"""
Request to cancel an authorization request for the End User
:param authorization_request_id: Unique identifier returned by
authorization_request()
:raise: launchkey.exceptions.InvalidParameters - Input parameters were
not correct
:raise: launchkey.exceptions.EntityNotFound - The authorization
request does not exist.
:raise: launchkey.exceptions.AuthorizationRequestCanceled - The
authorization request has already been canceled.
:raise: launchkey.exceptions.AuthorizationResponseExists - The
authorization request has already been responded to so it cannot be
canceled.
"""
self._transport.delete(
"/service/v3/auths/%s" % authorization_request_id,
self._subject)
@api_call
def session_start(self, user, authorization_request_id):
"""
Request to start a Service Session for the End User which was derived
from a authorization request
:param user: LaunchKey Username, User Push ID, or Directory User ID for
the End User
:param authorization_request_id: Unique identifier returned by
authorization_request()
:raise: launchkey.exceptions.InvalidParameters - Input parameters were
not correct
:raise: launchkey.exceptions.EntityNotFound - The input username was
not valid
"""
self._transport.post("/service/v3/sessions",
self._subject,
username=user,
auth_request=authorization_request_id)
@api_call
def session_end(self, user):
"""
Request to end a Service Session for the End User
:param user: LaunchKey Username, User Push ID, or Directory User ID for
the End User
:raise: launchkey.exceptions.InvalidParameters - Input parameters were
not correct
:raise: launchkey.exceptions.EntityNotFound - The input username was
not valid
"""
self._transport.delete("/service/v3/sessions",
self._subject,
username=user)
@api_call
def verify_totp(self, user, otp):
"""
Verifies a given TOTP is valid for a given user.
:param user: Unique value identifying the End User in your
system. This value was used to create the Directory User and Link
Device.
        :param otp: 6-8 digit OTP code to verify.
:return: Boolean stating whether the given OTP code is valid.
:raise: launchkey.exceptions.EntityNotFound - Unable to find TOTP
configuration for given user.
"""
response = self._transport.post("/service/v3/totp",
self._subject, identifier=user,
otp=otp)
data = self._validate_response(
response,
ServiceTOTPVerificationValidator)
return data["valid"]
def handle_advanced_webhook(self, body, headers, method=None, path=None):
"""
Handle an advanced webhook callback
In the event of a Logout webhook, be sure to call session_end() when
you complete the process of ending the user's session in your
implementation. This will remove the corresponding Application from
        the authorization list on all of the user's mobile devices.
        :param body: The raw body that was sent in the POST content
:param headers: A generic map of response headers. These will be used
to access and validate authorization
:param path: The path of the request
:param method: The HTTP method of the request
:return: launchkey.entities.service.SessionEndRequest or
launchkey.entities.service.AdvancedAuthorizationResponse
:raises launchkey.exceptions.UnexpectedWebhookRequest: when the
        request or its headers cannot be parsed or fail validation.
:raises launchkey.exceptions.UnableToDecryptWebhookRequest: when the
request is an authorization response webhook and the request body
cannot be decrypted
:raises launchkey.exceptions.UnexpectedAuthorizationResponse: when the
decrypted auth package is missing required data. This error is
indicative of a non webhook request being sent to the method.
:raises launchkey.exceptions.UnexpectedKeyID: when the auth package in
an authorization response webhook request body is decrypted using a
public key whose private key is not known by the client. This can be
a configuration issue.
:raises launchkey.exceptions.UnexpectedDeviceResponse: when the auth
package received from the device is invalid. This error is
indicative of a man in the middle (MITM) attack.
:raises launchkey.exceptions.WebhookAuthorizationError: when the
"Authorization" header in the headers.
"""
if method is None:
warnings.warn("Not passing a valid request method string is "
"deprecated and will be required in the next "
"major version", PendingDeprecationWarning)
if path is None:
warnings.warn("Not passing a valid request path string is "
"deprecated and will be required in the next "
"major version", PendingDeprecationWarning)
try:
if "service_user_hash" in "%s" % body:
body = self.x_iov_jwt_service.verify_jwt_request(body, headers,
method, path)
try:
body = self._validate_response(
loads(body),
AuthorizeSSEValidator)
except UnexpectedAPIResponse as reason:
raise UnexpectedWebhookRequest(reason=reason) from reason
result = SessionEndRequest(
body['service_user_hash'],
self._transport.parse_api_time(body['api_time']))
else:
try:
decrypted_body = self.x_iov_jwt_service.decrypt_jwe(
body, headers, method, path
)
auth_response = loads(decrypted_body)
result = AdvancedAuthorizationResponse(
auth_response,
self._transport
)
except XiovJWTDecryptionFailure as reason:
raise UnableToDecryptWebhookRequest(reason=reason) \
from reason
except KeyError as reason:
raise UnexpectedAuthorizationResponse(reason=reason) \
from reason
except XiovJWTValidationFailure as reason:
raise UnexpectedWebhookRequest(reason) from reason
return result
@deprecated
def handle_webhook(self, body, headers, method=None, path=None):
"""
NOTE: This method is being deprecated. Use `handle_advanced_webhook`
instead!
Handle a webhook callback
In the event of a Logout webhook, be sure to call session_end() when
you complete the process of ending the user's session in your
implementation. This will remove the corresponding Application from
        the authorization list on all of the user's mobile devices.
        :param body: The raw body that was sent in the POST content
:param headers: A generic map of response headers. These will be used
to access and validate authorization
:param path: The path of the request
:param method: The HTTP method of the request
:return: launchkey.entities.service.SessionEndRequest or
launchkey.entities.service.AuthorizationResponse
:raises launchkey.exceptions.UnexpectedWebhookRequest: when the
        request or its headers cannot be parsed or fail validation.
:raises launchkey.exceptions.UnableToDecryptWebhookRequest: when the
request is an authorization response webhook and the request body
cannot be decrypted
:raises launchkey.exceptions.UnexpectedAuthorizationResponse: when the
decrypted auth package is missing required data. This error is
indicative of a non webhook request being sent to the method.
:raises launchkey.exceptions.UnexpectedKeyID: when the auth package in
an authorization response webhook request body is decrypted using a
public key whose private key is not known by the client. This can be
a configuration issue.
:raises launchkey.exceptions.UnexpectedDeviceResponse: when the auth
package received from the device is invalid. This error is
indicative of a man in the middle (MITM) attack.
:raises launchkey.exceptions.WebhookAuthorizationError: when the
"Authorization" header in the headers.
"""
advanced_authorization_response = self.handle_advanced_webhook(
body, headers, method, path)
if isinstance(advanced_authorization_response, SessionEndRequest):
return advanced_authorization_response
return AuthorizationResponse(
advanced_authorization_response.data,
advanced_authorization_response.transport)
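
# --- Usage sketch (editor's addition) ---
# A polling flow built only from the methods defined above. `client` is an
# already-constructed ServiceClient (key/transport setup is assumed), and
# the title text is a placeholder.
def poll_for_user_response(client, user, timeout=300, interval=1):
    """Create an auth request, then poll until the user answers or we give up."""
    import time
    request = client.authorization_request(user, title="Example login")
    deadline = time.time() + timeout
    while time.time() < deadline:
        response = client.get_advanced_authorization_response(
            request.auth_request)
        if response is not None:   # None means the user has not responded yet
            return response
        time.sleep(interval)
    # Give up and cancel so a new request can be issued for this user later
    client.cancel_authorization_request(request.auth_request)
    return None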
|
iovation/launchkey-python
|
launchkey/clients/service.py
|
Python
|
mit
| 21,659
|
from revscoring.utilities.fetch_idioms import is_idiom
def test_is_idiom():
phrases = [
'Appendix:English 19th Century idioms',
'about to',
'activist justice',
'attaboy',
'bat for the other team',
'beard the lion in his den',
'as gentle as a dove',
'I\'ll say'
]
idioms = [phrase for phrase in phrases if is_idiom(phrase)]
assert idioms == ['bat for the other team', 'beard the lion in his den']
|
wiki-ai/revscoring
|
tests/utilities/test_fetch_idioms.py
|
Python
|
mit
| 496
|
from django.apps import AppConfig
class AuthConfig(AppConfig):
name = 'apps.auth'
label = 'qr_server_auth'
|
ycheng-aa/qr_server
|
apps/auth/apps.py
|
Python
|
mit
| 117
|
#!/usr/bin/python
from menuclass import *
from empClass import *
from functions import *
employees = []
mainMenu = MainMenu()
while True:
cmd = raw_input("What you want to DO: ")
br()
if cmd.lower() == "q":
print "Quiting Now......"
waits()
exit()
elif cmd.lower() == "m":
while True:
sldOpt = ""
mainMenu.sldopt = 0
mainMenu.menu()
            if mainMenu.sldopt == 0:
                break
            sldOpt = mainMenu.optList[mainMenu.sldopt - 1]
if sldOpt == "New":
new_emp(Employee,employees)
elif sldOpt == "List":
list_emp(employees)
elif sldOpt == "Search":
srch_emp(employees)
elif sldOpt == "Delete":
del_emp(employees)
elif sldOpt == "Edit":
edit_emp(employees)
elif sldOpt == "View Details":
vd_emp(employees)
elif cmd.lower() == "help" or cmd.lower() =='h':
help()
else:
print 'Error... Please enter "Help" or "H" to get help'
|
imughal/EmployeeScript
|
main.py
|
Python
|
mit
| 893
|
# ValPy -- Validate.R in Python
# Author: Dustin Landers
# Contact: (770) 289-8830 :: dustin.landers@gmail.com
"""Dependencies"""
from commandline import *
from fileimport import *
from checkhidden import *
"""Main function and execution"""
def main():
initializeGraphics()
folder, analysis, truth, snp, score, beta, filename, threshold, seper, kttype, kttypeseper = checkArgs()
appOutputList = checkList(getList(folder))
ktFile = loadKT(truth, kttypeseper)
if kttype is "OTE":
acquiredData = loadFile(folder, appOutputList[0], seper)
snpColumnNo = acquiredData.header.index(snp)
snpColumn = list()
for each in acquiredData.data.iteritems():
snpColumn.append(each[1][snpColumnNo])
    for each in appOutputList:
        acquiredData = loadFile(folder, each, seper)
        snpColumnNo = acquiredData.header.index(snp)
        snpColumn = list()
        # Iterate rows as `row` so the filename loop variable is not shadowed
        for row in acquiredData.data.iteritems():
            snpColumn.append(row[1][snpColumnNo])
        scoreColumnNo = acquiredData.header.index(score)
        scoreColumn = list()
        for row in acquiredData.data.iteritems():
            scoreColumn.append(float(row[1][scoreColumnNo]))
        if beta is not None:
            betaColumnNo = acquiredData.header.index(beta)
            betaColumn = list()
            for row in acquiredData.data.iteritems():
                betaColumn.append(float(row[1][betaColumnNo]))
if __name__ == "__main__":
main()
|
mtnman38/ktaR
|
valpy/validate.py
|
Python
|
mit
| 1,338
|
# -*- coding: utf-8 -*-
"""
Master Dictionary
Translations
0: English
1: Chinese
2: Dutch
3: French
4: Italian
5: Japanese
6: Korean
7: Portuguese
8: Russian
9: Spanish
"""
class MD_F:
# Master Dictionary Functions
def get_languages_list():
languages = []
languages.append("English")
languages.append("Chinese")
languages.append("Dutch")
languages.append("French")
languages.append("Italian")
languages.append("Japanese")
languages.append("Korean")
languages.append("Portuguese")
languages.append("Russian")
languages.append("Spanish")
return languages
def get_parent_classes_list():
parent_classes = []
parent_classes.append("BaseCase")
parent_classes.append("硒测试用例")
parent_classes.append("Testgeval")
parent_classes.append("CasDeBase")
parent_classes.append("CasoDiProva")
parent_classes.append("セレニウムテストケース")
parent_classes.append("셀레늄_테스트_케이스")
parent_classes.append("CasoDeTeste")
parent_classes.append("ТестНаСелен")
parent_classes.append("CasoDePrueba")
return parent_classes
def get_masterqa_parent_classes_list():
parent_classes = []
parent_classes.append("MasterQA")
parent_classes.append("MasterQA_中文")
parent_classes.append("MasterQA_Nederlands")
parent_classes.append("MasterQA_Français")
parent_classes.append("MasterQA_Italiano")
parent_classes.append("MasterQA_日本語")
parent_classes.append("MasterQA_한국어")
parent_classes.append("MasterQA_Português")
parent_classes.append("MasterQA_Русский")
parent_classes.append("MasterQA_Español")
return parent_classes
def get_parent_class_lang(parent_class):
parent_class_lang = {}
parent_class_lang["BaseCase"] = "English"
parent_class_lang["硒测试用例"] = "Chinese"
parent_class_lang["Testgeval"] = "Dutch"
parent_class_lang["CasDeBase"] = "French"
parent_class_lang["CasoDiProva"] = "Italian"
parent_class_lang["セレニウムテストケース"] = "Japanese"
parent_class_lang["셀레늄_테스트_케이스"] = "Korean"
parent_class_lang["CasoDeTeste"] = "Portuguese"
parent_class_lang["ТестНаСелен"] = "Russian"
parent_class_lang["CasoDePrueba"] = "Spanish"
if parent_class not in parent_class_lang.keys():
raise Exception("Invalid parent_class {%s} not in {%s}!"
"" % (parent_class, parent_class_lang.keys()))
return parent_class_lang[parent_class]
def get_mqa_par_class_lang(parent_class):
parent_class_lang = {}
parent_class_lang["MasterQA"] = "English"
parent_class_lang["MasterQA_中文"] = "Chinese"
parent_class_lang["MasterQA_Nederlands"] = "Dutch"
parent_class_lang["MasterQA_Français"] = "French"
parent_class_lang["MasterQA_Italiano"] = "Italian"
parent_class_lang["MasterQA_日本語"] = "Japanese"
parent_class_lang["MasterQA_한국어"] = "Korean"
parent_class_lang["MasterQA_Português"] = "Portuguese"
parent_class_lang["MasterQA_Русский"] = "Russian"
parent_class_lang["MasterQA_Español"] = "Spanish"
if parent_class not in parent_class_lang.keys():
raise Exception("Invalid parent_class {%s} not in {%s}!"
"" % (parent_class, parent_class_lang.keys()))
return parent_class_lang[parent_class]
def get_lang_parent_class(language):
lang_parent_class = {}
lang_parent_class["English"] = "BaseCase"
lang_parent_class["Chinese"] = "硒测试用例"
lang_parent_class["Dutch"] = "Testgeval"
lang_parent_class["French"] = "CasDeBase"
lang_parent_class["Italian"] = "CasoDiProva"
lang_parent_class["Japanese"] = "セレニウムテストケース"
lang_parent_class["Korean"] = "셀레늄_테스트_케이스"
lang_parent_class["Portuguese"] = "CasoDeTeste"
lang_parent_class["Russian"] = "ТестНаСелен"
lang_parent_class["Spanish"] = "CasoDePrueba"
if language not in lang_parent_class.keys():
raise Exception("Invalid language {%s} not in {%s}!"
"" % (language, lang_parent_class.keys()))
return lang_parent_class[language]
def get_mqa_lang_par_class(language):
lang_parent_class = {}
lang_parent_class["English"] = "MasterQA"
lang_parent_class["Chinese"] = "MasterQA_中文"
lang_parent_class["Dutch"] = "MasterQA_Nederlands"
lang_parent_class["French"] = "MasterQA_Français"
lang_parent_class["Italian"] = "MasterQA_Italiano"
lang_parent_class["Japanese"] = "MasterQA_日本語"
lang_parent_class["Korean"] = "MasterQA_한국어"
lang_parent_class["Portuguese"] = "MasterQA_Português"
lang_parent_class["Russian"] = "MasterQA_Русский"
lang_parent_class["Spanish"] = "MasterQA_Español"
if language not in lang_parent_class.keys():
raise Exception("Invalid language {%s} not in {%s}!"
"" % (language, lang_parent_class.keys()))
return lang_parent_class[language]
def get_import_line(language):
import_line = {}
# - The Default Import Line:
import_line["English"] = (
"from seleniumbase import BaseCase")
# - Translated Import Lines:
import_line["Chinese"] = (
"from seleniumbase.translate.chinese import 硒测试用例")
import_line["Dutch"] = (
"from seleniumbase.translate.dutch import Testgeval")
import_line["French"] = (
"from seleniumbase.translate.french import CasDeBase")
import_line["Italian"] = (
"from seleniumbase.translate.italian import CasoDiProva")
import_line["Japanese"] = (
"from seleniumbase.translate.japanese import セレニウムテストケース")
import_line["Korean"] = (
"from seleniumbase.translate.korean import 셀레늄_테스트_케이스")
import_line["Portuguese"] = (
"from seleniumbase.translate.portuguese import CasoDeTeste")
import_line["Russian"] = (
"from seleniumbase.translate.russian import ТестНаСелен")
import_line["Spanish"] = (
"from seleniumbase.translate.spanish import CasoDePrueba")
if language not in import_line.keys():
raise Exception("Invalid language {%s} not in {%s}!"
"" % (language, import_line.keys()))
return import_line[language]
def get_mqa_im_line(language):
import_line = {}
# - The Default Import Line:
import_line["English"] = (
"from seleniumbase import MasterQA")
# - Translated Import Lines:
import_line["Chinese"] = (
"from seleniumbase.translate.chinese import MasterQA_中文")
import_line["Dutch"] = (
"from seleniumbase.translate.dutch import MasterQA_Nederlands")
import_line["French"] = (
"from seleniumbase.translate.french import MasterQA_Français")
import_line["Italian"] = (
"from seleniumbase.translate.italian import MasterQA_Italiano")
import_line["Japanese"] = (
"from seleniumbase.translate.japanese import MasterQA_日本語")
import_line["Korean"] = (
"from seleniumbase.translate.korean import MasterQA_한국어")
import_line["Portuguese"] = (
"from seleniumbase.translate.portuguese import MasterQA_Português")
import_line["Russian"] = (
"from seleniumbase.translate.russian import MasterQA_Русский")
import_line["Spanish"] = (
"from seleniumbase.translate.spanish import MasterQA_Español")
if language not in import_line.keys():
raise Exception("Invalid language {%s} not in {%s}!"
"" % (language, import_line.keys()))
return import_line[language]
def get_locale_code(language):
locale_codes = {}
locale_codes["English"] = "en"
locale_codes["Chinese"] = "zh"
locale_codes["Dutch"] = "nl"
locale_codes["French"] = "fr"
locale_codes["Italian"] = "it"
locale_codes["Japanese"] = "ja"
locale_codes["Korean"] = "ko"
locale_codes["Portuguese"] = "pt"
locale_codes["Russian"] = "ru"
locale_codes["Spanish"] = "es"
if language not in locale_codes.keys():
raise Exception("Invalid language {%s} not in {%s}!"
"" % (language, locale_codes.keys()))
return locale_codes[language]
def get_locale_list():
locale_list = []
locale_list.append("en")
locale_list.append("zh")
locale_list.append("nl")
locale_list.append("fr")
locale_list.append("it")
locale_list.append("ja")
locale_list.append("ko")
locale_list.append("pt")
locale_list.append("ru")
locale_list.append("es")
return locale_list
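
# --- Usage sketch (editor's addition, assumes Python 3) ---
# Round-trips a language through the MD_F helpers above: resolving the
# translated parent class, its import line, and the locale code. "French"
# is an arbitrary pick from the supported languages.
def _example_language_lookup(language="French"):
    parent_class = MD_F.get_lang_parent_class(language)   # "CasDeBase"
    import_line = MD_F.get_import_line(language)
    locale_code = MD_F.get_locale_code(language)          # "fr"
    assert MD_F.get_parent_class_lang(parent_class) == language
    return (parent_class, import_line, locale_code)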
class MD_L_Codes:
# Master Dictionary Language Codes
lang = {}
lang["English"] = 0
lang["Chinese"] = 1
lang["Dutch"] = 2
lang["French"] = 3
lang["Italian"] = 4
lang["Japanese"] = 5
lang["Korean"] = 6
lang["Portuguese"] = 7
lang["Russian"] = 8
lang["Spanish"] = 9
class MD:
# Master Dictionary
md = {}
num_langs = len(MD_L_Codes.lang)
md["open"] = ["*"] * num_langs
md["open"][0] = "open"
md["open"][1] = "开启"
md["open"][2] = "openen"
md["open"][3] = "ouvrir"
md["open"][4] = "apri"
md["open"][5] = "を開く"
md["open"][6] = "열기"
md["open"][7] = "abrir"
md["open"][8] = "открыть"
md["open"][9] = "abrir"
md["open_url"] = ["*"] * num_langs
md["open_url"][0] = "open_url"
md["open_url"][1] = "开启网址"
md["open_url"][2] = "url_openen"
md["open_url"][3] = "ouvrir_url"
md["open_url"][4] = "apri_url"
md["open_url"][5] = "URLを開く"
md["open_url"][6] = "URL_열기"
md["open_url"][7] = "abrir_url"
md["open_url"][8] = "открыть_URL"
md["open_url"][9] = "abrir_url"
md["click"] = ["*"] * num_langs
md["click"][0] = "click"
md["click"][1] = "单击"
md["click"][2] = "klik"
md["click"][3] = "cliquer"
md["click"][4] = "fare_clic"
md["click"][5] = "クリックして"
md["click"][6] = "클릭"
md["click"][7] = "clique"
md["click"][8] = "нажмите"
md["click"][9] = "haga_clic"
md["double_click"] = ["*"] * num_langs
md["double_click"][0] = "double_click"
md["double_click"][1] = "双击"
md["double_click"][2] = "dubbelklik"
md["double_click"][3] = "double_cliquer"
md["double_click"][4] = "doppio_clic"
md["double_click"][5] = "ダブルクリックして"
md["double_click"][6] = "더블_클릭"
md["double_click"][7] = "clique_duas_vezes"
md["double_click"][8] = "дважды_нажмите"
md["double_click"][9] = "doble_clic"
md["slow_click"] = ["*"] * num_langs
md["slow_click"][0] = "slow_click"
md["slow_click"][1] = "慢单击"
md["slow_click"][2] = "klik_langzaam"
md["slow_click"][3] = "cliquer_lentement"
md["slow_click"][4] = "clic_lentamente"
md["slow_click"][5] = "ゆっくりクリックして"
md["slow_click"][6] = "천천히_클릭"
md["slow_click"][7] = "clique_devagar"
md["slow_click"][8] = "нажмите_медленно"
md["slow_click"][9] = "clic_lentamente"
md["click_if_visible"] = ["*"] * num_langs
md["click_if_visible"][0] = "click_if_visible"
md["click_if_visible"][1] = "如果可见请单击"
md["click_if_visible"][2] = "klik_indien_zichtbaar"
md["click_if_visible"][3] = "cliquer_si_affiché"
md["click_if_visible"][4] = "clic_se_visto"
md["click_if_visible"][5] = "表示されている場合はクリック"
md["click_if_visible"][6] = "보이는_경우_클릭"
md["click_if_visible"][7] = "clique_se_está_visível"
md["click_if_visible"][8] = "нажмите_если_виден"
md["click_if_visible"][9] = "clic_si_está_muestra"
md["click_link_text"] = ["*"] * num_langs
md["click_link_text"][0] = "click_link_text"
md["click_link_text"][1] = "单击链接文本"
md["click_link_text"][2] = "klik_linktekst"
md["click_link_text"][3] = "cliquer_texte_du_lien"
md["click_link_text"][4] = "clic_testo_del_collegamento"
md["click_link_text"][5] = "リンクテキストをクリックします"
md["click_link_text"][6] = "링크_텍스트를_클릭합니다"
md["click_link_text"][7] = "clique_texto_do_link"
md["click_link_text"][8] = "нажмите_ссылку"
md["click_link_text"][9] = "clic_texto_del_enlace"
md["update_text"] = ["*"] * num_langs
md["update_text"][0] = "update_text"
md["update_text"][1] = "更新文本"
md["update_text"][2] = "tekst_bijwerken"
md["update_text"][3] = "modifier_texte"
md["update_text"][4] = "aggiornare_testo"
md["update_text"][5] = "テキストを更新"
md["update_text"][6] = "텍스트를_업데이트"
md["update_text"][7] = "atualizar_texto"
md["update_text"][8] = "обновить_текст"
md["update_text"][9] = "actualizar_texto"
md["add_text"] = ["*"] * num_langs
md["add_text"][0] = "add_text"
md["add_text"][1] = "添加文本"
md["add_text"][2] = "tekst_toevoegen"
md["add_text"][3] = "ajouter_texte"
md["add_text"][4] = "aggiungi_testo"
md["add_text"][5] = "テキストを追加"
md["add_text"][6] = "텍스트를_추가"
md["add_text"][7] = "adicionar_texto"
md["add_text"][8] = "добавить_текст"
md["add_text"][9] = "agregar_texto"
md["get_text"] = ["*"] * num_langs
md["get_text"][0] = "get_text"
md["get_text"][1] = "获取文本"
md["get_text"][2] = "tekst_ophalen"
md["get_text"][3] = "obtenir_texte"
md["get_text"][4] = "ottenere_testo"
md["get_text"][5] = "テキストを取得"
md["get_text"][6] = "텍스트를_검색"
md["get_text"][7] = "obter_texto"
md["get_text"][8] = "получить_текст"
md["get_text"][9] = "obtener_texto"
md["assert_text"] = ["*"] * num_langs
md["assert_text"][0] = "assert_text"
md["assert_text"][1] = "断言文本"
md["assert_text"][2] = "controleren_tekst"
md["assert_text"][3] = "vérifier_texte"
md["assert_text"][4] = "verificare_testo"
md["assert_text"][5] = "テキストを確認する"
md["assert_text"][6] = "텍스트_확인"
md["assert_text"][7] = "verificar_texto"
md["assert_text"][8] = "подтвердить_текст"
md["assert_text"][9] = "verificar_texto"
md["assert_exact_text"] = ["*"] * num_langs
md["assert_exact_text"][0] = "assert_exact_text"
md["assert_exact_text"][1] = "确切断言文本"
md["assert_exact_text"][2] = "controleren_exacte_tekst"
md["assert_exact_text"][3] = "vérifier_texte_exactement"
md["assert_exact_text"][4] = "verificare_testo_esatto"
md["assert_exact_text"][5] = "正確なテキストを確認する"
md["assert_exact_text"][6] = "정확한_텍스트를_확인하는"
md["assert_exact_text"][7] = "verificar_texto_exato"
md["assert_exact_text"][8] = "подтвердить_текст_точно"
md["assert_exact_text"][9] = "verificar_texto_exacto"
md["assert_link_text"] = ["*"] * num_langs
md["assert_link_text"][0] = "assert_link_text"
md["assert_link_text"][1] = "断言链接文本"
md["assert_link_text"][2] = "controleren_linktekst"
md["assert_link_text"][3] = "vérifier_texte_du_lien"
md["assert_link_text"][4] = "verificare_testo_del_collegamento"
md["assert_link_text"][5] = "リンクテキストを確認する"
md["assert_link_text"][6] = "링크_텍스트_확인"
md["assert_link_text"][7] = "verificar_texto_do_link"
md["assert_link_text"][8] = "подтвердить_ссылку"
md["assert_link_text"][9] = "verificar_texto_del_enlace"
md["assert_element"] = ["*"] * num_langs
md["assert_element"][0] = "assert_element"
md["assert_element"][1] = "断言元素"
md["assert_element"][2] = "controleren_element"
md["assert_element"][3] = "vérifier_élément"
md["assert_element"][4] = "verificare_elemento"
md["assert_element"][5] = "要素を確認する"
md["assert_element"][6] = "요소_확인"
md["assert_element"][7] = "verificar_elemento"
md["assert_element"][8] = "подтвердить_элемент"
md["assert_element"][9] = "verificar_elemento"
md["assert_element_visible"] = ["*"] * num_langs
md["assert_element_visible"][0] = "assert_element_visible"
md["assert_element_visible"][1] = "断言元素可见"
md["assert_element_visible"][2] = "controleren_element_zichtbaar"
md["assert_element_visible"][3] = "vérifier_élément_affiché"
md["assert_element_visible"][4] = "verificare_elemento_visto"
md["assert_element_visible"][5] = "要素が表示されていることを確認"
md["assert_element_visible"][6] = "요소가_보이는지_확인"
md["assert_element_visible"][7] = "verificar_elemento_visível"
md["assert_element_visible"][8] = "подтвердить_элемент_виден"
md["assert_element_visible"][9] = "verificar_elemento_se_muestra"
md["assert_element_not_visible"] = ["*"] * num_langs
md["assert_element_not_visible"][0] = "assert_element_not_visible"
md["assert_element_not_visible"][1] = "断言元素不可见"
md["assert_element_not_visible"][2] = "controleren_element_niet_zichtbaar"
md["assert_element_not_visible"][3] = "vérifier_élément_pas_affiché"
md["assert_element_not_visible"][4] = "verificare_elemento_non_visto"
md["assert_element_not_visible"][5] = "要素が表示されていないことを確認します"
md["assert_element_not_visible"][6] = "요소가_보이지_않는지_확인"
md["assert_element_not_visible"][7] = "verificar_elemento_não_visível"
md["assert_element_not_visible"][8] = "подтвердить_элемент_не_виден"
md["assert_element_not_visible"][9] = "verificar_elemento_no_se_muestra"
md["assert_element_present"] = ["*"] * num_langs
md["assert_element_present"][0] = "assert_element_present"
md["assert_element_present"][1] = "断言元素存在"
md["assert_element_present"][2] = "controleren_element_aanwezig"
md["assert_element_present"][3] = "vérifier_élément_présent"
md["assert_element_present"][4] = "verificare_elemento_presente"
md["assert_element_present"][5] = "要素が存在することを確認します"
md["assert_element_present"][6] = "요소가_존재하는지_확인"
md["assert_element_present"][7] = "verificar_elemento_presente"
md["assert_element_present"][8] = "подтвердить_элемент_присутствует"
md["assert_element_present"][9] = "verificar_elemento_presente"
md["assert_element_absent"] = ["*"] * num_langs
md["assert_element_absent"][0] = "assert_element_absent"
md["assert_element_absent"][1] = "断言元素不存在"
md["assert_element_absent"][2] = "controleren_element_afwezig"
md["assert_element_absent"][3] = "vérifier_élément_pas_présent"
md["assert_element_absent"][4] = "verificare_elemento_assente"
md["assert_element_absent"][5] = "要素が存在しないことを確認します"
md["assert_element_absent"][6] = "요소가_존재하지_않는지_확인"
md["assert_element_absent"][7] = "verificar_elemento_ausente"
md["assert_element_absent"][8] = "подтвердить_элемент_отсутствует"
md["assert_element_absent"][9] = "verificar_elemento_ausente"
md["assert_title"] = ["*"] * num_langs
md["assert_title"][0] = "assert_title"
md["assert_title"][1] = "断言标题"
md["assert_title"][2] = "controleren_titel"
md["assert_title"][3] = "vérifier_titre"
md["assert_title"][4] = "verificare_titolo"
md["assert_title"][5] = "タイトルを確認"
md["assert_title"][6] = "제목_확인"
md["assert_title"][7] = "verificar_título"
md["assert_title"][8] = "подтвердить_название"
md["assert_title"][9] = "verificar_título"
md["get_title"] = ["*"] * num_langs
md["get_title"][0] = "get_title"
md["get_title"][1] = "获取标题"
md["get_title"][2] = "titel_ophalen"
md["get_title"][3] = "obtenir_titre"
md["get_title"][4] = "ottenere_titolo"
md["get_title"][5] = "タイトルを取得する"
md["get_title"][6] = "제목_검색"
md["get_title"][7] = "obter_título"
md["get_title"][8] = "получить_название"
md["get_title"][9] = "obtener_título"
md["assert_true"] = ["*"] * num_langs
md["assert_true"][0] = "assert_true"
md["assert_true"][1] = "断言为真"
md["assert_true"][2] = "controleren_ware"
md["assert_true"][3] = "vérifier_vrai"
md["assert_true"][4] = "verificare_vero"
md["assert_true"][5] = "検証が正しい"
md["assert_true"][6] = "올바른지_확인"
md["assert_true"][7] = "verificar_verdade"
md["assert_true"][8] = "подтвердить_правду"
md["assert_true"][9] = "verificar_verdad"
md["assert_false"] = ["*"] * num_langs
md["assert_false"][0] = "assert_false"
md["assert_false"][1] = "断言为假"
md["assert_false"][2] = "controleren_valse"
md["assert_false"][3] = "vérifier_faux"
md["assert_false"][4] = "verificare_falso"
md["assert_false"][5] = "検証は偽です"
md["assert_false"][6] = "거짓인지_확인"
md["assert_false"][7] = "verificar_falso"
md["assert_false"][8] = "подтвердить_ложные"
md["assert_false"][9] = "verificar_falso"
md["assert_equal"] = ["*"] * num_langs
md["assert_equal"][0] = "assert_equal"
md["assert_equal"][1] = "断言等于"
md["assert_equal"][2] = "controleren_gelijk"
md["assert_equal"][3] = "vérifier_égal"
md["assert_equal"][4] = "verificare_uguale"
md["assert_equal"][5] = "検証が等しい"
md["assert_equal"][6] = "동일한지_확인"
md["assert_equal"][7] = "verificar_igual"
md["assert_equal"][8] = "подтвердить_одинаковый"
md["assert_equal"][9] = "verificar_igual"
md["assert_not_equal"] = ["*"] * num_langs
md["assert_not_equal"][0] = "assert_not_equal"
md["assert_not_equal"][1] = "断言不等于"
md["assert_not_equal"][2] = "controleren_niet_gelijk"
md["assert_not_equal"][3] = "vérifier_non_égal"
md["assert_not_equal"][4] = "verificare_non_uguale"
md["assert_not_equal"][5] = "検証が等しくない"
md["assert_not_equal"][6] = "동일하지_않다고_어설션"
md["assert_not_equal"][7] = "verificar_não_igual"
md["assert_not_equal"][8] = "подтвердить_не_одинаковый"
md["assert_not_equal"][9] = "verificar_diferente"
md["refresh_page"] = ["*"] * num_langs
md["refresh_page"][0] = "refresh_page"
md["refresh_page"][1] = "刷新页面"
md["refresh_page"][2] = "ververs_pagina"
md["refresh_page"][3] = "rafraîchir_la_page"
md["refresh_page"][4] = "aggiorna_la_pagina"
md["refresh_page"][5] = "ページを更新する"
md["refresh_page"][6] = "페이지_새로_고침"
md["refresh_page"][7] = "atualizar_a_página"
md["refresh_page"][8] = "обновить_страницу"
md["refresh_page"][9] = "actualizar_la_página"
md["get_current_url"] = ["*"] * num_langs
md["get_current_url"][0] = "get_current_url"
md["get_current_url"][1] = "获取当前网址"
md["get_current_url"][2] = "huidige_url_ophalen"
md["get_current_url"][3] = "obtenir_url_actuelle"
md["get_current_url"][4] = "ottenere_url_corrente"
md["get_current_url"][5] = "現在のURLを取得"
md["get_current_url"][6] = "현재의_URL을_가져"
md["get_current_url"][7] = "obter_url_atual"
md["get_current_url"][8] = "получить_текущий_URL"
md["get_current_url"][9] = "obtener_url_actual"
md["get_page_source"] = ["*"] * num_langs
md["get_page_source"][0] = "get_page_source"
md["get_page_source"][1] = "获取页面源代码"
md["get_page_source"][2] = "broncode_ophalen"
md["get_page_source"][3] = "obtenir_html_de_la_page"
md["get_page_source"][4] = "ottenere_la_pagina_html"
md["get_page_source"][5] = "ページのソースコードを取得する"
md["get_page_source"][6] = "페이지의_소스_코드를_얻을"
md["get_page_source"][7] = "obter_a_página_html"
md["get_page_source"][8] = "получить_источник_страницы"
md["get_page_source"][9] = "obtener_html_de_la_página"
md["go_back"] = ["*"] * num_langs
md["go_back"][0] = "go_back"
md["go_back"][1] = "回去"
md["go_back"][2] = "terug"
md["go_back"][3] = "retour"
md["go_back"][4] = "indietro"
md["go_back"][5] = "戻る"
md["go_back"][6] = "뒤로"
md["go_back"][7] = "voltar"
md["go_back"][8] = "назад"
md["go_back"][9] = "volver"
md["go_forward"] = ["*"] * num_langs
md["go_forward"][0] = "go_forward"
md["go_forward"][1] = "向前"
md["go_forward"][2] = "vooruit"
md["go_forward"][3] = "en_avant"
md["go_forward"][4] = "avanti"
md["go_forward"][5] = "進む"
md["go_forward"][6] = "앞으로"
md["go_forward"][7] = "avançar"
md["go_forward"][8] = "вперед"
md["go_forward"][9] = "adelante"
md["is_text_visible"] = ["*"] * num_langs
md["is_text_visible"][0] = "is_text_visible"
md["is_text_visible"][1] = "文本是否显示"
md["is_text_visible"][2] = "tekst_zichtbaar"
md["is_text_visible"][3] = "est_texte_affiché"
md["is_text_visible"][4] = "è_testo_visto"
md["is_text_visible"][5] = "テキストが表示されています"
md["is_text_visible"][6] = "텍스트가_표시됩니다"
md["is_text_visible"][7] = "o_texto_está_visível"
md["is_text_visible"][8] = "текст_виден"
md["is_text_visible"][9] = "se_muestra_el_texto"
md["is_element_visible"] = ["*"] * num_langs
md["is_element_visible"][0] = "is_element_visible"
md["is_element_visible"][1] = "元素是否可见"
md["is_element_visible"][2] = "element_zichtbaar"
md["is_element_visible"][3] = "est_un_élément_affiché"
md["is_element_visible"][4] = "è_elemento_visto"
md["is_element_visible"][5] = "要素は表示されますか"
md["is_element_visible"][6] = "요소가_표시됩니다"
md["is_element_visible"][7] = "o_elemento_está_visível"
md["is_element_visible"][8] = "элемент_виден"
md["is_element_visible"][9] = "se_muestra_el_elemento"
md["is_element_present"] = ["*"] * num_langs
md["is_element_present"][0] = "is_element_present"
md["is_element_present"][1] = "元素是否存在"
md["is_element_present"][2] = "element_aanwezig"
md["is_element_present"][3] = "est_un_élément_présent"
md["is_element_present"][4] = "è_elemento_presente"
md["is_element_present"][5] = "要素が存在するかどうか"
md["is_element_present"][6] = "요소가_있습니다"
md["is_element_present"][7] = "o_elemento_está_presente"
md["is_element_present"][8] = "элемент_присутствует"
md["is_element_present"][9] = "está_presente_el_elemento"
md["wait_for_text"] = ["*"] * num_langs
md["wait_for_text"][0] = "wait_for_text"
md["wait_for_text"][1] = "等待文本"
md["wait_for_text"][2] = "wachten_op_tekst"
md["wait_for_text"][3] = "attendre_le_texte"
md["wait_for_text"][4] = "attendere_il_testo"
md["wait_for_text"][5] = "テキストを待つ"
md["wait_for_text"][6] = "텍스트가_나타날_때까지_기다립니다"
md["wait_for_text"][7] = "aguardar_o_texto"
md["wait_for_text"][8] = "ждать_текста"
md["wait_for_text"][9] = "espera_el_texto"
md["wait_for_element"] = ["*"] * num_langs
md["wait_for_element"][0] = "wait_for_element"
md["wait_for_element"][1] = "等待元素"
md["wait_for_element"][2] = "wachten_op_element"
md["wait_for_element"][3] = "attendre_un_élément"
md["wait_for_element"][4] = "attendere_un_elemento"
md["wait_for_element"][5] = "要素を待つ"
md["wait_for_element"][6] = "요소가_나타날_때까지_기다립니다"
md["wait_for_element"][7] = "aguardar_o_elemento"
md["wait_for_element"][8] = "ждать_элемента"
md["wait_for_element"][9] = "espera_el_elemento"
md["wait_for_element_visible"] = ["*"] * num_langs
md["wait_for_element_visible"][0] = "wait_for_element_visible"
md["wait_for_element_visible"][1] = "等待元素可见"
md["wait_for_element_visible"][2] = "wachten_op_element_zichtbaar"
md["wait_for_element_visible"][3] = "attendre_un_élément_affiché"
md["wait_for_element_visible"][4] = "attendere_un_elemento_visto"
md["wait_for_element_visible"][5] = "要素が表示されるのを待ちます"
md["wait_for_element_visible"][6] = "요소가_표시_될_때까지_기다립니다"
md["wait_for_element_visible"][7] = "aguardar_o_elemento_visível"
md["wait_for_element_visible"][8] = "ждать_элемента_виден"
md["wait_for_element_visible"][9] = "espera_el_elemento_se_muestra"
md["wait_for_element_not_visible"] = ["*"] * num_langs
md["wait_for_element_not_visible"][0] = "wait_for_element_not_visible"
md["wait_for_element_not_visible"][1] = "等待元素不可见"
md["wait_for_element_not_visible"][2] = "wachten_op_element_niet_zichtbaar"
md["wait_for_element_not_visible"][3] = "attendre_un_élément_pas_affiché"
md["wait_for_element_not_visible"][4] = "attendere_un_elemento_non_visto"
md["wait_for_element_not_visible"][5] = "要素が表示されなくなるまで待ちます"
md["wait_for_element_not_visible"][6] = "요소가_사라질_때까지_기다리십시오"
md["wait_for_element_not_visible"][7] = "aguardar_o_elemento_não_visível"
md["wait_for_element_not_visible"][8] = "ждать_элемента_не_виден"
md["wait_for_element_not_visible"][9] = "espera_el_elemento_no_se_muestra"
md["wait_for_element_present"] = ["*"] * num_langs
md["wait_for_element_present"][0] = "wait_for_element_present"
md["wait_for_element_present"][1] = "等待元素存在"
md["wait_for_element_present"][2] = "wachten_op_element_aanwezig"
md["wait_for_element_present"][3] = "attendre_un_élément_présent"
md["wait_for_element_present"][4] = "attendere_un_elemento_presente"
md["wait_for_element_present"][5] = "要素が存在するのを待つ"
md["wait_for_element_present"][6] = "요소가_존재할_때까지_기다립니다"
md["wait_for_element_present"][7] = "aguardar_o_elemento_presente"
md["wait_for_element_present"][8] = "ждать_элемента_присутствует"
md["wait_for_element_present"][9] = "espera_el_elemento_presente"
md["wait_for_element_absent"] = ["*"] * num_langs
md["wait_for_element_absent"][0] = "wait_for_element_absent"
md["wait_for_element_absent"][1] = "等待元素不存在"
md["wait_for_element_absent"][2] = "wachten_op_element_afwezig"
md["wait_for_element_absent"][3] = "attendre_un_élément_pas_présent"
md["wait_for_element_absent"][4] = "attendere_un_elemento_assente"
md["wait_for_element_absent"][5] = "要素が存在しないのを待つ"
md["wait_for_element_absent"][6] = "요소가_나타날_때까지_기다리십시오"
md["wait_for_element_absent"][7] = "aguardar_o_elemento_ausente"
md["wait_for_element_absent"][8] = "ждать_элемента_отсутствует"
md["wait_for_element_absent"][9] = "espera_el_elemento_ausente"
md["sleep"] = ["*"] * num_langs
md["sleep"][0] = "sleep"
md["sleep"][1] = "睡"
md["sleep"][2] = "slapen"
md["sleep"][3] = "dormir"
md["sleep"][4] = "dormire"
md["sleep"][5] = "眠る"
md["sleep"][6] = "잠을"
md["sleep"][7] = "dormir"
md["sleep"][8] = "спать"
md["sleep"][9] = "dormir"
md["wait"] = ["*"] * num_langs
md["wait"][0] = "wait"
md["wait"][1] = "等待"
md["wait"][2] = "wachten"
md["wait"][3] = "attendre"
md["wait"][4] = "attendere"
md["wait"][5] = "待つ"
md["wait"][6] = "기다림"
md["wait"][7] = "aguardar"
md["wait"][8] = "ждать"
md["wait"][9] = "espera"
md["submit"] = ["*"] * num_langs
md["submit"][0] = "submit"
md["submit"][1] = "提交"
md["submit"][2] = "verzenden"
md["submit"][3] = "soumettre"
md["submit"][4] = "inviare"
md["submit"][5] = "を提出す"
md["submit"][6] = "제출"
md["submit"][7] = "enviar"
md["submit"][8] = "отправить"
md["submit"][9] = "enviar"
md["clear"] = ["*"] * num_langs
md["clear"][0] = "clear"
md["clear"][1] = "清除"
md["clear"][2] = "wissen"
md["clear"][3] = "effacer"
md["clear"][4] = "cancellare"
md["clear"][5] = "クリアする"
md["clear"][6] = "지우려면"
md["clear"][7] = "limpar"
md["clear"][8] = "очистить"
md["clear"][9] = "despejar"
md["js_click"] = ["*"] * num_langs
md["js_click"][0] = "js_click"
md["js_click"][1] = "JS单击"
md["js_click"][2] = "js_klik"
md["js_click"][3] = "js_cliquer"
md["js_click"][4] = "js_fare_clic"
md["js_click"][5] = "JSクリックして"
md["js_click"][6] = "JS_클릭"
md["js_click"][7] = "js_clique"
md["js_click"][8] = "JS_нажмите"
md["js_click"][9] = "js_haga_clic"
md["js_update_text"] = ["*"] * num_langs
md["js_update_text"][0] = "js_update_text"
md["js_update_text"][1] = "JS更新文本"
md["js_update_text"][2] = "js_tekst_bijwerken"
md["js_update_text"][3] = "js_modifier_texte"
md["js_update_text"][4] = "js_aggiornare_testo"
md["js_update_text"][5] = "JSテキストを更新"
md["js_update_text"][6] = "JS_텍스트를_업데이트"
md["js_update_text"][7] = "js_atualizar_texto"
md["js_update_text"][8] = "JS_обновить_текст"
md["js_update_text"][9] = "js_actualizar_texto"
md["js_type"] = ["*"] * num_langs
md["js_type"][0] = "js_type"
md["js_type"][1] = "JS输入文本"
md["js_type"][2] = "js_typ"
md["js_type"][3] = "js_taper"
md["js_type"][4] = "js_digitare"
md["js_type"][5] = "JS入力"
md["js_type"][6] = "JS_입력"
md["js_type"][7] = "js_tipo"
md["js_type"][8] = "JS_введите"
md["js_type"][9] = "js_escriba"
md["inspect_html"] = ["*"] * num_langs
md["inspect_html"][0] = "inspect_html"
md["inspect_html"][1] = "检查HTML"
md["inspect_html"][2] = "html_inspecteren"
md["inspect_html"][3] = "vérifier_html"
md["inspect_html"][4] = "controlla_html"
md["inspect_html"][5] = "HTMLをチェック"
md["inspect_html"][6] = "HTML_확인"
md["inspect_html"][7] = "verificar_html"
md["inspect_html"][8] = "проверить_HTML"
md["inspect_html"][9] = "comprobar_html"
md["save_screenshot"] = ["*"] * num_langs
md["save_screenshot"][0] = "save_screenshot"
md["save_screenshot"][1] = "保存截图"
md["save_screenshot"][2] = "bewaar_screenshot"
md["save_screenshot"][3] = "enregistrer_capture_d_écran"
md["save_screenshot"][4] = "salva_screenshot"
md["save_screenshot"][5] = "スクリーンショットを保存"
md["save_screenshot"][6] = "스크린_샷_저장"
md["save_screenshot"][7] = "salvar_captura_de_tela"
md["save_screenshot"][8] = "сохранить_скриншот"
md["save_screenshot"][9] = "guardar_captura_de_pantalla"
md["choose_file"] = ["*"] * num_langs
md["choose_file"][0] = "choose_file"
md["choose_file"][1] = "选择文件"
md["choose_file"][2] = "selecteer_bestand"
md["choose_file"][3] = "sélectionner_fichier"
md["choose_file"][4] = "seleziona_file"
md["choose_file"][5] = "ファイルを選択"
md["choose_file"][6] = "파일을_선택"
md["choose_file"][7] = "selecionar_arquivo"
md["choose_file"][8] = "выберите_файл"
md["choose_file"][9] = "seleccionar_archivo"
md["execute_script"] = ["*"] * num_langs
md["execute_script"][0] = "execute_script"
md["execute_script"][1] = "执行脚本"
md["execute_script"][2] = "script_uitvoeren"
md["execute_script"][3] = "exécuter_script"
md["execute_script"][4] = "eseguire_script"
md["execute_script"][5] = "スクリプトを実行する"
md["execute_script"][6] = "스크립트를_실행하려면"
md["execute_script"][7] = "executar_script"
md["execute_script"][8] = "выполнение_скрипта"
md["execute_script"][9] = "ejecutar_script"
md["safe_execute_script"] = ["*"] * num_langs
md["safe_execute_script"][0] = "safe_execute_script"
md["safe_execute_script"][1] = "安全执行脚本"
md["safe_execute_script"][2] = "script_veilig_uitvoeren"
md["safe_execute_script"][3] = "exécuter_script_sans_risque"
md["safe_execute_script"][4] = "eseguire_script_sicuro"
md["safe_execute_script"][5] = "スクリプトを安全に実行する"
md["safe_execute_script"][6] = "스크립트를_안전하게_실행"
md["safe_execute_script"][7] = "executar_script_com_segurança"
md["safe_execute_script"][8] = "безопасное_выполнение_скрипта"
md["safe_execute_script"][9] = "ejecutar_script_de_forma_segura"
md["activate_jquery"] = ["*"] * num_langs
md["activate_jquery"][0] = "activate_jquery"
md["activate_jquery"][1] = "加载JQUERY"
md["activate_jquery"][2] = "activeer_jquery"
md["activate_jquery"][3] = "activer_jquery"
md["activate_jquery"][4] = "attiva_jquery"
md["activate_jquery"][5] = "JQUERYを読み込む"
md["activate_jquery"][6] = "JQUERY_로드"
md["activate_jquery"][7] = "ativar_jquery"
md["activate_jquery"][8] = "активировать_JQUERY"
md["activate_jquery"][9] = "activar_jquery"
md["ad_block"] = ["*"] * num_langs
md["ad_block"][0] = "ad_block"
md["ad_block"][1] = "阻止广告"
md["ad_block"][2] = "blokkeer_advertenties"
md["ad_block"][3] = "annonces_de_bloc"
md["ad_block"][4] = "bloccare_gli_annunci"
md["ad_block"][5] = "ブロック広告"
md["ad_block"][6] = "광고_차단"
md["ad_block"][7] = "bloquear_anúncios"
md["ad_block"][8] = "блокировать_рекламу"
md["ad_block"][9] = "bloquear_anuncios"
md["skip"] = ["*"] * num_langs
md["skip"][0] = "skip"
md["skip"][1] = "跳过"
md["skip"][2] = "overslaan"
md["skip"][3] = "sauter"
md["skip"][4] = "saltare"
md["skip"][5] = "スキップ"
md["skip"][6] = "건너뛸"
md["skip"][7] = "saltar"
md["skip"][8] = "пропускать"
md["skip"][9] = "saltar"
md["assert_no_404_errors"] = ["*"] * num_langs
md["assert_no_404_errors"][0] = "assert_no_404_errors"
md["assert_no_404_errors"][1] = "检查断开的链接"
md["assert_no_404_errors"][2] = "controleren_op_gebroken_links"
md["assert_no_404_errors"][3] = "vérifier_les_liens_rompus"
md["assert_no_404_errors"][4] = "verificare_i_collegamenti"
md["assert_no_404_errors"][5] = "リンク切れを確認する"
md["assert_no_404_errors"][6] = "끊어진_링크_확인"
md["assert_no_404_errors"][7] = "verificar_se_há_links_quebrados"
md["assert_no_404_errors"][8] = "проверить_ошибки_404"
md["assert_no_404_errors"][9] = "verificar_si_hay_enlaces_rotos"
md["assert_no_js_errors"] = ["*"] * num_langs
md["assert_no_js_errors"][0] = "assert_no_js_errors"
md["assert_no_js_errors"][1] = "检查JS错误"
md["assert_no_js_errors"][2] = "controleren_op_js_fouten"
md["assert_no_js_errors"][3] = "vérifier_les_erreurs_js"
md["assert_no_js_errors"][4] = "controlla_errori_js"
md["assert_no_js_errors"][5] = "JSエラーを確認する"
md["assert_no_js_errors"][6] = "JS_오류_확인"
md["assert_no_js_errors"][7] = "verificar_se_há_erros_js"
md["assert_no_js_errors"][8] = "проверить_ошибки_JS"
md["assert_no_js_errors"][9] = "verificar_si_hay_errores_js"
md["switch_to_frame"] = ["*"] * num_langs
md["switch_to_frame"][0] = "switch_to_frame"
md["switch_to_frame"][1] = "切换到帧"
md["switch_to_frame"][2] = "overschakelen_naar_frame"
md["switch_to_frame"][3] = "passer_au_cadre"
md["switch_to_frame"][4] = "passa_al_frame"
md["switch_to_frame"][5] = "フレームに切り替え"
md["switch_to_frame"][6] = "프레임으로_전환"
md["switch_to_frame"][7] = "mudar_para_o_quadro"
md["switch_to_frame"][8] = "переключиться_на_кадр"
md["switch_to_frame"][9] = "cambiar_al_marco"
md["switch_to_default_content"] = ["*"] * num_langs
md["switch_to_default_content"][0] = "switch_to_default_content"
md["switch_to_default_content"][1] = "切换到默认内容"
md["switch_to_default_content"][2] = "overschakelen_naar_standaardcontent"
md["switch_to_default_content"][3] = "passer_au_contenu_par_défaut"
md["switch_to_default_content"][4] = "passa_al_contenuto_predefinito"
md["switch_to_default_content"][5] = "デフォルトのコンテンツに切り替える"
md["switch_to_default_content"][6] = "기본_콘텐츠로_전환"
md["switch_to_default_content"][7] = "mudar_para_o_conteúdo_padrão"
md["switch_to_default_content"][8] = (
"переключиться_на_содержимое_по_умолчанию")
md["switch_to_default_content"][9] = "cambiar_al_contenido_predeterminado"
md["open_new_window"] = ["*"] * num_langs
md["open_new_window"][0] = "open_new_window"
md["open_new_window"][1] = "打开新窗口"
md["open_new_window"][2] = "nieuw_venster_openen"
md["open_new_window"][3] = "ouvrir_une_nouvelle_fenêtre"
md["open_new_window"][4] = "apri_una_nuova_finestra"
md["open_new_window"][5] = "新しいウィンドウを開く"
md["open_new_window"][6] = "새_창_열기"
md["open_new_window"][7] = "abrir_nova_janela"
md["open_new_window"][8] = "открыть_новое_окно"
md["open_new_window"][9] = "abrir_una_nueva_ventana"
md["switch_to_window"] = ["*"] * num_langs
md["switch_to_window"][0] = "switch_to_window"
md["switch_to_window"][1] = "切换到窗口"
md["switch_to_window"][2] = "overschakelen_naar_venster"
md["switch_to_window"][3] = "passer_à_fenêtre"
md["switch_to_window"][4] = "passa_alla_finestra"
md["switch_to_window"][5] = "ウィンドウに切り替え"
md["switch_to_window"][6] = "창으로_전환"
md["switch_to_window"][7] = "mudar_para_janela"
md["switch_to_window"][8] = "переключиться_на_окно"
md["switch_to_window"][9] = "cambiar_a_ventana"
md["switch_to_default_window"] = ["*"] * num_langs
md["switch_to_default_window"][0] = "switch_to_default_window"
md["switch_to_default_window"][1] = "切换到默认窗口"
md["switch_to_default_window"][2] = "overschakelen_naar_standaardvenster"
md["switch_to_default_window"][3] = "passer_à_fenêtre_par_défaut"
md["switch_to_default_window"][4] = "passa_alla_finestra_predefinita"
md["switch_to_default_window"][5] = "デフォルトのウィンドウに切り替える"
md["switch_to_default_window"][6] = "기본_창으로_전환"
md["switch_to_default_window"][7] = "mudar_para_a_janela_padrão"
md["switch_to_default_window"][8] = "переключиться_в_окно_по_умолчанию"
md["switch_to_default_window"][9] = "cambiar_a_ventana_predeterminada"
md["maximize_window"] = ["*"] * num_langs
md["maximize_window"][0] = "maximize_window"
md["maximize_window"][1] = "最大化窗口"
md["maximize_window"][2] = "venster_maximaliseren"
md["maximize_window"][3] = "maximiser_fenêtre"
md["maximize_window"][4] = "ingrandisci_finestra"
md["maximize_window"][5] = "ウィンドウを最大化する"
md["maximize_window"][6] = "창_최대화"
md["maximize_window"][7] = "maximizar_janela"
md["maximize_window"][8] = "максимальное_окно"
md["maximize_window"][9] = "maximizar_ventana"
md["highlight"] = ["*"] * num_langs
md["highlight"][0] = "highlight"
md["highlight"][1] = "亮点"
md["highlight"][2] = "markeren"
md["highlight"][3] = "illuminer"
md["highlight"][4] = "illuminare"
md["highlight"][5] = "ハイライト"
md["highlight"][6] = "강조"
md["highlight"][7] = "destaque"
md["highlight"][8] = "осветить"
md["highlight"][9] = "resalte"
md["highlight_click"] = ["*"] * num_langs
md["highlight_click"][0] = "highlight_click"
md["highlight_click"][1] = "亮点单击"
md["highlight_click"][2] = "markeren_klik"
md["highlight_click"][3] = "illuminer_cliquer"
md["highlight_click"][4] = "illuminare_clic"
md["highlight_click"][5] = "ハイライトしてクリックして"
md["highlight_click"][6] = "강조_클릭"
md["highlight_click"][7] = "destaque_clique"
md["highlight_click"][8] = "осветить_нажмите"
md["highlight_click"][9] = "resalte_clic"
md["scroll_to"] = ["*"] * num_langs
md["scroll_to"][0] = "scroll_to"
md["scroll_to"][1] = "滚动到"
md["scroll_to"][2] = "scrollen_naar"
md["scroll_to"][3] = "déménager_à"
md["scroll_to"][4] = "scorrere_fino_a"
md["scroll_to"][5] = "スクロールして"
md["scroll_to"][6] = "요소로_스크롤"
md["scroll_to"][7] = "rolar_para"
md["scroll_to"][8] = "прокрутить_к"
md["scroll_to"][9] = "desplazarse_a"
md["scroll_to_top"] = ["*"] * num_langs
md["scroll_to_top"][0] = "scroll_to_top"
md["scroll_to_top"][1] = "滚动到顶部"
md["scroll_to_top"][2] = "naar_boven_scrollen"
md["scroll_to_top"][3] = "faites_défiler_vers_le_haut"
md["scroll_to_top"][4] = "scorri_verso_alto"
md["scroll_to_top"][5] = "一番上までスクロール"
md["scroll_to_top"][6] = "맨_위로_스크롤"
md["scroll_to_top"][7] = "rolar_para_o_topo"
md["scroll_to_top"][8] = "пролистать_наверх"
md["scroll_to_top"][9] = "desplazarse_hasta_la_parte_superior"
md["scroll_to_bottom"] = ["*"] * num_langs
md["scroll_to_bottom"][0] = "scroll_to_bottom"
md["scroll_to_bottom"][1] = "滚动到底部"
md["scroll_to_bottom"][2] = "naar_beneden_scrollen"
md["scroll_to_bottom"][3] = "faites_défiler_vers_le_bas"
md["scroll_to_bottom"][4] = "scorri_verso_il_basso"
md["scroll_to_bottom"][5] = "一番下までスクロール"
md["scroll_to_bottom"][6] = "하단으로_스크롤"
md["scroll_to_bottom"][7] = "rolar_para_o_fundo"
md["scroll_to_bottom"][8] = "прокрутить_вниз"
md["scroll_to_bottom"][9] = "desplazarse_hasta_la_parte_inferior"
md["hover_and_click"] = ["*"] * num_langs
md["hover_and_click"][0] = "hover_and_click"
md["hover_and_click"][1] = "悬停并单击"
md["hover_and_click"][2] = "zweven_en_klik"
md["hover_and_click"][3] = "planer_au_dessus_et_cliquer"
md["hover_and_click"][4] = "passa_il_mouse_sopra_e_fai_clic"
md["hover_and_click"][5] = "上にマウスを移動しクリック"
md["hover_and_click"][6] = "위로_마우스를_이동하고_클릭"
md["hover_and_click"][7] = "passe_o_mouse_e_clique"
md["hover_and_click"][8] = "наведите_и_нажмите"
md["hover_and_click"][9] = "pasar_el_ratón_y_hacer_clic"
md["is_selected"] = ["*"] * num_langs
md["is_selected"][0] = "is_selected"
md["is_selected"][1] = "是否被选中"
md["is_selected"][2] = "is_het_geselecteerd"
md["is_selected"][3] = "est_il_sélectionné"
md["is_selected"][4] = "è_selezionato"
md["is_selected"][5] = "選択されていることを"
md["is_selected"][6] = "선택되어_있는지"
md["is_selected"][7] = "é_selecionado"
md["is_selected"][8] = "выбран"
md["is_selected"][9] = "está_seleccionado"
md["press_up_arrow"] = ["*"] * num_langs
md["press_up_arrow"][0] = "press_up_arrow"
md["press_up_arrow"][1] = "按向上箭头"
md["press_up_arrow"][2] = "druk_op_pijl_omhoog"
md["press_up_arrow"][3] = "appuyer_sur_flèche_haut"
md["press_up_arrow"][4] = "premere_la_freccia_su"
md["press_up_arrow"][5] = "上矢印を押します"
md["press_up_arrow"][6] = "위쪽_화살표를_누릅니다"
md["press_up_arrow"][7] = "pressione_a_seta_para_cima"
md["press_up_arrow"][8] = "нажмите_стрелку_вверх"
md["press_up_arrow"][9] = "presione_la_flecha_hacia_arriba"
md["press_down_arrow"] = ["*"] * num_langs
md["press_down_arrow"][0] = "press_down_arrow"
md["press_down_arrow"][1] = "按向下箭头"
md["press_down_arrow"][2] = "druk_op_pijl_omlaag"
md["press_down_arrow"][3] = "appuyer_sur_flèche_bas"
md["press_down_arrow"][4] = "premere_la_freccia_giù"
md["press_down_arrow"][5] = "下矢印を押します"
md["press_down_arrow"][6] = "아래쪽_화살표를_누르십시오"
md["press_down_arrow"][7] = "pressione_a_seta_para_baixo"
md["press_down_arrow"][8] = "нажмите_стрелку_вниз"
md["press_down_arrow"][9] = "presione_la_flecha_hacia_abajo"
md["press_left_arrow"] = ["*"] * num_langs
md["press_left_arrow"][0] = "press_left_arrow"
md["press_left_arrow"][1] = "按向左箭头"
md["press_left_arrow"][2] = "druk_op_pijl_links"
md["press_left_arrow"][3] = "appuyer_sur_flèche_gauche"
md["press_left_arrow"][4] = "premere_la_freccia_sinistra"
md["press_left_arrow"][5] = "左矢印を押します"
md["press_left_arrow"][6] = "왼쪽_화살표를_누르십시오"
md["press_left_arrow"][7] = "pressione_a_seta_esquerda"
md["press_left_arrow"][8] = "нажмите_стрелку_влево"
md["press_left_arrow"][9] = "presione_la_flecha_izquierda"
md["press_right_arrow"] = ["*"] * num_langs
md["press_right_arrow"][0] = "press_right_arrow"
md["press_right_arrow"][1] = "按向右箭头"
md["press_right_arrow"][2] = "druk_op_pijl_rechts"
md["press_right_arrow"][3] = "appuyer_sur_flèche_droite"
md["press_right_arrow"][4] = "premere_la_freccia_destra"
md["press_right_arrow"][5] = "右矢印を押します"
md["press_right_arrow"][6] = "오른쪽_화살표를_누르십시오"
md["press_right_arrow"][7] = "pressione_a_seta_direita"
md["press_right_arrow"][8] = "нажмите_стрелку_вправо"
md["press_right_arrow"][9] = "presione_la_flecha_derecha"
md["click_visible_elements"] = ["*"] * num_langs
md["click_visible_elements"][0] = "click_visible_elements"
md["click_visible_elements"][1] = "单击可见元素"
md["click_visible_elements"][2] = "klik_zichtbare_elementen"
md["click_visible_elements"][3] = "cliquer_éléments_visibles"
md["click_visible_elements"][4] = "clic_sugli_elementi_visibili"
md["click_visible_elements"][5] = "表示要素をクリックします"
md["click_visible_elements"][6] = "페이지_요소를_클릭_합니다"
md["click_visible_elements"][7] = "clique_nos_elementos_visíveis"
md["click_visible_elements"][8] = "нажмите_видимые_элементы"
md["click_visible_elements"][9] = "clic_en_elementos_visibles"
md["select_option_by_text"] = ["*"] * num_langs
md["select_option_by_text"][0] = "select_option_by_text"
md["select_option_by_text"][1] = "按文本选择选项"
md["select_option_by_text"][2] = "optie_selecteren_op_tekst"
md["select_option_by_text"][3] = "sélectionner_option_par_texte"
md["select_option_by_text"][4] = "selezionare_opzione_per_testo"
md["select_option_by_text"][5] = "テキストでオプションを選択"
md["select_option_by_text"][6] = "텍스트로_옵션_선택"
md["select_option_by_text"][7] = "selecionar_opção_por_texto"
md["select_option_by_text"][8] = "выбрать_опцию_по_тексту"
md["select_option_by_text"][9] = "seleccionar_opción_por_texto"
md["select_option_by_index"] = ["*"] * num_langs
md["select_option_by_index"][0] = "select_option_by_index"
md["select_option_by_index"][1] = "按索引选择选项"
md["select_option_by_index"][2] = "optie_selecteren_op_index"
md["select_option_by_index"][3] = "sélectionner_option_par_index"
md["select_option_by_index"][4] = "selezionare_opzione_per_indice"
md["select_option_by_index"][5] = "インデックスでオプションを選択"
md["select_option_by_index"][6] = "인덱스별로_옵션_선택"
md["select_option_by_index"][7] = "selecionar_opção_por_índice"
md["select_option_by_index"][8] = "выбрать_опцию_по_индексу"
md["select_option_by_index"][9] = "seleccionar_opción_por_índice"
md["select_option_by_value"] = ["*"] * num_langs
md["select_option_by_value"][0] = "select_option_by_value"
md["select_option_by_value"][1] = "按值选择选项"
md["select_option_by_value"][2] = "optie_selecteren_op_waarde"
md["select_option_by_value"][3] = "sélectionner_option_par_valeur"
md["select_option_by_value"][4] = "selezionare_opzione_per_valore"
md["select_option_by_value"][5] = "値でオプションを選択"
md["select_option_by_value"][6] = "값별로_옵션_선택"
md["select_option_by_value"][7] = "selecionar_opção_por_valor"
md["select_option_by_value"][8] = "выбрать_опцию_по_значению"
md["select_option_by_value"][9] = "seleccionar_opción_por_valor"
md["create_presentation"] = ["*"] * num_langs
md["create_presentation"][0] = "create_presentation"
md["create_presentation"][1] = "创建演示文稿"
md["create_presentation"][2] = "maak_een_presentatie"
md["create_presentation"][3] = "créer_une_présentation"
md["create_presentation"][4] = "creare_una_presentazione"
md["create_presentation"][5] = "プレゼンテーションを作成する"
md["create_presentation"][6] = "프레젠테이션_만들기"
md["create_presentation"][7] = "criar_uma_apresentação"
md["create_presentation"][8] = "создать_презентацию"
md["create_presentation"][9] = "crear_una_presentación"
md["add_slide"] = ["*"] * num_langs
md["add_slide"][0] = "add_slide"
md["add_slide"][1] = "添加幻灯片"
md["add_slide"][2] = "een_dia_toevoegen"
md["add_slide"][3] = "ajouter_une_diapositive"
md["add_slide"][4] = "aggiungere_una_diapositiva"
md["add_slide"][5] = "スライドを追加する"
md["add_slide"][6] = "슬라이드_추가"
md["add_slide"][7] = "adicionar_um_slide"
md["add_slide"][8] = "добавить_слайд"
md["add_slide"][9] = "agregar_una_diapositiva"
md["save_presentation"] = ["*"] * num_langs
md["save_presentation"][0] = "save_presentation"
md["save_presentation"][1] = "保存演示文稿"
md["save_presentation"][2] = "de_presentatie_opslaan"
md["save_presentation"][3] = "enregistrer_la_présentation"
md["save_presentation"][4] = "salva_la_presentazione"
md["save_presentation"][5] = "プレゼンテーションを保存する"
md["save_presentation"][6] = "프레젠테이션_저장"
md["save_presentation"][7] = "salvar_apresentação"
md["save_presentation"][8] = "сохранить_презентацию"
md["save_presentation"][9] = "guardar_presentación"
md["begin_presentation"] = ["*"] * num_langs
md["begin_presentation"][0] = "begin_presentation"
md["begin_presentation"][1] = "开始演示文稿"
md["begin_presentation"][2] = "de_presentatie_starten"
md["begin_presentation"][3] = "démarrer_la_présentation"
md["begin_presentation"][4] = "avviare_la_presentazione"
md["begin_presentation"][5] = "プレゼンテーションを開始する"
md["begin_presentation"][6] = "프레젠테이션_시작"
md["begin_presentation"][7] = "iniciar_apresentação"
md["begin_presentation"][8] = "начать_презентацию"
md["begin_presentation"][9] = "iniciar_presentación"
md["create_pie_chart"] = ["*"] * num_langs
md["create_pie_chart"][0] = "create_pie_chart"
md["create_pie_chart"][1] = "创建饼图"
md["create_pie_chart"][2] = "maak_een_cirkeldiagram"
md["create_pie_chart"][3] = "créer_un_graphique_à_secteurs"
md["create_pie_chart"][4] = "creare_un_grafico_a_torta"
md["create_pie_chart"][5] = "円グラフを作成する"
md["create_pie_chart"][6] = "원형_차트_만들기"
md["create_pie_chart"][7] = "criar_um_gráfico_de_pizza"
md["create_pie_chart"][8] = "создать_круговую_диаграмму"
md["create_pie_chart"][9] = "crear_un_gráfico_circular"
md["create_bar_chart"] = ["*"] * num_langs
md["create_bar_chart"][0] = "create_bar_chart"
md["create_bar_chart"][1] = "创建条形图"
md["create_bar_chart"][2] = "maak_een_staafdiagram"
md["create_bar_chart"][3] = "créer_un_graphique_à_barres"
md["create_bar_chart"][4] = "creare_un_grafico_a_barre"
md["create_bar_chart"][5] = "棒グラフを作成する"
md["create_bar_chart"][6] = "막대_차트_만들기"
md["create_bar_chart"][7] = "criar_um_gráfico_de_barras"
md["create_bar_chart"][8] = "создать_бар_диаграмму"
md["create_bar_chart"][9] = "crear_un_gráfico_de_barras"
md["create_column_chart"] = ["*"] * num_langs
md["create_column_chart"][0] = "create_column_chart"
md["create_column_chart"][1] = "创建柱形图"
md["create_column_chart"][2] = "maak_een_kolomdiagram"
md["create_column_chart"][3] = "créer_un_graphique_à_colonnes"
md["create_column_chart"][4] = "creare_un_grafico_a_colonne"
md["create_column_chart"][5] = "縦棒グラフを作成する"
md["create_column_chart"][6] = "열_차트_만들기"
md["create_column_chart"][7] = "criar_um_gráfico_de_colunas"
md["create_column_chart"][8] = "создать_столбчатую_диаграмму"
md["create_column_chart"][9] = "crear_un_gráfico_de_columnas"
md["create_line_chart"] = ["*"] * num_langs
md["create_line_chart"][0] = "create_line_chart"
md["create_line_chart"][1] = "创建折线图"
md["create_line_chart"][2] = "maak_een_lijndiagram"
md["create_line_chart"][3] = "créer_un_graphique_linéaire"
md["create_line_chart"][4] = "creare_un_grafico_a_linee"
md["create_line_chart"][5] = "折れ線グラフを作成する"
md["create_line_chart"][6] = "선_차트_만들기"
md["create_line_chart"][7] = "criar_um_gráfico_de_linhas"
md["create_line_chart"][8] = "создать_линейную_диаграмму"
md["create_line_chart"][9] = "crear_un_gráfico_de_líneas"
md["create_area_chart"] = ["*"] * num_langs
md["create_area_chart"][0] = "create_area_chart"
md["create_area_chart"][1] = "创建面积图"
md["create_area_chart"][2] = "maak_een_vlakdiagram"
md["create_area_chart"][3] = "créer_un_graphique_en_aires"
md["create_area_chart"][4] = "creare_un_grafico_ad_area"
md["create_area_chart"][5] = "面グラフを作成する"
md["create_area_chart"][6] = "영역_차트_만들기"
md["create_area_chart"][7] = "criar_um_gráfico_de_área"
md["create_area_chart"][8] = "создать_диаграмму_области"
md["create_area_chart"][9] = "crear_un_gráfico_de_área"
md["add_series_to_chart"] = ["*"] * num_langs
md["add_series_to_chart"][0] = "add_series_to_chart"
md["add_series_to_chart"][1] = "将系列添加到图表"
md["add_series_to_chart"][2] = "reeksen_toevoegen_aan_grafiek"
md["add_series_to_chart"][3] = "ajouter_séries_au_graphique"
md["add_series_to_chart"][4] = "aggiungere_serie_al_grafico"
md["add_series_to_chart"][5] = "グラフに系列を追加する"
md["add_series_to_chart"][6] = "차트에_시리즈_추가"
md["add_series_to_chart"][7] = "adicionar_séries_ao_gráfico"
md["add_series_to_chart"][8] = "добавить_серии_в_диаграмму"
md["add_series_to_chart"][9] = "agregar_series_al_gráfico"
md["add_data_point"] = ["*"] * num_langs
md["add_data_point"][0] = "add_data_point"
md["add_data_point"][1] = "添加数据点"
md["add_data_point"][2] = "gegevenspunt_toevoegen"
md["add_data_point"][3] = "ajouter_un_point_de_données"
md["add_data_point"][4] = "aggiungi_punto_dati"
md["add_data_point"][5] = "データポイントを追加する"
md["add_data_point"][6] = "데이터_포인트_추가"
md["add_data_point"][7] = "adicionar_ponto_de_dados"
md["add_data_point"][8] = "добавить_точку_данных"
md["add_data_point"][9] = "agregar_punto_de_datos"
md["save_chart"] = ["*"] * num_langs
md["save_chart"][0] = "save_chart"
md["save_chart"][1] = "保存图表"
md["save_chart"][2] = "grafiek_opslaan"
md["save_chart"][3] = "enregistrer_le_graphique"
md["save_chart"][4] = "salva_il_grafico"
md["save_chart"][5] = "グラフを保存する"
md["save_chart"][6] = "차트_저장"
md["save_chart"][7] = "salvar_gráfico"
md["save_chart"][8] = "сохранить_диаграмму"
md["save_chart"][9] = "guardar_gráfico"
md["display_chart"] = ["*"] * num_langs
md["display_chart"][0] = "display_chart"
md["display_chart"][1] = "显示图表"
md["display_chart"][2] = "grafiek_weergeven"
md["display_chart"][3] = "afficher_le_graphique"
md["display_chart"][4] = "mostra_il_grafico"
md["display_chart"][5] = "グラフを表示する"
md["display_chart"][6] = "차트_표시"
md["display_chart"][7] = "exibir_gráfico"
md["display_chart"][8] = "отображать_диаграмму"
md["display_chart"][9] = "muestra_gráfico"
md["extract_chart"] = ["*"] * num_langs
md["extract_chart"][0] = "extract_chart"
md["extract_chart"][1] = "提取图表"
md["extract_chart"][2] = "grafiek_uitpakken"
md["extract_chart"][3] = "extraire_le_graphique"
md["extract_chart"][4] = "estrarre_il_grafico"
md["extract_chart"][5] = "グラフを抽出する"
md["extract_chart"][6] = "차트_추출"
md["extract_chart"][7] = "extrair_gráfico"
md["extract_chart"][8] = "извлекать_диаграмму"
md["extract_chart"][9] = "extracto_gráfico"
md["create_tour"] = ["*"] * num_langs
md["create_tour"][0] = "create_tour"
md["create_tour"][1] = "创建游览"
md["create_tour"][2] = "maak_een_tour"
md["create_tour"][3] = "créer_une_visite"
md["create_tour"][4] = "creare_un_tour"
md["create_tour"][5] = "ツアーを作成する"
md["create_tour"][6] = "가이드_투어_만들기"
md["create_tour"][7] = "criar_um_tour"
md["create_tour"][8] = "создать_тур"
md["create_tour"][9] = "crear_una_gira"
md["create_shepherd_tour"] = ["*"] * num_langs
md["create_shepherd_tour"][0] = "create_shepherd_tour"
md["create_shepherd_tour"][1] = "创建SHEPHERD游览"
md["create_shepherd_tour"][2] = "maak_een_shepherd_tour"
md["create_shepherd_tour"][3] = "créer_une_visite_shepherd"
md["create_shepherd_tour"][4] = "creare_un_tour_shepherd"
md["create_shepherd_tour"][5] = "SHEPHERDツアーを作成する"
md["create_shepherd_tour"][6] = "가이드_SHEPHERD_투어_만들기"
md["create_shepherd_tour"][7] = "criar_um_tour_shepherd"
md["create_shepherd_tour"][8] = "создать_SHEPHERD_тур"
md["create_shepherd_tour"][9] = "crear_una_gira_shepherd"
md["create_bootstrap_tour"] = ["*"] * num_langs
md["create_bootstrap_tour"][0] = "create_bootstrap_tour"
md["create_bootstrap_tour"][1] = "创建BOOTSTRAP游览"
md["create_bootstrap_tour"][2] = "maak_een_bootstrap_tour"
md["create_bootstrap_tour"][3] = "créer_une_visite_bootstrap"
md["create_bootstrap_tour"][4] = "creare_un_tour_bootstrap"
md["create_bootstrap_tour"][5] = "BOOTSTRAPツアーを作成する"
md["create_bootstrap_tour"][6] = "가이드_BOOTSTRAP_투어_만들기"
md["create_bootstrap_tour"][7] = "criar_um_tour_bootstrap"
md["create_bootstrap_tour"][8] = "создать_BOOTSTRAP_тур"
md["create_bootstrap_tour"][9] = "crear_una_gira_bootstrap"
md["create_driverjs_tour"] = ["*"] * num_langs
md["create_driverjs_tour"][0] = "create_driverjs_tour"
md["create_driverjs_tour"][1] = "创建DRIVERJS游览"
md["create_driverjs_tour"][2] = "maak_een_driverjs_tour"
md["create_driverjs_tour"][3] = "créer_une_visite_driverjs"
md["create_driverjs_tour"][4] = "creare_un_tour_driverjs"
md["create_driverjs_tour"][5] = "DRIVERJSツアーを作成する"
md["create_driverjs_tour"][6] = "가이드_DRIVERJS_투어_만들기"
md["create_driverjs_tour"][7] = "criar_um_tour_driverjs"
md["create_driverjs_tour"][8] = "создать_DRIVERJS_тур"
md["create_driverjs_tour"][9] = "crear_una_gira_driverjs"
md["create_hopscotch_tour"] = ["*"] * num_langs
md["create_hopscotch_tour"][0] = "create_hopscotch_tour"
md["create_hopscotch_tour"][1] = "创建HOPSCOTCH游览"
md["create_hopscotch_tour"][2] = "maak_een_hopscotch_tour"
md["create_hopscotch_tour"][3] = "créer_une_visite_hopscotch"
md["create_hopscotch_tour"][4] = "creare_un_tour_hopscotch"
md["create_hopscotch_tour"][5] = "HOPSCOTCHツアーを作成する"
md["create_hopscotch_tour"][6] = "가이드_HOPSCOTCH_투어_만들기"
md["create_hopscotch_tour"][7] = "criar_um_tour_hopscotch"
md["create_hopscotch_tour"][8] = "создать_HOPSCOTCH_тур"
md["create_hopscotch_tour"][9] = "crear_una_gira_hopscotch"
md["create_introjs_tour"] = ["*"] * num_langs
md["create_introjs_tour"][0] = "create_introjs_tour"
md["create_introjs_tour"][1] = "创建INTROJS游览"
md["create_introjs_tour"][2] = "maak_een_introjs_tour"
md["create_introjs_tour"][3] = "créer_une_visite_introjs"
md["create_introjs_tour"][4] = "creare_un_tour_introjs"
md["create_introjs_tour"][5] = "INTROJSツアーを作成する"
md["create_introjs_tour"][6] = "가이드_INTROJS_투어_만들기"
md["create_introjs_tour"][7] = "criar_um_tour_introjs"
md["create_introjs_tour"][8] = "создать_INTROJS_тур"
md["create_introjs_tour"][9] = "crear_una_gira_introjs"
md["add_tour_step"] = ["*"] * num_langs
md["add_tour_step"][0] = "add_tour_step"
md["add_tour_step"][1] = "添加游览步骤"
md["add_tour_step"][2] = "toevoegen_tour_stap"
md["add_tour_step"][3] = "ajouter_étape_à_la_visite"
md["add_tour_step"][4] = "aggiungere_passo_al_tour"
md["add_tour_step"][5] = "ツアーステップを追加する"
md["add_tour_step"][6] = "둘러보기_단계_추가"
md["add_tour_step"][7] = "adicionar_passo_para_o_tour"
md["add_tour_step"][8] = "добавить_шаг_в_тур"
md["add_tour_step"][9] = "agregar_paso_a_la_gira"
md["play_tour"] = ["*"] * num_langs
md["play_tour"][0] = "play_tour"
md["play_tour"][1] = "播放游览"
md["play_tour"][2] = "speel_de_tour"
md["play_tour"][3] = "jouer_la_visite"
md["play_tour"][4] = "riprodurre_il_tour"
md["play_tour"][5] = "ツアーを再生する"
md["play_tour"][6] = "가이드_투어를하다"
md["play_tour"][7] = "jogar_o_tour"
md["play_tour"][8] = "играть_тур"
md["play_tour"][9] = "reproducir_la_gira"
md["export_tour"] = ["*"] * num_langs
md["export_tour"][0] = "export_tour"
md["export_tour"][1] = "导出游览"
md["export_tour"][2] = "de_tour_exporteren"
md["export_tour"][3] = "exporter_la_visite"
md["export_tour"][4] = "esportare_il_tour"
md["export_tour"][5] = "ツアーをエクスポートする"
md["export_tour"][6] = "가이드_투어_내보내기"
md["export_tour"][7] = "exportar_o_tour"
md["export_tour"][8] = "экспортировать_тур"
md["export_tour"][9] = "exportar_la_gira"
md["get_pdf_text"] = ["*"] * num_langs
md["get_pdf_text"][0] = "get_pdf_text"
md["get_pdf_text"][1] = "获取PDF文本"
md["get_pdf_text"][2] = "pdf_tekst_ophalen"
md["get_pdf_text"][3] = "obtenir_texte_pdf"
md["get_pdf_text"][4] = "ottenere_testo_pdf"
md["get_pdf_text"][5] = "PDFテキストを取得"
md["get_pdf_text"][6] = "PDF_텍스트를_검색"
md["get_pdf_text"][7] = "obter_texto_pdf"
md["get_pdf_text"][8] = "получить_текст_PDF"
md["get_pdf_text"][9] = "obtener_texto_pdf"
md["assert_pdf_text"] = ["*"] * num_langs
md["assert_pdf_text"][0] = "assert_pdf_text"
md["assert_pdf_text"][1] = "断言PDF文本"
md["assert_pdf_text"][2] = "controleren_pdf_tekst"
md["assert_pdf_text"][3] = "vérifier_texte_pdf"
md["assert_pdf_text"][4] = "verificare_testo_pdf"
md["assert_pdf_text"][5] = "PDFテキストを確認する"
md["assert_pdf_text"][6] = "PDF_텍스트_확인"
md["assert_pdf_text"][7] = "verificar_texto_pdf"
md["assert_pdf_text"][8] = "подтвердить_текст_PDF"
md["assert_pdf_text"][9] = "verificar_texto_pdf"
md["assert_downloaded_file"] = ["*"] * num_langs
md["assert_downloaded_file"][0] = "assert_downloaded_file"
md["assert_downloaded_file"][1] = "检查下载的文件"
md["assert_downloaded_file"][2] = "controleren_gedownloade_bestand"
md["assert_downloaded_file"][3] = "vérifier_fichier_téléchargé"
md["assert_downloaded_file"][4] = "verificare_file_scaricato"
md["assert_downloaded_file"][5] = "ダウンロードしたファイルを確認する"
md["assert_downloaded_file"][6] = "다운로드한_파일_확인"
md["assert_downloaded_file"][7] = "verificar_arquivo_baixado"
md["assert_downloaded_file"][8] = "подтвердить_загруженный_файл"
md["assert_downloaded_file"][9] = "verificar_archivo_descargado"
md["fail"] = ["*"] * num_langs
md["fail"][0] = "fail"
md["fail"][1] = "失败"
md["fail"][2] = "mislukken"
md["fail"][3] = "échouer"
md["fail"][4] = "fallire"
md["fail"][5] = "失敗"
md["fail"][6] = "실패"
md["fail"][7] = "falhar"
md["fail"][8] = "провалить"
md["fail"][9] = "fallar"
md["get"] = ["*"] * num_langs
md["get"][0] = "get"
md["get"][1] = "获取"
md["get"][2] = "ophalen"
md["get"][3] = "obtenir"
md["get"][4] = "ottenere"
md["get"][5] = "を取得する"
md["get"][6] = "받기"
md["get"][7] = "obter"
md["get"][8] = "получить"
md["get"][9] = "obtener"
md["visit"] = ["*"] * num_langs
md["visit"][0] = "visit"
md["visit"][1] = "访问"
md["visit"][2] = "bezoek"
md["visit"][3] = "visiter"
md["visit"][4] = "visita"
md["visit"][5] = "を訪問"
md["visit"][6] = "방문"
md["visit"][7] = "visitar"
md["visit"][8] = "посетить"
md["visit"][9] = "visita"
md["visit_url"] = ["*"] * num_langs
md["visit_url"][0] = "visit_url"
md["visit_url"][1] = "访问网址"
md["visit_url"][2] = "bezoek_url"
md["visit_url"][3] = "visiter_url"
md["visit_url"][4] = "visita_url"
md["visit_url"][5] = "URLを訪問"
md["visit_url"][6] = "방문_URL"
md["visit_url"][7] = "visitar_url"
md["visit_url"][8] = "посетить_URL"
md["visit_url"][9] = "visita_url"
md["get_element"] = ["*"] * num_langs
md["get_element"][0] = "get_element"
md["get_element"][1] = "获取元素"
md["get_element"][2] = "element_ophalen"
md["get_element"][3] = "obtenir_élément"
md["get_element"][4] = "ottenere_elemento"
md["get_element"][5] = "要素を取得する"
md["get_element"][6] = "요소_검색"
md["get_element"][7] = "obter_elemento"
md["get_element"][8] = "получить_элемент"
md["get_element"][9] = "obtener_elemento"
md["find_element"] = ["*"] * num_langs
md["find_element"][0] = "find_element"
md["find_element"][1] = "查找元素"
md["find_element"][2] = "vind_element"
md["find_element"][3] = "trouver_élément"
md["find_element"][4] = "trovare_elemento"
md["find_element"][5] = "要素を見つける"
md["find_element"][6] = "요소를_찾을"
md["find_element"][7] = "encontrar_elemento"
md["find_element"][8] = "найти_элемент"
md["find_element"][9] = "encontrar_elemento"
md["remove_element"] = ["*"] * num_langs
md["remove_element"][0] = "remove_element"
md["remove_element"][1] = "删除第一个元素"
md["remove_element"][2] = "verwijder_element"
md["remove_element"][3] = "supprimer_élément"
md["remove_element"][4] = "rimuovere_elemento"
md["remove_element"][5] = "最初の要素を削除"
md["remove_element"][6] = "첫_번째_요소_제거"
md["remove_element"][7] = "remover_elemento"
md["remove_element"][8] = "удалить_элемент"
md["remove_element"][9] = "eliminar_elemento"
md["remove_elements"] = ["*"] * num_langs
md["remove_elements"][0] = "remove_elements"
md["remove_elements"][1] = "删除所有元素"
md["remove_elements"][2] = "verwijder_elementen"
md["remove_elements"][3] = "supprimer_éléments"
md["remove_elements"][4] = "rimuovere_elementi"
md["remove_elements"][5] = "すべての要素を削除"
md["remove_elements"][6] = "모든_요소_제거"
md["remove_elements"][7] = "remover_elementos"
md["remove_elements"][8] = "удалить_элементы"
md["remove_elements"][9] = "eliminar_elementos"
md["find_text"] = ["*"] * num_langs
md["find_text"][0] = "find_text"
md["find_text"][1] = "查找文本"
md["find_text"][2] = "vind_tekst"
md["find_text"][3] = "trouver_texte"
md["find_text"][4] = "trovare_testo"
md["find_text"][5] = "テキストを見つける"
md["find_text"][6] = "텍스트_찾기"
md["find_text"][7] = "encontrar_texto"
md["find_text"][8] = "найти_текст"
md["find_text"][9] = "encontrar_texto"
md["set_text"] = ["*"] * num_langs
md["set_text"][0] = "set_text"
md["set_text"][1] = "设置文本"
md["set_text"][2] = "tekst_instellen"
md["set_text"][3] = "définir_texte"
md["set_text"][4] = "impostare_testo"
md["set_text"][5] = "テキストを設定する"
md["set_text"][6] = "텍스트_설정"
md["set_text"][7] = "definir_texto"
md["set_text"][8] = "набор_текст"
md["set_text"][9] = "establecer_texto"
md["get_attribute"] = ["*"] * num_langs
md["get_attribute"][0] = "get_attribute"
md["get_attribute"][1] = "获取属性"
md["get_attribute"][2] = "kenmerk_ophalen"
md["get_attribute"][3] = "obtenir_attribut"
md["get_attribute"][4] = "ottenere_attributo"
md["get_attribute"][5] = "属性を取得する"
md["get_attribute"][6] = "특성_검색"
md["get_attribute"][7] = "obter_atributo"
md["get_attribute"][8] = "получить_атрибут"
md["get_attribute"][9] = "obtener_atributo"
md["set_attribute"] = ["*"] * num_langs
md["set_attribute"][0] = "set_attribute"
md["set_attribute"][1] = "设置属性"
md["set_attribute"][2] = "kenmerk_instellen"
md["set_attribute"][3] = "définir_attribut"
md["set_attribute"][4] = "imposta_attributo"
md["set_attribute"][5] = "属性を設定する"
md["set_attribute"][6] = "특성_설정"
md["set_attribute"][7] = "definir_atributo"
md["set_attribute"][8] = "набор_атрибута"
md["set_attribute"][9] = "establecer_atributo"
md["set_attributes"] = ["*"] * num_langs
md["set_attributes"][0] = "set_attributes"
md["set_attributes"][1] = "设置所有属性"
md["set_attributes"][2] = "kenmerken_instellen"
md["set_attributes"][3] = "définir_attributs"
md["set_attributes"][4] = "impostare_gli_attributi"
md["set_attributes"][5] = "すべての属性を設定"
md["set_attributes"][6] = "모든_특성_설정"
md["set_attributes"][7] = "definir_atributos"
md["set_attributes"][8] = "набор_атрибутов"
md["set_attributes"][9] = "establecer_atributos"
md["type"] = ["*"] * num_langs
md["type"][0] = "type"
md["type"][1] = "输入文本"
md["type"][2] = "typ"
md["type"][3] = "taper"
md["type"][4] = "digitare"
md["type"][5] = "入力"
md["type"][6] = "입력"
md["type"][7] = "tipo"
md["type"][8] = "введите"
md["type"][9] = "escriba"
md["write"] = ["*"] * num_langs
md["write"][0] = "write"
md["write"][1] = "写文本"
md["write"][2] = "schrijven"
md["write"][3] = "écriver"
md["write"][4] = "scrivere"
md["write"][5] = "書く"
md["write"][6] = "쓰다"
md["write"][7] = "escreva"
md["write"][8] = "написать"
md["write"][9] = "escribir"
md["set_messenger_theme"] = ["*"] * num_langs
md["set_messenger_theme"][0] = "set_messenger_theme"
md["set_messenger_theme"][1] = "设置消息主题"
md["set_messenger_theme"][2] = "thema_van_bericht_instellen"
md["set_messenger_theme"][3] = "définir_thème_du_message"
md["set_messenger_theme"][4] = "impostare_tema_del_messaggio"
md["set_messenger_theme"][5] = "メッセージのスタイルを設定する"
md["set_messenger_theme"][6] = "메시지_테마_설정"
md["set_messenger_theme"][7] = "definir_tema_da_mensagem"
md["set_messenger_theme"][8] = "набор_тему_сообщения"
md["set_messenger_theme"][9] = "establecer_tema_del_mensaje"
md["post_message"] = ["*"] * num_langs
md["post_message"][0] = "post_message"
md["post_message"][1] = "显示讯息"
md["post_message"][2] = "bericht_weergeven"
md["post_message"][3] = "afficher_message"
md["post_message"][4] = "visualizza_messaggio"
md["post_message"][5] = "メッセージを表示する"
md["post_message"][6] = "메시지를_표시"
md["post_message"][7] = "exibir_mensagem"
md["post_message"][8] = "показать_сообщение"
md["post_message"][9] = "mostrar_mensaje"
md["_print"] = ["*"] * num_langs
md["_print"][0] = "_print"
md["_print"][1] = "打印"
md["_print"][2] = "afdrukken"
md["_print"][3] = "imprimer"
md["_print"][4] = "stampare"
md["_print"][5] = "印刷"
md["_print"][6] = "인쇄"
md["_print"][7] = "imprimir"
md["_print"][8] = "печатать"
md["_print"][9] = "imprimir"
md["deferred_assert_element"] = ["*"] * num_langs
md["deferred_assert_element"][0] = "deferred_assert_element"
md["deferred_assert_element"][1] = "推迟断言元素"
md["deferred_assert_element"][2] = "uitgestelde_controleren_element"
md["deferred_assert_element"][3] = "reporté_vérifier_élément"
md["deferred_assert_element"][4] = "differita_verificare_elemento"
md["deferred_assert_element"][5] = "を延期する要素を確認する"
md["deferred_assert_element"][6] = "연기된_요소_확인"
md["deferred_assert_element"][7] = "adiada_verificar_elemento"
md["deferred_assert_element"][8] = "отложенный_подтвердить_элемент"
md["deferred_assert_element"][9] = "diferido_verificar_elemento"
md["deferred_assert_text"] = ["*"] * num_langs
md["deferred_assert_text"][0] = "deferred_assert_text"
md["deferred_assert_text"][1] = "推迟断言文本"
md["deferred_assert_text"][2] = "uitgestelde_controleren_tekst"
md["deferred_assert_text"][3] = "reporté_vérifier_texte"
md["deferred_assert_text"][4] = "differita_verificare_testo"
md["deferred_assert_text"][5] = "を延期するテキストを確認する"
md["deferred_assert_text"][6] = "연기된_텍스트_확인"
md["deferred_assert_text"][7] = "adiada_verificar_texto"
md["deferred_assert_text"][8] = "отложенный_подтвердить_текст"
md["deferred_assert_text"][9] = "diferido_verificar_texto"
md["process_deferred_asserts"] = ["*"] * num_langs
md["process_deferred_asserts"][0] = "process_deferred_asserts"
md["process_deferred_asserts"][1] = "处理推迟断言"
md["process_deferred_asserts"][2] = "verwerken_uitgestelde_controleren"
md["process_deferred_asserts"][3] = "effectuer_vérifications_reportées"
md["process_deferred_asserts"][4] = "elaborare_differita_verificari"
md["process_deferred_asserts"][5] = "遅延アサーションの処理"
md["process_deferred_asserts"][6] = "연기된_검증_처리"
md["process_deferred_asserts"][7] = "processar_verificações_adiada"
md["process_deferred_asserts"][8] = "обработки_отложенных_подтверждений"
md["process_deferred_asserts"][9] = "procesar_verificaciones_diferidas"
md["accept_alert"] = ["*"] * num_langs
md["accept_alert"][0] = "accept_alert"
md["accept_alert"][1] = "接受警报"
md["accept_alert"][2] = "waarschuwing_accepteren"
md["accept_alert"][3] = "accepter_alerte"
md["accept_alert"][4] = "accetta_avviso"
md["accept_alert"][5] = "アラートを受け入れる"
md["accept_alert"][6] = "경고를_수락"
md["accept_alert"][7] = "aceitar_alerta"
md["accept_alert"][8] = "принять_оповещение"
md["accept_alert"][9] = "aceptar_alerta"
md["dismiss_alert"] = ["*"] * num_langs
md["dismiss_alert"][0] = "dismiss_alert"
md["dismiss_alert"][1] = "解除警报"
md["dismiss_alert"][2] = "waarschuwing_wegsturen"
md["dismiss_alert"][3] = "rejeter_alerte"
md["dismiss_alert"][4] = "elimina_avviso"
md["dismiss_alert"][5] = "アラートを却下"
md["dismiss_alert"][6] = "경고를_거부"
md["dismiss_alert"][7] = "demitir_alerta"
md["dismiss_alert"][8] = "увольнять_оповещение"
md["dismiss_alert"][9] = "descartar_alerta"
md["switch_to_alert"] = ["*"] * num_langs
md["switch_to_alert"][0] = "switch_to_alert"
md["switch_to_alert"][1] = "切换到警报"
md["switch_to_alert"][2] = "overschakelen_naar_waarschuwing"
md["switch_to_alert"][3] = "passer_à_alerte"
md["switch_to_alert"][4] = "passa_al_avviso"
md["switch_to_alert"][5] = "アラートに切り替え"
md["switch_to_alert"][6] = "경고로_전환"
md["switch_to_alert"][7] = "mudar_para_alerta"
md["switch_to_alert"][8] = "переключиться_на_оповещение"
md["switch_to_alert"][9] = "cambiar_a_alerta"
md["drag_and_drop"] = ["*"] * num_langs
md["drag_and_drop"][0] = "drag_and_drop"
md["drag_and_drop"][1] = "拖放"
md["drag_and_drop"][2] = "slepen_en_neerzetten"
md["drag_and_drop"][3] = "glisser_et_déposer"
md["drag_and_drop"][4] = "trascinare_e_rilasciare"
md["drag_and_drop"][5] = "ドラッグアンドドロップ"
md["drag_and_drop"][6] = "드래그_앤_드롭"
md["drag_and_drop"][7] = "arrastar_e_soltar"
md["drag_and_drop"][8] = "перетащить_и_падение"
md["drag_and_drop"][9] = "arrastrar_y_soltar"
md["load_html_file"] = ["*"] * num_langs
md["load_html_file"][0] = "load_html_file"
md["load_html_file"][1] = "加载HTML文件"
md["load_html_file"][2] = "html_bestand_laden"
md["load_html_file"][3] = "charger_html_fichier"
md["load_html_file"][4] = "caricare_html_file"
md["load_html_file"][5] = "HTMLファイルを読み込む"
md["load_html_file"][6] = "HTML_파일_로드"
md["load_html_file"][7] = "carregar_arquivo_html"
md["load_html_file"][8] = "загрузить_HTML_файл"
md["load_html_file"][9] = "cargar_archivo_html"
md["open_html_file"] = ["*"] * num_langs
md["open_html_file"][0] = "open_html_file"
md["open_html_file"][1] = "打开HTML文件"
md["open_html_file"][2] = "html_bestand_openen"
md["open_html_file"][3] = "ouvrir_html_fichier"
md["open_html_file"][4] = "apri_html_file"
md["open_html_file"][5] = "HTMLファイルを開く"
md["open_html_file"][6] = "HTML_파일_열기"
md["open_html_file"][7] = "abrir_arquivo_html"
md["open_html_file"][8] = "открыть_HTML_файл"
md["open_html_file"][9] = "abrir_archivo_html"
md["delete_all_cookies"] = ["*"] * num_langs
md["delete_all_cookies"][0] = "delete_all_cookies"
md["delete_all_cookies"][1] = "删除所有COOKIE"
md["delete_all_cookies"][2] = "alle_cookies_verwijderen"
md["delete_all_cookies"][3] = "supprimer_tous_les_cookies"
md["delete_all_cookies"][4] = "elimina_tutti_i_cookie"
md["delete_all_cookies"][5] = "すべてのクッキーを削除する"
md["delete_all_cookies"][6] = "모든_쿠키_삭제"
md["delete_all_cookies"][7] = "excluir_todos_os_cookies"
md["delete_all_cookies"][8] = "удалить_все_куки"
md["delete_all_cookies"][9] = "eliminar_todas_las_cookies"
md["get_user_agent"] = ["*"] * num_langs
md["get_user_agent"][0] = "get_user_agent"
md["get_user_agent"][1] = "获取用户代理"
md["get_user_agent"][2] = "gebruikersagent_ophalen"
md["get_user_agent"][3] = "obtenir_agent_utilisateur"
md["get_user_agent"][4] = "ottenere_agente_utente"
md["get_user_agent"][5] = "ユーザーエージェントの取得"
md["get_user_agent"][6] = "사용자_에이전트_가져_오기"
md["get_user_agent"][7] = "obter_agente_do_usuário"
md["get_user_agent"][8] = "получить_агента_пользователя"
md["get_user_agent"][9] = "obtener_agente_de_usuario"
md["get_locale_code"] = ["*"] * num_langs
md["get_locale_code"][0] = "get_locale_code"
md["get_locale_code"][1] = "获取语言代码"
md["get_locale_code"][2] = "taalcode_ophalen"
md["get_locale_code"][3] = "obtenir_code_de_langue"
md["get_locale_code"][4] = "ottenere_codice_lingua"
md["get_locale_code"][5] = "言語コードを取得する"
md["get_locale_code"][6] = "언어_코드를_얻을"
md["get_locale_code"][7] = "obter_código_de_idioma"
md["get_locale_code"][8] = "получить_код_языка"
md["get_locale_code"][9] = "obtener_código_de_idioma"
################
# Duplicates
# "input" -> duplicate of "type"
md["input"] = ["*"] * num_langs
md["input"][0] = "input"
md["input"][1] = "输入文本"
md["input"][2] = "typ"
md["input"][3] = "taper"
md["input"][4] = "digitare"
md["input"][5] = "入力"
md["input"][6] = "입력"
md["input"][7] = "tipo"
md["input"][8] = "введите"
md["input"][9] = "escriba"
# "goto" -> duplicate of "visit"
md["goto"] = ["*"] * num_langs
md["goto"][0] = "goto"
md["goto"][1] = "访问"
md["goto"][2] = "bezoek"
md["goto"][3] = "visiter"
md["goto"][4] = "visita"
md["goto"][5] = "を訪問"
md["goto"][6] = "방문"
md["goto"][7] = "visitar"
md["goto"][8] = "посетить"
md["goto"][9] = "visita"
# "go_to" -> duplicate of "visit"
md["go_to"] = ["*"] * num_langs
md["go_to"][0] = "go_to"
md["go_to"][1] = "访问"
md["go_to"][2] = "bezoek"
md["go_to"][3] = "visiter"
md["go_to"][4] = "visita"
md["go_to"][5] = "を訪問"
md["go_to"][6] = "방문"
md["go_to"][7] = "visitar"
md["go_to"][8] = "посетить"
md["go_to"][9] = "visita"
# "refresh" -> duplicate of "refresh_page"
md["refresh"] = ["*"] * num_langs
md["refresh"][0] = "refresh"
md["refresh"][1] = "刷新页面"
md["refresh"][2] = "ververs_pagina"
md["refresh"][3] = "rafraîchir_la_page"
md["refresh"][4] = "aggiorna_la_pagina"
md["refresh"][5] = "ページを更新する"
md["refresh"][6] = "페이지_새로_고침"
md["refresh"][7] = "atualizar_a_página"
md["refresh"][8] = "обновить_страницу"
md["refresh"][9] = "actualizar_la_página"
# "reload" -> duplicate of "refresh_page"
md["reload"] = ["*"] * num_langs
md["reload"][0] = "reload"
md["reload"][1] = "刷新页面"
md["reload"][2] = "ververs_pagina"
md["reload"][3] = "rafraîchir_la_page"
md["reload"][4] = "aggiorna_la_pagina"
md["reload"][5] = "ページを更新する"
md["reload"][6] = "페이지_새로_고침"
md["reload"][7] = "atualizar_a_página"
md["reload"][8] = "обновить_страницу"
md["reload"][9] = "actualizar_la_página"
# "reload_page" -> duplicate of "refresh_page"
md["reload_page"] = ["*"] * num_langs
md["reload_page"][0] = "reload_page"
md["reload_page"][1] = "刷新页面"
md["reload_page"][2] = "ververs_pagina"
md["reload_page"][3] = "rafraîchir_la_page"
md["reload_page"][4] = "aggiorna_la_pagina"
md["reload_page"][5] = "ページを更新する"
md["reload_page"][6] = "페이지_새로_고침"
md["reload_page"][7] = "atualizar_a_página"
md["reload_page"][8] = "обновить_страницу"
md["reload_page"][9] = "actualizar_la_página"
# "get_page_title" -> duplicate of "get_title"
md["get_page_title"] = ["*"] * num_langs
md["get_page_title"][0] = "get_page_title"
md["get_page_title"][1] = "获取标题"
md["get_page_title"][2] = "titel_ophalen"
md["get_page_title"][3] = "obtenir_le_titre"
md["get_page_title"][4] = "ottenere_il_titolo"
md["get_page_title"][5] = "タイトルを取得する"
md["get_page_title"][6] = "제목_검색"
md["get_page_title"][7] = "obter_título"
md["get_page_title"][8] = "получить_название"
md["get_page_title"][9] = "obtener_título"
# "click_link" -> duplicate of "click_link_text"
md["click_link"] = ["*"] * num_langs
md["click_link"][0] = "click_link"
md["click_link"][1] = "单击链接文本"
md["click_link"][2] = "klik_linktekst"
md["click_link"][3] = "cliquer_texte_du_lien"
md["click_link"][4] = "clic_testo_del_collegamento"
md["click_link"][5] = "リンクテキストをクリックします"
md["click_link"][6] = "링크_텍스트를_클릭합니다"
md["click_link"][7] = "clique_texto_do_link"
md["click_link"][8] = "нажмите_ссылку"
md["click_link"][9] = "clic_texto_del_enlace"
# "send_keys" -> duplicate of "add_text"
md["send_keys"] = ["*"] * num_langs
md["send_keys"][0] = "send_keys"
md["send_keys"][1] = "添加文本"
md["send_keys"][2] = "tekst_toevoegen"
md["send_keys"][3] = "ajouter_texte"
md["send_keys"][4] = "aggiungi_testo"
md["send_keys"][5] = "テキストを追加"
md["send_keys"][6] = "텍스트를_추가"
md["send_keys"][7] = "adicionar_texto"
md["send_keys"][8] = "добавить_текст"
md["send_keys"][9] = "agregar_texto"
# "set_attribute_all" -> duplicate of "set_attributes"
md["set_attribute_all"] = ["*"] * num_langs
md["set_attribute_all"][0] = "set_attribute_all"
md["set_attribute_all"][1] = "设置所有属性"
md["set_attribute_all"][2] = "kenmerken_instellen"
md["set_attribute_all"][3] = "définir_attributs"
md["set_attribute_all"][4] = "impostare_gli_attributi"
md["set_attribute_all"][5] = "すべての属性を設定"
md["set_attribute_all"][6] = "모든_특성_설정"
md["set_attribute_all"][7] = "definir_atributos"
md["set_attribute_all"][8] = "набор_атрибутов"
md["set_attribute_all"][9] = "establecer_atributos"
# "is_checked" -> duplicate of "is_selected"
md["is_checked"] = ["*"] * num_langs
md["is_checked"][0] = "is_checked"
md["is_checked"][1] = "是否被选中"
md["is_checked"][2] = "is_het_geselecteerd"
md["is_checked"][3] = "est_il_sélectionné"
md["is_checked"][4] = "è_selezionato"
md["is_checked"][5] = "選択されていることを"
md["is_checked"][6] = "선택되어_있는지"
md["is_checked"][7] = "é_selecionado"
md["is_checked"][8] = "выбран"
md["is_checked"][9] = "está_seleccionado"
# "wait_for_text_visible" -> duplicate of "wait_for_text"
md["wait_for_text_visible"] = ["*"] * num_langs
md["wait_for_text_visible"][0] = "wait_for_text_visible"
md["wait_for_text_visible"][1] = "等待文本"
md["wait_for_text_visible"][2] = "wachten_op_tekst"
md["wait_for_text_visible"][3] = "attendre_le_texte"
md["wait_for_text_visible"][4] = "attendere_il_testo"
md["wait_for_text_visible"][5] = "テキストを待つ"
md["wait_for_text_visible"][6] = "텍스트가_나타날_때까지_기다립니다"
md["wait_for_text_visible"][7] = "aguardar_o_texto"
md["wait_for_text_visible"][8] = "ждать_текста"
md["wait_for_text_visible"][9] = "espera_el_texto"
# "assert_text_visible" -> duplicate of "assert_text"
md["assert_text_visible"] = ["*"] * num_langs
md["assert_text_visible"][0] = "assert_text_visible"
md["assert_text_visible"][1] = "断言文本"
md["assert_text_visible"][2] = "controleren_tekst"
md["assert_text_visible"][3] = "vérifier_texte"
md["assert_text_visible"][4] = "verificare_testo"
md["assert_text_visible"][5] = "テキストを確認する"
md["assert_text_visible"][6] = "텍스트_확인"
md["assert_text_visible"][7] = "verificar_texto"
md["assert_text_visible"][8] = "подтвердить_текст"
md["assert_text_visible"][9] = "verificar_texto"
# "assert_no_broken_links" -> duplicate of "assert_no_404_errors"
md["assert_no_broken_links"] = ["*"] * num_langs
md["assert_no_broken_links"][0] = "assert_no_broken_links"
md["assert_no_broken_links"][1] = "检查断开的链接"
md["assert_no_broken_links"][2] = "controleren_op_gebroken_links"
md["assert_no_broken_links"][3] = "vérifier_les_liens_rompus"
md["assert_no_broken_links"][4] = "verificare_i_collegamenti"
md["assert_no_broken_links"][5] = "リンク切れを確認する"
md["assert_no_broken_links"][6] = "끊어진_링크_확인"
md["assert_no_broken_links"][7] = "verificar_se_há_links_quebrados"
md["assert_no_broken_links"][8] = "проверить_ошибки_404"
md["assert_no_broken_links"][9] = "verificar_si_hay_enlaces_rotos"
# "block_ads" -> duplicate of "ad_block"
md["block_ads"] = ["*"] * num_langs
md["block_ads"][0] = "block_ads"
md["block_ads"][1] = "阻止广告"
md["block_ads"][2] = "blokkeer_advertenties"
md["block_ads"][3] = "annonces_de_bloc"
md["block_ads"][4] = "bloccare_gli_annunci"
md["block_ads"][5] = "ブロック広告"
md["block_ads"][6] = "광고_차단"
md["block_ads"][7] = "bloquear_anúncios"
md["block_ads"][8] = "блокировать_рекламу"
md["block_ads"][9] = "bloquear_anuncios"
# "start_tour" -> duplicate of "play_tour"
md["start_tour"] = ["*"] * num_langs
md["start_tour"][0] = "start_tour"
md["start_tour"][1] = "播放游览"
md["start_tour"][2] = "speel_de_tour"
md["start_tour"][3] = "jouer_la_visite"
md["start_tour"][4] = "riprodurre_il_tour"
md["start_tour"][5] = "ツアーを再生する"
md["start_tour"][6] = "가이드_투어를하다"
md["start_tour"][7] = "jogar_o_tour"
md["start_tour"][8] = "играть_тур"
md["start_tour"][9] = "reproducir_la_gira"
# "wait_for_and_accept_alert" -> duplicate of "accept_alert"
md["wait_for_and_accept_alert"] = ["*"] * num_langs
md["wait_for_and_accept_alert"][0] = "wait_for_and_accept_alert"
md["wait_for_and_accept_alert"][1] = "接受警报"
md["wait_for_and_accept_alert"][2] = "waarschuwing_accepteren"
md["wait_for_and_accept_alert"][3] = "accepter_alerte"
md["wait_for_and_accept_alert"][4] = "accetta_avviso"
md["wait_for_and_accept_alert"][5] = "アラートを受け入れる"
md["wait_for_and_accept_alert"][6] = "경고를_수락"
md["wait_for_and_accept_alert"][7] = "aceitar_alerta"
md["wait_for_and_accept_alert"][8] = "принять_оповещение"
md["wait_for_and_accept_alert"][9] = "aceptar_alerta"
# "wait_for_and_dismiss_alert" -> duplicate of "dismiss_alert"
md["wait_for_and_dismiss_alert"] = ["*"] * num_langs
md["wait_for_and_dismiss_alert"][0] = "wait_for_and_dismiss_alert"
md["wait_for_and_dismiss_alert"][1] = "解除警报"
md["wait_for_and_dismiss_alert"][2] = "waarschuwing_wegsturen"
md["wait_for_and_dismiss_alert"][3] = "rejeter_alerte"
md["wait_for_and_dismiss_alert"][4] = "elimina_avviso"
md["wait_for_and_dismiss_alert"][5] = "アラートを却下"
md["wait_for_and_dismiss_alert"][6] = "경고를_거부"
md["wait_for_and_dismiss_alert"][7] = "demitir_alerta"
md["wait_for_and_dismiss_alert"][8] = "увольнять_оповещение"
md["wait_for_and_dismiss_alert"][9] = "descartar_alerta"
# "wait_for_and_switch_to_alert" -> duplicate of "switch_to_alert"
md["wait_for_and_switch_to_alert"] = ["*"] * num_langs
md["wait_for_and_switch_to_alert"][0] = "wait_for_and_switch_to_alert"
md["wait_for_and_switch_to_alert"][1] = "切换到警报"
md["wait_for_and_switch_to_alert"][2] = "overschakelen_naar_waarschuwing"
md["wait_for_and_switch_to_alert"][3] = "passer_à_alerte"
md["wait_for_and_switch_to_alert"][4] = "passa_al_avviso"
md["wait_for_and_switch_to_alert"][5] = "アラートに切り替え"
md["wait_for_and_switch_to_alert"][6] = "경고로_전환"
md["wait_for_and_switch_to_alert"][7] = "mudar_para_alerta"
md["wait_for_and_switch_to_alert"][8] = "переключиться_на_оповещение"
md["wait_for_and_switch_to_alert"][9] = "cambiar_a_alerta"
################
# MasterQA Only!
md["verify"] = ["*"] * num_langs
md["verify"][0] = "verify"
md["verify"][1] = "校验"
md["verify"][2] = "controleren"
md["verify"][3] = "vérifier"
md["verify"][4] = "verificare"
md["verify"][5] = "を確認する"
md["verify"][6] = "확인"
md["verify"][7] = "verificar"
md["verify"][8] = "подтвердить"
md["verify"][9] = "verificar"
|
mdmintz/SeleniumBase
|
seleniumbase/translate/master_dict.py
|
Python
|
mit
| 99,028
|
from plotly.basedatatypes import BaseTraceType as _BaseTraceType
import copy as _copy
class Scatter(_BaseTraceType):
# class properties
# --------------------
_parent_path_str = ""
_path_str = "scatter"
_valid_props = {
"cliponaxis",
"connectgaps",
"customdata",
"customdatasrc",
"dx",
"dy",
"error_x",
"error_y",
"fill",
"fillcolor",
"groupnorm",
"hoverinfo",
"hoverinfosrc",
"hoverlabel",
"hoveron",
"hovertemplate",
"hovertemplatesrc",
"hovertext",
"hovertextsrc",
"ids",
"idssrc",
"legendgroup",
"line",
"marker",
"meta",
"metasrc",
"mode",
"name",
"opacity",
"orientation",
"r",
"rsrc",
"selected",
"selectedpoints",
"showlegend",
"stackgaps",
"stackgroup",
"stream",
"t",
"text",
"textfont",
"textposition",
"textpositionsrc",
"textsrc",
"texttemplate",
"texttemplatesrc",
"tsrc",
"type",
"uid",
"uirevision",
"unselected",
"visible",
"x",
"x0",
"xaxis",
"xcalendar",
"xsrc",
"y",
"y0",
"yaxis",
"ycalendar",
"ysrc",
}
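    # Note (an assumption about the base class, not stated in this file):
    # `_valid_props` presumably lets `BaseTraceType` reject unknown property
    # names, so constructing e.g. Scatter(bogus=1) would raise a ValueError.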
# cliponaxis
# ----------
@property
def cliponaxis(self):
"""
Determines whether or not markers and text nodes are clipped
about the subplot axes. To show markers and text nodes above
axis lines and tick labels, make sure to set `xaxis.layer` and
`yaxis.layer` to *below traces*.
The 'cliponaxis' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["cliponaxis"]
@cliponaxis.setter
def cliponaxis(self, val):
self["cliponaxis"] = val
# connectgaps
# -----------
@property
def connectgaps(self):
"""
Determines whether or not gaps (i.e. {nan} or missing values)
in the provided data arrays are connected.
The 'connectgaps' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["connectgaps"]
@connectgaps.setter
def connectgaps(self, val):
self["connectgaps"] = val
# customdata
# ----------
@property
def customdata(self):
"""
        Assigns extra data to each datum. This may be useful when
        listening to hover, click, and selection events. Note that
        "scatter" traces also append customdata items in the markers'
        DOM elements
The 'customdata' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["customdata"]
@customdata.setter
def customdata(self, val):
self["customdata"] = val
# customdatasrc
# -------------
@property
def customdatasrc(self):
"""
        Sets the source reference on Chart Studio Cloud for `customdata`.
The 'customdatasrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["customdatasrc"]
@customdatasrc.setter
def customdatasrc(self, val):
self["customdatasrc"] = val
# dx
# --
@property
def dx(self):
"""
Sets the x coordinate step. See `x0` for more info.
The 'dx' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["dx"]
@dx.setter
def dx(self, val):
self["dx"] = val
# dy
# --
@property
def dy(self):
"""
Sets the y coordinate step. See `y0` for more info.
The 'dy' property is a number and may be specified as:
- An int or float
Returns
-------
int|float
"""
return self["dy"]
@dy.setter
def dy(self, val):
self["dy"] = val
# error_x
# -------
@property
def error_x(self):
"""
The 'error_x' property is an instance of ErrorX
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter.ErrorX`
- A dict of string/value properties that will be passed
to the ErrorX constructor
Supported dict properties:
array
                Sets the data corresponding to the length of each
error bar. Values are plotted relative to the
underlying data.
arrayminus
                Sets the data corresponding to the length of each
error bar in the bottom (left) direction for
                vertical (horizontal) bars. Values are plotted
relative to the underlying data.
arrayminussrc
Sets the source reference on Chart Studio Cloud
for arrayminus .
arraysrc
Sets the source reference on Chart Studio Cloud
for array .
color
                Sets the stroke color of the error bars.
copy_ystyle
symmetric
Determines whether or not the error bars have
                the same length in both directions (top/bottom
                for vertical bars, left/right for horizontal
                bars).
thickness
Sets the thickness (in px) of the error bars.
traceref
tracerefminus
type
Determines the rule used to generate the error
                bars. If "constant", the bar lengths are of a
constant value. Set this constant in `value`.
If "percent", the bar lengths correspond to a
percentage of underlying data. Set this
percentage in `value`. If "sqrt", the bar
                lengths correspond to the square of the
underlying data. If "data", the bar lengths are
set with data set `array`.
value
Sets the value of either the percentage (if
`type` is set to "percent") or the constant (if
`type` is set to "constant") corresponding to
the lengths of the error bars.
valueminus
Sets the value of either the percentage (if
`type` is set to "percent") or the constant (if
`type` is set to "constant") corresponding to
the lengths of the error bars in the bottom
                (left) direction for vertical (horizontal) bars.
visible
Determines whether or not this set of error
bars is visible.
width
Sets the width (in px) of the cross-bar at both
ends of the error bars.
Returns
-------
plotly.graph_objs.scatter.ErrorX
"""
return self["error_x"]
@error_x.setter
def error_x(self, val):
self["error_x"] = val
# error_y
# -------
@property
def error_y(self):
"""
The 'error_y' property is an instance of ErrorY
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter.ErrorY`
- A dict of string/value properties that will be passed
to the ErrorY constructor
Supported dict properties:
array
                Sets the data corresponding to the length of each
error bar. Values are plotted relative to the
underlying data.
arrayminus
                Sets the data corresponding to the length of each
error bar in the bottom (left) direction for
                vertical (horizontal) bars. Values are plotted
relative to the underlying data.
arrayminussrc
Sets the source reference on Chart Studio Cloud
for arrayminus .
arraysrc
Sets the source reference on Chart Studio Cloud
for array .
color
                Sets the stroke color of the error bars.
symmetric
Determines whether or not the error bars have
                the same length in both directions (top/bottom
                for vertical bars, left/right for horizontal
                bars).
thickness
Sets the thickness (in px) of the error bars.
traceref
tracerefminus
type
Determines the rule used to generate the error
                bars. If "constant", the bar lengths are of a
constant value. Set this constant in `value`.
If "percent", the bar lengths correspond to a
percentage of underlying data. Set this
percentage in `value`. If "sqrt", the bar
                lengths correspond to the square of the
underlying data. If "data", the bar lengths are
set with data set `array`.
value
Sets the value of either the percentage (if
`type` is set to "percent") or the constant (if
`type` is set to "constant") corresponding to
the lengths of the error bars.
valueminus
Sets the value of either the percentage (if
`type` is set to "percent") or the constant (if
`type` is set to "constant") corresponding to
the lengths of the error bars in the bottom
                (left) direction for vertical (horizontal) bars.
visible
Determines whether or not this set of error
bars is visible.
width
Sets the width (in px) of the cross-bar at both
ends of the error bars.
Returns
-------
plotly.graph_objs.scatter.ErrorY
"""
return self["error_y"]
@error_y.setter
def error_y(self, val):
self["error_y"] = val
# fill
# ----
@property
def fill(self):
"""
Sets the area to fill with a solid color. Defaults to "none"
unless this trace is stacked, then it gets "tonexty"
("tonextx") if `orientation` is "v" ("h") Use with `fillcolor`
if not "none". "tozerox" and "tozeroy" fill to x=0 and y=0
respectively. "tonextx" and "tonexty" fill between the
endpoints of this trace and the endpoints of the trace before
it, connecting those endpoints with straight lines (to make a
stacked area graph); if there is no trace before it, they
behave like "tozerox" and "tozeroy". "toself" connects the
endpoints of the trace (or each segment of the trace if it has
gaps) into a closed shape. "tonext" fills the space between two
traces if one completely encloses the other (eg consecutive
contour lines), and behaves like "toself" if there is no trace
before it. "tonext" should not be used if one trace does not
enclose the other. Traces in a `stackgroup` will only fill to
(or be filled to) other traces in the same group. With multiple
`stackgroup`s or some traces stacked and some not, if fill-
linked traces are not already consecutive, the later ones will
be pushed down in the drawing order.
The 'fill' property is an enumeration that may be specified as:
- One of the following enumeration values:
['none', 'tozeroy', 'tozerox', 'tonexty', 'tonextx',
'toself', 'tonext']
Returns
-------
Any
"""
return self["fill"]
@fill.setter
def fill(self, val):
self["fill"] = val
# fillcolor
# ---------
@property
def fillcolor(self):
"""
Sets the fill color. Defaults to a half-transparent variant of
the line color, marker color, or marker line color, whichever
is available.
The 'fillcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["fillcolor"]
@fillcolor.setter
def fillcolor(self, val):
self["fillcolor"] = val
# groupnorm
# ---------
@property
def groupnorm(self):
"""
Only relevant when `stackgroup` is used, and only the first
`groupnorm` found in the `stackgroup` will be used - including
if `visible` is "legendonly" but not if it is `false`. Sets the
normalization for the sum of this `stackgroup`. With
"fraction", the value of each trace at each location is divided
by the sum of all trace values at that location. "percent" is
the same but multiplied by 100 to show percentages. If there
are multiple subplots, or multiple `stackgroup`s on one
subplot, each will be normalized within its own set.
The 'groupnorm' property is an enumeration that may be specified as:
- One of the following enumeration values:
['', 'fraction', 'percent']
Returns
-------
Any
"""
return self["groupnorm"]
@groupnorm.setter
def groupnorm(self, val):
self["groupnorm"] = val
# hoverinfo
# ---------
@property
def hoverinfo(self):
"""
        Determines which trace information appears on hover. If `none`
or `skip` are set, no information is displayed upon hovering.
But, if `none` is set, click and hover events are still fired.
The 'hoverinfo' property is a flaglist and may be specified
as a string containing:
- Any combination of ['x', 'y', 'z', 'text', 'name'] joined with '+' characters
(e.g. 'x+y')
OR exactly one of ['all', 'none', 'skip'] (e.g. 'skip')
- A list or array of the above
Returns
-------
Any|numpy.ndarray
"""
return self["hoverinfo"]
@hoverinfo.setter
def hoverinfo(self, val):
self["hoverinfo"] = val
# hoverinfosrc
# ------------
@property
def hoverinfosrc(self):
"""
Sets the source reference on Chart Studio Cloud for hoverinfo
.
The 'hoverinfosrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["hoverinfosrc"]
@hoverinfosrc.setter
def hoverinfosrc(self, val):
self["hoverinfosrc"] = val
# hoverlabel
# ----------
@property
def hoverlabel(self):
"""
The 'hoverlabel' property is an instance of Hoverlabel
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter.Hoverlabel`
- A dict of string/value properties that will be passed
to the Hoverlabel constructor
Supported dict properties:
align
Sets the horizontal alignment of the text
content within hover label box. Has an effect
                only if the hover label text spans two or
more lines
alignsrc
Sets the source reference on Chart Studio Cloud
for align .
bgcolor
Sets the background color of the hover labels
for this trace
bgcolorsrc
Sets the source reference on Chart Studio Cloud
for bgcolor .
bordercolor
Sets the border color of the hover labels for
this trace.
bordercolorsrc
Sets the source reference on Chart Studio Cloud
for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of
characters) of the trace name in the hover
labels for all traces. -1 shows the whole name
regardless of length. 0-3 shows the first 0-3
characters, and an integer >3 will show the
whole name if it is less than that many
characters, but if it is longer, will truncate
to `namelength - 3` characters and add an
ellipsis.
namelengthsrc
Sets the source reference on Chart Studio Cloud
for namelength .
Returns
-------
plotly.graph_objs.scatter.Hoverlabel
"""
return self["hoverlabel"]
@hoverlabel.setter
def hoverlabel(self, val):
self["hoverlabel"] = val
# hoveron
# -------
@property
def hoveron(self):
"""
Do the hover effects highlight individual points (markers or
line points) or do they highlight filled regions? If the fill
is "toself" or "tonext" and there are no markers or text, then
the default is "fills", otherwise it is "points".
The 'hoveron' property is a flaglist and may be specified
as a string containing:
- Any combination of ['points', 'fills'] joined with '+' characters
(e.g. 'points+fills')
Returns
-------
Any
"""
return self["hoveron"]
@hoveron.setter
def hoveron(self, val):
self["hoveron"] = val
# hovertemplate
# -------------
@property
def hovertemplate(self):
"""
        Template string used for rendering the information that appears
        in the hover box. Note that this will override `hoverinfo`.
Variables are inserted using %{variable}, for example "y:
%{y}". Numbers are formatted using d3-format's syntax
%{variable:d3-format}, for example "Price: %{y:$.2f}".
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format for details on
the formatting syntax. Dates are formatted using d3-time-
format's syntax %{variable|d3-time-format}, for example "Day:
%{2019-01-01|%A}". https://github.com/d3/d3-3.x-api-
reference/blob/master/Time-Formatting.md#format for details on
the date formatting syntax. The variables available in
`hovertemplate` are the ones emitted as event data described at
this link https://plotly.com/javascript/plotlyjs-events/#event-
        data. Additionally, every attribute that can be specified per-
        point (the ones that are `arrayOk: true`) is available.
        Anything contained in the tag `<extra>` is displayed in the
secondary box, for example "<extra>{fullData.name}</extra>". To
hide the secondary box completely, use an empty tag
`<extra></extra>`.
The 'hovertemplate' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["hovertemplate"]
@hovertemplate.setter
def hovertemplate(self, val):
self["hovertemplate"] = val
# hovertemplatesrc
# ----------------
@property
def hovertemplatesrc(self):
"""
Sets the source reference on Chart Studio Cloud for
hovertemplate .
The 'hovertemplatesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["hovertemplatesrc"]
@hovertemplatesrc.setter
def hovertemplatesrc(self, val):
self["hovertemplatesrc"] = val
# hovertext
# ---------
@property
def hovertext(self):
"""
Sets hover text elements associated with each (x,y) pair. If a
single string, the same string appears over all the data
        points. If an array of strings, the items are mapped in order to
        this trace's (x,y) coordinates. To be seen, trace
`hoverinfo` must contain a "text" flag.
The 'hovertext' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["hovertext"]
@hovertext.setter
def hovertext(self, val):
self["hovertext"] = val
# hovertextsrc
# ------------
@property
def hovertextsrc(self):
"""
Sets the source reference on Chart Studio Cloud for hovertext
.
The 'hovertextsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["hovertextsrc"]
@hovertextsrc.setter
def hovertextsrc(self, val):
self["hovertextsrc"] = val
# ids
# ---
@property
def ids(self):
"""
        Assigns id labels to each datum. These ids are used for object constancy
of data points during animation. Should be an array of strings,
not numbers or any other type.
The 'ids' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["ids"]
@ids.setter
def ids(self, val):
self["ids"] = val
# idssrc
# ------
@property
def idssrc(self):
"""
Sets the source reference on Chart Studio Cloud for ids .
The 'idssrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["idssrc"]
@idssrc.setter
def idssrc(self, val):
self["idssrc"] = val
# legendgroup
# -----------
@property
def legendgroup(self):
"""
Sets the legend group for this trace. Traces part of the same
legend group hide/show at the same time when toggling legend
items.
The 'legendgroup' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["legendgroup"]
@legendgroup.setter
def legendgroup(self, val):
self["legendgroup"] = val
# line
# ----
@property
def line(self):
"""
The 'line' property is an instance of Line
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter.Line`
- A dict of string/value properties that will be passed
to the Line constructor
Supported dict properties:
color
Sets the line color.
dash
Sets the dash style of lines. Set to a dash
type string ("solid", "dot", "dash",
"longdash", "dashdot", or "longdashdot") or a
dash length list in px (eg "5px,10px,2px,2px").
shape
Determines the line shape. With "spline" the
lines are drawn using spline interpolation. The
other available values correspond to step-wise
line shapes.
simplify
Simplifies lines by removing nearly-collinear
points. When transitioning lines, it may be
desirable to disable this so that the number of
points along the resulting SVG path is
unaffected.
smoothing
Has an effect only if `shape` is set to
"spline" Sets the amount of smoothing. 0
corresponds to no smoothing (equivalent to a
"linear" shape).
width
Sets the line width (in px).
Returns
-------
plotly.graph_objs.scatter.Line
"""
return self["line"]
@line.setter
def line(self, val):
self["line"] = val
# marker
# ------
@property
def marker(self):
"""
The 'marker' property is an instance of Marker
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter.Marker`
- A dict of string/value properties that will be passed
to the Marker constructor
Supported dict properties:
autocolorscale
Determines whether the colorscale is a default
palette (`autocolorscale: true`) or the palette
determined by `marker.colorscale`. Has an
                effect only if `marker.color` is set to a
numerical array. In case `colorscale` is
unspecified or `autocolorscale` is true, the
default palette will be chosen according to
whether numbers in the `color` array are all
positive, all negative or mixed.
cauto
Determines whether or not the color domain is
computed with respect to the input data (here
in `marker.color`) or the bounds set in
                `marker.cmin` and `marker.cmax`. Has an effect
                only if `marker.color` is set to a numerical
array. Defaults to `false` when `marker.cmin`
and `marker.cmax` are set by the user.
cmax
Sets the upper bound of the color domain. Has
                an effect only if `marker.color` is set to a
numerical array. Value should have the same
units as in `marker.color` and if set,
`marker.cmin` must be set as well.
cmid
Sets the mid-point of the color domain by
scaling `marker.cmin` and/or `marker.cmax` to
be equidistant to this point. Has an effect
                only if `marker.color` is set to a numerical
array. Value should have the same units as in
`marker.color`. Has no effect when
`marker.cauto` is `false`.
cmin
Sets the lower bound of the color domain. Has
                an effect only if `marker.color` is set to a
numerical array. Value should have the same
units as in `marker.color` and if set,
`marker.cmax` must be set as well.
color
                Sets the marker color. It accepts either a
specific color or an array of numbers that are
mapped to the colorscale relative to the max
and min values of the array or relative to
`marker.cmin` and `marker.cmax` if set.
coloraxis
Sets a reference to a shared color axis.
References to these shared color axes are
"coloraxis", "coloraxis2", "coloraxis3", etc.
Settings for these shared color axes are set in
the layout, under `layout.coloraxis`,
`layout.coloraxis2`, etc. Note that multiple
color scales can be linked to the same color
axis.
colorbar
:class:`plotly.graph_objects.scatter.marker.Col
orBar` instance or dict with compatible
properties
colorscale
                Sets the colorscale. Has an effect only if
                `marker.color` is set to a numerical array. The
colorscale must be an array containing arrays
mapping a normalized value to an rgb, rgba,
hex, hsl, hsv, or named color string. At
minimum, a mapping for the lowest (0) and
highest (1) values are required. For example,
`[[0, 'rgb(0,0,255)'], [1, 'rgb(255,0,0)']]`.
To control the bounds of the colorscale in
color space, use`marker.cmin` and
`marker.cmax`. Alternatively, `colorscale` may
be a palette name string of the following list:
Greys,YlGnBu,Greens,YlOrRd,Bluered,RdBu,Reds,Bl
ues,Picnic,Rainbow,Portland,Jet,Hot,Blackbody,E
arth,Electric,Viridis,Cividis.
colorsrc
Sets the source reference on Chart Studio Cloud
for color .
gradient
:class:`plotly.graph_objects.scatter.marker.Gra
dient` instance or dict with compatible
properties
line
:class:`plotly.graph_objects.scatter.marker.Lin
e` instance or dict with compatible properties
maxdisplayed
Sets a maximum number of points to be drawn on
the graph. 0 corresponds to no limit.
opacity
Sets the marker opacity.
opacitysrc
Sets the source reference on Chart Studio Cloud
for opacity .
reversescale
Reverses the color mapping if true. Has an
                effect only if `marker.color` is set to a
numerical array. If true, `marker.cmin` will
correspond to the last color in the array and
`marker.cmax` will correspond to the first
color.
showscale
Determines whether or not a colorbar is
displayed for this trace. Has an effect only if
                `marker.color` is set to a numerical array.
size
Sets the marker size (in px).
sizemin
Has an effect only if `marker.size` is set to a
numerical array. Sets the minimum size (in px)
of the rendered marker points.
sizemode
Has an effect only if `marker.size` is set to a
numerical array. Sets the rule for which the
data in `size` is converted to pixels.
sizeref
Has an effect only if `marker.size` is set to a
numerical array. Sets the scale factor used to
determine the rendered size of marker points.
Use with `sizemin` and `sizemode`.
sizesrc
Sets the source reference on Chart Studio Cloud
for size .
symbol
Sets the marker symbol type. Adding 100 is
equivalent to appending "-open" to a symbol
name. Adding 200 is equivalent to appending
"-dot" to a symbol name. Adding 300 is
equivalent to appending "-open-dot" or "dot-
open" to a symbol name.
symbolsrc
Sets the source reference on Chart Studio Cloud
for symbol .
Returns
-------
plotly.graph_objs.scatter.Marker
"""
return self["marker"]
@marker.setter
def marker(self, val):
self["marker"] = val
# meta
# ----
@property
def meta(self):
"""
Assigns extra meta information associated with this trace that
can be used in various text attributes. Attributes such as
trace `name`, graph, axis and colorbar `title.text`, annotation
        `text`, `rangeselector`, `updatemenus` and `sliders` `label`
text all support `meta`. To access the trace `meta` values in
an attribute in the same trace, simply use `%{meta[i]}` where
`i` is the index or key of the `meta` item in question. To
access trace `meta` in layout attributes, use
        `%{data[n].meta[i]}` where `i` is the index or key of the
`meta` and `n` is the trace index.
The 'meta' property accepts values of any type
Returns
-------
Any|numpy.ndarray
"""
return self["meta"]
@meta.setter
def meta(self, val):
self["meta"] = val
# metasrc
# -------
@property
def metasrc(self):
"""
Sets the source reference on Chart Studio Cloud for meta .
The 'metasrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["metasrc"]
@metasrc.setter
def metasrc(self, val):
self["metasrc"] = val
# mode
# ----
@property
def mode(self):
"""
Determines the drawing mode for this scatter trace. If the
provided `mode` includes "text" then the `text` elements appear
at the coordinates. Otherwise, the `text` elements appear on
        hover. If there are fewer than 20 points and the trace is not
stacked then the default is "lines+markers". Otherwise,
"lines".
The 'mode' property is a flaglist and may be specified
as a string containing:
- Any combination of ['lines', 'markers', 'text'] joined with '+' characters
(e.g. 'lines+markers')
OR exactly one of ['none'] (e.g. 'none')
Returns
-------
Any
"""
return self["mode"]
@mode.setter
def mode(self, val):
self["mode"] = val
# name
# ----
@property
def name(self):
"""
        Sets the trace name. The trace name appears as the legend item
and on hover.
The 'name' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["name"]
@name.setter
def name(self, val):
self["name"] = val
# opacity
# -------
@property
def opacity(self):
"""
Sets the opacity of the trace.
The 'opacity' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self["opacity"]
@opacity.setter
def opacity(self, val):
self["opacity"] = val
# orientation
# -----------
@property
def orientation(self):
"""
Only relevant when `stackgroup` is used, and only the first
`orientation` found in the `stackgroup` will be used -
including if `visible` is "legendonly" but not if it is
`false`. Sets the stacking direction. With "v" ("h"), the y (x)
values of subsequent traces are added. Also affects the default
value of `fill`.
The 'orientation' property is an enumeration that may be specified as:
- One of the following enumeration values:
['v', 'h']
Returns
-------
Any
"""
return self["orientation"]
@orientation.setter
def orientation(self, val):
self["orientation"] = val
# r
# -
@property
def r(self):
"""
        r coordinates in scatter traces are deprecated! Please switch to
        the "scatterpolar" trace type. Sets the radial coordinates for
        legacy polar charts only.
The 'r' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["r"]
@r.setter
def r(self, val):
self["r"] = val
# rsrc
# ----
@property
def rsrc(self):
"""
Sets the source reference on Chart Studio Cloud for r .
The 'rsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["rsrc"]
@rsrc.setter
def rsrc(self, val):
self["rsrc"] = val
# selected
# --------
@property
def selected(self):
"""
The 'selected' property is an instance of Selected
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter.Selected`
- A dict of string/value properties that will be passed
to the Selected constructor
Supported dict properties:
marker
:class:`plotly.graph_objects.scatter.selected.M
arker` instance or dict with compatible
properties
textfont
:class:`plotly.graph_objects.scatter.selected.T
extfont` instance or dict with compatible
properties
Returns
-------
plotly.graph_objs.scatter.Selected
"""
return self["selected"]
@selected.setter
def selected(self, val):
self["selected"] = val
# selectedpoints
# --------------
@property
def selectedpoints(self):
"""
Array containing integer indices of selected points. Has an
effect only for traces that support selections. Note that an
empty array means an empty selection where the `unselected` are
        turned on for all points, whereas any other non-array value
        means no selection at all, where the `selected` and `unselected`
styles have no effect.
The 'selectedpoints' property accepts values of any type
Returns
-------
Any
"""
return self["selectedpoints"]
@selectedpoints.setter
def selectedpoints(self, val):
self["selectedpoints"] = val
# showlegend
# ----------
@property
def showlegend(self):
"""
Determines whether or not an item corresponding to this trace
is shown in the legend.
The 'showlegend' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self["showlegend"]
@showlegend.setter
def showlegend(self, val):
self["showlegend"] = val
# stackgaps
# ---------
@property
def stackgaps(self):
"""
Only relevant when `stackgroup` is used, and only the first
`stackgaps` found in the `stackgroup` will be used - including
if `visible` is "legendonly" but not if it is `false`.
Determines how we handle locations at which other traces in
this group have data but this one does not. With *infer zero*
we insert a zero at these locations. With "interpolate" we
linearly interpolate between existing values, and extrapolate a
constant beyond the existing values.
The 'stackgaps' property is an enumeration that may be specified as:
- One of the following enumeration values:
['infer zero', 'interpolate']
Returns
-------
Any
"""
return self["stackgaps"]
@stackgaps.setter
def stackgaps(self, val):
self["stackgaps"] = val
# stackgroup
# ----------
@property
def stackgroup(self):
"""
Set several scatter traces (on the same subplot) to the same
stackgroup in order to add their y values (or their x values if
`orientation` is "h"). If blank or omitted this trace will not
be stacked. Stacking also turns `fill` on by default, using
"tonexty" ("tonextx") if `orientation` is "h" ("v") and sets
the default `mode` to "lines" irrespective of point count. You
can only stack on a numeric (linear or log) axis. Traces in a
`stackgroup` will only fill to (or be filled to) other traces
in the same group. With multiple `stackgroup`s or some traces
stacked and some not, if fill-linked traces are not already
consecutive, the later ones will be pushed down in the drawing
order.
The 'stackgroup' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["stackgroup"]
@stackgroup.setter
def stackgroup(self, val):
self["stackgroup"] = val
# stream
# ------
@property
def stream(self):
"""
The 'stream' property is an instance of Stream
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter.Stream`
- A dict of string/value properties that will be passed
to the Stream constructor
Supported dict properties:
maxpoints
Sets the maximum number of points to keep on
the plots from an incoming stream. If
`maxpoints` is set to 50, only the newest 50
points will be displayed on the plot.
token
The stream id number links a data trace on a
plot with a stream. See https://chart-
studio.plotly.com/settings for more details.
Returns
-------
plotly.graph_objs.scatter.Stream
"""
return self["stream"]
@stream.setter
def stream(self, val):
self["stream"] = val
# t
# -
@property
def t(self):
"""
        t coordinates in scatter traces are deprecated! Please switch to
        the "scatterpolar" trace type. Sets the angular coordinates for
        legacy polar charts only.
The 't' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["t"]
@t.setter
def t(self, val):
self["t"] = val
# text
# ----
@property
def text(self):
"""
Sets text elements associated with each (x,y) pair. If a single
string, the same string appears over all the data points. If an
        array of strings, the items are mapped in order to this
trace's (x,y) coordinates. If trace `hoverinfo` contains a
"text" flag and "hovertext" is not set, these elements will be
seen in the hover labels.
The 'text' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["text"]
@text.setter
def text(self, val):
self["text"] = val
# textfont
# --------
@property
def textfont(self):
"""
Sets the text font.
The 'textfont' property is an instance of Textfont
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter.Textfont`
- A dict of string/value properties that will be passed
to the Textfont constructor
Supported dict properties:
color
colorsrc
Sets the source reference on Chart Studio Cloud
for color .
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for family .
size
sizesrc
Sets the source reference on Chart Studio Cloud
for size .
Returns
-------
plotly.graph_objs.scatter.Textfont
"""
return self["textfont"]
@textfont.setter
def textfont(self, val):
self["textfont"] = val
# textposition
# ------------
@property
def textposition(self):
"""
Sets the positions of the `text` elements with respects to the
(x,y) coordinates.
The 'textposition' property is an enumeration that may be specified as:
- One of the following enumeration values:
['top left', 'top center', 'top right', 'middle left',
'middle center', 'middle right', 'bottom left', 'bottom
center', 'bottom right']
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
Any|numpy.ndarray
"""
return self["textposition"]
@textposition.setter
def textposition(self, val):
self["textposition"] = val
# textpositionsrc
# ---------------
@property
def textpositionsrc(self):
"""
Sets the source reference on Chart Studio Cloud for
textposition .
The 'textpositionsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["textpositionsrc"]
@textpositionsrc.setter
def textpositionsrc(self, val):
self["textpositionsrc"] = val
# textsrc
# -------
@property
def textsrc(self):
"""
Sets the source reference on Chart Studio Cloud for text .
The 'textsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["textsrc"]
@textsrc.setter
def textsrc(self, val):
self["textsrc"] = val
# texttemplate
# ------------
@property
def texttemplate(self):
"""
Template string used for rendering the information text that
        appears on points. Note that this will override `textinfo`.
Variables are inserted using %{variable}, for example "y:
%{y}". Numbers are formatted using d3-format's syntax
%{variable:d3-format}, for example "Price: %{y:$.2f}".
https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format for details on
the formatting syntax. Dates are formatted using d3-time-
format's syntax %{variable|d3-time-format}, for example "Day:
%{2019-01-01|%A}". https://github.com/d3/d3-3.x-api-
reference/blob/master/Time-Formatting.md#format for details on
        the date formatting syntax. Every attribute that can be
        specified per-point (the ones that are `arrayOk: true`) is
        available.
The 'texttemplate' property is a string and must be specified as:
- A string
- A number that will be converted to a string
- A tuple, list, or one-dimensional numpy array of the above
Returns
-------
str|numpy.ndarray
"""
return self["texttemplate"]
@texttemplate.setter
def texttemplate(self, val):
self["texttemplate"] = val
# texttemplatesrc
# ---------------
@property
def texttemplatesrc(self):
"""
Sets the source reference on Chart Studio Cloud for
texttemplate .
The 'texttemplatesrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["texttemplatesrc"]
@texttemplatesrc.setter
def texttemplatesrc(self, val):
self["texttemplatesrc"] = val
# tsrc
# ----
@property
def tsrc(self):
"""
Sets the source reference on Chart Studio Cloud for t .
The 'tsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["tsrc"]
@tsrc.setter
def tsrc(self, val):
self["tsrc"] = val
# uid
# ---
@property
def uid(self):
"""
        Assign an id to this trace. Use this to provide object
constancy between traces during animations and transitions.
The 'uid' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["uid"]
@uid.setter
def uid(self, val):
self["uid"] = val
# uirevision
# ----------
@property
def uirevision(self):
"""
Controls persistence of some user-driven changes to the trace:
`constraintrange` in `parcoords` traces, as well as some
`editable: true` modifications such as `name` and
`colorbar.title`. Defaults to `layout.uirevision`. Note that
other user-driven trace attribute changes are controlled by
`layout` attributes: `trace.visible` is controlled by
`layout.legend.uirevision`, `selectedpoints` is controlled by
`layout.selectionrevision`, and `colorbar.(x|y)` (accessible
with `config: {editable: true}`) is controlled by
`layout.editrevision`. Trace changes are tracked by `uid`,
which only falls back on trace index if no `uid` is provided.
So if your app can add/remove traces before the end of the
`data` array, such that the same trace has a different index,
you can still preserve user-driven changes if you give each
trace a `uid` that stays with it as it moves.
The 'uirevision' property accepts values of any type
Returns
-------
Any
"""
return self["uirevision"]
@uirevision.setter
def uirevision(self, val):
self["uirevision"] = val
# unselected
# ----------
@property
def unselected(self):
"""
The 'unselected' property is an instance of Unselected
that may be specified as:
- An instance of :class:`plotly.graph_objs.scatter.Unselected`
- A dict of string/value properties that will be passed
to the Unselected constructor
Supported dict properties:
marker
:class:`plotly.graph_objects.scatter.unselected
.Marker` instance or dict with compatible
properties
textfont
:class:`plotly.graph_objects.scatter.unselected
.Textfont` instance or dict with compatible
properties
Returns
-------
plotly.graph_objs.scatter.Unselected
"""
return self["unselected"]
@unselected.setter
def unselected(self, val):
self["unselected"] = val
# visible
# -------
@property
def visible(self):
"""
Determines whether or not this trace is visible. If
"legendonly", the trace is not drawn, but can appear as a
legend item (provided that the legend itself is visible).
The 'visible' property is an enumeration that may be specified as:
- One of the following enumeration values:
[True, False, 'legendonly']
Returns
-------
Any
"""
return self["visible"]
@visible.setter
def visible(self, val):
self["visible"] = val
# x
# -
@property
def x(self):
"""
Sets the x coordinates.
The 'x' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["x"]
@x.setter
def x(self, val):
self["x"] = val
# x0
# --
@property
def x0(self):
"""
Alternate to `x`. Builds a linear space of x coordinates. Use
with `dx` where `x0` is the starting coordinate and `dx` the
step.
The 'x0' property accepts values of any type
Returns
-------
Any
"""
return self["x0"]
@x0.setter
def x0(self, val):
self["x0"] = val
# xaxis
# -----
@property
def xaxis(self):
"""
Sets a reference between this trace's x coordinates and a 2D
cartesian x axis. If "x" (the default value), the x coordinates
refer to `layout.xaxis`. If "x2", the x coordinates refer to
`layout.xaxis2`, and so on.
The 'xaxis' property is an identifier of a particular
subplot, of type 'x', that may be specified as the string 'x'
optionally followed by an integer >= 1
(e.g. 'x', 'x1', 'x2', 'x3', etc.)
Returns
-------
str
"""
return self["xaxis"]
@xaxis.setter
def xaxis(self, val):
self["xaxis"] = val
# xcalendar
# ---------
@property
def xcalendar(self):
"""
Sets the calendar system to use with `x` date data.
The 'xcalendar' property is an enumeration that may be specified as:
- One of the following enumeration values:
['gregorian', 'chinese', 'coptic', 'discworld',
'ethiopian', 'hebrew', 'islamic', 'julian', 'mayan',
'nanakshahi', 'nepali', 'persian', 'jalali', 'taiwan',
'thai', 'ummalqura']
Returns
-------
Any
"""
return self["xcalendar"]
@xcalendar.setter
def xcalendar(self, val):
self["xcalendar"] = val
# xsrc
# ----
@property
def xsrc(self):
"""
Sets the source reference on Chart Studio Cloud for x .
The 'xsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["xsrc"]
@xsrc.setter
def xsrc(self, val):
self["xsrc"] = val
# y
# -
@property
def y(self):
"""
Sets the y coordinates.
The 'y' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self["y"]
@y.setter
def y(self, val):
self["y"] = val
# y0
# --
@property
def y0(self):
"""
Alternate to `y`. Builds a linear space of y coordinates. Use
with `dy` where `y0` is the starting coordinate and `dy` the
step.
The 'y0' property accepts values of any type
Returns
-------
Any
"""
return self["y0"]
@y0.setter
def y0(self, val):
self["y0"] = val
# yaxis
# -----
@property
def yaxis(self):
"""
Sets a reference between this trace's y coordinates and a 2D
cartesian y axis. If "y" (the default value), the y coordinates
refer to `layout.yaxis`. If "y2", the y coordinates refer to
`layout.yaxis2`, and so on.
The 'yaxis' property is an identifier of a particular
subplot, of type 'y', that may be specified as the string 'y'
optionally followed by an integer >= 1
(e.g. 'y', 'y1', 'y2', 'y3', etc.)
Returns
-------
str
"""
return self["yaxis"]
@yaxis.setter
def yaxis(self, val):
self["yaxis"] = val
# ycalendar
# ---------
@property
def ycalendar(self):
"""
Sets the calendar system to use with `y` date data.
The 'ycalendar' property is an enumeration that may be specified as:
- One of the following enumeration values:
['gregorian', 'chinese', 'coptic', 'discworld',
'ethiopian', 'hebrew', 'islamic', 'julian', 'mayan',
'nanakshahi', 'nepali', 'persian', 'jalali', 'taiwan',
'thai', 'ummalqura']
Returns
-------
Any
"""
return self["ycalendar"]
@ycalendar.setter
def ycalendar(self, val):
self["ycalendar"] = val
# ysrc
# ----
@property
def ysrc(self):
"""
Sets the source reference on Chart Studio Cloud for y .
The 'ysrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self["ysrc"]
@ysrc.setter
def ysrc(self, val):
self["ysrc"] = val
# type
# ----
@property
def type(self):
return self._props["type"]
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
cliponaxis
Determines whether or not markers and text nodes are
clipped about the subplot axes. To show markers and
text nodes above axis lines and tick labels, make sure
to set `xaxis.layer` and `yaxis.layer` to *below
traces*.
connectgaps
Determines whether or not gaps (i.e. {nan} or missing
values) in the provided data arrays are connected.
customdata
            Assigns extra data to each datum. This may be useful when
            listening to hover, click and selection events. Note
            that "scatter" traces also append customdata items to
            the marker DOM elements.
customdatasrc
Sets the source reference on Chart Studio Cloud for
customdata .
dx
Sets the x coordinate step. See `x0` for more info.
dy
Sets the y coordinate step. See `y0` for more info.
error_x
:class:`plotly.graph_objects.scatter.ErrorX` instance
or dict with compatible properties
error_y
:class:`plotly.graph_objects.scatter.ErrorY` instance
or dict with compatible properties
fill
Sets the area to fill with a solid color. Defaults to
"none" unless this trace is stacked, then it gets
"tonexty" ("tonextx") if `orientation` is "v" ("h") Use
with `fillcolor` if not "none". "tozerox" and "tozeroy"
fill to x=0 and y=0 respectively. "tonextx" and
"tonexty" fill between the endpoints of this trace and
the endpoints of the trace before it, connecting those
endpoints with straight lines (to make a stacked area
graph); if there is no trace before it, they behave
like "tozerox" and "tozeroy". "toself" connects the
endpoints of the trace (or each segment of the trace if
it has gaps) into a closed shape. "tonext" fills the
space between two traces if one completely encloses the
other (eg consecutive contour lines), and behaves like
"toself" if there is no trace before it. "tonext"
should not be used if one trace does not enclose the
other. Traces in a `stackgroup` will only fill to (or
be filled to) other traces in the same group. With
multiple `stackgroup`s or some traces stacked and some
not, if fill-linked traces are not already consecutive,
the later ones will be pushed down in the drawing
order.
fillcolor
Sets the fill color. Defaults to a half-transparent
variant of the line color, marker color, or marker line
color, whichever is available.
groupnorm
Only relevant when `stackgroup` is used, and only the
first `groupnorm` found in the `stackgroup` will be
used - including if `visible` is "legendonly" but not
if it is `false`. Sets the normalization for the sum of
this `stackgroup`. With "fraction", the value of each
trace at each location is divided by the sum of all
trace values at that location. "percent" is the same
but multiplied by 100 to show percentages. If there are
multiple subplots, or multiple `stackgroup`s on one
subplot, each will be normalized within its own set.
hoverinfo
            Determines which trace information appears on hover. If
`none` or `skip` are set, no information is displayed
upon hovering. But, if `none` is set, click and hover
events are still fired.
hoverinfosrc
Sets the source reference on Chart Studio Cloud for
hoverinfo .
hoverlabel
:class:`plotly.graph_objects.scatter.Hoverlabel`
instance or dict with compatible properties
hoveron
Do the hover effects highlight individual points
(markers or line points) or do they highlight filled
regions? If the fill is "toself" or "tonext" and there
are no markers or text, then the default is "fills",
otherwise it is "points".
hovertemplate
Template string used for rendering the information that
            appears in the hover box. Note that this will override
`hoverinfo`. Variables are inserted using %{variable},
for example "y: %{y}". Numbers are formatted using
d3-format's syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}". https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format for
details on the formatting syntax. Dates are formatted
using d3-time-format's syntax %{variable|d3-time-
format}, for example "Day: %{2019-01-01|%A}".
https://github.com/d3/d3-3.x-api-
reference/blob/master/Time-Formatting.md#format for
details on the date formatting syntax. The variables
available in `hovertemplate` are the ones emitted as
event data described at this link
https://plotly.com/javascript/plotlyjs-events/#event-
            data. Additionally, every attribute that can be
            specified per-point (the ones that are `arrayOk: true`)
            is available. Anything contained in the tag `<extra>` is
displayed in the secondary box, for example
"<extra>{fullData.name}</extra>". To hide the secondary
box completely, use an empty tag `<extra></extra>`.
hovertemplatesrc
Sets the source reference on Chart Studio Cloud for
hovertemplate .
hovertext
Sets hover text elements associated with each (x,y)
pair. If a single string, the same string appears over
            all the data points. If an array of strings, the items
            are mapped in order to this trace's (x,y)
coordinates. To be seen, trace `hoverinfo` must contain
a "text" flag.
hovertextsrc
Sets the source reference on Chart Studio Cloud for
hovertext .
ids
            Assigns id labels to each datum. These ids are used for object
constancy of data points during animation. Should be an
array of strings, not numbers or any other type.
idssrc
Sets the source reference on Chart Studio Cloud for
ids .
legendgroup
Sets the legend group for this trace. Traces part of
the same legend group hide/show at the same time when
toggling legend items.
line
:class:`plotly.graph_objects.scatter.Line` instance or
dict with compatible properties
marker
:class:`plotly.graph_objects.scatter.Marker` instance
or dict with compatible properties
meta
Assigns extra meta information associated with this
trace that can be used in various text attributes.
Attributes such as trace `name`, graph, axis and
            colorbar `title.text`, annotation `text`,
            `rangeselector`, `updatemenus` and `sliders` `label`
text all support `meta`. To access the trace `meta`
values in an attribute in the same trace, simply use
`%{meta[i]}` where `i` is the index or key of the
`meta` item in question. To access trace `meta` in
            layout attributes, use `%{data[n].meta[i]}` where `i`
is the index or key of the `meta` and `n` is the trace
index.
metasrc
Sets the source reference on Chart Studio Cloud for
meta .
mode
Determines the drawing mode for this scatter trace. If
the provided `mode` includes "text" then the `text`
elements appear at the coordinates. Otherwise, the
            `text` elements appear on hover. If there are fewer than
20 points and the trace is not stacked then the default
is "lines+markers". Otherwise, "lines".
name
            Sets the trace name. The trace name appears as the
legend item and on hover.
opacity
Sets the opacity of the trace.
orientation
Only relevant when `stackgroup` is used, and only the
first `orientation` found in the `stackgroup` will be
used - including if `visible` is "legendonly" but not
if it is `false`. Sets the stacking direction. With "v"
("h"), the y (x) values of subsequent traces are added.
Also affects the default value of `fill`.
r
            r coordinates in scatter traces are deprecated! Please
            switch to the "scatterpolar" trace type. Sets the radial
            coordinates for legacy polar charts only.
rsrc
Sets the source reference on Chart Studio Cloud for r
.
selected
:class:`plotly.graph_objects.scatter.Selected` instance
or dict with compatible properties
selectedpoints
Array containing integer indices of selected points.
Has an effect only for traces that support selections.
Note that an empty array means an empty selection where
            the `unselected` are turned on for all points, whereas
            any other non-array value means no selection at all, where
the `selected` and `unselected` styles have no effect.
showlegend
Determines whether or not an item corresponding to this
trace is shown in the legend.
stackgaps
Only relevant when `stackgroup` is used, and only the
first `stackgaps` found in the `stackgroup` will be
used - including if `visible` is "legendonly" but not
if it is `false`. Determines how we handle locations at
which other traces in this group have data but this one
does not. With *infer zero* we insert a zero at these
locations. With "interpolate" we linearly interpolate
between existing values, and extrapolate a constant
beyond the existing values.
stackgroup
Set several scatter traces (on the same subplot) to the
same stackgroup in order to add their y values (or
their x values if `orientation` is "h"). If blank or
omitted this trace will not be stacked. Stacking also
turns `fill` on by default, using "tonexty" ("tonextx")
if `orientation` is "h" ("v") and sets the default
`mode` to "lines" irrespective of point count. You can
only stack on a numeric (linear or log) axis. Traces in
a `stackgroup` will only fill to (or be filled to)
other traces in the same group. With multiple
`stackgroup`s or some traces stacked and some not, if
fill-linked traces are not already consecutive, the
later ones will be pushed down in the drawing order.
stream
:class:`plotly.graph_objects.scatter.Stream` instance
or dict with compatible properties
t
            t coordinates in scatter traces are deprecated! Please
            switch to the "scatterpolar" trace type. Sets the
            angular coordinates for legacy polar charts only.
text
Sets text elements associated with each (x,y) pair. If
a single string, the same string appears over all the
            data points. If an array of strings, the items are
            mapped in order to this trace's (x,y) coordinates.
If trace `hoverinfo` contains a "text" flag and
"hovertext" is not set, these elements will be seen in
the hover labels.
textfont
Sets the text font.
textposition
Sets the positions of the `text` elements with respects
to the (x,y) coordinates.
textpositionsrc
Sets the source reference on Chart Studio Cloud for
textposition .
textsrc
Sets the source reference on Chart Studio Cloud for
text .
texttemplate
Template string used for rendering the information text
            that appears on points. Note that this will override
`textinfo`. Variables are inserted using %{variable},
for example "y: %{y}". Numbers are formatted using
d3-format's syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}". https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format for
details on the formatting syntax. Dates are formatted
using d3-time-format's syntax %{variable|d3-time-
format}, for example "Day: %{2019-01-01|%A}".
https://github.com/d3/d3-3.x-api-
reference/blob/master/Time-Formatting.md#format for
            details on the date formatting syntax. Every attribute
            that can be specified per-point (the ones that are
            `arrayOk: true`) is available.
texttemplatesrc
Sets the source reference on Chart Studio Cloud for
texttemplate .
tsrc
Sets the source reference on Chart Studio Cloud for t
.
uid
            Assign an id to this trace. Use this to provide object
constancy between traces during animations and
transitions.
uirevision
Controls persistence of some user-driven changes to the
trace: `constraintrange` in `parcoords` traces, as well
as some `editable: true` modifications such as `name`
and `colorbar.title`. Defaults to `layout.uirevision`.
Note that other user-driven trace attribute changes are
controlled by `layout` attributes: `trace.visible` is
controlled by `layout.legend.uirevision`,
`selectedpoints` is controlled by
`layout.selectionrevision`, and `colorbar.(x|y)`
(accessible with `config: {editable: true}`) is
controlled by `layout.editrevision`. Trace changes are
tracked by `uid`, which only falls back on trace index
if no `uid` is provided. So if your app can add/remove
traces before the end of the `data` array, such that
the same trace has a different index, you can still
preserve user-driven changes if you give each trace a
`uid` that stays with it as it moves.
unselected
:class:`plotly.graph_objects.scatter.Unselected`
instance or dict with compatible properties
visible
Determines whether or not this trace is visible. If
"legendonly", the trace is not drawn, but can appear as
a legend item (provided that the legend itself is
visible).
x
Sets the x coordinates.
x0
Alternate to `x`. Builds a linear space of x
coordinates. Use with `dx` where `x0` is the starting
coordinate and `dx` the step.
xaxis
Sets a reference between this trace's x coordinates and
a 2D cartesian x axis. If "x" (the default value), the
x coordinates refer to `layout.xaxis`. If "x2", the x
coordinates refer to `layout.xaxis2`, and so on.
xcalendar
Sets the calendar system to use with `x` date data.
xsrc
Sets the source reference on Chart Studio Cloud for x
.
y
Sets the y coordinates.
y0
Alternate to `y`. Builds a linear space of y
coordinates. Use with `dy` where `y0` is the starting
coordinate and `dy` the step.
yaxis
Sets a reference between this trace's y coordinates and
a 2D cartesian y axis. If "y" (the default value), the
y coordinates refer to `layout.yaxis`. If "y2", the y
coordinates refer to `layout.yaxis2`, and so on.
ycalendar
Sets the calendar system to use with `y` date data.
ysrc
Sets the source reference on Chart Studio Cloud for y
.
"""
def __init__(
self,
arg=None,
cliponaxis=None,
connectgaps=None,
customdata=None,
customdatasrc=None,
dx=None,
dy=None,
error_x=None,
error_y=None,
fill=None,
fillcolor=None,
groupnorm=None,
hoverinfo=None,
hoverinfosrc=None,
hoverlabel=None,
hoveron=None,
hovertemplate=None,
hovertemplatesrc=None,
hovertext=None,
hovertextsrc=None,
ids=None,
idssrc=None,
legendgroup=None,
line=None,
marker=None,
meta=None,
metasrc=None,
mode=None,
name=None,
opacity=None,
orientation=None,
r=None,
rsrc=None,
selected=None,
selectedpoints=None,
showlegend=None,
stackgaps=None,
stackgroup=None,
stream=None,
t=None,
text=None,
textfont=None,
textposition=None,
textpositionsrc=None,
textsrc=None,
texttemplate=None,
texttemplatesrc=None,
tsrc=None,
uid=None,
uirevision=None,
unselected=None,
visible=None,
x=None,
x0=None,
xaxis=None,
xcalendar=None,
xsrc=None,
y=None,
y0=None,
yaxis=None,
ycalendar=None,
ysrc=None,
**kwargs
):
"""
Construct a new Scatter object
The scatter trace type encompasses line charts, scatter charts,
text charts, and bubble charts. The data visualized as scatter
point or lines is set in `x` and `y`. Text (appearing either on
the chart or on hover only) is via `text`. Bubble charts are
achieved by setting `marker.size` and/or `marker.color` to
numerical arrays.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.Scatter`
cliponaxis
Determines whether or not markers and text nodes are
clipped about the subplot axes. To show markers and
text nodes above axis lines and tick labels, make sure
to set `xaxis.layer` and `yaxis.layer` to *below
traces*.
connectgaps
Determines whether or not gaps (i.e. {nan} or missing
values) in the provided data arrays are connected.
customdata
            Assigns extra data to each datum. This may be useful when
            listening to hover, click and selection events. Note that
            "scatter" traces also append customdata items to the
            markers' DOM elements.
customdatasrc
            Sets the source reference on Chart Studio Cloud for
            customdata.
dx
Sets the x coordinate step. See `x0` for more info.
dy
Sets the y coordinate step. See `y0` for more info.
error_x
:class:`plotly.graph_objects.scatter.ErrorX` instance
or dict with compatible properties
error_y
:class:`plotly.graph_objects.scatter.ErrorY` instance
or dict with compatible properties
fill
Sets the area to fill with a solid color. Defaults to
"none" unless this trace is stacked, then it gets
"tonexty" ("tonextx") if `orientation` is "v" ("h") Use
with `fillcolor` if not "none". "tozerox" and "tozeroy"
fill to x=0 and y=0 respectively. "tonextx" and
"tonexty" fill between the endpoints of this trace and
the endpoints of the trace before it, connecting those
endpoints with straight lines (to make a stacked area
graph); if there is no trace before it, they behave
like "tozerox" and "tozeroy". "toself" connects the
endpoints of the trace (or each segment of the trace if
it has gaps) into a closed shape. "tonext" fills the
space between two traces if one completely encloses the
            other (e.g. consecutive contour lines), and behaves like
"toself" if there is no trace before it. "tonext"
should not be used if one trace does not enclose the
other. Traces in a `stackgroup` will only fill to (or
be filled to) other traces in the same group. With
multiple `stackgroup`s or some traces stacked and some
not, if fill-linked traces are not already consecutive,
the later ones will be pushed down in the drawing
order.
fillcolor
Sets the fill color. Defaults to a half-transparent
variant of the line color, marker color, or marker line
color, whichever is available.
groupnorm
Only relevant when `stackgroup` is used, and only the
first `groupnorm` found in the `stackgroup` will be
used - including if `visible` is "legendonly" but not
if it is `false`. Sets the normalization for the sum of
this `stackgroup`. With "fraction", the value of each
trace at each location is divided by the sum of all
trace values at that location. "percent" is the same
but multiplied by 100 to show percentages. If there are
multiple subplots, or multiple `stackgroup`s on one
subplot, each will be normalized within its own set.
hoverinfo
            Determines which trace information appears on hover. If
            `none` or `skip` are set, no information is displayed
            upon hovering. But if `none` is set, click and hover
            events are still fired.
hoverinfosrc
            Sets the source reference on Chart Studio Cloud for
            hoverinfo.
hoverlabel
:class:`plotly.graph_objects.scatter.Hoverlabel`
instance or dict with compatible properties
hoveron
Do the hover effects highlight individual points
(markers or line points) or do they highlight filled
regions? If the fill is "toself" or "tonext" and there
are no markers or text, then the default is "fills",
otherwise it is "points".
hovertemplate
            Template string used for rendering the information that
            appears in the hover box. Note that this will override
`hoverinfo`. Variables are inserted using %{variable},
for example "y: %{y}". Numbers are formatted using
d3-format's syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}". https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format for
details on the formatting syntax. Dates are formatted
using d3-time-format's syntax %{variable|d3-time-
format}, for example "Day: %{2019-01-01|%A}".
https://github.com/d3/d3-3.x-api-
reference/blob/master/Time-Formatting.md#format for
details on the date formatting syntax. The variables
available in `hovertemplate` are the ones emitted as
event data described at this link
https://plotly.com/javascript/plotlyjs-events/#event-
            data. Additionally, every attribute that can be
            specified per-point (the ones that are `arrayOk: true`)
            is available. Anything contained in tag `<extra>` is
displayed in the secondary box, for example
"<extra>{fullData.name}</extra>". To hide the secondary
box completely, use an empty tag `<extra></extra>`.
hovertemplatesrc
            Sets the source reference on Chart Studio Cloud for
            hovertemplate.
hovertext
Sets hover text elements associated with each (x,y)
pair. If a single string, the same string appears over
            all the data points. If an array of strings, the items
            are mapped in order to this trace's (x,y)
coordinates. To be seen, trace `hoverinfo` must contain
a "text" flag.
hovertextsrc
            Sets the source reference on Chart Studio Cloud for
            hovertext.
ids
            Assigns id labels to each datum. These ids provide
            object constancy for data points during animation.
            Should be an array of strings, not numbers or any other
            type.
idssrc
            Sets the source reference on Chart Studio Cloud for
            ids.
legendgroup
Sets the legend group for this trace. Traces part of
the same legend group hide/show at the same time when
toggling legend items.
line
:class:`plotly.graph_objects.scatter.Line` instance or
dict with compatible properties
marker
:class:`plotly.graph_objects.scatter.Marker` instance
or dict with compatible properties
meta
Assigns extra meta information associated with this
trace that can be used in various text attributes.
Attributes such as trace `name`, graph, axis and
            colorbar `title.text`, annotation `text`,
            `rangeselector`, `updatemenus` and `sliders` `label`
            text all support `meta`. To access the trace `meta`
            values in an attribute in the same trace, simply use
            `%{meta[i]}` where `i` is the index or key of the
            `meta` item in question. To access trace `meta` in
            layout attributes, use `%{data[n].meta[i]}` where `i`
            is the index or key of the `meta` and `n` is the trace
index.
metasrc
            Sets the source reference on Chart Studio Cloud for
            meta.
mode
Determines the drawing mode for this scatter trace. If
the provided `mode` includes "text" then the `text`
elements appear at the coordinates. Otherwise, the
            `text` elements appear on hover. If there are fewer than
            20 points and the trace is not stacked, the default
is "lines+markers". Otherwise, "lines".
name
            Sets the trace name. The trace name appears as the
            legend item and on hover.
opacity
Sets the opacity of the trace.
orientation
Only relevant when `stackgroup` is used, and only the
first `orientation` found in the `stackgroup` will be
used - including if `visible` is "legendonly" but not
if it is `false`. Sets the stacking direction. With "v"
("h"), the y (x) values of subsequent traces are added.
Also affects the default value of `fill`.
r
            r coordinates in scatter traces are deprecated! Please
            switch to the "scatterpolar" trace type. Sets the radial
            coordinates for legacy polar charts only.
rsrc
            Sets the source reference on Chart Studio Cloud for r.
selected
:class:`plotly.graph_objects.scatter.Selected` instance
or dict with compatible properties
selectedpoints
Array containing integer indices of selected points.
Has an effect only for traces that support selections.
            Note that an empty array means an empty selection, where
            the `unselected` styles are applied to all points, whereas
            any other non-array value means no selection at all, and
            the `selected` and `unselected` styles have no effect.
showlegend
Determines whether or not an item corresponding to this
trace is shown in the legend.
stackgaps
Only relevant when `stackgroup` is used, and only the
first `stackgaps` found in the `stackgroup` will be
used - including if `visible` is "legendonly" but not
if it is `false`. Determines how we handle locations at
which other traces in this group have data but this one
does not. With *infer zero* we insert a zero at these
locations. With "interpolate" we linearly interpolate
between existing values, and extrapolate a constant
beyond the existing values.
stackgroup
Set several scatter traces (on the same subplot) to the
same stackgroup in order to add their y values (or
their x values if `orientation` is "h"). If blank or
omitted this trace will not be stacked. Stacking also
turns `fill` on by default, using "tonexty" ("tonextx")
if `orientation` is "h" ("v") and sets the default
`mode` to "lines" irrespective of point count. You can
only stack on a numeric (linear or log) axis. Traces in
a `stackgroup` will only fill to (or be filled to)
other traces in the same group. With multiple
`stackgroup`s or some traces stacked and some not, if
fill-linked traces are not already consecutive, the
later ones will be pushed down in the drawing order.
stream
:class:`plotly.graph_objects.scatter.Stream` instance
or dict with compatible properties
t
            t coordinates in scatter traces are deprecated! Please
            switch to the "scatterpolar" trace type. Sets the
            angular coordinates for legacy polar charts only.
text
Sets text elements associated with each (x,y) pair. If
a single string, the same string appears over all the
            data points. If an array of strings, the items are
            mapped in order to this trace's (x,y) coordinates.
If trace `hoverinfo` contains a "text" flag and
"hovertext" is not set, these elements will be seen in
the hover labels.
textfont
Sets the text font.
textposition
            Sets the positions of the `text` elements with respect
            to the (x,y) coordinates.
textpositionsrc
            Sets the source reference on Chart Studio Cloud for
            textposition.
textsrc
            Sets the source reference on Chart Studio Cloud for
            text.
texttemplate
            Template string used for rendering the information text
            that appears on points. Note that this will override
`textinfo`. Variables are inserted using %{variable},
for example "y: %{y}". Numbers are formatted using
d3-format's syntax %{variable:d3-format}, for example
"Price: %{y:$.2f}". https://github.com/d3/d3-3.x-api-
reference/blob/master/Formatting.md#d3_format for
details on the formatting syntax. Dates are formatted
using d3-time-format's syntax %{variable|d3-time-
format}, for example "Day: %{2019-01-01|%A}".
https://github.com/d3/d3-3.x-api-
reference/blob/master/Time-Formatting.md#format for
            details on the date formatting syntax. Every attribute
            that can be specified per-point (the ones that are
            `arrayOk: true`) is available.
texttemplatesrc
            Sets the source reference on Chart Studio Cloud for
            texttemplate.
tsrc
            Sets the source reference on Chart Studio Cloud for t.
uid
            Assign an id to this trace. Use this to provide object
constancy between traces during animations and
transitions.
uirevision
Controls persistence of some user-driven changes to the
trace: `constraintrange` in `parcoords` traces, as well
as some `editable: true` modifications such as `name`
and `colorbar.title`. Defaults to `layout.uirevision`.
Note that other user-driven trace attribute changes are
controlled by `layout` attributes: `trace.visible` is
controlled by `layout.legend.uirevision`,
`selectedpoints` is controlled by
`layout.selectionrevision`, and `colorbar.(x|y)`
(accessible with `config: {editable: true}`) is
controlled by `layout.editrevision`. Trace changes are
tracked by `uid`, which only falls back on trace index
if no `uid` is provided. So if your app can add/remove
traces before the end of the `data` array, such that
the same trace has a different index, you can still
preserve user-driven changes if you give each trace a
`uid` that stays with it as it moves.
unselected
:class:`plotly.graph_objects.scatter.Unselected`
instance or dict with compatible properties
visible
Determines whether or not this trace is visible. If
"legendonly", the trace is not drawn, but can appear as
a legend item (provided that the legend itself is
visible).
x
Sets the x coordinates.
x0
Alternate to `x`. Builds a linear space of x
coordinates. Use with `dx` where `x0` is the starting
coordinate and `dx` the step.
xaxis
Sets a reference between this trace's x coordinates and
a 2D cartesian x axis. If "x" (the default value), the
x coordinates refer to `layout.xaxis`. If "x2", the x
coordinates refer to `layout.xaxis2`, and so on.
xcalendar
Sets the calendar system to use with `x` date data.
xsrc
            Sets the source reference on Chart Studio Cloud for x.
y
Sets the y coordinates.
y0
Alternate to `y`. Builds a linear space of y
coordinates. Use with `dy` where `y0` is the starting
coordinate and `dy` the step.
yaxis
Sets a reference between this trace's y coordinates and
a 2D cartesian y axis. If "y" (the default value), the
y coordinates refer to `layout.yaxis`. If "y2", the y
coordinates refer to `layout.yaxis2`, and so on.
ycalendar
Sets the calendar system to use with `y` date data.
ysrc
            Sets the source reference on Chart Studio Cloud for y.
Returns
-------
Scatter
"""
super(Scatter, self).__init__("scatter")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.Scatter
constructor must be a dict or
an instance of :class:`plotly.graph_objs.Scatter`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("cliponaxis", None)
_v = cliponaxis if cliponaxis is not None else _v
if _v is not None:
self["cliponaxis"] = _v
_v = arg.pop("connectgaps", None)
_v = connectgaps if connectgaps is not None else _v
if _v is not None:
self["connectgaps"] = _v
_v = arg.pop("customdata", None)
_v = customdata if customdata is not None else _v
if _v is not None:
self["customdata"] = _v
_v = arg.pop("customdatasrc", None)
_v = customdatasrc if customdatasrc is not None else _v
if _v is not None:
self["customdatasrc"] = _v
_v = arg.pop("dx", None)
_v = dx if dx is not None else _v
if _v is not None:
self["dx"] = _v
_v = arg.pop("dy", None)
_v = dy if dy is not None else _v
if _v is not None:
self["dy"] = _v
_v = arg.pop("error_x", None)
_v = error_x if error_x is not None else _v
if _v is not None:
self["error_x"] = _v
_v = arg.pop("error_y", None)
_v = error_y if error_y is not None else _v
if _v is not None:
self["error_y"] = _v
_v = arg.pop("fill", None)
_v = fill if fill is not None else _v
if _v is not None:
self["fill"] = _v
_v = arg.pop("fillcolor", None)
_v = fillcolor if fillcolor is not None else _v
if _v is not None:
self["fillcolor"] = _v
_v = arg.pop("groupnorm", None)
_v = groupnorm if groupnorm is not None else _v
if _v is not None:
self["groupnorm"] = _v
_v = arg.pop("hoverinfo", None)
_v = hoverinfo if hoverinfo is not None else _v
if _v is not None:
self["hoverinfo"] = _v
_v = arg.pop("hoverinfosrc", None)
_v = hoverinfosrc if hoverinfosrc is not None else _v
if _v is not None:
self["hoverinfosrc"] = _v
_v = arg.pop("hoverlabel", None)
_v = hoverlabel if hoverlabel is not None else _v
if _v is not None:
self["hoverlabel"] = _v
_v = arg.pop("hoveron", None)
_v = hoveron if hoveron is not None else _v
if _v is not None:
self["hoveron"] = _v
_v = arg.pop("hovertemplate", None)
_v = hovertemplate if hovertemplate is not None else _v
if _v is not None:
self["hovertemplate"] = _v
_v = arg.pop("hovertemplatesrc", None)
_v = hovertemplatesrc if hovertemplatesrc is not None else _v
if _v is not None:
self["hovertemplatesrc"] = _v
_v = arg.pop("hovertext", None)
_v = hovertext if hovertext is not None else _v
if _v is not None:
self["hovertext"] = _v
_v = arg.pop("hovertextsrc", None)
_v = hovertextsrc if hovertextsrc is not None else _v
if _v is not None:
self["hovertextsrc"] = _v
_v = arg.pop("ids", None)
_v = ids if ids is not None else _v
if _v is not None:
self["ids"] = _v
_v = arg.pop("idssrc", None)
_v = idssrc if idssrc is not None else _v
if _v is not None:
self["idssrc"] = _v
_v = arg.pop("legendgroup", None)
_v = legendgroup if legendgroup is not None else _v
if _v is not None:
self["legendgroup"] = _v
_v = arg.pop("line", None)
_v = line if line is not None else _v
if _v is not None:
self["line"] = _v
_v = arg.pop("marker", None)
_v = marker if marker is not None else _v
if _v is not None:
self["marker"] = _v
_v = arg.pop("meta", None)
_v = meta if meta is not None else _v
if _v is not None:
self["meta"] = _v
_v = arg.pop("metasrc", None)
_v = metasrc if metasrc is not None else _v
if _v is not None:
self["metasrc"] = _v
_v = arg.pop("mode", None)
_v = mode if mode is not None else _v
if _v is not None:
self["mode"] = _v
_v = arg.pop("name", None)
_v = name if name is not None else _v
if _v is not None:
self["name"] = _v
_v = arg.pop("opacity", None)
_v = opacity if opacity is not None else _v
if _v is not None:
self["opacity"] = _v
_v = arg.pop("orientation", None)
_v = orientation if orientation is not None else _v
if _v is not None:
self["orientation"] = _v
_v = arg.pop("r", None)
_v = r if r is not None else _v
if _v is not None:
self["r"] = _v
_v = arg.pop("rsrc", None)
_v = rsrc if rsrc is not None else _v
if _v is not None:
self["rsrc"] = _v
_v = arg.pop("selected", None)
_v = selected if selected is not None else _v
if _v is not None:
self["selected"] = _v
_v = arg.pop("selectedpoints", None)
_v = selectedpoints if selectedpoints is not None else _v
if _v is not None:
self["selectedpoints"] = _v
_v = arg.pop("showlegend", None)
_v = showlegend if showlegend is not None else _v
if _v is not None:
self["showlegend"] = _v
_v = arg.pop("stackgaps", None)
_v = stackgaps if stackgaps is not None else _v
if _v is not None:
self["stackgaps"] = _v
_v = arg.pop("stackgroup", None)
_v = stackgroup if stackgroup is not None else _v
if _v is not None:
self["stackgroup"] = _v
_v = arg.pop("stream", None)
_v = stream if stream is not None else _v
if _v is not None:
self["stream"] = _v
_v = arg.pop("t", None)
_v = t if t is not None else _v
if _v is not None:
self["t"] = _v
_v = arg.pop("text", None)
_v = text if text is not None else _v
if _v is not None:
self["text"] = _v
_v = arg.pop("textfont", None)
_v = textfont if textfont is not None else _v
if _v is not None:
self["textfont"] = _v
_v = arg.pop("textposition", None)
_v = textposition if textposition is not None else _v
if _v is not None:
self["textposition"] = _v
_v = arg.pop("textpositionsrc", None)
_v = textpositionsrc if textpositionsrc is not None else _v
if _v is not None:
self["textpositionsrc"] = _v
_v = arg.pop("textsrc", None)
_v = textsrc if textsrc is not None else _v
if _v is not None:
self["textsrc"] = _v
_v = arg.pop("texttemplate", None)
_v = texttemplate if texttemplate is not None else _v
if _v is not None:
self["texttemplate"] = _v
_v = arg.pop("texttemplatesrc", None)
_v = texttemplatesrc if texttemplatesrc is not None else _v
if _v is not None:
self["texttemplatesrc"] = _v
_v = arg.pop("tsrc", None)
_v = tsrc if tsrc is not None else _v
if _v is not None:
self["tsrc"] = _v
_v = arg.pop("uid", None)
_v = uid if uid is not None else _v
if _v is not None:
self["uid"] = _v
_v = arg.pop("uirevision", None)
_v = uirevision if uirevision is not None else _v
if _v is not None:
self["uirevision"] = _v
_v = arg.pop("unselected", None)
_v = unselected if unselected is not None else _v
if _v is not None:
self["unselected"] = _v
_v = arg.pop("visible", None)
_v = visible if visible is not None else _v
if _v is not None:
self["visible"] = _v
_v = arg.pop("x", None)
_v = x if x is not None else _v
if _v is not None:
self["x"] = _v
_v = arg.pop("x0", None)
_v = x0 if x0 is not None else _v
if _v is not None:
self["x0"] = _v
_v = arg.pop("xaxis", None)
_v = xaxis if xaxis is not None else _v
if _v is not None:
self["xaxis"] = _v
_v = arg.pop("xcalendar", None)
_v = xcalendar if xcalendar is not None else _v
if _v is not None:
self["xcalendar"] = _v
_v = arg.pop("xsrc", None)
_v = xsrc if xsrc is not None else _v
if _v is not None:
self["xsrc"] = _v
_v = arg.pop("y", None)
_v = y if y is not None else _v
if _v is not None:
self["y"] = _v
_v = arg.pop("y0", None)
_v = y0 if y0 is not None else _v
if _v is not None:
self["y0"] = _v
_v = arg.pop("yaxis", None)
_v = yaxis if yaxis is not None else _v
if _v is not None:
self["yaxis"] = _v
_v = arg.pop("ycalendar", None)
_v = ycalendar if ycalendar is not None else _v
if _v is not None:
self["ycalendar"] = _v
_v = arg.pop("ysrc", None)
_v = ysrc if ysrc is not None else _v
if _v is not None:
self["ysrc"] = _v
# Read-only literals
# ------------------
self._props["type"] = "scatter"
arg.pop("type", None)
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
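if __name__ == "__main__":
    # Editor's sketch (hedged): a minimal use of the constructor documented
    # above. Assumes plotly is installed; data values are illustrative only.
    import plotly.graph_objects as go
    fig = go.Figure(
        go.Scatter(
            x=[1, 2, 3],
            y=[2, 1, 3],
            mode="markers+text",             # markers plus on-chart text
            text=["a", "b", "c"],            # per-point text labels
            marker=dict(size=[10, 20, 30]),  # bubble chart via marker.size
            uid="trace-1",                   # stable id for uirevision
        )
    )
    fig.show()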
|
plotly/python-api
|
packages/python/plotly/plotly/graph_objs/_scatter.py
|
Python
|
mit
| 107,803
|
from lasagne.layers import DenseLayer, DropoutLayer, batch_norm
from lasagne.regularization import regularize_layer_params_weighted, l1, l2
import utils
class Layer(object):
def __init__(self,
layers_info):
self.non_linearity = layers_info['non_linearity']
self.n_hidden = layers_info['n_hidden']
self.l1_reg = layers_info['l1_reg']
self.l2_reg = layers_info['l2_reg']
self.dropout_p = layers_info['dropout']
self.batch_norm = layers_info['batch_norm']
self.name = layers_info['name']
    # NOTE: the instance attributes assigned in __init__ shadow any
    # same-named methods, so accessors like ``def name(self)`` would be
    # unreachable dead code; explicit get_* names avoid that bug.
    def get_name(self):
        return self.name
    def get_non_linearity(self):
        return self.non_linearity
    def get_n_hidden(self):
        return self.n_hidden
    def get_l1_reg(self):
        return self.l1_reg
    def get_l2_reg(self):
        return self.l2_reg
def dropout_prob(self):
return self.dropout_p
def is_batch_norm(self):
return self.batch_norm
def build_layer(self, model, all_l1_regs, all_l2_regs):
model = DenseLayer(model,
num_units=self.n_hidden,
nonlinearity=utils.get_non_linearity(self.non_linearity))
if self.l1_reg != 0:
all_l1_regs += regularize_layer_params_weighted({model: self.l1_reg}, l1)
if self.l2_reg != 0:
all_l2_regs += regularize_layer_params_weighted({model: self.l2_reg}, l2)
if self.batch_norm == "Y":
model = batch_norm(model)
if self.dropout_p != 0:
model = DropoutLayer(model, p=self.dropout_p)
return model, all_l1_regs, all_l2_regs
def __str__(self):
return str(
'Layer %s: \n\tnonlinearity: %s\n\tl1 reg: %.3f\n\tl2 reg: %.3f\n\tdrop prob: '
'%.3f\n\tnb hidden: %d\n\tbatch_norm?: %s' % (self.name, self.non_linearity, self.l1_reg, self.l2_reg,
self.dropout_p, self.n_hidden, self.batch_norm))
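if __name__ == '__main__':
    # Editor's sketch (hedged): builds one configured layer on top of an
    # input layer. Assumes Lasagne/Theano are installed and that
    # utils.get_non_linearity resolves names such as 'rectify'.
    from lasagne.layers import InputLayer
    info = {'non_linearity': 'rectify', 'n_hidden': 64, 'l1_reg': 0.0,
            'l2_reg': 1e-4, 'dropout': 0.5, 'batch_norm': 'Y',
            'name': 'hidden1'}
    layer = Layer(info)
    model = InputLayer((None, 100))  # batches of 100-dim inputs
    model, l1_total, l2_total = layer.build_layer(model, 0, 0)
    print(layer)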
|
louishenrifranc/spearmint-for-neural-network
|
script/Layer.py
|
Python
|
mit
| 1,980
|
from app import app, db
from flask import json, request, abort, render_template
from models import Sensor, Measure
from datetime import datetime
import time
@app.route('/', methods=['GET'])
def index():
return 'Hello, world.'
@app.route('/api/sensors', methods=['GET'])
@app.route('/api/sensors/<int:id>', methods=['GET'])
def getSensor(id=1):
sensor = Sensor.query.get(id)
measures = [{"y": m.val / 16.0, "x": int(time.mktime(m.update_on.timetuple()) * 1000)} for m in sensor.measures.order_by("update_on desc").limit(100)]
return json.dumps({"sensor": id, "measures": measures })
@app.route('/api/sensors/<int:id>', methods=['POST'])
def postSensor(id):
sensor = Sensor.query.get(id)
data = request.json
if sensor:
data['sensor_id'] = id
data['update_on'] = datetime.now()
m = Measure(**data)
db.session.add(m)
db.session.commit()
else:
abort(404)
return json.dumps({"result": "OK"})
@app.route('/chart')
def showChart():
return render_template('chart.html')
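if __name__ == '__main__':
    # Editor's sketch (hedged): a tiny client for the JSON endpoints above.
    # Assumes the app is already serving on localhost:5000 and that Measure
    # accepts a raw integer "val" (getSensor divides it by 16.0 for display).
    import requests
    resp = requests.post('http://localhost:5000/api/sensors/1',
                         json={'val': 320})
    print(resp.json())
    print(requests.get('http://localhost:5000/api/sensors/1').json())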
|
aguegu/busykoala-rpi
|
app/views.py
|
Python
|
mit
| 1,013
|
# -*- coding: utf-8 -*-
from setuptools.command import easy_install
import importlib
import pkg_resources
# If pip is not installed, install it
try:
import pip
except ImportError:
easy_install.main(['-U','pip'])
finally:
pkg_resources.require('pip')
import pip
# Install external packages
packages = ['musicbrainzngs', 'soundcloud', 'mutagen']
for package in packages:
try:
importlib.import_module(package)
except ImportError:
pip.main(['install', package])
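# Editor's note (hedged): pip.main() was removed in pip 10. On modern
# installations the supported equivalent is to run pip as a subprocess of
# the current interpreter:
def pip_install(package):
    """Install a package with the running interpreter's pip (pip >= 10 safe)."""
    import subprocess
    import sys
    subprocess.check_call([sys.executable, '-m', 'pip', 'install', package])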
|
yenbekbay/ion-downloader
|
IonDownloader/Python/imports.py
|
Python
|
mit
| 498
|
import re
import logging
import argparse
from weblib.logs import default_logging
from spiders.blogs import DataScienceBlogsSpider
from settings import db_connection
logger = logging.getLogger(__name__)
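def top_tags(db, limit=10):
    """Editor's sketch (hedged): the cursor-based pymongo 3+ / MongoDB 3.6+
    equivalent of the legacy db.command('aggregate', ...) calls used in the
    stats branch below; returns the most frequent tags."""
    pipeline = [
        {"$match": {"content.tags": {"$not": {"$size": 0}}}},
        {"$unwind": "$content.tags"},
        {"$group": {"_id": "$content.tags", "count": {"$sum": 1}}},
        {"$sort": {"count": -1}},
        {"$limit": limit},
    ]
    return list(db["blogs"].aggregate(pipeline))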
if __name__ == '__main__':
default_logging(grab_log='var/grab.log', level=logging.DEBUG, mode='w',
propagate_network_logger=False,
network_log='var/grab.network.log')
parser = argparse.ArgumentParser(description='command line interface')
parser.add_argument(
'-S', '--scrape-blogs', type=str,
help='scrape blogs from provided *.opml file')
parser.add_argument(
'--stats', action="store_true", default=False,
help='print some stats'
)
args = parser.parse_args()
if args.scrape_blogs:
spider = DataScienceBlogsSpider.get_instance(
data_science_blogs_list=args.scrape_blogs)
spider.run()
# logger.info(spider.render_stats())
elif args.stats:
db = db_connection()
print("Blogs in database: {}".format(
db['blogs'].count()
))
# calculate top 10 tags
print("Top 10 tags:")
pipeline = [
{"$match": {"content.tags": {"$not": {"$size": 0}}}},
{"$project": {"content.tags": 1}},
{"$unwind": "$content.tags"},
{"$group": {"_id": "$content.tags", "count": {"$sum": 1}}},
{"$sort": {"count": -1}},
{"$limit": 10}
]
results = db.command("aggregate", "blogs", pipeline=pipeline)
for res in results['result']:
print("\t" + res['_id'] + " - " + str(res['count']))
        print()
print("Authors with post in more than one blog:")
pipeline = [
{"$match": {"content.authors": {"$not": {"$size": 0}}}},
{"$project": {"content.authors": 1, "source_url": 1}},
{"$unwind": "$content.authors"},
{
"$group": {
"_id": "$content.authors",
"count": {"$sum": 1},
"blogs": {"$addToSet": "$source_url"}
}
},
{"$match": {"count": {"$gt": 1}}},
]
results = db.command("aggregate", "blogs", pipeline=pipeline)
for res in results['result']:
print(
"\t" + res['_id'] + " - " + str(res['count']) +
" - " + str(res['blogs']))
        print()
# blogs based on twitter bootstrap
# db.blogs.find({ "imports.css": {$in: [/bootstrap/]} }).count();
results = db['blogs'].find(
{
"imports.css": {
"$in": [re.compile(r'bootstrap')],
},
},
{
"source_url": True,
}
)
print(
"{} blogs based on twitter bootstrap css.".format(
results.count())
)
# print ", ".join(map(lambda x: x['source_url'], results))
results = db['blogs'].find(
{
"imports.css": {
"$in": [re.compile(r'wp-content')],
},
},
{
"source_url": True,
}
)
print(
"{} blogs on wordpress CMS.".format(
results.count())
)
results = db['blogs'].find(
{
"imports.scripts": {
"$in": [re.compile(r'octopress')],
},
},
{
"source_url": True,
}
)
print(
"{} blogs on Octopress.".format(
results.count())
)
else:
parser.print_help()
|
istinspring/grab-datascience-blogs
|
cli.py
|
Python
|
mit
| 3,773
|
# from materials.tests import *
# from analysis.tests import *
# from analysis.thermodynamics.tests import *
# from io.tests import *
|
wolverton-research-group/qmpy
|
qmpy/tests.py
|
Python
|
mit
| 134
|
from codegen import *
from secd import *
import sys
# Read-eval-print loop for the SECD machine.
machine = SECD()
while True:
    try:
        line = input('SECDrepl> ')
    except (EOFError, KeyboardInterrupt):
        # Exit cleanly on Ctrl-D / Ctrl-C instead of crashing.
        sys.exit(0)
    code = codegen(parse(line), [], ['stop'])  # compile the parsed line
    machine.runProgram(code, [])
|
yarbroughw/secdpy
|
repl.py
|
Python
|
mit
| 219
|
# coding: utf-8
"""Webmail tests."""
from __future__ import unicode_literals
import os
import shutil
import tempfile
try:
import mock
except ImportError:
from unittest import mock
from six import BytesIO
from django.core import mail
from django.urls import reverse
from modoboa.admin import factories as admin_factories
from modoboa.core import models as core_models
from modoboa.lib.tests import ModoTestCase
from . import data as tests_data
BODYSTRUCTURE_SAMPLE_WITH_FLAGS = [
(b'19 (UID 19 FLAGS (\\Seen) RFC822.SIZE 100000 BODYSTRUCTURE (("text" "plain" ("charset" "ISO-8859-1" "format" "flowed") NIL NIL "7bit" 2 1 NIL NIL NIL NIL)("message" "rfc822" ("name*" "ISO-8859-1\'\'%5B%49%4E%53%43%52%49%50%54%49%4F%4E%5D%20%52%E9%63%E9%70%74%69%6F%6E%20%64%65%20%76%6F%74%72%65%20%64%6F%73%73%69%65%72%20%64%27%69%6E%73%63%72%69%70%74%69%6F%6E%20%46%72%65%65%20%48%61%75%74%20%44%E9%62%69%74") NIL NIL "8bit" 3632 ("Wed, 13 Dec 2006 20:30:02 +0100" {70}', # noqa
b"[INSCRIPTION] R\xe9c\xe9ption de votre dossier d'inscription Free Haut D\xe9bit"), # noqa
(b' (("Free Haut Debit" NIL "inscription" "freetelecom.fr")) (("Free Haut Debit" NIL "inscription" "freetelecom.fr")) ((NIL NIL "hautdebit" "freetelecom.fr")) ((NIL NIL "nguyen.antoine" "wanadoo.fr")) NIL NIL NIL "<20061213193125.9DA0919AC@dgroup2-2.proxad.net>") ("text" "plain" ("charset" "iso-8859-1") NIL NIL "8bit" 1428 38 NIL ("inline" NIL) NIL NIL) 76 NIL ("inline" ("filename*" "ISO-8859-1\'\'%5B%49%4E%53%43%52%49%50%54%49%4F%4E%5D%20%52%E9%63%E9%70%74%69%6F%6E%20%64%65%20%76%6F%74%72%65%20%64%6F%73%73%69%65%72%20%64%27%69%6E%73%63%72%69%70%74%69%6F%6E%20%46%72%65%65%20%48%61%75%74%20%44%E9%62%69%74")) NIL NIL) "mixed" ("boundary" "------------040706080908000209030901") NIL NIL NIL) BODY[HEADER.FIELDS (DATE FROM TO CC SUBJECT)] {266}', # noqa
b'Date: Tue, 19 Dec 2006 19:50:13 +0100\r\nFrom: Antoine Nguyen <nguyen.antoine@wanadoo.fr>\r\nTo: Antoine Nguyen <tonio@koalabs.org>\r\nSubject: [Fwd: [INSCRIPTION] =?ISO-8859-1?Q?R=E9c=E9ption_de_votre_?=\r\n =?ISO-8859-1?Q?dossier_d=27inscription_Free_Haut_D=E9bit=5D?=\r\n\r\n'
),
b')'
]
def get_gif():
"""Return gif."""
gif = BytesIO(
b"GIF87a\x01\x00\x01\x00\x80\x01\x00\x00\x00\x00ccc,\x00"
b"\x00\x00\x00\x01\x00\x01\x00\x00\x02\x02D\x01\x00;")
gif.name = "image.gif"
return gif
class IMAP4Mock(object):
"""Fake IMAP4 client."""
def __init__(self, *args, **kwargs):
self.untagged_responses = {}
def _quote(self, data):
return data
def _simple_command(self, name, *args, **kwargs):
if name == "CAPABILITY":
self.untagged_responses["CAPABILITY"] = [b""]
elif name == "LIST":
self.untagged_responses["LIST"] = [b"() \".\" \"INBOX\""]
elif name == "NAMESPACE":
self.untagged_responses["NAMESPACE"] = [b'(("" "/")) NIL NIL']
return "OK", None
def append(self, *args, **kwargs):
pass
def create(self, name):
return "OK", None
def delete(self, name):
return "OK", None
def list(self):
return "OK", [b"() \".\" \"INBOX\""]
def rename(self, oldname, newname):
return "OK", None
def uid(self, command, *args):
if command == "SORT":
return "OK", [b"19"]
elif command == "FETCH":
uid = int(args[0])
data = BODYSTRUCTURE_SAMPLE_WITH_FLAGS
if uid == 46931:
if args[1] == "(BODYSTRUCTURE)":
data = tests_data.BODYSTRUCTURE_ONLY_4
elif "HEADER.FIELDS" in args[1]:
data = tests_data.BODYSTRUCTURE_SAMPLE_4
else:
data = tests_data.BODY_PLAIN_4
elif uid == 46932:
if args[1] == "(BODYSTRUCTURE)":
data = tests_data.BODYSTRUCTURE_ONLY_5
elif "HEADER.FIELDS" in args[1]:
data = tests_data.BODYSTRUCTURE_SAMPLE_9
else:
data = tests_data.BODYSTRUCTURE_SAMPLE_10
elif uid == 33:
if args[1] == "(BODYSTRUCTURE)":
data = tests_data.BODYSTRUCTURE_EMPTY_MAIL
else:
data = tests_data.EMPTY_BODY
elif uid == 133872:
data = tests_data.COMPLETE_MAIL
return "OK", data
elif command == "STORE":
return "OK", []
class WebmailTestCase(ModoTestCase):
"""Check webmail backend."""
@classmethod
def setUpTestData(cls): # noqa
"""Create some users."""
super(WebmailTestCase, cls).setUpTestData()
admin_factories.populate_database()
cls.user = core_models.User.objects.get(username="user@test.com")
def setUp(self):
"""Connect with a simpler user."""
patcher = mock.patch("imaplib.IMAP4")
self.mock_imap4 = patcher.start()
self.mock_imap4.return_value = IMAP4Mock()
self.addCleanup(patcher.stop)
self.set_global_parameter("imap_port", 1435)
self.workdir = tempfile.mkdtemp()
os.mkdir("{}/webmail".format(self.workdir))
self.set_global_parameter("update_scheme", False, app="core")
url = reverse("core:login")
data = {
"username": self.user.username, "password": "toto"
}
self.client.post(url, data)
def tearDown(self):
"""Cleanup."""
shutil.rmtree(self.workdir)
def test_listmailbox(self):
"""Check listmailbox action."""
url = reverse("modoboa_webmail:index")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.client.get(
"{}?action=listmailbox".format(url),
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEqual(response.status_code, 200)
self.assertIn(
"nguyen.antoine@wanadoo.fr", response.json()["listing"])
response = self.client.get(
"{}?action=listmailbox&pattern=Réception&criteria=Subject"
.format(url),
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEqual(response.status_code, 200)
self.assertIn(
"nguyen.antoine@wanadoo.fr", response.json()["listing"])
def test_attachments(self):
"""Check attachments."""
url = reverse("modoboa_webmail:index")
response = self.client.get("{}?action=compose".format(url))
self.assertEqual(response.status_code, 200)
self.assertIn("compose_mail", self.client.session)
url = reverse("modoboa_webmail:attachment_list")
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
self.set_global_parameters({"max_attachment_size": "10"})
with self.settings(MEDIA_ROOT=self.workdir):
response = self.client.post(url, {"attachment": get_gif()})
self.assertContains(response, "Attachment is too big")
self.set_global_parameters({"max_attachment_size": "10K"})
with self.settings(MEDIA_ROOT=self.workdir):
response = self.client.post(url, {"attachment": get_gif()})
self.assertContains(response, "upload_success")
self.assertEqual(
len(self.client.session["compose_mail"]["attachments"]), 1)
name = self.client.session["compose_mail"]["attachments"][0]["tmpname"]
path = "{}/webmail/{}".format(self.workdir, name)
self.assertTrue(os.path.exists(path))
url = reverse("modoboa_webmail:attachment_delete")
with self.settings(MEDIA_ROOT=self.workdir):
self.ajax_get("{}?name={}".format(url, name))
self.assertFalse(os.path.exists(path))
def test_delattachment_errors(self):
"""Check error cases."""
url = reverse("modoboa_webmail:index")
response = self.client.get("{}?action=compose".format(url))
self.assertEqual(response.status_code, 200)
self.assertIn("compose_mail", self.client.session)
url = reverse("modoboa_webmail:attachment_delete")
with self.settings(MEDIA_ROOT=self.workdir):
response = self.ajax_get("{}?name=".format(url))
self.assertEqual(response["status"], "ko")
self.assertEqual(response["respmsg"], "Bad query")
with self.settings(MEDIA_ROOT=self.workdir):
response = self.ajax_get("{}?name=test".format(url))
self.assertEqual(response["status"], "ko")
self.assertEqual(response["respmsg"], "Unknown attachment")
def test_send_mail(self):
"""Check compose form."""
url = "{}?action=compose".format(reverse("modoboa_webmail:index"))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.client.post(
url, {
"from_": self.user.email, "to": "test@example.test",
"subject": "test", "body": "Test"
}
)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].from_email, "user@test.com")
# Try to send an email using HTML format
self.user.first_name = "Antoine"
self.user.last_name = "Nguyen"
self.user.parameters.set_value("editor", "html")
self.user.save()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
mail.outbox = []
response = self.client.post(
url, {
"from_": self.user.email,
"to": "test@example.test", "subject": "test",
"body": "<p>Test</p>"
}
)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].from_email, '"Antoine Nguyen" <user@test.com>')
def test_signature(self):
"""Check signature in different formats."""
signature = "Antoine Nguyen"
self.user.parameters.set_value("signature", signature)
self.user.save()
response = self.client.get(reverse("modoboa_webmail:index"))
self.assertEqual(response.status_code, 200)
url = "{}?action=compose".format(reverse("modoboa_webmail:index"))
response = self.ajax_get(url)
self.assertIn(signature, response["listing"])
def test_custom_js_in_preferences(self):
"""Check that custom js is included."""
url = reverse("core:user_index")
response = self.client.get(url)
self.assertContains(response, "function toggleSignatureEditor()")
def test_send_mail_errors(self):
"""Check error cases."""
url = "{}?action=compose".format(reverse("modoboa_webmail:index"))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.ajax_post(
url, {"to": "", "subject": "test", "body": "Test"}, 400
)
self.assertEqual(len(mail.outbox), 0)
def test_new_folder(self):
"""Test folder creation."""
url = reverse("modoboa_webmail:folder_add")
response = self.client.get(url)
self.assertContains(response, "Create a new folder")
response = self.ajax_post(url, {"name": "Test"})
self.assertIn("newmb", response)
def test_edit_folder(self):
"""Test folder edition."""
url = reverse("modoboa_webmail:folder_change")
response = self.client.get(url)
self.assertContains(response, "Invalid request")
url = "{}?name=Test".format(url)
response = self.client.get(url)
self.assertContains(response, "Edit folder")
session = self.client.session
session["webmail_navparams"] = {"inbox": "Test"}
session.save()
response = self.ajax_post(url, {"oldname": "Test", "name": "Toto"})
self.assertEqual(response["respmsg"], "Folder updated")
def test_delete_folder(self):
"""Test folder removal."""
url = reverse("modoboa_webmail:folder_delete")
self.ajax_get(url, status=400)
url = "{}?name=Test".format(url)
session = self.client.session
session["webmail_navparams"] = {"inbox": "Test"}
session.save()
self.ajax_get(url)
def test_reply_to_email(self):
"""Test reply form."""
url = "{}?action=reply&mbox=INBOX&mailid=46931".format(
reverse("modoboa_webmail:index"))
session = self.client.session
session["lastaction"] = "compose"
session.save()
response = self.ajax_get(url)
self.assertIn('id="id_origmsgid"', response["listing"])
response = self.client.post(
url, {
"from_": self.user.email, "to": "test@example.test",
"subject": "test", "body": "Test",
"origmsgid": "<id@localhost>"
}
)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].from_email, "user@test.com")
self.assertIn("References", mail.outbox[0].extra_headers)
def test_forward_email(self):
"""Test forward form."""
url = "{}?action=forward&mbox=INBOX&mailid=46932".format(
reverse("modoboa_webmail:index"))
session = self.client.session
session["lastaction"] = "compose"
session.save()
with self.settings(MEDIA_ROOT=self.workdir):
response = self.client.get(
url, HTTP_X_REQUESTED_WITH="XMLHttpRequest")
response = response.json()
self.assertIn('id="id_origmsgid"', response["listing"])
self.assertEqual(
len(self.client.session["compose_mail"]["attachments"]), 1)
response = self.client.post(
url, {
"from_": self.user.email, "to": "test@example.test",
"subject": "test", "body": "Test",
"origmsgid": "<id@localhost>"
}
)
self.assertEqual(len(mail.outbox), 1)
def test_getmailcontent_empty_mail(self):
"""Try to display an empty email."""
url = "{}?action=reply&mbox=INBOX&mailid=33".format(
reverse("modoboa_webmail:mailcontent_get"))
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_getmailsource(self):
"""Try to display a message's source."""
url = "{}?mbox=INBOX&mailid=133872".format(
reverse("modoboa_webmail:mailsource_get"))
response = self.client.get(url)
self.assertContains(response, "Message-ID")
|
modoboa/modoboa-webmail
|
modoboa_webmail/tests/test_views.py
|
Python
|
mit
| 14,655
|
import sublime
import os
import sys
import re
from .test_package import UnitTestingCommand
try:
import coverage
except Exception:
print("coverage not found.")
class UnitTestingCoverageCommand(UnitTestingCommand):
def unit_testing(self, stream, package, settings):
package_path = os.path.join(sublime.packages_path(), package)
data_file = os.path.join(
sublime.packages_path(), "User", "UnitTesting", package, "coverage")
data_file_dir = os.path.dirname(data_file)
if not os.path.isdir(data_file_dir):
os.makedirs(data_file_dir)
if os.path.exists(data_file):
os.unlink(data_file)
config_file = os.path.join(package_path, ".coveragerc")
include = "{}/*".format(package_path)
omit = "{}/{}/*".format(package_path, settings["tests_dir"])
if os.path.exists(config_file):
with open(config_file, "r") as f:
txt = f.read()
if re.search("^include", txt, re.M):
include = None
if re.search("^omit", txt, re.M):
omit = None
else:
config_file = None
cov = coverage.Coverage(
data_file=data_file, config_file=config_file, include=include, omit=omit)
if not settings['start_coverage_after_reload']:
cov.start()
if settings["reload_package_on_testing"]:
self.reload_package(package, dummy=False, show_reload_progress=False)
if settings['start_coverage_after_reload']:
cov.start()
def cleanup():
stream.write("\n")
cov.stop()
coverage.files.RELATIVE_DIR = os.path.normcase(package_path + os.sep)
ignore_errors = cov.get_option("report:ignore_errors")
show_missing = cov.get_option("report:show_missing")
cov.report(file=stream, ignore_errors=ignore_errors, show_missing=show_missing)
if settings['generate_html_report']:
html_output_dir = os.path.join(package_path, 'htmlcov')
cov.html_report(directory=html_output_dir, ignore_errors=ignore_errors)
cov.save()
super().unit_testing(stream, package, settings, [cleanup])
def is_enabled(self):
return "coverage" in sys.modules
|
randy3k/UnitTesting
|
unittesting/test_coverage.py
|
Python
|
mit
| 2,342
|
# -*- coding: utf-8 -*-
from elasticsearch_dsl import DocType, Keyword, Float, Nested, Date, Long, Short, Boolean
from elasticsearch_dsl import MetaField
class Account(DocType):
    # bot name
    botName = Keyword()
    # available cash
    cash = Float()
    # detailed positions
    positions = Nested()
    # market value of the holdings
    value = Float()
    # market value + cash
    allValue = Float()
    # timestamp
    timestamp = Date()
    # computed at market close
    closing = Boolean()
class Meta:
index = 'account'
doc_type = 'doc'
all = MetaField(enabled=False)
dynamic = MetaField('strict')
class Position(DocType):
    # security id
    securityId = Keyword()
    # long position size
    longAmount = Long()
    # long size available to close
    availableLong = Long()
    # average long price
    averageLongPrice = Long()
    # short position size
    shortAmount = Long()
    # short size available to close
    availableShort = Long()
    # average short price
    averageShortPrice = Long()
    # market value, or the margin occupied (for simplicity, always 100%)
    value = Float()
    # trading type (0 means T+0, 1 means T+1)
    tradingT = Short()
def __init__(self, meta=None, security_id=None, trading_t=1, **kwargs):
super().__init__(meta, **kwargs)
self.securityId = security_id
self.longAmount = 0
self.availableLong = 0
self.shortAmount = 0
self.availableShort = 0
self.profit = 0
self.value = 0
self.tradingT = trading_t
class Order(DocType):
    # order id
    id = Keyword()
    # trader (bot) id
    botName = Keyword()
    # security id
    securityId = Keyword()
    # buy/sell (long/short) direction
    direction = Short()
    # market / limit order
    type = Keyword()
    # price
    price = Float()
    # amount
    amount = Long()
    # status
    status = Keyword()
    # timestamp
    timestamp = Date()
class Meta:
doc_type = 'doc'
all = MetaField(enabled=False)
dynamic = MetaField('strict')
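if __name__ == '__main__':
    # Editor's sketch (hedged): persisting an Account document. Assumes an
    # Elasticsearch node on localhost:9200 and the (older) DocType API this
    # module is written against; field values are illustrative.
    from elasticsearch_dsl.connections import connections
    connections.create_connection(hosts=['localhost'])
    Account.init()  # create the index/mapping if missing
    acc = Account(botName='bot0', cash=100000.0, value=0.0,
                  allValue=100000.0, closing=False)
    acc.save()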
|
foolcage/fooltrader
|
fooltrader/domain/business/es_account.py
|
Python
|
mit
| 1,971
|