| repo_name (string, len 5–100) | path (string, len 4–231) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6–947k) | score (float64, 0–0.34) | prefix (string, len 0–8.16k) | middle (string, len 3–512) | suffix (string, len 0–8.17k) |
|---|---|---|---|---|---|---|---|---|
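Each record below is one fill-in-the-middle (FIM) sample: the original file text is the concatenation prefix + middle + suffix. As a minimal sketch of how a dump with this schema might be consumed — assuming it is published as a Hugging Face dataset with exactly these columns; the dataset path used here is hypothetical:

from datasets import load_dataset

# Hypothetical dataset path; substitute the real one.
ds = load_dataset("your-org/python-fim-dump", split="train")

def reassemble(row):
    # The complete source file is the three spans concatenated in order.
    return row["prefix"] + row["middle"] + row["suffix"]

sample = ds[0]
print(sample["repo_name"], sample["path"], sample["license"], sample["score"])
print(reassemble(sample))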
wemanuel/smry | smry/server-auth/ls/google-cloud-sdk/lib/googlecloudsdk/sql/tools/instances/set_root_password.py | Python | apache-2.0 | 4,026 | 0.002981
# Copyright 2013 Google Inc. All Rights Reserved.
"""Sets the password of the MySQL root user."""
from googlecloudsdk.calliope import base
from googlecloudsdk.core import log
from googlecloudsdk.core import remote_completion
from googlecloudsdk.sql import util
@base.ReleaseTracks(base.ReleaseTrack.GA)
class SetRootPassword(base.Command):
"""Sets the password of the MySQL root user."""
@staticmethod
def Args(parser):
"""Args is called by calliope to gather arguments for this command.
Args:
parser: An argparse parser that you can use to add arguments that go
on the command line after this command. Positional arguments are
allowed.
"""
instance = parser.add_argument(
'instance',
help='Cloud SQL instance ID.')
cli = SetRootPassword.GetCLIGenerator()
instance.completer = (remote_completion.RemoteCompletion.
GetCompleterForResource('sql.instances', cli))
password_group = parser.add_mutually_exclusive_group(required=True)
password_group.add_argument(
'--password',
'-p',
help='The password for the root user. WARNING: Setting password using '
'this option can potentially expose the password to other users '
'of this machine. Instead, you can use --password-file to get the'
' password from a file.')
password_group.add_argument(
'--password-file',
help='The path to the filename which has the password to be set. The '
'first line of the file will be interpreted as the password to be set.')
parser.add_argument(
'--async',
action='store_true',
help='Do not wait for the operation to complete.')
@util.ReraiseHttpException
def Run(self, args):
"""Sets the password of the MySQL root user.
Args:
args: argparse.Namespace, The arguments that this command was invoked
with.
Returns:
A dict object representing the operations resource describing the
setRootPassword operation if the setRootPassword was successful.
Raises:
HttpException: A http error response was received while executing api
request.
ToolException: An error other than http error occured while executing the
command.
"""
sql_client = self.context['sql_client']
sql_messages = self.context['sql_messages']
resources = self.context['registry']
util.ValidateInstanceName(args.instance)
instance_ref = resources.Parse(args.instance, collection='sql.instances')
if args.password_file:
with open(args.password_file) as f:
password = f.readline()
else:
password = args.password
result = sql_client.instances.SetRootPassword(
sql_messages.SqlInstancesSetRootPasswordRequest(
project=instance_ref.project,
instance=instance_ref.instance,
instanceSetRootPasswordRequest=(
sql_messages.InstanceSetRootPasswordRequest(
setRootPasswordContext=(
sql_messages.SetRootPasswordContext(
password=password))))))
operation_ref = resources.Create(
'sql.operations',
operation=result.operation,
project=instance_ref.project,
instance=instance_ref.instance,
)
if args.async:
return sql_client.operations.Get(operation_ref.Request())
util.WaitForOperation(
sql_client, operation_ref, 'Setting Cloud SQL instance password')
log.status.write('Set password for [{instance}].\n'.format(
instance=instance_ref))
return None
# pylint: disable=unused-argument
def Display(self, args, result):
"""Display prints information about what just happened to stdout.
Args:
args: The same as the args in Run.
result: A dict object representing the operations resource describing the
set-root-password operation if the set-root-password was successful.
"""
self.format(result)
googleapis/python-aiplatform | google/cloud/aiplatform/datasets/column_names_dataset.py | Python | apache-2.0 | 8,935 | 0.001231
# -*- coding: utf-8 -*-
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import csv
import logging
from typing import List, Optional, Set
from google.auth import credentials as auth_credentials
from google.cloud import bigquery
from google.cloud import storage
from google.cloud.aiplatform import utils
from google.cloud.aiplatform import datasets
class _ColumnNamesDataset(datasets._Dataset):
@property
def column_names(self) -> List[str]:
"""Retrieve the columns for the dataset by extracting it from the Google Cloud Storage or
Google BigQuery source.
Returns:
List[str]
A list of columns names
Raises:
RuntimeError: When no valid source is found.
"""
self._assert_gca_resource_is_available()
metadata = self._gca_resource.metadata
if metadata is None:
raise RuntimeError("No metadata found for dataset")
input_config = metadata.get("inputConfig")
if input_config is None:
raise RuntimeError("No inputConfig found for dataset")
gcs_source = input_config.get("gcsSource")
bq_source = input_config.get("bigquerySource")
if gcs_source:
gcs_source_uris = gcs_source.get("uri")
if gcs_source_uris and len(gcs_source_uris) > 0:
# Lexicographically sort the files
gcs_source_uris.sort()
# Get the first file in sorted list
# TODO(b/193044977): Return as Set instead of List
return list(
self._retrieve_gcs_source_columns(
project=self.project,
gcs_csv_file_path=gcs_source_uris[0],
credentials=self.credentials,
)
)
elif bq_source:
bq_table_uri = bq_source.get("uri")
if bq_table_uri:
# TODO(b/193044977): Return as Set instead of List
return list(
self._retrieve_bq_source_columns(
project=self.project,
bq_table_uri=bq_table_uri,
credentials=self.credentials,
)
)
raise RuntimeError("No valid CSV or BigQuery datasource found.")
@staticmethod
def _retrieve_gcs_source_columns(
project: str,
gcs_csv_file_path: str,
credentials: Optional[auth_credentials.Credentials] = None,
) -> Set[str]:
"""Retrieve the columns from a comma-delimited CSV file stored on Google Cloud Storage
Example Usage:
column_names = _retrieve_gcs_source_columns(
"project_id",
"gs://example-bucket/path/to/csv_file"
)
# column_names = {"column_1", "column_2"}
Args:
project (str):
Required. Project to initiate the Google Cloud Storage client with.
gcs_csv_file_path (str):
Required. A full path to a CSV files stored on Google Cloud Storage.
Must include "gs://" prefix.
credentials (auth_credentials.Credentials):
Credentials to use to with GCS Client.
Returns:
Set[str]
A set of columns names in the CSV file.
Raises:
RuntimeError: When the retrieved CSV file is invalid.
"""
gcs_bucket, gcs_blob = utils.extract_bucket_and_prefix_from_gcs_path(
gcs_csv_file_path
)
client = storage.Client(project=project, credentials=credentials)
bucket = client.bucket(gcs_bucket)
blob = bucket.blob(gcs_blob)
# Incrementally download the CSV file until the header is retrieved
first_new_line_index = -1
start_index = 0
increment = 1000
line = ""
try:
logger = logging.getLogger("google.resumable_media._helpers")
logging_warning_filter = utils.LoggingFilter(logging.INFO)
logger.addFilter(logging_warning_filter)
while first_new_line_index == -1:
line += blob.download_as_bytes(
start=start_index, end=start_index + increment - 1
).decode("utf-8")
first_new_line_index = line.find("\n")
start_index += increment
header_line = line[:first_new_line_index]
# Split to make it an iterable
header_line = header_line.split("\n")[:1]
csv_reader = csv.reader(header_line, delimiter=",")
except (ValueError, RuntimeError) as err:
raise RuntimeError(
"There was a problem extracting the headers from the CSV file at '{}': {}".format(
gcs_csv_file_path, err
)
)
finally:
logger.removeFilter(logging_warning_filter)
return set(next(csv_reader))
@staticmethod
def _get_bq_schema_field_names_recursively(
schema_field: bigquery.SchemaField,
) -> Set[str]:
"""Retrieve the name for a schema field along with ancestor fields.
Nested schema fields are flattened and concatenated with a ".".
Schema fields with child fields are not included, but the children are.
Args:
project (str):
Required. Project to initiate the BigQuery client with.
bq_table_uri (str):
Required. A URI to a BigQuery table.
Can include "bq://" prefix but not required.
credentials (auth_credentials.Credentials):
Credentials to use with BQ Client.
Returns:
Set[str]
A set of columns names in the BigQuery table.
"""
ancestor_names = {
nested_field_name
for field in schema_field.fields
for nested_field_name in _ColumnNamesDataset._get_bq_schema_field_names_recursively(
field
)
}
# Only return "leaf nodes", basically any field that doesn't have children
if len(ancestor_names) == 0:
return {schema_field.name}
else:
return {f"{schema_field.name}.{name}" for name in ancestor_names}
@staticmethod
def _retrieve_bq_source_columns(
project: str,
bq_table_uri: str,
credentials: Optional[auth_credentials.Credentials] = None,
) -> Set[str]:
"""Retrieve the column names from a table on Google BigQuery
Nested schema fields are flattened and concatenated with a ".".
Schema fields with child fields are not included, but the children are.
Example Usage:
column_names = _retrieve_bq_source_columns(
"project_id",
"bq://project_id.dataset.table"
)
# column_names = {"column_1", "column_2", "column_3.nested_field"}
Args:
project (str):
Required. Project to initiate the BigQuery client with.
bq_table_uri (str):
Required. A URI to a BigQuery table.
Can include "bq://" prefix but not required.
credentials (auth_credentials.Credentials):
Credentials to use with BQ Client.
Returns:
Set[str]
A set of column names in the BigQuery table.
"""
# Remove bq:// prefix
prefix = "bq://"
if bq_table_uri.startswith(prefix):
bq_table_uri = bq_table_uri[len(prefix):]
antoinecarme/sklearn2sql_heroku | tests/classification/iris/ws_iris_AdaBoostClassifier_mysql_code_gen.py | Python | bsd-3-clause | 137 | 0.014599
from sklearn2sql_heroku.tests.classification import generic as class_gen
class_gen.test_model("AdaBoostClassifier", "iris", "mysql")
aapris/IoT-Web-Experiments | iotendpoints/endpoints/migrations/0003_datalogger.py | Python | mit | 1,762 | 0.003973
# Generated by Django 2.0.5 on 2018-05-07 09:24
from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import endpoints.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('endpoints', '0002_request_user'),
]
operations = [
migrations.CreateModel(
name='Datalogger',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('uid', models.CharField(db_index=True, default=endpoints.models.get_uid, editable=False, max_length=40, unique=True)),
('devid', models.CharField(max_length=64, unique=True, verbose_name='Unique device id')),
('name', models.CharField(blank=True, max_length=50)),
('description', models.CharField(blank=True, max_length=200)),
('lon', models.FloatField(blank=True, null=True)),
('lat', models.FloatField(blank=True, null=True)),
('location', django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326)),
('activity_at', models.DateTimeField(null=True)),
('created_at', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('updated_at', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL, verbose_name='Owner')),
],
),
]
tomduck/pandoc-xnos | pandocxnos/__init__.py | Python | gpl-3.0 | 121 | 0
"""
|
Package initialization."""
from .core import *
from .main import main
from .pandocattributes import PandocAttributes
dstufft/sessions | sessions/__init__.py | Python | apache-2.0 | 913 | 0.001095
# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from sessions.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
]
nkmk/python-snippets | notebook/numpy_floor_trunc_ceil.py | Python | mit | 681 | 0
import numpy as np
print(np.__version__)
# 1.19.4
a = np.array([[10.0, 10.1, 10.9], [-10.0, -10.1, -10.9]])
print(a)
# [[ 10. 10.1 10.9]
# [-10. -10.1 -10.9]]
print(np.floor(a))
# [[ 10. 10. 10.]
# [-10. -11. -11.]]
print(np.floor(a).dtype)
# float64
print(np.floor(a).astype(int))
# [[ 10 10 10]
# [-10 -11 -11]]
print(np.floor(10.1))
# 10.0
print(np.trunc(a))
# [[ 10. 10. 10.]
# [-10. -10. -10.]]
print(np.fix(a))
# [[ 10. 10. 10.]
# [-10. -10. -10.]]
print(a.astype(int))
# [[ 10 10 10]
# [-10 -10 -10]]
print(np.ceil(a))
# [[ 10. 11. 11.]
# [-10. -10. -10.]]
print(np.copysign(np.ceil(np.abs(a)), a))
# [[ 10. 11. 11.]
# [-10. -11. -11.]]
OpenMined/PySyft | packages/syft/src/syft/core/node/common/node_table/dataset.py | Python | apache-2.0 | 435 | 0
# third party
from sqlalchemy import Column
from sqlalchemy import JSON
from sqlalchemy import String
# relative
from . import Base
class Dataset(Base):
__tablename__ = "dataset"
id = Column(String(256), primary_key=True)
name = Column(String(256))
manifest = Column(String(2048))
description = Column(String(2048))
tags = Column(JSON())
str_metadata = Column(JSON())
blob_metadata = Column(JSON())
airbnb/airflow | dev/send_email.py | Python | apache-2.0 | 10,284 | 0.001654
#!/usr/bin/python3
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# This tool is based on the Superset send_email script:
# https://github.com/apache/incubator-superset/blob/master/RELEASING/send_email.py
import os
import smtplib
import ssl
import sys
from typing import List, Union
try:
import jinja2
except ModuleNotFoundError:
sys.exit("Jinja2 is a required dependency for this script")
try:
import click
except ModuleNotFoundError:
sys.exit("Click is a required dependency for this script")
SMTP_PORT = 587
SMTP_SERVER = "mail-relay.apache.org"
MAILING_LIST = {"dev": "dev@airflow.apache.org", "users": "users@airflow.apache.org"}
def string_comma_to_list(message: str) -> List[str]:
"""
Split string to list
"""
return message.split(",") if message else []
def send_email(
smtp_server: str,
smtp_port: int,
username: str,
password: str,
sender_email: str,
receiver_email: Union[str, List],
message: str,
):
"""
Send a simple text email (SMTP)
"""
context = ssl.create_default_context()
with smtplib.SMTP(smtp_server, smtp_port) as server:
server.starttls(context=context)
server.login(username, password)
server.sendmail(sender_email, receiver_email, message)
def render_template(template_file: str, **kwargs) -> str:
"""
Simple render template based on named parameters
:param template_file: The template file location
:kwargs: Named parameters to use when rendering the template
:return: Rendered template
"""
dir_path = os.path.dirname(os.path.realpath(__file__))
template = jinja2.Template(open(os.path.join(dir_path, template_file)).read())
return template.render(kwargs)
def show_message(entity: str, message: str):
"""
Show message on the Command Line
"""
width, _ = click.get_terminal_size()
click.secho("-" * width, fg="blue")
click.secho(f"{entity} Message:", fg="bright_red", bold=True)
click.secho("-" * width, fg="blue")
click.echo(message)
click.secho("-" * width, fg="blue")
def inter_send_email(
username: str, password: str, sender_email: str, receiver_email: Union[str, List], message: str
):
"""
Send email using SMTP
"""
show_message("SMTP", message)
click.confirm("Is the Email message ok?", abort=True)
try:
send_email(
SMTP_SERVER,
SMTP_PORT,
username,
password,
sender_email,
receiver_email,
message,
)
click.secho("✅ Email sent successfully", fg="green")
except smtplib.SMTPAuthenticationError:
sys.exit("SMTP User authentication error, Email not sent!")
except Exception as e: # pylint: disable=broad-except
sys.exit(f"SMTP exception {e}")
class BaseParameters:
"""
Base Class to send emails using Apache Creds and for Jinja templating
"""
def __init__(self, name=None, email=None, username=None, password=None, version=None, version_rc=None):
self.name = name
self.email = email
self.username = username
self.password = password
self.version = version
self.version_rc = version_rc
self.template_arguments = {}
def __repr__(self):
return f"Apache Credentials: {self.email}/{self.username}/{self.version}/{self.version_rc}"
@click.group(context_settings=dict(help_option_names=["-h", "--help"]))
@click.pass_context
@click.option(
"-e",
"--apache_email",
prompt="Apache Email",
envvar="APACHE_EMAIL",
show_envvar=True,
help="Your Apache email will be used for SMTP From",
required=True,
)
@click.option(
"-u",
"--apache_username",
prompt="Apache Username",
envvar="APACHE_USERNAME",
show_envvar=True,
help="Your LDAP Apache username",
required=True,
)
@click.password_option( # type: ignore
"-p",
"--apache_password",
prompt="Apache Password",
envvar="APACHE_PASSWORD",
show_envvar=True,
help="Your LDAP Apache password",
required=True,
)
@click.option(
"-v",
"--version",
prompt="Version",
envvar="AIRFLOW_VERSION",
show_envvar=True,
help="Release Version",
required=True,
)
@click.option(
"-rc",
"--version_rc",
prompt="Version (with RC)",
envvar="AIRFLOW_VERSION_RC",
show_envvar=True,
help="Release Candidate Version",
required=True,
)
@click.option( # type: ignore
"-n",
"--name",
prompt="Your Name",
default=lambda: os.environ.get('USER', ''),
show_default="Current User",
help="Name of the Release Manager",
type=click.STRING,
required=True,
)
def cli(
ctx,
apache_email: str,
apache_username: str,
apache_password: str,
version: str,
version_rc: str,
name: str,
):
"""
🚀 CLI to send emails for the following:
\b
* Voting thread for the rc
* Result of the voting for the rc
* Announcing that the new version has been released
"""
base_parameters = BaseParameters(
name, apache_email, apache_username, apache_password, version, version_rc
)
base_parameters.template_arguments["version"] = base_parameters.version
base_parameters.template_arguments["version_rc"] = base_parameters.version_rc
base_parameters.template_arguments["sender_email"] = base_parameters.email
base_parameters.template_arguments["release_manager"] = base_parameters.name
ctx.obj = base_parameters
@cli.command("vote")
@click.option(
"--receiver_email",
default=MAILING_LIST.get("dev"),
type=click.STRING,
prompt="The receiver email (To:)",
)
@click.pass_obj
def vote(base_parameters, receiver_email: str):
"""
Send email calling for Votes on RC
"""
template_file = "templates/vote_email.j2"
base_parameters.template_arguments["receiver_email"] = receiver_email
message = render_template(template_file, **base_parameters.template_arguments)
inter_send_email(
base_parameters.username,
base_parameters.password,
base_parameters.template_arguments["sender_email"],
base_parameters.template_arguments["receiver_email"],
message,
)
if click.confirm("Show Slack message for announcement?", default=True):
base_parameters.template_arguments["slack_rc"] = False
slack_msg = render_template("templates/slack.j2", **base_parameters.template_arguments)
show_message("Slack", slack_msg)
@cli.command("result")
@click.option(
"-re",
"--receiver_email",
default=MAILING_LIST.get("dev"),
type=click.STRING,
prompt="The receiver email (To:)",
)
@click.option(
"--vote_bindings",
default="",
type=click.STRING,
prompt="A List of people with +1 binding vote (ex: Max,Grace,Krist)",
)
@click.option(
"--vote_nonbindings",
default="",
type=click.STRING,
prompt="A List of people with +1 non binding vote (ex: Ville)",
)
@click.option(
"--vote_negatives",
default="",
type=click.STRING,
prompt="A List of people with -1 vote (ex: John)",
)
@click.pass_obj
def result(
base_parameters,
receiver_email: str,
vote_bindings: str,
vote_nonbindings: str,
vote_negatives: str,
):
"""
Send email with results of voting on RC
"""
template_file = "templates/result_email.j2"
base_parameters.template_arguments["receiver_ema
|
Cynary/distro6.01 | arch/6.01Soft/lib601-F13-4/soar/worlds/bigFrustrationWorld.py | Python | mit | 133 | 0.120301
dimensions(8,8)
wall((2,0),(2,4))
wall((2,4),(4,4))
wall((2,6),(6,6))
wall((6,6),(6,0))
wall((6,2),(4,2))
initialRobotLoc(1.0, 1.0)
AnanseGroup/map-of-innovation | mapofinnovation/tests/functional/test_adminfunc.py | Python | mit | 215 | 0.004651
from mapofinnovation.tests import *
class TestAdminfuncController(TestController):
def test_index(self):
response = self.app.get(url(controller='adminfunc', action='index'))
# Test response...
FrozenPigs/Taigabot | plugins/amazon.py | Python | gpl-3.0 | 2,394 | 0.000835
# amazon plugin by ine (2020)
from util import hook
from utilities import request
from bs4 import BeautifulSoup
import re
def parse(html):
soup = BeautifulSoup(html, 'lxml')
container = soup.find(attrs={'data-component-type': 's-search-results'})
if container is None:
return []
results = container.find_all(attrs={'data-component-type': 's-search-result'})
if len(results) == 0:
return []
links = []
for result in results:
title = result.find('h2')
price = result.find('span', attrs={'class': 'a-offscreen'})
if title is None or price is None:
continue
id = result['data-asin']
title = title.text.strip()
price = price.text.strip()
url = 'https://www.amazon.com/dp/' + id + '/'
# avoids spam if they change urls in the future
if len(id) > 20:
continue
links.append((title, price, url))
return links
def parse_product(html):
soup = BeautifulSoup(html, 'lxml')
title = soup.find(id='productTitle')
price = soup.find(id='priceblock_ourprice')
if title is None:
title = soup.find('title')
if title is None:
title = 'Untitled'
title = title.text.replace('Amazon.com: ', '')
else:
title = title.text.strip()
if price is None:
price = 'various prices'
else:
price = price.text.strip()
return title, price
@hook.command
def amazon(inp):
"""amazon [query] -- Searches amazon for query"""
if not inp:
return "usage: amazon <search>"
inp = request.urlencode(inp)
html = request.get('https://www.amazon.com/s?k=' + inp)
results = parse(html)
if len(results) == 0:
return 'No results found'
title, price, url = results[0]
if len(title) > 80:
title = title[:80] + '...'
# \x03 = color, 03 = green
return u'[Amazon] {} \x0303{}\x03 {}'.format(title, price, url)
AMAZON_RE = (r"https?:\/\/(www\.)?amazon.com\/[^\s]*dp\/([A-Za-z0-9]+)[^\s]*", re.I)
@hook.regex(*AMAZON_RE)
def amazon_url(match):
id = match.group(2).strip()
url = 'https://www.amazon.com/dp/' + id + '/'
html = request.get(url)
title, price = parse_product(html)
if len(title) > 80:
title = title[:80] + '...'
return u'[Amazon] {} \x0303{}\x03 {}'.format(title, price, url)
MjAbuz/watchdog | vendor/rdflib-2.4.0/test/rdfdiff.py | Python | agpl-3.0 | 2,002 | 0.025475
#!/usr/bin/env python
"""
RDF Graph Isomorphism Tester
Author: Sean B. Palmer, inamidst.com
Uses the pyrple algorithm
Requirements:
Python2.4+
http://inamidst.com/proj/rdf/ntriples.py
Usage: ./rdfdiff.py <ntriplesP> <ntriplesQ>
"""
import sys, re, urllib
import ntriples
from ntriples import bNode
ntriples.r_uriref = re.compile(r'<([^\s"<>]+)>')
class Graph(object):
def __init__(self, uri=None, content=None):
self.triples = set()
if uri:
self.parse(uri)
elif content:
self.parse_string(content)
def parse(self, uri):
class Sink(object):
def triple(sink, s, p, o):
self.triples.add((s, p, o))
p = ntriples.NTriplesParser(sink=Sink())
u = urllib.urlopen(uri)
p.parse(u)
u.close()
def parse_string(self, content):
class Sink(object):
def triple(sink, s, p, o):
self.triples.add((s, p, o))
p = ntriples.NTriplesParser(sink=Sink())
p.parsestring(content)
def __hash__(self):
return hash(tuple(sorted(self.hashtriples())))
def hashtriples(self):
for triple in self.triples:
g = ((isinstance(t, bNode) and self.vhash(t)) or t for t in triple)
yield hash(tuple(g))
def vhash(self, term, done=False):
return tuple(sorted(self.vhashtriples(term, done)))
def vhashtriples(self, term, done):
for t in self.triples:
if term in t: yield tuple(self.vhashtriple(t, term, done))
def vhashtriple(self, triple, term, done):
for p in xrange(3):
if not isinstance(triple[p], bNode): yield triple[p]
elif done or (triple[p] == term): yield p
else: yield self.vhash(triple[p], done=True)
def compare(p, q):
return hash(Graph(p)) == hash(Graph(q))
def compare_from_string(p, q):
return hash(Graph(content=p)) == hash(Graph(content=q))
def main():
result = compare(sys.argv[1], sys.argv[2])
print ('no', 'yes')[result]
if __name__=="__main__":
main()
Zimmi48/coq | doc/tools/coqrst/notations/TacticNotationsLexer.py | Python | lgpl-2.1 | 3,961 | 0.004292
# Generated from TacticNotations.g by ANTLR 4.7.2
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\f")
buf.write("f\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\3\2\3\2\3\2\3\3\3\3")
buf.write("\3\3\3\3\3\3\3\3\5\3!\n\3\3\4\3\4\3\5\3\5\3\6\3\6\3\6")
buf.write("\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3")
buf.write("\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6")
buf.write("\3\6\5\6F\n\6\3\7\3\7\3\b\3\b\6\bL\n\b\r\b\16\bM\5\bP")
buf.write("\n\b\3\t\3\t\5\tT\n\t\3\t\6\tW\n\t\r\t\16\tX\3\n\3\n\3")
buf.write("\n\6\n^\n\n\r\n\16\n_\3\13\6\13c\n\13\r\13\16\13d\2\2")
buf.write("\f\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\3\2\5")
buf.write("\4\2BBaa\6\2\"\"BBaa}\177\5\2\62;C\\c|\2v\2\3\3\2\2\2")
buf.write("\2\5\3\2\2
|
\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r")
buf.write("\3\2\2\2\2\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3")
buf.write("\2\2\2\3\27\3\2\2\2\5 \3\2\2\2\7\"\3\2\2\2\t$\3\2\2\2")
buf.write("\13E\3\2\2\2\rG\3\2\2\2\17O\3\2\2\2\21Q\3\2\2\2\23Z\3")
buf.write("\2\2\2\25b\3\2\2\2\27\30\7}\2\2\30\31\7~\2\2\31\4\3\2")
buf.write("\2\2\32\33\7}\2\2\33!\7-\2\2\34\35\7}\
|
2\2\35!\7,\2\2\36")
buf.write("\37\7}\2\2\37!\7A\2\2 \32\3\2\2\2 \34\3\2\2\2 \36\3\2")
buf.write("\2\2!\6\3\2\2\2\"#\7}\2\2#\b\3\2\2\2$%\7\177\2\2%\n\3")
buf.write("\2\2\2&\'\7\'\2\2\'F\7}\2\2()\7\'\2\2)F\7\177\2\2*+\7")
buf.write("\'\2\2+F\7~\2\2,-\7b\2\2-.\7\'\2\2.F\7}\2\2/\60\7B\2\2")
buf.write("\60\61\7\'\2\2\61F\7}\2\2\62\63\7\'\2\2\63\64\7~\2\2\64")
buf.write("F\7/\2\2\65\66\7\'\2\2\66\67\7~\2\2\678\7/\2\28F\7@\2")
buf.write("\29:\7\'\2\2:;\7~\2\2;F\7~\2\2<=\7\'\2\2=>\7~\2\2>?\7")
buf.write("~\2\2?F\7~\2\2@A\7\'\2\2AB\7~\2\2BC\7~\2\2CD\7~\2\2DF")
buf.write("\7~\2\2E&\3\2\2\2E(\3\2\2\2E*\3\2\2\2E,\3\2\2\2E/\3\2")
buf.write("\2\2E\62\3\2\2\2E\65\3\2\2\2E9\3\2\2\2E<\3\2\2\2E@\3\2")
buf.write("\2\2F\f\3\2\2\2GH\7~\2\2H\16\3\2\2\2IP\t\2\2\2JL\n\3\2")
buf.write("\2KJ\3\2\2\2LM\3\2\2\2MK\3\2\2\2MN\3\2\2\2NP\3\2\2\2O")
buf.write("I\3\2\2\2OK\3\2\2\2P\20\3\2\2\2QV\7B\2\2RT\7a\2\2SR\3")
buf.write("\2\2\2ST\3\2\2\2TU\3\2\2\2UW\t\4\2\2VS\3\2\2\2WX\3\2\2")
buf.write("\2XV\3\2\2\2XY\3\2\2\2Y\22\3\2\2\2Z[\7a\2\2[]\7a\2\2\\")
buf.write("^\t\4\2\2]\\\3\2\2\2^_\3\2\2\2_]\3\2\2\2_`\3\2\2\2`\24")
buf.write("\3\2\2\2ac\7\"\2\2ba\3\2\2\2cd\3\2\2\2db\3\2\2\2de\3\2")
buf.write("\2\2e\26\3\2\2\2\13\2 EMOSX_d\2")
return buf.getvalue()
class TacticNotationsLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
LALT = 1
LGROUP = 2
LBRACE = 3
RBRACE = 4
ESCAPED = 5
PIPE = 6
ATOM = 7
ID = 8
SUB = 9
WHITESPACE = 10
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'{|'", "'{'", "'}'", "'|'" ]
symbolicNames = [ "<INVALID>",
"LALT", "LGROUP", "LBRACE", "RBRACE", "ESCAPED", "PIPE", "ATOM",
"ID", "SUB", "WHITESPACE" ]
ruleNames = [ "LALT", "LGROUP", "LBRACE", "RBRACE", "ESCAPED", "PIPE",
"ATOM", "ID", "SUB", "WHITESPACE" ]
grammarFileName = "TacticNotations.g"
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.7.2")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
igraph/xdata-igraph | interfaces/python/igraph/datatypes.py | Python | gpl-2.0 | 28,500 | 0.001334
# vim:ts=4:sw=4:sts=4:et
# -*- coding: utf-8 -*-
"""Additional auxiliary data types"""
from itertools import islice, izip  # izip is used by the arithmetic methods below (Python 2)
__license__ = """\
Copyright (C) 2006-2012 Tamás Nepusz <ntamas@gmail.com>
Pázmány Péter sétány 1/a, 1117 Budapest, Hungary
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
class Matrix(object):
"""Simple matrix data type.
Of course there are much more advanced matrix data types for Python (for
instance, the C{ndarray} data type of Numeric Python) and this implementation
does not want to compete with them. The only role of this data type is to
provide a convenient interface for the matrices returned by the C{Graph}
object (for instance, allow indexing with tuples in the case of adjacency
matrices and so on).
"""
def __init__(self, data=None):
"""Initializes a matrix.
@param data: the elements of the matrix as a list of lists, or C{None} to
create a 0x0 matrix.
"""
self._nrow, self._ncol, self._data = 0, 0, []
self.data = data
# pylint: disable-msg=C0103
@classmethod
def Fill(cls, value, *args):
"""Creates a matrix filled with the given value
@param value: the value to be used
@keyword shape: the shape of the matrix. Can be a single integer,
two integers or a tuple. If a single integer is
given here, the matrix is assumed to be square-shaped.
"""
if len(args) < 1:
raise TypeError("expected an integer or a tuple")
if len(args) == 1:
if hasattr(args[0], "__len__"):
height, width = int(args[0][0]), int(args[0][1])
else:
height, width = int(args[0]), int(args[0])
else:
height, width = int(args[0]), int(args[1])
mtrx = [[value]*width for _ in xrange(height)]
return cls(mtrx)
# pylint: disable-msg=C0103
@classmethod
def Zero(cls, *args):
"""Creates a matrix filled with zeros.
@keyword shape: the shape of the matrix. Can be a single integer,
two integers or a tuple. If a single integer is
given here, the matrix is assumed to be square-shaped.
"""
result = cls.Fill(0, *args)
return result
# pylint: disable-msg=C0103
@classmethod
def Identity(cls, *args):
"""Creates an identity matrix.
@keyword shape: the shape of the matrix. Can be a single integer,
two integers or a tuple. If a single integer is
given here, the matrix is assumed to be square-shaped.
"""
# pylint: disable-msg=W0212
result = cls.Fill(0, *args)
for i in xrange(min(result.shape)):
result._data[i][i] = 1
return result
def _set_data(self, data=None):
"""Sets the data stored in the matrix"""
if data is not None:
self._data = [list(row) for row in data]
self._nrow = len(self._data)
if self._nrow > 0:
self._ncol = max(len(row) for row in self._data)
else:
self._ncol = 0
for row in self._data:
if len(row) < self._ncol:
row.extend([0]*(self._ncol-len(row)))
def _get_data(self):
"""Returns the data stored in the matrix as a list of lists"""
return [list(row) for row in self._data]
data = property(_get_data, _set_data)
@property
def shape(self):
"""Returns the shape of the matrix as a tuple"""
return self._nrow, self._ncol
def __add__(self, other):
"""Adds the given value to the matrix.
@param other: either a scalar or a matrix. Scalars will
be added to each element of the matrix. Matrices will
be added together elementwise.
@return: the result matrix
"""
if isinstance(other, Matrix):
if self.shape != other.shape:
raise ValueError("matr
|
ix shapes do not match")
return self.__class__([
[a+b for a, b in izip(row_a, row_b)]
for row_a, row_b in izip(self, other)
])
else:
return self.__class__([
[item+other for item in row] for row in self])
def __eq__(self, other):
"""Checks whether a given matri
|
x is equal to another one"""
return isinstance(other, Matrix) and \
self._nrow == other._nrow and \
self._ncol == other._ncol and \
self._data == other._data
def __getitem__(self, i):
"""Returns a single item, a row or a column of the matrix
@param i: if a single integer, returns the M{i}th row as a list. If a
slice, returns the corresponding rows as another L{Matrix} object. If
a 2-tuple, the first element of the tuple is used to select a row and
the second is used to select a column.
"""
if isinstance(i, int):
return list(self._data[i])
elif isinstance(i, slice):
return self.__class__(self._data[i])
elif isinstance(i, tuple):
try:
first = i[0]
except IndexError:
first = slice(None)
try:
second = i[1]
except IndexError:
second = slice(None)
if type(first) == slice and type(second) == slice:
return self.__class__(row[second] for row in self._data[first])
elif type(first) == slice:
return [row[second] for row in self._data[first]]
else:
return self._data[first][second]
else:
raise IndexError("invalid matrix index")
def __hash__(self):
"""Returns a hash value for a matrix."""
# hash() takes a single argument; pack the matrix state into a hashable tuple
return hash((self._nrow, self._ncol, tuple(tuple(row) for row in self._data)))
def __iadd__(self, other):
"""In-place addition of a matrix or scalar."""
if isinstance(other, Matrix):
if self.shape != other.shape:
raise ValueError("matrix shapes do not match")
for row_a, row_b in izip(self._data, other):
for i in xrange(len(row_a)):
row_a[i] += row_b[i]
else:
for row in self._data:
for i in xrange(len(row)):
row[i] += other
return self
def __isub__(self, other):
"""In-place subtraction of a matrix or scalar."""
if isinstance(other, Matrix):
if self.shape != other.shape:
raise ValueError("matrix shapes do not match")
for row_a, row_b in izip(self._data, other):
for i in xrange(len(row_a)):
row_a[i] -= row_b[i]
else:
for row in self._data:
for i in xrange(len(row)):
row[i] -= other
return self
def __ne__(self, other):
"""Checks whether a given matrix is not equal to another one"""
return not self == other
def __setitem__(self, i, value):
"""Sets a single item, a row or a column of the matrix
@param i: if a single integer, sets the M{i}th row as a list. If a
slice, sets the corresponding rows from another L{Matrix} object.
If a 2-tuple, the first element of the tuple is used to select a row
and the second is used to select a column.
@param value: the new value
"""
if isinstance(
BlogomaticProject/Blogomatic | opt/blog-o-matic/usr/lib/python/Bio/PopGen/GenePop/EasyController.py | Python | gpl-2.0 | 6,648 | 0.007822
# Copyright 2009 by Tiago Antao <tiagoantao@gmail.com>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""
This module allows to control GenePop through an easier interface.
This interface is less efficient than the standard GenePopControler
"""
from Controller import GenePopController
from Bio.PopGen import GenePop
class EasyController:
def __init__(self, fname, genepop_dir = None):
"""Initializes the controller.
genepop_dir is the directory where GenePop is.
The binary should be called Genepop (capital G)
"""
self._fname = fname
self._controller = GenePopController(genepop_dir)
self.__fst_pair_locus = {} #More caches like this needed!
def get_basic_info(self):
f=open(self._fname)
rec = GenePop.read(f)
f.close()
return rec.pop_list, rec.loci_list
def test_hw_pop(self, pop_pos, test_type = "probability"):
if test_type=="deficiency":
hw_res = self._controller.test_pop_hz_deficiency(self._fname)
elif test_type=="excess":
hw_res = self._controller.test_pop_hz_excess(self._fname)
else:
loci_res, hw_res, fisher_full = self._controller.test_pop_hz_prob(self._fname, ".P")
for i in range(pop_pos-1):
hw_res.next()
return hw_res.next()
def test_hw_global(self, test_type = "deficiency", enum_test = True,
dememorization = 10000, batches = 20, iterations = 5000):
if test_type=="deficiency":
pop_res, loc_res, all = self._controller.test_global_hz_deficiency(self._fname,
enum_test, dememorization, batches, iterations)
else:
pop_res, loc_res, all = self._controller.test_global_hz_excess(self._fname,
enum_test, dememorization, batches, iterations)
return list(pop_res), list(loc_res), all
def test_ld_all_pair(self, locus1, locus2,
dememorization = 10000, batches = 20, iterations = 5000):
all_ld = self._controller.test_ld(self._fname, dememorization, batches, iterations)[1]
for ld_case in all_ld:
(l1, l2), result = ld_case
if (l1==locus1 and l2==locus2) or (l1==locus2 and l2==locus1):
return result
def estimate_nm(self):
""" Estimate Nm. Just a simple bridge.
"""
return self._controller.estimate_nm(self._fname)
def get_heterozygosity_info(self, pop_pos, locus_name):
"""Returns the heterozygosity info for a certain locus on a population.
Returns (Expected homozygotes, observed homozygotes,
Expected heterozygotes, observed heterozygotes)
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][1]
def get_genotype_count(self, pop_pos, locus_name):
"""Returns the genotype counts for a certain population and locus
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][0]
def get_fis(self, pop_pos, locus_name):
"""Returns the Fis for a certain population and locus
Below CW means Cockerham and Weir and RH means Robertson and Hill.
Returns a pair:
dictionary [allele] = (repetition count, frequency, Fis CW )
with information for each allele
a triple with total number of alleles, Fis CW, Fis RH
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][2:]
def get_alleles(self, pop_pos, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][2].keys()
def get_alleles_all_pops(self, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
return locus_info[1]
def get_allele_frequency(self, pop_pos, locus_name):
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
alleles = locus_info[1]
pop_name, freqs, total = locus_info[2][pop_pos]
allele_freq = {}
for i in range(len(alleles)):
allele_freq[alleles[i]] = freqs[i]
return total, allele_freq
def get_multilocus_f_stats(self):
""" Returns the multilocus F stats
Explain averaging.
Returns Fis(CW), Fst, Fit
"""
return self._controller.calc_fst_all(self._fname)[0]
def get_f_stats(self, locus_name):
""" Returns F stats for a locus
Returns Fis(CW), Fst, Fit, Qintra, Qinter
"""
loci_iter = self._controller.calc_fst_all(self._fname)[1]
for name, fis, fst, fit, qintra, qinter in loci_iter:
if name == locus_name:
return fis, fst, fit, qintra, qinter
def get_avg_fis(self):
return self._controller.calc_diversities_fis_with_identity(self._fname)[1]
def get_avg_fst_pair(self):
return self._controller.calc_fst_pair(self._fname)[1]
def get_avg_fst_pair_locus(self, locus):
if len(self.__fst_pair_locus) == 0:
iter = self._controller.calc_fst_pair(self._fname)[0]
for locus_info in iter:
self.__fst_pair_locus[locus_info[0]] = locus_info[1]
return self.__fst_pair_locus[locus]
def calc_ibd(self, is_diplo = True, stat="a", scale="Log", min_dist=0.00001):
if is_diplo:
return self._controller.calc_ibd_diplo(self._fname, stat, scale, min_dist)
else:
return self._controller.calc_ibd_haplo(self._fname, stat, scale, min_dist)
duyuan11/ford | setup.py | Python | gpl-3.0 | 2,137 | 0.015442
from setuptools import setup, find_packages
from codecs import open # To use a consistent encoding
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name = 'FORD',
packages = ['ford'],
include_package_data = True,
version = '4.3.0',
description = 'FORD, standing for FORtran Documenter, is an automatic documentation generator for modern Fortran programs.',
long_description = long_description,
author = 'Chris MacMackin',
author_email = 'cmacmackin@gmail.com',
url = 'https://github.com/cmacmackin/ford/',
download_url = 'https://github.com/cmacmackin/ford/tarball/4.3.0',
keywords = ['Markdown', 'Fortran', 'documentation', 'comments'],
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 5 - Production/Stable',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Documentation',
'Topic :: Text Processing :: Markup :: HTML',
'Topic :: Documentation',
'Topic :: Utilities',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
install_requires = ['markdown','markdown-include >= 0.5.1','toposort',
'jinja2 >= 2.1','pygments','beautifulsoup4','graphviz'],
entry_points = {
'console_scripts': [
'ford=ford:run',
],
}
)
fireeye/flare-wmi | python-cim/tests/test_mapping.py | Python | apache-2.0 | 7,662 | 0.001044
from fixtures import *
import cim
def test_mapping_type_guess_xp():
'''
test automatic detection of winxp repositories.
'''
repodir = os.path.join(os.path.dirname(__file__), 'repos')
xpdir = os.path.join(repodir, 'xp')
repopath = os.path.join(xpdir, 'mapping-only')
assert cim.CIM.guess_cim_type(repopath) == cim.CIM_TYPE_XP
def test_mapping_type_guess_win7():
'''
test automatic detection of win7 repositories.
'''
repodir = os.path.join(os.path.dirname(__file__), 'repos')
win7dir = os.path.join(repodir, 'win7')
repopath = os.path.join(win7dir, 'deleted-instance')
assert cim.CIM.guess_cim_type(repopath) == cim.CIM_TYPE_WIN7
############ INDEX MAPPING ###############################################
def test_index_mapping(repo):
"""
demonstrate extraction of basic information from the mapping header.
Args:
repo (cim.CIM): the deleted-instance repo
Returns:
None
"""
mapping = repo.index_mapping
# collected empirically.
assert len(mapping.map.entries) == 7824
assert mapping.map.free_dword_count == 241
assert mapping.map.header.physical_page_count == 547
assert mapping.map.header.mapping_entry_count == 326
assert mapping.get_physical_page_number(logical_page_number=0) == 13
assert mapping.get_logical_page_number(physical_page_number=13) == 0
def test_index_mapping_inconsistencies(repo):
"""
find logical pages where the physical page does not map back to it.
this is probably where there are two logical pages that point to the
same physical page.
Args:
repo (cim.CIM): the deleted-instance repo
Returns:
None
"""
mapping = repo.index_mapping
# logical pages where the physical page does not map back to it.
# that is, there must be two logical pages that point here.
inconsistencies = []
for i in range(mapping.map.header.mapping_entry_count):
try:
pnum = mapping.get_physical_page_number(logical_page_number=i)
if i != mapping.get_logical_page_number(physical_page_number=pnum):
inconsistencies.append(i)
except cim.UnmappedPage:
continue
# collected empirically.
assert inconsistencies == []
def test_unmapped_index_logical_pages(repo):
"""
find logical pages that have no physical page.
presumably you can't fetch these pages.
Args:
repo (cim.CIM): the deleted-instance repo
Returns:
None
"""
mapping = repo.index_mapping
unmapped_pages = []
for i in range(mapping.map.header.mapping_entry_count):
if not mapping.is_logical_page_mapped(i):
unmapped_pages.append(i)
continue
# collected empirically.
assert unmapped_pages == [91, 160, 201, 202, 203, 204, 205, 206, 207, 208,
209, 210, 211, 212, 213, 214, 215, 227, 228, 230]
def test_unallocated_index_physical_pages(repo):
"""
find physical pages that have no logical page.
to do this, need to actually reference the size of the index.
this should contain unallocated data.
Args:
repo (cim.CIM): the deleted-instance repo
Returns:
None
"""
mapping = repo.index_mapping
index = repo.logical_index_store
unmapped_pages = []
for i in range(index.page_count):
if not mapping.is_physical_page_mapped(i):
unmapped_pages.append(i)
continue
# collected empirically.
assert unmapped_pages == [4, 8, 40, 48, 62, 70, 74, 84, 116, 117, 118, 119,
122, 126, 131, 132, 134, 142, 153, 156, 159, 161,
165, 167, 169, 179, 181, 182, 184, 185, 186, 188,
190, 192, 195, 199, 203, 205, 207, 209, 210, 212,
213, 214, 216, 217, 218, 225, 230, 232, 234, 238,
239, 241, 244, 245, 253, 254, 258, 260, 262, 264,
265, 266, 268, 269, 273, 274, 275, 277, 279, 283,
284, 286, 292, 293, 294, 295, 296, 301, 309, 311,
313, 314, 315, 316, 317, 318, 319, 320, 321, 322,
325, 330, 331, 334, 341, 347, 349, 352, 354, 355,
357, 358, 365, 366, 367, 372, 373, 375, 379, 380,
381, 383, 384, 386, 387, 388, 390, 391, 392, 393,
394, 395, 396, 398, 401, 403, 404, 406, 407, 408,
409, 410, 414, 415, 417, 419, 420, 422, 424, 425,
426, 430, 432, 433, 434, 435, 436, 437, 438, 439,
440, 442, 443, 447, 448, 449, 452, 453, 454, 455,
456, 457, 458, 459, 460, 461, 462, 463, 464, 465,
466, 467, 468, 470, 471, 474, 475, 476, 477, 478,
479, 480, 481, 486, 487, 489, 490, 491, 496, 497,
498, 499, 500, 501, 502, 503, 504, 505, 506, 507,
508, 509, 510, 511, 512, 513, 514, 515, 516, 517,
518, 519, 520, 521, 522, 523, 524, 525, 526, 527,
528, 529, 530, 531, 532, 533, 534, 535, 536, 537,
538, 539, 540, 541, 542, 543, 544, 545, 546]
############ DATA MAPPING ###############################################
def test_data_mapping(repo):
"""
Args:
repo (cim.CIM): the deleted-instance repo
Returns:
None
"""
mapping = repo.data_mapping
# collected empirically.
assert len(mapping.map.entries) == 41448
assert mapping.map.free_dword_count == 159
assert mapping.map.header.physical_page_count == 1886
assert mapping.map.header.mapping_entry_count == 1727
assert mapping.get_physical_page_number(logical_page_number=0) == 0
assert mapping.get_logical_page_number(physical_page_number=0) == 0
def test_data_mapping_inconsistencies(repo):
"""
find logical pages where the physical page does not map back to it.
this is probably where there are two logical pages that point to the
same physical page.
Args:
repo (cim.CIM): the deleted-instance repo
Returns:
None
"""
mapping = repo.data_mapping
# logical pages where the physical page does not map back to it.
# that is, there must be two logical pages that point here.
inconsistencies = []
for i in range(mapping.map.header.mapping_entry_count):
try:
pnum = mapping.get_physical_page_number(logical_page_number=i)
if i != mapping.get_logical_page_number(physical_page_number=pnum):
inconsistencies.append(i)
except cim.UnmappedPage:
continue
# collected empirically.
assert inconsistencies == []
def test_unmapped_data_logical_pages(repo):
"""
find logical pages that have no physical page.
presumably you can't fetch these pages.
Args:
repo (cim.CIM): the deleted-instance repo
Returns:
None
"""
mapping = repo.index_mapping
unmapped_pages = []
for i in range(mapping.map.header.mapping_entry_count):
if not mapping.is_logical_page_mapped(i):
unmapped_pages.append(i)
continue
# collected empirically.
assert unmapped_pages == [91, 160, 201, 202, 203, 204, 205, 206, 207, 208,
209, 210, 211, 212, 213, 214, 215, 227, 228, 230]
GhostshipSoftware/avaloria | src/tests/test_server_amp.py | Python | bsd-3-clause | 5,448 | 0.008443
import unittest
class TestGetRestartMode(unittest.TestCase):
def test_get_restart_mode(self):
# self.assertEqual(expected, get_restart_mode(restart_file))
assert True # TODO: implement your test here
class TestAmpServerFactory(unittest.TestCase):
def test___init__(self):
# amp_server_factory = AmpServerFactory(server)
assert True # TODO: implement your test here
def test_buildProtocol(self):
# amp_server_factory = AmpServerFactory(server)
# self.assertEqual(expected, amp_server_factory.buildProtocol(addr))
assert True # TODO: implement your test here
class TestAmpClientFactory(unittest.TestCase):
def test___init__(self):
# amp_client_factory = AmpClientFactory(portal)
assert True # TODO: implement your test here
def test_buildProtocol(self):
# amp_client_factory = AmpClientFactory(portal)
# self.assertEqual(expected, amp_client_factory.buildProtocol(addr))
assert True # TODO: implement your test here
def test_clientConnectionFailed(self):
# amp_client_factory = AmpClientFactory(portal)
# self.assertEqual(expected, amp_client_factory.clientConnectionFailed(connector, reason))
assert True # TODO: implement your test here
def test_clientConnectionLost(self):
# amp_client_factory = AmpClientFactory(portal)
# self.assertEqual(expected, amp_client_factory.clientConnectionLost(connector, reason))
assert True # TODO: implement your test here
def test_startedConnecting(self):
# amp_client_factory = AmpClientFactory(portal)
# self.assertEqual(expected, amp_client_factory.startedConnecting(connector))
assert True # TODO: implement your test here
class TestDumps(unittest.TestCase):
def test_dumps(self):
# self.assertEqual(expected, dumps(data))
assert True # TODO: implement your test here
class TestLoads(unittest.TestCase):
def test_loads(self):
# self.assertEqual(expected, loads(data))
assert True # TODO: implement your test here
class TestAMPProtocol(unittest.TestCase):
def test_amp_function_call(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.amp_function_call(module, function, args, **kwargs))
assert True # TODO: implement your test here
def test_amp_msg_portal2server(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.amp_msg_portal2server(sessid, ipart, nparts, msg, data))
assert True # TODO: implement your test here
def test_amp_msg_server2portal(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.amp_msg_server2portal(sessid, ipart, nparts, msg, data))
assert True # TODO: implement your test here
def test_amp_portal_admin(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.amp_portal_admin(sessid, ipart, nparts, operation, data))
assert True # TODO: implement your test here
def test_amp_server_admin(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.amp_server_admin(sessid, ipart, nparts, operation, data))
assert True # TODO: implement your test here
def test_call_remote_FunctionCall(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.call_remote_FunctionCall(modulepath, functionname, *args, **kwargs))
assert True # TODO: implement your test here
def test_call_remote_MsgPortal2Server(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.call_remote_MsgPortal2Server(sessid, msg, data))
assert True # TODO: implement your test here
def test_call_remote_MsgServer2Portal(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.call_remote_MsgServer2Portal(sessid, msg, data))
assert True # TODO: implement your test here
def test_call_remote_PortalAdmin(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.call_remote_PortalAdmin(sessid, operation, data))
assert True # TODO: implement your test here
def test_call_remote_ServerAdmin(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.call_remote_ServerAdmin(sessid, operation, data))
assert True # TODO: implement your test here
def test_connectionMade(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.connectionMade())
assert True # TODO: implement your test here
def test_errback(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.errback(e, info))
assert True # TODO: implement your test here
def test_safe_recv(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.safe_recv(command, sessid, ipart, nparts, **kwargs))
assert True # TODO: implement your test here
def test_safe_send(self):
# a_mp_protocol = AMPProtocol()
# self.assertEqual(expected, a_mp_protocol.safe_send(command, sessid, **kwargs))
assert True # TODO: implement your test here
if __name__ == '__main__':
unittest.main()
leighpauls/k2cro4 | third_party/mozc/chrome/chromeos/renderer/litify_proto_file.py | Python | bsd-3-clause | 2,506 | 0.005188
# -*- coding: utf-8 -*-
# Copyright 2010-2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Litify a .proto file.
This program add a line
"option optimize_for = LITE_RUNTIME;"
to the input .proto file.
"""
import fileinput
import optparse
LITE_OPTIMIZER = 'option optimize_for = LITE_RUNTIME;'
def ParseOption():
parser = optparse.OptionParser()
parser.add_option('--in_file_path', dest='in_file_path',
help='Specify the input protocol buffer definition file.')
parser.add_option('--out_file_path', dest='out_file_path',
help='Specify the result file name.')
(options, _) = parser.parse_args()
return options
def ExecuteLitify(in_file_path, out_file_path):
output_file = open(out_file_path, 'w')
for line in fileinput.input(in_file_path):
output_file.write(line)
output_file.write('\n%s\n' % LITE_OPTIMIZER)
output_file.close()
def main():
options = ParseOption()
ExecuteLitify(options.in_file_path, options.out_file_path)
if __name__ == '__main__':
main()
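# Illustrative invocation (file names are hypothetical):
#   python litify_proto_file.py --in_file_path=commands.proto \
#       --out_file_path=commands_lite.proto
# This copies commands.proto and appends the LITE_RUNTIME option to the copy.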
|
tranpthuan/blabla
|
Cybercore/task1_cybercore.py
|
Python
|
gpl-3.0
| 1,320
| 0.018939
|
import numpy as np
import matplotlib.pyplot as plt
import csv
read1 = []
read2 = []
with open('train.csv',"rb") as csvfile:
read = csv.reader(csvfile)
read.next()
for row in read :
    if len(row) <= 1 : # data preprocessing: skip empty or malformed rows
continue
read1.append(row[0])
read2.append(row[1])
X = np.array([read1], dtype = float).T
Y = np.array([read2], dtype = float).T
#Xbar for the mean value
one = np.ones((X.shape[0], 1))
Xbar = np.concatenate((one, X), axis = 1)
#processing lines
A = np.dot(Xbar.T, Xbar)
b = np.dot(Xbar.T, Y)
w = np.dot(np.linalg.pinv(A),b)
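# The three lines above solve the least-squares normal equations
#   w = (Xbar^T Xbar)^+ (Xbar^T y);
# the Moore-Penrose pseudoinverse keeps this stable even if Xbar^T Xbar is singular.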
w0 = w[0][0]
w1 = w[1][0]
print(w0)
print(w1)
x0 = np.linspace(0, 110, 2)
y0 = w0 + w1*x0
plt.plot(X, Y, 'm.') # data
plt.plot(x0, y0, 'c') # the fitting line
plt.axis([0, 110, 0, 110])
plt.xlabel('X')
plt.ylabel('')
plt.show()
temp = []
data = []
with open('test.csv',"rb") as csvtest :
test = csv.reader(csvtest)
test.next()
for i in test:
if(len(i) < 1) :
continue
temp.append(i[0]);
data = np.array(temp, dtype = float)
with open('predict.csv',"wb") as output :
writer = csv.writer(output)
writer.writerow(['x','y'])
for j in data :
y1 = j*w1 + w0
writer.writerow([j, y1])
csvtest.close()
csvfile.close()
output.close()
|
lovelysystems/pyjamas
|
pyjs/src/pyjs/lib/gdk.py
|
Python
|
apache-2.0
| 248
| 0
|
class Rectangle:
def __init__(self, x=0, y=0, width=0, height=0):
self.x = x
self.y = y
self.width = width
|
self.height = height
def in
|
tersect(self, src):
pass
def union(self, src):
pass
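    # A minimal sketch of what these stubs would compute, assuming the usual
    # GDK semantics for axis-aligned rectangles (illustrative only):
    #   def intersect(self, src):
    #       x, y = max(self.x, src.x), max(self.y, src.y)
    #       w = min(self.x + self.width, src.x + src.width) - x
    #       h = min(self.y + self.height, src.y + src.height) - y
    #       return Rectangle(x, y, w, h) if w > 0 and h > 0 else None
    #   def union(self, src):
    #       x, y = min(self.x, src.x), min(self.y, src.y)
    #       w = max(self.x + self.width, src.x + src.width) - x
    #       h = max(self.y + self.height, src.y + src.height) - y
    #       return Rectangle(x, y, w, h)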
|
ucsd-progsys/ml2
|
paper/oopsla17-cameraready/plots.py
|
Python
|
bsd-3-clause
| 4,957
| 0.009482
|
import csv
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
UCSD = 'UCSD'
BUCKETS = [0.1, 0.2, 1.0, 10.0, 60.0 ] # range(500, 3001, 500)
#COLORS=['#90B0D4', '#90D492', '#D4B490', '#D490D2']
COLORS=['#8dd3c7','#bebada','#ffffb3','#fb8072','#80b1d3','#fdb462']
COLORS_E=['#8dd3c7','#bebada','#80b1d3','#ffffb3','#fdb462','#fb8072']
def read_csv(f):
with open(f) as f:
return list(csv.reader(f))
def read_csv_dict(f):
with open(f) as f:
return list(csv.DictReader(f))
def plot_user_study():
a = read_csv_dict('study/study-data/Types_Study_A_scores.csv')
b = read_csv_dict('study/study-data/Types_Study_B_scores.csv')
def f(xs):
return [float(x) for x in xs if float(x) >= 0]
def err(xs):
#p = np.average(xs)
#return 100 * np.sqrt(p * (1-p) / len(xs))
s = np.std(xs)
n = len(xs)
return 100 * (s / np.sqrt(n))
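    # err() returns the standard error of the mean, s / sqrt(n), scaled to
    # percentage points to match the percent-correct bar heights plotted below.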
## REASON
sepconcat_a = f([r['1: sepConcat explain (1.0 pts)'] for r in a])
padzero_a = f([r['3: padZero explain (1.0 pts)'] for r in a])
mulbydigit_a = f([r['5: mulByDigit explain (1.0 pts)'] for r in a])
sepconcat_b = f([r['1: sepConcat explain (1.0 pts)'] for r in b])
padzero_b = f([r['3: padZero explain (1.0 pts)'] for r in b])
mulbydigit_b = f([r['5: mulByDigit explain (1.0 pts)'] for r in b])
ind = np.arange(3)
width = 0.35
print 'EXPLAIN'
print 'sherrloc'
print [100*np.average(sepconcat_a), 100*np.average(padzero_b), 100*np.average(mulbydigit_a)]
print map(err, [sepconcat_a, padzero_b, mulbydigit_a])
fig = plt.figure()
p_o = plt.bar(ind,
[100*np.average(sepconcat_a), 100*np.average(padzero_b), 100*np.average(mulbydigit_a)],
width,
color=COLORS[0],
yerr=map(err, [sepconcat_a, padzero_b, mulbydigit_a]),
error_kw={'linewidth': 3, 'ecolor': 'gray', 'capsize': 6, 'capthick': 3}
)
print 'nate'
print [100*np.average(sepconcat_b), 100*np.average(padzero_a), 100*np.average(mulbydigit_b)]
    print map(err, [sepconcat_b, padzero_a, mulbydigit_b])
p_n = plt.bar(ind + width,
[100*np.average(sepconcat_b), 100*np.average(padzero_a), 100*np.average(mulbydigit_b)],
width,
color=COLORS[1],
yerr=map(err, [sepconcat_b, padzero_a, mulbydigit_b]),
error_kw={'linewidth': 3, 'ecolor': 'gray', 'capsize': 6, 'capthick': 3}
)
    plt.title('Explanation', fontsize=30)
# plt.xlabel('Problem', fontsize=20)
plt.ylabel('% Correct', fontsize=24)
plt.xticks(ind + width, ['sepConcat\n(p = 0.48)', 'padZero\n(p = 0.097)', 'mulByDigit\n(p = 0.083)'], fontsize=20)
plt.legend(('SHErrLoc', 'Nate'), loc='lower right', fontsize=20)
# autolabel(plt, p_o)
# autolabel(plt, p_n)
fig.savefig('user-study-reason.png')
plt.close()
## FIX
sepconcat_a = f([r['2: sepConcat fix (1.0 pts)'] for r in a])
padzero_a = f([r['4: padZero fix (1.0 pts)'] for r in a])
mulbydigit_a = f([r['6: mulByDigit fix (1.0 pts)'] for r in a])
sepconcat_b = f([r['2: sepConcat fix (1.0 pts)'] for r in b])
padzero_b = f([r['4: padZero fix (1.0 pts)'] for r in b])
mulbydigit_b = f([r['6: mulByDigit fix (1.0 pts)'] for r in b])
ind = np.arange(3)
width = 0.35
print 'FIX'
print 'sherrloc'
print [100*np.average(sepconcat_a), 100*np.average(padzero_b), 100*np.average(mulbydigit_a)]
print map(err, [sepconcat_a, padzero_b, mulbydigit_a])
fig = plt.figure()
p_o = plt.bar(ind,
[100*np.average(sepconcat_a), 100*np.average(padzero_b), 100*np.average(mulbydigit_a)],
width,
color=COLORS[0],
yerr=map(err, [sepconcat_a, padzero_b, mulbydigit_a]),
error_kw={'linewidth': 3, 'ecolor': 'gray', 'capsize': 6, 'capthick': 3}
)
print 'nate'
print [100*np.average(sepconcat_b), 100*np.average(padzero_a), 100*np.average(mulbydigit_b)]
print map(err, [sepconcat_b, padzero_a, mulbydigit_b])
p_n = plt.bar(ind + width,
[100*np.average(sepconcat_b), 100*np.average(padzero_a), 100*np.average(mulbydigit_b)],
width,
color=COLORS[1],
yerr=map(err, [sepconcat_b, padzero_a, mulbydigit_b]),
error_kw={'linewidth': 3, 'ecolor': 'gray', 'capsize': 6, 'capthick': 3}
)
plt.title('Fix',fontsize=30)
# plt.xlabel('Problem', fontsize=20)
plt.ylabel('% Correct', fontsize=24)
plt.xticks(ind + width, ['sepConcat\n(p = 0.57)', 'padZero\n(p = 0.33)', 'mulByDigit\n(p = 0.31)'], fontsize=20)
plt.legend(('SHErrLoc', 'Nate'), loc='lower right', fontsize=20)
# autolabel(plt, p_o)
# autolabel(plt, p_n)
fig.savefig('user-study-fix.png')
plt.close()
if __name__ == '__main__':
plot_user_study()
|
bboalimoe/ndn-cache-policy
|
docs/sphinx-contrib/actdiag/setup.py
|
Python
|
gpl-3.0
| 1,692
| 0
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
long_desc = '''
This package contains the actdiag Sphinx extension.
.. _Sphinx: http://sphinx.pocoo.org/
.. _actdiag: http://blockdiag.com/en/actdiag/
This extension enables you to insert activity diagrams into your Sphinx document.
The following code is a sample::
.. actdiag::
diagram {
A -> B -> C -> D;
lane {
A; B;
}
lane {
C; D;
}
}
This module needs actdiag_.
'''
requires = ['actdiag>=0.5.3', 'Sphinx>=0.6', 'setuptools']
setup(
name='sphinxcontrib-actdiag',
version='0.7.2',
url='http://bitbucket.org/birkenfeld/sphinx-contrib',
download_url='http://pypi.python.org/pypi/sphinxcontrib-actdiag',
license='BSD',
author='Takeshi Komiya',
author_email='i.tkomiya@gmail.com',
description='Sphinx "actdiag" extension',
long_description=long_desc,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Documentation',
'Topic :: Utilities',
],
platforms='any',
packages=find_packages(),
include_package_data=True,
install_requires=requires,
namespace_packages=['sphinxcontrib'],
)
|
kobotoolbox/kobocat
|
onadata/celery.py
|
Python
|
bsd-2-clause
| 1,357
| 0
|
# coding: utf-8
import os
import celery
import logging
from django.apps import apps
from django.conf import settings
# http://celery.readthedocs.org/en/latest/django/first-steps-with-django.html
# Attempt to determine the project name from the directory containing this file
PROJECT_NAME = os.path.basename(os.path.dirname(__file__))
# Set the default Django settings module for the 'celery' command-line program
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'onadata.settings.prod')
Celery = celery.Celery
app = Celery(PROJECT_NAME)
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings', namespace='CELERY')
# The `app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)` technique
# described in
# http://docs.celeryproject.org/en/latest/django/first-steps-with-django.html
# fails when INSTALLED_APPS includes a "dotted path to the appropriate
# AppConfig subclass" as recommended by
# https://docs.djangoproject.com/en/1.8/ref/applications/#configuring-applications.
# Ask Solem recommends the following workaround; see
# https://github.com/celery/celery/issues/2248#issuecomment-97404667
app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()])
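# For example, given a hypothetical 'onadata.apps.logger.apps.LoggerConfig'
# entry in INSTALLED_APPS, apps.get_app_configs() yields an AppConfig whose
# .name is the plain module path 'onadata.apps.logger', which is the form
# autodiscover_tasks needs in order to locate a tasks.py module.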
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
|
WarrenWeckesser/scipy
|
scipy/linalg/tests/test_basic.py
|
Python
|
bsd-3-clause
| 62,678
| 0.000032
|
import itertools
import warnings
import numpy as np
from numpy import (arange, array, dot, zeros, identity, conjugate, transpose,
float32)
import numpy.linalg as linalg
from numpy.random import random
from numpy.testing import (assert_equal, assert_almost_equal, assert_,
assert_array_almost_equal, assert_allclose,
assert_array_equal, suppress_warnings)
import pytest
from pytest import raises as assert_raises
from scipy.linalg import (solve, inv, det, lstsq, pinv, pinv2, pinvh, norm,
solve_banded, solveh_banded, solve_triangular,
solve_circulant, circulant, LinAlgError, block_diag,
matrix_balance, qr, LinAlgWarning)
from scipy.linalg._testutils import assert_no_overwrite
from scipy._lib._testutils import check_free_memory
from scipy.linalg.blas import HAS_ILP64
REAL_DTYPES = (np.float32, np.float64, np.longdouble)
COMPLEX_DTYPES = (np.complex64, np.complex128, np.clongdouble)
DTYPES = REAL_DTYPES + COMPLEX_DTYPES
def _eps_cast(dtyp):
"""Get the epsilon for dtype, possibly downcast to BLAS types."""
dt = dtyp
if dt == np.longdouble:
dt = np.float64
elif dt == np.clongdouble:
dt = np.complex128
return np.finfo(dt).eps
class TestSolveBanded:
def test_real(self):
a = array([[1.0, 20, 0, 0],
[-30, 4, 6, 0],
[2, 1, 20, 2],
[0, -1, 7, 14]])
ab = array([[0.0, 20, 6, 2],
[1, 4, 20, 14],
[-30, 1, 7, 0],
[2, -1, 0, 0]])
l, u = 2, 1
b4 = array([10.0, 0.0, 2.0, 14.0])
b4by1 = b4.reshape(-1, 1)
b4by2 = array([[2, 1],
[-30, 4],
[2, 3],
[1, 3]])
b4by4 = array([[1, 0, 0, 0],
[0, 0, 0, 1],
[0, 1, 0, 0],
[0, 1, 0, 0]])
for b in [b4, b4by1, b4by2, b4by4]:
x = solve_banded((l, u), ab, b)
assert_array_almost_equal(dot(a, x), b)
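        # Note: `ab` holds the matrix in diagonal-ordered banded form with
        # ab[u + i - j, j] == a[i, j]: row u (here 1) is the main diagonal,
        # rows above it are superdiagonals and rows below it subdiagonals.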
def test_complex(self):
a = array([[1.0, 20, 0, 0],
                   [-30, 4, 6, 0],
                   [2j, 1, 20, 2j],
                   [0, -1, 7, 14]])
ab = array([[0.0, 20, 6, 2j],
[1, 4, 20, 14],
[-30, 1, 7, 0],
[2j, -1, 0, 0]])
l, u = 2, 1
b4 = array([10.0, 0.0, 2.0, 14.0j])
b4by1 = b4.reshape(-1, 1)
b4by2 = array([[2, 1],
[-30, 4],
[2, 3],
[1, 3]])
b4by4 = array([[1, 0, 0, 0],
[0, 0, 0, 1j],
[0, 1, 0, 0],
[0, 1, 0, 0]])
for b in [b4, b4by1, b4by2, b4by4]:
x = solve_banded((l, u), ab, b)
assert_array_almost_equal(dot(a, x), b)
def test_tridiag_real(self):
ab = array([[0.0, 20, 6, 2],
[1, 4, 20, 14],
[-30, 1, 7, 0]])
a = np.diag(ab[0, 1:], 1) + np.diag(ab[1, :], 0) + np.diag(
ab[2, :-1], -1)
b4 = array([10.0, 0.0, 2.0, 14.0])
b4by1 = b4.reshape(-1, 1)
b4by2 = array([[2, 1],
[-30, 4],
[2, 3],
[1, 3]])
b4by4 = array([[1, 0, 0, 0],
[0, 0, 0, 1],
[0, 1, 0, 0],
[0, 1, 0, 0]])
for b in [b4, b4by1, b4by2, b4by4]:
x = solve_banded((1, 1), ab, b)
assert_array_almost_equal(dot(a, x), b)
def test_tridiag_complex(self):
ab = array([[0.0, 20, 6, 2j],
[1, 4, 20, 14],
[-30, 1, 7, 0]])
a = np.diag(ab[0, 1:], 1) + np.diag(ab[1, :], 0) + np.diag(
ab[2, :-1], -1)
b4 = array([10.0, 0.0, 2.0, 14.0j])
b4by1 = b4.reshape(-1, 1)
b4by2 = array([[2, 1],
[-30, 4],
[2, 3],
[1, 3]])
b4by4 = array([[1, 0, 0, 0],
[0, 0, 0, 1],
[0, 1, 0, 0],
[0, 1, 0, 0]])
for b in [b4, b4by1, b4by2, b4by4]:
x = solve_banded((1, 1), ab, b)
assert_array_almost_equal(dot(a, x), b)
def test_check_finite(self):
a = array([[1.0, 20, 0, 0],
[-30, 4, 6, 0],
[2, 1, 20, 2],
[0, -1, 7, 14]])
ab = array([[0.0, 20, 6, 2],
[1, 4, 20, 14],
[-30, 1, 7, 0],
[2, -1, 0, 0]])
l, u = 2, 1
b4 = array([10.0, 0.0, 2.0, 14.0])
x = solve_banded((l, u), ab, b4, check_finite=False)
assert_array_almost_equal(dot(a, x), b4)
def test_bad_shape(self):
ab = array([[0.0, 20, 6, 2],
[1, 4, 20, 14],
[-30, 1, 7, 0],
[2, -1, 0, 0]])
l, u = 2, 1
bad = array([1.0, 2.0, 3.0, 4.0]).reshape(-1, 4)
assert_raises(ValueError, solve_banded, (l, u), ab, bad)
assert_raises(ValueError, solve_banded, (l, u), ab, [1.0, 2.0])
# Values of (l,u) are not compatible with ab.
assert_raises(ValueError, solve_banded, (1, 1), ab, [1.0, 2.0])
def test_1x1(self):
b = array([[1., 2., 3.]])
x = solve_banded((1, 1), [[0], [2], [0]], b)
assert_array_equal(x, [[0.5, 1.0, 1.5]])
assert_equal(x.dtype, np.dtype('f8'))
assert_array_equal(b, [[1.0, 2.0, 3.0]])
def test_native_list_arguments(self):
a = [[1.0, 20, 0, 0],
[-30, 4, 6, 0],
[2, 1, 20, 2],
[0, -1, 7, 14]]
ab = [[0.0, 20, 6, 2],
[1, 4, 20, 14],
[-30, 1, 7, 0],
[2, -1, 0, 0]]
l, u = 2, 1
b = [10.0, 0.0, 2.0, 14.0]
x = solve_banded((l, u), ab, b)
assert_array_almost_equal(dot(a, x), b)
class TestSolveHBanded:
def test_01_upper(self):
# Solve
# [ 4 1 2 0] [1]
# [ 1 4 1 2] X = [4]
# [ 2 1 4 1] [1]
# [ 0 2 1 4] [2]
# with the RHS as a 1D array.
ab = array([[0.0, 0.0, 2.0, 2.0],
[-99, 1.0, 1.0, 1.0],
[4.0, 4.0, 4.0, 4.0]])
b = array([1.0, 4.0, 1.0, 2.0])
x = solveh_banded(ab, b)
assert_array_almost_equal(x, [0.0, 1.0, 0.0, 0.0])
def test_02_upper(self):
# Solve
# [ 4 1 2 0] [1 6]
# [ 1 4 1 2] X = [4 2]
# [ 2 1 4 1] [1 6]
# [ 0 2 1 4] [2 1]
#
ab = array([[0.0, 0.0, 2.0, 2.0],
[-99, 1.0, 1.0, 1.0],
[4.0, 4.0, 4.0, 4.0]])
b = array([[1.0, 6.0],
[4.0, 2.0],
[1.0, 6.0],
[2.0, 1.0]])
x = solveh_banded(ab, b)
expected = array([[0.0, 1.0],
[1.0, 0.0],
[0.0, 1.0],
[0.0, 0.0]])
assert_array_almost_equal(x, expected)
def test_03_upper(self):
# Solve
# [ 4 1 2 0] [1]
# [ 1 4 1 2] X = [4]
# [ 2 1 4 1] [1]
# [ 0 2 1 4] [2]
# with the RHS as a 2D array with shape (3,1).
ab = array([[0.0, 0.0, 2.0, 2.0],
[-99, 1.0, 1.0, 1.0],
[4.0, 4.0, 4.0, 4.0]])
b = array([1.0, 4.0, 1.0, 2.0]).reshape(-1, 1)
x = solveh_banded(ab, b)
assert_array_almost_equal(x, array([0., 1., 0., 0.]).reshape(-1, 1))
def test_01_lower(self):
# Solve
# [ 4 1 2 0] [1]
# [ 1 4 1 2] X = [4]
# [ 2 1 4 1] [1]
# [ 0 2 1 4] [2]
#
ab = array([[4.0, 4.0, 4.0, 4.0],
[1.0, 1.0, 1.0, -9
|
maninmotion/LittleSportsBiscuit
|
config/settings/common.py
|
Python
|
bsd-3-clause
| 9,768
| 0.001126
|
# -*- coding: utf-8 -*-
"""
Django settings for LittleSportsBiscuit project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
from __future__ import absolute_import, unicode_literals
import environ
ROOT_DIR = environ.Path(__file__) - 3 # (/a/b/myfile.py - 3 = /)
APPS_DIR = ROOT_DIR.path('LittleSportsBiscuit')
env = environ.Env()
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
# Default Django apps:
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Useful template tags:
# 'django.contrib.humanize',
# Admin
'django.contrib.admin',
)
THIRD_PARTY_APPS = (
'crispy_forms', # Form layouts
'allauth', # registration
'allauth.account', # registration
'allauth.socialaccount', # registration
)
# Apps specific for this project go here.
LOCAL_APPS = (
'LittleSportsBiscuit.users', # custom users app
'LittleSportsBiscuit',
# Your stuff: custom apps go here
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# MIDDLEWARE CONFIGURATION
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES = (
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# MIGRATIONS CONFIGURATION
# ------------------------------------------------------------------------------
MIGRATION_MODULES = {
'sites': 'LittleSportsBiscuit.contrib.sites.migrations'
}
# DEBUG
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = env.bool("DJANGO_DEBUG", False)
# FIXTURE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
str(APPS_DIR.path('fixtures')),
)
# EMAIL CONFIGURATION
# ------------------------------------------------------------------------------
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend')
# MANAGER CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
("""Kevin A. Miller""", 'kevin@maninmotion.com'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
'default': env.db("DATABASE_URL", default="postgres:///LittleSportsBiscuit"),
}
DATABASES['default']['ATOMIC_REQUESTS'] = True
# GENERAL CONFIGURATION
# ------------------------------------------------------------------------------
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'EST'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#templates
TEMPLATES = [
{
# See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
'DIRS': [
str(APPS_DIR.path('templates')),
],
'OPTIONS': {
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
'debug': DEBUG,
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
# Your stuff: custom template context processors go here
],
},
},
]
# See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#template-packs
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# STATIC FILE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(ROOT_DIR('staticfiles'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
str(APPS_DIR.path('static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# MEDIA CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(APPS_DIR('media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# URL Configuration
# ------------------------------------------------------------------------------
ROOT_URLCONF = 'config.urls'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'config.wsgi.application'
# AUTHENTICATION CONFIGURATION
# ------------------------------------------------------------------------------
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
# Some really nice defaults
ACCOUNT_AUTHENTICATION_METHOD = 'username'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
# Custom user app defaults
# Select the correct user model
AUTH_USER_MODEL = 'users.User'
LOGIN_REDIRECT_URL = 'users:redirect'
LOGIN_URL = 'account_login'
# SLUGLIFIER
AUTOSLUG_SLUGIFY_FUNCTION = 'slugify.slugify'
# LOGGING CONFIGURATION
# -------------------------------------------------------------------------
|
dokipen/trac
|
trac/upgrades/db10.py
|
Python
|
bsd-3-clause
| 697
| 0.010043
|
sql = [
#-- Make the node_change table contain more information, and force a resync
"""DROP TABLE revision;""",
"""DROP TABLE node_change;""",
"""CREATE TABLE revision (
rev text PRIMARY KEY,
time integer,
author text,
message text
);""",
"""CREATE TABLE node_change (
rev text,
path text,
kind char(1), -- 'D' for directory, 'F' for file
change char(1),
base_path text,
    base_rev text,
UNIQUE(rev, path, change)
);"""
]
def do_upgrade(env, ver, cursor):
for s in sql:
cursor.execute(s)
print 'Please perform a "resync" after this upgrade.'
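    # Trac invokes do_upgrade(env, ver, cursor) once per registered schema
    # upgrade module; this one drops the repository cache tables, which is why
    # a manual "resync" is required afterwards.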
|
flypy/flypy
|
flypy/cppgen/tests/test_cppgen.py
|
Python
|
bsd-2-clause
| 907
| 0.005513
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
import os
import unittest
import tempfile
from flypy import jit
from flypy.cppgen import cppgen
@jit('C[x, y]')
class C(object):
layout = [('a', 'x'), ('b', 'y')]
@jit('C[x, y] -> x')
def first(self):
return self.a
@jit('C[x, y] -> y')
def scnd(self):
return self.b
#===------------------------------------------------------------------===
# Tests
#===------------------------------------------------------------------===
class TestCPPGen(unittest.TestCase):
def test_cppgen(self):
with tempfile.NamedTemporaryFile(suffix=".cpp") as f:
# TODO: Remove compiled code
# TODO: Portable compilation
            cppgen.generate(C, f.write)
os.system("g++ -g -Wall -c %s" % (f.name,))
if __name__ == '__main__':
unittest.main()
|
PeachstoneIO/peachbox
|
tutorials/tutorial_movie_reviews/model/master.py
|
Python
|
apache-2.0
| 3,918
| 0.015314
|
# Copyright 2015 Philipp Pahl, Sven Schubert, Daniel Britzger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model definition for the master data set.
The entities of the master data set consist of 'edges' and 'properties' and conform to the
fact based model. <Reference>
Every entity has a unique data_unit_index. It is horizontally partitioned wrt. the
partition key and a granularity.
An edge or property must define a unique data_unit_index, the partition key and granularity.
The schema contains the edges and properties of an entity and the entities which are related by the edge."""
import peachbox.model
class UserReviewEdge(peachbox.model.MasterDataSet, TaskImportModel):
"""A particular realization of an 'edge'. Here: the user review edge """
data_unit_index = 0
partition_key = 'true_as_of_seconds'
partition_granularity = 60*60*24*360
schema = [{'field':'user_id', 'type':'StringType'},
{'field':'review_id', 'type':'StringType'}]
def lhs_node(self, row):
pass
def calc_value(self,field,row):
field = 'review_id'
val = 4*3*row.review_id
self.set_value(field,val)
    def import_row(self, row):  # 'import' is a reserved word; also needs self
self.lhs_node(row.user_id)
self.rhs_node(row.review_id)
self.partition_key(row.time)
class ProductReviewEdge(peachbox.model.MasterDataSet):
"""A particular realization of an 'edge'. Here: the product review edge """
data_unit_index = 1
partition_key = 'true_as_of_seconds'
partition_granularity = 60*60*24*360
schema = [{'field':'review_id', 'type':'StringType'},
{'field':'product_id', 'type':'StringType'}]
class ReviewProperties(peachbox.model.MasterDataSet):
"""A particular realization of a node, containing several properties. Here: the review properties """
data_unit_index = 2
    partition_key = 'true_as_of_seconds'
partition_granularity = 60*60*24*360
time_fill_method = fill_name('time')
model = [{'field':'review_id', 'type':'StringType', 'fill_method': fill_review_id},
{'field':'helpful', 'type':'IntegerType', 'fill_method': helpful},
{'field':'nothelpful', 'type':'IntegerType', 'fill_method':fill_nothelpful},
{'field':'score', 'type':'IntegerType'},
{'field':'summary', 'type':'StringType'},
{'field':'text', 'type':'StringType'}]
    source_fields = [{'field':'review_id','type':'StringType','validation':'notempty'},
                     {'field':'text','validation':'notempty'}]
def __init__(self):
self.build_model()
    def helpful(self, row, field=''):
        return int(row['helpfulness'].split('/')[0])
def fill_review_id(self, row, field):
user_id = row['user_id']
product_id = row['product_id']
true_as_of_seconds = row['time']
return unicode(hash(user_id+product_id+str(true_as_of_seconds)))
def fill_nothelpful(self, row, field):
        return int(row['helpfulness'].split('/')[1]) - self.helpful(row, 'helpful')
class UserProperties(peachbox.model.MasterDataSet):
"""A particular realization of properties. Here: the user properties """
data_unit_index = 3
    partition_key = 'true_as_of_seconds'
partition_granularity = 60*60*24*360
schema = [{'field':'user_id', 'type':'StringType'},
{'field':'profile_name', 'type':'StringType'}]
|
plotly/python-api
|
packages/python/plotly/plotly/validators/isosurface/slices/z/_locations.py
|
Python
|
mit
| 476
| 0.002101
|
import _plotly_utils.basevalidators
class LocationsValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(
self, plotly_name="locations", parent_name="isosurface.slices.z", **kwargs
):
        super(LocationsValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "data"),
**kwargs
)
|
France-ioi/taskgrader
|
cache_reset.py
|
Python
|
mit
| 680
| 0.001471
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
# Copyright (c) 2016 France-IOI, MIT license
#
# http://opensource.org/licenses/MIT
# This little script resets the build folder and the cache database
import os, shutil
# Local imports
import schema_db
from config_default import CFG_BUILDSDIR, CFG_CACHEDIR
from config import CFG_BUILDSDIR, CFG_CACHEDIR
if __name__ == '__main__':
# Delete the builds and the cache folder
shutil.rmtree(CFG_BUILDSDIR, ignore_errors=True)
shutil.rmtree(CFG_CACHEDIR, ignore_errors=True)
# Recreate them
os.makedirs(CFG_BUILDSDIR)
os.makedirs(CFG_CACHEDIR)
    # Reinitialize the cache database
schema_db.schemaDb()
|
vesellov/bitdust.devel
|
manage.py
|
Python
|
agpl-3.0
| 1,152
| 0
|
#!/usr/bin/env python
# manage.py
#
# Copyright (C) 2008-2018 Veselin Penev, https://bitdust.io
#
# This file (manage.py) is part of BitDust Software.
#
# BitDust is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# BitDust Software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with BitDust Software. If not, see <http://www.gnu.org/licenses/>.
#
# Please contact us if you have any questions at bitdust.io@gmail.com
from __future__ import absolute_import
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "web.asite.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
sauloal/pycluster
|
pypy-1.9_64/lib_pypy/_md5.py
|
Python
|
mit
| 12,905
| 0.01511
|
#!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
# Note that PyPy contains also a built-in module 'md5' which will hide
# this one if compiled in.
"""A sample implementation of MD5 in pure Python.
This is an implementation of the MD5 hash function, as specified by
RFC 1321, in pure Python. It was implemented using Bruce Schneier's
excellent book "Applied Cryptography", 2nd ed., 1996.
Surely this is not meant to compete with the existing implementation
of the Python standard library (written in C). Rather, it should be
seen as a Python complement that is more readable than C and can be
used more conveniently for learning and experimenting purposes in
the field of cryptography.
This module tries very hard to follow the API of the existing Python
standard library's "md5" module, but although it seems to work fine,
it has not been extensively tested! (But note that there is a test
module, test_md5py.py, that compares this Python implementation with
the C one of the Python standard library.)
BEWARE: this comes with no guarantee whatsoever about fitness and/or
other properties! Specifically, do not use this in any production
code! License is Python License!
Special thanks to Aurelian Coman who fixed some nasty bugs!
Dinu C. Gherman
"""
__date__ = '2004-11-17'
__version__ = 0.91 # Modernised by J. Hallén and L. Creighton for Pypy
__metaclass__ = type # or genrpy won't work
import struct, copy
# ======================================================================
# Bit-Manipulation helpers
# ======================================================================
def _bytelist2long(list):
"Transform a list of characters into a list of longs."
imax = len(list)/4
hl = [0L] * imax
j = 0
i = 0
while i < imax:
b0 = long(ord(list[j]))
b1 = (long(ord(list[j+1]))) << 8
b2 = (long(ord(list[j+2]))) << 16
b3 = (long(ord(list[j+3]))) << 24
hl[i] = b0 | b1 |b2 | b3
i = i+1
j = j+4
return hl
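# Example: _bytelist2long('abcd') -> [0x64636261L]; bytes are assembled
# little-endian, as RFC 1321 specifies.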
def _rotateLeft(x, n):
"Rotate x (32 bit) left n bits circularly."
return (x << n) | (x >> (32-n))
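# Note: the rotated value is not truncated here; Python longs never wrap, so
# callers (see XX below) mask the result with 0xffffffffL after rotating.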
# ======================================================================
# The real MD5 meat...
#
# Implemented after "Applied Cryptography", 2nd ed., 1996,
# pp. 436-441 by Bruce Schneier.
# ======================================================================
# F, G, H and I are basic MD5 functions.
def F(x, y, z):
return (x & y) | ((~x) & z)
def G(x, y, z):
return (x & z) | (y & (~z))
def H(x, y, z):
return x ^ y ^ z
def I(x, y, z):
return y ^ (x | (~z))
def XX(func, a, b, c, d, x, s, ac):
"""Wrapper for call distribution to functions F, G, H and I.
This replaces functions FF, GG, HH and II from "Appl. Crypto."
Rotation is separate from addition to prevent recomputation
(now summed-up in one function).
"""
res = 0L
res = res + a + func(b, c, d)
res = res + x
res = res + ac
res = res & 0xffffffffL
res = _rotateLeft(res, s)
res = res & 0xffffffffL
res = res + b
return res & 0xffffffffL
class MD5Type:
"An implementation of the MD5 hash function in pure Python."
digest_size = digestsize = 16
block_size = 64
def __init__(self):
"Initialisation."
# Initial message length in bits(!).
self.length = 0L
self.count = [0, 0]
# Initial empty message as a sequence of bytes (8 bit characters).
self.input = []
# Call a separate init function, that can be used repeatedly
# to start from scratch on the same object.
self.init()
def init(self):
"Initialize the message-digest and set all fields to zero."
self.length = 0L
self.count = [0, 0]
self.input = []
# Load magic initialization constants.
self.A = 0x67452301L
self.B = 0xefcdab89L
self.C = 0x98badcfeL
self.D = 0x10325476L
def _transform(self, inp):
"""Basic MD5 step transforming the digest based on the input.
Note that if the Mysterious Constants are arranged backwards
in little-endian order and decrypted with the DES they produce
OCCULT MESSAGES!
"""
a, b, c, d = A, B, C, D = self.A, self.B, self.C, self.D
# Round 1.
S11, S12, S13, S14 = 7, 12, 17, 22
a = XX(F, a, b, c, d, inp[ 0], S11, 0xD76AA478L) # 1
d = XX(F, d, a, b, c, inp[ 1], S12, 0xE8C7B756L) # 2
c = XX(F, c, d, a, b, inp[ 2], S13, 0x242070DBL) # 3
b = XX(F, b, c, d, a, inp[ 3], S14, 0xC1BDCEEEL) # 4
a = XX(F, a, b, c, d, inp[ 4], S11, 0xF57C0FAFL) # 5
d = XX(F, d, a, b, c, inp[ 5], S12, 0x4787C62AL) # 6
c = XX(F, c, d, a, b, inp[ 6], S13, 0xA8304613L) # 7
b = XX(F, b, c, d, a, inp[ 7], S14, 0xFD469501L) # 8
a = XX(F, a, b, c, d, inp[ 8], S11, 0x698098D8L) # 9
d = XX(F, d, a, b, c, inp[ 9], S12, 0x8B44F7AFL) # 10
c = XX(F, c, d, a, b, inp[10], S13, 0xFFFF5BB1L) # 11
b = XX(F, b, c, d, a, inp[11], S14, 0x895CD7BEL) # 12
a = XX(F, a, b, c, d, inp[12], S11, 0x6B901122L) # 13
d = XX(F, d, a, b, c, inp[13], S12, 0xFD987193L) # 14
c = XX(F, c, d, a, b, inp[14], S13, 0xA679438EL) # 15
b = XX(F, b, c, d, a, inp[15], S14, 0x49B40821L) # 16
# Round 2.
S21, S22, S23, S24 = 5, 9, 14, 20
a = XX(G, a, b, c, d, inp[ 1], S21, 0xF61E2562L) # 17
d = XX(G, d, a, b, c, inp[ 6], S22, 0xC040B340L) # 18
c = XX(G, c, d, a, b, inp[11], S23, 0x265E5A51L) # 19
b = XX(G, b, c, d, a, inp[ 0], S24, 0xE9B6C7AAL) # 20
a = XX(G, a, b, c, d, inp[ 5], S21, 0xD62F105DL) # 21
d = XX(G, d, a, b, c, inp[10], S22, 0x02441453L) # 22
c = XX(G, c, d, a, b, inp[15], S23, 0xD8A1E681L) # 23
b = XX(G, b, c, d, a, inp[ 4], S24, 0xE7D3FBC8L) # 24
a = XX(G, a, b, c, d, inp[ 9], S21, 0x21E1CDE6L) # 25
d = XX(G, d, a, b, c, inp[14], S22, 0xC33707D6L) # 26
c = XX(G, c, d, a, b, inp[ 3], S23, 0xF4D50D87L) # 27
b = XX(G, b, c, d, a, inp[ 8], S24, 0x455A14EDL) # 28
a = XX(G, a, b, c, d, inp[13], S21, 0xA9E3E905L) # 29
d = XX(G, d, a, b, c, inp[ 2], S22, 0xFCEFA3F8L) # 30
c = XX(G, c, d, a, b, inp[ 7], S23, 0x676F02D9L) # 31
b = XX(G, b, c, d, a, inp[12], S24, 0x8D2A4C8AL) # 32
# Round 3.
S31, S32, S33, S34 = 4, 11, 16, 23
a = XX(H, a, b, c, d, inp[ 5], S31, 0xFFFA3942L) # 33
d = XX(H, d, a, b, c, inp[ 8], S32, 0x8771F681L) # 34
c = XX(H, c, d, a, b, inp[11], S33, 0x6D9D6122L) # 35
b = XX(H, b, c, d, a, inp[14], S34, 0xFDE5380CL) # 36
a = XX(H, a, b, c, d, inp[ 1], S31, 0xA4BEEA44L) # 37
d = XX(H, d, a, b, c, inp[ 4], S32, 0x4BDECFA9L) # 38
c = XX(H, c, d, a, b, inp[ 7], S33, 0xF6BB4B60L) # 39
b = XX(H, b, c, d, a, inp[10], S34, 0xBEBFBC70L) # 40
a = XX(H, a, b, c, d, inp[13], S31, 0x289B7EC6L) # 41
d = XX(H, d, a, b, c, inp[ 0], S32, 0xEAA127FAL) # 42
c = XX(H, c, d, a, b, inp[ 3], S33, 0xD4EF3085L) # 43
b = XX(H, b, c, d, a, inp[ 6], S34, 0x04881D05L) # 44
a = XX(H, a, b, c, d, inp[ 9], S31, 0xD9D4D039L) # 45
d = XX(H, d, a, b, c, inp[12], S32, 0xE6DB99E5L) # 46
c = XX(H, c, d, a, b, inp[15], S33, 0x1FA27CF8L) # 47
b = XX(H, b, c, d, a, inp[ 2], S34, 0xC4AC5665L) # 48
# Round 4.
S41, S42, S43, S44 = 6, 10, 15, 21
a = XX(I, a, b, c, d, inp[ 0], S41, 0xF4292244L) # 49
d = XX(I, d, a, b, c, inp[ 7], S42, 0x432AFF97L) # 50
c = XX(I, c, d, a, b, inp[14], S43, 0xAB9423A7L) # 51
b = XX(I, b, c, d, a, inp[ 5], S44, 0xFC93A039L) # 52
a = XX(I, a, b, c, d, inp[12], S41, 0x655B59C3L) # 53
d = XX(I, d, a, b, c, inp[ 3], S42, 0x8F0CCC92L) # 54
c = XX(I, c, d, a, b, inp[10], S43, 0xFFEFF47DL) # 55
b = XX(I, b, c, d, a, inp[ 1], S44, 0x85845DD1L) # 56
a = XX(I, a, b, c, d, inp[ 8], S41, 0x6FA87E4FL) # 57
d = XX
|
jcmgray/autoray
|
autoray/_version.py
|
Python
|
apache-2.0
| 18,445
| 0
|
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
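# In a `git archive` tarball the $Format$ placeholders above get expanded via
# .gitattributes export-subst; e.g. git_refnames might read
# " (HEAD -> master, tag: v1.0)" and git_full the full 40-character commit hash.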
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = ""
cfg.parentdir_prefix = "."
cfg.versionfile_source = "autoray/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
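# Illustrative call (output values are hypothetical):
#   run_command(["git"], ["describe", "--tags"], cwd=root)
#   -> ("v1.2.0-3-gabc1234", 0) on success; (None, None) if git is missing,
#   or (None, returncode) if the command fails.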
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"di
|
Voldemort93/python
|
detect_remotes.py
|
Python
|
gpl-2.0
| 13,069
| 0.004438
|
# coding=utf-8
import base64
import zlib
import json
from sqlalchemy import Column, ForeignKey, Integer, String
import sqlalchemy
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class Tables:
def __init__(self):
pass
class CommandCode(Base):
__tablename__ = 'codes'
_id = Column(Integer, primary_key=True, nullable=False)
codedata = Column(String, nullable=False)
def __init__(self, **kwargs):
self.__periodTolerance = kwargs.get('periodTolerance')
self.__frame = kwargs.get('frame')
self.__frequency = kwargs.get('frequency')
self.__period = kwargs.get('period')
self.__repeatCount = kwargs.get('repeatCount')
@sqlalchemy.orm.reconstructor
def initialize(self):
coded = base64.b64decode(self.codedata)
data = json.loads(zlib.decompress(coded))
self.__periodTolerance = data['periodTolerance']
self.__frame = data['frame']
self.__frequency = data['frequency']
self.__period = data['period']
self.__repeatCount = data['repeatCount']
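    # Illustrative inverse of the decoding above (hypothetical values):
    #   payload = {'frame': [9000, 4500, 560], 'frequency': 38000,
    #              'period': 26.3, 'periodTolerance': 0.2, 'repeatCount': 1}
    #   codedata = base64.b64encode(zlib.compress(json.dumps(payload)))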
@property
def frame(self):
return self.__frame
@property
def frequency(self):
return self.__frequency
@property
def period(self):
return self.__period
@property
def periodTolerance(self):
return self.__periodTolerance
@property
def repeatCount(self):
return self.__repeatCount
def __cmp__(self, other):
        result = None  # set by every branch below; avoids the module-level global
assert isinstance(other, Tables.CommandCode)
if self.frame is not None:
# in Command Code is only frame
if self.period is None and self.frequency is None and self.periodTolerance is None and \
self.repeatCount is None:
result = cmp((self.frame,), (other.frame,))
# in Command Code is frame and one of the other parameters
elif self.period is not None and self.frequency is None and self.periodTolerance is None and \
self.repeatCount is None:
result = cmp((self.frame, self.period),
(other.frame, other.period))
            elif self.period is None and self.frequency is not None and self.periodTolerance is None and \
self.repeatCount is None:
result = cmp((self.frame, self.frequency),
                             (other.frame, other.frequency))
elif self.period is None and self.frequency is None and self.periodTolerance is not None and \
self.repeatCount is None:
result = cmp((self.frame, self.periodTolerance),
(other.frame, other.periodTolerance))
elif self.period is None and self.frequency is None and self.periodTolerance is None and \
self.repeatCount is not None:
result = cmp((self.frame, self.repeatCount),
(other.frame, other.repeatCount))
# in Command Code is frame and two of the other parameters
elif self.period is not None and self.frequency is not None and self.periodTolerance is None and \
self.repeatCount is None:
result = cmp((self.frame, self.period, self.frequency),
(other.frame, other.period, other.frequency))
elif self.period is not None and self.frequency is None and self.periodTolerance is not None and \
self.repeatCount is None:
result = cmp((self.frame, self.period, self.periodTolerance),
(other.frame, other.period, other.periodTolerance))
elif self.period is not None and self.frequency is None and self.periodTolerance is None and \
self.repeatCount is not None:
result = cmp((self.frame, self.period, self.repeatCount),
(other.frame, other.period, other.repeatCount))
elif self.period is None and self.frequency is not None and self.periodTolerance is not None and \
self.repeatCount is None:
result = cmp((self.frame, self.frequency, self.periodTolerance),
(other.frame, other.frequency, other.periodTolerance))
elif self.period is None and self.frequency is not None and self.periodTolerance is None and \
self.repeatCount is not None:
result = cmp((self.frame, self.frequency, self.repeatCount),
(other.frame, other.frequency, other.repeatCount))
elif self.period is None and self.frequency is None and self.periodTolerance is not None and \
self.repeatCount is not None:
result = cmp((self.frame, self.periodTolerance, self.repeatCount),
(other.frame, other.periodTolerance, other.repeatCount))
# in Command Code is frame and two of the other parameters
elif self.period is not None and self.frequency is not None and self.periodTolerance is not None and \
self.repeatCount is None:
result = cmp((self.frame, self.period, self.frequency, self.periodTolerance),
(other.frame, other.period, other.frequency, other.periodTolerance))
elif self.period is not None and self.frequency is not None and self.periodTolerance is None and \
self.repeatCount is not None:
result = cmp((self.frame, self.period, self.frequency, self.repeatCount),
(other.frame, other.period, other.frequency, other.repeatCount))
# in Command Code are all parameters
else:
result = cmp(
(self.frame, self.period, self.frequency, self.periodTolerance, self.repeatCount,),
(other.frame, other.period, other.frequency, other.periodTolerance, other.repeatCount,))
return result
def __repr__(self):
return "period: {}, freq: {}, frame: {}, periodTolerance: {}, repeatCount: {}".format(self.__period,
self.__frequency,
self.__frame,
self.__periodTolerance,
self.__repeatCount)
class CodeAllocation(Base):
__tablename__ = 'codeallocations'
# columns
_id = Column(Integer, nullable=False, primary_key=True)
model_id = Column(Integer, ForeignKey('models._id'), nullable=False)
code_id = Column(Integer, ForeignKey('codes._id'), nullable=False)
codetype_id = Column(Integer, ForeignKey('codetypes._id'), nullable=False)
model = relationship('Model')
class Model(Base):
__tablename__ = 'models'
# columns
_id = Column(Integer, nullable=False, primary_key=True)
vendor_id = Column(Integer, ForeignKey('vendors._id'), nullable=False)
devicetype_id = Column(Integer, nullable=False)
name = Column(String, nullable=False)
vendor = relationship('Vendor')
def __repr__(self):
return "(Model: vendor:
|
ademariag/kapitan
|
kapitan/version.py
|
Python
|
apache-2.0
| 504
| 0.003968
|
#!/usr/bin/env python3
# Copyright 2019 The Kapitan Authors
# SPDX-FileCopyrightText: 2020 The Kapitan Authors <kapitan-admins@googlegroups.com>
#
# SPDX-License-Identifier: Apache-2.0
"Project description variables"
PROJECT_NAME = "kapitan"
VERSION = '0.29.4'
DESCRIPTION = "Generic templated configuration management for Kubernetes, " "Terraform and other things"
AUTHOR = "Ricardo Amaro"
AUTHOR_EMAIL = "ramaro@google.com"
LICENCE = "Apache License 2.0"
URL = "https://github.com/kapicorp/kapitan"
|
Azure/azure-sdk-for-python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_11_01/aio/operations/_virtual_routers_operations.py
|
Python
|
mit
| 24,260
| 0.005029
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualRoutersOperations:
"""VirtualRoutersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
virtual_router_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualRouterName': self._serialize.url("virtual_router_name", virtual_router_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.Error, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualRouters/{virtualRouterName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
virtual_router_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified Virtual Router.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_router_name: The name of the Virtual Router.
:type virtual_router_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
virtual_router_name=virtual_router_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualRouterName': self._serialize.url("virtual_router_name", virtual_router_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualRouters/{virtualRouterName}'} # type: ignore
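    # Usage sketch (illustrative, not part of this module): assuming an
    # authenticated aio NetworkManagementClient named `client`, a caller would
    # typically drive this long-running operation as:
    #
    #     poller = await client.virtual_routers.begin_delete("my-rg", "my-router")
    #     await poller.result()  # resumes once the deletion has completed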
async def get(
self,
resource_group_name: str,
virtual_router_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.VirtualRouter":
"""Gets the specified Virtual Router.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_router_name: The name of the Virtual Router.
:type virtual_router_name: str
:param expand: Expands referenced resources.
:type expand: str
    :keyword callable cls: A custom type or function that will be passed the direct response
|
SteveWooding/fullstack-nanodegee-conference
|
Lesson_3/00_Conference_Central/conference.py
|
Python
|
gpl-3.0
| 4,120
| 0.004126
|
#!/usr/bin/env python
"""
conference.py -- Udacity conference server-side Python App Engine API;
uses Google Cloud Endpoints
$Id: conference.py,v 1.25 2014/05/24 23:42:19 wesc Exp wesc $
created by wesc on 2014 apr 21
"""
__author__ = 'wesc+api@google.com (Wesley Chun)'
from datetime import datetime
import endpoints
from protorpc import messages
from protorpc import message_types
from protorpc import remote
from google.appengine.ext import ndb
from models import Profile
from models import ProfileMiniForm
from models import ProfileForm
from models import TeeShirtSize
from settings import WEB_CLIENT_ID
from utils import getUserId
EMAIL_SCOPE = endpoints.EMAIL_SCOPE
API_EXPLORER_CLIENT_ID = endpoints.API_EXPLORER_CLIENT_ID
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
@endpoints.api( name='conference',
version='v1',
allowed_client_ids=[WEB_CLIENT_ID, API_EXPLORER_CLIENT_ID],
scopes=[EMAIL_SCOPE])
class ConferenceApi(remote.Service):
"""Conference API v0.1"""
# - - - Profile objects - - - - - - - - - - - - - - - - - - -
    def _copyProfileToForm(self, prof):
"""Copy relevant fields from Profile to ProfileForm."""
# copy relevant fields from Profile to ProfileForm
pf = ProfileForm()
for field in pf.all_fields():
if hasattr(prof, field.name):
# convert t-shirt string to Enum; just copy others
if field.name == 'teeShirtSize':
setattr(pf, field.name, getattr(TeeShirtSize, getattr(prof, field.name)))
else:
setattr(pf, field.name, getattr(prof, field.name))
pf.check_initialized()
return pf
def _getProfileFromUser(self):
"""Return user Profile from datastore, creating new one if non-existent."""
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
# TODO 1
# step 1. copy utils.py from additions folder to this folder
# and import getUserId from it
# step 2. get user id by calling getUserId(user)
# step 3. create a new key of kind Profile from the id
user_id = getUserId(user)
p_key = ndb.Key(Profile, user_id)
# TODO 3
# get the entity from datastore by using get() on the key
profile = p_key.get()
if not profile:
profile = Profile(
key = p_key, # TODO 1 step 4. replace with the key from step 3
displayName = user.nickname(),
mainEmail= user.email(),
teeShirtSize = str(TeeShirtSize.NOT_SPECIFIED),
)
# TODO 2
# save the profile to datastore
profile.put()
return profile # return Profile
def _doProfile(self, save_request=None):
"""Get user Profile and return to user, possibly updating it first."""
# get user Profile
prof = self._getProfileFromUser()
        # if saveProfile(), process user-modifiable fields
if save_request:
for field in ('displayName', 'teeShirtSize'):
if hasattr(save_request, field):
val = getattr(save_request, field)
if val:
setattr(prof, field, str(val))
# TODO 4
# put the modified profile to datastore
prof.put()
# return ProfileForm
return self._copyProfileToForm(prof)
@endpoints.method(message_types.VoidMessage, ProfileForm,
path='profile', http_method='GET', name='getProfile')
def getProfile(self, request):
"""Return user profile."""
return self._doProfile()
@endpoints.method(ProfileMiniForm, ProfileForm,
path='profile', http_method='POST', name='saveProfile')
def saveProfile(self, request):
"""Update & return user profile."""
return self._doProfile(request)
# registers API
api = endpoints.api_server([ConferenceApi])
|
feifangit/dj-api-auth
|
djapiauth/models.py
|
Python
|
gpl-2.0
| 4,372
| 0.003202
|
import uuid
import re
import cPickle
import pprint
from django.db import models
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.db.models.signals import m2m_changed, pre_delete
from django.dispatch import receiver
class APITree(object):
def __init__(self):
self._tree = {} # re name -> (redump, sub dict)
def add(self, srelist):
p = self._tree
for sre in srelist:
pr = sre.pattern
if not p.has_key(pr):
p[pr] = (sre, {})
p = p[pr][1]
def pprint(self):
pprint.pprint(self._tree)
def match(self, url):
path = url
eps = self._tree # entry points at same level
bpartialmatch = True
while eps and bpartialmatch: # until leaf
bpartialmatch = False
for rename, (redump, nextlevel) in eps.items():
match = redump.search(path)
if match:
path, eps, bpartialmatch = path[match.end():], nextlevel, True
if (not path) and (not eps): # perfect match
return True
break # partial match, jump to next level
else: # not match for this entry, try next one at same level
continue
if not bpartialmatch: # failed to match in this level
return False
return False
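# Usage sketch (illustrative): the tree consumes a URL level by level, e.g.
#   tree = APITree()
#   tree.add([re.compile(r"^api/"), re.compile(r"^v1/users/$")])
#   tree.match("api/v1/users/")   # True: every level consumed, nothing left over
#   tree.match("api/v1/other/")   # False: the second level fails to match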
class APIEntryPoint(models.Model):
class Meta:
verbose_name = "Entry point"
name = models.CharField(max_length=100, unique=True)
pattern = models.CharField(max_length=300) # cPickle.dumps(<compiled RE>)
def __unicode__(self):
return unicode(self.name)
def gen_apikey(): # django 1.7 can not serialize lambda function
    return uuid.uuid4().hex[:8]
def gen_seckey(): # django 1.7 can not serialize lambda function
    return uuid.uuid4().hex
def gen_empty_list():
return cPickle.dumps([])
class APIKeys(models.Model):
class Meta:
verbose_name = "Credential"
apikey = models.CharField(max_length=50, default=gen_apikey, unique=True)
seckey = models.CharField(max_length=50, default=gen_seckey)
user = models.ForeignKey(settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.SET_NULL)
apis = models.ManyToManyField(APIEntryPoint, blank=True, null=True, help_text="accessible api entries")
note = models.CharField(max_length=80, null=True, blank=True)
apitree = models.TextField(default=gen_empty_list)
def __unicode__(self):
return unicode(self.apikey)
@staticmethod
def permission_check(apikey, endpoint):
"""
        Return (user, seckey) if the URL endpoint is in the allowed entry point list.
"""
try:
ak = APIKeys.objects.get(apikey=apikey)
apitree = cPickle.loads(ak.apitree.encode("ascii"))
if apitree.match(endpoint):
return ak.user if ak.user else AnonymousUser(), ak.seckey
except APIKeys.DoesNotExist:
pass
return None, None
def _api_set_changed(sender, instance, action, **kwargs):
    # remove/add an API from an API key
tree = APITree()
if action == "post_clear":
instance.apitree = cPickle.dumps(tree)
instance.save(update_fields=["apitree"])
elif (action == "post_remove" or action == "post_add"):
for api in instance.apis.all():
srelist = cPickle.loads(api.pattern.encode("ascii"))
tree.add(srelist)
instance.apitree = cPickle.dumps(tree)
instance.save(update_fields=["apitree"])
m2m_changed.connect(_api_set_changed, sender=APIKeys.apis.through)
@receiver(pre_delete, sender=APIEntryPoint)
def _api_entry_deleted(sender, instance, using, *args, **kwargs):
# when an api entry is deleted,
# the entry will be removed automatically from api key
    # but we need to refresh the apitree field which stores the data structure for fast-matching
for apikey in instance.apikeys_set.all():
tree = APITree()
for api in apikey.apis.all():
if api.id == instance.id:
continue
srelist = cPickle.loads(api.pattern.encode("ascii"))
tree.add(srelist)
        apikey.apitree = cPickle.dumps(tree)
        apikey.save(update_fields=["apitree"])
|
JoyTeam/metagam
|
mg/test/testorm-2.py
|
Python
|
gpl-3.0
| 1,140
| 0.004386
|
#!/usr/bin/python2.6
# -*- coding: utf-8 -*-
# This file is a part of Metagam project.
#
# Metagam is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# Metagam is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Metagam. If not, see <http://www.gnu.org/licenses/>.
import unittest
from concurrence import dispatch, Tasklet
import mg.test.testorm
from mg.core.memcached import Memcached
from mg.core.cass import CassandraPool
class TestORM_Storage2(mg.test.testorm.TestORM):
def setUp(self):
mg.test.testorm.TestORM.setUp(self)
self.db.storage = 2
self.db.app = "testapp"
def main():
mg.test.testorm.cleanup()
unittest.main()
if __name__ == "__main__":
dispatch(main)
|
martinrotter/rssguard
|
resources/scripts/scrapers/search-xml-feeds.py
|
Python
|
gpl-3.0
| 979
| 0.017365
|
# Produces the list of links to XML feeds as extracted from an input list of generic URLs.
# This script expects to have the file path passed as the only input parameter.
import re
import sys
import urllib.request
from urllib.parse import urljoin
urls_file = sys.argv[1]
with open(urls_file) as f:
urls_lines = [line.rstrip() for line in f]
regexp_link = re.compile("<link[^>]+type=\"application\/(?:atom\+xml|rss\+xml|feed\+json|json)\"[^>]*>")
regexp_href = re.compile("href=\"([^\"]+)\"")
for url in urls_lines:
# Download HTML data.
try:
url_response = urllib.request.urlopen(url)
html = url_response.read().decode("utf-8")
    except Exception:
continue
# Search for XML feeds with regexps.
for link_tag in re.findall(regexp_link, html):
for link_xml_feed in re.findall(regexp_href, link_tag):
if link_xml_feed.startswith("/"):
print(urljoin(url, "/") + link_xml_feed[1:])
else:
print(link_xml_feed)
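# Example (illustrative): given a page at https://example.com whose HTML contains
#   <link rel="alternate" type="application/rss+xml" href="/feed.xml">
# the script prints https://example.com/feed.xml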
|
lombritz/odoo
|
addons/pos_delivery_restaurant/wizard/__init__.py
|
Python
|
agpl-3.0
| 1,334
| 0.001499
|
# -*- encoding: utf-8 -*-
###############################################################################
# Module Writen to OpenERP, Open Source Management Solution
# Copyright (C) OpenERP Venezuela (<http://www.vauxoo.com>).
# All Rights Reserved
############# Credits #########################################################
# Coded by: Yanina Aular <yani@vauxoo.com>
# Planified by: Humberto Arocha <hbto@vauxoo.com>
# Audited by: Humberto Arocha <hbto@vauxoo.com>
###############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
###############################################################################
|
QuantumQuadrate/CsPyController
|
python/Counter.py
|
Python
|
lgpl-3.0
| 19,978
| 0.005056
|
"""Counter.py
Part of the AQuA Cesium Controller software package
author=Martin Lichtman
created=2013-10-19
modified>=2015-05-11
This file holds everything to model a National Instruments DAQmx counter.
It communicates to LabView via the higher up LabView(Instrument) class.
Saving of returned data is handled in the LabView class.
"""
from __future__ import division
__author__ = 'Martin Lichtman'
import logging
import numpy as np
from atom.api import Str, Float, Typed, Member, Bool, Int, List
from cs_instruments import Instrument
from instrument_property import Prop, ListProp
from analysis import AnalysisWithFigure
from sklearn import mixture
from scipy.optimize import curve_fit
from scipy.special import erf
import matplotlib.gridspec as gridspec
import time
logger = logging.getLogger(__name__)
gs = gridspec.GridSpec(2, 2)
gmix = mixture.GaussianMixture(n_components=2)
class Counters(Instrument):
version = '2015.05.11'
counters = Typed(ListProp)
def __init__(self, name, experiment, description=''):
super(Counters, self).__init__(name, experiment, description)
# start with a blank list of counters
self.counters = ListProp('counters', experiment, listElementType=Counter, listElementName='counter')
self.properties += ['version', 'counters']
class Counter(Prop):
"""Each individual counter has a field for the signal source, clock source, and clock rate (in Hz,
used only for internal clocking).
"""
counter_source = Str()
clock_source = Str()
clock_rate = Float()
def __init__(self, name, experiment, description=''):
super(Counter, self).__init__(name, experiment, description)
self.properties += ['counter_source', 'clock_source', 'clock_rate']
class CounterAnalysis(AnalysisWithFigure):
counter_array = Member()
binned_array = Member()
meas_analysis_path = Str()
meas_data_path = Str()
iter_analysis_path = Str()
update_lock = Bool(False)
iterationonly = Bool(False)
enable = Bool()
drops = Int(3)
bins = Int(25)
shots = Int(2)
ROIs = List([0])
graph_roi = Int(0)
def __init__(self, name, experiment, description=''):
super(CounterAnalysis, self).__init__(name, experiment, description)
self.meas_analysis_path = 'analysis/counter_data'
self.meas_data_path = 'data/counter/data'
self.iter_analysis_path = 'shotData'
self.properties += ['enable', 'drops', 'bins', 'shots', 'graph_roi','draw_fig','iterationonly']
def preIteration(self, iterationResults, experimentResults):
self.counter_array = []
self.binned_array = None
def format_data(self, array):
"""Formats raw 2D counter data into the required 4D format.
        Formats raw 2D counter data with implicit structure:
[ # counter 0
[ dropped_bins shot_time_series dropped_bins shot_time_series ... ],
# counter 1
[ dropped_bins shot_time_series dropped_bins shot_time_series ... ]
]
        into the 4D format expected by the subsequent analyses:
[ # measurements, can have different lengths run-to-run
[ # shots array, fixed size
[ # roi list, shot 0
[ time_series_roi_0 ],
[ time_series_roi_1 ],
...
],
[ # roi list, shot 1
[ time_series_roi_0 ],
[ time_series_roi_1 ],
...
],
...
],
...
]
"""
rois, bins = array.shape[:2]
bins_per_shot = self.drops + self.bins # self.bins is data bins per shot
# calculate the number of shots dynamically
num_shots = int(bins/(bins_per_shot))
# calculate the number of measurements contained in the raw data
# there may be extra shots if we get branching implemented
num_meas = num_shots//self.shots
        # build a mask for selecting the valid data bins
shot_mask = ([False]*self.drops + [True]*self.bins)
good_shots = self.shots*num_meas
# mask for the roi
        ctr_mask = np.array(shot_mask*good_shots + [False]*bins_per_shot*(num_shots-good_shots), dtype='bool')
        # apply mask and reshape partially
        array = array[:, ctr_mask].reshape((rois, num_meas, self.shots, self.bins))
array = array.swapaxes(0, 1) # swap rois and measurement axes
        array = array.swapaxes(1, 2) # swap rois and shots axes
return array
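    # Worked example (illustrative): with drops=3, bins=25, shots=2 and a
    # single counter, a raw input of shape (1, 56) -- two shots of 28 bins
    # each -- is masked down to (1, 50) and returned with shape (1, 2, 1, 25),
    # i.e. (measurements, shots, rois, time-series bins).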
def analyzeMeasurement(self, measurementResults, iterationResults, experimentResults):
if self.enable:
'''# number of shots is hard coded right now
bins_per_shot = self.drops + self.bins
num_shots = int(len(self.counter_array[-1])/bins_per_shot)
#if self.draw_fig:
# print "Number of shots: {}".format(num_shots)
# print "Bins per shot: {}".format(bins_per_shot)
# print "Length of counter array: {}".format(int(len(self.counter_array[-1])))
# counter array is appended every measurement so the counter hists can be calculated
# updated every cycle
# WARNING: counter_array only works with a single counter right now
self.binned_array = np.array([
self.counter_array[:, s*bins_per_shot + self.drops:(s+1)*bins_per_shot].sum(1)
for s in range(num_shots)
])'''
# MFE 2018/01: this analysis has been generalized such that multiple sub measurements can occur
# in the same traditional measurement
array = measurementResults[self.meas_data_path][()]
try:
# package data into an array with shape (sub measurements, shots, counters, time series data)
array = self.format_data(array)
                # flatten the sub_measurements by converting top level to normal list and concatenating
self.counter_array += list(array)
except ValueError:
errmsg = "Error retrieving counter data. Offending counter data shape: {}"
logger.exception(errmsg.format(array.shape))
except:
logger.exception('Unhandled counter data exception')
# write this cycle's data into hdf5 file so that the threshold analysis can read it
# when multiple counter support is enabled, the ROIs parameter will hold the count
# Note the constant 1 is for the roi column parameter, all counters get entered in a single row
n_meas, n_shots, n_rois, bins = array.shape
sum_array = array.sum(axis=3).reshape((n_meas, n_shots, n_rois, 1))
measurementResults[self.meas_analysis_path] = sum_array
# put the sum data in the expected format for display
if self.binned_array is None:
self.binned_array = np.array([sum_array.reshape((n_meas, n_shots, n_rois))])
else:
self.binned_array = np.concatenate((
self.binned_array,
[sum_array.reshape((n_meas, n_shots, n_rois))]
))
if not self.iterationonly:
self.updateFigure()
def analyzeIteration(self, iterationResults, experimentResults):
if self.enable:
# recalculate binned_array to get rid of cut data
# iterationResults[self.iter_analysis_path] = self.binned_array
meas = map(int, iterationResults['measurements'].keys())
meas.sort()
path = 'measurements/{}/' + self.meas_analysis_path
try:
res = np.array([iterationResults[path.format(m)] for m in meas])
except KeyError:
                # I was having a problem with the file maybe not being ready
logger.warning("Issue read
|
s0lst1c3/eaphammer
|
local/hostapd-eaphammer/tests/hwsim/test_module_tests.py
|
Python
|
gpl-3.0
| 889
| 0.00225
|
# Module tests
# Copyright (c) 2014, Jouni Malinen <j@w1.fi>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import os
import time
import hostapd
def test_module_wpa_supplicant(dev, apdev, params):
"""wpa_supplicant module
|
tests"""
if "OK" not in dev[0].global_request("MODULE_TESTS"):
raise Exception("Module tests failed")
# allow eloop test to complete
time.sleep(0.75)
dev[0].relog()
with open(os.path.join(params['logdir'], 'log0'), 'r') as f:
res = f.read()
if "FAIL - should not have called this function" in res:
raise Exception("eloop test failed")
def test_module_hostapd(dev):
"""hostapd module tests"""
hapd_global = hostapd.HostapdGlobal()
if "OK" not in hapd_global.ctrl.request("MODULE_TESTS"):
raise Exception("Module tests failed")
|
MorseDecoder/Morse-Code-Project
|
Input/User Input.py
|
Python
|
gpl-3.0
| 1,443
| 0.11088
|
import winsound #Import winsound library for winsound.Beep() function
import time #Import time library for time.sleep() function
morse_code = { #Dictionary containing each letter and their respective morse code
"a" : [0,1],
"b" : [1,0,0,0],
"c" : [1,0,1,0],
"d" : [1,0,0],
"e" : [0],
"f" : [0,0,1,0],
"g" : [1,1,0],
"h" : [0,0,0,0],
"i" : [0,0],
"j" : [0,1,1,1],
"k" : [1,0,1],
"l" : [0,1,0,0],
"m" : [1,1],
"n" : [1,0],
"o" : [1,1,1],
"p" : [0,1,1,0],
"q" : [1,1,0,1],
"r" : [0,1,0],
"s" : [0,0,0],
"t" : [1],
"u" : [0,0,1],
"v" : [0,0,0,1],
"w" : [0,1,1],
"x" : [1,0,0,1],
"y" : [1,0,1,1],
"z" : [1,1,0,0],
" " : [2]
}
while True: #Loops program
morse_input = input("Enter a word\n> ").lower() #Asks user for input
morse_buffer = [] #Buffer for input
count = 0
while count < len(morse_input):
morse_buffer.append(morse_code[morse_input[count]]) #Adds input to buffer and converts to morse code
count += 1
count = 0
innercount = 0
while count < len(morse_buffer):
currentlength = len(morse_buffer[count])
while innercount < currentlength:
            if morse_buffer[count][innercount] == 0:
winsound.Beep(1000, 500) #Plays a dot
time.sleep(0.2)
elif morse_buffer[count][innercount] == 1:
winsound.Beep(1000, 980) #Plays a dash
time.sleep(0.1)
elif morse_buffer[count][innercount] == 2:
time.sleep(2.1) #Space
innercount += 1
innercount = 0
count += 1
time.sleep(1)
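#Example (illustrative): entering "sos" plays three dots, three dashes, then three dots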
|
billiob/papyon
|
papyon/media/conference.py
|
Python
|
gpl-2.0
| 12,851
| 0.003735
|
# -*- coding: utf-8 -*-
#
# papyon - a python client library for Msn
#
# Copyright (C) 2009 Collabora Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
from papyon.media import *
from papyon.event.media import *
import pygst
pygst.require('0.10')
import farsight
import gobject
import gst
import logging
import sys
logger = logging.getLogger("papyon.media.conference")
codecs_definitions = {
"audio" : [
(114, "x-msrta", farsight.MEDIA_TYPE_AUDIO, 16000),
(111, "SIREN", farsight.MEDIA_TYPE_AUDIO, 16000),
(112, "G7221", farsight.MEDIA_TYPE_AUDIO, 16000),
(115, "x-msrta", farsight.MEDIA_TYPE_AUDIO, 8000),
(116, "SIREN", farsight.MEDIA_TYPE_AUDIO, 8000),
(4, "G723", farsight.MEDIA_TYPE_AUDIO, 8000),
(8, "PCMA", farsight.MEDIA_TYPE_AUDIO, 8000),
(0, "PCMU", farsight.MEDIA_TYPE_AUDIO, 8000),
(97, "RED", farsight.MEDIA_TYPE_AUDIO, 8000),
(101, "telephone-event", farsight.MEDIA_TYPE_AUDIO, 8000)
],
"video" : [
(121, "x-rtvc1", farsight.MEDIA_TYPE_VIDEO, 90000),
(34, "H263", farsight.MEDIA_TYPE_VIDEO, 90000)
]
}
types = {
0 : None,
farsight.CANDIDATE_TYPE_HOST : "host",
farsight.CANDIDATE_TYPE_SRFLX : "srflx",
farsight.CANDIDATE_TYPE_PRFLX : "prflx",
farsight.CANDIDATE_TYPE_RELAY : "relay"
}
protos = {
farsight.NETWORK_PROTOCOL_TCP : "TCP",
farsight.NETWORK_PROTOCOL_UDP : "UDP"
}
media_names = {
farsight.MEDIA_TYPE_AUDIO : "audio",
farsight.MEDIA_TYPE_VIDEO : "video"
}
media_types = {
"audio" : farsight.MEDIA_TYPE_AUDIO,
"video" : farsight.MEDIA_TYPE_VIDEO
}
class Conference(gobject.GObject):
def __init__(self):
gobject.GObject.__init__(self)
def set_source(self, source):
pass
class MediaSessionHandler(MediaSessionEventInterface):
def __init__(self, session):
MediaSessionEventInterface.__init__(self, session)
self._conference = None
self._handlers = []
self._setup()
for stream in session.streams:
self.on_stream_added(stream)
def _setup(self):
self._pipeline = gst.Pipeline()
bus = self._pipeline.get_bus()
bus.add_signal_watch()
bus.connect("message", self.on_bus_message)
if self._session.type is MediaSessionType.WEBCAM_RECV:
name = "fsmsncamrecvconference"
elif self._session.type is MediaSessionType.WEBCAM_SEND:
name = "fsmsncamsendconference"
else:
name = "fsrtpconference"
self._conference = gst.element_factory_make(name)
self._participant = self._conference.new_participant("")
self._pipeline.add(self._conference)
self._pipeline.set_state(gst.STATE_PLAYING)
#FIXME Create FsElementAddedNotifier
def on_stream_added(self, stream):
logger.debug("Stream \"%s\" added" % stream.name)
handler = MediaStreamHandler(stream)
handler.setup(self._conference, self._pipeline, self._participant,
self._session.type)
self._handlers.append(handler)
if self._session.type is MediaSessionType.WEBCAM_RECV or\
self._session.type is MediaSessionType.WEBCAM_SEND:
stream.set_local_codecs([])
def on_bus_message(self, bus, msg):
ret = gst.BUS_PASS
if msg.type == gst.MESSAGE_ELEMENT:
s = msg.structure
if s.has_name("farsight-error"):
logger.error("Farsight error : %s" % s['error-msg'])
if s.has_name("farsight-codecs-changed"):
logger.debug("Farsight codecs changed")
ret = gst.BUS_DROP
ready = s["session"].get_property("codecs-ready")
if ready:
codecs = s["session"].get_property("codecs")
name = media_names[s["session"].get_property("media-type")]
stream = self._session.get_stream(name)
stream.set_local_codecs(convert_fs_codecs(codecs))
if s.has_name("farsight-new-local-candidate"):
logger.debug("New local candidate")
ret = gst.BUS_DROP
name = media_names[s["stream"].get_property("session").get_property("media-type")]
candidate = convert_fs_candidate(s["candidate"])
stream = self._session.get_stream(name)
stream.new_local_candidate(candidate)
if s.has_name("farsight-local-candidates-prepared"):
logger.debug("Local candidates are prepared")
ret = gst.BUS_DROP
type = s["stream"].get_property("session").get_property("media-type")
name = media_names[type]
stream = self._session.get_stream(name)
stream.local_candidates_prepared()
if s.has_name("farsight-new-active-candidate-pair"):
logger.debug("New active candidate pair")
ret = gst.BUS_DROP
type = s["stream"].get_property("session").get_property("media-type")
name = media_names[type]
stream = self._session.get_stream(name)
local = s["local-candidate"]
remote = s["remote-candidate"]
local = convert_fs_candidate(local)
remote = convert_fs_candidate(remote)
stream.new_active_candidate_pair(local, remote)
return ret
class MediaStreamHandler(MediaStreamEventInterface):
def __init__(self, stream):
MediaStreamEventInterface.__init__(self, stream)
def setup(self, conference, pipeline, participant, type):
relays = []
for r in self._stream.relays:
relay = gst.Structure("relay")
relay.set_value("username", r.username)
relay.set_value("password", r.password)
relay.set_value("ip", r.ip)
relay.set_value("port", r.port, "uint")
relays.append(relay)
if type in (MediaSessionType.SIP, MediaSessionType.TUNNELED_SIP):
if type is MediaSessionType.TUNNELED_SIP:
compatibility_mode = 3
else:
compatibility_mode = 2
params = {"stun-ip" : "64.14.48.28", "stun-port" : 3478,
"compatibility-mode" : compatibility_mode,
"controlling-mode": self._stream.created_locally,
"relay-info": relays}
else:
params = {}
media_type = media_types[self._stream.name]
        self.fssession = conference.new_session(media_type)
self.fssession.set_codec_preferences(build_codecs(self._stream.name))
self.fsstream = self.fssession.new_stream(participant,
self._stream.direction, "nice", params)
self.fsstream.connect("src-pad-added", self.on_src_pad_added, pipeline)
        source = make_source(self._stream.name)
pipeline.add(source)
source.get_pad("src").link(self.fssession.get_property("sink-pad"))
pipeline.set_state(gst.STATE_PLAYING)
def on_stream_closed(self):
del self.fsstream
def on_remote_candidates_received(self, candidates):
candidates = filter(lambda x: x.transport == "UDP", candidates)
candidates = convert_media_candidates(candidates)
self.fsstream.set_remote_candidates(candidates)
def on_remote_codecs_received(self, codecs):
        codecs = convert_media_codecs(codecs, self._stream.name)
|
agustinhenze/logbook.debian
|
logbook/__init__.py
|
Python
|
bsd-3-clause
| 1,683
| 0.000594
|
# -*- coding: utf-8 -*-
"""
logbook
~~~~~~~
Simple logging library that aims to support desktop, command line
and web applications alike.
:copyright: (c) 2010 by Armin Ronacher, Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import os
from logbook.base import LogRecord, Logger, LoggerGroup, NestedSetup, \
Processor, Flags, get_level_name, lookup_level, dispatch_record, \
    CRITICAL, ERROR, WARNING, NOTICE, INFO, DEBUG, NOTSET, \
set_datetime_format
from logbook.handlers import Handler, StreamHandler, FileHandler, \
MonitoringFileHandler, StderrHandler, RotatingFileHandler, \
TimedRotatingFileHandler, TestHandler, MailHandler, GMailHandler, SyslogHandler, \
NullHandler, NTEventLogHandler, create_syshandler, StringFormatter, \
    StringFormatterHandlerMixin, HashingHandlerMixin, \
LimitingHandlerMixin, WrapperHandler, FingersCrossedHandler, \
GroupHandler
__version__ = '0.10.0'
# create an anonymous default logger and provide all important
# methods of that logger as global functions
_default_logger = Logger('Generic')
_default_logger.suppress_dispatcher = True
debug = _default_logger.debug
info = _default_logger.info
warn = _default_logger.warn
warning = _default_logger.warning
notice = _default_logger.notice
error = _default_logger.error
exception = _default_logger.exception
catch_exceptions = _default_logger.catch_exceptions
critical = _default_logger.critical
log = _default_logger.log
del _default_logger
# install a default global handler
if os.environ.get('LOGBOOK_INSTALL_DEFAULT_HANDLER'):
default_handler = StderrHandler()
default_handler.push_application()
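# Usage sketch (illustrative): with LOGBOOK_INSTALL_DEFAULT_HANDLER set in the
# environment, the module-level helpers log through the anonymous 'Generic'
# logger straight to stderr:
#
#     import logbook
#     logbook.warn('disk usage at {0}%', 93)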
|
Sergiopopoulos/IV-perezmolinasergio
|
iaas/fabfile.py
|
Python
|
gpl-3.0
| 330
| 0.027273
|
# coding: utf-8
from fabric.api import *
def instalacion():
run('sudo git clone https://github.com/Sergiopopoulos/IV-perezmolinasergio')
run('cd IV-perezmolinasergio && sudo pip install -r requirements.txt')
def ejecucion():
run('cd IV-perezmolinasergio && nohup sudo -E gunicorn app.wsgi -b 0.0.0.0:80 &', pty=False)
| |
vedujoshi/tempest
|
tempest/tests/lib/services/identity/v3/test_endpoint_groups_client.py
|
Python
|
apache-2.0
| 5,681
| 0
|
# Copyright 2017 AT&T Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.services.identity.v3 import endpoint_groups_client
from tempest.tests.lib import fake_auth_provider
from tempest.tests.lib.services import base
class TestEndPointGroupsClient(base.BaseServiceTest):
FAKE_CREATE_ENDPOINT_GROUP = {
"endpoint_group": {
"id": 1,
"name": "FAKE_ENDPOINT_GROUP",
"description": "FAKE SERVICE ENDPOINT GROUP",
"filters": {
"service_id": 1
}
}
}
FAKE_ENDPOINT_GROUP_INFO = {
"endpoint_group": {
"id": 1,
"name": "FAKE_ENDPOINT_GROUP",
"description": "FAKE SERVICE ENDPOINT GROUP",
"links": {
"self": "http://example.com/identity/v3/OS-EP-FILTER/" +
"endpoint_groups/1"
},
"filters": {
"service_id": 1
}
}
}
FAKE_LIST_ENDPOINT_GROUPS = {
"endpoint_groups": [
{
"id": 1,
"name": "SERVICE_GROUP1",
"description": "FAKE SERVICE ENDPOINT GROUP",
"links": {
"self": "http://example.com/identity/v3/OS-EP-FILTER/" +
"endpoint_groups/1"
},
"filters": {
"service_id": 1
}
},
{
"id": 2,
"name": "SERVICE_GROUP2",
"description": "FAKE SERVICE ENDPOINT GROUP",
"links": {
"self": "http://example.com/identity/v3/OS-EP-FILTER/" +
"endpoint_groups/2"
},
"filters": {
"service_id": 2
}
}
]
}
def setUp(self):
super(TestEndPointGroupsClient, self).setUp()
fake_auth = fake_auth_provider.FakeAuthProvider()
self.client = endpoint_groups_client.EndPointGroupsClient(
fake_auth, 'identity', 'regionOne')
def _test_create_endpoint_group(self, bytes_body=False):
self.check_service_client_function(
            self.client.create_endpoint_group,
            'tempest.lib.common.rest_client.RestClient.post',
self.FAKE_CREATE_ENDPOINT_GROUP,
bytes_body,
status=201,
name="FAKE_ENDPOINT_GROUP",
filters={'service_id': "1"})
def _test_show_endpoint_group(self, bytes_body=False):
self.check_service_client_function(
self.client.show_endpoint_group,
'tempest.lib.common.rest_client.RestClient.get',
self.FAKE_ENDPOINT_GROUP_INFO,
bytes_body,
endpoint_group_id="1")
def _test_check_endpoint_group(self, bytes_body=False):
self.check_service_client_function(
self.client.check_endpoint_group,
'tempest.lib.common.rest_client.RestClient.head',
{},
bytes_body,
status=200,
endpoint_group_id="1")
def _test_update_endpoint_group(self, bytes_body=False):
self.check_service_client_function(
self.client.update_endpoint_group,
'tempest.lib.common.rest_client.RestClient.patch',
self.FAKE_ENDPOINT_GROUP_INFO,
bytes_body,
endpoint_group_id="1",
name="NewName")
def _test_list_endpoint_groups(self, bytes_body=False):
self.check_service_client_function(
self.client.list_endpoint_groups,
'tempest.lib.common.rest_client.RestClient.get',
self.FAKE_LIST_ENDPOINT_GROUPS,
bytes_body)
def test_create_endpoint_group_with_str_body(self):
self._test_create_endpoint_group()
def test_create_endpoint_group_with_bytes_body(self):
self._test_create_endpoint_group(bytes_body=True)
def test_show_endpoint_group_with_str_body(self):
self._test_show_endpoint_group()
def test_show_endpoint_group_with_bytes_body(self):
self._test_show_endpoint_group(bytes_body=True)
def test_check_endpoint_group_with_str_body(self):
self._test_check_endpoint_group()
def test_check_endpoint_group_with_bytes_body(self):
self._test_check_endpoint_group(bytes_body=True)
def test_list_endpoint_groups_with_str_body(self):
self._test_list_endpoint_groups()
def test_list_endpoint_groups_with_bytes_body(self):
self._test_list_endpoint_groups(bytes_body=True)
def test_update_endpoint_group_with_str_body(self):
self._test_update_endpoint_group()
def test_update_endpoint_group_with_bytes_body(self):
self._test_update_endpoint_group(bytes_body=True)
def test_delete_endpoint_group(self):
self.check_service_client_function(
self.client.delete_endpoint_group,
'tempest.lib.common.rest_client.RestClient.delete',
{},
endpoint_group_id="1",
status=204)
|
cigroup-ol/metaopt
|
docs/_extensions/numpy_ext/docscrape_sphinx.py
|
Python
|
bsd-3-clause
| 7,924
| 0.001641
|
import re, inspect, textwrap, pydoc
import sphinx
from docscrape import NumpyDocString, FunctionDoc, ClassDoc
class SphinxDocString(NumpyDocString):
def __init__(self, docstring, config=None):
config = {} if config is None else config
self.use_plots = config.get('use_plots', False)
NumpyDocString.__init__(self, docstring, config=config)
# string conversion routines
def _str_header(self, name, symbol='`'):
return ['.. rubric:: ' + name, '']
def _str_field_list(self, name):
return [':' + name + ':']
def _str_indent(self, doc, indent=4):
out = []
for line in doc:
out += [' '*indent + line]
return out
    def _str_signature(self):
        # signature rendering is intentionally suppressed
        return ['']
def _str_summary(self):
return self['Summary'] + ['']
def _str_extended_summary(self):
return self['Extended Summary'] + ['']
def _str_param_list(self, name):
out = []
if self[name]:
out += self._str_field_list(name)
out += ['']
for param,param_type,desc in self[name]:
out += self._str_indent(['**%s** : %s' % (param.strip(),
param_type)])
out += ['']
out += self._str_indent(desc,8)
out += ['']
return out
@property
def _obj(self):
if hasattr(self, '_cls'):
return self._cls
elif hasattr(self, '_f'):
return self._f
return None
def _str_member_list(self, name):
"""
Generate a member listing, autosummary:: table where possible,
and a table where not.
"""
out = []
if self[name]:
out += ['.. rubric:: %s' % name, '']
prefix = getattr(self, '_name', '')
if prefix:
prefix = '~%s.' % prefix
autosum = []
others = []
for param, param_type, desc in self[name]:
param = param.strip()
if not self._obj or hasattr(self._obj, param):
autosum += [" %s%s" % (prefix, param)]
else:
others.append((param, param_type, desc))
if autosum:
# GAEL: Toctree commented out below because it creates
# hundreds of sphinx warnings
# out += ['.. autosummary::', ' :toctree:', '']
out += ['.. autosummary::', '']
out += autosum
if others:
maxlen_0 = max([len(x[0]) for x in others])
maxlen_1 = max([len(x[1]) for x in others])
hdr = "="*maxlen_0 + " " + "="*maxlen_1 + " " + "="*10
fmt = '%%%ds %%%ds ' % (maxlen_0, maxlen_1)
n_indent = maxlen_0 + maxlen_1 + 4
out += [hdr]
for param, param_type, desc in others:
out += [fmt % (param.strip(), param_type)]
out += self._str_indent(desc, n_indent)
out += [hdr]
out += ['']
return out
def _str_section(self, name):
out = []
if self[name]:
out += self._str_header(name)
out += ['']
content = textwrap.dedent("\n".join(self[name])).split("\n")
out += content
out += ['']
return out
def _str_see_also(self, func_role):
out = []
if self['See Also']:
see_also = super(SphinxDocString, self)._str_see_also(func_role)
out = ['.. seealso::', '']
out += self._str_indent(see_also[2:])
return out
def _str_warnings(self):
out = []
if self['Warnings']:
out = ['.. warning::', '']
out += self._str_indent(self['Warnings'])
return out
def _str_index(self):
idx = self['index']
out = []
if len(idx) == 0:
return out
out += ['.. index:: %s' % idx.get('default','')]
for section, references in idx.iteritems():
if section == 'default':
continue
elif section == 'refguide':
out += [' single: %s' % (', '.join(references))]
else:
out += [' %s: %s' % (section, ','.join(references))]
return out
def _str_references(self):
out = []
if self['References']:
out += self._str_header('References')
if isinstance(self['References'], str):
self['References'] = [self['References']]
out.extend(self['References'])
out += ['']
# Latex collects all references to a separate bibliography,
# so we need to insert links to it
if sphinx.__version__ >= "0.6":
out += ['.. only:: latex','']
else:
out += ['.. latexonly::','']
items = []
for line in self['References']:
m = re.match(r'.. \[([a-z0-9._-]+)\]', line, re.I)
if m:
items.append(m.group(1))
out += [' ' + ", ".join(["[%s]_" % item for item in items]), '']
return out
def _str_examples(self):
examples_str = "\n".join(self['Examples'])
if (self.use_plots and 'import matplotlib' in examples_str
and 'plot::' not in examples_str):
out = []
out += self._str_header('Examples')
out += ['.. plot::', '']
out += self._str_indent(self['Examples'])
out += ['']
return out
else:
return self._str_section('Examples')
def __str__(self, indent=0, func_role="obj"):
out = []
out += self._str_signature()
out += self._str_index() + ['']
        out += self._str_summary()
out += self._str_extended_summary()
for param_list in ('Parameters', 'Returns', 'Raises'):
out += self._str_param_list(param_list)
out += self._str_warnings()
out += self._str_see_also(func_role)
out += self._str_section('Notes')
out += self._str_references()
        out += self._str_examples()
for param_list in ('Attributes', 'Methods'):
out += self._str_member_list(param_list)
out = self._str_indent(out,indent)
return '\n'.join(out)
class SphinxFunctionDoc(SphinxDocString, FunctionDoc):
def __init__(self, obj, doc=None, config={}):
self.use_plots = config.get('use_plots', False)
FunctionDoc.__init__(self, obj, doc=doc, config=config)
class SphinxClassDoc(SphinxDocString, ClassDoc):
def __init__(self, obj, doc=None, func_doc=None, config={}):
self.use_plots = config.get('use_plots', False)
ClassDoc.__init__(self, obj, doc=doc, func_doc=None, config=config)
class SphinxObjDoc(SphinxDocString):
def __init__(self, obj, doc=None, config=None):
self._f = obj
SphinxDocString.__init__(self, doc, config=config)
def get_doc_object(obj, what=None, doc=None, config={}):
if what is None:
if inspect.isclass(obj):
what = 'class'
elif inspect.ismodule(obj):
what = 'module'
elif callable(obj):
what = 'function'
else:
what = 'object'
if what == 'class':
return SphinxClassDoc(obj, func_doc=SphinxFunctionDoc, doc=doc,
config=config)
elif what in ('function', 'method'):
return SphinxFunctionDoc(obj, doc=doc, config=config)
else:
if doc is None:
doc = pydoc.getdoc(obj)
return SphinxObjDoc(obj, doc, config=config)
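# Usage sketch (illustrative): get_doc_object dispatches on the object's kind,
# e.g. get_doc_object(numpy.mean) returns a SphinxFunctionDoc, and str() on the
# result yields the reST-formatted docstring.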
|
eliadl/talks
|
20170613-mock/code/ggee/tests/test_i18n.py
|
Python
|
gpl-3.0
| 2,534
| 0.002815
|
# -*- coding: utf-8 -*-
try:
from unittest import mock
except ImportError:
import mock
from guessing import i18n
@mock.patch.dict('guessing.i18n.environ', LC_MESSAGES='he_IL')
def test_lang_lc_message_he():
assert i18n.lang() == 'he'
def test_lang_default():
with mock.patch.dict('guessing.i18n.environ', clear=True):
assert i18n.environ == {}
assert i18n.lang() == 'C'
def test_is_lang_en():
with mock.patch.dict('guessing.i18n.environ',
dict(LC_ALL='en_US',
LC_MESSAGES='en_US',
LANG='en_US')):
assert i18n.lang() == 'en'
@mock.patch('guessing.i18n.lang')
def test_T_default(mock_lang):
mock_lang.return_value = 'C'
assert i18n.T('hello') == 'hello'
@mock.patch('guessing.i18n.CLIENT') #, spec=mock.create_autospec(i18n.CLIENT))
@mock.patch('guessing.i18n.lang')
def test_T_hebrew(mock_lang, mock_CLIENT):
mock_CLIENT.translate.return_value = [dict(translatedText='שלום')]
mock_lang.return_value = 'he'
assert i18n.T('hello') == 'שלום'
def test_is_quit_english():
with mock.patch('guessing.i18n.lang', return_value='en'):
assert i18n.is_quit('QUIT')
assert i18n.is_quit(' stop ')
assert i18n.is_quit('exit')
assert not i18n.is_quit('')
assert not i18n.is_quit('I want my Mummy!')
assert not i18n.is_quit('q')
assert not i18n.is_quit('די')
@mock.patch('guessing.i18n.lang')
def test_is_quit_hebrew(mock_lang):
mock_lang.return_value = 'he'
assert i18n.is_quit('די')
assert i18n.is_quit('מספיק')
assert i18n.is_quit('צא בחוץ')
assert not i18n.is_quit('')
assert not i18n.is_quit('אני רוצה לאימא')
assert not i18n.is_quit('quit')
@mock.patch('guessing.i18n.lang', return_value='en')
def test_is_yes_english(_):
assert i18n.is_yes('')
assert i18n.is_yes(' yes')
assert i18n.is_yes('y')
assert i18n.is_yes('YEP ')
assert i18n.is_yes(' sure ')
assert not i18n.is_yes('no')
assert not i18n.is_yes('maybe')
assert not i18n.is_yes('you tell me')
assert not i18n.is_yes('כן')
def test_is_yes_hebrew():
with mock.patch('guessing.i18n.lang') as mock_lang:
mock_lang.return_value = 'he'
assert i18n.is_yes('')
assert i18n.is_yes('כן')
assert i18n.is_yes('כ')
assert i18n.is_yes(' בטח')
assert i18n.is_yes('סבבה')
assert not i18n.is_yes('yes')
|
odoo-arg/odoo_l10n_ar
|
l10n_ar_account_payment/tests/test_account_payment.py
|
Python
|
agpl-3.0
| 15,283
| 0.003075
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import set_up
from openerp.exceptions import ValidationError, UserError
from mock import mock
class TestAccountPayment(set_up.SetUp):
def test_payment_type_line_amount(self):
""" Intentamos cargar una linea de pago con distintos importes """
self.payment_line.amount = 1000.12
self.payment_line.amount = 0.1
with self.assertRaises(ValidationError):
self.payment_line.amount = 0
with self.assertRaises(ValidationError):
self.payment_line.amount = -230.5
def test_pos(self):
customer_payment = self.customer_payment.with_context(default_payment_type='inbound')
supplier_payment = self.customer_payment.with_context(default_payment_type='outbound')
assert customer_payment._get_default_pos() == self.pos_inbound.id
assert supplier_payment._get_default_pos() == self.pos_outbound.id
def test_onchange_payment_line(self):
self.customer_payment.amount = 10000
self.customer_payment.onchange_payment_type_line_ids()
assert self.customer_payment.amount == 500
def test_onchange_currency_id(self):
assert self.customer_payment.payment_type_line_ids
self.customer_payment.onchange_currency_id()
assert not self.customer_payment.payment_type_line_ids
def test_get_document_book(self):
        # Without a point of sale
with self.assertRaises(ValidationError):
self.customer_payment.get_document_book()
self.customer_payment.pos_ar_id = self.pos_inbound
assert self.customer_payment.get_document_book() == self.document_book_inbound
self.document_book_inbound.unlink()
        # Without the document book
with self.assertRaises(ValidationError):
self.customer_payment.get_document_book()
def test_get_pos(self):
assert self.customer_payment.get_pos(self.customer_payment.payment_type) == self.pos_inbound.id
self.document_book_inbound.unlink()
self.document_book_invoice.unlink()
self.pos_inbound.unlink()
assert not self.customer_payment.get_pos(self.customer_payment.payment_type)
def test_payment_method_vals(self):
vals = self.customer_payment.set_payment_methods_vals()
assert vals[0].get('amount') == 500
assert vals[0].get('account_id') == self.payment_type_transfer.account_id.id
def test_multiple_payment_methods_vals(self):
        # Create a new payment type - Cash
payment_type_cash = self.env['account.payment.type'].create({
'name': 'Efectivo',
'account_id': self.env.ref('l10n_ar.1_caja_pesos').id,
})
self.env['account.payment.type.line'].create({
'account_payment_type_id': payment_type_cash.id,
'payment_id': self.customer_payment.id,
'amount': 1000
})
vals = self.customer_payment.set_payment_methods_vals()
vals.sort(key=lambda x: x['amount'])
assert vals[0].get('amount') == 500
assert vals[0].get('account_id') == self.payment_type_transfer.account_id.id
assert vals[1].get('amount') == 1000
assert vals[1].get('account_id') == payment_type_cash.account_id.id
def test_grouped_payment_method_vals(self):
vals = self.customer_payment._get_payment_methods_vals()
assert vals[0].get('amount') == 500
assert vals[0].get('account_id') == self.payment_type_transfer.account_id.id
def test_grouped_multiple_payment_method_vals(self):
        # Add another transfer
self.env['account.payment.type.line'].create({
'account_payment_type_id': self.payment_type_transfer.id,
'payment_id': self.customer_payment.id,
'amount': 2000
})
        # Create a new payment type - Cash
payment_type_cash = self.env['account.payment.type'].create({
'name': 'Efectivo',
'account_id': self.env.ref('l10n_ar.1_caja_pesos').id,
})
self.env['account.payment.type.line'].create({
'account_payment_type_id': payment_type_cash.id,
'payment_id': self.customer_payment.id,
'amount': 1000
})
vals = self.customer_payment._get_payment_methods_vals()
vals.sort(key=lambda x: x['amount'])
assert vals[0].get('amount') == 1000
assert vals[0].get('account_id') == payment_type_cash.account_id.id
assert vals[1].get('amount') == 2500
assert vals[1].get('account_id') == self.payment_type_transfer.account_id.id
def test_invalid_account_payment_method(self):
vals = self.customer_payment.set_payment_methods_vals()
vals[0].pop('account_id')
with self.assertRaises(ValidationError):
self.customer_payment._validate_payment_vals(vals)
def test_invalid_amount_payment_method(self):
vals = self.customer_payment.set_payment_methods_vals()
vals[0].pop('amount')
with self.assertRaises(ValidationError):
self.customer_payment._validate_payment_vals(vals)
def test_onchange_payment_type(self):
payment = self.env['account.payment'].new({
'payment_type': 'transfer',
'pos_ar_id': self.pos_inbound,
'payment_type_line_ids': [(6, 0, [self.payment_line.id])]
})
payment.onchange_payment_type()
assert not payment.pos_ar_id
assert not payment.payment_type_line_ids
payment.payment_type = 'outbound'
payment.onchange_payment_type()
assert payment.pos_ar_id == self.pos_outbound
def test_has_number(self):
        # Before being validated it should not have a name
assert not self.customer_payment.has_number
        # After being validated it should already have the name
self.customer_payment.pos_ar_id = self.pos_inbound
self.customer_payment.post_l10n_ar()
self.customer_payment._set_has_number()
assert self.customer_payment.has_number
def test_old_post(self):
""" Validamos que la funcion vieja de post no se pueda ejecutar """
with self.assertRaises(ValidationError) as e:
self.customer_payment.post()
assert str(e.exception[0]) == 'Funcion de validacion de pago estandar deshabilitada'
def test_unlink(self):
""" Antes no se podia borrar un pago que alguna vez tuvo numero, en la localizacion si """
self.customer_payment.pos_ar_id = self.pos_inbound
self.customer_payment.post_l10n_ar()
self.customer_payment.cancel()
self.customer_payment.unlink()
def test_internal_transfer(self):
""" Probamos una transferencia entre cuentas internas, la cual deberia funcionar igual que el base """
self.customer_payment.write({
'pos_ar_id': self.pos_inbound.id,
'payment_type': 'transfer',
'destination_journal_id': self.env.ref('l10n_ar_account_payment.journal_cobros_y_pagos').id
})
self.customer_payment.post_l10n_ar()
sequence_code = 'account.payment.transfer'
sequence = self.env['ir.sequence'].with_context(ir_sequence_date=self.customer_payment.payment_date)\
.next_by_code(sequence_code)
        # Check that it is taking the numbering from the sequence and that the journal entry has the numbering
|
SylvainCecchetto/plugin.video.catchuptvandmore
|
plugin.video.catchuptvandmore/resources/lib/skeletons/cn_replay.py
|
Python
|
gpl-2.0
| 1,406
| 0.000711
|
# -*- coding: utf-8 -*-
"""
Catch-up TV & More
Copyright (C) 2016 SylvainCecchetto
    This file is part of Catch-up TV & More.
Catch-up TV & More is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Catch-up TV & More is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with Catch-up TV & More; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
# The unicode_literals import only has
# an effect on Python 2.
# It makes string literals unicode, as in Python 3
from __future__ import unicode_literals
from codequick import Script
"""
The following dictionaries describe
the addon's tree architecture.
* Key: item id
* Value: item infos
- route (folder)/resolver (playable URL): Callback function to run once this item is selected
- thumb: Item thumb path relative to "media" folder
    - fanart: Item fanart path relative to "media" folder
"""
menu = {}
|
ShanghaitechGeekPie/LBlogger
|
LBlogger/LBlogger/part_upload.py
|
Python
|
gpl-2.0
| 809
| 0.069221
|
#coding=utf-8
import string, random, time
from django.http import Http404
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
import os
def upload(request):
file_path='%s_' %(string.join(random.sample(['z','y','x','w','v','u','t','s','r','q','p','o','n','m','l','k','j','i','h','g','f','e','d','c','b','a','1','2','3','4','5','6','7','8','9','0'], 6)).replace(' ',''))
try:
        destination = open(os.getcwd()+'/upload/'+file_path+request.FILES['upload_file'].name,'wb+')
        for chunk in request.FILES['upload_file'].chunks():
destination.write(chunk)
destination.close()
return '/download/'+file_path+request.FILES['upload_file'].name
    except Exception:
return 'error'
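# Example (illustrative): a POST whose "upload_file" field is named report.pdf
# is written to <cwd>/upload/<6 random chars>_report.pdf and the function
# returns '/download/<6 random chars>_report.pdf'.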
|
TheDSCPL/SSRE_2017-2018_group8
|
Projeto/Python/cryptopy/crypto/keyedHash/hmacHash.py
|
Python
|
mit
| 3,613
| 0.014669
|
# -*- coding: utf-8 -*-
""" hmacHash.py
    Implementation of Request for Comments: 2104
HMAC: Keyed-Hashing for Message Authentication
HMAC is a mechanism for message authentication
using cryptographic hash functions. HMAC can be used with any
iterative cryptographic hash function, e.g., MD5, SHA-1, in
combination with a secret shared key. The cryptographic strength of
HMAC depends on the properties of the underlying hash function.
This implementation of HMAC uses a generic cryptographic 'hashFunction'
(self.H). Hash functions must conform to the crypto.hash method
conventions and are not directly compatible with the Python sha1 or md5 algorithms.
[IETF] RFC 2104 "HMAC: Keyed-Hashing for Message Authentication"
    >>> key = '\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
    >>> keyedHashAlg = HMAC(SHA1, key)
    >>> result = keyedHashAlg(data)
"""
from crypto.hash.hash import Hash
class HMAC(Hash):
""" To compute HMAC over the data `text' we perform
H(K XOR opad, H(K XOR ipad, text))
"""
def __init__(self, hashFunction, key = None):
""" initialize HMAC with hashfunction and optionally the key """
# should check for right type of function
self.H = hashFunction() # a new instance for inner hash
self.H_outer = hashFunction() # separate outer context to allow intermediate digests
self.B = self.H.raw_block_size # in bytes, note - hash block size typically 1
# and raw_block_size much larger
# e.g. raw_block_size is 64 bytes for SHA1 and MD5
self.name = 'HMAC_'+self.H.name
self.blocksize = 1 # single octets can be hashed by padding to raw block size
self.raw_block_size = self.H.raw_block_size
self.digest_size = self.H.digest_size
if key != None:
self.setKey(key)
else:
self.keyed = None
def setKey(self,key):
""" setKey(key) ... key is binary string """
if len(key) > self.B: # if key is too long then hash it
key = self.H(key) # humm... this is odd, hash can be smaller than B
else: # should raise error on short key, but breaks tests :-(
            key = key + (self.B-len(key)) * chr(0)
self.k_xor_ipad = ''.join([chr(ord(bchar)^0x36) for bchar in key])
self.k_xor_opad = ''.join([chr(ord(bchar)^0x5C) for bchar in key])
self.keyed = 1
self.reset()
def reset(self):
self.H.reset()
if self.keyed == None :
            raise ValueError('no key defined')
self.H.update(self.k_xor_ipad) # start inner hash with key xored with ipad
# outer hash always called as one full pass (no updates)
def update(self,data):
if self.keyed == None :
            raise ValueError('no key defined')
self.H.update(data)
def digest(self):
        if self.keyed == None :
            raise ValueError('no key defined')
return self.H_outer(self.k_xor_opad+self.H.digest())
from crypto.hash.sha1Hash import SHA1
class HMAC_SHA1(HMAC):
""" Predefined HMAC built on SHA1 """
def __init__(self, key = None):
""" optionally initialize with key """
        HMAC.__init__(self,SHA1,key)
from crypto.hash.md5Hash import MD5
class HMAC_MD5(HMAC):
""" Predefined HMAC built on SHA1 """
def __init__(self, key = None):
""" optionally initialize with key """
HMAC.__init__(self,MD5,key)
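# Sanity-check sketch (illustrative; first HMAC-MD5 test vector from RFC 2104):
#   key  = '\x0b' * 16
#   data = 'Hi There'
#   HMAC_MD5(key)(data) should yield the digest 0x9294727a3638bb1c13f48ef8158bfc9d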
|
chrys87/fenrir
|
src/fenrirscreenreader/commands/commands/quit_fenrir.py
|
Python
|
lgpl-3.0
| 552
| 0.016304
|
#!/bin/python
# -*- coding: utf-8 -*-
# Fenrir TTY screen reader
# By Chrys, Storm Dragon, and contributors.
from fenrirscreenreader.core import debug
class command():
def __init__(self):
pass
def initialize(self, environment):
self.env = environment
def shutdown(self):
pass
def getDescription(self):
return _('exits Fenrir')
def run(self):
        self.env['runtime']['eventManager'].stopMainEventLoop()
def setCallback(self, callback):
pass
|
makcedward/nlpaug
|
nlpaug/augmenter/spectrogram/loudness.py
|
Python
|
mit
| 1,919
| 0.00938
|
import numpy as np
from nlpaug.augmenter.spectrogram import SpectrogramAugmenter
from nlpaug.util import Action
import nlpaug.model.spectrogram as nms
class LoudnessAug(SpectrogramAugmenter):
"""
    Augmenter that changes loudness on a mel spectrogram by random values.
    :param tuple zone: Default value is (0.2, 0.8). Assign a zone for augmentation. By default, no augmentation
        will be applied in the first 20% and last 20% of the whole audio.
    :param float coverage: Default value is 1 and value should be between 0 and 1. Portion of augmentation.
        If `1` is assigned, the augment operation will be applied to the whole target audio segment. For example,
        if the audio duration is 60 seconds while zone and coverage are (0.2, 0.8) and 0.7 respectively, 42
        seconds ((0.8-0.2)*0.7*60) of audio will be augmented.
:param tuple factor: Default value is (0.5, 2). Volume change value will be picked within the range of this
tuple value. Volume will be reduced if value is between 0 and 1. Otherwise, volume will be increased.
:param str name: Name of this augmenter
"""
def __init__(self, name='Loudness_Aug', zone=(0.2, 0.8), coverage=1., factor=(0.5, 2), verbose=0,
silence=False, stateless=True):
super().__init__(action=Action.SUBSTITUTE, zone=zone, coverage=coverage, factor=factor,
verbose=verbose, name=name, silence=silence, stateless=stateless)
self.model = nms.Loudness()
def substitute(self, data):
# https://arxiv.org/pdf/2001.01401.pdf
loudness_level = self.get_random_factor()
time_start, time_end = self.get_augment_range_by_coverage(data)
if not self.stateless:
self.time_start, self.time_end, self.loudness_level = time_start, time_end, loudness_level
return self.model.manipulate(data, loudness_level=loudness_level, time_start=time_start, time_end=time_end)
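# --- Added worked example (plain Python, independent of nlpaug internals):
# the zone/coverage arithmetic described in the docstring above.
if __name__ == '__main__':
    duration = 60.0                            # seconds of audio
    zone = (0.2, 0.8)                          # slice eligible for augmentation
    coverage = 0.7                             # fraction of that slice augmented
    eligible = (zone[1] - zone[0]) * duration  # 36.0 seconds
    augmented = eligible * coverage            # 25.2 seconds
    print(augmented)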
|
esthermm/odoomrp-wip
|
mrp_repair_full_editable/models/mrp_repair.py
|
Python
|
agpl-3.0
| 755
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class MrpRepair(models.Model):
_inherit = 'mrp.repair'
fees_lines = fields.One2many(readonly=False)
    operations = fields.One2many(readonly=False)
@api.multi
@api.onchange('product_id')
def onchange_product_id(self, product_id=None):
res = super(MrpRepair, self).onchange_product_id(product_id)
if not self.partner_id:
res['value']['pricelist_id'] = self.env.ref('product.list0')
return res
|
tdyas/pants
|
src/python/pants/backend/codegen/thrift/java/register.py
|
Python
|
apache-2.0
| 874
| 0.002288
|
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
"""Generate Java targets from Thrift.
See https://thrift.apache.org.
"""
from pants.backend.codegen.thrift.java.apache_thrift_java_gen import ApacheThriftJavaGen
from pants.backend.codegen.thrift.java.java_thrift_library import (
JavaThriftLibrary as JavaThriftLibraryV1,
)
from pants.backend.codegen.thrift.java.target_types import JavaThriftLibrary
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.goal.task_registrar import TaskRegistrar as task
def build_file_aliases():
return BuildFileAliases(targets={"java_thrift_library": JavaThriftLibraryV1})
def register_goals():
task(name="thrift-java", action=ApacheThriftJavaGen).install("gen")
def target_types():
return [JavaThriftLibrary]
|
anconaesselmann/ClassesAndTests
|
classes_and_testsTest/DocumentationFromUnitTestsTestData/DataSet4_py_ClassFileTest.py
|
Python
|
mit
| 1,206
| 0.008292
|
class DataClassFileTests():
def test_functionName1_test_case_4(self):
# Given: First test function line one
        obj = aClass()
expected = "Some result"
parameter1 = False
# When: First test function line two
result = obj.functionName1(parameter1)
# Then: First test function line three
        self.assertEquals(expected, result)
    def test_functionName1_second_test_function(self):
# Given: Second test function Given: line
        obj = aClass()
expected = "Some result"
parameter1 = False
# When: Second test function When: line
result = obj.functionName1(parameter1)
        # Then: Second test function Then: line
        self.assertEquals(expected, result)
def test_functionName1_third_test_function(self):
# Given: Last test function third from last line
        obj = aClass()
expected = "Some result"
parameter1 = False
# When: Last test function second from last line
result = obj.functionName1(parameter1)
# Then: Last test function last line
        self.assertEquals(expected, result)
|
C4ptainCrunch/info-f-309
|
webview/documents/migrations/0003_auto_20160417_0946.py
|
Python
|
agpl-3.0
| 541
| 0.001848
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-17 09:46
from __future__ import unicode_literals
from django.db import migrations, models
import documents.models
class Migration(migrations.Migration):
dependencies = [
('documents', '0002_auto_20160417_0749'),
]
operations = [
        migrations.AlterField(
model_name='document',
name='zipFile',
field=models.FileField(upload_to='uploads/', validators=[documents.models.validate_file_extension]),
),
]
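# --- Added sketch (hypothetical): the real validate_file_extension lives in
# documents/models.py and is not shown in this migration; a typical validator
# of that kind looks like the following. ValidationError is standard Django API.
import os
from django.core.exceptions import ValidationError

def _example_validate_file_extension(value):
    # Reject any upload whose extension is not .zip.
    ext = os.path.splitext(value.name)[1].lower()
    if ext != '.zip':
        raise ValidationError(u'Only .zip archives are accepted.')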
|
mzdaniel/oh-mainline
|
vendor/packages/Django/tests/modeltests/properties/models.py
|
Python
|
agpl-3.0
| 567
| 0.001764
|
"""
22. Using properties on models
Use properties on models just like on any other Python object.
"""
from django.db import models
class Person(models.Model):
first_name = models.CharField(max_length=30)
last_name = models.CharField(max_length=30)
def _get_full_name(self):
return "%s %s" % (self.first_name, self.la
|
st_name)
def _set_full_name(self, combined_name):
self.first_name, self.last_name = combined_name.split(' ', 1)
full_name = property(_get_full_name)
full_name_2 = property(_get_full_name, _set_full_name)
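# --- Added usage sketch (not part of the original test models): full_name is
# read-only, while full_name_2 also installs _set_full_name as a setter.
#
#     >>> p = Person(first_name='John', last_name='Lennon')
#     >>> p.full_name
#     'John Lennon'
#     >>> p.full_name_2 = 'Paul McCartney'
#     >>> p.first_name, p.last_name
#     ('Paul', 'McCartney')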
|
ediston/energi
|
qa/pull-tester/rpc-tests.py
|
Python
|
mit
| 8,724
| 0.003324
|
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Run Regression Test Suite
This module calls down into individual test cases via subprocess. It will
forward all unrecognized arguments onto the individual test scripts, other
than:
- `-extended`: run the "extended" test suite in addition to the basic one.
- `-win`: signal that this is running in a Windows environment, and we
should run the tests.
- `--coverage`: this generates a basic coverage report for the RPC
interface.
For a description of arguments recognized by test scripts, see
`qa/pull-tester/test_framework/test_framework.py:BitcoinTestFramework.main`.
"""
import os
import time
import shutil
import sys
import subprocess
import tempfile
import re
from tests_config import *
#If imported values are not defined then set to zero (or disabled)
if 'ENABLE_WALLET' not in vars():
ENABLE_WALLET=0
if 'ENABLE_BITCOIND' not in vars():
ENABLE_BITCOIND=0
if 'ENABLE_UTILS' not in vars():
    ENABLE_UTILS=0
if 'ENABLE_ZMQ' not in vars():
ENABLE_ZMQ=0
ENABLE_COVERAGE=0
#Create a set to store arguments and create the passOn string
opts = set()
passOn = ""
p = re.compile("^--")
bold = ("","")
if (os.name == 'posix'):
bold = ('\033[0m', '\033[1m')
for arg in sys.argv[1:]:
if arg == '--coverage':
ENABLE_COVERAGE = 1
elif (p.match(arg) or arg == "-h"):
passOn += " " + arg
else:
opts.add(arg)
#Set env vars
buildDir = BUILDDIR
if "DASHD" not in
|
os.environ:
os.environ["DASHD"] = buildDir + '/src/dashd' + EXEEXT
if "DASHCLI" not in os.environ:
os.environ["DASHCLI"] = buildDir + '/src/dash-cli' + EXEEXT
if EXEEXT == ".exe" and "-win" not in opts:
# https://github.com/bitcoin/bitcoin/commit/d52802551752140cf41f0d9a225a43e84404d3e9
# https://github.com/bitcoin/bitcoin/pull/5677#issuecomment-136646964
print "Win tests currently disabled by default. Use -win option to enable"
sys.exit(0)
if not (ENABLE_WALLET == 1 and ENABLE_UTILS == 1 and ENABLE_BITCOIND == 1):
print "No rpc tests to run. Wallet, utils, and bitcoind must all be enabled"
sys.exit(0)
# python-zmq may not be installed. Handle this gracefully and with some helpful info
if ENABLE_ZMQ:
try:
import zmq
except ImportError as e:
print("ERROR: \"import zmq\" failed. Set ENABLE_ZMQ=0 or " \
"to run zmq tests, see dependency info in /qa/README.md.")
raise e
#Tests
testScripts = [
'bip68-112-113-p2p.py',
'wallet.py',
'wallet-hd.py',
'listtransactions.py',
'receivedby.py',
'mempool_resurrect_test.py',
'txn_doublespend.py --mineblock',
'txn_clone.py',
'getchaintips.py',
'rawtransactions.py',
'rest.py',
'mempool_spendcoinbase.py',
'mempool_reorg.py',
'mempool_limit.py',
'httpbasics.py',
'multi_rpc.py',
'zapwallettxes.py',
'proxy_test.py',
'merkle_blocks.py',
'fundrawtransaction.py',
'signrawtransactions.py',
'walletbackup.py',
'nodehandling.py',
'reindex.py',
'addressindex.py',
'timestampindex.py',
'spentindex.py',
'decodescript.py',
'p2p-fullblocktest.py', # NOTE: needs dash_hash to pass
'blockchain.py',
'disablewallet.py',
'sendheaders.py', # NOTE: needs dash_hash to pass
'keypool.py',
'prioritise_transaction.py',
'invalidblockrequest.py', # NOTE: needs dash_hash to pass
'invalidtxrequest.py', # NOTE: needs dash_hash to pass
'abandonconflict.py',
'p2p-versionbits-warning.py',
]
if ENABLE_ZMQ:
testScripts.append('zmq_test.py')
testScriptsExt = [
'bip9-softforks.py',
'bip65-cltv.py',
'bip65-cltv-p2p.py', # NOTE: needs dash_hash to pass
'bip68-sequence.py',
'bipdersig-p2p.py', # NOTE: needs dash_hash to pass
'bipdersig.py',
'getblocktemplate_longpoll.py', # FIXME: "socket.error: [Errno 54] Connection reset by peer" on my Mac, same as https://github.com/bitcoin/bitcoin/issues/6651
'getblocktemplate_proposals.py',
'txn_doublespend.py',
'txn_clone.py --mineblock',
# 'pruning.py', # Prune mode is incompatible with -txindex.
'forknotify.py',
'invalidateblock.py',
# 'rpcbind_test.py', #temporary, bug in libevent, see #6655
'smartfees.py',
'maxblocksinflight.py',
'p2p-acceptblock.py', # NOTE: needs dash_hash to pass
'mempool_packages.py',
'maxuploadtarget.py',
# 'replace-by-fee.py', # RBF is disabled in Dash Core
]
def runtests():
coverage = None
if ENABLE_COVERAGE:
coverage = RPCCoverage()
print("Initializing coverage directory at %s\n" % coverage.dir)
rpcTestDir = buildDir + '/qa/rpc-tests/'
run_extended = '-extended' in opts
cov_flag = coverage.flag if coverage else ''
flags = " --srcdir %s/src %s %s" % (buildDir, cov_flag, passOn)
#Run Tests
for i in range(len(testScripts)):
if (len(opts) == 0
or (len(opts) == 1 and "-win" in opts )
or run_extended
or testScripts[i] in opts
or re.sub(".py$", "", testScripts[i]) in opts ):
print("Running testscript %s%s%s ..." % (bold[1], testScripts[i], bold[0]))
time0 = time.time()
subprocess.check_call(
rpcTestDir + testScripts[i] + flags, shell=True)
print("Duration: %s s\n" % (int(time.time() - time0)))
# exit if help is called so we print just one set of
# instructions
p = re.compile(" -h| --help")
if p.match(passOn):
sys.exit(0)
# Run Extended Tests
for i in range(len(testScriptsExt)):
if (run_extended or testScriptsExt[i] in opts
or re.sub(".py$", "", testScriptsExt[i]) in opts):
print(
"Running 2nd level testscript "
+ "%s%s%s ..." % (bold[1], testScriptsExt[i], bold[0]))
time0 = time.time()
subprocess.check_call(
rpcTestDir + testScriptsExt[i] + flags, shell=True)
print("Duration: %s s\n" % (int(time.time() - time0)))
if coverage:
coverage.report_rpc_coverage()
print("Cleaning up coverage data")
coverage.cleanup()
class RPCCoverage(object):
"""
Coverage reporting utilities for pull-tester.
Coverage calculation works by having each test script subprocess write
coverage files into a particular directory. These files contain the RPC
commands invoked during testing, as well as a complete listing of RPC
commands per `bitcoin-cli help` (`rpc_interface.txt`).
After all tests complete, the commands run are combined and diff'd against
the complete list to calculate uncovered RPC commands.
See also: qa/rpc-tests/test_framework/coverage.py
"""
def __init__(self):
self.dir = tempfile.mkdtemp(prefix="coverage")
self.flag = '--coveragedir %s' % self.dir
def report_rpc_coverage(self):
"""
Print out RPC commands that were unexercised by tests.
"""
uncovered = self._get_uncovered_rpc_commands()
if uncovered:
print("Uncovered RPC commands:")
print("".join((" - %s\n" % i) for i in sorted(uncovered)))
else:
print("All RPC commands covered.")
def cleanup(self):
return shutil.rmtree(self.dir)
def _get_uncovered_rpc_commands(self):
"""
Return a set of currently untested RPC commands.
"""
# This is shared from `qa/rpc-tests/test-framework/coverage.py`
REFERENCE_FILENAME = 'rpc_interface.txt'
COVERAGE_FILE_PREFIX = 'coverage.'
coverage_ref_filename = os.path.join(self.dir, REFERENCE_FILENAME)
coverage_filenames = set()
all_cmds = set()
covered_cmds = set()
if not os.path.isfile(coverage_ref_filename):
raise RuntimeError("No coverage reference found")
        with open(coverage_ref_filename, 'r') as f:
            all_cmds.update([i.strip() for i in f.readlines()])

        for root, dirs, files in os.walk(self.dir):
            for filename in files:
                if filename.startswith(COVERAGE_FILE_PREFIX):
                    coverage_filenames.add(os.path.join(root, filename))

        for filename in coverage_filenames:
            with open(filename, 'r') as f:
                covered_cmds.update([i.strip() for i in f.readlines()])

        return all_cmds - covered_cmds
|
thamada/tool-private
|
commi.py
|
Python
|
mit
| 5,969
| 0.005726
|
#!/usr/bin/env python2.7
# -*- coding:utf-8 -*-
#
# Copyright (c) 2017 by Tsuyoshi Hamada. All rights reserved.
#
import os
import logging as LG
import random
import commands
import shelve
import pickle
import sys
import hashlib
import re as REGEXP
# -- set encode for your terminal --
config_term_encode = 'euc-jp'
# -- set filename for your database --
config_db_filename = '/t m p/g i t commit- '
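# (the embedded spaces above are deliberate: they are stripped later by
# cut_space_str() before the filename is used)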
def get_logger(str_position = ''):
log_basename = __file__
# Don't use Python's hasattr()
# unless you're writing Python 3-only code
# and understand how it works.
if getattr(get_logger, "__count_called", None) is not None:
log_basename = "%s @%s" % (__file__, str_position)
get_logger.__count_called = get_logger.__count_called + 1
'''
print "----------------- %d times called!!" % (get_logger.__count_called)
'''
else:
get_logger.__count_called = 1
'''
print "----------------- first time called!!"
'''
# create logger
logger = LG.getLogger(os.path.basename(log_basename))
logger.setLevel(LG.DEBUG)
# create console handler and set level to debug
ch = LG.StreamHandler()
ch.setLevel(LG.DEBUG)
# create formatter
formatter = LG.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# add formatter to ch
ch.setFormatter(formatter)
# add ch to logger
logger.addHandler(ch)
# 'application' code
## logger.debug('debug message')
## logger.info('info message')
## logger.warn('warn message')
## logger.error('error message')
## logger.critical('critical message')
return logger
def get_quotes():
result = [ ]
result.append(u"生きる -- 谷川俊太郎")
# --
result.append(u"生きているということ")
result.append(u"いま生きているということ")
result.append(u"それはのどがかわくということ")
result.append(u"木漏れ日がまぶしいということ")
result.append(u"ふっと或るメロディを思い出すということ")
result.append(u"くしゃみをすること")
result.append(u"あなたと手をつなぐこと")
# --
result.append(u"生きているということ")
result.append(u"いま生きているということ")
result.append(u"それはミニスカート")
result.append(u"それはプラネタリウム")
result.append(u"それはヨハン・シュトラウス")
result.append(u"それはピカソ")
result.append(u"それはアルプス")
result.append(u"すべての美しいものに出会うということ")
    result.append(u"そして")
result.append(u"かくされた悪を注意深くこばむこと")
# --
result.append(u"生きているということ")
result.append(u"いま生きているということ")
result.append(u"泣けるということ")
result.append(u"笑えるということ")
result.append(u"怒れるということ")
result.append(u"自由ということ")
# --
result.append(u"生きているということ")
result.append(u"いま生きているということ")
result.append(u"いま遠くで犬が吠えるということ")
result.append(u"いま地球が廻っているということ")
result.append(u"いまどこかで産声があがるということ")
result.append(u"いまどこかで兵士が傷つくということ")
result.append(u"い
|
まぶらんこがゆれているということ")
result.append(u"いまいまがすぎてゆくこと")
# --
result.append(u"生きているということ")
result.append(u"いま生きてるということ")
result.append(u"鳥ははばたくということ")
result.append(u"海はとどろくということ")
result.append(u"かたつむりははうということ")
result.append(u"人は愛するということ")
result.append(u"あなたの手のぬくみ")
result.append(u"いのちということ")
result.append(u":-) ;-)")
return result
def get_shelve(fname, logger=None):
if logger is None: logger = get_logger('get_shelve()')
keyname = 'count'
pickle_protocol = pickle.HIGHEST_PROTOCOL
try :
dic = shelve.open(fname, protocol=pickle_protocol)
except Exception as e:
logger.error(e)
logger.error(fname)
sys.exit(-1)
keys = dic.keys()
if keyname not in keys: dic[keyname] = 0
count = dic[keyname]
dic[keyname] = count + 1
dic.close()
return count
def do_uncompress(filename, logger=None):
if logger is None: logger = get_logger('do_uncompress()')
check = commands.getoutput("hostname;time bzip2 -d %s.db.bz2" % filename )
# logger.debug("%s", check)
return True
def do_compress(filename, logger=None):
if logger is None: logger = get_logger('do_compress()')
check = commands.getoutput("hostname;time bzip2 -9 %s.db" % filename )
# logger.debug("%s", check)
return True
def get_id_git(logger=None):
if logger is None: logger = get_logger('get_id_git()')
check = commands.getoutput("git remote -v")
# logger.debug(check)
md5 = hashlib.md5()
md5.update(check)
md5sum = md5.hexdigest()
# logger.debug(md5sum)
return md5sum
def cut_space_str(str):
return REGEXP.sub(r' +', '', str)
if __name__ == "__main__":
msg = ''
logger = get_logger()
md5sum = get_id_git()
db_filename = cut_space_str(config_db_filename + md5sum)
do_uncompress(db_filename)
count = get_shelve(db_filename)
do_compress(db_filename)
qs = get_quotes()
msg = ("%d: %s" % (count+1, qs[count % len(qs)]))
logger.info('# %s', db_filename.encode(config_term_encode))
logger.info('# %s', msg.encode(config_term_encode))
cmd = 'git commit -m "' + msg + '"; git push origin master;'
print cmd.encode(config_term_encode)
|
Xunius/evernote2zim
|
lib/markdown2zim.py
|
Python
|
gpl-3.0
| 43,705
| 0.004896
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
"""Convert markdown to zim wiki syntax.
Stripped and modified from markdown2.py
Syntax converted:
type Markdown -> Zim
----------------------------------------------------
Heading1 # heading ===== heading =====
Heading2 ## heading ==== heading ====
Heading3 ### heading === heading ===
Heading4 #### heading == heading ==
Heading5 ##### heading = heading =
Heading6 ###### heading = heading =
----------------------------------------------------
unordered list -/+/* *
ordered list 1. 2. 3. 1. 2. 3.
----------------------------------------------------
bold **bold** **bold**
__bold__ __bold__
italic *italic* //italic//
_italic_ //italic//
strike ~~strike~~ ~~strike~~
----------------------------------------------------
quote > '''
texts... texts...
'''
code ``` ```
texts... texts...
``` ```
----------------------------------------------------
inline link [link](url) [[url|link]]
----------------------------------------------------
ref link [link text][id]
[id]:url "title" [[url|link]]
----------------------------------------------------
inline image  {{url}}
----------------------------------------------------
ref image ![img text][id]
[id]:url "title" {{url}}
Syntax not supported:
- footnote
- tables
Update time: 2016-03-21 21:17:19.
"""
import re
import sys,os
import argparse
from lib import tools
try:
from hashlib import md5
except ImportError:
from md5 import md5
from random import random, randint
# Use `bytes` for byte strings and `unicode` for unicode strings (str in Py3).
if sys.version_info[0] <= 2:
py3 = False
try:
bytes
except NameError:
bytes = str
base_string_type = basestring
elif sys.version_info[0] >= 3:
py3 = True
unicode = str
base_string_type = str
#---- globals
DEBUG = False
DEFAULT_TAB_WIDTH = 4
SECRET_SALT = bytes(randint(0, 1000000))
def _hash_text(s):
return 'md5-' + md5(SECRET_SALT + s.encode("utf-8")).hexdigest()
g_escape_table = dict([(ch, _hash_text(ch))
for ch in '\\`*_{}[]()>#+-.!'])
class Markdown2Zim(object):
urls = None
titles = None
# Used to track when we're inside an ordered or unordered list
# (see _ProcessListItems() for details):
list_level = 0
_ws_only_line_re = re.compile(r"^[ \t]+$", re.M)
def __init__(self, html4tags=False, tab_width=4):
self.tab_width = tab_width
self._outdent_re = re.compile(r'^(\t|[ ]{1,%d})' % tab_width, re.M)
self._escape_table = g_escape_table.copy()
def reset(self):
self.urls = {}
self.titles = {}
self.list_level = 0
self.footnotes = {}
self.footnote_ids = []
def convert(self, text):
"""Convert the given text."""
# Main function. The order in which other subs are called here is
# essential. Link and image substitutions need to happen before
# _EscapeSpecialChars(), so that any *'s or _'s in the <a>
# and <img> tags get encoded.
# Clear the global hashes. If we don't clear these, you get conflicts
# from other articles when generating a page which contains more than
# one article (e.g. an index page that shows the N most recent
# articles):
self.reset()
if not isinstance(text, unicode):
text = unicode(text, 'utf-8')
# Standardize line endings:
text = re.sub("\r\n|\r", "\n", text)
# Make sure $text ends with a couple of newlines:
text += "\n\n"
# Convert all tabs to spaces.
#text = self._detab(text)
# Strip any lines consisting only of spaces and tabs.
# This makes subsequent regexen easier to write, because we can
# match consecutive blank lines with /\n+/ instead of something
# contorted like /[ \t]*\n+/ .
text = self._ws_only_line_re.sub("", text)
text = self._do_fenced_code_blocks(text)
# Strip link definitions, store in hashes.
# Must do footnotes first because an unlucky footnote defn
# looks like a link defn:
# [^4]: this "looks like a link defn"
text = self._strip_footnote_definitions(text)
text = self._strip_link_definitions(text)
#text = self._strip_img_definitions(text)
text = self._run_block_gamut(text)
text = self._add_footnotes(text)
text += "\n"
return text
_detab_re = re.compile(r'(.*?)\t', re.M)
def _detab_sub(self, match):
g1 = match.group(1)
return g1 + (' ' * (self.tab_width - len(g1) % self.tab_width))
def _detab(self, text):
r"""Remove (leading?) tabs from a file.
>>> m = Markdown()
>>> m._detab("\tfoo")
' foo'
>>> m._detab(" \tfoo")
' foo'
>>> m._detab("\t foo")
' foo'
>>> m._detab(" foo")
' foo'
>>> m._detab(" foo\n\tbar\tblam")
' foo\n bar blam'
"""
if '\t' not in text:
return text
return self._detab_re.subn(self._detab_sub, text)[0]
def _strip_link_definitions(self, text):
# Strips link definitions from text, stores the URLs and titles in
# hash references.
less_than_tab = self.tab_width - 1
# Link defs are in the form:
# [id]: url "optional title"
_link_def_re = re.compile(r"""
^[ ]{0,%d}\[(.+)\]: # id = \1
[ \t]*
\n? # maybe *one* newline
[ \t]*
<?(.+?)>? # url = \2
[ \t]*
(?:
\n? # maybe one newline
[ \t]*
(?<=\s) # lookbehind for whitespace
['"(]
([^\n]*) # title = \3
['")]
[ \t]*
)? # title is optional
(?:\n+|\Z)
""" % less_than_tab, re.X | re.M | re.U)
return _link_def_re.sub(self._extract_link_def_sub, text)
def _strip_img_definitions(self, text):
# Strips img definitions from text, stores the URLs and titles in
# hash references.
# Link defs are in the form:
# ![id]: url "optional title"
_link_def_re = re.compile(r"""
![ ]*\[(.*?)\] # id = \1
[ \t]*
\((.+?)\) # url = \2
[ \t]*
(?:\n+|\Z)
""", re.X | re.M | re.U | re.S)
return _link_def_re.sub(self._extract_img_def_sub, text)
def _extract_img_def_sub(self, match):
id, url = match.groups()
key = id.lower() # Link IDs are case-insensitive
if key=='':
key=str(len(self.urls))
self.urls[key] = self._encode_amps_and_angles(url)
#if title:
#self.titles[key] = title
return ""
# Ampersand-encoding based entirely on Nat Irons's Amputator MT plugin:
# http://bumppo.net/projects/amputator/
_ampersand_re = re.compile(r'&(?!#?[xX]?(?:[0-9a-fA-F]+|\w+);)')
_naked_lt_re = re.compile(r'<(?![a-z/?\$!])', re.I)
_naked_gt_re = re.compile(r'''(?<![a-z0-9?!/'"-])>''', re.I)
def _encode_amps_and_angles(self, text):
# Smart processing for ampersands and angle brackets that need
# to be encoded.
        text = self._ampersand_re.sub('&amp;', text)
        # Encode naked <'s and >'s.
        text = self._naked_lt_re.sub('&lt;', text)
        text = self._naked_gt_re.sub('&gt;', text)
        return text
|
cortext/crawtextV2
|
~/venvs/crawler/lib/python2.7/site-packages/setuptools/command/easy_install.py
|
Python
|
mit
| 74,243
| 0.002667
|
#!/usr/bin/env python
"""
Easy Install
------------
A tool for doing automatic download/extract/build of distutils-based Python
packages. For detailed documentation, see the accompanying EasyInstall.txt
file, or visit the `EasyInstall home page`__.
__ https://pythonhosted.org/setuptools/easy_install.html
"""
import sys
import os
import zipimport
import shutil
import tempfile
import zipfile
import re
import stat
import random
import platform
import textwrap
import warnings
import site
import struct
from glob import glob
from distutils import log, dir_util
from distutils.command.build_scripts import first_line_re
import pkg_resources
from setuptools import Command, _dont_write_bytecode
from setuptools.sandbox import run_setup
from setuptools.py31compat import get_path, get_config_vars
from distutils.util import get_platform
from distutils.util import convert_path, subst_vars
from distutils.errors import DistutilsArgError, DistutilsOptionError, \
DistutilsError, DistutilsPlatformError
from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS
from setuptools.command import setopt
from setuptools.archive_util import unpack_archive
from setuptools.package_index import PackageIndex
from setuptools.package_index import URL_SCHEME
from setuptools.command import bdist_egg, egg_info
from setuptools.compat import (iteritems, maxsize, basestring, unicode,
reraise)
from pkg_resources import (
yield_lines, normalize_path, resource_string, ensure_directory,
get_distribution, find_distributions, Environment, Requirement,
Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound,
VersionConflict, DEVELOP_DIST,
)
sys_executable = os.environ.get('__PYVENV_LAUNCHER__',
os.path.normpath(sys.executable))
__all__ = [
'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
'main', 'get_exe_prefixes',
]
def is_64bit():
return struct.calcsize("P") == 8
def samefile(p1, p2):
both_exist = os.path.exists(p1) and os.path.exists(p2)
use_samefile = hasattr(os.path, 'samefile') and both_exist
if use_samefile:
return os.path.samefile(p1, p2)
norm_p1 = os.path.normpath(os.path.normcase(p1))
norm_p2 = os.path.normpath(os.path.normcase(p2))
return norm_p1 == norm_p2
if sys.version_info <= (3,):
def _to_ascii(s):
return s
def isascii(s):
try:
unicode(s, 'ascii')
return True
except UnicodeError:
return False
else:
def _to_ascii(s):
return s.encode('ascii')
def isascii(s):
try:
s.encode('ascii')
return True
except UnicodeError:
return False
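# --- Added quick checks (not part of setuptools): both branches of isascii
# above agree on these inputs.
assert isascii('plain ascii')
assert not isascii('na\xefve')  # '\xef' is outside ASCII on Python 2 and 3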
class easy_install(Command):
"""Manage a download/build/install process"""
description = "Find/get/install Python packages"
command_consumes_arguments = True
user_options = [
('prefix=', None, "installation prefix"),
("zip-ok", "z", "install package as a zipfile"),
("multi-version", "m", "make apps have to require() a version"),
("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
("install-dir=", "d", "install package to DIR"),
("script-dir=", "s", "install scripts to DIR"),
("exclude-scripts", "x", "Don't install scripts"),
("always-copy", "a", "Copy all needed packages to install dir"),
("index-url=", "i", "base URL of Python Package Index"),
("find-links=", "f", "additional URL(s) to search for packages"),
("build-directory=", "b",
"download/extract/build in DIR; keep the results"),
('optimize=', 'O',
"also compile with optimization: -O1 for \"python -O\", "
"-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
('record=', None,
"filename in which to record list of installed files"),
('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
('site-dirs=','S',"list of directories where .pth files work"),
('editable', 'e', "Install specified packages in editable form"),
('no-deps', 'N', "don't install dependencies"),
('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
('local-snapshots-ok', 'l',
"allow building eggs from local checkouts"),
('version', None, "print version information and exit"),
('no-find-links', None,
"Don't load find-links defined in packages being installed")
]
boolean_options = [
'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
'editable',
'no-deps', 'local-snapshots-ok', 'version'
]
if site.ENABLE_USER_SITE:
help_msg = "install in user site-package '%s'" % site.USER_SITE
user_options.append(('user', None, help_msg))
boolean_options.append('user')
    negative_opt = {'always-unzip': 'zip-ok'}
create_index = PackageIndex
def initialize_options(self):
if site.ENABLE_USER_SITE:
whereami = os.path.abspath(__file__)
            self.user = whereami.startswith(site.USER_SITE)
else:
self.user = 0
self.zip_ok = self.local_snapshots_ok = None
self.install_dir = self.script_dir = self.exclude_scripts = None
self.index_url = None
self.find_links = None
self.build_directory = None
self.args = None
self.optimize = self.record = None
self.upgrade = self.always_copy = self.multi_version = None
self.editable = self.no_deps = self.allow_hosts = None
self.root = self.prefix = self.no_report = None
self.version = None
self.install_purelib = None # for pure module distributions
self.install_platlib = None # non-pure (dists w/ extensions)
self.install_headers = None # for C/C++ headers
self.install_lib = None # set to either purelib or platlib
self.install_scripts = None
self.install_data = None
self.install_base = None
self.install_platbase = None
if site.ENABLE_USER_SITE:
self.install_userbase = site.USER_BASE
self.install_usersite = site.USER_SITE
else:
self.install_userbase = None
self.install_usersite = None
self.no_find_links = None
# Options not specifiable via command line
self.package_index = None
self.pth_file = self.always_copy_from = None
self.site_dirs = None
self.installed_projects = {}
self.sitepy_installed = False
# Always read easy_install options, even if we are subclassed, or have
# an independent instance created. This ensures that defaults will
# always come from the standard configuration file(s)' "easy_install"
# section, even if this is a "develop" or "install" command, or some
# other embedding.
self._dry_run = None
self.verbose = self.distribution.verbose
self.distribution._set_command_options(
self, self.distribution.get_option_dict('easy_install')
)
def delete_blockers(self, blockers):
for filename in blockers:
if os.path.exists(filename) or os.path.islink(filename):
log.info("Deleting %s", filename)
if not self.dry_run:
if os.path.isdir(filename) and not os.path.islink(filename):
rmtree(filename)
else:
os.unlink(filename)
def finalize_options(self):
if self.version:
print('setuptools %s' % get_distribution('setuptools').version)
sys.exit()
py_version = sys.version.split()[0]
prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix')
self.config_vars = {
'dist_name': self.distribution.get_name(),
'dist_version': self.distribution.get_version(),
'dist_fullname': self.distribution.get_fullname(),
'py_version': py_version,
'py_version_short': py
|
mvaled/sentry
|
src/sentry/grouping/strategies/security.py
|
Python
|
bsd-3-clause
| 1,936
| 0.002583
|
from __future__ import absolute_import
from sentry.grouping.component import GroupingComponent
from sentry.grouping.strategies.base import strategy
def _security_v1(reported_id, obj):
return GroupingComponent(
id=reported_id,
values=[
GroupingComponent(id="salt", values=[reported_id]),
GroupingComponent(id="hostname", values=[obj.hostname]),
],
)
@strategy(id="expect-ct:v1", interfaces=["expectct"], variants=["default"], score=1000)
def expect_ct_v1(expectct_interface, **meta):
return _security_v1("expect-ct", expectct_interface)
@strategy(id="expect-staple:v1", interfaces=["expectstaple"], variants=["default"], score=1001)
def expect_staple_v1(expectstaple_interface, **meta):
return _security_v1("expect-staple", expectstaple_interface)
@strategy(id="hpkp:v1", interfaces=["hpkp"], variants=["default"], sco
|
re=1002)
def hpkp_v1(hpkp_interface, **meta):
return _security_v1("hpkp", hpkp_interface)
@strategy(id="csp:v1", interfaces=["csp"], variants=["default"], score=1003)
def csp_v1(csp_interface, **meta):
violation_component = GroupingComponent(id="violation")
uri_component = GroupingComponent(id="uri")
if csp_interface.local_script_violation_type:
violation_component.update(values=["'%s'" % csp_interface.local_script_violation_type])
uri_component.update(
contributes=False,
hint="violation takes precedence",
values=[csp_interface.normalized_blocked_uri],
)
else:
violation_component.update(contributes=False, hint="not a local script violation")
uri_component.update(values=[csp_interface.normalized_blocked_uri])
return GroupingComponent(
id="csp",
values=[
GroupingComponent(id="salt", values=[csp_interface.effective_directive]),
violation_component,
uri_component,
],
)
|
akash1808/oslo.log
|
oslo_log/fixture/__init__.py
|
Python
|
apache-2.0
| 665
| 0
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from .logging_error import get_logging_handle_error_fixture
from .setlevel import SetLogLevel
|
kate-harrison/west
|
documentation/conf.py
|
Python
|
gpl-2.0
| 9,688
| 0.008567
|
# -*- coding: utf-8 -*-
#
# Whitespace Evaluation SofTware documentation build configuration file, created by
# sphinx-quickstart on Wed Jul 9 13:12:12 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('../west'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.pngmath',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc',
'sphinx.ext.graphviz',
'sphinx.ext.autosummary',
'sphinx.ext.intersphinx',
'sphinx.ext.inheritance_diagram',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Whitespace Evaluation SofTware'
copyright = u'2014, Kate Harrison'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'WhitespaceEvaluationSofTwaredoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto/manual]).
latex_documents = [
('index', 'WhitespaceEvaluationSofTware.tex', u'Whitespace Evaluation SofTware Documentation',
u'Kate Harrison', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'whitespaceevaluationsoftware', u'Whitespace Evaluation SofTware Documentation',
[u'Kate Harrison'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    ('index', 'WhitespaceEvaluationSofTware', u'Whitespace Evaluation SofTware Documentation',
     u'Kate Harrison', 'WhitespaceEvaluationSofTware', 'One line description of project.',
     'Miscellaneous'),
]
|
ayemos/osho
|
akagi/utils.py
|
Python
|
mit
| 414
| 0
|
import re
import six
from six import BytesIO
import gzip
def gzip_decompress(data):
if six.PY2:
in_io = BytesIO()
in_io.write(data.read())
in_io.seek(0)
return BytesIO(gzip.GzipFile(fileobj=in_io, mode='rb').read())
else:
return BytesIO(gzip.decompress(data.read()))
def normalize_path(path):
    return path and re.sub(r'^/', '', re.sub(r'\/{2,}', '/', path))
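# --- Added quick checks (not part of akagi): normalize_path collapses runs of
# slashes, then strips a single leading slash; falsy paths pass through.
assert normalize_path('/a//b///c') == 'a/b/c'
assert normalize_path('a/b') == 'a/b'
assert normalize_path('') == ''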
|
bladekp/DroniadaDjangoDronekitAPP
|
app/maps/urls.py
|
Python
|
mit
| 339
| 0.00295
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.render_map, name='render_map'),
    url(r'^getData/$', views.get_data, name='get_data'),
url(r'^saveDroneData/$', views.save_drone_data, name='save_drone_data'),
url(r'^saveBeaconData/$', views.save_beacon_data, name='save_beacon_data'),
]
|
jaidevd/scikit-learn
|
sklearn/svm/tests/test_svm.py
|
Python
|
bsd-3-clause
| 35,876
| 0.000167
|
"""
Testing for Support Vector Machine module (sklearn.svm)
TODO: remove hard coded numerical results when possible
"""
import numpy as np
import itertools
from numpy.testing import assert_array_equal, assert_array_almost_equal
from numpy.testing import assert_almost_equal
from numpy.testing import assert_allclose
from scipy import sparse
from sklearn import svm, linear_model, datasets, metrics, base
from sklearn.model_selection import train_test_split
from sklearn.datasets import make_classification, make_blobs
from sklearn.metrics import f1_score
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.utils import check_random_state
from sklearn.utils.testing import assert_equal, assert_true, assert_false
from sklearn.utils.testing import assert_greater, assert_in, assert_less
from sklearn.utils.testing import assert_raises_regexp, assert_warns
from sklearn.utils.testing import assert_warns_message, assert_raise_message
from sklearn.utils.testing import ignore_warnings, assert_raises
from sklearn.exceptions import ConvergenceWarning
from sklearn.exceptions import NotFittedError
from sklearn.multiclass import OneVsRestClassifier
from sklearn.externals import six
# toy sample
X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]]
Y = [1, 1, 1, 2, 2, 2]
T = [[-1, -1], [2, 2], [3, 2]]
true_result = [1, 2, 2]
# also load the iris dataset
iris = datasets.load_iris()
rng = check_random_state(42)
perm = rng.permutation(iris.target.size)
iris.data = iris.data[perm]
iris.target = iris.target[perm]
def test_libsvm_parameters():
# Test parameters on classes that make use of libsvm.
clf = svm.SVC(kernel='linear').fit(X, Y)
assert_array_equal(clf.dual_coef_, [[-0.25, .25]])
assert_array_equal(clf.support_, [1, 3])
assert_array_equal(clf.support_vectors_, (X[1], X[3]))
assert_array_equal(clf.intercept_, [0.])
assert_array_equal(clf.predict(X), Y)
def test_libsvm_iris():
# Check consistency on dataset iris.
# shuffle the dataset so that labels are not ordered
for k in ('linear', 'rbf'):
clf = svm.SVC(kernel=k).fit(iris.data, iris.target)
assert_greater(np.mean(clf.predict(iris.data) == iris.target), 0.9)
assert_true(hasattr(clf, "coef_") == (k == 'linear'))
assert_array_equal(clf.classes_, np.sort(clf.classes_))
# check also the low-level API
model = svm.libsvm.fit(iris.data, iris.target.astype(np.float64))
pred = svm.libsvm.predict(iris.data, *model)
assert_greater(np.mean(pred == iris.target), .95)
model = svm.libsvm.fit(iris.data, iris.target.astype(np.float64),
kernel='linear')
pred = svm.libsvm.predict(iris.data, *model, kernel='linear')
assert_greater(np.mean(pred == iris.target), .95)
pred = svm.libsvm.cross_validation(iris.data,
iris.target.astype(np.float64), 5,
kernel='linear',
random_seed=0)
assert_greater(np.mean(pred == iris.target), .95)
    # If random_seed >= 0, the libsvm rng is seeded (by calling `srand`), hence
# we should get deterministic results (assuming that there is no other
# thread calling this wrapper calling `srand` concurrently).
pred2 = svm.libsvm.cross_validation(iris.data,
iris.target.astype(np.float64), 5,
kernel='linear',
random_seed=0)
assert_array_equal(pred, pred2)
def test_precomputed():
    # SVC with a precomputed kernel.
# We test it with a toy dataset and with iris.
clf = svm.SVC(kernel='precomputed')
# Gram matrix for train data (square matrix)
# (we use just a linear kernel)
K = np.dot(X, np.array(X).T)
clf.fit(K, Y)
# Gram matrix for test data (rectangular matrix)
KT = np.dot(T, np.array(X).T)
pred = clf.predict(KT)
assert_raises(ValueError, clf.predict, KT.T)
assert_array_equal(clf.dual_coef_, [[-0.25, .25]])
assert_array_equal(clf.support_, [1, 3])
assert_array_equal(clf.intercept_, [0])
assert_array_almost_equal(clf.support_, [1, 3])
assert_array_equal(pred, true_result)
# Gram matrix for test data but compute KT[i,j]
# for support vectors j only.
KT = np.zeros_like(KT)
for i in range(len(T)):
for j in clf.support_:
KT[i, j] = np.dot(T[i], X[j])
pred = clf.predict(KT)
assert_array_equal(pred, true_result)
# same as before, but using a callable function instead of the kernel
# matrix. kernel is just a linear kernel
kfunc = lambda x, y: np.dot(x, y.T)
clf = svm.SVC(kernel=kfunc)
clf.fit(X, Y)
pred = clf.predict(T)
assert_array_equal(clf.dual_coef_, [[-0.25, .25]])
assert_array_equal(clf.intercept_, [0])
assert_array_almost_equal(clf.support_, [1, 3])
assert_array_equal(pred, true_result)
# test a precomputed kernel with the iris dataset
# and check parameters against a linear SVC
clf = svm.SVC(kernel='precomputed')
clf2 = svm.SVC(kernel='linear')
K = np.dot(iris.data, iris.data.T)
clf.fit(K, iris.target)
clf2.fit(iris.data, iris.target)
pred = clf.predict(K)
assert_array_almost_equal(clf.support_, clf2.support_)
assert_array_almost_equal(clf.dual_coef_, clf2.dual_coef_)
assert_array_almost_equal(clf.intercept_, clf2.intercept_)
assert_almost_equal(np.mean(pred == iris.target), .99, decimal=2)
# Gram matrix for test data but compute KT[i,j]
# for support vectors j only.
K = np.zeros_like(K)
for i in range(len(iris.data)):
for j in clf.support_:
K[i, j] = np.dot(iris.data[i], iris.data[j])
pred = clf.predict(K)
assert_almost_equal(np.mean(pred == iris.target), .99, decimal=2)
clf = svm.SVC(kernel=kfunc)
clf.fit(iris.data, iris.target)
assert_almost_equal(np.mean(pred == iris.target), .99, decimal=2)
def test_svr():
# Test Support Vector Regression
diabetes = datasets.load_diabetes()
for clf in (svm.NuSVR(kernel='linear', nu=.4, C=1.0),
svm.NuSVR(kernel='linear', nu=.4, C=10.),
svm.SVR(kernel='linear', C=10.),
svm.LinearSVR(C=10.),
svm.LinearSVR(C=10.),
):
clf.fit(diabetes.data, diabetes.target)
assert_greater(clf.score(diabetes.data, diabetes.target), 0.02)
# non-regression test; previously, BaseLibSVM would check that
# len(np.unique(y)) < 2, which must only be done for SVC
svm.SVR().fit(diabetes.data, np.ones(len(diabetes.data)))
svm.LinearSVR().fit(diabetes.data, np.ones(len(diabetes.data)))
def test_linearsvr():
# check that SVR(kernel='linear') and LinearSVC() give
# comparable results
diabetes = datasets.load_diabetes()
lsvr = svm.LinearSVR(C=1e3).fit(diabetes.data, diabetes.target)
score1 = lsvr.score(diabetes.data, diabetes.target)
svr = svm.SVR(kernel='linear', C=1e3).fit(diabetes.data, diabetes.target)
score2 = svr.score(diabetes.data, diabetes.target)
assert_allclose(np.linalg.norm(lsvr.coef_),
np.linalg.norm(svr.coef_), 1, 0.0001)
assert_almost_equal(score1, score2, 2)
def test_linearsvr_fit_sampleweight():
# check correct result when sample_weight is 1
# check that SVR(kernel='linear') and LinearSVC() give
# comparable results
diabetes = datasets.load_diabetes()
n_samples = len(diabetes.target)
unit_weight = np.ones(n_samples)
lsvr = svm.LinearSVR(C=1e3).fit(diabetes.data, diabetes.target,
sample_weight=unit_weight)
score1 = lsvr.score(diabetes.data, diabetes.target)
lsvr_no_weight = svm.LinearSVR(C=1e3).fit(diabetes.data, diabetes.target)
score2 = lsvr_no_weight.score(diabetes.data, diabetes.target)
assert_allclose(np.linalg.norm(lsvr.coef_),
np.linalg.norm(lsvr_no_weight.coef_), 1, 0.0001)
assert_almost_equal(score1, score2, 2)
# check that fit(X) = fit([X1, X2, X3],sample_weight = [n1, n2, n3]) where
    # X = X1 repeated n1 times, X2 repeated n2 times and so forth
|
alangwansui/mtl_ordercenter
|
openerp/addons/001_qingjia/qingjia_calendar.py
|
Python
|
agpl-3.0
| 2,648
| 0.047205
|
#!usr/bin/python
# -*- coding:utf-8 -*-
from osv import osv,fields
import time
from datetime import datetime
from dateutil import rrule
class qingjia_calendar(osv.osv):
_name='qingjia.calendar'
_columns={
'start_date':fields.datetime('start_date'),
'end_date':fields.datetime('end_date'),
'calendar_line_ids':fields.one2many('qingjia.calendar.line','qingjia_calendar_id','calendar_line_ids'),
'state':fields.selection([('arrange','arrange'),('not arrange','not arrange')],'state',readonly=True)
}
_defaults={
}
def plan_arrange(self,cr,uid,ids,context=None):
my=self.browse(cr,uid,ids[0])
line_obj=self.pool.get('qingjia.calendar.line')
holidays=[]
datas=[]
start_date=time.strptime(my.start_date,'%Y-%m-%d %H:%M:%S')
end_date=time.strptime(my.end_date,'%Y-%m-%d %H:%M:%S')
dt=datetime(start_date.tm_year,start_date.tm_mon,start_date.tm_mday)
unt=datetime(end_date.tm_year,end_date.tm_mon,end_date.tm_mday)
days=rrule.rrule(rrule.DAILY,dtstart=dt,until=unt,byweekday=[6])
ge=days._iter()
for i in range(days.count()):
date_info=ge.next()
date_list=map(str,(date_info.year,date_info.month,date_info.day))
date='-'.join(date_list)
holidays.append(date)
for day in holidays:
line_search=line_obj.search(cr,uid,[('date','=',day),('type','=','holiday'),('state','=','arrange')])
if line_search:
datas.append((4,line_search[0]))
else:
datas.append((0,0,{'date':day,'type':'holiday','state':'arrange','name':'holiday'}))
self.write(cr,uid,ids,{'calendar_line_ids':datas})
return True
qingjia_calendar()
class qingjia_calendar_line(osv.osv):
_name='qingjia.calendar.line'
_columns={
'qingjia_calendar_id':fields.many2one('qingjia.calendar','qingjia_calendar_id'),
'name':fields.char('type',size=64),
'date':fields.datetime('date'),
'type':fields.selection([('work','Work'),('holiday','Holiday')],'type',),
'state':fields.selection([('arrange','arrange'),('not arrange','not arrange')],'state'),
'is_holiday':fields.boolean('is_holiday'),
'note':fields.char('note',size=128),
}
_defaults={'type':'work'}
def onchange_type(self,cr,uid,ids,res,context=None):
if res:
print res,'res'
return {'value':{'name':res}}
qingjia_calendar_line()
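# --- Added standalone illustration (not part of the module): the rrule call in
# plan_arrange enumerates Sundays, since dateutil numbers weekdays Monday=0..Sunday=6.
_sundays = list(rrule.rrule(rrule.DAILY, dtstart=datetime(2024, 1, 1),
                            until=datetime(2024, 1, 7), byweekday=[6]))
assert _sundays == [datetime(2024, 1, 7)]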
|
yhat/ggplot
|
ggplot/themes/theme_xkcd.py
|
Python
|
bsd-2-clause
| 1,582
| 0.001264
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import matplotlib as mpl
import matplotlib.pyplot as plt
from copy import copy, deepcopy
from .theme import theme_base
class theme_xkcd(theme_base):
"""
xkcd theme
The theme internaly uses the settings from pyplot.xkcd().
"""
def __init__(self, scale=1, length=100, randomness=2):
super(theme_xkcd, self).__init__()
with plt.xkcd(scale=scale, length=length, randomness=randomness):
_xkcd = mpl.rcParams.copy()
# no need to a get a deprecate warning for nothing...
for key in mpl._deprecated_map:
if key in _xkcd:
del _xkcd[key]
if 'tk.pythoninspect' in _xkcd:
del _xkcd['tk.pythoninspect']
self._rcParams.update(_xkcd)
def __deepcopy__(self, memo):
class _empty(object):
pass
result = _empty()
result.__class__ = self.__class__
result.__dict__["_rcParams"] = {}
for k, v in self._rcParams.items():
try:
result.__dict__["_rcParams"][k] = deepcopy(v, memo)
except NotImplementedError:
                # deepcopy raises an error for objects that are derived from or
# composed of matplotlib.transform.TransformNode.
# Not desirable, but probably requires upstream fix.
                # In particular, XKCD uses matplotlib.patheffects.withStroke.
# -gdowding
result.__dict__["_rcParams"][k] = copy(v)
return result
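# --- Added usage sketch (assumes matplotlib is importable, as this module
# already requires):
#
#     from copy import deepcopy
#     t = theme_xkcd()
#     t2 = deepcopy(t)   # succeeds; non-deepcopyable rcParams fall back to copy()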
|
massimo-nocentini/master-thesis
|
sympy/riordan_avoiding_patterns.py
|
Python
|
mit
| 2,308
| 0.009532
|
from sage.misc.functional import symbolic_sum
from sage.calculus.var import var
from sage.functions.other import sqrt
def from_pattern_family_10j_1(j, variable=var('t')):
"""
    This function allows building a pair of functions (d, h) that
    define a Riordan array for the pattern family (10)**j 1, for a given j.
"""
def make_sum(from_index): return symbolic_sum(variable**i, i, from_index, j)
i = var('i')
d = make_sum(from_index=0)/sqrt(
1-2*make_sum(from_index=1)-3*make_sum(from_index=1)**2)
h = (make_sum(from_index=0) - sqrt(
1-2*make_sum(from_index=1)-3*make_sum(from_index=1)**2))/(2*make_sum(from_index=0))
return d,h
def from_pattern_family_01j_0(j, variable=var('t')):
"""
    This function allows building a pair of functions (d, h) that
    define a Riordan array for the pattern family (01)**j 0, for a given j.
"""
    def make_sum(from_index, to=j): return symbolic_sum(variable**i, i, from_index, to)
i = var('i')
d = make_sum(from_index=0)/sqrt(
1-2*make_sum(from_index=1)-3*make_sum(from_index=1)**2)
h = (make_sum(from_index=0) - sqrt(
1-2*make_sum(from_index=1)-3*make_sum(from_index=1)**2))/(2*make_sum(
from_index=0, to=j-1))
return d,h
def from_pattern_family_j_j(j, variable=var('t')):
"""
    This function allows building a pair of functions (d, h) that
    define a Riordan array for this pattern family, for a given j.
"""
d = 1/sqrt(1-4*variable + 2*variable**j + variable**(2*j))
h = (1 + variable**j - sqrt(1-4*variable + 2*variable**j + variable**(2*j)))/2
return d,h
def from_pattern_family_1_succj_0_j(j, variable=var('t')):
"""
    This function allows building a pair of functions (d, h) that
    define a Riordan array for this pattern family, for a given j.
"""
d = 1/sqrt(1-4*variable + 4*variable**(j+1))
h = (1 - sqrt(1-4*variable + 4*variable**(j+1)))/2
return d,h
def from_pattern_family_0_succj_1_j(j, variable=var('t')):
"""
    This function allows building a pair of functions (d, h) that
    define a Riordan array for this pattern family, for a given j.
"""
d = 1/sqrt(1-4*variable + 4*variable**(j+1))
h = (1 - sqrt(1-4*variable + 4*variable**(j+1)))/(2*(1-variable**j))
return d,h
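# --- Added usage sketch (hypothetical Sage session; assumes var and sqrt are
# available, as the functions above do):
#
#     sage: d, h = from_pattern_family_j_j(2)
#     sage: d.taylor(var('t'), 0, 6)   # inspect the first coefficients of d(t)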
|
ervinyang/tutorial_zookeeper
|
zookeeper-trunk/src/contrib/zkpython/src/test/get_set_test.py
|
Python
|
mit
| 8,748
| 0.00583
|
#!/usr/bin/python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import zookeeper, zktestbase, unittest, threading, sys
if sys.version_info < (3,):
range = xrange
ZOO_OPEN_ACL_UNSAFE = {"perms":0x1f, "scheme":"world", "id" :"anyone"}
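# --- Added note: 0x1f above is the OR of all five permission bits
# (READ|WRITE|CREATE|DELETE|ADMIN, i.e. zookeeper.PERM_ALL), and the
# "world"/"anyone" scheme makes the node accessible to any client.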
class GetSetTest(zktestbase.TestBase):
def setUp( self ):
zktestbase.TestBase.setUp(self)
try:
zookeeper.create(self.handle, "/zk-python-getsettest", "on",[ZOO_OPEN_ACL_UNSAFE], zookeeper.EPHEMERAL)
zookeeper.create(self.handle, "/zk-python-agetsettest",
"on",[ZOO_OPEN_ACL_UNSAFE], zookeeper.EPHEMERAL)
except:
pass
def test_empty_node(self):
"""
Test for a bug when instead of empty string we can get
random data from buffer malloc'ed to hold node contents.
See ZOOKEEPER-1906 for details
"""
NODE_PATH = "/zk-python-test-empty-node"
self.ensureDeleted(NODE_PATH)
zookeeper.create(self.handle, NODE_PATH, "",
[{"perms":0x1f, "scheme":"world", "id" :"anyone"}])
(data,stat) = zookeeper.get(self.handle, NODE_PATH, None)
self.assertEqual(data, "", "Data is not empty as expected: " + data)
def test_sync_getset(self):
self.assertEqual(self.connected, True, "Not connected!")
(data,stat) = zookeeper.get(self.handle, "/zk-python-getsettest", None)
self.assertEqual(data, "on", "Data is not 'on' as expected: " + data)
ret = zookeeper.set(self.handle, "/zk-python-getsettest",
"off", stat["version"])
(data,stat) = zookeeper.get(self.handle, "/zk-python-getsettest", None)
self.assertEqual(data, "off", "Data is not 'off' as expected: " + data)
self.assertRaises(zookeeper.BadVersionException,
zookeeper.set,
self.handle,
"/zk-python-getsettest",
"test",
stat["version"]+1)
stat2 = zookeeper.set2(self.handle, "/zk-python-getsettest",
"set2", stat["version"])
self.assertNotEqual(stat2, None, "set2 call failed, return should not be None")
self.assertEqual(stat2["numChildren"], 0,
"set2 call failed, numChildren not 0 in set2 call")
(data,stat) = zookeeper.get(self.handle, "/zk-python-getsettest", None)
self.assertEqual(data, "set2", "Data is not 'set2' as expected: " + data)
def test_stat_deleted_node(self):
"""
Test for a bug that surfaced when trying to build a
        stat object from a non-existent node.
"""
self.ensureDeleted("/zk-python-test-deleteme")
self.assertRaises(zookeeper.NoNodeException,
zookeeper.get,
self.handle,
"/zk-python-test-deleteme")
self.cv = threading.Condition()
def callback(handle, rc, value, stat):
self.cv.acquire()
self.stat = stat
self.rc = rc
self.value = value
self.callback_flag = True
self.cv.notify()
self.cv.release()
self.cv.acquire()
zookeeper.aget(self.handle, "/zk-python-test-deleteme", None, callback)
self.cv.wait(15)
self.assertEqual(self.callback_flag, True, "aget timed out!")
self.assertEqual(self.stat, None, "Stat should be none!")
self.assertEqual(self.value, None, "Value should be none!")
def test_sync_get_large_datanode(self):
"""
Test that we can retrieve datanode sizes up to
1Mb with default parameters (depends on ZooKeeper server).
"""
data = ''.join(["A" for x in range(1024*1023)])
self.ensureDeleted("/zk-python-test-large-datanode")
zookeeper.create(self.handle, "/zk-python-test-large-datanode", data,
[{"perms":0x1f, "scheme":"world", "id" :"anyone"}])
(ret,stat) = zookeeper.get(self.handle, "/zk-python-test-large-datanode")
self.assertEqual(len(ret), 1024*1023,
"Should have got 1Mb returned, instead got %s" % len(ret))
(ret,stat) = zookeeper.get(self.handle, "/zk-python-test-large-datanode",None,500)
self.assertEqual(len(ret), 500,
"Should have got 500 bytes returned, instead got %s" % len(ret))
def test_async_getset(self):
self.cv = threading.Condition()
def get_callback(handle, rc, value, stat):
self.cv.acquire()
self.callback_flag = True
self.rc = rc
self.value = (value,stat)
self.cv.notify()
self.cv.release()
def set_callback(handle, rc, stat):
self.cv.acquire()
self.callback_flag = True
self.rc = rc
self.value = stat
self.cv.notify()
self.cv.release()
self.assertEqual(self.connected, True, "Not connected!")
self.cv.acquire()
self.callback_flag = False
ret = zookeeper.aset(self.handle, "/zk-python-agetsettest", "off", -1, set_callback)
self.assertEqual(ret, zookeeper.OK, "aset failed")
while not self.callback_flag:
self.cv.wait(15)
self.cv.release()
self.assertEqual(self.callback_flag, True, "aset timed out")
self.cv.acquire()
self.callback_flag = False
ret = zookeeper.aget(self.handle, "/zk-python-agetsettest", None, get_callback)
self.assertEqual(ret, zookeeper.OK, "aget failed")
self.cv.wait(15)
self.cv.release()
self.assertEqual(self.callback_flag, True, "aget timed out")
self.assertEqual(self.value[0], "off", "Data is not 'off' as expected: " + self.value[0])
def test_sync_getchildren(self):
self.ensureCreated("/zk-python-getchildrentest", flags=0)
self.ensureCreated("/zk-python-getchildrentest/child")
children = zookeeper.get_children(self.handle, "/zk-python-getchildrentest")
self.assertEqual(len(children), 1, "Expected to find 1 child, got " + str(len(children)))
def test_async_getchildren(self):
self.ensureCreated("/zk-python-getchildrentest", flags=0)
self.ensureCreated("/zk-python-getchildrentest/child")
def gc_callback(handle, rc, children):
self.cv.acquire()
self.rc = rc
self.children = children
self.callback_flag = True
self.cv.notify()
self.cv.release()
self.cv.acquire()
self.callback_flag = False
zookeeper.aget_children(self.handle, "/zk-python-getchildrentest", None, gc_callback)
self.cv.wait(15)
self.assertEqual(self.callback_flag, True, "aget_children timed out")
self.assertEqual(self.rc, zookeeper.OK, "Return code for aget_children was not OK - %s" % zookeeper.zerror(self.rc))
self.assertEqual(len(self.children), 1, "Expected to find 1 child, got " + str(len(self.children)))
def test_async_getchildren_with_watcher(self):
self.ensureCreated("/zk-python-getchildrentest", flags=0)
self.ensureCreated("/zk-python-getchildrentest/child")
watched = []
def watcher(*args):
self.cv.acquire()
watched.append(args)
|
wronk/mne-python
|
mne/viz/raw.py
|
Python
|
bsd-3-clause
| 33,845
| 0
|
"""Functions to plot raw M/EEG data
"""
from __future__ import print_function
# Authors: Eric Larson <larson.eric.d@gmail.com>
# Jaakko Leppakangas <jaeilepp@student.jyu.fi>
#
# License: Simplified BSD
import copy
from functools import partial
import numpy as np
from ..externals.six import string_types
from ..io.pick import (pick_types, _pick_data_channels, pick_info,
_PICK_TYPES_KEYS)
from ..io.proj import setup_proj
from ..utils import verbose, get_config
from ..time_frequency import psd_welch
from .topo import _plot_topo, _plot_timeseries, _plot_timeseries_unified
from .utils import (_toggle_options, _toggle_proj, tight_layout,
_layout_figure, _plot_raw_onkey, figure_nobar,
_plot_raw_onscroll, _mouse_click, plt_show,
_helper_raw_resize, _select_bads, _onclick_help,
_setup_browser_offsets)
from ..defaults import _handle_default
from ..annotations import _onset_to_seconds
def _plot_update_raw_proj(params, bools):
"""Helper only needs to be called when proj is changed"""
if bools is not None:
inds = np.where(bools)[0]
params['info']['projs'] = [copy.deepcopy(params['projs'][ii])
for ii in inds]
params['proj_bools'] = bools
params['projector'], _ = setup_proj(params['info'], add_eeg_ref=False,
verbose=False)
params['update_fun']()
params['plot_fun']()
def _update_raw_data(params):
"""Helper only needs to be called when time or proj is changed"""
from scipy.signal import filtfilt
start = params['t_start']
stop = params['raw'].time_as_index(start + params['duration'])[0]
start = params['raw'].time_as_index(start)[0]
data_picks = _pick_data_channels(params['raw'].info)
data, times = params['raw'][:, start:stop]
if params['projector'] is not None:
data = np.dot(params['projector'], data)
# remove DC
if params['remove_dc'] is True:
data -= np.mean(data, axis=1)[:, np.newaxis]
if params['ba'] is not None:
data[data_picks] = filtfilt(params['ba'][0], params['ba'][1],
data[data_picks], axis=1, padlen=0)
# scale
for di in range(data.shape[0]):
data[di] /= params['scalings'][params['types'][di]]
# stim channels should be hard limited
if params['types'][di] == 'stim':
norm = float(max(data[di]))
data[di] /= norm if norm > 0 else 1.
# clip
if params['clipping'] == 'transparent':
data[np.logical_or(data > 1, data < -1)] = np.nan
elif params['clipping'] == 'clamp':
data = np.clip(data, -1, 1, data)
params['data'] = data
params['times'] = times
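# A standalone sketch (not used by the pipeline above) contrasting the two
# clipping modes applied in _update_raw_data: "transparent" hides samples
# outside [-1, 1] by replacing them with NaN, while "clamp" saturates them,
# producing the step-like artifacts described in plot_raw's docstring.
def _clipping_demo():
    data = np.array([-2., -.5, 0., .5, 2.])
    transparent = data.copy()
    transparent[np.logical_or(transparent > 1, transparent < -1)] = np.nan
    clamped = np.clip(data, -1, 1)
    return transparent, clamped  # (NaN-masked copy, copy clipped to [-1, 1])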
def _pick_bad_channels(event, params):
"""Helper for selecting / dropping bad channels onpick"""
# Both bad lists are updated. params['info'] used for colors.
bads = params['raw'].info['bads']
params['info']['bads'] = _select_bads(event, params, bads)
_plot_update_raw_proj(params, None)
def plot_raw(raw, events=None, duration=10.0, start=0.0, n_channels=20,
bgcolor='w', color=None, bad_color=(0.8, 0.8, 0.8),
event_color='cyan', scalings=None, remove_dc=True, order='type',
show_options=False, title=None, show=True, block=False,
highpass=None, lowpass=None, filtorder=4, clipping=None):
"""Plot raw data
Parameters
----------
raw : instance of Raw
The raw data to plot.
events : array | None
Events to show with vertical bars.
duration : float
Time window (sec) to plot. The lesser of this value and the duration
of the raw file will be used.
start : float
Initial time to show (can be changed dynamically once plotted).
n_channels : int
Number of channels to plot at once. Defaults to 20.
bgcolor : color object
Color of the
|
background.
color : dict | color object | None
|
Color for the data traces. If None, defaults to::
dict(mag='darkblue', grad='b', eeg='k', eog='k', ecg='m',
emg='k', ref_meg='steelblue', misc='k', stim='k',
resp='k', chpi='k')
bad_color : color object
Color to make bad channels.
event_color : color object | dict
Color to use for events. Can also be a dict with
``{event_number: color}`` pairings. Use ``event_number==-1`` for
any event numbers in the events list that are not in the dictionary.
scalings : dict | None
Scale factors for the traces. If None, defaults to::
dict(mag=1e-12, grad=4e-11, eeg=20e-6, eog=150e-6, ecg=5e-4,
emg=1e-3, ref_meg=1e-12, misc=1e-3, stim=1,
resp=1, chpi=1e-4)
remove_dc : bool
If True remove DC component when plotting data.
order : 'type' | 'original' | array
Order in which to plot data. 'type' groups by channel type,
'original' plots in the order of ch_names, array gives the
indices to use in plotting.
show_options : bool
If True, a dialog for options related to projection is shown.
title : str | None
        The title of the window. If None, either the filename of the
        raw object or '<unknown>' will be displayed as the title.
show : bool
Show figure if True.
block : bool
Whether to halt program execution until the figure is closed.
Useful for setting bad channels on the fly by clicking on a line.
May not work on all systems / platforms.
highpass : float | None
Highpass to apply when displaying data.
lowpass : float | None
Lowpass to apply when displaying data.
filtorder : int
Filtering order. Note that for efficiency and simplicity,
filtering during plotting uses forward-backward IIR filtering,
so the effective filter order will be twice ``filtorder``.
Filtering the lines for display may also produce some edge
artifacts (at the left and right edges) of the signals
during display. Filtering requires scipy >= 0.10.
clipping : str | None
If None, channels are allowed to exceed their designated bounds in
the plot. If "clamp", then values are clamped to the appropriate
range for display, creating step-like artifacts. If "transparent",
then excessive values are not shown, creating gaps in the traces.
Returns
-------
fig : Instance of matplotlib.figure.Figure
Raw traces.
Notes
-----
The arrow keys (up/down/left/right) can typically be used to navigate
between channels and time ranges, but this depends on the backend
matplotlib is configured to use (e.g., mpl.use('TkAgg') should work). The
scaling can be adjusted with - and + (or =) keys. The viewport dimensions
can be adjusted with page up/page down and home/end keys. Full screen mode
    can be toggled with the f11 key. To mark or un-mark a channel as bad, click
on the rather flat segments of a channel's time series. The changes will be
reflected immediately in the raw object's ``raw.info['bads']`` entry.
"""
import matplotlib.pyplot as plt
import matplotlib as mpl
from scipy.signal import butter
color = _handle_default('color', color)
scalings = _handle_default('scalings_plot_raw', scalings)
if clipping is not None and clipping not in ('clamp', 'transparent'):
raise ValueError('clipping must be None, "clamp", or "transparent", '
'not %s' % clipping)
# figure out the IIR filtering parameters
nyq = raw.info['sfreq'] / 2.
if highpass is None and lowpass is None:
ba = None
else:
filtorder = int(filtorder)
if filtorder <= 0:
raise ValueError('filtorder (%s) must be >= 1' % filtorder)
if highpass is not None and highpass <= 0:
raise ValueError('highpass must be > 0, not %s' % highpass)
if lowpass is not None and lowpass >= nyq:
raise ValueError('lowpass mus
|
n0tr00t/Sreg
|
sreg.py
|
Python
|
mit
| 6,889
| 0.002177
|
#!/usr/bin/env python
# encoding: utf-8
# author: www.n0tr00t.com
import sys
import glob
import json
import chardet
import requests
import urlparse
import argparse
import multiprocessing
from common.color import *
from common.output import *
from collections import OrderedDict
def check(plugin, passport, passport_type):
"""
plugin: *.json
passport: username, email, phone
passport_type: passport type
"""
if plugin["request"]["{0}_url".format(passport_type)]:
url = plugin["request"]["{0}_url".format(passport_type)]
else:
return
app_name = plugin['information']['name']
category = plugin["information"]["category"]
website = plugin["information"]["website"].encode("utf-8")
judge_yes_keyword = plugin['status']['judge_yes_keyword'].encode("utf-8")
judge_no_keyword = plugin['status']['judge_no_keyword'].encode("utf-8")
headers = OrderedDict({
'Host': urlparse.urlparse(url).netloc,
'Connection': 'closed',
'Pragma': 'no-cache',
'Cache-Control': 'no-cache',
'User-Agent': 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) Gecko/20091201 Firefox/3.5.6',
'Accept-Encoding': 'gzip, deflate',
'Accept': '*/*',
'Referer': url,
})
if plugin.has_key("headers"):
for header_key in plugin['headers'].keys():
headers[header_key] = plugin['headers'][header_key]
if plugin['request']['method'] == "GET":
try:
url = url.replace('{}', passport)
s = requests.Session()
s.headers = headers
content = s.get(url, headers={}, timeout=8).content
encoding = chardet.detect(content)["encoding"]
            if encoding is None or encoding == "ascii":
content = content.encode("utf-8")
else:
content = content.decode(encoding).encode("utf-8")
except Exception, e:
print inRed('\n[-] %s Error: %s\n' % (app_name, str(e)))
return
if judge_yes_keyword in content and judge_no_keyword not in content:
print u"[{0}] {1}".format(category, ('%s (%s)' % (app_name, website)))
icon = plugin['information']['icon'].encode("utf-8")
desc = plugin['information']['desc'].encode("utf-8")
output_add(category.encode("utf-8"), app_name.encode("utf-8"), website,
passport.encode("utf-8"), passport_type, icon, desc)
else:
pass
elif plugin['request']['method'] == "POST":
post_data = plugin['request']['post_fields']
if post_data.values().count("") != 1:
print "[*] The POST field can only leave a null value."
return
for k, v in post_data.iteritems():
if v == "":
post_data[k] = passport
try:
s = requests.Session()
s.headers = headers
content = s.post(url, data=post_data, headers={}, timeout=8).content
encoding = chardet.detect(content)["encoding"]
            if encoding is None or encoding == "ascii":
content = content.encode("utf-8")
else:
content = content.decode(encoding).encode("utf-8")
except Exception, e:
print inRed('\n[-] %s Error: %s\n' % (app_name, str(e)))
return
if judge_yes_keyword in content and judge_no_keyword not in content:
print u"[{0}] {1}".format(category, ('%s (%s)' % (app_name, website)))
icon = plugin['information']['icon'].encode("utf-8")
desc = plugin['information']['desc'].encode("utf-8")
output_add(category.encode("utf-8"), app_name.encode("utf-8"), website,
passport.encode("utf-8"), passport_type, icon, desc)
else:
pass
else:
print inRed(u'\n[*] {0} Error!\n'.format(plugin['request']['name']))
# print u"[-]{}:::Error
|
!".format(plugin['request']['name'])
def main():
parser = argparse.ArgumentParser(description="Check how many Platforms the User registered.")
parser.add_argument("-u", action="store", dest="user")
parser.add_argument("-e", action="store", dest="email")
parser.add_argument("-c", action="store", dest="cellphone")
parser_argument = parser.parse_args()
banner = '''
.d8888b.
d88P Y88b
Y88b.
"Y888b. 888d888 .d88b. .d88b.
|
"Y88b.888P" d8P Y8bd88P"88b
"888888 88888888888 888
Y88b d88P888 Y8b. Y88b 888
"Y8888P" 888 "Y8888 "Y88888
888
Y8b d88P
"Y88P"
'''
all_argument = [parser_argument.cellphone, parser_argument.user, parser_argument.email]
plugins = glob.glob("./plugins/*.json")
print inGreen(banner)
print '[*] App: Search Registration'
print '[*] Version: V1.1(20180419)'
print '[*] Website: www.n0tr00t.com'
file_name = ""
if all_argument.count(None) != 2:
        print '\nUse "-h" to view the help information.'
sys.exit(0)
if parser_argument.cellphone:
print inYellow('\n[+] Phone Checking: %s\n') % parser_argument.cellphone
file_name = "cellphone_" + str(parser_argument.cellphone)
output_init(file_name, "Phone: ", str(parser_argument.cellphone))
if parser_argument.user:
print inYellow('\n[+] Username Checking: %s\n') % parser_argument.user
file_name = "user_" + str(parser_argument.user)
output_init(file_name, "UserName: ", str(parser_argument.user))
if parser_argument.email:
print inYellow('\n[+] Email Checking: %s\n') % parser_argument.email
file_name = "email_" + str(parser_argument.email)
output_init(file_name, "E-mail: ", str(parser_argument.email))
jobs = []
for plugin in plugins:
with open(plugin) as f:
try:
content = json.load(f)
except Exception, e:
print e, plugin
continue
if parser_argument.cellphone:
p = multiprocessing.Process(target=check,
args=(content, unicode(parser_argument.cellphone, "utf-8"), "cellphone"))
elif parser_argument.user:
p = multiprocessing.Process(target=check,
args=(content, unicode(parser_argument.user, "utf-8"), "user"))
elif parser_argument.email:
p = multiprocessing.Process(target=check,
args=(content, unicode(parser_argument.email, "utf-8"), "email"))
p.start()
jobs.append(p)
    while sum([i.is_alive() for i in jobs]) != 0:
        pass  # busy-wait until every check process finishes; join() below
for i in jobs:
i.join()
output_finished(file_name)
if __name__ == '__main__':
main()
|
derblub/pixelpi
|
menu.py
|
Python
|
mit
| 6,460
| 0.002632
|
import time
import thread
import pygame
import input
from menu.menuitems import create_menu_items
from screenfactory import create_screen
from server import interface
from helpers import *
os.chdir(os.path.dirname(os.path.realpath(__file__)))
S = Settings()
S.load()
class Menu(object):
def __init__(self, screen, items):
self.screen = screen
input.on_press.append(self.on_key_down)
self.index = 0
self.MENU_ITEMS_MAP = {
'menu': -1,
'cycle': 0,
'tetris': 1,
'snake': 2,
'pacman': 3,
'gameoflife': 4,
'clock': 5,
'pie': 6,
'music': 7,
'scroll_message': 8,
'brightness': 9,
}
self.start_screen = self.MENU_ITEMS_MAP[S.get('others', 'start_screen')]
        if self.start_screen != -1:
self.index += self.start_screen
self.items = items
self.module = None
self.reset(redraw=False)
        if self.start_screen == -1:
self.resume_animation()
print("input.available_input_methods")
print input.available_input_methods
if self.start_screen != -1 or len(input.available_input_methods) == 0:
self.launch()
self.webinterface = S.get('webinterface', 'enabled')
if self.webinterface:
from server.interface import index
self.http_server = interface.WebInterface(interface.urls, locals())
thread.start_new_thread(self.http_server.run, (), {})
self.socket_server = interface.SocketInterface(self.screen)
thread.start_new_thread(self.socket_server.run, (), {})
def reset(self, redraw=True):
self.dir = 0
self.offset = 0
self.zoom = 1
self.brightness = 1
if redraw:
self.draw()
def draw_on_screen(self, x, y, zoom, graphic):
if zoom == 0:
return
if self.brightness == 1 and zoom == 1:
for source_x in range(8):
for source_y in range(8):
target = Point(source_x + x - 4, source_y + y - 4)
if 0 <= target.x < 16 and 0 <= target.y < 16:
self.screen.pixel[target.x][target.y] = rgb_to_int(graphic[source_x][source_y])
return
for target_x in range(16):
for target_y in range(16):
source = Point(int((target_x - x) / zoom + 4), int((target_y - y) / zoom + 4))
if 0 <= source.x < 8 and 0 <= source.y < 8:
c = graphic[source.x][source.y]
self.screen.pixel[target_x][target_y] = Color(int(c.r * self.brightness),
int(c.g * self.brightness),
int(c.b * self.brightness))
def draw_scrollbar(self):
size = int(math.floor(16 / len(self.items)))
start = int(math.floor((16 - size) * self.index / (len(self.items) - 1)))
for x in range(size):
self.screen.pixel
|
[(start + x - int(size * self.offset) + 16) % 16][15] = Color(int(80 * self.brightness),
int(80 * self.brightness),
int(80 * self.brightness))
def draw(self):
if self.module is not None:
return
self.screen.clear()
self.draw_scrollbar()
self.draw_on_screen(8 + int(self.
|
offset * 12), 8, self.zoom, self.items[self.index].get_preview())
if self.dir != 0:
self.draw_on_screen(8 + int(self.offset * 12) - self.dir * 12, 8, self.zoom,
self.items[(self.index - self.dir + len(self.items)) % len(self.items)].get_preview())
self.screen.update()
@staticmethod
def ease(x):
return x
def tick(self):
input.tick()
        if getattr(self, 'socket_server', None):  # only created when webinterface is enabled
            self.socket_server.tick()
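        # Slide animation: while a move is in flight, offset decays from
        # +/-1 to 0 over the start..end window (0.1 s), so draw() renders
        # the old and new previews gliding into place.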
if self.dir != 0:
self.offset = self.dir * self.ease((1 - (time.clock() - self.start) / (self.end - self.start)))
if time.clock() > self.end:
self.offset = 0
self.dir = 0
self.draw()
def move(self, direction):
self.index = (self.index + direction + len(self.items)) % len(self.items)
self.dir = direction
self.start = time.clock()
self.end = self.start + 0.1
def on_key_down(self, key):
self.items[self.index].on_key_press(key, self)
if self.module is not None:
if key == input.Key.HOME or key == input.Key.BACK:
self.stop()
return
if key == input.Key.RIGHT:
self.move(1)
if key == input.Key.LEFT:
self.move(-1)
if key == input.Key.ENTER or key == input.Key.A:
self.launch()
return True
def stop(self):
self.module.stop()
self.screen.fade_out(0.3)
self.module = None
self.resume_animation()
input.on_press = [self.on_key_down]
input.on_release = []
def launch(self):
if not self.items[self.index].is_launchable():
return
self.offset = 0
self.dir = 0
self.start_animation()
self.module = self.items[self.index].get_module(self.screen)
self.module.start()
def start_animation(self):
start = time.clock()
end = start + 1
while time.clock() <= end:
self.zoom = 1 + 16 * ((time.clock() - start) / (end - start)) ** 2
self.brightness = min(1, 1 - ((time.clock() - start) / (end - start)))
self.draw()
# pygame.time.wait(100)
time.sleep(0.1)
self.reset(redraw=False)
def resume_animation(self):
start = time.clock()
end = start + 0.5
while time.clock() <= end:
self.zoom = ((time.clock() - start) / (end - start))
self.brightness = min(1, 1 * ((time.clock() - start) / (end - start)))
self.draw()
self.reset()
if __name__ == '__main__':
menu = Menu(create_screen(), create_menu_items())
while True:
menu.tick()
time.sleep(0.01)
# time.sleep(0.1)
|
antoinecarme/pyaf
|
tests/artificial/transf_Anscombe/trend_MovingMedian/cycle_12/ar_/test_artificial_32_Anscombe_MovingMedian_12__20.py
|
Python
|
bsd-3-clause
| 266
| 0.086466
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process
|
_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingMedian", cycle_length = 12, transform = "Anscombe", sigma = 0.0, exog
|
_count = 20, ar_order = 0);
|
ContinuumIO/ashiba
|
enaml/enaml/qt/qt_color_dialog.py
|
Python
|
bsd-3-clause
| 5,551
| 0.001441
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
from atom.api import Int, Typed
from enaml.colors import Color
from enaml.widgets.color_dialog import ProxyColorDialog
from .QtCore import Signal
from .QtGui import QColor, QColorDialog
from .qt_toolkit_dialog import QtToolkitDialog
def color_from_qcolor(q):
""" Convert a QColor into an Enaml Color.
Parameters
----------
q : QColor
The Qt color to convert to Enaml Color.
Returns
-------
result : Color or None
An Enaml Color or None if the QColor is not valid.
"""
if not q.isValid():
return None
return Color(q.red(), q.green(), q.blue(), q.alpha())
# Guard flags
CURRENT_GUARD = 0x1
class QColorDialogEx(QColorDialog):
""" A custom QColorDialog which emits a custom finished signal.
"""
#: A signal emitted at the end of the 'done' method. This works
#: around the standard QColorDialog behavior which emits the
#: 'colorSelected' signal *after* the 'finished' signal.
reallyFinished = Signal(int)
def done(self, result):
""" A reimplemented done method.
This method emits the 'reallyFinished' signal on completion.
"""
super(QColorDialogEx, self).done(result)
self.reallyFinished.emit(result)
class QtColorDialog(QtToolkitDialog, ProxyColorDialog):
""" A Qt implementation of an Enaml ProxyColorDialog.
"""
#: A reference to the widget created by the proxy.
widget = Typed(QColorDialogEx)
#: Cyclic notification guard. This a bitfield of multiple guards.
_guard = Int(0)
def create_widget(self):
""" Create the underlying QColorDialog.
"""
self.widget = QColorDialogEx(self.parent_widget())
def init_widget(self):
""" Initialize the underlying widget.
"""
# Do not call super(...) as it connects the standard 'finished'
# signal. This widget uses the custom 'reallyFinished' signal.
d = self.declaration
self.set_title(d.title)
self.set_current_color(d.current_color)
self.set_show_alpha(d.show_alpha)
self.set_show_buttons(d.show_buttons)
widget = self.widget
widget.currentColorChanged.connect(self.on_current_color_changed)
widget.colorSelected.connect(self.on_color_selected)
widget.reallyFinished.connect(self.on_finished)
#--------------------------------------------------------------------------
# Utility Methods
#--------------------------------------------------------------------------
def get_default_title(self):
""" Get the default window title for the color dialog.
"""
return u'Select Color'
#--------------------------------------------------------------------------
# Signal Handlers
#--------------------------------------------------------------------------
def on_current_color_changed(self, qcolor):
""" Handle the 'currentColorChanged' signal from the widget.
"""
d = self.declaration
if d is not None:
self._guard |= CURRENT_GUARD
try:
d.current_color = color_from_qcolor(qcolor)
finally:
self._guard &= ~CURRENT_GUARD
def on_color_selected(self, qcolor):
""" Handle the 'colorSelected' signal from the widget.
"""
d = self.declaration
if d is not None:
d.selected_color = color_from_qcolor(qcolor)
#--------------------------------------------------------------------------
# ProxyColorDialog API
#--------------------------------------------------------------------------
@staticmethod
def custom_count():
""" Get the number of available custom colors.
"""
return QColorDialog.customCount()
@staticmethod
def custom_color(index):
""" Get the custom color for the given index.
"""
qrgb = QColorDialog.customColor(index)
return color_from_qcolor(QColor.fromRgba(qrgb))
@staticmethod
def set_custom_color(index, color):
""" Set the custom color for the given index.
"""
QColorDialog.setCustomColor(index, color.argb)
def set_current_color(self, color):
""" Set the current color for the underlying widget.
"""
if not self._guard & CURRENT_GUARD:
if color is not None:
qcolor = QColor.fromRgba(color.argb)
else:
qcolor = QColor()
self.widget.setCurrentColor(qcolor)
def set_show_alpha(self, show):
""" Set the show alpha option on the underlying widget.
"""
widget = self.widget
opt = widget.options()
if show:
opt |= QColorDialog.ShowAlphaChannel
else:
opt &= ~QColorDialog.ShowAlphaChannel
widget.setOptions(opt)
def set_show_buttons(self, show):
""" Set the show
|
buttons option on the underlying widget.
"""
widget = self.widget
|
opt = widget.options()
if show:
opt &= ~QColorDialog.NoButtons
else:
opt |= QColorDialog.NoButtons
widget.setOptions(opt)
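# A minimal sketch of the cyclic-notification guard used by QtColorDialog
# above (names here are illustrative, not enaml API): a bit set while the
# widget signal handler runs stops the declaration update from echoing the
# same value straight back into the widget.
class _GuardedSync(object):
    _GUARD = 0x1
    _guard = 0

    def on_widget_changed(self, value):
        self._guard |= self._GUARD
        try:
            self.declaration_value = value  # would normally notify back
        finally:
            self._guard &= ~self._GUARD

    def set_from_declaration(self, value):
        if not self._guard & self._GUARD:  # skip echoes from the handler
            self.widget_value = value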
|
xiilei/pytools
|
mitm-savecookies.py
|
Python
|
apache-2.0
| 702
| 0.011396
|
#!/usr/bin/env python3
#
|
-*- coding: utf-8 -*-
# sudo iptables -t nat -A PREROUTING -i eth0 -p tcp --dport 80 -j REDIRECT --to-port 8080
# sudo sysctl -w net.ipv4.ip_forward=1
from libmproxy.protocol.http import decoded
def response(context, flow):
with decoded(flow.response): # automatically decode gzipped responses.
headers = flow.request.headers
host = headers.get('Host')
cookies = headers.get('Cookie')
print(cookies)
        if host is None or cookies is None:
r
|
eturn True
with open('savecookies','a') as f:
f.write(','.join(host))
f.write("\n")
f.write(';'.join(cookies))
f.write("\n\n")
|
NicolasLM/sauna
|
sauna/__init__.py
|
Python
|
bsd-2-clause
| 15,099
| 0
|
from collections import namedtuple
import threading
import queue
from logging import getLogger
import time
import socket
import os
import textwrap
import signal
import importlib
import pkgutil
import re
import sys
import glob
import functools
from concurrent.futures import ThreadPoolExecutor
from sauna import plugins, consumers
from sauna.plugins.base import Check
from sauna.consumers.base import BatchQueuedConsumer, QueuedConsumer
from sauna.consumers import ConsumerRegister
from sauna.plugins import PluginRegister
from sauna.scheduler import Scheduler, Job
__version__ = '0.0.18'
logger = getLogger(__name__)
ServiceCheck = namedtuple('ServiceCheck',
['timestamp', 'hostname', 'name',
'status', 'output'])
# Global dict containing the last status of each check
# Needs to hold a lock to access it
check_results = {}
check_results_lock = threading.Lock()
try:
# In Python 3.2 threading.Event is a factory function
# the real class lives in threading._Event
event_type = threading._Event
except AttributeError:
event_type = threading.Event
class DependencyError(Exception):
def __init__(self, plugin, dep_name, pypi='', deb=''):
self.msg = '{} depends on {}. It can be installed with:\n'.format(
plugin, dep_name
)
if pypi:
self.msg = '{} pip install {}\n'.format(self.msg, pypi)
if deb:
self.msg = '{} apt-get install {}\n'.format(self.msg, deb)
def __str__(self):
return self.msg
def _merge_config(original, included):
"""Add properties from a dict to another dict.
:param original: dict to update
"""
for key, value in included.items():
if isinstance(value, list):
try:
original[key].extend(value)
except KeyError:
original[key] = value
elif isinstance(value, dict):
try:
original[key].update(value)
except KeyError:
original[key] = value
else:
original[key] = value
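# A small usage sketch of the merge semantics above: lists are extended,
# dicts are updated key-by-key, and scalar values are overwritten.
#
# >>> original = {'plugins': [1], 'consumers': {'a': 1}, 'periodicity': 60}
# >>> _merge_config(original, {'plugins': [2], 'consumers': {'b': 2},
# ...                          'periodicity': 120})
# >>> original
# {'plugins': [1, 2], 'consumers': {'a': 1, 'b': 2}, 'periodicity': 120}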
def read_config(config_file):
# importing yaml here because dependency is not installed
# when fetching __version__ from setup.py
import yaml
try:
with open(config_file) as f:
config = yaml.safe_load(f)
except OSError as e:
print('Cannot read configuration file {}: {}'
.format(config_file, e))
sys.exit(1)
for config_file_included in glob.glob(config.get('include', '')):
config_included = read_config(config_file_included)
_merge_config(config, config_included)
return config
class Sauna:
def __init__(self, config=None):
if config is None:
config = {}
self.config = config
self.must_stop = threading.Event()
self._consumers_queues = []
if self.config.get("concurrency", 1) > 1:
self._thread_pool = ThreadPoolExecutor(
max_workers=self.config.get("concurrency")
)
else:
self._thread_pool = None
self.import_submodules(__name__ + '.plugins.ext')
self.import_submodules(__name__ + '.consumers.ext')
for extra_plugin_path in self.config.get('extra_plugins', []):
self.import_directory_modules(extra_plugin_path)
self._current_checks = []
self._current_checks_lock = threading.Lock()
@classmethod
def assemble_config_sample(cls, path):
sample = '---\nperiodicity: 120\nhostname: node-1.domain.tld\n'
sample += '\nconsumers:\n'
consumers_sample = ''
for _, consumer_info in ConsumerRegister.all_consumers.items():
if hasattr(consumer_info['consumer_cls'], 'config_sample'):
consumers_sample += textwrap.dedent(
consumer_info['consumer_cls'].config_sample()
)
sample += consumers_sample.replace('\n', '\n ')
sample += '\nplugins:\n'
plugins_sample = ''
for _, plugin_info in PluginRegister.all_plugins.items():
if hasattr(plugin_info['plugin_cls'], 'config_sample'):
plugins_sample += textwrap.dedent(
plugin_info['plugin_cls'].config_sample()
)
sample += plugins_sample.replace('\n', '\n ')
|
file_p
|
ath = os.path.join(path, 'sauna-sample.yml')
with open(file_path, 'w') as f:
f.write(sample)
return file_path
@property
@functools.lru_cache()
def hostname(self):
# socket.getfqdn can be a very long call
# make sure to only call it when absolutely necessary
# that's why a cache is used and dict.get() is avoided
try:
return self.config['hostname']
except KeyError:
return socket.getfqdn()
@property
def periodicity(self):
return self.config.get('periodicity', 120)
@property
def plugins_checks(self):
plugins = []
if type(self.config['plugins']) is dict:
for plugin_name, plugin_data in self.config['plugins'].items():
plugin_data.update({'type': plugin_name})
plugins.append(plugin_data)
elif type(self.config['plugins']) is list:
plugins = self.config['plugins']
else:
print('Invalid configuration, plugins must be a list or a dict')
sys.exit(1)
return plugins
@property
def consumers(self):
consumers = []
if type(self.config['consumers']) is dict:
for cons_name, cons_data in self.config['consumers'].items():
# Consumer Stdout doesn't need configuration
if cons_data is None:
cons_data = {}
cons_data.update({'type': cons_name})
consumers.append(cons_data)
elif type(self.config['consumers']) is list:
consumers = self.config['consumers']
else:
print('Invalid configuration, consumers must be a list or a dict')
sys.exit(1)
return consumers
def get_active_checks_name(self):
checks = self.get_all_active_checks()
return [check.name for check in checks]
def get_all_available_consumers(self):
return_consumers = []
for plugin_name, _ in ConsumerRegister.all_consumers.items():
return_consumers.append(plugin_name)
return return_consumers
def get_all_available_checks(self):
checks = {}
for plugin_name, data in PluginRegister.all_plugins.items():
checks[plugin_name] = []
for check in data['checks']:
checks[plugin_name].append(check)
return checks
def get_all_active_checks(self):
checks = []
deps_error = []
for plugin_data in self.plugins_checks:
plugin_name = plugin_data['type']
# Load plugin
plugin_info = PluginRegister.get_plugin(plugin_name)
if not plugin_info:
print('Plugin {} does not exist'.format(plugin_name))
sys.exit(1)
# Configure plugin
try:
plugin = plugin_info['plugin_cls'](
plugin_data.get('config', {})
)
except DependencyError as e:
deps_error.append(str(e))
continue
# Launch plugin checks
for check in plugin_data['checks']:
func_name = plugin_info['checks'].get(check['type'])
if func_name is None:
print('Unknown check {} on plugin {}'.format(check['type'],
plugin_name))
sys.exit(1)
check_func = getattr(plugin, func_name)
# An empty string is a valid check name
check_name = check.get(
'name',
'{}_{}'.format(plugin_name, check['type'])
).lower()
check_periodicity = (ch
|
quaddra/engage
|
python_pkg/engage/engine/create_distribution.py
|
Python
|
apache-2.0
| 4,538
| 0.003967
|
import sys
import tarfile
import os
import os.path
from optparse import OptionParser
import fixup_python_path
from engage.engine.engage_file_layout import get_engine_layout_mgr
from engage.engine.cmdline_script_utils import add_standard_cmdline_options, process_standard_options
from engage.utils.log_setup import setup_engage_logger
logger = setup_engage_logger(__name__)
from engage.utils.system_info_bootstrap import SUPPORTED_PLATFORMS
def _validate_dir_exists(dirname):
if not os.path.isdir(dirname):
raise Exception("Directory %s does not exist, your deployment home does not appear to be set up properly" % dirname)
def get_distribution_archive_filename(deployment_home):
return os.path.join(os.path.join(deployment_home, "engage"), "engage-dist.tar.gz")
def create_distribution_from_deployment_home(deployment_home,
archive_name=None,
include_test_data=False):
dh = os.path.abspath(os.path.expanduser(deployment_home))
if not os.path.isdir(dh):
raise Exception("Deployment home %s does not exist" % dh)
engage_home = os.path.join(dh, "engage")
_validate_dir_exists(engage_home)
if not archive_name:
archive_name = get_distribution_archive_filename(dh)
if os.path.exists(archive_name):
logger.debug("Deleting old distribution archive file")
os.remove(archive_name)
logger.debug("Creating distribution archive at %s" % archive_name)
tf = tarfile.open(archive_name, "w:gz")
try:
sw_packages = os.path.join(engage_home, "sw_packages")
_validate_dir_exists(sw_packages)
tf.add(sw_packages, "engage/sw_packages")
metadata = os.path.join(engage_home, "metadata")
_validate_dir_exists(metadata)
tf.add(metadata, "engage/metadata")
python_pkg_dir = os.path.join(engage_home, "python_pkg")
_validate_dir_exists(python_pkg_dir)
tf.add(python_pkg_dir, "engage/python_pkg")
bootstrap_file = os.path.join(engage_home, "bootstrap.py")
tf.add(bootstrap_file, "engage/bootstrap.py")
upgrade_file = os.path.join(engage_home, "upgrade.py")
tf.add(upgrade_file, "engage/upgrade.py")
if include_test_data:
test_data_dir = os.path.join(engage_home, "test_data")
if os.path.isdir(test_data_dir):
tf.add(test_data_dir, "engage/test_data")
else:
logger.warning("--include-test-data was specified, but test data directory %s not found" % test_data_dir)
found_cfg_exe = False
for platform in SUPPORTED_PLATFORMS:
cfg_exe_src = os.path.join(engage_home, "bin/configurator-%s" % platform)
cfg_exe_dst = "engage/bin/configurator-%s" % platform
if os.path.exists(cfg_exe_src):
logger.debug("Copying configurator executable for %s" % platform)
tf.add(cfg_exe_src, cfg_exe_dst)
found_cfg_exe = True
if not found_cfg_exe:
raise Exception("Cound not find a configurator executable")
finally:
tf.close()
def main(argv):
usage = "usage: %prog [options]"
parser = OptionParser(usage=usage)
parser.add_option("--archive-name", "-a", dest="archive_name",
default=None,
help="Full
|
path of generated archive file (defaults to <deployment_home>/engage/engage-dist.tar.gz)")
parser.add_option("--include_test-data", dest="include_test_data",
default=False,
help="Include the engage/test_data
|
directory, if present")
add_standard_cmdline_options(parser, uses_pw_file=False,
running_deployment=False)
(options, args) = parser.parse_args(args=argv)
(file_layout, dh) = process_standard_options(options, parser,
allow_overrides_of_dh=True)
if options.archive_name:
archive_name = options.archive_name
else:
archive_name = get_distribution_archive_filename(dh)
create_distribution_from_deployment_home(dh, archive_name,
include_test_data=options.include_test_data)
print "Distribution successfully created at %s" % archive_name
return 0
def call_from_console_script():
sys.exit(main(sys.argv[1:]))
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
|
pythononwheels/pow_clean
|
start/stuff/comment.py
|
Python
|
mit
| 1,289
| 0.013964
|
#
# Model Comment
#
from sqlalchemy import Column, Integer, String, Boolean, Sequence
from sqlalchemy import BigInteger, Date, DateTime, Float, Numeric
from pow_comments.powlib import relation
from pow_comments.sqldblib import Base
#@relation.has_many("<plural_other_models>")
@relation.is_tree()
@relation.setup_schema()
class Comment(Base):
#
#
|
put your column definition here:
#
#
# sqlalchemy classic style
# which offer you all sqlalchemy options
#
#title = Column
|
(String(50))
#text = Column(String)
#
# or the new (cerberus) schema style
# which offer you immediate validation
#
schema = {
# string sqltypes can be TEXT or UNICODE or nothing
'author': {
'type': 'string', 'maxlength' : 35,
# the sql "sub"key lets you declare "raw" sql(alchemy) Column options
# the options below are implemented so far.
"sql" : {
"primary_key" : False,
"default" : "No Author Name",
"unique" : True,
"nullable" : False
}
},
'text': {'type': 'string'}
}
# init
def __init__(self, **kwargs):
self.init_on_load(**kwargs)
# your methods down here
|
harnasproject/harnas
|
harnas/userprofile/forms.py
|
Python
|
agpl-3.0
| 409
| 0
|
from django im
|
port forms
from django.contrib.auth.models import User
from harnas.userprofile.models import UserProfile
class UserProfileEditForm(forms.ModelForm):
class Meta:
model = UserProfile
fields = ('organization', 'personal_page', 'show_email', 'show_age')
class UserFieldsForm(forms.ModelForm):
class Meta:
model = User
fields = ('first_name', 'last_n
|
ame')
|
tiborsimko/analysis-preservation.cern.ch
|
cap/modules/deposit/api.py
|
Python
|
gpl-2.0
| 22,465
| 0
|
# -*- coding: utf-8 -*-
#
# This file is part of CERN Analysis Preservation Framework.
# Copyright (C) 2016 CERN.
#
# CERN Analysis Preservation Framework is free software; you can redistribute
# it and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# CERN Analysis Preservation Framework is distributed in the hope that it will
# be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with CERN Analysis Preservation Framework; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Deposit API."""
from __future__ import absolute_import, print_function
import copy
import shutil
import tempfile
from copy import deepcopy
from functools import wraps
import requests
from celery import shared_task
from flask import current_app, request
from flask_login import current_user
from invenio_access.models import ActionRoles, ActionUsers
from invenio_db import db
from invenio_deposit.api import Deposit, index, preserve
from invenio_deposit.utils import mark_as_action
from invenio_files_rest.errors import MultipartMissingParts
from invenio_files_rest.models import Bucket, FileInstance, ObjectVersion
from invenio_jsonschemas.errors import JSONSchemaNotFound
from invenio_records.models import RecordMetadata
from invenio_records_files.models import RecordsBuckets
from invenio_rest.errors import FieldError
from jsonschema.validators import Draft4Validator, RefResolutionError
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import NoResultFound
from werkzeug.local import LocalProxy
from cap.config import FILES_URL_MAX_SIZE
from cap.modules.records.api import CAPRecord
from cap.modules.repoimporter.repo_importer import RepoImporter
from cap.modules.schemas.models import Schema
from cap.modules.user.errors import DoesNotExistInLDAP
from cap.modules.user.utils im
|
port (get_existing_or_register_role,
get_existing_or_register_user)
from .errors import (DepositValidationError, FileUploadError,
UpdateDepositPermissionsError)
from .fetchers import cap_deposit_fetcher
from .minters import cap_deposit_minter
from .permissions import (AdminDepositPermission, CloneDepositPermission,
DepositAdminActionNeed, DepositReadActionNeed,
DepositUpdateActionNeed, Up
|
dateDepositPermission)
_datastore = LocalProxy(lambda: current_app.extensions['security'].datastore)
current_jsonschemas = LocalProxy(
lambda: current_app.extensions['invenio-jsonschemas']
)
PRESERVE_FIELDS = (
'_deposit',
'_buckets',
'_files',
'_experiment',
'_access',
'general_title',
'$schema'
)
DEPOSIT_ACTIONS = [
'deposit-read',
'deposit-update',
'deposit-admin',
]
def DEPOSIT_ACTIONS_NEEDS(id):
"""Method to construct action needs."""
return {
'deposit-read': DepositReadActionNeed(str(id)),
'deposit-update': DepositUpdateActionNeed(str(id)),
'deposit-admin': DepositAdminActionNeed(str(id))
}
EMPTY_ACCESS_OBJECT = {
action: {'users': [], 'roles': []} for action in DEPOSIT_ACTIONS
}
class CAPDeposit(Deposit):
"""Define API for changing deposit state."""
deposit_fetcher = staticmethod(cap_deposit_fetcher)
deposit_minter = staticmethod(cap_deposit_minter)
published_record_class = CAPRecord
@property
def schema(self):
"""Schema property."""
return Schema.get_by_fullpath(self['$schema'])
@property
def record_schema(self):
"""Convert deposit schema to a valid record schema."""
record_schema = self.schema.get_matching_record_schema()
return record_schema.fullpath
def pop_from_data(method, fields=None):
"""Remove fields from deposit data.
:param fields: List of fields to remove (default: ``('_deposit',)``).
"""
fields = fields or (
'_deposit',
'_access',
'_experiment',
'general_title',
'$schema'
)
@wraps(method)
def wrapper(self, *args, **kwargs):
"""Check current deposit status."""
for field in fields:
if field in args[0]:
args[0].pop(field)
return method(self, *args, **kwargs)
return wrapper
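    # Usage sketch (hedged): applied as a decorator, e.g. @pop_from_data on an
    # update method, so incoming payloads cannot overwrite bookkeeping fields
    # such as '_deposit' or '$schema'.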
def pop_from_data_patch(method, fields=None):
"""Remove fields from deposit data.
:param fields: List of fields to remove (default: ``('_deposit',)``).
"""
fields = fields or (
'/_deposit',
'/_access',
'/_files',
'/_experiment',
'/$schema',
)
@wraps(method)
def wrapper(self, *args, **kwargs):
"""Check current deposit status."""
for field in fields:
for k, patch in enumerate(args[0]):
if field == patch.get("path", None):
del args[0][k]
return method(self, *args, **kwargs)
return wrapper
@mark_as_action
def permissions(self, pid=None):
"""Permissions action.
We expect an array of objects:
[{
"email": "",
"type": "user|egroup",
"op": "add|remove",
"action": "deposit-read|deposit-update|deposit-admin"
}]
"""
with AdminDepositPermission(self).require(403):
data = request.get_json()
return self.edit_permissions(data)
@mark_as_action
def publish(self, *args, **kwargs):
"""Simple file check before publishing."""
with AdminDepositPermission(self).require(403):
for file_ in self.files:
if file_.data['checksum'] is None:
raise MultipartMissingParts()
return super(CAPDeposit, self).publish(*args, **kwargs)
@mark_as_action
def upload(self, pid=None, *args, **kwargs):
"""Upload action for file/repository."""
with UpdateDepositPermission(self).require(403):
data = request.get_json()
fileinfo = self._construct_fileinfo(data['url'],
data['type'])
if request:
_, record = request.view_args.get('pid_value').data
record_id = str(record.id)
filename = fileinfo['filename']
obj = ObjectVersion.create(
bucket=record.files.bucket, key=filename
)
obj.file = FileInstance.create()
record.files.flush()
record.files[filename]['source_url'] = data['url']
if data['type'] == 'url':
if data['url'].startswith(
('https://github',
'https://gitlab.cern.ch',
'root://')):
download_url.delay(record_id, data['url'], fileinfo)
else:
raise FileUploadError(
'Please provide a valid file url.')
else:
if data['url'].startswith(
('https://github', 'https://gitlab.cern.ch')):
download_repo.delay(record_id, data['url'], filename)
else:
raise FileUploadError(
'Please provide a valid repository url.')
return self
@index
@mark_as_action
def clone(self, pid=None, id_=None):
"""Clone a deposit.
Adds
|
Azure/azure-sdk-for-python
|
sdk/applicationinsights/azure-applicationinsights/azure/applicationinsights/models/events_application_info_py3.py
|
Python
|
mit
| 917
| 0.002181
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class EventsApplicationInf
|
o(Model):
"""Application info for an event result.
:param version: Version of the application
:type version: str
"""
_attribute_map = {
'version': {'key': 'version', 'type': 'str'},
}
def __init__(self, *, version: str=None, **kwargs) -> None:
super(EventsApplicationInfo, self).__init__(**kwargs)
|
self.version = version
|
mattmcd/PyAnalysis
|
scripts/dsfs_chapter01.py
|
Python
|
apache-2.0
| 3,086
| 0.021063
|
# 'Data Science from Scratch' Chapter 1 examples
# Create list of users
userNames = ["Hero", "Dunn", "Sue", "Chi", "Thor", "Clive", "Hicks", "Devin", "Kate", "Klein"]
users = []
for ind, name in enumerate( userNames ):
users.append( {"id": ind, "name": name})
# Helper function to get id
get_id = lambda userlist: map( lambda user: user["id"], userlist)
# Friendship
friendships = [(0,1), (0,2), (1,2), (1,3), (2,3), (3,4), (4,5), (5,6), (5,7), (6,8), (7,8), (8,9)]
# Store as directed graph
g = friendships
g.extend(map(lambda(i,j): (j,i), friendships))
# Add the list of friends to each user
for user in users:
user["friends"] = []
for i,j in g:
users[i]["friends"].append(users[j])
# Number of friends each user has
number_of_friends = lambda (user): len(user["friends"])
# Total number of connections
number_of_connections = reduce(lambda acc, el: acc + number_of_friends(el), users, 0)
# Sort by popularity
map(lambda user:(user["name"], number_of_friends(user)),
sorted(users, key=lambda user:number_of_friends(user), reverse=True))
# Friend of a friend
# A friend of a friend is someone who is not your friend
# but is the friend of one of your friends
# Want to keep track of how many ways we are foaf with each person
from collections import Counter
def foaf(user):
all_id = get_id(reduce( lambda acc, user: acc + user["friends"], user["friends"], []))
# Remove user id and user friends id
ignore_id = get_id(user["friends"]) + [user["id"]]
foaf_id = filter( lambda id: id not in ignore_id, all_id)
return Counter(foaf_id)
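# Example (hedged): Counter mapping each friend-of-a-friend of user 3 to the
# number of distinct paths reaching them, e.g.:
# print foaf(users[3])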
# Mutual interests
# Store interests as a lookup from user id to list of interests
interests_dict = {0: ["Hadoop", "Big Data", "HBase", "Java", "Spark",
|
"Storm", "Cassandra"],
1: ["NoSQL", "MongoDB", "Cassandra", "HBase", "Postgres", "Python", "scikit-learn", "scipy"],
2: ["numpy", "statsmodels", "pandas"],
3: ["R", "Python", "statistics", "regression", "probability"],
4: ["machine learning", "regression", "decision trees", "libsvm"],
5: ["Python", "R", "Java", "C++"],
6: ["statistics", "theory", "probability", "mathematics"],
7: ["machine learning", "scikit-learn",
|
"Mahout", "neural networks"],
8: ["neural networks", "deep learning", "Big Data", "artificical intelligence"],
9: ["Hadoop", "java", "MapReduce", "Big Data"]}
# Invert to look up from interest to list of user ids
from collections import defaultdict
users_dict = defaultdict(list)
for k in interests_dict.keys():
map(lambda v: users_dict[v].append(k), interests_dict[k])
def most_common_interests_with(user):
user_interests = interests_dict[user["id"]]
id_list = map( lambda interest: users_dict[interest], user_interests)
all_ids = filter(lambda x: x!= user["id"], reduce( lambda acc, ids: acc+ids, id_list, []))
return Counter(all_ids)
# Find topics of interest
topic_count = map( lambda k: (k.lower(), len(users_dict[k])), users_dict.keys())
topic_dict = defaultdict(int)
for topic, count in topic_count:
topic_dict[topic] += count
Counter(topic_dict)
|
draekko-rand/nik_on_gimp
|
plug-ins/NIK-HDREfexPro2.py
|
Python
|
apache-2.0
| 4,466
| 0.016346
|
#!/usr/bin/env python
'''
NIK-HDREfexPro2.py
Mod of ShellOut.py focused on getting Google NIK to work.
ShellOut call an external program passing the active layer as a temp file.
Tested only in Ubuntu 16.04 with Gimp 2.9.5 (git) with Nik Collection 1.2.11
Author:
Erico Porto on top of the work of Rob Antonishen
Benoit Touchette modified from Erico Porto
this script is modelled after the mm extern LabCurves trace plugin
by Michael Munzert http://www.mm-log.com/lab-curves-gimp
and thanks to the folks at gimp-chat it has grown a bit ;)
License:
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 3 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
The GNU Public License is available at
http://www.gnu.org/copyleft/gpl.html
'''
from gimpfu import *
import shutil
import subprocess
import os, sys
import tempfile
TEMP_FNAME = "ShellOutTempFile"
def plugin
|
_main(image, drawable, visible):
pdb.gimp_image_undo_group_start(image)
# Copy so the save operations doesn't affect the original
if visible == 0:
# Save in temporary. Note: empty user entered file name
temp = pdb.gimp_image_get_active_drawable(image)
else:
|
# Get the current visible
temp = pdb.gimp_layer_new_from_visible(image, image, "HDR Efex")
image.add_layer(temp, 0)
buffer = pdb.gimp_edit_named_copy(temp, "ShellOutTemp")
#save selection if one exists
hassel = pdb.gimp_selection_is_empty(image) == 0
if hassel:
savedsel = pdb.gimp_selection_save(image)
tempimage = pdb.gimp_edit_named_paste_as_new_image(buffer)
pdb.gimp_buffer_delete(buffer)
if not tempimage:
raise RuntimeError
pdb.gimp_image_undo_disable(tempimage)
tempdrawable = pdb.gimp_image_get_active_layer(tempimage)
# Use temp file names from gimp, it reflects the user's choices in gimp.rc
# change as indicated if you always want to use the same temp file name
# tempfilename = pdb.gimp_temp_name(progtorun[2])
tempfiledir = tempfile.gettempdir()
tempfilename = os.path.join(tempfiledir, TEMP_FNAME + "." + "tif")
# !!! Note no run-mode first parameter, and user entered filename is empty string
pdb.gimp_progress_set_text ("Saving a copy")
pdb.gimp_file_save(tempimage, tempdrawable, tempfilename, tempfilename)
# Invoke external command
print("calling HDR Efex Pro 2...")
pdb.gimp_progress_set_text ("calling HDR Efex Pro 2...")
pdb.gimp_progress_pulse()
child = subprocess.Popen([ "nik_hdrefexpro2", tempfilename ], shell=False)
child.communicate()
# put it as a new layer in the opened image
try:
newlayer2 = pdb.gimp_file_load_layer(tempimage, tempfilename)
except:
        raise RuntimeError
tempimage.add_layer(newlayer2,-1)
buffer = pdb.gimp_edit_named_copy(newlayer2, "ShellOutTemp")
if visible == 0:
drawable.resize(newlayer2.width,newlayer2.height,0,0)
sel = pdb.gimp_edit_named_paste(drawable, buffer, 1)
drawable.translate((tempdrawable.width-newlayer2.width)/2,(tempdrawable.height-newlayer2.height)/2)
else:
temp.resize(newlayer2.width,newlayer2.height,0,0)
sel = pdb.gimp_edit_named_paste(temp, buffer, 1)
temp.translate((tempdrawable.width-newlayer2.width)/2,(tempdrawable.height-newlayer2.height)/2)
pdb.gimp_buffer_delete(buffer)
pdb.gimp_edit_clear(temp)
pdb.gimp_floating_sel_anchor(sel)
#load up old selection
if hassel:
pdb.gimp_selection_load(savedsel)
image.remove_channel(savedsel)
# cleanup
os.remove(tempfilename) # delete the temporary file
gimp.delete(tempimage) # delete the temporary image
# Note the new image is dirty in Gimp and the user will be asked to save before closing.
pdb.gimp_image_undo_group_end(image)
gimp.displays_flush()
register(
"nikfilters_hdrefexpro2",
"HDR Efex Pro 2",
"HDR Efex Pro 2",
"Rob Antonishen (original) & Ben Touchette",
"(C)2011 Rob Antonishen (original) & (C)2016-2017 Ben Touchette",
"2017",
"<Image>/Filters/NIK Collection/HDR Efex Pro 2",
"RGB*, GRAY*",
[ (PF_RADIO, "visible", "Layer:", 1, (("new from visible", 1),("current layer",0))) ],
[],
plugin_main,
)
main()
|
cg31/tensorflow
|
tensorflow/contrib/distributions/python/kernel_tests/operator_pd_full_test.py
|
Python
|
apache-2.0
| 2,294
| 0.008718
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
|
==
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.contrib.distributions.python.ops import operator_pd_full
class OperatorPDFullTest(tf.test.TestCase):
  # The only method needing to be checked (because it isn't part of the parent
  # class) is the check for symmetry.
def setUp(self):
|
self._rng = np.random.RandomState(42)
def _random_positive_def_array(self, *shape):
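    # A random M times its own adjoint, M @ M^H, is symmetric and (almost
    # surely, for a random M) positive definite, which these tests rely on.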
matrix = self._rng.rand(*shape)
return tf.batch_matmul(matrix, matrix, adj_y=True).eval()
def testPositiveDefiniteMatrixDoesntRaise(self):
with self.test_session():
matrix = self._random_positive_def_array(2, 3, 3)
operator = operator_pd_full.OperatorPDFull(matrix, verify_pd=True)
operator.to_dense().eval() # Should not raise
def testNegativeDefiniteMatrixRaises(self):
with self.test_session():
matrix = -1 * self._random_positive_def_array(3, 2, 2)
operator = operator_pd_full.OperatorPDFull(matrix, verify_pd=True)
# Could fail inside Cholesky decomposition, or later when we test the
# diag.
with self.assertRaisesOpError("x > 0|LLT"):
operator.to_dense().eval()
def testNonSymmetricMatrixRaises(self):
with self.test_session():
matrix = self._random_positive_def_array(3, 2, 2)
matrix[0, 0, 1] += 0.001
operator = operator_pd_full.OperatorPDFull(matrix, verify_pd=True)
with self.assertRaisesOpError("x == y"):
operator.to_dense().eval()
if __name__ == "__main__":
tf.test.main()
|
dbrattli/RxPY
|
tests/test_observable/test_windowwithcount.py
|
Python
|
apache-2.0
| 3,069
| 0.002607
|
import unittest
from rx.observable import Observable
from rx.testing import TestScheduler, ReactiveTest
from rx.disposables import Disposable, SerialDisposable
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestWindowWithCount(unittest.TestCase):
def test_window_with_count_basic(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(100, 1), on_next(210, 2), on_next(240, 3), on_next(280, 4), on_next(320, 5), on_next(350, 6), on_next(380, 7), on_next(420, 8), on_next(470, 9), on_completed(600))
def create():
def proj(w, i):
return w.map(lambda x: str(i) + ' ' + str(x))
return xs.window_with_count(3, 2).map(proj).merge_observable()
results = scheduler.start(create)
results.messages.assert_equal(on_next(210, "0 2"), on_next(240, "0 3"), on_next(280, "0 4"), on_next(280, "1 4"), on_next(320, "1 5"), on_next(350, "1 6"), on_next(350, "2 6"), on_next(380, "2 7"), on_next(420, "2 8"), on_next(420, "3 8"), on_next(470, "3 9"), on_completed(600))
xs.subscriptions.assert_equal(subscribe(200, 600))
def test_window_with_count_disposed(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(100, 1), on_next(210, 2), on_next(240, 3), on_next(280, 4), on_next(320, 5), on_next(350, 6), on_next(380, 7), on_next(420, 8), on_next(470, 9), on_completed(600))
def create():
def proj(w, i):
return w.map(lambda x: str(i) + ' ' + str(x))
return xs.window_with_count(3, 2).map(proj).merge_observable()
results = scheduler.start(create, disposed=370)
results.messages.assert_equal(on_next(210, "0 2"), on_next(240, "0 3"), on_next(280, "0 4"), on_next(280, "1 4"), on_next(320, "1 5"), on_next(350, "1 6"), on_next(350, "2 6"))
xs.subscriptions.assert_equal(subscribe(200, 370))
def test_window_with_count_error(self):
ex = 'ex'
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(on_next(100, 1), on_next(210, 2), on_next(240, 3), on_next(280, 4), on_next(320, 5), on_next(350, 6), on_next(380, 7), on_next(420, 8), on_next(470, 9), on_error(600, ex))
def create():
def selector(w, i):
def mapping(x):
return "%s %s" % (i, x)
return w.map(mapping)
return xs.window_with_count(3, 2).map(selector).merge_observable()
results = scheduler.start(create)
results.messages.assert_equal(on_next(210, "0 2"), on_next(240, "0 3"), on_next(280, "0 4"), on_next(280, "1 4"), on_next(320, "1 5"), on_next(350, "1 6"), on_next(350, "2 6"), on_next(380, "2 7"), on_next(420, "2 8"), on_next(420, "3 8"), on_next(470, "3 9"), on_error(600, ex))
xs.subscriptions.assert_equal(subscribe(200, 600))
|
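For readers unfamiliar with the operator under test: window_with_count(3, 2) starts a new window every 2 elements and each window holds at most 3, so adjacent windows overlap by one element; that is why 4, 6 and 8 each appear twice in the expected messages above, once per window index. A stand-alone sketch of the same overlap using plain lists instead of observables:

def window_with_count(items, count, skip):
    # Start a new window every `skip` items; each window holds at most `count`.
    return [items[i:i + count] for i in range(0, len(items), skip)]

print(window_with_count([2, 3, 4, 5, 6, 7, 8, 9], 3, 2))
# [[2, 3, 4], [4, 5, 6], [6, 7, 8], [8, 9]]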
christianurich/VIBe2UrbanSim
|
3rdparty/opus/src/opus_core/hierarchical_choice_model.py
|
Python
|
gpl-2.0
| 12,765
| 0.010576
|
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from numpy import zeros, concatenate, array, where, ndarray, sort, ones, all
from opus_core.misc import unique
from opus_core.choice_model import ChoiceModel
from opus_core.samplers.constants import NO_STRATUM_ID
class HierarchicalChoiceModel(ChoiceModel):
""" Choice model with hierarchical structure, such as nested logit."""
model_name = "Hierarchical Choice Model"
model_short_name ="HChoiceM"
nest_id_name = "nest_id"
def __init__(self, choice_set, nested_structure=None, stratum=None, **kwargs):
"""'nested_structure' is a dictionary with keys being the nest identifiers and each value being
a list of identifiers of the elemental alternatives belonging to that nest.
'stratum' is either a string giving the name of a variable/expression that determines
the membership of the choice elements in nests, or an array of the same size as the
choice set giving that membership directly.
Either 'nested_structure' or 'stratum' must be given.
All arguments of the Choice Model can be used.
"""
ChoiceModel.__init__(self, choice_set, **kwargs)
self.create_nested_and_tree_structure(nested_structure, stratum, **kwargs)
self.set_model_interaction(**kwargs)
def set_choice_set_size(self):
if self.sampler_size is None:
self.sampler_size = 0
for nest, values in self.nested_structure.iteritems():
self.sampler_size += len(values)
ChoiceModel.set_choice_set_size(self)
def get_number_of_elemental_alternatives(self):
return self.membership_in_nests[0].shape[1]
def create_nested_and_tree_structure(self, nested_structure=None, stratum=None, **kwargs):
strat = stratum
if isinstance(strat, str):
strat = self.choice_set.compute_variables(strat, dataset_pool=self.dataset_pool)
elif strat is not None:
strat = array(strat)
if strat is not None:
strat[strat<=0] = NO_STRATUM_ID # valid stratum must be larger than 0
if nested_structure is None:
if strat is None:
raise StandardError, "Either 'nested_structure' or 'stratum' must be given."
sampler_size = None
if self.sampler_class is not None:
sampler_size = self.sampler_size
self.nested_structure = create_nested_structure_from_list(strat, self.choice_set, sampler_size,
valid_strata_larger_than=NO_STRATUM_ID)
else:
self.nested_structure = nested_structure
if self.sampler_class is not None and self.sampler_size is not None:
for nest, values in self.nested_structure.iteritems():
self.nested_structure[nest] = values[0:self.sampler_size]
self.number_of_nests = len(self.nested_structure)
if strat is None:
strat = create_stratum_from_nests(self.nested_structure, self.choice_set)
if self.estimate_config.get('stratum', None) is None:
self.estimate_config['stratum'] = strat
if self.run_config.get('stratum', None) is None:
self.run_config['stratum'] = strat
# Uncomment the following line when the stratified sampler can handle an array for sample_size_from_each_stratum.
# Right now it can handle only single number.
#sample_size_for_each_stratum = array(map(lambda x: len(self.nested_structure[x]), self.nested_structure.keys()))
sample_size_for_each_stratum = len(self.nested_structure[self.nested_structure.keys()[0]])
if self.estimate_config.get("sample_size_from_each_stratum", None) is None:
self.estimate_config["sample_size_from_each_stratum"] = sample_size_for_each_stratum
if self.run_config.get("sample_size_from_each_stratum", None) is None:
self.run_config["sample_size_from_each_stratum"] = sample_size_for_each_stratum
def init_membership_in_nests(self):
self.membership_in_nests = create_tree_structure_from_dict(self.nested_structure)
self.estimate_config['membership_in_nests'] = self.membership_in_nests
self.run_config['membership_in_nests'] = self.membership_in_nests
def set_model_interaction(self, **kwargs):
self.model_interaction = ModelInteractionHM(self, kwargs.get('interaction_pkg',"opus_core"), self.choice_set)
def run_chunk(self, agents_index, agent_set, specification, coefficients):
self.add_logsum_to_specification(specification, coefficients)
self.init_membership_in_nests()
return ChoiceModel.run_chunk(self, agents_index, agent_set, specification, coefficients)
def estimate(self, specification, *args, **kwargs):
self.init_membership_in_nests()
# This is because there will be __logsum_ variables in the specification when configured from the GUI,
# in order to define starting values. They are not supposed to be included there.
self.delete_logsum_from_specification(specification)
return ChoiceModel.estimate(self, specification, *args, **kwargs)
def add_logsum_to_specification(self, specification, coefficients):
idx = where(array(map(lambda x: x.startswith('__logsum_'), coefficients.get_names())))[0]
if specification.get_equations().size > 0:
eqid = min(specification.get_equations())
else:
eqid = None
for i in idx:
submodel = None
if coefficients.get_submodels().size > 0:
submodel = coefficients.get_submodels()[i]
specification.add_item('__logsum', coefficients.get_names()[i], submodel=submodel,
equation = eqid, other_fields={'dim_%s' % self.nest_id_name: int(coefficients.get_names()[i][9:])})
def delete_logsum_from_specification(self, specification):
variable_names = specification.get_variable_names()
idx = where(array(map(lambda x: x.startswith('__logsum_'), variable_names)))[0]
specification.delete(variable_names[idx])
def run_sampler_class(self, agent_set, index1=None, index2=None, sample_size=None, weight=None,
include_chosen_choice=False, resources=None):
index, chosen_choice = self.sampler_class.run(agent_set, self.choice_set, index1=index1, index2=index2, sample_size=sample_size,
weight=weight, include_chosen_choice=include_chosen_choice, resources=resources)
if 'get_sampled_stratum' not in dir(self.sampler_class):
return index, chosen_choice
sampled_stratum = self.sampler_class.get_sampled_stratum()
if not is_same_for_all_agents(sampled_stratum):
self.membership_in_nests = create_3D_tree_structure_from_stratum(sampled_stratum, self.nested_structure)
self.estimate_config['membership_in_nests'] = self.membership_in_nests
self.run_config['membership_in_nests'] = self.membership_in_nests
return index, chosen_choice
def estimate_step(self):
self.set_correct_for_sampling()
self.init_membership_in_nests()
result = ChoiceModel.estimate_step(self)
self.add_logsum_to_coefficients(result)
return result
def set_correct_for_sampling(self):
if self.sampler_class is None:
return
self.estimate_config['correct_for_sampling'] = True
stratum_sample_size = self.estimate_config["sample_size_from_each_stratum"]
keys = sort(self.nested_structure.keys())
self.estimate_config["sampling_rate"] = ones(self.number_of_nests)
this_sample_size = stratum_sample_size
for nest in range(self.number_of_nests):
idx = where(self.estimate_config['stratum'] == keys[nest])[0]
|
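To make the constructor docstring above concrete, a hypothetical two-nest setup could be supplied in either of the two equivalent forms (all identifiers invented for illustration):

# 'nested_structure': nest id -> list of elemental alternatives in that nest.
nested_structure = {1: [10, 11], 2: [20, 21, 22]}

# 'stratum': an array aligned with the choice set, mapping each alternative
# to its nest id.
stratum = [1, 1, 2, 2, 2]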
ryanss/holidays.py
|
holidays/countries/mozambique.py
|
Python
|
mit
| 2,356
| 0
|
# -*- coding: utf-8 -*-
# python-holidays
# ---------------
# A fast, efficient Python library for generating country, province and state
# specific sets of holidays on the fly. It aims to make determining whether a
# specific date is a holiday as fast and flexible as possible.
#
# Authors: dr-prodigy <maurizio.montel@gmail.com> (c) 2017-2022
# ryanss <ryanssdev@icloud.com> (c) 2014-2017
# Website: https://github.com/dr-prodigy/python-holidays
# License: MIT (see LICENSE file)
from datetime import date
from dateutil.easter import easter
from dateutil.relativedelta import relativedelta as rd
from holidays.constants import TUE, THU, SUN
from holidays.constants import FEB, APR, MAY, JUN, SEP, OCT, NOV, DEC
from holidays.holiday_base import HolidayBase
class Mozambique(HolidayBase):
country = "MZ"
def __init__(self, **kwargs):
HolidayBase.__init__(self, **kwargs)
def _populate(self, year):
if year > 1974:
self[date(year, 1, 1)] = "Ano novo"
e = easter(year)
good_friday = e - rd(days=2)
self[good_friday] = "Sexta-feira Santa"
# Carnival is the Tuesday before Ash Wednesday, which falls 40 days
# before Easter when Sundays are excluded (46 calendar days)
carnival = e - rd(days=46)
while carnival.weekday() != TUE:
    carnival = carnival - rd(days=1)
self[carnival] = "Carnaval"
self[date(year, FEB, 3)] = "Dia dos Heróis Moçambicanos"
self[date(year, APR, 7)] = "Dia da Mulher Moçambicana"
self[date(year, MAY, 1)] = "Dia Mundial do Trabalho"
self[date(year, JUN, 25)] = "Dia da Independência Nacional"
self[date(year, SEP, 7)] = "Dia da Vitória"
self[date(year, SEP, 25)] = "Dia das Forças Armadas"
self[date(year, OCT, 4)] = "Dia da Paz e Reconciliação"
self[date(year, DEC, 25)] = "Dia de Natal e da Família"
# whenever a public holiday falls on a Sunday,
# it rolls over to the following Monday
for k, v in list(self.items()):
if self.observed and year > 1974:
if k.weekday() == SUN:
self[k + rd(days=1)] = v + " (PONTE)"
class MZ(Mozambique):
pass
class MOZ(Mozambique):
pass
|
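A short usage sketch for the class above, assuming the python-holidays package layout this file ships in. December 25, 2022 fell on a Sunday, so with observed rollover the following Monday is marked as well:

from datetime import date
import holidays

mz = holidays.Mozambique(years=2022)
assert date(2022, 12, 25) in mz        # Dia de Natal e da Família (a Sunday)
assert date(2022, 12, 26) in mz        # rolled over to Monday
print(mz.get(date(2022, 12, 26)))      # 'Dia de Natal e da Família (PONTE)'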
w4/belle
|
dave/modules/pollen.py
|
Python
|
gpl-3.0
| 1,618
| 0.003708
|
# -*- coding: utf-8 -*-
"""Get the pollen count for a UK postcode."""
import dave.module
from bs4 import BeautifulSoup
from requests import get
from twisted.words.protocols.irc import assembleFormattedText, attributes as A
import dave.config
@dave.module.help("Syntax: pollen [first part of postcode]. Get the forecast in the specified location. Only works fo
|
r UK postcodes.")
|
@dave.module.command(["pollen"], "(([gG][iI][rR] {0,}0[aA]{2})|((([a-pr-uwyzA-PR-UWYZ][a-hk-yA-HK-Y]?[0-9][0-9]?)|(([a-pr-uwyzA-PR-UWYZ][0-9][a-hjkstuwA-HJKSTUW])|([a-pr-uwyzA-PR-UWYZ][a-hk-yA-HK-Y][0-9][abehmnprv-yABEHMNPRV-Y])))))$")
@dave.module.priority(dave.module.Priority.HIGHEST)
@dave.module.ratelimit(1, 1)
def pollen(bot, args, sender, source):
postcode = args[0].lower()
text = None
if not dave.config.redis.exists("pollen:{}".format(postcode)):
res = get("https://www.bbc.co.uk/weather/{}".format(postcode))
soup = BeautifulSoup(res.text, "html.parser")
element = soup.find_all("div", class_="wr-c-environmental-data")
if element:
pollen = element[0].find("span", class_="wr-c-environmental-data__icon-text")
if pollen:
text = {'L': 'low', 'M': 'medium', 'H': 'high'}.get(pollen.text, pollen.text)
dave.config.redis.setex("pollen:{}".format(postcode), 1800, text)
else:
text = dave.config.redis.get("pollen:{}".format(postcode))
if text:
bot.reply(source, sender, assembleFormattedText(
A.normal["The pollen count is currently ", A.bold[str(text)], " in ", postcode.upper()]
))
|
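The module above follows a cache-aside pattern: check redis for a key, scrape on a miss, and write the result back with a TTL via setex so stale counts expire on their own. A minimal sketch of the same pattern, with the key name and TTL as illustrative values:

import redis

r = redis.StrictRedis()

def cached_fetch(key, fetch, ttl=1800):
    # Return the cached value if present; otherwise compute it and cache
    # it for `ttl` seconds.
    value = r.get(key)
    if value is None:
        value = fetch()
        r.setex(key, ttl, value)
    return value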
google-business-communications/bm-snippets-python
|
send-message-suggested-action-dial.py
|
Python
|
apache-2.0
| 3,227
| 0.007127
|
## Copyright 2022 Google LLC
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## https://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""Sends a text mesage to the user with a suggestion action to dial a phone number.
Read more: https://developers.google.com/business-communications/business-messages/guides/how-to/message/send?hl=en#dial_action
This code is based on the https://github.com/google-business-communications/python-businessmessages
Python Business Messages client library.
"""
import uuid
from businessmessages import businessmessages_v1_client as bm_client
from businessmessages.businessmessages_v1_messages import BusinessmessagesConversationsMessagesCreateRequest
from businessmessages.businessmessages_v1_messages import BusinessMessagesDialAction
from businessmessages.businessmessages_v1_messages import BusinessMessagesMessage
from businessmessages.businessmessages_v1_messages import BusinessMessagesRepresentative
from businessmessages.businessmessages_v1_messages import BusinessMessagesSuggestedAction
from businessmessages.businessmessages_v1_messages import BusinessMessagesSuggestion
from oauth2client.service_account import ServiceAccountCredentials
# Edit the values below:
path_to_service_account_key = './service_account_key.json'
conversation_id = 'EDIT_HERE'
credentials = ServiceAccountCredentials.from_json_keyfile_name(
path_to_service_account_key,
scopes=['https://www.googleapis.com/auth/businessmessages'])
client = bm_client.BusinessmessagesV1(credentials=credentials)
representative_type_as_string = 'BOT'
if representative_type_as_string == 'BOT':
representative_type = BusinessMessagesRepresentative.RepresentativeTypeValueValuesEnum.BOT
else:
representative_type = BusinessMessagesRepresentative.RepresentativeTypeValueValuesEnum.HUMAN
# Create a text message with a dial action and fallback text
message = BusinessMessagesMessage(
messageId=str(uuid.uuid4().int),
representative=BusinessMessagesRepresentative(
representativeType=representative_type
),
text='Contact support for help with this issue.',
fallback='Give us a call at +12223334444.',
suggestions=[
BusinessMessagesSuggestion(
action=BusinessMessagesSuggestedAction(
text='Call support',
postbackData='call-support',
dialAction=BusinessMessagesDialAction(
phoneNumber='+12223334444'))
),
])
# Create the message request
create_request = BusinessmessagesConversationsMessagesCreateRequest(
businessMessagesMessage=message,
parent='conversations/' + conversation_id)
# Send the message
bm_client.BusinessmessagesV1.ConversationsMessagesService(
client=client).Create(request=create_request)
|
mdworks2016/work_development
|
Python/05_FirstPython/Chapter9_WebApp/fppython_develop/lib/python3.7/site-packages/zope/interface/tests/test_registry.py
|
Python
|
apache-2.0
| 109,233
| 0.00162
|
##############################################################################
#
# Copyright (c) 2001, 2002, 2009 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Component Registry Tests"""
# pylint:disable=protected-access
import unittest
from zope.interface import Interface
from zope.interface.adapter import VerifyingAdapterRegistry
from zope.interface.registry import Components
class ComponentsTests(unittest.TestCase):
def _getTargetClass(self):
return Components
def _makeOne(self, name='test', *args, **kw):
return self._getTargetClass()(name, *args, **kw)
def _wrapEvents(self):
from zope.interface import registry
_events = []
def _notify(*args, **kw):
_events.append((args, kw))
_monkey = _Monkey(registry, notify=_notify)
return _monkey, _events
def test_ctor_no_bases(self):
from zope.interface.adapter import AdapterRegistry
comp = self._makeOne('testing')
self.assertEqual(comp.__name__, 'testing')
self.assertEqual(comp.__bases__, ())
self.assertTrue(isinstance(comp.adapters, AdapterRegistry))
self.assertTrue(isinstance(comp.utilities, AdapterRegistry))
self.assertEqual(comp.adapters.__bases__, ())
self.assertEqual(comp.utilities.__bases__, ())
self.assertEqual(comp._utility_registrations, {})
self.assertEqual(comp._adapter_registrations, {})
self.assertEqual(comp._subscription_registrations, [])
self.assertEqual(comp._handler_registrations, [])
def test_ctor_w_base(self):
base = self._makeOne('base')
comp = self._makeOne('testing', (base,))
self.assertEqual(comp.__name__, 'testing')
self.assertEqual(comp.__bases__, (base,))
self.assertEqual(comp.adapters.__bases__, (base.adapters,))
self.assertEqual(comp.utilities.__bases__, (base.utilities,))
def test___repr__(self):
comp = self._makeOne('testing')
self.assertEqual(repr(comp), '<Components testing>')
# test _init_registries / _init_registrations via only caller, __init__.
def test_assign_to___bases__(self):
base1 = self._makeOne('base1')
base2 = self._makeOne('base2')
comp = self._makeOne()
comp.__bases__ = (base1, base2)
self.assertEqual(comp.__bases__, (base1, base2))
self.assertEqual(comp.adapters.__bases__,
(base1.adapters, base2.adapters))
self.assertEqual(comp.utilities.__bases__,
(base1.utilities, base2.utilities))
def test_registerUtility_with_component_name(self):
from zope.interface.declarations import named, InterfaceClass
class IFoo(InterfaceClass):
pass
ifoo = IFoo('IFoo')
@named(u'foo')
class Foo(object):
pass
foo = Foo()
_info = u'info'
comp = self._makeOne()
comp.registerUtility(foo, ifoo, info=_info)
self.assertEqual(
comp._utility_registrations[ifoo, u'foo'],
(foo, _info, None))
def test_registerUtility_both_factory_and_component(self):
def _factory():
raise NotImplementedError()
_to_reg = object()
comp = self._makeOne()
self.assertRaises(TypeError, comp.registerUtility,
                  component=_to_reg, factory=_factory)
def test_registerUtility_w_component(self):
from zope.interface.declarations import InterfaceClass
from zope.interface.interfaces import Registered
from zope.interface.registry import UtilityRegistration
class IFoo(InterfaceClass):
pass
ifoo = IFoo('IFoo')
_info = u'info'
_name = u'name'
_to_reg = object()
comp = self._makeOne()
_monkey, _events = self._wrapEvents()
with _monkey:
comp.registerUtility(_to_reg, ifoo, _name, _info)
self.assertTrue(comp.utilities._adapters[0][ifoo][_name] is _to_reg)
self.assertEqual(comp._utility_registrations[ifoo, _name],
(_to_reg, _info, None))
self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,))
self.assertEqual(len(_events), 1)
args, kw = _events[0]
event, = args
self.assertEqual(kw, {})
self.assertTrue(isinstance(event, Registered))
self.assertTrue(isinstance(event.object, UtilityRegistration))
self.assertTrue(event.object.registry is comp)
self.assertTrue(event.object.provided is ifoo)
self.assertTrue(event.object.name is _name)
self.assertTrue(event.object.component is _to_reg)
self.assertTrue(event.object.info is _info)
self.assertTrue(event.object.factory is None)
def test_registerUtility_w_factory(self):
from zope.interface.declarations import InterfaceClass
from zope.interface.interfaces import Registered
from zope.interface.registry import UtilityRegistration
class IFoo(InterfaceClass):
pass
ifoo = IFoo('IFoo')
_info = u'info'
_name = u'name'
_to_reg = object()
def _factory():
return _to_reg
comp = self._makeOne()
_monkey, _events = self._wrapEvents()
with _monkey:
comp.registerUtility(None, ifoo, _name, _info, factory=_factory)
self.assertEqual(len(_events), 1)
args, kw = _events[0]
event, = args
self.assertEqual(kw, {})
self.assertTrue(isinstance(event, Registered))
self.assertTrue(isinstance(event.object, UtilityRegistration))
self.assertTrue(event.object.registry is comp)
self.assertTrue(event.object.provided is ifoo)
self.assertTrue(event.object.name is _name)
self.assertTrue(event.object.component is _to_reg)
self.assertTrue(event.object.info is _info)
self.assertTrue(event.object.factory is _factory)
def test_registerUtility_no_provided_available(self):
class Foo(object):
pass
_info = u'info'
_name = u'name'
_to_reg = Foo()
comp = self._makeOne()
self.assertRaises(TypeError,
comp.registerUtility, _to_reg, None, _name, _info)
def test_registerUtility_wo_provided(self):
from zope.interface.declarations import directlyProvides
from zope.interface.declarations import InterfaceClass
from zope.interface.interfaces import Registered
from zope.interface.registry import UtilityRegistration
class IFoo(InterfaceClass):
pass
class Foo(object):
pass
ifoo = IFoo('IFoo')
_info = u'info'
_name = u'name'
_to_reg = Foo()
directlyProvides(_to_reg, ifoo)
comp = self._makeOne()
_monkey, _events = self._wrapEvents()
with _monkey:
comp.registerUtility(_to_reg, None, _name, _info)
self.assertEqual(len(_events), 1)
args, kw = _events[0]
event, = args
self.assertEqual(kw, {})
self.assertTrue(isinstance(event, Registered))
self.assertTrue(isinstance(event.object, UtilityRegistration))
self.assertTrue(event.object.registry is comp)
self.assertTrue(event.object.provided is ifoo)
self.assertTrue(event.object.name is _name)
self.assertTrue(event.object.component is _to_reg)
self.assertTrue(event.object.info is _info)
self.assertTrue(event.object.factory is None)
def test_registerUtility_duplicates_existing_reg(self):
fr
|
axinging/chromium-crosswalk
|
third_party/WebKit/Tools/Scripts/webkitpy/common/system/systemhost_mock.py
|
Python
|
bsd-3-clause
| 3,090
| 0.000647
|
# Copyright (c) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from StringIO import StringIO
from webkitpy.common.system.environment import Environment
from webkitpy.common.system.executive_mock import MockExecutive
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.platforminfo_mock import MockPlatformInfo
from webkitpy.common.system.user_mock import MockUser
from webkitpy.common.system.workspace_mock import MockWorkspace
class MockSystemHost(object):
def __init__(self, log_executive=False, executive_throws_when_run=None, os_name=None, os_version=None, executive=None, filesystem=None):
self.executable = 'python'
self.executive = executive or MockExecutive(should_log=log_executive, should_throw_when_run=executive_throws_when_run)
self.filesystem = filesystem or MockFileSystem()
self.user = MockUser()
self.platform = MockPlatformInfo()
if os_name:
    self.platform.os_name = os_name
if os_version:
self.platform.os_version = os_version
# FIXME: Should this take pointers to the filesystem and the executive?
self.workspace = MockWorkspace()
self.stdin = StringIO()
self.stdout = StringIO()
self.stderr = StringIO()
def copy_current_environment(self):
return Environment({"MOCK_ENVIRON_COPY": '1'})
def print_(self, *args, **kwargs):
sep = kwargs.get('sep', ' ')
end = kwargs.get('end', '\n')
stream = kwargs.get('stream', self.stdout)
stream.write(sep.join([str(arg) for arg in args]) + end)
|
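Because MockSystemHost routes stdin, stdout and stderr through StringIO buffers, tests can assert on emitted output directly. A small sketch using only what the class above defines:

host = MockSystemHost()
host.print_('hello', 'world', sep=', ')
assert host.stdout.getvalue() == 'hello, world\n'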
matpow2/gamedev-old
|
chowdren/project.py
|
Python
|
bsd-2-clause
| 2,297
| 0.002177
|
# Copyright (c) Mathias Kaerlev
# See LICENSE for details.
import os
from chowdren.image import Image
from chowdren.common import IDPool
from chowdren.object import get_objects
from chowdren.data import CodeData
class ProjectManager(object):
base_dir = None
def __init__(self, directory = None):
self.base_dir = directory
self.object_types = {}
self.object_type_ids = IDPool()
self.images = {}
self.image_ids = IDPool()
if directory is None:
self.data = CodeData()
return
with open(self.get_application_file(), 'rb') as fp:
self.data = CodeData.load(fp)
def get_application_file(self):
return os.path.join(self.base_dir, 'application.py')
def save(self):
self.data.object_types = {}
for k, v in self.object_types.iteritems():
self.data.object_types[k] = v.get_data()
with open(self.get_application_file(), 'wb') as fp:
self.data.save(fp)
def set_directory(self, directory):
self.base_dir = directory
def get_image_file(self, ref):
return os.path.join(self.base_dir, '%s.png' % ref)
def get_image(self, ref):
if ref in self.images:
return self.images[ref]
image = Image(self.get_image_file(ref))
self.image_ids.pop(ref)
image.id = ref
self.images[ref] = image
return image
def save_image(self, image):
if image.id is None:
image.id = self.image_ids.pop()
self.images[image.id] = image
image.save(self.get_image_file(image.id))
return image.id
# object type management
def create_object_type(self, klass):
object_type = klass(self)
object_type.id = self.object_type_ids.pop()
self.object_types[object_type.id] = object_type
return object_type
def get_object_type(self, ref):
if ref in self.object_types:
return self.object_types[ref]
type_data = self.data.object_types[ref]
object_type = get_objects()[type_data.name](self, type_data.data)
object_type.id = type_data.type_id
self.object_type_ids.pop(object_type.id)
self.object_types[ref] = object_type
return object_type
|