repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
Unofficial-Extend-Project-Mirror/openfoam-extend-Breeder-other-scripting-PyFoam | unittests/Basics/test_MatplotlibTimelines.py | Python | gpl-2.0 | 114 | 0.008772 | import unittest
from PyFoam.Basics.Matp | lotlibTimelines import | MatplotlibTimelines
theSuite=unittest.TestSuite()
|
pdfminer/pdfminer.six | tests/test_pdfminer_psparser.py | Python | mit | 3,405 | 0 | import logging
from pdfminer.psparser import KWD, LIT, P | SBaseParser, PSStackParser, PSEOF
logger = logging.getLogger(__name__)
class TestPSBaseParser:
"""Simplistic Test cases"""
TESTDATA = rb"""%!PS
begin end
" @ #
/a/BCD /Some_Name /foo#5f#xbaa
0 +1 -2 .5 1.234
(abc) () (abc ( def ) ghi)
(def\040\0\0404ghi) (bach\\slask) (foo\nbaa)
(this % is not a comment.)
(foo
baa)
(foo\
baa)
<> <20> < 40 4020 >
<abcd00
12345>
func/a/b{(c)do*}def
[ 1 (z) ! ]
<< /foo (bar) >>
"""
TOKENS = [
(5, KWD(b"begin")),
(11, KWD(b"end")),
(1 | 6, KWD(b'"')),
(19, KWD(b"@")),
(21, KWD(b"#")),
(23, LIT("a")),
(25, LIT("BCD")),
(30, LIT("Some_Name")),
(41, LIT("foo_xbaa")),
(54, 0),
(56, 1),
(59, -2),
(62, 0.5),
(65, 1.234),
(71, b"abc"),
(77, b""),
(80, b"abc ( def ) ghi"),
(98, b"def \x00 4ghi"),
(118, b"bach\\slask"),
(132, b"foo\nbaa"),
(143, b"this % is not a comment."),
(170, b"foo\nbaa"),
(180, b"foobaa"),
(191, b""),
(194, b" "),
(199, b"@@ "),
(211, b"\xab\xcd\x00\x124\x05"),
(226, KWD(b"func")),
(230, LIT("a")),
(232, LIT("b")),
(234, KWD(b"{")),
(235, b"c"),
(238, KWD(b"do*")),
(241, KWD(b"}")),
(242, KWD(b"def")),
(246, KWD(b"[")),
(248, 1),
(250, b"z"),
(254, KWD(b"!")),
(256, KWD(b"]")),
(258, KWD(b"<<")),
(261, LIT("foo")),
(266, b"bar"),
(272, KWD(b">>")),
]
OBJS = [
(23, LIT("a")),
(25, LIT("BCD")),
(30, LIT("Some_Name")),
(41, LIT("foo_xbaa")),
(54, 0),
(56, 1),
(59, -2),
(62, 0.5),
(65, 1.234),
(71, b"abc"),
(77, b""),
(80, b"abc ( def ) ghi"),
(98, b"def \x00 4ghi"),
(118, b"bach\\slask"),
(132, b"foo\nbaa"),
(143, b"this % is not a comment."),
(170, b"foo\nbaa"),
(180, b"foobaa"),
(191, b""),
(194, b" "),
(199, b"@@ "),
(211, b"\xab\xcd\x00\x124\x05"),
(230, LIT("a")),
(232, LIT("b")),
(234, [b"c"]),
(246, [1, b"z"]),
(258, {"foo": b"bar"}),
]
def get_tokens(self, s):
from io import BytesIO
class MyParser(PSBaseParser):
def flush(self):
self.add_results(*self.popall())
parser = MyParser(BytesIO(s))
r = []
try:
while True:
r.append(parser.nexttoken())
except PSEOF:
pass
return r
def get_objects(self, s):
from io import BytesIO
class MyParser(PSStackParser):
def flush(self):
self.add_results(*self.popall())
parser = MyParser(BytesIO(s))
r = []
try:
while True:
r.append(parser.nextobject())
except PSEOF:
pass
return r
def test_1(self):
tokens = self.get_tokens(self.TESTDATA)
logger.info(tokens)
assert tokens == self.TOKENS
return
def test_2(self):
objs = self.get_objects(self.TESTDATA)
logger.info(objs)
assert objs == self.OBJS
return
|
codeyash/plugins | PyPlugins/PhpParser/py/PhpParser.py | Python | apache-2.0 | 2,782 | 0.007549 | #######################################################################
# Name: calc_peg.py
# Purpose: Simple expression evaluator example using PEG language
# Author: Igor R. Dejanovic <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2009-2014 Igor R. Dejanovic <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#
# This example is functionally equivalent to calc.py. The difference is that
# in this example grammar is specified using PEG language instead of python constructs.
# Semantic actions are used to calculate expression during semantic
# analysis.
# Parser model as well as parse tree exported to dot files should be
# the same as parser model and parse tree generated in calc.py example.
#######################################################################
from __future__ import absolute_import, unicode_literals, print_function
#from PythonQt import *
from arpeggio.peg import ParserPEG
# Semantic actions
from calc import to_floatSA, factorSA, termSA, exprSA
# Grammar is defined using textual specification based on PEG language.
calc_grammar = """
number <- r'\d*\.\d*|\d+';
factor <- ("+" / "-")?
(number / "(" expression ")");
term <- factor (( "*" / "/") factor)*;
expression <- term (("+" / "-") term)*;
calc <- expression+ EOF;
"""
# Rules are mapped to semantic actions
sem_actions = {
"number" : to_floatSA,
"factor" : factorSA,
"term" : termSA,
"expression" : exprSA,
}
def main(debug=False):
# First we will make a parser - an instance of the calc parser model.
# Parser model is given in the form of PEG notation therefore we
# are using ParserPEG class. Root rule name (parsing expression) is "calc".
parser = ParserPEG(calc_grammar, "calc", debug=debug)
# An expression we want to evaluate
input_expr = "-(4-1)*5+(2+4.67)+5.89/(.2+7)"
# Then parse tree is created out of the input_expr expression.
parse_tree = parser.parse(input_expr)
result = parser.getASG(sem_actions)
if debug:
# getASG will start semantic analysis.
# In this case semantic analysis will evaluate expression and
# returned value will be evaluated result of the input_expr expression.
# Semantic actions are supplied to the getASG function.
print("{} = {}".format(input_expr, result))
if __name__ == "__main__":
# In debu | g mode dot (graphviz) files for parser model
# and parse tree will be created for visualization.
# Checkout current folde | r for .dot files.
main(debug=True)
class Parser():
"""docstring for Parser"""
# connect the button's clicked signal to our python method
#ftpui.btnConnect.connect('clicked()', connectFtp)
# show the window
#ftpui.show()
|
graalvm/mx | mx_proftool.py | Python | gpl-2.0 | 55,588 | 0.002429 | #
# ----------------------------------------------------------------------------------------------------
# Copyright (c) 2021, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 | only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it wil | l be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# ----------------------------------------------------------------------------------------------------
from __future__ import print_function
import copy
import io
import os
import re
import shutil
import struct
import subprocess
import sys
import zipfile
from abc import ABCMeta, abstractmethod
from argparse import ArgumentParser, Action, OPTIONAL, RawTextHelpFormatter, REMAINDER
from zipfile import ZipFile
import mx
import mx_benchmark
import mx_logcompilation
try:
# import into the global scope but don't complain if it's not there. The commands themselves
# will perform the check again and produce a helpful error message if it's not available.
import capstone
except ImportError:
pass
def check_capstone_import(name):
try:
import capstone # pylint: disable=unused-variable, unused-import
except ImportError as e:
mx.abort(
'{}\nThe capstone module is required to support \'{}\'. Try installing it with `pip install capstone`'.format(
e, name))
# File header format
filetag = b"JVMTIASM"
MajorVersion = 1
MinorVersion = 0
# Marker values for various data sections
DynamicCodeTag, = struct.unpack('>i', b'DYNC')
CompiledMethodLoadTag, = struct.unpack('>i', b'CMLT')
MethodsTag, = struct.unpack('>i', b'MTHT')
DebugInfoTag, = struct.unpack('>i', b'DEBI')
CompiledMethodUnloadTag, = struct.unpack('>i', b'CMUT')
class ExperimentFiles(mx._with_metaclass(ABCMeta), object):
"""A collection of data files from a performance data collection experiment."""
def __init__(self): # pylint: disable=super-init-not-called
pass
@staticmethod
def open_experiment(filename):
if os.path.isdir(filename):
return FlatExperimentFiles(directory=filename)
elif zipfile.is_zipfile(filename):
return ZipExperimentFiles(filename)
return None
@staticmethod
def open(options):
if options.experiment is None:
mx.abort('Must specify an experiment')
experiment = ExperimentFiles.open_experiment(options.experiment)
if experiment is None:
mx.abort('Experiment \'{}\' does not exist'.format(options.experiment))
return experiment
@abstractmethod
def open_jvmti_asm_file(self):
raise NotImplementedError()
@abstractmethod
def has_assembly(self):
raise NotImplementedError()
@abstractmethod
def get_jvmti_asm_filename(self):
raise NotImplementedError()
@abstractmethod
def get_perf_binary_filename(self):
raise NotImplementedError()
@abstractmethod
def open_perf_output_file(self, mode='r'):
raise NotImplementedError()
@abstractmethod
def has_log_compilation(self):
raise NotImplementedError()
@abstractmethod
def get_log_compilation_filename(self):
raise NotImplementedError()
@abstractmethod
def open_log_compilation_file(self):
raise NotImplementedError()
class FlatExperimentFiles(ExperimentFiles):
"""A collection of data files from a performance data collection experiment."""
def __init__(self, directory, jvmti_asm_name='jvmti_asm_file', perf_binary_name='perf_binary_file',
perf_output_name='perf_output_file', log_compilation_name='log_compilation'):
super(FlatExperimentFiles, self).__init__()
self.dump_path = None
if not os.path.isdir(directory):
raise AssertionError('Must be directory')
self.directory = os.path.abspath(directory)
self.jvmti_asm_filename = os.path.join(directory, jvmti_asm_name)
self.perf_binary_filename = os.path.join(directory, perf_binary_name)
self.perf_output_filename = os.path.join(directory, perf_output_name)
self.log_compilation_filename = os.path.join(directory, log_compilation_name)
@staticmethod
def create(experiment, overwrite=False):
experiment = os.path.abspath(experiment)
if os.path.exists(experiment):
if not overwrite:
mx.abort('Experiment file already exists: {}'.format(experiment))
shutil.rmtree(experiment)
os.mkdir(experiment)
return FlatExperimentFiles(directory=experiment)
def open_jvmti_asm_file(self):
return open(self.jvmti_asm_filename, 'rb')
def has_assembly(self):
return self.jvmti_asm_filename and os.path.exists(self.jvmti_asm_filename)
def open_perf_output_file(self, mode='r'):
return open(self.perf_output_filename, mode)
def get_jvmti_asm_filename(self):
return self.jvmti_asm_filename
def get_perf_binary_filename(self):
return self.perf_binary_filename
def has_perf_binary(self):
return self.perf_binary_filename and os.path.exists(self.perf_binary_filename)
def get_perf_output_filename(self):
return self.perf_output_filename
def has_perf_output(self):
return self.perf_output_filename and os.path.exists(self.perf_output_filename)
def get_log_compilation_filename(self):
return self.log_compilation_filename
def open_log_compilation_file(self):
return open(self.log_compilation_filename, mode='r', encoding='utf-8')
def has_log_compilation(self):
return self.log_compilation_filename and os.path.exists(self.log_compilation_filename)
def create_dump_dir(self):
if self.dump_path:
return self.dump_path
if self.directory:
self.dump_path = os.path.join(self.directory, 'dump')
os.mkdir(self.dump_path)
return self.dump_path
else:
raise AssertionError('Unhandled')
def ensure_perf_output(self):
"""Convert the binary perf output into the text form if it doesn't already exist."""
if not self.has_perf_output():
if not PerfOutput.is_supported():
mx.abort('perf output parsing must be done on a system which supports the perf command')
if not self.has_perf_binary():
mx.abort('perf data file \'{}\' is missing'.format(self.perf_binary_filename))
convert_cmd = PerfOutput.perf_convert_binary_command(self)
# convert the perf binary data into text format
with self.open_perf_output_file(mode='w') as fp:
mx.run(convert_cmd, out=fp)
print('Created perf output file in {}'.format(self.directory))
def package(self, name=None):
self.ensure_perf_output()
directory_name = os.path.basename(self.directory)
parent = os.path.dirname(self.directory)
if not name:
name = directory_name
return shutil.make_archive(name, 'zip', root_dir=parent, base_dir=directory_name)
class ZipExperimentFiles(ExperimentFiles):
"""A collection of data files from a performance data collection experiment."""
def __init__(self, filename):
super(ZipExperimentFiles, self).__init__()
self.experiment_file = ZipFile(filename)
|
great-expectations/great_expectations | great_expectations/expectations/metrics/column_pair_map_metrics/__init__.py | Python | apache-2.0 | 191 | 0 | fr | om .column_pair_values_equal import ColumnPairValuesEqual
from .column_pair_values_greater import ColumnPairValuesAGreaterThanB
from | .column_pair_values_in_set import ColumnPairValuesInSet
|
googleads/google-ads-python | google/ads/googleads/v8/services/services/asset_field_type_view_service/transports/grpc.py | Python | apache-2.0 | 10,387 | 0.001155 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.ads.googleads.v8.resources.types import asset_field_type_view
from google.ads.googleads.v8.services.types import asset_field_type_view_service
from .base import AssetFieldTypeViewServiceTransport, DEFAULT_CLIENT_INFO
class AssetFieldTypeViewServiceGrpcTransport(
AssetFieldTypeViewServiceTransport
):
"""gRPC backend transport for AssetFieldTypeViewService.
Service to fetch asset field type views.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
def __init__(
self,
*,
host: str = "googleads.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._ssl_channel_credentials = ssl_channel_credentials
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
warnings.warn(
"api_mtls_endpoint and client_cert_source are deprecated",
DeprecationWarning,
)
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
if credentials is None:
credentials, _ = google.auth.default(
scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
)
# Create | SSL credentials with client_cert_so | urce or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
ssl_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
ssl_credentials = SslCredentials().ssl_credentials
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
ssl_credentials=ssl_credentials,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._ssl_channel_credentials = ssl_credentials
else:
host = host if ":" in host else host + ":443"
if credentials is None:
credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)
# create a new channel. The provided one is ignored.
self._grpc_channel = type(self).create_channel(
host,
credentials=credentials,
ssl_credentials=ssl_channel_credentials,
scopes=self.AUTH_SCOPES,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
host=host, credentials=credentials, client_info=client_info,
)
@classmethod
def create_channel(
cls,
host: str = "googleads.googleapis.com",
credentials: ga_credentials.Credentials = None,
scopes: Optional[Sequence[str]] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
address (Optionsl[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from |
edx/edx-ora | controller/management/commands/import_graded_essays.py | Python | agpl-3.0 | 5,420 | 0.008118 | from django.core.management.base import BaseCommand
from django.conf import settings
from django.utils import timezone
#from http://jamesmckay.net/2009/03/django-custom-managepy-commands-not-committing-transactions/
#Fix issue where db data in manage.py commands is not refreshed at all once they start running
from django.db import transaction
transaction.commit_unless_managed()
import requests
import urlparse
import time
import json
import logging
import sys
from uuid import uuid4
from ConfigParser import SafeConfigParser
from datetime import datetime
from controller.models import Submission, Grader
from controller.models import GraderStatus, SubmissionState
import controller.rubric_functions
import random
from controller import grader_util
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = "<filename>"
help = "Poll grading controller and send items to be graded to ml"
def handle(self, *args, **options):
"""
Read from file
"""
parser = | SafeConfigParser()
parser.read(args[0])
print("Starting import...")
print("Reading config from file {0}".format(args[0]))
header_name = "importdata"
location = parser.get(header_name, 'location')
course_id = parser.get(header_name, 'course_id')
problem_id = parser.get(header_name, 'problem_id' | )
prompt_file = parser.get(header_name, 'prompt_file')
essay_file = parser.get(header_name, 'essay_file')
essay_limit = int(parser.get(header_name, 'essay_limit'))
state = parser.get(header_name, "state")
next_grader_type = parser.get(header_name, "next_grader")
add_grader = parser.get(header_name, "add_grader_object") == "True"
set_as_calibration = parser.get(header_name, "set_as_calibration") == "True"
max_score= parser.get(header_name,"max_score")
student_id = parser.get(header_name,'student_id')
increment_ids = parser.get(header_name,'increment_ids')
rubric_file = parser.get(header_name, 'rubric_file')
import_rubric_scores = parser.get(header_name, 'import_rubric_scores') == "True"
rubric_scores_file = parser.get(header_name, 'rubric_scores_file')
rubric=open(settings.REPO_PATH / rubric_file).read()
prompt=open(settings.REPO_PATH / prompt_file).read()
score, text = [], []
combined_raw = open(settings.REPO_PATH / essay_file).read()
raw_lines = combined_raw.splitlines()
for row in xrange(1, len(raw_lines)):
score1, text1 = raw_lines[row].strip().split("\t")
text.append(text1)
score.append(int(score1))
if increment_ids:
student_id = int(student_id)
if import_rubric_scores:
rubric_scores=[]
combined_raw = open(settings.REPO_PATH / rubric_scores_file).read()
raw_lines = combined_raw.splitlines()
for row in xrange(1, len(raw_lines)):
rubric_score_row=[]
for score_item in raw_lines[row].strip().split("\t"):
rubric_score_row.append(int(score_item))
rubric_scores.append(rubric_score_row)
for i in range(0, min(essay_limit, len(text))):
sub = Submission(
prompt=prompt,
student_id=student_id,
problem_id=problem_id,
state=state,
student_response=text[i],
student_submission_time=timezone.now(),
xqueue_submission_id=uuid4().hex,
xqueue_submission_key="",
xqueue_queue_name="",
location=location,
course_id=course_id,
next_grader_type=next_grader_type,
posted_results_back_to_queue=True,
previous_grader_type="BC",
max_score=max_score,
rubric=rubric,
preferred_grader_type = next_grader_type,
)
sub.save()
if add_grader:
sub.previous_grader_type="IN"
sub.save()
grade = Grader(
score=score[i],
feedback="",
status_code=GraderStatus.success,
grader_id="",
grader_type="IN",
confidence=1,
is_calibration=set_as_calibration,
)
grade.submission = sub
grade.save()
success, rubric_targets=controller.rubric_functions.generate_targets_from_rubric(sub.rubric)
scores=[]
for z in xrange(0,len(rubric_targets)):
scores.append(random.randint(0,rubric_targets[z]))
if import_rubric_scores:
score_item = rubric_scores[i]
if len(score_item) == len(scores):
scores = score_item
log.debug("Score: {0} Rubric Score: {1}".format(score[i], scores))
controller.rubric_functions.generate_rubric_object(grade, scores, sub.rubric)
if increment_ids:
student_id+=1
print ("Successfully imported {0} essays using configuration in file {1}.".format(
min(essay_limit, len(text)),
args[0],
))
|
1337/yesterday-i-learned | leetcode/318m.py | Python | gpl-3.0 | 1,034 | 0.000967 | # Hashing a string into a binary representation helps compare them in linear time, and storing the max length for each
# binary representation in a hashmap is just icing on the cake
class Solution:
def maxProduct(self, words: List[str]) -> int:
def string_to_bool(string):
mask = 0b0
for ord_of in range(ord('a'), ord('z') + 1):
char = chr(ord_of)
offset = ord_of - ord('a')
if char in string:
mask ^= (0b1 << offset)
return mask
word_map = {}
for string in words:
bin_ = string_to_bool(string)
| if bin_ in word_map:
word_map[bin_] = max(word_map[bin_], len(string))
else:
word_map[bin_] = len(string)
max_len = 0
for bin1, len1 in word_map.items():
for bin2, len2 in word_map.items():
if bin1 & bin2 == 0b0:
max_len = | max(max_len, len1 * len2)
return max_len
|
giampaolo/psutil | psutil/tests/__main__.py | Python | bsd-3-clause | 293 | 0 | #!/usr/bin/ | env python3
# Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Run unit tests. This is invoked by:
$ python -m psutil.tests
"""
from .runner imp | ort main
main()
|
Nikita1710/ANUFifty50-Online-Mentoring-Platform | project/fifty_fifty/webcore/migrations/0017_remove_profile_emails.py | Python | apache-2.0 | 388 | 0 | # -*- coding: | utf-8 -*-
# Generated by Django 1.11 on 2017-04-30 12:24
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('webcore', '0016_profile_emails'),
]
operations = [
migrations.RemoveField(
| model_name='profile',
name='emails',
),
]
|
liuzzfnst/tp-libvirt | libvirt/tests/src/virsh_cmd/nodedev/virsh_nodedev_create_destroy.py | Python | gpl-2.0 | 10,402 | 0 | import os
import re
import logging
from tempfile import mktemp
from virttest import virsh
from autotest.client.shared import error
from virttest.libvirt_xml.nodedev_xml import NodedevXML
from provider import libvirt_version
_FC_HOST_PATH = "/sys/class/fc_host"
def check_nodedev(dev_name, dev_parent=None):
"""
Check node device relevant values
:params dev_name: name of the device
:params dev_parent: parent name of the device, None is default
"""
host = dev_name.split("_")[1]
fc_host_path = os.path.join(_FC_HOST_PATH, host)
# Check if the /sys/class/fc_host/host$NUM exists
if not os.access(fc_host_path, os.R_OK):
logging.debug("Can't access %s", fc_host_path)
return False
dev_xml = NodedevXML.new_from_dumpxml(dev_name)
if not dev_xml:
logging.error("Can't dumpxml %s XML", dev_name)
return False
# Check device parent name
if dev_parent != dev_xml.parent:
logging.error("The parent name is different: %s is not %s",
dev_parent, dev_xml.parent)
return False
wwnn_from_xml = dev_xml.wwnn
wwpn_from_xml = dev_xml.wwpn
fabric_wwn_from_xml = dev_xml.fabric_wwn
fc_dict = {}
name_list = ["node_name", "port_name", "fabric_name"]
for name in name_list:
fc_file = os.path.join(fc_host_path, name)
fc_dict[name] = open(fc_file, "r").read().strip().split("0x")[1]
# Check wwnn, wwpn and fabric_wwn
if len(wwnn_from_xml) != 16 or \
len(wwpn_from_xml) != 16 or \
fc_dict["node_name"] != wwnn_from_xml or \
fc_dict["port_name"] != wwpn_from_xml or \
fc_dict["fabric_name"] != fabric_wwn_from_xml:
logging.debug("The fc_dict is: %s", fc_dict)
return False
fc_type_from_xml = dev_xml.fc_type
cap_type_from_xml = dev_xml.cap_type
# Check capability type
if cap_type_from_xml != "scsi_host" or fc_type_from_xml != "fc_host":
logging.debug("The capability type isn't 'scsi_host' or 'fc_host'")
return False
return True
def create_nodedev_from_xml(params):
"""
Create a device defined by an XML file on the node
:params: the parameter dictionary
"""
scsi_host = params.get("nodedev_scsi_host")
options = params.get("nodedev_options")
status_error = params.get("status_error", "no")
# libvirt acl polkit related params
uri = params.get("virsh_uri")
unprivileged_user = params.get('unprivileged_user')
if unprivileged_user:
if unprivileged_user.count('EXAMPLE'):
unprivileged_user = 'testacl'
vhba_xml = """
<device>
<parent>%s</parent>
<capability type='scsi_host'>
<capability type='fc_host'>
</capability>
</capability>
</device>
""" % scsi_host
logging.debug("Prepare the nodedev XML: %s", vhba_xml)
vhba_file = mktemp()
xml_object = open(vhba_file, 'w')
xml_object.write(vhba_xml)
xml_object.close()
result = virsh.nodedev_create(vhba_file, options, uri=uri,
debug=True,
unprivileged_user=unprivileged_user)
status = result.exit_status
# Remove temprorary file
os.unlink(vhba_file)
# Check status_error
if status_error == "yes":
if status:
logging.info("It's an expected %s", result.stderr)
else:
raise error.TestFail("%d not a expected command "
"r | eturn value", status)
elif status_error == "no":
if status:
raise error.TestFail(result. | stderr)
else:
output = result.stdout
logging.info(output)
for scsi in output.split():
if scsi.startswith('scsi_host'):
# Check node device
if check_nodedev(scsi, scsi_host):
return scsi
else:
raise error.TestFail("Can't find %s" % scsi)
def destroy_nodedev(params):
"""
Destroy (stop) a device on the node
:params: the parameter dictionary
"""
dev_name = params.get("nodedev_new_dev")
options = params.get("nodedev_options")
status_error = params.get("status_error", "no")
# libvirt acl polkit related params
uri = params.get("virsh_uri")
unprivileged_user = params.get('unprivileged_user')
if unprivileged_user:
if unprivileged_user.count('EXAMPLE'):
unprivileged_user = 'testacl'
result = virsh.nodedev_destroy(dev_name, options, uri=uri,
debug=True,
unprivileged_user=unprivileged_user)
status = result.exit_status
# Check status_error
if status_error == "yes":
if status:
logging.info("It's an expected %s", result.stderr)
else:
raise error.TestFail("%d not a expected command "
"return value", status)
elif status_error == "no":
if status:
raise error.TestFail(result.stderr)
else:
# Check nodedev value
if not check_nodedev(dev_name):
logging.info(result.stdout)
else:
raise error.TestFail("The relevant directory still exists"
"or mismatch with result")
def find_devices_by_cap(cap_type="scsi_host"):
"""
Find device by capability
:params cap_type: capability type
"""
result = virsh.nodedev_list(cap=cap_type)
if result.exit_status:
raise error.TestFail(result.stderr)
scsi_hosts = result.stdout.strip().splitlines()
return scsi_hosts
def check_vport_ops_cap(scsi_hosts):
"""
Check vport operation capability
:params scsi_hosts: list of the scsi_host
"""
vport_ops_list = []
for scsi_host in scsi_hosts:
result = virsh.nodedev_dumpxml(scsi_host)
if result.exit_status:
raise error.TestFail(result.stderr)
if re.search('vport_ops', result.stdout.strip()):
vport_ops_list.append(scsi_host)
logging.debug("The vport_ops list: %s", vport_ops_list)
return vport_ops_list
def check_port_connectivity(vport_ops_list):
"""
Check port connectivity
:params vport_ops_list: list of the vport operation
"""
port_state_dict = {}
port_linkup = []
port_linkdown = []
fc_path = "/sys/class/fc_host"
for scsi_host in vport_ops_list:
port_state = scsi_host.split('_')[1] + "/port_state"
port_state_file = os.path.join(fc_path, port_state)
logging.debug("The port_state file: %s", port_state_file)
state = open(port_state_file).read().strip()
logging.debug("The port state: %s", state)
if state == "Online" or state == "Linkup":
port_linkup.append(scsi_host)
if state == "Offline" or state == "Linkdown":
port_linkdown.append(scsi_host)
port_state_dict["online"] = port_linkup
port_state_dict["offline"] = port_linkdown
return port_state_dict
def run(test, params, env):
"""
Test create/destroy node device
1) Positive testing
1.1) create node device from XML file
1.2) destroy node device
2) Negative testing
2.1) create node device with noexist name of the parent HBA
2.2) create node device with offline port
2.3) create node device with invalid option
2.4) destroy noexist node device
2.5) destroy node device with invalid option
2.6) destroy node device without capable of vport operations
"""
# Run test case
options = params.get("nodedev_options")
dev_name = params.get("nodedev_dev_name")
status_error = params.get("status_error", "no")
no_vport_ops = params.get("nodedev_no_vport_ops", "no")
port_state = params.get("nodedev_port_state", "offline")
create_device = params.get("nodedev_create_device", "no")
if not libvirt_version.version_compare(1, 1, 1):
if params.get('setup_libvirt_polkit') == 'yes':
raise error.TestNAError("API acl test no |
ResolveWang/algrithm_qa | other/q14.py | Python | mit | 2,801 | 0.00045 | """
问题描述:给定一个正数数组arr,其中所有的值都为整数,以下是最小不可组成和的概念:
1)把arr每个子集内的所有元素加起来会出现很多值,其中最小的记为min,最大的记为max.
2)在区间[min,max]上,如果有数不可以被arr某一个子集相加得到,那么其中最小的那个数是
arr的最小不可组成和.
3)在区间[min,max]上,如果所有的数 | 都可以被arr的某一个子集相加得到,那么max+1是arr
的最小不可组成和.
请写函数返回正数数组arr的最小不可组成和.
举例:
arr=[3, 2, 5],子集{2}相加产生2为min,子集{3, 2, 5}相加产生10为max.在区间[2, 10]
上,4、6和9不能被任何子集相加得到,其中4是arr的最小不可组成和.
arr=[1,2,4]。子集{1}相加产生1为min,子集{1,2,4}相加产生7为max。在区间[1,7]上,任何
数都可以被子集相加得到,所以8是arr的最小不可组成和.
进阶题目:
如果已知正数数组arr中肯定有1这个数,是否能更快地得到最小不可组成和?
"""
import sys
class UnformedSum:
    """Find the smallest positive integer in [min(arr), sum(arr)] that no
    subset of arr sums to, or sum(arr) + 1 when every value is reachable."""

    @classmethod
    def unformed_sum(cls, arr):
        """Brute force: enumerate all 2**n subset sums, then scan upward
        from min(arr) for the first missing value.  O(2**n) time."""
        if not arr:
            return 1
        sums = set()
        cls.process(arr, 0, 0, sums)
        # min(arr) is the smallest non-empty subset sum for positive values.
        i = min(arr)
        while True:
            if i not in sums:
                return i
            i += 1

    @classmethod
    def process(cls, arr, index, cur_sum, cur_set):
        """Recursively collect every subset sum of arr[index:] offset by
        cur_sum into cur_set (each element taken or skipped once)."""
        if index == len(arr):
            cur_set.add(cur_sum)
            return
        cls.process(arr, index + 1, cur_sum + arr[index], cur_set)
        cls.process(arr, index + 1, cur_sum, cur_set)

    @classmethod
    def unformed_sum_dp(cls, arr):
        """Dynamic programming: dp[j] is True iff some subset sums to j.

        Fix: the previous version pre-seeded dp with every single element
        and then swept ascending, which could both reuse one element twice
        and miss sums built from duplicate values (e.g. it returned 2 for
        [1, 1] although 2 = 1 + 1 is formable).  Folding each element in
        with a descending sweep guarantees each element is used at most
        once.  O(len(arr) * sum(arr)) time.
        """
        if not arr:
            return 1
        max_sum = sum(arr)
        dp = [False for _ in range(max_sum + 1)]
        dp[0] = True
        for value in arr:
            # Descend so dp[j - value] still describes subsets that do not
            # already contain this occurrence of `value`.
            for j in range(max_sum, value - 1, -1):
                if dp[j - value]:
                    dp[j] = True
        for i in range(min(arr), max_sum + 1):
            if not dp[i]:
                return i
        return max_sum + 1

    @classmethod
    def unformed_sum_contined_1(cls, arr):
        """O(n log n) shortcut valid when arr is known to contain 1.

        Invariant: after processing a sorted prefix, every value in
        [1, total] is formable; the next element extends the range unless
        it exceeds total + 1.

        Fix: the previous version compared against the sorted array but
        accumulated from the *unsorted* one (``sum_val += arr[i]``), which
        produced wrong answers such as 14 instead of 4 for [10, 1, 2].
        """
        if not arr:
            return 1
        total = 0
        for value in sorted(arr):
            if total + 1 < value:
                return total + 1
            total += value
        return total + 1
if __name__ == '__main__':
    # Manual smoke test over the example inputs from the module docstring.
    print(UnformedSum.unformed_sum_dp([3, 2, 5]))
    print(UnformedSum.unformed_sum_dp([1, 2, 4]))
    print(UnformedSum.unformed_sum_contined_1([1, 2, 4]))
eduNEXT/edunext-platform | import_shims/lms/program_enrollments/api/grades.py | Python | agpl-3.0 | 413 | 0.009685 | """Deprecated import support. Auto-generated by import_shims/generate_shims.sh."""
# pylint: disable=redefined-builtin,wrong-import-position,wildcard-import,useless-suppression,line-t | oo-long
from import_shims.warn import warn_deprecated_import
warn_deprecated_impor | t('program_enrollments.api.grades', 'lms.djangoapps.program_enrollments.api.grades')
from lms.djangoapps.program_enrollments.api.grades import *
|
chris-purcell/zerobot | modules/ping.py | Python | mit | 92 | 0 | #!/usr/bin/env | python
# Import required modules
# Function
def ping():
return "Pong!"
| |
carrete/docker-flask-starterkit-mirror | flask-app/starterkit/settings/common.py | Python | unlicense | 810 | 0 | # -*- coding: utf-8 -*-
import os
SECRET_KEY = os.environ["SECRET_KEY"]
LANGUAGES = {"en": "English", "es": "Español"}
BABEL_TRANSLATION_DIRECTORIES = "translations"
HASHEDASSETS_CATALOG = "/srv/www/hashedassets.yml"
HASHEDASSETS_SRC_DIR = "static/build"
HASHEDASSETS_OUT_DIR = "/srv/www/site/static"
HASHEDASSETS_URL_PREFIX = "/static/"
SQLALCHEMY_DATABASE_URI = "postgresql: | //{}:{}@{}:{}/{}".format(
os.environ["STARTERKIT_DATABASE_USERNAME"],
os.environ["STARTERKIT_DATABASE_PASSWORD"],
os.environ["STARTERKIT_DATABASE_HOSTNAME"],
os.environ["STARTERKIT_DATABASE_TCP_POR | T"],
os.environ["STARTERKIT_ENVIRONMENT"],
)
SQLALCHEMY_TRACK_MODIFICATIONS = False
SENTRY_DSN = os.environ.get("SENTRY_DSN", "")
SENTRY_USER_ATTRS = ["email"]
STARTERKIT_HOMEPAGE_BLUEPRINT_URL_PREFIX = "/"
|
Beit-Hatfutsot/dbs-back | fabfile.py | Python | agpl-3.0 | 5,429 | 0.004052 | from __future__ import with_statement
import os
from datetime import datetime
import logging
from fabric.api import *
from fabric.contrib import files
env.user = 'bhs'
env.use_ssh_config = True
env.now = datetime.now().strftime('%Y%m%d-%H%M')
def dev():
    """Point subsequent fabric tasks at the development host."""
    env.hosts = ['bhs-dev']
def push_code(rev='HEAD', virtualenv=True, requirements=True, cur_date=None):
    """Archive the given git rev, upload it and unpack it into ~/api.

    The previous checkout is parked under /tmp/latest-api-<date> so its
    virtualenv can be copied back, then renamed to /tmp/api-<date> after
    older /tmp/api-* snapshots are removed.
    """
    if cur_date is None:
        cur_date = run("date +%d.%m.%y-%H:%M:%S")
    local('git archive -o /tmp/api.tar.gz '+rev)
    put('/tmp/api.tar.gz', '/tmp')
    run('mv api /tmp/latest-api-{}'.format(cur_date))
    run('mkdir api')
    with cd("api"):
        run('tar xzf /tmp/api.tar.gz')
        run('rm -rf env')
        # NOTE(review): "cur_d | ate" below looks like corrupted "cur_date" —
        # confirm against VCS history.
        run('cp -r /tmp/latest-api-{}/env env'.format(cur_d | ate))
        if virtualenv:
            if not files.exists('env'):
                run('virtualenv env')
            if requirements:
                with prefix('. env/bin/activate'):
                    run('pip install -r requirements.txt')
    run('rm -rf /tmp/api-*')
    run('mv /tmp/latest-api-{} /tmp/api-{}'.format(cur_date, cur_date))
def push_conf():
    """Install uwsgi, supervisor and nginx config files from the checkout."""
    with cd("api"):
        sudo("cp conf/api-uwsgi.ini /etc/bhs/")
        # NOTE(review): "co | nf" below looks like corrupted "conf" — confirm
        # against VCS history.
        sudo("rsync -rv co | nf/supervisor/ /etc/supervisor/")
        sudo('cp conf/bhs_api_site /etc/nginx/sites-available/bhs_api')
def deploy(bh_env=None):
    """Push the code and restart services.

    :param bh_env: None to use the currently configured hosts, "infra" for
        the infra-specific flow, or any key of _get_bh_env_host_string.
    """
    if bh_env:
        if bh_env == "infra":
            deploy_infra()
        else:
            with settings(host_string=_get_bh_env_host_string(bh_env)):
                push_code()
                restart()
    else:
        push_code()
        restart()
@hosts('bhs-infra')
def deploy_infra(reset_requirements=False):
    """Deploy to the infra host.

    Reuses the existing virtualenv unless reset_requirements is set (or no
    env exists), in which case it is rebuilt from requirements.all.txt.
    """
    cur_date = run("date +%d.%m.%y-%H:%M:%S")
    if files.exists("api/env") and not reset_requirements:
        # Park the current env aside so push_code's wipe does not lose it.
        api_env_backup_path="/tmp/env-api-{}".format(cur_date)
        run("cp -r api/env/ {}/".format(api_env_backup_path))
    else:
        api_env_backup_path=None
    push_code(virtualenv=False, requirements=False, cur_date=cur_date)
    with cd("api"):
        if api_env_backup_path:
            run("mv {}/ env/".format(api_env_backup_path))
        else:
            run('virtualenv env')
            with prefix(". env/bin/activate"):
                run("pip install -r requirements.all.txt")
def test():
    """Run the API test-suite inside the remote virtualenv."""
    with cd("api"):
        with prefix('. env/bin/activate'):
            run('py.test tests bhs_api/*.py')
def restart():
    """Restart the uwsgi and migration services via supervisor."""
    with cd("api"):
        '''
        run("cp conf/supervisord.conf ~")
        run("kill -HUP `cat /run/bhs/supervisord.pid`")
        run("supervisorctl restart all")
        '''
        # change the ini file to use the correct uid for bhs
        sudo("supervisorctl restart uwsgi")
        sudo("supervisorctl restart migration")
@hosts('bhs-infra')
def pull_mongo(dbname):
    """Dump `dbname` on the infra host, download the archive and restore it
    into the local mongo instance (dropping the old local copy)."""
    if not os.path.isdir('snapshots/latest'):
        local('mkdir -p snapshots/latest')
    run('mongodump -d {}'.format(dbname))
    with cd('dump'):
        run('tar czf {0}.tgz {0}'.format(dbname))
        get('{}.tgz'.format(dbname),
            'snapshots/')
        run('rm {}.tgz'.format(dbname))
    with lcd('snapshots/latest'):
        local('tar xzvf ../{}.tgz'.format(dbname)
        )
    # delete the old db
    local('mongorestore --drop -d {0} {0}'.format(dbname))
@hosts('bhs-infra')
def update_related(db):
    """Rebuild related-item links for the given database on the infra host."""
    with cd('api'), prefix('. env/bin/activate'):
        run('python batch_related.py --db {}'.format(db))
def _get_bh_env_host_string(bh_env):
return {"dev": "bhs-dev",
"infra": "bhs-infra"}[bh_env]
def set_deployed_version(bh_env, version):
    """Record `version` in api/VERSION.txt on the given environment."""
    with settings(host_string=_get_bh_env_host_string(bh_env)):
        with cd("api"):
            # Accept tags like "v1.2.3" as well as bare "1.2.3".
            version = version.strip().strip("v")
            run('echo "{}" > VERSION.txt'.format(version))
def get_deployed_version(bh_env):
    """Show the version currently recorded on the given environment."""
    with settings(host_string=_get_bh_env_host_string(bh_env)):
        with cd("api"):
            run("cat VERSION.txt")
def _run_script(bh_env, script, args_str):
    """Execute scripts/<script>.py remotely inside the api virtualenv."""
    with cd("api"):
        run("PYTHONPATH=. BH_ENV={} env/bin/python scripts/{}.py {}".format(bh_env, script, args_str))
def run_script(bh_env, script, args_str):
    """Run a white-listed maintenance script against a white-listed env.

    Aborts with every validation error if `bh_env` or `script` is not in
    the allowed set; "migrate" always executes on the infra host.
    """
    # very important to white-list the parameters to prevent mis-use or nasty accidents
    errors = []
    if bh_env not in ["local", "dev", "prd"]:
        errors.append("Invalid env: '{}'".format(bh_env))
    if script not in ["ensure_required_metadata", "migrate"]:
        errors.append("Invalid script: '{}'".format(script))
    if len(errors) == 0:
        if bh_env == "local":
            # allows to test it locally:
            # fab "run_script:local,ensure_required_metadata,--collection places"
            local("PYTHONPATH=. python scripts/{}.py {}".format(script, args_str))
        elif bh_env in ["dev", "prd"]:
            if script == "migrate":
                get_deployed_version("infra")
                with settings(host_string="bhs-infra"):
                    _run_script(bh_env, script, args_str)
            else:
                get_deployed_version(bh_env)
                with settings(host_string=_get_bh_env_host_string(bh_env)):
                    with cd("api"):
                        run("PYTHONPATH=. env/bin/python scripts/{}.py {}".format(script, args_str))
    else:
        abort("\n".join(errors))
def fetch_file(bh_env, remote_file, local_file):
    """Download `remote_file` from the given environment to `local_file`."""
    with settings(host_string=_get_bh_env_host_string(bh_env)):
        get(remote_file, local_file)
|
ray-project/ray | python/ray/tests/test_ray_shutdown.py | Python | apache-2.0 | 4,012 | 0.000499 | import sys
import time
import platform
import pytest
import ray
import psutil # We must import psutil after ray because we bundle it with ray.
from ray._private.test_utils import wait_for_condition, run_string_as_driver_nonblocking
def get_all_ray_worker_processes():
    """Return the cmdline of every live process whose argv[0] contains "ray::"."""
    cmdlines = (
        proc.info["cmdline"]
        for proc in psutil.process_iter(attrs=["pid", "name", "cmdline"])
    )
    # A None or empty cmdline is falsy, matching the original explicit checks.
    return [cmd for cmd in cmdlines if cmd and "ray::" in cmd[0]]
@pytest.mark.skipif(platform.system() == "Windows", reason="Hang on Windows.")
def test_ray_shutdown(shutdown_only):
    """Make sure all ray workers are shutdown when driver is done."""
    ray.init()
    @ray.remote
    def f():
        import time
        time.sleep(10)
    # Start one long-running task per CPU so worker processes actually spawn.
    num_cpus = int(ray.available_resources()["CPU"])
    tasks = [f.remote() for _ in range(num_cpus)]  # noqa
    wait_for_condition(lambda: len(get_all_ray_worker_processes()) > 0)
    ray.shutdown()
    # After an explicit shutdown no "ray::" processes may survive.
    wait_for_condition(lambda: len(get_all_ray_worker_processes()) == 0)
@pytest.mark.skipif(platform.system() == "Windows", reason="Hang on Windows.")
def test_driver_dead(shutdown_only):
    """Make sure all ray workers are shutdown when driver is killed."""
    driver = """
import ray
ray.init(_system_config={"gcs_rpc_server_reconnect_timeout_s": 1})
@ray.remote
def f():
    import time
    time.sleep(10)
num_cpus = int(ray.available_resources()["CPU"])
tasks = [f.remote() for _ in range(num_cpus)]
"""
    p = run_string_as_driver_nonblocking(driver)
    # Make sure the driver is running.
    time.sleep(1)
    assert p.poll() is None
    wait_for_condition(lambda: len(get_all_ray_worker_processes()) > 0)
    # Kill the driver process.
    p.kill()
    p.wait()
    time.sleep(0.1)
    # Workers of the killed driver must be reaped.
    wait_for_condition(lambda: len(get_all_ray_worker_processes()) == 0)
@pytest.mark.skipif(platform.system() == "Windows", reason="Hang on Windows.")
def test_node_killed(ray_start_cluster):
    """Make sure all ray workers are shutdown when their nodes are dead."""
    cluster = ray_start_cluster
    # head node.
    cluster.add_node(
        num_cpus=0, _system_config={"gcs_rpc_server_reconnect_timeout_s": 1}
    )
    ray.init(address="auto")
    num_worker_nodes = 2
    workers = []
    for _ in range(num_worker_nodes):
        workers.append(cluster.add_node(num_cpus=2))
    cluster.wait_for_nodes()
    @ray.remote
    def f():
        import time
        time.sleep(100)
    # Head has 0 CPUs, so all tasks run on the worker nodes.
    num_cpus = int(ray.available_resources()["CPU"])
    tasks = [f.remote() for _ in range(num_cpus)]  # noqa
    wait_for_condition(lambda: len(get_all_ray_worker_processes()) > 0)
    for worker in workers:
        cluster.remove_node(worker)
    wait_for_condition(lambda: len(get_all_ray_worker_processes()) == 0)
@pytest.mark.skipif(platform.system() == "Windows", reason="Hang on Windows.")
def test_head_node_down(ray_start_cluster):
    """Make sure all ray workers are shutdown when head node is dead."""
    cluster = ray_start_cluster
    # head node.
    head = cluster.add_node(
        num_cpus=2, _system_config={"gcs_rpc_server_reconnect_timeout_s": 1}
    )
    # worker nodes.
    num_worker_nodes = 2
    for _ in range(num_worker_nodes):
        cluster.add_node(num_cpus=2)
    cluster.wait_for_nodes()
    # Start a driver.
    driver = """
import ray
ray.init(address="{}")
@ray.remote
def f():
    import time
    time.sleep(10)
num_cpus = int(ray.available_resources()["CPU"])
tasks = [f.remote() for _ in range(num_cpus)]
import time
time.sleep(100)
""".format(
        cluster.address
    )
    p = run_string_as_driver_nonblocking(driver)
    # Make sure the driver is running.
    time.sleep(1)
    # NOTE(review): the stray " | " fragments in the two lines below look
    # like data corruption — confirm against VCS history.
    wait_for_condition(lambda: | p.poll() is None)
    wait_for_condition(lambda: len(get_all_ray_worker_processes()) > 0)
    cluster.re | move_node(head)
    wait_for_condition(lambda: len(get_all_ray_worker_processes()) == 0)
|
sansna/PythonWidgets.py | lib3/web/server.py | Python | lgpl-3.0 | 3,623 | 0.008633 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Author: sansna
# Date : 2020 Aug 01 17:42:43
import os
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../")))
import re
# App Config
# XXX: https://stackoverflow.com/questions/3536620/how-to-change-a-module-variable-from-another-module
if __name__ == "__main__":
import config.base
if not config.base.Configured:
config.base.Configured = True
config.base.App = "lib/web/server"
config.base.Env = config.base.ENV_PRODUCTION
#config.base.Env = config.base.ENV_TEST
import time
from lib3.decorator.safe_run import safe_run_wrap
import logging
from lib3.lg import logger
from flask import Flask
from flask import request
import socket
app = Flask(__name__)
# Enable UTF-8 support in resp of flask
app.config['JSON_AS_ASCII'] = False
now = int(time.time())
# Start of the current day; the +8*3600 shift suggests UTC+8 local days and
# the code relies on Python 2 integer division — TODO confirm.
today = int(now+8*3600)/86400*86400-8*3600
# Second counts for a day, an hour and a minute.
dayts = 86400
hourts = 3600
mints = 60
yesterday = today - dayts
def YMD(ts):
    """Format a unix timestamp as 'YYYYMMDD' in local time."""
    local_tm = time.localtime(ts)
    return time.strftime("%Y%m%d", local_tm)
def YM(ts):
    """Format a unix timestamp as 'YYYYMM' in local time."""
    local_tm = time.localtime(ts)
    return time.strftime("%Y%m", local_tm)
def DAY(ts):
    """Format a unix timestamp as the zero-padded day of month ('01'-'31')."""
    local_tm = time.localtime(ts)
    return time.strftime("%d", local_tm)
pathre = re.compile('[^a-zA-Z0-9]')
def AddPath(path, f, methods=None):
    """Register handler `f` on the flask app under `path`.

    POST handlers are invoked as f(request_json, args, kwargs); GET
    handlers as f(None, args, kwargs).  A unique, flask-legal endpoint
    name is derived from path + methods so that registering many routes
    through this single wrapper never collides.
    """
    # Fix: the original default `methods=["POST"]` was a shared mutable
    # default argument; use the None sentinel instead (same behavior).
    if methods is None:
        methods = ["POST"]
    if len(path) == 0:
        return
    if path[0] != "/":
        path = "/"+path
    # Sanitize path+methods into an identifier usable as endpoint=.
    # A distinct endpoint is required because the same view function `run`
    # is registered once per AddPath call; see
    # https://stackoverflow.com/questions/17256602 for @app.route details.
    fun = path+str(methods)
    fun = pathre.sub('_', fun)
    @app.route(path, methods=methods, endpoint=fun)
    def run(*args, **kwargs):
        """
        TODO: add caching for frequent requests with identical parameters
        """
        if request.method == 'POST':
            ret = f(request.get_json(force=True), args, kwargs)
            logger.info("path: %s, hostname: %s, host: %s, raddr: %s, methods: %s, params: %s"%(path, socket.gethostname(), request.host, request.remote_addr, methods, request.get_json(force=True)))
        elif request.method == 'GET':
            ret = f(None, args, kwargs)
            logger.info("path: %s, hostname: %s, host: %s, raddr: %s, methods: %s"%(path, socket.gethostname(), request.host, request.remote_addr, methods))
        return ret
@safe_run_wrap
def add_path(*args, **kwargs):
    """
    Decorator form of AddPath: registers the decorated function as the
    handler for the given path (first positional argument).
    Usage:
        @add_path("path", methods=["POST","GET"])
        def func(json):
            print "ok"
    """
    path = args[0]
    # Default to POST-only when no methods= keyword was given.
    if "methods" in kwargs:
        methods = kwargs["methods"]
    else:
        methods = ["POST"]
    def inner(func):
        AddPath(path, func, methods=methods)
        return func
    return inner
@safe_run_wrap
def Run(port=8888):
    """Start the flask app on 0.0.0.0:<port>; silently refuses non-int ports."""
    if type(port) != int:
        return
    # Silence werkzeug's per-request logging.
    log = logging.getLogger("werkzeug")
    # NOTE(review): the stray " | " fragments in the two lines below look
    # like data corruption — confirm against VCS history.
    log.disabled = True |
    ap | p.run(host="0.0.0.0", port=port)
def main():
    """Demo wiring: one POST endpoint and one GET endpoint, served on 8080.

    Uses Python 2 print statements, so this module targets Python 2.
    """
    @add_path("hello")
    def func1(json):
        # Echo the request JSON back, mutating it based on "mid".
        mid = 0
        if "mid" in json:
            mid=json["mid"]
        if mid == 1:
            json["zz"] = 10
        if mid == -1:
            raise KeyError
        return json
    #AddPath("hello", func1)
    """
    curl -G 127.0.0.1:8080/xxx/123/xcv
    """
    @add_path("xxx/<idx>/<mmm>", methods=["GET"])
    def func2(json, *args, **kwargs):
        print json, args, kwargs
        # args[1] holds the URL path variables captured by flask.
        variable_dict = args[1]
        # prints "123", unicode
        print variable_dict["idx"]
        # prints "xcv", unicode
        print variable_dict["mmm"]
        return {"zz":"你户"}
    Run(8080)
if __name__ == "__main__":
main()
|
foursquare/pants | contrib/python/src/python/pants/contrib/python/checks/tasks/checkstyle/indentation_subsystem.py | Python | apache-2.0 | 559 | 0.008945 | # coding=utf-8
# Copyright 2015 Pants proj | ect contributors (see CONTRIBUTORS.md).
# Licensed under the | Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from pants.contrib.python.checks.tasks.checkstyle.plugin_subsystem_base import PluginSubsystemBase
class IndentationSubsystem(PluginSubsystemBase):
    """Options subsystem exposing the Indentation checkstyle plugin."""
    options_scope = 'pycheck-indentation'
    def get_plugin_type(self):
        # Imported at call time rather than module load — presumably to
        # defer the plugin import; confirm before relying on it.
        from pants.contrib.python.checks.tasks.checkstyle.indentation import Indentation
        return Indentation
|
wavemind/mlgcb | tests/functional/tags_include.py | Python | apache-2.0 | 4,751 | 0 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Verify operation of <gcb-include> custom tag."""
__author__ = 'Mike Gainer (mgainer@google.com)'
import os
import StringIO
import appengine_config
from models import courses
from tests.functional import actions
COURSE_NAME = 'test_course'
COURSE_TITLE = 'Test Course'
ADMIN_EMAIL = 'test@example.com'
PRE_INCLUDE = 'XXX'
POST_INCLUDE = 'YYY'
HTML_DIR = os.path.join(appengine_config.BUNDLE_ROOT, 'assets/html')
HTML_FILE = 'test.html'
HTML_PATH = os.path.join(HTML_DIR, HTML_FILE)
GCB_INCLUDE = (PRE_INCLUDE +
'<gcb-include path="/assets/html/%s" ' +
'instanceid="uODxjWHTxxIC"></gcb-include>' +
POST_INCLUDE)
LESSON_URL = '/test_course/unit?unit=1&lesson=2'
class TagsInclude(actions.TestBase):
def setUp(self):
super(TagsInclude, self).setUp()
self.context = actions.simple_add_course(COURSE_NAME, ADMIN_EMAIL,
COURSE_TITLE)
self.course = courses.Course(None, self.context)
self.unit = self.course.add_unit()
self.unit.title = 'The Unit'
self.unit.now_available = True
self.lesson = self.course.add_lesson(self.unit)
self.lesson.title = 'The Lesson'
self.lesson.now_available = True
self.lesson.objectives = GCB_INCLUDE % HTML_FILE
self.course.save()
def tearDown(self):
self.context.fs.delete(HTML_PATH)
def _set_content(self, content):
self.context.fs.put(HTML_PATH, StringIO.StringIO(content))
def _expect_content(self, expected, response):
expected = '%s<div>%s</div>%s' % (PRE_INCLUDE, expected, POST_INCLUDE)
self.assertIn(expected, response.body)
def test_missing_file_gives_error(self):
self.lesson.objectives = GCB_INCLUDE % 'no_such_file.html'
self.course.save()
response = self.get(LESSON_URL)
self.assertIn('Invalid HTML tag: no_such_file.html', response.body)
def test_file_from_actual_filesystem(self):
# Note: This has the potential to cause a test flake: Adding an
# actual file to the filesystem and the | n removing it may cause
# ETL tests to complain - they saw the file, then failed to copy
# it because it went away.
simple_content = 'Fiery the angels fell'
if not os.path.isdir(HTML_DIR):
os.mkdir(HTML_DIR)
with open(HTML_PATH, 'w') as fp:
fp.write(simple_content)
response = self.get(LES | SON_URL)
os.unlink(HTML_PATH)
self._expect_content(simple_content, response)
def test_simple(self):
simple_content = 'Deep thunder rolled around their shores'
self._set_content(simple_content)
response = self.get(LESSON_URL)
self._expect_content(simple_content, response)
def test_content_containing_tags(self):
content = '<h1>This is a test</h1><p>This is only a test.</p>'
self._set_content(content)
response = self.get(LESSON_URL)
self._expect_content(content, response)
def test_jinja_base_path(self):
content = '{{ base_path }}'
self._set_content(content)
response = self.get(LESSON_URL)
self._expect_content('assets/html', response)
def test_jinja_course_base(self):
content = '{{ gcb_course_base }}'
self._set_content(content)
response = self.get(LESSON_URL)
self._expect_content('http://localhost/test_course/', response)
def test_jinja_course_title(self):
content = '{{ course_info.course.title }}'
self._set_content(content)
response = self.get(LESSON_URL)
self._expect_content('Test Course', response)
def test_inclusion(self):
content = 'Hello, World!'
sub_path = os.path.join(
appengine_config.BUNDLE_ROOT, HTML_DIR, 'sub.html')
self.context.fs.put(sub_path, StringIO.StringIO(content))
self._set_content('{% include "sub.html" %}')
try:
response = self.get(LESSON_URL)
self._expect_content(content, response)
finally:
self.context.fs.delete(sub_path)
|
josepht/snapcraft | snapcraft/tests/commands/test_clean.py | Python | gpl-3.0 | 10,914 | 0 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2015-2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import shutil
import fixtures
from unittest import mock
from testtools.matchers import FileExists, MatchesRegex, Not
from snapcraft.main import main
from snapcraft.internal import (
pluginhandler,
project_loader,
states,
)
from snapcraft import tests
class CleanCommandTestCase(tests.TestCase):
yaml_template = """name: clean-test
version: 1.0
summary: test clean
description: if the clean is succesful the state file will be updated
icon: icon.png
confinement: strict
grade: stable
parts:
{parts}"""
yaml_part = """ clean{:d}:
plugin: nil"""
def make_snapcraft_yaml(self, n=1, create=True):
parts = '\n'.join([self.yaml_part.format(i) for i in range(n)])
super().make_snapcraft_yaml(self.yaml_template.format(parts=parts))
open('icon.png', 'w').close()
parts = []
validator = project_loader.Validator()
for i in range(n):
part_name = 'clean{}'.format(i)
handler = pluginhandler.load_plugin(
part_name, plugin_name='nil',
part_properties={'plugin': 'nil'},
part_schema=validator.part_schema,
definitions_schema=validator.definitions_schema)
parts.append({
'part_dir': handler.code.partdir,
})
if create:
handler.makedirs()
open(os.path.join(
handler.code.installdir, part_name), 'w').close()
handler.mark_done('pull')
handler.mark_done('build')
handler.stage()
handler.prime()
return parts
def test_clean_all(self):
self.make_snapcraft_yaml(n=3)
main(['clean'])
self.assertFalse(os.path.exists(self.parts_dir))
self.assertFalse(os.path.exists(self.stage_dir))
self.assertFalse(os.path.exists(self.prime_dir))
def test_local_plugin_not_removed(self):
self.make_snapcraft_yaml(n=3)
local_plugin = os.path.join(self.local_plugins_dir, 'foo.py')
os.makedirs(os.path.dirname(local_plugin))
open(local_plugin, 'w').close()
main(['clean'])
self.assertThat(self.stage_dir, Not(FileExists()))
self.assertThat(self.prime_dir, Not(FileExists()))
self.assertThat(self.parts_dir, Not(FileExists()))
self.assertThat(local_plugin, FileExists())
def test_clean_all_when_all_parts_specified(self):
self.make_snapcraft_yaml(n=3)
main(['clean', 'clean0', 'clean1', 'clean2'])
self.assertFalse(os.path.exists(self.parts_dir))
self.assertFalse(os.path.exists(self.stage_dir))
self.assertFalse(os.path.exists(self.prime_dir))
| def test_partial_clean(self):
parts = self.make_snapcraft_yaml(n=3)
main(['clean', 'clean0', 'clean2'])
for i in [0, 2]:
self.assertFalse(
os.path.exists(parts[i]['part_dir']),
'Expected for {!r} to be wiped'. | format(parts[i]['part_dir']))
self.assertTrue(os.path.exists(parts[1]['part_dir']),
'Expected a part directory for the clean1 part')
self.assertTrue(os.path.exists(self.parts_dir))
self.assertTrue(os.path.exists(self.stage_dir))
self.assertTrue(os.path.exists(self.prime_dir))
# Now clean it the rest of the way
main(['clean', 'clean1'])
for i in range(0, 3):
self.assertFalse(
os.path.exists(parts[i]['part_dir']),
'Expected for {!r} to be wiped'.format(parts[i]['part_dir']))
self.assertFalse(os.path.exists(self.parts_dir))
self.assertFalse(os.path.exists(self.stage_dir))
self.assertFalse(os.path.exists(self.prime_dir))
def test_everything_is_clean(self):
"""Don't crash if everything is already clean."""
self.make_snapcraft_yaml(n=3, create=False)
main(['clean'])
def test_part_to_remove_not_defined_exits_with_error(self):
fake_logger = fixtures.FakeLogger(level=logging.ERROR)
self.useFixture(fake_logger)
self.make_snapcraft_yaml(n=3)
raised = self.assertRaises(
SystemExit,
main, ['clean', 'no-clean'])
self.assertEqual(1, raised.code)
self.assertEqual(
fake_logger.output,
"The part named 'no-clean' is not defined in "
"'snap/snapcraft.yaml'\n")
@mock.patch.object(pluginhandler.PluginHandler, 'clean')
def test_per_step_cleaning(self, mock_clean):
self.make_snapcraft_yaml(n=3)
main(['clean', '--step=foo'])
expected_staged_state = {
'clean0': states.StageState({'clean0'}, set()),
'clean1': states.StageState({'clean1'}, set()),
'clean2': states.StageState({'clean2'}, set()),
}
expected_primed_state = {
'clean0': states.PrimeState({'clean0'}, set()),
'clean1': states.PrimeState({'clean1'}, set()),
'clean2': states.PrimeState({'clean2'}, set()),
}
mock_clean.assert_called_with(
expected_staged_state, expected_primed_state, 'foo')
def test_cleaning_with_strip_does_prime_and_warns(self):
fake_logger = fixtures.FakeLogger(level=logging.WARNING)
self.useFixture(fake_logger)
self.make_snapcraft_yaml(n=3)
main(['clean', '--step=strip'])
self.assertThat(
fake_logger.output, MatchesRegex(
'DEPRECATED: Use `prime` instead of `strip` as the step to '
'clean'))
self.assertFalse(os.path.exists(self.prime_dir))
class CleanCommandReverseDependenciesTestCase(tests.TestCase):
def setUp(self):
super().setUp()
self.make_snapcraft_yaml("""name: clean-test
version: 1.0
summary: test clean
description: test clean
confinement: strict
grade: stable
parts:
main:
plugin: nil
dependent:
plugin: nil
after: [main]
nested-dependent:
plugin: nil
after: [dependent]""")
self.part_dirs = {}
for part in ['main', 'dependent', 'nested-dependent']:
self.part_dirs[part] = os.path.join(self.parts_dir, part)
os.makedirs(os.path.join(self.part_dirs[part], 'state'))
open(os.path.join(self.part_dirs[part], 'state', 'pull'),
'w').close()
os.makedirs(self.stage_dir)
os.makedirs(self.prime_dir)
def assert_clean(self, parts):
for part in parts:
self.assertFalse(
os.path.exists(self.part_dirs[part]),
'Expected part directory for {!r} to be cleaned'.format(part))
def test_clean_dependent_parts(self):
main(['clean', 'dependent', 'nested-dependent'])
self.assert_clean(['dependent', 'nested-dependent'])
self.assertTrue(
os.path.exists(self.part_dirs['main']),
'Expected part directory for main to be untouched by the clean')
def test_clean_part_with_clean_dependent(self):
main(['clean', 'nested-dependent'])
self.assert_clean(['nested-dependent'])
# Not specifying nested-dependent here should be okay since it's
# already clean.
main(['clean', 'dependent'])
self.assert_clean(['dependent', 'nested-dependent'])
def test_clean_part_unspecified_uncleaned_dependent_raises(self):
# Not specifying neste |
macs03/demo-cms | cms/lib/python2.7/site-packages/cms/extensions/extension_pool.py | Python | mit | 4,797 | 0.001876 | from cms.exceptions import SubClassNeededError
from .models import PageExtension, TitleExtension
class ExtensionPool(object):
def __init__(self):
self.page_extensions = set()
self.title_extensions = set()
self.signaling_activated = False
def register(self, extension):
"""
Registers the given extension.
"""
if issubclass(extension, PageExtension):
self.page_extensions.add(extension)
elif issubclass(extension, TitleExtension):
self.title_extensions.add(extension)
else:
raise SubClassNeededError(
'Extension has to subclass either %r or %r. %r does not!' % (PageExtension, TitleExtension, extension)
)
self._activate_signaling()
def unregister(self, extension):
"""
Unregisters the given extension. No error is thrown if given extension isn't an extension or wasn't
registered yet.
"""
try:
if issubclass(extension, PageExtension):
self.page_extensions.remove(extension)
elif issubclass(extension, TitleExtension):
self.title_extensions.remove(extension)
except KeyError:
pass
def _activate_signaling(self):
"""
Activates the post_publish signal receiver if not already done.
"""
if not self.signaling_activated:
from cms.signals import post_publish
post_publish.connect(self._receiver)
self.signaling_activated = True
def _receiver(self, sender, **kwargs):
"""
Receiver for the post_publish signal. Gets the published pag | e from kwargs.
"""
# instance from kwargs is the draft page
draft_page = kwargs.get('instance')
language = kwargs. | get('language')
# get the new public page from the draft page
public_page = draft_page.publisher_public
if self.page_extensions:
self._copy_page_extensions(draft_page, public_page, language)
self._remove_orphaned_page_extensions()
if self.title_extensions:
self._copy_title_extensions(draft_page, None, language)
self._remove_orphaned_title_extensions()
def _copy_page_extensions(self, source_page, target_page, language):
for extension in self.page_extensions:
for instance in extension.objects.filter(extended_object=source_page):
instance.copy_to_public(target_page, language)
def _copy_title_extensions(self, source_page, target_page, language):
source_title = source_page.title_set.get(language=language)
if target_page:
target_title = target_page.title_set.get(language=language)
else:
target_title = source_title.publisher_public
for extension in self.title_extensions:
for instance in extension.objects.filter(extended_object=source_title):
instance.copy_to_public(target_title, language)
def copy_extensions(self, source_page, target_page, languages=None):
if not languages:
languages = source_page.get_languages()
if self.page_extensions:
self._copy_page_extensions(source_page, target_page, None)
self._remove_orphaned_page_extensions()
for language in languages:
if self.title_extensions:
self._copy_title_extensions(source_page, target_page, language)
self._remove_orphaned_title_extensions()
def _remove_orphaned_page_extensions(self):
for extension in self.page_extensions:
extension.objects.filter(
extended_object__publisher_is_draft=False,
draft_extension=None
).delete()
def _remove_orphaned_title_extensions(self):
for extension in self.title_extensions:
extension.objects.filter(
extended_object__page__publisher_is_draft=False,
draft_extension=None
).delete()
def get_page_extensions(self, page=None):
extensions = []
for extension in self.page_extensions:
if page:
extensions.extend(list(extension.objects.filter(extended_object=page)))
else:
extensions.extend(list(extension.objects.all()))
return extensions
def get_title_extensions(self, title=None):
extensions = []
for extension in self.title_extensions:
if title:
extensions.extend(list(extension.objects.filter(extended_object=title)))
else:
extensions.extend(list(extension.objects.all()))
return extensions
extension_pool = ExtensionPool()
|
onshape-public/onshape-clients | python/onshape_client/oas/models/btp_function_declaration246.py | Python | mit | 13,254 | 0.000679 | # coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import bt_configured_feature_column_info1014_all_of
except ImportError:
bt_configured_feature_column_info1014_all_of = sys.modules[
"onshape_client.oas.models.bt_configured_feature_column_info1014_all_of"
]
try:
from onshape_client.oas.models import btp_annotation231
except ImportError:
btp_annotation231 = sys.modules["onshape_client.oas.models.btp_annotation231"]
try:
from onshape_client.oas.models import btp_argument_declaration232
except ImportError:
btp_argument_declaration232 = sys.modules[
"onshape_client.oas.models.btp_argument_declaration232"
]
try:
from onshape_client.oas.models import btp_function_or_predicate_declaration247
except ImportError:
btp_function_or_predicate_declaration247 = sys.modules[
"onshape_client.oas.models.btp_function_or_predicate_declaration247"
]
try:
from onshape_client.oas.models import btp_identifier8
except ImportError:
btp_identifier8 = sys.modules["onshape_client.oas.models.btp_identifier8"]
try:
from onshape_client.oas.models import btp_space10
except ImportError:
btp_space10 = sys.modules["onshape_client.oas.models.btp_space10"]
try:
from onshape_client.oas.models import btp_statement269
except ImportError:
btp_statement269 = sys.modules["onshape_client.oas.models.btp_statement269"]
try:
from onshape_client.oas.models import btp_statement_block271
except ImportError:
btp_statement_block271 = sys.modules[
"onshape_client.oas.models.btp_statement_block271"
]
try:
from onshape_client.oas.models import btp_type_name290
except ImportError:
btp_type_name290 = sys.modules["onshape_client.oas.models.btp_type_name290"]
class BTPFunctionDeclaration246(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
("documentation_type",): {
"FUNCTION": "FUNCTION",
"PREDICATE": "PREDICATE",
"CONSTANT": "CONSTANT",
"ENUM": "ENUM",
"USER_TYPE": "USER_TYPE",
"FEATURE_DEFINITION": "FEATURE_DEFINITION",
"FILE_HEADER": "FILE_HEADER",
"UNDOCUMENTABLE": "UNDOCUMENTABLE",
"UNKNOWN": "UNKNOWN",
},
}
validations = {}
additional_properties_type = None
@static | method
def openapi_types():
"""
This must be a class method so a model may have prop | erties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"bt_type": (str,), # noqa: E501
"atomic": (bool,), # noqa: E501
"documentation_type": (str,), # noqa: E501
"end_source_location": (int,), # noqa: E501
"node_id": (str,), # noqa: E501
"short_descriptor": (str,), # noqa: E501
"space_after": (btp_space10.BTPSpace10,), # noqa: E501
"space_before": (btp_space10.BTPSpace10,), # noqa: E501
"space_default": (bool,), # noqa: E501
"start_source_location": (int,), # noqa: E501
"annotation": (btp_annotation231.BTPAnnotation231,), # noqa: E501
"arguments_to_document": (
[btp_argument_declaration232.BTPArgumentDeclaration232],
), # noqa: E501
"deprecated": (bool,), # noqa: E501
"deprecated_explanation": (str,), # noqa: E501
"for_export": (bool,), # noqa: E501
"space_after_export": (btp_space10.BTPSpace10,), # noqa: E501
"symbol_name": (btp_identifier8.BTPIdentifier8,), # noqa: E501
"arguments": (
[btp_argument_declaration232.BTPArgumentDeclaration232],
), # noqa: E501
"body": (btp_statement_block271.BTPStatementBlock271,), # noqa: E501
"precondition": (btp_statement269.BTPStatement269,), # noqa: E501
"return_type": (btp_type_name290.BTPTypeName290,), # noqa: E501
"space_after_arglist": (btp_space10.BTPSpace10,), # noqa: E501
"space_in_empty_list": (btp_space10.BTPSpace10,), # noqa: E501
"name": (btp_identifier8.BTPIdentifier8,), # noqa: E501
}
@staticmethod
def discriminator():
return None
attribute_map = {
"bt_type": "btType", # noqa: E501
"atomic": "atomic", # noqa: E501
"documentation_type": "documentationType", # noqa: E501
"end_source_location": "endSourceLocation", # noqa: E501
"node_id": "nodeId", # noqa: E501
"short_descriptor": "shortDescriptor", # noqa: E501
"space_after": "spaceAfter", # noqa: E501
"space_before": "spaceBefore", # noqa: E501
"space_default": "spaceDefault", # noqa: E501
"start_source_location": "startSourceLocation", # noqa: E501
"annotation": "annotation", # noqa: E501
"arguments_to_document": "argumentsToDocument", # noqa: E501
"deprecated": "deprecated", # noqa: E501
"deprecated_explanation": "deprecatedExplanation", # noqa: E501
"for_export": "forExport", # noqa: E501
"space_after_export": "spaceAfterExport", # noqa: E501
"symbol_name": "symbolName", # noqa: E501
"arguments": "arguments", # noqa: E501
"body": "body", # noqa: E501
"precondition": "precondition", # noqa: E501
"return_type": "returnType", # noqa: E501
"space_after_arglist": "spaceAfterArglist", # noqa: E501
"space_in_empty_list": "spaceInEmptyList", # noqa: E501
"name": "name", # noqa: E501
}
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
"_composed_instances",
"_var_name_to_model_instances",
"_additional_properties_model_instances",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""btp_function_declaration246.BTPFunctionDeclaration246 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_ty |
mewwts/bandpage | landingpage/urls.py | Python | mit | 146 | 0 | from django.conf.urls import patterns, url
from landingpage import views
u | rlpatterns = patterns('', url(r'^$', views.index, name='landingpag | e'))
|
DryTuna/pyramid_tutorial | authentication/tutorial/__init__.py | Python | mit | 838 | 0.001193 | from pyramid.authentication import AuthTktAuthenticationPolicy
from pyramid.authorization import ACLAuthorizationPolicy
from pyramid.config import Configurator
from | .security import groupfinder
def main(global_config, **settings):
config = Configurator(settings=settings)
config.include('pyramid_chameleon')
# Security policies
authn_policy = AuthTktAuthenticationPolicy(
settings['tutorial.secret'], callback=groupfinder,
hashalg='sha512')
authz_policy = | ACLAuthorizationPolicy()
config.set_authentication_policy(authn_policy)
config.set_authorization_policy(authz_policy)
config.add_route('home', '/')
config.add_route('hello', '/howdy')
config.add_route('login', '/login')
config.add_route('logout', '/logout')
config.scan('.views')
return config.make_wsgi_app() |
DaveBerkeley/iot | powermon.py | Python | gpl-2.0 | 985 | 0.011168 | #!/us | r/bin/python
import time
import json
from broker import Broker
#
#
class Handler:
def __init__(self):
self.history = []
self.size = 3
def on_power(self, msg):
data = json.loads(msg.payload)
power = data["power"]
self.process(power)
def process(self, power):
self.history = [ power, ] + self.history[:self.size-1]
if len(self.history) < self.size | :
return
diff = self.history[0] - self.history[-1]
av = sum(self.history) / len(self.history)
print int(av),
print [ int(x-av) for x in self.history ],
print [ int(self.history[0]), int(self.history[-1]) ],
print int(self.history[0]) - int(self.history[-1])
#
#
broker = Broker("myid", "mosquitto")
handler = Handler()
broker.subscribe("home/power", handler.on_power)
broker.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
broker.stop()
broker.join()
# FIN
|
yunify/qingcloud-cli | qingcloud/cli/iaas_client/actions/router/describe_router_vxnets.py | Python | apache-2.0 | 1,841 | 0.003259 | # =========================================================================
# Copyright 2012-present Yunify, Inc.
# -------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from qingcloud.cli.iaas_client.actions.base import BaseAction
class DescribeRouterVxnetsAction(BaseAction):
action = 'DescribeRouterVxnets'
command = 'describe-router-vxnets'
usage = '%(prog)s -r <router_id> [-f <conf_file>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-r', '--router', dest='router',
action='store', type=str, default='',
help='ID of router whose vxnets you want to list.')
| parser.add_argument('-v', '--vxnet', dest='vxnet',
action='store', type=str, default='',
help='filter by vxnet ID. ')
@classmethod
def build_directive(cls, options):
if not options.router:
print('error: [router] should be specified')
return None
return {
'route | r': options.router,
'vxnet': options.vxnet,
'offset':options.offset,
'limit': options.limit,
}
|
chromium/chromium | third_party/tflite_support/src/tensorflow_lite_support/python/task/processor/proto/embedding_options_pb2.py | Python | bsd-3-clause | 784 | 0.001276 | # Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WIT | HOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissi | ons and
# limitations under the License.
"""Embedding options protobuf."""
from tensorflow_lite_support.cc.task.processor.proto import embedding_options_pb2
EmbeddingOptions = embedding_options_pb2.EmbeddingOptions
|
NMTHydro/SWACodingMeeting | homework/for_meeting_3/vehicles_David.py | Python | apache-2.0 | 930 | 0.001075 | class Vehicle(object):
def __init__(self, make, model, year):
self.make = make
self.model = model
self.year = year
def foo(self, a, b, c):
print 'does some work'
print a, b, c
class Car(Vehicle):
def __init__(self, doors, *args):
| Vehicle.__init__(self, *args)
self.doors = doors
def foo(self, *args, **kw):
print args, kw
super(Car, self).foo(*args)
class Boat(Vehicle):
def __init__(self, power, *args):
Vehicle.__init__(se | lf, *args)
if power not in ('propeller', 'sail'):
print 'warning: drive type not acceptable'
raise TypeError
self.power = power
class Airplane(Vehicle):
def __init__(self):
pass
if __name__ == '__main__':
car = Car('honda', 'civic', '2002', '2')
car.foo(1, 2, 3)
# ============================ EOF =============================
|
GLPJJ/PL | py/g.py | Python | gpl-3.0 | 11 | 0.222222 | # | 类 pyth | on |
gangadharkadam/letzfrappe | frappe/email/bulk.py | Python | mit | 5,170 | 0.024371 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import HTMLParser
import urllib
from frappe import msgprint, throw, _
from frappe.email.smtp import SMTPServer, get_outgoing_email_account
from frappe.email.email_body import get_email, get_formatted_html
from html2text import html2text
from frappe.utils import cint, get_url, nowdate
class BulkLimitCrossedError(frappe.ValidationError): pass
def send(recipients=None, sender=None, doctype='User', email_field='email',
subject='[No Subject]', message='[No Content]', ref_doctype=None,
ref_docname=None, add_unsubscribe_link=True, attachments=None, reply_to=None):
def is_unsubscribed(rdata):
if not rdata:
return 1
return cint(rdata.unsubscribed)
def check_bulk_limit(new_mails):
this_month = frappe.db.sql("""select count(*) from `tabBulk Email` where
month(creation)=month(%s)""" % nowdate())[0][0]
# No limit for own email settings
smtp_server = SMTPServer()
if smtp_server.email_account and not getattr(smtp_server.email_account,
"from_site_config", False):
monthly_bulk_mail_limit = frappe.conf.get('monthly_bulk_mail_limit') or 500
if (this_month + len(recipients)) > monthly_bulk_mail_limit:
throw(_("Bulk email limit {0} crossed").format(monthly_bulk_mail_limit),
BulkLimitCrossedError)
def update_message(formatted, doc, add_unsubscribe_link):
updated = formatted
if add_unsubscribe_link:
unsubscribe_link = """<div style="padding: 7px; border-top: 1px solid #aaa;
margin-top: 17px;">
<small><a href="%s/?%s">
Unsubscribe</a> from this list.</small></div>""" % (get_url(),
urllib.urlencode({
"cmd": "frappe.email.bulk.unsubscribe",
"email": doc.get(email_field),
"type": doctype,
"email_field": email_field
}))
updated = updated.replace("<!--unsubscribe link here-->", unsubscribe_link)
return updated
if not recipients:
recipients = []
if not sender or sender == "Administrator":
email_account = get_outgoing_email_account()
sender = email_account.get("sender") or email_account.email_id
check_bulk_limit(len(recipients))
formatted = get_formatted_html(subject, message)
for r in filter(None, list(set(recipients))):
rdata = frappe.db.sql("""select * from `tab%s` where %s=%s""" % (doctype,
email_field, '%s'), (r,), as_dict=1)
doc = rdata and rdata[0] or {}
if (not add_unsubscribe_link) or (not is_unsubscribed(doc)):
# add to queue
updated = update_message(formatted, doc, add_unsubscribe_link)
try:
text_content = html2text(updated)
except HTMLParser.HTMLParseError:
text_content = "[See html attachment]"
add(r, sender, subject, updated, text_content, ref_doctype, ref_docname, attachments, reply_to)
def add(email, sender, subject, formatted, text_content=None,
ref_doctype=None, ref_docname=None, attachments=None, reply_to=None):
"""add to bulk mail queue"""
e = frappe.new_doc('Bulk Email')
e.sender = sender
e.recipient = email
try:
e.message = get_email(email, sender=e.sender, formatted=formatted, subject=subject,
text_content=text_content, attachments=attachments, reply_to=reply_to).as_string()
except frappe.InvalidEmailAddressError:
# bad email id - don't add to queue
return
e.ref_doctype = ref_doctype
e.ref_docname = ref_docname
e.insert(ignore_permissions=True)
@frappe.whitelist(allow_guest=True)
def unsubscribe():
doctype = frappe.form_dict.get('type')
field = frappe.form_dict.get('email_field')
email = frappe.form_dict.get('email')
frappe.db.sql("""update `tab%s` set unsubscribed=1
where `%s`=%s""" % (doctype, field, '%s'), (email,))
if not frappe.form_dict.get("from_test"):
frappe.db.commit()
frappe.local.message_title = "Unsubscribe"
frappe.local.message = "<h3>Unsubscribed</h3><p>%s has been successfully unsubscribed.</p>" % email
frappe.response['type'] = 'page'
frappe.response['page_name'] = 'message.html'
def flush(from_test=False):
"""flush email queue, every time: called from scheduler"""
smtpserver = SMTPServer()
auto_commit = not f | rom_test
if frappe.flags.mute_emails or frappe.conf.get("mute_emails") or Fa | lse:
msgprint(_("Emails are muted"))
from_test = True
for i in xrange(500):
email = frappe.db.sql("""select * from `tabBulk Email` where
status='Not Sent' limit 1 for update""", as_dict=1)
if email:
email = email[0]
else:
break
frappe.db.sql("""update `tabBulk Email` set status='Sending' where name=%s""",
(email["name"],), auto_commit=auto_commit)
try:
if not from_test:
smtpserver.sess.sendmail(email["sender"], email["recipient"], email["message"])
frappe.db.sql("""update `tabBulk Email` set status='Sent' where name=%s""",
(email["name"],), auto_commit=auto_commit)
except Exception, e:
frappe.db.sql("""update `tabBulk Email` set status='Error', error=%s
where name=%s""", (unicode(e), email["name"]), auto_commit=auto_commit)
def clear_outbox():
"""remove mails older than 30 days in Outbox"""
frappe.db.sql("""delete from `tabBulk Email` where
datediff(now(), creation) > 30""")
|
beckastar/django | tests/migrations/test_migrations/0002_second.py | Python | bsd-3-clause | 592 | 0.001689 | from django.db import migrations, models
class Migration(migrations.Migration):
dependencie | s = [
("migrations", "0001_initial"),
]
operations = [
migrations.DeleteModel("Tribble"),
migrations.RemoveField("Author", "silly_field"),
migrations.AddField("Author", "rating", models.IntegerField(default=0)),
migrations.CreateModel(
| "Book",
[
("id", models.AutoField(primary_key=True)),
("author", models.ForeignKey("migrations.Author", null=True)),
],
)
]
|
freedesktop-unofficial-mirror/telepathy__telepathy-phoenix | src/phoenix.py | Python | lgpl-2.1 | 4,699 | 0.017876 | #!/usr/bin/env python
import os
import sys
import getopt
from util import spawnbus, setup_data_dir, setup_run_dir, scrub_env
from gi.repository import GObject, Gio
from gi.repository import TelepathyGLib as Tp
# Watch one Telepathy connection
class Connection:
def __init__ (self, connection):
self.connection = connection
self.contacts = []
self.connection.prepare_async (
[ Tp.Connection.get_feature_quark_contact_list () ],
self.prepared_cb, None)
def check | _contact (self, contact):
# Remove from our contact list if the remote removed us
# Authorize and subscribe to the remote when asked
p = contact.get_property ("publish-state")
if p == Tp.SubscriptionState.REMOVED_R | EMOTELY:
self.connection.remove_contacts_async ([ contact ], None, None)
elif p == Tp.SubscriptionState.ASK:
self.connection.authorize_publication_async ([ contact ],
None, None )
self.connection.request_subscription_async ([ contact ],
"You subscribe to me, I subscribe to you!",
None, None )
def subscription_state_changed (self, contact, subscribe, publish,
request, data):
self.check_contact (contact)
def add_contact (self, contact):
if contact in self.contacts:
return
self.contacts.append (contact)
self.check_contact (contact)
contact.connect ('subscription-states-changed',
self.subscription_state_changed, None)
def remove_contact (self, contact):
print "Removed: %s" % (contact.get_identifier ())
self.contacts.remove (contact)
def contact_list_changed (self, connection, added, removed, data):
for contact in added:
self.add_contact (contact)
for contact in removed:
self.remove_contact (contact)
def prepared_cb (self, connection, result, data):
# Connect for future updates
self.connection.connect ('contact-list-changed',
self.contact_list_changed, None)
if (connection.get_contact_list_state() !=
Tp.ContactListState.SUCCESS):
print "Contactlist not retrieved just yet.."
return
contacts = connection.dup_contact_list ()
for c in contacts:
self.add_contact (c)
# Watch one Telepathy account
class Account:
def __init__ (self, account):
self.connection = None
self.account = account
self.account.connect("notify::connection", self.connection_changed, None)
self.setup_connection ()
# Reuest availability
self.account.request_presence_async (
Tp.ConnectionPresenceType.AVAILABLE,
"",
"",
None, None)
def setup_connection (self):
c = self.account.get_property("connection")
if c != None:
self.connection = Connection (c)
else:
self.connection = None
print "Setup connection for " \
+ self.account.get_property ("display-name") \
+ ": " + str (self.connection)
def connection_changed (self, account, spec, data):
self.setup_connection ()
# Watch the account manager
class Manager:
def __init__ (self):
self.am = am = Tp.AccountManager.dup()
factory = am.get_factory()
self.accounts = {}
factory.add_contact_features ([Tp.ContactFeature.SUBSCRIPTION_STATES])
am.connect ('account-removed', self.removed_cb )
am.connect ('account-validity-changed', self.validity_changed_cb)
am.prepare_async(None, self.prepared, None)
def add_account (self, account):
print "Adding account: " + account.get_property ("display-name")
self.accounts[account.get_property ("object-path")] = \
Account (account)
def remove_account (self, account):
self.accounts.delete (account.get_property ("object-path"))
def validity_changed_cb (self, am, account, valid):
if valid:
self.add_account (account)
def removed_cb (self, am, account, valid):
self.remove_account (account)
def prepared (self, am, result, data):
for a in am.get_valid_accounts():
self.add_account (a)
if __name__ == '__main__':
try:
opts, args = getopt.getopt(sys.argv[1:], "", ["datadir=", "rundir="])
for o, a in opts:
if o == "--datadir":
setup_data_dir(a)
elif o == "--rundir":
setup_run_dir(a)
except getopt.GetoptError, err:
print str(err)
sys.exit(2)
scrub_env()
spawnbus()
Tp.debug_set_flags(os.getenv('PHOENIX_DEBUG', ''))
loop = GObject.MainLoop()
m = Manager()
loop.run()
|
buluba89/Yatcobot | yatcobot/notifier.py | Python | gpl-2.0 | 577 | 0 | import logging
from yatcobot.plugins.notifiers import NotifierABC
logger = logging.getLogger(__name__)
class NotificationService:
def __init__(self):
self.active_notifiers = NotifierABC.get_enabled()
def send_notification(self, title, message):
"""Sends a message to all enabled notifiers"""
for notifier in self.active_noti | fiers:
notifier.notify(title, | message)
def is_enabled(self):
"""Checks if any notifier is enabled"""
if len(self.active_notifiers) > 0:
return True
return False
|
MartinHjelmare/home-assistant | homeassistant/components/hydrawise/switch.py | Python | apache-2.0 | 3,451 | 0 | """Support for Hydrawise cloud switches."""
import logging
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchDevice
from homeassistant.const import CONF_MONITORED_CONDITIONS
import homeassistant.helpers.config_validation as cv
from . import (
ALLOWED_WATERING_TIME, CONF_WATERING_TIME, DATA_HYDRAWISE,
DEFAULT_WATERING_TIME, DEVICE_MAP, DEVICE_MAP_INDEX, SWITCHES,
HydrawiseEntity)
_LOGGER = logging.getLogger(__name__)
# Extend the base switch-platform schema:
#   * monitored_conditions: optional list, each entry must be one of SWITCHES
#     (defaults to all of SWITCHES);
#   * watering_time: optional, must be one of ALLOWED_WATERING_TIME
#     (defaults to DEFAULT_WATERING_TIME).
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_MONITORED_CONDITIONS, default=SWITCHES):
        vol.All(cv.ensure_list, [vol.In(SWITCHES)]),
    vol.Optional(CONF_WATERING_TIME, default=DEFAULT_WATERING_TIME):
        vol.All(vol.In(ALLOWED_WATERING_TIME)),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up Hydrawise zone switches from the platform configuration."""
    hydrawise = hass.data[DATA_HYDRAWISE].data
    default_watering_timer = config.get(CONF_WATERING_TIME)
    # One switch entity per (monitored condition, controller zone) pair,
    # preserving the condition-major ordering of the nested loops.
    entities = [
        HydrawiseSwitch(default_watering_timer, zone, sensor_type)
        for sensor_type in config.get(CONF_MONITORED_CONDITIONS)
        for zone in hydrawise.relays
    ]
    # Second argument True requests an immediate update() after adding.
    add_entities(entities, True)
class HydrawiseSwitch(HydrawiseEntity, SwitchDevice):
    """A switch implementation for a Hydrawise device.

    Each instance represents one (zone, sensor_type) pair, where sensor_type
    is either 'manual_watering' or 'auto_watering'.
    """
    def __init__(self, default_watering_timer, *args):
        """Initialize a switch for a Hydrawise device.

        default_watering_timer: duration used when manual watering is started;
        remaining positional args are forwarded to HydrawiseEntity.
        """
        super().__init__(*args)
        self._default_watering_timer = default_watering_timer
    @property
    def is_on(self):
        """Return true if the device is on."""
        return self._state
    def turn_on(self, **kwargs):
        """Turn the device on."""
        # self.data['relay'] is the 1-based relay number; the API calls below
        # take a 0-based zone index, hence the -1.
        if self._sensor_type == 'manual_watering':
            self.hass.data[DATA_HYDRAWISE].data.run_zone(
                self._default_watering_timer, (self.data['relay']-1))
        elif self._sensor_type == 'auto_watering':
            # suspend_zone with 0 — presumably clears the suspension so the
            # automatic schedule resumes; confirm against the Hydrawiser API.
            self.hass.data[DATA_HYDRAWISE].data.suspend_zone(
                0, (self.data['relay']-1))
    def turn_off(self, **kwargs):
        """Turn the device off."""
        if self._sensor_type == 'manual_watering':
            # run_zone with duration 0 — presumably stops the running zone;
            # confirm against the Hydrawiser API.
            self.hass.data[DATA_HYDRAWISE].data.run_zone(
                0, (self.data['relay']-1))
        elif self._sensor_type == 'auto_watering':
            # Suspend automatic watering for 365 (days, presumably).
            self.hass.data[DATA_HYDRAWISE].data.suspend_zone(
                365, (self.data['relay']-1))
    def update(self):
        """Update device state from the shared Hydrawise data object."""
        mydata = self.hass.data[DATA_HYDRAWISE].data
        _LOGGER.debug("Updating Hydrawise switch: %s", self._name)
        if self._sensor_type == 'manual_watering':
            # On only when this zone's relay is the one currently running
            # (mydata.running[0] — only the first running entry is checked).
            if not mydata.running:
                self._state = False
            else:
                self._state = int(
                    mydata.running[0]['relay']) == self.data['relay']
        elif self._sensor_type == 'auto_watering':
            # On when this zone exists and carries no 'suspended' marker.
            for relay in mydata.relays:
                if relay['relay'] == self.data['relay']:
                    if relay.get('suspended') is not None:
                        self._state = False
                    else:
                        self._state = True
                    break
    @property
    def icon(self):
        """Return the icon to use in the frontend, if any."""
        return DEVICE_MAP[self._sensor_type][
            DEVICE_MAP_INDEX.index('ICON_INDEX')]
mahak/nova | nova/tests/unit/virt/disk/vfs/test_guestfs.py | Python | apache-2.0 | 14,130 | 0.000212 | # Copyright (C) 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import fixtures
import mock
from nova import exception
from nova import test
from nova.tests.unit.virt.disk.vfs import fakeguestfs
from nova.virt.disk.vfs import guestfs as vfsimpl
from nova.virt.image import model as imgmodel
os_uname = collections.namedtuple(
'uname_result', ['sysname', 'nodename', 'release', 'version', 'machine'],
)
class VirtDiskVFSGuestFSTest(test.NoDBTestCase):
def setUp(self):
super(VirtDiskVFSGuestFSTest, self).setUp()
self.useFixture(
fixtures.MonkeyPatch('nova.virt.disk.vfs.guestfs.guestfs',
fakeguestfs))
self.qcowfile = imgmodel.LocalFileImage("/dummy.qcow2",
imgmodel.FORMAT_QCOW2)
self.rawfile = imgmodel.LocalFileImage("/dummy.img",
imgmodel.FORMAT_RAW)
self.lvmfile = imgmodel.LocalBlockImage("/dev/volgroup/myvol")
self.rbdfile = imgmodel.RBDImage("myvol", "mypool",
"cthulu",
"arrrrrgh",
["server1:123", "server2:123"])
def _do_test_appliance_setup_inspect(self, image, drives, forcetcg):
if forcetcg:
vfsimpl.force_tcg()
else:
vfsimpl.force_tcg(False)
vfs = vfsimpl.VFSGuestFS(
image,
partition=-1)
vfs.setup()
if forcetcg:
self.assertEqual(["force_tcg"], vfs.handle.backend_settings)
vfsimpl.force_tcg(False)
else:
self.assertIsNone(vfs.handle.backend_settings)
self.assertTrue(vfs.handle.running)
self.assertEqual(drives,
vfs.handle.drives)
self.assertEqual(3, len(vfs.handle.mounts))
self.assertEqual("/dev/mapper/guestvgf-lv_root",
vfs.handle.mounts[0][1])
self.assertEqual("/dev | /vda1",
vfs.handle.mounts[1][1])
self.assertEqual("/dev/mapper/guestvgf-lv_home",
vfs.handle.mounts[2][1])
self.assertEqual("/", vfs.handle.mounts[0][2])
self.assertEqual("/boot", vfs.handle.mounts[1][2])
self | .assertEqual("/home", vfs.handle.mounts[2][2])
handle = vfs.handle
vfs.teardown()
self.assertIsNone(vfs.handle)
self.assertFalse(handle.running)
self.assertTrue(handle.closed)
self.assertEqual(0, len(handle.mounts))
def test_appliance_setup_inspect_auto(self):
drives = [("/dummy.qcow2", {"format": "qcow2"})]
self._do_test_appliance_setup_inspect(self.qcowfile, drives, False)
def test_appliance_setup_inspect_tcg(self):
drives = [("/dummy.qcow2", {"format": "qcow2"})]
self._do_test_appliance_setup_inspect(self.qcowfile, drives, True)
def test_appliance_setup_inspect_raw(self):
drives = [("/dummy.img", {"format": "raw"})]
self._do_test_appliance_setup_inspect(self.rawfile, drives, True)
def test_appliance_setup_inspect_lvm(self):
drives = [("/dev/volgroup/myvol", {"format": "raw"})]
self._do_test_appliance_setup_inspect(self.lvmfile, drives, True)
def test_appliance_setup_inspect_rbd(self):
drives = [("mypool/myvol", {"format": "raw",
"protocol": "rbd",
"username": "cthulu",
"secret": "arrrrrgh",
"server": ["server1:123",
"server2:123"]})]
self._do_test_appliance_setup_inspect(self.rbdfile, drives, True)
def test_appliance_setup_inspect_no_root_raises(self):
vfs = vfsimpl.VFSGuestFS(self.qcowfile,
partition=-1)
# call setup to init the handle so we can stub it
vfs.setup()
self.assertIsNone(vfs.handle.backend_settings)
with mock.patch.object(
vfs.handle, 'inspect_os', return_value=[]) as mock_inspect_os:
self.assertRaises(exception.NovaException, vfs.setup_os_inspect)
mock_inspect_os.assert_called_once_with()
def test_appliance_setup_inspect_multi_boots_raises(self):
vfs = vfsimpl.VFSGuestFS(self.qcowfile,
partition=-1)
# call setup to init the handle so we can stub it
vfs.setup()
self.assertIsNone(vfs.handle.backend_settings)
with mock.patch.object(
vfs.handle, 'inspect_os',
return_value=['fake1', 'fake2']) as mock_inspect_os:
self.assertRaises(exception.NovaException, vfs.setup_os_inspect)
mock_inspect_os.assert_called_once_with()
def test_appliance_setup_static_nopart(self):
vfs = vfsimpl.VFSGuestFS(self.qcowfile,
partition=None)
vfs.setup()
self.assertIsNone(vfs.handle.backend_settings)
self.assertTrue(vfs.handle.running)
self.assertEqual(1, len(vfs.handle.mounts))
self.assertEqual("/dev/sda", vfs.handle.mounts[0][1])
self.assertEqual("/", vfs.handle.mounts[0][2])
handle = vfs.handle
vfs.teardown()
self.assertIsNone(vfs.handle)
self.assertFalse(handle.running)
self.assertTrue(handle.closed)
self.assertEqual(0, len(handle.mounts))
def test_appliance_setup_static_part(self):
vfs = vfsimpl.VFSGuestFS(self.qcowfile,
partition=2)
vfs.setup()
self.assertIsNone(vfs.handle.backend_settings)
self.assertTrue(vfs.handle.running)
self.assertEqual(1, len(vfs.handle.mounts))
self.assertEqual("/dev/sda2", vfs.handle.mounts[0][1])
self.assertEqual("/", vfs.handle.mounts[0][2])
handle = vfs.handle
vfs.teardown()
self.assertIsNone(vfs.handle)
self.assertFalse(handle.running)
self.assertTrue(handle.closed)
self.assertEqual(0, len(handle.mounts))
def test_makepath(self):
vfs = vfsimpl.VFSGuestFS(self.qcowfile)
vfs.setup()
vfs.make_path("/some/dir")
vfs.make_path("/other/dir")
self.assertIn("/some/dir", vfs.handle.files)
self.assertIn("/other/dir", vfs.handle.files)
self.assertTrue(vfs.handle.files["/some/dir"]["isdir"])
self.assertTrue(vfs.handle.files["/other/dir"]["isdir"])
vfs.teardown()
def test_append_file(self):
vfs = vfsimpl.VFSGuestFS(self.qcowfile)
vfs.setup()
vfs.append_file("/some/file", " Goodbye")
self.assertIn("/some/file", vfs.handle.files)
self.assertEqual("Hello World Goodbye",
vfs.handle.files["/some/file"]["content"])
vfs.teardown()
def test_replace_file(self):
vfs = vfsimpl.VFSGuestFS(self.qcowfile)
vfs.setup()
vfs.replace_file("/some/file", "Goodbye")
self.assertIn("/some/file", vfs.handle.files)
self.assertEqual("Goodbye",
vfs.handle.files["/some/file"]["content"])
vfs.teardown()
def test_read_file(self):
vfs = vfsimpl.VFSGuestFS(self.qcowfile)
vfs.setup()
self.assertEqual("Hello World", vfs.read_file("/some/file"))
vfs.teardown()
def test_has_file(self):
vfs = vfsimpl.VFSGuestFS(self.qcowfile)
vfs.setup()
v |
archman/phantasy | phantasy/apps/__init__.py | Python | bsd-3-clause | 225 | 0 | # encoding: | UTF-8
#
# Copyright (c) 2015-2016 Facility for Rare Isotope Beams
#
"""
Physics Applications
"""
try:
from phantasy_apps import *
except ImportError:
| print("Package 'python-phantasy-apps' is required.")
|
JuliaSprenger/python-odmltables | setup.py | Python | bsd-3-clause | 2,563 | 0.002731 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, os, glob
import warnings
from setuptools import setup, find_packages
with open("README.rst") as f:
long_description = f.read()
with open('requirements.txt') as f:
install_requires = f.read().splitlines()
with open('requirements_doc.txt') as f:
doc_requires = f.read().splitlines()
with open('requirements_test.txt') as f:
test_requires = f.read().splitlines()
with open('requirements_gui.txt') as f:
gui_requires = f.read().splitlines()
extras_require = {'doc': doc_requires,
'test': test_requires,
'gui': gui_requires
}
# PyQt5 needs to be installed manually with python 2 when installing odmltables gui.
if sys.version_info.major < 3:
warnings.warn('The odMLtables gui requires PyQt5. Please install this dependency first before '
'installing the odmltables gui, eg. using "conda install -c anaconda '
'\'pyqt>=5\'"')
VERSION = open('./odmltables/VERSION.txt', 'r').read().strip()
setup(
name="odmltables",
version=VERSION,
packages=find_packages(),
package_data={'odmltables': ['gui/graphics/*', 'VERSION.txt']},
install_requires=install_requires,
extras_require=extras_require,
author="odMLtables authors and contributors",
author_email="j.sprenger@fz-juelich.de",
description="Interface to convert odML structures to and from table-like representations",
long_description=long_description,
license="BSD",
url='https://github.com/INM-6/python-odmltables',
download_url="https://github.com/INM-6/python-odmltables/archive/{0}.tar.gz".format(VERSION),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering'],
entry_points={
'gui_scripts': ['odmltables = odmltables.gu | i.main:parse_args []']},
| zip_safe=False,
keywords=['odml', 'excel', 'metadata management'],
# Extension('foo', ['foo.c'], include_dirs=['.']),
data_files=[('share/pixmaps', glob.glob(os.path.join("logo", "*"))),
('.', ['odmltables/VERSION.txt',
'requirements.txt',
'requirements_doc.txt',
'requirements_gui.txt',
'requirements_test.txt'])]
) |
DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/Dsz/PyScripts/Lib/dsz/mca/file/cmd/grep/errors.py | Python | unlicense | 1,026 | 0.008772 | # uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: errors.py
import mcl.status
ERR_SUCCESS = mcl.status.MCL_SUCCESS
ERR_INVALID_PARAM = mcl.status.framework.ERR_START
ERR_MARSHAL_FAILED = mcl.status.framework.ERR_START + 1
ERR_GET_FULL | _PATH_FAILED = mcl.status.framework.ERR_START + 2
ERR_CALLBACK_FAILED | = mcl.status.framework.ERR_START + 3
ERR_ENUM_FAILED = mcl.status.framework.ERR_START + 4
ERR_DONE_MAX_ENTRIES = mcl.status.framework.ERR_START + 5
ERR_NOT_IMPLEMENTED = mcl.status.framework.ERR_START + 6
errorStrings = {ERR_INVALID_PARAM: 'Invalid parameter(s)',
ERR_MARSHAL_FAILED: 'Marshaling data failed',
ERR_GET_FULL_PATH_FAILED: 'Unable to get full path',
ERR_CALLBACK_FAILED: 'Return of data failed',
ERR_ENUM_FAILED: 'Failed to enumerate given directory',
ERR_DONE_MAX_ENTRIES: 'Maximum entries exceeded',
ERR_NOT_IMPLEMENTED: 'Feature not implemented on this platform'
} |
MalloyDelacroix/DownloaderForReddit | DownloaderForReddit/gui/database_views/filter_widget.py | Python | gpl-3.0 | 3,726 | 0.00161 | from PyQt5.QtWidgets import QWidget, QLabel, QToolButton, QHBoxLayout, QVBoxLayout, QListWidgetItem, QFrame, QListView
from PyQt5.QtCore import pyqtSignal
from PyQt5.QtGui import QColor
from DownloaderForReddit.guiresources.database_views.filter_widget_auto import Ui_FilterWidget
from DownloaderForReddit.utils import injector
from .filter_item import FilterItem
class FilterWidget(QWidget, Ui_FilterWidget):
filter_changed = pyqtSignal(list)
def __init__(self, parent=None):
QWidget.__init__(self, parent=parent)
self.setupUi(self)
self.settings_manager = injector.get_settings_manager()
self.active = False
self.filters = {}
self.list_item_map = {}
self. | filter_input_widget.export_filter.connect(self.add_filters)
| self.filter_box_list_widget.setSpacing(12)
self.filter_box_list_widget.setResizeMode(QListView.Adjust)
def set_default_filters(self, *filters):
self.add_filters([FilterItem(**filter_dict) for filter_dict in filters])
def filter(self, model_name):
return self.filter_download_filters(model_name)
def filter_download_filters(self, filter_name):
return [x.filter_tuple for x in filter(lambda x: x.model == filter_name, self.filters.values())]
def add_filters(self, filters):
models = []
for filter_item in filters:
widget = self.create_widget(**filter_item.widget_dict)
self.filters[widget] = filter_item
self.add_widget_to_list(widget)
models.append(filter_item.model)
self.filter_changed.emit(list(set(models)))
def add_widget_to_list(self, widget):
item = QListWidgetItem()
size = widget.sizeHint()
size.setWidth(size.width() + 25)
size.setHeight(size.height() + 10)
item.setSizeHint(size)
item.setBackground(QColor('#C8C8C8'))
self.filter_box_list_widget.addItem(item)
self.filter_box_list_widget.setItemWidget(item, widget)
self.list_item_map[widget] = item
def create_widget(self, **kwargs):
filter_item_widget = QWidget()
model_label = QLabel(kwargs.get('model', None))
field_label = QLabel(kwargs.get('field', None))
operator_label = QLabel(kwargs.get('operator', None))
# space added to this label text because it's the only way I could get it to stop cutting off the end of text
value_label = QLabel(str(kwargs.get('value', None)) + ' ')
close_button = QToolButton()
close_button.setText('X')
close_button.clicked.connect(lambda: self.remove_filter(filter_item_widget))
v_layout = QVBoxLayout()
title_layout = QHBoxLayout()
title_layout.addWidget(model_label)
title_layout.addWidget(close_button)
h_layout = QHBoxLayout()
h_layout.addWidget(field_label)
h_layout.addWidget(self.get_line())
h_layout.addWidget(operator_label)
h_layout.addWidget(self.get_line())
h_layout.addWidget(value_label)
v_layout.addItem(title_layout)
v_layout.addItem(h_layout)
h_layout.setSpacing(5)
v_layout.setSpacing(2)
filter_item_widget.setLayout(v_layout)
return filter_item_widget
def get_line(self):
line = QFrame()
line.setFrameShape(QFrame.VLine)
line.setFrameShadow(QFrame.Sunken)
return line
def remove_filter(self, widget):
f = self.filters[widget]
del self.filters[widget]
item = self.list_item_map[widget]
row = self.filter_box_list_widget.row(item)
self.filter_box_list_widget.takeItem(row)
self.filter_changed.emit([f.model])
|
redhat-openstack/neutron | neutron/tests/functional/agent/test_l3_agent.py | Python | apache-2.0 | 13,694 | 0 | # Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from oslo.config import cfg
from neutron.agent.common import config
from neutron.agent import l3_agent
from neutron.agent.linux import external_process
from neutron.agent.linux import ip_lib
from neutron.common import constants as l3_constants
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
from neutron.tests.functional.agent.linux import base
from neutron.tests.unit import test_l3_agent
LOG = logging.getLogger(__name__)
_uuid = uuidutils.generate_uuid
class L3AgentTestFramework(base.BaseOVSLinuxTestCase):
def setUp(self):
super(L3AgentTestFramework, self).setUp()
self.check_sudo_enabled()
self._configure()
def _configure(self):
l3_agent._register_opts(cfg.CONF)
cfg.CONF.set_override('debug', True)
config.setup_logging()
cfg.CONF.set_override(
'interface_driver',
'neutron.agent.linux.interface.OVSInterfaceDriver')
cfg.CONF.set_override('router_delete_namespaces', True)
cfg.CONF.set_override('root_helper', self.root_helper, group='AGENT')
cfg.CONF.set_override('use_namespaces', True)
cfg.CONF.set_override('enable_metadata_proxy', True)
br_int = self.create_ovs_bridge()
cfg.CONF.set_override('ovs_integration_bridge', br_int.br_name)
br_ex = self.create_ovs_bridge()
cfg.CONF.set_override('external_network_bridge', br_ex.br_name)
mock.patch('neutron.common.rpc.RpcProxy.cast').start()
mock.patch('neutron.common.rpc.RpcProxy.call').start()
mock.patch('neutron.common.rpc.RpcProxy.fanout_cast').start()
self.agent = l3_agent.L3NATAgent('localhost', cfg.CONF)
mock.patch.object(self.agent, '_send_gratuitous_arp_packet').start()
def manage_router(self, enable_ha, ip_version=4, enable_fip=True,
enable_snat=True):
if ip_version == 6:
enable_snat = False
enable_fip = False
r = test_l3_agent.prepare_router_data(ip_version=ip_version,
enable_snat=enable_snat,
enable_floating_ip=enable_fip,
enable_ha=enable_ha)
self.addCleanup(self._delete_router, r['id'])
ri = self._create_router(r)
return ri
def _create_router(self, router):
self.agent._router_added(router['id'], router)
ri = self.agent.router_info[router['id']]
ri.router = router
self.agent.process_router(ri)
return ri
def _delete_router(self, router_id):
self.agent._router_removed(router_id)
def _add_fip(self, router, fip_address, fixed_address='10.0.0.2'):
fip = {'id': _uuid(),
'port_id': _uuid(),
'floating_ip_address': fip_address,
'fixed_ip_address': fixed_address}
router.router[l3_constants.FLOATINGIP_KEY].append(fip)
def _namespace_exists(self, router):
ip = ip_lib.IPWrapper(self.root_helper, router.ns_name)
return ip.netns.exists(router.ns_name)
def _metadata_proxy_exists(self, router):
pm = external_process.ProcessManager(
cfg.CONF,
router.router_id,
self.root_helper,
router.ns_name)
return pm.active
def device_exists_with_ip_mac(self, expected_device, name_getter,
namespace):
return ip_lib.device_exists_with_ip_mac(
name_getter(expected_device['id']),
expected_device['ip_cidr'],
expected_device['mac_address'],
namespace, self.root_helper)
def get_expected_keepalive_configuration(self, router):
ha_confs_path = cfg.CONF.ha_confs_path
router_id = router.router_id
ha_device_name = self.agent.get_ha_device_name(router.ha_port['id'])
ha_device_cidr = router.ha_port['ip_cidr']
external_port = self.agent._get_ex_gw_port(router)
ex_port_ipv6 = self.agent._get_ipv6_lladdr(
external_port['mac_address'])
external_device_name = self.agent.get_external_device_name(
external_port['id'])
external_device_cidr = external_port['ip_cidr']
internal_port = router.router[l3_constants.INTERFACE_KEY][0]
int_port_ipv6 = self.agent._get_ipv6_lladdr(
internal_port['mac_address'])
internal_device_name = self.agent.get_internal_device_name(
internal_port['id'])
internal_device_cidr = internal_port['ip_cidr']
floating_ip_cidr = (
self.agent.get_floating_ips(router)[0]
['floating_ip_address'] + l3_agent.FLOATING_IP_CIDR_SUFFIX)
default_gateway_ip = external_port['subnet'].get('gateway_ip')
return """vrrp_sync_group VG_1 {
group {
VR_1
}
notify_master "%(ha_confs_path)s/%(router_id)s/notify_master.sh"
notify_backup "%(ha_confs_path)s/%(router_id)s/notify_backup.sh"
notify_fault "%(ha_confs_path)s/%(router_id)s/notify_fault.sh"
}
vrrp_instance VR_1 {
state BACKUP
interface %(ha_device_name)s
virtual_router_id 1
priority 50
garp_master_repeat 5
garp_master_refresh | 10
nopreempt
advert_int 2
track_interface {
%(ha_device_name)s
}
virtual_ipaddress {
169.254.0.1/24 dev %(ha_device_name)s
}
virtual_ipaddress_excluded {
%(floating_ip_cidr)s dev %(external_device_name)s
%(external_device_cidr)s dev %(external_device_name)s
%(internal_device_cidr)s dev %(internal_device_name)s
%(ex_port_ipv6)s dev %(external_device_name)s scope link
| %(int_port_ipv6)s dev %(internal_device_name)s scope link
}
virtual_routes {
0.0.0.0/0 via %(default_gateway_ip)s dev %(external_device_name)s
}
}""" % {
'ha_confs_path': ha_confs_path,
'router_id': router_id,
'ha_device_name': ha_device_name,
'ha_device_cidr': ha_device_cidr,
'external_device_name': external_device_name,
'external_device_cidr': external_device_cidr,
'internal_device_name': internal_device_name,
'internal_device_cidr': internal_device_cidr,
'floating_ip_cidr': floating_ip_cidr,
'default_gateway_ip': default_gateway_ip,
'int_port_ipv6': int_port_ipv6,
'ex_port_ipv6': ex_port_ipv6
}
class L3AgentTestCase(L3AgentTestFramework):
def test_legacy_router_lifecycle(self):
self._router_lifecycle(enable_ha=False)
def test_ha_router_lifecycle(self):
self._router_lifecycle(enable_ha=True)
def test_ipv6_ha_router_lifecycle(self):
self._router_lifecycle(enable_ha=True, ip_version=6)
def test_keepalived_configuration(self):
router = self.manage_router(enable_ha=True)
expected = self.get_expected_keepalive_configuration(router)
self.assertEqual(expected,
router.keepalived_manager.config.get_config_str())
# Add a new FIP and change the GW IP address
router.router = copy.deepcopy(router.router)
existing_fip = '19.4.4.2'
new_fip = '19.4.4.3'
self._add_fip(router, new_fip)
router.router['gw_port']['subnet']['gateway_ip'] = '19.4.4.5'
router.router['gw_port']['fixed_ips'][0]['ip_address'] = '19.4.4.10'
self.agent.process_ro |
npuichigo/ttsflow | third_party/tensorflow/tensorflow/contrib/learn/python/learn/tests/dataframe/csv_parser_test.py | Python | apache-2.0 | 2,054 | 0.002434 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for learn.python.learn.dataframe.transforms.csv_parser."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.learn.python.learn.dataframe.transforms import csv_parser
from tensorflow.contrib.learn.python.learn.tests.dataframe import mocks
from tensorflow.python.framework import constant_op
from tensorf | low.python.framework import dtypes
from tensorflow.python.platform import test
class CSVParserTestCase(test.TestCase):
def testParse(self):
parser = csv_parser.CSVParser(
column_names=["col0", "col1", "col2"], default_values=["", "", 1.4])
csv_lines = ["one,two,2.5", "four,five,6.0"]
csv_input = constant_op.constant(
csv_lines, dtype=dtypes.s | tring, shape=[len(csv_lines)])
csv_column = mocks.MockSeries("csv", csv_input)
expected_output = [
np.array([b"one", b"four"]), np.array([b"two", b"five"]),
np.array([2.5, 6.0])
]
output_columns = parser(csv_column)
self.assertEqual(3, len(output_columns))
cache = {}
output_tensors = [o.build(cache) for o in output_columns]
self.assertEqual(3, len(output_tensors))
with self.test_session() as sess:
output = sess.run(output_tensors)
for expected, actual in zip(expected_output, output):
np.testing.assert_array_equal(actual, expected)
if __name__ == "__main__":
test.main()
|
root-11/outscale | tests/auction_demo_tests.py | Python | mit | 2,607 | 0.003836 | from itertools import product
from demos.auction_model import demo
def test02():
sellers = [4]
buyers = [100]
results = demo | (sellers, buyers, time_limit=False)
expected_results = {100: 4, 4: 100}
for k, v in results.items():
assert expected_results[k] == v, "Hmmm... That's not right {}={}".format(k, v)
def test03():
expected_results = {101: 5, 5: 101, 6: None, 7: None}
sellers = [k for k in expected_results if k < 100]
buyers = [k for k in expected_results if k >= 100]
results | = demo(sellers, buyers)
for k, v in results.items():
assert expected_results[k] == v, "Hmmm... That's not right {}={}".format(k, v)
def test04():
expected_results = {0: 101, 101: 0, 102: None,
103: None} # result: 0 enters contract with price 334.97 (the highest price)
sellers = [k for k in expected_results if k < 100]
buyers = [k for k in expected_results if k >= 100]
results = demo(sellers, buyers)
for k, v in results.items():
assert expected_results[k] == v, "Hmmm... That's not right {}={}".format(k, v)
def test05():
expected_results = {101: 7, 102: 6, 103: 5, 5: 103, 6: 102, 7: 101}
sellers = [k for k in expected_results if k < 100]
buyers = [k for k in expected_results if k >= 100]
results = demo(sellers, buyers)
for k, v in results.items():
assert expected_results[k] == v, "Hmmm... That's not right {}={}".format(k, v)
def test06():
expected_results = {0: 102, 1: 108, 2: 105, 3: 107, 4: 100, 5: 106, 6: 112, 7: 111, 8: 103, 9: 109, 10: 104, 100: 4, 101: None, 102: 0, 103: 8, 104: 10, 105: 2, 106: 5, 107: 3, 108: 1, 109: 9, 110: None, 111: 7, 112: 6}
sellers = [k for k in expected_results if k < 100]
buyers = [k for k in expected_results if k >= 100]
error_sets = []
for s_init, b_init in list(product([True, False], repeat=2)):
if not s_init and not b_init:
continue # if neither seller or buyer initialise, obviously nothing will happen.
results = demo(sellers=sellers, buyers=buyers, seller_can_initialise=s_init, buyer_can_initialise=b_init)
errors = []
for k, v in results.items():
if not expected_results[k] == v: # , "Hmmm... That's not right {}={}".format(k, v)
errors.append((k, v))
if errors:
error_sets.append(errors)
if error_sets:
print("-" * 80)
for i in error_sets:
print(",".join(str(i) for i in sorted(i)), flush=True)
raise AssertionError("output does not reflect expected results.") |
talbor49/Poet | web/__main__.py | Python | mit | 62 | 0 | import poet
if __name__ == '_ | _main__':
roger.tool.main( | )
|
ptphp/PyLib | src/tornado/demos/lihuashu/docs/common_bak/ty.py | Python | apache-2.0 | 668 | 0.026946 | #coding:utf8
from img import Img
i=Img()
i.open()
k=i.convert_thumbnail(input_file="/home/insion/Pictures/l.jpg",output | _file="/home/insion/Pictures/toutput.jpg")
print(k)
k=i.convert_resize(input_file="/home/insion/Pictures/l.jpg",output_file="/home/insion/Pictures/loutput2.jpg",output_size="500x")
print(k)
ki=i.composite_watermark(watermark_file="/home/insion/Pictures/lhs_log | o.png",input_file="/home/insion/Pictures/loutput2.jpg",output_file="/home/insion/Pictures/loutput.jpg")
print(ki)
ki=i.convert_watermark(watermark_file="/home/insion/Pictures/lhs_logo.png",input_file="/home/insion/Pictures/m.jpg",output_file="/home/insion/Pictures/moutput.jpg")
print(ki) |
jeremiahyan/odoo | addons/pos_sale_product_configurator/models/pos_config.py | Python | gpl-3.0 | 276 | 0.003623 | from odoo import models, fields
cl | ass PosConfig(models.Model):
_inherit = 'pos.config'
iface_open_product_info = fields.Boolean(string="Open Product I | nfo", help="Display the 'Product Info' page when a product with optional products are added in the customer cart")
|
CiscoSystems/nova-solver-scheduler | nova/tests/scheduler/solvers/constraints/test_aggregate_image_properties_isolation.py | Python | apache-2.0 | 2,908 | 0.000688 | # Copyright (c) 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova.scheduler import solvers
from nova.s | cheduler.solvers.constraints import \
aggregate_image_properties_isolation
from nova import test
from nova.tests.scheduler import solver_scheduler_fakes as fakes
clas | s TestAggregateImagePropertiesIsolationConstraint(test.NoDBTestCase):
def setUp(self):
super(TestAggregateImagePropertiesIsolationConstraint, self).setUp()
self.constraint_cls = aggregate_image_properties_isolation.\
AggregateImagePropertiesIsolationConstraint
self._generate_fake_constraint_input()
def _generate_fake_constraint_input(self):
self.fake_variables = solvers.BaseVariables()
self.fake_variables.host_instance_matrix = [
['h0i0', 'h0i1', 'h0i2'],
['h1i0', 'h1i1', 'h1i2']]
self.fake_filter_properties = {
'instance_uuids': ['fake_uuid_%s' % x for x in range(3)],
'num_instances': 3}
host1 = fakes.FakeSolverSchedulerHostState('host1', 'node1', {})
host2 = fakes.FakeSolverSchedulerHostState('host2', 'node1', {})
self.fake_hosts = [host1, host2]
@mock.patch('nova.scheduler.solvers.constraints.'
'aggregate_image_properties_isolation.'
'AggregateImagePropertiesIsolationConstraint.host_filter_cls')
def test_aggregate_image_properties_isolation_get_components(
self, mock_filter_cls):
expected_cons_vars = [['h1i0'], ['h1i1'], ['h1i2']]
expected_cons_coeffs = [[1], [1], [1]]
expected_cons_consts = [0, 0, 0]
expected_cons_ops = ['==', '==', '==']
mock_filter = mock_filter_cls.return_value
mock_filter.host_passes.side_effect = [True, False]
cons_vars, cons_coeffs, cons_consts, cons_ops = (
self.constraint_cls().get_components(self.fake_variables,
self.fake_hosts, self.fake_filter_properties))
self.assertEqual(expected_cons_vars, cons_vars)
self.assertEqual(expected_cons_coeffs, cons_coeffs)
self.assertEqual(expected_cons_consts, cons_consts)
self.assertEqual(expected_cons_ops, cons_ops)
|
kdart/pycopia | mibs/pycopia/mibs/SNMP_PROXY_MIB.py | Python | apache-2.0 | 4,800 | 0.020625 | # python
# This file is generated by a program (mib2py). Any edits will be lost.
from pycopia.aid import Enum
import pycopia.SMI.Basetypes
Range = pycopia.SMI.Basetypes.Range
Ranges = pycopia.SMI.Basetypes.Ranges
from pycopia.SMI.Objects import ColumnObject, MacroObject, NotificationObject, RowObject, ScalarObject, NodeObject, ModuleObject, GroupObject
# imports
from SNMPv2_SMI import MODULE_IDENTITY, OBJECT_TYPE, snmpModules
from SNMPv2_CONF import MODULE_COMPLIANCE, OBJECT_GROUP
from SNMPv2_TC import RowStatus, StorageType
from SNMP_TARGET_MIB import SnmpTagValue, snmpTargetBasicGroup, snmpTargetResponseGroup
from SNMP_FRAMEWORK_MIB import SnmpEngineID, SnmpAdminString
class SNMP_PROXY_MIB(ModuleObject):
path = '/usr/share/mibs/ietf/SNMP-PROXY-MIB'
name = 'SNMP-PROXY-MIB'
language = 2
description = 'This MIB module defines MIB objects which provide\nmechanisms to remotely configure the parameters\nused by a proxy forwarding application.\n\nCopyright (C) The Internet Society (2002). This\nversion of this MIB module is part of RFC 3413;\nsee the RFC itself for full legal notices.'
# nodes
class snmpProxyMIB(NodeObject):
status = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14])
name = 'snmpProxyMIB'
class snmpProxyObjects(NodeObject):
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1])
name = 'snmpProxyObjects'
class snmpProxyConformance(NodeObject):
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 3])
name = 'snmpProxyConformance'
class snmpProxyCompliances(NodeObject):
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 3, 1])
name = 'snmpProxyCompliances'
class snmpProxyGroups(NodeObject):
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 3, 2])
name = 'snmpProxyGroups'
# macros
# types
# scalars
# columns
class snmpProxyName(ColumnObject):
access = 2
status = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1, 2, 1, 1])
syntaxobject = SnmpAdminString
class snmpProxyType(ColumnObject):
status = 1
access = 5
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1, 2, 1, 2])
syntaxobject = pycopia.SMI.Basetypes.Enumeration
enumerations = [Enum(1, 'read'), Enum(2, 'write'), Enum(3, 'trap'), Enum(4, 'inform')]
class snmpProxyContextEngineID(ColumnObject):
access = 5
status = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1, 2, 1, 3])
syntaxobject = SnmpEngineID
class snmpProxyContextName(ColumnObject):
access = 5
status = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1, 2, 1, 4])
syntaxobject = SnmpAdminString
class snmpProxyTargetParamsIn(ColumnObject):
access = 5
status = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1, 2, 1, 5])
syntaxobject = SnmpAdminString
class snmpProxySingleTargetOut(ColumnObject):
access = 5
status = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1, 2, 1, 6])
syntaxobject = SnmpAdminString
class snmpProxyMultipleTargetOut(ColumnObject):
a | ccess = 5
s | tatus = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1, 2, 1, 7])
syntaxobject = SnmpTagValue
class snmpProxyStorageType(ColumnObject):
access = 5
status = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1, 2, 1, 8])
syntaxobject = pycopia.SMI.Basetypes.StorageType
class snmpProxyRowStatus(ColumnObject):
access = 5
status = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1, 2, 1, 9])
syntaxobject = pycopia.SMI.Basetypes.RowStatus
# rows
class snmpProxyEntry(RowObject):
status = 1
index = pycopia.SMI.Objects.IndexObjects([snmpProxyName], True)
create = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 1, 2, 1])
access = 2
rowstatus = snmpProxyRowStatus
columns = {'snmpProxyName': snmpProxyName, 'snmpProxyType': snmpProxyType, 'snmpProxyContextEngineID': snmpProxyContextEngineID, 'snmpProxyContextName': snmpProxyContextName, 'snmpProxyTargetParamsIn': snmpProxyTargetParamsIn, 'snmpProxySingleTargetOut': snmpProxySingleTargetOut, 'snmpProxyMultipleTargetOut': snmpProxyMultipleTargetOut, 'snmpProxyStorageType': snmpProxyStorageType, 'snmpProxyRowStatus': snmpProxyRowStatus}
# notifications (traps)
# groups
class snmpProxyGroup(GroupObject):
access = 2
status = 1
OID = pycopia.SMI.Basetypes.ObjectIdentifier([1, 3, 6, 1, 6, 3, 14, 3, 2, 3])
group = [snmpProxyType, snmpProxyContextEngineID, snmpProxyContextName, snmpProxyTargetParamsIn, snmpProxySingleTargetOut, snmpProxyMultipleTargetOut, snmpProxyStorageType, snmpProxyRowStatus]
# capabilities
# special additions
# Add to master OIDMAP.
from pycopia import SMI
SMI.update_oidmap(__name__)
|
CooperLuan/note-you-like | nyl.py | Python | mit | 1,840 | 0.002717 | #!python2
import logging
import os
from datetime import datetime
FORMAT = '%(asctime)-15s %(levelname)s %(message)s'
logging. | basicConfig(format=FORMAT, level=logging.INFO)
log = logging
from flask import Flask, render_template, request, jsonify
from bson.objectid import ObjectId
from models.url_model import URLModel
from models.html_model import HTMLModel
from env import MONGO, REDIS
app = Flask('nyl')
@app.route("/")
def index():
return render_template('index.html')
@app.route('/about')
def about():
return render_template('index.html')
@app.route('/api/url/fetch', | methods=['POST'])
def api_url_extract():
url = request.form['url']
_doc = MONGO.url_response_cache.find_one({
'url': url,
})
if _doc:
html = _doc['body']
oid = str(_doc.pop('_id'))
log.info('load html from cache')
else:
html = URLModel(url).get_html()
_doc = {
'url': url,
'body': html,
'timestamp': None,
'datetime': datetime.now().strftime('%Y-%M-%d %H:%M:%S'),
'status': 1,
}
oid = str(MONGO.url_response_cache.insert(_doc))
data = HTMLModel(html).extract_all()
return jsonify(**{
'code': 1,
'data': data,
'oid': oid,
})
@app.route('/api/feedback/url_fetch', methods=['POST'])
def api_feedback_url_fetch():
oid = request.args.get('oid')
MONGO.url_response_cache.update({
'_id': ObjectId(oid),
}, {
'$set': {
'status': -1,
}
})
return jsonify(**{
'code': 1,
})
@app.route('/api/markdown/preview', methods=['POST'])
def api_markdown_preview():
pass
if __name__ == "__main__":
assert 'NYL_MONGO_URL' in os.environ, 'NYL_** not undefined'
app.run(host='0.0.0.0', port=9801, debug=True)
|
tropo/tropo-webapi-python | samples/gh-14.test_transfer.py | Python | mit | 778 | 0.003856 | # tests fix of gh-14 for "from" parameter in the "transfer" function.
# Proposed convention is to use "_from" as the parameter
# | so as not to conflict with "from" Python reserved word.
# _from arg works
# _from arg works with straight json
# Invoke by calling up app access number
# Sample application using the itty-bitty python web framework from:
# http://github.com/toastdriven/itty
from itty import *
from tropo import Tropo, Session
TO_NUMBER = '1xxxxxxxxxx'
FROM_NUMBER = '1yyyyyyyyyy'
@post('/index.json')
def index(request):
s = Session(request | .body)
t = Tropo()
t.say ("One moment please.")
t.transfer(TO_NUMBER, _from="tel:+" + FROM_NUMBER)
t.say("Hi. I am a robot")
json = t.RenderJson()
print json
return json
run_itty()
|
setsulla/stir | stir/define.py | Python | mit | 267 | 0 | import os
STIR_ROOT = os.path.normpath(os.path.dirname(__file__))
STIR_APP = os.path.normpath(os.path.join(STIR_ROO | T, "application"))
STIR_LIB = os.path.normpath(os.path.join(STIR_ROOT, "library"))
# STIR_SCRI | PT = os.path.normpath(os.path.join(STIR_ROOT, "script"))
|
sivel/ansible-modules-extras | windows/win_scheduled_task.py | Python | gpl-3.0 | 2,585 | 0 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: win_scheduled_task
version_added: "2.0"
short_description: Manage scheduled tasks
description:
- Manage scheduled tasks
notes:
- This module requires Wi | ndows Server 2012 or later.
options:
name:
description:
- | Name of the scheduled task
required: true
description:
description:
- The description for the scheduled task
required: false
enabled:
description:
- Enable/disable the task
choices:
- yes
- no
default: yes
state:
description:
- State that the task should become
required: true
choices:
- present
- absent
user:
description:
- User to run scheduled task as
required: false
execute:
description:
- Command the scheduled task should execute
required: false
argument:
description:
- Arguments to provide scheduled task action
required: false
frequency:
description:
- The frequency of the command, not idempotent
required: false
choices:
- daily
- weekly
time:
description:
- Time to execute scheduled task, not idempotent
required: false
days_of_week:
description:
- Days of the week to run a weekly task, not idempotent
required: false
path:
description:
- Task folder in which this task will be stored
default: '\'
'''
EXAMPLES = '''
# Create a scheduled task to open a command prompt
- win_scheduled_task:
name: TaskName
execute: cmd
frequency: daily
time: 9am
description: open command prompt
path: example
enable: yes
state: present
user: SYSTEM
'''
|
sadanandb/pmt | src/pyasm/widget/tab_wdg.py | Python | epl-1.0 | 24,986 | 0.006404 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in | any way without written permission.
#
#
#
__all__ = ['TabWdg', 'DynTabWdg','TabExtendWdg']
import types, sys, re
from pyasm.common import Common, Marshaller, Container, Environment, Xml
from pyasm.web import Widget, WebContainer, WidgetSettings, HtmlElement, | \
SpanWdg, DivWdg, AjaxLoader, MethodWdg, ClassWdg
from input_wdg import HiddenWdg, PopupMenuWdg
from pyasm.prod.biz import ProdSetting
from pyasm.biz import Project
class TabWdg(Widget):
'''The standard widget to display tabs. Most sites use this widget as
the outer container widget for navigation. It has special exception
handling code to ensure the even if a stack trace occurs within it,
the tabs are still displayed.
Note: TabWdg respects whether there is a database in existence, so this
widget can be used in SimpleAppServer'''
# there are 2 tab sizes
REG = 'regular'
SMALL = 'small'
TAB_REDIRECT = 'tab_redirect'
def __init__(my, dynamic_load=0, tab_key="tab", css=REG):
my.tab_names = []
my.wdg_dict = {}
my.dynamic_load = dynamic_load
my.set_tab_key(tab_key)
my.tab_style = css
my.content_height = 0
my.mode = Container.get("tab_mode")
# setting tab path
my.tab_path = Container.get("tab_path")
if not my.tab_path:
my.tab_path = "Main"
my.error_wdg = None
my.div = DivWdg(css='left_content')
if Environment.has_tactic_database():
my.invisible_list = ProdSetting.get_seq_by_key('invisible_tabs')
else:
my.invisible_list = []
super(TabWdg,my).__init__()
def class_init(my):
'''this is used for tab redirection. The set_redirect() takes
presecedence'''
tab_redirect = HiddenWdg(my.TAB_REDIRECT)
my.div.add(tab_redirect)
def set_mode(mode):
assert mode in ['normal', 'check']
Container.put("tab_mode", mode)
set_mode = staticmethod(set_mode)
def get_tab_names(my):
return my.tab_names
def get_tab_key(my):
return my.tab_key
def get_tab_value(my):
return my.tab_value
def set_tab_style(my, style):
my.tab_style = style
def set_content_height(my, height):
my.content_height = height
def set_tab_key(my,tab_key):
''' set the name of the tab for redirection. If one value is passed in,
it assumes it's one the current set of subtabs. To jump to a tab from
a totally different category, pass in a dict using set_redirect or
get_redirect_script'''
web = WebContainer.get_web()
my.tab_key = tab_key
redirect = Container.get(my.TAB_REDIRECT)
if not redirect:
# find it from the web form
redirect = web.get_form_value(my.TAB_REDIRECT)
if redirect:
redirect_dict = {}
redirect = redirect.split(',')
# expecting key, value pairs
for idx, item in enumerate(redirect):
if idx % 2 == 0:
redirect_dict[item] = redirect[idx+1]
redirect = redirect_dict
if redirect:
if isinstance(redirect, dict):
for key, value in redirect.items():
# set only the relevant key
if key == tab_key:
web.set_form_value(key, value)
break
else:
web.set_form_value(tab_key, redirect)
web.set_form_value('is_form_submitted','init')
# this implicitly sets the tab value
class_name = my.__class__.__name__
my.tab_value = WidgetSettings.get_key_value(class_name,my.tab_key)
def handle_exception(my, e):
'''The tab widget is a special widget concerning exceptions because
it usually represents the outer skin of the content of the web page.
The titles of the tab must be displayed in order for the site to remain
functional in the case of an exception'''
from web_wdg import ExceptionWdg
widget = ExceptionWdg(e)
my.error_wdg = Widget()
my.error_wdg.add(widget)
def init(my):
try:
super(TabWdg,my).init()
except Exception, e:
my.handle_exception(e)
def do_search(my):
try:
super(TabWdg,my).do_search()
except Exception, e:
my.handle_exception(e)
def add_widget(my,widget,title=None):
return my.add(widget,title)
def add(my,widget,title=None,index=None):
if title == None:
title = widget.__class__.__name__
# determine the url and check security
# DEPRECATED!!!! use "tab" security
url_selector = WebContainer.get_web().get_request_url().get_selector()
check = "%s|%s" % (url_selector,title)
# check tab security
if my.mode != "check":
security = WebContainer.get_security()
if not security.check_access("url", check, "view"):
return
# new security mechanism
if not security.check_access("tab_title", title, "view"):
return
# new, new security mechanism
tab_path = my.get_tab_path(title)
if not security.check_access("tab", tab_path, "view"):
return
# check if this tab is invisible
if not my.check_visibility(tab_path):
return
if index == None:
my.tab_names.append(title)
else:
my.tab_names.insert(index,title)
my.wdg_dict[title] = widget
# for tabs, the widget passed in can be None. Only the
# title is added
if widget == None:
return
# only the selected one really gets added
if not my.tab_value or title == my.tab_value:
Container.put("tab_path", my.get_tab_path(title))
widget = my.init_widget(widget, title)
# the very first time user click on the main tab
if not my.tab_value:
my.tab_value = title
super(TabWdg,my)._add_widget(widget, title)
def init_widget(my, widget, title=None):
''' instantiate the widget if selected. This can be called externally
to instantiate any widgets added to a TabWdg'''
try:
# if a method was passed in, then execute it
if my.mode == "check":
from base_tab_wdg import BaseTabWdg
try:
if not issubclass(widget, BaseTabWdg):
return Widget()
except:
return Widget()
if type(widget) == types.MethodType:
widget = widget()
elif isinstance(widget, basestring):
widget = Common.create_from_class_path(widget)
elif not isinstance(widget, Widget):
widget = widget()
# handle docs for the help widget
"""
from header_wdg import DocMapping
from web_wdg import HelpItemWdg
help_url = ProdSetting.get_value_by_key("project_doc_url")
if help_url:
widget.add(HelpItemWdg('SOOT Docs', help_url))
# add the help item automatically
doc = DocMapping()
widget.add(HelpItemWdg('%s Tab' % title, doc.get_mapping(title)))
"""
# catch all exceptions and log them
except Exception, e:
my.handle_exception(e)
return widget
def check_visibility(my, tab_path):
''' determine if a tab is visible or not '''
if not Environment.has_tactic_database():
|
bvernoux/micropython | tests/multi_net/tcp_accept_recv.py | Python | mit | 672 | 0 | # Test recv on socket that just accepted a connection
import socket
PORT = 8000
# Server
def instance0():
multit | est.globals(IP=multitest.get_network_ip())
s = socket.socket()
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.bind(socket.getaddrinfo("0.0.0.0", PORT)[0][-1])
s.listen(1)
multitest.next()
s.accept()
try:
print("recv", s.recv(10)) # should raise Errno 107 ENOTCONN
except OSError as er:
print(er.errno)
s.close()
# Client
def instance1():
multitest.next()
s = socket.socket()
s.connect(socket.getaddrinfo(IP, PORT)[ | 0][-1])
s.send(b"GET / HTTP/1.0\r\n\r\n")
s.close()
|
mozvip/CouchPotatoServer | couchpotato/core/settings/__init__.py | Python | gpl-3.0 | 6,744 | 0.006376 | from __future__ import with_statement
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import isInt, toUnicode
from couchpotato.core.helpers.request import getParams, jsonified
from couchpotato.core.helpers.variable import mergeDicts, tryInt
from couchpotato.core.settings.model import Properties
import ConfigParser
import os.path
import time
import traceback
class Settings(object):
options = {}
types = {}
def __init__(self):
addApiView('settings', self.view, docs = {
'desc': 'Return the options and its values of settings.conf. Including the default values and group ordering used on the settings page.',
'return': {'type': 'object', 'example': """{
// objects like in __init__.py of plugin
"options": {
"moovee" : {
"groups" : [{
"description" : "SD movies only",
"name" : "#alt.binaries.moovee",
"options" : [{
"default" : false,
"name" : "enabled",
"type" : "enabler"
}],
"tab" : "providers"
}],
"name" : "moovee"
}
},
// object structured like settings.conf
"values": {
"moovee": {
"enabled": false
}
}
}"""}
})
addApiView('settings.save', self.saveView, docs = {
'desc': 'Save setting to config file (settings.conf)',
'params': {
'section': {'desc': 'The section name in settings.conf'},
'option': {'desc': 'The option name'},
'value': {'desc': 'The value you want to save'},
}
})
def setFile(self, config_file):
self.file = config_file
self.p = ConfigParser.RawConfigParser()
self.p.read(config_file)
from couchpotato.core.logger import CPLog
self.log = CPLog(__name__)
self.connectEvents()
def parser(self):
return self.p
def sections(self):
return self.p.sections()
def connectEvents(self):
addEvent('settings.options', self.addOptions)
addEvent('settings.register', self.registerDefaults)
addEvent('settings.save', self.save)
def registerDefaults(self, section_name, options = {}, save = True):
self.addSection(section_name)
for option_name, option in options.iteritems():
self.setDefault(section_name, option_name, option.get('default', ''))
if option.get('type'):
self.setType(section_name, option_name, option.get('type'))
if save:
self.save(self)
def set(self, section, option, value):
return self.p.set(section, option, value)
def get(self, option = '', section = 'core', default = None, type = None):
try:
try: type = self.types[section][option]
except: type = 'unicode' if not type else type
if hasattr(self, 'get%s' % type.capitalize()):
return getattr(self, 'get%s' % type.capitalize())(section, option)
else:
return self.getUnicode(section, option)
except:
return default
def getEnabler(self, section, option):
return self.getBool(section, option)
def getBool(self, section, option):
try:
return self.p.getboolean(section, option)
except:
return self.p.get(section, option) == 1
def getInt(self, section, option):
try:
return self.p.getint(section, option)
except:
return tryInt(self.p.get(section, option))
def getFloat(self, section, option):
try:
return self.p.getfloat(section, option)
except:
return tryInt(self.p.get(section, option))
def getUnicode(self, section, option):
value = self.p.get(section, option).decode('unicode_escape')
return toUnicode(value).strip()
def getValues(self):
values = {}
for section in self.sections():
values[section] = {}
for option in self.p.items(section):
(option_name, option_value) = option
values[section][option_name] = self.get(option_name, section)
return values
def save(self):
with open(self.file, 'wb') as configfile:
self.p.write(configfile)
self.log.debug('Saved settings')
def addSection(self, section):
if not self.p.has_section(section):
self.p.add_section(section)
def setDefault(self, section, option, value):
if not self.p.has_option(section, option):
self.p.set(section, option, value)
def setType(self, section, option, type):
if not self.types.get(section):
self.types[section] = {}
self.types[section][option] = type
def addOptions(self, section_name, options):
if not self.options.get(section_name):
self.options[section_name] = options
else:
self.options[section_name] = mergeDicts(self.options[section_name], options)
def getOptions(self):
return self.options
def view(self):
return jsonified({
'options': self.getOptions(),
'values': self.getValues()
})
def saveView(self):
params = getParams()
section = params.get('section')
option = params.get('name')
value = params.get('value')
# See if a value handler is attached, use that as value
new_value = fireEvent('setting.save.%s.%s' % (section, option), value, single = True)
self.set(section, option, (new_value if new_value else value).encode('unicode_escape'))
self.save()
# After save (for re-interval etc)
fireEvent('setting.save.%s.%s.after' % (section, option), single = True)
return jsonified({
'success': True,
})
def getProperty(self, identifier):
from couchpotato import get_session
db = get_session()
prop = None
try:
propert = db.query(Properties).filter_by(identifier = identifier).first()
prop = propert.value
except:
pass
return prop
def setProperty(self, identifier, value = '') | :
from couchpotato import get_session
db = get_session()
p = db.query(Properties).filter_by(identifier = identifier).first()
if not p:
| p = Properties()
db.add(p)
p.identifier = identifier
p.value = toUnicode(value)
db.commit()
|
FabienPean/sofa | applications/plugins/Flexible/examples/patch_test/FEM.py | Python | lgpl-2.1 | 6,005 | 0.035304 | #!/usr/bin/python
import math
import Sofa
def tostr(L):
return str(L).replace('[', '').replace("]", '').replace(",", ' ')
def transform(T,p):
return [T[0][0]*p[0]+T[0][1]*p[1]+T[0][2]*p[2]+T[1][0],T[0][3]*p[0]+T[0][4]*p[1]+T[0][5]*p[2]+T[1][1],T[0][6]*p[0]+T[0][7]*p[1]+T[0][8]*p[2]+T[1][2]]
def transformF(T,F):
return [T[0][0]*F[0]+T[0][1]*F[3]+T[0][2]*F[6],T[0][0]*F[1]+T[0][1]*F[4]+T[0][2]*F[7],T[0][0]*F[2]+T[0][1]*F[5]+T[0][2]*F[8],T[0][3]*F[0]+T[0][4]*F[3]+T[0][5]*F[6],T[0][3]*F[1]+T[0][4]*F[4]+T[0][5]*F[7],T[0][3]*F[2]+T[0][4]*F[5]+T[0][5]*F[8],T[0][6]*F[0]+T[0][7]*F[3]+T[0][8]*F[6],T[0][6]*F[1]+T[0][7]*F[4]+T[0][8]*F[7],T[0][6]*F[2]+T[0][7]*F[5]+T[0][8]*F[8]]
def compare(p1,p2):
res = 0
for i,P1 in enumerate(p1):
for j,item in enumerate(P1):
res = res+ (item-p2[i][j])*(item-p2[i][j])
return res
ERRORTOL = 1e-5
T = [[2,0,0,0,2,0,0,0,2],[0,0,0]]
#T = [[0.8,1.2,0.3,0,1.9,0.45,0.5,2.8,0.2],[5,2,8]]
samples= [[0.5,0.5,0.5], [0.23,0.5,0.8], [0,0.12,0], [0.8,0,0.58]]
# scene creation method
def createScene(rootNode):
rootNode.createObject('RequiredPlugin', pluginName="Flexible")
rootNode.createObject('VisualStyle', displayFlags="showBehaviorModels")
restpos = [[0, 0, 0], [1, 0, 0], [0, 1, 0], [1, 1, 0], [0, 0, 1], [1, 0, 1], [0, 1, 1], [1, 1, 1]]
pos = [transform(T,item) for item in restpos]
###########################################################
simNode = rootNode.createChild('Hexa_barycentric')
simNode.createObject('MeshTopology', name="mesh", position=tostr(restpos), hexahedra="0 1 3 2 4 5 7 6")
simNode.createObject('MechanicalObject', template="Vec3d", name="parent", rest_position="@mesh.position",position=tostr(pos) )
simNode.createObject('BarycentricShapeFunction', position="@parent.rest_position", nbRef="8")
childNode = simNode.createChild('childP')
childNode.createObject('MechanicalObject', template="Vec3d", name="child", position=tostr(samples) , showObject="1")
childNode.createObject('LinearMapping', template="Vec3d,Vec3d")
childNode = simNode.createChild('childF')
childNode.createObject('GaussPointContainer', position=tostr(samples))
childNode.createObject('MechanicalObject', template="F331", name="child")
childNode.createObject('LinearMapping', template="Vec3d,F331", showDeformationGradientScale="1")
childNode = simNode.createChild('Visu')
childNode.createObject('VisualModel', color="8e-1 8e-1 1 1e-1")
childNode.createObject('IdentityMapping')
childNode = simNode.createChild('Visu2')
childNode.createObject('VisualStyle', displayFlags="showWireframe")
childNode.createObject('VisualModel', color="8e-1 8e-1 1 1")
childNode.createObject('IdentityMapping')
simNode.createObject('PythonScriptController',filename="FEM.py", classname="Controller")
###########################################################
simNode = rootNode.createChild('Tetra_barycentric')
simNode.createObject('MeshTopology', name="mesh", position=tostr(restpos), tetrahedra="0 5 1 7 0 1 2 7 1 2 7 3 7 2 0 6 7 6 0 5 6 5 4 0")
simNode.createObject('MechanicalObject', template="Vec3d", name="parent", rest_position="@mesh.position",position=tostr(pos) )
simNode.createObj | ect('BarycentricShapeFunction', position="@parent.rest_position", nbRef="4")
childNode = simNode.createChild('childP')
childNode.createObject('MechanicalObject', template="Vec3d", name="child", position=tostr(samples) , showObject="1")
childNode.createObject('LinearMapping', template="Vec3d,Vec3d")
childNode = simNode.createChild('childF')
childNode.createObject('GaussPointContainer', position=tostr(samples))
childNode.createObject('MechanicalObject', template="F331", name="child")
childNode. | createObject('LinearMapping', template="Vec3d,F331")
simNode.createObject('PythonScriptController',filename="FEM.py", classname="Controller")
###########################################################
simNode = rootNode.createChild('Hexa_shepard')
simNode.createObject('MeshTopology', name="mesh", position=tostr(restpos), hexahedra="0 1 3 2 4 5 7 6")
simNode.createObject('MechanicalObject', template="Vec3d", name="parent", rest_position="@mesh.position",position=tostr(pos) )
simNode.createObject('ShepardShapeFunction', position="@parent.rest_position", power="2")
childNode = simNode.createChild('childP')
childNode.createObject('MechanicalObject', template="Vec3d", name="child", position=tostr(samples) , showObject="1")
childNode.createObject('LinearMapping', template="Vec3d,Vec3d")
childNode = simNode.createChild('childF')
childNode.createObject('GaussPointContainer', position=tostr(samples))
childNode.createObject('MechanicalObject', template="F331", name="child")
childNode.createObject('LinearMapping', template="Vec3d,F331")
simNode.createObject('PythonScriptController',filename="FEM.py", classname="Controller")
###########################################################
rootNode.animate=1
return rootNode
class Controller(Sofa.PythonScriptController):
def createGraph(self,node):
self.node=node
self.done=0
return 0
def onEndAnimationStep(self,dt):
if self.done==0:
print "TEST "+self.node.name+":"
# test points
restpos = self.node.getObject('childP/child').findData('rest_position').value
refpos = [transform(T,item) for item in restpos]
pos = self.node.getObject('childP/child').findData('position').value
error = compare(refpos,pos)
if error>ERRORTOL :
print "\t"+"\033[91m"+"[FAILED]"+"\033[0m"+" error on P= "+str(error)
else :
print "\t"+"\033[92m"+"[OK]"+"\033[0m"+" error on P= "+str(error)
# test defo gradients
restpos = [1,0,0,0,1,0,0,0,1]
pos = self.node.getObject('childF/child').findData('position').value
refpos = [transformF(T,restpos) for item in pos]
error = compare(refpos,pos)
if error>ERRORTOL :
print "\t"+"\033[91m"+"[FAILED]"+"\033[0m"+" error on F= "+str(error)
else :
print "\t"+"\033[92m"+"[OK]"+"\033[0m"+" error on F= "+str(error)
self.done=1
return 0
|
fake-name/ReadableWebProxy | WebMirror/management/rss_parser_funcs/feed_parse_extractThatbadtranslatorWordpressCom.py | Python | bsd-3-clause | 574 | 0.033101 |
def extract | ThatbadtranslatorWordpressCom(item):
'''
Parser for 'thatbadtranslator.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=fra | g, postfix=postfix, tl_type=tl_type)
return False
|
hanlind/nova | nova/tests/unit/api/openstack/compute/test_instance_actions.py | Python | apache-2.0 | 9,409 | 0.000106 | # Copyright 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# W | ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from oslo_policy import policy as oslo_policy
import six
from webob import exc
from nova.api.openstack.compute import instance_actions as instan | ce_actions_v21
from nova.api.openstack import wsgi as os_wsgi
from nova.compute import api as compute_api
from nova.db.sqlalchemy import models
from nova import exception
from nova import objects
from nova import policy
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_server_actions
from nova.tests import uuidsentinel as uuids
FAKE_UUID = fake_server_actions.FAKE_UUID
FAKE_REQUEST_ID = fake_server_actions.FAKE_REQUEST_ID1
def format_action(action):
'''Remove keys that aren't serialized.'''
to_delete = ('id', 'finish_time', 'created_at', 'updated_at', 'deleted_at',
'deleted')
for key in to_delete:
if key in action:
del(action[key])
if 'start_time' in action:
# NOTE(danms): Without WSGI above us, these will be just stringified
action['start_time'] = str(action['start_time'].replace(tzinfo=None))
for event in action.get('events', []):
format_event(event)
return action
def format_event(event):
'''Remove keys that aren't serialized.'''
to_delete = ('id', 'created_at', 'updated_at', 'deleted_at', 'deleted',
'action_id')
for key in to_delete:
if key in event:
del(event[key])
if 'start_time' in event:
# NOTE(danms): Without WSGI above us, these will be just stringified
event['start_time'] = str(event['start_time'].replace(tzinfo=None))
if 'finish_time' in event:
# NOTE(danms): Without WSGI above us, these will be just stringified
event['finish_time'] = str(event['finish_time'].replace(tzinfo=None))
return event
class InstanceActionsPolicyTestV21(test.NoDBTestCase):
instance_actions = instance_actions_v21
def setUp(self):
super(InstanceActionsPolicyTestV21, self).setUp()
self.controller = self.instance_actions.InstanceActionsController()
def _get_http_req(self, action):
fake_url = '/123/servers/12/%s' % action
return fakes.HTTPRequest.blank(fake_url)
def _get_instance_other_project(self, req):
context = req.environ['nova.context']
project_id = '%s_unequal' % context.project_id
return objects.Instance(project_id=project_id)
def _set_policy_rules(self):
rules = {'compute:get': '',
'os_compute_api:os-instance-actions':
'project_id:%(project_id)s'}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
@mock.patch('nova.api.openstack.common.get_instance')
def test_list_actions_restricted_by_project(self, mock_instance_get):
self._set_policy_rules()
req = self._get_http_req('os-instance-actions')
mock_instance_get.return_value = self._get_instance_other_project(req)
self.assertRaises(exception.Forbidden, self.controller.index, req,
uuids.fake)
@mock.patch('nova.api.openstack.common.get_instance')
def test_get_action_restricted_by_project(self, mock_instance_get):
self._set_policy_rules()
req = self._get_http_req('os-instance-actions/1')
mock_instance_get.return_value = self._get_instance_other_project(req)
self.assertRaises(exception.Forbidden, self.controller.show, req,
uuids.fake, '1')
class InstanceActionsTestV21(test.NoDBTestCase):
instance_actions = instance_actions_v21
wsgi_api_version = os_wsgi.DEFAULT_API_VERSION
def fake_get(self, context, instance_uuid, expected_attrs=None):
return objects.Instance(uuid=instance_uuid)
def setUp(self):
super(InstanceActionsTestV21, self).setUp()
self.controller = self.instance_actions.InstanceActionsController()
self.fake_actions = copy.deepcopy(fake_server_actions.FAKE_ACTIONS)
self.fake_events = copy.deepcopy(fake_server_actions.FAKE_EVENTS)
self.stubs.Set(compute_api.API, 'get', self.fake_get)
def _get_http_req(self, action, use_admin_context=False):
fake_url = '/123/servers/12/%s' % action
return fakes.HTTPRequest.blank(fake_url,
use_admin_context=use_admin_context,
version=self.wsgi_api_version)
def _set_policy_rules(self):
rules = {'compute:get': '',
'os_compute_api:os-instance-actions': '',
'os_compute_api:os-instance-actions:events': 'is_admin:True'}
policy.set_rules(oslo_policy.Rules.from_dict(rules))
def test_list_actions(self):
def fake_get_actions(context, uuid):
actions = []
for act in six.itervalues(self.fake_actions[uuid]):
action = models.InstanceAction()
action.update(act)
actions.append(action)
return actions
self.stub_out('nova.db.actions_get', fake_get_actions)
req = self._get_http_req('os-instance-actions')
res_dict = self.controller.index(req, FAKE_UUID)
for res in res_dict['instanceActions']:
fake_action = self.fake_actions[FAKE_UUID][res['request_id']]
self.assertEqual(format_action(fake_action), format_action(res))
def test_get_action_with_events_allowed(self):
def fake_get_action(context, uuid, request_id):
action = models.InstanceAction()
action.update(self.fake_actions[uuid][request_id])
return action
def fake_get_events(context, action_id):
events = []
for evt in self.fake_events[action_id]:
event = models.InstanceActionEvent()
event.update(evt)
events.append(event)
return events
self.stub_out('nova.db.action_get_by_request_id', fake_get_action)
self.stub_out('nova.db.action_events_get', fake_get_events)
req = self._get_http_req('os-instance-actions/1',
use_admin_context=True)
res_dict = self.controller.show(req, FAKE_UUID, FAKE_REQUEST_ID)
fake_action = self.fake_actions[FAKE_UUID][FAKE_REQUEST_ID]
fake_events = self.fake_events[fake_action['id']]
fake_action['events'] = fake_events
self.assertEqual(format_action(fake_action),
format_action(res_dict['instanceAction']))
def test_get_action_with_events_not_allowed(self):
def fake_get_action(context, uuid, request_id):
return self.fake_actions[uuid][request_id]
def fake_get_events(context, action_id):
return self.fake_events[action_id]
self.stub_out('nova.db.action_get_by_request_id', fake_get_action)
self.stub_out('nova.db.action_events_get', fake_get_events)
self._set_policy_rules()
req = self._get_http_req('os-instance-actions/1')
res_dict = self.controller.show(req, FAKE_UUID, FAKE_REQUEST_ID)
fake_action = self.fake_actions[FAKE_UUID][FAKE_REQUEST_ID]
self.assertEqual(format_action(fake_action),
format_action(res_dict['instanceAction']))
def test_action_not_found(self):
def fake_no_action(context, uuid, action_id):
return None
self.stub_out('nova.db.action_get_by_request_id', fake_no_action)
req = self._get_http_req('os-instance-actions/ |
XiangYz/webscraper | itchat_test.py | Python | lgpl-2.1 | 157 | 0.019108 | import itchat
itchat.login()
friends = itchat.get_friends(update = True)[0:]
info = {}
for i in friends:
| info[i['NickName']] = i.Signatur | e
print(info) |
heinzehavinga/bethepresidenttoday | game/migrations/0001_initial.py | Python | cc0-1.0 | 1,827 | 0.001642 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('text', models.CharField(max_length=255)),
('scoreA', models.FloatField(null=True, blank=True)),
('scoreB', models.FloatField(null=True, blank=True)),
('scoreC', models.FloatField(null=True, blank=True)),
('scoreD', models.FloatField(null=True, blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='HelpText',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('text', models.TextField()),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Problem',
f | ields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
| ('text', models.TextField()),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='choice',
name='problem',
field=models.ForeignKey(to='game.Problem'),
preserve_default=True,
),
]
|
atruberg/django-custom | tests/null_queries/tests.py | Python | bsd-3-clause | 2,936 | 0.000341 | from __future__ import absolute_import
from django.test import TestCase
from django.core.exceptions import FieldError
from .models import Poll, Choice, OuterA, Inner, OuterB
class NullQueriesTests(TestCase):
def test_none_as_null(self):
"""
Regression test for the use of None as a query value.
None is interpreted as an SQL NULL, but only in __exact queries.
Set up some initial polls and choices
"""
p1 = Poll(question='Why?')
p1.save()
c1 = Choice(poll=p1, choice='Because.')
c1.save()
c2 = Choice(poll=p1, choice='Why Not?')
c2.save()
# Exact query with value None returns nothing ("is NULL" in sql,
# but every 'id' field has a value).
self.assertQuerysetEqual(Choice.objects.filter(choice__exact=None), [])
# Excluding the previous result returns everything.
self.assertQuerysetEqual(
Choice.objects.exclude(choice=None).order_by('id'),
[
'<Choice: Choice: Because. in poll Q: Why? >',
'<Choice: Choice: Why Not? in poll Q: Why? >'
]
)
# Valid query, but fails because foo isn't a keyword
self.assertRaises(FieldError, Choice.objects.filter, foo__exact=None)
# Can't use None on anything other than __exact
self.assertRaises(ValueError, Choice.objects.filter, id__gt=None)
# Can't use None on anything other than __exact
self.assertRaises(ValueError, Choice.objects.filter, foo__gt=None)
# Related managers use __exact=None implicitly if the object has | n't been saved.
p2 = Poll(question="How?")
self.assertEqual(repr(p2.choice_set.all()), '[]')
def test_reverse_relation | s(self):
"""
Querying across reverse relations and then another relation should
insert outer joins correctly so as not to exclude results.
"""
obj = OuterA.objects.create()
self.assertQuerysetEqual(
OuterA.objects.filter(inner__third=None),
['<OuterA: OuterA object>']
)
self.assertQuerysetEqual(
OuterA.objects.filter(inner__third__data=None),
['<OuterA: OuterA object>']
)
inner_obj = Inner.objects.create(first=obj)
self.assertQuerysetEqual(
Inner.objects.filter(first__inner__third=None),
['<Inner: Inner object>']
)
# Ticket #13815: check if <reverse>_isnull=False does not produce
# faulty empty lists
objB = OuterB.objects.create(data="reverse")
self.assertQuerysetEqual(
OuterB.objects.filter(inner__isnull=False),
[]
)
Inner.objects.create(first=obj)
self.assertQuerysetEqual(
OuterB.objects.exclude(inner__isnull=False),
['<OuterB: OuterB object>']
)
|
5agado/recurrent-neural-networks-intro | src/model/servingClient.py | Python | apache-2.0 | 2,662 | 0.005259 | import os
import sys
from ast import literal_eval
import tensorflow as tf
import numpy as np
from grpc.beta import implementations
# reference local copy of Tensorflow Serving API Files
sys.path.append(os.path.join(os.getcwd(), os.pardir, os.pardir, 'ext_libs'))
import lib.predict_pb2 as predict_pb2
import lib.prediction_service_pb2 as prediction_service_pb2
class ServingClient:
def __init__(self, host, port, model_info, inputs_info, timeout=10.0):
self.host = host
self.port = int(port)
self.channel = implementations.insecure_channel(host, self.port)
self.stub = prediction_service_pb2.beta_create_PredictionService_stub(self.channel)
self.model_info = model_info
self.inputs_info = inputs_info
self.timeout = timeout
def predict(self, x):
# TOFIX not generic
self.inputs_info[0]['value'] = x
res = self._predict(self.model_info, self.inputs_info)
#print("Results " + str(res))
#print("Results shape " + str(res.shape))
return res
def _predict(self, model_info, inputs_info):
#print("Inputs info" + str(inputs_info))
#print("Inputs info shape" + str(inputs_info[0]['value'].shape))
request = self._build_request(model_info, inputs_info)
# call prediction on the server
#print("Sending request " + str(request))
results = self.stub.Predict(request, timeout=self.timeout)
return ServingClient._transform_results(results)
def _build_request(self, model_info, inputs_info):
request = predict_pb2.PredictRequest()
request.model_spec.name = model_info['name']
request.model_spec.signature_name = model_info['signature_name']
# define inputs
for cur_input in inputs_info:
| cur_input_tensor = tf.contrib.util.make_tensor_proto(cur_input['value'],
dtype=tf.float32 if cur_input['type']=="float32" else tf.int64,
shape=cur_input['value'].shape)
request.inputs[cur_input['name']].CopyFrom(cur_input_tensor)
return request
# TODO generalize for N outputs
@staticmethod
def _transform_results(results):
# get the output from server response
outputs = results.outputs['outputs']
# extract response-tensor shape
tensor_shape = outputs.tensor_shape
tensor_shape = [dim.size for dim in tensor_shape.dim]
# reshape list of float to given shape
res_tensor = np.array(outputs.float_val).reshape(tensor_shape)
return res_tensor
| |
SINGROUP/pycp2k | pycp2k/classes/_centroid_gyr1.py | Python | lgpl-3.0 | 712 | 0.002809 | from pycp2k.inputsection import InputSection
from ._each73 import _each73
class _centroid_gyr1(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Section_parameters = None
self.Add_last = None
self.Common_iteration_levels = None
self.Filename = None
self.Log_print_key = None
self.Unit = None
self.EACH = _each73()
self._name = "CENTROID_GYR"
self._keywords = {'Log_print_key': 'LOG_ | PRINT_KEY', 'Filename': 'FILENAME', 'Add_last': 'ADD_LAST', 'Common_iteration_levels': 'COMMON_ITERATION_LEVELS', 'Unit': 'UNIT'}
self._subsections = {'EACH': 'EACH'}
self._attributes | = ['Section_parameters']
|
rkashapov/buildbot | master/buildbot/reporters/gerrit.py | Python | gpl-2.0 | 15,470 | 0.00084 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
"""
Push events to Gerrit
"""
from __future__ import absolute_import
from __future__ import print_function
from future.builtins import range
from future.utils import iteritems
import time
import warnings
from distutils.version import LooseVersion
from twisted.internet import defer
from twisted.internet import reactor
from twisted.internet.protocol import ProcessProtocol
from twisted.python import log
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.process.results import Results
from buildbot.reporters import utils
from buildbot.util import bytes2NativeString
from buildbot.util import service
# Cache the version that the gerrit server is running for this many seconds
GERRIT_VERSION_CACHE_TIMEOUT = 600
GERRIT_LABEL_VERIFIED = 'Verified'
GERRIT_LABEL_REVIEWED = 'Code-Review'
def makeReviewResult(message, *labels):
"""
helper to produce a review result
"""
return dict(message=message, labels=dict(labels))
def _han | dleLegacyResult(result):
"""
make sure the re | sult is backward compatible
"""
if not isinstance(result, dict):
warnings.warn('The Gerrit status callback uses the old way to '
'communicate results. The outcome might be not what is '
'expected.')
message, verified, reviewed = result
result = makeReviewResult(message,
(GERRIT_LABEL_VERIFIED, verified),
(GERRIT_LABEL_REVIEWED, reviewed))
return result
def _old_add_label(label, value):
if label == GERRIT_LABEL_VERIFIED:
return ["--verified %d" % int(value)]
elif label == GERRIT_LABEL_REVIEWED:
return ["--code-review %d" % int(value)]
warnings.warn('Gerrit older than 2.6 does not support custom labels. '
'Setting %s is ignored.' % label)
return []
def _new_add_label(label, value):
return ["--label %s=%d" % (label, int(value))]
def defaultReviewCB(builderName, build, result, master, arg):
if result == RETRY:
return makeReviewResult(None)
message = "Buildbot finished compiling your patchset\n"
message += "on configuration: %s\n" % builderName
message += "The result is: %s\n" % Results[result].upper()
return makeReviewResult(message,
(GERRIT_LABEL_VERIFIED, result == SUCCESS or -1))
def defaultSummaryCB(buildInfoList, results, master, arg):
success = False
failure = False
msgs = []
for buildInfo in buildInfoList:
msg = "Builder %(name)s %(resultText)s (%(text)s)" % buildInfo
link = buildInfo.get('url', None)
if link:
msg += " - " + link
else:
msg += "."
msgs.append(msg)
if buildInfo['result'] == SUCCESS: # pylint: disable=simplifiable-if-statement
success = True
else:
failure = True
if success and not failure:
verified = 1
else:
verified = -1
return makeReviewResult('\n\n'.join(msgs), (GERRIT_LABEL_VERIFIED, verified))
# These are just sentinel values for GerritStatusPush.__init__ args
class DEFAULT_REVIEW(object):
pass
class DEFAULT_SUMMARY(object):
pass
class GerritStatusPush(service.BuildbotService):
"""Event streamer to a gerrit ssh server."""
name = "GerritStatusPush"
gerrit_server = None
gerrit_username = None
gerrit_port = None
gerrit_version_time = None
gerrit_version = None
gerrit_identity_file = None
reviewCB = None
reviewArg = None
startCB = None
startArg = None
summaryCB = None
summaryArg = None
_gerrit_notify = None
def reconfigService(self, server, username, reviewCB=DEFAULT_REVIEW,
startCB=None, port=29418, reviewArg=None,
startArg=None, summaryCB=DEFAULT_SUMMARY, summaryArg=None,
identity_file=None, builders=None, notify=None):
# If neither reviewCB nor summaryCB were specified, default to sending
# out "summary" reviews. But if we were given a reviewCB and only a
# reviewCB, disable the "summary" reviews, so we don't send out both
# by default.
if reviewCB is DEFAULT_REVIEW and summaryCB is DEFAULT_SUMMARY:
reviewCB = None
summaryCB = defaultSummaryCB
if reviewCB is DEFAULT_REVIEW:
reviewCB = None
if summaryCB is DEFAULT_SUMMARY:
summaryCB = None
# Parameters.
self.gerrit_server = server
self.gerrit_username = username
self.gerrit_port = port
self.gerrit_version = None
self.gerrit_version_time = 0
self.gerrit_identity_file = identity_file
self.reviewCB = reviewCB
self.reviewArg = reviewArg
self.startCB = startCB
self.startArg = startArg
self.summaryCB = summaryCB
self.summaryArg = summaryArg
self.builders = builders
self._gerrit_notify = notify
def _gerritCmd(self, *args):
'''Construct a command as a list of strings suitable for
:func:`subprocess.call`.
'''
if self.gerrit_identity_file is not None:
options = ['-i', self.gerrit_identity_file]
else:
options = []
return ['ssh'] + options + [
'@'.join((self.gerrit_username, self.gerrit_server)),
'-p', str(self.gerrit_port),
'gerrit'
] + list(args)
class VersionPP(ProcessProtocol):
def __init__(self, func):
self.func = func
self.gerrit_version = None
def outReceived(self, data):
vstr = b"gerrit version "
if not data.startswith(vstr):
log.msg(b"Error: Cannot interpret gerrit version info: " + data)
return
vers = data[len(vstr):].strip()
log.msg(b"gerrit version: " + vers)
self.gerrit_version = LooseVersion(bytes2NativeString(vers))
def errReceived(self, data):
log.msg(b"gerriterr: " + data)
def processEnded(self, status_object):
if status_object.value.exitCode:
log.msg("gerrit version status: ERROR:", status_object)
return
if self.gerrit_version:
self.func(self.gerrit_version)
def getCachedVersion(self):
if self.gerrit_version is None:
return None
if time.time() - self.gerrit_version_time > GERRIT_VERSION_CACHE_TIMEOUT:
# cached version has expired
self.gerrit_version = None
return self.gerrit_version
def processVersion(self, gerrit_version, func):
self.gerrit_version = gerrit_version
self.gerrit_version_time = time.time()
func()
def callWithVersion(self, func):
command = self._gerritCmd("version")
def callback(gerrit_version):
return self.processVersion(gerrit_version, func)
self.spawnProcess(self.VersionPP(callback), command[0], command, env=None)
class LocalPP(ProcessProtocol):
def __init__(self, status):
self.status = status
def outReceived(self, data):
log.msg("gerritout:", data)
|
Azure/azure-sdk-for-python | sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2020_09_30/operations/_shared_galleries_operations.py | Python | mit | 9,946 | 0.004122 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request, _format_url_section
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_request(
subscription_id: str,
location: str,
*,
shared_to: Optional[Union[str, "_models.SharedToValues"]] = None,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-09-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/sharedGalleries')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"location": _SERIALIZER.url("location", location, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
if shared_to is not None:
query_parameters['sharedTo'] = _SERIALIZER.query("shared_to", shared_to, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
def build_get_request(
subscription_id: str,
location: str,
gallery_unique_name: str,
**kwargs: Any
) -> HttpRequest:
api_version = "2020-09-30"
accept = "application/json"
# Construct URL
url = kwargs.pop("template_url", '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/sharedGalleries/{galleryUniqueName}')
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, 'str'),
"location": _SERIALIZER.url("location", location, 'str'),
"galleryUniqueName": _SERIALIZER.url("gallery_unique_name", gallery_unique_name, 'str'),
}
url = _format_url_section(url, **path_format_arguments)
# Construct parameters
query_parameters = kwargs.pop("params", {}) # type: Dict[str, Any]
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
# Construct headers
header_parameters = kwargs.pop("headers", {}) # type: Dict[str, Any]
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class SharedGalleriesOperations(object):
"""SharedGalleriesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.compute.v2020_09_30.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, cl | ient, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deseri | alizer
self._config = config
@distributed_trace
def list(
self,
location: str,
shared_to: Optional[Union[str, "_models.SharedToValues"]] = None,
**kwargs: Any
) -> Iterable["_models.SharedGalleryList"]:
"""List shared galleries by subscription id or tenant id.
:param location: Resource location.
:type location: str
:param shared_to: The query parameter to decide what shared galleries to fetch when doing
listing operations.
:type shared_to: str or ~azure.mgmt.compute.v2020_09_30.models.SharedToValues
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SharedGalleryList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.compute.v2020_09_30.models.SharedGalleryList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SharedGalleryList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
location=location,
shared_to=shared_to,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
location=location,
shared_to=shared_to,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("SharedGalleryList", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Compute/locations/{location}/sharedGalleries'} # type: ignore
@distributed_trace
def get(
self,
location: str,
gallery_unique_name: str,
**kwargs: Any
) -> "_models.SharedGallery":
"""Get a shared gallery by subscription id or tenant id.
:param location: Resource location.
|
IngenuityEngine/daemon | setup.py | Python | mit | 77 | 0 | from distutils.core import setup
import py2exe
setup(c | onsole | =['daemon.py'])
|
quaddra/engage | python_pkg/engage/drivers/genforma/engage_django_sdk/packager/utils.py | Python | apache-2.0 | 14,428 | 0.004713 | """Miscellaneous utility functions"""
import os
import os.path
import sys
import json
import re
import subprocess
import traceback
import logging
import copy
logger = logging.getLogger(__name__)
from parse_requirements import parse, get_local_files_matching_requirements
from engage_django_components import get_additional_requirements
import package_data
from e | rrors import RequestedPackageError, PipError
PIP_TIMEOUT=45
def app_module_name_to_dir(app_directory_path, app_module_name, check_for_init_pys=True):
"""The application module could be a submodule, so we may need to split each level"""
dirs = app_module_name.split(".")
dirpath = app_directory_path
module_name = None
for dirname in dirs:
| if module_name:
module_name = module_name + "." + dirname
else:
module_name = dirname
dirpath = os.path.join(dirpath, dirname)
init_file = os.path.join(dirpath, "__init__.py")
if check_for_init_pys and not os.path.exists(init_file):
raise ValidationError("Missing __init__.py file for module %s" % module_name)
return dirpath
def write_json(json_obj, filename):
with open(filename, 'wb') as f:
json.dump(json_obj, f)
def find_files(directory, filename_re_pattern, operation_function):
"""Find all the files recursively under directory whose names contain the specified pattern
and run the operation_function on the fileame.
"""
regexp = re.compile(filename_re_pattern)
directory = os.path.abspath(os.path.expanduser(directory))
for root, dirs, files in os.walk(directory):
for filename in files:
if regexp.search(filename):
operation_function(os.path.join(root, filename))
def get_deployed_settings_module(django_settings_module):
mod_comps = django_settings_module.split('.')
if len(mod_comps)==1:
return "deployed_settings"
else:
return '.'.join(mod_comps[0:-1]) + ".deployed_settings"
def import_module(qualified_module_name):
"""Import the specified module and return the contents of that module.
For example if we have a module foo.bar containing variables x and y,
we can do the following:
m = import_module("foo.bar")
print m.x, m.y
"""
m = __import__(qualified_module_name)
mod_comps = (qualified_module_name.split('.'))[1:]
for comp in mod_comps:
m = getattr(m, comp)
return m
def get_settings_file_directory(python_path_dir, django_settings_module):
settings_module_comps = django_settings_module.split(".")
if len(settings_module_comps)==1:
return python_path_dir
else:
return os.path.join(python_path_dir, "/".join(settings_module_comps[0:-1]))
def get_python_path(python_path_dir, django_settings_module):
"""The PYTHONPATH we give to the python subprocess should include
the python path needed to import our settings module as well as the
directory containing the settings file itself (unless it is the same
as python_path_dir).
"""
settings_dir = get_settings_file_directory(python_path_dir, django_settings_module)
if settings_dir != python_path_dir:
return "%s:%s" % (python_path_dir, settings_dir)
else:
return python_path_dir
# Copied from engage.utils.process
# TODO: need to share code directly
def run_and_log_program(program_and_args, env_mapping, logger, cwd=None,
input=None, hide_input=False, allow_broken_pipe=False):
"""Run the specified program as a subprocess and log its output.
program_and_args should be a list of entries where the first is the
executable path, and the rest are the arguments.
"""
logger.debug(' '.join(program_and_args))
if cwd != None:
logger.debug("Subprocess working directory is %s" % cwd)
subproc = subprocess.Popen(program_and_args,
env=env_mapping, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, cwd=cwd)
logger.debug("Started program %s, pid is %d" % (program_and_args[0],
subproc.pid))
if input!=None:
if not hide_input:
logger.debug("Input is " + input)
try:
(output, dummy) = subproc.communicate(input)
for line in output.split("\n"):
logger.debug("[%d] %s" % (subproc.pid, line.rstrip()))
except OSError:
if not allow_broken_pipe:
raise
else:
logger.warn("Subprocess %d closed stdin before write of input data complete" %
subproc.pid)
for line in subproc.stdout:
logger.debug("[%d] %s" % (subproc.pid, line))
else:
subproc.stdin.close()
for line in subproc.stdout:
logger.debug("[%d] %s" % (subproc.pid, line))
subproc.wait()
logger.debug("[%d] %s exited with return code %d" % (subproc.pid,
program_and_args[0],
subproc.returncode))
return subproc.returncode
class SubprocBadRc(Exception):
def __init__(self, msg, rc):
super(SubprocBadRc, self).__init__(msg)
self.rc = rc
def check_run_and_log_program(program_and_args, env_mapping, logger, cwd=None,
input=None, hide_input=False, allow_broken_pipe=False):
"""Version of run_and_log_program that checks for return code and throws an
exception if not zero.
"""
rc = run_and_log_program(program_and_args, env_mapping, logger, cwd,
input, hide_input, allow_broken_pipe)
if rc!=0:
raise SubprocBadRc("Call to %s failed with return code %d, full command was '%s'" %
(program_and_args[0], rc, ' '.join(program_and_args)),
rc)
def get_virtualenv_version(exe_path):
subproc = subprocess.Popen([exe_path, "--version"],
shell=False, stdout=subprocess.PIPE,
cwd=os.path.dirname(exe_path),
stderr=subprocess.STDOUT)
ver_string = (subproc.communicate()[0]).rstrip()
logger.debug("virtualenv version is %s" % ver_string)
return [int(component) if component.isdigit() else component
for component in ver_string.split(".")]
def create_virtualenv(desired_python_dir, package_cache_dir=None):
logger.info(">> Creating Python virtualenv for validation")
def find_exe_in_paths(paths, exe):
tried = []
for p in paths:
e = os.path.join(p, exe)
if os.path.exists(e):
return e
tried.append(e)
raise Exception("Unable to find %s, tried %s" % (exe, tried))
paths = []
if os.uname()[0]=="Darwin" and sys.executable.endswith("Resources/Python.app/Contents/MacOS/Python"):
# on MacOS, sys.executable could lie to us -- if we start a python like .....2.7/bin/python,
# it will tell us .....2.7/Resources/Python.app/Contents/MacOS/Python. This is problematic,
# because the other executable scripts (e.g. virtualenv) will be installed with the real python
# not the one that sys.executable claims is the real python. To fix this, we add the real python
# to the head of our search list.
real_python_dir = os.path.abspath(os.path.join(sys.executable, "../../../../../bin"))
paths.append(real_python_dir)
paths.append(os.path.dirname(sys.executable))
env_path = os.getenv("PATH")
if env_path:
for path in env_path.split(":"):
paths.append(os.path.abspath(os.path.expanduser(path)))
paths.append(os.path.expanduser("~/bin"))
python_exe = find_exe_in_paths(paths, "python")
virtualenv_exe = find_exe_in_paths(paths, "virtualenv")
version = get_virtualenv_version(virtualenv_exe)
if version[0]>1 or (version[0]==1 and version[1]>6) or \
(version[0]== |
suzp1984/web-dev-demos | data-virtualization/d3/server.py | Python | apache-2.0 | 338 | 0.005917 | import SimpleHTTPServer
import SocketServer
import os
if __name__ == "__main__":
PORT = int(os.environ.get("PORT", 8000))
handler = SimpleHTTPServer.SimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), handler | )
print("python simple http server started at port ", PORT)
httpd.serve_forever()
| |
quantumlib/OpenFermion-FQE | src/fqe/fqe_data.py | Python | apache-2.0 | 130,770 | 0.000879 | # Copyright 2020 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the Lice | nse at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY | KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Fermionic Quantum Emulator data class for holding wavefunction data.
"""
#Expanding out simple iterator indexes is unnecessary
#pylint: disable=invalid-name
#imports are ungrouped for type hinting
#pylint: disable=ungrouped-imports
#numpy.zeros_like initializer is not accepted
#pylint: disable=unsupported-assignment-operation
#pylint: disable=too-many-lines
#pylint: disable=too-many-locals
#pylint: disable=too-many-branches
#pylint: disable=too-many-arguments
#pylint: disable=dangerous-default-value
import copy
import itertools
from typing import List, Optional, Tuple, Callable, Iterator, \
TYPE_CHECKING
import numpy
from scipy.special import binom
from fqe.bitstring import integer_index, get_bit, count_bits_above
from fqe.bitstring import set_bit, unset_bit, reverse_integer_index
from fqe.util import rand_wfn, validate_config
from fqe.fci_graph import FciGraph
from fqe.fci_graph_set import FciGraphSet
import fqe.settings
from fqe.lib.fqe_data import _lm_apply_array1, _make_dvec_part, \
_make_coeff_part, _make_dvec, _make_coeff, _diagonal_coulomb, \
_lm_apply_array12_same_spin_opt, _lm_apply_array12_diff_spin_opt, \
_apply_array12_lowfillingab, _apply_array12_lowfillingab2, \
_apply_array12_lowfillingaa, _apply_array12_lowfillingaa2, \
_apply_individual_nbody1_accumulate, _sparse_scale, \
_evaluate_map_each, _make_Hcomp, \
_sparse_apply_array1, _lm_apply_array1_alpha_column, \
_apply_diagonal_inplace, _evolve_diagonal_inplace, _make_nh123, \
_apply_diagonal_coulomb
from fqe.lib.linalg import _zimatadd, _transpose
if TYPE_CHECKING:
from numpy import ndarray as Nparray
from numpy import dtype as Dtype
from fqe.fqe_data_set import FqeDataSet
class FqeData:
"""This is a basic data structure for use in the FQE.
"""
def __init__(self,
nalpha: int,
nbeta: int,
norb: int,
fcigraph: Optional[FciGraph] = None,
dtype: 'Dtype' = numpy.complex128) -> None:
"""The FqeData structure holds the wavefunction for a particular
configuration and provides an interace for accessing the data through
the fcigraph functionality.
Args:
nalpha (int): the number of alpha electrons
nbeta (int): the number of beta electrons
norb (int): the number of spatial orbitals
fcigraph (optional, FciGraph): the FciGraph to be used. When None, \
it is computed here
dtype (optional, Dtype): numpy.dtype of the underlying array
"""
validate_config(nalpha, nbeta, norb)
if not (fcigraph is None) and (nalpha != fcigraph.nalpha() or
nbeta != fcigraph.nbeta() or
norb != fcigraph.norb()):
raise ValueError("FciGraph does not match other parameters")
if fcigraph is None:
self._core = FciGraph(nalpha, nbeta, norb)
else:
self._core = fcigraph
self._dtype = dtype
if fqe.settings.use_accelerated_code:
# Use the same C extension for both cases by default
self._low_thresh = 0.0
else:
self._low_thresh = 0.3
self._nele = self.nalpha() + self.nbeta()
self._m_s = self.nalpha() - self.nbeta()
self.coeff = numpy.zeros((self.lena(), self.lenb()), dtype=self._dtype)
def __getitem__(self, key: Tuple[int, int]) -> complex:
"""Get an item from the fqe data structure by using the knowles-handy
pointers.
Args:
key (Tuple[int, int]): a pair of alpha and beta strings
Returns:
complex: the value of the corresponding element
"""
return self.coeff[self._core.index_alpha(key[0]),
self._core.index_beta(key[1])]
def __setitem__(self, key: Tuple[int, int], value: complex) -> None:
"""Set an element in the fqe data strucuture
Args:
key (Tuple[int, int]): a pair of alpha and beta strings
value: the value to be set
"""
self.coeff[self._core.index_alpha(key[0]),
self._core.index_beta(key[1])] = value
def __deepcopy__(self, memodict={}) -> 'FqeData':
"""Construct new FqeData that has the same coefficient
Returns:
FqeData: an object that is deepcopied from self
"""
new_data = FqeData(nalpha=self.nalpha(),
nbeta=self.nbeta(),
norb=self._core.norb(),
fcigraph=self._core,
dtype=self._dtype)
new_data._low_thresh = self._low_thresh
new_data.coeff = self.coeff.copy()
return new_data
def get_fcigraph(self) -> 'FciGraph':
"""
Returns the underlying FciGraph object
Returns:
FciGraph: underlying FciGraph object for this object
"""
return self._core
def apply_diagonal_inplace(self, array: 'Nparray') -> None:
"""Iterate over each element and perform apply operation in place
Args:
array (Nparray): a diagonal operator to be applied to self. The size \
of this array is norb or 2*norb depending on the context
"""
beta_ptr = 0
if array.size == 2 * self.norb():
beta_ptr = self.norb()
elif array.size != self.norb():
raise ValueError('Non-diagonal array passed'
' into apply_diagonal_inplace')
if not array.flags['C_CONTIGUOUS']:
array = numpy.copy(array)
if fqe.settings.use_accelerated_code:
aarray = array[:self.norb()]
barray = array[beta_ptr:]
_apply_diagonal_inplace(self.coeff, aarray, barray,
self._core.string_alpha_all(),
self._core.string_beta_all())
else:
alpha = numpy.zeros((self._core.lena(),), dtype=numpy.complex128)
beta = numpy.zeros((self._core.lenb(),), dtype=numpy.complex128)
for alp_cnf in range(self._core.lena()):
occupation = self._core.string_alpha(alp_cnf)
diag_ele = 0.0
for ind in integer_index(occupation):
diag_ele += array[ind]
alpha[alp_cnf] = diag_ele
for bet_cnf in range(self._core.lenb()):
occupation = self._core.string_beta(bet_cnf)
diag_ele = 0.0
for ind in integer_index(occupation):
diag_ele += array[beta_ptr + ind]
beta[bet_cnf] = diag_ele
for alp_cnf in range(self._core.lena()):
for bet_cnf in range(self._core.lenb()):
self.coeff[alp_cnf,
bet_cnf] *= alpha[alp_cnf] + beta[bet_cnf]
def evolve_diagonal(self, array: 'Nparray',
inplace: bool = False) -> 'Nparray':
"""Iterate over each element and return the exponential scaled
contribution.
Args:
array (Nparray): a diagonal operator using which time evolution is \
performed. The size of this array is norb or 2*norb depending \
on the context
inplace (bool): toggle to specify if the result will be stored \
in-place or out-of-place
Retu |
biomodels/MODEL1006230003 | MODEL1006230003/model.py | Python | cc0-1.0 | 427 | 0.009368 | import os
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'MODEL1006230003.xml')
with open(sbmlFilePath,'r') as f:
| sbmlString = f.read() |
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
if module_exists('libsbml'):
import libsbml
sbml = libsbml.readSBMLFromString(sbmlString) |
CLVsol/odoo_addons | clv_insured/seq/clv_insured_seq.py | Python | agpl-3.0 | 3,732 | 0.007771 | # -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp import models, fields, api
def format_code(code_seq):
code = map(int, str(code_seq))
code_len = len(code)
while len(code) < 14:
code.insert(0, 0)
while len(code) < 16:
n = sum([(len(code) + 1 - i) * v for i, v in enumerate(code)]) % 11
if n > 1:
f = 11 - n
else:
f = 0
code.append(f)
code_str = "%s.%s.%s.%s.%s-%s" % (str(code[0]) + str(code[1]),
str(code[2]) + str(code[3]) + str(code[4]),
str(code[5]) + str(code[6]) + str(code[7]),
str(code[8]) + str(code[9]) + str(code[10]),
str(code[11]) + str(code[12]) + str(code[13]),
str(code[14]) + str(code[15]))
if code_len <= 3:
code_form = code_str[18 - code_len:21]
elif code_len > 3 and code_len <= 6:
code_form = code_str[17 - code_len:21]
elif code_len > 6 and code_len <= 9:
code_form = code_str[16 - code_len:21]
elif code_len > 9 and code_len <= 12:
code_form = code_str[15 - code_len:21]
elif code_len > 12 and code_len <= 14:
code_form = code_str[14 - code_len:21]
return code_form
class clv_insured(models.Model):
_inherit = 'clv_insured'
code = fields.Char('Insured Code', size=64, select=1, required=False, readonly=False, default='/',
help=' | Use "/" to get an automatic new Insured Code.')
@api.model
def create(self, vals):
if not 'code' in vals or ('code' in vals and vals['code'] == '/'):
code_seq = self.pool.get('ir.s | equence').get(self._cr, self._uid, 'clv_insured.code')
vals['code'] = format_code(code_seq)
return super(clv_insured, self).create(vals)
@api.multi
def write(self, vals):
if 'code' in vals and vals['code'] == '/':
code_seq = self.pool.get('ir.sequence').get(self._cr, self._uid, 'clv_insured.code')
vals['code'] = format_code(code_seq)
return super(clv_insured, self).write(vals)
@api.one
def copy(self, default=None):
default = dict(default or {})
default.update({'code': '/',})
return super(clv_insured, self).copy(default)
|
lcpt/xc | verif/tests/materials/prestressing/test_short_term_loss_prestress_01.py | Python | gpl-3.0 | 3,985 | 0.032209 | # -*- coding: utf-8 -*-
from __future__ import division
'''Test for checking variation of initial prestress force along a
post-tensioned member.
Data and rough calculation are taken from
Example 4.3 of the topic 4 of course "Prestressed Concrete Design
(SAB 4323) by Baderul Hisham Ahmad
ocw.utm.my
Problem statement:
Determine the initial prestress force distribution
along the beam if the anchorage draw-in is 5
mm. Given the following:
• Span = 20m, μ= 0.25 & K = 17 x 10-4 per metre
• fpi = 1239 N/ mm2 ; A ps = 2850 mm2
• e at both ends = 0
• e at mid-span = 558 mm
• Es = 195 kN/mm2
'''
__author__= "Ana Ortega (AO_O)"
__copyright__= "Copyright 2017, AO_O"
__license__= "GPL"
__version__= "3.0"
__email__= "ana.ortega@xcengineering.xyz"
import numpy as np
import math
from materials.prestressing import prestressed_concrete as presconc
from model.geometry import geom_utils
# Input data -----------------------------------------------------------------
#Geometry
lBeam=20 #beam span [m]
#Parabola
eEnds=0 #eccentricity of cables at both ends of the beam
eMidspan=-0.558 #eccentricity of cables at midspan [m]
angl_Parab_XZ=math.pi/4 #angle between the vertical plane that contains the
#parabola and the plane XZ
#Material
Ep=195e9 #elastic modulus of prestressing steel [Pa]
#Prestressing process
mu=0.25 #coefficient of friction between the cables and their sheating
k=0.0017 #wobble coefficient per meter length of cable [1/m]
sigmap0max=1239e6 #Initial stress of cable [Pa]
Aps=2850e-6 #Area of cable [m2]
# Interpolation
n_points_rough=5 #number of points provided to the interpolation algorithm
n_points_fine=101 #number of points interpolated
#Anchorage slip
deltaL=5e-3 #anchorage draw-in (provided by manufacturer) [m]
# Reference values from the hand calculation in the course example --------
#Rough results from direct calculation (formula):
lp_anch_lhe=419.3 #loss of prestress force at left-hand end anchorage [kN]
fl_frc=15.82 #loss of prestress due to friction [kN/m]
P_le=3111.9 #prestress force at left end [kN]
P_ms=3270.1 #prestress force at midspan [kN]
P_re=3214.8 #prestress force at right end [kN]
# XC model
#Tendon [m] definition, layout and friction losses
a,b,c=geom_utils.fit_parabola(x=np.array([0,lBeam/2.0,lBeam]), y=np.array([eEnds,eMidspan,eEnds]))
x_parab_rough,y_parab_rough,z_parab_rough=geom_utils.eq_points_parabola(0,lBeam,n_points_rough,a,b,c,angl_Parab_XZ)
tendon=presconc.PrestressTendon([])
tendon.roughCoordMtr=np.array([x_parab_rough,y_parab_rough,z_parab_rough])
#Interpolated 3D spline
tendon.pntsInterpTendon(n_points_fine,smoothness=1,kgrade=3)
# Losses of prestressing due to friction
lssFrict=tendon.getLossFriction(coefFric=mu,k=k,sigmaP0_extr1=sigmap0max,sigmaP0_extr2=0.0)
# Losses of prestressing due to anchorage slip (loss due to friction must be
# previously calculated
lssAnch=tendon.getLossAnchor(Ep=Ep,anc_slip_extr1=deltaL,anc_slip_extr2=0.0)
Laffected=tendon.projXYcoordZeroAnchLoss[0] # effective length of tendon
#affected by the anchorage slip in extremity 1 [m]
# Results
lssAnch_e1=lssAnch[0] #prestress loss due to anchorage draw-in extremity 1
lssAnch_md=lssAnch[int(len(lssAnch)/2)] #prestress loss due to anchorage draw-in midspan
lssAnch_e2=lssAnch[-1] #prestress loss due to anchorage draw-in extremity 2
lssFrict_e1=lssFrict[0] #prestress loss due to friction extremity 1
lssFrict_md=lssFrict[int(len(lssFrict)/2)] #prestress loss due to friction midspan
lssFrict_e2=lssFrict[-1] #prestress loss due to friction extremity 2
# Remaining prestress force [kN] at the two ends and midspan.
P_extr1=(sigmap0max-lssAnch_e1-lssFrict_e1)*Aps*1e-3
P_midspan=(sigmap0max-lssAnch_md-lssFrict_md)*Aps*1e-3
P_extr2=(sigmap0max-lssAnch_e2-lssFrict_e2)*Aps*1e-3
# Relative error against the hand-calculated reference values.
ratio1=abs(P_extr1-P_le)/P_le
ratio2=abs(P_midspan-P_ms)/P_ms
ratio3=abs(P_extr2-P_re)/P_re
import os
from miscUtils import LogMessages as lmsg
fname= os.path.basename(__file__)
if (ratio1<5.e-3 and ratio2<5.e-4 and ratio3<5.e-3):
    print "test ",fname,": ok."
else:
    lmsg.error(fname+' ERROR.')
|
Pinyto/cloud | manage.py | Python | gpl-3.0 | 254 | 0 | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at this project's settings before anything imports them.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pinytoCloud.settings")
    # Imported lazily so the settings variable set above is honoured.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
|
tswast/google-cloud-python | vision/setup.py | Python | apache-2.0 | 2,379 | 0.00042 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os
import setuptools
# Package metadata.
name = "google-cloud-vision"
description = "Cloud Vision API API client library"
version = "0.41.0"
# Trove classifier controlling the advertised package maturity.
release_status = "Development Status :: 4 - Beta"
# Runtime dependencies; enum34 backports the enum module to Python < 3.4.
dependencies = [
    "google-api-core[grpc] >= 1.14.0, < 2.0.0dev",
    'enum34; python_version < "3.4"',
]
package_root = os.path.abspath(os.path.dirname(__file__))
# The README is used verbatim as the PyPI long description.
readme_filename = os.path.join(package_root, "README.rst")
with io.open(readme_filename, encoding="utf-8") as readme_file:
    readme = readme_file.read()
# Only ship the google.* packages (skip tests/docs helper packages).
packages = [
    package for package in setuptools.find_packages() if package.startswith("google")
]
# Declare the shared namespace packages (pre-PEP 420 style).
namespaces = ["google"]
if "google.cloud" in packages:
    namespaces.append("google.cloud")
setuptools.setup(
    name=name,
    version=version,
    description=description,
    long_description=readme,
    author="Google LLC",
    author_email="googleapis-packages@google.com",
    license="Apache 2.0",
    url="https://github.com/GoogleCloudPlatform/google-cloud-python",
    classifiers=[
        release_status,
        "Intended Audience :: Developers",
        "License :: OSI Approved :: Apache Software License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Operating System :: OS Independent",
        "Topic :: Internet",
    ],
    platforms="Posix; MacOS X; Windows",
    packages=packages,
    namespace_packages=namespaces,
    install_requires=dependencies,
    python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*",
    include_package_data=True,
    zip_safe=False,
)
|
ngageoint/voxel-globe | voxel_globe/image_view/apps.py | Python | mit | 125 | 0.032 | from django.apps import AppConfig
class ImageViewConfig(AppConfig):
    """Django AppConfig for the voxel_globe image_view application."""
    name = 'voxel_globe.image_view'
    label = 'image_view'
qalit/DAMS | DAMS/apps/generic_views/forms.py | Python | gpl-2.0 | 4,640 | 0.004741 | from django import forms
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.db import models
import types
from DAMS import settings
def return_attrib(obj, attrib, arguments=None):
    """Resolve a dotted attribute path on *obj* (e.g. ``'user.profile.name'``).

    If the resolved attribute is a bound method it is called -- with
    ``arguments`` as keyword arguments when given -- and its result
    returned.  On any failure the error is swallowed, unless
    ``settings.DEBUG`` is on, in which case a diagnostic string is
    returned instead.  (Python 2 only: relies on the ``except E, err``
    syntax and the ``reduce`` builtin.)
    """
    try:
        # reduce(getattr, ...) walks the dotted path one attribute at a time.
        result = reduce(getattr, attrib.split("."), obj)
        if isinstance(result, types.MethodType):
            if arguments:
                return result(**arguments)
            else:
                return result()
        else:
            return result
    except Exception, err:
        if settings.DEBUG:
            return "Attribute error: %s; %s" % (attrib, err)
        else:
            # Silently return None in production.
            pass
class DetailSelectMultiple(forms.widgets.SelectMultiple):
    """Read-only replacement for SelectMultiple: renders the selected
    choices as a ``<ul class="list">`` instead of a ``<select>``.

    When a ``queryset`` is supplied, each rendered item links to the
    object's ``get_absolute_url()`` (falling back to plain text when the
    model lacks that method).
    """
    def __init__(self, queryset=None, *args, **kwargs):
        # Kept so render() can resolve choice PKs back to model instances.
        self.queryset=queryset
        super(DetailSelectMultiple, self).__init__(*args, **kwargs)
    def render(self, name, value, attrs=None, choices=()):
        """Return the selection as a safe HTML unordered list."""
        if value is None: value = ''
        #final_attrs = self.build_attrs(attrs, name=name)
        output = u'<ul class="list">'
        options = None
        if value:
            # value may be a list of PKs (multi-select) or a single PK.
            if getattr(value, '__iter__', None):
                options = [(index, string) for index, string in self.choices if index in value]
            else:
                options = [(index, string) for index, string in self.choices if index == value]
        else:
            # No selection: show all choices, unless the only content is
            # Django's empty-choice placeholder.
            if self.choices:
                if self.choices[0] != (u'', u'---------') and value != []:
                    options = [(index, string) for index, string in self.choices]
        if options:
            for index, string in options:
                if self.queryset:
                    try:
                        output += u'<li><a href="%s">%s</a></li>' % (self.queryset.get(pk=index).get_absolute_url(), string)
                    except AttributeError:
                        # Model without get_absolute_url(): plain text item.
                        output += u'<li>%s</li>' % (string)
                else:
                    output += u'<li>%s</li>' % string
        else:
            output += u'<li>%s</li>' % _(u"None")
        return mark_safe(output + u'</ul>\n')
class DetailForm(forms.ModelForm):
    """Read-only detail view of a model instance rendered through a form.

    Optionally adds ``extra_fields`` (each a dict with a dotted ``field``
    path and optional ``label``) resolved against the instance, and swaps
    every Select/SelectMultiple widget for the read-only
    :class:`DetailSelectMultiple` list widget.
    """
    def __init__(self, extra_fields=None, *args, **kwargs):
        super(DetailForm, self).__init__(*args, **kwargs)
        if extra_fields:
            for extra_field in extra_fields:
                result = return_attrib(self.instance, extra_field['field'])
                label = 'label' in extra_field and extra_field['label'] or None
                #TODO: Add others result types <=> Field types
                if isinstance(result, models.query.QuerySet):
                    self.fields[extra_field['field']]=forms.ModelMultipleChoiceField(queryset=result, label=label)
        # Replace selection widgets with the read-only list renderer.
        for field_name, field in self.fields.items():
            if isinstance(field.widget, forms.widgets.SelectMultiple):
                self.fields[field_name].widget = DetailSelectMultiple(
                    choices=field.widget.choices,
                    attrs=field.widget.attrs,
                    queryset=getattr(field, 'queryset', None),
                )
                self.fields[field_name].help_text=''
            elif isinstance(field.widget, forms.widgets.Select):
                self.fields[field_name].widget = DetailSelectMultiple(
                    choices=field.widget.choices,
                    attrs=field.widget.attrs,
                    queryset=getattr(field, 'queryset', None),
                )
                self.fields[field_name].help_text=''
class GenericConfirmForm(forms.Form):
    """Empty form used by the generic views to render a confirmation dialog."""
    pass
class GenericAssignRemoveForm(forms.Form):
    """Two-list assign/remove form; querysets are injected at construction.

    ``left_list`` holds the available items (optionally narrowed by
    ``left_filter``), ``right_list`` the currently assigned ones.
    """
    left_list = forms.ModelMultipleChoiceField(required=False, queryset=None)
    right_list = forms.ModelMultipleChoiceField(required=False, queryset=None)

    def __init__(self, left_list_qryset=None, right_list_qryset=None, left_filter=None, *args, **kwargs):
        super(GenericAssignRemoveForm, self).__init__(*args, **kwargs)
        if left_filter:
            # left_filter is a sequence of Q-style positional filter args.
            self.fields['left_list'].queryset = left_list_qryset.filter(*left_filter)
        else:
            self.fields['left_list'].queryset = left_list_qryset
        self.fields['right_list'].queryset = right_list_qryset
class FilterForm(forms.Form):
    """Build one optional ModelChoiceField per entry of ``list_filters``.

    Each entry is a dict with ``name``, ``queryset`` and an optional
    ``title`` used as the (capitalised) field label.
    """
    def __init__(self, list_filters, *args, **kwargs):
        super(FilterForm, self).__init__(*args, **kwargs)
        for list_filter in list_filters:
            label = list_filter.get('title', list_filter['name'])
            # Capitalise only the first character, preserving the rest.
            self.fields[list_filter['name']] = forms.ModelChoiceField(queryset=list_filter['queryset'], label=label[0].upper() + label[1:], required=False)
|
senthil10/NouGAT | nougat/evaluete.py | Python | mit | 18,678 | 0.004711 | from __future__ import absolute_import
from __future__ import print_function
import sys, os, yaml, glob
import subprocess
import pandas as pd
import re
import shutil
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from nougat import common, align
from itertools import groupby
from collections import OrderedDict
def run(global_config, sample_config):
    """Entry point of the assembly-evaluation step.

    Sorts the sample libraries by insert size, sanity-checks them,
    computes assembly statistics, filters short contigs out of the
    reference, then either runs the tools listed in
    ``sample_config["tools"]`` (dispatched by name to the ``_run_<tool>``
    functions of this module, each of which must return the updated
    sample_config) or the default QC pipeline (align -> qaTools -> FRC).
    """
    sorted_libraries_by_insert = \
        common._sort_libraries_by_insert(sample_config)
    _check_libraries(sorted_libraries_by_insert)
    # NOTE(review): computeAssemblyStats is neither defined nor imported in
    # the code shown here -- confirm it is available at runtime.
    computeAssemblyStats(sample_config)
    # filter out short contigs
    sample_config = _build_new_reference(sample_config)
    if "tools" in sample_config:
        """If so, execute them one after the other in the specified order \
        (might not work)"""
        for command in sample_config["tools"]:
            """with this I pick up at run time the correct function in the \
            current module"""
            command_fn = getattr(sys.modules[__name__],
                    "_run_{}".format(command))
            """Update sample config, each command return sample_config and \
            if necessary it modifies it"""
            sample_config = command_fn(global_config, sample_config,
                    sorted_libraries_by_insert)
    else:
        #run default pipeline for QC
        sample_config = _run_align(global_config, sample_config,
                sorted_libraries_by_insert)
        sample_config = _run_qaTools(global_config, sample_config,
                sorted_libraries_by_insert)
        sample_config = _run_FRC(global_config, sample_config,
                sorted_libraries_by_insert)
def _run_align(global_config, sample_config,sorted_libraries_by_insert):
    """Align the reads against the reference and collect Picard metrics.

    Works inside an ``alignments/`` sub-directory (created on demand),
    stores the merged/annotated alignments under
    ``sample_config["alignments"]`` and returns the updated config.
    Skips silently (with a console message) when no reference is given.
    """
    if "reference" not in sample_config:
        print("reference sequence not provided, skypping alignment step.",
                "Please provide a reference if you are intrested in aligning",
                "the reads against a reference")
        return sample_config
    if not os.path.exists("alignments"):
        os.makedirs("alignments")
    os.chdir("alignments")
    sorted_libraries_by_insert = align._align_reads(global_config,
            sample_config, sorted_libraries_by_insert) # align reads
    sorted_alignments_by_insert = align._merge_bam_files(global_config,
            sample_config, sorted_libraries_by_insert) # merge alignments
    sorted_alignments_by_insert = align.picard_CGbias(global_config,
            sample_config,sorted_alignments_by_insert) # compute picard stats
    sorted_alignments_by_insert = align.picard_collectInsertSizeMetrics(
            global_config, sample_config,sorted_alignments_by_insert)
    sorted_alignments_by_insert = align.picard_markDuplicates(global_config,
            sample_config,sorted_alignments_by_insert)
    os.chdir("..")
    sample_config["alignments"] = sorted_alignments_by_insert
    return sample_config
def _check_libraries(sorted_libraries_by_insert):
different_inserts = 0
current_insert = -1
orientation = ""
for library, libraryInfo in sorted_libraries_by_insert:
if current_insert == -1:
current_insert = libraryInfo["insert"]
different_inserts = 1
else :
if current_insert != libraryInfo["insert"]:
current_insert = libraryInfo["insert"]
different_inserts += 1
if different_inserts > 2:
sys.exit("error: in valiadation only two libraries are admitted "
"usually a PE and a MP, sometimes 2 PE)")
return
def _build_new_reference(sample_config):
    """Copy the reference FASTA, dropping contigs shorter than minCtgLength.

    The filtered file is written into a ``reference/`` sub-directory
    (reused if it already exists) and ``sample_config["reference"]`` is
    repointed at it.  Returns the updated sample_config.

    :raises SystemExit: when a minCtgLength below 500 bp is configured.
    """
    minCtgLength = 500
    if "minCtgLength" in sample_config:
        minCtgLength = sample_config["minCtgLength"]
        if minCtgLength < 500:
            sys.exit("min contig length must be higher than 500bp, lower "
                    "values will complicate the job of valiadation tools and "
                    "make results difficult to interpret. For mammalian "
                    "genomes minCtgLength > 1Kbp is strongly suggested")
    reference = sample_config["reference"]
    reference_dir = os.path.abspath("reference")
    if not os.path.exists(reference_dir):
        os.makedirs(reference_dir)
    os.chdir(reference_dir)
    new_reference_name = os.path.abspath(os.path.basename(reference))
    if os.path.exists(new_reference_name):
        sample_config["reference"] = new_reference_name
        os.chdir("..")
        return sample_config # already created the new reference
    with open(new_reference_name, "w") as new_ref_fd:
        with open(reference, "r") as ref_fd:
            fasta_header = ref_fd.readline()
            sequence = ""
            for line in ref_fd:
                line = line
                if line.startswith(">"):
                    # Flush the previous record if it passes the length cut.
                    # NOTE(review): len(sequence) includes newline chars, so
                    # the effective cutoff is slightly below minCtgLength for
                    # multi-line records -- confirm this is intended.
                    if len(sequence) >= minCtgLength:
                        new_ref_fd.write(fasta_header)
                        new_ref_fd.write(sequence)
                    sequence = ""
                    fasta_header = line
                else:
                    sequence+=line
            # Flush the last record of the file.
            if len(sequence) >= minCtgLength:
                new_ref_fd.write(fasta_header)
                new_ref_fd.write(sequence)
    sample_config["reference"] = new_reference_name
    os.chdir("..")
    return sample_config
def _run_BUSCO(global_config, sample_config, sorted_alignments_by_insert):
    """Run a BUSCO completeness assessment on the current assembly.

    Works inside a ``BUSCO/`` sub-directory (created on demand), builds
    the BUSCO command line, and runs it unless this is a dry run or the
    summary file already exists.

    :param global_config: pipeline configuration (BUSCO binary + options).
    :param sample_config: per-sample configuration; must provide
        ``BUSCODataPath``, ``reference`` and ``output``; ``threads``
        defaults to 16.
    :param sorted_alignments_by_insert: unused here; kept to match the
        common ``_run_<tool>`` signature dispatched from :func:`run`.
    :returns: ``sample_config`` (unchanged).
    :raises IOError: when ``BUSCODataPath`` does not exist.
    """
    program = global_config["Tools"]["BUSCO"]["bin"]
    options = global_config["Tools"]["BUSCO"]["options"]
    main_dir = os.getcwd()
    BUSCOfolder = os.path.join(main_dir, "BUSCO")
    if not os.path.exists(BUSCOfolder):
        os.makedirs(BUSCOfolder)
    os.chdir(BUSCOfolder)
    BUSCO_data_path = os.path.expandvars(sample_config["BUSCODataPath"])
    if not os.path.exists(BUSCO_data_path):
        raise IOError("Path to the BUSCO data set does not exist!")
    reference = sample_config["reference"]
    output = sample_config["output"]
    threads = sample_config.get("threads", 16)
    command = [program, "-l", BUSCO_data_path, "-i", "{}".format(reference),
               "-o", "{}".format(output), "-c", "{}".format(threads)]
    command.extend(options)
    common.print_command(command)
    outfile = os.path.join(BUSCOfolder, "run_{}".format(output),
                           "short_summary_{}".format(output))
    if not common.check_dryrun(sample_config) and not os.path.exists(outfile):
        # Context managers close the log handles (they leaked previously).
        with open("BUSCO.stdOut", "a") as stdOut, \
                open("BUSCO.stdErr", "a") as stdErr:
            return_value = subprocess.call(command, stdout=stdOut,
                                           stderr=stdErr)
        if not return_value == 0:
            sys.exit("Error running BUSCO")
    os.chdir("..")
    # Fix: run() reassigns sample_config from each tool's return value, so
    # returning None here would wipe the config for every subsequent tool.
    return sample_config
def _run_FRC(global_config, sample_config, sorted_libraries_by_insert):
mainDir = os.getcwd()
FRCurveFolder = os.path.join(os.getcwd(), "FRCurve")
if not os.path.exists(FRCurveFolder):
os.makedirs(FRCurveFolder)
os.chdir("FRCurve")
program=global_config["Tools"]["FRC"]["bin"]
genomeSize = sample_config["genomeSize"]
reference = sample_config["reference"]
output = sample_config["output"]
alignments = sample_config["alignments"]
peBam = alignments[0][1]
peInsert = alignments[0][0]
peMinInsert = int(peInsert - peInsert*0.60)
peMaxInsert = int(peInsert + peInsert*0.60)
command = [program, "--pe-sam", peBam, "--pe-max-insert", "5000"]
if len(alignments) > 1:
mpBam = alignments[1][1]
mpInsert = alignments[1][0]
mpMinInsert = int(mpInsert - mpInsert*0.50)
mpMaxInsert = int(mpInsert + mpInsert*0.50)
command += ["--mp-sam", mpBam, "--mp-max-insert", "25000"]
command += [ "--genome-size", "{}".format(genomeSize), "--output", output]
common.print_command(command)
if not common.check_dryrun(sample_config) and not os.path.exists(
"{}_FRC.png".format(output)):
stdOut = open("FRC.stdOut", "a")
stdErr = open("FRC.stdErr", "a")
returnValue = subprocess.call(command , stdout=stdOut , stderr=stdErr)
if not returnValue == 0:
sys.exit("error, while running FRCurve: {}".format(command))
plotFRCurve(output)
o |
sdickreuter/python-pistage | build/lib/PIStage/_defines.py | Python | mit | 77 | 0 | __auth | or__ = 'sei'
DEFAULT | _SERIAL = '/dev/ttyUSB0'
DEFAULT_BAUDRATE = 57600
|
chrisndodge/edx-platform | common/djangoapps/student/views.py | Python | agpl-3.0 | 105,293 | 0.002678 | """
Student Views
"""
import datetime
import logging
import uuid
import json
import warnings
from collections import defaultdict
from urlparse import urljoin, urlsplit, parse_qs, urlunsplit
from django.views.generic import TemplateView
from pytz import UTC
from requests import HTTPError
from ipware.ip import get_ip
import edx_oauth2_provider
from django.conf import settings
from django.contrib.auth import logout, authenticate, login
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import password_reset_confirm
from django.contrib import messages
from django.core.context_processors import csrf
from django.core import mail
from django.core.urlresolvers import reverse, NoReverseMatch, reverse_lazy
from django.core.validators import validate_email, ValidationError
from django.db import IntegrityError, transaction
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseServerError, Http404
from django.shortcuts import redirect
from django.utils.encoding import force_bytes, force_text
from django.utils.translation import ungettext
from django.utils.http import base36_to_int, urlsafe_base64_encode, urlencode
from django.utils.translation import ugettext as _, get_language
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from django.views.decorators.http import require_POST, require_GET
from django.db.models.signals import post_save
from django.dispatch import receiver, Signal
from django.template.response import TemplateResponse
from provider.oauth2.models import Client
from ratelimitbackend.exceptions import RateLimitException
from social.apps.django_app import utils as social_utils
from social.backends import oauth as social_oauth
from social.exceptions import AuthException, AuthAlreadyAssociated
from edxmako.shortcuts import render_to_response, render_to_string
from course_modes.models import CourseMode
from shoppingcart.api import order_history
from student.models import (
Registration, UserProfile,
PendingEmailChange, CourseEnrollment, CourseEnrollmentAttribute, unique_id_for_user,
CourseEnrollmentAllowed, UserStanding, LoginFailures,
create_comments_service_user, PasswordHistory, UserSignupSource,
DashboardConfiguration, LinkedInAddToProfileConfiguration, ManualEnrollmentAudit, ALLOWEDTOENROLL_TO_ENROLLED,
LogoutViewConfiguration)
from student.forms import AccountCreationForm, PasswordResetFormNoActive, get_registration_extension_form
from lms.djangoapps.commerce.utils import EcommerceService # pylint: disable=import-error
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification # pylint: disable=import-error
from bulk_email.models import Optout, BulkEmailFlag # pylint: disable=import-error
from certificates.models import CertificateStatuses, certificate_status_for_student
from certificates.api import ( # pylint: disable=import-error
get_certificate_url,
has_html_certificates_enabled,
)
from xmodule.modulestore.django import modulestore
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator
from collections import namedtuple
from courseware.courses import get_courses, sort_by_announcement, sort_by_start_date # pylint: disable=import-error
from courseware.access import has_access
from django_comment_common.models import Role
from external_auth.models import ExternalAuthMap
import external_auth.views
from external_auth.login_and_register import (
login as external_auth_login,
register as external_auth_register
)
from lang_pref import LANGUAGE_KEY
import track.views
import dogstats_wrapper as dog_stats_api
from util.db import outer_atomic
from util.json_request import JsonResponse
from util.bad_request_rate_limiter import BadRequestRateLimiter
from util.milestones_helpers import (
get_pre_requisite_courses_not_completed,
)
from util.password_policy_validators import validate_password_strength
import third_party_auth
from third_party_auth import pipeline, provider
from student.helpers import (
check_verify_status_by_course,
auth_pipeline_urls, get_next_url_for_login_page,
DISABLE_UNENROLL_CERT_STATES,
destroy_oauth_tokens
)
from student.cookies import set_logged_in_cookies, delete_logged_in_cookies
from student.models import anonymous_id_for_user, UserAttribute, EnrollStatusChange
from shoppingcart.models import DonationConfiguration, CourseRegistrationCode
from embargo import api as embargo_api
import analytics
from eventtracking import tracker
# Note that this lives in LMS, so this dependency should be refactored.
from notification_prefs.views import enable_notifi | cations
from openedx.core.djangoapps.credit.email_utils import get_credit_provider_display_names, make_providers_strings
from openedx.core.djangoapps | .user_api.preferences import api as preferences_api
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.programs import utils as programs_utils
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.theming import helpers as theming_helpers
# Module loggers: general student events and the security audit trail.
log = logging.getLogger("edx.student")
AUDIT_LOG = logging.getLogger("audit")
ReverifyInfo = namedtuple('ReverifyInfo', 'course_id course_name course_number date status display')  # pylint: disable=invalid-name
# Analytics event name emitted when a user starts changing a setting.
SETTING_CHANGE_INITIATED = 'edx.user.settings.change_initiated'
# Used as the name of the user attribute for tracking affiliate registrations
REGISTRATION_AFFILIATE_ID = 'registration_affiliate_id'
# used to announce a registration
REGISTER_USER = Signal(providing_args=["user", "profile"])
# Disable this warning because it doesn't make sense to completely refactor tests to appease Pylint
# pylint: disable=logging-format-interpolation
def csrf_token(context):
    """Return a hidden-input HTML snippet carrying the CSRF token.

    Yields an empty string when the context reports the token as
    'NOTPROVIDED' (Django's marker for a missing token).
    """
    tok = context.get('csrf_token', '')
    if tok == 'NOTPROVIDED':
        return ''
    markup = (u'<div style="display:none"><input type="hidden"'
              ' name="csrfmiddlewaretoken" value="%s" /></div>')
    return markup % (tok,)
# NOTE: This view is not linked to directly--it is called from
# branding/views.py:index(), which is cached for anonymous users.
# This means that it should always return the same thing for anon
# users. (in particular, no switching based on query params allowed)
def index(request, extra_context=None, user=AnonymousUser()):
"""
Render the edX main page.
extra_context is used to allow immediate display of certain modal windows, eg signup,
as used by external_auth.
"""
if extra_context is None:
extra_context = {}
courses = get_courses(user)
if configuration_helpers.get_value(
"ENABLE_COURSE_SORTING_BY_START_DATE",
settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"],
):
courses = sort_by_start_date(courses)
else:
courses = sort_by_announcement(courses)
context = {'courses': courses}
context['homepage_overlay_html'] = configuration_helpers.get_value('homepage_overlay_html')
# This appears to be an unused context parameter, at least for the master templates...
context['show_partners'] = configuration_helpers.get_value('show_partners', True)
# TO DISPLAY A YOUTUBE WELCOME VIDEO
# 1) Change False to True
context['show_homepage_promo_video'] = configuration_helpers.get_value('show_homepage_promo_video', False)
# 2) Add your video's YouTube ID (11 chars, eg "123456789xX"), or specify via site configuration
# Note: This value should be moved into a configuration setting and plumbed-through to the
# context via the site configuration workflow, versus living here
youtube_video_id = configuration_helpers.get_value('homepage_promo_video_youtube_id', "your-youtube-id")
context['homepage_promo_video_youtube_id'] = youtube_video_id
# allow for theme override of the courses list
context['courses |
drodri/python-cpp-accu2016 | 5pybind11_zlib/test_compress.py | Python | mit | 2,059 | 0.01117 | import myzlib
st=z_stream()  # NOTE(review): z_stream is not defined or imported in this file -- confirm it should come from myzlib
ZLIB_VERSION = "1.2.8"  # version string handed to deflateInit_/inflateInit2_
Z_NULL = 0x00
# zlib return codes.
Z_OK = 0x00
Z_STREAM_END = 0x01
Z_NEED_DICT = 0x02
# zlib flush modes.
Z_NO_FLUSH = 0x00
Z_FINISH = 0x04
CHUNK = 1024 * 32  # 32 KiB streaming buffer size
def compress(input, level=6):
    """Deflate *input* through myzlib and return the compressed bytes.

    NOTE(review): ``ctypes`` is used below but never imported in this
    module, and ``len(st)`` is passed where zlib's deflateInit_ expects
    the z_stream struct size (``ctypes.sizeof(st)`` in ``decompress``)
    -- confirm against the myzlib binding before relying on this.
    """
    out = []
    st = z_stream()
    # Present the entire input buffer to deflate in one go.
    st.avail_in = len(input)
    st.next_in = input
    st.avail_out = Z_NULL
    st.next_out = Z_NULL
    err = myzlib.deflateInit_(st, level, ZLIB_VERSION, len(st))
    assert err == Z_OK, err
    while True:
        # Drain deflate output CHUNK bytes at a time until Z_STREAM_END.
        st.avail_out = CHUNK
        outbuf = ctypes.create_string_buffer(CHUNK)
        st.next_out = outbuf
        err = myzlib.deflate(st, Z_FINISH)
        out.append(outbuf[:CHUNK-st.avail_out])
        if err == Z_STREAM_END: break
        elif err == Z_OK: pass
        else:
            raise AssertionError, err
    err = myzlib.deflateEnd(st)
    assert err == Z_OK, err
    return "".join(out)
def decompress(input):
    """Inflate zlib-compressed *input* and return the original bytes.

    NOTE(review): this body still references ``_z_stream`` and ``_zlib``
    (names from the original pure-ctypes recipe), which are not defined
    in this module -- presumably they should be the myzlib equivalents
    used by ``compress``; confirm before use.
    """
    out = []
    st = _z_stream()
    st.avail_in = len(input)
    st.next_in = ctypes.cast(ctypes.c_char_p(input), ctypes.POINTER(ctypes.c_ubyte))
    st.avail_out = Z_NULL
    st.next_out = ctypes.cast(Z_NULL, ctypes.POINTER(ctypes.c_ubyte))
    # 15 = default window bits (standard zlib stream format).
    err = _zlib.inflateInit2_(ctypes.byref(st), 15, ZLIB_VERSION, ctypes.sizeof(st))
    assert err == Z_OK, err
    while True:
        # Drain inflate output CHUNK bytes at a time until Z_STREAM_END.
        st.avail_out = CHUNK
        outbuf = ctypes.create_string_buffer(CHUNK)
        st.next_out = ctypes.cast(outbuf, ctypes.POINTER(ctypes.c_ubyte))
        err = _zlib.inflate(ctypes.byref(st), Z_NO_FLUSH)
        if err in [Z_OK, Z_STREAM_END]:
            out.append(outbuf[:CHUNK-st.avail_out])
        else:
            raise AssertionError, err
        if err == Z_STREAM_END:
            break
    err = _zlib.inflateEnd(ctypes.byref(st))
    assert err == Z_OK, err
    return "".join(out)
def _test():
    """Round-trip smoke test: compress then decompress ~2.4 MB of text."""
    input = "Hello world, hello world" * 100000
    ct_archive = compress(input, 6)
    print "Compression ", len(input), "=>", len(ct_archive)
    ct_orig = decompress(ct_archive)
    assert ct_orig == input
    print "OK ",
if __name__ == '__main__':
    _test()
|
astrofle/CRRLpy | crrlpy/imtools.py | Python | mit | 21,537 | 0.006454 | #!/usr/bin/env python
import logging
import inspect
import numpy as np
#from matplotlib import _cntr as cntr
#from contours.core import shapely_formatter as shapely_fmt
#from contours.quad import QuadContourGenerator
from astropy.coordinates import Angle
from astropy import constants as c
from astropy import wcs
from astropy.io import fits
from matplotlib.patheffects import withStroke
class Polygon:
""" |
Generic polygon class.
Note: code based on:
http://code.activestate.com/recipes/578381-a-point-in-polygon-program-sw-sloan-algorithm/
Parameters
----------
x : array
A sequence of nodal x-coords.
y : array
A sequence | of nodal y-coords.
"""
def __init__(self, x, y):
self.logger = logging.getLogger(__name__)
self.logger.info("Creating Polygon")
if len(x) != len(y):
raise IndexError('x and y must be equally sized.')
self.x = np.asfarray(x)
self.y = np.asfarray(y)
# Closes the polygon if were open
x1, y1 = x[0], y[0]
xn, yn = x[-1], y[-1]
if x1 != xn or y1 != yn:
self.x = np.concatenate((self.x, [x1]))
self.y = np.concatenate((self.y, [y1]))
# Anti-clockwise coordinates
if _det(self.x, self.y) < 0:
self.x = self.x[::-1]
self.y = self.y[::-1]
def get_vertices(self):
"""
Returns the vertices of the polygon as a 2xNvert list.
"""
return [[i,j] for i,j in zip(self.x, self.y)]
def is_inside(self, xpoint, ypoint, smalld=1e-12):
"""
Check if point is inside a general polygon.
An improved version of the algorithm of Nordbeck and Rydstedt.
REF: SLOAN, S.W. (1985): A point-in-polygon program. Adv. Eng.
Software, Vol 7, No. 1, pp 45-47.
Parameters
----------
xpoint : array or float
The x-coord of the point to be tested.
ypoint : array or float
The y-coords of the point to be tested.
smalld : float
Tolerance within which point is considered to be on a side.
Returns
-------
mindst : array or float
The distance from the point to the nearest point of the polygon:
If mindst < 0 then point is outside the polygon.
If mindst = 0 then point in on a side of the polygon.
If mindst > 0 then point is inside the polygon.
"""
xpoint = np.asfarray(xpoint)
ypoint = np.asfarray(ypoint)
# Scalar to array
if xpoint.shape is tuple():
xpoint = np.array([xpoint], dtype=float)
ypoint = np.array([ypoint], dtype=float)
scalar = True
else:
scalar = False
# Check consistency
if xpoint.shape != ypoint.shape:
raise IndexError('x and y must be equally sized.')
# If snear = True: Dist to nearest side < nearest vertex
# If snear = False: Dist to nearest vertex < nearest side
snear = np.ma.masked_all(xpoint.shape, dtype=bool)
# Initialize arrays
mindst = np.ones_like(xpoint, dtype=float) * np.inf
j = np.ma.masked_all(xpoint.shape, dtype=int)
x = self.x
y = self.y
n = len(x) - 1 # Number of sides/vertices defining the polygon
# Loop over each side defining polygon
for i in range(n):
d = np.ones_like(xpoint, dtype=float) * np.inf
# Start of side has coords (x1, y1)
# End of side has coords (x2, y2)
# Point has coords (xpoint, ypoint)
x1 = x[i]
y1 = y[i]
x21 = x[i+1] - x1
y21 = y[i+1] - y1
x1p = x1 - xpoint
y1p = y1 - ypoint
# Points on infinite line defined by
# x = x1 + t * (x1 - x2)
# y = y1 + t * (y1 - y2)
# where
# t = 0 at (x1, y1)
# t = 1 at (x2, y2)
# Find where normal passing through (xpoint, ypoint) intersects
# infinite line
t = -(x1p * x21 + y1p * y21) / (x21 ** 2 + y21 ** 2)
tlt0 = t < 0
tle1 = (0 <= t) & (t <= 1)
# Normal intersects side
d[tle1] = ((x1p[tle1] + t[tle1] * x21) ** 2 +
(y1p[tle1] + t[tle1] * y21) ** 2)
# Normal does not intersects side
# Point is closest to vertex (x1, y1)
# Compute square of distance to this vertex
d[tlt0] = x1p[tlt0] ** 2 + y1p[tlt0] ** 2
# Store distances
mask = d < mindst
mindst[mask] = d[mask]
j[mask] = i
# Point is closer to (x1, y1) than any other vertex or side
snear[mask & tlt0] = False
# Point is closer to this side than to any other side or vertex
snear[mask & tle1] = True
if np.ma.count(snear) != snear.size:
raise IndexError('Error computing distances')
mindst **= 0.5
# Point is closer to its nearest vertex than its nearest side, check if
# nearest vertex is concave.
# If the nearest vertex is concave then point is inside the polygon,
# else the point is outside the polygon.
jo = j.copy()
jo[j==0] -= 1
area = _det([x[j+1], x[j], x[jo-1]], [y[j+1], y[j], y[jo-1]])
mindst[~snear] = np.copysign(mindst, area)[~snear]
# Point is closer to its nearest side than to its nearest vertex, check
# if point is to left or right of this side.
# If point is to left of side it is inside polygon, else point is
# outside polygon.
area = _det([x[j], x[j+1], xpoint], [y[j], y[j+1], ypoint])
mindst[snear] = np.copysign(mindst, area)[snear]
# Point is on side of polygon
mindst[np.fabs(mindst) < smalld] = 0
# If input values were scalar then the output should be too
if scalar:
mindst = float(mindst)
return mindst
    def make_mask(self, shape, **kwargs):
        """
        Creates a mask of a given shape using the Polygon as boundaries.
        All points inside the Polygon will have a value of 1.
        :param shape: Shape of the output mask.
        :type shape: tuple
        :returns: Mask of the Polygon.
        :rtype: array
        """
        mask = np.zeros(shape)
        # Only the polygon's integer bounding box needs to be scanned;
        # everything outside it keeps the initial value 0.
        xmax = int(round(max(self.x)))
        xmin = int(round(min(self.x)))
        ymax = int(round(max(self.y)))
        ymin = int(round(min(self.y)))
        # NOTE: xrange makes this Python-2-only code.
        for j in xrange(ymax - ymin):
            for i in xrange(xmax - xmin):
                # is_inside returns a signed distance; non-negative values are
                # treated as inside or on the boundary.
                # NOTE(review): mask is indexed [row, col] = [y, x]; assumes the
                # polygon's bounding box fits inside `shape`, otherwise this
                # raises IndexError -- confirm callers guarantee that.
                if self.is_inside(i+xmin, j+ymin, **kwargs) >= 0:
                    self.logger.debug("Point ({0},{1}) ".format(i+xmin,j+ymin) +
                                      "is inside the Polygon")
                    mask[j+ymin,i+xmin] = 1
        return mask
def _det(xvert, yvert):
"""
Compute twice the area of the triangle defined by points using the
determinant formula.
Parameters
----------
xvert : array
A vector of nodal x-coords.
yvert : array
A vector of nodal y-coords.
Returns
-------
area : float
Twice the area of the triangle defined by the points:
area is positive if points define polygon in anticlockwise order.
area is negative if points define polygon in clockwise order.
area is zero if at least two of the points are concident or if
all points are collinear.
"""
xvert = np.asfarray(xvert)
yvert = np.asfarray(yvert)
x_prev = np.concatenate(([xvert[-1]], xvert[:-1]))
y_prev = np.concatenate(([yvert[-1]], yvert[:-1]))
return np.sum(yvert * x_prev - xvert * y_prev, axis=0)
def beam_area_pix(head):
"""
Computes the beam area in pixels.
It uses an approximation accurate to
within 5%.
K. Rohlfs and T.L. Wilson, 'Tools of Radio Astronomy |
chrisortman/CIS-121 | projects/LCD.py | Python | mit | 1,100 | 0.012727 | def zero():
print " __ "
print "| |"
print "|__|"
def one():
    """Print the digit 1 as three rows of a seven-segment display."""
    # Parenthesised single-argument print is valid and behaves identically
    # under both Python 2 (statement) and Python 3 (function).
    print("    ")
    print("   |")
    print("   |")
def two():
    """Print the digit 2 as three rows of a seven-segment display."""
    # Parenthesised single-argument print works identically on Python 2 and 3.
    print(" __")
    print(" __|")
    print("|__ ")
def three():
    """Print the digit 3 as three rows of a seven-segment display."""
    # Parenthesised single-argument print works identically on Python 2 and 3.
    print(" __ ")
    print(" __|")
    print(" __|")
def four():
    """Print the digit 4 as three rows of a seven-segment display."""
    # Fix: the original printed only two rows, so 4 was misaligned with the
    # other digits (which all render three rows).  The top row of a
    # seven-segment 4 is blank.
    print("   ")
    print("|_|")
    print("  |")
def five():
    """Print the digit 5 as three rows of a seven-segment display."""
    # Parenthesised single-argument print works identically on Python 2 and 3.
    print(" __ ")
    print("|__ ")
    print(" __|")
def six():
    """Print the digit 6 as three rows of a seven-segment display."""
    # Parenthesised single-argument print works identically on Python 2 and 3.
    print(" __ ")
    print("|__ ")
    print("|__|")
def seven():
    """Print the digit 7 as three rows of a seven-segment display."""
    # Parenthesised single-argument print works identically on Python 2 and 3.
    print(" __")
    print("  |")
    print("  |")
def eight():
    """Print the digit 8 as three rows of a seven-segment display."""
    # Parenthesised single-argument print works identically on Python 2 and 3.
    print(" __ ")
    print("|__|")
    print("|__|")
def nine():
    """Print the digit 9 as three rows of a seven-segment display."""
    # Parenthesised single-argument print works identically on Python 2 and 3.
    print(" __ ")
    print("|__|")
    print("   |")
# Map each digit character to the function that renders it.  A dispatch
# table replaces the original ten-branch elif chain (whose call to nine()
# was garbled) while keeping identical prompts and output.
DIGIT_FUNCTIONS = {
    '0': zero, '1': one, '2': two, '3': three, '4': four,
    '5': five, '6': six, '7': seven, '8': eight, '9': nine,
}

while True:
    # raw_input is Python 2 only; this script targets Python 2 throughout.
    x = raw_input("Type the number you would like printed: ")
    renderer = DIGIT_FUNCTIONS.get(x)
    if renderer is not None:
        renderer()
    else:
        print("Number not entered")
|
joksnet/youtube-dl | youtube_dl/extractor/sohu.py | Python | unlicense | 3,221 | 0.000622 | # encoding: utf-8
import json
import re
from .common import InfoExtractor
from ..utils import ExtractorError
class SohuIE(InfoExtractor):
    """Extractor for videos hosted on tv.sohu.com and my.tv.sohu.com."""

    # The (?(mytv)|n) conditional group: my.tv.sohu.com URLs omit the
    # leading 'n' in front of the numeric video id.
    _VALID_URL = r'https?://(?P<mytv>my\.)?tv\.sohu\.com/.+?/(?(mytv)|n)(?P<id>\d+)\.shtml.*?'

    _TEST = {
        u'url': u'http://tv.sohu.com/20130724/n382479172.shtml#super',
        u'file': u'382479172.mp4',
        u'md5': u'bde8d9a6ffd82c63a1eefaef4eeefec7',
        u'info_dict': {
            u'title': u'MV:Far East Movement《The Illest》',
        },
    }

    def _real_extract(self, url):
        """Fetch metadata for the video in *url* and build the info dict
        (single entry, or a playlist when the video is split in parts)."""

        def _fetch_data(vid_id, mytv=False):
            # my.tv.sohu.com uses a different metadata endpoint than tv.sohu.com.
            if mytv:
                base_data_url = 'http://my.tv.sohu.com/play/videonew.do?vid='
            else:
                base_data_url = u'http://hot.vrs.sohu.com/vrs_flash.action?vid='

            data_url = base_data_url + str(vid_id)
            data_json = self._download_webpage(
                data_url, video_id,
                note=u'Downloading JSON data for ' + str(vid_id))
            return json.loads(data_json)

        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        mytv = mobj.group('mytv') is not None

        webpage = self._download_webpage(url, video_id)
        raw_title = self._html_search_regex(r'(?s)<title>(.+?)</title>',
                                            webpage, u'video title')
        # Keep only the part of the page title before the first dash.
        title = raw_title.partition('-')[0].strip()

        # Numeric id embedded in the page's JavaScript, needed for the
        # metadata endpoint.
        vid = self._html_search_regex(r'var vid ?= ?["\'](\d+)["\']', webpage,
                                      u'video path')
        data = _fetch_data(vid, mytv)

        # Collect per-quality video ids; a value of 0 means the quality is
        # not available.
        QUALITIES = ('ori', 'super', 'high', 'nor')
        vid_ids = [data['data'][q + 'Vid']
                   for q in QUALITIES
                   if data['data'][q + 'Vid'] != 0]
        if not vid_ids:
            raise ExtractorError(u'No formats available for this video')

        # For now, we just pick the highest available quality
        vid_id = vid_ids[-1]

        # Re-fetch metadata only if the chosen quality differs from the
        # one already downloaded.
        format_data = data if vid == vid_id else _fetch_data(vid_id, mytv)
        part_count = format_data['data']['totalBlocks']
        allot = format_data['allot']
        prot = format_data['prot']
        clipsURL = format_data['data']['clipsURL']
        su = format_data['data']['su']

        playlist = []
        for i in range(part_count):
            part_url = ('http://%s/?prot=%s&file=%s&new=%s' %
                        (allot, prot, clipsURL[i], su[i]))
            part_str = self._download_webpage(
                part_url, video_id,
                note=u'Downloading part %d of %d' % (i+1, part_count))

            # The endpoint answers with '|'-separated fields; field 3 is the
            # access key.
            part_info = part_str.split('|')
            video_url = '%s%s?key=%s' % (part_info[0], su[i], part_info[3])

            video_info = {
                'id': '%s_part%02d' % (video_id, i + 1),
                'title': title,
                'url': video_url,
                'ext': 'mp4',
            }
            playlist.append(video_info)

        # Single-part videos are returned directly; multi-part ones as a
        # playlist keyed by the original video id.
        if len(playlist) == 1:
            info = playlist[0]
            info['id'] = video_id
        else:
            info = {
                '_type': 'playlist',
                'entries': playlist,
                'id': video_id,
            }

        return info
|
facebookexperimental/eden | eden/hg-server/edenscm/hgext/infinitepush/bundlestore.py | Python | gpl-2.0 | 6,167 | 0.001135 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# Infinitepush Bundle Store
"""store for infinitepush bundles"""
import hashlib
import os
import subprocess
from tempfile import NamedTemporaryFile
from edenscm.mercurial import error
| from edenscm.mercurial.i18n import _
class bundlestore(object):
    """Pairs a bundle *store* (raw bundle bytes) with an *index* (metadata
    lookup), each chosen from the repo's infinitepush configuration."""

    def __init__(self, repo):
        storetype = repo.ui.config("infinitepush", "storetype", "")
        if storetype == "disk":
            self.store = filebundlestore(repo)
        elif storetype == "external":
            self.store = externalbundlestore(repo)
        else:
            raise error.Abort(
                _("unknown infinitepush store type specified %s") % storetype
            )

        indextype = repo.ui.config("infinitepush", "indextype", "")
        if indextype == "disk":
            # Imported lazily, mirroring the sql branch below.
            from . import fileindex

            self.index = fileindex.fileindex(repo)
        elif indextype == "sql":
            # Delayed import of sqlindex to avoid including unnecessary
            # dependencies on mysql.connector.
            from . import sqlindex

            self.index = sqlindex.sqlindex(repo)
        else:
            raise error.Abort(
                _("unknown infinitepush index type specified %s") % indextype
            )
class filebundlestore(object):
    """bundle store in filesystem

    meant for storing bundles somewhere on disk and on network filesystems
    """

    def __init__(self, repo):
        self.storepath = repo.ui.configpath("scratchbranch", "storepath")
        if not self.storepath:
            self.storepath = repo.localvfs.join("scratchbranches", "filebundlestore")
        if not os.path.exists(self.storepath):
            os.makedirs(self.storepath)

    def _dirpath(self, hashvalue):
        """First two bytes of the hash are the name of the upper
        level directory, next two bytes are the name of the
        next level directory"""
        return os.path.join(self.storepath, hashvalue[0:2], hashvalue[2:4])

    def _filepath(self, filename):
        # Full path of a bundle inside its two-level fan-out directory.
        return os.path.join(self._dirpath(filename), filename)

    def write(self, data):
        """Store *data* under its SHA-1 hex digest and return that key."""
        filename = hashlib.sha1(data).hexdigest()
        dirpath = self._dirpath(filename)

        if not os.path.exists(dirpath):
            os.makedirs(dirpath)

        with open(self._filepath(filename), "wb") as f:
            f.write(data)

        return filename

    def read(self, key):
        """Return the stored bundle bytes for *key*, or None if missing."""
        # EAFP: the open fails when the key was never written.
        try:
            f = open(self._filepath(key), "rb")
        except IOError:
            return None

        # Fix: close the handle deterministically instead of leaking it
        # (the original returned f.read() without ever closing f).
        with f:
            return f.read()
class externalbundlestore(object):
    """Bundle store that shells out to configured upload/download binaries."""

    def __init__(self, repo):
        """
        `put_binary` - path to binary file which uploads bundle to external
        storage and prints key to stdout
        `put_args` - format strings with additional args to `put_binary`;
        the {filename} replacement field can be used.
        `get_binary` - path to binary file which accepts filename and key
        (in that order), downloads bundle from store and saves it to file
        `get_args` - format strings with additional args to `get_binary`;
        the {filename} and {handle} replacement fields can be used.
        """
        ui = repo.ui
        # path to the binary which uploads a bundle to the external store
        # and prints the key to stdout.
        self.put_binary = ui.config("infinitepush", "put_binary")
        if not self.put_binary:
            raise error.Abort("put binary is not specified")
        # Additional args to ``put_binary``. The '{filename}' replacement field
        # can be used to get the filename.
        self.put_args = ui.configlist("infinitepush", "put_args", [])
        # path to the binary which accepts a file and key (in that order) and
        # downloads the bundle from the store and saves it to the file.
        self.get_binary = ui.config("infinitepush", "get_binary")
        if not self.get_binary:
            raise error.Abort("get binary is not specified")
        # Additional args to ``get_binary``. The '{filename}' and '{handle}'
        # replacement fields can be used to get the filename and key.
        self.get_args = ui.configlist("infinitepush", "get_args", [])

    def _call_binary(self, args):
        # Run the helper binary and capture both streams; close_fds keeps
        # our descriptors from leaking into the child.
        p = subprocess.Popen(
            args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True
        )
        stdout, stderr = p.communicate()
        returncode = p.returncode
        return returncode, stdout, stderr

    def write(self, data):
        """Upload *data* via put_binary; return the key it prints on stdout."""
        # Won't work on windows because you can't open file second time without
        # closing it
        with NamedTemporaryFile() as temp:
            temp.write(data)
            temp.flush()
            temp.seek(0)

            formatted_args = [arg.format(filename=temp.name) for arg in self.put_args]
            returncode, stdout, stderr = self._call_binary(
                [self.put_binary] + formatted_args
            )

            if returncode != 0:
                raise error.Abort(
                    "Infinitepush failed to upload bundle to external store: %s"
                    % stderr
                )
            stdout_lines = stdout.splitlines()
            if len(stdout_lines) == 1:
                # The single line of output is the storage key/handle.
                return stdout_lines[0]
            else:
                raise error.Abort(
                    "Infinitepush received bad output from %s: %s"
                    % (self.put_binary, stdout)
                )

    def read(self, handle):
        """Download the bundle for *handle* via get_binary; return its bytes."""
        # Won't work on windows because you can't open file second time without
        # closing it
        with NamedTemporaryFile() as temp:
            formatted_args = [
                arg.format(filename=temp.name, handle=handle) for arg in self.get_args
            ]
            returncode, stdout, stderr = self._call_binary(
                [self.get_binary] + formatted_args
            )

            if returncode != 0:
                raise error.Abort("Failed to download from external store: %s" % stderr)

            return temp.read()
|
Lorquas/subscription-manager | src/rhsmlib/dbus/facts/client.py | Python | gpl-2.0 | 2,191 | 0.001826 | from __future__ import print_function, division, absolute_import
# Copyright (c) 2010-2016 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import logging
import dbus
from rhsmlib.dbus.facts import constants as facts_constants
log = logging.getLogger(__name__)
class FactsClientAuthenticationError(Exception):
    """Raised when a FactsService D-Bus call fails authentication.

    Requires the keyword argument ``action_id`` (the polkit action being
    authorised), which is stored on the exception as ``action_id``.
    """

    def __init__(self, *args, **kwargs):
        # action_id must be removed before delegating, because
        # Exception.__init__ rejects keyword arguments.
        action_id = kwargs.pop("action_id")
        super(FactsClientAuthenticationError, self).__init__(*args, **kwargs)
        log.debug("FactsClientAuthenticationError created for %s", action_id)
        self.action_id = action_id
class FactsClient(object):
    """Thin convenience wrapper around the rhsm Facts D-Bus service."""

    # Class-level defaults; each may be overridden per instance via __init__.
    bus_name = facts_constants.FACTS_DBUS_NAME
    object_path = facts_constants.FACTS_DBUS_PATH
    interface_name = facts_constants.FACTS_DBUS_INTERFACE

    def __init__(self, bus=None, bus_name=None, object_path=None, interface_name=None):
        # Default to the system bus, where the facts service lives.
        self.bus = bus or dbus.SystemBus()

        if bus_name:
            self.bus_name = bus_name

        if object_path:
            self.object_path = object_path

        if interface_name:
            self.interface_name = interface_name

        # follow_name_owner_changes keeps the proxy valid if the service
        # restarts under a new unique bus name.
        self.dbus_proxy_object = self.bus.get_object(self.bus_name, self.object_path,
                                                     follow_name_owner_changes=True)

        self.interface = dbus.Interface(self.dbus_proxy_object,
                                        dbus_interface=self.interface_name)

        self.bus.call_on_disconnection(self._on_bus_disconnect)

    def GetFacts(self, *args, **kwargs):
        """Proxy GetFacts straight through to the D-Bus interface."""
        return self.interface.GetFacts(*args, **kwargs)

    def _on_bus_disconnect(self, connection):
        # Drop the stale proxy when the bus connection goes away.
        self.dbus_proxy_object = None
        log.debug("Disconnected from FactsService")
|
mountainpenguin/pyrt | modules/config.py | Python | gpl-3.0 | 5,462 | 0.001098 | #!/usr/bin/env python
""" Copyright (C) 2012 mountainpenguin (pinguino.de.montana@googlemail.com)
<http://github.com/mountainpenguin/pyrt>
This file is part of pyRT.
pyRT is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
pyRT is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with pyRT. If not, see <http://www.gnu.org/licenses/>.
"""
import cPickle as pickle
import os
try:
import json
except ImportError:
import simplejson as json
class ConfigError(Exception):
    """Raised when the pyRT config file is missing or malformed."""

    def __init__(self, value):
        # Keep the offending value/message for display.
        self.parameter = value

    def __repr__(self):
        return repr(self.parameter)

    def __str__(self):
        # str and repr render identically for this exception.
        return self.__repr__()
class ConfigStore(object):
    """Plain value object holding the pyRT runtime configuration."""

    def __init__(self, sockpath, serverhost, serverport, password, ssl_certificate=None, ssl_private_key=None, ca_certs=None, root_directory="/", logfile="pyrt.log", refresh=10, scgi_username=None, scgi_password=None, scgi_method="Digest"):
        # rtorrent connection
        self.rtorrent_socket = sockpath
        # web server binding and authentication
        self.host = serverhost
        self.port = serverport
        self.password = password
        # SSL material (all optional)
        self.ssl_certificate = ssl_certificate
        self.ssl_private_key = ssl_private_key
        self.ssl_ca_certs = ca_certs
        # miscellaneous behaviour
        self.root_directory = root_directory
        self.logfile = logfile
        self.refresh = refresh
        # SCGI authentication settings
        self.scgi_username = scgi_username
        self.scgi_password = scgi_password
        self.scgi_method = scgi_method
class Config:
    """Loads, caches and persists the pyRT configuration.

    The parsed configuration is kept as a ConfigStore in self.CONFIG and
    pickled to config/.pyrtconfig; the human-edited source is
    config/.pyrtrc (JSON with #-comments allowed).
    """

    def __init__(self):
        # look for saved config file
        if os.path.exists(os.path.join("config", ".pyrtconfig")):
            try:
                self.CONFIG = pickle.load(open(os.path.join("config", ".pyrtconfig")))
            # NOTE(review): bare except silently discards any unpickling
            # error and rebuilds the cache from .pyrtrc.
            except:
                os.remove(os.path.join("config", ".pyrtconfig"))
                self.loadconfig()
        else:
            self.loadconfig()

    def set(self, key, value):
        """Set option *key* to *value* and persist; return the stored value,
        or False when *key* is not a known option."""
        if key not in self.CONFIG.__dict__:
            return False
        else:
            self.CONFIG.__dict__[key] = value
            self._flush()
            return self.CONFIG.__dict__[key]

    def _flush(self):
        # Persist the current ConfigStore to the pickle cache.
        # NOTE(review): the file handle is left for the GC to close.
        pickle.dump(self.CONFIG, open(os.path.join("config", ".pyrtconfig"), "w"))

    def loadconfig(self):
        """Parse config/.pyrtrc into self.CONFIG and persist the result.

        Raises ConfigError if the file is missing or lacks required keys.
        """
        if not os.path.exists(os.path.join("config", ".pyrtrc")):
            raise ConfigError("Config File doesn't exist")
        config_ = open(os.path.join("config", ".pyrtrc")).read()
        config_stripped = ""
        # Strip #-comments: copy each non-empty line up to the first '#'.
        for line in config_.split("\n"):
            if line == "":
                pass
            else:
                for char in line:
                    if char == "#":
                        break
                    else:
                        config_stripped += char
                config_stripped += "\n"
        try:
            configfile = json.loads(config_stripped)
            # Optional settings fall back to their defaults when absent.
            if "ssl_certificate" in configfile.keys() and "ssl_private_key" in configfile.keys():
                cert = configfile["ssl_certificate"]
                pkey = configfile["ssl_private_key"]
            else:
                cert, pkey = None, None
            if "ssl_ca_certs" in configfile.keys():
                ca_certs = configfile["ssl_ca_certs"]
            else:
                ca_certs = None
            if "root_directory" in configfile:
                root_dir = configfile["root_directory"]
            else:
                root_dir = "/"
            if "logfile" in configfile:
                logfile = configfile["logfile"]
            else:
                logfile = "pyrt.log"
            try:
                refresh = int(configfile["refresh"])
            # NOTE(review): bare except also hides non-integer refresh
            # values, not just a missing key.
            except:
                refresh = 10
            if "scgi_username" in configfile:
                scgi_username = configfile["scgi_username"]
            else:
                scgi_username = None
            if "scgi_password" in configfile:
                scgi_password = configfile["scgi_password"]
            else:
                scgi_password = None
            if "scgi_method" in configfile:
                scgi_method = configfile["scgi_method"]
            else:
                scgi_method = "Digest"
            self.CONFIG = ConfigStore(
                sockpath=configfile["rtorrent_socket"],
                serverhost=configfile["host"],
                serverport=configfile["port"],
                password=configfile["password"],
                ssl_certificate=cert,
                ssl_private_key=pkey,
                ca_certs=ca_certs,
                root_directory=root_dir,
                logfile=logfile,
                refresh=refresh,
                scgi_username=scgi_username,
                scgi_password=scgi_password,
                scgi_method=scgi_method,
            )
            self._flush()
        except KeyError:
            # A required key (rtorrent_socket/host/port/password) is missing.
            raise ConfigError("Config File is malformed")

    def get(self, conf):
        """Return the value of option *conf*, or None when unknown."""
        if conf in self.CONFIG.__dict__.keys():
            return self.CONFIG.__dict__[conf]
        else:
            return None
|
damianam/easybuild-framework | easybuild/tools/package/utilities.py | Python | gpl-2.0 | 7,628 | 0.002622 | ##
# Copyright 2015-2017 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Various utilities related to packaging support.
:author: Marc Litherland (Novartis)
:author: Gianluca Santarossa (Novartis)
:author: Robert Schmidt (The Ottawa Hospital, Research Institute)
:author: Fotis Georgatos (Uni.Lu, NTUA)
:author: Kenneth Hoste (Ghent University)
"""
import os
import tempfile
import pprint
from vsc.utils import fancylogger
from vsc.utils.missing import get_subclasses
from vsc.utils.patterns import Singleton
from easybuild.tools.config import PKG_TOOL_FPM, PKG_TYPE_RPM, build_option, get_package_naming_scheme, log_path
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import which
from easybuild.tools.package.package_naming_scheme.pns import PackageNamingScheme
from easybuild.tools.run import run_cmd
from easybuild.tools.toolchain import DUMMY_TOOLCHAIN_NAME
from easybuild.tools.utilities import import_available_modules, quote_str
_log = fancylogger.getLogger('tools.package')
def avail_package_naming_schemes():
    """
    Return a map of available package naming schemes, keyed by class name.
    Schemes are discovered by importing every module in the
    easybuild.tools.package.package_naming_scheme namespace and collecting
    the PackageNamingScheme subclasses defined there.
    """
    import_available_modules('easybuild.tools.package.package_naming_scheme')
    return {pns_class.__name__: pns_class for pns_class in get_subclasses(PackageNamingScheme)}
def package(easyblock):
    """
    Package installed software, according to active packaging configuration settings."""
    pkgtool = build_option('package_tool')

    # Guard clause: fail fast on unsupported packaging tools.
    if pkgtool != PKG_TOOL_FPM:
        raise EasyBuildError("Unknown packaging tool specified: %s", pkgtool)

    return package_with_fpm(easyblock)
def package_with_fpm(easyblock):
    """
    This function will build a package using fpm and return the directory where the packages are
    """
    # NOTE: 'except OSError, err' below makes this Python-2-only code.
    workdir = tempfile.mkdtemp(prefix='eb-pkgs-')
    pkgtype = build_option('package_type')
    _log.info("Will be creating %s package(s) in %s", pkgtype, workdir)

    try:
        origdir = os.getcwd()
        os.chdir(workdir)
    except OSError, err:
        raise EasyBuildError("Failed to chdir into workdir %s: %s", workdir, err)

    package_naming_scheme = ActivePNS()

    # Derive package name/version/release via the active naming scheme.
    pkgname = package_naming_scheme.name(easyblock.cfg)
    pkgver = package_naming_scheme.version(easyblock.cfg)
    pkgrel = package_naming_scheme.release(easyblock.cfg)

    _log.debug("Got the PNS values name: %s version: %s release: %s", pkgname, pkgver, pkgrel)

    # Dependencies: the toolchain (unless it's the dummy one) plus all
    # declared easyconfig dependencies.
    deps = []
    if easyblock.toolchain.name != DUMMY_TOOLCHAIN_NAME:
        toolchain_dict = easyblock.toolchain.as_dict()
        deps.extend([toolchain_dict])

    deps.extend(easyblock.cfg.dependencies())

    _log.debug("The dependencies to be added to the package are: %s",
               pprint.pformat([easyblock.toolchain.as_dict()] + easyblock.cfg.dependencies()))
    depstring = ''
    for dep in deps:
        if dep.get('external_module', False):
            # External modules are provided outside EasyBuild, so no
            # package-level dependency is declared for them.
            _log.debug("Skipping dep marked as external module: %s", dep['name'])
        else:
            _log.debug("The dep added looks like %s ", dep)
            dep_pkgname = package_naming_scheme.name(dep)
            depstring += " --depends %s" % quote_str(dep_pkgname)

    # Excluding the EasyBuild logs and test reports that might be in the installdir
    exclude_files_glob = [
        os.path.join(log_path(), "*.log"),
        os.path.join(log_path(), "*.md"),
    ]
    # stripping off leading / to match expected glob in fpm
    exclude_files_glob = [
        '--exclude %s' % quote_str(os.path.join(easyblock.installdir.lstrip(os.sep), x))
        for x in exclude_files_glob
    ]
    _log.debug("The list of excluded files passed to fpm: %s", exclude_files_glob)
    cmdlist = [
        PKG_TOOL_FPM,
        '--workdir', workdir,
        '--name', quote_str(pkgname),
        '--provides', quote_str(pkgname),
        '-t', pkgtype,  # target
        '-s', 'dir',  # source
        '--version', pkgver,
        '--iteration', pkgrel,
        '--description', quote_str(easyblock.cfg["description"]),
        '--url', quote_str(easyblock.cfg["homepage"]),
    ]
    cmdlist.extend(exclude_files_glob)
    if build_option('debug'):
        cmdlist.append('--debug')
    # Final positional arguments: dependency flags, the install dir and
    # the generated module file.
    cmdlist.extend([
        depstring,
        easyblock.installdir,
        easyblock.module_generator.get_module_filepath(),
    ])
    cmd = ' '.join(cmdlist)
    _log.debug("The flattened cmdlist looks like: %s", cmd)
    run_cmd(cmd, log_all=True, simple=True)

    _log.info("Created %s package(s) in %s", pkgtype, workdir)

    try:
        os.chdir(origdir)
    except OSError, err:
        raise EasyBuildError("Failed to chdir back to %s: %s", origdir, err)

    return workdir
def check_pkg_support():
    """Check whether packaging is possible, if required dependencies are available."""
    pkgtool = build_option('package_tool')
    pkgtool_path = which(pkgtool)

    # Guard clause: no point continuing without the packaging tool itself.
    if not pkgtool_path:
        raise EasyBuildError("Selected packaging tool '%s' not found", pkgtool)

    _log.info("Selected packaging tool '%s' found at %s", pkgtool, pkgtool_path)

    # Generating RPMs with FPM additionally requires rpmbuild.
    if pkgtool == PKG_TOOL_FPM and build_option('package_type') == PKG_TYPE_RPM:
        rpmbuild_path = which('rpmbuild')
        if not rpmbuild_path:
            raise EasyBuildError("rpmbuild is required when generating RPM packages but was not found")
        _log.info("Required tool 'rpmbuild' found at %s", rpmbuild_path)
class ActivePNS(object):
    """
    The wrapper class for Package Naming Schemes.
    """

    # NOTE: __metaclass__ only takes effect under Python 2; it makes this
    # class a singleton via vsc.utils.patterns.Singleton.
    __metaclass__ = Singleton

    def __init__(self):
        """Initialize logger and find available PNSes to load"""
        self.log = fancylogger.getLogger(self.__class__.__name__, fname=False)

        # Instantiate the naming scheme selected in the configuration,
        # failing loudly if it isn't among the discovered schemes.
        avail_pns = avail_package_naming_schemes()
        sel_pns = get_package_naming_scheme()
        if sel_pns in avail_pns:
            self.pns = avail_pns[sel_pns]()
        else:
            raise EasyBuildError("Selected package naming scheme %s could not be found in %s",
                                 sel_pns, avail_pns.keys())

    def name(self, easyconfig):
        """Determine package name"""
        name = self.pns.name(easyconfig)
        return name

    def version(self, easyconfig):
        """Determine package version"""
        version = self.pns.version(easyconfig)
        return version

    def release(self, easyconfig):
        """Determine package release"""
        release = self.pns.release(easyconfig)
        return release
|
opendatatrentino/ckan-api-client | ckan_api_client/tests/functional/client_sync/test_real_harvesting_scenario.py | Python | bsd-2-clause | 1,266 | 0 | # """
# Test some "real life" harvesting scenario.
# We have "data dumps" of an imaginary catalog for a set of days.
# The testing procedure should be run as follows:
# 1- Get current state of the database
# 2- Update data from the "harvest source"
# 3- Make sure the database state matches the expected one:
#     - unrelated datasets should still be there
# - only datasets from this souce should have been changed,
# and should match the desired state.
# 4- Loop for all the days
# """
# import os
# import pytest
# from ckan_api_client.syncing import CkanDataImportClient
# from .utils import ckan_client # noqa (fixture)
# # from .utils.harvest_source import HarvestSource
# HERE = os.path.abspath(os.path.dirname(__file__))
# DATA_DIR = os.path.join(os.path.dirname(HERE), 'data', 'random')
# HARVEST_SOURCE_NAME = 'dummy-harvest-source'
# @pytest.fixture(params=['day-{0:02d}'.format(x) for x in xrange(4)])
# def harvest_source(request):
# return HarvestSource(DATA_DIR, request.param)
# @pytest.mark.skipif(True, reason="Disabled")
# def test_real_harvesting_scenario(ckan_url, api_key, harvest_source):
# client = CkanDataImportClient(ckan_url, api_key, 'test-source')
# client.sync_data(harvest_source, double_check=True)
|
awemulya/fieldsight-kobocat | onadata/apps/logger/south_migrations/0031_auto__add_field_xform_last_submission_time.py | Python | bsd-2-clause | 10,336 | 0.007933 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add the nullable XForm.last_submission_time column."""
        # Adding field 'XForm.last_submission_time'
        db.add_column(u'odk_logger_xform', 'last_submission_time',
                      self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True),
                      keep_default=False)
    def backwards(self, orm):
        """Reverse: drop the XForm.last_submission_time column."""
        # Deleting field 'XForm.last_submission_time'
        db.delete_column(u'odk_logger_xform', 'last_submission_time')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db. | models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'odk_logger.attachment': {
'Meta': {'object_name': 'Attachment'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attachments'", 'to': "orm['odk_logger.Instance']"}),
'media_file': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'mimetype': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'})
},
'odk_logger.instance': {
'Meta': {'object_name': 'Instance'},
'date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "u'submitted_via_web'", 'max_length': '20'}),
'survey_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['odk_logger.SurveyType']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': u"orm['auth.User']"}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '249'}),
'xform': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['odk_logger.XForm']"}),
'xml': ('django.db.models.fields.TextField', [], {})
},
'odk_logger.instancehistory': {
'Meta': {'object_name': 'InstanceHistory'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'uuid': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '249'}),
'xform_instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'submission_history'", 'to': "orm['odk_logger.Instance']"}),
'xml': ('django.db.models.fields.TextField', [], {})
},
'odk_logger.surveytype': {
'Meta': {'object_name': 'SurveyType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'odk_logger.xform': {
'Meta': {'ordering': "('id_string',)", 'unique_together': "(('user', 'id_string'), ('user', 'sms_id_string'))", 'object_name': 'XForm'},
'allows_sms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'bamboo_dataset': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '60'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "u''", 'null': 'True'}),
'downloadable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'encrypted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_start_time': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'id_string': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'is_crowd_form': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'json': ('django.db.models.fields.TextField', [], {'default |
Danisan/dvit-odoo8 | invoice_discount/__init__.py | Python | agpl-3.0 | 26 | 0 | #
imp | ort invoice_discount | |
OEASLAN/LetsEat | web/manage.py | Python | gpl-2.0 | 249 | 0.004016 | #!/usr/bin/env python
import os
import sys
# Standard Django management entry point (the stock manage.py pattern).
if __name__ == "__main__":
    # Point Django at this project's settings module before the
    # management machinery is imported.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "letseat.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
ibm-messaging/message-hub-samples | kafka-python-console-sample/producertask.py | Python | apache-2.0 | 1,944 | 0.003603 | """
Copyright 2015-2018 IBM
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org | /licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Licensed Materials - Property of IBM
© Copyright IBM Corp. 2015-2 | 018
"""
import asyncio
from confluent_kafka import Producer
class ProducerTask(object):
def __init__(self, conf, topic_name):
self.topic_name = topic_name
self.producer = Producer(conf)
self.counter = 0
self.running = True
def stop(self):
self.running = False
def on_delivery(self, err, msg):
if err:
print('Delivery report: Failed sending message {0}'.format(msg.value()))
print(err)
# We could retry sending the message
else:
print('Message produced, offset: {0}'.format(msg.offset()))
@asyncio.coroutine
def run(self):
print('The producer has started')
while self.running:
message = 'This is a test message #{0}'.format(self.counter)
key = 'key'
sleep = 2 # Short sleep for flow control
try:
self.producer.produce(self.topic_name, message, key, -1, self.on_delivery)
self.producer.poll(0)
self.counter += 1
except Exception as err:
print('Failed sending message {0}'.format(message))
print(err)
sleep = 5 # Longer sleep before retrying
yield from asyncio.sleep(sleep)
self.producer.flush()
|
spulec/moto | moto/mediastoredata/models.py | Python | apache-2.0 | 2,506 | 0.001197 | import hashlib
from collections import OrderedDict
from moto.core import BaseBackend, BaseModel
from moto.core.utils import BackendDict
from .exceptions import ClientError
class Object(BaseModel):
def __init__(self, path, body, etag, storage_class="TEMPORAL"):
self.path = path
self.body = body
self.content_sha256 = hashlib.sha256(body.encode("utf-8")).hexdigest()
self.etag = etag
self.storage_class = storage_class
def to_dict(self):
data = {
"ETag": self.etag,
"Name": self.path,
"Type": "FILE",
"ContentLength": 123,
"StorageClass": s | elf.storage_class,
"Path": self.path,
"ContentSHA256": self.content_sha256,
}
return data
class MediaStoreDataBackend(BaseBackend):
def __init__(self, region_name=None):
super().__init__()
self.region_name = region_name
self._objects = OrderedDict()
def reset(self):
region | _name = self.region_name
self.__dict__ = {}
self.__init__(region_name)
def put_object(
self,
body,
path,
content_type=None,
cache_control=None,
storage_class="TEMPORAL",
upload_availability="STANDARD",
):
new_object = Object(
path=path, body=body, etag="etag", storage_class=storage_class
)
self._objects[path] = new_object
return new_object
def delete_object(self, path):
if path not in self._objects:
error = "ObjectNotFoundException"
raise ClientError(error, "Object with id={} not found".format(path))
del self._objects[path]
return {}
def get_object(self, path, object_range=None):
"""
The Range-parameter is not yet supported.
"""
objects_found = [item for item in self._objects.values() if item.path == path]
if len(objects_found) == 0:
error = "ObjectNotFoundException"
raise ClientError(error, "Object with id={} not found".format(path))
return objects_found[0]
def list_items(self, path, max_results=1000, next_token=None):
"""
The Path- and MaxResults-parameters are not yet supported.
"""
items = self._objects.values()
response_items = [c.to_dict() for c in items]
return response_items
mediastoredata_backends = BackendDict(MediaStoreDataBackend, "mediastore-data")
|
vitorio/ocropodium | ocradmin/presets/tests/test_scripts.py | Python | apache-2.0 | 4,800 | 0.002708 | """
Test plugin views.
"""
import os
import glob
from django.test import TestCase
from django.utils import simplejson as json
from django.conf import settings
from django.test.client import Client
from django.contrib.auth.models import User
from ocradmin.core.tests import testutils
from nodetree import script, node
import numpy
from mock import patch
VALID_SCRIPTDIR = "nodelib/scripts/valid"
INVALID_SCRIPTDIR = "nodelib/scripts/invalid"
from ocradmin.nodelib import cache
class ViewsTest(TestCase):
fixtures = [
"presets/fixtures/test_fixtures.json",
"ocrmodels/fixtures/test_fixtures.json"]
def setUp(self):
"""
Setup OCR tests. Creates a test user.
"""
testutils.symlink_model_fixtures()
self.scripts = {}
for fname in os.listdir(VALID_SCRIPTDIR):
if fname.endswith("json"):
with open(os.path.join(VALID_SCRIPTDIR, fname), "r") as f:
self.scripts[fname] = json | .load(f)
for fname in os.listdir(INVALID_SCRIPTDIR):
if fname.endswith("json"):
| with open(os.path.join(INVALID_SCRIPTDIR, fname), "r") as f:
self.scripts[fname] = json.load(f)
self.testuser = User.objects.create_user("test_user", "test@testing.com", "testpass")
self.client = Client()
self.client.login(username="test_user", password="testpass")
def tearDown(self):
"""
Revert any changes.
"""
#cache.PersistantFileCacher = self.old_cacher
def test_binarise_script(self):
"""
Test a script that should return image data, i.e.
a path to a DZI file.
"""
self._run_script("binarize.json", "SUCCESS", "image", ["output"])
def test_segment_script(self):
"""
Test a script that should return line image geometry.
"""
self._run_script("segment.json", "SUCCESS", "pseg", ["input", "lines"])
def test_ocropus_script(self):
"""
Test a script that should return transcript data.
"""
self._run_script("ocropus.json", "SUCCESS", "hocr", ["data"])
def test_tesseract_native_seg_script(self):
"""
Test a script that should return transcript data.
"""
self._run_script("tesseract_native_seg.json", "SUCCESS", "hocr", ["data"])
def test_tesseract_script(self):
"""
Test a script that should return transcript data.
"""
self._run_script("tesseract.json", "SUCCESS", "hocr", ["data"])
def test_cuneiform_script(self):
"""
Test a script that should return transcript data.
"""
self._run_script("cuneiform.json", "SUCCESS", "hocr", ["data"])
def test_evaluation_script(self):
"""
Test a script that should return transcript data.
"""
self._run_script("evaluation.json", "SUCCESS", "text", ["data"])
def test_invalid_path(self):
"""
Test a script that should return a node error.
"""
script = self.scripts.get("invalid_filein_path.json")
self.assertIsNotNone(script)
r = self.client.post("/presets/run/", dict(
script=json.dumps(script)))
content = json.loads(r.content)
for field in ["status", "errors"]:
self.assertIn(field, content, "No '%s' field in content" % field)
expectedstatus = "VALIDATION"
self.assertEqual(expectedstatus,
content["status"], "Status field is not '%s'" % expectedstatus)
self.assertIn("filein1", content["errors"], "'filein1' not in errors field" )
@patch(settings.NODETREE_PERSISTANT_CACHER, cache.TestMockCacher)
def _run_script(self, scriptname, expectedstatus, expectedtype, expecteddatafields):
"""
Run a script and assert the results resemble what we expect.
"""
script = self.scripts.get(scriptname)
self.assertIsNotNone(script)
r = self.client.post("/presets/run/", dict(script=json.dumps(script)))
content = json.loads(r.content)
for field in ["status", "task_id", "results"]:
self.assertIn(field, content, "No '%s' field in content" % field)
self.assertEqual(expectedstatus,
content["status"], "Status field is not '%s'" % expectedstatus)
for field in ["type"]:
self.assertIn(field, content["results"], "No '%s' field in content results" % field)
self.assertEqual(expectedtype,
content["results"]["type"], "Type field is not '%s'" % expectedtype)
for field in expecteddatafields:
self.assertIn(field, content["results"], "No '%s' field in content results" % field)
return content
|
PacificBiosciences/rDnaTools | src/pbrdna/__init__.py | Python | bsd-3-clause | 1,847 | 0.002166 | #################################################################################
# Copyright (c) 2013, Pacific Biosciences of California, Inc.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# d | ocumentation and/or other materials provided with the distribution.
# * Neither the name of Pacific Biosciences nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY
# THIS LICENSE. T | HIS SOFTWARE IS PROVIDED BY PACIFIC BIOSCIENCES AND ITS
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL PACIFIC BIOSCIENCES OR
# ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#################################################################################
__VERSION__ = "0.7.1"
|
lukas-ke/faint-graphics-editor | py/faint/formatsvgz.py | Python | apache-2.0 | 1,026 | 0.001949 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Lukas Kemmer
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Load and save functions for zipped svg files."""
import faint.svg.parse_svg as parse_svg
import faint.svg.write_svg as write_svg
def load(filename, imageprops):
"""Load image from the zipped svg file."""
parse_svg.parse_svgz_file(file | name, imageprops, "en")
def save(filename, canvas):
"""Save the image to the specified file as zipped svg."""
write_svg.write_svgz(filenam | e, canvas)
|
TheAlgorithms/Python | sorts/comb_sort.py | Python | mit | 1,851 | 0.001621 | """
This is pure Python implementation of comb sort algorithm.
Comb sort is a relatively simple sorting algorithm originally designed by Wlodzimierz
Dobosiewicz in 1980. It was rediscovered by Stephen Lacey and Richard Box in 1991.
Comb sort improves on bubble sort algorithm.
In bubble sort, distance (or gap) between two compared elements is always one.
Comb sort improvement is that gap can be much more than 1, in order | to prevent slowing
down by small values
at the end of a list.
More info on: https://en.wikipedia.org/wiki/Comb_sort
For doctests run following command:
python -m doctest -v comb_sort.py
or
python3 -m doctest -v comb_sort.py
For manual testing run:
python comb_sort.py
"""
def comb_sort(data: list) -> list:
"""Pure implementation of comb sort algorithm in Python
:param data: mutable collection with comparable item | s
:return: the same collection in ascending order
Examples:
>>> comb_sort([0, 5, 3, 2, 2])
[0, 2, 2, 3, 5]
>>> comb_sort([])
[]
>>> comb_sort([99, 45, -7, 8, 2, 0, -15, 3])
[-15, -7, 0, 2, 3, 8, 45, 99]
"""
shrink_factor = 1.3
gap = len(data)
completed = False
while not completed:
# Update the gap value for a next comb
gap = int(gap / shrink_factor)
if gap <= 1:
completed = True
index = 0
while index + gap < len(data):
if data[index] > data[index + gap]:
# Swap values
data[index], data[index + gap] = data[index + gap], data[index]
completed = False
index += 1
return data
if __name__ == "__main__":
import doctest
doctest.testmod()
user_input = input("Enter numbers separated by a comma:\n").strip()
unsorted = [int(item) for item in user_input.split(",")]
print(comb_sort(unsorted))
|
all-of-us/raw-data-repository | rdr_service/lib_fhir/fhirclient_4_0_0/models/group.py | Python | bsd-3-clause | 7,973 | 0.007651 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.0-a53ec6ee1b (http://hl7.org/fhir/StructureDefinition/Group) on 2019-05-07.
# 2019, SMART Health IT.
from . import domainresource
class Group(domainresource.DomainResource):
""" Group of multiple entities.
Represents a defined collection of entities that may be discussed or acted
upon collectively but which are not expected to act collectively, and are
not formally or legally recognized; i.e. a collection of entities that
isn't an Organization.
"""
resource_type = "Group"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.active = None
""" Whether this group's record is in active use.
Type `bool`. """
self.actual = None
""" Descriptive or actual.
Type `bool`. """
self.characteristic = None
""" Include / Exclude group members by Trait.
List of `GroupCharacteristic` items (represented as `dict` in JSON). """
self.code = None
""" Kind of Gro | up members.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.identifier = None
""" Unique id.
List of `Identifier` items (represented as `dict` in JSON). """
self.managingEntity = None
"" | " Entity that is the custodian of the Group's definition.
Type `FHIRReference` (represented as `dict` in JSON). """
self.member = None
""" Who or what is in group.
List of `GroupMember` items (represented as `dict` in JSON). """
self.name = None
""" Label for Group.
Type `str`. """
self.quantity = None
""" Number of members.
Type `int`. """
self.type = None
""" person | animal | practitioner | device | medication | substance.
Type `str`. """
super(Group, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(Group, self).elementProperties()
js.extend([
("active", "active", bool, False, None, False),
("actual", "actual", bool, False, None, True),
("characteristic", "characteristic", GroupCharacteristic, True, None, False),
("code", "code", codeableconcept.CodeableConcept, False, None, False),
("identifier", "identifier", identifier.Identifier, True, None, False),
("managingEntity", "managingEntity", fhirreference.FHIRReference, False, None, False),
("member", "member", GroupMember, True, None, False),
("name", "name", str, False, None, False),
("quantity", "quantity", int, False, None, False),
("type", "type", str, False, None, True),
])
return js
from . import backboneelement
class GroupCharacteristic(backboneelement.BackboneElement):
""" Include / Exclude group members by Trait.
Identifies traits whose presence r absence is shared by members of the
group.
"""
resource_type = "GroupCharacteristic"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.code = None
""" Kind of characteristic.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.exclude = None
""" Group includes or excludes.
Type `bool`. """
self.period = None
""" Period over which characteristic is tested.
Type `Period` (represented as `dict` in JSON). """
self.valueBoolean = None
""" Value held by characteristic.
Type `bool`. """
self.valueCodeableConcept = None
""" Value held by characteristic.
Type `CodeableConcept` (represented as `dict` in JSON). """
self.valueQuantity = None
""" Value held by characteristic.
Type `Quantity` (represented as `dict` in JSON). """
self.valueRange = None
""" Value held by characteristic.
Type `Range` (represented as `dict` in JSON). """
self.valueReference = None
""" Value held by characteristic.
Type `FHIRReference` (represented as `dict` in JSON). """
super(GroupCharacteristic, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(GroupCharacteristic, self).elementProperties()
js.extend([
("code", "code", codeableconcept.CodeableConcept, False, None, True),
("exclude", "exclude", bool, False, None, True),
("period", "period", period.Period, False, None, False),
("valueBoolean", "valueBoolean", bool, False, "value", True),
("valueCodeableConcept", "valueCodeableConcept", codeableconcept.CodeableConcept, False, "value", True),
("valueQuantity", "valueQuantity", quantity.Quantity, False, "value", True),
("valueRange", "valueRange", range.Range, False, "value", True),
("valueReference", "valueReference", fhirreference.FHIRReference, False, "value", True),
])
return js
class GroupMember(backboneelement.BackboneElement):
""" Who or what is in group.
Identifies the resource instances that are members of the group.
"""
resource_type = "GroupMember"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.entity = None
""" Reference to the group member.
Type `FHIRReference` (represented as `dict` in JSON). """
self.inactive = None
""" If member is no longer in group.
Type `bool`. """
self.period = None
""" Period member belonged to the group.
Type `Period` (represented as `dict` in JSON). """
super(GroupMember, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(GroupMember, self).elementProperties()
js.extend([
("entity", "entity", fhirreference.FHIRReference, False, None, True),
("inactive", "inactive", bool, False, None, False),
("period", "period", period.Period, False, None, False),
])
return js
import sys
try:
from . import codeableconcept
except ImportError:
codeableconcept = sys.modules[__package__ + '.codeableconcept']
try:
from . import fhirreference
except ImportError:
fhirreference = sys.modules[__package__ + '.fhirreference']
try:
from . import identifier
except ImportError:
identifier = sys.modules[__package__ + '.identifier']
try:
from . import period
except ImportError:
period = sys.modules[__package__ + '.period']
try:
from . import quantity
except ImportError:
quantity = sys.modules[__package__ + '.quantity']
try:
from . import range
except ImportError:
range = sys.modules[__package__ + '.range']
|
SamHames/scikit-image | skimage/feature/util.py | Python | bsd-3-clause | 4,764 | 0 | import numpy as np
from skimage.util import img_as_float
class FeatureDetector(object):
def __init__(self):
self.keypoints_ = np.array([])
def detect(self, image):
"""Detect keypoints in image.
Parameters
----------
image : 2D array
Input image.
"""
raise NotImplementedError()
class DescriptorExtractor(object):
def __init__(self):
self.descriptors_ = np.array([])
def extract(self, image, keypoints):
"""Extract feature descriptors in image for given keypoints.
Parameters
----------
image : 2D array
Input image.
keypoints : (N, 2) array
Keypoint locations as ``(row, col)``.
"""
raise NotImplementedError()
def plot_matches(ax, image1, image2, keypoints1, keypoints2, matches,
keypoints_color='k', matches_color=None, only_matches=False):
"""Plot matched features.
Parameters
----------
ax : matplotlib.axes.Axes
Matches and image are drawn in this ax.
image1 : (N, M [, 3]) array
First grayscale or color image.
image2 : (N, M [, 3]) array
Second grayscale or color image.
keypoints1 : (K1, 2) array
First keypoint coordinates as ``(row, col)``.
keypoints2 : (K2, 2) array
Second keypoint coordinates as ``(row, col)``.
matches : (Q, 2) array
Indices of corresponding matches in first and second set of
descriptors, where ``matches[:, 0]`` denote the indices in the first
and ``matches[:, 1]`` the indices in the second set of descriptors.
keypoints_color : matplotlib color, optional
Color for keypoint locations.
matches_color : matplotlib color, optional
Color for lines which connect keypoint matches. By default the
color is chosen randomly.
only_matches : bool, optional
Whether to only plot matches and not plot the keypoint locations.
"""
image1 = img_as_float(image1)
image2 = img_as_float(image2)
new_shape1 = list(image1.shape)
new_shape2 = list(image2.shape)
if image1.shape[0] < image2.shape[0]:
new_shape1[0] = image2.shape[0]
elif image1.shape[0] > image2.shape[0]:
new_shape2[0] = image1.shape[0]
if image1.shape[1] < image2.shape[1]:
new_shape1[1] = image2.shape[1]
elif image1.shape[1] > image2.shape[1]:
new_shape2[1] = image1.shape[1]
if new_shape1 != image1.shape:
new_image1 = np.zeros(new_shape1, dtype=image1.dtype)
new_image1[:image1.shape[0], :image1.shape[1]] = image1
image1 = new_image1
if new_shape2 != image2.shape:
new_image2 = np.zeros(new_shape2, dtype=image2.dtype)
new_image2[:image2.shape[0], :image2.shape[1]] = image2
image2 = new_image2
image = np.concatenate([image1, image2], axis=1)
offset = image1.shape
if not only_matches:
ax.scatter(keypoints1[:, 1], keypoints1[:, 0],
facecolors='none', edgecolors=keypoints_color)
ax.scatter(keypoints2[:, 1] + offset[1], keypoints2[:, 0],
facecolors='none', edgecolors=keypoints_color)
ax.imshow(image, interpolation='nearest', cmap='gray')
ax.axis | ((0, 2 * offset[1], offset[0], 0))
for i in range(matches.shape[0]):
idx1 = matches[i, 0]
idx2 = matches[i, 1]
if matches_color is None:
color = np.random.rand(3, 1)
else:
color = matches_color
ax.plot((keypoints1[idx1, 1], keypoints2[idx2, 1] + offset[1]),
(keypoints1[idx1, 0], keypoints2[idx2, 0]),
'-', color=color)
def | _prepare_grayscale_input_2D(image):
image = np.squeeze(image)
if image.ndim != 2:
raise ValueError("Only 2-D gray-scale images supported.")
return img_as_float(image)
def _mask_border_keypoints(image_shape, keypoints, distance):
"""Mask coordinates that are within certain distance from the image border.
Parameters
----------
image_shape : (2, ) array_like
Shape of the image as ``(rows, cols)``.
keypoints : (N, 2) array
Keypoint coordinates as ``(rows, cols)``.
distance : int
Image border distance.
Returns
-------
mask : (N, ) bool array
Mask indicating if pixels are within the image (``True``) or in the
border region of the image (``False``).
"""
rows = image_shape[0]
cols = image_shape[1]
mask = (((distance - 1) < keypoints[:, 0])
& (keypoints[:, 0] < (rows - distance + 1))
& ((distance - 1) < keypoints[:, 1])
& (keypoints[:, 1] < (cols - distance + 1)))
return mask
|
mulkieran/pyudev | pyudev/pyqt4.py | Python | lgpl-2.1 | 3,930 | 0.001018 | # -*- coding: utf-8 -*-
# Copyright (C) 2010, 2011, 2012, 2013 Sebastian Wiesner <lunaryorn@gmail.com>
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# pylint: disable=anomalous-backslash-in-string
"""
pyudev.pyqt4
============
PyQt4 integration.
:class:`MonitorObserver` integrates device monitoring into the PyQt4\_
mainlo | op by turning device events into Qt signals.
:mod:`PyQt4.QtCore` from PyQt4\_ must be available | when importing this
module.
.. _PyQt4: http://riverbankcomputing.co.uk/software/pyqt/intro
.. moduleauthor:: Sebastian Wiesner <lunaryorn@gmail.com>
"""
from __future__ import (print_function, division, unicode_literals,
absolute_import)
from PyQt4.QtCore import QSocketNotifier, QObject, pyqtSignal
from pyudev._util import text_type
from pyudev.core import Device
from pyudev._qt_base import QUDevMonitorObserverMixin, MonitorObserverMixin
class MonitorObserver(QObject, MonitorObserverMixin):
"""An observer for device events integrating into the :mod:`PyQt4` mainloop.
This class inherits :class:`~PyQt4.QtCore.QObject` to turn device events
into Qt signals:
>>> from pyudev import Context, Monitor
>>> from pyudev.pyqt4 import MonitorObserver
>>> context = Context()
>>> monitor = Monitor.from_netlink(context)
>>> monitor.filter_by(subsystem='input')
>>> observer = MonitorObserver(monitor)
>>> def device_event(device):
... print('event {0} on device {1}'.format(device.action, device))
>>> observer.deviceEvent.connect(device_event)
>>> monitor.start()
This class is a child of :class:`~PyQt4.QtCore.QObject`.
"""
#: emitted upon arbitrary device events
deviceEvent = pyqtSignal(Device)
def __init__(self, monitor, parent=None):
"""
Observe the given ``monitor`` (a :class:`~pyudev.Monitor`):
``parent`` is the parent :class:`~PyQt4.QtCore.QObject` of this
object. It is passed unchanged to the inherited constructor of
:class:`~PyQt4.QtCore.QObject`.
"""
QObject.__init__(self, parent)
self._setup_notifier(monitor, QSocketNotifier)
class QUDevMonitorObserver(QObject, QUDevMonitorObserverMixin):
"""An observer for device events integrating into the :mod:`PyQt4` mainloop.
.. deprecated:: 0.17
Will be removed in 1.0. Use :class:`MonitorObserver` instead.
"""
#: emitted upon arbitrary device events
deviceEvent = pyqtSignal(text_type, Device)
#: emitted, if a device was added
deviceAdded = pyqtSignal(Device)
#: emitted, if a device was removed
deviceRemoved = pyqtSignal(Device)
#: emitted, if a device was changed
deviceChanged = pyqtSignal(Device)
#: emitted, if a device was moved
deviceMoved = pyqtSignal(Device)
def __init__(self, monitor, parent=None):
"""
Observe the given ``monitor`` (a :class:`~pyudev.Monitor`):
``parent`` is the parent :class:`~PyQt4.QtCore.QObject` of this
object. It is passed unchanged to the inherited constructor of
:class:`~PyQt4.QtCore.QObject`.
"""
QObject.__init__(self, parent)
self._setup_notifier(monitor, QSocketNotifier)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.