hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a6567d3cfcc84789855fb9d65e3cbdd736d4d725 | 5,573 | py | Python | workspacemanager/setup.py | hayj/WorkspaceManager | 81a9b7fdb5b532ffed75742090a620447ee614eb | [
"MIT"
] | 3 | 2018-12-03T22:50:35.000Z | 2019-06-18T10:56:20.000Z | workspacemanager/setup.py | hayj/WorkspaceManager | 81a9b7fdb5b532ffed75742090a620447ee614eb | [
"MIT"
] | null | null | null | workspacemanager/setup.py | hayj/WorkspaceManager | 81a9b7fdb5b532ffed75742090a620447ee614eb | [
"MIT"
] | null | null | null | # coding: utf-8
# from __future__ import division, print_function, absolute_import
import os
import sys
import json
import getpass
from shutil import *
from os import listdir
from os.path import isfile, join
import datetime
from workspacemanager.utils import *
"""
This script will create a dir tree for the current project
"""
def getConf(workspacePath):
    """Load the workspace configuration from ``<workspacePath>/wm-conf.json``.

    :param workspacePath: path of the workspace root directory
    :return: the parsed conf dict, or an empty dict when the file is
        missing or contains invalid JSON (best-effort: a broken conf
        must not abort the setup generation).
    """
    conf = dict()
    confPath = workspacePath + "/wm-conf.json"
    if os.path.isfile(confPath):
        with open(confPath) as confFile:
            try:
                conf = json.load(confFile)
            except ValueError:
                # Malformed JSON: silently fall back to an empty conf.
                pass
    return conf
def generateSetup(theProjectDirectory=None, userInput=True):
    """Bootstrap packaging files for the current project.

    Copies the setup templates (setup.py, LICENCE.txt, ...) into the
    project directory, fills in author/description placeholders from the
    workspace conf (wm-conf.json) or interactive prompts, creates
    requirements.txt and ensures the package __init__.py carries a
    ``__version__``.

    :param theProjectDirectory: explicit project dir (used by tests);
        when given, interactive prompts are disabled.
    :param userInput: whether to ask the user for missing values.
    """
    # Resolve every relevant directory/name for the project:
    (thisLibPackageDirectory,
     theProjectDirectory,
     theProjectPackageDirectory,
     thisLibName,
     workspacePath,
     theProjectName,
     thePackageName,
     realPackagePath,
     realPackageName) = getDirs3(theProjectDirectory=theProjectDirectory)
    # For tests: an explicit project directory disables interactive input.
    if theProjectDirectory is not None:
        userInput = False
    # If there is a setup.py, the project is already bootstrapped:
    if os.path.isfile(theProjectDirectory + "/setup.py"):
        print("Project already setup.")
        exit()
    # We check the directory structure (unless the user opts out):
    answer = None
    if userInput:
        answer = input('Do you want to check the directory structure ? Write "N" or press enter: ')
    if not (answer == "N"):
        if not os.path.isfile(realPackagePath + "/__init__.py"):
            print("The package of this project must have a __init__.py file.")
            exit()
    # Get all datas from the conf or the user:
    conf = getConf(workspacePath)
    if "author" not in conf or conf["author"] is None:
        author = getpass.getuser()
        authorInput = None
        if userInput:
            authorInput = input('Please write your username or press enter for "' + author + '": ')
        # Inputs of length <= 1 fall back to the system user name.
        if authorInput is None or len(authorInput) <= 1:
            conf["author"] = author
        else:
            conf["author"] = authorInput
    if "author_email" not in conf or conf["author_email"] is None:
        conf["author_email"] = None
        if userInput:
            conf["author_email"] = input('Please write your email or press enter: ')
        if conf["author_email"] is None:
            conf["author_email"] = ""
    description = ""
    if userInput:
        description = input('Please write a description or press enter: ')
    # Copy all files from the template dir, skipping files that already
    # exist in the project and compiled *.pyc artifacts:
    templatePath = thisLibPackageDirectory + "/setup-templates"
    allTemplateFiles = [f for f in listdir(templatePath) if isfile(join(templatePath, f))]
    for fileName in allTemplateFiles:
        filePath = templatePath + "/" + fileName
        filePathPaste = theProjectDirectory + "/" + fileName
        if not os.path.isfile(filePathPaste) and ".pyc" not in filePathPaste:
            copyfile(filePath, filePathPaste)
            # BUGFIX: only report files which were actually copied
            # (the original printed "created." unconditionally).
            print(fileName + " created.")
    # Replace "<year>" and "<copyright holders>" in the licence:
    now = datetime.datetime.now()
    listSrc = ["<year>", "<copyright holders>"]
    listRep = [str(now.year), conf["author"]]
    replaceInFile(theProjectDirectory + "/LICENCE.txt", listSrc, listRep)
    print("LICENCE.txt updated.")
    # Replace datas in the setup:
    listSrc = ["__DESCRIPTION__", "__AUTHOR__", "__AUTHOR_EMAIL__"]
    listRep = [description, conf["author"], conf["author_email"]]
    replaceInFile(theProjectDirectory + "/setup.py", listSrc, listRep)
    print("setup.py updated.")
    # Create a requirement file if not exists:
    requPath = theProjectDirectory + "/requirements.txt"
    if not os.path.isfile(requPath):
        touch(requPath)
        print("requirements.txt created.")
    # If there is no __init__.py or it is empty, add a version marker:
    toWrite = '__version__ = "0.0.1"'
    initPath = realPackagePath + "/" + "__init__.py"
    if not os.path.isfile(initPath):
        touch(initPath)
    # BUGFIX: the original opened the file in 'w+' mode, which truncates
    # it *before* reading, so f.read() was always empty and an existing
    # non-empty __init__.py got overwritten. Open in 'r+' (the file is
    # guaranteed to exist at this point) and only write when it is empty.
    with open(initPath, 'r+') as f:
        filedata = f.read()
        if filedata is None or len(filedata) == 0 or filedata == "" or filedata == " ":
            f.write(toWrite)
            print("__version__ added to the __init__.py.")
if __name__ == '__main__':
generateSetup()
| 34.401235 | 105 | 0.625695 |
d76e99524a76f490581e991fd8169b5caee39fc9 | 13,056 | py | Python | Tests/svgLib/path/parser_test.py | serginhoabe/fonttools | f6a62e0db127c01f4d23f3398d70df21bbc6dccc | [
"Apache-2.0",
"MIT"
] | null | null | null | Tests/svgLib/path/parser_test.py | serginhoabe/fonttools | f6a62e0db127c01f4d23f3398d70df21bbc6dccc | [
"Apache-2.0",
"MIT"
] | 74 | 2020-01-30T07:27:54.000Z | 2021-08-03T05:47:17.000Z | Tests/svgLib/path/parser_test.py | serginhoabe/fonttools | f6a62e0db127c01f4d23f3398d70df21bbc6dccc | [
"Apache-2.0",
"MIT"
] | 1 | 2020-01-22T20:06:09.000Z | 2020-01-22T20:06:09.000Z |
from fontTools.misc.py23 import *
from fontTools.pens.recordingPen import RecordingPen
from fontTools.svgLib import parse_path
import pytest
@pytest.mark.parametrize(
    "pathdef, expected",
    [
        # Examples from the SVG spec
        (
            "M 100 100 L 300 100 L 200 300 z",
            [
                ("moveTo", ((100.0, 100.0),)),
                ("lineTo", ((300.0, 100.0),)),
                ("lineTo", ((200.0, 300.0),)),
                # 'z' on a path not ending at the start point produces an
                # implicit lineTo back to the initial moveTo point
                ("lineTo", ((100.0, 100.0),)),
                ("closePath", ()),
            ]
        ),
        # for Z command behavior when there is multiple subpaths
        (
            "M 0 0 L 50 20 M 100 100 L 300 100 L 200 300 z",
            [
                ("moveTo", ((0.0, 0.0),)),
                ("lineTo", ((50.0, 20.0),)),
                # the unclosed first subpath ends with endPath, not closePath
                ("endPath", ()),
                ("moveTo", ((100.0, 100.0),)),
                ("lineTo", ((300.0, 100.0),)),
                ("lineTo", ((200.0, 300.0),)),
                ("lineTo", ((100.0, 100.0),)),
                ("closePath", ()),
            ]
        ),
        (
            "M100,200 C100,100 250,100 250,200 S400,300 400,200",
            [
                ("moveTo", ((100.0, 200.0),)),
                ("curveTo", ((100.0, 100.0),
                             (250.0, 100.0),
                             (250.0, 200.0))),
                ("curveTo", ((250.0, 300.0),
                             (400.0, 300.0),
                             (400.0, 200.0))),
                ("endPath", ()),
            ]
        ),
        (
            "M100,200 C100,100 400,100 400,200",
            [
                ("moveTo", ((100.0, 200.0),)),
                ("curveTo", ((100.0, 100.0),
                             (400.0, 100.0),
                             (400.0, 200.0))),
                ("endPath", ()),
            ]
        ),
        (
            "M100,500 C25,400 475,400 400,500",
            [
                ("moveTo", ((100.0, 500.0),)),
                ("curveTo", ((25.0, 400.0),
                             (475.0, 400.0),
                             (400.0, 500.0))),
                ("endPath", ()),
            ]
        ),
        (
            "M100,800 C175,700 325,700 400,800",
            [
                ("moveTo", ((100.0, 800.0),)),
                ("curveTo", ((175.0, 700.0),
                             (325.0, 700.0),
                             (400.0, 800.0))),
                ("endPath", ()),
            ]
        ),
        (
            "M600,200 C675,100 975,100 900,200",
            [
                ("moveTo", ((600.0, 200.0),)),
                ("curveTo", ((675.0, 100.0),
                             (975.0, 100.0),
                             (900.0, 200.0))),
                ("endPath", ()),
            ]
        ),
        (
            "M600,500 C600,350 900,650 900,500",
            [
                ("moveTo", ((600.0, 500.0),)),
                ("curveTo", ((600.0, 350.0),
                             (900.0, 650.0),
                             (900.0, 500.0))),
                ("endPath", ()),
            ]
        ),
        (
            "M600,800 C625,700 725,700 750,800 S875,900 900,800",
            [
                ("moveTo", ((600.0, 800.0),)),
                ("curveTo", ((625.0, 700.0),
                             (725.0, 700.0),
                             (750.0, 800.0))),
                ("curveTo", ((775.0, 900.0),
                             (875.0, 900.0),
                             (900.0, 800.0))),
                ("endPath", ()),
            ]
        ),
        (
            "M200,300 Q400,50 600,300 T1000,300",
            [
                ("moveTo", ((200.0, 300.0),)),
                ("qCurveTo", ((400.0, 50.0),
                              (600.0, 300.0))),
                ("qCurveTo", ((800.0, 550.0),
                              (1000.0, 300.0))),
                ("endPath", ()),
            ]
        ),
        # End examples from SVG spec
        # Relative moveto
        (
            "M 0 0 L 50 20 m 50 80 L 300 100 L 200 300 z",
            [
                ("moveTo", ((0.0, 0.0),)),
                ("lineTo", ((50.0, 20.0),)),
                ("endPath", ()),
                ("moveTo", ((100.0, 100.0),)),
                ("lineTo", ((300.0, 100.0),)),
                ("lineTo", ((200.0, 300.0),)),
                ("lineTo", ((100.0, 100.0),)),
                ("closePath", ()),
            ]
        ),
        # Initial smooth and relative curveTo
        (
            "M100,200 s 150,-100 150,0",
            [
                ("moveTo", ((100.0, 200.0),)),
                # a smooth curve with no previous curve uses the current
                # point as its first control point
                ("curveTo", ((100.0, 200.0),
                             (250.0, 100.0),
                             (250.0, 200.0))),
                ("endPath", ()),
            ]
        ),
        # Initial smooth and relative qCurveTo
        (
            "M100,200 t 150,0",
            [
                ("moveTo", ((100.0, 200.0),)),
                ("qCurveTo", ((100.0, 200.0),
                              (250.0, 200.0))),
                ("endPath", ()),
            ]
        ),
        # relative l command
        (
            "M 100 100 L 300 100 l -100 200 z",
            [
                ("moveTo", ((100.0, 100.0),)),
                ("lineTo", ((300.0, 100.0),)),
                ("lineTo", ((200.0, 300.0),)),
                ("lineTo", ((100.0, 100.0),)),
                ("closePath", ()),
            ]
        ),
        # relative q command
        (
            "M200,300 q200,-250 400,0",
            [
                ("moveTo", ((200.0, 300.0),)),
                ("qCurveTo", ((400.0, 50.0),
                              (600.0, 300.0))),
                ("endPath", ()),
            ]
        ),
        # absolute H command
        (
            "M 100 100 H 300 L 200 300 z",
            [
                ("moveTo", ((100.0, 100.0),)),
                ("lineTo", ((300.0, 100.0),)),
                ("lineTo", ((200.0, 300.0),)),
                ("lineTo", ((100.0, 100.0),)),
                ("closePath", ()),
            ]
        ),
        # relative h command
        (
            "M 100 100 h 200 L 200 300 z",
            [
                ("moveTo", ((100.0, 100.0),)),
                ("lineTo", ((300.0, 100.0),)),
                ("lineTo", ((200.0, 300.0),)),
                ("lineTo", ((100.0, 100.0),)),
                ("closePath", ()),
            ]
        ),
        # absolute V command
        (
            "M 100 100 V 300 L 200 300 z",
            [
                ("moveTo", ((100.0, 100.0),)),
                ("lineTo", ((100.0, 300.0),)),
                ("lineTo", ((200.0, 300.0),)),
                ("lineTo", ((100.0, 100.0),)),
                ("closePath", ()),
            ]
        ),
        # relative v command
        (
            "M 100 100 v 200 L 200 300 z",
            [
                ("moveTo", ((100.0, 100.0),)),
                ("lineTo", ((100.0, 300.0),)),
                ("lineTo", ((200.0, 300.0),)),
                ("lineTo", ((100.0, 100.0),)),
                ("closePath", ()),
            ]
        ),
    ]
)
def test_parse_path(pathdef, expected):
    """Parse *pathdef* and check the exact sequence of pen calls emitted."""
    pen = RecordingPen()
    parse_path(pathdef, pen)
    assert pen.value == expected
@pytest.mark.parametrize(
    "pathdef1, pathdef2",
    [
        # don't need spaces between numbers and commands
        (
            "M 100 100 L 200 200",
            "M100 100L200 200",
        ),
        # repeated implicit command
        (
            "M 100 200 L 200 100 L -100 -200",
            "M 100 200 L 200 100 -100 -200"
        ),
        # don't need spaces before a minus-sign
        (
            "M100,200c10-5,20-10,30-20",
            "M 100 200 c 10 -5 20 -10 30 -20"
        ),
        # closed paths have an implicit lineTo if they don't
        # end on the same point as the initial moveTo
        (
            "M 100 100 L 300 100 L 200 300 z",
            "M 100 100 L 300 100 L 200 300 L 100 100 z"
        )
    ]
)
def test_equivalent_paths(pathdef1, pathdef2):
    """Two different spellings of the same path yield identical pen calls."""
    pen1 = RecordingPen()
    parse_path(pathdef1, pen1)
    pen2 = RecordingPen()
    parse_path(pathdef2, pen2)
    assert pen1.value == pen2.value
def test_exponents():
    """Exponent notation may use 'e' or 'E' with an optional plus sign,
    and magnitudes up to +/-3.4e38 must be supported (SVG spec minimum)."""
    pen = RecordingPen()
    parse_path("M-3.4e38 3.4E+38L-3.4E-38,3.4e-38", pen)
    assert pen.value == [
        ("moveTo", ((-3.4e+38, 3.4e+38),)),
        ("lineTo", ((-3.4e-38, 3.4e-38),)),
        ("endPath", ()),
    ]
def test_invalid_implicit_command():
    """Numbers following a closepath ('Z') without a new command letter
    must be rejected as an unallowed implicit command."""
    pen = RecordingPen()
    with pytest.raises(ValueError) as exc_info:
        parse_path("M 100 100 L 200 200 Z 100 200", pen)
    assert exc_info.match("Unallowed implicit command")
def test_arc_to_cubic_bezier():
    """Pens without an arcTo method receive arcs approximated as cubic
    Bezier segments; points are compared with a relative tolerance."""
    pen = RecordingPen()
    parse_path("M300,200 h-150 a150,150 0 1,0 150,-150 z", pen)
    expected = [
        ('moveTo', ((300.0, 200.0),)),
        ('lineTo', ((150.0, 200.0),)),
        (
            'curveTo',
            (
                (150.0, 282.842),
                (217.157, 350.0),
                (300.0, 350.0)
            )
        ),
        (
            'curveTo',
            (
                (382.842, 350.0),
                (450.0, 282.842),
                (450.0, 200.0)
            )
        ),
        (
            'curveTo',
            (
                (450.0, 117.157),
                (382.842, 50.0),
                (300.0, 50.0)
            )
        ),
        ('lineTo', ((300.0, 200.0),)),
        ('closePath', ())
    ]
    result = list(pen.value)
    assert len(result) == len(expected)
    # expected control points above are rounded; compare approximately
    for (cmd1, points1), (cmd2, points2) in zip(result, expected):
        assert cmd1 == cmd2
        assert len(points1) == len(points2)
        for pt1, pt2 in zip(points1, points2):
            assert pt1 == pytest.approx(pt2, rel=1e-5)
class ArcRecordingPen(RecordingPen):
    """RecordingPen that also records arcTo segments, so the parser
    forwards arcs verbatim instead of flattening them to curves."""

    def arcTo(self, rx, ry, rotation, arc_large, arc_sweep, end_point):
        entry = ("arcTo", (rx, ry, rotation, arc_large, arc_sweep, end_point))
        self.value.append(entry)
def test_arc_pen_with_arcTo():
    """When the pen exposes arcTo, arcs are passed through unconverted."""
    pen = ArcRecordingPen()
    parse_path("M300,200 h-150 a150,150 0 1,0 150,-150 z", pen)
    assert pen.value == [
        ('moveTo', ((300.0, 200.0),)),
        ('lineTo', ((150.0, 200.0),)),
        ('arcTo', (150.0, 150.0, 0.0, True, False, (300.0, 50.0))),
        ('lineTo', ((300.0, 200.0),)),
        ('closePath', ()),
    ]
@pytest.mark.parametrize(
    "path, expected",
    [
        (
            "M1-2A3-4-1.0 01.5.7",
            [
                ("moveTo", ((1.0, -2.0),)),
                ("arcTo", (3.0, -4.0, -1.0, False, True, (0.5, 0.7))),
                ("endPath", ()),
            ],
        ),
        (
            "M21.58 7.19a2.51 2.51 0 10-1.77-1.77",
            [
                ("moveTo", ((21.58, 7.19),)),
                ("arcTo", (2.51, 2.51, 0.0, True, False, (19.81, 5.42))),
                ("endPath", ()),
            ],
        ),
        (
            "M22 12a25.87 25.87 0 00-.42-4.81",
            [
                ("moveTo", ((22.0, 12.0),)),
                ("arcTo", (25.87, 25.87, 0.0, False, False, (21.58, 7.19))),
                ("endPath", ()),
            ],
        ),
        (
            "M0,0 A1.2 1.2 0 012 15.8",
            [
                ("moveTo", ((0.0, 0.0),)),
                ("arcTo", (1.2, 1.2, 0.0, False, True, (2.0, 15.8))),
                ("endPath", ()),
            ],
        ),
        (
            "M12 7a5 5 0 105 5 5 5 0 00-5-5",
            [
                ("moveTo", ((12.0, 7.0),)),
                ("arcTo", (5.0, 5.0, 0.0, True, False, (17.0, 12.0))),
                ("arcTo", (5.0, 5.0, 0.0, False, False, (12.0, 7.0))),
                ("endPath", ()),
            ],
        )
    ],
)
def test_arc_flags_without_spaces(path, expected):
    """Arc flag arguments are single characters and may be run together
    with the following number (e.g. '01.5.7'); the parser must split them."""
    pen = ArcRecordingPen()
    parse_path(path, pen)
    assert pen.value == expected
@pytest.mark.parametrize(
    "path", ["A", "A0,0,0,0,0,0", "A 0 0 0 0 0 0 0 0 0 0 0 0 0"]
)
def test_invalid_arc_not_enough_args(path):
    """An arc command with an incomplete argument list raises ValueError,
    chained from an underlying 'Not enough arguments' error."""
    pen = ArcRecordingPen()
    with pytest.raises(ValueError, match="Invalid arc command") as e:
        parse_path(path, pen)
    cause = e.value.__cause__
    assert isinstance(cause, ValueError)
    assert "Not enough arguments" in str(cause)
def test_invalid_arc_argument_value():
    """Arc flag parameters must be exactly 0 or 1; anything else raises
    an 'Invalid arc command' ValueError chained from a descriptive cause.

    The two bad-flag cases previously duplicated the whole stanza; they
    now share a single helper.
    """
    def _assert_bad_flag(pathdef, expected_cause):
        # One shared stanza: parse, expect the chained ValueError pair.
        pen = ArcRecordingPen()
        with pytest.raises(ValueError, match="Invalid arc command") as e:
            parse_path(pathdef, pen)
        cause = e.value.__cause__
        assert isinstance(cause, ValueError)
        assert expected_cause in str(cause)

    _assert_bad_flag(
        "M0,0 A0,0,0,2,0,0,0",
        "Invalid argument for 'large-arc-flag' parameter: '2'")
    _assert_bad_flag(
        "M0,0 A0,0,0,0,-2.0,0,0",
        "Invalid argument for 'sweep-flag' parameter: '-2.0'")
| 29.740319 | 91 | 0.381127 |
b8162a52324d6e734cc7828824cbcc76c3306069 | 54 | py | Python | src/friendlypins/utils/__init__.py | TheFriendlyCoder/friendly_pinterest | 5f7f4a70c1681f1177a14d4aa7669797e2f5bdcd | [
"Apache-2.0"
] | 5 | 2018-05-13T06:08:32.000Z | 2020-04-27T13:16:22.000Z | src/friendlypins/utils/__init__.py | TheFriendlyCoder/friendly_pinterest | 5f7f4a70c1681f1177a14d4aa7669797e2f5bdcd | [
"Apache-2.0"
] | 94 | 2018-04-04T01:50:02.000Z | 2021-09-08T02:21:13.000Z | src/friendlypins/utils/__init__.py | TheFriendlyCoder/friendly_pinterest | 5f7f4a70c1681f1177a14d4aa7669797e2f5bdcd | [
"Apache-2.0"
] | null | null | null | """Helper scripts used by various parts of the API"""
| 27 | 53 | 0.722222 |
0db429ccf1fc4a51fab81adef876cee936700975 | 138,567 | py | Python | nova/tests/unit/api/openstack/compute/legacy_v2/test_servers.py | whitepages/nova | 2adbc897cccb2dbc828d261f9a56257884e7d75c | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/api/openstack/compute/legacy_v2/test_servers.py | whitepages/nova | 2adbc897cccb2dbc828d261f9a56257884e7d75c | [
"Apache-2.0"
] | 1 | 2021-03-21T11:39:52.000Z | 2021-03-21T11:39:52.000Z | nova/tests/unit/api/openstack/compute/legacy_v2/test_servers.py | isabella232/nova | 2adbc897cccb2dbc828d261f9a56257884e7d75c | [
"Apache-2.0"
] | 1 | 2021-03-21T11:37:33.000Z | 2021-03-21T11:37:33.000Z | # Copyright 2010-2011 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc.
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import collections
import contextlib
import datetime
import urllib
import uuid
import iso8601
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from six.moves import range
import six.moves.urllib.parse as urlparse
import testtools
import webob
from nova.api.openstack.compute.legacy_v2 import ips
from nova.api.openstack.compute.legacy_v2 import servers
from nova.api.openstack.compute import views
from nova.api.openstack import extensions
from nova.compute import api as compute_api
from nova.compute import flavors
from nova.compute import task_states
from nova.compute import vm_states
from nova import context
from nova import db
from nova.db.sqlalchemy import models
from nova import exception
from nova.image import glance
from nova.network import manager
from nova.network.neutronv2 import api as neutron_api
from nova import objects
from nova.objects import instance as instance_obj
from nova.openstack.common import policy as common_policy
from nova import policy
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_network
from nova.tests.unit.image import fake
from nova.tests.unit import matchers
from nova.tests.unit.objects import test_keypair
from nova import utils as nova_utils
# Global config handle; password_length is read by password-related tests.
CONF = cfg.CONF
CONF.import_opt('password_length', 'nova.utils')
# Well-known UUID shared by most stubbed instances in this module.
FAKE_UUID = fakes.FAKE_UUID
# XML namespaces used when inspecting API responses.
NS = "{http://docs.openstack.org/compute/api/v1.1}"
ATOMNS = "{http://www.w3.org/2005/Atom}"
XPATH_NS = {
    'atom': 'http://www.w3.org/2005/Atom',
    'ns': 'http://docs.openstack.org/compute/api/v1.1'
}
# Maps instance uuid -> integer DB id for the stub DB helpers below.
INSTANCE_IDS = {FAKE_UUID: 1}
FIELDS = instance_obj.INSTANCE_DEFAULT_FIELDS
def fake_gen_uuid():
    """Deterministic uuid-generator stub: always the well-known FAKE_UUID."""
    return FAKE_UUID
def return_servers_empty(context, *args, **kwargs):
    """Compute-API get_all stub reporting that no instances exist."""
    empty_list = objects.InstanceList(objects=[])
    return empty_list
def return_security_group(context, instance_id, security_group_id):
    """No-op stub for db.instance_add_security_group."""
    return None
def instance_update_and_get_original(context, instance_uuid, values,
                                     columns_to_join=None):
    """DB stub: merge *values* into a stubbed instance dict and return
    it twice, mimicking the (old, new) pair of the real DB API."""
    stubbed = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                                  name=values.get('display_name'))
    merged = dict(stubbed, **values)
    return (merged, merged)
def instance_update(context, instance_uuid, values):
    """DB stub: return a stubbed instance dict with *values* merged in."""
    stubbed = fakes.stub_instance(INSTANCE_IDS.get(instance_uuid),
                                  name=values.get('display_name'))
    return dict(stubbed, **values)
def fake_compute_api(cls, req, id):
    """Compute-API stub that reports success for any call."""
    return True
class MockSetAdminPassword(object):
    """Callable stub that records the (instance_id, password) pair it
    was last invoked with, for later assertions."""

    def __init__(self):
        self.instance_id = None
        self.password = None

    def __call__(self, context, instance_id, password):
        # Remember the last call's arguments (context is ignored).
        self.instance_id = instance_id
        self.password = password
class Base64ValidationTest(test.TestCase):
    """Checks input validation in servers.Controller._decode_base64."""

    def setUp(self):
        super(Base64ValidationTest, self).setUp()
        self.ext_mgr = extensions.ExtensionManager()
        self.ext_mgr.extensions = {}
        self.controller = servers.Controller(self.ext_mgr)

    def test_decode_base64(self):
        plain = "A random string"
        decoded = self.controller._decode_base64(base64.b64encode(plain))
        self.assertEqual(decoded, plain)

    def test_decode_base64_binary(self):
        blob = "\x00\x12\x75\x99"
        decoded = self.controller._decode_base64(base64.b64encode(blob))
        self.assertEqual(decoded, blob)

    def test_decode_base64_whitespace(self):
        plain = "A random string"
        encoded = base64.b64encode(plain)
        # whitespace scattered through the payload must be tolerated
        padded = "\n \n%s\t%s\n" % (encoded[:2], encoded[2:])
        decoded = self.controller._decode_base64(padded)
        self.assertEqual(decoded, plain)

    def test_decode_base64_invalid(self):
        # a non-base64 string is rejected by returning None
        invalid = "A random string"
        self.assertIsNone(self.controller._decode_base64(invalid))

    def test_decode_base64_illegal_bytes(self):
        plain = "A random string"
        encoded = base64.b64encode(plain)
        corrupted = ">\x01%s*%s()" % (encoded[:2], encoded[2:])
        self.assertIsNone(self.controller._decode_base64(corrupted))
class NeutronV2Subclass(neutron_api.API):
    """Empty neutron API subclass; verifies subclasses are handled
    the same as the base neutron v2 API class."""
    pass
class ControllerTest(test.TestCase):
    """Shared fixture for servers API controller tests.

    Stubs out the compute API, DB layer, image service and network info
    so the v2 servers controller can be exercised without a real backend.
    """

    def setUp(self):
        super(ControllerTest, self).setUp()
        self.flags(verbose=True, use_ipv6=False)
        fakes.stub_out_rate_limiting(self.stubs)
        fakes.stub_out_key_pair_funcs(self.stubs)
        fake.stub_out_image_service(self.stubs)
        return_server = fakes.fake_compute_get()
        return_servers = fakes.fake_compute_get_all()
        # Server sort keys extension is not enabled in v2 test so no sort
        # data is passed to the instance API and the non-sorted DB API is
        # invoked
        self.stubs.Set(compute_api.API, 'get_all',
                       lambda api, *a, **k: return_servers(*a, **k))
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: return_server(*a, **k))
        self.stubs.Set(db, 'instance_add_security_group',
                       return_security_group)
        self.stubs.Set(db, 'instance_update_and_get_original',
                       instance_update_and_get_original)
        self.ext_mgr = extensions.ExtensionManager()
        self.ext_mgr.extensions = {}
        self.controller = servers.Controller(self.ext_mgr)
        self.ips_controller = ips.Controller()
        # Reload policy rules so every test starts from the defaults.
        policy.reset()
        policy.init()
        fake_network.stub_out_nw_api_get_instance_nw_info(self.stubs)
class ServersControllerTest(ControllerTest):
    def test_can_check_loaded_extensions(self):
        # ext_mgr.is_loaded() reports only registered extensions.
        self.ext_mgr.extensions = {'os-fake': None}
        self.assertTrue(self.controller.ext_mgr.is_loaded('os-fake'))
        self.assertFalse(self.controller.ext_mgr.is_loaded('os-not-loaded'))

    def test_requested_networks_prefix(self):
        # A "br-"-prefixed id is accepted as a network uuid.
        uuid = 'br-00000000-0000-0000-0000-000000000000'
        requested_networks = [{'uuid': uuid}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertIn((uuid, None), res.as_tuples())

    def test_requested_networks_neutronv2_enabled_with_port(self):
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())

    def test_requested_networks_neutronv2_enabled_with_network(self):
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        requested_networks = [{'uuid': network}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(network, None, None, None)], res.as_tuples())

    def test_requested_networks_neutronv2_enabled_with_network_and_port(self):
        # When both are given, the port wins and the network id is dropped.
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())

    def test_requested_networks_with_duplicate_networks(self):
        # duplicate networks are allowed only for nova neutron v2.0
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        requested_networks = [{'uuid': network}, {'uuid': network}]
        self.assertRaises(
            webob.exc.HTTPBadRequest,
            self.controller._get_requested_networks,
            requested_networks)

    def test_requested_networks_with_neutronv2_and_duplicate_networks(self):
        # duplicate networks are allowed only for nova neutron v2.0
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        requested_networks = [{'uuid': network}, {'uuid': network}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(network, None, None, None),
                          (network, None, None, None)], res.as_tuples())

    def test_requested_networks_neutronv2_disabled_with_port(self):
        # Requesting a port without neutron enabled is a bad request.
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port}]
        self.assertRaises(
            webob.exc.HTTPBadRequest,
            self.controller._get_requested_networks,
            requested_networks)

    def test_requested_networks_api_enabled_with_v2_subclass(self):
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'uuid': network, 'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())

    def test_requested_networks_neutronv2_subclass_with_port(self):
        # Subclasses of the neutron v2 API must behave like the base class.
        cls = ('nova.tests.unit.api.openstack.compute.legacy_v2'
               '.test_servers.NeutronV2Subclass')
        self.flags(network_api_class=cls)
        port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
        requested_networks = [{'port': port}]
        res = self.controller._get_requested_networks(requested_networks)
        self.assertEqual([(None, None, port, None)], res.as_tuples())
    def test_get_server_by_uuid(self):
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        res_dict = self.controller.show(req, FAKE_UUID)
        self.assertEqual(res_dict['server']['id'], FAKE_UUID)

    def test_unique_host_id(self):
        """Create two servers with the same host and different
        project_ids and check that the hostId's are unique.
        """
        def return_instance_with_host(context, *args, **kwargs):
            # Fresh random project_id per call -> different hostId hash.
            project_id = str(uuid.uuid4())
            self.assertIn('expected_attrs', kwargs)
            self.assertEqual(['flavor', 'info_cache', 'metadata'],
                             kwargs['expected_attrs'])
            return fakes.stub_instance_obj(context, id=1, uuid=FAKE_UUID,
                                           project_id=project_id,
                                           host='fake_host')

        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        with mock.patch.object(compute_api.API, 'get') as mock_get:
            mock_get.side_effect = return_instance_with_host
            server1 = self.controller.show(req, FAKE_UUID)
            server2 = self.controller.show(req, FAKE_UUID)
            self.assertNotEqual(server1['server']['hostId'],
                                server2['server']['hostId'])
    def _get_server_data_dict(self, uuid, image_bookmark, flavor_bookmark,
                              status="ACTIVE", progress=100):
        # Expected REST representation of the stubbed test server, shared
        # by several show() tests below.
        return {
            "server": {
                "id": uuid,
                "user_id": "fake_user",
                "tenant_id": "fake_project",
                "updated": "2010-11-11T11:00:00Z",
                "created": "2010-10-10T12:00:00Z",
                "progress": progress,
                "name": "server2",
                "status": status,
                "accessIPv4": "",
                "accessIPv6": "",
                "hostId": '',
                "image": {
                    "id": "10",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": image_bookmark,
                        },
                    ],
                },
                "flavor": {
                    "id": "2",
                    "links": [
                        {
                            "rel": "bookmark",
                            "href": flavor_bookmark,
                        },
                    ],
                },
                "addresses": {
                    'test1': [
                        {'version': 4, 'addr': '192.168.1.100'},
                        {'version': 6, 'addr': '2001:db8:0:1::1'}
                    ]
                },
                "metadata": {
                    "seq": "2",
                },
                "links": [
                    {
                        "rel": "self",
                        "href": "http://localhost/v2/fake/servers/%s" % uuid,
                    },
                    {
                        "rel": "bookmark",
                        "href": "http://localhost/fake/servers/%s" % uuid,
                    },
                ],
            }
        }
    def test_get_server_by_id(self):
        self.flags(use_ipv6=True)
        image_bookmark = "http://localhost/fake/images/10"
        flavor_bookmark = "http://localhost/fake/flavors/2"
        uuid = FAKE_UUID
        req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        # Default stub instance is still building: BUILD status, 0 progress.
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark,
                                                     status="BUILD",
                                                     progress=0)
        expected_server['server']['name'] = 'server1'
        expected_server['server']['metadata']['seq'] = '1'
        self.assertThat(res_dict, matchers.DictMatches(expected_server))

    def test_get_server_with_active_status_by_id(self):
        image_bookmark = "http://localhost/fake/images/10"
        flavor_bookmark = "http://localhost/fake/flavors/2"
        new_return_server = fakes.fake_compute_get(
            id=2, vm_state=vm_states.ACTIVE, progress=100)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: new_return_server(*a, **k))
        uuid = FAKE_UUID
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))

    def test_get_server_with_id_image_ref_by_id(self):
        image_ref = "10"
        image_bookmark = "http://localhost/fake/images/10"
        flavor_id = "1"
        flavor_bookmark = "http://localhost/fake/flavors/2"
        new_return_server = fakes.fake_compute_get(
            id=2, vm_state=vm_states.ACTIVE, image_ref=image_ref,
            flavor_id=flavor_id, progress=100)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: new_return_server(*a, **k))
        uuid = FAKE_UUID
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % uuid)
        res_dict = self.controller.show(req, uuid)
        expected_server = self._get_server_data_dict(uuid,
                                                     image_bookmark,
                                                     flavor_bookmark)
        self.assertThat(res_dict, matchers.DictMatches(expected_server))
    def test_get_server_addresses_from_cache(self):
        pub0 = ('172.19.0.1', '172.19.0.2',)
        pub1 = ('1.2.3.4',)
        pub2 = ('b33f::fdee:ddff:fecc:bbaa',)
        priv0 = ('192.168.0.3', '192.168.0.4',)

        def _ip(ip):
            # Minimal fixed-ip entry as stored in the network info cache.
            return {'address': ip, 'type': 'fixed'}

        nw_cache = [
            {'address': 'aa:aa:aa:aa:aa:aa',
             'id': 1,
             'network': {'bridge': 'br0',
                         'id': 1,
                         'label': 'public',
                         'subnets': [{'cidr': '172.19.0.0/24',
                                      'ips': [_ip(ip) for ip in pub0]},
                                     {'cidr': '1.2.3.0/16',
                                      'ips': [_ip(ip) for ip in pub1]},
                                     {'cidr': 'b33f::/64',
                                      'ips': [_ip(ip) for ip in pub2]}]}},
            {'address': 'bb:bb:bb:bb:bb:bb',
             'id': 2,
             'network': {'bridge': 'br1',
                         'id': 2,
                         'label': 'private',
                         'subnets': [{'cidr': '192.168.0.0/24',
                                      'ips': [_ip(ip) for ip in priv0]}]}}]
        return_server = fakes.fake_compute_get(nw_cache=nw_cache)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: return_server(*a, **k))
        req = fakes.HTTPRequest.blank('/fake/servers/%s/ips' % FAKE_UUID)
        res_dict = self.ips_controller.index(req, FAKE_UUID)
        expected = {
            'addresses': {
                'private': [
                    {'version': 4, 'addr': '192.168.0.3'},
                    {'version': 4, 'addr': '192.168.0.4'},
                ],
                'public': [
                    {'version': 4, 'addr': '172.19.0.1'},
                    {'version': 4, 'addr': '172.19.0.2'},
                    {'version': 4, 'addr': '1.2.3.4'},
                    {'version': 6, 'addr': 'b33f::fdee:ddff:fecc:bbaa'},
                ],
            },
        }
        self.assertThat(res_dict, matchers.DictMatches(expected))
        # Make sure we kept the addresses in order
        self.assertIsInstance(res_dict['addresses'], collections.OrderedDict)
        labels = [vif['network']['label'] for vif in nw_cache]
        for index, label in enumerate(res_dict['addresses'].keys()):
            self.assertEqual(label, labels[index])

    def test_get_server_addresses_nonexistent_network(self):
        url = '/fake/servers/%s/ips/network_0' % FAKE_UUID
        req = fakes.HTTPRequest.blank(url)
        self.assertRaises(webob.exc.HTTPNotFound, self.ips_controller.show,
                          req, FAKE_UUID, 'network_0')

    def test_get_server_addresses_nonexistent_server(self):
        def fake_instance_get(*args, **kwargs):
            raise exception.InstanceNotFound(instance_id='fake')

        self.stubs.Set(compute_api.API, 'get', fake_instance_get)
        server_id = str(uuid.uuid4())
        req = fakes.HTTPRequest.blank('/fake/servers/%s/ips' % server_id)
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.ips_controller.index, req, server_id)
def test_get_server_list_empty(self):
self.stubs.Set(compute_api.API, 'get_all',
return_servers_empty)
req = fakes.HTTPRequest.blank('/fake/servers')
res_dict = self.controller.index(req)
num_servers = len(res_dict['servers'])
self.assertEqual(0, num_servers)
def test_get_server_list_with_reservation_id(self):
req = fakes.HTTPRequest.blank('/fake/servers?reservation_id=foo')
res_dict = self.controller.index(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list_with_reservation_id_empty(self):
req = fakes.HTTPRequest.blank('/fake/servers/detail?'
'reservation_id=foo')
res_dict = self.controller.detail(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
def test_get_server_list_with_reservation_id_details(self):
req = fakes.HTTPRequest.blank('/fake/servers/detail?'
'reservation_id=foo')
res_dict = self.controller.detail(req)
i = 0
for s in res_dict['servers']:
self.assertEqual(s.get('name'), 'server%d' % (i + 1))
i += 1
    def test_get_server_list(self):
        """Index returns all five fake servers with id, name and the
        self/bookmark links, but no image detail (index is the brief view).
        """
        req = fakes.HTTPRequest.blank('/fake/servers')
        res_dict = self.controller.index(req)
        self.assertEqual(len(res_dict['servers']), 5)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertIsNone(s.get('image', None))
            # 'self' links are versioned (/v2/), bookmark links are not.
            expected_links = [
                {
                    "rel": "self",
                    "href": "http://localhost/v2/fake/servers/%s" % s['id'],
                },
                {
                    "rel": "bookmark",
                    "href": "http://localhost/fake/servers/%s" % s['id'],
                },
            ]
            self.assertEqual(s['links'], expected_links)
    def test_get_servers_with_limit(self):
        """?limit=3 truncates the listing and adds a 'next' link whose
        query string repeats the limit and marks the last returned server.
        """
        req = fakes.HTTPRequest.blank('/fake/servers?limit=3')
        res_dict = self.controller.index(req)
        servers = res_dict['servers']
        self.assertEqual([s['id'] for s in servers],
                [fakes.get_fake_uuid(i) for i in range(len(servers))])
        servers_links = res_dict['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v2/fake/servers', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        # marker is the uuid of the third (index 2) server returned.
        expected_params = {'limit': ['3'],
                           'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected_params))
def test_get_servers_with_limit_bad_value(self):
req = fakes.HTTPRequest.blank('/fake/servers?limit=aaa')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_server_details_empty(self):
self.stubs.Set(compute_api.API, 'get_all',
return_servers_empty)
req = fakes.HTTPRequest.blank('/fake/servers/detail')
res_dict = self.controller.detail(req)
num_servers = len(res_dict['servers'])
self.assertEqual(0, num_servers)
    def test_get_server_details_with_limit(self):
        """?limit=3 on the detail view truncates and adds a 'next' link
        carrying the same limit plus a marker at the last returned server.
        """
        req = fakes.HTTPRequest.blank('/fake/servers/detail?limit=3')
        res = self.controller.detail(req)
        servers = res['servers']
        self.assertEqual([s['id'] for s in servers],
                [fakes.get_fake_uuid(i) for i in range(len(servers))])
        servers_links = res['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v2/fake/servers/detail', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected = {'limit': ['3'], 'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected))
def test_get_server_details_with_limit_bad_value(self):
req = fakes.HTTPRequest.blank('/fake/servers/detail?limit=aaa')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.detail, req)
    def test_get_server_details_with_limit_and_other_params(self):
        """The generated 'next' link must preserve limit, arbitrary
        filters, and sort parameters from the original request.
        """
        req = fakes.HTTPRequest.blank('/fake/servers/detail'
                                      '?limit=3&blah=2:t'
                                      '&sort_key=id1&sort_dir=asc')
        res = self.controller.detail(req)
        servers = res['servers']
        self.assertEqual([s['id'] for s in servers],
                [fakes.get_fake_uuid(i) for i in range(len(servers))])
        servers_links = res['servers_links']
        self.assertEqual(servers_links[0]['rel'], 'next')
        # Retrieve the parameters from the next link, they should contain the
        # same limit, filter, and sort information as the original request as
        # well as a marker; this ensures that the caller can simply use the
        # "next" link and that they do not need to manually insert the limit
        # and sort information.
        href_parts = urlparse.urlparse(servers_links[0]['href'])
        self.assertEqual('/v2/fake/servers/detail', href_parts.path)
        params = urlparse.parse_qs(href_parts.query)
        expected = {'limit': ['3'], 'blah': ['2:t'],
                    'sort_key': ['id1'], 'sort_dir': ['asc'],
                    'marker': [fakes.get_fake_uuid(2)]}
        self.assertThat(params, matchers.DictMatches(expected))
def test_get_servers_with_too_big_limit(self):
req = fakes.HTTPRequest.blank('/fake/servers?limit=30')
res_dict = self.controller.index(req)
self.assertNotIn('servers_links', res_dict)
def test_get_servers_with_bad_limit(self):
req = fakes.HTTPRequest.blank('/fake/servers?limit=asdf')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_get_servers_with_marker(self):
url = '/v2/fake/servers?marker=%s' % fakes.get_fake_uuid(2)
req = fakes.HTTPRequest.blank(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ["server4", "server5"])
def test_get_servers_with_limit_and_marker(self):
url = '/v2/fake/servers?limit=2&marker=%s' % fakes.get_fake_uuid(1)
req = fakes.HTTPRequest.blank(url)
servers = self.controller.index(req)['servers']
self.assertEqual([s['name'] for s in servers], ['server3', 'server4'])
def test_get_servers_with_bad_marker(self):
req = fakes.HTTPRequest.blank('/fake/servers?limit=2&marker=asdf')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
    @mock.patch('nova.compute.api.API.get_all')
    def test_get_servers_with_sorting_enabled(self, mock_compute_get_all):
        '''Sorting params honored if os-server-sort-keys is loaded.'''
        self.ext_mgr.extensions = {'os-server-sort-keys': 'fake'}
        req = fakes.HTTPRequest.blank('/fake/servers'
                                      '?sort_key=id1&sort_dir=asc')
        self.controller.index(req)
        self.assertEqual(mock_compute_get_all.call_count, 1)
        # Ensure that sort_keys and sort_dirs were passed through correctly
        kwargs = mock_compute_get_all.call_args[1]
        self.assertEqual(['id1'], kwargs['sort_keys'])
        self.assertEqual(['asc'], kwargs['sort_dirs'])
    @mock.patch('nova.compute.api.API.get_all')
    def test_get_servers_with_sorting_disabled(self, mock_compute_get_all):
        '''Sorting params ignored if os-server-sort-keys is not loaded.'''
        self.ext_mgr.extensions = {}
        req = fakes.HTTPRequest.blank('/fake/servers'
                                      '?sort_key=id1&sort_dir=asc')
        self.controller.index(req)
        self.assertEqual(mock_compute_get_all.call_count, 1)
        # Ensure that sort_keys and sort_dirs are both None
        kwargs = mock_compute_get_all.call_args[1]
        self.assertIsNone(kwargs['sort_keys'])
        self.assertIsNone(kwargs['sort_dirs'])
    def test_get_servers_with_bad_option(self):
        """Unknown query options are stripped for non-admin requests and
        the listing still succeeds.
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/fake/servers?unknownoption=whee')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_image(self):
        """?image is passed through to compute get_all as a search opt."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('image', search_opts)
            self.assertEqual(search_opts['image'], '12345')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/fake/servers?image=12345')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_all_tenants_param_normal(self):
        """A bare ?all_tenants for an admin drops the project_id scoping."""
        def fake_get_all(context, search_opts=None, **kwargs):
            self.assertNotIn('project_id', search_opts)
            return [fakes.stub_instance_obj(100)]
        req = fakes.HTTPRequest.blank('/fake/servers?all_tenants',
                                      use_admin_context=True)
        with mock.patch.object(compute_api.API, 'get_all') as mock_get:
            mock_get.side_effect = fake_get_all
            servers = self.controller.index(req)['servers']
            self.assertEqual(len(servers), 1)
def test_all_tenants_param_one(self):
def fake_get_all(api, context, search_opts=None, **kwargs):
return [fakes.stub_instance(100)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/fake/servers?all_tenants=1',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
def test_all_tenants_param_zero(self):
def fake_get_all(api, context, search_opts=None, **kwargs):
self.assertNotIn('all_tenants', search_opts)
return [fakes.stub_instance(100)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/fake/servers?all_tenants=0',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
def test_all_tenants_param_false(self):
def fake_get_all(api, context, search_opts=None, **kwargs):
self.assertNotIn('all_tenants', search_opts)
return [fakes.stub_instance(100)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/fake/servers?all_tenants=false',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
def test_all_tenants_param_invalid(self):
def fake_get_all(api, context, search_opts=None, **kwargs):
self.assertNotIn('all_tenants', search_opts)
return [fakes.stub_instance(100)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/fake/servers?all_tenants=xxx',
use_admin_context=True)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.index, req)
def test_admin_restricted_tenant(self):
def fake_get_all(api, context, search_opts=None, **kwargs):
self.assertIsNotNone(search_opts)
self.assertEqual(search_opts['project_id'], 'fake')
return [fakes.stub_instance(100)]
self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
req = fakes.HTTPRequest.blank('/fake/servers',
use_admin_context=True)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 1)
    def test_all_tenants_pass_policy(self):
        """all_tenants succeeds when policy grants compute:get_all_tenants
        to this project; project_id scoping is dropped.
        """
        def fake_get_all(api, context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            self.assertNotIn('project_id', search_opts)
            self.assertTrue(context.is_admin)
            return [fakes.stub_instance(100)]
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        rules = {
            "compute:get_all_tenants":
                common_policy.parse_rule("project_id:fake"),
            "compute:get_all":
                common_policy.parse_rule("project_id:fake"),
        }
        policy.set_rules(rules)
        req = fakes.HTTPRequest.blank('/fake/servers?all_tenants=1')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
    def test_all_tenants_fail_policy(self):
        """all_tenants raises PolicyNotAuthorized when the
        compute:get_all_tenants rule denies this project.
        """
        def fake_get_all(api, context, search_opts=None, **kwargs):
            self.assertIsNotNone(search_opts)
            return [fakes.stub_instance(100)]
        rules = {
            "compute:get_all_tenants":
                common_policy.parse_rule("project_id:non_fake"),
            "compute:get_all":
                common_policy.parse_rule("project_id:fake"),
        }
        policy.set_rules(rules)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/fake/servers?all_tenants=1')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller.index, req)
    def test_get_servers_allows_flavor(self):
        """?flavor is passed through to compute get_all as a search opt."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('flavor', search_opts)
            # flavor is an integer ID, but arrives as its string form
            self.assertEqual(search_opts['flavor'], '12345')
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/fake/servers?flavor=12345')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_with_bad_flavor(self):
req = fakes.HTTPRequest.blank('/fake/servers?flavor=abcde')
with mock.patch.object(compute_api.API, 'get_all') as mock_get:
mock_get.return_value = objects.InstanceList(objects=[])
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_server_details_with_bad_flavor(self):
req = fakes.HTTPRequest.blank('/fake/servers/detail?flavor=abcde')
with mock.patch.object(compute_api.API, 'get_all') as mock_get:
mock_get.return_value = objects.InstanceList(objects=[])
servers = self.controller.detail(req)['servers']
self.assertThat(servers, testtools.matchers.HasLength(0))
    def test_get_servers_allows_status(self):
        """?status=active is translated into vm_state=[ACTIVE]."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], [vm_states.ACTIVE])
            db_list = [fakes.stub_instance(100, uuid=server_uuid)]
            return instance_obj._make_instance_list(
                context, objects.InstanceList(), db_list, FIELDS)
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/fake/servers?status=active')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    @mock.patch.object(compute_api.API, 'get_all')
    def test_get_servers_allows_multi_status(self, get_all_mock):
        """Repeated ?status values are OR-ed into one vm_state list."""
        server_uuid0 = str(uuid.uuid4())
        server_uuid1 = str(uuid.uuid4())
        db_list = [fakes.stub_instance(100, uuid=server_uuid0),
                   fakes.stub_instance(101, uuid=server_uuid1)]
        get_all_mock.return_value = instance_obj._make_instance_list(
            context, instance_obj.InstanceList(), db_list, FIELDS)
        req = fakes.HTTPRequest.blank(
            '/fake/servers?status=active&status=error')
        servers = self.controller.index(req)['servers']
        self.assertEqual(2, len(servers))
        self.assertEqual(server_uuid0, servers[0]['id'])
        self.assertEqual(server_uuid1, servers[1]['id'])
        expected_search_opts = dict(deleted=False,
                                    vm_state=[vm_states.ACTIVE,
                                              vm_states.ERROR],
                                    project_id='fake')
        get_all_mock.assert_called_once_with(mock.ANY,
                search_opts=expected_search_opts, limit=mock.ANY,
                marker=mock.ANY, want_objects=mock.ANY,
                expected_attrs=None,
                sort_keys=mock.ANY, sort_dirs=mock.ANY)
    @mock.patch.object(compute_api.API, 'get_all')
    def test_get_servers_system_metadata_filter(self, get_all_mock):
        """An admin request may filter on a URL-quoted system_metadata
        JSON blob, which is forwarded verbatim in the search opts.
        """
        server_uuid0 = str(uuid.uuid4())
        server_uuid1 = str(uuid.uuid4())
        expected_system_metadata = u'{"some_value": "some_key"}'
        db_list = [fakes.stub_instance(100, uuid=server_uuid0),
                   fakes.stub_instance(101, uuid=server_uuid1)]
        get_all_mock.return_value = instance_obj._make_instance_list(
            context, instance_obj.InstanceList(), db_list, FIELDS)
        req = fakes.HTTPRequest.blank(
            '/fake/servers?status=active&status=error&system_metadata=' +
            urllib.quote(expected_system_metadata),
            use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(2, len(servers))
        self.assertEqual(server_uuid0, servers[0]['id'])
        self.assertEqual(server_uuid1, servers[1]['id'])
        expected_search_opts = dict(
            deleted=False, vm_state=[vm_states.ACTIVE, vm_states.ERROR],
            system_metadata=expected_system_metadata, project_id='fake')
        get_all_mock.assert_called_once_with(mock.ANY,
                search_opts=expected_search_opts, limit=mock.ANY,
                marker=mock.ANY, want_objects=mock.ANY,
                expected_attrs=None,
                sort_keys=mock.ANY, sort_dirs=mock.ANY)
@mock.patch.object(compute_api.API, 'get_all')
def test_get_servers_flavor_not_found(self, get_all_mock):
get_all_mock.side_effect = exception.FlavorNotFound(flavor_id=1)
req = fakes.HTTPRequest.blank(
'/fake/servers?status=active&flavor=abc')
servers = self.controller.index(req)['servers']
self.assertEqual(0, len(servers))
    @mock.patch.object(compute_api.API, 'get_all')
    def test_get_servers_allows_invalid_status(self, get_all_mock):
        """An unknown status value is silently dropped when a valid one
        is also supplied; only the valid vm_state reaches get_all.
        """
        server_uuid0 = str(uuid.uuid4())
        server_uuid1 = str(uuid.uuid4())
        db_list = [fakes.stub_instance(100, uuid=server_uuid0),
                   fakes.stub_instance(101, uuid=server_uuid1)]
        get_all_mock.return_value = instance_obj._make_instance_list(
            context, instance_obj.InstanceList(), db_list, FIELDS)
        req = fakes.HTTPRequest.blank(
            '/fake/servers?status=active&status=invalid')
        servers = self.controller.index(req)['servers']
        self.assertEqual(2, len(servers))
        self.assertEqual(server_uuid0, servers[0]['id'])
        self.assertEqual(server_uuid1, servers[1]['id'])
        expected_search_opts = dict(deleted=False,
                                    vm_state=[vm_states.ACTIVE],
                                    project_id='fake')
        get_all_mock.assert_called_once_with(mock.ANY,
                search_opts=expected_search_opts, limit=mock.ANY,
                marker=mock.ANY, want_objects=mock.ANY,
                expected_attrs=None,
                sort_keys=mock.ANY, sort_dirs=mock.ANY)
    def test_get_servers_allows_task_status(self):
        """?status=reboot expands to the three reboot task states."""
        server_uuid = str(uuid.uuid4())
        task_state = task_states.REBOOTING
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('task_state', search_opts)
            self.assertEqual([task_states.REBOOT_PENDING,
                              task_states.REBOOT_STARTED,
                              task_states.REBOOTING],
                             search_opts['task_state'])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid,
                                                 task_state=task_state)])
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/servers?status=reboot')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_resize_status(self):
        """?status=resize maps to vm_state [ACTIVE, STOPPED]."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'],
                             [vm_states.ACTIVE, vm_states.STOPPED])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/fake/servers?status=resize')
        servers = self.controller.detail(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_invalid_status(self):
# Test getting servers by invalid status.
req = fakes.HTTPRequest.blank('/fake/servers?status=baloney',
use_admin_context=False)
servers = self.controller.index(req)['servers']
self.assertEqual(len(servers), 0)
def test_get_servers_deleted_status_as_user(self):
req = fakes.HTTPRequest.blank('/fake/servers?status=deleted',
use_admin_context=False)
self.assertRaises(webob.exc.HTTPForbidden,
self.controller.detail, req)
    def test_get_servers_deleted_status_as_admin(self):
        """Admins may filter on status=deleted (vm_state=['deleted'])."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIn('vm_state', search_opts)
            self.assertEqual(search_opts['vm_state'], ['deleted'])
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/fake/servers?status=deleted',
                                      use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    @mock.patch.object(compute_api.API, 'get_all')
    def test_get_servers_deleted_filter_str_to_bool(self, mock_get_all):
        """A string ?deleted=true is coerced to boolean True in the
        search opts handed to get_all.
        """
        server_uuid = str(uuid.uuid4())
        db_list = objects.InstanceList(
            objects=[fakes.stub_instance_obj(100, uuid=server_uuid,
                                             vm_state='deleted')])
        mock_get_all.return_value = db_list
        req = fakes.HTTPRequest.blank('/fake/servers?deleted=true',
                                      use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(server_uuid, servers[0]['id'])
        # Assert that 'deleted' filter value is converted to boolean
        # while calling get_all() method.
        expected_search_opts = {'deleted': True, 'project_id': 'fake'}
        mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=expected_search_opts, limit=mock.ANY,
            marker=mock.ANY, want_objects=mock.ANY,
            expected_attrs=['flavor', 'info_cache', 'metadata'],
            sort_keys=mock.ANY, sort_dirs=mock.ANY)
    @mock.patch.object(compute_api.API, 'get_all')
    def test_get_servers_deleted_filter_invalid_str(self, mock_get_all):
        """An unparseable ?deleted value falls back to boolean False."""
        server_uuid = str(uuid.uuid4())
        db_list = objects.InstanceList(
            objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        mock_get_all.return_value = db_list
        req = fakes.HTTPRequest.blank('/fake/servers?deleted=abc',
                                      use_admin_context=True)
        servers = self.controller.detail(req)['servers']
        self.assertEqual(1, len(servers))
        self.assertEqual(server_uuid, servers[0]['id'])
        # Assert that invalid 'deleted' filter value is converted to boolean
        # False while calling get_all() method.
        expected_search_opts = {'deleted': False, 'project_id': 'fake'}
        mock_get_all.assert_called_once_with(
            mock.ANY, search_opts=expected_search_opts, limit=mock.ANY,
            marker=mock.ANY, want_objects=mock.ANY,
            expected_attrs=['flavor', 'info_cache', 'metadata'],
            sort_keys=mock.ANY, sort_dirs=mock.ANY)
    def test_get_servers_allows_name(self):
        """?name is forwarded verbatim ('whee.*' kept unescaped)."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('name', search_opts)
            self.assertEqual(search_opts['name'], 'whee.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/fake/servers?name=whee.*')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_changes_since(self):
        """changes-since is parsed to an aware UTC datetime and does not
        force a 'deleted' filter into the search opts.
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('changes-since', search_opts)
            changes_since = datetime.datetime(2011, 1, 24, 17, 8, 1,
                                              tzinfo=iso8601.iso8601.UTC)
            self.assertEqual(search_opts['changes-since'], changes_since)
            self.assertNotIn('deleted', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        params = 'changes-since=2011-01-24T17:08:01Z'
        req = fakes.HTTPRequest.blank('/fake/servers?%s' % params)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
def test_get_servers_allows_changes_since_bad_value(self):
params = 'changes-since=asdf'
req = fakes.HTTPRequest.blank('/fake/servers?%s' % params)
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.index, req)
    def test_get_servers_admin_filters_as_user(self):
        """Test getting servers by admin-only or unknown options when
        context is not admin. Make sure the admin and unknown options
        are stripped before they get to compute_api.get_all()
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            self.assertIn('ip', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Unknown/admin-only options must have been stripped
            self.assertNotIn('unknown_option', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
        req = fakes.HTTPRequest.blank('/fake/servers?%s' % query_str)
        res = self.controller.index(req)
        servers = res['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_admin_options_as_admin(self):
        """Test getting servers by admin-only or unknown options when
        context is admin. All options should be passed
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            # Allowed by user
            self.assertIn('name', search_opts)
            # OSAPI converts status to vm_state
            self.assertIn('vm_state', search_opts)
            # Allowed only by admins with admin API on
            self.assertIn('ip', search_opts)
            self.assertIn('unknown_option', search_opts)
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        query_str = "name=foo&ip=10.*&status=active&unknown_option=meow"
        req = fakes.HTTPRequest.blank('/fake/servers?%s' % query_str,
                                      use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_allows_ip(self):
        """Test getting servers by ip."""
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip', search_opts)
            # The raw filter string reaches get_all unmodified.
            self.assertEqual(search_opts['ip'], '10\..*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/fake/servers?ip=10\..*')
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_servers_admin_allows_ip6(self):
        """Test getting servers by ip6 with admin_api enabled and
        admin context
        """
        server_uuid = str(uuid.uuid4())
        def fake_get_all(compute_self, context, search_opts=None,
                         limit=None, marker=None, want_objects=False,
                         sort_keys=None, sort_dirs=None,
                         expected_attrs=None):
            self.assertIsNotNone(search_opts)
            self.assertIn('ip6', search_opts)
            # The raw filter string reaches get_all unmodified.
            self.assertEqual(search_opts['ip6'], 'ffff.*')
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(100, uuid=server_uuid)])
        self.stubs.Set(compute_api.API, 'get_all', fake_get_all)
        req = fakes.HTTPRequest.blank('/fake/servers?ip6=ffff.*',
                                      use_admin_context=True)
        servers = self.controller.index(req)['servers']
        self.assertEqual(len(servers), 1)
        self.assertEqual(servers[0]['id'], server_uuid)
    def test_get_all_server_details(self):
        """The detail view includes hostId, image, flavor bookmark links,
        status, and per-server metadata for every server.
        """
        expected_flavor = {
            "id": "2",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/fake/flavors/2',
                },
            ],
        }
        expected_image = {
            "id": "10",
            "links": [
                {
                    "rel": "bookmark",
                    "href": 'http://localhost/fake/images/10',
                },
            ],
        }
        req = fakes.HTTPRequest.blank('/fake/servers/detail')
        res_dict = self.controller.detail(req)
        for i, s in enumerate(res_dict['servers']):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], '')
            self.assertEqual(s['name'], 'server%d' % (i + 1))
            self.assertEqual(s['image'], expected_image)
            self.assertEqual(s['flavor'], expected_flavor)
            self.assertEqual(s['status'], 'BUILD')
            self.assertEqual(s['metadata']['seq'], str(i + 1))
    def test_get_all_server_details_with_host(self):
        """We want to make sure that if two instances are on the same host,
        then they return the same hostId. If two instances are on different
        hosts, they should return different hostId's. In this test, there
        are 5 instances - 2 on one host and 3 on another.
        """
        def return_servers_with_host(*args, **kwargs):
            return objects.InstanceList(
                objects=[fakes.stub_instance_obj(None,
                                                 id=i + 1, user_id='fake',
                                                 project_id='fake',
                                                 host=i % 2,  # alternate between two hosts
                                                 uuid=fakes.get_fake_uuid(i))
                         for i in range(5)])
        self.stubs.Set(self.controller.compute_api, 'get_all',
                       return_servers_with_host)
        req = fakes.HTTPRequest.blank('/fake/servers/detail')
        res_dict = self.controller.detail(req)
        server_list = res_dict['servers']
        host_ids = [server_list[0]['hostId'], server_list[1]['hostId']]
        self.assertTrue(host_ids[0] and host_ids[1])
        self.assertNotEqual(host_ids[0], host_ids[1])
        for i, s in enumerate(server_list):
            self.assertEqual(s['id'], fakes.get_fake_uuid(i))
            self.assertEqual(s['hostId'], host_ids[i % 2])
            self.assertEqual(s['name'], 'server%d' % (i + 1))
class ServersControllerUpdateTest(ControllerTest):
def _get_request(self, body=None, content_type='json', options=None):
if options:
self.stubs.Set(compute_api.API, 'get',
fakes.fake_compute_get(**options))
req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = 'application/%s' % content_type
req.body = jsonutils.dumps(body)
return req
def test_update_server_all_attributes(self):
body = {'server': {
'name': 'server_test',
'accessIPv4': '0.0.0.0',
'accessIPv6': 'beef::0123',
}}
req = self._get_request(body, {'name': 'server_test',
'access_ipv4': '0.0.0.0',
'access_ipv6': 'beef::0123'})
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
self.assertEqual(res_dict['server']['accessIPv4'], '0.0.0.0')
self.assertEqual(res_dict['server']['accessIPv6'], 'beef::123')
def test_update_server_invalid_xml_raises_lookup(self):
body = """<?xml version="1.0" encoding="TF-8"?>
<metadata
xmlns="http://docs.openstack.org/compute/api/v1.1"
key="Label"></meta>"""
req = self._get_request(body, content_type='xml')
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 400)
def test_update_server_invalid_xml_raises_expat(self):
body = """<?xml version="1.0" encoding="UTF-8"?>
<metadata
xmlns="http://docs.openstack.org/compute/api/v1.1"
key="Label"></meta>"""
req = self._get_request(body, content_type='xml')
res = req.get_response(fakes.wsgi_app())
self.assertEqual(res.status_int, 400)
def test_update_server_name(self):
body = {'server': {'name': 'server_test'}}
req = self._get_request(body, {'name': 'server_test'})
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
def test_update_server_name_too_long(self):
body = {'server': {'name': 'x' * 256}}
req = self._get_request(body, {'name': 'server_test'})
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, FAKE_UUID, body)
def test_update_server_name_all_blank_spaces(self):
body = {'server': {'name': ' ' * 64}}
req = self._get_request(body, {'name': 'server_test'})
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
req, FAKE_UUID, body)
def test_update_server_personality(self):
body = {
'server': {
'personality': []
}
}
req = self._get_request(body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update, req, FAKE_UUID, body)
def test_update_server_adminPass_ignored(self):
inst_dict = dict(name='server_test', adminPass='bacon')
body = dict(server=inst_dict)
def server_update(context, id, params):
filtered_dict = {
'display_name': 'server_test',
}
self.assertEqual(params, filtered_dict)
filtered_dict['uuid'] = id
return filtered_dict
self.stubs.Set(db, 'instance_update', server_update)
# FIXME (comstud)
# self.stubs.Set(db, 'instance_get',
# return_server_with_attributes(name='server_test'))
req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
req.method = 'PUT'
req.content_type = "application/json"
req.body = jsonutils.dumps(body)
res_dict = self.controller.update(req, FAKE_UUID, body)
self.assertEqual(res_dict['server']['id'], FAKE_UUID)
self.assertEqual(res_dict['server']['name'], 'server_test')
def test_update_server_not_found(self):
    # When the compute API lookup raises InstanceNotFound, the
    # controller must translate it to a 404.
    def raise_not_found(*args, **kwargs):
        raise exception.InstanceNotFound(instance_id='fake')

    self.stubs.Set(compute_api.API, 'get', raise_not_found)
    body = {'server': {'name': 'server_test'}}
    req = self._get_request(body)
    self.assertRaises(webob.exc.HTTPNotFound,
                      self.controller.update, req, FAKE_UUID, body)
def test_update_server_not_found_on_update(self):
    # InstanceNotFound raised by the DB update itself (not the initial
    # lookup) must also surface as a 404.
    def raise_not_found(*args, **kwargs):
        raise exception.InstanceNotFound(instance_id='fake')

    self.stubs.Set(db, 'instance_update_and_get_original', raise_not_found)
    body = {'server': {'name': 'server_test'}}
    req = self._get_request(body)
    self.assertRaises(webob.exc.HTTPNotFound,
                      self.controller.update, req, FAKE_UUID, body)
def test_update_server_policy_fail(self):
    # Restrict compute:update to admins; a non-admin request context
    # must then be rejected with PolicyNotAuthorized.
    policy.set_rules(
        {'compute:update': common_policy.parse_rule('role:admin')})
    body = {'server': {'name': 'server_test'}}
    req = self._get_request(body, {'name': 'server_test'})
    self.assertRaises(exception.PolicyNotAuthorized,
                      self.controller.update, req, FAKE_UUID, body)
class ServersControllerDeleteTest(ControllerTest):
    """Tests for DELETE /servers/<uuid> handling in the servers API."""

    def setUp(self):
        super(ServersControllerDeleteTest, self).setUp()
        self.server_delete_called = False

        # Stand-in for compute_api.API.delete: records that it ran and
        # simulates a missing instance for the sentinel uuid.
        def fake_delete(api, context, instance):
            if instance.uuid == 'non-existent-uuid':
                raise exception.InstanceNotFound(instance_id=instance.uuid)
            self.server_delete_called = True

        self.stubs.Set(compute_api.API, 'delete', fake_delete)

    def _create_delete_request(self, uuid):
        # Zero instance quota: deletes must still be permitted.
        fakes.stub_out_instance_quota(self.stubs, 0, 10)
        req = fakes.HTTPRequest.blank('/v2/fake/servers/%s' % uuid)
        req.method = 'DELETE'
        return req

    def _delete_server_instance(self, uuid=FAKE_UUID):
        # Issue a DELETE against an ACTIVE instance with the given uuid.
        req = self._create_delete_request(uuid)
        fake_get = fakes.fake_compute_get(uuid=uuid,
                                          vm_state=vm_states.ACTIVE)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: fake_get(*a, **k))
        self.controller.delete(req, uuid)

    def test_delete_server_instance(self):
        self._delete_server_instance()
        self.assertTrue(self.server_delete_called)

    def test_delete_server_instance_not_found(self):
        # Deleting an unknown instance surfaces as 404.
        self.assertRaises(webob.exc.HTTPNotFound,
                          self._delete_server_instance,
                          uuid='non-existent-uuid')

    def test_delete_locked_server(self):
        # Actions against a locked server surface as 409 Conflict.
        req = self._create_delete_request(FAKE_UUID)
        self.stubs.Set(compute_api.API, 'soft_delete',
                       fakes.fake_actions_to_locked_server)
        self.stubs.Set(compute_api.API, 'delete',
                       fakes.fake_actions_to_locked_server)
        self.assertRaises(webob.exc.HTTPConflict, self.controller.delete,
                          req, FAKE_UUID)

    def test_delete_server_instance_while_building(self):
        fakes.stub_out_instance_quota(self.stubs, 0, 10)
        request = self._create_delete_request(FAKE_UUID)
        self.controller.delete(request, FAKE_UUID)
        self.assertTrue(self.server_delete_called)

    def test_delete_server_instance_while_deleting_host_up(self):
        req = self._create_delete_request(FAKE_UUID)
        return_server = fakes.fake_compute_get(
            vm_state=vm_states.ACTIVE,
            task_state=task_states.DELETING,
            host='fake_host')
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: return_server(*a, **k))
        self.stubs.Set(objects.Instance, 'save',
                       lambda *args, **kwargs: None)

        # A fresh heartbeat means the compute host is considered up.
        @classmethod
        def fake_get_by_compute_host(cls, context, host):
            return {'updated_at': timeutils.utcnow()}
        self.stubs.Set(objects.Service, 'get_by_compute_host',
                       fake_get_by_compute_host)
        self.controller.delete(req, FAKE_UUID)

    def test_delete_server_instance_while_deleting_host_down(self):
        fake_network.stub_out_network_cleanup(self.stubs)
        req = self._create_delete_request(FAKE_UUID)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
                                               task_state=task_states.DELETING,
                                               host='fake_host'))
        self.stubs.Set(objects.Instance, 'save',
                       lambda *args, **kwargs: None)

        # An ancient heartbeat means the compute host is considered down.
        @classmethod
        def fake_get_by_compute_host(cls, context, host):
            return {'updated_at': datetime.datetime.min}
        self.stubs.Set(objects.Service, 'get_by_compute_host',
                       fake_get_by_compute_host)
        self.controller.delete(req, FAKE_UUID)
        # Delete request would be ignored, because it's been accepted before
        # but since the host is down, api should remove the instance anyway.
        self.assertTrue(self.server_delete_called)

    def test_delete_server_instance_while_resize(self):
        req = self._create_delete_request(FAKE_UUID)
        self.stubs.Set(db, 'instance_get_by_uuid',
                       fakes.fake_instance_get(vm_state=vm_states.ACTIVE,
                                               task_state=task_states.RESIZE_PREP))
        self.controller.delete(req, FAKE_UUID)
        # Delete should be allowed in any case, even during resizing,
        # because it may get stuck.
        self.assertTrue(self.server_delete_called)

    def test_delete_server_instance_if_not_launched(self):
        # With a reclaim interval set, soft-delete would normally apply;
        # a never-launched instance must still be destroyed immediately.
        self.flags(reclaim_instance_interval=3600)
        req = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        req.method = 'DELETE'
        self.server_delete_called = False

        fake_get = fakes.fake_compute_get(launched_at=None)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: fake_get(*a, **k))

        def instance_destroy_mock(*args, **kwargs):
            self.server_delete_called = True
            deleted_at = timeutils.utcnow()
            return fake_instance.fake_db_instance(deleted_at=deleted_at)
        self.stubs.Set(db, 'instance_destroy', instance_destroy_mock)

        self.controller.delete(req, FAKE_UUID)
        # delete() should be called for instance which has never been active,
        # even if reclaim_instance_interval has been set.
        self.assertEqual(self.server_delete_called, True)
class ServersControllerRebuildInstanceTest(ControllerTest):
    """Tests for the 'rebuild' server action (POST /servers/<id>/action)."""

    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    image_href = 'http://localhost/v2/fake/images/%s' % image_uuid

    def setUp(self):
        super(ServersControllerRebuildInstanceTest, self).setUp()
        return_server = fakes.fake_compute_get(vm_state=vm_states.ACTIVE)
        self.stubs.Set(compute_api.API, 'get',
                       lambda api, *a, **k: return_server(*a, **k))
        # Baseline valid rebuild body; individual tests mutate it.
        self.body = {
            'rebuild': {
                'name': 'new_name',
                'imageRef': self.image_href,
                'metadata': {
                    'open': 'stack',
                },
                'personality': [
                    {
                        "path": "/etc/banner.txt",
                        "contents": "MQ==",
                    },
                ],
            },
        }
        self.req = fakes.HTTPRequest.blank('/fake/servers/a/action')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"

    def test_rebuild_instance_with_blank_metadata_key(self):
        # An empty-string metadata key is invalid -> 400.
        self.body['rebuild']['accessIPv4'] = '0.0.0.0'
        self.body['rebuild']['accessIPv6'] = 'fead::1234'
        self.body['rebuild']['metadata'][''] = 'world'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, self.body)

    def test_rebuild_instance_with_metadata_key_too_long(self):
        # A 260-char metadata key exceeds the limit -> 413.
        self.body['rebuild']['accessIPv4'] = '0.0.0.0'
        self.body['rebuild']['accessIPv6'] = 'fead::1234'
        self.body['rebuild']['metadata'][('a' * 260)] = 'world'
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, self.body)

    def test_rebuild_instance_with_metadata_value_too_long(self):
        # A 260-char metadata value exceeds the limit -> 413.
        self.body['rebuild']['accessIPv4'] = '0.0.0.0'
        self.body['rebuild']['accessIPv6'] = 'fead::1234'
        self.body['rebuild']['metadata']['key1'] = ('a' * 260)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, self.body)

    def test_rebuild_instance_fails_when_min_ram_too_small(self):
        # make min_ram larger than our instance ram size
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', properties={'key1': 'value1'},
                        min_ram="4096", min_disk="10")
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild,
                          self.req, FAKE_UUID, self.body)

    def test_rebuild_instance_fails_when_min_disk_too_small(self):
        # make min_disk larger than our instance disk size
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', properties={'key1': 'value1'},
                        min_ram="128", min_disk="100000")
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild, self.req,
                          FAKE_UUID, self.body)

    def test_rebuild_instance_image_too_large(self):
        # make image size larger than our instance disk size
        size = str(1000 * (1024 ** 3))

        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='active', size=size)
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild, self.req, FAKE_UUID, self.body)

    def test_rebuild_instance_with_deleted_image(self):
        # Rebuilding from a non-active (DELETED) image -> 400.
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True,
                        status='DELETED')
        self.stubs.Set(fake._FakeImageService, 'show', fake_get_image)
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild, self.req, FAKE_UUID, self.body)

    def test_rebuild_instance_onset_file_limit_over_quota(self):
        # OnsetFileLimitExceeded from the compute API maps to 403.
        def fake_get_image(self, context, image_href, **kwargs):
            return dict(id='76fa36fc-c930-4bf3-8c8a-ea2a2420deb6',
                        name='public image', is_public=True, status='active')
        with contextlib.nested(
            mock.patch.object(fake._FakeImageService, 'show',
                              side_effect=fake_get_image),
            mock.patch.object(self.controller.compute_api, 'rebuild',
                              side_effect=exception.OnsetFileLimitExceeded)
        ) as (
            show_mock, rebuild_mock
        ):
            self.req.body = jsonutils.dumps(self.body)
            self.assertRaises(webob.exc.HTTPForbidden,
                              self.controller._action_rebuild,
                              self.req, FAKE_UUID, body=self.body)

    def test_rebuild_instance_with_null_image_ref(self):
        # A null imageRef is invalid -> 400.
        self.body['rebuild']['imageRef'] = None
        self.req.body = jsonutils.dumps(self.body)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self.controller._action_rebuild, self.req, FAKE_UUID,
                          self.body)
class ServerStatusTest(test.TestCase):
    """Tests mapping of (vm_state, task_state) pairs to the API 'status'
    field, plus policy enforcement on the related server actions.
    """

    def setUp(self):
        super(ServerStatusTest, self).setUp()
        fakes.stub_out_nw_api(self.stubs)

        self.ext_mgr = extensions.ExtensionManager()
        self.ext_mgr.extensions = {}
        self.controller = servers.Controller(self.ext_mgr)

    def _get_with_state(self, vm_state, task_state=None):
        # Show the fake server while its compute state is forced to the
        # given vm/task states; returns the controller's response dict.
        request = fakes.HTTPRequest.blank('/fake/servers/%s' % FAKE_UUID)
        with mock.patch.object(self.controller.compute_api, 'get') as get:
            get.side_effect = fakes.fake_compute_get(
                vm_state=vm_state,
                task_state=task_state)
            return self.controller.show(request, FAKE_UUID)

    def _req_with_policy_fail(self, policy_rule_name):
        # Restrict the named compute policy to admins and return a
        # (non-admin) action request that should then be denied.
        rule = {'compute:%s' % policy_rule_name:
                common_policy.parse_rule('role:admin')}
        policy.set_rules(rule)
        return fakes.HTTPRequest.blank('/fake/servers/1234/action')

    def test_active(self):
        response = self._get_with_state(vm_states.ACTIVE)
        self.assertEqual(response['server']['status'], 'ACTIVE')

    def test_reboot(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBOOTING)
        self.assertEqual(response['server']['status'], 'REBOOT')

    def test_reboot_hard(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBOOTING_HARD)
        self.assertEqual(response['server']['status'], 'HARD_REBOOT')

    @mock.patch.object(servers.Controller, "_get_server")
    def test_reboot_resize_policy_fail(self, mock_get_server):
        req = self._req_with_policy_fail('reboot')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller._action_reboot, req, '1234',
                          {'reboot': {'type': 'HARD'}})

    def test_rebuild(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.REBUILDING)
        self.assertEqual(response['server']['status'], 'REBUILD')

    def test_rebuild_error(self):
        response = self._get_with_state(vm_states.ERROR)
        self.assertEqual(response['server']['status'], 'ERROR')

    def test_resize(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.RESIZE_PREP)
        self.assertEqual(response['server']['status'], 'RESIZE')

    @mock.patch.object(servers.Controller, "_get_server")
    def test_confirm_resize_policy_fail(self, mock_get_server):
        req = self._req_with_policy_fail('confirm_resize')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller._action_confirm_resize, req, '1234', {})

    def test_verify_resize(self):
        response = self._get_with_state(vm_states.RESIZED, None)
        self.assertEqual(response['server']['status'], 'VERIFY_RESIZE')

    def test_revert_resize(self):
        response = self._get_with_state(vm_states.RESIZED,
                                        task_states.RESIZE_REVERTING)
        self.assertEqual(response['server']['status'], 'REVERT_RESIZE')

    @mock.patch.object(servers.Controller, "_get_server")
    def test_revert_resize_policy_fail(self, mock_get_server):
        req = self._req_with_policy_fail('revert_resize')
        self.assertRaises(exception.PolicyNotAuthorized,
                          self.controller._action_revert_resize, req, '1234', {})

    def test_password_update(self):
        response = self._get_with_state(vm_states.ACTIVE,
                                        task_states.UPDATING_PASSWORD)
        self.assertEqual(response['server']['status'], 'PASSWORD')

    def test_stopped(self):
        response = self._get_with_state(vm_states.STOPPED)
        self.assertEqual(response['server']['status'], 'SHUTOFF')
class ServersControllerCreateTest(test.TestCase):
image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
flavor_ref = 'http://localhost/123/flavors/3'
def setUp(self):
    """Shared implementation for tests below that create instance."""
    super(ServersControllerCreateTest, self).setUp()

    self.flags(verbose=True,
               enable_instance_password=True)
    self.instance_cache_num = 0
    # Caches keyed by integer id and by uuid so the DB stubs below can
    # hand back the same fake rows they previously "created".
    self.instance_cache_by_id = {}
    self.instance_cache_by_uuid = {}

    fakes.stub_out_nw_api(self.stubs)

    self.ext_mgr = extensions.ExtensionManager()
    self.ext_mgr.extensions = {}
    self.controller = servers.Controller(self.ext_mgr)

    self.volume_id = 'fake'

    # Fake db.instance_create: builds a fake DB row from the request
    # values and records it in the caches above.
    def instance_create(context, inst):
        inst_type = flavors.get_flavor_by_flavor_id(3)
        image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
        def_image_ref = 'http://localhost/images/%s' % image_uuid
        self.instance_cache_num += 1
        instance = fake_instance.fake_db_instance(**{
            'id': self.instance_cache_num,
            'display_name': inst['display_name'] or 'test',
            'uuid': FAKE_UUID,
            'instance_type': inst_type,
            'access_ip_v4': '1.2.3.4',
            'access_ip_v6': 'fead::1234',
            'image_ref': inst.get('image_ref', def_image_ref),
            'user_id': 'fake',
            'project_id': 'fake',
            'reservation_id': inst['reservation_id'],
            "created_at": datetime.datetime(2010, 10, 10, 12, 0, 0),
            "updated_at": datetime.datetime(2010, 11, 11, 11, 0, 0),
            "config_drive": None,
            "progress": 0,
            "fixed_ips": [],
            "task_state": "",
            "vm_state": "",
            "root_device_name": inst.get('root_device_name', 'vda'),
            "security_groups": inst['security_groups'],
        })

        self.instance_cache_by_id[instance['id']] = instance
        self.instance_cache_by_uuid[instance['uuid']] = instance
        return instance

    def instance_get(context, instance_id):
        """Stub for compute/api create() pulling in instance after
        scheduling
        """
        return self.instance_cache_by_id[instance_id]

    def instance_update(context, uuid, values):
        instance = self.instance_cache_by_uuid[uuid]
        instance.update(values)
        return instance

    def server_update_and_get_original(
            context, instance_uuid, params, columns_to_join=None):
        inst = self.instance_cache_by_uuid[instance_uuid]
        inst.update(params)
        # Return (old, new) as the real DB API does; here both are the
        # same cached row.
        return (inst, inst)

    def fake_method(*args, **kwargs):
        pass

    def project_get_networks(context, user_id):
        return dict(id='1', host='localhost')

    fakes.stub_out_rate_limiting(self.stubs)
    fakes.stub_out_key_pair_funcs(self.stubs)
    fake.stub_out_image_service(self.stubs)
    self.stubs.Set(uuid, 'uuid4', fake_gen_uuid)
    self.stubs.Set(db, 'instance_add_security_group',
                   return_security_group)
    self.stubs.Set(db, 'project_get_networks',
                   project_get_networks)
    self.stubs.Set(db, 'instance_create', instance_create)
    self.stubs.Set(db, 'instance_system_metadata_update',
                   fake_method)
    self.stubs.Set(db, 'instance_get', instance_get)
    self.stubs.Set(db, 'instance_update', instance_update)
    self.stubs.Set(db, 'instance_update_and_get_original',
                   server_update_and_get_original)
    self.stubs.Set(manager.VlanManager, 'allocate_fixed_ip',
                   fake_method)

    # Baseline valid create body; individual tests mutate it.
    self.body = {
        'server': {
            'min_count': 2,
            'name': 'server_test',
            'imageRef': self.image_uuid,
            'flavorRef': self.flavor_ref,
            'metadata': {
                'hello': 'world',
                'open': 'stack',
            },
            'personality': [
                {
                    "path": "/etc/banner.txt",
                    "contents": "MQ==",
                },
            ],
        },
    }
    self.bdm = [{'delete_on_termination': 1,
                 'device_name': 123,
                 'volume_size': 1,
                 'volume_id': '11111111-1111-1111-1111-111111111111'}]

    self.req = fakes.HTTPRequest.blank('/fake/servers')
    self.req.method = 'POST'
    self.req.headers["content-type"] = "application/json"
def _check_admin_pass_len(self, server_dict):
    """utility function - check server_dict for adminPass length."""
    self.assertEqual(CONF.password_length,
                     len(server_dict["adminPass"]))

def _check_admin_pass_missing(self, server_dict):
    """utility function - check server_dict for absence of adminPass."""
    self.assertNotIn("adminPass", server_dict)

def _test_create_instance(self, flavor=2):
    # Create a server with a fixed image and the given flavor, then
    # verify the generated adminPass and returned uuid.
    image_uuid = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
    self.body['server']['imageRef'] = image_uuid
    self.body['server']['flavorRef'] = flavor
    self.req.body = jsonutils.dumps(self.body)
    server = self.controller.create(self.req, self.body).obj['server']
    self._check_admin_pass_len(server)
    self.assertEqual(FAKE_UUID, server['id'])
def test_create_instance_private_flavor(self):
    # Creating against a non-public flavor must fail with 400.
    values = {
        'name': 'fake_name',
        'memory_mb': 512,
        'vcpus': 1,
        'root_gb': 10,
        'ephemeral_gb': 10,
        'flavorid': '1324',
        'swap': 0,
        'rxtx_factor': 0.5,
        'vcpu_weight': 1,
        'disabled': False,
        'is_public': False,
    }
    db.flavor_create(context.get_admin_context(), values)
    self.assertRaises(webob.exc.HTTPBadRequest, self._test_create_instance,
                      flavor=1324)

def test_create_server_bad_image_href(self):
    # A non-string image ref must be rejected with 400.
    image_href = 1
    # NOTE(review): the trailing comma makes imageRef the tuple (1,),
    # not the int — still an invalid ref, but confirm this is intended.
    self.body['server']['imageRef'] = image_href,
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create,
                      self.req, self.body)

def test_create_server_with_invalid_networks_parameter(self):
    # 'networks' must be a list; a bare dict is rejected with 400.
    self.ext_mgr.extensions = {'os-networks': 'fake'}
    self.body['server']['networks'] = {
        'uuid': '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'}
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create,
                      self.req,
                      self.body)

def test_create_server_with_deleted_image(self):
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    # Get the fake image service so we can set the status to deleted
    (image_service, image_id) = glance.get_remote_image_service(
        context, '')
    image_service.update(context, image_uuid, {'status': 'DELETED'})
    self.addCleanup(image_service.update, context, image_uuid,
                    {'status': 'active'})

    self.body['server']['flavorRef'] = 2
    self.req.body = jsonutils.dumps(self.body)
    with testtools.ExpectedException(
            webob.exc.HTTPBadRequest,
            'Image 76fa36fc-c930-4bf3-8c8a-ea2a2420deb6 is not active.'):
        self.controller.create(self.req, self.body)
def test_create_server_image_too_large(self):
    # An image larger than the flavor's disk must be rejected with 400.
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    # Get the fake image service so we can update the size of the image
    (image_service, image_id) = glance.get_remote_image_service(context,
                                                                image_uuid)
    image = image_service.show(context, image_id)
    orig_size = image['size']
    new_size = str(1000 * (1024 ** 3))
    image_service.update(context, image_uuid, {'size': new_size})
    self.addCleanup(image_service.update, context, image_uuid,
                    {'size': orig_size})

    self.body['server']['flavorRef'] = 2
    self.req.body = jsonutils.dumps(self.body)
    with testtools.ExpectedException(
            webob.exc.HTTPBadRequest,
            "Flavor's disk is too small for requested image."):
        self.controller.create(self.req, self.body)

def test_create_multiple_instances(self):
    """Test creating multiple instances but not asking for
    reservation_id
    """
    self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    self.assertEqual(FAKE_UUID, res["server"]["id"])
    self._check_admin_pass_len(res["server"])

def test_create_multiple_instances_pass_disabled(self):
    """Test creating multiple instances but not asking for
    reservation_id
    """
    self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
    # With passwords disabled, no adminPass may appear in the response.
    self.flags(enable_instance_password=False)
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    self.assertEqual(FAKE_UUID, res["server"]["id"])
    self._check_admin_pass_missing(res["server"])

def test_create_multiple_instances_resv_id_return(self):
    """Test creating multiple instances with asking for
    reservation_id
    """
    self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
    self.body['server']['return_reservation_id'] = True
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body)

    reservation_id = res.obj.get('reservation_id')
    self.assertNotEqual(reservation_id, "")
    self.assertIsNotNone(reservation_id)
    self.assertTrue(len(reservation_id) > 1)

def test_create_multiple_instances_with_multiple_volume_bdm(self):
    """Test that a BadRequest is raised if multiple instances
    are requested with a list of block device mappings for volumes.
    """
    self.ext_mgr.extensions = {'os-multiple-create': 'fake'}
    min_count = 2
    bdm = [{'device_name': 'foo1', 'volume_id': 'vol-xxxx'},
           {'device_name': 'foo2', 'volume_id': 'vol-yyyy'}
           ]
    params = {
        'block_device_mapping': bdm,
        'min_count': min_count
    }
    old_create = compute_api.API.create

    # Assert the request parameters reached the compute API intact
    # before delegating to the real create (which raises).
    def create(*args, **kwargs):
        self.assertEqual(kwargs['min_count'], 2)
        self.assertEqual(len(kwargs['block_device_mapping']), 2)
        return old_create(*args, **kwargs)

    self.stubs.Set(compute_api.API, 'create', create)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self._test_create_extra, params, no_image=True)
def test_create_instance_image_ref_is_bookmark(self):
    # A full bookmark-style image URL is accepted as imageRef.
    image_href = 'http://localhost/fake/images/%s' % self.image_uuid
    self.body['server']['imageRef'] = image_href
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    server = res['server']
    self.assertEqual(FAKE_UUID, server['id'])

def test_create_instance_image_ref_is_invalid(self):
    # An image URL whose id is not a uuid must be rejected with 400.
    image_uuid = 'this_is_not_a_valid_uuid'
    image_href = 'http://localhost/fake/images/%s' % image_uuid
    self.body['server']['imageRef'] = image_href
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                      self.req, self.body)

def test_create_instance_no_key_pair(self):
    # Creation succeeds even when no key pair exists for the user.
    fakes.stub_out_key_pair_funcs(self.stubs, have_key_pair=False)
    self._test_create_instance()

def _test_create_extra(self, params, no_image=False):
    # Merge extra params into the baseline body (optionally dropping
    # imageRef) and assert create() still returns a 'server' document.
    self.body['server']['flavorRef'] = 2
    if no_image:
        self.body['server'].pop('imageRef', None)
    self.body['server'].update(params)
    self.req.body = jsonutils.dumps(self.body)
    self.assertIn('server',
                  self.controller.create(self.req, self.body).obj)
def test_create_instance_with_security_group_enabled(self):
    # Unknown security-group name -> 400; known name is passed through
    # to compute_api.API.create.
    self.ext_mgr.extensions = {'os-security-groups': 'fake'}
    group = 'foo'
    old_create = compute_api.API.create

    def sec_group_get(ctx, proj, name):
        if name == group:
            return True
        else:
            raise exception.SecurityGroupNotFoundForProject(
                project_id=proj, security_group_id=name)

    def create(*args, **kwargs):
        self.assertEqual(kwargs['security_group'], [group])
        return old_create(*args, **kwargs)

    self.stubs.Set(db, 'security_group_get_by_name', sec_group_get)
    # negative test
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self._test_create_extra,
                      {'security_groups': [{'name': 'bogus'}]})
    # positive test - extra assert in create path
    self.stubs.Set(compute_api.API, 'create', create)
    self._test_create_extra({'security_groups': [{'name': group}]})

def test_create_instance_with_non_unique_secgroup_name(self):
    # NoUniqueMatch from the compute API maps to 409 Conflict.
    self.flags(network_api_class='nova.network.neutronv2.api.API')
    network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
    requested_networks = [{'uuid': network}]
    params = {'networks': requested_networks,
              'security_groups': [{'name': 'dup'}, {'name': 'dup'}]}

    def fake_create(*args, **kwargs):
        raise exception.NoUniqueMatch("No Unique match found for ...")

    self.stubs.Set(compute_api.API, 'create', fake_create)
    self.assertRaises(webob.exc.HTTPConflict,
                      self._test_create_extra, params)

def test_create_instance_with_port_with_no_fixed_ips(self):
    # PortRequiresFixedIP from the compute API maps to 400.
    self.flags(network_api_class='nova.network.neutronv2.api.API')
    port_id = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
    requested_networks = [{'port': port_id}]
    params = {'networks': requested_networks}

    def fake_create(*args, **kwargs):
        raise exception.PortRequiresFixedIP(port_id=port_id)

    self.stubs.Set(compute_api.API, 'create', fake_create)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self._test_create_extra, params)

@mock.patch.object(compute_api.API, 'create')
def test_create_instance_raise_user_data_too_large(self, mock_create):
    # InstanceUserDataTooLarge maps to 400.
    mock_create.side_effect = exception.InstanceUserDataTooLarge(
        maxsize=1, length=2)

    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create,
                      self.req, self.body)

@mock.patch.object(compute_api.API, 'create')
def test_create_instance_raise_auto_disk_config_exc(self, mock_create):
    # AutoDiskConfigDisabledByImage maps to 400.
    mock_create.side_effect = exception.AutoDiskConfigDisabledByImage(
        image='dummy')

    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create,
                      self.req, self.body)

@mock.patch.object(compute_api.API, 'create',
                   side_effect=exception.InstanceExists(
                       name='instance-name'))
def test_create_instance_raise_instance_exists(self, mock_create):
    # InstanceExists maps to 409 Conflict.
    self.assertRaises(webob.exc.HTTPConflict,
                      self.controller.create,
                      self.req, self.body)

def test_create_instance_with_network_with_no_subnet(self):
    # NetworkRequiresSubnet from the compute API maps to 400.
    self.flags(network_api_class='nova.network.neutronv2.api.API')
    network = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
    requested_networks = [{'uuid': network}]
    params = {'networks': requested_networks}

    def fake_create(*args, **kwargs):
        raise exception.NetworkRequiresSubnet(network_uuid=network)

    self.stubs.Set(compute_api.API, 'create', fake_create)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self._test_create_extra, params)
def test_create_instance_name_all_blank_spaces(self):
    # A whitespace-only name is rejected with 400.
    self.body['server']['name'] = ' ' * 64
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

def test_create_instance_name_too_long(self):
    # A 256-character name exceeds the limit -> 400.
    self.body['server']['name'] = 'X' * 256
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                      self.req, self.body)

def test_create_instance(self):
    # Happy path: returns the fake uuid and a generated adminPass.
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    server = res['server']
    self._check_admin_pass_len(server)
    self.assertEqual(FAKE_UUID, server['id'])

def test_create_instance_pass_disabled(self):
    # With passwords disabled no adminPass may appear in the response.
    self.flags(enable_instance_password=False)
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    server = res['server']
    self._check_admin_pass_missing(server)
    self.assertEqual(FAKE_UUID, server['id'])

@mock.patch('nova.virt.hardware.numa_get_constraints')
def test_create_instance_numa_topology_wrong(self, numa_constraints_mock):
    # An incomplete image NUMA topology maps to 400.
    numa_constraints_mock.side_effect = (
        exception.ImageNUMATopologyIncomplete)
    image_href = 'http://localhost/v2/images/%s' % self.image_uuid
    self.body['server']['imageRef'] = image_href
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)
def test_create_instance_too_much_metadata(self):
    # Exceeding the metadata-items quota maps to 403.
    self.flags(quota_metadata_items=1)
    self.body['server']['metadata']['vote'] = 'fiddletown'
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPForbidden,
                      self.controller.create, self.req, self.body)

def test_create_instance_metadata_key_too_long(self):
    # A 260-char metadata key maps to 413.
    self.flags(quota_metadata_items=1)
    self.body['server']['metadata'] = {('a' * 260): '12345'}

    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                      self.controller.create, self.req, self.body)

def test_create_instance_metadata_value_too_long(self):
    # A 260-char metadata value maps to 413.
    self.flags(quota_metadata_items=1)
    self.body['server']['metadata'] = {'key1': ('a' * 260)}
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPRequestEntityTooLarge,
                      self.controller.create, self.req, self.body)

def test_create_instance_metadata_key_blank(self):
    # An empty metadata key maps to 400.
    self.flags(quota_metadata_items=1)
    self.body['server']['metadata'] = {'': 'abcd'}
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

def test_create_instance_metadata_not_dict(self):
    # Metadata must be a dict; a string maps to 400.
    self.flags(quota_metadata_items=1)
    self.body['server']['metadata'] = 'string'
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

def test_create_instance_metadata_key_not_string(self):
    # Non-string metadata keys map to 400.
    self.flags(quota_metadata_items=1)
    self.body['server']['metadata'] = {1: 'test'}
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

def test_create_instance_metadata_value_not_string(self):
    # Non-string metadata values map to 400.
    self.flags(quota_metadata_items=1)
    self.body['server']['metadata'] = {'test': ['a', 'list']}
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)
def test_create_user_data_malformed_bad_request(self):
    # Non-base64 user_data maps to 400.
    self.ext_mgr.extensions = {'os-user-data': 'fake'}
    params = {'user_data': 'u1234!'}
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self._test_create_extra, params)

@mock.patch('nova.compute.api.API.create',
            side_effect=exception.KeypairNotFound(name='nonexistentkey',
                                                  user_id=1))
def test_create_instance_invalid_key_name(self, mock_create):
    # KeypairNotFound for a named key maps to 400.
    self.body['server']['key_name'] = 'nonexistentkey'
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

@mock.patch('nova.compute.api.API.create',
            side_effect=exception.KeypairNotFound(name='',
                                                  user_id=1))
def test_create_instance_empty_key_name(self, mock_create):
    # An empty key_name likewise maps to 400.
    self.body['server']['key_name'] = ''
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

def test_create_instance_valid_key_name(self):
    # A known key_name creates successfully.
    self.body['server']['key_name'] = 'key'
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    self.assertEqual(FAKE_UUID, res["server"]["id"])
    self._check_admin_pass_len(res["server"])

def test_create_instance_invalid_flavor_href(self):
    # A flavor URL with a non-numeric id maps to 400.
    flavor_ref = 'http://localhost/v2/flavors/asdf'
    self.body['server']['flavorRef'] = flavor_ref
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

def test_create_instance_invalid_flavor_id_int(self):
    # A negative flavor id maps to 400.
    flavor_ref = -1
    self.body['server']['flavorRef'] = flavor_ref
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

def test_create_instance_invalid_flavor_id_empty(self):
    # An empty flavor id maps to 400.
    flavor_ref = ""
    self.body['server']['flavorRef'] = flavor_ref
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

def test_create_instance_bad_flavor_href(self):
    # A well-formed URL for a nonexistent flavor maps to 400.
    flavor_ref = 'http://localhost/v2/flavors/17'
    self.body['server']['flavorRef'] = flavor_ref
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)
def test_create_instance_with_config_drive(self):
    # config_drive="true" is accepted when the extension is loaded.
    self.ext_mgr.extensions = {'os-config-drive': 'fake'}
    self.body['server']['config_drive'] = "true"
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    server = res['server']
    self.assertEqual(FAKE_UUID, server['id'])

def test_create_instance_with_bad_config_drive(self):
    # A non-boolean config_drive value maps to 400.
    self.ext_mgr.extensions = {'os-config-drive': 'fake'}
    self.body['server']['config_drive'] = 'adcd'
    self.req.body = jsonutils.dumps(self.body)
    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

def test_create_instance_without_config_drive(self):
    # Omitting config_drive is fine even with the extension loaded.
    self.ext_mgr.extensions = {'os-config-drive': 'fake'}
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    server = res['server']
    self.assertEqual(FAKE_UUID, server['id'])

def test_create_instance_with_config_drive_disabled(self):
    # Without the extension, config_drive is stripped (None) before
    # reaching the compute API.
    config_drive = [{'config_drive': 'foo'}]
    params = {'config_drive': config_drive}
    old_create = compute_api.API.create

    def create(*args, **kwargs):
        self.assertIsNone(kwargs['config_drive'])
        return old_create(*args, **kwargs)

    self.stubs.Set(compute_api.API, 'create', create)
    self._test_create_extra(params)

def test_create_instance_bad_href(self):
    # A non-URL, non-uuid image ref maps to 400.
    image_href = 'asdf'
    self.body['server']['imageRef'] = image_href
    self.req.body = jsonutils.dumps(self.body)

    self.assertRaises(webob.exc.HTTPBadRequest,
                      self.controller.create, self.req, self.body)

def test_create_instance_local_href(self):
    # The baseline body uses a bare image uuid ("local href").
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    server = res['server']
    self.assertEqual(FAKE_UUID, server['id'])

def test_create_instance_admin_pass(self):
    # A caller-supplied adminPass is echoed back when passwords are on.
    # NOTE(review): the trailing comma makes flavorRef the tuple (3,) —
    # the test still passes; confirm whether the tuple is intended.
    self.body['server']['flavorRef'] = 3,
    self.body['server']['adminPass'] = 'testpass'
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    server = res['server']
    self.assertEqual(server['adminPass'], self.body['server']['adminPass'])

def test_create_instance_admin_pass_pass_disabled(self):
    # With passwords disabled the supplied adminPass is dropped from
    # the response (but stays in the request body).
    self.flags(enable_instance_password=False)
    # NOTE(review): trailing comma — flavorRef becomes the tuple (3,).
    self.body['server']['flavorRef'] = 3,
    self.body['server']['adminPass'] = 'testpass'
    self.req.body = jsonutils.dumps(self.body)
    res = self.controller.create(self.req, self.body).obj

    server = res['server']
    self.assertIn('adminPass', self.body['server'])
    self.assertNotIn('adminPass', server)
def test_create_instance_admin_pass_empty(self):
self.body['server']['flavorRef'] = 3,
self.body['server']['adminPass'] = ''
self.req.body = jsonutils.dumps(self.body)
# The fact that the action doesn't raise is enough validation
self.controller.create(self.req, self.body)
def test_create_instance_with_security_group_disabled(self):
group = 'foo'
params = {'security_groups': [{'name': group}]}
old_create = compute_api.API.create
def create(*args, **kwargs):
# NOTE(vish): if the security groups extension is not
# enabled, then security groups passed in
# are ignored.
self.assertEqual(kwargs['security_group'], ['default'])
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_disk_config_enabled(self):
self.ext_mgr.extensions = {'OS-DCF': 'fake'}
# NOTE(vish): the extension converts OS-DCF:disk_config into
# auto_disk_config, so we are testing with
# the_internal_value
params = {'auto_disk_config': 'AUTO'}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['auto_disk_config'], 'AUTO')
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_disk_config_disabled(self):
params = {'auto_disk_config': True}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['auto_disk_config'], False)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_scheduler_hints_enabled(self):
self.ext_mgr.extensions = {'OS-SCH-HNT': 'fake'}
hints = {'a': 'b'}
params = {'scheduler_hints': hints}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], hints)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_scheduler_hints_disabled(self):
hints = {'a': 'b'}
params = {'scheduler_hints': hints}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['scheduler_hints'], {})
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_volumes_enabled_no_image(self):
"""Test that the create will fail if there is no image
and no bdms supplied in the request
"""
self.ext_mgr.extensions = {'os-volumes': 'fake'}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {}, no_image=True)
def test_create_instance_with_bdm_v2_enabled_no_image(self):
self.ext_mgr.extensions = {'os-block-device-mapping-v2-boot': 'fake'}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertNotIn('imageRef', kwargs)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, {}, no_image=True)
def test_create_instance_with_user_data_enabled(self):
self.ext_mgr.extensions = {'os-user-data': 'fake'}
user_data = 'fake'
params = {'user_data': user_data}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['user_data'], user_data)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_user_data_disabled(self):
user_data = 'fake'
params = {'user_data': user_data}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertIsNone(kwargs['user_data'])
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_keypairs_enabled(self):
self.ext_mgr.extensions = {'os-keypairs': 'fake'}
key_name = 'green'
params = {'key_name': key_name}
old_create = compute_api.API.create
# NOTE(sdague): key pair goes back to the database,
# so we need to stub it out for tests
def key_pair_get(context, user_id, name):
return dict(test_keypair.fake_keypair,
public_key='FAKE_KEY',
fingerprint='FAKE_FINGERPRINT',
name=name)
def create(*args, **kwargs):
self.assertEqual(kwargs['key_name'], key_name)
return old_create(*args, **kwargs)
self.stubs.Set(db, 'key_pair_get', key_pair_get)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_keypairs_disabled(self):
key_name = 'green'
params = {'key_name': key_name}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertIsNone(kwargs['key_name'])
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_availability_zone_enabled(self):
self.ext_mgr.extensions = {'os-availability-zone': 'fake'}
availability_zone = 'fake'
params = {'availability_zone': availability_zone}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertEqual(kwargs['availability_zone'], availability_zone)
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
try:
self._test_create_extra(params)
except webob.exc.HTTPBadRequest as e:
expected = 'The requested availability zone is not available'
self.assertEqual(e.explanation, expected)
admin_context = context.get_admin_context()
db.service_create(admin_context, {'host': 'host1_zones',
'binary': "nova-compute",
'topic': 'compute',
'report_count': 0})
agg = db.aggregate_create(admin_context,
{'name': 'agg1'}, {'availability_zone': availability_zone})
db.aggregate_host_add(admin_context, agg['id'], 'host1_zones')
self._test_create_extra(params)
def test_create_instance_with_availability_zone_disabled(self):
availability_zone = 'fake'
params = {'availability_zone': availability_zone}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertIsNone(kwargs['availability_zone'])
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_networks_enabled(self):
self.ext_mgr.extensions = {'os-networks': 'fake'}
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None)]
self.assertEqual(result, kwargs['requested_networks'].as_tuples())
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_neutronv2_port_in_use(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.PortInUse(port_id=port)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPConflict,
self._test_create_extra, params)
def test_create_instance_with_neutronv2_not_found_network(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
requested_networks = [{'uuid': network}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.NetworkNotFound(network_id=network)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_with_neutronv2_port_not_found(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
requested_networks = [{'uuid': network, 'port': port}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
raise exception.PortNotFound(port_id=port)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
@mock.patch.object(compute_api.API, 'create')
def test_create_multiple_instance_with_specified_ip_neutronv2(self,
_api_mock):
_api_mock.side_effect = exception.InvalidFixedIpAndMaxCountRequest(
reason="")
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
address = '10.0.0.1'
self.body['server']['max_count'] = 2
requested_networks = [{'uuid': network, 'fixed_ip': address,
'port': port}]
params = {'networks': requested_networks}
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_multiple_instance_with_neutronv2_port(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
port = 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee'
self.body['server']['max_count'] = 2
requested_networks = [{'uuid': network, 'port': port}]
params = {'networks': requested_networks}
def fake_create(*args, **kwargs):
msg = ("Unable to launch multiple instances with"
" a single configured port ID. Please launch your"
" instance one by one with different ports.")
raise exception.MultiplePortsNotApplicable(reason=msg)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.assertRaises(webob.exc.HTTPBadRequest,
self._test_create_extra, params)
def test_create_instance_with_networks_disabled_neutronv2(self):
self.flags(network_api_class='nova.network.neutronv2.api.API')
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
result = [('76fa36fc-c930-4bf3-8c8a-ea2a2420deb6', None,
None, None)]
self.assertEqual(result, kwargs['requested_networks'].as_tuples())
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_with_networks_disabled(self):
self.ext_mgr.extensions = {}
net_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
requested_networks = [{'uuid': net_uuid}]
params = {'networks': requested_networks}
old_create = compute_api.API.create
def create(*args, **kwargs):
self.assertIsNone(kwargs['requested_networks'])
return old_create(*args, **kwargs)
self.stubs.Set(compute_api.API, 'create', create)
self._test_create_extra(params)
def test_create_instance_invalid_personality(self):
def fake_create(*args, **kwargs):
codec = 'utf8'
content = 'b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA=='
start_position = 19
end_position = 20
msg = 'invalid start byte'
raise UnicodeDecodeError(codec, content, start_position,
end_position, msg)
self.stubs.Set(compute_api.API, 'create', fake_create)
self.body['server']['personality'] = [
{
"path": "/etc/banner.txt",
"contents": "b25zLiINCg0KLVJpY2hhcmQgQ$$%QQmFjaA==",
},
]
self.req.body = jsonutils.dumps(self.body)
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create, self.req, self.body)
def test_create_location(self):
selfhref = 'http://localhost/v2/fake/servers/%s' % FAKE_UUID
image_href = 'http://localhost/v2/images/%s' % self.image_uuid
self.body['server']['imageRef'] = image_href
self.req.body = jsonutils.dumps(self.body)
robj = self.controller.create(self.req, self.body)
self.assertEqual(robj['Location'], selfhref)
def _do_test_create_instance_above_quota(self, resource, allowed, quota,
expected_msg):
fakes.stub_out_instance_quota(self.stubs, allowed, quota, resource)
self.body['server']['flavorRef'] = 3
self.req.body = jsonutils.dumps(self.body)
try:
self.controller.create(self.req, self.body).obj['server']
self.fail('expected quota to be exceeded')
except webob.exc.HTTPForbidden as e:
self.assertEqual(e.explanation, expected_msg)
def test_create_instance_above_quota_instances(self):
msg = ('Quota exceeded for instances: Requested 1, but'
' already used 10 of 10 instances')
self._do_test_create_instance_above_quota('instances', 0, 10, msg)
def test_create_instance_above_quota_ram(self):
msg = ('Quota exceeded for ram: Requested 4096, but'
' already used 8192 of 10240 ram')
self._do_test_create_instance_above_quota('ram', 2048, 10 * 1024, msg)
def test_create_instance_above_quota_cores(self):
msg = ('Quota exceeded for cores: Requested 2, but'
' already used 9 of 10 cores')
self._do_test_create_instance_above_quota('cores', 1, 10, msg)
def test_create_instance_above_quota_group_members(self):
ctxt = context.get_admin_context()
fake_group = objects.InstanceGroup(ctxt)
fake_group.create()
def fake_count(context, name, group, user_id):
self.assertEqual(name, "server_group_members")
self.assertEqual(group.uuid, fake_group.uuid)
self.assertEqual(user_id,
self.req.environ['nova.context'].user_id)
return 10
def fake_limit_check(context, **kwargs):
if 'server_group_members' in kwargs:
raise exception.OverQuota(overs={})
def fake_instance_destroy(context, uuid, constraint):
return fakes.stub_instance(1)
self.stubs.Set(fakes.QUOTAS, 'count', fake_count)
self.stubs.Set(fakes.QUOTAS, 'limit_check', fake_limit_check)
self.stubs.Set(db, 'instance_destroy', fake_instance_destroy)
self.ext_mgr.extensions = {'OS-SCH-HNT': 'fake',
'os-server-group-quotas': 'fake'}
self.body['server']['scheduler_hints'] = {'group': fake_group.uuid}
self.req.body = jsonutils.dumps(self.body)
expected_msg = "Quota exceeded, too many servers in group"
try:
self.controller.create(self.req, self.body).obj['server']
self.fail('expected quota to be exceeded')
except webob.exc.HTTPForbidden as e:
self.assertEqual(e.explanation, expected_msg)
def test_create_instance_with_group_hint(self):
ctxt = context.get_admin_context()
test_group = objects.InstanceGroup(ctxt)
test_group.create()
def fake_instance_destroy(context, uuid, constraint):
return fakes.stub_instance(1)
self.stubs.Set(db, 'instance_destroy', fake_instance_destroy)
self.ext_mgr.extensions = {'OS-SCH-HNT': 'fake',
'os-server-group-quotas': 'fake'}
self.body['server']['scheduler_hints'] = {'group': test_group.uuid}
self.req.body = jsonutils.dumps(self.body)
server = self.controller.create(self.req, self.body).obj['server']
test_group = objects.InstanceGroup.get_by_uuid(ctxt, test_group.uuid)
self.assertIn(server['id'], test_group.members)
def test_resolve_exception(self):
class AA(object):
pass
class BB(AA):
pass
class CC(BB):
pass
list1 = [AA, BB, CC]
list2 = [BB, AA, CC]
list3 = [CC, AA]
list4 = [CC, BB, AA]
for test_list in [list1, list2, list3, list4]:
result = self.controller._resolve_exception(test_list)
# Since CC is the most specific, we always expect that returned.
self.assertEqual(result, CC)
class ServersControllerCreateTestWithMock(test.TestCase):
    # Image/flavor fixtures shared by every test in this class.
    image_uuid = '76fa36fc-c930-4bf3-8c8a-ea2a2420deb6'
    flavor_ref = 'http://localhost/123/flavors/3'

    def setUp(self):
        """Shared implementation for tests below that create instance."""
        super(ServersControllerCreateTestWithMock, self).setUp()
        self.flags(verbose=True,
                   enable_instance_password=True)
        self.instance_cache_num = 0
        self.instance_cache_by_id = {}
        self.instance_cache_by_uuid = {}
        self.ext_mgr = extensions.ExtensionManager()
        self.ext_mgr.extensions = {}
        self.controller = servers.Controller(self.ext_mgr)
        self.volume_id = 'fake'
        # Minimal valid POST /servers body; individual tests mutate it
        # (via _test_create_extra) before issuing the request.
        self.body = {
            'server': {
                'min_count': 2,
                'name': 'server_test',
                'imageRef': self.image_uuid,
                'flavorRef': self.flavor_ref,
                'metadata': {
                    'hello': 'world',
                    'open': 'stack',
                },
                'personality': [
                    {
                        "path": "/etc/banner.txt",
                        "contents": "MQ==",
                    },
                ],
            },
        }
        self.req = fakes.HTTPRequest.blank('/fake/servers')
        self.req.method = 'POST'
        self.req.headers["content-type"] = "application/json"

    def _test_create_extra(self, params, no_image=False):
        # Merge ``params`` into the request body and issue the create call.
        self.body['server']['flavorRef'] = 2
        if no_image:
            self.body['server'].pop('imageRef', None)
        self.body['server'].update(params)
        self.req.body = jsonutils.dumps(self.body)
        self.controller.create(self.req, self.body).obj['server']

    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_neutronv2_fixed_ip_already_in_use(self,
            create_mock):
        """A fixed IP already in use must surface as HTTP 400, and
        compute_api.create must have been attempted exactly once."""
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        address = '10.0.2.3'
        requested_networks = [{'uuid': network, 'fixed_ip': address}]
        params = {'networks': requested_networks}
        create_mock.side_effect = exception.FixedIpAlreadyInUse(
            address=address,
            instance_uuid=network)
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, params)
        self.assertEqual(1, len(create_mock.call_args_list))

    @mock.patch.object(compute_api.API, 'create')
    def test_create_instance_with_neutronv2_invalid_fixed_ip(self,
            create_mock):
        """A syntactically invalid fixed IP is rejected by request validation
        before compute_api.create is ever invoked."""
        self.flags(network_api_class='nova.network.neutronv2.api.API')
        network = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
        address = '999.0.2.3'
        requested_networks = [{'uuid': network, 'fixed_ip': address}]
        params = {'networks': requested_networks}
        try:
            self._test_create_extra(params)
            self.fail()
        except webob.exc.HTTPBadRequest as ex:
            self.assertEqual(ex.explanation,
                             'Invalid fixed IP address (%s)' % address)
        self.assertFalse(create_mock.called)

    @mock.patch.object(compute_api.API, 'create',
                       side_effect=exception.InvalidVolume(reason='error'))
    def test_create_instance_with_invalid_volume_error(self, create_mock):
        # Tests that InvalidVolume is translated to a 400 error.
        self.assertRaises(webob.exc.HTTPBadRequest,
                          self._test_create_extra, {})
class ServersViewBuilderTest(test.TestCase):
image_bookmark = "http://localhost/fake/images/5"
flavor_bookmark = "http://localhost/fake/flavors/1"
def setUp(self):
super(ServersViewBuilderTest, self).setUp()
self.flags(use_ipv6=True)
db_inst = fakes.stub_instance(
id=1,
image_ref="5",
uuid="deadbeef-feed-edee-beef-d0ea7beefedd",
display_name="test_server",
include_fake_metadata=False)
privates = ['172.19.0.1']
publics = ['192.168.0.3']
public6s = ['b33f::fdee:ddff:fecc:bbaa']
def nw_info(*args, **kwargs):
return [(None, {'label': 'public',
'ips': [dict(ip=ip) for ip in publics],
'ip6s': [dict(ip=ip) for ip in public6s]}),
(None, {'label': 'private',
'ips': [dict(ip=ip) for ip in privates]})]
fakes.stub_out_nw_api_get_instance_nw_info(self.stubs, nw_info)
self.uuid = db_inst['uuid']
self.view_builder = views.servers.ViewBuilder()
self.request = fakes.HTTPRequest.blank("/v2/fake")
self.request.context = context.RequestContext('fake', 'fake')
self.instance = fake_instance.fake_instance_obj(
self.request.context,
expected_attrs=instance_obj.INSTANCE_DEFAULT_FIELDS,
**db_inst)
self.self_link = "http://localhost/v2/fake/servers/%s" % self.uuid
self.bookmark_link = "http://localhost/fake/servers/%s" % self.uuid
self.expected_detailed_server = {
"server": {
"id": self.uuid,
"user_id": "fake_user",
"tenant_id": "fake_project",
"updated": "2010-11-11T11:00:00Z",
"created": "2010-10-10T12:00:00Z",
"progress": 0,
"name": "test_server",
"status": "BUILD",
"accessIPv4": "",
"accessIPv6": "",
"hostId": '',
"image": {
"id": "5",
"links": [
{
"rel": "bookmark",
"href": self.image_bookmark,
},
],
},
"flavor": {
"id": "1",
"links": [
{
"rel": "bookmark",
"href": self.flavor_bookmark,
},
],
},
"addresses": {
'test1': [
{'version': 4, 'addr': '192.168.1.100'},
{'version': 6, 'addr': '2001:db8:0:1::1'}
]
},
"metadata": {},
"links": [
{
"rel": "self",
"href": self.self_link,
},
{
"rel": "bookmark",
"href": self.bookmark_link,
},
],
}
}
self.expected_server = {
"server": {
"id": self.uuid,
"name": "test_server",
"links": [
{
"rel": "self",
"href": self.self_link,
},
{
"rel": "bookmark",
"href": self.bookmark_link,
},
],
}
}
def test_get_flavor_valid_flavor(self):
expected = {"id": "1",
"links": [{"rel": "bookmark",
"href": self.flavor_bookmark}]}
result = self.view_builder._get_flavor(self.request, self.instance)
self.assertEqual(result, expected)
def test_build_server(self):
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output,
matchers.DictMatches(self.expected_server))
def test_build_server_with_project_id(self):
output = self.view_builder.basic(self.request, self.instance)
self.assertThat(output,
matchers.DictMatches(self.expected_server))
def test_build_server_detail(self):
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output,
matchers.DictMatches(self.expected_detailed_server))
def test_build_server_no_image(self):
self.instance["image_ref"] = ""
output = self.view_builder.show(self.request, self.instance)
self.assertEqual(output['server']['image'], "")
def test_build_server_detail_with_fault(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context, self.uuid)
self.expected_detailed_server["server"]["status"] = "ERROR"
self.expected_detailed_server["server"]["fault"] = {
"code": 404,
"created": "2010-10-10T12:00:00Z",
"message": "HTTPNotFound",
"details": "Stock details for test",
}
del self.expected_detailed_server["server"]["progress"]
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output,
matchers.DictMatches(self.expected_detailed_server))
    def test_build_server_detail_with_fault_that_has_been_deleted(self):
        """A deleted server in ERROR vm_state is reported as DELETED, and its
        fault is shown without the 'details' field (non-admin context)."""
        self.instance['deleted'] = 1
        self.instance['vm_state'] = vm_states.ERROR
        fault = fake_instance.fake_fault_obj(self.request.context,
                                             self.uuid, code=500,
                                             message="No valid host was found")
        self.instance['fault'] = fault
        # Regardless of the vm_state deleted servers should have DELETED status
        self.expected_detailed_server["server"]["status"] = "DELETED"
        self.expected_detailed_server["server"]["fault"] = {
            "code": 500,
            "created": "2010-10-10T12:00:00Z",
            "message": "No valid host was found",
        }
        # A faulted server exposes no 'progress' field.
        del self.expected_detailed_server["server"]["progress"]
        self.request.context = context.RequestContext('fake', 'fake')
        output = self.view_builder.show(self.request, self.instance)
        self.assertThat(output,
                        matchers.DictMatches(self.expected_detailed_server))
def test_build_server_detail_with_fault_no_details_not_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.context = context.RequestContext('fake', 'fake')
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error",
'details': 'Stock details for test'}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_no_details_admin(self):
self.instance['vm_state'] = vm_states.ERROR
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context,
self.uuid,
code=500,
message='Error',
details='')
expected_fault = {"code": 500,
"created": "2010-10-10T12:00:00Z",
"message": "Error"}
self.request.environ['nova.context'].is_admin = True
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output['server']['fault'],
matchers.DictMatches(expected_fault))
def test_build_server_detail_with_fault_but_active(self):
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
self.instance['fault'] = fake_instance.fake_fault_obj(
self.request.context, self.uuid)
output = self.view_builder.show(self.request, self.instance)
self.assertNotIn('fault', output['server'])
def test_build_server_detail_active_status(self):
# set the power state of the instance to running
self.instance['vm_state'] = vm_states.ACTIVE
self.instance['progress'] = 100
self.expected_detailed_server["server"]["status"] = "ACTIVE"
self.expected_detailed_server["server"]["progress"] = 100
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output,
matchers.DictMatches(self.expected_detailed_server))
def test_build_server_detail_with_accessipv4(self):
access_ip_v4 = '1.2.3.4'
self.instance['access_ip_v4'] = access_ip_v4
self.expected_detailed_server["server"]["accessIPv4"] = access_ip_v4
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output,
matchers.DictMatches(self.expected_detailed_server))
def test_build_server_detail_with_accessipv6(self):
access_ip_v6 = 'fead::1234'
self.instance['access_ip_v6'] = access_ip_v6
self.expected_detailed_server["server"]["accessIPv6"] = access_ip_v6
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output,
matchers.DictMatches(self.expected_detailed_server))
def test_build_server_detail_with_metadata(self):
metadata = []
metadata.append(models.InstanceMetadata(key="Open", value="Stack"))
metadata = nova_utils.metadata_to_dict(metadata)
self.instance['metadata'] = metadata
self.expected_detailed_server["server"]["metadata"] = {"Open": "Stack"}
output = self.view_builder.show(self.request, self.instance)
self.assertThat(output,
matchers.DictMatches(self.expected_detailed_server))
| 42.675393 | 79 | 0.600828 |
64caec2808dbad1ff7f18ff31679080fd586d42f | 113 | py | Python | frameworks/Virus-sploit/annoying.py | ArkAngeL43/all-weapons | 3d2d4b0bf58acaf54a202f0644a5a4716f3fbe56 | [
"MIT"
] | 32 | 2021-10-30T17:10:36.000Z | 2022-02-09T09:43:34.000Z | frameworks/Virus-sploit/annoying.py | ArkAngeL43/all-weapons | 3d2d4b0bf58acaf54a202f0644a5a4716f3fbe56 | [
"MIT"
] | 2 | 2021-11-02T00:36:49.000Z | 2022-01-03T18:58:36.000Z | frameworks/Virus-sploit/annoying.py | ArkAngeL43/all-weapons | 3d2d4b0bf58acaf54a202f0644a5a4716f3fbe56 | [
"MIT"
] | 5 | 2021-12-15T14:02:51.000Z | 2022-02-06T21:39:58.000Z | from espeak import espeak
from time import sleep
while True:
espeak.synth("Get your Os fucked")
sleep(1) | 18.833333 | 38 | 0.725664 |
5e06bf3e51cf54bb92a903562bf172b0b2191d72 | 493 | py | Python | 题源分类/剑指offer/python/xx.py | ZhengyangXu/Algorithm-Daily-Practice | 3017a3d476fc9a857026190ea4fae2911058df59 | [
"MIT"
] | null | null | null | 题源分类/剑指offer/python/xx.py | ZhengyangXu/Algorithm-Daily-Practice | 3017a3d476fc9a857026190ea4fae2911058df59 | [
"MIT"
] | null | null | null | 题源分类/剑指offer/python/xx.py | ZhengyangXu/Algorithm-Daily-Practice | 3017a3d476fc9a857026190ea4fae2911058df59 | [
"MIT"
] | null | null | null | # 面试题28:字符串的排列
# 题目:输入一个字符串,打印出该字符串中字符的所有排列。
# 例如输入字符串abc,则打印出由字符a、b、c所能排列出来的所有字符串
# abc、acb、bac、bca、cab和cba。
res = []  # accumulates the permutations produced by backtrack()


def backtrack(string, path):
    """Append to the global ``res`` every distinct permutation of ``string``.

    ``path`` is the permutation prefix built so far.  Bug fixes relative to
    the original:
      * character usage is tracked by count instead of ``letter in path``,
        so strings with repeated characters (e.g. ``'aab'``) now yield all
        of their permutations instead of none past the first occurrence;
      * each distinct character is tried only once per position (``tried``),
        so no duplicate permutations are emitted;
      * the pointless ``path = path[:]`` copy and manual "undo" slice are
        removed — strings are immutable, so recursion alone backtracks.
    """
    if len(path) == len(string):
        res.append(path)
        return  # prefix complete — nothing further to extend
    tried = set()  # characters already placed at this position
    for letter in string:
        if letter in tried:
            continue
        # A character may appear in ``path`` at most as often as in ``string``.
        if path.count(letter) >= string.count(letter):
            continue
        tried.add(letter)
        backtrack(string, path + letter)


if __name__ == "__main__":
    res = []
    backtrack('abc', "")
    print(res)
38e66ae5d97e9c44539ede5122834e33ec90d594 | 8,035 | py | Python | packages/syft/src/syft/core/adp/scalar/intermediate_gamma_scalar.py | leosole/PySyft | 01606f08f5ec5510840644e198301cd25c3ccfa5 | [
"Apache-1.1"
] | null | null | null | packages/syft/src/syft/core/adp/scalar/intermediate_gamma_scalar.py | leosole/PySyft | 01606f08f5ec5510840644e198301cd25c3ccfa5 | [
"Apache-1.1"
] | null | null | null | packages/syft/src/syft/core/adp/scalar/intermediate_gamma_scalar.py | leosole/PySyft | 01606f08f5ec5510840644e198301cd25c3ccfa5 | [
"Apache-1.1"
] | null | null | null | # stdlib
from typing import Any
from typing import List as TypeList
from typing import Optional
from typing import Tuple as TypeTuple
# third party
import numpy as np
from scipy import optimize
from sympy.core.basic import Basic as BasicSymbol
# relative
from ...common import UID
from ..entity import Entity
from ..search import create_lookup_tables_for_symbol
from ..search import create_searchable_function_from_polynomial
from ..search import max_lipschitz_via_jacobian
from ..search import minimize_function
from ..search import ssid2obj
from .abstract.intermediate_scalar import IntermediateScalar
from .abstract.scalar import Scalar
class IntermediateGammaScalar(IntermediateScalar):
"""
A Superclass for Scalars with data from multiple entities (GammaScalars).
Most importantly, this is where all of the operations (+/-/*/div) are implemented,
as well as the various methods with which to perform the search for the max Lipschitz.
"""
    def __init__(
        self,
        poly: BasicSymbol,
        min_val: float,
        max_val: float,
        id: Optional[UID] = None,
    ) -> None:
        """Store the symbolic polynomial together with its value bounds.

        Args:
            poly: sympy expression combining the underlying input scalars.
            min_val: lower bound on the value this scalar can take.
            max_val: upper bound on the value this scalar can take.
            id: optional unique identifier, forwarded to the superclass.
        """
        super().__init__(poly=poly, id=id)
        # Bounds are normalized to float so later interval arithmetic is uniform.
        self._min_val = float(min_val)
        self._max_val = float(max_val)
        self.is_linear: Optional[
            bool
        ] = None  # None means skip performance optimization
# GammaScalar +/-/*/div other ---> GammaScalar
    def __add__(self, other: Any) -> IntermediateScalar:
        """Add ``other`` (another Scalar or a plain number) to this scalar.

        The result's bounds follow interval addition:
        [a, b] + [c, d] == [a + c, b + d].
        """
        if isinstance(other, Scalar):
            # relative
            from .intermediate_phi_scalar import IntermediatePhiScalar

            # Phi scalars (single-entity) are promoted to their gamma
            # (multi-entity) representation before combining.
            if isinstance(other, IntermediatePhiScalar):
                other = other.gamma
            return IntermediateGammaScalar(
                poly=self.poly + other.poly,
                min_val=self.min_val + other.min_val,
                max_val=self.max_val + other.max_val,
            )
        # ``other`` is a constant: shift both bounds uniformly.
        return IntermediateGammaScalar(
            poly=self.poly + other,
            min_val=self.min_val + other,
            max_val=self.max_val + other,
        )
def __sub__(self, other: Any) -> IntermediateScalar:
if isinstance(other, Scalar):
# relative
from .intermediate_phi_scalar import IntermediatePhiScalar
if isinstance(other, IntermediatePhiScalar):
other = other.gamma
return IntermediateGammaScalar(
poly=self.poly - other.poly,
min_val=self.min_val - other.min_val,
max_val=self.max_val - other.max_val,
)
return IntermediateGammaScalar(
poly=self.poly - other,
min_val=self.min_val - other,
max_val=self.max_val - other,
)
def __mul__(self, other: Any) -> IntermediateScalar:
if isinstance(other, Scalar):
# relative
from .intermediate_phi_scalar import IntermediatePhiScalar
if isinstance(other, IntermediatePhiScalar):
other = other.gamma
max_val = max(
self.min_val * other.min_val,
self.min_val * other.max_val,
self.max_val * other.min_val,
self.max_val * other.max_val,
)
min_val = min(
self.min_val * other.min_val,
self.min_val * other.max_val,
self.max_val * other.min_val,
)
return IntermediateGammaScalar(
poly=self.poly * other.poly, max_val=max_val, min_val=min_val
)
max_val = max(
self.min_val * other,
self.max_val * other,
)
min_val = min(
self.min_val * other,
self.max_val * other,
)
return IntermediateGammaScalar(
poly=self.poly * other, min_val=min_val, max_val=max_val
)
def max_lipschitz_via_explicit_search(
self, force_all_searches: bool = False
) -> TypeTuple[TypeList[optimize.OptimizeResult], np.float64]:
r1 = np.array([x.poly for x in self.input_scalars]) # type: ignore
# relative
from .gamma_scalar import GammaScalar
r2_diffs = np.array(
[
GammaScalar(x.min_val, x.value, x.max_val, entity=x.entity, prime=x.prime).poly # type: ignore
for x in self.input_scalars
]
)
r2 = r1 + r2_diffs
fr1 = self.poly
fr2 = self.poly.copy().subs({x[0]: x[1] for x in list(zip(r1, r2))})
left = np.sum(np.square(fr1 - fr2)) ** 0.5
right = np.sum(np.square(r1 - r2)) ** 0.5
C = -left / right
i2s, s2i = create_lookup_tables_for_symbol(C)
search_fun = create_searchable_function_from_polynomial(
poly=C, symbol2index=s2i
)
r1r2diff_zip = list(zip(r1, r2_diffs))
s2range = {}
for _input_scalar, _additive_counterpart in r1r2diff_zip:
input_scalar = ssid2obj[_input_scalar.name]
additive_counterpart = ssid2obj[_additive_counterpart.name]
s2range[input_scalar.ssid] = (input_scalar.min_val, input_scalar.max_val)
s2range[additive_counterpart.ssid] = (
input_scalar.min_val,
input_scalar.max_val,
)
rranges = list()
for _, symbol in enumerate(i2s):
rranges.append(s2range[symbol])
r2_indices_list = list()
min_max_list = list()
for r2_val in r2:
r2_syms = [ssid2obj[x.name] for x in r2_val.free_symbols]
r2_indices = [s2i[x.ssid] for x in r2_syms]
r2_indices_list.append(r2_indices)
min_max_list.append((r2_syms[0].min_val, r2_syms[0].max_val))
functions = list()
for i in range(2):
f1 = (
lambda x, i=i: x[r2_indices_list[i][0]]
+ x[r2_indices_list[i][1]]
+ min_max_list[i][0]
)
f2 = (
lambda x, i=i: -(x[r2_indices_list[i][0]] + x[r2_indices_list[i][1]])
+ min_max_list[i][1]
)
functions.append(f1)
functions.append(f2)
constraints = [{"type": "ineq", "fun": f} for f in functions]
def non_negative_additive_terms(symbol_vector: np.ndarray) -> np.float64:
out = 0
for index in [s2i[x.name] for x in r2_diffs]:
out += symbol_vector[index] ** 2
# there's a small bit of rounding error from this constraint - this should
# only be used as a double check or as a backup!!!
return out**0.5 - 1 / 2**16
constraints.append({"type": "ineq", "fun": non_negative_additive_terms})
results = minimize_function(
f=search_fun,
rranges=rranges,
constraints=constraints,
force_all_searches=force_all_searches,
)
return results, C
def max_lipschitz_via_jacobian(
self,
input_entity: Optional[Entity] = None,
data_dependent: bool = True,
force_all_searches: bool = False,
try_hessian_shortcut: bool = False,
) -> TypeList[optimize.OptimizeResult]:
return max_lipschitz_via_jacobian(
scalars=[self],
input_entity=input_entity,
data_dependent=data_dependent,
force_all_searches=force_all_searches,
try_hessian_shortcut=try_hessian_shortcut,
) # type: ignore
@property
def max_lipschitz(self) -> float:
result = self.max_lipschitz_via_jacobian()[0][-1]
if isinstance(result, float):
return -result
else:
return -float(result.fun)
def max_lipschitz_wrt_entity(self, entity: Entity) -> float:
result = self.max_lipschitz_via_jacobian(input_entity=entity)[0][-1]
if isinstance(result, float):
return -result
else:
return -float(result.fun)
| 33.340249 | 111 | 0.59191 |
aa9f9818fe18b708f49a5d103e198916405717b5 | 400 | py | Python | suorganizer/suorganizer/wsgi.py | jmcevoy1984/Django-Unleashed | 8c9014ee3bf271dcd86727c131184f7348af75d8 | [
"MIT"
] | null | null | null | suorganizer/suorganizer/wsgi.py | jmcevoy1984/Django-Unleashed | 8c9014ee3bf271dcd86727c131184f7348af75d8 | [
"MIT"
] | null | null | null | suorganizer/suorganizer/wsgi.py | jmcevoy1984/Django-Unleashed | 8c9014ee3bf271dcd86727c131184f7348af75d8 | [
"MIT"
] | null | null | null | """
WSGI config for suorganizer project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Tell Django which settings module to load before creating the application;
# setdefault means a value already present in the environment wins.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "suorganizer.settings")

# Module-level WSGI callable that servers (gunicorn, uWSGI, ...) look up.
application = get_wsgi_application()
| 23.529412 | 78 | 0.79 |
efe8b9dcf47cc2799375f6dbf43e03213639d0ef | 1,529 | py | Python | samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py | renovate-bot/python-dataplex | e633eeb1ba6c9c9b5105f443f4ec1f83de2246e9 | [
"Apache-2.0"
] | 1 | 2022-01-29T03:16:24.000Z | 2022-01-29T03:16:24.000Z | samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py | renovate-bot/python-dataplex | e633eeb1ba6c9c9b5105f443f4ec1f83de2246e9 | [
"Apache-2.0"
] | 10 | 2022-01-28T22:58:22.000Z | 2022-03-27T20:29:22.000Z | samples/generated_samples/dataplex_v1_generated_dataplex_service_list_zone_actions_async.py | renovate-bot/python-dataplex | e633eeb1ba6c9c9b5105f443f4ec1f83de2246e9 | [
"Apache-2.0"
] | 2 | 2022-01-28T22:22:19.000Z | 2022-01-29T08:09:23.000Z | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ListZoneActions
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-dataplex
# [START dataplex_v1_generated_DataplexService_ListZoneActions_async]
from google.cloud import dataplex_v1
async def sample_list_zone_actions():
    """List every action on a zone and print each one as it arrives."""
    # Build the async client and the request up front.
    service = dataplex_v1.DataplexServiceAsyncClient()
    list_request = dataplex_v1.ListZoneActionsRequest(parent="parent_value")

    # Issue the call and iterate the async pager, printing each action.
    pager = service.list_zone_actions(request=list_request)
    async for action in pager:
        print(action)
# [END dataplex_v1_generated_DataplexService_ListZoneActions_async]
| 32.531915 | 85 | 0.761936 |
5af24e186e732dca7aff91760c61a5091d88cae5 | 24,608 | py | Python | optapy-core/src/main/python/annotations.py | optapy/optapy | 0240f744f592d964110c86d1054ce63cf8c38452 | [
"Apache-2.0"
] | 85 | 2021-07-26T11:42:51.000Z | 2022-03-29T14:14:27.000Z | optapy-core/src/main/python/annotations.py | optapy/optapy | 0240f744f592d964110c86d1054ce63cf8c38452 | [
"Apache-2.0"
] | 22 | 2021-07-21T21:10:16.000Z | 2022-03-31T17:49:00.000Z | optapy-core/src/main/python/annotations.py | optapy/optapy | 0240f744f592d964110c86d1054ce63cf8c38452 | [
"Apache-2.0"
] | 9 | 2021-07-15T12:07:32.000Z | 2022-02-09T02:11:55.000Z | from .optaplanner_java_interop import ensure_init, _add_shallow_copy_to_class, _generate_planning_entity_class, \
_generate_problem_fact_class, _generate_planning_solution_class, _generate_constraint_provider_class, get_class
from jpype import JImplements, JOverride
from typing import Union, List, Callable, Type, Any, TYPE_CHECKING
if TYPE_CHECKING:
from org.optaplanner.core.api.score.stream import Constraint, ConstraintFactory
from org.optaplanner.core.api.score import Score
from org.optaplanner.core.api.domain.valuerange import ValueRange
from org.optaplanner.core.api.domain.variable import PlanningVariableGraphType
"""
All OptaPlanner Python annotations work like this:
1. Ensure OptaPy is init using ensure_init
2. Import the corresponding Java annotation
3. Set __optaplanner<annotation_name> (ex: __optaplannerPlanningId) on the given function/class
to a dict containing the following:
- 'annotationType' -> the imported Java annotation
- annotation parameter -> parameter value or None if unset
4. Return the modified function/class
For classes, JImplements('org.optaplanner.optapy.OpaquePythonReference')(the_class)
is called and used (which allows __getattr__ to work w/o casting to a Java Proxy).
"""
def is_snake_case(the_function: Callable):
    """
    Guess the getter/setter naming convention from the function name,
    since the owning class is not available yet.

    :param the_function: the function to inspect
    :return: True iff the function's name starts with ``get_`` (snake_case
             getter convention), False otherwise
    """
    name = the_function.__name__
    return name[:4] == 'get_'
def planning_id(getter_function: Callable[[], Union[int, str]]) -> Callable[[], Union[int, str]]:
    """Decorator marking a getter as returning the planning id used to locate
    an externalObject (often from another Thread), e.g. during Move rebasing
    and in a ProblemFactChange.

    It is specified on a getter of a java bean property of a PlanningEntity
    class, planning value class or any problem fact class. The return type can
    be any Comparable type which overrides Object.equals(Object) and
    Object.hashCode() -- usually a number or str -- and must never be None.
    """
    ensure_init()  # the JVM must be running before Java annotation types can be imported
    from org.optaplanner.core.api.domain.lookup import PlanningId as JavaPlanningId

    # Metadata attribute read reflectively by the Java interop layer.
    annotation_metadata = {'annotationType': JavaPlanningId}
    getter_function.__optaplannerPlanningId = annotation_metadata
    return getter_function
def planning_pin(getter_function: Callable[[], bool]) -> Callable[[], bool]:
    """Decorator marking a boolean property of a @planning_entity as its "pinned" flag.

    A pinned planning entity is never changed during planning: for example the
    user can pin a shift to a specific employee before solving and the solver
    will not undo that, regardless of the constraints. The boolean is False if
    the entity is movable and True if it is pinned, and it applies to all the
    planning variables of that entity. (To pin individual variables, see
    https://issues.redhat.com/browse/PLANNER-124.)

    This is syntactic sugar for @planning_entity(pinning_filter=is_pinned_function),
    which is a more flexible and verbose way to pin a planning entity.

    :type getter_function: Callable[[], bool]
    """
    ensure_init()  # the JVM must be running before Java annotation types can be imported
    from org.optaplanner.core.api.domain.entity import PlanningPin as JavaPlanningPin

    # NOTE(review): the metadata is stored under __optaplannerPlanningId, the
    # same attribute name planning_id uses -- presumably the interop layer only
    # inspects the stored 'annotationType'; confirm this is intentional.
    getter_function.__optaplannerPlanningId = {'annotationType': JavaPlanningPin}
    getter_function.__optapy_return = get_class(bool)
    return getter_function
def planning_variable(variable_type: Type, value_range_provider_refs: List[str], nullable: bool = False,
                      graph_type: 'PlanningVariableGraphType' = None, strength_comparator_class=None,
                      strength_weight_factory_class=None) -> Callable[[Callable[[], Any]], Callable[[], Any]]:
    """Specifies that a bean property can be changed and should be optimized by the optimization algorithms.

    It is specified on a getter of a java bean property (or directly on a field) of
    a PlanningEntity class. A PlanningVariable MUST be annotated on the getter.
    The getter MUST be named get<X> (ex: getRoom) and has
    a corresponding setter set<X> (ex: setRoom).

    :param variable_type: The type of values this variable can take.
    :param value_range_provider_refs: The value range providers refs that this
                                      planning variable takes values from.
    :param nullable: If this planning variable can take None as a value. Default False.
    :param graph_type: In some use cases, such as Vehicle Routing, planning entities form a specific graph type, as
                       specified by org.optaplanner.core.api.domain.variable.PlanningVariableGraphType; default None.
    :param strength_comparator_class: Allows a collection of planning values for this variable to be sorted by strength.
    :param strength_weight_factory_class: The SelectionSorterWeightFactory alternative for strength_comparator_class.
    """
    def planning_variable_function_wrapper(variable_getter_function: Callable[[], Any]):
        ensure_init()  # JVM must be running before Java annotation types can be imported
        from org.optaplanner.core.api.domain.variable import PlanningVariable as JavaPlanningVariable
        # Metadata dict read reflectively by the Java interop layer when the
        # planning entity class is generated; keys mirror the Java annotation's
        # parameters, with unset parameters passed as None.
        variable_getter_function.__optaplannerPlanningVariable = {
            'annotationType': JavaPlanningVariable,
            'valueRangeProviderRefs': value_range_provider_refs,
            'nullable': nullable,
            'graphType': graph_type,
            'strengthComparatorClass': strength_comparator_class,
            'strengthWeightFactoryClass': strength_weight_factory_class
        }
        # Record the variable's Java type so the generated getter is typed correctly.
        variable_getter_function.__optapy_return = get_class(variable_type)
        return variable_getter_function
    return planning_variable_function_wrapper
def anchor_shadow_variable(anchor_type: Type, source_variable_name: str) -> Callable[[Callable[[], Any]],
                                                                                     Callable[[], Any]]:
    """
    Specifies that a bean property (or a field) is the anchor of a chained @planning_variable, which implies it's
    a shadow variable.

    It is specified on a getter of a java bean property (or a field) of a @planning_entity class.

    :param anchor_type: The type of the anchor class.
    :param source_variable_name: The source planning variable is a chained planning variable that leads to the anchor.
                                 Both the genuine variable and the shadow variable should be consistent:
                                 if A chains to B, then A must have the same anchor as B (unless B is the anchor).

                                 When the Solver changes a genuine variable, it adjusts the shadow variable accordingly.
                                 In practice, the Solver ignores shadow variables (except for consistency housekeeping).
    """
    def anchor_shadow_variable_function_mapper(anchor_getter_function: Callable[[], Any]):
        ensure_init()  # JVM must be running before Java annotation types can be imported
        from org.optaplanner.core.api.domain.variable import AnchorShadowVariable as JavaAnchorShadowVariable
        # If the getter uses the get_<name> (snake_case) convention, the
        # referenced source variable is prefixed with '_' -- presumably to
        # match the property name the interop layer derives from such getters;
        # TODO confirm against the class-generation code.
        planning_variable_name = source_variable_name
        if is_snake_case(anchor_getter_function):
            planning_variable_name = f'_{planning_variable_name}'
        # Metadata dict read reflectively by the Java interop layer.
        anchor_getter_function.__optaplannerPlanningVariable = {
            'annotationType': JavaAnchorShadowVariable,
            'sourceVariableName': planning_variable_name,
        }
        # Record the anchor's Java type so the generated getter is typed correctly.
        anchor_getter_function.__optapy_return = get_class(anchor_type)
        return anchor_getter_function
    return anchor_shadow_variable_function_mapper
def inverse_relation_shadow_variable(source_type: Type, source_variable_name: str,
                                     is_singleton: bool = False) -> Callable[
                                         [Callable[[], Any]],
                                         Callable[[], Any]]:
    """
    Specifies that a bean property (or a field) is the inverse of a @planning_variable, which implies it's a shadow
    variable.

    It is specified on a getter of a java bean property (or a field) of a @planning_entity class.

    :param source_type: The planning entity that contains the planning variable that reference this entity.
    :param source_variable_name: In a bidirectional relationship, the shadow side (= the follower side) uses this
                                 property (and nothing else) to declare for which @planning_variable (= the leader side) it is a shadow.

                                 Both sides of a bidirectional relationship should be consistent: if A points to B, then B must point to A.

                                 When the Solver changes a genuine variable, it adjusts the shadow variable accordingly.
                                 In practice, the Solver ignores shadow variables (except for consistency housekeeping).
    :param is_singleton: True if and only if the shadow variable has a 1-to-{0,1} relationship
                         (i.e. if at most one planning variable can take this value). Defaults to False.
    """
    def inverse_relation_shadow_variable_function_mapper(inverse_relation_getter_function):
        ensure_init()  # JVM must be running before Java classes can be imported
        from org.optaplanner.optapy import PythonWrapperGenerator, SelfType  # noqa
        from org.optaplanner.core.api.domain.variable import InverseRelationShadowVariable as \
            JavaInverseRelationShadowVariable
        from java.util import Collection
        # A None source_type means "this entity's own type" (SelfType marker).
        the_source_type = source_type
        if the_source_type is None:
            the_source_type = SelfType
        # If the getter uses the get_<name> (snake_case) convention, the
        # referenced source variable is prefixed with '_' -- presumably to
        # match the property name derived from such getters; TODO confirm.
        planning_variable_name = source_variable_name
        if is_snake_case(inverse_relation_getter_function):
            planning_variable_name = f'_{planning_variable_name}'
        # Metadata dict read reflectively by the Java interop layer.
        inverse_relation_getter_function.__optaplannerPlanningVariable = {
            'annotationType': JavaInverseRelationShadowVariable,
            'sourceVariableName': planning_variable_name,
        }
        if is_singleton:
            # 1-to-{0,1}: the getter returns a single entity.
            inverse_relation_getter_function.__optapy_return = the_source_type
        else:
            # 1-to-many: the getter returns a java.util.Collection of entities,
            # with the element type recorded in the generic signature.
            inverse_relation_getter_function.__optapy_return = Collection
            inverse_relation_getter_function.__optapy_signature = PythonWrapperGenerator.getCollectionSignature(
                get_class(the_source_type))
        return inverse_relation_getter_function
    return inverse_relation_shadow_variable_function_mapper  # noqa
def problem_fact_property(fact_type: Type) -> Callable[[Callable[[], List]],
                                                       Callable[[], List]]:
    """Specifies that a property on a @planning_solution class is a problem fact.

    A problem fact must not change during solving (except through a ProblemFactChange event). The constraints in a
    ConstraintProvider rely on problem facts for ConstraintFactory.from(Class).

    Do not annotate planning entities as problem facts: they are automatically available as facts for
    ConstraintFactory.from(Class).
    """
    def problem_fact_property_function_mapper(getter_function: Callable[[], Any]):
        ensure_init()  # JVM must be running before Java classes can be imported
        from org.optaplanner.optapy import PythonWrapperGenerator  # noqa
        from org.optaplanner.core.api.domain.solution import \
            ProblemFactProperty as JavaProblemFactProperty
        # Record the fact's Java type so the generated getter is typed correctly.
        getter_function.__optapy_return = get_class(fact_type)
        # NOTE(review): the metadata is stored under the
        # __optaplannerPlanningEntityCollectionProperty attribute name even
        # though the annotation is ProblemFactProperty -- presumably the
        # interop layer keys on the stored 'annotationType'; confirm.
        getter_function.__optaplannerPlanningEntityCollectionProperty = {
            'annotationType': JavaProblemFactProperty
        }
        return getter_function
    return problem_fact_property_function_mapper
def problem_fact_collection_property(fact_type: Type) -> Callable[[Callable[[], List]],
                                                                  Callable[[], List]]:
    """Specifies that a property on a @planning_solution class is a Collection of problem facts.

    A problem fact must not change during solving (except through a ProblemFactChange event). The constraints in a
    ConstraintProvider rely on problem facts for ConstraintFactory.from(Class).

    Do not annotate planning entities as problem facts: they are automatically available as facts for
    ConstraintFactory.from(Class).
    """
    def problem_fact_collection_property_function_mapper(getter_function: Callable[[], List]):
        ensure_init()  # JVM must be running before Java classes can be imported
        from org.optaplanner.optapy import PythonWrapperGenerator  # noqa
        from org.optaplanner.core.api.domain.solution import \
            ProblemFactCollectionProperty as JavaProblemFactCollectionProperty
        # The getter returns a collection, exposed to Java as an array of the
        # fact's Java type.
        getter_function.__optapy_return = PythonWrapperGenerator.getArrayClass(get_class(fact_type))
        # Metadata dict read reflectively by the Java interop layer.
        getter_function.__optaplannerPlanningEntityCollectionProperty = {
            'annotationType': JavaProblemFactCollectionProperty
        }
        return getter_function
    return problem_fact_collection_property_function_mapper
def planning_entity_property(entity_type: Type) -> Callable[[Callable[[], List]],
                                                            Callable[[], List]]:
    """Specifies that a property on a PlanningSolution class is a planning entity.

    (Docstring corrected: the previous text was copy-pasted from the
    collection variant; this property holds a single planning entity.)
    The entity should have the @planning_entity annotation and will be added
    to the ScoreDirector.
    """
    def planning_entity_property_function_mapper(getter_function: Callable[[], List]):
        ensure_init()  # JVM must be running before Java classes can be imported
        from org.optaplanner.optapy import PythonWrapperGenerator  # noqa
        from org.optaplanner.core.api.domain.solution import \
            PlanningEntityProperty as JavaPlanningEntityProperty
        # NOTE(review): stored under the *CollectionProperty* attribute name,
        # like the other solution-property decorators here -- presumably the
        # interop layer keys on the stored 'annotationType'; confirm.
        getter_function.__optaplannerPlanningEntityCollectionProperty = {
            'annotationType': JavaPlanningEntityProperty
        }
        # Record the entity's Java type so the generated getter is typed correctly.
        getter_function.__optapy_return = get_class(entity_type)
        return getter_function
    return planning_entity_property_function_mapper
def planning_entity_collection_property(entity_type: Type) -> Callable[[Callable[[], List]],
                                                                       Callable[[], List]]:
    """Specifies that a property on a PlanningSolution class is a Collection of planning entities.

    Every element in the planning entity collection should have the @planning_entity annotation. Every element in the
    planning entity collection will be added to the ScoreDirector.
    """
    def planning_entity_collection_property_function_mapper(getter_function: Callable[[], List]):
        ensure_init()  # JVM must be running before Java classes can be imported
        from org.optaplanner.optapy import PythonWrapperGenerator  # noqa
        from org.optaplanner.core.api.domain.solution import \
            PlanningEntityCollectionProperty as JavaPlanningEntityCollectionProperty
        # Metadata dict read reflectively by the Java interop layer.
        getter_function.__optaplannerPlanningEntityCollectionProperty = {
            'annotationType': JavaPlanningEntityCollectionProperty
        }
        # The getter returns a collection, exposed to Java as an array of the
        # entity's Java type.
        getter_function.__optapy_return = PythonWrapperGenerator.getArrayClass(get_class(entity_type))
        return getter_function
    return planning_entity_collection_property_function_mapper
def value_range_provider(range_id: str, value_range_type: type = None) -> Callable[
        [Callable[[], Union[List, 'ValueRange']]], Callable[[], Union[List, 'ValueRange']]]:
    """Provides the planning values that can be used for a PlanningVariable.

    This is specified on a getter which returns a list or ValueRange. A list is implicitly converted to a ValueRange.

    :param range_id: The id of the value range. Referenced by @planning_variable's value_range_provider_refs
                     parameter. Required.
    :param value_range_type: The type of the value range. Only required if the function is not also
                             decorated with @problem_fact_collection_property. Should either be
                             list or a Java class that implements ValueRangeProvider.
    """
    def value_range_provider_function_wrapper(getter_function: Callable[[], Union[List, 'ValueRange']]):
        ensure_init()  # JVM must be running before Java classes can be imported
        from org.optaplanner.core.api.domain.valuerange import ValueRangeProvider as JavaValueRangeProvider
        from org.optaplanner.optapy import PythonWrapperGenerator, OpaquePythonReference  # noqa
        # Metadata dict read reflectively by the Java interop layer; 'id' is
        # the name planning variables use to reference this range.
        getter_function.__optaplannerValueRangeProvider = {
            'annotationType': JavaValueRangeProvider,
            'id': range_id
        }
        # Only set the return type when the getter is not already typed by
        # another decorator (e.g. @problem_fact_collection_property).
        if value_range_type is not None:
            if value_range_type == list:
                # A Python list is exposed to Java as an array of opaque Python objects.
                getter_function.__optapy_return = PythonWrapperGenerator.getArrayClass(OpaquePythonReference)
            else:
                getter_function.__optapy_return = get_class(value_range_type)
        return getter_function
    return value_range_provider_function_wrapper
def planning_score(score_type: Type['Score'],
                   bendable_hard_levels_size: int = None,
                   bendable_soft_levels_size: int = None,
                   score_definition_class: Type = None):
    """Specifies that a property on a @planning_solution class holds the Score of that solution.

    This property can be null if the @planning_solution is uninitialized.

    This property is modified by the Solver, every time when the Score of this PlanningSolution has been calculated.

    :param score_type: The type of the score. Should be imported from optapy.types.
    :type score_type: Type[Score]
    :param bendable_hard_levels_size: Required for bendable scores.
                                      For example with 3 hard levels, hard level 0 always outweighs hard level 1 which
                                      always outweighs hard level 2, which outweighs all the soft levels.
    :param bendable_soft_levels_size: Required for bendable scores. For example with 3 soft levels,
                                      soft level 0 always outweighs soft level 1 which always outweighs soft level 2.
    :param score_definition_class: Overrides the default determined ScoreDefinition to implement a custom one.
                                   In most cases, this should not be used.
    """
    def planning_score_function_wrapper(getter_function):
        ensure_init()  # JVM must be running before Java annotation types can be imported
        from org.optaplanner.core.api.domain.solution import PlanningScore as JavaPlanningScore
        # Metadata dict read reflectively by the Java interop layer; the
        # bendable sizes and definition class stay None unless supplied.
        getter_function.__optaplannerPlanningScore = {
            'annotationType': JavaPlanningScore,
            'bendableHardLevelsSize': bendable_hard_levels_size,
            'bendableSoftLevelsSize': bendable_soft_levels_size,
            'scoreDefinitionClass': score_definition_class
        }
        # Record the score's Java type so the generated getter is typed correctly.
        getter_function.__optapy_return = get_class(score_type)
        return getter_function
    return planning_score_function_wrapper
@JImplements('org.optaplanner.core.api.domain.entity.PinningFilter', deferred=True)
class _PythonPinningFilter:
    """Wraps a Python ``(solution, entity) -> bool`` callable as a Java PinningFilter proxy."""

    def __init__(self, delegate):
        # Keep a reference to the Python predicate; it is invoked from the Java side.
        self.delegate = delegate

    @JOverride
    def accept(self, solution, entity):
        # Forward straight to the wrapped Python predicate.
        result = self.delegate(solution, entity)
        return result
def planning_entity(entity_class: Type = None, /, *, pinning_filter: Callable = None) -> Union[Type,
                                                                                               Callable[[Type], Type]]:
    """Specifies that the class is a planning entity. Each planning entity must have at least
    1 PlanningVariable property.

    The class MUST allow passing None to all of __init__ arguments, so it can be cloned.
    (ex: this is allowed:

    def __init__(self, a_list):
        self.a_list = a_list

    this is NOT allowed:

    def __init__(self, a_list):
        self.a_list = a_list
        self.list_length = len(a_list)
    )

    Optional Parameters: @:param pinning_filter: A function that takes the @planning_solution class and an entity,
    and return true if the entity cannot be changed, false otherwise
    """
    ensure_init()  # JVM must be running before Java annotation types can be imported
    from org.optaplanner.core.api.domain.entity import PlanningEntity as JavaPlanningEntity
    # Annotation parameters for the generated Java planning entity class; a
    # Python pinning filter is wrapped so Java can call it.
    annotation_data = {
        'annotationType': JavaPlanningEntity,
        'pinningFilter': _PythonPinningFilter(pinning_filter) if pinning_filter is not None else None,
        'difficultyComparatorClass': None,
        'difficultyWeightFactoryClass': None,
    }

    def planning_entity_wrapper(entity_class_argument):
        # Mark the class as an opaque Python reference so Java-side attribute
        # access works without casting to a Java proxy.
        out = JImplements('org.optaplanner.optapy.OpaquePythonReference')(entity_class_argument)
        out.__optapy_java_class = _generate_planning_entity_class(entity_class_argument, annotation_data)
        # Planning entities are always deep planning cloned.
        out.__optapy_is_planning_clone = True
        _add_shallow_copy_to_class(out)
        return out

    # Support both bare-decorator and parameterized-decorator usage.
    if entity_class:  # Called as @planning_entity
        return planning_entity_wrapper(entity_class)
    else:  # Called as @planning_entity(pinning_filter=some_function)
        return planning_entity_wrapper
def problem_fact(fact_class: Type) -> Type:
    """Decorator marking a class as a problem fact.

    A problem fact must not change during solving (except through a
    ProblemFactChange event). The constraints in a ConstraintProvider rely on
    problem facts for ConstraintFactory.from(Class).

    Do not annotate a planning entity as a problem fact: entities are
    automatically available as facts for ConstraintFactory.from(Class).
    """
    ensure_init()  # JVM must be running before Java classes can be used
    # Mark the class as an opaque Python reference and attach the generated
    # Java counterpart used by the interop layer.
    wrapped = JImplements('org.optaplanner.optapy.OpaquePythonReference')(fact_class)
    wrapped.__optapy_java_class = _generate_problem_fact_class(fact_class)
    return wrapped
def planning_solution(planning_solution_class: Type) -> Type:
    """Decorator marking a class as a planning solution (a problem plus a
    possible solution of that problem).

    A possible solution does not need to be optimal or even feasible, and its
    planning variables might not be initialized (especially when delivered as
    a problem). A solution is mutable: for scalability reasons (incremental
    score calculation) the same working solution instance is continuously
    modified per move thread, and it is cloned to recall the best solution.

    Each planning solution must have exactly 1 PlanningScore property and at
    least 1 PlanningEntityCollectionProperty property.

    The class MUST allow passing None to all of __init__ arguments, so it can
    be cloned. (ex: this is allowed:

    def __init__(self, a_list):
        self.a_list = a_list

    this is NOT allowed:

    def __init__(self, a_list):
        self.a_list = a_list
        self.list_length = len(a_list)
    )
    """
    ensure_init()  # JVM must be running before Java classes can be used
    # Mark the class as an opaque Python reference and attach the generated
    # Java counterpart used by the interop layer.
    wrapped = JImplements('org.optaplanner.optapy.OpaquePythonReference')(planning_solution_class)
    wrapped.__optapy_java_class = _generate_planning_solution_class(planning_solution_class)
    wrapped.__optapy_is_planning_solution = True
    # Planning solutions are always deep planning cloned.
    wrapped.__optapy_is_planning_clone = True
    _add_shallow_copy_to_class(wrapped)
    return wrapped
def deep_planning_clone(planning_clone_object: Union[Type, Callable]):
    """
    Marks a problem fact class (or a property getter) as requiring a deep planning clone.

    Not needed for a @planning_solution or @planning_entity because those are
    automatically deep cloned. On a property (getter for a field) it is
    especially useful for list (or dictionary) properties. A list or map that
    contains only planning entities or planning solutions as values is already
    deep cloned automatically; if it mixes planning entities and problem
    facts, this decorator is needed.

    :param planning_clone_object: The class or property that should be deep planning cloned.
    :return: planning_clone_object marked as being required for deep planning clone.
    """
    setattr(planning_clone_object, '__optapy_is_planning_clone', True)
    # Classes additionally need the shallow-copy helper used during cloning.
    is_a_class = isinstance(planning_clone_object, type)
    if is_a_class:
        _add_shallow_copy_to_class(planning_clone_object)
    return planning_clone_object
def constraint_provider(constraint_provider_function: Callable[['ConstraintFactory'], List['Constraint']]) -> \
        Callable[['ConstraintFactory'], List['Constraint']]:
    """Decorator marking a function as a ConstraintProvider.

    The function takes a single parameter, the ConstraintFactory, and must
    return a list of Constraints. To create a Constraint, start with
    ConstraintFactory.from(get_class(PythonClass)).

    :type constraint_provider_function: Callable[[ConstraintFactory], List[Constraint]]
    :rtype: Callable[[ConstraintFactory], List[Constraint]]
    """
    ensure_init()  # JVM must be running before the Java class can be generated
    # Attach the generated Java counterpart used by the interop layer.
    java_class = _generate_constraint_provider_class(constraint_provider_function)
    constraint_provider_function.__optapy_java_class = java_class
    return constraint_provider_function
| 49.314629 | 120 | 0.72452 |
a4b1d785e7d6fee8aa46f2a3e9925e4120f0d925 | 1,098 | py | Python | src/wai/annotations/isp/map_labels/specifier/_MapLabelsISPSpecifier.py | waikato-ufdl/wai-annotations-core | bac3429e9488efb456972c74f9d462f951c4af3d | [
"Apache-2.0"
] | null | null | null | src/wai/annotations/isp/map_labels/specifier/_MapLabelsISPSpecifier.py | waikato-ufdl/wai-annotations-core | bac3429e9488efb456972c74f9d462f951c4af3d | [
"Apache-2.0"
] | 3 | 2021-06-30T23:42:47.000Z | 2022-03-01T03:45:07.000Z | src/wai/annotations/isp/map_labels/specifier/_MapLabelsISPSpecifier.py | waikato-ufdl/wai-annotations-core | bac3429e9488efb456972c74f9d462f951c4af3d | [
"Apache-2.0"
] | null | null | null | from typing import Type, Tuple
from ....core.component import ProcessorComponent
from ....core.domain import DomainSpecifier
from ....core.specifier import ProcessorStageSpecifier
class MapLabelsISPSpecifier(ProcessorStageSpecifier):
    """
    Specifies the map-labels ISP.
    """

    @classmethod
    def description(cls) -> str:
        return "Maps object-detection labels from one set to another"

    @classmethod
    def domain_transfer_function(
            cls,
            input_domain: Type[DomainSpecifier]
    ) -> Type[DomainSpecifier]:
        from ....domain.image.object_detection import ImageObjectDetectionDomainSpecifier

        # Guard clause: this ISP only supports the image object-detection domain.
        if input_domain is not ImageObjectDetectionDomainSpecifier:
            raise Exception(
                f"MapLabels only handles the "
                f"{ImageObjectDetectionDomainSpecifier.name()} domain"
            )
        return ImageObjectDetectionDomainSpecifier

    @classmethod
    def components(cls) -> Tuple[Type[ProcessorComponent]]:
        from ...map_labels.component import MapLabels
        return (MapLabels,)
| 32.294118 | 89 | 0.683971 |
3428c337105b1ef4f96888f0d2bfda842b6bee34 | 2,205 | py | Python | octopy/endpoints.py | mbad0la/octopy | 612eb3fe4709093e5ca71b73d0e495ebec203d66 | [
"MIT"
] | null | null | null | octopy/endpoints.py | mbad0la/octopy | 612eb3fe4709093e5ca71b73d0e495ebec203d66 | [
"MIT"
] | null | null | null | octopy/endpoints.py | mbad0la/octopy | 612eb3fe4709093e5ca71b73d0e495ebec203d66 | [
"MIT"
] | null | null | null | import requests, json, base64
api_url = "https://api.github.com/"
def build_call(method, endpoint, username, authstring, params=None, token=False):
    """Issue a request against the GitHub API and return the decoded result.

    Args:
        method     : HTTP verb, one of 'get', 'post', 'put', 'delete'.
        endpoint   : API path appended to https://api.github.com/.
        username   : GitHub username (used for Basic auth when token=False).
        authstring : Password (Basic auth) or OAuth token (token=True).
        params     : Query parameters (get/put/delete) or JSON body (post).
        token      : If True, authenticate with 'token <authstring>' instead
                     of HTTP Basic auth.

    Returns:
        Parsed JSON body for get/post, the HTTP status code for put/delete,
        and None for any unrecognised method (as before).
    """
    # Avoid a shared mutable default argument.
    if params is None:
        params = {}
    headers = {'Accept': 'application/json', 'Content-Type': 'application/json'}
    if token:
        headers['Authorization'] = 'token ' + authstring
    else:
        # base64 works on bytes in Python 3; encode/decode keeps this
        # compatible with both Python 2 and 3.
        credentials = ('%s:%s' % (username, authstring)).encode('utf-8')
        headers['Authorization'] = 'Basic ' + base64.urlsafe_b64encode(credentials).decode('ascii')
    if method in ('put', 'delete'):
        # GitHub requires an explicit Content-length on bodyless PUT/DELETE.
        headers['Content-length'] = 0
    url = api_url + endpoint
    if method == "get":
        r = requests.get(url, headers=headers, params=params)
        return json.loads(r.text)
    elif method == "post":
        r = requests.post(url, headers=headers, data=json.dumps(params))
        return json.loads(r.text)
    elif method == "put":
        r = requests.put(url, headers=headers, params=params)
        return r.status_code
    elif method == "delete":
        r = requests.delete(url, headers=headers, params=params)
        return r.status_code
| 63 | 198 | 0.622676 |
9b2afbc9fbe32a47d808cedbf69960906b2a8804 | 48 | py | Python | tests/__init__.py | LSDtopotools/lsdttparamselector | 107950a76a23a7ad56280d9a68f16493dcfa39ac | [
"MIT"
] | null | null | null | tests/__init__.py | LSDtopotools/lsdttparamselector | 107950a76a23a7ad56280d9a68f16493dcfa39ac | [
"MIT"
] | 21 | 2020-07-28T17:57:47.000Z | 2021-06-03T00:20:21.000Z | tests/__init__.py | LSDtopotools/lsdttparamselector | 107950a76a23a7ad56280d9a68f16493dcfa39ac | [
"MIT"
] | null | null | null | """Unit test package for lsdttparamselector."""
| 24 | 47 | 0.75 |
0eb7409db089b32004a667f23f36718a7701db31 | 4,142 | py | Python | python/tests/test_table_read.py | yang040840219/delta-rs | 0d8b23fb3401171f33c2f299335418abff4df606 | [
"Apache-2.0"
] | null | null | null | python/tests/test_table_read.py | yang040840219/delta-rs | 0d8b23fb3401171f33c2f299335418abff4df606 | [
"Apache-2.0"
] | null | null | null | python/tests/test_table_read.py | yang040840219/delta-rs | 0d8b23fb3401171f33c2f299335418abff4df606 | [
"Apache-2.0"
] | null | null | null | from threading import Barrier, Thread
import pytest
from deltalake import DeltaTable
def test_read_simple_table_to_dict():
    """The simple test table should round-trip to the expected dict."""
    dt = DeltaTable("../rust/tests/data/simple_table")
    expected = {"id": [5, 7, 9]}
    assert dt.to_pyarrow_dataset().to_table().to_pydict() == expected
def test_read_simple_table_by_version_to_dict():
    """Opening a pinned historical version should yield that version's rows."""
    dt = DeltaTable("../rust/tests/data/delta-0.2.0", version=2)
    expected = {"value": [1, 2, 3]}
    assert dt.to_pyarrow_dataset().to_table().to_pydict() == expected
class ExcPassThroughThread(Thread):
    """Wrapper around `threading.Thread` that propagates exceptions.

    Any exception raised inside the thread's target is captured and
    re-raised from join(), so callers can fail on worker-thread errors.
    """
    def __init__(self, target, *args):
        Thread.__init__(self, *args, target=target)
        self.exc = None
    def run(self):
        """Run the target, recording any exception instead of losing it."""
        try:
            Thread.run(self)
        except BaseException as err:
            self.exc = err
    def join(self, timeout=None):
        """Wait for the thread to finish, then re-raise any captured exception.

        Behaves like `threading.Thread.join` (including the optional float
        `timeout` in seconds), except that an exception raised by the target
        is re-raised here in the joining thread.
        """
        super(ExcPassThroughThread, self).join(timeout)
        if self.exc:
            raise self.exc
@pytest.mark.timeout(timeout=5, method="thread")
def test_read_multiple_tables_from_s3(s3cred):
    """
    Should be able to create multiple cloud storage based DeltaTable instances
    without blocking on async rust function calls.
    """
    # NOTE(review): both entries are the same path — presumably intentional, to
    # check that re-opening the same S3 table does not block; confirm.
    for path in ["s3://deltars/simple", "s3://deltars/simple"]:
        t = DeltaTable(path)
        # The file list is fixed for this test table, so an exact match is safe.
        assert t.files() == [
            "part-00000-c1777d7d-89d9-4790-b38a-6ee7e24456b1-c000.snappy.parquet",
            "part-00001-7891c33d-cedc-47c3-88a6-abcfb049d3b4-c000.snappy.parquet",
            "part-00004-315835fe-fb44-4562-98f6-5e6cfa3ae45d-c000.snappy.parquet",
            "part-00007-3a0e4727-de0d-41b6-81ef-5223cf40f025-c000.snappy.parquet",
            "part-00000-2befed33-c358-4768-a43c-3eda0d2a499d-c000.snappy.parquet",
        ]
@pytest.mark.timeout(timeout=10, method="thread")
def test_read_multiple_tables_from_s3_multi_threaded(s3cred):
    """Open the same S3 table concurrently from many threads without deadlock."""
    thread_count = 10
    # All threads rendezvous on the barrier first, maximising contention
    # on the underlying async runtime.
    b = Barrier(thread_count, timeout=5)
    # make sure it works within multiple threads as well
    def read_table():
        b.wait()
        t = DeltaTable("s3://deltars/simple")
        assert t.files() == [
            "part-00000-c1777d7d-89d9-4790-b38a-6ee7e24456b1-c000.snappy.parquet",
            "part-00001-7891c33d-cedc-47c3-88a6-abcfb049d3b4-c000.snappy.parquet",
            "part-00004-315835fe-fb44-4562-98f6-5e6cfa3ae45d-c000.snappy.parquet",
            "part-00007-3a0e4727-de0d-41b6-81ef-5223cf40f025-c000.snappy.parquet",
            "part-00000-2befed33-c358-4768-a43c-3eda0d2a499d-c000.snappy.parquet",
        ]
    # ExcPassThroughThread re-raises worker exceptions in join(), so a failed
    # assertion inside a thread fails the test instead of being swallowed.
    threads = [ExcPassThroughThread(target=read_table) for _ in range(thread_count)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
| 41.009901 | 84 | 0.673588 |
af31fbcecbefbeb1ee04f7dcc33b104b8f4c59ac | 721 | py | Python | zerver/migrations/0201_zoom_video_chat.py | TylerPham2000/zulip | 2e7aaba0dde5517b4a55cb0bd782f009be45e3ba | [
"Apache-2.0"
] | 17,004 | 2015-09-25T18:27:24.000Z | 2022-03-31T22:02:32.000Z | zerver/migrations/0201_zoom_video_chat.py | TylerPham2000/zulip | 2e7aaba0dde5517b4a55cb0bd782f009be45e3ba | [
"Apache-2.0"
] | 20,344 | 2015-09-25T19:02:42.000Z | 2022-03-31T23:54:40.000Z | zerver/migrations/0201_zoom_video_chat.py | TylerPham2000/zulip | 2e7aaba0dde5517b4a55cb0bd782f009be45e3ba | [
"Apache-2.0"
] | 7,271 | 2015-09-25T18:48:39.000Z | 2022-03-31T21:06:11.000Z | # Generated by Django 1.11.16 on 2018-12-28 18:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("zerver", "0200_remove_preregistrationuser_invited_as_admin"),
]
operations = [
migrations.AddField(
model_name="realm",
name="zoom_api_key",
field=models.TextField(default=""),
),
migrations.AddField(
model_name="realm",
name="zoom_api_secret",
field=models.TextField(default=""),
),
migrations.AddField(
model_name="realm",
name="zoom_user_id",
field=models.TextField(default=""),
),
]
| 24.862069 | 71 | 0.568655 |
335274a1f639526f4568d5420d62f4e440b7798a | 2,020 | py | Python | tests/ut/python/dataset/test_c_random_choice.py | huxian123/mindspore | ec5ba10c82bbd6eccafe32d3a1149add90105bc8 | [
"Apache-2.0"
] | 2 | 2021-04-22T07:00:59.000Z | 2021-11-08T02:49:09.000Z | tests/ut/python/dataset/test_c_random_choice.py | huxian123/mindspore | ec5ba10c82bbd6eccafe32d3a1149add90105bc8 | [
"Apache-2.0"
] | 1 | 2020-12-29T06:46:38.000Z | 2020-12-29T06:46:38.000Z | tests/ut/python/dataset/test_c_random_choice.py | huxian123/mindspore | ec5ba10c82bbd6eccafe32d3a1149add90105bc8 | [
"Apache-2.0"
] | 1 | 2021-05-10T03:30:36.000Z | 2021-05-10T03:30:36.000Z | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import mindspore.dataset as ds
import mindspore.dataset.transforms.c_transforms as ops
def test_random_choice():
    """
    Test RandomChoice op
    """
    # Fixed seed so the randomly chosen op is reproducible across runs.
    ds.config.set_seed(0)
    def test_config(arr, op_list):
        # Helper: run `op_list` under RandomChoice over `arr`; returns the
        # transformed rows, or the error message string for invalid configs.
        try:
            data = ds.NumpySlicesDataset(arr, column_names="col", shuffle=False)
            data = data.map(operations=ops.RandomChoice(op_list), input_columns=["col"])
            res = []
            for i in data.create_dict_iterator(num_epochs=1, output_numpy=True):
                res.append(i["col"].tolist())
            return res
        except (TypeError, ValueError) as e:
            return str(e)
    # Test whether an operation would be randomly chosen.
    # In order to prevent random failure, both results need to be checked.
    res1 = test_config([[0, 1, 2]], [ops.PadEnd([4], 0), ops.Slice([0, 2])])
    assert res1 in [[[0, 1, 2, 0]], [[0, 2]]]
    # Test nested structure
    res2 = test_config([[0, 1, 2]], [ops.Compose([ops.Duplicate(), ops.Concatenate()]),
                                     ops.Compose([ops.Slice([0, 1]), ops.OneHot(2)])])
    assert res2 in [[[[1, 0], [0, 1]]], [[0, 1, 2, 0, 1, 2]]]
    # Test RandomChoice where there is only 1 operation
    assert test_config([[4, 3], [2, 1]], [ops.Slice([0])]) == [[4], [2]]
if __name__ == "__main__":
test_random_choice()
| 38.113208 | 88 | 0.613366 |
7aca953c3c0acbfc8589e7d9d0a60797f229bec4 | 276 | py | Python | lino_book/projects/eric/manage.py | lino-framework/lino_book | 4eab916832cd8f48ff1b9fc8c2789f0b437da0f8 | [
"BSD-2-Clause"
] | 3 | 2016-08-25T05:58:09.000Z | 2019-12-05T11:13:45.000Z | lino_book/projects/eric/manage.py | lino-framework/lino_book | 4eab916832cd8f48ff1b9fc8c2789f0b437da0f8 | [
"BSD-2-Clause"
] | 18 | 2016-11-12T21:38:58.000Z | 2019-12-03T17:54:38.000Z | lino_book/projects/eric/manage.py | lino-framework/lino_book | 4eab916832cd8f48ff1b9fc8c2789f0b437da0f8 | [
"BSD-2-Clause"
] | 9 | 2016-10-15T11:12:33.000Z | 2021-09-22T04:37:37.000Z | #!/usr/bin/env python
if __name__ == "__main__":
    import sys
    import os
    # Point Django at this demo project's settings before dispatching.
    os.environ['DJANGO_SETTINGS_MODULE'] = \
        'lino_book.projects.eric.settings.demo'
    from django.core.management import execute_from_command_line
    # Delegate to Django's management CLI (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
| 30.666667 | 64 | 0.728261 |
2f2394bfa61da9366104181a292ab97f745f1146 | 14,398 | py | Python | keras_retinanet/models/retinanet.py | frenebo/keras-retinanet | 8c9b39de2472f392db9b134954d86dd7a8feb817 | [
"Apache-2.0"
] | null | null | null | keras_retinanet/models/retinanet.py | frenebo/keras-retinanet | 8c9b39de2472f392db9b134954d86dd7a8feb817 | [
"Apache-2.0"
] | null | null | null | keras_retinanet/models/retinanet.py | frenebo/keras-retinanet | 8c9b39de2472f392db9b134954d86dd7a8feb817 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import keras
from .. import initializers
from .. import layers
from ..utils.anchors import AnchorParameters
from . import assert_training_model
def default_classification_model(
    num_classes,
    num_anchors,
    pyramid_feature_size=256,
    prior_probability=0.01,
    classification_feature_size=256,
    name='classification_submodel'
):
    """ Creates the default classification submodel.
    Args
        num_classes : Number of classes to predict a score for at each feature level.
        num_anchors : Number of anchors to predict classification scores for at each feature level.
        pyramid_feature_size : The number of filters to expect from the feature pyramid levels.
        prior_probability : Prior used to initialise the final layer's bias (see PriorProbability).
        classification_feature_size : The number of filters to use in the layers in the classification submodel.
        name : The name of the submodel.
    Returns
        A keras.models.Model that predicts classes for each anchor.
    """
    options = {
        'kernel_size' : 3,
        'strides' : 1,
        'padding' : 'same',
    }
    if keras.backend.image_data_format() == 'channels_first':
        inputs = keras.layers.Input(shape=(pyramid_feature_size, None, None))
    else:
        inputs = keras.layers.Input(shape=(None, None, pyramid_feature_size))
    outputs = inputs
    # Four shared 3x3 conv layers, as in the RetinaNet classification head.
    for i in range(4):
        outputs = keras.layers.Conv2D(
            filters=classification_feature_size,
            activation='relu',
            name='pyramid_classification_{}'.format(i),
            kernel_initializer=keras.initializers.normal(mean=0.0, stddev=0.01, seed=None),
            bias_initializer='zeros',
            **options
        )(outputs)
    # Final layer: one score per (anchor, class); bias initialised from the prior.
    outputs = keras.layers.Conv2D(
        filters=num_classes * num_anchors,
        kernel_initializer=keras.initializers.normal(mean=0.0, stddev=0.01, seed=None),
        bias_initializer=initializers.PriorProbability(probability=prior_probability),
        name='pyramid_classification',
        **options
    )(outputs)
    # reshape output and apply sigmoid
    if keras.backend.image_data_format() == 'channels_first':
        outputs = keras.layers.Permute((2, 3, 1), name='pyramid_classification_permute')(outputs)
    outputs = keras.layers.Reshape((-1, num_classes), name='pyramid_classification_reshape')(outputs)
    outputs = keras.layers.Activation('sigmoid', name='pyramid_classification_sigmoid')(outputs)
    return keras.models.Model(inputs=inputs, outputs=outputs, name=name)
def default_regression_model(num_values, num_anchors, pyramid_feature_size=256, regression_feature_size=256, name='regression_submodel'):
    """ Creates the default regression submodel.
    Args
        num_values : Number of values to regress (4 for plain bounding boxes).
        num_anchors : Number of anchors to regress for each feature level.
        pyramid_feature_size : The number of filters to expect from the feature pyramid levels.
        regression_feature_size : The number of filters to use in the layers in the regression submodel.
        name : The name of the submodel.
    Returns
        A keras.models.Model that predicts regression values for each anchor.
    """
    # All new conv layers except the final one in the
    # RetinaNet (classification) subnets are initialized
    # with bias b = 0 and a Gaussian weight fill with stddev = 0.01.
    options = {
        'kernel_size' : 3,
        'strides' : 1,
        'padding' : 'same',
        'kernel_initializer' : keras.initializers.normal(mean=0.0, stddev=0.01, seed=None),
        'bias_initializer' : 'zeros'
    }
    if keras.backend.image_data_format() == 'channels_first':
        inputs = keras.layers.Input(shape=(pyramid_feature_size, None, None))
    else:
        inputs = keras.layers.Input(shape=(None, None, pyramid_feature_size))
    outputs = inputs
    # Four shared 3x3 conv layers, mirroring the classification head.
    for i in range(4):
        outputs = keras.layers.Conv2D(
            filters=regression_feature_size,
            activation='relu',
            name='pyramid_regression_{}'.format(i),
            **options
        )(outputs)
    outputs = keras.layers.Conv2D(num_anchors * num_values, name='pyramid_regression', **options)(outputs)
    if keras.backend.image_data_format() == 'channels_first':
        outputs = keras.layers.Permute((2, 3, 1), name='pyramid_regression_permute')(outputs)
    # Flatten spatial dims so the output is (batch, num_total_anchors, num_values).
    outputs = keras.layers.Reshape((-1, num_values), name='pyramid_regression_reshape')(outputs)
    return keras.models.Model(inputs=inputs, outputs=outputs, name=name)
def __create_pyramid_features(C3, C4, C5, feature_size=256):
    """ Creates the FPN layers on top of the backbone features.
    Builds the top-down pathway with lateral connections of the Feature
    Pyramid Network (Lin et al.), plus the extra P6/P7 levels used by RetinaNet.
    Args
        C3 : Feature stage C3 from the backbone.
        C4 : Feature stage C4 from the backbone.
        C5 : Feature stage C5 from the backbone.
        feature_size : The feature size to use for the resulting feature levels.
    Returns
        A list of feature levels [P3, P4, P5, P6, P7].
    """
    # upsample C5 to get P5 from the FPN paper
    P5 = keras.layers.Conv2D(feature_size, kernel_size=1, strides=1, padding='same', name='C5_reduced')(C5)
    P5_upsampled = layers.UpsampleLike(name='P5_upsampled')([P5, C4])
    P5 = keras.layers.Conv2D(feature_size, kernel_size=3, strides=1, padding='same', name='P5')(P5)
    # add P5 elementwise to C4
    P4 = keras.layers.Conv2D(feature_size, kernel_size=1, strides=1, padding='same', name='C4_reduced')(C4)
    P4 = keras.layers.Add(name='P4_merged')([P5_upsampled, P4])
    P4_upsampled = layers.UpsampleLike(name='P4_upsampled')([P4, C3])
    P4 = keras.layers.Conv2D(feature_size, kernel_size=3, strides=1, padding='same', name='P4')(P4)
    # add P4 elementwise to C3
    P3 = keras.layers.Conv2D(feature_size, kernel_size=1, strides=1, padding='same', name='C3_reduced')(C3)
    P3 = keras.layers.Add(name='P3_merged')([P4_upsampled, P3])
    P3 = keras.layers.Conv2D(feature_size, kernel_size=3, strides=1, padding='same', name='P3')(P3)
    # "P6 is obtained via a 3x3 stride-2 conv on C5"
    P6 = keras.layers.Conv2D(feature_size, kernel_size=3, strides=2, padding='same', name='P6')(C5)
    # "P7 is computed by applying ReLU followed by a 3x3 stride-2 conv on P6"
    P7 = keras.layers.Activation('relu', name='C6_relu')(P6)
    P7 = keras.layers.Conv2D(feature_size, kernel_size=3, strides=2, padding='same', name='P7')(P7)
    return [P3, P4, P5, P6, P7]
def default_submodels(num_classes, num_anchors):
    """ Create a list of default submodels used for object detection.
    The default submodels contains a regression submodel and a classification submodel.
    Args
        num_classes : Number of classes to use.
        num_anchors : Number of base anchors.
    Returns
        A list of tuple, where the first element is the name of the submodel and the second element is the submodel itself.
    """
    regression_submodel = default_regression_model(4, num_anchors)
    classification_submodel = default_classification_model(num_classes, num_anchors)
    return [
        ('regression', regression_submodel),
        ('classification', classification_submodel),
    ]
def __build_model_pyramid(name, model, features):
    """ Applies a single submodel to each FPN level.
    Args
        name : Name of the submodel.
        model : The submodel to evaluate.
        features : The FPN features.
    Returns
        A tensor containing the response from the submodel on the FPN features.
    """
    per_level_outputs = []
    for feature in features:
        per_level_outputs.append(model(feature))
    # Concatenate along the anchor axis so all levels form a single tensor.
    return keras.layers.Concatenate(axis=1, name=name)(per_level_outputs)
def __build_pyramid(models, features):
    """ Applies all submodels to each FPN level.
    Args
        models : List of (name, submodel) tuples to run on each pyramid level
                 (by default only regression, classification).
        features : The FPN features.
    Returns
        A list of tensors, one for each submodel.
    """
    outputs = []
    for submodel_name, submodel in models:
        outputs.append(__build_model_pyramid(submodel_name, submodel, features))
    return outputs
def __build_anchors(anchor_parameters, features):
    """ Builds anchors for the shape of the features from FPN.
    Args
        anchor_parameters : Parameters that determine how anchors are generated.
        features : The FPN features.
    Returns
        A tensor containing the anchors for the FPN features.
        The shape is:
        ```
        (batch_size, num_anchors, 4)
        ```
    """
    # One Anchors layer per pyramid level; size/stride are indexed per level,
    # while ratios/scales are shared across levels.
    anchors = [
        layers.Anchors(
            size=anchor_parameters.sizes[i],
            stride=anchor_parameters.strides[i],
            ratios=anchor_parameters.ratios,
            scales=anchor_parameters.scales,
            name='anchors_{}'.format(i)
        )(f) for i, f in enumerate(features)
    ]
    return keras.layers.Concatenate(axis=1, name='anchors')(anchors)
def retinanet(
    inputs,
    backbone_layers,
    num_classes,
    num_anchors = None,
    create_pyramid_features = __create_pyramid_features,
    submodels = None,
    name = 'retinanet'
):
    """ Construct a RetinaNet model on top of a backbone.
    This model is the minimum model necessary for training (with the unfortunate exception of anchors as output).
    Args
        inputs : keras.layers.Input (or list of) for the input to the model.
        backbone_layers : The backbone feature stages, unpacked as (C3, C4, C5).
        num_classes : Number of classes to classify.
        num_anchors : Number of base anchors.
        create_pyramid_features : Functor for creating pyramid features given the features C3, C4, C5 from the backbone.
        submodels : Submodels to run on each feature map (default is regression and classification submodels).
        name : Name of the model.
    Returns
        A keras.models.Model which takes an image as input and outputs generated anchors and the result from each submodel on every pyramid level.
        The order of the outputs is as defined in submodels:
        ```
        [
            regression, classification, other[0], other[1], ...
        ]
        ```
    """
    if num_anchors is None:
        num_anchors = AnchorParameters.default.num_anchors()
    if submodels is None:
        submodels = default_submodels(num_classes, num_anchors)
    C3, C4, C5 = backbone_layers
    # compute pyramid features as per https://arxiv.org/abs/1708.02002
    features = create_pyramid_features(C3, C4, C5)
    # for all pyramid levels, run available submodels
    pyramids = __build_pyramid(submodels, features)
    return keras.models.Model(inputs=inputs, outputs=pyramids, name=name)
def retinanet_bbox(
    model = None,
    nms = True,
    class_specific_filter = True,
    name = 'retinanet-bbox',
    anchor_params = None,
    using_direction = False,
    **kwargs
):
    """ Construct a RetinaNet model on top of a backbone and adds convenience functions to output boxes directly.
    This model uses the minimum retinanet model and appends a few layers to compute boxes within the graph.
    These layers include applying the regression values to the anchors and performing NMS.
    Args
        model : RetinaNet model to append bbox layers to. If None, it will create a RetinaNet model using **kwargs.
        nms : Whether to use non-maximum suppression for the filtering step.
        class_specific_filter : Whether to use class specific filtering or filter for the best scoring class only.
        name : Name of the model.
        anchor_params : Struct containing anchor parameters. If None, default values are used.
        using_direction : If True, replace the fourth detection output with its argmax over the last axis.
        **kwargs : Additional kwargs to pass to the minimal retinanet model.
    Returns
        A keras.models.Model which takes an image as input and outputs the detections on the image.
        The order is defined as follows:
        ```
        [
            boxes, scores, labels, other[0], other[1], ...
        ]
        ```
    """
    # if no anchor parameters are passed, use default values
    if anchor_params is None:
        anchor_params = AnchorParameters.default
    # create RetinaNet model
    if model is None:
        model = retinanet(num_anchors=anchor_params.num_anchors(), **kwargs)
    else:
        assert_training_model(model)
    # compute the anchors
    features = [model.get_layer(p_name).output for p_name in ['P3', 'P4', 'P5', 'P6', 'P7']]
    anchors = __build_anchors(anchor_params, features)
    # we expect the anchors, regression and classification values as first output
    regression = model.outputs[0]
    classification = model.outputs[1]
    # "other" can be any additional output from custom submodels, by default this will be []
    other = model.outputs[2:]
    # apply predicted regression to anchors
    boxes = layers.RegressBoxes(name='boxes')([anchors, regression])
    boxes = layers.ClipBoxes(name='clipped_boxes')([model.inputs[0], boxes])
    # filter detections (apply NMS / score threshold / select top-k)
    detections = layers.FilterDetections(
        nms = nms,
        class_specific_filter = class_specific_filter,
        name = 'filtered_detections'
    )([boxes, classification] + other)
    if using_direction:
        # Collapse the fourth output (extra submodel scores) to class indices.
        outputs = detections[:3] + [layers.Argmax(axis=-1)(detections[3])] + detections[4:]
    else:
        outputs = detections
    # construct the model
    return keras.models.Model(inputs=model.inputs, outputs=outputs, name=name)
| 39.446575 | 146 | 0.650646 |
ac52b66b20580398a34cdd6729ac8119e74243ec | 90 | py | Python | tests/test_sunportal.py | KiOui/SunPortal | 3b575e791a0fd072f4582403124d869f6ded9ac5 | [
"MIT"
] | 3 | 2018-09-02T13:39:34.000Z | 2021-08-04T13:06:47.000Z | tests/test_sunportal.py | KiOui/SunPortal | 3b575e791a0fd072f4582403124d869f6ded9ac5 | [
"MIT"
] | 1 | 2018-09-03T07:17:04.000Z | 2018-09-03T15:37:23.000Z | tests/test_sunportal.py | KiOui/SunPortal | 3b575e791a0fd072f4582403124d869f6ded9ac5 | [
"MIT"
] | 1 | 2018-09-02T13:39:35.000Z | 2018-09-02T13:39:35.000Z | from sunportal import __version__
def test_version():
    """Package version string should match the pinned release number."""
    expected = '0.1.0'
    assert __version__ == expected
| 15 | 33 | 0.722222 |
75b35fe83e7340b3e22e54063fdc4119e3b8c232 | 1,027 | py | Python | vos2020/wd_api.py | kosovojs/pywikibot-scripts | 4a9bf5177ebcfbba719970f9f3b48fbd51831818 | [
"MIT"
] | 3 | 2021-10-03T17:27:43.000Z | 2021-10-05T12:27:06.000Z | vos2020/wd_api.py | kosovojs/pywikibot-scripts | 4a9bf5177ebcfbba719970f9f3b48fbd51831818 | [
"MIT"
] | null | null | null | vos2020/wd_api.py | kosovojs/pywikibot-scripts | 4a9bf5177ebcfbba719970f9f3b48fbd51831818 | [
"MIT"
] | null | null | null |
from pywikiapi import wikipedia
from helpers import clean_api, chunker
import os, pymysql, json, re
# Connect to English Wikipedia
class WikidataAPI:
	"""Thin wrapper around the Wikidata API for batched entity lookups."""
	site = None  # pywikiapi site handle, created in __init__

	def __init__(self):
		# Connect to www.wikidata.org via the pywikiapi helper.
		self.site = wikipedia('www', 'wikidata')

	def get_item_data(self, wd_items, raw=False, attributes=['sitelinks', 'claims'], claim_props=[]):
		"""Fetch entity data for the given item ids (e.g. 'Q42').

		Items are requested in batches of 49 (one wbgetentities call accepts
		at most 50 ids). 'sitelinks' are flattened to {site: page title} and
		'claims' are passed through clean_api().

		Note: `raw` and `claim_props` are currently unused; they are kept
		only for backward compatibility of the signature.

		Returns a dict mapping entity id -> entity data.
		"""
		result_map = {}
		for batch in chunker(wd_items, 49):
			response = self.site('wbgetentities', ids=batch, props='|'.join(attributes))
			entities = response.get('entities')
			for entity_id in entities:
				data = entities.get(entity_id)
				if 'sitelinks' in attributes:
					# Flatten {site: {..., 'title': t}} to {site: t}; tolerate
					# entities (e.g. redirects) that carry no sitelinks.
					raw_links = data.get('sitelinks') or {}
					data['sitelinks'] = {site: raw_links[site].get('title') for site in raw_links}
				if 'claims' in attributes:
					data['claims'] = clean_api(data.get('claims'))
				result_map[entity_id] = data
		return result_map
| 29.342857 | 101 | 0.640701 |
f7d438e6ebd52f986250873339ec1dacaf389c77 | 9,139 | py | Python | nl_regions.py | tomkooij/covid19 | a7d8a5781ed84b4a59652fc4575c15679de7898a | [
"MIT"
] | null | null | null | nl_regions.py | tomkooij/covid19 | a7d8a5781ed84b4a59652fc4575c15679de7898a | [
"MIT"
] | null | null | null | nl_regions.py | tomkooij/covid19 | a7d8a5781ed84b4a59652fc4575c15679de7898a | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Holiday zones, municipality classes in the Netherlands; module for importing.
Function:
- build_municipality_csv()
- get_municipality_data()
- select_cases_region()
Created on Sat Nov 7 16:08:51 2020 @hk_nien
Note: changes as of 2021-01-07:
'Haaren', # disappears
'Boxtel', 'Vught', 'Tilburg': expanded
'Eemsdelta', merger of 'Appingedam', 'Delfzijl', 'Loppersum',
'Hengelo' renamed to 'Hengelo (O.)' (we undo this)
"""
from pathlib import Path
import re
import json
import pandas as pd
DATA_PATH = Path(__file__).parent / 'data'
DF_MUN = None
def build_municipality_csv(df_cases):
    """Write data/municipalities.csv.
    The csv will have columns:
    - Municipality_name
    - Population
    - Province
    - HolRegion
    This function only needs to be called only rarely (output will be committed).
    Parameters:
    - df_cases: dataframe with columns 'Municipality_name' and 'Province'.
    """
    df_mun = _load_municipality_data_cbs(df_cases)
    df_mun.rename(columns={'Inwoners': 'Population'}, inplace=True)
    ### Get provinces from cases dataframe.
    # dataframe: index=Municipality_name, column 'Province'
    mun_provs = df_cases.groupby('Municipality_name').first()[['Province']]
    df_mun['Province'] = mun_provs['Province']
    # Municipality merger of 2021-01-01: Eemsdelta replaces Appingedam,
    # Delfzijl and Loppersum; add it with the summed population.
    new_row = dict(
        Municipality='Eemsdelta',
        Population=df_mun.loc[['Appingedam', 'Delfzijl', 'Loppersum'], 'Population'].sum(),
        Province='Groningen',
    )
    # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
    # pd.concat is the supported equivalent.
    df_new = pd.DataFrame.from_records([new_row]).set_index('Municipality')
    df_mun = pd.concat([df_mun, df_new])
    _add_holiday_regions(df_mun)
    fpath = DATA_PATH / 'municipalities.csv'
    df_mun.to_csv(fpath, float_format='%.7g', header=True)
    print(f'Wrote {fpath}')
def get_municipality_data():
    """Return dataframe with municipality data:
    Index: Municipality (name)
    Columns: Population, Province, HolRegion,
    This just loads the csv file created by build_municipality_csv(),
    or use a previously cached version.
    """
    global DF_MUN
    # Lazy-load the csv once and cache it at module level.
    if DF_MUN is None:
        df = pd.read_csv(DATA_PATH / 'municipalities.csv')
        df.set_index('Municipality', inplace=True)
        DF_MUN = df
    # Return a copy so callers cannot mutate the cached dataframe.
    return DF_MUN.copy()
def _load_municipality_data_cbs(df_cases):
    """Return municipality dataframe from cases dataframe.
    Cases dataframe must have 'Municipality_name' column.
    This takes data from the CBS table 'Regionale_kerncijfers*.csv'.
    Return dataframe with:
    - index: municipality
    - 'Inwoners' column
    - 'Province' column
    Note: despite the docstring above, only the 'Inwoners' column is built
    here; 'Province' is added by the caller (build_municipality_csv).
    """
    ## Load municipality populations
    path = DATA_PATH / 'Regionale_kerncijfers_Nederland_15082020_130832.csv'
    df_mun = pd.read_csv(path, sep=';')
    # Shorten the verbose CBS column headers; only 'total' is used below.
    df_mun.rename(columns={
        #'Perioden',
        #"Regio's",
        'Bevolking/Bevolkingssamenstelling op 1 januari/Totale bevolking (aantal)': 'total',
        'Bevolking/Bevolkingssamenstelling op 1 januari/Burgerlijke staat/Bevolking 15 jaar of ouder/Inwoners 15 jaar of ouder (aantal)': 'n15plus',
        'Bevolking/Bevolkingssamenstelling op 1 januari/Burgerlijke staat/Bevolking 15 jaar of ouder/Gehuwd (in % van inwoners 15 jaar of ouder)': 'n15gehuwd',
        'Bevolking/Bevolkingssamenstelling op 1 januari/Bevolkingsdichtheid (aantal inwoners per km²)': 'dichtheid',
        'Bouwen en wonen/Woningvoorraad/Voorraad op 1 januari (aantal)': 'woningen',
        'Milieu en bodemgebruik/Bodemgebruik/Oppervlakte/Totale oppervlakte (km²)': 'opp'
    }, inplace=True)
    df_mun = pd.DataFrame({'Municipality': df_mun['Regio\'s'], 'Inwoners': df_mun['total']})
    df_mun.set_index('Municipality', inplace=True)
    # Drop aggregate rows (e.g. provinces) that carry no population value.
    df_mun = df_mun.loc[~df_mun.Inwoners.isna()]
    import re
    # Strip the ' (gemeente)' suffix CBS adds to disambiguate some names.
    df_mun.rename(index=lambda x: re.sub(r' \(gemeente\)$', '', x), inplace=True)
    # Map CBS disambiguated names to the names used in the RIVM case data.
    rename_muns = {
        'Beek (L.)': 'Beek',
        'Hengelo (O.)': 'Hengelo',
        'Laren (NH.)': 'Laren',
        'Middelburg (Z.)': 'Middelburg',
        'Rijswijk (ZH.)': 'Rijswijk',
        'Stein (L.)': 'Stein',
    }
    df_mun.rename(index=rename_muns, inplace=True)
    # df_mun.drop(index=['Valkenburg (ZH.)'], inplace=True)
    return df_mun
def _add_holiday_regions(df_mun):
"""Add a column 'HolRegion' with holiday region names (Noord, Midden, Zuid).
Parameter:
- df_mun: Dataframe with index 'Municipality_name' and at least column 'Province'.
Update df_mun in-place with new 'HolRegion' column.
"""
# Definitions holiday regions
# https://www.rijksoverheid.nl/onderwerpen/schoolvakanties/regios-schoolvakantie
rules = [
# Region name, (P:|M:) province/municipality name
['Noord',
'P:Drenthe', 'P:Flevoland', 'P:Friesland', 'P:Groningen',
'P:Overijssel', 'P:Noord-Holland'],
['Midden', 'M:Zeewolde', 'P:Utrecht', 'P:Zuid-Holland'],
['Zuid', 'P:Limburg', 'P:Noord-Brabant', 'P:Zeeland'],
['Noord', 'M:Hattem', 'M:Eemnes'],
['Zuid', 'P:Gelderland'],
['Midden', 'M:Aalten', 'M:Apeldoorn', 'M:Barneveld', 'M:Berkelland',
'M:Bronckhorst', 'M:Brummen', 'M:Buren', 'M:Culemborg', 'M:Doetinchem',
'M:Ede', 'M:Elburg', 'M:Epe', 'M:Ermelo', 'M:Harderwijk', 'M:Heerde',
'M:Lochem', 'M: Montferland', 'M:Neder-Betuwe', 'M:Nijkerk', 'M:Nunspeet',
'M:Oldebroek', 'M:Oost-Gelre', 'M:Oude IJsselstreek', 'M:Putten',
'M:Scherpenzeel', 'M:Tiel', 'M:Voorst', 'M:Wageningen', 'M:West Betuwe',
'M:Winterswijk en Zutphen', 'M:Werkendam', 'M:Woudrichem'],
]
df_mun['HolRegion'] = None
for rule in rules:
hrname = rule[0]
for pmname in rule[1:]:
if pmname.startswith('P:'):
df_mun.loc[df_mun['Province'] == pmname[2:], 'HolRegion'] = hrname
elif pmname.startswith('M:'):
df_mun.loc[df_mun.index == pmname[2:] , 'HolRegion'] = hrname
else:
raise ValueError(f'pmname {pmname!r}: bad pattern.')
def select_cases_region(dfc, region):
    """Select daily cases by region.

    Parameters:

    - dfc: cases dataframe, with columns
      'Date_of_report', 'Municipality', and various numerical columns.
    - region: one of:
      - the name of a municipality
      - 'Nederland': all
      - 'HR:Zuid', 'HR:Noord', 'HR:Midden', 'HR:Midden+Zuid', 'HR:Midden+Noord':
        holiday regions.
      - 'POP:xx-yy': municipalities with population xx <= pop/1000 < yy'
      - 'P:xx': province
      - 'JSON:{...}' json dict containing key 'muns' with a list
        of municipalities, to be aggregrated.

    Return:

    - Dataframe with Date_of_report as index and
      numerical columns summed as appropriate.
    - npop: population.

    Note: population is sampled at final date. This may result in funny
    results if the municipality selection changes due to municipality
    reorganization.
    """
    df_mun = get_municipality_data()
    # First, mselect is Dataframe of selected municipalities.
    # The branches are ordered most-specific first: the two combined
    # 'HR:Midden+...' strings must be tested before the generic 'HR:' prefix.
    if region == 'Nederland':
        mselect = df_mun
    elif region == 'HR:Midden+Zuid':
        mselect = df_mun.loc[df_mun['HolRegion'].str.match('Midden|Zuid')]
    elif region == 'HR:Midden+Noord':
        mselect = df_mun.loc[df_mun['HolRegion'].str.match('Midden|Noord')]
    elif region.startswith('HR:'):
        mselect = df_mun.loc[df_mun['HolRegion'] == region[3:]]
    elif region.startswith('P:'):
        mselect = df_mun.loc[df_mun['Province'] == region[2:]]
    elif region.startswith('POP:'):
        # Population band in thousands, e.g. 'POP:50-100'.
        # NOTE(review): the error message says 'MS:NUM-NUM' but the accepted
        # prefix is 'POP:' — message looks stale; confirm intended wording.
        ma = re.match(r'POP:(\d+)-(\d+)$', region)
        if not ma:
            raise ValueError(f'region={region!r} does not match \'MS:NUM-NUM\'.')
        pop_lo, pop_hi = float(ma.group(1)), float(ma.group(2))
        mask = (df_mun['Population'] >= pop_lo*1e3) & (df_mun['Population'] < pop_hi*1e3)
        mselect = df_mun.loc[mask]
    elif region.startswith('JSON:'):
        muns = json.loads(region[5:])['muns']
        mselect = df_mun.loc[muns]
    else:
        # Fall-through: treat region as a single municipality name.
        mselect = df_mun.loc[[region]]
    # Select the corresponding rows in dfc.
    # Joining on an empty column selection (mselect[[]]) is a filter: only
    # rows whose Municipality_name appears in mselect survive.
    dfc_sel = dfc.join(mselect[[]], on='Municipality_name', how='inner')
    if len(dfc_sel) == 0:
        raise ValueError(f'No data for region={region!r}.')
    # Population based on final date; avoid double-counting
    # due to municipality reorganization as of 2021-01-07.
    date_end = dfc_sel['Date_of_report'].max()
    # NOTE(review): the boolean mask below is built from dfc but applied to
    # dfc_sel; this only lines up because the join preserves dfc's index.
    # Presumably dfc_sel['Date_of_report'] was intended — confirm.
    muns_end = dfc_sel.loc[dfc['Date_of_report'] == date_end, 'Municipality_name']
    if date_end > pd.to_datetime('2021-01-07'):
        # Distribute 'Haren' over the new municipalities
        # (integer thirds; rounding remainder is ignored).
        df_mun = df_mun.copy()
        for mun in ['Boxtel', 'Vught', 'Tilburg']:
            df_mun.loc[mun, 'Population'] += df_mun.loc['Haaren', 'Population'] // 3
        df_mun.drop(index='Haaren', inplace=True)
    npop = df_mun.loc[muns_end, 'Population'].sum()
    # combine
    dfc_sel = dfc_sel.groupby('Date_of_report').sum()
    return dfc_sel, npop
if __name__ == '__main__':
    # Rebuild municipalities.csv from the RIVM cumulative-cases dump.
    cases_df = pd.read_csv(
        'data/COVID-19_aantallen_gemeente_cumulatief.csv', sep=';')
    build_municipality_csv(cases_df)
| 34.357143 | 157 | 0.64263 |
8f1c7cac7fd30aeb0614895b77f757e31f70d5f1 | 322 | py | Python | infosessions/constants.py | ernado-legacy/infosessions | d1d205c1948f0e4ad40e7edb398fd9890dcf278d | [
"BSD-3-Clause"
] | null | null | null | infosessions/constants.py | ernado-legacy/infosessions | d1d205c1948f0e4ad40e7edb398fd9890dcf278d | [
"BSD-3-Clause"
] | 1 | 2016-05-05T16:37:45.000Z | 2016-05-05T16:37:45.000Z | infosessions/constants.py | ernado-legacy/infosessions | d1d205c1948f0e4ad40e7edb398fd9890dcf278d | [
"BSD-3-Clause"
] | 1 | 2016-05-05T14:35:16.000Z | 2016-05-05T14:35:16.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
# Session-storage key constants (presumably Redis-style namespaced keys —
# TODO confirm against the module that consumes them).
MAX_PREFIX_LENGTH = 16  # maximum allowed length of a key prefix
MAX_KEY_LENGTH = 32  # maximum allowed length of a session key
GLOBAL_PREFIX = 'sessions'  # top-level namespace for all session keys
DATA_KEY = 'd'  # sub-key for session payload data
INFO_KEY = 'i'  # sub-key for session metadata
DEFAULT_PREFIX = 'default'  # prefix used when the caller supplies none
INFO_EXIST_VALUE = 1  # marker value stored to indicate an info record exists
IP_KEY = 'ip'  # metadata field name for the client IP address
SESSION_PROCESSED = 'session_processed'  # flag key marking a session as processed
META_IP = 'REMOTE_ADDR'  # WSGI/Django request.META key for the client IP
META_AGENT = 'HTTP_USER_AGENT'  # WSGI/Django request.META key for the user agent
| 21.466667 | 39 | 0.754658 |
fc090dbc4d3e57477e752ce14033d0f7d572e0a7 | 40,577 | py | Python | src/oci/appmgmt_control/appmgmt_control_client.py | xjuarez/oci-python-sdk | 3c1604e4e212008fb6718e2f68cdb5ef71fd5793 | [
"Apache-2.0",
"BSD-3-Clause"
] | 3 | 2020-09-10T22:09:45.000Z | 2021-12-24T17:00:07.000Z | src/oci/appmgmt_control/appmgmt_control_client.py | xjuarez/oci-python-sdk | 3c1604e4e212008fb6718e2f68cdb5ef71fd5793 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/appmgmt_control/appmgmt_control_client.py | xjuarez/oci-python-sdk | 3c1604e4e212008fb6718e2f68cdb5ef71fd5793 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from __future__ import absolute_import
from oci._vendor import requests # noqa: F401
from oci._vendor import six
from oci import retry, circuit_breaker # noqa: F401
from oci.base_client import BaseClient
from oci.config import get_config_value_or_default, validate_config
from oci.signer import Signer
from oci.util import Sentinel, get_signer_from_authentication_type, AUTHENTICATION_TYPE_FIELD_NAME
from .models import appmgmt_control_type_mapping
missing = Sentinel("Missing")
class AppmgmtControlClient(object):
"""
AppMgmt Control API
"""
    def __init__(self, config, **kwargs):
        """
        Creates a new service client

        :param dict config:
            Configuration keys and values as per `SDK and Tool Configuration <https://docs.cloud.oracle.com/Content/API/Concepts/sdkconfig.htm>`__.
            The :py:meth:`~oci.config.from_file` method can be used to load configuration from a file. Alternatively, a ``dict`` can be passed. You can validate_config
            the dict using :py:meth:`~oci.config.validate_config`

        :param str service_endpoint: (optional)
            The endpoint of the service to call using this client. For example ``https://iaas.us-ashburn-1.oraclecloud.com``. If this keyword argument is
            not provided then it will be derived using the region in the config parameter. You should only provide this keyword argument if you have an explicit
            need to specify a service endpoint.

        :param timeout: (optional)
            The connection and read timeouts for the client. The default values are connection timeout 10 seconds and read timeout 60 seconds. This keyword argument can be provided
            as a single float, in which case the value provided is used for both the read and connection timeouts, or as a tuple of two floats. If
            a tuple is provided then the first value is used as the connection timeout and the second value as the read timeout.
        :type timeout: float or tuple(float, float)

        :param signer: (optional)
            The signer to use when signing requests made by the service client. The default is to use a :py:class:`~oci.signer.Signer` based on the values
            provided in the config parameter.

            One use case for this parameter is for `Instance Principals authentication <https://docs.cloud.oracle.com/Content/Identity/Tasks/callingservicesfrominstances.htm>`__
            by passing an instance of :py:class:`~oci.auth.signers.InstancePrincipalsSecurityTokenSigner` as the value for this keyword argument
        :type signer: :py:class:`~oci.signer.AbstractBaseSigner`

        :param obj retry_strategy: (optional)
            A retry strategy to apply to all calls made by this service client (i.e. at the client level). There is no retry strategy applied by default.
            Retry strategies can also be applied at the operation level by passing a ``retry_strategy`` keyword argument as part of calling the operation.
            Any value provided at the operation level will override whatever is specified at the client level.

            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        :param obj circuit_breaker_strategy: (optional)
            A circuit breaker strategy to apply to all calls made by this service client (i.e. at the client level).
            This client uses :py:data:`~oci.circuit_breaker.DEFAULT_CIRCUIT_BREAKER_STRATEGY` as default if no circuit breaker strategy is provided.
            The specifics of circuit breaker strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/circuit_breakers.html>`__.

        :param function circuit_breaker_callback: (optional)
            Callback function to receive any exceptions triggerred by the circuit breaker.
        """
        validate_config(config, signer=kwargs.get('signer'))
        # Signer resolution, in priority order: an explicitly supplied signer,
        # then one derived from an authentication-type field in the config,
        # and finally a standard API-key Signer built from the config values.
        if 'signer' in kwargs:
            signer = kwargs['signer']

        elif AUTHENTICATION_TYPE_FIELD_NAME in config:
            signer = get_signer_from_authentication_type(config)

        else:
            signer = Signer(
                tenancy=config["tenancy"],
                user=config["user"],
                fingerprint=config["fingerprint"],
                private_key_file_location=config.get("key_file"),
                pass_phrase=get_config_value_or_default(config, "pass_phrase"),
                private_key_content=config.get("key_content")
            )

        # Base-client settings shared by every operation of this service.
        base_client_init_kwargs = {
            'regional_client': True,
            'service_endpoint': kwargs.get('service_endpoint'),
            'base_path': '/20210330',
            'service_endpoint_template': 'https://cp.appmgmt.{region}.oci.{secondLevelDomain}',
            'skip_deserialization': kwargs.get('skip_deserialization', False),
            'circuit_breaker_strategy': kwargs.get('circuit_breaker_strategy', circuit_breaker.GLOBAL_CIRCUIT_BREAKER_STRATEGY)
        }
        if 'timeout' in kwargs:
            base_client_init_kwargs['timeout'] = kwargs.get('timeout')
        # Fall back to the SDK default circuit breaker when neither the caller
        # nor the global setting supplied one.
        if base_client_init_kwargs.get('circuit_breaker_strategy') is None:
            base_client_init_kwargs['circuit_breaker_strategy'] = circuit_breaker.DEFAULT_CIRCUIT_BREAKER_STRATEGY
        self.base_client = BaseClient("appmgmt_control", config, signer, appmgmt_control_type_mapping, **base_client_init_kwargs)
        # Client-level retry strategy / circuit-breaker callback; individual
        # operations may override the retry strategy per call.
        self.retry_strategy = kwargs.get('retry_strategy')
        self.circuit_breaker_callback = kwargs.get('circuit_breaker_callback')
def activate_monitoring_plugin(self, monitored_instance_id, **kwargs):
"""
Activates Resource Plugin for compute instance identified by the instance ocid.
Stores monitored instances Id and its state. Tries to enable Resource Monitoring plugin by making
remote calls to Oracle Cloud Agent and Management Agent Cloud Service.
:param str monitored_instance_id: (required)
OCID of monitored instance.
:param str if_match: (optional)
For optimistic concurrency control. In the PUT or DELETE call
for a resource, set the `if-match` parameter to the value of the
etag from a previous GET or POST response for that resource.
The resource will be updated or deleted only if the etag you
provide matches the resource's current etag value.
:param str opc_request_id: (optional)
The client request ID for tracing.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type None
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/appmgmtcontrol/activate_monitoring_plugin.py.html>`__ to see an example of how to use activate_monitoring_plugin API.
"""
resource_path = "/monitoredInstances/{monitoredInstanceId}/actions/activateMonitoringPlugin"
method = "POST"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"if_match",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"activate_monitoring_plugin got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"monitoredInstanceId": monitored_instance_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"if-match": kwargs.get("if_match", missing),
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.base_client.get_preferred_retry_strategy(
operation_retry_strategy=kwargs.get('retry_strategy'),
client_retry_strategy=self.retry_strategy
)
if retry_strategy:
if not isinstance(retry_strategy, retry.NoneRetryStrategy):
self.base_client.add_opc_client_retries_header(header_params)
retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params)
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params)
def get_monitored_instance(self, monitored_instance_id, **kwargs):
"""
Gets a monitored instance by identifier
:param str monitored_instance_id: (required)
OCID of monitored instance.
:param str opc_request_id: (optional)
The client request ID for tracing.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.appmgmt_control.models.MonitoredInstance`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/appmgmtcontrol/get_monitored_instance.py.html>`__ to see an example of how to use get_monitored_instance API.
"""
resource_path = "/monitoredInstances/{monitoredInstanceId}"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_monitored_instance got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"monitoredInstanceId": monitored_instance_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.base_client.get_preferred_retry_strategy(
operation_retry_strategy=kwargs.get('retry_strategy'),
client_retry_strategy=self.retry_strategy
)
if retry_strategy:
if not isinstance(retry_strategy, retry.NoneRetryStrategy):
self.base_client.add_opc_client_retries_header(header_params)
retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="MonitoredInstance")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="MonitoredInstance")
def get_work_request(self, work_request_id, **kwargs):
"""
Gets the status of the work request with the given ID.
:param str work_request_id: (required)
The ID of the asynchronous request.
:param str opc_request_id: (optional)
The client request ID for tracing.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.appmgmt_control.models.WorkRequest`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/appmgmtcontrol/get_work_request.py.html>`__ to see an example of how to use get_work_request API.
"""
resource_path = "/workRequests/{workRequestId}"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_work_request got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"workRequestId": work_request_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.base_client.get_preferred_retry_strategy(
operation_retry_strategy=kwargs.get('retry_strategy'),
client_retry_strategy=self.retry_strategy
)
if retry_strategy:
if not isinstance(retry_strategy, retry.NoneRetryStrategy):
self.base_client.add_opc_client_retries_header(header_params)
retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="WorkRequest")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="WorkRequest")
    def list_monitored_instances(self, compartment_id, **kwargs):
        """
        Returns a list of monitored instances.

        :param str compartment_id: (required)
            The ID of the compartment in which to list resources.

        :param str display_name: (optional)
            A filter to return only resources that match the entire display name given.

        :param int limit: (optional)
            The maximum number of items to return.

        :param str page: (optional)
            The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.

        :param str sort_order: (optional)
            The sort order to use, either ascending ('ASC') or descending ('DESC').

            Allowed values are: "ASC", "DESC"

        :param str sort_by: (optional)
            The field to sort by. Only one sort order may be provided. Default order for timeCreated is descending. Default order for displayName is ascending. If no value is specified timeCreated is default.

            Allowed values are: "timeCreated", "displayName"

        :param str opc_request_id: (optional)
            The client request ID for tracing.

        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

            This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
            The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.appmgmt_control.models.MonitoredInstanceCollection`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/appmgmtcontrol/list_monitored_instances.py.html>`__ to see an example of how to use list_monitored_instances API.
        """
        resource_path = "/monitoredInstances"
        method = "GET"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "display_name",
            "limit",
            "page",
            "sort_order",
            "sort_by",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "list_monitored_instances got unknown kwargs: {!r}".format(extra_kwargs))

        # Validate enum-style parameters before building the request.
        if 'sort_order' in kwargs:
            sort_order_allowed_values = ["ASC", "DESC"]
            if kwargs['sort_order'] not in sort_order_allowed_values:
                raise ValueError(
                    "Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
                )

        if 'sort_by' in kwargs:
            sort_by_allowed_values = ["timeCreated", "displayName"]
            if kwargs['sort_by'] not in sort_by_allowed_values:
                raise ValueError(
                    "Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
                )

        # Optional query params default to the `missing` sentinel and are
        # filtered out below so they never appear in the request URL.
        query_params = {
            "compartmentId": compartment_id,
            "displayName": kwargs.get("display_name", missing),
            "limit": kwargs.get("limit", missing),
            "page": kwargs.get("page", missing),
            "sortOrder": kwargs.get("sort_order", missing),
            "sortBy": kwargs.get("sort_by", missing)
        }
        query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        # Per-call retry strategy wins over the client-level one.
        retry_strategy = self.base_client.get_preferred_retry_strategy(
            operation_retry_strategy=kwargs.get('retry_strategy'),
            client_retry_strategy=self.retry_strategy
        )

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_client_retries_header(header_params)
                retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                query_params=query_params,
                header_params=header_params,
                response_type="MonitoredInstanceCollection")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                query_params=query_params,
                header_params=header_params,
                response_type="MonitoredInstanceCollection")
    def list_work_request_errors(self, work_request_id, **kwargs):
        """
        Return a (paginated) list of errors for a given work request.

        :param str work_request_id: (required)
            The ID of the asynchronous request.

        :param str opc_request_id: (optional)
            The client request ID for tracing.

        :param str page: (optional)
            The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.

        :param int limit: (optional)
            The maximum number of items to return.

        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

            This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
            The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.appmgmt_control.models.WorkRequestErrorCollection`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/appmgmtcontrol/list_work_request_errors.py.html>`__ to see an example of how to use list_work_request_errors API.
        """
        resource_path = "/workRequests/{workRequestId}/errors"
        method = "GET"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "opc_request_id",
            "page",
            "limit"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "list_work_request_errors got unknown kwargs: {!r}".format(extra_kwargs))

        # Path parameters must be present and non-blank.
        path_params = {
            "workRequestId": work_request_id
        }

        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        # Optional pagination params; `missing` sentinels are filtered below.
        query_params = {
            "page": kwargs.get("page", missing),
            "limit": kwargs.get("limit", missing)
        }
        query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        # Per-call retry strategy wins over the client-level one.
        retry_strategy = self.base_client.get_preferred_retry_strategy(
            operation_retry_strategy=kwargs.get('retry_strategy'),
            client_retry_strategy=self.retry_strategy
        )

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_client_retries_header(header_params)
                retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                query_params=query_params,
                header_params=header_params,
                response_type="WorkRequestErrorCollection")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                query_params=query_params,
                header_params=header_params,
                response_type="WorkRequestErrorCollection")
    def list_work_request_logs(self, work_request_id, **kwargs):
        """
        Return a (paginated) list of logs for a given work request.

        :param str work_request_id: (required)
            The ID of the asynchronous request.

        :param str opc_request_id: (optional)
            The client request ID for tracing.

        :param str page: (optional)
            The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.

        :param int limit: (optional)
            The maximum number of items to return.

        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

            This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
            The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.appmgmt_control.models.WorkRequestLogEntryCollection`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/appmgmtcontrol/list_work_request_logs.py.html>`__ to see an example of how to use list_work_request_logs API.
        """
        resource_path = "/workRequests/{workRequestId}/logs"
        method = "GET"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "opc_request_id",
            "page",
            "limit"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "list_work_request_logs got unknown kwargs: {!r}".format(extra_kwargs))

        # Path parameters must be present and non-blank.
        path_params = {
            "workRequestId": work_request_id
        }

        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}

        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        # Optional pagination params; `missing` sentinels are filtered below.
        query_params = {
            "page": kwargs.get("page", missing),
            "limit": kwargs.get("limit", missing)
        }
        query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        # Per-call retry strategy wins over the client-level one.
        retry_strategy = self.base_client.get_preferred_retry_strategy(
            operation_retry_strategy=kwargs.get('retry_strategy'),
            client_retry_strategy=self.retry_strategy
        )

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_client_retries_header(header_params)
                retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                query_params=query_params,
                header_params=header_params,
                response_type="WorkRequestLogEntryCollection")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                query_params=query_params,
                header_params=header_params,
                response_type="WorkRequestLogEntryCollection")
def list_work_requests(self, compartment_id, **kwargs):
    """
    Lists the work requests in a compartment.

    :param str compartment_id: (required)
        The ID of the compartment in which to list resources.

    :param str work_request_id: (optional)
        The ID of the asynchronous work request.

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param str page: (optional)
        The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.

    :param int limit: (optional)
        The maximum number of items to return.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.appmgmt_control.models.WorkRequestSummaryCollection`
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/appmgmtcontrol/list_work_requests.py.html>`__ to see an example of how to use list_work_requests API.
    """
    resource_path = "/workRequests"
    method = "GET"

    # Reject unknown keyword arguments up front so caller typos fail fast
    # instead of being silently ignored.
    expected_kwargs = [
        "retry_strategy",
        "work_request_id",
        "opc_request_id",
        "page",
        "limit"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "list_work_requests got unknown kwargs: {!r}".format(extra_kwargs))

    # Build the query string; `missing` sentinels (unset optionals) and
    # explicit Nones are dropped before the call.
    query_params = {
        "compartmentId": compartment_id,
        "workRequestId": kwargs.get("work_request_id", missing),
        "page": kwargs.get("page", missing),
        "limit": kwargs.get("limit", missing)
    }
    query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-request-id": kwargs.get("opc_request_id", missing)
    }
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    # The per-call strategy (kwarg) takes precedence over the client-level one.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            self.base_client.add_opc_client_retries_header(header_params)
            retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            query_params=query_params,
            header_params=header_params,
            response_type="WorkRequestSummaryCollection")
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            query_params=query_params,
            header_params=header_params,
            response_type="WorkRequestSummaryCollection")
def publish_top_processes_metrics(self, monitored_instance_id, **kwargs):
    """
    Starts cpu and memory top processes collection.

    :param str monitored_instance_id: (required)
        OCID of monitored instance.

    :param str opc_request_id: (optional)
        The client request ID for tracing.

    :param str opc_retry_token: (optional)
        A token that uniquely identifies a request so it can be retried in case of a timeout or
        server error without risk of executing that same action again. Retry tokens expire after 24
        hours, but can be invalidated before then due to conflicting operations. For example, if a resource
        has been deleted and purged from the system, then a retry of the original creation request
        might be rejected.

    :param obj retry_strategy: (optional)
        A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.

        This should be one of the strategies available in the :py:mod:`~oci.retry` module. This operation will not retry by default, users can also use the convenient :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY` provided by the SDK to enable retries for it.
        The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.

        To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

    :return: A :class:`~oci.response.Response` object with data of type None
    :rtype: :class:`~oci.response.Response`

    :example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/appmgmtcontrol/publish_top_processes_metrics.py.html>`__ to see an example of how to use publish_top_processes_metrics API.
    """
    resource_path = "/monitoredInstances/{monitoredInstanceId}/actions/publishTopProcessesMetrics"
    method = "POST"

    # Reject unknown keyword arguments up front so caller typos fail fast.
    expected_kwargs = [
        "retry_strategy",
        "opc_request_id",
        "opc_retry_token"
    ]
    extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
    if extra_kwargs:
        raise ValueError(
            "publish_top_processes_metrics got unknown kwargs: {!r}".format(extra_kwargs))

    # The path parameter is mandatory and substituted into resource_path;
    # empty/whitespace values would produce a malformed URL, so validate.
    path_params = {
        "monitoredInstanceId": monitored_instance_id
    }
    path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
    for (k, v) in six.iteritems(path_params):
        if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
            raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

    header_params = {
        "accept": "application/json",
        "content-type": "application/json",
        "opc-request-id": kwargs.get("opc_request_id", missing),
        "opc-retry-token": kwargs.get("opc_retry_token", missing)
    }
    header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

    # The per-call strategy (kwarg) takes precedence over the client-level one.
    retry_strategy = self.base_client.get_preferred_retry_strategy(
        operation_retry_strategy=kwargs.get('retry_strategy'),
        client_retry_strategy=self.retry_strategy
    )

    if retry_strategy:
        if not isinstance(retry_strategy, retry.NoneRetryStrategy):
            # POST is only safely retryable with an idempotency token.
            self.base_client.add_opc_retry_token_if_needed(header_params)
            self.base_client.add_opc_client_retries_header(header_params)
            retry_strategy.add_circuit_breaker_callback(self.circuit_breaker_callback)
        return retry_strategy.make_retrying_call(
            self.base_client.call_api,
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params)
    else:
        return self.base_client.call_api(
            resource_path=resource_path,
            method=method,
            path_params=path_params,
            header_params=header_params)
| 48.946924 | 261 | 0.654681 |
178924a0a4f0d52d5a590d83ea224f6334004cff | 3,082 | py | Python | Lib/site-packages/pythonwin/pywin/framework/editor/vss.py | egorcompany/telegram-chat-members | 19a7c2bffe2fb832b79a4475ca324c438d5f548d | [
"MIT"
] | 3 | 2016-11-24T03:57:22.000Z | 2019-02-27T15:19:50.000Z | Lib/site-packages/pythonwin/pywin/framework/editor/vss.py | egorcompany/telegram-chat-members | 19a7c2bffe2fb832b79a4475ca324c438d5f548d | [
"MIT"
] | 67 | 2016-10-19T01:23:47.000Z | 2016-12-14T04:30:38.000Z | Lib/site-packages/pythonwin/pywin/framework/editor/vss.py | egorcompany/telegram-chat-members | 19a7c2bffe2fb832b79a4475ca324c438d5f548d | [
"MIT"
] | 1 | 2020-08-08T12:44:48.000Z | 2020-08-08T12:44:48.000Z | # vss.py -- Source Control using Microsoft VSS.
# Provides routines for checking files out of VSS.
#
# Uses an INI file very similar to how VB integrates with VSS - even
# as far as using the same name.
# The file must be named "Mssccprj.scc", and be in the format of
# an INI file. This file may be in a parent directory, in which
# case the project name will be built from what is specified in the
# ini file, plus the path from the INI file to the file itself.
#
# The INI file should have a [Python] section, and a
# Project=Project Name
# and optionally
# Database=??
import win32ui, win32api, win32con, os, string, sys
import traceback
g_iniName = "Mssccprj.scc" # Use the same INI name as VB!
g_sourceSafe = None
def FindVssProjectInfo(fullfname):
    """Locate the VSS project INI file that describes *fullfname*.

    Walks up the directory tree (supporting ni-style packages) looking for
    the project INI file named by ``g_iniName``.

    Returns a ``(projectName, pathToFileName, database)`` tuple, where
    ``pathToFileName`` is the '/'-joined path from the INI file's directory
    down to the file itself; returns ``None`` (after showing a message box)
    when no configured directory is found.
    """
    directory, leaf = os.path.split(fullfname)
    start_dir = directory
    rel_parts = [leaf]
    project = ""
    database = ""
    while 1:
        ini_path = os.path.join(directory, g_iniName)
        database = win32api.GetProfileVal("Python", "Database", "", ini_path)
        project = win32api.GetProfileVal("Python", "Project", "", ini_path)
        if project:
            break
        # No valid INI file here - step up one directory level.
        directory, component = os.path.split(directory)
        if not component:  # reached the filesystem root
            break
        rel_parts.insert(0, component)
    if not project:
        win32ui.MessageBox("%s\r\n\r\nThis directory is not configured for Python/VSS" % start_dir)
        return
    return project, "/".join(rel_parts), database
def CheckoutFile(fileName):
    """Check *fileName* out of Visual SourceSafe via COM.

    Returns 1 on success, 0 on failure (and ``None`` if no project INI was
    found). Caches the SourceSafe COM object in the module-global
    ``g_sourceSafe`` so the database is only opened once per session.
    """
    global g_sourceSafe
    import pythoncom
    ok = 0
    # Assumes the fileName has a complete path,
    # and that the INI file can be found in that path
    # (or a parent path if a ni style package)
    try:
        import win32com.client, win32com.client.gencache
        # Ensure the SourceSafe type library ({783CD4E0-...} v5.0) is available.
        mod = win32com.client.gencache.EnsureModule('{783CD4E0-9D54-11CF-B8EE-00608CC9A71F}', 0, 5, 0)
        if mod is None:
            win32ui.MessageBox("VSS does not appear to be installed. The TypeInfo can not be created")
            return ok
        rc = FindVssProjectInfo(fileName)
        if rc is None:
            # FindVssProjectInfo already reported the problem to the user.
            return
        project, vssFname, database = rc
        if g_sourceSafe is None:
            g_sourceSafe=win32com.client.Dispatch("SourceSafe")
        # SS seems a bit wierd. It defaults the arguments as empty strings, but
        # then complains when they are used - so we pass "Missing"
        if not database:
            database = pythoncom.Missing
        g_sourceSafe.Open(database, pythoncom.Missing, pythoncom.Missing)
        item = g_sourceSafe.VSSItem("$/%s/%s" % (project, vssFname))
        item.Checkout(None, fileName)
        ok = 1
    except pythoncom.com_error as exc:
        win32ui.MessageBox(exc.strerror, "Error checking out file")
    except:
        # NOTE(review): bare except - any other failure (including
        # programming errors) is reported via a message box and swallowed.
        typ, val, tb = sys.exc_info()
        traceback.print_exc()
        win32ui.MessageBox("%s - %s" % (str(typ), str(val)),"Error checking out file")
        tb = None # Cleanup a cycle
    return ok
| 32.787234 | 97 | 0.700844 |
5b6b3edfece476aadcbea96ff53dec9fc11a9bbf | 2,215 | py | Python | release/stubs.min/Autodesk/Revit/DB/__init___parts/FormatValueOptions.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | release/stubs.min/Autodesk/Revit/DB/__init___parts/FormatValueOptions.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | release/stubs.min/Autodesk/Revit/DB/__init___parts/FormatValueOptions.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | class FormatValueOptions(object, IDisposable):
"""
Options for formatting numbers with units into strings.
FormatValueOptions()
"""
def Dispose(self):
""" Dispose(self: FormatValueOptions) """
pass
def GetFormatOptions(self):
"""
GetFormatOptions(self: FormatValueOptions) -> FormatOptions
Gets the FormatOptions to optionally override the default settings in the Units
class.
Returns: A copy of the FormatOptions.
"""
pass
def ReleaseUnmanagedResources(self, *args):
""" ReleaseUnmanagedResources(self: FormatValueOptions,disposing: bool) """
pass
def SetFormatOptions(self, formatOptions):
"""
SetFormatOptions(self: FormatValueOptions,formatOptions: FormatOptions)
Sets the FormatOptions to optionally override the default settings in the Units
class.
formatOptions: The FormatOptions.
"""
pass
def __enter__(self, *args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self, *args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __repr__(self, *args):
""" __repr__(self: object) -> str """
pass
AppendUnitSymbol = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Indicates if a unit symbol should be appended regardless of the settings in the FormatOptions.
Get: AppendUnitSymbol(self: FormatValueOptions) -> bool
Set: AppendUnitSymbol(self: FormatValueOptions)=value
"""
IsValidObject = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Specifies whether the .NET object represents a valid Revit entity.
Get: IsValidObject(self: FormatValueOptions) -> bool
"""
| 23.817204 | 221 | 0.636569 |
78af3291e760422e95a03d86b19b02c18befead3 | 169 | py | Python | runner_master/runner/data/filereaders/direct_reader.py | bigvideoresearch/SCC | f26cdb6aaf248b5112812dbdac1f1b5086aebccc | [
"MIT"
] | 5 | 2021-09-15T21:48:55.000Z | 2022-03-22T11:21:58.000Z | hogwarts/data/readers/direct_reader.py | PingchuanMa/hogwarts | 404e1d524fee4f190d8de1c1e8bc0711d895089a | [
"MIT"
] | null | null | null | hogwarts/data/readers/direct_reader.py | PingchuanMa/hogwarts | 404e1d524fee4f190d8de1c1e8bc0711d895089a | [
"MIT"
] | 1 | 2021-08-20T08:40:15.000Z | 2021-08-20T08:40:15.000Z | __all__ = ['DirectReader']
class DirectReader:
def __call__(self, path):
with open(path, 'rb') as f:
content = f.read()
return content
| 18.777778 | 35 | 0.573964 |
f119fdcb1d6dcf972d2fabcfda4067e7a1092806 | 599 | py | Python | sim/network/communicator.py | bx3/perigee-bandit | 73771672abe9321edbb7d455a59bfb072fafa33f | [
"MIT"
] | null | null | null | sim/network/communicator.py | bx3/perigee-bandit | 73771672abe9321edbb7d455a59bfb072fafa33f | [
"MIT"
] | null | null | null | sim/network/communicator.py | bx3/perigee-bandit | 73771672abe9321edbb7d455a59bfb072fafa33f | [
"MIT"
class Communicator:
    """Per-node connection bookkeeping for the network simulation.

    Tracks the sets of inbound and outbound peer ids along with the node's
    delay and connection limits.
    """

    def __init__(self, nid, n_delay, in_lim, out_lim, out_conns):
        self.id = nid
        self.node_delay = n_delay
        self.in_lim = in_lim
        self.out_lim = out_lim
        self.ins = set()
        self.outs = set(out_conns)

    def get_peers(self):
        """Return every known peer: the union of outbound and inbound sets."""
        return self.outs | self.ins

    def update_conns(self, outs, ins):
        """Replace both connection sets with fresh copies of the given iterables."""
        self.outs = set(outs)
        self.ins = set(ins)
37c258f5206a91cab20f4db15bfc7db101b23709 | 9,304 | py | Python | bdfparser.py | njmh/bdfparser | 75a01f1d37f2a0d634ade1b4d523a5bfc6c0a688 | [
"MIT"
] | null | null | null | bdfparser.py | njmh/bdfparser | 75a01f1d37f2a0d634ade1b4d523a5bfc6c0a688 | [
"MIT"
] | null | null | null | bdfparser.py | njmh/bdfparser | 75a01f1d37f2a0d634ade1b4d523a5bfc6c0a688 | [
"MIT"
] | null | null | null | # Simple BDF to BMP tool in Python
#
# Copyright (c) 2017 tomchen.org (tomchen.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation self.files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# works for .bdf where every entry has one and only one STARTCHAR, BBX, BITMAP, ENDCHAR, and no empty line inside BITMAP
import re
class BdfParser(object):
REGEX = (r'(\s*ENCODING\s*(\d+)\s*\n'
r'(?:.*?\n)*?'
r'\s*BBX\s*([-\d]+)\s*([-\d]+)\s*([-\d]+)\s*([-\d]+)\s*\n'
r'(?:.*?\n)*?'
r'\s*BITMAP\s*\n'
r'(?:\s*?\n)*?'
r'((?:\s*\w+\s*\n)*?)'
r'(?:\s*?\n)*?'
r'\s*ENDCHAR\s*)')
def __init__(self, bdfFileName):
self.file = open(bdfFileName, 'r')
self.bdfContent = self.file.read()
# FONT name
fontResult = re.search(r'^\s*FONT\s*(?P<name>.*)\s*\n', self.bdfContent, re.MULTILINE)
self.name = fontResult.group('name')
fbbResult = re.search(r'^\s*FONTBOUNDINGBOX\s*(?P<fbbw>[-\d]+)\s*(?P<fbbh>[-\d]+)\s*(?P<fbbxoff>[-\d]+)\s*(?P<fbbyoff>[-\d]+)\s*$', self.bdfContent, re.MULTILINE)
# FONTBOUNDINGBOX FBBw FBBh Xoff Yoff
self.fbbW = int(fbbResult.group('fbbw'))
self.fbbWShadowed = int(fbbResult.group('fbbw')) + 1
self.fbbWGlowed = int(fbbResult.group('fbbw')) + 2
self.fbbH = int(fbbResult.group('fbbh'))
self.fbbHShadowed = int(fbbResult.group('fbbh')) + 1
self.fbbHGlowed = int(fbbResult.group('fbbh')) + 2
self.fbbXOff = int(fbbResult.group('fbbxoff'))
self.fbbYOff = int(fbbResult.group('fbbyoff'))
regexfDwx0 = (r'(.*?\n)*'
r'\s*ENDPROPERTIES\s*')
propResult = re.match(regexfDwx0, self.bdfContent, re.MULTILINE)
fDwx0Result = re.search(r'^\s*DWIDTH\s*(?P<fdwx0>[-\d]+)\s*[-\d]+\s*$', propResult.group(0), re.MULTILINE)
# DWIDTH fdwx0 0
if fDwx0Result:
self.fDwx0 = int(fDwx0Result.group('fdwx0'))
else:
self.fDwx0 = self.fbbW
result = re.findall(self.REGEX, self.bdfContent, re.MULTILINE)
self.resultDict = {}
for val in result:
self.resultDict[int(val[1])] = val
# hex (string) to 4*n-bit binary (string) then get a 'number'-character substring from left
def __hex2bin(self, hexStr, number):
theInt = int(hexStr, 16)
lenTemp = number % 8
if lenTemp == 0:
lenTemp = number
else:
lenTemp = number + 8 - lenTemp
res = format(theInt, '0' + str(lenTemp) + 'b')
res = res[:number]
return res
def __cutOrPad(self, string, margin, isLeft):
number = len(string) + margin
if margin == 0:
return string
elif margin > 0:
if isLeft:
return string.rjust(number, '0')
else:
return string.ljust(number, '0')
else:
if isLeft:
return string[-number:]
else:
return string[:number]
def __bytes2string(self, hexStr, number):
theInt = int(hexStr, 16)
lenTemp = number % 8
if lenTemp == 0:
lenTemp = number
else:
lenTemp = number + 8 - lenTemp
res = format(theInt, '0' + str(lenTemp) + 'b')
res = res[:number]
return res
def getFontName(self):
return self.name
def getCharBmpByUnicode(self, uCode):
thisGlyphInfo = self.getGlyphInfo(uCode)
dwx0 = thisGlyphInfo['dwx0']
bbW = thisGlyphInfo['bbW']
bbH = thisGlyphInfo['bbH']
bbXOff = thisGlyphInfo['bbXOff']
bbYOff = thisGlyphInfo['bbYOff']
bitmap = thisGlyphInfo['bitmap']
marginLeft = bbXOff - self.fbbXOff
marginBottom = bbYOff - self.fbbYOff
marginRight = dwx0 + self.fbbXOff - bbW - bbXOff
marginTop = self.fbbH + self.fbbYOff - bbH - bbYOff
bitmapProd = ''
if marginTop > 0:
bitmapProd += ('0' * dwx0 + '\n') * marginTop
bitmapLineList = bitmap.splitlines()
bitmapLineListLen = len(bitmapLineList)
if bitmapLineListLen != bbH:
print('Warning: The number of lines is not equal to the defined bbH for ' + str(uCode))
for index, line in enumerate(bitmapLineList):
if (index >= bbH) or (marginTop < 0 and index < -marginTop) or (marginBottom < 0 and index >= bitmapLineListLen + marginBottom):
continue
bitmapProdLine = self.__hex2bin(line, bbW)
bitmapProdLine = self.__cutOrPad(bitmapProdLine, marginLeft, True)
bitmapProdLine = self.__cutOrPad(bitmapProdLine, marginRight, False)
bitmapProd += bitmapProdLine
bitmapProd += '\n'
bitmapProd = bitmapProd.rstrip()
if marginBottom > 0:
bitmapProd += ('\n' + '0' * dwx0) * marginBottom
bitmapProd = bitmapProd.lstrip()
return bitmapProd
def getShadowedCharBmpByUnicode(self, uCode):
thisGlyphInfo = self.getGlyphInfo(uCode)
dwx0 = thisGlyphInfo['dwx0']
bitmap = self.getCharBmpByUnicode(uCode) + '\n' + '0' * dwx0
bitmapLineList = bitmap.splitlines()
# bitmapLineListLen = len(bitmapLineList)
thisLineTemp = []
lastLineCharList = []
for index, thisLine in enumerate(bitmapLineList):
thisLineTemp = thisLine
thisLineCharList = list(thisLine + '0')
if len(lastLineCharList) != 0:
for i, char in enumerate(lastLineCharList):
if (char == '1') and (thisLineCharList[i+1] == '0'):
thisLineCharList[i+1] = '2'
bitmapLineList[index] = ''.join(thisLineCharList)
lastLineCharList = list(thisLineTemp)
return '\n'.join(bitmapLineList)
def getGlowedCharBmpByUnicode(self, uCode):
thisGlyphInfo = self.getGlyphInfo(uCode)
dwx0 = thisGlyphInfo['dwx0']
bitmap = '0' * dwx0 + '\n' + self.getCharBmpByUnicode(uCode) + '\n' + '0' * dwx0
bitmapLineList = bitmap.splitlines()
bitmapLineProdList = []
bitmapLineListLen = len(bitmapLineList)
for index, thisLine in enumerate(bitmapLineList):
thisLineCharList = list(thisLine)
thisLineCharListLen = len(thisLineCharList)
if thisLineCharList[0] == '1':
thisLineCharPrefix = '2'
else:
thisLineCharPrefix = '0'
for i, char in enumerate(thisLineCharList):
if index > 0:
lastLineTest = bitmapLineList[index - 1][i] == '1'
else:
lastLineTest = False
if index < bitmapLineListLen - 1:
nextLineTest = bitmapLineList[index + 1][i] == '1'
else:
nextLineTest = False
if i > 0:
lastCharTest = thisLineCharList[i - 1] == '1'
else:
lastCharTest = False
if i < thisLineCharListLen - 1:
nextCharTest = thisLineCharList[i + 1] == '1'
else:
nextCharTest = False
if ((lastLineTest or nextLineTest or lastCharTest or nextCharTest) and (char == '0')):
thisLineCharList[i] = '2'
if thisLineCharList[thisLineCharListLen - 1] == '1':
thisLineCharSuffix = '2'
else:
thisLineCharSuffix = '0'
thisLineCharList.insert(0, thisLineCharPrefix)
thisLineCharList.append(thisLineCharSuffix)
bitmapLineProdList.append(''.join(thisLineCharList))
return '\n'.join(bitmapLineProdList)
def getCharHexByUnicode(self, uCode):
return self.getCharHex(self.getCharBmpByUnicode(uCode))
def getShadowedCharHexByUnicode(self, uCode):
return self.getCharHex(self.getShadowedCharBmpByUnicode(uCode))
def getGlowedCharHexByUnicode(self, uCode):
return self.getCharHex(self.getGlowedCharBmpByUnicode(uCode))
def getBlackedCharHexByUnicode(self, uCode):
return self.getBlackedCharHex(self.getCharBmpByUnicode(uCode))
def getGlyphInfo(self, uCode):
if uCode in self.resultDict:
thisResultDictList = self.resultDict[uCode]
# BBX BBw BBh BBxoff0x BByoff0y
resultDwx0 = re.search(r'\s*DWIDTH\s*(?P<dwx0>[-\d]+)\s*[-\d]+\s*\n', thisResultDictList[0], re.MULTILINE)
# DWIDTH dwx0 0
if resultDwx0:
dwx0 = resultDwx0.group('dwx0')
else:
dwx0 = self.fDwx0
retDict = {
'dwx0' : int(dwx0),
'bbW' : int(thisResultDictList[2]),
'bbH' : int(thisResultDictList[3]),
'bbXOff' : int(thisResultDictList[4]),
'bbYOff' : int(thisResultDictList[5]),
'bitmap' : thisResultDictList[6].rstrip(),
'outputW' : int(dwx0),
'outputH' : self.fbbH,
'shadowedOutputW' : int(dwx0) + 1,
'shadowedOutputH' : self.fbbH + 1,
'glowedOutputW' : int(dwx0) + 2,
'glowedOutputH' : self.fbbH + 2
}
return retDict
else:
return None
# foreground 1 FF
# shadow 2 01
# background 0 00
def getCharHex(self, bmpCode):
return bytes(bmpCode, 'ascii').replace(b'0', b'\x00').replace(b'1', b'\xff').replace(b'2', b'\x01').replace(b'\n', b'')
# return bmpCode.replace('0', '00').replace('1', 'ff').replace('2', '01').replace('\n', '')
def getBlackedCharHex(self, bmpCode):
return bytes(bmpCode, 'ascii').replace(b'0', b'\x00').replace(b'1', b'\x01').replace(b'\n', b'')
# return bmpCode.replace('0', '00').replace('1', '01').replace('\n', '')
| 36.629921 | 164 | 0.675623 |
80db16baf33a4d30325595669f1c96f25628eb62 | 517 | py | Python | cern_search_rest_api/modules/cernsearch/fetchers.py | inveniosoftware-contrib/citadel-search | 736fdb3a5b32f750111bc846bc815c4671978fa1 | [
"MIT"
] | 6 | 2020-04-12T18:30:08.000Z | 2021-09-15T05:53:40.000Z | cern_search_rest_api/modules/cernsearch/fetchers.py | inveniosoftware-contrib/cern-search | 736fdb3a5b32f750111bc846bc815c4671978fa1 | [
"MIT"
] | 6 | 2020-03-19T13:28:38.000Z | 2020-12-08T16:54:05.000Z | cern_search_rest_api/modules/cernsearch/fetchers.py | inveniosoftware-contrib/cern-search | 736fdb3a5b32f750111bc846bc815c4671978fa1 | [
"MIT"
] | 2 | 2019-04-22T21:20:17.000Z | 2019-05-16T08:50:38.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of CERN Search.
# Copyright (C) 2018-2021 CERN.
#
# Citadel Search is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Persistent identifier fetcher."""
from invenio_pidstore.fetchers import FetchedPID
def recid_fetcher(record_uuid, data):
    """Fetch the persistent identifier of type ``recid`` from record *data*.

    ``record_uuid`` is part of the fetcher interface but is not used here.
    """
    pid_value = str(data["recid"])
    return FetchedPID(pid_type="recid", pid_value=pid_value)
| 24.619048 | 75 | 0.688588 |
c6b87aec57c42f241f23d0c0182b491a229e3c39 | 3,694 | py | Python | paper_trading/api/pytdx_api.py | pTraderTeam/paper_trading | 720d709bab8a39d41b5953563bba68b1e3b480c7 | [
"MIT"
] | 19 | 2020-05-30T06:05:15.000Z | 2021-09-26T11:58:44.000Z | paper_trading/api/pytdx_api.py | markqiu/paper_trading | 074e9bd34f78a395b4b7af922c2623c3a83c4f60 | [
"MIT"
] | 5 | 2020-06-01T02:44:48.000Z | 2020-08-15T03:05:47.000Z | paper_trading/api/pytdx_api.py | pTraderTeam/paper_trading | 720d709bab8a39d41b5953563bba68b1e3b480c7 | [
"MIT"
] | 14 | 2020-05-30T07:19:37.000Z | 2022-02-10T02:20:38.000Z | import random
import pandas as pd
from pytdx.config.hosts import hq_hosts
from pytdx.hq import TdxHq_API
from pytdx.pool.hqpool import TdxHqPool_API
from pytdx.pool.ippool import AvailableIPPool
# Exchange-code lookup table: maps the symbol suffix ("SH"/"SZ")
# to the pytdx market code (1 = Shanghai, 0 = Shenzhen).
exchange_map = {}
exchange_map["SH"] = 1
exchange_map["SZ"] = 0
class PYTDXService:
    """pytdx market-data service: wraps the TDX quote API behind an IP pool."""

    def __init__(self, client):
        """Constructor.

        :param client: pymongo client used to look up security metadata.
        """
        self.connected = False  # connection state of the data service
        self.hq_api = None  # quote API, set by connect_api()
        self.client = client  # mongo client

    def connect_api(self):
        """Connect the enhanced quote API and verify the connection."""
        try:
            if not self.connected:
                ips = [(v[1], v[2]) for v in hq_hosts]
                # Pick 5 random hosts as the candidate IP pool.
                random.shuffle(ips)
                ips5 = ips[:5]
                # IP pool object.
                ippool = AvailableIPPool(TdxHq_API, ips5)
                # Select the primary (M) and hot-backup (H) hosts.
                primary_ip, hot_backup_ip = ippool.sync_get_top_n(2)
                # Build the pooled API: first arg is TdxHq_API (or TdxExHq_API),
                # second is the IP pool object.
                self.hq_api = TdxHqPool_API(TdxHq_API, ippool)
                self.hq_api.connect(primary_ip, hot_backup_ip)
                self.connected = True
            # NOTE(review): indentation reconstructed - `return True` is placed
            # so an already-connected service also reports success; confirm
            # against upstream.
            return True
        except Exception:
            raise ConnectionError("pytdx连接错误")

    def get_realtime_data(self, symbol: str):
        """Fetch real-time quote data for a stock symbol (e.g. "600000.SH")."""
        try:
            symbols = self.generate_symbols(symbol)
            df = self.hq_api.to_df(self.hq_api.get_security_quotes(symbols))
            data = self.client["stocks"]["security"].find_one({"code": symbols[0][1], "market": str(symbols[0][0])})
            # Fund prices: TDX reports fund data at 10x the actual price,
            # so divide the price fields by 10 for 3-decimal securities.
            if data["decimal_point"] == 3:
                for val in [
                    "price",
                    "last_close",
                    "open",
                    "high",
                    "low",
                    "ask1",
                    "bid1",
                    "ask2",
                    "bid2",
                    "ask3",
                    "bid3",
                    "ask4",
                    "bid4",
                    "ask5",
                    "bid5",
                ]:
                    df[val] = df[val] / 10
            return df
        except Exception:
            raise ValueError("股票数据获取失败")

    def get_history_transaction_data(self, symbol, date):
        """Query historical tick (transaction) data.

        Example: get_history_transaction_data(TDXParams.MARKET_SZ, '000001', 0, 10, 20170209)
        Parameters: market code, stock code, start position, count, date.
        Output columns: [time, price, vol, buyorsell (0: buy, 1: sell, 2: neutral)]
        """
        # Resolve the symbol into (code, market).
        code, market = self.check_symbol(symbol)
        # Fetch in four 2000-row pages, newest offsets first, then de-duplicate.
        check_date = int(date)
        count = 2000
        data_list = []
        position = [6000, 4000, 2000, 0]
        for start in position:
            data = self.hq_api.to_df(self.hq_api.get_history_transaction_data(market, code, start, count, check_date))
            data_list.append(data)
        df = pd.concat(data_list)
        df.drop_duplicates(inplace=True)
        return df

    @staticmethod
    def generate_symbols(symbol: str):
        """Build the symbols list pytdx expects: a list of
        (market-code, security-code) tuples."""
        new_symbols = []
        code, exchange = symbol.split(".")
        new_symbols.append((exchange_map[exchange], code))
        return new_symbols

    @staticmethod
    def check_symbol(symbol: str):
        """Validate the symbol format; return (code, market) or False
        when *symbol* is falsy."""
        if symbol:
            code, market = symbol.split(".")
            market = exchange_map.get(market)
            return code, market
        else:
            return False

    def close(self):
        """Shut down the data service and disconnect the quote API."""
        self.connected = False
        self.hq_api.disconnect()
4095f05dde2d28437c75b209869b40d054793135 | 6,339 | py | Python | core.caribosystem.py | ZanvokCorporation/CariboSystem | 1b267c6acc9872cbca3f071f7c85f1dd7ab543b7 | [
"Apache-2.0"
] | 1 | 2022-01-08T16:21:08.000Z | 2022-01-08T16:21:08.000Z | core.caribosystem.py | ZanvokCorporation/CariboSystem | 1b267c6acc9872cbca3f071f7c85f1dd7ab543b7 | [
"Apache-2.0"
] | null | null | null | core.caribosystem.py | ZanvokCorporation/CariboSystem | 1b267c6acc9872cbca3f071f7c85f1dd7ab543b7 | [
"Apache-2.0"
] | 1 | 2022-01-22T11:15:52.000Z | 2022-01-22T11:15:52.000Z | import os
import sys
import subprocess
from termcolor import colored
# Interactive "CariboSystem" shell: authenticate against the stored user,
# then read commands in a loop and dispatch them to helper scripts.
print()

# SECURITY NOTE: credentials are stored and compared as plain text on disk.
# File handles are now closed promptly via context managers (they used to leak).
with open("user.dat", 'r') as f:
    read = f.read()
user_log = input("Enter password of user " + read + " : ")
with open("password.dat", 'r') as v:
    verify = v.read()

if user_log == verify:
    print()
    # Version metadata (currently unused by the shell itself).
    ver = 6.11
    r_type = "Public Release"
    print()
    print("""
╭━╮╱╱╱╱╭┳╮╱╱╭━━╮╱╭━┳╮
┃╭╋━╮╭┳╋┫╰┳━┫━━╋┳┫━┫╰┳━┳━━╮
┃╰┫╋╰┫╭┫┃╋┃╋┣━━┃┃┣━┃╭┫┻┫┃┃┃
╰━┻━━┻╯╰┻━┻━┻━━╋╮┣━┻━┻━┻┻┻╯
╱╱╱╱╱╱╱╱╱╱╱╱╱╱╱╰━╯
""")
    core = ""
    # Clear the console in a platform-appropriate way.
    if sys.platform == "win32":
        os.system('cls')
    else:
        opener = "open" if sys.platform == "darwin" else "xdg-open"
        os.system('clear')
    print("Zanvok CariboSystem ")
    print("v6.11")
    print("Copyright 2022 (C) Zanvok Corporation")
    print()
    # The original condition (`core != "quit" or core != "exit"`) was always
    # true; the loop really terminates via the explicit `break`s below.
    while True:
        core = input(colored("CariboSystem > ", "green"))
        if core == "ver":
            print()
            print(colored("""
╭━╮╱╱╱╱╭┳╮╱╱╭━━╮╱╭━┳╮
┃╭╋━╮╭┳╋┫╰┳━┫━━╋┳┫━┫╰┳━┳━━╮
┃╰┫╋╰┫╭┫┃╋┃╋┣━━┃┃┣━┃╭┫┻┫┃┃┃
╰━┻━━┻╯╰┻━┻━┻━━╋╮┣━┻━┻━┻┻┻╯
╱╱╱╱╱╱╱╱╱╱╱╱╱╱╱╰━╯
""", "green"))
            print()
            print("CariboSystem")
            print("6.11")
            print("Zanvok Corporation")
            print()
        elif core == "ver core":
            print()
            print(colored("""
╭━╮╱╱╱╱╭┳╮╱╱╭━━╮╱╭━┳╮
┃╭╋━╮╭┳╋┫╰┳━┫━━╋┳┫━┫╰┳━┳━━╮
┃╰┫╋╰┫╭┫┃╋┃╋┣━━┃┃┣━┃╭┫┻┫┃┃┃
╰━┻━━┻╯╰┻━┻━┻━━╋╮┣━┻━┻━┻┻┻╯
╱╱╱╱╱╱╱╱╱╱╱╱╱╱╱╰━╯
""", "green"))
            print()
            print("Core(CariboSystem6).6.11Core")
            print()
        elif core == 'calc':
            if sys.platform == "win32":
                os.system('python calc.caribosystem.py')
            else:
                opener = "open" if sys.platform == "darwin" else "xdg-open"
                os.system('python3 calc.caribosystem.py')
        elif core == "user":
            if sys.platform == "win32":
                os.system('python users.caribosystem.py')
            else:
                opener = "open" if sys.platform == "darwin" else "xdg-open"
                os.system('python3 users.caribosystem.py')
        elif core == "cmd":
            print("Starting Zanvok PY-DOS Subsystem...")
            if sys.platform == "win32":
                os.system('python cmd8.py')
            else:
                opener = "open" if sys.platform == "darwin" else "xdg-open"
                os.system('python3 cmd8.py')
        elif core == "recovery":
            if sys.platform == "win32":
                os.system('python recovery.caribosystem.py')
            else:
                os.system('python3 recovery.caribosystem.py')
        elif core == 'cls':
            os.system('cls')
        elif core == 'clear':
            os.system('clear')
        elif core == "app --manage":
            if sys.platform == "win32":
                os.system('python app/app.caribosystem.py')
            else:
                opener = "open" if sys.platform == "darwin" else "xdg-open"
                os.system('python3 app/app.caribosystem.py')
        elif core == "app":
            # SECURITY NOTE: the app name is interpolated into a shell command
            # unsanitised; a hostile name can execute arbitrary commands.
            if sys.platform == "win32":
                print("Enter App name to run")
                select = input("CariboSystem > App > ")
                app = select + ".py"
                app_dir = "python app/" + app
                os.system(app_dir)
            else:
                opener = "open" if sys.platform == "darwin" else "xdg-open"
                print("Enter App name to run")
                select = input("CariboSystem > App > ")
                app = select + ".py"
                app_dir = "python3 app/" + app
                os.system(app_dir)
        elif core == "":
            print()
        elif core == "help":
            print()
            print("CariboSystem Help")
            print("--------------------------")
            with open("HELP.md", 'r') as helpfile:
                file1 = helpfile.read()
            print(file1)
            print()
            print("--------------------------")
        elif core == "app --ver":
            print()
            print("Zanvok App Manager for CariboSystem")
            print("v6.11 Core")
            print()
        elif core == "readme":
            print()
            print("README for CariboSystem")
            print("-------------------------------")
            with open("README.md", 'r') as readmefile:
                file2 = readmefile.read()
            print(file2)
            print()
            print("-------------------------------")
        elif core == "exit" or core == "quit":
            print()
            print("GoodBye!")
            print()
            break
        elif core == "netget":
            print("NetGet for CariboSystem")
            print()
            link = input("Enter Download Link: ")
            # SECURITY NOTE: the link is passed to the shell unsanitised.
            if sys.platform == "win32":
                netget = "python netget.py " + link + " C:\CariboSystem\Downloads"
                os.system(netget)
            else:
                opener = "open" if sys.platform == "darwin" else "xdg-open"
                netget = "python3 netget.py " + link + " Downloads"
                os.system(netget)
        elif core == "bsod":
            if sys.platform == "win32":
                os.system('python bsod.caribosystem.py')
                break
            else:
                opener = "open" if sys.platform == "darwin" else "xdg-open"
                os.system('python3 bsod.caribosystem.py')
                break
        else:
            print()
            print("Bad Command..")
            print()
            c_mode = input("Do you want to use core mode to execute the command: " + core + " ? (yes/no)").lower()
            if c_mode == "yes" or c_mode == "y":
                # SECURITY NOTE: executes arbitrary user input in the system
                # shell; kept because "core mode" is this shell's documented
                # escape hatch, but only after explicit confirmation.
                os.system(core)
            elif c_mode == "no" or c_mode == "n":
                print()
            else:
                print()
else:
    print("Invalid Password..!")
    # input() doubles as a pause so the user sees the error before the BSOD.
    exit_core = input("ERROR x1101")
    if sys.platform == "win32":
        os.system('python bsod.caribosystem.py')
        sys.exit()
    else:
        opener = "open" if sys.platform == "darwin" else "xdg-open"
        os.system('python3 bsod.caribosystem.py')
        sys.exit()
cda7c8db9e8cc92af4b82b8a6061272e8753a442 | 776 | py | Python | picamraw/constants.py | OsmoSystems/picamraw | 4936bea6188e5284d5ed80652d0ca79306815880 | [
"BSD-3-Clause"
] | 20 | 2018-12-10T21:05:39.000Z | 2021-11-22T16:51:35.000Z | picamraw/constants.py | OsmoSystems/picamraw | 4936bea6188e5284d5ed80652d0ca79306815880 | [
"BSD-3-Clause"
] | 5 | 2018-12-19T17:05:37.000Z | 2022-01-10T14:21:35.000Z | picamraw/constants.py | OsmoSystems/picamraw | 4936bea6188e5284d5ed80652d0ca79306815880 | [
"BSD-3-Clause"
] | 6 | 2019-04-01T11:23:21.000Z | 2021-04-25T02:00:10.000Z | from enum import Enum
class PiCameraVersion(Enum):
    """Raspberry Pi camera module hardware versions.

    Each member's value is the sensor model string for that module revision.
    """

    V1 = 'OV5647'
    V2 = 'IMX219'
class BayerOrder(Enum):
    ''' There are four supported arrangements of the R, G, G, and B pixels:
        RGGB:
            RG
            GB
        GBRG:
            GB
            RG
        BGGR:
            BG
            GR
        GRBG:
            GR
            BG
    '''
    # Member values duplicate the member names so that lookups round-trip
    # cleanly, e.g. BayerOrder('RGGB') is BayerOrder.RGGB.
    RGGB = 'RGGB'
    GBRG = 'GBRG'
    BGGR = 'BGGR'
    GRBG = 'GRBG'
# Maps each Bayer arrangement to the (row, col) offsets of the R, the two G,
# and the B samples within a single 2x2 tile of the raw mosaic.
BAYER_ORDER_TO_RGB_CHANNEL_COORDINATES = {
    # (ry, rx), (gy, gx), (Gy, Gx), (by, bx)
    BayerOrder.RGGB: ((0, 0), (1, 0), (0, 1), (1, 1)),
    BayerOrder.GBRG: ((1, 0), (0, 0), (1, 1), (0, 1)),
    BayerOrder.BGGR: ((1, 1), (0, 1), (1, 0), (0, 0)),
    BayerOrder.GRBG: ((0, 1), (1, 1), (0, 0), (1, 0)),
}
| 20.421053 | 75 | 0.430412 |
343e8adcc9af7f0daf4cda82f00a4a55c34ec9cc | 3,411 | py | Python | main.py | Redsword326/botbot | 8874ca8ac86e836d563b4beabce384f2c792fe47 | [
"MIT"
] | null | null | null | main.py | Redsword326/botbot | 8874ca8ac86e836d563b4beabce384f2c792fe47 | [
"MIT"
] | null | null | null | main.py | Redsword326/botbot | 8874ca8ac86e836d563b4beabce384f2c792fe47 | [
"MIT"
] | null | null | null | import discord
import auth
import random
class MyClient(discord.Client):
    """Discord bot client handling the !roll, !fetch and !coinflip commands."""

    async def on_ready(self):
        print("Ready")

    async def on_message(self, message):
        # Ignore any message that does not start with the configured prefix.
        if not message.content.startswith(auth.prefix):
            return
        # First word (with the prefix stripped) is the command name; the
        # remaining space-separated words are its arguments.
        command = message.content.split(" ")[0].split(auth.prefix)[1]
        args = message.content.split(" ")[1:]
        if command == "roll":
            if len(args) == 1:
                try:
                    if args[0].startswith("d"):
                        # Single die, e.g. "!roll d6".
                        roll = random.randint(1, int(args[0].split("d")[1]))
                        await send(message, ":game_die: Rolled " + str(roll), "Rolled a dice")
                    else:
                        # Multiple dice, e.g. "!roll 2d6" -> dArgs == ["2", "6"].
                        dArgs = args[0].split("d")
                        rolls = []
                        x = 0
                        while x < int(dArgs[0]):
                            rolls.append(random.randint(1, int(dArgs[1])))
                            x += 1
                        total = 0
                        dicemsg = ""
                        for x in rolls:
                            total += x
                            dicemsg += ":game_die: `" + str(x) + "`\n"
                        await send(message, ":game_die: Rolled " + str(args[0]) + " dice and got: `" + str(total) + "`! \n" + dicemsg, "Rolled " + str(args[0]) + " dice")
                except:
                    # NOTE(review): bare except hides real errors; it is
                    # presumably meant to catch int() failures on malformed
                    # dice expressions — consider `except (ValueError, IndexError)`.
                    await send(message, "Whoops! Your syntax is wrong!", "Tried to roll a dice, but failed. Error: Improper Syntax")
            else:
                await send(message, "Whoops! Too many arguments! It should look like: `!roll d6` or `!roll 2d6`", "Tried to roll a dice, but failed. Error: Too Many Arguments")
        elif command == "fetch":
            if len(args) == 0:
                try:
                    # Replies with a picture of a stick from the working directory.
                    await send(message, "Börk! Börk!", "Fetched a stick!", responseFile=discord.File("botbotstik.jpg"))
                except:
                    # NOTE(review): bare except — also triggers if the image
                    # file is missing, which is then misreported as a syntax error.
                    await send(message, "Whoops! Your syntax is wrong!", "Tried to fetch a stick, but failed. Error: Improper Syntax")
            else:
                await send(message, "Whoops! Too many arguments! It should look like: '!fetch'", "Tried to fetch a stick, but failed. Error: Too Many Arguments")
        elif command == "coinflip":
            if len(args) == 0:
                try:
                    # 1 -> heads, 2 -> tails.
                    flip = random.randint(1, 2)
                    if flip == 1:
                        await send(message, "Heads!", "Flipped a coin!")
                    if flip == 2:
                        await send(message, "Tails!", "Flipped a coin!")
                except:
                    await send(message, "Whoops! Your syntax is wrong!", "Tried to flip a coin, but failed. Error: Improper Syntax")
            else:
                await send(message, "Whoops! Too many arguments! It should look like: '!coinflip'", "Tried to flip a coin, but failed. Error: Too Many Arguments")
# ID of the text channel that receives the bot's action log.
# NOTE(review): the original code had a literal "logs channel ID here"
# placeholder inside get_channel(...), which is a SyntaxError. Fill in the
# real channel ID here.
LOG_CHANNEL_ID = 0


async def send(message, response, logmsg, responseFile=None, responseEmbed=None):
    """Reply to the invoking message and mirror a log line to the log channel.

    Parameters:
        message: the discord.Message that triggered the command.
        response: text sent back to the channel the command came from.
        logmsg: short action description sent to the logging channel.
        responseFile: optional discord.File attachment for the reply.
        responseEmbed: optional discord.Embed for the reply.
    """
    # Answer in the channel where the command was issued.
    await message.channel.send(response, file=responseFile, embed=responseEmbed)
    # Mirror the action to the logging channel ("client" is the module-level
    # MyClient instance created below; it is bound by the time this runs).
    await client.get_channel(LOG_CHANNEL_ID).send(logmsg)
client = MyClient()
client.run(auth.token) | 48.042254 | 177 | 0.475814 |
4c6f39296e4e29dc46646228d2dafe597c048ec9 | 90,126 | py | Python | pyuvdata/uvbeam/tests/test_uvbeam.py | no-lex/pyuvdata | 90537f78230d3d34f5db4d39a9f2a18373435437 | [
"BSD-2-Clause"
] | null | null | null | pyuvdata/uvbeam/tests/test_uvbeam.py | no-lex/pyuvdata | 90537f78230d3d34f5db4d39a9f2a18373435437 | [
"BSD-2-Clause"
] | null | null | null | pyuvdata/uvbeam/tests/test_uvbeam.py | no-lex/pyuvdata | 90537f78230d3d34f5db4d39a9f2a18373435437 | [
"BSD-2-Clause"
] | null | null | null | # -*- mode: python; coding: utf-8 -*-
# Copyright (c) 2018 Radio Astronomy Software Group
# Licensed under the 2-clause BSD License
"""Tests for uvbeam object.
"""
import os
import copy
import numpy as np
from astropy import units
from astropy.coordinates import Angle
import pytest
from pyuvdata import UVBeam
import pyuvdata.tests as uvtest
import pyuvdata.utils as uvutils
from pyuvdata.data import DATA_PATH
try:
    from astropy_healpix import HEALPix

    healpix_installed = True
except (ImportError):
    # astropy_healpix is an optional dependency; HEALPix-specific test code
    # branches on this flag when the package is unavailable.
    healpix_installed = False
@pytest.fixture(scope="function")
def uvbeam_data():
    """Setup and teardown for basic parameter, property and iterator tests."""
    # Underscored UVParameter attributes that every UVBeam must define.
    required_parameters = [
        "_beam_type",
        "_Nfreqs",
        "_Naxes_vec",
        "_Nspws",
        "_pixel_coordinate_system",
        "_freq_array",
        "_spw_array",
        "_data_normalization",
        "_data_array",
        "_bandpass_array",
        "_telescope_name",
        "_feed_name",
        "_feed_version",
        "_model_name",
        "_model_version",
        "_history",
        "_antenna_type",
    ]

    # Public property names matching required_parameters (same order, no
    # leading underscore).
    required_properties = [
        "beam_type",
        "Nfreqs",
        "Naxes_vec",
        "Nspws",
        "pixel_coordinate_system",
        "freq_array",
        "spw_array",
        "data_normalization",
        "data_array",
        "bandpass_array",
        "telescope_name",
        "feed_name",
        "feed_version",
        "model_name",
        "model_version",
        "history",
        "antenna_type",
    ]

    # Optional underscored parameters (whether they are set depends on the
    # beam type, pixel coordinate system, antenna type, etc.).
    extra_parameters = [
        "_Naxes1",
        "_Naxes2",
        "_Npixels",
        "_Nfeeds",
        "_Npols",
        "_Ncomponents_vec",
        "_axis1_array",
        "_axis2_array",
        "_nside",
        "_ordering",
        "_pixel_array",
        "_feed_array",
        "_polarization_array",
        "_basis_vector_array",
        "_extra_keywords",
        "_Nelements",
        "_element_coordinate_system",
        "_element_location_array",
        "_delay_array",
        "_x_orientation",
        "_interpolation_function",
        "_freq_interp_kind",
        "_gain_array",
        "_coupling_matrix",
        "_reference_impedance",
        "_receiver_temperature_array",
        "_loss_array",
        "_mismatch_array",
        "_s_parameters",
    ]

    # Public property names matching extra_parameters (same order, no
    # leading underscore).
    extra_properties = [
        "Naxes1",
        "Naxes2",
        "Npixels",
        "Nfeeds",
        "Npols",
        "Ncomponents_vec",
        "axis1_array",
        "axis2_array",
        "nside",
        "ordering",
        "pixel_array",
        "feed_array",
        "polarization_array",
        "basis_vector_array",
        "extra_keywords",
        "Nelements",
        "element_coordinate_system",
        "element_location_array",
        "delay_array",
        "x_orientation",
        "interpolation_function",
        "freq_interp_kind",
        "gain_array",
        "coupling_matrix",
        "reference_impedance",
        "receiver_temperature_array",
        "loss_array",
        "mismatch_array",
        "s_parameters",
    ]

    # Attributes that are neither parameters nor matching properties.
    other_properties = ["pyuvdata_version_str"]

    beam_obj = UVBeam()

    # Simple container bundling the object and the expected name lists so a
    # single fixture argument carries everything the tests need.
    class DataHolder:
        def __init__(
            self,
            beam_obj,
            required_parameters,
            required_properties,
            extra_parameters,
            extra_properties,
            other_properties,
        ):
            self.beam_obj = beam_obj
            self.required_parameters = required_parameters
            self.required_properties = required_properties
            self.extra_parameters = extra_parameters
            self.extra_properties = extra_properties
            self.other_properties = other_properties

    uvbeam_data = DataHolder(
        beam_obj,
        required_parameters,
        required_properties,
        extra_parameters,
        extra_properties,
        other_properties,
    )
    # yields the data we need but will continue to the del call after tests
    yield uvbeam_data

    # some post-test object cleanup
    del uvbeam_data

    return
def test_parameter_iter(uvbeam_data):
    """Test expected parameters."""
    # Iterating a UVBeam yields all of its UVParameter attributes.
    all_params = list(uvbeam_data.beam_obj)
    expected = uvbeam_data.required_parameters + uvbeam_data.extra_parameters
    for a in expected:
        assert a in all_params, (
            "expected attribute " + a + " not returned in object iterator"
        )
def test_required_parameter_iter(uvbeam_data):
    """Test expected required parameters."""
    # required() yields only the parameters that must be set on a valid object.
    required = list(uvbeam_data.beam_obj.required())
    for a in uvbeam_data.required_parameters:
        assert a in required, (
            "expected attribute " + a + " not returned in required iterator"
        )
def test_extra_parameter_iter(uvbeam_data):
    """Test expected optional parameters."""
    # extra() yields the optional (not strictly required) parameters.
    extra = list(uvbeam_data.beam_obj.extra())
    for a in uvbeam_data.extra_parameters:
        assert a in extra, "expected attribute " + a + " not returned in extra iterator"
def test_unexpected_parameters(uvbeam_data):
    """Test for extra parameters."""
    expected_parameters = uvbeam_data.required_parameters + uvbeam_data.extra_parameters
    # Every underscore-prefixed attribute on the object must be a known parameter.
    for a in uvbeam_data.beam_obj.__dict__:
        if a[0] != "_":
            continue
        assert a in expected_parameters, (
            "unexpected parameter " + a + " found in UVBeam"
        )
def test_unexpected_attributes(uvbeam_data):
    """Test for extra attributes."""
    expected_attributes = (
        uvbeam_data.required_properties
        + uvbeam_data.extra_properties
        + uvbeam_data.other_properties
    )
    # Every public (non-underscore) attribute must be a known property.
    for a in uvbeam_data.beam_obj.__dict__:
        if a[0] == "_":
            continue
        assert a in expected_attributes, (
            "unexpected attribute " + a + " found in UVBeam"
        )
def test_properties(uvbeam_data):
    """Test that properties can be get and set properly."""
    properties = uvbeam_data.required_properties + uvbeam_data.extra_properties
    parameters = uvbeam_data.required_parameters + uvbeam_data.extra_parameters
    # Setting each public property must update the matching underscored
    # UVParameter's value (the lists are parallel, so zip pairs them up).
    for k, v in zip(properties, parameters):
        rand_num = np.random.rand()
        setattr(uvbeam_data.beam_obj, k, rand_num)
        this_param = getattr(uvbeam_data.beam_obj, v)
        try:
            assert rand_num == this_param.value
        except AssertionError:
            print("setting {prop_name} to a random number failed".format(prop_name=k))
            raise
def test_deprecation_warnings_set_cs_params(cst_efield_2freq):
    """
    Test the deprecation warnings in set_cs_params.
    """
    beam = cst_efield_2freq
    beam_copy = beam.copy()
    # The deprecated public setter must warn but leave the object unchanged.
    with uvtest.check_warnings(
        DeprecationWarning, match="`set_cs_params` is deprecated"
    ):
        beam_copy.set_cs_params()
    assert beam_copy == beam
def test_deprecation_warnings_set_efield(cst_efield_2freq):
    """
    Test the deprecation warnings in set_efield.
    """
    beam = cst_efield_2freq
    beam_copy = beam.copy()
    # The deprecated public setter must warn but leave the object unchanged.
    with uvtest.check_warnings(DeprecationWarning, match="`set_efield` is deprecated"):
        beam_copy.set_efield()
    assert beam_copy == beam
def test_deprecation_warnings_set_power(cst_power_2freq):
    """
    Test the deprecation warnings in set_power.
    """
    beam = cst_power_2freq
    beam_copy = beam.copy()
    # The deprecated public setter must warn but leave the object unchanged.
    with uvtest.check_warnings(DeprecationWarning, match="`set_power` is deprecated"):
        beam_copy.set_power()
    assert beam_copy == beam
def test_deprecation_warnings_set_antenna_type(cst_efield_2freq):
    """
    Test the deprecation warnings in set_simple and set_phased_array.
    """
    beam = cst_efield_2freq
    beam_copy = beam.copy()
    with uvtest.check_warnings(DeprecationWarning, match="`set_simple` is deprecated"):
        beam_copy.set_simple()
    assert beam_copy == beam

    # Switch the reference object to a phased array so the deprecated public
    # setter on the copy brings the two back into agreement.
    beam._set_phased_array()
    with uvtest.check_warnings(
        DeprecationWarning, match="`set_phased_array` is deprecated"
    ):
        beam_copy.set_phased_array()
    assert beam_copy == beam
def test_errors():
    """Check that converting to an unknown filetype raises a ValueError."""
    beam = UVBeam()
    with pytest.raises(ValueError, match="filetype must be beamfits"):
        beam._convert_to_filetype("foo")
def test_peak_normalize(cst_efield_2freq, cst_power_2freq):
    """Peak normalization scales data to 1 and folds the peak into the bandpass."""
    efield_beam = cst_efield_2freq
    orig_bandpass_array = copy.deepcopy(efield_beam.bandpass_array)
    # Per-frequency peak amplitude before normalization.
    maxima = np.zeros(efield_beam.Nfreqs)
    for freq_i in range(efield_beam.Nfreqs):
        maxima[freq_i] = np.amax(abs(efield_beam.data_array[:, :, :, freq_i]))
    efield_beam.peak_normalize()
    assert np.amax(abs(efield_beam.data_array)) == 1
    # The removed peak must be absorbed into the bandpass array.
    assert np.sum(abs(efield_beam.bandpass_array - orig_bandpass_array * maxima)) == 0
    assert efield_beam.data_normalization == "peak"

    power_beam = cst_power_2freq
    orig_bandpass_array = copy.deepcopy(power_beam.bandpass_array)
    # BUGFIX: loop over the *power* beam's frequencies. The original used
    # efield_beam.Nfreqs here, which only worked because both fixtures happen
    # to have the same number of frequencies.
    maxima = np.zeros(power_beam.Nfreqs)
    for freq_i in range(power_beam.Nfreqs):
        maxima[freq_i] = np.amax(power_beam.data_array[:, :, :, freq_i])
    power_beam.peak_normalize()
    assert np.amax(abs(power_beam.data_array)) == 1
    assert np.sum(abs(power_beam.bandpass_array - orig_bandpass_array * maxima)) == 0
    assert power_beam.data_normalization == "peak"

    # Converting from solid_angle normalization to peak is not implemented.
    power_beam.data_normalization = "solid_angle"
    with pytest.raises(
        NotImplementedError,
        match="Conversion from solid_angle to peak "
        "normalization is not yet implemented",
    ):
        power_beam.peak_normalize()
def test_stokes_matrix():
    """Check input validation of UVBeam._stokes_matrix."""
    beam = UVBeam()
    # Negative indices are rejected outright.
    with pytest.raises(ValueError, match="n must be positive integer."):
        beam._stokes_matrix(-2)
    # Indices above 3 are also invalid.
    with pytest.raises(ValueError, match="n should lie between 0 and 3."):
        beam._stokes_matrix(5)
def test_efield_to_pstokes(cst_efield_2freq_cut, cst_efield_2freq_cut_healpix):
    """Check pstokes conversion approximately commutes with HEALPix interpolation."""
    pstokes_beam_2 = cst_efield_2freq_cut_healpix

    # convert to pstokes after interpolating
    beam_return = pstokes_beam_2.efield_to_pstokes(inplace=False)

    pstokes_beam = cst_efield_2freq_cut
    # interpolate after converting to pstokes
    pstokes_beam.interpolation_function = "az_za_simple"
    pstokes_beam.efield_to_pstokes()
    pstokes_beam.to_healpix()

    pstokes_beam.peak_normalize()
    beam_return.peak_normalize()

    # NOTE: So far, the following doesn't hold unless the beams are
    # peak_normalized again.
    # This seems to be the fault of interpolation
    assert np.allclose(pstokes_beam.data_array, beam_return.data_array, atol=1e-2)
def test_efield_to_pstokes_error(cst_power_2freq_cut):
    """Converting a power beam to pseudo-Stokes must raise a ValueError."""
    beam = cst_power_2freq_cut
    with pytest.raises(ValueError, match="beam_type must be efield."):
        beam.efield_to_pstokes()
def test_efield_to_power(cst_efield_2freq_cut, cst_power_2freq_cut, tmp_path):
    """Test converting an E-field beam to a power beam.

    Covers: agreement with the reference power beam, non-orthogonal and
    rotated basis vectors, cross-polarization products, beamfits round-trip,
    keep_basis_vector, and the error cases.
    """
    efield_beam = cst_efield_2freq_cut
    power_beam = cst_power_2freq_cut

    new_power_beam = efield_beam.efield_to_power(calc_cross_pols=False, inplace=False)

    # The values in the beam file only have 4 sig figs, so they don't match precisely
    diff = np.abs(new_power_beam.data_array - power_beam.data_array)
    assert np.max(diff) < 2
    reldiff = diff / power_beam.data_array
    assert np.max(reldiff) < 0.002

    # set data_array tolerances higher to test the rest of the object
    # tols are (relative, absolute)
    tols = [0.002, 0]
    power_beam._data_array.tols = tols
    # modify the history to match
    power_beam.history += " Converted from efield to power using pyuvdata."
    assert power_beam == new_power_beam

    # test with non-orthogonal basis vectors
    # first construct a beam with non-orthogonal basis vectors
    new_basis_vecs = np.zeros_like(efield_beam.basis_vector_array)
    new_basis_vecs[0, 0, :, :] = np.sqrt(0.5)
    new_basis_vecs[0, 1, :, :] = np.sqrt(0.5)
    new_basis_vecs[1, :, :, :] = efield_beam.basis_vector_array[1, :, :, :]
    new_data = np.zeros_like(efield_beam.data_array)
    # drop all the trailing colons in the slicing below
    new_data[0] = np.sqrt(2) * efield_beam.data_array[0]
    new_data[1] = efield_beam.data_array[1] - efield_beam.data_array[0]
    efield_beam2 = efield_beam.copy()
    efield_beam2.basis_vector_array = new_basis_vecs
    efield_beam2.data_array = new_data
    efield_beam2.check()
    # now convert to power. Should get the same result
    new_power_beam2 = efield_beam2.copy()
    new_power_beam2.efield_to_power(calc_cross_pols=False)
    assert new_power_beam == new_power_beam2

    if healpix_installed:
        # check that this raises an error if trying to convert to HEALPix:
        efield_beam2.interpolation_function = "az_za_simple"
        with pytest.raises(
            NotImplementedError,
            match="interpolation for input basis vectors that are not aligned to the "
            "native theta/phi coordinate system is not yet supported",
        ):
            efield_beam2.to_healpix(inplace=False)

    # now try a different rotation to non-orthogonal basis vectors
    new_basis_vecs = np.zeros_like(efield_beam.basis_vector_array)
    new_basis_vecs[0, :, :, :] = efield_beam.basis_vector_array[0, :, :, :]
    new_basis_vecs[1, 0, :, :] = np.sqrt(0.5)
    new_basis_vecs[1, 1, :, :] = np.sqrt(0.5)
    new_data = np.zeros_like(efield_beam.data_array)
    new_data[0, :, :, :, :, :] = (
        efield_beam.data_array[0, :, :, :, :, :]
        - efield_beam.data_array[1, :, :, :, :, :]
    )
    new_data[1, :, :, :, :, :] = np.sqrt(2) * efield_beam.data_array[1, :, :, :, :, :]
    efield_beam2 = efield_beam.copy()
    efield_beam2.basis_vector_array = new_basis_vecs
    efield_beam2.data_array = new_data
    efield_beam2.check()
    # now convert to power. Should get the same result
    new_power_beam2 = efield_beam2.copy()
    new_power_beam2.efield_to_power(calc_cross_pols=False)
    assert new_power_beam == new_power_beam2

    # now construct a beam with orthogonal but rotated basis vectors
    new_basis_vecs = np.zeros_like(efield_beam.basis_vector_array)
    new_basis_vecs[0, 0, :, :] = np.sqrt(0.5)
    new_basis_vecs[0, 1, :, :] = np.sqrt(0.5)
    new_basis_vecs[1, 0, :, :] = -1 * np.sqrt(0.5)
    new_basis_vecs[1, 1, :, :] = np.sqrt(0.5)
    new_data = np.zeros_like(efield_beam.data_array)
    new_data[0, :, :, :, :, :] = np.sqrt(0.5) * (
        efield_beam.data_array[0, :, :, :, :, :]
        + efield_beam.data_array[1, :, :, :, :, :]
    )
    new_data[1, :, :, :, :, :] = np.sqrt(0.5) * (
        -1 * efield_beam.data_array[0, :, :, :, :, :]
        + efield_beam.data_array[1, :, :, :, :, :]
    )
    efield_beam2 = efield_beam.copy()
    efield_beam2.basis_vector_array = new_basis_vecs
    efield_beam2.data_array = new_data
    efield_beam2.check()
    # now convert to power. Should get the same result
    new_power_beam2 = efield_beam2.copy()
    new_power_beam2.efield_to_power(calc_cross_pols=False)
    assert new_power_beam == new_power_beam2

    # test calculating cross pols
    new_power_beam = efield_beam.efield_to_power(calc_cross_pols=True, inplace=False)
    assert np.all(
        np.abs(
            new_power_beam.data_array[
                :, :, 0, :, :, np.where(new_power_beam.axis1_array == 0)[0]
            ]
        )
        > np.abs(
            new_power_beam.data_array[
                :, :, 2, :, :, np.where(new_power_beam.axis1_array == 0)[0]
            ]
        )
    )
    assert np.all(
        np.abs(
            new_power_beam.data_array[
                :, :, 0, :, :, np.where(new_power_beam.axis1_array == np.pi / 2.0)[0]
            ]
        )
        > np.abs(
            new_power_beam.data_array[
                :, :, 2, :, :, np.where(new_power_beam.axis1_array == np.pi / 2.0)[0]
            ]
        )
    )

    # test writing out & reading back in power files (with cross pols which are complex)
    write_file = str(tmp_path / "outtest_beam.fits")
    new_power_beam.write_beamfits(write_file, clobber=True)
    new_power_beam2 = UVBeam()
    new_power_beam2.read_beamfits(write_file)
    assert new_power_beam == new_power_beam2

    # test keeping basis vectors
    new_power_beam = efield_beam.efield_to_power(
        calc_cross_pols=False, keep_basis_vector=True, inplace=False
    )
    assert np.allclose(new_power_beam.data_array, np.abs(efield_beam.data_array) ** 2)

    # test raises error if beam is already a power beam
    with pytest.raises(ValueError, match="beam_type must be efield"):
        power_beam.efield_to_power()

    # test raises error if input efield beam has Naxes_vec=3
    efield_beam.Naxes_vec = 3
    with pytest.raises(
        ValueError,
        match="Conversion to power with 3-vector efields " "is not currently supported",
    ):
        efield_beam.efield_to_power()
def test_freq_interpolation(cst_power_2freq):
    """Test frequency-only interpolation.

    Covers: array returns at small/large tolerances, new-object returns,
    interpolating to more frequencies than the input, the single-channel
    slice path, and error cases.
    """
    power_beam = cst_power_2freq
    power_beam.interpolation_function = "az_za_simple"

    # test frequency interpolation returns data arrays for small and large tolerances
    freq_orig_vals = np.array([123e6, 150e6])
    interp_data, interp_basis_vector, interp_bandpass = power_beam.interp(
        freq_array=freq_orig_vals, freq_interp_tol=0.0, return_bandpass=True
    )
    assert isinstance(interp_data, np.ndarray)
    assert isinstance(interp_bandpass, np.ndarray)
    np.testing.assert_array_almost_equal(power_beam.bandpass_array, interp_bandpass)
    np.testing.assert_array_almost_equal(power_beam.data_array, interp_data)
    assert interp_basis_vector is None

    interp_data, interp_basis_vector, interp_bandpass = power_beam.interp(
        freq_array=freq_orig_vals, freq_interp_tol=1.0, return_bandpass=True
    )
    assert isinstance(interp_data, np.ndarray)
    assert isinstance(interp_bandpass, np.ndarray)
    np.testing.assert_array_almost_equal(power_beam.bandpass_array, interp_bandpass)
    np.testing.assert_array_almost_equal(power_beam.data_array, interp_data)
    assert interp_basis_vector is None

    # test frequency interpolation returns new UVBeam for small and large tolerances
    power_beam.saved_interp_functions = {}
    new_beam_obj = power_beam.interp(
        freq_array=freq_orig_vals, freq_interp_tol=0.0, new_object=True
    )
    assert isinstance(new_beam_obj, UVBeam)
    np.testing.assert_array_almost_equal(new_beam_obj.freq_array[0], freq_orig_vals)
    assert new_beam_obj.freq_interp_kind == "linear"
    # test that saved functions are erased in new obj
    assert not hasattr(new_beam_obj, "saved_interp_functions")
    assert power_beam.history != new_beam_obj.history
    new_beam_obj.history = power_beam.history
    assert power_beam == new_beam_obj

    new_beam_obj = power_beam.interp(
        freq_array=freq_orig_vals, freq_interp_tol=1.0, new_object=True
    )
    assert isinstance(new_beam_obj, UVBeam)
    np.testing.assert_array_almost_equal(new_beam_obj.freq_array[0], freq_orig_vals)
    # assert interp kind is 'nearest' when within tol
    assert new_beam_obj.freq_interp_kind == "nearest"
    new_beam_obj.freq_interp_kind = "linear"
    assert power_beam.history != new_beam_obj.history
    new_beam_obj.history = power_beam.history
    assert power_beam == new_beam_obj

    # test frequency interpolation returns valid new UVBeam for different
    # number of freqs from input
    power_beam.saved_interp_functions = {}
    new_beam_obj = power_beam.interp(
        freq_array=np.linspace(123e6, 150e6, num=5),
        freq_interp_tol=0.0,
        new_object=True,
    )
    assert isinstance(new_beam_obj, UVBeam)
    np.testing.assert_array_almost_equal(
        new_beam_obj.freq_array[0], np.linspace(123e6, 150e6, num=5)
    )
    assert new_beam_obj.freq_interp_kind == "linear"
    # test that saved functions are erased in new obj
    assert not hasattr(new_beam_obj, "saved_interp_functions")
    assert power_beam.history != new_beam_obj.history
    new_beam_obj.history = power_beam.history
    # down select to orig freqs and test equality
    new_beam_obj.select(frequencies=freq_orig_vals)
    assert power_beam.history != new_beam_obj.history
    new_beam_obj.history = power_beam.history
    assert power_beam == new_beam_obj

    # using only one freq chan should trigger a ValueError if interp_bool is True
    # unless requesting the original frequency channel such that interp_bool is False.
    # Therefore, to test that interp_bool is False returns array slice as desired,
    # test that ValueError is not raised in this case.
    # Other ways of testing this (e.g. interp_data_array.flags['OWNDATA']) does not work
    _pb = power_beam.select(frequencies=power_beam.freq_array[0, :1], inplace=False)
    try:
        interp_data, interp_basis_vector = _pb.interp(freq_array=_pb.freq_array[0])
    except ValueError:
        raise AssertionError("UVBeam.interp didn't return an array slice as expected")

    # test errors if one frequency
    power_beam_singlef = power_beam.select(freq_chans=[0], inplace=False)
    with pytest.raises(
        ValueError, match="Only one frequency in UVBeam so cannot interpolate."
    ):
        power_beam_singlef.interp(freq_array=np.array([150e6]))

    # assert freq_interp_kind ValueError
    power_beam.interpolation_function = "az_za_simple"
    power_beam.freq_interp_kind = None
    with pytest.raises(
        ValueError, match="freq_interp_kind must be set on object first"
    ):
        power_beam.interp(
            az_array=power_beam.axis1_array,
            za_array=power_beam.axis2_array,
            freq_array=freq_orig_vals,
            polarizations=["xx"],
        )
def test_freq_interp_real_and_complex(cst_power_2freq):
    """Test that interpolating real and complex data gives the same result."""
    power_beam = cst_power_2freq
    power_beam.interpolation_function = "az_za_simple"

    # make a new object with more frequencies
    freqs = np.linspace(123e6, 150e6, 4)
    power_beam.freq_interp_kind = "linear"
    pbeam = power_beam.interp(freq_array=freqs, new_object=True)

    # modulate the data so the frequency axis is not trivially flat
    pbeam.data_array[:, :, :, 1] *= 2
    pbeam.data_array[:, :, :, 2] *= 0.5

    # interpolate cubic on real data
    freqs = np.linspace(123e6, 150e6, 10)
    pbeam.freq_interp_kind = "cubic"
    pb_int = pbeam.interp(freq_array=freqs)[0]

    # interpolate cubic on complex data and compare to ensure they are the same
    # BUGFIX: use the builtin ``complex`` here; the ``np.complex`` alias has
    # been deprecated since numpy 1.20 and was removed in numpy 1.24.
    pbeam.data_array = pbeam.data_array.astype(complex)
    pb_int2 = pbeam.interp(freq_array=freqs)[0]
    assert np.all(np.isclose(np.abs(pb_int - pb_int2), 0))
@pytest.mark.parametrize("beam_type", ["efield", "power"])
def test_spatial_interpolation_samepoints(
    beam_type, cst_power_2freq_cut, cst_efield_2freq_cut
):
    """
    check that interpolating to existing points gives the same answer
    """
    if beam_type == "power":
        uvbeam = cst_power_2freq_cut
    else:
        uvbeam = cst_efield_2freq_cut

    # Build the full (az, za) grid of existing pixel locations as flat arrays.
    za_orig_vals, az_orig_vals = np.meshgrid(uvbeam.axis2_array, uvbeam.axis1_array)
    az_orig_vals = az_orig_vals.ravel(order="C")
    za_orig_vals = za_orig_vals.ravel(order="C")
    freq_orig_vals = np.array([123e6, 150e6])

    # test error if no interpolation function is set
    with pytest.raises(
        ValueError, match="interpolation_function must be set on object first"
    ):
        uvbeam.interp(
            az_array=az_orig_vals, za_array=za_orig_vals, freq_array=freq_orig_vals,
        )
    uvbeam.interpolation_function = "az_za_simple"
    interp_data_array, interp_basis_vector = uvbeam.interp(
        az_array=az_orig_vals, za_array=za_orig_vals, freq_array=freq_orig_vals
    )
    # Fortran ordering restores the original (vec, spw, pol, freq, za, az) layout.
    interp_data_array = interp_data_array.reshape(uvbeam.data_array.shape, order="F")
    assert np.allclose(uvbeam.data_array, interp_data_array)
    if beam_type == "efield":
        interp_basis_vector = interp_basis_vector.reshape(
            uvbeam.basis_vector_array.shape, order="F"
        )
        assert np.allclose(uvbeam.basis_vector_array, interp_basis_vector)

    # test that new object from interpolation is identical
    new_beam = uvbeam.interp(
        az_array=uvbeam.axis1_array,
        za_array=uvbeam.axis2_array,
        az_za_grid=True,
        freq_array=freq_orig_vals,
        new_object=True,
    )
    assert new_beam.freq_interp_kind == "nearest"
    assert new_beam.history == (
        uvbeam.history + " Interpolated in "
        "frequency and to a new azimuth/zenith "
        "angle grid using pyuvdata with "
        "interpolation_function = az_za_simple "
        "and freq_interp_kind = nearest."
    )
    # make histories & freq_interp_kind equal
    new_beam.history = uvbeam.history
    new_beam.freq_interp_kind = "linear"
    assert new_beam == uvbeam

    # test error if new_object set without az_za_grid
    with pytest.raises(ValueError, match="A new object can only be returned"):
        uvbeam.interp(
            az_array=az_orig_vals,
            za_array=za_orig_vals,
            freq_array=freq_orig_vals,
            new_object=True,
        )

    if beam_type == "power":
        # test only a single polarization
        interp_data_array, interp_basis_vector = uvbeam.interp(
            az_array=az_orig_vals,
            za_array=za_orig_vals,
            freq_array=freq_orig_vals,
            polarizations=["xx"],
        )
        data_array_compare = uvbeam.data_array[:, :, :1]
        interp_data_array = interp_data_array.reshape(
            data_array_compare.shape, order="F"
        )
        assert np.allclose(data_array_compare, interp_data_array)
@pytest.mark.parametrize("beam_type", ["efield", "power"])
def test_spatial_interpolation_everyother(
    beam_type, cst_power_2freq_cut, cst_efield_2freq_cut
):
    """
    test that interp to every other point returns an object that matches a select
    """
    if beam_type == "power":
        uvbeam = cst_power_2freq_cut
    else:
        uvbeam = cst_efield_2freq_cut
    uvbeam.interpolation_function = "az_za_simple"

    # Every-other-point index sets along both axes.
    axis1_inds = np.arange(0, uvbeam.Naxes1, 2)
    axis2_inds = np.arange(0, uvbeam.Naxes2, 2)

    select_beam = uvbeam.select(
        axis1_inds=axis1_inds, axis2_inds=axis2_inds, inplace=False
    )
    interp_beam = uvbeam.interp(
        az_array=uvbeam.axis1_array[axis1_inds],
        za_array=uvbeam.axis2_array[axis2_inds],
        az_za_grid=True,
        new_object=True,
    )
    assert select_beam.history != interp_beam.history
    interp_beam.history = select_beam.history
    assert select_beam == interp_beam

    # test no errors using different points
    az_interp_vals = np.array(
        np.arange(0, 2 * np.pi, np.pi / 9.0).tolist()
        + np.arange(0, 2 * np.pi, np.pi / 9.0).tolist()
    )
    za_interp_vals = np.array(
        (np.zeros((18)) + np.pi / 18).tolist() + (np.zeros((18)) + np.pi / 36).tolist()
    )
    freq_interp_vals = np.arange(125e6, 145e6, 5e6)

    interp_data_array, interp_basis_vector = uvbeam.interp(
        az_array=az_interp_vals, za_array=za_interp_vals, freq_array=freq_interp_vals
    )

    if beam_type == "power":
        # Test requesting separate polarizations on different calls
        # while reusing splines.
        interp_data_array, interp_basis_vector = uvbeam.interp(
            az_array=az_interp_vals[:2],
            za_array=za_interp_vals[:2],
            freq_array=freq_interp_vals,
            polarizations=["xx"],
            reuse_spline=True,
        )

        interp_data_array, interp_basis_vector = uvbeam.interp(
            az_array=az_interp_vals[:2],
            za_array=za_interp_vals[:2],
            freq_array=freq_interp_vals,
            polarizations=["yy"],
            reuse_spline=True,
        )

    # test reusing the spline fit.
    orig_data_array, interp_basis_vector = uvbeam.interp(
        az_array=az_interp_vals,
        za_array=za_interp_vals,
        freq_array=freq_interp_vals,
        reuse_spline=True,
    )

    reused_data_array, interp_basis_vector = uvbeam.interp(
        az_array=az_interp_vals,
        za_array=za_interp_vals,
        freq_array=freq_interp_vals,
        reuse_spline=True,
    )
    # Reusing a cached spline must reproduce the first call exactly.
    assert np.all(reused_data_array == orig_data_array)

    # test passing spline options
    spline_opts = {"kx": 4, "ky": 4}
    quartic_data_array, interp_basis_vector = uvbeam.interp(
        az_array=az_interp_vals,
        za_array=za_interp_vals,
        freq_array=freq_interp_vals,
        spline_opts=spline_opts,
    )

    # slightly different interpolation, so not identical.
    assert np.allclose(quartic_data_array, orig_data_array, atol=1e-10)
    assert not np.all(quartic_data_array == orig_data_array)

    select_data_array_orig, interp_basis_vector = uvbeam.interp(
        az_array=az_interp_vals[0:1],
        za_array=za_interp_vals[0:1],
        freq_array=np.array([127e6]),
    )

    select_data_array_reused, interp_basis_vector = uvbeam.interp(
        az_array=az_interp_vals[0:1],
        za_array=za_interp_vals[0:1],
        freq_array=np.array([127e6]),
        reuse_spline=True,
    )
    assert np.allclose(select_data_array_orig, select_data_array_reused)
    # Drop the cached splines stored on the object by reuse_spline=True.
    del uvbeam.saved_interp_functions
@pytest.mark.parametrize("beam_type", ["efield", "power"])
def test_spatial_interp_cutsky(beam_type, cst_power_2freq_cut, cst_efield_2freq_cut):
    """
    Test that when the beam doesn't cover the full sky it still works.
    """
    if beam_type == "power":
        uvbeam = cst_power_2freq_cut
    else:
        uvbeam = cst_efield_2freq_cut
    uvbeam.interpolation_function = "az_za_simple"

    # limit phi range
    axis1_inds = np.arange(0, np.ceil(uvbeam.Naxes1 / 2), dtype=int)
    axis2_inds = np.arange(0, uvbeam.Naxes2)
    uvbeam.select(axis1_inds=axis1_inds, axis2_inds=axis2_inds)

    # now do every other point test.
    axis1_inds = np.arange(0, uvbeam.Naxes1, 2)
    axis2_inds = np.arange(0, uvbeam.Naxes2, 2)
    select_beam = uvbeam.select(
        axis1_inds=axis1_inds, axis2_inds=axis2_inds, inplace=False
    )
    # Interpolating at exactly the selected grid points must match the select.
    interp_beam = uvbeam.interp(
        az_array=uvbeam.axis1_array[axis1_inds],
        za_array=uvbeam.axis2_array[axis2_inds],
        az_za_grid=True,
        new_object=True,
    )
    assert select_beam.history != interp_beam.history
    interp_beam.history = select_beam.history
    assert select_beam == interp_beam
def test_spatial_interpolation_errors(cst_power_2freq_cut):
    """
    Test interp error handling: out-of-range frequencies and positions and
    unknown polarizations all raise ValueError.
    """
    uvbeam = cst_power_2freq_cut
    uvbeam.interpolation_function = "az_za_simple"

    az_interp_vals = np.array(
        np.arange(0, 2 * np.pi, np.pi / 9.0).tolist()
        + np.arange(0, 2 * np.pi, np.pi / 9.0).tolist()
    )
    za_interp_vals = np.array(
        (np.zeros((18)) + np.pi / 18).tolist() + (np.zeros((18)) + np.pi / 36).tolist()
    )
    freq_interp_vals = np.arange(125e6, 145e6, 5e6)

    # test errors if frequency interp values outside range
    with pytest.raises(
        ValueError,
        match="at least one interpolation frequency is outside of "
        "the UVBeam freq_array range.",
    ):
        uvbeam.interp(
            az_array=az_interp_vals,
            za_array=za_interp_vals,
            freq_array=np.array([100]),
        )

    # test errors if positions outside range
    with pytest.raises(
        ValueError,
        match="at least one interpolation location "
        "is outside of the UVBeam pixel coverage.",
    ):
        uvbeam.interp(
            az_array=az_interp_vals, za_array=za_interp_vals + np.pi / 2,
        )

    # test no errors only frequency interpolation
    interp_data_array, interp_basis_vector = uvbeam.interp(freq_array=freq_interp_vals)

    # assert polarization value error
    with pytest.raises(
        ValueError,
        match="Requested polarization 1 not found in self.polarization_array",
    ):
        uvbeam.interp(
            az_array=az_interp_vals, za_array=za_interp_vals, polarizations=["pI"],
        )
@pytest.mark.parametrize("beam_type", ["efield", "power"])
def test_interp_longitude_branch_cut(beam_type, cst_efield_2freq, cst_power_2freq):
    """Interpolation must wrap across the azimuth branch cut: -1 deg must match
    359 deg and 0 deg must match 360 deg at every zenith angle."""
    if beam_type == "power":
        beam = cst_power_2freq
    else:
        beam = cst_efield_2freq

    beam.interpolation_function = "az_za_simple"
    # Four azimuths (-1, 359, 0, 360 deg), each evaluated at all 181 za values.
    interp_data_array, interp_basis_vector = beam.interp(
        az_array=np.deg2rad(
            np.repeat(np.array([[-1], [359], [0], [360]]), 181, axis=1).flatten()
        ),
        za_array=np.repeat(beam.axis2_array[np.newaxis, :], 4, axis=0).flatten(),
    )

    if beam_type == "power":
        npol_feed = beam.Npols
    else:
        npol_feed = beam.Nfeeds

    interp_data_array = interp_data_array.reshape(
        beam.Naxes_vec, beam.Nspws, npol_feed, beam.Nfreqs, 4, beam.Naxes2
    )

    # -1 deg vs 359 deg
    assert np.allclose(
        interp_data_array[:, :, :, :, 0, :],
        interp_data_array[:, :, :, :, 1, :],
        rtol=beam._data_array.tols[0],
        atol=beam._data_array.tols[1],
    )

    # 0 deg vs 360 deg
    assert np.allclose(
        interp_data_array[:, :, :, :, 2, :],
        interp_data_array[:, :, :, :, 3, :],
        rtol=beam._data_array.tols[0],
        atol=beam._data_array.tols[1],
    )
def test_interp_healpix_nside(cst_efield_2freq_cut, cst_efield_2freq_cut_healpix):
    """Check the healpix nside chosen by to_healpix and the cut-sky error.

    The nside should be the smallest power of two whose pixel resolution is
    finer than the az/za grid spacing. Simple healpix interpolation on a
    partial-sky map must raise a ValueError.
    """
    efield_beam = cst_efield_2freq_cut
    efield_beam.interpolation_function = "az_za_simple"

    # Finest spacing of the two image axes drives the required resolution.
    axis_spacings = np.array(
        [np.diff(efield_beam.axis1_array)[0], np.diff(efield_beam.axis2_array)[0]]
    )
    min_res = np.min(axis_spacings)
    nside_min_res = np.sqrt(3 / np.pi) * np.radians(60.0) / min_res
    expected_nside = int(2 ** np.ceil(np.log2(nside_min_res)))

    new_efield_beam = cst_efield_2freq_cut_healpix
    assert new_efield_beam.nside == expected_nside

    new_efield_beam.interpolation_function = "healpix_simple"
    # A cut-sky healpix map cannot be used with the simple interpolation.
    with pytest.raises(
        ValueError, match="simple healpix interpolation requires full sky healpix maps."
    ):
        new_efield_beam.interp(
            az_array=efield_beam.axis1_array,
            za_array=efield_beam.axis2_array,
            az_za_grid=True,
            new_object=True,
        )
def test_healpix_interpolation(cst_efield_2freq):
    """Test interpolation on healpix-gridded efield and power beams.

    Converts an az/za efield beam to healpix, then checks that interpolating
    back to the original pixel locations reproduces the data, that
    interpolating to a subset of pixels matches a select, that round-trip
    interpolation back to the regular grid is sane, and that the various
    error conditions are raised. The same checks are repeated after
    converting to a power beam.
    """
    pytest.importorskip("astropy_healpix")
    efield_beam = cst_efield_2freq
    efield_beam.interpolation_function = "az_za_simple"
    # select every fourth point to make it smaller
    axis1_inds = np.arange(0, efield_beam.Naxes1, 4)
    axis2_inds = np.arange(0, efield_beam.Naxes2, 4)
    efield_beam.select(axis1_inds=axis1_inds, axis2_inds=axis2_inds)
    orig_efield_beam = efield_beam.copy()
    efield_beam.to_healpix()
    # check that interpolating to existing points gives the same answer
    efield_beam.interpolation_function = "healpix_simple"
    hp_obj = HEALPix(nside=efield_beam.nside)
    hpx_lon, hpx_lat = hp_obj.healpix_to_lonlat(efield_beam.pixel_array)
    # convert latitude to zenith angle
    za_orig_vals = (Angle(np.pi / 2, units.radian) - hpx_lat).radian
    az_orig_vals = hpx_lon.radian
    az_orig_vals = az_orig_vals.ravel(order="C")
    za_orig_vals = za_orig_vals.ravel(order="C")
    freq_orig_vals = np.array([123e6, 150e6])
    interp_data_array, interp_basis_vector = efield_beam.interp(
        az_array=az_orig_vals, za_array=za_orig_vals, freq_array=freq_orig_vals
    )
    data_array_compare = efield_beam.data_array
    interp_data_array = interp_data_array.reshape(data_array_compare.shape, order="F")
    assert np.allclose(data_array_compare, interp_data_array)
    # test that interp to every other point returns an object that matches a select
    pixel_inds = np.arange(0, efield_beam.Npixels, 2)
    select_beam = efield_beam.select(pixels=pixel_inds, inplace=False)
    interp_beam = efield_beam.interp(
        healpix_inds=efield_beam.pixel_array[pixel_inds],
        healpix_nside=efield_beam.nside,
        new_object=True,
    )
    assert select_beam.history != interp_beam.history
    interp_beam.history = select_beam.history
    assert select_beam == interp_beam
    # test interp from healpix to regular az/za grid
    new_reg_beam = efield_beam.interp(
        az_array=orig_efield_beam.axis1_array,
        za_array=orig_efield_beam.axis2_array,
        az_za_grid=True,
        new_object=True,
    )
    # this diff is pretty large. 2 rounds of interpolation is not a good thing.
    # but we can check that the rest of the object makes sense
    diff = new_reg_beam.data_array - orig_efield_beam.data_array
    diff_ratio = diff / orig_efield_beam.data_array
    assert np.all(np.abs(diff_ratio) < 4)
    # set data_array tolerances higher to test the rest of the object
    # tols are (relative, absolute)
    tols = [4, 0]
    new_reg_beam._data_array.tols = tols
    assert new_reg_beam.history != orig_efield_beam.history
    new_reg_beam.history = orig_efield_beam.history
    new_reg_beam.interpolation_function = "az_za_simple"
    assert new_reg_beam == orig_efield_beam
    # test errors with specifying healpix_inds without healpix_nside
    hp_obj = HEALPix(nside=efield_beam.nside)
    with pytest.raises(
        ValueError, match="healpix_nside must be set if healpix_inds is set"
    ):
        efield_beam.interp(
            healpix_inds=np.arange(hp_obj.npix), freq_array=freq_orig_vals
        )
    # test error setting both healpix_nside and az_array
    with pytest.raises(
        ValueError,
        match="healpix_nside and healpix_inds can not be set if az_array or "
        "za_array is set.",
    ):
        efield_beam.interp(
            healpix_nside=efield_beam.nside,
            az_array=az_orig_vals,
            za_array=za_orig_vals,
            freq_array=freq_orig_vals,
        )
    # basis_vector exception
    efield_beam.basis_vector_array[0, 1, :] = 10.0
    with pytest.raises(
        NotImplementedError,
        match="interpolation for input basis vectors that are not aligned to the "
        "native theta/phi coordinate system is not yet supported",
    ):
        efield_beam.interp(
            az_array=az_orig_vals, za_array=za_orig_vals,
        )
    # now convert to power beam
    power_beam = efield_beam.efield_to_power(inplace=False)
    del efield_beam
    interp_data_array, interp_basis_vector = power_beam.interp(
        az_array=az_orig_vals, za_array=za_orig_vals, freq_array=freq_orig_vals
    )
    data_array_compare = power_beam.data_array
    interp_data_array = interp_data_array.reshape(data_array_compare.shape, order="F")
    assert np.allclose(data_array_compare, interp_data_array)
    # test that interp to every other point returns an object that matches a select
    pixel_inds = np.arange(0, power_beam.Npixels, 2)
    select_beam = power_beam.select(pixels=pixel_inds, inplace=False)
    interp_beam = power_beam.interp(
        healpix_inds=power_beam.pixel_array[pixel_inds],
        healpix_nside=power_beam.nside,
        new_object=True,
    )
    assert select_beam.history != interp_beam.history
    interp_beam.history = select_beam.history
    assert select_beam == interp_beam
    # assert not feeding frequencies gives same answer
    interp_data_array2, interp_basis_vector2 = power_beam.interp(
        az_array=az_orig_vals, za_array=za_orig_vals
    )
    assert np.allclose(interp_data_array, interp_data_array2)
    # assert not feeding az_array gives same answer
    interp_data_array2, interp_basis_vector2 = power_beam.interp(
        az_array=az_orig_vals, za_array=za_orig_vals
    )
    assert np.allclose(interp_data_array, interp_data_array2)
    # test requesting polarization gives the same answer
    interp_data_array2, interp_basis_vector2 = power_beam.interp(
        az_array=az_orig_vals, za_array=za_orig_vals, polarizations=["yy"]
    )
    assert np.allclose(interp_data_array[:, :, 1:2], interp_data_array2[:, :, :1])
    # change complex data_array to real data_array and test again
    # NOTE: use the builtin ``complex`` here -- ``np.complex`` was a
    # deprecated alias for it and was removed in numpy 1.24.
    assert power_beam.data_array.dtype == complex
    power_beam.data_array = np.abs(power_beam.data_array)
    interp_data_array, interp_basis_vector = power_beam.interp(
        az_array=az_orig_vals, za_array=za_orig_vals, freq_array=freq_orig_vals
    )
    data_array_compare = power_beam.data_array
    interp_data_array = interp_data_array.reshape(data_array_compare.shape, order="F")
    assert np.allclose(data_array_compare, interp_data_array)
    # test no inputs equals same answer
    interp_data_array2, interp_basis_vector2 = power_beam.interp()
    assert np.allclose(interp_data_array, interp_data_array2)
    # assert polarization value error
    with pytest.raises(
        ValueError,
        match="Requested polarization 1 not found in self.polarization_array",
    ):
        power_beam.interp(
            az_array=az_orig_vals, za_array=za_orig_vals, polarizations=["pI"]
        )
    # check error when pixels out of order
    power_beam.pixel_array = power_beam.pixel_array[
        np.argsort(power_beam.data_array[0, 0, 0, 0, :])
    ]
    with pytest.raises(
        ValueError,
        match="simple healpix interpolation requires healpix pixels to be in order.",
    ):
        power_beam.interp(az_array=az_orig_vals, za_array=za_orig_vals)
    # healpix coord exception
    power_beam.pixel_coordinate_system = "foo"
    with pytest.raises(ValueError, match='pixel_coordinate_system must be "healpix"'):
        power_beam.interp(az_array=az_orig_vals, za_array=za_orig_vals)
def test_to_healpix(
    cst_power_2freq_cut,
    cst_power_2freq_cut_healpix,
    cst_efield_2freq_cut,
    cst_efield_2freq_cut_healpix,
):
    """Test the to_healpix conversion for power and efield beams.

    Checks the history string produced by the conversion, that the pixel
    count is consistent with the sky coverage of the cut beam, the error for
    an unsupported pixel coordinate system, and that converting
    efield->power before vs. after the healpix interpolation agrees to
    within loose tolerances.
    """
    power_beam = cst_power_2freq_cut
    power_beam_healpix = cst_power_2freq_cut_healpix
    # fraction of the sphere covered by the 10-degree zenith-angle cut
    sky_area_reduction_factor = (1.0 - np.cos(np.deg2rad(10))) / 2.0
    # check that history is updated appropriately
    assert power_beam_healpix.history == (
        power_beam.history
        + " Interpolated from "
        + power_beam.coordinate_system_dict["az_za"]["description"]
        + " to "
        + power_beam.coordinate_system_dict["healpix"]["description"]
        + " using pyuvdata with interpolation_function = az_za_simple."
    )
    # pixel count should roughly track the covered sky fraction
    # (1.5 fudge factor allows for partial pixels at the cut boundary)
    hp_obj = HEALPix(nside=power_beam_healpix.nside)
    assert power_beam_healpix.Npixels <= hp_obj.npix * (sky_area_reduction_factor * 1.5)
    # test that Npixels make sense
    n_max_pix = power_beam.Naxes1 * power_beam.Naxes2
    assert power_beam_healpix.Npixels <= n_max_pix
    # Test error if not az_za
    power_beam.interpolation_function = "az_za_simple"
    power_beam.pixel_coordinate_system = "sin_zenith"
    with pytest.raises(ValueError, match='pixel_coordinate_system must be "az_za"'):
        power_beam.to_healpix()
    # Now check Efield interpolation
    efield_beam = cst_efield_2freq_cut
    interp_then_sq = cst_efield_2freq_cut_healpix
    interp_then_sq.efield_to_power(calc_cross_pols=False)
    # convert to power and then interpolate to compare.
    # Don't use power read from file because it has rounding errors that will
    # dominate this comparison
    efield_beam.interpolation_function = "az_za_simple"
    sq_then_interp = efield_beam.efield_to_power(calc_cross_pols=False, inplace=False)
    sq_then_interp.to_healpix()
    # square then interpolate is different from interpolate then square at a
    # higher level than normally allowed in the equality.
    # We can live with it for now, may need to improve it later
    diff = np.abs(interp_then_sq.data_array - sq_then_interp.data_array)
    assert np.max(diff) < 0.6
    # relative difference computed against the mean of the two arrays
    reldiff = diff * 2 / np.abs(interp_then_sq.data_array + sq_then_interp.data_array)
    assert np.max(reldiff) < 0.005
    # set data_array tolerances higher to test the rest of the object
    # tols are (relative, absolute)
    tols = [0.05, 0]
    sq_then_interp._data_array.tols = tols
    # check history changes
    interp_history_add = (
        " Interpolated from "
        + power_beam.coordinate_system_dict["az_za"]["description"]
        + " to "
        + power_beam.coordinate_system_dict["healpix"]["description"]
        + " using pyuvdata with interpolation_function = az_za_simple."
    )
    sq_history_add = " Converted from efield to power using pyuvdata."
    assert (
        sq_then_interp.history
        == efield_beam.history + sq_history_add + interp_history_add
    )
    assert (
        interp_then_sq.history
        == efield_beam.history + interp_history_add + sq_history_add
    )
    # now change history on one so we can compare the rest of the object
    sq_then_interp.history = efield_beam.history + interp_history_add + sq_history_add
    assert sq_then_interp == interp_then_sq
def test_select_axis(cst_power_1freq, tmp_path):
    """Test selecting along the first and second image axes of a power beam.

    Covers: correct downselection plus history update, writing a
    single-element axis to beamfits, out-of-range index errors, and the
    warning/error behavior for unevenly spaced image pixels.
    """
    power_beam = cst_power_1freq
    # add optional parameters for testing purposes
    power_beam.extra_keywords = {"KEY1": "test_keyword"}
    power_beam.reference_impedance = 340.0
    power_beam.receiver_temperature_array = np.random.normal(
        50.0, 5, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.loss_array = np.random.normal(
        50.0, 5, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.mismatch_array = np.random.normal(
        0.0, 1.0, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.s_parameters = np.random.normal(
        0.0, 0.3, size=(4, power_beam.Nspws, power_beam.Nfreqs)
    )
    old_history = power_beam.history
    # Test selecting on axis1
    inds1_to_keep = np.arange(14, 63)
    power_beam2 = power_beam.select(axis1_inds=inds1_to_keep, inplace=False)
    assert len(inds1_to_keep) == power_beam2.Naxes1
    for i in inds1_to_keep:
        assert power_beam.axis1_array[i] in power_beam2.axis1_array
    for i in np.unique(power_beam2.axis1_array):
        assert i in power_beam.axis1_array
    assert uvutils._check_histories(
        old_history + "  Downselected to "
        "specific parts of first image axis "
        "using pyuvdata.",
        power_beam2.history,
    )
    write_file_beamfits = str(tmp_path / "select_beam.fits")
    # test writing beamfits with only one element in axis1
    inds_to_keep = [len(inds1_to_keep) + 1]
    power_beam2 = power_beam.select(axis1_inds=inds_to_keep, inplace=False)
    power_beam2.write_beamfits(write_file_beamfits, clobber=True)
    # check for errors associated with indices not included in data
    # (power_beam2 now has only one axis1 element, so this index is invalid)
    with pytest.raises(ValueError, match="axis1_inds must be > 0 and < Naxes1"):
        power_beam2.select(axis1_inds=[power_beam.Naxes1 - 1])
    # check for warnings and errors associated with unevenly spaced image pixels
    power_beam2 = power_beam.copy()
    with uvtest.check_warnings(
        UserWarning, "Selected values along first image axis are not evenly spaced"
    ):
        power_beam2.select(axis1_inds=[0, 5, 6])
    with pytest.raises(
        ValueError,
        match="The pixels are not evenly spaced along first axis. "
        "The beam fits format does not support unevenly spaced pixels.",
    ):
        power_beam2.write_beamfits(write_file_beamfits)
    # Test selecting on axis2
    inds2_to_keep = np.arange(5, 14)
    power_beam2 = power_beam.select(axis2_inds=inds2_to_keep, inplace=False)
    assert len(inds2_to_keep) == power_beam2.Naxes2
    for i in inds2_to_keep:
        assert power_beam.axis2_array[i] in power_beam2.axis2_array
    for i in np.unique(power_beam2.axis2_array):
        assert i in power_beam.axis2_array
    assert uvutils._check_histories(
        old_history + "  Downselected to "
        "specific parts of second image axis "
        "using pyuvdata.",
        power_beam2.history,
    )
    write_file_beamfits = str(tmp_path / "select_beam.fits")
    # test writing beamfits with only one element in axis2
    inds_to_keep = [len(inds2_to_keep) + 1]
    power_beam2 = power_beam.select(axis2_inds=inds_to_keep, inplace=False)
    power_beam2.write_beamfits(write_file_beamfits, clobber=True)
    # check for errors associated with indices not included in data
    with pytest.raises(ValueError, match="axis2_inds must be > 0 and < Naxes2"):
        power_beam2.select(axis2_inds=[power_beam.Naxes2 - 1])
    # check for warnings and errors associated with unevenly spaced image pixels
    power_beam2 = power_beam.copy()
    with uvtest.check_warnings(
        UserWarning, "Selected values along second image axis are not evenly spaced"
    ):
        power_beam2.select(axis2_inds=[0, 5, 6])
    with pytest.raises(
        ValueError,
        match="The pixels are not evenly spaced along second axis. "
        "The beam fits format does not support unevenly spaced pixels.",
    ):
        power_beam2.write_beamfits(write_file_beamfits)
def test_select_frequencies(cst_power_1freq, tmp_path):
    """Test selecting by frequency and by frequency channel on a power beam.

    Covers: downselection plus history update, writing a single-frequency
    beamfits file, errors for missing frequencies, warning/error behavior
    for unevenly spaced frequencies, and combined frequencies + freq_chans
    selection.
    """
    power_beam = cst_power_1freq
    # generate more frequencies for testing by copying and adding several times
    while power_beam.Nfreqs < 8:
        new_beam = power_beam.copy()
        new_beam.freq_array = power_beam.freq_array + power_beam.Nfreqs * 1e6
        power_beam += new_beam
    # add optional parameters for testing purposes
    power_beam.extra_keywords = {"KEY1": "test_keyword"}
    power_beam.reference_impedance = 340.0
    power_beam.receiver_temperature_array = np.random.normal(
        50.0, 5, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.loss_array = np.random.normal(
        50.0, 5, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.mismatch_array = np.random.normal(
        0.0, 1.0, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.s_parameters = np.random.normal(
        0.0, 0.3, size=(4, power_beam.Nspws, power_beam.Nfreqs)
    )
    old_history = power_beam.history
    # select a contiguous range of frequencies by value
    freqs_to_keep = power_beam.freq_array[0, np.arange(2, 7)]
    power_beam2 = power_beam.select(frequencies=freqs_to_keep, inplace=False)
    assert len(freqs_to_keep) == power_beam2.Nfreqs
    for f in freqs_to_keep:
        assert f in power_beam2.freq_array
    for f in np.unique(power_beam2.freq_array):
        assert f in freqs_to_keep
    assert uvutils._check_histories(
        old_history + "  Downselected to " "specific frequencies using pyuvdata.",
        power_beam2.history,
    )
    write_file_beamfits = str(tmp_path / "select_beam.fits")
    # test writing beamfits with only one frequency
    freqs_to_keep = power_beam.freq_array[0, 5]
    power_beam2 = power_beam.select(frequencies=freqs_to_keep, inplace=False)
    power_beam2.write_beamfits(write_file_beamfits, clobber=True)
    # a frequency guaranteed not to be present in the object
    freq_select = np.max(power_beam.freq_array) + 10
    # check for errors associated with frequencies not included in data
    with pytest.raises(
        ValueError,
        match="Frequency {f} is not present in the freq_array".format(f=freq_select),
    ):
        power_beam.select(frequencies=[freq_select])
    # check for warnings and errors associated with unevenly spaced frequencies
    power_beam2 = power_beam.copy()
    with uvtest.check_warnings(
        UserWarning, "Selected frequencies are not evenly spaced"
    ):
        power_beam2.select(frequencies=power_beam2.freq_array[0, [0, 5, 6]])
    with pytest.raises(ValueError, match="The frequencies are not evenly spaced "):
        power_beam2.write_beamfits(write_file_beamfits)
    # Test selecting on freq_chans
    chans_to_keep = np.arange(2, 7)
    power_beam2 = power_beam.select(freq_chans=chans_to_keep, inplace=False)
    assert len(chans_to_keep) == power_beam2.Nfreqs
    for chan in chans_to_keep:
        assert power_beam.freq_array[0, chan] in power_beam2.freq_array
    for f in np.unique(power_beam2.freq_array):
        assert f in power_beam.freq_array[0, chans_to_keep]
    assert uvutils._check_histories(
        old_history + "  Downselected to " "specific frequencies using pyuvdata.",
        power_beam2.history,
    )
    # Test selecting both channels and frequencies
    freqs_to_keep = power_beam.freq_array[0, np.arange(6, 8)]  # Overlaps with chans
    all_chans_to_keep = np.arange(2, 8)
    power_beam2 = power_beam.select(
        frequencies=freqs_to_keep, freq_chans=chans_to_keep, inplace=False
    )
    # the union of the frequency and channel selections should be kept
    assert len(all_chans_to_keep) == power_beam2.Nfreqs
    for chan in all_chans_to_keep:
        assert power_beam.freq_array[0, chan] in power_beam2.freq_array
    for f in np.unique(power_beam2.freq_array):
        assert f in power_beam.freq_array[0, all_chans_to_keep]
def test_select_feeds(cst_efield_1freq):
    """Test selecting by feed on an efield beam.

    Covers: downselection plus history update, errors for missing feeds and
    for using polarizations on an efield beam, and the basis-vector checks
    performed by UVBeam.check().
    """
    efield_beam = cst_efield_1freq
    # add optional parameters for testing purposes
    efield_beam.extra_keywords = {"KEY1": "test_keyword"}
    efield_beam.reference_impedance = 340.0
    efield_beam.receiver_temperature_array = np.random.normal(
        50.0, 5, size=(efield_beam.Nspws, efield_beam.Nfreqs)
    )
    efield_beam.loss_array = np.random.normal(
        50.0, 5, size=(efield_beam.Nspws, efield_beam.Nfreqs)
    )
    efield_beam.mismatch_array = np.random.normal(
        0.0, 1.0, size=(efield_beam.Nspws, efield_beam.Nfreqs)
    )
    efield_beam.s_parameters = np.random.normal(
        0.0, 0.3, size=(4, efield_beam.Nspws, efield_beam.Nfreqs)
    )
    old_history = efield_beam.history
    feeds_to_keep = ["x"]
    efield_beam2 = efield_beam.select(feeds=feeds_to_keep, inplace=False)
    assert len(feeds_to_keep) == efield_beam2.Nfeeds
    for f in feeds_to_keep:
        assert f in efield_beam2.feed_array
    for f in np.unique(efield_beam2.feed_array):
        assert f in feeds_to_keep
    assert uvutils._check_histories(
        old_history + "  Downselected to " "specific feeds using pyuvdata.",
        efield_beam2.history,
    )
    # check for errors associated with feeds not included in data
    with pytest.raises(
        ValueError, match="Feed {f} is not present in the feed_array".format(f="N")
    ):
        efield_beam.select(feeds=["N"])
    # check for error with selecting polarizations on efield beams
    with pytest.raises(
        ValueError, match="polarizations cannot be used with efield beams"
    ):
        efield_beam.select(polarizations=[-5, -6])
    # Test check basis vectors
    # a basis vector with length > 1 must fail the check
    efield_beam.basis_vector_array[0, 1, :, :] = 1.0
    with pytest.raises(
        ValueError, match="basis vectors must have lengths of 1 or less."
    ):
        efield_beam.check()
    # a unit-length basis vector (components sqrt(0.5) each) must pass
    efield_beam.basis_vector_array[0, 0, :, :] = np.sqrt(0.5)
    efield_beam.basis_vector_array[0, 1, :, :] = np.sqrt(0.5)
    assert efield_beam.check()
    # a missing basis_vector_array must fail the check
    efield_beam.basis_vector_array = None
    with pytest.raises(
        ValueError, match="Required UVParameter _basis_vector_array has not been set."
    ):
        efield_beam.check()
def test_select_polarizations(cst_power_1freq):
    """Test selecting by polarization on a power beam.

    Covers: downselection plus history update, errors for missing
    polarizations, warning/error behavior for unevenly spaced
    polarizations, and the error for using feeds on a power beam.
    """
    power_beam = cst_power_1freq
    # generate more polarizations for testing by copying and adding several times
    while power_beam.Npols < 4:
        new_beam = power_beam.copy()
        new_beam.polarization_array = power_beam.polarization_array - power_beam.Npols
        power_beam += new_beam
    # add optional parameters for testing purposes
    power_beam.extra_keywords = {"KEY1": "test_keyword"}
    power_beam.reference_impedance = 340.0
    power_beam.receiver_temperature_array = np.random.normal(
        50.0, 5, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.loss_array = np.random.normal(
        50.0, 5, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.mismatch_array = np.random.normal(
        0.0, 1.0, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.s_parameters = np.random.normal(
        0.0, 0.3, size=(4, power_beam.Nspws, power_beam.Nfreqs)
    )
    old_history = power_beam.history
    pols_to_keep = [-5, -6]
    power_beam2 = power_beam.select(polarizations=pols_to_keep, inplace=False)
    assert len(pols_to_keep) == power_beam2.Npols
    for p in pols_to_keep:
        assert p in power_beam2.polarization_array
    for p in np.unique(power_beam2.polarization_array):
        assert p in pols_to_keep
    assert uvutils._check_histories(
        old_history + "  Downselected to " "specific polarizations using pyuvdata.",
        power_beam2.history,
    )
    # check for errors associated with polarizations not included in data
    with pytest.raises(
        ValueError,
        match="polarization {p} is not present in the polarization_array".format(p=-3),
    ):
        power_beam.select(polarizations=[-3, -4])
    # check for warnings and errors associated with unevenly spaced polarizations
    with uvtest.check_warnings(
        UserWarning, "Selected polarizations are not evenly spaced"
    ):
        power_beam.select(polarizations=power_beam.polarization_array[[0, 1, 3]])
    # NOTE(review): unlike the other select tests this writes under DATA_PATH
    # rather than tmp_path; the write is expected to raise so no file should
    # actually be created -- consider switching to the tmp_path fixture.
    write_file_beamfits = os.path.join(DATA_PATH, "test/select_beam.fits")
    with pytest.raises(
        ValueError, match="The polarization values are not evenly spaced "
    ):
        power_beam.write_beamfits(write_file_beamfits)
    # check for error with selecting on feeds on power beams
    with pytest.raises(ValueError, match="feeds cannot be used with power beams"):
        power_beam.select(feeds=["x"])
def test_select(cst_power_1freq, cst_efield_1freq):
    """Test selecting along all axes at once on power and efield beams.

    Selects image-axis indices, frequencies, and polarizations (power) or
    feeds (efield) in a single select call and checks the resulting arrays
    and history strings.
    """
    power_beam = cst_power_1freq
    # generate more frequencies for testing by copying and adding
    new_beam = power_beam.copy()
    new_beam.freq_array = power_beam.freq_array + power_beam.Nfreqs * 1e6
    power_beam += new_beam
    # add optional parameters for testing purposes
    power_beam.extra_keywords = {"KEY1": "test_keyword"}
    power_beam.reference_impedance = 340.0
    power_beam.receiver_temperature_array = np.random.normal(
        50.0, 5, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.loss_array = np.random.normal(
        50.0, 5, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.mismatch_array = np.random.normal(
        0.0, 1.0, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.s_parameters = np.random.normal(
        0.0, 0.3, size=(4, power_beam.Nspws, power_beam.Nfreqs)
    )
    # now test selecting along all axes at once
    old_history = power_beam.history
    inds1_to_keep = np.arange(14, 63)
    inds2_to_keep = np.arange(5, 14)
    freqs_to_keep = [power_beam.freq_array[0, 0]]
    pols_to_keep = [-5]
    power_beam2 = power_beam.select(
        axis1_inds=inds1_to_keep,
        axis2_inds=inds2_to_keep,
        frequencies=freqs_to_keep,
        polarizations=pols_to_keep,
        inplace=False,
    )
    assert len(inds1_to_keep) == power_beam2.Naxes1
    for i in inds1_to_keep:
        assert power_beam.axis1_array[i] in power_beam2.axis1_array
    for i in np.unique(power_beam2.axis1_array):
        assert i in power_beam.axis1_array
    assert len(inds2_to_keep) == power_beam2.Naxes2
    for i in inds2_to_keep:
        assert power_beam.axis2_array[i] in power_beam2.axis2_array
    for i in np.unique(power_beam2.axis2_array):
        assert i in power_beam.axis2_array
    assert len(freqs_to_keep) == power_beam2.Nfreqs
    for f in freqs_to_keep:
        assert f in power_beam2.freq_array
    for f in np.unique(power_beam2.freq_array):
        assert f in freqs_to_keep
    assert len(pols_to_keep) == power_beam2.Npols
    for p in pols_to_keep:
        assert p in power_beam2.polarization_array
    for p in np.unique(power_beam2.polarization_array):
        assert p in pols_to_keep
    assert uvutils._check_histories(
        old_history + "  Downselected to "
        "specific parts of first image axis, "
        "parts of second image axis, "
        "frequencies, polarizations using pyuvdata.",
        power_beam2.history,
    )
    # repeat for efield beam
    efield_beam = cst_efield_1freq
    # generate more frequencies for testing by copying and adding
    new_beam = efield_beam.copy()
    new_beam.freq_array = efield_beam.freq_array + efield_beam.Nfreqs * 1e6
    efield_beam += new_beam
    # add optional parameters for testing purposes
    efield_beam.extra_keywords = {"KEY1": "test_keyword"}
    efield_beam.reference_impedance = 340.0
    efield_beam.receiver_temperature_array = np.random.normal(
        50.0, 5, size=(efield_beam.Nspws, efield_beam.Nfreqs)
    )
    efield_beam.loss_array = np.random.normal(
        50.0, 5, size=(efield_beam.Nspws, efield_beam.Nfreqs)
    )
    efield_beam.mismatch_array = np.random.normal(
        0.0, 1.0, size=(efield_beam.Nspws, efield_beam.Nfreqs)
    )
    efield_beam.s_parameters = np.random.normal(
        0.0, 0.3, size=(4, efield_beam.Nspws, efield_beam.Nfreqs)
    )
    feeds_to_keep = ["x"]
    efield_beam2 = efield_beam.select(
        axis1_inds=inds1_to_keep,
        axis2_inds=inds2_to_keep,
        frequencies=freqs_to_keep,
        feeds=feeds_to_keep,
        inplace=False,
    )
    assert len(inds1_to_keep) == efield_beam2.Naxes1
    for i in inds1_to_keep:
        assert efield_beam.axis1_array[i] in efield_beam2.axis1_array
    for i in np.unique(efield_beam2.axis1_array):
        assert i in efield_beam.axis1_array
    assert len(inds2_to_keep) == efield_beam2.Naxes2
    for i in inds2_to_keep:
        assert efield_beam.axis2_array[i] in efield_beam2.axis2_array
    for i in np.unique(efield_beam2.axis2_array):
        assert i in efield_beam.axis2_array
    assert len(freqs_to_keep) == efield_beam2.Nfreqs
    for f in freqs_to_keep:
        assert f in efield_beam2.freq_array
    for f in np.unique(efield_beam2.freq_array):
        assert f in freqs_to_keep
    assert len(feeds_to_keep) == efield_beam2.Nfeeds
    for f in feeds_to_keep:
        assert f in efield_beam2.feed_array
    for f in np.unique(efield_beam2.feed_array):
        assert f in feeds_to_keep
    assert uvutils._check_histories(
        old_history + "  Downselected to "
        "specific parts of first image axis, "
        "parts of second image axis, "
        "frequencies, feeds using pyuvdata.",
        efield_beam2.history,
    )
def test_add(cst_power_1freq, cst_efield_1freq):
    """Test combining UVBeam objects along each supported axis.

    Checks that selecting two disjoint halves along an axis and adding them
    back together reproduces the original object (in order and out of
    order), for both image axes, frequencies, polarizations, and feeds;
    also checks multi-axis combinations, warning conditions, history
    handling, and the failure modes of the add function.

    Bug fix: two assertions previously read ``assert beam1, other`` -- the
    comma made the second operand the assert *message*, so the equality was
    never actually tested. They now compare with ``==``.
    """
    power_beam = cst_power_1freq
    # generate more frequencies for testing by copying and adding
    new_beam = power_beam.copy()
    new_beam.freq_array = power_beam.freq_array + power_beam.Nfreqs * 1e6
    power_beam += new_beam
    # add optional parameters for testing purposes
    power_beam.extra_keywords = {"KEY1": "test_keyword"}
    power_beam.reference_impedance = 340.0
    power_beam.receiver_temperature_array = np.random.normal(
        50.0, 5, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.loss_array = np.random.normal(
        50.0, 5, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.mismatch_array = np.random.normal(
        0.0, 1.0, size=(power_beam.Nspws, power_beam.Nfreqs)
    )
    power_beam.s_parameters = np.random.normal(
        0.0, 0.3, size=(4, power_beam.Nspws, power_beam.Nfreqs)
    )
    # Add along first image axis
    beam1 = power_beam.select(axis1_inds=np.arange(0, 180), inplace=False)
    beam2 = power_beam.select(axis1_inds=np.arange(180, 360), inplace=False)
    beam1 += beam2
    # Check history is correct, before replacing and doing a full object check
    assert uvutils._check_histories(
        power_beam.history + "  Downselected to specific parts of "
        "first image axis using pyuvdata. "
        "Combined data along first image axis "
        "using pyuvdata.",
        beam1.history,
    )
    beam1.history = power_beam.history
    assert beam1 == power_beam
    # Out of order - axis1
    beam1 = power_beam.select(axis1_inds=np.arange(180, 360), inplace=False)
    beam2 = power_beam.select(axis1_inds=np.arange(0, 180), inplace=False)
    beam1 += beam2
    beam1.history = power_beam.history
    assert beam1 == power_beam
    # Add along second image axis
    beam1 = power_beam.select(axis2_inds=np.arange(0, 90), inplace=False)
    beam2 = power_beam.select(axis2_inds=np.arange(90, 181), inplace=False)
    beam1 += beam2
    # Check history is correct, before replacing and doing a full object check
    assert uvutils._check_histories(
        power_beam.history + "  Downselected to specific parts of "
        "second image axis using pyuvdata. "
        "Combined data along second image axis "
        "using pyuvdata.",
        beam1.history,
    )
    beam1.history = power_beam.history
    assert beam1 == power_beam
    # Out of order - axis2
    beam1 = power_beam.select(axis2_inds=np.arange(90, 181), inplace=False)
    beam2 = power_beam.select(axis2_inds=np.arange(0, 90), inplace=False)
    beam1 += beam2
    beam1.history = power_beam.history
    assert beam1 == power_beam
    # Add frequencies
    beam1 = power_beam.select(freq_chans=0, inplace=False)
    beam2 = power_beam.select(freq_chans=1, inplace=False)
    beam1 += beam2
    # Check history is correct, before replacing and doing a full object check
    assert uvutils._check_histories(
        power_beam.history + "  Downselected to specific frequencies "
        "using pyuvdata. Combined data along "
        "frequency axis using pyuvdata.",
        beam1.history,
    )
    beam1.history = power_beam.history
    assert beam1 == power_beam
    # Out of order - freqs
    beam1 = power_beam.select(freq_chans=1, inplace=False)
    beam2 = power_beam.select(freq_chans=0, inplace=False)
    beam1 += beam2
    beam1.history = power_beam.history
    assert beam1 == power_beam
    # Add polarizations
    beam1 = power_beam.select(polarizations=-5, inplace=False)
    beam2 = power_beam.select(polarizations=-6, inplace=False)
    beam1 += beam2
    assert uvutils._check_histories(
        power_beam.history + "  Downselected to specific polarizations "
        "using pyuvdata. Combined data along "
        "polarization axis using pyuvdata.",
        beam1.history,
    )
    beam1.history = power_beam.history
    assert beam1 == power_beam
    # Out of order - pols
    beam1 = power_beam.select(polarizations=-6, inplace=False)
    beam2 = power_beam.select(polarizations=-5, inplace=False)
    beam1 += beam2
    beam1.history = power_beam.history
    assert beam1 == power_beam
    # Add feeds
    efield_beam = cst_efield_1freq
    # generate more frequencies for testing by copying and adding
    new_beam = efield_beam.copy()
    new_beam.freq_array = efield_beam.freq_array + efield_beam.Nfreqs * 1e6
    efield_beam += new_beam
    # add optional parameters for testing purposes
    efield_beam.extra_keywords = {"KEY1": "test_keyword"}
    efield_beam.reference_impedance = 340.0
    efield_beam.receiver_temperature_array = np.random.normal(
        50.0, 5, size=(efield_beam.Nspws, efield_beam.Nfreqs)
    )
    efield_beam.loss_array = np.random.normal(
        50.0, 5, size=(efield_beam.Nspws, efield_beam.Nfreqs)
    )
    efield_beam.mismatch_array = np.random.normal(
        0.0, 1.0, size=(efield_beam.Nspws, efield_beam.Nfreqs)
    )
    efield_beam.s_parameters = np.random.normal(
        0.0, 0.3, size=(4, efield_beam.Nspws, efield_beam.Nfreqs)
    )
    beam1 = efield_beam.select(feeds=efield_beam.feed_array[0], inplace=False)
    beam2 = efield_beam.select(feeds=efield_beam.feed_array[1], inplace=False)
    beam1 += beam2
    assert uvutils._check_histories(
        efield_beam.history + "  Downselected to specific feeds "
        "using pyuvdata. Combined data along "
        "feed axis using pyuvdata.",
        beam1.history,
    )
    beam1.history = efield_beam.history
    assert beam1 == efield_beam
    # Out of order - feeds
    beam1 = efield_beam.select(feeds=efield_beam.feed_array[1], inplace=False)
    beam2 = efield_beam.select(feeds=efield_beam.feed_array[0], inplace=False)
    beam1 += beam2
    beam1.history = efield_beam.history
    # was `assert beam1, efield_beam` -- an always-true assert with a message
    assert beam1 == efield_beam
    # Add multiple axes
    beam_ref = power_beam.copy()
    beam1 = power_beam.select(
        axis1_inds=np.arange(0, power_beam.Naxes1 // 2),
        polarizations=power_beam.polarization_array[0],
        inplace=False,
    )
    beam2 = power_beam.select(
        axis1_inds=np.arange(power_beam.Naxes1 // 2, power_beam.Naxes1),
        polarizations=power_beam.polarization_array[1],
        inplace=False,
    )
    beam1 += beam2
    assert uvutils._check_histories(
        power_beam.history + "  Downselected to specific parts of "
        "first image axis, polarizations using "
        "pyuvdata. Combined data along first "
        "image, polarization axis using pyuvdata.",
        beam1.history,
    )
    # Zero out missing data in reference object
    beam_ref.data_array[:, :, 0, :, :, power_beam.Naxes1 // 2 :] = 0.0
    beam_ref.data_array[:, :, 1, :, :, : power_beam.Naxes1 // 2] = 0.0
    beam1.history = power_beam.history
    assert beam1 == beam_ref
    # Another combo with efield
    beam_ref = efield_beam.copy()
    beam1 = efield_beam.select(
        axis1_inds=np.arange(0, efield_beam.Naxes1 // 2),
        axis2_inds=np.arange(0, efield_beam.Naxes2 // 2),
        inplace=False,
    )
    beam2 = efield_beam.select(
        axis1_inds=np.arange(efield_beam.Naxes1 // 2, efield_beam.Naxes1),
        axis2_inds=np.arange(efield_beam.Naxes2 // 2, efield_beam.Naxes2),
        inplace=False,
    )
    beam1 += beam2
    assert uvutils._check_histories(
        efield_beam.history + "  Downselected to specific parts of "
        "first image axis, parts of second "
        "image axis using pyuvdata. Combined "
        "data along first image, second image "
        "axis using pyuvdata.",
        beam1.history,
    )
    # Zero out missing data in reference object
    beam_ref.data_array[
        :, :, :, :, : efield_beam.Naxes2 // 2, efield_beam.Naxes1 // 2 :
    ] = 0.0
    beam_ref.data_array[
        :, :, :, :, efield_beam.Naxes2 // 2 :, : efield_beam.Naxes1 // 2
    ] = 0.0
    beam_ref.basis_vector_array[
        :, :, : efield_beam.Naxes2 // 2, efield_beam.Naxes1 // 2 :
    ] = 0.0
    beam_ref.basis_vector_array[
        :, :, efield_beam.Naxes2 // 2 :, : efield_beam.Naxes1 // 2
    ] = 0.0
    beam1.history = efield_beam.history
    # was `assert beam1, beam_ref` -- an always-true assert with a message
    assert beam1 == beam_ref
    # Check warnings
    # generate more frequencies for testing by copying and adding several times
    while power_beam.Nfreqs < 8:
        new_beam = power_beam.copy()
        new_beam.freq_array = power_beam.freq_array + power_beam.Nfreqs * 1e6
        power_beam += new_beam
    beam1 = power_beam.select(freq_chans=np.arange(0, 4), inplace=False)
    beam2 = power_beam.select(freq_chans=np.arange(5, 8), inplace=False)
    with uvtest.check_warnings(
        UserWarning, "Combined frequencies are not evenly spaced"
    ):
        beam1.__add__(beam2)
    # generate more polarizations for testing by copying and adding several times
    while power_beam.Npols < 4:
        new_beam = power_beam.copy()
        new_beam.polarization_array = power_beam.polarization_array - power_beam.Npols
        power_beam += new_beam
    power_beam.receiver_temperature_array = np.ones((1, 8))
    beam1 = power_beam.select(
        polarizations=power_beam.polarization_array[0:2], inplace=False
    )
    beam2 = power_beam.select(
        polarizations=power_beam.polarization_array[3], inplace=False
    )
    with uvtest.check_warnings(
        UserWarning, "Combined polarizations are not evenly spaced"
    ):
        beam1.__iadd__(beam2)
    beam1 = power_beam.select(
        polarizations=power_beam.polarization_array[0:2], inplace=False
    )
    beam2 = power_beam.select(
        polarizations=power_beam.polarization_array[2:3], inplace=False
    )
    # dropping the optional parameter on one object should warn and drop it
    # on the combined object
    beam2.receiver_temperature_array = None
    assert beam1.receiver_temperature_array is not None
    with uvtest.check_warnings(
        UserWarning,
        "Only one of the UVBeam objects being combined has optional parameter",
    ):
        beam1.__iadd__(beam2)
    assert beam1.receiver_temperature_array is None
    # Combining histories
    beam1 = power_beam.select(
        polarizations=power_beam.polarization_array[0:2], inplace=False
    )
    beam2 = power_beam.select(
        polarizations=power_beam.polarization_array[2:4], inplace=False
    )
    beam2.history += " testing the history. Read/written with pyuvdata"
    beam1 += beam2
    assert uvutils._check_histories(
        power_beam.history + "  Downselected to specific polarizations "
        "using pyuvdata. Combined data along "
        "polarization axis using pyuvdata. "
        "testing the history.",
        beam1.history,
    )
    beam1.history = power_beam.history
    assert beam1 == power_beam
    # ------------------------
    # Test failure modes of add function
    # Wrong class
    beam1 = power_beam.copy()
    with pytest.raises(ValueError, match="Only UVBeam "):
        beam1.__iadd__(np.zeros(5))
    params_to_change = {
        "beam_type": "efield",
        "data_normalization": "solid_angle",
        "telescope_name": "foo",
        "feed_name": "foo",
        "feed_version": "v12",
        "model_name": "foo",
        "model_version": "v12",
        "pixel_coordinate_system": "sin_zenith",
        "Naxes_vec": 3,
        "nside": 16,
        "ordering": "nested",
    }
    beam1 = power_beam.select(freq_chans=0, inplace=False)
    beam2 = power_beam.select(freq_chans=1, inplace=False)
    for param, value in params_to_change.items():
        beam1_copy = beam1.copy()
        if param == "beam_type":
            # efield vs power mismatch comes from a different object entirely
            beam2_copy = efield_beam.select(freq_chans=1, inplace=False)
        elif param == "Naxes_vec":
            beam2_copy = beam2.copy()
            beam2_copy.Naxes_vec = value
            # data must grow along the vector axis to stay self-consistent
            beam2_copy.data_array = np.concatenate(
                (beam2_copy.data_array, beam2_copy.data_array, beam2_copy.data_array)
            )
        else:
            beam2_copy = beam2.copy()
            setattr(beam2_copy, param, value)
        with pytest.raises(
            ValueError,
            match=f"UVParameter {param} does not match. Cannot combine objects.",
        ):
            beam1_copy.__iadd__(beam2_copy)
    del beam1_copy
    del beam2_copy
    # Overlapping data
    beam2 = power_beam.copy()
    with pytest.raises(
        ValueError, match="These objects have overlapping data and cannot be combined."
    ):
        beam1.__iadd__(beam2)
@pytest.mark.parametrize("beam_type", ["efield", "power"])
def test_select_healpix_pixels(
beam_type, cst_power_1freq_cut_healpix, cst_efield_1freq_cut_healpix, tmp_path
):
if beam_type == "power":
beam_healpix = cst_power_1freq_cut_healpix
else:
beam_healpix = cst_efield_1freq_cut_healpix
# add optional parameters for testing purposes
beam_healpix.extra_keywords = {"KEY1": "test_keyword"}
beam_healpix.reference_impedance = 340.0
beam_healpix.receiver_temperature_array = np.random.normal(
50.0, 5, size=(beam_healpix.Nspws, beam_healpix.Nfreqs)
)
beam_healpix.loss_array = np.random.normal(
50.0, 5, size=(beam_healpix.Nspws, beam_healpix.Nfreqs)
)
beam_healpix.mismatch_array = np.random.normal(
0.0, 1.0, size=(beam_healpix.Nspws, beam_healpix.Nfreqs)
)
beam_healpix.s_parameters = np.random.normal(
0.0, 0.3, size=(4, beam_healpix.Nspws, beam_healpix.Nfreqs)
)
old_history = beam_healpix.history
pixels_to_keep = np.arange(31, 184)
beam_healpix2 = beam_healpix.select(pixels=pixels_to_keep, inplace=False)
assert len(pixels_to_keep) == beam_healpix2.Npixels
for pi in pixels_to_keep:
assert pi in beam_healpix2.pixel_array
for pi in np.unique(beam_healpix2.pixel_array):
assert pi in pixels_to_keep
assert uvutils._check_histories(
old_history + " Downselected to " "specific healpix pixels using pyuvdata.",
beam_healpix2.history,
)
write_file_beamfits = str(tmp_path / "select_beam.fits")
# test writing beamfits with only one pixel
pixels_to_keep = [43]
beam_healpix2 = beam_healpix.select(pixels=pixels_to_keep, inplace=False)
beam_healpix2.write_beamfits(write_file_beamfits, clobber=True)
# check for errors associated with pixels not included in data
pixel_select = 12 * beam_healpix.nside ** 2 + 10
with pytest.raises(
ValueError,
match="Pixel {p} is not present in the pixel_array".format(p=pixel_select),
):
beam_healpix.select(pixels=[pixel_select])
# test writing beamfits with non-contiguous pixels
pixels_to_keep = np.arange(2, 150, 4)
beam_healpix2 = beam_healpix.select(pixels=pixels_to_keep, inplace=False)
beam_healpix2.write_beamfits(write_file_beamfits, clobber=True)
# -----------------
# check for errors selecting axis1_inds on healpix beams
inds1_to_keep = np.arange(14, 63)
with pytest.raises(
ValueError, match="axis1_inds cannot be used with healpix coordinate system"
):
beam_healpix.select(axis1_inds=inds1_to_keep)
# check for errors selecting axis2_inds on healpix beams
inds2_to_keep = np.arange(5, 14)
with pytest.raises(
ValueError, match="axis2_inds cannot be used with healpix coordinate system"
):
beam_healpix.select(axis2_inds=inds2_to_keep)
# ------------------------
# test selecting along all axes at once for healpix beams
freqs_to_keep = [beam_healpix.freq_array[0, 0]]
if beam_type == "efield":
feeds_to_keep = ["x"]
pols_to_keep = None
else:
pols_to_keep = [-5]
feeds_to_keep = None
beam_healpix2 = beam_healpix.select(
pixels=pixels_to_keep,
frequencies=freqs_to_keep,
polarizations=pols_to_keep,
feeds=feeds_to_keep,
inplace=False,
)
assert len(pixels_to_keep) == beam_healpix2.Npixels
for pi in pixels_to_keep:
assert pi in beam_healpix2.pixel_array
for pi in np.unique(beam_healpix2.pixel_array):
assert pi in pixels_to_keep
assert len(freqs_to_keep) == beam_healpix2.Nfreqs
for f in freqs_to_keep:
assert f in beam_healpix2.freq_array
for f in np.unique(beam_healpix2.freq_array):
assert f in freqs_to_keep
if beam_type == "efield":
assert len(feeds_to_keep) == beam_healpix2.Nfeeds
for f in feeds_to_keep:
assert f in beam_healpix2.feed_array
for f in np.unique(beam_healpix2.feed_array):
assert f in feeds_to_keep
else:
assert len(pols_to_keep) == beam_healpix2.Npols
for p in pols_to_keep:
assert p in beam_healpix2.polarization_array
for p in np.unique(beam_healpix2.polarization_array):
assert p in pols_to_keep
if beam_type == "efield":
history_add = "feeds"
else:
history_add = "polarizations"
assert uvutils._check_histories(
old_history + " Downselected to "
"specific healpix pixels, frequencies, "
f"{history_add} using pyuvdata.",
beam_healpix2.history,
)
@pytest.mark.parametrize("beam_type", ["efield", "power"])
def test_select_healpix_pixels_error(
    beam_type, cst_power_2freq_cut, cst_efield_2freq_cut
):
    """Selecting healpix pixels on an az/za-gridded beam must raise."""
    target_beam = cst_power_2freq_cut if beam_type == "power" else cst_efield_2freq_cut
    # Pixel-based selection is only defined for healpix coordinate systems.
    expected_msg = "pixels can only be used with healpix coordinate system"
    with pytest.raises(ValueError, match=expected_msg):
        target_beam.select(pixels=np.arange(31, 184))
@pytest.mark.parametrize("beam_type", ["efield", "power"])
def test_add_healpix(
    beam_type, cst_power_2freq_cut_healpix, cst_efield_2freq_cut_healpix
):
    """Test combining healpix beams along pixel/frequency (and feed) axes.

    Verifies the combined history strings, that data in the combined object
    matches a zero-padded reference, the non-inplace `+` operator, and the
    error raised when combining objects with overlapping data.
    """
    if beam_type == "power":
        beam_healpix = cst_power_2freq_cut_healpix
    else:
        beam_healpix = cst_efield_2freq_cut_healpix
    # add optional parameters for testing purposes
    beam_healpix.extra_keywords = {"KEY1": "test_keyword"}
    beam_healpix.reference_impedance = 340.0
    beam_healpix.receiver_temperature_array = np.random.normal(
        50.0, 5, size=(beam_healpix.Nspws, beam_healpix.Nfreqs)
    )
    beam_healpix.loss_array = np.random.normal(
        50.0, 5, size=(beam_healpix.Nspws, beam_healpix.Nfreqs)
    )
    beam_healpix.mismatch_array = np.random.normal(
        0.0, 1.0, size=(beam_healpix.Nspws, beam_healpix.Nfreqs)
    )
    beam_healpix.s_parameters = np.random.normal(
        0.0, 0.3, size=(4, beam_healpix.Nspws, beam_healpix.Nfreqs)
    )
    # Test adding a different combo with healpix
    # (first half of pixels at freq 0 + second half at freq 1)
    beam_ref = beam_healpix.copy()
    beam1 = beam_healpix.select(
        pixels=beam_healpix.pixel_array[0 : beam_healpix.Npixels // 2],
        freq_chans=0,
        inplace=False,
    )
    beam2 = beam_healpix.select(
        pixels=beam_healpix.pixel_array[beam_healpix.Npixels // 2 :],
        freq_chans=1,
        inplace=False,
    )
    beam1 += beam2
    assert uvutils._check_histories(
        beam_healpix.history + " Downselected to specific healpix "
        "pixels, frequencies using pyuvdata. "
        "Combined data along healpix pixel, "
        "frequency axis using pyuvdata.",
        beam1.history,
    )
    # Zero out missing data in reference object
    beam_ref.data_array[:, :, :, 0, beam_healpix.Npixels // 2 :] = 0.0
    beam_ref.data_array[:, :, :, 1, : beam_healpix.Npixels // 2] = 0.0
    beam1.history = beam_healpix.history
    assert beam1 == beam_ref
    if beam_type == "efield":
        # Test adding another combo with efield
        # (feed 0 at freq 0 + feed 1 at freq 1)
        beam_ref = beam_healpix.copy()
        beam1 = beam_healpix.select(
            freq_chans=0, feeds=beam_healpix.feed_array[0], inplace=False
        )
        beam2 = beam_healpix.select(
            freq_chans=1, feeds=beam_healpix.feed_array[1], inplace=False
        )
        beam1 += beam2
        assert uvutils._check_histories(
            beam_healpix.history + " Downselected to specific frequencies, "
            "feeds using pyuvdata. Combined data "
            "along frequency, feed axis using pyuvdata.",
            beam1.history,
        )
        # Zero out missing data in reference object
        beam_ref.data_array[:, :, 1, 0, :] = 0.0
        beam_ref.data_array[:, :, 0, 1, :] = 0.0
        beam1.history = beam_healpix.history
        assert beam1 == beam_ref
    # Add without inplace
    beam1 = beam_healpix.select(
        pixels=beam_healpix.pixel_array[0 : beam_healpix.Npixels // 2], inplace=False
    )
    beam2 = beam_healpix.select(
        pixels=beam_healpix.pixel_array[beam_healpix.Npixels // 2 :], inplace=False
    )
    beam1 = beam1 + beam2
    assert uvutils._check_histories(
        beam_healpix.history + " Downselected to specific healpix pixels "
        "using pyuvdata. Combined data "
        "along healpix pixel axis using pyuvdata.",
        beam1.history,
    )
    beam1.history = beam_healpix.history
    assert beam1 == beam_healpix
    # ---------------
    # Test error: adding overlapping data with healpix
    beam1 = beam_healpix.copy()
    beam2 = beam_healpix.copy()
    with pytest.raises(
        ValueError, match="These objects have overlapping data and cannot be combined."
    ):
        beam1.__iadd__(beam2)
def test_beam_area_healpix(cst_power_1freq_cut_healpix, cst_efield_1freq_cut_healpix):
    """Test get_beam_area / get_beam_sq_area on healpix beams.

    Covers: normalization requirements, output sizes, the analytic value for
    a uniform beam, case-insensitive polarization handling, pseudo-Stokes
    handling (including legacy "I"/int-polnum spellings), and the error modes
    for vector power beams, efield beams, and missing polarizations.
    """
    power_beam_healpix = cst_power_1freq_cut_healpix
    # Test beam area methods
    # Check that non-peak normalizations error
    with pytest.raises(ValueError, match="beam must be peak normalized"):
        power_beam_healpix.get_beam_area()
    with pytest.raises(ValueError, match="beam must be peak normalized"):
        power_beam_healpix.get_beam_sq_area()
    healpix_norm = power_beam_healpix.copy()
    healpix_norm.data_normalization = "solid_angle"
    with pytest.raises(ValueError, match="beam must be peak normalized"):
        healpix_norm.get_beam_area()
    with pytest.raises(ValueError, match="beam must be peak normalized"):
        healpix_norm.get_beam_sq_area()
    # change it back to 'physical'
    healpix_norm.data_normalization = "physical"
    # change it to peak for rest of checks
    healpix_norm.peak_normalize()
    # Check sizes of output
    numfreqs = healpix_norm.freq_array.shape[-1]
    beam_int = healpix_norm.get_beam_area(pol="xx")
    beam_sq_int = healpix_norm.get_beam_sq_area(pol="xx")
    assert beam_int.shape[0] == numfreqs
    assert beam_sq_int.shape[0] == numfreqs
    # Check for the case of a uniform beam over the whole sky:
    # the integral should be Npixels * (pixel solid angle) per frequency.
    hp_obj = HEALPix(nside=healpix_norm.nside)
    d_omega = hp_obj.pixel_area.to("steradian").value
    npix = healpix_norm.Npixels
    healpix_norm.data_array = np.ones_like(healpix_norm.data_array)
    assert np.allclose(
        np.sum(healpix_norm.get_beam_area(pol="xx")), numfreqs * npix * d_omega
    )
    healpix_norm.data_array = 2.0 * np.ones_like(healpix_norm.data_array)
    assert np.allclose(
        np.sum(healpix_norm.get_beam_sq_area(pol="xx")), numfreqs * 4.0 * npix * d_omega
    )
    # check XX and YY beam areas work and match to within 5 sigfigs
    # BUGFIX: the upper-case result used to be overwritten before the
    # comparison, so allclose compared xx_area with itself. Keep both
    # spellings and actually verify case-insensitivity.
    xx_area_upper = healpix_norm.get_beam_area("XX")
    xx_area = healpix_norm.get_beam_area("xx")
    assert np.allclose(xx_area_upper, xx_area)
    yy_area = healpix_norm.get_beam_area("YY")
    assert np.allclose(yy_area / xx_area, np.ones(numfreqs))
    # nt.assert_almost_equal(yy_area / xx_area, 1.0, places=5)
    xx_area = healpix_norm.get_beam_sq_area("XX")
    yy_area = healpix_norm.get_beam_sq_area("YY")
    assert np.allclose(yy_area / xx_area, np.ones(numfreqs))
    # nt.assert_almost_equal(yy_area / xx_area, 1.0, places=5)
    # Check that if pseudo-Stokes I (pI) is in the beam polarization_array it
    # just uses it
    healpix_norm.polarization_array = [1, 2]
    # Check error if desired pol is allowed but isn't in the polarization_array
    with pytest.raises(
        ValueError, match="Do not have the right polarization information"
    ):
        healpix_norm.get_beam_area(pol="xx")
    with pytest.raises(
        ValueError, match="Do not have the right polarization information"
    ):
        healpix_norm.get_beam_sq_area(pol="xx")
    # Check polarization error
    healpix_norm.polarization_array = [9, 18, 27, -4]
    with pytest.raises(
        ValueError, match="Do not have the right polarization information"
    ):
        healpix_norm.get_beam_area(pol="xx")
    with pytest.raises(
        ValueError, match="Do not have the right polarization information"
    ):
        healpix_norm.get_beam_sq_area(pol="xx")
    efield_beam = cst_efield_1freq_cut_healpix
    healpix_norm_fullpol = efield_beam.efield_to_power(inplace=False)
    healpix_norm_fullpol.peak_normalize()
    xx_area = healpix_norm_fullpol.get_beam_sq_area("XX")
    yy_area = healpix_norm_fullpol.get_beam_sq_area("YY")
    XY_area = healpix_norm_fullpol.get_beam_sq_area("XY")
    YX_area = healpix_norm_fullpol.get_beam_sq_area("YX")
    # check if XY beam area is equal to beam YX beam area
    assert np.allclose(XY_area, YX_area)
    # check if XY/YX beam area is less than XX/YY beam area
    assert np.all(np.less(XY_area, xx_area))
    assert np.all(np.less(XY_area, yy_area))
    assert np.all(np.less(YX_area, xx_area))
    assert np.all(np.less(YX_area, yy_area))
    # Check if power is scalar
    healpix_vec_norm = efield_beam.efield_to_power(
        keep_basis_vector=True, calc_cross_pols=False, inplace=False
    )
    healpix_vec_norm.peak_normalize()
    with pytest.raises(ValueError, match="Expect scalar for power beam, found vector"):
        healpix_vec_norm.get_beam_area()
    with pytest.raises(ValueError, match="Expect scalar for power beam, found vector"):
        healpix_vec_norm.get_beam_sq_area()
    # Check only power beams accepted
    with pytest.raises(ValueError, match="beam_type must be power"):
        efield_beam.get_beam_area()
    with pytest.raises(ValueError, match="beam_type must be power"):
        efield_beam.get_beam_sq_area()
    # check pseudo-Stokes parameters
    # (efield_to_pstokes mutates the object in place; this is the same
    # fixture object as efield_beam above, used only within this test)
    efield_beam = cst_efield_1freq_cut_healpix
    efield_beam.efield_to_pstokes()
    efield_beam.peak_normalize()
    pI_area = efield_beam.get_beam_sq_area("pI")
    pQ_area = efield_beam.get_beam_sq_area("pQ")
    pU_area = efield_beam.get_beam_sq_area("pU")
    pV_area = efield_beam.get_beam_sq_area("pV")
    assert np.all(np.less(pQ_area, pI_area))
    assert np.all(np.less(pU_area, pI_area))
    assert np.all(np.less(pV_area, pI_area))
    # check backwards compatability with pstokes nomenclature and int polnum
    I_area = efield_beam.get_beam_area("I")
    pI_area = efield_beam.get_beam_area("pI")
    area1 = efield_beam.get_beam_area(1)
    assert np.allclose(I_area, pI_area)
    assert np.allclose(I_area, area1)
    # check efield beam type is accepted for pseudo-stokes and power for
    # linear polarizations
    with pytest.raises(ValueError, match="Expect scalar for power beam, found vector"):
        healpix_vec_norm.get_beam_sq_area("pI")
    with pytest.raises(
        ValueError, match="Do not have the right polarization information"
    ):
        efield_beam.get_beam_sq_area("xx")
def test_get_beam_function_errors(cst_power_1freq_cut):
    """Non-healpix beams must be rejected by _get_beam and the area methods."""
    beam = cst_power_1freq_cut.copy()
    # _get_beam asserts on the coordinate system before doing anything else.
    with pytest.raises(AssertionError, match="pixel_coordinate_system must be healpix"):
        beam._get_beam("xx")
    # Check only healpix accepted (HEALPix checks are in test_healpix);
    # peak-normalize first so the failure is about the format, not the
    # normalization.
    beam.peak_normalize()
    for area_method in (beam.get_beam_area, beam.get_beam_sq_area):
        with pytest.raises(ValueError, match="Currently only healpix format supported"):
            area_method()
def test_get_beam_functions(cst_power_1freq_cut_healpix):
    """_get_beam works for a present pol and errors for an absent one."""
    beam = cst_power_1freq_cut_healpix
    beam.peak_normalize()
    # A polarization present in the object should be retrievable.
    beam._get_beam("xx")
    # Integer polarization 4 is not in this power beam, so lookup must fail.
    expected_msg = "Do not have the right polarization information"
    with pytest.raises(ValueError, match=expected_msg):
        beam._get_beam(4)
| 36.093712 | 88 | 0.687293 |
b53cc94ff6e498d700944e5d6d8ada399e93516f | 391 | py | Python | server.py | bundito/joeremote | 254c76fb7616a78d1f62ee7a383fff279fb8a842 | [
"MIT"
] | null | null | null | server.py | bundito/joeremote | 254c76fb7616a78d1f62ee7a383fff279fb8a842 | [
"MIT"
] | null | null | null | server.py | bundito/joeremote | 254c76fb7616a78d1f62ee7a383fff279fb8a842 | [
"MIT"
] | null | null | null | from socket import *
myhost = "10.0.0.53"
myport = 10000
sockobj = socket(AF_INET, SOCK_STREAM)
sockobj.bind((myhost, myport))
sockobj.listen(5)
while True:
connection, address = sockobj.accept()
print('Server connected by ', address)
while True:
data = connection.recv(1024)
if not data: break
connection.send(b'Echo => ' + data)
connection.close() | 24.4375 | 43 | 0.659847 |
ac244aa5daaaa85407ae4f965df95323e75758cb | 2,085 | py | Python | src/cms/constants/administrative_division.py | digitalfabrik/coldaid-backend | b769510570d5921e30876565263813c0362994e2 | [
"Apache-2.0"
] | 4 | 2019-12-05T16:45:17.000Z | 2020-05-09T07:26:34.000Z | src/cms/constants/administrative_division.py | digitalfabrik/coldaid-backend | b769510570d5921e30876565263813c0362994e2 | [
"Apache-2.0"
] | 56 | 2019-12-05T12:31:37.000Z | 2021-01-07T15:47:45.000Z | src/cms/constants/administrative_division.py | digitalfabrik/coldaid-backend | b769510570d5921e30876565263813c0362994e2 | [
"Apache-2.0"
] | 2 | 2019-12-11T09:52:26.000Z | 2020-05-09T07:26:38.000Z | """
This module contains constants which represent all possible administrative divisions of a region in Germany:
* ``CITY``: City (German: "Stadt")
* ``DISTRICT``: District (German: "Kreis")
* ``RURAL_DISTRICT``: Rural district (German: "Landkreis")
* ``REGION``: Region (German: "Region")
* ``CITY_AND_DISTRICT``: City and district (German: "Stadt und Landkreis")
* ``URBAN_DISTRICT``: Urban district (German: "Kreisfreie Stadt")
* ``GOVERNMENTAL_DISTRICT``: Governmental district (German: "Regierungsbezirk")
* ``CITY_STATE``: City state (German: "Stadtstaat")
* ``AREA_STATE``: Area state (German: "Flächenland")
* ``FREE_STATE``: Free state (German: "Freistaat")
* ``FEDERAL_STATE``: Federal state (German: "Bundesland")
* ``MUNICIPALITY``: Municipality (German: "Gemeinde")
* ``COLLECTIVE_MUNICIPALITY``: Collective municipality (German: "Gemeindeverband")
* ``INITIAL_RECEPTION_CENTER``: Initial reception center (German: "Erstaufnahmeeinrichtung")
"""
from django.utils.translation import ugettext_lazy as _
# Database values for the administrative-division field; the German terms
# for each value are documented in the module docstring above.
FEDERAL_STATE = 'FEDERAL_STATE'
AREA_STATE = 'AREA_STATE'
FREE_STATE = 'FREE_STATE'
CITY_STATE = 'CITY_STATE'
GOVERNMENTAL_DISTRICT = 'GOVERNMENTAL_DISTRICT'
URBAN_DISTRICT = 'URBAN_DISTRICT'
RURAL_DISTRICT = 'RURAL_DISTRICT'
DISTRICT = 'DISTRICT'
CITY = 'CITY'
CITY_AND_DISTRICT = 'CITY_AND_DISTRICT'
REGION = 'REGION'
MUNICIPALITY = 'MUNICIPALITY'
COLLECTIVE_MUNICIPALITY = 'COLLECTIVE_MUNICIPALITY'
INITIAL_RECEPTION_CENTER = 'INITIAL_RECEPTION_CENTER'
# (stored value, lazily-translated label) pairs for a Django choices field.
CHOICES = (
    (CITY, _('City')),
    (DISTRICT, _('District')),
    (RURAL_DISTRICT, _('Rural district')),
    (REGION, _('Region')),
    (CITY_AND_DISTRICT, _('City and district')),
    (URBAN_DISTRICT, _('Urban district')),
    (GOVERNMENTAL_DISTRICT, _('Governmental district')),
    (CITY_STATE, _('City state')),
    (AREA_STATE, _('Area state')),
    (FREE_STATE, _('Free state')),
    (FEDERAL_STATE, _('Federal state')),
    (MUNICIPALITY, _('Municipality')),
    (COLLECTIVE_MUNICIPALITY, _('Collective municipality')),
    (INITIAL_RECEPTION_CENTER, _('Initial reception center')),
)
| 31.119403 | 108 | 0.717986 |
a09dc23593923045c9cabe1d7a9dfad3abdf0f85 | 416 | py | Python | libpython/spam/db.py | Verla1973/libpython | 75ed3486bd3298019b446a230099695d489cacde | [
"MIT"
] | 1 | 2021-10-03T11:31:00.000Z | 2021-10-03T11:31:00.000Z | libpython/spam/db.py | Verla1973/libpython | 75ed3486bd3298019b446a230099695d489cacde | [
"MIT"
] | null | null | null | libpython/spam/db.py | Verla1973/libpython | 75ed3486bd3298019b446a230099695d489cacde | [
"MIT"
] | null | null | null | class Sessao:
    # Class-level counter used to hand out sequential ids to saved users.
    contador = 0
    # NOTE(review): class attribute, so the stored users are shared by every
    # Sessao instance — presumably intentional for this in-memory fake; confirm.
    usuarios = []
    def salvar(self, usuario):
        # Assign the next sequential id to the user before storing it.
        Sessao.contador += 1
        usuario.id = Sessao.contador
        self.usuarios.append(usuario)
    def listar(self):
        # Return the (shared) list of saved users.
        return self.usuarios
    def roll_back(self):
        # No-op: the in-memory fake has no transaction to undo.
        pass
    def fechar(self):
        # No-op: nothing to release for the in-memory fake.
        pass
class Conexao:
    """Fake database connection: hands out in-memory ``Sessao`` objects."""

    def gerar_sessao(self):
        """Create and return a fresh fake session."""
        nova_sessao = Sessao()
        return nova_sessao

    def fechar(self):
        """Close the connection (no-op for the in-memory fake)."""
        pass
68d89d6ea453b9ac930e61e137d0be67f8784e4a | 8,066 | py | Python | sdks/python/http_client/v1/polyaxon_sdk/models/v1_kf_replica.py | polyaxon/polyaxon | a835f2872a63f6cf5c27d2dd1125ad7c18eb849a | [
"Apache-2.0"
] | 3,200 | 2017-05-09T11:35:31.000Z | 2022-03-28T05:43:22.000Z | sdks/python/http_client/v1/polyaxon_sdk/models/v1_kf_replica.py | polyaxon/polyaxon | a835f2872a63f6cf5c27d2dd1125ad7c18eb849a | [
"Apache-2.0"
] | 1,324 | 2017-06-29T07:21:27.000Z | 2022-03-27T12:41:10.000Z | sdks/python/http_client/v1/polyaxon_sdk/models/v1_kf_replica.py | polyaxon/polyaxon | a835f2872a63f6cf5c27d2dd1125ad7c18eb849a | [
"Apache-2.0"
] | 341 | 2017-01-10T23:06:53.000Z | 2022-03-10T08:15:18.000Z | #!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
"""
Polyaxon SDKs and REST API specification.
Polyaxon SDKs and REST API specification. # noqa: E501
The version of the OpenAPI document: 1.11.3
Contact: contact@polyaxon.com
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from polyaxon_sdk.configuration import Configuration
class V1KFReplica(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Maps python attribute name -> OpenAPI type string (drives serialization).
    openapi_types = {
        'replicas': 'int',
        'environment': 'V1Environment',
        'connections': 'list[str]',
        'volumes': 'list[V1Volume]',
        'init': 'list[V1Init]',
        'sidecars': 'list[V1Container]',
        'container': 'V1Container'
    }
    # Maps python attribute name -> JSON key in the API payload.
    attribute_map = {
        'replicas': 'replicas',
        'environment': 'environment',
        'connections': 'connections',
        'volumes': 'volumes',
        'init': 'init',
        'sidecars': 'sidecars',
        'container': 'container'
    }
    def __init__(self, replicas=None, environment=None, connections=None, volumes=None, init=None, sidecars=None, container=None, local_vars_configuration=None): # noqa: E501
        """V1KFReplica - a model defined in OpenAPI""" # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        self._replicas = None
        self._environment = None
        self._connections = None
        self._volumes = None
        self._init = None
        self._sidecars = None
        self._container = None
        self.discriminator = None
        # Only assign explicitly-provided values so unset fields stay None.
        if replicas is not None:
            self.replicas = replicas
        if environment is not None:
            self.environment = environment
        if connections is not None:
            self.connections = connections
        if volumes is not None:
            self.volumes = volumes
        if init is not None:
            self.init = init
        if sidecars is not None:
            self.sidecars = sidecars
        if container is not None:
            self.container = container
    @property
    def replicas(self):
        """Gets the replicas of this V1KFReplica. # noqa: E501
        :return: The replicas of this V1KFReplica. # noqa: E501
        :rtype: int
        """
        return self._replicas
    @replicas.setter
    def replicas(self, replicas):
        """Sets the replicas of this V1KFReplica.
        :param replicas: The replicas of this V1KFReplica. # noqa: E501
        :type: int
        """
        self._replicas = replicas
    @property
    def environment(self):
        """Gets the environment of this V1KFReplica. # noqa: E501
        :return: The environment of this V1KFReplica. # noqa: E501
        :rtype: V1Environment
        """
        return self._environment
    @environment.setter
    def environment(self, environment):
        """Sets the environment of this V1KFReplica.
        :param environment: The environment of this V1KFReplica. # noqa: E501
        :type: V1Environment
        """
        self._environment = environment
    @property
    def connections(self):
        """Gets the connections of this V1KFReplica. # noqa: E501
        :return: The connections of this V1KFReplica. # noqa: E501
        :rtype: list[str]
        """
        return self._connections
    @connections.setter
    def connections(self, connections):
        """Sets the connections of this V1KFReplica.
        :param connections: The connections of this V1KFReplica. # noqa: E501
        :type: list[str]
        """
        self._connections = connections
    @property
    def volumes(self):
        """Gets the volumes of this V1KFReplica. # noqa: E501
        :return: The volumes of this V1KFReplica. # noqa: E501
        :rtype: list[V1Volume]
        """
        return self._volumes
    @volumes.setter
    def volumes(self, volumes):
        """Sets the volumes of this V1KFReplica.
        :param volumes: The volumes of this V1KFReplica. # noqa: E501
        :type: list[V1Volume]
        """
        self._volumes = volumes
    @property
    def init(self):
        """Gets the init of this V1KFReplica. # noqa: E501
        :return: The init of this V1KFReplica. # noqa: E501
        :rtype: list[V1Init]
        """
        return self._init
    @init.setter
    def init(self, init):
        """Sets the init of this V1KFReplica.
        :param init: The init of this V1KFReplica. # noqa: E501
        :type: list[V1Init]
        """
        self._init = init
    @property
    def sidecars(self):
        """Gets the sidecars of this V1KFReplica. # noqa: E501
        :return: The sidecars of this V1KFReplica. # noqa: E501
        :rtype: list[V1Container]
        """
        return self._sidecars
    @sidecars.setter
    def sidecars(self, sidecars):
        """Sets the sidecars of this V1KFReplica.
        :param sidecars: The sidecars of this V1KFReplica. # noqa: E501
        :type: list[V1Container]
        """
        self._sidecars = sidecars
    @property
    def container(self):
        """Gets the container of this V1KFReplica. # noqa: E501
        :return: The container of this V1KFReplica. # noqa: E501
        :rtype: V1Container
        """
        return self._container
    @container.setter
    def container(self, container):
        """Sets the container of this V1KFReplica.
        :param container: The container of this V1KFReplica. # noqa: E501
        :type: V1Container
        """
        self._container = container
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively convert nested models / lists / dicts of models.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, V1KFReplica):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1KFReplica):
            return True
        return self.to_dict() != other.to_dict()
| 27.435374 | 175 | 0.594843 |
db8f5b3ecf6a9ee9f0421da45281f39943a8f3c9 | 896 | py | Python | kivy/tests/test_video.py | Sentient07/kivy | e5022e1cc84b1bcda6e4619d618509dc4ea7da04 | [
"MIT"
] | 2 | 2021-05-16T09:46:14.000Z | 2021-11-17T11:23:15.000Z | kivy/tests/test_video.py | Sentient07/kivy | e5022e1cc84b1bcda6e4619d618509dc4ea7da04 | [
"MIT"
] | 1 | 2016-11-11T13:45:42.000Z | 2016-11-11T13:45:42.000Z | kivy/tests/test_video.py | Sentient07/kivy | e5022e1cc84b1bcda6e4619d618509dc4ea7da04 | [
"MIT"
] | 2 | 2020-03-28T10:18:00.000Z | 2021-02-13T06:34:14.000Z |
import unittest
class AnimationTestCase(unittest.TestCase):
    def test_video_unload(self):
        # Regression test: unloading a Video while it is playing must not
        # crash the widget.
        # fix issue https://github.com/kivy/kivy/issues/2275
        # AttributeError: 'NoneType' object has no attribute 'texture'
        from kivy.uix.video import Video
        from kivy.clock import Clock
        from kivy.base import runTouchApp, stopTouchApp
        from os.path import join, dirname, abspath
        # Locate the sample clip shipped in the repository's examples tree.
        here = dirname(__file__)
        source = abspath(join(
            here, "..", "..", "examples", "widgets", "softboy.mpg"))
        video = Video(source=source, play=True)
        # Safety net: stop the app after 1s even if playback never advances.
        Clock.schedule_once(lambda x: stopTouchApp(), 1)
        def unload_video(video, position):
            # Once playback has actually progressed, unload and then shut
            # down shortly after; before the fix this raised AttributeError.
            if position > 0.01:
                video.unload()
                Clock.schedule_once(lambda x: stopTouchApp(), 0.1)
        video.bind(position=unload_video)
        runTouchApp(video)
| 33.185185 | 70 | 0.625 |
32599329096d5f3fcbb400ddb10a74246161669d | 4,712 | py | Python | otp/avatar/ShadowCaster.py | SuperM0use24/TT-CL-Edition | fdad8394f0656ae122b687d603f72afafd220c65 | [
"MIT"
] | null | null | null | otp/avatar/ShadowCaster.py | SuperM0use24/TT-CL-Edition | fdad8394f0656ae122b687d603f72afafd220c65 | [
"MIT"
] | 1 | 2021-06-08T17:16:48.000Z | 2021-06-08T17:16:48.000Z | otp/avatar/ShadowCaster.py | SuperM0use24/TT-CL-Edition | fdad8394f0656ae122b687d603f72afafd220c65 | [
"MIT"
] | 3 | 2021-06-03T05:36:36.000Z | 2021-06-22T15:07:31.000Z | from panda3d.core import *
from direct.directnotify import DirectNotifyGlobal
from direct.showbase.ShadowPlacer import ShadowPlacer
from otp.otpbase import OTPGlobals
globalDropShadowFlag = 1
def setGlobalDropShadowFlag(flag):
global globalDropShadowFlag
if flag != globalDropShadowFlag:
globalDropShadowFlag = flag
messenger.send('globalDropShadowFlagChanged')
globalDropShadowGrayLevel = 0.5
def setGlobalDropShadowGrayLevel(grayLevel):
global globalDropShadowGrayLevel
if grayLevel != globalDropShadowGrayLevel:
globalDropShadowGrayLevel = grayLevel
messenger.send('globalDropShadowGrayLevelChanged')
class ShadowCaster:
notify = DirectNotifyGlobal.directNotify.newCategory('ShadowCaster')
def __init__(self, squareShadow = False):
if squareShadow:
self.shadowFileName = 'phase_3/models/props/square_drop_shadow'
else:
self.shadowFileName = 'phase_3/models/props/drop_shadow'
self.dropShadow = None
self.shadowPlacer = None
self.activeShadow = 0
self.wantsActive = 1
self.storedActiveState = 0
if hasattr(base, 'wantDynamicShadows') and base.wantDynamicShadows:
messenger.accept('globalDropShadowFlagChanged', self, self.__globalDropShadowFlagChanged)
messenger.accept('globalDropShadowGrayLevelChanged', self, self.__globalDropShadowGrayLevelChanged)
return
def delete(self):
if hasattr(base, 'wantDynamicShadows') and base.wantDynamicShadows:
messenger.ignore('globalDropShadowFlagChanged', self)
messenger.ignore('globalDropShadowGrayLevelChanged', self)
self.deleteDropShadow()
self.shadowJoint = None
return
def initializeDropShadow(self, hasGeomNode = True):
self.deleteDropShadow()
if hasGeomNode:
self.getGeomNode().setTag('cam', 'caster')
dropShadow = loader.loadModel(self.shadowFileName)
dropShadow.setScale(0.4)
dropShadow.flattenMedium()
dropShadow.setBillboardAxis(2)
dropShadow.setColor(0.0, 0.0, 0.0, globalDropShadowGrayLevel, 1)
self.shadowPlacer = ShadowPlacer(base.shadowTrav, dropShadow, OTPGlobals.WallBitmask, OTPGlobals.FloorBitmask)
self.dropShadow = dropShadow
if not globalDropShadowFlag:
self.dropShadow.hide()
if self.getShadowJoint():
dropShadow.reparentTo(self.getShadowJoint())
else:
self.dropShadow.hide()
self.setActiveShadow(self.wantsActive)
self.__globalDropShadowFlagChanged()
self.__globalDropShadowGrayLevelChanged()
def update(self):
pass
def deleteDropShadow(self):
if self.shadowPlacer:
self.shadowPlacer.delete()
self.shadowPlacer = None
if self.dropShadow:
self.dropShadow.removeNode()
self.dropShadow = None
return
def setActiveShadow(self, isActive = 1):
isActive = isActive and self.wantsActive
if not globalDropShadowFlag:
self.storedActiveState = isActive
if self.shadowPlacer != None:
isActive = isActive and globalDropShadowFlag
if self.activeShadow != isActive:
self.activeShadow = isActive
if isActive:
self.shadowPlacer.on()
else:
self.shadowPlacer.off()
return
def setShadowHeight(self, shadowHeight):
if self.dropShadow:
self.dropShadow.setZ(-shadowHeight)
def getShadowJoint(self):
if hasattr(self, 'shadowJoint'):
return self.shadowJoint
shadowJoint = self.find('**/attachShadow')
if shadowJoint.isEmpty():
self.shadowJoint = NodePath(self)
else:
self.shadowJoint = shadowJoint
return self.shadowJoint
def hideShadow(self):
self.dropShadow.hide()
def showShadow(self):
if not globalDropShadowFlag:
self.dropShadow.hide()
else:
self.dropShadow.show()
def __globalDropShadowFlagChanged(self):
if self.dropShadow != None:
if globalDropShadowFlag == 0:
if self.activeShadow == 1:
self.storedActiveState = 1
self.setActiveShadow(0)
elif self.activeShadow == 0:
self.setActiveShadow(1)
self.showShadow()
return
def __globalDropShadowGrayLevelChanged(self):
if self.dropShadow != None:
self.dropShadow.setColor(0.0, 0.0, 0.0, globalDropShadowGrayLevel, 1)
return
| 35.164179 | 118 | 0.650679 |
52620d45fe71b5f1bc9842e893a86e1fbad6f65a | 16,915 | py | Python | server.py/server.py/app.py/env/lib/python3.9/site-packages/pip/vendor/contextlib2.py | dlminvestments/VM1-Flask-App | 0cac754523faa1d87622f6c0e0b51f617fd10f12 | [
"MIT"
] | null | null | null | server.py/server.py/app.py/env/lib/python3.9/site-packages/pip/vendor/contextlib2.py | dlminvestments/VM1-Flask-App | 0cac754523faa1d87622f6c0e0b51f617fd10f12 | [
"MIT"
] | 39 | 2021-10-29T20:51:42.000Z | 2022-03-27T23:43:04.000Z | server.py/server.py/app.py/env/lib/python3.9/site-packages/pip/vendor/contextlib2.py | dlminvestments/VM1-Flask-App | 0cac754523faa1d87622f6c0e0b51f617fd10f12 | [
"MIT"
] | null | null | null | ""contextlib2 - backports and enhancements to the contextlib module"""
import abc
import sys
import warnings
from collections import deque
from functools import wraps
__all__ = ["contextmanager", "closing", "nullcontext",
"AbstractContextManager",
"ContextDecorator", "ExitStack",
"redirect_stdout", "redirect_stderr", "suppress"]
# Backwards compatibility
__all__ += ["ContextStack"]
# Backport abc.ABC
if sys.version_info[:2] >= (3, 4):
    # abc.ABC exists on 3.4+; reuse it directly.
    _abc_ABC = abc.ABC
else:
    # Older interpreters: synthesize an equivalent empty ABC base class.
    _abc_ABC = abc.ABCMeta('ABC', (object,), {'__slots__': ()})
# Backport classic class MRO
def _classic_mro(C, result):
if C in result:
return
result.append(C)
for B in C.__bases__:
_classic_mro(B, result)
return result
# Backport _collections_abc._check_methods
def _check_methods(C, *methods):
try:
mro = C.__mro__
except AttributeError:
mro = tuple(_classic_mro(C, []))
for method in methods:
for B in mro:
if method in B.__dict__:
if B.__dict__[method] is None:
return NotImplemented
break
else:
return NotImplemented
return True
class AbstractContextManager(_abc_ABC):
    """An abstract base class for context managers."""
    def __enter__(self):
        """Return `self` upon entering the runtime context."""
        return self
    @abc.abstractmethod
    def __exit__(self, exc_type, exc_value, traceback):
        """Raise any exception triggered within the runtime context."""
        return None
    @classmethod
    def __subclasshook__(cls, C):
        """Check whether subclass is considered a subclass of this ABC."""
        if cls is AbstractContextManager:
            # Structural check: any class defining __enter__/__exit__ passes
            # isinstance/issubclass against this ABC.
            return _check_methods(C, "__enter__", "__exit__")
        return NotImplemented
class ContextDecorator(object):
    """A base class or mixin that enables context managers to work as decorators."""
    def refresh_cm(self):
        """Returns the context manager used to actually wrap the call to the
        decorated function.
        The default implementation just returns *self*.
        Overriding this method allows otherwise one-shot context managers
        like _GeneratorContextManager to support use as decorators via
        implicit recreation.
        DEPRECATED: refresh_cm was never added to the standard library's
        ContextDecorator API
        """
        warnings.warn("refresh_cm was never added to the standard library",
                      DeprecationWarning)
        return self._recreate_cm()
    def _recreate_cm(self):
        """Return a recreated instance of self.
        Allows an otherwise one-shot context manager like
        _GeneratorContextManager to support use as
        a decorator via implicit recreation.
        This is a private interface just for _GeneratorContextManager.
        See issue #11647 for details.
        """
        return self
    def __call__(self, func):
        # Wrap *func* so each invocation runs inside a freshly recreated
        # context manager (one-shot CMs would otherwise be exhausted).
        @wraps(func)
        def inner(*args, **kwds):
            with self._recreate_cm():
                return func(*args, **kwds)
        return inner
class _GeneratorContextManager(ContextDecorator):
    """Helper for @contextmanager decorator."""
    def __init__(self, func, args, kwds):
        # Start the generator now; __enter__ advances it to the first yield.
        self.gen = func(*args, **kwds)
        self.func, self.args, self.kwds = func, args, kwds
        # Issue 19330: ensure context manager instances have good docstrings
        doc = getattr(func, "__doc__", None)
        if doc is None:
            doc = type(self).__doc__
        self.__doc__ = doc
        # Unfortunately, this still doesn't provide good help output when
        # inspecting the created context manager instances, since pydoc
        # currently bypasses the instance docstring and shows the docstring
        # for the class instead.
        # See http://bugs.python.org/issue19404 for more details.
    def _recreate_cm(self):
        # _GCM instances are one-shot context managers, so the
        # CM must be recreated each time a decorated function is
        # called
        return self.__class__(self.func, self.args, self.kwds)
    def __enter__(self):
        try:
            return next(self.gen)
        except StopIteration:
            raise RuntimeError("generator didn't yield")
    def __exit__(self, type, value, traceback):
        if type is None:
            # Normal exit: the generator must finish after its single yield.
            try:
                next(self.gen)
            except StopIteration:
                return
            else:
                raise RuntimeError("generator didn't stop")
        else:
            if value is None:
                # Need to force instantiation so we can reliably
                # tell if we get the same exception back
                value = type()
            try:
                self.gen.throw(type, value, traceback)
                raise RuntimeError("generator didn't stop after throw()")
            except StopIteration as exc:
                # Suppress StopIteration *unless* it's the same exception that
                # was passed to throw().  This prevents a StopIteration
                # raised inside the "with" statement from being suppressed.
                return exc is not value
            except RuntimeError as exc:
                # Don't re-raise the passed in exception
                if exc is value:
                    return False
                # Likewise, avoid suppressing if a StopIteration exception
                # was passed to throw() and later wrapped into a RuntimeError
                # (see PEP 479).
                if _HAVE_EXCEPTION_CHAINING and exc.__cause__ is value:
                    return False
                raise
            except:
                # only re-raise if it's *not* the exception that was
                # passed to throw(), because __exit__() must not raise
                # an exception unless __exit__() itself failed.  But throw()
                # has to raise the exception to signal propagation, so this
                # fixes the impedance mismatch between the throw() protocol
                # and the __exit__() protocol.
                #
                if sys.exc_info()[1] is not value:
                    raise
def contextmanager(func):
    """@contextmanager decorator.
    Typical usage:
        @contextmanager
        def some_generator(<arguments>):
            <setup>
            try:
                yield <value>
            finally:
                <cleanup>
    This makes this:
        with some_generator(<arguments>) as <variable>:
            <body>
    equivalent to this:
        <setup>
        try:
            <variable> = <value>
            <body>
        finally:
            <cleanup>
    """
    @wraps(func)
    def helper(*args, **kwds):
        # Each call wraps the generator in a fresh one-shot context manager.
        return _GeneratorContextManager(func, args, kwds)
    return helper
class closing(object):
    """Context to automatically close something at the end of a block.
    Code like this:
        with closing(<module>.open(<arguments>)) as f:
            <block>
    is equivalent to this:
        f = <module>.open(<arguments>)
        try:
            <block>
        finally:
            f.close()
    """
    def __init__(self, thing):
        self.thing = thing
    def __enter__(self):
        return self.thing
    def __exit__(self, *exc_info):
        # Always close, regardless of whether an exception is propagating.
        self.thing.close()
class _RedirectStream(object):
    """Base helper: temporarily replace a ``sys`` stream attribute."""
    # Name of the sys attribute to swap ("stdout"/"stderr"); set by subclasses.
    _stream = None
    def __init__(self, new_target):
        self._new_target = new_target
        # We use a list of old targets to make this CM re-entrant
        self._old_targets = []
    def __enter__(self):
        self._old_targets.append(getattr(sys, self._stream))
        setattr(sys, self._stream, self._new_target)
        return self._new_target
    def __exit__(self, exctype, excinst, exctb):
        # Restore the most recently saved target (LIFO for re-entrancy).
        setattr(sys, self._stream, self._old_targets.pop())
class redirect_stdout(_RedirectStream):
    """Context manager for temporarily redirecting stdout to another file.
        # How to send help() to stderr
        with redirect_stdout(sys.stderr):
            help(dir)
        # How to write help() to a file
        with open('help.txt', 'w') as f:
            with redirect_stdout(f):
                help(pow)
    """
    # Attribute consumed by _RedirectStream to pick the sys stream.
    _stream = "stdout"
class redirect_stderr(_RedirectStream):
    """Context manager for temporarily redirecting stderr to another file."""
    # Attribute consumed by _RedirectStream to pick the sys stream.
    _stream = "stderr"
class suppress(object):
    """Context manager that swallows the given exception types.

    After a listed exception is suppressed, execution resumes with the
    statement following the ``with`` block::

        with suppress(FileNotFoundError):
            os.remove(somefile)
        # Execution still resumes here if the file was already removed
    """
    def __init__(self, *exceptions):
        self._exceptions = exceptions
    def __enter__(self):
        pass
    def __exit__(self, exctype, excinst, exctb):
        # CPython's exception handling only consults the concrete type
        # hierarchy, ignoring instance/subclass hooks; using issubclass here
        # gives the simpler, documented semantics instead of reproducing
        # that limitation (see http://bugs.python.org/issue12029).
        if exctype is None:
            return False
        return issubclass(exctype, self._exceptions)
# Context manipulation is Python 3 only
_HAVE_EXCEPTION_CHAINING = sys.version_info[0] >= 3
if _HAVE_EXCEPTION_CHAINING:
    def _make_context_fixer(frame_exc):
        # Build a helper that re-links __context__ chains so stacked exit
        # callbacks look like genuinely nested with-statements.
        def _fix_exception_context(new_exc, old_exc):
            # Context may not be correct, so find the end of the chain
            while 1:
                exc_context = new_exc.__context__
                if exc_context is old_exc:
                    # Context is already set correctly (see issue 20317)
                    return
                if exc_context is None or exc_context is frame_exc:
                    break
                new_exc = exc_context
            # Change the end of the chain to point to the exception
            # we expect it to reference
            new_exc.__context__ = old_exc
        return _fix_exception_context
    def _reraise_with_existing_context(exc_details):
        try:
            # bare "raise exc_details[1]" replaces our carefully
            # set-up context
            fixed_ctx = exc_details[1].__context__
            raise exc_details[1]
        except BaseException:
            exc_details[1].__context__ = fixed_ctx
            raise
else:
    # No exception context in Python 2
    def _make_context_fixer(frame_exc):
        return lambda new_exc, old_exc: None
    # Use 3 argument raise in Python 2,
    # but use exec to avoid SyntaxError in Python 3
    def _reraise_with_existing_context(exc_details):
        exc_type, exc_value, exc_tb = exc_details
        exec("raise exc_type, exc_value, exc_tb")
# Handle old-style classes if they exist
try:
    from types import InstanceType
except ImportError:
    # Python 3 doesn't have old-style classes
    _get_type = type
else:
    # Need to handle old-style context managers on Python 2
    def _get_type(obj):
        obj_type = type(obj)
        if obj_type is InstanceType:
            return obj.__class__ # Old-style class
        return obj_type # New-style class
# Inspired by discussions on http://bugs.python.org/issue13585
class ExitStack(object):
    """Context manager for dynamic management of a stack of exit callbacks
    For example:
        with ExitStack() as stack:
            files = [stack.enter_context(open(fname)) for fname in filenames]
            # All opened files will automatically be closed at the end of
            # the with statement, even if attempts to open files later
            # in the list raise an exception
    """
    def __init__(self):
        # Deque used as a LIFO stack of exit callbacks.
        self._exit_callbacks = deque()
    def pop_all(self):
        """Preserve the context stack by transferring it to a new instance"""
        new_stack = type(self)()
        new_stack._exit_callbacks = self._exit_callbacks
        self._exit_callbacks = deque()
        return new_stack
    def _push_cm_exit(self, cm, cm_exit):
        """Helper to correctly register callbacks to __exit__ methods"""
        def _exit_wrapper(*exc_details):
            return cm_exit(cm, *exc_details)
        _exit_wrapper.__self__ = cm
        self.push(_exit_wrapper)
    def push(self, exit):
        """Registers a callback with the standard __exit__ method signature
        Can suppress exceptions the same way __exit__ methods can.
        Also accepts any object with an __exit__ method (registering a call
        to the method instead of the object itself)
        """
        # We use an unbound method rather than a bound method to follow
        # the standard lookup behaviour for special methods
        _cb_type = _get_type(exit)
        try:
            exit_method = _cb_type.__exit__
        except AttributeError:
            # Not a context manager, so assume its a callable
            self._exit_callbacks.append(exit)
        else:
            self._push_cm_exit(exit, exit_method)
        return exit # Allow use as a decorator
    def callback(self, callback, *args, **kwds):
        """Registers an arbitrary callback and arguments.
        Cannot suppress exceptions.
        """
        def _exit_wrapper(exc_type, exc, tb):
            callback(*args, **kwds)
        # We changed the signature, so using @wraps is not appropriate, but
        # setting __wrapped__ may still help with introspection
        _exit_wrapper.__wrapped__ = callback
        self.push(_exit_wrapper)
        return callback # Allow use as a decorator
    def enter_context(self, cm):
        """Enters the supplied context manager
        If successful, also pushes its __exit__ method as a callback and
        returns the result of the __enter__ method.
        """
        # We look up the special methods on the type to match the with statement
        _cm_type = _get_type(cm)
        _exit = _cm_type.__exit__
        result = _cm_type.__enter__(cm)
        self._push_cm_exit(cm, _exit)
        return result
    def close(self):
        """Immediately unwind the context stack"""
        self.__exit__(None, None, None)
    def __enter__(self):
        return self
    def __exit__(self, *exc_details):
        received_exc = exc_details[0] is not None
        # We manipulate the exception state so it behaves as though
        # we were actually nesting multiple with statements
        frame_exc = sys.exc_info()[1]
        _fix_exception_context = _make_context_fixer(frame_exc)
        # Callbacks are invoked in LIFO order to match the behaviour of
        # nested context managers
        suppressed_exc = False
        pending_raise = False
        while self._exit_callbacks:
            cb = self._exit_callbacks.pop()
            try:
                if cb(*exc_details):
                    # A callback suppressed the in-flight exception.
                    suppressed_exc = True
                    pending_raise = False
                    exc_details = (None, None, None)
            except:
                new_exc_details = sys.exc_info()
                # simulate the stack of exceptions by setting the context
                _fix_exception_context(new_exc_details[1], exc_details[1])
                pending_raise = True
                exc_details = new_exc_details
        if pending_raise:
            _reraise_with_existing_context(exc_details)
        return received_exc and suppressed_exc
# Preserve backwards compatibility
class ContextStack(ExitStack):
    """Backwards compatibility alias for ExitStack"""
    def __init__(self):
        # Warn once per instantiation; the class is kept only for old callers.
        warnings.warn("ContextStack has been renamed to ExitStack",
                      DeprecationWarning)
        super(ContextStack, self).__init__()
    def register_exit(self, callback):
        # Old name for ExitStack.push().
        return self.push(callback)
    def register(self, callback, *args, **kwds):
        # Old name for ExitStack.callback().
        return self.callback(callback, *args, **kwds)
    def preserve(self):
        # Old name for ExitStack.pop_all().
        return self.pop_all()
class nullcontext(AbstractContextManager):
    """Context manager that does no additional processing.
    Used as a stand-in for a normal context manager, when a particular
    block of code is only sometimes used with a normal context manager:
    cm = optional_cm if condition else nullcontext()
    with cm:
        # Perform operation, using optional_cm if condition is True
    """
    def __init__(self, enter_result=None):
        self.enter_result = enter_result
    def __enter__(self):
        # Hand back whatever the caller asked to receive from the with-block.
        return self.enter_result
    def __exit__(self, *excinfo):
        pass
| 32.528846 | 84 | 0.624061 |
576902dc073b4e32eed99978345955b4cfdc85e2 | 4,599 | py | Python | qa/rpc-tests/multi_rpc.py | CoinMovement/Idea | a030d4bd6fadc2baa064a0eec3a37a9a80d003cb | [
"MIT"
] | null | null | null | qa/rpc-tests/multi_rpc.py | CoinMovement/Idea | a030d4bd6fadc2baa064a0eec3a37a9a80d003cb | [
"MIT"
] | null | null | null | qa/rpc-tests/multi_rpc.py | CoinMovement/Idea | a030d4bd6fadc2baa064a0eec3a37a9a80d003cb | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# Copyright (c) 2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test mulitple rpc user config option rpcauth
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import base64
try:
import http.client as httplib
except ImportError:
import httplib
try:
import urllib.parse as urlparse
except ImportError:
import urlparse
class HTTPBasicsTest (BitcoinTestFramework):
    """Functional test for the multi-user ``rpcauth`` config option.

    Writes two rpcauth credential lines into the node's config, then checks
    that the original cookie credentials and both rpcauth users authenticate,
    while wrong usernames/passwords are rejected with HTTP 401.
    """
    def setup_nodes(self):
        return start_nodes(4, self.options.tmpdir)
    def setup_chain(self):
        print("Initializing test directory "+self.options.tmpdir)
        initialize_chain(self.options.tmpdir)
        #Append rpcauth to crowdcoin.conf before initialization
        rpcauth = "rpcauth=rt:93648e835a54c573682c2eb19f882535$7681e9c5b74bdd85e78166031d2058e1069b3ed7ed967c93fc63abba06f31144"
        rpcauth2 = "rpcauth=rt2:f8607b1a88861fac29dfccf9b52ff9f$ff36a0c23c8c62b4846112e50fa888416e94c17bfd4c42f88fd8f55ec6a3137e"
        with open(os.path.join(self.options.tmpdir+"/node0", "crowdcoin.conf"), 'a') as f:
            f.write(rpcauth+"\n")
            f.write(rpcauth2+"\n")
    def _is_unauthorized(self, url, authpair):
        """POST a getbestblockhash RPC with *authpair* credentials.

        Returns True if the node answered HTTP 401 (unauthorized).
        Factored out of run_test, which previously repeated this
        connect/request/close sequence six times.
        """
        headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
        conn = httplib.HTTPConnection(url.hostname, url.port)
        conn.connect()
        conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
        resp = conn.getresponse()
        status = resp.status
        conn.close()
        return status == 401
    def run_test(self):
        ##################################################
        # Check correctness of the rpcauth config option #
        ##################################################
        url = urlparse.urlparse(self.nodes[0].url)
        #Old authpair
        authpair = url.username + ':' + url.password
        #Passwords generated via share/rpcuser tool for the rpcauth entries
        #written in setup_chain
        password = "cA773lm788buwYe4g4WT+05pKyNruVKjQ25x3n0DQcM="
        password2 = "8/F3uMDw4KSEbw96U3CA1C4X05dkHDN2BPFjTgZW4KI="
        #Old-style credentials still work
        assert_equal(self._is_unauthorized(url, authpair), False)
        #New authpair works too
        assert_equal(self._is_unauthorized(url, "rt:"+password), False)
        #Wrong login name with rt's password
        assert_equal(self._is_unauthorized(url, "rtwrong:"+password), True)
        #Wrong password for rt
        assert_equal(self._is_unauthorized(url, "rt:"+password+"wrong"), True)
        #Correct for rt2
        assert_equal(self._is_unauthorized(url, "rt2:"+password2), False)
        #Wrong password for rt2
        assert_equal(self._is_unauthorized(url, "rt2:"+password2+"wrong"), True)
if __name__ == '__main__':
    # Run the rpcauth functional test when invoked directly.
    HTTPBasicsTest ().main ()
| 37.696721 | 129 | 0.649924 |
c44ca6db94c23a6f0cea09bdc7c740248e6829b2 | 3,067 | py | Python | emodelrunner/run.py | BlueBrain/EModelRunner | 3d46e9ce20e76666288e84a300c329b46f0fa2c4 | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2021-12-03T15:28:26.000Z | 2022-02-01T11:44:29.000Z | emodelrunner/run.py | BlueBrain/EModelRunner | 3d46e9ce20e76666288e84a300c329b46f0fa2c4 | [
"ECL-2.0",
"Apache-2.0"
] | 27 | 2021-12-03T09:16:29.000Z | 2022-03-03T10:29:21.000Z | emodelrunner/run.py | BlueBrain/EModelRunner | 3d46e9ce20e76666288e84a300c329b46f0fa2c4 | [
"ECL-2.0",
"Apache-2.0"
] | 4 | 2021-12-07T08:16:29.000Z | 2022-02-22T17:55:58.000Z | """Create python recordings."""
# Copyright 2020-2022 Blue Brain Project / EPFL
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from bluepyopt import ephys
from emodelrunner.configuration.configparser import PackageType
from emodelrunner.create_cells import create_cell_using_config
from emodelrunner.parsing_utilities import get_parser_args, set_verbosity
from emodelrunner.protocols.create_protocols import ProtocolBuilder
from emodelrunner.load import (
load_config,
get_prot_args,
get_release_params,
)
from emodelrunner.output import write_current
from emodelrunner.output import write_responses
logger = logging.getLogger(__name__)
def main(config_path):
    """Main.
    Args:
        config_path (str): path to config file
        The config file should have '.ini' suffix
    """
    # pylint: disable=too-many-locals
    config = load_config(config_path=config_path)
    # Build the cell model and its released (frozen) parameter values.
    cell = create_cell_using_config(config)
    release_params = get_release_params(config)
    cvode_active = config.getboolean("Sim", "cvode_active")
    # simulator
    dt = config.getfloat("Sim", "dt")
    sim = ephys.simulators.NrnSimulator(dt=dt, cvode_active=cvode_active)
    # create protocols (protocol family depends on the package type)
    add_synapses = config.getboolean("Synapses", "add_synapses")
    prot_args = get_prot_args(config)
    if config.package_type == PackageType.sscx:
        protocols = ProtocolBuilder.using_sscx_protocols(add_synapses, prot_args, cell)
    elif config.package_type == PackageType.thalamus:
        protocols = ProtocolBuilder.using_thalamus_protocols(
            add_synapses, prot_args, cell
        )
    else:
        raise ValueError(f"unsupported package type: {config.package_type}")
    ephys_protocols = protocols.get_ephys_protocols()
    # run
    logger.info("Python Recordings Running...")
    responses = ephys_protocols.run(
        cell_model=cell, param_values=release_params, sim=sim, isolate=False
    )
    mtype = config.get("Morphology", "mtype")
    # Stimulus currents: only sscx and thalamus are possible here, since any
    # other package type already raised ValueError above.
    if config.package_type == PackageType.sscx:
        currents = protocols.get_stim_currents(responses, dt)
    elif config.package_type == PackageType.thalamus:
        currents = protocols.get_thalamus_stim_currents(responses, mtype, dt)
    # write responses
    output_dir = config.get("Paths", "output_dir")
    write_responses(responses, output_dir)
    write_current(currents, output_dir)
    logger.info("Python Recordings Done")
if __name__ == "__main__":
args = get_parser_args()
set_verbosity(args.verbosity)
main(config_path=args.config_path)
| 32.62766 | 87 | 0.741115 |
ffd75318933bc231b7b746bf82cbe7af57ac5e4b | 7,577 | py | Python | gitea_api/models/general_repo_settings.py | r7l/python-gitea-api | 31d3dba27ea7e551e2048a1230c4ab4d73365006 | [
"MIT"
] | 1 | 2022-02-09T23:43:26.000Z | 2022-02-09T23:43:26.000Z | gitea_api/models/general_repo_settings.py | r7l/python-gitea-api | 31d3dba27ea7e551e2048a1230c4ab4d73365006 | [
"MIT"
] | null | null | null | gitea_api/models/general_repo_settings.py | r7l/python-gitea-api | 31d3dba27ea7e551e2048a1230c4ab4d73365006 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Gitea API.
This documentation describes the Gitea API. # noqa: E501
OpenAPI spec version: 1.16.7
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class GeneralRepoSettings(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
      and the value is attribute type.
      attribute_map (dict): The key is attribute name
      and the value is json key in definition.
    """
    # Declared Swagger type of each attribute (all boolean feature flags).
    swagger_types = {
        'http_git_disabled': 'bool',
        'lfs_disabled': 'bool',
        'migrations_disabled': 'bool',
        'mirrors_disabled': 'bool',
        'stars_disabled': 'bool',
        'time_tracking_disabled': 'bool'
    }
    # JSON key corresponding to each attribute in the API payload.
    attribute_map = {
        'http_git_disabled': 'http_git_disabled',
        'lfs_disabled': 'lfs_disabled',
        'migrations_disabled': 'migrations_disabled',
        'mirrors_disabled': 'mirrors_disabled',
        'stars_disabled': 'stars_disabled',
        'time_tracking_disabled': 'time_tracking_disabled'
    }
    def __init__(self, http_git_disabled=None, lfs_disabled=None, migrations_disabled=None, mirrors_disabled=None, stars_disabled=None, time_tracking_disabled=None):  # noqa: E501
        """GeneralRepoSettings - a model defined in Swagger"""  # noqa: E501
        self._http_git_disabled = None
        self._lfs_disabled = None
        self._migrations_disabled = None
        self._mirrors_disabled = None
        self._stars_disabled = None
        self._time_tracking_disabled = None
        self.discriminator = None
        if http_git_disabled is not None:
            self.http_git_disabled = http_git_disabled
        if lfs_disabled is not None:
            self.lfs_disabled = lfs_disabled
        if migrations_disabled is not None:
            self.migrations_disabled = migrations_disabled
        if mirrors_disabled is not None:
            self.mirrors_disabled = mirrors_disabled
        if stars_disabled is not None:
            self.stars_disabled = stars_disabled
        if time_tracking_disabled is not None:
            self.time_tracking_disabled = time_tracking_disabled
    @property
    def http_git_disabled(self):
        """Gets the http_git_disabled of this GeneralRepoSettings.  # noqa: E501
        :return: The http_git_disabled of this GeneralRepoSettings.  # noqa: E501
        :rtype: bool
        """
        return self._http_git_disabled
    @http_git_disabled.setter
    def http_git_disabled(self, http_git_disabled):
        """Sets the http_git_disabled of this GeneralRepoSettings.
        :param http_git_disabled: The http_git_disabled of this GeneralRepoSettings.  # noqa: E501
        :type: bool
        """
        self._http_git_disabled = http_git_disabled
    @property
    def lfs_disabled(self):
        """Gets the lfs_disabled of this GeneralRepoSettings.  # noqa: E501
        :return: The lfs_disabled of this GeneralRepoSettings.  # noqa: E501
        :rtype: bool
        """
        return self._lfs_disabled
    @lfs_disabled.setter
    def lfs_disabled(self, lfs_disabled):
        """Sets the lfs_disabled of this GeneralRepoSettings.
        :param lfs_disabled: The lfs_disabled of this GeneralRepoSettings.  # noqa: E501
        :type: bool
        """
        self._lfs_disabled = lfs_disabled
    @property
    def migrations_disabled(self):
        """Gets the migrations_disabled of this GeneralRepoSettings.  # noqa: E501
        :return: The migrations_disabled of this GeneralRepoSettings.  # noqa: E501
        :rtype: bool
        """
        return self._migrations_disabled
    @migrations_disabled.setter
    def migrations_disabled(self, migrations_disabled):
        """Sets the migrations_disabled of this GeneralRepoSettings.
        :param migrations_disabled: The migrations_disabled of this GeneralRepoSettings.  # noqa: E501
        :type: bool
        """
        self._migrations_disabled = migrations_disabled
    @property
    def mirrors_disabled(self):
        """Gets the mirrors_disabled of this GeneralRepoSettings.  # noqa: E501
        :return: The mirrors_disabled of this GeneralRepoSettings.  # noqa: E501
        :rtype: bool
        """
        return self._mirrors_disabled
    @mirrors_disabled.setter
    def mirrors_disabled(self, mirrors_disabled):
        """Sets the mirrors_disabled of this GeneralRepoSettings.
        :param mirrors_disabled: The mirrors_disabled of this GeneralRepoSettings.  # noqa: E501
        :type: bool
        """
        self._mirrors_disabled = mirrors_disabled
    @property
    def stars_disabled(self):
        """Gets the stars_disabled of this GeneralRepoSettings.  # noqa: E501
        :return: The stars_disabled of this GeneralRepoSettings.  # noqa: E501
        :rtype: bool
        """
        return self._stars_disabled
    @stars_disabled.setter
    def stars_disabled(self, stars_disabled):
        """Sets the stars_disabled of this GeneralRepoSettings.
        :param stars_disabled: The stars_disabled of this GeneralRepoSettings.  # noqa: E501
        :type: bool
        """
        self._stars_disabled = stars_disabled
    @property
    def time_tracking_disabled(self):
        """Gets the time_tracking_disabled of this GeneralRepoSettings.  # noqa: E501
        :return: The time_tracking_disabled of this GeneralRepoSettings.  # noqa: E501
        :rtype: bool
        """
        return self._time_tracking_disabled
    @time_tracking_disabled.setter
    def time_tracking_disabled(self, time_tracking_disabled):
        """Sets the time_tracking_disabled of this GeneralRepoSettings.
        :param time_tracking_disabled: The time_tracking_disabled of this GeneralRepoSettings.  # noqa: E501
        :type: bool
        """
        self._time_tracking_disabled = time_tracking_disabled
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(GeneralRepoSettings, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, GeneralRepoSettings):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
9bfe1ee02fd7c1245298689b1f0825669edab626 | 233 | py | Python | app/admin/examples/__init__.py | davidgacc/docusign | e63167101656d0066d481844576ce687ea80eb91 | [
"MIT"
] | 21 | 2020-05-13T21:08:44.000Z | 2022-02-18T01:32:16.000Z | app/admin/examples/__init__.py | davidgacc/docusign | e63167101656d0066d481844576ce687ea80eb91 | [
"MIT"
] | 8 | 2020-11-23T09:28:04.000Z | 2022-02-02T12:04:08.000Z | app/admin/examples/__init__.py | davidgacc/docusign | e63167101656d0066d481844576ce687ea80eb91 | [
"MIT"
] | 26 | 2020-05-12T22:20:01.000Z | 2022-03-09T10:57:27.000Z | from .eg001_create_a_new_user import eg001
from .eg002_create_active_clm_esign_user import eg002
from .eg003_bulk_export_user_data import eg003
from .eg004_add_users_via_bulk_import import eg004
from .eg005_audit_users import eg005
| 33.285714 | 53 | 0.888412 |
9c94d6a72bea32887cb8963414d2bfeff610e60e | 1,601 | py | Python | vendor-local/packages/pytz/setup.py | glogiotatidis/affiliates | 34d0ded8e24be9dd207d6419a5157dc8ce34bc06 | [
"BSD-3-Clause"
] | 5,079 | 2015-01-01T03:39:46.000Z | 2022-03-31T07:38:22.000Z | vendor-local/packages/pytz/setup.py | glogiotatidis/affiliates | 34d0ded8e24be9dd207d6419a5157dc8ce34bc06 | [
"BSD-3-Clause"
] | 1,623 | 2015-01-01T08:06:24.000Z | 2022-03-30T19:48:52.000Z | vendor-local/packages/pytz/setup.py | glogiotatidis/affiliates | 34d0ded8e24be9dd207d6419a5157dc8ce34bc06 | [
"BSD-3-Clause"
] | 2,033 | 2015-01-04T07:18:02.000Z | 2022-03-28T19:55:47.000Z | '''
pytz setup script
'''
import pytz, sys, os, os.path
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Package metadata reused by several setup() keywords below.
me = 'Stuart Bishop'
memail = 'stuart@stuartbishop.net'
packages = ['pytz']
resources = ['zone.tab', 'locales/pytz.pot']
# Collect every zoneinfo data file, keeping paths relative to the package.
for dirpath, dirnames, filenames in os.walk(os.path.join('pytz', 'zoneinfo')):
    # remove the 'pytz' part of the path
    basepath = dirpath.split(os.path.sep, 1)[1]
    resources.extend([os.path.join(basepath, filename)
                      for filename in filenames])
package_data = {'pytz': resources}
assert len(resources) > 10, 'zoneinfo files not found!'

# Read the long description up front so the file handle is closed
# deterministically (the previous inline open(...).read() left the
# handle open until garbage collection).
with open('README.txt', 'r') as readme:
    long_description = readme.read()

setup (
    name='pytz',
    version=pytz.VERSION,
    zip_safe=True,
    description='World timezone definitions, modern and historical',
    long_description=long_description,
    author=me,
    author_email=memail,
    maintainer=me,
    maintainer_email=memail,
    url='http://pythonhosted.org/pytz',
    license='MIT',
    keywords=['timezone','tzinfo', 'datetime', 'olson', 'time'],
    packages=packages,
    package_data=package_data,
    download_url='http://pypi.python.org/pypi/pytz',
    platforms=['Independant'],
    classifiers = [
        'Development Status :: 6 - Mature',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
| 30.207547 | 78 | 0.645846 |
1a7971fc897279dcb89cb07d32b248757d3e6cf2 | 602 | py | Python | LeetCode/Aug 2021 LeetCoding Challenge/N-ary Tree Level Order Traversal.py | UtkarshPathrabe/Competitive-Coding | ba322fbb1b88682d56a9b80bdd92a853f1caa84e | [
"MIT"
] | 13 | 2021-09-02T07:30:02.000Z | 2022-03-22T19:32:03.000Z | LeetCode/Aug 2021 LeetCoding Challenge/N-ary Tree Level Order Traversal.py | UtkarshPathrabe/Competitive-Coding | ba322fbb1b88682d56a9b80bdd92a853f1caa84e | [
"MIT"
] | null | null | null | LeetCode/Aug 2021 LeetCoding Challenge/N-ary Tree Level Order Traversal.py | UtkarshPathrabe/Competitive-Coding | ba322fbb1b88682d56a9b80bdd92a853f1caa84e | [
"MIT"
] | 3 | 2021-08-24T16:06:22.000Z | 2021-09-17T15:39:53.000Z | """
# Definition for a Node.
class Node:
def __init__(self, val=None, children=None):
self.val = val
self.children = children
"""
class Solution:
    def levelOrder(self, root: 'Node') -> List[List[int]]:
        """Return the values of an N-ary tree grouped by depth (BFS)."""
        if root is None:
            return []
        levels = []
        frontier = [root]
        while frontier:
            # Record this depth, then advance to all children at once.
            levels.append([node.val for node in frontier])
            frontier = [child for node in frontier
                        for child in node.children]
        return levels
e8682425d5abbd41957fb45d933c9e8687e7ee90 | 1,608 | py | Python | p3_collab-compet/workspace_utils.py | k-staple/deep-reinforcement-learning | 84529c289fdee4e23563bc29d606f39e5b0ab488 | [
"MIT"
] | null | null | null | p3_collab-compet/workspace_utils.py | k-staple/deep-reinforcement-learning | 84529c289fdee4e23563bc29d606f39e5b0ab488 | [
"MIT"
] | null | null | null | p3_collab-compet/workspace_utils.py | k-staple/deep-reinforcement-learning | 84529c289fdee4e23563bc29d606f39e5b0ab488 | [
"MIT"
] | null | null | null | # Udacity-provided script from GPU Workspaces: Best Practices page
import signal
from contextlib import contextmanager
import requests
# Default ping cadence: first keep-alive after 4 minutes, then every 4 minutes.
DELAY = INTERVAL = 4 * 60  # interval time in seconds
# Lower bound enforced on caller-supplied delay/interval values.
MIN_DELAY = MIN_INTERVAL = 2 * 60
# Endpoint that keeps the remote workspace session alive.
KEEPALIVE_URL = "https://nebula.udacity.com/api/v1/remote/keep-alive"
# GCE metadata endpoint serving this workspace's keep-alive auth token.
TOKEN_URL = "http://metadata.google.internal/computeMetadata/v1/instance/attributes/keep_alive_token"
TOKEN_HEADERS = {"Metadata-Flavor":"Google"}
def _request_handler(headers):
def _handler(signum, frame):
requests.request("POST", KEEPALIVE_URL, headers=headers)
return _handler
@contextmanager
def active_session(delay=DELAY, interval=INTERVAL):
    """Keep a Udacity GPU workspace awake for the duration of the block.

    Fetches the workspace keep-alive token, then installs a SIGALRM interval
    timer that periodically POSTs a keep-alive request. On exit the previous
    SIGALRM handler is restored and the timer is cancelled.

    Example:
        from workspace_utils import active_session
        with active_session():
            # do long-running work here
    """
    # Never ping more aggressively than the service allows.
    delay = max(delay, MIN_DELAY)
    interval = max(interval, MIN_INTERVAL)
    auth_token = requests.request("GET", TOKEN_URL, headers=TOKEN_HEADERS).text
    auth_headers = {'Authorization': "STAR " + auth_token}
    previous_handler = signal.getsignal(signal.SIGALRM)
    try:
        signal.signal(signal.SIGALRM, _request_handler(auth_headers))
        signal.setitimer(signal.ITIMER_REAL, delay, interval)
        yield
    finally:
        # Restore the caller's SIGALRM state and disarm the timer.
        signal.signal(signal.SIGALRM, previous_handler)
        signal.setitimer(signal.ITIMER_REAL, 0)
signal.setitimer(signal.ITIMER_REAL, 0)
def keep_awake(iterable, delay=DELAY, interval=INTERVAL):
    """Yield items from *iterable* while keeping the workspace session alive.

    Example:
        from workspace_utils import keep_awake
        for i in keep_awake(range(5)):
            # do iteration with lots of work here
    """
    with active_session(delay, interval):
        yield from iterable
| 28.210526 | 101 | 0.715174 |
8facaf5746523d39fbbbae4163083ff377addd8a | 133 | py | Python | tests/test_helloworld.py | azavea/PyCurb | 9492ca40b0639680b73aa7bdfcf9f744f9e75727 | [
"Apache-2.0"
] | null | null | null | tests/test_helloworld.py | azavea/PyCurb | 9492ca40b0639680b73aa7bdfcf9f744f9e75727 | [
"Apache-2.0"
] | 8 | 2020-09-30T17:15:50.000Z | 2020-10-23T21:00:53.000Z | tests/test_helloworld.py | azavea/PyCurb | 9492ca40b0639680b73aa7bdfcf9f744f9e75727 | [
"Apache-2.0"
] | null | null | null | from unittest import TestCase
class HelloWorldTestCase(TestCase):
def test_harness(self):
self.assertEqual(True, True)
| 19 | 36 | 0.744361 |
0304814b7aa010aaac118580606b284de573e476 | 11,859 | py | Python | tools/clusterfuzz/v8_foozzie_test.py | jie-pan/v8 | 780a495c58a32ff17d4b4332a122aea1d2e0f0b1 | [
"BSD-3-Clause"
] | 1 | 2020-06-01T18:07:24.000Z | 2020-06-01T18:07:24.000Z | tools/clusterfuzz/v8_foozzie_test.py | jie-pan/v8 | 780a495c58a32ff17d4b4332a122aea1d2e0f0b1 | [
"BSD-3-Clause"
] | null | null | null | tools/clusterfuzz/v8_foozzie_test.py | jie-pan/v8 | 780a495c58a32ff17d4b4332a122aea1d2e0f0b1 | [
"BSD-3-Clause"
] | 1 | 2020-06-03T13:25:49.000Z | 2020-06-03T13:25:49.000Z | #!/usr/bin/env python
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import random
import subprocess
import sys
import unittest
import v8_commands
import v8_foozzie
import v8_fuzz_config
import v8_suppressions
# Python 2/3 compatibility: `basestring` does not exist on Python 3,
# so alias it to `str` for the isinstance checks below.
try:
  basestring
except NameError:
  basestring = str

PYTHON3 = sys.version_info >= (3, 0)

# Paths to the foozzie harness script and its fake-build test fixtures.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
FOOZZIE = os.path.join(BASE_DIR, 'v8_foozzie.py')
TEST_DATA = os.path.join(BASE_DIR, 'testdata')

# d8 binaries (one per build configuration) an experiment may target.
KNOWN_BUILDS = [
  'd8',
  'clang_x86/d8',
  'clang_x86_v8_arm/d8',
  'clang_x64_v8_arm64/d8',
  'clang_x64_pointer_compression/d8',
]
class ConfigTest(unittest.TestCase):
  """Static integrity checks of the foozzie experiment/flag configuration."""

  def testExperiments(self):
    """Test integrity of probabilities and configs."""
    CONFIGS = v8_foozzie.CONFIGS
    EXPERIMENTS = v8_fuzz_config.FOOZZIE_EXPERIMENTS
    FLAGS = v8_fuzz_config.ADDITIONAL_FLAGS
    # Probabilities add up to 100%.
    first_is_int = lambda x: type(x[0]) == int
    assert all(map(first_is_int, EXPERIMENTS))
    assert sum(x[0] for x in EXPERIMENTS) == 100
    # Configs used in experiments are defined.
    assert all(map(lambda x: x[1] in CONFIGS, EXPERIMENTS))
    assert all(map(lambda x: x[2] in CONFIGS, EXPERIMENTS))
    # The last config item points to a known build configuration.
    assert all(map(lambda x: x[3] in KNOWN_BUILDS, EXPERIMENTS))
    # Ensure we compare different configs and same d8, or same config
    # to different d8.
    is_sane_comparison = lambda x: (x[1] == x[2]) == ('d8' != x[3])
    assert all(map(is_sane_comparison, EXPERIMENTS))
    # All flags have a probability.
    first_is_float = lambda x: type(x[0]) == float
    assert all(map(first_is_float, FLAGS))
    first_between_0_and_1 = lambda x: x[0] > 0 and x[0] < 1
    assert all(map(first_between_0_and_1, FLAGS))
    # Test consistent flags.
    second_is_string = lambda x: isinstance(x[1], basestring)
    assert all(map(second_is_string, FLAGS))
    # We allow spaces to separate more flags. We don't allow spaces in the flag
    # value.
    is_flag = lambda x: x.startswith('--')
    all_parts_are_flags = lambda x: all(map(is_flag, x[1].split()))
    assert all(map(all_parts_are_flags, FLAGS))

  def testConfig(self):
    """Smoke test how to choose experiments."""
    # Fixed seed makes choose_foozzie_flags deterministic for this test.
    config = v8_fuzz_config.Config('foo', random.Random(42))
    experiments = [
      [25, 'ignition', 'jitless', 'd8'],
      [75, 'ignition', 'ignition', 'clang_x86/d8'],
    ]
    flags = [
      [0.1, '--flag'],
      [0.3, '--baz'],
      [0.3, '--foo --bar'],
    ]
    self.assertEqual(
      [
        '--first-config=ignition',
        '--second-config=jitless',
        '--second-d8=d8',
        '--second-config-extra-flags=--baz',
        '--second-config-extra-flags=--foo',
        '--second-config-extra-flags=--bar',
      ],
      config.choose_foozzie_flags(experiments, flags),
    )
    self.assertEqual(
      [
        '--first-config=ignition',
        '--second-config=jitless',
        '--second-d8=d8',
      ],
      config.choose_foozzie_flags(experiments, flags),
    )
class UnitTest(unittest.TestCase):
  """Unit tests for failure clustering, output diffing and output capping."""

  def testCluster(self):
    # cluster_failures buckets a failure by the source path of the fuzz test.
    crash_test_example_path = 'CrashTests/path/to/file.js'
    self.assertEqual(
      v8_foozzie.ORIGINAL_SOURCE_DEFAULT,
      v8_foozzie.cluster_failures(''))
    self.assertEqual(
      v8_foozzie.ORIGINAL_SOURCE_CRASHTESTS,
      v8_foozzie.cluster_failures(crash_test_example_path))
    self.assertEqual(
      '_o_O_',
      v8_foozzie.cluster_failures(
        crash_test_example_path,
        known_failures={crash_test_example_path: '_o_O_'}))
    self.assertEqual(
      '980',
      v8_foozzie.cluster_failures('v8/test/mjsunit/apply.js'))

  def testDiff(self):
    # Helper: suppression-aware line diff of two run outputs. `skip` bypasses
    # the suppression rules entirely.
    def diff_fun(one, two, skip=False):
      suppress = v8_suppressions.get_suppression(
        'x64', 'ignition', 'x64', 'ignition_turbo', skip)
      return suppress.diff_lines(one.splitlines(), two.splitlines())

    one = ''
    two = ''
    diff = None, None
    self.assertEqual(diff, diff_fun(one, two))

    one = 'a \n b\nc();'
    two = 'a \n b\nc();'
    diff = None, None
    self.assertEqual(diff, diff_fun(one, two))

    # Ignore line before caret, caret position and error message.
    one = """
undefined
weird stuff
^
somefile.js: TypeError: undefined is not a function
undefined
"""
    two = """
undefined
other weird stuff
^
somefile.js: TypeError: baz is not a function
undefined
"""
    diff = None, None
    self.assertEqual(diff, diff_fun(one, two))

    one = """
Still equal
Extra line
"""
    two = """
Still equal
"""
    diff = '- Extra line', None
    self.assertEqual(diff, diff_fun(one, two))

    one = """
Still equal
"""
    two = """
Still equal
Extra line
"""
    diff = '+ Extra line', None
    self.assertEqual(diff, diff_fun(one, two))

    one = """
undefined
somefile.js: TypeError: undefined is not a constructor
"""
    two = """
undefined
otherfile.js: TypeError: undefined is not a constructor
"""
    diff = """- somefile.js: TypeError: undefined is not a constructor
+ otherfile.js: TypeError: undefined is not a constructor""", None
    self.assertEqual(diff, diff_fun(one, two))

    # Test that skipping suppressions works.
    one = """
v8-foozzie source: foo
23:TypeError: bar is not a function
"""
    two = """
v8-foozzie source: foo
42:TypeError: baz is not a function
"""
    self.assertEqual((None, 'foo'), diff_fun(one, two))
    diff = """- 23:TypeError: bar is not a function
+ 42:TypeError: baz is not a function""", 'foo'
    self.assertEqual(diff, diff_fun(one, two, skip=True))

  def testOutputCapping(self):
    # Fabricate a run output; crashes are modelled by a -1 exit code.
    def output(stdout, is_crash):
      exit_code = -1 if is_crash else 0
      return v8_commands.Output(
        exit_code=exit_code, timed_out=False, stdout=stdout, pid=0)

    # Assert that capping the two outputs yields the expected line strings.
    def check(stdout1, stdout2, is_crash1, is_crash2, capped_lines1,
              capped_lines2):
      output1 = output(stdout1, is_crash1)
      output2 = output(stdout2, is_crash2)
      self.assertEqual(
        (capped_lines1, capped_lines2),
        v8_suppressions.get_output_capped(output1, output2))

    # No capping, already equal.
    check('1\n2', '1\n2', True, True, '1\n2', '1\n2')
    # No crash, no capping.
    check('1\n2', '1\n2\n3', False, False, '1\n2', '1\n2\n3')
    check('1\n2\n3', '1\n2', False, False, '1\n2\n3', '1\n2')
    # Cap smallest if all runs crash.
    check('1\n2', '1\n2\n3', True, True, '1\n2', '1\n2')
    check('1\n2\n3', '1\n2', True, True, '1\n2', '1\n2')
    check('1\n2', '1\n23', True, True, '1\n2', '1\n2')
    check('1\n23', '1\n2', True, True, '1\n2', '1\n2')
    # Cap the non-crashy run.
    check('1\n2\n3', '1\n2', False, True, '1\n2', '1\n2')
    check('1\n2', '1\n2\n3', True, False, '1\n2', '1\n2')
    check('1\n23', '1\n2', False, True, '1\n2', '1\n2')
    check('1\n2', '1\n23', True, False, '1\n2', '1\n2')
    # The crashy run has more output.
    check('1\n2\n3', '1\n2', True, False, '1\n2\n3', '1\n2')
    check('1\n2', '1\n2\n3', False, True, '1\n2', '1\n2\n3')
    check('1\n23', '1\n2', True, False, '1\n23', '1\n2')
    check('1\n2', '1\n23', False, True, '1\n2', '1\n23')
    # Keep output difference when capping.
    check('1\n2', '3\n4\n5', True, True, '1\n2', '3\n4')
    check('1\n2\n3', '4\n5', True, True, '1\n2', '4\n5')
    check('12', '345', True, True, '12', '34')
    check('123', '45', True, True, '12', '45')
def cut_verbose_output(stdout):
  """Strip the first four lines (the echoed d8 command lines) from *stdout*."""
  lines = stdout.split('\n')
  return '\n'.join(lines[4:])
def run_foozzie(second_d8_dir, *extra_flags, **kwargs):
  """Runs the foozzie harness comparing the baseline fake d8 to another one.

  NOTE: the only recognized kwarg is 'second_config'; its mere presence (not
  its value) switches the second config to 'jitless'. `kwargs` is then reset
  and reused to pass text-mode to subprocess on Python 3.
  """
  second_config = 'ignition_turbo'
  if 'second_config' in kwargs:
    second_config = 'jitless'
  kwargs = {}
  if PYTHON3:
    # Makes check_output return str instead of bytes.
    kwargs['text'] = True
  return subprocess.check_output([
    sys.executable, FOOZZIE,
    '--random-seed', '12345',
    '--first-d8', os.path.join(TEST_DATA, 'baseline', 'd8.py'),
    '--second-d8', os.path.join(TEST_DATA, second_d8_dir, 'd8.py'),
    '--first-config', 'ignition',
    '--second-config', second_config,
    os.path.join(TEST_DATA, 'fuzz-123.js'),
  ] + list(extra_flags), **kwargs)
class SystemTest(unittest.TestCase):
  """This tests the whole correctness-fuzzing harness with fake build
  artifacts.

  Overview of fakes:
    baseline: Example foozzie output including a syntax error.
    build1: Difference to baseline is a stack trace difference expected to
            be suppressed.
    build2: Difference to baseline is a non-suppressed output difference
            causing the script to fail.
    build3: As build1 but with an architecture difference as well.
  """

  def testSyntaxErrorDiffPass(self):
    # The stack-trace difference of build1 is suppressed, so the run passes.
    stdout = run_foozzie('build1', '--skip-sanity-checks')
    self.assertEqual('# V8 correctness - pass\n', cut_verbose_output(stdout))
    # Default comparison includes suppressions.
    self.assertIn('v8_suppressions.js', stdout)
    # Default comparison doesn't include any specific mock files.
    self.assertNotIn('v8_mock_archs.js', stdout)
    self.assertNotIn('v8_mock_webassembly.js', stdout)

  def testDifferentOutputFail(self):
    with open(os.path.join(TEST_DATA, 'failure_output.txt')) as f:
      expected_output = f.read()
    with self.assertRaises(subprocess.CalledProcessError) as ctx:
      run_foozzie('build2', '--skip-sanity-checks',
                  '--first-config-extra-flags=--flag1',
                  '--first-config-extra-flags=--flag2=0',
                  '--second-config-extra-flags=--flag3')
    e = ctx.exception
    self.assertEqual(v8_foozzie.RETURN_FAIL, e.returncode)
    self.assertEqual(expected_output, cut_verbose_output(e.output))

  def testSanityCheck(self):
    with open(os.path.join(TEST_DATA, 'sanity_check_output.txt')) as f:
      expected_output = f.read()
    with self.assertRaises(subprocess.CalledProcessError) as ctx:
      run_foozzie('build2')
    e = ctx.exception
    self.assertEqual(v8_foozzie.RETURN_FAIL, e.returncode)
    self.assertEqual(expected_output, e.output)

  def testDifferentArch(self):
    """Test that the architecture-specific mocks are passed to both runs when
    we use executables with different architectures.
    """
    # Build 3 simulates x86, while the baseline is x64.
    stdout = run_foozzie('build3', '--skip-sanity-checks')
    lines = stdout.split('\n')
    # TODO(machenbach): Don't depend on the command-lines being printed in
    # particular lines.
    self.assertIn('v8_mock_archs.js', lines[1])
    self.assertIn('v8_mock_archs.js', lines[3])

  def testJitless(self):
    """Test that webassembly is mocked out when comparing with jitless."""
    stdout = run_foozzie(
        'build1', '--skip-sanity-checks', second_config='jitless')
    lines = stdout.split('\n')
    # TODO(machenbach): Don't depend on the command-lines being printed in
    # particular lines.
    self.assertIn('v8_mock_webassembly.js', lines[1])
    self.assertIn('v8_mock_webassembly.js', lines[3])

  def testSkipSuppressions(self):
    """Test that the suppressions file is not passed when skipping
    suppressions.
    """
    # Compare baseline with baseline. This passes as there is no difference.
    stdout = run_foozzie(
        'baseline', '--skip-sanity-checks', '--skip-suppressions')
    self.assertNotIn('v8_suppressions.js', stdout)

    # Compare with a build that usually suppresses a difference. Now we fail
    # since we skip suppressions.
    with self.assertRaises(subprocess.CalledProcessError) as ctx:
      run_foozzie(
          'build1', '--skip-sanity-checks', '--skip-suppressions')
    e = ctx.exception
    self.assertEqual(v8_foozzie.RETURN_FAIL, e.returncode)
    self.assertNotIn('v8_suppressions.js', e.output)
# Run the whole test suite when this file is invoked directly.
if __name__ == '__main__':
  unittest.main()
| 33.979943 | 79 | 0.653343 |
6942472c91ec7408fe900889b044c95ede235eee | 25,581 | py | Python | src/dms-preview/azext_dms/tests/latest/test_service_scenarios.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 207 | 2017-11-29T06:59:41.000Z | 2022-03-31T10:00:53.000Z | src/dms-preview/azext_dms/tests/latest/test_service_scenarios.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 4,061 | 2017-10-27T23:19:56.000Z | 2022-03-31T23:18:30.000Z | src/dms-preview/azext_dms/tests/latest/test_service_scenarios.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 802 | 2017-10-11T17:36:26.000Z | 2022-03-31T22:24:32.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.testsdk import (ScenarioTest,
ResourceGroupPreparer,
VirtualNetworkPreparer,
JMESPathCheck)
class DmsServiceTests(ScenarioTest):
    """Live scenario tests for the DMS CLI extension.

    Exercises `az dms project` and `az dms project task` CRUD end-to-end
    against a real DMS service instance (created and deleted per test) for
    the SQL, PostgreSQL and MongoDB migration scenarios.
    """

    service_random_name_prefix = 'dmsextclitest'
    location_name = 'centralus'
    sku_name = 'Premium_4vCores'
    # Pre-provisioned virtual network/subnet the DMS service is joined to.
    vsubnet_rg = 'ERNetwork'
    vsubnet_vn = 'AzureDMS-CORP-USC-VNET-5044'
    vsubnet_sn = 'Subnet-1'
    # Shared expectations for the check-name commands.
    name_exists_checks = [JMESPathCheck('nameAvailable', False),
                          JMESPathCheck('reason', 'AlreadyExists')]
    name_available_checks = [JMESPathCheck('nameAvailable', True)]

    @ResourceGroupPreparer(name_prefix='dmsext_cli_test_', location=location_name)
    @VirtualNetworkPreparer(name_prefix='dmsext.clitest.vn')
    def test_project_commands(self, resource_group):
        """Create/show/list/check-name/delete projects for SQL, PG and Mongo."""
        service_name = self.create_random_name(self.service_random_name_prefix, 20)
        project_name1 = self.create_random_name('project1', 15)
        project_name2 = self.create_random_name('project2', 15)
        project_name_pg = self.create_random_name('projectpg', 20)
        project_name_mg = self.create_random_name('projectmg', 20)

        self.kwargs.update({
            'vsubnet_rg': self.vsubnet_rg,
            'vsubnet_vn': self.vsubnet_vn,
            'vsubnet_sn': self.vsubnet_sn
        })

        # Resolve the subnet id the service will be placed into.
        subnet = self.cmd(("az network vnet subnet show "
                           "-g {vsubnet_rg} "
                           "-n {vsubnet_sn} "
                           "--vnet-name {vsubnet_vn}")).get_output_in_json()

        self.kwargs.update({
            'lname': self.location_name,
            'skuname': self.sku_name,
            'vnetid': subnet['id'],
            'sname': service_name,
            'pname1': project_name1,
            'pname2': project_name2,
            'pnamepg': project_name_pg,
            'pnamemg': project_name_mg
        })

        # Set up container service
        self.cmd(("az dms create "
                  "-l {lname} "
                  "-n {sname} "
                  "-g {rg} "
                  "--sku-name {skuname} "
                  "--subnet {vnetid} "
                  "--tags area=cli env=test"))

        # Showing a not-yet-created project must fail.
        self.cmd(("az dms project show "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-n {pname1}"),
                 expect_failure=True)

        create_checks = [JMESPathCheck('location', self.location_name),
                         JMESPathCheck('resourceGroup', resource_group),
                         JMESPathCheck('name', project_name1),
                         JMESPathCheck('sourcePlatform', 'SQL'),
                         JMESPathCheck('targetPlatform', 'SQLDB'),
                         JMESPathCheck('provisioningState', 'Succeeded'),
                         JMESPathCheck('tags.Cli', ''),
                         JMESPathCheck('tags.Type', 'test'),
                         JMESPathCheck('type', 'Microsoft.DataMigration/services/projects')]
        self.cmd(("az dms project create "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-l {lname} "
                  "-n {pname1} "
                  "--source-platform SQL "
                  "--target-platform SQLDB "
                  "--tags Type=test Cli"),
                 checks=create_checks)
        self.cmd(("az dms project show "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-n {pname1}"),
                 create_checks)

        # Test PostgreSQL project creation and deletion
        create_checks_pg = [JMESPathCheck('location', self.location_name),
                            JMESPathCheck('resourceGroup', resource_group),
                            JMESPathCheck('name', project_name_pg),
                            JMESPathCheck('sourcePlatform', 'PostgreSQL'),
                            JMESPathCheck('targetPlatform', 'AzureDbForPostgreSQL'),
                            JMESPathCheck('provisioningState', 'Succeeded'),
                            JMESPathCheck('tags.Cli', ''),
                            JMESPathCheck('tags.Type', 'test'),
                            JMESPathCheck('type', 'Microsoft.DataMigration/services/projects')]
        self.cmd(("az dms project create "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-l {lname} "
                  "-n {pnamepg} "
                  "--source-platform PostgreSQL "
                  "--target-platform AzureDbForPostgreSQL "
                  "--tags Type=test Cli"),
                 checks=create_checks_pg)
        self.cmd(("az dms project show "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-n {pnamepg}"),
                 create_checks_pg)

        # Test MongoDb project creation and deletion
        create_checks_mg = [JMESPathCheck('location', self.location_name),
                            JMESPathCheck('resourceGroup', resource_group),
                            JMESPathCheck('name', project_name_mg),
                            JMESPathCheck('sourcePlatform', 'MongoDb'),
                            JMESPathCheck('targetPlatform', 'MongoDb'),
                            JMESPathCheck('provisioningState', 'Succeeded'),
                            JMESPathCheck('tags.Cli', ''),
                            JMESPathCheck('tags.Type', 'test'),
                            JMESPathCheck('type', 'Microsoft.DataMigration/services/projects')]
        self.cmd(("az dms project create "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-l {lname} "
                  "-n {pnamemg} "
                  "--source-platform MongoDb "
                  "--target-platform MongoDb "
                  "--tags Type=test Cli"),
                 checks=create_checks_mg)
        self.cmd(("az dms project show "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-n {pnamemg}"),
                 create_checks_mg)

        # A project created without --tags reports no tags.
        create_checks_notags = [JMESPathCheck('tags', None)]
        self.cmd(("az dms project create "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-l {lname} "
                  "-n {pname2} "
                  "--source-platform SQL "
                  "--target-platform SQLDB"),
                 checks=create_checks_notags)

        list_checks = [JMESPathCheck('length(@)', 4),
                       JMESPathCheck("length([?name == '{}'])".format(project_name1), 1)]
        self.cmd(("az dms project list "
                  "-g {rg} "
                  "--service-name {sname}"),
                 list_checks)

        # Name availability flips after the project is deleted.
        self.cmd(("az dms project check-name "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-n {pname2}"),
                 checks=self.name_exists_checks)
        self.cmd(("az dms project delete "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-n {pname2} -y"))
        self.cmd(("az dms project check-name "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-n {pname2}"),
                 checks=self.name_available_checks)

        # Clean up service for live runs
        self.cmd(("az dms delete "
                  "-g {rg} "
                  "-n {sname} "
                  "--delete-running-tasks true -y"))

    @ResourceGroupPreparer(name_prefix='dms_cli_test_', location=location_name)
    @VirtualNetworkPreparer(name_prefix='dms.clitest.vn')
    def test_task_commands(self, resource_group):
        """Create/show/cancel/list/delete tasks for SQL, PG and Mongo projects."""
        from azure.cli.testsdk.checkers import JMESPathPatternCheck

        # All connection strings point at fake servers: the tasks are created
        # and immediately cancelled, so no real migration runs.
        local_vars = {
            "service_name": self.create_random_name(self.service_random_name_prefix, 20),
            "project_name": self.create_random_name('project', 15),
            "task_name1": self.create_random_name('task1', 15),
            "task_name2": self.create_random_name('task2', 15),
            "database_options1": ("[ { 'name': 'SourceDatabase1', 'target_database_name': 'TargetDatabase1', "
                                  "'make_source_db_read_only': False, 'table_map': { 'dbo.TestTableSource1': "
                                  "'dbo.TestTableTarget1', 'dbo.TestTableSource2': 'dbo.TestTableTarget2' } } ]"),
            "database_options2": ("[ { 'name': 'SourceDatabase2', 'target_database_name': 'TargetDatabase2', "
                                  "'make_source_db_read_only': False, 'table_map': { 'dbo.TestTableSource1': "
                                  "'dbo.TestTableTarget1', 'dbo.TestTableSource2': 'dbo.TestTableTarget2' } } ]"),
            "source_connection_info": ("{ 'userName': 'testuser', 'password': 'testpassword', 'dataSource': "
                                       "'notarealsourceserver', 'authentication': 'SqlAuthentication', "
                                       "'encryptConnection': True, 'trustServerCertificate': True }"),
            "target_connection_info": ("{ 'userName': 'testuser', 'password': 'testpassword', 'dataSource': "
                                       "'notarealtargetserver', 'authentication': 'SqlAuthentication', "
                                       "'encryptConnection': True, 'trustServerCertificate': True }"),
            "project_name_pg": self.create_random_name('projectpg', 20),
            "task_name_pg": self.create_random_name('taskpg', 20),
            "source_connection_info_pg": ("{ 'userName': 'testuser', 'password': 'testpassword', 'serverName': "
                                          "'notarealsourceserver', 'databaseName': 'notarealdatabasename', "
                                          "'encryptConnection': False, 'trustServerCertificate': True }"),
            "target_connection_info_pg": ("{ 'userName': 'testuser', 'password': 'testpassword', 'serverName': "
                                          "'notarealtargetserver', 'databaseName': 'notarealdatabasename'}"),
            "database_options_pg": ("[ { 'name': 'SourceDatabase1', 'target_database_name': 'TargetDatabase1', "
                                    "'selectedTables': [ 'public.TestTableSource1', 'public.TestTableSource2'] } ]"),
            "project_name_mg": self.create_random_name('projectmg', 20),
            "task_name_mgv": self.create_random_name('taskmgv', 20),
            "source_connection_info_mg": ("{ 'userName': 'mongoadmin', "
                                          "'password': 'password', "
                                          "'connectionString': 'mongodb://127.0.0.1:27017'}"),
            "target_connection_info_mg": ("{ 'userName': 'mongoadmin', "
                                          "'password': 'password', "
                                          "'connectionString': 'mongodb://127.0.0.1:27017'}"),
            "database_options_mg": ("{ \"boostRUs\": 0, \"replication\": \"OneTime\", "
                                    "\"throttling\": { \"minFreeCpu\": 25, \"minFreeMemoryMb\": 1024, "
                                    "\"maxParallelism\": 2 }, \"databases\": {\"db1\": {\"targetRUs\": 0, "
                                    "\"collections\": { \"cdb11\": {\"canDelete\": true, \"shardKey\": null, "
                                    "\"targetRUs\": null }, \"cdb12\": {\"canDelete\": true, \"shardKey\": null, "
                                    "\"targetRUs\": null }}},\"db2\": {\"targetRUs\": 0, \"collections\": { "
                                    "\"cdb21\": {\"canDelete\": true, \"shardKey\": null, \"targetRUs\": null }, "
                                    "\"cdb22\": {\"canDelete\": true, \"shardKey\": null, \"targetRUs\": null }}}}}")
        }

        self.kwargs.update({
            'vsubnet_rg': self.vsubnet_rg,
            'vsubnet_vn': self.vsubnet_vn,
            'vsubnet_sn': self.vsubnet_sn
        })

        subnet = self.cmd(("az network vnet subnet show "
                           "-g {vsubnet_rg} "
                           "-n {vsubnet_sn} "
                           "--vnet-name {vsubnet_vn}")).get_output_in_json()

        self.kwargs.update({
            'lname': self.location_name,
            'skuname': self.sku_name,
            'vnetid': subnet['id'],
            'sname': local_vars["service_name"],
            'pname': local_vars["project_name"],
            'pnamepg': local_vars["project_name_pg"],
            'pnamemg': local_vars["project_name_mg"],
            'tname1': local_vars["task_name1"],
            'tname2': local_vars["task_name2"],
            'tnamepg': local_vars["task_name_pg"],
            'tnamemgv': local_vars["task_name_mgv"],
            'dboptions1': local_vars["database_options1"],
            'dboptions2': local_vars["database_options2"],
            'dboptionspg': local_vars["database_options_pg"],
            'dboptionsmg': local_vars["database_options_mg"],
            'sconn': local_vars["source_connection_info"],
            'sconnpg': local_vars["source_connection_info_pg"],
            'sconnmg': local_vars["source_connection_info_mg"],
            'tconn': local_vars["target_connection_info"],
            'tconnpg': local_vars["target_connection_info_pg"],
            'tconnmg': local_vars["target_connection_info_mg"]
        })

        # Set up container service and project
        self.cmd(("az dms create "
                  "-l {lname} "
                  "-n {sname} "
                  "-g {rg} "
                  "--sku-name {skuname} "
                  "--subnet {vnetid} "
                  "--tags area=cli env=test"))
        self.cmd(("az dms project create "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-l {lname} "
                  "-n {pname} "
                  "--source-platform SQL "
                  "--target-platform SQLDB"))
        self.cmd(("az dms project create "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-l {lname} "
                  "-n {pnamepg} "
                  "--source-platform PostgreSQL "
                  "--target-platform AzureDbForPostgreSQL"))
        self.cmd(("az dms project create "
                  "-g {rg} "
                  "--service-name {sname} "
                  "-l {lname} "
                  "-n {pnamemg} "
                  "--source-platform MongoDb "
                  "--target-platform MongoDb "))

        # Showing a not-yet-created task must fail for every project type.
        self.cmd(("az dms project task show "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pname} "
                  "-n {tname1}"),
                 expect_failure=True)
        self.cmd(("az dms project task show "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pnamepg} "
                  "-n {tnamepg}"),
                 expect_failure=True)
        self.cmd(("az dms project task show "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pnamemg} "
                  "-n {tnamemgv}"),
                 expect_failure=True)

        create_checks = [JMESPathCheck('name', local_vars["task_name1"]),
                         JMESPathCheck('resourceGroup', resource_group),
                         JMESPathCheck('type', 'Microsoft.DataMigration/services/projects/tasks'),
                         JMESPathCheck('length(properties.input.selectedDatabases[0].tableMap)', 2),
                         JMESPathCheck('properties.input.sourceConnectionInfo.dataSource', 'notarealsourceserver'),
                         JMESPathCheck('properties.input.targetConnectionInfo.dataSource', 'notarealtargetserver'),
                         JMESPathCheck('properties.taskType', 'Migrate.SqlServer.SqlDb'),
                         JMESPathCheck('properties.input.validationOptions.enableDataIntegrityValidation', False),
                         JMESPathCheck('properties.input.validationOptions.enableQueryAnalysisValidation', False),
                         JMESPathCheck('properties.input.validationOptions.enableSchemaValidation', False)]
        # The cancel may still be in flight, hence the Canceled|Canceling pattern.
        cancel_checks = [JMESPathCheck('name', local_vars["task_name1"]),
                         JMESPathPatternCheck('properties.state', 'Cancel(?:ed|ing)')]

        create_checks_pg = [JMESPathCheck('name', local_vars["task_name_pg"]),
                            JMESPathCheck('resourceGroup', resource_group),
                            JMESPathCheck('type', 'Microsoft.DataMigration/services/projects/tasks'),
                            JMESPathCheck('length(properties.input.selectedDatabases[0].selectedTables)', 2),
                            JMESPathCheck('properties.input.sourceConnectionInfo.serverName', 'notarealsourceserver'),
                            JMESPathCheck('properties.input.sourceConnectionInfo.encryptConnection', False),
                            JMESPathCheck('properties.input.sourceConnectionInfo.trustServerCertificate', True),
                            JMESPathCheck('properties.input.targetConnectionInfo.serverName', 'notarealtargetserver'),
                            JMESPathCheck('properties.input.targetConnectionInfo.encryptConnection', True),
                            JMESPathCheck('properties.input.targetConnectionInfo.trustServerCertificate', False),
                            JMESPathCheck('properties.taskType', 'Migrate.PostgreSql.AzureDbForPostgreSql.SyncV2')]
        cancel_checks_pg = [JMESPathCheck('name', local_vars["task_name_pg"]),
                            JMESPathPatternCheck('properties.state', 'Cancel(?:ed|ing)')]

        create_checks_mgv = [JMESPathCheck('name', local_vars["task_name_mgv"]),
                             JMESPathCheck('resourceGroup', resource_group),
                             JMESPathCheck('type', 'Microsoft.DataMigration/services/projects/tasks'),
                             JMESPathCheck('properties.input.throttling.maxParallelism', 2),
                             JMESPathCheck('properties.input.throttling.minFreeCpu', 25),
                             JMESPathCheck('properties.input.throttling.minFreeMemoryMb', 1024),
                             JMESPathCheck('properties.input.replication', 'OneTime'),
                             JMESPathCheck('length(properties.input.databases)', 2),
                             JMESPathCheck('properties.input.databases.db1.targetRUs', 0),
                             JMESPathCheck('length(properties.input.databases.db1.collections)', 2),
                             JMESPathCheck('properties.input.databases.db1.collections.cdb11.canDelete', True),
                             JMESPathCheck('properties.input.databases.db1.collections.cdb11.shardKey', 'None'),
                             JMESPathCheck('properties.input.databases.db1.collections.cdb11.targetRUs', 'None'),
                             JMESPathCheck('properties.input.databases.db1.collections.cdb12.canDelete', True),
                             JMESPathCheck('properties.input.databases.db1.collections.cdb12.shardKey', 'None'),
                             JMESPathCheck('properties.input.databases.db1.collections.cdb12.targetRUs', 'None'),
                             JMESPathCheck('properties.input.databases.db2.targetRUs', 0),
                             JMESPathCheck('length(properties.input.databases.db2.collections)', 2),
                             JMESPathCheck('properties.input.databases.db2.collections.cdb21.canDelete', True),
                             JMESPathCheck('properties.input.databases.db2.collections.cdb21.shardKey', 'None'),
                             JMESPathCheck('properties.input.databases.db2.collections.cdb21.targetRUs', 'None'),
                             JMESPathCheck('properties.input.databases.db2.collections.cdb22.canDelete', True),
                             JMESPathCheck('properties.input.databases.db2.collections.cdb22.shardKey', 'None'),
                             JMESPathCheck('properties.input.databases.db2.collections.cdb22.targetRUs', 'None'),
                             JMESPathCheck('properties.input.source.type', 'MongoDbConnectionInfo'),
                             JMESPathCheck('properties.input.source.userName', 'mongoadmin'),
                             JMESPathCheck('properties.input.target.type', 'MongoDbConnectionInfo'),
                             JMESPathCheck('properties.input.target.userName', 'mongoadmin'),
                             JMESPathCheck('properties.taskType', 'Validate.MongoDb')]

        # SQL Tests
        self.cmd(("az dms project task create "
                  "--task-type OfflineMigration "
                  "--database-options-json \"{dboptions1}\" "
                  "-n {tname1} "
                  "--project-name {pname} "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--source-connection-json \"{sconn}\" "
                  "--target-connection-json \"{tconn}\""),
                 checks=create_checks)
        self.cmd(("az dms project task show "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pname} "
                  "-n {tname1}"),
                 checks=create_checks)
        self.cmd(("az dms project task cancel "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pname} "
                  "-n {tname1}"),
                 checks=cancel_checks)

        # PG Tests
        self.cmd(("az dms project task create "
                  "--task-type OnlineMigration "
                  "--database-options-json \"{dboptionspg}\" "
                  "-n {tnamepg} "
                  "--project-name {pnamepg} "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--source-connection-json \"{sconnpg}\" "
                  "--target-connection-json \"{tconnpg}\""),
                 checks=create_checks_pg)
        self.cmd(("az dms project task show "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pnamepg} "
                  "-n {tnamepg}"),
                 checks=create_checks_pg)
        self.cmd(("az dms project task cancel "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pnamepg} "
                  "-n {tnamepg}"),
                 checks=cancel_checks_pg)

        # Second SQL task so the later list command sees two tasks.
        self.cmd(("az dms project task create "
                  "--task-type OfflineMigration "
                  "--database-options-json \"{dboptions2}\" "
                  "-n {tname2} "
                  "--project-name {pname} "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--source-connection-json \"{sconn}\" "
                  "--target-connection-json \"{tconn}\""))

        # Mongo Tests
        self.cmd(("az dms project task create "
                  "--task-type OfflineMigration "
                  "--database-options-json '{dboptionsmg}' "
                  "-n {tnamemgv} "
                  "--project-name {pnamemg} "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--source-connection-json \"{sconnmg}\" "
                  "--target-connection-json \"{tconnmg}\" "
                  "--validate-only"),
                 checks=create_checks_mgv)
        self.cmd(("az dms project task show "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pnamemg} "
                  "-n {tnamemgv}"),
                 checks=create_checks_mgv)

        list_checks = [JMESPathCheck('length(@)', 2),
                       JMESPathCheck("length([?name == '{}'])".format(local_vars["task_name1"]), 1)]
        self.cmd(("az dms project task list "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pname} "
                  "--task-type \"Migrate.SqlServer.SqlDb\""),
                 checks=list_checks)

        # Name availability flips after the task is deleted.
        self.cmd(("az dms project task check-name "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pname} "
                  "-n {tname1}"),
                 checks=self.name_exists_checks)
        self.cmd(("az dms project task delete "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pname} "
                  "-n {tname1} "
                  "--delete-running-tasks true -y"))
        self.cmd(("az dms project task check-name "
                  "-g {rg} "
                  "--service-name {sname} "
                  "--project-name {pname} "
                  "-n {tname1}"),
                 checks=self.name_available_checks)

        # Clean up service for live runs
        self.cmd(("az dms delete "
                  "-g {rg} "
                  "-n {sname} "
                  "--delete-running-tasks true -y"))
a32f553a10644f85b925b0cfabf9b2d8de56158d | 674 | py | Python | applications/CoSimulationApplication/custom_data_structure/pyKratos/Element.py | lcirrott/Kratos | 8406e73e0ad214c4f89df4e75e9b29d0eb4a47ea | [
"BSD-4-Clause"
] | 2 | 2019-10-25T09:28:10.000Z | 2019-11-21T12:51:46.000Z | applications/CoSimulationApplication/custom_data_structure/pyKratos/Element.py | lcirrott/Kratos | 8406e73e0ad214c4f89df4e75e9b29d0eb4a47ea | [
"BSD-4-Clause"
] | 13 | 2019-10-07T12:06:51.000Z | 2020-02-18T08:48:33.000Z | applications/CoSimulationApplication/custom_data_structure/pyKratos/Element.py | lcirrott/Kratos | 8406e73e0ad214c4f89df4e75e9b29d0eb4a47ea | [
"BSD-4-Clause"
] | null | null | null | from __future__ import print_function, absolute_import, division # makes these scripts backward compatible with python 2.6 and 2.7
# pyKratos imports
from .data_value_container import DataValueContainer
class Element(DataValueContainer):
def __init__(self, elem_id, nodes):
super(Element, self).__init__()
self.Id = elem_id
self.__nodes = nodes
self.__variables = {}
def GetNode(self, node_index):
return self.__nodes[node_index]
def GetNodes(self):
return self.__nodes
def Initialize(self):
pass
def __str__(self):
return "Element #{} with {}".format(self.Id, self.__variables)
| 24.962963 | 131 | 0.681009 |
1c015235c0ace1dcd81812a3a61972adfeb260be | 6,489 | py | Python | ggshield/filter.py | segudev/ggshield | d880ca58d6405f81dd57e1151f337010a019bcda | [
"MIT"
] | null | null | null | ggshield/filter.py | segudev/ggshield | d880ca58d6405f81dd57e1151f337010a019bcda | [
"MIT"
] | null | null | null | ggshield/filter.py | segudev/ggshield | d880ca58d6405f81dd57e1151f337010a019bcda | [
"MIT"
] | null | null | null | import hashlib
import math
import operator
import re
from collections import OrderedDict
from typing import Dict, Iterable, List, Optional, Set
import click
from pygitguardian.models import Match, PolicyBreak, ScanResult
from ggshield.config_types import IgnoredMatch
REGEX_MATCH_HIDE = re.compile(r"[^+\-\s]")
REGEX_SPECIAL_CHARS = set(".^$+*?{}()[]\\|")
INVALID_PATTERNS_REGEX = re.compile(
r"(\*\*\*)" # the "***" sequence is not valid
r"|(\*\*[^/])" # a "**" sequence must be immediately followed by a "/"
r"|([^/]\*\*)" # a "**" sequence must be either at the start of the string or
# immediately preceded by a "/"
)
MAXIMUM_CENSOR_LENGTH = 60
def is_ignored(
policy_break: PolicyBreak,
all_policies: bool,
matches_ignore: Iterable[IgnoredMatch],
) -> bool:
"""
is_ignored checks if a occurrence is ignored.
There are 2 ways of ignoring a occurrence:
- matching the occurrence sha
- matching one of the match.match values
:param policy_break: Policy Break occurrence to judge
:param matches_ignore: Iterable of match ignores
:return: True if ignored
"""
matches_ignore = [
match["match"] if isinstance(match, dict) else match for match in matches_ignore
]
if not all_policies and policy_break.policy.lower() != "secrets detection":
return True
if get_ignore_sha(policy_break) in matches_ignore or any(
match.match in matches_ignore for match in policy_break.matches
):
return True
return False
def remove_ignored_from_result(
scan_result: ScanResult, all_policies: bool, matches_ignore: Iterable[IgnoredMatch]
) -> None:
"""
remove_ignored removes occurrences from a Scan Result based on a sha
made from its matches.
:param scan_result: ScanResult to filter
:param matches_ignore: match SHAs or plaintext matches to filter out
"""
scan_result.policy_breaks = [
policy_break
for policy_break in scan_result.policy_breaks
if not is_ignored(policy_break, all_policies, matches_ignore)
]
scan_result.policy_break_count = len(scan_result.policy_breaks)
def remove_results_from_banlisted_detectors(
scan_result: ScanResult,
banlisted_detectors: Optional[Set[str]] = None,
) -> None:
if not banlisted_detectors:
return
scan_result.policy_breaks = [
policy_break
for policy_break in scan_result.policy_breaks
if policy_break.break_type not in banlisted_detectors
]
scan_result.policy_break_count = len(scan_result.policy_breaks)
def get_ignore_sha(policy_break: PolicyBreak) -> str:
hashable = "".join(
[
f"{match.match},{match.match_type}"
for match in sorted(
policy_break.matches, key=operator.attrgetter("match_type")
)
]
)
return hashlib.sha256(hashable.encode("UTF-8")).hexdigest()
def leak_dictionary_by_ignore_sha(
policy_breaks: List[PolicyBreak],
) -> Dict[str, List[PolicyBreak]]:
"""
leak_dictionary_by_ignore_sha sorts matches and incidents by
first appearance in file.
sort incidents by first appearance on file,
file wide matches have no index
so give it -1 so they get bumped to the top
:return: Dictionary with line number as index and a list of
matches that start on said line.
"""
policy_breaks.sort(
key=lambda x: min( # type: ignore
(match.index_start if match.index_start else -1 for match in x.matches)
)
)
sha_dict: Dict[str, List[PolicyBreak]] = OrderedDict()
for policy_break in policy_breaks:
policy_break.matches.sort(key=lambda x: x.index_start if x.index_start else -1)
ignore_sha = get_ignore_sha(policy_break)
sha_dict.setdefault(ignore_sha, []).append(policy_break)
return sha_dict
def translate_user_pattern(pattern: str) -> str:
"""
Translate the user pattern into a regex. This function assumes that the given
pattern is valid and has been normalized beforehand.
"""
# Escape each special character
pattern = "".join(
f"\\{char}" if char in REGEX_SPECIAL_CHARS else char for char in pattern
)
# Handle start/end of pattern
if pattern[-1] != "/":
pattern += "$"
if pattern[0] == "/":
pattern = "^" + pattern[1:]
else:
pattern = "(^|/)" + pattern
# Replace * and ** sequences
pattern = re.sub(r"\\\*\\\*/", "([^/]+/)*", pattern)
pattern = re.sub(r"\\\*", "([^/]+)", pattern)
return pattern
def is_pattern_valid(pattern: str) -> bool:
return bool(pattern) and not INVALID_PATTERNS_REGEX.search(pattern)
def init_exclusion_regexes(paths_ignore: Iterable[str]) -> Set[re.Pattern]:
"""
filter_set creates a set of paths of the ignored
entries from 3 sources:
.gitguardian.yaml
files in .git
files ignore in .gitignore
"""
res = set()
for path in paths_ignore:
if not is_pattern_valid(path):
raise click.ClickException(f"{path} is not a valid exclude pattern.")
res.add(re.compile(translate_user_pattern(path)))
return res
def is_filepath_excluded(filepath: str, exclusion_regexes: Set[re.Pattern]) -> bool:
return any(r.search(filepath) for r in exclusion_regexes)
def censor_match(match: Match) -> str:
"""
censored_match censors a match value revealing only the first and last
1/6th of the match up to a maximum of MAXIMUM_CENSOR_LENGTH.
:return: match value censored
"""
len_match = len(match.match)
start_privy_len = min(math.ceil(len_match / 6), MAXIMUM_CENSOR_LENGTH)
end_privy_len = len_match - min(math.ceil(len_match / 6), MAXIMUM_CENSOR_LENGTH)
censored = REGEX_MATCH_HIDE.sub("*", match.match)
return str(
match.match[:start_privy_len]
+ censored[start_privy_len:end_privy_len]
+ match.match[end_privy_len:]
)
def censor_content(content: str, policy_breaks: List[PolicyBreak]) -> str:
for policy_break in policy_breaks:
for match in policy_break.matches:
if match.index_start is None:
continue
match.match = censor_match(match)
content = "".join(
(
content[: match.index_start],
match.match,
content[len(match.match) + match.index_start :],
)
)
return content
| 29.903226 | 88 | 0.661889 |
ab67bd3946844bcb4f28d45d0ca5758248cf4ff9 | 7,489 | py | Python | python_modules/libraries/dagster-gcp/dagster_gcp_tests/gcs_tests/test_compute_log_manager.py | camvogel/dagster | b4df94bf34906e7f81c973a7fdad5429ae3697ba | [
"Apache-2.0"
] | null | null | null | python_modules/libraries/dagster-gcp/dagster_gcp_tests/gcs_tests/test_compute_log_manager.py | camvogel/dagster | b4df94bf34906e7f81c973a7fdad5429ae3697ba | [
"Apache-2.0"
] | null | null | null | python_modules/libraries/dagster-gcp/dagster_gcp_tests/gcs_tests/test_compute_log_manager.py | camvogel/dagster | b4df94bf34906e7f81c973a7fdad5429ae3697ba | [
"Apache-2.0"
] | 1 | 2021-12-08T18:13:19.000Z | 2021-12-08T18:13:19.000Z | import os
import sys
import tempfile
from dagster import DagsterEventType, job, op
from dagster.core.instance import DagsterInstance, InstanceRef, InstanceType
from dagster.core.launcher import DefaultRunLauncher
from dagster.core.run_coordinator import DefaultRunCoordinator
from dagster.core.storage.compute_log_manager import ComputeIOType
from dagster.core.storage.event_log import SqliteEventLogStorage
from dagster.core.storage.root import LocalArtifactStorage
from dagster.core.storage.runs import SqliteRunStorage
from dagster.core.test_utils import environ
from dagster_gcp.gcs import GCSComputeLogManager
from google.cloud import storage
HELLO_WORLD = "Hello World"
SEPARATOR = os.linesep if (os.name == "nt" and sys.version_info < (3,)) else "\n"
EXPECTED_LOGS = [
'STEP_START - Started execution of step "easy".',
'STEP_OUTPUT - Yielded output "result" of type "Any"',
'STEP_SUCCESS - Finished execution of step "easy"',
]
def test_compute_log_manager(gcs_bucket):
@job
def simple():
@op
def easy(context):
context.log.info("easy")
print(HELLO_WORLD) # pylint: disable=print-call
return "easy"
easy()
with tempfile.TemporaryDirectory() as temp_dir:
with environ({"DAGSTER_HOME": temp_dir}):
run_store = SqliteRunStorage.from_local(temp_dir)
event_store = SqliteEventLogStorage(temp_dir)
manager = GCSComputeLogManager(
bucket=gcs_bucket, prefix="my_prefix", local_dir=temp_dir
)
instance = DagsterInstance(
instance_type=InstanceType.PERSISTENT,
local_artifact_storage=LocalArtifactStorage(temp_dir),
run_storage=run_store,
event_storage=event_store,
compute_log_manager=manager,
run_coordinator=DefaultRunCoordinator(),
run_launcher=DefaultRunLauncher(),
ref=InstanceRef.from_dir(temp_dir),
)
result = simple.execute_in_process(instance=instance)
compute_steps = [
event.step_key
for event in result.all_node_events
if event.event_type == DagsterEventType.STEP_START
]
assert len(compute_steps) == 1
step_key = compute_steps[0]
stdout = manager.read_logs_file(result.run_id, step_key, ComputeIOType.STDOUT)
assert stdout.data == HELLO_WORLD + SEPARATOR
stderr = manager.read_logs_file(result.run_id, step_key, ComputeIOType.STDERR)
for expected in EXPECTED_LOGS:
assert expected in stderr.data
# Check GCS directly
stderr_gcs = (
storage.Client()
.get_bucket(gcs_bucket)
.blob(f"my_prefix/storage/{result.run_id}/compute_logs/easy.err")
.download_as_bytes()
.decode("utf-8")
)
for expected in EXPECTED_LOGS:
assert expected in stderr_gcs
# Check download behavior by deleting locally cached logs
compute_logs_dir = os.path.join(temp_dir, result.run_id, "compute_logs")
for filename in os.listdir(compute_logs_dir):
os.unlink(os.path.join(compute_logs_dir, filename))
stdout = manager.read_logs_file(result.run_id, step_key, ComputeIOType.STDOUT)
assert stdout.data == HELLO_WORLD + SEPARATOR
stderr = manager.read_logs_file(result.run_id, step_key, ComputeIOType.STDERR)
for expected in EXPECTED_LOGS:
assert expected in stderr.data
def test_compute_log_manager_with_envvar(gcs_bucket):
@job
def simple():
@op
def easy(context):
context.log.info("easy")
print(HELLO_WORLD) # pylint: disable=print-call
return "easy"
easy()
with open(os.environ.get("GOOGLE_APPLICATION_CREDENTIALS")) as f:
with tempfile.TemporaryDirectory() as temp_dir:
with environ({"ENV_VAR": f.read(), "DAGSTER_HOME": temp_dir}):
run_store = SqliteRunStorage.from_local(temp_dir)
event_store = SqliteEventLogStorage(temp_dir)
manager = GCSComputeLogManager(
bucket=gcs_bucket,
prefix="my_prefix",
local_dir=temp_dir,
json_credentials_envvar="ENV_VAR",
)
instance = DagsterInstance(
instance_type=InstanceType.PERSISTENT,
local_artifact_storage=LocalArtifactStorage(temp_dir),
run_storage=run_store,
event_storage=event_store,
compute_log_manager=manager,
run_coordinator=DefaultRunCoordinator(),
run_launcher=DefaultRunLauncher(),
ref=InstanceRef.from_dir(temp_dir),
)
result = simple.execute_in_process(instance=instance)
compute_steps = [
event.step_key
for event in result.all_node_events
if event.event_type == DagsterEventType.STEP_START
]
assert len(compute_steps) == 1
step_key = compute_steps[0]
stdout = manager.read_logs_file(result.run_id, step_key, ComputeIOType.STDOUT)
assert stdout.data == HELLO_WORLD + SEPARATOR
stderr = manager.read_logs_file(result.run_id, step_key, ComputeIOType.STDERR)
for expected in EXPECTED_LOGS:
assert expected in stderr.data
# Check GCS directly
stderr_gcs = (
storage.Client()
.get_bucket(gcs_bucket)
.blob(f"my_prefix/storage/{result.run_id}/compute_logs/easy.err")
.download_as_bytes()
.decode("utf-8")
)
for expected in EXPECTED_LOGS:
assert expected in stderr_gcs
# Check download behavior by deleting locally cached logs
compute_logs_dir = os.path.join(temp_dir, result.run_id, "compute_logs")
for filename in os.listdir(compute_logs_dir):
os.unlink(os.path.join(compute_logs_dir, filename))
stdout = manager.read_logs_file(result.run_id, step_key, ComputeIOType.STDOUT)
assert stdout.data == HELLO_WORLD + SEPARATOR
stderr = manager.read_logs_file(result.run_id, step_key, ComputeIOType.STDERR)
for expected in EXPECTED_LOGS:
assert expected in stderr.data
def test_compute_log_manager_from_config(gcs_bucket):
s3_prefix = "foobar"
dagster_yaml = """
compute_logs:
module: dagster_gcp.gcs.compute_log_manager
class: GCSComputeLogManager
config:
bucket: "{bucket}"
local_dir: "/tmp/cool"
prefix: "{prefix}"
""".format(
bucket=gcs_bucket, prefix=s3_prefix
)
with tempfile.TemporaryDirectory() as tempdir:
with open(os.path.join(tempdir, "dagster.yaml"), "wb") as f:
f.write(dagster_yaml.encode("utf-8"))
instance = DagsterInstance.from_config(tempdir)
assert isinstance(instance.compute_log_manager, GCSComputeLogManager)
| 39.415789 | 94 | 0.615837 |
c8292952267078133c5de9acbeaa95dcb3c63e48 | 800 | py | Python | neutron/services/l3_router/service_providers/single_node.py | congnt95/neutron | 6a73a362c5ff5b7c28c15a49f47a9900c0d2b4e1 | [
"Apache-2.0"
] | 1,080 | 2015-01-04T08:35:00.000Z | 2022-03-27T09:15:52.000Z | neutron/services/l3_router/service_providers/single_node.py | congnt95/neutron | 6a73a362c5ff5b7c28c15a49f47a9900c0d2b4e1 | [
"Apache-2.0"
] | 24 | 2015-02-21T01:48:28.000Z | 2021-11-26T02:38:56.000Z | neutron/services/l3_router/service_providers/single_node.py | congnt95/neutron | 6a73a362c5ff5b7c28c15a49f47a9900c0d2b4e1 | [
"Apache-2.0"
] | 1,241 | 2015-01-02T10:47:10.000Z | 2022-03-27T09:42:23.000Z | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.services.l3_router.service_providers import base
class SingleNodeDriver(base.L3ServiceProvider):
"""Provider for single L3 agent routers."""
use_integrated_agent_scheduler = True
| 38.095238 | 78 | 0.74375 |
bfe71dbc894f90eb36b81a20d58668f67621c9ca | 9,668 | py | Python | W2U_U2W/win2uni.py | EiEiKyaw/Right | 710665ce0ee132f56249530cf1389b4b7fabb9be | [
"MIT"
] | null | null | null | W2U_U2W/win2uni.py | EiEiKyaw/Right | 710665ce0ee132f56249530cf1389b4b7fabb9be | [
"MIT"
] | null | null | null | W2U_U2W/win2uni.py | EiEiKyaw/Right | 710665ce0ee132f56249530cf1389b4b7fabb9be | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import re
def replace(input):
output = input
output = output.replace(u'\u004F', u'\u1025') # NyaLay...
output = output.replace(u'\u00CD', u'\u1009') # NyaLay...
output = output.replace(u'\u00DA', u'\u1009')
output = output.replace(u'\u0075', u'\u1000') # ka
output = output.replace(u'\u0063', u'\u1001') # kha
output = output.replace(u'\u002A', u'\u1002') # ga
output = output.replace(u'\u0043', u'\u1003') # ga-gyi
output = output.replace(u'\u0069', u'\u1004') # nga
output = output.replace(u'\u0070', u'\u1005') # sa-lone
output = output.replace(u'\u0071', u'\u1006') # sa-lane
output = output.replace(u'\u005A', u'\u1007') # za
output = output.replace(u'\u006E', u'\u100A') # nya
output = output.replace(u'\u00F1', u'\u100A') # nya
output = output.replace(u'\u0023', u'\u100B') # tatlin
output = output.replace(u'\u0058', u'\u100C') # hta-won-bae
output = output.replace(u'\u0021', u'\u100D') # da-yin-kaut
output = output.replace(u'\u00A1', u'\u100E') # da-yin-mote
output = output.replace(u'\u0050', u'\u100F') # na-gyi
output = output.replace(u'\u0077', u'\u1010') # ta
output = output.replace(u'\u0078', u'\u1011') # hta
output = output.replace(u'\u0027', u'\u1012') # da-dwe
output = output.replace(u'\u0022', u'\u1013') # da-out-cha
output = output.replace(u'\u0065', u'\u1014') # na-nge
output = output.replace(u'\u0045', u'\u1014') # na-nge
output = output.replace(u'\u0079', u'\u1015') # pa
output = output.replace(u'\u007A', u'\u1016') # pa
output = output.replace(u'\u0041', u'\u1017') # ba-chite
output = output.replace(u'\u0062', u'\u1018') # ba
output = output.replace(u'\u0072', u'\u1019') # ma
output = output.replace(u'\u002C', u'\u101A') # ya-palat
output = output.replace(u'\u0026', u'\u101B') # ya-kaut
output = output.replace(u'\u00BD', u'\u101B') # ya-kaut
output = output.replace(u'\u0076', u'\u101C') # la
output = output.replace(u'\u0030', u'\u101D') # wa
output = output.replace(u'\u006F', u'\u101E') # tha
output = output.replace(u'\u005B', u'\u101F') # ha
output = output.replace(u'\u0056', u'\u1020') # la-gyi
output = output.replace(u'\u0074', u'\u1021') # art
output = output.replace(u'\u00A3', u'\u1023') # ka-ku
output = output.replace(u'\u00FE', u'\u1024') # e
output = output.replace(u'\u007B', u'\u1027') # aye
output = output.replace(u'\u00FC', u'\u104C') # hnice
output = output.replace(u'\u00ED', u'\u104D') # ywae
output = output.replace(u'\u00A4', u'\u104E') # la-gaung
output = output.replace(u'\u005C', u'\u104F') # ei
output = output.replace(u'\u00F3', u'\u103F') # tha-gyi
#-------------------------------------------------------------- Byee ---------------------------------------------------
output = output.replace(u'\u0031', u'\u1041') # 1
output = output.replace(u'\u0032', u'\u1042') # 2
output = output.replace(u'\u0033', u'\u1043') # 3
output = output.replace(u'\u0034', u'\u1044') # 4
output = output.replace(u'\u0035', u'\u1045') # 5
output = output.replace(u'\u0036', u'\u1046') # 6
output = output.replace(u'\u0037', u'\u1047') # 7
output = output.replace(u'\u0038', u'\u1048') # 8
output = output.replace(u'\u0039', u'\u1049') # 9
output = output.replace(u'\u003F', u'\u104A') # 1chaung-pote
output = output.replace(u'\u002F', u'\u104B') # 2chaung-pote
#----------------------------------------------------------------------------------------------------------------------
output = output.replace(u'\u0067', u'\u102B') # yay-char
output = output.replace(u'\u006D', u'\u102C') # yay-char-ato
output = output.replace(u'\u0064', u'\u102D') # lone-gyi-tin
output = output.replace(u'\u0044', u'\u102E') # lgt-sk
output = output.replace(u'\u004B', u'\u102F') # 1chaung
output = output.replace(u'\u006B', u'\u102F') # 1chaung-ato
output = output.replace(u'\u004C', u'\u1030') # 2chaung
output = output.replace(u'\u006C', u'\u1030') # 2chaung-ato
output = output.replace(u'\u0061', u'\u1031') # tha-wai
output = output.replace(u'\u004A', u'\u1032') # naut-pyit
output = output.replace(u'\u0048', u'\u1036') # ttt
output = output.replace(u'\u0055', u'\u1037') # out-ka-myit
output = output.replace(u'\u0059', u'\u1037') # out-ka-myit
output = output.replace(u'\u0068', u'\u1037') # out-ka-myit
output = output.replace(u'\u003B', u'\u1038') # wit-sa-nalone-pauk
output = output.replace(u'\u0066', u'\u103A') # athet
output = output.replace(u'\u00DF', u'\u103B') # ya-pit
output = output.replace(u'\u0073', u'\u103B')
output = output.replace(u'\u0042', u'\u103C') # ya-yit
output = output.replace(u'\u004D', u'\u103C') # ya-yit
output = output.replace(u'\u004E', u'\u103C') # ya-
output = output.replace(u'\u0060', u'\u103C') # ya-yit
output = output.replace(u'\u006A', u'\u103C') # ya-yit
output = output.replace(u'\u007E', u'\u103C') # ya-yit
output = output.replace(u'\u0047', u'\u103D') # wa-swe
output = output.replace(u'\u00A7', u'\u103E') # ha-htoe
output = output.replace(u'\u0053', u'\u103E') # ha-htoe
return output
def decompose(input):
output = input
output = re.sub(u'([\u1000-\u1021])\u0046', u'\u0046\\1', output) # up-ngathet
output = re.sub(u'([\u1000-\u1021])\u00d0', u'\u0046\\1\u102e', output) # with-lgtsk
output = re.sub(u'([\u1000-\u1021])\u00d8', u'\u0046\\1\u102d', output) # with-lonegyitin
output = re.sub(u'([\u1000-\u1021])\u00f8', u'\u0046\\1\u1036', output) # with-ttt
output = re.sub(u'\u0070\u0073', u'\u1008', output) # za-myin-zwe
output = re.sub(u'\u1005\u103b', u'\u1008', output) # za-myin-zwe
output = re.sub(u'\u0046', u'\u1004\u103A\u1039', output)# up-ngathet
output = re.sub(u'\u003A', u'\u102B' + u'\u103A', output)# yaychar-htoe
output = re.sub(u'\u0054', u'\u103D' + u'\u103E', output)# wa ha-htoe
output = re.sub(u'\u0049', u'\u103E' + u'\u102F', output)# hahtoe1chaung
output = re.sub(u'\u00AA', u'\u103E' + u'\u1030', output)# hahtoe2chaung
output = re.sub(u'\u0051', u'\u103B' + u'\u103E', output)# yapit-hahtoe
output = re.sub(u'\u0052', u'\u103B' + u'\u103D', output)# yapit-waswe
output = re.sub(u'\u0057', u'\u103B' + u'\u103D' + u'\u103E', output)# yapit-waswe-hahtoe
output = re.sub(u'\u00F0', u'\u102D' + u'\u1036', output)# lgt-ttt
output = re.sub(u'\u00D8', u'\u1004' + u'\u103A' + u'\u1039' + u'\u102D', output)# ngathet-lgt
output = re.sub(u'\u00D0', u'\u1004' + u'\u103A' + u'\u1039' + u'\u102E', output)# ngathet-lgtsk
output = re.sub(u'\u00F8', u'\u1004' + u'\u103A' + u'\u1039' + u'\u1036', output)# ngathet-ttt
output = re.sub(u'[\u003C\u003E]', u'\u103C' + u'\u103D', output)# yayit-waswe
output = re.sub(u'\u00FB', u'\u103C' + u'\u102F', output)# yayit-1chaung
output = re.sub(u'\u00D3', u'\u1009' + u'\u102C', output) # nya-lay-yaychar
output = re.sub(u'\u004F\u0044', u'\u1025\u102E', output) # nya-lay-lgtsk
output = re.sub(u'\u1025\u102E', u'\u1026', output) # nya-lay-lgtsk
output = re.sub(u'\u004F\u0044', u'\u1026', output) # nya-lay-lgtsk
#----------------------------------------------------- A Sint --------------------------------------------------------------------
output = re.sub(u'\u00FA', u'\u1039\u1000', output)# ka
output = re.sub(u'\u00A9', u'\u1039\u1001', output)# kha
output = re.sub(u'\u00BE', u'\u1039\u1002', output)# ga-nge
output = re.sub(u'\u00A2', u'\u1039\u1003', output)# ga-gyi
output = re.sub(u'\u00F6', u'\u1039\u1005', output)# sa-lone
output = re.sub(u'\u00E4', u'\u1039\u1006', output)# sa-lane
output = re.sub(u'\u00C6', u'\u1039\u1007', output)# za
output = re.sub(u'\u00D1', u'\u1039\u1008', output)# za-zwe
output = re.sub(u'\u00B3', u'\u1039\u100B', output)# tatalin
output = re.sub(u'\u00D6', u'\u1039\u100F', output)# na-gyi
output = re.sub(u'\u00C5', u'\u1039\u1010', output)# ta
output = re.sub(u'\00AC', u'\u1039\u1011', output)# hta
output = re.sub(u'\u00B4', u'\u1039\u1012', output)# da-dwe
output = re.sub(u'\u00A8', u'\u1039\u1013', output)# da-out
output = re.sub(u'\u00E9', u'\u1039\u1014', output)# na
output = re.sub(u'\u00DC', u'\u1039\u1015', output)# pa
output = re.sub(u'\u00E6', u'\u1039\u1016', output)# pha
output = re.sub(u'\u00C1', u'\u1039\u1017', output)# ba-htet
output = re.sub(u'\u00C7', u'\u1039\u1018', output)# ba
output = re.sub(u'\u00AE', u'\u1039\u1019', output)# ma
output = re.sub(u'\u2019', u'\u1039\u101C', output)# la
output = re.sub(u'\u00B2', u'\u1039\u100C', output)# htawonbae
#----------------------------------------------------------------------------------------------------------------------
output = re.sub(u'\u0040', u'\u100F\u1039\u100D', output)# gan-da
output = re.sub(u'\u00A5', u'\u100B\u1039\u100B', output)# tatalin
output = re.sub(u'\u00B9', u'\u100D\u1039\u100E', output)# dayin-hmote
output = re.sub(u'\u00D7', u'\u100D\u1039\u100D', output)# dayin-kaut
return output
def visual2logical(input):
output = input
output = re.sub(u'((?:\u1031)?)((?:\u103c)?)([\u1000-\u1021])((?:\u103b)?)((?:\u103d)?)((?:\u103e)?)((?:\u1037)?)((?:\u102c)?)','\\3\\2\\4\\5\\6\\1\\7\\8', output)
return output
def convert(input):
output = input
output = replace(output)
output = decompose(output)
output = visual2logical(output)
return output
| 54.931818 | 167 | 0.572818 |
bf3e479df71ca87404df819a94fdb50fc8cae420 | 3,428 | py | Python | pactools/utils/arma.py | mathurinm/pactools | 2ad08061c69378368137a26e0519ce3ce6e5c7bd | [
"BSD-3-Clause"
] | 61 | 2017-03-22T17:03:27.000Z | 2022-03-29T02:38:57.000Z | pactools/utils/arma.py | mathurinm/pactools | 2ad08061c69378368137a26e0519ce3ce6e5c7bd | [
"BSD-3-Clause"
] | 34 | 2017-03-30T18:41:17.000Z | 2021-10-19T09:42:01.000Z | pactools/utils/arma.py | mathurinm/pactools | 2ad08061c69378368137a26e0519ce3ce6e5c7bd | [
"BSD-3-Clause"
] | 28 | 2017-03-30T18:30:15.000Z | 2021-01-03T03:45:13.000Z | import numpy as np
from scipy import signal, linalg, fftpack
from .spectrum import Spectrum
class Arma(Spectrum):
def __init__(self, ordar=2, ordma=0, **kargs):
"""Create an estimator of ARMA model:
y(t) + a(1)y(t-1) + ... + a(ordar)y(t-ordar) =
b(0)e(t) + b(1)e(t-1) + ... + b(ordma)e(t-ordma)
ordar : order of the autogressive part
ordma : order of the moving average part
"""
Spectrum.__init__(self, **kargs)
self.ordar = ordar
self.ordma = ordma
def estimate(self, nbcorr=np.nan, numpsd=-1):
fft_length, _ = self.check_params()
if np.isnan((nbcorr)):
nbcorr = self.ordar
# -------- estimate correlation from psd
full_psd = self.psd[numpsd]
full_psd = np.c_[full_psd, np.conjugate(full_psd[:, :0:-1])]
correl = fftpack.ifft(full_psd[0], fft_length, 0).real
# -------- estimate AR part
col1 = correl[self.ordma:self.ordma + nbcorr]
row1 = correl[np.abs(
np.arange(self.ordma, self.ordma - self.ordar, -1))]
R = linalg.toeplitz(col1, row1)
r = -correl[self.ordma + 1:self.ordma + nbcorr + 1]
AR = linalg.solve(R, r)
self.AR_ = AR
# -------- estimate correlation of MA part
# -------- estimate MA part
if self.ordma == 0:
sigma2 = correl[0] + np.dot(AR, correl[1:self.ordar + 1])
self.MA = np.ones(1) * np.sqrt(sigma2)
else:
raise NotImplementedError(
'arma: estimation of the MA part not yet implemented')
def arma2psd(self, hold=False):
"""Compute the power spectral density of the ARMA model
"""
fft_length, _ = self.check_params()
arpart = np.concatenate((np.ones(1), self.AR_))
psdar = np.abs(fftpack.fft(arpart, fft_length, 0)) ** 2
psdma = np.abs(fftpack.fft(self.MA, fft_length, 0)) ** 2
psd = psdma / psdar
if not hold:
self.psd = []
self.psd.append(psd[None, :fft_length // 2 + 1])
def inverse(self, sigin):
"""Apply the inverse ARMA filter to a signal
sigin : input signal (ndarray)
returns the filtered signal(ndarray)
"""
arpart = np.concatenate((np.ones(1), self.AR_))
return signal.fftconvolve(sigin, arpart, 'same')
def ai2ki(ar):
"""Convert AR coefficients to partial correlations
(inverse Levinson recurrence)
ar : AR models stored by columns
returns the partial correlations (one model by column)
"""
parcor = np.copy(ar)
ordar, n_epochs, n_points = ar.shape
for i in range(ordar - 1, -1, -1):
if i > 0:
parcor[0:i, :, :] -= (parcor[i:i + 1, :, :] *
np.flipud(parcor[0:i, :, :]))
parcor[0:i, :, :] *= 1.0 / (1.0 - parcor[i:i + 1, :, :] ** 2)
return parcor
def ki2ai(parcor):
"""Convert parcor coefficients to autoregressive ones
(Levinson recurrence)
parcor : partial correlations stored by columns
returns the AR models by columns
"""
ar = np.zeros_like(parcor)
ordar, n_epochs, n_points = parcor.shape
for i in range(ordar):
if i > 0:
ar[0:i, :, :] += parcor[i:i + 1, :, :] * np.flipud(ar[0:i, :, :])
ar[i, :, :] = parcor[i, :, :]
# ok, at least in stationary models
return ar
| 30.607143 | 77 | 0.554551 |
eaeaf16d79bd59ce2328a16f998669aec7efff70 | 3,592 | py | Python | huaweicloud-sdk-rds/huaweicloudsdkrds/v3/model/restore_to_existing_instance_request.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | 1 | 2021-11-03T07:54:50.000Z | 2021-11-03T07:54:50.000Z | huaweicloud-sdk-rds/huaweicloudsdkrds/v3/model/restore_to_existing_instance_request.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | null | null | null | huaweicloud-sdk-rds/huaweicloudsdkrds/v3/model/restore_to_existing_instance_request.py | wuchen-huawei/huaweicloud-sdk-python-v3 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
import pprint
import re
import six
class RestoreToExistingInstanceRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'x_language': 'str',
'body': 'RestoreToExistingInstanceRequestBody'
}
attribute_map = {
'x_language': 'X-Language',
'body': 'body'
}
def __init__(self, x_language=None, body=None):
"""RestoreToExistingInstanceRequest - a model defined in huaweicloud sdk"""
self._x_language = None
self._body = None
self.discriminator = None
if x_language is not None:
self.x_language = x_language
if body is not None:
self.body = body
@property
def x_language(self):
"""Gets the x_language of this RestoreToExistingInstanceRequest.
语言
:return: The x_language of this RestoreToExistingInstanceRequest.
:rtype: str
"""
return self._x_language
@x_language.setter
def x_language(self, x_language):
"""Sets the x_language of this RestoreToExistingInstanceRequest.
语言
:param x_language: The x_language of this RestoreToExistingInstanceRequest.
:type: str
"""
self._x_language = x_language
@property
def body(self):
"""Gets the body of this RestoreToExistingInstanceRequest.
:return: The body of this RestoreToExistingInstanceRequest.
:rtype: RestoreToExistingInstanceRequestBody
"""
return self._body
@body.setter
def body(self, body):
"""Sets the body of this RestoreToExistingInstanceRequest.
:param body: The body of this RestoreToExistingInstanceRequest.
:type: RestoreToExistingInstanceRequestBody
"""
self._body = body
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RestoreToExistingInstanceRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 26.411765 | 83 | 0.569042 |
23ddfbc797e0d4b62ea87976805c0768cb953813 | 710 | py | Python | commercialoperator/migrations/0121_auto_20210806_1614.py | GraemeMuller/commercialoperator | 9218fb0a8844bc7f41cc371f4bd9488538df5fda | [
"Apache-2.0"
] | null | null | null | commercialoperator/migrations/0121_auto_20210806_1614.py | GraemeMuller/commercialoperator | 9218fb0a8844bc7f41cc371f4bd9488538df5fda | [
"Apache-2.0"
] | 12 | 2020-02-12T06:26:55.000Z | 2022-02-13T05:52:54.000Z | commercialoperator/migrations/0121_auto_20210806_1614.py | GraemeMuller/commercialoperator | 9218fb0a8844bc7f41cc371f4bd9488538df5fda | [
"Apache-2.0"
] | 8 | 2020-02-24T05:11:18.000Z | 2021-02-26T07:54:24.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2021-08-06 08:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Alters OrganisationContact.user_status: a CharField with a fixed set of
    # status choices (draft/pending/active/declined/unlinked/suspended/
    # contact_form), default 'draft', max 40 characters.
    dependencies = [
        ('commercialoperator', '0120_auto_20210806_1501'),
    ]
    operations = [
        migrations.AlterField(
            model_name='organisationcontact',
            name='user_status',
            field=models.CharField(choices=[('draft', 'Draft'), ('pending', 'Pending'), ('active', 'Active'), ('declined', 'Declined'), ('unlinked', 'Unlinked'), ('suspended', 'Suspended'), ('contact_form', 'ContactForm')], default='draft', max_length=40, verbose_name='Status'),
        ),
    ]
| 33.809524 | 279 | 0.639437 |
4d72dc4af01008d55a64050d3a1ba6bef3236077 | 24,957 | py | Python | scripts/geodata/geonames/create_geonames_tsv.py | Fillr/libpostal | bce153188aff9fbe65aef12c3c639d8069e707fc | [
"MIT"
] | 3,489 | 2015-03-03T00:21:38.000Z | 2022-03-29T09:03:05.000Z | scripts/geodata/geonames/create_geonames_tsv.py | StephenHildebrand/libpostal | d8c9847c5686a1b66056e65128e1774f060ff36f | [
"MIT"
] | 488 | 2015-05-29T23:04:28.000Z | 2022-03-29T11:20:24.000Z | scripts/geodata/geonames/create_geonames_tsv.py | StephenHildebrand/libpostal | d8c9847c5686a1b66056e65128e1774f060ff36f | [
"MIT"
] | 419 | 2015-11-24T16:53:07.000Z | 2022-03-27T06:51:28.000Z | '''
create_geonames_tsv.py
----------------------
This script formats the open GeoNames database (as well as
its accompanying postal codes data set) into a schema'd
tab-separated value file.
It generates a C header which uses an enum for the field names.
This way if new fields are added or there's a typo, etc. the
error will show up at compile-time.
The relevant C modules which operate on this data are:
geodb_builder.c
geonames.c
As well as the generated headers:
geonames_fields.h
postal_fields.h
'''
import argparse
import csv
import logging
import operator
import os
import re
import sqlite3
import subprocess
import sys
import pycountry
import unicodedata
import urllib
import urlparse
from collections import defaultdict, OrderedDict
from lxml import etree
this_dir = os.path.realpath(os.path.dirname(__file__))
sys.path.append(os.path.realpath(os.path.join(os.pardir, os.pardir)))
from geodata.csv_utils import *
from geodata.file_utils import *
from geodata.countries.country_names import *
from geodata.encoding import safe_encode, safe_decode
from geodata.geonames.paths import DEFAULT_GEONAMES_DB_PATH
from geodata.i18n.languages import *
from geodata.i18n.unicode_paths import CLDR_DIR
from geodata.log import log_to_file
multispace_regex = re.compile('[\s]+')
def encode_field(value):
    """Encode a value for TSV output.

    None becomes the empty string; runs of whitespace are collapsed to a
    single space so embedded tabs/newlines cannot break the TSV format.
    """
    if value is None:
        value = ''
    return multispace_regex.sub(' ', safe_encode(value))
log_to_file(sys.stderr)
DEFAULT_DATA_DIR = os.path.join(this_dir, os.path.pardir, os.path.pardir,
os.path.pardir, 'data', 'geonames')
COUNTRY_FEATURE_CODES = ('PCL', 'PCLI', 'PCLIX', 'PCLD', 'PCLF', 'PCLS')
CONTINENT_FEATURE_CODES = ('CONT',)
ADMIN_1_FEATURE_CODES = ('ADM1',)
ADMIN_2_FEATURE_CODES = ('ADM2',)
ADMIN_3_FEATURE_CODES = ('ADM3',)
ADMIN_4_FEATURE_CODES = ('ADM4',)
OTHER_ADMIN_FEATURE_CODES = ('ADM5',)
ADMIN_OTHER_FEATURE_CODES = ('ADMD', )
POPULATED_PLACE_FEATURE_CODES = ('PPL', 'PPLA', 'PPLA2', 'PPLA3', 'PPLA4',
'PPLC', 'PPLCH', 'PPLF', 'PPLG', 'PPLL',
'PPLR', 'PPLS', 'STLMT')
NEIGHBORHOOD_FEATURE_CODES = ('PPLX', )
class boundary_types:
    # Integer codes for administrative boundary levels, ordered from the
    # largest unit (country) down to the smallest (neighborhood).  The code
    # is written into the TSV's boundary-type column (see create_geonames_tsv,
    # which assigns it to row[DUMMY_BOUNDARY_TYPE_INDEX]).
    COUNTRY = 0
    ADMIN1 = 1
    ADMIN2 = 2
    ADMIN3 = 3
    ADMIN4 = 4
    ADMIN_OTHER = 5
    LOCALITY = 6
    NEIGHBORHOOD = 7
geonames_admin_dictionaries = OrderedDict([
(boundary_types.COUNTRY, COUNTRY_FEATURE_CODES),
(boundary_types.ADMIN1, ADMIN_1_FEATURE_CODES),
(boundary_types.ADMIN2, ADMIN_2_FEATURE_CODES),
(boundary_types.ADMIN3, ADMIN_3_FEATURE_CODES),
(boundary_types.ADMIN4, ADMIN_4_FEATURE_CODES),
(boundary_types.ADMIN_OTHER, ADMIN_OTHER_FEATURE_CODES),
(boundary_types.LOCALITY, POPULATED_PLACE_FEATURE_CODES),
(boundary_types.NEIGHBORHOOD, NEIGHBORHOOD_FEATURE_CODES),
])
# Inserted post-query
DUMMY_BOUNDARY_TYPE = '-1 as type'
DUMMY_HAS_WIKIPEDIA_ENTRY = '0 as has_wikipedia_entry'
DUMMY_LANGUAGE_PRIORITY = '0 as language_priority'
class GeonamesField(object):
    """Metadata for a single column of the generated TSV files.

    :param name: SQL expression selected for this column
    :param c_constant: constant name emitted into the generated C header enum
    :param default: fallback SQL expression used in the canonical-name query
                    (None when the field has no alternate-name variant)
    :param is_dummy: True when the value is a placeholder filled in
                     post-query rather than produced by SQL
    """

    def __init__(self, name, c_constant, default=None, is_dummy=False):
        self.name = name
        self.c_constant = c_constant
        self.default = default
        self.is_dummy = is_dummy

    def __repr__(self):
        # Aid debugging: show which column/constant this object describes.
        return 'GeonamesField({!r}, {!r}, default={!r}, is_dummy={!r})'.format(
            self.name, self.c_constant, self.default, self.is_dummy)
geonames_fields = [
# Field if alternate_names present, default field name if not, C header constant
GeonamesField('alternate_name', 'GEONAMES_NAME', default='gn.name'),
GeonamesField('gn.geonames_id as geonames_id', 'GEONAMES_ID'),
GeonamesField('gn.name as canonical', 'GEONAMES_CANONICAL'),
GeonamesField(DUMMY_BOUNDARY_TYPE, 'GEONAMES_BOUNDARY_TYPE', is_dummy=True),
GeonamesField(DUMMY_HAS_WIKIPEDIA_ENTRY, 'GEONAMES_HAS_WIKIPEDIA_ENTRY', is_dummy=True),
GeonamesField('iso_language', 'GEONAMES_ISO_LANGUAGE', default="''"),
GeonamesField(DUMMY_LANGUAGE_PRIORITY, 'GEONAMES_LANGUAGE_PRIORITY', is_dummy=True),
GeonamesField('is_preferred_name', 'GEONAMES_IS_PREFERRED_NAME', default='0'),
GeonamesField('is_short_name', 'GEONAMES_IS_SHORT_NAME', default='0'),
GeonamesField('is_colloquial', 'GEONAMES_IS_COLLOQUIAL', default='0'),
GeonamesField('is_historic', 'GEONAMES_IS_HISTORICAL', default='0'),
GeonamesField('gn.population', 'GEONAMES_POPULATION'),
GeonamesField('gn.latitude', 'GEONAMES_LATITUDE'),
GeonamesField('gn.longitude', 'GEONAMES_LONGITUDE'),
GeonamesField('gn.feature_code', 'GEONAMES_FEATURE_CODE'),
GeonamesField('gn.country_code as country_code', 'GEONAMES_COUNTRY_CODE'),
GeonamesField('c.geonames_id as country_gn_id', 'GEONAMES_COUNTRY_ID'),
GeonamesField('gn.admin1_code as admin1_code', 'GEONAMES_ADMIN1_CODE'),
GeonamesField('a1.geonames_id as a1_gn_id', 'GEONAMES_ADMIN1_ID'),
GeonamesField('gn.admin2_code as admin2_code', 'GEONAMES_ADMIN2_CODE'),
GeonamesField('a2.geonames_id as a2_gn_id', 'GEONAMES_ADMIN2_ID'),
GeonamesField('gn.admin3_code as admin3_code', 'GEONAMES_ADMIN3_CODE'),
GeonamesField('a3.geonames_id as a3_gn_id', 'GEONAMES_ADMIN3_ID'),
GeonamesField('gn.admin4_code as admin4_code', 'GEONAMES_ADMIN4_CODE'),
GeonamesField('a4.geonames_id as a4_gn_id', 'GEONAMES_ADMIN4_ID'),
]
def geonames_field_index(s):
    """Return the positional index of the geonames field whose c_constant
    equals *s*, or None if no field matches."""
    return next(
        (i for i, f in enumerate(geonames_fields) if f.c_constant == s),
        None)
DUMMY_BOUNDARY_TYPE_INDEX = geonames_field_index('GEONAMES_BOUNDARY_TYPE')
DUMMY_HAS_WIKIPEDIA_ENTRY_INDEX = geonames_field_index('GEONAMES_HAS_WIKIPEDIA_ENTRY')
GEONAMES_ID_INDEX = geonames_field_index('GEONAMES_ID')
LANGUAGE_INDEX = geonames_field_index('GEONAMES_ISO_LANGUAGE')
DUMMY_LANGUAGE_PRIORITY_INDEX = geonames_field_index('GEONAMES_LANGUAGE_PRIORITY')
CANONICAL_NAME_INDEX = geonames_field_index('GEONAMES_CANONICAL')
NAME_INDEX = geonames_field_index('GEONAMES_NAME')
COUNTRY_CODE_INDEX = geonames_field_index('GEONAMES_COUNTRY_CODE')
POPULATION_INDEX = geonames_field_index('GEONAMES_POPULATION')
PREFERRED_INDEX = geonames_field_index('GEONAMES_IS_PREFERRED_NAME')
HISTORICAL_INDEX = geonames_field_index('GEONAMES_IS_HISTORICAL')
geonames_admin_joins = '''
left join admin1_codes a1
on a1.code = gn.admin1_code
and a1.country_code = gn.country_code
left join admin2_codes a2
on a2.code = gn.admin2_code
and a2.admin1_code = gn.admin1_code
and a2.country_code = gn.country_code
left join admin3_codes a3
on a3.code = gn.admin3_code
and a3.admin1_code = gn.admin1_code
and a3.admin2_code = gn.admin2_code
and a3.country_code = gn.country_code
left join admin4_codes a4
on a4.code = gn.admin4_code
and a4.admin1_code = gn.admin1_code
and a4.admin2_code = gn.admin2_code
and a4.admin3_code = gn.admin3_code
and a4.country_code = gn.country_code
'''
# Canonical names are stored in the geonames table with alternates
# stored in a separate table. UNION ALL query will capture them all.
base_geonames_query = '''
select {geonames_fields}
from geonames gn
join countries c
on gn.country_code = c.country_code
{admin_joins}
{{predicate}}
union all
select {alt_name_fields}
from geonames gn
join countries c
on gn.country_code = c.country_code
join alternate_names an
on an.geonames_id = gn.geonames_id
and iso_language not in ('doi','faac','iata',
'icao','link','post','tcid')
and an.alternate_name != gn.name
{admin_joins}
{{predicate}}
'''.format(
geonames_fields=', '.join((f.name if f.default is None else
'{} as {}'.format(f.default, f.name)
for f in geonames_fields)),
alt_name_fields=', '.join((f.name for f in geonames_fields)),
admin_joins=geonames_admin_joins
)
IGNORE_COUNTRY_POSTAL_CODES = set([
'AR', # GeoNames has pre-1999 postal codes
])
postal_code_fields = [
GeonamesField('postal_code', 'GN_POSTAL_CODE'),
GeonamesField('p.country_code as country_code', 'GN_POSTAL_COUNTRY_CODE'),
GeonamesField('c.geonames_id as country_geonames_id', 'GN_POSTAL_COUNTRY_GEONAMES_ID'),
GeonamesField('c.population as country_population', 'GN_POSTAL_COUNTRY_POPULATION'),
GeonamesField('n.geonames_id as containing_geoname_id', 'GN_POSTAL_CONTAINING_GEONAME_ID'),
GeonamesField('group_concat(distinct a1.geonames_id) admin1_ids', 'GN_POSTAL_ADMIN1_IDS'),
GeonamesField('group_concat(distinct a2.geonames_id) admin2_ids', 'GN_POSTAL_ADMIN2_IDS'),
GeonamesField('group_concat(distinct a3.geonames_id) admin3_ids', 'GN_POSTAL_ADMIN3_IDS'),
]
def postal_code_field_index(s):
    """Return the positional index of the postal-code field whose c_constant
    equals *s*, or None if no field matches."""
    return next(
        (i for i, f in enumerate(postal_code_fields) if f.c_constant == s),
        None)
POSTAL_CODE_INDEX = postal_code_field_index('GN_POSTAL_CODE')
POSTAL_CODE_POP_INDEX = postal_code_field_index('GN_POSTAL_COUNTRY_POPULATION')
postal_codes_query = '''
select
{fields}
from postal_codes p
join countries c
on p.country_code = c.country_code
left join (
select
gn.geonames_id,
alternate_name,
country_code,
gn.name
from alternate_names an
join geonames gn
on an.geonames_id = gn.geonames_id
where iso_language = 'post'
) as n
on p.postal_code = n.alternate_name
and p.country_code = n.country_code
left join admin1_codes a1
on a1.code = p.admin1_code
and p.country_code = a1.country_code
left join admin2_codes a2
on a2.code = p.admin2_code
and a2.admin1_code = p.admin1_code
and a2.country_code = p.country_code
left join admin3_codes a3
on a3.code = p.admin3_code
and a3.admin1_code = p.admin1_code
and a3.admin2_code = p.admin2_code
and a3.country_code = p.country_code
where p.country_code not in ({exclude_country_codes})
group by postal_code, p.country_code
'''.format(
fields=','.join([f.name for f in postal_code_fields]),
exclude_country_codes=','.join("'{}'".format(code) for code in IGNORE_COUNTRY_POSTAL_CODES))
wikipedia_query = '''
select alternate_name, geonames_id, is_preferred_name
from alternate_names
where iso_language = 'link'
and alternate_name like '%%en.wikipedia%%'
order by alternate_name, is_preferred_name
'''
BATCH_SIZE = 2000
wiki_paren_regex = re.compile('(.*)[\s]*\(.*?\)[\s]*')
def normalize_wikipedia_title(title):
    """Decode a raw Wikipedia title and turn underscores into spaces."""
    decoded = safe_decode(title)
    return decoded.replace(u'_', u' ')
def normalize_wikipedia_url(url):
    """Extract and normalize the article title from a Wikipedia URL.

    Prefers an explicit ?title= query parameter; otherwise falls back to the
    last path segment.  Returns None for index pages with no usable title.
    """
    parsed = urlparse.urlsplit(urllib.unquote_plus(url))
    if parsed.query:
        params = urlparse.parse_qs(parsed.query)
        if 'title' in params:
            return normalize_wikipedia_title(params['title'][0])
    title = parsed.path.rsplit('/', 1)[-1]
    if title in ('index.php', 'index.html'):
        return None
    return normalize_wikipedia_title(title)
def normalize_name(name):
    """Normalize a place name for matching: expand '&' to 'and' and map
    hyphens and commas to spaces.

    Replacement order matters: ', ' is handled before ',' so a
    comma-plus-space does not collapse into a double space.
    """
    for old, new in (('&', 'and'), ('-', ' '), (', ', ' '), (',', ' ')):
        name = name.replace(old, new)
    return name
saint_replacements = [
('st.', 'saint'),
('st.', 'st'),
('st', 'saint')
]
abbreviated_saint_regex = re.compile(r'\bSt(\.|\b)')
def normalize_display_name(name):
    """Expand the abbreviation 'St.'/'St' to 'Saint' and '&' to 'and'."""
    expanded = abbreviated_saint_regex.sub('Saint', name)
    return expanded.replace('&', 'and')
def utf8_normalize(s, form='NFD'):
    """Return *s* in the given Unicode normalization form (NFD by default,
    i.e. accented characters are decomposed into base + combining marks)."""
    normalized = unicodedata.normalize(form, s)
    return normalized
def get_wikipedia_titles(db):
    """Build a lookup of lowercased, normalized Wikipedia article titles.

    Returns a defaultdict mapping title -> {geonames_id: is_preferred (0/1)}
    built from the 'link' alternate names pointing at en.wikipedia.
    """
    titles = defaultdict(dict)
    cursor = db.execute(wikipedia_query)
    # fetchmany returns [] when exhausted, which terminates the iter() loop.
    for batch in iter(lambda: cursor.fetchmany(BATCH_SIZE), []):
        for url, geonames_id, is_preferred in batch:
            title = normalize_wikipedia_url(safe_encode(url))
            if title is None or not title.strip():
                continue
            normalized = utf8_normalize(normalize_name(title))
            titles[normalized.lower()][geonames_id] = int(is_preferred or 0)
    return titles
def create_geonames_tsv(db, out_dir=DEFAULT_DATA_DIR):
    '''
    Writes geonames.tsv using the specified db to the specified data directory
    '''
    # Write to a temp file first; the final file is produced by the sort below.
    filename = os.path.join(out_dir, 'geonames.tsv')
    temp_filename = filename + '.tmp'
    f = open(temp_filename, 'w')
    writer = csv.writer(f, 'tsv_no_quote')
    init_languages()
    init_country_names()
    # title -> {geonames_id: is_preferred}, used to boost names with wiki links
    wiki_titles = get_wikipedia_titles(db)
    logging.info('Fetched Wikipedia titles')
    # Iterate over GeoNames boundary types from largest (country) to smallest (neighborhood)
    for boundary_type, codes in geonames_admin_dictionaries.iteritems():
        if boundary_type != boundary_types.COUNTRY:
            predicate = 'where gn.feature_code in ({codes})'.format(
                codes=','.join(['"{}"'.format(c) for c in codes])
            )
        else:
            # The query for countries in GeoNames is somewhat non-trivial
            predicate = 'where gn.geonames_id in (select geonames_id from countries)'
        query = base_geonames_query.format(
            predicate=predicate
        )
        cursor = db.execute(query)
        i = 1
        while True:
            # Fetch rows in batches to save memory
            batch = cursor.fetchmany(BATCH_SIZE)
            if not batch:
                break
            rows = []
            for row in batch:
                row = list(row)
                row[DUMMY_BOUNDARY_TYPE_INDEX] = boundary_type
                language = row[LANGUAGE_INDEX]
                country_code = row[COUNTRY_CODE_INDEX]
                is_preferred = int(row[PREFERRED_INDEX] or 0)
                is_historical = int(row[HISTORICAL_INDEX] or 0)
                # Whether this name's language is spoken / official in the country
                lang_spoken = get_country_languages(country_code.lower(), official=False).get(language, None)
                lang_official = get_country_languages(country_code.lower()).get(language, None) == 1
                null_language = not language.strip()
                is_canonical = row[NAME_INDEX] == row[CANONICAL_NAME_INDEX]
                alpha2_code = None
                is_orig_name = False
                if boundary_type == boundary_types.COUNTRY:
                    alpha2_code = row[COUNTRY_CODE_INDEX]
                    is_orig_name = row[NAME_INDEX] == row[CANONICAL_NAME_INDEX] and row[LANGUAGE_INDEX] == ''
                    # Set the canonical for countries to the local name, see country_official_name in country_names.py
                    country_canonical = country_localized_display_name(alpha2_code.lower())
                    if not country_canonical or not country_canonical.strip():
                        raise ValueError('Could not get local canonical name for country code={}'.format(alpha2_code))
                    row[CANONICAL_NAME_INDEX] = country_canonical
                geonames_id = row[GEONAMES_ID_INDEX]
                name = utf8_normalize(safe_decode(row[NAME_INDEX]))
                # For non-postal codes, don't count
                if name.isdigit():
                    continue
                # Try the name as-is, then its normalized form, in the wiki index
                wikipedia_entries = wiki_titles.get(name.lower(), wiki_titles.get(normalize_name(name.lower()), {}))
                row[NAME_INDEX] = name
                if boundary_type == boundary_types.COUNTRY:
                    # Countries also try Saint/St spelling variants
                    norm_name = normalize_name(name.lower())
                    for s, repl in saint_replacements:
                        if not wikipedia_entries:
                            wikipedia_entries = wiki_titles.get(norm_name.replace(s, repl), {})
                wiki_row = []
                have_wikipedia = geonames_id in wikipedia_entries
                wiki_preferred = wikipedia_entries.get(geonames_id, 0)
                '''
                The following set of heuristics assigns a numerical value to a given name
                alternative, such that in the case of ambiguous names, this value can be
                used as part of the ranking function (as indeed it will be during sort).
                The higher the value, the more likely the given entity resolution.
                '''
                if is_historical:
                    # Historical names, unlikely to be used
                    language_priority = 0
                elif not null_language and language != 'abbr' and lang_spoken is None:
                    # Name of a place in language not widely spoken e.g. Japanese name for a US toponym
                    language_priority = 1
                elif null_language and not is_preferred and not is_canonical:
                    # Null-language alternate names not marked as preferred, dubious
                    language_priority = 2
                elif language == 'abbr' and not is_preferred:
                    # Abbreviation, not preferred
                    language_priority = 3
                elif language == 'abbr' and is_preferred:
                    # Abbreviation, preferred e.g. NYC, UAE
                    language_priority = 4
                elif lang_spoken and not lang_official and not is_preferred:
                    # Non-preferred name but in a spoken (non-official) language
                    language_priority = 5
                elif lang_official == 1 and not is_preferred:
                    # Name in an official language, not preferred
                    language_priority = 6
                elif null_language and not is_preferred and is_canonical:
                    # Canonical name, may be overly official e.g. Islamic Republic of Pakistan
                    language_priority = 7
                elif is_preferred and not lang_official:
                    # Preferred names, not an official language
                    language_priority = 8
                elif is_preferred and lang_official:
                    # Official language preferred
                    language_priority = 9
                # NOTE(review): if none of the branches above match (e.g. a
                # lang_spoken value that is falsy but not None), language_priority
                # is never bound and the next line raises NameError -- confirm
                # the branch set is exhaustive for real data.
                row[DUMMY_LANGUAGE_PRIORITY_INDEX] = language_priority
                if have_wikipedia:
                    # Emit a copy flagged with the wiki-entry rank (preferred+1)
                    wiki_row = row[:]
                    wiki_row[DUMMY_HAS_WIKIPEDIA_ENTRY_INDEX] = wiki_preferred + 1
                    rows.append(map(encode_field, wiki_row))
                canonical = utf8_normalize(safe_decode(row[CANONICAL_NAME_INDEX]))
                row[POPULATION_INDEX] = int(row[POPULATION_INDEX] or 0)
                # NOTE(review): have_normalized is set below but never read in
                # this function.
                have_normalized = False
                if is_orig_name:
                    canonical_row = wiki_row[:] if have_wikipedia else row[:]
                    canonical_row_name = normalize_display_name(name)
                    if canonical_row_name != name:
                        canonical_row[NAME_INDEX] = safe_encode(canonical_row_name)
                        have_normalized = True
                        rows.append(map(encode_field, canonical_row))
                if not have_wikipedia:
                    rows.append(map(encode_field, row))
                # Country names have more specialized logic
                if boundary_type == boundary_types.COUNTRY:
                    wikipedia_entries = wiki_titles.get(canonical.lower(), {})
                    canonical_row_name = normalize_display_name(canonical)
                    canonical_row = row[:]
                    if is_orig_name:
                        canonical = safe_decode(canonical)
                        canonical_row[NAME_INDEX] = safe_encode(canonical)
                    norm_name = normalize_name(canonical.lower())
                    for s, repl in saint_replacements:
                        if not wikipedia_entries:
                            wikipedia_entries = wiki_titles.get(norm_name.replace(s, repl), {})
                    if not wikipedia_entries:
                        norm_name = normalize_name(canonical_row_name.lower())
                        for s, repl in saint_replacements:
                            if not wikipedia_entries:
                                wikipedia_entries = wiki_titles.get(norm_name.replace(s, repl), {})
                    have_wikipedia = geonames_id in wikipedia_entries
                    wiki_preferred = wikipedia_entries.get(geonames_id, 0)
                    if have_wikipedia:
                        canonical_row[DUMMY_HAS_WIKIPEDIA_ENTRY_INDEX] = wiki_preferred + 1
                    if (name != canonical):
                        rows.append(map(encode_field, canonical_row))
                    if canonical_row_name != canonical and canonical_row_name != name:
                        canonical_row[NAME_INDEX] = safe_encode(canonical_row_name)
                        rows.append(map(encode_field, canonical_row))
                    # Also emit the ISO alpha-2 code as a name (e.g. "US"),
                    # with maximum language priority
                    if alpha2_code and is_orig_name:
                        alpha2_row = row[:]
                        alpha2_row[NAME_INDEX] = alpha2_code
                        alpha2_row[DUMMY_LANGUAGE_PRIORITY_INDEX] = 10
                        rows.append(map(encode_field, alpha2_row))
                    # NOTE(review): the condition tests country_alpha3_map but
                    # the lookup below uses country_code_alpha3_map (both come
                    # from the star import of country_names) -- confirm both
                    # names exist there, otherwise this raises NameError.
                    if alpha2_code.lower() in country_alpha3_map and is_orig_name:
                        alpha3_row = row[:]
                        alpha3_row[NAME_INDEX] = country_code_alpha3_map[alpha2_code.lower()]
                        alpha3_row[DUMMY_LANGUAGE_PRIORITY_INDEX] = 10
                        rows.append(map(encode_field, alpha3_row))
            writer.writerows(rows)
            logging.info('Did {} batches'.format(i))
            i += 1
        cursor.close()
    f.flush()
    f.close()
    logging.info('Sorting...')
    # Use the C locale so GNU sort orders bytes deterministically
    env = os.environ.copy()
    env['LC_ALL'] = 'C'
    command = ['sort', '-t\t', '-u', '--ignore-case',
               '-k{0},{0}'.format(NAME_INDEX + 1),
               # If there's a Wikipedia link to this name for the given id, sort first
               '-k{0},{0}nr'.format(DUMMY_HAS_WIKIPEDIA_ENTRY_INDEX + 1),
               # Language priority rules as above
               '-k{0},{0}nr'.format(DUMMY_LANGUAGE_PRIORITY_INDEX + 1),
               # Sort descending by population (basic proxy for relevance)
               '-k{0},{0}nr'.format(POPULATION_INDEX + 1),
               # group rows for the same geonames ID together
               '-k{0},{0}'.format(GEONAMES_ID_INDEX + 1),
               # preferred names come first within that grouping
               '-k{0},{0}nr'.format(PREFERRED_INDEX + 1),
               # since uniquing is done on the sort key, add language
               '-k{0},{0}'.format(LANGUAGE_INDEX + 1),
               '-o', filename, temp_filename]
    p = subprocess.Popen(command, env=env)
    return_code = p.wait()
    if return_code != 0:
        raise subprocess.CalledProcessError(return_code, command)
    os.unlink(temp_filename)
def create_postal_codes_tsv(db, out_dir=DEFAULT_DATA_DIR):
    '''Write postal_codes.tsv (one row per postal code / country pair)
    using the specified db to the specified data directory.'''
    filename = os.path.join(out_dir, 'postal_codes.tsv')
    temp_filename = filename + '.tmp'
    out = open(temp_filename, 'w')
    writer = csv.writer(out, 'tsv_no_quote')
    cursor = db.execute(postal_codes_query)
    batch_num = 1
    # Stream the result set in fixed-size batches to bound memory use;
    # fetchmany returns [] when exhausted, terminating the iter() loop.
    for batch in iter(lambda: cursor.fetchmany(BATCH_SIZE), []):
        writer.writerows([map(encode_field, row) for row in batch])
        logging.info('Did {} batches'.format(batch_num))
        batch_num += 1
    cursor.close()
    out.close()
    logging.info('Sorting...')
    # Sort by postal code, breaking ties by country population (descending)
    subprocess.check_call([
        'sort', '-t\t', '--ignore-case',
        '-k{0},{0}'.format(POSTAL_CODE_INDEX + 1),
        '-k{0},{0}nr'.format(POSTAL_CODE_POP_INDEX + 1),
        '-o', filename,
        temp_filename
    ])
    os.unlink(temp_filename)
# Generates a C header telling us the order of the fields as written
GEONAMES_FIELDS_HEADER = os.path.join(this_dir, os.pardir, os.pardir, os.pardir,
'src', 'geonames_fields.h')
GEONAMES_FIELDS_HEADER_FILE = '''enum geonames_fields {{
{fields},
NUM_GEONAMES_FIELDS
}};
'''.format(fields=''',
'''.join(['{}={}'.format(f.c_constant, i) for i, f in enumerate(geonames_fields)]))
def write_geonames_fields_header(filename=GEONAMES_FIELDS_HEADER):
    """Write the generated C enum header for geonames TSV fields to *filename*."""
    with open(filename, 'w') as header_file:
        header_file.write(GEONAMES_FIELDS_HEADER_FILE)
POSTAL_FIELDS_HEADER = os.path.join(this_dir, os.pardir, os.pardir, os.pardir,
'src', 'postal_fields.h')
POSTAL_FIELDS_HEADER_FILE = '''enum gn_postal_fields {{
{fields},
NUM_POSTAL_FIELDS
}};
'''.format(fields=''',
'''.join(['{}={}'.format(f.c_constant, i) for i, f in enumerate(postal_code_fields)]))
def write_postal_fields_header(filename=POSTAL_FIELDS_HEADER):
    """Write the generated C enum header for postal-code TSV fields to *filename*."""
    with open(filename, 'w') as header_file:
        header_file.write(POSTAL_FIELDS_HEADER_FILE)
if __name__ == '__main__':
    # Handle argument parsing here
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--db',
                        default=DEFAULT_GEONAMES_DB_PATH,
                        help='SQLite db file')
    parser.add_argument('-o', '--out',
                        default=DEFAULT_DATA_DIR, help='output directory')
    args = parser.parse_args()
    db = sqlite3.connect(args.db)
    # Generate both TSVs, then the C headers describing their column order
    create_geonames_tsv(db, args.out)
    create_postal_codes_tsv(db, args.out)
    write_geonames_fields_header()
    write_postal_fields_header()
    db.close()
| 36.222061 | 118 | 0.643627 |
477cf4856c84171cebd6b4bb39ecac2429dd5a90 | 2,328 | py | Python | insta/migrations/0001_initial.py | edithamadi/instagram2 | 4241367dfd02c5f66eed698c9a2fc67b663499ac | [
"Unlicense"
] | null | null | null | insta/migrations/0001_initial.py | edithamadi/instagram2 | 4241367dfd02c5f66eed698c9a2fc67b663499ac | [
"Unlicense"
] | null | null | null | insta/migrations/0001_initial.py | edithamadi/instagram2 | 4241367dfd02c5f66eed698c9a2fc67b663499ac | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-10-18 16:17
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial schema for the insta app: Comment, Image and Profile models,
    # plus foreign keys from Comment to Profile and Image.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comment_text', models.CharField(max_length=200, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(upload_to='images/')),
                ('image_name', models.CharField(max_length=40)),
                ('image_caption', models.TextField()),
                ('likes', models.PositiveIntegerField(default=0)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=30)),
                ('last_name', models.CharField(max_length=30)),
                ('bio', models.CharField(max_length=200)),
                ('profile_photo', models.ImageField(upload_to='profile/')),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.AddField(
            model_name='comment',
            # NOTE(review): field name 'Profile' is capitalized, unlike the
            # lowercase 'post' below -- confirm this naming is intended.
            name='Profile',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='insta.Profile'),
        ),
        migrations.AddField(
            model_name='comment',
            name='post',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='insta.Image'),
        ),
    ]
| 39.457627 | 121 | 0.594502 |
0bd6693846cfd73e645c4551c45206413ce838e1 | 2,261 | py | Python | resources/utils.py | ppizarror/readme-generator | 08bab2a4d25de70a732002237b529799de9082de | [
"MIT"
] | 12 | 2018-01-17T01:02:27.000Z | 2020-04-30T06:18:42.000Z | resources/utils.py | mbebe/readme-generator | a9d7377fe1c70ac5cd997062947b876146b3c9eb | [
"MIT"
] | 1 | 2018-02-18T14:40:51.000Z | 2018-02-18T14:40:51.000Z | resources/utils.py | rrosajp/readme-generator | 08bab2a4d25de70a732002237b529799de9082de | [
"MIT"
] | 14 | 2018-06-19T02:43:42.000Z | 2022-02-21T22:59:13.000Z | # coding=utf-8
"""
README GENERATOR
Utils
Autor: Pablo Pizarro R. @ ppizarror.com
Licencia:
The MIT License (MIT)
Copyright 2017 Pablo Pizarro R.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
# Library imports
import os
import sys
# Constants
CREATE_NO_WINDOW = 0x08000000
class Cd(object):
    """Context manager that temporarily changes the current working directory.

    On exit the previous working directory is restored, even if the body
    raised an exception.
    """

    def __init__(self, new_path):
        # Expand '~' up front so entering the context is a plain chdir.
        self.newPath = os.path.expanduser(new_path)

    def __enter__(self):
        # Remember where we came from so __exit__ can restore it.
        self.savedPath = os.getcwd()
        os.chdir(self.newPath)
        # Return self so callers can write `with Cd(path) as cd:` and inspect
        # cd.newPath / cd.savedPath (previously the context value was None).
        return self

    def __exit__(self, etype, value, traceback):
        # Always restore the original directory; exceptions propagate.
        os.chdir(self.savedPath)
def is_windows():
    """
    Return True if the client operating system is Windows, False otherwise.

    :return: Boolean
    """
    # os.name is 'nt' on all Windows variants; return the comparison directly.
    return os.name == 'nt'
def is_linux():
    """
    Return True if the client operating system is POSIX (Linux/Unix),
    False otherwise.

    NOTE(review): os.name == 'posix' is also True on macOS, so this is
    really an "is POSIX" check rather than strictly Linux -- confirm intent.

    :return: Boolean
    """
    return os.name == 'posix'
def is_osx():
    """
    Return True if the client operating system is macOS (OS X),
    False otherwise.

    Bug fix: ``os.name`` is never ``'darwin'`` (it is ``'posix'`` on macOS),
    so the original check always returned False; ``sys.platform`` is the
    correct probe for macOS.

    :return: Boolean
    """
    return sys.platform == 'darwin'
| 28.987179 | 85 | 0.69969 |
c884cb5bb964feb694e7509036aeb8d7fd088d39 | 3,626 | py | Python | libs/groupdocs_conversion_cloud/models/xltm_load_options.py | rocketbot-cl/pdf2word | e46f6f574f69aa744e300baf4802e426b71bf9b2 | [
"MIT"
] | null | null | null | libs/groupdocs_conversion_cloud/models/xltm_load_options.py | rocketbot-cl/pdf2word | e46f6f574f69aa744e300baf4802e426b71bf9b2 | [
"MIT"
] | null | null | null | libs/groupdocs_conversion_cloud/models/xltm_load_options.py | rocketbot-cl/pdf2word | e46f6f574f69aa744e300baf4802e426b71bf9b2 | [
"MIT"
] | null | null | null | # coding: utf-8
# -----------------------------------------------------------------------------------
# <copyright company="Aspose Pty Ltd" file="XltmLoadOptions.py">
# Copyright (c) 2003-2019 Aspose Pty Ltd
# </copyright>
# <summary>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# </summary>
# -----------------------------------------------------------------------------------
import pprint
import re # noqa: F401
import six
from groupdocs_conversion_cloud.models import SpreadsheetLoadOptions
class XltmLoadOptions(SpreadsheetLoadOptions):
    """
    Xltm load options

    Attributes:
      swagger_types (dict): maps attribute name to attribute type.
      attribute_map (dict): maps attribute name to the json key in definition.
    """

    swagger_types = {
    }

    attribute_map = {
    }

    def __init__(self, **kwargs):  # noqa: E501
        """Initializes new instance of XltmLoadOptions"""  # noqa: E501
        parent = super(XltmLoadOptions, self)
        parent.__init__(**kwargs)
        # Merge the base class's swagger metadata into this subclass's maps.
        self.swagger_types.update(parent.swagger_types)
        self.attribute_map.update(parent.attribute_map)

    def to_dict(self):
        """Returns the model properties as a dict"""
        def _serialize(val):
            # Recursively convert nested model objects into plain values.
            if isinstance(val, list):
                return [v.to_dict() if hasattr(v, "to_dict") else v
                        for v in val]
            if hasattr(val, "to_dict"):
                return val.to_dict()
            if isinstance(val, dict):
                return dict((k, v.to_dict()) if hasattr(v, "to_dict") else (k, v)
                            for k, v in val.items())
            return val

        return dict((attr, _serialize(getattr(self, attr)))
                    for attr, _ in six.iteritems(self.swagger_types))

    def to_str(self):
        """Returns the string representation of the model"""
        model_dict = self.to_dict()
        return pprint.pformat(model_dict)

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        return (isinstance(other, XltmLoadOptions) and
                self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not (self == other)
f52ff824ae76fb5fec7e9d7a1ea09bd311df3fda | 604 | py | Python | grindstone/tasks/migrations/0001_initial.py | amenasse/grindstone | eae893b4ee85175b053203e5a37cbcd21734ee7a | [
"BSD-3-Clause"
] | null | null | null | grindstone/tasks/migrations/0001_initial.py | amenasse/grindstone | eae893b4ee85175b053203e5a37cbcd21734ee7a | [
"BSD-3-Clause"
] | null | null | null | grindstone/tasks/migrations/0001_initial.py | amenasse/grindstone | eae893b4ee85175b053203e5a37cbcd21734ee7a | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Initial migration for the tasks app: creates the Task model.
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=255)),
                ('description', models.TextField(blank=True)),
                # NOTE(review): status is a short code (max 4 chars) with no
                # choices constraint -- confirm valid values are enforced
                # elsewhere.
                ('status', models.CharField(max_length=4)),
            ],
        ),
    ]
| 26.26087 | 114 | 0.572848 |
d5bea47e3ac02072464e6013f83e0960d9dcd6df | 3,184 | py | Python | my_app/blog/migrations/0029_latestproduct_latestproductone_reviewproduct_reviewproductone_topproduct_topproductone.py | Faisal-Sey/official1 | 49af7a9fd60c980bd5d4ef7075a4c1f27ecc9642 | [
"MIT"
] | 1 | 2021-06-19T00:17:02.000Z | 2021-06-19T00:17:02.000Z | my_app/blog/migrations/0029_latestproduct_latestproductone_reviewproduct_reviewproductone_topproduct_topproductone.py | Faisal-Sey/official1 | 49af7a9fd60c980bd5d4ef7075a4c1f27ecc9642 | [
"MIT"
] | null | null | null | my_app/blog/migrations/0029_latestproduct_latestproductone_reviewproduct_reviewproductone_topproduct_topproductone.py | Faisal-Sey/official1 | 49af7a9fd60c980bd5d4ef7075a4c1f27ecc9642 | [
"MIT"
] | null | null | null | # Generated by Django 3.1 on 2020-09-17 09:35
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add six product showcase models (Latest/Review/Top, each with a
    second "...One" variant).

    All six models share an identical schema, so a small class-level
    factory builds each ``CreateModel`` operation instead of repeating the
    field list six times.  The resulting ``operations`` list is equal to
    the original hand-expanded version.
    """

    dependencies = [
        ('blog', '0028_delete_search'),
    ]

    def _product_model(name):
        # Build one CreateModel with the shared product schema.  Field
        # instances must be fresh per model (they are stateful once bound),
        # hence a factory rather than a shared field list.
        return migrations.CreateModel(
            name=name,
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('items_name', models.CharField(max_length=300)),
                ('price', models.TextField(max_length=50)),
                ('Description', models.TextField(blank=True, max_length=600)),
                ('Image', models.ImageField(upload_to='')),
            ],
        )

    operations = [
        _product_model('LatestProduct'),
        _product_model('LatestProductOne'),
        _product_model('ReviewProduct'),
        _product_model('ReviewProductOne'),
        _product_model('TopProduct'),
        _product_model('TopProductOne'),
    ]

    # the helper is only needed while the class body executes
    del _product_model
| 43.027027 | 114 | 0.551193 |
cf45e372e0b6f5e841c23ad0d80fbcf88ef2a0ab | 1,269 | py | Python | benchmarks/base.py | utkarshgupta137/redis-py | 48bc7b6521e4688fae1d432059636d8a2cb71a0d | [
"MIT"
] | null | null | null | benchmarks/base.py | utkarshgupta137/redis-py | 48bc7b6521e4688fae1d432059636d8a2cb71a0d | [
"MIT"
] | null | null | null | benchmarks/base.py | utkarshgupta137/redis-py | 48bc7b6521e4688fae1d432059636d8a2cb71a0d | [
"MIT"
] | null | null | null | import functools
import itertools
import timeit
import redis
class Benchmark:
    """Base class for simple redis benchmarks.

    Subclasses declare ``ARGUMENTS`` (an iterable of dicts with ``"name"``
    and ``"values"`` keys) and override :meth:`setup` and :meth:`run`;
    :meth:`run_benchmark` then times ``run`` for every combination of the
    declared argument values.
    """

    ARGUMENTS = ()

    def __init__(self):
        # lazily created redis client, cached across get_client() calls
        self._client = None

    def get_client(self, **kwargs):
        """Return the cached redis client, (re)building it when kwargs are given.

        Any keyword arguments are merged over the defaults and passed to
        ``redis.ConnectionPool``.
        """
        # eventually make this more robust and take optional args from
        # argparse
        if self._client is None or kwargs:
            defaults = {"db": 9}
            defaults.update(kwargs)
            # bug fix: the merged ``defaults`` dict was previously discarded
            # and the pool was built from the raw kwargs, so db=9 never
            # actually applied
            pool = redis.ConnectionPool(**defaults)
            self._client = redis.Redis(connection_pool=pool)
        return self._client

    def setup(self, **kwargs):
        """Hook executed once before timing; override in subclasses."""
        pass

    def run(self, **kwargs):
        """The operation being timed; override in subclasses."""
        pass

    def run_benchmark(self):
        """Time :meth:`run` for the cartesian product of ARGUMENTS values."""
        group_names = [group["name"] for group in self.ARGUMENTS]
        group_values = [group["values"] for group in self.ARGUMENTS]
        for value_set in itertools.product(*group_values):
            pairs = list(zip(group_names, value_set))
            arg_string = ", ".join(f"{p[0]}={p[1]}" for p in pairs)
            print(f"Benchmark: {arg_string}... ", end="")
            kwargs = dict(pairs)
            setup = functools.partial(self.setup, **kwargs)
            run = functools.partial(self.run, **kwargs)
            t = timeit.timeit(stmt=run, setup=setup, number=1000000)
            print(f"{t:f}")
| 30.214286 | 70 | 0.590229 |
9c7f8603fbe0e12f8df26485e220d895fa4ead3b | 256 | py | Python | src/manage.py | KarimJedda/django-react-setup | 8bf28b3cf3038307776d58f4e1ed476e391f967f | [
"MIT"
] | 1 | 2020-02-27T20:32:17.000Z | 2020-02-27T20:32:17.000Z | src/manage.py | KarimJedda/django-react-setup | 8bf28b3cf3038307776d58f4e1ed476e391f967f | [
"MIT"
] | null | null | null | src/manage.py | KarimJedda/django-react-setup | 8bf28b3cf3038307776d58f4e1ed476e391f967f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at the dev settings unless the caller already exported
    # DJANGO_SETTINGS_MODULE (setdefault lets an external value win).
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sampleapp.settings.dev")
    # Imported after the env var is set so the management CLI picks it up.
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| 23.272727 | 77 | 0.773438 |
21b9a2ad60333295b3c03a91d3a2dd0861037fe1 | 419 | py | Python | frame/config.py | pbcode/links | bc932dd70ab0a80227c1734f8b40d7ccbb275688 | [
"Apache-2.0"
] | 6 | 2015-05-25T06:34:39.000Z | 2016-11-14T05:54:22.000Z | frame/config.py | pbcode/links | bc932dd70ab0a80227c1734f8b40d7ccbb275688 | [
"Apache-2.0"
] | 1 | 2015-05-27T03:37:31.000Z | 2015-05-27T05:24:23.000Z | frame/config.py | pbcode/links | bc932dd70ab0a80227c1734f8b40d7ccbb275688 | [
"Apache-2.0"
] | 21 | 2015-05-21T08:45:06.000Z | 2017-02-24T14:23:43.000Z | # -*- coding:utf-8 -*-
__author__ = 'Ulric Qin'

# -- app config --
DEBUG = True

# -- db config --
DB_HOST = "127.0.0.1"
DB_PORT = 3306
DB_USER = "root"
DB_PASS = ""
DB_NAME = "falcon_links"

# -- cookie config --
SECRET_KEY = "4e.5tyg8-u9ioj"
SESSION_COOKIE_NAME = "falcon-links"
PERMANENT_SESSION_LIFETIME = 3600 * 24 * 30  # 30 days, in seconds

# Optional local overrides: frame/local_config.py may redefine any setting
# above; it is fine (best-effort) for that module to be missing or broken.
try:
    from frame.local_config import *
except Exception as e:  # was `except Exception, e:` -- Python-2-only syntax
    # parenthesized print works identically on Python 2 and 3
    print("[warning] %s" % e)
| 18.217391 | 43 | 0.651551 |
8e4b3fd7ef54b7b69982b394fe0f25b6194fcb1e | 2,893 | py | Python | libnmap/diff.py | sv0/nmap-wrapper | 34e79258b48d5d6c2687f56a9c294254ea0c65d1 | [
"CC-BY-3.0"
] | null | null | null | libnmap/diff.py | sv0/nmap-wrapper | 34e79258b48d5d6c2687f56a9c294254ea0c65d1 | [
"CC-BY-3.0"
] | null | null | null | libnmap/diff.py | sv0/nmap-wrapper | 34e79258b48d5d6c2687f56a9c294254ea0c65d1 | [
"CC-BY-3.0"
] | null | null | null | # -*- coding: utf-8 -*-
class DictDiffer(object):
"""
Calculate the difference between two dictionaries as:
(1) items added
(2) items removed
(3) keys same in both but changed values
(4) keys same in both and unchanged values
"""
def __init__(self, current_dict, past_dict):
self.current_dict = current_dict
self.past_dict = past_dict
self.set_current = set(current_dict.keys())
self.set_past = set(past_dict.keys())
self.intersect = self.set_current.intersection(self.set_past)
def added(self):
return self.set_current - self.intersect
def removed(self):
return self.set_past - self.intersect
def changed(self):
return (set(o for o in self.intersect
if self.past_dict[o] != self.current_dict[o]))
def unchanged(self):
return (set(o for o in self.intersect
if self.past_dict[o] == self.current_dict[o]))
class NmapDiff(DictDiffer):
"""
NmapDiff compares two objects of same type to enable the user to check:
- what has changed
- what has been added
- what has been removed
- what was kept unchanged
NmapDiff inherit from DictDiffer which makes the actual comparaison.
The different methods from DictDiffer used by NmapDiff are the
following:
- NmapDiff.changed()
- NmapDiff.added()
- NmapDiff.removed()
- NmapDiff.unchanged()
Each of the returns a python set() of key which have changed in the
compared objects. To check the different keys that could be returned,
refer to the as_dict() method of the objects you which to
compare (i.e: libnmap.objects.NmapHost, NmapService,...).
"""
def __init__(self, nmap_obj1, nmap_obj2):
"""
Constructor of NmapDiff:
- Checks if the two objects are of the same class
- Checks if the objects are "comparable" via a call to id() (dirty)
- Inherits from DictDiffer and
"""
if(nmap_obj1.__class__ != nmap_obj2.__class__ or
nmap_obj1.id != nmap_obj2.id):
raise NmapDiffException("Comparing objects with non-matching id")
self.object1 = nmap_obj1.as_dict()
self.object2 = nmap_obj2.as_dict()
DictDiffer.__init__(self, self.object1, self.object2)
def __repr__(self):
return ("added: [{0}] -- changed: [{1}] -- "
"unchanged: [{2}] -- removed [{3}]".format(self.added(),
self.changed(),
self.unchanged(),
self.removed()))
class NmapDiffException(Exception):
def __init__(self, msg):
self.msg = msg
| 34.035294 | 79 | 0.580021 |
22b06959e5c2c30e871f7aaf7e4f6f1521486e18 | 2,843 | py | Python | crf.py | yzhangcs/crfsrl | 774db71b11f2dbc2d465e8f82bbaef455adf4764 | [
"MIT"
] | 17 | 2021-10-14T03:39:05.000Z | 2022-03-18T00:45:16.000Z | crf.py | yzhangcs/crfsrl | 774db71b11f2dbc2d465e8f82bbaef455adf4764 | [
"MIT"
] | 4 | 2021-12-16T22:21:17.000Z | 2022-03-13T18:00:26.000Z | crf.py | yzhangcs/crfsrl | 774db71b11f2dbc2d465e8f82bbaef455adf4764 | [
"MIT"
] | 1 | 2021-12-03T15:29:21.000Z | 2021-12-03T15:29:21.000Z | # -*- coding: utf-8 -*-
import argparse
from supar.cmds.cmd import init
from crfsrl import CRFSemanticRoleLabelingParser
def main():
parser = argparse.ArgumentParser(description='Create first-order CRF Dependency Parser.')
parser.set_defaults(Parser=CRFSemanticRoleLabelingParser)
parser.add_argument('--prd', action='store_true', help='whether to use gold predicates')
subparsers = parser.add_subparsers(title='Commands', dest='mode')
# train
subparser = subparsers.add_parser('train', help='Train a parser.')
subparser.add_argument('--feat', '-f', choices=['tag', 'char', 'lemma', 'elmo', 'bert'], nargs='+', help='features to use')
subparser.add_argument('--build', '-b', action='store_true', help='whether to build the model first')
subparser.add_argument('--checkpoint', action='store_true', help='whether to load a checkpoint to restore training')
subparser.add_argument('--finetune', action='store_true', help='whether to finetune PLM models')
subparser.add_argument('--encoder', choices=['lstm', 'transformer', 'bert'], default='lstm', help='encoder to use')
subparser.add_argument('--max-len', type=int, help='max length of the sentences')
subparser.add_argument('--buckets', default=32, type=int, help='max num of buckets to use')
subparser.add_argument('--train', default='data/conll05/train.conllu', help='path to train file')
subparser.add_argument('--dev', default='data/conll05/dev.conllu', help='path to dev file')
subparser.add_argument('--test', default='data/conll05/test.conllu', help='path to test file')
subparser.add_argument('--embed', default='data/glove.6B.100d.txt', help='path to pretrained embeddings')
subparser.add_argument('--unk', default='unk', help='unk token in pretrained embeddings')
subparser.add_argument('--n-embed', default=100, type=int, help='dimension of embeddings')
subparser.add_argument('--bert', default='bert-base-cased', help='which BERT model to use')
# evaluate
subparser = subparsers.add_parser('evaluate', help='Evaluate the specified parser and dataset.')
subparser.add_argument('--buckets', default=8, type=int, help='max num of buckets to use')
subparser.add_argument('--data', default='data/conll05/test.conllu', help='path to dataset')
# predict
subparser = subparsers.add_parser('predict', help='Use a trained parser to make predictions.')
subparser.add_argument('--buckets', default=8, type=int, help='max num of buckets to use')
subparser.add_argument('--data', default='data/conll05/test.conllu', help='path to dataset')
subparser.add_argument('--pred', default='pred.conllu', help='path to predicted result')
subparser.add_argument('--prob', action='store_true', help='whether to output probs')
init(parser)
if __name__ == "__main__":
main()
| 61.804348 | 127 | 0.714386 |
b2eda42e1917a6e32ee23d5132abc44e694d4aa3 | 3,577 | py | Python | corpus/datasources/download.py | WolfgangFahl/ConferenceCorpus | e4f5ac3bf098008859f2e7e116f3c1d75a392271 | [
"Apache-2.0"
] | 1 | 2022-01-30T12:36:39.000Z | 2022-01-30T12:36:39.000Z | corpus/datasources/download.py | WolfgangFahl/ConferenceCorpus | e4f5ac3bf098008859f2e7e116f3c1d75a392271 | [
"Apache-2.0"
] | 42 | 2021-07-30T08:51:30.000Z | 2022-03-24T11:21:55.000Z | corpus/datasources/download.py | WolfgangFahl/ConferenceCorpus | e4f5ac3bf098008859f2e7e116f3c1d75a392271 | [
"Apache-2.0"
] | 2 | 2021-09-29T22:49:00.000Z | 2021-11-09T22:38:53.000Z | '''
Created on 2021-08-21
@author: wf
'''
import gzip
import os
import shutil
import time
import urllib
import urllib.request
class Download:
    '''
    Utility functions for downloading data
    '''

    @staticmethod
    def getURLContent(url: str):
        '''
        read and return the decoded content of the given URL

        Args:
            url(str): the URL to fetch

        Returns:
            str: the decoded response body
        '''
        with urllib.request.urlopen(url) as urlResponse:
            content = urlResponse.read().decode()
        return content

    @staticmethod
    def getFileContent(path: str):
        '''
        read and return the content of the file at the given path

        Args:
            path(str): the path of the file to read

        Returns:
            str: the file content
        '''
        with open(path, "r") as file:
            content = file.read()
        return content

    @staticmethod
    def needsDownload(filePath: str, force: bool = False) -> bool:
        '''
        check if a download of the given filePath is necessary that is the file
        does not exist, has a size of zero or the download should be forced

        Args:
            filePath(str): the path of the file to be checked
            force(bool): True if the result should be forced to True

        Return:
            bool: True if a download for this file is needed
        '''
        if not os.path.isfile(filePath):
            result = True
        else:
            stats = os.stat(filePath)
            size = stats.st_size
            result = force or size == 0
        return result

    @staticmethod
    def downloadBackupFile(url: str, fileName: str, targetDirectory: str, force: bool = False, profile: bool = True):
        '''
        Downloads from the given url the zip-file and extracts the file corresponding to the given fileName.

        Args:
            url: url linking to a downloadable gzip file
            fileName: Name of the file that should be extracted from gzip file
            targetDirectory(str): download the file to this directory
            force (bool): True if the download should be forced
            profile(bool): if True show profiling information

        Returns:
            Name of the extracted file with path to the backup directory

        Raises:
            OSError: if the file could not be extracted from the archive
        '''
        extractTo = f"{targetDirectory}/{fileName}"
        # we might want to check whether a new version is available
        if Download.needsDownload(extractTo, force=force):
            if not os.path.isdir(targetDirectory):
                os.makedirs(targetDirectory)
            zipped = f"{extractTo}.gz"
            msg = f"Downloading {zipped} from {url} ... this might take a few seconds ..."
            profiler = Profiler(msg=msg, profile=profile)
            urllib.request.urlretrieve(url, zipped)
            profiler.time(extraMsg=f" unzipping {extractTo} from {zipped}")
            with gzip.open(zipped, 'rb') as gzipped:
                with open(extractTo, 'wb') as unzipped:
                    shutil.copyfileobj(gzipped, unzipped)
            if not os.path.isfile(extractTo):
                # bug fix: the original raised a plain f-string, which in
                # Python 3 is itself a TypeError ("exceptions must derive
                # from BaseException") and masked the real failure
                raise OSError(f"could not extract {fileName} from {zipped}")
        return extractTo
class Profiler:
    '''
    Lightweight wall-clock profiler: remembers its construction time and
    reports the elapsed seconds on demand. Messages are only printed when
    the profile flag is active.
    '''

    def __init__(self, msg, profile=True):
        '''
        start timing

        Args:
            msg(str): the label shown in the profiling output
            profile(bool): True if progress messages should be printed
        '''
        self.msg = msg
        self.profile = profile
        self.starttime = time.time()
        if self.profile:
            print(f"Starting {msg} ...")

    def time(self, extraMsg=""):
        '''
        stop the clock and return the elapsed seconds, optionally printing
        a report suffixed with extraMsg
        '''
        now = time.time()
        elapsed = now - self.starttime
        if self.profile:
            print(f"{self.msg}{extraMsg} took {elapsed:5.1f} s")
        return elapsed
return elapsed
| 33.12037 | 108 | 0.594073 |
4d159154e5af76975a91d7156f3273f8caab0dd5 | 952 | py | Python | stake_last_all_api/account_rpc_interface_account_manage/account_createWallet.py | DerWalundDieKatze/Yumekui | cb3174103ced7474ce6d1abd774b399557dcaf4f | [
"Apache-2.0"
] | null | null | null | stake_last_all_api/account_rpc_interface_account_manage/account_createWallet.py | DerWalundDieKatze/Yumekui | cb3174103ced7474ce6d1abd774b399557dcaf4f | [
"Apache-2.0"
] | null | null | null | stake_last_all_api/account_rpc_interface_account_manage/account_createWallet.py | DerWalundDieKatze/Yumekui | cb3174103ced7474ce6d1abd774b399557dcaf4f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
'''
@author: caroline
@license: (C) Copyright 2019-2022, Node Supply Chain Manager Corporation Limited.
@contact: caroline.fang.cc@gmail.com
@software: pycharm
@file: account_createWallet.py
@time: 2020/1/8 5:39 下午
@desc:
'''
from stake_last_all_api.API import request_Api
'''3. account_createWallet'''
def createWallet(api_name, params):
'''
创建本地钱包
:param api_name: account_createWallet
:param params:钱包密码
:return: 失败返回错误原因,成功不返回任何信息
示例代码
curl http://localhost:15645 -X POST --data '{"jsonrpc":"2.0","method":"account_createWallet","params":["123"], "id": 3}' -H "Content-Type:application/json"
'''
try:
result = request_Api(api_name, params)
print("创建本地钱包成功,地址为{}".format(result))
return result
except Exception as e:
print("创建本地钱包失败,api返回错误,返回值为{}".format(e))
return -1
if __name__ == '__main__':
api_name = "account_createWallet"
params = ["123"]
createWallet(api_name, params) | 23.8 | 156 | 0.718487 |
e489b309a7a61d407837552f8a64463e27a89057 | 8,735 | py | Python | io_scene_xray/plugin_prefs.py | StalkMen/blender-xray | 52dccad376ab19ad578275059162e5889715ec0d | [
"BSD-2-Clause"
] | null | null | null | io_scene_xray/plugin_prefs.py | StalkMen/blender-xray | 52dccad376ab19ad578275059162e5889715ec0d | [
"BSD-2-Clause"
] | null | null | null | io_scene_xray/plugin_prefs.py | StalkMen/blender-xray | 52dccad376ab19ad578275059162e5889715ec0d | [
"BSD-2-Clause"
] | 1 | 2021-06-30T15:52:46.000Z | 2021-06-30T15:52:46.000Z | # pylint: disable=C0103
from os import path
import bpy
from . import registry
from .ui import collapsible, xprop
from .utils import with_auto_property
from .version_utils import IS_28, assign_props
def get_preferences():
if IS_28:
return bpy.context.preferences.addons['io_scene_xray'].preferences
else:
return bpy.context.user_preferences.addons['io_scene_xray'].preferences
def PropSDKVersion():
return bpy.props.EnumProperty(
name='SDK Version',
items=(('soc', 'SoC', ''), ('cscop', 'CS/CoP', ''))
)
def PropObjectMotionsImport():
return bpy.props.BoolProperty(
name='Import Motions',
description='Import embedded motions as actions',
default=True
)
def PropObjectMeshSplitByMaterials():
return bpy.props.BoolProperty(
name='Split Mesh By Materials',
description='Import each surface (material) as separate set of faces',
default=False
)
def PropObjectMotionsExport():
return bpy.props.BoolProperty(
name='Export Motions',
description='Export armatures actions as embedded motions',
default=True
)
def PropObjectTextureNamesFromPath():
return bpy.props.BoolProperty(
name='Texture Names From Image Paths',
description='Generate texture names from image paths ' \
+ '(by subtract <gamedata/textures> prefix and <file-extension> suffix)',
default=True
)
def PropObjectBonesCustomShapes():
return bpy.props.BoolProperty(
name='Custom Shapes For Bones',
description='Use custom shapes for imported bones',
default=True
)
def PropAnmCameraAnimation():
return bpy.props.BoolProperty(
name='Create Linked Camera',
description='Create animated camera object (linked to "empty"-object)',
default=True
)
def PropUseExportPaths():
return bpy.props.BoolProperty(
name='Use Export Paths',
description='Append the Object.ExportPath to the export directory for each object',
default=True
)
__AUTO_PROPS__ = [
'gamedata_folder',
'textures_folder',
'gamemtl_file',
'eshader_file',
'cshader_file',
'objects_folder'
]
def _auto_path(obj, self_name, path_suffix, checker):
for prop in __AUTO_PROPS__:
if prop == self_name:
continue
value = getattr(obj, prop)
if not value:
continue
if prop == 'objects_folder':
continue
result = path.normpath(value)
if prop != 'gamedata_folder':
dirname = path.dirname(result)
if dirname == result:
continue # path.dirname('T:') == 'T:'
result = dirname
if path_suffix:
result = path.join(result, path_suffix)
if self_name == 'objects_folder':
result = path.abspath(result)
if checker(result):
return result
return ''
def update_menu_func(self, context):
from . import plugin
plugin.append_menu_func()
_explicit_path_op_props = {
'path': bpy.props.StringProperty(),
}
@registry.module_thing
class _ExplicitPathOp(bpy.types.Operator):
bl_idname = 'io_scene_xray.explicit_path'
bl_label = 'Make Explicit'
bl_description = 'Make this path explicit using the automatically calculated value'
if not IS_28:
for prop_name, prop_value in _explicit_path_op_props.items():
exec('{0} = _explicit_path_op_props.get("{0}")'.format(prop_name))
def execute(self, _context):
prefs = get_preferences()
value = getattr(prefs, with_auto_property.build_auto_id(self.path))
setattr(prefs, self.path, value)
return {'FINISHED'}
plugin_preferences_props = {
'expert_mode': bpy.props.BoolProperty(
name='Expert Mode', description='Show additional properties/controls'
),
'compact_menus': bpy.props.BoolProperty(
name='Compact Import/Export Menus', update=update_menu_func
),
'sdk_version': PropSDKVersion(),
'object_motions_import': PropObjectMotionsImport(),
'object_motions_export': PropObjectMotionsExport(),
'object_mesh_split_by_mat': PropObjectMeshSplitByMaterials(),
'object_texture_names_from_path': PropObjectTextureNamesFromPath(),
'object_bones_custom_shapes': PropObjectBonesCustomShapes(),
'anm_create_camera': PropAnmCameraAnimation()
}
@registry.module_thing
@with_auto_property(
bpy.props.StringProperty, 'gamedata_folder',
lambda self: _auto_path(self, 'gamedata_folder', '', path.isdir),
name='Gamedata Folder',
description='Path to the \'gamedata\' directory',
subtype='DIR_PATH',
overrides={'subtype': 'NONE'},
)
@with_auto_property(
bpy.props.StringProperty, 'textures_folder',
lambda self: _auto_path(self, 'textures_folder', 'textures', path.isdir),
name='Textures Folder',
description='Path to the \'gamedata/textures\' directory',
subtype='DIR_PATH',
overrides={'subtype': 'NONE'},
)
@with_auto_property(
bpy.props.StringProperty, 'gamemtl_file',
lambda self: _auto_path(self, 'gamemtl_file', 'gamemtl.xr', path.isfile),
name='GameMtl File',
description='Path to the \'gamemtl.xr\' file',
subtype='FILE_PATH',
overrides={'subtype': 'NONE'},
)
@with_auto_property(
bpy.props.StringProperty, 'eshader_file',
lambda self: _auto_path(self, 'eshader_file', 'shaders.xr', path.isfile),
name='EShader File',
description='Path to the \'shaders.xr\' file',
subtype='FILE_PATH',
overrides={'subtype': 'NONE'},
)
@with_auto_property(
bpy.props.StringProperty, 'cshader_file',
lambda self: _auto_path(self, 'cshader_file', 'shaders_xrlc.xr', path.isfile),
name='CShader File',
description='Path to the \'shaders_xrlc.xr\' file',
subtype='FILE_PATH',
overrides={'subtype': 'NONE'},
)
@with_auto_property(
bpy.props.StringProperty, 'objects_folder',
lambda self: _auto_path(self, 'objects_folder', path.join('..', 'rawdata', 'objects'), path.isdir),
name='Objects Folder',
description='Path to the \'rawdata/objects\' directory',
subtype='DIR_PATH',
overrides={'subtype': 'NONE'},
)
class PluginPreferences(bpy.types.AddonPreferences):
bl_idname = 'io_scene_xray'
if not IS_28:
for prop_name, prop_value in plugin_preferences_props.items():
exec('{0} = plugin_preferences_props.get("{0}")'.format(prop_name))
def draw(self, _context):
def prop_bool(layout, data, prop):
# row = layout.row()
# row.label(text=getattr(self.__class__, prop)[1]['name'] + ':')
# row.prop(data, prop, text='')
layout.prop(data, prop)
def prop_auto(layout, data, prop):
eprop = prop
if not getattr(data, prop):
nprop = with_auto_property.build_auto_id(prop)
if getattr(data, nprop):
eprop = nprop
if eprop == prop:
layout.prop(data, eprop)
else:
_, lay = xprop(layout, data, eprop, enabled=False)
operator = lay.operator(_ExplicitPathOp.bl_idname, icon='MODIFIER', text='')
operator.path = prop
layout = self.layout
prop_auto(layout, self, 'gamedata_folder')
prop_auto(layout, self, 'textures_folder')
prop_auto(layout, self, 'gamemtl_file')
prop_auto(layout, self, 'eshader_file')
prop_auto(layout, self, 'cshader_file')
prop_auto(layout, self, 'objects_folder')
_, box = collapsible.draw(layout, 'plugin_prefs:defaults', 'Defaults', style='tree')
if box:
row = box.row()
row.label(text='SDK Version:')
row.prop(self, 'sdk_version', expand=True)
_, box_n = collapsible.draw(box, 'plugin_prefs:defaults.object', 'Object', style='tree')
if box_n:
prop_bool(box_n, self, 'object_motions_import')
prop_bool(box_n, self, 'object_motions_export')
prop_bool(box_n, self, 'object_texture_names_from_path')
prop_bool(box_n, self, 'object_mesh_split_by_mat')
prop_bool(box_n, self, 'object_bones_custom_shapes')
_, box_n = collapsible.draw(box, 'plugin_prefs:defaults.anm', 'Animation', style='tree')
if box_n:
prop_bool(box_n, self, 'anm_create_camera')
prop_bool(layout, self, 'expert_mode')
prop_bool(layout, self, 'compact_menus')
assign_props([
(_explicit_path_op_props, _ExplicitPathOp),
])
assign_props([
(plugin_preferences_props, PluginPreferences),
], replace=False)
| 32.113971 | 103 | 0.648884 |
a5e0b2cffeff96ff3a2c3f655df6eca2d0f14482 | 961 | py | Python | medcam/medcam.py | lindehesse/M3d-Cam | c5e709ff7e9a9805333bc1131bb54f252ac0bbd0 | [
"MIT"
] | 170 | 2020-06-24T09:40:29.000Z | 2022-03-29T09:26:48.000Z | medcam/medcam.py | lindehesse/M3d-Cam | c5e709ff7e9a9805333bc1131bb54f252ac0bbd0 | [
"MIT"
] | 19 | 2020-07-02T07:04:30.000Z | 2022-03-25T21:22:36.000Z | medcam/medcam.py | lindehesse/M3d-Cam | c5e709ff7e9a9805333bc1131bb54f252ac0bbd0 | [
"MIT"
] | 26 | 2020-07-02T05:39:38.000Z | 2022-02-22T09:28:19.000Z | from medcam import medcam_inject
from medcam import medcam_utils
from medcam.evaluation import evaluation_utils, evaluator
from functools import wraps
@wraps(medcam_inject.inject)
def inject(*args, **kwargs):
return medcam_inject.inject(*args, **kwargs)
@wraps(medcam_utils.get_layers)
def get_layers(model, reverse=False):
return medcam_utils.get_layers(model, reverse)
@wraps(evaluation_utils.comp_score)
def compute_score(attention_map, mask, metric="wioa", threshold='otsu'):
return evaluation_utils.comp_score(attention_map, mask, metric, threshold)
@wraps(evaluator.Evaluator)
def Evaluator(save_path, metric="wioa", threshold='otsu', layer_ordering=None):
return evaluator.Evaluator(save_path, metric, threshold, layer_ordering)
@wraps(medcam_utils.save_attention_map)
def save(attention_map, filename, heatmap, raw_input=None):
medcam_utils.save_attention_map(filename, attention_map, heatmap=heatmap, raw_input=raw_input)
| 32.033333 | 98 | 0.802289 |
e583908fcca479d5c36ae5a9780395e21669ff8e | 4,617 | py | Python | sword_of_control/sword.py | nullJaX/HMLC2019 | b0ef2504d6e0a34d6f390a059984edeb58b6a299 | [
"MIT"
] | null | null | null | sword_of_control/sword.py | nullJaX/HMLC2019 | b0ef2504d6e0a34d6f390a059984edeb58b6a299 | [
"MIT"
] | null | null | null | sword_of_control/sword.py | nullJaX/HMLC2019 | b0ef2504d6e0a34d6f390a059984edeb58b6a299 | [
"MIT"
] | 1 | 2020-11-04T03:21:57.000Z | 2020-11-04T03:21:57.000Z | #!/usr/bin/env python3
import sys
import json
from collections import deque
import cv2
import numpy as np
# Maps the config value params["morphology_method"] to the matching OpenCV
# morphological-operation constant used by compare().
morphology_map = {"dilate": cv2.MORPH_DILATE,
                  "close": cv2.MORPH_CLOSE,
                  "erode": cv2.MORPH_ERODE,
                  "open": cv2.MORPH_OPEN}
def distance(last, current):
    """Mean Euclidean distance between a tracked position and a candidate line.

    Args:
        last: (x_a, y_a, x_b, y_b, x_c, y_c) -- previous endpoints and centre
        current: (x_a, y_a, x_b, y_b) -- candidate line endpoints

    Returns:
        Average of the endpoint-to-endpoint and centre-to-centre Euclidean
        distances (numpy float).
    """
    cxa, cya, cxb, cyb = current
    # centre of the candidate segment
    cxc = int(np.round((cxa + cxb) / 2))
    cyc = int(np.round((cya + cyb) / 2))
    xa, ya, xb, yb, xc, yc = last
    # bug fix: a Euclidean distance needs coordinate *differences*; the
    # original summed the coordinates, which biased line selection towards
    # the image origin instead of towards the previous sword position
    dist_a = np.sqrt((xa - cxa) ** 2 + (ya - cya) ** 2)
    dist_b = np.sqrt((xb - cxb) ** 2 + (yb - cyb) ** 2)
    dist_c = np.sqrt((xc - cxc) ** 2 + (yc - cyc) ** 2)
    return (dist_a + dist_b + dist_c) / 3
def compare(last_frame, frame, last_positions, params):
    """Locate the sword line in *frame* given the previous frame and history.

    Both frames are the preprocessed (grayscale, blurred) images produced by
    preprocess(). Returns (x_a, y_a, x_b, y_b, x_c, y_c) for the best
    candidate line, or the previous position unchanged when no line is found.
    """
    # difference between frames highlights the moving sword
    frame_diff = cv2.absdiff(frame, last_frame)
    # NOTE(review): morphologyEx is handed a plain (k, k) tuple as the kernel
    # argument -- cv2 normally expects an ndarray (cv2.getStructuringElement);
    # confirm this behaves as intended.
    frame_diff = cv2.morphologyEx(frame_diff, morphology_map[params["morphology_method"]], (params["morphology_kernel"], params["morphology_kernel"]), iterations=params["morphology_iter"])
    # Limit Region Of Interest by creating circle of radius equal to longer dimension delta
    mask = np.zeros(frame_diff.shape, dtype=np.uint8)
    for roi in last_positions:
        # radius from the larger of the segment's x/y extents
        radius = max(abs(roi[3] - roi[1]), abs(roi[2] - roi[0]))
        # roi[-2:] is the stored centre point (x_c, y_c)
        cv2.circle(mask, roi[-2:], radius, (255, 255, 255), -1, 8, 0)
    frame_diff = frame_diff & mask
    # Detect edges, then line candidates via the probabilistic Hough transform
    canny = cv2.Canny(frame_diff, params["canny_threshold1"], params["canny_threshold2"])
    lines = cv2.HoughLinesP(canny, params["hough_rho"], np.pi / 180,
                            params["hough_threshold"],
                            minLineLength=params["hough_minLineLength"],
                            maxLineGap=params["hough_maxLineGap"])
    # cv2.imshow("CannyEdges", canny)
    # Pick the candidate with the smallest distance() to the last known
    # position; fall back to the last position when Hough found nothing.
    if lines is not None:
        distances = [distance(last_positions[-1], line[0]) for line in lines]
        index = distances.index(min(distances))
        xa, ya, xb, yb = lines[index][0]
        xc = int(np.round((xa + xb) / 2))
        yc = int(np.round((ya + yb) / 2))
    else:
        xa, ya, xb, yb, xc, yc = last_positions[-1]
    return xa, ya, xb, yb, xc, yc
def preprocess(frame, params):
    """Convert a BGR frame to grayscale and median-blur it for differencing."""
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    return cv2.medianBlur(gray, params["blur_kernel_size"])
def mp4_read(video_file_path):
    """Yield successive frames of the given video file.

    Stops after the reported frame count or at the first failed read.
    Fixes two defects of the original: a failed mid-stream read no longer
    yields the ``None`` buffer (which crashed downstream cv2 calls), and the
    capture is released even if the consumer abandons the generator early.
    """
    cap = cv2.VideoCapture(video_file_path)
    try:
        frame_count = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
        fc = 0
        ret = True
        while fc < frame_count and ret:
            ret, frame_buf = cap.read()
            fc += 1
            if ret:
                yield frame_buf
    finally:
        cap.release()
def where_da_sword(video_file_path, first_frame_position, params):
    """Track the sword line through the video, frame by frame.

    Args:
        video_file_path: path of the video file to analyse
        first_frame_position: (x_a, y_a, x_b, y_b) sword endpoints in the
            first frame
        params: tuning dict loaded from config.json

    Yields:
        (x_a, y_a, x_b, y_b) per processed frame (nothing for the first
        frame, which only seeds the tracker); a coordinate is replaced by
        -1 when the corresponding endpoint leaves the frame.
    """
    # centre of the initial line segment (x coords at even, y at odd indices)
    x_c = int(np.round(sum(first_frame_position[0::2]) / 2))
    y_c = int(np.round(sum(first_frame_position[1::2]) / 2))
    # ring buffer of recent (x_a, y_a, x_b, y_b, x_c, y_c) detections
    last_positions = deque([(*first_frame_position, x_c, y_c)], maxlen=params["deque_maxlen"])
    last_frame = None
    for i, frame in enumerate(mp4_read(video_file_path)):
        if i == 0:
            # first frame only seeds last_frame; nothing is yielded for it
            last_frame = preprocess(frame, params)
        else:
            frame = preprocess(frame, params)
            last_positions.append(tuple(map(int, compare(last_frame, frame, last_positions, params))))
            last_frame = frame.copy()
            x_a, y_a, x_b, y_b, x_c, y_c = last_positions[-1]
            height, width = last_frame.shape[:2]
            # replace out-of-frame endpoint coordinates with the sentinel -1
            x_a = x_a if 0 <= x_a < width else -1
            x_b = x_b if 0 <= x_b < width else -1
            y_a = y_a if 0 <= y_a < height else -1
            y_b = y_b if 0 <= y_b < height else -1
            # display = cv2.cvtColor(last_frame, cv2.COLOR_GRAY2RGB)
            # if all([i >= 0 for i in [x_a, x_b, y_a, y_b]]):
            #     cv2.line(display, (x_a, y_a), (x_b, y_b), (0, 255, 0), 10)
            #     cv2.circle(display, (x_a, y_a), 5, (255, 0, 0), -1)
            #     cv2.circle(display, (x_b, y_b), 5, (0, 0, 255), -1)
            #     cv2.circle(display, (x_c, y_c), 5, (0, 0, 0), -1)
            # cv2.imshow('Debug display', display)
            # cv2.waitKey(30)
            yield x_a, y_a, x_b, y_b
if __name__ == "__main__":
    # CLI: sword.py <video> "<xa;ya;xb;yb>" -- prints one line per frame.
    video_file_path = sys.argv[1]
    first_frame_position = tuple(int(part) for part in sys.argv[2].split(";"))
    with open("config.json", "r") as config_file:
        params = dict(json.load(config_file))
    for x_a, y_a, x_b, y_b in where_da_sword(video_file_path,
                                             first_frame_position, params):
        # endpoints that left the frame are reported as '#'
        if x_a < 0 or y_a < 0:
            x_a = y_a = "#"
        if x_b < 0 or y_b < 0:
            x_b = y_b = "#"
        print(";".join(str(value) for value in (x_a, y_a, x_b, y_b)))
        print()
| 38.157025 | 188 | 0.584362 |
ae705962059b3e5de268223c17e407b12c5043b9 | 1,453 | py | Python | mundo_2/ex068.py | tseiiti/curso_em_video | 59565ce809c1f025fb41ab69de3b8c5b53c8f7b2 | [
"MIT"
] | null | null | null | mundo_2/ex068.py | tseiiti/curso_em_video | 59565ce809c1f025fb41ab69de3b8c5b53c8f7b2 | [
"MIT"
] | null | null | null | mundo_2/ex068.py | tseiiti/curso_em_video | 59565ce809c1f025fb41ab69de3b8c5b53c8f7b2 | [
"MIT"
] | null | null | null | from os import system, name
# Clear the terminal before the game starts (Windows vs POSIX).
system('cls' if name == 'nt' else 'clear')
# Exercise statement (Portuguese). NOTE(review): dsc appears unused in this
# script -- it is never printed; confirm whether it should be shown.
dsc = ('''DESAFIO 068:
Faça um programa que jogue para ou ímpar com o computador. O
jogo só será interrompido quando o jogador PERDER,
mostrando o total de vitórias consecutivas que ele conquistou no
final do jogo.
''')
from time import sleep
from random import randint
# Game state: co = consecutive wins, mj = player's parity choice
# (0 = even/"par", 1 = odd/"ímpar"), us = player's hand (0-5 fingers);
# mj/us start at -1 as "not chosen yet" sentinels.
co = 0
mj = -1
us = -1
# Main game loop: one even/odd round per iteration; exits on the first loss.
while True:
  print('\n' + '=-' * 20, end='')
  print('=\n= JOGO DE PAR OU ÍMPAR')
  print('=-' * 20, end='')
  print(f'=\n(partidas vencidas até agora: {co})\n')
  mj = ''
  # NOTE(review): an empty input makes .strip().upper()[0] raise IndexError
  # before the validation loop can retry -- confirm/guard against Enter-only.
  while not mj or mj not in 'PI':
    mj = input('Escolha entre [P]ar ou [I]mpar: ').strip().upper()[0]
  # encode the choice as the parity it must match: 'P' -> 0, 'I' -> 1
  mj = 0 if mj == 'P' else 1
  print(f'Você escolheu "{"PAR" if mj == 0 else "ÍMPAR"}" então o computador escolhe "{"ÍMPAR" if mj == 0 else "PAR"}"!')
  pc = randint(0, 5)
  # small "thinking" animation before revealing the computer's play
  print('computador pensando', end= '')
  for i in range(3):
    print('.', end= '')
    sleep(0.5)
  print('\n... o computador já pensou na jogada')
  # keep asking until the player gives a hand in the 0-5 range
  while us not in range(0, 6):
    us = int(input('Escolha a sua jogada (0 a 5 dedos): '))
  print('')
  msg = f'\nO computador jogou {pc} e você {us}. Total {pc + us} é {"PAR" if (pc + us) % 2 == 0 else "ÍMPAR"}'
  # the player wins when the total's parity matches their choice
  if (pc + us) % 2 == mj:
    print('*' * 20)
    print('*** Você GANHOU!')
    print('*' * 20)
    print(f'{msg}')
  else:
    print('-' * 20)
    print('Você PERDEU!')
    print(f'{msg}')
    break
  co += 1
  # reset the per-round sentinels for the next iteration
  mj = -1
  us = -1
# final score: consecutive wins before the first loss
print(f'Você ganhou {co} jogadas')
44da0c5d77c9e46607c27abe7b1aad830951f4bd | 11,899 | py | Python | venv/lib/python2.7/site-packages/ansible/modules/network/f5/bigip_ssl_key.py | haind27/test01 | 7f86c0a33eb0874a6c3f5ff9a923fd0cfc8ef852 | [
"MIT"
] | 37 | 2017-08-15T15:02:43.000Z | 2021-07-23T03:44:31.000Z | venv/lib/python2.7/site-packages/ansible/modules/network/f5/bigip_ssl_key.py | haind27/test01 | 7f86c0a33eb0874a6c3f5ff9a923fd0cfc8ef852 | [
"MIT"
] | 12 | 2018-01-10T05:25:25.000Z | 2021-11-28T06:55:48.000Z | venv/lib/python2.7/site-packages/ansible/modules/network/f5/bigip_ssl_key.py | haind27/test01 | 7f86c0a33eb0874a6c3f5ff9a923fd0cfc8ef852 | [
"MIT"
] | 49 | 2017-08-15T09:52:13.000Z | 2022-03-21T17:11:54.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = r'''
module: bigip_ssl_key
short_description: Import/Delete SSL keys from BIG-IP
description:
- This module will import/delete SSL keys on a BIG-IP. Keys can be imported
from key files on the local disk, in PEM format.
version_added: 2.5
options:
content:
description:
- Sets the contents of a key directly to the specified value. This is
used with lookup plugins or for anything with formatting or templating.
This must be provided when C(state) is C(present).
aliases:
- key_content
state:
description:
- When C(present), ensures that the key is uploaded to the device. When
C(absent), ensures that the key is removed from the device. If the key
is currently in use, the module will not be able to remove the key.
default: present
choices:
- present
- absent
name:
description:
- The name of the key.
required: True
passphrase:
description:
- Passphrase on key.
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
notes:
- This module does not behave like other modules that you might include in
roles where referencing files or templates first looks in the role's
files or templates directory. To have it behave that way, use the Ansible
file or template lookup (see Examples). The lookups behave as expected in
a role context.
extends_documentation_fragment: f5
requirements:
- BIG-IP >= v12
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Use a file lookup to import key
bigip_ssl_key:
name: key-name
server: lb.mydomain.com
user: admin
password: secret
state: present
content: "{{ lookup('file', '/path/to/key.key') }}"
delegate_to: localhost
- name: Delete key
bigip_ssl_key:
name: key-name
server: lb.mydomain.com
user: admin
password: secret
state: absent
delegate_to: localhost
'''
RETURN = r'''
key_filename:
description:
- The name of the SSL certificate key. The C(key_filename) and
C(cert_filename) will be similar to each other, however the
C(key_filename) will have a C(.key) extension.
returned: created
type: string
sample: cert1.key
key_checksum:
description: SHA1 checksum of the key that was provided.
returned: changed and created
type: string
sample: cf23df2207d99a74fbe169e3eba035e633b65d94
key_source_path:
description: Path on BIG-IP where the source of the key is stored
returned: created
type: string
sample: /var/config/rest/downloads/cert1.key
'''
import hashlib
import os
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
class Parameters(AnsibleF5Parameters):
    """Adapter between Ansible module params and the F5 API attributes.

    Derives the key filename, its SHA1 checksum, and the on-device source
    path from the user-supplied ``name``/``content`` values, and parses the
    checksum string reported back by the BIG-IP.
    """

    # Directory on the BIG-IP where uploaded files land via the REST API.
    download_path = '/var/config/rest/downloads'
    # API attribute name -> module-side property name.
    api_map = {
        'sourcePath': 'key_source_path'
    }
    # Properties compared against the device to decide whether to update.
    updatables = ['key_source_path']
    # Properties reported back to the user in the module result.
    returnables = ['key_filename', 'key_checksum', 'key_source_path']
    # Attributes sent to the F5 API on create/update.
    api_attributes = ['passphrase', 'sourcePath']
    def to_return(self):
        """Collect the returnable properties into a filtered dict.

        Any exception (e.g. a property unavailable in the current state) is
        swallowed and whatever was collected so far is returned.
        """
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
        except Exception:
            pass
        return result
    def _get_hash(self, content):
        """Return the SHA1 hex digest of ``content``, read in 1 KiB chunks."""
        k = hashlib.sha1()
        s = StringIO(content)
        while True:
            data = s.read(1024)
            if not data:
                break
            k.update(data.encode('utf-8'))
        return k.hexdigest()
    @property
    def key_filename(self):
        # Ensure the on-device object name carries a .key extension.
        if self.name.endswith('.key'):
            return self.name
        else:
            return self.name + '.key'
    @property
    def key_checksum(self):
        # SHA1 of the user-provided key content; None when no content given
        # (e.g. state=absent).
        if self.content is None:
            return None
        return self._get_hash(self.content)
    @property
    def key_source_path(self):
        # file:// URI the BIG-IP uses to import the uploaded key.
        result = 'file://' + os.path.join(
            self.download_path,
            self.key_filename
        )
        return result
    @property
    def checksum(self):
        # Parse the device-reported checksum string of the form
        # "SHA1:<size>:<40-char digest>"; None when absent or malformed.
        if self._values['checksum'] is None:
            return None
        pattern = r'SHA1:\d+:(?P<value>[\w+]{40})'
        matches = re.match(pattern, self._values['checksum'])
        if matches:
            return matches.group('value')
        else:
            return None
class Changes(Parameters):
    """Holds the changed-parameter set reported back to Ansible.

    Inherits all behavior from Parameters unchanged; the distinct type only
    marks the role of the instance (pending changes vs. desired state).
    """
    pass
class ModuleManager(object):
    """Drives the create/update/delete lifecycle of an SSL key on the BIG-IP.

    ``want`` is the desired state from the playbook, ``have`` the state read
    from the device, and ``changes`` the diff reported back to Ansible.
    """
    def __init__(self, *args, **kwargs):
        self.module = kwargs.get('module', None)
        self.client = kwargs.get('client', None)
        self.have = None
        self.want = Parameters(params=self.module.params)
        self.changes = Changes()
    def exec_module(self):
        """Dispatch on the requested state and return the module result dict.

        SDK HTTP errors are re-raised as F5ModuleError so the caller can
        report them uniformly via fail_json.
        """
        changed = False
        result = dict()
        state = self.want.state
        try:
            if state == "present":
                changed = self.present()
            elif state == "absent":
                changed = self.absent()
        except iControlUnexpectedHTTPError as e:
            raise F5ModuleError(str(e))
        changes = self.changes.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        return result
    def _set_changed_options(self):
        # On create: everything the user specified counts as a change.
        changed = {}
        try:
            for key in Parameters.returnables:
                if getattr(self.want, key) is not None:
                    changed[key] = getattr(self.want, key)
            if changed:
                self.changes = Changes(params=changed)
        except Exception:
            pass
    def _update_changed_options(self):
        # On update: diff desired vs. device state; also flag a checksum
        # mismatch (content changed even if the source path did not).
        # Returns True when anything differs, False otherwise.
        changed = {}
        try:
            for key in Parameters.updatables:
                if getattr(self.want, key) is not None:
                    attr1 = getattr(self.want, key)
                    attr2 = getattr(self.have, key)
                    if attr1 != attr2:
                        changed[key] = attr1
            if self.want.key_checksum != self.have.checksum:
                changed['key_checksum'] = self.want.key_checksum
            if changed:
                self.changes = Changes(params=changed)
                return True
        except Exception:
            pass
        return False
    def should_update(self):
        result = self._update_changed_options()
        if result:
            return True
        return False
    def update_on_device(self):
        # Upload the key content, then refresh the existing ssl_key object.
        content = StringIO(self.want.content)
        self.client.api.shared.file_transfer.uploads.upload_stringio(
            content, self.want.key_filename
        )
        resource = self.client.api.tm.sys.file.ssl_keys.ssl_key.load(
            name=self.want.key_filename,
            partition=self.want.partition
        )
        resource.update()
    def exists(self):
        """True when the named key already exists in the partition."""
        result = self.client.api.tm.sys.file.ssl_keys.ssl_key.exists(
            name=self.want.key_filename,
            partition=self.want.partition
        )
        return result
    def present(self):
        if self.exists():
            return self.update()
        else:
            return self.create()
    def create(self):
        # Nothing to create without content; check mode reports the change
        # without touching the device.
        if self.want.content is None:
            return False
        self._set_changed_options()
        if self.module.check_mode:
            return True
        self.create_on_device()
        return True
    def read_current_from_device(self):
        """Load the key's attributes from the device into a Parameters."""
        resource = self.client.api.tm.sys.file.ssl_keys.ssl_key.load(
            name=self.want.key_filename,
            partition=self.want.partition
        )
        result = resource.attrs
        return Parameters(params=result)
    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            return True
        self.update_on_device()
        return True
    def create_on_device(self):
        # Upload the content, then create the ssl_key object pointing at the
        # uploaded file via its file:// source path.
        content = StringIO(self.want.content)
        self.client.api.shared.file_transfer.uploads.upload_stringio(
            content, self.want.key_filename
        )
        self.client.api.tm.sys.file.ssl_keys.ssl_key.create(
            sourcePath=self.want.key_source_path,
            name=self.want.key_filename,
            partition=self.want.partition
        )
    def absent(self):
        if self.exists():
            return self.remove()
        return False
    def remove_from_device(self):
        resource = self.client.api.tm.sys.file.ssl_keys.ssl_key.load(
            name=self.want.key_filename,
            partition=self.want.partition
        )
        resource.delete()
    def remove(self):
        # Verify the deletion actually took effect; a key still in use by a
        # profile cannot be removed and surfaces here as a module error.
        if self.module.check_mode:
            return True
        self.remove_from_device()
        if self.exists():
            raise F5ModuleError("Failed to delete the key")
        return True
class ArgumentSpec(object):
    """Builds the AnsibleModule argument spec for this module.

    Merges the shared F5 connection arguments (server, user, password, ...)
    with the module-specific options documented in DOCUMENTATION.
    """
    def __init__(self):
        self.supports_check_mode = True
        argument_spec = dict(
            name=dict(
                required=True
            ),
            content=dict(
                aliases=['key_content']
            ),
            passphrase=dict(
                # Keep the key passphrase out of logs.
                no_log=True
            ),
            state=dict(
                required=False,
                default='present',
                choices=['absent', 'present']
            ),
            partition=dict(
                default='Common',
                # Allow the partition to come from the environment.
                fallback=(env_fallback, ['F5_PARTITION'])
            )
        )
        # Shared F5 args first, module-specific args override on collision.
        self.argument_spec = {}
        self.argument_spec.update(f5_argument_spec)
        self.argument_spec.update(argument_spec)
def main():
    """Module entry point: build the AnsibleModule, run the manager, report.

    Exits via ``module.exit_json`` on success or ``module.fail_json`` on any
    F5ModuleError, cleaning up REST auth tokens in both paths.
    """
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    if not HAS_F5SDK:
        module.fail_json(msg="The python f5-sdk module is required")

    # Bug fix: F5Client(...) itself may raise F5ModuleError before `client`
    # is bound; the previous handler then hit a NameError on
    # `cleanup_tokens(client)` instead of reporting the real failure.
    client = None
    try:
        client = F5Client(**module.params)
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        module.exit_json(**results)
    except F5ModuleError as ex:
        if client is not None:
            cleanup_tokens(client)
        module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| 29.307882 | 91 | 0.622153 |
ce4572601c58084d37c5c242048c3d147596f7ba | 6,007 | py | Python | training/model.py | trungtv98/skip-thoughts | fc2af2c973f3dc73ba95f9265cd196a7407f2eec | [
"Apache-2.0"
] | null | null | null | training/model.py | trungtv98/skip-thoughts | fc2af2c973f3dc73ba95f9265cd196a7407f2eec | [
"Apache-2.0"
] | null | null | null | training/model.py | trungtv98/skip-thoughts | fc2af2c973f3dc73ba95f9265cd196a7407f2eec | [
"Apache-2.0"
] | null | null | null | """
Model specification
"""
import theano
import theano.tensor as tensor
import numpy
from collections import OrderedDict
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
from utils import _p, ortho_weight, norm_weight, tanh
from layers import get_layer, param_init_fflayer, fflayer, param_init_gru, gru_layer
def init_params(options):
    """
    Initialize all parameters of the skip-thoughts model.

    Builds, in order: the word-embedding matrix, the sentence encoder, two
    decoders (next / previous sentence), and the shared output projection.
    Returns an OrderedDict mapping parameter names to numpy arrays; layer
    initializers extend the dict in place and return it.
    """
    params = OrderedDict()
    # Word embedding: n_words x dim_word lookup table.
    params['Wemb'] = norm_weight(options['n_words'], options['dim_word'])
    # Encoder
    params = get_layer(options['encoder'])[0](options, params, prefix='encoder',
                                              nin=options['dim_word'], dim=options['dim'])
    # Decoder: next sentence
    params = get_layer(options['decoder'])[0](options, params, prefix='decoder_f',
                                              nin=options['dim_word'], dim=options['dim'])
    # Decoder: previous sentence
    params = get_layer(options['decoder'])[0](options, params, prefix='decoder_b',
                                              nin=options['dim_word'], dim=options['dim'])
    # Output layer: projects decoder states to vocabulary logits (shared by
    # both decoders via the 'ff_logit' prefix).
    params = get_layer('ff')[0](options, params, prefix='ff_logit', nin=options['dim'], nout=options['n_words'])
    return params
def build_model(tparams, options):
    """
    Build the full training computation graph.

    Encodes the current sentence x, then decodes the next sentence y and the
    previous sentence z conditioned on the encoding; the cost is the summed
    negative log-likelihood of both decoders.

    Returns (trng, x, x_mask, y, y_mask, z, z_mask, opt_ret, cost).
    """
    opt_ret = dict()
    trng = RandomStreams(1234)
    # description string: #words x #samples
    # x: current sentence
    # y: next sentence
    # z: previous sentence
    x = tensor.matrix('x', dtype='int64')
    x_mask = tensor.matrix('x_mask', dtype='float32')
    y = tensor.matrix('y', dtype='int64')
    y_mask = tensor.matrix('y_mask', dtype='float32')
    z = tensor.matrix('z', dtype='int64')
    z_mask = tensor.matrix('z_mask', dtype='float32')
    n_timesteps = x.shape[0]
    n_timesteps_f = y.shape[0]
    n_timesteps_b = z.shape[0]
    n_samples = x.shape[1]
    # Word embedding (source): lookup then reshape to (time, batch, dim_word).
    emb = tparams['Wemb'][x.flatten()].reshape([n_timesteps, n_samples, options['dim_word']])
    # encoder
    proj = get_layer(options['encoder'])[1](tparams, emb, None, options,
                                            prefix='encoder',
                                            mask=x_mask)
    # Sentence vector = final hidden state of the encoder.
    ctx = proj[0][-1]
    dec_ctx = ctx
    # Word embedding (ahead), shifted right by one step so the decoder
    # predicts each word from the previous ones (teacher forcing).
    embf = tparams['Wemb'][y.flatten()].reshape([n_timesteps_f, n_samples, options['dim_word']])
    embf_shifted = tensor.zeros_like(embf)
    embf_shifted = tensor.set_subtensor(embf_shifted[1:], embf[:-1])
    embf = embf_shifted
    # Word embedding (behind), shifted the same way.
    embb = tparams['Wemb'][z.flatten()].reshape([n_timesteps_b, n_samples, options['dim_word']])
    embb_shifted = tensor.zeros_like(embb)
    embb_shifted = tensor.set_subtensor(embb_shifted[1:], embb[:-1])
    embb = embb_shifted
    # decoder (ahead)
    projf = get_layer(options['decoder'])[1](tparams, embf, dec_ctx, options,
                                             prefix='decoder_f',
                                             mask=y_mask)
    # decoder (behind)
    projb = get_layer(options['decoder'])[1](tparams, embb, dec_ctx, options,
                                             prefix='decoder_b',
                                             mask=z_mask)
    # compute word probabilities (ahead); flatten time x batch for softmax
    logit = get_layer('ff')[1](tparams, projf[0], options, prefix='ff_logit', activ='linear')
    logit_shp = logit.shape
    probs = tensor.nnet.softmax(logit.reshape([logit_shp[0]*logit_shp[1], logit_shp[2]]))
    # cost (ahead): masked NLL of the target words; 1e-8 guards log(0)
    y_flat = y.flatten()
    y_flat_idx = tensor.arange(y_flat.shape[0]) * options['n_words'] + y_flat
    costf = -tensor.log(probs.flatten()[y_flat_idx]+1e-8)
    costf = costf.reshape([y.shape[0],y.shape[1]])
    costf = (costf * y_mask).sum(0)
    costf = costf.sum()
    # compute word probabilities (behind)
    logit = get_layer('ff')[1](tparams, projb[0], options, prefix='ff_logit', activ='linear')
    logit_shp = logit.shape
    probs = tensor.nnet.softmax(logit.reshape([logit_shp[0]*logit_shp[1], logit_shp[2]]))
    # cost (behind)
    z_flat = z.flatten()
    z_flat_idx = tensor.arange(z_flat.shape[0]) * options['n_words'] + z_flat
    costb = -tensor.log(probs.flatten()[z_flat_idx]+1e-8)
    costb = costb.reshape([z.shape[0],z.shape[1]])
    costb = (costb * z_mask).sum(0)
    costb = costb.sum()
    # total cost
    cost = costf + costb
    return trng, x, x_mask, y, y_mask, z, z_mask, opt_ret, cost
def build_encoder(tparams, options):
    """
    Build the encoder-only graph (used to embed sentences at test time).

    Returns (trng, x, x_mask, ctx, emb) where ctx is the sentence vector
    (final encoder hidden state) and emb the word embeddings fed to it.
    """
    opt_ret = dict()
    trng = RandomStreams(1234)
    # description string: #words x #samples
    x = tensor.matrix('x', dtype='int64')
    x_mask = tensor.matrix('x_mask', dtype='float32')
    n_timesteps = x.shape[0]
    n_samples = x.shape[1]
    # word embedding (source)
    emb = tparams['Wemb'][x.flatten()].reshape([n_timesteps, n_samples, options['dim_word']])
    # encoder
    proj = get_layer(options['encoder'])[1](tparams, emb, None, options,
                                            prefix='encoder',
                                            mask=x_mask)
    ctx = proj[0][-1]
    return trng, x, x_mask, ctx, emb
def build_encoder_w2v(tparams, options):
    """
    Build an encoder graph that consumes pre-computed word embeddings
    (e.g. word2vec) instead of the model's own embedding lookup.

    Returns (trng, embedding, x_mask, ctx) where embedding is a 3-tensor
    (time x batch x embedding-dim) input variable.
    """
    opt_ret = dict()
    trng = RandomStreams(1234)
    # word embedding (source): supplied directly as an input tensor
    embedding = tensor.tensor3('embedding', dtype='float32')
    x_mask = tensor.matrix('x_mask', dtype='float32')
    # encoder
    proj = get_layer(options['encoder'])[1](tparams, embedding, None, options,
                                            prefix='encoder',
                                            mask=x_mask)
    ctx = proj[0][-1]
    return trng, embedding, x_mask, ctx
| 34.722543 | 113 | 0.57999 |
23ddef234449ce6db81368ce07ef9fe9a3629001 | 5,401 | py | Python | test/functional/wallet-dump.py | farsider350/AUTX-Core | 6d00d1e027a5a6dffb3b0815a155e4515ced007b | [
"MIT"
] | null | null | null | test/functional/wallet-dump.py | farsider350/AUTX-Core | 6d00d1e027a5a6dffb3b0815a155e4515ced007b | [
"MIT"
] | null | null | null | test/functional/wallet-dump.py | farsider350/AUTX-Core | 6d00d1e027a5a6dffb3b0815a155e4515ced007b | [
"MIT"
] | 1 | 2021-01-03T02:35:54.000Z | 2021-01-03T02:35:54.000Z | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the dumpwallet RPC."""
import os
import sys
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (assert_equal, assert_raises_rpc_error)
def read_dump(file_name, addrs, hd_master_addr_old):
    """
    Read the given dump, count the addrs that match, count change and reserve.
    Also check that the old hd_master is inactive.

    Returns (matched, change_cnt, reserve_cnt, new_master): the number of
    labelled keys matching ``addrs`` by address+keypath, the counts of
    change/reserve keys, and the newly active HD master address (or None).
    """
    matched = 0
    change_cnt = 0
    reserve_cnt = 0
    new_master = None
    with open(file_name, encoding='utf8') as dump:
        for line in dump:
            # Ignore comment lines and anything too short to be a key entry.
            if line[0] == "#" or len(line) <= 10:
                continue
            # Each entry is "<key> <timestamp> <keytype> # <metadata>".
            key_label, comment = line.split("#")
            keytype = key_label.split(" ")[2]
            if len(comment) <= 1:
                continue
            addr_keypath = comment.split(" addr=")[1]
            addr = addr_keypath.split(" ")[0]
            keypath = None
            if keytype == "inactivehdmaster=1":
                # The pre-encryption master must still be present, inactive.
                assert hd_master_addr_old == addr
            elif keytype == "hdmaster=1":
                # A fresh master must have been generated.
                assert hd_master_addr_old != addr
                new_master = addr
            else:
                keypath = addr_keypath.rstrip().split("hdkeypath=")[1]
            # Tally this entry against the expected address list.
            for entry in addrs:
                if keytype == "label=" and entry['address'] == addr and entry['hdkeypath'] == keypath:
                    matched += 1
                    break
                elif keytype == "change=1":
                    change_cnt += 1
                    break
                elif keytype == "reserve=1":
                    reserve_cnt += 1
                    break
    return matched, change_cnt, reserve_cnt, new_master
class WalletDumpTest(BitcoinTestFramework):
    """Functional test for the dumpwallet RPC on an HD wallet.

    Dumps the wallet before and after encryption, parses both dumps with
    read_dump, and checks address/change/reserve counts and HD-master
    rotation.
    """
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 1
        # keypool=90 -> 180 reserve keys total (external + internal chains).
        self.extra_args = [["-keypool=90", "-usehd=1"]]
    def setup_network(self):
        # TODO remove this when usehd=1 becomes the default
        # use our own cache and -usehd=1 as extra arg as the default cache is run with -usehd=0
        self.options.tmpdir = os.path.join(self.options.tmpdir, 'hd')
        self.options.cachedir = os.path.join(self.options.cachedir, 'hd')
        self._initialize_chain(extra_args=self.extra_args[0], stderr=sys.stdout)
        self.set_cache_mocktime()
        # Use 1 minute timeout because the initial getnewaddress RPC can take
        # longer than the default 30 seconds due to an expensive
        # CWallet::TopUpKeyPool call, and the encryptwallet RPC made later in
        # the test often takes even longer.
        self.add_nodes(self.num_nodes, self.extra_args, timewait=60, stderr=sys.stdout)
        self.start_nodes()
    def run_test (self):
        tmpdir = self.options.tmpdir
        # generate 20 addresses to compare against the dump
        test_addr_count = 20
        addrs = []
        for i in range(0,test_addr_count):
            addr = self.nodes[0].getnewaddress()
            vaddr= self.nodes[0].validateaddress(addr) #required to get hd keypath
            addrs.append(vaddr)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        # dump unencrypted wallet
        self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.unencrypted.dump")
        found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_unenc = \
            read_dump(tmpdir + "/node0/wallet.unencrypted.dump", addrs, None)
        assert_equal(found_addr, test_addr_count)  # all keys must be in the dump
        assert_equal(found_addr_chg, 50)  # 50 blocks where mined
        assert_equal(found_addr_rsv, 180)  # keypool size (external+internal)
        # encrypt wallet, restart, unlock and dump again; encryption rotates
        # the HD master, so the old one must appear as inactive in the dump.
        self.nodes[0].node_encrypt_wallet('test')
        self.start_node(0)
        self.nodes[0].walletpassphrase('test', 30)
        # Should be a no-op:
        self.nodes[0].keypoolrefill()
        self.nodes[0].dumpwallet(tmpdir + "/node0/wallet.encrypted.dump")
        found_addr, found_addr_chg, found_addr_rsv, hd_master_addr_enc = \
            read_dump(tmpdir + "/node0/wallet.encrypted.dump", addrs, hd_master_addr_unenc)
        assert_equal(found_addr, test_addr_count)
        # TODO clarify if we want the behavior that is tested below in autx (only when HD seed was generated and not user-provided)
        # assert_equal(found_addr_chg, 180 + 50) # old reserve keys are marked as change now
        assert_equal(found_addr_rsv, 180)  # keypool size
        # Overwriting should fail
        assert_raises_rpc_error(-8, "already exists", self.nodes[0].dumpwallet, tmpdir + "/node0/wallet.unencrypted.dump")
| 45.008333 | 131 | 0.606184 |
5e283efb9bbd4f0dfbf2376c4077da3f6e8000f9 | 2,128 | py | Python | balanceClass.py | SuhasShanbhogue/Sport-Prediction | ed9e1d693be99bfe6bda665866fe2aba97569a0f | [
"MIT"
] | 1 | 2020-12-11T03:00:58.000Z | 2020-12-11T03:00:58.000Z | balanceClass.py | SuhasShanbhogue/Sport-Prediction | ed9e1d693be99bfe6bda665866fe2aba97569a0f | [
"MIT"
] | null | null | null | balanceClass.py | SuhasShanbhogue/Sport-Prediction | ed9e1d693be99bfe6bda665866fe2aba97569a0f | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
import csv
import random
random.seed(10)
import tqdm
def balanceClass():
    """Balance the winner class in the per-year match datasets.

    For each season, flips randomly chosen winner=1 rows by swapping the two
    teams' feature columns (team stats and player stats) and inverting the
    winner label, until the positive/negative counts are (approximately)
    even, then writes the balanced CSVs.
    """
    for year in tqdm.tqdm(["2011","2012","2013","2014","2015"]):
        # NOTE(review): reads from ../Data but writes to ./Data below —
        # confirm the asymmetry is intentional.
        df = pd.read_csv("../Data/players_"+year+".csv")
        df1 = pd.read_csv("../Data/team_"+year+".csv")
        assert len(df.index) == len(df1.index) , "check the Files"
        assert df1['winner'].sum() == df["Winner"].sum() , "check the files"
        ones = df1['winner'].sum()
        total = len(df1.index)
        # Number of winner=1 rows in excess of a 50/50 split.
        # NOTE(review): if delta is negative (class 0 over-represented) the
        # loop below simply does not run — confirm that is acceptable.
        delta = ones - (total/2)
        print("Imbalance before:" + str(delta))
        # Collect the indices of all winner=1 rows and pick `delta` of them
        # at random to flip.
        listRows = []
        for i in range(len(df.index)):
            if int(df.iloc[i,-1:]) == 1:
                listRows.append(i)
        random.shuffle(listRows)
        for i in range(int(delta)):
            #print(i)
            rowNumber = listRows[i]
            #print(rowNumber)
            # Both frames must agree on the label of the row being flipped.
            assert df.loc[rowNumber][-1] == df1.loc[rowNumber][-1] , "check the files"
            teamData = df1.loc[rowNumber].to_numpy()
            playersData = df.loc[rowNumber].to_numpy()
            whoWonInit = df.loc[rowNumber][-1]
            #delta = delta- 1
            # Player features: 230 columns per team; team features: 24 per team.
            dataPlayers = playersData[:460]
            dataPlayersTeam1 = dataPlayers[:230]
            dataPlayersTeam2 = dataPlayers[230:460]
            dataTeams = teamData[:48]
            dataTeamsTeam1 = dataTeams[:24]
            dataTeamsTeam2 = dataTeams[24:48]
            # NOTE(review): the first assert compares dataPlayersTeam2 with
            # itself (tautology); presumably Team1 was intended — confirm.
            assert len(dataPlayersTeam2) == len(dataPlayersTeam2) , "Error"
            assert len(dataTeamsTeam2) == len(dataTeamsTeam1) , "Error"
            # Swap team order and invert the winner label.
            newFirstTeam = dataTeamsTeam2
            lineAfterSecondTeam = np.append(newFirstTeam,dataTeamsTeam1)
            newFirstPlayerTeam = dataPlayersTeam2
            lineAfterPlayerSecondTeam = np.append(newFirstPlayerTeam,dataPlayersTeam1)
            whoWonInit = not (whoWonInit)
            finalTeamLine = np.append(lineAfterSecondTeam,whoWonInit)
            finalPlayerLine = np.append(lineAfterPlayerSecondTeam,whoWonInit)
            df1.loc[rowNumber] = finalTeamLine
            #print(df1.loc[rowNumber])
            df.loc[rowNumber] = finalPlayerLine
            #print(df1.loc[rowNumber])
        #print(df)
        df.to_csv("./Data/playersBal_"+year+".csv",index=False)
        df1.to_csv("./Data/teamBal_"+year+".csv",index=False)
        #print(df['Winner'])
        print("Imbalance Now : " + str(int(df["Winner"].sum()-len(df.index)/2)))
        #print(len(df.index))
balanceClass()
9c9e279372014704936c4e32439e4e56d6386de0 | 11,225 | py | Python | detect.py | cxqj/45-CMCS | 9f2eeabbe49adb1bc26a99ebda089870e867fa1d | [
"MIT"
] | null | null | null | detect.py | cxqj/45-CMCS | 9f2eeabbe49adb1bc26a99ebda089870e867fa1d | [
"MIT"
] | null | null | null | detect.py | cxqj/45-CMCS | 9f2eeabbe49adb1bc26a99ebda089870e867fa1d | [
"MIT"
] | null | null | null | import matlab.engine # Must import matlab.engine first
import os
import torch
import argparse
import numpy as np
import matplotlib.pyplot as plt
import torch.nn.functional as F
from utils import smooth
from utils import eval_thumos_detect, detect_with_thresholding
from utils import get_dataset, normalize, interpolate
from utils import mask_to_detections, load_config_file
from utils import output_detections_thumos14, output_detections_anet
import pdb
def softmax(x, dim):
    """Softmax over axis ``dim`` of a numpy array, computed via PyTorch."""
    tensor_in = torch.from_numpy(x)
    return F.softmax(tensor_in, dim=dim).numpy()
if __name__ == '__main__':
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--config-file', type=str)
parser.add_argument('--train-subset-name', type=str)
parser.add_argument('--test-subset-name', type=str)
parser.add_argument('--include-train',
dest='include_train',
action='store_true')
parser.add_argument('--no-include-train',
dest='include_train',
action='store_false')
parser.set_defaults(include_train=True)
args = parser.parse_args()
print(args.config_file)
print(args.train_subset_name)
print(args.test_subset_name)
print(args.include_train)
all_params = load_config_file(args.config_file)
locals().update(all_params)
if args.include_train:
train_dataset_dict = get_dataset(
dataset_name=dataset_name,
subset=args.train_subset_name,
file_paths=file_paths,
sample_rate=sample_rate,
base_sample_rate=base_sample_rate,
action_class_num=action_class_num,
modality='both',
feature_type=feature_type,
feature_oversample=False,
temporal_aug=False,
)
else:
train_dataset_dict = None
test_dataset_dict = get_dataset(
dataset_name=dataset_name,
subset=args.test_subset_name,
file_paths=file_paths,
sample_rate=sample_rate,
base_sample_rate=base_sample_rate,
action_class_num=action_class_num,
modality='both',
feature_type=feature_type,
feature_oversample=False,
temporal_aug=False,
)
dataset_dicts = {'train': train_dataset_dict, 'test': test_dataset_dict}
    def detect(
            cas_dir,
            subset,
            out_file_name,
            global_score_thrh,  # e.g. 0.1: skip classes below this video-level score
            metric_type,  # 'score' | 'multiply' | 'att-filtering'
            thrh_type,  # 'mean' | 'max'
            thrh_value,  # e.g. 1
            interpolate_type,  # 'quadratic' | 'linear' | 'nearest'
            proc_type,  # 'dilation' | 'median'
            proc_value,  # e.g. 20
            sample_offset,  # frame offset applied before converting to seconds
            weight_inner,  # e.g. 1
            weight_outter,  # e.g. -1
            weight_global,  # e.g. 0.25: weight of the video-level class score
            att_filtering_value=None,
    ):
        """Turn saved class-activation sequences into temporal detections.

        Loads the per-video CAS .npz files from `cas_dir`, thresholds a
        per-class metric into segment masks, scores the segments, and writes
        them to `out_file_name` in the dataset's submission format.
        Relies on names injected into the enclosing scope
        (dataset_dicts, action_class_num, feature_type, sample_rate,
        base_snippet_size, dataset_name — loaded from the config file).
        Returns the list of detections, each
        [video_name, start_sec, end_sec, class_id, score].
        """
        assert (metric_type in ['score', 'multiply', 'att-filtering'])
        assert (thrh_type in ['mean', 'max'])
        assert (interpolate_type in ['quadratic', 'linear', 'nearest'])
        assert (proc_type in ['dilation', 'median'])
        out_detections = []
        dataset_dict = dataset_dicts[subset]
        for video_name in dataset_dict.keys():
            cas_file = video_name + '.npz'
            cas_data = np.load(os.path.join(cas_dir, cas_file))
            # Shapes below are examples from one video (T=279 snippets,
            # 21 = action classes + background) — TODO confirm in general.
            avg_score = cas_data['avg_score']  # (T, n_class+1)
            att_weight = cas_data['weight']  # (T, 1)
            branch_scores = cas_data['branch_scores']  # per-branch CAS (unused here)
            global_score = cas_data['global_score']  # (n_class+1,)
            duration = dataset_dict[video_name]['duration']
            fps = dataset_dict[video_name]['frame_rate']
            frame_cnt = dataset_dict[video_name]['frame_cnt']
            global_score = softmax(global_score, dim=0)
            ################ Thresholding ################
            for class_id in range(action_class_num):
                # Skip classes the video-level classifier rejects.
                if global_score[class_id] <= global_score_thrh:
                    continue
                if metric_type == 'score':
                    # Per-snippet class probability as the detection metric.
                    metric = softmax(avg_score, dim=1)[:, class_id:class_id + 1]
                    #metric = smooth(metric)
                    metric = normalize(metric)
                elif metric_type == 'multiply':
                    # Class probability modulated by the attention weight.
                    _score = softmax(avg_score, dim=1)[:, class_id:class_id + 1]
                    metric = att_weight * _score
                    #metric = smooth(metric)
                    metric = normalize(metric)
                elif metric_type == 'att-filtering':
                    # Zero out snippets whose attention is below the cutoff.
                    assert (att_filtering_value is not None)
                    metric = softmax(avg_score, dim=1)[:, class_id:class_id + 1]
                    #metric = smooth(metric)
                    metric = normalize(metric)
                    metric[att_weight < att_filtering_value] = 0
                    metric = normalize(metric)
                #########################################
                # Upsample the snippet-level metric to frame resolution.
                metric = interpolate(metric[:, 0],
                                     feature_type,
                                     frame_cnt,
                                     sample_rate,
                                     snippet_size=base_snippet_size,
                                     kind=interpolate_type)
                metric = np.expand_dims(metric, axis=1)
                # Threshold + morphological post-processing -> binary mask,
                # then mask -> scored [start, end, cls, score] proposals.
                mask = detect_with_thresholding(metric, thrh_type, thrh_value,
                                                proc_type, proc_value)
                temp_out = mask_to_detections(mask, metric, weight_inner,
                                              weight_outter)
                #########################################
                for entry in temp_out:
                    entry[2] = class_id
                    # Blend in the video-level class score.
                    entry[3] += global_score[class_id] * weight_global
                    # Convert frame indices to seconds, clamped to [0, duration].
                    entry[0] = (entry[0] + sample_offset) / fps
                    entry[1] = (entry[1] + sample_offset) / fps
                    entry[0] = max(0, entry[0])
                    entry[1] = max(0, entry[1])
                    entry[0] = min(duration, entry[0])
                    entry[1] = min(duration, entry[1])
                #########################################
                for entry_id in range(len(temp_out)):
                    temp_out[entry_id] = [video_name] + temp_out[entry_id]
                out_detections += temp_out
        if dataset_name == 'thumos14':
            output_detections_thumos14(out_detections, out_file_name)
        elif dataset_name in ['ActivityNet12', 'ActivityNet13']:
            output_detections_anet(out_detections, out_file_name, dataset_name,
                                   feature_type)
        return out_detections
if dataset_name == 'thumos14':
summary_file = './outputs/summary-{}.npz'.format(experiment_naming)
all_train_map = np.zeros((train_run_num, len(check_points), 4, 9, 1))
all_train_aps = np.zeros(
(train_run_num, len(check_points), 4, 9, action_class_num))
all_test_map = np.zeros((train_run_num, len(check_points), 4, 9, 1))
all_test_aps = np.zeros(
(train_run_num, len(check_points), 4, 9, action_class_num))
for run_idx in range(train_run_num):
for cp_idx, check_point in enumerate(check_points):
for mod_idx, modality in enumerate(
['both', 'rgb', 'flow', 'late-fusion']):
cas_dir = os.path.join(
'cas-features',
'{}-run-{}-{}-{}'.format(experiment_naming, run_idx,
check_point, modality))
pred_dir = os.path.join('outputs', 'predictions')
if not os.path.exists(pred_dir):
os.makedirs(pred_dir)
if args.include_train:
train_pred_file = os.path.join(
pred_dir,
'{}-run-{}-{}-{}-train'.format(experiment_naming,
run_idx, check_point,
modality))
train_outs = detect(cas_dir, 'train', train_pred_file,
**detect_params)
test_pred_file = os.path.join(
pred_dir,
'{}-run-{}-{}-{}-test'.format(experiment_naming, run_idx,
check_point, modality))
test_outs = detect(cas_dir, 'test', test_pred_file,
**detect_params)
if dataset_name == 'thumos14':
for IoU_idx, IoU in enumerate(
[.1, .2, .3, .4, .5, .6, .7, .8, .9]):
if args.include_train:
if len(train_outs) != 0:
temp_aps, temp_map = eval_thumos_detect(
train_pred_file, file_paths[
args.train_subset_name]['anno_dir'],
args.train_subset_name, IoU)
all_train_aps[run_idx, cp_idx, mod_idx,
IoU_idx, :] = temp_aps
all_train_map[run_idx, cp_idx, mod_idx, IoU_idx,
0] = temp_map
else:
print('Empty Detections')
all_train_aps[run_idx, cp_idx, mod_idx,
IoU_idx, :] = 0
all_train_map[run_idx, cp_idx, mod_idx, IoU_idx,
0] = 0
if len(test_outs) != 0:
temp_aps, temp_map = eval_thumos_detect(
test_pred_file,
file_paths[args.test_subset_name]['anno_dir'],
args.test_subset_name, IoU)
all_test_aps[run_idx, cp_idx, mod_idx,
IoU_idx, :] = temp_aps
all_test_map[run_idx, cp_idx, mod_idx, IoU_idx,
0] = temp_map
else:
print('Empty Detections')
all_test_aps[run_idx, cp_idx, mod_idx,
IoU_idx, :] = 0
all_test_map[run_idx, cp_idx, mod_idx, IoU_idx,
0] = 0
print('{}{}{}{}'.format(run_idx, cp_idx, mod_idx,
IoU_idx))
if dataset_name == 'thumos14':
np.savez(summary_file,
all_train_aps=all_train_aps,
all_train_map=all_train_map,
all_test_aps=all_test_aps,
all_test_map=all_test_map)
| 37.667785 | 91 | 0.494165 |
5eb76c261d120b246891730be478010f152eb00e | 3,106 | py | Python | ortools/sat/samples/step_function_sample_sat.py | bollhals/or-tools | 87cc5a1cb12d901089de0aab55f7ec50bce2cdfd | [
"Apache-2.0"
] | null | null | null | ortools/sat/samples/step_function_sample_sat.py | bollhals/or-tools | 87cc5a1cb12d901089de0aab55f7ec50bce2cdfd | [
"Apache-2.0"
] | null | null | null | ortools/sat/samples/step_function_sample_sat.py | bollhals/or-tools | 87cc5a1cb12d901089de0aab55f7ec50bce2cdfd | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements a step function."""
from ortools.sat.python import cp_model
class VarArraySolutionPrinter(cp_model.CpSolverSolutionCallback):
    """Solution callback that prints each solution found during search."""

    def __init__(self, variables):
        super().__init__()
        self._vars = variables
        self._count = 0

    def on_solution_callback(self):
        """Invoked by the solver for every solution; print the assignments."""
        self._count += 1
        assignments = ['%s=%i' % (v, self.Value(v)) for v in self._vars]
        for text in assignments:
            print(text, end=' ')
        print()

    def solution_count(self):
        """Return how many solutions have been reported so far."""
        return self._count
def step_function_sample_sat():
    """Encode a step function expr = f(x) with CP-SAT and enumerate solutions.

    Models f via indicator booleans (one per output level, each enforcing a
    domain on x) and prints every (x, expr) pair in increasing x order.
    """
    # Model.
    model = cp_model.CpModel()
    # Declare our primary variable.
    x = model.NewIntVar(0, 20, 'x')
    # Create the expression variable and implement the step function
    # Note it is not defined for x == 2.
    #
    # - 3
    # -- -- --------- 2
    # 1
    # -- --- 0
    # 0 ================ 20
    #
    expr = model.NewIntVar(0, 3, 'expr')
    # expr == 0 on [5, 6] U [8, 10]
    b0 = model.NewBoolVar('b0')
    model.AddLinearExpressionInDomain(
        x, cp_model.Domain.FromIntervals([(5, 6), (8, 10)])).OnlyEnforceIf(b0)
    model.Add(expr == 0).OnlyEnforceIf(b0)
    # expr == 2 on [0, 1] U [3, 4] U [11, 20]
    b2 = model.NewBoolVar('b2')
    model.AddLinearExpressionInDomain(
        x, cp_model.Domain.FromIntervals([(0, 1), (3, 4),
                                          (11, 20)])).OnlyEnforceIf(b2)
    model.Add(expr == 2).OnlyEnforceIf(b2)
    # expr == 3 when x == 7
    b3 = model.NewBoolVar('b3')
    model.Add(x == 7).OnlyEnforceIf(b3)
    model.Add(expr == 3).OnlyEnforceIf(b3)
    # At least one bi is true. (we could use an exactly one constraint).
    # This is what excludes x == 2: no indicator covers it.
    model.AddBoolOr([b0, b2, b3])
    # Search for x values in increasing order.
    model.AddDecisionStrategy([x], cp_model.CHOOSE_FIRST,
                              cp_model.SELECT_MIN_VALUE)
    # Create a solver and solve with a fixed search.
    solver = cp_model.CpSolver()
    # Force the solver to follow the decision strategy exactly.
    solver.parameters.search_branching = cp_model.FIXED_SEARCH
    # Enumerate all solutions.
    solver.parameters.enumerate_all_solutions = True
    # Search and print out all solutions.
    solution_printer = VarArraySolutionPrinter([x, expr])
    solver.Solve(model, solution_printer)
step_function_sample_sat()
| 32.354167 | 78 | 0.635866 |
bea72c050f57cfd91079f9b923ab2f9b039e8225 | 32,308 | py | Python | mcz3lz.py | samgregoost/self_supervised_large | 9c0c33cf374a1d5112519939012a64bca98c5f8d | [
"MIT"
] | 1 | 2021-08-25T08:32:19.000Z | 2021-08-25T08:32:19.000Z | mcz3lz.py | samgregoost/self_supervised_large | 9c0c33cf374a1d5112519939012a64bca98c5f8d | [
"MIT"
] | null | null | null | mcz3lz.py | samgregoost/self_supervised_large | 9c0c33cf374a1d5112519939012a64bca98c5f8d | [
"MIT"
] | null | null | null | from __future__ import print_function
import tensorflow as tf
import numpy as np
import random
import TensorflowUtils as utils
import read_MITSceneParsingDataParis as scene_parsing
import datetime
import BatchDatsetReaderMnist as dataset
from six.moves import xrange
import math
from scipy import signal
from scipy.interpolate import interp1d
# Command-line configuration (TensorFlow 1.x flags).
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_integer("batch_size", "20", "batch size for training")
tf.flags.DEFINE_string("logs_dir", "/scratch1/ram095/nips20/logs_mcz3lz/", "path to logs directory")
tf.flags.DEFINE_string("data_dir", "/scratch1/ram095/nips20/datasets/mnist_png/", "path to dataset")
tf.flags.DEFINE_float("learning_rate", "1e-4", "Learning rate for Adam Optimizer")
tf.flags.DEFINE_string("model_dir", "Model_zoo/", "Path to vgg model mat")
tf.flags.DEFINE_bool('debug', "False", "Debug mode: True/ False")
tf.flags.DEFINE_string('mode', "train", "Mode train/ test/ visualize")
# Pretrained VGG-19 weights in matconvnet format (consumed by vgg_net below).
MODEL_URL = 'http://www.vlfeat.org/matconvnet/models/beta16/imagenet-vgg-verydeep-19.mat'
# Total training iterations for the main loop.
MAX_ITERATION = int(1e5 + 1)
# NOTE(review): misspelling of "NUM_OF_CLASSES" kept — referenced elsewhere.
NUM_OF_CLASSESS = 3
# All images are resized to IMAGE_SIZE x IMAGE_SIZE before use.
IMAGE_SIZE = 64
def vgg_net(weights, image):
    """Build the VGG-19 convolutional trunk from pretrained matconvnet weights.

    Args:
        weights: squeezed 'layers' array from the matconvnet VGG-19 .mat file.
            Indexed positionally, so its order must match `layers` below.
        image: 4-D input tensor (batch, height, width, channels).

    Returns:
        dict mapping each layer name to its output tensor.
    """
    # Layer names double as type tags: the first 4 characters select the op
    # kind (conv / relu / pool) in the loop below.
    layers = (
        'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1',
        'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
        'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3',
        'relu3_3', 'conv3_4', 'relu3_4', 'pool3',
        'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3',
        'relu4_3', 'conv4_4', 'relu4_4', 'pool4',
        'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3',
        'relu5_3', 'conv5_4', 'relu5_4'
    )
    net = {}
    current = image
    for i, name in enumerate(layers):
        kind = name[:4]
        if kind == 'conv':
            kernels, bias = weights[i][0][0][0][0]
            # matconvnet: weights are [width, height, in_channels, out_channels]
            # tensorflow: weights are [height, width, in_channels, out_channels]
            kernels = utils.get_variable(np.transpose(kernels, (1, 0, 2, 3)), name=name + "_w")
            bias = utils.get_variable(bias.reshape(-1), name=name + "_b")
            current = utils.conv2d_basic(current, kernels, bias)
        elif kind == 'relu':
            current = tf.nn.relu(current, name=name)
            if FLAGS.debug:
                utils.add_activation_summary(current)
        elif kind == 'pool':
            # NOTE(review): uses average pooling where canonical VGG uses max
            # pooling — presumably intentional (FCN-style); confirm.
            current = utils.avg_pool_2x2(current)
        net[name] = current
    return net
# NOTE(review): dead code — the module-level triple-quoted string below (an
# unused `decoder` sketch) is never referenced at runtime; kept verbatim,
# consider deleting it.
'''
def decoder(image):
    model_data = utils.get_model_data(FLAGS.model_dir, MODEL_URL)
    mean = model_data['normalization'][0][0][0]
    mean_pixel = np.mean(mean, axis=(0, 1))
    weights = np.squeeze(model_data['layers'])
    processed_image = utils.process_image(image, mean_pixel)
    with tf.variable_scope("decoder"):
        image_net = vgg_net(weights, processed_image)
        conv_final_layer = image_net["conv5_3"]
        pool5 = utils.max_pool_2x2(conv_final_layer)
    return pool5
'''
def conv_out_size_same(size, stride):
    """Output extent of a SAME-padded strided conv: ceil(size / stride)."""
    return int(math.ceil(size / float(stride)))
def linear(input_, output_size, scope=None, stddev=0.02, bias_start=0.0, with_w=False):
    """Fully connected layer: input_ @ Matrix + bias.

    When with_w is True, also returns the weight matrix and bias variables.
    """
    in_dim = input_.get_shape().as_list()[1]
    with tf.variable_scope(scope or "Linear"):
        try:
            matrix = tf.get_variable(
                "Matrix", [in_dim, output_size], tf.float32,
                tf.random_normal_initializer(stddev=stddev))
        except ValueError as err:
            # Re-raise with a hint: shape mismatches here almost always come
            # from wrong image dimension flags.
            msg = "NOTE: Usually, this is due to an issue with the image dimensions. Did you correctly set '--crop' or '--input_height' or '--output_height'?"
            err.args = err.args + (msg,)
            raise
        bias = tf.get_variable(
            "bias", [output_size],
            initializer=tf.constant_initializer(bias_start))
        output = tf.matmul(input_, matrix) + bias
        if with_w:
            return output, matrix, bias
        return output
def deconv2d(input_, output_shape,
             k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02,
             name="deconv2d", with_w=False):
    """Transposed convolution ("deconvolution") layer.

    Args:
        input_: 4-D input tensor.
        output_shape: full output shape including batch dimension.
        k_h, k_w: kernel height/width. d_h, d_w: strides.
        stddev: stddev of the random-normal weight initializer.
        name: variable scope name.
        with_w: if True, also return the filter and bias variables.
    """
    with tf.variable_scope(name):
        # filter : [height, width, output_channels, in_channels]
        w = tf.get_variable('w', [k_h, k_w, output_shape[-1], input_.get_shape()[-1]],
                            initializer=tf.random_normal_initializer(stddev=stddev))
        try:
            deconv = tf.nn.conv2d_transpose(input_, w, output_shape=output_shape,
                                            strides=[1, d_h, d_w, 1])
        # Support for versions of TensorFlow before 0.7.0
        except AttributeError:
            deconv = tf.nn.deconv2d(input_, w, output_shape=output_shape,
                                    strides=[1, d_h, d_w, 1])
        biases = tf.get_variable('biases', [output_shape[-1]], initializer=tf.constant_initializer(0.0))
        # Reshape restores the static shape lost by conv2d_transpose.
        deconv = tf.reshape(tf.nn.bias_add(deconv, biases), deconv.get_shape())
        if with_w:
            return deconv, w, biases
        else:
            return deconv
def new_conv_layer(bottom, filter_shape, activation=tf.identity, padding='SAME', stride=1, name=None):
    """Convolution + bias + optional activation (default: identity)."""
    with tf.variable_scope(name):
        weight = tf.get_variable(
            "W",
            shape=filter_shape,
            initializer=tf.random_normal_initializer(0., 0.005))
        bias_var = tf.get_variable(
            "b",
            shape=filter_shape[-1],
            initializer=tf.constant_initializer(0.))
        strides = [1, stride, stride, 1]
        conv = tf.nn.conv2d(bottom, weight, strides, padding=padding)
        return activation(tf.nn.bias_add(conv, bias_var))
def new_deconv_layer(bottom, filter_shape, output_shape, activation=tf.identity, padding='SAME', stride=1, name=None):
    """Transposed convolution + bias + optional activation.

    Note: for conv2d_transpose the bias size is filter_shape[-2]
    (the output-channel slot of a transposed filter).
    """
    with tf.variable_scope(name):
        kernel = tf.get_variable(
            "W",
            shape=filter_shape,
            initializer=tf.random_normal_initializer(0., 0.005))
        bias_var = tf.get_variable(
            "b",
            shape=filter_shape[-2],
            initializer=tf.constant_initializer(0.))
        strides = [1, stride, stride, 1]
        deconv = tf.nn.conv2d_transpose(bottom, kernel, output_shape, strides, padding=padding)
        return activation(tf.nn.bias_add(deconv, bias_var))
def batchnorm(bottom, is_train, epsilon=1e-8, name=None):
    """Batch normalization with an exponential-moving-average fallback.

    When is_train is True, normalizes with the current batch moments and
    updates the EMA (via a control dependency); otherwise normalizes with the
    stored EMA moments.
    """
    # Clamp activations so the moment computation stays numerically stable.
    bottom = tf.clip_by_value( bottom, -100., 100.)
    depth = bottom.get_shape().as_list()[-1]
    with tf.variable_scope(name):
        gamma = tf.get_variable("gamma", [depth], initializer=tf.constant_initializer(1.))
        beta  = tf.get_variable("beta" , [depth], initializer=tf.constant_initializer(0.))
        batch_mean, batch_var = tf.nn.moments(bottom, [0,1,2], name='moments')
        ema = tf.train.ExponentialMovingAverage(decay=0.5)
        # NOTE: `update` closes over `ema_apply_op`, defined two lines below.
        # This is legal because the closure is only invoked lazily by tf.cond.
        def update():
            with tf.control_dependencies([ema_apply_op]):
                return tf.identity(batch_mean), tf.identity(batch_var)
        ema_apply_op = ema.apply([batch_mean, batch_var])
        ema_mean, ema_var = ema.average(batch_mean), ema.average(batch_var)
        mean, var = tf.cond(
                is_train,
                update,
                lambda: (ema_mean, ema_var) )
        normed = tf.nn.batch_norm_with_global_normalization(bottom, mean, var, beta, gamma, epsilon, False)
    return normed
def inference(images, keep_prob,z,is_train):
    """
    Encoder/decoder reconstruction network (inpainting-style).

    :param images: input batch, shape (batch, IMAGE_SIZE, IMAGE_SIZE, 3),
        values presumably in [-1, 1] — the callers in main() binarize/rescale.
    :param keep_prob: dropout keep probability.
    :param z: latent code placeholder, shape (batch, 1, 1, 10); L2-normalized
        and tiled to 4x4 before being concatenated with the bottleneck.
    :param is_train: bool placeholder passed to batchnorm.
    :return: (recon, debn_) — tanh reconstruction of shape
        (batch, 64, 64, 1), and the pre-concat bottleneck feature map.
    """
    # Number of stride-2 layers needed to reach a 4x4 bottleneck; the last
    # encoder layer jumps from 4x4 straight to 1x1, hence the -1.
    encoderLayerNum = int(math.log(IMAGE_SIZE) / math.log(2))
    encoderLayerNum = encoderLayerNum - 1 # minus 1 because the second last layer directly go from 4x4 to 1x1
    print("encoderLayerNum=", encoderLayerNum)
    encoderLayerNum = encoderLayerNum
    decoderLayerNum = int(math.log(IMAGE_SIZE) / math.log(2))
    decoderLayerNum = decoderLayerNum - 1
    print("decoderLayerNum=", decoderLayerNum)
    decoderLayerNum = decoderLayerNum
    print("setting up vgg initialized conv layers ...")
    #model_data = utils.get_model_data(FLAGS.model_dir, MODEL_URL)
    #mean = model_data['normalization'][0][0][0]
    #mean_pixel = np.mean(mean, axis=(0, 1))
    #weights = np.squeeze(model_data['layers'])
    #processed_image = utils.process_image(image, mean_pixel)
    with tf.variable_scope("encoder", reuse = tf.AUTO_REUSE):
        # Stride-2 conv stack: depth doubles each layer (64, 128, ...).
        previousFeatureMap = images
        previousDepth = 3
        depth = 64
        for layer in range(1, encoderLayerNum):
            print("build_reconstruction encoder layer=", layer)
            conv = tf.nn.dropout(new_conv_layer(previousFeatureMap, [4,4,previousDepth,depth], stride=2, name=("conv" + str(layer))),keep_prob)
            bn = tf.nn.leaky_relu(batchnorm(conv, is_train, name=("bn" + str(layer))))
            previousFeatureMap = bn
            previousDepth = depth
            depth = depth * 2
        # last layer
        conv = new_conv_layer(previousFeatureMap, [4,4,previousDepth,4000], stride=2, padding='VALID', name=('conv' + str(encoderLayerNum)))
        bn = tf.nn.leaky_relu(batchnorm(conv, is_train, name=("bn" + str(encoderLayerNum))))
        previousDepth = 4000
        depth = 64 * pow(2,decoderLayerNum-2)
        featureMapSize = 4
        # First decoder step runs inside the encoder scope: 1x1 -> 4x4.
        deconv = tf.nn.dropout(new_deconv_layer( bn, [4,4,depth,previousDepth], [FLAGS.batch_size,featureMapSize,featureMapSize,depth], padding='VALID', stride=2, name=("deconv" + str(decoderLayerNum))),keep_prob)
        #debn_ = tf.nn.relu(batchnorm(deconv, is_train, name=("debn" + str(decoderLayerNum))))
        # L2-normalize z, then tile it across the 4x4 bottleneck and inject it
        # by channel concatenation.
        z_ = z/tf.norm(z)
        debn_ = tf.nn.relu(batchnorm(deconv, is_train, name=("debn" + str(decoderLayerNum))))
        debn = tf.concat([debn_,tf.tile(z_,[1,4,4,1])],axis = 3)
    with tf.variable_scope("decoder", reuse = tf.AUTO_REUSE):
        print("#################################")
        print(debn)
        previousFeatureMap = debn
        # 522 = 512 bottleneck channels + 10 z channels after the concat.
        previousDepth = 522
        # NOTE(review): '/' gives a float in Python 3; this code relies on
        # Python 2 integer division semantics here — confirm runtime.
        depth = depth / 2
        featureMapSize = featureMapSize *2
        for layer in range(decoderLayerNum-1,1, -1):
            print("build_reconstruction decoder layer=", layer)
            deconv = new_deconv_layer( previousFeatureMap, [4,4,depth,previousDepth], [FLAGS.batch_size,featureMapSize,featureMapSize,depth], stride=2, name=("deconv" + str(layer)))
            debn = tf.nn.relu(batchnorm(deconv, is_train, name=('debn'+ str(layer))))
            previousFeatureMap = debn
            previousDepth = depth
            depth = depth / 2
            featureMapSize = featureMapSize *2
        # Final layer outputs a single-channel 64x64 image in [-1, 1].
        recon = tf.nn.tanh(new_deconv_layer( debn, [4,4,1,previousDepth], [FLAGS.batch_size,64,64,1], stride=2, name="recon"))
    # NOTE(review): dead code — unused fixed-size variant kept as reference.
    '''
    conv1 = new_conv_layer(images, [4,4,3,64], stride=2, name="conv1" )
    bn1 = tf.nn.leaky_relu(batchnorm(conv1, is_train, name='bn1'))
    conv2 = new_conv_layer(bn1, [4,4,64,64], stride=2, name="conv2" )
    bn2 = tf.nn.leaky_relu(batchnorm(conv2, is_train, name='bn2'))
    conv3 = new_conv_layer(bn2, [4,4,64,128], stride=2, name="conv3")
    bn3 = tf.nn.leaky_relu(batchnorm(conv3, is_train, name='bn3'))
    conv4 = new_conv_layer(bn3, [4,4,128,256], stride=2, name="conv4")
    bn4 = tf.nn.leaky_relu(batchnorm(conv4, is_train, name='bn4'))
    conv5 = new_conv_layer(bn4, [4,4,256,512], stride=2, name="conv5")
    bn5 = tf.nn.leaky_relu(batchnorm(conv5, is_train, name='bn5'))
    conv6 = new_conv_layer(bn5, [4,4,512,4000], stride=2, padding='VALID', name='conv6')
    bn6 = tf.nn.leaky_relu(batchnorm(conv6, is_train, name='bn6'))
    deconv4 = new_deconv_layer( bn6, [4,4,512,4000], conv5.get_shape().as_list(), padding='VALID', stride=2, name="deconv4")
    debn4 = tf.nn.relu(batchnorm(deconv4, is_train, name='debn4'))
    deconv3 = new_deconv_layer( debn4, [4,4,256,512], conv4.get_shape().as_list(), stride=2, name="deconv3")
    debn3 = tf.nn.relu(batchnorm(deconv3, is_train, name='debn3'))
    deconv2 = new_deconv_layer( debn3, [4,4,128,256], conv3.get_shape().as_list(), stride=2, name="deconv2")
    debn2 = tf.nn.relu(batchnorm(deconv2, is_train, name='debn2'))
    deconv1 = new_deconv_layer( debn2, [4,4,64,128], conv2.get_shape().as_list(), stride=2, name="deconv1")
    debn1 = tf.nn.relu(batchnorm(deconv1, is_train, name='debn1'))
    recon = new_deconv_layer( debn1, [4,4,3,64], [batch_size,64,64,3], stride=2, name="recon")
    print("##########################################")
    print(recon)
    '''
    return recon, debn_
def predictor_(h, z, is_train):
    """Convolutional z-predictor: maps bottleneck features h plus tiled z to a
    (batch, 1, 1, 10) code. Older variant of predictor(); kept for reference.
    """
    tiled_z = tf.tile(z, [1, 4, 4, 1])
    merged = tf.concat([h, tiled_z], axis=3)
    conv1 = new_conv_layer(merged, [3, 3, 1024, 512], stride=1, padding='VALID', name='pred_conv_1')
    activated = tf.nn.leaky_relu(batchnorm(conv1, is_train, name="pred_bn_1"))
    flat = tf.reshape(activated, [FLAGS.batch_size, -1])
    dense_out = tf.layers.dense(flat, 10)
    # Restore the (batch, 1, 1, 10) layout expected by callers.
    return tf.expand_dims(tf.expand_dims(dense_out, 1), 1)
def predictor(h, z, is_train):
    """MLP z-predictor: flattens features h and current code z, runs two
    leaky-ReLU dense layers, and returns a tanh-bounded (batch, 1, 1, 10)
    code. Variables live in a shared "predictor" scope (AUTO_REUSE).
    """
    with tf.variable_scope("predictor", reuse=tf.AUTO_REUSE):
        flat_h = tf.contrib.layers.flatten(h)
        flat_z = tf.contrib.layers.flatten(z)
        joined = tf.concat([flat_h, flat_z], axis=1, name="pred_concat")
        hidden1 = tf.nn.leaky_relu(tf.layers.dense(joined, 512), name="pred_fc1")
        hidden2 = tf.nn.leaky_relu(tf.layers.dense(hidden1, 512), name="pred_fc2")
        code = tf.expand_dims(tf.expand_dims(tf.layers.dense(hidden2, 10), 1), 1, name="pred_fc3")
        return tf.nn.tanh(code)
def train(loss_val, var_list):
    """Return an Adam training op minimizing loss_val over var_list.

    In debug mode, also records gradient summaries for each variable.
    """
    opt = tf.train.AdamOptimizer(FLAGS.learning_rate)
    grad_var_pairs = opt.compute_gradients(loss_val, var_list=var_list)
    if FLAGS.debug:
        for gradient, variable in grad_var_pairs:
            utils.add_gradient_summary(gradient, variable)
    return opt.apply_gradients(grad_var_pairs)
def train_predictor(loss_val, var_list):
    """Return an Adam training op for the z-predictor variables.

    This was a verbatim copy of train(); it now delegates to it, keeping the
    separate name only for call-site clarity.
    """
    return train(loss_val, var_list)
def train_z(loss,Z):
    """Return d(loss)/dZ — the gradient ops used to optimize the latent code
    Z directly (by momentum SGD in main()), rather than via an optimizer."""
    return tf.gradients(ys = loss, xs = Z)
def random_mask(input_size):
    """Return a random rectangular binary mask of shape (1, n, n, 1).

    The rectangle spans rows x1:w1 and columns y1:h1, with x1, y1 in [0, 10]
    and w1, h1 in [30, 54], so the masked region is at least 20x20.

    Fix: the original ignored `input_size` and hard-coded 64; the parameter is
    now honored (all existing callers pass 64, so behavior is unchanged).
    Note the rectangle bounds were tuned for input_size == 64.

    :param input_size: spatial height/width of the mask.
    :return: float ndarray of shape (1, input_size, input_size, 1) in {0, 1}.
    """
    x1 = random.randint(0, 10)
    w1 = random.randint(30, 54)
    y1 = random.randint(0, 10)
    h1 = random.randint(30, 54)
    mask = np.zeros((1, input_size, input_size, 1))
    mask[:, x1:w1, y1:h1, :] = 1.0
    return mask
def main(argv=None):
    """Build the masked-reconstruction graph, then either train
    (FLAGS.mode == 'train') or save qualitative reconstructions
    (FLAGS.mode == 'visualize').

    Training alternates: (1) optimizing the latent code z by momentum gradient
    descent on the masked reconstruction loss, and (2) training the z-predictor
    on the (old z -> updated z) transition.
    """
    # ---- Graph inputs ----
    keep_probability = tf.placeholder(tf.float32, name="keep_probabilty")
    image = tf.placeholder(tf.float32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, 3], name="input_image")
    annotation = tf.placeholder(tf.float32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, 1], name="annotation")
    z = tf.placeholder(tf.float32, shape=[None, 1, 1, 10], name="z")
    mask = tf.placeholder(tf.float32, shape=[None, 64, 64, 1], name="mask")
    z_new = tf.placeholder(tf.float32, shape=[None, 1, 1, 10], name="z_new")
    istrain = tf.placeholder(tf.bool)
    z_lip = tf.placeholder(tf.float32, shape=[None, 1, 1, 10], name="z_lip")
    z_lip_inv = tf.placeholder(tf.float32, shape=[None, 1, 1, 10], name="z_lip_inv")
    # pred_annotation, logits = inference(image, keep_probability,z)
    # tf.summary.image("input_image", image, max_outputs=2)
    # tf.summary.image("ground_truth", tf.cast(annotation, tf.uint8), max_outputs=2)
    # tf.summary.image("pred_annotation", tf.cast(pred_annotation, tf.uint8), max_outputs=2)
    # loss = tf.reduce_mean((tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits,
    #                                                                      labels=tf.squeeze(annotation, squeeze_dims=[3]),
    #                                                                    name="entropy")))
    # mask_ = tf.ones([FLAGS.batch_size,32,64,3])
    # mask = tf.pad(mask_, [[0,0],[0,32],[0,0],[0,0]])
    # mask2__ = tf.ones([FLAGS.batch_size,78,78,3])
    # mask2_ = tf.pad(mask2__, [[0,0],[25,25],[25,25],[0,0]])
    # mask2 = mask2_ - mask
    # ---- Network: reconstruction from masked input at three z values
    # (base z plus Lipschitz-perturbation copies). ----
    logits, h = inference((1-mask)*image + mask*0.0, keep_probability,z,istrain)
    logits_lip,_  =  inference((1-mask)*image + mask*0.0, keep_probability,z_lip,istrain )
    logits_lip_inv,_  =  inference((1-mask)*image + mask*0.0, keep_probability,z_lip_inv,istrain )
    z_pred = predictor(h,z,istrain)
    z_pred_lip = predictor(h,z_lip,istrain)
    z_pred_lip_inv = predictor(h,z_lip_inv,istrain)
    # logits = inference(image, keep_probability,z,istrain)
    tf.summary.image("input_image", image, max_outputs=2)
    tf.summary.image("ground_truth", tf.cast(annotation, tf.uint8), max_outputs=2)
    # tf.summary.image("pred_annotation", tf.cast(pred_annotation, tf.uint8), max_outputs=2)
    # lossz = 0.1 * tf.reduce_mean(tf.reduce_sum(tf.abs(z),[1,2,3]))
    # lossz = 0.1 * tf.reduce_mean(tf.abs(z))
    # loss_all = tf.reduce_mean(tf.sqrt(tf.reduce_sum(tf.square((image - logits)),[1,2,3])))
    # loss_all = tf.reduce_mean(tf.reduce_sum(tf.contrib.layers.flatten(tf.abs(image - logits)),1))
    # loss_mask = 0.8*tf.reduce_mean(tf.sqrt(tf.reduce_sum(tf.square((image - logits)*mask),[1,2,3])))
    # L1 reconstruction loss restricted to the masked (hidden) region.
    loss_mask = tf.reduce_mean(tf.reduce_sum(tf.contrib.layers.flatten(tf.abs((annotation - logits)*mask)),1))
    loss = loss_mask
    # loss = tf.reduce_mean(tf.squared_difference(logits ,annotation ))
    loss_summary = tf.summary.scalar("entropy", loss)
    # zloss = tf.reduce_mean(tf.losses.cosine_distance(tf.contrib.layers.flatten(z_new) ,tf.contrib.layers.flatten(z_pred),axis =1))
    # Predictor loss: match the optimized z, plus a Lipschitz smoothness term.
    zloss_ = tf.reduce_mean(tf.reduce_sum(tf.contrib.layers.flatten(tf.abs((z_pred - z_new))),1))
    zloss_lip = tf.reduce_mean(tf.reduce_sum(tf.contrib.layers.flatten(tf.abs((z_pred - z_pred_lip))),1))
    # zloss_lip_inv = -tf.reduce_mean(tf.reduce_sum(tf.contrib.layers.flatten(tf.abs((z_pred - z_pred_lip_inv))),1))
    zloss = zloss_ + 0.1* zloss_lip# + zloss_lip_inv
    # Decoder Lipschitz losses (built but only used via the commented-out
    # train_lip runs below).
    lip_loss_norm = 0.3*tf.reduce_mean(tf.reduce_sum(tf.contrib.layers.flatten(tf.abs((logits - logits_lip))),1))
    lip_loss_inv = -0.1*tf.reduce_mean(tf.reduce_sum(tf.contrib.layers.flatten(tf.abs((logits - logits_lip_inv))),1))
    lip_loss = lip_loss_norm + lip_loss_inv
    # d(loss)/dz — consumed by the momentum updates on z in the loops below.
    grads = train_z(loss_mask,z)
    trainable_var = tf.trainable_variables()
    trainable_z_pred_var = tf.trainable_variables(scope="predictor")
    trainable_d_pred_var = tf.trainable_variables(scope="decoder")
    print(trainable_z_pred_var)
    if FLAGS.debug:
        for var in trainable_var:
            utils.add_to_regularization_and_summary(var)
    train_op = train(loss, trainable_var)
    train_pred = train_predictor(zloss,trainable_z_pred_var)
    train_lip = train(lip_loss,trainable_d_pred_var)
    print("Setting up summary op...")
    summary_op = tf.summary.merge_all()
    print("Setting up image reader...")
    train_records, valid_records = scene_parsing.read_dataset(FLAGS.data_dir)
    print(len(train_records))
    print(len(valid_records))
    print("Setting up dataset reader")
    image_options = {'resize': True, 'resize_size': IMAGE_SIZE}
    if FLAGS.mode == 'train':
        train_dataset_reader = dataset.BatchDatset(train_records, image_options)
    validation_dataset_reader = dataset.BatchDatset(valid_records, image_options)
    sess = tf.Session()
    print("Setting up Saver...")
    saver = tf.train.Saver()
    # create two summary writers to show training loss and validation loss in the same graph
    # need to create two folders 'train' and 'validation' inside FLAGS.logs_dir
    train_writer = tf.summary.FileWriter(FLAGS.logs_dir + '/train', sess.graph)
    validation_writer = tf.summary.FileWriter(FLAGS.logs_dir + '/validation')
    sess.run(tf.global_variables_initializer())
    ckpt = tf.train.get_checkpoint_state(FLAGS.logs_dir)
    if ckpt and ckpt.model_checkpoint_path:
        saver.restore(sess, ckpt.model_checkpoint_path)
        print("Model restored...")
    saved =True
    if FLAGS.mode == "train":
        for itr in xrange(MAX_ITERATION):
            train_images, train_annotations = train_dataset_reader.next_batch(FLAGS.batch_size)
            print(np.max(train_images))
            # z_ = np.reshape(signal.gaussian(200, std=1),(FLAGS.batch_size,1,1,10))-0.5
            z_ = np.random.uniform(low=-1.0, high=1.0, size=(FLAGS.batch_size,1,1,10))
            # Binarize inputs to {-1, 1}.
            train_images[train_images < 0.] = -1.
            train_annotations[train_annotations < 0.] = -1.
            train_images[train_images >= 0.] = 1.0
            train_annotations[train_annotations >= 0.] = 1.0
            x1 = random.randint(0, 10)
            w1 = random.randint(30, 54)
            y1 = random.randint(0, 10)
            h1 = random.randint(30, 54)
            cond = random.randint(0, 10)
            # saved = True
            # With ~27% probability, composite the batch with the top half of
            # a second random batch (data augmentation).
            if cond <=2:
                saved = False
                train_images_m, train_annotations_m = train_dataset_reader.get_random_batch(FLAGS.batch_size)
                train_images_m[train_images_m < 0.] = -1.
                train_annotations_m[train_annotations_m < 0.] = -1.
                train_images_m[train_images_m >= 0.] = 1.0
                train_annotations_m[train_annotations_m >= 0.] = 1.0
                train_images = (train_images + 1.)/2.0*255.0
                train_annotations = (train_annotations + 1.)/2.0*255.0
                train_images_m = (train_images_m + 1.)/2.0*255.0
                train_annotations_m = (train_annotations_m + 1.)/2.0*255.0
                train_images_m[:,32:,:,:] = 0
                train_annotations_m[:,32:,:,:] = 0
                train_images = np.clip((train_images + train_images_m),0.0,255.0)
                train_annotations = np.clip((train_annotations + train_annotations_m),0.0,255.0)
            '''
            train_images[train_images < 0.] = -1.
            train_annotations[train_annotations < 0.] = -1.
            train_images[train_images >= 0.] = 1.0
            train_annotations[train_annotations >= 0.] = 1.0
            '''
            train_annotations_ = np.squeeze(train_annotations,axis = 3)
            train_images_ = train_images
            # NOTE(review): when the compositing branch above is skipped, the
            # batch is still in {-1, 1} here, so this rescale maps it to
            # roughly {-1.008, -0.992} rather than [-1, 1] — confirm intended.
            train_images = train_images/127.5 - 1.0
            train_annotations = train_annotations/127.5 - 1.0
            # for itr_ in range(FLAGS.batch_size):
            #     utils.save_image(train_images_[itr_].astype(np.uint8), FLAGS.logs_dir, name="inp_" + str(5+itr_) )
            #     utils.save_image(train_annotations_[itr_].astype(np.uint8), FLAGS.logs_dir, name="gt_" + str(5+itr_) )
            # train_images[:,x1:w1,y1:h1,:] = 0
            # print(train_images)
            r_m = random_mask(64)
            feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 0.85, z: z_,mask:r_m, istrain:True }
            #train_images[:,50:100,50:100,:] =0
            v = 0
            # print(train_images)
            z_l = z_ + np.random.normal(0.0,0.001)
            # z_l_inv = z_ + np.random.normal(0.0,0.1)
            # feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 0.85, z: z_, z_lip:z_l, z_lip_inv:z_l_inv, mask:r_m, istrain:True }
            # z_l = z_ + np.random.normal(0.0,0.001)
            # lloss,_ = sess.run([lip_loss, train_lip ], feed_dict=feed_dict)
            # z_l = z_ + np.random.normal(0.0,0.001)
            # print("Step: %d, lip_loss:%g" % (itr,lloss))
            # Inner loop: 20 momentum-SGD steps on z, each followed by a
            # predictor update on the (old z -> new z) transition.
            for p in range(20):
                z_ol = np.copy(z_)
                # z_l = z_ol + np.random.normal(0.0,0.001)
                # print("666666666666666666666666666666666666666")
                feed_dict = {image: train_images, annotation: train_annotations, keep_probability: 0.85, z: z_, mask:r_m, istrain:True }
                # lloss,_ = sess.run([lip_loss, train_lip ], feed_dict=feed_dict)
                # print("Step: %d, z_step: %d, lip_loss:%g" % (itr,p,lloss))
                z_loss, summ = sess.run([loss,loss_summary], feed_dict=feed_dict)
                print("Step: %d, z_step: %d, Train_loss:%g" % (itr,p,z_loss))
                # print(z_)
                g = sess.run([grads],feed_dict=feed_dict)
                v_prev = np.copy(v)
                # print(g[0][0].shape)
                # Nesterov-style momentum update of the latent code.
                v = 0.001*v - 0.1*g[0][0]
                z_ += 0.001 * v_prev + (1+0.001)*v
                z_ = np.clip(z_, -20.0, 20.0)
                z_l = z_ + np.random.normal(0.0,0.001)
                z_l = np.clip(z_l, -20.0, 20.0)
                # z_l_inv = z_ + np.random.normal(0.0,0.1)
                # Rescale codes from [-20, 20] to [-1, 1] for the predictor.
                m = interp1d([-20.0,20.0],[-1.0,1.0])
                print(np.max(z_))
                print(np.min(z_))
                z_ol_interp = m(z_ol)
                z_interp = m(z_)
                z_l_interp = m(z_l)
                # z_l_inv_interp = m(z_l_inv)
                _,z_pred_loss =sess.run([train_pred,zloss],feed_dict={image: train_images,mask:r_m,z:z_ol_interp,z_new:z_interp,z_lip:z_l_interp,istrain:True,keep_probability: 0.85})
                print("Step: %d, z_step: %d, z_pred_loss:%g" % (itr,p,z_pred_loss))
                # _,z_pred_loss =sess.run([train_pred,zloss],feed_dict={image: train_images,mask:r_m,z:z_ol,z_new:z_,istrain:True,keep_probability: 0.85})
                # print("Step: %d, z_step: %d, z_pred_loss:%g" % (itr,p,z_pred_loss))
                # z_ = np.clip(z_, -1.0, 1.0)
                # print(v.shape)
                # print(z_.shape)
            # NOTE(review): train_op is never run here, so the main network is
            # only updated indirectly via train_pred — confirm intended.
            # feed_dict = {image: train_images, annotation: train_annotations, keep_probability:0.85,mask:r_m, z: z_, istrain:True }
            # sess.run(train_op, feed_dict=feed_dict)
            if itr % 10 == 0:
                train_loss, summary_str = sess.run([loss, loss_summary], feed_dict=feed_dict)
                print("Step: %d, Train_loss:%g" % (itr, train_loss))
                train_writer.add_summary(summary_str, itr)
            if itr % 500 == 0:
                valid_images, valid_annotations = validation_dataset_reader.next_batch(FLAGS.batch_size)
                valid_annotations[valid_annotations < 0.] = -1.
                valid_images[valid_images < 0.] = -1.
                valid_annotations[valid_annotations >= 0.] = 1.0
                valid_images[valid_images >= 0.] = 1.0
                x1 = random.randint(0, 10)
                w1 = random.randint(30, 54)
                y1 = random.randint(0, 10)
                h1 = random.randint(30, 54)
                # valid_images[:,x1:w1,y1:h1,:] = 0
                valid_loss, summary_sva = sess.run([loss, loss_summary], feed_dict={image: valid_images,mask:random_mask(64), annotation: valid_annotations,
                                                       keep_probability: 1.0, z: z_, istrain:False })
                print("%s ---> Validation_loss: %g" % (datetime.datetime.now(), valid_loss))
                # add validation loss to TensorBoard
                validation_writer.add_summary(summary_sva, itr)
                # NOTE(review): global_step is hard-coded to 500, so every
                # save overwrites the same checkpoint file.
                saver.save(sess, FLAGS.logs_dir + "model_z_center.ckpt", 500)
    elif FLAGS.mode == "visualize":
        # ---- Qualitative evaluation: optimize z for held-out images and save
        # masked inputs, reconstructions, and ground truth. ----
        valid_images, valid_annotations = validation_dataset_reader.get_random_batch(20)
        valid_annotations[valid_annotations < 0.] = -1.0
        valid_images[valid_images < 0.] = -1.0
        valid_annotations[valid_annotations >= 0.] = 1.0
        valid_images[valid_images >= 0.] = 1.0
        x1 = random.randint(0, 10)
        w1 = random.randint(30, 54)
        y1 = random.randint(0, 10)
        h1 = random.randint(30, 54)
        # valid_images[:,x1:w1,y1:h1,:] = 0
        r_m = random_mask(64)
        # z_ = np.zeros(low=-1.0, high=1.0, size=(FLAGS.batch_size,1,1,10))
        # z_ = np.reshape(signal.gaussian(200, std=1),(FLAGS.batch_size,1,1,10))-0.5
        z_ = np.random.uniform(low=-1.0, high=1.0, size=(FLAGS.batch_size,1,1,10))
        feed_dict = {image: valid_images, annotation: valid_annotations, keep_probability: 0.85, z: z_, istrain:False,mask:r_m }
        v= 0
        # feed_dict = {image: valid_images, annotation: valid_annotations, keep_probability: 0.85, z: z_, istrain:False,mask:r_m }
        # Same momentum-SGD refinement of z as in training (no weight updates).
        for p in range(20):
            z_ol = np.copy(z_)
            # print("666666666666666666666666666666666666666")
            # print(z_)
            feed_dict = {image: valid_images, annotation: valid_annotations, keep_probability: 0.85, z: z_, istrain:False,mask:r_m }
            z_loss, summ = sess.run([loss,loss_summary], feed_dict=feed_dict)
            print("z_step: %d, Train_loss:%g" % (p,z_loss))
            # z_, z_pred_loss = sess.run(z_pred,zlossfeed_dict = {image: valid_images, annotation: valid_annotations, keep_probability: 1.0, z:z_ol, istrain:False,mask:r_m})
            # print(z_)
            g = sess.run([grads],feed_dict=feed_dict)
            v_prev = np.copy(v)
            # print(g[0][0].shape)
            v = 0.001*v - 0.1*g[0][0]
            z_ = z_ol +  0.001 * v_prev + (1+0.001)*v
            # z_ = z_ol + 0.001 * v_prev + (1+0.001)*v
            # print("z_____________")
            # print(z__)
            # print("z_")
            # print(z_)
            # z_ = sess.run(z_pred,feed_dict = {image: valid_images, annotation: valid_annotations, keep_probability: 0.85, z:z_ol, istrain:False,mask:r_m})
            # z_ = np.clip(z_, -1.0, 1.0)
            # print(z_pred_loss)
        pred = sess.run(logits, feed_dict={image: valid_images, annotation: valid_annotations,z:z_, istrain:False,mask:r_m,
                                                    keep_probability: 0.85})
        # Compose final output: keep visible pixels, fill mask with prediction.
        valid_images_masked = ((1-r_m)*valid_images + 1.)/2.0*255
        # valid_images = (valid_images +1.)/2.0*255
        # predicted_patch = sess.run(mask) * pred
        # pred = valid_images_masked + predicted_patch
        pred_ = (np.squeeze(pred, axis=3)+1.)/2.0*255
        # pred = pred + 1./2.0*255
        pred = valid_images_masked[:,:,:,0] *(1-r_m)[:,:,:,0]  + pred_ * r_m[:,:,:,0]
        valid_annotations_ = (np.squeeze(valid_annotations, axis=3)+1.)/2.0*255
        # pred = np.squeeze(pred, axis=3)
        print(np.max(pred))
        print(valid_images.shape)
        print(valid_annotations.shape)
        print(pred.shape)
        # for itr in range(FLAGS.batch_size):
        #     utils.save_image(valid_images_masked[itr].astype(np.uint8), FLAGS.logs_dir, name="inp_" + str(5+itr))
        #     utils.save_image(valid_annotations[itr].astype(np.uint8), FLAGS.logs_dir, name="gt_" + str(5+itr))
        #     utils.save_image(pred[itr].astype(np.uint8), FLAGS.logs_dir, name="predz_" + str(5+itr))
        #     utils.save_image(valid_images_masked[itr].astype(np.uint8), FLAGS.logs_dir, name="inp_" + str(5+itr)+'_' + str(p) )
        #     utils.save_image(valid_annotations_[itr].astype(np.uint8), FLAGS.logs_dir, name="gt_" + str(5+itr)+'_' + str(p) )
        #     utils.save_image(pred[itr].astype(np.uint8), FLAGS.logs_dir, name="predz_" + str(5+itr)+'_' + str(p) )
        #     print("Saved image: %d" % itr)
        for itr in range(FLAGS.batch_size):
            utils.save_image(valid_images_masked[itr].astype(np.uint8), FLAGS.logs_dir, name="inp_" + str(5+itr) )
            utils.save_image(pred[itr].astype(np.uint8), FLAGS.logs_dir, name="predz_" + str(5+itr)  )
            utils.save_image(valid_annotations_[itr].astype(np.uint8), FLAGS.logs_dir, name="gt_" + str(5+itr) )
# Script entry point: tf.app.run() parses the FLAGS defined above, then
# calls main().
if __name__ == "__main__":
    tf.app.run()
| 46.687861 | 214 | 0.611613 |
37e2c4aac5cf22301ff5c0d27d36ad438937ad23 | 442 | py | Python | dthm4kaiako/ara_ako/urls.py | uccser/dthm4kaiako | 3a3b209a7b428a6f5bd136b6b431f49d7e8a2555 | [
"MIT"
] | 3 | 2018-12-10T07:03:02.000Z | 2021-04-12T02:18:30.000Z | dthm4kaiako/ara_ako/urls.py | uccser/dthm4kaiako | 3a3b209a7b428a6f5bd136b6b431f49d7e8a2555 | [
"MIT"
] | 566 | 2018-09-30T02:54:28.000Z | 2022-03-28T01:20:01.000Z | dthm4kaiako/ara_ako/urls.py | uccser/dthm4kaiako | 3a3b209a7b428a6f5bd136b6b431f49d7e8a2555 | [
"MIT"
] | 3 | 2019-04-04T19:53:39.000Z | 2021-05-16T02:04:46.000Z | """URL routing for Ara Ako application."""
from django.urls import path
from ara_ako import views
# URL namespace for reverse() / {% url %} lookups (e.g. "ara_ako:home").
app_name = 'ara_ako'
# Django matches patterns in order, so the literal 'json/dashboard/' route
# must stay above the '<slug:slug>/' catch-all patterns.
urlpatterns = [
    path('', views.AraAkoHomeView.as_view(), name='home'),
    path('json/dashboard/', views.dashboard_json, name='dashboard_json'),
    path('<slug:slug>/', views.AraAkoEventDetailView.as_view(), name='event'),
    path('<slug:slug>/dashboard/', views.AraAkoDashboardView.as_view(), name='dashboard'),
]
| 34 | 90 | 0.708145 |
c55f25c966047c2714b63d9483215dba9f9f99f9 | 9,248 | py | Python | tests/test_attributed_graphs.py | zeno129/CSAG | 79debf2ef88a8054e0b1c74fd73faf731300c219 | [
"BSD-2-Clause"
] | 1 | 2018-07-27T11:33:24.000Z | 2018-07-27T11:33:24.000Z | tests/test_attributed_graphs.py | zeno129/CSAG | 79debf2ef88a8054e0b1c74fd73faf731300c219 | [
"BSD-2-Clause"
] | null | null | null | tests/test_attributed_graphs.py | zeno129/CSAG | 79debf2ef88a8054e0b1c74fd73faf731300c219 | [
"BSD-2-Clause"
] | null | null | null | # import itertools
# import operator
import random
import numpy as np
from kronecker import mKPGM as model
from graph import sampling
from scipy.stats.stats import pearsonr
# def test_graph_sampling_binomial():
# """
# Test dumb version:
# Create graph and get random attributes back.
# Use binomial distribution.
# """
# b = 2
# k = 5
# l = 2
# theta = [[0.7, 0.4], [0.4, 0.5]]
# g = model.mKPGM(theta, k, b, l)
# mymodel = {'name': "KPGM", 'k': k, 'b': b, 'l': l}
#
# x = list(np.random.random_integers(low=0, high=1, size=g.vertices))
#
# _, attributes = sampling.graph_sampling(graphIn=(range(g.vertices), g.edges),
# xIn=x,
# model=mymodel,
# epsilon=0.0,
# distribution="binomial")
#
# # TODO: calculate with graphOut
# n = g.vertices
# p = 0.5
# mean = n * p
# variance = mean * (1 - p)
#
# assert len(attributes) == g.vertices
# assert attributes.count(1) <= mean + variance
# assert attributes.count(1) >= mean - variance
#
#
# def test_graph_sampling_multinomial():
# """
# Test dumb version:
# Create graph and get random attributes back.
# Use binomial distribution.
# """
# b = 2
# k = 5
# l = 2
# theta = [[0.7, 0.4], [0.4, 0.5]]
# g = model.mKPGM(theta, k, b, l)
# mymodel = {'name': "KPGM", 'k': k, 'b': b, 'l': l}
#
# x = list(np.random.random_integers(low=0, high=3, size=g.vertices))
#
# _, attributes = sampling.graph_sampling(graphIn=(range(g.vertices), g.edges),
# xIn=x,
# model=mymodel,
# epsilon=0.0,
# distribution="multinomial")
#
# # TODO: calculate with graphOut
# n = g.vertices
# p = 0.25
# mean = n * p
# variance = mean * (1 - p)
#
# assert len(attributes) == g.vertices
# assert attributes.count(1) <= mean + variance
# assert attributes.count(1) >= mean - variance
# def test_maxent_edge_sampling():
# # model = "mKPGM"
# mymodel = {'name': "KPGM", 'k': None, 'b': None, 'l': None}
# theta = [0.7, 0.4, 0.4, 0.5]
# num_nodes = 2 ** 4
#
# # TODO: get real block from mKPGM model
# block = [ np.random.choice(theta, num_nodes) for i in range(num_nodes)]
# psi = [(0,0), (0,1), (1,0), (1,1)]
# p = 0.25
# probs = [p,p,p,p]
# tmp = np.random.multinomial(n=1, pvals=probs, size=num_nodes)
# xOut = [t[0] for t in tmp]
#
# vertices, edges = sampling.maxent_edge_sampling(mymodel,theta,block,psi,probs,xOut)
# edge_labels = [(xOut[u], xOut[v]) for u,v in edges]
#
# # TODO: test for all probabilities
# n = vertices
# mean = n * p
# variance = mean * (1 - p)
#
# assert vertices == num_nodes
# assert edge_labels.count(psi[0]) <= mean + variance
# assert edge_labels.count(psi[0]) >= mean - variance
def test_graph_sampling_binomial_dumb_no_learning():
    """Smoke-test graph_sampling with a binomial label distribution.

    Builds an mKPGM graph, feeds it a deterministic half-0/half-1 label
    vector, and for a range of edge-type fractions (``beta``) checks that the
    number of 1-labels returned stays within a one-variance band around the
    expected Binomial(n, 0.5) mean.
    """
    b = 2
    k = 5
    l = 3
    theta = [[0.7, 0.4], [0.4, 0.5]]
    n = pow(b, k)
    g = model.mKPGM(theta, k, b, l)
    mymodel = {'name': "mKPGM", 'K': k, 'b': b, 'l': l, 'theta': theta}
    # Deterministic half/half label assignment (no shuffling, hence "dumb").
    # Bug fix: use floor division -- on Python 3, n / 2 is a float and
    # list-repetition with a float raises TypeError.
    x = [0] * (n // 2)
    x.extend([1] * (n // 2))
    # beta = fraction of edges of each label-pair type (00, 01, 10, 11).
    tries = [[0.25, 0.25, 0.25, 0.25],
             [0.45, 0.05, 0.05, 0.45],
             [0.48, 0.01, 0.01, 0.48],
             [0.97, 0.01, 0.01, 0.01],
             [0.05, 0.45, 0.45, 0.05]]
    for beta in tries:
        params_test = {"beta": beta, "thetaG": theta, "last_block": False}
        graphOut, xOut = sampling.graph_sampling(graphIn=(g.vertices, g.edges),
                                                 xIn=x,
                                                 model=mymodel,
                                                 epsilon=0.1,
                                                 distribution="binomial",
                                                 params_test=params_test)
        # Expected count of 1-labels under Binomial(n, p).
        p = 0.5
        mean = n * p
        variance = mean * (1 - p)
        # TODO: figure out how to assert on edge/label correlation as well.
        assert len(xOut) == n
        assert xOut.count(1) <= mean + variance
        assert xOut.count(1) >= mean - variance
# def test_graph_sampling_binomial_no_learning():
# """
# Test dumb version:
# Create graph and get random attributes back.
# Use binomial distribution.
# """
# b = 2
# k = 3
# # k = 10
# l = 2
# theta = [[0.7, 0.4], [0.4, 0.5]]
# n = pow(b, k)
#
# g = model.mKPGM(theta, k, b, l)
# mymodel = {'name': "mKPGM", 'K': k, 'b': b, 'l': l, 'theta': theta}
#
# # x = list(np.random.random_integers(low=0, high=1, size=n))
#
# x = [0] * (n / 2)
# x.extend([1] * (n / 2))
# # random.shuffle(x)
#
# # TODO: specify beta directly
# # beta = fraction of edges of each type
# tries = [[0.25, 0.25, 0.25, 0.25],
# # [0.4375, 0.125, 0.125, 0.4375],
# [0.45, 0.05, 0.05, 0.45],
# [0.48, 0.01, 0.01, 0.48],
# [0.97, 0.01, 0.01, 0.01],
# [0.05, 0.45, 0.45, 0.05]]
# for beta in tries:
# params_test = {"beta": beta, "thetaG": theta, "last_block": False}
# # graphOut, xOut = sampling.graph_sampling(graphIn=(None, None),
# graphOut, xOut = sampling.graph_sampling(graphIn=(g.vertices, g.edges),
# xIn=x,
# model=mymodel,
# epsilon=0.1,
# distribution="binomial",
# params_test=params_test)
#
# with open("correlations.txt", "a") as myfile:
# corr = calc_correlation(graphOut.edges, xOut)
# myfile.write("beta = {}\t\tcorrelation = {}\n\n".format(beta, corr))
#
# # TODO: calculate with graphOut
# p = 0.5
# mean = n * p
# variance = mean * (1 - p)
#
# # TODO: figure out how to test correlation
# assert len(xOut) == n
#
# # assert calc_correlation(graphOut[1], xOut) == 0.5
# assert xOut.count(1) <= mean + variance
# assert xOut.count(1) >= mean - variance
# def test_multiple_random_graphs():
# b = 2
# # k = 5
# k = 10
# l = 2
# n = pow(b, k)
#
# probs = [0.9, 0.8, 0.7, 0.6, 0.5, 0.4, 0.3] # v2
# # probs = [0.7, 0.5, 0.3] # v2
# # probs = [0.5, 0.4, 0.3] # v3
# for p in probs:
# theta = [[p, p], [p, p]]
#
# mymodel = {'name': "mKPGM", 'K': k, 'b': b, 'l': l, 'theta': theta}
#
# # x = list(np.random.random_integers(low=0, high=1, size=n))
# x = [0] * (n/2)
# x.extend([1] * (n/2))
# random.shuffle(x)
#
# graphOut, xOut = sampling.graph_sampling(graphIn=(None, None),
# xIn=x,
# model=mymodel,
# epsilon=0.0,
# distribution="binomial",
# thetaG=theta)
#
# x_prob = 0.5
# mean = n * x_prob
# variance = n * x_prob * (1 - x_prob)
#
# # TODO: figure out how to test correlation
# assert len(xOut) == n
# # assert calc_correlation(graphOut[1], xOut) == 0.5
#
# # with open("correlations.txt", "a") as myfile:
# # corr = calc_correlation(graphOut.edges, xOut)
# # myfile.write("theta = {}\t\tcorrelation = {}\n\n".format(theta, corr))
#
# assert xOut.count(1) <= mean + variance
# assert xOut.count(1) >= mean - variance
# def test_range_of_graphs():
# b = 2
# # k = 5
# k = 10
# l = 2
# n = pow(b, k)
#
# theta_11 = [0.99, 0.95, 0.9, 0.85, 0.8]
# theta_12 = [0.55, 0.45, 0.35, 0.25, 0.15]
# theta_22 = [0.75, 0.65, 0.55, 0.45, 0.35]
# # % theta_22 = [0.75 0.7 0.65 0.6 0.55];
def calc_correlation(edges, labels):
    """Return the Pearson correlation of endpoint labels across edges.

    :param edges: iterable of (u, v) node-index tuples
    :param labels: sequence of node labels, indexed by node id
    :return: Pearson correlation coefficient as a plain float
    """
    heads = [labels[u] for u, _ in edges]
    tails = [labels[v] for _, v in edges]
    return float(pearsonr(heads, tails)[0])
| 32.111111 | 89 | 0.479022 |
0c4244d0b05ddb056d5a4b154fd9f16b0a9d504b | 2,711 | py | Python | ch05/ans49.py | upura/nlp100v2020 | 37d4d208d5d527d163356793b630f36eb7595779 | [
"MIT"
] | 66 | 2020-04-07T13:27:45.000Z | 2022-01-10T10:43:08.000Z | ch05/ans49.py | upura/nlp100v2020 | 37d4d208d5d527d163356793b630f36eb7595779 | [
"MIT"
] | 2 | 2021-04-30T21:11:02.000Z | 2022-01-13T02:33:08.000Z | ch05/ans49.py | upura/nlp100v2020 | 37d4d208d5d527d163356793b630f36eb7595779 | [
"MIT"
] | 12 | 2020-04-10T16:26:10.000Z | 2022-02-06T06:17:22.000Z | class Morph:
def __init__(self, dc):
self.surface = dc['surface']
self.base = dc['base']
self.pos = dc['pos']
self.pos1 = dc['pos1']
class Chunk:
    """One dependency-parsed phrase: its morphemes plus dependency links."""
    def __init__(self, morphs, dst):
        self.srcs = []        # indices of chunks that depend on this one
        self.dst = dst        # index of the chunk this one depends on
        self.morphs = morphs  # list of Morph objects making up the chunk
def parse_cabocha(block):
    # Parse one CaboCha-formatted sentence block into a list of Chunk objects.
    # A block consists of header lines ("* <idx> <dst>D ...") that open a new
    # chunk and tab-separated token lines describing one morpheme each.
    def check_create_chunk(tmp):
        # Flush the morphemes accumulated so far into a Chunk (if any exist)
        # and hand back a fresh accumulator.
        if len(tmp) > 0:
            c = Chunk(tmp, dst)
            res.append(c)
            tmp = []
        return tmp
    res = []
    tmp = []
    dst = None
    for line in block.split('\n'):
        if line == '':
            tmp = check_create_chunk(tmp)
        elif line[0] == '*':
            # Chunk header: the third field is the dependency target, e.g. "5D".
            # NOTE(review): dst is updated *before* the pending chunk is
            # flushed, so the flushed chunk receives the dst value of the
            # following header -- confirm this matches the intended CaboCha
            # layout (headers precede their tokens).
            dst = line.split(' ')[2].rstrip('D')
            tmp = check_create_chunk(tmp)
        else:
            # Token line: "surface<TAB>pos,pos1,...,base,..." (MeCab fields).
            (surface, attr) = line.split('\t')
            attr = attr.split(',')
            lineDict = {
                'surface': surface,
                'base': attr[6],
                'pos': attr[0],
                'pos1': attr[1]
            }
            tmp.append(Morph(lineDict))
    # Back-fill reverse links: record each chunk as a source of its target.
    # NOTE(review): when r.dst == '-1' (root) this appends to res[-1];
    # presumably harmless for the later traversal, but worth confirming.
    for i, r in enumerate(res):
        res[int(r.dst)].srcs.append(i)
    return res
def convert(s):
    # For each ordered pair of noun-containing chunks (i, j) in sentence s,
    # build the dependency path between them (NLP-100 exercise 49 format):
    # the first noun chunk is masked as "X", the second as "Y".
    # nl = chunks containing at least one noun ('名詞'); pl = resulting paths.
    pl, nl = [], [c for c in s if '名詞' in [m.pos for m in c.morphs]]
    for i in range(len(nl) - 1):
        # Surface form of chunk i with its nouns masked as 'X'.
        st1 = [''.join([m.surface if m.pos != '名詞' else 'X' for m in nl[i].morphs])]
        for e in nl[i + 1:]:
            dst, p = nl[i].dst, []
            # Surface form of chunk e with its nouns masked as 'Y'.
            st2 = [''.join([m.surface if m.pos != '名詞' else 'Y' for m in e.morphs])]
            # Walk the dependency chain from chunk i toward the root,
            # collecting intermediate chunks in p.
            # NOTE(review): dst is a str while s.index(e) is an int, so
            # `dst != s.index(e)` can never compare equal -- confirm whether
            # the direct-path case is actually reachable through this test.
            while int(dst) != -1 and dst != s.index(e):
                p.append(s[int(dst)])
                dst = s[int(dst)].dst
            if len(p) < 1 or p[-1].dst != -1:
                # Case 1: e lies on i's path to the root -> "X -> ... -> Y".
                # Symbols ('記号') are dropped from the surface forms.
                mid = [''.join([m.surface for m in c.morphs if m.pos != '記号']) for c in p]
                pl.append(st1 + mid + ['Y'])
            else:
                # Case 2: the paths from i and from e meet at a common
                # ancestor -> emit (path-from-X, path-from-Y, meeting chunk).
                mid, dst = [], e.dst
                while not s[int(dst)] in p:
                    mid.append(''.join([m.surface for m in s[int(dst)].morphs if m.pos != '記号']))
                    dst = s[int(dst)].dst
                ed = [''.join([m.surface for m in s[int(dst)].morphs if m.pos != '記号'])]
                pl.append([st1, st2 + mid, ed])
    return pl
# Script entry: parse the CaboCha output file sentence by sentence and print
# the noun-pair dependency paths for each sentence.
filename = 'ch05/ai.ja.txt.cabocha'
with open(filename, mode='rt', encoding='utf-8') as f:
    blocks = f.read().split('EOS\n')
blocks = list(filter(lambda x: x != '', blocks))
blocks = [parse_cabocha(block) for block in blocks]
for b in blocks:
    pl = (convert(b))
    for p in pl:
        if isinstance(p[0], str):
            # Case 1 path: a flat list of strings "X -> ... -> Y".
            print(' -> '.join(p))
        else:
            # Case 2 path: [path-from-X, path-from-Y, meeting chunk],
            # printed as "X... | Y... | common ancestor".
            print(p[0][0], ' -> '.join(p[1]), p[2][0], sep=' | ')
83de17f53185ed1eb47ea8067313a79135246f91 | 2,161 | py | Python | aliyun-python-sdk-mts/aliyunsdkmts/request/v20140618/QueryCensorJobListRequest.py | jia-jerry/aliyun-openapi-python-sdk | e90f3683a250cfec5b681b5f1d73a68f0dc9970d | [
"Apache-2.0"
] | null | null | null | aliyun-python-sdk-mts/aliyunsdkmts/request/v20140618/QueryCensorJobListRequest.py | jia-jerry/aliyun-openapi-python-sdk | e90f3683a250cfec5b681b5f1d73a68f0dc9970d | [
"Apache-2.0"
] | 1 | 2020-05-31T14:51:47.000Z | 2020-05-31T14:51:47.000Z | aliyun-python-sdk-mts/aliyunsdkmts/request/v20140618/QueryCensorJobListRequest.py | jia-jerry/aliyun-openapi-python-sdk | e90f3683a250cfec5b681b5f1d73a68f0dc9970d | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkmts.endpoint import endpoint_data
class QueryCensorJobListRequest(RpcRequest):
	"""RPC request wrapper for the MTS ``QueryCensorJobList`` API (2014-06-18).

	Auto-generated SDK scaffolding: each get_/set_ pair delegates to the
	query-parameter dict maintained by the RpcRequest base class.
	"""

	def __init__(self):
		RpcRequest.__init__(self, 'Mts', '2014-06-18', 'QueryCensorJobList')
		# Endpoint attributes only exist on newer SDK cores; populate when present.
		if hasattr(self, "endpoint_map"):
			setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
		if hasattr(self, "endpoint_regional"):
			setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

	def get_ResourceOwnerId(self):
		# Returns the ResourceOwnerId query parameter, or None if unset.
		return self.get_query_params().get('ResourceOwnerId')

	def set_ResourceOwnerId(self,ResourceOwnerId):
		self.add_query_param('ResourceOwnerId',ResourceOwnerId)

	def get_ResourceOwnerAccount(self):
		return self.get_query_params().get('ResourceOwnerAccount')

	def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
		self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)

	def get_OwnerAccount(self):
		return self.get_query_params().get('OwnerAccount')

	def set_OwnerAccount(self,OwnerAccount):
		self.add_query_param('OwnerAccount',OwnerAccount)

	def get_OwnerId(self):
		return self.get_query_params().get('OwnerId')

	def set_OwnerId(self,OwnerId):
		self.add_query_param('OwnerId',OwnerId)

	def get_JobIds(self):
		# Comma-separated list of censor job ids to query.
		return self.get_query_params().get('JobIds')

	def set_JobIds(self,JobIds):
		self.add_query_param('JobIds',JobIds)
413a52c5a3b946ea8f414703c997e77b1209aca3 | 323 | py | Python | tests/test_examples.py | Mrfranken/PyVirtualDisplay | b3d45024d1d55033af69d1af29f56fc04e4e4209 | [
"BSD-2-Clause"
] | null | null | null | tests/test_examples.py | Mrfranken/PyVirtualDisplay | b3d45024d1d55033af69d1af29f56fc04e4e4209 | [
"BSD-2-Clause"
] | null | null | null | tests/test_examples.py | Mrfranken/PyVirtualDisplay | b3d45024d1d55033af69d1af29f56fc04e4e4209 | [
"BSD-2-Clause"
] | null | null | null | import logging
import time
from easyprocess import EasyProcess
from pyvirtualdisplay.display import Display
log = logging.getLogger(__name__)
VISIBLE = 0
def test_screenshot3():
with Display(visible=VISIBLE):
with EasyProcess("python -m pyvirtualdisplay.examples.screenshot3"):
time.sleep(1)
| 17.944444 | 76 | 0.74613 |
961381aa37f92fd1ad78f0dd016edfb1fc7fdcec | 1,408 | py | Python | backend/tasker_business/api/v1/viewsets.py | crowdbotics-apps/my-beer-tracker-28497 | 02b5da836901a6070616afc717884f36a5cd9261 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/tasker_business/api/v1/viewsets.py | crowdbotics-apps/my-beer-tracker-28497 | 02b5da836901a6070616afc717884f36a5cd9261 | [
"FTL",
"AML",
"RSA-MD"
] | 26 | 2021-08-02T20:17:22.000Z | 2022-01-23T13:45:59.000Z | backend/tasker_business/api/v1/viewsets.py | crowdbotics-apps/news-29341 | 9581a5b0d691753d2d28003c0f4e4c9baf16d93a | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | from rest_framework import authentication
from tasker_business.models import (
TaskerSkill,
BusinessPhoto,
Timeslot,
TaskerAvailability,
)
from .serializers import (
TaskerSkillSerializer,
BusinessPhotoSerializer,
TimeslotSerializer,
TaskerAvailabilitySerializer,
)
from rest_framework import viewsets
class TimeslotViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for Timeslot records."""
    queryset = Timeslot.objects.all()
    serializer_class = TimeslotSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
class TaskerSkillViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for TaskerSkill records."""
    queryset = TaskerSkill.objects.all()
    serializer_class = TaskerSkillSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
class TaskerAvailabilityViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for TaskerAvailability records."""
    queryset = TaskerAvailability.objects.all()
    serializer_class = TaskerAvailabilitySerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
class BusinessPhotoViewSet(viewsets.ModelViewSet):
    """CRUD API endpoints for BusinessPhoto records."""
    queryset = BusinessPhoto.objects.all()
    serializer_class = BusinessPhotoSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
| 27.607843 | 55 | 0.764205 |
c11c4cf1fa3b26216b1db203cb912fdb4704b82f | 3,843 | py | Python | Ago-Dic-2018/Genesis Perez/proyecto_final/webScrapping.py | angelicardz/DAS_Sistemas | e2a69fec358f0fad4fe05c39ea6168c89eed41ac | [
"MIT"
] | 41 | 2017-09-26T09:36:32.000Z | 2022-03-19T18:05:25.000Z | Ago-Dic-2018/Genesis Perez/proyecto_final/webScrapping.py | angelicardz/DAS_Sistemas | e2a69fec358f0fad4fe05c39ea6168c89eed41ac | [
"MIT"
] | 67 | 2017-09-11T05:06:12.000Z | 2022-02-14T04:44:04.000Z | Ago-Dic-2018/Genesis Perez/proyecto_final/webScrapping.py | angelicardz/DAS_Sistemas | e2a69fec358f0fad4fe05c39ea6168c89eed41ac | [
"MIT"
] | 210 | 2017-09-01T00:10:08.000Z | 2022-03-19T18:05:12.000Z | import requests, os, errno, sqlite3, json
from bs4 import BeautifulSoup
# Module-level SQLite connection and cursor, shared by both classes below.
cnx = sqlite3.connect('taqueria.db')
cursor = cnx.cursor()
class scrappingTacos():
    # NOTE(review): everything below runs at class-definition time (there is
    # no method wrapper), so importing this module performs 50 HTTP requests
    # and writes to the database as a side effect -- confirm this is intended.
    url = 'http://taco-randomizer.herokuapp.com/'
    for i in range(1,51):  # taco ids 1..50
        idtaco = int(i)
        cursor.execute("INSERT INTO tacos (ID) VALUES (?)",(idtaco,))
        cnx.commit()
        # i is the id of each taco in the tacos table.
        # Request the randomizer page (a new random taco each time).
        req = requests.get(url)
        # Parse the page's HTML into a BeautifulSoup() object.
        soup = BeautifulSoup(req.text, 'html.parser')
        # Locate the tags holding the entries we need.
        nombresTacos = soup.find_all('h1',{'class':'light'})
        subrecetas = soup.find_all('div',{'class':'twelve columns'})
        # Walk the entries to extract the taco name, sub-recipes,
        # contributors and tags.
        # NOTE(review): this inner loop reuses the outer loop variable `i`;
        # harmless here only because the outer value is saved in idtaco.
        for i, nombre in enumerate(nombresTacos):
            # taco is the full name of the taco.
            taco = nombre.getText()
            cursor.execute("Update tacos SET Nombre_Taco=? WHERE ID=?",(taco,idtaco))
            cnx.commit()
        for i, subreceta in enumerate(subrecetas):
            receta = subreceta.find('div', {'class':'recipe'}).getText()
            # Find the substring "tags:"; returns its starting index (-1 if absent).
            indiceTags = receta.find("tags:")
            # Slice off the "tags:..." tail: keep characters 0..indiceTags.
            recetaSinTags = receta[:indiceTags]
            # recetaSinTags is only the recipe name and its instructions.
            # Keep the "tags:" substring and everything after it; being the
            # last line, no end bound is needed.
            tags = receta[indiceTags:]
            contribuidores = subreceta.find('h6', {'class':'light'}).getText()
            # The five recipe sections appear in a fixed page order:
            # base layer, mixin, condiment, seasoning, shell.
            if i==0:
                cursor.execute("UPDATE tacos SET BaseLayer=?,Contribuidores_BL=?,Tags_BL=? WHERE ID=?",(recetaSinTags,contribuidores,tags,idtaco))
                cnx.commit()
            if i==1:
                cursor.execute("UPDATE tacos SET Mixin=?,Contribuidores_Mix=?,Tags_Mix=? WHERE ID=?",(recetaSinTags,contribuidores,tags,idtaco))
                cnx.commit()
            if i==2:
                cursor.execute("UPDATE tacos SET Condiment=?,Contribuidores_Cond=?,Tags_Cond=? WHERE ID=?",(recetaSinTags,contribuidores,tags,idtaco))
                cnx.commit()
            if i==3:
                cursor.execute("UPDATE tacos SET Seasoning=?,Contribuidores_Seas=?,Tags_Seas=? WHERE ID=?",(recetaSinTags,contribuidores,tags,idtaco))
                cnx.commit()
            if i==4:
                cursor.execute("UPDATE tacos SET Shell=?,Contribuidores_Shell=?,Tags_Shell=? WHERE ID=?",(recetaSinTags,contribuidores,tags,idtaco))
                cnx.commit()
    # NOTE(review): the shared connection is closed here, but the Clientes
    # class below still uses the module-level cursor -- confirm the intended
    # execution order.
    cnx.close()
class Clientes():
    def crearClientes():
        # Seed 20 fake customers from the randomuser.me API into `clientes`.
        # NOTE(review): defined without `self`; presumably always invoked as
        # Clientes.crearClientes() -- confirm against call sites.
        for i in range(1,21):
            idcliente = i
            url = 'https://randomuser.me/api/'
            request = requests.get(url)
            datos = request.json()['results']
            # Full name = first + last from the API payload.
            nombrecliente = datos[0]['name'].get("first") + " " + datos[0]['name'].get("last")
            genero = datos[0]['gender']
            celular = datos[0]['cell']
            direccion = datos[0]['location'].get("street")
            cursor.execute("INSERT INTO clientes (ID,Nombre,Genero,Celular,Direccion) VALUES (?,?,?,?,?)",(idcliente, nombrecliente, genero, celular, direccion))
            cnx.commit()
        # NOTE(review): closes the module-level connection after seeding.
        cnx.close()
| 56.514706 | 197 | 0.59719 |
9a84449b1c58dedd39be09e85185d1b665561ae7 | 8,381 | py | Python | platform/gsutil/gslib/tests/test_signurl.py | bopopescu/google-cloud-sdk | b34e6a18f1e89673508166acce816111c3421e4b | [
"Apache-2.0"
] | null | null | null | platform/gsutil/gslib/tests/test_signurl.py | bopopescu/google-cloud-sdk | b34e6a18f1e89673508166acce816111c3421e4b | [
"Apache-2.0"
] | null | null | null | platform/gsutil/gslib/tests/test_signurl.py | bopopescu/google-cloud-sdk | b34e6a18f1e89673508166acce816111c3421e4b | [
"Apache-2.0"
] | 1 | 2020-07-24T20:04:47.000Z | 2020-07-24T20:04:47.000Z | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for signurl command."""
from datetime import timedelta
import pkgutil
import gslib.commands.signurl
from gslib.commands.signurl import HAVE_OPENSSL
from gslib.exception import CommandException
import gslib.tests.testcase as testcase
from gslib.tests.testcase.integration_testcase import SkipForS3
from gslib.tests.util import ObjectToURI as suri
from gslib.tests.util import unittest
# pylint: disable=protected-access
@unittest.skipUnless(HAVE_OPENSSL, 'signurl requires pyopenssl.')
@SkipForS3('Signed URLs are only supported for gs:// URLs.')
class TestSignUrl(testcase.GsUtilIntegrationTestCase):
  """Integration tests for signurl command."""

  def _GetKsFile(self):
    """Lazily creates and caches a temp copy of the test PKCS12 keystore."""
    if not hasattr(self, 'ks_file'):
      # Dummy pkcs12 keystore generated with the command
      # openssl req -new -passout pass:notasecret -batch \
      #    -x509 -keyout signed_url_test.key -out signed_url_test.pem \
      #    -subj '/CN=test.apps.googleusercontent.com'
      # &&
      # openssl pkcs12 -export -passin pass:notasecret \
      #    -passout pass:notasecret -inkey signed_url_test.key \
      #    -in signed_url_test.pem -out test.p12
      # &&
      # rm signed_url_test.key signed_url_test.pem
      contents = pkgutil.get_data('gslib', 'tests/test_data/test.p12')
      self.ks_file = self.CreateTempFile(contents=contents, open_wb=True)
    return self.ks_file

  def testSignUrlOutput(self):
    """Tests signurl output of a sample object."""
    object_url = self.CreateObject(contents='z')
    stdout = self.RunGsUtil(['signurl', '-p', 'notasecret',
                             self._GetKsFile(), suri(object_url)],
                            return_stdout=True)
    # Signed URL must reference the object, the service account, an
    # expiration, and the GET method column.
    self.assertIn(object_url.uri, stdout)
    self.assertIn('test@developer.gserviceaccount.com', stdout)
    self.assertIn('Expires=', stdout)
    self.assertIn('\tGET\t', stdout)

    stdout = self.RunGsUtil(['signurl', '-m', 'PUT', '-p',
                             'notasecret', self._GetKsFile(),
                             'gs://test/test.txt'], return_stdout=True)
    self.assertIn('test@developer.gserviceaccount.com', stdout)
    self.assertIn('Expires=', stdout)
    self.assertIn('\tPUT\t', stdout)

  def testSignUrlWithWildcard(self):
    """Tests that one signed URL is emitted per wildcard-matched object."""
    objs = ['test1', 'test2', 'test3']
    bucket = self.CreateBucket()
    obj_urls = []

    for obj_name in objs:
      obj_urls.append(self.CreateObject(bucket_uri=bucket,
                                        object_name=obj_name, contents=''))

    stdout = self.RunGsUtil(['signurl', '-p',
                             'notasecret', self._GetKsFile(),
                             suri(bucket) + '/*'], return_stdout=True)

    # Header, 3 signed urls, trailing newline.
    # Fix: assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(len(stdout.split('\n')), 5)

    for obj_url in obj_urls:
      self.assertIn(suri(obj_url), stdout)

  def testSignUrlOfNonObjectUrl(self):
    """Tests the signurl output of a non-existent file."""
    self.RunGsUtil(['signurl', self._GetKsFile(), 'gs://'],
                   expected_status=1, stdin='notasecret')
    self.RunGsUtil(['signurl', 'file://tmp/abc'], expected_status=1)
self.RunGsUtil(['signurl', 'file://tmp/abc'], expected_status=1)
@unittest.skipUnless(HAVE_OPENSSL, 'signurl requires pyopenssl.')
class UnitTestSignUrl(testcase.GsUtilUnitTestCase):
  """Unit tests for the signurl command."""

  def setUp(self):
    """Loads the bundled PKCS12 test keystore for each test."""
    super(UnitTestSignUrl, self).setUp()
    self.ks_contents = pkgutil.get_data('gslib', 'tests/test_data/test.p12')

  def testDurationSpec(self):
    """Tests duration-spec parsing; a None expectation means 'must raise'."""
    tests = [('1h', timedelta(hours=1)),
             ('2d', timedelta(days=2)),
             ('5D', timedelta(days=5)),
             ('35s', timedelta(seconds=35)),
             ('1h', timedelta(hours=1)),
             ('33', timedelta(hours=33)),
             ('22m', timedelta(minutes=22)),
             ('3.7', None),
             ('27Z', None),
            ]

    for inp, expected in tests:
      try:
        td = gslib.commands.signurl._DurationToTimeDelta(inp)
        # Fix: assertEquals is a deprecated alias; use assertEqual.
        self.assertEqual(td, expected)
      except CommandException:
        if expected is not None:
          # Bug fix: the original called self.fail('{0} failed to parse')
          # without .format(inp), printing the literal placeholder.
          self.fail('{0} failed to parse'.format(inp))

  def testSignPut(self):
    """Tests the return value of the _GenSignedUrl function with \
a PUT method."""
    expected = ('https://storage.googleapis.com/test/test.txt?'
                'GoogleAccessId=test@developer.gserviceaccount.com'
                '&Expires=1391816302&Signature=A6QbgTA8cXZCtjy2xCr401bdi0e'
                '7zChTBQ6BX61L7AfytTGEQDMD%2BbvOQKjX7%2FsEh77cmzcSxOEKqTLUD'
                'bbkPgPqW3j8sGPSRX9VM58bgj1vt9yU8cRKoegFHXAqsATx2G5rc%2FvEl'
                'iFp9UWMfVj5TaukqlBAVuzZWlyx0aQa9tCKXRtC9YcxORxG41RfiowA2kd8'
                'XBTQt4M9XTzpVyr5rVMzfr2LvtGf9UAJvlt8p6T6nThl2vy9%2FwBoPcMFa'
                'OWQcGTagwjyKWDcI1vQPIFQLGftAcv3QnGZxZTtg8pZW%2FIxRJrBhfFfcA'
                'c62hDKyaU2YssSMy%2FjUJynWx3TIiJjhg%3D%3D')

    expiration = 1391816302
    ks, client_id = (gslib.commands.signurl
                     ._ReadKeystore(self.ks_contents, 'notasecret'))

    signed_url = (gslib.commands.signurl
                  ._GenSignedUrl(ks.get_privatekey(),
                                 client_id, 'PUT', '',
                                 '', expiration, 'test/test.txt'))
    self.assertEqual(expected, signed_url)

  def testSignurlPutContentype(self):
    """Tests the return value of the _GenSignedUrl function with \
a PUT method and specified content type."""
    expected = ('https://storage.googleapis.com/test/test.txt?'
                'GoogleAccessId=test@developer.gserviceaccount.com&'
                'Expires=1391816302&Signature=APn%2BCCVcQrfc1fKQXrs'
                'PEZFj9%2FmASO%2BolR8xwgBY6PbWMkcCtrUVFBauP6t4NxqZO'
                'UnbOFYTZYzul0RC57ZkEWJp3VcyDIHcn6usEE%2FTzUHhbDCDW'
                'awAkZS7p8kO8IIACuJlF5s9xZmZzaEBtzF0%2BBOsGgBPBlg2y'
                'zrhFB6cyyAwNiUgmhLQaVkdobnSwtI5QJkvXoIjJb6hhLiVbLC'
                'rWdgSZVusjAKGlWCJsM%2B4TkCR%2Bi8AnrkECngcMHuJ9mYbS'
                'XI1VfEmcnRVcfkKkJGZGctaDIWK%2FMTEmfYCW6USt3Zk2WowJ'
                'SGuJHqEcFz0kyfAlkpmG%2Fl5E1FQROYqLN2kZQ%3D%3D')

    expiration = 1391816302
    ks, client_id = (gslib.commands.signurl
                     ._ReadKeystore(self.ks_contents,
                                    'notasecret'))

    signed_url = (gslib.commands.signurl
                  ._GenSignedUrl(ks.get_privatekey(),
                                 client_id, 'PUT', '',
                                 'text/plain', expiration,
                                 'test/test.txt'))
    self.assertEqual(expected, signed_url)

  def testSignurlGet(self):
    """Tests the return value of the _GenSignedUrl function with \
a GET method."""
    expected = ('https://storage.googleapis.com/test/test.txt?'
                'GoogleAccessId=test@developer.gserviceaccount.com&'
                'Expires=0&Signature=TCZwe32cU%2BMksmLiSY9shHXQjLs1'
                'F3y%2F%2F1M0UhiK4qsPRVNZVwI7YWvv2qa2Xa%2BVBBafboF0'
                '1%2BWvx3ZG316pwpNIRR6y7jNnE0LvQmHE8afbm2VYCi%2B2JS'
                'ZK2YZFJAyEek8si53jhYQEmaRq1zPfGbX84B2FJ8v4iI%2FTC1'
                'I9OE5vHF0sWwIR9d73JDrFLjaync7QYFWRExdwvqlQX%2BPO3r'
                'OG9Ns%2BcQFIN7npnsVjH28yNY9gBzXya8LYmNvUx6bWHWZMiu'
                'fLwDZ0jejNeDZTOfQGRM%2B0vY7NslzaT06W1wo8P7McSkAZEl'
                'DCbhR0Vo1fturPMwmAhi88f0qzRzywbg%3D%3D')

    expiration = 0
    ks, client_id = (gslib.commands.signurl
                     ._ReadKeystore(self.ks_contents,
                                    'notasecret'))

    signed_url = (gslib.commands.signurl
                  ._GenSignedUrl(ks.get_privatekey(),
                                 client_id, 'GET', '',
                                 '', expiration, 'test/test.txt'))
    self.assertEqual(expected, signed_url)
| 41.490099 | 77 | 0.640377 |
b42447a3499ebd1414377b9f0eee8836e42ae555 | 5,935 | py | Python | src/cerebral/pack1/main.py | bobbyluig/eclipse | ed2d3ed40b878eaddaf8997749fde5a11428964e | [
"MIT"
] | 6 | 2018-12-11T19:09:57.000Z | 2022-03-04T00:28:40.000Z | src/cerebral/pack1/main.py | bobbyluig/Eclipse | ed2d3ed40b878eaddaf8997749fde5a11428964e | [
"MIT"
] | null | null | null | src/cerebral/pack1/main.py | bobbyluig/Eclipse | ed2d3ed40b878eaddaf8997749fde5a11428964e | [
"MIT"
] | 1 | 2020-07-10T07:20:04.000Z | 2020-07-10T07:20:04.000Z | #!/usr/bin/env python3.5
from cerebral import logger as l
import asyncio
import logging
import ssl
import time
import os
import socket
from autobahn.asyncio.wamp import ApplicationSession
from autobahn import wamp
from autobahn.wamp import auth
from shared.autoreconnect import ApplicationRunner
from concurrent.futures import ThreadPoolExecutor
from cerebral.pack1.hippocampus import Crossbar
import Pyro4
from cerebral.nameserver import lookup
logger = logging.getLogger('universe')
class Cerebral(ApplicationSession):
    """WAMP session bridging the Crossbar router to the robot's Pyro workers."""

    def __init__(self, *args, **kwargs):
        # Path for dynamic spawning.
        self.root = os.path.dirname(__file__)

        # Get loop.
        self.loop = asyncio.get_event_loop()

        # Get remote objects URI.
        self.super_agility = lookup('worker1', 'super_agility')
        self.super_ares = lookup('worker3', 'super_ares')

        # Create a thread executor for slightly CPU-bound async functions.
        self.executor = ThreadPoolExecutor(20)

        # Init parent.
        super().__init__(*args, **kwargs)

    def onConnect(self):
        # Begin WAMP-CRA authentication as the configured authid.
        logger.info('Connected to server.')
        self.join(self.config.realm, ['wampcra'], Crossbar.authid)

    def onChallenge(self, challenge):
        # Answer the router's WAMP-CRA challenge using the shared secret.
        logger.debug('Challenge received.')

        if challenge.method == 'wampcra':
            if 'salt' in challenge.extra:
                # Salted secret: derive the signing key first.
                key = auth.derive_key(Crossbar.secret.encode(),
                                      challenge.extra['salt'].encode(),
                                      challenge.extra.get('iterations', None),
                                      challenge.extra.get('keylen', None))
            else:
                key = Crossbar.secret.encode()

            signature = auth.compute_wcs(key, challenge.extra['challenge'])
            return signature.decode('ascii')
        else:
            raise Exception('Unknown challenge method: %s' % challenge.method)

    async def onJoin(self, details):
        logger.info('Joined "%s" realm.' % self.config.realm)

        # Register all procedures.
        self.register(self)

        # Start logging.
        self.run(self.watch_logging)

    def onDisconnect(self):
        logger.info('Connection lost!')

        # Stop robot.
        # NOTE(review): stop() is a coroutine; calling it here creates an
        # un-awaited coroutine object -- confirm this actually stops the robot.
        self.stop()

    ####################
    # Special functions.
    ####################

    def run(self, fn, *args, **kwargs):
        # Off-load a blocking callable to the thread pool and return an
        # awaitable wrapping the resulting future.
        return asyncio.wrap_future(self.executor.submit(fn, *args, **kwargs))

    ########################
    # Main remote functions.
    ########################

    @wamp.register('{}.emergency'.format(Crossbar.prefix))
    async def em_on(self):
        # Trigger the agility worker's emergency stop.
        super_agility = Pyro4.Proxy(self.super_agility)
        await self.run(super_agility.emergency)
        return True

    @wamp.register('{}.read_rfid'.format(Crossbar.prefix))
    async def read_rfid(self):
        # Read a tag via the ares worker; False signals "nothing read".
        super_ares = Pyro4.Proxy(self.super_ares)
        future = self.run(super_ares.read)
        data = await future

        if data is None:
            return False
        else:
            return data

    @wamp.register('{}.lift_leg'.format(Crossbar.prefix))
    async def lift_leg(self, leg, lift, t):
        super_agility = Pyro4.Proxy(self.super_agility)
        future = self.run(super_agility.lift_leg, leg, lift, t)
        success = await future
        return success

    @wamp.register('{}.target_point'.format(Crossbar.prefix))
    async def target_point(self, leg, x, y, z, t):
        super_agility = Pyro4.Proxy(self.super_agility)
        future = self.run(super_agility.target_point, leg, (x, y, z), t)
        success = await future
        return success

    @wamp.register('{}.set_vector'.format(Crossbar.prefix))
    async def set_vector(self, a, b):
        # Reject out-of-range motion vectors before touching the worker.
        if abs(a) > 15.0 or abs(b) > 2.0:
            return False

        super_agility = Pyro4.Proxy(self.super_agility)
        await self.run(super_agility.set_vector, (a, b))
        return True

    @wamp.register('{}.set_head'.format(Crossbar.prefix))
    async def set_head(self, a, b):
        super_agility = Pyro4.Proxy(self.super_agility)
        await self.run(super_agility.set_head, (a, b))
        return True

    @wamp.register('{}.stop'.format(Crossbar.prefix))
    async def stop(self):
        super_agility = Pyro4.Proxy(self.super_agility)
        future = self.run(super_agility.stop)
        success = await future
        return success

    @wamp.register('{}.pushup'.format(Crossbar.prefix))
    async def pushup(self):
        super_agility = Pyro4.Proxy(self.super_agility)
        future = self.run(super_agility.start_pushup)
        success = await future
        return success

    @wamp.register('{}.start_watch'.format(Crossbar.prefix))
    async def start_watch(self):
        super_agility = Pyro4.Proxy(self.super_agility)
        future = self.run(super_agility.start_watch)
        success = await future
        return success

    @wamp.register('{}.zero'.format(Crossbar.prefix))
    async def zero(self):
        super_agility = Pyro4.Proxy(self.super_agility)
        future = self.run(super_agility.zero)
        success = await future
        return success

    #####################
    # Blocking functions.
    #####################

    def watch_logging(self):
        # Forever forward log messages from the shared Pyro queue to the
        # router's log topic. Runs inside the thread pool (see onJoin).
        uri = lookup('database', 'logging')
        queue = Pyro4.Proxy(uri)

        while True:
            message = queue.get()
            topic = '{}.log'.format(Crossbar.prefix)
            self.publish(topic, *message)
if __name__ == '__main__':
    # Configure SSL.
    # NOTE(review): hostname checking is disabled and the certificate is
    # fetched from the server itself (trust-on-first-use) -- acceptable only
    # on a trusted network.
    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    context.check_hostname = False
    pem = ssl.get_server_certificate((Crossbar.ip, 443))
    context.load_verify_locations(cadata=pem)

    # Create application runner.
    runner = ApplicationRunner(url='wss://%s/ws' % Crossbar.ip, realm=Crossbar.realm,
                               ssl=context)

    # Run forever (auto-reconnecting WAMP session).
    runner.run(Cerebral)
| 31.402116 | 85 | 0.615501 |
7b0c26909f2a84316d8c881f220453096598c8ca | 1,704 | py | Python | neutron_lbaas/common/cert_manager/barbican_auth/barbican_acl.py | 2020human/neutron-lbaas | c5acb45ff6a8c2c41b84bdb2406450731491cad8 | [
"Apache-2.0"
] | null | null | null | neutron_lbaas/common/cert_manager/barbican_auth/barbican_acl.py | 2020human/neutron-lbaas | c5acb45ff6a8c2c41b84bdb2406450731491cad8 | [
"Apache-2.0"
] | null | null | null | neutron_lbaas/common/cert_manager/barbican_auth/barbican_acl.py | 2020human/neutron-lbaas | c5acb45ff6a8c2c41b84bdb2406450731491cad8 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2014-2016 Rackspace US, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Barbican ACL auth class for Barbican certificate handling
"""
from barbicanclient import client as barbican_client
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from neutron_lbaas._i18n import _LE
from neutron_lbaas.common.cert_manager.barbican_auth import common
from neutron_lbaas.common import keystone
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
class BarbicanACLAuth(common.BarbicanAuth):
    # Class-wide singleton client, created lazily on first use.
    _barbican_client = None

    @classmethod
    def get_barbican_client(cls, project_id=None):
        """Return the shared Barbican client, creating it on first call.

        :param project_id: accepted for interface compatibility but unused;
            the ACL strategy always authenticates with the service
            credentials from the ``service_auth`` config section.
        """
        if not cls._barbican_client:
            try:
                cls._barbican_client = barbican_client.Client(
                    session=keystone.get_session(),
                    region_name=CONF.service_auth.region,
                    interface=CONF.service_auth.endpoint_type
                )
            except Exception:
                # Log, then re-raise the original exception unchanged.
                with excutils.save_and_reraise_exception():
                    LOG.exception(_LE("Error creating Barbican client"))
        return cls._barbican_client
| 34.77551 | 78 | 0.705399 |
05eedb0b6cca739f9bde166c3773aab7b2dde410 | 403 | py | Python | scripts/figures/figure7/pipeswitch_inception_v3/host_run_data.py | CcTtry/PipeSwitch | c6d632ee20b6dbbaea9a6fb95b9ea0ed4bbbf67e | [
"Apache-2.0"
] | null | null | null | scripts/figures/figure7/pipeswitch_inception_v3/host_run_data.py | CcTtry/PipeSwitch | c6d632ee20b6dbbaea9a6fb95b9ea0ed4bbbf67e | [
"Apache-2.0"
] | null | null | null | scripts/figures/figure7/pipeswitch_inception_v3/host_run_data.py | CcTtry/PipeSwitch | c6d632ee20b6dbbaea9a6fb95b9ea0ed4bbbf67e | [
"Apache-2.0"
] | null | null | null | import os
import sys
from scripts.common.util import RunRemoteRepo, import_server_list
def main():
    """Run remote_run_data.sh for PipeSwitch inception_v3 on the first server.

    The single command-line argument is the path to the server list file.
    """
    servers = import_server_list(sys.argv[1])
    # Execute against the first listed server, on the 'dev' branch.
    with RunRemoteRepo(servers[0], 'dev') as remote:
        remote.run("bash ~/PipeSwitch/scripts/figures/figure7/pipeswitch_inception_v3/remote_run_data.sh")
if __name__ == '__main__':
main() | 26.866667 | 104 | 0.722084 |
3caef82bda04c11f3e5dc7a8d1cdddcec9abb6de | 7,623 | py | Python | Python/Sorting/FraudulentActivityNotifications/FraudulentActivityNotifications.py | zseen/hackerrank-challenges | c154f039f58073ee3d94d012462c7055e68784b2 | [
"MIT"
] | null | null | null | Python/Sorting/FraudulentActivityNotifications/FraudulentActivityNotifications.py | zseen/hackerrank-challenges | c154f039f58073ee3d94d012462c7055e68784b2 | [
"MIT"
] | null | null | null | Python/Sorting/FraudulentActivityNotifications/FraudulentActivityNotifications.py | zseen/hackerrank-challenges | c154f039f58073ee3d94d012462c7055e68784b2 | [
"MIT"
] | null | null | null | #!/bin/python3
import sys
import unittest
MAX_MONEY_SPENT_A_DAY = 200
def getNumCounter(numbersList, maxNumber):
    """Build a counting-sort table: index -> occurrences in numbersList.

    Every element is expected to lie in [0, maxNumber]; larger values
    raise IndexError.
    """
    tally = [0 for _ in range(maxNumber + 1)]
    for value in numbersList:
        tally[value] = tally[value] + 1
    return tally
def getMedianOdd(counter, numItems):
    """Median of an odd-sized multiset encoded as a counting table."""
    # The middle element is the (numItems // 2 + 1)-th smallest value.
    rankWanted = numItems // 2 + 1
    cumulative = 0
    for value, occurrences in enumerate(counter):
        cumulative += occurrences
        if cumulative >= rankWanted:
            return value
def findNextNonZeroIndex(numsList, startFrom):
    """Return the first index >= startFrom whose entry is non-zero.

    :raises IndexError: when no non-zero entry exists at or after startFrom.
    """
    # Bug fix: the range previously ran to len(numsList) + 1, so when no
    # non-zero element existed the final iteration indexed one past the end
    # and raised a bare "list index out of range" IndexError instead of the
    # intended descriptive one below.
    for index in range(startFrom, len(numsList)):
        if numsList[index] > 0:
            return index
    raise IndexError("No non-zero element has been found")
def getMedianEven(counter, numItems):
    """Median of an even-sized multiset encoded as a counting table.

    Returns the mean of the two middle values: a float when they differ,
    the shared int value when they coincide.
    """
    lowerMiddleRank = numItems // 2
    cumulative = 0
    for value, occurrences in enumerate(counter):
        cumulative += occurrences
        if cumulative < lowerMiddleRank:
            continue
        if cumulative >= lowerMiddleRank + 1:
            # Both middle elements fall on this value.
            return value
        # Only the lower middle element is here; its partner is the next
        # value that actually occurs in the table.
        partner = findNextNonZeroIndex(counter, value + 1)
        return (value + partner) / 2
def getMedian(counter, numItems):
    """Median of a multiset encoded as a counting table (parity dispatch)."""
    chooser = getMedianOdd if numItems % 2 else getMedianEven
    return chooser(counter, numItems)
def countNotifications(moneySpentDaily, daysPrior):
    """Count fraud notifications: a notification fires on any day whose
    spending is at least twice the median of the preceding daysPrior days.
    """
    notifications = 0
    # Counting-sort window seeded with the first daysPrior days.
    window = getNumCounter(moneySpentDaily[:daysPrior], MAX_MONEY_SPENT_A_DAY)
    lastDay = len(moneySpentDaily) - 1
    for day in range(daysPrior, len(moneySpentDaily)):
        spentToday = moneySpentDaily[day]
        if spentToday >= 2 * getMedian(window, daysPrior):
            notifications += 1
        if day < lastDay:
            # Slide the window one day forward: drop the oldest, add today.
            window[moneySpentDaily[day - daysPrior]] -= 1
            window[spentToday] += 1
    return notifications
def main():
    """Read the bundled sample input file and print the notification count."""
    # Redirect stdin so the HackerRank-style input() calls read the file.
    sys.stdin = open('FraudulentActivityNotifications_input.txt')
    # First line: "<number of days> <trailing window size>".
    # daysNumData (the declared day count) is read but not otherwise needed.
    daysNumData, daysPrior = input().strip().split(' ')
    daysNumData, daysPrior = [int(daysNumData), int(daysPrior)]
    # Second line: the daily spending amounts.
    moneySpentDaily = list(map(int, input().strip().split(' ')))
    notifications = countNotifications(moneySpentDaily, daysPrior)
    print(notifications)
class TestNotificationCount(unittest.TestCase):
    """Unit tests for countNotifications and getMedian.

    Cleanups relative to the original version:
    * the ``__init__`` override only forwarded to the parent and was removed;
    * ``assertEqual`` / ``assertRaises`` (context-manager form) replace the
      ``assertTrue(x == y)`` anti-pattern, so a failing test reports the
      expected and actual values instead of just "False is not true".
    """

    def test_countNotifications_moreDaysPriorThanDays_0notifications(self):
        self.assertEqual(countNotifications([1, 2, 3, 4, 5], 6), 0)

    def test_countNotifications_sameMoneySpent_0notifications(self):
        self.assertEqual(countNotifications([2, 2, 2, 2, 2], 3), 0)

    def test_countNotifications_evenDaysPrior_0notifications(self):
        self.assertEqual(countNotifications([1, 2, 3, 4, 4], 4), 0)

    def test_countNotifications_oddDaysPrior_1notifications(self):
        self.assertEqual(countNotifications([1, 2, 3, 4, 5], 3), 1)

    def test_countNotifications_evenDaysPriorDifferentElementsInMiddle_1notification(self):
        self.assertEqual(countNotifications([1, 2, 3, 4, 4, 7], 4), 1)

    def test_countNotifications_evenDaysPriorSameElementInMiddle_1notification(self):
        self.assertEqual(countNotifications([1, 2, 4, 4, 4, 8], 4), 1)

    def test_countNotifications_slightlyLongerArray_2notifications(self):
        self.assertEqual(countNotifications([2, 3, 4, 2, 3, 6, 8, 4, 5], 5), 2)

    def test_countNotifications_incrementingElements_0notifications(self):
        self.assertEqual(
            countNotifications([2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], 3), 0)

    def test_countNotifications_tooLargeNumber_raisesIndexError(self):
        # 1000 exceeds MAX_MONEY_SPENT_A_DAY, overflowing the counting table.
        with self.assertRaises(IndexError):
            countNotifications([2, 3, 4, 5, 6, 7, 8, 1000, 9, 10, 11, 12, 13], 3)

    def test_countNotifications_tooLargeNumbers_raisesIndexError(self):
        with self.assertRaises(IndexError):
            countNotifications([10000, 10001, 15000, 20000, 27000, 39000], 2)

    def test_getMedian_oddNumElements_shortArray(self):
        counter = getNumCounter([20, 40, 30], MAX_MONEY_SPENT_A_DAY)
        self.assertEqual(getMedian(counter, 3), 30)

    def test_getMedian_oddNumberElements_longerArray(self):
        counter = getNumCounter([20, 78, 1, 1, 1, 20, 40, 30, 199], MAX_MONEY_SPENT_A_DAY)
        self.assertEqual(getMedian(counter, 9), 20)

    def test_getMedian_oddNumberElements_sameNumbers(self):
        counter = getNumCounter([45, 45, 45, 45, 45], MAX_MONEY_SPENT_A_DAY)
        self.assertEqual(getMedian(counter, 5), 45)

    def test_getMedian_oddNumberElements_onlyBigNumbers(self):
        counter = getNumCounter([200, 198, 197, 199, 196], MAX_MONEY_SPENT_A_DAY)
        self.assertEqual(getMedian(counter, 5), 198)

    def test_getMedian_evenNumberElements_2DifferentMedNums_medFraction(self):
        counter = getNumCounter([2, 3, 3, 2], MAX_MONEY_SPENT_A_DAY)
        self.assertEqual(getMedian(counter, 4), 2.5)

    def test_getMedian_evenNumberElements_biggerRangeNums2DiffMedNums_medWhole(self):
        counter = getNumCounter([4, 20, 62, 3, 100, 0], MAX_MONEY_SPENT_A_DAY)
        self.assertEqual(getMedian(counter, 6), 12)

    def test_getMedian_evenNumberElements_sameMedianNums(self):
        counter = getNumCounter([20, 20, 62, 3, 100, 0], MAX_MONEY_SPENT_A_DAY)
        self.assertEqual(getMedian(counter, 6), 20)

    def test_getMedian_evenNumberElements_secondMedianItemIsMaxValue(self):
        counter = getNumCounter([100, 100, 100, 200, 200, 200], MAX_MONEY_SPENT_A_DAY)
        self.assertEqual(getMedian(counter, 6), 150)
if __name__ == "__main__":
    # main() reads the sample input file; by default run the unit tests.
    #main()
unittest.main() | 37.004854 | 91 | 0.685819 |
705108e39490045a500beefe1fa1e73fb92716f9 | 2,379 | py | Python | examples/adwords/v201406/advanced_operations/get_ad_group_bid_modifier.py | dietrichc/streamline-ppc-reports | 256f79246aba3c2cf8f792d87a066391a2f471e0 | [
"Apache-2.0"
] | null | null | null | examples/adwords/v201406/advanced_operations/get_ad_group_bid_modifier.py | dietrichc/streamline-ppc-reports | 256f79246aba3c2cf8f792d87a066391a2f471e0 | [
"Apache-2.0"
] | null | null | null | examples/adwords/v201406/advanced_operations/get_ad_group_bid_modifier.py | dietrichc/streamline-ppc-reports | 256f79246aba3c2cf8f792d87a066391a2f471e0 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Retrieves the ad group level bid modifiers for a campaign.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: AdGroupBidModifierService.get
"""
__author__ = 'Joseph DiLallo'
from googleads import adwords
# Number of results requested per page from the AdWords API.
PAGE_SIZE = 500
def main(client):
  """Retrieve and print all ad-group-level bid modifiers for the account.

  Pages through AdGroupBidModifierService results PAGE_SIZE at a time and
  prints one line per modifier ('unset' when no bidModifier value is set).

  Args:
    client: an initialized AdWordsClient used to obtain the service.
  """
  # Initialize appropriate service.
  ad_group_bid_modifier_service = client.GetService(
      'AdGroupBidModifierService', version='v201406')
  # Get all ad group bid modifiers for the campaign.
  selector = {
      'fields': ['CampaignId', 'AdGroupId', 'BidModifier', 'Id'],
      'paging': {
          'startIndex': '0',
          'numberResults': str(PAGE_SIZE)
      }
  }
  # Set initial values.
  offset, page = 0, {}
  more_results = True
  while more_results:
    page = ad_group_bid_modifier_service.get(selector)
    if page['entries']:
      for modifier in page['entries']:
        # A modifier entry may omit 'bidModifier' entirely; report 'unset'.
        value = (modifier['bidModifier'] if 'bidModifier' in modifier
                 else 'unset')
        print ('Campaign ID %s, AdGroup ID %s, Criterion ID %s has ad group '
               'level modifier: %s' %
               (modifier['campaignId'], modifier['adGroupId'],
                modifier['criterion']['id'], value))
      # Increment values to request the next page.
      offset += PAGE_SIZE
      selector['paging']['startIndex'] = str(offset)
    else:
      print 'No ad group bid modifiers returned.'
    # Continue while the reported total exceeds what we have paged past.
    more_results = int(page['totalNumEntries']) > offset
if __name__ == '__main__':
  # Initialize client object (credentials loaded from googleads.yaml).
  adwords_client = adwords.AdWordsClient.LoadFromStorage()
  main(adwords_client)
| 30.896104 | 77 | 0.691467 |
f6513c42f24258cbab9aafb042ba574fc9e854d1 | 9,167 | py | Python | src/build/__init__.py | hexagonrecursion/build | 192f481f569bb352a6d4c73ec85e573bf651de94 | [
"MIT"
] | 2 | 2020-12-11T08:07:13.000Z | 2021-01-21T19:21:57.000Z | src/build/__init__.py | gaborbernat/build | ff6913781b62f6eb1529e4c36cf923c2c2b4d5f6 | [
"MIT"
] | null | null | null | src/build/__init__.py | gaborbernat/build | ff6913781b62f6eb1529e4c36cf923c2c2b4d5f6 | [
"MIT"
] | null | null | null | # SPDX-License-Identifier: MIT
"""
build - A simple, correct PEP517 package builder
"""
__version__ = '0.1.0'
import contextlib
import difflib
import os
import sys
import warnings
from typing import AbstractSet, Iterator, Mapping, Optional, Sequence, Set, Text, Tuple, Union
import pep517.wrappers
import toml
import toml.decoder
# Python 2 compatibility: alias the Python 3 OSError subclasses used in the
# pyproject.toml handling below onto their closest Python 2 equivalents.
if sys.version_info < (3,):
    FileNotFoundError = IOError
    PermissionError = OSError
# Type alias for PEP 517 ``config_settings`` mappings passed to backend hooks.
ConfigSettings = Mapping[str, Union[str, Sequence[str]]]
# Fallback [build-system] table used when pyproject.toml (PEP 517) or its
# build-system table (PEP 518) is absent.
_DEFAULT_BACKEND = {
    'build-backend': 'setuptools.build_meta:__legacy__',
    'requires': ['setuptools >= 40.8.0', 'wheel'],
}
class BuildException(Exception):
    """
    Exception raised by ProjectBuilder for build-setup problems: an
    unreadable or unparsable ``pyproject.toml``, a missing
    ``build-system.requires`` field, an unavailable backend, or a bad
    output path.
    """
class BuildBackendException(Exception):
    """
    Exception raised when the backend fails; wraps the original error
    raised by a PEP 517 hook call.
    """
class TypoWarning(Warning):
    """
    Warning raised when a potential typo is found in ``pyproject.toml``
    (see ``_find_typo``).
    """
def check_dependency(req_string, ancestral_req_strings=(), parent_extras=frozenset()):
    # type: (str, Tuple[str, ...], AbstractSet[str]) -> Iterator[Tuple[str, ...]]
    """
    Verify that a dependency and all of its dependencies are met.

    :param req_string: Requirement string
    :param ancestral_req_strings: chain of requirement strings that led from
        the originally-checked requirement down to ``req_string``; used to
        report *why* a transitive dependency was checked
    :param parent_extras: Extras (eg. "test" in myproject[test])
    :yields: Unmet dependencies, each as the tuple of requirement strings
        from the root requirement down to the unsatisfied one
    """
    # imports are local so merely importing this module stays cheap and does
    # not require these packages until a dependency check is performed.
    import packaging.requirements
    if sys.version_info >= (3, 8):
        import importlib.metadata as importlib_metadata
    else:
        import importlib_metadata
    req = packaging.requirements.Requirement(req_string)
    if req.marker:
        # evaluate against no extra ('') plus each extra of the parent.
        extras = frozenset(('',)).union(parent_extras)
        # a requirement can have multiple extras but ``evaluate`` can
        # only check one at a time.
        if all(not req.marker.evaluate(environment={'extra': e}) for e in extras):
            # if the marker conditions are not met, we pretend that the
            # dependency is satisfied.
            return
    try:
        dist = importlib_metadata.distribution(req.name)
    except importlib_metadata.PackageNotFoundError:
        # dependency is not installed in the environment.
        yield ancestral_req_strings + (req_string,)
    else:
        if req.specifier and not req.specifier.contains(dist.version, prereleases=True):
            # the installed version is incompatible.
            yield ancestral_req_strings + (req_string,)
        elif dist.requires:
            # recurse into the installed distribution's own requirements,
            # extending the ancestry chain so failures are traceable.
            for other_req_string in dist.requires:
                for unmet_req in check_dependency(other_req_string, ancestral_req_strings + (req_string,), req.extras):
                    # a transitive dependency is not satisfied.
                    yield unmet_req
def _find_typo(dictionary, expected): # type: (Mapping[str, str], str) -> None
    """Warn (TypoWarning) about keys of *dictionary* that look like
    misspellings of *expected*; a no-op when *expected* is present.
    """
    if expected in dictionary:
        return
    for candidate in dictionary:
        # 0.8 similarity ratio is the threshold for "probably a typo".
        if difflib.SequenceMatcher(None, expected, candidate).ratio() >= 0.8:
            warnings.warn(
                "Found '{}' in pyproject.toml, did you mean '{}'?".format(candidate, expected),
                TypoWarning,
            )
@contextlib.contextmanager
def _working_directory(path): # type: (str) -> Iterator[None]
    """Temporarily chdir into *path*, restoring the previous cwd on exit."""
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        # always restore, even if the body raised.
        os.chdir(previous)
class ProjectBuilder(object):
    """
    PEP 517 build frontend for a single source tree.

    Reads ``pyproject.toml`` from *srcdir* (falling back to the legacy
    setuptools backend when it is absent) and drives the configured build
    backend through a :mod:`pep517` hook caller.
    """
    def __init__(self, srcdir, python_executable=sys.executable):
        # type: (str, Union[bytes, Text]) -> None
        """
        Create a project builder.
        :param srcdir: the source directory
        :param python_executable: the python executable where the backend lives
        :raises BuildException: if ``pyproject.toml`` is unreadable, cannot
            be parsed, or declares ``[build-system]`` without ``requires``
        """
        self.srcdir = os.path.abspath(srcdir) # type: str
        spec_file = os.path.join(srcdir, 'pyproject.toml')
        try:
            with open(spec_file) as f:
                spec = toml.load(f)
        except FileNotFoundError:
            # no pyproject.toml at all -- treated the same as an empty spec.
            spec = {}
        except PermissionError as e:
            raise BuildException("{}: '{}' ".format(e.strerror, e.filename))
        except toml.decoder.TomlDecodeError as e:
            raise BuildException('Failed to parse {}: {} '.format(spec_file, e))
        build_system = spec.get('build-system')
        # if pyproject.toml is missing (per PEP 517) or [build-system] is missing (per PEP 518),
        # use default values.
        if build_system is None:
            _find_typo(spec, 'build-system')
            build_system = _DEFAULT_BACKEND
        # if [build-system] is present, it must have a ``requires`` field (per PEP 518).
        elif 'requires' not in build_system:
            _find_typo(build_system, 'requires')
            raise BuildException("Missing 'build-system.requires' in {}".format(spec_file))
        # if ``build-backend`` is missing, inject the legacy setuptools backend
        # but leave ``requires`` alone to emulate pip.
        elif 'build-backend' not in build_system:
            _find_typo(build_system, 'build-backend')
            build_system['build-backend'] = _DEFAULT_BACKEND['build-backend']
        self._build_system = build_system
        self._backend = self._build_system['build-backend']
        # the hook caller runs the backend inside ``python_executable``.
        self._hook = pep517.wrappers.Pep517HookCaller(
            self.srcdir,
            self._backend,
            backend_path=self._build_system.get('backend-path'),
            python_executable=python_executable,
        )
    @property
    def python_executable(self): # type: () -> Union[bytes, Text]
        """
        The Python executable used to invoke the backend.
        """
        # make mypy happy
        exe = self._hook.python_executable # type: Union[bytes, Text]
        return exe
    @python_executable.setter
    def python_executable(self, value): # type: (Union[bytes, Text]) -> None
        self._hook.python_executable = value
    @property
    def build_dependencies(self): # type: () -> Set[str]
        """
        The dependencies defined in the ``pyproject.toml``'s
        ``build-system.requires`` field or the default build dependencies
        if ``pyproject.toml`` is missing or ``build-system`` is undefined.
        """
        return set(self._build_system['requires'])
    def get_dependencies(self, distribution, config_settings=None): # type: (str, Optional[ConfigSettings]) -> Set[str]
        """
        Return the dependencies defined by the backend in addition to
        :attr:`build_dependencies` for a given distribution.
        :param distribution: Distribution to get the dependencies of
            (``sdist`` or ``wheel``)
        :param config_settings: Config settings for the build backend
        :raises BuildException: if the backend is unavailable
        :raises BuildBackendException: if the backend hook fails
        """
        get_requires = getattr(self._hook, 'get_requires_for_build_{}'.format(distribution))
        try:
            # hooks may resolve paths relative to the project source tree.
            with _working_directory(self.srcdir):
                return set(get_requires(config_settings))
        except pep517.wrappers.BackendUnavailable:
            raise BuildException("Backend '{}' is not available.".format(self._backend))
        except Exception as e: # noqa: E722
            raise BuildBackendException('Backend operation failed: {}'.format(e))
    def check_dependencies(self, distribution, config_settings=None):
        # type: (str, Optional[ConfigSettings]) -> Set[Tuple[str, ...]]
        """
        Return the dependencies which are not satisfied from the combined set of
        :attr:`build_dependencies` and :meth:`get_dependencies` for a given
        distribution.
        :param distribution: Distribution to check (``sdist`` or ``wheel``)
        :param config_settings: Config settings for the build backend
        :returns: Set of variable-length unmet dependency tuples
        """
        dependencies = self.get_dependencies(distribution, config_settings).union(self.build_dependencies)
        # each tuple is the chain from a top-level requirement down to the
        # unmet (possibly transitive) dependency -- see check_dependency().
        return {u for d in dependencies for u in check_dependency(d)}
    def build(self, distribution, outdir, config_settings=None): # type: (str, str, Optional[ConfigSettings]) -> str
        """
        Build a distribution.
        :param distribution: Distribution to build (``sdist`` or ``wheel``)
        :param outdir: Output directory
        :param config_settings: Config settings for the build backend
        :returns: The full path to the built distribution
        :raises BuildException: if the output path exists and is not a
            directory, or the backend is unavailable
        :raises BuildBackendException: if the backend hook fails
        """
        build = getattr(self._hook, 'build_{}'.format(distribution))
        outdir = os.path.abspath(outdir)
        if os.path.exists(outdir):
            if not os.path.isdir(outdir):
                raise BuildException("Build path '{}' exists and is not a directory".format(outdir))
        else:
            os.mkdir(outdir)
        try:
            # the backend returns only the basename of the produced artifact.
            with _working_directory(self.srcdir):
                basename = build(outdir, config_settings) # type: str
                return os.path.join(outdir, basename)
        except pep517.wrappers.BackendUnavailable:
            raise BuildException("Backend '{}' is not available.".format(self._backend))
        except Exception as e: # noqa: E722
            raise BuildBackendException('Backend operation failed: {!r}'.format(e))
# Explicit public API of the ``build`` package.
__all__ = (
    '__version__',
    'ConfigSettings',
    'BuildException',
    'BuildBackendException',
    'TypoWarning',
    'check_dependency',
    'ProjectBuilder',
)
| 35.393822 | 120 | 0.639249 |
c6f25048f5abb35d5f431583230d62284d2e7604 | 469 | py | Python | HLTrigger/Configuration/python/HLT_75e33/modules/rpcRecHits_cfi.py | PKUfudawei/cmssw | 8fbb5ce74398269c8a32956d7c7943766770c093 | [
"Apache-2.0"
] | 1 | 2021-11-30T16:24:46.000Z | 2021-11-30T16:24:46.000Z | HLTrigger/Configuration/python/HLT_75e33/modules/rpcRecHits_cfi.py | PKUfudawei/cmssw | 8fbb5ce74398269c8a32956d7c7943766770c093 | [
"Apache-2.0"
] | 4 | 2021-11-29T13:57:56.000Z | 2022-03-29T06:28:36.000Z | HLTrigger/Configuration/python/HLT_75e33/modules/rpcRecHits_cfi.py | PKUfudawei/cmssw | 8fbb5ce74398269c8a32956d7c7943766770c093 | [
"Apache-2.0"
] | 1 | 2021-11-30T16:16:05.000Z | 2021-11-30T16:16:05.000Z | import FWCore.ParameterSet.Config as cms
rpcRecHits = cms.EDProducer("RPCRecHitProducer",
deadSource = cms.string('File'),
deadvecfile = cms.FileInPath('RecoLocalMuon/RPCRecHit/data/RPCDeadVec.dat'),
maskSource = cms.string('File'),
maskvecfile = cms.FileInPath('RecoLocalMuon/RPCRecHit/data/RPCMaskVec.dat'),
recAlgo = cms.string('RPCRecHitStandardAlgo'),
recAlgoConfig = cms.PSet(
),
rpcDigiLabel = cms.InputTag("simMuonRPCDigis")
)
| 33.5 | 80 | 0.729211 |
d6725c94610f6dc128b8b475f8da232a1c40c1f9 | 3,179 | py | Python | yacms/blog/models.py | minhhoit/yacms | 39a9f1f2f8eced6d4cb89db36f3cdff89c18bdfe | [
"BSD-2-Clause"
] | null | null | null | yacms/blog/models.py | minhhoit/yacms | 39a9f1f2f8eced6d4cb89db36f3cdff89c18bdfe | [
"BSD-2-Clause"
] | null | null | null | yacms/blog/models.py | minhhoit/yacms | 39a9f1f2f8eced6d4cb89db36f3cdff89c18bdfe | [
"BSD-2-Clause"
] | null | null | null | from __future__ import unicode_literals
from future.builtins import str
from django.db import models
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from yacms.conf import settings
from yacms.core.fields import FileField
from yacms.core.models import Displayable, Ownable, RichText, Slugged
from yacms.generic.fields import CommentsField, RatingField
from yacms.utils.models import AdminThumbMixin, upload_to
class BlogPost(Displayable, Ownable, RichText, AdminThumbMixin):
    """
    A blog post.
    """
    # Categories the post belongs to (reverse accessor: category.blogposts).
    categories = models.ManyToManyField("BlogCategory",
        verbose_name=_("Categories"),
        blank=True, related_name="blogposts")
    # Per-post toggle for the comment thread.
    allow_comments = models.BooleanField(verbose_name=_("Allow comments"),
        default=True)
    comments = CommentsField(verbose_name=_("Comments"))
    rating = RatingField(verbose_name=_("Rating"))
    # Optional image shown with the post, stored under the "blog" upload dir.
    featured_image = FileField(verbose_name=_("Featured Image"),
        upload_to=upload_to("blog.BlogPost.featured_image", "blog"),
        format="Image", max_length=255, null=True, blank=True)
    # Manually curated self-relation of related posts.
    related_posts = models.ManyToManyField("self",
        verbose_name=_("Related posts"), blank=True)
    # Field name used by AdminThumbMixin for the admin list thumbnail.
    admin_thumb_field = "featured_image"
    class Meta:
        verbose_name = _("Blog post")
        verbose_name_plural = _("Blog posts")
        ordering = ("-publish_date",)
    def get_absolute_url(self):
        """
        URLs for blog posts can either be just their slug, or prefixed
        with a portion of the post's publish date, controlled by the
        setting ``BLOG_URLS_DATE_FORMAT``, which can contain the value
        ``year``, ``month``, or ``day``. Each of these maps to the name
        of the corresponding urlpattern, and if defined, we loop through
        each of these and build up the kwargs for the correct urlpattern.
        The order which we loop through them is important, since the
        order goes from least granular (just year) to most granular
        (year/month/day).
        """
        url_name = "blog_post_detail"
        kwargs = {"slug": self.slug}
        date_parts = ("year", "month", "day")
        if settings.BLOG_URLS_DATE_FORMAT in date_parts:
            url_name = "blog_post_detail_%s" % settings.BLOG_URLS_DATE_FORMAT
            for date_part in date_parts:
                # Zero-pad single-digit month/day values to match urlpatterns.
                date_value = str(getattr(self.publish_date, date_part))
                if len(date_value) == 1:
                    date_value = "0%s" % date_value
                kwargs[date_part] = date_value
                if date_part == settings.BLOG_URLS_DATE_FORMAT:
                    break
        return reverse(url_name, kwargs=kwargs)
class BlogCategory(Slugged):
    """
    A category for grouping blog posts into a series.
    """
    class Meta:
        verbose_name = _("Blog Category")
        verbose_name_plural = _("Blog Categories")
        ordering = ("title",)
    def get_absolute_url(self):
        """
        Return the URL of the blog post list filtered to this category.
        """
        # ``models.permalink`` is deprecated (removed in Django 2.1); call
        # ``reverse`` directly, matching BlogPost.get_absolute_url above.
        # ``reverse`` is already imported at the top of this module.
        return reverse("blog_post_list_category", kwargs={"category": self.slug})
7890d1f911c90465af7c3d0d57f43eb7d01e2c73 | 220 | py | Python | zipline/research/__init__.py | quantrocket-llc/zipline | 4eccd1ff3f07addbdc1f9682b608e0584a9b59c6 | [
"Apache-2.0"
] | 14 | 2018-02-05T18:38:15.000Z | 2022-01-15T21:31:30.000Z | zipline/research/__init__.py | quantrocket-llc/zipline | 4eccd1ff3f07addbdc1f9682b608e0584a9b59c6 | [
"Apache-2.0"
] | null | null | null | zipline/research/__init__.py | quantrocket-llc/zipline | 4eccd1ff3f07addbdc1f9682b608e0584a9b59c6 | [
"Apache-2.0"
] | 8 | 2020-02-14T04:21:46.000Z | 2022-01-30T06:42:50.000Z | from zipline.research.pipeline import run_pipeline, get_forward_returns
from zipline.research.bardata import get_data
from zipline.research.sid import sid
from zipline.research.continuous_future import continuous_future
| 44 | 71 | 0.881818 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.