hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
790c759b73c878a7b39473583ec0ffc5dfbb10f0 | 6,196 | py | Python | smock.py | serverboards/serverboards-plugin-google-drive | 2295df2c8c42d1667e80d00d2038aabd2fe15b62 | [
"Apache-2.0"
] | null | null | null | smock.py | serverboards/serverboards-plugin-google-drive | 2295df2c8c42d1667e80d00d2038aabd2fe15b62 | [
"Apache-2.0"
] | null | null | null | smock.py | serverboards/serverboards-plugin-google-drive | 2295df2c8c42d1667e80d00d2038aabd2fe15b62 | [
"Apache-2.0"
] | null | null | null | import json
import yaml
"""
SMock -- Serverboards Mock library -- Mock comfortably.
This library helps to mock function and method calls, getting the data
from an external yaml file.
"""
class MockWrapper:
    """
    Wraps all the data returned by the mocked function to behave like a
    dictionary, like an object, like a function, like a jsonable dict...
    like almost everything you may need
    """
    def __init__(self, data):
        # Double underscore name-mangles to _MockWrapper__data, keeping the
        # payload attribute out of the way of __getattr__ lookups.
        self.__data = data
    def __getattr__(self, key):
        # Attribute access falls through to key lookup on the payload.
        # NOTE: raises KeyError rather than AttributeError, so hasattr() on a
        # missing key propagates the KeyError instead of returning False.
        if key not in self.__data:
            raise KeyError("'%s' not found in %s" % (key, self.__data.keys()))
        return self.__getitem__(key)
    def __call__(self):
        # Calling the wrapper re-wraps the payload, letting mocks imitate
        # zero-argument methods (e.g. a response's .json()).
        return wrapped(self.__data)
    def __getitem__(self, key):
        val = self.__data[key]
        # ints and strs pass through untouched; anything else (lists, dicts,
        # floats, None, ...) is wrapped recursively.
        if isinstance(val, (int, str)):
            return val
        return wrapped(val)
    def __str__(self):
        return str(self.__data)
    def __repr__(self):
        return repr(self.__data)
    # Comparisons delegate to the payload. NOTE: defining __eq__ without
    # __hash__ makes instances unhashable in Python 3.
    def __eq__(self, other):
        return self.__data.__eq__(other)
    def __le__(self, other):
        return self.__data.__le__(other)
    def __ge__(self, other):
        return self.__data.__ge__(other)
    def __lt__(self, other):
        return self.__data.__lt__(other)
    def __gt__(self, other):
        return self.__data.__gt__(other)
    def __len__(self):
        return self.__data.__len__()
    def keys(self):
        # dict-style helpers so the wrapper supports ** unpacking and .get().
        return self.__data.keys()
    def get(self, key, defv=None):
        return self.__data.get(key, defv)
class MockWrapperList(MockWrapper, list):
    """Wrapper for list payloads: a MockWrapper that is also a genuine list,
    so iteration, isinstance(list) checks and json serialization work."""
    def __init__(self, data):
        # Both bases are initialised explicitly (no super()): MockWrapper
        # stores the payload, list copies the elements into the instance.
        MockWrapper.__init__(self, data)
        list.__init__(self, data)
class MockWrapperDict(MockWrapper, dict):
    """Wrapper for dict payloads: a MockWrapper that is also a genuine dict,
    so isinstance(dict) checks and json serialization work."""
    def __init__(self, data):
        # Both bases are initialised explicitly (no super()): MockWrapper
        # stores the payload, dict copies the key/value pairs.
        MockWrapper.__init__(self, data)
        dict.__init__(self, data)
def wrapped(data):
    """Pick the MockWrapper flavour that matches *data*'s container type.

    dicts and lists get the hybrid subclasses (which are also real dicts /
    lists); any other value gets the plain MockWrapper.
    """
    if isinstance(data, dict):
        wrapper_cls = MockWrapperDict
    elif isinstance(data, list):
        wrapper_cls = MockWrapperList
    else:
        wrapper_cls = MockWrapper
    return wrapper_cls(data)
def mock_match(A, B):
    """
    Checks whether the params of a mocked call match the expected ones.

    It is necessary as sometimes we get a tuple while the mock data file
    holds lists, so sequences are compared element by element (recursively).
    `B == '*'` acts as a wildcard that matches anything; sequences of
    different length never match.

    Examples:
    ```
    >>> mock_match("A", "A")
    True
    >>> mock_match("A", "B")
    False
    >>> mock_match(["A", "B", "C"], ["A", "B", "C"])
    True
    >>> mock_match(["A", "B", "C"], "*")
    True
    >>> mock_match(["A", "B", "C"], ["A", "B"])
    False
    ```
    """
    if B == '*':  # always match
        return True
    if isinstance(A, (tuple, list)):
        # FIX: zip() used to truncate to the shorter sequence, so a longer
        # call silently matched a shorter expectation (and vice versa).
        if not isinstance(B, (tuple, list)) or len(A) != len(B):
            return False
        return all(mock_match(a, b) for (a, b) in zip(A, B))
    return A == B
def mock_res(name, data, args=[], kwargs={}):
    """
    Given a name, data and call parameters, returns the mocked response.

    If there is no matching response, raises an exception whose message can
    be pasted into the mock data file to prepare the missing entry.

    This can be used for situations where you mock some function like data;
    for example at [Serverboards](https://serverboards.io), we use it to
    mock RPC calls. It is also used internally by every other mock helper.

    NOTE: the mutable defaults are safe here -- args/kwargs are only read.
    """
    responses = data.get(name)  # FIX: no longer shadows the `data` parameter
    if not responses:
        raise Exception(
            "unknown method for mocking: \n%s:\n - args: %s\n kwargs: %s\n response: ...\n" % (
                name, json.dumps(args), json.dumps(kwargs)
            )
        )
    for res in responses:
        # FIX: an entry without "args" used to yield mock_match(args, None),
        # which raised TypeError inside zip(); default to [] instead.
        if (mock_match(args, res.get("args", [])) and
                mock_match(kwargs, res.get("kwargs", {}))):
            if 'error' in res:
                # The mock file can force an error path for this call.
                raise Exception(res["error"])
            response = res["response"]
            # Scalars pass through; containers are wrapped for attr access.
            if isinstance(response, (int, str)):
                return response
            return wrapped(response)
    raise Exception(
        "unknown data for mocking: \n%s:\n - args: %s\n kwargs: %s\n response: ...\n" % (
            name, json.dumps(args), json.dumps(kwargs)
        )
    )
def mock_method(name, data):
    """Build a stand-in callable for *name* whose return values are looked
    up in the mock *data* via mock_res()."""
    def replacement(*args, **kwargs):
        return mock_res(name, data, args, kwargs)
    return replacement
def mock_method_async(name, data):
    """Build an async stand-in coroutine function for *name*, resolving its
    return values through mock_res() just like mock_method()."""
    async def replacement(*args, **kwargs):
        return mock_res(name, data, args, kwargs)
    return replacement
class SMock:
    """
    Encapsulates mocking calls so it's easier to load data and mock methods
    Example:
    ```python
    >>> import requests
    >>> smocked = SMock("tests/data.yaml")
    >>> requests.get = smocked.mock_method("requests.get")
    >>> res = requests.get("https://mocked.url")
    >>> res.status_code
    200
    >>> res.content
    'Gocha!'
    >>> res.json()
    {'text': 'Gocha too!'}
    ```
    The mock file is a yaml file with each mocked function as keys, and
    `args`/`kwargs` as calling args and kwargs, and `response` the response.
    Check `tests/data.yaml` for an example at the source code.
    """
    def __init__(self, mockfile):
        """Load the YAML mock definitions from *mockfile*."""
        with open(mockfile) as fd:
            # FIX: yaml.load() without an explicit Loader is deprecated and
            # can execute arbitrary tags; mock files are plain data, so
            # safe_load is both sufficient and safe.
            self._data = yaml.safe_load(fd)

    def mock_res(self, name, args=[], kwargs={}):
        """
        Calls `mock_res`
        Mock by args:
        ```
        >>> smock = SMock("tests/data.yaml")
        >>> res = smock.mock_res("requests.get", ["https://mocked.url"])
        >>> res.status_code
        200
        ```
        Using "*" as args, as fallback. As there is no kwargs, use default:
        ```
        >>> res = smock.mock_res("requests.get", ["https://error.mocked.url"])
        >>> res.status_code
        404
        ```
        Using "*" as kwargs:
        ```
        >>> res = smock.mock_res("requests.get",
        ...                      ["https://mocked.url"],
        ...                      {'data': 'data'})
        >>> res.status_code
        200
        >>> res.content
        'Mocked query'
        ```
        """
        # Mutable defaults are safe here: they are forwarded, never mutated.
        return mock_res(name, self._data, args, kwargs)

    def mock_method(self, name):
        """Calls `mock_method`"""
        return mock_method(name, self._data)

    def mock_method_async(self, name):
        """
        Calls `mock_method_async`.

        FIX: this method used to be declared ``async`` and did
        ``await mock_method_async(...)``, which awaited a plain function
        object and therefore always raised TypeError. It now returns the
        async mock function directly, mirroring `mock_method`.
        """
        return mock_method_async(name, self._data)
| 25.497942 | 102 | 0.576017 |
47bde0175ac9ab1abb8a0ed8418ea46db9801467 | 8,138 | py | Python | samples/ralph/main.py | el-dee/panda3d-openvr | 3f9567897552df6c10078bc124795101cf478f91 | [
"BSD-3-Clause"
] | 12 | 2020-06-23T08:31:14.000Z | 2022-02-26T00:44:53.000Z | samples/ralph/main.py | el-dee/panda3d-openvr | 3f9567897552df6c10078bc124795101cf478f91 | [
"BSD-3-Clause"
] | 17 | 2020-08-03T15:39:51.000Z | 2022-02-25T00:27:44.000Z | samples/ralph/main.py | el-dee/panda3d-openvr | 3f9567897552df6c10078bc124795101cf478f91 | [
"BSD-3-Clause"
] | 5 | 2020-08-12T15:17:36.000Z | 2022-02-09T16:02:31.000Z | #!/usr/bin/env python
from direct.showbase.ShowBase import ShowBase
from panda3d.core import CollisionTraverser, CollisionNode
from panda3d.core import CollisionHandlerQueue, CollisionRay
from panda3d.core import AmbientLight, DirectionalLight
from panda3d.core import TextNode
from panda3d.core import CollideMask, LVector3
from panda3d.core import ExecutionEnvironment
from direct.gui.OnscreenText import OnscreenText
from p3dopenvr.p3dopenvr import P3DOpenVR
import sys
import os
# Function to put instructions on the screen.
def addInstructions(pos, msg):
    """Render *msg* as a small instruction line *pos* screen units below the
    top-left corner; returns the created OnscreenText node.

    Relies on the ShowBase builtin `base` being initialised beforehand.
    """
    return OnscreenText(text=msg, style=1, fg=(1, 1, 1, 1), scale=.05,
                        shadow=(0, 0, 0, 1), parent=base.a2dTopLeft,
                        pos=(0.08, -pos - 0.04), align=TextNode.ALeft)
# Function to put title on the screen.
def addTitle(text):
    """Render *text* as the demo title anchored to the bottom-right corner;
    returns the created OnscreenText node (requires `base` to exist)."""
    return OnscreenText(text=text, style=1, fg=(1, 1, 1, 1), scale=.07,
                        parent=base.a2dBottomRight, align=TextNode.ARight,
                        pos=(-0.1, 0.09), shadow=(0, 0, 0, 1))
class RoamingRalphDemo(ShowBase):
    """VR port of the classic "Roaming Ralph" sample: the player roams over
    uneven terrain, steering the OpenVR tracking space with the trackpad
    'Move' action while a downward collision ray keeps them on the ground.

    NOTE(review): source indentation was lost in this copy and has been
    reconstructed; statement nesting should be confirmed against upstream.
    """

    def __init__(self):
        # Set up the window, camera, etc.
        ShowBase.__init__(self)

        # Create and configure the VR environment
        self.ovr = P3DOpenVR()
        self.ovr.init(msaa=4)

        main_dir = ExecutionEnvironment.getEnvironmentVariable("MAIN_DIR")
        # Setup the application manifest, it will identify and configure the app
        # We force it in case it has changed.
        self.ovr.identify_application(os.path.join(main_dir, "ralph.vrmanifest"), "p3dopenvr.demo.ralph", force=True)
        # Load the actions manifest, it must be the same as the manifest referenced in the application manifest
        self.ovr.load_action_manifest(os.path.join(main_dir, "manifest/actions.json"))
        # Use the '/actions/platformer' action set. This action set will be updated each frame
        self.ovr.add_action_set("/actions/platformer")
        # Get the handle of the action '/actions/platformer/in/Move'.
        # This handle will be used to retrieve the data of the action.
        self.action_move = self.ovr.vr_input.getActionHandle('/actions/platformer/in/Move')

        # Set the background color to black
        self.win.setClearColor((0, 0, 0, 1))

        # Post the instructions
        self.title = addTitle(
            "Panda3D Tutorial: Roaming Ralph (Walking on Uneven Terrain)")
        self.inst1 = addInstructions(0.06, "[ESC]: Quit")
        self.inst2 = addInstructions(0.12, "[Left trackpad]: Rotate Left")
        self.inst3 = addInstructions(0.18, "[Right trackpad]: Rotate Right")
        self.inst4 = addInstructions(0.24, "[Up trackpad]: Walk Forward")
        # NOTE(review): this rebinds self.inst4, losing the handle to the
        # "Walk Forward" line created just above.
        self.inst4 = addInstructions(0.30, "[Down trackpad]: Walk Backward")

        # Set up the environment
        #
        # This environment model contains collision meshes.  If you look
        # in the egg file, you will see the following:
        #
        #    <Collide> { Polyset keep descend }
        #
        # This tag causes the following mesh to be converted to a collision
        # mesh -- a mesh which is optimized for collision, not rendering.
        # It also keeps the original mesh, so there are now two copies ---
        # one optimized for rendering, one for collisions.
        self.environ = loader.loadModel("models/world")
        self.environ.reparentTo(render)

        # Create the main character, Ralph (an empty node carrying the
        # ground-collision ray; the HMD provides the visible viewpoint).
        self.ralph = render.attachNewNode('ralph')
        self.ralphStartPos = self.environ.find("**/start_point").getPos()
        self.ovr.tracking_space.setPos(self.ralphStartPos)
        self.ralph.setPos(self.ovr.hmd_anchor.getPos(render))

        self.accept("escape", sys.exit)

        taskMgr.add(self.move, "moveTask")
        taskMgr.add(self.collision, "collisionTask")

        # Set up the camera
        self.disableMouse()

        # We will detect the height of the terrain by creating a collision
        # ray and casting it downward toward the terrain.  One ray will
        # start above ralph's head, and the other will start above the camera.
        # A ray may hit the terrain, or it may hit a rock or a tree.  If it
        # hits the terrain, we can detect the height.  If it hits anything
        # else, we rule that the move is illegal.
        self.cTrav = CollisionTraverser()
        self.ralphGroundRay = CollisionRay()
        self.ralphGroundRay.setOrigin(0, 0, 9)
        self.ralphGroundRay.setDirection(0, 0, -1)
        self.ralphGroundCol = CollisionNode('ralphRay')
        self.ralphGroundCol.addSolid(self.ralphGroundRay)
        self.ralphGroundCol.setFromCollideMask(CollideMask.bit(0))
        self.ralphGroundCol.setIntoCollideMask(CollideMask.allOff())
        self.ralphGroundColNp = self.ralph.attachNewNode(self.ralphGroundCol)
        self.ralphGroundHandler = CollisionHandlerQueue()
        self.cTrav.addCollider(self.ralphGroundColNp, self.ralphGroundHandler)

        # Uncomment this line to see the collision rays
        #self.ralphGroundColNp.show()

        # Uncomment this line to show a visual representation of the
        # collisions occuring
        #self.cTrav.showCollisions(render)

        # Create some lighting
        ambientLight = AmbientLight("ambientLight")
        ambientLight.setColor((.3, .3, .3, 1))
        directionalLight = DirectionalLight("directionalLight")
        directionalLight.setDirection((-5, -5, -5))
        directionalLight.setColor((1, 1, 1, 1))
        directionalLight.setSpecularColor((1, 1, 1, 1))
        render.setLight(render.attachNewNode(ambientLight))
        render.setLight(render.attachNewNode(directionalLight))

    # Move camera according to user's input
    def move(self, task):
        """Per-frame task: turn and translate the VR tracking space based on
        the trackpad 'Move' analog action; returns task.cont to keep running."""
        # Get the time that elapsed since last frame.  We multiply this with
        # the desired speed in order to find out with which distance to move
        # in order to achieve that desired speed.
        dt = globalClock.getDt()

        # If a move-button is touched, move in the specified direction.
        move_data, device_path = self.ovr.get_analog_action_value(self.action_move)
        if move_data is not None:
            x, y = move_data.x, move_data.y
            # The x coordinate is used to turn the camera
            self.ovr.tracking_space.setH(self.ovr.tracking_space.getH() - x * 60 * dt)
            # The y coordinate is used to move the camera along the view vector
            # We retrieve the orientation of the headset and we generate a 2D direction
            orientation = self.ovr.hmd_anchor.get_quat(render)
            vector = orientation.xform(LVector3(0, 1, 0))
            vector[2] = 0  # flatten to the ground plane
            vector.normalize()
            # Use the vector and the y value to move the camera relative to itself
            self.ovr.tracking_space.setPos(self.ovr.tracking_space.getPos() + vector * (y * 5 * dt))
        return task.cont

    # Grid checking and collision detection
    def collision(self, task):
        """Per-frame task: snap the tracking space to the terrain height, or
        reset to the saved start position when the ray missed the terrain."""
        # Normally, we would have to call traverse() to check for collisions.
        # However, the class ShowBase that we inherit from has a task to do
        # this for us, if we assign a CollisionTraverser to self.cTrav.
        #self.cTrav.traverse(render)

        # Adjust ralph's Z coordinate.  If ralph's ray hit terrain,
        # update his Z. If it hit anything else, or didn't hit anything, put
        # him back where he was last frame.
        entries = list(self.ralphGroundHandler.getEntries())
        entries.sort(key=lambda x: x.getSurfacePoint(render).getZ())
        if len(entries) > 0 and entries[0].getIntoNode().getName() == "terrain":
            self.ovr.tracking_space.setZ(entries[0].getSurfacePoint(render).getZ())
        else:
            self.ovr.tracking_space.setPos(self.ralphStartPos)
        # NOTE(review): indentation was lost; ralph is assumed to follow the
        # HMD every frame so the ground ray stays under the player -- confirm.
        self.ralph.setPos(self.ovr.hmd_anchor.getPos(render))

        # save ralph's initial position so that we can restore it,
        # in case he falls off the map or runs into something.
        self.ralphStartPos = self.ovr.tracking_space.getPos()
        return task.cont
# Build the demo (initialises the window and the VR session) and hand
# control to Panda3D's main loop; run() blocks until the application exits.
demo = RoamingRalphDemo()
demo.run()
| 45.211111 | 129 | 0.666871 |
a734822b3c78189875eb0fb3521259523d701602 | 180 | py | Python | kapre/__init__.py | MichaelisTrofficus/kapre | f25fdab1b8ea9e236e514aab19063cd114869f53 | [
"MIT"
] | null | null | null | kapre/__init__.py | MichaelisTrofficus/kapre | f25fdab1b8ea9e236e514aab19063cd114869f53 | [
"MIT"
] | null | null | null | kapre/__init__.py | MichaelisTrofficus/kapre | f25fdab1b8ea9e236e514aab19063cd114869f53 | [
"MIT"
] | null | null | null | __version__ = '0.3.6'
# Alias kept so client code can read either __version__ or VERSION.
VERSION = __version__

# Sub-modules exposed as attributes of the package namespace.
from . import composed
from . import backend
# Star imports re-export the public names (presumably the layer classes) at
# the package top level, e.g. kapre.<LayerName>.
from .signal import *
from .time_frequency import *
from .time_frequency_tflite import *
| 18 | 36 | 0.766667 |
1d4ce80b9d5cde5485674bd9df58c25118c8ace6 | 494 | py | Python | test/lib_common_test.py | yokoyama-flogics/ibp_monitor_2 | 1a7df55a524ff3a7908df330e7e02c9f27e24ae0 | [
"BSD-2-Clause"
] | 3 | 2017-11-23T13:29:47.000Z | 2021-01-08T09:28:35.000Z | test/lib_common_test.py | yokoyama-flogics/ibp_monitor_2 | 1a7df55a524ff3a7908df330e7e02c9f27e24ae0 | [
"BSD-2-Clause"
] | null | null | null | test/lib_common_test.py | yokoyama-flogics/ibp_monitor_2 | 1a7df55a524ff3a7908df330e7e02c9f27e24ae0 | [
"BSD-2-Clause"
] | 2 | 2018-02-15T08:11:24.000Z | 2021-01-08T09:28:43.000Z | import os
import sys
import unittest
# Set Python search path to the parent directory
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from lib.common import *
class TestLibCommon(unittest.TestCase):
    """Tests for lib.common.eprint.

    They rely on running under ``unittest.main(buffer=True)``, which swaps
    sys.stderr for a StringIO so its contents can be inspected.
    """

    def test_eprint1(self):
        # eprint writes strings to stderr with a trailing newline.
        eprint('Hello')
        # FIX: assertEquals is a deprecated alias removed in Python 3.12.
        self.assertEqual(sys.stderr.getvalue(), 'Hello\n')

    def test_eprint2(self):
        # Non-string arguments are stringified.
        eprint(123)
        self.assertEqual(sys.stderr.getvalue(), '123\n')
if __name__ == "__main__":
    # buffer=True makes unittest capture stdout/stderr into StringIO objects
    # during each test, which is what lets the tests read sys.stderr.getvalue().
    unittest.main(buffer=True)
| 24.7 | 62 | 0.690283 |
b11f3d26915f9586dadc745775f903eb6cc2c249 | 5,471 | py | Python | recipes/arcus/all/conanfile.py | marsven/conan-center-index | d8bb4ad617cee02d8664e8341fa32cdf702e4284 | [
"MIT"
] | 1 | 2021-11-11T03:07:13.000Z | 2021-11-11T03:07:13.000Z | recipes/arcus/all/conanfile.py | marsven/conan-center-index | d8bb4ad617cee02d8664e8341fa32cdf702e4284 | [
"MIT"
] | 1 | 2022-03-09T06:33:41.000Z | 2022-03-09T06:33:41.000Z | recipes/arcus/all/conanfile.py | marsven/conan-center-index | d8bb4ad617cee02d8664e8341fa32cdf702e4284 | [
"MIT"
] | null | null | null | from conans import ConanFile, CMake, tools
import os
import textwrap
required_conan_version = ">=1.43.0"
class ArcusConan(ConanFile):
    """Conan (1.x API) recipe for Ultimaker's libArcus messaging library."""
    name = "arcus"
    description = "This library contains C++ code and Python3 bindings for " \
                  "creating a socket in a thread and using this socket to send " \
                  "and receive messages based on the Protocol Buffers library."
    license = "LGPL-3.0-or-later"
    topics = ("arcus", "protobuf", "socket", "cura")
    homepage = "https://github.com/Ultimaker/libArcus"
    url = "https://github.com/conan-io/conan-center-index"

    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
    }

    exports_sources = "CMakeLists.txt"
    generators = "cmake", "cmake_find_package"

    # Lazily-built CMake helper, cached by _configure_cmake().
    _cmake = None

    @property
    def _source_subfolder(self):
        return "source_subfolder"

    @property
    def _is_msvc(self):
        # Covers both the legacy "Visual Studio" and the newer "msvc" setting.
        return str(self.settings.compiler) in ["Visual Studio", "msvc"]

    def config_options(self):
        # fPIC has no meaning on Windows.
        if self.settings.os == "Windows":
            del self.options.fPIC

    def configure(self):
        # Shared builds are PIC by construction, so drop the option.
        if self.options.shared:
            del self.options.fPIC

    def requirements(self):
        self.requires("protobuf/3.17.1")

    def validate(self):
        # Enforce C++11 when the profile pins a cppstd.
        if self.settings.compiler.get_safe("cppstd"):
            tools.check_min_cppstd(self, 11)

    def source(self):
        tools.get(**self.conan_data["sources"][self.version],
                  destination=self._source_subfolder, strip_root=True)

    def _patch_sources(self):
        # Do not force PIC
        tools.replace_in_file(os.path.join(self._source_subfolder, "CMakeLists.txt"),
                              "set(CMAKE_POSITION_INDEPENDENT_CODE ON)",
                              "")
        tools.replace_in_file(os.path.join(self._source_subfolder, "CMakeLists.txt"),
                              "set_target_properties(Arcus PROPERTIES COMPILE_FLAGS -fPIC)",
                              "")
        # TODO: this patch could be removed when CMake variables fixed in protobuf recipe
        tools.replace_in_file(os.path.join(self._source_subfolder, "CMakeLists.txt"),
                              "target_link_libraries(Arcus PUBLIC ${PROTOBUF_LIBRARIES})",
                              "target_link_libraries(Arcus PUBLIC protobuf::libprotobuf)")

    def _configure_cmake(self):
        if self._cmake:
            return self._cmake
        self._cmake = CMake(self)
        self._cmake.definitions["BUILD_PYTHON"] = False
        self._cmake.definitions["BUILD_EXAMPLES"] = False
        self._cmake.definitions["BUILD_STATIC"] = not self.options.shared
        if self._is_msvc:
            # Tell upstream's CMake whether the MSVC runtime is static:
            # MT/MTd with "Visual Studio", runtime == "static" with "msvc".
            if self.settings.compiler == "Visual Studio":
                is_static_runtime = str(self.settings.compiler.runtime).startswith("MT")
            else:
                is_static_runtime = self.settings.compiler.runtime == "static"
            self._cmake.definitions["MSVC_STATIC_RUNTIME"] = is_static_runtime
        self._cmake.configure()
        return self._cmake

    def build(self):
        self._patch_sources()
        cmake = self._configure_cmake()
        cmake.build()

    def package(self):
        self.copy("LICENSE", dst="licenses", src=self._source_subfolder)
        cmake = self._configure_cmake()
        cmake.install()
        # Drop the upstream CMake config files; consumers use Conan's.
        tools.rmdir(os.path.join(self.package_folder, "lib", "cmake"))

        # TODO: to remove in conan v2 once cmake_find_package* generators removed
        self._create_cmake_module_alias_targets(
            os.path.join(self.package_folder, self._module_file_rel_path),
            {"Arcus": "Arcus::Arcus"}
        )

    @staticmethod
    def _create_cmake_module_alias_targets(module_file, targets):
        # Writes a CMake snippet aliasing the legacy target names to the
        # namespaced ones, for cmake_find_package(_multi) consumers.
        content = ""
        for alias, aliased in targets.items():
            content += textwrap.dedent("""\
                if(TARGET {aliased} AND NOT TARGET {alias})
                    add_library({alias} INTERFACE IMPORTED)
                    set_property(TARGET {alias} PROPERTY INTERFACE_LINK_LIBRARIES {aliased})
                endif()
            """.format(alias=alias, aliased=aliased))
        tools.save(module_file, content)

    @property
    def _module_subfolder(self):
        return os.path.join("lib", "cmake")

    @property
    def _module_file_rel_path(self):
        return os.path.join(self._module_subfolder,
                            "conan-official-{}-targets.cmake".format(self.name))

    def package_info(self):
        self.cpp_info.set_property("cmake_file_name", "Arcus")
        self.cpp_info.set_property("cmake_target_name", "Arcus")
        self.cpp_info.libs = ["Arcus"]
        # System libraries the Arcus socket code links against.
        if self.settings.os in ["Linux", "FreeBSD"]:
            self.cpp_info.system_libs.append("pthread")
        elif self.settings.os == "Windows":
            self.cpp_info.system_libs.append("ws2_32")

        # TODO: to remove in conan v2 once cmake_find_package* generators removed
        self.cpp_info.names["cmake_find_package"] = "Arcus"
        self.cpp_info.names["cmake_find_package_multi"] = "Arcus"
        self.cpp_info.builddirs.append(self._module_subfolder)
        self.cpp_info.build_modules["cmake_find_package"] = [self._module_file_rel_path]
        self.cpp_info.build_modules["cmake_find_package_multi"] = [self._module_file_rel_path]
| 38.801418 | 94 | 0.624383 |
c7999324166ffc44ab7a5cbf1f221d4bb5ecc8d7 | 32,730 | py | Python | src_data/lgbt_content_selection.py | marcmiquel/WDO | 7d8d8e912f8dbacb2cdc0f6fd5c26370b8310cbb | [
"MIT"
] | 3 | 2020-12-21T06:06:16.000Z | 2021-08-28T12:52:07.000Z | src_data/lgbt_content_selection.py | marcmiquel/WDO | 7d8d8e912f8dbacb2cdc0f6fd5c26370b8310cbb | [
"MIT"
] | 1 | 2021-01-27T19:33:20.000Z | 2021-01-27T19:33:20.000Z | src_data/lgbt_content_selection.py | marcmiquel/WDO | 7d8d8e912f8dbacb2cdc0f6fd5c26370b8310cbb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# script
import wikilanguages_utils
from wikilanguages_utils import *
# time
import time
import datetime
from dateutil import relativedelta
import calendar
# system
import os
import sys
import shutil
import re
import random
# databases
import MySQLdb as mdb, MySQLdb.cursors as mdb_cursors
import sqlite3
# files
import gzip
import zipfile
import bz2
import json
import csv
import codecs
import unidecode
# requests and others
import requests
import urllib
from urllib.parse import urlparse, parse_qsl, urlencode
import webbrowser
import reverse_geocoder as rg
import numpy as np
# data
import pandas as pd
# classifier
from sklearn import svm, linear_model
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
import gc
# this script collects content related to lgbt topics.
# MAIN
def main():
    """For every Wikipedia language edition: crawl and classify LGBT+-related
    articles, then push the aggregated topics into the diversity database."""
    for languagecode in wikilanguagecodes:  # wikilanguagecodes[wikilanguagecodes.index('nds'):]
        print ('\n\n\n *** ' +languagecode+ ' '+languages.loc[languagecode]['languagename'])
        store_articles_category_crawling_keywords_biographies_links(languagecode)
        store_articles_lgbt_topic_binary_classifier(languagecode)
    # NOTE(review): source indentation was lost; this aggregation step takes
    # no language argument, so it is assumed to run once after the loop over
    # all languages -- confirm against the original file.
    update_push_lgbt_topics_wikipedia_diversity()
    # print ('done')
    # input('')
################################################################
def store_articles_category_crawling_keywords_biographies_links(languagecode):
    """Collect LGBT+-related article candidates for one language edition.

    Reads the page and categorylinks SQL dumps of ``<languagecode>wiki``,
    crawls the category tree level by level starting from categories whose
    title contains the language's LGBT keyword, and stores the candidate
    articles (crawl level, link counts, keyword/biography signals) into the
    ``<lang>wiki_lgbt`` table of the lgbt_content database.

    NOTE(review): indentation was lost in this copy of the file and has been
    reconstructed; the nesting of some statements should be confirmed
    against the original.
    """
    functionstartTime = time.time()
    function_name = 'store_articles_category_crawling_keywords_biographies_links '+languagecode
    # if wikilanguages_utils.verify_function_run(cycle_year_month, script_name, function_name, 'check','')==1: return
    print (function_name)
    # os.remove(databases_path + lgbt_content_db); print (lgbt_content_db+' deleted.');
    conn = sqlite3.connect(databases_path + wikipedia_diversity_production_db); cursor = conn.cursor()
    conn2 = sqlite3.connect(databases_path + diversity_categories_db); cursor2 = conn2.cursor()
    conn3 = sqlite3.connect(databases_path + lgbt_content_db); cursor3 = conn3.cursor()
    # wikilanguages_utils.store_lgbt_label('store')
    # Create the table.
    # NOTE(review): lgbt_biography is declared without a column type (legal
    # in SQLite; presumably 'text' was intended).
    query = ('CREATE TABLE IF NOT EXISTS '+languagecode+'wiki_lgbt ('+
             # general
             'qitem text, '+
             'page_id integer, '+
             'page_title text, '+
             'lgbt_biography, '+
             'keyword text, '+
             'category_crawling_level integer, '+
             'num_inlinks_from_lgbt integer, '+
             'num_outlinks_to_lgbt integer, '+
             'percent_inlinks_from_lgbt real, '+
             'percent_outlinks_to_lgbt real, '+
             'lgbt_binary integer, '+
             'PRIMARY KEY (qitem));')
    try:
        cursor3.execute(query)
        conn3.commit()
        print ('Table for languagecode '+languagecode+' created.')
    except:
        # Table already exists.
        pass
    try:
        # Localised LGBT keyword for this language, e.g. "LGBT" / "ЛГБТ".
        keyword = wikilanguages_utils.store_lgbt_label('get')[languagecode]
        keyword_unicode = unidecode.unidecode(keyword).lower().replace(' ','_')
    except:
        keyword = None
        keyword_unicode = None
        pass
    print (keyword)
    if keyword != None:
        # FIRST: all categories must have been collected, including the ones
        # whose title contains the keyword.
        category_page_ids_page_titles = {}
        category_page_titles_page_ids = {}
        dumps_path = '/public/dumps/public/'+languagecode+'wiki/latest/'+languagecode+'wiki-latest-page.sql.gz'
        wikilanguages_utils.check_dump(dumps_path, script_name)
        dump_in = gzip.open(dumps_path, 'r')
        # NOTE(review): the empty-line counter `i` is first assigned inside
        # the loop's else branch; if the very first read were empty this
        # would raise an error -- confirm against the original.
        while True:
            line = dump_in.readline()
            try: line = line.decode("utf-8")
            except UnicodeDecodeError: line = str(line)
            if line == '':
                i+=1
                # Stop after three consecutive empty reads (end of dump).
                if i==3: break
            else: i=0
            if wikilanguages_utils.is_insert(line):
                values = wikilanguages_utils.get_values(line)
                if wikilanguages_utils.values_sanity_check(values): rows = wikilanguages_utils.parse_values(values)
                for row in rows:
                    page_id = int(row[0])
                    page_namespace = int(row[1])
                    cat_title = str(row[2])
                    # Namespace 14 holds the Category pages; skip the rest.
                    if page_namespace != 14: continue
                    category_page_ids_page_titles[page_id]=cat_title
                    category_page_titles_page_ids[cat_title]=page_id
        print (str(datetime.timedelta(seconds=time.time() - functionstartTime)))
        # BUG(review): category_links_cat_cat is only assigned further down
        # in this function, so this reference raises UnboundLocalError;
        # presumably category_page_ids_page_titles was meant -- confirm.
        print (len(category_links_cat_cat))
        print ('all categories loaded')
        # For very large wikis, spill the category links into a temporary
        # SQLite database instead of holding them all in memory.
        storing_catlinks = False
        if len(category_page_ids_page_titles) > 2000000: # if the language is over 2,000,000 categories
            storing_catlinks = True
            print ('storing category links.')
            conn = sqlite3.connect(databases_path + languagecode + 'wiki_category_links_temp.db'); cursor = conn.cursor()
            query = ('CREATE TABLE IF NOT EXISTS category_links_cat_art (category_title text, page_id integer, PRIMARY KEY (category_title, page_id));')
            cursor.execute(query); conn.commit()
            query = ('CREATE TABLE IF NOT EXISTS category_links_cat_cat (category_title text, subcategory_title text, PRIMARY KEY (category_title, subcategory_title));')
            cursor.execute(query); conn.commit()
            category_links_cat_cat = []
            category_links_cat_art = []
        else:
            # In-memory adjacency: category -> subcategories / member pages.
            category_links_cat_cat_dict = {}
            category_links_cat_art_dict = {}
            for cat_title in category_page_titles_page_ids.keys():
                category_links_cat_cat_dict[cat_title] = set()
                category_links_cat_art_dict[cat_title] = set()
        # SECOND: read the categorylinks dump and build the cat->cat and
        # cat->article relations (dicts of sets, or the temp database).
        # https://www.mediawiki.org/wiki/Manual:Categorylinks_table
        dumps_path = '/public/dumps/public/'+languagecode+'wiki/latest/'+languagecode+'wiki-latest-categorylinks.sql.gz'
        print (dumps_path)
        wikilanguages_utils.check_dump(dumps_path, script_name)
        dump_in = gzip.open(dumps_path, 'r')
        iter = 0  # NOTE(review): shadows the builtin iter()
        while True:
            iter+=1
            line = dump_in.readline()
            try: line = line.decode("utf-8")
            except UnicodeDecodeError: line = str(line)
            if line == '':
                i+=1
                if i==3: break
            else: i=0
            if wikilanguages_utils.is_insert(line):
                values = wikilanguages_utils.get_values(line)
                if wikilanguages_utils.values_sanity_check(values): rows = wikilanguages_utils.parse_values(values)
                for row in rows:
                    # print (row)
                    # input('')
                    try:
                        page_id = int(row[0])
                        # if page_id == 375668:
                        #     print (row)
                        #     input('')
                    except:
                        continue
                    try:
                        cat_title = str(row[1].strip("'"))
                        # if page_id == 375668:
                        #     print (cat_title)
                        #     print ()
                        #     input('')
                    except:
                        continue
                    # Ignore links to categories not seen in the page dump.
                    if cat_title not in category_page_titles_page_ids:
                        continue
                    if storing_catlinks:
                        if page_id in category_page_ids_page_titles: # is this a category
                            category_links_cat_cat.append((cat_title, category_page_ids_page_titles[page_id]))
                        else: # this is an article
                            category_links_cat_art.append((cat_title, page_id))
                    else:
                        if page_id in category_page_ids_page_titles: # is this a category
                            category_links_cat_cat_dict[cat_title].add(category_page_ids_page_titles[page_id])
                        else: # this is an article
                            category_links_cat_art_dict[cat_title].add(page_id)
                    # if page_id == 375668:
                    #     print (row)
                    #     input('')
                    # if 'LGBT' in str(row[1]):
                    #     print (row)
                    #     print (category_links_cat_cat['LGBT'])
                    #     print (category_links_cat_art['LGBT'])
            # Flush the batched links to the temp database every 5000 lines.
            if storing_catlinks and iter % 5000 == 0:
                print (str(iter)+' categorylinks lines read.')
                cursor.executemany('INSERT OR IGNORE INTO category_links_cat_cat (category_title, subcategory_title) VALUES (?,?)', category_links_cat_cat);
                conn.commit()
                cursor.executemany('INSERT OR IGNORE INTO category_links_cat_art (category_title, page_id) VALUES (?,?)', category_links_cat_art);
                conn.commit()
                category_links_cat_cat = []
                category_links_cat_art = []
        if storing_catlinks:
            # Final flush of whatever is left in the batches.
            cursor.executemany('INSERT OR IGNORE INTO category_links_cat_cat (category_title, subcategory_title) VALUES (?,?)', category_links_cat_cat);
            conn.commit()
            cursor.executemany('INSERT OR IGNORE INTO category_links_cat_art (category_title, page_id) VALUES (?,?)', category_links_cat_art);
            conn.commit()
            category_links_cat_cat = []
            category_links_cat_art = []
        print (str(datetime.timedelta(seconds=time.time() - functionstartTime)))
        print ('all category links loaded')
        # input('')
        # print (category_links_cat_cat['LGBT'])
        # print (category_links_cat_art['LGBT'])
        # input('')
        # THIRD: crawl the category tree level by level (BFS), starting from
        # the categories whose title contains the keyword.
        print ('*\n')
        print ('With language '+ languagecode+ ' and category '+keyword);
        # Get categories level zero
        keyword_category = {}
        keyword_category[keyword]=set()
        for cat_title in category_page_ids_page_titles.values():
            if keyword_unicode in unidecode.unidecode(cat_title.lower().replace('_',' ')):
                keyword_category[keyword].add(cat_title)
        cattitles_total_level = {}
        for cat_title in keyword_category[keyword]:
            cattitles_total_level[cat_title] = None
        # Nothing to crawl for this language.
        if len(cattitles_total_level) == 0: return
        # ITERATIONS
        # CATEGORIES FROM LEVELS
        level = 1
        num_levels = 25
        # English has a much denser category graph; crawl fewer levels.
        if languagecode=='en': num_levels = 10
        j = 0  # NOTE(review): unused
        total_categories = dict(); total_categories.update(cattitles_total_level)
        print ('Number of categories to start: '+str(len(total_categories)))
        # page_id -> shallowest crawl level at which the article was found.
        selectedarticles_level = {}
        while (level <= num_levels): # Here we choose the number of levels we prefer.
            i = 0
            newcategories = dict()
            for cat_title in cattitles_total_level.keys():
                # print (cat_title)
                # print (category_links_cat_cat[cat_title])
                if storing_catlinks:
                    for row in cursor.execute('SELECT subcategory_title FROM category_links_cat_cat WHERE category_title = ?;', (cat_title,)):
                        cat_title2 = row[0]
                        # Only enqueue categories not crawled before.
                        try: total_categories[cat_title2]
                        except: newcategories[cat_title2] = None
                else:
                    for cat_title2 in category_links_cat_cat_dict[cat_title]:
                        try: total_categories[cat_title2]
                        except: newcategories[cat_title2] = None
                if storing_catlinks:
                    for row in cursor.execute('SELECT page_id FROM category_links_cat_art WHERE category_title = ?;', (cat_title,)):
                        page_id = row[0]
                        # Keep the shallowest level seen for the page.
                        try:
                            cur_level = selectedarticles_level[page_id]
                            if cur_level > level: selectedarticles_level[page_id] = level
                        except:
                            selectedarticles_level[page_id] = level
                        # BUG(review): `selectedarticles` is never initialised
                        # in this function and both branches of this if assign
                        # the same value; likely leftover code -- confirm.
                        if page_id in selectedarticles:
                            selectedarticles[page_id] = level
                        else:
                            selectedarticles[page_id] = level
                        i += 1
                else:
                    for page_id in category_links_cat_art_dict[cat_title]:
                        try:
                            cur_level = selectedarticles_level[page_id]
                            if cur_level > level:
                                selectedarticles_level[page_id] = level
                        except:
                            selectedarticles_level[page_id] = level
                        # BUG(review): see note above about `selectedarticles`.
                        if page_id in selectedarticles:
                            selectedarticles[page_id] = level
                        else:
                            selectedarticles[page_id] = level
                        i += 1
            # The newly found categories become the frontier of the next level.
            cattitles_total_level = dict()
            cattitles_total_level.update(newcategories)
            total_categories.update(newcategories)
            print('Level: '+str(level) + ". Number of new articles is: " + str(i)+ ". Total number of articles is "+str(len(selectedarticles_level))+'. Number of new categories is: '+str(len(newcategories))+'. Total number of categories is: '+str(len(total_categories)))
            level = level + 1
            if len(newcategories) == 0:
                print ('No new categories: break!')
                break
        # GETTING READY TO INSERT
        parameters = []
        discarded = 0
        try:
            # NOTE(review): when storing_catlinks is True, `cursor` points at
            # the temp links database, which has no <lang>wiki table, so this
            # SELECT fails and is silently swallowed -- confirm intent.
            for row in cursor.execute('SELECT qitem, page_id, page_title, sexual_orientation, num_inlinks_from_lgbt, num_outlinks_to_lgbt, percent_inlinks_from_lgbt, percent_outlinks_to_lgbt, gender FROM '+languagecode+'wiki'):
                qitem = row[0]
                page_id = row[1]
                page_title = row[2]
                sexual_orientation = row[3]
                lgbt_binary = None
                gender = row[8]
                # Q6581072/Q6581097 are presumably the Wikidata items for
                # female/male; any other non-null gender flags the biography.
                if gender not in ('Q6581072','Q6581097') and gender != None:
                    lgbt_binary = 1
                try:
                    category_crawling_level = selectedarticles_level[page_id]
                except:
                    category_crawling_level = None
                # Q1035954 is presumably Wikidata's heterosexuality item; any
                # other non-empty orientation flags the biography -- confirm.
                if sexual_orientation != "Q1035954" and sexual_orientation != "" and sexual_orientation != None:
                    lgbt_binary = 1
                else:
                    sexual_orientation = None
                # if category_crawling_level == None:
                #     discarded+=1
                #     continue
                num_inlinks_from_lgbt = row[4]
                num_outlinks_to_lgbt = row[5]
                percent_inlinks_from_lgbt = row[6]
                percent_outlinks_to_lgbt = row[7]
                page_title_rect = unidecode.unidecode(page_title).lower().replace(' ','_')
                keyword_param = None
                # Titles containing the keyword are always positive samples.
                if keyword_unicode in page_title_rect:
                    keyword_param = keyword
                    lgbt_binary = 1
                # Discard pages that carry no signal at all.
                if num_inlinks_from_lgbt == 0 and num_outlinks_to_lgbt == 0 and category_crawling_level == None:
                    discarded+=1
                    continue
                parameters.append((qitem, page_id, page_title, sexual_orientation, num_inlinks_from_lgbt, num_outlinks_to_lgbt, percent_inlinks_from_lgbt, percent_outlinks_to_lgbt, keyword_param, category_crawling_level, lgbt_binary))
        except:
            pass
        cursor3.executemany('INSERT OR IGNORE INTO '+languagecode+'wiki_lgbt (qitem, page_id, page_title, lgbt_biography, num_inlinks_from_lgbt, num_outlinks_to_lgbt, percent_inlinks_from_lgbt, percent_outlinks_to_lgbt, keyword, category_crawling_level, lgbt_binary) VALUES (?,?,?,?,?,?,?,?,?,?,?)', parameters);
        conn3.commit()
        print (discarded)
        print (len(parameters))
    # Clean up the temporary links database, if one was created.
    try:
        os.remove(databases_path + languagecode + 'wiki_category_links_temp.db'); print (languagecode + 'wiki_category_links_temp.db'+' deleted.');
    except:
        pass
    duration = str(datetime.timedelta(seconds=time.time() - functionstartTime))
    print (duration)
    # wikilanguages_utils.verify_function_run(cycle_year_month, script_name, function_name, 'mark', duration)
def store_articles_lgbt_topic_binary_classifier(languagecode):
    """Train a RandomForest on the groundtruth rows of <languagecode>wiki_lgbt
    and use it to set lgbt_binary = 1 on still-unlabelled candidate articles.

    Positives are articles whose title contains the language's LGBT keyword;
    negatives are sampled from articles with no link exchange with LGBT
    content ("negative sampling"). Predictions are written back into the
    lgbt_content database (side effect). Returns early (None) whenever there
    is no keyword, no groundtruth or not enough samples.
    """
    function_name = 'store_articles_lgbt_topic_binary_classifier '+languagecode
    # if wikilanguages_utils.verify_function_run(cycle_year_month, script_name, function_name, 'check','')==1: return
    functionstartTime = time.time()
    try:
        keyword = wikilanguages_utils.store_lgbt_label('get')[languagecode]
        print (keyword)
    except:
        # No LGBT keyword known for this language: nothing to classify.
        print ('There is not a keyword for LGBT in this language: '+languagecode)
        return
    # input('')
    # OBTAIN THE DATA TO FIT.
    conn3 = sqlite3.connect(databases_path + lgbt_content_db); cursor3 = conn3.cursor()
    query = 'SELECT qitem, page_id, page_title, sexual_orientation, num_inlinks_from_lgbt, num_outlinks_to_lgbt, percent_inlinks_from_lgbt, percent_outlinks_to_lgbt, keyword, category_crawling_level, lgbt_binary FROM '+languagecode+'wiki_lgbt;'
    features = ['qitem']+['lgbt_binary', 'category_crawling_level', 'num_inlinks_from_lgbt','num_outlinks_to_lgbt', 'percent_inlinks_from_lgbt', 'percent_outlinks_to_lgbt', 'keyword'] # 'lgbt_biography', 'keyword',
    lgbt_df = pd.read_sql_query(query, conn3)
    # qitem -> page_title lookup, used later when printing per-article results.
    qitems_page_titles = pd.Series(lgbt_df.page_title.values,index=lgbt_df.qitem).to_dict()
    lgbt_df = lgbt_df[features]
    lgbt_df = lgbt_df.set_index(['qitem'])
    lgbt_df = lgbt_df.fillna(0)
    lgbt_df.num_inlinks_from_lgbt = lgbt_df.num_inlinks_from_lgbt.astype(int)
    lgbt_df.num_outlinks_to_lgbt = lgbt_df.num_outlinks_to_lgbt.astype(int)
    if len(lgbt_df.index.tolist())==0: print ('It is not possible to classify Wikipedia Articles as there is no groundtruth.'); return (0,0,[],[]) # maxlevel,num_articles_lgbt,lgbt_df_list,binary_list
    # FORMAT THE DATA FEATURES AS NUMERICAL FOR THE MACHINE LEARNING
    category_crawling_absolute_level=lgbt_df['category_crawling_level'].tolist()
    try:
        maxlevel = max(category_crawling_absolute_level)
    except:
        print ('There are not articles retrieved with category crawling, therefore we cannot classify articles.')
        return
    if maxlevel == 0:
        print ('There are not articles retrieved with category crawling, therefore we cannot classify articles.')
        return
    # Invert the crawling depth so that articles closer to the seed category get
    # a larger numeric value; 0 keeps meaning "not reached by category crawling".
    for n, i in enumerate(category_crawling_absolute_level):
        if i > 0:
            category_crawling_absolute_level[n]=abs(i-(maxlevel+1))
        else:
            category_crawling_absolute_level[n]=0
    lgbt_df = lgbt_df.assign(category_crawling_level = category_crawling_absolute_level)
    # keyword_title=lgbt_df['keyword'].tolist()
    # for n, i in enumerate(keyword_title):
    # if i is not 0: keyword_title[n]=1
    # else: keyword_title[n]=0
    # lgbt_df = lgbt_df.assign(keyword = keyword_title)
    # lgbt_biography=lgbt_df['lgbt_biography'].tolist()
    # for n, i in enumerate(lgbt_biography):
    # if i is not 0: lgbt_biography[n]=1
    # else: lgbt_biography[n]=0
    # lgbt_df = lgbt_df.assign(lgbt_biography = lgbt_biography)
    lgbt_df = lgbt_df.fillna(0)
    # SAMPLING
    print ('sampling method: negative sampling.')
    lgbt_df_yes = lgbt_df.loc[lgbt_df['keyword'] == keyword] # positives are the biographies
    lgbt_df = lgbt_df.drop(columns=['keyword'])
    # lgbt_df_yes = lgbt_df.loc[lgbt_df['lgbt_binary'] == 1] # positives are the biographies
    # NOTE(review): lgbt_df_yes was sliced before 'keyword' was dropped above, so
    # both columns must be dropped here to keep its features aligned with the negatives.
    lgbt_df_yes = lgbt_df_yes.drop(columns=['lgbt_binary','keyword'])
    lgbt_df_list_yes = lgbt_df_yes.values.tolist()
    num_articles_lgbt = len(lgbt_df_list_yes)
    lgbt_df_list_probably_no = []
    size_sample = 6
    if languagecode == 'en': size_sample = 4 # exception for English
    # Negative sampling: draw size_sample batches of up to num_articles_lgbt rows
    # that exchange no links with LGBT content ("probably not LGBT").
    for i in range(1,1+size_sample):
        lgbt_df = lgbt_df.sample(frac=1) # randomize the rows order
        lgbt_df_probably_no = lgbt_df.loc[(lgbt_df['num_outlinks_to_lgbt'] == 0) | (lgbt_df['num_inlinks_from_lgbt'] == 0)]
        lgbt_df_probably_no = lgbt_df_probably_no.sample(frac=1) # randomize the rows order
        lgbt_df_probably_no = lgbt_df_probably_no.drop(columns=['lgbt_binary'])
        lgbt_df_list_probably_no = lgbt_df_list_probably_no + lgbt_df_probably_no.values.tolist()[:num_articles_lgbt]
    num_probably_no = len(lgbt_df_list_probably_no)
    lgbt_df_list = lgbt_df_list_yes + lgbt_df_list_probably_no
    binary_list = [1]*num_articles_lgbt+[0]*num_probably_no
    print ('\nConverting the dataframe...')
    print ('These are its columns:')
    print (list(lgbt_df_yes.columns.values))
    print (num_articles_lgbt) # these are the yes (groundtruth)
    print (len(lgbt_df_list)) # these are the no (groundtruth)
    print (len(binary_list))
    # WE GET THE POTENTIAL CCC ARTICLES THAT HAVE NOT BEEN 1 BY ANY OTHER MEANS.
    # For the testing takes those with one of these features category crawling, els keywords i els outlinks a ethnic groups > 0.
    # query = 'SELECT * FROM lgbt_articles WHERE primary_lang = "'+languagecode+'" AND qitem_lgbt = "'+qitem_lgbt+' AND category_crawling_absolute_level IS NOT NULL AND lgbt_binary != 1 AND (num_outlinks_to_group > 0 OR num_inlinks_from_group > 0)";'
    query = 'SELECT qitem, page_id, page_title, lgbt_biography, num_inlinks_from_lgbt, num_outlinks_to_lgbt, percent_inlinks_from_lgbt, percent_outlinks_to_lgbt, keyword, category_crawling_level, lgbt_binary FROM '+languagecode+'wiki_lgbt WHERE lgbt_binary IS NULL AND category_crawling_level IS NOT NULL;'
    features = ['qitem']+['category_crawling_level', 'num_inlinks_from_lgbt','num_outlinks_to_lgbt', 'percent_inlinks_from_lgbt', 'percent_outlinks_to_lgbt'] #'lgbt_biography', 'keyword',
    potential_lgbt_df = pd.read_sql_query(query, conn3)
    potential_lgbt_df = potential_lgbt_df[features]
    potential_lgbt_df = potential_lgbt_df.set_index(['qitem'])
    potential_lgbt_df = potential_lgbt_df.fillna(0)
    print (len(potential_lgbt_df))
    # FORMAT THE DATA FEATURES AS NUMERICAL FOR THE MACHINE LEARNING
    category_crawling_absolute_level=potential_lgbt_df['category_crawling_level'].tolist()
    try:
        maxlevel = max(category_crawling_absolute_level)
    except:
        print ('There are not articles retrieved with category crawling, therefore we cannot classify articles.')
        return
    # Same depth inversion as for the training frame above.
    for n, i in enumerate(category_crawling_absolute_level):
        if i > 0:
            category_crawling_absolute_level[n]=abs(i-(maxlevel+1))
        else:
            category_crawling_absolute_level[n]=0
    potential_lgbt_df = potential_lgbt_df.assign(category_crawling_level = category_crawling_absolute_level)
    # keyword_title=lgbt_df['keyword'].tolist()
    # for n, i in enumerate(keyword_title):
    # if i is not 0: keyword_title[n]=1
    # else: keyword_title[n]=0
    # lgbt_df = lgbt_df.assign(keyword = keyword_title)
    # lgbt_biography=lgbt_df['lgbt_biography'].tolist()
    # for n, i in enumerate(lgbt_biography):
    # if i is not 0: lgbt_biography[n]=1
    # else: lgbt_biography[n]=0
    # lgbt_df = lgbt_df.assign(lgbt_biography = lgbt_biography)
    # NOT ENOUGH ARTICLES
    if len(potential_lgbt_df)==0: print ('There are not potential articles for lgbt, so it returns empty'); return
    potential_lgbt_df = potential_lgbt_df.sample(frac=1) # randomize the rows order
    print ('We selected this number of potential Ethnic articles: '+str(len(potential_lgbt_df)))
    # FIT THE SVM MODEL
    # NOTE(review): despite this comment, the classifier actually used below is a RandomForest.
    print ('We have: num_articles_lgbt,lgbt_df_list,binary_list,potential_lgbt_df')
    print ('Fitting the data into the classifier.')
    print ('The data has '+str(len(lgbt_df_list))+' samples.')
    if num_articles_lgbt == 0 or len(lgbt_df_list)<10: print ('There are not enough samples.'); return
    X = lgbt_df_list
    y = binary_list
    # print (X)
    # print (y)
    print (len(X))
    print (len(y))
    print ('The chosen classifier is RandomForest')
    clf = RandomForestClassifier(n_estimators=100)
    clf.fit(X, y)
    print ('The fit classes are: '+str(clf.classes_))
    print ('The fit has a score of: '+str(clf.score(X, y, sample_weight=None)))
    print (clf.feature_importances_.tolist())
    # TEST THE DATA
    print ('Calculating which page is IN or OUT...')
    if potential_lgbt_df is None:
        print ('No Articles to verify.');
        duration = str(datetime.timedelta(seconds=time.time() - functionstartTime))
        # wikilanguages_utils.verify_function_run(cycle_year_month, script_name, function_name, 'mark', duration)
        return
    if len(potential_lgbt_df)==0:
        print ('No Articles to verify.');
        duration = str(datetime.timedelta(seconds=time.time() - functionstartTime))
        # wikilanguages_utils.verify_function_run(cycle_year_month, script_name, function_name, 'mark', duration)
        return
    qitems = potential_lgbt_df.index.tolist()
    potential = potential_lgbt_df.values.tolist()
    print ('This is the number of articles we have for testing '+str(len(potential)))
    # input('')
    print ('We print the results (0 for no, 1 for yes):')
    # visible == 1 switches to the slow per-article debugging output below.
    visible = 0
    print (visible)
    selected=[]
    # DO NOT PRINT THE CLASSIFIER RESULTS ARTICLE BY ARTICLE
    if visible == 0:
        # testdict = {}
        result = clf.predict(potential)
        i = 0
        for x in result:
            # testdict[qitems[i]]=(x,potential[i])
            if x == 1:
                qitem=qitems[i]
                selected.append((qitem,))
            i += 1
        # print (testdict)
    # PRINT THE CLASSIFIER RESULTS ARTICLE BY ARTICLE
    else:
        # provisional
        # print (potential[:15])
        # print (qitems[:15])
        count_yes=0
        count_no=0
        for n,i in enumerate(potential):
            result = clf.predict([i])
            qitem=qitems[n]
            if qitem == None: continue
            if result[0] == 1:
                count_yes+=1
                print (['category_crawling_level', 'num_inlinks_from_lgbt','num_outlinks_to_lgbt', 'percent_inlinks_from_lgbt', 'percent_outlinks_to_lgbt']) # 'lgbt_biography', 'keyword',
                print(i)
                print(clf.predict_proba([i]).tolist())
                page_title = qitems_page_titles[qitem]
                if page_title == None: page_title = ''
                print (str(count_yes)+'\tIN\t'+qitem+':'+page_title+'\n')
                # input('')
                # NOTE(review): (qitem) is a bare string, not the (qitem,) 1-tuple
                # built in the visible == 0 branch above — confirm before enabling.
                try: selected.append((qitem))
                except: pass
            else:
                count_no+=1
                print (['category_crawling_level', 'num_inlinks_from_lgbt','num_outlinks_to_lgbt', 'percent_inlinks_from_lgbt', 'percent_outlinks_to_lgbt']) #
                print(i)
                print(clf.predict_proba([i]).tolist())
                page_title = qitems_page_titles[qitem]
                if page_title == None: page_title = ''
                print (str(count_no)+'\tOUT:\t'+qitem+':'+page_title+'\n')
                # input('')
    print ('There were already '+str(num_articles_lgbt)+' lgbt Articles selected as groundtruth.')
    print ('\nThis algorithm CLASSIFIED '+str(len(selected))+' Articles as lgbt_binary = 1 from a total of '+str(len(potential))+' from the testing data. This is a: '+str(round(100*len(selected)/len(potential),3))+'%.')
    # input('')
    # Persist the predicted positives back into the lgbt_content database.
    conn3 = sqlite3.connect(databases_path + lgbt_content_db); cursor3 = conn3.cursor()
    query = 'UPDATE '+languagecode+'wiki_lgbt SET lgbt_binary = 1 WHERE qitem = ?;'
    cursor3.executemany(query,selected)
    conn3.commit()
    print ('lgbt topics in '+languagecode+' created.')
    duration = str(datetime.timedelta(seconds=time.time() - functionstartTime))
    print (duration)
    # wikilanguages_utils.verify_function_run(cycle_year_month, script_name, function_name, 'mark', duration)
def update_push_lgbt_topics_wikipedia_diversity():
    """Aggregate lgbt_binary = 1 flags across all language editions and push
    them into the wikipedia_diversity database.

    First pass counts, per qitem, how many language editions flag the article
    (and remembers a title keyword when present); second pass clears and then
    rewrites the lgbt_topic / lgbt_keyword_title columns of every
    <languagecode>wiki table (side effect on the databases).
    """
    functionstartTime = time.time()
    conn2 = sqlite3.connect(databases_path + lgbt_content_db); cursor2 = conn2.cursor()
    conn = sqlite3.connect(databases_path + wikipedia_diversity_db); cursor = conn.cursor()
    qitems = {}          # qitem -> number of language editions flagging it
    keyword_title = {}   # qitem -> LGBT keyword found in the title (if any)
    for languagecode in wikilanguagecodes:
        for row in cursor2.execute('SELECT qitem, keyword FROM '+languagecode+'wiki_lgbt WHERE lgbt_binary = 1;'):
            # dict.get replaces the original try/except-on-missing-key counting.
            qitems[row[0]] = qitems.get(row[0], 0) + 1
            keyword = row[1]
            if keyword != '' and keyword is not None:
                keyword_title[row[0]] = row[1]
    for languagecode in wikilanguagecodes:
        print (languagecode)
        try:
            query = 'UPDATE '+languagecode+'wiki SET lgbt_topic = NULL;'
            cursor.execute(query)
            conn.commit()
        except sqlite3.Error:
            # Table for this language edition is missing/unwritable: skip it.
            continue
        params = []
        (page_titles_qitems, page_titles_page_ids) = wikilanguages_utils.load_dicts_page_ids_qitems(0,languagecode)
        qitems_page_titles = {v: k for k, v in page_titles_qitems.items()}
        for qitem, value in qitems.items():
            keyword = keyword_title.get(qitem)  # None when no title keyword was seen
            try:
                page_title = qitems_page_titles[qitem]
                page_id = page_titles_page_ids[page_title]
            except KeyError:
                # Article does not exist in this language edition.
                continue
            params.append((value, keyword, qitem, page_title, page_id))
        query = 'UPDATE '+languagecode+'wiki SET lgbt_topic = ?, lgbt_keyword_title = ? WHERE qitem = ? AND page_title = ? and page_id = ?;'
        cursor.executemany(query,params)
        conn.commit()
    duration = str(datetime.timedelta(seconds=time.time() - functionstartTime))
    print (duration)
#######################################################################################
class Logger_out(object):
    """Tee for sys.stdout: mirrors every message to the original terminal
    stream and to ``lgbt_content_selection.out`` so runs leave a log file."""

    def __init__(self):
        self.terminal = sys.stdout
        self.log = open("lgbt_content_selection" + ".out", "w")

    def write(self, message):
        # Mirror the message to both destinations.
        self.terminal.write(message)
        self.log.write(message)

    def flush(self):
        # Fix: the original no-op swallowed flushes, so buffered output
        # (e.g. print(..., flush=True)) could be lost on a crash.
        self.terminal.flush()
        self.log.flush()
class Logger_err(object):
    """Tee for sys.stderr: mirrors every message to the original error
    stream and to ``lgbt_content_selection.err``."""

    def __init__(self):
        # Fix: the original captured sys.stdout (copy-paste from Logger_out),
        # sending error output to stdout even though this instance replaces
        # sys.stderr in the __main__ block.
        self.terminal = sys.stderr
        self.log = open("lgbt_content_selection" + ".err", "w")

    def write(self, message):
        self.terminal.write(message)
        self.log.write(message)

    def flush(self):
        # Fix: flush both streams instead of silently doing nothing.
        self.terminal.flush()
        self.log.flush()
### MAIN:
if __name__ == '__main__':
    script_name = 'lgbt_content_selection.py'
    # Tee stdout/stderr into .out/.err log files for the whole run.
    sys.stdout = Logger_out()
    sys.stderr = Logger_err()
    cycle_year_month = wikilanguages_utils.get_current_cycle_year_month()
    startTime = time.time()
    territories = wikilanguages_utils.load_wikipedia_languages_territories_mapping()
    languages = wikilanguages_utils.load_wiki_projects_information();
    wikilanguagecodes = sorted(languages.index.tolist())
    print ('checking languages Replicas databases and deleting those without one...')
    # Verify/Remove all languages without a replica database.
    # Fix: iterate over a copy — removing from the list being iterated
    # silently skips the element following each removal.
    for a in list(wikilanguagecodes):
        if wikilanguages_utils.establish_mysql_connection_read(a) is None:
            wikilanguagecodes.remove(a)
    # Only those with a geographical context
    languageswithoutterritory = list(set(languages.index.tolist()) - set(list(territories.index.tolist())))
    for languagecode in languageswithoutterritory:
        try: wikilanguagecodes.remove(languagecode)
        except ValueError: pass  # already removed above
    # Get the number of Articles for each Wikipedia language edition
    wikipedialanguage_numberarticles = wikilanguages_utils.load_wikipedia_language_editions_numberofarticles(wikilanguagecodes,'')
    wikilanguagecodes_by_size = [k for k in sorted(wikipedialanguage_numberarticles, key=wikipedialanguage_numberarticles.get, reverse=False)]
    # if wikilanguages_utils.verify_script_run(cycle_year_month, script_name, 'check', '') == 1: exit();
    main()
    # Fix: the original referenced functionstartTime, which is only ever a
    # local inside the functions above — a NameError here; the script-level
    # timer is startTime.
    duration = str(datetime.timedelta(seconds=time.time() - startTime))
    # wikilanguages_utils.verify_script_run(cycle_year_month, script_name, 'mark', duration)
    wikilanguages_utils.finish_email(startTime,'lgbt_content_selection.out','lgbt CONTENT Selection')
| 38.505882 | 308 | 0.635044 |
6bfee7c2ea5fc9925547aa9b4d43756e661701c0 | 1,257 | py | Python | tests/function/value/test_func_text.py | remram44/openclean-core | 8c09c8302cadbb3bb02c959907f91a3ae343f939 | [
"BSD-3-Clause"
] | 4 | 2021-04-20T09:06:26.000Z | 2021-11-20T20:31:28.000Z | tests/function/value/test_func_text.py | remram44/openclean-core | 8c09c8302cadbb3bb02c959907f91a3ae343f939 | [
"BSD-3-Clause"
] | 14 | 2021-01-19T19:23:16.000Z | 2021-04-28T14:31:03.000Z | tests/function/value/test_func_text.py | remram44/openclean-core | 8c09c8302cadbb3bb02c959907f91a3ae343f939 | [
"BSD-3-Clause"
] | 5 | 2021-08-24T11:57:21.000Z | 2022-03-17T04:39:04.000Z | # This file is part of the Data Cleaning Library (openclean).
#
# Copyright (C) 2018-2021 New York University.
#
# openclean is released under the Revised BSD License. See file LICENSE for
# full license details.
"""Unit tests for string helper functions."""
import pytest
from openclean.function.value.text import AlphaNumeric, to_len, to_lower, to_title, to_upper
@pytest.mark.parametrize(
    'val,expected',
    [
        ('ABC', True), ('ab12', True),
        ('1-3', False), ('-1', False),
        ('123', True), (123, True),
        (12.34, False), (' ', False),
        ('a#1', False), ('!@#$', False),
    ],
)
def test_alphanumeric_predicate(val, expected):
    """The AlphaNumeric predicate accepts only purely alphanumeric values."""
    predicate = AlphaNumeric()
    assert predicate.eval(val) == expected
@pytest.mark.parametrize(
    'val,func,expected',
    [
        ('abc', to_len, 3), (42, to_len, 2),
        ('ABC', to_lower, 'abc'), (42, to_lower, 42),
        ('abc', to_title, 'Abc'), (42, to_title, 42),
        ('abc', to_upper, 'ABC'), (42, to_upper, 42),
    ],
)
def test_string_function(val, func, expected):
    """Each string helper maps the given value to the expected result."""
    assert func(val) == expected
| 24.647059 | 92 | 0.579952 |
2543ce86ca2b4201663cee41a5ca2c0b2280cfe8 | 291 | py | Python | Desafio 103.py | MoomenEltelbany/PythonDesafios | aa2f44d3104cf3607f58dc42c2f8fc8023f128de | [
"MIT"
] | null | null | null | Desafio 103.py | MoomenEltelbany/PythonDesafios | aa2f44d3104cf3607f58dc42c2f8fc8023f128de | [
"MIT"
] | null | null | null | Desafio 103.py | MoomenEltelbany/PythonDesafios | aa2f44d3104cf3607f58dc42c2f8fc8023f128de | [
"MIT"
def ficha(nome='<Desconhecido>', gols=0):
    """Print a player's championship scoresheet line.

    Defaults cover a missing name ('<Desconhecido>') and a missing goal
    count (0)."""
    linha = f'O jogador {nome} fez {gols} gols(s) no campeonato.'
    print(linha)
n = str(input(f'Nome do Jogador: '))
g = str(input(f'Número do gols: '))
# A non-numeric goal count silently falls back to 0.
g = int(g) if g.isnumeric() else 0
# A blank name falls back to ficha()'s default player name.
if n.strip():
    ficha(n, g)
else:
    ficha(gols=g)
| 19.4 | 64 | 0.57732 |
ea0fbf1aaee14031e1605717382275c69dd2184b | 4,918 | py | Python | .history/my_classes/ScopesClosuresAndDecorators/Closures_20210711204043.py | minefarmer/deep-Dive-1 | b0675b853180c5b5781888266ea63a3793b8d855 | [
"Unlicense"
] | null | null | null | .history/my_classes/ScopesClosuresAndDecorators/Closures_20210711204043.py | minefarmer/deep-Dive-1 | b0675b853180c5b5781888266ea63a3793b8d855 | [
"Unlicense"
] | null | null | null | .history/my_classes/ScopesClosuresAndDecorators/Closures_20210711204043.py | minefarmer/deep-Dive-1 | b0675b853180c5b5781888266ea63a3793b8d855 | [
"Unlicense"
] | null | null | null | """ Closuers
Free variables and closures
Remember: Functions defined inside another function can access the outer (nonLocal) variables
"""
def outer():
    """Demonstrate a closure: inner() captures outer's local x."""
    # x is a *free variable* for inner(): it lives in outer's (nonlocal)
    # scope and is captured by the closure.
    x = 'python'

    def inner():
        # x resolves through the enclosing scope at call time.
        print("{0} rocks!".format(x))

    inner()


# Fix: the original interleaved lecture prose directly into the code lines
# (e.g. "def inner(): /"), which made the whole file a SyntaxError; the prose
# now lives in comments.
outer()  # prints: python rocks!
""" Returning the inner function
What happens if, instead of calling (running) inner from inside outer, we return it?
def outer():
x = 'python' # x is a free variable in inner, it is bound to the variable x in outer, this happens when outer runs
def inner():
print("{0} rocks!".format(x))
return inner # when returning inner, we are actually 'returning' the closure
We can assign that return value to a variable name: fn = outer()
fn() # python rocks!
When we called fn
at that time Python determined the value of x in the extended scope
But notice that outer had finished running before we called fn - it's scope was gone
Python cells and Multi-Scoped Variables
def outer(): # Here the value of x is shared between two scoped
x = 'python' # outer
def inner(): # inner
print(x)
return inner # The label x is in two different scopes
Python does this by creating a cell as an intermediary object
outer,x ----> # cell 0xA500 / str 0xFF100 # indirect reference
inner.x ----> # OxFF199 / python
# they are pointing to the same cell
# when requesting the value of the variable, Python will "double-hop" to get the final value
Closures
I can think of the closure as a function plus an EXTENDED SCOPE THAT CONTAINS THE FREE VARIABLES
The free variables's value is the object the cell points to - so that could change over time!
def outer():
a = 100
_______________________closure______
|x = 'python' |
|
def inner():
a = 10 # local variable |
print("{0} rocks!".format(x)) |
|___________________________________|
return inner
fn = outer() fn -> inner + extended scope x
Introspection
def outer():
a = 100
x = 'python'
def inner():
a = 10 # local variable
print("{0} rocks!".format(x))
return inner
fn = outer()
fn.__code__.co_freevars ->('x',) (a is not a free variable)
fn.__closure__ -> (<cell at )xA500: str object at 0xFF199>,
def outer():
x = 'python'
print(hex(id(x))) -----------------> 0xFF100 indirect reference
def inner():
print(hex(id(x))) -----------------> 0xFF100 indirect reference
print("{0} rocks!".format(x))
return inner
fn = outer()
fn()
def counter(): # closure
------------------------
count = 0 / count is a free variable
/ it is bound to the cell count
def inc(): /
nonlocal count
count += 1
return count
------------------------
return inc
fn -> inc + count -> 0
/
fn = counter() /
fn() => 1 count's (indirect) reference changed from object 0 to the object 1
fn() => 2
Every time we run a function a new scope is created.
If that function generates a closure, a new closure is created every time as well
def counter(): # closure f1 = counter()
------------------------ f2 = counter()
count = 0
f1() # 1
def onc(): f1() # 2 f1 and f2 do not have the same extended scope
nonlocal count f1() # 3
count += 1
return count f2() # 1 they are different instances of the closure
-------------------------
return inc the cells are different
def outer():
count = 0
count is a free variable-bound to count in the extended scope
def inc1(): /
nonlocal count
count = 1
return count
count is a free variable - bound to the same count
def inc2(): /
nonlocal count
count += 1
return count
/ returns a tuple containing both closures
return inc1, inc2
f1, f2 = outer()
f1() -> 1
f2() -> 2
The shared extended scope is not unusual
def adder(n):
def inner(x):
return x + n
return inner
""" | 28.427746 | 132 | 0.539854 |
47367da86f30b44992f2612993e6f0d0d4d896a0 | 3,981 | py | Python | examples/sklearn_elasticnet_diabetes/osx/train_diabetes.py | PeterSulcs/mlflow | 14c48e7bb1ca6cd6a3c1b249a486cd98bd5e7051 | [
"Apache-2.0"
] | 10,351 | 2018-07-31T02:52:49.000Z | 2022-03-31T23:33:13.000Z | examples/sklearn_elasticnet_diabetes/osx/train_diabetes.py | PeterSulcs/mlflow | 14c48e7bb1ca6cd6a3c1b249a486cd98bd5e7051 | [
"Apache-2.0"
] | 3,733 | 2018-07-31T01:38:51.000Z | 2022-03-31T23:56:25.000Z | examples/sklearn_elasticnet_diabetes/osx/train_diabetes.py | PeterSulcs/mlflow | 14c48e7bb1ca6cd6a3c1b249a486cd98bd5e7051 | [
"Apache-2.0"
] | 2,596 | 2018-07-31T06:38:39.000Z | 2022-03-31T23:56:32.000Z | #
# train_diabetes.py
#
# MLflow model using ElasticNet (sklearn) and Plots ElasticNet Descent Paths
#
# Uses the sklearn Diabetes dataset to predict diabetes progression using ElasticNet
# The predicted "progression" column is a quantitative measure of disease progression one year after baseline
# http://scikit-learn.org/stable/modules/generated/sklearn.datasets.load_diabetes.html
# Combines the above with the Lasso Coordinate Descent Path Plot
# http://scikit-learn.org/stable/auto_examples/linear_model/plot_lasso_coordinate_descent_path.html
# Original author: Alexandre Gramfort <alexandre.gramfort@inria.fr>; License: BSD 3 clause
#
# Usage:
# python train_diabetes.py 0.01 0.01
# python train_diabetes.py 0.01 0.75
# python train_diabetes.py 0.01 1.0
#
import os
import warnings
import sys
import pandas as pd
import numpy as np
from itertools import cycle
import matplotlib.pyplot as plt
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
from sklearn.model_selection import train_test_split
from sklearn.linear_model import ElasticNet
from sklearn.linear_model import lasso_path, enet_path
from sklearn import datasets
# Load Diabetes datasets
diabetes = datasets.load_diabetes()
X = diabetes.data  # feature matrix
y = diabetes.target  # disease progression one year after baseline (see header)

# Create pandas DataFrame for sklearn ElasticNet linear_model
Y = np.array([y]).transpose()  # reshape targets into a column so they concatenate as one
d = np.concatenate((X, Y), axis=1)
cols = diabetes.feature_names + ["progression"]
data = pd.DataFrame(d, columns=cols)
# Import mlflow
import mlflow
import mlflow.sklearn
# Evaluate metrics
def eval_metrics(actual, pred):
    """Return the (rmse, mae, r2) regression scores of *pred* against *actual*."""
    squared = mean_squared_error(actual, pred)
    return np.sqrt(squared), mean_absolute_error(actual, pred), r2_score(actual, pred)
if __name__ == "__main__":
warnings.filterwarnings("ignore")
np.random.seed(40)
# Split the data into training and test sets. (0.75, 0.25) split.
train, test = train_test_split(data)
# The predicted column is "progression" which is a quantitative measure of disease progression one year after baseline
train_x = train.drop(["progression"], axis=1)
test_x = test.drop(["progression"], axis=1)
train_y = train[["progression"]]
test_y = test[["progression"]]
alpha = float(sys.argv[1]) if len(sys.argv) > 1 else 0.05
l1_ratio = float(sys.argv[2]) if len(sys.argv) > 2 else 0.05
# Run ElasticNet
lr = ElasticNet(alpha=alpha, l1_ratio=l1_ratio, random_state=42)
lr.fit(train_x, train_y)
predicted_qualities = lr.predict(test_x)
(rmse, mae, r2) = eval_metrics(test_y, predicted_qualities)
# Print out ElasticNet model metrics
print("Elasticnet model (alpha=%f, l1_ratio=%f):" % (alpha, l1_ratio))
print(" RMSE: %s" % rmse)
print(" MAE: %s" % mae)
print(" R2: %s" % r2)
# Log mlflow attributes for mlflow UI
mlflow.log_param("alpha", alpha)
mlflow.log_param("l1_ratio", l1_ratio)
mlflow.log_metric("rmse", rmse)
mlflow.log_metric("r2", r2)
mlflow.log_metric("mae", mae)
mlflow.sklearn.log_model(lr, "model")
# Compute paths
eps = 5e-3 # the smaller it is the longer is the path
print("Computing regularization path using the elastic net.")
alphas_enet, coefs_enet, _ = enet_path(X, y, eps=eps, l1_ratio=l1_ratio, fit_intercept=False)
# Display results
fig = plt.figure(1)
ax = plt.gca()
colors = cycle(["b", "r", "g", "c", "k"])
neg_log_alphas_enet = -np.log10(alphas_enet)
for coef_e, c in zip(coefs_enet, colors):
l2 = plt.plot(neg_log_alphas_enet, coef_e, linestyle="--", c=c)
plt.xlabel("-Log(alpha)")
plt.ylabel("coefficients")
title = "ElasticNet Path by alpha for l1_ratio = " + str(l1_ratio)
plt.title(title)
plt.axis("tight")
# Save figures
fig.savefig("ElasticNet-paths.png")
# Close plot
plt.close(fig)
# Log artifacts (output files)
mlflow.log_artifact("ElasticNet-paths.png")
| 32.365854 | 122 | 0.708114 |
615a492f27fadadb58a87a50f2682d8332afd795 | 1,652 | py | Python | aiotdlib/api/types/input_background.py | jraylan/aiotdlib | 4528fcfca7c5c69b54a878ce6ce60e934a2dcc73 | [
"MIT"
] | 37 | 2021-05-04T10:41:41.000Z | 2022-03-30T13:48:05.000Z | aiotdlib/api/types/input_background.py | jraylan/aiotdlib | 4528fcfca7c5c69b54a878ce6ce60e934a2dcc73 | [
"MIT"
] | 13 | 2021-07-17T19:54:51.000Z | 2022-02-26T06:50:00.000Z | aiotdlib/api/types/input_background.py | jraylan/aiotdlib | 4528fcfca7c5c69b54a878ce6ce60e934a2dcc73 | [
"MIT"
] | 7 | 2021-09-22T21:27:11.000Z | 2022-02-20T02:33:19.000Z | # =============================================================================== #
# #
# This file has been generated automatically!! Do not change this manually! #
# #
# =============================================================================== #
from __future__ import annotations
from pydantic import Field
from .input_file import InputFile
from ..base_object import BaseObject
class InputBackground(BaseObject):
    """
    Contains information about background to set
    
    """

    # TDLib type discriminator, serialized as the "@type" JSON field.
    ID: str = Field("inputBackground", alias="@type")
class InputBackgroundLocal(InputBackground):
    """
    A background from a local file
    
    :param background: Background file to use. Only inputFileLocal and inputFileGenerated are supported. The file must be in JPEG format for wallpapers and in PNG format for patterns
    :type background: :class:`InputFile`
    
    """

    # TDLib type discriminator, serialized as the "@type" JSON field.
    ID: str = Field("inputBackgroundLocal", alias="@type")
    background: InputFile

    @staticmethod
    def read(q: dict) -> InputBackgroundLocal:
        # construct() bypasses pydantic validation: q comes straight from TDLib.
        return InputBackgroundLocal.construct(**q)
class InputBackgroundRemote(InputBackground):
    """
    A background from the server
    
    :param background_id: The background identifier
    :type background_id: :class:`int`
    
    """

    # TDLib type discriminator, serialized as the "@type" JSON field.
    ID: str = Field("inputBackgroundRemote", alias="@type")
    background_id: int

    @staticmethod
    def read(q: dict) -> InputBackgroundRemote:
        # construct() bypasses pydantic validation: q comes straight from TDLib.
        return InputBackgroundRemote.construct(**q)
| 30.036364 | 182 | 0.559927 |
3fa92bcc8fb0602e3a0bdf715b9aa8d52d17b12c | 7,201 | py | Python | tm1637.py | dbreen/micropython-tm1637 | 446ed2c32baa5bd8ae4050b7bc72e1fe9398ddf7 | [
"MIT"
] | null | null | null | tm1637.py | dbreen/micropython-tm1637 | 446ed2c32baa5bd8ae4050b7bc72e1fe9398ddf7 | [
"MIT"
] | null | null | null | tm1637.py | dbreen/micropython-tm1637 | 446ed2c32baa5bd8ae4050b7bc72e1fe9398ddf7 | [
"MIT"
] | null | null | null | # CircuitPython TM1637 quad 7-segment LED display driver
# Ported from MicroPython version.
import digitalio
import time
# TM1637 command bytes and timing (values documented inline in hex).
TM1637_CMD1 = 64  # 0x40 data command
TM1637_CMD2 = 192  # 0xC0 address command
TM1637_CMD3 = 128  # 0x80 display control command
TM1637_DSP_ON = 8  # 0x08 display on
TM1637_DELAY = 10  # 10us delay between clk/dio pulses
TM1637_MSB = 128  # msb is the decimal point or the colon depending on your display

# 0-9, a-z, blank, dash, star
# Each byte encodes one glyph's 7-segment pattern; encode_char/encode_digit
# index into this table.
_SEGMENTS = bytearray(b'\x3F\x06\x5B\x4F\x66\x6D\x7D\x07\x7F\x6F\x77\x7C\x39\x5E\x79\x71\x3D\x76\x06\x1E\x76\x38\x55\x54\x3F\x73\x67\x50\x6D\x78\x3E\x1C\x2A\x76\x6E\x5B\x00\x40\x63')
class TM1637(object):
"""Library for quad 7-segment LED modules based on the TM1637 LED driver.
For the CircuitPython port, pass in board pins for `clk` and `dio`, such as `board.D1`
"""
def __init__(self, clk, dio, brightness=7):
if not 0 <= brightness <= 7:
raise ValueError("Brightness out of range")
self._brightness = brightness
self.clk = digitalio.DigitalInOut(clk)
self.dio = digitalio.DigitalInOut(dio)
self.clk.direction = digitalio.Direction.OUTPUT
self.dio.direction = digitalio.Direction.OUTPUT
sleep_us(TM1637_DELAY)
self._write_data_cmd()
self._write_dsp_ctrl()
def _start(self):
self.dio.value = False
sleep_us(TM1637_DELAY)
self.clk.value = False
sleep_us(TM1637_DELAY)
def _stop(self):
self.dio.value = False
sleep_us(TM1637_DELAY)
self.clk.value = True
sleep_us(TM1637_DELAY)
self.dio.value = True
def _write_data_cmd(self):
# automatic address increment, normal mode
self._start()
self._write_byte(TM1637_CMD1)
self._stop()
def _write_dsp_ctrl(self):
# display on, set brightness
self._start()
self._write_byte(TM1637_CMD3 | TM1637_DSP_ON | self._brightness)
self._stop()
def _write_byte(self, b):
for i in range(8):
self.dio.value = bool((b >> i) & 1)
sleep_us(TM1637_DELAY)
self.clk.value = True
sleep_us(TM1637_DELAY)
self.clk.value = False
sleep_us(TM1637_DELAY)
self.clk.value = False
sleep_us(TM1637_DELAY)
self.clk.value = True
sleep_us(TM1637_DELAY)
self.clk.value = False
sleep_us(TM1637_DELAY)
def brightness(self, val=None):
"""Set the display brightness 0-7."""
# brightness 0 = 1/16th pulse width
# brightness 7 = 14/16th pulse width
if val is None:
return self._brightness
if not 0 <= val <= 7:
raise ValueError("Brightness out of range")
self._brightness = val
self._write_data_cmd()
self._write_dsp_ctrl()
def write(self, segments, pos=0):
"""Display up to 6 segments moving right from a given position.
The MSB in the 2nd segment controls the colon between the 2nd
and 3rd segments."""
if not 0 <= pos <= 5:
raise ValueError("Position out of range")
self._write_data_cmd()
self._start()
self._write_byte(TM1637_CMD2 | pos)
for seg in segments:
self._write_byte(seg)
self._stop()
self._write_dsp_ctrl()
def encode_digit(self, digit):
"""Convert a character 0-9, a-f to a segment."""
return _SEGMENTS[digit & 0x0f]
def encode_string(self, string):
"""Convert an up to 4 character length string containing 0-9, a-z,
space, dash, star to an array of segments, matching the length of the
source string."""
segments = bytearray(len(string))
for i in range(len(string)):
segments[i] = self.encode_char(string[i])
return segments
def encode_char(self, char):
"""Convert a character 0-9, a-z, space, dash or star to a segment."""
o = ord(char)
if o == 32:
return _SEGMENTS[36] # space
if o == 42:
return _SEGMENTS[38] # star/degrees
if o == 45:
return _SEGMENTS[37] # dash
if o >= 65 and o <= 90:
return _SEGMENTS[o-55] # uppercase A-Z
if o >= 97 and o <= 122:
return _SEGMENTS[o-87] # lowercase a-z
if o >= 48 and o <= 57:
return _SEGMENTS[o-48] # 0-9
raise ValueError("Character out of range: {:d} '{:s}'".format(o, chr(o)))
def hex(self, val):
"""Display a hex value 0x0000 through 0xffff, right aligned."""
string = '{:04x}'.format(val & 0xffff)
self.write(self.encode_string(string))
def number(self, num):
"""Display a numeric value -999 through 9999, right aligned."""
# limit to range -999 to 9999
num = max(-999, min(num, 9999))
string = '{0: >4d}'.format(num)
self.write(self.encode_string(string))
def numbers(self, num1, num2, colon=True):
    """Show two zero-padded values (clamped to -9..99), colon-separated."""
    first = min(max(num1, -9), 99)
    second = min(max(num2, -9), 99)
    segments = self.encode_string('{0:0>2d}{1:0>2d}'.format(first, second))
    if colon:
        segments[1] |= 0x80  # MSB of digit 2 drives the colon LED
    self.write(segments)
def temperature(self, num):
    """Show a temperature (-9..99) followed by the degree sign and 'C'.

    Values outside the displayable range show 'lo' or 'hi' instead.
    """
    if num < -9:
        self.show('lo')
    elif num > 99:
        self.show('hi')
    else:
        self.write(self.encode_string('{0: >2d}'.format(num)))
    # Always append the degrees glyph and 'C' on the last two digits.
    self.write([_SEGMENTS[38], _SEGMENTS[12]], 2)
def show(self, string, colon=False):
    """Encode and display up to four characters, optionally with colon."""
    segments = self.encode_string(string)
    if colon and len(segments) > 1:
        segments[1] |= 128  # colon shares the MSB of the second digit
    self.write(segments[:4])
def scroll(self, string, delay=250):
    """Scroll a message across the display at ``delay`` ms per step.

    ``string`` may be text or a pre-encoded list of segment bytes.
    """
    segments = string if isinstance(string, list) else self.encode_string(string)
    # Pad four blanks on both sides so the text slides fully in and out
    # of the 4-digit window.
    window = [0] * 4 + list(segments) + [0] * 4
    for step in range(len(segments) + 5):
        self.write(window[step:step + 4])
        sleep_ms(delay)
class TM1637Decimal(TM1637):
    """TM1637 quad-display variant with a decimal point after each digit.

    Overrides string encoding so that '.' characters merge into the
    previous digit's segment byte instead of consuming a position.
    """

    def encode_string(self, string):
        """Encode 0-9, a-z, space, dash, star and '.' into segment bytes.

        The returned bytearray matches the length of ``string`` with the
        dots removed; each '.' sets the MSB of the preceding segment.
        """
        segments = bytearray(len(string.replace('.', '')))
        out = 0
        for ch in string:
            if ch == '.' and out > 0:
                # Fold the decimal point into the previous digit.
                segments[out - 1] |= TM1637_MSB
            else:
                segments[out] = self.encode_char(ch)
                out += 1
        return segments
# CircuitPython doesn't support the MicroPython sleep_us/ms so use our own
def sleep_us(us):
    """Block for ``us`` microseconds (shim for MicroPython's sleep_us)."""
    time.sleep(us / 1000000)
def sleep_ms(ms):
    """Block for ``ms`` milliseconds (shim for MicroPython's sleep_ms)."""
    time.sleep(ms / 1000)
| 33.493023 | 182 | 0.597 |
fbf03480252c6c3313112b4fa42b37356f0d9227 | 3,672 | py | Python | experiments/mainTrainOpt.py | AziziShekoofeh/TimeSeries_Classification | 8bd18688e3220392ffc85642f10a68aed0f7f429 | [
"Apache-2.0"
] | 14 | 2018-11-27T19:54:29.000Z | 2021-02-03T05:32:31.000Z | experiments/mainTrainOpt.py | AziziShekoofeh/TimeSeries_Classification | 8bd18688e3220392ffc85642f10a68aed0f7f429 | [
"Apache-2.0"
] | null | null | null | experiments/mainTrainOpt.py | AziziShekoofeh/TimeSeries_Classification | 8bd18688e3220392ffc85642f10a68aed0f7f429 | [
"Apache-2.0"
] | 4 | 2018-11-25T07:08:31.000Z | 2021-03-23T14:16:11.000Z | import sys
import time
import os

module_root = '..'
sys.path.append(module_root)
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '1'
os.environ["CUDA_VISIBLE_DEVICES"] = "0"

from deepNetworks.model import RNNModel
from utils.data_loader import DataLoader
from deepNetworks.netArch import DeepNetArch1, DeepNetArch2, DeepNetArch3, DeepNetArch1L1, DeepNetArch2L1, \
    DeepNetArch3L1
from keras import backend as k
from utils.history import History

# All architectures share the same constructor signature, so the six
# copy-pasted `if model_type == ...` branches collapse into one lookup.
ARCHITECTURES = {
    'DeepNetArch1': DeepNetArch1,
    'DeepNetArch2': DeepNetArch2,
    'DeepNetArch3': DeepNetArch3,
    'DeepNetArch1L1': DeepNetArch1L1,
    'DeepNetArch2L1': DeepNetArch2L1,
    'DeepNetArch3L1': DeepNetArch3L1,
}

if __name__ == '__main__':
    # Experiment configuration.
    logs_dir = 'DeepNetArch1-Div'
    model_type = 'DeepNetArch1'
    sl = 100            # sequence length
    ds_rate = 2         # downsampling rate (when enabled)
    early_stopping = True
    downsample = False
    bmode = True
    subdir = '/bmode/'

    # Recover the hyper-parameters of the best model found so far
    # (selection by accuracy rather than AUC or loss).
    model_history = History(logs_dir)
    opt_params, opt_model_uid = model_history.find_opt_model(auc=False, loss=False, acc=True)

    if downsample:
        ds = DataLoader(sl=sl, downsample=True, downsample_rate=ds_rate)
        sl = int(sl / ds_rate)
    else:
        ds = DataLoader(sl=sl)

    # Fail fast on unknown model types instead of hitting a NameError on
    # `arch` further down (the original if-chain silently skipped them).
    if model_type not in ARCHITECTURES:
        raise ValueError('Unknown model type: {}'.format(model_type))
    arch = ARCHITECTURES[model_type](
        sl=sl,
        initial_lr=float(opt_params['initial_lr']),
        l2_reg=float(opt_params['l2_regulizer']),
        dropout=float(opt_params['dropout']),
        rec_dropout=float(opt_params['rec_dropout']),
        optimizer=opt_params['optimizer'],
        summary=1)

    model, model_id = arch.arch_generator()
    rnn_model = RNNModel(ds, model, opt_params, log_dir=logs_dir + subdir + str(sl), division=True, bmode=bmode)

    # Unique run identifier: timestamp + architecture id.
    uid = time.strftime("%Y_%m_%d_%H_%M_%S_") + model_id
    print('-' * 50)
    print('UID: {}'.format(uid))
    print('-' * 50)

    rnn_model.opt_model_train(uid=uid, batch_size=int(opt_params['batch_size']), es=early_stopping,
                              nb_epoch=int(opt_params['n_epoch']), verbose=2)
    test_predictions = rnn_model.predict_test(uid=uid)
    k.clear_session()
f4d94f641f932cd8e3165b510dbeb5fc0efb20db | 2,533 | py | Python | src/memory.py | Pablok98/PyGB | afd846d8fb505ebeea2a16c97ac3771a4f1d4cac | [
"MIT"
] | null | null | null | src/memory.py | Pablok98/PyGB | afd846d8fb505ebeea2a16c97ac3771a4f1d4cac | [
"MIT"
] | null | null | null | src/memory.py | Pablok98/PyGB | afd846d8fb505ebeea2a16c97ac3771a4f1d4cac | [
"MIT"
] | null | null | null | from cartridge import CartridgeMBC1
from config import ROM_PATH
class Memory:
    """Game Boy memory bus.

    Routes reads and writes to WRAM, VRAM, the cartridge and the I/O
    registers according to the Game Boy address map.  Regions that are
    not implemented yet return ``None`` on read and ignore writes.
    """

    def __init__(self):
        # 8 KiB work RAM (0xC000-0xDFFF) and 8 KiB video RAM (0x8000-0x9FFF).
        self.wram = bytearray(8192)
        self.vram = bytearray(8192)
        # TODO: implement all MBCs, for now, we force MBC1.
        self.cartridge = CartridgeMBC1()
        # I/O registers live at 0xFF00-0xFF7F.
        self.io_registers = bytearray(128)

    def load_cartridge(self, path):
        """Load the ROM at ``path`` into the cartridge."""
        # TODO: fix
        self.cartridge.load_cartridge(path)

    def read_data(self, address):
        """Return the byte mapped at ``address``.

        Returns ``None`` for regions that are not implemented yet
        (OAM, the unusable area, HRAM and the interrupt-enable register).
        """
        if 0x0000 <= address <= 0x7FFF:
            # Cartridge ROM (fixed bank 0x0000-0x3FFF plus the
            # switchable bank 0x4000-0x7FFF — both go to the MBC).
            return self.cartridge.read_data(address)
        elif 0x8000 <= address <= 0x9FFF:
            return self.vram[address - 0x8000]
        elif 0xA000 <= address <= 0xBFFF:
            # External (cartridge) RAM.
            return self.cartridge.read_data(address)
        elif 0xC000 <= address <= 0xDFFF:
            return self.wram[address - 0xC000]
        elif 0xE000 <= address <= 0xFDFF:
            # Echo RAM mirrors WRAM 0xC000-0xDDFF (was unimplemented).
            return self.wram[address - 0xE000]
        elif 0xFE00 <= address <= 0xFE9F:
            # OAM: not implemented yet.
            pass
        elif 0xFEA0 <= address <= 0xFEFF:
            # Unusable region.
            pass
        elif 0xFF00 <= address <= 0xFF7F:
            return self.io_registers[address - 0xFF00]
        elif 0xFF80 <= address <= 0xFFFE:
            # HRAM: not implemented yet.
            pass
        elif address == 0xFFFF:
            # Interrupt-enable register: not implemented yet.
            pass

    def write_data(self, data, address):
        """Write byte ``data`` to the region mapped at ``address``."""
        if 0x0000 <= address <= 0x7FFF:
            # Writes to ROM space are MBC control commands.
            self.cartridge.write_data(data, address)
        elif 0x8000 <= address <= 0x9FFF:
            self.vram[address - 0x8000] = data
        elif 0xA000 <= address <= 0xBFFF:
            self.cartridge.write_data(data, address)
        elif 0xC000 <= address <= 0xDFFF:
            self.wram[address - 0xC000] = data
        elif 0xE000 <= address <= 0xFDFF:
            # Echo RAM: writes mirror into WRAM as on real hardware.
            self.wram[address - 0xE000] = data
        elif 0xFE00 <= address <= 0xFE9F:
            # OAM: not implemented yet.
            pass
        elif 0xFEA0 <= address <= 0xFEFF:
            # Unusable region: writes are ignored.
            pass
        elif 0xFF00 <= address <= 0xFF7F:
            self.io_registers[address - 0xFF00] = data
        elif 0xFF80 <= address <= 0xFFFE:
            # HRAM: not implemented yet.
            pass
        elif address == 0xFFFF:
            # Interrupt-enable register: not implemented yet.
            pass
| 35.676056 | 70 | 0.568101 |
ab31dfff6fdcc85259d346512d8c08072ea0824a | 3,423 | py | Python | accenv/lib/python3.4/site-packages/IPython/lib/tests/test_pretty.py | adamshamsudeen/clubdin-dj | eb48c67dab3a4ae7c4032544eb4d64e0b1d7e15a | [
"MIT"
] | null | null | null | accenv/lib/python3.4/site-packages/IPython/lib/tests/test_pretty.py | adamshamsudeen/clubdin-dj | eb48c67dab3a4ae7c4032544eb4d64e0b1d7e15a | [
"MIT"
] | null | null | null | accenv/lib/python3.4/site-packages/IPython/lib/tests/test_pretty.py | adamshamsudeen/clubdin-dj | eb48c67dab3a4ae7c4032544eb4d64e0b1d7e15a | [
"MIT"
] | null | null | null | """Tests for IPython.lib.pretty.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2011, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Third-party imports
import nose.tools as nt
# Our own imports
from IPython.lib import pretty
from IPython.testing.decorators import skip_without
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
class MyList(object):
    """List-like fixture whose pretty repr groups items across lines."""

    def __init__(self, content):
        self.content = content

    def _repr_pretty_(self, p, cycle):
        # Cycles print a placeholder to avoid infinite recursion.
        if cycle:
            p.text("MyList(...)")
        else:
            with p.group(3, "MyList(", ")"):
                for index, child in enumerate(self.content):
                    if index == 0:
                        p.breakable("")
                    else:
                        p.text(",")
                        p.breakable()
                    p.pretty(child)
class MyDict(dict):
    """dict subclass with a custom pretty repr, used to test dispatch."""

    def _repr_pretty_(self, p, cycle):
        p.text("MyDict(...)")
class Dummy1(object):
    """Fixture with a callable ``_repr_pretty_``."""

    def _repr_pretty_(self, p, cycle):
        p.text("Dummy1(...)")
class Dummy2(Dummy1):
    """Subclass whose ``_repr_pretty_`` is not callable.

    Exercises the callability check that should fall back to the
    inherited printer from :class:`Dummy1`.
    """
    _repr_pretty_ = None
class NoModule(object):
    """Fixture type stripped of its ``__module__`` (cleared just below)."""
    pass

# Simulate a class whose module cannot be determined.
NoModule.__module__ = None
def test_indentation():
    """Test correct indentation in groups"""
    count = 40
    rendered = pretty.pretty(MyList(list(range(count))))
    expected = "MyList(\n" + ",\n".join(" %d" % i for i in range(count)) + ")"
    nt.assert_equal(rendered, expected)
def test_dispatch():
    """
    Test correct dispatching: The _repr_pretty_ method for MyDict
    must be found before the registered printer for dict.
    """
    rendered = pretty.pretty(MyDict())
    nt.assert_equal(rendered, "MyDict(...)")
def test_callability_checking():
    """
    Test that the _repr_pretty_ method is tested for callability and skipped if
    not.
    """
    # Dummy2's non-callable _repr_pretty_ must be ignored in favor of
    # the inherited Dummy1 printer.
    rendered = pretty.pretty(Dummy2())
    nt.assert_equal(rendered, "Dummy1(...)")
def test_sets():
    """
    Test that set and frozenset use Python 3 formatting.
    """
    # nose-style generator test: each yield is one assertion case.
    cases = [
        (set(), 'set()'),
        (frozenset(), 'frozenset()'),
        (set([1]), '{1}'),
        (frozenset([1]), 'frozenset({1})'),
        (set([1, 2]), '{1, 2}'),
        (frozenset([1, 2]), 'frozenset({1, 2})'),
        (set([-1, -2, -3]), '{-3, -2, -1}'),
    ]
    for obj, expected_output in cases:
        got_output = pretty.pretty(obj)
        yield nt.assert_equal, got_output, expected_output
@skip_without('xxlimited')
def test_pprint_heap_allocated_type():
    """
    Test that pprint works for heap allocated types.
    """
    import xxlimited
    nt.assert_equal(pretty.pretty(xxlimited.Null), 'xxlimited.Null')
def test_pprint_nomod():
    """
    Test that pprint works for classes with no __module__.
    """
    rendered = pretty.pretty(NoModule)
    nt.assert_equal(rendered, 'NoModule')
| 28.289256 | 86 | 0.530237 |
ff1546d0ade6777c5499ba460f053958967171a0 | 3,613 | py | Python | main.py | scw2tt/traffic-lights | 725ee552e7399f4f7bd42322861bc09c8258a17e | [
"MIT"
] | 1 | 2020-01-16T07:36:28.000Z | 2020-01-16T07:36:28.000Z | main.py | scw2tt/traffic-lights | 725ee552e7399f4f7bd42322861bc09c8258a17e | [
"MIT"
] | null | null | null | main.py | scw2tt/traffic-lights | 725ee552e7399f4f7bd42322861bc09c8258a17e | [
"MIT"
] | null | null | null | import torch
from PIL import Image
import numpy as np
from matplotlib import pyplot as plt
import matplotlib.patches as patches
import torchvision.transforms as transforms
from torch import optim  # was missing: `optim.SGD` below needs it
from model import SSD300, ResNet, Loss
from train import train_loop, tencent_trick
from PIL import ImageMath
import cv2

# Sources : https://github.com/julimueller/dtld_parsing
#           https://github.com/NVIDIA/DeepLearningExamples/tree/master/PyTorch/Detection/SSD

# Resize + normalize with the standard ImageNet statistics.
preprocessFn = transforms.Compose(
    [transforms.Resize((300, 300)),
     transforms.ToTensor(),
     transforms.Normalize(mean=[0.485, 0.456, 0.406],
                          std=[0.229, 0.224, 0.225])])

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(device)

utils = torch.hub.load('NVIDIA/DeepLearningExamples:torchhub', 'nvidia_ssd_processing_utils')

# DTLD frames are Bayer-patterned 12-bit raw: demosaic, then shift down
# to 8 bits for display and preprocessing.
img = cv2.imread("data_test.tiff", cv2.IMREAD_UNCHANGED)
img = cv2.cvtColor(img, cv2.COLOR_BAYER_GB2RGB)
img = np.right_shift(img, 4)
img = img.astype(np.uint8)
img_pil = Image.fromarray(img)

input_size = 300
img_pil = img_pil.resize((input_size, input_size))

# Batch of one normalized image.
a = preprocessFn(img_pil).unsqueeze(0)
print(a.shape)
a = a.to(device)

ssd300 = SSD300(backbone=ResNet('resnet18', None))
ssd300.to(device)

# ---------------- Training (scaffolding, not yet wired up) ----------------
ssd300.train()
# TODO confirm: dboxes300_coco should be imported from the SSD utils/model
# module (it builds the default anchor boxes for the 300x300 SSD).
dboxes = dboxes300_coco()
criterion = Loss(dboxes)
criterion.to(device)
learningRate = 0.05
# Tencent trick disables weight decay for biases/batch-norm parameters.
# NOTE: fixed a syntax error here — the original was missing the comma
# between the parameter list and `lr`.
optimizer = optim.SGD(tencent_trick(ssd300), lr=learningRate)
epoch_size = 10
for epoch in range(0, epoch_size):
    # TODO: call train_loop(...) with the data loaders, then validate and
    # report accuracy/loss per epoch.
    pass

# ---------------- Testing an image ----------------
ssd300.eval()
with torch.no_grad():
    yhat = ssd300(a)
print(len(yhat))
print(yhat[0].shape)
print(yhat[1].shape)

results_per_input = utils.decode_results(yhat)
best_results_per_input = [utils.pick_best(results, 0.50) for results in results_per_input]

classes_to_labels = {
    0: "red",
    1: "yellow",
    2: "green"
}

for image_idx in range(len(best_results_per_input)):
    fig, ax = plt.subplots(1)
    # Show original, denormalized image...
    image = img_pil
    ax.imshow(image)
    # ...with detections
    bboxes, classes, confidences = best_results_per_input[image_idx]
    for idx in range(len(bboxes)):
        left, bot, right, top = bboxes[idx]
        x, y, w, h = [val * input_size for val in [left, bot, right - left, top - bot]]
        # Traffic lights are taller than wide; skip implausible boxes.
        if h > w:
            rect = patches.Rectangle((x, y), w, h, linewidth=1, edgecolor='r', facecolor='none')
            ax.add_patch(rect)
            ax.text(x, y, "{} {:.0f}%".format(classes_to_labels[classes[idx] - 1], confidences[idx] * 100), bbox=dict(facecolor='white', alpha=0.5))
plt.show()
211da5edfabbe20ed98b5924a0ec677e4938534f | 648 | py | Python | drf_demos/drf_demos/api/models.py | Minkov/python-web-framework-demos- | 30f39de0b4344e1b7e5d4cf96f6d6bbbfe867305 | [
"MIT"
] | 2 | 2022-03-06T11:56:35.000Z | 2022-03-20T09:31:45.000Z | drf_demos/drf_demos/api/models.py | Minkov/python-web-framework-demos- | 30f39de0b4344e1b7e5d4cf96f6d6bbbfe867305 | [
"MIT"
] | null | null | null | drf_demos/drf_demos/api/models.py | Minkov/python-web-framework-demos- | 30f39de0b4344e1b7e5d4cf96f6d6bbbfe867305 | [
"MIT"
] | 4 | 2022-03-17T18:05:19.000Z | 2022-03-22T16:38:11.000Z | from django.db import models
from django.core import validators
class Category(models.Model):
    """Product category with a short name and optional description."""

    NAME_MAX_LENGTH = 15

    name = models.CharField(
        max_length=NAME_MAX_LENGTH,
    )
    description = models.TextField(
        null=True,
        blank=True,
    )

    def __str__(self):
        # Human-readable label (admin/shell otherwise show "Category object (N)").
        return self.name
class Product(models.Model):
    """Product with a name, a positive price and a category."""

    NAME_MAX_LENGTH = 25
    PRICE_MIN_VALUE = 0.01

    name = models.CharField(
        max_length=NAME_MAX_LENGTH,
    )
    price = models.FloatField(
        validators=(
            validators.MinValueValidator(PRICE_MIN_VALUE),
        )
    )
    category = models.ForeignKey(
        Category,
        on_delete=models.CASCADE,
    )

    def __str__(self):
        # Human-readable label (admin/shell otherwise show "Product object (N)").
        return self.name
| 19.636364 | 58 | 0.634259 |
f9cfa81014dc43bda997450985cd643d07e86c3b | 38,042 | py | Python | py/dynesty/sampling.py | mcurliss/dynesty | 84ba36f3a698c267fdebccaecead8c83953a0533 | [
"MIT"
] | null | null | null | py/dynesty/sampling.py | mcurliss/dynesty | 84ba36f3a698c267fdebccaecead8c83953a0533 | [
"MIT"
] | null | null | null | py/dynesty/sampling.py | mcurliss/dynesty | 84ba36f3a698c267fdebccaecead8c83953a0533 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Functions for proposing new live points used by
:class:`~dynesty.sampler.Sampler` (and its children from
:mod:`~dynesty.nestedsamplers`) and
:class:`~dynesty.dynamicsampler.DynamicSampler`.
"""
import warnings
import math
import numpy as np
from numpy import linalg
from .utils import unitcheck, apply_reflect, get_random_generator
from .bounding import randsphere
__all__ = [
"sample_unif", "sample_rwalk", "sample_slice", "sample_rslice",
"sample_hslice"
]
EPS = float(np.finfo(np.float64).eps)
SQRTEPS = math.sqrt(float(np.finfo(np.float64).eps))
def sample_unif(args):
    """Evaluate a point already drawn uniformly from the bounding proposal.

    Parameters are zipped within `args` to support `pool.map`-style calls.
    Only `u`, `prior_transform` and `loglikelihood` are used; `loglstar`,
    `axes`, `scale`, the RNG seed and `kwargs` are accepted solely to
    match the common sampler signature.

    Parameters
    ----------
    u : `~numpy.ndarray` with shape (npdim,)
        Position of the sample in the unit cube.
    prior_transform : function
        Maps a unit-cube sample to the target parameter space.
    loglikelihood : function
        Returns ln(likelihood) for a parameter vector.

    Returns
    -------
    u : `~numpy.ndarray` with shape (npdim,)
        The unchanged unit-cube position.
    v : `~numpy.ndarray` with shape (ndim,)
        The transformed parameters.
    logl : float
        Ln(likelihood) at `v`.
    nc : int
        Number of likelihood calls (always 1 here).
    blob : None
        No tuning information is produced by uniform sampling.
    """
    u, _, _, _, prior_transform, loglikelihood, _, _ = args

    v = prior_transform(np.asarray(u))
    logl = loglikelihood(np.asarray(v))

    return u, v, logl, 1, None
def sample_rwalk(args):
    """Propose a new live point by random-walking away from an existing one.

    `args` bundles ``(u, loglstar, axes, scale, prior_transform,
    loglikelihood, rseed, kwargs)`` to support `pool.map`-style calls.
    The walk itself is delegated to :func:`generic_random_walk`; this
    wrapper only unpacks the arguments and seeds a fresh RNG.

    Returns
    -------
    u, v, logl, nc, blob :
        Final unit-cube position, transformed parameters, its
        log-likelihood, the number of likelihood calls, and ancillary
        quantities used to tune ``scale``.
    """
    (u, loglstar, axes, scale, prior_transform, loglikelihood, rseed,
     kwargs) = args
    rng = get_random_generator(rseed)
    return generic_random_walk(u, loglstar, axes, scale, prior_transform,
                               loglikelihood, rng, kwargs)
def generic_random_walk(u, loglstar, axes, scale, prior_transform,
                        loglikelihood, rstate, kwargs):
    """Run a fixed-budget random walk constrained to ``logl > loglstar``.

    Parameters
    ----------
    u : `~numpy.ndarray` with shape (npdim,)
        Starting unit-cube point (a copy of an existing live point).
    loglstar : float
        Ln(likelihood) threshold every accepted point must exceed.
    axes : `~numpy.ndarray` with shape (ndim, ndim)
        Ellipsoid axes for the clustered dimensions of the proposal.
    scale : float
        Scale factor applied to ``axes``.
    prior_transform, loglikelihood : functions
        Unit-cube-to-parameter map and the log-likelihood.
    rstate : `~numpy.random.Generator`
        Random number generator.
    kwargs : dict
        Optional 'nonbounded', 'periodic', 'reflective' masks and the
        number of proposals 'walks' (default 25).

    Returns
    -------
    u, v, logl, ncall, blob :
        Final unit-cube point, its transformed parameters, its
        log-likelihood, the number of likelihood calls, and a dict with
        accept/reject counts plus the (unchanged) scale.
    """
    nonbounded = kwargs.get('nonbounded')
    periodic = kwargs.get('periodic')
    reflective = kwargs.get('reflective')

    ndim = len(u)
    n_cluster = axes.shape[0]
    walks = kwargs.get('walks', 25)  # total proposal budget

    naccept = 0  # proposals with L > L*
    nreject = 0  # proposals out of bounds or with L <= L*
    ncall = 0    # likelihood calls consumed (one per loop iteration)

    while ncall < walks:
        # Draw a trial point inside the scaled proposal ellipsoid.
        u_prop, fail = propose_ball_point(u,
                                          scale,
                                          axes,
                                          ndim,
                                          n_cluster,
                                          rstate=rstate,
                                          periodic=periodic,
                                          reflective=reflective,
                                          nonbounded=nonbounded)
        ncall += 1
        if fail:
            # Trial fell outside the unit cube; still counts against
            # the budget.
            nreject += 1
            continue

        v_prop = prior_transform(u_prop)
        logl_prop = loglikelihood(v_prop)
        if logl_prop > loglstar:
            u, v, logl = u_prop, v_prop, logl_prop
            naccept += 1
        else:
            nreject += 1

    if naccept == 0:
        # Nothing accepted: re-evaluate the (unchanged) starting point
        # so we can return a consistent (v, logl) pair.
        v = prior_transform(u)
        logl = loglikelihood(v)

    blob = {'accept': naccept, 'reject': nreject, 'scale': scale}
    return u, v, logl, ncall, blob
def propose_ball_point(u,
                       scale,
                       axes,
                       n,
                       n_cluster,
                       rstate=None,
                       periodic=None,
                       reflective=None,
                       nonbounded=None):
    """Draw one trial point, uniform within the proposal ellipsoid.

    Only the first ``n_cluster`` dimensions use the ellipsoid; the
    remaining dimensions are resampled uniformly over the unit interval.
    A single attempt is made.

    Returns
    -------
    (u_prop, False) on success, or (None, True) when the trial point
    lands outside the unit-cube bounds.
    """
    u_prop = np.zeros(n)
    # Non-clustered dimensions: fresh uniform draws in [0, 1).
    u_prop[n_cluster:] = rstate.uniform(0, 1, n - n_cluster)

    # Uniform draw inside the unit n_cluster-ball, mapped through the
    # ellipsoid axes and the past tuning scale.
    offset = np.dot(axes, randsphere(n_cluster, rstate=rstate))
    u_prop[:n_cluster] = u[:n_cluster] + scale * offset

    # Wrap periodic parameters back into [0, 1).
    if periodic is not None:
        u_prop[periodic] = np.mod(u_prop[periodic], 1)
    # Reflect off the unit-cube walls where requested.
    if reflective is not None:
        u_prop[reflective] = apply_reflect(u_prop[reflective])

    if unitcheck(u_prop, nonbounded):
        return u_prop, False
    return None, True
def generic_slice_step(u, direction, nonperiodic, loglstar, loglikelihood,
                       prior_transform, rstate):
    """Perform one slice-sampling update along ``direction``.

    Implements Neal (2003)-style stepping out followed by shrinkage.

    Parameters
    ----------
    u : ndarray (ndim sized)
        Starting point in unit-cube coordinates; MUST satisfy
        logl > loglstar.
    direction : ndarray (ndim sized)
        Step direction vector; its norm sets the initial interval width.
    nonperiodic : ndarray(bool)
        Mask of non-periodic variables for the unit-cube check.
    loglstar : float
        Threshold the new point's log-likelihood must exceed.
    loglikelihood, prior_transform : functions
    rstate : random generator

    Returns
    -------
    u_prop, v_prop, logl_prop, nc, nexpand, ncontract, fscale
    """
    nc, nexpand, ncontract = 0, 0, 0
    nexpand_threshold = 10000  # Threshold for warning the user
    n = len(u)
    rand0 = rstate.uniform()  # initial scale/offset
    dirlen = linalg.norm(direction)
    # Maximum sensible initial interval (half the cube diagonal); longer
    # directions are normalized down.
    maxlen = np.sqrt(n) / 2.
    if dirlen > maxlen:
        dirnorm = dirlen / maxlen
    else:
        dirnorm = 1
    direction = direction / dirnorm

    def F(x):
        # Log-likelihood at u + x * direction; out-of-bounds gives -inf.
        nonlocal nc
        u_new = u + x * direction
        if unitcheck(u_new, nonperiodic):
            logl = loglikelihood(prior_transform(u_new))
        else:
            logl = -np.inf
        nc += 1
        return u_new, logl

    # Asymmetric step size on the left/right (see Neal 2003).
    nstep_l = -rand0
    nstep_r = 1 - rand0
    logl_l = F(nstep_l)[1]
    logl_r = F(nstep_r)[1]

    # "Step out" the bracket until both ends drop below loglstar.
    while logl_l > loglstar:
        nstep_l -= 1
        logl_l = F(nstep_l)[1]
        nexpand += 1
    while logl_r > loglstar:
        nstep_r += 1
        logl_r = F(nstep_r)[1]
        nexpand += 1
    if nexpand > nexpand_threshold:
        warnings.warn(
            'The slice sample interval was expanded more than '
            '{0} times'.format(nexpand_threshold))

    # Sample within the bracket, shrinking it on each failure until we
    # land above the loglstar bound.
    while True:
        nstep_hat = nstep_r - nstep_l
        nstep_prop = nstep_l + rstate.uniform() * nstep_hat
        u_prop, logl_prop = F(nstep_prop)
        ncontract += 1

        if logl_prop > loglstar:
            fscale = (nstep_r - nstep_l) / dirnorm
            break

        # Shrink whichever side of the bracket the failed proposal fell on.
        if nstep_prop < 0:
            nstep_l = nstep_prop
        elif nstep_prop > 0:
            nstep_r = nstep_prop
        else:
            # If `nstep_prop = 0` something has gone horribly wrong.
            raise RuntimeError("Slice sampler has failed to find "
                               "a valid point. Some useful "
                               "output quantities:\n"
                               "u: {0}\n"
                               "nstep_left: {1}\n"
                               "nstep_right: {2}\n"
                               "nstep_hat: {3}\n"
                               "u_prop: {4}\n"
                               "loglstar: {5}\n"
                               "logl_prop: {6}\n"
                               "direction: {7}\n".format(
                                   u, nstep_l, nstep_r, nstep_hat, u_prop,
                                   loglstar, logl_prop, direction))

    v_prop = prior_transform(u_prop)
    return u_prop, v_prop, logl_prop, nc, nexpand, ncontract, fscale
def sample_slice(args):
    """Propose a new live point via "Gibbs-like" multivariate slice sampling.

    Each of the ``slices`` passes performs one univariate slice update
    along every principal axis, visited in shuffled order.  `args`
    bundles ``(u, loglstar, axes, scale, prior_transform, loglikelihood,
    rseed, kwargs)`` to support `pool.map`-style calls.

    Returns
    -------
    u, v, logl, nc, blob :
        Final unit-cube point, its transformed parameters, its
        log-likelihood, the number of likelihood calls, and a dict with
        the mean interval scale plus expansion/contraction counts.
    """
    (u, loglstar, axes, scale, prior_transform, loglikelihood, rseed,
     kwargs) = args
    rstate = get_random_generator(rseed)

    nonperiodic = kwargs.get('nonperiodic', None)

    n = len(u)
    assert axes.shape[0] == n
    slices = kwargs.get('slices', 5)  # number of full passes

    nc = 0
    nexpand = 0
    ncontract = 0
    fscale = []

    # Scale the axes by past tuning before slicing along them.
    scaled_axes = scale * axes.T

    for _ in range(slices):
        # Visit each principal axis once, in random order.
        order = np.arange(n)
        rstate.shuffle(order)
        for idx in order:
            (u_prop, v_prop, logl_prop, nc1, nexpand1, ncontract1,
             fscale1) = generic_slice_step(u, scaled_axes[idx], nonperiodic,
                                           loglstar, loglikelihood,
                                           prior_transform, rstate)
            u = u_prop
            nc += nc1
            nexpand += nexpand1
            ncontract += ncontract1
            fscale.append(fscale1)

    blob = {
        'fscale': np.mean(fscale),
        'nexpand': nexpand,
        'ncontract': ncontract
    }
    return u_prop, v_prop, logl_prop, nc, blob
def sample_rslice(args):
    """Propose a new live point via slice sampling along random directions.

    Each of the ``slices`` iterations slices along one random direction
    drawn from the proposal ellipsoid defined by ``axes``.  `args`
    bundles ``(u, loglstar, axes, scale, prior_transform, loglikelihood,
    rseed, kwargs)`` to support `pool.map`-style calls.

    Returns
    -------
    u, v, logl, nc, blob :
        Final unit-cube point, its transformed parameters, its
        log-likelihood, the number of likelihood calls, and a dict with
        the mean interval scale plus expansion/contraction counts.
    """
    (u, loglstar, axes, scale, prior_transform, loglikelihood, rseed,
     kwargs) = args
    rstate = get_random_generator(rseed)

    nonperiodic = kwargs.get('nonperiodic', None)

    n = len(u)
    assert axes.shape[0] == n
    slices = kwargs.get('slices', 5)

    nc = 0
    nexpand = 0
    ncontract = 0
    fscale = []

    for _ in range(slices):
        # Isotropic unit direction, stretched by the ellipsoid axes and
        # the past tuning scale.
        drhat = rstate.standard_normal(size=n)
        drhat /= linalg.norm(drhat)
        direction = np.dot(axes, drhat) * scale

        (u_prop, v_prop, logl_prop, nc1, nexpand1, ncontract1,
         fscale1) = generic_slice_step(u, direction, nonperiodic, loglstar,
                                       loglikelihood, prior_transform,
                                       rstate)
        u = u_prop
        nc += nc1
        nexpand += nexpand1
        ncontract += ncontract1
        fscale.append(fscale1)

    blob = {
        'fscale': np.mean(fscale),
        'nexpand': nexpand,
        'ncontract': ncontract
    }
    return u_prop, v_prop, logl_prop, nc, blob
def sample_hslice(args):
    """
    Return a new live point proposed by "Hamiltonian" Slice Sampling
    using a series of random trajectories away from an existing live point.
    Each trajectory is based on the provided axes and samples are determined
    by moving forwards/backwards in time until the trajectory hits an edge
    and approximately reflecting off the boundaries.
    Once a series of reflections has been established, we propose a new live
    point by slice sampling across the entire path.

    Parameters
    ----------
    u : `~numpy.ndarray` with shape (npdim,)
        Position of the initial sample. **This is a copy of an existing live
        point.**
    loglstar : float
        Ln(likelihood) bound.
    axes : `~numpy.ndarray` with shape (ndim, ndim)
        Axes used to propose new slice directions.
    scale : float
        Value used to scale the provided axes.
    prior_transform : function
        Function transforming a sample from the a unit cube to the parameter
        space of interest according to the prior.
    loglikelihood : function
        Function returning ln(likelihood) given parameters as a 1-d `~numpy`
        array of length `ndim`.
    kwargs : dict
        A dictionary of additional method-specific parameters.

    Returns
    -------
    u : `~numpy.ndarray` with shape (npdim,)
        Position of the final proposed point within the unit cube.
    v : `~numpy.ndarray` with shape (ndim,)
        Position of the final proposed point in the target parameter space.
    logl : float
        Ln(likelihood) of the final proposed point.
    nc : int
        Number of function calls used to generate the sample.
    blob : dict
        Collection of ancillary quantities used to tune :data:`scale`.
    """
    # Unzipping.
    (u, loglstar, axes, scale, prior_transform, loglikelihood, rseed,
     kwargs) = args
    rstate = get_random_generator(rseed)
    # Periodicity.
    nonperiodic = kwargs.get('nonperiodic', None)
    # Setup.
    n = len(u)
    assert axes.shape[0] == len(u)
    slices = kwargs.get('slices', 5)  # number of slices
    grad = kwargs.get('grad', None)  # gradient of log-likelihood
    max_move = kwargs.get('max_move', 100)  # limit for `ncall`
    compute_jac = kwargs.get('compute_jac', False)  # whether Jacobian needed
    jitter = 0.25  # 25% jitter
    nc = 0
    nmove = 0
    nreflect = 0
    ncontract = 0

    # Slice sampling loop.
    for it in range(slices):
        # Define the left, "inner", and right "nodes" for a given chord.
        # We will plan to slice sampling using these chords.
        nodes_l, nodes_m, nodes_r = [], [], []

        # Propose a direction on the unit n-sphere.
        drhat = rstate.standard_normal(size=n)
        drhat /= linalg.norm(drhat)

        # Transform and scale based on past tuning.
        axis = np.dot(axes, drhat) * scale * 0.01

        # Create starting window.
        vel = np.array(axis)  # current velocity
        u_l = u.copy()
        u_r = u.copy()
        u_l -= rstate.uniform(1. - jitter, 1. + jitter) * vel
        u_r += rstate.uniform(1. - jitter, 1. + jitter) * vel
        nodes_l.append(np.array(u_l))
        nodes_m.append(np.array(u))
        nodes_r.append(np.array(u_r))

        # Progress "right" (i.e. "forwards" in time).
        reverse, reflect = False, False
        u_r = np.array(u)
        ncall = 0
        while ncall <= max_move:
            # Iterate until we can bracket the edge of the distribution.
            nodes_l.append(np.array(u_r))
            u_out, u_in = None, []
            while True:
                # Step forward.
                u_r += rstate.uniform(1. - jitter, 1. + jitter) * vel
                # Evaluate point.
                if unitcheck(u_r, nonperiodic):
                    v_r = prior_transform(np.asarray(u_r))
                    logl_r = loglikelihood(np.asarray(v_r))
                    nc += 1
                    ncall += 1
                    nmove += 1
                else:
                    logl_r = -np.inf
                # Check if we satisfy the log-likelihood constraint
                # (i.e. are "in" or "out" of bounds).
                if logl_r < loglstar:
                    if reflect:
                        # If we are out of bounds and just reflected, we
                        # reverse direction and terminate immediately.
                        reverse = True
                        nodes_l.pop()  # remove since chord does not exist
                        break
                    else:
                        # If we're already in bounds, then we're safe.
                        u_out = np.array(u_r)
                        logl_out = logl_r
                        # Check if we could compute gradients assuming we
                        # terminated with the current `u_out`.
                        if np.isfinite(logl_out):
                            reverse = False
                        else:
                            reverse = True
                else:
                    reflect = False
                    u_in.append(np.array(u_r))
                # Check if we've bracketed the edge.
                if u_out is not None:
                    break
            # Define the rest of our chord.
            if len(nodes_l) == len(nodes_r) + 1:
                if len(u_in) > 0:
                    u_in = u_in[rstate.choice(
                        len(u_in))]  # pick point randomly
                else:
                    u_in = np.array(u)
                    pass
                nodes_m.append(np.array(u_in))
                nodes_r.append(np.array(u_out))
            # Check if we have turned around.
            if reverse:
                break

            # Reflect off the boundary.
            u_r, logl_r = u_out, logl_out
            if grad is None:
                # If the gradient is not provided, we will attempt to
                # approximate it numerically using 2nd-order methods.
                h = np.zeros(n)
                for i in range(n):
                    u_r_l, u_r_r = np.array(u_r), np.array(u_r)
                    # right side
                    u_r_r[i] += 1e-10
                    if unitcheck(u_r_r, nonperiodic):
                        v_r_r = prior_transform(np.asarray(u_r_r))
                        logl_r_r = loglikelihood(np.asarray(v_r_r))
                    else:
                        logl_r_r = -np.inf
                        reverse = True  # can't compute gradient
                    nc += 1
                    # left side
                    u_r_l[i] -= 1e-10
                    if unitcheck(u_r_l, nonperiodic):
                        v_r_l = prior_transform(np.asarray(u_r_l))
                        logl_r_l = loglikelihood(np.asarray(v_r_l))
                    else:
                        logl_r_l = -np.inf
                        reverse = True  # can't compute gradient
                    if reverse:
                        break  # give up because we have to turn around
                    nc += 1
                    # compute dlnl/du
                    h[i] = (logl_r_r - logl_r_l) / 2e-10
            else:
                # If the gradient is provided, evaluate it.
                h = grad(v_r)
                if compute_jac:
                    jac = []
                    # Evaluate and apply Jacobian dv/du if gradient
                    # is defined as d(lnL)/dv instead of d(lnL)/du.
                    for i in range(n):
                        u_r_l, u_r_r = np.array(u_r), np.array(u_r)
                        # right side
                        u_r_r[i] += 1e-10
                        if unitcheck(u_r_r, nonperiodic):
                            v_r_r = prior_transform(np.asarray(u_r_r))
                        else:
                            reverse = True  # can't compute Jacobian
                            v_r_r = np.array(v_r)  # assume no movement
                        # left side
                        u_r_l[i] -= 1e-10
                        if unitcheck(u_r_l, nonperiodic):
                            v_r_l = prior_transform(np.asarray(u_r_l))
                        else:
                            reverse = True  # can't compute Jacobian
                            # FIX: the failed *left* perturbation must fall
                            # back on `v_r_l` (previously assigned to
                            # `v_r_r`, clobbering the valid right value).
                            v_r_l = np.array(v_r)  # assume no movement
                        if reverse:
                            break  # give up because we have to turn around
                        jac.append((v_r_r - v_r_l) / 2e-10)
                    jac = np.array(jac)
                    h = np.dot(jac, h)  # apply Jacobian
                nc += 1
            # Compute specular reflection off boundary.
            vel_ref = vel - 2 * h * np.dot(vel, h) / linalg.norm(h)**2
            dotprod = np.dot(vel_ref, vel)
            dotprod /= linalg.norm(vel_ref) * linalg.norm(vel)
            # Check angle of reflection.
            if dotprod < -0.99:
                # The reflection angle is sufficiently small that it might
                # as well be a reflection.
                reverse = True
                break
            else:
                # If the reflection angle is sufficiently large, we
                # proceed as normal to the new position.
                vel = vel_ref
                u_out = None
                reflect = True
                nreflect += 1

        # Progress "left" (i.e. "backwards" in time).
        reverse, reflect = False, False
        vel = -np.array(axis)  # current velocity
        u_l = np.array(u)
        ncall = 0
        while ncall <= max_move:
            # Iterate until we can bracket the edge of the distribution.
            # Use a doubling approach to try and locate the bounds faster.
            nodes_r.append(np.array(u_l))
            u_out, u_in = None, []
            while True:
                # Step forward.
                u_l += rstate.uniform(1. - jitter, 1. + jitter) * vel
                # Evaluate point.
                if unitcheck(u_l, nonperiodic):
                    v_l = prior_transform(np.asarray(u_l))
                    logl_l = loglikelihood(np.asarray(v_l))
                    nc += 1
                    ncall += 1
                    nmove += 1
                else:
                    logl_l = -np.inf
                # Check if we satisfy the log-likelihood constraint
                # (i.e. are "in" or "out" of bounds).
                if logl_l < loglstar:
                    if reflect:
                        # If we are out of bounds and just reflected, we
                        # reverse direction and terminate immediately.
                        reverse = True
                        nodes_r.pop()  # remove since chord does not exist
                        break
                    else:
                        # If we're already in bounds, then we're safe.
                        u_out = np.array(u_l)
                        logl_out = logl_l
                        # Check if we could compute gradients assuming we
                        # terminated with the current `u_out`.
                        if np.isfinite(logl_out):
                            reverse = False
                        else:
                            reverse = True
                else:
                    reflect = False
                    u_in.append(np.array(u_l))
                # Check if we've bracketed the edge.
                if u_out is not None:
                    break
            # Define the rest of our chord.
            if len(nodes_r) == len(nodes_l) + 1:
                if len(u_in) > 0:
                    u_in = u_in[rstate.choice(
                        len(u_in))]  # pick point randomly
                else:
                    u_in = np.array(u)
                    pass
                nodes_m.append(np.array(u_in))
                nodes_l.append(np.array(u_out))
            # Check if we have turned around.
            if reverse:
                break

            # Reflect off the boundary.
            u_l, logl_l = u_out, logl_out
            if grad is None:
                # If the gradient is not provided, we will attempt to
                # approximate it numerically using 2nd-order methods.
                h = np.zeros(n)
                for i in range(n):
                    u_l_l, u_l_r = np.array(u_l), np.array(u_l)
                    # right side
                    u_l_r[i] += 1e-10
                    if unitcheck(u_l_r, nonperiodic):
                        v_l_r = prior_transform(np.asarray(u_l_r))
                        logl_l_r = loglikelihood(np.asarray(v_l_r))
                    else:
                        logl_l_r = -np.inf
                        reverse = True  # can't compute gradient
                    nc += 1
                    # left side
                    u_l_l[i] -= 1e-10
                    if unitcheck(u_l_l, nonperiodic):
                        v_l_l = prior_transform(np.asarray(u_l_l))
                        logl_l_l = loglikelihood(np.asarray(v_l_l))
                    else:
                        logl_l_l = -np.inf
                        reverse = True  # can't compute gradient
                    if reverse:
                        break  # give up because we have to turn around
                    nc += 1
                    # compute dlnl/du
                    h[i] = (logl_l_r - logl_l_l) / 2e-10
            else:
                # If the gradient is provided, evaluate it.
                h = grad(v_l)
                if compute_jac:
                    jac = []
                    # Evaluate and apply Jacobian dv/du if gradient
                    # is defined as d(lnL)/dv instead of d(lnL)/du.
                    for i in range(n):
                        u_l_l, u_l_r = np.array(u_l), np.array(u_l)
                        # right side
                        u_l_r[i] += 1e-10
                        if unitcheck(u_l_r, nonperiodic):
                            v_l_r = prior_transform(np.asarray(u_l_r))
                        else:
                            reverse = True  # can't compute Jacobian
                            v_l_r = np.array(v_l)  # assume no movement
                        # left side
                        u_l_l[i] -= 1e-10
                        if unitcheck(u_l_l, nonperiodic):
                            v_l_l = prior_transform(np.asarray(u_l_l))
                        else:
                            reverse = True  # can't compute Jacobian
                            # FIX: the failed *left* perturbation must fall
                            # back on `v_l_l` (previously assigned to
                            # `v_l_r`, clobbering the valid right value).
                            v_l_l = np.array(v_l)  # assume no movement
                        if reverse:
                            break  # give up because we have to turn around
                        jac.append((v_l_r - v_l_l) / 2e-10)
                    jac = np.array(jac)
                    h = np.dot(jac, h)  # apply Jacobian
                nc += 1
            # Compute specular reflection off boundary.
            vel_ref = vel - 2 * h * np.dot(vel, h) / linalg.norm(h)**2
            dotprod = np.dot(vel_ref, vel)
            dotprod /= linalg.norm(vel_ref) * linalg.norm(vel)
            # Check angle of reflection.
            if dotprod < -0.99:
                # The reflection angle is sufficiently small that it might
                # as well be a reflection.
                reverse = True
                break
            else:
                # If the reflection angle is sufficiently large, we
                # proceed as normal to the new position.
                vel = vel_ref
                u_out = None
                reflect = True
                nreflect += 1

        # Initialize lengths of chords.
        if len(nodes_l) > 1:
            # remove initial fallback chord
            nodes_l.pop(0)
            nodes_m.pop(0)
            nodes_r.pop(0)
        nodes_l, nodes_m, nodes_r = (np.array(nodes_l), np.array(nodes_m),
                                     np.array(nodes_r))
        Nchords = len(nodes_l)
        axlen = np.zeros(Nchords, dtype='float')
        for i, (nl, nr) in enumerate(zip(nodes_l, nodes_r)):
            axlen[i] = linalg.norm(nr - nl)

        # Slice sample from all chords simultaneously. This is equivalent to
        # slice sampling in *time* along our trajectory.
        axlen_init = np.array(axlen)
        while True:
            # Safety check.
            if np.any(axlen < 1e-5 * axlen_init):
                raise RuntimeError("Hamiltonian slice sampling appears to be "
                                   "stuck! Some useful output quantities:\n"
                                   "u: {0}\n"
                                   "u_left: {1}\n"
                                   "u_right: {2}\n"
                                   "loglstar: {3}.".format(
                                       u, u_l, u_r, loglstar))

            # Select chord.
            axprob = axlen / np.sum(axlen)
            idx = rstate.choice(Nchords, p=axprob)

            # Define chord.
            u_l, u_m, u_r = nodes_l[idx], nodes_m[idx], nodes_r[idx]
            u_hat = u_r - u_l
            rprop = rstate.uniform()
            u_prop = u_l + rprop * u_hat  # scale from left
            if unitcheck(u_prop, nonperiodic):
                v_prop = prior_transform(np.asarray(u_prop))
                logl_prop = loglikelihood(np.asarray(v_prop))
            else:
                logl_prop = -np.inf
            nc += 1
            ncontract += 1

            # If we succeed, move to the new position.
            if logl_prop > loglstar:
                u = u_prop
                break
            # If we fail, check if the new point is to the left/right of
            # the point interior to the bounds (`u_m`) and update
            # the bounds accordingly.
            else:
                s = np.dot(u_prop - u_m, u_hat)  # check sign (+/-)
                if s < 0:  # left
                    nodes_l[idx] = u_prop
                    axlen[idx] *= 1 - rprop
                elif s > 0:  # right
                    nodes_r[idx] = u_prop
                    axlen[idx] *= rprop
                else:
                    raise RuntimeError("Slice sampler has failed to find "
                                       "a valid point. Some useful "
                                       "output quantities:\n"
                                       "u: {0}\n"
                                       "u_left: {1}\n"
                                       "u_right: {2}\n"
                                       "u_hat: {3}\n"
                                       "u_prop: {4}\n"
                                       "loglstar: {5}\n"
                                       "logl_prop: {6}.".format(
                                           u, u_l, u_r, u_hat, u_prop,
                                           loglstar, logl_prop))

    blob = {'nmove': nmove, 'nreflect': nreflect, 'ncontract': ncontract}

    return u_prop, v_prop, logl_prop, nc, blob
| 35.387907 | 79 | 0.529441 |
95fa2cf31652bc5504db781c43d31a4e125fced9 | 954 | py | Python | lhcb/cmtuser/DaVinci_v36r5/Phys/DiLeptonTuple/python/DiLeptonTuple/configIso.py | ibab/lhcb-b2dmumu | c1334c381032af9459602640e17541377fd16606 | [
"MIT"
] | 3 | 2019-06-24T10:56:34.000Z | 2019-06-24T10:57:11.000Z | lhcb/cmtuser/DaVinci_v36r5/Phys/DiLeptonTuple/python/DiLeptonTuple/configIso.py | ibab/lhcb-b2dmumu | c1334c381032af9459602640e17541377fd16606 | [
"MIT"
] | null | null | null | lhcb/cmtuser/DaVinci_v36r5/Phys/DiLeptonTuple/python/DiLeptonTuple/configIso.py | ibab/lhcb-b2dmumu | c1334c381032af9459602640e17541377fd16606 | [
"MIT"
] | null | null | null | # lines from Greg Ciezarek -
# this is work in progress
#will be comitted to SVN properly at a future date as of 3/4/14
# this needs to go before the tuple tool is run
def configIso():
    """Set up isolation inputs: Velo proto-particles plus no-PID Velo pions.

    Must run before the tuple tool (work in progress, per the original
    author's notes).
    """
    from Configurables import ChargedProtoParticleMaker, DaVinci
    # Build proto-particles from the best-track container.
    proto_maker = ChargedProtoParticleMaker("ProtoPMaker")
    proto_maker.Inputs = ["Rec/Track/Best"]
    proto_maker.Output = "Rec/ProtoP/myProtoPMaker/ProtoParticles"
    DaVinci().appendToMainSequence([proto_maker])

    from Configurables import ProtoParticleCALOFilter, CombinedParticleMaker, NoPIDsParticleMaker
    from CommonParticles.Utils import trackSelector, updateDoD
    # Make no-PID pions out of the proto-particles above, Velo tracks only.
    pion_maker = NoPIDsParticleMaker('StdNoPIDsVeloPions', Particle='pion')
    pion_maker.Input = "Rec/ProtoP/myProtoPMaker/ProtoParticles"
    selector = trackSelector(pion_maker, trackTypes=['Velo'])
    locations = updateDoD(pion_maker)
    DaVinci().appendToMainSequence([pion_maker])
| 47.7 | 102 | 0.742138 |
9d9654eba1ba7459c7bac405c5ce4556a782afd0 | 1,607 | py | Python | 2020/test/test_day7.py | terezaif/adventofcode | 67601f79a3b01d71434ef0236387ffd5ab7dca0f | [
"MIT"
] | 4 | 2020-12-06T13:11:59.000Z | 2021-12-15T11:34:34.000Z | 2020/test/test_day7.py | terezaif/adventofcode | 67601f79a3b01d71434ef0236387ffd5ab7dca0f | [
"MIT"
] | null | null | null | 2020/test/test_day7.py | terezaif/adventofcode | 67601f79a3b01d71434ef0236387ffd5ab7dca0f | [
"MIT"
] | 1 | 2021-12-02T16:32:50.000Z | 2021-12-02T16:32:50.000Z | import pytest
from days.day7 import get_bags
from days.day7 import get_bags_2
from utils.reading_data import get_string_input_array
# Example bag rules from Advent of Code 2020, day 7 (part 1 example).
# NOTE(review): `input` shadows the built-in; renaming would touch every
# test below, so it is left as-is.
input = [
    "light red bags contain 1 bright white bag, 2 muted yellow bags.",
    "dark orange bags contain 3 bright white bags, 4 muted yellow bags.",
    "bright white bags contain 1 shiny gold bag.",
    "muted yellow bags contain 2 shiny gold bags, 9 faded blue bags.",
    "shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags.",
    "dark olive bags contain 3 faded blue bags, 4 dotted black bags.",
    "vibrant plum bags contain 5 faded blue bags, 6 dotted black bags.",
    "faded blue bags contain no other bags.",
    "dotted black bags contain no other bags.",
]
# Part 2 example: a deeply nested chain of bags.
input2 = [
    "shiny gold bags contain 2 dark red bags.",
    "dark red bags contain 2 dark orange bags.",
    "dark orange bags contain 2 dark yellow bags.",
    "dark yellow bags contain 2 dark green bags.",
    "dark green bags contain 2 dark blue bags.",
    "dark blue bags contain 2 dark violet bags.",
    "dark violet bags contain no other bags.",
]
# Full puzzle input, loaded from disk at import time.
input3 = get_string_input_array(path="2020/test/data/day7.txt")
def test_get_count():
    # Part 1 example: 4 outer bag colours can eventually hold shiny gold.
    assert get_bags(input) == 4
def test_get_count_2():
    # Part 2 on the part-1 example: 32 bags required inside shiny gold.
    assert get_bags_2(input) == 32
def test_get_count_2_2():
    # Part 2 example: the nested chain totals 126 bags.
    assert get_bags_2(input2) == 126
@pytest.mark.skip(reason="cannot work")
def test_get_count_2_t():
    # Full puzzle input; skipped because the data file is unavailable here.
    assert get_bags_2(input3) == 1151
| 30.903846 | 73 | 0.702551 |
c79206492753a2253e4048164f44e9414f91f064 | 9,091 | py | Python | tests/test_formats.py | robocorp/jupytext | 57c011db9055242bb59ca9dd1ee5ca4f7fab752b | [
"MIT"
] | 1 | 2021-12-27T16:26:14.000Z | 2021-12-27T16:26:14.000Z | tests/test_formats.py | huangyingw/mwouts_jupytext | b72d03f39920333eb312d675a1ecd0fa7c2b549f | [
"MIT"
] | null | null | null | tests/test_formats.py | huangyingw/mwouts_jupytext | b72d03f39920333eb312d675a1ecd0fa7c2b549f | [
"MIT"
] | null | null | null | import pytest
from jupytext.compare import compare
from nbformat.v4.nbbase import new_notebook
import jupytext
from jupytext.formats import (
guess_format,
divine_format,
read_format_from_metadata,
rearrange_jupytext_metadata,
)
from jupytext.formats import (
long_form_multiple_formats,
short_form_multiple_formats,
update_jupytext_formats_metadata,
)
from jupytext.formats import (
get_format_implementation,
validate_one_format,
JupytextFormatError,
)
from .utils import list_notebooks, requires_myst, requires_pandoc
@pytest.mark.parametrize("nb_file", list_notebooks("python"))
def test_guess_format_light(nb_file):
    # Plain scripts with no cell markers should be detected as "light".
    with open(nb_file) as fp:
        text = fp.read()
    assert guess_format(text, ext=".py")[0] == "light"
@pytest.mark.parametrize("nb_file", list_notebooks("percent"))
def test_guess_format_percent(nb_file):
    # Scripts with "# %%" cell markers should be detected as "percent".
    with open(nb_file) as fp:
        text = fp.read()
    assert guess_format(text, ext=".py")[0] == "percent"
@pytest.mark.parametrize("nb_file", list_notebooks("sphinx"))
def test_guess_format_sphinx(nb_file):
    # Sphinx-gallery style scripts should be detected as "sphinx".
    with open(nb_file) as fp:
        text = fp.read()
    assert guess_format(text, ext=".py")[0] == "sphinx"
def test_guess_format_hydrogen():
    # A percent cell whose body is a shell/magic command maps to "hydrogen".
    text = "# %%\ncat hello.txt\n"
    assert guess_format(text, ext=".py")[0] == "hydrogen"
def test_divine_format():
    """`divine_format` should infer both extension and format name from
    raw text alone."""
    # A JSON document with a "cells" entry is an ipynb notebook.
    assert divine_format('{"cells":[]}') == "ipynb"
    # A plain script with no markers maps to the "light" format.
    assert (
        divine_format(
            """def f(x):
    x + 1"""
        )
        == "py:light"
    )
    # Explicit "# %%" markers map to the "percent" format.
    assert (
        divine_format(
            """# %%
def f(x):
    x + 1
# %%
def g(x):
    x + 2
"""
        )
        == "py:percent"
    )
    # Markdown prose with a fenced code block maps to "md".
    assert (
        divine_format(
            """This is a markdown file
with one code block
```
1 + 1
```
"""
        )
        == "md"
    )
    # A jupytext YAML header in ";;" comments identifies a Scheme script.
    assert (
        divine_format(
            """;; ---
;; jupyter:
;;   jupytext:
;;     text_representation:
;;       extension: .ss
;;       format_name: percent
;; ---"""
        )
        == "ss:percent"
    )
def test_get_format_implementation():
    # Default format for .py is "light"; explicit names are honored;
    # an unknown format name raises.
    assert get_format_implementation(".py").format_name == "light"
    assert get_format_implementation(".py", "percent").format_name == "percent"
    with pytest.raises(JupytextFormatError):
        get_format_implementation(".py", "wrong_format")
def test_script_with_magics_not_percent(
    # The default argument holds the script fixture under test.
    script="""# %%time
1 + 2""",
):
    # A magic comment alone ("# %%time") is not a percent cell marker.
    assert guess_format(script, ".py")[0] == "light"
def test_script_with_spyder_cell_is_percent(
    # The default argument holds the script fixture under test.
    script="""#%%
1 + 2""",
):
    # A Spyder-style "#%%" cell marker counts as the percent format.
    assert guess_format(script, ".py")[0] == "percent"
def test_script_with_percent_cell_and_magic_is_hydrogen(
    # The default argument holds the script fixture under test.
    script="""#%%
%matplotlib inline
""",
):
    # A percent cell containing an uncommented magic maps to "hydrogen".
    assert guess_format(script, ".py")[0] == "hydrogen"
def test_script_with_percent_cell_and_kernelspec(
    # The default argument holds the script fixture under test.
    script="""# ---
# jupyter:
#   kernelspec:
#     display_name: Python3
#     language: python
#     name: python3
# ---
# %%
a = 1
""",
):
    # A kernelspec header plus a "# %%" marker is still the percent format.
    assert guess_format(script, ".py")[0] == "percent"
def test_script_with_spyder_cell_with_name_is_percent(
    # The default argument holds the script fixture under test.
    script="""#%% cell name
1 + 2""",
):
    # A named Spyder cell ("#%% cell name") counts as the percent format.
    assert guess_format(script, ".py")[0] == "percent"
def test_read_format_from_metadata(
    # The default argument holds the script fixture under test: a full
    # jupytext YAML header declaring a .pct.py percent representation.
    script="""---
jupyter:
  jupytext:
    formats: ipynb,pct.py:percent,lgt.py:light,spx.py:sphinx,md,Rmd
    text_representation:
      extension: .pct.py
      format_name: percent
      format_version: '1.1'
      jupytext_version: 0.8.0
---""",
):
    # The header describes a .pct.py file, so asking about .Rmd yields None.
    assert read_format_from_metadata(script, ".Rmd") is None
def test_update_jupytext_formats_metadata():
    # Updating with "py:light" replaces a bare "py" entry in place,
    # whether it is the only entry or part of a list.
    for initial, expected in [
        ("py", "py:light"),
        ("ipynb,py", "ipynb,py:light"),
    ]:
        nb = new_notebook(metadata={"jupytext": {"formats": initial}})
        update_jupytext_formats_metadata(nb.metadata, "py:light")
        assert nb.metadata["jupytext"]["formats"] == expected
def test_decompress_formats():
    # A bare extension expands to a one-entry long form.
    assert long_form_multiple_formats("ipynb") == [{"extension": ".ipynb"}]
    assert long_form_multiple_formats("ipynb,md") == [
        {"extension": ".ipynb"},
        {"extension": ".md"},
    ]
    # "ext:format_name" carries the format name into the long form.
    assert long_form_multiple_formats("ipynb,py:light") == [
        {"extension": ".ipynb"},
        {"extension": ".py", "format_name": "light"},
    ]
    # Lists are accepted as well as comma-separated strings.
    assert long_form_multiple_formats(["ipynb", ".py:light"]) == [
        {"extension": ".ipynb"},
        {"extension": ".py", "format_name": "light"},
    ]
    # A double extension is split into suffix + extension.
    assert long_form_multiple_formats(".pct.py:percent") == [
        {"extension": ".py", "suffix": ".pct", "format_name": "percent"}
    ]
def test_compress_formats():
    # The short form is the inverse of long_form_multiple_formats.
    assert short_form_multiple_formats([{"extension": ".ipynb"}]) == "ipynb"
    # Strings pass through unchanged.
    assert short_form_multiple_formats("ipynb") == "ipynb"
    assert (
        short_form_multiple_formats([{"extension": ".ipynb"}, {"extension": ".md"}])
        == "ipynb,md"
    )
    assert (
        short_form_multiple_formats(
            [{"extension": ".ipynb"}, {"extension": ".py", "format_name": "light"}]
        )
        == "ipynb,py:light"
    )
    # Extra per-format options (comment_magics) are dropped from the
    # compressed representation.
    assert (
        short_form_multiple_formats(
            [
                {"extension": ".ipynb"},
                {"extension": ".py", "format_name": "light"},
                {"extension": ".md", "comment_magics": True},
            ]
        )
        == "ipynb,py:light,md"
    )
    # Suffix + extension are rejoined into a double extension.
    assert (
        short_form_multiple_formats(
            [{"extension": ".py", "suffix": ".pct", "format_name": "percent"}]
        )
        == ".pct.py:percent"
    )
def test_rearrange_jupytext_metadata():
    # Legacy top-level keys are moved under the "jupytext" namespace.
    for legacy, expected in [
        ({"nbrmd_formats": "ipynb,py"}, {"jupytext": {"formats": "ipynb,py"}}),
        ({"jupytext_formats": "ipynb,py"}, {"jupytext": {"formats": "ipynb,py"}}),
        ({"executable": "#!/bin/bash"}, {"jupytext": {"executable": "#!/bin/bash"}}),
    ]:
        rearrange_jupytext_metadata(legacy)
        compare(legacy, expected)
def test_rearrange_jupytext_metadata_metadata_filter():
    # Legacy dict-style metadata filters are converted to the compact
    # comma-separated string form ("additional" first, "-excluded" after).
    metadata = {
        "jupytext": {
            "metadata_filter": {
                "notebook": {"additional": ["one", "two"], "excluded": "all"},
                "cells": {"additional": "all", "excluded": ["three", "four"]},
            }
        }
    }
    rearrange_jupytext_metadata(metadata)
    compare(
        metadata,
        {
            "jupytext": {
                "notebook_metadata_filter": "one,two,-all",
                "cell_metadata_filter": "all,-three,-four",
            }
        },
    )
def test_rearrange_jupytext_metadata_add_dot_in_suffix():
    # Formats saved by jupytext <= 0.8.6 lack the leading dot on suffixes;
    # rearranging must normalize "pct.py" to ".pct.py".
    metadata = {
        "jupytext": {
            "text_representation": {"jupytext_version": "0.8.6"},
            "formats": "ipynb,pct.py,lgt.py",
        }
    }
    rearrange_jupytext_metadata(metadata)
    compare(
        metadata,
        {
            "jupytext": {
                "text_representation": {"jupytext_version": "0.8.6"},
                "formats": "ipynb,.pct.py,.lgt.py",
            }
        },
    )
def test_fix_139():
    """Round-trip regression test for jupytext issue #139: legacy metadata
    filters must be normalized and `lines_to_next_cell` must be rendered
    as blank lines, not metadata."""
    text = """# ---
# jupyter:
#   jupytext:
#     metadata_filter:
#       cells:
#         additional:
#         - "lines_to_next_cell"
#         excluded:
#         - "all"
# ---
# + {"lines_to_next_cell": 2}
1 + 1
# -
1 + 1
"""
    nb = jupytext.reads(text, "py:light")
    text2 = jupytext.writes(nb, "py:light")
    # The legacy filter collapses to the compact "-all" form ...
    assert "cell_metadata_filter: -all" in text2
    # ... and lines_to_next_cell never appears as explicit metadata.
    assert "lines_to_next_cell" not in text2
def test_validate_one_format():
    # Each malformed format spec must be rejected with JupytextFormatError:
    # non-dict input, extension without dot + bad name, empty dict,
    # unknown option, and a non-boolean comment_magics value.
    invalid_specs = [
        "py:percent",
        {"extension": "py", "format_name": "invalid"},
        {},
        {"extension": ".py", "unknown_option": True},
        {"extension": ".py", "comment_magics": "TRUE"},
    ]
    for spec in invalid_specs:
        with pytest.raises(JupytextFormatError):
            validate_one_format(spec)
def test_set_auto_ext():
    # The "auto" extension needs notebook metadata to resolve; empty
    # metadata must raise.
    with pytest.raises(ValueError):
        long_form_multiple_formats("ipynb,auto:percent", {})
@requires_pandoc
def test_pandoc_format_is_preserved():
    # A round trip through the long form must not lose the pandoc entry.
    original = "ipynb,md,.pandoc.md:pandoc,py:light"
    round_tripped = short_form_multiple_formats(
        long_form_multiple_formats(original))
    compare(round_tripped, original)
@requires_myst
def test_write_as_myst(tmpdir):
    """Inspired by https://github.com/mwouts/jupytext/issues/462"""
    target = str(tmpdir.join("notebook.md"))
    jupytext.write(new_notebook(), target, fmt="myst")
    with open(target) as fp:
        assert "myst" in fp.read()
def test_write_raises_when_fmt_does_not_exists(tmpdir):
    """Inspired by https://github.com/mwouts/jupytext/issues/462"""
    target = str(tmpdir.join("notebook.md"))
    with pytest.raises(JupytextFormatError):
        jupytext.write(new_notebook(), target, fmt="unknown_format")
| 25.536517 | 84 | 0.613794 |
6dc8679ab98ab3f1584e4707fb35d1ab785f258a | 5,751 | py | Python | GhostScan/Reconstructions/Mesh.py | yyf20001230/GhostScan | 5694df4532132be5e916bd72a46dc907eb108bf9 | [
"MIT"
] | 4 | 2021-09-27T14:16:08.000Z | 2022-03-17T07:03:18.000Z | GhostScan/Reconstructions/Mesh.py | clkimsdu/GhostScan | 5694df4532132be5e916bd72a46dc907eb108bf9 | [
"MIT"
] | null | null | null | GhostScan/Reconstructions/Mesh.py | clkimsdu/GhostScan | 5694df4532132be5e916bd72a46dc907eb108bf9 | [
"MIT"
] | 2 | 2022-02-04T17:32:04.000Z | 2022-03-31T09:53:20.000Z | from __future__ import print_function
import os, sys, glob
import numpy as np
import cv2
import scipy.spatial
import matplotlib
import wavepy
import matplotlib.pyplot as plt
# import pyvista as pv
# import plotly.graph_objects as go
# Set Parameters
# NOTE(review): not referenced in this module — confirm where (if anywhere)
# this filter size is consumed.
DEFLECTOMETRY_GRADMAP_FILTERSIZE = 49
# Render parameters
# Wavefront .mtl material coefficients, written verbatim into the exported
# material file by Mesh.exportOBJ (ambient/diffuse/specular, shininess,
# dissolve, illumination model).
MESH_Ka = [0.0, 0.0, 0.0]
MESH_Kd = [0.9, 0.9, 0.9]
MESH_Ks = [0.1, 0.1, 0.1]
MESH_Ns = 5.000000
MESH_d = 0.010000
MESH_illum = 1
# Scaling factors
MESH_DEPTH_SCALINGFACTOR = 5  # multiplier applied to the recovered depth map
MESH_SIGMA_GLOBCORR = 5  # pixel neighborhood diameter for cv2.bilateralFilter
class Mesh:
    """Triangulated height-field mesh reconstructed from surface normals.

    Vertices live on an integer (height x width) grid; `setDepth` fills in
    the z coordinate from a normal map and `exportOBJ` writes a textured
    Wavefront OBJ/MTL pair.
    """

    def __init__(self, name, height, width, cropMask=0):
        """Create the 2-D vertex grid and its Delaunay triangulation.

        cropMask: 0 (default) keeps the full image, otherwise a
        [top, left, bottom, right] crop window.
        """
        self.name = name
        self.height = height
        self.width = width
        self.vertex = np.zeros((self.height * self.width, 3))
        # set 2d plane
        self.setVertex2D()
        self.setFace()
        if cropMask == 0:
            self.mask = np.array([0, 0, self.height, self.width])
        else:
            self.mask = cropMask

    def setVertex2D(self):
        """Fill the (x, y) columns of `vertex` with 1-based grid positions."""
        [i, j] = np.meshgrid(np.r_[1:self.height + 1], np.r_[1:self.width + 1])
        self.vertex2D = np.concatenate((i.flatten('F')[:, None], j.flatten('F')[:, None]), axis=1).astype(np.int16)
        self.vertex[:, 0:2] = self.vertex2D

    def setFace(self):
        """Triangulate the 2-D grid; face indices are 1-based (OBJ style)."""
        options = 'Qt QbB Qc'
        self.faces = scipy.spatial.Delaunay(self.vertex2D, qhull_options=options).simplices + 1

    def setDepth(self):
        """Integrate the normal map into a depth map (Frankot-Chellappa).

        Stores the cropped, bilateral-filtered depth in `vertex[:, 2]` and
        returns the full uncropped depth map.
        NOTE(review): the returned map is the *unfiltered* depth while the
        vertices use the filtered one — confirm this asymmetry is intended.
        """
        # Gradients in x and y from the normal components.
        p = self.normals[..., 0] / self.normals[..., 2]
        q = self.normals[..., 1] / self.normals[..., 2]
        p = np.nan_to_num(p)
        q = np.nan_to_num(q)
        # TODO: Low pass filter to filter out the noise (BilateralFilter)
        # Frankot-Chellappa integration via FFT; result is complex
        # (real: depth, imag: noise).
        x = wavepy.surface_from_grad.frankotchellappa(p, q, False)
        depth = x.real
        depth *= MESH_DEPTH_SCALINGFACTOR
        # Edge-preserving smoothing of the depth map.
        depth_corr = cv2.bilateralFilter(depth.astype(np.float32), MESH_SIGMA_GLOBCORR, 90, 90)
        depth_crop = depth_corr[self.mask[0]:self.mask[2], self.mask[1]:self.mask[3]]
        self.vertex[:, 2] = depth_crop.reshape(-1)
        return depth

    def setNormal(self, normals):
        """Store the (H, W, 3) normal map used by setDepth/exportOBJ."""
        self.normals = normals

    def setTexture(self, texture):
        """Store the texture image, flipped on both axes to match UVs."""
        self.texture = cv2.flip(texture, -1)

    def exportOBJ(self, path, withTexture=True):
        """Write mesh_<name>.obj (plus material/texture files) into `path`.

        Files are opened with context managers so handles are released
        even if a write fails (the original leaked them on exceptions).
        """
        print('Exporting OBJ...')
        # Per-vertex normals, cropped to the same window as the vertices.
        N = self.normals[self.mask[0]:self.mask[2], self.mask[1]:self.mask[3]].reshape(-1, 3)
        filename = os.path.join(os.path.normpath(path), "mesh_" + self.name + ".obj")
        with open(filename, 'w') as f:
            f.write('mtllib material_' + self.name + '.mtl\n')
            f.write('usemtl Textured\n')
            for i in range(self.vertex.shape[0]):
                f.write('v %f %f %f\n' % (self.vertex[i, 0], self.vertex[i, 1], self.vertex[i, 2]))
                f.write('vn %f %f %f\n' % (N[i, 0], N[i, 1], N[i, 2]))
            if withTexture:
                # One UV per grid point, v flipped to image convention.
                for i in range(self.height):
                    for j in range(self.width):
                        u = (i) / self.height
                        v = 1 - (j) / self.width
                        f.write('vt %f %f\n' % (v, u))
            for i in range(self.faces.shape[0]):
                # Faces reference vertex/uv/normal with the same index.
                f.write(
                    'f ' + str(self.faces[i, 0]) + '/' + str(self.faces[i, 0]) + '/' + str(self.faces[i, 0]) + ' ' + str(
                        self.faces[i, 1]) + '/' + str(self.faces[i, 1]) + '/' + str(self.faces[i, 1]) + ' ' + str(
                        self.faces[i, 2]) + '/' + str(self.faces[i, 2]) + '/' + str(self.faces[i, 2]) + '\n')
        if withTexture:
            # export mtl
            filename = os.path.join(os.path.normpath(path), "material_" + self.name + ".mtl")
            with open(filename, 'w') as f:
                f.write('newmtl Textured\n')
                f.write('Ka %f %f %f\n' % (MESH_Ka[0], MESH_Ka[1], MESH_Ka[2]))
                f.write('Kd %f %f %f\n' % (MESH_Kd[0], MESH_Kd[1], MESH_Kd[2]))
                f.write('Ks %f %f %f\n' % (MESH_Ks[0], MESH_Ks[1], MESH_Ks[2]))
                f.write('Ns %f\n' % (MESH_Ns))
                f.write('d %f\n' % (MESH_d))
                f.write('illum %d\n' % (MESH_illum))
                f.write("map_Kd texture_" + self.name + ".jpg\n")
            # export texture
            filename = os.path.join(os.path.normpath(path), "texture_" + self.name + ".jpg")
            cv2.imwrite(filename, self.texture)
| 39.9375 | 118 | 0.557294 |
c09f41c3f17c47d5e2ecf9641b49ba08495d5902 | 2,919 | py | Python | python/smap/checkers.py | carlosduarteroa/smap | 5760631dfaf3e85da26ce68bf542bf254bb92c80 | [
"BSD-2-Clause"
] | 21 | 2015-02-06T21:55:59.000Z | 2021-04-29T11:23:18.000Z | python/smap/checkers.py | carlosduarteroa/smap | 5760631dfaf3e85da26ce68bf542bf254bb92c80 | [
"BSD-2-Clause"
] | 9 | 2015-02-03T10:41:35.000Z | 2020-02-18T12:46:10.000Z | python/smap/checkers.py | carlosduarteroa/smap | 5760631dfaf3e85da26ce68bf542bf254bb92c80 | [
"BSD-2-Clause"
] | 20 | 2015-02-06T00:09:19.000Z | 2020-01-10T13:27:06.000Z | """
Copyright (c) 2011, 2012, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
OF THE POSSIBILITY OF SUCH DAMAGE.
"""
"""
@author Sagar Karandikar <skarandikar@berkeley.edu>
"""
"""A set of checking functions that cause an instance to terminate on different
failure conditions (at which point monit restarts the instance when configured
properly). A usable checking function has a wrapper that can generate a
no-arg version for use in the instance.
"""
import sys
import time
from twisted.internet import reactor
from twisted.python import log
def datacheck(instance, driver, timep):
    """Stop the reactor when the driver's stats feed reports no new data.

    ARGS:
    instance - to get points/s
    driver - what to check for
    timep - width of allowable window in seconds (allowable window is
    now - timep)
    """
    # Anything newer than this cutoff counts as fresh data.
    cutoff = time.time() - timep
    if driver.statslog.latest() >= cutoff:
        return
    log.err("Killing sMAP: no data check violated for driver " +
            str(driver))
    reactor.stop()
def get(inst, driver, opts):
    """Build the datacheck entry for a driver from its options dict.

    Returns None when neither 'DatacheckWindow' nor 'DatacheckInterval' is
    configured; otherwise returns ``(interval_seconds, no_arg_check_fn)``.
    Both options default to 300 seconds when only one of them is present.
    """
    if 'DatacheckWindow' not in opts and 'DatacheckInterval' not in opts:
        return None
    window = int(opts.get('DatacheckWindow', 300))
    interval = int(opts.get('DatacheckInterval', 300))
    return interval, datacheckwrap(inst, driver, window)
def datacheckwrap(instance, driver, timep):
    """Return a zero-argument closure that runs datacheck with these args."""
    def _check():
        return datacheck(instance, driver, timep)
    return _check
| 39.445946 | 80 | 0.74272 |
7ab2896112aaadabe020a170b9d269464c1a5afc | 5,617 | py | Python | CrossPlan/settings.py | YuzuRyo61/CrossPlan | 7bdc9b688885cd2bcce8bdc1f4d65b225beeb122 | [
"MIT"
] | 8 | 2020-01-22T07:44:59.000Z | 2020-05-17T18:32:06.000Z | CrossPlan/settings.py | YuzuRyo61/CrossPlan | 7bdc9b688885cd2bcce8bdc1f4d65b225beeb122 | [
"MIT"
] | 4 | 2021-03-19T08:17:48.000Z | 2021-06-10T19:53:08.000Z | CrossPlan/settings.py | YuzuRyo61/CrossPlan | 7bdc9b688885cd2bcce8bdc1f4d65b225beeb122 | [
"MIT"
] | null | null | null | """
Django settings for CrossPlan project.
Generated by 'django-admin startproject' using Django 2.2.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import socket
import logging
from dotenv import load_dotenv
from distutils.version import LooseVersion
from distutils.util import strtobool
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Pull CP_* environment overrides from the project-root .env file before any
# of the settings below read os.environ.
load_dotenv(os.path.join(BASE_DIR, '.env'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('CP_SECRET')
# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG is on whenever CP_ENV is unset or equals 'development'.
DEBUG = True if os.environ.get('CP_ENV', 'development') == 'development' else False
if os.environ.get('CP_ENV', 'development') == 'development':
    logging.basicConfig(level=logging.DEBUG)
ALLOWED_HOSTS = [
    "localhost",
    "127.0.0.1",
    os.environ.get('CP_ENDPOINT', 'localhost:8000')
]
# Application definition
INSTALLED_APPS = [
    # SYSTEM
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Libraries
    'django_celery_beat',
    'django_celery_results',
    'widget_tweaks',
    'channels',
    'django_admin_listfilter_dropdown',
    'rest_framework',
    'oauth2_provider',
    'corsheaders',
    # Projects
    'fediverse',
    'Web',
    'restAPI',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    # NOTE(review): django-cors-headers docs place CorsMiddleware as high as
    # possible, before CommonMiddleware -- confirm this ordering is intended.
    'corsheaders.middleware.CorsMiddleware',
]
ROOT_URLCONF = 'CrossPlan.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [
            os.path.join(BASE_DIR, 'Web', 'template')
        ],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'CrossPlan.context_processors.DEFINE_COMMON_VARIABLE',
            ],
        },
    },
]
WSGI_APPLICATION = 'CrossPlan.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': os.environ.get('CP_DB_ENGINE', 'django.db.backends.postgresql'),
        'NAME': os.environ.get('CP_DB_NAME', 'crossplan'),
        'USER': os.environ.get('CP_DB_USER', 'crossplan'),
        'PASSWORD': os.environ.get('CP_DB_PASS', 'password'),
        'HOST': os.environ.get('CP_DB_HOST', 'localhost'),
        'PORT': str(os.environ.get('CP_DB_PORT', '5432')),
        'TEST': {
            'NAME': 'test_crossplan'
        }
    }
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'ja-jp'
TIME_ZONE = 'Asia/Tokyo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# Custom user model provided by the 'fediverse' app.
AUTH_USER_MODEL = 'fediverse.User'
# Celery: results stored via django-celery-results, broker on Redis DB 1.
CELERY_RESULT_BACKEND = 'django-db'
CELERY_BROKER_URL = f'redis://{os.environ.get("CP_REDIS_HOST", "localhost")}:{str(os.environ.get("CP_REDIS_PORT", "6379"))}/1'
# Public host name of this instance (also appended to ALLOWED_HOSTS above).
CP_ENDPOINT = os.environ.get('CP_ENDPOINT', 'localhost:8000')
# Deployed version string, read from a marker file at the repository root.
with open(os.path.join(BASE_DIR, ".crossplan_version"), "r", encoding="utf-8") as cv:
    CP_VERSION = LooseVersion(cv.readline().rstrip(os.linesep))
ASGI_APPLICATION = 'CrossPlan.routing.application'
CHANNEL_LAYERS = {
    'default': {
        'BACKEND': 'channels_redis.core.RedisChannelLayer',
        'CONFIG': {
            "hosts": [(os.environ.get('CP_REDIS_HOST', 'localhost'), int(os.environ.get('CP_REDIS_PORT', 6379)))],
        },
    },
}
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, "static")
]
LOGIN_URL = "Login"
LOGIN_REDIRECT_URL = "INDEX"
# Pagination sizes for object and user listings.
OBJECT_PER_PAGE = 20
USER_PER_PAGE = 10
# Whether new account registration is open (string env var -> bool).
CP_OPENREGISTER = strtobool(os.environ.get('CP_OPENREGISTER', "true"))
CORS_ORIGIN_ALLOW_ALL = True
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'oauth2_provider.contrib.rest_framework.OAuth2Authentication',
        'rest_framework.authentication.SessionAuthentication',
    )
}
OAUTH2_PROVIDER = {
    'SCOPES': {
        'read': 'Read scope',
        'write': 'Write scope'
    }
}
| 26.620853 | 126 | 0.682215 |
0390268a3ecabf2d51d156e655958f0de0b87ead | 3,223 | py | Python | isi_sdk_7_2/isi_sdk_7_2/models/compatibilities_ssd_active.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 24 | 2018-06-22T14:13:23.000Z | 2022-03-23T01:21:26.000Z | isi_sdk_7_2/isi_sdk_7_2/models/compatibilities_ssd_active.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 46 | 2018-04-30T13:28:22.000Z | 2022-03-21T21:11:07.000Z | isi_sdk_7_2/isi_sdk_7_2/models/compatibilities_ssd_active.py | mohitjain97/isilon_sdk_python | a371f438f542568edb8cda35e929e6b300b1177c | [
"Unlicense"
] | 29 | 2018-06-19T00:14:04.000Z | 2022-02-08T17:51:19.000Z | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 2
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_7_2.models.compatibilities_ssd_active_active_item import CompatibilitiesSsdActiveActiveItem # noqa: F401,E501
class CompatibilitiesSsdActive(object):
    """Swagger model holding a list of active SSD compatibility entries.

    NOTE: originally generated by the swagger code generator program; the
    public interface is unchanged.
    """

    # Attribute name -> declared swagger type.
    swagger_types = {
        'active': 'list[CompatibilitiesSsdActiveActiveItem]'
    }

    # Attribute name -> JSON key in the API definition.
    attribute_map = {
        'active': 'active'
    }

    def __init__(self, active=None):  # noqa: E501
        """CompatibilitiesSsdActive - a model defined in Swagger"""  # noqa: E501
        self._active = None
        self.discriminator = None
        if active is not None:
            self.active = active

    @property
    def active(self):
        """The list of active compatibility items of this model.

        :return: The active of this CompatibilitiesSsdActive.  # noqa: E501
        :rtype: list[CompatibilitiesSsdActiveActiveItem]
        """
        return self._active

    @active.setter
    def active(self, active):
        """Set the list of active compatibility items of this model.

        :param active: The active of this CompatibilitiesSsdActive.  # noqa: E501
        :type: list[CompatibilitiesSsdActiveActiveItem]
        """
        self._active = active

    def to_dict(self):
        """Return the model properties as a dict, recursing into members."""
        def _convert(value):
            # Nested swagger models know how to serialise themselves.
            return value.to_dict() if hasattr(value, "to_dict") else value

        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [_convert(item) for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {key: _convert(val)
                                for key, val in value.items()}
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True when *other* is the same type with identical state."""
        if not isinstance(other, CompatibilitiesSsdActive):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
| 28.026087 | 123 | 0.587341 |
681779806d72b020dd41a28020f911b6eec0648a | 319 | py | Python | exercises/pt/exc_02_05_03.py | tuanducdesign/spacy-course | f8d092c5fa2997fccb3f367d174dce8667932b3d | [
"MIT"
] | null | null | null | exercises/pt/exc_02_05_03.py | tuanducdesign/spacy-course | f8d092c5fa2997fccb3f367d174dce8667932b3d | [
"MIT"
] | null | null | null | exercises/pt/exc_02_05_03.py | tuanducdesign/spacy-course | f8d092c5fa2997fccb3f367d174dce8667932b3d | [
"MIT"
] | null | null | null | import spacy
# Create a blank Portuguese pipeline.
nlp = spacy.blank("pt")
# Import the Doc class
from ____ import ____
# Desired text: "Oh, realmente?!"
words = [____, ____, ____, ____, ____]
spaces = [____, ____, ____, ____, ____]
# Create a Doc from the words and the spaces flags
doc = ____(____, ____=____, ____=____)
print(doc.text)
| 21.266667 | 62 | 0.705329 |
abe44e5583c504d4acdfdda832c07597957e1c3d | 2,648 | py | Python | plot/ART/point/height_url_art.py | XinYao1994/HOPE | 99b41b457b67d3e5d6dd182f8aa2ce4ea66e4a68 | [
"Apache-2.0"
] | 108 | 2020-04-23T19:06:51.000Z | 2022-02-23T20:05:09.000Z | plot/ART/point/height_url_art.py | XinYao1994/HOPE | 99b41b457b67d3e5d6dd182f8aa2ce4ea66e4a68 | [
"Apache-2.0"
] | 1 | 2021-07-07T05:58:57.000Z | 2021-07-07T05:58:57.000Z | plot/ART/point/height_url_art.py | XinYao1994/HOPE | 99b41b457b67d3e5d6dd182f8aa2ce4ea66e4a68 | [
"Apache-2.0"
] | 11 | 2020-04-24T01:53:50.000Z | 2022-01-21T07:36:14.000Z | import sys
import os
sys.path.append(os.path.abspath('./plot/'))
from option import *
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plot
import matplotlib.ticker as ticker
import numpy as np
import csv
def autolabel(rects):
    """Write each bar's height (one decimal place) just above the bar.

    Relies on the module-level axes object ``ax``.
    """
    for bar in rects:
        value = bar.get_height()
        x_center = bar.get_x() + bar.get_width() / 2.
        ax.text(x_center, value + 0.01, '%0.1f' % float(value),
                fontsize=14, ha='center', va='bottom')
# One bar per compression scheme.
GROUP_SIZE = 7
CATEGORY_NAMES = ["Uncompressed", "Single", "Double", "3-Grams, 65536", "4-Grams, 65536", "ALM, 8192", "ALM, 65536"]
CSV_FILE_PATH = "results/ART/point/final_height_url_art.csv"
GRAPH_OUTPUT_PATH = "figures/ART/point/height_url_art.pdf"
COLORS = ['#ffffff', '#fff7ec', '#fee8c8', '#fc8d59', '#d7301f', '#7f0000', '#4c0000']
Y_LABEL = "Average Trie Height"
LEGEND_FONT_SIZE = 18
LEGEND_POS = 'upper left'
f_in = open(CSV_FILE_PATH)
reader = csv.reader(f_in)
csvrows = list(reader)
data = []
# Each CSV row appears to hold per-level counts; the weighted sum over the
# 1-based level index divided by the row length yields the average height.
# NOTE(review): dividing by len(row) (not by the sum of counts) assumes the
# columns are already normalised fractions -- confirm against the CSV format.
for row in csvrows :
    h_sum = 0
    for i,item in enumerate(row) :
        h_sum += float(item) * (i+1)
    data.append(h_sum/len(row))
#========================================================================================
# Embed Type-1 fonts so the PDF is publication-safe.
mpl.rcParams['ps.useafm'] = True
mpl.rcParams['pdf.use14corefonts'] = True
mpl.rcParams['text.usetex'] = False
mpl.rcParams['text.latex.preamble'] = [
    r'\usepackage{siunitx}', # i need upright \micro symbols, but you need...
    r'\sisetup{detect-all}', # ...this to force siunitx to actually use your fonts
    r'\usepackage{helvet}', # set the normal font here
    r'\usepackage{sansmath}', # load up the sansmath so that math -> helvet
    r'\sansmath' # <- tricky! -- gotta actually tell tex to use!
]
#========================================================================================
# Bar width leaves one bar's worth of padding on each side of the group.
width = 1 / (GROUP_SIZE + 2.0)
fig = plot.figure(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT))
ax = fig.add_subplot(111)
rect = []
for i in range(0, GROUP_SIZE) :
    if i == 0: # baseline
        hatch = HATCH
    else:
        hatch = ""
    pos = []
    pos.append(width + width * i)
    rect.append(ax.bar(pos, [data[i]], width, color=COLORS[i], label=CATEGORY_NAMES[i], linewidth = BORDER_SIZE, edgecolor = BORDER_COLOR, hatch=hatch))
    autolabel(rect[i])
ax.get_xaxis().set_visible(False)
# getLimit/getTicks come from the shared plot `option` module.
ylim = getLimit(data)
ax.set_ylim(0, ylim)
ax.set_xlim([0,1])
ax.set_yticks(getTicks(ylim))
ax.set_ylabel(Y_LABEL, fontsize=Y_LABEL_FONT_SIZE)
for label in ax.get_yticklabels():
    label.set_fontsize(Y_TICK_FONT_SIZE)
outFile = GRAPH_OUTPUT_PATH
plot.savefig(outFile, bbox_inches='tight')
| 30.790698 | 152 | 0.614804 |
0e61909910bec4da938c30cad80653f49eb97586 | 97 | py | Python | flash/core/serve/dag/utils_test.py | Actis92/lightning-flash | 49972268cfc0f95f1bd2b8fbf25036970cc44b59 | [
"Apache-2.0"
] | 1,457 | 2021-01-28T20:40:16.000Z | 2022-03-31T06:22:05.000Z | flash/core/serve/dag/utils_test.py | Actis92/lightning-flash | 49972268cfc0f95f1bd2b8fbf25036970cc44b59 | [
"Apache-2.0"
] | 1,123 | 2021-01-28T20:37:56.000Z | 2022-03-31T19:34:44.000Z | flash/core/serve/dag/utils_test.py | Actis92/lightning-flash | 49972268cfc0f95f1bd2b8fbf25036970cc44b59 | [
"Apache-2.0"
] | 170 | 2021-01-29T00:41:39.000Z | 2022-03-29T16:09:52.000Z | def inc(x):
return x + 1
def add(x, y):
    """Return the sum (left operand first, so sequences concatenate in order)."""
    result = x + y
    return result
def mul(x, y):
    """Return the product (left operand first, so sequence repetition is preserved)."""
    result = x * y
    return result
| 8.818182 | 16 | 0.484536 |
de256e518c463051029f6d58a76daa3c10de7179 | 14,731 | py | Python | main-back.py | p-sk/AlexaMeetingRoomBooking | 42c9cefa6cc085c51312fb9e1bcc66ea2e73fe23 | [
"MIT"
] | null | null | null | main-back.py | p-sk/AlexaMeetingRoomBooking | 42c9cefa6cc085c51312fb9e1bcc66ea2e73fe23 | [
"MIT"
] | null | null | null | main-back.py | p-sk/AlexaMeetingRoomBooking | 42c9cefa6cc085c51312fb9e1bcc66ea2e73fe23 | [
"MIT"
] | null | null | null | import json
import datetime
import isodate
import boto3
# Create clients
# AWS handles shared by every handler below.
sns = boto3.client('sns')
# NOTE(review): region is hard-coded to eu-west-1 regardless of where the
# Lambda runs -- confirm this is intentional.
dynamodb = boto3.resource('dynamodb', region_name='eu-west-1')
# Table with one item per room; each item carries a "slots" list of bookings
# plus a "names" list of spoken aliases (see isTimeAvailable/slotValidation).
roomTable = dynamodb.Table('myrecords')
# Check if is there any booking fot that day
def isTimeAvailable(_order):
    """Check whether the requested room is free at the requested slot.

    Mutates and returns ``_order``:
      - sets ``_order["isAvailable"]`` to False when any existing booking of
        the *same* room overlaps the requested [startTime, endTime) window
        on the requested date;
      - fills ``_order["alternative"]`` with other room names (same behaviour
        as before: any other room encountered while scanning is recorded).

    BUG FIX: the original overlap test
        (s >= S and s < E) or (e > S and e <= E)
    missed the containment case where the request fully covers an existing
    slot (s < S and e > E), allowing double bookings. Replaced with the
    canonical interval-intersection predicate ``s < E and e > S``.
    """
    bookingData = roomTable.scan()["Items"]
    _order["isAvailable"] = True
    _order["alternative"] = []
    req_start = datetime.datetime.strptime(_order["startTime"], "%H:%M").time()
    req_end = datetime.datetime.strptime(_order["endTime"], "%H:%M").time()
    for _bookingData in bookingData:
        other_room = _bookingData["room"] != _order["room"]
        for _slot in _bookingData["slots"]:
            if _slot["date"] == _order["date"]:
                slot_start = datetime.datetime.strptime(
                    _slot["startTime"], "%H:%M").time()
                slot_end = datetime.datetime.strptime(
                    _slot["endTime"], "%H:%M").time()
                # Two half-open intervals intersect iff each starts before
                # the other ends.
                if req_start < slot_end and req_end > slot_start:
                    if not other_room:
                        _order["isAvailable"] = False
                    elif _bookingData["room"] not in _order["alternative"]:
                        # NOTE(review): like the original, an *overlapping*
                        # other room is still offered as an alternative --
                        # this looks questionable but is preserved.
                        _order["alternative"].append(_bookingData["room"])
                else:
                    if other_room and _bookingData["room"] not in _order["alternative"]:
                        _order["alternative"].append(_bookingData["room"])
            else:
                if other_room and _bookingData["room"] not in _order["alternative"]:
                    _order["alternative"].append(_bookingData["room"])
    return _order
# Book the Slot
def bookSlot(_order):
_slot = [{
"date": _order["date"],
"endTime": _order["endTime"],
"startTime": _order["startTime"]
}]
try:
roomTable.update_item(
Key={
'room': _order["room"]
},
UpdateExpression="set slots = list_append(slots, :i)",
ExpressionAttributeValues={
':i': _slot
},
ReturnValues="UPDATED_NEW"
)
_order["status"] = "COMPLETED"
except Exception as e:
print(e)
_order["status"] = "NOTCOMPLETED"
return _order
def askAgain(msg, key, _order):
    """Build a Dialog.Delegate response that re-prompts the user.

    :param msg: spoken prompt text
    :param key: name of the slot being asked about (only logged, not used
                in the payload)
    :param _order: current partially-filled order; echoed back both as
                   session attributes and as the updatedIntent slot values
    """
    res = {"version": "1.0",
           "sessionAttributes": _order,
           "shouldEndSession": False,
           "response": {
               "outputSpeech": {
                   "type": "PlainText",
                   "text": msg
               },
               "directives": [
                   {
                       "type": "Dialog.Delegate",
                       "updatedIntent": {
                           "name": "BookRoom",
                           "confirmationStatus": "NONE",
                           "slots": {
                               "date": {
                                   "name": "date",
                                   "value": _order["date"],
                                   "confirmationStatus": "NONE"
                               },
                               "duration": {
                                   "name": "duration",
                                   "value": _order["duration"],
                                   "confirmationStatus": "NONE"
                               },
                               "time": {
                                   "name": "time",
                                   "value": _order["startTime"],
                                   "confirmationStatus": "NONE"
                               },
                               "room": {
                                   "name": "room",
                                   "value": _order["room"],
                                   # Entity-resolution authority for the custom
                                   # AMAZON.Room slot type of this skill.
                                   "resolutions": {"resolutionsPerAuthority": [{
                                       "authority": "amzn1.er-authority.echo-sdk.amzn1.ask.skill.49b043a1-f602-4f73-9d17-817f11a2a81e.AMAZON.Room"}
                                   ]
                                   },
                                   "confirmationStatus": "NONE"
                               }
                           }
                       }
                   }
               ]
           }
           }
    print(msg, key, _order, res)
    return res
def conformation(_order):
    """Build a Dialog.ConfirmIntent response summarising the pending booking.

    (Name kept as-is for existing callers; "confirmation" is the intended
    spelling.) The summary sentence and the echoed slot values come from
    the fully-filled ``_order``.
    """
    res = {"version": "1.0",
           "sessionAttributes": _order,
           "shouldEndSession": False,
           "response": {
               "outputSpeech": {
                   "type": "PlainText",
                   "text": "Ok Your request is for "+_order["room"]+" on "+_order["date"]+" from "+_order["startTime"]+" to "+_order["endTime"]+". Kindly conform"
               },
               "directives": [
                   {
                       "type": "Dialog.ConfirmIntent",
                       "updatedIntent": {
                           "name": "BookRoom",
                           "confirmationStatus": "NONE",
                           "slots": {
                               "date": {
                                   "name": "date",
                                   "value": _order["date"],
                                   "confirmationStatus": "NONE"
                               },
                               "duration": {
                                   "name": "duration",
                                   "value": _order["duration"],
                                   "confirmationStatus": "NONE"
                               },
                               "time": {
                                   "name": "time",
                                   "value": _order["startTime"],
                                   "confirmationStatus": "NONE"
                               },
                               "room": {
                                   "name": "room",
                                   "value": _order["room"],
                                   # Entity-resolution authority for the custom
                                   # AMAZON.Room slot type of this skill.
                                   "resolutions": {"resolutionsPerAuthority": [{
                                       "authority": "amzn1.er-authority.echo-sdk.amzn1.ask.skill.49b043a1-f602-4f73-9d17-817f11a2a81e.AMAZON.Room"}
                                   ]
                                   },
                                   "confirmationStatus": "NONE"
                               }
                           }
                       }
                   }
               ]
           }
           }
    print(_order, res)
    return res
def slotValidation(event):
    """Validate the BookRoom dialog slots and advance the dialog.

    Walks room -> date -> time -> duration, re-prompting (askAgain) for the
    first missing/invalid slot; once everything is present, checks
    availability and either books (when confirmed) or asks for confirmation.

    BUG FIX: when the room is unavailable, the original blanked
    ``order["room"]`` *before* interpolating it into the prompt, so the
    message always read "Requested room  is not available". The name is now
    captured first.
    """
    bookingData = roomTable.scan()["Items"]
    slots = event["request"]["intent"]["slots"]
    order = {
        "room": '',
        "date": '',
        "startTime": '',
        "duration": ''
    }
    # -- room ------------------------------------------------------------
    # NOTE(review): slot "value" is read directly; Alexa omits the key for
    # unfilled slots, which would raise KeyError here -- confirm upstream
    # always sends empty strings.
    if slots["room"]["value"] == '':
        return askAgain('Which room you want to book ', 'room', order)
    _room = slots["room"]["value"]
    for rooms in bookingData:
        rooms["names"].append(rooms["room"])  # canonical name is an alias too
        if _room in rooms["names"]:
            order["room"] = rooms["room"]
            break
    else:  # loop finished without a match
        return askAgain("Requested room not found, try again.", 'room', order)
    # -- date ------------------------------------------------------------
    if slots["date"]["value"] == '':
        return askAgain('When is this meeting', 'date', order)
    _date = slots["date"]["value"]
    order["date"] = _date
    # -- time ------------------------------------------------------------
    if slots["time"]["value"] == '':
        return askAgain('What time you want to book', 'time', order)
    _startTime = slots["time"]["value"]
    _datetime = datetime.datetime.strptime(_date + _startTime, '%Y-%m-%d%H:%M')
    if _datetime < datetime.datetime.now():
        order["date"] = ''
        order["startTime"] = ''
        return askAgain("You requested booking for older date time. Kindly try future date time.", "time", order)
    order["startTime"] = _startTime
    # -- duration --------------------------------------------------------
    if slots["duration"]["value"] == '':
        return askAgain('How Long does it take ', 'duration', order)
    order["duration"] = slots["duration"]["value"]
    # Duration arrives as ISO-8601 (e.g. "PT1H"); derive the end time.
    order["endTime"] = (datetime.datetime.strptime(order["startTime"], "%H:%M") +
                        isodate.parse_duration(order["duration"])).strftime("%H:%M")
    # -- availability ----------------------------------------------------
    order = isTimeAvailable(order)
    if order["isAvailable"] == True:
        if event["request"]["intent"]["confirmationStatus"] == "CONFIRMED":
            return bookRoom(event, order)
        return conformation(order)
    unavailable_room = order["room"]
    order["room"] = ''  # force the user to pick a room again
    return askAgain("Requested room " + unavailable_room + " is not available. " +
                    str(order["alternative"]) + " are available.", 'room', order)
def bookRoom(event, order):
    """Persist the booking and return the final Alexa response.

    Delegates the DynamoDB write to bookSlot(); on success builds a spoken
    summary from the raw intent slot values, otherwise returns a generic
    failure response.
    """
    order = bookSlot(order)
    if order["status"] == "COMPLETED":
        # Human-readable summary; the ISO-8601 duration prefix "PT" is
        # stripped for speech (e.g. "PT1H" -> "1H").
        msg = "Book Request For room: "+event["request"]["intent"]["slots"]["room"]["value"] +\
            "From:"+event["request"]["intent"]["slots"]["date"]["value"]+" "+event["request"]["intent"]["slots"]["time"]["value"]+" " +\
            "Duration:" + \
            event["request"]["intent"]["slots"]["duration"]["value"].replace(
                "PT", "")+" "
        # Optional SNS notification, currently disabled.
        # response = sns.publish(TopicArn='arn:aws:sns:eu-west-1:682426607174:suraj',Message=json.dumps(str(msg)))
        return {
            "version": "1.0",
            "response": {
                "outputSpeech": {
                    "type": "PlainText",
                    "text": "Ok, Your request has completed, Here is the request information\n"+msg
                },
                # NOTE(review): card content is placeholder example text --
                # presumably meant to be replaced with the booking summary.
                "card": {
                    "type": "Simple",
                    "title": " your request has completed",
                    "content": "Example of card content. This card has just plain text content.\nThe content is formatted with line breaks to improve readability."
                }
            }
        }
    else:
        return {
            "version": "1.0",
            "response": {
                "outputSpeech": {
                    "type": "PlainText",
                    "text": "Something Went Wrong"
                }
            }
        }
def sessionEndedRequest(event):
    """Return the fixed farewell response for a SessionEndedRequest."""
    farewell = {
        "type": "PlainText",
        "text": "Bye.."
    }
    return {"version": "1.0", "response": {"outputSpeech": farewell}}
def defaultFunction(event):
    """Catch-all response for requests that could not be understood.

    Keeps the session open so the user can retry.
    """
    speech = {
        "type": "PlainText",
        "text": "Sorry I cannot understand that can you please try again"
    }
    return {
        "version": "1.0",
        "response": {
            "outputSpeech": speech,
            "shouldEndSession": False,
        }
    }
def canFulfilled(event):
    """Answer a CanFulfillIntentRequest.

    Returns the affirmative payload for the BookRoom intent; for any other
    intent the function returns None (original behaviour preserved).
    """
    intent_name = event["request"]["intent"]["name"]
    if intent_name != "BookRoom":
        return None
    response = {
        "version": "1.0",
        "response": {
            "canFulfillIntent": {
                "canFulfill": "YES"
            }
        },
        "shouldEndSession": False,
    }
    return response
def launchRequest(event):
    """Return the greeting response for a skill LaunchRequest."""
    greeting = {
        "type": "PlainText",
        "text": "Welcome..."
    }
    return {"version": "1.0", "response": {"outputSpeech": greeting}}
def startDialog(event):
    """Open the BookRoom dialog with every slot empty via Dialog.Delegate.

    The empty order is stored in sessionAttributes so later turns
    (slotValidation) can keep filling it in.
    """
    _order = {
        "room": '',
        "date": '',
        "startTime": '',
        "duration": ''
    }
    res = {"version": "1.0",
           "sessionAttributes": _order,
           "shouldEndSession": False,
           "response": {
               "directives": [
                   {
                       "type": "Dialog.Delegate",
                       "updatedIntent": {
                           "name": "BookRoom",
                           "confirmationStatus": "NONE",
                           "slots": {
                               "date": {
                                   "name": "date",
                                   "value": _order["date"],
                                   "confirmationStatus": "NONE"
                               },
                               "duration": {
                                   "name": "duration",
                                   "value": _order["duration"],
                                   "confirmationStatus": "NONE"
                               },
                               "time": {
                                   "name": "time",
                                   "value": _order["startTime"],
                                   "confirmationStatus": "NONE"
                               },
                               "room": {
                                   "name": "room",
                                   "value": _order["room"],
                                   "resolutions": {},
                                   "confirmationStatus": "NONE"
                               }
                           }
                       }
                   }
               ]
           }
           }
    return res
def main(event, context):
    """Lambda entry point: route the incoming Alexa request to a handler.

    BookRoom intents are dispatched by dialog state (STARTED / COMPLETED /
    in-progress); other request types get their dedicated handlers.

    FIX: previously any unmatched request (e.g. an unknown intent name)
    fell through and returned None, producing an invalid Alexa response;
    the already-defined (but unused) defaultFunction is now the fallback.
    """
    print(event)  # raw request logged to CloudWatch for debugging
    if event["request"]["type"] == "IntentRequest":
        if event["request"]["intent"]["name"] == "BookRoom":
            if event["request"]["dialogState"] == "COMPLETED":
                return bookRoom(event, event["session"]["attributes"])
            elif event["request"]["dialogState"] == "STARTED":
                return startDialog(event)
            else:
                return slotValidation(event)
    if event["request"]["type"] == "SessionEndedRequest":
        return sessionEndedRequest(event)
    if event["request"]["type"] == "CanFulfillIntentRequest":
        return canFulfilled(event)
    if event["request"]["type"] == "LaunchRequest":
        return launchRequest(event)
    return defaultFunction(event)
5368f3dc9a1620c617284f05ce8a5bd699fc49e6 | 1,382 | py | Python | Notes_20-29/KnoxFlaskAPI.py | joseeden/notes-cbt-nuggets-devasc | 7e1b3e08be50de6b4c58ca77944d93c81f0db8ab | [
"MIT"
] | 1 | 2021-01-18T08:13:16.000Z | 2021-01-18T08:13:16.000Z | Notes_20-29/KnoxFlaskAPI.py | joseeden/cbt-nuggets-devasc-notes | 7e1b3e08be50de6b4c58ca77944d93c81f0db8ab | [
"MIT"
] | null | null | null | Notes_20-29/KnoxFlaskAPI.py | joseeden/cbt-nuggets-devasc-notes | 7e1b3e08be50de6b4c58ca77944d93c81f0db8ab | [
"MIT"
] | 1 | 2021-05-27T01:55:49.000Z | 2021-05-27T01:55:49.000Z |
#**********************************************************************************************************************#
# KnoxFlaskAPI.py
#**********************************************************************************************************************#
# 2021-01-15 17:44:10
# EDEN:
# This is the code for the flask API used in Knox Hutchinson's docker section in the CBT Nuggets - DevAsc course.
# You can check out the code here: https://github.com/DataKnox/CodeSamples/blob/master/Python/Docker/myAPI/myAPI.py
# Note that you'll have to install flask on your machine first, before you can run the code below.
# For installing flask, you can follow this tutorial: https://code.visualstudio.com/docs/python/tutorial-flask
#-------------------------------------------------START OF CODE------------------------------------------------------#
from flask import Flask, jsonify
# Flask application object for this single-endpoint demo API.
app = Flask(__name__)
@app.route('/api/endpoint', methods=['GET'])
def get_data():
    """Handle GET /api/endpoint: return {"message": "received"} with HTTP 200."""
    return (jsonify({'message': 'received'}), 200)
if __name__ == '__main__':
    # Bind to all interfaces so the containerised API is reachable from outside.
    app.run(host='0.0.0.0')
#---------------------------------------------------END OF CODE------------------------------------------------------#
# To test if script worked, run the following commands:
#**********************************************************************************************************************#
| 38.388889 | 120 | 0.424023 |
cb85694377ee4258b274e313364c30514f76d993 | 2,427 | py | Python | samples/sunrgbd/sun_config.py | Yannick947/Mask_RCNN | 19cb2a26003870f80fac3feae834b82725530557 | [
"MIT"
] | 3 | 2021-03-19T22:10:25.000Z | 2022-02-27T20:41:43.000Z | samples/sunrgbd/sun_config.py | Yannick947/Mask_RCNN | 19cb2a26003870f80fac3feae834b82725530557 | [
"MIT"
] | null | null | null | samples/sunrgbd/sun_config.py | Yannick947/Mask_RCNN | 19cb2a26003870f80fac3feae834b82725530557 | [
"MIT"
] | null | null | null | import sys
import os
import numpy as np
from mrcnn.config import Config
# VIA-format annotation file name used by the SUN RGB-D dataset loader.
ANNOTATION_FILENAME = 'via_regions_sunrgbd.json'
# removed 'tool', put 'wardrobe' and 'desk' to COMBINED_CLASSES
CLASSES = ['bed', 'chair', 'table', 'sofa', 'bookcase']
# Classes folded into another label during training ('desk' counts as 'table').
COMBINED_CLASSES = {'desk': 'table'}
# Text file listing image paths to exclude from the dataset.
IGNORE_IMAGES_PATH = os.path.abspath('./skip_image_paths.txt')
# Root directory of the project
ROOT_DIR = os.path.abspath('./')
# Import Mask RCNN
sys.path.append(ROOT_DIR)  # To find local version of the library
# Best-effort GPU pinning: pick the first free GPU reported by nvgpu, or
# bail out when none is available. Any failure (e.g. nvgpu not installed)
# is logged and ignored so CPU-only runs still work.
try:
    print('Try to set gpu ressources ...')
    import nvgpu
    available_gpus = nvgpu.available_gpus()
    if type(available_gpus) is list and len(available_gpus) > 0:
        os.environ["CUDA_VISIBLE_DEVICES"] = available_gpus[0]
        print('Using GPU ', available_gpus[0])
    else:
        print('No free gpu found, try later..')
        # NOTE(review): exit() at import time aborts any importer of this
        # module when no GPU is free -- confirm this is the desired policy.
        exit()
except Exception as e:
    print(e)
    pass
class SunConfig(Config):
    """Configuration for training on the sun dataset.
    Derives from the base Config class and overrides some values.
    """
    # Mean of depth channel was calculated with tests/get_mean_channels
    # Other means of channels were calculated as well but are mostly similar to the provided ones
    # and are therefore not changed
    MEAN_DEPTH_VALUE = 61
    def __init__(self, depth_mode: bool = False):
        """Initialize configuration of sunrgbd dataset.
        Arguments:
        :param depth_mode: Flag to indicate whether depth channel (4th dimension) is available
        """
        self.depth_mode = depth_mode
        if depth_mode:
            # RGB-D input: widen the input to 4 channels and extend the
            # per-channel mean with the measured depth mean (must happen
            # before super().__init__() derives dependent values).
            print('Depth mode enabled')
            self.IMAGE_CHANNEL_COUNT = 4
            self.MEAN_PIXEL = np.append(self.MEAN_PIXEL, self.MEAN_DEPTH_VALUE)
        super().__init__()
        print('Following classes are used: ', *CLASSES)
    # Give the configuration a recognizable name
    NAME = "sun"
    # NUMBER OF GPUs to use. When using only a CPU, this needs to be set to 1.
    GPU_COUNT = 1
    # Into 12GB GPU memory, can fit two images.
    IMAGES_PER_GPU = 3
    # Number of classes (including background)
    NUM_CLASSES = 1 + len(CLASSES)  # Background + num_classes
    # Skip detections with < 90% confidence
    DETECTION_MIN_CONFIDENCE = 0.9
class InferenceConfig(SunConfig):
    """SunConfig variant for inference: one image on one GPU at a time."""
    GPU_COUNT = 1
    IMAGES_PER_GPU = 1
    def __init__(self, depth_mode):
        super().__init__(depth_mode=depth_mode)
| 27.896552 | 98 | 0.677379 |
703cda44669aa8b8dba47e4158d637e99434680a | 4,281 | py | Python | Extra/image_classification.py | seunboy1/Coursera-TensorFlow-Developer-Course | 0f0b2b2637c561293e7f7fef5acaaac5e12c1ca2 | [
"MIT"
] | 3 | 2021-07-04T18:49:10.000Z | 2021-08-15T13:54:30.000Z | Extra/image_classification.py | seunboy1/Coursera-TensorFlow-Developer-Course | 0f0b2b2637c561293e7f7fef5acaaac5e12c1ca2 | [
"MIT"
] | null | null | null | Extra/image_classification.py | seunboy1/Coursera-TensorFlow-Developer-Course | 0f0b2b2637c561293e7f7fef5acaaac5e12c1ca2 | [
"MIT"
] | null | null | null | import os
import PIL
import random
import pathlib
import shutil
import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import Conv2D, Dense, MaxPooling2D, Flatten
def download_data():
    """Download and extract the TF flowers archive; return its local Path."""
    archive_url = ("https://storage.googleapis.com/download.tensorflow.org/"
                   "example_images/flower_photos.tgz")
    local_dir = tf.keras.utils.get_file(
        'flower_photos', origin=archive_url, untar=True)
    return pathlib.Path(local_dir)
def preprocessing(train_dir, val_dir = None):
    """Create Keras image generators for training (with augmentation) and
    optionally validation (rescaling only).

    Returns ``train_generator`` alone when ``val_dir`` is None, otherwise
    ``(train_generator, validation_generator)``.

    NOTE(review): class_mode="binary" with five flower classes looks
    inconsistent with the model's 5-way softmax /
    sparse_categorical_crossentropy -- "sparse" is presumably intended;
    confirm before relying on the labels.
    """
    train_datagen = ImageDataGenerator(
        rescale=1. / 255,
        rotation_range=40,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.2,
        zoom_range=0.2,
        fill_mode="nearest",
        horizontal_flip=True
    )
    train_generator = train_datagen.flow_from_directory(
        train_dir,
        target_size=(180, 180),
        batch_size=32,
        class_mode="binary"
    )
    if val_dir is not None:
        # Validation data is only rescaled, never augmented.
        validation_datagen = ImageDataGenerator(rescale=1. / 255)
        validation_generator = validation_datagen.flow_from_directory(
            val_dir,
            target_size=(180, 180),
            batch_size=32,
            class_mode="binary"
        )
        return train_generator, validation_generator
    return train_generator
def split_data(SOURCE, TRAINING, TESTING, SPLIT_SIZE):
    """Randomly partition every file in SOURCE between TRAINING and TESTING.

    SPLIT_SIZE is the fraction (0..1) routed to TRAINING. Files are *moved*,
    so SOURCE is left empty afterwards.
    """
    entries = os.listdir(SOURCE)
    shuffled = random.sample(entries, len(entries))
    cutoff = int(len(shuffled) * SPLIT_SIZE)
    destinations = [(name, TRAINING) for name in shuffled[:cutoff]]
    destinations += [(name, TESTING) for name in shuffled[cutoff:]]
    for name, target_dir in destinations:
        shutil.move(f"{SOURCE}/{name}", target_dir)
def create_data(data_dir):
    """Build the flowers/training and flowers/testing trees and fill them.

    For each flower category, creates the two destination directories and
    moves 80% of the images from ``data_dir/<category>`` into training and
    the rest into testing (via split_data).

    Improvement over the original: the ten hand-written directory variables
    and the racy exists()/makedirs() checks are replaced by a single loop
    using ``os.makedirs(..., exist_ok=True)``.
    """
    categories = ["tulips", "dandelion", "roses", "sunflowers", "daisy"]
    split_size = .8
    for category in categories:
        train_dir = os.path.join("flowers/training", category)
        test_dir = os.path.join("flowers/testing", category)
        os.makedirs(train_dir, exist_ok=True)
        os.makedirs(test_dir, exist_ok=True)
        split_data(os.path.join(data_dir, category),
                   train_dir, test_dir, split_size)
def architecture():
    """Build and compile a small CNN for 5-way flower classification.

    Four Conv/MaxPool stages over 180x180 RGB input, followed by a dense
    head ending in a 5-unit softmax; compiled with Adam and
    sparse_categorical_crossentropy (integer labels expected).
    """
    model = Sequential([
        Conv2D(32, 3, activation="relu", input_shape=(180, 180, 3)),
        MaxPooling2D((2, 2)),
        Conv2D(64, 3, activation="relu"),
        MaxPooling2D((2, 2)),
        Conv2D(64, 3, activation="relu"),
        MaxPooling2D((2, 2)),
        Conv2D(128, 3, activation="relu"),
        MaxPooling2D((2, 2)),
        Flatten(),
        Dense(128, activation='relu'),
        Dense(5, activation="softmax")
    ])
    model.compile(optimizer="adam", #RMSprop(lr=0.001),
                  loss='sparse_categorical_crossentropy',
                  metrics=['acc'])
    return model
if __name__ == '__main__':
    # End-to-end run: download the dataset, split it on disk, then train.
    data_dir = download_data()
    create_data(data_dir)
    train_dir = "flowers/training"
    val_dir = "flowers/testing"
    train_generator, validation_generator = preprocessing(train_dir, val_dir)
    model = architecture()
    model.summary()
    # NOTE(review): fit_generator is deprecated in modern TF in favour of
    # model.fit -- confirm the pinned TF version before changing.
    history = model.fit_generator(train_generator,
                                  epochs=15,
                                  verbose=1,
                                  validation_data=validation_generator)
94b50ecd6575e8d6513fb444bff3fa9f26a114b9 | 7,787 | py | Python | netket/operator/_local_cost_functions.py | gpescia/MyNetKet | 958510966a5870d9d491de0628903cf1fc210921 | [
"Apache-2.0"
] | 352 | 2018-04-24T16:45:10.000Z | 2022-03-31T01:15:34.000Z | netket/operator/_local_cost_functions.py | gpescia/MyNetKet | 958510966a5870d9d491de0628903cf1fc210921 | [
"Apache-2.0"
] | 947 | 2018-04-24T20:16:17.000Z | 2022-03-31T17:33:52.000Z | netket/operator/_local_cost_functions.py | gpescia/MyNetKet | 958510966a5870d9d491de0628903cf1fc210921 | [
"Apache-2.0"
] | 148 | 2018-04-25T02:44:20.000Z | 2022-03-11T11:42:34.000Z | # Copyright 2021 The NetKet Authors - All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
from inspect import signature
import jax
import jax.numpy as jnp
from netket import jax as nkjax
# The following dicts store some 'properties' of cost functions. The keys are jitted
# cost functions. Access should be performed in jit blocks in order to be 0-cost.

# batch_axes for jax.vmap of cost functions
_batch_axes = {}
# unjitted version of the cost function
_unjitted_fun = {}
# Whether the cost function has in general complex output, or only real
_outdtype = {}


def define_local_cost_function(
    fun,
    static_argnums=0,
    batch_axes=None,
):
    """
    Decorator used to define a local cost function (and, implicitly, the machinery
    needed to batch it and differentiate it later).

    The decorated function must be a jax-compatible function taking the following
    positional arguments:

    - the variational function, evaluating a single input and returning a scalar;
    - a pytree of parameters for the variational function (gradients are computed
      with respect to this argument);
    - N additional (non-static) positional arguments containing any additional data.

    Args:
        fun: the jax-compatible local cost kernel.
        static_argnums: argument indices treated as static by `jax.jit`
            (defaults to 0, the variational function itself).
        batch_axes: `jax.vmap`-style axis specification for batching. By default
            it is ``(None, None, 0, ..., 0)``: no batching for the variational
            function and its parameters, batching along the 0-th dimension of
            every remaining data argument.

    Returns:
        The jitted cost function, registered in the module-level property dicts.

    Example:
        @partial(define_local_cost_function, static_argnums=0,
                 batch_axes=(None, None, 0, 0, 0))
        def local_energy_kernel(logpsi, pars, vp, mel, v):
            return jax.numpy.sum(mel * jax.numpy.exp(logpsi(pars, vp) - logpsi(pars, v)))
    """
    jitted_fun = jax.jit(fun, static_argnums=static_argnums)

    ig = signature(jitted_fun)
    npars = len(ig.parameters)

    if npars < 2:
        raise ValueError("Local cost functions should have at least 2 parameters.")

    # Default, as documented: do not batch the function and the parameters, and
    # batch every remaining (data) argument along its leading axis.  The previous
    # all-`None` default could never work: `jax.vmap` with `out_axes=0` requires
    # at least one mapped input axis.
    if batch_axes is None:
        batch_axes = (None, None) + (0,) * (npars - 2)

    _batch_axes[jitted_fun] = batch_axes
    _unjitted_fun[jitted_fun] = fun

    return jitted_fun
# In the code below, we define a jitted function taking as arguments the
# jax forward-function and the pytree of parameters, and a standard python
# function taking as argument the full machine. The stable API only involves
# the one taking a full JaxMachine, as it allows us to add some logic (like
# for real/complex valued machines) in the future if needed without
# breaking the API.

# In the following, all _jitted functions assume that the arguments
# are passed in this order:
# 0 - (static) local_cost_fun (for example a local_energy kernel).
# 1 - (static) the nn function
# 2 - weights for the nn function in pytree format (directions of the gradient)
# 3 - various parameters
# Also assumes that args 1..N are the args (in that order) of local_cost_fun


@partial(jax.jit, static_argnums=(0, 1))
def _local_cost_function(local_cost_fun, logpsi, pars, *args):
    """Batched (vmapped) evaluation of `local_cost_fun`.

    The unjitted kernel and its batch axes are looked up in the module-level
    registries populated by `define_local_cost_function`; the lookup happens
    at trace time, so it has no runtime cost inside the jitted computation.
    """
    local_cost_fun_vmap = jax.vmap(
        _unjitted_fun[local_cost_fun],
        in_axes=_batch_axes[local_cost_fun],
        out_axes=0,
    )

    return local_cost_fun_vmap(logpsi, pars, *args)
def local_cost_function(local_cost_fun, model, pars, *args):
    """
    local_cost_function(local_cost_fun, model, pars, *args)

    Function to compute the local cost function in batches for the parameters
    `pars` of the variational function `model`.

    Args:
        local_cost_fun: the cost function (as returned by
            `define_local_cost_function`)
        model: the variational ansatz (forward function)
        pars: pytree of parameters of the ansatz
        *args: additional (batched) data arguments

    Returns:
        the values of `local_cost_fun` with parameters `pars` for the batches `*args`
    """
    return _local_cost_function(local_cost_fun, model, pars, *args)
# Starting from the 4th argument, these are the same arguments as the cost
# function itself.
# dtype: dtype of pars
# outdtype: dtype of logpsi(pars, *args)
def __local_cost_and_grad_function(local_cost_fun, logpsi, pars, *args):
    """Value and gradient (w.r.t. `pars`) of a single, unbatched cost kernel."""
    lcfun_u = partial(_unjitted_fun[local_cost_fun], logpsi)
    der_local_cost_fun = nkjax.value_and_grad(lcfun_u, argnums=0)

    return der_local_cost_fun(pars, *args)


# Jitted entry point; `local_cost_fun` and `logpsi` are static arguments.
_local_cost_and_grad_function = jax.jit(
    __local_cost_and_grad_function, static_argnums=(0, 1)
)
def local_cost_and_grad_function(local_cost_fun, machine, parameters, *args):
    """
    local_cost_and_grad_function(local_cost_fun, machine, parameters, *args)

    Function to compute the gradient and value of the local cost function, with
    respect to `parameters` of the `machine`.

    Args:
        local_cost_fun: the cost function
        machine: netket's JaxMachine containing the variational ansatz
        parameters: pytree of weights the gradient is taken with respect to
        *args: additional arguments

    Returns:
        the value of `local_cost_fun(machine, parameters, *args)`
        the gradient of `local_cost_fun(machine, parameters, *args)` w.r.t. `parameters`
    """
    return _local_cost_and_grad_function(local_cost_fun, machine, parameters, *args)
@partial(jax.jit, static_argnums=(0, 1))
def _local_costs_and_grads_function(local_cost_fun, logpsi, pars, *args):
    """Batched (vmapped) values and gradients of the local cost kernel.

    The kernel argument itself is not batched (`None` axis); the remaining
    arguments follow the batch axes registered for `local_cost_fun`.
    """
    local_costs_and_grads_fun = jax.vmap(
        __local_cost_and_grad_function,
        in_axes=(None,) + _batch_axes[local_cost_fun],
        out_axes=(0, 0),
    )

    return local_costs_and_grads_fun(local_cost_fun, logpsi, pars, *args)
def local_costs_and_grads_function(local_cost_fun, machine, parameters, *args):
    """
    local_costs_and_grads_function(local_cost_fun, machine, parameters, *args)

    Function to compute the value and the gradient of the `local_cost_fun`
    function with respect to `parameters` of the `machine`, vmapped along the
    0-th dimension of `*args`.

    Args:
        local_cost_fun: the cost function
        machine: netket's JaxMachine containing the variational ansatz
        parameters: pytree of weights the gradients are taken with respect to
        *args: additional (batched) arguments

    Returns:
        the value of the local cost function for every `*args` input
        the gradient with respect to the weights
    """
    return _local_costs_and_grads_function(
        local_cost_fun,
        machine,
        parameters,
        *args,
    )
@partial(define_local_cost_function, static_argnums=0, batch_axes=(None, None, 0, 0, 0))
def local_value_cost(logpsi, pars, vp, mel, v):
    """Local estimator: sum over connected configurations of
    mel * exp(logpsi(vp) - logpsi(v))."""
    log_ratio = logpsi(pars, vp) - logpsi(pars, v)
    return jnp.sum(mel * jnp.exp(log_ratio))
@partial(define_local_cost_function, static_argnums=0, batch_axes=(None, None, 0, 0, 0))
def local_value_op_op_cost(logpsi, pars, σp, mel, σ):
    """Local estimator evaluated on doubled configurations (σ, σ').

    NOTE(review): assumes `σp` is a batch of connected configurations for a
    single configuration `σ` — confirm against callers.
    """
    # Concatenate every connected configuration σp with the fixed σ.
    σ_σp = jax.vmap(lambda σp, σ: jnp.hstack((σp, σ)), in_axes=(0, None))(σp, σ)
    σ_σ = jnp.hstack((σ, σ))

    return jnp.sum(mel * jnp.exp(logpsi(pars, σ_σp) - logpsi(pars, σ_σ)))
| 36.38785 | 88 | 0.720046 |
81934394389f6681c577293790ba2b078d8aee8d | 652 | py | Python | myspiders/spider_console_news.py | zhouhongf/spider_news | 6ea4a80d050bf3ab70724f45312c5a2580e714a6 | [
"MIT"
] | 2 | 2021-11-27T06:40:44.000Z | 2022-02-23T11:19:11.000Z | myspiders/spider_console_news.py | zhouhongf/spider_news | 6ea4a80d050bf3ab70724f45312c5a2580e714a6 | [
"MIT"
] | null | null | null | myspiders/spider_console_news.py | zhouhongf/spider_news | 6ea4a80d050bf3ab70724f45312c5a2580e714a6 | [
"MIT"
] | 1 | 2021-12-15T09:11:16.000Z | 2021-12-15T09:11:16.000Z | import os
import sys
from config import Config
from importlib import import_module
# sys.path.append('../')
def file_name(file_dir=None):
    """Return the module names (without ``.py``) of all spider files in *file_dir*.

    Args:
        file_dir: directory to scan; when omitted, defaults to the project's
            ``myspiders/spider_news`` directory. The default is resolved lazily
            so that merely importing this module does not require ``Config`` to
            be fully initialised.

    Returns:
        list of file names ending in ``_spider`` (the ``.py`` suffix stripped),
        in ``os.listdir`` order.
    """
    if file_dir is None:
        file_dir = os.path.join(Config.BASE_DIR, 'myspiders/spider_news')
    # Strip only the trailing '.py' suffix; str.replace('.py', '') would also
    # mangle a '.py' occurring elsewhere in the file name.
    return [f[:-len('.py')] for f in os.listdir(file_dir) if f.endswith('_spider.py')]
def spider_console_news():
    """Import every discovered news-spider module and invoke its start()."""
    all_files = file_name()
    for spider in all_files:
        spider_module = import_module("myspiders.spider_news.{}".format(spider))
        spider_module.start()
if __name__ == '__main__':
    # Run every discovered spider when invoked as a script.
    spider_console_news()
    # pass
| 24.148148 | 81 | 0.653374 |
fff327fc597ba0f45d4433491111ed4e79dba68f | 2,466 | py | Python | tutorials/TutorialCollection.py | lmidolo/samplemaker | 8211af0e4cea60aea8f5720d5ff0ee532c442123 | [
"BSD-3-Clause"
] | null | null | null | tutorials/TutorialCollection.py | lmidolo/samplemaker | 8211af0e4cea60aea8f5720d5ff0ee532c442123 | [
"BSD-3-Clause"
] | null | null | null | tutorials/TutorialCollection.py | lmidolo/samplemaker | 8211af0e4cea60aea8f5720d5ff0ee532c442123 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Tutorial device collection
"""
# This is how you create a collection of devices, just add all the classes in this file
# Check the end of this file, we run a command to make samplemaker aware of our devices
import samplemaker.makers as sm # used for drawing
from samplemaker.devices import Device, registerDevicesInModule # We need the registerDevicesInModule function
# class definition
class FreeFreeMembrane(Device):
    """Free-free membrane resonator device for the samplemaker device registry.

    Geometry: a rectangular membrane suspended by two tethers inside a circular
    support ring (see 10.1103/PhysRevB.98.155316).
    """

    # We need to implement a few mandatory functions here:
    def initialize(self):
        # This function sets up some variables, like the unique identifier name
        self.set_name("CUSTOM_FFM")
        # Also add a description, useful for documenting later
        self.set_description("Free free membrane as in 10.1103/PhysRevB.98.155316, etc etc")

    def parameters(self):
        # Define all the parameters of the device and their default values.
        # You can specify what type a parameter has and what the minimum/maximum
        # allowed values are. Default is float and range (0, infinity).
        self.addparameter("L", 40, "Length of the membrane", param_type=float, param_range=(0.5,150))
        self.addparameter("W", 12.5, "Width of the membrane")
        self.addparameter("tetW", 2, "Tether width")
        self.addparameter("tetOff", 11, "Tether offset from the center")
        self.addparameter("R", 30, "Support ring radius")

    def geom(self):
        # This is where we place the commands for drawing!
        # This function should return a GeomGroup.
        # We can fetch the parameters first to shorten the notation. Note that
        # you can choose whether a type cast should be made (i.e. forcing the
        # parameter to be of the type specified in the addparameter command)
        # and whether it should be clipped to the allowed range.
        p = self.get_params(cast_types=True,clip_in_range=True)
        # Draw the membrane
        mem = sm.make_rect(0,0,p["W"],p["L"])
        # Draw one tether
        tet = sm.make_rect(0,p["tetOff"],p["R"]*2,p["tetW"])
        # Mirror to get the second one
        tet2 = tet.copy()
        tet2.mirrorY(0)
        mem+=tet+tet2
        # Support ring
        ring = sm.make_circle(0, 0, p["R"],to_poly=True,vertices=64)
        # Subtract membrane + tethers from the ring region (boolean difference)
        ring.boolean_difference(mem, 1, 1)
        return ring
### Important: register all devices in this module
registerDevicesInModule(__name__) | 44.035714 | 111 | 0.667478 |
912b9776a346a913c787ac26d80eff8862a03130 | 6,974 | py | Python | triplet.py | leonardbereska/master | a654b3d9d4520fa3141a4595ee5ee0f9c2cfd4fe | [
"MIT"
] | null | null | null | triplet.py | leonardbereska/master | a654b3d9d4520fa3141a4595ee5ee0f9c2cfd4fe | [
"MIT"
] | null | null | null | triplet.py | leonardbereska/master | a654b3d9d4520fa3141a4595ee5ee0f9c2cfd4fe | [
"MIT"
] | null | null | null | import torch.optim as optim
import torchvision
import torch
from torch.autograd import Variable
import numpy as np
from torch.nn import functional as F
import dataset
import helpers
import models
from config import *
# Sanity note: file_name is expected to be 'triplet', 'bigbottle' or 'tripletpro'.
# assert(file_name == 'triplet' or file_name == 'bigbottle' or file_name == 'tripletpro')
# writer is used for scalar/image logging below; save_dir is the output directory.
writer, save_dir = helpers.init(gpu, file_name, experiment_name)
# Alternative architectures, kept for reference:
# model = models.Triplet().cuda()
# model = models.BigBottle2(n_layer=n_layer).cuda()
model = models.TripletPro().cuda()
model.apply(models.init_weights)  # xavier init
optimizer = optim.Adam(model.parameters(), lr=lr)
def run(n_epochs):
    """Train the triplet model for `n_epochs` epochs.

    Per batch: reconstruct the middle frame (frame1) from the two outer frames
    (frame0, frame2) and a random frame of the same video, minimising an L1
    reconstruction loss. Every `log_interval` epochs the test loss is computed
    and qualitative results (reconstructions, pose/appearance switches and a
    linear interpolation in pose space) are logged via `show_images`.
    """
    for epoch in range(n_epochs):
        for n, (frame0, frame1, frame2, frame_rand) in enumerate(dataset.train_loader):
            niter = epoch * len(dataset.train_loader) + n  # count gradient updates
            model.train()
            frame0 = Variable(frame0).cuda()
            frame1 = Variable(frame1).cuda()
            frame2 = Variable(frame2).cuda()
            frame_rand = Variable(frame_rand).cuda()

            # Appearance constancy loss (disabled)
            # a1 = model.appearance(frame0)
            # a2 = model.appearance(frame_rand)
            # loss_appear = F.l1_loss(a1, a2)  # two frames in video should have same appearance

            # Pose constancy loss (disabled)
            # p1 = model.pose(frame0)
            # frame_trans = frame0  # insert some transform here e.g. contrast, color inversion, small transl/rotations
            # p2 = model.pose(frame_trans)
            # loss_pose = F.l1_loss(p1, p2)  # pose should not change under transformations

            # Reconstruction loss
            optimizer.zero_grad()
            # output, mu, log_var = model(frame0, frame2)
            # loss = helpers.vae_loss(output, frame1, mu=mu, logvar=log_var, batch_size=batch_size, img_size=img_size,
            #                         nc=nc)
            output = model(frame0, frame2, frame_rand)
            loss_reconst = F.l1_loss(output, frame1)  # TODO make a proper VAE loss
            loss = loss_reconst  # + loss_appear  # + loss_pose
            loss.backward()
            optimizer.step()

            train_loss = loss.data[0]
            writer.add_scalar('Loss/Train', train_loss, niter)

        if epoch % log_interval == 0:
            if print_output:
                print("Epoch [{}/{}], Gradient Step: {}, Train Loss: {:.4f}"
                      .format(epoch, n_epochs, (epoch + 1) * len(dataset.train_loader), train_loss))

            # Test loss over the whole test loader.
            model.eval()
            test_loss = 0
            for n, (frame0, frame1, frame2, frame_rand) in enumerate(dataset.test_loader):
                frame0 = Variable(frame0).cuda()
                frame1 = Variable(frame1).cuda()
                frame2 = Variable(frame2).cuda()
                frame_rand = Variable(frame_rand).cuda()
                # output, mu, log_var = model(frame0, frame2)
                # loss = helpers.vae_loss(output, frame1, mu=mu, logvar=log_var, batch_size=batch_size, img_size=img_size,
                #                         nc=nc)
                output = model(frame0, frame2, frame_rand)
                loss = F.l1_loss(output, frame1)
                test_loss += loss.data[0]
            test_loss /= len(dataset.test_loader)
            writer.add_scalar('Loss/Test', test_loss, epoch)

            # Test reconstruction quality for images from train (and test) set.
            # TODO new eval.py for inspecting latent space
            # phases = ['train', 'test']
            phases = ['train']
            for phase in phases:
                if phase == 'train':
                    evalset = dataset.trainset
                else:
                    evalset = dataset.testset

                # Test triplet reconstruction on a random subset.
                idx = np.random.choice(range(evalset.num_subsets))  # random index of triplet
                frames = evalset.get_subset(idx)  # triplet from train data
                inputs = list(f.view([1] + [i for i in f.shape]) for f in frames)  # format for batch
                frames = list(Variable(frame).cuda() for frame in inputs)
                outputs = list()
                outputs.append(model.reconstruct(frames[0]))
                outputs.append(model.forward(frames[0], frames[2], frames[3]))
                outputs.append(model.reconstruct(frames[2]))
                outputs.append(model.reconstruct(frames[3]))  # also reconstruct random frame
                outputs = [out.data.cpu() for out in outputs]
                show_images(inputs+outputs, 4, 'Interpolation', epoch)

                # Test pose and appearance switch between two frames.
                # video = np.random.choice(evalset.sequences)  # same video
                # a, b = np.random.choice(video, 2)
                # a = evalset.get_image(None, img_path=a)
                # b = evalset.get_image(None, img_path=b)
                video1, video2 = np.random.choice(evalset.sequences, 2)  # different videos
                a = np.random.choice(video1)
                b = np.random.choice(video2)
                a = evalset.get_image(None, img_path=a)
                b = evalset.get_image(None, img_path=b)
                a = a.view([1] + [i for i in a.shape])
                b = b.view([1] + [i for i in b.shape])
                p_a = model.pose(Variable(a).cuda())
                p_b = model.pose(Variable(b).cuda())
                a_a = model.appearance(Variable(a).cuda())
                a_b = model.appearance(Variable(b).cuda())
                x_ab = model.generate(p_a, a_b)  # pose a, appearance b
                x_ba = model.generate(p_b, a_a)
                x_ab = x_ab.data.cpu()
                x_ba = x_ba.data.cpu()
                show_images([a, b, x_ab, x_ba], 2, 'Switch Pose/Appearance', epoch)

                # Test linear interpolation in pose space over a short sequence.
                length = 5
                seq = video1[0:length]
                seq = [evalset.get_image(None, img_path=path) for path in seq]
                seq = [img.view([1] + [i for i in img.shape]) for img in seq]
                appear = model.appearance(Variable(seq[0]).cuda())
                p_init = model.pose(Variable(seq[0]).cuda())
                p_end = model.pose(Variable(seq[-1]).cuda())
                alpha = [float(i) / (length-1) for i in range(0, length)]
                poses = [alpha[i] * p_init + (1-alpha[i]) * p_end for i in range(0, length)]
                images = [model.generate(p, appear) for p in poses]
                show_images(images, length, 'Linear Interpolation in Pose space', epoch)
def show_images(img_list, how_many_in_one_row, description, iter):
    """Tile `img_list` into a grid and log it to the summary writer.

    Note: the parameter `iter` shadows the builtin of the same name; here it is
    only the logging step index.
    """
    img_list = torch.cat(img_list, 0)
    grid = helpers.convert_image_np(torchvision.utils.make_grid(img_list, how_many_in_one_row))
    writer.add_image(description, grid, iter)


# torch.save(model.state_dict(), '{}/triplet.pkl'.format(save_dir))
# NOTE: training starts at import time of this module.
run(num_epochs)
| 44.139241 | 122 | 0.567823 |
7fb5159a71f489f9c8b111d516a335c971bc5133 | 4,036 | py | Python | eval_scripts/ppi_eval.py | SherylHYX/GraphSAGE | 4d54c8165605502e4e358cfc6c5efbeddcc62221 | [
"MIT"
] | null | null | null | eval_scripts/ppi_eval.py | SherylHYX/GraphSAGE | 4d54c8165605502e4e358cfc6c5efbeddcc62221 | [
"MIT"
] | null | null | null | eval_scripts/ppi_eval.py | SherylHYX/GraphSAGE | 4d54c8165605502e4e358cfc6c5efbeddcc62221 | [
"MIT"
] | null | null | null | from __future__ import print_function
import json
import numpy as np
from networkx.readwrite import json_graph
from argparse import ArgumentParser
''' To evaluate the embeddings, we run a logistic regression.
Run this script after running unsupervised training.
Baseline of using features-only can be run by setting data_dir as 'feat'
Example:
python eval_scripts/ppi_eval.py ../data/ppi unsup-ppi/n2v_big_0.000010 test
'''
def run_regression(train_embeds, train_labels, test_embeds, test_labels):
    """Fit a multi-output logistic regression on node embeddings and print the
    mean per-label micro-F1 score together with a random-classifier baseline.

    Args:
        train_embeds: (n_train, dim) training embeddings.
        train_labels: (n_train, n_labels) label matrix.
        test_embeds: (n_test, dim) test embeddings.
        test_labels: (n_test, n_labels) label matrix.
    """
    np.random.seed(1)
    from sklearn.linear_model import SGDClassifier
    from sklearn.dummy import DummyClassifier
    from sklearn.metrics import f1_score
    from sklearn.multioutput import MultiOutputClassifier
    dummy = MultiOutputClassifier(DummyClassifier())
    dummy.fit(train_embeds, train_labels)
    log = MultiOutputClassifier(SGDClassifier(loss="log"), n_jobs=10)
    log.fit(train_embeds, train_labels)

    # Predict once per model: the original code re-ran predict() over the full
    # test set for every label column, i.e. n_labels redundant full passes.
    log_preds = log.predict(test_embeds)
    dummy_preds = dummy.predict(test_embeds)

    f1_full = [
        f1_score(test_labels[:, i], log_preds[:, i], average="micro")
        for i in range(test_labels.shape[1])
    ]
    f1_baseline_full = [
        f1_score(test_labels[:, i], dummy_preds[:, i], average="micro")
        for i in range(test_labels.shape[1])
    ]
    print("F1 and F1 baseline are {} and {} respectively.".format(np.mean(f1_full),np.mean(f1_baseline_full)))
if __name__ == '__main__':
    parser = ArgumentParser("Run evaluation on PPI data.")
    parser.add_argument("dataset_dir", help="Path to directory containing the dataset.")
    parser.add_argument("embed_dir", help="Path to directory containing the learned node embeddings. Set to 'feat' for raw features.")
    parser.add_argument("setting", help="Either val or test.")
    args = parser.parse_args()
    dataset_dir = args.dataset_dir
    data_dir = args.embed_dir
    setting = args.setting

    print("Loading data...")
    # Graph and per-node class labels (node id -> label vector).
    G = json_graph.node_link_graph(json.load(open(dataset_dir + "/ppi-G.json")))
    labels = json.load(open(dataset_dir + "/ppi-class_map.json"))
    labels = {int(i):l for i, l in labels.items()}

    # Split nodes by the 'val'/'test' attributes stored on the graph.
    train_ids = [n for n in G.nodes() if not G.node[n]['val'] and not G.node[n]['test']]
    test_ids = [n for n in G.nodes() if G.node[n][setting]]
    train_labels = np.array([labels[i] for i in train_ids])
    if train_labels.ndim == 1:
        train_labels = np.expand_dims(train_labels, 1)
    test_labels = np.array([labels[i] for i in test_ids])
    print("running", data_dir)
    if data_dir == "feat":
        # Baseline: use the raw node features instead of learned embeddings.
        print("Using only features..")
        feats = np.load(dataset_dir + "/ppi-feats.npy")
        ## Logistic gets thrown off by big counts, so log transform num comments and score
        feats[:,0] = np.log(feats[:,0]+1.0)
        feats[:,1] = np.log(feats[:,1]-min(np.min(feats[:,1]), -1))
        feat_id_map = json.load(open(dataset_dir + "/ppi-id_map.json"))
        feat_id_map = {int(id):val for id,val in feat_id_map.items()}
        train_feats = feats[[feat_id_map[id] for id in train_ids]]
        test_feats = feats[[feat_id_map[id] for id in test_ids]]
        print("Running regression..")
        from sklearn.preprocessing import StandardScaler
        scaler = StandardScaler()
        scaler.fit(train_feats)
        train_feats = scaler.transform(train_feats)
        test_feats = scaler.transform(test_feats)
        run_regression(train_feats, train_labels, test_feats, test_labels)
    else:
        # Learned embeddings: val.npy holds the vectors, val.txt the node ids
        # in matching row order.
        embeds = np.load(data_dir + "/val.npy")
        id_map = {}
        with open(data_dir + "/val.txt") as fp:
            for i, line in enumerate(fp):
                id_map[int(line.strip())] = i
        train_embeds = embeds[[id_map[id] for id in train_ids]]
        test_embeds = embeds[[id_map[id] for id in test_ids]]
        print("Running regression..")
| 44.844444 | 134 | 0.67889 |
0f36e9edf37e4e750c6b66bd9230a955d79966bb | 194 | py | Python | setup.py | edurenye/yolov3-tf2 | 08bfbe6111ffbbd0ba93bc3a3165c921ac9f4897 | [
"MIT"
] | null | null | null | setup.py | edurenye/yolov3-tf2 | 08bfbe6111ffbbd0ba93bc3a3165c921ac9f4897 | [
"MIT"
] | null | null | null | setup.py | edurenye/yolov3-tf2 | 08bfbe6111ffbbd0ba93bc3a3165c921ac9f4897 | [
"MIT"
] | null | null | null | from setuptools import setup
setup(name='yolov3_tf2',
version='0.1',
url='https://github.com/zzh8829/yolov3-tf2',
author='Zihao Zhang',
author_email='zzh8829@gmail.com') | 27.714286 | 50 | 0.664948 |
2cb96f3610bbe07ed0c4cb7f1ba74c65da2131a1 | 27,804 | py | Python | barbante/recommendation/tests/fixtures/HybridRecommenderFixture.py | hypermindr/barbante | 40056e9e4f4564461294b3a1d9afc855062350ac | [
"MIT"
] | 10 | 2015-06-01T21:48:16.000Z | 2021-08-20T20:18:48.000Z | barbante/recommendation/tests/fixtures/HybridRecommenderFixture.py | hypermindr/barbante | 40056e9e4f4564461294b3a1d9afc855062350ac | [
"MIT"
] | null | null | null | barbante/recommendation/tests/fixtures/HybridRecommenderFixture.py | hypermindr/barbante | 40056e9e4f4564461294b3a1d9afc855062350ac | [
"MIT"
] | 2 | 2015-06-03T21:54:32.000Z | 2015-11-24T23:13:05.000Z | """ Test fixture for hybrid recommendations.
"""
import datetime as dt
import nose.tools
import barbante.context as ctx
import barbante.maintenance.product_templates as pt
import barbante.maintenance.product_templates_tfidf as pt_tfidf
import barbante.maintenance.user_templates as ut
from barbante.recommendation.tests.fixtures.RecommenderFixture import RecommenderFixture
import barbante.tests as tests
class HybridRecommenderFixture(RecommenderFixture):
""" Class for testing barbante.recommendation.HybridRecommender subclasses.
"""
    def setup(self):
        """ Builds the user and product templates required by the hybrid
            specialist algorithms before each test.
        """
        super().setup()
        ut.generate_templates(self.session_context)
        pt.generate_templates(self.session_context)
        pt_tfidf.generate_templates(self.session_context)
    def test_recommend(self, test_recommendation_quality=True):
        """ Tests whether meaningful recommendations were obtained.

            The quality check is disabled when delegating to the base fixture,
            since a hybrid blend has no single-algorithm quality guarantee.
        """
        super().test_recommend(test_recommendation_quality=False)
def test_recommend_with_exception_in_one_concrete_recommender(self):
""" Tests whether the hybrid recommender recovers from a failure in one of the specialists.
"""
session = tests.init_session(user_id="u_eco_1", algorithm=self.algorithm)
session.algorithm_weights = {self.algorithm: [["Mock", 1.0]]}
session.fill_in_algorithm = None
recommender = session.get_recommender()
result = recommender.recommend(self.n_recommendations)
nose.tools.eq_(result, [], "A failure in a specialist algorithm should yield an empty list")
def test_recommend_non_existing_user(self):
""" Tests whether meaningful recommendations are returned even for unknown users.
"""
session = tests.init_session(user_id="Invalid user id", algorithm=self.algorithm)
recommender = session.get_recommender()
results = recommender.recommend(self.n_recommendations)
nose.tools.ok_(len(results) > 0, "Hybrid recommenders should recommend even for unknown users")
def test_recommend_anonymous_user(self):
""" Tests whether valid recommendations are returned for an anonymous user.
"""
session = tests.init_session(user_id="hmrtmp_AnonymousUser1", algorithm=self.algorithm)
recommender = session.get_recommender()
results = recommender.recommend(self.n_recommendations)
nose.tools.ok_(len(results) > 0, "Hybrid recommenders should recommend even for anonymous users")
    def test_fill_in_products(self):
        """ Tests the merge based on fixed slices.

            POP has more contributions than the other specialists, so after the
            per-algorithm slices are merged the remaining slots must be filled
            in with the surplus POP items.
        """
        n_recommendations = 30

        # Candidate lists per specialist algorithm: [[score], product_id].
        recommendations_by_alg = {"UBCF": [[[50], "UBCF_1"],
                                           [[30], "UBCF_2"],
                                           [[10], "UBCF_3"],
                                           [[5], "UBCF_4"],
                                           [[2], "UBCF_5"]],
                                  "PBCF": [[[50], "PBCF_1"],
                                           [[30], "PBCF_2"],
                                           [[10], "PBCF_3"],
                                           [[5], "PBCF_4"]],
                                  "CB": [[[50], "CB_1"],
                                         [[40], "CB_2"],
                                         [[30], "CB_3"],
                                         [[20], "CB_4"],
                                         [[10], "CB_5"],
                                         [[9], "CB_6"],
                                         [[8], "CB_7"],
                                         [[7], "CB_8"],
                                         [[4], "CB_9"]],
                                  "POP": [[[50], "POP_1"],
                                          [[30], "POP_2"],
                                          [[10], "POP_3"],
                                          [[5], "POP_4"],
                                          [[4], "POP_5"],
                                          [[3], "POP_6"],
                                          [[4], "POP_7"]]}

        session = tests.init_session(user_id="u_eco_1", algorithm=self.algorithm)
        recommender = session.get_recommender()
        merged_recommendations = recommender.merge_algorithm_contributions(recommendations_by_alg, n_recommendations)
        recommender.include_fill_in_recommendations(merged_recommendations, recommendations_by_alg, n_recommendations)
        products_rank = [rec[1] for rec in merged_recommendations]
        # Beyond the merged contributions (18 items), only fill-in POP items remain.
        for item in products_rank[18:]:
            nose.tools.ok_(item.startswith("POP_"), "Wrong rank after merge")
    def test_history_decay_step(self):
        """ Intentionally a no-op for hybrid recommenders. """
        # It is not easy to test decays here, since the same item can be recommended by different algorithms.
        # Since the decay logic is applied by the base Recommender, no big deal we do not repeat the test here.
        pass

    def test_history_decay_rational(self):
        """ Intentionally a no-op for hybrid recommenders. """
        # It is not easy to test decays here, since the same item can be recommended by different algorithms.
        # Since the decay logic is applied by the base Recommender, no big deal we do not repeat the test here.
        pass

    def test_history_decay_exponential(self):
        """ Intentionally a no-op for hybrid recommenders. """
        # It is not easy to test decays here, since the same item can be recommended by different algorithms.
        # Since the decay logic is applied by the base Recommender, no big deal we do not repeat the test here.
        pass

    def test_history_decay_linear(self):
        """ Intentionally a no-op for hybrid recommenders. """
        # It is not easy to test decays here, since the same item can be recommended by different algorithms.
        # Since the decay logic is applied by the base Recommender, no big deal we do not repeat the test here.
        pass

    def test_in_boost(self):
        """ Intentionally a no-op for hybrid recommenders. """
        # It is not easy to test in-boosts here, since the same item can be recommended by different algorithms.
        # Since the in-boost logic is applied by the base Recommender, no big deal we do not repeat the test here.
        pass

    def test_product_age_decay_exponential(self):
        """ Intentionally a no-op for hybrid recommenders. """
        # It is not easy to test decays here, since the same item can be recommended by different algorithms.
        # Since the decay logic is applied by the base Recommender, no big deal we do not repeat the test here.
        pass
    def test_pre_filter_returning_all(self):
        """ An empty pre-scoring filter must return the entire product model. """
        target_user = "u_tec_1"
        custom_settings = {
            'filter_strategy': ctx.BEFORE_SCORING
        }
        intended_count = self.db_proxy.get_product_model_count()
        self._check_empty_filter_returning_all_products(custom_settings, intended_count, target_user)
    def test_pos_filter_returning_all(self):
        """ An empty post-scoring filter must return the same products as an
            unfiltered recommendation (computed first as the reference count).
        """
        target_user = "u_tec_1"
        custom_settings = {
            'filter_strategy': ctx.AFTER_SCORING
        }
        session = tests.init_session(user_id=target_user, custom_settings=custom_settings, algorithm=self.algorithm)
        recommender = session.get_recommender()
        intended_count = len(recommender.recommend(1000))
        self._check_empty_filter_returning_all_products(custom_settings, intended_count, target_user)
    def test_pre_vs_pos_filter_without_missing_pre_filtered_candidates(self):
        """ Pre- and post-scoring filtering must agree when the pre-filtered
            candidate set is large enough for the requested count.
        """
        filter_string = '{"language": "portuguese", "category": "Economia"}'
        n_recommendations = 4
        self._check_pre_and_pos_filters_match(filter_string, n_recommendations)

    def test_pre_vs_pos_filter_with_missing_pre_filtered_candidates(self):
        """ Pre- and post-scoring filtering must agree even when more items are
            requested than the pre-filtered candidate set can supply.
        """
        filter_string = '{"language": "portuguese"}'
        n_recommendations = 15
        self._check_pre_and_pos_filters_match(filter_string, n_recommendations)
    def test_pre_filter_returning_none(self):
        """ A filter matching no products must yield no recommendations (pre-scoring). """
        self._check_result_is_none_for_bad_filter(ctx.BEFORE_SCORING)

    def test_pos_filter_returning_none(self):
        """ A filter matching no products must yield no recommendations (post-scoring). """
        self._check_result_is_none_for_bad_filter(ctx.AFTER_SCORING)

    def test_pre_filter_with_language(self):
        """ Language filter applied before scoring. """
        self._check_language_filter(ctx.BEFORE_SCORING)

    def test_pos_filter_with_language(self):
        """ Language filter applied after scoring. """
        self._check_language_filter(ctx.AFTER_SCORING)
    def test_pre_filter_with_german_language(self):
        """ German-language filter before scoring: 3 products expected. """
        strategy = ctx.BEFORE_SCORING
        intended_count = 3
        self._check_number_of_filtered_products(intended_count, strategy)

    def test_pos_filter_with_german_language(self):
        """ German-language filter after scoring: 2 products expected. """
        strategy = ctx.AFTER_SCORING
        intended_count = 2
        self._check_number_of_filtered_products(intended_count, strategy)

    def test_pre_filter_with_basic_and_parameters(self):
        """ AND-combined filter before scoring: 3 products expected. """
        strategy = ctx.BEFORE_SCORING
        intended_count = 3
        self._check_basic_and_filters(intended_count, strategy)

    def test_pos_filter_with_basic_and_parameters(self):
        """ AND-combined filter after scoring: 2 products expected. """
        strategy = ctx.AFTER_SCORING
        intended_count = 2
        self._check_basic_and_filters(intended_count, strategy)

    def test_pre_filter_with_basic_or_parameters(self):
        """ OR-combined filter before scoring: 5 products expected. """
        strategy = ctx.BEFORE_SCORING
        intended_count = 5
        self._check_basic_or_filters(intended_count, strategy)

    def test_pos_filter_with_basic_or_parameters(self):
        """ OR-combined filter after scoring: 5 products expected. """
        strategy = ctx.AFTER_SCORING
        intended_count = 5
        self._check_basic_or_filters(intended_count, strategy)
def test_pre_filter_with_list_filter(self):
product_count = self.db_proxy.get_product_model_count()
target_user = "u_filter_1"
custom_settings = {
'filter_strategy': ctx.BEFORE_SCORING
}
session = tests.init_session(user_id=target_user,
custom_settings=custom_settings,
context_filter_string='{"language": "german",'
'"source": "source2"}',
algorithm=self.algorithm)
recommender = session.get_recommender()
filtered_products = recommender.recommend(5)
nose.tools.ok_(product_count > 0, 'The filter test requires products to exist')
nose.tools.eq_(len(filtered_products), 2)
session = tests.init_session(user_id=target_user,
custom_settings=custom_settings,
context_filter_string='{"language": "german",'
'"source": "source1"}',
algorithm=self.algorithm)
recommender = session.get_recommender()
filtered_products = recommender.recommend(5)
nose.tools.ok_(len(filtered_products), 1)
session = tests.init_session(user_id=target_user,
custom_settings=custom_settings,
context_filter_string='{"language": "german",'
'"source": ["source2", "source3"]}',
algorithm=self.algorithm)
recommender = session.get_recommender()
filtered_products = recommender.recommend(5)
nose.tools.ok_(len(filtered_products), 2)
def test_pos_filter_with_list_filter(self):
product_count = self.db_proxy.get_product_model_count()
target_user = "u_filter_1"
custom_settings = {
'filter_strategy': ctx.AFTER_SCORING
}
session = tests.init_session(user_id=target_user,
custom_settings=custom_settings,
context_filter_string='{"language": "german",'
'"source": "source2"}',
algorithm=self.algorithm)
recommender = session.get_recommender()
filtered_products = recommender.recommend(5)
nose.tools.ok_(product_count > 0, 'The filter test requires products to exist')
nose.tools.eq_(len(filtered_products), 1)
session = tests.init_session(user_id=target_user,
custom_settings=custom_settings,
context_filter_string='{"language": "german",'
'"source": "source1"}',
algorithm=self.algorithm)
recommender = session.get_recommender()
filtered_products = recommender.recommend(5)
nose.tools.eq_(len(filtered_products), 0)
session = tests.init_session(user_id=target_user,
custom_settings=custom_settings,
context_filter_string='{"language": "german",'
'"source": ["source2", "source3"]}',
algorithm=self.algorithm)
recommender = session.get_recommender()
filtered_products = recommender.recommend(5)
nose.tools.ok_(len(filtered_products), 1)
def test_pre_filter_with_dates(self):
    """BEFORE_SCORING filtering with exact-date and $gt/$lt date-range filters.

    Fix: the original repeated the '$lt one_hour_before' case verbatim
    (an exact duplicate session/assert block); the duplicate is removed and
    the remaining boilerplate is deduplicated through a local helper.
    """
    product_count = self.db_proxy.get_product_model_count()
    nose.tools.ok_(product_count > 0, 'The filter test requires products to exist')
    custom_settings = {
        'filter_strategy': ctx.BEFORE_SCORING
    }

    def recommend_with_filter(filter_string):
        # One session per filter string, mirroring the original test flow.
        session = tests.init_session(user_id="u_filter_1",
                                     custom_settings=custom_settings,
                                     context_filter_string=filter_string,
                                     algorithm=self.algorithm)
        return session.get_recommender().recommend(5)

    date = self.session_context.get_present_date()
    date_str = date.isoformat()
    one_hour_before = (date - dt.timedelta(hours=1)).isoformat()
    one_hour_after = (date + dt.timedelta(hours=1)).isoformat()

    # Exact date match.
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": "{0}"}}'.format(date_str))), 3)
    # Open-ended ranges.
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": {{"$gt": "{0}"}}}}'.format(one_hour_before))), 3)
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": {{"$lt": "{0}"}}}}'.format(one_hour_before))), 0)
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": {{"$lt": "{0}"}}}}'.format(one_hour_after))), 3)
    # Closed ranges: satisfiable and unsatisfiable.
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": {{"$gt": "{0}", "$lt": "{1}"}}}}'.format(
            one_hour_before, one_hour_after))), 3)
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": {{"$lt": "{0}", "$gt": "{1}"}}}}'.format(
            one_hour_before, one_hour_after))), 0)
def test_pos_filter_with_dates(self):
    """AFTER_SCORING filtering with exact-date and $gt/$lt date-range filters.

    Fix: the original repeated the '$lt one_hour_before' case verbatim
    (an exact duplicate session/assert block); the duplicate is removed and
    the remaining boilerplate is deduplicated through a local helper.
    """
    product_count = self.db_proxy.get_product_model_count()
    nose.tools.ok_(product_count > 0, 'The filter test requires products to exist')
    custom_settings = {
        'filter_strategy': ctx.AFTER_SCORING
    }

    def recommend_with_filter(filter_string):
        # One session per filter string, mirroring the original test flow.
        session = tests.init_session(user_id="u_filter_1",
                                     custom_settings=custom_settings,
                                     context_filter_string=filter_string,
                                     algorithm=self.algorithm)
        return session.get_recommender().recommend(5)

    date = self.session_context.get_present_date()
    date_str = date.isoformat()
    one_hour_before = (date - dt.timedelta(hours=1)).isoformat()
    one_hour_after = (date + dt.timedelta(hours=1)).isoformat()

    # Exact date match.
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": "{0}"}}'.format(date_str))), 2)
    # Open-ended ranges.
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": {{"$gt": "{0}"}}}}'.format(one_hour_before))), 2)
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": {{"$lt": "{0}"}}}}'.format(one_hour_before))), 0)
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": {{"$lt": "{0}"}}}}'.format(one_hour_after))), 2)
    # Closed ranges: satisfiable and unsatisfiable.
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": {{"$gt": "{0}", "$lt": "{1}"}}}}'.format(
            one_hour_before, one_hour_after))), 2)
    nose.tools.eq_(len(recommend_with_filter(
        '{{"language": "german","date": {{"$lt": "{0}", "$gt": "{1}"}}}}'.format(
            one_hour_before, one_hour_after))), 0)
def _check_empty_filter_returning_all_products(self, custom_settings, intended_count, target_user):
    """An empty ({}) context filter must not exclude any product."""
    session = tests.init_session(user_id=target_user,
                                 custom_settings=custom_settings,
                                 context_filter_string="{}",
                                 algorithm=self.algorithm)
    filter_count = len(session.get_recommender().recommend(1000))
    nose.tools.ok_(intended_count > 0, 'The filter test requires products to exist')
    nose.tools.eq_(intended_count, filter_count,
                   'An empty filter should bring all products total({0}), returned({1})'.format(
                       intended_count, filter_count))
def _check_result_is_none_for_bad_filter(self, strategy):
    """A filter that matches nothing must yield an empty recommendation list."""
    # NOTE(review): despite the name, this checks for an *empty* result,
    # not None -- preserved from the original behavior.
    session = tests.init_session(user_id="u_tec_1",
                                 custom_settings={'filter_strategy': strategy},
                                 context_filter_string='{"language": "xxxx"}',
                                 algorithm=self.algorithm)
    filtered_products = session.get_recommender().recommend(1000)
    nose.tools.eq_(len(filtered_products), 0)
def _check_language_filter(self, strategy):
    """Filtered recommendations must include every product matching the filter."""
    product_count = self.db_proxy.get_product_model_count()
    target_user = "u_tec_1"
    custom_settings = {'filter_strategy': strategy}
    # Unfiltered pass: find which recommendable products are portuguese.
    session = tests.init_session(user_id=target_user,
                                 custom_settings=custom_settings,
                                 algorithm=self.algorithm)
    product_ids = [product_id for _, product_id in
                   session.get_recommender().recommend(1000)]
    models = self.db_proxy.fetch_product_models(product_ids=product_ids,
                                                max_date=session.get_present_date())
    portuguese_ids = {pid for pid, product in models.items()
                      if product.get_attribute("language") == "portuguese"}
    # Filtered pass: every portuguese product must still be recommended.
    session = tests.init_session(user_id=target_user,
                                 custom_settings=custom_settings,
                                 context_filter_string='{"language": "portuguese"}',
                                 algorithm=self.algorithm)
    filtered_products = session.get_recommender().recommend(1000)
    filtered_product_ids = [product_id for _, product_id in filtered_products]
    nose.tools.ok_(product_count > 0, 'The filter test requires products to exist')
    nose.tools.ok_(
        portuguese_ids.issubset(filtered_product_ids),
        'A filtered request should only bring the products that match the filter requirements,'
        ' total({0}), returned({1})'.format(len(product_ids), len(filtered_product_ids)))
def _check_number_of_filtered_products(self, intended_count, strategy):
    """A {"language": "german"} filter must yield exactly *intended_count* items."""
    custom_settings = {'filter_strategy': strategy}
    product_count = self.db_proxy.get_product_model_count()
    session = tests.init_session(user_id="u_filter_1",
                                 custom_settings=custom_settings,
                                 context_filter_string='{"language": "german"}',
                                 algorithm=self.algorithm)
    filtered_products = session.get_recommender().recommend(5)
    nose.tools.ok_(product_count > 0, 'The filter test requires products to exist')
    nose.tools.eq_(len(filtered_products), intended_count)
def _check_pre_and_pos_filters_match(self, filter_string, n_recommendations):
    """Pre- and post-scoring filtering must rank the shared candidates identically."""
    target_user = "u_tec_1"
    # Pre-scoring pass (custom_settings is passed positionally here,
    # matching the original call shape).
    pre_settings = {
        'filter_strategy': ctx.BEFORE_SCORING,
        'previous_consumption_factor': 0
    }
    session = tests.init_session(pre_settings,
                                 context_filter_string=filter_string,
                                 user_id=target_user,
                                 algorithm=self.algorithm)
    pre_filtered_candidates_count = len(session.filtered_products)
    # sanity check
    nose.tools.ok_(pre_filtered_candidates_count > 0, "Weak test. No pre-filtered candidate products.")
    pre_ranking = [r[1] for r in
                   session.get_recommender().recommend(n_recommendations)]
    # Post-scoring pass.
    pos_settings = {
        'filter_strategy': ctx.AFTER_SCORING,
        'previous_consumption_factor': 0
    }
    session = tests.init_session(pos_settings,
                                 context_filter_string=filter_string,
                                 user_id=target_user,
                                 algorithm=self.algorithm)
    pos_ranking = [r[1] for r in
                   session.get_recommender().recommend(n_recommendations)]
    nose.tools.eq_(pre_ranking[:pre_filtered_candidates_count],
                   pos_ranking[:pre_filtered_candidates_count],
                   "Recommendation lists for pre- and pos-filters do not match")
def _check_basic_and_filters(self, intended_count, strategy):
    """$and of language + exact present date must yield *intended_count* items."""
    custom_settings = {'filter_strategy': strategy}
    product_count = self.db_proxy.get_product_model_count()
    date_str = self.session_context.get_present_date().isoformat()
    filter_string = '{{"$and": [{{"language": "german"}}, {{"date": "{0}"}}]}}'.format(date_str)
    session = tests.init_session(user_id="u_filter_1",
                                 custom_settings=custom_settings,
                                 context_filter_string=filter_string,
                                 algorithm=self.algorithm)
    filtered_products = session.get_recommender().recommend(5)
    nose.tools.ok_(product_count > 0, 'The filter test requires products to exist')
    nose.tools.eq_(len(filtered_products), intended_count)
def _check_basic_or_filters(self, intended_count, strategy):
    """$or of language + exact present date must yield *intended_count* items."""
    custom_settings = {'filter_strategy': strategy}
    product_count = self.db_proxy.get_product_model_count()
    date_str = self.session_context.get_present_date().isoformat()
    filter_string = '{{"$or": [{{"language": "german"}}, {{"date": "{0}"}}]}}'.format(date_str)
    session = tests.init_session(user_id="u_filter_1",
                                 custom_settings=custom_settings,
                                 context_filter_string=filter_string,
                                 algorithm=self.algorithm)
    filtered_products = session.get_recommender().recommend(5)
    nose.tools.ok_(product_count > 0, 'The filter test requires products to exist')
    nose.tools.eq_(len(filtered_products), intended_count)
| 48.864675 | 118 | 0.606675 |
4b3e9db303ca8e09b220ed76840263b3f827b439 | 13,291 | py | Python | imodels/rule_list/bayesian_rule_list/bayesian_rule_list.py | teakfi/imodels | 0144698c5c9b919b2640cb8e2caac3a9124a2a0e | [
"MIT"
] | 598 | 2020-09-15T19:46:27.000Z | 2022-03-31T21:05:30.000Z | imodels/rule_list/bayesian_rule_list/bayesian_rule_list.py | teakfi/imodels | 0144698c5c9b919b2640cb8e2caac3a9124a2a0e | [
"MIT"
] | 52 | 2020-09-28T12:15:03.000Z | 2022-03-22T16:43:51.000Z | imodels/rule_list/bayesian_rule_list/bayesian_rule_list.py | teakfi/imodels | 0144698c5c9b919b2640cb8e2caac3a9124a2a0e | [
"MIT"
] | 60 | 2020-09-16T21:42:28.000Z | 2022-03-31T14:08:29.000Z | import numpy as np
import pandas as pd
import random
from collections import Counter, defaultdict
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.utils.multiclass import check_classification_targets, unique_labels
from sklearn.utils.validation import check_X_y, check_array, check_is_fitted
from imodels.rule_list.bayesian_rule_list.brl_util import (
default_permsdic, preds_d_t, run_bdl_multichain_serial, merge_chains, get_point_estimate, get_rule_rhs
)
from imodels.rule_list.rule_list import RuleList
from imodels.util.convert import itemsets_to_rules
from imodels.util.extract import extract_fpgrowth
from imodels.util.rule import get_feature_dict, replace_feature_name, Rule
class BayesianRuleListClassifier(BaseEstimator, RuleList, ClassifierMixin):
    """Scikit-learn compatible Bayesian Rule List classifier (Letham's BRL).

    Produces a highly interpretable model -- an ordered list of decision
    rules -- by MCMC-sampling many rule lists and trading off compactness
    against predictive performance.

    Parameters
    ----------
    listlengthprior : int, optional (default=3)
        Prior hyperparameter for expected list length (excluding null rule)
    listwidthprior : int, optional (default=1)
        Prior hyperparameter for expected list width (excluding null rule)
    maxcardinality : int, optional (default=2)
        Maximum cardinality of an itemset
    minsupport : float, optional (default=0.1)
        Minimum support (fraction between 0 and 1) of an itemset
    disc_strategy : str, optional (default='mdlp')
        Discretization strategy forwarded to itemset extraction
    disc_kwargs : dict, optional (default={})
        Extra keyword arguments for the discretizer
    alpha : array_like, shape = [n_classes]
        Prior hyperparameter for multinomial pseudocounts
    n_chains : int, optional (default=3)
        Number of MCMC chains for inference
    max_iter : int, optional (default=50000)
        Maximum number of iterations
    class1label : str, optional (default="class 1")
        Label or description of what the positive class (with y=1) means
    verbose : bool, optional (default=False)
        Verbose output
    random_state : int
        Random seed
    """

    def __init__(self,
                 listlengthprior=3,
                 listwidthprior=1,
                 maxcardinality=2,
                 minsupport=0.1,
                 disc_strategy='mdlp',
                 # NOTE(review): mutable defaults ({} and np.array) kept as-is
                 # for sklearn get_params()/clone() compatibility; they are
                 # never mutated here.
                 disc_kwargs={},
                 alpha=np.array([1., 1.]),
                 n_chains=3,
                 max_iter=50000,
                 class1label="class 1",
                 verbose=False,
                 random_state=42):
        # sklearn convention: store constructor args unmodified.
        self.listlengthprior = listlengthprior
        self.listwidthprior = listwidthprior
        self.maxcardinality = maxcardinality
        self.minsupport = minsupport
        self.disc_strategy = disc_strategy
        self.disc_kwargs = disc_kwargs
        self.alpha = alpha
        self.n_chains = n_chains
        self.max_iter = max_iter
        self.class1label = class1label
        self.verbose = verbose
        self._zmin = 1
        self.thinning = 1  # the MCMC thinning rate
        self.burnin = self.max_iter // 2  # samples dropped as in-simulation burn-in
        self.discretizer = None
        self.d_star = None
        self.random_state = random_state
        self.seed()
def seed(self):
    """Seed both the stdlib and numpy RNGs from random_state (no-op if None)."""
    if self.random_state is None:
        return
    random.seed(self.random_state)
    np.random.seed(self.random_state)
def _setlabels(self, X, feature_names=None):
    """Store feature names on the estimator, deriving defaults when absent.

    Fixes: the mutable default argument ``[]`` is replaced with ``None``
    (same behavior for callers), and ``type(X) == pd.DataFrame`` becomes
    ``isinstance`` so DataFrame subclasses are handled too.

    Parameters
    ----------
    X : array-like or pd.DataFrame
        Training data; string-like DataFrame columns are used as names.
    feature_names : sequence of str, optional
        Explicit names; when empty/None, names are taken from DataFrame
        columns or enumerated as "ft1", "ft2", ...
    """
    if feature_names is None or len(feature_names) == 0:
        if isinstance(X, pd.DataFrame) and (
                'object' in str(X.columns.dtype) or 'str' in str(X.columns.dtype)):
            feature_names = X.columns
        else:
            feature_names = ["ft" + str(i + 1) for i in range(len(X[0]))]
    self.feature_names = feature_names
def fit(self, X, y, feature_names: list = None, undiscretized_features=[], verbose=False):
    """Fit a Bayesian rule list to ``(X, y)``.

    Numerical columns are discretized automatically; columns listed in
    *undiscretized_features* are instead kept as categorical values
    (converted to strings).

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]
        Training data
    y : array_like, shape = [n_samples]
        Binary labels
    feature_names : array_like, shape = [n_features], optional
        String labels for each feature; enumerated when omitted
    undiscretized_features : array_like, optional
        Feature names that must NOT be discretized
    verbose : bool
        Forwarded to itemset extraction

    Returns
    -------
    self
    """
    self.seed()
    if len(set(y)) != 2:
        raise Exception("Only binary classification is supported at this time!")
    X, y = check_X_y(X, y)
    check_classification_targets(y)
    self.n_features_in_ = X.shape[1]
    self.classes_ = unique_labels(y)

    # Map raw column positions to placeholder names and back.
    self.feature_dict_ = get_feature_dict(X.shape[1], feature_names)
    self.feature_placeholders = np.array(list(self.feature_dict_.keys()))
    self.feature_names = np.array(list(self.feature_dict_.values()))

    # Mine frequent itemsets (candidate rule antecedents) via FP-Growth.
    itemsets, self.discretizer = extract_fpgrowth(
        X, y,
        feature_names=self.feature_placeholders,
        minsupport=self.minsupport,
        maxcardinality=self.maxcardinality,
        undiscretized_features=undiscretized_features,
        disc_strategy=self.disc_strategy,
        disc_kwargs=self.disc_kwargs,
        verbose=verbose)

    onehot_df = self.discretizer.transform(X)
    # Replace 1/0 cells with the column name / empty string so that each
    # row becomes the set of items it satisfies.
    for col in onehot_df.columns:
        onehot_df[col] = [col if cell == 1 else '' for cell in list(onehot_df[col])]

    # X[j] = indices of data points satisfying itemset j (i.e. rule j);
    # slot 0 is the default rule, which every point satisfies.
    X = [{}] * (len(itemsets) + 1)
    X[0] = set(range(len(onehot_df)))
    for j, lhs in enumerate(itemsets):
        X[j + 1] = {i for i, row in enumerate(onehot_df.values)
                    if set(lhs).issubset(row)}

    # Antecedent lengths (0 for the default rule) and their histogram.
    lhs_len = [0] + [len(lhs) for lhs in itemsets]
    nruleslen = Counter(lhs_len)
    lhs_len = np.array(lhs_len)

    Xtrain = X
    Ytrain = np.vstack((1 - np.array(y), y)).T.astype(int)
    self.itemsets = ['null'] + list(itemsets)

    permsdic = defaultdict(default_permsdic)  # MCMC results accumulate here
    res, Rhat = run_bdl_multichain_serial(
        self.max_iter, self.thinning, self.alpha, self.listlengthprior,
        self.listwidthprior, Xtrain, Ytrain, nruleslen, lhs_len,
        self.maxcardinality, permsdic, self.burnin, self.n_chains,
        [None] * self.n_chains, verbose=self.verbose, seed=self.random_state)
    permsdic = merge_chains(res)

    # BRL-point: the single posterior point-estimate rule list.
    self.d_star = get_point_estimate(
        permsdic, lhs_len, Xtrain, Ytrain, self.alpha, nruleslen,
        self.maxcardinality, self.listlengthprior, self.listwidthprior,
        verbose=self.verbose)

    if self.d_star:
        # Posterior consequent probabilities and their credible intervals.
        self.theta, self.ci_theta = get_rule_rhs(Xtrain, Ytrain, self.d_star,
                                                 self.alpha, True)
        self.final_itemsets = np.array(self.itemsets, dtype=object)[self.d_star]
        rule_strs = itemsets_to_rules(self.final_itemsets)
        self.rules_without_feature_names_ = [Rule(r) for r in rule_strs]
        self.rules_ = [replace_feature_name(rule, self.feature_dict_)
                       for rule in self.rules_without_feature_names_]

    # NOTE(review): _get_complexity reads self.final_itemsets, which is only
    # assigned when d_star is non-empty -- preserved from the original.
    self.complexity_ = self._get_complexity()
    return self
def _get_complexity(self):
    """Total number of terms across all mined rules, plus 1 for the default rule."""
    # String entries (the 'null' default rule) carry no terms.
    term_total = sum(len(iset) for iset in self.final_itemsets if type(iset) != str)
    return term_total + 1
# def __repr__(self, decimals=1):
# if self.d_star:
# detect = ""
# if self.class1label != "class 1":
# detect = "for detecting " + self.class1label
# header = "Trained RuleListClassifier " + detect + "\n"
# separator = "".join(["="] * len(header)) + "\n"
# s = ""
# for i, j in enumerate(self.d_star):
# if self.itemsets[j] != 'null':
# condition = "ELSE IF " + (
# " AND ".join([str(self.itemsets[j][k]) for k in range(len(self.itemsets[j]))])) + " THEN"
# else:
# condition = "ELSE"
# s += condition + " probability of " + self.class1label + ": " + str(
# np.round(self.theta[i] * 100, decimals)) + "% (" + str(
# np.round(self.ci_theta[i][0] * 100, decimals)) + "%-" + str(
# np.round(self.ci_theta[i][1] * 100, decimals)) + "%)\n"
# return header + separator + s[5:] + separator[1:]
# else:
# return "(Untrained RuleListClassifier)"
def __repr__(self, decimals=1):
    """Human-readable rule list, or a placeholder when not yet trained."""
    if not self.d_star:
        return "(Untrained RuleListClassifier)"
    detect = ""
    if self.class1label != "class 1":
        detect = "for detecting " + self.class1label
    header = "Trained RuleListClassifier " + detect + "\n"
    separator = "=" * len(header) + "\n"
    body = ""
    for i in range(len(self.rules_) + 1):
        if i == len(self.rules_):
            condition = "ELSE"  # the default rule closes the list
        else:
            condition = "ELSE IF " + str(self.rules_[i]) + " THEN"
        body += (condition + " probability of " + self.class1label + ": "
                 + str(np.round(self.theta[i] * 100, decimals)) + "% ("
                 + str(np.round(self.ci_theta[i][0] * 100, decimals)) + "%-"
                 + str(np.round(self.ci_theta[i][1] * 100, decimals)) + "%)\n")
    # body[5:] strips the leading "ELSE " from the very first rule line.
    return header + separator + body[5:] + separator[1:]
def _to_itemset_indices(self, X_df_onehot):
    """Map each mined itemset to the set of row indices satisfying it.

    Fix: the original rewrote the caller's DataFrame columns in place
    (replacing 1/0 cells with column names / empty strings). This version
    computes the same result without mutating *X_df_onehot*.

    Parameters
    ----------
    X_df_onehot : pd.DataFrame
        One-hot encoded data; a cell value of 1 means the row has that item.

    Returns
    -------
    list of set
        X[j] is the set of row indices that satisfy itemset j; X[0] (the
        default rule) contains every row.
    """
    columns = list(X_df_onehot.columns)
    # Row i -> set of one-hot column names that are "on" for that row.
    row_items = [{c for c, value in zip(columns, row) if value == 1}
                 for row in X_df_onehot.values]
    X = [set() for _ in range(len(self.itemsets))]
    X[0] = set(range(X_df_onehot.shape[0]))  # default rule satisfies all rows
    for j, lhs in enumerate(self.itemsets):
        if j > 0:
            X[j] = {i for i, items in enumerate(row_items)
                    if set(lhs).issubset(items)}
    return X
def predict_proba(self, X):
    """Compute class membership probabilities for the samples in X.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]

    Returns
    -------
    array-like, shape = [n_samples, n_classes]
        Probability of each sample for each class; columns follow the
        sorted order of ``classes_``.
    """
    check_is_fitted(self)
    X = check_array(X)
    if self.discretizer:
        frame = self.discretizer.transform(X)
    else:
        frame = pd.DataFrame(X, columns=self.feature_names)
    itemset_rows = self._to_itemset_indices(frame)
    # Probability of the positive class per sample.
    p_pos = preds_d_t(itemset_rows, np.zeros((len(frame), 1), dtype=int),
                      self.d_star, self.theta)
    return np.vstack((1 - p_pos, p_pos)).T
def predict(self, X, threshold=0.1):
    """Classify samples in X: positive when P(class 1) >= *threshold*.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]
    threshold : float, optional (default=0.1)
        Decision threshold applied to the positive-class probability.

    Returns
    -------
    array, shape = [n_samples]
        Integer class labels.
    """
    check_is_fitted(self)
    X = check_array(X)
    return (self.predict_proba(X)[:, 1] >= threshold).astype(int)
| 41.927445 | 119 | 0.57648 |
8b4f1c0ca3e311131cfe461d08de494f126f0110 | 11,392 | py | Python | sahara/service/heat/templates.py | redhat-openstack/sahara | 67165c96eceb1ce3b087870934d394602f5dd959 | [
"Apache-2.0"
] | null | null | null | sahara/service/heat/templates.py | redhat-openstack/sahara | 67165c96eceb1ce3b087870934d394602f5dd959 | [
"Apache-2.0"
] | null | null | null | sahara/service/heat/templates.py | redhat-openstack/sahara | 67165c96eceb1ce3b087870934d394602f5dd959 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2015 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from oslo_config import cfg
from oslo_log import log as logging
import six
from sahara.utils import general as g
from sahara.utils.openstack import heat as h
from sahara.utils.openstack import neutron
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
SSH_PORT = 22
def _get_inst_name(cluster_name, ng_name, index):
    """Instance name; generated names are 1-based while callers count from 0."""
    return g.generate_instance_name(cluster_name, ng_name, index + 1)
def _get_aa_group_name(cluster_name):
    """Name of the cluster's anti-affinity server group."""
    return g.generate_aa_group_name(cluster_name)
def _get_port_name(inst_name):
return '%s-port' % inst_name
def _get_floating_name(inst_name):
return '%s-floating' % inst_name
def _get_floating_assoc_name(inst_name):
return '%s-floating-assoc' % inst_name
def _get_volume_name(inst_name, volume_idx):
return '%s-volume-%i' % (inst_name, volume_idx)
def _get_volume_attach_name(inst_name, volume_idx):
return '%s-volume-attachment-%i' % (inst_name, volume_idx)
class ClusterTemplate(object):
    """Builds a Heat (AWS-CFN-format) template describing a Sahara cluster."""

    def __init__(self, cluster):
        self.cluster = cluster
        # node group id -> {'node_count': int, 'gen_userdata_func': callable}
        self.node_groups_extra = {}

    def add_node_group_extra(self, node_group_id, node_count,
                             gen_userdata_func):
        """Register instance count and userdata generator for a node group."""
        self.node_groups_extra[node_group_id] = {
            'node_count': node_count,
            'gen_userdata_func': gen_userdata_func,
        }
def _get_main_template(self):
    """Serialize the whole cluster as a CFN-format JSON template string."""
    template = {
        "AWSTemplateFormatVersion": "2010-09-09",
        "Description": "Data Processing Cluster by Sahara",
        "Resources": self._serialize_resources(),
        "Outputs": {},
    }
    return json.dumps(template)
def instantiate(self, update_existing, disable_rollback=True):
    """Create (or update, if *update_existing*) the cluster's Heat stack.

    Returns a ClusterStack wrapping this template and the live stack.
    """
    heat = h.client()
    kwargs = {
        'stack_name': self.cluster.name,
        'timeout_mins': 180,
        'disable_rollback': disable_rollback,
        'parameters': {},
        'template': self._get_main_template(),
    }
    if update_existing:
        # Locate the existing stack by name and push the new template.
        for stack in heat.stacks.list():
            if stack.stack_name == self.cluster.name:
                stack.update(**kwargs)
                break
    else:
        heat.stacks.create(**kwargs)
    return ClusterStack(self, h.get_stack(self.cluster.name))
def _need_aa_server_group(self, node_group):
    """True if any of the group's processes participates in anti-affinity."""
    return any(proc in self.cluster.anti_affinity
               for proc in node_group.node_processes)
def _get_anti_affinity_scheduler_hints(self, node_group):
    """Scheduler-hint properties referencing the AA server group, or {}."""
    if not self._need_aa_server_group(node_group):
        return {}
    group_ref = {"Ref": _get_aa_group_name(self.cluster.name)}
    return {"scheduler_hints": {"group": group_ref}}
def _serialize_resources(self):
    """Collect every Heat resource for the cluster into one mapping."""
    resources = {}
    if self.cluster.anti_affinity:
        resources.update(self._serialize_aa_server_group())
    for ng in self.cluster.node_groups:
        if ng.auto_security_group:
            resources.update(self._serialize_auto_security_group(ng))
        node_count = self.node_groups_extra[ng.id]['node_count']
        for idx in range(node_count):
            resources.update(self._serialize_instance(ng, idx))
    return resources
def _serialize_auto_security_group(self, ng):
    """AWS::EC2::SecurityGroup resource auto-created for node group *ng*."""
    name = g.generate_auto_security_group_name(ng)
    description = ("Auto security group created by Sahara for Node Group "
                   "'%s' of cluster '%s'." % (ng.name, ng.cluster.name))
    return {
        name: {
            "Type": "AWS::EC2::SecurityGroup",
            "Properties": {
                "GroupDescription": description,
                "SecurityGroupIngress":
                    self._serialize_auto_security_group_rules(ng),
            }
        }
    }
def _serialize_auto_security_group_rules(self, ng):
    """Ingress rules for the auto security group.

    Opens the node group's declared ports plus SSH to the world, and all
    TCP/UDP/ICMP traffic on the cluster's private networks (Neutron only).

    Fix: the rule builder was an assigned lambda (PEP 8 E731); it is now a
    proper inner function, which also gives it a name in tracebacks.
    """
    def create_rule(cidr, proto, from_port, to_port):
        return {
            "CidrIp": cidr,
            "IpProtocol": proto,
            "FromPort": six.text_type(from_port),
            "ToPort": six.text_type(to_port),
        }

    rules = [create_rule('0.0.0.0/0', 'tcp', port, port)
             for port in ng.open_ports]
    rules.append(create_rule('0.0.0.0/0', 'tcp', SSH_PORT, SSH_PORT))
    # open all traffic for private networks
    if CONF.use_neutron:
        for cidr in neutron.get_private_network_cidrs(ng.cluster):
            for protocol in ['tcp', 'udp']:
                rules.append(create_rule(cidr, protocol, 1, 65535))
            rules.append(create_rule(cidr, 'icmp', -1, -1))
    return rules
def _serialize_instance(self, ng, idx):
    """Heat resources for instance number *idx* of node group *ng*.

    Includes the Nova server itself plus its port/floating IP (networking
    mode dependent) and any attached Cinder volumes.

    Fix: the volume loop reused the name ``idx``, shadowing the instance
    index parameter after it had been used; renamed to ``volume_idx``.
    """
    resources = {}
    properties = {}
    inst_name = _get_inst_name(self.cluster.name, ng.name, idx)
    if CONF.use_neutron:
        port_name = _get_port_name(inst_name)
        resources.update(self._serialize_port(
            port_name, self.cluster.neutron_management_network,
            self._get_security_groups(ng)))
        properties["networks"] = [{"port": {"Ref": port_name}}]
        if ng.floating_ip_pool:
            resources.update(self._serialize_neutron_floating(
                inst_name, port_name, ng.floating_ip_pool))
    else:
        if ng.floating_ip_pool:
            resources.update(self._serialize_nova_floating(
                inst_name, ng.floating_ip_pool))
        if ng.security_groups:
            properties["security_groups"] = self._get_security_groups(ng)
    # Check if cluster contains user key-pair and include it to template.
    if self.cluster.user_keypair_id:
        properties["key_name"] = self.cluster.user_keypair_id
    gen_userdata_func = self.node_groups_extra[ng.id]['gen_userdata_func']
    userdata = gen_userdata_func(ng, inst_name)
    if ng.availability_zone:
        properties["availability_zone"] = ng.availability_zone
    properties.update(self._get_anti_affinity_scheduler_hints(ng))
    properties.update({
        "name": inst_name,
        "flavor": six.text_type(ng.flavor_id),
        "image": ng.get_image_id(),
        "admin_user": ng.image_username,
        "user_data": userdata,
    })
    resources.update({
        inst_name: {
            "Type": "OS::Nova::Server",
            "Properties": properties,
        }
    })
    for volume_idx in range(ng.volumes_per_node):
        resources.update(self._serialize_volume(
            inst_name, volume_idx, ng.volumes_size,
            ng.volumes_availability_zone, ng.volume_type,
            ng.volume_local_to_instance))
    return resources
def _serialize_port(self, port_name, fixed_net_id, security_groups):
    """Neutron port resource on the cluster's fixed network."""
    properties = {
        "network_id": fixed_net_id,
        "replacement_policy": "AUTO",
    }
    if security_groups:
        properties["security_groups"] = security_groups
    return {
        port_name: {
            "Type": "OS::Neutron::Port",
            "Properties": properties,
        }
    }
def _serialize_neutron_floating(self, inst_name, port_name,
                                floating_net_id):
    """Neutron floating IP associated with the instance's port."""
    return {
        _get_floating_name(inst_name): {
            "Type": "OS::Neutron::FloatingIP",
            "Properties": {
                "floating_network_id": floating_net_id,
                "port_id": {"Ref": port_name},
            }
        }
    }
def _serialize_nova_floating(self, inst_name, floating_pool_name):
    """Nova-network floating IP plus its association with the instance."""
    ip_name = _get_floating_name(inst_name)
    assoc_name = _get_floating_assoc_name(inst_name)
    return {
        ip_name: {
            "Type": "OS::Nova::FloatingIP",
            "Properties": {
                "pool": floating_pool_name,
            }
        },
        assoc_name: {
            "Type": "OS::Nova::FloatingIPAssociation",
            "Properties": {
                "floating_ip": {"Ref": ip_name},
                "server_id": {"Ref": inst_name},
            }
        },
    }
def _serialize_volume(self, inst_name, volume_idx, volumes_size,
                      volumes_availability_zone, volume_type,
                      volume_local_to_instance):
    """Cinder volume + attachment resources for one instance volume."""
    volume_name = _get_volume_name(inst_name, volume_idx)
    attach_name = _get_volume_attach_name(inst_name, volume_idx)
    properties = {
        "name": volume_name,
        "size": six.text_type(volumes_size),
        "volume_type": volume_type,
    }
    if volumes_availability_zone:
        properties["availability_zone"] = volumes_availability_zone
    if volume_local_to_instance:
        # Ask the scheduler to place the volume on the instance's host.
        properties["scheduler_hints"] = {
            "local_to_instance": {"Ref": inst_name}}
    return {
        volume_name: {
            "Type": "OS::Cinder::Volume",
            "Properties": properties,
        },
        attach_name: {
            "Type": "OS::Cinder::VolumeAttachment",
            "Properties": {
                "instance_uuid": {"Ref": inst_name},
                "volume_id": {"Ref": volume_name},
                "mountpoint": None,  # let Nova choose the device name
            }
        },
    }
def _get_security_groups(self, node_group):
    """Group's security groups, plus a Ref to its auto group when enabled."""
    groups = node_group.security_groups
    if not node_group.auto_security_group:
        return groups
    auto_ref = {"Ref": g.generate_auto_security_group_name(node_group)}
    return list(groups or []) + [auto_ref]
def _serialize_aa_server_group(self):
    """Anti-affinity server group shared by all AA-participating instances."""
    name = _get_aa_group_name(self.cluster.name)
    return {
        name: {
            "Type": "OS::Nova::ServerGroup",
            "Properties": {
                "name": name,
                "policies": ["anti-affinity"],
            }
        }
    }
class ClusterStack(object):
    """Pairs a ClusterTemplate with its instantiated Heat stack."""

    def __init__(self, tmpl, heat_stack):
        self.tmpl = tmpl
        self.heat_stack = heat_stack

    def get_node_group_instances(self, node_group):
        """Return [(instance_name, physical_resource_id)] for *node_group*."""
        heat = h.client()
        cluster_name = self.tmpl.cluster.name
        count = self.tmpl.node_groups_extra[node_group.id]['node_count']
        instances = []
        for i in range(count):
            name = _get_inst_name(cluster_name, node_group.name, i)
            resource = heat.resources.get(self.heat_stack.id, name)
            instances.append((name, resource.physical_resource_id))
        return instances
| 33.02029 | 78 | 0.593838 |
eb3ff1b92d8d3404b22945f3bfb081cc5b00da01 | 257 | py | Python | features/signals.py | KDD-OpenSource/fexum | 2288a4be2fcbd5bed0e2549204f313bca50d3265 | [
"MIT"
] | 6 | 2017-05-15T16:07:15.000Z | 2020-05-18T10:04:38.000Z | features/signals.py | KDD-OpenSource/fexum | 2288a4be2fcbd5bed0e2549204f313bca50d3265 | [
"MIT"
] | 34 | 2017-05-11T13:45:03.000Z | 2017-07-29T19:18:45.000Z | features/signals.py | KDD-OpenSource/fexum | 2288a4be2fcbd5bed0e2549204f313bca50d3265 | [
"MIT"
] | null | null | null | from django.db.models.signals import post_delete
from django.dispatch.dispatcher import receiver
from features.models import Dataset
@receiver(post_delete, sender=Dataset)
def dataset_delete(sender, instance, **kwargs):
    # When a Dataset row is deleted, also remove its uploaded file from
    # storage. The False argument is save=False: do not attempt to re-save
    # the (already deleted) model instance after clearing the file field.
    instance.content.delete(False)
| 28.555556 | 48 | 0.817121 |
1bcc93ae0c6c269a7d51dd0682e8915be3f1d43c | 1,781 | py | Python | lib/geovista/geoplotter.py | jamesp/geovista | 4850c519c7a37c4765befa06fbab933350637c93 | [
"BSD-3-Clause"
] | null | null | null | lib/geovista/geoplotter.py | jamesp/geovista | 4850c519c7a37c4765befa06fbab933350637c93 | [
"BSD-3-Clause"
] | null | null | null | lib/geovista/geoplotter.py | jamesp/geovista | 4850c519c7a37c4765befa06fbab933350637c93 | [
"BSD-3-Clause"
] | null | null | null | from typing import Any, Optional
import pyvista as pv
from pyvista.utilities import abstract_class
import pyvistaqt as pvqt
import vtk
from .geometry import COASTLINE_RESOLUTION, get_coastlines
from .log import get_logger
__all__ = ["GeoBackgroundPlotter", "GeoMultiPlotter", "GeoPlotter"]
# configure the logger
logger = get_logger(__name__)
@abstract_class
class GeoBasePlotter:
    """Mixin adding geolocated-plotting helpers to the concrete plotters."""

    def add_base_layer(self, **kwargs: Optional[Any]) -> vtk.vtkActor:
        """Add an opaque sphere just inside the unit sphere as a base layer.

        Parameters
        ----------
        kwargs : Any, optional
            Forwarded to ``add_mesh``.

        Returns
        -------
        vtkActor

        Notes
        -----
        .. versionadded:: 0.1.0

        """
        # TODO: rationalise zorder
        # Radius is slightly below 1 so the base renders beneath meshes
        # drawn on the unit sphere itself.
        base = pv.Sphere(
            radius=1 - (1e-3), theta_resolution=360, phi_resolution=180
        )
        return self.add_mesh(base, **kwargs)

    def add_coastlines(
        self, resolution: Optional[str] = COASTLINE_RESOLUTION, **kwargs: Optional[Any]
    ) -> vtk.vtkActor:
        """Add coastline geometry at the requested resolution.

        Parameters
        ----------
        resolution : str, default=COASTLINE_RESOLUTION
        kwargs : Any, optional
            Forwarded to ``add_mesh``.

        Returns
        -------
        vtkActor

        Notes
        -----
        .. versionadded:: 0.1.0

        """
        coastlines = get_coastlines(resolution=resolution)
        return self.add_mesh(coastlines, **kwargs)
class GeoBackgroundPlotter(pvqt.BackgroundPlotter, GeoBasePlotter):
    """Qt background plotter extended with the geo helper methods."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
class GeoMultiPlotter(pvqt.MultiPlotter, GeoBasePlotter):
    """Qt multi-window plotter extended with the geo helper methods."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
class GeoPlotter(pv.Plotter, GeoBasePlotter):
    """Standard pyvista plotter extended with the geo helper methods."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
| 21.719512 | 87 | 0.612016 |
61f822f04c85f2bf78ac8685a28495ad4b90ecf2 | 8,234 | py | Python | ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_server.py | zyclove/ambari | 1032f0f54cb7b312b9a3b37570cd840f4e1e89d4 | [
"Apache-2.0"
] | null | null | null | ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_server.py | zyclove/ambari | 1032f0f54cb7b312b9a3b37570cd840f4e1e89d4 | [
"Apache-2.0"
] | null | null | null | ambari-server/src/test/python/stacks/2.0.6/GANGLIA/test_ganglia_server.py | zyclove/ambari | 1032f0f54cb7b312b9a3b37570cd840f4e1e89d4 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python2
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from stacks.utils.RMFTestCase import *
from mock.mock import MagicMock, call, patch
from only_for_platform import not_for_platform, PLATFORM_WINDOWS
@not_for_platform(PLATFORM_WINDOWS)
class TestGangliaServer(RMFTestCase):
    """Stack-definition tests for the Ganglia server component: each test
    replays a command against ganglia_server.py and asserts the exact
    sequence of resources (files, directories, commands) it declares.
    """
    COMMON_SERVICES_PACKAGE_DIR = "GANGLIA/3.5.0/package"
    STACK_VERSION = "2.0.6"

    def test_configure_default(self):
        # "configure" should only lay down configuration, no service commands.
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/ganglia_server.py",
                           classname="GangliaServer",
                           command="configure",
                           config_file="default.json",
                           stack_version = self.STACK_VERSION,
                           target = RMFTestCase.TARGET_COMMON_SERVICES
                           )
        self.assert_configure_default()
        self.assertNoMoreResources()

    def test_start_default(self):
        # "start" re-applies configuration, then starts gmetad and
        # restarts the monitoring webserver.
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/ganglia_server.py",
                           classname="GangliaServer",
                           command="start",
                           config_file="default.json",
                           stack_version = self.STACK_VERSION,
                           target = RMFTestCase.TARGET_COMMON_SERVICES
                           )
        self.assert_configure_default()
        self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E service hdp-gmetad start >> /tmp/gmetad.log 2>&1 ; /bin/ps auwx | /bin/grep [g]metad >> /tmp/gmetad.log 2>&1',
                                  path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                                  )
        self.assertResourceCalled('MonitorWebserver', 'restart',
                                  )
        self.assertNoMoreResources()

    def test_stop_default(self):
        # "stop" only stops gmetad and restarts the monitoring webserver.
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/ganglia_server.py",
                           classname="GangliaServer",
                           command="stop",
                           config_file="default.json",
                           stack_version = self.STACK_VERSION,
                           target = RMFTestCase.TARGET_COMMON_SERVICES
                           )
        self.assertResourceCalled('Execute', 'ambari-sudo.sh [RMF_ENV_PLACEHOLDER] -H -E service hdp-gmetad stop >> /tmp/gmetad.log 2>&1 ; /bin/ps auwx | /bin/grep [g]metad >> /tmp/gmetad.log 2>&1',
                                  path = ['/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin'],
                                  )
        self.assertResourceCalled('MonitorWebserver', 'restart',
                                  )
        self.assertNoMoreResources()

    def test_install_default(self):
        # "install" performs the same configuration steps as "configure".
        self.executeScript(self.COMMON_SERVICES_PACKAGE_DIR + "/scripts/ganglia_server.py",
                           classname="GangliaServer",
                           command="install",
                           config_file="default.json",
                           stack_version = self.STACK_VERSION,
                           target = RMFTestCase.TARGET_COMMON_SERVICES
                           )
        self.assert_configure_default()

    def assert_configure_default(self):
        """Assert the full, ordered set of resources declared when the
        Ganglia server is configured with the default configuration.
        """
        self.assertResourceCalled('Directory', '/usr/libexec/hdp/ganglia',
                                  owner = 'root',
                                  group = 'root',
                                  create_parents = True,
                                  )
        # Init scripts and helper shell scripts installed verbatim.
        self.assertResourceCalled('File', '/etc/init.d/hdp-gmetad',
                                  content = StaticFile('gmetad.init'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/etc/init.d/hdp-gmond',
                                  content = StaticFile('gmond.init'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkGmond.sh',
                                  content = StaticFile('checkGmond.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/checkRrdcached.sh',
                                  content = StaticFile('checkRrdcached.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmetadLib.sh',
                                  content = StaticFile('gmetadLib.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/gmondLib.sh',
                                  content = StaticFile('gmondLib.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/rrdcachedLib.sh',
                                  content = StaticFile('rrdcachedLib.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/setupGanglia.sh',
                                  content = StaticFile('setupGanglia.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmetad.sh',
                                  content = StaticFile('startGmetad.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startGmond.sh',
                                  content = StaticFile('startGmond.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/startRrdcached.sh',
                                  content = StaticFile('startRrdcached.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmetad.sh',
                                  content = StaticFile('stopGmetad.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopGmond.sh',
                                  content = StaticFile('stopGmond.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/stopRrdcached.sh',
                                  content = StaticFile('stopRrdcached.sh'),
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/usr/libexec/hdp/ganglia/teardownGanglia.sh',
                                  content = StaticFile('teardownGanglia.sh'),
                                  mode = 0755,
                                  )
        # Templated Ganglia configuration.
        self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaClusters.conf',
                                  owner = 'root',
                                  template_tag = None,
                                  group = 'root',
                                  mode = 0755,
                                  )
        self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaEnv.sh',
                                  owner = 'root',
                                  template_tag = None,
                                  group = 'root',
                                  mode = 0755,
                                  )
        self.assertResourceCalled('TemplateConfig', '/usr/libexec/hdp/ganglia/gangliaLib.sh',
                                  owner = 'root',
                                  template_tag = None,
                                  group = 'root',
                                  mode = 0755,
                                  )
        self.assertResourceCalled('Execute', '/usr/libexec/hdp/ganglia/setupGanglia.sh -t -o root -g hadoop',
                                  path = ['/usr/libexec/hdp/ganglia',
                                          '/usr/sbin',
                                          '/sbin:/usr/local/bin',
                                          '/bin',
                                          '/usr/bin'],
                                  )
        # Runtime and web UI directories.
        self.assertResourceCalled('Directory', '/var/run/ganglia',
                                  mode=0755,
                                  create_parents = True
                                  )
        self.assertResourceCalled('Directory', '/var/lib/ganglia-web/dwoo',
                                  owner = 'wwwrun',
                                  create_parents = True,
                                  recursive_ownership = True,
                                  mode = 0755,
                                  )
        self.assertResourceCalled('Directory', '/srv/www/cgi-bin',
                                  create_parents = True,
                                  )
        self.assertResourceCalled('TemplateConfig', '/srv/www/cgi-bin/rrd.py',
                                  owner = "root",
                                  group = "root",
                                  mode = 0755,
                                  )
        self.assertResourceCalled('Directory', '/var/lib/ganglia/rrds',
                                  owner = 'nobody',
                                  group = 'nobody',
                                  create_parents = True,
                                  mode = 0755,
                                  )
        self.assertResourceCalled('File', '/etc/apache2/conf.d/ganglia.conf',
                                  content = Template('ganglia.conf.j2'),
                                  mode = 0644,
                                  )
        self.assertResourceCalled('File', '/etc/ganglia/gmetad.conf',
                                  owner = 'root',
                                  group = 'hadoop',
                                  )
| 39.970874 | 198 | 0.606267 |
49b2dbbc25672dbe6506d10544fe10edb50b8fd7 | 318 | py | Python | m2-modified/ims/common/agentless-system-crawler/crawler/plugins/systems/load_vm_crawler.py | CCI-MOC/ABMI | 955c12ae9d2dc7afe7323f6c25f2af120f5b281a | [
"Apache-2.0"
] | 108 | 2015-07-21T10:40:36.000Z | 2021-07-01T06:54:51.000Z | m2-modified/ims/common/agentless-system-crawler/crawler/plugins/systems/load_vm_crawler.py | CCI-MOC/ABMI | 955c12ae9d2dc7afe7323f6c25f2af120f5b281a | [
"Apache-2.0"
] | 320 | 2015-07-21T01:33:20.000Z | 2020-07-21T15:57:02.000Z | m2-modified/ims/common/agentless-system-crawler/crawler/plugins/systems/load_vm_crawler.py | CCI-MOC/ABMI | 955c12ae9d2dc7afe7323f6c25f2af120f5b281a | [
"Apache-2.0"
] | 61 | 2015-07-20T18:26:37.000Z | 2021-03-17T01:18:54.000Z | from icrawl_plugin import IVMCrawler
import logging
try:
import psvmi
except ImportError:
psvmi = None
logger = logging.getLogger('crawlutils')
class load_vm_crawler(IVMCrawler):
    """Crawler plugin that reports the 'load' feature for virtual machines."""

    def get_feature(self):
        # Feature key under which this crawler's output is registered.
        return 'load'

    def crawl(self, vm_desc, **kwargs):
        # VM load crawling is not implemented yet (presumably it would rely
        # on the optional psvmi module imported above — TODO confirm).
        raise NotImplementedError()
4f9abfb2ac6a4b94f2867753c170735d80da9b05 | 2,166 | py | Python | tests/broker/test_del_intervention.py | ned21/aquilon | 6562ea0f224cda33b72a6f7664f48d65f96bd41a | [
"Apache-2.0"
] | 7 | 2015-07-31T05:57:30.000Z | 2021-09-07T15:18:56.000Z | tests/broker/test_del_intervention.py | ned21/aquilon | 6562ea0f224cda33b72a6f7664f48d65f96bd41a | [
"Apache-2.0"
] | 115 | 2015-03-03T13:11:46.000Z | 2021-09-20T12:42:24.000Z | tests/broker/test_del_intervention.py | ned21/aquilon | 6562ea0f224cda33b72a6f7664f48d65f96bd41a | [
"Apache-2.0"
] | 13 | 2015-03-03T11:17:59.000Z | 2021-09-09T09:16:41.000Z | #!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2016 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the del intervention command."""
import unittest
if __name__ == "__main__":
    # Resolve test-suite dependencies before the broker test imports below
    # when the module is executed directly.
    import utils
    utils.import_depends()
from brokertest import TestBrokerCommand
class TestDelIntervention(TestBrokerCommand):
    """Broker tests for the del_intervention command: deleting an
    intervention must succeed and remove its plenary template on disk.
    """

    def test_100_del_intervention(self):
        # Delete intervention "i1" and verify its plenary disappears.
        path = ["resource", "host", "server1.aqd-unittest.ms.com",
                "intervention", "i1", "config"]
        self.check_plenary_exists(*path)
        command = ["del_intervention", "--intervention=i1",
                   "--hostname=server1.aqd-unittest.ms.com"]
        self.successtest(command)
        self.check_plenary_gone(*path)
        # Intervention name lookup is case-insensitive: "bLaNk" deletes
        # the intervention stored as "blank".
        path = ["resource", "host", "server1.aqd-unittest.ms.com",
                "intervention", "blank", "config"]
        self.check_plenary_exists(*path)
        command = ["del_intervention", "--intervention=bLaNk",
                   "--hostname=server1.aqd-unittest.ms.com"]
        self.successtest(command)
        self.check_plenary_gone(*path)
        # Remove the remaining fixture interventions.
        command = ["del_intervention", "--intervention=groups",
                   "--hostname=server1.aqd-unittest.ms.com"]
        self.successtest(command)
        command = ["del_intervention", "--intervention=disable",
                   "--hostname=server1.aqd-unittest.ms.com"]
        self.successtest(command)
if __name__ == '__main__':
    # Allow running this test module standalone with verbose output.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestDelIntervention)
    unittest.TextTestRunner(verbosity=2).run(suite)
| 36.711864 | 76 | 0.674054 |
6404eb5d41b3fe5dee8b0d2d802aa4fd88a4b989 | 3,815 | py | Python | build/lib/SciDataTool/Methods/Data1D/get_values.py | enjoyneer87/SciDataTool | 37ddc4071f1edb1270ee03e43595c3f943fb9bd8 | [
"Apache-2.0"
] | 24 | 2020-04-06T11:30:04.000Z | 2022-03-15T17:03:33.000Z | build/lib/SciDataTool/Methods/Data1D/get_values.py | enjoyneer87/SciDataTool | 37ddc4071f1edb1270ee03e43595c3f943fb9bd8 | [
"Apache-2.0"
] | 48 | 2020-07-02T13:00:51.000Z | 2022-03-14T11:22:39.000Z | build/lib/SciDataTool/Methods/Data1D/get_values.py | enjoyneer87/SciDataTool | 37ddc4071f1edb1270ee03e43595c3f943fb9bd8 | [
"Apache-2.0"
] | 18 | 2020-06-08T16:20:05.000Z | 2021-11-08T12:59:35.000Z | from importlib import import_module
from numpy import array
from SciDataTool.Functions.conversions import convert
from SciDataTool.Functions.symmetries import rebuild_symmetries_axis
from SciDataTool.Functions import AxisError, NormError
def get_values(
    self,
    unit="SI",
    is_oneperiod=False,
    is_antiperiod=False,
    is_smallestperiod=False,
    normalization=None,
    operation=None,
    is_real=True,
    corr_unit=None,
):
    """Returns the vector 'axis' taking symmetries into account.

    Parameters
    ----------
    self: Data1D
        a Data1D object
    unit: str
        requested unit
    is_oneperiod: bool
        return values on a single period
    is_antiperiod: bool
        return values on a semi period (only for antiperiodic signals)
    is_smallestperiod: bool
        return values on smallest available period
    normalization: str
        name of normalization to use
    operation: str
        name of the operation (e.g. "freqs_to_time")
    is_real: bool
        forwarded to the conversion function selected by *operation*
        (presumably whether the signal is real-valued — TODO confirm)
    corr_unit: str
        if given, unit to convert *from* instead of self.unit

    Returns
    -------
    Vector of axis values
    """
    # Dynamic import to avoid loop
    module = __import__("SciDataTool.Classes.Norm_vector", fromlist=["Norm_vector"])
    Norm_vector = getattr(module, "Norm_vector")
    # If the requested "unit" is actually the name of a known normalization,
    # treat it as such and fall back to SI for the unit conversion step.
    if unit != "SI" and unit != self.unit:
        if unit in self.normalizations and normalization is None:
            normalization = unit
            unit = "SI"
    values = self.values
    norm_vector = None
    # Ignore symmetries if fft axis
    if self.name == "freqs" or self.name == "wavenumber":
        is_smallestperiod = True
    # Rebuild symmetries
    if is_smallestperiod:
        # Stored values already are the smallest period: nothing to rebuild.
        pass
    elif is_antiperiod:
        if "antiperiod" in self.symmetries:
            # Stored values already cover the semi period requested.
            pass
        else:
            raise AxisError("axis has no antiperiodicity")
    elif is_oneperiod:
        if "antiperiod" in self.symmetries:
            # Temporarily set the antiperiod count to 2 so the rebuild
            # expands the stored half-wave into exactly one period, then
            # restore the original count (self.symmetries is shared state).
            nper = self.symmetries["antiperiod"]
            self.symmetries["antiperiod"] = 2
            values = rebuild_symmetries_axis(values, self.symmetries)
            self.symmetries["antiperiod"] = nper
            pass
        elif "period" in self.symmetries:
            # Stored values already are one period.
            pass
        else:
            # No symmetry: stored values already span one (the only) period.
            pass
    else:
        # Full vector requested: expand all symmetries.
        values = rebuild_symmetries_axis(values, self.symmetries)
        # A vector normalization must be expanded the same way; keep the
        # original vector aside so it can be restored before returning.
        if (
            normalization is not None
            and normalization in self.normalizations
            and isinstance(self.normalizations[normalization], Norm_vector)
        ):
            norm_vector = self.normalizations[normalization].vector.copy()
            self.normalizations[normalization].vector = rebuild_symmetries_axis(
                norm_vector, self.symmetries
            )
    # fft/ifft
    if operation is not None:
        module = import_module("SciDataTool.Functions.conversions")
        func = getattr(module, operation)  # Conversion function
        values = array(func(values, is_real=is_real))
    # Normalization
    if normalization is not None:
        if normalization in self.normalizations:
            if (
                self.normalizations[normalization].unit == "SI"
                or self.normalizations[normalization].unit == self.unit
            ):
                # Axis is in the correct unit for the normalization
                values = self.normalizations[normalization].normalize(values)
            else:
                raise NormError("Normalization is not available in this unit")
        else:
            raise NormError("Normalization is not available")
    # Unit conversion
    if unit != "SI" and unit != self.unit:
        if corr_unit is not None:
            values = convert(values, corr_unit, unit)
        else:
            values = convert(values, self.unit, unit)
    # Restore the normalization vector mutated above, if any.
    if norm_vector is not None:
        self.normalizations[normalization].vector = norm_vector
    return values
| 31.791667 | 84 | 0.63329 |
e3d52352e219b89a01a13b5f8c3d2780ac68dd8a | 9,762 | py | Python | src/gluonnlp/data/dataset.py | brettkoonce/gluon-nlp | 205453e11e7638aeb786da09b4faa399eb6bc95d | [
"Apache-2.0"
] | null | null | null | src/gluonnlp/data/dataset.py | brettkoonce/gluon-nlp | 205453e11e7638aeb786da09b4faa399eb6bc95d | [
"Apache-2.0"
] | null | null | null | src/gluonnlp/data/dataset.py | brettkoonce/gluon-nlp | 205453e11e7638aeb786da09b4faa399eb6bc95d | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=undefined-all-variable
"""NLP Toolkit Dataset API. It allows easy and customizable loading of corpora and dataset files.
Files can be loaded into formats that are immediately ready for training and evaluation."""
__all__ = ['TextLineDataset', 'CorpusDataset', 'ConcatDataset', 'TSVDataset', 'NumpyDataset']
import io
import os
import bisect
import numpy as np
from mxnet.gluon.data import SimpleDataset, Dataset, ArrayDataset
from .utils import concat_sequence, line_splitter, whitespace_splitter, Splitter
class ConcatDataset(Dataset):
    """Dataset presenting several datasets one after another.

    Parameters
    ----------
    datasets : list
        Datasets to concatenate, in order.
    """

    def __init__(self, datasets):
        self.datasets = datasets
        # cum_sizes[k] is the total number of samples in the first k datasets.
        self.cum_sizes = np.cumsum([0] + [len(d) for d in datasets])

    def __getitem__(self, i):
        # Locate the dataset that owns global index i, then offset into it.
        owner = bisect.bisect_right(self.cum_sizes, i)
        local_index = i - self.cum_sizes[owner - 1]
        return self.datasets[owner - 1][local_index]

    def __len__(self):
        return self.cum_sizes[-1]
class TextLineDataset(SimpleDataset):
    """Dataset of the stripped lines of a text file.

    Parameters
    ----------
    filename : str
        Path to the input text file.
    encoding : str, default 'utf8'
        File encoding format.
    """

    def __init__(self, filename, encoding='utf8'):
        with io.open(filename, 'r', encoding=encoding) as in_file:
            stripped_lines = [line.strip() for line in in_file]
        super(TextLineDataset, self).__init__(stripped_lines)
def _corpus_dataset_process(s, bos, eos):
tokens = [bos] if bos else []
tokens.extend(s)
if eos:
tokens.append(eos)
return tokens
class TSVDataset(SimpleDataset):
    """Common tab separated text dataset that reads text fields based on provided sample splitter
    and field separator.

    The returned dataset includes samples, each of which can either be a list of text fields
    if field_separator is specified, or otherwise a single string segment produced by the
    sample_splitter.

    Example::

        # assume `test.tsv` contains the following content:
        # Id\tFirstName\tLastName
        # a\tJiheng\tJiang
        # b\tLaoban\tZha
        # discard the first line and select the 0th and 2nd fields
        dataset = data.TSVDataset('test.tsv', num_discard_samples=1, field_indices=[0, 2])
        assert dataset[0] == [u'a', u'Jiang']
        assert dataset[1] == [u'b', u'Zha']

    Parameters
    ----------
    filename : str or list of str
        Path to the input text file or list of paths to the input text files.
    encoding : str, default 'utf8'
        File encoding format.
    sample_splitter : function, default str.splitlines
        A function that splits the dataset string into samples.
    field_separator : function or None, default Splitter('\t')
        A function that splits each sample string into list of text fields.
        If None, raw samples are returned according to `sample_splitter`.
    num_discard_samples : int, default 0
        Number of samples discarded at the head of the first file.
    field_indices : list of int or None, default None
        If set, for each sample, only fields with provided indices are selected as the output.
        Otherwise all fields are returned.
    """

    def __init__(self, filename, encoding='utf8',
                 sample_splitter=line_splitter, field_separator=Splitter('\t'),
                 num_discard_samples=0, field_indices=None):
        assert sample_splitter, 'sample_splitter must be specified.'
        # Normalize to a tuple so single and multiple files share one path.
        if not isinstance(filename, (tuple, list)):
            filename = (filename, )
        self._filenames = [os.path.expanduser(f) for f in filename]
        self._encoding = encoding
        self._sample_splitter = sample_splitter
        self._field_separator = field_separator
        self._num_discard_samples = num_discard_samples
        self._field_indices = field_indices
        super(TSVDataset, self).__init__(self._read())

    def _should_discard(self):
        # Stateful: each call consumes one unit of the discard budget, so
        # filtering a generator with it drops exactly the leading samples.
        discard = self._num_discard_samples > 0
        self._num_discard_samples -= 1
        return discard

    def _field_selector(self, fields):
        # Keep only the requested field indices, or everything if unset.
        if not self._field_indices:
            return fields
        return [fields[i] for i in self._field_indices]

    def _read(self):
        # Read every file, split into samples, optionally split each sample
        # into (selected) fields, and collect everything in order.
        all_samples = []
        for filename in self._filenames:
            with io.open(filename, 'r', encoding=self._encoding) as fin:
                content = fin.read()
            samples = (s for s in self._sample_splitter(content) if not self._should_discard())
            if self._field_separator:
                samples = [self._field_selector(self._field_separator(s)) for s in samples]
            all_samples += samples
        return all_samples
class CorpusDataset(SimpleDataset):
    """Common text dataset that reads a whole corpus based on provided sample splitter
    and word tokenizer.

    The returned dataset includes samples, each of which can either be a list of tokens if tokenizer
    is specified, or otherwise a single string segment produced by the sample_splitter.

    Parameters
    ----------
    filename : str or list of str
        Path to the input text file or list of paths to the input text files.
    encoding : str, default 'utf8'
        File encoding format.
    flatten : bool, default False
        Whether to return all samples as flattened tokens. If True, each sample is a token.
    skip_empty : bool, default True
        Whether to skip the empty samples produced from sample_splitters. If False, `bos` and `eos`
        will be added in empty samples.
    sample_splitter : function, default str.splitlines
        A function that splits the dataset string into samples.
    tokenizer : function or None, default str.split
        A function that splits each sample string into list of tokens. If None, raw samples are
        returned according to `sample_splitter`.
    bos : str or None, default None
        The token to add at the begining of each sequence. If None, or if tokenizer is not
        specified, then nothing is added.
    eos : str or None, default None
        The token to add at the end of each sequence. If None, or if tokenizer is not
        specified, then nothing is added.
    """

    def __init__(self, filename, encoding='utf8', flatten=False, skip_empty=True,
                 sample_splitter=line_splitter, tokenizer=whitespace_splitter,
                 bos=None, eos=None):
        assert sample_splitter, 'sample_splitter must be specified.'
        # Normalize to a tuple so single and multiple files share one path.
        if not isinstance(filename, (tuple, list)):
            filename = (filename, )
        self._filenames = [os.path.expanduser(f) for f in filename]
        self._encoding = encoding
        self._flatten = flatten
        self._skip_empty = skip_empty
        self._sample_splitter = sample_splitter
        self._tokenizer = tokenizer
        self._bos = bos
        self._eos = eos
        super(CorpusDataset, self).__init__(self._read())

    def _read(self):
        # Read every file, split into stripped samples, then tokenize and
        # wrap with bos/eos (and optionally flatten) when a tokenizer is set.
        all_samples = []
        for filename in self._filenames:
            with io.open(filename, 'r', encoding=self._encoding) as fin:
                content = fin.read()
            samples = (s.strip() for s in self._sample_splitter(content))
            if self._tokenizer:
                samples = [
                    _corpus_dataset_process(self._tokenizer(s), self._bos, self._eos)
                    for s in samples if s or not self._skip_empty
                ]
                if self._flatten:
                    samples = concat_sequence(samples)
            elif self._skip_empty:
                # No tokenizer: only the empty-sample filter applies.
                samples = [s for s in samples if s]
            all_samples += samples
        return all_samples
class NumpyDataset(ArrayDataset):
    """A dataset wrapping over a Numpy binary (.npy, .npz) file.

    If the file is a .npy file, then a single numpy array is loaded.
    If the file is a .npz file with multiple arrays, then a list of
    numpy arrays are loaded, ordered by their key in the archive.

    Sparse matrix is not yet supported.

    Parameters
    ----------
    filename : str
        Path to the .npy or .npz file.

    Attributes
    ----------
    keys : list of str or None
        The list of keys loaded from the .npz file, or None for .npy files.

    Raises
    ------
    ValueError
        If the filename has neither a .npy nor a .npz extension.
    """

    def __init__(self, filename):
        keys = None
        data = []
        # Validate the extension before touching the file so an unsupported
        # name fails fast without opening anything.
        if filename.endswith('.npy'):
            data.append(np.load(filename))
        elif filename.endswith('.npz'):
            # np.load on an .npz returns a lazily-loaded NpzFile that keeps
            # the archive open; close it once all arrays are extracted to
            # avoid leaking the file handle.
            arrs = np.load(filename)
            try:
                keys = sorted(arrs.keys())
                for key in keys:
                    data.append(arrs[key])
            finally:
                arrs.close()
        else:
            raise ValueError('Unsupported extension: %s'%filename)
        self._keys = keys
        super(NumpyDataset, self).__init__(*data)

    @property
    def keys(self):
        """Sorted keys of the arrays loaded from a .npz file, or None."""
        return self._keys
| 37.837209 | 100 | 0.656628 |
6b409454b743d905f3a2d4f89526cbc984a2e830 | 433 | py | Python | Section05_Singleton/SingletonAllocator/Database.py | enriqueescobar-askida/Kinito.Python | e4c5521e771c4de0ceaf81776a4a61f7de01edb4 | [
"MIT"
] | 1 | 2020-10-20T07:41:51.000Z | 2020-10-20T07:41:51.000Z | Section05_Singleton/SingletonAllocator/Database.py | enriqueescobar-askida/Kinito.Python | e4c5521e771c4de0ceaf81776a4a61f7de01edb4 | [
"MIT"
] | null | null | null | Section05_Singleton/SingletonAllocator/Database.py | enriqueescobar-askida/Kinito.Python | e4c5521e771c4de0ceaf81776a4a61f7de01edb4 | [
"MIT"
] | null | null | null | class Database:
initialized = False
def __init__(self):
# self.id = random.randint(1,101)
# print('Generated an id of ', self.id)
# print('Loading database from file')
pass
_instance = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(Database, cls)\
.__new__(cls, *args, **kwargs)
return cls._instance
| 24.055556 | 49 | 0.56351 |
9c3bedcbd92627412ae402594d30dbfe8dfdca21 | 12,583 | py | Python | scipy/linalg/_decomp_ldl.py | smola/scipy | ff8b9d9e87a585a820846d7f459d6156ba621c4d | [
"BSD-3-Clause"
] | 2 | 2020-06-20T14:11:14.000Z | 2020-10-12T07:11:36.000Z | scipy/linalg/_decomp_ldl.py | smola/scipy | ff8b9d9e87a585a820846d7f459d6156ba621c4d | [
"BSD-3-Clause"
] | null | null | null | scipy/linalg/_decomp_ldl.py | smola/scipy | ff8b9d9e87a585a820846d7f459d6156ba621c4d | [
"BSD-3-Clause"
] | null | null | null | from __future__ import division, print_function, absolute_import
from warnings import warn
import numpy as np
from numpy import (atleast_2d, ComplexWarning, arange, zeros_like, imag, diag,
iscomplexobj, tril, triu, argsort, empty_like)
from .decomp import _asarray_validated
from .lapack import get_lapack_funcs, _compute_lwork
__all__ = ['ldl']
def ldl(A, lower=True, hermitian=True, overwrite_a=False, check_finite=True):
    """ Computes the LDLt or Bunch-Kaufman factorization of a symmetric/
    hermitian matrix.

    This function returns a block diagonal matrix D consisting blocks of size
    at most 2x2 and also a possibly permuted unit lower triangular matrix
    ``L`` such that the factorization ``A = L D L^H`` or ``A = L D L^T``
    holds. If ``lower`` is False then (again possibly permuted) upper
    triangular matrices are returned as outer factors.

    The permutation array can be used to triangularize the outer factors
    simply by a row shuffle, i.e., ``lu[perm, :]`` is an upper/lower
    triangular matrix. This is also equivalent to multiplication with a
    permutation matrix ``P.dot(lu)``, where ``P`` is a column-permuted
    identity matrix ``I[:, perm]``.

    Depending on the value of the boolean ``lower``, only upper or lower
    triangular part of the input array is referenced. Hence, a triangular
    matrix on entry would give the same result as if the full matrix is
    supplied.

    Parameters
    ----------
    a : array_like
        Square input array
    lower : bool, optional
        This switches between the lower and upper triangular outer factors of
        the factorization. Lower triangular (``lower=True``) is the default.
    hermitian : bool, optional
        For complex-valued arrays, this defines whether ``a = a.conj().T`` or
        ``a = a.T`` is assumed. For real-valued arrays, this switch has no
        effect.
    overwrite_a : bool, optional
        Allow overwriting data in ``a`` (may enhance performance). The default
        is False.
    check_finite : bool, optional
        Whether to check that the input matrices contain only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination) if the inputs do contain infinities or NaNs.

    Returns
    -------
    lu : ndarray
        The (possibly) permuted upper/lower triangular outer factor of the
        factorization.
    d : ndarray
        The block diagonal multiplier of the factorization.
    perm : ndarray
        The row-permutation index array that brings lu into triangular form.

    Raises
    ------
    ValueError
        If input array is not square.
    ComplexWarning
        If a complex-valued array with nonzero imaginary parts on the
        diagonal is given and hermitian is set to True.

    Examples
    --------
    Given an upper triangular array `a` that represents the full symmetric
    array with its entries, obtain `l`, 'd' and the permutation vector `perm`:

    >>> import numpy as np
    >>> from scipy.linalg import ldl
    >>> a = np.array([[2, -1, 3], [0, 2, 0], [0, 0, 1]])
    >>> lu, d, perm = ldl(a, lower=0) # Use the upper part
    >>> lu
    array([[ 0. ,  0. ,  1. ],
           [ 0. ,  1. , -0.5],
           [ 1. ,  1. ,  1.5]])
    >>> d
    array([[-5. ,  0. ,  0. ],
           [ 0. ,  1.5,  0. ],
           [ 0. ,  0. ,  2. ]])
    >>> perm
    array([2, 1, 0])
    >>> lu[perm, :]
    array([[ 1. ,  1. ,  1.5],
           [ 0. ,  1. , -0.5],
           [ 0. ,  0. ,  1. ]])
    >>> lu.dot(d).dot(lu.T)
    array([[ 2., -1.,  3.],
           [-1.,  2.,  0.],
           [ 3.,  0.,  1.]])

    Notes
    -----
    This function uses ``?SYTRF`` routines for symmetric matrices and
    ``?HETRF`` routines for Hermitian matrices from LAPACK. See [1]_ for
    the algorithm details.

    Depending on the ``lower`` keyword value, only lower or upper triangular
    part of the input array is referenced. Moreover, this keyword also defines
    the structure of the outer factors of the factorization.

    .. versionadded:: 1.1.0

    See also
    --------
    cholesky, lu

    References
    ----------
    .. [1] J.R. Bunch, L. Kaufman, Some stable methods for calculating
       inertia and solving symmetric linear systems, Math. Comput. Vol.31,
       1977. DOI: 10.2307/2005787

    """
    a = atleast_2d(_asarray_validated(A, check_finite=check_finite))
    if a.shape[0] != a.shape[1]:
        raise ValueError('The input array "a" should be square.')
    # Return empty arrays for empty square input
    if a.size == 0:
        return empty_like(a), empty_like(a), np.array([], dtype=int)

    n = a.shape[0]
    r_or_c = complex if iscomplexobj(a) else float

    # Get the LAPACK routine
    if r_or_c is complex and hermitian:
        s, sl = 'hetrf', 'hetrf_lwork'
        if np.any(imag(diag(a))):
            # Bug fix: the concatenated string literals were missing the
            # separating spaces ("diagonalare", "ofcomplex"), garbling the
            # warning text.
            warn('scipy.linalg.ldl():\nThe imaginary parts of the diagonal '
                 'are ignored. Use "hermitian=False" for factorization of '
                 'complex symmetric arrays.', ComplexWarning, stacklevel=2)
    else:
        s, sl = 'sytrf', 'sytrf_lwork'

    solver, solver_lwork = get_lapack_funcs((s, sl), (a,))
    lwork = _compute_lwork(solver_lwork, n, lower=lower)
    ldu, piv, info = solver(a, lwork=lwork, lower=lower,
                            overwrite_a=overwrite_a)
    if info < 0:
        raise ValueError('{} exited with the internal error "illegal value '
                         'in argument number {}". See LAPACK documentation '
                         'for the error codes.'.format(s.upper(), -info))

    # Convert LAPACK's ipiv encoding into explicit swaps and block sizes,
    # split the combined output into D and the triangular factor, then
    # derive the permutation that triangularizes the outer factor.
    swap_arr, pivot_arr = _ldl_sanitize_ipiv(piv, lower=lower)
    d, lu = _ldl_get_d_and_l(ldu, pivot_arr, lower=lower, hermitian=hermitian)
    lu, perm = _ldl_construct_tri_factor(lu, swap_arr, pivot_arr, lower=lower)

    return lu, d, perm
def _ldl_sanitize_ipiv(a, lower=True):
    """
    This helper function takes the rather strangely encoded permutation array
    returned by the LAPACK routines ?(HE/SY)TRF and converts it into
    regularized permutation and diagonal pivot size format.
    Since FORTRAN uses 1-indexing and LAPACK uses different start points for
    upper and lower formats there are certain offsets in the indices used
    below.
    Let's assume a result where the matrix is 6x6 and there are two 2x2
    and two 1x1 blocks reported by the routine. To ease the coding efforts,
    we still populate a 6-sized array and fill zeros as the following ::
        pivots = [2, 0, 2, 0, 1, 1]
    This denotes a diagonal matrix of the form ::
        [x x        ]
        [x x        ]
        [    x x    ]
        [    x x    ]
        [        x  ]
        [          x]
    In other words, we write 2 when the 2x2 block is first encountered and
    automatically write 0 to the next entry and skip the next spin of the
    loop. Thus, a separate counter or array appends to keep track of block
    sizes are avoided. If needed, zeros can be filtered out later without
    losing the block structure.
    Parameters
    ----------
    a : ndarray
        The permutation array ipiv returned by LAPACK
    lower : bool, optional
        The switch to select whether upper or lower triangle is chosen in
        the LAPACK call.
    Returns
    -------
    swap_ : ndarray
        The array that defines the row/column swap operations. For example,
        if row two is swapped with row four, the result is [0, 3, 2, 3].
    pivots : ndarray
        The array that defines the block diagonal structure as given above.
    """
    n = a.size
    swap_ = arange(n)
    pivots = zeros_like(swap_, dtype=int)
    skip_2x2 = False
    # Some upper/lower dependent offset values:
    # x = offset to the partner entry of a 2x2 block, y = where to record the
    # pivot size; range (s)tart, r(e)nd, r(i)ncrement select sweep direction.
    x, y, rs, re, ri = (1, 0, 0, n, 1) if lower else (-1, -1, n-1, -1, -1)
    for ind in range(rs, re, ri):
        # If previous spin belonged already to a 2x2 block
        if skip_2x2:
            skip_2x2 = False
            continue
        cur_val = a[ind]
        # Positive ipiv entry --> a 1x1 pivot block (FORTRAN 1-indexed).
        if cur_val > 0:
            if cur_val != ind+1:
                # Index value != array value --> permutation required
                swap_[ind] = swap_[cur_val-1]
            pivots[ind] = 1
        # Negative entry repeated at the partner offset --> a 2x2 pivot block;
        # LAPACK stores the same negative value in both positions of the pair.
        elif cur_val < 0 and cur_val == a[ind+x]:
            # first neg entry of 2x2 block identifier
            if -cur_val != ind+2:
                # Index value != array value --> permutation required
                swap_[ind+x] = swap_[-cur_val-1]
            pivots[ind+y] = 2
            skip_2x2 = True
        else:  # Doesn't make sense, give up
            raise ValueError('While parsing the permutation array '
                             'in "scipy.linalg.ldl", invalid entries '
                             'found. The array syntax is invalid.')
    return swap_, pivots
def _ldl_get_d_and_l(ldu, pivs, lower=True, hermitian=True):
    """
    Helper function to extract the diagonal and triangular matrices for
    LDL.T factorization.
    Parameters
    ----------
    ldu : ndarray
        The compact output returned by the LAPACK routing
    pivs : ndarray
        The sanitized array of {0, 1, 2} denoting the sizes of the pivots. For
        every 2 there is a succeeding 0.
    lower : bool, optional
        If set to False, upper triangular part is considered.
    hermitian : bool, optional
        If set to False a symmetric complex array is assumed.
    Returns
    -------
    d : ndarray
        The block diagonal matrix.
    lu : ndarray
        The upper/lower triangular matrix
    """
    is_c = iscomplexobj(ldu)
    # Start d as the plain diagonal; 2x2 off-diagonal entries are copied in
    # inside the loop below.
    d = diag(diag(ldu))
    n = d.shape[0]
    blk_i = 0  # block index
    # row/column offsets for selecting sub-, super-diagonal
    x, y = (1, 0) if lower else (0, 1)
    lu = tril(ldu, -1) if lower else triu(ldu, 1)
    # The triangular factor of an LDL.T factorization has a unit diagonal.
    diag_inds = arange(n)
    lu[diag_inds, diag_inds] = 1
    # Walk the nonzero pivot sizes; each entry is 1 or 2 (zeros were fillers).
    for blk in pivs[pivs != 0]:
        # increment the block index and check for 2s
        # if 2 then copy the off diagonals depending on uplo
        inc = blk_i + blk
        if blk == 2:
            d[blk_i+x, blk_i+y] = ldu[blk_i+x, blk_i+y]
            # If Hermitian matrix is factorized, the cross-offdiagonal element
            # should be conjugated.
            if is_c and hermitian:
                d[blk_i+y, blk_i+x] = ldu[blk_i+x, blk_i+y].conj()
            else:
                d[blk_i+y, blk_i+x] = ldu[blk_i+x, blk_i+y]
            # The off-diagonal entry belongs to d, not to the triangular factor.
            lu[blk_i+x, blk_i+y] = 0.
        blk_i = inc
    return d, lu
def _ldl_construct_tri_factor(lu, swap_vec, pivs, lower=True):
    """
    Helper function to construct explicit outer factors of LDL factorization.
    If lower is True the permuted factors are multiplied as L(1)*L(2)*...*L(k).
    Otherwise, the permuted factors are multiplied as L(k)*...*L(2)*L(1). See
    LAPACK documentation for more details.
    Parameters
    ----------
    lu : ndarray
        The triangular array that is extracted from LAPACK routine call with
        ones on the diagonals.
    swap_vec : ndarray
        The array that defines the row swapping indices. If the kth entry is m
        then rows k,m are swapped. Notice that the mth entry is not necessarily
        k to avoid undoing the swapping.
    pivs : ndarray
        The array that defines the block diagonal structure returned by
        _ldl_sanitize_ipiv().
    lower : bool, optional
        The boolean to switch between lower and upper triangular structure.
    Returns
    -------
    lu : ndarray
        The square outer factor which satisfies the L * D * L.T = A
    perm : ndarray
        The permutation vector that brings the lu to the triangular form
    Notes
    -----
    Note that the original argument "lu" is overwritten.
    """
    n = lu.shape[0]
    perm = arange(n)
    # Setup the reading order of the permutation matrix for upper/lower:
    # swaps must be applied in the reverse of the factorization order.
    rs, re, ri = (n-1, -1, -1) if lower else (0, n, 1)
    for ind in range(rs, re, ri):
        s_ind = swap_vec[ind]
        if s_ind != ind:
            # Column start and end positions; only the already-finalized part
            # of the factor participates in the swap.
            col_s = ind if lower else 0
            col_e = n if lower else ind+1
            # If we stumble upon a 2x2 block include both cols in the perm.
            if pivs[ind] == (0 if lower else 2):
                col_s += -1 if lower else 0
                col_e += 0 if lower else 1
            lu[[s_ind, ind], col_s:col_e] = lu[[ind, s_ind], col_s:col_e]
            perm[[s_ind, ind]] = perm[[ind, s_ind]]
    # argsort inverts the permutation so that lu[perm] is triangular.
    return lu, argsort(perm)
| 35.44507 | 79 | 0.608043 |
895b82a8fe953a79bcb72c96c50765c05d8eb6b1 | 1,812 | py | Python | tests/__mocks__.py | stphivos/bg-kube | 3936c2899f76b2f58df7a5f38fac741c638562cf | [
"MIT"
] | 8 | 2017-11-24T12:07:02.000Z | 2020-04-27T03:27:58.000Z | tests/__mocks__.py | stphivos/bg-kube | 3936c2899f76b2f58df7a5f38fac741c638562cf | [
"MIT"
] | 19 | 2017-11-14T23:35:31.000Z | 2022-03-08T22:50:02.000Z | tests/__mocks__.py | stphivos/bg-kube | 3936c2899f76b2f58df7a5f38fac741c638562cf | [
"MIT"
] | null | null | null | from random import randint, choice
from string import ascii_letters
from sys import maxsize
from mock import patch
# Convenience alias so callers can use ``smock.patch_object``; note the
# chained assignment to ``patch.object`` itself is a no-op rebind.
patch_object = patch.object = patch.object
class DictionaryObject(dict):
    """Dictionary whose keys can also be read and written as attributes.

    Missing keys read as ``None`` (mirroring ``dict.get``) instead of
    raising ``AttributeError``.
    """

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails; fall back to the
        # mapping, returning None for absent keys just like dict.get.
        return self.get(name)

    def __setattr__(self, name, value):
        self[name] = value
def get_random_int(min_value=-maxsize, max_value=maxsize):
    """Return a random integer in [min_value, max_value], inclusive."""
    return randint(min_value, max_value)
def get_random_str(min_length=1, max_length=100):
    """Return a random string of ASCII letters, with a random length
    drawn uniformly from [min_length, max_length]."""
    length = randint(min_length, max_length)
    return ''.join(choice(ascii_letters) for _ in range(length))
def get_values():
    """Sample key/value pairs covering ints, digit strings, embedded '='
    and surrounding whitespace — the shapes the parsers must handle."""
    return {
        'a': 1,
        'b': '2',
        'c': 'd',
        'e': 'f=g',
        'h': ' i ',
    }
def get_lines(values):
    """Render a mapping as ``KEY=VALUE`` strings, single-quoting values
    that are digit-only strings so they survive round-tripping."""
    rendered = []
    for key, val in values.items():
        if isinstance(val, str) and val.isdigit():
            val = "'{}'".format(val)
        rendered.append('{}={}'.format(key, val))
    return rendered
def get_options():
    """Return a canned set of bg-kube deployment options (attribute-style
    dict) used as a fixture by the tests."""
    return DictionaryObject(
        docker_machine_name='my_rest_api',
        cluster_zone='us-central1-a',
        cluster_name='my-us-cluster',
        image_name='gcr.io/project-id/my-rest-api',
        service_name='svc-rest-api',
        service_config='./config/services/public.yaml',
        deployment_name='dep-rest-api',
        deployment_config='./config/deployments/main.yaml',
        context='.',
        dockerfile='./Dockerfile',
        env_file='.env.prod',
        smoke_service_name='svc-rest-api-e2e',
        smoke_tests_command='pytest -s ./src/e2e',
        smoke_service_config='./config/services/smoke.yaml',
        db_migrations_job_config_seed='./config/jobs/db-migrate.yaml',
        kops_state_store='s3://todoapp-cluster-state-store'
    )
def get_named_resource(kind=None, name=None):
    """Build a minimal Kubernetes-style resource dict.

    Falsy ``kind``/``name`` (None or empty) are replaced with random
    strings, so every returned resource is fully populated.
    """
    resource_kind = kind or get_random_str()
    resource_name = name or get_random_str()
    return {
        'kind': resource_kind,
        'metadata': {'name': resource_name},
    }
| 26.26087 | 89 | 0.63245 |
3e5394af87436776a7cd48dadc7e2f1e1a16bf31 | 171 | py | Python | autofaiss/version.py | Evaia/autofaiss | ad164b7be30ddf9ce45ab616d31cb4365fe7f5ab | [
"Apache-2.0"
] | null | null | null | autofaiss/version.py | Evaia/autofaiss | ad164b7be30ddf9ce45ab616d31cb4365fe7f5ab | [
"Apache-2.0"
] | null | null | null | autofaiss/version.py | Evaia/autofaiss | ad164b7be30ddf9ce45ab616d31cb4365fe7f5ab | [
"Apache-2.0"
] | null | null | null | # pylint: disable=all
__version__ = "1.7.0"
__author__ = "Criteo"

# The three dotted components of the semantic version, split once.
MAJOR, MINOR, PATCH = __version__.split(".")
| 17.1 | 33 | 0.654971 |
19e1062d877f52ccdadc6a39ef874c9578b9f0d5 | 306 | py | Python | test/fontio.py | jposada202020/Adafruit_CircuitPython_Bitmap_Font | 1aeb6ef4f8936e7c595dce323bab79897118f1d2 | [
"Unlicense",
"MIT-0",
"MIT"
] | 14 | 2019-02-13T00:23:57.000Z | 2021-12-16T06:13:39.000Z | test/fontio.py | jposada202020/Adafruit_CircuitPython_Bitmap_Font | 1aeb6ef4f8936e7c595dce323bab79897118f1d2 | [
"Unlicense",
"MIT-0",
"MIT"
] | 39 | 2019-02-12T18:21:30.000Z | 2022-02-18T20:16:08.000Z | test/fontio.py | jposada202020/Adafruit_CircuitPython_Bitmap_Font | 1aeb6ef4f8936e7c595dce323bab79897118f1d2 | [
"Unlicense",
"MIT-0",
"MIT"
] | 18 | 2019-02-25T23:36:03.000Z | 2022-01-18T01:35:23.000Z | # SPDX-FileCopyrightText: 2021 ladyada for Adafruit Industries
#
# SPDX-License-Identifier: MIT
"""Implementation of minimal fontio subset for testing"""
import collections
# Minimal stand-in for CircuitPython's ``fontio.Glyph`` so the bitmap-font
# tests can run on CPython without the displayio/fontio modules.
Glyph = collections.namedtuple(
    "Glyph",
    ["bitmap", "tile_index", "width", "height", "dx", "dy", "shift_x", "shift_y"],
)
| 23.538462 | 82 | 0.702614 |
f6974479703166bf2703729797ea58949671536e | 723 | py | Python | tests/unit/docker/test_docker.py | abreu4/jina | d1d045e9e0933dffb3bd668cb9cfebab6cd52202 | [
"Apache-2.0"
] | 2 | 2021-04-22T16:59:02.000Z | 2021-04-22T17:14:32.000Z | tests/unit/docker/test_docker.py | abreu4/jina | d1d045e9e0933dffb3bd668cb9cfebab6cd52202 | [
"Apache-2.0"
] | 4 | 2020-09-01T17:47:27.000Z | 2021-04-16T23:11:57.000Z | tests/unit/docker/test_docker.py | abreu4/jina | d1d045e9e0933dffb3bd668cb9cfebab6cd52202 | [
"Apache-2.0"
] | null | null | null | from jina.docker.checker import is_error_message
def test_checker_is_error_message():
    """is_error_message must flag genuine build errors and ignore lines
    that merely contain error-like words."""
    # BUG FIX: the original list was missing a comma after the gcc line, so
    # Python's implicit string concatenation fused it with the pytest summary
    # line into a single entry — only three messages were actually tested.
    err_msg_list = [
        'HubIO@11[C]:ERROR: Command errored out with exit status 1: ...',
        'HubIO@11[C]: ERROR: Failed building wheel for ...',
        'HubIO@11[C]: gcc: error trying to exec : execvp: No such file or directory',
        'HubIO@11[W]:======================== 1 failed, 6 warnings in 1.05s ========================= '
    ]
    # Benign lines containing "error"-like substrings that must not match.
    non_err_msg_list = [
        'HubIO@11[C]: warnings.warn(error_info)',
        'HubIO@11[C]:Get:18 liberror-perl'
    ]
    for _err in err_msg_list:
        assert is_error_message(_err)
    for _non_err in non_err_msg_list:
        assert not is_error_message(_non_err)
0f4c4714eb2927e7d84dbb60619d19820619c0cd | 5,172 | py | Python | scripts/logreg_laplace_demo.py | karalleyna/pyprobml | 72195e46fdffc4418910e76d02e3d6469f4ce272 | [
"MIT"
] | null | null | null | scripts/logreg_laplace_demo.py | karalleyna/pyprobml | 72195e46fdffc4418910e76d02e3d6469f4ce272 | [
"MIT"
] | null | null | null | scripts/logreg_laplace_demo.py | karalleyna/pyprobml | 72195e46fdffc4418910e76d02e3d6469f4ce272 | [
"MIT"
] | null | null | null | # Author: Meduri Venkata Shivaditya
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm, multivariate_normal
import bayes_logistic # pip install bayes_logistic
import warnings
import pyprobml_utils as pml
warnings.filterwarnings("ignore", category=RuntimeWarning)
# Fix the RNG so every run reproduces the same synthetic dataset and plots.
np.random.seed(135)
#Creating data
# Two Gaussian point clouds in 2-D: class 1 around (1, 5), class 0 around
# (-5, 1); t holds the binary labels stacked in the same row order as X.
N = 30
D = 2
mu1 = np.hstack((np.ones((N,1)), 5 * np.ones((N, 1))))
mu2 = np.hstack((-5 * np.ones((N,1)), np.ones((N, 1))))
class1_std = 1
class2_std = 1.1
X_1 = np.add(class1_std*np.random.randn(N,2), mu1)
X_2 = np.add(2*class2_std*np.random.randn(N,2), mu2)
X = np.vstack((X_1,X_2))
t = np.vstack((np.ones((N,1)),np.zeros((N,1))))
#Plotting data
x_1, y_1 = X[np.where(t==1)[0]].T
x_2, y_2 = X[np.where(t==0)[0]].T
plt.figure(0)
plt.scatter(x_1,y_1,c = 'red', s=20, marker = 'o')
plt.scatter(x_2,y_2,c = 'blue', s = 20, marker = 'o')
#Plotting Predictions
# W enumerates a dense grid of weight vectors; the same grid doubles as
# Xgrid, the input locations at which predictions are evaluated later.
alpha = 100
Range = 8
step = 0.1
xx, yy = np.meshgrid(np.arange(-Range,Range,step),np.arange(-Range,Range,step))
# The grid is square, so both dimensions share the single size n.
[n,n] = xx.shape
W = np.hstack((xx.reshape((n*n, 1)),yy.reshape((n*n, 1))))
Xgrid = W
# Four hand-picked weight vectors whose decision boundaries are drawn below.
ws = np.array([[3, 1], [4, 2], [5, 3], [7, 3]])
col = ['black', 'red', 'green', 'blue']
for ii in range(ws.shape[0]):
    w = ws[ii][:]
    pred = 1.0/(1+np.exp(np.dot(-Xgrid,w)))
    plt.contour(xx, yy, pred.reshape((n, n)), 1, colors=col[ii])
plt.title("data")
pml.savefig("logreg_laplace_data.pdf", dpi = 300)
#Plot prior, likelihood, posterior
# Bernoulli log-likelihood of the data under every weight vector in W,
# plus a spherical Gaussian log-prior with variance alpha.
Xt = np.transpose(X)
f=np.dot(W,Xt)
log_prior = np.log(multivariate_normal.pdf(W, cov=(np.identity(D))*alpha))
log_like = np.dot(np.dot(W, Xt), t) - np.sum(np.log(1+np.exp(f)), 1).reshape((n*n,1))
log_joint = log_like.reshape((n*n,1)) + log_prior.reshape((n*n,1))
#Plotting log-prior
#plt.figure(1)
#plt.contour(xx, yy, -1*log_prior.reshape((n,n)), 30)
#plt.title("Log-Prior")
plt.figure(1)
plt.contour(xx, yy, -1*log_like.reshape((n,n)), 30)
plt.title("Log-Likelihood")
#Plotting points corresponding to chosen lines
for ii in range(0, ws.shape[0]):
    w = np.transpose(ws[ii, :])
    plt.annotate(str(ii+1), xy=(w[0], w[1]), color=col[ii])
# Maximum-likelihood weight vector over the grid; draw the ray through it.
j=np.argmax(log_like)
wmle = W[j, :]
slope = wmle[1] / wmle[0]
#plt.axline([wmle[0], wmle[1]], slope=slope)
plt.plot([0, 7.9], [0, 7.9*slope])
plt.grid()
pml.savefig("logreg_laplace_logLik.pdf", dpi = 300)
#Plotting the log posterior(Unnormalised
plt.figure(2)
plt.contour(xx,yy,-1*log_joint.reshape((n,n)), 30)
plt.title("Log-Unnormalised Posterior")
# MAP estimate over the grid (marked with a red dot in later figures too).
j2=np.argmax(log_joint)
wb = W[j2][:]
plt.scatter(wb[0], wb[1], c='red' , s = 100)
plt.grid()
pml.savefig("logreg_laplace_unnormalised_posterior.pdf", dpi = 300)
#Plotting the Laplace approximation to posterior
plt.figure(3)
#https://bayes-logistic.readthedocs.io/en/latest/usage.html
#Visit the website above to access the source code of bayes_logistic library
#parameter info : bayes_logistic.fit_bayes_logistic(y, X, wprior, H, weights=None, solver='Newton-CG', bounds=None, maxiter=100)
# NOTE(review): t.reshape((N*D)) only equals the 2*N label count because
# D == 2 here; reshape((2*N,)) would state the intent — confirm.
wfit, hfit = bayes_logistic.fit_bayes_logistic(t.reshape((N*D)), X, np.zeros(D), ((np.identity(D))*1/alpha), weights=None, solver='Newton-CG', bounds=None, maxiter=100)
co = np.linalg.inv(hfit)
#wfit represents the posterior parameters (MAP estimate)
#hfit represents the posterior Hessian (Hessian of negative log posterior evaluated at MAP parameters)
log_laplace_posterior = np.log(multivariate_normal.pdf(W, mean = wfit, cov=co))
plt.contour(xx, yy, -1*log_laplace_posterior.reshape((n,n)), 30)
plt.scatter(wb[0], wb[1], c='red' , s = 100)
plt.title("Laplace Approximation to Posterior")
plt.grid()
pml.savefig("logreg_laplace_posterior.pdf", dpi = 300)
#Plotting the predictive distribution for logistic regression
# Plug-in prediction: sigmoid applied at the single MAP weight vector.
plt.figure(5)
pred = 1.0/(1+np.exp(np.dot(-Xgrid,wfit)))
plt.contour(xx, yy, pred.reshape((n,n)), 30)
x_1, y_1 = X[np.where(t == 1)[0]].T
x_2, y_2 = X[np.where(t == 0)[0]].T
plt.scatter(x_1, y_1, c='red', s=20, marker='o')
plt.scatter(x_2, y_2, c = 'blue', s=40, marker = 'o')
plt.title("p(y=1|x, wMAP)")
pml.savefig("logreg_laplace_prediction_plugin.pdf", dpi=300)
#Decision boundary for sampled w
# Draw s posterior samples and overlay each sample's 0.5 decision contour;
# predm accumulates the sampled predictive probabilities for the MC average.
plt.figure(6)
plt.scatter(x_1, y_1, c='red', s=20, marker='o')
plt.scatter(x_2, y_2, c='blue', s=20, marker='o')
predm = np.zeros((n*n,1))
s = 100
for i in range(s):
    wsamp = np.random.multivariate_normal(mean = wfit, cov=co)
    pred = 1.0/(1+np.exp(np.dot(-Xgrid,wsamp)))
    predm = np.add(predm, pred.reshape((n*n, 1)))
    plt.contour(xx, yy, pred.reshape((n,n)), np.array([0.5]))
plt.title("Decision boundary for sampled w")
pml.savefig("logreg_laplace_prediction_samples.pdf", dpi=300)
#MC
plt.figure(7)
predm = predm/s
plt.contour(xx, yy, predm.reshape((n,n)), 30)
plt.scatter(x_1, y_1, c='red', s=20, marker='o')
plt.scatter(x_2, y_2, c='blue', s=20, marker='o')
plt.title("MC approx of p(y=1|x)")
pml.savefig("logreg_laplace_prediction_mc.pdf", dpi=300)
#Numerical
# Deterministic (probit-style) approximation provided by bayes_logistic.
plt.figure(8)
plt.scatter(x_1, y_1, c='red', s=20, marker='o')
plt.scatter(x_2, y_2, c='blue', s=20, marker='o')
pr = bayes_logistic.bayes_logistic_prob(Xgrid, wfit, hfit)
plt.contour(xx, yy, pr.reshape((n, n)), 30)
plt.title("Deterministic approx of p(y=1|x)")
pml.savefig("logreg_laplace_prediction_probit.pdf", dpi=300)
plt.show()
| 33.584416 | 168 | 0.684648 |
dd8b7e0205465a869d7f3815371010a99a372f22 | 4,722 | py | Python | tweb/utils/strings.py | marcohong/tweb | 194518f6969b9c0d73899b7e616bd93a02d93c16 | [
"MIT"
] | 2 | 2021-01-01T14:52:25.000Z | 2022-01-20T03:52:39.000Z | tweb/utils/strings.py | marcohong/tweb | 194518f6969b9c0d73899b7e616bd93a02d93c16 | [
"MIT"
] | null | null | null | tweb/utils/strings.py | marcohong/tweb | 194518f6969b9c0d73899b7e616bd93a02d93c16 | [
"MIT"
] | null | null | null | import os
import sys
import uuid
import time
import string
import random
import datetime
def get_start_shell() -> str:
    '''Return the absolute path of the script this process was started with.'''
    return os.path.abspath(sys.argv[0])
def get_root_path(path: str = None) -> str:
    '''
    Get project base path, default start shell dir.

    :param path: `<str>` candidate base path; honoured only if it exists
        on disk, otherwise the start-script directory is used.
    :return: `<str>` resolved base path
    '''
    # BUG FIX: previously both branches discarded ``path`` and always
    # returned the start-script directory, making the parameter dead code.
    if path and os.path.exists(path):
        return path
    return os.path.dirname(get_start_shell())
def get_real_path(path: str, root_path: str = None) -> str:
    '''
    Resolve ``path`` to an absolute filesystem path.

    Relative dot paths are canonicalised, ``~/`` is expanded to the user
    home, absolute paths pass through unchanged, and anything else is
    joined onto the project root. (``root_path`` is currently unused.)
    '''
    if path in ('.', '..') or path.startswith('../'):
        return os.path.realpath(path)
    if path.startswith('~/'):
        return path.replace('~', os.path.expanduser('~'))
    if path.startswith('/'):
        return path
    return os.path.join(get_root_path(), path)
def browser(user_agent: str, origin_agent: str) -> str:
    '''
    Extract a browser name from a lower-cased User-Agent string, falling
    back to the last token of the original (unmodified) header.
    '''
    for known in ('chrome', 'firefox', 'safari'):
        if known in user_agent:
            return known.capitalize()
    # Fall back to the final "Product/Version" token of the raw header.
    token = origin_agent.split()[-1]
    return token.split('/')[0] if '/' in token else token
def get_browser(user_agent: str) -> str:
    '''
    Best-effort browser name from a User-Agent header.
    '''
    origin_agent = user_agent
    user_agent = user_agent.lower()
    if not user_agent:
        return 'UnKnown'
    if 'windows' in user_agent:
        # Windows-specific engines; anything else falls through below.
        if 'trident' in user_agent:
            return 'IE'
        if 'edge' in user_agent:
            return 'Edge'
    elif 'mac os x' in user_agent or 'android' in user_agent:
        return browser(user_agent, origin_agent)
    return browser(user_agent, origin_agent)
def get_os(user_agent: str) -> str:
    '''
    Guess the client operating system from a User-Agent header.
    '''
    # Checked in order; first substring match wins (mirrors the original
    # if/elif priority: windows before mobile before generic linux).
    checks = (
        ('windows', 'Windows'),
        ('ipad', 'iPad'),
        ('iphone', 'iPhone'),
        ('mac os x', 'Mac OS X'),
        ('android', 'Android'),
        ('linux', 'Linux'),
    )
    lowered = user_agent.lower()
    for needle, label in checks:
        if needle in lowered:
            return label
    return 'UnKnown'
def is_chinese(word: str) -> bool:
    '''
    Return True if ``word`` contains at least one CJK unified ideograph
    (U+4E00..U+9FA5).

    :param word: text to inspect; ``bytes`` input is decoded as UTF-8
        (undecodable bytes ignored).
    '''
    if isinstance(word, bytes):
        # BUG FIX: the original used ``str(word)``, which renders bytes as
        # "b'\\xe4...'" escape text and therefore never matched a CJK char.
        word = word.decode('utf-8', errors='ignore')
    return any('\u4e00' <= ch <= '\u9fa5' for ch in word)
def req_is_json(request) -> bool:
    '''
    Check whether the request's Accept header asks for JSON.

    :param request: `<tornado.web.httputil.HTTPServerRequest>`
    :return: `<bool>` True if 'application/json' is accepted, else False
    '''
    accept = request.headers.get('Accept', '')
    return 'application/json' in accept
def get_file_name() -> str:
    '''
    Generate a unique file name.

    :return: 32-character hex form of a random UUID4.
    '''
    return uuid.uuid4().hex
def get_date(fmt: str = '%Y-%m-%d', days: int = 0) -> str:
    '''Today's local date, optionally offset by ``days``, formatted with ``fmt``.'''
    moment = datetime.datetime.now() + datetime.timedelta(days=days)
    return moment.strftime(fmt)
def get_now_date(fmt: str = '%Y-%m-%d') -> str:
    '''Current local date formatted with ``fmt``; returns a string.'''
    return datetime.datetime.now().strftime(fmt)
def get_now_time(fmt: bool = True) -> datetime.datetime:
    '''Current local time.

    NOTE(review): the return annotation is inaccurate — when ``fmt`` is
    True (the default) a formatted ``str`` is returned, not a datetime.
    '''
    if fmt:
        return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    return datetime.datetime.now()
def datetime_to_str(date: datetime.datetime,
                    fmt: str = '%Y-%m-%d %H:%M:%S') -> str:
    '''Format a datetime as a string using ``fmt``.'''
    return date.strftime(fmt)
def str_to_datetime(str_time: str) -> datetime.datetime:
    '''Parse ``'YYYY-MM-DD[ HH:MM:SS]'``; a bare date becomes midnight.'''
    has_time = ' ' in str_time or len(str_time) >= 11
    if not has_time:
        str_time = '{} 00:00:00'.format(str_time)
    return datetime.datetime.strptime(str_time, '%Y-%m-%d %H:%M:%S')
def get_timestamp(millisecond: bool = False):
    '''Current Unix time as an int — seconds, or milliseconds when requested.'''
    scale = 1000 if millisecond else 1
    return int(time.time() * scale)
def timestamp_to_time(timestamp: float) -> str:
    '''
    Convert a Unix timestamp to a local-time string 'YYYY-MM-DD HH:MM:SS'.
    '''
    return time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(timestamp))
def get_rand_digit(length) -> str:
    '''Random decimal number with exactly ``length`` digits, as a string.'''
    if length > 1:
        # No leading zero: draw from [10^(length-1), 10^length - 1].
        low = 10 ** (length - 1)
        high = 10 ** length - 1
    else:
        low, high = 0, 9
    return '{}'.format(random.randint(low, high))
def get_rand_str(length: int = 8) -> str:
    '''
    Random string of distinct characters drawn from A-Za-z0-9.

    :param length: requested length, capped at 62 (sampling is without
        replacement from the 62-character alphabet).
    '''
    length = min(length, 62)
    alphabet = string.ascii_letters + string.digits
    return ''.join(random.sample(alphabet, length))
def get_uuid(to_hex: bool = False) -> str:
    '''Random UUID4: 32-char hex when ``to_hex`` is True, dashed form otherwise.'''
    value = uuid.uuid4()
    return value.hex if to_hex else str(value)
| 24.722513 | 79 | 0.604405 |
0c2aacdc9df003776d2c0b67fd997bfd545e791b | 6,498 | py | Python | king_phisher/archive.py | 1ndy/king-phisher | 937706a22d17ca01428f0edee48b9d60a20b33d9 | [
"BSD-3-Clause"
] | 3 | 2018-12-17T02:54:18.000Z | 2021-07-07T15:46:04.000Z | king_phisher/archive.py | 1ndy/king-phisher | 937706a22d17ca01428f0edee48b9d60a20b33d9 | [
"BSD-3-Clause"
] | null | null | null | king_phisher/archive.py | 1ndy/king-phisher | 937706a22d17ca01428f0edee48b9d60a20b33d9 | [
"BSD-3-Clause"
] | 4 | 2017-09-14T03:02:40.000Z | 2019-06-25T02:58:50.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/archive.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import datetime
import io
import os
import tarfile
from king_phisher import its
from king_phisher import serializers
from king_phisher import version
def is_archive(file_path):
	"""
	Check if the specified file appears to be a valid archive file that can be
	opened with :py:class:`.ArchiveFile`.

	:param str file_path: The path to the file to check.
	:return: Whether or not the file looks like a compatible archive.
	:rtype: bool
	"""
	# NOTE(review): tarfile.is_tarfile raises (rather than returning False)
	# for paths that cannot be opened — confirm callers expect that.
	return tarfile.is_tarfile(file_path)
class ArchiveFile(object):
	"""
	An object representing a generic archive for storing information. The
	resulting archive file is a tarfile that can easily be opened and
	manipulated with external tools. This class also facilitates storing
	metadata with the archive. This metadata contains basic information such as
	the version of King Phisher that generated it, and a UTC timestamp of when
	it was created.
	"""
	# Name of the reserved member that stores the JSON metadata document.
	metadata_file_name = 'metadata.json'
	def __init__(self, file_name, mode, encoding='utf-8'):
		"""
		:param str file_name: The path to the file to open as an archive.
		:param str mode: The mode to open the file such as 'r' or 'w'.
		:param str encoding: The encoding to use for strings.
		"""
		# The archive is always bzip2-compressed regardless of the mode.
		self._mode = mode + ':bz2'
		self.encoding = encoding
		self.file_name = file_name
		# Seconds since the Unix epoch, used as the mtime of added members.
		epoch = datetime.datetime.utcfromtimestamp(0)
		self.mtime = (datetime.datetime.utcnow() - epoch).total_seconds()
		self._tar_h = tarfile.open(file_name, self._mode)
		# When reading, load any existing metadata; when writing, stamp the
		# creation time and generator version (written to disk on close()).
		if 'r' in mode and self.has_file(self.metadata_file_name):
			self.metadata = serializers.JSON.loads(self.get_data(self.metadata_file_name).decode(self.encoding))
		else:
			self.metadata = {}
		if 'w' in mode:
			self.metadata['timestamp'] = datetime.datetime.utcnow().isoformat()
			self.metadata['version'] = version.version
	def add_data(self, name, data):
		"""
		Add arbitrary data directly to the archive under the specified name.
		This allows data to be directly inserted into the archive without first
		writing it to a file or file like object.

		:param str name: The name of the destination file in the archive.
		:param data: The data to place into the archive.
		:type data: bytes, str
		"""
		# Normalize text to bytes under both Python 2 and 3.
		if its.py_v2 and isinstance(data, unicode):
			data = data.encode(self.encoding)
		elif its.py_v3 and isinstance(data, str):
			data = data.encode(self.encoding)
		# Stage the bytes in memory so tarfile can read them as a file object.
		pseudo_file = io.BytesIO()
		pseudo_file.write(data)
		tarinfo = tarfile.TarInfo(name=name)
		tarinfo.mtime = self.mtime
		tarinfo.size = pseudo_file.tell()
		pseudo_file.seek(os.SEEK_SET)
		self._tar_h.addfile(tarinfo=tarinfo, fileobj=pseudo_file)
	def add_file(self, name, file_path, recursive=True):
		"""
		Place a file or directory into the archive. If *file_path* is a
		directory, it's contents will be added recursively if *recursive* is
		True.

		:param str name: The name of the destination file in the archive.
		:param str file_path: The path to the file to add to the archive.
		:param bool recursive: Whether or not to add directory contents.
		"""
		self._tar_h.add(file_path, arcname=name, recursive=recursive)
	def close(self):
		"""Close the handle to the archive."""
		# Metadata is only serialized into the archive when it was opened for
		# writing, and only at close time so late edits are captured.
		if 'w' in self.mode:
			self.add_data(self.metadata_file_name, serializers.JSON.dumps(self.metadata, pretty=True))
		self._tar_h.close()
	@property
	def files(self):
		"""
		This property is a generator which yields tuples of two objects each
		where the first is the file name and the second is the file object. The
		metadata file is skipped.

		:return: A generator which yields all the contained file name and file objects.
		:rtype: tuple
		"""
		for name in self._tar_h.getnames():
			if name == self.metadata_file_name:
				continue
			yield name, self.get_file(name)
	@property
	def file_names(self):
		"""
		This property is a generator which yields the names of all of the
		contained files. The metadata file is skipped.

		:return: A generator which yields all the contained file names.
		:rtype: str
		"""
		for name in self._tar_h.getnames():
			if name == self.metadata_file_name:
				continue
			yield name
	def get_data(self, name):
		"""
		Return the data contained within the specified archive file.

		:param str name: The name of the source file in the archive.
		:return: The contents of the specified file.
		:rtype: bytes
		"""
		return self.get_file(name).read()
	def get_file(self, name):
		"""
		Return the specified file object from the archive.

		:param str name: The name of the source file in the archive.
		:return: The specified file.
		:rtype: file
		"""
		member = self._tar_h.getmember(name)
		return self._tar_h.extractfile(member)
	def has_file(self, name):
		"""
		Check if a file exists within archive.

		:param str name: The name of the member to look for.
		:return: Whether or not the file exists.
		:rtype: bool
		"""
		return name in self._tar_h.getnames()
	@property
	def mode(self):
		"""
		A read-only attribute representing the mode that the archive file was
		opened in (includes the ':bz2' compression suffix).
		"""
		return self._mode
| 34.020942 | 103 | 0.736996 |
ef80d4e3607a5f95db0ceb8076dc345de5fc3e63 | 391 | py | Python | src/contact/wsgi.py | avvypardhan/Contact-App | a1585f236299a505b7f4770d49a38f612eb3ab6f | [
"bzip2-1.0.6"
] | null | null | null | src/contact/wsgi.py | avvypardhan/Contact-App | a1585f236299a505b7f4770d49a38f612eb3ab6f | [
"bzip2-1.0.6"
] | 8 | 2019-12-04T23:04:46.000Z | 2022-02-10T07:21:33.000Z | src/contact/wsgi.py | avvypardhan/Contact-App | a1585f236299a505b7f4770d49a38f612eb3ab6f | [
"bzip2-1.0.6"
] | null | null | null | """
WSGI config for contact project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings unless the environment overrides it.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'contact.settings')
# Module-level WSGI callable that servers (gunicorn, mod_wsgi, ...) import.
application = get_wsgi_application()
| 23 | 78 | 0.785166 |
4f87e03637072614a1d6c990a61969b07496a6e4 | 22,946 | py | Python | util/secscan/api.py | dongboyan77/quay | 8018e5bd80f17e6d855b58b7d5f2792d92675905 | [
"Apache-2.0"
] | null | null | null | util/secscan/api.py | dongboyan77/quay | 8018e5bd80f17e6d855b58b7d5f2792d92675905 | [
"Apache-2.0"
] | null | null | null | util/secscan/api.py | dongboyan77/quay | 8018e5bd80f17e6d855b58b7d5f2792d92675905 | [
"Apache-2.0"
] | null | null | null | import os
import logging
from abc import ABCMeta, abstractmethod
from six import add_metaclass
from urlparse import urljoin
import requests
from data import model
from data.database import CloseForLongOperation, TagManifest, Image, Manifest, ManifestLegacyImage
from data.model.storage import get_storage_locations
from data.model.image import get_image_with_storage
from data.registry_model.datatypes import Manifest as ManifestDataType, LegacyImage
from util.abchelpers import nooper
from util.failover import failover, FailoverException
from util.secscan.validator import SecurityConfigValidator
from util.security.registry_jwt import generate_bearer_token, build_context_and_subject
from _init import CONF_DIR
TOKEN_VALIDITY_LIFETIME_S = 60 # Amount of time the security scanner has to call the layer URL
UNKNOWN_PARENT_LAYER_ERROR_MSG = "worker: parent layer is unknown, it must be processed first"
MITM_CERT_PATH = os.path.join(CONF_DIR, "mitm.cert")
DEFAULT_HTTP_HEADERS = {"Connection": "close"}
logger = logging.getLogger(__name__)
# Base class of the analyze-layer failure hierarchy defined below.
class AnalyzeLayerException(Exception):
    """ Exception raised when a layer fails to analyze due to a request issue. """
# NOTE(review): unlike the other analyze errors this derives from Exception,
# not AnalyzeLayerException — confirm callers catch it separately on purpose.
class AnalyzeLayerRetryException(Exception):
    """ Exception raised when a layer fails to analyze due to a request issue, and the request should
      be retried.
  """
class MissingParentLayerException(AnalyzeLayerException):
    """ Exception raised when the parent of the layer is missing from the security scanner. """
class InvalidLayerException(AnalyzeLayerException):
    """ Exception raised when the layer itself cannot be handled by the security scanner. """
class APIRequestFailure(Exception):
    """ Exception raised when there is a failure to conduct an API request. """
class Non200ResponseException(Exception):
    """ Exception raised when the upstream API returns a non-200 HTTP status code. """

    def __init__(self, response):
        # Keep the failed response so callers can inspect status and body.
        super(Non200ResponseException, self).__init__()
        self.response = response
# Relative endpoint templates on the scanner's HTTP API (presumably the
# Clair v1 API — confirm against the configured SECURITY_SCANNER_ENDPOINT).
_API_METHOD_INSERT = "layers"
_API_METHOD_GET_LAYER = "layers/%s"
_API_METHOD_DELETE_LAYER = "layers/%s"
_API_METHOD_MARK_NOTIFICATION_READ = "notifications/%s"
_API_METHOD_GET_NOTIFICATION = "notifications/%s"
_API_METHOD_PING = "metrics"
def compute_layer_id(layer):
    """ Returns the ID for the layer in the security scanner, formatted as
        "<docker_image_id>.<storage_uuid>", or None if the manifest has no
        associated legacy image. """
    # NOTE: this is temporary until we switch to Clair V3.
    if isinstance(layer, ManifestDataType):
        if layer._is_tag_manifest:
            # Legacy tag manifests reference their image via the tag row.
            layer = TagManifest.get(id=layer._db_id).tag.image
        else:
            manifest = Manifest.get(id=layer._db_id)
            try:
                layer = ManifestLegacyImage.get(manifest=manifest).image
            except ManifestLegacyImage.DoesNotExist:
                # No legacy image row exists for this manifest; no scanner ID.
                return None
    elif isinstance(layer, LegacyImage):
        layer = Image.get(id=layer._db_id)
    assert layer.docker_image_id
    assert layer.storage.uuid
    return "%s.%s" % (layer.docker_image_id, layer.storage.uuid)
class SecurityScannerAPI(object):
    """ Helper class for talking to the Security Scan service (usually Clair).

    Acts as a facade: depending on feature flags and config validity it
    delegates every attribute access to either a real implementation or a
    no-op stand-in, so callers never need to check whether scanning is on.
    """

    def __init__(
        self,
        config,
        storage,
        server_hostname=None,
        client=None,
        skip_validation=False,
        uri_creator=None,
        instance_keys=None,
    ):
        feature_enabled = config.get("FEATURE_SECURITY_SCANNER", False)
        has_valid_config = skip_validation
        # Unless validation is explicitly skipped, verify the endpoint config
        # before committing to the real implementation.
        if not skip_validation and feature_enabled:
            config_validator = SecurityConfigValidator(
                feature_enabled, config.get("SECURITY_SCANNER_ENDPOINT")
            )
            has_valid_config = config_validator.valid()

        if feature_enabled and has_valid_config:
            self.state = ImplementedSecurityScannerAPI(
                config,
                storage,
                server_hostname,
                client=client,
                uri_creator=uri_creator,
                instance_keys=instance_keys,
            )
        else:
            self.state = NoopSecurityScannerAPI()

    def __getattr__(self, name):
        # Forward everything to the selected backend; unknown attributes
        # resolve to None rather than raising AttributeError.
        return getattr(self.state, name, None)
@add_metaclass(ABCMeta)
class SecurityScannerAPIInterface(object):
    """ Helper class for talking to the Security Scan service (usually Clair). """
    # Abstract contract implemented by NoopSecurityScannerAPI and
    # ImplementedSecurityScannerAPI below.
    @abstractmethod
    def cleanup_layers(self, layers):
        """ Callback invoked by garbage collection to cleanup any layers that no longer
        need to be stored in the security scanner.
        """
        pass
    @abstractmethod
    def ping(self):
        """ Calls GET on the metrics endpoint of the security scanner to ensure it is running
        and properly configured. Returns the HTTP response.
        """
        pass
    @abstractmethod
    def delete_layer(self, layer):
        """ Calls DELETE on the given layer in the security scanner, removing it from
        its database.
        """
        pass
    @abstractmethod
    def analyze_layer(self, layer):
        """ Posts the given layer to the security scanner for analysis, blocking until complete.
        Returns the analysis version on success or raises an exception deriving from
        AnalyzeLayerException on failure. Callers should handle all cases of AnalyzeLayerException.
        """
        pass
    @abstractmethod
    def check_layer_vulnerable(self, layer_id, cve_name):
        """ Checks to see if the layer with the given ID is vulnerable to the specified CVE. """
        pass
    @abstractmethod
    def get_notification(self, notification_name, layer_limit=100, page=None):
        """ Gets the data for a specific notification, with optional page token.
        Returns a tuple of the data (None on failure) and whether to retry.
        """
        pass
    @abstractmethod
    def mark_notification_read(self, notification_name):
        """ Marks a security scanner notification as read. """
        pass
    @abstractmethod
    def get_layer_data(self, layer, include_features=False, include_vulnerabilities=False):
        """ Returns the layer data for the specified layer. On error, returns None. """
        pass
@nooper
class NoopSecurityScannerAPI(SecurityScannerAPIInterface):
    """ No-op version of the security scanner API. """
    # The @nooper decorator presumably supplies do-nothing implementations of
    # the abstract interface methods — TODO confirm against its definition.
    pass
class ImplementedSecurityScannerAPI(SecurityScannerAPIInterface):
    """ Helper class for talking to the Security Scan service (Clair). """
    # TODO refactor this to not take an app config, and instead just the things it needs as a config object
    def __init__(
        self, config, storage, server_hostname, client=None, uri_creator=None, instance_keys=None
    ):
        # App config dict; individual keys are read lazily by the helpers below.
        self._config = config
        self._instance_keys = instance_keys
        self._client = client
        self._storage = storage
        self._server_hostname = server_hostname
        self._default_storage_locations = config["DISTRIBUTED_STORAGE_PREFERENCE"]
        self._target_version = config.get("SECURITY_SCANNER_ENGINE_VERSION_TARGET", 2)
        self._uri_creator = uri_creator
    def _get_image_url_and_auth(self, image):
        """ Returns a tuple of the url and the auth header value that must be used
        to fetch the layer data itself. If the image can't be addressed, we return
        None.
        """
        if self._instance_keys is None:
            raise Exception("No Instance keys provided to Security Scanner API")
        path = model.storage.get_layer_path(image.storage)
        locations = self._default_storage_locations
        # Fall back to the storage engine's own location list when the layer is
        # not present in the preferred locations.
        if not self._storage.exists(locations, path):
            locations = get_storage_locations(image.storage.uuid)
            if not locations or not self._storage.exists(locations, path):
                logger.warning(
                    "Could not find a valid location to download layer %s out of %s",
                    compute_layer_id(image),
                    locations,
                )
                return None, None
        uri = self._storage.get_direct_download_url(locations, path)
        auth_header = None
        if uri is None:
            # Use the registry API instead, with a signed JWT giving access
            repo_name = image.repository.name
            namespace_name = image.repository.namespace_user.username
            repository_and_namespace = "/".join([namespace_name, repo_name])
            # Generate the JWT which will authorize this
            audience = self._server_hostname
            context, subject = build_context_and_subject()
            access = [
                {"type": "repository", "name": repository_and_namespace, "actions": ["pull"],}
            ]
            auth_token = generate_bearer_token(
                audience, subject, context, access, TOKEN_VALIDITY_LIFETIME_S, self._instance_keys
            )
            auth_header = "Bearer " + auth_token
            uri = self._uri_creator(repository_and_namespace, image.storage.content_checksum)
        return uri, auth_header
    def _new_analyze_request(self, layer):
        """ Create the request body to submit the given layer for analysis. If the layer's URL cannot
        be found, returns None.
        """
        layer_id = compute_layer_id(layer)
        if layer_id is None:
            return None
        url, auth_header = self._get_image_url_and_auth(layer)
        if url is None:
            return None
        layer_request = {
            "Name": layer_id,
            "Path": url,
            "Format": "Docker",
        }
        if auth_header is not None:
            layer_request["Headers"] = {
                "Authorization": auth_header,
            }
        # Link to the parent layer so the scanner can resolve the full chain.
        if layer.parent is not None:
            if layer.parent.docker_image_id and layer.parent.storage.uuid:
                layer_request["ParentName"] = compute_layer_id(layer.parent)
        return {
            "Layer": layer_request,
        }
    def cleanup_layers(self, layers):
        """ Callback invoked by garbage collection to cleanup any layers that no longer
        need to be stored in the security scanner.
        """
        for layer in layers:
            self.delete_layer(layer)
    def ping(self):
        """ Calls GET on the metrics endpoint of the security scanner to ensure it is running
        and properly configured. Returns the HTTP response.
        """
        try:
            return self._call("GET", _API_METHOD_PING)
        except requests.exceptions.Timeout as tie:
            logger.exception("Timeout when trying to connect to security scanner endpoint")
            # NOTE(review): `.message` is a Python 2-only exception attribute and
            # would raise AttributeError on Python 3 — TODO confirm target runtime.
            msg = "Timeout when trying to connect to security scanner endpoint: %s" % tie.message
            raise Exception(msg)
        except requests.exceptions.ConnectionError as ce:
            logger.exception("Connection error when trying to connect to security scanner endpoint")
            msg = (
                "Connection error when trying to connect to security scanner endpoint: %s"
                % ce.message
            )
            raise Exception(msg)
        except (requests.exceptions.RequestException, ValueError) as ve:
            logger.exception("Exception when trying to connect to security scanner endpoint")
            msg = "Exception when trying to connect to security scanner endpoint: %s" % ve
            raise Exception(msg)
    def delete_layer(self, layer):
        """ Calls DELETE on the given layer in the security scanner, removing it from
        its database.

        Returns True on success, False on failure, and None when the layer has
        no scanner ID.
        """
        layer_id = compute_layer_id(layer)
        if layer_id is None:
            return None
        # NOTE: We are adding an extra check here for the time being just to be sure we're
        # not hitting any overlap.
        docker_image_id, layer_storage_uuid = layer_id.split(".")
        if get_image_with_storage(docker_image_id, layer_storage_uuid):
            # Another image still shares this ID/storage pair; do not delete.
            logger.warning("Found shared Docker ID and storage for layer %s", layer_id)
            return False
        try:
            self._call("DELETE", _API_METHOD_DELETE_LAYER % layer_id)
            return True
        except Non200ResponseException:
            return False
        except requests.exceptions.RequestException:
            logger.exception("Failed to delete layer: %s", layer_id)
            return False
    def analyze_layer(self, layer):
        """ Posts the given layer to the security scanner for analysis, blocking until complete.
        Returns the analysis version on success or raises an exception deriving from
        AnalyzeLayerException on failure. Callers should handle all cases of AnalyzeLayerException.
        """
        # Helper: parse a scanner response as JSON or abort the analysis.
        def _response_json(request, response):
            try:
                return response.json()
            except ValueError:
                logger.exception(
                    "Failed to decode JSON when analyzing layer %s", request["Layer"]["Name"]
                )
                raise AnalyzeLayerException
        request = self._new_analyze_request(layer)
        if not request:
            logger.error("Could not build analyze request for layer %s", layer.id)
            raise AnalyzeLayerException
        logger.info("Analyzing layer %s", request["Layer"]["Name"])
        try:
            response = self._call("POST", _API_METHOD_INSERT, body=request)
        except requests.exceptions.Timeout:
            # Timeouts and connection errors are considered transient -> retry.
            logger.exception("Timeout when trying to post layer data response for %s", layer.id)
            raise AnalyzeLayerRetryException
        except requests.exceptions.ConnectionError:
            logger.exception(
                "Connection error when trying to post layer data response for %s", layer.id
            )
            raise AnalyzeLayerRetryException
        except (requests.exceptions.RequestException) as re:
            logger.exception("Failed to post layer data response for %s: %s", layer.id, re)
            raise AnalyzeLayerException
        except Non200ResponseException as ex:
            # NOTE(review): `.get("Error")` may return None when the "Error" key
            # is absent, which would make the chained `.get("Message", "")` raise
            # AttributeError — TODO confirm the scanner always returns "Error".
            message = _response_json(request, ex.response).get("Error").get("Message", "")
            logger.warning(
                "A warning event occurred when analyzing layer %s (status code %s): %s",
                request["Layer"]["Name"],
                ex.response.status_code,
                message,
            )
            # 400 means the layer could not be analyzed due to a bad request.
            if ex.response.status_code == 400:
                if message == UNKNOWN_PARENT_LAYER_ERROR_MSG:
                    raise MissingParentLayerException(
                        "Bad request to security scanner: %s" % message
                    )
                else:
                    logger.exception("Got non-200 response for analyze of layer %s", layer.id)
                    raise AnalyzeLayerException("Bad request to security scanner: %s" % message)
            # 422 means that the layer could not be analyzed:
            # - the layer could not be extracted (might be a manifest or an invalid .tar.gz)
            # - the layer operating system / package manager is unsupported
            elif ex.response.status_code == 422:
                raise InvalidLayerException
            # Otherwise, it is some other error and we should retry.
            raise AnalyzeLayerRetryException
        # Return the parsed API version.
        return _response_json(request, response)["Layer"]["IndexedByVersion"]
    def check_layer_vulnerable(self, layer_id, cve_name):
        """ Checks to see if the layer with the given ID is vulnerable to the specified CVE. """
        layer_data = self._get_layer_data(layer_id, include_vulnerabilities=True)
        if layer_data is None or "Layer" not in layer_data or "Features" not in layer_data["Layer"]:
            return False
        # Scan every feature's vulnerability list for the requested CVE name.
        for feature in layer_data["Layer"]["Features"]:
            for vuln in feature.get("Vulnerabilities", []):
                if vuln["Name"] == cve_name:
                    return True
        return False
    def get_notification(self, notification_name, layer_limit=100, page=None):
        """ Gets the data for a specific notification, with optional page token.
        Returns a tuple of the data (None on failure) and whether to retry.
        """
        try:
            params = {"limit": layer_limit}
            if page is not None:
                params["page"] = page
            response = self._call(
                "GET", _API_METHOD_GET_NOTIFICATION % notification_name, params=params
            )
            json_response = response.json()
        except requests.exceptions.Timeout:
            logger.exception("Timeout when trying to get notification for %s", notification_name)
            return None, True
        except requests.exceptions.ConnectionError:
            logger.exception(
                "Connection error when trying to get notification for %s", notification_name
            )
            return None, True
        except (requests.exceptions.RequestException, ValueError):
            logger.exception("Failed to get notification for %s", notification_name)
            return None, False
        except Non200ResponseException as ex:
            # 404/400 mean the notification is gone or malformed: do not retry.
            return None, ex.response.status_code != 404 and ex.response.status_code != 400
        return json_response, False
    def mark_notification_read(self, notification_name):
        """ Marks a security scanner notification as read. """
        try:
            self._call("DELETE", _API_METHOD_MARK_NOTIFICATION_READ % notification_name)
            return True
        except Non200ResponseException:
            return False
        except requests.exceptions.RequestException:
            logger.exception("Failed to mark notification as read: %s", notification_name)
            return False
    def get_layer_data(self, layer, include_features=False, include_vulnerabilities=False):
        """ Returns the layer data for the specified layer. On error, returns None.

        NOTE(review): `_get_layer_data` can also raise APIRequestFailure for
        non-404 failures, so "returns None" is not the only error path.
        """
        layer_id = compute_layer_id(layer)
        if layer_id is None:
            return None
        return self._get_layer_data(layer_id, include_features, include_vulnerabilities)
    def _get_layer_data(self, layer_id, include_features=False, include_vulnerabilities=False):
        # NOTE(review): when both flags are True, the second assignment replaces
        # the first, so only {"vulnerabilities": True} is sent — confirm intended.
        params = {}
        if include_features:
            params = {"features": True}
        if include_vulnerabilities:
            params = {"vulnerabilities": True}
        try:
            response = self._call("GET", _API_METHOD_GET_LAYER % layer_id, params=params)
            logger.debug(
                "Got response %s for vulnerabilities for layer %s", response.status_code, layer_id
            )
            try:
                return response.json()
            except ValueError:
                logger.exception("Failed to decode response JSON")
                return None
        except Non200ResponseException as ex:
            logger.debug(
                "Got failed response %s for vulnerabilities for layer %s",
                ex.response.status_code,
                layer_id,
            )
            if ex.response.status_code == 404:
                # Unknown layer: treated as "no data" rather than a failure.
                return None
            else:
                logger.error(
                    "downstream security service failure: status %d, text: %s",
                    ex.response.status_code,
                    ex.response.text,
                )
                if ex.response.status_code // 100 == 5:
                    raise APIRequestFailure("Downstream service returned 5xx")
                else:
                    raise APIRequestFailure("Downstream service returned non-200")
        except requests.exceptions.Timeout:
            logger.exception(
                "API call timed out for loading vulnerabilities for layer %s", layer_id
            )
            raise APIRequestFailure("API call timed out")
        except requests.exceptions.ConnectionError:
            logger.exception("Connection error for loading vulnerabilities for layer %s", layer_id)
            raise APIRequestFailure("Could not connect to security service")
        except requests.exceptions.RequestException:
            logger.exception("Failed to get layer data response for %s", layer_id)
            raise APIRequestFailure()
    def _request(self, method, endpoint, path, body, params, timeout):
        """ Issues an HTTP request to the security endpoint. """
        url = _join_api_url(endpoint, self._config.get("SECURITY_SCANNER_API_VERSION", "v1"), path)
        # Requests are routed through a local proxy (presumably the jwtproxy
        # signer sidecar, given the config key and MITM cert) — confirm deployment.
        signer_proxy_url = self._config.get("JWTPROXY_SIGNER", "localhost:8081")
        logger.debug("%sing security URL %s", method.upper(), url)
        resp = self._client.request(
            method,
            url,
            json=body,
            params=params,
            timeout=timeout,
            verify=MITM_CERT_PATH,
            headers=DEFAULT_HTTP_HEADERS,
            proxies={"https": "https://" + signer_proxy_url, "http": "http://" + signer_proxy_url},
        )
        # Anything outside the 2xx range is surfaced as an exception.
        if resp.status_code // 100 != 2:
            raise Non200ResponseException(resp)
        return resp
    def _call(self, method, path, params=None, body=None):
        """ Issues an HTTP request to the security endpoint handling the logic of using an alternative
        BATCH endpoint for non-GET requests and failover for GET requests.
        """
        timeout = self._config.get("SECURITY_SCANNER_API_TIMEOUT_SECONDS", 1)
        endpoint = self._config["SECURITY_SCANNER_ENDPOINT"]
        with CloseForLongOperation(self._config):
            # If the request isn't a read, attempt to use a batch stack and do not fail over.
            if method != "GET":
                if self._config.get("SECURITY_SCANNER_ENDPOINT_BATCH") is not None:
                    endpoint = self._config["SECURITY_SCANNER_ENDPOINT_BATCH"]
                    timeout = (
                        self._config.get("SECURITY_SCANNER_API_BATCH_TIMEOUT_SECONDS") or timeout
                    )
                return self._request(method, endpoint, path, body, params, timeout)
            # The request is read-only and can failover.
            all_endpoints = [endpoint] + self._config.get(
                "SECURITY_SCANNER_READONLY_FAILOVER_ENDPOINTS", []
            )
            # Each entry is an (args, kwargs) pair consumed by the @failover
            # decorator on _failover_read_request.
            return _failover_read_request(
                *[
                    ((self._request, endpoint, path, body, params, timeout), {})
                    for endpoint in all_endpoints
                ]
            )
def _join_api_url(endpoint, api_version, path):
pathless_url = urljoin(endpoint, "/" + api_version) + "/"
return urljoin(pathless_url, path)
@failover
def _failover_read_request(request_fn, endpoint, path, body, params, timeout):
    """ This function auto-retries read-only requests until they return a 2xx status code. """
    try:
        return request_fn("GET", endpoint, path, body, params, timeout)
    except Non200ResponseException as non_200:
        # Signal the @failover decorator to try the next endpoint.
        raise FailoverException(non_200)
    except requests.exceptions.RequestException as request_error:
        raise FailoverException(request_error)
| 40.115385 | 107 | 0.641114 |
91e14d81bcf5b1a4279eca1cb8e64dcd22e2800a | 12,198 | py | Python | DataScience/Analytics/prototype/automatic_testing.py | Yasir326/address-index-data | f95da1f5ecda911d5d5a83ce396b33837b629bdd | [
"MIT"
] | 13 | 2016-11-30T16:52:59.000Z | 2021-03-26T23:49:41.000Z | DataScience/Analytics/prototype/automatic_testing.py | Yasir326/address-index-data | f95da1f5ecda911d5d5a83ce396b33837b629bdd | [
"MIT"
] | 84 | 2016-11-17T10:46:54.000Z | 2022-02-24T09:09:31.000Z | DataScience/Analytics/prototype/automatic_testing.py | Yasir326/address-index-data | f95da1f5ecda911d5d5a83ce396b33837b629bdd | [
"MIT"
] | 7 | 2019-01-26T10:43:25.000Z | 2022-02-24T08:53:54.000Z | #!/usr/bin/env python
"""
ONS Address Index - Automatic Testing of Different Datasets
===========================================================
A simple wrapper to call all independent address linking datasets in serial.
It could be parallelised trivially; however, the linking code is memory hungry,
so the node would need sufficient memory to run the linkers concurrently.
Running
-------
After all requirements are satisfied, the script can be invoked using CPython interpreter::
python automatic_testing.py
Requirements
------------
:requires: pandas (tested with 0.19.1)
:requires: matplotlib (tested with 1.5.3)
:requires: sqlalchemy (tested with 1.1.4)
:requires: addressLinking (and all the requirements within it)
Author
------
:author: Sami Niemi (sami.niemi@valtech.co.uk)
Version
-------
:version: 0.8
:date: 22-Feb-2017
"""
import os
import datetime
import sqlite3
import Analytics.prototype.welshAddresses as wa
import Analytics.prototype.welshAddressesSet2 as wa2
import Analytics.prototype.welshAddressesSet3 as wa3
import Analytics.prototype.landRegistryAddresses as lr
import Analytics.prototype.edgeCaseAddresses as ec
import Analytics.prototype.patientRecordAddresses as pr
import Analytics.prototype.lifeEventsAddresses as le
import Analytics.prototype.companiesHouseAddresses as ch
import Analytics.prototype.businessIndexAddresses as bi
import Analytics.prototype.CQCAddresses as cq
import pandas as pd
import matplotlib.pyplot as plt
from sqlalchemy import create_engine
from Analytics.linking import addressLinking
# set global location variable that is platform specific so that there is no need to make code changes
# The hostname selects the data/output paths; `local` disables tests that can
# only run on the servers (see run_all_datasets).
if 'Pro.local' in os.uname().nodename:
    # developer laptop (macOS)
    ABpath = '/Users/saminiemi/Projects/ONS/AddressIndex/data/ADDRESSBASE/'
    outpath = '/Users/saminiemi/Projects/ONS/AddressIndex/linkedData/'
    inputPath = '/Users/saminiemi/Projects/ONS/AddressIndex/data/'
    local = True
elif 'cdhut-d03-' in os.uname().nodename:
    # analysis server
    ABpath = '/opt/scratch/AddressIndex/AddressBase/'
    outpath = '/opt/scratch/AddressIndex/Results/'
    inputPath = '/opt/scratch/AddressIndex/TestData/'
    local = False
elif 'localhost.' in os.uname().nodename:
    # alternative server environment
    ABpath = '/home/niemis/AddressIndex/AddressBase/'
    outpath = '/home/niemis/AddressIndex/Results/'
    inputPath = '/home/niemis/AddressIndex/TestData/'
    local = False
else:
    # unknown host: refuse to run rather than guessing paths
    raise ConnectionError('ERROR: cannot access AddressBase or connect to the SQLite3 database')
def run_all_datasets():
    """
    Run every address linking test dataset one after another.

    :return: None
    """
    linker_kwargs = dict(ABpath=ABpath, outpath=outpath, inputPath=inputPath)
    print('Running Edge Case addresses test...')
    ec.run_edge_case_linker(**linker_kwargs)
    print('Running Business Index test...')
    bi.run_business_index_linker(**linker_kwargs)
    # Patient record and life event data are only available on the servers.
    if local:
        print('Cannot run Patient Records test locally...')
    else:
        print('Running Patient Records addresses test...')
        pr.run_patient_record_address_linker(**linker_kwargs)
    if local:
        print('Cannot run Life Events test locally...')
    else:
        print('Running Life Events test locally...')
        le.run_life_events_linker(**linker_kwargs)
    print('Running Welsh addresses test...')
    wa.run_welsh_address_linker(**linker_kwargs)
    print('Running Welsh addresses second test set...')
    wa2.run_welsh_address_linker(**linker_kwargs)
    print('Running Welsh addresses third test set...')
    wa3.run_welsh_address_linker(**linker_kwargs)
    print('Running Land Registry addresses test...')
    lr.run_land_registry_linker(**linker_kwargs)
    print('Running Companies House test...')
    ch.run_companies_house_linker(**linker_kwargs)
    print('Running CQC test...')
    cq.run_CQC_address_linker(**linker_kwargs)
def _load_welsh_data():
    """
    Load the Welsh address data together with the prototype and SAS linking
    results, and join everything into a single frame keyed on ID.

    :return: a single data frame containing original data and attached UPRNs
    :rtype: pandas.DataFrame
    """
    # original data with the previously attached UPRNs
    original = pd.read_csv(outpath + 'WelshGovernmentData21Nov2016.csv',
                           usecols=['ID', 'UPRNs_matched_to_date'])
    original = original.rename(columns={'UPRNs_matched_to_date': 'UPRN_ORIG'})
    # prototype linked data
    prototype = pd.read_csv(outpath + 'WelshGov_matched.csv', usecols=['ID', 'UPRN'])
    prototype = prototype.rename(columns={'UPRN': 'UPRN_PROTO'})
    # SAS code (PG) results
    sas = pd.read_csv(outpath + 'Paul_matches_with_address_text_welshGov.csv',
                      usecols=['UID', 'UPRN'])
    sas = sas.rename(columns={'UID': 'ID', 'UPRN': 'UPRN_SAS'})
    # left-join everything on the unique identifier
    return original.merge(prototype, how='left', on='ID').merge(sas, how='left', on='ID')
def _compute_welsh_performance(df, methods=('UPRN_ORIG', 'UPRN_PROTO', 'UPRN_SAS')):
"""
Compute performance for the Welsh dataset using SAS code UPRNs as a reference.
:param df: dataframe containing UPRNs of methods as columns
:type df: pandas.DataFrame
:param methods: a tuple listing methods to analyse
:type methods: tuple
:return: results of the performance computations
:rtype: dict
"""
# simple performance metrics that can be computed directly from the data frame and dummies
msk = df['UPRN_PROTO'].isnull()
addresses = len(df.index)
linked = len(df.loc[~msk].index)
not_linked = len(df.loc[msk].index)
msk = df['UPRN_SAS'].isnull()
withUPRN = len(df.loc[~msk].index)
correct = -1
false_positive = -1
new_UPRNs = -1
# iterate over the possible method combinations - capture relevant information
for method1 in methods:
for method2 in methods:
if method1 == 'UPRN_SAS' and method2 == 'UPRN_PROTO':
agree = df[method1] == df[method2]
nagree = len(df.loc[agree].index)
msk = (~df[method1].isnull()) & (~df[method2].isnull())
disagree = df.loc[msk, method1] != df.loc[msk, method2]
ndisagree = len(df.loc[msk & disagree].index)
msk = (df[method1].isnull()) & (~df[method2].isnull())
nmethod2only = len(df.loc[msk].index)
correct = nagree
false_positive = ndisagree
new_UPRNs = nmethod2only
results = dict(addresses=addresses, correct=correct, false_positive=false_positive, linked=linked,
new_UPRNs=new_UPRNs, not_linked=not_linked, withUPRN=withUPRN)
return results
def compute_performance():
    """
    Compute additional performance metrics for the Welsh dataset (some datasets
    have multiple UPRNs attached or UPRNs attached later) and store them in the
    results database.

    :return: None
    """
    welsh_data = _load_welsh_data()
    # derive the metrics and annotate them with run metadata
    metrics = _compute_welsh_performance(welsh_data, methods=('UPRN_PROTO', 'UPRN_SAS'))
    metrics.update(code_version=addressLinking.__version__,
                   dataset='WelshGovernmentData21Nov2016.csv',
                   date=datetime.datetime.now(),
                   name='WelshGovSAS')
    frame = pd.DataFrame.from_records([metrics])
    # append the single-row frame to the SQLite results table
    with sqlite3.connect(outpath + 'AddressLinkingResults.sqlite') as connection:
        frame.to_sql('results', connection, index=False, if_exists='append')
def _get_data_from_db(sql):
    """
    Execute a query against the results database and return the rows.

    :param sql: sql query to execute to pull the data
    :type sql: str

    :return: queried data
    :rtype: pandas.DataFrame
    """
    # SQLAlchemy connection string: sqlite file living in the output folder
    engine = create_engine('sqlite:///' + outpath + 'AddressLinkingResults.sqlite')
    return pd.read_sql_query(sql, engine)
def _create_figures(plot_data, testset_name, columns_to_plot):
    """
    Create two figures showing the performance as a function of time: one with
    a sub-plot per metric and one overlaying all metrics on a single pair of axes.

    :param plot_data: dataframe containing the column date and those to be plotted
    :type plot_data: pandas.DataFrame
    :param testset_name: name of the test dataset, used as a part of the output file name
    :type testset_name: str
    :param columns_to_plot: a list of names of the columns storing the performance metrics to be plotted
    :type columns_to_plot: list

    :return: None
    """
    margin = datetime.timedelta(days=1)
    date_limits = (plot_data['date'].min() - margin, plot_data['date'].max() + margin)
    # figure 1: one sub-plot per metric on a 3x2 grid
    plot_data.plot(x='date', y=columns_to_plot, lw=2, subplots=True, sharex=True,
                   layout=(3, 2), figsize=(12, 18), fontsize=16, sort_columns=True,
                   color='m', xlim=date_limits)
    plt.tight_layout()
    plt.savefig(outpath + testset_name + 'results.png')
    plt.close()
    # figure 2: all metrics overlaid, with the y-range padded by one unit
    value_limits = (plot_data[columns_to_plot].min(axis=0).min() - 1,
                    plot_data[columns_to_plot].max(axis=0).max() + 1)
    plot_data.plot(x='date', y=columns_to_plot, lw=2, figsize=(12, 18), fontsize=16,
                   sort_columns=True, xlim=date_limits, ylim=value_limits)
    plt.tight_layout()
    plt.savefig(outpath + testset_name + 'results2.png')
    plt.close()
def _create_precision_recall_figure(plot_data, testset_name):
    """
    Create a figure showing precision, recall, and F1-score as a function of time.

    :param plot_data: dataframe containing the column date and those to be plotted
    :type plot_data: pandas.DataFrame
    :param testset_name: name of the test dataset, used as a part of the output file name
    :type testset_name: str

    :return: None
    """
    metric_columns = ['precision', 'recall', 'f1score']
    # the derived metric columns are added to the frame in place
    plot_data['precision'] = plot_data['correct'] / (plot_data['correct'] + plot_data['false_positive'])
    plot_data['recall'] = plot_data['correct'] / plot_data['addresses']
    plot_data['f1score'] = 2. * (plot_data['precision'] * plot_data['recall']) / \
                           (plot_data['precision'] + plot_data['recall'])
    margin = datetime.timedelta(days=1)
    plot_data.plot(x='date', y=metric_columns, lw=2, figsize=(12, 18), fontsize=16,
                   sort_columns=True,
                   xlim=(plot_data['date'].min() - margin, plot_data['date'].max() + margin),
                   ylim=(plot_data[metric_columns].min(axis=0).min() * 0.95,
                         plot_data[metric_columns].max(axis=0).max() * 1.05))
    plt.tight_layout()
    plt.savefig(outpath + testset_name + 'results3.png')
    plt.close()
def plot_performance():
    """
    Generate simple graphs showing the linking performance as a function of
    time for every dataset found in the results database.

    For each dataset two (or three, when precision/recall data exist) graphs
    are written to the output folder.

    :return: None
    """
    count_columns = ['addresses', 'correct', 'false_positive', 'linked', 'new_UPRNs', 'not_linked']
    # pull everything from the results table and normalise dates
    results = _get_data_from_db('select * from results;')
    results['date'] = pd.to_datetime(results['date'])
    for dataset in set(results['name']):
        subset = results.loc[results['name'] == dataset]
        print('Plotting {} results'.format(dataset))
        _create_figures(subset, dataset, count_columns)
        # precision/recall requires a valid (non-negative) false-positive count
        with_fp = subset.loc[subset['false_positive'] >= 0]
        if len(with_fp.index) > 0:
            _create_precision_recall_figure(with_fp, dataset)
def run_all(plot_only=False):
    """
    Execute the fully automated testing sequence.

    :param plot_only: whether to re-run all test datasets or simply generate performance figures
    :type plot_only: bool

    :return: None
    """
    if not plot_only:
        # re-run every linker and refresh the derived Welsh metrics
        run_all_datasets()
        compute_performance()
    plot_performance()
if __name__ == "__main__":
    # Script entry point: re-run every linking dataset and refresh all plots.
    run_all()
| 34.360563 | 104 | 0.669864 |
c91e808d177e7eef5ac0f3703c198f1965edb996 | 2,452 | py | Python | boto/compat.py | rectalogic/boto | 1ac79d0c984bfd83f26e7c3af4877a731a63ecc2 | [
"MIT"
] | 1 | 2019-06-22T23:31:13.000Z | 2019-06-22T23:31:13.000Z | boto/compat.py | rectalogic/boto | 1ac79d0c984bfd83f26e7c3af4877a731a63ecc2 | [
"MIT"
] | null | null | null | boto/compat.py | rectalogic/boto | 1ac79d0c984bfd83f26e7c3af4877a731a63ecc2 | [
"MIT"
] | null | null | null | # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import os
# This allows boto modules to say "from boto.compat import json". This is
# preferred so that all modules don't have to repeat this idiom.
try:
    import simplejson as json
except ImportError:
    import json
# If running in Google App Engine there is no "user" and
# os.path.expanduser() will fail. Attempt to detect this case and use a
# no-op expanduser function in this case.
try:
    os.path.expanduser('~')
    expanduser = os.path.expanduser
except (AttributeError, ImportError):
    # This is probably running on App Engine.
    expanduser = (lambda x: x)
# Use unittest2 for older versions of Python
try:
    import unittest2 as unittest
except ImportError:
    import unittest
# Re-export the vendored six helpers so the rest of boto can import the
# py2/py3 compatibility names from a single place.
from boto.vendored import six
from boto.vendored.six import BytesIO, StringIO
from boto.vendored.six.moves import filter, http_client, map, _thread, \
    urllib, zip
from boto.vendored.six.moves.configparser import SafeConfigParser
from boto.vendored.six.moves.urllib.parse import parse_qs, quote, unquote, \
    urlparse, urlsplit
from boto.vendored.six.moves.urllib.request import urlopen
if six.PY3:
    # StandardError was removed, so use the base exception type instead
    StandardError = Exception
else:
    # On Python 2 re-export the builtin so it is importable from this module.
    StandardError = StandardError
| 38.920635 | 77 | 0.738581 |
a661518ad92a5bdf1f7d1b5c1ad0f7fd07a497cb | 5,644 | py | Python | aws_lambda_builders/actions.py | mingkun2020/aws-lambda-builders | 71e507fbc08eb8d583be339b3701ce30bf026776 | [
"Apache-2.0"
] | 88 | 2020-09-03T18:51:44.000Z | 2022-03-22T23:46:14.000Z | aws_lambda_builders/actions.py | mingkun2020/aws-lambda-builders | 71e507fbc08eb8d583be339b3701ce30bf026776 | [
"Apache-2.0"
] | 88 | 2020-09-01T19:22:28.000Z | 2022-03-30T01:55:36.000Z | aws_lambda_builders/actions.py | mingkun2020/aws-lambda-builders | 71e507fbc08eb8d583be339b3701ce30bf026776 | [
"Apache-2.0"
] | 40 | 2020-09-02T17:29:39.000Z | 2022-03-31T02:36:17.000Z | """
Definition of actions used in the workflow
"""
import logging
import os
import shutil
import six
from aws_lambda_builders.utils import copytree
LOG = logging.getLogger(__name__)
class ActionFailedError(Exception):
    """
    Raised when an action cannot run to completion. Use this exception to
    express well-known, expected failure scenarios.
    """

    pass
class Purpose(object):
    """
    Enum-like holder for the purpose tag attached to each action.
    """

    # Identify dependencies, then download, compile and resolve them
    RESOLVE_DEPENDENCIES = "RESOLVE_DEPENDENCIES"
    # Copy the customer's source code
    COPY_SOURCE = "COPY_SOURCE"
    # Copy resolved dependencies
    COPY_DEPENDENCIES = "COPY_DEPENDENCIES"
    # Move resolved dependencies
    MOVE_DEPENDENCIES = "MOVE_DEPENDENCIES"
    # Compile the source code
    COMPILE_SOURCE = "COMPILE_SOURCE"
    # Clean up the target folder
    CLEAN_UP = "CLEAN_UP"

    @staticmethod
    def has_value(item):
        # True when `item` matches one of the class-level constants above.
        return item in vars(Purpose).values()
class _ActionMetaClass(type):
    """ Metaclass that validates the NAME and PURPOSE class variables of every
    concrete action subclass at class-creation time.
    """

    def __new__(mcs, name, bases, class_dict):
        cls = type.__new__(mcs, name, bases, class_dict)
        # The abstract base itself is exempt from validation.
        if cls.__name__ != "BaseAction":
            # Every concrete action must declare a string name...
            if not isinstance(cls.NAME, six.string_types):
                raise ValueError("Action must provide a valid name")
            # ...and a purpose drawn from the Purpose constants.
            if not Purpose.has_value(cls.PURPOSE):
                raise ValueError("Action must provide a valid purpose")
        return cls
class BaseAction(six.with_metaclass(_ActionMetaClass, object)):
    """
    Common base for all actions. It provides no implementation; concrete
    subclasses supply NAME, PURPOSE and the execute() behaviour.
    """

    # Unique identifier of the action (validated by the metaclass)
    NAME = None

    # Optional help text describing what the action does
    DESCRIPTION = ""

    # One of the Purpose constants (validated by the metaclass)
    PURPOSE = None

    def execute(self):
        """
        Runs the action. This method should complete the action, and if it fails raise appropriate exceptions.

        :raises lambda_builders.actions.ActionFailedError: Instance of this class if something went wrong with the
            action
        """
        raise NotImplementedError("execute")

    def __repr__(self):
        return "Name=%s, Purpose=%s, Description=%s" % (self.NAME, self.PURPOSE, self.DESCRIPTION)
class CopySourceAction(BaseAction):
    """ Copies the customer's source code into the artifacts folder. """

    NAME = "CopySource"
    DESCRIPTION = "Copying source code while skipping certain commonly excluded files"
    PURPOSE = Purpose.COPY_SOURCE

    def __init__(self, source_dir, dest_dir, excludes=None):
        self.source_dir = source_dir
        self.dest_dir = dest_dir
        # fall back to "no exclusions" when the caller passes nothing
        self.excludes = excludes if excludes else []

    def execute(self):
        # delegate to the shared copytree helper, filtering excluded patterns
        ignore_filter = shutil.ignore_patterns(*self.excludes)
        copytree(self.source_dir, self.dest_dir, ignore=ignore_filter)
class CopyDependenciesAction(BaseAction):
    """Copy installed dependencies from the artifact folder into a target folder.

    Anything present in the artifact directory but absent from the source
    directory is treated as a dependency added by the build.
    """

    NAME = "CopyDependencies"
    DESCRIPTION = "Copying dependencies while skipping source file"
    PURPOSE = Purpose.COPY_DEPENDENCIES

    def __init__(self, source_dir, artifact_dir, destination_dir):
        self.source_dir = source_dir
        self.artifact_dir = artifact_dir
        self.dest_dir = destination_dir

    def execute(self):
        # Entries that the build added on top of the original sources.
        dependency_names = set(os.listdir(self.artifact_dir)) - set(os.listdir(self.source_dir))

        for entry in dependency_names:
            copied_from = os.path.join(self.artifact_dir, entry)
            copied_to = os.path.join(self.dest_dir, entry)
            # Directories go through the recursive helper; files are copied
            # with their metadata preserved.
            if os.path.isdir(copied_from):
                copytree(copied_from, copied_to)
            else:
                shutil.copy2(copied_from, copied_to)
class MoveDependenciesAction(BaseAction):
    """Relocate installed dependencies from the artifact folder into a target folder.

    A dependency is any entry present in the artifact directory but absent
    from the source directory.
    """

    NAME = "MoveDependencies"
    DESCRIPTION = "Moving dependencies while skipping source file"
    PURPOSE = Purpose.MOVE_DEPENDENCIES

    def __init__(self, source_dir, artifact_dir, destination_dir):
        self.source_dir = source_dir
        self.artifact_dir = artifact_dir
        self.dest_dir = destination_dir

    def execute(self):
        # Entries that the build added on top of the original sources.
        dependency_names = set(os.listdir(self.artifact_dir)) - set(os.listdir(self.source_dir))

        for entry in dependency_names:
            moved_from = os.path.join(self.artifact_dir, entry)
            moved_to = os.path.join(self.dest_dir, entry)
            # shutil.move handles both files and whole directory trees.
            shutil.move(moved_from, moved_to)
class CleanUpAction(BaseAction):
    """Remove every entry inside ``target_dir`` while keeping the directory itself."""

    NAME = "CleanUp"
    DESCRIPTION = "Cleaning up the target folder"
    PURPOSE = Purpose.CLEAN_UP

    def __init__(self, target_dir):
        self.target_dir = target_dir

    def execute(self):
        # Nothing to clean when the folder was never created.
        if not os.path.isdir(self.target_dir):
            LOG.info("Clean up action: %s does not exist and will be skipped.", str(self.target_dir))
            return

        entries = os.listdir(self.target_dir)
        LOG.info("Clean up action: folder %s will be cleaned", str(self.target_dir))
        for entry in entries:
            entry_path = os.path.join(self.target_dir, entry)
            LOG.debug("Clean up action: %s is deleted", str(entry_path))
            # Directories need the recursive remover; plain files are unlinked.
            if os.path.isdir(entry_path):
                shutil.rmtree(entry_path)
            else:
                os.remove(entry_path)
| 28.795918 | 117 | 0.675762 |
86c2fe4c6bccb8c4869548a85304d05d424ed707 | 12,120 | py | Python | dist/weewx-4.0.0b6/bin/weeutil/ftpupload.py | v0rts/docker-weewx | 70b2f252051dfead4fcb74e74662b297831e6342 | [
"Apache-2.0"
] | 10 | 2017-01-05T17:30:48.000Z | 2021-09-18T15:04:20.000Z | dist/weewx-4.0.0b6/bin/weeutil/ftpupload.py | v0rts/docker-weewx | 70b2f252051dfead4fcb74e74662b297831e6342 | [
"Apache-2.0"
] | 2 | 2019-07-21T10:48:42.000Z | 2022-02-16T20:36:45.000Z | dist/weewx-4.0.0b6/bin/weeutil/ftpupload.py | v0rts/docker-weewx | 70b2f252051dfead4fcb74e74662b297831e6342 | [
"Apache-2.0"
] | 12 | 2017-01-05T18:50:30.000Z | 2021-10-05T07:35:45.000Z | #
# Copyright (c) 2009-2015 Tom Keffer <tkeffer@gmail.com>
#
# See the file LICENSE.txt for your full rights.
#
"""For uploading files to a remove server via FTP"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import with_statement
import ftplib
import logging
import os
import sys
import time
from six.moves import cPickle
log = logging.getLogger(__name__)
class FtpUpload(object):
    """Uploads a directory and all its descendants to a remote server.

    Keeps track of when a file was last uploaded, so it is uploaded only
    if its modification time is newer."""

    def __init__(self, server,
                 user, password,
                 local_root, remote_root,
                 port=21,
                 name="FTP",
                 passive=True,
                 max_tries=3,
                 secure=False,
                 debug=0,
                 secure_data=True):
        """Initialize an instance of FtpUpload.

        After initializing, call method run() to perform the upload.

        server: The remote server to which the files are to be uploaded.

        user,
        password : The user name and password that are to be used.

        local_root: The local directory whose contents are to be uploaded.

        remote_root: The remote directory that mirrors local_root.

        port: The FTP port on the remote server. [Optional. Default is 21.]

        name: A unique name to be given for this FTP session. This allows more
        than one session to be uploading from the same local directory.  [Optional.
        Default is 'FTP'.]

        passive: True to use passive mode; False to use active mode. [Optional.
        Default is True (passive mode)]

        max_tries: How many times to try creating a directory or uploading
        a file before giving up [Optional. Default is 3]

        secure: Set to True to attempt an FTP over TLS (FTPS) session.

        debug: Set to 1 for extra debug information, 0 otherwise.

        secure_data: If a secure session is requested (option secure=True),
        should we attempt a secure data connection as well? This option is useful
        due to a bug in the Python FTP client library. See Issue #284.
        [Optional. Default is True]
        """
        self.server = server
        self.user = user
        self.password = password
        # Normalize both roots so path joins/comparisons behave consistently.
        self.local_root = os.path.normpath(local_root)
        self.remote_root = os.path.normpath(remote_root)
        self.port = port
        self.name = name
        self.passive = passive
        self.max_tries = max_tries
        self.secure = secure
        self.debug = debug
        self.secure_data = secure_data

    def run(self):
        """Perform the actual upload.

        returns: the number of files uploaded."""

        # Pick the FTP class: FTPS when requested, with a fallback for
        # Pythons built without TLS support.
        if self.secure:
            try:
                FTPClass = ftplib.FTP_TLS
            except AttributeError:
                FTPClass = ftplib.FTP
                log.debug("Your version of Python does not support FTPS. Using insecure connection.")
                self.secure = False
        else:
            FTPClass = ftplib.FTP

        # Get the timestamp and members of the last upload:
        (timestamp, fileset) = self.getLastUpload()

        n_uploaded = 0

        # Try to connect to the ftp server up to max_tries times:
        try:
            if self.secure:
                log.debug("Attempting secure connection to %s" % self.server)
            else:
                log.debug("Attempting connection to %s" % self.server)
            for count in range(self.max_tries):
                try:
                    ftp_server = FTPClass()
                    ftp_server.connect(self.server, self.port)
                    if self.debug:
                        ftp_server.set_debuglevel(self.debug)
                    ftp_server.login(self.user, self.password)
                    ftp_server.set_pasv(self.passive)
                    if self.secure and self.secure_data:
                        # Upgrade the data channel to TLS as well (see Issue #284).
                        ftp_server.prot_p()
                        log.debug("Secure data connection to %s" % self.server)
                    else:
                        log.debug("Connected to %s" % self.server)
                    break
                except ftplib.all_errors as e:
                    log.error("Unable to connect or log into server : %s" % e)
            else:
                # This is executed only if the loop terminates naturally (without a break statement),
                # meaning the ftp connection failed max_tries times. Abandon ftp upload
                log.error("Attempted %d times to connect to server %s. Giving up."
                          % (self.max_tries, self.server))
                return n_uploaded

            # Walk the local directory structure
            for (dirpath, unused_dirnames, filenames) in os.walk(self.local_root):

                # Strip out the common local root directory. What is left
                # will be the relative directory both locally and remotely.
                local_rel_dir_path = dirpath.replace(self.local_root, '.')
                if self._skipThisDir(local_rel_dir_path):
                    continue

                # This is the absolute path to the remote directory:
                remote_dir_path = os.path.normpath(os.path.join(self.remote_root, local_rel_dir_path))

                # Make the remote directory if necessary:
                self._make_remote_dir(ftp_server, remote_dir_path)

                # Now iterate over all members of the local directory:
                for filename in filenames:

                    full_local_path = os.path.join(dirpath, filename)
                    # See if this file can be skipped:
                    if self._skipThisFile(timestamp, fileset, full_local_path):
                        continue

                    full_remote_path = os.path.join(remote_dir_path, filename)
                    STOR_cmd = "STOR %s" % full_remote_path
                    # Retry up to max_tries times:
                    for count in range(self.max_tries):
                        # If we have to retry, we should probably reopen the file as well.
                        # Hence, the open is in the inner loop:
                        with open(full_local_path, 'rb') as fd:
                            try:
                                ftp_server.storbinary(STOR_cmd, fd)
                            except ftplib.all_errors as e:
                                # Unsuccessful. Log it and go around again.
                                log.error("Attempt #%d. Failed uploading %s to %s. Reason: %s"
                                          % (count + 1, full_remote_path, self.server, e))
                                # Re-assert the transfer mode before retrying.
                                ftp_server.set_pasv(self.passive)
                            else:
                                # Success. Log it, break out of the loop
                                n_uploaded += 1
                                fileset.add(full_local_path)
                                log.debug("Uploaded file %s" % full_remote_path)
                                break
                    else:
                        # This is executed only if the loop terminates naturally (without a break statement),
                        # meaning the upload failed max_tries times. Log it, move on to the next file.
                        log.error("Failed to upload file %s" % full_remote_path)
        finally:
            # Always close the control connection; ignore errors on quit.
            try:
                ftp_server.quit()
            except:
                pass

        # Record this upload so unchanged files are skipped next time.
        timestamp = time.time()
        self.saveLastUpload(timestamp, fileset)

        return n_uploaded

    def getLastUpload(self):
        """Reads the time and members of the last upload from the local root"""

        timeStampFile = os.path.join(self.local_root, "#%s.last" % self.name)

        # If the file does not exist, an IOError exception will be raised.
        # If the file exists, but is truncated, an EOFError will be raised.
        # Either way, be prepared to catch it.
        try:
            with open(timeStampFile, "rb") as f:
                timestamp = cPickle.load(f)
                fileset = cPickle.load(f)
        except (IOError, EOFError, cPickle.PickleError, AttributeError):
            timestamp = 0
            fileset = set()
            # Either the file does not exist, or it is garbled.
            # Either way, it's safe to remove it.
            try:
                os.remove(timeStampFile)
            except OSError:
                pass

        return (timestamp, fileset)

    def saveLastUpload(self, timestamp, fileset):
        """Saves the time and members of the last upload in the local root."""

        timeStampFile = os.path.join(self.local_root, "#%s.last" % self.name)
        with open(timeStampFile, "wb") as f:
            cPickle.dump(timestamp, f)
            cPickle.dump(fileset, f)

    def _make_remote_dir(self, ftp_server, remote_dir_path):
        """Make a remote directory if necessary."""
        # Try to make the remote directory up to max_tries times, then give up.
        for unused_count in range(self.max_tries):
            try:
                ftp_server.mkd(remote_dir_path)
            except ftplib.all_errors as e:
                # Got an exception. It might be because the remote directory already exists:
                if sys.exc_info()[0] is ftplib.error_perm:
                    msg = str(e).strip()
                    # If a directory already exists, some servers respond with a '550' ("Requested action not taken") code,
                    # others with a '521' ("Access denied" or "Pathname already exists") code.
                    if msg.startswith('550') or msg.startswith('521'):
                        # Directory already exists
                        return
                log.error("Got error while attempting to make remote directory %s" % remote_dir_path)
                log.error(" **** Error: %s" % e)
            else:
                log.debug("Made directory %s" % remote_dir_path)
                return
        else:
            # All attempts failed: surface the error to the caller.
            log.error("Unable to create remote directory %s" % remote_dir_path)
            raise IOError("Unable to create remote directory %s" % remote_dir_path)

    def _skipThisDir(self, local_dir):
        # Version-control metadata directories are never uploaded.
        return os.path.basename(local_dir) in ('.svn', 'CVS')

    def _skipThisFile(self, timestamp, fileset, full_local_path):

        filename = os.path.basename(full_local_path)
        # Skip editor backups ('~' suffix) and bookkeeping files ('#' prefix),
        # which includes this uploader's own "#<name>.last" state file.
        if filename[-1] == '~' or filename[0] == '#':
            return True

        # Never uploaded before: do not skip.
        if full_local_path not in fileset:
            return False

        # Modified since the last upload: do not skip.
        if os.stat(full_local_path).st_mtime > timestamp:
            return False

        # Filename is in the set, and is up to date.
        return True
if __name__ == '__main__':
    # Ad-hoc command-line harness: upload HTML_ROOT (or an explicitly given
    # directory) using the [StdReport][[FTP]] settings of a weewx config file.
    import socket

    import configobj

    import weewx
    import weeutil.logger

    weewx.debug = 1

    weeutil.logger.setup('ftpupload', {})

    if len(sys.argv) < 2:
        print("""Usage: ftpupload.py path-to-configuration-file [path-to-be-ftp'd]""")
        sys.exit(weewx.CMD_ERROR)

    try:
        config_dict = configobj.ConfigObj(sys.argv[1], file_error=True, encoding='utf-8')
    except IOError:
        print("Unable to open configuration file %s" % sys.argv[1])
        raise

    # With no explicit directory argument, fall back to the configured HTML_ROOT.
    if len(sys.argv) == 2:
        try:
            ftp_dir = os.path.join(config_dict['WEEWX_ROOT'],
                                   config_dict['StdReport']['HTML_ROOT'])
        except KeyError:
            print("No HTML_ROOT in configuration dictionary.")
            sys.exit(1)
    else:
        ftp_dir = sys.argv[2]

    # Keep a hung control connection from blocking forever.
    socket.setdefaulttimeout(10)

    ftp_upload = FtpUpload(config_dict['StdReport']['FTP']['server'],
                           config_dict['StdReport']['FTP']['user'],
                           config_dict['StdReport']['FTP']['password'],
                           ftp_dir,
                           config_dict['StdReport']['FTP']['path'],
                           'FTP',
                           config_dict['StdReport']['FTP'].as_bool('passive'),
                           config_dict['StdReport']['FTP'].as_int('max_tries'))
    ftp_upload.run()
802f0000a3deed24863b56fded7c90a5f47c0ed1 | 298 | py | Python | lib/model/roi_layers/__init__.py | YeolJ00/Gradually-Applying-Weakly-Supervised-and-Active-Learning-for-Mass-Detection-in-Breast-Ultrasound | 2aa6a02ffc688455308819b719a6e0ccc4154d5b | [
"MIT"
] | 4 | 2021-09-22T14:49:55.000Z | 2022-01-24T14:59:16.000Z | lib/model/roi_layers/__init__.py | YeolJ00/faster-rcnn-pytorch | 2aa6a02ffc688455308819b719a6e0ccc4154d5b | [
"MIT"
] | null | null | null | lib/model/roi_layers/__init__.py | YeolJ00/faster-rcnn-pytorch | 2aa6a02ffc688455308819b719a6e0ccc4154d5b | [
"MIT"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
from .nms import nms
from .roi_align import ROIAlign
from .roi_align import roi_align
from .roi_pool import ROIPool
from .roi_pool import roi_pool
__all__ = ["nms", "roi_align", "ROIAlign", "roi_pool", "ROIPool"] | 33.111111 | 71 | 0.768456 |
b794e1b5408d8cf8a0f8a4a474dd264a0df3185d | 2,333 | py | Python | setup.py | BlueOwlDev/python-mocket | 7075f206394c6f226b975a2955821a2609023e24 | [
"BSD-3-Clause"
] | null | null | null | setup.py | BlueOwlDev/python-mocket | 7075f206394c6f226b975a2955821a2609023e24 | [
"BSD-3-Clause"
] | null | null | null | setup.py | BlueOwlDev/python-mocket | 7075f206394c6f226b975a2955821a2609023e24 | [
"BSD-3-Clause"
] | null | null | null | import io
import os
import sys
from setuptools import find_packages, setup
# Never let pipenv lock when invoked through setup tooling.
os.environ.setdefault("PIPENV_SKIP_LOCK", "1")

# NOTE(review): `major`/`minor` are not referenced anywhere below — likely a
# leftover from an older version guard.
major, minor = sys.version_info[:2]

# Runtime dependencies come straight from requirements.txt, skipping the
# index-url ("-i ...") lines pipenv writes there.
install_requires = [
    line
    for line in io.open(
        os.path.join(os.path.dirname(__file__), "requirements.txt")
    ).readlines()
    if not line.startswith("-i")
]

# Minimum pook version supporting mocket.plugins.pook.MocketEngine.
pook_requires = ("pook>=0.2.1",)

# Test packages are excluded from the built distribution.
exclude_packages = ("tests", "tests.*")
def read_version(package):
    """Extract ``__version__`` from a package's ``__init__.py`` without importing it.

    :param package: path to the package directory containing ``__init__.py``
    :returns: the version string with surrounding quotes stripped, or
        ``None`` when no ``__version__ = `` line is found
    :raises IOError: if ``__init__.py`` does not exist
    """
    init_path = os.path.join(package, "__init__.py")
    with io.open(init_path, "r") as fd:
        for line in fd:
            if line.startswith("__version__ = "):
                # Take the assigned token and drop either quote style
                # (the original only handled double quotes).
                return line.split()[-1].strip().strip("\"'")
    return None
# Package metadata and build configuration for mocket.
setup(
    name="mocket",
    version=read_version("mocket"),
    author="Giorgio Salluzzo",
    author_email="giorgio.salluzzo@gmail.com",
    url="https://github.com/mindflayer/python-mocket",
    description="Socket Mock Framework - for all kinds of socket animals, web-clients included - \
with gevent/asyncio/SSL support",
    long_description=io.open("README.rst", encoding="utf-8").read(),
    long_description_content_type="text/x-rst",
    packages=find_packages(exclude=exclude_packages),
    install_requires=install_requires,
    setup_requires=[],
    extras_require={
        # Optional hashing speedups, matched to the interpreter implementation.
        "speedups": [
            'xxhash;platform_python_implementation=="CPython"',
            'xxhash-cffi;platform_python_implementation=="PyPy"',
        ],
        "dev": [],
        "pook": pook_requires,  # plugins version supporting mocket.plugins.pook.MocketEngine
    },
    test_suite="runtests.runtests",
    license="BSD",
    classifiers=[
        "Development Status :: 6 - Mature",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Software Development",
        "Topic :: Software Development :: Testing",
        "License :: OSI Approved :: BSD License",
    ],
)
| 32.859155 | 98 | 0.633519 |
0e18b51b898e36d1def11aefec3195938804effe | 1,170 | py | Python | Assignment2/draw_line.py | rahlin1004/sc-projects | 48c602b0474ddb910a08534ba56bbd13de79b7bc | [
"MIT"
] | null | null | null | Assignment2/draw_line.py | rahlin1004/sc-projects | 48c602b0474ddb910a08534ba56bbd13de79b7bc | [
"MIT"
] | null | null | null | Assignment2/draw_line.py | rahlin1004/sc-projects | 48c602b0474ddb910a08534ba56bbd13de79b7bc | [
"MIT"
] | null | null | null | """
File: draw_line.py
Name: Sarah Lin
-------------------------
this program help you to draw lines
"""
from campy.graphics.gobjects import GOval, GLine
from campy.graphics.gwindow import GWindow
from campy.gui.events.mouse import onmouseclicked
SIZE = 10  # Diameter (px) of the marker circle placed on the first click.
w = GWindow(500, 500, title='drawing_line.py')  # Main drawing window.
TIME = 0  # Click counter: odd clicks place the circle, even clicks draw a line.
circle = GOval(SIZE, SIZE)  # Reusable marker circle for the first click of each pair.
def main():
    """
    This program creates lines on an instance of GWindow class.
    There is a circle indicating the user’s first click. A line appears
    at the condition where the circle disappears as the user clicks
    on the canvas for the second time.
    """
    # Register the click handler; all interaction happens in draw().
    onmouseclicked(draw)
def draw(event):
    """Handle one mouse click.

    The first click of each pair places the marker circle centered on the
    click; the second removes the circle and draws a line from the circle's
    center to the new click position.

    :param event: mouse event carrying the click coordinates
    """
    global TIME
    TIME += 1
    is_first_click = TIME % 2 == 1
    if is_first_click:
        # Center the marker circle on the click position.
        w.add(circle, x=event.x - SIZE / 2, y=event.y - SIZE / 2)
        return
    # Second click: compute the circle's center before removing it.
    start_x = circle.x + SIZE / 2
    start_y = circle.y + SIZE / 2
    w.remove(circle)
    w.add(GLine(start_x, start_y, event.x, event.y))
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
| 27.209302 | 77 | 0.623077 |
2a6f78a3dbb9f1ba095c6e83fb94011e82d2717f | 4,369 | py | Python | tests/python/pants_test/backend/codegen/jaxb/test_jaxb_gen.py | SergeKireev/pants | cd92c65aeb3dfdcee3e0946f2b68a301ef2f4541 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/backend/codegen/jaxb/test_jaxb_gen.py | SergeKireev/pants | cd92c65aeb3dfdcee3e0946f2b68a301ef2f4541 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/backend/codegen/jaxb/test_jaxb_gen.py | SergeKireev/pants | cd92c65aeb3dfdcee3e0946f2b68a301ef2f4541 | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
from pants.backend.codegen.jaxb.jaxb_gen import JaxbGen
from pants.backend.codegen.jaxb.jaxb_library import JaxbLibrary
from pants.backend.codegen.jaxb.register import build_file_aliases as register_codegen
from pants.build_graph.register import build_file_aliases as register_core
from pants.testutil.jvm.nailgun_task_test_base import NailgunTaskTestBase
class JaxbGenJavaTest(NailgunTaskTestBase):
    """Tests for the JaxbGen code generation task: XSD fixture helpers,
    package-name correction/guessing, and end-to-end generation."""

    @classmethod
    def task_type(cls):
        # The task class under test.
        return JaxbGen

    @classmethod
    def alias_groups(cls):
        # BUILD-file aliases needed to parse jaxb_library targets in tests.
        return register_core().merge(register_codegen())

    def create_schema(self, *component_names):
        """Build a minimal XSD document with one complexType per given name."""
        return ('<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema">\n'
                + '\n'.join(self.create_complex_type(c) for c in component_names)
                + '\n</xsd:schema>')

    def create_complex_type(self, name):
        """Return an XSD complexType snippet with a fixed set of elements."""
        return (
            '''<xsd:complexType name="{name}">
        <xsd:sequence>
          <xsd:element name="commonName" type="xsd:string"/>
          <xsd:element name="scientificName" type="xsd:string"/>
          <xsd:element name="colorRGB" type="xsd:integer"/>
          <xsd:element name="tasty" type="xsd:boolean"/>
        </xsd:sequence>
      </xsd:complexType>'''.format(name=name)
        )

    def test_correct_package(self):
        # _correct_package normalizes slashes to dots and trims stray dots.
        fix = JaxbGen._correct_package
        self.assertEqual(fix('com.foo.bar'), 'com.foo.bar', 'Expected no change.')
        self.assertEqual(fix('com/foo/bar'), 'com.foo.bar', 'Expected slashes to dots.')
        self.assertEqual(fix('.com.foo.bar'), 'com.foo.bar', 'Should have trimmed leading dots.')
        self.assertEqual(fix('com.foo.bar.'), 'com.foo.bar', 'Should have trimmed trialing dots.')
        self.assertEqual(fix('org/pantsbuild/example/foo'), 'org.pantsbuild.example.foo',
                         'Should work on packages other than com.foo.bar.')
        # Consecutive interior dots are invalid and must raise.
        with self.assertRaises(ValueError):
            fix('po..ta..to')
        with self.assertRaises(ValueError):
            fix('po.ta..to')
        with self.assertRaises(ValueError):
            fix('..po.ta..to...')
        self.assertEqual(fix('///org.pantsbuild/example...'), 'org.pantsbuild.example')

    def test_guess_package(self):
        # Keep a history so assertion messages can show the last guess.
        guess_history = []

        def guess(path):
            result = JaxbGen._correct_package(JaxbGen._guess_package(path))
            guess_history.append(result)
            return result

        # Package roots are recognized by these common reverse-domain prefixes.
        supported_prefixes = ('com', 'org', 'net',)
        for prefix in supported_prefixes:
            self.assertEqual(guess('.pants.d/foo.bar/{0}/pantsbuild/potato/Potato.java'.format(prefix)),
                             '{0}.pantsbuild.potato'.format(prefix),
                             'Failed for prefix {0}: {1}.'.format(prefix, guess_history[-1]))
            self.assertEqual(guess('{0}/pantsbuild/potato/Potato.java'.format(prefix)),
                             '{0}.pantsbuild.potato'.format(prefix),
                             'Failed for prefix {0}: {1}.'.format(prefix, guess_history[-1]))
            self.assertEqual(guess('/User/foo/bar/.pants.d/gen/jaxb/foo/bar/'
                                   '{0}/company/project/a/File.java'.format(prefix)),
                             '{0}.company.project.a'.format(prefix),
                             'Failed for prefix {0}: {1}.'.format(prefix, guess_history[-1]))
        # Without a recognized prefix, the whole directory path becomes the package.
        self.assertEqual(guess('pantsbuild/potato/Potato.java'),
                         'pantsbuild.potato',
                         'Failed with no prefix: {0}'.format(guess_history[-1]))

    def test_simple(self):
        # Generate from a one-type schema and check the emitted Java files.
        self.create_file('foo/vegetable.xml', self.create_schema('Vegetable'))
        jaxblib = self.make_target('foo:jaxblib', JaxbLibrary, sources=['vegetable.xml'])
        context = self.context(target_roots=[jaxblib])
        task = self.execute(context)
        files = []
        for (dirpath, dirnames, filenames) in os.walk(task.workdir):
            for filename in filenames:
                if filename.endswith('.java'):
                    files.append(os.path.join(dirpath, filename))
        self.assertEqual(sorted(['ObjectFactory.java', 'Vegetable.java']),
                         sorted([os.path.basename(f) for f in files]))
        # Make sure there is no header with a timestamp in the generated file
        for f in files:
            with open(f, 'r') as jaxb_file:
                contents = jaxb_file.read()
                self.assertNotIn('// Generated on:', contents)
| 45.989474 | 98 | 0.644999 |
aa4e3281a7f373935d42d18ded7243d39849ca7c | 1,052 | py | Python | datamodel_code_generator/parser/openapi.py | Chilipp/datamodel-code-generator | 849955ad82034a04a7036914e04402e8ebd18776 | [
"MIT"
] | null | null | null | datamodel_code_generator/parser/openapi.py | Chilipp/datamodel-code-generator | 849955ad82034a04a7036914e04402e8ebd18776 | [
"MIT"
] | null | null | null | datamodel_code_generator/parser/openapi.py | Chilipp/datamodel-code-generator | 849955ad82034a04a7036914e04402e8ebd18776 | [
"MIT"
] | null | null | null | from typing import Any, Dict
import yaml
from datamodel_code_generator import snooper_to_methods
from datamodel_code_generator.parser.jsonschema import JsonSchemaParser
@snooper_to_methods(max_variable_length=None)
class OpenAPIParser(JsonSchemaParser):
    """Parse the ``components.schemas`` section of OpenAPI documents into data
    models, reusing the JSON Schema parsing machinery of the base class."""

    def parse_raw(self) -> None:
        for source in self.iter_source:
            if self.validation:
                # Imported lazily so `prance` is only required when
                # validation was requested.
                from prance import BaseParser

                base_parser = BaseParser(
                    spec_string=source.text, backend='openapi-spec-validator'
                )
                components: Dict[str, Any] = base_parser.specification['components']
            else:
                # Without validation, a plain YAML load is sufficient
                # (also handles JSON, which is a YAML subset).
                components = yaml.safe_load(source.text)['components']
            # Resolve $refs relative to this source file.
            self.model_resolver.set_current_root(list(source.path.parts))
            for obj_name, raw_obj in components[
                'schemas'
            ].items():  # type: str, Dict[Any, Any]
                self.parse_raw_obj(
                    obj_name, raw_obj, ['components', 'schemas', obj_name]
                )
da34c8c3ac850ebdfd8ea38564876230cb438bd4 | 1,181 | py | Python | strategies/gekko-japonicus-master/evaluation/gekko/API.py | tobby2002/tradyai-api | 1314528ee5200114a951e298a64633d4485eef62 | [
"MIT"
] | 229 | 2018-01-05T13:32:52.000Z | 2021-12-18T00:57:49.000Z | strategies/gekko-japonicus-master/evaluation/gekko/API.py | tobby2002/tradyai-api | 1314528ee5200114a951e298a64633d4485eef62 | [
"MIT"
] | 142 | 2018-01-04T23:39:28.000Z | 2019-12-14T16:38:24.000Z | strategies/gekko-japonicus-master/evaluation/gekko/API.py | tobby2002/tradyai-api | 1314528ee5200114a951e298a64633d4485eef62 | [
"MIT"
] | 95 | 2018-01-06T05:35:23.000Z | 2021-12-13T16:42:22.000Z | #!/bin/python
import os
import requests
import json
from subprocess import Popen, PIPE
def initializeGekko(gekko_dir=None):  # not used yet.
    """Spawn a local Gekko instance with its web UI enabled.

    Fixes: the original referenced an undefined module global ``gekkoDIR``
    (a guaranteed NameError when called) and discarded the process handle.

    :param gekko_dir: path to the Gekko installation; when omitted, falls
        back to the module-level ``gekkoDIR`` global for backward
        compatibility (raising NameError if that global is still undefined).
    :returns: the ``subprocess.Popen`` handle of the spawned node process,
        so the caller can monitor or terminate it.
    """
    if gekko_dir is None:
        # Preserve the original behavior of reading the module global.
        gekko_dir = gekkoDIR
    cmd = ['node', os.path.join(gekko_dir, 'gekko'), '--ui']
    return Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
def checkInstance(instanceUrl):
    """Return True when a Gekko instance answers with a non-empty body at *instanceUrl*.

    Any connection problem (refused, timeout, bad URL) yields False instead
    of propagating an exception.

    Fixes: the original implicitly returned ``None`` when the response body
    was empty; now an explicit boolean is always returned.
    """
    try:
        Request = requests.get(instanceUrl)
    except Exception:
        return False
    return bool(Request.text)
def httpPost(URL, data={}, Verbose=True):
    """POST *data* as JSON to *URL* and return the decoded JSON response.

    Returns False when the request or the JSON decoding fails; terminates
    the process on a refused connection.

    NOTE(review): the mutable default ``data={}`` is shared across calls —
    safe only while no caller mutates it.
    """
    try:
        Request = requests.post(URL, json=data)
        Response = json.loads(Request.text)
    except ConnectionRefusedError:
        # NOTE(review): requests normally wraps socket errors in
        # requests.exceptions.ConnectionError, so this branch may never
        # trigger as written — confirm against the requests version in use.
        print("Error: Gekko comm error! Check your local Gekko instance.")
        exit()
    except Exception as e:
        if Verbose:
            print("Error: config failure")
            print(e)
            print(URL)
            print(data)
        return False
    return Response
def loadHostsFile(HostsFilePath):
    """Read an ini-style hosts file and build Gekko endpoint URLs.

    Every non-empty line that is neither a section header ('[' anywhere in
    the line) nor a key=value assignment is treated as a host and turned
    into "http://<host>:3000". A missing file yields an empty list.
    """
    if not os.path.isfile(HostsFilePath):
        return []
    raw_lines = open(HostsFilePath).read().split('\n')
    return [
        "http://%s:3000" % entry
        for entry in raw_lines
        if entry and '=' not in entry and '[' not in entry
    ]
| 24.102041 | 74 | 0.598645 |
4239dd3b51fea8e51d3cdaf33116595cc2748f34 | 1,927 | py | Python | sdk/cwl/arvados_cwl/context.py | chlige/arvados | e4a68851e521c0e3152f9790683d8cdb1b3923df | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/cwl/arvados_cwl/context.py | chlige/arvados | e4a68851e521c0e3152f9790683d8cdb1b3923df | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | sdk/cwl/arvados_cwl/context.py | chlige/arvados | e4a68851e521c0e3152f9790683d8cdb1b3923df | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | # Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
from cwltool.context import LoadingContext, RuntimeContext
from collections import namedtuple
class ArvLoadingContext(LoadingContext):
    """Arvados-specific LoadingContext; currently adds nothing beyond the cwltool base."""

    def __init__(self, kwargs=None):
        super(ArvLoadingContext, self).__init__(kwargs)
class ArvRuntimeContext(RuntimeContext):
    """RuntimeContext extended with Arvados-specific settings (API selection,
    submission, storage classes, caching, cluster targeting)."""

    def __init__(self, kwargs=None):
        # Defaults are assigned *before* super().__init__ so values supplied
        # in kwargs can override them.
        self.work_api = None
        self.extra_reffiles = []
        self.priority = 500
        self.enable_reuse = True
        self.runnerjob = ""
        self.submit_request_uuid = None
        self.project_uuid = None
        self.trash_intermediate = False
        self.intermediate_output_ttl = 0
        self.update_workflow = ""
        self.create_workflow = False
        self.submit_runner_ram = 0
        self.ignore_docker_for_reuse = False
        self.submit = True
        self.submit_runner_image = None
        self.wait = True
        self.cwl_runner_job = None
        self.storage_classes = "default"
        self.intermediate_storage_classes = "default"
        self.current_container = None
        self.http_timeout = 300
        self.submit_runner_cluster = None
        self.cluster_target_id = 0
        self.always_submit_runner = False
        self.collection_cache_size = 256
        self.match_local_docker = False

        super(ArvRuntimeContext, self).__init__(kwargs)

        # The first 5 characters of an Arvados UUID identify its cluster.
        if self.submit_request_uuid:
            self.submit_runner_cluster = self.submit_request_uuid[0:5]

    def get_outdir(self) -> str:
        """Return self.outdir or create one with self.tmp_outdir_prefix."""
        # NOTE(review): docstring inherited from cwltool; this override only
        # returns the attribute and never creates a directory.
        return self.outdir

    def get_tmpdir(self) -> str:
        """Return self.tmpdir or create one with self.tmpdir_prefix."""
        # NOTE(review): only returns the attribute; nothing is created here.
        return self.tmpdir

    def create_tmpdir(self) -> str:
        """Return self.tmpdir or create one with self.tmpdir_prefix."""
        # NOTE(review): only returns the attribute; nothing is created here.
        return self.tmpdir
fb95e831a4666c32864b9ff166332cae2bb78252 | 1,104 | py | Python | numbermix.py | thekushalpokhrel/Python_Programs_SoftDev_DataAnalysis | e56e0e853aca4367ebf99ae18e920b80f39bd133 | [
"MIT"
] | null | null | null | numbermix.py | thekushalpokhrel/Python_Programs_SoftDev_DataAnalysis | e56e0e853aca4367ebf99ae18e920b80f39bd133 | [
"MIT"
] | null | null | null | numbermix.py | thekushalpokhrel/Python_Programs_SoftDev_DataAnalysis | e56e0e853aca4367ebf99ae18e920b80f39bd133 | [
"MIT"
] | null | null | null | #questions for numbers level
# Level 1 questions: chained addition.
que_num_lev1=['105+26+30 = ',
              '1152+269+963 = ',
              '5269+125+478 = ',
              '256+36+987 = ',
              '1245+69+5896 = ']

# Level 2 questions: single multiplication.
que_num_lev2=['86*2 = ',
              '526*2 = ',
              '256*8 = ',
              '256*9 = ',
              '589*3 = ']

# Level 3 questions: mixed '+' and '*'.
que_num_lev3=['256*2+69 = ',
              '1256+65*2 = ',
              '12589*3+12589 = ',
              '125+69*56*2 = ',
              '1458+6987*2*36 = ']

#answers of questions of numbers levels
ans_num_lev1={'105+26+30 = ':'161',
              '1152+269+963 = ':'2384',
              '5269+125+478 = ':'5872',
              '256+36+987 = ':'1279',
              '1245+69+5896 = ':'7210'}

ans_num_lev2={'86*2 = ':'172',
              '526*2 = ':'1052',
              '256*8 = ':'2048',
              '256*9 = ':'2304',
              '589*3 = ':'1767'}

# NOTE(review): the level-3 answers follow strict left-to-right evaluation,
# ignoring operator precedence: '125+69*56*2' -> (125+69)*56*2 = 21728 and
# '1458+6987*2*36' -> (1458+6987)*2*36 = 608040. Under that same convention
# '1256+65*2' would be 2642 (1386 under standard precedence), yet the stored
# answer is 5154 — this entry matches neither convention; verify.
ans_num_lev3={'256*2+69 = ':'581',
              '1256+65*2 = ':'5154',
              '12589*3+12589 = ':'50356',
              '125+69*56*2 = ':'21728',
              '1458+6987*2*36 = ':'608040'}
| 27.6 | 44 | 0.365942 |
c3d07292329cef3a003d88df6aab05f3005b1d35 | 1,393 | py | Python | pack.py | Zhen-Dong/BitPack | 491a2a739b7138e1006637e132f4ab5307a91db9 | [
"MIT"
] | 32 | 2021-01-17T08:57:01.000Z | 2022-02-17T10:52:54.000Z | pack.py | Zhen-Dong/BitPack | 491a2a739b7138e1006637e132f4ab5307a91db9 | [
"MIT"
] | null | null | null | pack.py | Zhen-Dong/BitPack | 491a2a739b7138e1006637e132f4ab5307a91db9 | [
"MIT"
] | 8 | 2021-02-08T22:38:22.000Z | 2021-08-08T10:01:41.000Z | import argparse
import torch
from bitpack.pytorch_interface import save_quantized_state_dict
# Command-line interface for packing a quantized checkpoint with BitPack.
parser = argparse.ArgumentParser(description='BitPack to efficiently save mixed-precision models')
parser.add_argument('--input-int-file',
                    type=str,
                    default=None,
                    help='path to the quantized model with integer format')
parser.add_argument('--packed-output-path',
                    type=str,
                    default='./packed_quantized_checkpoint.pth.tar',
                    help='path to output the packed checkpoint')
parser.add_argument('--force-pack-fp',
                    action='store_true',
                    help='if the input is in floating-point form'
                    'whether to force the input tensor to int8 and then pack it')
args = parser.parse_args()

# Load the unpacked quantized checkpoint; it must contain a 'weight_integer'
# mapping of layer name -> integer weight tensor.
state_dict = torch.load(args.input_int_file)
weight_integer = state_dict['weight_integer']

# If the checkpoint contains integer values stored in floating point format,
# force_pack_fp can convert it to integer tensor and then pack accordingly.
# Here we use int32 to represent temporary results, in order to prevent potential overflow.
if args.force_pack_fp:
    weight_integer = {k : weight_integer[k].type(torch.int32) for k in weight_integer.keys()}

# Bit-pack the integer tensors and write the compressed checkpoint to disk.
save_quantized_state_dict(weight_integer, args.packed_output_path)
| 46.433333 | 99 | 0.677674 |
941a668e0d801716fa03904aba79e1f96f7b35d7 | 1,035 | py | Python | Annotated_Transformer/PositionEncoding.py | treefriend/NLP_study_tf2 | ec064b451efc57b90a002430a01c840ba8262557 | [
"MIT"
] | null | null | null | Annotated_Transformer/PositionEncoding.py | treefriend/NLP_study_tf2 | ec064b451efc57b90a002430a01c840ba8262557 | [
"MIT"
] | null | null | null | Annotated_Transformer/PositionEncoding.py | treefriend/NLP_study_tf2 | ec064b451efc57b90a002430a01c840ba8262557 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# @Time : 2020-05-21 22:44
# @Author : Shupeng
import math
import torch
import torch.nn as nn
from torch.autograd import Variable
class PositionalEncoding(nn.Module):
    """Inject fixed sinusoidal position information into token embeddings.

    Implements the non-learned encoding from "Attention Is All You Need":
    even feature dimensions carry sine waves and odd dimensions carry cosine
    waves, with wavelengths forming a geometric progression from 2*pi to
    10000*2*pi, so every position gets a unique, smoothly varying signature.

    Fixes: drops the deprecated ``torch.autograd.Variable`` wrapper (a no-op
    since PyTorch 0.4) and translates the original comments to English.
    """

    def __init__(self, d_model, dropout, max_len=5000):
        """
        :param d_model: embedding dimension (assumed even, as in the paper)
        :param dropout: dropout probability applied after adding the encoding
        :param max_len: maximum sequence length to precompute
        """
        super(PositionalEncoding, self).__init__()
        self.dropout = nn.Dropout(p=dropout)

        # Compute the positional encodings once, in log space for
        # numerical stability of the frequency terms.
        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2) *
                             -(math.log(10000.0) / d_model))
        # Even indices (0, 2, ...) get sine, odd indices (1, 3, ...) cosine;
        # dims (2i, 2i+1) share one frequency, with longer periods as i grows.
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        pe = pe.unsqueeze(0)
        # Registered as a buffer: saved/moved with the module, but not a
        # learnable parameter, so it stays out of the optimizer.
        self.register_buffer('pe', pe)

    def forward(self, x):
        """Add positional encodings to *x* (batch, seq, d_model) and apply dropout."""
        # Slice the precomputed table to the actual sequence length. The
        # buffer already has requires_grad=False, so no Variable wrapper or
        # detach() is needed.
        x = x + self.pe[:, :x.size(1)]
        return self.dropout(x)
| 30.441176 | 69 | 0.604831 |
626f38da05503cf61a6fbd11c6bb8ccaff769346 | 1,425 | py | Python | Python_MiniGame_Fighter/venv/Lib/site-packages/pygame/__pyinstaller/hook-pygame.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | Python_MiniGame_Fighter/venv/Lib/site-packages/pygame/__pyinstaller/hook-pygame.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | Python_MiniGame_Fighter/venv/Lib/site-packages/pygame/__pyinstaller/hook-pygame.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | """
binaries hook for pygame seems to be required for pygame 2.0 Windows.
Otherwise some essential DLLs will not be transfered to the exe.
And also put hooks for datas, resources that pygame uses, to work
correctly with pyinstaller
"""
import os
import platform
from pygame import __file__ as pygame_main_file
# Get pygame's folder
pygame_folder = os.path.dirname(os.path.abspath(pygame_main_file))
# datas is the variable that pyinstaller looks for while processing hooks
datas = []
# A helper to append the relative path of a resource to hook variable - datas
def _append_to_datas(file_path):
global datas
res_path = os.path.join(pygame_folder, file_path)
if os.path.exists(res_path):
datas.append((res_path, "pygame"))
# First append the font file, then based on the OS, append pygame icon file
_append_to_datas("freesansbold.ttf")
if platform.system() == "Darwin":
_append_to_datas("pygame_icon.tiff")
else:
_append_to_datas("pygame_icon.bmp")
if platform.system() == "Windows":
from PyInstaller.utils.hooks import collect_dynamic_libs
pre_binaries = collect_dynamic_libs('pygame')
binaries = []
for b in pre_binaries:
binary, location = b
# settles all the DLLs into the top level folder, which prevents duplication
# with the DLLs already being put there.
binaries.append((binary, "."))
| 31.666667 | 85 | 0.711579 |
c099f12343b2a5b81c747582e03b26b13b2f6723 | 33,161 | py | Python | vealous/libs/pydouban.py | lepture/Vealous | a13956a70871419f24bab7e7c9ecbaf09337feb1 | [
"BSD-3-Clause"
] | 1 | 2019-04-14T19:58:42.000Z | 2019-04-14T19:58:42.000Z | vealous/libs/pydouban.py | lepture/Vealous | a13956a70871419f24bab7e7c9ecbaf09337feb1 | [
"BSD-3-Clause"
] | null | null | null | vealous/libs/pydouban.py | lepture/Vealous | a13956a70871419f24bab7e7c9ecbaf09337feb1 | [
"BSD-3-Clause"
] | null | null | null | #-*- coding: utf-8 -*-
"""
pydouban
A lightweight douban api library.
Basic Usage:
>>> import pydouban
>>> key = 'your douban oauth consumer key'
>>> secret = 'your douban oauth consumer secret'
>>> auth = pydouban.Auth(key, secret)
>>> dic = auth.login()
>>> print dic['url']
...
>>> token_qs = auth.get_acs_token(dic['oauth_token'],dic['oauth_token_secret'])
>>> api = pydouban.Api()
>>> print api.search_people('ahbei')
>>> api.set_qs_oauth(key, secret, qs)
>>> print api.get_profile()
"""
'''
The BSD License
Copyright (c) 2010, Marvour <marvour@gmail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the author nor the names of its contributors
may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
__version__ = '1.0.0'
__author__ = 'Hsiaoming Young<i@shiao.org>'
__website__ = 'http://i.shiao.org/a/pydouban'
import hmac
import urllib
import urllib2
import httplib
from hashlib import sha1
from random import getrandbits
from time import time
from cgi import escape
try:
import json # Python >= 2.6
except ImportError:
try:
import simplejson as json # Python < 2.6
except ImportError:
try:
from django.utils import simplejson as json
except ImportError:
raise ImportError
AUTH_URL = 'http://www.douban.com/service/auth'
API_URL = 'http://api.douban.com'
class Auth(object):
    """
    Easy OAuth 1.0 for Douban.

    Handles the three-legged flow: fetch a request token (``login``),
    send the user to the authorize URL it returns, then exchange the
    authorized request token for an access token (``get_acs_token``).

    >>> auth = pydouban.Auth(key, secret)
    >>> print auth.login()
    more information on http://www.douban.com/service/apidoc/auth
    """
    # Class-level defaults; instances overwrite these once a request or
    # access token has been obtained.
    _token = ''
    _token_secret = ''
    def __init__(self, key='', secret=''):
        # Consumer credentials are only bound when non-empty so that
        # get_oauth_params can assert on their presence later.
        if key:
            self._key = key
        if secret:
            self._secret = secret
    def set_consumer(self, key, secret):
        # Set the application's consumer key/secret.
        self._key = key
        self._secret = secret
    def set_token(self, token, token_secret):
        # Set the (request or access) token pair directly.
        self._token = token
        self._token_secret = token_secret
    def set_qs_token(self, qs):
        # Like set_token, but parses the raw query string returned by the
        # token endpoints ('oauth_token=...&oauth_token_secret=...').
        dic = _qs2dict(qs)
        self._token = dic['oauth_token']
        self._token_secret = dic['oauth_token_secret']
    def get_oauth_params(self, url, params, method='GET'):
        # Sign `params` for `url`/`method` with HMAC-SHA1 per OAuth 1.0.
        # NOTE(review): this mutates the `params` dict it is given — the
        # oauth_* fields and the final signature are written into it.
        assert hasattr(self, '_key'), "need consumer key."
        assert hasattr(self, '_secret'), "need consumer secret."
        oauth_params = {
            'oauth_consumer_key': self._key,
            'oauth_signature_method': 'HMAC-SHA1',
            'oauth_timestamp': int(time()),
            'oauth_nonce': getrandbits(64),
            'oauth_version': '1.0'}
        if self._token:
            oauth_params['oauth_token'] = self._token
        # Add other params
        params.update(oauth_params)
        # Build the signature base string: sorted, percent-encoded
        # key=value pairs, then METHOD&url&paramstring, all encoded.
        s = ''
        for k in sorted(params):
            s += _quote(k) + '=' + _quote(params[k]) + '&'
        msg = method + '&' + _quote(url) + '&' + _quote(s[:-1])
        # Signing key is consumer_secret&token_secret; token_secret may
        # legitimately be empty before any token exists.
        key = self._secret + '&' + self._token_secret
        digest = hmac.new(key, msg, sha1).digest()
        # str.encode('base64') is Python-2-only; [:-1] strips the
        # trailing newline the codec appends.
        params['oauth_signature'] = digest.encode('base64')[:-1]
        return params
    def login(self, callback=None):
        # Steps 1-2 of the OAuth dance: fetch a request token and build
        # the authorize URL the user must visit. Returns a dict with the
        # token pair plus 'params' and 'url' keys.
        req_token_url = AUTH_URL + '/request_token'
        params = self.get_oauth_params(req_token_url, {})
        res = urllib2.urlopen(url=req_token_url + '?' + _dict2qs(params))
        if 200 != res.code:
            raise Exception('OAuth Request Token Error: ' + res.read())
        dic = _qs2dict(res.read())
        self._token = dic['oauth_token']
        self._token_secret = dic['oauth_token_secret']
        sig_params = {'oauth_signature': params['oauth_signature']}
        dic['params'] = _dict2qs(self.get_oauth_params(req_token_url,sig_params))
        dic['url'] = AUTH_URL + '/authorize?' + dic['params']
        if callback:
            dic['url'] += '&' + _dict2qs({'oauth_callback':callback})
        return dic
    def get_acs_token(self, req_token, req_token_secret):
        # Step 3: exchange an authorized request token for an access
        # token. Returns the server's raw query-string response
        # (feed it to Api.set_qs_oauth / set_qs_token).
        acs_token_url = AUTH_URL + '/access_token'
        self._token = req_token
        self._token_secret = req_token_secret
        params = self.get_oauth_params(acs_token_url, {})
        res = urllib2.urlopen(url=acs_token_url + '?' + _dict2qs(params))
        if 200 != res.code:
            raise Exception('OAuth Access Token Error: ' + res.read())
        return res.read() # qs
class Api(object):
    """
    Douban API Service
    Documentation on http://i.shiao.org/a/pydouban
    more information on http://www.douban.com/service/apidoc/reference/
    """
    def __init__(self, alt='json', start_index=1, max_results=10, skip_read=True):
        # alt selects the response format; anything but atom/xml falls
        # back to json, which _get_open post-processes into _FormateData.
        if alt in ('atom', 'xml'):
            self._alt = 'atom'
        else:
            self._alt = 'json'
        self._start = start_index   # pagination: first item (1-based)
        self._max = max_results     # pagination: page size
        self._skip = skip_read      # if True, write methods return True instead of the body
    def set_var(self, start_index, max_results):
        # Change pagination for subsequent calls.
        self._start = start_index
        self._max = max_results
    def set_key(self, key):
        # API key for unauthenticated (public) requests.
        self._key = key
    def set_oauth(self, key, secret, acs_token, acs_token_secret):
        # Authenticate with an explicit access-token pair.
        self._oauth = Auth(key, secret)
        self._oauth.set_token(acs_token, acs_token_secret)
    def set_qs_oauth(self, key, secret, qs):
        # Authenticate with the raw query string from Auth.get_acs_token.
        self._oauth = Auth(key, secret)
        self._oauth.set_qs_token(qs)
    #{{{ method
    # Low-level HTTP helpers. All take params=None rather than params={}:
    # the previous mutable default was mutated by Auth.get_oauth_params
    # (it writes the oauth_* fields and oauth_signature into the dict),
    # so a stale oauth_signature leaked into every later default-arg call
    # and corrupted its signature.
    def _post(self, path, body, params=None):
        assert hasattr(self, '_oauth'), "need be authed."
        if params is None:
            params = {}
        res_url = API_URL + path
        dic = self._oauth.get_oauth_params(res_url, params, 'POST')
        headers = _dict2header(dic)
        headers['Content-Type'] = 'application/atom+xml; charset=utf-8'
        con = httplib.HTTPConnection('api.douban.com', 80)
        con.request('POST', path, body, headers)
        res = con.getresponse()
        if 201 != res.status:
            raise Exception('Douban Post Error : ' + str(res.status))
        if self._skip:
            return True
        return res.read()
    def _put(self, path, body, params=None):
        assert hasattr(self, '_oauth'), "need be authed."
        if params is None:
            params = {}
        res_url = API_URL + path
        dic = self._oauth.get_oauth_params(res_url, params, 'PUT')
        headers = _dict2header(dic)
        headers['Content-Type'] = 'application/atom+xml; charset=utf-8'
        con = httplib.HTTPConnection('api.douban.com', 80)
        con.request('PUT', path, body, headers)
        res = con.getresponse()
        if 202 != res.status:
            raise Exception('Douban Put Error : ' + str(res.status))
        if self._skip:
            return True
        return res.read()
    def _del(self, path, params=None):
        assert hasattr(self, '_oauth'), "need be authed."
        if params is None:
            params = {}
        res_url = API_URL + path
        dic = self._oauth.get_oauth_params(res_url, params, 'DELETE')
        headers = _dict2header(dic)
        con = httplib.HTTPConnection('api.douban.com', 80, timeout=5)
        con.request('DELETE', path, None, headers)
        res = con.getresponse()
        if 200 != res.status:
            raise Exception('Douban Delete Error : ' + str(res.status))
        if self._skip:
            return True
        return res.read()
    def _get_open(self, url):
        # Shared GET executor; json responses are normalized into
        # _FormateData, atom responses are returned verbatim.
        res = urllib2.urlopen(url)
        if 200 != res.code:
            raise Exception('Douban Get Error : ' + str(res.code))
        if 'json' == self._alt:
            return _FormateData.render(res.read())
        return res.read()
    def _get(self, path, params=None):
        # Authenticated (OAuth-signed) GET.
        assert hasattr(self, '_oauth'), "need be authed."
        if params is None:
            params = {}
        params.update({'alt': self._alt})
        res_url = API_URL + path
        dic = self._oauth.get_oauth_params(res_url, params, 'GET')
        url = res_url + '?' + _dict2qs(dic)
        return self._get_open(url)
    def _get_public(self, path, params=None):
        # Unauthenticated GET; includes the API key when one is set.
        if params is None:
            params = {}
        path += '?alt=' + self._alt
        if params:
            path += '&' + _dict2qs(params)
        if hasattr(self, '_key'):
            path += '&apikey=' + self._key
        url = API_URL + path
        return self._get_open(url)
    #}}}
    #{{{ public method, no need for oauth
    def get_people(self, userID):
        path = '/people/%s?alt=%s' % (userID, self._alt)
        if hasattr(self, '_key'):
            path = '/people/%s?alt=%s&apikey=%s' % (userID, self._alt, self._key)
        return self._get_open(API_URL+path)
    def search_people(self, q):
        q = _escape(q)
        dic = {'q':q, 'alt':self._alt,'start-index':self._start,'max-results':self._max}
        path = '/people?' + _dict2qs(dic)
        if hasattr(self, '_key'):
            path += '&apikey=' + self._key
        return self._get_open(url=API_URL+path)
    def _sq(self, q, tag=None, var='movie'):
        # Shared subject search for movie/book/music.
        q = _escape(q)
        path = '/%s/subjects' % var
        dic = {'q':q, 'alt':self._alt,'start':self._start,'max':self._max}
        s = '?q=%(q)s&alt=%(alt)s&start-index=%(start)s&max-results=%(max)s'\
            % dic
        if tag:
            s += '&tag=' + tag
        if hasattr(self, '_key'):
            s += '&apikey=' + self._key
        return self._get_open(API_URL + path + s)
    def search_movie(self, q, tag=None):
        return self._sq(q, tag, var='movie')
    def search_book(self, q, tag=None):
        return self._sq(q, tag, var='book')
    def search_music(self, q, tag=None):
        return self._sq(q, tag, var='music')
    #}}}
    #{{{ resource (public and oauth)
    # Each getter works both authenticated (signed) and public.
    def get_book_byid(self, subjectID):
        path = '/book/subject/%s' % subjectID
        if hasattr(self, '_oauth'):
            return self._get(path)
        return self._get_public(path)
    def get_book_byisbn(self, isbnID):
        path = '/book/subject/isbn/%s' % isbnID
        if hasattr(self, '_oauth'):
            return self._get(path)
        return self._get_public(path)
    def get_movie_byid(self, subjectID):
        path = '/movie/subject/%s' % subjectID
        if hasattr(self, '_oauth'):
            return self._get(path)
        return self._get_public(path)
    def get_movie_byimdb(self, imdbID):
        path = '/movie/subject/imdb/%s' % imdbID
        if hasattr(self, '_oauth'):
            return self._get(path)
        return self._get_public(path)
    def get_music_byid(self, subjectID):
        path = '/music/subject/%s' % subjectID
        if hasattr(self, '_oauth'):
            return self._get(path)
        return self._get_public(path)
    def get_book_tags(self, subjectID):
        path = '/book/subject/%s/tags' % subjectID
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get_public(path, params)
    def get_movie_tags(self, subjectID):
        path = '/movie/subject/%s/tags' % subjectID
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get_public(path, params)
    def get_music_tags(self, subjectID):
        path = '/music/subject/%s/tags' % subjectID
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get_public(path, params)
    def get_user_tags(self, userID, cat='book'):
        path = '/people/%s/tags' % userID
        if cat not in ('book','music','movie'):
            cat = 'book'
        params = {'cat': cat,'start-index': self._start, 'max-results': self._max}
        return self._get_public(path, params)
    #}}}
    #{{{ user info
    def get_profile(self):
        """ get authed user's information"""
        path = '/people/%40me'
        return self._get(path)
    def get_friends(self):
        """ get authed user's friends"""
        path = '/people/%40me/friends'
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    def get_contacts(self):
        ''' get authed user's contacts'''
        path = '/people/%40me/contacts'
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    #}}}
    #{{{ collection
    # http://www.douban.com/service/apidoc/reference/collection
    def get_collections(self, cat, status=None, tag=None):
        if cat not in ('book','movie','music'):
            cat = 'book'
        path = '/people/%40me/collection'
        params = {'cat': cat, 'start-index': self._start, 'max-results': self._max}
        if tag:
            params.update({'tag':tag})
        if status:
            params.update({'status':status})
        return self._get(path, params)
    def get_collection(self, collectionID):
        path = '/collection/%s' % collectionID
        return self._get(path)
    def _collection_atom(self, sourceURL, status, rating, tags, comment, privacy):
        # Build the Atom entry body for collection POST/PUT. rating is
        # clamped to [0, 5]; tags must be a list; privacy defaults to
        # 'public' on any unexpected value.
        if int(rating) > 5:
            rating = 5
        elif int(rating) < 0:
            rating = 0
        if not isinstance(tags, list):
            raise TypeError
        if privacy not in ('public', 'private'):
            privacy = 'public'
        atom = _atom_db_header
        atom += '<db:status>' + status + '</db:status>'
        for tag in tags:
            atom += '<db:tag name="%s" />' % _escape(tag)
        atom += '<gd:rating xmlns:gd="http://schemas.google.com/g/2005" value="%s" />' % rating
        atom += '<content>%s</content>' % _escape(comment)
        atom += '<db:subject><id>%s</id></db:subject>' % sourceURL
        atom += '<db:attribute name="privacy">%s</db:attribute></entry>' % privacy
        return atom
    def post_collection(self, sourceURL, status, rating=0, tags=[], comment='', privacy='public'):
        # NOTE(review): unlike other methods, this passes a full URL as
        # `path`, so _post signs API_URL + full URL — looks suspect but
        # is preserved as-is; verify against the live endpoint.
        path = 'http://api.douban.com/collection'
        atom = self._collection_atom(sourceURL, status, rating, tags, comment, privacy)
        return self._post(path, atom)
    def update_collection(self, collectionID, sourceURL, status, rating=0, tags=[], comment='', privacy='public'):
        path = 'http://api.douban.com/collection/%s' % collectionID
        atom = self._collection_atom(sourceURL, status, rating, tags, comment, privacy)
        return self._put(path, atom)
    def del_collection(self, collectionID):
        path = 'http://api.douban.com/collection/%s' % collectionID
        return self._del(path)
    #}}}
    #{{{ events
    def get_events(self):
        ''' get authed user's events'''
        path = '/people/%40me/events'
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    def get_initiate_events(self):
        ''' get authed user's initiate events'''
        path = '/people/%40me/events/initiate'
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    def get_participate_events(self):
        ''' get authed user's participate events'''
        path = '/people/%40me/events/participate'
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    def get_wish_events(self):
        ''' get authed user's wish events'''
        path = '/people/%40me/events/wish'
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    # { no oauth needed
    def get_event(self, eventID):
        path = '/event/%s' % eventID
        if hasattr(self, '_oauth'):
            return self._get(path)
        return self._get_public(path)
    def get_event_participants(self, eventID):
        path = '/event/%s/participants' % eventID
        params = {'start-index': self._start, 'max-results': self._max}
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def get_event_wishers(self, eventID):
        path = '/event/%s/wishers' % eventID
        params = {'start-index': self._start, 'max-results': self._max}
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def get_user_events(self, userID):
        path = '/people/%s/events' % userID
        params = {'start-index': self._start, 'max-results': self._max}
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def get_user_initiate_events(self, userID):
        path = '/people/%s/events/initiate' % userID
        params = {'start-index': self._start, 'max-results': self._max}
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def get_user_participate_events(self, userID):
        path = '/people/%s/events/participate' % userID
        params = {'start-index': self._start, 'max-results': self._max}
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def get_user_wish_events(self, userID):
        path = '/people/%s/events/wish' % userID
        params = {'start-index': self._start, 'max-results': self._max}
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def get_location_events(self, locationID):
        path = '/event/location/%s' % locationID
        params = {'start-index': self._start, 'max-results': self._max}
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def search_events(self, q, term='all', location='all'):
        path = '/events'
        q = _escape(q)
        params = {'q': q, 'location': location, 'type': term,
                  'start-index': self._start, 'max-results': self._max}
        return self._get_public(path, params)
    # }
    def _event_atom(self, title, content, where, term, invite_only, can_invite):
        # Build the Atom entry body for event POST/PUT; out-of-range
        # values fall back to safe defaults.
        if term not in 'commonweal,drama,exhibition,film,music,others,party,salon,sports,travel'.split(','):
            term = 'all'
        if invite_only not in ('yes', 'no'):
            invite_only = 'no'
        if can_invite not in ('yes', 'no'):
            can_invite = 'yes'
        atom = _atom_sq_header
        atom += '<title>%s</title>' % _escape(title)
        atom += '<category scheme="http://www.douban.com/2007#kind" term="http://www.douban.com/2007#event.%s"/>' % term
        atom += '<content>%s</content>' % _escape(content)
        atom += '<db:attribute name="invite_only">%s</db:attribute>' % invite_only
        atom += '<db:attribute name="can_invite">%s</db:attribute>' % can_invite
        atom += '<gd:where valueString="%s" /></entry>' % _escape(where)
        return atom
    def post_event(self, title, content, where, term='all', invite_only='no', can_invite='yes'):
        path = '/events'
        # Fix: _event_atom is a method; the unqualified call raised
        # NameError at runtime.
        atom = self._event_atom(title, content, where, term, invite_only, can_invite)
        return self._post(path, atom)
    def update_event(self, eventID, title, content, where, term='all', invite_only='no', can_invite='yes'):
        path = '/event/%s' % eventID
        atom = self._event_atom(title, content, where, term, invite_only, can_invite)
        # Fix: updates use PUT, matching update_note/update_review/
        # update_collection (the old _post also hit the NameError above,
        # so this path never worked).
        return self._put(path, atom)
    def join_event(self, eventID):
        path = '/event/%s/participants' % eventID
        return self._post(path, None)
    def wish_event(self, eventID):
        path = '/event/%s/wishers' % eventID
        return self._post(path, None)
    def unjoin_event(self, eventID):
        path = '/event/%s/participants' % eventID
        return self._del(path)
    def unwish_event(self, eventID):
        path = '/event/%s/wishers' % eventID
        return self._del(path)
    def del_event(self, eventID, content):
        path = '/event/%s/delete' % eventID
        atom = _atom_db_header
        atom += '<content>%s</content></entry>' % _escape(content)
        return self._post(path, atom) #TODO status code
    #}}}
    #{{{ note
    def get_notes(self):
        path = '/people/%40me/notes'
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    def get_note(self, noteID):
        path = '/note/%s' % noteID
        return self._get(path)
    def _note_atom(self, title, content, privacy, can_reply):
        # Build the Atom entry body for note POST/PUT. Private notes are
        # forced to can_reply='no'.
        if privacy not in ('public', 'friend', 'private'):
            privacy = 'private'
        if can_reply not in ('yes', 'no'):
            can_reply = 'no'
        if 'private' == privacy:
            can_reply = 'no'
        atom = _atom_db_header
        atom += '<title>%s</title>' % _escape(title)
        atom += '<content>%s</content>' % _escape(content)
        atom += '<db:attribute name="privacy">%s</db:attribute>' % privacy
        atom += '<db:attribute name="can_reply">%s</db:attribute>' % can_reply
        atom += '</entry>'
        return atom
    def post_note(self, title, content, privacy='public', can_reply='yes'):
        path = '/notes'
        atom = self._note_atom(title, content, privacy, can_reply)
        return self._post(path, atom)
    def update_note(self, noteID, title, content, privacy='public', can_reply='yes'):
        path = '/note/%s' % noteID
        atom = self._note_atom(title, content, privacy, can_reply)
        return self._put(path, atom)
    def del_note(self, noteID):
        path = '/note/%s' % noteID
        return self._del(path)
    #}}}
    #{{{ review
    # http://www.douban.com/service/apidoc/reference/review
    def get_review(self, reviewID):
        path = '/review/%s' % reviewID
        if hasattr(self, '_oauth'):
            return self._get(path)
        return self._get_public(path)
    def get_user_reviews(self, userID):
        path = '/people/%s/reviews' % userID
        params = {'start-index': self._start, 'max-results': self._max}
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def _get_subject_reviews(self, path):
        params = {'start-index': self._start, 'max-results': self._max}
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def get_book_reviews_byid(self, subjectID):
        path = '/book/subject/%s/reviews' % subjectID
        return self._get_subject_reviews(path)
    def get_book_reviews_byisbn(self, isbnID):
        path = '/book/subject/isbn/%s/reviews' % isbnID
        return self._get_subject_reviews(path)
    def get_movie_reviews_byid(self, subjectID):
        path = '/movie/subject/%s/reviews' % subjectID
        return self._get_subject_reviews(path)
    def get_movie_reviews_byimdb(self, imdbID):
        path = '/book/subject/imdb/%s/reviews' % imdbID
        return self._get_subject_reviews(path)
    def get_music_reviews_byid(self, subjectID):
        path = '/music/subject/%s/reviews' % subjectID
        return self._get_subject_reviews(path)
    def post_review(self, sourceURL, title, rating, content):
        path = '/reviews'
        # rating clamped to [0, 5], same rule as collections.
        if int(rating) > 5:
            rating = 5
        elif int(rating) < 0:
            rating = 0
        atom = _atom_db_header
        atom += '<db:subject xmlns:db="http://www.douban.com/xmlns/"><id>%s</id></db:subject>' % sourceURL
        atom += '<content>%s</content>' % _escape(content)
        atom += '<gd:rating xmlns:gd="http://schemas.google.com/g/2005" value="%s" />' % rating
        atom += '<title>%s</title></entry>' % _escape(title)
        return self._post(path, atom)
    def update_review(self, reviewID, sourceURL, title, rating, content):
        path = '/review/%s' % reviewID
        if int(rating) > 5:
            rating = 5
        elif int(rating) < 0:
            rating = 0
        atom = _atom_db_header
        atom += '<db:subject xmlns:db="http://www.douban.com/xmlns/"><id>%s</id></db:subject>' % sourceURL
        atom += '<content>%s</content>' % _escape(content)
        atom += '<gd:rating xmlns:gd="http://schemas.google.com/g/2005" value="%s" />' % rating
        atom += '<title>%s</title></entry>' % _escape(title)
        return self._put(path, atom)
    def del_review(self, reviewID):
        path = '/review/%s' % reviewID
        return self._del(path)
    #}}}
    #{{{ miniblog
    def get_user_miniblog(self, userID, term=None):
        path = '/people/%s/miniblog' % userID
        params = {'start-index': self._start, 'max-results': self._max}
        if term:
            params.update({'type':term})
        # Fix: params were built but never passed, so pagination and the
        # type filter were silently ignored.
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def get_miniblog(self, term=None):
        path = '/people/%40me/miniblog'
        params = {'start-index': self._start, 'max-results': self._max}
        if term:
            # set term='saying' to filter
            params.update({'type':term})
        return self._get(path, params)
    def get_user_contacts_miniblog(self, userID, term=None):
        path = '/people/%s/miniblog/contacts' % userID
        params = {'start-index': self._start, 'max-results': self._max}
        if term:
            params.update({'type':term})
        # Fix: same dropped-params bug as get_user_miniblog.
        if hasattr(self, '_oauth'):
            return self._get(path, params)
        return self._get_public(path, params)
    def get_contacts_miniblog(self, term=None):
        path = '/people/%40me/miniblog/contacts'
        params = {'start-index': self._start, 'max-results': self._max}
        if term:
            # set term='saying' to filter
            params.update({'type':term})
        return self._get(path, params)
    def get_miniblog_replies(self, miniblogID):
        path = '/miniblog/%s/comments' % miniblogID
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    def post_miniblog(self, content):
        path = '/miniblog/saying'
        atom = _atom_db_header
        atom += '<content>%s</content></entry>' % _escape(content)
        return self._post(path, atom)
    def post_miniblog_reply(self, miniblogID, content):
        path = '/miniblog/%s/comments' % miniblogID
        atom = _atom_db_header
        atom += '<content>%s</content></entry>' % _escape(content)
        return self._post(path, atom)
    def del_miniblog(self, miniblogID):
        path = '/miniblog/%s' % miniblogID
        return self._del(path)
    #}}}
    #{{{ recommendation
    # http://www.douban.com/service/apidoc/reference/recommendation
    def get_recommendation(self, recommendationID):
        path = '/recommendation/%s' % recommendationID
        return self._get_public(path)
    def get_user_recommendations(self, userID):
        path = '/people/%s/recommendations' % userID
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get_public(path, params)
    def get_recommendation_replies(self, recommendationID):
        path = '/recommendation/%s/comments' % recommendationID
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get_public(path, params)
    def post_recommendation(self, sourceURL, title, content, rel='related'):
        path = '/recommendations'
        atom = _atom_sq_header
        atom += '<title>%s</title>' % _escape(title)
        atom += '<db:attribute name="comment">%s</db:attribute>' % _escape(content)
        atom += '<link href="%s" rel="%s" /></entry>' % (sourceURL, rel)
        return self._post(path, atom)
    def del_recommendation(self, recommendationID):
        path = '/recommendation/%s' % recommendationID
        return self._del(path)
    def reply_recommendation(self, recommendationID, content):
        path = '/recommendation/%s/comments' % recommendationID
        atom = '<?xml version="1.0" encoding="UTF-8"?><entry><content>%s</content></entry>' % _escape(content)
        return self._post(path, atom)
    def del_recommendation_reply(self, recommendationID, replyID):
        path = '/recommendation/%s/comment/%s' % (recommendationID, replyID)
        return self._del(path)
    #}}}
    #{{{ douban mail
    # http://www.douban.com/service/apidoc/reference/doumail
    def get_mail(self, doumailID):
        path = '/doumail/%s' % doumailID
        return self._get(path)
    def get_inbox_mails(self):
        path = '/doumail/inbox'
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    def get_unread_mails(self):
        path = '/doumail/inbox/unread'
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    def get_outbox_mails(self):
        path = '/doumail/outbox'
        params = {'start-index': self._start, 'max-results': self._max}
        return self._get(path, params)
    def post_mail(self, receiverID, title, content):
        # Fix: `path` was previously undefined here (NameError on every
        # call); doumail is created by POSTing to /doumails per the
        # Douban apidoc doumail reference.
        path = '/doumails'
        atom = _atom_sq_header
        atom += '<db:entity name="receiver"><uri>http://api.douban.com/people/%s</uri></db:entity>' % _escape(receiverID)
        atom += '<content>%s</content>' % _escape(content)
        atom += '<title>%s</title>' % _escape(title)
        atom += '</entry>'
        return self._post(path, atom)
    def del_mail(self, doumailID):
        path = '/doumail/%s' % doumailID
        return self._del(path)
    def mark_mail_read(self, doumailID):
        path = '/doumail/%s' % doumailID
        atom = _atom_sq_header
        atom += '<db:attribute name="unread">false</db:attribute></entry>'
        return self._put(path, atom)
def _escape(s):
    # HTML-escape a value for embedding in Atom XML bodies or query
    # strings. UTF-8-encode unicode first (Python 2 semantics: encoding
    # a byte str implicitly decodes it as ASCII, and a non-ASCII str
    # raises UnicodeDecodeError and is escaped unchanged).
    try: s = s.encode('utf-8')
    except UnicodeDecodeError: pass
    return escape(s)
def _quote(s):
    """Percent-encode ``s`` for OAuth signing, leaving only '~' unescaped."""
    text = str(s)
    return urllib.quote(text, '~')
def _qs2dict(s):
dic = {}
for param in str(s).split('&'):
(key, value) = param.split('=')
dic[key] = value
return dic
def _dict2qs(dic):
    """Serialize a dict into a query string, percent-encoding the values.

    Keys are assumed already URL-safe. Uses ``items()`` instead of the
    Python-2-only ``iteritems()`` so the helper also runs on Python 3
    (identical behavior on Python 2).
    """
    return '&'.join(['%s=%s' % (key, _quote(value)) for key, value in dic.items()])
def _dict2header(dic):
    """Build the OAuth 1.0 'Authorization' HTTP header from signed params.

    Only ``oauth_*`` keys are included, per the OAuth header scheme.
    Uses ``items()`` instead of the Python-2-only ``iteritems()`` so the
    helper also runs on Python 3 (identical behavior on Python 2).
    """
    s = ', '.join(['%s="%s"' % (k, _quote(v)) for k, v in dic.items() if k.startswith('oauth_')])
    auth_header = 'OAuth realm="", %s' % s
    return {'Authorization': auth_header}
# Atom envelope openers used when building POST/PUT request bodies.
# _atom_db_header: minimal <entry> declaring only the douban (db) namespace.
_atom_db_header = '''
<?xml version="1.0" encoding="UTF-8"?><entry xmlns:ns0="http://www.w3.org/2005/Atom" xmlns:db="http://www.douban.com/xmlns/">
'''
# _atom_sq_header: richer <entry> declaring the Atom, db, gd and
# opensearch namespaces (used for events, recommendations and doumail).
_atom_sq_header = '''
<?xml version="1.0" encoding="UTF-8"?><entry xmlns="http://www.w3.org/2005/Atom" xmlns:db="http://www.douban.com/xmlns/" xmlns:gd="http://schemas.google.com/g/2005" xmlns:opensearch="http://a9.com/-/spec/opensearchrss/1.0/">
'''
class _FormateData(dict):
    """
    A dict whose items are also reachable as attributes
    (copy of web.utils.Storage), used to wrap decoded JSON responses.
    Python 2 only: the except/raise syntax below is not valid Python 3.
    """
    @classmethod
    def _json_dic(cls, dic):
        # Recursively convert a plain dict (and dicts inside lists) into
        # _FormateData instances, stripping the namespace noise Douban's
        # json alt format puts in keys: 'db:', 'gd:', 'opensearch:',
        # 'openSearch:' prefixes and the leading '$' / '@' markers.
        store = cls()
        for key in dic.keys():
            value = dic[key]
            if isinstance(value, dict):
                value = cls._json_dic(value)
            elif isinstance(dic[key], list):
                value = [cls._json_dic(subdic) for subdic in dic[key]]
            if key.startswith('db:'):
                key = key.replace('db:','')
            elif key.startswith('gd:'):
                key = key.replace('gd:','')
            elif key.startswith('opensearch:'):
                key = key.replace('opensearch:', '')
            elif key.startswith('openSearch:'):
                key = key.replace('openSearch:', '')
            elif key.startswith('$'):
                key = key.replace('$','')
            elif key.startswith('@'):
                key = key.replace('@','')
            store[key] = value
        return store
    @classmethod
    def render(cls, s):
        # Decode a JSON response body and normalize it recursively.
        dic = json.loads(s)
        return cls._json_dic(dic)
    def __getattr__(self, key):
        # Attribute access falls through to item access; a missing key
        # is reported as AttributeError so hasattr() works as expected.
        try:
            return self[key]
        except KeyError, k:
            raise AttributeError, k
    def __setattr__(self, key, value):
        self[key] = value
    def __delattr__(self, key):
        try:
            del self[key]
        except KeyError, k:
            raise AttributeError, k
    def __repr__(self):
        return dict.__repr__(self)
| 37.470056 | 224 | 0.600525 |
22b9e92cda9fe64fa0f81628a53b51cd039e0145 | 15,189 | py | Python | docs/flydra-sphinx-docs/ext/docscrape.py | elhananby/flydra | 09b86859b1863700cdea0bbcdd4758da6c83930b | [
"Apache-2.0",
"MIT"
] | 45 | 2017-08-25T06:46:56.000Z | 2021-08-29T16:42:49.000Z | docs/flydra-sphinx-docs/ext/docscrape.py | elhananby/flydra | 09b86859b1863700cdea0bbcdd4758da6c83930b | [
"Apache-2.0",
"MIT"
] | 7 | 2017-10-16T10:46:20.000Z | 2020-12-03T16:42:55.000Z | docs/flydra-sphinx-docs/ext/docscrape.py | elhananby/flydra | 09b86859b1863700cdea0bbcdd4758da6c83930b | [
"Apache-2.0",
"MIT"
] | 21 | 2018-04-11T09:06:40.000Z | 2021-12-26T23:38:40.000Z | """Extract reference documentation from the NumPy source tree.
"""
import inspect
import textwrap
import re
import pydoc
from warnings import warn
class Reader(object):
    """A line-based string reader with a movable cursor.

    Wraps either a string (split on newlines) or a pre-split list of
    lines, and exposes the sequential read operations used by the
    docstring parsers in this module.
    """
    def __init__(self, data):
        """
        Parameters
        ----------
        data : str or list of str
            String with lines separated by '\n', or the lines themselves.
        """
        if isinstance(data, list):
            self._str = data
        else:
            self._str = data.split('\n')  # store string as list of lines
        self.reset()

    def __getitem__(self, n):
        return self._str[n]

    def reset(self):
        """Move the cursor back to the first line."""
        self._l = 0  # current line nr

    def read(self):
        """Return the current line and advance; '' once past the end."""
        if not self.eof():
            out = self[self._l]
            self._l += 1
            return out
        else:
            return ''

    def seek_next_non_empty_line(self):
        """Advance the cursor past any blank lines."""
        for l in self[self._l:]:
            if l.strip():
                break
            else:
                self._l += 1

    def eof(self):
        """True when the cursor is past the last line."""
        return self._l >= len(self._str)

    def read_to_condition(self, condition_func):
        """Consume lines until ``condition_func(line)`` is true.

        Returns the consumed lines (excluding the matching line, which
        stays at the cursor). If no line matches, everything from the
        starting position is returned.
        """
        start = self._l
        for line in self[start:]:
            if condition_func(line):
                return self[start:self._l]
            self._l += 1
        # Ran off the end without a match; self._l == len(self._str), so
        # the +1 is a harmless clamp kept for parity with the original.
        # (The original also had a dead `return []` after this branch,
        # which was unreachable and has been removed.)
        return self[start:self._l+1]

    def read_to_next_empty_line(self):
        """Skip leading blanks, then read up to the next blank line."""
        self.seek_next_non_empty_line()
        def is_empty(line):
            return not line.strip()
        return self.read_to_condition(is_empty)

    def read_to_next_unindented_line(self):
        """Read up to the next non-blank line starting at column 0."""
        def is_unindented(line):
            return (line.strip() and (len(line.lstrip()) == len(line)))
        return self.read_to_condition(is_unindented)

    def peek(self, n=0):
        """Return the line ``n`` ahead of the cursor without advancing;
        '' when that position is past the end."""
        if self._l + n < len(self._str):
            return self[self._l + n]
        else:
            return ''

    def is_empty(self):
        """True when the wrapped text contains no non-whitespace at all."""
        return not ''.join(self._str).strip()
class NumpyDocString(object):
def __init__(self, docstring, config={}):
docstring = textwrap.dedent(docstring).split('\n')
self._doc = Reader(docstring)
self._parsed_data = {
'Signature': '',
'Summary': [''],
'Extended Summary': [],
'Parameters': [],
'Returns': [],
'Raises': [],
'Warns': [],
'Other Parameters': [],
'Attributes': [],
'Methods': [],
'See Also': [],
'Notes': [],
'Warnings': [],
'References': '',
'Examples': '',
'index': {}
}
self._parse()
def __getitem__(self,key):
return self._parsed_data[key]
def __setitem__(self,key,val):
if not self._parsed_data.has_key(key):
warn("Unknown section %s" % key)
else:
self._parsed_data[key] = val
def _is_at_section(self):
self._doc.seek_next_non_empty_line()
if self._doc.eof():
return False
l1 = self._doc.peek().strip() # e.g. Parameters
if l1.startswith('.. index::'):
return True
l2 = self._doc.peek(1).strip() # ---------- or ==========
return l2.startswith('-'*len(l1)) or l2.startswith('='*len(l1))
def _strip(self,doc):
i = 0
j = 0
for i,line in enumerate(doc):
if line.strip(): break
for j,line in enumerate(doc[::-1]):
if line.strip(): break
return doc[i:len(doc)-j]
def _read_to_next_section(self):
section = self._doc.read_to_next_empty_line()
while not self._is_at_section() and not self._doc.eof():
if not self._doc.peek(-1).strip(): # previous line was empty
section += ['']
section += self._doc.read_to_next_empty_line()
return section
def _read_sections(self):
while not self._doc.eof():
data = self._read_to_next_section()
name = data[0].strip()
if name.startswith('..'): # index section
yield name, data[1:]
elif len(data) < 2:
yield StopIteration
else:
yield name, self._strip(data[2:])
def _parse_param_list(self,content):
r = Reader(content)
params = []
while not r.eof():
header = r.read().strip()
if ' : ' in header:
arg_name, arg_type = header.split(' : ')[:2]
else:
arg_name, arg_type = header, ''
desc = r.read_to_next_unindented_line()
desc = dedent_lines(desc)
params.append((arg_name,arg_type,desc))
return params
_name_rgx = re.compile(r"^\s*(:(?P<role>\w+):`(?P<name>[a-zA-Z0-9_.-]+)`|"
r" (?P<name2>[a-zA-Z0-9_.-]+))\s*", re.X)
def _parse_see_also(self, content):
"""
func_name : Descriptive text
continued text
another_func_name : Descriptive text
func_name1, func_name2, :meth:`func_name`, func_name3
"""
items = []
def parse_item_name(text):
"""Match ':role:`name`' or 'name'"""
m = self._name_rgx.match(text)
if m:
g = m.groups()
if g[1] is None:
return g[3], None
else:
return g[2], g[1]
raise ValueError("%s is not a item name" % text)
def push_item(name, rest):
if not name:
return
name, role = parse_item_name(name)
items.append((name, list(rest), role))
del rest[:]
current_func = None
rest = []
for line in content:
if not line.strip(): continue
m = self._name_rgx.match(line)
if m and line[m.end():].strip().startswith(':'):
push_item(current_func, rest)
current_func, line = line[:m.end()], line[m.end():]
rest = [line.split(':', 1)[1].strip()]
if not rest[0]:
rest = []
elif not line.startswith(' '):
push_item(current_func, rest)
current_func = None
if ',' in line:
for func in line.split(','):
if func.strip():
push_item(func, [])
elif line.strip():
current_func = line
elif current_func is not None:
rest.append(line.strip())
push_item(current_func, rest)
return items
def _parse_index(self, section, content):
"""
.. index: default
:refguide: something, else, and more
"""
def strip_each_in(lst):
return [s.strip() for s in lst]
out = {}
section = section.split('::')
if len(section) > 1:
out['default'] = strip_each_in(section[1].split(','))[0]
for line in content:
line = line.split(':')
if len(line) > 2:
out[line[1]] = strip_each_in(line[2].split(','))
return out
def _parse_summary(self):
"""Grab signature (if given) and summary"""
if self._is_at_section():
return
summary = self._doc.read_to_next_empty_line()
summary_str = " ".join([s.strip() for s in summary]).strip()
if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str):
self['Signature'] = summary_str
if not self._is_at_section():
self['Summary'] = self._doc.read_to_next_empty_line()
else:
self['Summary'] = summary
if not self._is_at_section():
self['Extended Summary'] = self._read_to_next_section()
def _parse(self):
self._doc.reset()
self._parse_summary()
for (section,content) in self._read_sections():
if not section.startswith('..'):
section = ' '.join([s.capitalize() for s in section.split(' ')])
if section in ('Parameters', 'Attributes', 'Methods',
'Returns', 'Raises', 'Warns'):
self[section] = self._parse_param_list(content)
elif section.startswith('.. index::'):
self['index'] = self._parse_index(section, content)
elif section == 'See Also':
self['See Also'] = self._parse_see_also(content)
else:
self[section] = content
# string conversion routines
def _str_header(self, name, symbol='-'):
return [name, len(name)*symbol]
def _str_indent(self, doc, indent=4):
out = []
for line in doc:
out += [' '*indent + line]
return out
def _str_signature(self):
if self['Signature']:
return [self['Signature'].replace('*','\*')] + ['']
else:
return ['']
def _str_summary(self):
if self['Summary']:
return self['Summary'] + ['']
else:
return []
def _str_extended_summary(self):
if self['Extended Summary']:
return self['Extended Summary'] + ['']
else:
return []
def _str_param_list(self, name):
out = []
if self[name]:
out += self._str_header(name)
for param,param_type,desc in self[name]:
out += ['%s : %s' % (param, param_type)]
out += self._str_indent(desc)
out += ['']
return out
def _str_section(self, name):
out = []
if self[name]:
out += self._str_header(name)
out += self[name]
out += ['']
return out
def _str_see_also(self, func_role):
if not self['See Also']: return []
out = []
out += self._str_header("See Also")
last_had_desc = True
for func, desc, role in self['See Also']:
if role:
link = ':%s:`%s`' % (role, func)
elif func_role:
link = ':%s:`%s`' % (func_role, func)
else:
link = "`%s`_" % func
if desc or last_had_desc:
out += ['']
out += [link]
else:
out[-1] += ", %s" % link
if desc:
out += self._str_indent([' '.join(desc)])
last_had_desc = True
else:
last_had_desc = False
out += ['']
return out
def _str_index(self):
idx = self['index']
out = []
out += ['.. index:: %s' % idx.get('default','')]
for section, references in idx.items():
if section == 'default':
continue
out += [' :%s: %s' % (section, ', '.join(references))]
return out
def __str__(self, func_role=''):
out = []
out += self._str_signature()
out += self._str_summary()
out += self._str_extended_summary()
for param_list in ('Parameters','Returns','Raises'):
out += self._str_param_list(param_list)
out += self._str_section('Warnings')
out += self._str_see_also(func_role)
for s in ('Notes','References','Examples'):
out += self._str_section(s)
for param_list in ('Attributes', 'Methods'):
out += self._str_param_list(param_list)
out += self._str_index()
return '\n'.join(out)
def indent(str,indent=4):
indent_str = ' '*indent
if str is None:
return indent_str
lines = str.split('\n')
return '\n'.join(indent_str + l for l in lines)
def dedent_lines(lines):
"""Deindent a list of lines maximally"""
return textwrap.dedent("\n".join(lines)).split("\n")
def header(text, style='-'):
return text + '\n' + style*len(text) + '\n'
class FunctionDoc(NumpyDocString):
def __init__(self, func, role='func', doc=None, config={}):
self._f = func
self._role = role # e.g. "func" or "meth"
if doc is None:
if func is None:
raise ValueError("No function or docstring given")
doc = inspect.getdoc(func) or ''
NumpyDocString.__init__(self, doc)
if not self['Signature'] and func is not None:
func, func_name = self.get_func()
try:
# try to read signature
argspec = inspect.getargspec(func)
argspec = inspect.formatargspec(*argspec)
argspec = argspec.replace('*','\*')
signature = '%s%s' % (func_name, argspec)
except TypeError, e:
signature = '%s()' % func_name
self['Signature'] = signature
def get_func(self):
func_name = getattr(self._f, '__name__', self.__class__.__name__)
if inspect.isclass(self._f):
func = getattr(self._f, '__call__', self._f.__init__)
else:
func = self._f
return func, func_name
def __str__(self):
out = ''
func, func_name = self.get_func()
signature = self['Signature'].replace('*', '\*')
roles = {'func': 'function',
'meth': 'method'}
if self._role:
if not roles.has_key(self._role):
print "Warning: invalid role %s" % self._role
out += '.. %s:: %s\n \n\n' % (roles.get(self._role,''),
func_name)
out += super(FunctionDoc, self).__str__(func_role=self._role)
return out
class ClassDoc(NumpyDocString):
def __init__(self, cls, doc=None, modulename='', func_doc=FunctionDoc,
config={}):
if not inspect.isclass(cls) and cls is not None:
raise ValueError("Expected a class or None, but got %r" % cls)
self._cls = cls
if modulename and not modulename.endswith('.'):
modulename += '.'
self._mod = modulename
if doc is None:
if cls is None:
raise ValueError("No class or documentation string given")
doc = pydoc.getdoc(cls)
NumpyDocString.__init__(self, doc)
if config.get('show_class_members', True):
if not self['Methods']:
self['Methods'] = [(name, '', '')
for name in sorted(self.methods)]
if not self['Attributes']:
self['Attributes'] = [(name, '', '')
for name in sorted(self.properties)]
@property
def methods(self):
if self._cls is None:
return []
return [name for name,func in inspect.getmembers(self._cls)
if not name.startswith('_') and callable(func)]
@property
def properties(self):
if self._cls is None:
return []
return [name for name,func in inspect.getmembers(self._cls)
if not name.startswith('_') and func is None]
| 30.438878 | 80 | 0.500099 |
ba3e44fc1791088ca5be008229b0988840fd2d1d | 1,450 | py | Python | server.py | balajirama/guessing_game | 89813882b669d0af3e17c070bfe9b10c80a4f843 | [
"MIT"
] | null | null | null | server.py | balajirama/guessing_game | 89813882b669d0af3e17c070bfe9b10c80a4f843 | [
"MIT"
] | null | null | null | server.py | balajirama/guessing_game | 89813882b669d0af3e17c070bfe9b10c80a4f843 | [
"MIT"
] | null | null | null | from flask import Flask, render_template, request, redirect, session
app = Flask(__name__)
app.secret_key = 'darksecret'
import random
@app.route("/")
def root():
if 'number' not in session:
session['number'] = random.randint(1,100)
session['message'] = 'start'
session['trials'] = 0
session['placeholder'] = random.randint(1,100)
session['oldguesses'] = list()
session['wasted']=False
print("inside root() before render_template()")
return render_template("game.html")
@app.route("/guess", methods=['POST'])
def guess():
print("inside guess() before everything")
session['trials'] += 1
try:
session['guess']=int(request.form['guess'])
except:
session['guess']=session['placeholder']
if session['guess'] in session['oldguesses']:
session['wasted']=True
else:
session['wasted']=False
session['oldguesses'].append(session['guess'])
if session['guess']==session['number']:
session['message'] = 'match'
else:
if session['guess'] > session['number']:
session['message'] = 'high'
else:
session['message'] = 'low'
session['placeholder'] = random.randint(1,100)
print("inside guess() before redirect()")
return redirect("/")
@app.route("/restart")
def restart():
session.clear()
return redirect("/")
if __name__ == '__main__':
app.run(debug=True)
| 27.884615 | 68 | 0.606207 |
9afbe918c33006f6fb33d20a64a6ac380fe6ce4c | 2,351 | py | Python | RecoJets/JetPlusTracks/test/JetShapeJPTAnalysis_mc_s_sql_qcd_pat2.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | RecoJets/JetPlusTracks/test/JetShapeJPTAnalysis_mc_s_sql_qcd_pat2.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | RecoJets/JetPlusTracks/test/JetShapeJPTAnalysis_mc_s_sql_qcd_pat2.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
process = cms.Process("RECO4")
process.load('Configuration/StandardSequences/Services_cff')
process.load('FWCore/MessageService/MessageLogger_cfi')
process.load("Configuration.StandardSequences.GeometryDB_cff")
process.load('Configuration.StandardSequences.MagneticField_AutoFromDBCurrent_cff')
process.load('Configuration/StandardSequences/Reconstruction_cff')
process.load('Configuration/StandardSequences/EndOfProcess_cff')
process.load('Configuration/StandardSequences/FrontierConditions_GlobalTag_cff')
process.load('Configuration/EventContent/EventContent_cff')
process.load('RecoJets.Configuration.RecoJPTJets_cff')
process.load('JetMETCorrections.Configuration.CorrectedJetProducersAllAlgos_cff')
process.load('JetMETCorrections.Configuration.CorrectedJetProducers_cff')
process.load('JetMETCorrections.Configuration.JetCorrectors_cff')
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:phase1_2018_realistic', '')
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(100)
)
#################################################################
### For 219, file from RelVal
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring(
'/store/relval/CMSSW_10_6_4/RelValProdTTbar_13_pmx25ns/MINIAODSIM/PUpmx25ns_106X_upgrade2018_realistic_v9-v1/10000/87AD30D2-F673-F54C-8974-CB916CC66098.root'
)
)
process.RECOoutput = cms.OutputModule("PoolOutputModule",
outputCommands = cms.untracked.vstring("keep *_JetPlusTrackZSPCorJetAntiKt4PAT_*_*"),
fileName = cms.untracked.string('file:jptreco.root')
)
##########
process.endjob_step = cms.EndPath(process.endOfProcess)
process.RECOoutput_step = cms.EndPath(process.RECOoutput)
process.load("RecoJets.JetPlusTracks.PATJetPlusTrackCorrections_cff")
process.p01=cms.Path(process.PATJetPlusTrackCorrectionsAntiKt4)
process.p1 =cms.Schedule(
process.p01,
process.endjob_step,
process.RECOoutput_step
)
# Automatic addition of the customisation function from Configuration.DataProcessing.Utils
from Configuration.DataProcessing.Utils import addMonitoring
#call to customisation function addMonitoring imported from Configuration.DataProcessing.Utils
process = addMonitoring(process)
| 41.982143 | 160 | 0.788601 |
2dcb7a4be6625d53ca3aa54df47621117fa9a086 | 1,527 | py | Python | notest/testset.py | GodQ/notest | 530d91782e8ed06493a1313facbed86e06662daf | [
"Apache-2.0"
] | 3 | 2019-05-10T09:36:07.000Z | 2021-04-16T23:40:46.000Z | notest/testset.py | GodQ/notest | 530d91782e8ed06493a1313facbed86e06662daf | [
"Apache-2.0"
] | null | null | null | notest/testset.py | GodQ/notest | 530d91782e8ed06493a1313facbed86e06662daf | [
"Apache-2.0"
] | 1 | 2019-05-10T09:43:48.000Z | 2019-05-10T09:43:48.000Z | import json
from notest.common_test import CommonTest
from notest.lib.parsing import safe_to_json
DEFAULT_TIMEOUT = 30
class TestSetConfig:
""" Configuration for a test run """
testset_name = None
timeout = DEFAULT_TIMEOUT # timeout of tests, in seconds
request_client = None # requests or pycurl
retries = 0 # Retries on failures
interactive = False
verbose = False
ssl_insecure = True
# Binding and creation of generators
collect_import_result = False
variable_binds = None
generators = None # Map of generator name to generator function
extract = None # extract several variable in context after this test set run
data_driven_generator = None
data_driven_generator_name = None
working_directory = None
def set_default_base_url(self, url):
self.variable_binds['default_base_url'] = url
def set_variable_binds(self, k, v):
self.variable_binds[k] = v
def __str__(self):
return json.dumps(self, default=safe_to_json)
class TestSet(CommonTest):
test_type = "testset"
name = None
group = "testset"
input = None
extract = None
file_path = None
""" Encapsulates a set of tests and test configuration for them """
tests = list()
config = TestSetConfig()
subtestsets = dict()
def __init__(self):
self.config = TestSetConfig()
self.tests = list()
self.subtestsets = dict()
def __str__(self):
return json.dumps(self, default=safe_to_json) | 26.789474 | 81 | 0.682384 |
96a763057a61cb1a2a7d2febfd1f493fe464268d | 398 | py | Python | d2l/__init__.py | 0x00A0/d2l-zh_paddle | b9e64216e55dfe88066750ed46b75c96664d02a2 | [
"Apache-2.0"
] | null | null | null | d2l/__init__.py | 0x00A0/d2l-zh_paddle | b9e64216e55dfe88066750ed46b75c96664d02a2 | [
"Apache-2.0"
] | null | null | null | d2l/__init__.py | 0x00A0/d2l-zh_paddle | b9e64216e55dfe88066750ed46b75c96664d02a2 | [
"Apache-2.0"
] | null | null | null | """Saved source code for "Dive into Deep Learing" (https://d2l.ai).
Please import d2l by one of the following ways:
from d2l import mxnet as d2l # Use MXNet as the backend
from d2l import torch as d2l # Use PyTorch as the backend
from d2l import tensorflow as d2l # Use TensorFlow as the backend
from d2l import paddle as d2l # Use TensorFlow as the backend
"""
__version__ = "2.0.0-beta0"
| 30.615385 | 67 | 0.738693 |
3889c91df98e10a60dff6a1a91b24acbb47da6b2 | 512 | py | Python | dynamic_preferences/migrations/0004_move_user_model.py | EliotBerriot/django-dynamic-preferences | 1a4cba7323f5456a3ddb2b9a387775f5fa7e0458 | [
"BSD-3-Clause"
] | 244 | 2015-06-19T15:48:21.000Z | 2020-12-28T21:16:48.000Z | dynamic_preferences/migrations/0004_move_user_model.py | EliotBerriot/django-dynamic-preferences | 1a4cba7323f5456a3ddb2b9a387775f5fa7e0458 | [
"BSD-3-Clause"
] | 193 | 2015-07-08T04:55:22.000Z | 2020-12-04T08:59:08.000Z | dynamic_preferences/migrations/0004_move_user_model.py | EliotBerriot/django-dynamic-preferences | 1a4cba7323f5456a3ddb2b9a387775f5fa7e0458 | [
"BSD-3-Clause"
] | 81 | 2015-07-12T22:18:47.000Z | 2020-12-12T03:04:29.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
"""
Migration to move the user preferences to a dedicated app, see #33
Borrowed from http://stackoverflow.com/a/26472482/2844093
"""
dependencies = [
("dynamic_preferences", "0003_auto_20151223_1407"),
]
# cf https://github.com/agateblue/django-dynamic-preferences/pull/142
operations = []
| 25.6 | 73 | 0.707031 |
b34a318389ca7ec859e7b02241a13462d7e1d7c5 | 2,081 | py | Python | src/python/pants/backend/helm/goals/tailor.py | wonlay/pants | 53c66503b6898e83c9c9596e56cde5ad9ed6a0d3 | [
"Apache-2.0"
] | null | null | null | src/python/pants/backend/helm/goals/tailor.py | wonlay/pants | 53c66503b6898e83c9c9596e56cde5ad9ed6a0d3 | [
"Apache-2.0"
] | null | null | null | src/python/pants/backend/helm/goals/tailor.py | wonlay/pants | 53c66503b6898e83c9c9596e56cde5ad9ed6a0d3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import os
from dataclasses import dataclass
from itertools import chain
from pants.backend.helm.subsystems.helm import HelmSubsystem
from pants.backend.helm.target_types import HelmChartTarget
from pants.backend.helm.util_rules.chart_metadata import HELM_CHART_METADATA_FILENAMES
from pants.core.goals.tailor import (
AllOwnedSources,
PutativeTarget,
PutativeTargets,
PutativeTargetsRequest,
)
from pants.engine.fs import PathGlobs, Paths
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.unions import UnionRule
from pants.util.logging import LogLevel
@dataclass(frozen=True)
class PutativeHelmChartTargetsRequest(PutativeTargetsRequest):
pass
@rule(desc="Determine candidate Helm chart targets to create", level=LogLevel.DEBUG)
async def find_putative_helm_targets(
request: PutativeHelmChartTargetsRequest,
all_owned_sources: AllOwnedSources,
helm_subsystem: HelmSubsystem,
) -> PutativeTargets:
if not helm_subsystem.tailor:
return PutativeTargets()
found_chart_paths = await MultiGet(
Get(Paths, PathGlobs, request.search_paths.path_globs(filename))
for filename in HELM_CHART_METADATA_FILENAMES
)
all_chart_files = chain.from_iterable([p.files for p in found_chart_paths])
unowned_chart_files = set(all_chart_files) - set(all_owned_sources)
putative_targets = []
for chart_file in sorted(unowned_chart_files):
dirname, filename = os.path.split(chart_file)
putative_targets.append(
PutativeTarget.for_target_type(
HelmChartTarget,
name=os.path.basename(dirname),
path=dirname,
triggering_sources=[filename],
)
)
return PutativeTargets(putative_targets)
def rules():
return [*collect_rules(), UnionRule(PutativeTargetsRequest, PutativeHelmChartTargetsRequest)]
| 33.031746 | 97 | 0.753964 |
82c67e13ee3036798d32dcb8aa89ced5f7ae4751 | 5,305 | py | Python | docs/conf.py | xywang58/molecool | 8aa12bbdc62d70f87ce44250825dbde77ab26bc5 | [
"BSD-3-Clause"
] | null | null | null | docs/conf.py | xywang58/molecool | 8aa12bbdc62d70f87ce44250825dbde77ab26bc5 | [
"BSD-3-Clause"
] | null | null | null | docs/conf.py | xywang58/molecool | 8aa12bbdc62d70f87ce44250825dbde77ab26bc5 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# Incase the project was not installed
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
import molecool
# -- Project information -----------------------------------------------------
project = 'molecool'
copyright = ("2020, Andy Wang. Project structure based on the "
"Computational Molecular Science Python Cookiecutter version 1.5")
author = 'Andy Wang'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = ''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autosummary',
'sphinx.ext.autodoc',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
'sphinx.ext.intersphinx',
'sphinx.ext.extlinks',
]
autosummary_generate = True
napoleon_google_docstring = False
napoleon_use_param = False
napoleon_use_ivar = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'default'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'molecooldoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'molecool.tex', 'molecool Documentation',
'molecool', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'molecool', 'molecool Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'molecool', 'molecool Documentation',
author, 'molecool', 'A Python package for analyzing and visualizing xyz files.',
'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
| 30.314286 | 85 | 0.65655 |
331292abc000a0f5781c4a88691286e722fa6bd1 | 2,866 | py | Python | salt/states/loop.py | jubrad/salt | 7960334fb726cfde45e6409da79a65535c626685 | [
"Apache-2.0"
] | 1 | 2021-08-14T13:48:38.000Z | 2021-08-14T13:48:38.000Z | salt/states/loop.py | jubrad/salt | 7960334fb726cfde45e6409da79a65535c626685 | [
"Apache-2.0"
] | 3 | 2015-03-31T14:44:05.000Z | 2015-06-18T19:02:24.000Z | salt/states/loop.py | jubrad/salt | 7960334fb726cfde45e6409da79a65535c626685 | [
"Apache-2.0"
] | 4 | 2020-11-04T06:28:05.000Z | 2022-02-09T10:54:49.000Z | # -*- coding: utf-8 -*-
'''
Loop state
Allows for looping over execution modules.
.. versionadded:: 2017.7.0
.. code-block:: yaml
wait_for_service_to_be_healthy:
loop.until:
- name: boto_elb.get_instance_health
- condition: m_ret[0]['state'] == 'InService'
- period: 5
- timeout: 20
- m_args:
- {{ elb }}
- m_kwargs:
keyid: {{ access_key }}
key: {{ secret_key }}
instances: "{{ instance }}"
.. warning::
This state allows arbitrary python code to be executed through the condition
parameter which is literally evaluated within the state. Please use caution.
'''
# Import python libs
from __future__ import absolute_import
import logging
import time
# Initialize logging
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'loop'
def __virtual__():
return True
def until(name,
m_args=None,
m_kwargs=None,
condition=None,
period=0,
timeout=604800):
'''
Loop over an execution module until a condition is met.
name
The name of the execution module
m_args
The execution module's positional arguments
m_kwargs
The execution module's keyword arguments
condition
The condition which must be met for the loop to break. This
should contain ``m_ret`` which is the return from the execution
module.
period
The number of seconds to wait between executions
timeout
The timeout in seconds
'''
ret = {'name': name,
'changes': {},
'result': False,
'comment': ''}
if name not in __salt__:
ret['comment'] = 'Cannot find module {0}'.format(name)
return ret
if condition is None:
ret['comment'] = 'An exit condition must be specified'
return ret
if not isinstance(period, int):
ret['comment'] = 'Period must be specified as an integer in seconds'
return ret
if not isinstance(timeout, int):
ret['comment'] = 'Timeout must be specified as an integer in seconds'
return ret
if __opts__['test']:
ret['comment'] = 'The execution module {0} will be run'.format(name)
ret['result'] = None
return ret
def timed_out():
if time.time() >= timeout:
return True
return False
timeout = time.time() + timeout
while not timed_out():
m_ret = __salt__[name](*m_args, **m_kwargs)
if eval(condition): # pylint: disable=W0123
ret['result'] = True
ret['comment'] = 'Condition {0} was met'.format(condition)
return ret
time.sleep(period)
ret['comment'] = 'Timed out while waiting for condition {0}'.format(condition)
return ret
| 24.921739 | 82 | 0.600488 |
304df669dfb5311b9c1dfc52b5f64ed191339515 | 34,206 | py | Python | main/example3/utils.py | YerbaPage/FEM | b60fb76b1556f3f92dce96e37ce4e5f3632424c3 | [
"MIT"
] | 1 | 2021-09-13T05:27:52.000Z | 2021-09-13T05:27:52.000Z | main/example3/utils.py | YerbaPage/FEM_forth_perturb | b60fb76b1556f3f92dce96e37ce4e5f3632424c3 | [
"MIT"
] | null | null | null | main/example3/utils.py | YerbaPage/FEM_forth_perturb | b60fb76b1556f3f92dce96e37ce4e5f3632424c3 | [
"MIT"
] | null | null | null | """This module contains utility functions such as convenient access to
SciPy linear solvers."""
import sys
from skfem.assembly import BilinearForm, LinearForm
from skfem.models.poisson import *
from scipy.sparse.linalg import LinearOperator, minres
from skfem.helpers import d, dd, ddd, dot, ddot, grad, dddot, prod
from skfem import *
import warnings
from typing import Optional, Union, Tuple, Callable, Dict
from inspect import signature
from skfem.visuals.matplotlib import draw, plot
import numpy as np
import scipy.sparse as sp
import scipy.sparse.csgraph as spg
import scipy.sparse.linalg as spl
from numpy import ndarray
from scipy.sparse import spmatrix
import pyamg
from skfem.assembly import asm, BilinearForm, LinearForm, DofsView
from skfem.assembly.basis import Basis
from skfem.element import ElementVectorH1
# custom types for describing input and output values
LinearSolver = Callable[[spmatrix, ndarray], ndarray]
EigenSolver = Callable[[spmatrix, spmatrix], Tuple[ndarray, ndarray]]
CondensedSystem = Union[spmatrix,
Tuple[spmatrix, ndarray],
Tuple[spmatrix, spmatrix],
Tuple[spmatrix, ndarray, ndarray],
Tuple[spmatrix, ndarray, ndarray, ndarray],
Tuple[spmatrix, spmatrix, ndarray, ndarray]]
DofsCollection = Union[ndarray, DofsView, Dict[str, DofsView]]
# preconditioners, e.g. for :func:`skfem.utils.solver_iter_krylov`
def build_pc_ilu(A: spmatrix,
drop_tol: Optional[float] = 1e-4,
fill_factor: Optional[float] = 20) -> spl.LinearOperator:
"""Incomplete LU preconditioner."""
P = spl.spilu(A.tocsc(), drop_tol=drop_tol, fill_factor=fill_factor)
M = spl.LinearOperator(A.shape, matvec=P.solve)
return M
def build_pc_diag(A: spmatrix) -> spmatrix:
"""Diagonal preconditioner."""
return sp.spdiags(1.0/A.diagonal(), 0, A.shape[0], A.shape[0])
# solvers for :func:`skfem.utils.solve`
def solver_eigen_scipy(**kwargs) -> EigenSolver:
"""Solve generalized eigenproblem using SciPy (ARPACK).
Returns
-------
EigenSolver
A solver function that can be passed to :func:`solve`.
"""
params = {
'sigma': 10,
'k': 5,
'mode': 'normal',
}
params.update(kwargs)
def solver(K, M, **solve_time_kwargs):
params.update(solve_time_kwargs)
from scipy.sparse.linalg import eigsh
return eigsh(K, M=M, **params)
return solver
def solver_direct_scipy(**kwargs) -> LinearSolver:
"""The default linear solver of SciPy."""
def solver(A, b, **solve_time_kwargs):
kwargs.update(solve_time_kwargs)
return spl.spsolve(A, b, **kwargs)
return solver
def solver_iter_mgcg_iter(krylov: Optional[LinearSolver] = spl.cg, verbose: Optional[bool] = False, **kwargs) -> LinearSolver:
"""MGCG iterative linear solver.
Parameters
----------
krylov
A Krylov iterative linear solver, like, and by default,
:func:`scipy.sparse.linalg.cg`
verbose
If True, print the norm of the iterate.
Returns
-------
LinearSolver
A solver function that can be passed to :func:`solve`.
And prints num of iterations
"""
def callback(x):
if verbose:
print(np.linalg.norm(x))
def solver(A, b, **solve_time_kwargs):
kwargs.update(solve_time_kwargs)
import pyamg
ml = pyamg.ruge_stuben_solver(A)
kwargs['M'] = ml.aspreconditioner() # params to be developed
sol, info, iter = krylov(A, b, **{'callback': callback, **kwargs})
print('mgcg total interation steps:', iter)
if info > 0:
warnings.warn("Convergence not achieved!")
elif info == 0 and verbose:
print(f"{krylov.__name__} converged to "
+ f"tol={kwargs.get('tol', 'default')} and "
+ f"atol={kwargs.get('atol', 'default')}")
return sol
return solver
def solver_iter_mgcg(krylov: Optional[LinearSolver] = spl.cg,
                     verbose: Optional[bool] = False,
                     **kwargs) -> LinearSolver:
    """MGCG iterative linear solver (Krylov method with an AMG preconditioner).

    Parameters
    ----------
    krylov
        A Krylov iterative linear solver, like, and by default,
        :func:`scipy.sparse.linalg.cg`
    verbose
        If True, print the norm of the iterate.

    Returns
    -------
    LinearSolver
        A solver function that can be passed to :func:`solve`.
    """
    def callback(x):
        if verbose:
            print(np.linalg.norm(x))

    def solver(A, b, **solve_time_kwargs):
        kwargs.update(solve_time_kwargs)
        import pyamg
        # Ruge-Stuben AMG hierarchy used as the preconditioner.
        ml = pyamg.ruge_stuben_solver(A)
        kwargs['M'] = ml.aspreconditioner()  # params to be developed
        try:
            # Iteration-counting wrappers in this module return
            # (solution, info, niter).
            sol, info, _ = krylov(A, b, **{'callback': callback, **kwargs})
        except ValueError:
            # Plain SciPy solvers return only (solution, info); the
            # unpacking mismatch raises ValueError. (Previously a bare
            # `except:` here also swallowed KeyboardInterrupt and real
            # solver failures.)
            sol, info = krylov(A, b, **{'callback': callback, **kwargs})
        if info > 0:
            warnings.warn("Convergence not achieved!")
        elif info == 0 and verbose:
            print(f"{krylov.__name__} converged to "
                  + f"tol={kwargs.get('tol', 'default')} and "
                  + f"atol={kwargs.get('atol', 'default')}")
        return sol

    return solver
def solver_iter_pyamg(verbose: Optional[bool] = False,
                      **kwargs) -> LinearSolver:
    """PyAMG (algebraic multigrid) iterative linear solver.

    Parameters
    ----------
    verbose
        If True, print the norm of each iterate and warn on a large
        final residual.

    Returns
    -------
    LinearSolver
        A solver function that can be passed to :func:`solve`.
    """
    def callback(x):
        if verbose:
            print(np.linalg.norm(x))

    def run_amg(A, b, **amg_kwargs):
        '''
        Solve with a Ruge-Stuben multigrid hierarchy; returns the solution
        and the final residual norm. (Extra keyword arguments, including
        the injected callback, are accepted but the tolerances here are
        fixed, exactly as before.)
        '''
        import pyamg
        hierarchy = pyamg.ruge_stuben_solver(A)
        x = hierarchy.solve(b, tol=1e-10, maxiter=10000, callback=callback)
        return x, np.linalg.norm(b - A * x)

    def solver(A, b, **solve_time_kwargs):
        kwargs.update(solve_time_kwargs)
        sol, residual = run_amg(A, b, **{'callback': callback, **kwargs})
        if residual > 1 and verbose:
            print('Warning: residual norm =', residual)
        return sol

    return solver
def solver_iter_krylov_iter(krylov: Optional[LinearSolver] = spl.cg,
                            verbose: Optional[bool] = False,
                            **kwargs) -> LinearSolver:
    """Krylov-subspace iterative solver that also prints the iteration count.

    The Krylov callable must return a ``(solution, info, niter)`` triple.
    Pass ``Precondition=True`` to enable a diagonal preconditioner (built
    via :func:`build_pc_diag`) when no explicit ``M`` is supplied; other
    keyword arguments (tol, atol, maxiter, M, ...) go to the solver.

    Returns
    -------
    LinearSolver
        A solver function that can be passed to :func:`solve`.
    """
    def callback(x):
        if verbose:
            print(np.linalg.norm(x))

    def solver(A, b, **solve_time_kwargs):
        kwargs.update(solve_time_kwargs)
        # Deliberately `== True`: only the exact boolean enables the
        # diagonal preconditioner, matching the historical behaviour.
        use_diag_pc = kwargs.pop('Precondition', None) == True
        if 'M' not in kwargs and use_diag_pc:
            kwargs['M'] = build_pc_diag(A)
        sol, info, niter = krylov(A, b, **{'callback': callback, **kwargs})
        print(krylov.__name__, 'total interation steps:', niter)
        if info > 0:
            warnings.warn("Convergence not achieved!")
        elif info == 0 and verbose:
            print(f"{krylov.__name__} converged to "
                  + f"tol={kwargs.get('tol', 'default')} and "
                  + f"atol={kwargs.get('atol', 'default')}")
        return sol

    return solver
def solver_iter_krylov(krylov: Optional[LinearSolver] = spl.cg,
                       verbose: Optional[bool] = False,
                       **kwargs) -> LinearSolver:
    """Krylov-subspace iterative linear solver.

    Pass ``Precondition=True`` to enable a diagonal preconditioner (built
    via :func:`build_pc_diag`) when no explicit ``M`` is supplied. Any
    remaining keyword arguments (tol, atol, maxiter, M, ...) are passed
    on to the solver, which must return ``(solution, info, niter)``.

    Returns
    -------
    LinearSolver
        A solver function that can be passed to :func:`solve`.
    """
    def callback(x):
        if verbose:
            print(np.linalg.norm(x))

    def solver(A, b, **solve_time_kwargs):
        kwargs.update(solve_time_kwargs)
        # Deliberately `== True`: only the exact boolean enables the
        # diagonal preconditioner, matching the historical behaviour.
        use_diag_pc = kwargs.pop('Precondition', None) == True
        if 'M' not in kwargs and use_diag_pc:
            kwargs['M'] = build_pc_diag(A)
        sol, info, _ = krylov(A, b, **{'callback': callback, **kwargs})
        if info > 0:
            warnings.warn("Convergence not achieved!")
        elif info == 0 and verbose:
            print(f"{krylov.__name__} converged to "
                  + f"tol={kwargs.get('tol', 'default')} and "
                  + f"atol={kwargs.get('atol', 'default')}")
        return sol

    return solver
def solver_iter_pcg(**kwargs) -> LinearSolver:
    """Conjugate gradient solver, specialized from solver_iter_krylov"""
    # Thin alias: relies on solver_iter_krylov's default krylov=spl.cg.
    return solver_iter_krylov(**kwargs)
# solve and condense
def solve(A: spmatrix,
          b: Union[ndarray, spmatrix],
          x: Optional[ndarray] = None,
          I: Optional[ndarray] = None,
          solver: Optional[Union[LinearSolver, EigenSolver]] = None,
          **kwargs) -> ndarray:
    """Solve a linear system or a generalized eigenvalue problem.

    Parameters
    ----------
    A
        The system matrix.
    b
        The right hand side vector, or the mass matrix of a generalized
        eigenvalue problem (a sparse ``b`` selects the eigensolver path).
    x, I
        If both are given (typically from :func:`condense` with
        ``expand=True``), the condensed solution is expanded back onto the
        full set of DOFs: entries at indices ``I`` come from the solve,
        the rest from ``x``.
    solver
        One of :func:`solver_direct_scipy` (default for vector ``b``),
        :func:`solver_eigen_scipy` (default for matrix ``b``),
        :func:`solver_iter_pcg`, :func:`solver_iter_krylov`.

    The remaining keyword arguments are passed to the solver.
    """
    if solver is None:
        if isinstance(b, spmatrix):
            solver = solver_eigen_scipy(**kwargs)
        elif isinstance(b, ndarray):
            solver = solver_direct_scipy(**kwargs)
    if x is None or I is None:
        # No expansion information: return the raw solver result.
        return solver(A, b, **kwargs)
    if isinstance(b, spmatrix):
        # Generalized eigenproblem: expand every eigenvector column.
        eigenvalues, eigenvectors = solver(A, b, **kwargs)
        expanded = np.tile(x.copy()[:, None], (1, eigenvectors.shape[1]))
        expanded[I] = eigenvectors
        return eigenvalues, expanded
    expanded = x.copy()
    expanded[I] = solver(A, b, **kwargs)
    return expanded
def _flatten_dofs(S: DofsCollection) -> ndarray:
    """Normalize a DOF collection (None, ndarray, DofsView or dict of
    DofsView) into a flat ndarray of unique indices (or None)."""
    if S is None:
        return None
    if isinstance(S, ndarray):
        return S
    if isinstance(S, DofsView):
        return S.flatten()
    if isinstance(S, dict):
        flattened = [view.flatten() for view in S.values()]
        return np.unique(np.concatenate(flattened))
    raise NotImplementedError("Unable to flatten the given set of DOFs.")
def condense(A: spmatrix,
             b: Union[ndarray, spmatrix] = None,
             x: ndarray = None,
             I: DofsCollection = None,
             D: DofsCollection = None,
             expand: bool = True) -> CondensedSystem:
    """Eliminate degrees-of-freedom from a linear system.

    Provide the system ``A`` (and optionally ``b``) together with exactly
    one of: the DOFs to keep (``I``) or the DOFs to dismiss (``D``).
    Nonzero values for the eliminated DOFs may be supplied via ``x``
    (assumed zero otherwise).

    .. note::
        Also supports generalized eigenvalue problems, where ``b`` is a
        (mass) matrix; in that case the right-hand side is not modified.

    Parameters
    ----------
    A
        The system matrix.
    b
        The right hand side vector, or the mass matrix of a generalized
        eigenvalue problem.
    x
        Values of the condensed degrees-of-freedom (default: zero).
    I
        Degree-of-freedom indices to keep.
    D
        Degree-of-freedom indices to dismiss.
    expand
        If True (default), also return ``x`` and ``I`` so that
        :func:`skfem.utils.solve` expands the solution automatically.

    Returns
    -------
    CondensedSystem
        The condensed system, optionally followed by ``x`` and ``I``.
    """
    D = _flatten_dofs(D)
    I = _flatten_dofs(I)
    if x is None:
        x = np.zeros(A.shape[0])
    if I is None and D is None:
        raise Exception("Either I or D must be given!")
    if I is not None and D is not None:
        raise Exception("Give only I or only D!")
    # Derive the missing index set as the complement of the given one.
    if I is None:
        I = np.setdiff1d(np.arange(A.shape[0]), D)
    else:
        D = np.setdiff1d(np.arange(A.shape[0]), I)
    # A[I].T[I].T keeps the rows and columns indexed by I.
    AII = A[I].T[I].T
    if b is None:
        ret_value = (AII,)
    elif isinstance(b, spmatrix):
        # generalized eigenvalue problem: don't modify rhs
        ret_value = (AII, b[I].T[I].T)
    elif isinstance(b, ndarray):
        # Move the known boundary values over to the right-hand side.
        ret_value = (AII, b[I] - A[I].T[D].T @ x[D])
    else:
        raise Exception("The second arg type not supported.")
    if expand:
        ret_value += (x, I)
    return ret_value if len(ret_value) > 1 else ret_value[0]
# additional utilities
def rcm(A: spmatrix,
        b: ndarray) -> Tuple[spmatrix, ndarray, ndarray]:
    """Reverse Cuthill-McKee bandwidth-reducing reordering.

    Returns the symmetrically permuted matrix, the permuted right-hand
    side and the permutation itself.
    """
    perm = spg.reverse_cuthill_mckee(A, symmetric_mode=False)
    permuted_A = A[perm].T[perm].T  # rows then columns reordered by perm
    return permuted_A, b[perm], perm
def adaptive_theta(est, theta=0.5, max=None):
    """Bulk marking rule for adaptive refinement: return the indices of
    elements whose estimator exceeds `theta` times the maximum (computed
    from `est` unless an explicit `max` is supplied)."""
    ceiling = np.max(est) if max is None else max
    return np.nonzero(theta * ceiling < est)[0]
def project(fun,
            basis_from: Basis = None,
            basis_to: Basis = None,
            diff: int = None,
            I: ndarray = None,
            expand: bool = False,
            solver: Optional[Union[LinearSolver, EigenSolver]] = None) -> ndarray:
    """Projection from one basis to another.

    Parameters
    ----------
    fun
        A solution vector or a function handle.
    basis_from
        The finite element basis to project from.
    basis_to
        The finite element basis to project to.
    diff
        Differentiate with respect to the given dimension.
    I
        Index set for limiting the projection to a subset.
    expand
        Passed to :func:`skfem.utils.condense`.
    solver
        Optional Krylov solver for the restricted projection; when given
        it is wrapped via :func:`solver_iter_krylov` with tol=1e-11.

    Returns
    -------
    ndarray
        The projected solution vector.
    """
    # Mass form; for vector-valued elements the componentwise products
    # are summed into a scalar integrand.
    @BilinearForm
    def mass(u, v, w):
        p = u * v
        return sum(p) if isinstance(basis_to.elem, ElementVectorH1) else p
    # Load form used when `fun` is a callable rather than a vector.
    @LinearForm
    def funv(v, w):
        if len(signature(fun).parameters) == 1:
            p = fun(w.x) * v
        else:
            # Legacy calling convention fun(x, y, ...) still supported.
            warnings.warn("The function provided to 'project' should "
                          "take only one argument in the future.",
                          DeprecationWarning)
            p = fun(*w.x) * v
        return sum(p) if isinstance(basis_to.elem, ElementVectorH1) else p
    # Derivative form used when projecting d(fun)/dx_diff.
    @BilinearForm
    def deriv(u, v, w):
        from skfem.helpers import grad
        du = grad(u)
        return du[diff] * v
    M = asm(mass, basis_to)
    if not isinstance(fun, ndarray):
        f = asm(funv, basis_to)
    else:
        if diff is not None:
            f = asm(deriv, basis_from, basis_to) @ fun
        else:
            # print(asm(mass, basis_from, basis_to).shape)
            # print(fun.shape)
            f = asm(mass, basis_from, basis_to) @ fun
    if I is not None:
        # Restricted projection: condense to the index set first.
        # print('aaa')
        if solver is None:
            return solve(*condense(M, f, I=I, expand=expand))
        else:
            return solve(*condense(M, f, I=I, expand=expand), solver=solver_iter_krylov(solver, tol=1e-11))
    return solve(M, f)
def pproject(fun,
             basis_from: Basis = None,
             basis_to: Basis = None,
             diff: int = None,
             I: ndarray = None,
             expand: bool = False,
             solver: Optional[Union[LinearSolver, EigenSolver]] = None) -> ndarray:
    """Projection from one basis to another.

    This was a verbatim copy of :func:`project`; it is kept for backward
    compatibility and now simply delegates, so the two can no longer
    drift apart.

    Parameters
    ----------
    fun
        A solution vector or a function handle.
    basis_from
        The finite element basis to project from.
    basis_to
        The finite element basis to project to.
    diff
        Differentiate with respect to the given dimension.
    I
        Index set for limiting the projection to a subset.
    expand
        Passed to :func:`skfem.utils.condense`.
    solver
        Optional Krylov solver for the restricted projection.

    Returns
    -------
    ndarray
        The projected solution vector.
    """
    return project(fun, basis_from=basis_from, basis_to=basis_to,
                   diff=diff, I=I, expand=expand, solver=solver)
# for backwards compatibility
def L2_projection(a, b, c=None):
    """Superseded by :func:`skfem.utils.project`."""
    # Backwards-compatible shim: `a` is the function/vector, `b` the
    # target basis, `c` an optional index set restricting the projection.
    return project(a, basis_to=b, I=c)
def derivative(a, b, c, d=0):
    """Superseded by :func:`skfem.utils.project`."""
    # Backwards-compatible shim: project d(a)/dx_d from basis `b` onto `c`.
    return project(a, basis_from=b, basis_to=c, diff=d)
# functions for loads and boundary
# Short NumPy aliases used by the exact-solution formulas below.
pi = np.pi
sin = np.sin
cos = np.cos
exp = np.exp
# atan = np.arctan2
# parameters
# end of parameters
# print parameters
# functions
def arctan3(y, x):
    """Polar angle of (x, y) shifted into the half-open interval (0, 2*pi].

    Operates on (and mutates a fresh copy of) NumPy arrays; scalar input
    is not supported because of the boolean-mask assignment.
    """
    angle = np.arctan2(y, x)
    angle[angle <= 0] += 2 * np.pi
    return angle
#############################
# eps = 1e-12
def exact_u(x, y):
    """Exact singular solution r**(5/3) * sin(5*theta/3) (re-entrant corner)."""
    theta = arctan3(y, x)
    radius_sq = x**2 + y**2
    return radius_sq**(5/6) * sin(5*theta/3)
def dexact_u(x, y):
    """Gradient (du/dx, du/dy) of :func:`exact_u` (closed form)."""
    theta = arctan3(y, x)
    r2 = x**2 + y**2
    s = sin((5*theta)/3)
    c = cos((5*theta)/3)
    dux = (5*x*s)/(3*r2**(1/6)) - (5*y*c*r2**(5/6))/(3*r2)
    duy = (5*y*s)/(3*r2**(1/6)) + (5*x*c*r2**(5/6))/(3*r2)
    return dux, duy
def ddexact(x, y):
    """Hessian entries (duxx, duxy, duyx, duyy) of :func:`exact_u`."""
    theta = arctan3(y, x)
    s = sin((5*theta)/3)
    c = cos((5*theta)/3)
    denom = 9 * (x**2 + y**2)**(7/6)
    duxx = -(10*(y**2*s - x**2*s + 2*x*y*c)) / denom
    duxy = (10*(x**2*c - y**2*c + 2*x*y*s)) / denom
    duyx = duxy  # the exact Hessian is symmetric
    duyy = (10*(y**2*s - x**2*s + 2*x*y*c)) / denom
    return duxx, duxy, duyx, duyy
# @LinearForm
# def f_load(v, w):
# '''
# for $(f, x_{h})$
# '''
# return 0
########################################
# @LinearForm
# def f_load(v, w):
# '''
# for $(f, x_{h})$
# '''
# lu = 0
# llu = 0
# return (epsilon**2 * llu - lu) * v
# def exact_u(x, y):
# return x*y
# def dexact_u(x, y):
# dux = y
# duy = x
# return dux, duy
# def ddexact(x, y):
# duxx = 0
# duxy = 1
# duyx = duxy
# duyy = 0
# return duxx, duxy, duyx, duyy
# ###################################x**2*y**2*(x - 1)**2*(y - 1)**2
# @LinearForm
# def f_load(v, w):
# '''
# for $(f, x_{h})$
# '''
# x, y = w.x
# # lu = 0
# # llu = 0
# # return (epsilon**2 * llu - lu) * v
# return (24*ep**2*x**4 - 48*ep**2*x**3 + 288*ep**2*x**2*y**2 - 288*ep**2*x**2*y + 72*ep**2*x**2 - 288*ep**2*x*y**2 + 288*ep**2*x*y - 48*ep**2*x + 24*ep**2*y**4 - 48*ep**2*y**3 + 72*ep**2*y**2 - 48*ep**2*y + 8*ep**2 - 12*x**4*y**2 + 12*x**4*y - 2*x**4 + 24*x**3*y**2 - 24*x**3*y + 4*x**3 - 12*x**2*y**4 + 24*x**2*y**3 - 24*x**2*y**2 + 12*x**2*y - 2*x**2 + 12*x*y**4 - 24*x*y**3 + 12*x*y**2 - 2*y**4 + 4*y**3 - 2*y**2) * v
# def exact_u(x, y):
# return x**2*y**2*(x - 1)**2*(y - 1)**2
# def dexact_u(x, y):
# dux = 2*x*y**2*(y - 1)**2*(2*x**2 - 3*x + 1)
# duy = 2*x**2*y*(x - 1)**2*(2*y**2 - 3*y + 1)
# return dux, duy
# def ddexact(x, y):
# duxx = 2*y**2*(y - 1)**2*(6*x**2 - 6*x + 1)
# duxy = 4*x*y*(2*x**2 - 3*x + 1)*(2*y**2 - 3*y + 1)
# duyx = duxy
# duyy = 2*x**2*(x - 1)**2*(6*y**2 - 6*y + 1)
# return duxx, duxy, duyx, duyy
###################################
###################################x*y*(x - 1)*(y - 1)
# @LinearForm
# def f_load(v, w):
# '''
# for $(f, x_{h})$
# '''
# x, y = w.x
# # lu = 0
# # llu = 0
# # return (epsilon**2 * llu - lu) * v
# return (8*ep**2 - 2*x*(x - 1) - 2*y*(y - 1)) * v
# def exact_u(x, y):
# return x*y*(x - 1)*(y - 1)
# def dexact_u(x, y):
# dux = y*(2*x - 1)*(y - 1)
# duy = x*(2*y - 1)*(x - 1)
# return dux, duy
# def ddexact(x, y):
# duxx = 2*y**2
# duxy = 4*x*y
# duyx = duxy
# duyy = 2*x**2
# return duxx, duxy, duyx, duyy
# ###################################
###################################
# @LinearForm
# def f_load(v, w):
# '''
# for $(f, x_{h})$
# '''
# x, y = w.x
# # lu = 0
# # llu = 0
# # return (epsilon**2 * llu - lu) * v
# return ((24*x**2*(x - 1)**2 + 24*y**2*(y - 1)**2 + 2*(4*x*(2*x - 2) + 2*(x - 1)**2 + 2*x**2)*(4*y*(2*y - 2) + 2*(y - 1)**2 + 2*y**2) + 72)*ep**2 - (y**2*(y - 1)**2 + 2)*(4*x*(2*x - 2) + 2*(x - 1)**2 + 2*x**2) - (x**2*(x - 1)**2 + 1)*(4*y*(2*y - 2) + 2*(y - 1)**2 + 2*y**2)) * v
# def exact_u(x, y):
# return (x**2*(x - 1)**2 + 1)*(y**2*(y - 1)**2 + 2)
# def dexact_u(x, y):
# dux = (y**2*(y - 1)**2 + 2)*(2*x*(x - 1)**2 + x**2*(2*x - 2))
# duy = (x**2*(x - 1)**2 + 1)*(2*y*(y - 1)**2 + y**2*(2*y - 2))
# return dux, duy
# def ddexact(x, y):
# duxx = (12*x**2 - 12*x + 2)*(y**4 - 2*y**3 + y**2 + 2)
# duxy = 4*x*y*(2*x**2 - 3*x + 1)*(2*y**2 - 3*y + 1)
# duyx = duxy
# duyy = (12*y**2 - 12*y + 2)*(x**4 - 2*x**3 + x**2 + 1)
# return duxx, duxy, duyx, duyy
###################################
# def exact_u(x, y):
# out = np.zeros_like(x)
# if x.ndim == 1:
# for i in range(x.shape[0]):
# if x[i] != 0:
# out[i] = (x[i]**2 + y[i]**2)**(5 / 6) * sin(5 * atan(y[i] / x[i]) / 3)
# elif y[i] > 0:
# out[i] = (x[i]**2 + y[i]**2)**(5 / 6) * sin(pi/2 * 5 / 3)
# else:
# out[i] = (x[i]**2 + y[i]**2)**(5 / 6) * sin(3/2 * pi * 5 / 3)
# else:
# out = (x**2 + y**2)**(5/6) * sin(5*atan(y / x)/3)
# return out
# def get_theta(x, y):
# import math
# import cmath
# theta = np.zeros_like(x)
# for i in range(x.shape[0]):
# for j in range(x.shape[1]):
# # print(x)
# r, rad = cmath.polar(complex(x[i, j], y[i, j]))
# theta[i, j] = math.degrees(rad)
# return theta
# def dexact_u(x, y):
# theta = get_theta(x, y)
# dux = (5*x*sin((5*theta)/3))/(3*(x**2 + y**2)**(1/6)) - (5*y*cos((5*theta)/3)*(x**2 + y**2)**(5/6))/(3*(y**2 + x**2))
# duy = (5*y*sin((5*theta)/3))/(3*(x**2 + y**2)**(1/6)) + (5*x*cos((5*theta)/3)*(x**2 + y**2)**(5/6))/(3*(y**2 + x**2))
# return dux, duy
# def ddexact(x, y):
# theta = get_theta(x, y)
# duxx = -(10*(y**2*sin((5*theta)/3) - x**2*sin((5*theta)/3) + 2*x*y*cos((5*theta)/3)))/(9*(x**2 + y**2)**(7/6))
# duxy = (10*(x**2*cos((5*theta)/3) - y**2*cos((5*theta)/3) + 2*x*y*sin((5*theta)/3)))/(9*(x**2 + y**2)**(7/6))
# duyx = duxy
# duyy = (10*(y**2*sin((5*theta)/3) - x**2*sin((5*theta)/3) + 2*x*y*cos((5*theta)/3)))/(9*(x**2 + y**2)**(7/6))
# return duxx, duxy, duyx, duyy
def easy_boundary_penalty(m, basis):
    '''
    Collect the Dirichlet DOFs (nodal 'u' only) on the four sides of the
    unit square, for the penalty formulation.

    Returns the concatenated DOF index array D for boundary conditions.
    '''
    side_facets = {
        'left': m.facets_satisfying(lambda x: x[0] == 0),
        'right': m.facets_satisfying(lambda x: x[0] == 1),
        'top': m.facets_satisfying(lambda x: x[1] == 1),
        'buttom': m.facets_satisfying(lambda x: x[1] == 0),
    }
    dofs = basis.find_dofs(side_facets)
    order = ('left', 'right', 'top', 'buttom')
    return np.concatenate([dofs[name].nodal['u'] for name in order])
def easy_boundary(m, basis):
    '''
    Collect the clamped-boundary DOFs (nodal 'u' plus facet normal
    derivative 'u_n') on the four sides of the unit square.

    Returns the concatenated DOF index array D for boundary conditions.
    '''
    side_facets = {
        'left': m.facets_satisfying(lambda x: x[0] == 0),
        'right': m.facets_satisfying(lambda x: x[0] == 1),
        'top': m.facets_satisfying(lambda x: x[1] == 1),
        'buttom': m.facets_satisfying(lambda x: x[1] == 0),
    }
    dofs = basis.find_dofs(side_facets)
    order = ('left', 'right', 'top', 'buttom')
    nodal = [dofs[name].nodal['u'] for name in order]
    normal = [dofs[name].facet['u_n'] for name in order]
    return np.concatenate(nodal + normal)
def solve_problem1__(m, element_type='P1', solver_type='pcg', intorder=6, tol=1e-8, epsilon=1e-6):
    '''
    Two-stage solve of problem 1 (mgcg variant): first a Poisson solve for
    the auxiliary variable w, then the singularly-perturbed fourth-order
    solve for u on a Morley basis. Returns (uh0, basis).
    '''
    if element_type == 'P1':
        w_element = ElementTriP1()
    elif element_type == 'P2':
        w_element = ElementTriP2()
    else:
        raise Exception("Element not supported")
    element = {'w': w_element, 'u': ElementTriMorley()}
    # intorder: integration order for quadrature
    basis = {
        variable: InteriorBasis(m, e, intorder=intorder)
        for variable, e in element.items()
    }

    def pick_solver(kind):
        # One freshly-constructed solver closure per solve, as before.
        if kind == 'amg':
            return solver_iter_pyamg(tol=tol)
        if kind == 'pcg':
            return solver_iter_krylov(Precondition=True, tol=tol)
        if kind == 'mgcg':
            return solver_iter_mgcg(tol=tol)
        raise Exception("Solver not supported")

    # Stage 1: Poisson problem for w.
    K1 = asm(laplace, basis['w'])
    f1 = asm(f_load, basis['w'])
    wh = solve(*condense(K1, f1, D=basis['w'].find_dofs()),
               solver=pick_solver(solver_type))

    # Stage 2: perturbed biharmonic problem for u driven by wh.
    K2 = epsilon**2 * asm(a_load, basis['u']) + asm(b_load, basis['u'])
    f2 = asm(wv_load, basis['w'], basis['u']) * wh
    uh0 = solve(*condense(K2, f2, D=easy_boundary(m, basis['u'])),
                solver=pick_solver(solver_type))
    return uh0, basis
def solve_problem2(m, element_type='P1', solver_type='pcg', intorder=6, tol=1e-8, epsilon=1e-6):
    '''
    Two-stage solve of problem 2 (penalty formulation, mgcg-capable):
    Poisson solve for w, then the penalized fourth-order solve for u.
    Returns (uh0, basis, fbasis).
    '''
    if element_type == 'P1':
        w_element = ElementTriP1()
    elif element_type == 'P2':
        w_element = ElementTriP2()
    else:
        raise Exception("The element not supported")
    element = {'w': w_element, 'u': ElementTriMorley()}
    basis = {
        variable: InteriorBasis(m, e, intorder=intorder)
        for variable, e in element.items()
    }

    def pick_solver(kind):
        # One freshly-constructed solver closure per solve, as before.
        if kind == 'amg':
            return solver_iter_pyamg(tol=tol)
        if kind == 'pcg':
            return solver_iter_krylov(Precondition=True, tol=tol)
        if kind == 'mgcg':
            return solver_iter_mgcg(tol=tol)
        raise Exception("Solver not supported")

    # Stage 1: Poisson problem for w.
    K1 = asm(laplace, basis['w'])
    f1 = asm(f_load, basis['w'])
    wh = solve(*condense(K1, f1, D=basis['w'].find_dofs()),
               solver=pick_solver(solver_type))

    # Facet penalty terms enforcing the normal-derivative condition weakly.
    fbasis = FacetBasis(m, element['u'])
    P = asm(penalty_1, fbasis) + asm(penalty_2, fbasis) + asm(penalty_3, fbasis)

    # Stage 2: penalized perturbed biharmonic problem for u.
    K2 = epsilon**2 * asm(a_load, basis['u']) + \
        epsilon**2 * P + asm(b_load, basis['u'])
    f2 = asm(wv_load, basis['w'], basis['u']) * wh
    uh0 = solve(*condense(K2, f2, D=easy_boundary_penalty(m, basis['u'])),
                solver=pick_solver(solver_type))
    return uh0, basis, fbasis
@Functional
def L2pnvError(w):
    # Squared, mesh-size-scaled normal mismatch on facets.
    # NOTE(review): assumes w['n'] carries the discrete normal field and
    # w['w'] the scalar solution — confirm against the assembling caller.
    return (w.h * dot(w['n'].value, w['w'].grad))**2
@BilinearForm
def a_load(u, v, w):
    '''
    for $a_{h}(u, v)$: double contraction of the Hessians, hess(u) : hess(v)
    '''
    return ddot(dd(u), dd(v))
@BilinearForm
def b_load(u, v, w):
    '''
    for $b_{h}(u, v)$: the gradient-gradient (stiffness) form
    '''
    return dot(grad(u), grad(v))
@BilinearForm
def wv_load(u, v, w):
    r'''
    for $(\nabla \chi_{h}, \nabla_{h} v_{h})$

    (raw docstring: '\n' in '\nabla' was previously parsed as a newline)
    '''
    return dot(grad(u), grad(v))
@BilinearForm
def penalty_1(u, v, w):
    # Interior-penalty consistency term: -(hess u : n (x) n) * (dv/dn).
    return ddot(-dd(u), prod(w.n, w.n)) * dot(grad(v), w.n)
@BilinearForm
def penalty_2(u, v, w):
    # Symmetrizing twin of penalty_1 (roles of u and v exchanged).
    return ddot(-dd(v), prod(w.n, w.n)) * dot(grad(u), w.n)
@BilinearForm
def penalty_3(u, v, w):
    # Stabilization term scaled by sigma / h.
    # NOTE(review): `sigma` is a module-level penalty parameter defined
    # elsewhere in the file — confirm it is set before assembly.
    return (sigma / w.h) * dot(grad(u), w.n) * dot(grad(v), w.n)
@BilinearForm
def laplace(u, v, w):
    r'''
    for $(\nabla w_{h}, \nabla \chi_{h})$

    (raw docstring: '\n' in '\nabla' was previously parsed as a newline)
    '''
    return dot(grad(u), grad(v))
@Functional
def L2uError(w):
    # Squared pointwise error against the closed-form exact solution;
    # integrating this functional yields the squared L2 error.
    x, y = w.x
    return (w.w - exact_u(x, y))**2
def get_DuError(basis, u):
    """H1-seminorm error: L2 norm of grad(u_h) - grad(u_exact) on `basis`."""
    grad_uh = basis.interpolate(u).grad
    quad_pts = basis.global_coordinates().value
    weights = basis.dx  # quadrature weights
    dux, duy = dexact_u(quad_pts[0], quad_pts[1])
    squared_error = (grad_uh[0] - dux)**2 + (grad_uh[1] - duy)**2
    return np.sqrt(np.sum(squared_error * weights))
def get_D2uError(basis, u):
    """H2-seminorm error: L2 distance between hess(u_h) and the exact Hessian."""
    hess_uh = basis.interpolate(u).hess
    # coordinates of quadrature points [x, y]
    quad_pts = basis.global_coordinates().value
    weights = basis.dx  # quadrature weights
    duxx, duxy, duyx, duyy = ddexact(quad_pts[0], quad_pts[1])
    squared_error = ((hess_uh[0][0] - duxx)**2 + (hess_uh[0][1] - duxy)**2 +
                     (hess_uh[1][1] - duyy)**2 + (hess_uh[1][0] - duyx)**2)
    return np.sqrt(np.sum(squared_error * weights))
# def exact_u(x, y):
# return (sin(pi * x) * sin(pi * y))**2
# def dexact_u(x, y):
# dux = 2 * pi * cos(pi * x) * sin(pi * x) * sin(pi * y)**2
# duy = 2 * pi * cos(pi * y) * sin(pi * x)**2 * sin(pi * y)
# return dux, duy
# def ddexact(x, y):
# duxx = 2 * pi**2 * cos(pi * x)**2 * sin(pi * y)**2 - 2 * pi**2 * sin(
# pi * x)**2 * sin(pi * y)**2
# duxy = 2 * pi * cos(pi * x) * sin(pi * x) * 2 * pi * cos(pi * y) * sin(
# pi * y)
# duyx = duxy
# duyy = 2 * pi**2 * cos(pi * y)**2 * sin(pi * x)**2 - 2 * pi**2 * sin(
# pi * y)**2 * sin(pi * x)**2
# return duxx, duxy, duyx, duyy
def show_result(L2s, H1s, H2s, epus):
    """Print the convergence table: for each refinement level, the
    observed rate (-log2 of the error ratio) and the absolute error in
    the L2, H1, H2 and energy norms. Rows are labelled by mesh size 2^-(i+2)."""
    print(' h L2u H1u H2u epu')
    for i in range(H2s.shape[0] - 1):
        # Observed convergence rates between consecutive refinements.
        print(
            '2^-' + str(i + 2), ' {:.2f} {:.2f} {:.2f} {:.2f}'.format(
                -np.log2(L2s[i + 1] / L2s[i]), -np.log2(H1s[i + 1] / H1s[i]),
                -np.log2(H2s[i + 1] / H2s[i]),
                -np.log2(epus[i + 1] / epus[i])))
        # Absolute errors on the finer of the two meshes.
        print(
            '2^-' + str(i + 2), ' {:.3e} {:.3e} {:.3e} {:.3e}'.format(
                L2s[i + 1], H1s[i + 1],
                H2s[i + 1],
                epus[i + 1]))
| 30.595707 | 425 | 0.534029 |
29c5051467518ef127a203b85968b4ed89b23176 | 689 | py | Python | python/SecantMethod.py | vladalex01/numerical-methods | 2c58e726c50073824751f07eb1f7be1645bac337 | [
"MIT"
] | null | null | null | python/SecantMethod.py | vladalex01/numerical-methods | 2c58e726c50073824751f07eb1f7be1645bac337 | [
"MIT"
] | null | null | null | python/SecantMethod.py | vladalex01/numerical-methods | 2c58e726c50073824751f07eb1f7be1645bac337 | [
"MIT"
] | null | null | null | import numpy as np
def f(x):
return 2 * x**3 + 1
def SecantMethod(f, x0, x1, tol, max_iter):
x = 0
for i in range (1, max_iter):
print(" La pasul: {}".format(i))
f0 = f(x0)
f1 = f(x1)
print("f0 = {}; f1 = {}".format(f0, f1))
xi = x1 - f1 * (x1 - x0) / (f1 - f0)
fxi = f(xi)
print("xi = {}; fxi = {}".format(xi, fxi))
# lin 18
if (abs(fxi) < np.finfo(float).eps):
x = xi
return x
epsilon = abs((xi - x1) / xi)
print("Epsilon: {}".format(epsilon))
#stop - 25
if (epsilon < tol):
x = xi
return x
x0 = x1
x1 = xi
print("Maximum number of iterations reached: {}".format(i))
return x
print(SecantMethod(f, 17.0, 6.0, 10 ** (-10), 100)) | 19.685714 | 65 | 0.534107 |
df8416b9c41fb9a7ca8cb7a4002c9a690a4bbd7f | 12,048 | py | Python | lldb/test/API/types/AbstractBase.py | elizabethandrews/llvm | 308498236c1c4778fdcba0bfbb556adf8aa333ea | [
"Apache-2.0"
] | 5 | 2021-02-21T22:35:08.000Z | 2022-02-01T18:22:50.000Z | lldb/test/API/types/AbstractBase.py | elizabethandrews/llvm | 308498236c1c4778fdcba0bfbb556adf8aa333ea | [
"Apache-2.0"
] | null | null | null | lldb/test/API/types/AbstractBase.py | elizabethandrews/llvm | 308498236c1c4778fdcba0bfbb556adf8aa333ea | [
"Apache-2.0"
] | 1 | 2021-03-30T11:22:52.000Z | 2021-03-30T11:22:52.000Z | """
Abstract base class of basic types provides a generic type tester method.
"""
from __future__ import print_function
import os
import re
import lldb
from lldbsuite.test.lldbtest import *
import lldbsuite.test.lldbutil as lldbutil
def Msg(var, val, using_frame_variable):
    """Build the assertion message comparing a debugger command's output
    for `var` against the compiled program's golden value `val`."""
    command = ('frame variable --show-types' if using_frame_variable
               else 'expression')
    return "'%s %s' matches the output (from compiled code): %s" % (
        command, var, val)
class GenericTester(TestBase):
    """Abstract base for the basic-type test cases: provides generic
    'frame variable' / 'expression' tester methods driven by the golden
    output that the compiled program prints."""

    # This is the pattern by design to match the " var = 'value'" output from
    # printf() stmts (see basic_type.cpp).
    # NOTE(review): non-raw string relies on '\*' passing through unchanged;
    # consider an r-string to silence newer interpreters' escape warnings.
    pattern = re.compile(" (\*?a[^=]*) = '([^=]*)'$")

    # Assert message.
    DATA_TYPE_GROKKED = "Data type from expr parser output is parsed correctly"
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# We'll use the test method name as the exe_name.
# There are a bunch of test cases under test/types and we don't want the
# module cacheing subsystem to be confused with executable name "a.out"
# used for all the test cases.
self.exe_name = self.testMethodName
golden = "{}-golden-output.txt".format(self.testMethodName)
if configuration.is_reproducer():
self.golden_filename = self.getReproducerArtifact(golden)
else:
self.golden_filename = self.getBuildArtifact(golden)
def tearDown(self):
"""Cleanup the test byproducts."""
if os.path.exists(self.golden_filename) and not configuration.is_reproducer():
os.remove(self.golden_filename)
TestBase.tearDown(self)
#==========================================================================#
# Functions build_and_run() and build_and_run_expr() are generic functions #
# which are called from the Test*Types*.py test cases. The API client is #
# responsible for supplying two mandatory arguments: the source file, e.g.,#
# 'int.cpp', and the atoms, e.g., set(['unsigned', 'long long']) to the #
# functions. There are also three optional keyword arguments of interest, #
# as follows: #
# #
# bc -> blockCaptured (defaulted to False) #
# True: testing vars of various basic types from inside a block #
# False: testing vars of various basic types from a function #
# qd -> quotedDisplay (defaulted to False) #
# True: the output from 'frame var' or 'expr var' contains a pair #
# of single quotes around the value #
# False: no single quotes are to be found around the value of #
# variable #
#==========================================================================#
    def build_and_run(self, source, atoms, bc=False, qd=False):
        # Convenience wrapper: build `source` and test it via the
        # 'frame variable' path (expr=False). See the option table above.
        self.build_and_run_with_source_atoms_expr(
            source, atoms, expr=False, bc=bc, qd=qd)
    def build_and_run_expr(self, source, atoms, bc=False, qd=False):
        # Convenience wrapper: build `source` and test it via the
        # 'expression' path (expr=True). See the option table above.
        self.build_and_run_with_source_atoms_expr(
            source, atoms, expr=True, bc=bc, qd=qd)
def build_and_run_with_source_atoms_expr(
self, source, atoms, expr, bc=False, qd=False):
# See also Makefile and basic_type.cpp:177.
if bc:
d = {'CXX_SOURCES': source, 'EXE': self.exe_name,
'CFLAGS_EXTRAS': '-DTEST_BLOCK_CAPTURED_VARS'}
else:
d = {'CXX_SOURCES': source, 'EXE': self.exe_name}
self.build(dictionary=d)
self.setTearDownCleanup(dictionary=d)
if expr:
self.generic_type_expr_tester(
self.exe_name, atoms, blockCaptured=bc, quotedDisplay=qd)
else:
self.generic_type_tester(
self.exe_name,
atoms,
blockCaptured=bc,
quotedDisplay=qd)
    def process_launch_o(self):
        """Launch the inferior with its stdout redirected into
        self.golden_filename, copying the file back from a remote target
        when necessary."""
        # process launch command output redirect always goes to host the
        # process is running on
        if lldb.remote_platform:
            # process launch -o requires a path that is valid on the target
            self.assertIsNotNone(lldb.remote_platform.GetWorkingDirectory())
            remote_path = lldbutil.append_to_process_working_directory(self,
                "lldb-stdout-redirect.txt")
            self.runCmd(
                'process launch -- {remote}'.format(remote=remote_path))
            # copy remote_path to local host
            self.runCmd('platform get-file {remote} "{local}"'.format(
                remote=remote_path, local=self.golden_filename))
        elif configuration.is_reproducer_replay():
            # Don't overwrite the golden file generated at capture time.
            self.runCmd('process launch')
        else:
            # Local run: redirect straight into the golden file.
            self.runCmd(
                'process launch -o "{local}"'.format(local=self.golden_filename))
def get_golden_list(self, blockCaptured=False):
with open(self.golden_filename, 'r') as f:
go = f.read()
golden_list = []
# Scan the golden output line by line, looking for the pattern:
#
# variable = 'value'
#
for line in go.split(os.linesep):
# We'll ignore variables of array types from inside a block.
if blockCaptured and '[' in line:
continue
match = self.pattern.search(line)
if match:
var, val = match.group(1), match.group(2)
golden_list.append((var, val))
return golden_list
def generic_type_tester(
self,
exe_name,
atoms,
quotedDisplay=False,
blockCaptured=False):
"""Test that variables with basic types are displayed correctly."""
self.runCmd("file %s" % self.getBuildArtifact(exe_name),
CURRENT_EXECUTABLE_SET)
# First, capture the golden output emitted by the oracle, i.e., the
# series of printf statements.
self.process_launch_o()
# This golden list contains a list of (variable, value) pairs extracted
# from the golden output.
gl = self.get_golden_list(blockCaptured)
# This test uses a #include of "basic_type.cpp" so we need to enable
# always setting inlined breakpoints.
self.runCmd('settings set target.inline-breakpoint-strategy always')
# And add hooks to restore the settings during tearDown().
self.addTearDownHook(lambda: self.runCmd(
"settings set target.inline-breakpoint-strategy headers"))
# Bring the program to the point where we can issue a series of
# 'frame variable --show-types' command.
if blockCaptured:
break_line = line_number(
"basic_type.cpp",
"// Break here to test block captured variables.")
else:
break_line = line_number(
"basic_type.cpp",
"// Here is the line we will break on to check variables.")
lldbutil.run_break_set_by_file_and_line(
self,
"basic_type.cpp",
break_line,
num_expected_locations=1,
loc_exact=True)
self.runCmd("run", RUN_SUCCEEDED)
self.expect("process status", STOPPED_DUE_TO_BREAKPOINT,
substrs=["stop reason = breakpoint",
" at basic_type.cpp:%d" % break_line,])
#self.runCmd("frame variable --show-types")
# Now iterate through the golden list, comparing against the output from
# 'frame variable --show-types var'.
for var, val in gl:
self.runCmd("frame variable --show-types %s" % var)
output = self.res.GetOutput()
# The input type is in a canonical form as a set of named atoms.
# The display type string must contain each and every element.
#
# Example:
# runCmd: frame variable --show-types a_array_bounded[0]
# output: (char) a_array_bounded[0] = 'a'
#
try:
dt = re.match("^\((.*)\)", output).group(1)
except:
self.fail(self.DATA_TYPE_GROKKED)
# Expect the display type string to contain each and every atoms.
self.expect(
dt, "Display type: '%s' must contain the type atoms: '%s'" %
(dt, atoms), exe=False, substrs=list(atoms))
# The (var, val) pair must match, too.
nv = ("%s = '%s'" if quotedDisplay else "%s = %s") % (var, val)
self.expect(output, Msg(var, val, True), exe=False,
substrs=[nv])
def generic_type_expr_tester(
self,
exe_name,
atoms,
quotedDisplay=False,
blockCaptured=False):
"""Test that variable expressions with basic types are evaluated correctly."""
self.runCmd("file %s" % self.getBuildArtifact(exe_name),
CURRENT_EXECUTABLE_SET)
# First, capture the golden output emitted by the oracle, i.e., the
# series of printf statements.
self.process_launch_o()
# This golden list contains a list of (variable, value) pairs extracted
# from the golden output.
gl = self.get_golden_list(blockCaptured)
# This test uses a #include of "basic_type.cpp" so we need to enable
# always setting inlined breakpoints.
self.runCmd('settings set target.inline-breakpoint-strategy always')
# And add hooks to restore the settings during tearDown().
self.addTearDownHook(lambda: self.runCmd(
"settings set target.inline-breakpoint-strategy headers"))
# Bring the program to the point where we can issue a series of
# 'expr' command.
if blockCaptured:
break_line = line_number(
"basic_type.cpp",
"// Break here to test block captured variables.")
else:
break_line = line_number(
"basic_type.cpp",
"// Here is the line we will break on to check variables.")
lldbutil.run_break_set_by_file_and_line(
self,
"basic_type.cpp",
break_line,
num_expected_locations=1,
loc_exact=True)
self.runCmd("run", RUN_SUCCEEDED)
self.expect("process status", STOPPED_DUE_TO_BREAKPOINT,
substrs=["stop reason = breakpoint",
" at basic_type.cpp:%d" % break_line])
#self.runCmd("frame variable --show-types")
# Now iterate through the golden list, comparing against the output from
# 'expr var'.
for var, val in gl:
self.runCmd("expression %s" % var)
output = self.res.GetOutput()
# The input type is in a canonical form as a set of named atoms.
# The display type string must contain each and every element.
#
# Example:
# runCmd: expr a
# output: (double) $0 = 1100.12
#
try:
dt = re.match("^\((.*)\) \$[0-9]+ = ", output).group(1)
except:
self.fail(self.DATA_TYPE_GROKKED)
# Expect the display type string to contain each and every atoms.
self.expect(
dt, "Display type: '%s' must contain the type atoms: '%s'" %
(dt, atoms), exe=False, substrs=list(atoms))
# The val part must match, too.
valPart = ("'%s'" if quotedDisplay else "%s") % val
self.expect(output, Msg(var, val, False), exe=False,
substrs=[valPart])
| 41.979094 | 90 | 0.566235 |
00d1a17988ac9311f65610c00fc65fe9ab23cb8b | 5,915 | py | Python | quru/server/raft/state/candidate.py | ShawnHan1993/quru | 6b103a54d8228e4e2d44b06cc068c60a44b02d67 | [
"MIT"
] | null | null | null | quru/server/raft/state/candidate.py | ShawnHan1993/quru | 6b103a54d8228e4e2d44b06cc068c60a44b02d67 | [
"MIT"
] | null | null | null | quru/server/raft/state/candidate.py | ShawnHan1993/quru | 6b103a54d8228e4e2d44b06cc068c60a44b02d67 | [
"MIT"
] | null | null | null | import asyncio
import random
import typing
from aiozipkin.span import SpanAbc
from ....quru_logger import logger
from ....env import HEARTBEAT_INTERVAL
from ....words import RaftLog
from ..timer import Timer
from .base import BaseState, log_consistency_check, candidate_qualification
from ...protocol.layer2 import RPCTimeoutError
class Candidate(BaseState):
    """Raft candidate state.

    Runs randomized election rounds until either a poll wins a majority
    (transition to leader) or a legitimate leader/term is observed
    (transition to follower).

    NOTE(review): role transitions are delegated to ``self._core``
    (``to_leader``/``to_follower``); how/whether the state object itself is
    swapped is not visible in this file — confirm against the core.
    """

    def start(self) -> asyncio.Task:
        """Start the randomized election timer, polling once immediately.

        The timeout is drawn uniformly from [2, 4] heartbeat intervals and
        converted from milliseconds to seconds; randomization is the
        standard Raft measure against repeated split votes.
        """
        lower = HEARTBEAT_INTERVAL * 2
        upper = HEARTBEAT_INTERVAL * 4
        self._election_timer = Timer(
            lambda: random.randint(lower, upper) * 0.001,
            self._poll)
        return self._election_timer.start(at_once=True)

    async def _poll(self):
        """Run up to two election attempts for the next term.

        Increments the term, votes for self, requests votes from peers and
        becomes leader on a strict majority.  If both attempts fail, the
        method returns and the election timer triggers the next term.
        """
        span, self._trace = self._core.create_trace(
            "Term:{} Seg:0".format(
                self._current_term + 1
            ),
            "poll")
        self._trace.start()
        with span as span:
            # Guard against a stale timer firing after a role change.
            if self.__class__.__name__ != "Candidate":
                logger.warn("Role_leak.", span=span)
                return
            self._voted_for = self._core.name
            self._current_term += 1
            for _ in range(2):
                logger.info(
                    "Starting_poll...",
                    name=self._core.name,
                    term=self._current_term,
                    span=span)
                votes = await self.request_vote(span)
                # +1 accounts for this candidate's own vote.
                pros = sum(votes) + 1
                total = self._core.cohort_size or self._core.quorum_size
                if pros <= total // 2:
                    logger.info(
                        "Polling_fails.",
                        pros=pros,
                        total=total,
                        i_am=self._core.name,
                        term=self._current_term,
                        span=span
                    )
                else:
                    # Might be a polling succeeding, if there's no role leak.
                    break
            else:
                # Both attempts failed: let the election timer go to the
                # polling of the next term.
                return
            if self.__class__.__name__ == "Candidate":
                logger.info(
                    "Polling_succeeds!",
                    pros=pros,
                    total=total,
                    i_am=self._core.name,
                    span=span
                )
                await self._core.to_leader()
                # From now on, 'self' is Leader type.

    async def request_vote(self, span: SpanAbc = None):
        """Concurrently request votes from all peers.

        Returns the gathered list of per-peer results (True/False from
        :meth:`_request_single_vote`).  When no cluster exists yet
        (``cohort_size == 0``), the static quorum list is polled instead of
        the cohort.
        """
        # Last ledger entry; used by receivers for the Raft log
        # up-to-dateness check.
        prev_log = self._core.ledger[-1] \
            if len(self._core.ledger) > 0 else None
        data = {
            "term": self._current_term,
            "candidate_id": self._core.name,
            "prev_log": prev_log,
        }
        tks = []
        if self._core.cohort_size == 0:
            # No cluster exists
            for peer, _ in self._core.quorum:
                if peer == self._core.name:
                    continue
                tk = asyncio.create_task(
                    self._request_single_vote(
                        peer,
                        data,
                        span
                    )
                )
                tks.append(tk)
            return await asyncio.gather(*tks)
        else:
            for peer in self._core.cohort:
                if peer == self._core.name:
                    continue
                tk = asyncio.create_task(
                    self._request_single_vote(
                        peer,
                        data,
                        span
                    )
                )
                tks.append(tk)
            return await asyncio.gather(*tks)

    async def _request_single_vote(self, peer, data, span: SpanAbc = None):
        """Send a vote request to one peer; a timed-out RPC counts as a
        "no" vote (returns False)."""
        logger.debug("Requesting_vote_from_{}.".format(peer), span=span)
        try:
            vo = await self._core.call(peer, "reqvot", data, span)
            logger.debug("Got_{}_from_{}.".format(vo, peer), span=span)
            return vo
        except RPCTimeoutError:
            logger.warn(
                "Not_receiving_resp_for_voting.",
                fro=peer, iam=self._core.name)
            logger.debug("Got_none_from_{}.".format(peer), span=span)
            return False

    def stop(self):
        """Stop the election timer (called when leaving candidate state)."""
        self._election_timer.stop()

    async def on_request_store(self, data, span: SpanAbc = None):
        """Defer a store request received while an election is in progress.

        NOTE(review): this awaits *itself* after a 3 s sleep; it only
        terminates if the role change rebinds ``self.on_request_store`` to
        another state's handler — confirm the core swaps the handler,
        otherwise this recurses indefinitely.
        """
        logger.info("Rev_store_request_on_election_phase.", span=span)
        await asyncio.sleep(3)  # Sleep to wait for\
        # self-changing to follower/leader
        return await self.on_request_store(data, span)

    @candidate_qualification
    def on_request_vote(self, data, span: SpanAbc):
        """Grant a vote to a qualified candidate and step down to follower.

        The ``candidate_qualification`` decorator (defined elsewhere) is
        presumed to reject unqualified candidates before this body runs.
        """
        term = data['term']
        candidate_id = data['candidate_id']
        self._current_term = term
        self._voted_for = candidate_id
        logger.info(
            "Vote_for_{}".format(candidate_id),
            i_am=self._core.name,
            state=self.__class__.__name__,
            span=span)
        self._core.to_follower()
        return True

    @log_consistency_check
    def on_append_entries(
        self,
        data: dict,
        span: SpanAbc = None
    ):
        """Accept AppendEntries from a legitimate leader and step down.

        Replaces the ledger tail after ``prev_log``, applies any newly
        committed entries up to ``leader_commit``, adopts the leader's
        term and transitions to follower.  Returns ``(True, [])``
        (success flag and an empty conflict list).
        """
        self._leader_id = data['leader_id']
        if self._voted_for is not None:
            self._voted_for = None
        entries: typing.List[RaftLog] = data['entries']
        leader_commit: int = data['leader_commit']
        # Overwrite everything after the agreed-upon prev_log entry.
        self._core.ledger.replace(data['prev_log'].index + 1, entries)
        if self._commit_index < leader_commit:
            # Apply entries the leader has already committed.
            for i in range(self._commit_index, leader_commit + 1):
                log = self._core.ledger[i]
                self._core.apply(log)
                self._commit_index += 1
        self._current_term = data['term']
        logger.debug(
            "Good_appent_from_{}.".format(data['leader_id']),
            span=span)
        self._core.to_follower()
        return True, []
| 35 | 77 | 0.514286 |
a5784097b96e3988fd4b432114b0389412e0ad06 | 360 | py | Python | tasks.py | li195111/FastAPI-Template | ca5c0b6633496b7ddcb1ad35566d817a3e16598a | [
"MIT"
] | null | null | null | tasks.py | li195111/FastAPI-Template | ca5c0b6633496b7ddcb1ad35566d817a3e16598a | [
"MIT"
] | null | null | null | tasks.py | li195111/FastAPI-Template | ca5c0b6633496b7ddcb1ad35566d817a3e16598a | [
"MIT"
] | null | null | null | from celery import Celery
task = Celery('tasks',
broker='redis://localhost:6379/0',
backend='redis://localhost:6379/0')
'''
@task.task
def function():
...
'''
@task.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    """Celery hook invoked after the app is configured.

    Currently a no-op; the commented line shows how a periodic task would
    be registered (every 10 seconds).
    """
    # sender.add_periodic_task(10.0, clean_key_pairs.s(), name='clean every 10s')
    pass
| 22.5 | 81 | 0.65 |
81187a12ca45b2b1e1460b755b45f741ba28fc65 | 3,620 | py | Python | opts.py | RockingRok/SeniorDesignDemo | c42a44310a3976cf6de2a7f3701bc6749373736c | [
"MIT"
] | 1 | 2018-11-30T19:19:50.000Z | 2018-11-30T19:19:50.000Z | opts.py | RockingRok/SeniorDesignDemo | c42a44310a3976cf6de2a7f3701bc6749373736c | [
"MIT"
] | null | null | null | opts.py | RockingRok/SeniorDesignDemo | c42a44310a3976cf6de2a7f3701bc6749373736c | [
"MIT"
] | null | null | null | import argparse
# Command-line options for training Temporal Segment Networks (TSN).
# NOTE: help-text defaults below were corrected to match the actual
# ``default=`` values (they previously disagreed for --dropout, -b,
# --clip-gradient, --print-freq and -j/--workers).
parser = argparse.ArgumentParser(description="PyTorch implementation of Temporal Segment Networks")
parser.add_argument('dataset', type=str, choices=['something', 'jester', 'moments'])
parser.add_argument('modality', type=str, choices=['RGB', 'Flow'])
parser.add_argument('--train_list', type=str, default="")
parser.add_argument('--val_list', type=str, default="")
parser.add_argument('--root_path', type=str, default="")
parser.add_argument('--store_name', type=str, default="")
# ========================= Model Configs ==========================
parser.add_argument('--arch', type=str, default="BNInception")
parser.add_argument('--num_segments', type=int, default=3)
parser.add_argument('--consensus_type', type=str, default='avg')
parser.add_argument('--k', type=int, default=3)
parser.add_argument('--dropout', '--do', default=0.8, type=float,
                    metavar='DO', help='dropout ratio (default: 0.8)')
parser.add_argument('--loss_type', type=str, default="nll",
                    choices=['nll'])
parser.add_argument('--img_feature_dim', default=256, type=int, help="the feature dimension for each frame")

# ========================= Learning Configs ==========================
parser.add_argument('--epochs', default=120, type=int, metavar='N',
                    help='number of total epochs to run')
parser.add_argument('-b', '--batch-size', default=128, type=int,
                    metavar='N', help='mini-batch size (default: 128)')
parser.add_argument('--lr', '--learning-rate', default=0.001, type=float,
                    metavar='LR', help='initial learning rate')
parser.add_argument('--lr_steps', default=[50, 100], type=float, nargs="+",
                    metavar='LRSteps', help='epochs to decay learning rate by 10')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
                    help='momentum')
parser.add_argument('--weight-decay', '--wd', default=5e-4, type=float,
                    metavar='W', help='weight decay (default: 5e-4)')
parser.add_argument('--clip-gradient', '--gd', default=20, type=float,
                    metavar='W', help='gradient norm clipping (default: 20)')
parser.add_argument('--no_partialbn', '--npb', default=False, action="store_true")

# ========================= Monitor Configs ==========================
parser.add_argument('--print-freq', '-p', default=20, type=int,
                    metavar='N', help='print frequency (default: 20)')
parser.add_argument('--eval-freq', '-ef', default=5, type=int,
                    metavar='N', help='evaluation frequency (default: 5)')

# ========================= Runtime Configs ==========================
parser.add_argument('-j', '--workers', default=30, type=int, metavar='N',
                    help='number of data loading workers (default: 30)')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
                    help='path to latest checkpoint (default: none)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
                    help='evaluate model on validation set')
parser.add_argument('--snapshot_pref', type=str, default="")
parser.add_argument('--start-epoch', default=0, type=int, metavar='N',
                    help='manual epoch number (useful on restarts)')
parser.add_argument('--gpus', nargs='+', type=int, default=None)
parser.add_argument('--flow_prefix', default="", type=str)
parser.add_argument('--root_log', type=str, default='log')
parser.add_argument('--root_model', type=str, default='model')
parser.add_argument('--root_output', type=str, default='output')
559f521086bd410bb53080954deec566f6a13f8a | 7,381 | py | Python | KiBuzzard/dialog/dialog_base.py | femtoduino/KiBuzzard | 2065819fc822549a03516377929fbc2cecc66252 | [
"MIT"
] | 17 | 2021-01-24T03:09:36.000Z | 2021-02-13T12:40:23.000Z | KiBuzzard/dialog/dialog_base.py | arturo182/KiBuzzard | a963b413a0c89f7d7eb88bc15fc15186a19f5dfc | [
"MIT"
] | null | null | null | KiBuzzard/dialog/dialog_base.py | arturo182/KiBuzzard | a963b413a0c89f7d7eb88bc15fc15186a19f5dfc | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Oct 26 2018)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class KiBuzzardDialog
###########################################################################
class KiBuzzardDialog ( wx.Dialog ):
	"""Main KiBuzzard dialog with a 'GUI' tab (label options) and a
	'Command Line' tab.

	NOTE: this class is generated by wxFormBuilder (see header); override
	the virtual event handlers in a derived class instead of editing here.
	"""

	def __init__( self, parent ):
		wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = u"KiBuzzard", pos = wx.DefaultPosition, size = wx.Size( 420,280 ), style = wx.CLOSE_BOX|wx.DEFAULT_DIALOG_STYLE|wx.RESIZE_BORDER )

		#self.SetSizeHints( wx.Size( 420,280 ), wx.DefaultSize )

		dialogSizer = wx.BoxSizer( wx.VERTICAL )

		self.notebook = wx.Notebook( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, 0 )
		self.guiPanel = wx.Panel( self.notebook, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
		guiSizer = wx.BoxSizer( wx.VERTICAL )

		guiFlexSizer = wx.FlexGridSizer( 5, 2, 0, 0 )
		guiFlexSizer.AddGrowableCol( 1 )
		guiFlexSizer.SetFlexibleDirection( wx.BOTH )
		guiFlexSizer.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_ALL )

		self.labelText = wx.StaticText( self.guiPanel, wx.ID_ANY, u"Label:", wx.DefaultPosition, wx.DefaultSize, 0 )
		self.labelText.Wrap( -1 )

		guiFlexSizer.Add( self.labelText, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5 )

		labelFlexSizer = wx.FlexGridSizer( 0, 3, 0, 0 )
		labelFlexSizer.AddGrowableCol( 1 )
		labelFlexSizer.SetFlexibleDirection( wx.BOTH )
		labelFlexSizer.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )

		labelStartComboBoxChoices = [ wx.EmptyString, u"[", u"(", u"/", u"<" ]
		self.labelStartComboBox = wx.ComboBox( self.guiPanel, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, labelStartComboBoxChoices, wx.CB_READONLY )
		labelFlexSizer.Add( self.labelStartComboBox, 0, wx.ALL, 5 )

		self.labelEdit = wx.TextCtrl( self.guiPanel, wx.ID_ANY, u" ", wx.DefaultPosition, wx.DefaultSize, 0 )
		labelFlexSizer.Add( self.labelEdit, 0, wx.ALL|wx.EXPAND, 5 )

		labelEndComboBoxChoices = [ wx.EmptyString, u"]", u")", u"/", u">" ]
		self.labelEndComboBox = wx.ComboBox( self.guiPanel, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, labelEndComboBoxChoices, wx.CB_READONLY )
		labelFlexSizer.Add( self.labelEndComboBox, 0, wx.ALL, 5 )

		guiFlexSizer.Add( labelFlexSizer, 1, wx.EXPAND, 5 )

		self.fontText = wx.StaticText( self.guiPanel, wx.ID_ANY, u"Font:", wx.DefaultPosition, wx.DefaultSize, 0 )
		self.fontText.Wrap( -1 )

		guiFlexSizer.Add( self.fontText, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5 )

		fontComboBoxChoices = []
		self.fontComboBox = wx.ComboBox( self.guiPanel, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, fontComboBoxChoices, wx.CB_READONLY )
		guiFlexSizer.Add( self.fontComboBox, 0, wx.ALL|wx.LEFT|wx.RIGHT, 5 )

		self.scaleText = wx.StaticText( self.guiPanel, wx.ID_ANY, u"Scale:", wx.DefaultPosition, wx.DefaultSize, 0 )
		self.scaleText.Wrap( -1 )

		guiFlexSizer.Add( self.scaleText, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5 )

		self.scaleSpinCtrl = wx.SpinCtrlDouble( self.guiPanel, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.SP_ARROW_KEYS, 0, 1, 0.04, 0.01 )
		self.scaleSpinCtrl.SetDigits( 0 )
		guiFlexSizer.Add( self.scaleSpinCtrl, 0, wx.ALL, 5 )

		self.verticalAlignText = wx.StaticText( self.guiPanel, wx.ID_ANY, u"Vertical Align:", wx.DefaultPosition, wx.DefaultSize, 0 )
		self.verticalAlignText.Wrap( -1 )

		guiFlexSizer.Add( self.verticalAlignText, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5 )

		verticalAlignComboBoxChoices = [ u"Top", u"Center", u"Bottom" ]
		self.verticalAlignComboBox = wx.ComboBox( self.guiPanel, wx.ID_ANY, u"Center", wx.DefaultPosition, wx.DefaultSize, verticalAlignComboBoxChoices, wx.CB_READONLY )
		guiFlexSizer.Add( self.verticalAlignComboBox, 0, wx.ALL, 5 )

		self.horizontalAlignText = wx.StaticText( self.guiPanel, wx.ID_ANY, u"Horizontal Align:", wx.DefaultPosition, wx.DefaultSize, 0 )
		self.horizontalAlignText.Wrap( -1 )

		guiFlexSizer.Add( self.horizontalAlignText, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5 )

		horizontalAlignComboBoxChoices = [ u"Left", u"Center", u"Right" ]
		self.horizontalAlignComboBox = wx.ComboBox( self.guiPanel, wx.ID_ANY, u"Center", wx.DefaultPosition, wx.DefaultSize, horizontalAlignComboBoxChoices, wx.CB_READONLY )
		guiFlexSizer.Add( self.horizontalAlignComboBox, 0, wx.ALL, 5 )

		guiSizer.Add( guiFlexSizer, 1, wx.EXPAND, 5 )

		self.createButton = wx.Button( self.guiPanel, wx.ID_ANY, u"Create!", wx.DefaultPosition, wx.DefaultSize, 0 )
		guiSizer.Add( self.createButton, 0, wx.ALL|wx.EXPAND, 5 )

		self.guiPanel.SetSizer( guiSizer )
		self.guiPanel.Layout()
		guiSizer.Fit( self.guiPanel )
		self.notebook.AddPage( self.guiPanel, u"GUI", True )
		self.cmdLinePanel = wx.Panel( self.notebook, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )
		cmdLineSizer = wx.BoxSizer( wx.VERTICAL )

		cmdLineLabelSizer = wx.BoxSizer( wx.HORIZONTAL )

		self.cmdLineLabel = wx.StaticText( self.cmdLinePanel, wx.ID_ANY, u"cmd", wx.DefaultPosition, wx.DefaultSize, 0 )
		self.cmdLineLabel.Wrap( -1 )

		cmdLineLabelSizer.Add( self.cmdLineLabel, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5 )

		self.cmdLineEdit = wx.TextCtrl( self.cmdLinePanel, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, wx.TE_PROCESS_ENTER )
		cmdLineLabelSizer.Add( self.cmdLineEdit, 1, wx.ALL|wx.EXPAND, 5 )

		cmdLineSizer.Add( cmdLineLabelSizer, 1, wx.EXPAND, 5 )

		cmdLineHelpSizer = wx.BoxSizer( wx.VERTICAL )

		self.cmdLineHelpLabel = wx.StaticText( self.cmdLinePanel, wx.ID_ANY, u"Press Enter to Create Label", wx.DefaultPosition, wx.DefaultSize, 0 )
		self.cmdLineHelpLabel.Wrap( -1 )

		cmdLineHelpSizer.Add( self.cmdLineHelpLabel, 1, wx.ALIGN_CENTER|wx.ALL, 5 )

		cmdLineSizer.Add( cmdLineHelpSizer, 1, wx.EXPAND, 5 )

		self.cmdLinePanel.SetSizer( cmdLineSizer )
		self.cmdLinePanel.Layout()
		cmdLineSizer.Fit( self.cmdLinePanel )
		self.notebook.AddPage( self.cmdLinePanel, u"Command Line", False )

		dialogSizer.Add( self.notebook, 1, wx.EXPAND |wx.ALL, 5 )

		self.SetSizer( dialogSizer )
		self.Layout()

		self.Centre( wx.BOTH )

		# Connect Events
		self.labelEdit.Bind( wx.EVT_TEXT_ENTER, self.labelEditOnTextEnter )
		self.createButton.Bind( wx.EVT_BUTTON, self.createButtonOnButtonClick )
		self.cmdLineEdit.Bind( wx.EVT_TEXT_ENTER, self.cmdLineEditOnTextEnter )

	def __del__( self ):
		pass


	# Virtual event handlers, overide them in your derived class
	def labelEditOnTextEnter( self, event ):
		pass

	def createButtonOnButtonClick( self, event ):
		pass

	def cmdLineEditOnTextEnter( self, event ):
		pass
ea77a533a061ab8a89e995bfcf9d820712750779 | 1,011 | py | Python | aiogram/__init__.py | fakegit/aiogram | 583f00ce317dfb4c2c6c4df4bd4694c6f0b0bf18 | [
"MIT"
] | 2 | 2021-12-28T06:40:14.000Z | 2022-02-05T08:33:13.000Z | aiogram/__init__.py | MrShox777/aiogram | 763efb77631de07c563868273b1bd22b15f63be5 | [
"MIT"
] | null | null | null | aiogram/__init__.py | MrShox777/aiogram | 763efb77631de07c563868273b1bd22b15f63be5 | [
"MIT"
] | null | null | null | import sys
# Fail fast at import time on unsupported interpreters: the package relies
# on Python 3.7+ language and asyncio features.
if sys.version_info < (3, 7):
    raise ImportError('Your Python version {0} is not supported by aiogram, please install '
                      'Python 3.7+'.format('.'.join(map(str, sys.version_info[:3]))))
import asyncio
import os
from . import bot
from . import contrib
from . import dispatcher
from . import types
from . import utils
from .bot import Bot
from .dispatcher import Dispatcher
from .dispatcher import filters
from .dispatcher import middlewares
from .utils import exceptions, executor, helper, markdown as md
# Prefer uvloop's faster event-loop implementation when it is installed,
# unless explicitly disabled via the DISABLE_UVLOOP environment variable.
try:
    import uvloop
except ImportError:
    uvloop = None
else:
    if 'DISABLE_UVLOOP' not in os.environ:
        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())

# Public API of the aiogram package.
__all__ = (
    'Bot',
    'Dispatcher',
    '__api_version__',
    '__version__',
    'bot',
    'contrib',
    'dispatcher',
    'exceptions',
    'executor',
    'filters',
    'helper',
    'md',
    'middlewares',
    'types',
    'utils',
)

# Library version and the Telegram Bot API version it targets.
__version__ = '2.17.1'
__api_version__ = '5.5'
c2d0905853f9c67d2fd9621498f3d7653b3c962b | 21,275 | py | Python | newcomb.py | jensv/fluxtubestability | 743ec7c43de5371e0ac9d98cec7bcb235703b13d | [
"MIT"
] | null | null | null | newcomb.py | jensv/fluxtubestability | 743ec7c43de5371e0ac9d98cec7bcb235703b13d | [
"MIT"
] | null | null | null | newcomb.py | jensv/fluxtubestability | 743ec7c43de5371e0ac9d98cec7bcb235703b13d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sun Feb 08 15:36:58 2015
@author: Jens von der Linden
"""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future.builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, map, next, oct, open, pow, range, round,
str, super, zip)
"""Python 3.x compatibility"""
import sys
from scipy.interpolate import splev
import numpy as np
from numpy import atleast_1d
import scipy.integrate
import newcomb_init as init
import singularity_frobenius as frob
import find_singularties as find_sing
import external_stability as ext
import newcomb_f as new_f
import newcomb_g as new_g
def stability(params, offset, suydam_offset, suppress_output=False,
              method='lsoda', rtol=None, max_step=None, nsteps=None,
              xi_given=[0., 1.], diagnose=False, sing_search_points=10000,
              f_func=new_f.newcomb_f_16, g_func=new_g.newcomb_g_18_dimless_wo_q,
              skip_external_stability=False, stiff=False, use_jac=True,
              adapt_step_size=False, adapt_min_steps=500):
    r"""
    Determine external stability.

    Parameters
    ----------
    params: dict
        equilibrium parameters including spline coefficients
    offset : float
        offset after which to start integrating after singularities
    suydam_offset : float
        offset after which to start integrating after Suydam unstable
        singularities
    suppress_output: boolean
        flag to suppress diagnostic print statements
    method: string
        integration method to use. Either an integrator in scipy.integrate.ode
        or 'odeint' for scipy.integrate.odeint
    rtol : float
        passed to ode solver, relative tolerance setting for ODE integrator
    max_step: float
        option passed to ode solver, limit for max step size
    nsteps: int
        option passed to ode solver, maximum number of steps allowed during
        call to solver.
    xi_given: tuple of floats
        initial condition used for xi if equilibrium does not start at r=0
        (read-only here; the list default is never mutated)
    diagnose: bool
        flag to print out more diagnostic statements during integration
    sing_search_points: int
        number of points to divide domain by in search for singularities
    f_func: function
        python function to use to calculate f
    g_func: function
        python function to use to calculate g
    skip_external_stability: bool
        flag to skip external stability; the function then returns only
        (xi, xi_der)
    stiff: bool
        flag to indicate stiff equation to integrator
    use_jac: bool
        flag to have integrator use Jacobian
    adapt_step_size: bool
        flag to use adaptive step size integrator
    adapt_min_steps: int
        specify minimum number of steps

    Returns
    -------
    stable_external : bool
        True if delta_w > 0
    suydam_stable : bool
        True if all singularities (except r=0) are Suydam stable
    delta_w : float
        total perturbed potential energy
    missing_end_params:
        diagnostic output, no longer used
    xi : ndarray
        xi at interval boundary (only last element is relevant)
    xi_der : ndarray
        derivative of xi at interval boundary (only last element is relevant)
    r_array : ndarray
        array of r values at which xi is integrated

    Notes
    -----
    Examines the equilibrium. If the equilibrium has a singularity, the
    Frobenius method is used to determine a small solution at an r slightly
    greater than the singularity. If the singularity is Suydam unstable no
    attempt is made to calculate external stability.
    If there is no Frobenius instability, a power series solution close to
    r=0 is chosen, or, if the integration does not start at r=0, a given
    xi is used as boundary condition.
    Only the last interval is integrated.
    To save time xi and xi_der are only evaluated at r=a (under the hood the
    integrator is evaluating xi and xi_der across the interval).
    Xi and xi_der are plugged into the potential energy equation to determine
    stability.
    """
    # NOTE(review): 'params' is mutated in place; callers that reuse the
    # dict will see 'f_func'/'g_func' added -- confirm this is intended.
    params.update({'f_func': f_func, 'g_func': g_func})
    missing_end_params = None
    # Only m = -1 modes are scanned for non-geometric (f = 0) singularities;
    # other modes integrate straight from r_0 to the wall.
    if params['m'] == -1:
        sing_params = {'a': params['r_0'], 'b': params['a'],
                       'points': sing_search_points, 'k': params['k'],
                       'm': params['m'], 'b_z_spl': params['b_z'],
                       'b_z_prime_spl': params['b_z_prime'],
                       'b_theta_spl': params['b_theta'],
                       'b_theta_prime_spl': params['b_theta_prime'],
                       'p_prime_spl': params['p_prime'], 'offset': offset,
                       'tol': 1E-2, 'beta_0': params['beta_0']}
        (interval,
         starts_with_sing,
         suydam_stable,
         suydam_unstable_interval) = intervals_with_singularties(suppress_output,
                                                                 **sing_params)
    else:
        suydam_stable = True
        starts_with_sing = False
        suydam_unstable_interval = False
        interval = [params['r_0'], params['a']]

    interval, init_value = setup_initial_conditions(interval, starts_with_sing,
                                                    offset, suydam_offset,
                                                    xi_given=xi_given,
                                                    **params)
    if not suydam_unstable_interval:
        if skip_external_stability:
            # Early return with a different (2-tuple) shape by design.
            (xi, xi_der, r_array) = newcomb_int(params, interval,
                                                init_value, method,
                                                diagnose, max_step,
                                                nsteps, rtol,
                                                skip_external_stability=True,
                                                stiff=stiff,
                                                use_jac=use_jac,
                                                adapt_step_size=adapt_step_size,
                                                adapt_min_steps=adapt_min_steps)
            return xi, xi_der
        (stable_external, delta_w,
         missing_end_params, xi, xi_der,
         r_array) = newcomb_int(params, interval, init_value, method,
                                diagnose, max_step, nsteps, rtol,
                                stiff=stiff, use_jac=use_jac,
                                adapt_step_size=adapt_step_size,
                                adapt_min_steps=adapt_min_steps)
    else:
        # Suydam-unstable last singularity: external stability is undefined;
        # return NaN placeholders.
        msg = ("Last singularity is suydam unstable. " +
               "Unable to deterime external instability at k = %.3f."
               % params['k'])
        print(msg)
        delta_w = None
        stable_external = None
        xi = np.asarray([np.nan])
        xi_der = np.asarray([np.nan])
        r_array = np.asarray([np.nan])
    return (stable_external, suydam_stable, delta_w, missing_end_params, xi,
            xi_der, r_array)
def newcomb_der(r, y, k, m, b_z_spl, b_z_prime_spl, b_theta_spl,
                b_theta_prime_spl, p_prime_spl, q_spl, q_prime_spl,
                f_func, g_func, beta_0):
    r"""
    Right-hand side of Newcomb's Euler-Lagrange equation written as two
    first-order ODEs.

    The state vector is :math:`y = (\xi, f \xi')` and the derivatives are

    .. math::
        \frac{d \xi}{dr} = \frac{y_1}{f}, \qquad
        \frac{d (f \xi')}{dr} = g\, y_0

    Parameters
    ----------
    r : float
        radius at which to evaluate the derivative
    y : ndarray (2)
        values of :math:`\xi` and :math:`f \xi'`
    k, m : float
        axial and azimuthal periodicity numbers
    b_z_spl, b_z_prime_spl, b_theta_spl, b_theta_prime_spl : tck tuples
        splines for the axial/azimuthal fields and their derivatives
        (the prime splines are accepted for interface compatibility but
        not evaluated here)
    p_prime_spl, q_spl, q_prime_spl : tck tuples
        splines for pressure derivative and safety factor
    f_func, g_func : callables
        return Newcomb's f and g coefficients
    beta_0 : float
        pressure ratio on axis

    Returns
    -------
    ndarray (2)
        derivatives of y

    Reference
    ---------
    Newcomb (1960) Hydromagnetic Stability of a diffuse linear pinch.
    """
    radius = atleast_1d(np.asarray(r)).ravel()
    b_z = splev(radius, b_z_spl)
    b_theta = splev(radius, b_theta_spl)
    f_value = f_func(r=r, k=k, m=m, b_z=b_z, b_theta=b_theta,
                     q=splev(radius, q_spl))
    g_value = g_func(r=r, k=k, m=m, b_z=b_z, b_theta=b_theta,
                     p_prime=splev(radius, p_prime_spl), beta_0=beta_0)
    derivative = np.zeros(2)
    derivative[0] = y[1] / f_value
    derivative[1] = y[0] * g_value
    return derivative
def newcomb_jac(r, y, k, m, b_z_spl, b_z_prime_spl, b_theta_spl,
                b_theta_prime_spl, p_prime_spl, q_spl, q_prime_spl,
                f_func, g_func, beta_0):
    r"""
    Jacobian of Newcomb's Euler-Lagrange system
    :math:`(\xi, f\xi') \mapsto (y_1/f,\; g\,y_0)`:

    .. math::
        J = \begin{pmatrix} 0 & 1/f \\ g & 0 \end{pmatrix}

    Arguments mirror :func:`newcomb_der`; ``y`` is unused because the
    system is linear in the state.
    """
    radius = atleast_1d(np.asarray(r)).ravel()
    b_z = splev(radius, b_z_spl)
    b_theta = splev(radius, b_theta_spl)
    jacobian = np.zeros((2, 2))
    jacobian[0, 1] = 1. / f_func(r=r, k=k, m=m, b_z=b_z, b_theta=b_theta,
                                 q=splev(radius, q_spl))
    jacobian[1, 0] = g_func(r=r, k=k, m=m, b_z=b_z, b_theta=b_theta,
                            p_prime=splev(radius, p_prime_spl),
                            beta_0=beta_0)
    return jacobian
def newcomb_der_for_odeint(y, r, *args):
    r"""
    Argument-order adapter for :func:`scipy.integrate.odeint`, which calls
    its derivative function as ``f(y, r, ...)`` rather than the
    ``f(r, y, ...)`` convention used by :func:`newcomb_der`.
    """
    return newcomb_der(r, y, *args)
def divide_by_f(r, xi_der_f, k, m, b_z_spl, b_theta_spl, q_spl, f_func):
    r"""
    Recover :math:`\xi'` from the second state component
    :math:`y[1] = f \xi'` by dividing out Newcomb's f evaluated at *r*.
    """
    radius = atleast_1d(np.asarray(r)).ravel()
    f_value = f_func(r=r, k=k, m=m,
                     b_z=splev(radius, b_z_spl),
                     b_theta=splev(radius, b_theta_spl),
                     q=splev(radius, q_spl))
    return xi_der_f / f_value
def intervals_with_singularties(suppress_output, **sing_params):
    r"""
    Locate non-geometric singularities and classify the last integration
    interval.

    Parameters
    ----------
    suppress_output : bool
        suppress diagnostic print statements
    **sing_params : dict
        forwarded to :func:`find_sing.identify_singularties` and
        :func:`check_suydam`; must include the interval bounds 'a' and 'b'

    Returns
    -------
    interval : list of float
        [start, end] of the last interval to integrate
    starts_with_sing : bool
        True if the interval starts at a non-geometric singularity
    suydam_stable : bool
        True if singularities were found and all are Suydam stable.
        NOTE(review): when *no* singularities are found this stays False
        even though nothing was checked -- confirm callers expect that
        (the m != -1 path in ``stability`` uses True in that case).
    suydam_unstable_interval : bool
        True if the last singularity itself is Suydam unstable
    """
    starts_with_sing = False
    suydam_unstable_interval = False
    suydam_stable = False
    interval = [sing_params['a'], sing_params['b']]
    (sings,
     sings_wo_0, intervals) = find_sing.identify_singularties(**sing_params)
    if not sings_wo_0.size == 0:
        if not suppress_output:
            print("Non-geometric singularties identified at r =", sings_wo_0)
        # Integrate only from the last singularity to the wall.
        interval = [sings_wo_0[-1], sing_params['b']]
        starts_with_sing = True
        # Check singularties for Suydam stability
        suydam_result = check_suydam(sings_wo_0, **sing_params)
        if suydam_result.size != 0:
            suydam_stable = False
            # Is the *last* singularity itself among the unstable ones?
            # (tolerance comparison)
            if sings_wo_0.size > 0 and abs(suydam_result[-1] - sings_wo_0[-1]) < 1e-8:
                if not suppress_output:
                    print("Profile is Suydam unstable at r =", suydam_result)
                suydam_unstable_interval = True
        else:
            suydam_stable = True
    return interval, starts_with_sing, suydam_stable, suydam_unstable_interval
def setup_initial_conditions(interval, starts_with_sing, offset,
                             suydam_offset, xi_given=[0., 1.], **params):
    r"""
    Return the (offset-adjusted) integration interval and the initial
    condition with which to start integrating it.

    Parameters
    ----------
    interval : list of float
        [start, end] of the interval to integrate; mutated in place when an
        offset is applied
    starts_with_sing : bool
        True if the interval starts at a non-geometric singularity
    offset : float
        offset applied after the geometric (r=0) singularity
    suydam_offset : float
        offset applied after a non-geometric singularity
    xi_given : sequence of two floats
        boundary condition for xi when the interval starts neither at r=0
        nor at a singularity (default is read-only, never mutated)
    **params : dict
        equilibrium parameters (splines 'b_z', 'b_theta', 'q', ...)

    Returns
    -------
    interval : list of float
        possibly offset-adjusted interval
    init_value : initial condition object produced by the ``init`` module
    """
    if interval[0] == 0.:
        # Geometric singularity at the axis: step off it by `offset` and use
        # the power-series (geometric) initial condition.
        interval[0] += offset
        if interval[0] > interval[1]:
            interval[0] = interval[1]
        init_params = dict(params)
        r_arr = atleast_1d(np.asarray(interval[0])).ravel()
        init_params.update({'b_z': splev(r_arr, params['b_z']),
                            'b_theta': splev(r_arr, params['b_theta']),
                            'q': splev(r_arr, params['q'])})
        init_value = init.init_geometric_sing(interval[0], **init_params)
    else:
        if starts_with_sing:
            # Non-geometric singularity: use the Frobenius small solution a
            # distance `suydam_offset` past the singularity.
            if interval[0] + suydam_offset > interval[1]:
                suydam_offset = interval[1] - interval[0]
            frob_params = {'offset': suydam_offset, 'b_z_spl': params['b_z'],
                           'b_z_prime_spl': params['b_z_prime'],
                           'b_theta_spl': params['b_theta'],
                           'b_theta_prime_spl': params['b_theta_prime'],
                           'q_spl': params['q'], 'f_func': new_f.newcomb_f_16,
                           'p_prime_spl': params['p_prime'],
                           'beta_0': params['beta_0'], 'r_sing': interval[0]}
            xi_given = frob.sing_small_solution(**frob_params)
            interval[0] += suydam_offset
            init_params = dict(params)
            r_arr = atleast_1d(np.asarray(interval[0])).ravel()
            init_params.update({'b_z': splev(r_arr, params['b_z']),
                                'b_theta': splev(r_arr, params['b_theta']),
                                'q': splev(r_arr, params['q'])})
            init_value = init.init_xi_given(xi_given, r_arr, **init_params)
        else:
            # Interval starts away from r=0 without a singularity: use the
            # caller-supplied xi_given as boundary condition.
            init_params = dict(params)
            # Drop 'r' so it does not collide with the positional argument
            # of init_xi_given; pop with default so a missing key is not an
            # error.
            init_params.pop('r', None)
            # BUG FIX: r_arr was previously referenced on this path without
            # ever being assigned, raising NameError.  Evaluate the splines
            # at the interval start.
            r_arr = atleast_1d(np.asarray(interval[0])).ravel()
            init_params.update({'b_z': splev(r_arr, params['b_z']),
                                'b_theta': splev(r_arr, params['b_theta']),
                                'q': splev(r_arr, params['q'])})
            init_value = init.init_xi_given(xi_given, interval[0], **init_params)
    return interval, init_value
def check_suydam(r, b_z_spl, b_z_prime_spl, b_theta_spl, b_theta_prime_spl,
                 p_prime_spl, beta_0, **kwargs):
    r"""
    Return the subset of radii `r` at which the Euler-Lagrange equation is
    singular and Suydam's criterion is violated.

    Parameters
    ----------
    r : ndarray of floats (M)
        positions at which f=0.
    b_z_spl : scipy spline object
        axial magnetic field
    b_z_prime_spl : scipy spline object
        derivative of the axial magnetic field
    b_theta_spl : scipy spline object
        azimuthal magnetic field
    b_theta_prime_spl : scipy spline object
        derivative of the azimuthal magnetic field
    p_prime_spl : scipy spline object
        derivative of pressure
    beta_0 : float
        beta on axis

    Returns
    -------
    unstable_r : ndarray of floats (N)
        positions at which the plasma column is Suydam unstable
    """
    suydam_params = dict(r=r, b_z_spl=b_z_spl, b_z_prime_spl=b_z_prime_spl,
                         b_theta_spl=b_theta_spl,
                         b_theta_prime_spl=b_theta_prime_spl,
                         p_prime_spl=p_prime_spl, beta_0=beta_0)
    stable = frob.sings_suydam_stable(**suydam_params)
    # Unstable radii are exactly those NOT flagged as stable.
    return r[np.invert(stable)]
def newcomb_int(params, interval, init_value, method, diagnose, max_step,
                nsteps, rtol, skip_external_stability=False, stiff=False,
                use_jac=True, adapt_step_size=False, adapt_min_steps=500):
    r"""
    Integrates newcomb's euler Lagrange equation in a given interval with lsoda
    either with the scipy.ode object oriented interface or with scipy.odeint.

    Parameters
    ----------
    params : dict
        equilibrium quantities (splines, k, m, beta_0, f/g functions, ...)
        consumed by the derivative functions.
    interval : sequence of two floats
        radial integration interval.
    init_value : sequence of two floats
        initial (xi, xi_der_f) values at interval[0].
    method : str
        'lsoda_odeint' selects scipy.integrate.odeint; any other value is
        forwarded to scipy.integrate.ode.set_integrator.
    diagnose : bool
        if True, integrate on a dense 250-point grid instead of just the
        interval endpoints.
    max_step, nsteps, rtol
        integrator tuning knobs; each is only forwarded when not None.
    skip_external_stability : bool
        if True, return (xi, xi_der, r_array) without the edge check.
    stiff : bool
        use the 'bdf' method with the scipy.ode interface.
    use_jac : bool
        supply the analytic Jacobian (newcomb_jac) to the integrator.
    adapt_step_size : bool
        split the integration at params['core_radius'] and use a smaller
        maximum step size outside the core.
    adapt_min_steps : int
        lower bound on the step count for the adapted outer interval.

    Returns
    -------
    (stable_external, delta_w, missing_end_params, xi, xi_der, r_array),
    or (xi, xi_der, r_array) when skip_external_stability is True and the
    integration reached the edge with finite values.
    """
    missing_end_params = None
    #print('k_bar', params['k'], 'interval:', interval[0], interval[1], init_value)
    # Extra arguments handed to the derivative / Jacobian functions.
    args = (params['k'], params['m'], params['b_z'], params['b_z_prime'],
            params['b_theta'], params['b_theta_prime'], params['p_prime'],
            params['q'], params['q_prime'], params['f_func'], params['g_func'],
            params['beta_0'])
    if adapt_step_size:
        # Outside the core use a max step one decade below the remaining
        # width 1 - core_radius, and scale the step count accordingly
        # (never below adapt_min_steps).
        interval_list = [interval]
        max_step_list = [10.**(np.floor(np.log10(1 - params['core_radius']))-1)]
        nsteps_for_list = 10.**(np.abs(np.log10(max_step_list[0]))+.5) * (1 - params['core_radius'])
        nsteps_for_list = adapt_min_steps if nsteps_for_list < adapt_min_steps else nsteps_for_list
        nsteps_list = [nsteps_for_list]
        if interval[0] < params['core_radius']:
            # Split at core_radius: caller settings inside the core,
            # adapted settings outside.
            interval_list.insert(0, [interval[0], params['core_radius']])
            interval_list[1] = [params['core_radius'], interval[1]]
            max_step_list.insert(0, max_step)
            nsteps_list.insert(0, nsteps)
    else:
        interval_list = [interval]
        max_step_list = [max_step]
        nsteps_list = [nsteps]
    # Integrate the sub-intervals in order, chaining the end state of one
    # into the initial condition of the next.
    for i, interval in enumerate(interval_list):
        max_step = max_step_list[i]
        nsteps = nsteps_list[i]
        if diagnose:
            r_array = np.linspace(interval[0], interval[1], 250)
        else:
            r_array = np.asarray(interval)
        if method == 'lsoda_odeint':
            if 'core_radius' in params.keys():
                # Profile transition radii are flagged as critical points
                # so lsoda does not step over them.
                transition_points = np.asarray([params['core_radius'],
                                                params['core_radius'] +
                                                params['transition_width'],
                                                params['core_radius'] +
                                                params['transition_width'] +
                                                params['skin_width']])
                tcrit = np.asarray(transition_points[np.less(interval[0], transition_points)])
            else:
                tcrit = None
            # Only forward tuning knobs that were actually provided.
            integrator_args = {}
            if rtol is not None:
                integrator_args['rtol'] = rtol
            if nsteps is not None:
                integrator_args['mxstep'] = nsteps
            if max_step is not None:
                integrator_args['hmax'] = max_step
            if use_jac:
                results = scipy.integrate.odeint(newcomb_der_for_odeint,
                                                 np.asarray(init_value),
                                                 np.asarray(r_array),
                                                 Dfun=newcomb_jac,
                                                 tcrit=tcrit,
                                                 args=args,
                                                 **integrator_args)
            else:
                results = scipy.integrate.odeint(newcomb_der_for_odeint,
                                                 np.asarray(init_value),
                                                 np.asarray(r_array),
                                                 tcrit=tcrit,
                                                 args=args,
                                                 **integrator_args)
            xi = np.asarray([results[:, 0]]).ravel()
            xi_der_f = np.asarray([results[:, 1]]).ravel()
        else:
            # Object-oriented scipy.integrate.ode interface.
            if use_jac:
                integrator = scipy.integrate.ode(newcomb_der, jac=newcomb_jac)
            else:
                integrator = scipy.integrate.ode(newcomb_der)
            integrator_args = {}
            if rtol is not None:
                integrator_args['rtol'] = rtol
            if nsteps is not None:
                integrator_args['nsteps'] = nsteps
            if max_step is not None:
                integrator_args['max_step'] = max_step
            if stiff:
                integrator_args['method'] = 'bdf'
            integrator.set_integrator(method, **integrator_args)
            integrator.set_f_params(*args)
            integrator.set_jac_params(*args)
            integrator.set_initial_value(init_value, interval[0])
            results = np.empty((r_array.size, 2))
            results[0] = init_value
            for i, r in enumerate(r_array[1:]):
                integrator.integrate(r)
                results[i+1, :] = integrator.y
                if not integrator.successful():
                    # Mark the remainder as invalid and stop stepping.
                    results[i+1:, :] = [np.nan, np.nan]
                    break
            xi = results[:, 0]
            xi_der_f = results[:, 1]
        # Chain into the next sub-interval.
        init_value = [xi[-1], xi_der_f[-1]]
    # Recover xi' from the integrated quantity f*xi'.
    xi_der = divide_by_f(r_array,
                         xi_der_f,
                         params['k'],
                         params['m'],
                         params['b_z'],
                         params['b_theta'],
                         params['q'],
                         params['f_func'])
    if np.all(np.isfinite(results[-1])):
        if skip_external_stability:
            return xi, xi_der, r_array
        (stable_external,
         delta_w) = ext.external_stability_from_analytic_condition(params,
                                                                   xi[-1],
                                                                   xi_der[-1],
                                                                   without_sing=True,
                                                                   dim_less=True)
        #print(delta_w)
    else:
        # Integration blew up before reaching the edge; report and flag
        # this parameter set as missing an end result.
        msg = ("Integration to plasma edge did not succeed. " +
               "Can not determine external stability at k = %.3f."
               % params['k'])
        print(msg)
        missing_end_params = params
        stable_external = None
        delta_w = None
    return (stable_external, delta_w, missing_end_params, xi, xi_der, r_array)
| 40.370019 | 100 | 0.558496 |
357846a51d6146b60c6d5b0d1cea91a44051ff36 | 507 | py | Python | fcm_app/migrations/0012_fcm_concept_id_in_fcm.py | gtsapelas/TRANSrisk_fcm_app | d9d0efc6d693461fda14a71481c6061756527dcb | [
"MIT"
] | null | null | null | fcm_app/migrations/0012_fcm_concept_id_in_fcm.py | gtsapelas/TRANSrisk_fcm_app | d9d0efc6d693461fda14a71481c6061756527dcb | [
"MIT"
] | null | null | null | fcm_app/migrations/0012_fcm_concept_id_in_fcm.py | gtsapelas/TRANSrisk_fcm_app | d9d0efc6d693461fda14a71481c6061756527dcb | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-11-10 09:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds the required `id_in_fcm`
    # CharField to the `fcm_concept` model.
    dependencies = [
        ('fcm_app', '0011_auto_20171025_1632'),
    ]
    operations = [
        migrations.AddField(
            model_name='fcm_concept',
            name='id_in_fcm',
            # 'dummy' is a one-off value used to backfill existing rows;
            # preserve_default=False removes it from the model state after
            # the migration runs.
            field=models.CharField(default='dummy', max_length=10),
            preserve_default=False,
        ),
    ]
| 23.045455 | 67 | 0.621302 |
4e9d7739bbe1b683a81b07f1175f23af561ed6e0 | 4,708 | py | Python | qlib/utils/serial.py | majiajue/qlib | 93088485c31508f74dc8e19dfc7a55e906ae9024 | [
"MIT"
] | 8,637 | 2020-09-21T05:07:34.000Z | 2022-03-31T10:02:54.000Z | qlib/utils/serial.py | Sainpse/qlib | 84103c7d43eaa0ff74118a4d05884f659f0548eb | [
"MIT"
] | 711 | 2020-09-21T03:32:44.000Z | 2022-03-31T22:18:42.000Z | qlib/utils/serial.py | Sainpse/qlib | 84103c7d43eaa0ff74118a4d05884f659f0548eb | [
"MIT"
] | 1,569 | 2020-09-21T10:21:08.000Z | 2022-03-31T01:14:12.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import pickle
import dill
from pathlib import Path
from typing import Union
from ..config import C
class Serializable:
    """
    Serializable will change the behaviors of pickle.

    - It only saves the state whose name **does not** start with `_`

    It provides a syntactic sugar for distinguishing the attributes which the
    user doesn't want to dump.
    - For example, a learnable DataHandler just wants to save the parameters
      without data when dumping to disk.
    """

    pickle_backend = "pickle"  # another optional value is "dill" which can pickle more things of python.
    default_dump_all = False  # if dump all things
    FLAG_KEY = "_qlib_serial_flag"  # marker used to break cycles in recursive config()

    def __init__(self):
        self._dump_all = self.default_dump_all
        self._exclude = []

    def __getstate__(self) -> dict:
        # Keep an attribute iff it is not explicitly excluded and either
        # dump_all is enabled or the name is public (no leading underscore).
        return {
            k: v for k, v in self.__dict__.items() if k not in self.exclude and (self.dump_all or not k.startswith("_"))
        }

    def __setstate__(self, state: dict):
        self.__dict__.update(state)

    @property
    def dump_all(self):
        """
        will the object dump all object
        """
        return getattr(self, "_dump_all", False)

    @property
    def exclude(self):
        """
        What attribute will not be dumped
        """
        return getattr(self, "_exclude", [])

    def config(self, dump_all: bool = None, exclude: list = None, recursive=False):
        """
        configure the serializable object

        Parameters
        ----------
        dump_all : bool
            will the object dump all object
        exclude : list
            What attribute will not be dumped
        recursive : bool
            will the configuration be recursive
        """
        params = {"dump_all": dump_all, "exclude": exclude}
        for k, v in params.items():
            if v is not None:
                attr_name = f"_{k}"
                setattr(self, attr_name, v)
        if recursive:
            for obj in self.__dict__.values():
                # set flag to prevent endless loop on cyclic references
                self.__dict__[self.FLAG_KEY] = True
                if isinstance(obj, Serializable) and self.FLAG_KEY not in obj.__dict__:
                    obj.config(**params, recursive=True)
                del self.__dict__[self.FLAG_KEY]

    def to_pickle(self, path: Union[Path, str], dump_all: bool = None, exclude: list = None):
        """
        Dump self to a pickle file.

        Args:
            path (Union[Path, str]): the path to dump
            dump_all (bool, optional): if need to dump all things. Defaults to None.
            exclude (list, optional): will exclude the attributes in this list when dumping. Defaults to None.
        """
        self.config(dump_all=dump_all, exclude=exclude)
        with Path(path).open("wb") as f:
            # pickle interface like backend; such as dill
            self.get_backend().dump(self, f, protocol=C.dump_protocol_version)

    @classmethod
    def load(cls, filepath):
        """
        Load the serializable class from a filepath.

        Args:
            filepath (str): the path of file

        Raises:
            TypeError: the pickled file must be an instance of `cls`

        Returns:
            `cls`: the instance of `cls`
        """
        with open(filepath, "rb") as f:
            # BUG FIX: local renamed from `object` (shadowed the builtin).
            obj = cls.get_backend().load(f)
        if isinstance(obj, cls):
            return obj
        else:
            # BUG FIX: the message previously used `type(cls)`, which always
            # rendered the metaclass (`<class 'type'>`) instead of the
            # expected class name.
            raise TypeError(f"The instance of {type(obj)} is not a valid `{cls.__name__}`!")

    @classmethod
    def get_backend(cls):
        """
        Return the real backend of a Serializable class. The pickle_backend value can be "pickle" or "dill".

        Returns:
            module: pickle or dill module based on pickle_backend
        """
        # NOTE: pickle interface like backend; such as dill
        if cls.pickle_backend == "pickle":
            return pickle
        elif cls.pickle_backend == "dill":
            return dill
        else:
            raise ValueError("Unknown pickle backend, please use 'pickle' or 'dill'.")

    @staticmethod
    def general_dump(obj, path: Union[Path, str]):
        """
        A general dumping method for object

        Parameters
        ----------
        obj : object
            the object to be dumped
        path : Union[Path, str]
            the target path the data will be dumped
        """
        path = Path(path)
        if isinstance(obj, Serializable):
            obj.to_pickle(path)
        else:
            with path.open("wb") as f:
                pickle.dump(obj, f, protocol=C.dump_protocol_version)
| 32.027211 | 120 | 0.585599 |
e89788556cf379d733894dc57d258eb0e23f4bb6 | 7,559 | py | Python | minemeld/ft/cofense.py | christian-recordedfuture/minemeld-core | 175e85fba07cec4f7da15a83426ebf04dbc0ff3e | [
"Apache-2.0"
] | 147 | 2016-07-22T18:15:49.000Z | 2022-03-26T23:32:44.000Z | minemeld/ft/cofense.py | christian-recordedfuture/minemeld-core | 175e85fba07cec4f7da15a83426ebf04dbc0ff3e | [
"Apache-2.0"
] | 167 | 2016-07-27T07:02:25.000Z | 2021-12-16T16:26:52.000Z | minemeld/ft/cofense.py | christian-recordedfuture/minemeld-core | 175e85fba07cec4f7da15a83426ebf04dbc0ff3e | [
"Apache-2.0"
] | 112 | 2016-07-22T07:14:29.000Z | 2022-03-24T18:43:12.000Z | # Copyright 2016 Palo Alto Networks, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module implements minemeld.ft.cofense.Triage, the Miner node for
Cofense Triage API.
"""
import os
import yaml
import requests
import itertools
import logging
import math
import pytz
from datetime import timedelta
from urlparse import urljoin
from . import basepoller
from .utils import interval_in_sec, EPOCH
LOG = logging.getLogger(__name__)
_TRIAGE_API_CALL_PATH = '/api/public/v1/triage_threat_indicators'
_API_USER_AGENT = 'Cofense Intelligence (minemeld)'
_RESULTS_PER_PAGE = 50
class Triage(basepoller.BasePollerFT):
    """Miner node that polls the Cofense Triage threat-indicator API.

    NOTE: this module is Python 2 code (`urlparse` import, `xrange`).
    """
    def configure(self):
        """Read node config; credentials may also come from the side config."""
        super(Triage, self).configure()
        self.polling_timeout = self.config.get('polling_timeout', 20)
        self.prefix = self.config.get('prefix', 'cofense')
        initial_interval = self.config.get('initial_interval', '30d')
        self.initial_interval = interval_in_sec(initial_interval)
        if self.initial_interval is None:
            # Fall back to the 30-day default when the format is invalid.
            LOG.error(
                '%s - wrong initial_interval format: %s',
                self.name, initial_interval
            )
            self.initial_interval = interval_in_sec('30d')
        self.source_name = self.config.get('source_name', 'cofense.triage')
        self.headers = {'user-agent': _API_USER_AGENT}
        # Maps Triage threat_level strings to MineMeld confidence values;
        # levels missing from this map are ignored in _process_item.
        self.confidence_map = self.config.get('confidence_map', {
            'Malicious': 100,
            'Suspicious': 50
        })
        self.verify_cert = self.config.get('verify_cert', True)
        self.api_domain = self.config.get('api_domain', None)
        self.api_account = self.config.get('api_account', None)
        self.api_token = self.config.get('api_token', None)
        self.side_config_path = self.config.get('side_config', None)
        if self.side_config_path is None:
            self.side_config_path = os.path.join(
                os.environ['MM_CONFIG_DIR'],
                '%s_side_config.yml' % self.name
            )
        self._load_side_config()
    def _load_side_config(self):
        """Overlay credentials/verify flag from the YAML side config, if present."""
        try:
            with open(self.side_config_path, 'r') as f:
                sconfig = yaml.safe_load(f)
        except Exception as e:
            # Side config is optional: log and keep the current settings.
            LOG.error('%s - Error loading side config: %s', self.name, str(e))
            return
        api_domain = sconfig.get('api_domain', None)
        if api_domain is not None:
            self.api_domain = api_domain
            LOG.info('%s - API Domain set', self.name)
        api_account = sconfig.get('api_account', None)
        if api_account is not None:
            self.api_account = api_account
            LOG.info('%s - API Account set', self.name)
        api_token = sconfig.get('api_token', None)
        if api_token is not None:
            self.api_token = api_token
            LOG.info('%s - API Token set', self.name)
        verify_cert = sconfig.get('verify_cert', None)
        if verify_cert is not None:
            self.verify_cert = verify_cert
            LOG.info('%s - Verify Cert set', self.name)
    def _process_item(self, item):
        """Convert one Triage API entry into [[indicator, value]] (or [])."""
        LOG.debug('{} - item: {}'.format(self.name, item))
        report_id = item.get('report_id', None)
        type_ = item.get('threat_key', None)
        indicator = item.get('threat_value', None)
        level = item.get('threat_level', None)
        if type_ is None or indicator is None:
            LOG.error('{} - entry with no value or type: {!r}'.format(self.name, item))
            return []
        if level not in self.confidence_map:
            LOG.info('{} - threat_level {} not in cofidence map: indicator ignored'.format(self.name, level))
            return []
        # Normalize the Triage threat_key to a MineMeld indicator type.
        # (URL/Domain map to themselves; hashes are lower-cased.)
        if type_ == 'URL':
            type_ = 'URL'
        elif type_ == 'Domain':
            type_ = 'Domain'
        elif type_ == 'MD5':
            type_ = 'md5'
        elif type_ == 'SHA256':
            type_ = 'sha256'
        else:
            LOG.error('{} - unknown indicator type: {!r}'.format(self.name, item))
            return []
        value = dict(type=type_)
        if report_id is not None:
            value['{}_report_id'.format(self.prefix)] = report_id
        if level is not None:
            value['{}_threat_level'.format(self.prefix)] = level
            value['confidence'] = self.confidence_map[level]
        return [[indicator, value]]
    def _build_iterator(self, now):
        """Return a generator over all Triage entries since the last run.

        `now` and `last_successful_run` are epoch milliseconds.
        """
        if self.api_domain is None or self.api_account is None or self.api_token is None:
            raise RuntimeError('%s - credentials not set' % self.name)
        poll_start = self.last_successful_run
        if self.last_successful_run is None:
            # First run: look back initial_interval seconds (ms conversion).
            poll_start = now - (self.initial_interval * 1000)
        dt_poll_start = EPOCH + timedelta(milliseconds=poll_start)
        LOG.debug('{} - polling start: {}'.format(self.name, dt_poll_start))
        num_of_pages = self._check_number_of_pages(dt_poll_start)
        LOG.info("{} - polling: start date: {!r} number of pages: {!r}".format(
            self.name, dt_poll_start, num_of_pages
        ))
        return self._iterate_over_pages(dt_poll_start, num_of_pages)
    def _check_number_of_pages(self, dt_poll_start):
        """Ask the API for the total entry count and derive the page count."""
        r = self._perform_api_call(dt_poll_start)
        total_entries = r.headers['Total']
        LOG.info('{} - polling total entries: {}'.format(self.name, total_entries))
        # ceil(total / page_size); float() keeps true division under Python 2.
        return int(math.ceil(int(total_entries)/float(_RESULTS_PER_PAGE)))
    def _iterate_over_pages(self, start_date, num_of_pages):
        """Yield every entry from pages 1..num_of_pages (API pages are 1-based)."""
        for page_num in xrange(1, num_of_pages+1):
            r = self._perform_api_call(start_date, page_num)
            processed_data = r.json()
            for entry in processed_data:
                yield entry
    def _perform_api_call(self, start_date, page=None):
        """GET one page of threat indicators; raises on HTTP errors."""
        headers = self.headers.copy()
        headers['Authorization'] = 'Token token={}:{}'.format(self.api_account, self.api_token)
        params = {
            "per_page": _RESULTS_PER_PAGE,
            "start_date": start_date.strftime('%Y-%m-%dT%H:%M')
        }
        if page is not None:
            params['page'] = page
        request_url = urljoin(self.api_domain, _TRIAGE_API_CALL_PATH)
        r = requests.get(request_url,
                         params=params,
                         verify=self.verify_cert,
                         headers=headers
                         )
        r.raise_for_status()
        return r
    def hup(self, source=None):
        """SIGHUP handler: reload the side config before delegating."""
        LOG.info('%s - hup received, reload side config', self.name)
        self._load_side_config()
        super(Triage, self).hup(source)
    @staticmethod
    def gc(name, config=None):
        """Garbage-collect node state, including the side-config file."""
        basepoller.BasePollerFT.gc(name, config=config)
        side_config_path = None
        if config is not None:
            side_config_path = config.get('side_config', None)
        if side_config_path is None:
            side_config_path = os.path.join(
                os.environ['MM_CONFIG_DIR'],
                '{}_side_config.yml'.format(name)
            )
        try:
            os.remove(side_config_path)
        except:
            # Best-effort cleanup: the file may not exist.  NOTE(review):
            # the bare except also swallows unrelated errors.
            pass
| 34.20362 | 109 | 0.617807 |
71bf07895d851a03c1509020f87b12e9d0c73feb | 5,941 | py | Python | leetcode/models/graphql_question_contributor.py | prius/python-leetcode | 5e8bbee0f941ece8188853703d47aad992191456 | [
"MIT"
] | 25 | 2021-09-27T20:13:33.000Z | 2022-03-25T07:12:19.000Z | leetcode/models/graphql_question_contributor.py | iraghavr/python-leetcode | 5e8bbee0f941ece8188853703d47aad992191456 | [
"MIT"
] | 3 | 2021-12-26T00:22:41.000Z | 2022-03-06T17:30:24.000Z | leetcode/models/graphql_question_contributor.py | iraghavr/python-leetcode | 5e8bbee0f941ece8188853703d47aad992191456 | [
"MIT"
] | 6 | 2021-10-16T04:51:45.000Z | 2022-02-24T10:53:55.000Z | # coding: utf-8
"""
Leetcode API
Leetcode API implementation. # noqa: E501
OpenAPI spec version: 1.0.1-1
Contact: pv.safronov@gmail.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import annotations
import pprint
import re # noqa: F401
import six
class GraphqlQuestionContributor(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Attribute name -> declared swagger type (used by to_dict/serializers).
    swagger_types = {
        "username": "str",
        "profile_url": "str",
        "avatar_url": "str",
        "typename": "str",
    }

    # Attribute name -> JSON key in the GraphQL response.
    attribute_map = {
        "username": "username",
        "profile_url": "profileUrl",
        "avatar_url": "avatarUrl",
        "typename": "__typename",
    }

    def __init__(
        self, username=None, profile_url=None, avatar_url=None, typename=None
    ) -> None:  # noqa: E501
        """GraphqlQuestionContributor - a model defined in Swagger"""  # noqa: E501
        self._username = None
        self._profile_url = None
        self._avatar_url = None
        self._typename = None
        self.discriminator = None
        # The property setters below enforce the required (non-None) fields.
        self.username = username
        self.profile_url = profile_url
        self.avatar_url = avatar_url
        if typename is not None:
            self.typename = typename

    @property
    def username(self):
        """Gets the username of this GraphqlQuestionContributor.  # noqa: E501


        :return: The username of this GraphqlQuestionContributor.  # noqa: E501
        :rtype: str
        """
        return self._username

    @username.setter
    def username(self, username):
        """Sets the username of this GraphqlQuestionContributor.


        :param username: The username of this GraphqlQuestionContributor.  # noqa: E501
        :type: str
        """
        if username is None:
            raise ValueError(
                "Invalid value for `username`, must not be `None`"
            )  # noqa: E501

        self._username = username

    @property
    def profile_url(self):
        """Gets the profile_url of this GraphqlQuestionContributor.  # noqa: E501


        :return: The profile_url of this GraphqlQuestionContributor.  # noqa: E501
        :rtype: str
        """
        return self._profile_url

    @profile_url.setter
    def profile_url(self, profile_url):
        """Sets the profile_url of this GraphqlQuestionContributor.


        :param profile_url: The profile_url of this GraphqlQuestionContributor.  # noqa: E501
        :type: str
        """
        if profile_url is None:
            raise ValueError(
                "Invalid value for `profile_url`, must not be `None`"
            )  # noqa: E501

        self._profile_url = profile_url

    @property
    def avatar_url(self):
        """Gets the avatar_url of this GraphqlQuestionContributor.  # noqa: E501


        :return: The avatar_url of this GraphqlQuestionContributor.  # noqa: E501
        :rtype: str
        """
        return self._avatar_url

    @avatar_url.setter
    def avatar_url(self, avatar_url):
        """Sets the avatar_url of this GraphqlQuestionContributor.


        :param avatar_url: The avatar_url of this GraphqlQuestionContributor.  # noqa: E501
        :type: str
        """
        if avatar_url is None:
            raise ValueError(
                "Invalid value for `avatar_url`, must not be `None`"
            )  # noqa: E501

        self._avatar_url = avatar_url

    @property
    def typename(self):
        """Gets the typename of this GraphqlQuestionContributor.  # noqa: E501


        :return: The typename of this GraphqlQuestionContributor.  # noqa: E501
        :rtype: str
        """
        return self._typename

    @typename.setter
    def typename(self, typename):
        """Sets the typename of this GraphqlQuestionContributor.


        :param typename: The typename of this GraphqlQuestionContributor.  # noqa: E501
        :type: str
        """
        # typename is optional, so no None check here.
        self._typename = typename

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialize nested models, lists and dicts; plain values
        # are copied as-is.
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(
                    map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)
                )
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(
                    map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict")
                        else item,
                        value.items(),
                    )
                )
            else:
                result[attr] = value
        if issubclass(GraphqlQuestionContributor, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self) -> str:
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self) -> str:
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other: GraphqlQuestionContributor) -> bool:
        """Returns true if both objects are equal"""
        if not isinstance(other, GraphqlQuestionContributor):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other) -> bool:
        """Returns true if both objects are not equal"""
        return not self == other
| 28.980488 | 93 | 0.581215 |
70a312b19b2631d0b0df1c351eb2fb56f6737627 | 746 | py | Python | renewer/aws.py | cloud-gov/legacy-domain-certificate-renewer | 6b008fdc8e1277cfe4449626e6c488d11fc4857c | [
"CC0-1.0"
] | 1 | 2021-11-16T17:25:21.000Z | 2021-11-16T17:25:21.000Z | renewer/aws.py | cloud-gov/legacy-domain-certificate-renewer | 6b008fdc8e1277cfe4449626e6c488d11fc4857c | [
"CC0-1.0"
] | 1 | 2021-12-22T19:04:34.000Z | 2021-12-22T19:04:34.000Z | renewer/aws.py | cloud-gov/legacy-domain-certificate-renewer | 6b008fdc8e1277cfe4449626e6c488d11fc4857c | [
"CC0-1.0"
] | null | null | null | import boto3
from renewer.extensions import config
# Session for the AWS commercial partition (credentials from renewer config).
commercial_session = boto3.Session(
    region_name=config.AWS_COMMERCIAL_REGION,
    aws_access_key_id=config.AWS_COMMERCIAL_ACCESS_KEY_ID,
    aws_secret_access_key=config.AWS_COMMERCIAL_SECRET_ACCESS_KEY,
)
# Session for the AWS GovCloud partition.
govcloud_session = boto3.Session(
    region_name=config.AWS_GOVCLOUD_REGION,
    aws_access_key_id=config.AWS_GOVCLOUD_ACCESS_KEY_ID,
    aws_secret_access_key=config.AWS_GOVCLOUD_SECRET_ACCESS_KEY,
)
# Shared, module-level service clients: CloudFront only exists commercially,
# ALB is used from GovCloud; IAM and S3 clients exist in both partitions.
alb = govcloud_session.client("elbv2")
cloudfront = commercial_session.client("cloudfront")
iam_govcloud = govcloud_session.client("iam")
iam_commercial = commercial_session.client("iam")
s3_govcloud = govcloud_session.client("s3")
s3_commercial = commercial_session.client("s3")
| 33.909091 | 66 | 0.828418 |
7f76211ceda3f36a544cd20b011a2a4d28a91cb9 | 247,991 | py | Python | tests/test_browser.py | sofu456/emscripten | 1108a8aae24aa2a18c64172e9d7d828b79e6b63f | [
"MIT"
] | 1 | 2021-06-15T20:40:30.000Z | 2021-06-15T20:40:30.000Z | tests/test_browser.py | hanbitmyths/emscripten | e642863ed84f8bfef2ff2c3a45b87dc6b1f0b4f6 | [
"MIT"
] | null | null | null | tests/test_browser.py | hanbitmyths/emscripten | e642863ed84f8bfef2ff2c3a45b87dc6b1f0b4f6 | [
"MIT"
] | null | null | null | # coding=utf-8
# Copyright 2013 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
import argparse
import json
import multiprocessing
import os
import random
import shlex
import shutil
import subprocess
import time
import unittest
import webbrowser
import zlib
from http.server import BaseHTTPRequestHandler, HTTPServer
from urllib.request import urlopen
from runner import BrowserCore, RunnerCore, path_from_root, has_browser, EMTEST_BROWSER, Reporting
from runner import create_file, parameterized, ensure_dir, disabled, test_file, WEBIDL_BINDER
from tools import building
from tools import shared
from tools import system_libs
from tools.shared import EMCC, WINDOWS, FILE_PACKAGER, PIPE
from tools.shared import try_delete, config
def test_chunked_synchronous_xhr_server(support_byte_ranges, chunkSize, data, checksum, port):
  """Serve `data` over HTTP on localhost:11111 for the chunked-XHR browser test.

  Handles a fixed number of requests and then returns.  `support_byte_ranges`
  selects whether Range requests are honored; `port` is the origin port the
  browser page is served from (used in the CORS allow-origin header).
  `chunkSize` and `checksum` are unused here but kept for interface
  compatibility with the caller.
  """
  class ChunkedServerHandler(BaseHTTPRequestHandler):
    # NOTE: handler methods use `s` in place of the conventional `self`.
    def sendheaders(s, extra=(), length=len(data)):
      # BUG FIX: `extra` used to default to a mutable `[]`; an immutable
      # tuple has identical behavior (it is only iterated) without the
      # shared-mutable-default pitfall.
      s.send_response(200)
      s.send_header("Content-Length", str(length))
      s.send_header("Access-Control-Allow-Origin", "http://localhost:%s" % port)
      s.send_header('Cross-Origin-Resource-Policy', 'cross-origin')
      s.send_header('Cache-Control', 'no-cache, no-store, must-revalidate')
      s.send_header("Access-Control-Expose-Headers", "Content-Length, Accept-Ranges")
      s.send_header("Content-type", "application/octet-stream")
      if support_byte_ranges:
        s.send_header("Accept-Ranges", "bytes")
      for i in extra:
        s.send_header(i[0], i[1])
      s.end_headers()

    def do_HEAD(s):
      s.sendheaders()

    def do_OPTIONS(s):
      # CORS preflight response: advertise the Range header, empty body.
      s.sendheaders([("Access-Control-Allow-Headers", "Range")], 0)

    def do_GET(s):
      if s.path == '/':
        s.sendheaders()
      elif not support_byte_ranges:
        s.sendheaders()
        s.wfile.write(data)
      else:
        # Serve the requested byte range, clamped to the data size.
        start, end = s.headers.get("range").split("=")[1].split("-")
        start = int(start)
        end = int(end)
        end = min(len(data) - 1, end)
        length = end - start + 1
        s.sendheaders([], length)
        s.wfile.write(data[start:end + 1])

  # CORS preflight makes OPTIONS requests which we need to account for.
  expectedConns = 22
  httpd = HTTPServer(('localhost', 11111), ChunkedServerHandler)
  for i in range(expectedConns + 1):
    httpd.handle_request()
def shell_with_script(shell_file, output_file, replacement):
  """Copy a shell template from src/, substituting the {{{ SCRIPT }}} placeholder."""
  with open(path_from_root('src', shell_file)) as template, open(output_file, 'w') as out:
    out.write(template.read().replace('{{{ SCRIPT }}}', replacement))
def is_chrome():
  """True-ish when the configured test browser looks like Chrome/Chromium."""
  browser_cmd = EMTEST_BROWSER
  # An unset browser short-circuits (returns the falsy value itself),
  # matching the original truthiness exactly.
  return browser_cmd and 'chrom' in browser_cmd.lower()
def no_chrome(note='chrome is not supported'):
  """Decorator factory: skip the decorated test when running under Chrome."""
  if not is_chrome():
    # Not Chrome: return an identity decorator.
    return lambda f: f
  return unittest.skip(note)
def is_firefox():
  """True-ish when the configured test browser looks like Firefox."""
  browser_cmd = EMTEST_BROWSER
  # Falsy (unset) EMTEST_BROWSER short-circuits, as in the original.
  return browser_cmd and 'firefox' in browser_cmd.lower()
def no_firefox(note='firefox is not supported'):
  """Decorator factory: skip the decorated test when running under Firefox."""
  if not is_firefox():
    # Not Firefox: return an identity decorator.
    return lambda f: f
  return unittest.skip(note)
def no_swiftshader(f):
  """Test decorator: skip when Chrome is run with the SwiftShader GL backend.

  CONSISTENCY FIX: the wrapper now accepts and forwards ``*args``/``**kwargs``
  like requires_threads does, so it composes with parameterized tests
  (previously it only accepted ``self``).
  """
  assert callable(f)

  def decorated(self, *args, **kwargs):
    if is_chrome() and '--use-gl=swiftshader' in EMTEST_BROWSER:
      self.skipTest('not compatible with swiftshader')
    return f(self, *args, **kwargs)

  return decorated
def requires_threads(f):
  """Test decorator: skip when the environment declares no thread support."""
  assert callable(f)

  def wrapper(self, *args, **kwargs):
    lacks_threads = os.environ.get('EMTEST_LACKS_THREAD_SUPPORT')
    if lacks_threads:
      self.skipTest('EMTEST_LACKS_THREAD_SUPPORT is set')
    return f(self, *args, **kwargs)

  return wrapper
def requires_asmfs(f):
  """Test decorator: unconditionally skip ASMFS tests (currently unmaintained)."""
  assert callable(f)

  def wrapper(self, *args, **kwargs):
    # https://github.com/emscripten-core/emscripten/issues/9534
    self.skipTest('ASMFS is looking for a maintainer')
    return f(self, *args, **kwargs)

  return wrapper
# Today we only support the wasm backend so any tests that is disabled under the llvm
# backend is always disabled.
# TODO(sbc): Investigate all tests with this decorator and either fix of remove the test.
def no_wasm_backend(note=''):
  """Always-skip decorator factory (the wasm backend is the only backend now)."""
  # A callable `note` means the decorator was used bare (@no_wasm_backend
  # without parentheses), which would silently skip nothing — reject it.
  assert not callable(note)
  skip_decorator = unittest.skip(note)
  return skip_decorator
# Skip decorators driven by the test environment / the selected browser.
requires_graphics_hardware = unittest.skipIf(os.getenv('EMTEST_LACKS_GRAPHICS_HARDWARE'), "This test requires graphics hardware")
requires_sound_hardware = unittest.skipIf(os.getenv('EMTEST_LACKS_SOUND_HARDWARE'), "This test requires sound hardware")
requires_sync_compilation = unittest.skipIf(is_chrome(), "This test requires synchronous compilation, which does not work in Chrome (except for tiny wasms)")
requires_offscreen_canvas = unittest.skipIf(os.getenv('EMTEST_LACKS_OFFSCREEN_CANVAS'), "This test requires a browser with OffscreenCanvas")
class browser(BrowserCore):
@classmethod
def setUpClass(cls):
super(browser, cls).setUpClass()
cls.browser_timeout = 60
print()
print('Running the browser tests. Make sure the browser allows popups from localhost.')
print()
def setUp(self):
super(BrowserCore, self).setUp()
# avoid various compiler warnings that many browser tests currently generate
self.emcc_args += [
'-Wno-pointer-sign',
'-Wno-int-conversion',
]
def test_sdl1_in_emscripten_nonstrict_mode(self):
if 'EMCC_STRICT' in os.environ and int(os.environ['EMCC_STRICT']):
self.skipTest('This test requires being run in non-strict mode (EMCC_STRICT env. variable unset)')
# TODO: This test is verifying behavior that will be deprecated at some point in the future, remove this test once
# system JS libraries are no longer automatically linked to anymore.
self.btest('hello_world_sdl.cpp', reference='htmltest.png')
def test_sdl1(self):
self.btest('hello_world_sdl.cpp', reference='htmltest.png', args=['-lSDL', '-lGL'])
self.btest('hello_world_sdl.cpp', reference='htmltest.png', args=['-s', 'USE_SDL', '-lGL']) # is the default anyhow
# Deliberately named as test_zzz_* to make this test the last one
# as this test may take the focus away from the main test window
# by opening a new window and possibly not closing it.
  def test_zzz_html_source_map(self):
    """Build with -g4 and open the page so a human can verify source maps.

    This is a semi-manual test: it opens a real browser window and prints
    instructions for bisecting, rather than asserting anything in-page.
    """
    if not has_browser():
      self.skipTest('need a browser')
    cpp_file = 'src.cpp'
    html_file = 'src.html'
    # browsers will try to 'guess' the corresponding original line if a
    # generated line is unmapped, so if we want to make sure that our
    # numbering is correct, we need to provide a couple of 'possible wrong
    # answers'. thus, we add some printf calls so that the cpp file gets
    # multiple mapped lines. in other words, if the program consists of a
    # single 'throw' statement, browsers may just map any thrown exception to
    # that line, because it will be the only mapped line.
    with open(cpp_file, 'w') as f:
      f.write(r'''
      #include <cstdio>
      int main() {
        printf("Starting test\n");
        try {
          throw 42; // line 8
        } catch (int e) { }
        printf("done\n");
        return 0;
      }
      ''')
    # use relative paths when calling emcc, because file:// URIs can only load
    # sourceContent when the maps are relative paths
    try_delete(html_file)
    try_delete(html_file + '.map')
    self.compile_btest(['src.cpp', '-o', 'src.html', '-g4'])
    self.assertExists(html_file)
    self.assertExists('src.wasm.map')
    webbrowser.open_new('file://' + html_file)
    print('''
If manually bisecting:
  Check that you see src.cpp among the page sources.
  Even better, add a breakpoint, e.g. on the printf, then reload, then step
through and see the print (best to run with EMTEST_SAVE_DIR=1 for the reload).
''')
def test_emscripten_log(self):
self.btest_exit(test_file('emscripten_log', 'emscripten_log.cpp'),
args=['--pre-js', path_from_root('src', 'emscripten-source-map.min.js'), '-g4'])
def test_preload_file(self):
    """Exercise --preload-file end to end.

    Covers: plain and `src@dst` mappings (relative and absolute sources),
    '@@' escaping of literal '@' characters, tricky characters in filenames,
    whole-directory packaging with --exclude-file, emitting output into a
    subdirectory, and manual preloading via FS.createPreloadedFile.

    Fix: fixture files are now written through a `with open(...)` helper so
    the handles are closed deterministically; the previous
    `open(path, 'w').write(...)` idiom leaked them until GC (and emits
    ResourceWarning under -W error).
    """
    def write_test_data(path):
        # 'with' closes the handle promptly.
        with open(path, 'w') as f:
            f.write('load me right before running the code please')

    absolute_src_path = os.path.join(self.get_dir(), 'somefile.txt').replace('\\', '/')
    write_test_data(absolute_src_path)
    absolute_src_path2 = os.path.join(self.get_dir(), '.somefile.txt').replace('\\', '/')
    write_test_data(absolute_src_path2)
    absolute_src_path3 = os.path.join(self.get_dir(), 'some@file.txt').replace('\\', '/')
    write_test_data(absolute_src_path3)

    def make_main(path):
        # Emit a main.cpp that opens `path` on the Emscripten VFS and reports
        # whether the preloaded contents arrived intact.
        print('make main at', path)
        path = path.replace('\\', '\\\\').replace('"', '\\"')  # Escape tricky path name for use inside a C string.
        create_file('main.cpp', r'''
          #include <stdio.h>
          #include <string.h>
          #include <emscripten.h>
          int main() {
            FILE *f = fopen("%s", "r");
            char buf[100];
            fread(buf, 1, 20, f);
            buf[20] = 0;
            fclose(f);
            printf("|%%s|\n", buf);
            int result = !strcmp("load me right before", buf);
            REPORT_RESULT(result);
            return 0;
          }
        ''' % path)

    test_cases = [
        # (source preload-file string, file on target FS to load)
        ("somefile.txt", "somefile.txt"),
        (".somefile.txt@somefile.txt", "somefile.txt"),
        ("./somefile.txt", "somefile.txt"),
        ("somefile.txt@file.txt", "file.txt"),
        ("./somefile.txt@file.txt", "file.txt"),
        ("./somefile.txt@./file.txt", "file.txt"),
        ("somefile.txt@/file.txt", "file.txt"),
        ("somefile.txt@/", "somefile.txt"),
        (absolute_src_path + "@file.txt", "file.txt"),
        (absolute_src_path + "@/file.txt", "file.txt"),
        (absolute_src_path + "@/", "somefile.txt"),
        ("somefile.txt@/directory/file.txt", "/directory/file.txt"),
        ("somefile.txt@/directory/file.txt", "directory/file.txt"),
        (absolute_src_path + "@/directory/file.txt", "directory/file.txt"),
        ("some@@file.txt@other.txt", "other.txt"),
        ("some@@file.txt@some@@otherfile.txt", "some@otherfile.txt")]

    for srcpath, dstpath in test_cases:
        print('Testing', srcpath, dstpath)
        make_main(dstpath)
        self.compile_btest(['main.cpp', '--preload-file', srcpath, '-o', 'page.html'])
        self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?1')

    if WINDOWS:
        # On Windows, the following non-alphanumeric non-control code ASCII characters are supported.
        # The characters <, >, ", |, ?, * are not allowed, because the Windows filesystem doesn't support those.
        tricky_filename = '!#$%&\'()+,-. ;=@[]^_`{}~.txt'
    else:
        # All 7-bit non-alphanumeric non-control code ASCII characters except /, : and \ are allowed.
        tricky_filename = '!#$%&\'()+,-. ;=@[]^_`{}~ "*<>?|.txt'
    write_test_data(os.path.join(self.get_dir(), tricky_filename))
    make_main(tricky_filename)
    # As an Emscripten-specific feature, the character '@' must be escaped in the form '@@' to not confuse with the 'src@dst' notation.
    self.compile_btest(['main.cpp', '--preload-file', tricky_filename.replace('@', '@@'), '-o', 'page.html'])
    self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?1')

    # By absolute path
    make_main('somefile.txt')  # absolute becomes relative
    self.compile_btest(['main.cpp', '--preload-file', absolute_src_path, '-o', 'page.html'])
    self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?1')

    # Test subdirectory handling with asset packaging.
    try_delete('assets')
    ensure_dir('assets/sub/asset1/'.replace('\\', '/'))
    ensure_dir('assets/sub/asset1/.git'.replace('\\', '/'))  # Test adding directory that shouldn't exist.
    ensure_dir('assets/sub/asset2/'.replace('\\', '/'))
    create_file('assets/sub/asset1/file1.txt', 'load me right before running the code please')
    create_file('assets/sub/asset1/.git/shouldnt_be_embedded.txt', 'this file should not get embedded')
    create_file('assets/sub/asset2/file2.txt', 'load me right before running the code please')
    absolute_assets_src_path = 'assets'.replace('\\', '/')

    def make_main_two_files(path1, path2, nonexistingpath):
        # main.cpp that checks path1 holds the expected data, path2 exists,
        # and nonexistingpath was excluded from the package.
        create_file('main.cpp', r'''
          #include <stdio.h>
          #include <string.h>
          #include <emscripten.h>
          int main() {
            FILE *f = fopen("%s", "r");
            char buf[100];
            fread(buf, 1, 20, f);
            buf[20] = 0;
            fclose(f);
            printf("|%%s|\n", buf);
            int result = !strcmp("load me right before", buf);

            f = fopen("%s", "r");
            if (f == NULL)
              result = 0;
            fclose(f);

            f = fopen("%s", "r");
            if (f != NULL)
              result = 0;

            REPORT_RESULT(result);
            return 0;
          }
        ''' % (path1, path2, nonexistingpath))

    test_cases = [
        # (source directory to embed, file1 on target FS to load, file2 on target FS to load, name of a file that *shouldn't* exist on VFS)
        ("assets", "assets/sub/asset1/file1.txt", "assets/sub/asset2/file2.txt", "assets/sub/asset1/.git/shouldnt_be_embedded.txt"),
        ("assets/", "assets/sub/asset1/file1.txt", "assets/sub/asset2/file2.txt", "assets/sub/asset1/.git/shouldnt_be_embedded.txt"),
        ("assets@/", "/sub/asset1/file1.txt", "/sub/asset2/file2.txt", "/sub/asset1/.git/shouldnt_be_embedded.txt"),
        ("assets/@/", "/sub/asset1/file1.txt", "/sub/asset2/file2.txt", "/sub/asset1/.git/shouldnt_be_embedded.txt"),
        ("assets@./", "/sub/asset1/file1.txt", "/sub/asset2/file2.txt", "/sub/asset1/.git/shouldnt_be_embedded.txt"),
        (absolute_assets_src_path + "@/", "/sub/asset1/file1.txt", "/sub/asset2/file2.txt", "/sub/asset1/.git/shouldnt_be_embedded.txt"),
        (absolute_assets_src_path + "@/assets", "/assets/sub/asset1/file1.txt", "/assets/sub/asset2/file2.txt", "assets/sub/asset1/.git/shouldnt_be_embedded.txt")]

    for test in test_cases:
        (srcpath, dstpath1, dstpath2, nonexistingpath) = test
        make_main_two_files(dstpath1, dstpath2, nonexistingpath)
        print(srcpath)
        self.compile_btest(['main.cpp', '--preload-file', srcpath, '--exclude-file', '*/.*', '-o', 'page.html'])
        self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?1')

    # Should still work with -o subdir/..
    make_main('somefile.txt')  # absolute becomes relative
    ensure_dir('dirrey')
    self.compile_btest(['main.cpp', '--preload-file', absolute_src_path, '-o', 'dirrey/page.html'])
    self.run_browser('dirrey/page.html', 'You should see |load me right before|.', '/report_result?1')

    # With FS.preloadFile
    create_file('pre.js', '''
      Module.preRun = function() {
        FS.createPreloadedFile('/', 'someotherfile.txt', 'somefile.txt', true, false); // we need --use-preload-plugins for this.
      };
    ''')
    make_main('someotherfile.txt')
    self.compile_btest(['main.cpp', '--pre-js', 'pre.js', '-o', 'page.html', '--use-preload-plugins'])
    self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?1')
# Tests that user .html shell files can manually download .data files created with --preload-file cmdline.
def test_preload_file_with_manual_data_download(self):
    """Build with --preload-file but let a custom shell page fetch the .data
    bundle itself (see manual_download_data.html)."""
    src = test_file('manual_download_data.cpp')

    create_file('file.txt', '''Hello!''')

    self.compile_btest([src, '-o', 'manual_download_data.js', '--preload-file', 'file.txt@/file.txt'])
    shutil.copyfile(test_file('manual_download_data.html'), 'manual_download_data.html')
    self.run_browser('manual_download_data.html', 'Hello!', '/report_result?1')
# Tests that if the output files have single or double quotes in them, that it will be handled by correctly escaping the names.
def test_output_file_escaping(self):
    """Build into directories/files whose names contain quote characters and
    verify the file packager and compiler escape them correctly.

    Fix: fixture files are written with `with open(...)` so handles close
    deterministically (the old `open(...).write(...)` pattern leaked them).
    """
    tricky_part = '\'' if WINDOWS else '\' and \"'  # On Windows, files/directories may not contain a double quote character. On non-Windowses they can, so test that.

    d = 'dir with ' + tricky_part
    abs_d = os.path.join(self.get_dir(), d)
    ensure_dir(abs_d)
    txt = 'file with ' + tricky_part + '.txt'
    abs_txt = os.path.join(abs_d, txt)
    with open(abs_txt, 'w') as f:
        f.write('load me right before')

    cpp = os.path.join(d, 'file with ' + tricky_part + '.cpp')
    # The quoted .txt name is embedded in a C string literal, so ' and "
    # must be escaped for C.
    with open(cpp, 'w') as f:
        f.write(r'''
          #include <stdio.h>
          #include <string.h>
          #include <emscripten.h>
          int main() {
            FILE *f = fopen("%s", "r");
            char buf[100];
            fread(buf, 1, 20, f);
            buf[20] = 0;
            fclose(f);
            printf("|%%s|\n", buf);
            int result = !strcmp("|load me right before|", buf);
            REPORT_RESULT(result);
            return 0;
          }
        ''' % (txt.replace('\'', '\\\'').replace('\"', '\\"')))

    data_file = os.path.join(abs_d, 'file with ' + tricky_part + '.data')
    data_js_file = os.path.join(abs_d, 'file with ' + tricky_part + '.js')
    self.run_process([FILE_PACKAGER, data_file, '--use-preload-cache', '--indexedDB-name=testdb', '--preload', abs_txt + '@' + txt, '--js-output=' + data_js_file])
    page_file = os.path.join(d, 'file with ' + tricky_part + '.html')
    abs_page_file = os.path.join(self.get_dir(), page_file)
    self.compile_btest([cpp, '--pre-js', data_js_file, '-o', abs_page_file, '-s', 'FORCE_FILESYSTEM'])
    # NOTE(review): the C code compares against "|load me right before|"
    # (with pipes) while the file holds the text without pipes, so the
    # reported result is 0 by construction — this test is about the paths
    # surviving escaping, not the file contents.
    self.run_browser(page_file, '|load me right before|.', '/report_result?0')
def test_preload_caching(self):
    """--use-preload-cache: the first page load populates the cache and the
    second must be served from it.

    checkPreloadResults() (JS library) counts Module['preloadResults']
    entries with fromCache set, so the expected result is 1 on the first run
    and 2 on the second.
    """
    create_file('main.cpp', r'''
      #include <stdio.h>
      #include <string.h>
      #include <emscripten.h>

      extern "C" {
        extern int checkPreloadResults();
      }

      int main(int argc, char** argv) {
        FILE *f = fopen("%s", "r");
        char buf[100];
        fread(buf, 1, 20, f);
        buf[20] = 0;
        fclose(f);
        printf("|%%s|\n", buf);

        int result = 0;

        result += !strcmp("load me right before", buf);
        result += checkPreloadResults();

        REPORT_RESULT(result);
        return 0;
      }
    ''' % 'somefile.txt')

    create_file('test.js', '''
      mergeInto(LibraryManager.library, {
        checkPreloadResults: function() {
          var cached = 0;
          var packages = Object.keys(Module['preloadResults']);
          packages.forEach(function(package) {
            var fromCache = Module['preloadResults'][package]['fromCache'];
            if (fromCache)
              ++ cached;
          });
          return cached;
        }
      });
    ''')

    # test caching of various sizes, including sizes higher than 128MB which is
    # chrome's limit on IndexedDB item sizes, see
    # https://cs.chromium.org/chromium/src/content/renderer/indexed_db/webidbdatabase_impl.cc?type=cs&q=%22The+serialized+value+is+too+large%22&sq=package:chromium&g=0&l=177
    # https://cs.chromium.org/chromium/src/out/Debug/gen/third_party/blink/public/mojom/indexeddb/indexeddb.mojom.h?type=cs&sq=package:chromium&g=0&l=60
    for extra_size in (0, 1 * 1024 * 1024, 100 * 1024 * 1024, 150 * 1024 * 1024):
        # Chrome rejects IndexedDB items at/above ~128MB, so skip there.
        if is_chrome() and extra_size >= 100 * 1024 * 1024:
            continue
        create_file('somefile.txt', '''load me right before running the code please''' + ('_' * extra_size))
        print('size:', os.path.getsize('somefile.txt'))
        self.compile_btest(['main.cpp', '--use-preload-cache', '--js-library', 'test.js', '--preload-file', 'somefile.txt', '-o', 'page.html', '-s', 'ALLOW_MEMORY_GROWTH'])
        self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?1')
        self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?2')
def test_preload_caching_indexeddb_name(self):
    """Like test_preload_caching, but package with the standalone file
    packager and a custom IndexedDB database name (--indexedDB-name=testdb).

    Expects 1 (not cached) on the first load and 2 (cached) on the second.
    """
    create_file('somefile.txt', '''load me right before running the code please''')

    def make_main(path):
        # main.cpp checks both the preloaded file contents and, via the JS
        # library below, how many packages were served from the cache.
        print(path)
        create_file('main.cpp', r'''
          #include <stdio.h>
          #include <string.h>
          #include <emscripten.h>

          extern "C" {
            extern int checkPreloadResults();
          }

          int main(int argc, char** argv) {
            FILE *f = fopen("%s", "r");
            char buf[100];
            fread(buf, 1, 20, f);
            buf[20] = 0;
            fclose(f);
            printf("|%%s|\n", buf);

            int result = 0;

            result += !strcmp("load me right before", buf);
            result += checkPreloadResults();

            REPORT_RESULT(result);
            return 0;
          }
        ''' % path)

    create_file('test.js', '''
      mergeInto(LibraryManager.library, {
        checkPreloadResults: function() {
          var cached = 0;
          var packages = Object.keys(Module['preloadResults']);
          packages.forEach(function(package) {
            var fromCache = Module['preloadResults'][package]['fromCache'];
            if (fromCache)
              ++ cached;
          });
          return cached;
        }
      });
    ''')

    make_main('somefile.txt')
    self.run_process([FILE_PACKAGER, 'somefile.data', '--use-preload-cache', '--indexedDB-name=testdb', '--preload', 'somefile.txt', '--js-output=' + 'somefile.js'])
    self.compile_btest(['main.cpp', '--js-library', 'test.js', '--pre-js', 'somefile.js', '-o', 'page.html', '-s', 'FORCE_FILESYSTEM'])
    self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?1')
    self.run_browser('page.html', 'You should see |load me right before|.', '/report_result?2')
def test_multifile(self):
    """Preload files in nested directories, both as individual
    --preload-file flags and as a single whole-directory flag (with the
    source files deleted afterwards, to prove they come from the package)."""
    # a few files inside a directory
    ensure_dir(os.path.join('subdirr', 'moar'))
    create_file(os.path.join('subdirr', 'data1.txt'), '1214141516171819')
    create_file(os.path.join('subdirr', 'moar', 'data2.txt'), '3.14159265358979')
    create_file('main.cpp', r'''
      #include <stdio.h>
      #include <string.h>
      #include <emscripten.h>
      int main() {
        char buf[17];

        FILE *f = fopen("subdirr/data1.txt", "r");
        fread(buf, 1, 16, f);
        buf[16] = 0;
        fclose(f);
        printf("|%s|\n", buf);
        int result = !strcmp("1214141516171819", buf);

        FILE *f2 = fopen("subdirr/moar/data2.txt", "r");
        fread(buf, 1, 16, f2);
        buf[16] = 0;
        fclose(f2);
        printf("|%s|\n", buf);
        result = result && !strcmp("3.14159265358979", buf);

        REPORT_RESULT(result);
        return 0;
      }
    ''')

    # by individual files
    self.compile_btest(['main.cpp', '--preload-file', 'subdirr/data1.txt', '--preload-file', 'subdirr/moar/data2.txt', '-o', 'page.html'])
    self.run_browser('page.html', 'You should see two cool numbers', '/report_result?1')
    os.remove('page.html')

    # by directory, and remove files to make sure
    self.compile_btest(['main.cpp', '--preload-file', 'subdirr', '-o', 'page.html'])
    shutil.rmtree('subdirr')
    self.run_browser('page.html', 'You should see two cool numbers', '/report_result?1')
def test_custom_file_package_url(self):
    """Serve the .data package from a different base URL ('cdn/') via a
    custom Module.locateFile injected into the default shell html.

    Fix: the default shell is now read with `with open(...)` so the handle
    is closed deterministically (the previous `open(...).read()` leaked it).
    """
    # a few files inside a directory
    ensure_dir('subdirr')
    ensure_dir('cdn')
    create_file(os.path.join('subdirr', 'data1.txt'), '1214141516171819')
    # change the file package base dir to look in a "cdn". note that normally
    # you would add this in your own custom html file etc., and not by
    # modifying the existing shell in this manner
    with open(path_from_root('src', 'shell.html')) as f:
        default_shell = f.read()
    create_file('shell.html', default_shell.replace('var Module = {', 'var Module = { locateFile: function (path, prefix) {if (path.endsWith(".wasm")) {return prefix + path;} else {return "cdn/" + path;}}, '))
    create_file('main.cpp', r'''
      #include <stdio.h>
      #include <string.h>
      #include <emscripten.h>
      int main() {
        char buf[17];

        FILE *f = fopen("subdirr/data1.txt", "r");
        fread(buf, 1, 16, f);
        buf[16] = 0;
        fclose(f);
        printf("|%s|\n", buf);
        int result = !strcmp("1214141516171819", buf);

        REPORT_RESULT(result);
        return 0;
      }
    ''')

    self.compile_btest(['main.cpp', '--shell-file', 'shell.html', '--preload-file', 'subdirr/data1.txt', '-o', 'test.html'])
    shutil.move('test.data', os.path.join('cdn', 'test.data'))
    self.run_browser('test.html', '', '/report_result?1')
def test_missing_data_throws_error(self):
    """When the preloaded .data file cannot be fetched, the failure must
    surface through window.onerror with an error message naming the missing
    file ('test.data'), instead of the app silently running."""
    def setup(assetLocalization):
        # Rebuild the fixture: a shell whose locateFile prefixes data
        # requests with `assetLocalization`, and whose onerror handler
        # reports 1 iff the thrown error mentions test.data.
        self.clear()
        create_file('data.txt', 'data')
        create_file('main.cpp', r'''
          #include <stdio.h>
          #include <string.h>
          #include <emscripten.h>
          int main() {
            // This code should never be executed in terms of missing required dependency file.
            REPORT_RESULT(0);
            return 0;
          }
        ''')
        create_file('on_window_error_shell.html', r'''
          <html>
              <center><canvas id='canvas' width='256' height='256'></canvas></center>
              <hr><div id='output'></div><hr>
              <script type='text/javascript'>
                window.onerror = function(error) {
                  window.onerror = null;
                  var result = error.indexOf("test.data") >= 0 ? 1 : 0;
                  var xhr = new XMLHttpRequest();
                  xhr.open('GET', 'http://localhost:8888/report_result?' + result, true);
                  xhr.send();
                  setTimeout(function() { window.close() }, 1000);
                }
                var Module = {
                  locateFile: function (path, prefix) {if (path.endsWith(".wasm")) {return prefix + path;} else {return "''' + assetLocalization + r'''" + path;}},
                  print: (function() {
                    var element = document.getElementById('output');
                    return function(text) { element.innerHTML += text.replace('\n', '<br>', 'g') + '<br>';};
                  })(),
                  canvas: document.getElementById('canvas')
                };
              </script>
              {{{ SCRIPT }}}
            </body>
          </html>''')

    def test():
        # test test missing file should run xhr.onload with status different than 200, 304 or 206
        setup("")
        self.compile_btest(['main.cpp', '--shell-file', 'on_window_error_shell.html', '--preload-file', 'data.txt', '-o', 'test.html'])
        shutil.move('test.data', 'missing.data')
        self.run_browser('test.html', '', '/report_result?1')

        # test unknown protocol should go through xhr.onerror
        setup("unknown_protocol://")
        self.compile_btest(['main.cpp', '--shell-file', 'on_window_error_shell.html', '--preload-file', 'data.txt', '-o', 'test.html'])
        self.run_browser('test.html', '', '/report_result?1')

        # test wrong protocol and port
        setup("https://localhost:8800/")
        self.compile_btest(['main.cpp', '--shell-file', 'on_window_error_shell.html', '--preload-file', 'data.txt', '-o', 'test.html'])
        self.run_browser('test.html', '', '/report_result?1')

    test()

    # TODO: CORS, test using a full url for locateFile
    # create_file('shell.html', open(path_from_root('src', 'shell.html')).read().replace('var Module = {', 'var Module = { locateFile: function (path) {return "http:/localhost:8888/cdn/" + path;}, '))
    # test()
def test_dev_random(self):
    """/dev/random must be readable on the Emscripten virtual filesystem."""
    self.btest(os.path.join('filesystem', 'dev_random.cpp'), expected='0')
def test_sdl_swsurface(self):
    """SDL software-surface creation should succeed."""
    self.btest('sdl_swsurface.c', args=['-lSDL', '-lGL'], expected='1')
def test_sdl_surface_lock_opts(self):
    # Test Emscripten-specific extensions to optimize SDL_LockSurface and SDL_UnlockSurface.
    """Render with -DTEST_SDL_LOCK_OPTS and compare against the htmltest.png
    reference image."""
    self.btest('hello_world_sdl.cpp', reference='htmltest.png', message='You should see "hello, world!" and a colored cube.', args=['-DTEST_SDL_LOCK_OPTS', '-lSDL', '-lGL'])
def test_sdl_image(self):
    """Load an image through SDL and verify its pixel data; also covers -O2
    with --preload-file and both memory-init-file modes."""
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpg')
    source = test_file('sdl_image.c')
    # (preload mapping, VFS directory define, VFS basename define)
    mappings = [
        ('screenshot.jpg', '/', 'screenshot.jpg'),
        ('screenshot.jpg@/assets/screenshot.jpg', '/assets', 'screenshot.jpg'),
    ]
    for meminit in (0, 1):
        for preload_arg, vfs_dir, vfs_base in mappings:
            cmd = [source, '-o', 'page.html', '-O2', '-lSDL', '-lGL', '--memory-init-file', str(meminit),
                   '--preload-file', preload_arg,
                   '-DSCREENSHOT_DIRNAME="' + vfs_dir + '"',
                   '-DSCREENSHOT_BASENAME="' + vfs_base + '"',
                   '--use-preload-plugins']
            self.compile_btest(cmd)
            self.run_browser('page.html', '', '/report_result?600')
def test_sdl_image_jpeg(self):
    """Same as test_sdl_image but with a .jpeg extension for the asset."""
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpeg')
    src = test_file('sdl_image.c')
    self.compile_btest([
        src, '-o', 'page.html', '-lSDL', '-lGL',
        '--preload-file', 'screenshot.jpeg', '-DSCREENSHOT_DIRNAME="/"', '-DSCREENSHOT_BASENAME="screenshot.jpeg"', '--use-preload-plugins'
    ])
    self.run_browser('page.html', '', '/report_result?600')
def test_sdl_image_prepare(self):
    # load an image file, get pixel data.
    """emscripten_run_preload_plugins path: the asset is copied with an
    unrecognized extension ('.not') so the preload plugin must be invoked
    explicitly; reftest against screenshot.jpg."""
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl_image_prepare.c', reference='screenshot.jpg', args=['--preload-file', 'screenshot.not', '-lSDL', '-lGL'], also_proxied=True, manually_trigger_reftest=True)
def test_sdl_image_prepare_data(self):
    # load an image file, get pixel data.
    """Like test_sdl_image_prepare, but via the *_data variant of the
    prepare API (operating on an in-memory buffer)."""
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl_image_prepare_data.c', reference='screenshot.jpg', args=['--preload-file', 'screenshot.not', '-lSDL', '-lGL'], manually_trigger_reftest=True)
def test_sdl_image_must_prepare(self):
    # load an image file, get pixel data.
    """Variant where the file keeps its .jpg extension but the test source
    still requires an explicit prepare step."""
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpg')
    self.btest('sdl_image_must_prepare.c', reference='screenshot.jpg', args=['--preload-file', 'screenshot.jpg', '-lSDL', '-lGL'], manually_trigger_reftest=True)
def test_sdl_stb_image(self):
    # load an image file, get pixel data.
    """Decode the image with the bundled stb_image (-s STB_IMAGE) instead of
    the browser's codecs; reftest against screenshot.jpg."""
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl_stb_image.c', reference='screenshot.jpg', args=['-s', 'STB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL'])
def test_sdl_stb_image_bpp(self):
    """Decode PNGs of every bit depth through stb_image and reftest each:
    bpp1 = grayscale, bpp2 = grayscale+alpha, bpp3 = RGB, bpp4 = RGBA."""
    for png in ('sdl-stb-bpp1.png', 'sdl-stb-bpp2.png', 'sdl-stb-bpp3.png', 'sdl-stb-bpp4.png'):
        self.clear()
        shutil.copyfile(test_file(png), 'screenshot.not')
        self.btest('sdl_stb_image.c', reference=png, args=['-s', 'STB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL'])
def test_sdl_stb_image_data(self):
    # load an image file, get pixel data.
    """stb_image decoding from an in-memory buffer (the *_data variant)."""
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl_stb_image_data.c', reference='screenshot.jpg', args=['-s', 'STB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL'])
def test_sdl_stb_image_cleanup(self):
    """Decode with stb_image, free the data, and use --memoryprofiler to
    check for leftover allocations (expected result 0)."""
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl_stb_image_cleanup.c', expected='0', args=['-s', 'STB_IMAGE', '--preload-file', 'screenshot.not', '-lSDL', '-lGL', '--memoryprofiler'])
def test_sdl_canvas(self):
    """SDL canvas rendering under LEGACY_GL_EMULATION, also re-run at -O0
    and -O2 with SAFE_HEAP for extra coverage."""
    # Identical builds to the original three stanzas, in the same order.
    for extra in ([], ['-O0', '-s', 'SAFE_HEAP'], ['-O2', '-s', 'SAFE_HEAP']):
        self.clear()
        self.btest('sdl_canvas.c', expected='1', args=['-s', 'LEGACY_GL_EMULATION'] + extra + ['-lSDL', '-lGL'])
def post_manual_reftest(self, reference=None):
    """Inject reftest plumbing into test.html: wrap window.close so that
    doReftest() runs (after rAFs settle) before the page closes.

    reference: reference image passed to self.reftest; defaults to
    self.reference.

    Fix: test.html and reftest.js are now read with `with open(...)` so the
    handles are closed deterministically (the previous `open(...).read()`
    calls leaked them).
    """
    self.reftest(test_file(self.reference if reference is None else reference))

    with open('reftest.js') as f:
        reftest_js = f.read()
    with open('test.html') as f:
        html = f.read()
    html = html.replace('</body>', '''
<script>
function assert(x, y) { if (!x) throw 'assertion failed ' + y }
%s

var windowClose = window.close;
window.close = function() {
  // wait for rafs to arrive and the screen to update before reftesting
  setTimeout(function() {
    doReftest();
    setTimeout(windowClose, 5000);
  }, 1000);
};
</script>
</body>''' % reftest_js)
    create_file('test.html', html)
def test_sdl_canvas_proxy(self):
    """SDL canvas rendering with --proxy-to-worker plus a preloaded file;
    the reftest is wired in manually via post_manual_reftest."""
    create_file('data.txt', 'datum')
    self.btest('sdl_canvas_proxy.c', reference='sdl_canvas_proxy.png', args=['--proxy-to-worker', '--preload-file', 'data.txt', '-lSDL', '-lGL'], manual_reference=True, post_build=self.post_manual_reftest)
@requires_graphics_hardware
def test_glgears_proxy_jstarget(self):
    # test .js target with --proxy-worker; emits 2 js files, client and worker
    """GL gears with --proxy-to-worker and a .js output target; a minimal
    shell hosts the script and the result is reftested against gears.png."""
    self.compile_btest([test_file('hello_world_gles_proxy.c'), '-o', 'test.js', '--proxy-to-worker', '-s', 'GL_TESTING', '-lGL', '-lglut'])
    shell_with_script('shell_minimal.html', 'test.html', '<script src="test.js"></script>')
    self.post_manual_reftest('gears.png')
    self.run_browser('test.html', None, '/report_result?0')
def test_sdl_canvas_alpha(self):
    # N.B. On Linux with Intel integrated graphics cards, this test needs Firefox 49 or newer.
    # See https://github.com/emscripten-core/emscripten/issues/4069.
    """Alpha blending on the SDL canvas, with and without the '-0' program
    argument (each mode has its own reference image)."""
    create_file('flag_0.js', '''
      Module['arguments'] = ['-0'];
    ''')

    self.btest('sdl_canvas_alpha.c', args=['-lSDL', '-lGL'], reference='sdl_canvas_alpha.png', reference_slack=12)
    self.btest('sdl_canvas_alpha.c', args=['--pre-js', 'flag_0.js', '-lSDL', '-lGL'], reference='sdl_canvas_alpha_flag_0.png', reference_slack=12)
def test_sdl_key(self):
    """SDL keyboard events, across a matrix of: synchronous vs delayed
    (setTimeout) event dispatch, the SDL set-event-handler extension, and
    an ASYNCIFY sleep variant."""
    for delay in [0, 1]:
        for defines in [
            [],
            ['-DTEST_EMSCRIPTEN_SDL_SETEVENTHANDLER']
        ]:
            for async_ in [
                [],
                ['-DTEST_SLEEP', '-s', 'ASSERTIONS', '-s', 'SAFE_HEAP', '-s', 'ASYNCIFY']
            ]:
                print(delay, defines, async_)

                # When delay is set, each synthetic event is dispatched from
                # inside a setTimeout wrapper (the %s slots below).
                create_file('pre.js', '''
                  function keydown(c) {
                    %s
                    var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
                    document.dispatchEvent(event);
                    %s
                  }

                  function keyup(c) {
                    %s
                    var event = new KeyboardEvent("keyup", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
                    document.dispatchEvent(event);
                    %s
                  }
                ''' % ('setTimeout(function() {' if delay else '', '}, 1);' if delay else '', 'setTimeout(function() {' if delay else '', '}, 1);' if delay else ''))
                self.compile_btest([test_file('sdl_key.c'), '-o', 'page.html'] + defines + async_ + ['--pre-js', 'pre.js', '-s', 'EXPORTED_FUNCTIONS=[_main]', '-lSDL', '-lGL'])
                self.run_browser('page.html', '', '/report_result?223092870')
def test_sdl_key_proxy(self):
    """SDL key events with --proxy-to-worker: the page injects synthetic
    keydown/keyup sequences and the worker-side app must observe all of
    them (expected result 223092870; see sdl_key_proxy.c for how the keys
    are folded into that value).

    Fix: test.html is now read with `with open(...)` so the handle is
    closed deterministically (the previous `open(...).read()` leaked it).
    """
    create_file('pre.js', '''
      var Module = {};
      Module.postRun = function() {
        function doOne() {
          Module._one();
          setTimeout(doOne, 1000/60);
        }
        setTimeout(doOne, 1000/60);
      }
    ''')

    def post():
        # Append key-event generators to the built page and fire the test
        # sequence (alt+up, ctrl+shift+down, left, right, a, b, end marker).
        with open('test.html') as f:
            html = f.read()
        html = html.replace('</body>', '''
<script>
function keydown(c) {
  var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
  document.dispatchEvent(event);
}

function keyup(c) {
  var event = new KeyboardEvent("keyup", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
  document.dispatchEvent(event);
}

keydown(1250);keydown(38);keyup(38);keyup(1250); // alt, up
keydown(1248);keydown(1249);keydown(40);keyup(40);keyup(1249);keyup(1248); // ctrl, shift, down
keydown(37);keyup(37); // left
keydown(39);keyup(39); // right
keydown(65);keyup(65); // a
keydown(66);keyup(66); // b
keydown(100);keyup(100); // trigger the end
</script>
</body>''')
        create_file('test.html', html)

    self.btest('sdl_key_proxy.c', '223092870', args=['--proxy-to-worker', '--pre-js', 'pre.js', '-s', 'EXPORTED_FUNCTIONS=[_main,_one]', '-lSDL', '-lGL'], manual_reference=True, post_build=post)
def test_canvas_focus(self):
    """The canvas element must be focusable / receive focus correctly."""
    self.btest('canvas_focus.c', '1')
def test_keydown_preventdefault_proxy(self):
    """With --proxy-to-worker, a keydown whose default is prevented must
    suppress the synthetic keypress that a real browser would not deliver
    (here the page mimics that browser behavior itself).

    Fix: test.html is now read with `with open(...)` so the handle is
    closed deterministically (the previous `open(...).read()` leaked it).
    """
    def post():
        # Append helpers that mimic browser key dispatch: keypress is only
        # sent when the preceding keydown was not default-prevented.
        with open('test.html') as f:
            html = f.read()
        html = html.replace('</body>', '''
<script>
function keydown(c) {
  var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
  return document.dispatchEvent(event);
}

function keypress(c) {
  var event = new KeyboardEvent("keypress", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
  return document.dispatchEvent(event);
}

function keyup(c) {
  var event = new KeyboardEvent("keyup", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
  return document.dispatchEvent(event);
}

function sendKey(c) {
  // Simulate the sending of the keypress event when the
  // prior keydown event is not prevent defaulted.
  if (keydown(c) === false) {
    console.log('keydown prevent defaulted, NOT sending keypress!!!');
  } else {
    keypress(c);
  }
  keyup(c);
}

// Send 'a'. Simulate the sending of the keypress event when the
// prior keydown event is not prevent defaulted.
sendKey(65);

// Send backspace. Keypress should not be sent over as default handling of
// the Keydown event should be prevented.
sendKey(8);

keydown(100);keyup(100); // trigger the end
</script>
</body>''')
        create_file('test.html', html)

    self.btest('keydown_preventdefault_proxy.cpp', '300', args=['--proxy-to-worker', '-s', 'EXPORTED_FUNCTIONS=[_main]'], manual_reference=True, post_build=post)
def test_sdl_text(self):
    """SDL text input: the pre-js exposes simulateKeyEvent for the test page
    and pumps Module._one() on a 60fps timer after startup."""
    create_file('pre.js', '''
      Module.postRun = function() {
        function doOne() {
          Module._one();
          setTimeout(doOne, 1000/60);
        }
        setTimeout(doOne, 1000/60);
      }

      function simulateKeyEvent(c) {
        var event = new KeyboardEvent("keypress", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
        document.body.dispatchEvent(event);
      }
    ''')

    self.compile_btest([test_file('sdl_text.c'), '-o', 'page.html', '--pre-js', 'pre.js', '-s', 'EXPORTED_FUNCTIONS=[_main,_one]', '-lSDL', '-lGL'])
    self.run_browser('page.html', '', '/report_result?1')
def test_sdl_mouse(self):
    """SDL mouse events: the pre-js provides simulateMouseEvent, which
    synthesizes mousedown/mouseup for button >= 0 and mousemove otherwise,
    offsetting coordinates by the canvas position."""
    create_file('pre.js', '''
      function simulateMouseEvent(x, y, button) {
        var event = document.createEvent("MouseEvents");
        if (button >= 0) {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousedown', true, true, window,
                     1, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event1);
          var event2 = document.createEvent("MouseEvents");
          event2.initMouseEvent('mouseup', true, true, window,
                     1, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event2);
        } else {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousemove', true, true, window,
                     0, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y,
                     0, 0, 0, 0,
                     0, null);
          Module['canvas'].dispatchEvent(event1);
        }
      }
      window['simulateMouseEvent'] = simulateMouseEvent;
    ''')

    self.compile_btest([test_file('sdl_mouse.c'), '-O2', '--minify=0', '-o', 'page.html', '--pre-js', 'pre.js', '-lSDL', '-lGL'])
    self.run_browser('page.html', '', '/report_result?1')
def test_sdl_mouse_offsets(self):
    """SDL mouse coordinates with a CSS-offset canvas: the custom page.html
    places the canvas inside an absolutely-positioned container, and the
    events use raw page coordinates (no canvas-offset correction), so the
    app (built with -DTEST_SDL_MOUSE_OFFSETS) must do the offset math."""
    create_file('pre.js', '''
      function simulateMouseEvent(x, y, button) {
        var event = document.createEvent("MouseEvents");
        if (button >= 0) {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousedown', true, true, window,
                     1, x, y, x, y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event1);
          var event2 = document.createEvent("MouseEvents");
          event2.initMouseEvent('mouseup', true, true, window,
                     1, x, y, x, y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event2);
        } else {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousemove', true, true, window,
                     0, x, y, x, y,
                     0, 0, 0, 0,
                     0, null);
          Module['canvas'].dispatchEvent(event1);
        }
      }
      window['simulateMouseEvent'] = simulateMouseEvent;
    ''')

    create_file('page.html', '''
      <html>
        <head>
          <style type="text/css">
            html, body { margin: 0; padding: 0; }
            #container {
              position: absolute;
              left: 5px; right: 0;
              top: 5px; bottom: 0;
            }
            #canvas {
              position: absolute;
              left: 0; width: 600px;
              top: 0; height: 450px;
            }
            textarea {
              margin-top: 500px;
              margin-left: 5px;
              width: 600px;
            }
          </style>
        </head>
        <body>
          <div id="container">
            <canvas id="canvas"></canvas>
          </div>
          <textarea id="output" rows="8"></textarea>
          <script type="text/javascript">
            var Module = {
              canvas: document.getElementById('canvas'),
              print: (function() {
                var element = document.getElementById('output');
                element.value = ''; // clear browser cache
                return function(text) {
                  if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
                  element.value += text + "\\n";
                  element.scrollTop = element.scrollHeight; // focus on bottom
                };
              })()
            };
          </script>
          <script type="text/javascript" src="sdl_mouse.js"></script>
        </body>
      </html>
    ''')

    self.compile_btest([test_file('sdl_mouse.c'), '-DTEST_SDL_MOUSE_OFFSETS', '-O2', '--minify=0', '-o', 'sdl_mouse.js', '--pre-js', 'pre.js', '-lSDL', '-lGL'])
    self.run_browser('page.html', '', '/report_result?1')
def test_glut_touchevents(self):
    """GLUT touch-event handling."""
    self.btest('glut_touchevents.c', '1', args=['-lglut'])
def test_glut_wheelevents(self):
    """GLUT mouse-wheel event handling."""
    self.btest('glut_wheelevents.c', '1', args=['-lglut'])
@requires_graphics_hardware
def test_glut_glutget_no_antialias(self):
    """glutGet queries without anti-aliasing (no -DAA_ACTIVATED), with and
    without depth/stencil/alpha activated."""
    self.btest('glut_glutget.c', '1', args=['-lglut', '-lGL'])
    self.btest('glut_glutget.c', '1', args=['-lglut', '-lGL', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED'])
# This test supersedes the one above, but it's skipped in the CI because anti-aliasing is not well supported by the Mesa software renderer.
@requires_graphics_hardware
def test_glut_glutget(self):
    """glutGet queries including the anti-aliasing case (-DAA_ACTIVATED)."""
    self.btest('glut_glutget.c', '1', args=['-lglut', '-lGL'])
    self.btest('glut_glutget.c', '1', args=['-lglut', '-lGL', '-DAA_ACTIVATED', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED'])
def test_sdl_joystick_1(self):
    # Generates events corresponding to the Working Draft of the HTML5 Gamepad API.
    # http://www.w3.org/TR/2012/WD-gamepad-20120529/#gamepad-interface
    """SDL joystick against the 2012 Working Draft Gamepad API, where
    buttons are plain numbers (0/1) rather than objects; navigator.getGamepads
    is spoofed and helpers mutate the fake gamepad state."""
    create_file('pre.js', '''
      var gamepads = [];
      // Spoof this function.
      navigator['getGamepads'] = function() {
        return gamepads;
      };
      window['addNewGamepad'] = function(id, numAxes, numButtons) {
        var index = gamepads.length;
        gamepads.push({
          axes: new Array(numAxes),
          buttons: new Array(numButtons),
          id: id,
          index: index
        });
        var i;
        for (i = 0; i < numAxes; i++) gamepads[index].axes[i] = 0;
        for (i = 0; i < numButtons; i++) gamepads[index].buttons[i] = 0;
      };
      window['simulateGamepadButtonDown'] = function (index, button) {
        gamepads[index].buttons[button] = 1;
      };
      window['simulateGamepadButtonUp'] = function (index, button) {
        gamepads[index].buttons[button] = 0;
      };
      window['simulateAxisMotion'] = function (index, axis, value) {
        gamepads[index].axes[axis] = value;
      };
    ''')

    self.compile_btest([test_file('sdl_joystick.c'), '-O2', '--minify=0', '-o', 'page.html', '--pre-js', 'pre.js', '-lSDL', '-lGL'])
    self.run_browser('page.html', '', '/report_result?2')
def test_sdl_joystick_2(self):
    # Generates events corresponding to the Editor's Draft of the HTML5 Gamepad API.
    # https://dvcs.w3.org/hg/gamepad/raw-file/default/gamepad.html#idl-def-Gamepad
    """SDL joystick against the Editor's Draft Gamepad API, where buttons
    are { pressed, value } objects and (as in Firefox) the original button
    objects are mutated in place."""
    create_file('pre.js', '''
      var gamepads = [];
      // Spoof this function.
      navigator['getGamepads'] = function() {
        return gamepads;
      };
      window['addNewGamepad'] = function(id, numAxes, numButtons) {
        var index = gamepads.length;
        gamepads.push({
          axes: new Array(numAxes),
          buttons: new Array(numButtons),
          id: id,
          index: index
        });
        var i;
        for (i = 0; i < numAxes; i++) gamepads[index].axes[i] = 0;
        // Buttons are objects
        for (i = 0; i < numButtons; i++) gamepads[index].buttons[i] = { pressed: false, value: 0 };
      };
      // FF mutates the original objects.
      window['simulateGamepadButtonDown'] = function (index, button) {
        gamepads[index].buttons[button].pressed = true;
        gamepads[index].buttons[button].value = 1;
      };
      window['simulateGamepadButtonUp'] = function (index, button) {
        gamepads[index].buttons[button].pressed = false;
        gamepads[index].buttons[button].value = 0;
      };
      window['simulateAxisMotion'] = function (index, axis, value) {
        gamepads[index].axes[axis] = value;
      };
    ''')

    self.compile_btest([test_file('sdl_joystick.c'), '-O2', '--minify=0', '-o', 'page.html', '--pre-js', 'pre.js', '-lSDL', '-lGL'])
    self.run_browser('page.html', '', '/report_result?2')
  @requires_graphics_hardware
  def test_glfw_joystick(self):
    """GLFW joystick test using the same Editor's Draft gamepad spoofing as
    test_sdl_joystick_2, but additionally dispatching a 'gamepadconnected'
    event, which the GLFW backend requires (SDL does not)."""
    # Generates events corresponding to the Editor's Draft of the HTML5 Gamepad API.
    # https://dvcs.w3.org/hg/gamepad/raw-file/default/gamepad.html#idl-def-Gamepad
    create_file('pre.js', '''
      var gamepads = [];
      // Spoof this function.
      navigator['getGamepads'] = function() {
        return gamepads;
      };
      window['addNewGamepad'] = function(id, numAxes, numButtons) {
        var index = gamepads.length;
        var gamepad = {
          axes: new Array(numAxes),
          buttons: new Array(numButtons),
          id: id,
          index: index
        };
        gamepads.push(gamepad)
        var i;
        for (i = 0; i < numAxes; i++) gamepads[index].axes[i] = 0;
        // Buttons are objects
        for (i = 0; i < numButtons; i++) gamepads[index].buttons[i] = { pressed: false, value: 0 };
        // Dispatch event (required for glfw joystick; note not used in SDL test)
        var event = new Event('gamepadconnected');
        event.gamepad = gamepad;
        window.dispatchEvent(event);
      };
      // FF mutates the original objects.
      window['simulateGamepadButtonDown'] = function (index, button) {
        gamepads[index].buttons[button].pressed = true;
        gamepads[index].buttons[button].value = 1;
      };
      window['simulateGamepadButtonUp'] = function (index, button) {
        gamepads[index].buttons[button].pressed = false;
        gamepads[index].buttons[button].value = 0;
      };
      window['simulateAxisMotion'] = function (index, axis, value) {
        gamepads[index].axes[axis] = value;
      };
    ''')
    self.compile_btest([test_file('test_glfw_joystick.c'), '-O2', '--minify=0', '-o', 'page.html', '--pre-js', 'pre.js', '-lGL', '-lglfw3', '-s', 'USE_GLFW=3'])
    self.run_browser('page.html', '', '/report_result?2')
  @requires_graphics_hardware
  def test_webgl_context_attributes(self):
    """Verify WebGL context attributes (antialias/depth/stencil/alpha) across
    the GLUT, SDL, SDL2 and GLFW windowing backends, both with the attributes
    requested and with them left at defaults. A JS library provides probes
    that create a throwaway context and report what the implementation
    actually granted; unsupported attributes still count as success."""
    # Javascript code to check the attributes support we want to test in the WebGL implementation
    # (request the attribute, create a context and check its value afterwards in the context attributes).
    # Tests will succeed when an attribute is not supported.
    create_file('check_webgl_attributes_support.js', '''
      mergeInto(LibraryManager.library, {
        webglAntialiasSupported: function() {
          canvas = document.createElement('canvas');
          context = canvas.getContext('experimental-webgl', {antialias: true});
          attributes = context.getContextAttributes();
          return attributes.antialias;
        },
        webglDepthSupported: function() {
          canvas = document.createElement('canvas');
          context = canvas.getContext('experimental-webgl', {depth: true});
          attributes = context.getContextAttributes();
          return attributes.depth;
        },
        webglStencilSupported: function() {
          canvas = document.createElement('canvas');
          context = canvas.getContext('experimental-webgl', {stencil: true});
          attributes = context.getContextAttributes();
          return attributes.stencil;
        },
        webglAlphaSupported: function() {
          canvas = document.createElement('canvas');
          context = canvas.getContext('experimental-webgl', {alpha: true});
          attributes = context.getContextAttributes();
          return attributes.alpha;
        }
      });
    ''')

    # Copy common code file to temporary directory
    filepath = test_file('test_webgl_context_attributes_common.c')
    temp_filepath = os.path.join(self.get_dir(), os.path.basename(filepath))
    shutil.copyfile(filepath, temp_filepath)

    # perform tests with attributes activated
    self.btest('test_webgl_context_attributes_glut.c', '1', args=['--js-library', 'check_webgl_attributes_support.js', '-DAA_ACTIVATED', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED', '-lGL', '-lglut', '-lGLEW'])
    self.btest('test_webgl_context_attributes_sdl.c', '1', args=['--js-library', 'check_webgl_attributes_support.js', '-DAA_ACTIVATED', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED', '-lGL', '-lSDL', '-lGLEW'])
    self.btest('test_webgl_context_attributes_sdl2.c', '1', args=['--js-library', 'check_webgl_attributes_support.js', '-DAA_ACTIVATED', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED', '-lGL', '-s', 'USE_SDL=2', '-lGLEW'])
    self.btest('test_webgl_context_attributes_glfw.c', '1', args=['--js-library', 'check_webgl_attributes_support.js', '-DAA_ACTIVATED', '-DDEPTH_ACTIVATED', '-DSTENCIL_ACTIVATED', '-DALPHA_ACTIVATED', '-lGL', '-lglfw', '-lGLEW'])

    # perform tests with attributes desactivated
    self.btest('test_webgl_context_attributes_glut.c', '1', args=['--js-library', 'check_webgl_attributes_support.js', '-lGL', '-lglut', '-lGLEW'])
    self.btest('test_webgl_context_attributes_sdl.c', '1', args=['--js-library', 'check_webgl_attributes_support.js', '-lGL', '-lSDL', '-lGLEW'])
    self.btest('test_webgl_context_attributes_glfw.c', '1', args=['--js-library', 'check_webgl_attributes_support.js', '-lGL', '-lglfw', '-lGLEW'])
  @requires_graphics_hardware
  def test_webgl_no_double_error(self):
    """Run webgl_error.cpp; the page must report '0'."""
    self.btest('webgl_error.cpp', '0')
  @requires_graphics_hardware
  def test_webgl_parallel_shader_compile(self):
    """Run webgl_parallel_shader_compile.cpp; the page must report '1'."""
    self.btest('webgl_parallel_shader_compile.cpp', '1')
  @requires_graphics_hardware
  def test_webgl_explicit_uniform_location(self):
    """Run webgl_explicit_uniform_location.c with GL_EXPLICIT_UNIFORM_LOCATION
    enabled (requires WebGL 2); the page must report '1'."""
    self.btest('webgl_explicit_uniform_location.c', '1', args=['-s', 'GL_EXPLICIT_UNIFORM_LOCATION=1', '-s', 'MIN_WEBGL_VERSION=2'])
  # Test that -s GL_PREINITIALIZED_CONTEXT=1 works and allows user to set Module['preinitializedWebGLContext'] to a preinitialized WebGL context.
  @requires_graphics_hardware
  def test_preinitialized_webgl_context(self):
    """Run preinitialized_webgl_context.cpp with GL_PREINITIALIZED_CONTEXT and
    a custom shell page that supplies the context; the page must report '5'."""
    self.btest('preinitialized_webgl_context.cpp', '5', args=['-s', 'GL_PREINITIALIZED_CONTEXT', '--shell-file', test_file('preinitialized_webgl_context.html')])
@requires_threads
def test_emscripten_get_now(self):
for args in [[], ['-s', 'USE_PTHREADS'], ['-s', 'ENVIRONMENT=web', '-O2', '--closure=1']]:
self.btest('emscripten_get_now.cpp', '1', args=args)
  def test_write_file_in_environment_web(self):
    """Build write_file.c restricted to ENVIRONMENT=web with -Os and closure,
    and verify it exits cleanly."""
    self.btest_exit('write_file.c', args=['-s', 'ENVIRONMENT=web', '-Os', '--closure=1'])
  def test_fflush(self):
    """Run test_fflush.cpp with EXIT_RUNTIME and a custom shell page; the page
    must report '0'. Uses Reporting.NONE since the shell handles reporting."""
    self.btest('test_fflush.cpp', '0', args=['-s', 'EXIT_RUNTIME', '--shell-file', test_file('test_fflush.html')], reporting=Reporting.NONE)
  def test_file_db(self):
    """Persist a file to the browser DB in one run and read it back in later
    runs. A unique per-run secret guards against stale results. Each run's
    generated test.html is kept under a distinct name for post-mortem debugging."""
    secret = str(time.time())
    create_file('moar.txt', secret)
    self.btest('file_db.cpp', '1', args=['--preload-file', 'moar.txt', '-DFIRST'])
    shutil.copyfile('test.html', 'first.html')
    self.btest('file_db.cpp', secret, args=['-s', 'FORCE_FILESYSTEM'])
    shutil.copyfile('test.html', 'second.html')
    create_file('moar.txt', 'aliantha')
    self.btest('file_db.cpp', secret, args=['--preload-file', 'moar.txt']) # even with a file there, we load over it
    shutil.move('test.html', 'third.html')
  def test_fs_idbfs_sync(self):
    """Sync a file to IDBFS in a first run (-DFIRST), then read it back in a
    second run, with and without -DEXTRA_WORK. The first btest inside the loop
    deliberately does not use `extra`: it re-seeds the persisted state before
    each second-run variant. NOTE(review): '-lidbfs.js' appears twice in each
    args list — presumably harmless duplication; confirm before removing."""
    for extra in [[], ['-DEXTRA_WORK']]:
      secret = str(time.time())
      self.btest(test_file('fs', 'test_idbfs_sync.c'), '1', args=['-lidbfs.js', '-DFIRST', '-DSECRET=\"' + secret + '\"', '-s', 'EXPORTED_FUNCTIONS=[_main,_test,_success]', '-lidbfs.js'])
      self.btest(test_file('fs', 'test_idbfs_sync.c'), '1', args=['-lidbfs.js', '-DSECRET=\"' + secret + '\"', '-s', 'EXPORTED_FUNCTIONS=[_main,_test,_success]', '-lidbfs.js'] + extra)
  def test_fs_idbfs_sync_force_exit(self):
    """Same two-run IDBFS persistence check as test_fs_idbfs_sync, but with
    EXIT_RUNTIME and -DFORCE_EXIT so the runtime shuts down explicitly."""
    secret = str(time.time())
    self.btest(test_file('fs', 'test_idbfs_sync.c'), '1', args=['-lidbfs.js', '-DFIRST', '-DSECRET=\"' + secret + '\"', '-s', 'EXPORTED_FUNCTIONS=[_main,_test,_success]', '-s', 'EXIT_RUNTIME', '-DFORCE_EXIT', '-lidbfs.js'])
    self.btest(test_file('fs', 'test_idbfs_sync.c'), '1', args=['-lidbfs.js', '-DSECRET=\"' + secret + '\"', '-s', 'EXPORTED_FUNCTIONS=[_main,_test,_success]', '-s', 'EXIT_RUNTIME', '-DFORCE_EXIT', '-lidbfs.js'])
  def test_fs_idbfs_fsync(self):
    """Two-run IDBFS persistence check driven by fsync. The pre.js mounts
    IDBFS at /working1 and syncs persisted state into memory before main()
    runs, holding a run dependency until the sync completes. ASYNCIFY is
    required for the synchronous-looking fsync flow."""
    # sync from persisted state into memory before main()
    create_file('pre.js', '''
      Module.preRun = function() {
        addRunDependency('syncfs');

        FS.mkdir('/working1');
        FS.mount(IDBFS, {}, '/working1');
        FS.syncfs(true, function (err) {
          if (err) throw err;
          removeRunDependency('syncfs');
        });
      };
    ''')
    args = ['--pre-js', 'pre.js', '-lidbfs.js', '-s', 'EXIT_RUNTIME', '-s', 'ASYNCIFY']
    secret = str(time.time())
    self.btest(test_file('fs', 'test_idbfs_fsync.c'), '1', args=args + ['-DFIRST', '-DSECRET=\"' + secret + '\"', '-s', 'EXPORTED_FUNCTIONS=[_main,_success]', '-lidbfs.js'])
    self.btest(test_file('fs', 'test_idbfs_fsync.c'), '1', args=args + ['-DSECRET=\"' + secret + '\"', '-s', 'EXPORTED_FUNCTIONS=[_main,_success]', '-lidbfs.js'])
  def test_fs_memfs_fsync(self):
    """Run the MEMFS fsync test with ASYNCIFY and EXIT_RUNTIME; the page must
    report '1'. A per-run secret is baked in via -DSECRET."""
    args = ['-s', 'ASYNCIFY', '-s', 'EXIT_RUNTIME']
    secret = str(time.time())
    self.btest(test_file('fs', 'test_memfs_fsync.c'), '1', args=args + ['-DSECRET=\"' + secret + '\"'])
  def test_fs_workerfs_read(self):
    """Mount WORKERFS in a worker with one Blob-backed and one File-backed
    entry (contents injected into pre.js via %-formatting), then verify the C
    side can read both secrets. Runs with --proxy-to-worker."""
    secret = 'a' * 10
    secret2 = 'b' * 10
    create_file('pre.js', '''
      var Module = {};
      Module.preRun = function() {
        var blob = new Blob(['%s']);
        var file = new File(['%s'], 'file.txt');
        FS.mkdir('/work');
        FS.mount(WORKERFS, {
          blobs: [{ name: 'blob.txt', data: blob }],
          files: [file],
        }, '/work');
      };
    ''' % (secret, secret2))
    self.btest(test_file('fs', 'test_workerfs_read.c'), '1', args=['-lworkerfs.js', '--pre-js', 'pre.js', '-DSECRET=\"' + secret + '\"', '-DSECRET2=\"' + secret2 + '\"', '--proxy-to-worker', '-lworkerfs.js'])
def test_fs_workerfs_package(self):
create_file('file1.txt', 'first')
ensure_dir('sub')
open(os.path.join('sub', 'file2.txt'), 'w').write('second')
self.run_process([FILE_PACKAGER, 'files.data', '--preload', 'file1.txt', os.path.join('sub', 'file2.txt'), '--separate-metadata', '--js-output=files.js'])
self.btest(os.path.join('fs', 'test_workerfs_package.cpp'), '1', args=['-lworkerfs.js', '--proxy-to-worker', '-lworkerfs.js'])
def test_fs_lz4fs_package(self):
# generate data
ensure_dir('subdir')
create_file('file1.txt', '0123456789' * (1024 * 128))
open(os.path.join('subdir', 'file2.txt'), 'w').write('1234567890' * (1024 * 128))
random_data = bytearray(random.randint(0, 255) for x in range(1024 * 128 * 10 + 1))
random_data[17] = ord('X')
open('file3.txt', 'wb').write(random_data)
# compress in emcc, -s LZ4=1 tells it to tell the file packager
print('emcc-normal')
self.btest(os.path.join('fs', 'test_lz4fs.cpp'), '2', args=['-s', 'LZ4=1', '--preload-file', 'file1.txt', '--preload-file', 'subdir/file2.txt', '--preload-file', 'file3.txt'])
assert os.path.getsize('file1.txt') + os.path.getsize(os.path.join('subdir', 'file2.txt')) + os.path.getsize('file3.txt') == 3 * 1024 * 128 * 10 + 1
assert os.path.getsize('test.data') < (3 * 1024 * 128 * 10) / 2 # over half is gone
print(' emcc-opts')
self.btest(os.path.join('fs', 'test_lz4fs.cpp'), '2', args=['-s', 'LZ4=1', '--preload-file', 'file1.txt', '--preload-file', 'subdir/file2.txt', '--preload-file', 'file3.txt', '-O2'])
# compress in the file packager, on the server. the client receives compressed data and can just use it. this is typical usage
print('normal')
out = subprocess.check_output([FILE_PACKAGER, 'files.data', '--preload', 'file1.txt', 'subdir/file2.txt', 'file3.txt', '--lz4'])
open('files.js', 'wb').write(out)
self.btest(os.path.join('fs', 'test_lz4fs.cpp'), '2', args=['--pre-js', 'files.js', '-s', 'LZ4=1', '-s', 'FORCE_FILESYSTEM'])
print(' opts')
self.btest(os.path.join('fs', 'test_lz4fs.cpp'), '2', args=['--pre-js', 'files.js', '-s', 'LZ4=1', '-s', 'FORCE_FILESYSTEM', '-O2'])
print(' modularize')
self.compile_btest([test_file('fs', 'test_lz4fs.cpp'), '--pre-js', 'files.js', '-s', 'LZ4=1', '-s', 'FORCE_FILESYSTEM', '-s', 'MODULARIZE=1'])
create_file('a.html', '''
<script src="a.out.js"></script>
<script>
Module()
</script>
''')
self.run_browser('a.html', '.', '/report_result?2')
# load the data into LZ4FS manually at runtime. This means we compress on the client. This is generally not recommended
print('manual')
subprocess.check_output([FILE_PACKAGER, 'files.data', '--preload', 'file1.txt', 'subdir/file2.txt', 'file3.txt', '--separate-metadata', '--js-output=files.js'])
self.btest(os.path.join('fs', 'test_lz4fs.cpp'), '1', args=['-DLOAD_MANUALLY', '-s', 'LZ4=1', '-s', 'FORCE_FILESYSTEM'])
print(' opts')
self.btest(os.path.join('fs', 'test_lz4fs.cpp'), '1', args=['-DLOAD_MANUALLY', '-s', 'LZ4=1', '-s', 'FORCE_FILESYSTEM', '-O2'])
print(' opts+closure')
self.btest(os.path.join('fs', 'test_lz4fs.cpp'), '1', args=['-DLOAD_MANUALLY', '-s', 'LZ4=1', '-s', 'FORCE_FILESYSTEM', '-O2', '--closure=1', '-g1', '-s', 'CLOSURE_WARNINGS=quiet'])
'''# non-lz4 for comparison
try:
os.mkdir('files')
except OSError:
pass
shutil.copyfile('file1.txt', os.path.join('files', 'file1.txt'))
shutil.copyfile('file2.txt', os.path.join('files', 'file2.txt'))
shutil.copyfile('file3.txt', os.path.join('files', 'file3.txt'))
out = subprocess.check_output([FILE_PACKAGER, 'files.data', '--preload', 'files/file1.txt', 'files/file2.txt', 'files/file3.txt'])
open('files.js', 'wb').write(out)
self.btest(os.path.join('fs', 'test_lz4fs.cpp'), '2', args=['--pre-js', 'files.js'])'''
  def test_separate_metadata_later(self):
    """Regression test for issue #6654: package data with --separate-metadata
    and verify loading works when the metadata arrives after the main program."""
    # see issue #6654 - we need to handle separate-metadata both when we run before
    # the main program, and when we are run later
    create_file('data.dat', ' ')
    self.run_process([FILE_PACKAGER, 'more.data', '--preload', 'data.dat', '--separate-metadata', '--js-output=more.js'])
    self.btest(os.path.join('browser', 'separate_metadata_later.cpp'), '1', args=['-s', 'FORCE_FILESYSTEM'])
  def test_idbstore(self):
    """Run idbstore.c through a fixed sequence of stages (selected via
    -DSTAGE); each stage's page must report its own stage number. The working
    directory is cleared between stages, but IDB state persists."""
    secret = str(time.time())
    for stage in [0, 1, 2, 3, 0, 1, 2, 0, 0, 1, 4, 2, 5]:
      self.clear()
      self.btest(test_file('idbstore.c'), str(stage), args=['-lidbstore.js', '-DSTAGE=' + str(stage), '-DSECRET=\"' + secret + '\"'])
  def test_idbstore_sync(self):
    """Run idbstore_sync.c with ASYNCIFY at -O3; the page must report '6'."""
    secret = str(time.time())
    self.clear()
    self.btest(test_file('idbstore_sync.c'), '6', args=['-lidbstore.js', '-DSECRET=\"' + secret + '\"', '--memory-init-file', '1', '-O3', '-g2', '-s', 'ASYNCIFY'])
  def test_idbstore_sync_worker(self):
    """Like test_idbstore_sync but proxied to a worker, with a larger initial
    memory; the page must report '6'."""
    secret = str(time.time())
    self.clear()
    self.btest(test_file('idbstore_sync_worker.c'), '6', args=['-lidbstore.js', '-DSECRET=\"' + secret + '\"', '--memory-init-file', '1', '-O3', '-g2', '--proxy-to-worker', '-s', 'INITIAL_MEMORY=80MB', '-s', 'ASYNCIFY'])
  def test_force_exit(self):
    """Run force_exit.c with EXIT_RUNTIME; the page must report '17'."""
    self.btest('force_exit.c', expected='17', args=['-s', 'EXIT_RUNTIME'])
  def test_sdl_pumpevents(self):
    """Verify key events are delivered via SDL_PumpEvents. The pre.js defines
    a keydown(c) helper that synthesizes KeyboardEvent objects for the C test
    to trigger; the program must exit with code 7."""
    # key events should be detected using SDL_PumpEvents
    create_file('pre.js', '''
      function keydown(c) {
        var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
        document.dispatchEvent(event);
      }
    ''')
    self.btest_exit('sdl_pumpevents.c', assert_returncode=7, args=['--pre-js', 'pre.js', '-lSDL', '-lGL'])
  def test_sdl_canvas_size(self):
    """Run sdl_canvas_size.c with a custom shell page; the page must report '1'."""
    self.btest('sdl_canvas_size.c', expected='1',
               args=['-O2', '--minify=0', '--shell-file',
                     test_file('sdl_canvas_size.html'), '-lSDL', '-lGL'])
  @requires_graphics_hardware
  def test_sdl_gl_read(self):
    """Compile and run sdl_gl_read.c (SDL + OpenGL + glReadPixels); the page
    must report '1'."""
    # SDL, OpenGL, readPixels
    self.compile_btest([test_file('sdl_gl_read.c'), '-o', 'something.html', '-lSDL', '-lGL'])
    self.run_browser('something.html', '.', '/report_result?1')
  @requires_graphics_hardware
  def test_sdl_gl_mapbuffers(self):
    """Run sdl_gl_mapbuffers.c with FULL_ES3; the page must report '1' and
    visually show a blue triangle."""
    self.btest('sdl_gl_mapbuffers.c', expected='1', args=['-s', 'FULL_ES3=1', '-lSDL', '-lGL'],
               message='You should see a blue triangle.')
  @requires_graphics_hardware
  def test_sdl_ogl(self):
    """Render screenshot.png via SDL + legacy GL emulation and compare the
    output against the gray/purple reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl_ogl.c', reference='screenshot-gray-purple.png', reference_slack=1,
               args=['-O2', '--minify=0', '--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'],
               message='You should see an image with gray at the top.')
  @requires_graphics_hardware
  def test_sdl_ogl_regal(self):
    """Same rendering check as test_sdl_ogl but using the Regal GL layer
    (-s USE_REGAL) instead of LEGACY_GL_EMULATION."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl_ogl.c', reference='screenshot-gray-purple.png', reference_slack=1,
               args=['-O2', '--minify=0', '--preload-file', 'screenshot.png', '-s', 'USE_REGAL', '-DUSE_REGAL', '--use-preload-plugins', '-lSDL', '-lGL'],
               message='You should see an image with gray at the top.')
  @requires_graphics_hardware
  def test_sdl_ogl_defaultmatrixmode(self):
    """Render with the default GL matrix mode (sdl_ogl_defaultMatrixMode.c)
    under legacy GL emulation and compare against the gray/purple reference."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl_ogl_defaultMatrixMode.c', reference='screenshot-gray-purple.png', reference_slack=1,
               args=['--minify=0', '--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'],
               message='You should see an image with gray at the top.')
  @requires_graphics_hardware
  def test_sdl_ogl_p(self):
    """Render using GL immediate mode with pointer-based vertex data under
    legacy GL emulation; compare against the gray reference image."""
    # Immediate mode with pointers
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl_ogl_p.c', reference='screenshot-gray.png', reference_slack=1,
               args=['--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'],
               message='You should see an image with gray at the top.')
  @requires_graphics_hardware
  def test_sdl_ogl_proc_alias(self):
    """Render via GL proc-address aliases under legacy GL emulation; compare
    against the gray/purple reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl_ogl_proc_alias.c', reference='screenshot-gray-purple.png', reference_slack=1,
               args=['-O2', '-g2', '-s', 'INLINING_LIMIT', '--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'])
  @requires_graphics_hardware
  def test_sdl_fog_simple(self):
    """Render with simple GL fog under legacy GL emulation; compare against
    the fog-simple reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl_fog_simple.c', reference='screenshot-fog-simple.png',
               args=['-O2', '--minify=0', '--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'],
               message='You should see an image with fog.')
  @requires_graphics_hardware
  def test_sdl_fog_negative(self):
    """Render with negative fog parameters under legacy GL emulation; compare
    against the fog-negative reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl_fog_negative.c', reference='screenshot-fog-negative.png',
               args=['--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'],
               message='You should see an image with fog.')
  @requires_graphics_hardware
  def test_sdl_fog_density(self):
    """Render with density-based fog under legacy GL emulation; compare
    against the fog-density reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl_fog_density.c', reference='screenshot-fog-density.png',
               args=['--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'],
               message='You should see an image with fog.')
  @requires_graphics_hardware
  def test_sdl_fog_exp2(self):
    """Render with EXP2 fog under legacy GL emulation; compare against the
    fog-exp2 reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl_fog_exp2.c', reference='screenshot-fog-exp2.png',
               args=['--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'],
               message='You should see an image with fog.')
  @requires_graphics_hardware
  def test_sdl_fog_linear(self):
    """Render with linear fog under legacy GL emulation; compare against the
    fog-linear reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl_fog_linear.c', reference='screenshot-fog-linear.png', reference_slack=1,
               args=['--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins', '-lSDL', '-lGL'],
               message='You should see an image with fog.')
@requires_graphics_hardware
def test_glfw(self):
self.btest('glfw.c', '1', args=['-s', 'LEGACY_GL_EMULATION', '-lglfw', '-lGL'])
self.btest('glfw.c', '1', args=['-s', 'LEGACY_GL_EMULATION', '-s', 'USE_GLFW=2', '-lglfw', '-lGL'])
  def test_glfw_minimal(self):
    """Run glfw_minimal.c (expects '1'), with and without -s USE_GLFW=2."""
    self.btest('glfw_minimal.c', '1', args=['-lglfw', '-lGL'])
    self.btest('glfw_minimal.c', '1', args=['-s', 'USE_GLFW=2', '-lglfw', '-lGL'])
  def test_glfw_time(self):
    """Run test_glfw_time.c with GLFW 3; the page must report '1'."""
    self.btest('test_glfw_time.c', '1', args=['-s', 'USE_GLFW=3', '-lglfw', '-lGL'])
  def _test_egl_base(self, *args):
    """Shared driver: build test_egl.c with the given extra flags and expect
    the page to report '1'."""
    self.compile_btest([test_file('test_egl.c'), '-O2', '-o', 'page.html', '-lEGL', '-lGL'] + list(args))
    self.run_browser('page.html', '', '/report_result?1')
  @requires_graphics_hardware
  def test_egl(self):
    """EGL smoke test with default flags (see _test_egl_base)."""
    self._test_egl_base()
  @requires_threads
  @requires_graphics_hardware
  def test_egl_with_proxy_to_pthread(self):
    """EGL test proxied to a pthread with an offscreen framebuffer."""
    self._test_egl_base('-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'OFFSCREEN_FRAMEBUFFER')
  def _test_egl_width_height_base(self, *args):
    """Shared driver: build test_egl_width_height.c with the given extra flags
    and expect the page to report '1' (canvas size query)."""
    self.compile_btest([test_file('test_egl_width_height.c'), '-O2', '-o', 'page.html', '-lEGL', '-lGL'] + list(args))
    self.run_browser('page.html', 'Should print "(300, 150)" -- the size of the canvas in pixels', '/report_result?1')
  def test_egl_width_height(self):
    """EGL canvas-size query with default flags (see _test_egl_width_height_base)."""
    self._test_egl_width_height_base()
  @requires_threads
  def test_egl_width_height_with_proxy_to_pthread(self):
    """EGL canvas-size query proxied to a pthread."""
    self._test_egl_width_height_base('-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD')
  @requires_graphics_hardware
  def test_egl_createcontext_error(self):
    """Run test_egl_createcontext_error.c; the page must report '1'."""
    self.btest('test_egl_createcontext_error.c', '1', args=['-lEGL', '-lGL'])
def test_worker(self):
# Test running in a web worker
create_file('file.dat', 'data for worker')
html_file = open('main.html', 'w')
html_file.write('''
<html>
<body>
Worker Test
<script>
var worker = new Worker('worker.js');
worker.onmessage = function(event) {
var xhr = new XMLHttpRequest();
xhr.open('GET', 'http://localhost:%s/report_result?' + event.data);
xhr.send();
setTimeout(function() { window.close() }, 1000);
};
</script>
</body>
</html>
''' % self.port)
html_file.close()
for file_data in [1, 0]:
cmd = [EMCC, test_file('hello_world_worker.cpp'), '-o', 'worker.js'] + (['--preload-file', 'file.dat'] if file_data else [])
print(cmd)
self.run_process(cmd)
self.assertExists('worker.js')
self.run_browser('main.html', '', '/report_result?hello from worker, and :' + ('data for w' if file_data else '') + ':')
self.assertContained('you should not see this text when in a worker!', self.run_js('worker.js')) # code should run standalone too
@no_firefox('keeps sending OPTIONS requests, and eventually errors')
def test_chunked_synchronous_xhr(self):
main = 'chunked_sync_xhr.html'
worker_filename = "download_and_checksum_worker.js"
html_file = open(main, 'w')
html_file.write(r"""
<!doctype html>
<html>
<head><meta charset="utf-8"><title>Chunked XHR</title></head>
<html>
<body>
Chunked XHR Web Worker Test
<script>
var worker = new Worker(""" + json.dumps(worker_filename) + r""");
var buffer = [];
worker.onmessage = function(event) {
if (event.data.channel === "stdout") {
var xhr = new XMLHttpRequest();
xhr.open('GET', 'http://localhost:%s/report_result?' + event.data.line);
xhr.send();
setTimeout(function() { window.close() }, 1000);
} else {
if (event.data.trace) event.data.trace.split("\n").map(function(v) { console.error(v); });
if (event.data.line) {
console.error(event.data.line);
} else {
var v = event.data.char;
if (v == 10) {
var line = buffer.splice(0);
console.error(line = line.map(function(charCode){return String.fromCharCode(charCode);}).join(''));
} else {
buffer.push(v);
}
}
}
};
</script>
</body>
</html>
""" % self.port)
html_file.close()
c_source_filename = "checksummer.c"
prejs_filename = "worker_prejs.js"
prejs_file = open(prejs_filename, 'w')
prejs_file.write(r"""
if (typeof(Module) === "undefined") Module = {};
Module["arguments"] = ["/bigfile"];
Module["preInit"] = function() {
FS.createLazyFile('/', "bigfile", "http://localhost:11111/bogus_file_path", true, false);
};
var doTrace = true;
Module["print"] = function(s) { self.postMessage({channel: "stdout", line: s}); };
Module["printErr"] = function(s) { self.postMessage({channel: "stderr", char: s, trace: ((doTrace && s === 10) ? new Error().stack : null)}); doTrace = false; };
""")
prejs_file.close()
# vs. os.path.join(self.get_dir(), filename)
# vs. test_file('hello_world_gles.c')
self.compile_btest([test_file(c_source_filename), '-g', '-s', 'SMALL_XHR_CHUNKS', '-o', worker_filename,
'--pre-js', prejs_filename])
chunkSize = 1024
data = os.urandom(10 * chunkSize + 1) # 10 full chunks and one 1 byte chunk
checksum = zlib.adler32(data) & 0xffffffff # Python 2 compatibility: force bigint
server = multiprocessing.Process(target=test_chunked_synchronous_xhr_server, args=(True, chunkSize, data, checksum, self.port))
server.start()
# block until the server is actually ready
for i in range(60):
try:
urlopen('http://localhost:11111')
break
except Exception as e:
print('(sleep for server)')
time.sleep(1)
if i == 60:
raise e
try:
self.run_browser(main, 'Chunked binary synchronous XHR in Web Workers!', '/report_result?' + str(checksum))
finally:
server.terminate()
# Avoid race condition on cleanup, wait a bit so that processes have released file locks so that test tearDown won't
# attempt to rmdir() files in use.
if WINDOWS:
time.sleep(2)
@requires_graphics_hardware
def test_glgears(self, extra_args=[]):
self.btest('hello_world_gles.c', reference='gears.png', reference_slack=3,
args=['-DHAVE_BUILTIN_SINCOS', '-lGL', '-lglut'] + extra_args)
@requires_graphics_hardware
@requires_threads
def test_glgears_pthreads(self, extra_args=[]):
# test that a program that doesn't use pthreads still works with with pthreads enabled
# (regression test for https://github.com/emscripten-core/emscripten/pull/8059#issuecomment-488105672)
self.test_glgears(['-s', 'USE_PTHREADS'])
  @requires_graphics_hardware
  def test_glgears_long(self):
    """Run the animated gears demo in long mode (-DLONGTEST), accepting any
    frame count from 15 to 499 as the result, with and without proxying to a
    worker."""
    for proxy in [0, 1]:
      print('proxy', proxy)
      self.btest('hello_world_gles.c', expected=list(map(str, range(15, 500))), args=['-DHAVE_BUILTIN_SINCOS', '-DLONGTEST', '-lGL', '-lglut', '-DANIMATE'] + (['--proxy-to-worker'] if proxy else []))
  @requires_graphics_hardware
  def test_glgears_animation(self):
    """Run the animated gears demo in three ES2 variants (plain, _full,
    _full_944), enabling FULL_ES2 for the non-plain ones, and expect the
    shell page to report a successful GL result."""
    es2_suffix = ['', '_full', '_full_944']
    for full_es2 in [0, 1, 2]:
      print(full_es2)
      self.compile_btest([test_file('hello_world_gles%s.c' % es2_suffix[full_es2]), '-o', 'something.html',
                          '-DHAVE_BUILTIN_SINCOS', '-s', 'GL_TESTING', '-lGL', '-lglut',
                          '--shell-file', test_file('hello_world_gles_shell.html')] +
                         (['-s', 'FULL_ES2=1'] if full_es2 else []))
      self.run_browser('something.html', 'You should see animating gears.', '/report_gl_result?true')
  @requires_graphics_hardware
  def test_fulles2_sdlproc(self):
    """Run full_es2_sdlproc.c with FULL_ES2; the program must exit with code 1."""
    self.btest_exit('full_es2_sdlproc.c', assert_returncode=1, args=['-s', 'GL_TESTING', '-DHAVE_BUILTIN_SINCOS', '-s', 'FULL_ES2', '-lGL', '-lSDL', '-lglut'])
  @requires_graphics_hardware
  def test_glgears_deriv(self):
    """Render the gears demo variant using shader derivatives and compare
    against gears.png; also assert the generated HTML does not pull in
    gl-matrix when it isn't needed."""
    self.btest('hello_world_gles_deriv.c', reference='gears.png', reference_slack=2,
               args=['-DHAVE_BUILTIN_SINCOS', '-lGL', '-lglut'],
               message='You should see animating gears.')
    with open('test.html') as f:
      assert 'gl-matrix' not in f.read(), 'Should not include glMatrix when not needed'
  @requires_graphics_hardware
  def test_glbook(self):
    """Build and render a selection of "OpenGL ES 2.0 Programming Guide"
    sample programs, comparing each against its reference PNG. Two samples
    need extra texture assets preloaded; the particle-system sample is also
    built at -O2 for extra optimization coverage. -Werror is dropped since
    the book sources are not warning-clean."""
    self.emcc_args.remove('-Werror')
    programs = self.get_library('glbook', [
      os.path.join('Chapter_2', 'Hello_Triangle', 'CH02_HelloTriangle.o'),
      os.path.join('Chapter_8', 'Simple_VertexShader', 'CH08_SimpleVertexShader.o'),
      os.path.join('Chapter_9', 'Simple_Texture2D', 'CH09_SimpleTexture2D.o'),
      os.path.join('Chapter_9', 'Simple_TextureCubemap', 'CH09_TextureCubemap.o'),
      os.path.join('Chapter_9', 'TextureWrap', 'CH09_TextureWrap.o'),
      os.path.join('Chapter_10', 'MultiTexture', 'CH10_MultiTexture.o'),
      os.path.join('Chapter_13', 'ParticleSystem', 'CH13_ParticleSystem.o'),
    ], configure=None)

    def book_path(*pathelems):
      # Resolve a path inside the glbook test-data directory.
      return test_file('glbook', *pathelems)

    for program in programs:
      print(program)
      basename = os.path.basename(program)
      args = ['-lGL', '-lEGL', '-lX11']
      if basename == 'CH10_MultiTexture.o':
        shutil.copyfile(book_path('Chapter_10', 'MultiTexture', 'basemap.tga'), 'basemap.tga')
        shutil.copyfile(book_path('Chapter_10', 'MultiTexture', 'lightmap.tga'), 'lightmap.tga')
        args += ['--preload-file', 'basemap.tga', '--preload-file', 'lightmap.tga']
      elif basename == 'CH13_ParticleSystem.o':
        shutil.copyfile(book_path('Chapter_13', 'ParticleSystem', 'smoke.tga'), 'smoke.tga')
        args += ['--preload-file', 'smoke.tga', '-O2'] # test optimizations and closure here as well for more coverage

      self.btest(program,
                 reference=book_path(basename.replace('.o', '.png')),
                 args=args)
  @requires_graphics_hardware
  @parameterized({
    'normal': (['-s', 'FULL_ES2=1'],),
    # Enabling FULL_ES3 also enables ES2 automatically
    'full_es3': (['-s', 'FULL_ES3=1'],)
  })
  def test_gles2_emulation(self, args):
    """Build the original (unmodified) glbook sample sources against the
    FULL_ES2/FULL_ES3 emulation layers, linking the book's common helper
    sources, and compare each render against its reference PNG. Two samples
    are commented out pending an INT extension in WebGL."""
    print(args)
    shutil.copyfile(test_file('glbook', 'Chapter_10', 'MultiTexture', 'basemap.tga'), 'basemap.tga')
    shutil.copyfile(test_file('glbook', 'Chapter_10', 'MultiTexture', 'lightmap.tga'), 'lightmap.tga')
    shutil.copyfile(test_file('glbook', 'Chapter_13', 'ParticleSystem', 'smoke.tga'), 'smoke.tga')

    for source, reference in [
      (os.path.join('glbook', 'Chapter_2', 'Hello_Triangle', 'Hello_Triangle_orig.c'), test_file('glbook', 'CH02_HelloTriangle.png')),
      # (os.path.join('glbook', 'Chapter_8', 'Simple_VertexShader', 'Simple_VertexShader_orig.c'), test_file('glbook', 'CH08_SimpleVertexShader.png')), # XXX needs INT extension in WebGL
      (os.path.join('glbook', 'Chapter_9', 'TextureWrap', 'TextureWrap_orig.c'), test_file('glbook', 'CH09_TextureWrap.png')),
      # (os.path.join('glbook', 'Chapter_9', 'Simple_TextureCubemap', 'Simple_TextureCubemap_orig.c'), test_file('glbook', 'CH09_TextureCubemap.png')), # XXX needs INT extension in WebGL
      (os.path.join('glbook', 'Chapter_9', 'Simple_Texture2D', 'Simple_Texture2D_orig.c'), test_file('glbook', 'CH09_SimpleTexture2D.png')),
      (os.path.join('glbook', 'Chapter_10', 'MultiTexture', 'MultiTexture_orig.c'), test_file('glbook', 'CH10_MultiTexture.png')),
      (os.path.join('glbook', 'Chapter_13', 'ParticleSystem', 'ParticleSystem_orig.c'), test_file('glbook', 'CH13_ParticleSystem.png')),
    ]:
      print(source)
      self.btest(source,
                 reference=reference,
                 args=['-I' + test_file('glbook', 'Common'),
                       test_file('glbook', 'Common', 'esUtil.c'),
                       test_file('glbook', 'Common', 'esShader.c'),
                       test_file('glbook', 'Common', 'esShapes.c'),
                       test_file('glbook', 'Common', 'esTransform.c'),
                       '-lGL', '-lEGL', '-lX11',
                       '--preload-file', 'basemap.tga', '--preload-file', 'lightmap.tga', '--preload-file', 'smoke.tga'] + args)
  @requires_graphics_hardware
  def test_clientside_vertex_arrays_es3(self):
    """Render client-side vertex arrays under FULL_ES3 with GLFW 3 and
    compare against the gl_triangle.png reference."""
    self.btest('clientside_vertex_arrays_es3.c', reference='gl_triangle.png', args=['-s', 'FULL_ES3=1', '-s', 'USE_GLFW=3', '-lglfw', '-lGLESv2'])
  def test_emscripten_api(self):
    """Run emscripten_api_browser.cpp with _third exported; the page must
    report '1'."""
    self.btest('emscripten_api_browser.cpp', '1', args=['-s', 'EXPORTED_FUNCTIONS=[_main,_third]', '-lSDL'])
  def test_emscripten_api2(self):
    """Run emscripten_api_browser2.cpp against file-packager output, twice:
    once with the data packaged into the current directory, and once with
    the packager targeting a subdirectory (the .data file is then copied back
    so the page can find it). script1.js calls the exported _set."""
    def setup():
      # Fixture files shared by both runs.
      create_file('script1.js', '''
        Module._set(456);
      ''')
      create_file('file1.txt', 'first')
      create_file('file2.txt', 'second')

    setup()
    self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'file1.txt', 'file2.txt'], stdout=open('script2.js', 'w'))
    self.btest('emscripten_api_browser2.cpp', '1', args=['-s', 'EXPORTED_FUNCTIONS=[_main,_set]', '-s', 'FORCE_FILESYSTEM'])

    # check using file packager to another dir
    self.clear()
    setup()
    ensure_dir('sub')
    self.run_process([FILE_PACKAGER, 'sub/test.data', '--preload', 'file1.txt', 'file2.txt'], stdout=open('script2.js', 'w'))
    shutil.copyfile(os.path.join('sub', 'test.data'), 'test.data')
    self.btest('emscripten_api_browser2.cpp', '1', args=['-s', 'EXPORTED_FUNCTIONS=[_main,_set]', '-s', 'FORCE_FILESYSTEM'])
  def test_emscripten_api_infloop(self):
    """Run emscripten_api_browser_infloop.cpp; the page must report '7'."""
    self.btest('emscripten_api_browser_infloop.cpp', '7')
  def test_emscripten_fs_api(self):
    """Run emscripten_fs_api_browser.cpp; screenshot.png is placed in the
    working directory to be preloaded after the program starts."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png') # preloaded *after* run
    self.btest('emscripten_fs_api_browser.cpp', '1', args=['-lSDL'])
  def test_emscripten_fs_api2(self):
    """Run emscripten_fs_api_browser2.cpp with assertions both off and on;
    the page must report '1' in each case."""
    self.btest('emscripten_fs_api_browser2.cpp', '1', args=['-s', "ASSERTIONS=0"])
    self.btest('emscripten_fs_api_browser2.cpp', '1', args=['-s', "ASSERTIONS=1"])
@requires_threads
def test_emscripten_main_loop(self):
for args in [[], ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'EXIT_RUNTIME']]:
self.btest('emscripten_main_loop.cpp', '0', args=args)
@requires_threads
def test_emscripten_main_loop_settimeout(self):
  """Run the main-loop-via-setTimeout testcase, plain and with pthreads."""
  for args in [
    [],
    # test pthreads + AUTO_JS_LIBRARIES mode as well
    ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'AUTO_JS_LIBRARIES=0']
  ]:
    self.btest('emscripten_main_loop_settimeout.cpp', '1', args=args)
@requires_threads
def test_emscripten_main_loop_and_blocker(self):
for args in [[], ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD']]:
self.btest('emscripten_main_loop_and_blocker.cpp', '0', args=args)
@requires_threads
def test_emscripten_main_loop_and_blocker_exit(self):
# Same as above but tests that EXIT_RUNTIME works with emscripten_main_loop. The
# app should still stay alive until the loop ends
self.btest_exit('emscripten_main_loop_and_blocker.cpp', 0)
@requires_threads
def test_emscripten_main_loop_setimmediate(self):
for args in [[], ['--proxy-to-worker'], ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD']]:
self.btest('emscripten_main_loop_setimmediate.cpp', '1', args=args)
def test_fs_after_main(self):
for args in [[], ['-O1']]:
self.btest('fs_after_main.cpp', '0', args=args)
def test_sdl_quit(self):
self.btest('sdl_quit.c', '1', args=['-lSDL', '-lGL'])
def test_sdl_resize(self):
# FIXME(https://github.com/emscripten-core/emscripten/issues/12978)
self.emcc_args.append('-Wno-deprecated-declarations')
self.btest('sdl_resize.c', '1', args=['-lSDL', '-lGL'])
def test_glshaderinfo(self):
self.btest('glshaderinfo.cpp', '1', args=['-lGL', '-lglut'])
@requires_graphics_hardware
def test_glgetattachedshaders(self):
self.btest('glgetattachedshaders.c', '1', args=['-lGL', '-lEGL'])
# Covered by dEQP text suite (we can remove it later if we add coverage for that).
@requires_graphics_hardware
def test_glframebufferattachmentinfo(self):
self.btest('glframebufferattachmentinfo.c', '1', args=['-lGLESv2', '-lEGL'])
@requires_graphics_hardware
def test_sdlglshader(self):
self.btest('sdlglshader.c', reference='sdlglshader.png', args=['-O2', '--closure=1', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_sdlglshader2(self):
self.btest('sdlglshader2.c', expected='1', args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'], also_proxied=True)
@requires_graphics_hardware
def test_gl_glteximage(self):
self.btest('gl_teximage.c', '1', args=['-lGL', '-lSDL'])
@requires_graphics_hardware
@requires_threads
def test_gl_textures(self):
for args in [[], ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'OFFSCREEN_FRAMEBUFFER']]:
self.btest('gl_textures.cpp', '0', args=['-lGL'] + args)
@requires_graphics_hardware
def test_gl_ps(self):
# pointers and a shader
shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
self.btest('gl_ps.c', reference='gl_ps.png', args=['--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL', '--use-preload-plugins'], reference_slack=1)
@requires_graphics_hardware
def test_gl_ps_packed(self):
# packed data that needs to be strided
shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
self.btest('gl_ps_packed.c', reference='gl_ps.png', args=['--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL', '--use-preload-plugins'], reference_slack=1)
@requires_graphics_hardware
def test_gl_ps_strides(self):
shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
self.btest('gl_ps_strides.c', reference='gl_ps_strides.png', args=['--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL', '--use-preload-plugins'])
@requires_graphics_hardware
def test_gl_ps_worker(self):
shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
self.btest('gl_ps_worker.c', reference='gl_ps.png', args=['--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL', '--use-preload-plugins'], reference_slack=1, also_proxied=True)
@requires_graphics_hardware
def test_gl_renderers(self):
self.btest('gl_renderers.c', reference='gl_renderers.png', args=['-s', 'GL_UNSAFE_OPTS=0', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_gl_stride(self):
self.btest('gl_stride.c', reference='gl_stride.png', args=['-s', 'GL_UNSAFE_OPTS=0', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_gl_vertex_buffer_pre(self):
self.btest('gl_vertex_buffer_pre.c', reference='gl_vertex_buffer_pre.png', args=['-s', 'GL_UNSAFE_OPTS=0', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_gl_vertex_buffer(self):
self.btest('gl_vertex_buffer.c', reference='gl_vertex_buffer.png', args=['-s', 'GL_UNSAFE_OPTS=0', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'], reference_slack=1)
@requires_graphics_hardware
def test_gles2_uniform_arrays(self):
self.btest('gles2_uniform_arrays.cpp', args=['-s', 'GL_ASSERTIONS', '-lGL', '-lSDL'], expected=['1'], also_proxied=True)
@requires_graphics_hardware
def test_gles2_conformance(self):
self.btest('gles2_conformance.cpp', args=['-s', 'GL_ASSERTIONS', '-lGL', '-lSDL'], expected=['1'])
@requires_graphics_hardware
def test_matrix_identity(self):
self.btest('gl_matrix_identity.c', expected=['-1882984448', '460451840', '1588195328', '2411982848'], args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_pre.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_pre.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre_regal(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_pre.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_pre.png'), args=['-s', 'USE_REGAL', '-DUSE_REGAL', '-lGL', '-lSDL'])
@requires_graphics_hardware
@requires_sync_compilation
def test_cubegeom_pre_relocatable(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_pre.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_pre.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL', '-s', 'RELOCATABLE'])
@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre2(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_pre2.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_pre2.png'), args=['-s', 'GL_DEBUG', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL']) # some coverage for GL_DEBUG not breaking the build
@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre3(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_pre3.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_pre2.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@parameterized({
'': ([],),
'tracing': (['-sTRACE_WEBGL_CALLS'],),
})
@requires_graphics_hardware
def test_cubegeom(self, args):
# proxy only in the simple, normal case (we can't trace GL calls when
# proxied)
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom.png'), args=['-O2', '-g', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'] + args, also_proxied=not args)
@requires_graphics_hardware
def test_cubegeom_regal(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom.png'), args=['-O2', '-g', '-DUSE_REGAL', '-s', 'USE_REGAL', '-lGL', '-lSDL'], also_proxied=True)
@requires_threads
@requires_graphics_hardware
def test_cubegeom_regal_mt(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom.png'), args=['-O2', '-g', '-pthread', '-DUSE_REGAL', '-s', 'USE_PTHREADS', '-s', 'USE_REGAL', '-lGL', '-lSDL'], also_proxied=False)
@requires_graphics_hardware
def test_cubegeom_proc(self):
  """Test SDL_GL_GetProcAddress together with legacy GL emulation.

  side.c deliberately defines a global variable named glBindBuffer to
  check that the name collision with the real GL symbol does not break
  linking or the GL emulation layer.
  """
  create_file('side.c', r'''

    extern void* SDL_GL_GetProcAddress(const char *);

    void *glBindBuffer = 0; // same name as the gl function, to check that the collision does not break us

    void *getBindBuffer() {
      if (!glBindBuffer) glBindBuffer = SDL_GL_GetProcAddress("glBindBuffer");
      return glBindBuffer;
    }
  ''')
  # also test -Os in wasm, which uses meta-dce, which should not break legacy gl emulation hacks
  for opts in [[], ['-O1'], ['-Os']]:
    self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_proc.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom.png'), args=opts + ['side.c', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_cubegeom_glew(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_glew.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom.png'), args=['-O2', '--closure=1', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lGLEW', '-lSDL'])
@requires_graphics_hardware
def test_cubegeom_color(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_color.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_color.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_cubegeom_normal(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_normal.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_normal.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'], also_proxied=True)
@requires_graphics_hardware
def test_cubegeom_normal_dap(self): # draw is given a direct pointer to clientside memory, no element array buffer
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_normal_dap.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_normal.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'], also_proxied=True)
@requires_graphics_hardware
def test_cubegeom_normal_dap_far(self): # indices do not start from 0
  self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_normal_dap_far.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_normal.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_cubegeom_normal_dap_far_range(self): # glDrawRangeElements
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_normal_dap_far_range.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_normal.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_cubegeom_normal_dap_far_glda(self): # use glDrawArrays
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_normal_dap_far_glda.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_normal_dap_far_glda.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
@no_firefox('fails on CI but works locally')
def test_cubegeom_normal_dap_far_glda_quad(self): # with quad
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_normal_dap_far_glda_quad.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_normal_dap_far_glda_quad.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_cubegeom_mt(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_mt.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_mt.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL']) # multitexture
@requires_graphics_hardware
def test_cubegeom_color2(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_color2.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_color2.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'], also_proxied=True)
@requires_graphics_hardware
def test_cubegeom_texturematrix(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_texturematrix.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_texturematrix.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_cubegeom_fog(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_fog.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_fog.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre_vao(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_pre_vao.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_pre_vao.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre_vao_regal(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_pre_vao.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_pre_vao.png'), args=['-s', 'USE_REGAL', '-DUSE_REGAL', '-lGL', '-lSDL'])
@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre2_vao(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_pre2_vao.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_pre_vao.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_cubegeom_pre2_vao2(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_pre2_vao2.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_pre2_vao2.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
@no_swiftshader
def test_cubegeom_pre_vao_es(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_pre_vao_es.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_pre_vao.png'), args=['-s', 'FULL_ES2=1', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_cubegeom_u4fv_2(self):
self.btest(os.path.join('third_party', 'cubegeom', 'cubegeom_u4fv_2.c'), reference=os.path.join('third_party', 'cubegeom', 'cubegeom_u4fv_2.png'), args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_cube_explosion(self):
self.btest('cube_explosion.c', reference='cube_explosion.png', args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'], also_proxied=True)
@requires_graphics_hardware
def test_glgettexenv(self):
self.btest('glgettexenv.c', args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'], expected=['1'])
def test_sdl_canvas_blank(self):
self.btest('sdl_canvas_blank.c', args=['-lSDL', '-lGL'], reference='sdl_canvas_blank.png')
def test_sdl_canvas_palette(self):
self.btest('sdl_canvas_palette.c', args=['-lSDL', '-lGL'], reference='sdl_canvas_palette.png')
def test_sdl_canvas_twice(self):
self.btest('sdl_canvas_twice.c', args=['-lSDL', '-lGL'], reference='sdl_canvas_twice.png')
def test_sdl_set_clip_rect(self):
self.btest('sdl_set_clip_rect.c', args=['-lSDL', '-lGL'], reference='sdl_set_clip_rect.png')
def test_sdl_maprgba(self):
self.btest('sdl_maprgba.c', args=['-lSDL', '-lGL'], reference='sdl_maprgba.png', reference_slack=3)
def test_sdl_create_rgb_surface_from(self):
self.btest('sdl_create_rgb_surface_from.c', args=['-lSDL', '-lGL'], reference='sdl_create_rgb_surface_from.png')
def test_sdl_rotozoom(self):
shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
self.btest('sdl_rotozoom.c', reference='sdl_rotozoom.png', args=['--preload-file', 'screenshot.png', '--use-preload-plugins', '-lSDL', '-lGL'], reference_slack=3)
def test_sdl_gfx_primitives(self):
self.btest('sdl_gfx_primitives.c', args=['-lSDL', '-lGL'], reference='sdl_gfx_primitives.png', reference_slack=1)
def test_sdl_canvas_palette_2(self):
  """Render the SDL palette testcase once per color channel.

  The same binary is driven with -r / -g / -b via Module['arguments'],
  and each run is compared against its per-channel reference image.
  The three near-identical copy-pasted create_file/btest pairs are
  collapsed into a single loop; generated file contents, filenames and
  run order (r, g, b) are unchanged.
  """
  # Shared pre-js: disable copyOnLock so the palette is used directly.
  create_file('pre.js', '''
    Module['preRun'].push(function() {
      SDL.defaults.copyOnLock = false;
    });
  ''')

  for channel in ('r', 'g', 'b'):
    create_file('args-%s.js' % channel, '''
      Module['arguments'] = ['-%s'];
    ''' % channel)
    self.btest('sdl_canvas_palette_2.c', reference='sdl_canvas_palette_%s.png' % channel, args=['--pre-js', 'pre.js', '--pre-js', 'args-%s.js' % channel, '-lSDL', '-lGL'])
def test_sdl_ttf_render_text_solid(self):
self.btest('sdl_ttf_render_text_solid.c', reference='sdl_ttf_render_text_solid.png', args=['-O2', '-s', 'INITIAL_MEMORY=16MB', '-lSDL', '-lGL'])
def test_sdl_alloctext(self):
self.btest('sdl_alloctext.c', expected='1', args=['-O2', '-s', 'INITIAL_MEMORY=16MB', '-lSDL', '-lGL'])
def test_sdl_surface_refcount(self):
self.btest('sdl_surface_refcount.c', args=['-lSDL'], expected='1')
def test_sdl_free_screen(self):
self.btest('sdl_free_screen.cpp', args=['-lSDL', '-lGL'], reference='htmltest.png')
@requires_graphics_hardware
def test_glbegin_points(self):
shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
self.btest('glbegin_points.c', reference='glbegin_points.png', args=['--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL', '--use-preload-plugins'])
@requires_graphics_hardware
def test_s3tc(self):
shutil.copyfile(test_file('screenshot.dds'), 'screenshot.dds')
self.btest('s3tc.c', reference='s3tc.png', args=['--preload-file', 'screenshot.dds', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_s3tc_ffp_only(self):
shutil.copyfile(test_file('screenshot.dds'), 'screenshot.dds')
self.btest('s3tc.c', reference='s3tc.png', args=['--preload-file', 'screenshot.dds', '-s', 'LEGACY_GL_EMULATION', '-s', 'GL_FFP_ONLY', '-lGL', '-lSDL'])
@no_chrome('see #7117')
@requires_graphics_hardware
def test_aniso(self):
shutil.copyfile(test_file('water.dds'), 'water.dds')
self.btest('aniso.c', reference='aniso.png', reference_slack=2, args=['--preload-file', 'water.dds', '-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL', '-Wno-incompatible-pointer-types'])
@requires_graphics_hardware
def test_tex_nonbyte(self):
self.btest('tex_nonbyte.c', reference='tex_nonbyte.png', args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_float_tex(self):
self.btest('float_tex.cpp', reference='float_tex.png', args=['-lGL', '-lglut'])
@requires_graphics_hardware
def test_subdata(self):
self.btest('gl_subdata.cpp', reference='float_tex.png', args=['-lGL', '-lglut'])
@requires_graphics_hardware
def test_perspective(self):
self.btest('perspective.c', reference='perspective.png', args=['-s', 'LEGACY_GL_EMULATION', '-lGL', '-lSDL'])
@requires_graphics_hardware
def test_glerror(self):
self.btest('gl_error.c', expected='1', args=['-s', 'LEGACY_GL_EMULATION', '-lGL'])
def test_openal_error(self):
for args in [
[],
['-lopenal', '-s', 'STRICT'],
['--closure=1']
]:
print(args)
self.btest('openal_error.c', expected='1', args=args)
def test_openal_capture_sanity(self):
self.btest('openal_capture_sanity.c', expected='0')
def test_runtimelink(self):
  """Test runtime dynamic linking (MAIN_MODULE + SIDE_MODULE) in the browser.

  main.cpp and supp.cpp call into each other and read each other's
  globals; main() returns suppInt (76), which btest_exit verifies as the
  process exit code.
  """
  create_file('header.h', r'''
    struct point
    {
      int x, y;
    };
  ''')
  create_file('supp.cpp', r'''
    #include <stdio.h>
    #include "header.h"

    extern void mainFunc(int x);
    extern int mainInt;

    void suppFunc(struct point &p) {
      printf("supp: %d,%d\n", p.x, p.y);
      mainFunc(p.x + p.y);
      printf("supp see: %d\n", mainInt);
    }

    int suppInt = 76;
  ''')
  create_file('main.cpp', r'''
    #include <stdio.h>
    #include "header.h"

    extern void suppFunc(struct point &p);
    extern int suppInt;

    void mainFunc(int x) {
      printf("main: %d\n", x);
    }

    int mainInt = 543;

    int main( int argc, const char *argv[] ) {
      struct point p = { 54, 2 };
      suppFunc(p);
      printf("main see: %d\nok.\n", suppInt);
      return suppInt;
    }
  ''')
  # Build the side module first, then pass it to the main-module build.
  self.run_process([EMCC, 'supp.cpp', '-o', 'supp.wasm', '-s', 'SIDE_MODULE', '-O2', '-s', 'EXPORT_ALL'])
  self.btest_exit('main.cpp', args=['-DBROWSER=1', '-s', 'MAIN_MODULE', '-O2', 'supp.wasm', '-s', 'EXPORT_ALL'], assert_returncode=76)
def test_pre_run_deps(self):
  """Verify that a run dependency added in preRun delays program start.

  The pre-js adds a dependency, then removes it 2s later after setting
  Module.okk = 10; the testcase reports that value, proving run() waited.
  """
  # Adding a dependency in preRun will delay run
  create_file('pre.js', '''
    Module.preRun = function() {
      addRunDependency();
      out('preRun called, added a dependency...');
      setTimeout(function() {
        Module.okk = 10;
        removeRunDependency()
      }, 2000);
    };
  ''')

  # Check both without (0) and with (1) a memory init file.
  for mem in [0, 1]:
    self.btest('pre_run_deps.cpp', expected='10', args=['--pre-js', 'pre.js', '--memory-init-file', str(mem)])
@no_wasm_backend('mem init file')
def test_mem_init(self):
create_file('pre.js', '''
function myJSCallback() { // called from main()
Module._note(1);
}
Module.preRun = function() {
addOnPreMain(function() {
Module._note(2);
});
};
''')
create_file('post.js', '''
var assert = function(check, text) {
if (!check) {
console.log('assert failed: ' + text);
maybeReportResultToServer(9);
}
}
Module._note(4); // this happens too early! and is overwritten when the mem init arrives
''')
# with assertions, we notice when memory was written to too early
self.btest('mem_init.cpp', expected='9', args=['-s', 'WASM=0', '--pre-js', 'pre.js', '--post-js', 'post.js', '--memory-init-file', '1'])
# otherwise, we just overwrite
self.btest('mem_init.cpp', expected='3', args=['-s', 'WASM=0', '--pre-js', 'pre.js', '--post-js', 'post.js', '--memory-init-file', '1', '-s', 'ASSERTIONS=0'])
@no_wasm_backend('mem init file')
def test_mem_init_request(self):
def test(what, status):
print(what, status)
create_file('pre.js', '''
var xhr = Module.memoryInitializerRequest = new XMLHttpRequest();
xhr.open('GET', "''' + what + '''", true);
xhr.responseType = 'arraybuffer';
xhr.send(null);
console.warn = function(x) {
if (x.indexOf('a problem seems to have happened with Module.memoryInitializerRequest') >= 0) {
var xhr = new XMLHttpRequest();
xhr.open('GET', 'http://localhost:%s/report_result?0');
setTimeout(xhr.onload = function() {
console.log('close!');
window.close();
}, 1000);
xhr.send();
throw 'halt';
}
console.log('WARNING: ' + x);
};
''' % self.port)
self.btest('mem_init_request.cpp', expected=status, args=['-s', 'WASM=0', '--pre-js', 'pre.js', '--memory-init-file', '1'])
test('test.html.mem', '1')
test('nothing.nowhere', '0')
def test_runtime_misuse(self):
post_prep = '''
var expected_ok = false;
function doCcall(n) {
ccall('note', 'string', ['number'], [n]);
}
var wrapped = cwrap('note', 'string', ['number']); // returns a string to suppress cwrap optimization
function doCwrapCall(n) {
var str = wrapped(n);
out('got ' + str);
assert(str === 'silly-string');
}
function doDirectCall(n) {
Module['_note'](n);
}
'''
post_test = '''
var ok = false;
try {
doCcall(1);
ok = true; // should fail and not reach here, runtime is not ready yet so ccall will abort
} catch(e) {
out('expected fail 1');
assert(e.toString().indexOf('assert') >= 0); // assertion, not something else
ABORT = false; // hackish
}
assert(ok === expected_ok);
ok = false;
try {
doCwrapCall(2);
ok = true; // should fail and not reach here, runtime is not ready yet so cwrap call will abort
} catch(e) {
out('expected fail 2');
assert(e.toString().indexOf('assert') >= 0); // assertion, not something else
ABORT = false; // hackish
}
assert(ok === expected_ok);
ok = false;
try {
doDirectCall(3);
ok = true; // should fail and not reach here, runtime is not ready yet so any code execution
} catch(e) {
out('expected fail 3');
assert(e.toString().indexOf('assert') >= 0); // assertion, not something else
ABORT = false; // hackish
}
assert(ok === expected_ok);
'''
post_hook = r'''
function myJSCallback() {
// Run on the next event loop, as code may run in a postRun right after main().
setTimeout(function() {
var xhr = new XMLHttpRequest();
assert(Module.noted);
xhr.open('GET', 'http://localhost:%s/report_result?' + HEAP32[Module.noted>>2]);
xhr.send();
setTimeout(function() { window.close() }, 1000);
}, 0);
// called from main, this is an ok time
doCcall(100);
doCwrapCall(200);
doDirectCall(300);
}
''' % self.port
create_file('pre_runtime.js', r'''
Module.onRuntimeInitialized = function(){
myJSCallback();
};
''')
for filename, extra_args, second_code in [
('runtime_misuse.cpp', [], 600),
('runtime_misuse_2.cpp', ['--pre-js', 'pre_runtime.js'], 601) # 601, because no main means we *do* run another call after exit()
]:
for mode in [[], ['-s', 'WASM=0']]:
print('\n', filename, extra_args, mode)
print('mem init, so async, call too early')
create_file('post.js', post_prep + post_test + post_hook)
self.btest(filename, expected='600', args=['--post-js', 'post.js', '--memory-init-file', '1', '-s', 'EXIT_RUNTIME'] + extra_args + mode, reporting=Reporting.NONE)
print('sync startup, call too late')
create_file('post.js', post_prep + 'Module.postRun.push(function() { ' + post_test + ' });' + post_hook)
self.btest(filename, expected=str(second_code), args=['--post-js', 'post.js', '-s', 'EXIT_RUNTIME'] + extra_args + mode, reporting=Reporting.NONE)
print('sync, runtime still alive, so all good')
create_file('post.js', post_prep + 'expected_ok = true; Module.postRun.push(function() { ' + post_test + ' });' + post_hook)
self.btest(filename, expected='606', args=['--post-js', 'post.js'] + extra_args + mode, reporting=Reporting.NONE)
def test_cwrap_early(self):
self.btest(os.path.join('browser', 'cwrap_early.cpp'), args=['-O2', '-s', 'ASSERTIONS', '--pre-js', test_file('browser', 'cwrap_early.js'), '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=[cwrap]'], expected='0')
def test_worker_api(self):
self.compile_btest([test_file('worker_api_worker.cpp'), '-o', 'worker.js', '-s', 'BUILD_AS_WORKER', '-s', 'EXPORTED_FUNCTIONS=[_one]'])
self.btest('worker_api_main.cpp', expected='566')
def test_worker_api_2(self):
self.compile_btest([test_file('worker_api_2_worker.cpp'), '-o', 'worker.js', '-s', 'BUILD_AS_WORKER', '-O2', '--minify=0', '-s', 'EXPORTED_FUNCTIONS=[_one,_two,_three,_four]', '--closure=1'])
self.btest('worker_api_2_main.cpp', args=['-O2', '--minify=0'], expected='11')
def test_worker_api_3(self):
self.compile_btest([test_file('worker_api_3_worker.cpp'), '-o', 'worker.js', '-s', 'BUILD_AS_WORKER', '-s', 'EXPORTED_FUNCTIONS=[_one]'])
self.btest('worker_api_3_main.cpp', expected='5')
def test_worker_api_sleep(self):
self.compile_btest([test_file('worker_api_worker_sleep.cpp'), '-o', 'worker.js', '-s', 'BUILD_AS_WORKER', '-s', 'EXPORTED_FUNCTIONS=[_one]', '-s', 'ASYNCIFY'])
self.btest('worker_api_main.cpp', expected='566')
def test_emscripten_async_wget2(self):
self.btest('test_emscripten_async_wget2.cpp', expected='0')
def test_module(self):
self.run_process([EMCC, test_file('browser_module.cpp'), '-o', 'lib.wasm', '-O2', '-s', 'SIDE_MODULE', '-s', 'EXPORTED_FUNCTIONS=[_one,_two]'])
self.btest('browser_main.cpp', args=['-O2', '-s', 'MAIN_MODULE'], expected='8')
@parameterized({
  'non-lz4': ([],),
  'lz4': (['-s', 'LZ4'],)
})
def test_preload_module(self, args):
  """Test that a preloaded side module (.so) is instantiated ahead of time.

  The main program first checks Module['preloadedWasm'] for the library,
  then dlopens it and calls a function from it; exit code 0 means every
  step succeeded. Parameterized to run with and without LZ4 compression
  of the preloaded assets.
  """
  create_file('library.c', r'''
    #include <stdio.h>
    int library_func() {
      return 42;
    }
  ''')
  self.run_process([EMCC, 'library.c', '-s', 'SIDE_MODULE', '-O2', '-o', 'library.wasm', '-s', 'EXPORT_ALL'])
  # dlopen is done on the .so name, so give the wasm module that extension.
  os.rename('library.wasm', 'library.so')
  create_file('main.c', r'''
    #include <dlfcn.h>
    #include <stdio.h>
    #include <emscripten.h>
    int main() {
      int found = EM_ASM_INT(
        return Module['preloadedWasm']['/library.so'] !== undefined;
      );
      if (!found) {
        return 1;
      }
      void *lib_handle = dlopen("/library.so", RTLD_NOW);
      if (!lib_handle) {
        return 2;
      }
      typedef int (*voidfunc)();
      voidfunc x = (voidfunc)dlsym(lib_handle, "library_func");
      if (!x || x() != 42) {
        return 3;
      }
      return 0;
    }
  ''')
  # '.@/' preloads the whole current directory at the FS root.
  self.btest_exit(
    'main.c',
    args=['-s', 'MAIN_MODULE', '--preload-file', '.@/', '-O2', '--use-preload-plugins', '-s', 'EXPORT_ALL'] + args)
def test_mmap_file(self):
create_file('data.dat', 'data from the file ' + ('.' * 9000))
self.btest(test_file('mmap_file.c'), expected='1', args=['--preload-file', 'data.dat'])
# This does not actually verify anything except that --cpuprofiler and --memoryprofiler compiles.
# Run interactive.test_cpuprofiler_memoryprofiler for interactive testing.
@requires_graphics_hardware
def test_cpuprofiler_memoryprofiler(self):
self.btest('hello_world_gles.c', expected='0', args=['-DLONGTEST=1', '-DTEST_MEMORYPROFILER_ALLOCATIONS_MAP=1', '-O2', '--cpuprofiler', '--memoryprofiler', '-lGL', '-lglut', '-DANIMATE'])
def test_uuid(self):
  """Check that -luuid produces working, high-quality random UUIDs.

  Runs the testcase both in a shell JS engine (via self.run_js) and in
  the browser. Closure is enabled for the shell build so we can verify
  that the crypto entry points survive minification.
  """
  # Run with ./runner browser.test_uuid
  # We run this test in Node/SPIDERMONKEY and browser environments because we try to make use of
  # high quality crypto random number generators such as crypto.getRandomValues or randomBytes (if available).

  # First run tests in Node and/or SPIDERMONKEY using self.run_js. Use closure compiler so we can check that
  # require('crypto').randomBytes and window.crypto.getRandomValues doesn't get minified out.
  self.run_process([EMCC, '-O2', '--closure=1', test_file('uuid', 'test.c'), '-o', 'test.js', '-luuid'])

  # Read via a context manager; the previous bare open() leaked the handle.
  with open('test.js') as f:
    test_js_closure = f.read()

  # Check that test.js compiled with --closure 1 contains ").randomBytes" and "window.crypto.getRandomValues"
  assert ").randomBytes" in test_js_closure
  assert "window.crypto.getRandomValues" in test_js_closure

  out = self.run_js('test.js')
  print(out)

  # Tidy up files that might have been created by this test.
  try_delete(test_file('uuid', 'test.js'))
  try_delete(test_file('uuid', 'test.js.map'))

  # Now run test in browser
  self.btest(test_file('uuid', 'test.c'), '1', args=['-luuid'])
@requires_graphics_hardware
def test_glew(self):
self.btest(test_file('glew.c'), args=['-lGL', '-lSDL', '-lGLEW'], expected='1')
self.btest(test_file('glew.c'), args=['-lGL', '-lSDL', '-lGLEW', '-s', 'LEGACY_GL_EMULATION'], expected='1')
self.btest(test_file('glew.c'), args=['-lGL', '-lSDL', '-lGLEW', '-DGLEW_MX'], expected='1')
self.btest(test_file('glew.c'), args=['-lGL', '-lSDL', '-lGLEW', '-s', 'LEGACY_GL_EMULATION', '-DGLEW_MX'], expected='1')
def test_doublestart_bug(self):
  """Regression test: an add/remove run-dependency pair in preRun must not
  cause the program to start (and report its result) twice."""
  create_file('pre.js', r'''
    if (!Module['preRun']) Module['preRun'] = [];
    Module["preRun"].push(function () {
      addRunDependency('test_run_dependency');
      removeRunDependency('test_run_dependency');
    });
  ''')

  self.btest('doublestart.c', args=['--pre-js', 'pre.js'], expected='1')
@parameterized({
'': ([],),
'closure': (['-O2', '-g1', '--closure=1', '-s', 'HTML5_SUPPORT_DEFERRING_USER_SENSITIVE_REQUESTS=0'],),
'pthread': (['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'],),
'legacy': (['-s', 'MIN_FIREFOX_VERSION=0', '-s', 'MIN_SAFARI_VERSION=0', '-s', 'MIN_IE_VERSION=0', '-s', 'MIN_EDGE_VERSION=0', '-s', 'MIN_CHROME_VERSION=0'],)
})
@requires_threads
def test_html5_core(self, opts):
self.btest(test_file('test_html5_core.c'), args=opts, expected='0')
@requires_threads
def test_html5_gamepad(self):
for opts in [[], ['-O2', '-g1', '--closure=1'], ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD']]:
print(opts)
self.btest(test_file('test_gamepad.c'), args=[] + opts, expected='0')
@requires_graphics_hardware
def test_html5_webgl_create_context_no_antialias(self):
for opts in [[], ['-O2', '-g1', '--closure=1'], ['-s', 'FULL_ES2=1']]:
print(opts)
self.btest(test_file('webgl_create_context.cpp'), args=opts + ['-DNO_ANTIALIAS', '-lGL'], expected='0')
# This test supersedes the one above, but it's skipped in the CI because anti-aliasing is not well supported by the Mesa software renderer.
@requires_threads
@requires_graphics_hardware
def test_html5_webgl_create_context(self):
for opts in [[], ['-O2', '-g1', '--closure=1'], ['-s', 'FULL_ES2=1'], ['-s', 'USE_PTHREADS']]:
print(opts)
self.btest(test_file('webgl_create_context.cpp'), args=opts + ['-lGL'], expected='0')
@requires_graphics_hardware
# Verify bug https://github.com/emscripten-core/emscripten/issues/4556: creating a WebGL context to Module.canvas without an ID explicitly assigned to it.
def test_html5_webgl_create_context2(self):
self.btest(test_file('webgl_create_context2.cpp'), expected='0')
@requires_graphics_hardware
# Verify bug https://github.com/emscripten-core/emscripten/issues/4556: creating a WebGL context to Module.canvas without an ID explicitly assigned to it.
# (this only makes sense in the old deprecated -s DISABLE_DEPRECATED_FIND_EVENT_TARGET_BEHAVIOR=0 mode)
def test_html5_special_event_targets(self):
self.btest(test_file('browser', 'html5_special_event_targets.cpp'), args=['-lGL'], expected='0')
@requires_graphics_hardware
def test_html5_webgl_destroy_context(self):
for opts in [[], ['-O2', '-g1'], ['-s', 'FULL_ES2=1']]:
print(opts)
self.btest(test_file('webgl_destroy_context.cpp'), args=opts + ['--shell-file', test_file('webgl_destroy_context_shell.html'), '-lGL'], expected='0')
  @no_chrome('see #7373')
  @requires_graphics_hardware
  def test_webgl_context_params(self):
    """Read back color-buffer pixels with context-creation parameters; expects '0'. Skipped on Windows (Mozilla bug 1310005)."""
    if WINDOWS:
      self.skipTest('SKIPPED due to bug https://bugzilla.mozilla.org/show_bug.cgi?id=1310005 - WebGL implementation advertises implementation defined GL_IMPLEMENTATION_COLOR_READ_TYPE/FORMAT pair that it cannot read with')
    self.btest(test_file('webgl_color_buffer_readpixels.cpp'), args=['-lGL'], expected='0')
  # Test for PR#5373 (https://github.com/emscripten-core/emscripten/pull/5373)
  def test_webgl_shader_source_length(self):
    """glShaderSource with explicit source lengths; expects '0', with and without FULL_ES2."""
    for opts in [[], ['-s', 'FULL_ES2=1']]:
      print(opts)
      self.btest(test_file('webgl_shader_source_length.cpp'), args=opts + ['-lGL'], expected='0')
  # Tests calling glGetString(GL_UNMASKED_VENDOR_WEBGL).
  def test_webgl_unmasked_vendor_webgl(self):
    """Query the unmasked vendor string via glGetString; expects '0'."""
    self.btest(test_file('webgl_unmasked_vendor_webgl.c'), args=['-lGL'], expected='0')
  def test_webgl2(self):
    """Core WebGL 2 functionality (MAX_WEBGL_VERSION=2) under several build configurations; expects '0'."""
    for opts in [
      ['-s', 'MIN_CHROME_VERSION=0'],
      ['-O2', '-g1', '--closure=1', '-s', 'WORKAROUND_OLD_WEBGL_UNIFORM_UPLOAD_IGNORED_OFFSET_BUG'],
      ['-s', 'FULL_ES2=1'],
    ]:
      print(opts)
      self.btest(test_file('webgl2.cpp'), args=['-s', 'MAX_WEBGL_VERSION=2', '-lGL'] + opts, expected='0')
  # Tests the WebGL 2 glGetBufferSubData() functionality.
  @requires_graphics_hardware
  def test_webgl2_get_buffer_sub_data(self):
    """glGetBufferSubData (WebGL 2 only); expects '0'."""
    self.btest(test_file('webgl2_get_buffer_sub_data.cpp'), args=['-s', 'MAX_WEBGL_VERSION=2', '-lGL'], expected='0')
  @requires_graphics_hardware
  @requires_threads
  def test_webgl2_pthreads(self):
    """WebGL 2 rendering on the main thread in a pthreads-enabled build; expects '0'."""
    # test that a program can be compiled with pthreads and render WebGL2 properly on the main thread
    # (the testcase doesn't even use threads, but is compiled with thread support).
    self.btest(test_file('webgl2.cpp'), args=['-s', 'MAX_WEBGL_VERSION=2', '-lGL', '-s', 'USE_PTHREADS'], expected='0')
  def test_webgl2_objects(self):
    """WebGL 2 object types (VAOs, queries, etc. per the testcase); expects '0'."""
    self.btest(test_file('webgl2_objects.cpp'), args=['-s', 'MAX_WEBGL_VERSION=2', '-lGL'], expected='0')
  def test_html5_webgl_api(self):
    """HTML5 WebGL API with OffscreenCanvas, offscreen framebuffer, and plain modes; expects '0'.

    The OffscreenCanvas mode is skipped when EMTEST_LACKS_OFFSCREEN_CANVAS is set.
    """
    for mode in [['-s', 'OFFSCREENCANVAS_SUPPORT', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'],
                 ['-s', 'OFFSCREEN_FRAMEBUFFER', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'],
                 []]:
      if 'OFFSCREENCANVAS_SUPPORT' in mode and os.getenv('EMTEST_LACKS_OFFSCREEN_CANVAS'):
        continue
      self.btest(test_file('html5_webgl.c'), args=['-s', 'MAX_WEBGL_VERSION=2', '-lGL'] + mode, expected='0')
  def test_webgl2_ubos(self):
    """WebGL 2 uniform buffer objects; expects '0'."""
    self.btest(test_file('webgl2_ubos.cpp'), args=['-s', 'MAX_WEBGL_VERSION=2', '-lGL'], expected='0')
  @requires_graphics_hardware
  def test_webgl2_garbage_free_entrypoints(self):
    """Garbage-free GL entry points, in both WebGL2 and default builds; expects '1'."""
    self.btest(test_file('webgl2_garbage_free_entrypoints.cpp'), args=['-s', 'MAX_WEBGL_VERSION=2', '-DTEST_WEBGL2=1'], expected='1')
    self.btest(test_file('webgl2_garbage_free_entrypoints.cpp'), expected='1')
  @requires_graphics_hardware
  def test_webgl2_backwards_compatibility_emulation(self):
    """WEBGL2_BACKWARDS_COMPATIBILITY_EMULATION mode; expects '0'."""
    self.btest(test_file('webgl2_backwards_compatibility_emulation.cpp'), args=['-s', 'MAX_WEBGL_VERSION=2', '-s', 'WEBGL2_BACKWARDS_COMPATIBILITY_EMULATION=1'], expected='0')
  @requires_graphics_hardware
  def test_webgl2_runtime_no_context(self):
    """Fall back to a manual WebGL1 context when WebGL2 context creation fails; expects '1'."""
    # tests that if we support WebGL1 and 2, and WebGL2RenderingContext exists,
    # but context creation fails, that we can then manually try to create a
    # WebGL1 context and succeed.
    self.btest(test_file('test_webgl2_runtime_no_context.cpp'), args=['-s', 'MAX_WEBGL_VERSION=2'], expected='1')
  @requires_graphics_hardware
  def test_webgl2_invalid_teximage2d_type(self):
    """glTexImage2D called with an invalid type in WebGL 2; expects '0'."""
    self.btest(test_file('webgl2_invalid_teximage2d_type.cpp'), args=['-s', 'MAX_WEBGL_VERSION=2'], expected='0')
  @requires_graphics_hardware
  def test_webgl_with_closure(self):
    """WebGL build compiled with the Closure compiler enabled; expects '0'."""
    self.btest(test_file('webgl_with_closure.cpp'), args=['-O2', '-s', 'MAX_WEBGL_VERSION=2', '--closure=1', '-lGL'], expected='0')
  # Tests that -s GL_ASSERTIONS=1 and glVertexAttribPointer with packed types works
  @requires_graphics_hardware
  def test_webgl2_packed_types(self):
    """Draw with packed vertex attribute types under GL_ASSERTIONS; expects '0'."""
    self.btest(test_file('webgl2_draw_packed_triangle.c'), args=['-lGL', '-s', 'MAX_WEBGL_VERSION=2', '-s', 'GL_ASSERTIONS'], expected='0')
  @requires_graphics_hardware
  def test_webgl2_pbo(self):
    """WebGL 2 pixel buffer objects; expects '0'."""
    self.btest(test_file('webgl2_pbo.cpp'), args=['-s', 'MAX_WEBGL_VERSION=2', '-lGL'], expected='0')
  @no_firefox('fails on CI likely due to GPU drivers there')
  @requires_graphics_hardware
  def test_webgl2_sokol_mipmap(self):
    """Third-party sokol mipmap sample, compared against a reference image (slack 2)."""
    self.btest(test_file('third_party', 'sokol', 'mipmap-emsc.c'), args=['-s', 'MAX_WEBGL_VERSION=2', '-lGL', '-O1'],
               reference=os.path.join('third_party', 'sokol', 'mipmap-emsc.png'), reference_slack=2)
  @no_firefox('fails on CI likely due to GPU drivers there')
  @requires_graphics_hardware
  def test_webgl2_sokol_mrt(self):
    """Third-party sokol multiple-render-target sample, compared against a reference image."""
    self.btest(test_file('third_party', 'sokol', 'mrt-emcc.c'), args=['-s', 'MAX_WEBGL_VERSION=2', '-lGL'],
               reference=os.path.join('third_party', 'sokol', 'mrt-emcc.png'))
  @requires_graphics_hardware
  def test_webgl2_sokol_arraytex(self):
    """Third-party sokol array-texture sample, compared against a reference image."""
    self.btest(test_file('third_party', 'sokol', 'arraytex-emsc.c'), args=['-s', 'MAX_WEBGL_VERSION=2', '-lGL'],
               reference=os.path.join('third_party', 'sokol', 'arraytex-emsc.png'))
  def test_sdl_touch(self):
    """SDL touch events, driven automatically (-DAUTOMATE_SUCCESS=1); expects '0'."""
    for opts in [[], ['-O2', '-g1', '--closure=1']]:
      print(opts)
      self.btest(test_file('sdl_touch.c'), args=opts + ['-DAUTOMATE_SUCCESS=1', '-lSDL', '-lGL'], expected='0')
  def test_html5_mouse(self):
    """HTML5 mouse events, driven automatically (-DAUTOMATE_SUCCESS=1); expects '0'."""
    for opts in [[], ['-O2', '-g1', '--closure=1']]:
      print(opts)
      self.btest(test_file('test_html5_mouse.c'), args=opts + ['-DAUTOMATE_SUCCESS=1'], expected='0')
  def test_sdl_mousewheel(self):
    """SDL mouse-wheel events, driven automatically (-DAUTOMATE_SUCCESS=1); expects '0'."""
    for opts in [[], ['-O2', '-g1', '--closure=1']]:
      print(opts)
      self.btest(test_file('test_sdl_mousewheel.c'), args=opts + ['-DAUTOMATE_SUCCESS=1', '-lSDL', '-lGL'], expected='0')
  def test_wget(self):
    """emscripten_wget of a local file under ASYNCIFY; expects '1'."""
    create_file('test.txt', 'emscripten')
    self.btest(test_file('test_wget.c'), expected='1', args=['-s', 'ASYNCIFY'])
  def test_wget_data(self):
    """emscripten_wget_data of a local file under ASYNCIFY; expects '1'."""
    create_file('test.txt', 'emscripten')
    self.btest(test_file('test_wget_data.c'), expected='1', args=['-O2', '-g2', '-s', 'ASYNCIFY'])
  def test_locate_file(self):
    """Module.locateFile redirects runtime file loads (wasm / mem-init / preload data) into 'sub/'.

    Runs both via a --pre-js and via a Module definition in the shell HTML; for
    the non-wasm build, also verifies that the mem-init XHR completed with 200.
    """
    for wasm in [0, 1]:
      print('wasm', wasm)
      self.clear()
      # Program reads a preloaded file and checks its exact contents.
      create_file('src.cpp', r'''
        #include <stdio.h>
        #include <string.h>
        #include <assert.h>
        int main() {
          FILE *f = fopen("data.txt", "r");
          assert(f && "could not open file");
          char buf[100];
          int num = fread(buf, 1, 20, f);
          assert(num == 20 && "could not read 20 bytes");
          buf[20] = 0;
          fclose(f);
          int result = !strcmp("load me right before", buf);
          printf("|%s| : %d\n", buf, result);
          REPORT_RESULT(result);
          return 0;
        }
      ''')
      create_file('data.txt', 'load me right before...')
      create_file('pre.js', 'Module.locateFile = function(x) { return "sub/" + x };')
      self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'data.txt'], stdout=open('data.js', 'w'))
      # put pre.js first, then the file packager data, so locateFile is there for the file loading code
      self.compile_btest(['src.cpp', '-O2', '-g', '--pre-js', 'pre.js', '--pre-js', 'data.js', '-o', 'page.html', '-s', 'FORCE_FILESYSTEM', '-s', 'WASM=' + str(wasm)])
      ensure_dir('sub')
      # Move the runtime-loaded artifacts so they are only reachable via locateFile.
      if wasm:
        shutil.move('page.wasm', os.path.join('sub', 'page.wasm'))
      else:
        shutil.move('page.html.mem', os.path.join('sub', 'page.html.mem'))
      shutil.move('test.data', os.path.join('sub', 'test.data'))
      self.run_browser('page.html', None, '/report_result?1')
      # alternatively, put locateFile in the HTML
      print('in html')
      create_file('shell.html', '''
        <body>
          <script>
            var Module = {
              locateFile: function(x) { return "sub/" + x }
            };
          </script>
          {{{ SCRIPT }}}
        </body>
      ''')
      def in_html(expected, args=[]):
        # Rebuild with the shell file and re-check that loading still succeeds.
        self.compile_btest(['src.cpp', '-O2', '-g', '--shell-file', 'shell.html', '--pre-js', 'data.js', '-o', 'page.html', '-s', 'SAFE_HEAP', '-s', 'ASSERTIONS', '-s', 'FORCE_FILESYSTEM', '-s', 'WASM=' + str(wasm)] + args)
        if wasm:
          shutil.move('page.wasm', os.path.join('sub', 'page.wasm'))
        else:
          shutil.move('page.html.mem', os.path.join('sub', 'page.html.mem'))
        self.run_browser('page.html', None, '/report_result?' + expected)
      in_html('1')
      # verify that the mem init request succeeded in the latter case
      if not wasm:
        create_file('src.cpp', r'''
          #include <stdio.h>
          #include <emscripten.h>
          int main() {
            int result = EM_ASM_INT({
              return Module['memoryInitializerRequest'].status;
            });
            printf("memory init request: %d\n", result);
            REPORT_RESULT(result);
            return 0;
          }
        ''')
        in_html('200')
  @requires_graphics_hardware
  @parameterized({
    'no_gl': (['-DCLIENT_API=GLFW_NO_API'],),
    'gl_es': (['-DCLIENT_API=GLFW_OPENGL_ES_API'],)
  })
  def test_glfw3(self, args):
    """GLFW3 basics with and without a GL client API, across optimization modes; expects '1'."""
    for opts in [[], ['-s', 'LEGACY_GL_EMULATION'], ['-Os', '--closure=1']]:
      print(opts)
      self.btest(test_file('glfw3.c'), args=['-s', 'USE_GLFW=3', '-lglfw', '-lGL'] + args + opts, expected='1')
  @requires_graphics_hardware
  def test_glfw_events(self):
    """GLFW event handling under both USE_GLFW=2 and USE_GLFW=3; expects '1'."""
    self.btest(test_file('glfw_events.c'), args=['-s', 'USE_GLFW=2', "-DUSE_GLFW=2", '-lglfw', '-lGL'], expected='1')
    self.btest(test_file('glfw_events.c'), args=['-s', 'USE_GLFW=3', "-DUSE_GLFW=3", '-lglfw', '-lGL'], expected='1')
  @requires_graphics_hardware
  def test_sdl2_image(self):
    """SDL2_image JPEG load via --preload-file at two mount points, with/without mem-init file; expects 600."""
    # load an image file, get pixel data. Also O2 coverage for --preload-file, and memory-init
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpg')
    for mem in [0, 1]:
      for dest, dirname, basename in [('screenshot.jpg', '/', 'screenshot.jpg'),
                                      ('screenshot.jpg@/assets/screenshot.jpg', '/assets', 'screenshot.jpg')]:
        self.compile_btest([
          test_file('sdl2_image.c'), '-o', 'page.html', '-O2', '--memory-init-file', str(mem),
          '--preload-file', dest, '-DSCREENSHOT_DIRNAME="' + dirname + '"', '-DSCREENSHOT_BASENAME="' + basename + '"', '-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '--use-preload-plugins'
        ])
        self.run_browser('page.html', '', '/report_result?600')
  @requires_graphics_hardware
  def test_sdl2_image_jpeg(self):
    """SDL2_image load of a file with a .jpeg extension; expects 600."""
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpeg')
    self.compile_btest([
      test_file('sdl2_image.c'), '-o', 'page.html',
      '--preload-file', 'screenshot.jpeg', '-DSCREENSHOT_DIRNAME="/"', '-DSCREENSHOT_BASENAME="screenshot.jpeg"', '-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '--use-preload-plugins'
    ])
    self.run_browser('page.html', '', '/report_result?600')
  @requires_graphics_hardware
  def test_sdl2_image_formats(self):
    """SDL2_IMAGE_FORMATS selection: png build expects 512, jpg build expects 600."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.jpg')
    self.btest('sdl2_image.c', expected='512', args=['--preload-file', 'screenshot.png', '-DSCREENSHOT_DIRNAME="/"', '-DSCREENSHOT_BASENAME="screenshot.png"',
                                                     '-DNO_PRELOADED', '-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-s', 'SDL2_IMAGE_FORMATS=["png"]'])
    self.btest('sdl2_image.c', expected='600', args=['--preload-file', 'screenshot.jpg', '-DSCREENSHOT_DIRNAME="/"', '-DSCREENSHOT_BASENAME="screenshot.jpg"',
                                                     '-DBITSPERPIXEL=24', '-DNO_PRELOADED', '-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-s', 'SDL2_IMAGE_FORMATS=["jpg"]'])
  def test_sdl2_key(self):
    """SDL2 keyboard input: pre.js synthesizes keydown/keypress/keyup events and pumps _one() each frame."""
    create_file('pre.js', '''
      Module.postRun = function() {
        function doOne() {
          Module._one();
          setTimeout(doOne, 1000/60);
        }
        setTimeout(doOne, 1000/60);
      }
      function keydown(c) {
        var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
        var prevented = !document.dispatchEvent(event);
        //send keypress if not prevented
        if (!prevented) {
          var event = new KeyboardEvent("keypress", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
          document.dispatchEvent(event);
        }
      }
      function keyup(c) {
        var event = new KeyboardEvent("keyup", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
        document.dispatchEvent(event);
      }
    ''')
    self.compile_btest([test_file('sdl2_key.c'), '-o', 'page.html', '-s', 'USE_SDL=2', '--pre-js', 'pre.js', '-s', 'EXPORTED_FUNCTIONS=[_main,_one]'])
    self.run_browser('page.html', '', '/report_result?37182145')
  def test_sdl2_text(self):
    """SDL2 text input: pre.js synthesizes keypress events on document.body and pumps _one() each frame."""
    create_file('pre.js', '''
      Module.postRun = function() {
        function doOne() {
          Module._one();
          setTimeout(doOne, 1000/60);
        }
        setTimeout(doOne, 1000/60);
      }
      function simulateKeyEvent(c) {
        var event = new KeyboardEvent("keypress", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
        document.body.dispatchEvent(event);
      }
    ''')
    self.compile_btest([test_file('sdl2_text.c'), '-o', 'page.html', '--pre-js', 'pre.js', '-s', 'EXPORTED_FUNCTIONS=[_main,_one]', '-s', 'USE_SDL=2'])
    self.run_browser('page.html', '', '/report_result?1')
  @requires_graphics_hardware
  def test_sdl2_mouse(self):
    """SDL2 mouse input: pre.js synthesizes mousedown/mouseup/mousemove at canvas-relative coordinates."""
    create_file('pre.js', '''
      function simulateMouseEvent(x, y, button) {
        var event = document.createEvent("MouseEvents");
        if (button >= 0) {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousedown', true, true, window,
                     1, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event1);
          var event2 = document.createEvent("MouseEvents");
          event2.initMouseEvent('mouseup', true, true, window,
                     1, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event2);
        } else {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousemove', true, true, window,
                     0, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y, Module['canvas'].offsetLeft + x, Module['canvas'].offsetTop + y,
                     0, 0, 0, 0,
                     0, null);
          Module['canvas'].dispatchEvent(event1);
        }
      }
      window['simulateMouseEvent'] = simulateMouseEvent;
    ''')
    self.compile_btest([test_file('sdl2_mouse.c'), '-O2', '--minify=0', '-o', 'page.html', '--pre-js', 'pre.js', '-s', 'USE_SDL=2'])
    self.run_browser('page.html', '', '/report_result?1')
  @requires_graphics_hardware
  def test_sdl2_mouse_offsets(self):
    """SDL2 mouse coordinates when the canvas is offset inside the page.

    Unlike test_sdl2_mouse, the synthesized events use raw page coordinates
    (no canvas offset added), and a custom page.html positions the canvas
    inside an offset container so the runtime must compensate.
    """
    create_file('pre.js', '''
      function simulateMouseEvent(x, y, button) {
        var event = document.createEvent("MouseEvents");
        if (button >= 0) {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousedown', true, true, window,
                     1, x, y, x, y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event1);
          var event2 = document.createEvent("MouseEvents");
          event2.initMouseEvent('mouseup', true, true, window,
                     1, x, y, x, y,
                     0, 0, 0, 0,
                     button, null);
          Module['canvas'].dispatchEvent(event2);
        } else {
          var event1 = document.createEvent("MouseEvents");
          event1.initMouseEvent('mousemove', true, true, window,
                     0, x, y, x, y,
                     0, 0, 0, 0,
                     0, null);
          Module['canvas'].dispatchEvent(event1);
        }
      }
      window['simulateMouseEvent'] = simulateMouseEvent;
    ''')
    create_file('page.html', '''
      <html>
        <head>
          <style type="text/css">
            html, body { margin: 0; padding: 0; }
            #container {
              position: absolute;
              left: 5px; right: 0;
              top: 5px; bottom: 0;
            }
            #canvas {
              position: absolute;
              left: 0; width: 600px;
              top: 0; height: 450px;
            }
            textarea {
              margin-top: 500px;
              margin-left: 5px;
              width: 600px;
            }
          </style>
        </head>
        <body>
          <div id="container">
            <canvas id="canvas"></canvas>
          </div>
          <textarea id="output" rows="8"></textarea>
          <script type="text/javascript">
            var Module = {
              canvas: document.getElementById('canvas'),
              print: (function() {
                var element = document.getElementById('output');
                element.value = ''; // clear browser cache
                return function(text) {
                  if (arguments.length > 1) text = Array.prototype.slice.call(arguments).join(' ');
                  element.value += text + "\\n";
                  element.scrollTop = element.scrollHeight; // focus on bottom
                };
              })()
            };
          </script>
          <script type="text/javascript" src="sdl2_mouse.js"></script>
        </body>
      </html>
    ''')
    self.compile_btest([test_file('sdl2_mouse.c'), '-DTEST_SDL_MOUSE_OFFSETS=1', '-O2', '--minify=0', '-o', 'sdl2_mouse.js', '--pre-js', 'pre.js', '-s', 'USE_SDL=2'])
    self.run_browser('page.html', '', '/report_result?1')
  @requires_threads
  def test_sdl2_threads(self):
    """SDL2 in a pthreads build with the main proxied to a pthread; expects '4'."""
    self.btest('sdl2_threads.c', expected='4', args=['-s', 'USE_PTHREADS', '-s', 'USE_SDL=2', '-s', 'PROXY_TO_PTHREAD'])
  @requires_graphics_hardware
  def test_sdl2glshader(self):
    """SDL2 + GL shaders under LEGACY_GL_EMULATION, with and without closure; compared to a reference image."""
    self.btest('sdl2glshader.c', reference='sdlglshader.png', args=['-s', 'USE_SDL=2', '-O2', '--closure=1', '-g1', '-s', 'LEGACY_GL_EMULATION'])
    self.btest('sdl2glshader.c', reference='sdlglshader.png', args=['-s', 'USE_SDL=2', '-O2', '-s', 'LEGACY_GL_EMULATION'], also_proxied=True) # XXX closure fails on proxy
  @requires_graphics_hardware
  def test_sdl2_canvas_blank(self):
    """Blank SDL2 canvas compared against a reference image."""
    self.btest('sdl2_canvas_blank.c', reference='sdl_canvas_blank.png', args=['-s', 'USE_SDL=2'])
  @requires_graphics_hardware
  def test_sdl2_canvas_palette(self):
    """Palettized SDL2 canvas rendering compared against a reference image."""
    self.btest('sdl2_canvas_palette.c', reference='sdl_canvas_palette.png', args=['-s', 'USE_SDL=2'])
  @requires_graphics_hardware
  def test_sdl2_canvas_twice(self):
    """Render to the SDL2 canvas twice; compared against a reference image."""
    self.btest('sdl2_canvas_twice.c', reference='sdl_canvas_twice.png', args=['-s', 'USE_SDL=2'])
  @requires_graphics_hardware
  def test_sdl2_gfx(self):
    """SDL2_gfx primitives compared against a reference image (slack 2)."""
    self.btest('sdl2_gfx.cpp', args=['-s', 'USE_SDL=2', '-s', 'USE_SDL_GFX=2'], reference='sdl2_gfx.png', reference_slack=2)
@requires_graphics_hardware
def test_sdl2_canvas_palette_2(self):
create_file('args-r.js', '''
Module['arguments'] = ['-r'];
''')
create_file('args-g.js', '''
Module['arguments'] = ['-g'];
''')
create_file('args-b.js', '''
Module['arguments'] = ['-b'];
''')
self.btest('sdl2_canvas_palette_2.c', reference='sdl_canvas_palette_r.png', args=['-s', 'USE_SDL=2', '--pre-js', 'args-r.js'])
self.btest('sdl2_canvas_palette_2.c', reference='sdl_canvas_palette_g.png', args=['-s', 'USE_SDL=2', '--pre-js', 'args-g.js'])
self.btest('sdl2_canvas_palette_2.c', reference='sdl_canvas_palette_b.png', args=['-s', 'USE_SDL=2', '--pre-js', 'args-b.js'])
  def test_sdl2_swsurface(self):
    """SDL2 software surface with a 64MB initial heap; expects '1'."""
    self.btest('sdl2_swsurface.c', expected='1', args=['-s', 'USE_SDL=2', '-s', 'INITIAL_MEMORY=64MB'])
  @requires_graphics_hardware
  def test_sdl2_image_prepare(self):
    """emscripten_run_preload_plugins path: JPEG preloaded under a non-image extension; reftest triggered manually."""
    # load an image file, get pixel data.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl2_image_prepare.c', reference='screenshot.jpg', args=['--preload-file', 'screenshot.not', '-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2'], manually_trigger_reftest=True)
  @requires_graphics_hardware
  def test_sdl2_image_prepare_data(self):
    """emscripten_run_preload_plugins_data path: JPEG preloaded under a non-image extension; reftest triggered manually."""
    # load an image file, get pixel data.
    shutil.copyfile(test_file('screenshot.jpg'), 'screenshot.not')
    self.btest('sdl2_image_prepare_data.c', reference='screenshot.jpg', args=['--preload-file', 'screenshot.not', '-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2'], manually_trigger_reftest=True)
  @requires_graphics_hardware
  def test_sdl2_canvas_proxy(self):
    """SDL2 canvas rendering with --proxy-to-worker; injects a delayed doReftest before window.close."""
    def post():
      # Patch the generated HTML so the reftest runs after rafs have been
      # processed, then close the window.
      html = open('test.html').read()
      html = html.replace('</body>', '''
<script>
function assert(x, y) { if (!x) throw 'assertion failed ' + y }
%s
var windowClose = window.close;
window.close = function() {
  // wait for rafs to arrive and the screen to update before reftesting
  setTimeout(function() {
    doReftest();
    setTimeout(windowClose, 5000);
  }, 1000);
};
</script>
</body>''' % open('reftest.js').read())
      create_file('test.html', html)
    create_file('data.txt', 'datum')
    self.btest('sdl2_canvas_proxy.c', reference='sdl2_canvas.png', args=['-s', 'USE_SDL=2', '--proxy-to-worker', '--preload-file', 'data.txt', '-s', 'GL_TESTING'], manual_reference=True, post_build=post)
  def test_sdl2_pumpevents(self):
    """SDL_PumpEvents picks up keydown events synthesized by pre.js; expects '7'."""
    # key events should be detected using SDL_PumpEvents
    create_file('pre.js', '''
      function keydown(c) {
        var event = new KeyboardEvent("keydown", { 'keyCode': c, 'charCode': c, 'view': window, 'bubbles': true, 'cancelable': true });
        document.dispatchEvent(event);
      }
    ''')
    self.btest('sdl2_pumpevents.c', expected='7', args=['--pre-js', 'pre.js', '-s', 'USE_SDL=2'])
def test_sdl2_timer(self):
self.btest('sdl2_timer.c', expected='5', args=['-s', 'USE_SDL=2'])
  def test_sdl2_canvas_size(self):
    """SDL2 canvas size querying; expects '1'."""
    self.btest('sdl2_canvas_size.c', expected='1', args=['-s', 'USE_SDL=2'])
  @requires_graphics_hardware
  def test_sdl2_gl_read(self):
    """SDL2 + OpenGL glReadPixels; expects '1'."""
    # SDL, OpenGL, readPixels
    self.compile_btest([test_file('sdl2_gl_read.c'), '-o', 'something.html', '-s', 'USE_SDL=2'])
    self.run_browser('something.html', '.', '/report_result?1')
  @requires_graphics_hardware
  def test_sdl2_glmatrixmode_texture(self):
    """glMatrixMode(GL_TEXTURE) under LEGACY_GL_EMULATION; compared to a reference image."""
    self.btest('sdl2_glmatrixmode_texture.c', reference='sdl2_glmatrixmode_texture.png',
               args=['-s', 'LEGACY_GL_EMULATION', '-s', 'USE_SDL=2'],
               message='You should see a (top) red-white and (bottom) white-red image.')
  @requires_graphics_hardware
  def test_sdl2_gldrawelements(self):
    """glDrawElements across GL drawing modes under LEGACY_GL_EMULATION; compared to a reference image."""
    self.btest('sdl2_gldrawelements.c', reference='sdl2_gldrawelements.png',
               args=['-s', 'LEGACY_GL_EMULATION', '-s', 'USE_SDL=2'],
               message='GL drawing modes. Bottom: points, lines, line loop, line strip. Top: triangles, triangle strip, triangle fan, quad.')
  @requires_graphics_hardware
  def test_sdl2_fog_simple(self):
    """GL fog (simple mode) under LEGACY_GL_EMULATION; compared to a reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl2_fog_simple.c', reference='screenshot-fog-simple.png',
               args=['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '-O2', '--minify=0', '--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins'],
               message='You should see an image with fog.')
  @requires_graphics_hardware
  def test_sdl2_fog_negative(self):
    """GL fog with negative parameters under LEGACY_GL_EMULATION; compared to a reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl2_fog_negative.c', reference='screenshot-fog-negative.png',
               args=['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins'],
               message='You should see an image with fog.')
  @requires_graphics_hardware
  def test_sdl2_fog_density(self):
    """GL fog density parameter under LEGACY_GL_EMULATION; compared to a reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl2_fog_density.c', reference='screenshot-fog-density.png',
               args=['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins'],
               message='You should see an image with fog.')
  @requires_graphics_hardware
  def test_sdl2_fog_exp2(self):
    """GL_EXP2 fog mode under LEGACY_GL_EMULATION; compared to a reference image."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl2_fog_exp2.c', reference='screenshot-fog-exp2.png',
               args=['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins'],
               message='You should see an image with fog.')
  @requires_graphics_hardware
  def test_sdl2_fog_linear(self):
    """GL_LINEAR fog mode under LEGACY_GL_EMULATION; compared to a reference image (slack 1)."""
    shutil.copyfile(test_file('screenshot.png'), 'screenshot.png')
    self.btest('sdl2_fog_linear.c', reference='screenshot-fog-linear.png', reference_slack=1,
               args=['-s', 'USE_SDL=2', '-s', 'USE_SDL_IMAGE=2', '--preload-file', 'screenshot.png', '-s', 'LEGACY_GL_EMULATION', '--use-preload-plugins'],
               message='You should see an image with fog.')
  def test_sdl2_unwasteful(self):
    """SDL2 build at -O1; expects '1'."""
    self.btest('sdl2_unwasteful.cpp', expected='1', args=['-s', 'USE_SDL=2', '-O1'])
  def test_sdl2_canvas_write(self):
    """Direct writes to the SDL2 canvas; expects '0'."""
    self.btest('sdl2_canvas_write.cpp', expected='0', args=['-s', 'USE_SDL=2'])
  @requires_graphics_hardware
  def test_sdl2_gl_frames_swap(self):
    """GL frame swapping with --proxy-to-worker; reftest runs on a later frame, not the first."""
    def post_build(*args):
      self.post_manual_reftest(*args)
      html = open('test.html').read()
      html2 = html.replace('''Module['postRun'] = doReftest;''', '') # we don't want the very first frame
      assert html != html2
      create_file('test.html', html2)
    self.btest('sdl2_gl_frames_swap.c', reference='sdl2_gl_frames_swap.png', args=['--proxy-to-worker', '-s', 'GL_TESTING', '-s', 'USE_SDL=2'], manual_reference=True, post_build=post_build)
  @requires_graphics_hardware
  def test_sdl2_ttf(self):
    """SDL2_ttf text rendering with an embedded TTF font; compared to a reference image."""
    shutil.copy2(test_file('freetype', 'LiberationSansBold.ttf'), self.get_dir())
    self.btest('sdl2_ttf.c', reference='sdl2_ttf.png',
               args=['-O2', '-s', 'USE_SDL=2', '-s', 'USE_SDL_TTF=2', '--embed-file', 'LiberationSansBold.ttf'],
               message='You should see colorful "hello" and "world" in the window')
  @requires_graphics_hardware
  def test_sdl2_ttf_rtl(self):
    """SDL2_ttf right-to-left (Arabic) text shaping with an embedded font; compared to a reference image."""
    shutil.copy2(test_file('third_party', 'notofont', 'NotoNaskhArabic-Regular.ttf'), self.get_dir())
    self.btest('sdl2_ttf_rtl.c', reference='sdl2_ttf_rtl.png',
               args=['-O2', '-s', 'USE_SDL=2', '-s', 'USE_SDL_TTF=2', '--embed-file', 'NotoNaskhArabic-Regular.ttf'],
               message='You should see colorful "سلام" and "جهان" with shaped Arabic script in the window')
  def test_sdl2_custom_cursor(self):
    """Custom cursor loaded from a preloaded BMP; expects '1'."""
    shutil.copyfile(test_file('cursor.bmp'), 'cursor.bmp')
    self.btest('sdl2_custom_cursor.c', expected='1', args=['--preload-file', 'cursor.bmp', '-s', 'USE_SDL=2'])
  def test_sdl2_misc(self):
    """Miscellaneous SDL2 APIs; checks a clean runtime exit."""
    self.btest_exit('sdl2_misc.c', args=['-s', 'USE_SDL=2'])
  @disabled('https://github.com/emscripten-core/emscripten/issues/13101')
  def test_sdl2_misc_main_module(self):
    """Miscellaneous SDL2 APIs built as a MAIN_MODULE; currently disabled (issue #13101)."""
    self.btest_exit('sdl2_misc.c', args=['-s', 'USE_SDL=2', '-s', 'MAIN_MODULE'])
  def test_sdl2_misc_via_object(self):
    """Compile to an object file first, then link; expects a clean exit (exit:0)."""
    self.run_process([EMCC, '-c', test_file('sdl2_misc.c'), '-s', 'USE_SDL=2', '-o', 'test.o'])
    self.compile_btest(['test.o', '-s', 'EXIT_RUNTIME', '-s', 'USE_SDL=2', '-o', 'test.html'])
    self.run_browser('test.html', '...', '/report_result?exit:0')
  @parameterized({
    'dash_s': (['-s', 'USE_SDL=2', '-s', 'USE_SDL_MIXER=2'],),
    'dash_l': (['-lSDL2', '-lSDL2_mixer'],),
  })
  @requires_sound_hardware
  def test_sdl2_mixer_wav(self, flags):
    """SDL2_mixer WAV playback, linked via -s settings or -l libraries; expects '1'."""
    shutil.copyfile(test_file('sounds', 'the_entertainer.wav'), 'sound.wav')
    self.btest('sdl2_mixer_wav.c', expected='1', args=['--preload-file', 'sound.wav', '-s', 'INITIAL_MEMORY=33554432'] + flags)
  @parameterized({
    'wav': ([], '0', 'the_entertainer.wav'),
    'ogg': (['ogg'], 'MIX_INIT_OGG', 'alarmvictory_1.ogg'),
    'mp3': (['mp3'], 'MIX_INIT_MP3', 'pudinha.mp3'),
    'mod': (['mod'], 'MIX_INIT_MOD', 'bleep.xm'),
  })
  @requires_sound_hardware
  def test_sdl2_mixer_music(self, formats, flags, music_name):
    """SDL2_mixer music playback per format, selecting codecs via SDL2_MIXER_FORMATS; expects '1'."""
    shutil.copyfile(test_file('sounds', music_name), music_name)
    self.btest('sdl2_mixer_music.c', expected='1', args=[
      '--preload-file', music_name,
      '-DSOUND_PATH=' + json.dumps(music_name),
      '-DFLAGS=' + flags,
      '-s', 'USE_SDL=2',
      '-s', 'USE_SDL_MIXER=2',
      '-s', 'SDL2_MIXER_FORMATS=' + json.dumps(formats),
      '-s', 'INITIAL_MEMORY=33554432'
    ])
  @no_wasm_backend('cocos2d needs to be ported')
  @requires_graphics_hardware
  def test_cocos2d_hello(self):
    """Cocos2d HelloCpp sample with preloaded resources; compared to a reference image (slack 1)."""
    cocos2d_root = os.path.join(system_libs.Ports.get_build_dir(), 'cocos2d')
    preload_file = os.path.join(cocos2d_root, 'samples', 'HelloCpp', 'Resources') + '@'
    self.btest('cocos2d_hello.cpp', reference='cocos2d_hello.png', reference_slack=1,
               args=['-s', 'USE_COCOS2D=3', '-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0',
                     '--preload-file', preload_file, '--use-preload-plugins',
                     '-Wno-inconsistent-missing-override'],
               message='You should see Cocos2d logo')
def test_async(self):
for opts in [0, 1, 2, 3]:
print(opts)
self.btest('browser/async.cpp', '1', args=['-O' + str(opts), '-g2', '-s', 'ASYNCIFY'])
  def test_asyncify_tricky_function_sig(self):
    """ASYNCIFY_ONLY with mangled C++ names containing escaped special characters; expects '85'."""
    self.btest('browser/test_asyncify_tricky_function_sig.cpp', '85', args=['-s', 'ASYNCIFY_ONLY=[foo(char.const*?.int#),foo2(),main,__original_main]', '-s', 'ASYNCIFY=1'])
  @requires_threads
  def test_async_in_pthread(self):
    """ASYNCIFY inside a pthread (main proxied to a pthread); expects '1'."""
    self.btest('browser/async.cpp', '1', args=['-s', 'ASYNCIFY', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-g'])
  def test_async_2(self):
    """ASYNCIFY with a deep call stack; raises Error.stackTraceLimit via pre.js; expects '40'."""
    # Error.stackTraceLimit default to 10 in chrome but this test relies on more
    # than 40 stack frames being reported.
    create_file('pre.js', 'Error.stackTraceLimit = 80;\n')
    self.btest('browser/async_2.cpp', '40', args=['-O3', '--pre-js', 'pre.js', '-s', 'ASYNCIFY'])
  def test_async_virtual(self):
    """ASYNCIFY through virtual calls, at -O0 and -O3; expects '5'."""
    for opts in [0, 3]:
      print(opts)
      self.btest('browser/async_virtual.cpp', '5', args=['-O' + str(opts), '-profiling', '-s', 'ASYNCIFY'])
  def test_async_virtual_2(self):
    """ASYNCIFY through virtual calls with ASSERTIONS and SAFE_HEAP, at -O0 and -O3; expects '1'."""
    for opts in [0, 3]:
      print(opts)
      self.btest('browser/async_virtual_2.cpp', '1', args=['-O' + str(opts), '-s', 'ASSERTIONS', '-s', 'SAFE_HEAP', '-profiling', '-s', 'ASYNCIFY'])
  # Test async sleeps in the presence of invoke_* calls, which can happen with
  # longjmp or exceptions.
  @parameterized({
    'O0': ([],), # noqa
    'O3': (['-O3'],), # noqa
  })
  def test_async_longjmp(self, args):
    """ASYNCIFY sleeps combined with longjmp-style invoke_* calls; expects '2'."""
    self.btest('browser/async_longjmp.cpp', '2', args=args + ['-s', 'ASYNCIFY'])
  def test_async_mainloop(self):
    """ASYNCIFY interacting with the main loop, at -O0 and -O3; expects '121'."""
    for opts in [0, 3]:
      print(opts)
      self.btest('browser/async_mainloop.cpp', '121', args=['-O' + str(opts), '-s', 'ASYNCIFY'])
  @requires_sound_hardware
  def test_sdl_audio_beep_sleep(self):
    """SDL audio beeps interleaved with ASYNCIFY sleeps; expects '1' (90s timeout)."""
    self.btest('sdl_audio_beep_sleep.cpp', '1', args=['-Os', '-s', 'ASSERTIONS', '-s', 'DISABLE_EXCEPTION_CATCHING=0', '-profiling', '-s', 'SAFE_HEAP', '-lSDL', '-s', 'ASYNCIFY'], timeout=90)
  def test_mainloop_reschedule(self):
    """Rescheduling the main loop under ASYNCIFY; expects '1'."""
    self.btest('mainloop_reschedule.cpp', '1', args=['-Os', '-s', 'ASYNCIFY'])
  def test_mainloop_infloop(self):
    """Main loop entered from an infinite loop under ASYNCIFY; expects '1'."""
    self.btest('mainloop_infloop.cpp', '1', args=['-s', 'ASYNCIFY'])
  def test_async_iostream(self):
    """ASYNCIFY combined with C++ iostream output; expects '1'."""
    self.btest('browser/async_iostream.cpp', '1', args=['-s', 'ASYNCIFY'])
  # Test an async return value. The value goes through a custom JS library
  # method that uses asyncify, and therefore it needs to be declared in
  # ASYNCIFY_IMPORTS.
  # To make the test more precise we also use ASYNCIFY_IGNORE_INDIRECT here.
  @parameterized({
    'normal': (['-s', 'ASYNCIFY_IMPORTS=[sync_tunnel]'],), # noqa
    'response': (['-s', 'ASYNCIFY_IMPORTS=@filey.txt'],), # noqa
    'nothing': (['-DBAD'],), # noqa
    'empty_list': (['-DBAD', '-s', 'ASYNCIFY_IMPORTS=[]'],), # noqa
    'em_js_bad': (['-DBAD', '-DUSE_EM_JS'],), # noqa
  })
  def test_async_returnvalue(self, args):
    """Async return value through an ASYNCIFY_IMPORTS JS-library method; expects '0'.

    The 'response' variant feeds the import list from a @-response file.
    """
    if '@' in str(args):
      create_file('filey.txt', '["sync_tunnel"]')
    self.btest('browser/async_returnvalue.cpp', '0', args=['-s', 'ASYNCIFY', '-s', 'ASYNCIFY_IGNORE_INDIRECT', '--js-library', test_file('browser', 'async_returnvalue.js')] + args + ['-s', 'ASSERTIONS'])
  def test_async_stack_overflow(self):
    """A tiny ASYNCIFY_STACK_SIZE overflows and aborts with an unreachable RuntimeError."""
    self.btest('browser/async_stack_overflow.cpp', 'abort:RuntimeError: unreachable', args=['-s', 'ASYNCIFY', '-s', 'ASYNCIFY_STACK_SIZE=4'])
  def test_async_bad_list(self):
    """ASYNCIFY_ONLY naming a nonexistent function ('waka'); expects '0'."""
    self.btest('browser/async_bad_list.cpp', '0', args=['-s', 'ASYNCIFY', '-s', 'ASYNCIFY_ONLY=[waka]', '--profiling'])
  # Tests that when building with -s MINIMAL_RUNTIME=1, the build can use -s MODULARIZE=1 as well.
  def test_minimal_runtime_modularize(self):
    """MINIMAL_RUNTIME combined with MODULARIZE; expects '0'."""
    self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.html', '-s', 'MODULARIZE', '-s', 'MINIMAL_RUNTIME'])
    self.run_browser('test.html', None, '/report_result?0')
@requires_sync_compilation
def test_modularize(self):
for opts in [
[],
['-O1'],
['-O2', '-profiling'],
['-O2'],
['-O2', '--closure=1']
]:
for args, code in [
# defaults
([], '''
let promise = Module();
if (!promise instanceof Promise) throw new Error('Return value should be a promise');
'''),
# use EXPORT_NAME
(['-s', 'EXPORT_NAME="HelloWorld"'], '''
if (typeof Module !== "undefined") throw "what?!"; // do not pollute the global scope, we are modularized!
HelloWorld.noInitialRun = true; // errorneous module capture will load this and cause timeout
let promise = HelloWorld();
if (!promise instanceof Promise) throw new Error('Return value should be a promise');
'''),
# pass in a Module option (which prevents main(), which we then invoke ourselves)
(['-s', 'EXPORT_NAME="HelloWorld"'], '''
HelloWorld({ noInitialRun: true }).then(hello => {
hello._main();
});
'''),
# Even without a mem init file, everything is async
(['-s', 'EXPORT_NAME="HelloWorld"', '--memory-init-file', '0'], '''
HelloWorld({ noInitialRun: true }).then(hello => {
hello._main();
});
'''),
]:
print('test on', opts, args, code)
# this test is synchronous, so avoid async startup due to wasm features
self.compile_btest([test_file('browser_test_hello_world.c'), '-s', 'MODULARIZE', '-s', 'SINGLE_FILE'] + args + opts)
create_file('a.html', '''
<script src="a.out.js"></script>
<script>
%s
</script>
''' % code)
self.run_browser('a.html', '...', '/report_result?0')
  def test_modularize_network_error(self):
    """A MODULARIZE build whose .wasm is deleted rejects the factory Promise with a fetch-failure abort."""
    test_c_path = test_file('browser_test_hello_world.c')
    browser_reporting_js_path = test_file('browser_reporting.js')
    self.compile_btest([test_c_path, '-s', 'MODULARIZE', '-s', 'EXPORT_NAME="createModule"', '--extern-pre-js', browser_reporting_js_path], reporting=Reporting.NONE)
    create_file('a.html', '''
      <script src="a.out.js"></script>
      <script>
        createModule()
          .then(() => {
            reportResultToServer("Module creation succeeded when it should have failed");
          })
          .catch(err => {
            reportResultToServer(err.message.slice(0, 54));
          });
      </script>
    ''')
    print('Deleting a.out.wasm to cause a download error')
    os.remove('a.out.wasm')
    self.run_browser('a.html', '...', '/report_result?abort(both async and sync fetching of the wasm failed)')
def test_modularize_init_error(self):
  # Verify that an error thrown during module initialization propagates as a
  # rejection of the MODULARIZE factory promise (not an unhandled rejection).
  test_cpp_path = test_file('browser', 'test_modularize_init_error.cpp')
  browser_reporting_js_path = test_file('browser_reporting.js')
  self.compile_btest([test_cpp_path, '-s', 'MODULARIZE', '-s', 'EXPORT_NAME="createModule"', '--extern-pre-js', browser_reporting_js_path], reporting=Reporting.NONE)
  create_file('a.html', '''
<script src="a.out.js"></script>
<script>
if (typeof window === 'object') {
window.addEventListener('unhandledrejection', function(event) {
reportResultToServer("Unhandled promise rejection: " + event.reason.message);
});
}
createModule()
.then(() => {
reportResultToServer("Module creation succeeded when it should have failed");
})
.catch(err => {
reportResultToServer(err);
});
</script>
''')
  self.run_browser('a.html', '...', '/report_result?intentional error to test rejection')
# test illustrating the regression on the modularize feature since commit c5af8f6
# when compiling with the --preload-file option
def test_modularize_and_preload_files(self):
  # amount of memory different from the default one that will be allocated for the emscripten heap
  totalMemory = 33554432
  for opts in [[], ['-O1'], ['-O2', '-profiling'], ['-O2'], ['-O2', '--closure=1']]:
    # the main function simply checks that the amount of allocated heap memory is correct
    create_file('test.c', r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
EM_ASM({
// use eval here in order for the test with closure compiler enabled to succeed
var totalMemory = Module['INITIAL_MEMORY'];
assert(totalMemory === %d, 'bad memory size');
});
REPORT_RESULT(0);
return 0;
}
''' % totalMemory)
    # generate a dummy file
    create_file('dummy_file', 'dummy')
    # compile the code with the modularize feature and the preload-file option enabled
    # no wasm, since this tests customizing total memory at runtime
    self.compile_btest(['test.c', '-s', 'WASM=0', '-s', 'MODULARIZE', '-s', 'EXPORT_NAME="Foo"', '--preload-file', 'dummy_file'] + opts)
    create_file('a.html', '''
<script src="a.out.js"></script>
<script>
// instantiate the Foo module with custom INITIAL_MEMORY value
var foo = Foo({ INITIAL_MEMORY: %d });
</script>
''' % totalMemory)
    self.run_browser('a.html', '...', '/report_result?0')
def test_webidl(self):
  """Browser variant of the WebIDL binder test (original lives in test_core.py)."""
  self.run_process([WEBIDL_BINDER, test_file('webidl', 'test.idl'), 'glue'])
  # The binder must have emitted both halves of the glue code.
  for generated in ('glue.cpp', 'glue.js'):
    self.assertExists(generated)
  for opt_level in ([], ['-O1'], ['-O2']):
    print(opt_level)
    self.btest(os.path.join('webidl', 'test.cpp'), '1', args=['--post-js', 'glue.js', '-I.', '-DBROWSER'] + opt_level)
# Tests MAIN_MODULE/SIDE_MODULE dynamic linking: a side module is loaded at
# startup via Module.dynamicLibraries, the main module calls into it, and
# heap data returned from the side module remains usable in the main module.
@requires_sync_compilation
def test_dynamic_link(self):
  create_file('pre.js', '''
Module.dynamicLibraries = ['side.wasm'];
''')
  create_file('main.cpp', r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <emscripten.h>
char *side(const char *data);
int main() {
char *temp = side("hello through side\n");
char *ret = (char*)malloc(strlen(temp)+1);
strcpy(ret, temp);
temp[1] = 'x';
EM_ASM({
Module.realPrint = out;
out = function(x) {
if (!Module.printed) Module.printed = x;
Module.realPrint(x);
};
});
puts(ret);
EM_ASM({ assert(Module.printed === 'hello through side', ['expected', Module.printed]); });
REPORT_RESULT(2);
return 0;
}
''')
  create_file('side.cpp', r'''
#include <stdlib.h>
#include <string.h>
char *side(const char *data);
char *side(const char *data) {
char *ret = (char*)malloc(strlen(data)+1);
strcpy(ret, data);
return ret;
}
''')
  self.run_process([EMCC, 'side.cpp', '-s', 'SIDE_MODULE', '-O2', '-o', 'side.wasm', '-s', 'EXPORT_ALL'])
  self.btest(self.in_dir('main.cpp'), '2', args=['-s', 'MAIN_MODULE', '-O2', '--pre-js', 'pre.js', '-s', 'EXPORT_ALL'])
  print('wasm in worker (we can read binary data synchronously there)')
  create_file('pre.js', '''
var Module = { dynamicLibraries: ['side.wasm'] };
''')
  self.run_process([EMCC, 'side.cpp', '-s', 'SIDE_MODULE', '-O2', '-o', 'side.wasm', '-s', 'EXPORT_ALL'])
  self.btest(self.in_dir('main.cpp'), '2', args=['-s', 'MAIN_MODULE', '-O2', '--pre-js', 'pre.js', '--proxy-to-worker', '-s', 'EXPORT_ALL'])
  print('wasm (will auto-preload since no sync binary reading)')
  create_file('pre.js', '''
Module.dynamicLibraries = ['side.wasm'];
''')
  # same wasm side module works
  self.btest(self.in_dir('main.cpp'), '2', args=['-s', 'MAIN_MODULE', '-O2', '--pre-js', 'pre.js', '-s', 'EXPORT_ALL'])
# verify that dynamic linking works in all kinds of in-browser environments.
# don't mix different kinds in a single test.
@parameterized({
  '': ([0],),
  'inworker': ([1],),
})
def test_dylink_dso_needed(self, inworker):
  # inworker: when truthy, run the main program inside a worker (--proxy-to-worker).
  self.emcc_args += ['-O2']
  # --proxy-to-worker only on main
  if inworker:
    self.emcc_args += ['--proxy-to-worker']

  def do_run(src, expected_output):
    # XXX there is no infrastructure (yet ?) to retrieve stdout from browser in tests.
    # -> do the assert about expected output inside browser.
    #
    # we have to put the hook into post.js because in main it is too late
    # (in main we won't be able to catch what static constructors inside
    # linked dynlibs printed), and in pre.js it is too early (out is not yet
    # setup by the shell).
    create_file('post.js', r'''
Module.realPrint = out;
out = function(x) {
if (!Module.printed) Module.printed = "";
Module.printed += x + '\n'; // out is passed str without last \n
Module.realPrint(x);
};
''')
    create_file('test_dylink_dso_needed.c', src + r'''
#include <emscripten/em_asm.h>
int main() {
int rtn = test_main();
EM_ASM({
var expected = %r;
assert(Module.printed === expected, ['stdout expected:', expected]);
});
return rtn;
}
''' % expected_output)
    self.btest_exit(self.in_dir('test_dylink_dso_needed.c'), args=self.get_emcc_args() + ['--post-js', 'post.js'])

  self._test_dylink_dso_needed(do_run)
# A dynamically linked side module uses SDL/GL; the main module (built with
# LEGACY_GL_EMULATION) checks that the emulated GL extension string is
# visible through the side-module call.
@requires_graphics_hardware
@requires_sync_compilation
def test_dynamic_link_glemu(self):
  create_file('pre.js', '''
Module.dynamicLibraries = ['side.wasm'];
''')
  create_file('main.cpp', r'''
#include <stdio.h>
#include <string.h>
#include <assert.h>
const char *side();
int main() {
const char *exts = side();
puts(side());
assert(strstr(exts, "GL_EXT_texture_env_combine"));
REPORT_RESULT(1);
return 0;
}
''')
  create_file('side.cpp', r'''
#include "SDL/SDL.h"
#include "SDL/SDL_opengl.h"
const char *side() {
SDL_Init(SDL_INIT_VIDEO);
SDL_SetVideoMode(600, 600, 16, SDL_OPENGL);
return (const char *)glGetString(GL_EXTENSIONS);
}
''')
  self.run_process([EMCC, 'side.cpp', '-s', 'SIDE_MODULE', '-O2', '-o', 'side.wasm', '-lSDL', '-s', 'EXPORT_ALL'])
  self.btest(self.in_dir('main.cpp'), '1', args=['-s', 'MAIN_MODULE', '-O2', '-s', 'LEGACY_GL_EMULATION', '-lSDL', '-lGL', '--pre-js', 'pre.js', '-s', 'EXPORT_ALL'])
def test_dynamic_link_many(self):
  # test asynchronously loading two side modules during startup
  # (exit code 3 == side1() + side2()).
  create_file('pre.js', '''
Module.dynamicLibraries = ['side1.wasm', 'side2.wasm'];
''')
  create_file('main.c', r'''
int side1();
int side2();
int main() {
return side1() + side2();
}
''')
  create_file('side1.c', r'''
int side1() { return 1; }
''')
  create_file('side2.c', r'''
int side2() { return 2; }
''')
  self.run_process([EMCC, 'side1.c', '-s', 'SIDE_MODULE', '-o', 'side1.wasm'])
  self.run_process([EMCC, 'side2.c', '-s', 'SIDE_MODULE', '-o', 'side2.wasm'])
  self.btest_exit(self.in_dir('main.c'), assert_returncode=3,
                  args=['-s', 'MAIN_MODULE', '--pre-js', 'pre.js'])
def test_dynamic_link_pthread_many(self):
  # Test asynchronously loading two side modules during startup
  # They should always load in the same order
  # Verify that function pointers in the browser's main thread
  # refer to the same function as in a pthread worker.
  # The main thread function table is populated asynchronously
  # in the browser's main thread. However, it should still be
  # populated in the same order as in a pthread worker to
  # guarantee function pointer interop.
  create_file('main.cpp', r'''
#include <thread>
int side1();
int side2();
int main() {
auto side1_ptr = &side1;
auto side2_ptr = &side2;
// Don't join the thread since this is running in the
// browser's main thread.
std::thread([=]{
REPORT_RESULT(int(
side1_ptr == &side1 &&
side2_ptr == &side2
));
}).detach();
return 0;
}
''')
  # The browser will try to load side1 first.
  # Use a big payload in side1 so that it takes longer to load than side2
  create_file('side1.cpp', r'''
char const * payload1 = "''' + str(list(range(1, int(1e5)))) + r'''";
int side1() { return 1; }
''')
  create_file('side2.cpp', r'''
char const * payload2 = "0";
int side2() { return 2; }
''')
  self.run_process([EMCC, 'side1.cpp', '-Wno-experimental', '-pthread', '-s', 'SIDE_MODULE', '-o', 'side1.wasm'])
  self.run_process([EMCC, 'side2.cpp', '-Wno-experimental', '-pthread', '-s', 'SIDE_MODULE', '-o', 'side2.wasm'])
  self.btest(self.in_dir('main.cpp'), '1',
             args=['-Wno-experimental', '-pthread', '-s', 'MAIN_MODULE', 'side1.wasm', 'side2.wasm'])
def test_memory_growth_during_startup(self):
  """Preload a 30MB asset so the heap must grow while the runtime is starting."""
  payload = 'X' * (30 * 1024 * 1024)
  create_file('data.dat', payload)
  self.btest('browser_test_hello_world.c', '0', args=['-s', 'ASSERTIONS', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'INITIAL_MEMORY=16MB', '-s', 'TOTAL_STACK=16384', '--preload-file', 'data.dat'])
# pthreads tests
def prep_no_SAB(self):
  # Writes html.html: a copy of shell_minimal.html that undefines
  # SharedArrayBuffer and Atomics, so a test can exercise the
  # no-threads fallback paths in a threads-capable browser.
  create_file('html.html', open(path_from_root('src', 'shell_minimal.html')).read().replace('''<body>''', '''<body>
<script>
SharedArrayBuffer = undefined;
Atomics = undefined;
</script>
'''))
@requires_threads
def test_pthread_c11_threads(self):
  """Exercise the C11 threads API (compiled as gnu11 C) under PROXY_TO_PTHREAD."""
  build_flags = ['-g4', '-std=gnu11', '-xc', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'TOTAL_MEMORY=64mb']
  self.btest(test_file('pthread', 'test_pthread_c11_threads.c'),
             args=build_flags,
             expected='0')
@requires_threads
def test_pthread_pool_size_strict(self):
  """PTHREAD_POOL_SIZE_STRICT: enough pooled threads passes; too few must abort, not deadlock."""
  src = test_file('pthread', 'test_pthread_c11_threads.c')
  common = ['-g2', '-xc', '-std=gnu11', '-pthread']
  # Check that it doesn't fail with sufficient number of threads in the pool.
  self.btest(src,
             expected='0',
             args=common + ['-s', 'PTHREAD_POOL_SIZE=4', '-s', 'PTHREAD_POOL_SIZE_STRICT=2', '-s', 'TOTAL_MEMORY=64mb'])
  # Check that it fails instead of deadlocking on insufficient number of threads in the pool.
  self.btest(src,
             expected='abort:Assertion failed: thrd_create(&t4, thread_main, NULL) == thrd_success',
             args=common + ['-s', 'PTHREAD_POOL_SIZE=3', '-s', 'PTHREAD_POOL_SIZE_STRICT=2', '-s', 'TOTAL_MEMORY=64mb'])
@requires_threads
def test_pthread_in_pthread_pool_size_strict(self):
  """A pthread spawning another pthread under PTHREAD_POOL_SIZE_STRICT."""
  src = test_file('pthread', 'test_pthread_create_pthread.cpp')
  # Succeeds when the pool has room for the nested thread ...
  self.btest(src, expected='1', args=['-g2', '-pthread', '-s', 'PTHREAD_POOL_SIZE=2', '-s', 'PTHREAD_POOL_SIZE_STRICT=2'])
  # ... and fails (expected -200) when the pool is too small.
  self.btest(src, expected='-200', args=['-g2', '-pthread', '-s', 'PTHREAD_POOL_SIZE=1', '-s', 'PTHREAD_POOL_SIZE_STRICT=2'])
# Test that the emscripten_ atomics api functions work.
@parameterized({
  'normal': ([],),
  'closure': (['--closure=1'],),
})
@requires_threads
def test_pthread_atomics(self, args=None):
  # args: extra compiler flags supplied by @parameterized.
  # NOTE: the default used to be a mutable `[]`, which Python shares across
  # calls; use the None-sentinel idiom instead (interface-compatible).
  if args is None:
    args = []
  self.btest(test_file('pthread', 'test_pthread_atomics.cpp'), expected='0', args=['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8', '-g1'] + args)
# Test 64-bit atomics.
@requires_threads
def test_pthread_64bit_atomics(self):
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_64bit_atomics.cpp'), args=flags, expected='0')
# Test 64-bit C++11 atomics.
@requires_threads
def test_pthread_64bit_cxx11_atomics(self):
  # Cover every (optimization level) x (threads on/off) combination.
  for opt_flags in (['-O0'], ['-O3']):
    for thread_flags in ([], ['-s', 'USE_PTHREADS']):
      self.btest(test_file('pthread', 'test_pthread_64bit_cxx11_atomics.cpp'), expected='0', args=opt_flags + thread_flags)
# Test c++ std::thread::hardware_concurrency()
@requires_threads
def test_pthread_hardware_concurrency(self):
  flags = ['-O2', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE="navigator.hardwareConcurrency"']
  self.btest(test_file('pthread', 'test_pthread_hardware_concurrency.cpp'), args=flags, expected='0')
@parameterized({
  'join': ('join',),
  'wait': ('wait',),
})
@requires_threads
def test_pthread_main_thread_blocking(self, name):
  # name: which blocking primitive the C++ test exercises ('join' or 'wait').
  print('Test that we error if not ALLOW_BLOCKING_ON_MAIN_THREAD')
  self.btest(test_file('pthread', 'main_thread_%s.cpp' % name), expected='abort:Blocking on the main thread is not allowed by default.', args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE', '-s', 'ALLOW_BLOCKING_ON_MAIN_THREAD=0'])
  if name == 'join':
    print('Test that by default we just warn about blocking on the main thread.')
    self.btest(test_file('pthread', 'main_thread_%s.cpp' % name), expected='1', args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE'])
    print('Test that tryjoin is fine, even if not ALLOW_BLOCKING_ON_MAIN_THREAD')
    self.btest(test_file('pthread', 'main_thread_join.cpp'), expected='2', args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE', '-g', '-DTRY_JOIN', '-s', 'ALLOW_BLOCKING_ON_MAIN_THREAD=0'])
    print('Test that tryjoin is fine, even if not ALLOW_BLOCKING_ON_MAIN_THREAD, and even without a pool')
    self.btest(test_file('pthread', 'main_thread_join.cpp'), expected='2', args=['-O3', '-s', 'USE_PTHREADS', '-g', '-DTRY_JOIN', '-s', 'ALLOW_BLOCKING_ON_MAIN_THREAD=0'])
    print('Test that everything works ok when we are on a pthread.')
    self.btest(test_file('pthread', 'main_thread_%s.cpp' % name), expected='1', args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE', '-s', 'PROXY_TO_PTHREAD', '-s', 'ALLOW_BLOCKING_ON_MAIN_THREAD=0'])
# Test the old GCC atomic __sync_fetch_and_op builtin operations.
@no_firefox('https://bugzilla.mozilla.org/show_bug.cgi?id=1666568')
@requires_threads
def test_pthread_gcc_atomic_fetch_and_op(self):
  # Every optimization level, each with and without debug info.
  variants = [o + d
              for o in ([], ['-O1'], ['-O2'], ['-O3'], ['-Os'])
              for d in ([], ['-g'])]
  for extra in variants:
    print(extra)
    self.btest(test_file('pthread', 'test_pthread_gcc_atomic_fetch_and_op.cpp'), expected='0', args=extra + ['-s', 'INITIAL_MEMORY=64MB', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8'])
# 64 bit version of the above test.
@no_firefox('https://bugzilla.mozilla.org/show_bug.cgi?id=1666568')
@requires_threads
def test_pthread_gcc_64bit_atomic_fetch_and_op(self):
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_gcc_64bit_atomic_fetch_and_op.cpp'), args=flags, expected='0', also_asmjs=True)
# Test the old GCC atomic __sync_op_and_fetch builtin operations.
@no_firefox('https://bugzilla.mozilla.org/show_bug.cgi?id=1666568')
@requires_threads
def test_pthread_gcc_atomic_op_and_fetch(self):
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_gcc_atomic_op_and_fetch.cpp'), args=flags, expected='0', also_asmjs=True)
# 64 bit version of the above test.
@no_firefox('https://bugzilla.mozilla.org/show_bug.cgi?id=1666568')
@requires_threads
def test_pthread_gcc_64bit_atomic_op_and_fetch(self):
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_gcc_64bit_atomic_op_and_fetch.cpp'), args=flags, expected='0', also_asmjs=True)
# Tests the rest of the remaining GCC atomics after the two above tests.
@no_firefox('https://bugzilla.mozilla.org/show_bug.cgi?id=1666568')
@requires_threads
def test_pthread_gcc_atomics(self):
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_gcc_atomics.cpp'), args=flags, expected='0')
# Test the __sync_lock_test_and_set and __sync_lock_release primitives.
@requires_threads
def test_pthread_gcc_spinlock(self):
  base = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  # Run once with the GCC builtins and once with the emscripten intrinsics.
  for variant in ([], ['-DUSE_EMSCRIPTEN_INTRINSICS']):
    self.btest(test_file('pthread', 'test_pthread_gcc_spinlock.cpp'), expected='800', args=base + variant, also_asmjs=True)
# Test that basic thread creation works.
@no_firefox('https://bugzilla.mozilla.org/show_bug.cgi?id=1666568')
@requires_threads
def test_pthread_create(self):
  # Local helper: build and run the thread-creation test with extra flags.
  def test(args):
    print(args)
    self.btest(test_file('pthread', 'test_pthread_create.cpp'),
               expected='0',
               args=['-s', 'INITIAL_MEMORY=64MB', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8'] + args,
               extra_tries=0) # this should be 100% deterministic
  print() # new line
  test([])
  test(['-O3'])
  # TODO: re-enable minimal runtime once the flakiness is figured out,
  # https://github.com/emscripten-core/emscripten/issues/12368
  # test(['-s', 'MINIMAL_RUNTIME'])
# Test that preallocating worker threads work.
@requires_threads
def test_pthread_preallocates_workers(self):
  # FIX: the args list previously contained a stray duplicated '-s'
  # ('-O3', '-s', '-s', 'USE_PTHREADS', ...) — an obvious typo; every flag
  # here now pairs '-s' with exactly one setting.
  self.btest(test_file('pthread', 'test_pthread_preallocates_workers.cpp'), expected='0', args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=4', '-s', 'PTHREAD_POOL_DELAY_LOAD'])
# Test that allocating a lot of threads doesn't regress. This needs to be checked manually!
@requires_threads
def test_pthread_large_pthread_allocation(self):
  # FIX: removed a stray duplicated '-s' that preceded 'USE_PTHREADS'
  # ('-O3', '-s', '-s', 'USE_PTHREADS', ...) — every '-s' now pairs with
  # exactly one setting.
  self.btest(test_file('pthread', 'test_large_pthread_allocation.cpp'), expected='0', args=['-s', 'INITIAL_MEMORY=128MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=50'], message='Check output from test to ensure that a regression in time it takes to allocate the threads has not occurred.')
# Tests the -s PROXY_TO_PTHREAD=1 option.
@requires_threads
def test_pthread_proxy_to_pthread(self):
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD']
  self.btest(test_file('pthread', 'test_pthread_proxy_to_pthread.c'), args=flags, expected='1')
# Test that a pthread can spawn another pthread of its own.
@requires_threads
def test_pthread_create_pthread(self):
  base = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=2']
  # Run plain and in MODULARIZE mode (with the shell that launches the module).
  for modularize_flags in ([], ['-s', 'MODULARIZE', '-s', 'EXPORT_NAME=MyModule', '--shell-file', test_file('shell_that_launches_modularize.html')]):
    self.btest(test_file('pthread', 'test_pthread_create_pthread.cpp'), expected='1', args=base + modularize_flags)
@requires_threads
def test_pthread_nested_spawns(self):
  """Pthreads spawn pthreads and the callers immediately join the threads they created."""
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=2']
  self.btest(test_file('pthread', 'test_pthread_nested_spawns.cpp'), args=flags, expected='1')
@requires_threads
def test_pthread_join(self):
  """The main thread waits for a worker to finish via pthread_join()."""
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_join.cpp'), args=flags, expected='6765')
@requires_threads
def test_std_thread_detach(self):
  """Detached std::threads can rejoin the worker pool once finished."""
  self.btest(test_file('pthread', 'test_std_thread_detach.cpp'), args=['-s', 'USE_PTHREADS'], expected='0')
@requires_threads
def test_pthread_cancel(self):
  """Exercise the pthread_cancel() operation."""
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_cancel.cpp'), args=flags, expected='1')
@requires_threads
def test_pthread_cancel_cond_wait(self):
  """pthread_cancel() must cancel a thread blocked in pthread_cond_wait()."""
  flags = ['-O3', '-s', 'USE_PTHREADS=1', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest_exit(test_file('pthread', 'test_pthread_cancel_cond_wait.cpp'), args=flags, assert_returncode=1)
# Test pthread_kill() operation
@no_chrome('pthread_kill hangs chrome renderer, and keep subsequent tests from passing')
@requires_threads
def test_pthread_kill(self):
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_kill.cpp'), args=flags, expected='0')
@requires_threads
def test_pthread_cleanup(self):
  """pthread cleanup stack (pthread_cleanup_push/_pop) works."""
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_cleanup.cpp'), args=flags, expected='907640832')
# Tests the pthread mutex api.
@requires_threads
def test_pthread_mutex(self):
  base = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  # Run the mutex test normally and in spinlock mode.
  for variant in ([], ['-DSPINLOCK_TEST']):
    self.btest(test_file('pthread', 'test_pthread_mutex.cpp'), expected='50', args=base + variant)
@requires_threads
def test_pthread_attr_getstack(self):
  """Exercise pthread_attr_getstack()."""
  flags = ['-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=2']
  self.btest(test_file('pthread', 'test_pthread_attr_getstack.cpp'), args=flags, expected='0')
@requires_threads
def test_pthread_malloc(self):
  """Memory allocation must be thread-safe."""
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_malloc.cpp'), args=flags, expected='0')
# Stress test pthreads allocating memory that will call to sbrk(), and main thread has to free up the data.
@requires_threads
def test_pthread_malloc_free(self):
  # FIX: the args list previously set INITIAL_MEMORY twice ('64MB' early,
  # '256MB' at the end); the later setting wins with emcc, so only the
  # intended 256MB value is kept.
  self.btest(test_file('pthread', 'test_pthread_malloc_free.cpp'), expected='0', args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8', '-s', 'INITIAL_MEMORY=256MB'])
@requires_threads
def test_pthread_barrier(self):
  """Exercise the pthread_barrier API."""
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_barrier.cpp'), args=flags, expected='0')
@requires_threads
def test_pthread_once(self):
  """Exercise the pthread_once() function."""
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_once.cpp'), args=flags, expected='0')
# Test against a certain thread exit time handling bug by spawning tons of threads.
@no_firefox('https://bugzilla.mozilla.org/show_bug.cgi?id=1666568')
@requires_threads
def test_pthread_spawns(self):
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8', '--closure=1', '-s', 'ENVIRONMENT=web,worker']
  self.btest(test_file('pthread', 'test_pthread_spawns.cpp'), args=flags, expected='0')
# It is common for code to flip volatile global vars for thread control. This is a bit lax, but nevertheless, test whether that
# kind of scheme will work with Emscripten as well.
@requires_threads
def test_pthread_volatile(self):
  base = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  for variant in ([], ['-DUSE_C_VOLATILE']):
    self.btest(test_file('pthread', 'test_pthread_volatile.cpp'), expected='1', args=base + variant)
# Test thread-specific data (TLS).
@requires_threads
def test_pthread_thread_local_storage(self):
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8', '-s', 'ASSERTIONS']
  self.btest(test_file('pthread', 'test_pthread_thread_local_storage.cpp'), args=flags, expected='0')
# Test the pthread condition variable creation and waiting.
@requires_threads
def test_pthread_condition_variable(self):
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']
  self.btest(test_file('pthread', 'test_pthread_condition_variable.cpp'), args=flags, expected='0')
# Test that pthreads are able to do printf.
@requires_threads
def test_pthread_printf(self):
  # Run with and without LIBRARY_DEBUG tracing ('%d' renders bools as 1/0).
  for debug in (True, False):
    self.btest(test_file('pthread', 'test_pthread_printf.cpp'), expected='0', args=['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE', '-s', 'LIBRARY_DEBUG=%d' % debug])
# Test that pthreads are able to do cout. Failed due to https://bugzilla.mozilla.org/show_bug.cgi?id=1154858.
@requires_threads
def test_pthread_iostream(self):
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE']
  self.btest(test_file('pthread', 'test_pthread_iostream.cpp'), args=flags, expected='0')
@requires_threads
def test_pthread_unistd_io_bigint(self):
  """unistd I/O test under pthreads with WASM_BIGINT enabled."""
  flags = ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'WASM_BIGINT']
  self.btest_exit(test_file('unistd', 'io.c'), args=flags)
# Test that the main thread is able to use pthread_set/getspecific.
@requires_threads
def test_pthread_setspecific_mainthread(self):
  flags = ['-s', 'INITIAL_MEMORY=64MB', '-O3', '-s', 'USE_PTHREADS']
  self.btest(test_file('pthread', 'test_pthread_setspecific_mainthread.cpp'), args=flags, expected='0', also_asmjs=True)
# Test that pthreads have access to filesystem.
@requires_threads
def test_pthread_file_io(self):
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE']
  self.btest(test_file('pthread', 'test_pthread_file_io.cpp'), args=flags, expected='0')
# Test that the pthread_create() function operates benignly in the case that threading is not supported.
@requires_threads
def test_pthread_supported(self):
  for thread_flags in ([], ['-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8']):
    self.btest(test_file('pthread', 'test_pthread_supported.cpp'), expected='0', args=['-O3'] + thread_flags)
@requires_threads
def test_pthread_dispatch_after_exit(self):
  """Dispatching work to a pthread after runtime exit must behave sanely."""
  self.btest_exit(test_file('pthread', 'test_pthread_dispatch_after_exit.c'), args=['-s', 'USE_PTHREADS'])
# Test the operation of Module.pthreadMainPrefixURL variable
@no_wasm_backend('uses js')
@requires_threads
def test_pthread_custom_pthread_main_url(self):
  ensure_dir('cdn')
  create_file('main.cpp', r'''
#include <stdio.h>
#include <string.h>
#include <emscripten/emscripten.h>
#include <emscripten/threading.h>
#include <pthread.h>
int result = 0;
void *thread_main(void *arg) {
emscripten_atomic_store_u32(&result, 1);
pthread_exit(0);
}
int main() {
pthread_t t;
if (emscripten_has_threading_support()) {
pthread_create(&t, 0, thread_main, 0);
pthread_join(t, 0);
} else {
result = 1;
}
REPORT_RESULT(result);
}
''')
  # Test that it is possible to define "Module.locateFile" string to locate where worker.js will be loaded from.
  create_file('shell.html', open(path_from_root('src', 'shell.html')).read().replace('var Module = {', 'var Module = { locateFile: function (path, prefix) {if (path.endsWith(".wasm")) {return prefix + path;} else {return "cdn/" + path;}}, '))
  self.compile_btest(['main.cpp', '--shell-file', 'shell.html', '-s', 'WASM=0', '-s', 'IN_TEST_HARNESS', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE', '-o', 'test.html'])
  # Move the worker JS into cdn/ so it is only reachable through locateFile.
  shutil.move('test.worker.js', os.path.join('cdn', 'test.worker.js'))
  shutil.copyfile('test.html.mem', os.path.join('cdn', 'test.html.mem'))
  self.run_browser('test.html', '', '/report_result?1')
  # Test that it is possible to define "Module.locateFile(foo)" function to locate where worker.js will be loaded from.
  create_file('shell2.html', open(path_from_root('src', 'shell.html')).read().replace('var Module = {', 'var Module = { locateFile: function(filename) { if (filename == "test.worker.js") return "cdn/test.worker.js"; else return filename; }, '))
  self.compile_btest(['main.cpp', '--shell-file', 'shell2.html', '-s', 'WASM=0', '-s', 'IN_TEST_HARNESS', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE', '-o', 'test2.html'])
  # Delete the locally-built worker so only the cdn/ copy can satisfy the load.
  try_delete('test.worker.js')
  self.run_browser('test2.html', '', '/report_result?1')
# Test that if the main thread is performing a futex wait while a pthread needs it to do a proxied operation (before that pthread would wake up the main thread), that it's not a deadlock.
@requires_threads
def test_pthread_proxying_in_futex_wait(self):
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE']
  self.btest(test_file('pthread', 'test_pthread_proxying_in_futex_wait.cpp'), args=flags, expected='0')
# Test that sbrk() operates properly in multithreaded conditions
@requires_threads
def test_pthread_sbrk(self):
  for aborting_malloc in [0, 1]:
    print('aborting malloc=' + str(aborting_malloc))
    # With aborting malloc = 1, test allocating memory in threads
    # With aborting malloc = 0, allocate so much memory in threads that some of the allocations fail.
    # The -D define mirrors the -s setting so the C++ side knows which mode it runs in.
    self.btest(test_file('pthread', 'test_pthread_sbrk.cpp'), expected='0', args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8', '-s', 'ABORTING_MALLOC=' + str(aborting_malloc), '-DABORTING_MALLOC=' + str(aborting_malloc), '-s', 'INITIAL_MEMORY=128MB'])
# Test that -s ABORTING_MALLOC=0 works in both pthreads and non-pthreads builds. (sbrk fails gracefully)
@requires_threads
def test_pthread_gauge_available_memory(self):
  for opt_flags in ([], ['-O2']):
    for thread_flags in ([], ['-s', 'USE_PTHREADS']):
      self.btest(test_file('gauge_available_memory.cpp'), expected='1', args=['-s', 'ABORTING_MALLOC=0'] + thread_flags + opt_flags)
# Test that the proxying operations of user code from pthreads to main thread work
@requires_threads
def test_pthread_run_on_main_thread(self):
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE']
  self.btest(test_file('pthread', 'test_pthread_run_on_main_thread.cpp'), args=flags, expected='0')
# Test how a lot of back-to-back called proxying operations behave.
@requires_threads
def test_pthread_run_on_main_thread_flood(self):
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE']
  self.btest(test_file('pthread', 'test_pthread_run_on_main_thread_flood.cpp'), args=flags, expected='0')
# Test that it is possible to asynchronously call a JavaScript function on the main thread.
@requires_threads
def test_pthread_call_async(self):
  self.btest(test_file('pthread', 'call_async.c'), args=['-s', 'USE_PTHREADS'], expected='1')
# Test that it is possible to synchronously call a JavaScript function on the main thread and get a return value back.
@requires_threads
def test_pthread_call_sync_on_main_thread(self):
  # Three builds: proxied-to-pthread, plain pthreads, and a non-pthread -Oz
  # build with explicit exports.
  self.btest(test_file('pthread', 'call_sync_on_main_thread.c'), expected='1', args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-DPROXY_TO_PTHREAD=1', '--js-library', test_file('pthread', 'call_sync_on_main_thread.js')])
  self.btest(test_file('pthread', 'call_sync_on_main_thread.c'), expected='1', args=['-O3', '-s', 'USE_PTHREADS', '-DPROXY_TO_PTHREAD=0', '--js-library', test_file('pthread', 'call_sync_on_main_thread.js')])
  self.btest(test_file('pthread', 'call_sync_on_main_thread.c'), expected='1', args=['-Oz', '-DPROXY_TO_PTHREAD=0', '--js-library', test_file('pthread', 'call_sync_on_main_thread.js'), '-s', 'EXPORTED_FUNCTIONS=[_main,_malloc]'])
# Test that it is possible to asynchronously call a JavaScript function on the main thread.
@requires_threads
def test_pthread_call_async_on_main_thread(self):
  # Three builds: proxied-to-pthread, plain pthreads, and a non-pthread -Oz build.
  self.btest(test_file('pthread', 'call_async_on_main_thread.c'), expected='7', args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-DPROXY_TO_PTHREAD=1', '--js-library', test_file('pthread', 'call_async_on_main_thread.js')])
  self.btest(test_file('pthread', 'call_async_on_main_thread.c'), expected='7', args=['-O3', '-s', 'USE_PTHREADS', '-DPROXY_TO_PTHREAD=0', '--js-library', test_file('pthread', 'call_async_on_main_thread.js')])
  self.btest(test_file('pthread', 'call_async_on_main_thread.c'), expected='7', args=['-Oz', '-DPROXY_TO_PTHREAD=0', '--js-library', test_file('pthread', 'call_async_on_main_thread.js')])
# Tests that spawning a new thread does not cause a reinitialization of the global data section of the application memory area.
@requires_threads
def test_pthread_global_data_initialization(self):
  mem_init_modes = [[], ['--memory-init-file', '0'], ['--memory-init-file', '1']]
  for mem_init_mode in mem_init_modes:
    # Each mem-init mode is run both in MODULARIZE mode (custom launch shell)
    # and as a plain -O3 build.
    for args in [['-s', 'MODULARIZE', '-s', 'EXPORT_NAME=MyModule', '--shell-file', test_file('shell_that_launches_modularize.html')], ['-O3']]:
      self.btest(test_file('pthread', 'test_pthread_global_data_initialization.c'), expected='20', args=args + mem_init_mode + ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'PTHREAD_POOL_SIZE'])
@requires_threads
@requires_sync_compilation
def test_pthread_global_data_initialization_in_sync_compilation_mode(self):
  """Same as test_pthread_global_data_initialization but with synchronous wasm compilation."""
  sync_flags = ['-s', 'WASM_ASYNC_COMPILATION=0']
  for mem_init_mode in ([], ['--memory-init-file', '0'], ['--memory-init-file', '1']):
    self.btest(test_file('pthread', 'test_pthread_global_data_initialization.c'), expected='20', args=sync_flags + mem_init_mode + ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'PTHREAD_POOL_SIZE'])
# Test that emscripten_get_now() reports coherent wallclock times across all pthreads, instead of each pthread independently reporting wallclock times since the launch of that pthread.
@requires_threads
def test_pthread_clock_drift(self):
  flags = ['-O3', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD']
  self.btest(test_file('pthread', 'test_pthread_clock_drift.cpp'), args=flags, expected='1')
  @requires_threads
  def test_pthread_utf8_funcs(self):
    """UTF-8 string conversion helpers must work from pthreads."""
    self.btest(test_file('pthread', 'test_pthread_utf8_funcs.cpp'), expected='0', args=['-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE'])
# Test the emscripten_futex_wake(addr, INT_MAX); functionality to wake all waiters
  @requires_threads
  def test_pthread_wake_all(self):
    """emscripten_futex_wake(addr, INT_MAX) must wake all waiters."""
    self.btest(test_file('pthread', 'test_futex_wake_all.cpp'), expected='0', args=['-O3', '-s', 'USE_PTHREADS', '-s', 'INITIAL_MEMORY=64MB', '-s', 'NO_EXIT_RUNTIME'], also_asmjs=True)
# Test that stack base and max correctly bound the stack on pthreads.
  @requires_threads
  def test_pthread_stack_bounds(self):
    """Stack base and max must correctly bound the stack on pthreads."""
    self.btest(test_file('pthread', 'test_pthread_stack_bounds.cpp'), expected='1', args=['-s', 'USE_PTHREADS'])
# Test that real `thread_local` works.
  @requires_threads
  def test_pthread_tls(self):
    """Real C++ `thread_local` storage must work (main proxied to a pthread)."""
    self.btest(test_file('pthread', 'test_pthread_tls.cpp'), expected='1337', args=['-s', 'PROXY_TO_PTHREAD', '-s', 'USE_PTHREADS'])
# Test that real `thread_local` works in main thread without PROXY_TO_PTHREAD.
  @requires_threads
  def test_pthread_tls_main(self):
    """Real `thread_local` must work on the main thread without PROXY_TO_PTHREAD."""
    self.btest(test_file('pthread', 'test_pthread_tls_main.cpp'), expected='1337', args=['-s', 'USE_PTHREADS'])
  @requires_threads
  def test_pthread_safe_stack(self):
    """STACK_OVERFLOW_CHECK=2 must detect overflow on a pthread stack."""
    # Note that as the test runs with PROXY_TO_PTHREAD, we set TOTAL_STACK,
    # and not DEFAULT_PTHREAD_STACK_SIZE, as the pthread for main() gets the
    # same stack size as the main thread normally would.
    self.btest(test_file('core', 'test_safe_stack.c'), expected='abort:stack overflow', args=['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'STACK_OVERFLOW_CHECK=2', '-s', 'TOTAL_STACK=64KB'])
@parameterized({
'leak': ['test_pthread_lsan_leak', ['-g4']],
'no_leak': ['test_pthread_lsan_no_leak'],
})
@requires_threads
def test_pthread_lsan(self, name, args=[]):
self.btest(test_file('pthread', name + '.cpp'), expected='1', args=['-fsanitize=leak', '-s', 'INITIAL_MEMORY=256MB', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '--pre-js', test_file('pthread', name + '.js')] + args)
@parameterized({
# Reusing the LSan test files for ASan.
'leak': ['test_pthread_lsan_leak', ['-g4']],
'no_leak': ['test_pthread_lsan_no_leak'],
})
@requires_threads
def test_pthread_asan(self, name, args=[]):
self.btest(test_file('pthread', name + '.cpp'), expected='1', args=['-fsanitize=address', '-s', 'INITIAL_MEMORY=256MB', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '--pre-js', test_file('pthread', name + '.js')] + args)
  @requires_threads
  def test_pthread_asan_use_after_free(self):
    """ASan must catch a use-after-free triggered from a pthread."""
    self.btest(test_file('pthread', 'test_pthread_asan_use_after_free.cpp'), expected='1', args=['-fsanitize=address', '-s', 'INITIAL_MEMORY=256MB', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '--pre-js', test_file('pthread', 'test_pthread_asan_use_after_free.js')])
  @requires_threads
  def test_pthread_exit_process(self):
    """With EXIT_RUNTIME, exiting the process from pthreads must run onExit (status 42)."""
    args = ['-s', 'USE_PTHREADS',
            '-s', 'PROXY_TO_PTHREAD',
            '-s', 'PTHREAD_POOL_SIZE=2',
            '-s', 'EXIT_RUNTIME',
            '-DEXIT_RUNTIME',
            '-O0']
    args += ['--pre-js', test_file('core', 'pthread', 'test_pthread_exit_runtime.pre.js')]
    self.btest(test_file('core', 'pthread', 'test_pthread_exit_runtime.c'), expected='onExit status: 42', args=args)
  @requires_threads
  def test_pthread_no_exit_process(self):
    """Without EXIT_RUNTIME, onExit must never be called (expects plain '43')."""
    # Same as above but without EXIT_RUNTIME. In this case we don't expect onExit to
    # ever be called.
    args = ['-s', 'USE_PTHREADS',
            '-s', 'PROXY_TO_PTHREAD',
            '-s', 'PTHREAD_POOL_SIZE=2',
            '-O0']
    args += ['--pre-js', test_file('core', 'pthread', 'test_pthread_exit_runtime.pre.js')]
    self.btest(test_file('core', 'pthread', 'test_pthread_exit_runtime.c'), expected='43', args=args)
# Tests MAIN_THREAD_EM_ASM_INT() function call signatures.
  def test_main_thread_em_asm_signatures(self):
    """MAIN_THREAD_EM_ASM_INT() call signatures must work in a single-threaded build."""
    self.btest_exit(test_file('core', 'test_em_asm_signatures.cpp'), assert_returncode=121, args=[])
  @requires_threads
  def test_main_thread_em_asm_signatures_pthreads(self):
    """MAIN_THREAD_EM_ASM_INT() signatures must also work when proxied from pthreads."""
    self.btest_exit(test_file('core', 'test_em_asm_signatures.cpp'), assert_returncode=121, args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'ASSERTIONS'])
  @requires_threads
  def test_main_thread_async_em_asm(self):
    """MAIN_THREAD_ASYNC_EM_ASM must work from a proxied pthread."""
    self.btest_exit(test_file('core', 'test_main_thread_async_em_asm.cpp'), args=['-O3', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'ASSERTIONS'])
  @requires_threads
  def test_main_thread_em_asm_blocking(self):
    """Blocking MAIN_THREAD_EM_ASM from a pthread must complete (uses a custom HTML page)."""
    create_file('page.html', open(test_file('browser', 'test_em_asm_blocking.html')).read())
    self.compile_btest([test_file('browser', 'test_em_asm_blocking.cpp'), '-O2', '-o', 'wasm.js', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'])
    self.run_browser('page.html', '', '/report_result?8')
# Test that it is possible to send a signal via calling alarm(timeout), which in turn calls to the signal handler set by signal(SIGALRM, func);
  def test_sigalrm(self):
    """alarm(timeout) must deliver SIGALRM to the handler installed via signal()."""
    self.btest(test_file('sigalrm.cpp'), expected='0', args=['-O3'])
  def test_canvas_style_proxy(self):
    """Canvas CSS style access must be proxied correctly with --proxy-to-worker."""
    self.btest('canvas_style_proxy.c', expected='1', args=['--proxy-to-worker', '--shell-file', test_file('canvas_style_proxy_shell.html'), '--pre-js', test_file('canvas_style_proxy_pre.js')])
  def test_canvas_size_proxy(self):
    """Canvas size queries must be proxied correctly with --proxy-to-worker."""
    self.btest(test_file('canvas_size_proxy.c'), expected='0', args=['--proxy-to-worker'])
  def test_custom_messages_proxy(self):
    """Custom postMessage traffic must work between page and proxied worker."""
    self.btest(test_file('custom_messages_proxy.c'), expected='1', args=['--proxy-to-worker', '--shell-file', test_file('custom_messages_proxy_shell.html'), '--post-js', test_file('custom_messages_proxy_postjs.js')])
def test_vanilla_html_when_proxying(self):
for opts in [0, 1, 2]:
print(opts)
self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.js', '-O' + str(opts), '--proxy-to-worker'])
create_file('test.html', '<script src="test.js"></script>')
self.run_browser('test.html', None, '/report_result?0')
  def test_in_flight_memfile_request(self):
    """An asm.js mem-init XHR already in flight must be handled correctly."""
    # test the XHR for an asm.js mem init file being in flight already
    for o in [0, 1, 2]:
      print(o)
      opts = ['-O' + str(o), '-s', 'WASM=0']
      print('plain html')
      self.compile_btest([test_file('in_flight_memfile_request.c'), '-o', 'test.js'] + opts)
      create_file('test.html', '<script src="test.js"></script>')
      self.run_browser('test.html', None, '/report_result?0') # never when we provide our own HTML like this.
      print('default html')
      self.btest('in_flight_memfile_request.c', expected='0' if o < 2 else '1', args=opts) # should happen when there is a mem init file (-O2+)
  @requires_sync_compilation
  def test_binaryen_async(self):
    """Async wasm compilation must be used by default and be disableable.

    Wraps WebAssembly.instantiate[Streaming] in the shell page to record whether
    the async path was taken; the program's exit code (checked by btest_exit)
    reflects whether async compilation was observed.
    """
    # notice when we use async compilation
    script = '''
    <script>
      // note if we do async compilation
      var real_wasm_instantiate = WebAssembly.instantiate;
      var real_wasm_instantiateStreaming = WebAssembly.instantiateStreaming;
      if (typeof real_wasm_instantiateStreaming === 'function') {
        WebAssembly.instantiateStreaming = function(a, b) {
          Module.sawAsyncCompilation = true;
          return real_wasm_instantiateStreaming(a, b);
        };
      } else {
        WebAssembly.instantiate = function(a, b) {
          Module.sawAsyncCompilation = true;
          return real_wasm_instantiate(a, b);
        };
      }
      // show stderr for the viewer's fun
      err = function(x) {
        out('<<< ' + x + ' >>>');
        console.log(x);
      };
    </script>
    {{{ SCRIPT }}}
'''
    shell_with_script('shell.html', 'shell.html', script)
    common_args = ['--shell-file', 'shell.html']
    for opts, returncode in [
      ([], 1),
      (['-O1'], 1),
      (['-O2'], 1),
      (['-O3'], 1),
      (['-s', 'WASM_ASYNC_COMPILATION'], 1), # force it on
      (['-O1', '-s', 'WASM_ASYNC_COMPILATION=0'], 0), # force it off
    ]:
      print(opts, returncode)
      self.btest_exit('binaryen_async.c', assert_returncode=returncode, args=common_args + opts)
    # Ensure that compilation still works and is async without instantiateStreaming available
    no_streaming = ' <script> WebAssembly.instantiateStreaming = undefined;</script>'
    shell_with_script('shell.html', 'shell.html', no_streaming + script)
    self.btest_exit('binaryen_async.c', assert_returncode=1, args=common_args)
# Test that implementing Module.instantiateWasm() callback works.
@parameterized({
'': ([],),
'asan': (['-fsanitize=address', '-s', 'INITIAL_MEMORY=128MB'],)
})
def test_manual_wasm_instantiate(self, args=[]):
self.compile_btest([test_file('manual_wasm_instantiate.cpp'), '-o', 'manual_wasm_instantiate.js'] + args)
shutil.copyfile(test_file('manual_wasm_instantiate.html'), 'manual_wasm_instantiate.html')
self.run_browser('manual_wasm_instantiate.html', 'wasm instantiation succeeded', '/report_result?1')
  def test_wasm_locate_file(self):
    """Module.locateFile must be able to redirect the .wasm fetch to a CDN-style path."""
    # Test that it is possible to define "Module.locateFile(foo)" function to locate where worker.js will be loaded from.
    ensure_dir('cdn')
    create_file('shell2.html', open(path_from_root('src', 'shell.html')).read().replace('var Module = {', 'var Module = { locateFile: function(filename) { if (filename == "test.wasm") return "cdn/test.wasm"; else return filename; }, '))
    self.compile_btest([test_file('browser_test_hello_world.c'), '--shell-file', 'shell2.html', '-o', 'test.html'])
    shutil.move('test.wasm', os.path.join('cdn', 'test.wasm'))
    self.run_browser('test.html', '', '/report_result?0')
  def test_utf8_textdecoder(self):
    """UTF8ToString must decode a UTF-8 corpus correctly (TextDecoder path)."""
    self.btest_exit('benchmark_utf8.cpp', 0, args=['--embed-file', test_file('utf8_corpus.txt') + '@/utf8_corpus.txt', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=[UTF8ToString]'])
  def test_utf16_textdecoder(self):
    """UTF16 string conversion helpers must decode a UTF-16 corpus correctly."""
    self.btest_exit('benchmark_utf16.cpp', 0, args=['--embed-file', test_file('utf16_corpus.txt') + '@/utf16_corpus.txt', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=[UTF16ToString,stringToUTF16,lengthBytesUTF16]'])
  def test_TextDecoder(self):
    """TEXTDECODER modes must all pass, and code size must order as expected.

    TEXTDECODER=2 (no JS fallback) < TEXTDECODER=0 (fallback only) <
    default (TextDecoder plus fallback).
    """
    self.btest('browser_test_hello_world.c', '0', args=['-s', 'TEXTDECODER=0'])
    just_fallback = os.path.getsize('test.js')
    self.btest('browser_test_hello_world.c', '0')
    td_with_fallback = os.path.getsize('test.js')
    self.btest('browser_test_hello_world.c', '0', args=['-s', 'TEXTDECODER=2'])
    td_without_fallback = os.path.getsize('test.js')
    self.assertLess(td_without_fallback, just_fallback)
    self.assertLess(just_fallback, td_with_fallback)
  def test_small_js_flags(self):
    """A maximally size-optimized web build must stay near a known JS size."""
    self.btest('browser_test_hello_world.c', '0', args=['-O3', '--closure=1', '-s', 'INCOMING_MODULE_JS_API=[]', '-s', 'ENVIRONMENT=web'])
    # Check an absolute js code size, with some slack.
    size = os.path.getsize('test.js')
    print('size:', size)
    # Note that this size includes test harness additions (for reporting the result, etc.).
    self.assertLess(abs(size - 5453), 100)
# Tests that it is possible to initialize and render WebGL content in a pthread by using OffscreenCanvas.
# -DTEST_CHAINED_WEBGL_CONTEXT_PASSING: Tests that it is possible to transfer WebGL canvas in a chain from main thread -> thread 1 -> thread 2 and then init and render WebGL content there.
  @no_chrome('see https://crbug.com/961765')
  @requires_threads
  @requires_offscreen_canvas
  def test_webgl_offscreen_canvas_in_pthread(self):
    """WebGL content must initialize and render in a pthread via OffscreenCanvas.

    -DTEST_CHAINED_WEBGL_CONTEXT_PASSING additionally transfers the canvas
    main thread -> thread 1 -> thread 2 before rendering.
    """
    for args in [[], ['-DTEST_CHAINED_WEBGL_CONTEXT_PASSING']]:
      self.btest('gl_in_pthread.cpp', expected='1', args=args + ['-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=2', '-s', 'OFFSCREENCANVAS_SUPPORT', '-lGL'])
# Tests that it is possible to render WebGL content on a <canvas> on the main thread, after it has once been used to render WebGL content in a pthread first
# -DTEST_MAIN_THREAD_EXPLICIT_COMMIT: Test the same (WebGL on main thread after pthread), but by using explicit .commit() to swap on the main thread instead of implicit "swap when rAF ends" logic
  @requires_threads
  @requires_offscreen_canvas
  @disabled('This test is disabled because current OffscreenCanvas does not allow transfering it after a rendering context has been created for it.')
  def test_webgl_offscreen_canvas_in_mainthread_after_pthread(self):
    """(Disabled) Render WebGL on the main-thread canvas after a pthread used it first."""
    for args in [[], ['-DTEST_MAIN_THREAD_EXPLICIT_COMMIT']]:
      self.btest('gl_in_mainthread_after_pthread.cpp', expected='0', args=args + ['-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=2', '-s', 'OFFSCREENCANVAS_SUPPORT', '-lGL'])
  @requires_threads
  @requires_offscreen_canvas
  def test_webgl_offscreen_canvas_only_in_pthread(self):
    """WebGL must work when the GL context only ever lives in a pthread."""
    self.btest('gl_only_in_pthread.cpp', expected='0', args=['-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE', '-s', 'OFFSCREENCANVAS_SUPPORT', '-lGL', '-s', 'OFFSCREEN_FRAMEBUFFER'])
# Tests that rendering from client side memory without default-enabling extensions works.
  @requires_graphics_hardware
  def test_webgl_from_client_side_memory_without_default_enabled_extensions(self):
    """Rendering from client-side memory must work without default-enabled extensions (FULL_ES2)."""
    self.btest('webgl_draw_triangle.c', '0', args=['-lGL', '-s', 'OFFSCREEN_FRAMEBUFFER', '-DEXPLICIT_SWAP=1', '-DDRAW_FROM_CLIENT_MEMORY=1', '-s', 'FULL_ES2=1'])
# Tests for WEBGL_multi_draw extension
# For testing WebGL draft extensions like this, if using chrome as the browser,
# We might want to append the --enable-webgl-draft-extensions to the EMTEST_BROWSER env arg.
  @requires_graphics_hardware
  def test_webgl_multi_draw(self):
    """WEBGL_multi_draw extension: arrays/elements, plain and instanced, must match the reference image."""
    self.btest('webgl_multi_draw_test.c', reference='webgl_multi_draw.png',
               args=['-lGL', '-s', 'OFFSCREEN_FRAMEBUFFER', '-DMULTI_DRAW_ARRAYS=1', '-DEXPLICIT_SWAP=1'])
    self.btest('webgl_multi_draw_test.c', reference='webgl_multi_draw.png',
               args=['-lGL', '-s', 'OFFSCREEN_FRAMEBUFFER', '-DMULTI_DRAW_ARRAYS_INSTANCED=1', '-DEXPLICIT_SWAP=1'])
    self.btest('webgl_multi_draw_test.c', reference='webgl_multi_draw.png',
               args=['-lGL', '-s', 'OFFSCREEN_FRAMEBUFFER', '-DMULTI_DRAW_ELEMENTS=1', '-DEXPLICIT_SWAP=1'])
    self.btest('webgl_multi_draw_test.c', reference='webgl_multi_draw.png',
               args=['-lGL', '-s', 'OFFSCREEN_FRAMEBUFFER', '-DMULTI_DRAW_ELEMENTS_INSTANCED=1', '-DEXPLICIT_SWAP=1'])
# Tests for base_vertex/base_instance extension
# For testing WebGL draft extensions like this, if using chrome as the browser,
# We might want to append the --enable-webgl-draft-extensions to the EMTEST_BROWSER env arg.
# If testing on Mac, you also need --use-cmd-decoder=passthrough to get this extension.
# Also there is a known bug with Mac Intel baseInstance which can fail producing the expected image result.
  @requires_graphics_hardware
  def test_webgl_draw_base_vertex_base_instance(self):
    """base_vertex/base_instance draft extension must render correctly.

    Covers all four combinations of multi-draw x drawElements against one
    reference image, on a WebGL 2 context.
    """
    for multiDraw in [0, 1]:
      for drawElements in [0, 1]:
        self.btest('webgl_draw_base_vertex_base_instance_test.c', reference='webgl_draw_instanced_base_vertex_base_instance.png',
                   args=['-lGL',
                         '-s', 'MAX_WEBGL_VERSION=2',
                         '-s', 'OFFSCREEN_FRAMEBUFFER',
                         '-DMULTI_DRAW=' + str(multiDraw),
                         '-DDRAW_ELEMENTS=' + str(drawElements),
                         '-DEXPLICIT_SWAP=1',
                         '-DWEBGL_CONTEXT_VERSION=2'])
# Tests that -s OFFSCREEN_FRAMEBUFFER=1 rendering works.
@requires_graphics_hardware
def test_webgl_offscreen_framebuffer(self):
# Tests all the different possible versions of libgl
for threads in [[], ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD']]:
for version in [[], ['-s', 'FULL_ES3'], ['-s', 'FULL_ES3']]:
args = ['-lGL', '-s', 'OFFSCREEN_FRAMEBUFFER', '-DEXPLICIT_SWAP=1'] + threads + version
print('with args: %s' % str(args))
self.btest('webgl_draw_triangle.c', '0', args=args)
# Tests that VAOs can be used even if WebGL enableExtensionsByDefault is set to 0.
  @requires_graphics_hardware
  def test_webgl_vao_without_automatic_extensions(self):
    """VAOs must be usable with GL_SUPPORT_AUTOMATIC_ENABLE_EXTENSIONS=0."""
    self.btest('test_webgl_no_auto_init_extensions.c', '0', args=['-lGL', '-s', 'GL_SUPPORT_AUTOMATIC_ENABLE_EXTENSIONS=0'])
# Tests that offscreen framebuffer state restoration works
  @requires_graphics_hardware
  def test_webgl_offscreen_framebuffer_state_restoration(self):
    """Offscreen-framebuffer GL state restoration must work on every code path."""
    for args in [
      # full state restoration path on WebGL 1.0
      ['-s', 'MAX_WEBGL_VERSION', '-s', 'OFFSCREEN_FRAMEBUFFER_FORBID_VAO_PATH'],
      # VAO path on WebGL 1.0
      ['-s', 'MAX_WEBGL_VERSION'],
      ['-s', 'MAX_WEBGL_VERSION=2', '-DTEST_WEBGL2=0'],
      # VAO path on WebGL 2.0
      ['-s', 'MAX_WEBGL_VERSION=2', '-DTEST_WEBGL2=1', '-DTEST_ANTIALIAS=1', '-DTEST_REQUIRE_VAO=1'],
      # full state restoration path on WebGL 2.0
      ['-s', 'MAX_WEBGL_VERSION=2', '-DTEST_WEBGL2=1', '-DTEST_ANTIALIAS=1', '-s', 'OFFSCREEN_FRAMEBUFFER_FORBID_VAO_PATH'],
      # blitFramebuffer path on WebGL 2.0 (falls back to VAO on Firefox < 67)
      ['-s', 'MAX_WEBGL_VERSION=2', '-DTEST_WEBGL2=1', '-DTEST_ANTIALIAS=0'],
    ]:
      cmd = args + ['-lGL', '-s', 'OFFSCREEN_FRAMEBUFFER', '-DEXPLICIT_SWAP=1']
      self.btest('webgl_offscreen_framebuffer_swap_with_bad_state.c', '0', args=cmd)
# Tests that -s WORKAROUND_OLD_WEBGL_UNIFORM_UPLOAD_IGNORED_OFFSET_BUG=1 rendering works.
  @requires_graphics_hardware
  def test_webgl_workaround_webgl_uniform_upload_bug(self):
    """WORKAROUND_OLD_WEBGL_UNIFORM_UPLOAD_IGNORED_OFFSET_BUG must still render correctly."""
    self.btest('webgl_draw_triangle_with_uniform_color.c', '0', args=['-lGL', '-s', 'WORKAROUND_OLD_WEBGL_UNIFORM_UPLOAD_IGNORED_OFFSET_BUG'])
# Tests that using an array of structs in GL uniforms works.
  @requires_graphics_hardware
  def test_webgl_array_of_structs_uniform(self):
    """An array of structs used in GL uniforms must upload and render correctly."""
    self.btest('webgl_array_of_structs_uniform.c', args=['-lGL', '-s', 'MAX_WEBGL_VERSION=2'], reference='webgl_array_of_structs_uniform.png')
# Tests that if a WebGL context is created in a pthread on a canvas that has not been transferred to that pthread, WebGL calls are then proxied to the main thread
# -DTEST_OFFSCREEN_CANVAS=1: Tests that if a WebGL context is created on a pthread that has the canvas transferred to it via using Emscripten's EMSCRIPTEN_PTHREAD_TRANSFERRED_CANVASES="#canvas", then OffscreenCanvas is used
# -DTEST_OFFSCREEN_CANVAS=2: Tests that if a WebGL context is created on a pthread that has the canvas transferred to it via automatic transferring of Module.canvas when EMSCRIPTEN_PTHREAD_TRANSFERRED_CANVASES is not defined, then OffscreenCanvas is also used
  @requires_threads
  @requires_offscreen_canvas
  def test_webgl_offscreen_canvas_in_proxied_pthread(self):
    """GL calls from a pthread without a transferred canvas must proxy to the main thread."""
    for asyncify in [0, 1]:
      cmd = ['-s', 'USE_PTHREADS', '-s', 'OFFSCREENCANVAS_SUPPORT', '-lGL', '-s', 'GL_DEBUG', '-s', 'PROXY_TO_PTHREAD']
      if asyncify:
        # given the synchronous render loop here, asyncify is needed to see intermediate frames and
        # the gradual color change
        cmd += ['-s', 'ASYNCIFY', '-DASYNCIFY']
      print(str(cmd))
      self.btest('gl_in_proxy_pthread.cpp', expected='1', args=cmd)
  @requires_threads
  @requires_graphics_hardware
  @requires_offscreen_canvas
  def test_webgl_resize_offscreencanvas_from_main_thread(self):
    """Resizing an OffscreenCanvas from the main thread must work in all build combinations."""
    for args1 in [[], ['-s', 'PROXY_TO_PTHREAD']]:
      for args2 in [[], ['-DTEST_SYNC_BLOCKING_LOOP=1']]:
        for args3 in [[], ['-s', 'OFFSCREENCANVAS_SUPPORT', '-s', 'OFFSCREEN_FRAMEBUFFER']]:
          cmd = args1 + args2 + args3 + ['-s', 'USE_PTHREADS', '-lGL', '-s', 'GL_DEBUG']
          print(str(cmd))
          self.btest('resize_offscreencanvas_from_main_thread.cpp', expected='1', args=cmd)
@requires_graphics_hardware
def test_webgl_simple_enable_extensions(self):
for webgl_version in [1, 2]:
for simple_enable_extensions in [0, 1]:
cmd = ['-DWEBGL_CONTEXT_VERSION=' + str(webgl_version),
'-DWEBGL_SIMPLE_ENABLE_EXTENSION=' + str(simple_enable_extensions),
'-s', 'MAX_WEBGL_VERSION=2',
'-s', 'GL_SUPPORT_AUTOMATIC_ENABLE_EXTENSIONS=' + str(simple_enable_extensions),
'-s', 'GL_SUPPORT_SIMPLE_ENABLE_EXTENSIONS=' + str(simple_enable_extensions)]
self.btest('webgl2_simple_enable_extensions.c', expected='0', args=cmd)
# Tests the feature that shell html page can preallocate the typed array and place it
# to Module.buffer before loading the script page.
# In this build mode, the -s INITIAL_MEMORY=xxx option will be ignored.
# Preallocating the buffer in this was is asm.js only (wasm needs a Memory).
  def test_preallocated_heap(self):
    """A shell page may preallocate Module.buffer before loading the script (asm.js only)."""
    self.btest_exit('test_preallocated_heap.cpp', args=['-s', 'WASM=0', '-s', 'INITIAL_MEMORY=16MB', '-s', 'ABORTING_MALLOC=0', '--shell-file', test_file('test_preallocated_heap_shell.html')])
# Tests emscripten_fetch() usage to XHR data directly to memory without persisting results to IndexedDB.
  def test_fetch_to_memory(self):
    """emscripten_fetch() must XHR data directly to memory, without IndexedDB persistence."""
    # Test error reporting in the negative case when the file URL doesn't exist. (http 404)
    self.btest('fetch/to_memory.cpp',
               expected='1',
               args=['-s', 'FETCH_DEBUG', '-s', 'FETCH', '-DFILE_DOES_NOT_EXIST'],
               also_asmjs=True)
    # Test the positive case when the file URL exists. (http 200)
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    for arg in [[], ['-s', 'FETCH_SUPPORT_INDEXEDDB=0']]:
      self.btest('fetch/to_memory.cpp',
                 expected='1',
                 args=['-s', 'FETCH_DEBUG', '-s', 'FETCH'] + arg,
                 also_asmjs=True)
  @parameterized({
    '': ([],),
    'pthread_exit': (['-DDO_PTHREAD_EXIT'],),
  })
  @requires_threads
  def test_fetch_from_thread(self, args):
    """emscripten_fetch() must work when issued from a pthread (optionally exiting it)."""
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    self.btest('fetch/from_thread.cpp',
               expected='42',
               args=args + ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'FETCH_DEBUG', '-s', 'FETCH', '-DFILE_DOES_NOT_EXIST'],
               also_asmjs=True)
  def test_fetch_to_indexdb(self):
    """emscripten_fetch() must be able to persist a downloaded file into IndexedDB."""
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    self.btest('fetch/to_indexeddb.cpp',
               expected='1',
               args=['-s', 'FETCH_DEBUG', '-s', 'FETCH'],
               also_asmjs=True)
# Tests emscripten_fetch() usage to persist an XHR into IndexedDB and subsequently load up from there.
  def test_fetch_cached_xhr(self):
    """A fetch persisted to IndexedDB must subsequently load back from there."""
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    self.btest('fetch/cached_xhr.cpp',
               expected='1',
               args=['-s', 'FETCH_DEBUG', '-s', 'FETCH'],
               also_asmjs=True)
# Tests that response headers get set on emscripten_fetch_t values.
  @requires_threads
  def test_fetch_response_headers(self):
    """Response headers must be populated on emscripten_fetch_t values."""
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    self.btest('fetch/response_headers.cpp', expected='1', args=['-s', 'FETCH_DEBUG', '-s', 'FETCH', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'], also_asmjs=True)
# Test emscripten_fetch() usage to stream a XHR in to memory without storing the full file in memory
  def test_fetch_stream_file(self):
    """(Skipped) Stream an XHR into memory without holding the full file in the heap."""
    self.skipTest('moz-chunked-arraybuffer was firefox-only and has been removed')
    # Strategy: create a large 128MB file, and compile with a small 16MB Emscripten heap, so that the tested file
    # won't fully fit in the heap. This verifies that streaming works properly.
    s = '12345678'
    for i in range(14):
      s = s[::-1] + s # length of str will be 2^17=128KB
    with open('largefile.txt', 'w') as f:
      for i in range(1024):
        f.write(s)
    self.btest('fetch/stream_file.cpp',
               expected='1',
               args=['-s', 'FETCH_DEBUG', '-s', 'FETCH', '-s', 'INITIAL_MEMORY=536870912'],
               also_asmjs=True)
# Tests emscripten_fetch() usage in synchronous mode when used from the main
# thread proxied to a Worker with -s PROXY_TO_PTHREAD=1 option.
  @requires_threads
  def test_fetch_sync_xhr(self):
    """Synchronous emscripten_fetch() must work when main is proxied to a worker."""
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    self.btest('fetch/sync_xhr.cpp', expected='1', args=['-s', 'FETCH_DEBUG', '-s', 'FETCH', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'])
# Tests emscripten_fetch() usage when user passes none of the main 3 flags (append/replace/no_download).
# In that case, in append is implicitly understood.
  @requires_threads
  def test_fetch_implicit_append(self):
    """With none of append/replace/no_download given, fetch must behave as implicit append."""
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    self.btest('fetch/example_synchronous_fetch.cpp', expected='200', args=['-s', 'FETCH', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'])
# Tests synchronous emscripten_fetch() usage from wasm pthread in fastcomp.
  @requires_threads
  def test_fetch_sync_xhr_in_wasm(self):
    """Synchronous emscripten_fetch() from a wasm pthread must work.

    NOTE(review): body is identical to test_fetch_implicit_append above —
    possibly a duplicate worth consolidating.
    """
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    self.btest('fetch/example_synchronous_fetch.cpp', expected='200', args=['-s', 'FETCH', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'])
# Tests that the Fetch API works for synchronous XHRs when used with --proxy-to-worker.
  @requires_threads
  def test_fetch_sync_xhr_in_proxy_to_worker(self):
    """The Fetch API must support synchronous XHRs under --proxy-to-worker."""
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    self.btest('fetch/sync_xhr.cpp',
               expected='1',
               args=['-s', 'FETCH_DEBUG', '-s', 'FETCH', '--proxy-to-worker'],
               also_asmjs=True)
# Tests waiting on EMSCRIPTEN_FETCH_WAITABLE request from a worker thread
  @no_wasm_backend("emscripten_fetch_wait uses an asm.js based web worker")
  @requires_threads
  def test_fetch_sync_fetch_in_main_thread(self):
    """Waiting on an EMSCRIPTEN_FETCH_WAITABLE request from a worker thread must work."""
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    self.btest('fetch/sync_fetch_in_main_thread.cpp', expected='0', args=['-s', 'FETCH_DEBUG', '-s', 'FETCH', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'])
  @requires_threads
  @no_wasm_backend("WASM2JS does not yet support pthreads")
  def test_fetch_idb_store(self):
    """Storing data into IndexedDB via the fetch API must work (asm.js + pthreads)."""
    self.btest('fetch/idb_store.cpp', expected='0', args=['-s', 'USE_PTHREADS', '-s', 'FETCH', '-s', 'WASM=0', '-s', 'PROXY_TO_PTHREAD'])
  @requires_threads
  @no_wasm_backend("WASM2JS does not yet support pthreads")
  def test_fetch_idb_delete(self):
    """Deleting an IndexedDB entry via the fetch API must work (asm.js + pthreads)."""
    shutil.copyfile(test_file('gears.png'), 'gears.png')
    self.btest('fetch/idb_delete.cpp', expected='0', args=['-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG', '-s', 'FETCH', '-s', 'WASM=0', '-s', 'PROXY_TO_PTHREAD'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_hello_file(self):
    """ASMFS basic file loading, including its valid filename character set."""
    # Test basic file loading and the valid character set for files.
    ensure_dir('dirrey')
    shutil.copyfile(test_file('asmfs', 'hello_file.txt'), os.path.join(self.get_dir(), 'dirrey', 'hello file !#$%&\'()+,-.;=@[]^_`{}~ %%.txt'))
    self.btest_exit('asmfs/hello_file.cpp', args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG', '-s', 'PROXY_TO_PTHREAD'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_read_file_twice(self):
    """ASMFS: reading the same file twice must succeed."""
    shutil.copyfile(test_file('asmfs', 'hello_file.txt'), 'hello_file.txt')
    self.btest_exit('asmfs/read_file_twice.cpp', args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG', '-s', 'PROXY_TO_PTHREAD'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_fopen_write(self):
    """ASMFS: fopen() + write must work."""
    self.btest_exit('asmfs/fopen_write.cpp', args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_mkdir_create_unlink_rmdir(self):
    """ASMFS: mkdir/create/unlink/rmdir via the cstdio remove() test."""
    self.btest('cstdio/test_remove.cpp', expected='0', args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_dirent_test_readdir(self):
    """ASMFS: readdir() must enumerate directory entries."""
    self.btest('dirent/test_readdir.c', expected='0', args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_dirent_test_readdir_empty(self):
    """ASMFS: readdir() on an empty directory must behave correctly."""
    self.btest('dirent/test_readdir_empty.c', expected='0', args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_unistd_close(self):
    """ASMFS: unistd close() must work."""
    self.btest_exit(test_file('unistd', 'close.c'), 0, args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_unistd_access(self):
    """ASMFS: unistd access() must work."""
    self.btest_exit(test_file('unistd', 'access.c'), 0, args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_unistd_unlink(self):
    """ASMFS: unistd unlink() must work (symlink cases excluded for now)."""
    # TODO: Once symlinks are supported, remove -DNO_SYMLINK=1
    self.btest_exit(test_file('unistd', 'unlink.c'), 0, args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG', '-DNO_SYMLINK=1'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_test_fcntl_open(self):
    """ASMFS: fcntl open() flag combinations must work."""
    self.btest('fcntl/test_fcntl_open.c', expected='0', args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG', '-s', 'PROXY_TO_PTHREAD'])
  @requires_asmfs
  @requires_threads
  def test_asmfs_relative_paths(self):
    """ASMFS: relative path resolution must work."""
    self.btest_exit('asmfs/relative_paths.cpp', args=['-s', 'ASMFS', '-s', 'WASM=0', '-s', 'USE_PTHREADS', '-s', 'FETCH_DEBUG'])
@requires_threads
def test_pthread_locale(self):
for args in [
[],
['-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=2'],
]:
print("Testing with: ", args)
self.btest('pthread/test_pthread_locale.c', expected='1', args=args)
# Tests the Emscripten HTML5 API emscripten_set_canvas_element_size() and emscripten_get_canvas_element_size() functionality in singlethreaded programs.
  def test_emscripten_set_canvas_element_size(self):
    """emscripten_set/get_canvas_element_size() must work in single-threaded programs."""
    self.btest('emscripten_set_canvas_element_size.c', expected='1')
# Test that emscripten_get_device_pixel_ratio() is callable from pthreads (and proxies to main thread to obtain the proper window.devicePixelRatio value).
  @requires_threads
  def test_emscripten_get_device_pixel_ratio(self):
    """emscripten_get_device_pixel_ratio() must be callable from pthreads (proxies to main)."""
    for args in [[], ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD']]:
      self.btest('emscripten_get_device_pixel_ratio.c', expected='1', args=args)
# Tests that emscripten_run_script() variants of functions work in pthreads.
@requires_threads
def test_pthread_run_script(self):
for args in [[], ['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD']]:
self.btest(test_file('pthread', 'test_pthread_run_script.cpp'), expected='1', args=['-O3'] + args)
# Tests emscripten_set_canvas_element_size() and OffscreenCanvas functionality in different build configurations.
  @requires_threads
  @requires_graphics_hardware
  def test_emscripten_animate_canvas_element_size(self):
    """Animated canvas resizing must work across main-loop/offscreen build configurations."""
    for args in [
      ['-DTEST_EMSCRIPTEN_SET_MAIN_LOOP=1'],
      ['-DTEST_EMSCRIPTEN_SET_MAIN_LOOP=1', '-s', 'PROXY_TO_PTHREAD', '-s', 'USE_PTHREADS', '-s', 'OFFSCREEN_FRAMEBUFFER=1'],
      ['-DTEST_EMSCRIPTEN_SET_MAIN_LOOP=1', '-s', 'PROXY_TO_PTHREAD', '-s', 'USE_PTHREADS', '-s', 'OFFSCREEN_FRAMEBUFFER=1', '-DTEST_EXPLICIT_CONTEXT_SWAP=1'],
      ['-DTEST_EXPLICIT_CONTEXT_SWAP=1', '-s', 'PROXY_TO_PTHREAD', '-s', 'USE_PTHREADS', '-s', 'OFFSCREEN_FRAMEBUFFER=1'],
      ['-DTEST_EXPLICIT_CONTEXT_SWAP=1', '-s', 'PROXY_TO_PTHREAD', '-s', 'USE_PTHREADS', '-s', 'OFFSCREEN_FRAMEBUFFER=1', '-DTEST_MANUALLY_SET_ELEMENT_CSS_SIZE=1'],
      ['-DTEST_EMSCRIPTEN_SET_MAIN_LOOP=1', '-s', 'OFFSCREENCANVAS_SUPPORT'],
    ]:
      cmd = ['-lGL', '-O3', '-g2', '--shell-file', test_file('canvas_animate_resize_shell.html'), '-s', 'GL_DEBUG', '--threadprofiler'] + args
      print(' '.join(cmd))
      self.btest('canvas_animate_resize.cpp', expected='1', args=cmd)
# Tests the absolute minimum pthread-enabled application.
@requires_threads
def test_pthread_hello_thread(self):
for opts in [[], ['-O3']]:
for modularize in [[], ['-s', 'MODULARIZE', '-s', 'EXPORT_NAME=MyModule', '--shell-file', test_file('shell_that_launches_modularize.html')]]:
self.btest(test_file('pthread', 'hello_thread.c'), expected='1', args=['-s', 'USE_PTHREADS'] + modularize + opts)
# Tests that a pthreads build of -s MINIMAL_RUNTIME=1 works well in different build modes
  def test_minimal_runtime_hello_pthread(self):
    """A pthreads build with -s MINIMAL_RUNTIME=1 must work in different build modes."""
    for opts in [[], ['-O3']]:
      for modularize in [[], ['-s', 'MODULARIZE', '-s', 'EXPORT_NAME=MyModule']]:
        self.btest(test_file('pthread', 'hello_thread.c'), expected='1', args=['-s', 'MINIMAL_RUNTIME', '-s', 'USE_PTHREADS'] + modularize + opts)
# Tests memory growth in pthreads mode, but still on the main thread.
  @requires_threads
  def test_pthread_growth_mainthread(self):
    """Memory growth in pthreads mode, triggered on the main thread, must work."""
    # Growth with pthreads produces a warning, so -Werror must be relaxed here.
    self.emcc_args.remove('-Werror')

    def run(emcc_args=[]):
      self.btest(test_file('pthread', 'test_pthread_memory_growth_mainthread.c'), expected='1', args=['-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=2', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'INITIAL_MEMORY=32MB', '-s', 'MAXIMUM_MEMORY=256MB'] + emcc_args, also_asmjs=False)
    run()
    run(['-s', 'PROXY_TO_PTHREAD'])
# Tests memory growth in a pthread.
  @requires_threads
  def test_pthread_growth(self):
    """Memory growth triggered from within a pthread must work."""
    # Growth with pthreads produces a warning, so -Werror must be relaxed here.
    self.emcc_args.remove('-Werror')

    def run(emcc_args=[]):
      self.btest(test_file('pthread', 'test_pthread_memory_growth.c'), expected='1', args=['-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=2', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'INITIAL_MEMORY=32MB', '-s', 'MAXIMUM_MEMORY=256MB', '-g'] + emcc_args, also_asmjs=False)
    run()
    run(['-s', 'ASSERTIONS'])
    run(['-s', 'PROXY_TO_PTHREAD'])
# Tests that time in a pthread is relative to the main thread, so measurements
# on different threads are still monotonic, as if checking a single central
# clock.
  @requires_threads
  def test_pthread_reltime(self):
    """Time in a pthread is relative to the main thread, so cross-thread timings stay monotonic."""
    self.btest(test_file('pthread', 'test_pthread_reltime.cpp'), expected='3', args=['-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE'])
# Tests that it is possible to load the main .js file of the application manually via a Blob URL, and still use pthreads.
  @requires_threads
  def test_load_js_from_blob_with_pthreads(self):
    """The main .js file must be loadable via a Blob URL while still using pthreads."""
    # TODO: enable this with wasm, currently pthreads/atomics have limitations
    self.compile_btest([test_file('pthread', 'hello_thread.c'), '-s', 'USE_PTHREADS', '-o', 'hello_thread_with_blob_url.js'])
    shutil.copyfile(test_file('pthread', 'main_js_as_blob_loader.html'), 'hello_thread_with_blob_url.html')
    self.run_browser('hello_thread_with_blob_url.html', 'hello from thread!', '/report_result?1')
# Tests that base64 utils work in browser with no native atob function
  def test_base64_atob_fallback(self):
    """SINGLE_FILE base64 decoding must fall back gracefully when atob/fetch are absent."""
    create_file('test.c', r'''
      #include <stdio.h>
      #include <emscripten.h>
      int main() {
        return 0;
      }
    ''')
    # generate a dummy file
    create_file('dummy_file', 'dummy')
    # compile the code with the modularize feature and the preload-file option enabled
    self.compile_btest(['test.c', '-s', 'EXIT_RUNTIME', '-s', 'MODULARIZE', '-s', 'EXPORT_NAME="Foo"', '--preload-file', 'dummy_file', '-s', 'SINGLE_FILE'])
    # The page deletes atob/fetch before loading, forcing the JS fallback decoder.
    create_file('a.html', '''
      <script>
        atob = undefined;
        fetch = undefined;
      </script>
      <script src="a.out.js"></script>
      <script>
        var foo = Foo();
      </script>
    ''')
    self.run_browser('a.html', '...', '/report_result?exit:0')
# Tests that SINGLE_FILE works as intended in generated HTML (with and without Worker)
def test_single_file_html(self):
  """-s SINGLE_FILE must embed everything in the .html: no separate
  .js/.worker.js/.wasm/.mem side files may be emitted."""
  self.btest('single_file_static_initializer.cpp', '19', args=['-s', 'SINGLE_FILE'], also_proxied=True)
  self.assertExists('test.html')
  self.assertNotExists('test.js')
  self.assertNotExists('test.worker.js')
  self.assertNotExists('test.wasm')
  self.assertNotExists('test.mem')

# Tests that SINGLE_FILE works as intended in generated HTML with MINIMAL_RUNTIME
def test_minimal_runtime_single_file_html(self):
  # Check every combination of wasm/asm.js output crossed with -O3.
  for wasm in [0, 1]:
    for opts in [[], ['-O3']]:
      self.btest('single_file_static_initializer.cpp', '19', args=opts + ['-s', 'MINIMAL_RUNTIME', '-s', 'SINGLE_FILE', '-s', 'WASM=' + str(wasm)])
      self.assertExists('test.html')
      self.assertNotExists('test.js')
      self.assertNotExists('test.wasm')
      self.assertNotExists('test.asm.js')
      self.assertNotExists('test.mem')
      # (test.js is asserted twice in the original; harmless redundancy)
      self.assertNotExists('test.js')
      self.assertNotExists('test.worker.js')

# Tests that SINGLE_FILE works when built with ENVIRONMENT=web and Closure enabled (#7933)
def test_single_file_in_web_environment_with_closure(self):
  self.btest('minimal_hello.c', '0', args=['-s', 'SINGLE_FILE', '-s', 'ENVIRONMENT=web', '-O2', '--closure=1'])

# Tests that SINGLE_FILE works as intended with locateFile
def test_single_file_locate_file(self):
  # locateFile must never be handed a data: URI when SINGLE_FILE embeds
  # everything — the injected hook throws if it is.
  for wasm_enabled in [True, False]:
    args = [test_file('browser_test_hello_world.c'), '-o', 'test.js', '-s', 'SINGLE_FILE']
    if not wasm_enabled:
      args += ['-s', 'WASM=0']
    self.compile_btest(args)
    create_file('test.html', '''
<script>
var Module = {
locateFile: function (path) {
if (path.indexOf('data:') === 0) {
throw new Error('Unexpected data URI.');
}
return path;
}
};
</script>
<script src="test.js"></script>
''')
    self.run_browser('test.html', None, '/report_result?0')
# Tests that SINGLE_FILE works as intended in a Worker in JS output
def test_single_file_worker_js(self):
  """--proxy-to-worker with SINGLE_FILE: the worker code must be embedded
  into test.js itself, with no separate test.worker.js emitted."""
  self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.js', '--proxy-to-worker', '-s', 'SINGLE_FILE'])
  create_file('test.html', '<script src="test.js"></script>')
  self.run_browser('test.html', None, '/report_result?0')
  self.assertExists('test.js')
  self.assertNotExists('test.worker.js')

# Tests that pthreads code works as intended in a Worker. That is, a pthreads-using
# program can run either on the main thread (normal tests) or when we start it in
# a Worker in this test (in that case, both the main application thread and the worker threads
# are all inside Web Workers).
@requires_threads
def test_pthreads_started_in_worker(self):
  self.compile_btest([test_file('pthread', 'test_pthread_atomics.cpp'), '-o', 'test.js', '-s', 'INITIAL_MEMORY=64MB', '-s', 'USE_PTHREADS', '-s', 'PTHREAD_POOL_SIZE=8'])
  # Launch the compiled output from inside a Worker rather than a <script> tag.
  create_file('test.html', '''
<script>
new Worker('test.js');
</script>
''')
  self.run_browser('test.html', None, '/report_result?0')

def test_access_file_after_heap_resize(self):
  """Preloaded-file access must keep working after ALLOW_MEMORY_GROWTH
  resizes the heap, both via --preload-file and via a separate
  file_packager invocation (--pre-js + FORCE_FILESYSTEM)."""
  create_file('test.txt', 'hello from file')
  self.compile_btest([test_file('access_file_after_heap_resize.c'), '-s', 'ALLOW_MEMORY_GROWTH', '--preload-file', 'test.txt', '-o', 'page.html'])
  self.run_browser('page.html', 'hello from file', '/report_result?15')

  # with separate file packager invocation
  self.run_process([FILE_PACKAGER, 'data.data', '--preload', 'test.txt', '--js-output=' + 'data.js'])
  self.compile_btest([test_file('access_file_after_heap_resize.c'), '-s', 'ALLOW_MEMORY_GROWTH', '--pre-js', 'data.js', '-o', 'page.html', '-s', 'FORCE_FILESYSTEM'])
  self.run_browser('page.html', 'hello from file', '/report_result?15')
def test_unicode_html_shell(self):
  """Non-ASCII text (an emoji) injected into a custom --shell-file must
  survive intact into the generated HTML output."""
  create_file('main.cpp', r'''
int main() {
REPORT_RESULT(0);
return 0;
}
''')
  # Read the stock shell template with a context manager so the handle is
  # closed deterministically (the original leaked an open file object).
  with open(path_from_root('src', 'shell.html')) as f:
    shell = f.read()
  create_file('shell.html', shell.replace('Emscripten-Generated Code', 'Emscripten-Generated Emoji 😅'))
  self.compile_btest(['main.cpp', '--shell-file', 'shell.html', '-o', 'test.html'])
  self.run_browser('test.html', None, '/report_result?0')
# Tests the functionality of the emscripten_thread_sleep() function.
@requires_threads
def test_emscripten_thread_sleep(self):
  self.btest(test_file('pthread', 'emscripten_thread_sleep.c'), expected='1', args=['-s', 'USE_PTHREADS', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=[print]'])

# Tests that Emscripten-compiled applications can be run from a relative path in browser that is different than the address of the current page
def test_browser_run_from_different_directory(self):
  """Move the emitted .js/.wasm into a subdirectory and patch the HTML to
  load them from there; the app must still start and report success."""
  self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.html', '-O3'])
  ensure_dir('subdir')
  shutil.move('test.js', os.path.join('subdir', 'test.js'))
  shutil.move('test.wasm', os.path.join('subdir', 'test.wasm'))
  src = open('test.html').read()
  # Make sure JS is loaded from subdirectory
  create_file('test-subdir.html', src.replace('test.js', 'subdir/test.js'))
  self.run_browser('test-subdir.html', None, '/report_result?0')

# Similar to `test_browser_run_from_different_directory`, but asynchronous because of `-s MODULARIZE=1`
def test_browser_run_from_different_directory_async(self):
  # Exercise both the documented (Module()) and the undocumented-but-supported
  # (new Module()) instantiation styles.
  for args, creations in [
    (['-s', 'MODULARIZE'], [
      'Module();', # documented way for using modularize
      'new Module();' # not documented as working, but we support it
    ]),
  ]:
    print(args)
    # compile the code with the modularize feature and the preload-file option enabled
    self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.js', '-O3'] + args)
    ensure_dir('subdir')
    shutil.move('test.js', os.path.join('subdir', 'test.js'))
    shutil.move('test.wasm', os.path.join('subdir', 'test.wasm'))
    for creation in creations:
      print(creation)
      # Make sure JS is loaded from subdirectory
      create_file('test-subdir.html', '''
<script src="subdir/test.js"></script>
<script>
%s
</script>
''' % creation)
      self.run_browser('test-subdir.html', None, '/report_result?0')
# Similar to `test_browser_run_from_different_directory`, but
# also we eval the initial code, so currentScript is not present. That prevents us
# from finding the file in a subdir, but here we at least check we do not regress compared to the
# normal case of finding in the current dir.
def test_browser_modularize_no_current_script(self):
  """MODULARIZE output loaded via XHR + eval (so document.currentScript is
  absent) must still locate its .wasm in the current directory."""
  # test both modularize (and creating an instance) and modularize-instance
  # (which creates by itself)
  for path, args, creation in [
    ([], ['-s', 'MODULARIZE'], 'Module();'),
    (['subdir'], ['-s', 'MODULARIZE'], 'Module();'),
  ]:
    print(path, args, creation)
    filesystem_path = os.path.join('.', *path)
    ensure_dir(filesystem_path)
    # compile the code with the modularize feature and the preload-file option enabled
    self.compile_btest([test_file('browser_test_hello_world.c'), '-o', 'test.js'] + args)
    shutil.move('test.js', os.path.join(filesystem_path, 'test.js'))
    shutil.move('test.wasm', os.path.join(filesystem_path, 'test.wasm'))
    # Write the page via a context manager so the content is flushed and the
    # handle closed before the browser fetches it (the original used an
    # unclosed open(...).write(...)).
    with open(os.path.join(filesystem_path, 'test.html'), 'w') as f:
      f.write('''
<script>
setTimeout(function() {
var xhr = new XMLHttpRequest();
xhr.open('GET', 'test.js', false);
xhr.send(null);
eval(xhr.responseText);
%s
}, 1);
</script>
''' % creation)
    self.run_browser('/'.join(path + ['test.html']), None, '/report_result?0')
# --- Event-loop scheduling API smoke tests (each builds the named C file and
# expects result '0' unless noted) ---

def test_emscripten_request_animation_frame(self):
  self.btest(test_file('emscripten_request_animation_frame.c'), '0')

def test_emscripten_request_animation_frame_loop(self):
  self.btest(test_file('emscripten_request_animation_frame_loop.c'), '0')

def test_request_animation_frame(self):
  self.btest('request_animation_frame.cpp', '0', also_proxied=True)

@requires_threads
def test_emscripten_set_timeout(self):
  self.btest(test_file('emscripten_set_timeout.c'), '0', args=['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'])

@requires_threads
def test_emscripten_set_timeout_loop(self):
  self.btest(test_file('emscripten_set_timeout_loop.c'), '0', args=['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'])

def test_emscripten_set_immediate(self):
  self.btest(test_file('emscripten_set_immediate.c'), '0')

def test_emscripten_set_immediate_loop(self):
  self.btest(test_file('emscripten_set_immediate_loop.c'), '0')

@requires_threads
def test_emscripten_set_interval(self):
  self.btest(test_file('emscripten_set_interval.c'), '0', args=['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'])

# Test emscripten_performance_now() and emscripten_date_now()
@requires_threads
def test_emscripten_performance_now(self):
  self.btest(test_file('emscripten_performance_now.c'), '0', args=['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'])

@requires_threads
def test_embind_with_pthreads(self):
  self.btest('embind_with_pthreads.cpp', '1', args=['--bind', '-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD'])

def test_embind_with_asyncify(self):
  self.btest('embind_with_asyncify.cpp', '1', args=['--bind', '-s', 'ASYNCIFY'])

# Test emscripten_console_log(), emscripten_console_warn() and emscripten_console_error()
def test_emscripten_console_log(self):
  self.btest(test_file('emscripten_console_log.c'), '0', args=['--pre-js', test_file('emscripten_console_log_pre.js')])

def test_emscripten_throw_number(self):
  self.btest(test_file('emscripten_throw_number.c'), '0', args=['--pre-js', test_file('emscripten_throw_number_pre.js')])

def test_emscripten_throw_string(self):
  self.btest(test_file('emscripten_throw_string.c'), '0', args=['--pre-js', test_file('emscripten_throw_string_pre.js')])

# Tests that Closure run in combination with -s ENVIRONMENT=web mode works with a minimal console.log() application
def test_closure_in_web_only_target_environment_console_log(self):
  self.btest('minimal_hello.c', '0', args=['-s', 'ENVIRONMENT=web', '-O3', '--closure=1'])

# Tests that Closure run in combination with -s ENVIRONMENT=web mode works with a small WebGL application
@requires_graphics_hardware
def test_closure_in_web_only_target_environment_webgl(self):
  self.btest('webgl_draw_triangle.c', '0', args=['-lGL', '-s', 'ENVIRONMENT=web', '-O3', '--closure=1'])

def test_no_declare_asm_module_exports_asmjs(self):
  # DECLARE_ASM_MODULE_EXPORTS=0 in asm.js (WASM=0) mode, with and without MINIMAL_RUNTIME.
  for minimal_runtime in [[], ['-s', 'MINIMAL_RUNTIME']]:
    self.btest(test_file('declare_asm_module_exports.cpp'), '1', args=['-s', 'DECLARE_ASM_MODULE_EXPORTS=0', '-s', 'ENVIRONMENT=web', '-O3', '--closure=1', '-s', 'WASM=0'] + minimal_runtime)

def test_no_declare_asm_module_exports_wasm_minimal_runtime(self):
  self.btest(test_file('declare_asm_module_exports.cpp'), '1', args=['-s', 'DECLARE_ASM_MODULE_EXPORTS=0', '-s', 'ENVIRONMENT=web', '-O3', '--closure=1', '-s', 'MINIMAL_RUNTIME'])

# Tests that the different code paths in src/shell_minimal_runtime.html all work ok.
def test_minimal_runtime_loader_shell(self):
  args = ['-s', 'MINIMAL_RUNTIME=2']
  # Cross wasm/asm.js/SINGLE_FILE variants with MODULARIZE on/off.
  for wasm in [[], ['-s', 'WASM=0', '--memory-init-file', '0'], ['-s', 'WASM=0', '--memory-init-file', '1'], ['-s', 'SINGLE_FILE'], ['-s', 'WASM=0', '-s', 'SINGLE_FILE']]:
    for modularize in [[], ['-s', 'MODULARIZE']]:
      print(str(args + wasm + modularize))
      self.btest('minimal_hello.c', '0', args=args + wasm + modularize)

# Tests that -s MINIMAL_RUNTIME=1 works well in different build modes
def test_minimal_runtime_hello_world(self):
  for args in [[], ['-s', 'MINIMAL_RUNTIME_STREAMING_WASM_COMPILATION', '--closure=1'], ['-s', 'MINIMAL_RUNTIME_STREAMING_WASM_INSTANTIATION', '--closure', '1']]:
    self.btest(test_file('small_hello_world.c'), '0', args=args + ['-s', 'MINIMAL_RUNTIME'])
@requires_threads
def test_offset_converter(self, *args):
  """USE_OFFSET_CONVERTER under PROXY_TO_PTHREAD; on failure, dump the
  disassembled wasm to help debug issue #10539 on the bots."""
  try:
    self.btest_exit(test_file('browser', 'test_offset_converter.c'), assert_returncode=1, args=['-s', 'USE_OFFSET_CONVERTER', '-g4', '-s', 'PROXY_TO_PTHREAD', '-s', 'USE_PTHREADS'])
  except Exception:
    # dump the wasm file; this is meant to help debug #10539 on the bots
    print(self.run_process([os.path.join(building.get_binaryen_bin(), 'wasm-opt'), 'test.wasm', '-g', '--print', '-all'], stdout=PIPE).stdout)
    # Bare raise (instead of `raise e`) re-raises with the original
    # traceback intact.
    raise
# Tests emscripten_unwind_to_js_event_loop() behavior
def test_emscripten_unwind_to_js_event_loop(self, *args):
  self.btest(test_file('browser', 'test_emscripten_unwind_to_js_event_loop.c'), '1', args=['-s', 'NO_EXIT_RUNTIME'])

def test_wasm2js_fallback(self):
  """-s WASM=2 emits both .wasm and a .wasm.js fallback. Verify that the
  wasm path runs without the fallback present, and that the fallback runs
  when WebAssembly is deleted from the page."""
  for args in [[], ['-s', 'MINIMAL_RUNTIME']]:
    self.compile_btest([test_file('small_hello_world.c'), '-s', 'WASM=2', '-o', 'test.html'] + args)

    # First run with WebAssembly support enabled
    # Move the Wasm2js fallback away to test it is not accidentally getting loaded.
    os.rename('test.wasm.js', 'test.wasm.js.unused')
    self.run_browser('test.html', 'hello!', '/report_result?0')
    os.rename('test.wasm.js.unused', 'test.wasm.js')

    # Then disable WebAssembly support in VM, and try again.. Should still work with Wasm2JS fallback.
    # Use context managers so the handles are closed before the browser
    # fetches the rewritten page (the original leaked both handles).
    with open('test.html', 'r') as f:
      html = f.read()
    html = html.replace('<body>', '<body><script>delete WebAssembly;</script>')
    with open('test.html', 'w') as f:
      f.write(html)
    os.remove('test.wasm') # Also delete the Wasm file to test that it is not attempted to be loaded.
    self.run_browser('test.html', 'hello!', '/report_result?0')
def test_wasm2js_fallback_on_wasm_compilation_failure(self):
  """If the .wasm file is corrupt, wasm instantiation fails at runtime and
  the engine must fall back to the .wasm.js (wasm2js) code path."""
  for args in [[], ['-s', 'MINIMAL_RUNTIME']]:
    self.compile_btest([test_file('small_hello_world.c'), '-s', 'WASM=2', '-o', 'test.html'] + args)

    # Run without the .wasm.js file present: with Wasm support, the page should still run
    os.rename('test.wasm.js', 'test.wasm.js.unused')
    self.run_browser('test.html', 'hello!', '/report_result?0')

    # Restore the .wasm.js file, then corrupt the .wasm file, that should trigger the Wasm2js fallback to run
    os.rename('test.wasm.js.unused', 'test.wasm.js')
    # Overwriting the wasm with JS text guarantees an invalid wasm binary.
    shutil.copyfile('test.js', 'test.wasm')
    self.run_browser('test.html', 'hello!', '/report_result?0')

def test_system(self):
  self.btest(test_file('system.c'), '0')

# Tests that it is possible to hook into/override a symbol defined in a system library.
@requires_graphics_hardware
def test_override_system_js_lib_symbol(self):
  # This test verifies it is possible to override a symbol from WebGL library.

  # When WebGL is implicitly linked in, the implicit linking should happen before any user --js-libraries, so that they can adjust
  # the behavior afterwards.
  self.btest(test_file('test_override_system_js_lib_symbol.c'),
             expected='5121',
             args=['--js-library', test_file('test_override_system_js_lib_symbol.js')])

  # When WebGL is explicitly linked to in strict mode, the linking order on command line should enable overriding.
  self.btest(test_file('test_override_system_js_lib_symbol.c'),
             expected='5121',
             args=['-s', 'AUTO_JS_LIBRARIES=0', '-lwebgl.js', '--js-library', test_file('test_override_system_js_lib_symbol.js')])
@no_firefox('no 4GB support yet')
def test_zzz_zzz_4gb(self):
  """Allocations in the 2-4GB range must work when growth is enabled and
  MAXIMUM_MEMORY=4GB. (Named zzz_zzz so it sorts last; see below.)"""
  # TODO Convert to an actual browser test when it reaches stable.
  # For now, keep this in browser as this suite runs serially, which
  # means we don't compete for memory with anything else (and run it
  # at the very very end, to reduce the risk of it OOM-killing the
  # browser).

  # test that we can allocate in the 2-4GB range, if we enable growth and
  # set the max appropriately
  self.emcc_args += ['-O2', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=4GB']
  self.do_run_in_out_file_test('browser', 'test_4GB.cpp', js_engines=[config.V8_ENGINE])

# Tests that emmalloc supports up to 4GB Wasm heaps.
@no_firefox('no 4GB support yet')
def test_zzz_zzz_emmalloc_4gb(self):
  self.btest(test_file('mem_growth.cpp'),
             expected='-65536', # == 4*1024*1024*1024 - 65536 casted to signed
             args=['-s', 'MALLOC=emmalloc', '-s', 'ABORTING_MALLOC=0', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'MAXIMUM_MEMORY=4GB'])

# Test that it is possible to malloc() a huge 3GB memory block in 4GB mode using emmalloc.
# Also test emmalloc-memvalidate and emmalloc-memvalidate-verbose build configurations.
@no_firefox('no 4GB support yet')
def test_emmalloc_3GB(self):
  def test(args):
    # Helper: run alloc_3gb.cpp with the given extra MALLOC variant flags.
    self.btest(test_file('alloc_3gb.cpp'),
               expected='0',
               args=['-s', 'MAXIMUM_MEMORY=4GB', '-s', 'ALLOW_MEMORY_GROWTH=1'] + args)

  test(['-s', 'MALLOC=emmalloc'])
  test(['-s', 'MALLOC=emmalloc-debug'])
  test(['-s', 'MALLOC=emmalloc-memvalidate'])
  test(['-s', 'MALLOC=emmalloc-memvalidate-verbose'])

@no_firefox('no 4GB support yet')
def test_zzz_zzz_emmalloc_memgrowth(self, *args):
  self.btest(test_file('browser', 'emmalloc_memgrowth.cpp'), expected='0', args=['-s', 'MALLOC=emmalloc', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'ABORTING_MALLOC=0', '-s', 'ASSERTIONS=2', '-s', 'MINIMAL_RUNTIME=1', '-s', 'MAXIMUM_MEMORY=4GB'])

@no_firefox('no 4GB support yet')
def test_zzz_zzz_2gb_fail(self):
  """Growth must not exceed 2GB without a larger max, and the allocation
  failure must be catchable."""
  # TODO Convert to an actual browser test when it reaches stable.
  # For now, keep this in browser as this suite runs serially, which
  # means we don't compete for memory with anything else (and run it
  # at the very very end, to reduce the risk of it OOM-killing the
  # browser).

  # test that growth doesn't go beyond 2GB without the max being set for that,
  # and that we can catch an allocation failure exception for that
  self.emcc_args += ['-O2', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=2GB']
  self.do_run_in_out_file_test('browser', 'test_2GB_fail.cpp', js_engines=[config.V8_ENGINE])

@no_firefox('no 4GB support yet')
def test_zzz_zzz_4gb_fail(self):
  """An allocation that would overflow past 4GB must be reported properly."""
  # TODO Convert to an actual browser test when it reaches stable.
  # For now, keep this in browser as this suite runs serially, which
  # means we don't compete for memory with anything else (and run it
  # at the very very end, to reduce the risk of it OOM-killing the
  # browser).

  # test that we properly report an allocation error that would overflow over
  # 4GB.
  self.emcc_args += ['-O2', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=4GB', '-s', 'ABORTING_MALLOC=0']
  self.do_run_in_out_file_test('browser', 'test_4GB_fail.cpp', js_engines=[config.V8_ENGINE])

@disabled("only run this manually, to test for race conditions")
@parameterized({
  'normal': ([],),
  'assertions': (['-s', 'ASSERTIONS'],)
})
@requires_threads
def test_manual_pthread_proxy_hammer(self, args):
  # the specific symptom of the hang that was fixed is that the test hangs
  # at some point, using 0% CPU. often that occured in 0-200 iterations, but
  # you may want to adjust "ITERATIONS".
  self.btest(test_file('pthread', 'test_pthread_proxy_hammer.cpp'),
             expected='0',
             args=['-s', 'USE_PTHREADS', '-O2', '-s', 'PROXY_TO_PTHREAD',
                   '-DITERATIONS=1024', '-g1'] + args,
             timeout=10000,
             # don't run this with the default extra_tries value, as this is
             # *meant* to notice something random, a race condition.
             extra_tries=0)

def test_assert_failure(self):
  """A failing C assert() must abort with the assertion text visible."""
  self.btest(test_file('browser', 'test_assert_failure.c'), 'abort:Assertion failed: false && "this is a test"')
# Path to the emrun launcher script shipped with emscripten.
EMRUN = path_from_root('emrun')


class emrun(RunnerCore):
  """Tests for the emrun command-line tool itself (info queries, manual
  browser launch mode, and a full end-to-end run)."""

  def test_emrun_info(self):
    """--system_info/--browser_info/--list_browsers must print data and
    never a Python traceback."""
    if not has_browser():
      self.skipTest('need a browser')
    result = self.run_process([EMRUN, '--system_info', '--browser_info'], stdout=PIPE).stdout
    assert 'CPU' in result
    assert 'Browser' in result
    assert 'Traceback' not in result

    result = self.run_process([EMRUN, '--list_browsers'], stdout=PIPE).stdout
    assert 'Traceback' not in result

  def test_no_browser(self):
    # Test --no_browser mode where we have to take care of launching the browser ourselves
    # and then killing emrun when we are done.
    if not has_browser():
      self.skipTest('need a browser')
    self.run_process([EMCC, test_file('test_emrun.c'), '--emrun', '-o', 'hello_world.html'])
    # Start the emrun server only; it will not spawn a browser itself.
    proc = subprocess.Popen([EMRUN, '--no_browser', '.', '--port=3333'], stdout=PIPE)
    try:
      if EMTEST_BROWSER:
        print('Starting browser')
        browser_cmd = shlex.split(EMTEST_BROWSER)
        browser = subprocess.Popen(browser_cmd + ['http://localhost:3333/hello_world.html'])
        try:
          while True:
            # NOTE(review): read() with no size blocks until the server's
            # stdout reaches EOF, so this loop likely iterates once — TODO
            # confirm the intended streaming behavior.
            stdout = proc.stdout.read()
            if b'Dumping out file' in stdout:
              break
        finally:
          print('Terminating browser')
          browser.terminate()
          browser.wait()
    finally:
      print('Terminating emrun server')
      proc.terminate()
      proc.wait()

  def test_emrun(self):
    """End-to-end emrun run: build with --emrun, launch, pass program
    arguments after `--`, then verify the captured stdout/stderr logs."""
    self.run_process([EMCC, test_file('test_emrun.c'), '--emrun', '-o', 'hello_world.html'])
    if not has_browser():
      self.skipTest('need a browser')

    # We cannot run emrun from the temp directory the suite will clean up afterwards, since the
    # browser that is launched will have that directory as startup directory, and the browser will
    # not close as part of the test, pinning down the cwd on Windows and it wouldn't be possible to
    # delete it. Therefore switch away from that directory before launching.
    os.chdir(path_from_root())

    args_base = [EMRUN, '--timeout', '30', '--safe_firefox_profile',
                 '--kill_exit', '--port', '6939', '--verbose',
                 '--log_stdout', self.in_dir('stdout.txt'),
                 '--log_stderr', self.in_dir('stderr.txt')]

    # Verify that trying to pass argument to the page without the `--` separator will
    # generate an actionable error message
    err = self.expect_fail(args_base + ['--foo'])
    self.assertContained('error: unrecognized arguments: --foo', err)
    self.assertContained('remember to add `--` between arguments', err)

    if EMTEST_BROWSER is not None:
      # If EMTEST_BROWSER carried command line arguments to pass to the browser,
      # (e.g. "firefox -profile /path/to/foo") those can't be passed via emrun,
      # so strip them out.
      browser_cmd = shlex.split(EMTEST_BROWSER)
      browser_path = browser_cmd[0]
      args_base += ['--browser', browser_path]
      if len(browser_cmd) > 1:
        browser_args = browser_cmd[1:]
        if 'firefox' in browser_path and ('-profile' in browser_args or '--profile' in browser_args):
          # emrun uses its own -profile, strip it out
          parser = argparse.ArgumentParser(add_help=False) # otherwise it throws with -headless
          parser.add_argument('-profile')
          parser.add_argument('--profile')
          browser_args = parser.parse_known_args(browser_args)[1]
        if browser_args:
          args_base += ['--browser_args', ' ' + ' '.join(browser_args)]

    # NOTE(review): `args += [...]` below mutates args_base in place on the
    # first iteration; the second list element was already built before the
    # loop started, so this is harmless today — but beware when editing.
    for args in [
      args_base,
      args_base + ['--private_browsing', '--port', '6941']
    ]:
      args += [self.in_dir('hello_world.html'), '--', '1', '2', '--3']
      print(shared.shlex_join(args))
      proc = self.run_process(args, check=False)
      self.assertEqual(proc.returncode, 100)
      stdout = open(self.in_dir('stdout.txt'), 'r').read()
      stderr = open(self.in_dir('stderr.txt'), 'r').read()
      self.assertContained('argc: 4', stdout)
      self.assertContained('argv[3]: --3', stdout)
      self.assertContained('hello, world!', stdout)
      self.assertContained('Testing ASCII characters: !"$%&\'()*+,-./:;<=>?@[\\]^_`{|}~', stdout)
      self.assertContained('Testing char sequences: %20%21 ä', stdout)
      self.assertContained('hello, error stream!', stderr)
| 47.883954 | 310 | 0.646459 |
3c0f89f3ec5e091a5567faf10c0d96a3806118db | 8,788 | py | Python | suapp/moduleloader.py | schilduil/suapp | c6e269fca0ce7af1127d41906ff811b007ad915f | [
"MIT"
] | null | null | null | suapp/moduleloader.py | schilduil/suapp | c6e269fca0ce7af1127d41906ff811b007ad915f | [
"MIT"
] | null | null | null | suapp/moduleloader.py | schilduil/suapp | c6e269fca0ce7af1127d41906ff811b007ad915f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Loads the datamodels of modules.
A module must have a datamodel.py for this to work:
modlib/<module>/datamodel.py
The datamodel.py must have the following elements:
app_name
A string with the name of the app to which this module belongs.
def requirements()
A function returning a list of modules required by this module.
def definitions()
A function returning a dictionary of names: subclass of db.Entity
that describe the database tables of the module.
def ui_definitions()
A function returning a dictionary of UI ORM classes: subclass of
suapp.orm.UiOrmObject that describes the UI ORM that act as interface
between the application and the database.
def view_definitions()
A function returning a tuple of (queries, views, flow) where
views is a dict of view definitions.
queries is a dict of queries used in views.
flow is an update on the flow.
"""
import datetime
import importlib
import logging
import sys
from pony.orm import *
import suapp.orm
from suapp.jandw import Jeeves
# Registry of module names that have been successfully loaded so far.
modules = []

# Single shared PonyORM database that all modules bind their entities to.
db = Database()

__all__ = ["ModuleDependencyLoading", "import_modlib", "get_database", "modules"]


def get_database():
    """Return the shared PonyORM Database instance."""
    return db


class ModuleDependencyLoading(ImportError):
    """Raised when a module's required dependency fails to load."""

    pass
def import_modlib(app_name, module_name, jeeves, scope=None, config=None):
    """
    Import the modlib module *module_name* and all of its dependencies.

    Parameters:
        app_name: name of the application. If falsy it is taken from the
            module itself; modules belonging to a different app are skipped.
        module_name: name of the module under the ``modlib`` package
            (imported as ``modlib.<module_name>``).
        jeeves: Jeeves instance whose queries, views and flow are updated
            from the module's view definitions.
        scope: shared dict passed to the modules' definition functions
            (created when None).
        config: optional configuration stored on suapp.orm.UiOrmObject.

    Returns:
        A list of the module names loaded by this call (dependencies
        included). The list is empty when the module was already loaded
        or when it belongs to a different app — always a list, so callers
        can both iterate it and use it in a truthiness check.

    Raises:
        ModuleDependencyLoading: when a required module fails to load.
    """
    # Names of the modules loaded by this call (dependencies included).
    modules_loaded = []
    if module_name in modules:
        # Already loaded, skip.
        return modules_loaded
    # Initializing config and scope if not passed.
    if config is not None:
        suapp.orm.UiOrmObject.config = config
    if scope is None:
        scope = {}
    # Importing the module in Python.
    module_entity = importlib.import_module("modlib.%s" % (module_name))
    # Checking if the app matches in the module.
    if app_name:
        if app_name != module_entity.app_name:
            # Wrong app type: skipping and warn about it.
            logging.getLogger(__name__).warning(
                "Skipping module %s (%s) as it does not belong to app %s.",
                module_name,
                module_entity.app_name,
                app_name,
            )
            # Return an empty list: falsy like the previous `return False`,
            # but type-consistent with every other return path.
            return []
    else:
        # Initializing it if it isn't set yet.
        app_name = module_entity.app_name
    # First making sure all requirement modules are loaded.
    for requirement_name in module_entity.requirements():
        # Checking if we've already done the required module.
        if requirement_name not in modules:
            # Trying to import the dependency recursively.
            required_modules_loaded = import_modlib(
                app_name, requirement_name, jeeves, scope
            )
            if not required_modules_loaded:
                # Import of the requirement failed.
                raise ModuleDependencyLoading(
                    "Could not load datamodule %s because requirement %s failed to load."
                    % (module_name, requirement_name)
                )
            modules_loaded.extend(required_modules_loaded)
    # Loading all the PonyORM classes into the global scope.
    classes_dict = module_entity.definitions(db, scope)
    if "modlib" not in globals():
        scope["modlib"] = sys.modules["modlib"]
        globals().update({"modlib": sys.modules["modlib"]})
    for name, value in classes_dict.items():
        # Publish each entity class on the module it was defined in.
        setattr(sys.modules[value.__module__], name, value)
    scope["suapp"] = sys.modules["suapp"]
    # Loading all the UI ORM classes in to the global scope.
    classes_dict = module_entity.ui_definitions(db, scope)
    for name, value in classes_dict.items():
        setattr(sys.modules[value.__module__], name, value)
    # Loading the views: queries used by views, the views themselves,
    # and flow updates.
    (queries, views, flow) = module_entity.view_definitions()
    if queries:
        jeeves.queries.update(queries)
    if flow:
        jeeves.flow.update(flow)
    if views:
        jeeves.views.update(views)
    # Adding to the list of imported modules.
    modules.append(module_name)
    modules_loaded.append(module_name)
    logging.getLogger(__name__).info("Loaded %s.", module_name)
    return modules_loaded
if __name__ == "__main__":
    # Manual smoke test: load the requested modules into an in-memory SQLite
    # database and exercise the base/kinship demo data.
    # Usage: moduleloader.py [app_name [module ...]]
    # Default settings
    app_name = "suapp"
    modules_to_import = []
    # Parsing command line arguments.
    if len(sys.argv) > 1:
        app_name = sys.argv[1]
    if len(sys.argv) > 2:
        for mod in sys.argv[2:]:
            modules_to_import.append(mod)
    # If no modules, at least do base.
    if not modules_to_import:
        modules_to_import = ["base"]
    scope = {}
    for mod in modules_to_import:
        import_modlib(app_name, mod, Jeeves(), scope)
    print("Modules: %s" % (modules))
    db.bind("sqlite", ":memory:")
    db.generate_mapping(create_tables=True)
    with db_session():
        try:
            # === BASE === "
            print(modlib.base.Individual.mro())
            # Build a small pedigree: VAYF and GOc are founders.
            vayf = modlib.base.Individual(
                code="VAYF", dob=datetime.date(year=2007, month=1, day=1)
            )
            goc = modlib.base.Individual(
                code="GOc", dob=datetime.date(year=2006, month=1, day=1)
            )
            govayf62 = modlib.base.Individual(
                code="(GOVAYF)62",
                dob=datetime.datetime(year=2008, month=1, day=1),
                parents=[goc, vayf],
            )
            ac = modlib.base.Individual(
                code="AC", dob=datetime.datetime(year=2009, month=1, day=1)
            )
            ac62110 = modlib.base.Individual(
                code="(AC62)110",
                dob=datetime.datetime(year=2010, month=1, day=1),
                parents=[ac, govayf62],
            )
            # (AC110)280 is inbred: AC is both parent and grandparent.
            ac110280 = modlib.base.Individual(
                code="(AC110)280",
                dob=datetime.datetime(year=2011, month=1, day=1),
                parents=[ac, ac62110],
            )
            print(vayf)
            print(goc)
            print(modlib.kinship.Kinship.mro())
            print(goc._attrs_)
            # === KINSHIP === #
            # Founders are unrelated: kinship 0.
            k_goc_vayf = modlib.kinship.Kinship(first=goc, second=vayf, kinship=0.0)
            ui_k_goc_vayf = modlib.kinship.UiKinship(orm=k_goc_vayf)
            print(
                "%s, %s: %s"
                % (
                    ui_k_goc_vayf.first.code,
                    ui_k_goc_vayf.second.code,
                    ui_k_goc_vayf.kinship,
                )
            )
            flush()
            # Look up the same kinship in the reversed order.
            ui_k_goc_vayf = modlib.kinship.UiKinship(first=vayf, second=goc)
            print(
                "%s, %s: %s"
                % (
                    ui_k_goc_vayf.first.code,
                    ui_k_goc_vayf.second.code,
                    ui_k_goc_vayf.kinship,
                )
            )
            print("Parents (GOVAYF)62: %s" % (govayf62.parents.page(1, pagesize=2)))
            print("")
            # Start calculation inbreeding
            i = modlib.base.UiIndividual(orm=ac110280)
            print("Record: %s" % (i._ui_orm))
            for key in sorted(i.ui_attributes):
                print("\t%s: %s" % (key, getattr(i, key)))
            print("")
            print("Record: %s" % (i._ui_orm))
            for key in sorted(i.__dict__):
                print("\t%s: %s" % (key, getattr(i, key)))
            print("")
            print("Record: %s" % (ui_k_goc_vayf))
            for key in sorted(ui_k_goc_vayf.ui_attributes):
                print("\t%s: %s" % (key, getattr(ui_k_goc_vayf, key)))
            print("")
            print("Calculated kinships:")
            for kinship in select(c for c in modlib.kinship.Kinship):
                if kinship.pc_kinship:
                    print(
                        "\t%s, %s: %2.2f%% (%2.2f%%)"
                        % (
                            kinship.first.code,
                            kinship.second.code,
                            kinship.kinship * 100.00,
                            kinship.pc_kinship * 100.00,
                        )
                    )
                else:
                    print(
                        "\t%s, %s: %2.2f%%"
                        % (
                            kinship.first.code,
                            kinship.second.code,
                            kinship.kinship * 100.00,
                        )
                    )
            print("")
            print(
                "Inbreeding in %s is: %2.2f%% (%2.2f%%)"
                % (i.code, (i.ui_inbreeding) * 100.00, (i.ui_pc_inbreeding) * 100.00)
            )
            print("")
        except:
            # NOTE(review): deliberately swallows any demo failure (enable the
            # commented `raise` to debug). A bare except also catches
            # KeyboardInterrupt/SystemExit — consider `except Exception`.
            # raise
            pass
| 33.670498 | 89 | 0.546541 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.