hexsha stringlengths 40 40 | size int64 7 1.04M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 247 | max_stars_repo_name stringlengths 4 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 368k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 247 | max_issues_repo_name stringlengths 4 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 247 | max_forks_repo_name stringlengths 4 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.04M | avg_line_length float64 1.77 618k | max_line_length int64 1 1.02M | alphanum_fraction float64 0 1 | original_content stringlengths 7 1.04M | filtered:remove_function_no_docstring int64 -102 942k | filtered:remove_class_no_docstring int64 -354 977k | filtered:remove_delete_markers int64 0 60.1k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
eae142ea1ecb1f8ff613dd717c0d305593aeab43 | 2,128 | py | Python | src/Operators/RaycastSelect.py | paigeco/VirtualGoniometer | 536e7e77fbb036ad8d777b42e751a0f3e80b8242 | [
"CC0-1.0"
] | 1 | 2021-02-22T02:53:26.000Z | 2021-02-22T02:53:26.000Z | src/Operators/RaycastSelect.py | paigeco/VirtualGoniometer | 536e7e77fbb036ad8d777b42e751a0f3e80b8242 | [
"CC0-1.0"
] | 5 | 2021-03-26T15:15:34.000Z | 2021-06-11T20:16:00.000Z | src/Operators/RaycastSelect.py | paigeco/VirtualGoniometer | 536e7e77fbb036ad8d777b42e751a0f3e80b8242 | [
"CC0-1.0"
] | null | null | null | """ [ raycast select module ] """
from bpy.types import Operator
import bpy
from bpy import ops as O
from .DoRaycast import do_raycast
from . import CallbackOptions
class PerformRaycastSelect(Operator):
"""Run a side differentiation and select the points by raycast"""
bl_idname = "view3d.raycast_select_pair"
bl_label = "RayCast Select Operator"
bl_options = {'REGISTER', 'UNDO'}
# Establish some variables
execution_function_name: bpy.props.StringProperty(name='callback', default='run_by_selection')
# Initialize some variables
execution_function = None
save_mode = None
break_number = 0
| 38.690909 | 98 | 0.650846 | """ [ raycast select module ] """
from bpy.types import Operator
import bpy
from bpy import ops as O
from .DoRaycast import do_raycast
from . import CallbackOptions
class PerformRaycastSelect(Operator):
"""Run a side differentiation and select the points by raycast"""
bl_idname = "view3d.raycast_select_pair"
bl_label = "RayCast Select Operator"
bl_options = {'REGISTER', 'UNDO'}
# Establish some variables
execution_function_name: bpy.props.StringProperty(name='callback', default='run_by_selection')
# Initialize some variables
execution_function = None
save_mode = None
break_number = 0
def modal(self, context, event):
if event.type in {'MIDDLEMOUSE', 'WHEELUPMOUSE', 'WHEELDOWNMOUSE'}:
# allow navigation
return {'PASS_THROUGH'}
elif event.type == 'MOUSEMOVE':
do_raycast(context, event, CallbackOptions.move_cursor, bn=self.break_number)
return {'RUNNING_MODAL'}
elif event.type == 'LEFTMOUSE':
do_raycast(context, event, self.execution_function, bn=self.break_number)
return {'RUNNING_MODAL'}
elif event.type in {'RIGHTMOUSE', 'ESC'} or context.active_object.mode != 'OBJECT':
bpy.context.space_data.overlay.show_cursor = False
O.object.mode_set(mode=self.save_mode)
return {'CANCELLED'}
return {'RUNNING_MODAL'}
def invoke(self, context, event):
self.execution_function = getattr(CallbackOptions, self.execution_function_name)
self.break_number = context.active_object.cs_individual_VG_.breaks
if context.space_data.type == 'VIEW_3D':
self.save_mode = context.active_object.mode
bpy.context.space_data.overlay.show_cursor = True
if self.save_mode != 'OBJECT':
O.object.mode_set(mode='OBJECT')
context.window_manager.modal_handler_add(self)
return {'RUNNING_MODAL'}
else:
self.report({'WARNING'}, "Active space must be a View3d")
return {'CANCELLED'}
| 1,426 | 0 | 58 |
cb5dff93b42d21207db6a7d97bdc3e24798ca2be | 230 | py | Python | aiogoogletrans/__init__.py | kngxscn/aiogoogletrans | f54c895f6d76b4558f8e836b05c0a4822b532bd4 | [
"MIT"
] | null | null | null | aiogoogletrans/__init__.py | kngxscn/aiogoogletrans | f54c895f6d76b4558f8e836b05c0a4822b532bd4 | [
"MIT"
] | null | null | null | aiogoogletrans/__init__.py | kngxscn/aiogoogletrans | f54c895f6d76b4558f8e836b05c0a4822b532bd4 | [
"MIT"
] | null | null | null | """Free Google Translate API for Python. Translates totally free of charge."""
__all__ = 'Translator',
__version__ = '3.2.1'
from aiogoogletrans.client import Translator
from aiogoogletrans.constants import LANGCODES, LANGUAGES
| 28.75 | 78 | 0.791304 | """Free Google Translate API for Python. Translates totally free of charge."""
__all__ = 'Translator',
__version__ = '3.2.1'
from aiogoogletrans.client import Translator
from aiogoogletrans.constants import LANGCODES, LANGUAGES
| 0 | 0 | 0 |
5456ae38d1bda26af82195ff1446f184342a0712 | 10,236 | py | Python | skyportal/tests/frontend/test_scanning_page.py | rcthomas/skyportal | 28f47c97a620082b0c6df54e072ed125eab1fc4a | [
"BSD-3-Clause"
] | null | null | null | skyportal/tests/frontend/test_scanning_page.py | rcthomas/skyportal | 28f47c97a620082b0c6df54e072ed125eab1fc4a | [
"BSD-3-Clause"
] | null | null | null | skyportal/tests/frontend/test_scanning_page.py | rcthomas/skyportal | 28f47c97a620082b0c6df54e072ed125eab1fc4a | [
"BSD-3-Clause"
] | null | null | null | import uuid
import pytest
from selenium.common.exceptions import TimeoutException
from skyportal.tests import api
@pytest.mark.flaky(reruns=2)
@pytest.mark.flaky(reruns=2)
@pytest.mark.flaky(reruns=2)
@pytest.mark.flaky(reruns=2)
@pytest.mark.flaky(reruns=2)
@pytest.mark.flaky(reruns=2)
| 34.698305 | 88 | 0.610297 | import uuid
import pytest
from selenium.common.exceptions import TimeoutException
from skyportal.tests import api
def test_candidates_page_render(driver, user, public_candidate):
driver.get(f"/become_user/{user.id}")
driver.get("/candidates")
driver.wait_for_xpath(f'//a[text()="{public_candidate.id}"]')
@pytest.mark.flaky(reruns=2)
def test_candidate_group_filtering(
driver,
user,
public_candidate,
public_filter,
public_group,
upload_data_token,
manage_groups_token,
):
candidate_id = str(uuid.uuid4())
for i in range(5):
status, data = api(
"POST",
"candidates",
data={
"id": f"{candidate_id}_{i}",
"ra": 234.22,
"dec": -22.33,
"redshift": 3,
"altdata": {"simbad": {"class": "RRLyr"}},
"transient": False,
"ra_dis": 2.3,
"filter_ids": [public_filter.id],
},
token=upload_data_token,
)
assert status == 200
assert data["data"]["id"] == f"{candidate_id}_{i}"
status, data = api(
"POST",
"groups",
data={"name": str(uuid.uuid4()), "group_admins": [user.id]},
token=manage_groups_token,
)
assert status == 200
driver.get(f"/become_user/{user.id}")
driver.get("/candidates")
for i in range(5):
driver.wait_for_xpath(f'//a[text()="{candidate_id}_{i}"]')
group_checkbox = driver.wait_for_xpath(f'//input[starts-with(@name,"groupIDs[0]")]')
driver.scroll_to_element_and_click(group_checkbox)
submit_button = driver.wait_for_xpath('//span[text()="Search"]')
driver.scroll_to_element_and_click(submit_button)
for i in range(5):
driver.wait_for_xpath_to_disappear(f'//a[text()="{candidate_id}_{i}"]')
driver.scroll_to_element_and_click(group_checkbox)
driver.scroll_to_element_and_click(submit_button)
for i in range(5):
driver.wait_for_xpath(f'//a[text()="{candidate_id}_{i}"]')
@pytest.mark.flaky(reruns=2)
def test_candidate_unsaved_only_filtering(
driver,
user,
public_candidate,
public_filter,
public_group,
upload_data_token,
manage_groups_token,
):
candidate_id = str(uuid.uuid4())
for i in range(5):
status, data = api(
"POST",
"sources",
data={
"id": f"{candidate_id}_{i}",
"ra": 234.22,
"dec": -22.33,
"redshift": 3,
"altdata": {"simbad": {"class": "RRLyr"}},
"transient": False,
"ra_dis": 2.3,
"group_ids": [public_group.id],
},
token=upload_data_token,
)
assert status == 200
status, data = api(
"POST",
"candidates",
data={
"id": f"{candidate_id}_{i}",
"ra": 234.22,
"dec": -22.33,
"redshift": 3,
"altdata": {"simbad": {"class": "RRLyr"}},
"transient": False,
"ra_dis": 2.3,
"filter_ids": [public_filter.id],
},
token=upload_data_token,
)
assert status == 200
assert data["data"]["id"] == f"{candidate_id}_{i}"
driver.get(f"/become_user/{user.id}")
driver.get("/candidates")
for i in range(5):
driver.wait_for_xpath(f'//a[text()="{candidate_id}_{i}"]')
unsaved_only_checkbox = driver.wait_for_xpath('//input[@name="unsavedOnly"]')
driver.scroll_to_element_and_click(unsaved_only_checkbox)
submit_button = driver.wait_for_xpath('//span[text()="Search"]')
driver.scroll_to_element_and_click(submit_button)
for i in range(5):
driver.wait_for_xpath_to_disappear(f'//a[text()="{candidate_id}_{i}"]')
driver.scroll_to_element_and_click(unsaved_only_checkbox)
driver.scroll_to_element_and_click(submit_button)
for i in range(5):
driver.wait_for_xpath(f'//a[text()="{candidate_id}_{i}"]')
@pytest.mark.flaky(reruns=2)
def test_candidate_date_filtering(
driver,
user,
public_candidate,
public_filter,
public_group,
upload_data_token,
ztf_camera,
):
candidate_id = str(uuid.uuid4())
for i in range(5):
status, data = api(
"POST",
"candidates",
data={
"id": f"{candidate_id}_{i}",
"ra": 234.22,
"dec": -22.33,
"redshift": 3,
"altdata": {"simbad": {"class": "RRLyr"}},
"transient": False,
"ra_dis": 2.3,
"filter_ids": [public_filter.id],
},
token=upload_data_token,
)
assert status == 200
assert data["data"]["id"] == f"{candidate_id}_{i}"
status, data = api(
"POST",
"photometry",
data={
"obj_id": f"{candidate_id}_{i}",
"mjd": 58000.0,
"instrument_id": ztf_camera.id,
"flux": 12.24,
"fluxerr": 0.031,
"zp": 25.0,
"magsys": "ab",
"filter": "ztfr",
"group_ids": [public_group.id],
},
token=upload_data_token,
)
assert status == 200
driver.get(f"/become_user/{user.id}")
driver.get("/candidates")
for i in range(5):
driver.wait_for_xpath(f'//a[text()="{candidate_id}_{i}"]')
start_date_input = driver.wait_for_xpath("//input[@name='startDate']")
start_date_input.clear()
start_date_input.send_keys("20001212")
end_date_input = driver.wait_for_xpath("//input[@name='endDate']")
end_date_input.clear()
end_date_input.send_keys("20011212")
submit_button = driver.wait_for_xpath_to_be_clickable('//span[text()="Search"]')
driver.scroll_to_element_and_click(submit_button)
for i in range(5):
driver.wait_for_xpath_to_disappear(f'//a[text()="{candidate_id}_{i}"]', 10)
end_date_input.clear()
end_date_input.send_keys("20901212")
submit_button = driver.wait_for_xpath_to_be_clickable('//span[text()="Search"]')
driver.scroll_to_element_and_click(submit_button)
for i in range(5):
driver.wait_for_xpath(f'//a[text()="{candidate_id}_{i}"]', 10)
@pytest.mark.flaky(reruns=2)
def test_save_candidate_quick_save(
driver, group_admin_user, public_group, public_candidate
):
driver.get(f"/become_user/{group_admin_user.id}")
driver.get("/candidates")
driver.wait_for_xpath(f'//a[text()="{public_candidate.id}"]')
save_button = driver.wait_for_xpath(
f'//button[@name="initialSaveCandidateButton{public_candidate.id}"]'
)
driver.scroll_to_element_and_click(save_button)
try:
driver.wait_for_xpath_to_disappear(
f'//button[@name="initialSaveCandidateButton{public_candidate.id}"]'
)
driver.wait_for_xpath('//a[text()="Previously Saved"]')
except TimeoutException:
driver.refresh()
driver.wait_for_xpath_to_disappear(
f'//button[@name="initialSaveCandidateButton{public_candidate.id}"]'
)
driver.wait_for_xpath('//a[text()="Previously Saved"]')
@pytest.mark.flaky(reruns=2)
def test_save_candidate_select_groups(
driver, group_admin_user, public_group, public_candidate
):
driver.get(f"/become_user/{group_admin_user.id}")
driver.get("/candidates")
driver.wait_for_xpath(f'//a[text()="{public_candidate.id}"]')
carat = driver.wait_for_xpath(
f'//button[@name="saveCandidateButtonDropDownArrow{public_candidate.id}"]'
)
driver.scroll_to_element_and_click(carat)
driver.execute_script(
"arguments[0].click();",
driver.wait_for_xpath_to_be_clickable(
f'//*[@name="buttonMenuOption{public_candidate.id}_Select groups & save"]'
),
)
save_button = driver.wait_for_xpath_to_be_clickable(
f'//button[@name="initialSaveCandidateButton{public_candidate.id}"]'
)
driver.scroll_to_element_and_click(save_button)
assert driver.wait_for_xpath("//input[@name='group_ids[0]']").is_selected()
second_save_button = driver.wait_for_xpath(
f'//button[@name="finalSaveCandidateButton{public_candidate.id}"]'
)
second_save_button.click()
try:
driver.wait_for_xpath_to_disappear(
f'//button[@name="initialSaveCandidateButton{public_candidate.id}"]'
)
driver.wait_for_xpath('//a[text()="Previously Saved"]')
except TimeoutException:
driver.refresh()
driver.wait_for_xpath_to_disappear(
f'//button[@name="initialSaveCandidateButton{public_candidate.id}"]'
)
driver.wait_for_xpath('//a[text()="Previously Saved"]')
@pytest.mark.flaky(reruns=2)
def test_save_candidate_no_groups_error_message(
driver, group_admin_user, public_group, public_candidate
):
driver.get(f"/become_user/{group_admin_user.id}")
driver.get("/candidates")
driver.wait_for_xpath(f'//a[text()="{public_candidate.id}"]')
carat = driver.wait_for_xpath_to_be_clickable(
f'//button[@name="saveCandidateButtonDropDownArrow{public_candidate.id}"]'
)
driver.scroll_to_element_and_click(carat)
driver.execute_script(
"arguments[0].click();",
driver.wait_for_xpath_to_be_clickable(
f'//*[@name="buttonMenuOption{public_candidate.id}_Select groups & save"]'
),
)
save_button = driver.wait_for_xpath_to_be_clickable(
f'//button[@name="initialSaveCandidateButton{public_candidate.id}"]'
)
driver.scroll_to_element_and_click(save_button)
group_checkbox = driver.wait_for_xpath("//input[@name='group_ids[0]']")
assert group_checkbox.is_selected()
group_checkbox.click()
assert not group_checkbox.is_selected()
second_save_button = driver.wait_for_xpath_to_be_clickable(
f'//button[@name="finalSaveCandidateButton{public_candidate.id}"]'
)
second_save_button.click()
driver.wait_for_xpath('//div[contains(.,"Select at least one group")]')
| 9,779 | 0 | 155 |
d6098297f81cedb892e9a2baee378708a279c2c9 | 1,696 | py | Python | pypy/translator/backendopt/test/test_checkvirtual.py | camillobruni/pygirl | ddbd442d53061d6ff4af831c1eab153bcc771b5a | [
"MIT"
] | 12 | 2016-01-06T07:10:28.000Z | 2021-05-13T23:02:02.000Z | pypy/translator/backendopt/test/test_checkvirtual.py | woodrow/pyoac | b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7 | [
"MIT"
] | null | null | null | pypy/translator/backendopt/test/test_checkvirtual.py | woodrow/pyoac | b5dc59e6a38e7912db47f26fb23ffa4764a3c0e7 | [
"MIT"
] | 2 | 2016-07-29T07:09:50.000Z | 2016-10-16T08:50:26.000Z | from pypy.rpython.ootypesystem.ootype import ROOT, Instance, \
addMethods, meth, Meth, Void
from pypy.translator.backendopt.checkvirtual import check_virtual_methods
| 29.241379 | 73 | 0.601415 | from pypy.rpython.ootypesystem.ootype import ROOT, Instance, \
addMethods, meth, Meth, Void
from pypy.translator.backendopt.checkvirtual import check_virtual_methods
def test_nonvirtual():
A = Instance("A", ROOT)
addMethods(A, {"foo": meth(Meth([], Void))})
check_virtual_methods()
assert A._methods["foo"]._virtual == False
def test_checkvirtual_simple():
A = Instance("A", ROOT)
B = Instance("B", A)
addMethods(A, {"foo": meth(Meth([], Void)),
"bar": meth(Meth([], Void))})
addMethods(B, {"foo": meth(Meth([], Void))})
check_virtual_methods()
assert A._methods["foo"]._virtual == True
assert A._methods["bar"]._virtual == False
assert B._methods["foo"]._virtual == False
def test_checkvirtual_deep():
A = Instance("A", ROOT)
B = Instance("B", A)
C = Instance("C", B)
addMethods(A, {"foo": meth(Meth([], Void)),
"bar": meth(Meth([], Void))})
addMethods(C, {"foo": meth(Meth([], Void))})
check_virtual_methods()
assert A._methods["foo"]._virtual == True
assert A._methods["bar"]._virtual == False
assert "foo" not in B._methods
assert C._methods["foo"]._virtual == False
def test_checkvirtual_brother():
A = Instance("A", ROOT)
B1 = Instance("B1", A)
B2 = Instance("B2", A)
addMethods(A, {"foo": meth(Meth([], Void)),
"bar": meth(Meth([], Void))})
addMethods(B1, {"foo": meth(Meth([], Void))})
check_virtual_methods()
assert A._methods["foo"]._virtual == True
assert A._methods["bar"]._virtual == False
assert B1._methods["foo"]._virtual == False
assert "foo" not in B2._methods
| 1,432 | 0 | 92 |
e05e2f86f5041d7af068f49abaf50767e4e372f3 | 33 | py | Python | ARCH_FILES/modules/__init__.py | Andrew95496/hypergraze | 224719eb661a4069923355930ef3b5f0aca44dde | [
"MIT"
] | null | null | null | ARCH_FILES/modules/__init__.py | Andrew95496/hypergraze | 224719eb661a4069923355930ef3b5f0aca44dde | [
"MIT"
] | null | null | null | ARCH_FILES/modules/__init__.py | Andrew95496/hypergraze | 224719eb661a4069923355930ef3b5f0aca44dde | [
"MIT"
] | null | null | null | from .URL_PARSER import get_text
| 16.5 | 32 | 0.848485 | from .URL_PARSER import get_text
| 0 | 0 | 0 |
95ff0eddae9687a2a7c6b035062ae599b96de1ce | 6,641 | py | Python | transforms/polus-recycle-vector-plugin/src/main.py | LabShare/polus-plugin-utils | 9332ca6c229401d57b063a81973d48ce718c654c | [
"MIT"
] | null | null | null | transforms/polus-recycle-vector-plugin/src/main.py | LabShare/polus-plugin-utils | 9332ca6c229401d57b063a81973d48ce718c654c | [
"MIT"
] | null | null | null | transforms/polus-recycle-vector-plugin/src/main.py | LabShare/polus-plugin-utils | 9332ca6c229401d57b063a81973d48ce718c654c | [
"MIT"
] | null | null | null | import argparse, logging, filepattern
from pathlib import Path
# Initialize the logger
logging.basicConfig(
format="%(asctime)s - %(name)-8s - %(levelname)-8s - %(message)s",
datefmt="%d-%b-%y %H:%M:%S",
)
logger = logging.getLogger("main")
logger.setLevel(logging.INFO)
if __name__ == "__main__":
# Setup the Argument parsing
logger.info("Parsing arguments...")
parser = argparse.ArgumentParser(
prog="main", description="Extract individual fields of view from a czi file."
)
parser.add_argument(
"--stitchDir",
dest="stitch_dir",
type=str,
help="Stitching vector to recycle",
required=True,
)
parser.add_argument(
"--collectionDir",
dest="collection_dir",
type=str,
help="Image collection to place in new stitching vector",
required=True,
)
parser.add_argument(
"--filepattern",
dest="pattern",
type=str,
help="Stitching vector regular expression",
required=False,
)
parser.add_argument(
"--outDir",
dest="output_dir",
type=str,
help="The directory in which to save stitching vectors.",
required=True,
)
# Get the arguments
args = parser.parse_args()
stitch_dir = Path(args.stitch_dir)
collection_dir = Path(args.collection_dir)
if collection_dir.joinpath("images").is_dir():
# switch to images folder if present
inpDir = collection_dir.joinpath("images").absolute()
pattern = args.pattern
output_dir = Path(args.output_dir)
logger.info("stitch_dir = {}".format(stitch_dir))
logger.info("collection_dir = {}".format(collection_dir))
logger.info("filepattern = {}".format(pattern))
logger.info("output_dir = {}".format(output_dir))
main(stitch_dir, collection_dir, output_dir, pattern)
| 35.137566 | 101 | 0.556392 | import argparse, logging, filepattern
from pathlib import Path
# Initialize the logger
logging.basicConfig(
format="%(asctime)s - %(name)-8s - %(levelname)-8s - %(message)s",
datefmt="%d-%b-%y %H:%M:%S",
)
logger = logging.getLogger("main")
logger.setLevel(logging.INFO)
def close_vectors(vectors):
if isinstance(vectors, dict):
for key in vectors:
close_vectors(vectors[key])
else:
vectors.close()
def main(stitch_dir: Path, collection_dir: Path, output_dir: Path, pattern: str):
if pattern in [None, ".+", ".*"]:
pattern = filepattern.infer_pattern([f.name for f in collection_dir.iterdir()])
logger.info(f"Inferred filepattern: {pattern}")
# Parse files in the image collection
fp = filepattern.FilePattern(collection_dir, pattern)
# Get valid stitching vectors
vectors = [
v
for v in Path(stitch_dir).iterdir()
if Path(v).name.startswith("img-global-positions")
]
"""Get filepatterns for each stitching vector
This section of code creates a filepattern for each stitching vector, and while
traversing the stitching vectors analyzes the patterns to see which values in the
filepattern are static or variable within a single stitching vector and across
stitching vectors. The `singulars` variable determines which case each variable is:
`singulars[v]==-1` when the variable, v, changes within a stitching vector.
`singulars[v]==None` when the variable, v, changes across stitching vectors.
`singulars[v]==int` when the variable, v, doesn't change.
The variables that change across stitching vectors are grouping variables for the
filepattern iterator.
"""
singulars = {}
vps = {}
for vector in vectors:
vps[vector.name] = filepattern.VectorPattern(vector, pattern)
for variable in vps[vector.name].variables:
if variable not in singulars.keys():
if len(vps[vector.name].uniques[variable]) == 1:
singulars[variable] = vps[vector.name].uniques[variable]
else:
singulars[variable] = -1
elif (
variable in singulars.keys()
and vps[vector.name].uniques[variable] != singulars[variable]
):
singulars[variable] = None if singulars[variable] != -1 else -1
group_by = "".join([k for k, v in singulars.items() if v == -1])
vector_count = 1
for vector in vectors:
logger.info("Processing vector: {}".format(str(vector.absolute())))
sp = vps[vector.name]
# Define the variables used in the current vector pattern so that corresponding
# files can be located from files in the image collection with filepattern.
matching = {
k.upper(): sp.uniques[k][0] for k, v in singulars.items() if v is None
}
vector_groups = [k for k, v in singulars.items() if v not in [None, -1]]
# Vector output dictionary
vector_dict = {}
# Loop through lines in the stitching vector, generate new vectors
for v in sp():
variables = {
key.upper(): value for key, value in v[0].items() if key in group_by
}
variables.update(matching)
for files in fp(**variables):
for f in files:
# Get the file writer, create it if it doesn't exist
temp_dict = vector_dict
for key in vector_groups:
if f[key] not in temp_dict.keys():
if vector_groups[-1] != key:
temp_dict[f[key]] = {}
else:
fname = "img-global-positions-{}.txt".format(
vector_count
)
vector_count += 1
logger.info("Creating vector: {}".format(fname))
temp_dict[f[key]] = open(
str(Path(output_dir).joinpath(fname).absolute()),
"w",
)
temp_dict = temp_dict[f[key]]
# If the only grouping variables are positional (xyp), then create an output file
fw = temp_dict
fw.write(
"file: {}; corr: {}; position: ({}, {}); grid: ({}, {});\n".format(
Path(f["file"]).name,
v[0]["correlation"],
v[0]["posX"],
v[0]["posY"],
v[0]["gridX"],
v[0]["gridY"],
)
)
# Close all open stitching vectors
close_vectors(vector_dict)
logger.info("Plugin completed all operations!")
if __name__ == "__main__":
# Setup the Argument parsing
logger.info("Parsing arguments...")
parser = argparse.ArgumentParser(
prog="main", description="Extract individual fields of view from a czi file."
)
parser.add_argument(
"--stitchDir",
dest="stitch_dir",
type=str,
help="Stitching vector to recycle",
required=True,
)
parser.add_argument(
"--collectionDir",
dest="collection_dir",
type=str,
help="Image collection to place in new stitching vector",
required=True,
)
parser.add_argument(
"--filepattern",
dest="pattern",
type=str,
help="Stitching vector regular expression",
required=False,
)
parser.add_argument(
"--outDir",
dest="output_dir",
type=str,
help="The directory in which to save stitching vectors.",
required=True,
)
# Get the arguments
args = parser.parse_args()
stitch_dir = Path(args.stitch_dir)
collection_dir = Path(args.collection_dir)
if collection_dir.joinpath("images").is_dir():
# switch to images folder if present
inpDir = collection_dir.joinpath("images").absolute()
pattern = args.pattern
output_dir = Path(args.output_dir)
logger.info("stitch_dir = {}".format(stitch_dir))
logger.info("collection_dir = {}".format(collection_dir))
logger.info("filepattern = {}".format(pattern))
logger.info("output_dir = {}".format(output_dir))
main(stitch_dir, collection_dir, output_dir, pattern)
| 4,711 | 0 | 46 |
ee86298212e016955fab793497037c9a177307ce | 377 | py | Python | LeetCode/1.two-sum.py | allen-ash/OJ | 1edcfb53b81fabac9a83d01a71fd74b73c5dab07 | [
"MIT"
] | null | null | null | LeetCode/1.two-sum.py | allen-ash/OJ | 1edcfb53b81fabac9a83d01a71fd74b73c5dab07 | [
"MIT"
] | null | null | null | LeetCode/1.two-sum.py | allen-ash/OJ | 1edcfb53b81fabac9a83d01a71fd74b73c5dab07 | [
"MIT"
] | null | null | null | #
# @lc app=leetcode id=1 lang=python3
#
# [1] Two Sum
#
# @lc code=start
# @lc code=end
| 19.842105 | 64 | 0.498674 | #
# @lc app=leetcode id=1 lang=python3
#
# [1] Two Sum
#
# @lc code=start
class Solution:
def twoSum(self, nums: List[int], target: int) -> List[int]:
mem = dict()
for i, num in enumerate(nums):
if target - num in mem:
return [mem[target-num], i]
else:
mem[num] = i
return []
# @lc code=end
| 244 | -6 | 48 |
905a81519555ef280dde1a2844d6a5be9267d82a | 1,652 | py | Python | app/__init__.py | Bangys/unnamed-demo | 34484107d8831cfa0158e51572d6680d92ff236d | [
"MIT"
] | 1 | 2019-06-03T14:56:05.000Z | 2019-06-03T14:56:05.000Z | app/__init__.py | Bangys/unnamed-demo | 34484107d8831cfa0158e51572d6680d92ff236d | [
"MIT"
] | 3 | 2021-03-31T19:10:28.000Z | 2021-12-13T20:03:54.000Z | app/__init__.py | Bangys/unnamed-demo | 34484107d8831cfa0158e51572d6680d92ff236d | [
"MIT"
] | null | null | null | from flask import Flask
from flask_bootstrap import Bootstrap
from flask_sqlalchemy import SQLAlchemy
from config import config
from config import Config as CF
from utils import log
bootstrap = Bootstrap()
db = SQLAlchemy()
| 31.169811 | 83 | 0.616223 | from flask import Flask
from flask_bootstrap import Bootstrap
from flask_sqlalchemy import SQLAlchemy
from config import config
from config import Config as CF
from utils import log
bootstrap = Bootstrap()
db = SQLAlchemy()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
bootstrap.init_app(app)
db.init_app(app)
with app.app_context():
db.create_all()
from app.models.board import Board
from app.models.user import User
bs = Board.query.all()
if bs == []:
init_boards = [{'title': 'news', 'name': '新闻'},
{'title': 'games', 'name': '游戏'},
{'title': 'books', 'name': '好书'},
{'title': 'bala', 'name': '闲聊'}]
User.register(dict(username=CF.FLASKY_ADMIN, email='admin@example.com',
password=CF.FLASKY_PWD))
for b in init_boards:
db.session.add(Board(b))
try:
db.session.commit()
except Exception as e:
print('init_board err:', e)
# url_prefix 路由前缀
from app.routes.index import main as index_routes
from app.routes.post import main as post_routes
from app.routes.comment import main as comment_routes
from app.routes.board import main as board_routes
app.register_blueprint(index_routes)
app.register_blueprint(post_routes, url_prefix='/post')
app.register_blueprint(comment_routes, url_prefmderix='/comment')
app.register_blueprint(board_routes, url_prefix='/board')
return app
| 1,427 | 0 | 23 |
91cecf87858a2bfc2dd753dbe1a29fff369d9038 | 10,384 | py | Python | source/kernels.py | aaronpmishkin/gaussian_processes | 91dafe4896f45ace99cfdb0e4dbc7ff3a1086b0d | [
"Apache-2.0"
] | 7 | 2018-12-16T05:14:13.000Z | 2021-11-19T14:49:51.000Z | source/kernels.py | aaronpmishkin/gaussian_processes | 91dafe4896f45ace99cfdb0e4dbc7ff3a1086b0d | [
"Apache-2.0"
] | null | null | null | source/kernels.py | aaronpmishkin/gaussian_processes | 91dafe4896f45ace99cfdb0e4dbc7ff3a1086b0d | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# @Author: aaronpmishkin
# @Date: 2017-07-28 21:07:21
# @Last Modified by: aaronpmishkin
# @Last Modified time: 2017-08-09 13:09:35
import numpy as np
from scipy.spatial.distance import cdist
class RBF():
""" RBF
Implementation of the radial basis function kernel. Also called the squared exponential kernel.
Arguments:
----------
dim: integer
The dimensionality of inputs to the kernel (i.e. dimension of X).
length_scale: number
The length scale of the kernel function.
var: number
The variance magnitude of the kernel function.
"""
def get_parameters(self):
""" get_parameters
Get the kernel's parameters.
"""
return np.array([self.length_scale, self.var])
def set_parameters(self, theta):
""" set_parameters
Set the kernel's parameters.
Arguments:
----------
theta: array-like, shape = [2, ]
An array containing the new parameters of the kernel.
The parameter order is [length_scale, variance]
"""
self.length_scale = theta[0]
self.var = theta[1]
def cov(self, X, Y=None, theta=None):
""" cov
Compute the covariance matrix of X and Y using the RBF kernel.
Arguments:
----------
X: array-like, shape = [n_samples, n_features]
An array of inputs.
Y (optional): array-like, shape = [m_samples, n_features]
A second array of inputs.
If Y is None, then the covariance matrix of X with itself will be computed.
theta (optional): array-like, shape = [2, ]
An array of parameter values for the kernel.
"""
# print(X, Y)
if Y is None:
Y = X
if theta is None:
theta = np.array([self.length_scale, self.var])
# Compute a matrix of squared eucledian distances between X and Y
dist = cdist(X, Y, 'sqeuclidean')
K = theta[1] * np.exp(dist / (-2 * (theta[0] ** 2)))
# print(K.shape)
return K
def cov_gradient(self, X, theta=None):
""" cov_gradient
Compute the gradient of the covariance matrix of X with respect to the parameters
of the RBF kernel.
Arguments:
----------
X: array-like, shape = [n_samples, n_features]
An array of inputs.
theta (optional): array-like, shape = [2, ]
An array of parameter values for the kernel.
"""
if theta is None:
theta = np.array([self.length_scale, self.var])
dist = cdist(X, X, 'sqeuclidean')
K = np.exp(dist / (-2 * (theta[0] ** 2)))
dK_dl = theta[1] * (theta[0] ** -3) * dist * K
dK_dvar = K
return np.array([dK_dl, dK_dvar])
class Additive():
    """ Additive
    Implementation of the additive kernel as described by Duvenaud et al, 2011
    ("Additive Gaussian Processes"). Order-d interaction terms are assembled
    from the base kernels' covariance matrices via the Newton-Girard identities.
    Arguments:
    ----------
        dim: integer,
            The dimensionality of inputs to the kernel (i.e. dimension of X).
        order: number, order <= dim
            The maximum interaction order of the additive kernel.
        base_kernels: array-like, shape = [dim, ]
            The set of base kernel functions, one for each dimension.
        var: array-like
            Variance magnitudes; entry d-1 scales the order-d term, 1 <= d <= order.
            NOTE(review): the parameter layout used by __cov__ and cov_gradient
            assumes len(var) == dim (the default), even though only the first
            `order` entries are used as weights.
    """
    def __init__(self, dim, order, base_kernels, var=None):
        """Validate the configuration and cache the flattened parameter vector.
        Raises ValueError if order > dim or if len(base_kernels) != dim.
        (Restored: this constructor was missing, leaving the class unusable.)
        """
        if order > dim:
            raise ValueError('Kernel order cannot be larger than input dimension')
        if dim != len(base_kernels):
            raise ValueError('A base kernel must be provided for each input dimension')
        if var is None:
            var = np.ones(dim)
        self.dim = dim
        self.order = order
        self.base_kernels = base_kernels
        self.var = var
        self.theta = self.get_parameters()
        self.num_parameters = len(self.theta)
    def get_parameters(self):
        """Return the flattened parameters: variance magnitudes first, then
        each base kernel's parameters in base-kernel order.
        """
        theta = np.copy(self.var)
        for kernel in self.base_kernels:
            theta = np.append(theta, kernel.get_parameters())
        return theta
    def set_parameters(self, theta):
        """Install a flattened parameter vector.
        The first |self.order| entries are taken as the interaction variances;
        the remainder are forwarded to the base kernels in order.
        NOTE(review): this consumes only `order` variance entries while
        get_parameters/__cov__ lay out len(var) == dim of them, so a
        set(get()) round-trip mis-aligns when order < dim — confirm intent.
        """
        self.var = theta[0:self.order]
        param_index = self.order
        for kernel in self.base_kernels:
            kernel.set_parameters(theta[param_index:param_index + kernel.num_parameters])
            param_index += kernel.num_parameters
    def __cov__(self, X, Y=None, order=None, theta=None, base_kernels=None):
        """Internal helper: return (K, K_orders) where K is the full additive
        covariance of X and Y and K_orders lists the per-order matrices.
        order/theta/base_kernels default to the kernel's own settings when
        None; Y defaults to X.
        """
        if Y is None:
            Y = X
        if theta is None:
            theta = self.theta
        if order is None:
            order = self.order
        if base_kernels is None:
            base_kernels = self.base_kernels
        # Z is the array of covariance matrices produced by the base kernels,
        # one per input feature.
        Z = np.ones((len(base_kernels), X.shape[0], Y.shape[0]))
        # S[k] is the k-th power sum of the matrices in Z, k = 1 ... order.
        S = np.ones((order + 1, X.shape[0], Y.shape[0]))
        # K[k] is the k-th order additive term (elementary symmetric polynomial).
        K = np.zeros((order + 1, X.shape[0], Y.shape[0]))
        K[0] = 1
        p_index = len(base_kernels)
        for i, kernel in enumerate(base_kernels):
            params = theta[p_index:p_index + kernel.num_parameters]
            p_index += kernel.num_parameters
            Z[i] = kernel.cov(X[:, i].reshape(X.shape[0], 1),
                              Y[:, i].reshape(Y.shape[0], 1),
                              theta=params)
        Z_d = np.copy(Z)
        for d in range(1, order + 1):
            S[d] = np.sum(Z_d, axis=0)
            # BUGFIX: multiply by the base Z so that successive iterations
            # hold Z**1, Z**2, Z**3, ... The previous `Z_d = Z_d * Z_d`
            # squared repeatedly (powers 1, 2, 4, 8, ...), corrupting every
            # power sum above S[2] and hence all order-3+ terms.
            Z_d = Z_d * Z
        # Newton-Girard: e_d = (1/d) * sum_{j=1..d} (-1)^(j-1) * e_{d-j} * s_j
        for d in range(1, order + 1):
            for j in range(1, d + 1):
                K[d] += ((-1) ** (j - 1)) * K[d - j] * S[j]
            K[d] = K[d] / d
        for d in range(1, order + 1):
            K[d] = theta[d - 1] * K[d]
        return np.sum(K[1:], axis=0), K[1:]
    def cov(self, X, Y=None, order=None, theta=None, base_kernels=None):
        """Return the additive covariance matrix of X (and Y) via __cov__.
        See __cov__ for the meaning of the optional overrides.
        """
        K, K_orders = self.__cov__(X, Y, order, theta, base_kernels)
        return K
    def cov_gradient(self, X, theta=None):
        """Return the gradient of cov(X, X): one entry per interaction
        variance, followed by entries for each base kernel's parameters.
        """
        if theta is None:
            theta = self.theta
        gradient = []
        p_index = self.dim
        K, K_orders = self.__cov__(X, theta=theta)
        for i in range(self.order):
            # dK/dvar_{i+1} is (up to the var factor) the order-(i+1) term.
            gradient.append(K_orders[i])
        for i, ki in enumerate(self.base_kernels):
            dK_dki = self.cov(np.delete(X, i, axis=1),
                              order=(self.order - 1),
                              base_kernels=np.delete(self.base_kernels, i))
            # NOTE(review): the base-kernel gradient is evaluated on the full X
            # rather than on feature column i -- confirm against the paper.
            dki_dtheta = ki.cov_gradient(X, theta[p_index: p_index + ki.num_parameters])
            # BUGFIX: advance p_index so each base kernel reads its own
            # parameter slice; previously every kernel read the first slice.
            p_index += ki.num_parameters
            gradient.extend((dK_dki + 1) * dki_dtheta)
        return np.array(gradient)
| 35.2 | 99 | 0.561152 | # -*- coding: utf-8 -*-
# @Author: aaronpmishkin
# @Date: 2017-07-28 21:07:21
# @Last Modified by: aaronpmishkin
# @Last Modified time: 2017-08-09 13:09:35
import numpy as np
from scipy.spatial.distance import cdist
class RBF():
    """Radial basis function (squared exponential) kernel:
    k(x, y) = var * exp(-||x - y||^2 / (2 * length_scale^2)).
    Arguments:
    ----------
        dim: integer
            Dimensionality of inputs to the kernel.
        length_scale: number
            Length scale of the kernel function.
        var: number
            Signal variance magnitude.
    """
    def __init__(self, dim, length_scale=1, var=1):
        self.dim = dim
        self.length_scale = length_scale
        self.var = var
        self.num_parameters = 2
    def get_parameters(self):
        """Return the parameter vector, ordered [length_scale, variance]."""
        return np.array([self.length_scale, self.var])
    def set_parameters(self, theta):
        """Install a new parameter vector ordered [length_scale, variance]."""
        self.length_scale = theta[0]
        self.var = theta[1]
    def cov(self, X, Y=None, theta=None):
        """Return the covariance matrix of X and Y under the RBF kernel.
        Y defaults to X (self-covariance); theta defaults to the kernel's
        stored [length_scale, variance] values.
        """
        if Y is None:
            Y = X
        if theta is None:
            theta = np.array([self.length_scale, self.var])
        # Squared Euclidean distances between every row of X and of Y.
        sq_dists = cdist(X, Y, 'sqeuclidean')
        return theta[1] * np.exp(sq_dists / (-2 * (theta[0] ** 2)))
    def cov_gradient(self, X, theta=None):
        """Return the gradient of cov(X, X) with respect to the kernel
        parameters, stacked as [dK/d(length_scale), dK/d(variance)].
        """
        if theta is None:
            theta = np.array([self.length_scale, self.var])
        sq_dists = cdist(X, X, 'sqeuclidean')
        # Unit-variance covariance; also equals dK/d(variance).
        unit_cov = np.exp(sq_dists / (-2 * (theta[0] ** 2)))
        grad_length = theta[1] * (theta[0] ** -3) * sq_dists * unit_cov
        grad_var = unit_cov
        return np.array([grad_length, grad_var])
class Additive():
    """ Additive
    Implementation of the additive kernel as described by Duvenaud et al, 2011
    ("Additive Gaussian Processes"). Order-d interaction terms are assembled
    from the base kernels' covariance matrices via the Newton-Girard identities.
    (Docstring previously mislabelled this class "RBF".)
    Arguments:
    ----------
        dim: integer,
            The dimensionality of inputs to the kernel (i.e. dimension of X).
        order: number, order <= dim
            The maximum interaction order of the additive kernel.
        base_kernels: array-like, shape = [dim, ]
            The set of base kernel functions, one for each dimension.
        var: array-like
            Variance magnitudes; entry d-1 scales the order-d term, 1 <= d <= order.
            NOTE(review): the parameter layout used by __cov__ and cov_gradient
            assumes len(var) == dim (the default), even though only the first
            `order` entries are used as weights.
    """
    def __init__(self, dim, order, base_kernels, var=None):
        """Validate the configuration and cache the flattened parameter vector.
        Raises ValueError if order > dim or if len(base_kernels) != dim.
        """
        if order > dim:
            raise ValueError('Kernel order cannot be larger than input dimension')
        if dim != len(base_kernels):
            raise ValueError('A base kernel must be provided for each input dimension')
        if var is None:
            var = np.ones(dim)
        self.dim = dim
        self.order = order
        self.base_kernels = base_kernels
        self.var = var
        self.theta = self.get_parameters()
        self.num_parameters = len(self.theta)
    def get_parameters(self):
        """Return the flattened parameters: variance magnitudes first, then
        each base kernel's parameters in base-kernel order.
        """
        theta = np.copy(self.var)
        for kernel in self.base_kernels:
            theta = np.append(theta, kernel.get_parameters())
        return theta
    def set_parameters(self, theta):
        """Install a flattened parameter vector.
        The first |self.order| entries are taken as the interaction variances;
        the remainder are forwarded to the base kernels in order.
        NOTE(review): this consumes only `order` variance entries while
        get_parameters/__cov__ lay out len(var) == dim of them, so a
        set(get()) round-trip mis-aligns when order < dim — confirm intent.
        """
        self.var = theta[0:self.order]
        param_index = self.order
        for kernel in self.base_kernels:
            kernel.set_parameters(theta[param_index:param_index + kernel.num_parameters])
            param_index += kernel.num_parameters
    def __cov__(self, X, Y=None, order=None, theta=None, base_kernels=None):
        """Internal helper: return (K, K_orders) where K is the full additive
        covariance of X and Y and K_orders lists the per-order matrices.
        order/theta/base_kernels default to the kernel's own settings when
        None; Y defaults to X.
        """
        if Y is None:
            Y = X
        if theta is None:
            theta = self.theta
        if order is None:
            order = self.order
        if base_kernels is None:
            base_kernels = self.base_kernels
        # Z is the array of covariance matrices produced by the base kernels,
        # one per input feature.
        Z = np.ones((len(base_kernels), X.shape[0], Y.shape[0]))
        # S[k] is the k-th power sum of the matrices in Z, k = 1 ... order.
        S = np.ones((order + 1, X.shape[0], Y.shape[0]))
        # K[k] is the k-th order additive term (elementary symmetric polynomial).
        K = np.zeros((order + 1, X.shape[0], Y.shape[0]))
        K[0] = 1
        p_index = len(base_kernels)
        for i, kernel in enumerate(base_kernels):
            params = theta[p_index:p_index + kernel.num_parameters]
            p_index += kernel.num_parameters
            Z[i] = kernel.cov(X[:, i].reshape(X.shape[0], 1),
                              Y[:, i].reshape(Y.shape[0], 1),
                              theta=params)
        Z_d = np.copy(Z)
        for d in range(1, order + 1):
            S[d] = np.sum(Z_d, axis=0)
            # BUGFIX: multiply by the base Z so that successive iterations
            # hold Z**1, Z**2, Z**3, ... The previous `Z_d = Z_d * Z_d`
            # squared repeatedly (powers 1, 2, 4, 8, ...), corrupting every
            # power sum above S[2] and hence all order-3+ terms.
            Z_d = Z_d * Z
        # Newton-Girard: e_d = (1/d) * sum_{j=1..d} (-1)^(j-1) * e_{d-j} * s_j
        for d in range(1, order + 1):
            for j in range(1, d + 1):
                K[d] += ((-1) ** (j - 1)) * K[d - j] * S[j]
            K[d] = K[d] / d
        for d in range(1, order + 1):
            K[d] = theta[d - 1] * K[d]
        return np.sum(K[1:], axis=0), K[1:]
    def cov(self, X, Y=None, order=None, theta=None, base_kernels=None):
        """Return the additive covariance matrix of X (and Y) via __cov__.
        See __cov__ for the meaning of the optional overrides.
        """
        K, K_orders = self.__cov__(X, Y, order, theta, base_kernels)
        return K
    def cov_gradient(self, X, theta=None):
        """Return the gradient of cov(X, X): one entry per interaction
        variance, followed by entries for each base kernel's parameters.
        """
        if theta is None:
            theta = self.theta
        gradient = []
        p_index = self.dim
        K, K_orders = self.__cov__(X, theta=theta)
        for i in range(self.order):
            # dK/dvar_{i+1} is (up to the var factor) the order-(i+1) term.
            gradient.append(K_orders[i])
        for i, ki in enumerate(self.base_kernels):
            dK_dki = self.cov(np.delete(X, i, axis=1),
                              order=(self.order - 1),
                              base_kernels=np.delete(self.base_kernels, i))
            # NOTE(review): the base-kernel gradient is evaluated on the full X
            # rather than on feature column i -- confirm against the paper.
            dki_dtheta = ki.cov_gradient(X, theta[p_index: p_index + ki.num_parameters])
            # BUGFIX: advance p_index so each base kernel reads its own
            # parameter slice; previously every kernel read the first slice.
            p_index += ki.num_parameters
            gradient.extend((dK_dki + 1) * dki_dtheta)
        return np.array(gradient)
| 672 | 0 | 53 |
b5717efa0f43455211ed07cd99d055ff0b823b2d | 2,767 | py | Python | resources/waitlist.py | WareDave/flaskRepo | 92e7cee6581508f878427c5cfba20a5d839e9ea7 | [
"MIT"
] | null | null | null | resources/waitlist.py | WareDave/flaskRepo | 92e7cee6581508f878427c5cfba20a5d839e9ea7 | [
"MIT"
] | null | null | null | resources/waitlist.py | WareDave/flaskRepo | 92e7cee6581508f878427c5cfba20a5d839e9ea7 | [
"MIT"
] | null | null | null | from flask import Blueprint, jsonify, request
from playhouse.shortcuts import model_to_dict
from flask_login import current_user, login_required
import models
waitlists = Blueprint('waitlists', 'waitlists')
print()
# Index route
# BUGFIX: the route decorators referenced an undefined name `waitlist`; the
# Blueprint defined above is named `waitlists`. The handler bodies were also
# missing (decorators with no function are a SyntaxError) and are restored.
@waitlists.route('/', methods=["GET"])
def get_all_waitlists():
    """Return every waitlist row owned by the logged-in user, with the
    embedded user's password stripped from each row."""
    try:
        rows = [model_to_dict(entry) for entry in models.WaitList.select().where(models.WaitList.loggedUser_id == current_user.id)]
        for row in rows:
            row['loggedUser'].pop('password')
        return jsonify(data=rows, status={"code": 200, "message": "Success"})
    except models.DoesNotExist:
        return jsonify(data={}, status={"code": 400, "message": "Error getting the resources"})
# Create route
@waitlists.route('/', methods=["POST"])
@login_required
def create_waitlists():
    """Create a waitlist row owned by the logged-in user from the JSON payload."""
    try:
        payload = request.get_json()
        payload['loggedUser'] = current_user.id
        waitlist = models.WaitList.create(**payload)
        return jsonify(data=model_to_dict(waitlist), status={"code": 201, "message": "Success"})
    except models.DoesNotExist:
        return jsonify(data={}, status={"code": 400, "message": "Error creating the resources"})
# Show route
@waitlists.route('/<id>', methods=["GET"])
def get_one_waitlists(id):
    """Return a single waitlist row by primary key."""
    try:
        waitlist = models.WaitList.get_by_id(id)
        return jsonify(data=model_to_dict(waitlist), status={"code": 200, "message": "Found waitlist with id {}".format(waitlist.id)})
    except models.DoesNotExist:
        return jsonify(data={}, status={"code": 400, "message": "Error getting one resource"})
# Update route
@waitlists.route('/<id>', methods=["PUT"])
def update_waitlists(id):
    """Update a waitlist row from the JSON payload and return the new state."""
    try:
        payload = request.get_json()
        models.WaitList.update(**payload).where(models.WaitList.id == id).execute()
        updated = model_to_dict(models.WaitList.get_by_id(id))
        return jsonify(data=updated, status={"code": 200, "message": "Resourced updated successfully"})
    except models.DoesNotExist:
        return jsonify(data={}, status={"code": 400, "message": "Error updating one resource"})
# Delete route
@waitlists.route('/<id>', methods=["DELETE"])
def delete_waitlist(id):
    """Delete a waitlist row by primary key."""
    try:
        models.WaitList.delete().where(models.WaitList.id == id).execute()
        return jsonify(data='Resource successfully deleted', status={"code": 200, "message": "Resource successfully deleted"})
    except models.DoesNotExist:
        return jsonify(data={}, status={"code": 400, "message": "Error deleting resource"})
| 38.971831 | 142 | 0.676545 | from flask import Blueprint, jsonify, request
from playhouse.shortcuts import model_to_dict
from flask_login import current_user, login_required
import models
waitlists = Blueprint('waitlists', 'waitlists')
print()
# Index route
# BUGFIX: the decorator referenced undefined `waitlist`; the Blueprint defined
# above is named `waitlists`. Locals renamed so they no longer shadow it.
@waitlists.route('/', methods=["GET"])
def get_all_waitlists():
    """Return every waitlist row owned by the logged-in user.
    The embedded user's password is stripped from each row before serialization.
    """
    try:
        rows = [model_to_dict(entry) for entry in models.WaitList.select().where(models.WaitList.loggedUser_id == current_user.id)]
        for row in rows:
            row['loggedUser'].pop('password')
        return jsonify(data=rows, status={"code": 200, "message": "Success"})
    except models.DoesNotExist:
        return jsonify(data={}, status={"code": 400, "message": "Error getting the resources"})
# Create route
# BUGFIX: decorator referenced undefined `waitlist` (Blueprint is `waitlists`).
@waitlists.route('/', methods=["POST"])
@login_required
def create_waitlists():
    """Create a waitlist row owned by the logged-in user from the JSON payload."""
    try:
        payload = request.get_json()
        payload['loggedUser'] = current_user.id
        waitlist = models.WaitList.create(**payload)
        waitlist_dict = model_to_dict(waitlist)
        return jsonify(data = waitlist_dict, status = {"code": 201, "message": "Success"})
    except models.DoesNotExist:
        # NOTE(review): DoesNotExist is unlikely to be raised by a create();
        # confirm which peewee errors this endpoint is meant to report.
        return jsonify(data={}, status={"code": 400, "message": "Error creating the resources"})
# Show route
# BUGFIX: decorator referenced undefined `waitlist` (Blueprint is `waitlists`).
@waitlists.route('/<id>', methods=["GET"])
def get_one_waitlists(id):
    """Return a single waitlist row looked up by primary key."""
    try:
        waitlist = models.WaitList.get_by_id(id)
        waitlist_dict = model_to_dict(waitlist)
        return jsonify(data = waitlist_dict, status={"code": 200, "message": f"Found waitlist with id {waitlist.id}"})
    except models.DoesNotExist:
        return jsonify(data={}, status={"code": 400, "message": "Error getting one resource"})
# Update route
# BUGFIX: decorator referenced undefined `waitlist` (Blueprint is `waitlists`).
@waitlists.route('/<id>', methods=["PUT"])
def update_waitlists(id):
    """Update a waitlist row from the JSON payload and return its new state."""
    try:
        payload = request.get_json()
        query = models.WaitList.update(**payload).where(models.WaitList.id == id)
        query.execute()
        updated_waitlist = model_to_dict(models.WaitList.get_by_id(id))
        return jsonify(data=updated_waitlist, status={"code": 200, "message": "Resourced updated successfully"})
    except models.DoesNotExist:
        return jsonify(data={}, status={"code": 400, "message": "Error updating one resource"})
# Delete route
# BUGFIX: decorator referenced undefined `waitlist` (Blueprint is `waitlists`).
@waitlists.route('/<id>', methods=["DELETE"])
def delete_waitlist(id):
    """Delete a waitlist row by primary key."""
    try:
        query = models.WaitList.delete().where(models.WaitList.id == id)
        query.execute()
        return jsonify(data='Resource successfully deleted', status={"code": 200, "message": "Resource successfully deleted"})
    except models.DoesNotExist:
        return jsonify(data={}, status={"code": 400, "message": "Error deleting resource"})
| 2,139 | 0 | 110 |
36b34a28d70a8996132af4708395d2bc3181b745 | 239 | py | Python | HackerRank/pairs_difference.py | SimeonRaykov/Algorithms | 6f580bb69ee5badb8550e6424bfd4a8eaca775a2 | [
"MIT"
] | null | null | null | HackerRank/pairs_difference.py | SimeonRaykov/Algorithms | 6f580bb69ee5badb8550e6424bfd4a8eaca775a2 | [
"MIT"
] | null | null | null | HackerRank/pairs_difference.py | SimeonRaykov/Algorithms | 6f580bb69ee5badb8550e6424bfd4a8eaca775a2 | [
"MIT"
] | null | null | null | import itertools
print(pairs_difference(2,[1, 5, 3, 4, 2]))
| 18.384615 | 48 | 0.619247 | import itertools
def pairs_difference(diff, arr):
    """Count ordered pairs (x, y) drawn from arr (by index, repetition
    allowed) such that x - y == diff.
    Equivalent to scanning itertools.product(arr, repeat=2) but O(n) instead
    of O(n**2): every occurrence of a value v pairs with every occurrence
    of v - diff.
    """
    # Local import: the module top only imports itertools.
    from collections import Counter
    counts = Counter(arr)
    return sum(c * counts[v - diff] for v, c in counts.items())
print(pairs_difference(2,[1, 5, 3, 4, 2]))
| 146 | 0 | 25 |
739d5df8a213533b15ecfa771bf22e96091b2ad6 | 1,735 | py | Python | ocr/MyQLabel.py | shtiyu/pyocr | 83ffd13b3e95ba174d78f84ea414322832f83a7b | [
"MIT"
] | null | null | null | ocr/MyQLabel.py | shtiyu/pyocr | 83ffd13b3e95ba174d78f84ea414322832f83a7b | [
"MIT"
] | null | null | null | ocr/MyQLabel.py | shtiyu/pyocr | 83ffd13b3e95ba174d78f84ea414322832f83a7b | [
"MIT"
] | null | null | null | from PyQt5.QtWidgets import QLabel
from PyQt5.QtCore import Qt, pyqtSignal, QTimer
from PyQt5.QtGui import QPainter, QColor, QPen | 26.287879 | 75 | 0.549856 | from PyQt5.QtWidgets import QLabel
from PyQt5.QtCore import Qt, pyqtSignal, QTimer
from PyQt5.QtGui import QPainter, QColor, QPen
class MyQlabel(QLabel):
    """QLabel overlay used to rubber-band-select a screenshot region.
    Emits `oksignal` (after a short delay) once the user has dragged out a
    region with the left mouse button and released it.
    """
    oksignal = pyqtSignal()
    def __init__(self, parent=None):
        super(MyQlabel, self).__init__(parent=parent)
        self.start = (0, 0)  # drag start point (x, y)
        self.end = (0, 0)  # drag end point (x, y)
        self.move = False  # whether the mouse moved during this press (a region was selected)
        self.final_paint = False  # whether the screenshot selection has finished
    def paintEvent(self, event):
        '''
        Draw the selection guide rectangle between self.start and self.end.
        While selecting, the rectangle is filled with a translucent white
        mask; once the selection has finished the fill is omitted.
        :param event:
        :return:
        '''
        super().paintEvent(event)
        x = self.start[0]
        y = self.start[1]
        w = self.end[0] - x
        h = self.end[1] - y
        qp = QPainter(self)
        pen = QPen(Qt.transparent)
        qp.setPen(pen)
        if self.final_paint is False:
            qp.setBrush(QColor(255, 255, 255, 160))
        qp.drawRect(x, y, w, h)
    def mousePressEvent(self, event):
        # A left-button press starts a new region selection.
        if event.button() == Qt.LeftButton:
            self.start = (event.pos().x(), event.pos().y())
            self.move = False
            self.final_paint = False
    def mouseReleaseEvent(self, event):
        # A left-button release finishes the selection and triggers capture.
        if event.button() == Qt.LeftButton:
            self.end = (event.pos().x(), event.pos().y())
            self.final_paint = True
            self.update()
            if self.move:
                QTimer.singleShot(500, self.send_emit)  # delay 500 ms so the overlay mask is not captured
    def send_emit(self):
        # Relay helper so QTimer.singleShot can fire the signal after the delay.
        self.oksignal.emit()
    def mouseMoveEvent(self, event):
        # While dragging with the left button held, update the guide rectangle.
        # BUGFIX: was `event.buttons() and Qt.LeftButton`, which is truthy
        # whenever ANY button is pressed (Qt.LeftButton is a nonzero constant);
        # the bitmask test checks the left button specifically.
        if event.buttons() & Qt.LeftButton:
            self.end = (event.pos().x(), event.pos().y())
            self.move = True
            self.update()
70b1a3efae17962ce406fcba2c7ecbf4c8b0bf0f | 37 | py | Python | Autokey/CapsKeybinds/alt/pageup.py | MisaghM/Capslock-Keybindings | 00332c7d39cf776c43fe13aa08e1c2969747425d | [
"MIT"
] | 1 | 2021-11-05T19:39:36.000Z | 2021-11-05T19:39:36.000Z | Autokey/CapsKeybinds/alt/pageup.py | MisaghM/Capslock-Keybindings | 00332c7d39cf776c43fe13aa08e1c2969747425d | [
"MIT"
] | null | null | null | Autokey/CapsKeybinds/alt/pageup.py | MisaghM/Capslock-Keybindings | 00332c7d39cf776c43fe13aa08e1c2969747425d | [
"MIT"
] | null | null | null | keyboard.send_keys("<alt>+<page_up>") | 37 | 37 | 0.72973 | keyboard.send_keys("<alt>+<page_up>") | 0 | 0 | 0 |
73dc22b154058b3ebae0cfacf372a6ddb7b5daf3 | 3,906 | py | Python | example/mqtt/listener.py | dkmalav/tavern | e8ecab9bc876523997d61c57d572c330c08124a2 | [
"MIT"
] | null | null | null | example/mqtt/listener.py | dkmalav/tavern | e8ecab9bc876523997d61c57d572c330c08124a2 | [
"MIT"
] | 1 | 2019-04-01T14:12:57.000Z | 2019-04-01T14:12:57.000Z | example/mqtt/listener.py | dkmalav/tavern | e8ecab9bc876523997d61c57d572c330c08124a2 | [
"MIT"
] | 1 | 2019-01-31T16:39:51.000Z | 2019-01-31T16:39:51.000Z | import json
import logging
import logging.config
import os
import sqlite3
import yaml
import paho.mqtt.client as paho
DATABASE = os.environ.get("DB_NAME")
if __name__ == "__main__":
    # Bootstrap the sqlite schema, then block forever handling MQTT messages.
    db = get_db()
    with db:
        try:
            db.execute("CREATE TABLE devices_table (device_id TEXT NOT NULL, lights_on INTEGER NOT NULL)")
        except:
            # Table already exists on re-runs.
            # NOTE(review): the bare except also hides real sqlite errors.
            pass
        try:
            db.execute("INSERT INTO devices_table VALUES ('123', 0)")
        except:
            # NOTE(review): with no unique constraint this INSERT cannot fail
            # for duplicates, so device '123' is re-inserted on every run.
            pass
    wait_for_messages()
| 23.817073 | 106 | 0.621352 | import json
import logging
import logging.config
import os
import sqlite3
import yaml
import paho.mqtt.client as paho
DATABASE = os.environ.get("DB_NAME")
def get_client():
    """Build an MQTT-over-websockets client for the 'broker' host.
    The connection is deferred (connect_async) until the network loop runs.
    """
    mqtt_client = paho.Client(transport="websockets", client_id="listener")
    mqtt_client.enable_logger()
    mqtt_client.connect_async(host="broker", port=9001)
    return mqtt_client
def get_db():
    """Open a new sqlite3 connection to the database named by $DB_NAME."""
    return sqlite3.connect(DATABASE)
def setup_logging():
    """Configure logging (root and paho loggers) to forward records to the
    fluentd sidecar via the inline YAML dictConfig below."""
    log_cfg = """
    version: 1
    disable_existing_loggers: true
    formatters:
        fluent_fmt:
            (): fluent.handler.FluentRecordFormatter
            format:
              level: '%(levelname)s'
              where: '%(filename)s.%(lineno)d'
    handlers:
        fluent:
            class: fluent.handler.FluentHandler
            formatter: fluent_fmt
            tag: listener
            port: 24224
            host: fluent
    loggers:
        paho:
            handlers:
                - fluent
            level: DEBUG
            propagate: true
        '':
            handlers:
                - fluent
            level: DEBUG
            propagate: true
    """
    # BUGFIX: yaml.load() without an explicit Loader is deprecated and can
    # construct arbitrary Python objects; safe_load is sufficient here.
    as_dict = yaml.safe_load(log_cfg)
    logging.config.dictConfig(as_dict)
    logging.info("Logging set up")
def handle_lights_topic(message):
    """Persist a lights on/off command for the device named in the topic.
    Payloads other than b"on"/b"off" are ignored.
    """
    connection = get_db()
    device_id = message.topic.split("/")[-2]
    command = message.payload.decode("utf8")
    if command == "on":
        logging.info("Lights have been turned on")
        with connection:
            connection.execute(
                "UPDATE devices_table SET lights_on = 1 WHERE device_id IS (?)",
                (device_id,))
    elif command == "off":
        logging.info("Lights have been turned off")
        with connection:
            connection.execute(
                "UPDATE devices_table SET lights_on = 0 WHERE device_id IS (?)",
                (device_id,))
def handle_request_topic(client, message):
    """Look up the lights status of the device named in the topic and publish
    it as JSON on the device's status/response topic; failures are logged."""
    connection = get_db()
    device_id = message.topic.split("/")[-2]
    logging.info("Checking lights status")
    with connection:
        rows = connection.execute(
            "SELECT lights_on FROM devices_table WHERE device_id IS (?)",
            (device_id,))
    try:
        status = int(next(rows)[0])
    except Exception:
        # Missing row (StopIteration) or a bad value; log with traceback.
        logging.exception("Error getting status for device '%s'", device_id)
    else:
        client.publish(
            "/device/{}/status/response".format(device_id),
            json.dumps({"lights": status})
        )
def handle_ping_topic(client, message):
    """Reply to a device ping with an empty pong on the matching topic."""
    target = message.topic.split("/")[-2]
    client.publish("/device/{}/pong".format(target))
def handle_echo_topic(client, message):
    """Echo a device's payload back on its echo/response topic."""
    target = message.topic.split("/")[-2]
    client.publish("/device/{}/echo/response".format(target), message.payload)
def on_message_callback(client, userdata, message):
    """paho on_message hook: dispatch to a handler by topic substring.
    The first matching substring wins, so the check order matters; topics
    matching none of the handlers are logged and dropped.
    """
    logging.info("Received message on %s", message.topic)
    if "lights" in message.topic:
        handle_lights_topic(message)
    elif "echo" in message.topic:
        handle_echo_topic(client, message)
    elif "ping" in message.topic:
        handle_ping_topic(client, message)
    elif "status" in message.topic:
        handle_request_topic(client, message)
    else:
        logging.warning("Got unexpected MQTT topic '%s'", message.topic)
def wait_for_messages():
    """Configure logging, subscribe to device 123's topics, and block forever
    dispatching incoming MQTT messages."""
    setup_logging()
    client = get_client()
    client.on_message = on_message_callback
    client.reconnect()
    for suffix in ("lights", "ping", "echo", "status"):
        topic = "/device/{}/{}".format(123, suffix)
        logging.debug("Subscribing to '%s'", topic)
        client.subscribe(topic)
    client.loop_forever()
if __name__ == "__main__":
    # Seed the database with the single demo device before serving requests.
    # BUGFIX: the previous bare `except: pass` hid every sqlite error, and
    # because devices_table has no unique constraint the INSERT re-added a
    # duplicate '123' row on every start.
    db = get_db()
    with db:
        db.execute(
            "CREATE TABLE IF NOT EXISTS devices_table "
            "(device_id TEXT NOT NULL, lights_on INTEGER NOT NULL)")
        cursor = db.execute(
            "SELECT COUNT(*) FROM devices_table WHERE device_id = '123'")
        if cursor.fetchone()[0] == 0:
            db.execute("INSERT INTO devices_table VALUES ('123', 0)")
    wait_for_messages()
| 3,177 | 0 | 207 |
daf6b4f7b30ce0839dfcc31c06acf5c851fbc4d0 | 1,378 | py | Python | csc work/misc/w8/p/palindrome.py | mdnu/snake | 0df9d46ab226e1b9e5da0fa697862dd81f635029 | [
"MIT"
] | 1 | 2016-10-05T21:57:20.000Z | 2016-10-05T21:57:20.000Z | csc work/misc/w8/p/palindrome.py | mdnu/snake | 0df9d46ab226e1b9e5da0fa697862dd81f635029 | [
"MIT"
] | null | null | null | csc work/misc/w8/p/palindrome.py | mdnu/snake | 0df9d46ab226e1b9e5da0fa697862dd81f635029 | [
"MIT"
] | 1 | 2020-03-19T01:43:02.000Z | 2020-03-19T01:43:02.000Z | def reverse(s):
''' (str) -> str
Return a reversed version of s.
>>> reverse('hello')
'olleh'
>>> reverse('a')
'a'
'''
new = ''
for i in range(len(s)):
new += s[len(s) - 1 - i]
return new
def is_palindrome(s):
    ''' (str) -> bool
    Return True iff s is a palindrome.
    >>> is_palindrome('noon')
    True
    >>> is_palindrome('racecar')
    True
    >>> is_palindrome('dented')
    False
    '''
    # A palindrome reads the same forwards and backwards; compare s with its
    # slice-reversal (equivalent to the module's reverse() helper).
    return s == s[::-1]
def is_palindrome2(s):
    ''' (str) -> bool
    Return True iff s is a palindrome.
    >>> is_palindrome2('noon')
    True
    >>> is_palindrome2('racecar')
    True
    >>> is_palindrome2('dented')
    False
    '''
    # Compare the first half with the reversed second half; the middle
    # character of an odd-length string is skipped (it always matches itself).
    half = len(s) // 2
    tail = s[len(s) - half:]
    return tail[::-1] == s[:half]
''' (str) -> str
Return a reversed version of s.
>>> reverse('hello')
'olleh'
>>> reverse('a')
'a'
'''
new = ''
for i in range(len(s)):
new += s[len(s) - 1 - i]
return new
def is_palindrome(s):
''' (str) -> bool
Return True iff s is a palindrome.
>>> is_palindrome('noon')
True
>>> is_palindrome('racecar')
True
>>> is_palindrome('dented')
False
'''
return reverse(s) == s
def is_palindrome2(s):
''' (str) -> bool
Return True iff s is a palindrome.
>>> is_palindrome2('noon')
True
>>> is_palindrome2('racecar')
True
>>> is_palindrome2('dented')
False
'''
half_s = ''
if (len(s) % 2) == 0:
half_s += s[len(s) // 2 :]
else:
half_s += s[(len(s) // 2) + 1:]
return reverse(half_s) == s[:len(s) // 2]
# compare the first half of s to the reverse of the second half
# omit the middle character of an odd-length string
#n = len(s)
#return s[:n // 2] == reverse(s[n - n // 2:])
def is_palindrome3(s):
    """Return True iff s is a palindrome, comparing index pairs that walk
    inward from both ends; any mismatch disproves the palindrome."""
    return all(s[i] == s[len(s) - 1 - i] for i in range(len(s) // 2))
d5c2ff21e29cae641742586603bef5a262bc823c | 30,671 | py | Python | Assets/StreamingAssets/.q/Lib/site-packages/docplex/mp/tck.py | hennlo/Q-shall-not-pass | 8013ce891462683eb9cfedc4ac12a1e602fc1ba8 | [
"Apache-2.0"
] | null | null | null | Assets/StreamingAssets/.q/Lib/site-packages/docplex/mp/tck.py | hennlo/Q-shall-not-pass | 8013ce891462683eb9cfedc4ac12a1e602fc1ba8 | [
"Apache-2.0"
] | null | null | null | Assets/StreamingAssets/.q/Lib/site-packages/docplex/mp/tck.py | hennlo/Q-shall-not-pass | 8013ce891462683eb9cfedc4ac12a1e602fc1ba8 | [
"Apache-2.0"
] | null | null | null | # --------------------------------------------------------------------------
# Source file provided under Apache License, Version 2.0, January 2004,
# http://www.apache.org/licenses/
# (c) Copyright IBM Corp. 2015, 2016
# --------------------------------------------------------------------------
# gendoc: ignore
import math
from docplex.mp.compat23 import izip
from docplex.mp.constr import AbstractConstraint, LinearConstraint,\
LogicalConstraint, EquivalenceConstraint, IndicatorConstraint, QuadraticConstraint
from docplex.mp.error_handler import docplex_fatal
from docplex.mp.operand import LinearOperand
from docplex.mp.dvar import Var
from docplex.mp.pwl import PwlFunction
from docplex.mp.progress import ProgressListener
from docplex.mp.utils import is_int, is_number, is_iterable, is_string, generate_constant, \
is_ordered_sequence, is_iterator, resolve_caller_as_string
from docplex.mp.vartype import VarType
import six
# Map CPLEX vartype codes (e.g. 'B', 'I', 'C') to the short display name of
# the corresponding VarType subclass; used when formatting messages.
_vartype_code_map = {sc().cplex_typecode: sc().short_name for sc in VarType.__subclasses__()}
# noinspection PyAbstractClass
# ------------------------------
# noinspection PyPep8
# Registry of type-checker keys accepted by the model's checker option.
# Several aliases map to the same checker class; 'off'/'deploy'/'no_checks'
# disable argument checking entirely.
# NOTE(review): the checker classes referenced here are defined elsewhere in
# this module.
_tck_map = {'default': StandardTypeChecker,
            'standard': StandardTypeChecker,
            'std': StandardTypeChecker,
            'on': StandardTypeChecker,
            # --
            'numeric': NumericTypeChecker,
            'full': FullTypeChecker,
            # --
            'off': DummyTypeChecker,
            'deploy': DummyTypeChecker,
            'no_checks': DummyTypeChecker}
| 39.071338 | 124 | 0.636236 | # --------------------------------------------------------------------------
# Source file provided under Apache License, Version 2.0, January 2004,
# http://www.apache.org/licenses/
# (c) Copyright IBM Corp. 2015, 2016
# --------------------------------------------------------------------------
# gendoc: ignore
import math
from docplex.mp.compat23 import izip
from docplex.mp.constr import AbstractConstraint, LinearConstraint,\
LogicalConstraint, EquivalenceConstraint, IndicatorConstraint, QuadraticConstraint
from docplex.mp.error_handler import docplex_fatal
from docplex.mp.operand import LinearOperand
from docplex.mp.dvar import Var
from docplex.mp.pwl import PwlFunction
from docplex.mp.progress import ProgressListener
from docplex.mp.utils import is_int, is_number, is_iterable, is_string, generate_constant, \
is_ordered_sequence, is_iterator, resolve_caller_as_string
from docplex.mp.vartype import VarType
import six
_vartype_code_map = {sc().cplex_typecode: sc().short_name for sc in VarType.__subclasses__()}
def vartype_code_to_string(vartype_code):
    """Return the short name for a CPLEX vartype code, or '????' if unknown."""
    return _vartype_code_map.get(vartype_code, '????')
class DocplexNumericCheckerMixin(object):
    """Mixin of static/class helpers that validate numeric arguments.
    Values are clamped to +/- the model infinity (default 1e+20); non-numbers,
    NaN and infinite values are reported via docplex_fatal or a passed logger.
    """
    @staticmethod
    def static_validate_num1(e, checked_num=False, infinity=1e+20):
        # checks for number and truncates to +/- 1e+20;
        # checked_num=True skips the is_number test when the caller has
        # already validated the argument.
        if not checked_num and not is_number(e):
            docplex_fatal("Expecting number, got: {0!r}".format(e))
        elif -infinity <= e <= infinity:
            return e
        elif e >= infinity:
            return infinity
        else:
            return -infinity
    @staticmethod
    def static_validate_num2(e, infinity=1e+20, context_msg=None):
        # checks for number, NaN, math.inf, and truncates to +/- 1e+20;
        # context_msg may be a plain string or a zero-argument callable.
        if not is_number(e):
            docplex_fatal("Not a number: {}".format(e))
        elif math.isnan(e):
            msg = "NaN value found in expression"
            if context_msg is not None:
                try:
                    # context_msg may be callable...
                    msg = "{0}: {1}".format(context_msg(), msg)
                except TypeError:
                    # ... or a plain string.
                    msg = "{0}: {1}".format(context_msg, msg)
            docplex_fatal(msg)
        elif math.isinf(e):
            msg = "Infinite value detected"
            if context_msg is not None:
                try:
                    msg = "{0}: {1}".format(context_msg(), msg)
                except TypeError:
                    msg = "{0}: {1}".format(context_msg, msg)
            docplex_fatal(msg)
        elif -infinity <= e <= infinity:
            return e
        elif e >= infinity:
            return infinity
        else:
            return -infinity
    @classmethod
    def typecheck_num_seq(cls, logger, seq, check_math, caller=None):
        # Validate every element of seq as a number; returns the materialized
        # list so callers can iterate it again.
        # build a list to avoid consuming an iterator
        checked_num_list = list(seq)
        for i, x in enumerate(checked_num_list):
            # Deferred so the "caller, pos i" string is built only on error.
            def loop_caller():
                return "%s, pos %d" % (caller, i) if caller else ""
            cls.typecheck_num(logger, x, check_math, loop_caller)
        return checked_num_list
    @classmethod
    def typecheck_num(cls, logger, arg, check_math, caller=None):
        # Report (via logger.fatal) a non-number, or NaN/inf when check_math.
        if not is_number(arg):
            caller_string = resolve_caller_as_string(caller)
            logger.fatal("{0}Expecting number, got: {1!r}", (caller_string, arg))
        elif check_math:
            if math.isnan(arg):
                caller_string = resolve_caller_as_string(caller)
                logger.fatal("{0}NaN value detected", (caller_string,))
            elif math.isinf(arg):
                caller_string = resolve_caller_as_string(caller)
                logger.fatal("{0}Infinite value detected", (caller_string,))
    @classmethod
    def typecheck_int(cls, logger, arg, check_math, accept_negative=True, caller=None):
        # Like typecheck_num, but additionally requires an integer value and,
        # when accept_negative is False, a non-negative one.
        if not is_number(arg):
            caller_string = resolve_caller_as_string(caller)
            logger.fatal("{0}Expecting number, got: {1!r}", (caller_string, arg))
        if check_math:
            if math.isnan(arg):
                caller_string = resolve_caller_as_string(caller)
                logger.fatal("{0}NaN value detected", (caller_string,))
            elif math.isinf(arg):
                caller_string = resolve_caller_as_string(caller)
                logger.fatal("{0}Infinite value detected", (caller_string,))
        if not is_int(arg):
            caller_string = resolve_caller_as_string(caller)
            logger.fatal("{0}Expecting integer, got: {1!r}", (caller_string, arg))
        elif not accept_negative and arg < 0:
            caller_string = resolve_caller_as_string(caller)
            logger.fatal("{0}Expecting positive integer, got: {1!r}", (caller_string, arg))
class DocplexTypeCheckerI(object):
    """Interface for docplex type checkers.

    Concrete subclasses either validate their arguments (reporting failures
    through the model logger) or are no-ops.  Contract conventions:

    - methods receiving a sequence must return it (possibly materialized as a
      list) so callers can safely iterate it afterwards;
    - pure checks return ``None`` and raise/log on failure;
    - unimplemented stubs raise :class:`NotImplementedError`.
    """

    def check_new_variable_bound(self):
        raise NotImplementedError  # pragma: no cover

    def typecheck_iterable(self, arg):
        raise NotImplementedError  # pragma: no cover

    def typecheck_valid_index(self, arg):
        raise NotImplementedError  # pragma: no cover

    def typecheck_vartype(self, arg):
        raise NotImplementedError  # pragma: no cover

    def typecheck_var(self, obj, vartype=None):
        raise NotImplementedError  # pragma: no cover

    # Convenience shortcuts expressed in terms of typecheck_var with the
    # cplex typecode of the expected variable type.
    def typecheck_binary_var(self, obj):
        return self.typecheck_var(obj, vartype='B')

    def typecheck_continuous_var(self, obj):
        return self.typecheck_var(obj, vartype='C')

    def typecheck_var_seq(self, seq, vtype=None, caller=None):
        return seq  # pragma: no cover

    def typecheck_logical_op_seq(self, seq):
        return seq  # pragma: no cover

    def typecheck_logical_op(self, arg, caller):
        raise NotImplementedError  # pragma: no cover

    def typecheck_var_seq_all_different(self, seq):
        raise NotImplementedError  # pragma: no cover

    def typecheck_num_seq(self, seq, caller=None):
        raise NotImplementedError  # pragma: no cover

    def typecheck_operand(self, obj, accept_numbers=True, caller=None):
        raise NotImplementedError  # pragma: no cover

    def typecheck_constraint(self, obj):
        raise NotImplementedError  # pragma: no cover

    def typecheck_ct_to_add(self, ct, mdl, caller):
        raise NotImplementedError  # pragma: no cover

    def typecheck_ct_not_added(self, ct, do_raise=False, caller=None):
        raise NotImplementedError  # pragma: no cover

    def typecheck_cts_added_to_model(self, mdl, cts, caller=None):
        return cts  # pragma: no cover

    # NOTE(review): this stub spells the keyword `accept_ranges` while the
    # concrete implementations use `accept_range` -- confirm intended name.
    def typecheck_linear_constraint(self, obj, accept_ranges=True):
        raise NotImplementedError  # pragma: no cover

    def typecheck_constraint_seq(self, cts, check_linear=False, accept_range=True):
        # must return sequence unchanged
        return cts  # pragma: no cover

    def typecheck_logical_constraint_seq(self, cts, true_if_equivalence):
        # must return sequence unchanged
        return cts  # pragma: no cover

    def typecheck_quadratic_constraint_seq(self, cts):
        # must return sequence unchanged
        return cts  # pragma: no cover

    def typecheck_linear_constraint_name_tuple_seq(self, ct_ctname_seq, accept_range=True):
        # must return sequence unchanged
        return ct_ctname_seq  # pragma: no cover

    def typecheck_zero_or_one(self, arg):
        raise NotImplementedError  # pragma: no cover

    def typecheck_num(self, arg, caller=None):
        raise NotImplementedError  # pragma: no cover

    def typecheck_int(self, arg, accept_negative=False, caller=None):
        raise NotImplementedError  # pragma: no cover

    def check_vars_domain(self, lbs, ubs, names):
        raise NotImplementedError  # pragma: no cover

    def check_var_domain(self, lbs, ubs, names):
        raise NotImplementedError  # pragma: no cover

    def typecheck_string(self, arg, accept_empty=False, accept_none=False, caller=''):
        raise NotImplementedError  # pragma: no cover

    def typecheck_string_seq(self, arg, accept_empty=False, accept_none=False, caller=''):
        raise NotImplementedError  # pragma: no cover

    def typecheck_in_model(self, model, mobj, caller=''):
        raise NotImplementedError  # pragma: no cover

    def typecheck_key_seq(self, keys, accept_empty_seq=False):
        raise NotImplementedError  # pragma: no cover

    def get_number_validation_fn(self):
        raise NotImplementedError  # pragma: no cover

    def typecheck_progress_listener(self, arg):
        raise NotImplementedError  # pragma: no cover

    def typecheck_two_in_model(self, model, obj1, obj2, ctx_msg):
        raise NotImplementedError  # pragma: no cover

    def check_ordered_sequence(self, arg, caller, accept_iterator=True):
        raise NotImplementedError  # pragma: no cover

    def check_trivial_constraints(self):
        raise NotImplementedError  # pragma: no cover

    def check_solution_hook(self, mdl, sol_hook_fn):
        raise NotImplementedError  # pragma: no cover

    def typecheck_pwl_function(self, pwl):
        raise NotImplementedError  # pragma: no cover

    def check_duplicate_name(self, name, name_table, qualifier):
        raise NotImplementedError  # pragma: no cover

    def check_for_duplicate_keys(self, keys, caller=None):
        # default is no-op
        pass
# noinspection PyAbstractClass
class DOcplexLoggerTypeChecker(DocplexTypeCheckerI):
    """Abstract base that funnels all diagnostics through a docplex logger.

    Subclasses call :meth:`fatal`, :meth:`error` and :meth:`warning`; the
    positional args are forwarded as a tuple for lazy message formatting.
    """

    def __init__(self, logger):
        # logger is expected to expose fatal/error/warning(msg, args).
        self._logger = logger

    def fatal(self, msg, *args):
        self._logger.fatal(msg, args)

    def error(self, msg, *args):  # pragma: no cover
        self._logger.error(msg, args)

    def warning(self, msg, *args):  # pragma: no cover
        self._logger.warning(msg, args)
class StandardTypeChecker(DOcplexLoggerTypeChecker):
    """Default type checker: validates types, model membership and sequences.

    Numeric checks only verify "is a number"; NaN/infinity screening is left
    to :class:`NumericTypeChecker` and :class:`FullTypeChecker`.  Sequence
    checks materialize their input as a list and return it so that iterator
    arguments are not silently consumed.
    """

    def __init__(self, logger):
        DOcplexLoggerTypeChecker.__init__(self, logger)

    @property
    def name(self):
        return "std"

    def check_new_variable_bound(self):
        return True

    def typecheck_iterable(self, arg):
        # INTERNAL: checks for an iterable
        if not is_iterable(arg):
            self.fatal("Expecting iterable, got: {0!s}", arg)

    # safe checks.
    def typecheck_valid_index(self, arg):
        if arg < 0:
            self.fatal("Invalid index: {0!s}", arg)

    def typecheck_vartype(self, arg):
        # INTERNAL: check for a valid vartype
        if not isinstance(arg, VarType):
            self.fatal("Not a variable type: {0!s}, type: {1!s}", arg, type(arg))
        return True

    def typecheck_var(self, obj, vartype=None):
        # INTERNAL: check for Var instance; optionally check its cplex typecode.
        if not isinstance(obj, Var):
            self.fatal("Expecting decision variable, got: {0!s} type: {1!s}", obj, type(obj))
        if vartype and obj.cplex_typecode != vartype:
            self.fatal("Expecting {0} variable, got: {1!s} type: {2!s}",
                       vartype_code_to_string(vartype), obj, obj.vartype)

    def typecheck_var_seq(self, seq, vtype=None, caller=None):
        # build a list to avoid consuming an iterator
        checked_var_list = list(seq)
        for i, x in enumerate(checked_var_list):
            if not isinstance(x, Var):
                caller_s = resolve_caller_as_string(caller)
                self.fatal("{2}Expecting an iterable returning variables, {0!r} was passed at position {1}", x, i, caller_s)
            if vtype and x.cplex_typecode != vtype:
                caller_s = resolve_caller_as_string(caller)
                self.fatal("{3}Expecting an iterable returning variables of type {0}, {1!r} was passed at position {2}",
                           vtype.short_name, x, i, caller_s)
        return checked_var_list

    def typecheck_logical_op_seq(self, seq, caller=None):
        checked_args = list(seq)
        for i, x in enumerate(checked_args):
            if caller is None:
                loop_caller = None
            else:
                # loop_caller is invoked immediately below, so the late-bound
                # closure over `i` always sees the current iteration's value.
                def loop_caller():
                    return '%s, arg#%d' % (resolve_caller_as_string(caller, sep=''), i)
            self.typecheck_logical_op(x, caller=loop_caller)
        return checked_args

    def typecheck_logical_op(self, arg, caller):
        if not hasattr(arg, 'as_logical_operand') or arg.as_logical_operand() is None:
            caller_s = resolve_caller_as_string(caller)
            self.fatal('{1}Not a logical operand: {0!r}. Expecting binary variable, logical expression', arg, caller_s)

    def typecheck_num_seq(self, seq, caller=None):
        return DocplexNumericCheckerMixin.typecheck_num_seq(self._logger, seq, check_math=False, caller=caller)

    def typecheck_var_seq_all_different(self, seq):
        # return the checked sequence, so take the list
        seq_as_list = list(seq)
        for v in seq_as_list:
            self.typecheck_var(v)
        # check for all different and output a justifier variable appearing twice.
        inc_set = set([])
        for v in seq_as_list:
            if v.index in inc_set:
                self.fatal('Variable: {0} appears twice in sequence', v)
            else:
                inc_set.add(v.index)
        return seq_as_list

    def typecheck_constraint(self, obj):
        if not isinstance(obj, AbstractConstraint):
            self.fatal("Expecting constraint, got: {0!s} with type: {1!s}", obj, type(obj))

    def typecheck_ct_to_add(self, ct, mdl, caller):
        if not isinstance(ct, AbstractConstraint):
            self.fatal("Expecting constraint, got: {0!r} with type: {1!s}", ct, type(ct))
        self.typecheck_in_model(mdl, ct, caller)

    def typecheck_ct_not_added(self, ct, do_raise=False, caller=None):
        # Warn (or raise) when a constraint expected to be detached has
        # already been posted to a model.
        if ct.is_added():
            s_caller = resolve_caller_as_string(caller, sep=' ')
            if do_raise:
                self.fatal('{0}expects a non-added constraint, {1} is added (index={2})',
                           s_caller, ct, ct.index
                           )
            else:
                self.warning('{0}expects a non-added constraint, {1} is added (index={2})',
                             s_caller, ct, ct.index
                             )

    def typecheck_cts_added_to_model(self, mdl, cts, caller=None):
        lcts = list(cts)
        for ct in lcts:
            if not ct.is_added():
                s_caller = resolve_caller_as_string(caller, sep=' ')
                mdl.fatal("{0}Constraint: {1!s} has not been added to any model".format(s_caller, ct))
            elif mdl is not ct.model:
                s_caller = resolve_caller_as_string(caller, sep=' ')
                mdl.fatal("{0}Constraint: {1!s} belongs to a different model".format(s_caller, ct))
        return lcts

    def typecheck_ct_added_to_model(self, mdl, ct, caller=None):
        if not ct.is_added():
            s_caller = resolve_caller_as_string(caller, sep=' ')
            mdl.fatal("{0}Constraint: {1!s} has not been added to any model".format(s_caller, ct))
        elif mdl is not ct.model:
            s_caller = resolve_caller_as_string(caller, sep=' ')
            mdl.fatal("{0}Constraint: {1!s} belongs to a different model".format(s_caller, ct))

    def typecheck_linear_constraint(self, obj, accept_range=True):
        if accept_range:
            # ranges qualify as long as the constraint reports itself linear.
            if not isinstance(obj, AbstractConstraint):
                self.fatal("Expecting linear constraint, got: {0!r}", obj)
            if not obj.is_linear():
                self.fatal("Expecting linear constraint, got: {0!s} with type: {1!s}", obj, type(obj))
        else:
            if not isinstance(obj, LinearConstraint):
                self.fatal("Expecting linear constraint, got: {0!s} with type: {1!s}", obj, type(obj))

    def typecheck_constraint_seq(self, cts, check_linear=False, accept_range=True):
        checked_cts_list = list(cts)
        for i, ct in enumerate(checked_cts_list):
            if not isinstance(ct, AbstractConstraint):
                self.fatal("Expecting sequence of constraints, got: {0!r} at position {1}", ct, i)
            if check_linear:
                if not ct.is_linear():
                    self.fatal("Expecting sequence of linear constraints, got: {0!r} at position {1}", ct, i)
                elif not accept_range and not isinstance(ct, LinearConstraint):
                    self.fatal("Expecting sequence of linear constraints (not ranges), got: {0!r} at position {1}", ct,
                               i)
        return checked_cts_list

    def typecheck_logical_constraint_seq(self, cts, true_if_equivalence):
        checked_cts_list = list(cts)  # listify to avoid consuming iterators....
        # true_if_equivalence is tri-state: True / False / None (either kind).
        if true_if_equivalence:
            checked_type = EquivalenceConstraint
            typename = "equivalence"
        elif true_if_equivalence is False:
            checked_type = IndicatorConstraint
            typename = "indicator"
        else:
            checked_type = LogicalConstraint
            typename = "equivalence or indicator"
        for i, ct in enumerate(checked_cts_list):
            if not isinstance(ct, checked_type):
                self.fatal("Expecting sequence of {0} constraints, got: {1!r} at position {2}", typename, ct, i)
        return checked_cts_list

    def typecheck_quadratic_constraint_seq(self, cts):
        checked_qcts_list = list(cts)  # listify to avoid consuming iterators....
        for i, ct in enumerate(checked_qcts_list):
            if not isinstance(ct, QuadraticConstraint):
                self.fatal("Expecting sequence of quadratic constraints, got: {0!r} at position {1}", ct, i)
        return checked_qcts_list

    def typecheck_linear_constraint_name_tuple_seq(self, ct_ctname_seq, accept_range=True):
        # must return sequence unchanged
        checked_list = list(ct_ctname_seq)
        # BUGFIX: iterate the materialized list, not the input sequence --
        # when ct_ctname_seq is an iterator it has already been consumed by
        # list() above, and the original loop checked nothing.
        for c, (ct, ctname) in enumerate(checked_list):
            self.typecheck_linear_constraint(ct, accept_range=accept_range)
            # noinspection PyArgumentEqualDefault
            self.typecheck_string(ctname, accept_empty=True, accept_none=False)
        return checked_list

    def typecheck_zero_or_one(self, arg):
        if arg != 0 and arg != 1:
            self.fatal("expecting 0 or 1, got: {0!s}", arg)

    def typecheck_num(self, arg, caller=None):
        if not is_number(arg):
            caller_string = "{0}: ".format(caller) if caller is not None else ""
            self.fatal("{0}Expecting number, got: {1!r}", caller_string, arg)

    def typecheck_int(self, arg, accept_negative=True, caller=None):
        caller_string = "{0}: ".format(caller) if caller is not None else ""
        if not is_number(arg):
            self.fatal("{0}Expecting number, got: {1!r}", caller_string, arg)
        elif not is_int(arg):
            self.fatal("{0}Expecting integer, got: {1!r}", caller_string, arg)
        elif not accept_negative and arg < 0:
            self.fatal("{0}Expecting positive integer, got: {1!r}", caller_string, arg)

    def check_vars_domain(self, lbs, ubs, names):
        l_ubs = len(ubs)
        l_lbs = len(lbs)
        if l_lbs and l_ubs:
            names = names or generate_constant(None, max(l_lbs, l_ubs))
            # noinspection PyArgumentList,PyArgumentList
            for lb, ub, varname in izip(lbs, ubs, names):
                self.check_var_domain(lb, ub, varname)

    def check_var_domain(self, lb, ub, varname):
        if lb is not None and ub is not None and lb > ub:
            self.fatal('Empty variable domain, name={0}, lb={1}, ub={2}'.format(varname, lb, ub))

    def typecheck_string(self, arg, accept_empty=False, accept_none=False, caller=''):
        if is_string(arg):
            if not accept_empty and 0 == len(arg):
                s_caller = resolve_caller_as_string(caller)
                self.fatal("{0}Expecting a non-empty string", s_caller)
        elif not (arg is None and accept_none):
            s_caller = resolve_caller_as_string(caller)
            self.fatal("{0}Expecting string, got: {1!r}", s_caller, arg)

    def typecheck_string_seq(self, arg, accept_empty=False, accept_none=False, caller=''):
        checked_strings = list(arg)
        # do not accept a string
        if is_string(arg):
            s_caller = resolve_caller_as_string(caller)
            self.fatal("{0}Expecting list of strings, a string was passed: '{1}'", s_caller, arg)
        for s in checked_strings:
            self.typecheck_string(s, accept_empty=accept_empty, accept_none=accept_none, caller=caller)
        return checked_strings

    def typecheck_in_model(self, model, mobj, caller=''):
        # produces message of the type: "constraint ... does not belong to model
        if mobj.model != model:
            self.fatal("{0} ({2!s}) is not in model '{1:s}'".format(caller, model.name, mobj))

    def typecheck_key_seq(self, keys, accept_empty_seq=False):
        # NOTE(review): accept_empty_seq is currently unused here -- empty key
        # sequences are accepted regardless; confirm whether that is intended.
        if any(k is None for k in keys):
            self.fatal("Variable keys cannot be None, got: {0!r}", keys)

    def get_number_validation_fn(self):
        return DocplexNumericCheckerMixin.static_validate_num1

    @staticmethod
    def _is_operand(arg, accept_numbers=True):
        return isinstance(arg, LinearOperand) or (accept_numbers and is_number(arg))

    def typecheck_operand(self, arg, accept_numbers=True, caller=None):
        if not self._is_operand(arg, accept_numbers=accept_numbers):
            caller_str = "{0}: ".format(caller) if caller else ""
            accept_str = "Expecting variable or linear expression"
            if accept_numbers:
                accept_str += "/number"
            self.fatal("{0}{1}, got: {2!r}", caller_str, accept_str, arg)

    def typecheck_progress_listener(self, arg):
        if not isinstance(arg, ProgressListener):
            self.fatal('Expecting ProgressListener instance, got: {0!r}', arg)

    def typecheck_two_in_model(self, model, mobj1, mobj2, ctx_msg):
        mobj1_model = mobj1.model
        mobj2_model = mobj2.model
        if mobj1_model != mobj2_model:
            self.fatal("Cannot mix objects from different models in {0}. obj1={1!s}, obj2={2!s}"
                       .format(ctx_msg, mobj1, mobj2))
        elif mobj1_model != model:
            # BUGFIX: the message must name the expected model, not the
            # checker instance (`self`) that was formatted here before.
            self.fatal("Objects do not belong to model {0}. obj1={1!s}, obj2={2!s}"
                       .format(model, mobj1, mobj2))

    def check_trivial_constraints(self):
        return True

    def check_ordered_sequence(self, arg, caller, accept_iterator=True):
        # in some cases, we need an ordered sequence, if not the code won't crash
        # but may do unexpected things
        if not (is_ordered_sequence(arg) or (accept_iterator and is_iterator(arg))):
            self.fatal("{0}, got: {1!s}", caller, type(arg).__name__)

    def check_solution_hook(self, mdl, sol_hook_fn):
        if not callable(sol_hook_fn):
            self.fatal('Solution hook requires a function taking a solution as argument, a non-callable was passed')
        if six.PY3:
            try:
                from inspect import signature
                hook_signature = signature(sol_hook_fn)
                nb_params = len(hook_signature.parameters)
                if nb_params != 1:
                    self.fatal(
                        'Solution hook requires a function taking a solution as argument, wrong number of arguments: {0}'
                        .format(nb_params))
            except (ImportError, TypeError):  # not a callable object or no signature
                pass

    def typecheck_pwl_function(self, pwl):
        if not isinstance(pwl, PwlFunction):
            self.fatal('Expecting piecewise-linear function, {0!r} was passed', pwl)

    def check_duplicate_name(self, name, name_table, qualifier):
        if name_table is not None:
            if name in name_table:
                self.warning("Duplicate {2} name: {0!s}, used for: {1}", name, name_table[name], qualifier)
class DummyTypeChecker(DOcplexLoggerTypeChecker):
    """No-op checker used when type checking is switched off ("off"/"deploy").

    Every check is a pass-through; sequence checks return their argument
    unchanged, so iterator arguments are NOT materialized here.
    """

    # noinspection PyUnusedLocal
    def __init__(self, logger):
        super(DummyTypeChecker, self).__init__(logger)

    @property
    def name(self):
        return "off"

    def check_new_variable_bound(self):
        return False

    def typecheck_iterable(self, arg):
        pass  # pragma: no cover

    def typecheck_valid_index(self, arg):
        pass  # pragma: no cover

    def typecheck_vartype(self, arg):
        pass  # pragma: no cover

    def typecheck_var(self, obj, vartype=None):
        pass  # pragma: no cover

    def typecheck_var_seq(self, seq, vtype=None, caller=None):
        return seq  # pragma: no cover

    def typecheck_num_seq(self, seq, caller=None):
        return seq  # pragma: no cover

    def typecheck_var_seq_all_different(self, seq):
        return seq

    def typecheck_operand(self, obj, accept_numbers=True, caller=None):
        pass  # pragma: no cover

    def typecheck_constraint(self, obj):
        pass  # pragma: no cover

    def typecheck_ct_to_add(self, ct, mdl, caller):
        pass  # pragma: no cover

    def typecheck_ct_not_added(self, ct, do_raise=False, caller=None):
        pass

    def typecheck_cts_added_to_model(self, mdl, cts, caller=None):
        return cts

    def typecheck_ct_added_to_model(self, mdl, ct, caller=None):
        pass

    def typecheck_linear_constraint(self, obj, accept_range=True):
        pass  # pragma: no cover

    def typecheck_constraint_seq(self, cts, check_linear=False, accept_range=True):
        # must return sequence unchanged
        return cts  # pragma: no cover

    def typecheck_logical_constraint_seq(self, cts, true_if_equivalence):
        # must return sequence unchanged
        return cts  # pragma: no cover

    def typecheck_quadratic_constraint_seq(self, cts):
        # must return sequence unchanged
        return cts  # pragma: no cover

    def typecheck_linear_constraint_name_tuple_seq(self, ct_ctname_seq, accept_range=True):
        # must return sequence unchanged
        return ct_ctname_seq  # pragma: no cover

    def typecheck_zero_or_one(self, arg):
        pass  # pragma: no cover

    def typecheck_num(self, arg, caller=None):
        pass  # pragma: no cover

    def typecheck_int(self, arg, accept_negative=True, caller=None):
        pass  # pragma: no cover

    def check_vars_domain(self, lbs, ubs, names):
        # do nothing on variable bounds
        pass

    def check_var_domain(self, lb, ub, varname):
        pass

    def typecheck_string(self, arg, accept_empty=False, accept_none=False, caller=''):
        pass  # pragma: no cover

    def typecheck_string_seq(self, arg, accept_empty=False, accept_none=False, caller=''):
        return arg

    def typecheck_in_model(self, model, mobj, caller=''):
        pass  # pragma: no cover

    def typecheck_key_seq(self, keys, accept_empty_seq=False):
        pass  # pragma: no cover

    def typecheck_progress_listener(self, arg):
        pass  # pragma: no cover

    def typecheck_two_in_model(self, model, obj1, obj2, ctx_msg):
        pass  # pragma: no cover

    def check_ordered_sequence(self, arg, caller, accept_iterator=True):
        pass  # pragma: no cover

    def check_trivial_constraints(self):
        return False

    def get_number_validation_fn(self):
        # None means: no per-number validation is performed at all.
        return None

    def check_solution_hook(self, mdl, sol_hook_fn):
        pass

    def typecheck_pwl_function(self, pwl):
        pass

    def check_duplicate_name(self, name, name_table, qualifier):
        pass

    def typecheck_logical_op(self, arg, caller):
        pass
class NumericTypeChecker(DummyTypeChecker):
    """Checker that skips type checks but validates numbers.

    Inherits all no-ops from DummyTypeChecker, then re-enables the numeric
    checks with check_math=True so NaN and infinite values are rejected.
    """

    def __init__(self, logger):
        super(NumericTypeChecker, self).__init__(logger)

    @property
    def name(self):
        return "numeric"

    def check_new_variable_bound(self):
        return True

    def get_number_validation_fn(self):
        return DocplexNumericCheckerMixin.static_validate_num2

    def typecheck_num(self, arg, caller=None):
        DocplexNumericCheckerMixin.typecheck_num(self._logger, arg, check_math=True, caller=caller)

    def typecheck_int(self, arg, accept_negative=True, caller=None):
        DocplexNumericCheckerMixin.typecheck_int(self._logger, arg, check_math=True, accept_negative=accept_negative,
                                                 caller=caller)

    def typecheck_num_seq(self, seq, caller=None):
        return DocplexNumericCheckerMixin.typecheck_num_seq(self._logger, seq, check_math=True, caller=caller)
class FullTypeChecker(StandardTypeChecker):
    """Strictest checker: all standard checks, plus NaN/infinity screening of
    numbers and duplicate-key detection for variable key sequences."""

    def __init__(self, logger):
        super(FullTypeChecker, self).__init__(logger)

    @property
    def name(self):
        return "full"

    def get_number_validation_fn(self):
        return DocplexNumericCheckerMixin.static_validate_num2

    def typecheck_num(self, arg, caller=None):
        DocplexNumericCheckerMixin.typecheck_num(self._logger, arg, check_math=True, caller=caller)

    def typecheck_int(self, arg, accept_negative=True, caller=None):
        DocplexNumericCheckerMixin.typecheck_int(
            self._logger, arg, check_math=True,
            accept_negative=accept_negative, caller=caller)

    def typecheck_num_seq(self, seq, caller=None):
        return DocplexNumericCheckerMixin.typecheck_num_seq(self._logger, seq, check_math=True, caller=caller)

    def check_for_duplicate_keys(self, keys, caller=None):
        # Fast path: when all keys are distinct there is nothing to report.
        if len(set(keys)) == len(keys):
            return
        # Walk the keys in order so the first duplicate is the one reported.
        seen = set()
        for key in keys:
            if key in seen:
                prefix = resolve_caller_as_string(caller, sep=' ')
                self.fatal("{0}Duplicated key: {1!s}".format(prefix, key))
            seen.add(key)
# ------------------------------
# Maps user-facing checker names (as accepted by get_typechecker) to the
# checker class implementing that level of validation.  Several aliases map
# to the same class: "on"-like names -> standard, "off"-like names -> no-op.
# noinspection PyPep8
_tck_map = {'default': StandardTypeChecker,
            'standard': StandardTypeChecker,
            'std': StandardTypeChecker,
            'on': StandardTypeChecker,
            # --
            'numeric': NumericTypeChecker,
            'full': FullTypeChecker,
            # --
            'off': DummyTypeChecker,
            'deploy': DummyTypeChecker,
            'no_checks': DummyTypeChecker}
def get_typechecker(arg, logger):
    """Instantiate the type checker selected by *arg*.

    :param arg: checker name (case-insensitive key of ``_tck_map``); a falsy
        value selects the standard checker.
    :param logger: logger passed to the checker constructor; also used to
        report an unknown checker name (falling back to print when None).
    :return: a new checker instance.
    """
    if not arg:
        return StandardTypeChecker(logger)
    key = arg.lower()
    checker_type = _tck_map.get(key)
    if checker_type is None:
        # Unknown name: warn and fall back to the standard checker.
        msg = 'Unexpected typechecker key: {0} - expecting on|off|std|default|numeric|full. Using default'.format(
            key)
        if logger:
            logger.error(msg)
        else:
            print('*Warning: {0}'.format(msg))
        checker_type = StandardTypeChecker
    return checker_type(logger)
| 24,684 | 3,053 | 1,393 |
895d5c7ec5c22176fbc7bef3c30c34da72d63571 | 2,275 | py | Python | examples/density.py | dwbullok/python-colormath | 4b218effd53a52da891bbbb60661426ef194d085 | [
"BSD-3-Clause"
] | 1 | 2019-06-10T20:06:31.000Z | 2019-06-10T20:06:31.000Z | examples/density.py | dwbullok/python-colormath | 4b218effd53a52da891bbbb60661426ef194d085 | [
"BSD-3-Clause"
] | null | null | null | examples/density.py | dwbullok/python-colormath | 4b218effd53a52da891bbbb60661426ef194d085 | [
"BSD-3-Clause"
] | null | null | null | """
This module shows you how to perform various kinds of density calculations.
"""
# Does some sys.path manipulation so we can run examples in-place.
# noinspection PyUnresolvedReferences
import example_config
from colormath.color_objects import SpectralColor
from colormath.density_standards import ANSI_STATUS_T_RED, ISO_VISUAL
# Sample spectral reflectance curve (380-730nm in 10nm steps) measured under
# a D50 illuminant with the 2-degree standard observer.
EXAMPLE_COLOR = SpectralColor(
    observer=2, illuminant='d50',
    spec_380nm=0.0600, spec_390nm=0.0600, spec_400nm=0.0641,
    spec_410nm=0.0654, spec_420nm=0.0645, spec_430nm=0.0605,
    spec_440nm=0.0562, spec_450nm=0.0543, spec_460nm=0.0537,
    spec_470nm=0.0541, spec_480nm=0.0559, spec_490nm=0.0603,
    spec_500nm=0.0651, spec_510nm=0.0680, spec_520nm=0.0705,
    spec_530nm=0.0736, spec_540nm=0.0772, spec_550nm=0.0809,
    spec_560nm=0.0870, spec_570nm=0.0990, spec_580nm=0.1128,
    spec_590nm=0.1251, spec_600nm=0.1360, spec_610nm=0.1439,
    spec_620nm=0.1511, spec_630nm=0.1590, spec_640nm=0.1688,
    spec_650nm=0.1828, spec_660nm=0.1996, spec_670nm=0.2187,
    spec_680nm=0.2397, spec_690nm=0.2618, spec_700nm=0.2852,
    spec_710nm=0.2500, spec_720nm=0.2400, spec_730nm=0.2300)
# Feel free to comment/un-comment examples as you please.
example_auto_status_t_density()
example_manual_status_t_density()
example_visual_density()
| 40.625 | 79 | 0.712967 | """
This module shows you how to perform various kinds of density calculations.
"""
# Does some sys.path manipulation so we can run examples in-place.
# noinspection PyUnresolvedReferences
import example_config
from colormath.color_objects import SpectralColor
from colormath.density_standards import ANSI_STATUS_T_RED, ISO_VISUAL
# Sample spectral reflectance curve (380-730nm in 10nm steps) measured under
# a D50 illuminant with the 2-degree standard observer.
EXAMPLE_COLOR = SpectralColor(
    observer=2, illuminant='d50',
    spec_380nm=0.0600, spec_390nm=0.0600, spec_400nm=0.0641,
    spec_410nm=0.0654, spec_420nm=0.0645, spec_430nm=0.0605,
    spec_440nm=0.0562, spec_450nm=0.0543, spec_460nm=0.0537,
    spec_470nm=0.0541, spec_480nm=0.0559, spec_490nm=0.0603,
    spec_500nm=0.0651, spec_510nm=0.0680, spec_520nm=0.0705,
    spec_530nm=0.0736, spec_540nm=0.0772, spec_550nm=0.0809,
    spec_560nm=0.0870, spec_570nm=0.0990, spec_580nm=0.1128,
    spec_590nm=0.1251, spec_600nm=0.1360, spec_610nm=0.1439,
    spec_620nm=0.1511, spec_630nm=0.1590, spec_640nm=0.1688,
    spec_650nm=0.1828, spec_660nm=0.1996, spec_670nm=0.2187,
    spec_680nm=0.2397, spec_690nm=0.2618, spec_700nm=0.2852,
    spec_710nm=0.2500, spec_720nm=0.2400, spec_730nm=0.2300)
def example_auto_status_t_density():
    """Demonstrate ANSI Status T density with an automatically chosen filter."""
    print("=== Example: Automatic Status T Density ===")
    # calc_density() without arguments defaults to ANSI Status T and picks
    # the appropriate RGB "filter" band on its own.
    density = EXAMPLE_COLOR.calc_density()
    print("Density: %f" % density)
    print("=== End Example ===\n")
def example_manual_status_t_density():
    """Demonstrate requesting a specific ANSI Status T band (red) explicitly."""
    print("=== Example: Manual Status T Density ===")
    # Explicitly ask for the red band of the ANSI Status T specification.
    red_density = EXAMPLE_COLOR.calc_density(density_standard=ANSI_STATUS_T_RED)
    print("Density: %f (Red)" % red_density)
    print("=== End Example ===\n")
def example_visual_density():
    """Demonstrate density computed against the ISO Visual spectral standard."""
    print("=== Example: Visual Density ===")
    visual_density = EXAMPLE_COLOR.calc_density(density_standard=ISO_VISUAL)
    print("Density: %f" % visual_density)
    print("=== End Example ===\n")
# Feel free to comment/un-comment examples as you please.
# Each call prints a banner, the computed density, and a closing banner.
example_auto_status_t_density()
example_manual_status_t_density()
example_visual_density()
| 879 | 0 | 75 |
461953fe324f3ff8e9df49cd3b2a5efd8490471c | 7,744 | py | Python | addon_common/common/fsm.py | senjacob/retopoflow | 7817bb7d68f98e5ae2c7835f28eeafe76367789e | [
"OML"
] | null | null | null | addon_common/common/fsm.py | senjacob/retopoflow | 7817bb7d68f98e5ae2c7835f28eeafe76367789e | [
"OML"
] | null | null | null | addon_common/common/fsm.py | senjacob/retopoflow | 7817bb7d68f98e5ae2c7835f28eeafe76367789e | [
"OML"
] | null | null | null | '''
Copyright (C) 2021 CG Cookie
https://github.com/CGCookie/retopoflow
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import inspect
from .debug import ExceptionHandler
from .debug import debugger
from .utils import find_fns
| 39.510204 | 121 | 0.545584 | '''
Copyright (C) 2021 CG Cookie
https://github.com/CGCookie/retopoflow
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import inspect
from .debug import ExceptionHandler
from .debug import debugger
from .utils import find_fns
def get_state(state, substate):
    """Build the "<state>__<substate>" key used to index FSM handler tables."""
    return str(state) + '__' + str(substate)
class FSM:
    """A small finite-state-machine driver.

    State handlers are methods of a target object decorated with the
    ``FSM_State`` decorator produced by :meth:`_create_wrapper`; they are
    discovered via ``find_fns`` in :meth:`init`.  Each state may define
    substates ('main', 'enter', 'exit', 'can enter', 'can exit') keyed as
    ``"<state>__<substate>"`` (see ``get_state``).  Exceptions raised inside
    handlers are routed to an :class:`ExceptionHandler` and force the machine
    back to its reset state.
    """

    def __init__(self):
        self.wrapper = self._create_wrapper()
        self.onlyinstate_wrapper = self._create_onlyinstate_wrapper()
        self._exceptionhandler = ExceptionHandler()

    def add_exception_callback(self, fn, universal=True):
        """Register a callback invoked whenever a handler raises."""
        self._exceptionhandler.add_callback(fn, universal=universal)

    def _create_wrapper(self):
        """Create the ``FSM_State(state, substate)`` decorator bound to this FSM."""
        fsm = self
        seen = {}  # fnname -> "file:line", to warn on duplicate handler names

        class FSM_State:
            def __init__(self, state, substate='main'):
                self.state = state
                self.substate = substate

            def __call__(self, fn):
                self.fn = fn
                self.fnname = fn.__name__
                # Record where the decorated function was defined, so duplicate
                # names can be reported with both locations.
                fr = inspect.getframeinfo(inspect.currentframe().f_back)
                fndata = f'{fr.filename}:{fr.lineno}'
                if self.fnname in seen:
                    print(f'FSM Warning: detected multiple functions with same name: "{self.fnname}"')
                    print(f'  pre: {seen[self.fnname]}')
                    print(f'  cur: {fndata}')
                seen[self.fnname] = fndata

                def run(*args, **kwargs):
                    try:
                        return fn(*args, **kwargs)
                    except Exception as e:
                        print('Caught exception in function "%s" (state:"%s", substate:"%s")' % (
                            self.fnname, self.state, self.substate
                        ))
                        debugger.print_exception()
                        print(e)
                        fsm._exceptionhandler.handle_exception(e)
                        # recover by jumping back to the reset state
                        fsm.force_set_state(fsm._reset_state, call_exit=False, call_enter=True)
                        return
                run.fnname = self.fnname
                run.fsmstate = self.state
                run.fsmstate_full = get_state(self.state, self.substate)
                return run
        return FSM_State

    def _create_onlyinstate_wrapper(self):
        """Create the ``FSM_OnlyInState(states, default)`` decorator: the wrapped
        function only runs while the FSM is in one of *states*, otherwise the
        *default* value is returned."""
        fsm = self

        class FSM_OnlyInState:
            def __init__(self, states, default=None):
                if type(states) is str:
                    states = {states}
                else:
                    states = set(states)
                self.states = states
                self.default = default

            def __call__(self, fn):
                self.fn = fn
                self.fnname = fn.__name__

                def run(*args, **kwargs):
                    if fsm.state not in self.states:
                        return self.default
                    try:
                        return fn(*args, **kwargs)
                    except Exception as e:
                        print('Caught exception in function "%s" ("%s")' % (
                            self.fnname, fsm.state
                        ))
                        debugger.print_exception()
                        print(e)
                        fsm._exceptionhandler.handle_exception(e)
                        fsm.force_set_state(fsm._reset_state, call_exit=False, call_enter=True)
                        return self.default
                run.fnname = self.fnname
                run.fsmstate = ' '.join(self.states)
                return run
        return FSM_OnlyInState

    def init(self, obj, start='main', reset_state='main'):
        """Bind the FSM to *obj*, collecting its decorated state handlers.

        :param obj: object whose decorated methods implement the states.
        :param start: state entered on the first :meth:`update`.
        :param reset_state: state forced after an unhandled exception.
        """
        self._obj = obj
        self._state_next = start
        self._state = None
        self._reset_state = reset_state
        self._fsm_states = {}
        self._fsm_states_handled = {st for (st, fn) in find_fns(self._obj, 'fsmstate')}
        for (m, fn) in find_fns(self._obj, 'fsmstate_full'):
            assert m not in self._fsm_states, 'Duplicate states registered!'
            self._fsm_states[m] = fn

    def _call(self, state, substate='main', fail_if_not_exist=False):
        """Invoke the handler for (state, substate) if registered.

        Returns the handler's result, or None when missing/raising."""
        s = get_state(state, substate)
        if s not in self._fsm_states:
            assert not fail_if_not_exist, 'Could not find state "%s" with substate "%s" (%s)' % (state, substate, str(s))
            return
        try:
            return self._fsm_states[s](self._obj)
        except Exception as e:
            print('Caught exception in state ("%s")' % (s))
            debugger.print_exception()
            self._exceptionhandler.handle_exception(e)
            return

    def update(self):
        """Run one FSM step.

        Performs any pending transition (honoring 'can exit'/'can enter'
        vetoes and firing 'exit'/'enter' handlers), then calls the current
        state's main handler.  The handler may return None (stay), a state
        name (transition), or a tuple possibly containing one state name plus
        extra values that are passed back to the caller.
        """
        if self._state_next is not None and self._state_next != self._state:
            if self._call(self._state, substate='can exit') == False:
                # current state vetoed the transition
                self._state_next = None
                return
            if self._call(self._state_next, substate='can enter') == False:
                # target state vetoed the transition
                self._state_next = None
                return
            self._call(self._state, substate='exit')
            self._state = self._state_next
            self._call(self._state, substate='enter')

        ret = self._call(self._state, fail_if_not_exist=True)

        if ret is None:
            self._state_next = ret
            ret = None
        elif type(ret) is str:
            if self.is_state(ret):
                self._state_next = ret
                ret = None
            else:
                self._state_next = None
                ret = ret
        elif type(ret) is tuple:
            st = {s for s in ret if self.is_state(s)}
            if len(st) == 0:
                self._state_next = None
                ret = ret
            elif len(st) == 1:
                # BUGFIX: sets do not support next(); take the single element
                # via an explicit iterator.
                self._state_next = next(iter(st))
                # BUGFIX: tuple - set is not a defined operation; filter the
                # tuple to keep the non-state values, preserving their order.
                ret = tuple(r for r in ret if r not in st)
            else:
                assert False, 'unhandled FSM return value "%s"' % str(ret)
        else:
            assert False, 'unhandled FSM return value "%s"' % str(ret)

        return ret

    def is_state(self, state):
        """Return True when *state* names a registered handler."""
        return state in self._fsm_states_handled

    @property
    def state(self):
        # current state name (None before the first update)
        return self._state

    def force_set_state(self, state, call_exit=False, call_enter=True):
        """Jump straight to *state*, bypassing the 'can exit'/'can enter' vetoes."""
        if call_exit: self._call(self._state, substate='exit')
        self._state = state
        self._state_next = state
        if call_enter: self._call(self._state, substate='enter')
def FSMClass(cls):
    """Class decorator: attach a shared FSM instance plus its state decorator.

    After decoration, ``cls.fsm`` is the machine and ``cls.FSM_State`` is the
    decorator used to tag state-handler methods.
    """
    machine = FSM()
    cls.fsm = machine
    cls.FSM_State = machine.wrapper
    return cls
# https://krzysztofzuraw.com/blog/2016/python-class-decorators.html
# class Wrapper(object):
# def __init__(self, *args, **kwargs):
# self._wrapped = cls(*args, **kwargs)
| 6,565 | 272 | 69 |
9c0404f68429c409eb8f0997baf0db54ee2504c3 | 1,147 | py | Python | pymoira/errors.py | vasilvv/pymoira | 27facba223c220bf4c55d9b7e799a2ff951eef13 | [
"MIT"
] | 2 | 2015-11-13T23:03:12.000Z | 2015-12-19T02:43:20.000Z | pymoira/errors.py | vasilvv/pymoira | 27facba223c220bf4c55d9b7e799a2ff951eef13 | [
"MIT"
] | null | null | null | pymoira/errors.py | vasilvv/pymoira | 27facba223c220bf4c55d9b7e799a2ff951eef13 | [
"MIT"
] | 1 | 2019-10-23T06:26:13.000Z | 2019-10-23T06:26:13.000Z | #
## PyMoira client library
##
## This file contains the Moira-related errors.
#
from . import constants
class BaseError(Exception):
    """Root of the library's exception hierarchy; every exception raised by
    this package derives from it.
    """
class ConnectionError(BaseError):
    """Raised when a meaningful dialogue with the server cannot be started
    or continued (parsing failure, connection failure, etc).

    NOTE(review): shadows the Python 3 builtin ``ConnectionError``; kept as-is
    for API compatibility.
    """
class MoiraError(BaseError):
    """An error returned from Moira server itself which has a Moira error code.

    The docstring promises a Moira error code, but this class stored nothing:
    its constructor was missing.  Restored so ``code`` is kept for
    programmatic inspection and the message is looked up in the error table.
    """
    def __init__(self, code):
        # Keep the numeric code so callers can branch on it.
        self.code = code
        if code in constants.errors:
            BaseError.__init__(self, "Moira error: %s" % constants.errors[code])
        else:
            BaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(BaseError):
    """Raised when the Moira MOTD is not empty."""
class UserError(BaseError):
    """A Moira-related error that did not originate from the server itself."""
class AuthenticationError(BaseError):
    """Raised when something goes wrong during the authentication process."""
| 25.488889 | 82 | 0.67306 | #
## PyMoira client library
##
## This file contains the Moira-related errors.
#
from . import constants
class BaseError(Exception):
    """Root of the library's exception hierarchy; every exception raised by
    this package derives from it.
    """
class ConnectionError(BaseError):
    """Raised when a meaningful dialogue with the server cannot be started
    or continued (parsing failure, connection failure, etc).

    NOTE(review): shadows the Python 3 builtin ``ConnectionError``; kept as-is
    for API compatibility.
    """
class MoiraError(BaseError):
    """An error returned from the Moira server itself, carrying a numeric
    Moira error code (available as ``self.code``).
    """
    def __init__(self, code):
        self.code = code
        # Build the human-readable message, then hand it to the base class.
        if code in constants.errors:
            message = "Moira error: %s" % constants.errors[code]
        else:
            message = "Unknown Moira error (code %i)" % code
        BaseError.__init__(self, message)
class MoiraUnavailableError(BaseError):
    """Raised when the Moira MOTD is not empty."""
class UserError(BaseError):
    """A Moira-related error that did not originate from the server itself."""
class AuthenticationError(BaseError):
    """Raised when something goes wrong during the authentication process."""
| 247 | 0 | 31 |
03baf1ba8ce7c71522b48fe54bd648527baf03d1 | 1,489 | py | Python | yahoo_fantasy_bot/tests/conftest.py | spilchen/yahoo-baseball-assistant | a2bed8059cd9201b145d83f5dca20e1b9f4c67e8 | [
"MIT"
] | 5 | 2020-03-13T09:13:08.000Z | 2020-12-24T18:47:12.000Z | yahoo_fantasy_bot/tests/conftest.py | spilchen/yahoo-baseball-assistant | a2bed8059cd9201b145d83f5dca20e1b9f4c67e8 | [
"MIT"
] | 1 | 2021-01-18T12:59:12.000Z | 2021-01-19T03:01:12.000Z | yahoo_fantasy_bot/tests/conftest.py | spilchen/yahoo-baseball-assistant | a2bed8059cd9201b145d83f5dca20e1b9f4c67e8 | [
"MIT"
] | 2 | 2019-11-19T21:13:52.000Z | 2020-12-24T18:47:18.000Z | #!/usr/bin/python
import pytest
import pandas as pd
import numpy as np
from yahoo_fantasy_bot import roster
RBLDR_COLS = ["player_id", "name", "eligible_positions", "selected_position"]
RSEL_COLS = ["player_id", "name", "HR", "OBP", "W", "ERA"]
@pytest.fixture
def empty_roster():
    """Yield an empty roster.Container."""
    rcont = roster.Container()
    yield rcont
@pytest.fixture
def bldr():
    """Yield a roster.Builder configured with a standard lineup."""
    b = roster.Builder(["C", "1B", "2B", "SS", "3B", "LF", "CF", "RF", "Util",
                        "SP", "SP", "SP", "SP", "SP",
                        "RP", "RP", "RP", "RP", "RP"])
    yield b
@pytest.fixture
def fake_player_selector():
    """Yield a roster.PlayerSelector over a fixed 15-player pool.

    Rows 1-10 are batters (HR/OBP populated); rows 11-15 are pitchers
    (W/ERA populated); unused stats are NaN.  Columns follow RSEL_COLS.
    These three decorators had lost their function bodies (syntax error);
    bodies restored.
    """
    player_pool = pd.DataFrame(
        [[1, "Borders", 15, 0.319, np.nan, np.nan],
         [2, "Lee", 6, 0.288, np.nan, np.nan],
         [3, "McGriff", 35, 0.400, np.nan, np.nan],
         [4, "Fernandez", 4, 0.352, np.nan, np.nan],
         [5, "Gruber", 31, 0.330, np.nan, np.nan],
         [6, "Bell", 21, 0.303, np.nan, np.nan],
         [7, "Wilson", 3, 0.300, np.nan, np.nan],
         [8, "Felix", 15, 0.318, np.nan, np.nan],
         [9, "Olerud", 14, 0.364, np.nan, np.nan],
         [10, "Hill", 12, 0.281, np.nan, np.nan],
         [11, "Steib", np.nan, np.nan, 18, 2.93],
         [12, "Stottlemyre", np.nan, np.nan, 13, 4.34],
         [13, "Wells", np.nan, np.nan, 11, 3.14],
         [14, "Key", np.nan, np.nan, 13, 4.25],
         [15, "Cerutti", np.nan, np.nan, 9, 4.76]], columns=RSEL_COLS)
    plyr_sel = roster.PlayerSelector(player_pool)
    yield plyr_sel
| 32.369565 | 78 | 0.523842 | #!/usr/bin/python
import pytest
import pandas as pd
import numpy as np
from yahoo_fantasy_bot import roster
RBLDR_COLS = ["player_id", "name", "eligible_positions", "selected_position"]
RSEL_COLS = ["player_id", "name", "HR", "OBP", "W", "ERA"]
@pytest.fixture
def empty_roster():
    """Yield an empty roster.Container."""
    rcont = roster.Container()
    yield rcont
@pytest.fixture
def bldr():
    """Yield a roster.Builder configured with a standard position list."""
    b = roster.Builder(["C", "1B", "2B", "SS", "3B", "LF", "CF", "RF", "Util",
                        "SP", "SP", "SP", "SP", "SP",
                        "RP", "RP", "RP", "RP", "RP"])
    yield b
@pytest.fixture
def fake_player_selector():
    """Yield a roster.PlayerSelector over a fixed 15-player pool.

    Rows 1-10 are batters (HR/OBP populated); rows 11-15 are pitchers
    (W/ERA populated); the stats that don't apply are NaN.  Column names
    come from the module-level RSEL_COLS.
    """
    player_pool = pd.DataFrame(
        [[1, "Borders", 15, 0.319, np.nan, np.nan],
         [2, "Lee", 6, 0.288, np.nan, np.nan],
         [3, "McGriff", 35, 0.400, np.nan, np.nan],
         [4, "Fernandez", 4, 0.352, np.nan, np.nan],
         [5, "Gruber", 31, 0.330, np.nan, np.nan],
         [6, "Bell", 21, 0.303, np.nan, np.nan],
         [7, "Wilson", 3, 0.300, np.nan, np.nan],
         [8, "Felix", 15, 0.318, np.nan, np.nan],
         [9, "Olerud", 14, 0.364, np.nan, np.nan],
         [10, "Hill", 12, 0.281, np.nan, np.nan],
         [11, "Steib", np.nan, np.nan, 18, 2.93],
         [12, "Stottlemyre", np.nan, np.nan, 13, 4.34],
         [13, "Wells", np.nan, np.nan, 11, 3.14],
         [14, "Key", np.nan, np.nan, 13, 4.25],
         [15, "Cerutti", np.nan, np.nan, 9, 4.76]], columns=RSEL_COLS)
    plyr_sel = roster.PlayerSelector(player_pool)
    yield plyr_sel
| 1,122 | 0 | 66 |
f91f71824a7c23cd535e3952e8e8887c71192c65 | 2,202 | py | Python | dataStructure/graph/graph_table.py | bigfoolliu/liu_aistuff | aa661d37c05c257ee293285dd0868fb7e8227628 | [
"MIT"
] | 1 | 2019-11-25T07:23:42.000Z | 2019-11-25T07:23:42.000Z | dataStructure/graph/graph_table.py | bigfoolliu/liu_aistuff | aa661d37c05c257ee293285dd0868fb7e8227628 | [
"MIT"
] | 13 | 2020-01-07T16:09:47.000Z | 2022-03-02T12:51:44.000Z | dataStructure/graph/graph_table.py | bigfoolliu/liu_aistuff | aa661d37c05c257ee293285dd0868fb7e8227628 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
# author: bigfoolliu
"""
使用邻接表表示图
A --> B
A --> C
B --> C
B --> D
C --> D
D --> C
E --> F
F --> C
"""
def find_one_path(graph, start, end, path=[]):
    """
    Return one cycle-free path from *start* to *end* in *graph*, or None
    when no such path exists.
    graph: adjacency dict mapping node -> list of neighbour nodes
    start: str
    end: str
    path: internal accumulator used during recursion (never mutated)
    return: list of nodes, or None
    """
    path = path + [start]
    if start == end:
        return path
    if start not in graph:
        return None
    for node in graph[start]:
        if node not in path:  # keep the path free of repeated nodes
            new_path = find_one_path(graph, node, end, path)
            if new_path:
                return new_path
    # BUG FIX: previously returned the partial `path` here, so an
    # unreachable `end` yielded a bogus path instead of None (see the
    # TODO note in the __main__ demo).
    return None
def find_all_paths(graph, start, end, path=[]):
    """
    Return every cycle-free path from *start* to *end* in *graph*.
    graph: adjacency dict mapping node -> list of neighbour nodes
    start: str
    end: str
    return: list of paths, each a list of nodes ([] when none exist)
    """
    path = path + [start]
    if start == end:
        return [path]
    if start not in graph:
        return []
    collected = []
    for neighbour in graph[start]:
        if neighbour in path:
            continue  # skip nodes already on this path (no cycles)
        collected.extend(find_all_paths(graph, neighbour, end, path))
    return collected
def find_shortest_path(graph, start, end, path=[]):
    """
    Return the shortest cycle-free path from *start* to *end* in *graph*,
    or None when *end* is unreachable.  Ties keep the first path found.
    graph: adjacency dict mapping node -> list of neighbour nodes
    return: list of nodes, or None
    """
    path = path + [start]
    if start == end:
        return path
    if start not in graph:
        return None
    best = None
    for neighbour in graph[start]:
        if neighbour in path:
            continue
        candidate = find_shortest_path(graph, neighbour, end, path)
        if not candidate:
            continue
        if best is None or len(candidate) < len(best):
            best = candidate
    return best
if __name__ == "__main__":
# 使邻接表定义一个有向图
graph = {
"A": ["B", "C"],
"B": ["C", "D"],
"C": ["D"],
"D": ["C"],
"E": ["F"],
"F": ["C"]
}
print(find_one_path(graph, "A", "D")) # 结果正确
print(find_one_path(graph, "B", "F")) # TODO: 结果应该报错或者怎样
print(find_all_paths(graph, "A", "D"))
print(find_shortest_path(graph, "A", "D"))
| 21.80198 | 96 | 0.536331 | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
# author: bigfoolliu
"""
使用邻接表表示图
A --> B
A --> C
B --> C
B --> D
C --> D
D --> C
E --> F
F --> C
"""
def find_one_path(graph, start, end, path=[]):
    """
    Return one cycle-free path from *start* to *end* in *graph*, or None
    when no such path exists.
    graph: adjacency dict mapping node -> list of neighbour nodes
    start: str
    end: str
    path: internal accumulator used during recursion (never mutated)
    return: list of nodes, or None
    """
    path = path + [start]
    if start == end:
        return path
    if start not in graph:
        return None
    for node in graph[start]:
        if node not in path:  # keep the path free of repeated nodes
            new_path = find_one_path(graph, node, end, path)
            if new_path:
                return new_path
    # BUG FIX: previously returned the partial `path` here, so an
    # unreachable `end` yielded a bogus path instead of None (see the
    # TODO note in the __main__ demo).
    return None
def find_all_paths(graph, start, end, path=[]):
    """
    Return every cycle-free path from *start* to *end* in *graph*.
    graph: adjacency dict mapping node -> list of neighbour nodes
    start: str
    end: str
    return: list of paths, each a list of nodes ([] when none exist)
    """
    path = path + [start]
    if start == end:
        return [path]
    if start not in graph:
        return []
    collected = []
    for neighbour in graph[start]:
        if neighbour in path:
            continue  # skip nodes already on this path (no cycles)
        collected.extend(find_all_paths(graph, neighbour, end, path))
    return collected
def find_shortest_path(graph, start, end, path=[]):
    """
    Return the shortest cycle-free path from *start* to *end* in *graph*,
    or None when *end* is unreachable.  Ties keep the first path found.
    graph: adjacency dict mapping node -> list of neighbour nodes
    return: list of nodes, or None
    """
    path = path + [start]
    if start == end:
        return path
    if start not in graph:
        return None
    best = None
    for neighbour in graph[start]:
        if neighbour in path:
            continue
        candidate = find_shortest_path(graph, neighbour, end, path)
        if not candidate:
            continue
        if best is None or len(candidate) < len(best):
            best = candidate
    return best
if __name__ == "__main__":
# 使邻接表定义一个有向图
graph = {
"A": ["B", "C"],
"B": ["C", "D"],
"C": ["D"],
"D": ["C"],
"E": ["F"],
"F": ["C"]
}
print(find_one_path(graph, "A", "D")) # 结果正确
print(find_one_path(graph, "B", "F")) # TODO: 结果应该报错或者怎样
print(find_all_paths(graph, "A", "D"))
print(find_shortest_path(graph, "A", "D"))
| 0 | 0 | 0 |
2d47f11252fc21b9bd13eeb20e404a8ba4f7d27d | 2,340 | py | Python | MyScripts/093-Examples-2nd-Order-Free-Response.py | diegoomataix/Curso_AeroPython | c2cf71a938062bc70dbbf7c2f21e09653fa2cedd | [
"CC-BY-4.0"
] | null | null | null | MyScripts/093-Examples-2nd-Order-Free-Response.py | diegoomataix/Curso_AeroPython | c2cf71a938062bc70dbbf7c2f21e09653fa2cedd | [
"CC-BY-4.0"
] | null | null | null | MyScripts/093-Examples-2nd-Order-Free-Response.py | diegoomataix/Curso_AeroPython | c2cf71a938062bc70dbbf7c2f21e09653fa2cedd | [
"CC-BY-4.0"
] | null | null | null | ###________________________ 2nd-Order-Free-Response ________________________###
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
matplotlib.rcParams.update({'font.size': 22})
##__________ Functions for dumping characteristic cases __________##
##__________ 2nd order free response __________##
for xii in [1.5, 1.85, 2.5]:
# for xii in [0.00, 1.00, 2.50]:
for wnn in [1, 3, 5]:
for x00 in [-1, -0.5, 0]:
for x_dot00 in [1.5, 3.6, 4.5]:
plot_2order_free_resp(xii, wnn, x00, x_dot00)
| 27.857143 | 84 | 0.520085 | ###________________________ 2nd-Order-Free-Response ________________________###
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
matplotlib.rcParams.update({'font.size': 22})
##__________ Functions for dumping characteristic cases __________##
def undumped(wn, x0, x_dot0, t):
return x0 * np.cos(wn * t) + x_dot0 / wn * np.sin(wn * t)
def underdumped(xi, wn, x0, x_dot0, t):
wd = wn * np.sqrt(1 - xi**2)
x = (
np.exp(-xi*wn*t) *
(
x0 * np.cos(wd * t) +
(xi*wn*x0 + x_dot0) / wd * np.sin(wd * t)
)
)
return x
def critically_dumped(wn, x0, x_dot0, t):
return np.exp(-wn*t) * (x0 * (1 + wn * t) + x_dot0 * t)
def overdumped(xi, wn, x0, x_dot0, t):
a = xi * wn + wn * np.sqrt(xi**2 - 1)
b = xi * wn - wn * np.sqrt(xi**2 - 1)
x = (
(a * x0 + x_dot0) / (a - b) * np.exp(-b*t) -
(b * x0 + x_dot0) / (a - b) * np.exp(-a*t)
)
return x
##__________ 2nd order free response __________##
def plot_2order_free_resp(xi, wn, x0, x_dot0):
t = np.linspace(0, 10, 1000)
# Calculate selected
if np.isclose(xi, 0):
x = undumped(wn, x0, x_dot0, t)
elif np.isclose(xi, 1):
x = critically_dumped(wn, x0, x_dot0, t)
elif 0 < xi < 1:
x = underdumped(xi, wn, x0, x_dot0, t)
else:
x = overdumped(xi, wn, x0, x_dot0, t)
# Plot selected
plt.figure(figsize=(15, 8))
plt.plot(t, x, ls='-', lw=2, c='#b30000', label=f"$\\xi={xi:.2f}$")
# Critical dumping
x = critically_dumped(wn, x0, x_dot0, t)
plt.plot(t, x, ls='--', lw=4, alpha=0.7, c='#02818a', label="$\\xi=1.00$")
# No dumping
x = undumped(wn, x0, x_dot0, t)
# Formatting
plt.title("$\omega_n$ = {}, $x_0$ = {}, $\\dotx_0$ = {}".format(wn, x0, x_dot0))
plt.plot(t, x, ls='-', lw=4, alpha=0.5, c='#0570b0', label="$\\xi=0.00$")
plt.ylim(-1, 1)
plt.xlim(0,10)
plt.grid()
plt.xlabel('t')
plt.ylabel('x')
plt.legend(loc='upper right')
for xii in [1.5, 1.85, 2.5]:
# for xii in [0.00, 1.00, 2.50]:
for wnn in [1, 3, 5]:
for x00 in [-1, -0.5, 0]:
for x_dot00 in [1.5, 3.6, 4.5]:
plot_2order_free_resp(xii, wnn, x00, x_dot00)
| 1,662 | 0 | 113 |
146c202e3f52d8980f75ecf22eb3b1c864ef8011 | 2,066 | py | Python | py_headless_daw/processing/stream/mixer.py | hq9000/py-headless-daw | 33e08727c25d3f00b2556adf5f25c9f7ff4d4304 | [
"MIT"
] | 22 | 2020-06-09T18:46:56.000Z | 2021-09-28T02:11:42.000Z | py_headless_daw/processing/stream/mixer.py | hq9000/py-headless-daw | 33e08727c25d3f00b2556adf5f25c9f7ff4d4304 | [
"MIT"
] | 19 | 2020-06-03T06:34:57.000Z | 2021-01-26T07:36:17.000Z | py_headless_daw/processing/stream/mixer.py | hq9000/py-headless-daw | 33e08727c25d3f00b2556adf5f25c9f7ff4d4304 | [
"MIT"
] | 1 | 2020-06-18T09:25:21.000Z | 2020-06-18T09:25:21.000Z | from typing import List, Dict
import numpy as np
from em.platform.rendering.dto.time_interval import TimeInterval
from em.platform.rendering.schema.events.event import Event
from em.platform.rendering.schema.processing_strategy import ProcessingStrategy
| 35.016949 | 111 | 0.659729 | from typing import List, Dict
import numpy as np
from em.platform.rendering.dto.time_interval import TimeInterval
from em.platform.rendering.schema.events.event import Event
from em.platform.rendering.schema.processing_strategy import ProcessingStrategy
class Mixer(ProcessingStrategy):
    """Sums per-track stereo stream inputs into one stereo output,
    applying per-track gain and panning.

    Inputs are assumed grouped per track in output-channel order
    (track0-L, track0-R, track1-L, ...) -- inferred from the indexing in
    render(); confirm against callers.
    """
    def __init__(self):
        # track number -> gain factor; tracks absent here default to 1.0
        self.gains: Dict[int, float] = {}
        # track number -> panning; tracks absent here default to 0.0 (center)
        self.pannings: Dict[int, float] = {}
    def set_gain(self, track_number: int, gain: float):
        """Set the gain factor for a track."""
        self.gains[track_number] = gain
    def get_gain(self, track_number: int):
        """Return the gain previously set for a track (KeyError if unset)."""
        return self.gains[track_number]
    def set_panning(self, track_number: int, panning: float):
        """Set the panning for a track (positive attenuates the left
        channel, negative attenuates the right; presumably in [-1, 1] --
        confirm)."""
        self.pannings[track_number] = panning
    def render(self, interval: TimeInterval, stream_inputs: List[np.ndarray], stream_outputs: List[np.ndarray],
               event_inputs: List[List[Event]], event_outputs: List[List[Event]]):
        """Zero the outputs, then accumulate every input unit into the two
        output channels with its gain/panning applied.  Event inputs and
        outputs are ignored here.
        """
        num_input_stream_channels: int = len(stream_inputs)
        num_output_stream_channels: int = len(stream_outputs)
        for stream_output in stream_outputs:
            stream_output.fill(0.0)
        # assuming each unit provides the same number of channels to the mixer,
        # if it doesn't, everything will break
        num_input_units: int = int(num_input_stream_channels / num_output_stream_channels)
        for u in range(0, num_input_units):
            base_gain: float = self._get_gain(u)
            panning: float = self._get_panning(u)
            # positive panning reduces the left gain; negative reduces the right
            left_gain: float = base_gain * (1 - max([0, panning]))
            right_gain: float = base_gain * (1 + min([0, panning]))
            stream_outputs[0] += left_gain * stream_inputs[u * num_output_stream_channels + 0]
            stream_outputs[1] += right_gain * stream_inputs[u * num_output_stream_channels + 1]
    def _get_gain(self, u) -> float:
        """Gain for unit *u*, defaulting to 1.0 (unity) when unset."""
        if u in self.gains:
            return self.gains[u]
        else:
            return 1.0
    def _get_panning(self, u) -> float:
        """Panning for unit *u*, defaulting to 0.0 (centered) when unset."""
        if u in self.pannings:
            return self.pannings[u]
        else:
            return 0.0
| 1,586 | 11 | 212 |
60a55c07741b3506a72dc3654cdf78961a3b481c | 981 | py | Python | doubanfm/views/help_view.py | porson/douban.fm | 5c492bfef331f4f17ded1e17fdb704aefdfc0716 | [
"MIT"
] | 2 | 2017-02-08T04:08:03.000Z | 2017-02-08T04:08:08.000Z | doubanfm/views/help_view.py | Free-sky/douban.fm | 5c492bfef331f4f17ded1e17fdb704aefdfc0716 | [
"MIT"
] | null | null | null | doubanfm/views/help_view.py | Free-sky/douban.fm | 5c492bfef331f4f17ded1e17fdb704aefdfc0716 | [
"MIT"
] | null | null | null | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from doubanfm.views.lrc_view import Lrc
from doubanfm.dal.dal_help import HelpDal
class Help(Lrc):
"""帮助界面"""
| 27.25 | 73 | 0.62895 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
from doubanfm.views.lrc_view import Lrc
from doubanfm.dal.dal_help import HelpDal
class Help(Lrc):
    """Help screen view (帮助界面)."""
    def __init__(self, data):
        super(Help, self).__init__(data)
    def set_dal(self):
        """Pull the help content from the data-access layer into this view."""
        dal = HelpDal(self.data)
        self.c = dal.c  # theme
        self.set_title(dal.title)
        self.set_suffix_selected(dal.suffix_selected)
        self.set_lines(dal.lines)
    def display(self):
        """Refresh content and print the assembled screen."""
        self.set_dal()
        self.make_display_lines()
        print '\n'.join(self.display_lines)
    def make_display_lines(self):
        """Build self.display_lines: title, content, then blank padding to
        fill the remaining screen rows."""
        self.screen_height, self.screen_width = self.linesnum()  # rows/cols available on screen
        display_lines = ['']
        display_lines.append(self._title + '\r')
        display_lines.append('')
        display_lines.extend(self._lines)
        for i in range(self.screen_height - len(display_lines) - 1):
            display_lines.append('')
        self.display_lines = display_lines
| 727 | 0 | 107 |
8e4a944b85936c87ae204d5b7eda86c6a71e9905 | 392 | py | Python | utilities/mappings.py | PeterA182/liteSaber | 6560feb70fd23916c0188ba98a751f8fee99a18b | [
"MIT"
] | null | null | null | utilities/mappings.py | PeterA182/liteSaber | 6560feb70fd23916c0188ba98a751f8fee99a18b | [
"MIT"
] | null | null | null | utilities/mappings.py | PeterA182/liteSaber | 6560feb70fd23916c0188ba98a751f8fee99a18b | [
"MIT"
] | 1 | 2019-06-28T01:19:38.000Z | 2019-06-28T01:19:38.000Z | maps = {
'batting': {
'a': 'assists',
'ab': 'atbats',
'ao': 'air_outs',
'avg': 'batting_avg',
'bb': 'base_on_balls',
'cs': 'caught_stealing',
'e': 'error',
'gidp': 'ground_into_dp',
'go': 'ground_out',
'h': 'hit',
'hbp': 'hit_by_pitch',
'hr': 'home_run',
'lob': 'left_on_base'
}
} | 23.058824 | 33 | 0.415816 | maps = {
'batting': {
'a': 'assists',
'ab': 'atbats',
'ao': 'air_outs',
'avg': 'batting_avg',
'bb': 'base_on_balls',
'cs': 'caught_stealing',
'e': 'error',
'gidp': 'ground_into_dp',
'go': 'ground_out',
'h': 'hit',
'hbp': 'hit_by_pitch',
'hr': 'home_run',
'lob': 'left_on_base'
}
} | 0 | 0 | 0 |
58ad1179e2800ca6f9e07afcd2f2e47a8d128a3e | 2,698 | py | Python | tests/test_graphtools.py | HenryKenlay/grapht | 3ab2485c353f87a487ed5b70e5298121a924b56e | [
"Apache-2.0"
] | 1 | 2020-01-06T20:22:22.000Z | 2020-01-06T20:22:22.000Z | tests/test_graphtools.py | HenryKenlay/graphtools | 3ab2485c353f87a487ed5b70e5298121a924b56e | [
"Apache-2.0"
] | 2 | 2021-09-28T00:16:36.000Z | 2022-02-26T06:19:10.000Z | tests/test_graphtools.py | henrykenlay/grapht | 3ab2485c353f87a487ed5b70e5298121a924b56e | [
"Apache-2.0"
] | 1 | 2020-01-28T14:12:54.000Z | 2020-01-28T14:12:54.000Z | import networkx as nx
import numpy as np
import scipy.sparse as sp
from grapht.graphtools import *
G = nx.barabasi_albert_graph(100, 2)
G.add_edge(0, 1) # the initial condition of BA(n, 2) means it can have pendant edges, this stops that happening
G_with_pendant = G.copy()
G_with_pendant.add_node(100)
G_with_pendant.add_edge(0, 100)
G_with_isolate = G.copy()
G_with_isolate.add_node(100)
| 37.472222 | 111 | 0.668273 | import networkx as nx
import numpy as np
import scipy.sparse as sp
from grapht.graphtools import *
G = nx.barabasi_albert_graph(100, 2)
G.add_edge(0, 1) # the initial condition of BA(n, 2) means it can have pendant edges, this stops that happening
G_with_pendant = G.copy()
G_with_pendant.add_node(100)
G_with_pendant.add_edge(0, 100)
G_with_isolate = G.copy()
G_with_isolate.add_node(100)
def test_non_pendant_edges():
assert non_pendant_edges(G) == list(G.edges())
assert non_pendant_edges(G_with_isolate) == list(G_with_isolate.edges())
assert non_pendant_edges(G_with_pendant) != list(G_with_pendant.edges())
assert set(G_with_pendant.edges()) - set(non_pendant_edges(G_with_pendant)) == {(0, 100)}
def test_is_pendant():
assert is_pendant(G_with_pendant, (0, 100))
assert is_pendant(G_with_pendant, (100, 0))
assert np.sum([is_pendant(G, edge) for edge in G.edges()]) == 0
assert np.sum([is_pendant(G_with_isolate, edge) for edge in G.edges()]) == 0
assert np.sum([is_pendant(G_with_pendant, edge) for edge in G_with_pendant.edges()]) == 1
def test_has_isolated_nodes():
assert not has_isolated_nodes(G)
assert not has_isolated_nodes(G_with_pendant)
assert has_isolated_nodes(G_with_isolate)
def test_edges_removed():
G_with_edge = nx.Graph()
G_with_edge.add_nodes_from([0, 1])
G_no_edge = G_with_edge.copy()
G_with_edge.add_edge(0, 1)
assert edges_removed(G_with_edge, G_no_edge) == [(0, 1)]
assert edges_removed(G_no_edge, G_with_edge) == []
def test_laplacian():
G = nx.Graph()
G.add_nodes_from([0, 1, 2, 3])
G.add_edges_from([(0, 1), (1, 2), (2, 3), (1, 3)])
L = laplacian(G)
L_manual = np.eye(4)
L_manual[0][1] = L_manual[1][0] = -1/np.sqrt(3) #edge 0 -- 1
L_manual[1][2] = L_manual[2][1] = -1/np.sqrt(6) #edge 1 -- 2
L_manual[1][3] = L_manual[3][1] = -1/np.sqrt(6) #edge 1 -- 3
L_manual[2][3] = L_manual[3][2] = -1/np.sqrt(4) #edge 2 -- 3
assert np.allclose(L_manual, L.todense())
# isolated node case
G.remove_edge(0, 1)
L = laplacian(G)
L_manual[0][0] = 0
L_manual[0][1] = L_manual[1][0] = 0 #edge 0 -- 1
L_manual[1][2] = L_manual[2][1] = -1/np.sqrt(4) #edge 1 -- 2
L_manual[1][3] = L_manual[3][1] = -1/np.sqrt(4) #edge 1 -- 3
assert np.allclose(L_manual, L.todense())
# isolated node but with diag set to 1
L = laplacian(G, setdiag=True)
L_manual[0][0] = 1
assert np.allclose(L_manual, L.todense())
def test_sparse_is_symmetric():
A = sp.csr_matrix(np.random.rand(10, 10))
assert not sparse_is_symmetric(A)
A = np.random.rand(10, 10)
A = sp.csr_matrix(A + A.T)
assert sparse_is_symmetric(A) | 2,165 | 0 | 142 |
b0382d03044bf19f6588f3ec952aded129e7e5a7 | 408 | py | Python | keras/layers/recurrent.py | ikingye/keras | 1a3ee8441933fc007be6b2beb47af67998d50737 | [
"MIT"
] | 5 | 2020-11-30T22:26:03.000Z | 2020-12-01T22:34:25.000Z | keras/layers/recurrent.py | ikingye/keras | 1a3ee8441933fc007be6b2beb47af67998d50737 | [
"MIT"
] | 10 | 2020-12-01T22:55:29.000Z | 2020-12-11T18:31:46.000Z | keras/layers/recurrent.py | ikingye/keras | 1a3ee8441933fc007be6b2beb47af67998d50737 | [
"MIT"
] | 15 | 2020-11-30T22:12:22.000Z | 2020-12-09T01:32:48.000Z | """Recurrent layers and their base classes."""
from tensorflow.keras.layers import RNN
from tensorflow.keras.layers import StackedRNNCells
from tensorflow.keras.layers import SimpleRNN
from tensorflow.keras.layers import GRU
from tensorflow.keras.layers import LSTM
from tensorflow.keras.layers import SimpleRNNCell
from tensorflow.keras.layers import GRUCell
from tensorflow.keras.layers import LSTMCell
| 31.384615 | 51 | 0.843137 | """Recurrent layers and their base classes."""
from tensorflow.keras.layers import RNN
from tensorflow.keras.layers import StackedRNNCells
from tensorflow.keras.layers import SimpleRNN
from tensorflow.keras.layers import GRU
from tensorflow.keras.layers import LSTM
from tensorflow.keras.layers import SimpleRNNCell
from tensorflow.keras.layers import GRUCell
from tensorflow.keras.layers import LSTMCell
| 0 | 0 | 0 |
c0976b99b5d5a1ee80c9a0cfaa2ab28e201e771f | 64 | py | Python | Codewars/8kyu/my-head-is-at-the-wrong-end/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codewars/8kyu/my-head-is-at-the-wrong-end/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | Codewars/8kyu/my-head-is-at-the-wrong-end/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | # Python - 2.7.6 | 16 | 25 | 0.625 | # Python - 2.7.6
def fix_the_meerkat(arr):
    """Return *arr* reversed (head put back at the right end).

    Slicing preserves the input's sequence type (list, tuple, str).
    Fix: trailing dataset-delimiter junk fused onto the return line made
    the original a syntax error; removed.
    """
    return arr[::-1]
da72280ee299accaeded93bb0d38638389db5680 | 846 | py | Python | rotate.py | lstorchi/molsuperimpose | c6129e676b287040b17a7e5d5680176e2db361b6 | [
"Apache-2.0"
] | null | null | null | rotate.py | lstorchi/molsuperimpose | c6129e676b287040b17a7e5d5680176e2db361b6 | [
"Apache-2.0"
] | null | null | null | rotate.py | lstorchi/molsuperimpose | c6129e676b287040b17a7e5d5680176e2db361b6 | [
"Apache-2.0"
] | null | null | null | import sys
import pybel
filename = ""
verbose = False
if (len(sys.argv)) == 2:
filename = sys.argv[1]
else:
print "usage :", sys.argv[0] , " filename.xyz"
exit(1)
matrix = pybel.ob.matrix3x3()
matrix.RotAboutAxisByAngle(pybel.ob.vector3(1, 0, 0), 90)
if verbose:
for i in range(3):
for j in range(3):
line = "%10.5f "%(matrix.Get(i,j))
sys.stdout.write(line)
sys.stdout.write("\n")
imatrix = matrix.inverse()
if verbose:
print ""
for i in range(3):
for j in range(3):
line = "%10.5f "%(imatrix.Get(i,j))
sys.stdout.write(line)
sys.stdout.write("\n")
rotarray = pybel.ob.doubleArray(9)
matrix.GetArray(rotarray)
mol = pybel.readfile("xyz", filename).next()
mol.OBMol.Rotate(rotarray)
mol.OBMol.Translate(pybel.ob.vector3(1.0, 10.0, 3.0));
print mol.write("xyz")
| 21.15 | 57 | 0.620567 | import sys
import pybel
filename = ""
verbose = False
if (len(sys.argv)) == 2:
filename = sys.argv[1]
else:
print "usage :", sys.argv[0] , " filename.xyz"
exit(1)
matrix = pybel.ob.matrix3x3()
matrix.RotAboutAxisByAngle(pybel.ob.vector3(1, 0, 0), 90)
if verbose:
for i in range(3):
for j in range(3):
line = "%10.5f "%(matrix.Get(i,j))
sys.stdout.write(line)
sys.stdout.write("\n")
imatrix = matrix.inverse()
if verbose:
print ""
for i in range(3):
for j in range(3):
line = "%10.5f "%(imatrix.Get(i,j))
sys.stdout.write(line)
sys.stdout.write("\n")
rotarray = pybel.ob.doubleArray(9)
matrix.GetArray(rotarray)
mol = pybel.readfile("xyz", filename).next()
mol.OBMol.Rotate(rotarray)
mol.OBMol.Translate(pybel.ob.vector3(1.0, 10.0, 3.0));
print mol.write("xyz")
| 0 | 0 | 0 |
2a563552eedff6618af603ad8844730cb87f25f6 | 606 | py | Python | core/recc/system/user.py | bogonets/answer | 57f892a9841980bcbc35fa1e27521b34cd94bc25 | [
"MIT"
] | 3 | 2021-06-20T02:24:10.000Z | 2022-01-26T23:55:33.000Z | core/recc/system/user.py | bogonets/answer | 57f892a9841980bcbc35fa1e27521b34cd94bc25 | [
"MIT"
] | null | null | null | core/recc/system/user.py | bogonets/answer | 57f892a9841980bcbc35fa1e27521b34cd94bc25 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from os import setuid, chown
from pwd import getpwnam
from typing import Union
| 23.307692 | 67 | 0.638614 | # -*- coding: utf-8 -*-
from os import setuid, chown
from pwd import getpwnam
from typing import Union
def get_user_id(user: Union[str, int]) -> int:
    """Resolve *user* to a numeric UID.

    A string is looked up in the password database (KeyError if unknown);
    an int is assumed to already be a UID and returned unchanged.
    Raises TypeError for any other type.
    """
    if isinstance(user, str):
        # Use the named struct field instead of the magic index [2];
        # pw_uid is guaranteed to be an int, so the old assert is redundant.
        return getpwnam(user).pw_uid
    elif isinstance(user, int):
        return user
    else:
        raise TypeError(f"Unsupported type: {type(user).__name__}")
def set_user(user: Union[str, int]) -> None:
    """Set the current process's user id to that of *user* (name or UID)."""
    uid = get_user_id(user)
    setuid(uid)
def change_user(path: str, user: Union[str, int]) -> None:
    """Change the owner of *path* to *user*; gid -1 leaves the group as-is."""
    new_uid = get_user_id(user)
    chown(path, new_uid, -1)
| 430 | 0 | 69 |
d153cb3b5c03c5b94d7eb0c099254847fdec267b | 16,275 | py | Python | scripts/tests/test_gristle_determinator_cmd.py | sidhu177/DataGristle | d9dd383e146c13a2a9a8cd265330122a9c565609 | [
"BSD-3-Clause"
] | null | null | null | scripts/tests/test_gristle_determinator_cmd.py | sidhu177/DataGristle | d9dd383e146c13a2a9a8cd265330122a9c565609 | [
"BSD-3-Clause"
] | null | null | null | scripts/tests/test_gristle_determinator_cmd.py | sidhu177/DataGristle | d9dd383e146c13a2a9a8cd265330122a9c565609 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
""" Tests gristle_determinator.py
Contains a primary class: FileStructureFixtureManager
Which is extended by six classes that override various methods or variables.
This is a failed experiment - since the output isn't as informative as it
should be. This should be redesigned.
See the file "LICENSE" for the full license governing this code.
Copyright 2011,2012,2013,2017 Ken Farmer
"""
#adjust pylint for pytest oddities:
#pylint: disable=missing-docstring
#pylint: disable=unused-argument
#pylint: disable=attribute-defined-outside-init
#pylint: disable=protected-access
#pylint: disable=no-self-use
#pylint: disable=empty-docstring
import tempfile
import csv
import errno
import shutil
import os
from os.path import join as pjoin, dirname
from pprint import pprint as pp
import pytest
import envoy
import datagristle.test_tools as test_tools
import datagristle.file_type as file_type
script_path = dirname(dirname(os.path.realpath((__file__))))
def get_value(parsable_out, division, section, subsection, key):
    """ Gets the value (right-most field) out of gristle_determinator
        parsable output given the key values for the rest of the fields.

        Returns None when no record matches.
    """
    # Pass the format parameters straight to csv.reader.  The original code
    # assigned these as attributes on the csv.Dialect *class*, mutating the
    # stdlib class globally for every other csv user in the process.
    csvobj = csv.reader(parsable_out.split('\n'),
                        delimiter='|',
                        quotechar='"',
                        quoting=file_type.get_quote_number('QUOTE_ALL'),
                        lineterminator='\n')
    for record in csvobj:
        if not record:
            continue
        assert len(record) == 5
        rec_division, rec_section, rec_subsection, rec_key, rec_value = record
        if (rec_division == division
                and rec_section == section
                and rec_subsection == subsection
                and rec_key == key):
            return rec_value
    return None
| 40.086207 | 120 | 0.579908 | #!/usr/bin/env python
""" Tests gristle_determinator.py
Contains a primary class: FileStructureFixtureManager
Which is extended by six classes that override various methods or variables.
This is a failed experiment - since the output isn't as informative as it
should be. This should be redesigned.
See the file "LICENSE" for the full license governing this code.
Copyright 2011,2012,2013,2017 Ken Farmer
"""
#adjust pylint for pytest oddities:
#pylint: disable=missing-docstring
#pylint: disable=unused-argument
#pylint: disable=attribute-defined-outside-init
#pylint: disable=protected-access
#pylint: disable=no-self-use
#pylint: disable=empty-docstring
import tempfile
import csv
import errno
import shutil
import os
from os.path import join as pjoin, dirname
from pprint import pprint as pp
import pytest
import envoy
import datagristle.test_tools as test_tools
import datagristle.file_type as file_type
script_path = dirname(dirname(os.path.realpath((__file__))))
def generate_test_file(delim, rec_list, quoted=False, dir_name=None):
    """ Write rec_list to a new temp file, one delim-joined record per line,
        optionally double-quoting each field.  Returns the file's fully
        qualified name.

        Fixes: no longer mutates the caller's rec_list records in place when
        quoted=True, and drops the debug pp() of every output record.
    """
    if dir_name:
        (file_desc, fqfn) = tempfile.mkstemp(dir=dir_name)
    else:
        (file_desc, fqfn) = tempfile.mkstemp()
    with os.fdopen(file_desc, "w") as fileobj:
        for rec in rec_list:
            if quoted:
                rec = ['"%s"' % field for field in rec]  # quoted copy; input untouched
            fileobj.write(delim.join(rec) + '\n')
    return fqfn
def get_value(parsable_out, division, section, subsection, key):
    """ Gets the value (right-most field) out of gristle_determinator
        parsable output given the key values for the rest of the fields.

        Returns None when no record matches.
    """
    # Pass the format parameters straight to csv.reader.  The original code
    # assigned these as attributes on the csv.Dialect *class*, mutating the
    # stdlib class globally for every other csv user in the process.
    csvobj = csv.reader(parsable_out.split('\n'),
                        delimiter='|',
                        quotechar='"',
                        quoting=file_type.get_quote_number('QUOTE_ALL'),
                        lineterminator='\n')
    for record in csvobj:
        if not record:
            continue
        assert len(record) == 5
        rec_division, rec_section, rec_subsection, rec_key, rec_value = record
        if (rec_division == division
                and rec_section == section
                and rec_subsection == subsection
                and rec_key == key):
            return rec_value
    return None
class TestEmptyFile(object):
    """End-to-end checks of gristle_determinator against empty and
    header-only input files.
    """
    def setup_method(self, method):
        """Create a scratch directory for this test's input files."""
        self.tmp_dir = tempfile.mkdtemp(prefix='datagristle_deter_')
    def teardown_method(self, method):
        """Remove the scratch directory."""
        shutil.rmtree(self.tmp_dir)
    def create_empty_file_with_header(self, fqfn):
        """Write a file containing only a header record, no data rows."""
        with open(fqfn, 'w') as fileobj:
            fileobj.write('col1, colb2, col3')
    def test_empty_file(self):
        """A zero-byte file should exit with ENODATA and produce no analysis."""
        fqfn = pjoin(self.tmp_dir, 'empty.csv')
        test_tools.touch(fqfn)
        cmd = '%s %s --outputformat=parsable' % (pjoin(script_path, 'gristle_determinator'), fqfn)
        runner = envoy.run(cmd)
        print(runner.std_out)
        print(runner.std_err)
        assert runner.status_code == errno.ENODATA
        assert get_value(runner.std_out, 'file_analysis_results', 'main', 'main', 'record_count') is None
        assert get_value(runner.std_out, 'file_analysis_results', 'main', 'main', 'has_header') is None
    def test_empty_file_with_header(self):
        """A header-only file should be detected as 1 record with a header."""
        fqfn = os.path.join(self.tmp_dir, 'empty_header.csv')
        self.create_empty_file_with_header(fqfn)
        cmd = '%s %s --outputformat=parsable' % (pjoin(script_path, 'gristle_determinator'), fqfn)
        runner = envoy.run(cmd)
        print(runner.std_out)
        print(runner.std_err)
        assert runner.status_code == 0
        assert get_value(runner.std_out, 'file_analysis_results', 'main', 'main', 'record_count') == '1'
        assert get_value(runner.std_out, 'file_analysis_results', 'main', 'main', 'has_header') == 'True'
    def test_empty_file_with_header_and_hasheader_arg(self):
        """Same as above, but with --has-header supplied explicitly."""
        fqfn = os.path.join(self.tmp_dir, 'empty_header.csv')
        self.create_empty_file_with_header(fqfn)
        cmd = '%s %s --outputformat=parsable --has-header' % (pjoin(script_path, 'gristle_determinator'), fqfn)
        runner = envoy.run(cmd)
        print(runner.std_out)
        print(runner.std_err)
        assert runner.status_code == 0
        assert get_value(runner.std_out, 'file_analysis_results', 'main', 'main', 'record_count') == '1'
        assert get_value(runner.std_out, 'file_analysis_results', 'main', 'main', 'has_header') == 'True'
class TestOutputFormattingAndContents(object):
    """End-to-end check of the parsable output format and its analysis values.

    setup_method runs gristle_determinator once over a small generated file
    and indexes every 5-field output row into self.file_struct (file-level
    results) and self.field_struct (per-field results, keyed by section and
    subsection); the test_* methods then assert individual values.
    """
    def setup_method(self, method):
        self.tmp_dir = tempfile.mkdtemp(prefix='datagristle_deter_')
        recs = [['Alabama', '8', '18'],
                ['Alaska', '6', '16'],
                ['Arizona', '6', '14'],
                ['Arkansas', '2', '12'],
                ['California', '19', '44']]
        self.file_struct = {}
        self.field_struct = {}
        fqfn = generate_test_file(delim='|', rec_list=recs, quoted=False, dir_name=self.tmp_dir)
        cmd = '%s %s --outputformat=parsable' % (os.path.join(script_path, 'gristle_determinator'), fqfn)
        runner = envoy.run(cmd)
        print(runner.std_out)
        print(runner.std_err)
        assert runner.status_code == 0
        # NOTE(review): these assignments set attributes on the csv.Dialect
        # class itself (shared module-wide), not on an instance.
        mydialect = csv.Dialect
        mydialect.delimiter = '|'
        mydialect.quoting = file_type.get_quote_number('QUOTE_ALL')
        mydialect.quotechar = '"'
        mydialect.lineterminator = '\n'
        csvobj = csv.reader(runner.std_out.split('\n'), dialect=mydialect)
        pp(csvobj)
        # Each parsable row is: division, section, subsection, key, value.
        for record in csvobj:
            if not record:
                continue
            assert len(record) == 5
            division = record[0]
            section = record[1]
            subsection = record[2]
            key = record[3]
            value = record[4]
            assert division in ['file_analysis_results', 'field_analysis_results']
            if division == 'file_analysis_results':
                assert section == 'main'
                assert subsection == 'main'
                self.file_struct[key] = value
            elif division == 'field_analysis_results':
                assert 'field_' in section
                assert subsection in ['main', 'top_values']
                if section not in self.field_struct:
                    self.field_struct[section] = {}
                if subsection not in self.field_struct[section]:
                    self.field_struct[section][subsection] = {}
                self.field_struct[section][subsection][key] = value
    def teardown_method(self, teardown):
        shutil.rmtree(self.tmp_dir)
    def test_file_info(self):
        assert self.file_struct['record_count'] == '5'
        assert self.file_struct['skipinitialspace'] == 'False'
        assert self.file_struct['quoting'] == 'QUOTE_NONE'
        assert self.file_struct['field_count'] == '3'
        assert self.file_struct['delimiter'] == "'|'"
        assert self.file_struct['has_header'] == 'False'
        assert self.file_struct['escapechar'] == 'None'
        assert self.file_struct['doublequote'] == 'False'
        assert self.file_struct['format_type'] == 'csv'
    def test_field_info(self):
        # field_0 holds the state names (strings), field_1 the first number.
        assert self.field_struct['field_0']['main']['field_number'] == '0'
        assert self.field_struct['field_0']['main']['name'] == 'field_0'
        assert self.field_struct['field_0']['main']['type'] == 'string'
        assert self.field_struct['field_0']['main']['known_values'] == '5'
        assert self.field_struct['field_0']['main']['min'] == 'Alabama'
        assert self.field_struct['field_0']['main']['max'] == 'California'
        assert self.field_struct['field_0']['main']['unique_values'] == '5'
        assert self.field_struct['field_0']['main']['wrong_field_cnt'] == '0'
        assert self.field_struct['field_0']['main']['case'] == 'mixed'
        assert self.field_struct['field_0']['main']['max_length'] == '10'
        assert self.field_struct['field_0']['main']['mean_length'] == '7.6'
        assert self.field_struct['field_0']['main']['min_length'] == '6'
        assert self.field_struct['field_1']['main']['field_number'] == '1'
        assert self.field_struct['field_1']['main']['name'] == 'field_1'
        assert self.field_struct['field_1']['main']['type'] == 'integer'
        assert self.field_struct['field_1']['main']['known_values'] == '4'
        assert self.field_struct['field_1']['main']['min'] == '2'
        assert self.field_struct['field_1']['main']['max'] == '19'
        assert self.field_struct['field_1']['main']['unique_values'] == '4'
        assert self.field_struct['field_1']['main']['wrong_field_cnt'] == '0'
        assert self.field_struct['field_1']['main']['mean'] == '8.2'
        assert self.field_struct['field_1']['main']['median'] == '6.0'
        assert self.field_struct['field_1']['main']['std_dev'] == '5.74108003776293'
        assert self.field_struct['field_1']['main']['variance'] == '32.96'
    def test_top_value_info(self):
        pp(self.field_struct)
        # All state names are unique, so no top-values list is produced.
        assert self.field_struct['field_0']['top_values']['top_values'] == 'not shown - all are unique'
        assert self.field_struct['field_1']['top_values']['2'] == '1'
        assert self.field_struct['field_1']['top_values']['6'] == '2'
        assert self.field_struct['field_1']['top_values']['8'] == '1'
        assert self.field_struct['field_1']['top_values']['19'] == '1'
class TestReadLimit(object):
    """--read-limit should analyze only the first N records and estimate counts."""
    def setup_method(self, method):
        self.tmp_dir = tempfile.mkdtemp(prefix='datagristle_deter_')
        recs = [['Alabama', '8', '18'],
                ['Alaska', '6', '16'],
                ['Arizona', '6', '14'],
                ['Arkansas', '2', '12'],
                ['California', '19', '44'],
                ['Colorado', '19', '44'],
                ['Illinois', '19', '44'],
                ['Indiana', '19', '44'],
                ['Kansas', '19', '44'],
                ['Kentucky', '19', '44'],
                ['Louisiana', '19', '44'],
                ['Maine', '19', '44'],
                ['Mississippi', '19', '44'],
                ['Nebraska', '19', '44'],
                ['Oklahoma', '19', '44'],
                ['Tennessee', '19', '44'],
                ['Texas', '19', '9999'],
                ['Virginia', '19', '44'],
                ['West Virginia', '19', '44']]
        self.file_struct = {}
        self.field_struct = {}
        fqfn = generate_test_file(delim='|', rec_list=recs, quoted=False, dir_name=self.tmp_dir)
        cmd = '%s %s --read-limit 4 --outputformat=parsable' % (os.path.join(script_path, 'gristle_determinator'), fqfn)
        runner = envoy.run(cmd)
        print(runner.std_out)
        print(runner.std_err)
        assert runner.status_code == 0
        # NOTE(review): these assignments set attributes on the csv.Dialect
        # class itself (shared module-wide), not on an instance.
        mydialect = csv.Dialect
        mydialect.delimiter = '|'
        mydialect.quoting = file_type.get_quote_number('QUOTE_ALL')
        mydialect.quotechar = '"'
        mydialect.lineterminator = '\n'
        csvobj = csv.reader(runner.std_out.split('\n'), dialect=mydialect)
        # Sort each 5-field output row into the file-level / per-field dicts.
        for record in csvobj:
            if not record:
                continue
            assert len(record) == 5
            division = record[0]
            section = record[1]
            subsection = record[2]
            key = record[3]
            value = record[4]
            assert division in ['file_analysis_results', 'field_analysis_results']
            if division == 'file_analysis_results':
                assert section == 'main'
                assert subsection == 'main'
                self.file_struct[key] = value
            elif division == 'field_analysis_results':
                assert 'field_' in section
                assert subsection in ['main', 'top_values']
                if section not in self.field_struct:
                    self.field_struct[section] = {}
                if subsection not in self.field_struct[section]:
                    self.field_struct[section][subsection] = {}
                self.field_struct[section][subsection][key] = value
    def teardown_method(self, teardown):
        shutil.rmtree(self.tmp_dir)
    def test_limits(self):
        # With a read limit, the record count is an estimate (carries 'est').
        assert 'est' in self.file_struct['record_count']
    def test_file_info(self):
        assert self.file_struct['skipinitialspace'] == 'False'
        assert self.file_struct['quoting'] == 'QUOTE_NONE'
        assert self.file_struct['field_count'] == '3'
        assert self.file_struct['delimiter'] == "'|'"
    def test_field_info(self):
        # Only the first 4 records were read, so min/max stop at 'Arkansas'.
        assert self.field_struct['field_0']['main']['known_values'] == '4'
        assert self.field_struct['field_0']['main']['min'] == 'Alabama'
        assert self.field_struct['field_0']['main']['max'] == 'Arkansas'
        assert self.field_struct['field_0']['main']['unique_values'] == '4'
class TestMaxFreq(object):
    """--max-freq should cap the frequency dict and emit a warning row."""
    def setup_method(self, method):
        self.tmp_dir = tempfile.mkdtemp(prefix='datagristle_deter_')
        recs = [['Alabama', '8', '18'],
                ['Alaska', '6', '16'],
                ['Arizona', '6', '14'],
                ['Arkansas', '2', '12'],
                ['California', '19', '44'],
                ['Colorado', '19', '44'],
                ['Illinois', '19', '44'],
                ['Indiana', '19', '44'],
                ['Kansas', '19', '44'],
                ['Kentucky', '19', '44'],
                ['Louisiana', '19', '44'],
                ['Maine', '19', '44'],
                ['Mississippi', '19', '44'],
                ['Nebraska', '19', '44'],
                ['Oklahoma', '19', '44'],
                ['Tennessee', '19', '44'],
                ['Texas', '19', '9999'],
                ['Virginia', '19', '44'],
                ['West Virginia', '19', '44']]
        self.file_struct = {}
        self.field_struct = {}
        fqfn = generate_test_file(delim='|', rec_list=recs, quoted=False, dir_name=self.tmp_dir)
        cmd = '%s %s --max-freq 10 --outputformat=parsable' % (os.path.join(script_path, 'gristle_determinator'), fqfn)
        runner = envoy.run(cmd)
        print(runner.std_out)
        print(runner.std_err)
        assert runner.status_code == 0
        # NOTE(review): these assignments set attributes on the csv.Dialect
        # class itself (shared module-wide), not on an instance.
        mydialect = csv.Dialect
        mydialect.delimiter = '|'
        mydialect.quoting = file_type.get_quote_number('QUOTE_ALL')
        mydialect.quotechar = '"'
        mydialect.lineterminator = '\n'
        csvobj = csv.reader(runner.std_out.split('\n'), dialect=mydialect)
        for record in csvobj:
            if not record:
                continue
            # Unlike the other suites, a capped freq dict emits a warning
            # row that is not in the 5-field format and must be skipped.
            if len(record) != 5:
                if 'WARNING: freq dict is too large' in record[0]:
                    continue # ignore warning row
                else:
                    pytest.fail('Invalid result record: %s' % record[0])
            division = record[0]
            section = record[1]
            subsection = record[2]
            key = record[3]
            value = record[4]
            assert division in ['file_analysis_results', 'field_analysis_results']
            if division == 'file_analysis_results':
                assert section == 'main'
                assert subsection == 'main'
                self.file_struct[key] = value
            elif division == 'field_analysis_results':
                assert 'field_' in section
                assert subsection in ['main', 'top_values']
                if section not in self.field_struct:
                    self.field_struct[section] = {}
                if subsection not in self.field_struct[section]:
                    self.field_struct[section][subsection] = {}
                self.field_struct[section][subsection][key] = value
    def teardown_method(self, method):
        shutil.rmtree(self.tmp_dir)
    def test_limits(self):
        # All records are still read; only the freq dict is capped.
        assert self.file_struct['record_count'] == '19'
    def test_field_info(self):
        assert self.field_struct['field_0']['main']['known_values'] == '10'
        assert self.field_struct['field_0']['main']['min'] == 'Alabama'
        assert self.field_struct['field_0']['main']['max'] == 'Kentucky' # affected
        assert self.field_struct['field_0']['main']['unique_values'] == '10' # affected
| 13,613 | 44 | 655 |
5c7ad6f8d852ce85b7feb9c261b673e4a5fce78b | 5,823 | py | Python | Cuarta_escena.py | ArturoSirvent/presentacion_entropia | ab7a35dbca89b6bb868a357f09c80b0ed7198a93 | [
"MIT"
] | null | null | null | Cuarta_escena.py | ArturoSirvent/presentacion_entropia | ab7a35dbca89b6bb868a357f09c80b0ed7198a93 | [
"MIT"
] | null | null | null | Cuarta_escena.py | ArturoSirvent/presentacion_entropia | ab7a35dbca89b6bb868a357f09c80b0ed7198a93 | [
"MIT"
] | null | null | null | from manimlib.imports import *
import numpy as np
#ahora vamos a explicar el caso de esferas duras, qeu por encima de 50% de ocupacion muestan cristalización
#despues vamos a introducir el tema de espacio disponible
#puedes decir, bueno pero esque si hay muchas esferas o su radio es muy grande, pues no les queda otra que ordenarse para caber, claro.
#Bueno pues esque es eso lo qeu pasa cuando la temperatura es baja que se debe buscar la ordenacion para no solaparse, sino el estado no existiria, o
#o quiza empezarian a solapar y habría una presion extraña de algun lado, pero gracias a que se ordenan el sistema puede existir en equilibrio!!!
#ultima escena ya, ostia
#aqui vamos a poner la equivalencia entre mas espacio disponible y mayor numero de estados
#mencionar que el caso de esferas duras es cierto en algunos coloides, pero para otros muchos casos,
#lo ultimo sera el tema de la segregacion entrópica | 38.058824 | 150 | 0.65018 | from manimlib.imports import *
import numpy as np
class Esferas_duras(Scene):
    # Scene: explain the hard-sphere case — above ~50% occupation they crystallise.
    # Afterwards the idea of available (free) space is introduced.
    def construct(self):
        titulo=TextMobject("Cristalización en esferas duras")
        sub1=TextMobject("- No muestran interacción entre ellas.")
        sub2=TextMobject("- La propia característica de no solaparse hace que se ordenen.")
        imagen1=ImageMobject("desorden1.png")
        capt1=TextMobject(r"Por debajo del 50 \% de ocupación")
        capt1.scale(0.7)
        imagen2=ImageMobject("orden1.png")
        capt2=TextMobject(r"Pasado el 50 \% de ocupación")
        capt2.scale(0.6)
        # Lay out title and bullets in the top-left, images side by side below.
        titulo.to_corner(UL)
        sub1.next_to(titulo,DOWN*2.5,aligned_edge=LEFT)
        sub2.next_to(sub1,DOWN*2.5,aligned_edge=LEFT)
        imagen1.move_to(DOWN+3*LEFT)
        imagen2.move_to(DOWN+3*RIGHT)
        capt1.next_to(imagen1,DOWN)
        capt2.next_to(imagen2,DOWN)
        self.play(Write(VGroup(titulo,sub1,sub2)))
        # Re-anchor the images to the screen edges before fading them in.
        imagen1.to_edge(LEFT,buff=2)
        capt1.next_to(imagen1,DOWN)
        imagen2.to_edge(RIGHT,buff=2)
        capt2.next_to(imagen2,DOWN)
        self.wait(4)
        self.play(FadeIn(imagen1),Write(capt1))
        self.wait(4)
        self.play(FadeIn(imagen2),Write(capt2))
        self.wait(4)
    # Narration note: one could object that with many/large spheres they simply
    # have to order themselves to fit — and that is exactly what happens at low
    # temperature: ordering is required to avoid overlaps, otherwise the state
    # could not exist in equilibrium; thanks to ordering the system can.
class Espacio_libre(Scene):
    # Scene: the "free volume" is not simply V minus the spheres' own volume —
    # each sphere carries an excluded zone where no other centre can sit.
    def construct(self):
        titulo=TextMobject("El espacio libre no es ")
        titulo.scale(0.7)
        titulo.to_corner(UL,buff=1)
        ec1=TexMobject(r"(V-N* \nu_{esfera})")
        ec1.next_to(titulo,RIGHT*1.5)
        imagen1=ImageMobject("esf_lib_1.png")
        imagen1.next_to(ec1,RIGHT)
        titulo2=TextMobject("Hay una zona entorno a cada esfera, donde no hay otro centro")
        titulo2.scale(0.8)
        titulo2.move_to(UP)
        imagen2=ImageMobject("esf_lib_2.png")
        imagen2.next_to(titulo2,DOWN)
        titulo3=TextMobject("En esos puntos exteriores sí se puede añadir otra esfera.")
        titulo3.move_to(DOWN*2)
        titulo3.scale(0.8)
        # it "fits" exactly at the limit!
        titulo4=TextMobject("Este es el volumen accesible")
        ec2=TexMobject(r"(V-N*\nu_{esfera}^*)")
        titulo4.to_corner(DL,buff=1)
        ec2.next_to(titulo4,RIGHT)
        self.play(Write(VGroup(titulo,ec1)))
        self.play(FadeIn(imagen1))
        self.wait(4)
        self.play(Write(titulo2))
        self.play(FadeIn(imagen2))
        self.wait(4)
        self.play(Write(titulo3))
        self.wait(4)
        self.play(Write(VGroup(titulo4,ec2)))
        self.wait(2)
        end_scene(self)
    # Last scene coming up.
def end_scene(self):
    """Fade out every mobject currently in the scene in one combined animation."""
    fade_outs = [FadeOut(mobj) for mobj in self.get_mobjects()]
    self.play(*fade_outs)
class Ultima_escena(Scene):
    # Final scene: equate more available space with a larger number of states
    # (and therefore higher entropy) — the entropic argument for ordering.
    def construct(self):
        titulo=TextMobject("Si la densidad es alta, las esferas se juntarán")
        titulo.scale(0.85)
        titulo2=TextMobject("Si se juntan, el area \"prohibida\" es menor que las dos separadas")
        titulo2.scale(0.7)
        imagen2=ImageMobject("area1.png")
        imagen22=ImageMobject("area2.png")
        imagen2.scale(0.85)
        imagen22.scale(0.85)
        titulo3=TextMobject("A mayor ordenación")
        titulo3.scale(0.7)
        titulo4=TextMobject("Mayor area accesible")
        titulo4.scale(0.7)
        titulo5=TextMobject("Más estados posibles") # i.e. more places to put yourself
        titulo5.scale(0.7)
        titulo6=TextMobject("Mayor entropía")
        titulo6.scale(0.7)
        titulo7=TextMobject("¡Cuando la densidad es alta!")
        titulo7.scale(0.8)
        # Vertical chain of arrows linking each statement to the next.
        flecha1=Vector(DOWN*0.5)
        flecha2=Vector(DOWN*0.8)
        flecha3=Vector(DOWN*0.5)
        flecha4=Vector(DOWN*0.5)
        flecha5=Vector(DOWN*0.5)
        titulo.to_edge(UP)
        flecha1.next_to(titulo,DOWN*0.5)
        titulo2.next_to(flecha1,DOWN*0.5)
        imagen2.move_to(titulo2.get_center()+DOWN*1.3+RIGHT*3.4)
        imagen22.move_to(titulo2.get_center()+DOWN*1.3+LEFT*2.9)
        flecha2.next_to(titulo2,DOWN*2.1)
        titulo3.next_to(flecha2,DOWN*2.5)
        flecha3.next_to(titulo3,DOWN*0.5)
        titulo4.next_to(flecha3,DOWN*0.6)
        flecha4.next_to(titulo4,DOWN*0.5)
        titulo5.next_to(flecha4,DOWN*0.6)
        flecha5.next_to(titulo5,DOWN*0.5)
        titulo6.next_to(flecha5,DOWN*0.6)
        # The "high density!" caveat is rotated and highlighted in red.
        titulo7.rotate(PI/6)
        titulo7.move_to(LEFT*4+DOWN*2.2)
        titulo7.set_color(RED)
        self.play(Write(titulo))
        self.wait(2)
        self.play(Write(VGroup(flecha1,titulo2)))
        self.wait(2)
        self.play(FadeIn(imagen2),FadeIn(imagen22))
        self.wait(2)
        self.play(Write(VGroup(flecha2,titulo3)))
        self.wait(2)
        self.play(Write(VGroup(flecha3,titulo4)))
        self.wait(2)
        self.play(Write(VGroup(flecha4,titulo5)))
        self.wait(2)
        self.play(Write(VGroup(flecha5,titulo6)))
        self.wait(2)
        self.play(FadeInFromLarge(titulo7))
        self.wait(2)
        end_scene(self)
    # Mention that the hard-sphere picture holds for some colloids but not for
    # many other cases; the closing topic is entropic segregation.
1541ac7c6b33d08f7f998f7950f090afd6c14b38 | 470 | py | Python | Layers/Swish.py | Vaibhavs10/IMS-Toucan | 931e4ce63a4cc675cb15b72474a3c3619632a07b | [
"Apache-2.0"
] | 93 | 2021-08-11T13:52:37.000Z | 2022-03-29T23:19:07.000Z | Layers/Swish.py | Vaibhavs10/IMS-Toucan | 931e4ce63a4cc675cb15b72474a3c3619632a07b | [
"Apache-2.0"
] | 4 | 2021-12-15T17:23:14.000Z | 2022-03-24T04:51:40.000Z | Layers/Swish.py | Vaibhavs10/IMS-Toucan | 931e4ce63a4cc675cb15b72474a3c3619632a07b | [
"Apache-2.0"
] | 25 | 2021-08-11T14:23:47.000Z | 2022-03-28T20:23:51.000Z | # Copyright 2020 Johns Hopkins University (Shinji Watanabe)
# Northwestern Polytechnical University (Pengcheng Guo)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
# Adapted by Florian Lux 2021
import torch
class Swish(torch.nn.Module):
    """
    Construct a Swish activation function for Conformer.
    """
    def forward(self, x):
        """
        Return the Swish activation, x * sigmoid(x), applied element-wise.
        """
        return x * torch.sigmoid(x)
| 24.736842 | 70 | 0.646809 | # Copyright 2020 Johns Hopkins University (Shinji Watanabe)
# Northwestern Polytechnical University (Pengcheng Guo)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
# Adapted by Florian Lux 2021
import torch
class Swish(torch.nn.Module):
    """Swish activation module for Conformer: f(x) = x * sigmoid(x)."""

    def forward(self, x):
        """Apply the Swish activation element-wise to ``x`` and return it."""
        return torch.sigmoid(x) * x
| 0 | 0 | 0 |
b1f07dd9b9200b676df26d563936a24690638dde | 365 | py | Python | raspberry-pi-2/Pump/main.py | hivebio/ministat-1 | 4fdb0b9a016c3cfeb61f7bcf72caea885c37554d | [
"BSD-2-Clause"
] | 6 | 2015-01-26T06:38:47.000Z | 2016-04-22T16:54:43.000Z | raspberry-pi-2/Pump/main.py | hivebio/ministat-1 | 4fdb0b9a016c3cfeb61f7bcf72caea885c37554d | [
"BSD-2-Clause"
] | 2 | 2015-01-26T16:35:32.000Z | 2015-02-20T17:03:50.000Z | raspberry-pi-2/Pump/main.py | hivebio/ministat-1 | 4fdb0b9a016c3cfeb61f7bcf72caea885c37554d | [
"BSD-2-Clause"
] | 3 | 2016-09-02T22:28:05.000Z | 2020-12-26T09:12:50.000Z | import os, sys
# Make ../utils importable (credentials presumably lives there — TODO confirm).
lib_path = os.path.abspath(os.path.join('..', 'utils'))
sys.path.append(lib_path)
import ftplib as FTP
import credentials as cred
import RPIO
import RPi.GPIO as GPIO
from pump import Pump
# use BCM mode to play well with RPIO
GPIO.setmode(GPIO.BCM)
# start dispatch loop in background
RPIO.wait_for_interrupts(threaded=True)
# Pump wired to BCM pins 23 and 24 (pin roles defined in pump.py — verify).
p0 = Pump(23, 24)
| 18.25 | 55 | 0.753425 | import os, sys
# Make ../utils importable (credentials presumably lives there — TODO confirm).
lib_path = os.path.abspath(os.path.join('..', 'utils'))
sys.path.append(lib_path)
import ftplib as FTP
import credentials as cred
import RPIO
import RPi.GPIO as GPIO
from pump import Pump
# use BCM mode to play well with RPIO
GPIO.setmode(GPIO.BCM)
# start dispatch loop in background
RPIO.wait_for_interrupts(threaded=True)
# Pump wired to BCM pins 23 and 24 (pin roles defined in pump.py — verify).
p0 = Pump(23, 24)
| 0 | 0 | 0 |
15d68f87a8909d9b38611bb04ed4dfd16984022d | 163 | py | Python | test/multicall/multicall.py | cxapython/Cyberbrain | 8deb949a6b8a909c2ac23b2d84ae6e705940072d | [
"MIT"
] | 1 | 2019-09-27T01:43:51.000Z | 2019-09-27T01:43:51.000Z | test/multicall/multicall.py | cxapython/Cyberbrain | 8deb949a6b8a909c2ac23b2d84ae6e705940072d | [
"MIT"
] | null | null | null | test/multicall/multicall.py | cxapython/Cyberbrain | 8deb949a6b8a909c2ac23b2d84ae6e705940072d | [
"MIT"
] | 1 | 2019-09-27T01:43:54.000Z | 2019-09-27T01:43:54.000Z | """Multiple calls in one logical line."""
import cyberbrain
cyberbrain.init()
# NOTE(review): `f` is never defined in this snippet, so this line raises
# NameError as written; a no-op `f(*args, **kwargs)` appears in the second
# copy of this file further down — confirm which copy is authoritative.
x = {f(x=1), f(y=2)}
cyberbrain.register(x)
| 10.1875 | 41 | 0.631902 | """Multiple calls in one logical line."""
import cyberbrain
# Start tracing before the statements of interest.
cyberbrain.init()
# No-op callee: the point of this fixture is two calls to f in one logical line.
def f(*args, **kwargs):
    pass
x = {f(x=1), f(y=2)}
cyberbrain.register(x)
| 11 | 0 | 23 |
b1cab309247d1c7528c4a53378e0baea34baeda0 | 2,278 | py | Python | models/orpac/containers.py | olitheolix/ds2data | f0d11a5f2b02c132da7604ba9ced17089969afda | [
"Apache-2.0"
] | 2 | 2017-08-18T05:38:45.000Z | 2018-05-08T14:15:22.000Z | models/orpac/containers.py | olitheolix/ds2data | f0d11a5f2b02c132da7604ba9ced17089969afda | [
"Apache-2.0"
] | null | null | null | models/orpac/containers.py | olitheolix/ds2data | f0d11a5f2b02c132da7604ba9ced17089969afda | [
"Apache-2.0"
] | null | null | null | import math
class Shape:
    """Container to store 3D Image/array/feature/tensor sizes.

    This is a convenience class because size specifications are often required
    yet their format is ambiguous. Sometimes, images are specified as CHW
    (Tensorflow), sometimes as HWC (NumPy, Matplotlib). Sometimes, only the
    width and height are needed which Tensorflow needs as (height, width) yet
    eg. PIL returns as (width, height).

    This container class accepts the three size parameters and can return them
    in all possible formats.

    NOTE(review): this copy of the class carries only documentation — the
    accessor methods appear in the full copy that follows in this file.

    Inputs:
        chan: int
            Number of channels. Must be non-negative or None.
        height: int
            Must be non-negative (can *not* be None).
        width: int
            Must be non-negative (can *not* be None).
    """
| 30.783784 | 83 | 0.604478 | import math
class Shape:
    """Container to store 3D Image/array/feature/tensor sizes.

    This is a convenience class because size specifications are often required
    yet their format is ambiguous. Sometimes, images are specified as CHW
    (Tensorflow), sometimes as HWC (NumPy, Matplotlib). Sometimes, only the
    width and height are needed which Tensorflow needs as (height, width) yet
    eg. PIL returns as (width, height).

    This container class accepts the three size parameters and can return them
    in all possible formats.

    Inputs:
        chan: int
            Number of channels. Must be non-negative or None.
        height: int
            Must be non-negative (can *not* be None).
        width: int
            Must be non-negative (can *not* be None).
    """
    def __init__(self, chan, height, width):
        # Sanity checks. Kept as asserts so the raised exception type
        # (AssertionError) matches what existing callers may expect.
        assert chan is None or isinstance(chan, int) and chan >= 0
        assert isinstance(width, int) and width >= 0
        assert isinstance(height, int) and height >= 0
        # Store the parameters.
        self.chan = chan
        self.height = height
        self.width = width

    def __repr__(self):
        return f'Shape(chan={self.chan}, height={self.height}, width={self.width})'

    def __eq__(self, ref):
        """Value equality on (chan, height, width); False for non-Shape objects."""
        # Direct comparison instead of the previous assert/except control flow.
        if not isinstance(ref, Shape):
            return False
        return (ref.chan, ref.height, ref.width) == (self.chan, self.height, self.width)

    def copy(self):
        """Return a new, independent Shape with the same dimensions."""
        return Shape(self.chan, self.height, self.width)

    def isSquare(self):
        """Return True if height equals width."""
        return self.width == self.height

    def isPow2(self):
        """Return True if both height and width are powers of two greater than 1.

        Uses the exact ``n & (n - 1) == 0`` bit test instead of the previous
        float-based ``math.log2`` round trip, which can misreport for very
        large integers due to floating-point rounding.
        """
        return (
            self.height > 1
            and self.width > 1
            and self.height & (self.height - 1) == 0
            and self.width & (self.width - 1) == 0
        )

    def chw(self):
        """Return (chan, height, width)."""
        return (self.chan, self.height, self.width)

    def hwc(self):
        """Return (height, width, chan)."""
        return (self.height, self.width, self.chan)

    def hw(self):
        """Return (height, width)."""
        return (self.height, self.width)

    def wh(self):
        """Return (width, height)."""
        return (self.width, self.height)
| 1,216 | 0 | 269 |
6a727db51b330233fc53a4457786c2a8e8c28735 | 244 | py | Python | batch_script.py | aarjavchauhan/web_visualization | 7a4b8a5d22d140762ae29ec808bb02dbc79763f4 | [
"MIT"
] | 1 | 2020-10-29T03:28:08.000Z | 2020-10-29T03:28:08.000Z | batch_script.py | aarjavchauhan/web_visualization | 7a4b8a5d22d140762ae29ec808bb02dbc79763f4 | [
"MIT"
] | null | null | null | batch_script.py | aarjavchauhan/web_visualization | 7a4b8a5d22d140762ae29ec808bb02dbc79763f4 | [
"MIT"
] | null | null | null | import os
import sys
folder = sys.argv[1]   # root directory to walk
script = sys.argv[2]   # python script to invoke once per file found
for root, dirs, files in os.walk(folder):
    for filename in files:
        data_file = "{}/{}".format(root,filename)
        # NOTE(review): the path is interpolated into a shell command unquoted;
        # filenames with spaces or shell metacharacters will break (or execute).
        # Consider subprocess.run(["python3", script, data_file]) instead.
        os.system("python3 {} {}".format(script,data_file))
| 22.181818 | 59 | 0.639344 | import os
import sys
folder = sys.argv[1]   # root directory to walk
script = sys.argv[2]   # python script to invoke once per file found
for root, dirs, files in os.walk(folder):
    for filename in files:
        data_file = "{}/{}".format(root,filename)
        # NOTE(review): the path is interpolated into a shell command unquoted;
        # filenames with spaces or shell metacharacters will break (or execute).
        # Consider subprocess.run(["python3", script, data_file]) instead.
        os.system("python3 {} {}".format(script,data_file))
| 0 | 0 | 0 |
5b54e28f1e01326c5484bf6e2bf04e4d4c28018a | 9,674 | py | Python | Mixmatch/streetview_dataset/parse_data_to_tfrecord_lib.py | googleinterns/bizview-semi-supervised-learning | 3739ee461c393069d0cf00c462248e5a99e1c55b | [
"Apache-2.0"
] | 1 | 2020-06-26T16:23:35.000Z | 2020-06-26T16:23:35.000Z | Mixmatch/streetview_dataset/parse_data_to_tfrecord_lib.py | googleinterns/bizview-semi-supervised-learning | 3739ee461c393069d0cf00c462248e5a99e1c55b | [
"Apache-2.0"
] | 3 | 2020-11-13T17:47:42.000Z | 2022-02-09T23:39:42.000Z | Mixmatch/streetview_dataset/parse_data_to_tfrecord_lib.py | googleinterns/bizview-semi-supervised-learning | 3739ee461c393069d0cf00c462248e5a99e1c55b | [
"Apache-2.0"
] | 1 | 2020-09-13T07:43:21.000Z | 2020-09-13T07:43:21.000Z | import numpy as np
import tensorflow as tf
import itertools
import os # used for directory operations
import io
from PIL import Image # used to read images from directory
import random
# TF1-style setup: run ops eagerly so .numpy() works on dataset tensors below.
tf.enable_eager_execution()
# Global constants
# Feature keys read from the input detection TFRecord files.
SOURCE_ID = 'image/source_id'
BBOX_CONFIDENCE = 'image/object/bbox/confidence'
BBOX_XMIN = 'image/object/bbox/xmin'
BBOX_YMIN = 'image/object/bbox/ymin'
BBOX_XMAX = 'image/object/bbox/xmax'
BBOX_YMAX = 'image/object/bbox/ymax'
# Detections below 'neg' count as negatives, above 'pos' as positives;
# anything in between is discarded.
CONF_THRESHOLD = {'neg': 0.1, 'pos': 0.9}
# All crops are resized to 64x64 before serialization.
OUTPUT_IMAGE_SIZE = (64, 64)
# Reads tfrecords and parse the labels and data needed for the new dataset.
# Parse and cleanup the labels to a more straigtforward format.
# Transform raw image data and label into a tfexample format.
# Write all images into the test TFrecord file.
# Striped out only the maximum confidence bbox of a image. Function is called in generate_tfexamples_from_detections().
# Striped out ALL bbox where confidence is over threshold. Function is called in generate_tfexamples_from_detections().
# Read image from path and check exclude non RGB image.
# Strip the bboxes from the parsed_image_dataset that are over threshold and added the tfexample to the return list.
# Write positive and negative tfexamples to tfrecord using the writer. A balance boolean parameter can decide to balance the pos and neg examples count.
# Write tfrecords in batches of input record files.
# Filter the dataset with images bbox lower than the threshold, and copy the image bbox to output directory. These images will be handpicked to be used as negative examples in the test set.
| 40.308333 | 189 | 0.686583 | import numpy as np
import tensorflow as tf
import itertools
import os # used for directory operations
import io
from PIL import Image # used to read images from directory
import random
# TF1-style setup: run ops eagerly so .numpy() works on dataset tensors below.
tf.enable_eager_execution()
# Global constants
# Feature keys read from the input detection TFRecord files.
SOURCE_ID = 'image/source_id'
BBOX_CONFIDENCE = 'image/object/bbox/confidence'
BBOX_XMIN = 'image/object/bbox/xmin'
BBOX_YMIN = 'image/object/bbox/ymin'
BBOX_XMAX = 'image/object/bbox/xmax'
BBOX_YMAX = 'image/object/bbox/ymax'
# Detections below 'neg' count as negatives, above 'pos' as positives;
# anything in between is discarded.
CONF_THRESHOLD = {'neg': 0.1, 'pos': 0.9}
# All crops are resized to 64x64 before serialization.
OUTPUT_IMAGE_SIZE = (64, 64)
# Reads tfrecords and parse the labels and data needed for the new dataset.
def read_tfrecord(file_path):
    """Load a TFRecord file and parse each record's source id and bbox fields.

    Returns a tf.data.Dataset of parsed feature dicts keyed by the module's
    SOURCE_ID / BBOX_* constants.
    """
    raw_image_dataset = tf.data.TFRecordDataset(file_path)
    # Create a dictionary describing the features.
    image_feature_description = {
        SOURCE_ID: tf.io.FixedLenFeature([], tf.string),
        BBOX_CONFIDENCE: tf.io.VarLenFeature(tf.float32),
        BBOX_XMIN: tf.io.VarLenFeature(tf.float32),
        BBOX_YMIN: tf.io.VarLenFeature(tf.float32),
        BBOX_XMAX: tf.io.VarLenFeature(tf.float32),
        BBOX_YMAX: tf.io.VarLenFeature(tf.float32),
    }
    # Parse the input tf.Example proto using the dictionary above.
    def _parse_image_function(example_proto):
        return tf.io.parse_single_example(example_proto, image_feature_description)
    parsed_image_dataset = raw_image_dataset.map(_parse_image_function)
    return parsed_image_dataset
# Parse and cleanup the labels to a more straigtforward format.
def parse_detection_confidences(image_features):
    """Return (image_name, confidences, bbox) for one parsed TFRecord entry.

    bbox is a 4 x N array stacked as (xmin, ymin, xmax, ymax), i.e. the
    Left/Top/Right/Bottom order PIL's crop() expects.
    """
    # the format of image_features['image/source_id'] is 'cns/path/to/image_file_name.jpg'
    # NOTE(review): str(bytes) renders as "b'...'"; split('/') drops the "b'"
    # prefix (this assumes the id always contains a '/') and [:-1] strips the
    # trailing quote — confirm ids are always full paths.
    img_name = str(image_features[SOURCE_ID].numpy()).split('/')[-1][:-1]
    confidence = tf.sparse_tensor_to_dense(image_features[BBOX_CONFIDENCE], default_value=0).numpy()
    xmin = tf.sparse_tensor_to_dense(image_features[BBOX_XMIN], default_value=0).numpy()
    ymin = tf.sparse_tensor_to_dense(image_features[BBOX_YMIN], default_value=0).numpy()
    xmax = tf.sparse_tensor_to_dense(image_features[BBOX_XMAX], default_value=0).numpy()
    ymax = tf.sparse_tensor_to_dense(image_features[BBOX_YMAX], default_value=0).numpy()
    bbox = np.vstack((xmin, ymin, xmax, ymax)) # Left, Top, Right, Bottom
    return img_name, confidence, bbox
# Transform raw image data and label into a tfexample format.
def img_to_example(img, label):
    """Serialize a PIL image and an integer label into a tf.train.Example."""
    buffer = io.BytesIO()
    img.save(buffer, format='JPEG')
    jpeg_bytes = buffer.getvalue()
    feature_map = {
        "image": tf.train.Feature(bytes_list=tf.train.BytesList(value=[jpeg_bytes])),
        "label": tf.train.Feature(int64_list=tf.train.Int64List(value=[label])),
    }
    return tf.train.Example(features=tf.train.Features(feature=feature_map))
# Write all images into the test TFrecord file.
def write_tfrecord_from_images(image_folder_path, label, writer):
    """Resize every readable RGB image in a folder and write it with `label`.

    Unreadable or non-RGB images are skipped (with a printed notice for
    unreadable ones); everything else is resized to OUTPUT_IMAGE_SIZE and
    serialized via the `writer`'s write() method.
    """
    for img_name in os.listdir(image_folder_path):
        img_path = os.path.join(image_folder_path, img_name)
        try:
            img = Image.open(img_path, "r")
        except Exception as e:
            print(e)
            print(img_path + " is not valid")
            continue
        # Exclude all non RGB images (anything without exactly 3 bands).
        if len(img.getbands()) != 3:
            continue
        img = img.resize(OUTPUT_IMAGE_SIZE)
        example = img_to_example(img, label)
        writer.write(example.SerializeToString())
# Striped out only the maximum confidence bbox of a image. Function is called in generate_tfexamples_from_detections().
def strip_top_confidence_bbox(confidence, bbox, threshold):
    """Keep only the single highest-confidence detection of an image.

    Returns a one-element list: a positive dict with its bbox column when the
    best score exceeds threshold['pos'], a bare negative dict when there are
    no detections or the best score is below threshold['neg'], and an empty
    list when the best score falls in between.
    """
    results = []
    if confidence.size == 0:
        results.append({'label': 0})
        return results
    best = np.argmax(confidence)
    if confidence[best] > threshold['pos']:
        results.append({'label': 1, 'bbox': bbox[:, best]})
    elif confidence[best] < threshold['neg']:
        results.append({'label': 0})
    return results
# Striped out ALL bbox where confidence is over threshold. Function is called in generate_tfexamples_from_detections().
def strip_all_qualified_bbox(confidence, bbox, threshold):
    """Keep every detection whose score is above 'pos' or below 'neg'.

    Each kept detection becomes {'label': 0 or 1, 'bbox': column i of bbox};
    the label is the score rounded to the nearest integer, which maps
    >threshold['pos'] scores to 1 and <threshold['neg'] scores to 0.
    """
    results = []
    for idx, score in enumerate(confidence):
        if score > threshold['pos'] or score < threshold['neg']:
            results.append({'label': int(round(score)), 'bbox': bbox[:, idx]})
    return results
# Read image from path and check exclude non RGB image.
def read_and_check_image(img_path):
    """Open the image at img_path; return a PIL Image, or None if unusable."""
    try:
        img = Image.open(img_path, "r")
    except Exception as e:
        # Best-effort: report and skip unreadable files rather than aborting.
        print(e)
        print(img_path + " is not valid")
        return None
    # Exclude all non RGB images (anything without exactly 3 bands).
    if len(img.getbands()) != 3:
        return None
    return img
# Strip the bboxes from the parsed_image_dataset that are over threshold and added the tfexample to the return list.
def generate_tfexamples_from_detections(parsed_image_dataset, folder_path, include_top_camera, only_keep_top_confidence):
    """Crop qualified detections from each image and build tf.Examples.

    Returns {0: [negative examples], 1: [positive examples]}. Views named
    *_0 (marker overlay) are always skipped; *_5 (upward view) is skipped
    unless include_top_camera is set.
    """
    # Store examples in a dictionary. 0 for negative examples and 1 for positive examples.
    examples = {0:[], 1:[]}
    for image_features in parsed_image_dataset:
        img_name, confidence, bbox = parse_detection_confidences(image_features)
        # The format of the image_name is XXXXXX_Y.jpg, the Y is the identifier of the view. 1, 2, 3 and 4 are the side views and 5 is the upward view. 0 is the view with markers overlaid.
        view = img_name.split('.')[0][-1]
        keep_image = view != '0' and (include_top_camera or view != '5')
        if not img_name or not keep_image:
            continue
        img_path = os.path.join(folder_path, img_name)
        img = read_and_check_image(img_path)
        if not img:
            continue
        if only_keep_top_confidence:
            target = strip_top_confidence_bbox(confidence, bbox, CONF_THRESHOLD)
        else:
            target = strip_all_qualified_bbox(confidence, bbox, CONF_THRESHOLD)
        if not target:
            continue
        for t in target:
            crop_img = img
            # Negatives produced without a bbox use the whole image.
            if 'bbox' in t:
                crop_img = crop_img.crop(t['bbox'])
            crop_img = crop_img.resize(OUTPUT_IMAGE_SIZE)
            example = img_to_example(crop_img, t['label'])
            examples[t['label']].append(example)
    return examples
# Write positive and negative tfexamples to tfrecord using the writer. A balance boolean parameter can decide to balance the pos and neg examples count.
def write_tfexample_to_tfrecord(positive_examples, negative_examples, balance, writer):
    """Shuffle (deterministically) and write pos/neg tf.Examples via `writer`.

    When `balance` is True, both classes are truncated to the smaller class's
    count before writing. The written content and order are identical to the
    previous implementation; the fix is that the caller's input lists are no
    longer mutated/reordered in place when `balance` is False.
    """
    if balance:
        take = min(len(positive_examples), len(negative_examples))
        examples = positive_examples[:take] + negative_examples[:take]
    else:
        # Concatenate into a new list so the caller's lists are untouched.
        examples = positive_examples + negative_examples
    # Fixed seed keeps the output record order reproducible across runs.
    random.seed(1)
    random.shuffle(examples)
    for example in examples:
        writer.write(example.SerializeToString())
# Write tfrecords in batches of input record files.
def batch_read_write_tfrecords(file_range, input_record_path, input_img_path, writer, detection_property):
    """Collect examples from shards [file_range[0], file_range[1]) and write them out."""
    include_top_camera = detection_property['include_top_camera']
    only_keep_top_confidence = detection_property['only_keep_top_confidence']
    pos_examples = []
    neg_examples = []
    for shard_index in range(file_range[0], file_range[1]):
        shard_name = "./streetlearn_detections_tfexample-{}-of-01000.tfrecord".format(str(shard_index).zfill(5))
        parsed_image_dataset = read_tfrecord(os.path.join(input_record_path, shard_name))
        shard_examples = generate_tfexamples_from_detections(
            parsed_image_dataset, input_img_path, include_top_camera, only_keep_top_confidence)
        neg_examples.extend(shard_examples[0])
        pos_examples.extend(shard_examples[1])
    write_tfexample_to_tfrecord(pos_examples, neg_examples, detection_property['balance'], writer)
# Filter the dataset with images bbox lower than the threshold, and copy the image bbox to output directory. These images will be handpicked to be used as negative examples in the test set.
def filter_image_with_confidence_threshold(parsed_image_dataset, input_folder_path, output_folder_path, neg_threshold):
    """Crop qualifying detection bboxes and save them under output_folder_path.

    Args:
        parsed_image_dataset: iterable of parsed TFRecord image features.
        input_folder_path: directory holding the source images.
        output_folder_path: directory receiving the cropped, resized patches.
        neg_threshold: confidence threshold for the negative class.
    """
    for image_features in parsed_image_dataset:
        img_name, confidence, bbox = parse_detection_confidences(image_features)
        # Guard first: an empty name would make the view lookup below fail
        # with an IndexError before the original check ever ran.
        if not img_name:
            continue
        # The format of the image_name is XXXXXX_Y.jpg; Y identifies the view.
        # 1-4 are side views, 5 is the upward view, 0 has markers overlaid.
        view = img_name.split('.')[0][-1]
        if view == '0' or view == '5':
            continue
        img = read_and_check_image(os.path.join(input_folder_path, img_name))
        if not img:
            continue
        threshold = {'neg': neg_threshold, 'pos': 1.0}
        target = strip_all_qualified_bbox(confidence, bbox, threshold)
        if not target:
            continue
        for i, t in enumerate(target):
            crop_img = img
            if 'bbox' in t:
                crop_img = crop_img.crop(t['bbox'])
            crop_img = crop_img.resize(OUTPUT_IMAGE_SIZE)
            stem, ext = img_name.split('.')[0], img_name.split('.')[1]
            new_file_name = '{}_{}.{}'.format(stem, i, ext)
            # BUG FIX: the original called os.path.join(output_folder_path + new_file_name),
            # concatenating the two strings; paths broke unless the folder
            # ended with a separator. Join the components properly.
            crop_img.save(os.path.join(output_folder_path, new_file_name))
| 7,721 | 0 | 242 |
94f8b2db88fc9ae505d890d4c2af2378ecf5a928 | 1,401 | py | Python | tests/test_cache.py | rexyeah/jira-cli | 6a03e904b0aca4905ea8f5c22239f84d7a82b32d | [
"MIT"
] | 125 | 2015-02-05T01:06:07.000Z | 2021-12-08T19:20:26.000Z | tests/test_cache.py | lewis6991/jira-cli | a56540231fc189ac3823df97bd4d30272430446e | [
"MIT"
] | 90 | 2015-02-12T12:41:15.000Z | 2022-02-21T02:07:17.000Z | tests/test_cache.py | lewis6991/jira-cli | a56540231fc189ac3823df97bd4d30272430446e | [
"MIT"
] | 68 | 2015-01-30T14:17:29.000Z | 2021-05-20T17:22:12.000Z | import os
import unittest
import tempfile
import hiro
import jiracli.cache
| 32.581395 | 66 | 0.618844 | import os
import unittest
import tempfile
import hiro
import jiracli.cache
class CacheTests(unittest.TestCase):
    """Tests for jiracli.cache: storage, time-based expiry, clearing, decorator."""
    def setUp(self):
        # Point the cache module at a throwaway directory for each test.
        self.cache_dir = tempfile.mkdtemp()
        jiracli.cache.CACHE_DIR = self.cache_dir
    def test_cache_data_not_exist(self):
        data = jiracli.cache.CachedData("foobar")
        # assertIsNone gives clearer failure output than assertTrue(x == None).
        self.assertIsNone(data.get())
        data.update({"foo": "bar"})
        self.assertEqual(jiracli.cache.CachedData("foobar").get(),
                         {"foo": "bar"})
    def test_cache_invalidate(self):
        with hiro.Timeline().freeze() as timeline:
            data = jiracli.cache.CachedData("foobar")
            data.update({"foo": "bar"})
            # Jump just past a 24-hour TTL; the entry must then read as absent.
            timeline.forward(1 + 60 * 60 * 24)
            self.assertIsNone(data.get())
    def test_clear_cache(self):
        data = jiracli.cache.CachedData("foobar")
        data.update({"foo": "bar"})
        self.assertTrue(os.path.isfile(data.path))
        jiracli.cache.clear_cache(data)
        self.assertFalse(os.path.isfile(data.path))
        jiracli.cache.clear_cache()
        self.assertFalse(os.path.isdir(self.cache_dir))
    def test_decorated(self):
        @jiracli.cache.cached("foo")
        def func(a, b):
            return a + b
        self.assertEqual(func(1, 2), func(1, 2))
        self.assertNotEqual(func(1, 2), func(3, 4))
        # One cache file per distinct argument tuple.
        self.assertEqual(len(os.listdir(self.cache_dir)), 2)
| 1,155 | 15 | 155 |
066eb90268da41c2823854bf89d685050ff3e7e1 | 1,806 | py | Python | src/p_type_service.py | amitsou/cloud_services | 22a2381227ecab8d1626e3dfa961821954188327 | [
"MIT"
] | null | null | null | src/p_type_service.py | amitsou/cloud_services | 22a2381227ecab8d1626e3dfa961821954188327 | [
"MIT"
] | null | null | null | src/p_type_service.py | amitsou/cloud_services | 22a2381227ecab8d1626e3dfa961821954188327 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(
    os.path.realpath(__file__)), "../"))
from Functions import processing_utils as pu
def on_log(client, userdata, level, buf):
    """
    Log callback: echo the paho-mqtt client's internal log buffer.
    """
    # Removed the redundant trailing `pass` after the print statement.
    print("log: ", buf)
def on_disconnect(client, userdata, flags, rc=0):
    """Report the disconnect flags, result code and client id."""
    details = "DisConnected flags {0}, result code:{1}, client_id: {2} ".format(flags, rc, client._client_id)
    print(details)
def on_message(client, userdata, message):
    """Forward each incoming MQTT message to the motion classifier."""
    pu.motion_clf(message)
def on_connect(client, userdata, flags, rc):
    """Subscribe on success; stop the loop and exit on known failure codes."""
    if rc == 0:
        print("connecting to broker ", broker)
        print("subscribing to topics ")
        client.subscribe(in_topic)
        return
    if rc == 3:
        print("server unavailable")
        client.loop_stop()
        sys.exit("Server is unavailable, please try later")
    if rc == 5:
        print("Invalid Credentials")
        client.loop_stop()
        sys.exit(5)
    print("Bad connection, returned code=", rc)
    client.loop_stop()
    sys.exit("Bad connection, returned code={0}".format(rc))
if __name__ == '__main__':
    # Imported here because the module-level import block lacks it.
    import paho.mqtt.client as mqtt
    # BUG FIX: the original read `u_name. u_pass, ...` (attribute access on an
    # undefined name instead of tuple unpacking) and then referenced the
    # never-defined `username`/`user_pass` variables below.
    u_name, u_pass, in_topic, out_topic = pu.p_type_service_args()
    broker = "localhost"
    client = mqtt.Client("P-type")
    client.username_pw_set(u_name, u_pass)
    client.on_message = on_message
    client.on_log = on_log
    client.on_connect = on_connect
    client.on_disconnect = on_disconnect
    try:
        client.connect(broker)
    except Exception:  # narrowed from a bare except so Ctrl-C still works
        print("Error connecting")
        sys.exit()
    client.loop_forever()
| 26.173913 | 106 | 0.633444 | # -*- coding: utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(
    os.path.realpath(__file__)), "../"))
from Functions import processing_utils as pu
def on_log(client, userdata, level, buf):
    """
    Log callback: echo the paho-mqtt client's internal log buffer.
    """
    # Removed the redundant trailing `pass` after the print statement.
    print("log: ", buf)
def on_disconnect(client, userdata, flags, rc=0):
    """Report the disconnect flags, result code and client id."""
    details = "DisConnected flags {0}, result code:{1}, client_id: {2} ".format(flags, rc, client._client_id)
    print(details)
def on_message(client, userdata, message):
    """Forward each incoming MQTT message to the motion classifier."""
    pu.motion_clf(message)
def on_connect(client, userdata, flags, rc):
    """Subscribe on success; stop the loop and exit on known failure codes."""
    if rc == 0:
        print("connecting to broker ", broker)
        print("subscribing to topics ")
        client.subscribe(in_topic)
        return
    if rc == 3:
        print("server unavailable")
        client.loop_stop()
        sys.exit("Server is unavailable, please try later")
    if rc == 5:
        print("Invalid Credentials")
        client.loop_stop()
        sys.exit(5)
    print("Bad connection, returned code=", rc)
    client.loop_stop()
    sys.exit("Bad connection, returned code={0}".format(rc))
if __name__ == '__main__':
    # Imported here because the module-level import block lacks it.
    import paho.mqtt.client as mqtt
    # BUG FIX: the original read `u_name. u_pass, ...` (attribute access on an
    # undefined name instead of tuple unpacking) and then referenced the
    # never-defined `username`/`user_pass` variables below.
    u_name, u_pass, in_topic, out_topic = pu.p_type_service_args()
    broker = "localhost"
    client = mqtt.Client("P-type")
    client.username_pw_set(u_name, u_pass)
    client.on_message = on_message
    client.on_log = on_log
    client.on_connect = on_connect
    client.on_disconnect = on_disconnect
    try:
        client.connect(broker)
    except Exception:  # narrowed from a bare except so Ctrl-C still works
        print("Error connecting")
        sys.exit()
    client.loop_forever()
| 0 | 0 | 0 |
d2422cbcadfa238af5f96116a4735b6326ae1570 | 108 | py | Python | insomniac/extra_features/action_warmup.py | chikko80/Insomniac | 2d49a6d4e5a15eb63bddd9aace3cc872cf40b01a | [
"MIT"
] | null | null | null | insomniac/extra_features/action_warmup.py | chikko80/Insomniac | 2d49a6d4e5a15eb63bddd9aace3cc872cf40b01a | [
"MIT"
] | null | null | null | insomniac/extra_features/action_warmup.py | chikko80/Insomniac | 2d49a6d4e5a15eb63bddd9aace3cc872cf40b01a | [
"MIT"
] | null | null | null | from insomniac import activation_controller
# NOTE(review): executes code fetched at runtime from the insomniac package;
# safe only if get_extra_feature returns trusted, locally bundled code.
exec(activation_controller.get_extra_feature("action_warmup"))
| 27 | 62 | 0.87963 | from insomniac import activation_controller
# NOTE(review): executes code fetched at runtime from the insomniac package;
# safe only if get_extra_feature returns trusted, locally bundled code.
exec(activation_controller.get_extra_feature("action_warmup"))
| 0 | 0 | 0 |
fdc6ea6fc9fd6c8cf57341345967e0cba0bde198 | 4,677 | py | Python | minimir/NewbieAction.py | TinyZzh/note-python | 7698f01ece80d6670d2af5707c32504f49eba36c | [
"Apache-2.0"
] | null | null | null | minimir/NewbieAction.py | TinyZzh/note-python | 7698f01ece80d6670d2af5707c32504f49eba36c | [
"Apache-2.0"
] | null | null | null | minimir/NewbieAction.py | TinyZzh/note-python | 7698f01ece80d6670d2af5707c32504f49eba36c | [
"Apache-2.0"
] | null | null | null | # -*- coding:UTF-8 -*-
import logging
from datetime import datetime
from typing import List, Callable
from minimir import Struct
from minimir.BattleAction import BattleAction
from minimir.Utils import Utils
| 41.758929 | 116 | 0.492196 | # -*- coding:UTF-8 -*-
import logging
from datetime import datetime
from typing import List, Callable
from minimir import Struct
from minimir.BattleAction import BattleAction
from minimir.Utils import Utils
class NewbieAction(BattleAction):
    # Newbie account guidance:
    # 1. Targets new accounts below level 50 with reincarnation level 0.
    # 2. Prefers AFK grinding; interval = ((level / 10) + 1) * 10 minutes.
    #    Auto-equips upgrades and automatically attempts map progression.
    __logger = logging.getLogger(__name__)
    def evaluate(self) -> bool:
        """Return True when this action applies to the current player."""
        if self.yield_wait_for():
            return False
        # Reincarnation below 1 and level below 50.
        return self._player.met < 1 and self._player.lvl < 50
    def execute(self) -> bool:
        """Run the newbie battle routines in priority order; stop at the first failure."""
        # Illusion realm - secret realm - map-progress BOSS - PK - ...
        _battles: List[Callable] = [
            self.__newbie_upgrade__,
            self.__hj_fight,
            self.__mj_fight,
            self.__fight_fight,
        ]
        for func in _battles:
            if not func():
                break
        return True
    # Low-level alts mainly grind AFK, half an hour per session.
    # After gear upgrades, try illusion-realm fights ten times.
    def __newbie_upgrade__(self) -> bool:
        """Keep the player AFK-grinding; restart the session once it exceeds the configured duration."""
        if not self._player.guaji:
            self.mir_req("fight", "guaji", id=self._player.map)
            self._player.guajitime = datetime.now()
            pass
        _now_ = datetime.now()
        # Session expired: stop grinding, tidy the bag, then restart.
        if (_now_ - self._player.guajitime).seconds > self._config.threshold_guaji_seconds:
            if self.mir_req_once("fight", "guajioff", id=self._player.map):
                self.auto_arrange_bag()
                pass
            if self.mir_req_once("fight", "guaji", id=self._player.map):
                self._player.guaji = True
                self._player.guajitime = _now_
                pass
            pass
        pass
        return False
    # Tidy the bag - equipment with talent or a high coefficient is stored automatically.
    def auto_arrange_bag(self):
        """Sell, use, donate or store every item currently in the bag."""
        self.__logger.info("=================== {}:新账户整理背包 =======================".format(self._player.name))
        # Equipment currently worn by the player.
        self.refresh_body_item()
        # resp = self.mir_req("item", "loaditem", type=3, ku=0)
        # Warehouse.
        # resp = self.mir_req("item", "loaditem", type=2, ku=1)
        # Bag.
        resp = self.mir_req_once("item", "loaditem", type=1, ku=0)
        if resp:
            _item_ary: List[Struct.ItemInfo] = []
            for _ri in resp['item']:
                _info = Struct.ItemInfo()
                for fn, fv in _ri.items():
                    Utils.reflect_set_field([_info], fn, fv)
                    pass
                _item_ary.append(_info)
                # Items configured for automatic sale.
                if _info.itemid in self._config.bag_auto_sell_item_list:
                    self.mir_req("item", "sell", id=_info.id, num=_info.num)
                    self.__logger.info("出售:{}, 数量:{}. info:{}".format(Struct.ItemInfo.tpl_items()[_info.itemid],
                                                                    _info.num, _info))
                # Items configured for automatic use.
                elif _info.itemid in self._config.bag_auto_use_item_list:
                    self.mir_req("item", "yong", id=_info.id, num=_info.num, seat=0)
                    self.__logger.info("使用:{}, 数量:{}. info:{}".format(Struct.ItemInfo.tpl_items()[_info.itemid],
                                                                    _info.num, _info))
                # Automatic guild donation - guild supplies (item id 339).
                elif _info.itemid == 339:
                    if hasattr(self._player, 'hh') and self._player.hh is not None and self._player.hh.has_hh:
                        self.mir_req("hh", "gave", num=_info.num)
                        self.__logger.info("捐献:{}, 数量:{}. info:{}".format(Struct.ItemInfo.tpl_items()[_info.itemid],
                                                                        _info.num, _info))
                        pass
                    pass
                elif _info.itemid in Struct.equipment_attr and self._config.auto_use_better_equipment:
                    # Automatically swap to better equipment.
                    if self._player.cmp_item(_info, ):
                        pass
                    pass
                # Auto-store check: luck, talent, or total coefficient above 10.
                elif _info.x6 > 0 or _info.g1 > 0 or (_info.x1 + _info.x2 + _info.x3 + _info.x4 + _info.x5) > 10:
                    # Move from the bag into warehouse 1.
                    self.mir_req("item", "itemku", id=_info.id, type=1, ku=self._config.auto_save_item_ku)
                    _li = "保存:{}, 数量:{}. info:{}, 仓库:{}".format(Struct.ItemInfo.tpl_items()[_info.itemid],
                                                               _info.num,
                                                               _info, self._config.auto_save_item_ku)
                    self.__logger.info(_li)
                else:
                    pass
                pass
            # TODO: smelt the remaining junk in one click.
            pass
        return
| 4,351 | 514 | 23 |
43a84bd4eb3fb5d8fa0a3d8ae221862f3ed8789d | 5,412 | py | Python | m3u8_To_MP4/v2_multithreads_processor.py | songs18/m3u8_To_MP4 | cec3fedd96b1872dc823d465bcb2afc5d0e7e8d2 | [
"MIT"
] | 10 | 2021-06-08T00:20:14.000Z | 2022-02-01T17:27:36.000Z | m3u8_To_MP4/v2_multithreads_processor.py | songs18/m3u8_To_MP4 | cec3fedd96b1872dc823d465bcb2afc5d0e7e8d2 | [
"MIT"
] | 4 | 2021-10-31T09:28:45.000Z | 2022-03-06T08:07:35.000Z | m3u8_To_MP4/v2_multithreads_processor.py | songs18/m3u8_To_MP4 | cec3fedd96b1872dc823d465bcb2afc5d0e7e8d2 | [
"MIT"
] | 1 | 2022-03-20T03:38:40.000Z | 2022-03-20T03:38:40.000Z | # -*- coding: utf-8 -*-
import concurrent.futures
import logging
import os
import sys
from Crypto.Cipher import AES
from m3u8_To_MP4 import v2_abstract_task_processor
from m3u8_To_MP4.helpers import path_helper
from m3u8_To_MP4.helpers import printer_helper
from m3u8_To_MP4.networks.synchronous.sync_http_requester import request_for
| 48.756757 | 205 | 0.631559 | # -*- coding: utf-8 -*-
import concurrent.futures
import logging
import os
import sys
from Crypto.Cipher import AES
from m3u8_To_MP4 import v2_abstract_task_processor
from m3u8_To_MP4.helpers import path_helper
from m3u8_To_MP4.helpers import printer_helper
from m3u8_To_MP4.networks.synchronous.sync_http_requester import request_for
def download_segment(segment_url, customized_http_header):
response_code, response_content = request_for(segment_url, customized_http_header=customized_http_header)
return response_code, response_content
class MultiThreadsFileCrawler(v2_abstract_task_processor.AbstractFileCrawler):
def _fetch_segments_to_local_tmpdir(self, key_segments_pairs):
if len(key_segments_pairs) < 1:
return
progress_bar = printer_helper.ProcessBar(self.num_fetched_ts_segments, self.num_fetched_ts_segments + len(key_segments_pairs), 'segment set', 'downloading...', 'downloaded segments successfully!')
key_url_encrypted_data_triple = list()
with concurrent.futures.ThreadPoolExecutor(max_workers=self.num_concurrent) as executor:
while len(key_segments_pairs) > 0:
future_2_key_and_url = {executor.submit(download_segment, segment_url, self.customized_http_header): (key, segment_url) for key, segment_url in key_segments_pairs}
response_code, response_data = None, None
for future in concurrent.futures.as_completed(future_2_key_and_url):
key, segment_url = future_2_key_and_url[future]
try:
response_code, response_data = future.result()
except Exception as exc:
logging.exception('{} generated an exception: {}'.format(segment_url, exc))
if response_code == 200:
key_url_encrypted_data_triple.append((key, segment_url, response_data))
key_segments_pairs.remove((key, segment_url))
progress_bar.update()
if len(key_segments_pairs) > 0:
sys.stdout.write('\n')
logging.info('{} segments are failed to download, retry...'.format(len(key_segments_pairs)))
logging.info('decrypt and dump segments...')
for key, segment_url, encrypted_data in key_url_encrypted_data_triple:
file_name = path_helper.resolve_file_name_by_uri(segment_url)
file_path = os.path.join(self.tmpdir, file_name)
if key is not None:
crypt_ls = {"AES-128": AES}
crypt_obj = crypt_ls[key.method]
cryptor = crypt_obj.new(key.value.encode(), crypt_obj.MODE_CBC)
encrypted_data = cryptor.decrypt(encrypted_data)
with open(file_path, 'wb') as fin:
fin.write(encrypted_data)
class MultiThreadsUriCrawler(v2_abstract_task_processor.AbstractUriCrawler):
    # Same download/decrypt/dump pipeline as MultiThreadsFileCrawler, for
    # URI-sourced playlists: thread-pool downloads with retry, then AES-128
    # decryption (when a key is present) and write-out into self.tmpdir.
    def _fetch_segments_to_local_tmpdir(self, key_segments_pairs):
        """Download, decrypt and persist every segment in key_segments_pairs.

        key_segments_pairs: list of (key, segment_url); key is None for
        unencrypted segments. Successful pairs are removed from the list,
        so each pass of the while-loop re-submits only the failures.
        """
        if len(key_segments_pairs) < 1:
            return
        progress_bar = printer_helper.ProcessBar(self.num_fetched_ts_segments, self.num_fetched_ts_segments + len(key_segments_pairs), 'segment set', 'downloading...', 'downloaded segments successfully!')
        key_url_encrypted_data_triple = list()
        with concurrent.futures.ThreadPoolExecutor(max_workers=self.num_concurrent) as executor:
            while len(key_segments_pairs) > 0:
                # One download task per remaining segment.
                future_2_key_and_url = {executor.submit(download_segment, segment_url, self.customized_http_header): (key, segment_url) for key, segment_url in key_segments_pairs}
                response_code, response_data = None, None
                for future in concurrent.futures.as_completed(future_2_key_and_url):
                    key, segment_url = future_2_key_and_url[future]
                    try:
                        response_code, response_data = future.result()
                    except Exception as exc:
                        logging.exception('{} generated an exception: {}'.format(segment_url, exc))
                    # Only HTTP 200 counts as success; failures stay queued.
                    if response_code == 200:
                        key_url_encrypted_data_triple.append((key, segment_url, response_data))
                        key_segments_pairs.remove((key, segment_url))
                        progress_bar.update()
            # NOTE(review): the while-loop only exits once the list is empty,
            # so this retry message appears to be unreachable — confirm intent.
            if len(key_segments_pairs) > 0:
                sys.stdout.write('\n')
                logging.info('{} segments are failed to download, retry...'.format(len(key_segments_pairs)))
        logging.info('decrypt and dump segments...')
        for key, segment_url, encrypted_data in key_url_encrypted_data_triple:
            file_name = path_helper.resolve_file_name_by_uri(segment_url)
            file_path = os.path.join(self.tmpdir, file_name)
            if key is not None:
                crypt_ls = {"AES-128": AES}
                crypt_obj = crypt_ls[key.method]
                # NOTE(review): AES.new is given no IV for CBC mode; verify the
                # installed Crypto library accepts this (pycryptodome requires one).
                cryptor = crypt_obj.new(key.value.encode(), crypt_obj.MODE_CBC)
                encrypted_data = cryptor.decrypt(encrypted_data)
            with open(file_path, 'wb') as fin:
                fin.write(encrypted_data)
| 4,812 | 112 | 133 |
17114d469127b50bd167d2bb7eaef372e81edb17 | 295 | py | Python | todo/TodoBackend/serializers.py | SumitPatel12/Atomic_Task | bfb2f36b5e00b0ff6952abb7896dc09975f838bf | [
"BSD-3-Clause"
] | null | null | null | todo/TodoBackend/serializers.py | SumitPatel12/Atomic_Task | bfb2f36b5e00b0ff6952abb7896dc09975f838bf | [
"BSD-3-Clause"
] | null | null | null | todo/TodoBackend/serializers.py | SumitPatel12/Atomic_Task | bfb2f36b5e00b0ff6952abb7896dc09975f838bf | [
"BSD-3-Clause"
] | 1 | 2021-06-20T10:43:16.000Z | 2021-06-20T10:43:16.000Z | from django.db import models
from django.db.models import fields
from rest_framework import serializers
from .models import Todo | 32.777778 | 76 | 0.735593 | from django.db import models
from django.db.models import fields
from rest_framework import serializers
from .models import Todo
class TodoSerializer(serializers.ModelSerializer):
class Meta:
model = Todo
fields = ['todo_id', 'title', 'description', 'completed', 'user_id'] | 0 | 144 | 23 |
fddde5375c0f8287d9c9492737894aa70c445e98 | 362 | py | Python | rippl/legislature/queries/districts.py | gnmerritt/dailyrippl | 9a0f9615ba597a475dbd6305b589827cb2d97b03 | [
"MIT"
] | 6 | 2016-12-03T20:30:43.000Z | 2017-01-10T01:50:09.000Z | rippl/legislature/queries/districts.py | gnmerritt/dailyrippl | 9a0f9615ba597a475dbd6305b589827cb2d97b03 | [
"MIT"
] | 24 | 2016-11-30T02:31:13.000Z | 2020-02-25T22:47:27.000Z | rippl/legislature/queries/districts.py | gnmerritt/dailyrippl | 9a0f9615ba597a475dbd6305b589827cb2d97b03 | [
"MIT"
] | 1 | 2016-12-25T21:42:31.000Z | 2016-12-25T21:42:31.000Z | from rest_framework import serializers
from legislature.models import District, State
| 20.111111 | 54 | 0.709945 | from rest_framework import serializers
from legislature.models import District, State
class StateSerializer(serializers.ModelSerializer):
class Meta:
model = State
fields = '__all__'
class DistrictSerializer(serializers.ModelSerializer):
state = StateSerializer()
class Meta:
model = District
fields = '__all__'
| 0 | 227 | 46 |
ec32def252ff18f8e32835cc67626d633326507f | 1,089 | py | Python | forms/register_form.py | VNCompany/vnforum | 770aca3a94ad1ed54628d48867c299d83215f75a | [
"Unlicense"
] | null | null | null | forms/register_form.py | VNCompany/vnforum | 770aca3a94ad1ed54628d48867c299d83215f75a | [
"Unlicense"
] | null | null | null | forms/register_form.py | VNCompany/vnforum | 770aca3a94ad1ed54628d48867c299d83215f75a | [
"Unlicense"
] | null | null | null | from flask_wtf import FlaskForm
from wtforms import PasswordField, RadioField, StringField
from wtforms.fields.html5 import EmailField
import wtforms.validators as validate
| 60.5 | 108 | 0.584022 | from flask_wtf import FlaskForm
from wtforms import PasswordField, RadioField, StringField
from wtforms.fields.html5 import EmailField
import wtforms.validators as validate
class RegisterForm(FlaskForm):
    """Registration form; field labels and validation messages are in Russian."""
    email = EmailField("Email: ", validators=[validate.DataRequired()])
    # Login used for authentication, 4-100 characters.
    login = StringField("Логин (нужен для авторизации): ", validators=[validate.DataRequired(),
                                                                       validate.Length(min=4, max=100)])
    # Password, 8-64 characters; must match the confirmation field below.
    password = PasswordField("Пароль: ", validators=[validate.DataRequired(),
                                                     validate.Length(min=8, max=64),
                                                     validate.EqualTo("confirm", "Пароли не совпадают")])
    confirm = PasswordField("Повтор:", validators=[validate.DataRequired()])
    # Display nickname, at most 25 characters.
    nickname = StringField("Никнейм: ", validators=[validate.DataRequired(),
                                                    validate.Length(max=25)])
    sex = RadioField("Выберите пол: ", choices=[("male", "Мужской"), ("female", "Женский")], default="male")
| 0 | 977 | 23 |
9417b738b7f84aca758c1404b51b45d994effd14 | 6,665 | py | Python | robotic_object_search/House3D/utils/helper.py | Xin-Ye-1/HRL-GRG | b36c0c3f9f7ec3a1b59925df0be514f7540ee122 | [
"Apache-2.0"
] | 7 | 2021-03-28T03:13:01.000Z | 2022-03-30T03:16:04.000Z | utils/helper.py | Xin-Ye-1/HIEM | 6764f579eef6ec92dd85a005af27419f630df7da | [
"Apache-2.0"
] | null | null | null | utils/helper.py | Xin-Ye-1/HIEM | 6764f579eef6ec92dd85a005af27419f630df7da | [
"Apache-2.0"
] | 2 | 2021-06-22T08:06:28.000Z | 2021-11-09T07:33:59.000Z | #!/usr/bin/env python
import tensorflow as tf
import numpy as np
import scipy.signal
from scipy import misc
import scipy.io
from PIL import Image
import json
import os
from offline_feature import *
from bbox_tool import *
import glob
from reward_function import *
from semantic_environment import *
from shortest_path import *
IMAGE_WIDTH = 600
IMAGE_HEIGHT = 450
# cfg = json.load(open('../config.json','r'))
cfg = json.load(open(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'config.json'),'r'))
| 37.44382 | 114 | 0.635259 | #!/usr/bin/env python
import tensorflow as tf
import numpy as np
import scipy.signal
from scipy import misc
import scipy.io
from PIL import Image
import json
import os
from offline_feature import *
from bbox_tool import *
import glob
from reward_function import *
from semantic_environment import *
from shortest_path import *
IMAGE_WIDTH = 600
IMAGE_HEIGHT = 450
# cfg = json.load(open('../config.json','r'))
cfg = json.load(open(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'config.json'),'r'))
def get_distinct_list(inputs, add_on=None, remove=None):
    """Flatten *inputs* into an order-preserving list of unique elements.

    add_on, when given, is placed first; any element equal to *remove*
    is dropped. First-appearance order is preserved.
    """
    distinct = [] if add_on is None else [add_on]
    for group in inputs:
        for item in group:
            if item != remove and item not in distinct:
                distinct.append(item)
    return distinct
def global2loc(global_id, map):
    """Convert a flat state id into an (x, y, orientation) location.

    Each map cell has 4 orientations, so global_id = cell_index * 4 + orien.
    """
    # BUG FIX: `global_id / 4` is float division under Python 3, which makes
    # the integer-keyed dict lookup fail; divmod yields the integer index.
    idx, orien = divmod(global_id, 4)
    (x, y) = map[idx]
    return (x, y, orien)
def load_map(scene_dir):
    """Parse <scene_dir>/map.txt into {index: (x, y)}.

    Lines without exactly three whitespace-separated fields are ignored.
    """
    index_to_pos = {}
    with open('%s/map.txt' % scene_dir, 'r') as map_file:
        for row in map_file:
            fields = row.split()
            if len(fields) == 3:
                index_to_pos[int(fields[0])] = (int(fields[1]), int(fields[2]))
    return index_to_pos
def get_starting_points_according_to_distance(scene, targets):
    """Load per-target starting points, parsed into (x, y, orien) tuples, for a scene."""
    path = '%s/Environment/houses/%s/starting_points_according_to_distance_1.json' % (cfg['codeDir'], scene)
    all_starting_points = json.load(open(path, 'r'))
    def parse_pos(text):
        # Strings look like "(x,y,orien)"; strip the parens and split.
        x, y, orien = text.replace('(', '').replace(')', '').split(',')
        return (int(x), int(y), int(orien))
    return [[parse_pos(s) for s in all_starting_points.get(target, [])]
            for target in targets]
def sort_starting_points_according_to_distance(scene, targets, starting_points):
    """Order each target's starting points by their precomputed minimal step counts."""
    min_steps = json.load(open('%s/Environment/houses/%s/minimal_steps_1.json' %
                               (cfg['codeDir'], scene), 'r'))
    sorted_starting_points = []
    for target_index, target in enumerate(targets):
        candidates = starting_points[target_index]
        ranked = sorted((min_steps[str(pos)][target], pos) for pos in candidates)
        sorted_starting_points.append([pair[-1] for pair in ranked])
    return sorted_starting_points
def get_starting_points(scene, targets, use_gt=True, use_semantic=False):
    """Collect, per target, candidate starting locations for a scene.

    Semantic mode keeps states where the target class is visible but the
    reward function does not already consider it reached; bbox mode keeps
    states with any non-empty ground-truth bbox for the target.
    Returns a list parallel to *targets* of (x, y, orien) tuples.
    """
    if use_semantic:
        #print targets
        feature_tool = Feature_Tool(scene_name=scene, feature_pattern='_deeplab_depth_semantic_10')
        map = load_map(feature_tool.feature_dir)
        num_states = len(feature_tool.all_states_features)
        starting_points = [[] for i in range(len(targets))]
        class2id = json.load(open('%s/Environment/class2id.json' % cfg['codeDir'], 'r'))
        for global_id in range(num_states):
            semantic = feature_tool.get_state_feature(global_id)
            unique_labels, counts = np.unique(semantic, return_counts=True)
            for i, target in enumerate(targets):
                target_id = class2id[target]
                # NOTE(review): `area` is a numpy array (empty when the class
                # is absent); `area > 0` relies on single-element truthiness.
                area = counts[unique_labels==target_id]
                _, done,_ = increasing_area_reward(scene, target, area, area)
                if area>0 and not done:
                    starting_point = global2loc(global_id, map)
                    # target_points = get_target_points(scene, [target])[0]
                    # min_steps, _ = get_minimal_steps(scene, [starting_point], target_points)
                    # if min_steps[0] is not None:
                    starting_points[i].append(starting_point)
        #print np.array(starting_points).shape
        return starting_points
    else:
        bbox_tool = Bbox_Tool(scene, use_gt=use_gt)
        map = load_map(bbox_tool.house_dir)
        starting_points = [[] for i in range(len(targets))]
        num_states = len(glob.glob(os.path.join(bbox_tool.bbox_dir, '*')))
        for global_id in range(num_states):
            for i, target in enumerate(targets):
                # threshold = get_threshod(scene,target)
                (x,y,w,h) = bbox_tool.get_gt_bbox(global_id, target)
                # x1 = x - (1.0 * w) / 2
                # y1 = y - (1.0 * h) / 2
                # x2 = x1 + w
                # y2 = y1 + h
                # if x1 != 0 and y1 != 0 and x2 != IMAGE_WIDTH - 1 and y2 != IMAGE_HEIGHT - 1:
                # Any visible bbox qualifies as a starting state.
                if w*h != 0: # and w*h < threshold:
                    starting_points[i].append(global2loc(global_id, map))
                    #starting_points[i].append(global_id)
    return starting_points
def get_target_points(scene, targets, use_gt=True):
    """Collect, per target, every location whose bbox area meets the threshold."""
    bbox_tool = Bbox_Tool(scene, use_gt=use_gt)
    grid_map = load_map(bbox_tool.house_dir)
    # target_points = [[] for _ in range(len(targets))]
    target_points = {target: [] for target in targets}
    num_states = len(glob.glob(os.path.join(bbox_tool.bbox_dir, '*')))
    for global_id in range(num_states):
        for i, target in enumerate(targets):
            threshod = get_threshod(scene, target, use_gt=use_gt)
            (x, y, w, h) = bbox_tool.get_gt_bbox(global_id, target)
            if w * h >= threshod:
                # target_points[i].append(global2loc(global_id, map))
                target_points[target].append(global2loc(global_id, grid_map))
    # with open('all_target_positions.json','wb') as f:
    #     json.dump(target_points, f)
    return target_points
def get_minimal_steps(scene, starting_points, target_points):
    """Run uniform-cost search from each start; return (steps, trajectories)."""
    env = Semantic_Environment(scene)
    results = [uniformCostSearch(env, start, target_points) for start in starting_points]
    trajectories = [pair[0] for pair in results]
    steps = [pair[1] for pair in results]
    return steps, trajectories
def update_target_graph(from_scope, to_scope, tau=1):
    """Build ops that soft-update *to_scope* variables toward *from_scope* (Polyak tau)."""
    from_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, from_scope)
    to_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, to_scope)
    return [dst.assign(src.value() * tau + (1 - tau) * dst.value())
            for src, dst in zip(from_vars, to_vars)]
def update_multiple_target_graphs(from_scopes, to_scopes, tau=1):
    """Concatenate soft-update ops for several (from, to) scope pairs."""
    ops = []
    for src_scope, dst_scope in zip(from_scopes, to_scopes):
        ops.extend(update_target_graph(src_scope, dst_scope, tau))
    return ops
| 5,914 | 0 | 230 |
b19a9236d00b7d2bec4c999cbd05278cd754cce4 | 671 | py | Python | examples/vdist.py | unjambonakap/pymap3d | 5a0ca7b6964917b954edbd50844c36c54b5e7c73 | [
"BSD-2-Clause"
] | null | null | null | examples/vdist.py | unjambonakap/pymap3d | 5a0ca7b6964917b954edbd50844c36c54b5e7c73 | [
"BSD-2-Clause"
] | null | null | null | examples/vdist.py | unjambonakap/pymap3d | 5a0ca7b6964917b954edbd50844c36c54b5e7c73 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
from pymap3d.vincenty import vdist
from argparse import ArgumentParser
if __name__ == '__main__': # pragma: no cover
main()
| 31.952381 | 79 | 0.675112 | #!/usr/bin/env python
from pymap3d.vincenty import vdist
from argparse import ArgumentParser
def main():
    """CLI entry point: print the vdist result between two WGS-84 coordinates."""
    parser = ArgumentParser(description='vdist distance between WGS-84 coordinates')
    for arg_name, arg_help in (('lat1', 'latitude1 WGS-84 [degrees]'),
                               ('lon1', 'longitude1 WGS-84 [degrees]'),
                               ('lat2', 'latitude2 WGS-84 [degrees]'),
                               ('lon2', 'longitude2 WGS-84 [degrees]')):
        parser.add_argument(arg_name, help=arg_help, type=float)
    args = parser.parse_args()
    dist_m = vdist(args.lat1, args.lon1, args.lat2, args.lon2)
    print('{:.3f} {:.3f} {:.3f}'.format(*dist_m))
if __name__ == '__main__': # pragma: no cover
main()
| 494 | 0 | 23 |
27794f69d075b4738175010914ffed940e109425 | 770 | py | Python | mytravelblog/accounts/urls.py | yetoshimo/my-travel-blog | de67dd135e66f2dda121850d54fd56fd644b9bff | [
"MIT"
] | null | null | null | mytravelblog/accounts/urls.py | yetoshimo/my-travel-blog | de67dd135e66f2dda121850d54fd56fd644b9bff | [
"MIT"
] | null | null | null | mytravelblog/accounts/urls.py | yetoshimo/my-travel-blog | de67dd135e66f2dda121850d54fd56fd644b9bff | [
"MIT"
] | null | null | null | from django.urls import path
from mytravelblog.accounts.views import *
# Accounts URL configuration: session routes first, then profile lifecycle.
urlpatterns = (
    path('login/', UserLoginView.as_view(), name='login user'),
    path('logout/', UserLogoutConfirmationView.as_view(), name='logout user confirmation'),
    path('logout/signout/', UserLogoutView.as_view(), name='logout user'),
    # Profile detail / create / edit / delete / password change.
    path('profile-details/<int:pk>/', UserProfileDetailsView.as_view(), name='profile details'),
    path('profile/create/', UserRegisterView.as_view(), name='profile create'),
    path('edit-profile/<int:pk>/', EditProfileView.as_view(), name='profile edit'),
    path('delete-profile/<int:pk>/', DeleteProfileView.as_view(), name='profile delete'),
    path('edit-password/<int:pk>/', ChangeUserPasswordView.as_view(), name='change password'),
)
| 45.294118 | 96 | 0.711688 | from django.urls import path
from mytravelblog.accounts.views import *
# Accounts URL configuration: session routes first, then profile lifecycle.
urlpatterns = (
    path('login/', UserLoginView.as_view(), name='login user'),
    path('logout/', UserLogoutConfirmationView.as_view(), name='logout user confirmation'),
    path('logout/signout/', UserLogoutView.as_view(), name='logout user'),
    # Profile detail / create / edit / delete / password change.
    path('profile-details/<int:pk>/', UserProfileDetailsView.as_view(), name='profile details'),
    path('profile/create/', UserRegisterView.as_view(), name='profile create'),
    path('edit-profile/<int:pk>/', EditProfileView.as_view(), name='profile edit'),
    path('delete-profile/<int:pk>/', DeleteProfileView.as_view(), name='profile delete'),
    path('edit-password/<int:pk>/', ChangeUserPasswordView.as_view(), name='change password'),
)
| 0 | 0 | 0 |
61cf70ddffa302ce92c009ba30a09a6df91ec3f1 | 265 | py | Python | serial_port_switch.py | truonghoangduy/esp32-gateway-devices | 6232be5219528910cff380822e2d540b608bac21 | [
"MIT"
] | null | null | null | serial_port_switch.py | truonghoangduy/esp32-gateway-devices | 6232be5219528910cff380822e2d540b608bac21 | [
"MIT"
] | null | null | null | serial_port_switch.py | truonghoangduy/esp32-gateway-devices | 6232be5219528910cff380822e2d540b608bac21 | [
"MIT"
] | 1 | 2022-03-01T03:40:39.000Z | 2022-03-01T03:40:39.000Z | # import util
import sys
import os

# BUG FIX: the original string literal was missing its closing quote, which
# made the entire script a SyntaxError.
pioPath = ".platformio/penv/lib/python3.9/site-packages"
fullPath = os.path.join(os.path.expanduser('~'), pioPath)
print(fullPath)
# Reuse the path computed above instead of rebuilding it.
sys.path.append(fullPath)
import util
util.get_serial_ports()
| 26.5 | 63 | 0.758491 | # import util
import sys
import os

# BUG FIX: the original string literal was missing its closing quote, which
# made the entire script a SyntaxError.
pioPath = ".platformio/penv/lib/python3.9/site-packages"
fullPath = os.path.join(os.path.expanduser('~'), pioPath)
print(fullPath)
# Reuse the path computed above instead of rebuilding it.
sys.path.append(fullPath)
import util
util.get_serial_ports()
| 0 | 0 | 0 |
72acb6e2ec44e71ce1922b88225888035123a68d | 1,536 | py | Python | build/android/pylib/local/device/local_device_environment.py | kjthegod/chromium | cf940f7f418436b77e15b1ea23e6fa100ca1c91a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 5 | 2015-04-30T00:13:21.000Z | 2019-07-10T02:17:24.000Z | build/android/pylib/local/device/local_device_environment.py | kjthegod/chromium | cf940f7f418436b77e15b1ea23e6fa100ca1c91a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | build/android/pylib/local/device/local_device_environment.py | kjthegod/chromium | cf940f7f418436b77e15b1ea23e6fa100ca1c91a | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 2 | 2015-03-27T11:15:39.000Z | 2016-08-17T14:19:56.000Z | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from pylib.base import environment
from pylib.device import adb_wrapper
from pylib.device import device_errors
from pylib.device import device_utils
from pylib.utils import parallelizer
| 26.947368 | 72 | 0.727865 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from pylib.base import environment
from pylib.device import adb_wrapper
from pylib.device import device_errors
from pylib.device import device_utils
from pylib.utils import parallelizer
class LocalDeviceEnvironment(environment.Environment):
  """Test environment backed by locally attached (adb) devices.

  Wraps the devices reported by adb in DeviceUtils objects and exposes
  the retry count and tool name parsed from the command line.
  """

  def __init__(self, args, _error_func):
    # `args` are the parsed command-line options; `_error_func` is accepted
    # for interface compatibility but unused here.
    super(LocalDeviceEnvironment, self).__init__()
    self._device = args.test_device  # optional: restrict to one device
    self._devices = []  # populated by SetUp()
    self._max_tries = 1 + args.num_retries
    self._tool_name = args.tool

  #override
  def SetUp(self):
    # TODO(jbudorick): This can be refined to support filters etc.
    available_devices = adb_wrapper.AdbWrapper.GetDevices()
    if not available_devices:
      raise device_errors.NoDevicesError
    if self._device:
      # A specific device was requested: it must currently be attached.
      if self._device not in available_devices:
        raise device_errors.DeviceUnreachableError(
            'Could not find device %r' % self._device)
      self._devices = [device_utils.DeviceUtils(self._device)]
    else:
      # No specific device requested: use everything adb reports.
      self._devices = [
          device_utils.DeviceUtils(s)
          for s in available_devices]

  @property
  def devices(self):
    # DeviceUtils wrappers for the devices selected in SetUp().
    return self._devices

  @property
  def parallel_devices(self):
    # Parallelizer that fans method calls out across all devices.
    return parallelizer.SyncParallelizer(self._devices)

  @property
  def max_tries(self):
    # Total attempts per test: 1 initial run + configured retries.
    return self._max_tries

  @property
  def tool(self):
    return self._tool_name

  #override
  def TearDown(self):
    # Nothing to release for local devices.
    pass
| 882 | 280 | 23 |
d050a0f9094cfd8b88db3f7e61314f6f7dc6f6fc | 328 | py | Python | miniFPL/api_url.py | sayandhar/miniFPL | c4107a8112b8f0548209cf47aad9be88262eca49 | [
"MIT"
] | 2 | 2019-02-16T18:32:42.000Z | 2020-09-11T22:33:12.000Z | miniFPL/api_url.py | sayandhar/miniFPL | c4107a8112b8f0548209cf47aad9be88262eca49 | [
"MIT"
] | null | null | null | miniFPL/api_url.py | sayandhar/miniFPL | c4107a8112b8f0548209cf47aad9be88262eca49 | [
"MIT"
] | 1 | 2018-08-31T09:04:54.000Z | 2018-08-31T09:04:54.000Z | BASE_URL = 'https://fantasy.premierleague.com/drf/'
FPL_DATA = BASE_URL + 'bootstrap-static'
# (player id)
PLAYER_DATA = BASE_URL + 'element-summary/{}'
# (gameweek)
DREAM_TEAM_DATA = BASE_URL + 'dream-team/{}'
# (team id)
USER_DATA = BASE_URL + 'entry/{}'
# (gameweek)
USER_GAMEWEEK_TEAM_DATA = USER_DATA + '/event/{}/picks'
| 25.230769 | 55 | 0.698171 | BASE_URL = 'https://fantasy.premierleague.com/drf/'
# Static bootstrap data endpoint (no parameters).
FPL_DATA = BASE_URL + 'bootstrap-static'
# Per-player detail; format with (player id).
PLAYER_DATA = BASE_URL + 'element-summary/{}'
# Dream team for one round; format with (gameweek).
DREAM_TEAM_DATA = BASE_URL + 'dream-team/{}'
# Manager entry summary; format with (team id).
USER_DATA = BASE_URL + 'entry/{}'
# A manager's picks for one round; format with (team id) then (gameweek).
USER_GAMEWEEK_TEAM_DATA = USER_DATA + '/event/{}/picks'
| 0 | 0 | 0 |
0c71477059bcf2d2c50224cbfe3dd457c1e6410a | 1,319 | py | Python | app/authentication.py | uk-gov-mirror/alphagov.digitalmarketplace-api | 5a1db63691d0c4a435714837196ab6914badaf62 | [
"MIT"
] | 25 | 2015-01-14T10:45:13.000Z | 2021-05-26T17:21:41.000Z | app/authentication.py | uk-gov-mirror/alphagov.digitalmarketplace-api | 5a1db63691d0c4a435714837196ab6914badaf62 | [
"MIT"
] | 641 | 2015-01-15T11:10:50.000Z | 2021-06-15T22:18:42.000Z | app/authentication.py | uk-gov-mirror/alphagov.digitalmarketplace-api | 5a1db63691d0c4a435714837196ab6914badaf62 | [
"MIT"
] | 22 | 2015-06-13T15:37:45.000Z | 2021-08-19T23:40:49.000Z | from flask import current_app, abort, request
from dmutils.authentication import UnauthorizedWWWAuthenticate
def get_allowed_tokens_from_config(config, module='main'):
    """Return a list of allowed auth tokens from the application config"""
    if module == 'callbacks':
        var_name = 'DM_API_CALLBACK_AUTH_TOKENS'
    else:
        var_name = 'DM_API_AUTH_TOKENS'
    raw_tokens = config.get(var_name, '')
    # Tokens are colon-separated; drop empty segments ("a::b" -> [a, b]).
    return [t for t in raw_tokens.split(':') if t]
| 34.710526 | 94 | 0.716452 | from flask import current_app, abort, request
from dmutils.authentication import UnauthorizedWWWAuthenticate
def requires_authentication(module='main'):
    """Validate the request's bearer token when auth is enabled.

    Raises UnauthorizedWWWAuthenticate when no token is supplied and
    aborts with 403 when the supplied token is not in the allow-list.
    No-op when AUTH_REQUIRED is falsy.
    """
    if not current_app.config['AUTH_REQUIRED']:
        return
    incoming_token = get_token_from_headers(request.headers)
    if not incoming_token:
        raise UnauthorizedWWWAuthenticate(
            www_authenticate=f"Bearer realm={module}",
            description="Unauthorized; bearer token must be provided",
        )
    if not token_is_valid(incoming_token, module=module):
        abort(403, "Forbidden; invalid bearer token provided {}".format(incoming_token))
def token_is_valid(incoming_token, module):
    """True when `incoming_token` is in the configured allow-list for `module`."""
    allowed_tokens = get_allowed_tokens_from_config(current_app.config, module=module)
    return incoming_token in allowed_tokens
def get_allowed_tokens_from_config(config, module='main'):
    """Return a list of allowed auth tokens from the application config"""
    if module == 'callbacks':
        var_name = 'DM_API_CALLBACK_AUTH_TOKENS'
    else:
        var_name = 'DM_API_AUTH_TOKENS'
    raw_tokens = config.get(var_name, '')
    # Tokens are colon-separated; drop empty segments ("a::b" -> [a, b]).
    return [t for t in raw_tokens.split(':') if t]
def get_token_from_headers(headers):
    """Extract the bearer token from the Authorization header, or None."""
    value = headers.get('Authorization', '')
    if value.startswith('Bearer '):
        # Everything after the scheme prefix is the token (may be empty).
        return value[len('Bearer '):]
    return None
| 780 | 0 | 69 |
f25042b468f867d8a3e0d386bfe6356608e13a01 | 3,565 | py | Python | tests/autoruntests/DetunersTest.py | fsoubelet/PyHEADTAIL | 51cae8845cceb61cc3f140db4ab0eeb68469110f | [
"BSD-3-Clause"
] | null | null | null | tests/autoruntests/DetunersTest.py | fsoubelet/PyHEADTAIL | 51cae8845cceb61cc3f140db4ab0eeb68469110f | [
"BSD-3-Clause"
] | null | null | null | tests/autoruntests/DetunersTest.py | fsoubelet/PyHEADTAIL | 51cae8845cceb61cc3f140db4ab0eeb68469110f | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
from scipy.constants import m_p, c, e
import matplotlib.pyplot as plt
import PyHEADTAIL.particles.generators as generators
from PyHEADTAIL.trackers.transverse_tracking import TransverseMap
from PyHEADTAIL.trackers.detuners import Chromaticity, AmplitudeDetuning
if __name__ == '__main__':
run()
| 25.464286 | 94 | 0.630295 | import numpy as np
from scipy.constants import m_p, c, e
import matplotlib.pyplot as plt
import PyHEADTAIL.particles.generators as generators
from PyHEADTAIL.trackers.transverse_tracking import TransverseMap
from PyHEADTAIL.trackers.detuners import Chromaticity, AmplitudeDetuning
def run():
    """Track a Gaussian bunch through a one-segment transverse map for a
    few turns, once for each supported detuner configuration: amplitude
    detuning (octupoles), first-order and higher-order chromaticity.

    Machine constants (gamma = 3730.26, C = 26658.883 m) are LHC-like
    values, matching the from_octupole_currents_LHC helper used below.
    """

    def track(bunch, map_):
        # Apply every element of the one-turn map, n_turns times.
        for i in range(n_turns):
            for m in map_:
                m.track(bunch)

    def generate_bunch(n_macroparticles, alpha_x, alpha_y, beta_x, beta_y, alpha_0, Q_s, R):
        # Build a matched Gaussian 6D bunch from the given Twiss parameters;
        # intensity, bunch length and energy are hard-coded.
        intensity = 1.05e11
        sigma_z = 0.059958
        gamma = 3730.26
        eta = alpha_0 - 1. / gamma**2
        gamma_t = 1. / np.sqrt(alpha_0)  # transition gamma (computed but unused)
        p0 = np.sqrt(gamma**2 - 1) * m_p * c  # reference momentum
        beta_z = eta * R / Q_s
        epsn_x = 3.75e-6  # [m rad]
        epsn_y = 3.75e-6  # [m rad]
        epsn_z = 4 * np.pi * sigma_z**2 * p0 / (beta_z * e)
        bunch = generators.generate_Gaussian6DTwiss(
            macroparticlenumber=n_macroparticles, intensity=intensity, charge=e,
            gamma=gamma, mass=m_p, circumference=C,
            alpha_x=alpha_x, beta_x=beta_x, epsn_x=epsn_x,
            alpha_y=alpha_y, beta_y=beta_y, epsn_y=epsn_y,
            beta_z=beta_z, epsn_z=epsn_z)
        #print bunch.sigma_z()
        return bunch

    # In[4]:
    # Basic parameters.
    n_turns = 3
    n_segments = 1
    n_macroparticles = 10
    Q_x = 64.28
    Q_y = 59.31
    Q_s = 0.0020443
    C = 26658.883
    R = C / (2.*np.pi)
    alpha_x_inj = 0.
    alpha_y_inj = 0.
    beta_x_inj = 66.0064
    beta_y_inj = 71.5376
    alpha_0 = 0.0003225

    # In[5]:
    # Parameters for transverse map.
    s = np.arange(0, n_segments + 1) * C / n_segments
    alpha_x = alpha_x_inj * np.ones(n_segments)
    beta_x = beta_x_inj * np.ones(n_segments)
    D_x = np.zeros(n_segments)
    alpha_y = alpha_y_inj * np.ones(n_segments)
    beta_y = beta_y_inj * np.ones(n_segments)
    D_y = np.zeros(n_segments)

    # In[6]:
    # CASE I
    # With amplitude detuning (python implementation)
    # EXPECTED TUNE SPREADS AT THE GIVEN SETTINGS ARE 5e-4 FOR HORIZONTAL
    # AND VERTICAL.
    bunch = generate_bunch(
        n_macroparticles, alpha_x_inj, alpha_y_inj, beta_x_inj, beta_y_inj,
        alpha_0, Q_s, R)
    ampl_det = AmplitudeDetuning.from_octupole_currents_LHC(i_focusing=400, i_defocusing=-400)
    trans_map = TransverseMap(
        s, alpha_x, beta_x, D_x, alpha_y, beta_y, D_y, Q_x, Q_y, [ampl_det])
    trans_one_turn = [ m for m in trans_map ]
    map_ = trans_one_turn
    track(bunch, map_)

    # In[7]:
    # CASE II
    # With first order Chromaticity (python implementation)
    bunch = generate_bunch(
        n_macroparticles, alpha_x_inj, alpha_y_inj, beta_x_inj, beta_y_inj,
        alpha_0, Q_s, R)
    chroma = Chromaticity(Qp_x=[6], Qp_y=[3])
    trans_map = TransverseMap(
        s, alpha_x, beta_x, D_x, alpha_y, beta_y, D_y, Q_x, Q_y, [chroma])
    trans_one_turn = [ m for m in trans_map ]
    map_ = trans_one_turn
    track(bunch, map_)

    # In[8]:
    # CASE III
    # With higher order Chromaticity (python implementation)
    bunch = generate_bunch(
        n_macroparticles, alpha_x_inj, alpha_y_inj, beta_x_inj, beta_y_inj,
        alpha_0, Q_s, R)
    chroma = Chromaticity(Qp_x=[6., 4e4], Qp_y=[3., 0., 2e8])
    trans_map = TransverseMap(
        s, alpha_x, beta_x, D_x, alpha_y, beta_y, D_y, Q_x, Q_y, [chroma])
    trans_one_turn = [ m for m in trans_map ]
    map_ = trans_one_turn
    track(bunch, map_)

    # In[ ]:
if __name__ == '__main__':
run()
| 3,219 | 0 | 23 |
97d53868b8cbceaa9480f55ebd5b0b00b648516c | 1,007 | py | Python | Chapter1/calculating_pi.py | SJHH-Nguyen-D/ClassicComputerScienceProblemsInPython | 9e178432ddb8b5e0ab63bf97334f557310ccc43c | [
"Apache-2.0"
] | null | null | null | Chapter1/calculating_pi.py | SJHH-Nguyen-D/ClassicComputerScienceProblemsInPython | 9e178432ddb8b5e0ab63bf97334f557310ccc43c | [
"Apache-2.0"
] | null | null | null | Chapter1/calculating_pi.py | SJHH-Nguyen-D/ClassicComputerScienceProblemsInPython | 9e178432ddb8b5e0ab63bf97334f557310ccc43c | [
"Apache-2.0"
] | null | null | null | # calculating_pi.py
# From Classic Computer Science Problems in Python Chapter 1
# Copyright 2018 David Kopec
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if __name__ == "__main__":
print(calculate_pi(1000000)) | 32.483871 | 74 | 0.712016 | # calculating_pi.py
# From Classic Computer Science Problems in Python Chapter 1
# Copyright 2018 David Kopec
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def calculate_pi(n_terms: int) -> float:
    """Approximate pi with the first `n_terms` terms of the Leibniz
    series: pi = 4/1 - 4/3 + 4/5 - 4/7 + ...
    """
    total: float = 0.0
    for k in range(n_terms):
        # k-th term: alternating sign over the odd denominator 2k + 1.
        total += (-1.0) ** k * (4.0 / (2.0 * k + 1.0))
    return total
if __name__ == "__main__":
print(calculate_pi(1000000)) | 270 | 0 | 23 |
dd9fdc2eaacf283b85356158878ae10bfe7e7467 | 6,962 | py | Python | extensions/auth/_test_module.py | icoman/AppServer | b7715d90662e112638000b5a3c242fbcb59488a3 | [
"MIT"
] | null | null | null | extensions/auth/_test_module.py | icoman/AppServer | b7715d90662e112638000b5a3c242fbcb59488a3 | [
"MIT"
] | null | null | null | extensions/auth/_test_module.py | icoman/AppServer | b7715d90662e112638000b5a3c242fbcb59488a3 | [
"MIT"
] | null | null | null | #
# auth test module - add, update, delete
#
"""
MIT License
Copyright (c) 2017, 2018 Ioan Coman
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
DELETE_TESTS = True
import requests
import json
import os
import time
import datetime
import traceback
class AuthTests(ApiTests):
'''
def add(self, title, description, done='yes'):
try:
values = [title, description, done]
data = dict(data=json.dumps(values))
result = json.loads(self.session.post(self.url_add, data).text)
ret = result['ok'], result['data']
except Exception as ex:
ret = False, str(ex)
return ret
def update(self, obid, title, description, done='yes'):
try:
values = [title, description, done]
data = dict(data=json.dumps(values), id=obid)
result = json.loads(self.session.post(self.url_update, data).text)
ret = result['ok'], result['data']
except Exception as ex:
ret = False, str(ex)
return ret
def delete(self, obid):
try:
data = dict(id=obid)
result = json.loads(self.session.post(self.url_delete, data).text)
ret = result['ok'], result['data']
except Exception as ex:
ret = False, str(ex)
return ret
'''
if __name__ == "__main__":
import sys
py = sys.version_info
py3k = py >= (3, 0, 0)
try:
test_function()
except Exception as ex:
print("Exception found: {}".format(ex))
# traceback.print_exc(file=sys.stdout)
msg = 'Program ends, press Enter.'
if py3k:
input(msg)
else:
raw_input(msg)
| 35.161616 | 96 | 0.606148 | #
# auth test module - add, update, delete
#
"""
MIT License
Copyright (c) 2017, 2018 Ioan Coman
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
DELETE_TESTS = True
import requests
import json
import os
import time
import datetime
import traceback
class ApiTests(object):
    """Base helper for server API tests.

    Subclasses are expected to provide `self.session` (a requests
    session) and the `url_confirmcookie` / `url_login` attributes.
    """

    def confirmCookie(self):
        """Hit the cookie-confirmation endpoint and report the server header."""
        response = self.session.get(self.url_confirmcookie, verify=False)
        server = response.headers.get('Server', '-undefined-')
        print('Server: {}'.format(server))

    def login(self, user, password):
        """POST credentials; return True when the server authenticated us."""
        payload = {'user': user, 'password': password}
        response = self.session.post(self.url_login, payload)
        # A successful login responds with a meta http-equiv refresh redirect.
        marker = '<meta http-equiv="refresh"'
        return marker in response.text
class AuthTests(ApiTests):
    '''
    Commented-out legacy helpers, kept for reference:

    def add(self, title, description, done='yes'):
        try:
            values = [title, description, done]
            data = dict(data=json.dumps(values))
            result = json.loads(self.session.post(self.url_add, data).text)
            ret = result['ok'], result['data']
        except Exception as ex:
            ret = False, str(ex)
        return ret

    def update(self, obid, title, description, done='yes'):
        try:
            values = [title, description, done]
            data = dict(data=json.dumps(values), id=obid)
            result = json.loads(self.session.post(self.url_update, data).text)
            ret = result['ok'], result['data']
        except Exception as ex:
            ret = False, str(ex)
        return ret

    def delete(self, obid):
        try:
            data = dict(id=obid)
            result = json.loads(self.session.post(self.url_delete, data).text)
            ret = result['ok'], result['data']
        except Exception as ex:
            ret = False, str(ex)
        return ret
    '''

    def post(self, url, data):
        # POST `data` to `url` and decode the JSON response body.
        # NOTE(review): on failure this returns a (False, message) tuple,
        # but test() calls .get() on the result -- a failed request would
        # raise AttributeError there instead of failing cleanly; confirm.
        try:
            ret = json.loads(self.session.post(url, data).text)
        except Exception as ex:
            ret = False, str(ex)
            traceback.print_exc(file=sys.stdout)
        return ret

    def __init__(self, base_url, modulename):
        # Open a session and pre-compute the auth module's CRUD endpoints.
        print('Test {}/{}'.format(base_url, modulename))
        self.session = requests.session()
        self.url_confirmcookie = '{}/confirmcookie'.format(base_url)
        self.url_login = '{}/auth/login'.format(base_url)
        self.url_users_add = '{}/{}/users/add'.format(base_url, modulename)
        self.url_users_update = '{}/{}/users/update'.format(base_url, modulename)
        self.url_users_delete = '{}/{}/users/delete'.format(base_url, modulename)
        self.url_groups_add = '{}/{}/groups/add'.format(base_url, modulename)
        self.url_groups_update = '{}/{}/groups/update'.format(base_url, modulename)
        self.url_groups_delete = '{}/{}/groups/delete'.format(base_url, modulename)

    def test(self, i):
        # One add/update/delete round-trip for a user and for a group,
        # using timestamp-unique names; asserts every call reports ok.
        now = datetime.datetime.now().strftime('%d-%b-%Y %H:%M:%S')  # NOTE(review): unused
        print(i)
        # add user
        username = 'user_{}'.format(time.time())
        print('add {}'.format((username)))
        data = dict(data=json.dumps([username, 'Test1', 'email1', '***', '["Administrators"]']))
        result = self.post(self.url_users_add, data)
        ok = result.get('ok')
        data = result.get('data')
        id = result.get('id')
        assert ok == True, data
        # update user
        print('update {}'.format(username))
        data = dict(id=id, data=json.dumps([username, 'Test2', 'email2', '***', '[]']))
        result = self.post(self.url_users_update, data)
        ok = result.get('ok')
        id = result.get('id')
        data = result.get('data')
        assert ok == True, data
        # delete user
        print('delete {} id={}'.format(username, id))
        result = self.post(self.url_users_delete, dict(id=id))
        ok = result.get('ok')
        data = result.get('data')
        assert ok == True, data
        # add group
        groupname = 'group_{}'.format(time.time())
        print('add {}'.format((groupname)))
        data = dict(data=json.dumps([groupname, 'Test1']))
        result = self.post(self.url_groups_add, data)
        ok = result.get('ok')
        data = result.get('data')
        id = result.get('id')
        assert ok == True, data
        # update group
        print('update {}'.format(groupname))
        data = dict(id=id, data=json.dumps([groupname, 'Test2']))
        result = self.post(self.url_groups_update, data)
        ok = result.get('ok')
        id = result.get('id')
        data = result.get('data')
        assert ok == True, data
        # delete user
        print('delete {} id={}'.format(groupname, id))
        result = self.post(self.url_groups_delete, dict(id=id))
        ok = result.get('ok')
        data = result.get('data')
        assert ok == True, data
def test_function():
    """Run the auth add/update/delete round-trip tests against a server.

    Connection details come from the BASEURL / user / password
    environment variables, defaulting to a local admin login.
    """
    base_url = os.getenv('BASEURL', 'http://localhost')
    user = os.getenv('user', 'admin')
    password = os.getenv('password', 'admin')
    # Derive the extension module name: from the CWD when run as a
    # script, otherwise from this module's dotted package path.
    if __name__ == "__main__":
        modulename = os.getcwd().split(os.path.sep)[-1]
    else:
        parts = __name__.split('.')
        if len(parts) < 2:
            # ignore tests because script is in templates folder
            return
        modulename = parts[-2]
    tester = AuthTests(base_url, modulename)
    tester.confirmCookie()
    flag_login = tester.login(user, password)
    assert flag_login == True, 'Not authenticated.'
    num_runs = 3
    print('Authenticated. Run {} tests.'.format(num_runs))
    for run_index in range(num_runs):
        tester.test(run_index)
if __name__ == "__main__":
    import sys
    # Python 2/3 compatibility: choose input() vs raw_input() below.
    py = sys.version_info
    py3k = py >= (3, 0, 0)
    try:
        test_function()
    except Exception as ex:
        # Report but do not re-raise, so the pause prompt still runs.
        print("Exception found: {}".format(ex))
        # traceback.print_exc(file=sys.stdout)
    msg = 'Program ends, press Enter.'
    if py3k:
        input(msg)
    else:
        raw_input(msg)
| 4,094 | 2 | 180 |
4dccc417c9de8bcf074c288d63345b8ce2f1340f | 12,496 | py | Python | rs2wapy/models/models.py | tuokri/rs2wapy | 6d4d940abca0acece4c5e36c6830c6b647c736c8 | [
"MIT"
] | 3 | 2020-03-03T09:19:40.000Z | 2021-10-16T10:09:00.000Z | rs2wapy/models/models.py | tuokri/rs2wapy | 6d4d940abca0acece4c5e36c6830c6b647c736c8 | [
"MIT"
] | 9 | 2020-03-03T13:57:14.000Z | 2020-04-28T09:37:42.000Z | rs2wapy/models/models.py | tuokri/rs2wapy | 6d4d940abca0acece4c5e36c6830c6b647c736c8 | [
"MIT"
] | 1 | 2020-03-17T16:17:36.000Z | 2020-03-17T16:17:36.000Z | """TODO: Use dataclasses."""
from __future__ import annotations
import abc
import datetime
import sys
from collections import MutableMapping
from typing import Any
from typing import Dict
from typing import Iterator
from typing import KeysView
from typing import List
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union
from logbook import Logger
from logbook import StreamHandler
from steam.steamid import SteamID
from rs2wapy.adapters import adapters
from rs2wapy.epicgamesstore import EGSID
from rs2wapy.steam import SteamWebAPI
BAN_DATE_FMT = "%Y/%m/%d %H:%M:%S"
StreamHandler(sys.stdout, level="WARNING").push_application()
logger = Logger(__name__)
HEX_COLOR_BLUE_TEAM = "#50A0F0"
HEX_COLOR_RED_TEAM = "#E54927"
HEX_COLOR_UNKNOWN_TEAM = "transparent"
HEX_COLOR_ALL_TEAM = ""
HEX_COLOR_TO_TEAM = {
HEX_COLOR_BLUE_TEAM: BlueTeam,
HEX_COLOR_RED_TEAM: RedTeam,
HEX_COLOR_UNKNOWN_TEAM: UnknownTeam,
HEX_COLOR_ALL_TEAM: AllTeam,
}
TEAM_INDEX_TO_TEAM: Dict[int, Type[Team]] = {
0: RedTeam,
1: BlueTeam,
}
TEAM_TO_TEAM_INDEX: Dict[Type[Team], int] = {
RedTeam: 0,
BlueTeam: 1,
}
# TODO: SteamPlayer and EGSPlayer classes?
CHAT_CHANNEL_ALL_STR = "(ALL)"
CHAT_CHANNEL_TEAM_STR = "(TEAM)"
TEAMNOTICE_TEAM = CHAT_CHANNEL_TEAM_STR
TEAMNOTICE_TO_CHAT_CHANNEL = {
None: ChatChannelAll,
TEAMNOTICE_TEAM: ChatChannelTeam,
}
CHAT_CHANNEL_TO_STR = {
ChatChannelAll: CHAT_CHANNEL_ALL_STR,
ChatChannelTeam: CHAT_CHANNEL_TEAM_STR,
}
# TODO: Refactor attributes etc.
| 25.398374 | 85 | 0.62356 | """TODO: Use dataclasses."""
from __future__ import annotations
import abc
import datetime
import sys
from collections import MutableMapping
from typing import Any
from typing import Dict
from typing import Iterator
from typing import KeysView
from typing import List
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union
from logbook import Logger
from logbook import StreamHandler
from steam.steamid import SteamID
from rs2wapy.adapters import adapters
from rs2wapy.epicgamesstore import EGSID
from rs2wapy.steam import SteamWebAPI
BAN_DATE_FMT = "%Y/%m/%d %H:%M:%S"
StreamHandler(sys.stdout, level="WARNING").push_application()
logger = Logger(__name__)
class Model(abc.ABC):
    """Abstract base for WebAdmin model objects.

    Tracks a timestamp of when the object was created or last refreshed.
    """

    def __init__(self):
        # Creation time doubles as the initial "last refreshed" stamp.
        self._timestamp = datetime.datetime.now()

    @property
    def timestamp(self) -> datetime.datetime:
        """Creation or last-refresh time (naive local time)."""
        return self._timestamp

    @timestamp.setter
    def timestamp(self, timestamp: datetime.datetime):
        self._timestamp = timestamp

    def refresh(self):
        """Update timestamp value."""
        self.timestamp = datetime.datetime.now()
HEX_COLOR_BLUE_TEAM = "#50A0F0"
HEX_COLOR_RED_TEAM = "#E54927"
HEX_COLOR_UNKNOWN_TEAM = "transparent"
HEX_COLOR_ALL_TEAM = ""
class Team(abc.ABC):
    """Team "enum" base; concrete teams are the subclasses themselves
    (passed around as class objects, not instances).
    """

    # WebAdmin hex color associated with the team; None on the base class.
    HEX_COLOR: Optional[str] = None

    @staticmethod
    def from_hex_color(hex_color: str) -> Type[Team]:
        # Raises KeyError for a color not present in HEX_COLOR_TO_TEAM.
        return HEX_COLOR_TO_TEAM[hex_color]

    @staticmethod
    def from_team_index(index: int) -> Type[Team]:
        # Indices outside TEAM_INDEX_TO_TEAM (0=red, 1=blue) fall back
        # to UnknownTeam.
        try:
            return TEAM_INDEX_TO_TEAM[index]
        except KeyError:
            return UnknownTeam
class BlueTeam(Team):
    """Team at index 1; WebAdmin color #50A0F0."""
    HEX_COLOR = HEX_COLOR_BLUE_TEAM


class RedTeam(Team):
    """Team at index 0; WebAdmin color #E54927."""
    HEX_COLOR = HEX_COLOR_RED_TEAM


class UnknownTeam(Team):
    """Fallback when a team color/index cannot be resolved."""
    HEX_COLOR = HEX_COLOR_UNKNOWN_TEAM


class AllTeam(Team):
    """Pseudo-team used to address all players."""
    HEX_COLOR = HEX_COLOR_ALL_TEAM
HEX_COLOR_TO_TEAM = {
HEX_COLOR_BLUE_TEAM: BlueTeam,
HEX_COLOR_RED_TEAM: RedTeam,
HEX_COLOR_UNKNOWN_TEAM: UnknownTeam,
HEX_COLOR_ALL_TEAM: AllTeam,
}
TEAM_INDEX_TO_TEAM: Dict[int, Type[Team]] = {
0: RedTeam,
1: BlueTeam,
}
TEAM_TO_TEAM_INDEX: Dict[Type[Team], int] = {
RedTeam: 0,
BlueTeam: 1,
}
# TODO: SteamPlayer and EGSPlayer classes?
class Player(Model):
    """A player identified by either a Steam ID or an Epic Games Store
    (EGS) ID, plus the stats mapping scraped from RS2 WebAdmin.
    """

    # TODO: Given int or str ID, determine Steam / EGS ID?
    def __init__(self, ident: Union[SteamID, int, str, EGSID] = None,
                 stats: dict = None, persona_name: str = None,
                 id_intstr_base: int = 16):
        # int/str idents are interpreted as Steam IDs; str is parsed in
        # base `id_intstr_base` (default 16).
        # NOTE(review): the default ident=None falls through to the
        # ValueError branch below -- confirm that is intended.
        super().__init__()
        self._steam_id = SteamID(0)
        self._egs_id = EGSID(0)
        if not stats:
            stats = {}
        self._stats = stats
        if isinstance(ident, SteamID):
            self._steam_id = ident
        elif isinstance(ident, EGSID):
            self._egs_id = ident
        elif isinstance(ident, int):
            self._steam_id = SteamID(ident)
        elif isinstance(ident, str):
            self._steam_id = SteamID(int(ident, id_intstr_base))
        else:
            raise ValueError(
                f"invalid steam_id type: {type(ident)}, expected "
                f"{Union[SteamID, int, str]}")
        self._persona_name = persona_name

    @property
    def stats(self) -> dict:
        """Raw stats mapping scraped from WebAdmin (may be empty)."""
        return self._stats

    @property
    def steam_id(self) -> SteamID:
        """Steam ID; SteamID(0) when this is an EGS player."""
        return self._steam_id

    @property
    def egs_id(self) -> EGSID:
        """EGS ID; EGSID(0) when this is a Steam player."""
        return self._egs_id

    @property
    def is_steam_player(self) -> bool:
        # A non-zero Steam ID means a Steam identity was provided.
        # NOTE(review): assumes SteamID compares equal to its integer
        # value -- confirm against the steam library.
        return self._steam_id != 0

    @property
    def is_egs_player(self) -> bool:
        return not self.is_steam_player

    @property
    def name(self) -> str:
        """Player's name as stored in RS2 WebAdmin."""
        try:
            return self.stats["Player Name"]
        except KeyError as ke:
            # logger.debug(ke, exc_info=True)
            # logger.warn(f"unable to get player name for Steam ID {self.steam_id}")
            # Cache an empty name so later lookups do not re-raise.
            self.stats["Player Name"] = ""
            return ""

    @property
    def persona_name(self) -> Optional[str]:
        """Player's Steam persona (profile) name."""
        # Lazily resolved via the Steam Web API (Steam players only).
        if self._persona_name is None and self.is_steam_player:
            self._persona_name = SteamWebAPI().get_persona_name(self.steam_id)
        return self._persona_name

    def __str__(self) -> str:
        if self.is_steam_player:
            ident = (self._steam_id.as_64
                     if isinstance(self._steam_id, SteamID)
                     else self._steam_id)
            s = f"SteamID64={ident}"
        else:
            ident = self.egs_id.ident
            s = f"EGSID={ident}"
        return s

    def __repr__(self) -> str:
        return f"Player({self.__str__()})"

    def __hash__(self) -> int:
        # Hash on the numeric platform ID.
        # NOTE(review): __eq__ is not overridden, so equal-ID instances
        # hash alike but do not compare equal -- confirm this is intended.
        if self.is_steam_player:
            return self._steam_id.as_64
        else:
            return self._egs_id.ident
CHAT_CHANNEL_ALL_STR = "(ALL)"
CHAT_CHANNEL_TEAM_STR = "(TEAM)"
TEAMNOTICE_TEAM = CHAT_CHANNEL_TEAM_STR
class ChatChannel(abc.ABC):
    """Chat channel "enum"; concrete channels are the subclasses themselves."""

    @staticmethod
    def from_teamnotice(teamnotice: str):
        # Team chat carries the "(TEAM)" teamnotice; all-chat has no
        # teamnotice (None) -- the AttributeError guard covers None,
        # which then maps to ChatChannelAll in TEAMNOTICE_TO_CHAT_CHANNEL.
        try:
            teamnotice = teamnotice.upper()
        except AttributeError:
            pass
        return TEAMNOTICE_TO_CHAT_CHANNEL[teamnotice]

    @classmethod
    def to_team_str(cls) -> str:
        """Display string for this channel, e.g. "(ALL)" or "(TEAM)"."""
        # noinspection PyTypeChecker
        return CHAT_CHANNEL_TO_STR[cls]


class ChatChannelAll(ChatChannel):
    """All-chat channel ("(ALL)")."""
    pass


class ChatChannelTeam(ChatChannel):
    """Team-chat channel ("(TEAM)")."""
    pass
TEAMNOTICE_TO_CHAT_CHANNEL = {
None: ChatChannelAll,
TEAMNOTICE_TEAM: ChatChannelTeam,
}
CHAT_CHANNEL_TO_STR = {
ChatChannelAll: CHAT_CHANNEL_ALL_STR,
ChatChannelTeam: CHAT_CHANNEL_TEAM_STR,
}
class ChatMessage(Model):
    """A single chat line: sender, text, team and channel."""

    def __init__(self,
                 sender: Union[Player, adapters.PlayerWrapper, str],
                 text: str,
                 team: Type[Team], channel: Type[ChatChannel]):
        super().__init__()
        self._sender = sender
        self._text = text
        self._team = team
        self._channel = channel

    def __str__(self) -> str:
        # BUG FIX: `self._team` is a Team *class* (Type[Team]), so the old
        # `isinstance(self._team, UnknownTeam)` check was always False and
        # unknown-team messages rendered as "(UnknownTeam) ...". Handle a
        # class (the annotated contract) and, defensively, an instance.
        if isinstance(self._team, type):
            team_unknown = issubclass(self._team, UnknownTeam)
        else:
            team_unknown = isinstance(self._team, UnknownTeam)
        if team_unknown:
            channel = f"({self._channel.to_team_str()})"
        else:
            channel = f"({self._team.__name__}) {self._channel.to_team_str()}"
        return f"{self.timestamp.isoformat()} {self._sender} {channel}: {self._text}"

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.__str__()})"

    @property
    def sender(self) -> Union[Player, adapters.PlayerWrapper, str]:
        return self._sender

    @property
    def text(self) -> str:
        """Raw message text."""
        return self._text

    @property
    def team(self) -> Type[Team]:
        """The sender's team (a Team subclass, not an instance)."""
        return self._team

    @property
    def channel(self) -> Type[ChatChannel]:
        """ChatChannelAll or ChatChannelTeam."""
        return self._channel
class Scoreboard(MutableMapping):
    """Mutable mapping view over a scoreboard statistics dict.

    Every mapping operation delegates straight to the wrapped dict.
    """

    def __init__(self, stats: dict):
        self._stats = stats

    def __getitem__(self, item: Any):
        return self._stats[item]

    def __setitem__(self, key: Any, value: Any):
        self._stats[key] = value

    def __delitem__(self, key: Any):
        del self._stats[key]

    def __len__(self) -> int:
        return len(self._stats)

    def __iter__(self) -> Iterator[Any]:
        return iter(self._stats)

    def keys(self) -> KeysView:
        # Expose the underlying dict's key view directly.
        return self._stats.keys()

    def __str__(self) -> str:
        return str(self._stats)

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.__str__()})"
class PlayerScoreboard(Scoreboard):
    """Per-player scoreboard statistics."""
    pass


class TeamScoreboard(Scoreboard):
    """Per-team scoreboard statistics."""
    pass
class CurrentGame(Model):
    """Snapshot of the ongoing match: player/team scoreboards plus the
    server's info and rules dictionaries scraped from WebAdmin.
    """

    def __init__(self, player_scoreboard: PlayerScoreboard,
                 team_scoreboard: TeamScoreboard,
                 info: dict, rules: dict):
        super().__init__()
        self._player_scoreboard = player_scoreboard
        self._team_scoreboard = team_scoreboard
        self._info = info
        self._rules = rules

    def __str__(self) -> str:
        # TODO: implement keys() and __getitem__?
        # Merge all four mappings into one flat dict for display;
        # later updates overwrite duplicate keys.
        all_info = {}
        all_info.update(self._info)
        all_info.update(self._rules)
        all_info.update(self._player_scoreboard)
        all_info.update(self._team_scoreboard)
        return str(all_info)

    def __repr__(self) -> str:
        # TODO:
        return self.__str__()

    @property
    def player_scoreboard(self) -> PlayerScoreboard:
        return self._player_scoreboard

    @player_scoreboard.setter
    def player_scoreboard(self, scoreboard: PlayerScoreboard):
        self._player_scoreboard = scoreboard

    @property
    def team_scoreboard(self) -> TeamScoreboard:
        return self._team_scoreboard

    @team_scoreboard.setter
    def team_scoreboard(self, scoreboard: TeamScoreboard):
        self._team_scoreboard = scoreboard

    @property
    def ranked(self) -> bool:
        # Raises KeyError if the info dict lacks a "Ranked" entry.
        return self._info["Ranked"]

    @ranked.setter
    def ranked(self, ranked: bool):
        self._info["Ranked"] = ranked

    @property
    def info(self) -> dict:
        return self._info

    @info.setter
    def info(self, info: dict):
        self._info = info

    @property
    def rules(self) -> dict:
        return self._rules

    @rules.setter
    def rules(self, rules: dict):
        self._rules = rules
class AccessPolicy(Model):
    """An IP-mask / policy pair from WebAdmin's access control list."""

    def __init__(self, ip_mask: str, policy):
        # `policy` is untyped here; presumably an allow/deny marker --
        # TODO(review): confirm against the adapter that constructs this.
        super().__init__()
        self._ip_mask = ip_mask
        self._policy = policy
class MapCycle(Model):
    """A server map cycle: (map name, int) entries plus an active flag.

    NOTE(review): the meaning of the int in each tuple (weight/round
    count?) is not visible from this module -- confirm at the call site.
    """

    def __init__(self, maps: List[Tuple[str, int]], active: bool):
        super().__init__()
        self._maps = maps
        self._active = active

    @property
    def active(self) -> bool:
        # Active flag as reported by WebAdmin.
        return self._active

    @active.setter
    def active(self, active: bool):
        self._active = active

    @property
    def maps(self) -> List[Tuple[str, int]]:
        return self._maps

    @maps.setter
    def maps(self, maps: List[Tuple[str, int]]):
        self._maps = maps

    def __str__(self) -> str:
        return f"{self._maps}"

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.__str__()})"
class Squad(Model):
    """A squad within a team, identified by team, number and name."""

    def __init__(self, team: Type[Team], number: int, name: str):
        super().__init__()
        self._team = team
        self._number = number
        self._name = name

    def __str__(self) -> str:
        return f"team={self._team}, number={self._number}, name={self._name}"

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.__str__()})"

    def __hash__(self) -> int:
        # Hash on the squad number only.
        # NOTE(review): __eq__ is not overridden, so two Squad objects
        # with the same number hash alike yet compare unequal -- confirm.
        return self._number
# TODO: Refactor attributes etc.
class Ban(Model):
    """A WebAdmin player ban entry.

    `when` is parsed with BAN_DATE_FMT. `until` may be a pre-parsed
    datetime, a date string in the same format, or a non-date string
    (treated as a permanent ban -> until becomes None).
    """

    def __init__(self, player: Player, reason: str,
                 when: str, admin: str,
                 until: Union[str, datetime.datetime] = None):
        super().__init__()
        self._player = player
        self._reason = reason
        self._admin = admin
        self._when = datetime.datetime.strptime(when, BAN_DATE_FMT)
        if isinstance(until, str):
            until = self._parse_until(until)
        self._until = until

    @property
    def player(self) -> Player:
        return self._player

    @property
    def reason(self) -> str:
        return self._reason

    @property
    def until(self) -> Optional[datetime.datetime]:
        """Ban expiration date. If None, the ban is permanent."""
        return self._until

    @property
    def expired(self) -> bool:
        """True if ban has expired."""
        if self.until is None:
            # Permanent bans never expire.
            return False
        # BUG FIX: this previously returned `until > now`, i.e. True while
        # the expiry was still in the future; a ban has expired once its
        # `until` timestamp lies in the past.
        return self.until <= datetime.datetime.now()

    @property
    def when(self) -> datetime.datetime:
        return self._when

    @property
    def admin(self) -> str:
        """Name of the admin who issued the ban."""
        return self._admin

    @staticmethod
    def _parse_until(until: str) -> Optional[datetime.datetime]:
        """Ban expiration date str to datetime.datetime object."""
        try:
            return datetime.datetime.strptime(until, BAN_DATE_FMT)
        except ValueError:
            # Non-date strings denote a permanent ban.
            return None

    def __str__(self) -> str:
        return (f"player={self.player}, reason={self.reason}, "
                f"when={self.when}, admin={self.admin}, until={self.until}")

    def __repr__(self) -> str:
        return f"{type(self).__name__}({self.__str__()})"
class SessionBan(Ban):
    """A ban lasting only for the current session: a Ban with no expiry.

    The redundant `self._until = None` after the super() call was removed;
    Ban.__init__ already sets `_until` to None when `until=None` is passed.
    """
    def __init__(self, player: Player, when: str,
                 reason: str, admin: str):
        super().__init__(player, reason=reason,
                         when=when, admin=admin, until=None)
| 6,376 | 3,674 | 859 |
e8045703d37f2099bf362d4e174b682f893a37bb | 190 | py | Python | Coursera/ex5.1.py | FabianoBill/Estudos-em-python | 32c3f9e37b83630c923ff7c0c77aa7d80fbc3174 | [
"MIT"
] | 1 | 2021-05-24T19:44:04.000Z | 2021-05-24T19:44:04.000Z | Coursera/ex5.1.py | FabianoBill/Estudos-em-python | 32c3f9e37b83630c923ff7c0c77aa7d80fbc3174 | [
"MIT"
] | null | null | null | Coursera/ex5.1.py | FabianoBill/Estudos-em-python | 32c3f9e37b83630c923ff7c0c77aa7d80fbc3174 | [
"MIT"
] | null | null | null | l = int(input("digite a largura: "))
a = int(input("digite a altura: "))
# Draw a solid rectangle of '#': `a` rows of `l` columns
# (`l`, the width, is read from input just above this block).
for _ in range(a):
    print("#" * l)
| 17.272727 | 36 | 0.426316 | l = int(input("digite a largura: "))
a = int(input("digite a altura: "))
L = A = 1
# Outer loop: one iteration per row (A counts rows up to the height `a`).
while A <= a:
    # Inner loop: print `l` hash marks on the current row, no newline.
    while L <= l:
        print("#", end="")
        L += 1
    A += 1
    L = 1
    # End the row with a newline.
    print()
| 0 | 0 | 0 |
a7e25c99021f2163a65c42569320cf53b830ccb4 | 4,219 | py | Python | plotly/tests/test_core/test_graph_objs/test_properties_validated.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | [
"MIT"
] | 1 | 2018-07-16T01:51:47.000Z | 2018-07-16T01:51:47.000Z | plotly/tests/test_core/test_graph_objs/test_properties_validated.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | [
"MIT"
] | null | null | null | plotly/tests/test_core/test_graph_objs/test_properties_validated.py | faezs/plotly.py | 6009b5b9c746e5d2a2849ad255a4eb234b551ed7 | [
"MIT"
] | 1 | 2019-02-18T04:12:56.000Z | 2019-02-18T04:12:56.000Z | from unittest import TestCase
import plotly.graph_objs as go
from nose.tools import raises
| 28.506757 | 74 | 0.605831 | from unittest import TestCase
import plotly.graph_objs as go
from nose.tools import raises
class TestPropertyValidation(TestCase):
    """Spot-checks that graph-object datatypes run their validators.

    Rewritten to use unittest's ``assertRaises`` context manager instead of
    the unmaintained ``nose.tools.raises`` decorator, so the tests no longer
    depend on nose to run.
    """
    def setUp(self):
        # Construct initial scatter object
        self.scatter = go.Scatter()
        self.scatter.name = 'Scatter 1'
    def test_validators_work_attr(self):
        """
        Note: all of the individual validators are tested in
        `_plotly_utils/tests/validators`. Here we're just making sure that
        datatypes make use of validators
        """
        with self.assertRaises(ValueError):
            self.scatter.name = [1, 2, 3]
    def test_validators_work_item(self):
        """
        Note: all of the individual validators are tested in
        `_plotly_utils/tests/validators`. Here we're just making sure that
        datatypes make use of validators
        """
        with self.assertRaises(ValueError):
            self.scatter['name'] = [1, 2, 3]
    def test_invalid_attr_assignment(self):
        with self.assertRaises(ValueError):
            self.scatter.bogus = 87
    def test_invalid_item_assignment(self):
        with self.assertRaises(ValueError):
            self.scatter['bogus'] = 87
    def test_invalid_dot_assignment(self):
        with self.assertRaises(ValueError):
            self.scatter['marker.bogus'] = 87
    def test_invalid_tuple_assignment(self):
        with self.assertRaises(ValueError):
            self.scatter[('marker', 'bogus')] = 87
    def test_invalid_constructor_kwarg(self):
        with self.assertRaises(ValueError):
            go.Scatter(bogus=87)
class TestPropertyPresentation(TestCase):
    """Checks how assigned values are stored (JSON form) vs. presented (API form)."""
    def setUp(self):
        # Construct initial scatter object and an empty layout
        self.scatter = go.Scatter()
        self.scatter.name = 'Scatter 1'
        self.layout = go.Layout()
    def test_present_dataarray(self):
        self.assertIsNone(self.scatter.x)
        # Assign list
        self.scatter.x = [1, 2, 3, 4]
        # Stored as list in the JSON representation
        self.assertEqual(self.scatter.to_plotly_json()['x'],
                         [1, 2, 3, 4])
        # Returned as (immutable) tuple from the property
        self.assertEqual(self.scatter.x,
                         (1, 2, 3, 4))
    def test_present_compound_array(self):
        self.assertEqual(self.layout.images, ())
        # Assign compound list
        self.layout.images = [go.layout.Image(layer='above'),
                              go.layout.Image(layer='below')]
        # Stored as list of dicts in the JSON representation
        self.assertEqual(self.layout.to_plotly_json()['images'],
                         [{'layer': 'above'}, {'layer': 'below'}])
        # Presented as compound tuple from the property
        self.assertEqual(self.layout.images,
                         (go.layout.Image(layer='above'),
                          go.layout.Image(layer='below')))
    def test_present_colorscale(self):
        self.assertIsNone(self.scatter.marker.colorscale)
        # Assign list of tuples
        self.scatter.marker.colorscale = [(0, 'red'), (1, 'green')]
        # Stored as list of lists in the JSON representation
        self.assertEqual(
            self.scatter.to_plotly_json()['marker']['colorscale'],
            [[0, 'red'], [1, 'green']])
        # Presented as tuple of tuples from the property
        self.assertEqual(self.scatter.marker.colorscale,
                         ((0, 'red'), (1, 'green')))
class TestPropertyIterContains(TestCase):
    """Checks ``in`` membership and iteration over a graph object's property names."""
    def setUp(self):
        # Construct initial parcoords object
        self.parcoords = go.Parcoords()
        self.parcoords.name = 'Scatter 1'
    def test_contains(self):
        # Primitive property
        self.assertTrue('name' in self.parcoords)
        # Compound property
        self.assertTrue('line' in self.parcoords)
        # Literal
        self.assertTrue('type' in self.parcoords)
        # Compound array property
        self.assertTrue('dimensions' in self.parcoords)
        # Bogus (unknown names must not be reported as members)
        self.assertFalse('bogus' in self.parcoords)
    def test_iter(self):
        # Iteration must yield the same property names as membership tests.
        parcoords_list = list(self.parcoords)
        # Primitive property
        self.assertTrue('name' in parcoords_list)
        # Compound property
        self.assertTrue('line' in parcoords_list)
        # Literal
        self.assertTrue('type' in parcoords_list)
        # Compound array property
        self.assertTrue('dimensions' in parcoords_list)
        # Bogus
        self.assertFalse('bogus' in parcoords_list)
| 2,914 | 953 | 258 |
d3cbc1601e2628492af0ecc8529e4d4bfd2791df | 1,829 | py | Python | sharq/utils.py | bhuvanvenkat-plivo/sharq | 32bbfbdcbbaa8e154271ffd125ac4500382f3d19 | [
"MIT"
] | null | null | null | sharq/utils.py | bhuvanvenkat-plivo/sharq | 32bbfbdcbbaa8e154271ffd125ac4500382f3d19 | [
"MIT"
] | null | null | null | sharq/utils.py | bhuvanvenkat-plivo/sharq | 32bbfbdcbbaa8e154271ffd125ac4500382f3d19 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2014 Plivo Team. See LICENSE.txt for details.
import time
import msgpack
VALID_IDENTIFIER_SET = set(list('abcdefghijklmnopqrstuvwxyz0123456789_-'))
def is_valid_identifier(identifier):
    """Checks if the given identifier is valid or not. A valid
    identifier may consists of the following characters with a
    maximum length of 100 characters, minimum of 1 character.
    Valid characters for an identifier,
    - A to Z
    - a to z
    - 0 to 9
    - _ (underscore)
    - - (hypen)
    """
    # Python 2 API: `basestring` covers both str and unicode.
    if not isinstance(identifier, basestring):
        return False
    if not 1 <= len(identifier) <= 100:
        return False
    # Every character (case-folded) must be in the allowed alphabet.
    return all(char in VALID_IDENTIFIER_SET for char in identifier.lower())
def is_valid_interval(interval):
    """Checks if the given interval is valid. A valid interval
    is always a positive, non-zero integer value.
    """
    # Python 2 API: `long` is a distinct integer type.
    return isinstance(interval, (int, long)) and interval > 0
def is_valid_requeue_limit(requeue_limit):
    """Checks if the given requeue limit is valid.
    A valid requeue limit is always greater than
    or equal to -1.
    """
    # NOTE(review): -1 is accepted -- presumably meaning "no limit"; confirm.
    return isinstance(requeue_limit, (int, long)) and requeue_limit >= -1
def serialize_payload(payload):
    """Tries to serialize the payload using msgpack. If it is
    not serializable, raises a TypeError.
    """
    # msgpack.packb raises TypeError for unsupported payload types.
    return msgpack.packb(payload)
def deserialize_payload(payload):
    """Tries to deserialize the payload using msgpack.
    """
    # Inverse of serialize_payload.
    return msgpack.unpackb(payload)
def generate_epoch():
    """Generates an unix epoch in ms.
    """
    # Seconds since the epoch, scaled to milliseconds and truncated.
    milliseconds = time.time() * 1000
    return int(milliseconds)
| 24.386667 | 74 | 0.666484 | # -*- coding: utf-8 -*-
# Copyright (c) 2014 Plivo Team. See LICENSE.txt for details.
import time
import msgpack
VALID_IDENTIFIER_SET = set(list('abcdefghijklmnopqrstuvwxyz0123456789_-'))
def is_valid_identifier(identifier):
    """Checks if the given identifier is valid or not. A valid
    identifier may consists of the following characters with a
    maximum length of 100 characters, minimum of 1 character.
    Valid characters for an identifier,
    - A to Z
    - a to z
    - 0 to 9
    - _ (underscore)
    - - (hypen)
    """
    # Python 2 API: `basestring` covers both str and unicode.
    if not isinstance(identifier, basestring):
        return False
    if len(identifier) > 100 or len(identifier) < 1:
        return False
    # Case-fold and de-duplicate, then check against the allowed alphabet.
    condensed_form = set(list(identifier.lower()))
    return condensed_form.issubset(VALID_IDENTIFIER_SET)
def is_valid_interval(interval):
    """Checks if the given interval is valid. A valid interval
    is always a positive, non-zero integer value.
    """
    # Python 2 API: `long` is a distinct integer type.
    if not isinstance(interval, (int, long)):
        return False
    if interval <= 0:
        return False
    return True
def is_valid_requeue_limit(requeue_limit):
    """Checks if the given requeue limit is valid.
    A valid requeue limit is always greater than
    or equal to -1.
    """
    # NOTE(review): -1 is accepted -- presumably meaning "no limit"; confirm.
    if not isinstance(requeue_limit, (int, long)):
        return False
    if requeue_limit <= -2:
        return False
    return True
def serialize_payload(payload):
    """Tries to serialize the payload using msgpack. If it is
    not serializable, raises a TypeError.
    """
    # msgpack.packb raises TypeError for unsupported payload types.
    return msgpack.packb(payload)
def deserialize_payload(payload):
    """Tries to deserialize the payload using msgpack.
    """
    # Inverse of serialize_payload.
    return msgpack.unpackb(payload)
def generate_epoch():
    """Generates an unix epoch in ms.
    """
    # Seconds since the epoch, scaled to milliseconds and truncated.
    return int(time.time() * 1000)
| 0 | 0 | 0 |
c15ecbb686f6aadc4d7e1e63436d36887d403ee5 | 3,030 | py | Python | lab4_simulated_annealing/task_1/main.py | j-adamczyk/Numerical-Algorithms | 47cfa8154bab448d1bf87b892d83e45c68dd2e2a | [
"MIT"
] | 6 | 2020-03-16T11:23:32.000Z | 2021-01-16T21:04:01.000Z | lab4_simulated_annealing/task_1/main.py | j-adamczyk/Numerical-Algorithms | 47cfa8154bab448d1bf87b892d83e45c68dd2e2a | [
"MIT"
] | null | null | null | lab4_simulated_annealing/task_1/main.py | j-adamczyk/Numerical-Algorithms | 47cfa8154bab448d1bf87b892d83e45c68dd2e2a | [
"MIT"
] | null | null | null | import copy
import generators
import math
import neighbor_states as ns
import numpy as np
import plotter
import random as rand
# Experiment configuration; NOTE(review): this runs at import time -- an
# `if __name__ == '__main__':` guard would make the module importable.
n = 200
iterations = 500000
temperature = 1000
decay_rate = 0.99995
swap_type = "consecutive"
low = 0
high = 1
distribution = "uniform"
first_path, best_path, distances_plot_data, temperatures_plot_data = travelling_salesman_problem(n, iterations, temperature, decay_rate, swap_type, low, high, distribution)
plotter.plot_data(first_path, best_path, distances_plot_data, temperatures_plot_data)
| 30.3 | 172 | 0.665017 | import copy
import generators
import math
import neighbor_states as ns
import numpy as np
import plotter
import random as rand
def get_matrix(cities):
    """Return the n x n matrix of pairwise Euclidean distances between cities.

    Each city is indexable as (x, y, ...); only the first two entries are
    used. The matrix is symmetric with a zero diagonal, so each distance is
    now computed once and mirrored -- the original looped over all (x, y)
    pairs and computed every distance twice.
    """
    n = len(cities)
    result = np.zeros((n, n))
    for i in range(n):
        x1 = cities[i][0]
        y1 = cities[i][1]
        for j in range(i + 1, n):
            x2 = cities[j][0]
            y2 = cities[j][1]
            distance = np.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2)
            result[i][j] = distance
            result[j][i] = distance
    return result
def path_distance(path, distances):
    """Total length of a path: sum of matrix distances between consecutive
    stops. Each stop carries its distance-matrix index in position 2."""
    return sum(distances[a[2]][b[2]] for a, b in zip(path, path[1:]))
def travelling_salesman_problem(n, iterations, temperature, decay_rate, swap_type, low, high, distribution):
    """Simulated-annealing TSP over `n` randomly generated points.

    Returns (first_path, best_path, distances_plot_data, temperatures_plot_data).
    NOTE(review): `best_path` is assigned once (the initially shuffled path
    object) and never reassigned afterwards; only `min_distance` is tracked.
    """
    # Generate the city layout according to the requested distribution.
    if distribution == "uniform":
        path = generators.get_uniform_distribution_points(low, high, n)
    elif distribution == "normal" or distribution == "gaussian":
        path = generators.get_normal_distribution_points(low, high, n)
    elif distribution == "groups" or distribution == "9 groups":
        path = generators.get_9_groups_of_points(low, high, n)
    else:
        raise ValueError("Error: distribution argument was not uniform, normal or groups!")
    first_path = copy.copy(path)
    if swap_type != "consecutive" and swap_type != "arbitrary":
        raise ValueError("Error: swap type argument was not consecutive or arbitrary!")
    distances = get_matrix(path)
    rand.shuffle(path)
    best_path = path
    min_distance = path_distance(best_path, distances)
    iters = []
    dists = []
    temperatures = []
    for i in range(iterations):
        iters.append(i)
        new_path = copy.copy(path)
        # NOTE(review): both branches call ns.consecutive_swap, so the
        # "arbitrary" swap type currently has no effect -- this likely
        # intended to call an arbitrary-swap helper in the else branch.
        if swap_type == "consecutive":
            new_path = ns.consecutive_swap(new_path)
        else:
            new_path = ns.consecutive_swap(new_path)
        old_path_distance = path_distance(path, distances)
        new_path_distance = path_distance(new_path, distances)
        dists.append(new_path_distance)
        # Accept improvements outright; accept worse paths with the
        # Metropolis probability exp(-delta / T).
        if new_path_distance < old_path_distance:
            path = new_path
            if new_path_distance < min_distance:
                min_distance = new_path_distance
        elif math.exp(-(new_path_distance - old_path_distance)/temperature) > rand.uniform(0, 1):
            path = new_path
        temperatures.append(temperature)
        # Geometric cooling schedule.
        temperature *= decay_rate
    distances_plot_data = (copy.copy(iters), dists)
    temperatures_plot_data = (iters, temperatures)
    return first_path, best_path, distances_plot_data, temperatures_plot_data
# Experiment configuration; NOTE(review): this runs at import time -- an
# `if __name__ == '__main__':` guard would make the module importable.
n = 200
iterations = 500000
temperature = 1000
decay_rate = 0.99995
swap_type = "consecutive"
low = 0
high = 1
distribution = "uniform"
first_path, best_path, distances_plot_data, temperatures_plot_data = travelling_salesman_problem(n, iterations, temperature, decay_rate, swap_type, low, high, distribution)
plotter.plot_data(first_path, best_path, distances_plot_data, temperatures_plot_data)
| 2,432 | 0 | 69 |
4dc65ac449ff42d60012e9346f5cadec81deb3cc | 10,088 | py | Python | src/rcmodel/reinforce.py | BFourcin/rcmodel | 160eb2ad7ea60f328abde157baeef7378e28d815 | [
"MIT"
] | 5 | 2021-07-05T13:48:59.000Z | 2021-12-08T11:40:51.000Z | src/rcmodel/reinforce.py | BFourcin/rcmodel | 160eb2ad7ea60f328abde157baeef7378e28d815 | [
"MIT"
] | null | null | null | src/rcmodel/reinforce.py | BFourcin/rcmodel | 160eb2ad7ea60f328abde157baeef7378e28d815 | [
"MIT"
] | null | null | null | import gym
from gym import spaces
import torch
import torch.nn as nn
from matplotlib import pyplot as plt
import pandas as pd
import numpy as np
from xitorch.interpolate import Interp1D
from tqdm.auto import tqdm, trange
import time
from rcmodel.room import Room
from rcmodel.building import Building
from rcmodel.RCModel import RCModel
from rcmodel.tools import InputScaling
from rcmodel.tools import BuildingTemperatureDataset
class LSIEnv(gym.Env):
    """Custom Environment that follows gym interface"""
    # Gym metadata: only human rendering is supported.
    metadata = {'render.modes': ['human']}
    # Gym contract reminder: step() conventionally returns
    # (observation, reward, done, info), i.e. `self.state, reward, done, {}`.
if __name__ == '__main__':
path_sorted = '/Users/benfourcin/OneDrive - University of Exeter/PhD/LSI/Data/210813data_sorted.csv'
time_data = torch.tensor(pd.read_csv(path_sorted, skiprows=0).iloc[:, 1], dtype=torch.float64)
temp_data = torch.tensor(pd.read_csv(path_sorted, skiprows=0).iloc[:, 2:].to_numpy(dtype=np.float32),
dtype=torch.float32)
######
path = '/Users/benfourcin/OneDrive - University of Exeter/PhD/LSI/Data/DummyData/'
dt = 30 # timestep (seconds), data and the model are sampled at this frequency
sample_size = int(5 * (60 ** 2 * 24) / dt) # one day of data
training_data = BuildingTemperatureDataset(path + 'train5d.csv', sample_size)
train_dataloader = torch.utils.data.DataLoader(training_data, batch_size=1, shuffle=False)
######
time_data = time_data[0:100]
temp_data = temp_data[0:100, :]
policy = PolicyNetwork(7, 2)
RC, Tout_continuous = initialise_model(policy)
env = LSIEnv(RC, time_data)
reinforce = Reinforce(env, time_data, temp_data, alpha=1e-2)
num_episodes = 10
step_size = 24*60**2 / 30 # timesteps in 1 day
start_time = time.time()
plot_total_rewards, plot_ER = reinforce.train(num_episodes, step_size)
print(f'fin, duration: {(time.time() - start_time) / 60:.1f} minutes')
fig, axs = plt.subplots(1, 2, figsize=(10, 7),)
axs[0].plot(torch.stack(plot_ER).detach().numpy(), label='expected rewards')
axs[0].legend()
axs[1].plot(torch.stack(plot_total_rewards).detach().numpy(), label='total rewards')
axs[1].legend()
plt.savefig('Rewards.png')
plt.show()
| 34.19661 | 139 | 0.618656 | import gym
from gym import spaces
import torch
import torch.nn as nn
from matplotlib import pyplot as plt
import pandas as pd
import numpy as np
from xitorch.interpolate import Interp1D
from tqdm.auto import tqdm, trange
import time
from rcmodel.room import Room
from rcmodel.building import Building
from rcmodel.RCModel import RCModel
from rcmodel.tools import InputScaling
from rcmodel.tools import BuildingTemperatureDataset
class PolicyNetwork(nn.Module):
    """Small MLP policy network: maps a state vector to action logits."""
    def __init__(self, in_dim, out_dim):
        super().__init__()
        n = 10  # hidden layer width
        # NOTE: `flatten` is unused in forward() but kept so existing
        # checkpoints' state_dict keys remain compatible.
        self.flatten = nn.Flatten()
        self.linear_relu_stack = nn.Sequential(
            nn.Linear(in_dim, n),
            nn.ReLU(),
            nn.Linear(n, n),
            nn.ReLU(),
            nn.Linear(n, out_dim),
        )
        self.on_policy_reset()
    def forward(self, state):
        """Return raw (unnormalised) action logits for `state`."""
        logits = self.linear_relu_stack(state)
        return logits
    def get_action(self, state):
        """Sample an action from pi(a|s); return (action, log_prob)."""
        # Renamed the local from `pd`, which shadowed the module-level
        # pandas import, to `dist`.
        dist = torch.distributions.categorical.Categorical(logits=self.forward(state))  # make a probability distribution
        action = dist.sample()  # sample from distribution pi(a|s) (action given state)
        return action, dist.log_prob(action)
    def on_policy_reset(self):
        # this stores log_probs during an integration step.
        self.log_probs = []
        # self.rewards = []
class Reinforce:
    """REINFORCE (policy-gradient) trainer for the environment's cooling policy."""
    def __init__(self, env, time_data, temp_data, gamma=0.99, alpha=1e-3):
        assert len(time_data) == len(temp_data)
        self.env = env
        # self.pi = pi
        self.time_data = time_data
        self.temp_data = temp_data
        self.gamma = gamma
        self.alpha = alpha
        # Optimises only the cooling policy's parameters.
        self.optimiser = torch.optim.Adam(self.env.RC.cooling_policy.parameters(), lr=self.alpha)
    def update_policy(self, rewards, log_probs):
        """One gradient step: discount + normalise rewards, then minimise
        -sum(log_prob * discounted_reward). Returns the scalar loss."""
        # Calculate Discounted Reward:
        discounted_rewards = torch.zeros(len(rewards))
        R = 0
        indx = len(rewards) - 1
        for r in reversed(rewards):
            R = r + self.gamma * R  # Discounted Reward is calculated from last reward to first.
            discounted_rewards[indx] = R  # Fill array back to front to un-reverse the order
            indx -= 1
        # Normalise rewards (1e-9 guards against zero std).
        discounted_rewards = (discounted_rewards - discounted_rewards.mean()) / (
                discounted_rewards.std() + 1e-9)
        # discounted_rewards = torch.tensor(np.array(discounted_rewards.detach().numpy()))
        expected_reward = -torch.stack(log_probs) * discounted_rewards  # negative for maximising
        expected_reward = torch.sum(expected_reward)
        # print(f'ER {expected_reward}')
        # Update parameters in pi
        self.optimiser.zero_grad()
        expected_reward.backward()
        self.optimiser.step()
        # print(list(self.pi.parameters())[0].grad)  # check on grads if needed
        return expected_reward
    def train(self, num_episodes, step_size):
        """Run `num_episodes` episodes, updating the policy every `step_size`
        timesteps; returns per-episode (total_rewards, total_ER)."""
        self.env.RC.cooling_policy.train()  # Put in training mode
        total_ER = []
        total_rewards = []
        loss_fn = torch.nn.MSELoss(reduction='none')  # Squared Error
        # with tqdm(total=len(self.env.time_data) * num_episodes, position=0, leave=False) as pbar:  # progress bar
        for episode in range(num_episodes):
            self.env.reset()
            episode_rewards = []
            episode_ER = []
            # Time is increased in steps, with the policy updating after every step.
            while self.env.t_index < len(self.env.time_data) - 1:
                # takes a step_size forward in time
                pred = self.env.step(step_size).squeeze(-1)  # state and action produced in step
                actual = self.temp_data[self.env.t_index:int(self.env.t_index + step_size), 0:self.env.n_rooms]
                # Reward = negative MSE against measured temperatures, so it
                # can be maximised.
                reward = -loss_fn(pred[:, 2:], actual)
                # Do gradient descent on this sample using the log-probs the
                # policy buffered during integration.
                ER = self.update_policy(reward, self.env.RC.cooling_policy.log_probs)
                self.env.RC.cooling_policy.on_policy_reset()  # empty buffer
                # get last output and use for next initial value
                self.env.RC.iv = pred[-1, :].unsqueeze(1).detach()  # MUST DETACH GRAD
                episode_rewards.append(sum(reward))
                episode_ER.append(ER)
                self.env.t_index += int(step_size)  # increase environment time
            # print(f'Episode {episode+1}, Expected Reward: {sum(episode_ER).item():.2f}, total_reward: {sum(episode_rewards).item():.2f}')
            total_ER.append(sum(episode_ER).detach())
            total_rewards.append(sum(episode_rewards).detach())
        return total_rewards, total_ER
class LSIEnv(gym.Env):
    """Custom Environment that follows gym interface"""
    metadata = {'render.modes': ['human']}
    def __init__(self, RC, time_data):
        super().__init__()
        self.RC = RC  # RCModel Class
        self.time_data = time_data  # timeseries
        self.t_index = 0  # used to keep track of index through timeseries
        # ----- GYM Stuff -----
        self.n_rooms = len(self.RC.building.rooms)
        self.low_state = -10
        self.high_state = 50
        # Define action and observation space
        # They must be gym.spaces objects
        # Example when using discrete actions:
        self.action_space = spaces.Discrete(2, )
        # Observation is temperature of each room.
        self.observation_space = spaces.Box(
            low=self.low_state,
            high=self.high_state,
            shape=(self.n_rooms,),
            dtype=np.float32
        )
    # action
    def step(self, step_size):
        """Integrate the RC model over the next `step_size` samples and
        return the detached prediction.

        NOTE(review): diverges from the gym step() contract of returning
        (observation, reward, done, info); callers handle the raw tensor.
        """
        # Execute a chunk of timeseries
        t_eval = self.time_data[self.t_index:int(self.t_index + step_size)]
        # actions are decided and stored by the policy while integrating the ODE:
        pred = self.RC(t_eval)
        return pred.detach()  # No need for grad
        # (observation, reward, done, info)
        # self.state, reward, done, {}
    def reset(self):
        # Reset the state of the environment to an initial state
        self.t_index = 0
        self.RC.reset_iv()
        self.RC.cooling_policy.on_policy_reset()
    def render(self, mode='human', close=False):
        # Render the environment to the screen (no-op).
        return
if __name__ == '__main__':
    def initialise_model(pi):
        """Build the RCModel (one seminar room plus building envelope) that
        uses cooling policy `pi`; returns (model, Tout_continuous), where
        Tout_continuous interpolates outdoor temperature over time."""
        # Force CPU: patch cuda availability to always report False.
        torch.cuda.is_available = lambda: False
        device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
        def change_origin(coords):
            """Translate coords so (x0, y0) becomes the origin and scale
            down by 10, rounded to 2 d.p. Mutates and returns `coords`."""
            x0 = 92.07
            y0 = 125.94
            for i in range(len(coords)):
                coords[i][0] = round((coords[i][0] - x0) / 10, 2)
                coords[i][1] = round((coords[i][1] - y0) / 10, 2)
            return coords
        capacitance = 3000  # Variable changed later
        rooms = []
        name = "seminar_rm_a_t0106"
        coords = change_origin(
            [[92.07, 125.94], [92.07, 231.74], [129.00, 231.74], [154.45, 231.74], [172.64, 231.74], [172.64, 125.94]])
        rooms.append(Room(name, capacitance, coords))
        # Initialise Building
        height = 1
        Re = [4, 1, 0.55]  # Sum of R makes Uval=0.18 #Variable changed later
        Ce = [1.2 * 10 ** 3, 0.8 * 10 ** 3]  # Variable changed later
        Rint = 0.66  # Uval = 1/R = 1.5 #Variable changed later
        bld = Building(rooms, height, Re, Ce, Rint)
        rm_CA = [200, 800]  # [min, max] Capacitance/area
        ex_C = [1.5 * 10 ** 4, 10 ** 6]  # Capacitance
        R = [0.2, 1.2]  # Resistance ((K.m^2)/W)
        scaling = InputScaling(rm_CA, ex_C, R)
        scale_fn = scaling.physical_scaling  # function to scale parameters back to physical values
        path_Tout = '/Users/benfourcin/OneDrive - University of Exeter/PhD/LSI/Data/Met Office Weather Files/JuneSept.csv'
        df = pd.read_csv(path_Tout)
        Tout = torch.tensor(df['Hourly Temperature (°C)'], device=device)
        t = torch.tensor(df['time'], device=device)
        Tout_continuous = Interp1D(t, Tout, method='linear')
        # Initialise RCModel with the building
        # transform = torch.sigmoid
        transform = torch.sigmoid
        model = RCModel(bld, scaling, Tout_continuous, transform, pi)
        model.to(device)  # put model on GPU if available
        model.Q_lim = 10000
        return model, Tout_continuous
path_sorted = '/Users/benfourcin/OneDrive - University of Exeter/PhD/LSI/Data/210813data_sorted.csv'
time_data = torch.tensor(pd.read_csv(path_sorted, skiprows=0).iloc[:, 1], dtype=torch.float64)
temp_data = torch.tensor(pd.read_csv(path_sorted, skiprows=0).iloc[:, 2:].to_numpy(dtype=np.float32),
dtype=torch.float32)
######
path = '/Users/benfourcin/OneDrive - University of Exeter/PhD/LSI/Data/DummyData/'
dt = 30 # timestep (seconds), data and the model are sampled at this frequency
sample_size = int(5 * (60 ** 2 * 24) / dt) # one day of data
training_data = BuildingTemperatureDataset(path + 'train5d.csv', sample_size)
train_dataloader = torch.utils.data.DataLoader(training_data, batch_size=1, shuffle=False)
######
time_data = time_data[0:100]
temp_data = temp_data[0:100, :]
policy = PolicyNetwork(7, 2)
RC, Tout_continuous = initialise_model(policy)
env = LSIEnv(RC, time_data)
reinforce = Reinforce(env, time_data, temp_data, alpha=1e-2)
num_episodes = 10
step_size = 24*60**2 / 30 # timesteps in 1 day
start_time = time.time()
plot_total_rewards, plot_ER = reinforce.train(num_episodes, step_size)
print(f'fin, duration: {(time.time() - start_time) / 60:.1f} minutes')
fig, axs = plt.subplots(1, 2, figsize=(10, 7),)
axs[0].plot(torch.stack(plot_ER).detach().numpy(), label='expected rewards')
axs[0].legend()
axs[1].plot(torch.stack(plot_total_rewards).detach().numpy(), label='total rewards')
axs[1].legend()
plt.savefig('Rewards.png')
plt.show()
| 7,394 | 5 | 367 |
b62f59c6e173df9ffd9859826fa97d14c5d0dc78 | 679 | py | Python | main.py | UntriexTv/favorable-fishers | e8aa4f83982fb43a36bb150cc749db03d361a58e | [
"MIT"
] | 2 | 2021-07-09T15:07:59.000Z | 2021-07-10T08:51:03.000Z | main.py | UntriexTv/favorable-fishers | e8aa4f83982fb43a36bb150cc749db03d361a58e | [
"MIT"
] | null | null | null | main.py | UntriexTv/favorable-fishers | e8aa4f83982fb43a36bb150cc749db03d361a58e | [
"MIT"
] | 2 | 2021-07-12T09:16:23.000Z | 2021-07-31T20:19:37.000Z | import sys
from WidgetManager import WidgetManager
from Dashboard import Dashboard
from asciimatics.screen import Screen
from asciimatics.exceptions import ResizeScreenError
"""
The application is initialized using an asciimatics wrapper
"""
widgetmanager = WidgetManager()
last_scene = None
# Run the UI; asciimatics raises ResizeScreenError from Screen.wrapper when
# the terminal is resized, so we restart the loop with the current scene.
while True:
    try:
        Screen.wrapper(app, catch_interrupt=True, arguments=[last_scene])
        sys.exit(0)
    except ResizeScreenError as e:
        # Removed a redundant `pass` that preceded this real handler body.
        # Remember where we were so the restarted UI resumes the same scene.
        last_scene = e.scene
| 23.413793 | 86 | 0.715758 | import sys
from WidgetManager import WidgetManager
from Dashboard import Dashboard
from asciimatics.screen import Screen
from asciimatics.exceptions import ResizeScreenError
"""
The application is initialized using an asciimatics wrapper
"""
def app(screen, scene):
    """Asciimatics entry point: build the dashboard and play its scene loop,
    resuming from `scene` when restarted after a resize."""
    dashboard = Dashboard(screen, widgetmanager)
    screen.play([dashboard], stop_on_resize=True, start_scene=scene, allow_int=True)
widgetmanager = WidgetManager()
last_scene = None
# Run the UI; asciimatics raises ResizeScreenError from Screen.wrapper when
# the terminal is resized, so we restart the loop with the current scene.
while True:
    try:
        Screen.wrapper(app, catch_interrupt=True, arguments=[last_scene])
        sys.exit(0)
    except ResizeScreenError as e:
        # Removed a redundant `pass` that preceded this real handler body.
        # Remember where we were so the restarted UI resumes the same scene.
        last_scene = e.scene
| 138 | 0 | 26 |
a46b7510dff06a94c65ea3c9f20676b354311ca0 | 416 | py | Python | tabluar/metrics.py | xubujie/DataScience_utils | e064a84d1d20c70781bb6cf85dd0ce151d6decaa | [
"MIT"
] | null | null | null | tabluar/metrics.py | xubujie/DataScience_utils | e064a84d1d20c70781bb6cf85dd0ce151d6decaa | [
"MIT"
] | null | null | null | tabluar/metrics.py | xubujie/DataScience_utils | e064a84d1d20c70781bb6cf85dd0ce151d6decaa | [
"MIT"
] | null | null | null | @jit | 24.470588 | 47 | 0.574519 | @jit
def fast_auc(y_true, y_prob):
    """Linear-scan ROC-AUC: sort labels by predicted score, then for each
    positive add the count of negatives ranked below it; divide by P*N.

    Decorated with numba's @jit in the source file. NOTE(review): assumes
    binary labels in {0, 1}; ties in `y_prob` are broken arbitrarily by
    argsort -- confirm that is acceptable for tied scores.
    """
    y_true = np.asarray(y_true)
    y_true = y_true[np.argsort(y_prob)]
    nfalse = 0
    auc = 0
    n = len(y_true)
    for i in range(n):
        y_i = y_true[i]
        nfalse += (1 - y_i)
        auc += y_i * nfalse
    # Normalise by (#negatives * #positives).
    auc /= (nfalse * (n - nfalse))
    return auc
def eval_auc(preds, dtrain):
labels = dtrain.get_label()
return 'auc', fast_auc(labels, preds), True | 367 | 0 | 45 |
c1cbb4533ea139bb4fcbe6f0800fc5384c15add6 | 631 | py | Python | ho/00_hello_world.py | cie/python | b953f5e9d159abe9bd865c9642595a37ac43661b | [
"CC-BY-4.0"
] | 1 | 2019-11-19T01:06:36.000Z | 2019-11-19T01:06:36.000Z | ho/00_hello_world.py | cie/python | b953f5e9d159abe9bd865c9642595a37ac43661b | [
"CC-BY-4.0"
] | 1 | 2020-05-07T22:09:11.000Z | 2020-05-08T06:52:10.000Z | ho/00_hello_world.py | cie/python | b953f5e9d159abe9bd865c9642595a37ac43661b | [
"CC-BY-4.0"
] | null | null | null | import pyxel
import math
# window width, height, and title
pyxel.init(255,255, caption="Hello")
# What to do before each frame
# How to draw each frame
# Start the app
pyxel.run(update, draw) | 28.681818 | 65 | 0.673534 | import pyxel
import math
# window width, height, and title
pyxel.init(255,255, caption="Hello")
# What to do before each frame
def update():
    # Print "Hello 0!", "Hello 1!", ... below the window, always with the
    # current frame number
    # ***** task a): modify it so that it prints "Hello World!"
    print("Hello {n}!".format(n = pyxel.frame_count))
# How to draw each frame
def draw():
    # Clear the screen with colour 0 (black)
    pyxel.cls(0)
    # ***** task b): draw a line with colour 11 (neon green)
    # Like this:
    #pyxel.line(0, 0, 30, 30, 11)
# Start the app: pyxel calls update() then draw() once per frame.
pyxel.run(update, draw)
db7891c16fb3e81ec2b1cee7753530ba6ee0e883 | 438 | py | Python | crisiscleanup/calls/api/serializers/gateway.py | CrisisCleanup/wcicp-call-service | 0a00e092625e2a48c9807737a4b72e343e1ab0b9 | [
"Apache-1.1"
] | null | null | null | crisiscleanup/calls/api/serializers/gateway.py | CrisisCleanup/wcicp-call-service | 0a00e092625e2a48c9807737a4b72e343e1ab0b9 | [
"Apache-1.1"
] | null | null | null | crisiscleanup/calls/api/serializers/gateway.py | CrisisCleanup/wcicp-call-service | 0a00e092625e2a48c9807737a4b72e343e1ab0b9 | [
"Apache-1.1"
] | null | null | null | from rest_framework import serializers
from crisiscleanup.calls.models import Gateway
from crisiscleanup.calls.models import Language
| 33.692308 | 111 | 0.753425 | from rest_framework import serializers
from crisiscleanup.calls.models import Gateway
from crisiscleanup.calls.models import Language
class GatewaySerializer(serializers.ModelSerializer):
    """DRF serializer for Gateway records; `language` is represented by a
    Language primary key on write, while nested relations are expanded one
    level deep on read (depth = 1)."""
    language = serializers.PrimaryKeyRelatedField(queryset=Language.objects)
    class Meta:
        model = Gateway
        fields = ('id','external_gateway_id', 'name', 'agent_username', 'agent_password', 'active', 'language')
        depth = 1
| 0 | 280 | 23 |
8026d66a42c5933da5b8c6df58e0453ba24b348b | 478 | py | Python | bot/db_helpers/db_helpers.py | Dakhnovskiy/pomogator_bot | 9a8b9d5f79b800020d99ffd6034df054d405e434 | [
"Apache-2.0"
] | null | null | null | bot/db_helpers/db_helpers.py | Dakhnovskiy/pomogator_bot | 9a8b9d5f79b800020d99ffd6034df054d405e434 | [
"Apache-2.0"
] | null | null | null | bot/db_helpers/db_helpers.py | Dakhnovskiy/pomogator_bot | 9a8b9d5f79b800020d99ffd6034df054d405e434 | [
"Apache-2.0"
] | null | null | null | from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from ..config import settings
__engine = create_engine(settings.DATABASE)
__session_maker = sessionmaker(bind=__engine)
@contextmanager
| 18.384615 | 45 | 0.728033 | from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from ..config import settings
__engine = create_engine(settings.DATABASE)
__session_maker = sessionmaker(bind=__engine)
def get_engine():
    """Return the module-wide SQLAlchemy engine."""
    return __engine
@contextmanager
def get_session():
    """Context manager yielding a session from the module's session maker;
    rolls back and re-raises on any exception, and always closes."""
    session = __session_maker()
    try:
        yield session
    except Exception:
        session.rollback()
        raise
    finally:
        session.close()
| 176 | 0 | 45 |
2b1b96b36e62531c1a72b6f5204b04ac339df9fd | 239 | py | Python | Python Functionals/Validating Email Addresses with a Filter.py | Code-With-Aagam/python-hackerrank | 270c75cf2ca30916183c7fe5ca130a64c7a8ed6d | [
"MIT"
] | 3 | 2022-03-05T15:38:26.000Z | 2022-03-09T13:39:30.000Z | Python Functionals/Validating Email Addresses with a Filter.py | Code-With-Aagam/python-hackerrank | 270c75cf2ca30916183c7fe5ca130a64c7a8ed6d | [
"MIT"
] | null | null | null | Python Functionals/Validating Email Addresses with a Filter.py | Code-With-Aagam/python-hackerrank | 270c75cf2ca30916183c7fe5ca130a64c7a8ed6d | [
"MIT"
] | null | null | null | import re
| 39.833333 | 80 | 0.60251 | import re
def fun(s):
    """Return True if `s` is a valid email of the form user@website.extension.

    user: starts with a letter, then letters/digits/_/-;
    website: letters and digits; extension: 1-3 letters. Case-insensitive.
    """
    # bool() makes the function honour its documented contract of returning
    # True/False rather than a Match object or None; truthiness (e.g. use
    # with filter()) is unchanged.
    return bool(re.match(r'^[a-z][\w-]*@[a-z0-9]+\.[a-z]{1,3}$', s, re.I))
| 201 | 0 | 23 |
e88ad22dda814f02fbaab397294bd50fd28688c5 | 269 | py | Python | cd4ml/readers/file_reader.py | gobert/CD4ML-Scenarios | 9abbbb271febea015d5d21256d6312d6ba058445 | [
"MIT"
] | 1 | 2020-12-24T19:52:58.000Z | 2020-12-24T19:52:58.000Z | cd4ml/readers/file_reader.py | gobert/CD4ML-Scenarios | 9abbbb271febea015d5d21256d6312d6ba058445 | [
"MIT"
] | null | null | null | cd4ml/readers/file_reader.py | gobert/CD4ML-Scenarios | 9abbbb271febea015d5d21256d6312d6ba058445 | [
"MIT"
] | 1 | 2020-05-04T18:21:41.000Z | 2020-05-04T18:21:41.000Z | from csv import DictReader
from cd4ml.filenames import file_names
| 24.454545 | 72 | 0.72119 | from csv import DictReader
from cd4ml.filenames import file_names
class CSVDictionaryReader:
    """Streams the raw-data CSV file as one dict per row."""

    def __init__(self):
        # Path to the raw data file, resolved from the project file registry.
        self.input_file = file_names['raw_data']

    def stream_data(self):
        """Return a generator yielding each CSV row as a dict.

        The original opened the file without ever closing it; the generator
        below holds the handle in a ``with`` block so it is closed when
        iteration finishes (or the generator is garbage-collected).
        """
        def _rows():
            # newline='' is the csv-module recommendation for reader input.
            with open(self.input_file, 'r', newline='') as handle:
                yield from DictReader(handle)
        return _rows()
| 121 | 5 | 76 |
025737d3d3b41f3afb2122fc8d30280b19fe4156 | 249 | py | Python | skytour/skytour/apps/dso/urls_priority.py | ursomniac/skytour | 3320d96d7ca41b8f52ede87e7112477902a37a0e | [
"MIT"
] | null | null | null | skytour/skytour/apps/dso/urls_priority.py | ursomniac/skytour | 3320d96d7ca41b8f52ede87e7112477902a37a0e | [
"MIT"
] | 1 | 2022-03-17T01:19:23.000Z | 2022-03-17T01:19:23.000Z | skytour/skytour/apps/dso/urls_priority.py | ursomniac/skytour | 3320d96d7ca41b8f52ede87e7112477902a37a0e | [
"MIT"
] | null | null | null | from django.urls import path
from .views import PriorityListView, PriorityDetailView
urlpatterns = (
path('', PriorityListView.as_view(), name='priority-list'),
path('<str:priority>', PriorityDetailView.as_view(), name='priority-detail'),
) | 35.571429 | 81 | 0.742972 | from django.urls import path
from .views import PriorityListView, PriorityDetailView
urlpatterns = (
path('', PriorityListView.as_view(), name='priority-list'),
path('<str:priority>', PriorityDetailView.as_view(), name='priority-detail'),
) | 0 | 0 | 0 |
d5dabdb3ee14b6b74a3ae95c363cf62ee4281bd3 | 2,790 | py | Python | docs/source/conf.py | jsam/redgrease | 245755b34bce287c63abb6624478cdf8189816b6 | [
"MIT"
] | null | null | null | docs/source/conf.py | jsam/redgrease | 245755b34bce287c63abb6624478cdf8189816b6 | [
"MIT"
] | null | null | null | docs/source/conf.py | jsam/redgrease | 245755b34bce287c63abb6624478cdf8189816b6 | [
"MIT"
] | null | null | null | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
from typing import List
sys.path.insert(0, os.path.abspath("../../src/"))
on_rtd = os.environ.get("READTHEDOCS") == "True"
# -- Project information -----------------------------------------------------
project = "redgrease"
copyright = "2021, Lyngon Pte. Ltd."
author = "Anders Åström"
version = "0.1" # can this be dynamic somehow?
# -- General configuration ---------------------------------------------------
autoclass_content = "both"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions: List[str] = [
"sphinx.ext.napoleon",
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
"sphinx_tabs.tabs",
# "sphinxcontrib.osexample",
]
# ["recommonmark"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns: List[str] = []
# -- Options for HTML output -------------------------------------------------
# pygments_style = "fruity"
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
html_theme_options = {
# 'analytics_id': 'UA-XXXXXXX-1', # Provided by Google in your dashboard
# "analytics_anonymize_ip": True,
"display_version": True,
"prev_next_buttons_location": "both",
"style_external_links": True,
# "style_nav_header_background": "#7a0c00",
}
html_logo = "../images/redgrease_icon_02.png"
html_favicon = "../images/LyngonIcon_v3.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# custom.css is inside one of the html_static_path folders (e.g. _static)
html_css_files = ["custom.css"]
ml_css_files = [] # type: List[str]
# def setup(app):
# app.add_stylesheet("custom.css")
| 32.44186 | 79 | 0.670251 | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
from typing import List
sys.path.insert(0, os.path.abspath("../../src/"))
on_rtd = os.environ.get("READTHEDOCS") == "True"
# -- Project information -----------------------------------------------------
project = "redgrease"
copyright = "2021, Lyngon Pte. Ltd."
author = "Anders Åström"
version = "0.1" # can this be dynamic somehow?
# -- General configuration ---------------------------------------------------
autoclass_content = "both"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions: List[str] = [
"sphinx.ext.napoleon",
"sphinx.ext.autodoc",
"sphinx.ext.viewcode",
"sphinx_tabs.tabs",
# "sphinxcontrib.osexample",
]
# ["recommonmark"]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns: List[str] = []
# -- Options for HTML output -------------------------------------------------
# pygments_style = "fruity"
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
html_theme_options = {
# 'analytics_id': 'UA-XXXXXXX-1', # Provided by Google in your dashboard
# "analytics_anonymize_ip": True,
"display_version": True,
"prev_next_buttons_location": "both",
"style_external_links": True,
# "style_nav_header_background": "#7a0c00",
}
html_logo = "../images/redgrease_icon_02.png"
html_favicon = "../images/LyngonIcon_v3.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# custom.css is inside one of the html_static_path folders (e.g. _static)
html_css_files = ["custom.css"]
ml_css_files = [] # type: List[str]
# def setup(app):
# app.add_stylesheet("custom.css")
| 0 | 0 | 0 |
748ad3066400275f1cbf9df97b8a3dbd41db4235 | 1,229 | py | Python | leetcode/433.minimum-genetic-mutation.py | geemaple/algorithm | 68bc5032e1ee52c22ef2f2e608053484c487af54 | [
"MIT"
] | 177 | 2017-08-21T08:57:43.000Z | 2020-06-22T03:44:22.000Z | leetcode/433.minimum-genetic-mutation.py | geemaple/algorithm | 68bc5032e1ee52c22ef2f2e608053484c487af54 | [
"MIT"
] | 2 | 2018-09-06T13:39:12.000Z | 2019-06-03T02:54:45.000Z | leetcode/433.minimum-genetic-mutation.py | geemaple/algorithm | 68bc5032e1ee52c22ef2f2e608053484c487af54 | [
"MIT"
] | 23 | 2017-08-23T06:01:28.000Z | 2020-04-20T03:17:36.000Z | CHOICES = ["A", "C", "G", "T"] | 28.581395 | 67 | 0.436941 | CHOICES = ["A", "C", "G", "T"]
class Solution(object):
    """Minimum genetic mutation (LeetCode 433) via breadth-first search."""

    def minMutation(self, start, end, bank):
        """Return the minimum number of single-character mutations from
        start to end, where every intermediate gene must be in bank.

        :type start: str
        :type end: str
        :type bank: List[str]
        :rtype: int  (-1 when end is unreachable)
        """
        from collections import deque  # O(1) popleft vs list.pop(0)'s O(n)
        bank_set = set(bank)           # O(1) membership tests
        visited = set([start])
        queue = deque([start])
        level = 0
        while queue:
            # Process one BFS level at a time so `level` counts mutations.
            for _ in range(len(queue)):
                gene = queue.popleft()
                if gene == end:
                    return level
                for neighbor in self.find_neighbors(gene, bank_set):
                    if neighbor not in visited:
                        visited.add(neighbor)
                        queue.append(neighbor)
            level += 1
        return -1

    def find_neighbors(self, gene, bank):
        """Return every gene in bank differing from gene by exactly one char."""
        neighbors = []
        for i in range(len(gene)):
            origin = gene[i]
            for mutated in "ACGT":  # the four nucleotide symbols
                if mutated == origin:
                    continue
                new_gene = gene[:i] + mutated + gene[i + 1:]
                if new_gene in bank:
                    neighbors.append(new_gene)
        return neighbors
7c179bba7fe82aa4cf7a9cd397c544496afb53e9 | 1,530 | py | Python | DailyProgrammer/DP20170826A.py | DayGitH/Python-Challenges | bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf | [
"MIT"
] | 2 | 2020-12-23T18:59:22.000Z | 2021-04-14T13:16:09.000Z | DailyProgrammer/DP20170826A.py | DayGitH/Python-Challenges | bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf | [
"MIT"
] | null | null | null | DailyProgrammer/DP20170826A.py | DayGitH/Python-Challenges | bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf | [
"MIT"
] | null | null | null | """
[17-08-21] Challenge #328 [Easy] Latin Squares
https://www.reddit.com/r/dailyprogrammer/comments/6v29zk/170821_challenge_328_easy_latin_squares/
#**Description**
A [Latin square](https://en.wikipedia.org/wiki/Latin_square) is an n × n array filled with n different symbols, each
occurring exactly once in each row and exactly once in each column.
For example:
>1
And,
>1 2
>2 1
Another one,
>1 2 3
>3 1 2
>2 3 1
In this challenge, you have to check whether a given array is a Latin square.
#**Input Description**
Let the user enter the length of the array followed by *n x n* numbers. Fill an array from left to right starting from
above.
#**Output Description**
If it is a Latin square, then display true. Else, display false.
#**Challenge Input**
> 5
> 1 2 3 4 5 5 1 2 3 4 4 5 1 2 3 3 4 5 1 2 2 3 4 5 1
> 2
> 1 3 3 4
> 4
> 1 2 3 4 1 3 2 4 2 3 4 1 4 3 2 1
#**Challenge Output**
> true
> false
> false
---------
#**Bonus**
A Latin square is said to be reduced if both its first row and its first column are in their natural order.
You can reduce a Latin square by reordering the rows and columns. The example in the description can be reduced to this
>1 2 3
>2 3 1
>3 1 2
If a given array turns out to be a Latin square, then your program should reduce it and display it.
Edit: /u/tomekanco has pointed out that many solutions which have an error. I shall look into this. Meanwhile, I have
added an extra challenge input-output for you to check.
"""
if __name__ == "__main__":
main()
| 28.333333 | 119 | 0.710458 | """
[17-08-21] Challenge #328 [Easy] Latin Squares
https://www.reddit.com/r/dailyprogrammer/comments/6v29zk/170821_challenge_328_easy_latin_squares/
#**Description**
A [Latin square](https://en.wikipedia.org/wiki/Latin_square) is an n × n array filled with n different symbols, each
occurring exactly once in each row and exactly once in each column.
For example:
>1
And,
>1 2
>2 1
Another one,
>1 2 3
>3 1 2
>2 3 1
In this challenge, you have to check whether a given array is a Latin square.
#**Input Description**
Let the user enter the length of the array followed by *n x n* numbers. Fill an array from left to right starting from
above.
#**Output Description**
If it is a Latin square, then display true. Else, display false.
#**Challenge Input**
> 5
> 1 2 3 4 5 5 1 2 3 4 4 5 1 2 3 3 4 5 1 2 2 3 4 5 1
> 2
> 1 3 3 4
> 4
> 1 2 3 4 1 3 2 4 2 3 4 1 4 3 2 1
#**Challenge Output**
> true
> false
> false
---------
#**Bonus**
A Latin square is said to be reduced if both its first row and its first column are in their natural order.
You can reduce a Latin square by reordering the rows and columns. The example in the description can be reduced to this
>1 2 3
>2 3 1
>3 1 2
If a given array turns out to be a Latin square, then your program should reduce it and display it.
Edit: /u/tomekanco has pointed out that many solutions which have an error. I shall look into this. Meanwhile, I have
added an extra challenge input-output for you to check.
"""
def main():
    """Placeholder entry point — the Latin-square challenge is not implemented yet."""
    pass


if __name__ == "__main__":
    main()
| -1 | 0 | 23 |
6b69d1ff458af77522bd52bd85a4421763aabc2a | 3,419 | py | Python | main.py | ufopilot/AutomicTerminal | 5e049838eb51d3b7a4f3b562187f28257dde8c23 | [
"MIT"
] | null | null | null | main.py | ufopilot/AutomicTerminal | 5e049838eb51d3b7a4f3b562187f28257dde8c23 | [
"MIT"
] | null | null | null | main.py | ufopilot/AutomicTerminal | 5e049838eb51d3b7a4f3b562187f28257dde8c23 | [
"MIT"
] | null | null | null | from qore import *
from libs.menu import Menu
from libs.monitor import Monitor
from libs.agents import Agents
from libs.header import AppHeader
from libs.health import Health
AutomicTerminal.run(title="Automic", log="textual.log")
| 32.254717 | 87 | 0.678853 | from qore import *
from libs.menu import Menu
from libs.monitor import Monitor
from libs.agents import Agents
from libs.header import AppHeader
from libs.health import Health
class AutomicTerminal(App):
    """Terminal UI application for Automic.

    Built on a textual-style App framework (imported from ``qore`` —
    presumably re-exporting textual widgets; confirm). The layout docks
    header/footer/side panels, with two slide-in overlays ("assembly"
    and "report") driven by reactive attributes.
    """

    async def on_load(self) -> None:
        """Bind keys here."""
        # Shortcuts: a/r toggle the slide-in overlays, q quits.
        await self.bind("a", "toggle_assembly", "Toggle Assembly")
        await self.bind("r", "toggle_report", "Toggle Report Viewer")
        await self.bind("q", "quit", "Quit")
        #await self.bind("m", "quit", "Monitor")

    # Reactive flags: assigning them triggers the watch_* callbacks below.
    show_assembly = Reactive(False)
    show_report = Reactive(False)

    def watch_show_assembly(self, show_assembly: bool) -> None:
        """Called when show_assembly changes."""
        # Slide the assembly (width 40) and explorer (width 240) panels
        # on-screen (offset 0) or off-screen (negative offset) over 0.5s.
        self.assembly.animate("layout_offset_x", 0 if show_assembly else -40, duration=0.5)
        self.explorer.animate("layout_offset_x", 0 if show_assembly else -240, duration=0.5)

    def watch_show_report(self, show_report: bool) -> None:
        """Called when show_report changes."""
        self.report.animate("layout_offset_x", 0 if show_report else -40, duration=0.5)

    def action_toggle_assembly(self) -> None:
        """Called when user hits toggle key."""
        self.show_assembly = not self.show_assembly

    def action_toggle_report(self) -> None:
        """Called when user hits toggle key."""
        self.show_report = not self.show_report

    async def on_mount(self) -> None:
        """Build layout here."""
        # Create the panel widgets; local aliases (header, right, ...) are
        # captured by the add_content closure below.
        self.footer = Footer()
        self.header = header = ScrollView(auto_width=False)
        self.right = right = Menu()
        self.right.terminal = self  # back-reference so the menu can drive the app
        self.left = left = ScrollView(auto_width=False)
        self.main = main = ScrollView(auto_width=False)
        self.sub_header = Widget(name="sub_header")
        self.top_header = top_header = ScrollView(auto_width=False)
        #= Widget(name="top_header")
        self.bottom = Widget(name="bottom")
        self.assembly = Widget(name="assembly")
        self.explorer = Widget(name="explorer")
        self.report = Widget(name="report")
        # docks
        await self.view.dock(self.header, edge="top", size=3)
        await self.view.dock(self.footer, edge="bottom")
        await self.view.dock(self.bottom, edge="bottom", size=5)
        await self.view.dock(self.left, edge="left", size=30)
        await self.view.dock(self.top_header, self.sub_header, edge="top", size=5)
        await self.view.dock(self.right, edge="right", size=17)
        await self.view.dock(self.main, edge="right")
        # z=1/2 keep the slide-in overlays above the base layout.
        await self.view.dock(self.assembly, edge="left", size=40, z=1)
        await self.view.dock(self.explorer, edge="left", size=200, z=1)
        await self.view.dock(self.report, edge="left", size=40, z=2)
        # Park all overlays off-screen initially (matches the watch_* offsets).
        self.assembly.layout_offset_x = -40
        self.explorer.layout_offset_x = -240
        self.report.layout_offset_x = -40
        #self.right.layout_offset_x = +400

        async def add_content():
            """Populate the panels after the first layout pass."""
            #### init Header
            appheader = AppHeader()
            agents = Agents()
            monitor = Monitor()
            health = Health()
            # Give the menu references to every panel it controls.
            right.appheader = appheader
            right.health = health
            right.agents = agents
            right.monitor = monitor
            await header.update(appheader)
            await left.update(agents)
            await main.update(monitor)
            await top_header.update(health)
            #init Monitor Panel
            #await main.update(Monitor())
            #init Agents Panel
            #await left.update(Agents())
            #init Menu Panel
            #######await right.update(Menu())
            ########

        await self.call_later(add_content)
AutomicTerminal.run(title="Automic", log="textual.log")
| 580 | 2,574 | 25 |
04584df5159930bd46ea83f3a8fdbd6456b725d6 | 3,909 | py | Python | scripts/misc/quality.py | young-oct/OCT_jupyter | 2d8e6897b967b28901da019b8d7ccde4bb3fee58 | [
"MIT"
] | 1 | 2021-03-09T03:57:27.000Z | 2021-03-09T03:57:27.000Z | scripts/misc/quality.py | young-oct/OCT_jupyter | 2d8e6897b967b28901da019b8d7ccde4bb3fee58 | [
"MIT"
] | null | null | null | scripts/misc/quality.py | young-oct/OCT_jupyter | 2d8e6897b967b28901da019b8d7ccde4bb3fee58 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# @Time : 2021-04-26 7:52 p.m.
# @Author : young wang
# @FileName: quality.py
# @Software: PyCharm
import numpy as np
from skimage.filters import gaussian
from scipy.ndimage import median_filter
from misc.processing import imag2uint
def ROI(x, y, width, height,s):
'''obtain the ROI from the standard layout [330x512]
parameters
----------
s has the standard dimension [330x512]
y is defined as the axial direction: 330
x is defined as the lateral direction: 512
height refers the increment in the axial direction > 0
width refers the increment in the lateral direction > 0
'''
# fetch ROI
if height > 0 and width > 0:
if (x >= 0) and (y >= 0) and (x + width <= s.shape[1]) and (y + height <= s.shape[0]):
roi = s[y:y + height, x:x + width]
return roi
def SF(s):
'''obtain the sparsity fraction from given region of interest
parameters
----------
i_{mn} represents the matrix of pixel intensities at
each location \left(m,n\right) in an N by M image patch,
where im,n0 is the l_0 norm of i_{mn}, i.e.,
the number of nonzero elements
'''
return (1 - np.count_nonzero(s) / s.size)
def SNR(roi_h,roi_b):
'''compute the SNR of a given homogenous region
SNR = 10*log10(uh/σb)
Improving ultrasound images with
elevational angular compounding based on
acoustic refraction
https://doi.org/10.1038/s41598-020-75092-8
parameters
----------
roi_h: array_like
homogeneous region
roi_b: array_like
background region
'''
mean_h = np.mean(roi_h)
std_b = np.std(roi_b)
with np.errstate(divide='ignore'):
snr = 10*np.log10(mean_h/ std_b)
return snr
def CNR(roi_h,roi_a):
'''compute the CNR between homogeneous and region free of
structure
CNR = 10*log((|uh-ub|/σb)
Reference:
Improving ultrasound images with
elevational angular compounding based on
acoustic refraction
https://doi.org/10.1038/s41598-020-75092-8
parameters
----------
roi_h: array_like
homogeneous region
roi_a: array_like
region free of structure
'''
h_mean = np.mean(roi_h)
a_mean = np.mean(roi_a)
a_std = np.std(roi_a)
with np.errstate(divide='ignore'):
cnr = abs(h_mean - a_mean) / a_std
return 10*np.log10(cnr)
| 23.835366 | 94 | 0.63648 | # -*- coding: utf-8 -*-
# @Time : 2021-04-26 7:52 p.m.
# @Author : young wang
# @FileName: quality.py
# @Software: PyCharm
import numpy as np
from skimage.filters import gaussian
from scipy.ndimage import median_filter
from misc.processing import imag2uint
def gaussian_blur(noisy, sigma=0.5):
    """Apply an isotropic Gaussian low-pass filter to an image.

    Thin wrapper over skimage.filters.gaussian with edge-replicating
    boundary handling (mode='nearest') and a 4-sigma kernel truncation.
    NOTE(review): multichannel=None relies on skimage inferring channel
    layout and is deprecated in newer skimage — confirm the pinned version.
    """
    out = gaussian(noisy, sigma=sigma, output=None, mode='nearest', cval=0,
                   multichannel=None, preserve_range=False, truncate=4.0)
    return (out)
def ROI(x, y, width, height, s):
    """Extract a width x height patch anchored at (x, y) from the
    standard [330x512] layout.

    x/width run along the lateral axis (columns, up to 512); y/height run
    along the axial axis (rows, up to 330). Returns None when the patch is
    degenerate or does not fit inside the image.
    """
    if width <= 0 or height <= 0:
        return None
    fits = (x >= 0 and y >= 0
            and x + width <= s.shape[1]
            and y + height <= s.shape[0])
    if not fits:
        return None
    return s[y:y + height, x:x + width]
def SF(s):
    """Sparsity fraction of a patch: the share of zero-valued entries.

    Computed as 1 - ||s||_0 / s.size, where ||s||_0 is the l0 "norm"
    (the count of nonzero pixel intensities).
    """
    nonzero_count = np.count_nonzero(s)
    return 1 - nonzero_count / s.size
def SNR(roi_h, roi_b):
    """Signal-to-noise ratio in dB: 10*log10(mean(roi_h) / std(roi_b)).

    roi_h: array_like — homogeneous (signal) region
    roi_b: array_like — background (noise) region

    Reference: "Improving ultrasound images with elevational angular
    compounding based on acoustic refraction",
    https://doi.org/10.1038/s41598-020-75092-8
    """
    signal_level = np.mean(roi_h)
    noise_level = np.std(roi_b)
    # Suppress divide-by-zero warnings for an all-constant background.
    with np.errstate(divide='ignore'):
        ratio_db = 10 * np.log10(signal_level / noise_level)
    return ratio_db
def CNR(roi_h, roi_a):
    """Contrast-to-noise ratio in dB between a homogeneous region and a
    region free of structure: 10*log10(|mean(roi_h) - mean(roi_a)| / std(roi_a)).

    roi_h: array_like — homogeneous region
    roi_a: array_like — region free of structure

    Reference: "Improving ultrasound images with elevational angular
    compounding based on acoustic refraction",
    https://doi.org/10.1038/s41598-020-75092-8
    """
    mean_gap = abs(np.mean(roi_h) - np.mean(roi_a))
    noise = np.std(roi_a)
    # Suppress divide-by-zero warnings for a constant structure-free patch.
    with np.errstate(divide='ignore'):
        ratio = mean_gap / noise
    return 10 * np.log10(ratio)
def Contrast(region_h, region_b):
    """Mean-intensity contrast between two regions, in dB:
    10*log10(mean(region_h) / mean(region_b)).
    """
    numerator = np.mean(region_h)
    denominator = np.mean(region_b)
    # Suppress divide-by-zero warnings for an all-zero background.
    with np.errstate(divide='ignore'):
        ratio = numerator / denominator
    return 10 * np.log10(ratio)
def log_gCNR(region_h, region_b, improvement = False):
    """Generalized contrast-to-noise ratio (gCNR) between two regions.

    gCNR = 1 - OVL, where OVL is the overlap between the two regions'
    normalized log-intensity histograms: 1 means fully separable
    distributions, 0 means identical ones.

    region_h: array_like — homogeneous region
    region_b: array_like — background region (same number of samples)
    improvement: when True, 3x3 median-filter both regions first
    """
    assert np.size(region_h) == np.size(region_b), \
        'size of image patch'
    if improvement == True:
        region_h = median_filter(region_h,size=(3,3))
        region_b = median_filter(region_b,size=(3,3))
    else:
        pass
    region_h = np.ravel(region_h)
    region_b = np.ravel(region_b)
    N = 256
    rvmin, vmax = 5, 55 #dB
    # in histogram when density flag is set to be true, the integral is
    # 1 instead of the cumulative PDF, to address this, bin width needs to
    # be the same
    # imag2uint presumably maps the 5-55 dB window onto the 0-255 integer
    # range used by the histograms below — confirm against src.utils.processing.
    log_h1 = imag2uint(10*np.log10(region_h), rvmin, vmax)
    log_h2 = imag2uint(10*np.log10(region_b), rvmin, vmax)
    min_val, max_val = 0, 255
    h_hist, edge = np.histogram(log_h1, bins=N, range=(min_val, max_val), density=True)
    # Multiply by the (uniform) bin width so each histogram sums to 1 (a PMF).
    h_hist = h_hist * np.diff(edge)
    b_hist, edge = np.histogram(log_h2, bins=N, range=(min_val, max_val), density=True)
    b_hist = b_hist * np.diff(edge)
    # OVL: sum of bin-wise minima of the two PMFs.
    ovl = 0
    for i in range(0,N):
        ovl += min(h_hist[i], b_hist[i])
    return 1 - ovl
| 1,442 | 0 | 69 |
dc811f4c086d939f2566ed85ebb5c52fd7df8113 | 3,467 | py | Python | db.py | BPtacek/twitter_bot | 9df4175cc507ce8b1f9d847e92f0b69809f61a12 | [
"MIT"
] | null | null | null | db.py | BPtacek/twitter_bot | 9df4175cc507ce8b1f9d847e92f0b69809f61a12 | [
"MIT"
] | null | null | null | db.py | BPtacek/twitter_bot | 9df4175cc507ce8b1f9d847e92f0b69809f61a12 | [
"MIT"
] | null | null | null | import requests
import sqlite3
conn = sqlite3.connect('twitter_resources.db', check_same_thread=False)
c = conn.cursor()
| 37.27957 | 88 | 0.592443 | import requests
import sqlite3
conn = sqlite3.connect('twitter_resources.db', check_same_thread=False)
c = conn.cursor()
def setup_db():
    """Create the teams_twitter table (if absent) and seed 2019 NHL team data.

    Rows are inserted with parameterized (qmark) placeholders via a single
    executemany + one commit, replacing per-row str.format interpolation,
    which was vulnerable to quoting problems and SQL injection.
    """
    c.execute("""CREATE TABLE IF NOT EXISTS teams_twitter (
                id INTEGER PRIMARY KEY,
                team_name TEXT NOT NULL UNIQUE,
                twitter_handle TEXT NOT NULL UNIQUE,
                hashtag TEXT NOT NULL UNIQUE,
                season INTEGER NOT NULL,
                tricode TEXT UNIQUE)
                """)
    conn.commit()
    data = ["Anaheim Ducks, @AnaheimDucks, #LetsGoDucks",
            "Arizona Coyotes, @ArizonaCoyotes, #Yotes",
            "Boston Bruins, @NHLBruins, #NHLBruins",
            "Buffalo Sabres, @BuffaloSabres, #Sabres50",
            "Calgary Flames, @NHLFlames, #Flames",
            "Carolina Hurricanes, @NHLCanes, #LetsGoCanes",
            "Chicago Blackhawks, @NHLBlackhawks, #Blackhawks",
            "Colorado Avalanche, @Avalanche, #GoAvsGo",
            "Columbus Blue Jackets, @BlueJacketsNHL, #CBJ",
            "Dallas Stars, @DallasStars, #GoStars",
            "Detroit Red Wings, @DetroitRedWings, #LGRW",
            "Edmonton Oilers, @EdmontonOilers, #LetsGoOilers",
            "Florida Panthers, @FlaPanthers, #FlaPanthers",
            "Los Angeles Kings, @LAKings, #GoKingsGo",
            "Minnesota Wild, @mnwild, #MNWild",
            "Montreal Canadiens, @CanadiensMTL, #GoHabsGo",
            "Nashville Predators, @PredsNHL, #Preds",
            "New Jersey Devils, @NJDevils, #NJDevils",
            "New York Islanders, @NYIslanders, #Isles",
            "New York Rangers, @NYRangers, #PlayLikeANewYorker",
            "Ottawa Senators, @Senators, #goSensgo",
            "Philadelphia Flyers, @NHLFlyers, #FlyOrDie",
            "Pittsburgh Penguins, @penguins, #LetsGoPens",
            "San Jose Sharks, @SanJoseSharks, #SJSharks",
            "St. Louis Blues, @StLouisBlues, #STLBlues",
            "Tampa Bay Lightning, @TBLightning, #GoBolts",
            "Toronto Maple Leafs, @MapleLeafs, #LeafsForever",
            "Vancouver Canucks, @Canucks, #Canucks",
            "Vegas Golden Knights, @GoldenKnights, #VegasBorn",
            "Washington Capitals, @Capitals, #ALLCAPS",
            "Winnipeg Jets, @NHLJets, #GoJetsGo"]
    rows = []
    for team in data:
        # "Name, @handle, #hashtag" — only handle/hashtag carry a leading space.
        name, handle, hashtag = team.split(",")
        rows.append((name, handle.strip(), hashtag.strip()))
    c.executemany(
        """INSERT INTO teams_twitter (team_name, twitter_handle, hashtag, season)
        VALUES (?, ?, ?, 2019)""", rows)
    conn.commit()
def update_db_with_tricodes():
    """Fetch team abbreviations from the NHL stats API and store them.

    Network call: GET https://statsapi.web.nhl.com//api/v1/teams; each
    team's tricode (e.g. "TOR") is written back by exact team name.
    """
    r = requests.get("https://statsapi.web.nhl.com//api/v1/teams")
    teams = r.json()["teams"]
    for team in teams:
        teamname = team.get("name")
        tricode = team.get("abbreviation")
        print(tricode, teamname)
        # Parameterized UPDATE: robust to quotes in names and SQL injection,
        # unlike the previous str.format interpolation.
        c.execute("""UPDATE teams_twitter
                    SET tricode = ?
                    WHERE team_name = ?""", (tricode, teamname))
    conn.commit()
def get_hashtags(*teams):
    """Return the stored hashtag for each given team name, in order.

    Raises TypeError if a team name is unknown (fetchone() returns None),
    matching the previous behavior.
    """
    results = []
    for team in teams:
        # Parameterized query instead of str.format: prevents SQL injection
        # and handles names containing quotes.
        c.execute("SELECT hashtag FROM teams_twitter WHERE team_name=?", (team,))
        results.append(*c.fetchone())
    return results
def get_tricodes(*teams):
    """Return the stored tricode (e.g. "TOR") for each given team name, in order.

    Raises TypeError if a team name is unknown (fetchone() returns None),
    matching the previous behavior.
    """
    results = []
    for team in teams:
        # Parameterized query instead of str.format: prevents SQL injection
        # and handles names containing quotes.
        c.execute("SELECT tricode FROM teams_twitter WHERE team_name=?", (team,))
        results.append(*c.fetchone())
    return results
| 3,249 | 0 | 92 |
16c75f1f1c9db46d3d956b6d325f41a44060954c | 1,729 | py | Python | tests/unit/test_CXCSVData.py | docinfosci/canvasxpress-python | 532a981b04d0f50bbde1852c695117a6220f4589 | [
"MIT"
] | 4 | 2021-03-18T17:23:40.000Z | 2022-02-01T19:07:01.000Z | tests/unit/test_CXCSVData.py | docinfosci/canvasxpress-python | 532a981b04d0f50bbde1852c695117a6220f4589 | [
"MIT"
] | 8 | 2021-04-30T20:46:57.000Z | 2022-03-10T07:25:31.000Z | tests/unit/test_CXCSVData.py | docinfosci/canvasxpress-python | 532a981b04d0f50bbde1852c695117a6220f4589 | [
"MIT"
] | 1 | 2022-02-03T00:35:14.000Z | 2022-02-03T00:35:14.000Z | import csv
from copy import copy, deepcopy
import pytest
from hypothesis import given
from hypothesis.extra.pandas import data_frames, column
from canvasxpress.data.matrix import CXCSVData
from tests.util.hypothesis_support import everything_except
csv_sample = """
"C1","C2","C3"
1,2,3
4,5,6
"""
@given(
data_frames([column('A', dtype=int), column('B', dtype=int)])
)
@given(everything_except(dict, str))
@given(everything_except(dict, str))
| 23.684932 | 73 | 0.728167 | import csv
from copy import copy, deepcopy
import pytest
from hypothesis import given
from hypothesis.extra.pandas import data_frames, column
from canvasxpress.data.matrix import CXCSVData
from tests.util.hypothesis_support import everything_except
csv_sample = """
"C1","C2","C3"
1,2,3
4,5,6
"""
@given(
    data_frames([column('A', dtype=int), column('B', dtype=int)])
)
def test_CXCSVData_init_valid_input(sample):
    """Constructor should store hypothesis-generated CSV text verbatim."""
    csv_sample = sample.to_csv(index=False, quoting=csv.QUOTE_NONNUMERIC)
    cxdata = CXCSVData(csv_sample)
    assert csv_sample == cxdata.csv
@given(everything_except(dict, str))
def test_CXCSVData_init_invalid_input(sample):
    """Constructor should raise TypeError for non-dict, non-str inputs.

    None is skipped — presumably None is an accepted "empty" value;
    confirm against CXCSVData's contract.
    """
    if sample is not None:
        with pytest.raises(TypeError):
            CXCSVData(sample)


@given(everything_except(dict, str))
def test_CXCSVData_set_data_invalid(sample):
    """The csv property setter should reject non-dict, non-str values."""
    csvdata = CXCSVData()
    if sample is not None:
        with pytest.raises(TypeError):
            csvdata.csv = sample
def test_CXCSVData_get_valid_data():
    """Assigning CSV text via the csv property should store it unchanged.

    The original test set the property but asserted nothing; the round-trip
    assertion mirrors test_CXCSVData_init_valid_input's constructor check.
    """
    cxdata = CXCSVData()
    cxdf_sample = csv_sample
    cxdata.csv = cxdf_sample
    assert cxdata.csv == cxdf_sample
def test_copy_CXCSVData():
    """Shallow copy should compare equal to the original."""
    cxdata1 = CXCSVData(csv_sample)
    cxdata2 = copy(cxdata1)
    assert cxdata1 == cxdata2


def test_deepcopy_CXCSVData():
    """Deep copy should compare equal to the original."""
    cxdata1 = CXCSVData(csv_sample)
    cxdata2 = deepcopy(cxdata1)
    assert cxdata1 == cxdata2
def test_CXCSVData_str_perspective():
    """str() should render the CSV text (modulo surrounding whitespace)."""
    cxdata1 = CXCSVData(csv_sample)
    cxdata1_str = str(cxdata1).strip()
    assert cxdata1_str == csv_sample.strip()


def test_CXCSVData_repr_perspective():
    """repr() should be a string that eval()s back to an equivalent object."""
    cxdata1 = CXCSVData(csv_sample)
    cxdata1_repr = repr(cxdata1)
    assert isinstance(cxdata1_repr, str)
    cxdata2: CXCSVData = eval(cxdata1_repr)
    assert cxdata1.csv == cxdata2.csv
| 1,087 | 0 | 181 |
94eb96d9701dd1faff5feeed8a843a2f935f8186 | 30 | py | Python | vis_tools/__init__.py | constantinpape/vis_tools | 79bf51be97d305edd83f10f31bd5b530587bd021 | [
"MIT"
] | null | null | null | vis_tools/__init__.py | constantinpape/vis_tools | 79bf51be97d305edd83f10f31bd5b530587bd021 | [
"MIT"
] | null | null | null | vis_tools/__init__.py | constantinpape/vis_tools | 79bf51be97d305edd83f10f31bd5b530587bd021 | [
"MIT"
] | null | null | null | from .edges import make_edges
| 15 | 29 | 0.833333 | from .edges import make_edges
| 0 | 0 | 0 |
7e81a55414327b9f16871ef65751544ca50f1ea5 | 359 | py | Python | chap_18/test_add2.py | jieyanzhu/codes-effective-computation-in-physics | 0c99f2da9d462229e6b174a010d7c7b08af4482b | [
"MIT"
] | null | null | null | chap_18/test_add2.py | jieyanzhu/codes-effective-computation-in-physics | 0c99f2da9d462229e6b174a010d7c7b08af4482b | [
"MIT"
] | 1 | 2021-12-23T10:09:01.000Z | 2021-12-23T12:06:25.000Z | chap_18/test_add2.py | jieyanzhu/codes-effective-computation-in-physics | 0c99f2da9d462229e6b174a010d7c7b08af4482b | [
"MIT"
] | null | null | null | from nose.tools import assert_equal
| 17.95 | 35 | 0.470752 | from nose.tools import assert_equal
def add2(x, y):
    """Return the sum of x and y."""
    total = x + y
    return total
def check_add2(exp, x, y):
    """Assert that add2(x, y) equals the expected value exp."""
    obs = add2(x, y)
    assert_equal(exp, obs)


def test_add2():
    """Nose-style generator test: yields one check per (expected, x, y) case."""
    cases = [
        (4, 2, 2),
        (5, -5, 10),
        (42, 40, 2),
        (16, 3, 13),
        (-128, 0, -128),
    ]
    for exp, x, y in cases:
        # Each yielded tuple becomes an individual test case under nose.
        yield check_add2, exp, x, y
| 254 | 0 | 69 |
37622db5e295df54423f2cd1b72fd2fb623225b2 | 1,335 | py | Python | env/Lib/site-packages/pangocffi/__init__.py | kodelaben/manimce | fef4f1bd748d1cc8c1ade73e7561d4235dc253ca | [
"MIT"
] | 1 | 2021-06-01T19:10:23.000Z | 2021-06-01T19:10:23.000Z | env/Lib/site-packages/pangocffi/__init__.py | kodelaben/manimce | fef4f1bd748d1cc8c1ade73e7561d4235dc253ca | [
"MIT"
] | null | null | null | env/Lib/site-packages/pangocffi/__init__.py | kodelaben/manimce | fef4f1bd748d1cc8c1ade73e7561d4235dc253ca | [
"MIT"
] | null | null | null | import ctypes.util
from .ffi_build import ffi
def _dlopen(generated_ffi, *names):
"""Try various names for the same library, for different platforms."""
for name in names:
for lib_name in (name, 'lib' + name):
try:
path = ctypes.util.find_library(lib_name)
lib = generated_ffi.dlopen(path or lib_name)
if lib:
return lib
except OSError:
pass
raise OSError("dlopen() failed to load a library: %s" % ' / '.join(names))
pango = _dlopen(ffi, 'pango', 'pango-1', 'pango-1.0', 'pango-1.0-0')
gobject = _dlopen(ffi, 'gobject-2.0', 'gobject-2.0-0')
# Imports are normally always put at the top of the file.
# But the wrapper API requires that the pango library be loaded first.
# Therefore, we have to disable linting rules for these lines.
from .version import * # noqa
from .enums import * # noqa
from .convert import * # noqa
from .font_description import FontDescription # noqa
from .rectangle import Rectangle # noqa
from .item import Item # noqa
from .context import Context # noqa
from .glyph_item import GlyphItem # noqa
from .glyph_item_iter import GlyphItemIter # noqa
from .layout_run import LayoutRun # noqa
from .layout_iter import LayoutIter # noqa
from .layout import Layout # noqa
| 35.131579 | 78 | 0.668165 | import ctypes.util
from .ffi_build import ffi
def _dlopen(generated_ffi, *names):
"""Try various names for the same library, for different platforms."""
for name in names:
for lib_name in (name, 'lib' + name):
try:
path = ctypes.util.find_library(lib_name)
lib = generated_ffi.dlopen(path or lib_name)
if lib:
return lib
except OSError:
pass
raise OSError("dlopen() failed to load a library: %s" % ' / '.join(names))
pango = _dlopen(ffi, 'pango', 'pango-1', 'pango-1.0', 'pango-1.0-0')
gobject = _dlopen(ffi, 'gobject-2.0', 'gobject-2.0-0')
# Imports are normally always put at the top of the file.
# But the wrapper API requires that the pango library be loaded first.
# Therefore, we have to disable linting rules for these lines.
from .version import * # noqa
from .enums import * # noqa
from .convert import * # noqa
from .font_description import FontDescription # noqa
from .rectangle import Rectangle # noqa
from .item import Item # noqa
from .context import Context # noqa
from .glyph_item import GlyphItem # noqa
from .glyph_item_iter import GlyphItemIter # noqa
from .layout_run import LayoutRun # noqa
from .layout_iter import LayoutIter # noqa
from .layout import Layout # noqa
| 0 | 0 | 0 |
bf6253393c8bcc238caab589dc52fa201083043f | 3,454 | py | Python | word-formatter/src/classifier/classifier.py | MonsterPixel/WordFormat-Python | b6d4e4c74ca66d2076163a4187e8b2851262da65 | [
"MIT"
] | null | null | null | word-formatter/src/classifier/classifier.py | MonsterPixel/WordFormat-Python | b6d4e4c74ca66d2076163a4187e8b2851262da65 | [
"MIT"
] | null | null | null | word-formatter/src/classifier/classifier.py | MonsterPixel/WordFormat-Python | b6d4e4c74ca66d2076163a4187e8b2851262da65 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import os
from datetime import datetime
from src.utils.filemeta import get_filename
from src.loader.converter import Converter
from src.utils.logging import logger
| 27.854839 | 109 | 0.555298 | # -*- coding: utf-8 -*-
import os
from datetime import datetime
from src.utils.filemeta import get_filename
from src.loader.converter import Converter
from src.utils.logging import logger
class Classifier(object):
    """Abstract base for classifiers operating on document graphs.

    Concrete subclasses supply the ``build``/``load`` factories and the
    ``_train``/``_evaluate``/``_predict`` hooks; this base class provides
    timed, logged wrappers around those hooks plus the raw -> graph
    preprocessing step.
    """
    NAME = None   # display name of the classifier; set by subclasses
    MODEL = None  # model object/identifier; set by subclasses
    def __init__(self, args=None, model=None, files=None, raw=None, formatted=None) -> None:
        """Store configuration.

        *raw* and *formatted* are path prefixes for the unformatted input
        documents and their formatted counterparts; *files* is the list of
        raw document paths to process.
        """
        super().__init__()
        self.args = args
        self.model = model
        self.files = files
        self.raw = raw
        self.formatted = formatted
    @classmethod
    def build(cls, raw=None, formatted=None, **kargs):
        """Construct a new classifier instance (subclass responsibility)."""
        raise NotImplementedError
    @classmethod
    def load(cls, **kargs):
        """Load a previously saved classifier (subclass responsibility)."""
        raise NotImplementedError
    def train(self, **kargs):
        """Run ``_train`` with timing/logging, then persist via ``save``."""
        # NOTE(review): for a plain dict, .update() returns None, which would
        # break args.path below — presumably self.args is a namespace-like
        # object whose update() returns itself; confirm.
        args = self.args.update(locals())
        logger.info("Training the data")
        start = datetime.now()
        self._train()
        elapsed = datetime.now() - start
        logger.info(f"{elapsed}s elapsed")
        self.save(args.path)
    def evaluate(self, **kargs):
        """Run ``_evaluate`` with timing/logging."""
        args = self.args.update(locals())
        logger.info("Evaluating the data")
        start = datetime.now()
        self._evaluate()
        elapsed = datetime.now() - start
        logger.info(f"{elapsed}s elapsed")
    def predict(self, **kargs):
        """Load the saved model file, then run ``_predict`` with timing/logging."""
        args = self.args.update(locals())
        logger.info("Predicting the data")
        self.dcpoint = []
        self.ccpoint = []
        self.label = []
        # Model file: one tab-separated record per line; the first field holds
        # two ';'-separated points and the last field the label — presumably
        # matching the format written by a subclass; confirm.
        with open(args.path) as fmodel:
            for m in fmodel:
                i, _, l = m.split('\t')
                self.dcpoint.append(i.split(';')[0])
                self.ccpoint.append(i.split(';')[1])
                self.label.append(l)
        start = datetime.now()
        self._predict()
        elapsed = datetime.now() - start
        logger.info(f"{elapsed}s elapsed")
    def _train(self):
        """Model-specific training; implemented by subclasses."""
        raise NotImplementedError
    def _evaluate(self):
        """Model-specific evaluation; implemented by subclasses."""
        raise NotImplementedError
    def _predict(self):
        """Model-specific prediction; implemented by subclasses."""
        raise NotImplementedError
    def save(self, path):
        """Write one centrality point per line to *path*.

        NOTE(review): ``self.centrality_points`` is never assigned in this
        class — presumably set by a subclass before save() runs; confirm.
        """
        with open(path, 'w') as fmodel:
            for p in self.centrality_points:
                fmodel.write(p)
                fmodel.write('\n')
    def preprocess(self, **kargs):
        """Preprocess files and convert them to graph data structure.
        """
        # For every raw file that has a formatted counterpart, convert both
        # to graphs and write them to '<raw>graph/<name>.graph', with the raw
        # graph first, a '[SEP]' marker line, then the formatted graph.
        for file in self.files:
            filename_raw = get_filename(file)
            if os.path.exists(f'{self.formatted}{filename_raw}'):
                graph_path = self.raw + 'graph/'
                if not os.path.isdir(graph_path):
                    os.makedirs(graph_path, exist_ok=True)
                graph_raw = filename_raw.replace('.docx', '')
                raw_converter = Converter(file)
                raw_converter.convert()
                formatted_converter = Converter(f'{self.formatted}{filename_raw}')
                formatted_converter.convert()
                with open(f'{graph_path}{graph_raw}.graph', 'w') as fgraph:
                    raw_graph = raw_converter.graphs
                    formatted_graph = formatted_converter.graphs
                    for rg in raw_graph:
                        fgraph.write(f'{rg[0].replace(os.linesep, " ")}\t{rg[1].replace(os.linesep, " ")}\n')
                    fgraph.write('[SEP]\n')
                    for fg in formatted_graph:
                        fgraph.write(f'{fg[0].replace(os.linesep, " ")}\t{fg[1].replace(os.linesep, " ")}\n')
| 1,632 | 1,608 | 23 |
fbf1ae27d9bcac66c8206372667b4d1af0f93cae | 1,223 | py | Python | .asv/results/benchmarkers-MacBook-Pro.local/clean.py | qwhelan/conda-benchmarks | 7b7de2c0057e0d1ec1d35e2ca4d24aa8ac6b9b69 | [
"BSD-3-Clause"
] | 3 | 2018-12-29T02:39:09.000Z | 2021-11-23T22:42:57.000Z | .asv/results/benchmarkers-MacBook-Pro.local/clean.py | qwhelan/conda-benchmarks | 7b7de2c0057e0d1ec1d35e2ca4d24aa8ac6b9b69 | [
"BSD-3-Clause"
] | 1 | 2021-08-23T05:42:36.000Z | 2021-08-23T05:42:36.000Z | .asv/results/benchmarkers-MacBook-Pro.local/clean.py | LaudateCorpus1/conda-benchmarks | 7b7de2c0057e0d1ec1d35e2ca4d24aa8ac6b9b69 | [
"BSD-3-Clause"
] | 8 | 2019-01-26T09:42:52.000Z | 2022-01-28T20:19:21.000Z | import os
import json
import glob
# test_name = 'time_solve_r_essentials_r_base_conda_forge'
test_name = 'time_solve_anaconda_44'
for f in glob.glob("*.json"):
with open(f) as fd:
d = json.load(fd)
if f != 'machine.json' and d:
# timeval = ((d.get('results', {}) or {}).get(test_name, {}) or {}).get('result', [0])[0]
# if timeval < 1.0 and f != "machine.json":
# print("remove {}".format(f))
# os.remove(f)
if 'params' in d:
if 'conda-env' not in d['params']:
d['params']['conda-env'] = ""
d['requirements']['conda-env'] = ""
elif d['params']['conda-env'] == []:
d['params']['conda-env'] = ""
d['requirements']['conda-env'] = ""
else:
d['params'] = {'conda-env': ""}
d['requirements']['conda-env'] = ""
if "chardet-mock" in d['env_name']:
d['env_name'] = d['env_name'].replace("chardet-mock", 'chardet-conda-env-mock')
f = f.replace("chardet-mock", 'chardet-conda-env-mock')
with open(f, 'w') as fd:
json.dump(d, fd, indent=2)
else:
print("file {} appears corrupt".format(f))
| 37.060606 | 97 | 0.503679 | import os
import json
import glob
# test_name = 'time_solve_r_essentials_r_base_conda_forge'
test_name = 'time_solve_anaconda_44'
for f in glob.glob("*.json"):
with open(f) as fd:
d = json.load(fd)
if f != 'machine.json' and d:
# timeval = ((d.get('results', {}) or {}).get(test_name, {}) or {}).get('result', [0])[0]
# if timeval < 1.0 and f != "machine.json":
# print("remove {}".format(f))
# os.remove(f)
if 'params' in d:
if 'conda-env' not in d['params']:
d['params']['conda-env'] = ""
d['requirements']['conda-env'] = ""
elif d['params']['conda-env'] == []:
d['params']['conda-env'] = ""
d['requirements']['conda-env'] = ""
else:
d['params'] = {'conda-env': ""}
d['requirements']['conda-env'] = ""
if "chardet-mock" in d['env_name']:
d['env_name'] = d['env_name'].replace("chardet-mock", 'chardet-conda-env-mock')
f = f.replace("chardet-mock", 'chardet-conda-env-mock')
with open(f, 'w') as fd:
json.dump(d, fd, indent=2)
else:
print("file {} appears corrupt".format(f))
| 0 | 0 | 0 |
8b6a420f8f67c2fa137e731c7ca7ec8cb49f00ef | 8,035 | py | Python | src/model/setup.py | solevis/PSS-Fleet-Helper | 4d36a31b4737a53964cde1625a3d6ff2c04bd76a | [
"MIT"
] | 1 | 2022-01-11T11:52:36.000Z | 2022-01-11T11:52:36.000Z | src/model/setup.py | solevis/PSS-Fleet-Helper | 4d36a31b4737a53964cde1625a3d6ff2c04bd76a | [
"MIT"
] | 16 | 2021-10-08T10:15:21.000Z | 2022-01-23T12:59:03.000Z | src/model/setup.py | solevis/PSS-Fleet-Helper | 4d36a31b4737a53964cde1625a3d6ff2c04bd76a | [
"MIT"
] | 1 | 2022-02-07T19:48:26.000Z | 2022-02-07T19:48:26.000Z | import asyncio as _asyncio
from typing import Callable as _Callable
from . import database as _database
from .chat_log import PssChatLogger as _PssChatLogger
from .reaction_role import ReactionRole as _ReactionRole
from .reaction_role import ReactionRoleChange as _ReactionRoleChange
from .reaction_role import ReactionRoleRequirement as _ReactionRoleRequirement
from .. import utils as _utils
# ---------- Initialization ----------
# ---------- DB Schema ----------
# ---------- Helper -----------
# ---------- Testing ----------
if __name__ == '__main__':
_asyncio.get_event_loop().run_until_complete(test()) | 39.387255 | 155 | 0.692097 | import asyncio as _asyncio
from typing import Callable as _Callable
from . import database as _database
from .chat_log import PssChatLogger as _PssChatLogger
from .reaction_role import ReactionRole as _ReactionRole
from .reaction_role import ReactionRoleChange as _ReactionRoleChange
from .reaction_role import ReactionRoleRequirement as _ReactionRoleRequirement
from .. import utils as _utils
# ---------- Initialization ----------
async def setup() -> None:
    """Initialise the database connection and apply all schema migrations."""
    await _database.init()
    await __setup_db_schema()
# ---------- DB Schema ----------
async def __setup_db_schema() -> None:
    """Apply every schema migration in version order; raise on first failure."""
    # (version, migration coroutine) pairs, oldest first; each migration is a
    # no-op returning True when the stored schema version is already at or
    # past its target.
    init_functions = [
        ('0.1.0', __create_db_schema),
        ('0.2.0', __update_db_schema_0_2_0),
        ('0.3.0', __update_db_schema_0_3_0),
        ('0.4.0', __update_db_schema_0_4_0),
    ]
    for version, callable in init_functions:
        if not (await __update_schema(version, callable)):
            raise Exception('DB initialization failed')
    print('DB initialization succeeded')
async def __update_db_schema_0_4_0() -> bool:
    """Migrate to schema v0.4.0: create the PSS chat logger table.

    Returns True when the schema is already at or past 0.4.0, otherwise the
    success of creating the table and bumping the stored schema version.
    """
    target_version = '0.4.0'
    # Column tuples look like (name, sql_type, flag, flag, default) — exact
    # flag semantics are defined by _database.try_create_table; confirm there.
    column_definitions_chat_log = [
        (_PssChatLogger.ID_COLUMN_NAME, 'SERIAL', True, True, None),
        ('created_at', 'TIMESTAMPTZ', False, True, 'CURRENT_TIMESTAMP'),
        ('modified_at', 'TIMESTAMPTZ', False, True, 'CURRENT_TIMESTAMP'),
        ('guild_id', 'BIGINT', False, True, None),
        ('channel_id', 'BIGINT', False, True, None),
        ('pss_channel_key', 'TEXT', False, True, None),
        ('last_pss_message_id', 'BIGINT', False, True, 0),
        ('name', 'TEXT', False, True, None),
    ]
    schema_version = await _database.get_schema_version()
    if schema_version:
        compare_0_4_0 = _utils.compare_versions(schema_version, target_version)
        if compare_0_4_0 < 1:
            return True
    print(f'[update_schema_0_4_0] Updating to database schema v{target_version}')
    success_chat_log = await _database.try_create_table(_PssChatLogger.TABLE_NAME, column_definitions_chat_log)
    if not success_chat_log:
        print(f'[update_schema_0_4_0] Could not create table \'{_PssChatLogger.TABLE_NAME}\'')
        return False
    success = await _database.try_set_schema_version(target_version)
    return success
async def __update_db_schema_0_3_0() -> bool:
    """Migrate to schema v0.3.0: add a nullable 'message_embed' column to the
    reaction role change table.  Returns True when already up to date.
    """
    target_version = '0.3.0'
    # (name, sql_type, flag, flag, default) — semantics defined by
    # _database.try_add_column; confirm there.
    column_definition = ('message_embed', 'TEXT', False, False, None)
    schema_version = await _database.get_schema_version()
    if schema_version:
        compare_0_3_0 = _utils.compare_versions(schema_version, target_version)
        if compare_0_3_0 < 1:
            return True
    print(f'[update_schema_0_3_0] Updating to database schema v{target_version}')
    success_reaction_role = await _database.try_add_column(_ReactionRoleChange.TABLE_NAME, *column_definition)
    if not success_reaction_role:
        print(f'[update_schema_0_3_0] Could not add column \'{column_definition[0]}\' to table \'{_ReactionRoleChange.TABLE_NAME}\'')
        return False
    success = await _database.try_set_schema_version(target_version)
    return success
async def __update_db_schema_0_2_0() -> bool:
    """Migrate to schema v0.2.0: create the reaction role tables.

    Creates three tables (reaction role, its changes, its requirements); if a
    later table fails, the earlier ones are dropped again so the migration is
    all-or-nothing.  Returns True when already up to date.
    """
    target_version = '0.2.0'
    # Column tuples look like (name, sql_type, flag, flag, default) — exact
    # flag semantics are defined by _database.try_create_table; confirm there.
    column_definitions_reaction_role = [
        (_ReactionRole.ID_COLUMN_NAME, 'SERIAL', True, True, None),
        ('created_at', 'TIMESTAMPTZ', False, True, 'CURRENT_TIMESTAMP'),
        ('modified_at', 'TIMESTAMPTZ', False, True, 'CURRENT_TIMESTAMP'),
        ('guild_id', 'BIGINT', False, True, None),
        ('channel_id', 'BIGINT', False, True, None),
        ('message_id', 'BIGINT', False, True, None),
        ('name', 'TEXT', False, True, None),
        ('reaction', 'TEXT', False, True, None),
        ('is_active', 'BOOLEAN', False, True, False),
    ]
    column_definitions_reaction_role_change = [
        (_ReactionRoleChange.ID_COLUMN_NAME, 'SERIAL', True, True, None),
        ('created_at', 'TIMESTAMPTZ', False, True, 'CURRENT_TIMESTAMP'),
        ('modified_at', 'TIMESTAMPTZ', False, True, 'CURRENT_TIMESTAMP'),
        (_ReactionRole.ID_COLUMN_NAME, 'INT', False, True, None),
        ('role_id', 'BIGINT', False, True, None),
        ('add', 'BOOLEAN', False, True, True), # True to add, False to remove
        ('allow_toggle', 'BOOLEAN', False, True, False),
        ('message_content', 'TEXT', False, False, None),
        ('message_channel_id', 'BIGINT', False, False, None),
    ]
    column_definitions_reaction_role_requirement = [
        (_ReactionRoleRequirement.ID_COLUMN_NAME, 'SERIAL', True, True, None),
        ('created_at', 'TIMESTAMPTZ', False, True, 'CURRENT_TIMESTAMP'),
        ('modified_at', 'TIMESTAMPTZ', False, True, 'CURRENT_TIMESTAMP'),
        (_ReactionRole.ID_COLUMN_NAME, 'INT', False, True, None),
        ('role_id', 'BIGINT', False, True, None),
    ]
    schema_version = await _database.get_schema_version()
    if schema_version:
        compare_0_2_0 = _utils.compare_versions(schema_version, target_version)
        if compare_0_2_0 < 1:
            return True
    print(f'[update_schema_0_2_0] Updating to database schema v{target_version}')
    success_reaction_role = await _database.try_create_table(_ReactionRole.TABLE_NAME, column_definitions_reaction_role)
    if not success_reaction_role:
        print(f'[update_schema_0_2_0] Could not create table \'{_ReactionRole.TABLE_NAME}\'')
        return False
    success_reaction_role_change = await _database.try_create_table(_ReactionRoleChange.TABLE_NAME, column_definitions_reaction_role_change)
    if not success_reaction_role_change:
        print(f'[update_schema_0_2_0] Could not create table \'{_ReactionRoleChange.TABLE_NAME}\'. Rolling back changes made.')
        await _database.drop_table(_ReactionRole.TABLE_NAME)
        return False
    success_reaction_role_requirement = await _database.try_create_table(_ReactionRoleRequirement.TABLE_NAME, column_definitions_reaction_role_requirement)
    if not success_reaction_role_requirement:
        print(f'[update_schema_0_2_0] Could not create table \'{_ReactionRoleRequirement.TABLE_NAME}\'. Rolling back changes made.')
        await _database.drop_table(_ReactionRole.TABLE_NAME)
        await _database.drop_table(_ReactionRoleChange.TABLE_NAME)
        return False
    success = await _database.try_set_schema_version(target_version)
    return success
async def __create_db_schema() -> bool:
    """Create the initial v0.1.0 schema: the bot settings table.

    Returns True when the schema is already at or past 0.1.0, otherwise the
    success of creating the table and storing the schema version.
    """
    target_version = '0.1.0'
    # Column tuples look like (name, sql_type, flag, flag, default) — exact
    # flag semantics are defined by _database.try_create_table; confirm there.
    column_definition_bot_settings = [
        ('setting_name', 'TEXT' , True, True, None),
        ('created_at', 'TIMESTAMPTZ', False, True, 'CURRENT_TIMESTAMP'),
        ('modified_at', 'TIMESTAMPTZ', False, True, 'CURRENT_TIMESTAMP'),
        ('setting_boolean', 'BOOLEAN', False, False, None),
        ('setting_float', 'FLOAT', False, False, None),
        ('setting_int', 'INT', False, False, None),
        ('setting_text', 'TEXT', False, False, None),
        ('setting_timestamp', 'TIMESTAMPTZ', False, False, None),
    ]
    schema_version = await _database.get_schema_version()
    if schema_version:
        compare_0_1_0 = _utils.compare_versions(schema_version, target_version)
        if compare_0_1_0 < 1:
            return True
    print(f'[create_schema] Creating database schema v{target_version}')
    success_bot_settings = await _database.try_create_table(_database.TABLE_NAME_BOT_SETTINGS, column_definition_bot_settings)
    if not success_bot_settings:
        print(f'[create_schema] Could not create table \'{_database.TABLE_NAME_BOT_SETTINGS}\'')
        return False
    success = await _database.try_set_schema_version(target_version)
    return success
# ---------- Helper -----------
async def __update_schema(version: str, update_function: _Callable) -> bool:
    """Run one schema-migration step and report whether it succeeded.

    Prints a diagnostic when *update_function* reports failure; the
    migration's own result is always passed back to the caller.
    """
    succeeded = await update_function()
    if succeeded:
        return succeeded
    print(f'Failed to update database schema to version: {version}.')
    return succeeded
# ---------- Testing ----------
async def test() -> None:
    """Smoke test: initialise the database and run the full setup.

    Annotation fixed from ``-> bool``: nothing is returned here.
    """
    # NOTE(review): _database.init() is also called inside setup(), so the
    # database is initialised twice on this path.
    await _database.init()
    await setup()
if __name__ == '__main__':
_asyncio.get_event_loop().run_until_complete(test()) | 7,213 | 0 | 184 |
5a22f91d70ec399fbc9a12f833abc6e6fb9565b2 | 1,178 | py | Python | python/main.py | gurux13/babagram | 6910130f1373e09154da4ce9a744f94287d754f2 | [
"CC0-1.0"
] | 5 | 2022-02-15T20:06:58.000Z | 2022-02-18T10:18:03.000Z | python/main.py | SkyFox277/babagram | 6910130f1373e09154da4ce9a744f94287d754f2 | [
"CC0-1.0"
] | null | null | null | python/main.py | SkyFox277/babagram | 6910130f1373e09154da4ce9a744f94287d754f2 | [
"CC0-1.0"
] | 1 | 2022-02-16T08:39:03.000Z | 2022-02-16T08:39:03.000Z | from button_logic import ButtonLogic
from fake_hw import FakeHardware
from hardware import Hardware, is_pi
from internet_checker import InternetChecker
from paper_status import PaperStatus
from recording import Recording
from tg import Telegram
import os
if __name__ == '__main__':
main() | 28.047619 | 53 | 0.679966 | from button_logic import ButtonLogic
from fake_hw import FakeHardware
from hardware import Hardware, is_pi
from internet_checker import InternetChecker
from paper_status import PaperStatus
from recording import Recording
from tg import Telegram
import os
def main():
    """Wire up the hardware, Telegram bot and background checkers, then run
    the bot's main loop, tearing everything down on exit.

    Fix: the original logged ``print("CWD:", os)``, which prints the *os
    module object* rather than the current working directory; use
    ``os.getcwd()`` so the log is actually useful.
    """
    print("CWD:", os.getcwd())
    hardware = Hardware()
    recording = Recording(hardware)
    hardware.set_recording(recording)
    internet_checker = InternetChecker(hardware)
    paper_status = PaperStatus(hardware)
    tg = Telegram(hardware)
    buttons = ButtonLogic(hardware, tg, recording)
    # Route Telegram-side commands back into the button logic.
    tg.set_sos_cancel_callback(buttons.on_tg_stopsos)
    tg.set_dbgprint_callback(buttons.on_tg_dbgprint)
    hardware.on_btn_press(buttons.on_btn_click)
    fake = None
    if not is_pi:
        # Off-device development: drive the hardware layer with a simulator.
        print("Starting fake hardware")
        fake = FakeHardware(hardware)
        fake.start()
    try:
        internet_checker.start()
        paper_status.start()
        tg.main()  # blocks until the bot exits
    finally:
        # Stop background workers first, then always release the hardware.
        try:
            if fake is not None:
                fake.stop()
            internet_checker.stop()
            paper_status.stop()
        finally:
            hardware.cleanup()
if __name__ == '__main__':
main() | 861 | 0 | 23 |
e5fed20291a4c45d2300812bfeebdc77b27f19c1 | 23,594 | py | Python | builds/Beta 0.4/src/main.py | CupOfJoeCode/ivy3d | e31a96b698029f5075fd21fd0bdc52288f842216 | [
"MIT"
] | 1 | 2020-10-30T01:15:19.000Z | 2020-10-30T01:15:19.000Z | builds/Beta 0.4/src/main.py | CupOfJoeCode/ivy3d | e31a96b698029f5075fd21fd0bdc52288f842216 | [
"MIT"
] | null | null | null | builds/Beta 0.4/src/main.py | CupOfJoeCode/ivy3d | e31a96b698029f5075fd21fd0bdc52288f842216 | [
"MIT"
] | null | null | null | #Import Modules
import pygame as pg
from pygame import gfxdraw
from math import *
from random import randint
from time import sleep
from glob import glob
import easygui
import types
#Custom Modules
from sceneFile import scene,world,player
import guibuttons
import shapes
pg.init() #Initialize pygame
VERSION = "Beta 0.4" # Version
textFont = "font/opensans.ttf" # set default font
w = 800 # Screen size
h = 600
translate = (w/2,h/2)
d = pg.display.set_mode((w,h)) # Initialize the display
pg.display.set_caption("Ivy3d v " + VERSION)
pg.display.set_icon(pg.image.load("favicon.png"))
running = True
#Projects 3d Point To 2d Point
#Multiplies two vectors
#Adds two vectors
#Rotates vector by another vector
# Calculates the normals for three vectors
# Gets the average of a list of vectors
# Rendering function takes in scene as a list of dictionaries (see example in sceneFile.py)
#Gets collision for physics
# Calculates all of the physics
# Return a text surface
# Display text
# Class for dealing with buttons
meshes = shapes.meshes # Get meshes from shapes.py
keys = [] # List of keys and if they are pressed
for i in range(0,1024):
keys.append(False)
a = 0
sim = False
sel = 0
buff = ""
mouse = {
"x":0,
"y":0,
"pressed":False
} # Mouse dictionary with x position, y position, and if the mouse is pressed
while running: # Loop while program is running
for e in pg.event.get(): # Get all events
if e.type == pg.QUIT: # if program is closed, stop running
running = False
if e.type == pg.MOUSEMOTION: # Get mouse motion
ms = pg.mouse.get_pos()
mouse["x"] = ms[0]
mouse["y"] = ms[1]
if e.type == pg.MOUSEBUTTONDOWN: # Get mouse button
mouse["pressed"] = True
if e.type == pg.MOUSEBUTTONUP:
mouse["pressed"] = False
if e.type == pg.KEYDOWN: # Get keydown
keys[e.key] = True
if(e.key == pg.K_p):
if(not(sim)):
buff = str(scene)
else:
scene = eval(buff)
sim = not(sim)
if e.type == pg.KEYUP:
keys[e.key] = False
# Clear screen
d.fill(world["background"])
if sim: # Simulate
scene = simulate(scene,world,keys)
render(scene) #Render
if(not(sim)):
pos = scene[sel]["position"]
pg.draw.line(d,(255,0,0),project(pos),project( (pos[0]+25,pos[1],pos[2]) ) ,4) #Draw axies at position
pg.draw.line(d,(0,255,0),project(pos),project( (pos[0] ,pos[1]+25,pos[2]) ) ,4)
pg.draw.line(d,(0,0,255),project(pos),project( (pos[0] ,pos[1],pos[2]+25) ) ,4)
guiEvents = gui(mouse["x"],mouse["y"],mouse["pressed"],scene[sel],sel) #Get all button
if(guiEvents[0]["pressed"]):
sel+=1
if(sel > len(scene)-1):
sel = 0
sleep(0.1)
if(guiEvents[1]["pressed"]):
scene[sel]["shape"]+=1
if(scene[sel]["shape"] > len(shapes.meshes)-1):
scene[sel]["shape"] = 0
sleep(0.1)
name = "position"
if(guiEvents[2]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0]-4,pos[1],pos[2])
if(guiEvents[3]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0]+4,pos[1],pos[2])
if(guiEvents[4]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0],pos[1]-4,pos[2])
if(guiEvents[5]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0],pos[1]+4,pos[2])
if(guiEvents[6]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0],pos[1],pos[2]-4)
if(guiEvents[7]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0],pos[1],pos[2]+4)
name = "scale"
if(guiEvents[8]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0]-4,pos[1],pos[2])
if(guiEvents[9]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0]+4,pos[1],pos[2])
if(guiEvents[10]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0],pos[1]-4,pos[2])
if(guiEvents[11]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0],pos[1]+4,pos[2])
if(guiEvents[12]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0],pos[1],pos[2]-4)
if(guiEvents[13]["pressed"]):
pos = scene[sel][name]
scene[sel][name] = (pos[0],pos[1],pos[2]+4)
if(guiEvents[14]["pressed"]):
pos = scene[sel]["material"]["color"]
scene[sel]["material"]["color"] = (pos[0]-4,pos[1],pos[2])
if(guiEvents[15]["pressed"]):
pos = scene[sel]["material"]["color"]
scene[sel]["material"]["color"] = (pos[0]+4,pos[1],pos[2])
if(guiEvents[16]["pressed"]):
pos = scene[sel]["material"]["color"]
scene[sel]["material"]["color"] = (pos[0],pos[1]-4,pos[2])
if(guiEvents[17]["pressed"]):
pos = scene[sel]["material"]["color"]
scene[sel]["material"]["color"] = (pos[0],pos[1]+4,pos[2])
if(guiEvents[18]["pressed"]):
pos = scene[sel]["material"]["color"]
scene[sel]["material"]["color"] = (pos[0],pos[1],pos[2]-4)
if(guiEvents[19]["pressed"]):
pos = scene[sel]["material"]["color"]
scene[sel]["material"]["color"] = (pos[0],pos[1],pos[2]+4)
if(guiEvents[20]["pressed"]):
scene[sel]["physics"]["rigidBody"] = not(scene[sel]["physics"]["rigidBody"])
sleep(0.1)
if(guiEvents[21]["pressed"]):
scene[sel]["physics"]["control"] = not(scene[sel]["physics"]["control"])
sleep(0.1)
if(guiEvents[22]["pressed"]):
scene[sel]["physics"]["bounce"]-= 0.01
if(guiEvents[23]["pressed"]):
scene[sel]["physics"]["bounce"]+= 0.01
if(guiEvents[24]["pressed"]):
scene[sel]["physics"]["friction"]-= 0.01
if(guiEvents[25]["pressed"]):
scene[sel]["physics"]["friction"]+= 0.01
if(guiEvents[26]["pressed"]):
inText = easygui.fileopenbox(default="maps/",filetypes=["*.png","*.jpg","*.jpeg","*.gif","*.*"])
if type(inText) == type(" "):
scene[sel]["material"]["map"] = inText
else:
scene[sel]["material"]["map"] = "none"
if(guiEvents[27]["pressed"]):
aaa = {
"shape":0,
"position":(0,0,0),
"scale":(25,25,25),
"rotation":(0,0,0),
"material":{
"wire":False,
"color":(200,63,63),
"map":"none"
},
"physics": {
"rigidBody":False,
"velocity":(0,0,0),
"friction":0.05,
"bounce":0,
"control":False
}
}
scene.append(aaa)
sel = len(scene)-1
sleep(0.1)
if(guiEvents[28]["pressed"]):
if(sel != 0):
scene.pop(sel)
sel = 0
print("Removed Mesh.")
else:
easygui.msgbox(msg="Cannot Remove Mesh",title="Error:")
sleep(0.1)
if(guiEvents[29]["pressed"]):
d.fill(world["background"])
render(scene)
# antialias()
pg.image.save(d,easygui.filesavebox(filetypes=["*.png","*.jpg","*.*"]))
easygui.msgbox(msg="Rendered And Exported",title="Success!")
sleep(0.1)
if(guiEvents[30]["pressed"]):
buff = str(scene)
try:
numFrames = int(easygui.enterbox("How Many Frames?"))
except:
easygui.msgbox(msg="Number Must Be A Valid Integer",title="Error:")
numFrames = 0
for i in range(numFrames):
scene = simulate(scene,world,keys)
d.fill(world["background"])
render(scene)
pg.display.update()
pg.image.save(d,"output/" + str(i) + ".png")
print("Done " +str(i) + "/" + str(numFrames))
scene = eval(buff)
if(guiEvents[31]["pressed"]):
scene[sel]["material"]["wire"] = not(scene[sel]["material"]["wire"])
sleep(0.1)
if(guiEvents[32]["pressed"]):
try:
f = open(easygui.fileopenbox(default="scenes/",filetypes=["*.ivy","*.*"]),"r")
buff = str(scene)
try:
scene = eval(f.read())
except:
easygui.msgbox(msg="Invalid Input File",title="Error:")
scene = eval(buff)
f.close()
except:
print("Blank File")
if(guiEvents[33]["pressed"]):
try:
f = open(easygui.filesavebox(default="scenes/",filetypes=["*.ivy","*.*"]),"w")
f.write(str(scene))
f.close()
except:
print("Blank File")
if(guiEvents[36]["pressed"]):
rt = scene[sel]["rotation"]
rt = (rt[0],rt[1]-0.1)
scene[sel]["rotation"] = rt
if(guiEvents[37]["pressed"]):
rt = scene[sel]["rotation"]
rt = (rt[0],rt[1]+0.1)
scene[sel]["rotation"] = rt
if(guiEvents[34]["pressed"]):
rt = scene[sel]["rotation"]
rt = (rt[0]-0.1,rt[1])
scene[sel]["rotation"] = rt
if(guiEvents[35]["pressed"]):
rt = scene[sel]["rotation"]
rt = (rt[0]+0.1,rt[1])
scene[sel]["rotation"] = rt
clr = scene[sel]["material"]["color"]
r = clr[0]
g = clr[1]
b = clr[2]
r = max(min(r,255),0)
g = max(min(g,255),0)
b = max(min(b,255),0)
scene[sel]["material"]["color"] = (r,g,b)
pg.display.update()
| 34.494152 | 284 | 0.510384 | #Import Modules
import pygame as pg
from pygame import gfxdraw
from math import *
from random import randint
from time import sleep
from glob import glob
import easygui
import types
#Custom Modules
from sceneFile import scene,world,player
import guibuttons
import shapes
pg.init() #Initialize pygame
VERSION = "Beta 0.4" # Version
textFont = "font/opensans.ttf" # set default font
w = 800 # Screen size
h = 600 # Screen height
translate = (w/2,h/2) # Screen-centre offset used by project(); puts the 3d origin mid-screen
d = pg.display.set_mode((w,h)) # Initialize the display
pg.display.set_caption("Ivy3d v " + VERSION)
pg.display.set_icon(pg.image.load("favicon.png"))
running = True # Main-loop flag; cleared on the pygame QUIT event
#Projects 3d Point To 2d Point
def project(point):
    """Project a 3d point onto the 2d screen.

    Depth is encoded as an exponential scale factor 2**(z/400) applied to x
    and y, after which the result is shifted by the global screen-centre
    offset ``translate``.  Screen y grows downward, hence the negated y.
    """
    x, y, z = point
    depth_scale = 2 ** (z / 400.0)  # 400.0 is the Z scaling constant
    return (x * depth_scale + translate[0], -y * depth_scale + translate[1])
#Multiplies two vectors
def scaleVector(vec, vec2):
    """Component-wise product of two vectors, returned as a tuple."""
    return tuple(vec[i] * vec2[i] for i in range(len(vec)))
#Adds two vectors
def translateVector(vec, vec2):
    """Component-wise sum of two vectors, returned as a tuple."""
    return tuple(vec[i] + vec2[i] for i in range(len(vec)))
#Rotates vector by another vector
def rotateVector(vec, vec2):
    """Rotate *vec* about the X axis by vec2[0], then about the Y axis by vec2[1].

    Bug fix: the original updated y1 (and then x1) in place and reused the
    already-rotated value when computing z1, so neither step was a true
    rotation (vector length was not preserved).  Tuple assignment applies
    each rotation matrix to a consistent snapshot of the coordinates.
    """
    x1, y1, z1 = vec[0], vec[1], vec[2]
    xr = vec2[0]
    yr = vec2[1]
    # X-Axis rotation (x unchanged)
    y1, z1 = y1 * cos(xr) - z1 * sin(xr), y1 * sin(xr) + z1 * cos(xr)
    # Y-Axis rotation (y unchanged)
    x1, z1 = z1 * sin(yr) + x1 * cos(yr), z1 * cos(yr) - x1 * sin(yr)
    return (x1, y1, z1)
# Calculates the normals for three vectors
def getNormal(vec1, vec2, vec3):
    """Face normal of triangle (vec1, vec2, vec3): cross(vec2 - vec1, vec3 - vec1)."""
    ux, uy, uz = (vec2[i] - vec1[i] for i in range(3))
    vx, vy, vz = (vec3[i] - vec1[i] for i in range(3))
    return (uy * vz - uz * vy, uz * vx - ux * vz, ux * vy - uy * vx)
# Gets the average of a list of vectors
def vectorPos(a):
    """Return the component-wise mean (centroid) of a list of 3d vectors.

    Bug fix: the original assigned all three averages to ``x`` (a copy/paste
    slip: ``x = x/n; x = y/n; x = z/n``), so it actually returned
    ``(sum_z/n, sum_y, sum_z)``.  Raises ZeroDivisionError for an empty
    list, just like the original.
    """
    n = len(a)
    x = sum(vec[0] for vec in a) / n
    y = sum(vec[1] for vec in a) / n
    z = sum(vec[2] for vec in a) / n
    return (x, y, z)
# Rendering function takes in scene as a list of dictionaries (see example in sceneFile.py)
def render(s):
    """Draw every mesh in scene list *s* onto the global display surface ``d``.

    Per triangle of each mesh's shape (looked up in the global ``meshes``
    table): scale -> rotate -> translate into world space, shade the mesh
    colour by the face normal, queue into a z buffer, depth-sort it, then
    draw back-to-front as wireframe, flat colour or texture.
    """
    zBuffer = [] # Z buffer for rendering triangles in correct order
    for mesh in s:
        for tris in meshes[mesh["shape"]]:
            pos = mesh["position"] # Gets position of mesh
            scl = mesh["scale"] # Gets scale of mesh
            rot = mesh["rotation"] # Gets rotation of mesh
            point1 = translateVector( rotateVector( scaleVector( tris[0],scl ) ,rot) , pos) # Rotates, scales, and translates each vector according to the position and scale of the mesh
            point2 = translateVector( rotateVector( scaleVector( tris[1],scl ) ,rot) , pos)
            point3 = translateVector( rotateVector( scaleVector( tris[2],scl ) ,rot) , pos)
            facePos = vectorPos([point1,point2,point3]) # Average of all three vector positions to add to the z buffer
            normal = getNormal(point1,point2,point3) # Gets normal for face
            normal = float((normal[0] + normal[1] + normal[2])/3.0)/300.0 # Gets average normal
            shade = float( max(min(255 - abs(normal),255),1) ) # Calculates shading from normal
            color = mesh["material"]["color"] # Gets meshes color
            r = color[0] # Gets each channel from color
            g = color[1]
            b = color[2]
            r = int(r / (255/shade)) # Applies shading to each channel
            g = int(g / (255/shade))
            b = int(b / (255/shade))
            color = (r,g,b) # Applies shaded rgb back to color
            zBuffer.append([point1,point2,point3,facePos,color,mesh["material"]["wire"],mesh["material"]["map"]]) #Adds the triangle data to the z buffer
    # NOTE(review): facePos computed above is never read; the sort below
    # recomputes the depth key from the three points instead.
    #Sorts the z buffer (bubble sort is not the best solution, but I don't really care it's easy to implement)
    for j in range(0,len(zBuffer)):
        for i in range(0,len(zBuffer)-1):
            zPos1 = [zBuffer[i][0],zBuffer[i][1],zBuffer[i][2]]
            zPos2 = [zBuffer[i+1][0],zBuffer[i+1][1],zBuffer[i+1][2]]
            zPos1 = vectorPos(zPos1)[2]
            zPos2 = vectorPos(zPos2)[2]
            if zPos1 > zPos2:
                store = zBuffer[i]
                zBuffer[i] = zBuffer[i+1]
                zBuffer[i+1] = store
    #Renders the triangles from the z buffer in the correct order
    for i in range(0,len(zBuffer)):
        tri = zBuffer[i]
        gfxdraw.aapolygon(d, (project(tri[0]) , project(tri[1]) , project(tri[2])) , tri[4]) # Draws anti-aliased outline around the triangle
        if(not(tri[5])): # Checks to see if the triangle is wireframe
            if(tri[6] == "none"):
                gfxdraw.filled_polygon(d, (project(tri[0]) , project(tri[1]) , project(tri[2])) , tri[4]) # Draws solid color triangle if there is no texture
            else:
                imgMap = pg.image.load(tri[6]) # loads texture
                offs = project(tri[0]) # Calculates image offset
                sz = imgMap.get_size() # Gets the size of the image
                if( min(project(tri[0])[0] , project(tri[1])[0] , project(tri[2])[0] ) >= 0 ): # Checks to see if the triangle is within the bounds of the screen(pygame crashes otherwise)
                    if( max(project(tri[0])[0] , project(tri[1])[0] , project(tri[2])[0] ) <= w ):
                        if( min(project(tri[0])[1] , project(tri[1])[1] , project(tri[2])[1] ) >= 0 ):
                            if( max(project(tri[0])[1] , project(tri[1])[1] , project(tri[2])[1] ) <= h ):
                                # Draw textured polygon with offset we calculated before
                                gfxdraw.textured_polygon(d, (project(tri[0]) , project(tri[1]) , project(tri[2])),imgMap, int(offs[0]) % sz[0] , int(h-offs[1]) % sz[1] )
#Gets collision for physics
def col(mesh1, ss):
    """Axis-aligned overlap test between *mesh1* and every other mesh in *ss*.

    Returns ``[hit, friction]`` where *hit* says whether any other mesh's
    bounding box (half-extents = sum of the two meshes' scales) overlaps
    mesh1's position, and *friction* is the friction of the last mesh hit
    (1 when nothing was hit).
    """
    hit = False
    friction = 1  # default friction when no contact is found
    pos1 = mesh1["position"]
    scl1 = mesh1["scale"]
    for other in ss:
        if other == mesh1:  # skip the mesh itself
            continue
        pos2 = other["position"]
        scl2 = other["scale"]
        overlapping = all(
            pos2[axis] - (scl1[axis] + scl2[axis])
            < pos1[axis]
            < pos2[axis] + (scl1[axis] + scl2[axis])
            for axis in range(3)
        )
        if overlapping:
            hit = True
            friction = other["physics"]["friction"]
    return [hit, friction]
# Calculates all of the physics
def simulate(s,w,k):
    """Advance the physics simulation by one frame.

    s -- the scene: a list of mesh dicts, mutated in place and returned.
    w -- world dict; only w["gravity"] is read here.
    k -- pygame key-state sequence indexed by key constants.

    Each rigid body is moved one axis at a time (y, then x, then z):
    move, test collision via col(), and on a hit undo the move and
    reflect/scale the velocity by the mesh's "bounce" value, applying
    the other mesh's friction to the two perpendicular axes.
    """
    scn = s
    for mesh in scn: # Checks every mesh in the scene
        if(mesh["physics"]["rigidBody"]): # Checks to see if rigidBody is true for the mesh
            if(mesh["physics"]["control"]): # If the mesh has the CharacterController enabled
                # NOTE(review): "player" is a module-level dict defined
                # elsewhere in this file -- assumed to hold movement tuning.
                maxSpeed = player["maxSpeed"] # Gets max speed, acceleration, and jump strength
                accel = player["accel"]
                jumpS = player["jumpStrength"]
                # Movement script: W/S accelerate along z, A/D along x,
                # each clamped to +-maxSpeed.
                if(k[pg.K_w]):
                    vel = mesh["physics"]["velocity"]
                    mesh["physics"]["velocity"] = (vel[0],vel[1], max(-maxSpeed,vel[2]-accel) )
                if(k[pg.K_s]):
                    vel = mesh["physics"]["velocity"]
                    mesh["physics"]["velocity"] = (vel[0],vel[1], min(maxSpeed,vel[2]+accel) )
                if(k[pg.K_a]):
                    vel = mesh["physics"]["velocity"]
                    mesh["physics"]["velocity"] = (max(-maxSpeed,vel[0]-accel),vel[1], vel[2] )
                if(k[pg.K_d]):
                    vel = mesh["physics"]["velocity"]
                    mesh["physics"]["velocity"] = (min(maxSpeed,vel[0]+accel),vel[1], vel[2] )
            # Apply gravity to the y velocity.
            vel = mesh["physics"]["velocity"]
            mesh["physics"]["velocity"] = (vel[0],vel[1]-w["gravity"], vel[2])
            pos = mesh["position"]
            vel = mesh["physics"]["velocity"]
            mesh["position"] = (pos[0],pos[1]+vel[1],pos[2]) # Move mesh in the y direction by the y velocity
            cl = col(mesh,scn) # Check for collision
            if cl[0]: # If there is collision, move back and multiply the y-velocity by the bounce value
                pos = mesh["position"]
                vel = mesh["physics"]["velocity"]
                mesh["position"] = (pos[0],pos[1]-vel[1],pos[2])
                vel = mesh["physics"]["velocity"]
                bnc = mesh["physics"]["bounce"]
                # Friction from the hit mesh damps the horizontal axes.
                mesh["physics"]["velocity"] = (vel[0] * (1-cl[1]),-vel[1] * bnc,vel[2] * (1-cl[1]))
                # Jumping is only possible while resting on something.
                if(k[pg.K_SPACE] and mesh["physics"]["control"]): # If the space key is pressed, jump
                    vel = mesh["physics"]["velocity"]
                    mesh["physics"]["velocity"] = (vel[0],jumpS, vel[2] )
            pos = mesh["position"]
            vel = mesh["physics"]["velocity"]
            mesh["position"] = (pos[0]+vel[0],pos[1],pos[2]) # Move mesh in the x direction
            cl = col(mesh,scn) # Check for Collision
            if cl[0]:
                pos = mesh["position"] # If there is collision, move back and bounce back
                vel = mesh["physics"]["velocity"]
                mesh["position"] = (pos[0]-vel[0],pos[1],pos[2])
                vel = mesh["physics"]["velocity"]
                bnc = mesh["physics"]["bounce"]
                mesh["physics"]["velocity"] = (vel[0]*bnc,vel[1],vel[2])
            pos = mesh["position"]
            vel = mesh["physics"]["velocity"]
            mesh["position"] = (pos[0],pos[1],pos[2]+vel[2]) # Move mesh in the z direction
            cl = col(mesh,scn) # Check for collision
            if cl[0]: # If there is collision, move back and bounce back
                pos = mesh["position"]
                vel = mesh["physics"]["velocity"]
                mesh["position"] = (pos[0],pos[1],pos[2]-vel[2])
                vel = mesh["physics"]["velocity"]
                bnc = mesh["physics"]["bounce"]
                mesh["physics"]["velocity"] = (vel[0],vel[1],vel[2]*bnc)
    return scn
# Render ``text`` with ``font`` and hand back (surface, bounding rect).
def text_objects(text, font):
    """Render black, antialiased text; return the surface and its rect."""
    surface = font.render(text, True, (0, 0, 0))
    rect = surface.get_rect()
    return surface, rect
# Blit ``text`` onto the global display surface ``d`` at (x, y).
def textdisplay(text, x, y, fff, ffs):
    """Draw ``text`` at (x, y) using font file ``fff`` at point size ``ffs``."""
    font = pg.font.Font(fff, ffs)
    surface, _rect = text_objects(text, font)
    d.blit(surface, (x, y))
# Immediate-mode text button drawn straight onto the display surface.
class GUIButton():
    """A clickable text button.

    Geometry (w, h) is recomputed from the rendered label on every
    render() call; ``clicked`` is refreshed from the mouse state.
    """

    def __init__(self, xpos, ypos, textstr):
        self.x = xpos
        self.y = ypos
        self.w = 1          # placeholder until the label is measured
        self.h = 1
        self.text = textstr
        self.clicked = False

    def render(self, mx, my, mp):
        """Draw the button and update ``clicked``.

        mx, my -- mouse position; mp -- True while a mouse button is held.
        """
        font_file = textFont
        font_size = 12
        label_font = pg.font.Font(font_file, font_size)
        label_surface, _label_rect = text_objects(self.text, label_font)
        label_w, label_h = label_surface.get_size()
        self.w = label_w + 2
        self.h = label_h + 2
        # Black rect behind the fill acts as a one-pixel outline.
        pg.draw.rect(d, (0, 0, 0), (self.x, self.y, self.w, self.h))
        hovered = self.x < mx < self.x + self.w and self.y < my < self.y + self.h
        if hovered and mp:
            fill = (127, 127, 127)   # held down
            self.clicked = True
        elif hovered:
            fill = (200, 200, 200)   # hover highlight
            self.clicked = False
        else:
            fill = (255, 255, 255)   # idle
            self.clicked = False
        pg.draw.rect(d, fill, (self.x + 1, self.y + 1, self.w - 2, self.h - 2))
        textdisplay(self.text, self.x, self.y, font_file, font_size)
def gui(mouseX,mouseY,mousePressed,msh,idx):
    """Draw the editor panel, the selected mesh's stats, and all buttons.

    mouseX/mouseY/mousePressed -- current mouse state, forwarded to each
    button so it can refresh its own clicked state.
    msh -- the currently selected mesh dict; idx -- its scene index.
    Returns guibuttons.buttons with each entry's "pressed" flag updated;
    the caller dispatches on the list order of those buttons.
    """
    nams = ["Cube","Pyramid","Octahedron","Sphere","PlaneXZ","PlaneXY","PlaneYZ"] # Names of shapes
    fnt = textFont # Set font and font size
    fntSize = 16
    buttons = guibuttons.buttons # Get buttons
    pg.draw.rect(d,(0,0,0),(0,0,256,h)) # Draw GUI outline
    pg.draw.rect(d,(255,255,255),(0,0,255,h)) # Draw GUI background
    # Draw object data
    # NOTE(review): the stats are drawn at x = w - 200 (near the right
    # screen edge), not inside the 256 px panel above -- confirm intended.
    textdisplay("Object: " + str(nams[msh["shape"]]) + ' ' + str(idx),w - 200,8,fnt,fntSize )
    textdisplay("Position: " + str(msh["position"]),w - 200,28,fnt,fntSize )
    textdisplay("Scale: " + str(msh["scale"]),w - 200,48,fnt,fntSize )
    textdisplay("Color: " + str(msh["material"]["color"] ),w - 200,68,fnt,fntSize )
    textdisplay("IsRigidBody: " + str(msh["physics"]["rigidBody"] ),w - 200,88,fnt,fntSize )
    textdisplay("CharControl: " + str(msh["physics"]["control"] ),w - 200,108,fnt,fntSize )
    textdisplay("Bounciness: " + str(msh["physics"]["bounce"] ),w - 200,128,fnt,fntSize )
    textdisplay("Friction: " + str(msh["physics"]["friction"] ),w - 200,148,fnt,fntSize )
    textdisplay("Texture: " + str(msh["material"]["map"] ),w - 200,168,fnt,fntSize )
    textdisplay("Wireframe: " + str(msh["material"]["wire"] ),w - 200,188,fnt,fntSize )
    # Rotation is stored in radians; display it in degrees.
    rtt = msh["rotation"]
    rtt = ( degrees(rtt[0]),degrees(rtt[1]) )
    textdisplay("Rotation: " + str( (round(rtt[0],3),round(rtt[1],3)) ),w - 200,208,fnt,fntSize )
    # Check if each button is pressed
    for i in buttons:
        btn = GUIButton(i["x"],i["y"],i["name"] )
        btn.render(mouseX,mouseY,mousePressed)
        i["pressed"] = btn.clicked
    return buttons
# --- editor state -------------------------------------------------------
meshes = shapes.meshes  # mesh templates defined in shapes.py
# Pressed-state lookup table indexed by pygame key code.
keys = [False] * 1024
a = 0            # kept for compatibility with the rest of the script
sim = False      # True while the physics simulation is running
sel = 0          # index of the currently selected mesh
buff = ""        # serialized scene snapshot taken when simulation starts
# Current mouse position and button state.
mouse = {"x": 0, "y": 0, "pressed": False}
# --- main loop: events, simulation, rendering, GUI dispatch -------------
# Globals used here (defined earlier in the file): running, scene, world,
# keys, mouse, sel (selected mesh index), sim (simulation on/off), buff
# (repr() snapshot of the scene, restored with eval() when sim stops).
while running: # Loop while program is running
    for e in pg.event.get(): # Get all events
        if e.type == pg.QUIT: # if program is closed, stop running
            running = False
        if e.type == pg.MOUSEMOTION: # Get mouse motion
            ms = pg.mouse.get_pos()
            mouse["x"] = ms[0]
            mouse["y"] = ms[1]
        if e.type == pg.MOUSEBUTTONDOWN: # Get mouse button
            mouse["pressed"] = True
        if e.type == pg.MOUSEBUTTONUP:
            mouse["pressed"] = False
        if e.type == pg.KEYDOWN: # Get keydown
            keys[e.key] = True
            # P toggles the simulation: snapshot the scene on start,
            # restore the snapshot on stop.
            if(e.key == pg.K_p):
                if(not(sim)):
                    buff = str(scene)
                else:
                    scene = eval(buff)
                sim = not(sim)
        if e.type == pg.KEYUP:
            keys[e.key] = False
    # Clear screen
    d.fill(world["background"])
    if sim: # Simulate
        scene = simulate(scene,world,keys)
    render(scene) #Render
    # Editor mode only: draw the axis gizmo and the GUI, then dispatch on
    # the button list (indices follow guibuttons.buttons order).
    if(not(sim)):
        pos = scene[sel]["position"]
        pg.draw.line(d,(255,0,0),project(pos),project( (pos[0]+25,pos[1],pos[2]) ) ,4) #Draw axies at position
        pg.draw.line(d,(0,255,0),project(pos),project( (pos[0] ,pos[1]+25,pos[2]) ) ,4)
        pg.draw.line(d,(0,0,255),project(pos),project( (pos[0] ,pos[1],pos[2]+25) ) ,4)
        guiEvents = gui(mouse["x"],mouse["y"],mouse["pressed"],scene[sel],sel) #Get all button states
        # Button 0: cycle mesh selection.
        if(guiEvents[0]["pressed"]):
            sel+=1
            if(sel > len(scene)-1):
                sel = 0
            sleep(0.1)
        # Button 1: cycle the selected mesh's shape.
        if(guiEvents[1]["pressed"]):
            scene[sel]["shape"]+=1
            if(scene[sel]["shape"] > len(shapes.meshes)-1):
                scene[sel]["shape"] = 0
            sleep(0.1)
        # Buttons 2-7: nudge position along -x/+x/-y/+y/-z/+z by 4 units.
        name = "position"
        if(guiEvents[2]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0]-4,pos[1],pos[2])
        if(guiEvents[3]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0]+4,pos[1],pos[2])
        if(guiEvents[4]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0],pos[1]-4,pos[2])
        if(guiEvents[5]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0],pos[1]+4,pos[2])
        if(guiEvents[6]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0],pos[1],pos[2]-4)
        if(guiEvents[7]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0],pos[1],pos[2]+4)
        # Buttons 8-13: same pattern for scale.
        name = "scale"
        if(guiEvents[8]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0]-4,pos[1],pos[2])
        if(guiEvents[9]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0]+4,pos[1],pos[2])
        if(guiEvents[10]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0],pos[1]-4,pos[2])
        if(guiEvents[11]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0],pos[1]+4,pos[2])
        if(guiEvents[12]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0],pos[1],pos[2]-4)
        if(guiEvents[13]["pressed"]):
            pos = scene[sel][name]
            scene[sel][name] = (pos[0],pos[1],pos[2]+4)
        # Buttons 14-19: adjust material color channels r/g/b by +-4
        # (clamped to 0..255 at the bottom of the loop).
        if(guiEvents[14]["pressed"]):
            pos = scene[sel]["material"]["color"]
            scene[sel]["material"]["color"] = (pos[0]-4,pos[1],pos[2])
        if(guiEvents[15]["pressed"]):
            pos = scene[sel]["material"]["color"]
            scene[sel]["material"]["color"] = (pos[0]+4,pos[1],pos[2])
        if(guiEvents[16]["pressed"]):
            pos = scene[sel]["material"]["color"]
            scene[sel]["material"]["color"] = (pos[0],pos[1]-4,pos[2])
        if(guiEvents[17]["pressed"]):
            pos = scene[sel]["material"]["color"]
            scene[sel]["material"]["color"] = (pos[0],pos[1]+4,pos[2])
        if(guiEvents[18]["pressed"]):
            pos = scene[sel]["material"]["color"]
            scene[sel]["material"]["color"] = (pos[0],pos[1],pos[2]-4)
        if(guiEvents[19]["pressed"]):
            pos = scene[sel]["material"]["color"]
            scene[sel]["material"]["color"] = (pos[0],pos[1],pos[2]+4)
        # Buttons 20-25: physics toggles and bounce/friction tweaks.
        if(guiEvents[20]["pressed"]):
            scene[sel]["physics"]["rigidBody"] = not(scene[sel]["physics"]["rigidBody"])
            sleep(0.1)
        if(guiEvents[21]["pressed"]):
            scene[sel]["physics"]["control"] = not(scene[sel]["physics"]["control"])
            sleep(0.1)
        if(guiEvents[22]["pressed"]):
            scene[sel]["physics"]["bounce"]-= 0.01
        if(guiEvents[23]["pressed"]):
            scene[sel]["physics"]["bounce"]+= 0.01
        if(guiEvents[24]["pressed"]):
            scene[sel]["physics"]["friction"]-= 0.01
        if(guiEvents[25]["pressed"]):
            scene[sel]["physics"]["friction"]+= 0.01
        # Button 26: pick a texture file (cancel resets to "none").
        if(guiEvents[26]["pressed"]):
            inText = easygui.fileopenbox(default="maps/",filetypes=["*.png","*.jpg","*.jpeg","*.gif","*.*"])
            if type(inText) == type(" "):
                scene[sel]["material"]["map"] = inText
            else:
                scene[sel]["material"]["map"] = "none"
        # Button 27: append a default cube mesh and select it.
        if(guiEvents[27]["pressed"]):
            aaa = {
                "shape":0,
                "position":(0,0,0),
                "scale":(25,25,25),
                "rotation":(0,0,0),
                "material":{
                    "wire":False,
                    "color":(200,63,63),
                    "map":"none"
                },
                "physics": {
                    "rigidBody":False,
                    "velocity":(0,0,0),
                    "friction":0.05,
                    "bounce":0,
                    "control":False
                }
            }
            scene.append(aaa)
            sel = len(scene)-1
            sleep(0.1)
        # Button 28: delete the selected mesh (mesh 0 is protected).
        if(guiEvents[28]["pressed"]):
            if(sel != 0):
                scene.pop(sel)
                sel = 0
                print("Removed Mesh.")
            else:
                easygui.msgbox(msg="Cannot Remove Mesh",title="Error:")
            sleep(0.1)
        # Button 29: render the scene and export a single image.
        if(guiEvents[29]["pressed"]):
            d.fill(world["background"])
            render(scene)
            # antialias()
            pg.image.save(d,easygui.filesavebox(filetypes=["*.png","*.jpg","*.*"]))
            easygui.msgbox(msg="Rendered And Exported",title="Success!")
            sleep(0.1)
        # Button 30: simulate N frames offline, saving each to output/,
        # then restore the scene snapshot.
        if(guiEvents[30]["pressed"]):
            buff = str(scene)
            try:
                numFrames = int(easygui.enterbox("How Many Frames?"))
            except:
                easygui.msgbox(msg="Number Must Be A Valid Integer",title="Error:")
                numFrames = 0
            for i in range(numFrames):
                scene = simulate(scene,world,keys)
                d.fill(world["background"])
                render(scene)
                pg.display.update()
                pg.image.save(d,"output/" + str(i) + ".png")
                print("Done " +str(i) + "/" + str(numFrames))
            scene = eval(buff)
        # Button 31: toggle wireframe rendering for the selected mesh.
        if(guiEvents[31]["pressed"]):
            scene[sel]["material"]["wire"] = not(scene[sel]["material"]["wire"])
            sleep(0.1)
        # Button 32: load a scene file (*.ivy contains a repr of the list);
        # on a parse failure the previous scene is restored.
        if(guiEvents[32]["pressed"]):
            try:
                f = open(easygui.fileopenbox(default="scenes/",filetypes=["*.ivy","*.*"]),"r")
                buff = str(scene)
                try:
                    scene = eval(f.read())
                except:
                    easygui.msgbox(msg="Invalid Input File",title="Error:")
                    scene = eval(buff)
                f.close()
            except:
                print("Blank File")
        # Button 33: save the scene as its repr().
        if(guiEvents[33]["pressed"]):
            try:
                f = open(easygui.filesavebox(default="scenes/",filetypes=["*.ivy","*.*"]),"w")
                f.write(str(scene))
                f.close()
            except:
                print("Blank File")
        # Buttons 34-37: rotate the selected mesh by 0.1 rad steps.
        # NOTE(review): rotation is replaced with a 2-tuple here although
        # meshes are created with a 3-tuple rotation -- confirm intended.
        if(guiEvents[36]["pressed"]):
            rt = scene[sel]["rotation"]
            rt = (rt[0],rt[1]-0.1)
            scene[sel]["rotation"] = rt
        if(guiEvents[37]["pressed"]):
            rt = scene[sel]["rotation"]
            rt = (rt[0],rt[1]+0.1)
            scene[sel]["rotation"] = rt
        if(guiEvents[34]["pressed"]):
            rt = scene[sel]["rotation"]
            rt = (rt[0]-0.1,rt[1])
            scene[sel]["rotation"] = rt
        if(guiEvents[35]["pressed"]):
            rt = scene[sel]["rotation"]
            rt = (rt[0]+0.1,rt[1])
            scene[sel]["rotation"] = rt
        # Clamp the (possibly just adjusted) color channels to 0..255.
        clr = scene[sel]["material"]["color"]
        r = clr[0]
        g = clr[1]
        b = clr[2]
        r = max(min(r,255),0)
        g = max(min(g,255),0)
        b = max(min(b,255),0)
        scene[sel]["material"]["color"] = (r,g,b)
    pg.display.update()
| 12,686 | -3 | 341 |
4c90072340fcbafd34ed47f1674ba9b82fd3e4b6 | 121 | py | Python | src/daipecore/decorator/tests/notebook_function_fixture.py | daipe-ai/daipe-core | aa205495fa6b464fa6078d17e439c60345ac99ea | [
"MIT"
] | 1 | 2021-09-17T09:07:09.000Z | 2021-09-17T09:07:09.000Z | src/daipecore/decorator/tests/notebook_function_fixture.py | daipe-ai/daipe-core | aa205495fa6b464fa6078d17e439c60345ac99ea | [
"MIT"
] | 2 | 2021-12-20T07:46:33.000Z | 2022-02-24T07:02:05.000Z | src/daipecore/decorator/tests/notebook_function_fixture.py | daipe-ai/daipe-core | aa205495fa6b464fa6078d17e439c60345ac99ea | [
"MIT"
] | null | null | null | from daipecore.decorator.notebook_function import notebook_function
@notebook_function
| 17.285714 | 67 | 0.826446 | from daipecore.decorator.notebook_function import notebook_function
@notebook_function
def load_data():
    """Test fixture: a notebook function that returns the constant 155."""
    return 155
| 10 | 0 | 22 |
2aaecf9aca1b3795f1ceb691479472897e523a63 | 2,172 | py | Python | setup.py | lusi1990/betterlifepsi | 8e7f8562967ab1816d8c25db3251c550a357f39c | [
"MIT"
] | 33 | 2018-10-19T03:41:56.000Z | 2022-01-23T16:26:02.000Z | setup.py | lusi1990/betterlifepsi | 8e7f8562967ab1816d8c25db3251c550a357f39c | [
"MIT"
] | 318 | 2018-09-23T15:16:54.000Z | 2022-03-31T22:58:55.000Z | setup.py | lusi1990/betterlifepsi | 8e7f8562967ab1816d8c25db3251c550a357f39c | [
"MIT"
] | 19 | 2018-10-22T18:04:18.000Z | 2021-12-06T19:49:05.000Z | # coding=utf-8
import re
import ast
from setuptools import setup
from os import path
# Extract __version__ from the package source without importing it
# (importing could pull in dependencies that are not installed yet).
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('psi/app/__init__.py', 'rb') as f:
    version = str(ast.literal_eval(_version_re.search(
        f.read().decode('utf-8')).group(1)))
# Runtime requirements: every non-blank, non-comment line of common.txt.
with open('etc/requirements/common.txt', 'r') as f:
    install_reqs = [
        s for s in [
            line.strip(' \n') for line in f
        ] if not s.startswith('#') and s != ''
    ]
# Test requirements: same filtering, also dropping "-r " include lines.
with open('etc/requirements/test.txt', 'r') as f:
    tests_reqs = [
        s for s in [
            line.strip(' \n') for line in f
        ] if not s.startswith('#') and s != '' and not s.startswith('-r ')
    ]
# read the contents of your README file
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    name="betterlifepsi",
    version=version,
    packages=['psi'],
    include_package_data=True,
    author="Lawrence Liu",
    author_email="lawrence@betterlife.io",
    description="Betterlife Intelligent PSI(Purchase, Sales and Inventory) system",
    long_description=long_description,
    long_description_content_type='text/markdown',
    license="MIT",
    keywords="Betterlife, Intelligent, Purchase Order, Sales Order, Inventory Management, Retail",
    url="https://github.com/betterlife/psi",
    install_requires=install_reqs,
    tests_require=tests_reqs,
    setup_requires=install_reqs,
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Office/Business :: Financial :: Point-Of-Sale',
        'Topic :: Office/Business :: Financial',
        'Topic :: Office/Business :: Financial :: Accounting',
        'Natural Language :: Chinese (Simplified)',
        'Natural Language :: English',
        'Framework :: Flask',
    ],
)
| 33.415385 | 98 | 0.632597 | # coding=utf-8
import re
import ast
from setuptools import setup
from os import path
# Extract __version__ from the package source without importing it
# (importing could pull in dependencies that are not installed yet).
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('psi/app/__init__.py', 'rb') as f:
    version = str(ast.literal_eval(_version_re.search(
        f.read().decode('utf-8')).group(1)))
# Runtime requirements: every non-blank, non-comment line of common.txt.
with open('etc/requirements/common.txt', 'r') as f:
    install_reqs = [
        s for s in [
            line.strip(' \n') for line in f
        ] if not s.startswith('#') and s != ''
    ]
# Test requirements: same filtering, also dropping "-r " include lines.
with open('etc/requirements/test.txt', 'r') as f:
    tests_reqs = [
        s for s in [
            line.strip(' \n') for line in f
        ] if not s.startswith('#') and s != '' and not s.startswith('-r ')
    ]
# read the contents of your README file
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
setup(
    name="betterlifepsi",
    version=version,
    packages=['psi'],
    include_package_data=True,
    author="Lawrence Liu",
    author_email="lawrence@betterlife.io",
    description="Betterlife Intelligent PSI(Purchase, Sales and Inventory) system",
    long_description=long_description,
    long_description_content_type='text/markdown',
    license="MIT",
    keywords="Betterlife, Intelligent, Purchase Order, Sales Order, Inventory Management, Retail",
    url="https://github.com/betterlife/psi",
    install_requires=install_reqs,
    tests_require=tests_reqs,
    setup_requires=install_reqs,
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Topic :: Office/Business :: Financial :: Point-Of-Sale',
        'Topic :: Office/Business :: Financial',
        'Topic :: Office/Business :: Financial :: Accounting',
        'Natural Language :: Chinese (Simplified)',
        'Natural Language :: English',
        'Framework :: Flask',
    ],
)
| 0 | 0 | 0 |
1da281e07bc31e868ecd89b10b7296abf13775a4 | 1,046 | py | Python | fasta/fasta_qual_to_fastq.py | linsalrob/EdwardsLab | 3d4eef1dda61c31ce8163d94d86f186275a6e4a4 | [
"MIT"
] | 30 | 2015-01-25T16:22:51.000Z | 2022-01-20T15:56:47.000Z | fasta/fasta_qual_to_fastq.py | linsalrob/EdwardsLab | 3d4eef1dda61c31ce8163d94d86f186275a6e4a4 | [
"MIT"
] | 2 | 2020-04-13T15:00:37.000Z | 2020-09-23T12:35:59.000Z | fasta/fasta_qual_to_fastq.py | linsalrob/EdwardsLab | 3d4eef1dda61c31ce8163d94d86f186275a6e4a4 | [
"MIT"
] | 24 | 2015-04-17T00:52:05.000Z | 2021-11-26T17:50:01.000Z | """
Convert a fasta/quality files to a fastq file. I can't believe I'm writing this in 2020
"""
import os
import sys
import argparse
from roblib import read_fasta, write_fastq, message

__author__ = 'Rob Edwards'
__copyright__ = 'Copyright 2020, Rob Edwards'
__credits__ = ['Rob Edwards']
__license__ = 'MIT'
__maintainer__ = 'Rob Edwards'
__email__ = 'raedwards@gmail.com'

if __name__ == '__main__':
    # Combine a fasta sequence file (-f) with its matching quality file
    # (-q) into a single fastq file (-o).
    parser = argparse.ArgumentParser(description=" ")
    parser.add_argument('-f', help='fasta file', required=True)
    parser.add_argument('-q', help='quality file', required=True)
    parser.add_argument('-o', help='output fastq file', required=True)
    parser.add_argument('-v', help='verbose output', action='store_true')
    args = parser.parse_args()

    # Bug fix: abort when EITHER input file is missing (the original used
    # `and`, which only fired when both were absent and then crashed in
    # read_fasta), and interpolate the paths into the message (the
    # original string was missing its f-string prefix).
    if not os.path.exists(args.f) or not os.path.exists(args.q):
        message(f"FATAL: either {args.f} or {args.q} not found", "RED")
        sys.exit(-1)
    fa = read_fasta(args.f, True, False)
    qu = read_fasta(args.q, True, True)
    write_fastq(fa, qu, args.o, args.v)
| 30.764706 | 87 | 0.691205 | """
Convert a fasta/quality files to a fastq file. I can't believe I'm writing this in 2020
"""
import os
import sys
import argparse
from roblib import read_fasta, write_fastq, message

__author__ = 'Rob Edwards'
__copyright__ = 'Copyright 2020, Rob Edwards'
__credits__ = ['Rob Edwards']
__license__ = 'MIT'
__maintainer__ = 'Rob Edwards'
__email__ = 'raedwards@gmail.com'

if __name__ == '__main__':
    # Combine a fasta sequence file (-f) with its matching quality file
    # (-q) into a single fastq file (-o).
    parser = argparse.ArgumentParser(description=" ")
    parser.add_argument('-f', help='fasta file', required=True)
    parser.add_argument('-q', help='quality file', required=True)
    parser.add_argument('-o', help='output fastq file', required=True)
    parser.add_argument('-v', help='verbose output', action='store_true')
    args = parser.parse_args()

    # Bug fix: abort when EITHER input file is missing (the original used
    # `and`, which only fired when both were absent and then crashed in
    # read_fasta), and interpolate the paths into the message (the
    # original string was missing its f-string prefix).
    if not os.path.exists(args.f) or not os.path.exists(args.q):
        message(f"FATAL: either {args.f} or {args.q} not found", "RED")
        sys.exit(-1)
    fa = read_fasta(args.f, True, False)
    qu = read_fasta(args.q, True, True)
    write_fastq(fa, qu, args.o, args.v)
| 0 | 0 | 0 |
f0d4eb63ef0ed095a17e63a5117da5f1eae16b93 | 170 | py | Python | 002_time.py | a1852rw/004_Python_Training | e88906d7e7f33ca07d5e6ff87d82f526c57e71a8 | [
"MIT"
] | null | null | null | 002_time.py | a1852rw/004_Python_Training | e88906d7e7f33ca07d5e6ff87d82f526c57e71a8 | [
"MIT"
] | null | null | null | 002_time.py | a1852rw/004_Python_Training | e88906d7e7f33ca07d5e6ff87d82f526c57e71a8 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Print the current timestamp twice: via today() and via now().
import datetime
today_stamp = datetime.datetime.today()
print("today: " ,today_stamp)
now_stamp = datetime.datetime.now()
print("now: " ,now_stamp)
| 11.333333 | 30 | 0.623529 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Print the current timestamp twice: via today() and via now().
import datetime
today_stamp = datetime.datetime.today()
print("today: " ,today_stamp)
now_stamp = datetime.datetime.now()
print("now: " ,now_stamp)
| 0 | 0 | 0 |
101a1972077d97fa989bbf8d25127ff3b11d3c6e | 1,276 | py | Python | amokryshev/utils/dedup_file_field.py | amokryshev/amokryshev-com | c78c8579ff1c87c07761c58f2b59846d0e16afed | [
"MIT"
] | null | null | null | amokryshev/utils/dedup_file_field.py | amokryshev/amokryshev-com | c78c8579ff1c87c07761c58f2b59846d0e16afed | [
"MIT"
] | null | null | null | amokryshev/utils/dedup_file_field.py | amokryshev/amokryshev-com | c78c8579ff1c87c07761c58f2b59846d0e16afed | [
"MIT"
] | null | null | null | import os
from amokryshev import settings
from django.db.models.fields.files import FieldFile, FileField
class DeduplicatedFieldFile(FieldFile):
    """FieldFile that deduplicates uploaded files by name.

    Django's FileField has no built-in deduplication and I did not want a
    third-party dependency for such a small feature, so this is a minimal
    home-grown implementation ("a little crutch").
    """
class FileFieldDedupByName(FileField):
    """FileField whose value class deduplicates uploads by file name.

    Companion to DeduplicatedFieldFile: Django's FileField has no built-in
    deduplication, so this minimal field simply wires in the deduplicating
    FieldFile subclass.
    """
    # Use the deduplicating FieldFile for every value of this field.
    attr_class = DeduplicatedFieldFile
| 39.875 | 97 | 0.709248 | import os
from amokryshev import settings
from django.db.models.fields.files import FieldFile, FileField
class DeduplicatedFieldFile(FieldFile):
    """FieldFile that deduplicates uploaded files by name.

    Django's FileField has no built-in deduplication and I did not want a
    third-party dependency for such a small feature, so this is a minimal
    home-grown implementation ("a little crutch").
    """
    def save(self, name, content, save=True):
        """Skip the upload when MEDIA_ROOT/upload_to/name already exists.

        Instead of writing ``content`` again, point this field at the
        existing file; otherwise fall back to the normal FieldFile.save().
        """
        if os.path.isfile(os.path.join(settings.MEDIA_ROOT, self.field.upload_to, name)):
            # Reuse the existing file: update the field/instance state
            # exactly as FieldFile.save() would, but without writing.
            self.name = os.path.join(self.field.upload_to, name)
            setattr(self.instance, self.field.name, self.name)
            self._committed = True
            if save:
                self.instance.save()
        else:
            super().save(name, content)
class FileFieldDedupByName(FileField):
    """FileField whose value class deduplicates uploads by file name.

    Companion to DeduplicatedFieldFile: Django's FileField has no built-in
    deduplication, so this minimal field simply wires in the deduplicating
    FieldFile subclass.
    """
    # Use the deduplicating FieldFile for every value of this field.
    attr_class = DeduplicatedFieldFile
| 387 | 0 | 27 |
ac1dbd9bbcd51ffad9cf1a7e52c26d803fb67bd8 | 13,900 | py | Python | texar/torch/modules/pretrained/bert.py | wwt17/texar-pytorch | 9fb3ae8f7b541da5c808357033a93fba1817bfbd | [
"Apache-2.0"
] | null | null | null | texar/torch/modules/pretrained/bert.py | wwt17/texar-pytorch | 9fb3ae8f7b541da5c808357033a93fba1817bfbd | [
"Apache-2.0"
] | null | null | null | texar/torch/modules/pretrained/bert.py | wwt17/texar-pytorch | 9fb3ae8f7b541da5c808357033a93fba1817bfbd | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 The Texar Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utils of BERT Modules.
"""
import json
import os
from abc import ABC
from typing import Any, Dict
import torch
from texar.torch.modules.pretrained.pretrained_base import PretrainedMixin
__all__ = [
    "PretrainedBERTMixin",
]
# Base URLs the pre-trained checkpoint archives are downloaded from
# (joined with the per-model paths in _MODEL2URL below).
_BERT_PATH = "https://storage.googleapis.com/bert_models/"
_BIOBERT_PATH = "https://github.com/naver/biobert-pretrained/releases/download/"
class PretrainedBERTMixin(PretrainedMixin, ABC):
r"""A mixin class to support loading pre-trained checkpoints for modules
that implement the BERT model.
Both standard BERT models and many domain specific BERT-based models are
supported. You can specify the :attr:`pretrained_model_name` argument to
pick which pre-trained BERT model to use. All available categories of
pre-trained models (and names) include:
* **Standard BERT**: proposed in (`Devlin et al`. 2018)
`BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding`_
. A bidirectional Transformer language model pre-trained on large text
corpora. Available model names include:
* ``bert-base-uncased``: 12-layer, 768-hidden, 12-heads,
110M parameters.
* ``bert-large-uncased``: 24-layer, 1024-hidden, 16-heads,
340M parameters.
* ``bert-base-cased``: 12-layer, 768-hidden, 12-heads , 110M parameters.
* ``bert-large-cased``: 24-layer, 1024-hidden, 16-heads,
340M parameters.
* ``bert-base-multilingual-uncased``: 102 languages, 12-layer,
768-hidden, 12-heads, 110M parameters.
* ``bert-base-multilingual-cased``: 104 languages, 12-layer, 768-hidden,
12-heads, 110M parameters.
* ``bert-base-chinese``: Chinese Simplified and Traditional, 12-layer,
768-hidden, 12-heads, 110M parameters.
* **BioBERT**: proposed in (`Lee et al`. 2019)
`BioBERT: a pre-trained biomedical language representation model for biomedical text mining`_
. A domain specific language representation model pre-trained on
large-scale biomedical corpora. Based on the BERT architecture, BioBERT
effectively transfers the knowledge from a large amount of biomedical
texts to biomedical text mining models with minimal task-specific
architecture modifications. Available model names include:
* ``biobert-v1.0-pmc``: BioBERT v1.0 (+ PMC 270K) - based on
BERT-base-Cased (same vocabulary)
* ``biobert-v1.0-pubmed-pmc``: BioBERT v1.0 (+ PubMed 200K + PMC 270K) -
based on BERT-base-Cased (same vocabulary)
* ``biobert-v1.0-pubmed``: BioBERT v1.0 (+ PubMed 200K) - based on
BERT-base-Cased (same vocabulary)
* ``biobert-v1.1-pubmed``: BioBERT v1.1 (+ PubMed 1M) - based on
BERT-base-Cased (same vocabulary)
We provide the following BERT classes:
* :class:`~texar.torch.modules.BERTEncoder` for text encoding.
* :class:`~texar.torch.modules.BERTClassifier` for text classification and
sequence tagging.
.. _`BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding`:
https://arxiv.org/abs/1810.04805
.. _`BioBERT: a pre-trained biomedical language representation model for biomedical text mining`:
https://arxiv.org/abs/1901.08746
"""
_MODEL_NAME = "BERT"
_MODEL2URL = {
# Standard BERT
'bert-base-uncased':
_BERT_PATH + "2018_10_18/uncased_L-12_H-768_A-12.zip",
'bert-large-uncased':
_BERT_PATH + "2018_10_18/uncased_L-24_H-1024_A-16.zip",
'bert-base-cased':
_BERT_PATH + "2018_10_18/cased_L-12_H-768_A-12.zip",
'bert-large-cased':
_BERT_PATH + "2018_10_18/cased_L-24_H-1024_A-16.zip",
'bert-base-multilingual-uncased':
_BERT_PATH + "2018_11_23/multi_cased_L-12_H-768_A-12.zip",
'bert-base-multilingual-cased':
_BERT_PATH + "2018_11_03/multilingual_L-12_H-768_A-12.zip",
'bert-base-chinese':
_BERT_PATH + "2018_11_03/chinese_L-12_H-768_A-12.zip",
# BioBERT
'biobert-v1.0-pmc':
_BIOBERT_PATH + 'v1.0-pmc/biobert_v1.0_pmc.tar.gz',
'biobert-v1.0-pubmed-pmc':
_BIOBERT_PATH + 'v1.0-pubmed-pmc/biobert_v1.0_pubmed_pmc.tar.gz',
'biobert-v1.0-pubmed':
_BIOBERT_PATH + 'v1.0-pubmed/biobert_v1.0_pubmed.tar.gz',
'biobert-v1.1-pubmed':
_BIOBERT_PATH + 'v1.1-pubmed/biobert_v1.1_pubmed.tar.gz',
}
_MODEL2CKPT = {
# Standard BERT
'bert-base-uncased': 'bert_model.ckpt',
'bert-large-uncased': 'bert_model.ckpt',
'bert-base-cased': 'bert_model.ckpt',
'bert-large-cased': 'bert_model.ckpt',
'bert-base-multilingual-uncased': 'bert_model.ckpt',
'bert-base-multilingual-cased': 'bert_model.ckpt',
'bert-base-chinese': 'bert_model.ckpt',
# BioBERT
'biobert-v1.0-pmc': 'biobert_model.ckpt',
'biobert-v1.0-pubmed-pmc': 'biobert_model.ckpt',
'biobert-v1.0-pubmed': 'biobert_model.ckpt',
'biobert-v1.1-pubmed': 'model.ckpt-1000000',
}
@classmethod
| 43.167702 | 101 | 0.576043 | # Copyright 2019 The Texar Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Utils of BERT Modules.
"""
import json
import os
from abc import ABC
from typing import Any, Dict
import torch
from texar.torch.modules.pretrained.pretrained_base import PretrainedMixin
__all__ = [
"PretrainedBERTMixin",
]
_BERT_PATH = "https://storage.googleapis.com/bert_models/"
_BIOBERT_PATH = "https://github.com/naver/biobert-pretrained/releases/download/"
class PretrainedBERTMixin(PretrainedMixin, ABC):
    r"""A mixin class to support loading pre-trained checkpoints for modules
    that implement the BERT model.

    Both standard BERT models and many domain specific BERT-based models are
    supported. You can specify the :attr:`pretrained_model_name` argument to
    pick which pre-trained BERT model to use. All available categories of
    pre-trained models (and names) include:

    * **Standard BERT**: proposed in (`Devlin et al`. 2018)
      `BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding`_
      . A bidirectional Transformer language model pre-trained on large text
      corpora. Available model names include:

      * ``bert-base-uncased``: 12-layer, 768-hidden, 12-heads,
        110M parameters.
      * ``bert-large-uncased``: 24-layer, 1024-hidden, 16-heads,
        340M parameters.
      * ``bert-base-cased``: 12-layer, 768-hidden, 12-heads , 110M parameters.
      * ``bert-large-cased``: 24-layer, 1024-hidden, 16-heads,
        340M parameters.
      * ``bert-base-multilingual-uncased``: 102 languages, 12-layer,
        768-hidden, 12-heads, 110M parameters.
      * ``bert-base-multilingual-cased``: 104 languages, 12-layer, 768-hidden,
        12-heads, 110M parameters.
      * ``bert-base-chinese``: Chinese Simplified and Traditional, 12-layer,
        768-hidden, 12-heads, 110M parameters.

    * **BioBERT**: proposed in (`Lee et al`. 2019)
      `BioBERT: a pre-trained biomedical language representation model for biomedical text mining`_
      . A domain specific language representation model pre-trained on
      large-scale biomedical corpora. Based on the BERT architecture, BioBERT
      effectively transfers the knowledge from a large amount of biomedical
      texts to biomedical text mining models with minimal task-specific
      architecture modifications. Available model names include:

      * ``biobert-v1.0-pmc``: BioBERT v1.0 (+ PMC 270K) - based on
        BERT-base-Cased (same vocabulary)
      * ``biobert-v1.0-pubmed-pmc``: BioBERT v1.0 (+ PubMed 200K + PMC 270K) -
        based on BERT-base-Cased (same vocabulary)
      * ``biobert-v1.0-pubmed``: BioBERT v1.0 (+ PubMed 200K) - based on
        BERT-base-Cased (same vocabulary)
      * ``biobert-v1.1-pubmed``: BioBERT v1.1 (+ PubMed 1M) - based on
        BERT-base-Cased (same vocabulary)

    We provide the following BERT classes:

    * :class:`~texar.torch.modules.BERTEncoder` for text encoding.
    * :class:`~texar.torch.modules.BERTClassifier` for text classification and
      sequence tagging.

    .. _`BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding`:
        https://arxiv.org/abs/1810.04805

    .. _`BioBERT: a pre-trained biomedical language representation model for biomedical text mining`:
        https://arxiv.org/abs/1901.08746
    """

    _MODEL_NAME = "BERT"
    _MODEL2URL = {
        # Standard BERT
        'bert-base-uncased':
            _BERT_PATH + "2018_10_18/uncased_L-12_H-768_A-12.zip",
        'bert-large-uncased':
            _BERT_PATH + "2018_10_18/uncased_L-24_H-1024_A-16.zip",
        'bert-base-cased':
            _BERT_PATH + "2018_10_18/cased_L-12_H-768_A-12.zip",
        'bert-large-cased':
            _BERT_PATH + "2018_10_18/cased_L-24_H-1024_A-16.zip",
        # BUGFIX: the two multilingual entries were swapped. In Google's
        # releases, "multilingual_L-12" (2018_11_03) is the *uncased*
        # (original) model and "multi_cased_L-12" (2018_11_23) is the
        # *cased* (recommended) one.
        'bert-base-multilingual-uncased':
            _BERT_PATH + "2018_11_03/multilingual_L-12_H-768_A-12.zip",
        'bert-base-multilingual-cased':
            _BERT_PATH + "2018_11_23/multi_cased_L-12_H-768_A-12.zip",
        'bert-base-chinese':
            _BERT_PATH + "2018_11_03/chinese_L-12_H-768_A-12.zip",
        # BioBERT
        'biobert-v1.0-pmc':
            _BIOBERT_PATH + 'v1.0-pmc/biobert_v1.0_pmc.tar.gz',
        'biobert-v1.0-pubmed-pmc':
            _BIOBERT_PATH + 'v1.0-pubmed-pmc/biobert_v1.0_pubmed_pmc.tar.gz',
        'biobert-v1.0-pubmed':
            _BIOBERT_PATH + 'v1.0-pubmed/biobert_v1.0_pubmed.tar.gz',
        'biobert-v1.1-pubmed':
            _BIOBERT_PATH + 'v1.1-pubmed/biobert_v1.1_pubmed.tar.gz',
    }
    # Name of the TensorFlow checkpoint file inside each extracted archive.
    _MODEL2CKPT = {
        # Standard BERT
        'bert-base-uncased': 'bert_model.ckpt',
        'bert-large-uncased': 'bert_model.ckpt',
        'bert-base-cased': 'bert_model.ckpt',
        'bert-large-cased': 'bert_model.ckpt',
        'bert-base-multilingual-uncased': 'bert_model.ckpt',
        'bert-base-multilingual-cased': 'bert_model.ckpt',
        'bert-base-chinese': 'bert_model.ckpt',
        # BioBERT
        'biobert-v1.0-pmc': 'biobert_model.ckpt',
        'biobert-v1.0-pubmed-pmc': 'biobert_model.ckpt',
        'biobert-v1.0-pubmed': 'biobert_model.ckpt',
        'biobert-v1.1-pubmed': 'model.ckpt-1000000',
    }

    @classmethod
    def _transform_config(cls, pretrained_model_name: str,
                          cache_dir: str) -> Dict[str, Any]:
        """Translate the downloaded ``bert_config.json`` into Texar hparams.

        :param pretrained_model_name: name of the pre-trained model (unused
            here; the config file found under ``cache_dir`` is authoritative).
        :param cache_dir: directory the checkpoint archive was extracted to.
        :return: a hyperparameter dict for the Texar BERT encoder.
        :raises ValueError: if no ``bert_config.json`` exists in `cache_dir`.
        """
        info = list(os.walk(cache_dir))
        root, _, files = info[0]
        config_path = None
        for file in files:
            if file == 'bert_config.json':
                config_path = os.path.join(root, file)
        # BUGFIX: validate *before* opening the file. Previously this check
        # ran after ``open(config_path)``, so a missing config raised
        # ``TypeError: expected str ... not NoneType`` instead of this
        # informative error.
        if config_path is None:
            raise ValueError(f"Cannot find the config file in {cache_dir}")
        with open(config_path) as f:
            config_ckpt = json.loads(f.read())
        hidden_dim = config_ckpt['hidden_size']
        vocab_size = config_ckpt['vocab_size']
        type_vocab_size = config_ckpt['type_vocab_size']
        position_size = config_ckpt['max_position_embeddings']
        embedding_dropout = config_ckpt['hidden_dropout_prob']
        num_blocks = config_ckpt['num_hidden_layers']
        num_heads = config_ckpt['num_attention_heads']
        dropout_rate = config_ckpt['attention_probs_dropout_prob']
        residual_dropout = config_ckpt['hidden_dropout_prob']
        intermediate_size = config_ckpt['intermediate_size']
        hidden_act = config_ckpt['hidden_act']
        configs = {
            'hidden_size': hidden_dim,
            'embed': {
                'name': 'word_embeddings',
                'dim': hidden_dim
            },
            'vocab_size': vocab_size,
            'segment_embed': {
                'name': 'token_type_embeddings',
                'dim': hidden_dim
            },
            'type_vocab_size': type_vocab_size,
            'position_embed': {
                'name': 'position_embeddings',
                'dim': hidden_dim
            },
            'position_size': position_size,
            'encoder': {
                'name': 'encoder',
                'embedding_dropout': embedding_dropout,
                'num_blocks': num_blocks,
                'multihead_attention': {
                    'use_bias': True,
                    'num_units': hidden_dim,
                    'num_heads': num_heads,
                    'output_dim': hidden_dim,
                    'dropout_rate': dropout_rate,
                    'name': 'self'
                },
                'residual_dropout': residual_dropout,
                'dim': hidden_dim,
                'use_bert_config': True,
                'poswise_feedforward': {
                    "layers": [{
                        'type': 'Linear',
                        'kwargs': {
                            'in_features': hidden_dim,
                            'out_features': intermediate_size,
                            'bias': True,
                        }
                    }, {
                        'type': 'Bert' + hidden_act.upper()
                    }, {
                        'type': 'Linear',
                        'kwargs': {
                            'in_features': intermediate_size,
                            'out_features': hidden_dim,
                            'bias': True,
                        }
                    }],
                },
            }
        }
        return configs

    def _init_from_checkpoint(self, pretrained_model_name: str,
                              cache_dir: str, **kwargs):
        """Copy weights from the downloaded TF checkpoint into this module.

        Maps TensorFlow variable names onto the corresponding PyTorch
        parameters (transposing dense kernels, since TF stores them
        ``in x out`` while ``nn.Linear`` stores ``out x in``).

        :param pretrained_model_name: key into :attr:`_MODEL2CKPT`.
        :param cache_dir: directory holding the extracted checkpoint.
        :raises NameError: if a checkpoint variable has no known mapping.
        """
        try:
            import numpy as np
            import tensorflow as tf
        except ImportError:
            print("Loading TensorFlow models in PyTorch requires installing "
                  "TensorFlow. Please see https://www.tensorflow.org/install/ "
                  "for installation instructions.")
            raise
        # Embedding tables and the embedding LayerNorm.
        global_tensor_map = {
            'bert/embeddings/word_embeddings': 'word_embedder._embedding',
            'bert/embeddings/token_type_embeddings':
                'segment_embedder._embedding',
            'bert/embeddings/position_embeddings':
                'position_embedder._embedding',
            'bert/embeddings/LayerNorm/beta':
                'encoder.input_normalizer.bias',
            'bert/embeddings/LayerNorm/gamma':
                'encoder.input_normalizer.weight',
        }
        # Per-layer tensors copied as-is ("{}" is filled with the layer no.).
        layer_tensor_map = {
            "attention/self/key/bias": "self_attns.{}.K_dense.bias",
            "attention/self/query/bias": "self_attns.{}.Q_dense.bias",
            "attention/self/value/bias": "self_attns.{}.V_dense.bias",
            "attention/output/dense/bias": "self_attns.{}.O_dense.bias",
            "attention/output/LayerNorm/gamma": "poswise_layer_norm.{}.weight",
            "attention/output/LayerNorm/beta": "poswise_layer_norm.{}.bias",
            "intermediate/dense/bias": "poswise_networks.{}._layers.0.bias",
            "output/dense/bias": "poswise_networks.{}._layers.2.bias",
            "output/LayerNorm/gamma": "output_layer_norm.{}.weight",
            "output/LayerNorm/beta": "output_layer_norm.{}.bias",
        }
        # Per-layer dense kernels that must be transposed for nn.Linear.
        layer_transpose_map = {
            "attention/self/key/kernel": "self_attns.{}.K_dense.weight",
            "attention/self/query/kernel": "self_attns.{}.Q_dense.weight",
            "attention/self/value/kernel": "self_attns.{}.V_dense.weight",
            "attention/output/dense/kernel": "self_attns.{}.O_dense.weight",
            "intermediate/dense/kernel": "poswise_networks.{}._layers.0.weight",
            "output/dense/kernel": "poswise_networks.{}._layers.2.weight",
        }
        pooler_map = {
            'bert/pooler/dense/bias': 'pooler.0.bias',
            'bert/pooler/dense/kernel': 'pooler.0.weight'
        }
        tf_path = os.path.abspath(os.path.join(
            cache_dir, self._MODEL2CKPT[pretrained_model_name]))
        # Load weights from TF model
        init_vars = tf.train.list_variables(tf_path)
        tfnames, arrays = [], []
        for name, _ in init_vars:
            array = tf.train.load_variable(tf_path, name)
            tfnames.append(name)
            arrays.append(array.squeeze())
        py_prefix = "encoder."
        idx = 0
        for name, array in zip(tfnames, arrays):
            if name.startswith('cls'):
                # ignore those variables begin with cls
                continue
            if name in global_tensor_map:
                v_name = global_tensor_map[name]
                pointer = self._name_to_variable(v_name)
                assert pointer.shape == array.shape
                pointer.data = torch.from_numpy(array)
                idx += 1
            elif name in pooler_map:
                pointer = self._name_to_variable(pooler_map[name])
                if name.endswith('bias'):
                    assert pointer.shape == array.shape
                    pointer.data = torch.from_numpy(array)
                    idx += 1
                else:
                    array_t = np.transpose(array)
                    assert pointer.shape == array_t.shape
                    pointer.data = torch.from_numpy(array_t)
                    idx += 1
            else:
                # here name is the TensorFlow variable name
                name_tmp = name.split("/")
                # e.g. layer_<no>: strip the "layer_" prefix to get the index
                layer_no = name_tmp[2][6:]
                name_tmp = "/".join(name_tmp[3:])
                if name_tmp in layer_tensor_map:
                    v_name = layer_tensor_map[name_tmp].format(layer_no)
                    pointer = self._name_to_variable(py_prefix + v_name)
                    assert pointer.shape == array.shape
                    pointer.data = torch.from_numpy(array)
                elif name_tmp in layer_transpose_map:
                    v_name = layer_transpose_map[name_tmp].format(layer_no)
                    pointer = self._name_to_variable(py_prefix + v_name)
                    array_t = np.transpose(array)
                    assert pointer.shape == array_t.shape
                    pointer.data = torch.from_numpy(array_t)
                else:
                    raise NameError(f"Variable with name '{name}' not found")
                idx += 1
| 8,066 | 0 | 53 |
54fa1fa2261b14d9874756198f220bae47f20d65 | 7,647 | py | Python | loadingmenu.py | documenti-aperti/sw-offline | 4dfb34e0a3359c4e765634fdba0b26dcc4ecd32c | [
"MIT"
] | null | null | null | loadingmenu.py | documenti-aperti/sw-offline | 4dfb34e0a3359c4e765634fdba0b26dcc4ecd32c | [
"MIT"
] | null | null | null | loadingmenu.py | documenti-aperti/sw-offline | 4dfb34e0a3359c4e765634fdba0b26dcc4ecd32c | [
"MIT"
] | null | null | null | #Libraries
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from os.path import split as PATHSPLIT
#Pythons
from settings import *
| 42.483333 | 134 | 0.641166 | #Libraries
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from os.path import split as PATHSPLIT
#Pythons
from settings import *
class LoadingMenu(QGroupBox):
    """File-selection pane: a drag-and-drop target on the left and, on the
    right, a scrollable reorderable list of accepted files plus a start
    button. Sizes, fonts and stylesheets come from module-level constants
    in ``settings`` (star-imported).
    """
    def __init__(self, parent, w, h):
        """Build the menu inside *parent* with pixel size *w* x *h*."""
        #Initialize father
        super(LoadingMenu, self).__init__(parent)
        #Setupping
        self.Setup(w, h)
        #Initialize Things
        self.Init()
    def Setup(self, w, h):
        """Initialize plain instance state (no widgets yet)."""
        #Setupping datas
        self.selected = -1  # index of the row marked for swapping; -1 = none
        self.width = w
        self.height = h
        self.file_names = []  # accepted file paths, in display order
    def Init(self):
        """Create layouts, widgets and the widget geometry, in that order."""
        #Initializing various
        self.InitLayout()
        self.InitWidgets()
        self.InitWindow()
    def InitLayout(self):
        """Create and configure the nested box layouts."""
        #Adding Layouts
        self.Hlayout = QHBoxLayout()
        self.Vlayout = QVBoxLayout()
        self.Vlayout_labels = QVBoxLayout()
        self.Vlayout_button = QVBoxLayout()
        self.setLayout(self.Hlayout)
        #Setupping layouts
        self.Hlayout.setContentsMargins(LOADINGMENU_HMARGIN, LOADINGMENU_HMARGIN, LOADINGMENU_HMARGIN, LOADINGMENU_HMARGIN)
        self.Hlayout.setSpacing(LOADINGMENU_HSPACING)
        self.Vlayout.setContentsMargins(LOADINGMENU_VMARGIN, LOADINGMENU_VMARGIN, LOADINGMENU_VMARGIN, LOADINGMENU_VMARGIN)
        self.Vlayout.setSpacing(LOADINGMENU_VSPACING)
        self.Vlayout_labels.setContentsMargins(LABELS_MARGIN, LABELS_MARGIN, LABELS_MARGIN, LABELS_MARGIN)
        self.Vlayout_labels.setSpacing(LABELS_SPACING)
    def InitWidgets(self):
        """Create the child widgets and wire them into the layouts."""
        #Creating Widgets
        #Labels Group
        self.MyLabelsGroup = QGroupBox()
        self.MyLabelsGroup.setLayout(self.Vlayout_labels)
        #ScrollArea
        self.scrollArea = QScrollArea()
        self.scrollArea.setWidgetResizable(True)
        self.scrollArea.setFrameShape(QFrame.NoFrame)
        #Enter Button
        self.EnterButton = QPushButton("Press to Start")
        self.EnterButton.setFont(ENTERBUTTON_FONT)
        self.EnterButton.setMinimumHeight(MINIMUM_ENTERBUTTON_HEIGHT)
        #Drag Frame
        self.DragButton = DragLabel("Drag files here")
        self.DragButton.signal.connect(self.AddUrl)
        #Label's list
        self.Labels = []  # CustomLabel rows, parallel to file_names
        #Add layouts
        self.Hlayout.addWidget(self.DragButton, LOADINGMENU_STRETCH["DRAGBUTTON"])
        self.Hlayout.addLayout(self.Vlayout, LOADINGMENU_STRETCH["VLAYOUT"])
        self.Vlayout.addWidget(self.scrollArea)
        self.scrollArea.setWidget(self.MyLabelsGroup)
        self.Vlayout.addLayout(self.Vlayout_button)
        self.Vlayout_button.addWidget(self.EnterButton)
        #Set names
        self.MyLabelsGroup.setObjectName("scroll_groupbox")
        self.scrollArea.setObjectName("scroll_area")
        self.EnterButton.setObjectName("enter_button")
        self.DragButton.setObjectName("drag_button")
    def InitWindow(self):
        """Place the pane at the origin with the configured size."""
        #Initializing window
        self.setGeometry(0, 0, self.width, self.height)
    def AddUrl(self, files):
        """Append each newline-separated path in *files*, skipping duplicates
        and paths whose extension is not in ``EXTENSIONS``.
        """
        #Add url to list
        for s in files.split("\n"):
            # NOTE(review): the extension is taken from the *first* dot
            # (``s.find(".")``), so "a.tar.gz" is checked as ".tar.gz" —
            # confirm EXTENSIONS is written accordingly.
            if s not in self.file_names and s[s.find("."):] in EXTENSIONS:
                self.file_names.append(s)
                self.Labels.append(CustomLabel(s))
                self.Labels[-1].swButton.clicked.connect(self.SwButtonClicked)
                self.Labels[-1].delButton.clicked.connect(self.DelButtonClicked)
                self.Vlayout_labels.addWidget(self.Labels[-1])
    def SwButtonClicked(self):
        """Swap-button handler: the first click marks a row; a second click on
        a different row swaps the two entries (paths, titles and shown text);
        clicking the marked row again cancels.
        """
        #Select to Swap
        gp = self.sender().parent()
        newi = self.Labels.index(gp)
        if not gp.selected:
            if self.selected > -1:
                #Switch
                self.file_names[self.selected], self.file_names[newi] = self.file_names[newi], self.file_names[self.selected]
                self.Labels[self.selected].select()
                self.Labels[self.selected].title, self.Labels[newi].title = self.Labels[newi].title, self.Labels[self.selected].title
                self.Labels[self.selected].label.setText(PATHSPLIT(self.Labels[self.selected].title)[1])
                self.Labels[newi].label.setText(PATHSPLIT(self.Labels[newi].title)[1])
                self.selected = -1
            else:
                self.selected = self.Labels.index(gp)
                gp.select()
        else:
            # Second click on the already-marked row: cancel the pending swap.
            self.selected = -1
            gp.select()
    def DelButtonClicked(self):
        """Remove the clicked row and its path from all bookkeeping."""
        #Delete selected item
        gp = self.sender().parent()
        self.selected = -1
        self.file_names.remove(gp.title)
        self.Labels.remove(gp)
        self.Vlayout_labels.removeWidget(gp)
        gp.setParent(None)
class DragLabel(QLabel):
    """Label that accepts file drops and re-emits the dropped paths through
    :attr:`signal`, one path per line.
    """
    # Emitted on drop with the newline-joined, scheme-stripped paths.
    signal = pyqtSignal(str)
    def __init__(self, title):
        """Create the drop target showing *title*."""
        #Initialize father
        super(DragLabel, self).__init__(title)
        #Setupping
        self.setFont(DRAGBUTTON_FONT)
        self.setAlignment(Qt.AlignCenter)
        self.setAcceptDrops(True)
        self.show()
    def dragEnterEvent(self, e):
        """Accept the drag only when it carries a URI list (files)."""
        #Creating Drag Event
        if e.mimeData().hasFormat('text/uri-list'):
            e.accept()
        else:
            e.ignore()
    def dropEvent(self, e):
        """Strip the URL prefix from each dropped URI and emit the result."""
        #Creating Drop Event
        # NOTE(review): ``i[8:]`` drops a fixed 8-character prefix
        # ("file:///"); on POSIX this also removes the path's leading
        # slash — confirm the receiver expects that.
        s = "\n".join([i[8:] for i in (e.mimeData().text()).split("\n")])
        self.signal.emit(s)
class CustomLabel(QGroupBox):
    """One row in the file list: a swap button, a delete button and the
    file's base name; the full path is kept in ``self.title``.
    """
    def __init__(self, title):
        """Build the row widget for the file path *title*."""
        #Initialize father
        super(CustomLabel, self).__init__()
        self.setObjectName("customlabel")
        self.title, self.selected = title, False
        self.setStyleSheet(CUSTOMLABEL_STYLESHEET_NORMAL)
        self.setFixedHeight(CUSTOMLABELS_HEIGHT)
        #Add layout
        self.layout = QHBoxLayout()
        self.setLayout(self.layout)
        #Setupping layout
        self.layout.setContentsMargins(CUSTOMLABEL_MARGIN, CUSTOMLABEL_MARGIN, CUSTOMLABEL_MARGIN, CUSTOMLABEL_MARGIN)
        self.layout.setSpacing(CUSTOMLABEL_SPACING)
        #Creating Widgets
        self.label = QLabel(PATHSPLIT(title)[1])
        self.delButton = QPushButton()
        self.swButton = QPushButton()
        #Setupping Widgets
        #Label
        self.label.setAlignment(Qt.AlignCenter)
        self.label.setFont(CUSTOMLABEL_FONT)
        #DelButton
        self.delButton.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
        self.delButton.setIcon(QIcon(DELBUTTON_ICON))
        self.delButton.setIconSize(QSize(CUSTOMLABEL_ICON_SIZE, CUSTOMLABEL_ICON_SIZE))
        #SwButton
        self.swButton.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
        self.swButton.setIcon(QIcon(SWBUTTON_ICON))
        self.swButton.setIconSize(QSize(CUSTOMLABEL_ICON_SIZE, CUSTOMLABEL_ICON_SIZE))
        #Adding Widgets to layout
        self.layout.addWidget(self.swButton, CUSTOMLABEL_STRETCH["SW"])
        self.layout.addWidget(self.delButton, CUSTOMLABEL_STRETCH["DEL"])
        self.layout.addWidget(self.label, CUSTOMLABEL_STRETCH["LABEL"])
        #Setting names
        self.delButton.setObjectName("customlabel_del_button")
        self.swButton.setObjectName("customlabel_sw_button")
        self.label.setObjectName("customlabel_textlabel")
    def select(self):
        """Toggle the marked-for-swap state and restyle the row to match."""
        #Selected
        self.selected = not self.selected
        if self.selected:
            self.setStyleSheet(CUSTOMLABEL_STYLESHEET_SELECTED)
            self.swButton.setIcon(QIcon(SWBUTTON_ICON_SELECTED))
        else:
            self.setStyleSheet(CUSTOMLABEL_STYLESHEET_NORMAL)
self.swButton.setIcon(QIcon(SWBUTTON_ICON)) | 6,973 | 131 | 370 |
0fc9e2a005f7bc91a275f0d524d82bc08f3c3abb | 14,849 | py | Python | congressclient/tests/v1/test_datasource.py | mail2nsrajesh/python-congressclient | ec89672c4a7af02af075f82285f1124ef44d942c | [
"Apache-2.0"
] | null | null | null | congressclient/tests/v1/test_datasource.py | mail2nsrajesh/python-congressclient | ec89672c4a7af02af075f82285f1124ef44d942c | [
"Apache-2.0"
] | null | null | null | congressclient/tests/v1/test_datasource.py | mail2nsrajesh/python-congressclient | ec89672c4a7af02af075f82285f1124ef44d942c | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
from oslo_serialization import jsonutils
from congressclient.common import utils
from congressclient.osc.v1 import datasource
from congressclient.tests import common
| 38.270619 | 79 | 0.57647 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
from oslo_serialization import jsonutils
from congressclient.common import utils
from congressclient.osc.v1 import datasource
from congressclient.tests import common
class TestListDatasources(common.TestCongressBase):
    """Tests for the ``datasource list`` command."""
    def test_list_datasource(self):
        """Columns and row values come straight from the client response."""
        datasource_name = 'neutron'
        arglist = [
        ]
        verifylist = [
        ]
        response = {
            "results": [{"id": datasource_name,
                         "name": "my_name",
                         "enabled": "True",
                         "driver": "driver1",
                         "config": None}]
        }
        lister = mock.Mock(return_value=response)
        self.app.client_manager.congressclient.list_datasources = lister
        cmd = datasource.ListDatasources(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        result = cmd.take_action(parsed_args)
        lister.assert_called_with()
        self.assertEqual(['id', 'name', 'enabled', 'driver', 'config'],
                         result[0])
        for row in result[1]:
            self.assertEqual(datasource_name, row[0])
            self.assertEqual("my_name", row[1])
            self.assertEqual("True", row[2])
            self.assertEqual("driver1", row[3])
            self.assertEqual("None", row[4])
    def test_list_datasource_output_not_unicode(self):
        """The rendered config column must not show ``u'...'`` repr noise."""
        # response json string is converted to dict by oslo jsonutils.loads(),
        # so the key and value in the dict should be unicode type.
        response = {
            u"results": [{u"id": u"neutron",
                          u"name": u"my_name",
                          u"enabled": True,
                          u"driver": 'driver1',
                          u"config": {
                              u'username': u'admin',
                              u'tenant_name': u'admin',
                              u'poll_time': u'10',
                              u'password': u'<hidden>',
                              u'auth_url': u'http://127.0.0.1:5000/v2.0'
                          }}]
        }
        lister = mock.Mock(return_value=response)
        self.app.client_manager.congressclient.list_datasources = lister
        cmd = datasource.ListDatasources(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, [], [])
        result = cmd.take_action(parsed_args)
        lister.assert_called_with()
        self.assertEqual(['id', 'name', 'enabled', 'driver', 'config'],
                         result[0])
        # get 'config' column
        config = list(result[1])[0][-1]
        self.assertIn("'username': 'admin'", config)
        self.assertNotIn("u'username': u'admin'", config)
class TestListDatasourceTables(common.TestCongressBase):
    """Tests for the ``datasource table list`` command."""
    def test_list_datasource_tables(self):
        """Table ids of the datasource are exposed under an 'id' column."""
        datasource_name = 'neutron'
        arglist = [
            datasource_name
        ]
        verifylist = [
            ('datasource_name', datasource_name)
        ]
        response = {
            "results": [{"id": "ports"},
                        {"id": "networks"}]
        }
        lister = mock.Mock(return_value=response)
        self.app.client_manager.congressclient.list_datasource_tables = lister
        cmd = datasource.ListDatasourceTables(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        result = cmd.take_action(parsed_args)
        lister.assert_called_with(datasource_name)
        self.assertEqual(['id'], result[0])
class TestListDatasourceStatus(common.TestCongressBase):
    """Tests for the ``datasource status show`` command."""
    def test_list_datasource_status(self):
        """Status keys/values are rendered as sorted parallel tuples."""
        datasource_name = 'neutron'
        arglist = [
            datasource_name
        ]
        verifylist = [
            ('datasource_name', datasource_name)
        ]
        response = {'last_updated': "now",
                    'last_error': "None"}
        lister = mock.Mock(return_value=response)
        self.app.client_manager.congressclient.list_datasource_status = lister
        cmd = datasource.ShowDatasourceStatus(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        result = list(cmd.take_action(parsed_args))
        lister.assert_called_with(datasource_name)
        self.assertEqual([('last_error', 'last_updated'),
                          ('None', 'now')],
                         result)
class TestShowDatasourceActions(common.TestCongressBase):
    """Tests for the ``datasource actions show`` command."""
    def test_show_datasource_actions(self):
        """Action rows carry action/args/description columns."""
        datasource_name = 'fake'
        arglist = [
            datasource_name
        ]
        verifylist = [
            ('datasource_name', datasource_name)
        ]
        response = {
            "results":
                [{'name': 'execute',
                  'args': [{"name": "name", "description": "None"},
                           {"name": "status", "description": "None"},
                           {"name": "id", "description": "None"}],
                  'description': 'execute action'}]
        }
        lister = mock.Mock(return_value=response)
        self.app.client_manager.congressclient.list_datasource_actions = lister
        cmd = datasource.ShowDatasourceActions(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        result = cmd.take_action(parsed_args)
        lister.assert_called_once_with(datasource_name)
        self.assertEqual(['action', 'args', 'description'], result[0])
class TestShowDatasourceSchema(common.TestCongressBase):
    """Tests for the ``datasource schema show`` command."""
    def test_show_datasource_schema(self):
        """Each table of the datasource becomes a (table, columns) row."""
        datasource_name = 'neutron'
        arglist = [
            datasource_name
        ]
        verifylist = [
            ('datasource_name', datasource_name)
        ]
        response = {
            "tables":
                [{'table_id': 'ports',
                  'columns': [{"name": "name", "description": "None"},
                              {"name": "status", "description": "None"},
                              {"name": "id", "description": "None"}]},
                 {'table_id': 'routers',
                  'columns': [{"name": "name", "description": "None"},
                              {"name": "floating_ip", "description": "None"},
                              {"name": "id", "description": "None"}]}]
        }
        lister = mock.Mock(return_value=response)
        self.app.client_manager.congressclient.show_datasource_schema = lister
        cmd = datasource.ShowDatasourceSchema(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        result = cmd.take_action(parsed_args)
        lister.assert_called_with(datasource_name)
        self.assertEqual(['table', 'columns'], result[0])
class TestShowDatasourceTableSchema(common.TestCongressBase):
    """Tests for the ``datasource table schema show`` command."""
    def test_show_datasource_table_schema(self):
        """A single table's schema lists name/description column pairs."""
        datasource_name = 'neutron'
        table_name = 'ports'
        arglist = [
            datasource_name, table_name
        ]
        verifylist = [
            ('datasource_name', datasource_name),
            ('table_name', table_name)
        ]
        response = {
            'table_id': 'ports',
            'columns': [{"name": "name", "description": "None"},
                        {"name": "status", "description": "None"},
                        {"name": "id", "description": "None"}]
        }
        lister = mock.Mock(return_value=response)
        client = self.app.client_manager.congressclient
        client.show_datasource_table_schema = lister
        cmd = datasource.ShowDatasourceTableSchema(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        result = cmd.take_action(parsed_args)
        lister.assert_called_with(datasource_name, table_name)
        self.assertEqual(['name', 'description'], result[0])
class TestListDatasourceRows(common.TestCongressBase):
    """Tests for the ``datasource row list`` command."""
    def test_list_datasource_row(self):
        """Row data is rendered under the column names from the table schema."""
        datasource_name = 'neutron'
        table_name = 'ports'
        arglist = [
            datasource_name, table_name
        ]
        verifylist = [
            ('datasource_name', datasource_name),
            ('table', table_name)
        ]
        response = {
            "results": [{"data": ["69abc88b-c950-4625-801b-542e84381509",
                                  "default"]}]
        }
        # Schema fetched separately to provide the header names.
        schema_response = {
            'table_id': 'ports',
            'columns': [{"name": "ID", "description": "None"},
                        {"name": "name", "description": "None"}]
        }
        client = self.app.client_manager.congressclient
        lister = mock.Mock(return_value=response)
        client.list_datasource_rows = lister
        schema_lister = mock.Mock(return_value=schema_response)
        client.show_datasource_table_schema = schema_lister
        cmd = datasource.ListDatasourceRows(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        result = cmd.take_action(parsed_args)
        lister.assert_called_with(datasource_name, table_name)
        self.assertEqual(['ID', 'name'], result[0])
class TestShowDatasourceTable(common.TestCongressBase):
    """Tests for the ``datasource table show`` command."""
    def test_show_datasource_table(self):
        """The response dict is rendered as (keys, values) tuples."""
        datasource_name = 'neutron'
        table_id = 'ports'
        arglist = [
            datasource_name, table_id
        ]
        verifylist = [
            ('datasource_name', datasource_name),
            ('table_id', table_id)
        ]
        response = {
            'id': 'ports',
        }
        lister = mock.Mock(return_value=response)
        client = self.app.client_manager.congressclient
        client.show_datasource_table = lister
        cmd = datasource.ShowDatasourceTable(self.app, self.namespace)
        expected_ret = [('id',), ('ports',)]
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        result = list(cmd.take_action(parsed_args))
        self.assertEqual(expected_ret, result)
class TestCreateDatasource(common.TestCongressBase):
    """Tests for the ``datasource create`` command."""
    def test_create_datasource(self):
        """Repeated ``--config k=v`` args are parsed into one dict and the
        client response is echoed back as sorted (keys, values) tuples."""
        driver = 'neutronv2'
        name = 'arosen-neutronv2'
        response = {"description": '',
                    "config": {"username": "admin",
                               "tenant_name": "admin",
                               "password": "password",
                               "auth_url": "http://127.0.0.1:5000/v2.0"},
                    "enabled": True,
                    "owner": "user",
                    "driver": "neutronv2",
                    "type": None,
                    "id": "b72f81a0-32b5-4bf4-a1f6-d69c09c42cec",
                    "name": "arosen-neutronv2"}
        arglist = [driver, name,
                   "--config", "username=admin",
                   "--config", "password=password",
                   "--config", "auth_url=http://1.1.1.1/foo",
                   "--config", "tenant_name=admin"]
        verifylist = [
            ('driver', driver),
            ('name', name),
            ('config', {'username': 'admin', 'password': 'password',
                        'auth_url': 'http://1.1.1.1/foo',
                        'tenant_name': 'admin'}),
        ]
        mocker = mock.Mock(return_value=response)
        self.app.client_manager.congressclient.create_datasource = mocker
        cmd = datasource.CreateDatasource(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        result = list(cmd.take_action(parsed_args))
        filtered = [('config', 'description',
                     'driver', 'enabled', 'id', 'name',
                     'owner', 'type'),
                    (response['config'], response['description'],
                     response['driver'], response['enabled'],
                     response['id'], response['name'],
                     response['owner'], response['type'])]
        self.assertEqual(filtered, result)
class TestDeleteDatasourceDriver(common.TestCongressBase):
    """Tests for the ``datasource delete`` command."""
    def test_delete_datasource(self):
        """Deleting forwards the datasource id once and returns None."""
        datasource_id = 'neutronv2'
        delete_mock = mock.Mock(return_value=None)
        self.app.client_manager.congressclient.delete_datasource = delete_mock
        cmd = datasource.DeleteDatasource(self.app, self.namespace)
        parsed_args = self.check_parser(
            cmd, [datasource_id], [('datasource', datasource_id), ])
        self.assertIsNone(cmd.take_action(parsed_args))
        delete_mock.assert_called_with(datasource_id)
class TestUpdateDatasourceRow(common.TestCongressBase):
    """Tests for the ``datasource row update`` command (push driver)."""
    def test_update_datasource_row(self):
        """Rows given as a JSON literal are decoded and pushed verbatim."""
        driver = 'push'
        table_name = 'table'
        rows = [["data1", "data2"],
                ["data3", "data4"]]
        arglist = [driver, table_name, jsonutils.dumps(rows)]
        verifylist = [('datasource', driver),
                      ('table', table_name),
                      ('rows', rows)]
        mocker = mock.Mock(return_value=None)
        self.app.client_manager.congressclient.update_datasource_rows = mocker
        self.app.client_manager.congressclient.list_datasources = mock.Mock()
        cmd = datasource.UpdateDatasourceRow(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        # Name-to-id resolution is stubbed so the command uses "push" directly.
        with mock.patch.object(utils, 'get_resource_id_from_name',
                               return_value="push"):
            cmd.take_action(parsed_args)
        mocker.assert_called_with(driver, table_name, rows)
class TestDatasourceRequestRefresh(common.TestCongressBase):
    """Tests for the ``datasource request-refresh`` command."""
    def test_datasource_request_refresh(self):
        """The refresh call uses the resolved id, not the given name."""
        driver = 'neutronv2'
        arglist = [driver]
        verifylist = [('datasource', driver), ]
        mocker = mock.Mock(return_value=None)
        self.app.client_manager.congressclient.request_refresh = mocker
        self.app.client_manager.congressclient.list_datasources = mock.Mock()
        cmd = datasource.DatasourceRequestRefresh(self.app, self.namespace)
        parsed_args = self.check_parser(cmd, arglist, verifylist)
        # Name-to-id resolution is stubbed to return "id".
        with mock.patch.object(utils, "get_resource_id_from_name",
                               return_value="id"):
            result = cmd.take_action(parsed_args)
        mocker.assert_called_with("id", {})
        self.assertIsNone(result)
| 13,053 | 419 | 620 |
ca4497e32d8554e6566a2fee5b27ac735b5f8a0c | 519 | py | Python | 2.py | gor-dimm/lr3 | dcc1ad607a3144381c8f53759ef8546651c82555 | [
"MIT"
] | null | null | null | 2.py | gor-dimm/lr3 | dcc1ad607a3144381c8f53759ef8546651c82555 | [
"MIT"
] | null | null | null | 2.py | gor-dimm/lr3 | dcc1ad607a3144381c8f53759ef8546651c82555 | [
"MIT"
] | null | null | null | # Напечатать три данных действительных числа , и сначала в порядке их возрастания, затем - в порядке убывания
a = input("Введите первое число: ")
b = input("Введите второе число: ")
c = input("Введите третье число: ")
print(min(a, b, c), (max(min(a, b), min(b, c))
if not
min(a, b) == min(b, c)
else min(a, c)), max(a, b, c))
print(max(a, b, c), (min(max(a, b), max(b, c))
if not
max(a, b) == max(b, c)
else max(a, c)), min(a, b, c))
| 37.071429 | 109 | 0.516378 | # Напечатать три данных действительных числа , и сначала в порядке их возрастания, затем - в порядке убывания
# Print three given real numbers, first in ascending and then in descending
# order.
# BUGFIX: the values were previously compared as *strings*, so for example
# "10" sorted before "9"; convert to float for the numeric ordering the task
# statement ("действительных числа" = real numbers) requires.
a = float(input("Введите первое число: "))
b = float(input("Введите второе число: "))
c = float(input("Введите третье число: "))
# sorted() replaces the original hand-rolled median expression.
ordered = sorted((a, b, c))
print(*ordered)
print(*reversed(ordered))
| 0 | 0 | 0 |
2fcb27cc7a4f4d1bdf595bd91bd8f9abe77e380f | 784 | py | Python | users/migrations/0003_user_card_number.py | MAKENTNU/web | 7a5b512bf4c087d1561cdb623d7df4b3d04811a2 | [
"MIT"
] | 10 | 2017-11-25T01:47:20.000Z | 2020-03-24T18:28:24.000Z | users/migrations/0003_user_card_number.py | MAKENTNU/web | 7a5b512bf4c087d1561cdb623d7df4b3d04811a2 | [
"MIT"
] | 319 | 2017-11-16T09:56:03.000Z | 2022-03-28T00:24:37.000Z | users/migrations/0003_user_card_number.py | MAKENTNU/web | 7a5b512bf4c087d1561cdb623d7df4b3d04811a2 | [
"MIT"
] | 6 | 2017-11-12T14:04:08.000Z | 2021-03-10T09:41:18.000Z | # Generated by Django 2.2.5 on 2019-10-18 17:41
import card.modelfields
import django.core.validators
from django.db import migrations
| 34.086957 | 134 | 0.576531 | # Generated by Django 2.2.5 on 2019-10-18 17:41
import card.modelfields
import django.core.validators
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('users', '0002_rename_user_table'),
]
operations = [
migrations.AddField(
model_name='user',
name='card_number',
field=card.modelfields.CardNumberField(blank=True, error_messages={'unique': 'Card number already in use'}, max_length=10,
null=True, unique=True, validators=[
django.core.validators.RegexValidator('^\\d{10}$', 'Card number must be ten digits long.')],
verbose_name='Card number'),
),
]
| 0 | 624 | 23 |
bb8089210e933344c7763a5a9f8498d7c72e6f93 | 2,010 | py | Python | controller/packages.py | fossabot/do | 18a76fdb611b4d4aca97b71be87d3ab4df470d81 | [
"MIT"
] | null | null | null | controller/packages.py | fossabot/do | 18a76fdb611b4d4aca97b71be87d3ab4df470d81 | [
"MIT"
] | null | null | null | controller/packages.py | fossabot/do | 18a76fdb611b4d4aca97b71be87d3ab4df470d81 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# BEWARE: to not import this package at startup,
# but only into functions otherwise pip will go crazy
# (we cannot understand why, but it does!)
# which version of python is this?
# Retrocompatibility for Python < 3.6
from sultan.api import Sultan
try:
import_exceptions = (ModuleNotFoundError, ImportError)
except NameError:
import_exceptions = ImportError
| 26.8 | 77 | 0.651741 | # -*- coding: utf-8 -*-
# BEWARE: to not import this package at startup,
# but only into functions otherwise pip will go crazy
# (we cannot understand why, but it does!)
# which version of python is this?
# Retrocompatibility for Python < 3.6
from sultan.api import Sultan
try:
import_exceptions = (ModuleNotFoundError, ImportError)
except NameError:
import_exceptions = ImportError
def install(package, editable=False, user=False, use_pip3=True):
    """Install or upgrade *package* through pip, driven via Sultan.

    :param package: name (or path, for editable installs) handed to pip
    :param editable: add ``--editable`` to the pip command
    :param user: add ``--user`` and run without sudo
    :param use_pip3: invoke ``pip3`` instead of ``pip``
    :return: True when pip exited with return code 0
    """
    with Sultan.load(sudo=not user) as sultan:
        # Assemble "install --upgrade [--editable] [--user] <package>".
        parts = ['install', '--upgrade']
        if editable:
            parts.append('--editable')
        if user:
            parts.append('--user')
        parts.append(package)
        command = ' '.join(parts)
        runner = sultan.pip3 if use_pip3 else sultan.pip
        result = runner(command).run()
        # Mirror pip's output to our stdout.
        for line in result.stdout:
            print(line)
        for line in result.stderr:
            print(line)
        return result.rc == 0
def check_version(package_name):
    """Return the installed version of *package_name*, or None when absent."""
    # Deferred import: importing pip internals at module load breaks pip
    # (see the module header note).
    from pip._internal.utils.misc import get_installed_distributions

    return next(
        (dist._version  # pylint:disable=protected-access
         for dist in get_installed_distributions(local_only=True, user_only=False)
         if dist._key == package_name),  # pylint:disable=protected-access
        None,
    )
def import_package(package_name):
    """Import and return module *package_name*, or None when unavailable."""
    from importlib import import_module
    try:
        return import_module(package_name)
    except import_exceptions:  # pylint:disable=catching-non-exception
        return None
def package_version(package_name):
    """Return the version string of *package_name*, or None if it cannot be
    imported or exposes no usable ``__version__``.
    """
    package = import_package(package_name)
    if package is None:
        return None
    try:
        version = package.__version__
        if isinstance(version, str):
            return version
        # Fix required for requests: some packages expose a submodule here,
        # whose own __version__ carries the actual version string.
        return version.__version__
    # Bug fix: the original caught BaseException, which also swallowed
    # KeyboardInterrupt/SystemExit. A missing attribute is the only
    # failure expected here.
    except AttributeError as e:
        print(str(e))
        return None
| 1,521 | 0 | 92 |
11f97299c93769f17d9b167d578d54fe3e28a177 | 6,341 | py | Python | Vertretungsplan - BBS II Emden.py | zlyfer/Vertretungsplan-Client-BBS2-Emden | eb9e1e28f0e76465003ae7e54e3ce37d28feb9a6 | [
"MIT"
] | null | null | null | Vertretungsplan - BBS II Emden.py | zlyfer/Vertretungsplan-Client-BBS2-Emden | eb9e1e28f0e76465003ae7e54e3ce37d28feb9a6 | [
"MIT"
] | null | null | null | Vertretungsplan - BBS II Emden.py | zlyfer/Vertretungsplan-Client-BBS2-Emden | eb9e1e28f0e76465003ae7e54e3ce37d28feb9a6 | [
"MIT"
] | null | null | null | from tkinter import ttk
from tkinter import *
import requests
import json
import os
# variables
page = 0
vplan = None
tableContent = []
navigationLabel = None
leftArrow = None
rightArrow = None
tableHeaders = ["Datum", "Kurs", "Stunde", "Fach", "Raum", "Lehrer", "Info"]
#tableHeaders = ["ID", "Datum", "Kurs", "Stunde", "Fach", "Raum", "Lehrer", "Info"] # test
# config setup
config = {}
configNames = ["Kurs", "History", "Isolate", "width", "height"]
for i in configNames:
config[i] = False
if os.path.exists('config.json'):
config = json.loads(open('config.json', 'r').read())
width = config['width']
height = config['height']
if width < 600:
width = 600
if height < 150:
height = 150
if (width / 50) != (width // 50):
width = (width // 50) * 50
if (height / 50) != (height // 50):
height = (height // 50) * 50
amountRows = (height - 100) // 50
# Tkinter GUI
root = Tk()
root.iconbitmap("icon.ico")
root.title("Vertretungsplan - BBS II Emden //./ by zlyfa! 2018")
root.geometry('%sx%s+0+0' % (width, height))
root.configure(background='#257BF4')
loadingLabel = placeLoadingLabel()
tableHeaderGen()
tableContentGen()
navigationArrowsGen()
navigationLabelGen()
removeLoadingLabel(loadingLabel)
leftArrow.bind('<Enter>', hoverLeftArrowEnter)
leftArrow.bind('<Leave>', hoverLeftArrowLeave)
rightArrow.bind('<Enter>', hoverRightArrowEnter)
rightArrow.bind('<Leave>', hoverRightArrowLeave)
root.mainloop()
| 23.928302 | 121 | 0.657783 | from tkinter import ttk
from tkinter import *
import requests
import json
import os
# variables
page = 0                # currently displayed page (0-based)
vplan = None            # substitution plan; fetched lazily by getVplan()
tableContent = []       # Label widgets of the current page (for clearing)
navigationLabel = None
leftArrow = None
rightArrow = None
tableHeaders = ["Datum", "Kurs", "Stunde", "Fach", "Raum", "Lehrer", "Info"]
#tableHeaders = ["ID", "Datum", "Kurs", "Stunde", "Fach", "Raum", "Lehrer", "Info"] # test

# config setup: every known key defaults to False, then config.json (if any)
# overrides the whole dict.
config = {}
configNames = ["Kurs", "History", "Isolate", "width", "height"]
for i in configNames:
    config[i] = False
if os.path.exists('config.json'):
    config = json.loads(open('config.json', 'r').read())
# NOTE(review): without config.json, width/height are False here and fall
# back to the 600x150 minimum below — confirm intended.
width = config['width']
height = config['height']
if width < 600:
    width = 600
if height < 150:
    height = 150
# Round the window size down to a multiple of 50 (one table cell is 50px).
if (width / 50) != (width // 50):
    width = (width // 50) * 50
if (height / 50) != (height // 50):
    height = (height // 50) * 50
# Rows available between the 50px header row and the 50px navigation bar.
amountRows = (height - 100) // 50

# Tkinter GUI
root = Tk()
root.iconbitmap("icon.ico")
root.title("Vertretungsplan - BBS II Emden //./ by zlyfer! 2018")
root.geometry('%sx%s+0+0' % (width, height))
root.configure(background='#257BF4')
def getVplan():
    """Fetch the substitution plan from the zlyfer.net API as a dict."""
    url = "https://zlyfer.net/vertretungsplan/api/api.php?interface=false&vshistory=false"
    # 'History' swaps the vshistory query parameter for vsnormal —
    # presumably toggles inclusion of past entries; TODO confirm with the API.
    if config['History']:
        url = url.replace('vshistory', 'vsnormal')
    # Only filter server-side by course when Isolate is on and a course is set.
    if config['Isolate']:
        if config['Kurs']:
            url += "&Kurs=%s" % (config['Kurs'])
    vplan = requests.get(url).text
    vplan = json.loads(vplan)
    return (vplan)
def tableHeaderGen():
    """Render the column captions into the top 50px row of the window."""
    for column, caption in enumerate(tableHeaders):
        header_cell = Label(
            root,
            text=caption,
            fg="#fff",
            bg="#257BF4",
            font="Helvetica 16 bold"
        )
        # Each column gets an equal share of the window width.
        header_cell.place(
            x=column * width / len(tableHeaders),
            y=0,
            width=width / len(tableHeaders),
            height=50
        )
    root.update()
    return
def tableContentGen():
    """Render the substitution entries of the current page into the grid."""
    global vplan
    global tableContent
    # Fetch the plan lazily on first use.
    if vplan == None:
        vplan = getVplan()
    # One column per header, one row per entry of the current page.
    for header_text in tableHeaders:
        for counter in range((page * amountRows), ((page * amountRows) + amountRows)):
            if counter < len(vplan['vertretungen']):
                entry = vplan['vertretungen'][counter]
                # NOTE(review): always true inside the range above;
                # presumably a leftover guard.
                if counter <= (page * amountRows) + amountRows:
                    if header_text == "Datum":
                        # "datum" appears to be "<date>, <rest> <time>"; show
                        # "<date> - <rest>" plus the time with the last five
                        # characters cut off — TODO confirm API format.
                        text = entry[header_text.lower()].split(" ")[0].replace(',', ' - ') + entry[header_text.lower()].split(" ")[1][:-5]
                    elif header_text == "ID": # test
                        text = str(counter+1) # test
                    else:
                        text = entry[header_text.lower()]
                    # Default colors; invert them to highlight the user's own
                    # course (only when Isolate is off, i.e. all courses shown).
                    bgcolor = "#3D8AF5"
                    fgcolor = "#fff"
                    if not config['Isolate']:
                        if entry['kurs'] == config['Kurs']:
                            bgcolor = "#fff"
                            fgcolor = "#3D8AF5"
                    block = Label(
                        root,
                        text=text,
                        fg=fgcolor,
                        bg=bgcolor,
                        font="Helvetica 12 bold"
                    )
                    # Remember the widget so clearTableContent() can remove it.
                    tableContent.append(block)
                    block.place(
                        x=tableHeaders.index(header_text)*width/len(tableHeaders),
                        y=((counter+1) - (page * amountRows)) * 50,
                        width=width/len(tableHeaders),
                        height=50
                    )
    root.update()
    return
def placeLoadingLabel():
    """Cover the table area with a "Laden.." placeholder; return the Label."""
    loading = Label(
        root,
        text="Laden..",
        fg="#fff",
        bg="#3D8AF5",
        font="Helvetica 20 bold"
    )
    # Everything below the 50px header row.
    loading.place(x=0, y=50, width=width, height=height - 50)
    return loading
def removeLoadingLabel(label):
    """Remove the placeholder created by placeLoadingLabel()."""
    label.destroy()
def clearTableContent():
    """Destroy the cell widgets of the current page and forget them.

    Bug fix: the original never emptied ``tableContent``, so the list grew
    without bound and every page change re-iterated already-destroyed widgets.
    """
    for widget in tableContent:
        widget.destroy()
    tableContent.clear()
def navigationLabelGen():
    """(Re)draw the "current / total" page indicator between the two arrows."""
    global navigationLabel
    if navigationLabel != None:
        navigationLabel.destroy()
    # Total pages = ceil(number of entries / rows per page), spelled out as
    # the exact/inexact division cases.
    labelPage = 0
    if (len(vplan['vertretungen']) // amountRows) == (len(vplan['vertretungen']) / amountRows):
        labelPage = len(vplan['vertretungen']) // amountRows
    elif (len(vplan['vertretungen']) // amountRows) != (len(vplan['vertretungen']) / amountRows):
        labelPage = len(vplan['vertretungen']) // amountRows + 1
    navigationLabel = Label(
        root,
        text="%s / %s" % (page + 1, labelPage),
        fg="#fff",
        bg="#5598F7",
        font="Helvetica 20 bold"
    )
    # Middle fifth of the bottom bar (the arrows take 2/5 on each side).
    navigationLabel.place(
        x=(width/5)*2,
        y=height-50,
        width=width/5,
        height=50
    )
    return
def navigationArrows(arrow):
    """Advance ("next") or rewind ("prev") the page, wrapping at both ends.

    Redraws label and table only when the page actually changed.
    """
    global page
    newpage = page  # remember old value to detect a change below
    if arrow == "next":
        # Entry count divisible by page size: last page index is total - 1.
        if (len(vplan['vertretungen']) // amountRows) == (len(vplan['vertretungen']) / amountRows):
            if page + 1 == (len(vplan['vertretungen']) // amountRows):
                page = 0  # wrap around to the first page
            elif (page * amountRows + amountRows) < len(vplan['vertretungen']):
                page += 1
        # Not divisible: there is an extra, partially filled last page.
        elif (len(vplan['vertretungen']) // amountRows) != (len(vplan['vertretungen']) / amountRows):
            if page == (len(vplan['vertretungen']) // amountRows):
                page = 0
            elif ((page * amountRows) + amountRows) < len(vplan['vertretungen']):
                page += 1
    elif arrow == "prev":
        if page > 0:
            page -= 1
        elif page == 0:
            # Wrap backwards to the last page (same divisibility split).
            if (len(vplan['vertretungen']) // amountRows) == (len(vplan['vertretungen']) / amountRows):
                page = (len(vplan['vertretungen']) // amountRows) - 1
            else:
                page = (len(vplan['vertretungen']) // amountRows)
    if newpage != page:
        navigationLabelGen()
        clearTableContent()
        tableContentGen()
    return
def navigationArrowsNext():
    """Button callback: go forward one page."""
    navigationArrows("next")
def navigationArrowsPrev():
    """Button callback: go back one page."""
    navigationArrows("prev")
def hoverLeftArrowEnter(event):
    """Hover handler: highlight the left (previous-page) arrow."""
    leftArrow.configure(bg="#3D8AF5")
def hoverLeftArrowLeave(event):
    """Hover handler: restore the left arrow's normal background."""
    leftArrow.configure(bg="#257BF4")
def hoverRightArrowEnter(event):
    """Hover handler: highlight the right (next-page) arrow."""
    # Bug fix: the original declared ``global leftArrow`` (copy-paste from the
    # left-arrow handlers) although it modifies rightArrow.
    global rightArrow
    rightArrow['bg'] = "#3D8AF5"
    return
def hoverRightArrowLeave(event):
    """Hover handler: restore the right arrow's normal background."""
    # Bug fix: the original declared ``global leftArrow`` (copy-paste from the
    # left-arrow handlers) although it modifies rightArrow.
    global rightArrow
    rightArrow['bg'] = "#257BF4"
    return
def navigationArrowsGen():
    """Create the previous/next page buttons along the bottom edge.

    Each arrow occupies the outer 2/5 of the bottom bar; the middle fifth is
    left for the page indicator drawn by navigationLabelGen().
    """
    global leftArrow
    global rightArrow
    leftArrow = Button(
        root,
        text="<",
        fg="#fff",
        bg="#257BF4",
        activeforeground="#fff",
        activebackground="#5598F7",
        font="Helvetica 20 bold",
        borderwidth=0,
        command=navigationArrowsPrev
    )
    leftArrow.place(
        x=0,
        y=height-50,
        width=(width/5)*2,
        height=50
    )
    rightArrow = Button(
        root,
        text=">",
        fg="#fff",
        bg="#257BF4",
        activeforeground="#fff",
        activebackground="#5598F7",
        font="Helvetica 20 bold",
        borderwidth=0,
        command=navigationArrowsNext
    )
    rightArrow.place(
        x=(width/5)*3,
        y=height-50,
        width=(width/5)*2,
        height=50
    )
    return
# Build the UI: show a placeholder while the plan is fetched and rendered.
loadingLabel = placeLoadingLabel()
tableHeaderGen()
tableContentGen()
navigationArrowsGen()
navigationLabelGen()
removeLoadingLabel(loadingLabel)
# Hover highlighting for the two navigation arrows.
leftArrow.bind('<Enter>', hoverLeftArrowEnter)
leftArrow.bind('<Leave>', hoverLeftArrowLeave)
rightArrow.bind('<Enter>', hoverRightArrowEnter)
rightArrow.bind('<Leave>', hoverRightArrowLeave)
root.mainloop()
| 4,584 | 0 | 341 |
63a798a89b68173775b421c9846d23031e773480 | 885 | py | Python | karabo/simulation/station.py | i4Ds/Karabo-Pipeline | c9a7f120b08b56af93bb953e284b33c107fd865c | [
"MIT"
] | null | null | null | karabo/simulation/station.py | i4Ds/Karabo-Pipeline | c9a7f120b08b56af93bb953e284b33c107fd865c | [
"MIT"
] | 42 | 2022-01-27T14:12:46.000Z | 2022-03-31T14:21:26.000Z | karabo/simulation/station.py | i4Ds/Karabo-Pipeline | c9a7f120b08b56af93bb953e284b33c107fd865c | [
"MIT"
] | null | null | null | from karabo.simulation.coordinate_helper import east_north_to_long_lat
from karabo.simulation.east_north_coordinate import EastNorthCoordinate
| 38.478261 | 101 | 0.683616 | from karabo.simulation.coordinate_helper import east_north_to_long_lat
from karabo.simulation.east_north_coordinate import EastNorthCoordinate
class Station:
    """A telescope station: a set of antennas placed relative to the telescope centre."""

    def __init__(self, position: EastNorthCoordinate,
                 parent_longitude: float = 0,
                 parent_latitude: float = 0,
                 parent_altitude: float = 0):
        """
        :param position: Position of station in relation to the telescope.png centre
        :param parent_longitude: longitude of the parent telescope centre
        :param parent_latitude: latitude of the parent telescope centre
        :param parent_altitude: altitude of the parent telescope centre
            (currently unused; the station altitude is taken from position.z)
        """
        self.position: EastNorthCoordinate = position
        # Bug fix: the original annotation ``[EastNorthCoordinate]`` was a
        # list *literal* evaluated at runtime, not a type; annotate as a plain
        # list and document the element type instead.
        self.antennas: list = []  # elements are EastNorthCoordinate
        long, lat = east_north_to_long_lat(position.x, position.y, parent_longitude, parent_latitude)
        self.longitude: float = long
        self.latitude: float = lat
        self.altitude: float = position.z

    def add_station_antenna(self, antenna: EastNorthCoordinate):
        """Register an antenna position (east/north offsets) for this station."""
        self.antennas.append(antenna)
| 77 | 642 | 23 |
9c665169a686b2b7b6718c895e3cab0b8a5b858a | 3,386 | py | Python | spec2model/file_manager.py | vsoch/map2model | 4d5c187ab660da5d3fe088b49f92c298b9fd9a7b | [
"MIT"
] | null | null | null | spec2model/file_manager.py | vsoch/map2model | 4d5c187ab660da5d3fe088b49f92c298b9fd9a7b | [
"MIT"
] | null | null | null | spec2model/file_manager.py | vsoch/map2model | 4d5c187ab660da5d3fe088b49f92c298b9fd9a7b | [
"MIT"
] | null | null | null | from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
import spec2model.config_manager as yml_manager
config_file_path = 'spec2model/configuration.yml'
| 40.795181 | 108 | 0.682812 | from pydrive.auth import GoogleAuth
from pydrive.drive import GoogleDrive
import spec2model.config_manager as yml_manager
config_file_path = 'spec2model/configuration.yml'
class FolderDigger:
    """Locates BioSchemas mapping files inside a Google Drive folder via pydrive."""

    # Class-level placeholders overwritten per-instance in __init__.
    gauth = "This variable will have the Google Authorization file"
    specs_id = '0Bw_p-HKWUjHoNThZOWNKbGhOODg'
    drive = "This variable will be the Google drive's object"
    yml_config = ''

    def __init__(self):
        """Authenticate against Google Drive and load the YAML config helper."""
        creds_path="spec2model/mycreds.txt"
        self.gauth = GoogleAuth()
        # Try to load saved client credentials
        self.gauth.LoadCredentialsFile(creds_path)
        if self.gauth.credentials is None:
            # Authenticate if they're not there
            self.gauth.LocalWebserverAuth()
        elif self.gauth.access_token_expired:
            # Refresh them if expired
            self.gauth.Refresh()
        else:
            # Initialize the saved creds
            self.gauth.Authorize()
        # Save the current credentials to a file
        self.gauth.SaveCredentialsFile(creds_path)
        # NOTE(review): duplicated call below — saving twice is redundant.
        # Save the current credentials to a file
        self.gauth.SaveCredentialsFile(creds_path)
        self.drive = GoogleDrive(self.gauth)
        #This is the id of the folder Specification
        self.specs_id = '0Bw_p-HKWUjHoNThZOWNKbGhOODg'
        self.specs_list = {}
        self.yml_config = yml_manager.YamlIO()

    def set_spec_file_id(self, file_id):
        # Override the Drive folder id to dig through.
        self.specs_id=file_id

    def __get_spec_folder_files(self):
        """List the non-trashed files directly inside the specs folder."""
        file_list = self.drive.ListFile({'q':"'"+self.specs_id+"' in parents and trashed =false"}).GetList()
        return file_list

    def __get_spec_folder_files_by_id(self, folder_id):
        """List the non-trashed files directly inside an arbitrary folder."""
        file_list = self.drive.ListFile({'q':"'"+folder_id+"' in parents and trashed =false"}).GetList()
        return file_list

    def __get_gfolder_id(self, current_cfg_yml, spec_folder_files):
        """Return the Drive id of the folder named in the config entry, or ''."""
        current_spec_g_folder = current_cfg_yml['g_folder']
        for folder_file in spec_folder_files:
            if folder_file['title']==current_spec_g_folder:
                return folder_file['id']
        return ''

    def __get_gfile_dic(self, current_cfg_yml, folder_id):
        """Return the Drive file dict for the configured mapping file, or {}."""
        folder_files=self.__get_spec_folder_files_by_id(folder_id)
        current_spec_g_file = current_cfg_yml['g_mapping_file']
        for folder_file in folder_files:
            if folder_file['title']==current_spec_g_file:
                return folder_file
        return {}

    def __get_bsc_specs(self, spec_config, spec_folder_files):
        """Map each config entry's mapping-file Drive id to the (augmented) entry."""
        specs_list = {}
        for current_config in spec_config:
            print("Searching %s mapping file." % current_config['name'])
            spec_folder_id = self.__get_gfolder_id(current_config, spec_folder_files)
            spec_file_dic = self.__get_gfile_dic(current_config, spec_folder_id)
            # NOTE(review): raises KeyError when the mapping file is missing
            # (empty dict from __get_gfile_dic) — confirm intended fail-fast.
            current_config['spec_mapping_url'] = spec_file_dic['alternateLink']
            specs_list[spec_file_dic['id']] = current_config
        return specs_list

    def get_specification_list(self):
        """Read the YAML configuration and resolve every spec's mapping file."""
        print("Reading Configuration file.")
        self.yml_config.set_yml_path(config_file_path)
        spec_config = self.yml_config.get_spec_yml_config()
        spec_folder_files = self.__get_spec_folder_files()
        all_bsc_specs=self.__get_bsc_specs(spec_config, spec_folder_files)
        print("All mapping files obtained.")
        return all_bsc_specs
| 2,779 | 411 | 23 |
6f303a9a3b31a25b5dc723634c5d2449fbf6c36e | 2,581 | py | Python | postprocessing/check_budgets_balance/futurizedReadBinaryData.py | larson-group/clubb_release | b4d671e3e238dbe00752c0dead6a0d4f9897350a | [
"Intel",
"Unlicense",
"NetCDF"
] | null | null | null | postprocessing/check_budgets_balance/futurizedReadBinaryData.py | larson-group/clubb_release | b4d671e3e238dbe00752c0dead6a0d4f9897350a | [
"Intel",
"Unlicense",
"NetCDF"
] | null | null | null | postprocessing/check_budgets_balance/futurizedReadBinaryData.py | larson-group/clubb_release | b4d671e3e238dbe00752c0dead6a0d4f9897350a | [
"Intel",
"Unlicense",
"NetCDF"
] | 1 | 2022-01-28T22:22:04.000Z | 2022-01-28T22:22:04.000Z | #! /usr/bin/python3
# Author: Cavyn VonDeylen
# Date: August 2010
# Larson-Group UWM
# Updated to python 3 by Tyler Cernik
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import struct # Handles binary data
#--------------------------------------------------------------------------------------------------
def readGradsData(fileName, numLevels, begTime, endTime, varNum, numVars):
    """
    Reads a GrADS *.dat file and obtains a single-time or time-averaged profile.

    Input: fileName: A GrADS *.dat file
           numLevels: Number of z levels in profile
           begTime: Iteration to start averaging at (1-based, inclusive)
           endTime: Iteration to end averaging at (1-based, inclusive)
           varNum: Which variable to read (see .ctl file)
           numVars: Total number of variables in grads file (see .ctl file)
    Output: list of numLevels floats averaged over begTime..endTime.

    Bug fixes versus the original:
      * the time loop stopped one iteration early (``time >= endTime``) while
        still dividing by the inclusive interval length;
      * ``//`` floored the float averages (futurize artifact) — true division
        is required.
    """
    timeInterval = (endTime - begTime) + 1
    avgField = [0.0] * numLevels
    with open(fileName, "rb") as dataFile:
        # Inclusive range matches timeInterval iterations.
        for time in range(begTime, endTime + 1):
            # Each value is a 4-byte float; one time step stores numVars
            # variables of numLevels values each, consecutively.
            byte_position = 4 * ((varNum - 1) * numLevels
                                 + numVars * numLevels * (time - 1))
            dataFile.seek(byte_position)
            for zLevel in range(numLevels):
                binaryData = dataFile.read(4)
                avgField[zLevel] += struct.unpack("f", binaryData)[0]
    # True division: flooring would corrupt the averages.
    return [value / timeInterval for value in avgField]
#--------------------------------------------------------------------------------------------------
# Allows this module to be run as a script
if __name__ == "__main__":
    import sys

    # If wrong arguments were given, print a helpful message
    if len(sys.argv) != 7:
        print('Arguments must be: filename z_levels beg_time end_time var_number number_vars')
        sys.exit(0)

    # Bug fix: the original called readNetcdfData(), which is not defined in
    # this module; the reader defined above is readGradsData().
    print(readGradsData(sys.argv[1], int(sys.argv[2]), int(sys.argv[3]), int(sys.argv[4]),
                        int(sys.argv[5]), int(sys.argv[6])))
| 34.878378 | 99 | 0.588532 | #! /usr/bin/python3
# Author: Cavyn VonDeylen
# Date: August 2010
# Larson-Group UWM
# Updated to python 3 by Tyler Cernik
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import struct # Handles binary data
#--------------------------------------------------------------------------------------------------
def readGradsData(fileName, numLevels, begTime, endTime, varNum, numVars):
    """
    Reads a GrADS *.dat file and obtains a single-time or time-averaged profile.

    Input: fileName: A GrADS *.dat file
           numLevels: Number of z levels in profile
           begTime: Iteration to start averaging at (1-based, inclusive)
           endTime: Iteration to end averaging at (1-based, inclusive)
           varNum: Which variable to read (see .ctl file)
           numVars: Total number of variables in grads file (see .ctl file)
    Output: list of numLevels floats averaged over begTime..endTime.

    Bug fixes versus the original:
      * the time loop stopped one iteration early (``time >= endTime``) while
        still dividing by the inclusive interval length;
      * ``//`` floored the float averages (futurize artifact) — true division
        is required.
    """
    timeInterval = (endTime - begTime) + 1
    avgField = [0.0] * numLevels
    with open(fileName, "rb") as dataFile:
        # Inclusive range matches timeInterval iterations.
        for time in range(begTime, endTime + 1):
            # Each value is a 4-byte float; one time step stores numVars
            # variables of numLevels values each, consecutively.
            byte_position = 4 * ((varNum - 1) * numLevels
                                 + numVars * numLevels * (time - 1))
            dataFile.seek(byte_position)
            for zLevel in range(numLevels):
                binaryData = dataFile.read(4)
                avgField[zLevel] += struct.unpack("f", binaryData)[0]
    # True division: flooring would corrupt the averages.
    return [value / timeInterval for value in avgField]
#--------------------------------------------------------------------------------------------------
# Allows this module to be run as a script
if __name__ == "__main__":
    import sys

    # If wrong arguments were given, print a helpful message
    if len(sys.argv) != 7:
        print('Arguments must be: filename z_levels beg_time end_time var_number number_vars')
        sys.exit(0)

    # Bug fix: the original called readNetcdfData(), which is not defined in
    # this module; the reader defined above is readGradsData().
    print(readGradsData(sys.argv[1], int(sys.argv[2]), int(sys.argv[3]), int(sys.argv[4]),
                        int(sys.argv[5]), int(sys.argv[6])))
| 0 | 0 | 0 |