hexsha stringlengths 40 40 | size int64 4 1.02M | ext stringclasses 8 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 4 209 | max_stars_repo_name stringlengths 5 121 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 209 | max_issues_repo_name stringlengths 5 121 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 209 | max_forks_repo_name stringlengths 5 121 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 1.02M | avg_line_length float64 1.07 66.1k | max_line_length int64 4 266k | alphanum_fraction float64 0.01 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
959646eef9433e3e87420a73eb27d8ede2fa91e0 | 1,460 | py | Python | python/dataingest/grammar/dmo/python_directory_loader.py | jiportilla/ontology | 8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40 | [
"MIT"
] | null | null | null | python/dataingest/grammar/dmo/python_directory_loader.py | jiportilla/ontology | 8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40 | [
"MIT"
] | null | null | null | python/dataingest/grammar/dmo/python_directory_loader.py | jiportilla/ontology | 8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import os
from typing import Optional
from base import BaseObject
class PythonDirectoryLoader(BaseObject):
    """ Extract Python File from a Directory Path """

    def __init__(self,
                 directory_path: str,
                 is_debug: bool = False):
        """
        Created:
            24-Dec-2019
            craig.trim@ibm.com
            *   refactored from existing code
                https://github.ibm.com/GTS-CDO/unstructured-analytics/issues/1637#issuecomment-16802191
        :param directory_path:
            a directory path to python code
        :param is_debug:
            when True, log a warning if no files are found
        """
        BaseObject.__init__(self, __name__)
        self._is_debug = is_debug
        self._directory_path = directory_path

    def _load_files(self) -> list:
        """Recursively collect the paths of all '*.py' files under the directory."""
        results = []
        for dirpath, _, files in os.walk(self._directory_path):
            for name in files:
                if name.lower().endswith(".py"):
                    results.append(os.path.join(dirpath, name))
        return results

    def process(self) -> Optional[list]:
        """Return the python file paths found under the directory.
        '__init__.py' files are excluded from the result.
        :return:
            a non-empty list of file paths, or None when nothing was found
        """
        files = [path for path in self._load_files() if "__init__.py" not in path]
        if not files:
            # Bug fix: previously None was returned only when is_debug was set,
            # so the empty-result contract silently depended on the debug flag
            # (non-debug callers received an empty list instead).  The
            # Optional[list] contract is now honoured consistently; only the
            # warning remains debug-only.
            if self._is_debug:
                self.logger.warning('\n'.join([
                    "No Files Found",
                    f"\tDirectory: {self._directory_path}"]))
            return None
        return files
| 27.54717 | 103 | 0.574658 |
84d174cd455ea84f97632c778603bbe141b404b9 | 1,580 | py | Python | test/resource_test.py | IlluminatiFish/DotHub-DiscordBot | e34ac76e14fa7eaa09c43346b5505fd2371ec751 | [
"MIT"
] | null | null | null | test/resource_test.py | IlluminatiFish/DotHub-DiscordBot | e34ac76e14fa7eaa09c43346b5505fd2371ec751 | [
"MIT"
] | null | null | null | test/resource_test.py | IlluminatiFish/DotHub-DiscordBot | e34ac76e14fa7eaa09c43346b5505fd2371ec751 | [
"MIT"
] | null | null | null | from models.resource_item import ResourceItem
from models.resource import Resource
import unittest
class ResourceTest(unittest.TestCase):
    """Unit tests for Resource rendering: title, description, thumbnail, annotation, color."""

    # Shared fixtures, built once when the class body is evaluated.
    item_ar = ResourceItem("title1", "https://google.com.ar")
    item_cl = ResourceItem("title2", "https://google.com.cl", mini_description="google from chile")
    res = Resource("title", "description", [item_ar, item_cl])

    def test_title(self):
        """The title is wrapped in markdown bold markers."""
        self.assertEqual("**title**", self.res.title)

    def test_description(self):
        """The description lists every item as a markdown link, with mini descriptions in parentheses."""
        expected = "description\n\n📚 » [title1](https://google.com.ar)\n\n📚 » [title2](https://google.com.cl) (google from chile)\n\n"
        self.assertEqual(self.res.description, expected)

    def test_thumbnail_url(self):
        """A thumbnail_url argument is exposed through the embed's thumbnail."""
        with_thumbnail = Resource("title", "description", [self.item_ar, self.item_cl], thumbnail_url="https://google.com/image.jpg")
        self.assertEqual("https://google.com/image.jpg", with_thumbnail.thumbnail.url)

    def test_annotation(self):
        """An annotation is appended after the item list in the description."""
        with_annotation = Resource("title", "description", [self.item_ar, self.item_cl], annotation="an annotation")
        expected = "description\n\n📚 » [title1](https://google.com.ar)\n\n📚 » [title2](https://google.com.cl) (google from chile)\n\nan annotation"
        self.assertEqual(with_annotation.description, expected)

    def test_color(self):
        """The embed color defaults to '46079'."""
        self.assertEqual(self.res.color, '46079')
# Allow running this test module directly: `python resource_test.py`.
if __name__ == '__main__':
    unittest.main() | 47.878788 | 150 | 0.66519 |
cd307db579ec2a192a2ffdfc16114caa0097c4d7 | 522 | py | Python | h2o/logo.py | H2O-YT/manim | ae65662823a95b1110536fae34e06d5c540cc424 | [
"MIT"
] | null | null | null | h2o/logo.py | H2O-YT/manim | ae65662823a95b1110536fae34e06d5c540cc424 | [
"MIT"
] | null | null | null | h2o/logo.py | H2O-YT/manim | ae65662823a95b1110536fae34e06d5c540cc424 | [
"MIT"
] | null | null | null | from manimlib.constants import WHITE
from manimlib.mobject.geometry import Circle
from manimlib.mobject.svg.tex_mobject import Tex
from manimlib.mobject.types.vectorized_mobject import VMobject
MY_GREEN = "#419425"
class H2OLogo(VMobject):
    """Channel logo: the TeX text 'H2O' placed inside a green-filled surrounding shape."""
    def __init__(self, surround_class=Circle, stroke_opacity=1.0):
        # surround_class: any mobject class usable as the outer frame (Circle by default).
        super().__init__()
        name = Tex("\\mathrm{H_2O}")
        # White stroke at the requested opacity, filled solid with the channel green.
        surround = surround_class().set_stroke(WHITE, opacity=stroke_opacity).set_fill(MY_GREEN, opacity=1.0)
        self.add(surround, name) | 32.625 | 109 | 0.739464 |
8cb34be927f581fd23a76e6ef201f5c7e899157d | 6,933 | py | Python | homeassistant/components/dlna_dms/config_flow.py | mtarjoianu/core | 44e9146463ac505eb3d1c0651ad126cb25c28a54 | [
"Apache-2.0"
] | 30,023 | 2016-04-13T10:17:53.000Z | 2020-03-02T12:56:31.000Z | homeassistant/components/dlna_dms/config_flow.py | mtarjoianu/core | 44e9146463ac505eb3d1c0651ad126cb25c28a54 | [
"Apache-2.0"
] | 24,710 | 2016-04-13T08:27:26.000Z | 2020-03-02T12:59:13.000Z | homeassistant/components/dlna_dms/config_flow.py | mtarjoianu/core | 44e9146463ac505eb3d1c0651ad126cb25c28a54 | [
"Apache-2.0"
] | 11,956 | 2016-04-13T18:42:31.000Z | 2020-03-02T09:32:12.000Z | """Config flow for DLNA DMS."""
from __future__ import annotations
import logging
from pprint import pformat
from typing import Any, cast
from urllib.parse import urlparse
from async_upnp_client.profiles.dlna import DmsDevice
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.components import ssdp
from homeassistant.const import CONF_DEVICE_ID, CONF_HOST, CONF_URL
from homeassistant.data_entry_flow import AbortFlow, FlowResult
from .const import CONF_SOURCE_ID, CONFIG_VERSION, DEFAULT_NAME, DOMAIN
from .util import generate_source_id
LOGGER = logging.getLogger(__name__)
class DlnaDmsFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
    """Handle a DLNA DMS config flow.
    The Unique Service Name (USN) of the DMS device is used as the unique_id for
    config entries and for entities. This USN may differ from the root USN if
    the DMS is an embedded device.
    """
    VERSION = CONFIG_VERSION
    def __init__(self) -> None:
        """Initialize flow."""
        # Discoveries keyed by hostname, populated by async_step_user.
        self._discoveries: dict[str, ssdp.SsdpServiceInfo] = {}
        self._location: str | None = None
        self._usn: str | None = None
        self._name: str | None = None
    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Handle a flow initialized by the user by listing unconfigured devices."""
        LOGGER.debug("async_step_user: user_input: %s", user_input)
        if user_input is not None and (host := user_input.get(CONF_HOST)):
            # User has chosen a device
            discovery = self._discoveries[host]
            await self._async_parse_discovery(discovery, raise_on_progress=False)
            return self._create_entry()
        if not (discoveries := await self._async_get_discoveries()):
            # Nothing found, abort configuration
            return self.async_abort(reason="no_devices_found")
        # Re-key the remaining discoveries by hostname for the selection form.
        self._discoveries = {
            cast(str, urlparse(discovery.ssdp_location).hostname): discovery
            for discovery in discoveries
        }
        # Present each option as "<friendly name> (<host>)".
        discovery_choices = {
            host: f"{discovery.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME)} ({host})"
            for host, discovery in self._discoveries.items()
        }
        data_schema = vol.Schema({vol.Optional(CONF_HOST): vol.In(discovery_choices)})
        return self.async_show_form(step_id="user", data_schema=data_schema)
    async def async_step_ssdp(self, discovery_info: ssdp.SsdpServiceInfo) -> FlowResult:
        """Handle a flow initialized by SSDP discovery."""
        LOGGER.debug("async_step_ssdp: discovery_info %s", pformat(discovery_info))
        await self._async_parse_discovery(discovery_info)
        # Abort if the device doesn't support all services required for a DmsDevice.
        # Use the discovery_info instead of DmsDevice.is_profile_device to avoid
        # contacting the device again.
        discovery_service_list = discovery_info.upnp.get(ssdp.ATTR_UPNP_SERVICE_LIST)
        if not discovery_service_list:
            return self.async_abort(reason="not_dms")
        services = discovery_service_list.get("service")
        if not services:
            discovery_service_ids: set[str] = set()
        elif isinstance(services, list):
            discovery_service_ids = {service.get("serviceId") for service in services}
        else:
            # Only one service defined (etree_to_dict failed to make a list)
            discovery_service_ids = {services.get("serviceId")}
        if not DmsDevice.SERVICE_IDS.issubset(discovery_service_ids):
            return self.async_abort(reason="not_dms")
        # Abort if another config entry has the same location, in case the
        # device doesn't have a static and unique UDN (breaking the UPnP spec).
        self._async_abort_entries_match({CONF_URL: self._location})
        self.context["title_placeholders"] = {"name": self._name}
        return await self.async_step_confirm()
    async def async_step_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> FlowResult:
        """Allow the user to confirm adding the device."""
        if user_input is not None:
            return self._create_entry()
        self._set_confirm_only()
        return self.async_show_form(step_id="confirm")
    def _create_entry(self) -> FlowResult:
        """Create a config entry, assuming all required information is now known."""
        LOGGER.debug(
            "_create_entry: name: %s, location: %s, USN: %s",
            self._name,
            self._location,
            self._usn,
        )
        # These were all filled in by _async_parse_discovery.
        assert self._name
        assert self._location
        assert self._usn
        data = {
            CONF_URL: self._location,
            CONF_DEVICE_ID: self._usn,
            CONF_SOURCE_ID: generate_source_id(self.hass, self._name),
        }
        return self.async_create_entry(title=self._name, data=data)
    async def _async_parse_discovery(
        self, discovery_info: ssdp.SsdpServiceInfo, raise_on_progress: bool = True
    ) -> None:
        """Get required details from an SSDP discovery.
        Aborts if a device matching the SSDP USN has already been configured.
        """
        LOGGER.debug(
            "_async_parse_discovery: location: %s, USN: %s",
            discovery_info.ssdp_location,
            discovery_info.ssdp_usn,
        )
        if not discovery_info.ssdp_location or not discovery_info.ssdp_usn:
            raise AbortFlow("bad_ssdp")
        # Keep the first reported location; the USN is always refreshed.
        if not self._location:
            self._location = discovery_info.ssdp_location
        self._usn = discovery_info.ssdp_usn
        await self.async_set_unique_id(self._usn, raise_on_progress=raise_on_progress)
        # Abort if already configured, but update the last-known location
        self._abort_if_unique_id_configured(
            updates={CONF_URL: self._location}, reload_on_update=False
        )
        # Prefer the advertised friendly name, then the host, then a default.
        self._name = (
            discovery_info.upnp.get(ssdp.ATTR_UPNP_FRIENDLY_NAME)
            or urlparse(self._location).hostname
            or DEFAULT_NAME
        )
    async def _async_get_discoveries(self) -> list[ssdp.SsdpServiceInfo]:
        """Get list of unconfigured DLNA devices discovered by SSDP."""
        # Get all compatible devices from ssdp's cache
        discoveries: list[ssdp.SsdpServiceInfo] = []
        for udn_st in DmsDevice.DEVICE_TYPES:
            st_discoveries = await ssdp.async_get_discovery_info_by_st(
                self.hass, udn_st
            )
            discoveries.extend(st_discoveries)
        # Filter out devices already configured
        current_unique_ids = {
            entry.unique_id
            for entry in self._async_current_entries(include_ignore=False)
        }
        discoveries = [
            disc for disc in discoveries if disc.ssdp_udn not in current_unique_ids
        ]
        return discoveries
| 37.885246 | 88 | 0.666234 |
ae59337680c83336bf379ae350966c9f09140115 | 343 | py | Python | art/migrations/0004_auto_20200627_1202.py | brayokenya/The-Louvre | 4c01d3aafe1cb41b521aa3b06b9988979d967857 | [
"MIT"
] | null | null | null | art/migrations/0004_auto_20200627_1202.py | brayokenya/The-Louvre | 4c01d3aafe1cb41b521aa3b06b9988979d967857 | [
"MIT"
] | 7 | 2021-03-30T13:54:23.000Z | 2021-09-22T19:19:10.000Z | art/migrations/0004_auto_20200627_1202.py | brayokenya/The-Louvre | 4c01d3aafe1cb41b521aa3b06b9988979d967857 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.7 on 2020-06-27 09:02
from django.db import migrations
class Migration(migrations.Migration):
    # Must be applied after the previous migration of the 'art' app.
    dependencies = [
        ('art', '0003_auto_20200626_1611'),
    ]
    # Set the 'pic' model's Meta ordering to descending pub_date
    # (no database schema change; Django state/metadata only).
    operations = [
        migrations.AlterModelOptions(
            name='pic',
            options={'ordering': ['-pub_date']},
        ),
    ]
| 19.055556 | 48 | 0.58309 |
e2c2f6f5c53fde5ee989aff4861d83530d7eebed | 6,706 | py | Python | swagger_client/api/transaction_summary_api.py | chbndrhnns/ahoi-client | 8bd25f541c05af17c82904fa250272514b7971f2 | [
"MIT"
] | null | null | null | swagger_client/api/transaction_summary_api.py | chbndrhnns/ahoi-client | 8bd25f541c05af17c82904fa250272514b7971f2 | [
"MIT"
] | null | null | null | swagger_client/api/transaction_summary_api.py | chbndrhnns/ahoi-client | 8bd25f541c05af17c82904fa250272514b7971f2 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
[AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) © 2016‐2017 Starfinanz - Ein Unternehmen der Finanz Informatik # noqa: E501
OpenAPI spec version: 2.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class TransactionSummaryApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """
    # NOTE(review): 'async' is used as a keyword argument name below.  'async'
    # became a reserved word in Python 3.7 (PEP 492), so this generated module
    # only parses on Python 2 / Python <= 3.6; newer swagger-codegen releases
    # renamed the parameter to 'async_req' -- confirm before upgrading Python.
    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
    def list_summary(self, access_id, account_id, **kwargs): # noqa: E501
        """List account summaries # noqa: E501
        Retrieve account summaries and provide a sum for incoming and outgoing transactions. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.list_summary(access_id, account_id, async=True)
        >>> result = thread.get()
        :param async bool
        :param int access_id: The **accessId** for which to list the summaries (required)
        :param int account_id: The **accountId** for which to list the summaries (required)
        :param int limit: Optional — limit the number of returned summary entries
        :param int offset: Optional — skip the first **offset** summary entries in the result
        :param str _from: Optional — only return summary entries later than **from**; an ISO8601 Date (2014-11-17)
        :param str to: Optional — only return summary entries prior or equal to **to**; an ISO8601 Date
        :return: list[MonthlySummary]
        If the method is called asynchronously,
        returns the request thread.
        """
        # Delegate to the *_with_http_info variant, returning only the payload
        # (not the full (data, status, headers) tuple) for synchronous calls.
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.list_summary_with_http_info(access_id, account_id, **kwargs) # noqa: E501
        else:
            (data) = self.list_summary_with_http_info(access_id, account_id, **kwargs) # noqa: E501
            return data
    def list_summary_with_http_info(self, access_id, account_id, **kwargs): # noqa: E501
        """List account summaries # noqa: E501
        Retrieve account summaries and provide a sum for incoming and outgoing transactions. # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.list_summary_with_http_info(access_id, account_id, async=True)
        >>> result = thread.get()
        :param async bool
        :param int access_id: The **accessId** for which to list the summaries (required)
        :param int account_id: The **accountId** for which to list the summaries (required)
        :param int limit: Optional — limit the number of returned summary entries
        :param int offset: Optional — skip the first **offset** summary entries in the result
        :param str _from: Optional — only return summary entries later than **from**; an ISO8601 Date (2014-11-17)
        :param str to: Optional — only return summary entries prior or equal to **to**; an ISO8601 Date
        :return: list[MonthlySummary]
        If the method is called asynchronously,
        returns the request thread.
        """
        # Names of all accepted parameters; locals() is captured so positional
        # and keyword arguments can be validated uniformly below.
        all_params = ['access_id', 'account_id', 'limit', 'offset', '_from', 'to'] # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # six.iteritems keeps Python 2 compatibility (see module imports).
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method list_summary" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'access_id' is set
        if ('access_id' not in params or
                params['access_id'] is None):
            raise ValueError("Missing the required parameter `access_id` when calling `list_summary`") # noqa: E501
        # verify the required parameter 'account_id' is set
        if ('account_id' not in params or
                params['account_id'] is None):
            raise ValueError("Missing the required parameter `account_id` when calling `list_summary`") # noqa: E501
        collection_formats = {}
        # Path placeholders substituted into the resource URL below.
        path_params = {}
        if 'access_id' in params:
            path_params['accessId'] = params['access_id'] # noqa: E501
        if 'account_id' in params:
            path_params['accountId'] = params['account_id'] # noqa: E501
        # Optional paging/date-range filters become query-string parameters.
        query_params = []
        if 'limit' in params:
            query_params.append(('limit', params['limit'])) # noqa: E501
        if 'offset' in params:
            query_params.append(('offset', params['offset'])) # noqa: E501
        if '_from' in params:
            query_params.append(('from', params['_from'])) # noqa: E501
        if 'to' in params:
            query_params.append(('to', params['to'])) # noqa: E501
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json']) # noqa: E501
        # Authentication setting
        auth_settings = ['oauth2'] # noqa: E501
        return self.api_client.call_api(
            '/accesses/{accessId}/accounts/{accountId}/transactionsummaries', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[MonthlySummary]', # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 43.545455 | 277 | 0.631077 |
18104e8c9b7732e46700415b2c404b46321e2022 | 663 | py | Python | data/train/python/18104e8c9b7732e46700415b2c404b46321e2022app.py | harshp8l/deep-learning-lang-detection | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | [
"MIT"
] | 84 | 2017-10-25T15:49:21.000Z | 2021-11-28T21:25:54.000Z | data/train/python/18104e8c9b7732e46700415b2c404b46321e2022app.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 5 | 2018-03-29T11:50:46.000Z | 2021-04-26T13:33:18.000Z | data/train/python/18104e8c9b7732e46700415b2c404b46321e2022app.py | vassalos/deep-learning-lang-detection | cbb00b3e81bed3a64553f9c6aa6138b2511e544e | [
"MIT"
] | 24 | 2017-11-22T08:31:00.000Z | 2022-03-27T01:22:31.000Z | import argparse
import json
import os
from urllib import urlencode
from cement.core import foundation, controller
from pulldb import interfaces
from pulldb import oauth
class BaseController(controller.CementBaseController):
    """Top-level cement controller: prints the application help text by default."""
    class Meta:
        # 'base' marks this as the application's root (default) controller.
        label = 'base'
    @controller.expose(aliases=['help'], aliases_only=True)
    def default(self):
        # Runs when no sub-command is given (or via the 'help' alias).
        self.app.args.print_help()
def run():
    """Build, wire up and run the pulldb cement application.

    Loads the pulldb interfaces and OAuth support before setup, and
    guarantees the application is closed even if setup/run raises.
    """
    app = foundation.CementApp(label='pulldb',
                               base_controller=BaseController)
    interfaces.load()
    oauth.load()
    try:
        app.setup()
        app.run()
    finally:
        # Bug fix: 'app.close' referenced the bound method without calling
        # it, so the application was never actually closed.
        app.close()
if __name__ == '__main__':
    # Bug fix: this module defines run(), not main(); calling main() raised
    # NameError when the script was executed directly.
    run()
| 20.71875 | 62 | 0.6546 |
ed2db70d6560ae6cc156e115595330c7fc8d8160 | 233 | py | Python | dsstock/stock/report/stock_raport/stock_raport.py | KKallas/Frappe-DS-Stock | 521714791faaf56ea0862a8f5de984e305d344cc | [
"MIT"
] | null | null | null | dsstock/stock/report/stock_raport/stock_raport.py | KKallas/Frappe-DS-Stock | 521714791faaf56ea0862a8f5de984e305d344cc | [
"MIT"
] | null | null | null | dsstock/stock/report/stock_raport/stock_raport.py | KKallas/Frappe-DS-Stock | 521714791faaf56ea0862a8f5de984e305d344cc | [
"MIT"
] | null | null | null | # Copyright (c) 2013, digitalsputnik and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
def execute(filters=None):
	"""Entry point for the 'Stock Raport' script report.

	:param filters: report filters supplied by the UI (currently unused)
	:return: a ``(columns, data)`` pair — both empty until the report is implemented
	"""
	columns = []
	data = []
	return columns, data
| 23.3 | 53 | 0.772532 |
ec65dad7582455afe47559c87a07351966139f6b | 3,282 | py | Python | examples/uv/UV_tf_singletask.py | cjgalvin/deepchem | 64993a129e7f0f78fed9500298b1828ac8a0757a | [
"MIT"
] | 3,782 | 2016-02-21T03:53:11.000Z | 2022-03-31T16:10:26.000Z | examples/uv/UV_tf_singletask.py | cjgalvin/deepchem | 64993a129e7f0f78fed9500298b1828ac8a0757a | [
"MIT"
] | 2,666 | 2016-02-11T01:54:54.000Z | 2022-03-31T11:14:33.000Z | examples/uv/UV_tf_singletask.py | cjgalvin/deepchem | 64993a129e7f0f78fed9500298b1828ac8a0757a | [
"MIT"
] | 1,597 | 2016-02-21T03:10:08.000Z | 2022-03-30T13:21:28.000Z | """
Script that trains Tensorflow Multitask models on UV dataset.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import numpy as np
import tempfile
import shutil
import deepchem as dc
###Load data###
# Datasets are sharded to disk in chunks of 2000 compounds; a single
# train/evaluate trial is run (raise num_trials for repeated runs).
shard_size = 2000
num_trials = 1
print("About to load UV data.")
UV_tasks, datasets, transformers = dc.molnet.load_uv(shard_size=shard_size)
train_dataset, valid_dataset, test_dataset = datasets
print("Number of compounds in train set")
print(len(train_dataset))
print("Number of compounds in validation set")
print(len(valid_dataset))
print("Number of compounds in test set")
print(len(test_dataset))
# Score with Pearson R^2, averaged across tasks.
metric = dc.metrics.Metric(dc.metrics.pearson_r2_score, task_averager=np.mean)
###Create model###
# Hyperparameters shared by every per-task model built below.
n_layers = 3
nb_epoch = 30
n_features = train_dataset.get_data_shape()[0]
def task_model_builder(m_dir):
  """Build a single-task DNN regressor (n_layers x 1000 units) logging to m_dir."""
  return dc.models.TensorflowMultitaskRegressor(
      n_tasks=1,
      n_features=n_features,
      logdir=m_dir,
      layer_sizes=[1000] * n_layers,
      dropouts=[.25] * n_layers,
      weight_init_stddevs=[.02] * n_layers,
      bias_init_consts=[1.] * n_layers,
      learning_rate=.0003,
      penalty=.0001,
      penalty_type="l2",
      optimizer="adam",
      batch_size=100)
all_results = []
for trial in range(num_trials):
  # One independent model per UV task, each created by task_model_builder.
  model = dc.models.SingletaskToMultitask(
      UV_tasks, task_model_builder, model_dir="UV_tf_singletask")
  print("Fitting Model")
  model.fit(train_dataset, nb_epoch=nb_epoch)
  print("Evaluating models")
  train_score, train_task_scores = model.evaluate(
      train_dataset, [metric], transformers, per_task_metrics=True)
  valid_score, valid_task_scores = model.evaluate(
      valid_dataset, [metric], transformers, per_task_metrics=True)
  test_score, test_task_scores = model.evaluate(
      test_dataset, [metric], transformers, per_task_metrics=True)
  all_results.append((train_score, train_task_scores, valid_score,
                      valid_task_scores, test_score, test_task_scores))
  print("----------------------------------------------------------------")
  print("Scores for trial %d" % trial)
  print("----------------------------------------------------------------")
  print("train_task_scores")
  print(train_task_scores)
  print("Mean Train score")
  print(train_score)
  print("valid_task_scores")
  print(valid_task_scores)
  print("Mean Validation score")
  print(valid_score)
  print("test_task_scores")
  print(test_task_scores)
  print("Mean Test score")
  print(test_score)
print("####################################################################")
# NOTE(review): this summary loop re-prints exactly what the trial loop above
# already printed per trial; it only adds value when num_trials > 1.
for trial in range(num_trials):
  (train_score, train_task_scores, valid_score, valid_task_scores, test_score,
   test_task_scores) = all_results[trial]
  print("----------------------------------------------------------------")
  print("Scores for trial %d" % trial)
  print("----------------------------------------------------------------")
  print("train_task_scores")
  print(train_task_scores)
  print("Mean Train score")
  print(train_score)
  print("valid_task_scores")
  print(valid_task_scores)
  print("Mean Validation score")
  print(valid_score)
  print("test_task_scores")
  print(test_task_scores)
  print("Mean Test score")
  print(test_score)
| 30.388889 | 78 | 0.666667 |
6fe32073febb378982f34b683e6ee4bd2a9def77 | 1,630 | py | Python | pbu/default_options.py | ilfrich/python-basic-utils | 195aa0226477e7c83bb68ca5459b538a937c9994 | [
"Apache-2.0"
] | null | null | null | pbu/default_options.py | ilfrich/python-basic-utils | 195aa0226477e7c83bb68ca5459b538a937c9994 | [
"Apache-2.0"
] | 2 | 2020-02-13T11:15:16.000Z | 2020-02-13T11:16:54.000Z | pbu/default_options.py | ilfrich/python-basic-utils | 195aa0226477e7c83bb68ca5459b538a937c9994 | [
"Apache-2.0"
] | null | null | null | from typing import Any, List, Sequence
def default_options(default: dict = {}, override: dict = None, allow_unknown_keys: bool = True):
    """
    Merge two option dictionaries without mutating either input.

    Values from ``override`` win over values from ``default``.  When
    ``allow_unknown_keys`` is False, override keys that have no default
    counterpart are dropped.

    :param default: the default dictionary containing fall-backs
    :param override: the custom options provided by a user
    :param allow_unknown_keys: include override keys absent from default
    :return: the merged dictionary, or ``default`` unchanged when either
             input is None
    """
    if default is None or override is None:
        return default
    merged = dict(default)
    for key, value in override.items():
        if allow_unknown_keys or key in default:
            merged[key] = value
    return merged
def default_value(value: Any, fallback: Any, disallowed: Sequence[Any] = (None,)) -> Any:
    """
    Checks whether the provided value is (by default) None or matches any other
    disallowed value, as provided. If the value is disallowed, the fallback
    will be returned.

    :param value: the value to check
    :param fallback: the fallback in case the check fails
    :param disallowed: the values to check against (membership tested with ==);
        callers may pass any sequence, e.g. a list
    :return: the value or the fallback, depending on the outcome of the check
    """
    # An immutable tuple default replaces the shared mutable list literal
    # ([None]) to rule out cross-call aliasing; behavior is unchanged.
    return fallback if value in disallowed else value
| 38.809524 | 117 | 0.706135 |
8490babeaa519c9697f4b3393c84c47cb7311a65 | 421 | py | Python | freecodecamp/brownie/FundMe/scripts/transact.py | People-Help-People/contract-collection | 003a481b8c7a8ed660f24098c4f7c5cdab4549dd | [
"MIT"
] | null | null | null | freecodecamp/brownie/FundMe/scripts/transact.py | People-Help-People/contract-collection | 003a481b8c7a8ed660f24098c4f7c5cdab4549dd | [
"MIT"
] | null | null | null | freecodecamp/brownie/FundMe/scripts/transact.py | People-Help-People/contract-collection | 003a481b8c7a8ed660f24098c4f7c5cdab4549dd | [
"MIT"
] | null | null | null | from brownie import accounts, FundMe
from scripts.helper import get_account
def fund():
    """Send the minimum entrance fee to the most recently deployed FundMe contract."""
    contract = FundMe[-1]
    sender = get_account()
    entrance_fee = contract.getEntranceFee()
    contract.fund({"from": sender, "value": entrance_fee})
def withdraw():
    """Withdraw the balance of the most recently deployed FundMe contract."""
    contract = FundMe[-1]
    owner = get_account()
    contract.withdraw({"from": owner})
def main():
    # Round-trip smoke test: fund the contract, then withdraw everything.
    fund()
    withdraw()
| 20.047619 | 61 | 0.63658 |
994cab1bd675ccc854ca9cc63429fb6862591eba | 6,985 | py | Python | gym_extensions/discrete/classic/cartpole.py | vBarbaros/gym-extensions | f17058459b36756cd9bb0877689ab29a729e9dd0 | [
"MIT"
] | 1 | 2018-06-06T21:30:49.000Z | 2018-06-06T21:30:49.000Z | gym_extensions/discrete/classic/cartpole.py | vBarbaros/gym-extensions | f17058459b36756cd9bb0877689ab29a729e9dd0 | [
"MIT"
] | null | null | null | gym_extensions/discrete/classic/cartpole.py | vBarbaros/gym-extensions | f17058459b36756cd9bb0877689ab29a729e9dd0 | [
"MIT"
] | 1 | 2019-01-27T12:09:43.000Z | 2019-01-27T12:09:43.000Z | """
Classic cart-pole system implemented by Rich Sutton et al.
Copied from https://webdocs.cs.ualberta.ca/~sutton/book/code/pole.c
"""
import logging
import math
import gym
from gym import spaces
from gym.utils import seeding
import numpy as np
logger = logging.getLogger(__name__)
def register_custom_cartpole(tag, gravity=9.8, masscart=1.0, masspole=0.1, pole_length=.5, force_mag=10.0):
    """
    Register a customized CartPole environment under a new gym id.
    tag - What to call your env (e.g. CustomCartpoleLongPole-v0, CustomCartpoleLongPole-v1)
    gravity - if you want to modify the gravity factor (default 9.8)
    masscart - the mass of the cartpole base
    masspole - the mass of the pole
    pole_length - the length of the pole (passed through as the env's 'length' kwarg)
    force_mag - the magnitude of the exerted action force
    """
    # NOTE(review): the entry_point module path below ("envs.transfer.classic...")
    # does not match this file's apparent package location
    # (gym_extensions/discrete/classic/cartpole.py) -- confirm it still resolves.
    return gym.envs.register(
        id=tag,
        entry_point="envs.transfer.classic.cartpole:CustomizableCartPoleEnv",
        max_episode_steps=200,
        reward_threshold=195.0,
        kwargs= dict(gravity = gravity, masscart = masscart, masspole = masspole, length = pole_length, force_mag = force_mag)
    )
class CustomizableCartPoleEnv(gym.Env):
metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second' : 50
}
def __init__(self, gravity=9.8, masscart=1.0, masspole=0.1, length = .5, force_mag = 10.0):
self.gravity = gravity
self.masscart = masscart
self.masspole = masspole
self.total_mass = (self.masspole + self.masscart)
self.length = length # actually half the pole's length
self.polemass_length = (self.masspole * self.length)
self.force_mag = force_mag
self.tau = 0.02 # seconds between state updates
# Angle at which to fail the episode
self.theta_threshold_radians = 12 * 2 * math.pi / 360
self.x_threshold = 2.4
# Angle limit set to 2 * theta_threshold_radians so failing observation is still within bounds
high = np.array([
self.x_threshold * 2,
np.finfo(np.float32).max,
self.theta_threshold_radians * 2,
np.finfo(np.float32).max])
self.action_space = spaces.Discrete(2)
self.observation_space = spaces.Box(-high, high)
self._seed()
self.viewer = None
self.state = None
self.steps_beyond_done = None
def _seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def _step(self, action):
assert self.action_space.contains(action), "%r (%s) invalid"%(action, type(action))
state = self.state
x, x_dot, theta, theta_dot = state
force = self.force_mag if action==1 else -self.force_mag
costheta = math.cos(theta)
sintheta = math.sin(theta)
temp = (force + self.polemass_length * theta_dot * theta_dot * sintheta) / self.total_mass
thetaacc = (self.gravity * sintheta - costheta* temp) / (self.length * (4.0/3.0 - self.masspole * costheta * costheta / self.total_mass))
xacc = temp - self.polemass_length * thetaacc * costheta / self.total_mass
x = x + self.tau * x_dot
x_dot = x_dot + self.tau * xacc
theta = theta + self.tau * theta_dot
theta_dot = theta_dot + self.tau * thetaacc
self.state = (x,x_dot,theta,theta_dot)
done = x < -self.x_threshold \
or x > self.x_threshold \
or theta < -self.theta_threshold_radians \
or theta > self.theta_threshold_radians
done = bool(done)
if not done:
reward = 1.0
elif self.steps_beyond_done is None:
# Pole just fell!
self.steps_beyond_done = 0
reward = 1.0
else:
if self.steps_beyond_done == 0:
logger.warning("You are calling 'step()' even though this environment has already returned done = True. You should always call 'reset()' once you receive 'done = True' -- any further steps are undefined behavior.")
self.steps_beyond_done += 1
reward = 0.0
return np.array(self.state), reward, done, {}
def _reset(self):
self.state = self.np_random.uniform(low=-0.05, high=0.05, size=(4,))
self.steps_beyond_done = None
return np.array(self.state)
    def _render(self, mode='human', close=False):
        """Render the cart-pole scene.

        On first call, lazily builds the viewer with cart, pole, axle and
        track geometry; subsequent calls just update the transforms from
        the current state. `close=True` tears the viewer down instead.
        """
        if close:
            if self.viewer is not None:
                self.viewer.close()
                self.viewer = None
            return

        screen_width = 600
        screen_height = 400

        world_width = self.x_threshold*2
        # Pixels per world unit.
        scale = screen_width/world_width
        carty = 100 # TOP OF CART
        polewidth = 10.0
        polelen = scale * 1.0
        cartwidth = 50.0
        cartheight = 30.0

        if self.viewer is None:
            # One-time scene construction; transforms are kept on self so
            # later calls can move the geometry.
            from gym.envs.classic_control import rendering
            self.viewer = rendering.Viewer(screen_width, screen_height)
            l,r,t,b = -cartwidth/2, cartwidth/2, cartheight/2, -cartheight/2
            axleoffset =cartheight/4.0
            cart = rendering.FilledPolygon([(l,b), (l,t), (r,t), (r,b)])
            self.carttrans = rendering.Transform()
            cart.add_attr(self.carttrans)
            self.viewer.add_geom(cart)
            l,r,t,b = -polewidth/2,polewidth/2,polelen-polewidth/2,-polewidth/2
            pole = rendering.FilledPolygon([(l,b), (l,t), (r,t), (r,b)])
            pole.set_color(.8,.6,.4)
            # Pole is attached to the cart: it carries both its own transform
            # (rotation about the axle) and the cart's translation.
            self.poletrans = rendering.Transform(translation=(0, axleoffset))
            pole.add_attr(self.poletrans)
            pole.add_attr(self.carttrans)
            self.viewer.add_geom(pole)
            self.axle = rendering.make_circle(polewidth/2)
            self.axle.add_attr(self.poletrans)
            self.axle.add_attr(self.carttrans)
            self.axle.set_color(.5,.5,.8)
            self.viewer.add_geom(self.axle)
            self.track = rendering.Line((0,carty), (screen_width,carty))
            self.track.set_color(0,0,0)
            self.viewer.add_geom(self.track)

        if self.state is None: return None

        x = self.state
        cartx = x[0]*scale+screen_width/2.0 # MIDDLE OF CART
        self.carttrans.set_translation(cartx, carty)
        # state[2] is the pole angle; negated for screen coordinates.
        self.poletrans.set_rotation(-x[2])

        return self.viewer.render(return_rgb_array = mode=='rgb_array')
if __name__ == "__main__":
    import time

    # Smoke-test the environment with a random policy and time it.
    env = gym.make('CartPole-v0')
    time_steps = 0
    start_time = time.time()
    for i_episode in range(500):
        observation = env.reset()
        for t in range(500):
            env.render()
            print(observation)
            action = env.action_space.sample()
            observation, reward, done, info = env.step(action)
            time_steps += 1
            if done:
                print("Episode finished after {} timesteps".format(t+1))
                break
    # Fixed: the original used the Python 2 `print` statement here, which is
    # a SyntaxError under Python 3 (the rest of the file uses print()).
    print('time in seconds elapsed: ', time.time() - start_time, ' time steps: ', time_steps)
| 38.169399 | 230 | 0.609878 |
24262e78acd4c62d1bedcf92ffcdab5349e9e5bc | 1,697 | py | Python | Chapter08/seleniumLocator.py | athertahir/web-scraping-with-python | 5f23f64133b383c555be26ea344522cf629045ea | [
"MIT"
] | null | null | null | Chapter08/seleniumLocator.py | athertahir/web-scraping-with-python | 5f23f64133b383c555be26ea344522cf629045ea | [
"MIT"
] | null | null | null | Chapter08/seleniumLocator.py | athertahir/web-scraping-with-python | 5f23f64133b383c555be26ea344522cf629045ea | [
"MIT"
] | null | null | null | from selenium import webdriver
chrome_path = '/home/scrapbook/tutorial/web-scraping-with-python/Chapter08/chromedriver'
from selenium.webdriver.chrome.options import Options
options = Options()
options.add_argument("--headless") # Runs Chrome in headless mode.
options.add_argument('--no-sandbox') # # Bypass OS security model
options.add_argument('start-maximized')
options.add_argument('disable-infobars')
options.add_argument("--disable-extensions")
driver = webdriver.Chrome(chrome_options=options, executable_path=chrome_path)
#driver = webdriver.Chrome(executable_path=chrome_path)
driver.get('http://automationpractice.com')
print("Current Page URL: ",driver.current_url)
searchBox = driver.find_element_by_id('search_query_top')
print("Type :",type(searchBox))
print("Attribute Value :",searchBox.get_attribute("value"))
print("Attribute Class :",searchBox.get_attribute("class"))
print("Tag Name :",searchBox.tag_name)
searchBox.clear()
searchBox.send_keys("Dress")
submitButton = driver.find_element_by_name("submit_search")
submitButton.click()
resultsShowing = driver.find_element_by_class_name("product-count")
print("Results Showing: ",resultsShowing.text)
resultsFound = driver.find_element_by_xpath('//*[@id="center_column"]//span[@class="heading-counter"]')
print("Results Found: ",resultsFound.text)
products = driver.find_elements_by_xpath('//*[@id="center_column"]//a[@class="product-name"]')
#products = driver.find_elements_by_css_selector('ul.product_list li.ajax_block_product a.product-name')
foundProducts=[]
for product in products:
foundProducts.append([product.text,product.get_attribute("href")])
print(foundProducts)
driver.close()
driver.quit()
| 36.106383 | 104 | 0.786682 |
c96c0ced9f9705493ce0ba53eda7a618e8f2c893 | 4,517 | py | Python | container_service_extension/pksclient/models/v1/network_profile_request.py | YiouZhu1010/container-service-extension | f36bc250d226609b9a64e99073bb7a752ffb9f9b | [
"BSD-2-Clause"
] | 1 | 2019-02-22T22:10:02.000Z | 2019-02-22T22:10:02.000Z | container_service_extension/pksclient/models/v1/network_profile_request.py | YiouZhu1010/container-service-extension | f36bc250d226609b9a64e99073bb7a752ffb9f9b | [
"BSD-2-Clause"
] | null | null | null | container_service_extension/pksclient/models/v1/network_profile_request.py | YiouZhu1010/container-service-extension | f36bc250d226609b9a64e99073bb7a752ffb9f9b | [
"BSD-2-Clause"
] | null | null | null | # coding: utf-8
"""
PKS
PKS API # noqa: E501
OpenAPI spec version: 1.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class NetworkProfileRequest(object):
    """Swagger model for a PKS network-profile request.

    Originally auto generated by the swagger code generator; the generated
    interface (attributes, validation and dict/str conversions) is preserved
    exactly.
    """

    # Attribute name -> swagger-declared type.
    swagger_types = {
        'name': 'str',
        'description': 'str',
        'parameters': 'str'
    }

    # Attribute name -> JSON key in the API definition.
    attribute_map = {
        'name': 'name',
        'description': 'description',
        'parameters': 'parameters'
    }

    def __init__(self, name=None, description=None, parameters=None):  # noqa: E501
        """NetworkProfileRequest - a model defined in Swagger"""  # noqa: E501
        self._name = None
        self._description = None
        self._parameters = None
        self.discriminator = None
        # Required fields are assigned through their setters so the
        # None-validation below applies at construction time.
        self.name = name
        if description is not None:
            self.description = description
        self.parameters = parameters

    @property
    def name(self):
        """str: the profile name (required)."""
        return self._name

    @name.setter
    def name(self, name):
        """Set the profile name; raises ValueError when None."""
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501
        self._name = name

    @property
    def description(self):
        """str: optional human-readable description."""
        return self._description

    @description.setter
    def description(self, description):
        """Set the optional description."""
        self._description = description

    @property
    def parameters(self):
        """str: the profile parameters payload (required)."""
        return self._parameters

    @parameters.setter
    def parameters(self, parameters):
        """Set the parameters payload; raises ValueError when None."""
        if parameters is None:
            raise ValueError("Invalid value for `parameters`, must not be `None`")  # noqa: E501
        self._parameters = parameters

    def to_dict(self):
        """Return the model properties as a dict (one level deep)."""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """True when `other` is the same model type with equal fields."""
        if not isinstance(other, NetworkProfileRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
01667ef92e998b6f91968d85d39b0926da17a6b1 | 5,262 | py | Python | examples/toy_mixture_model_discrete_enumeration.py | kashif/pyro | b65b329d8b851c7402acaef9c176a8964caadaf3 | [
"Apache-2.0"
] | 2 | 2021-01-04T01:35:23.000Z | 2021-01-04T01:35:32.000Z | examples/toy_mixture_model_discrete_enumeration.py | kashif/pyro | b65b329d8b851c7402acaef9c176a8964caadaf3 | [
"Apache-2.0"
] | null | null | null | examples/toy_mixture_model_discrete_enumeration.py | kashif/pyro | b65b329d8b851c7402acaef9c176a8964caadaf3 | [
"Apache-2.0"
] | null | null | null | # Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
"""
A toy mixture model to provide a simple example for implementing discrete enumeration.
(A) -> [B] -> (C)
A is an observed Bernoulli variable with Beta prior.
B is a hidden variable which is a mixture of two Bernoulli distributions (with Beta priors),
chosen by A being true or false.
C is observed, and like B, is a mixture of two Bernoulli distributions (with Beta priors),
chosen by B being true or false.
There is a plate over the three variables for n independent observations of data.
Because B is hidden and discrete we wish to marginalize it out of the model.
This is done by:
1) marking the model method with `@pyro.infer.config_enumerate`
2) marking the B sample site in the model with `infer={"enumerate": "parallel"}`
3) passing `pyro.infer.SVI` the `pyro.infer.TraceEnum_ELBO` loss function
"""
import argparse
import matplotlib.pyplot as plt
import numpy as np
import torch
from torch.distributions import constraints
from torch.distributions.bernoulli import Bernoulli
from torch.distributions.beta import Beta
from tqdm import tqdm
import pyro
import pyro.distributions as dist
import pyro.infer
import pyro.optim
from pyro.ops.indexing import Vindex
def main(args):
    """Generate synthetic data, fit the guide with SVI, then compare learned vs. true CPDs."""
    prior, CPDs, data = generate_data(args.num_obs)
    posterior_params = train(prior, data, args.num_steps, args.num_obs)
    evaluate(CPDs, posterior_params)
def generate_data(num_obs):
    """Sample ground-truth CPDs from Beta priors and draw `num_obs` observations.

    Returns (prior, CPDs, data): the Beta hyperparameters, the sampled
    Bernoulli parameters, and tensors of A/B/C observations for the
    A -> B -> C chain.
    """
    # domain = [False, True]
    prior = {'A': torch.tensor([1., 10.]),
             'B': torch.tensor([[10., 1.],
                                [1., 10.]]),
             'C': torch.tensor([[10., 1.],
                                [1., 10.]])}
    # One Bernoulli parameter for A; two each for B and C (conditioned on
    # the parent being false/true respectively).
    CPDs = {'p_A': Beta(prior['A'][0], prior['A'][1]).sample(),
            'p_B': Beta(prior['B'][:, 0], prior['B'][:, 1]).sample(),
            'p_C': Beta(prior['C'][:, 0], prior['C'][:, 1]).sample(),
            }
    data = {'A': Bernoulli(torch.ones(num_obs) * CPDs['p_A']).sample()}
    # gather selects each row's conditional parameter given its parent value.
    data['B'] = Bernoulli(torch.gather(CPDs['p_B'], 0, data['A'].type(torch.long))).sample()
    data['C'] = Bernoulli(torch.gather(CPDs['p_C'], 0, data['B'].type(torch.long))).sample()
    return prior, CPDs, data
@pyro.infer.config_enumerate
def model(prior, obs, num_obs):
    """Pyro model for the A -> B -> C chain with Beta priors on each CPD.

    B is hidden and discrete, so it is marked for parallel enumeration and
    marginalized out by TraceEnum_ELBO.
    """
    p_A = pyro.sample('p_A', dist.Beta(1, 1))
    p_B = pyro.sample('p_B', dist.Beta(torch.ones(2), torch.ones(2)).to_event(1))
    p_C = pyro.sample('p_C', dist.Beta(torch.ones(2), torch.ones(2)).to_event(1))
    with pyro.plate('data_plate', num_obs):
        A = pyro.sample('A', dist.Bernoulli(p_A.expand(num_obs)), obs=obs['A'])
        # Vindex used to ensure proper indexing into the enumerated sample sites
        B = pyro.sample('B', dist.Bernoulli(Vindex(p_B)[A.type(torch.long)]), infer={"enumerate": "parallel"})
        pyro.sample('C', dist.Bernoulli(Vindex(p_C)[B.type(torch.long)]), obs=obs['C'])
def guide(prior, obs, num_obs):
    """Mean-field guide: independent Beta posteriors over p_A, p_B and p_C.

    The Beta hyperparameters ('a', 'b', 'c') are the learnable params,
    initialized from the prior and kept positive.
    """
    a = pyro.param('a', prior['A'], constraint=constraints.positive)
    pyro.sample('p_A', dist.Beta(a[0], a[1]))
    b = pyro.param('b', prior['B'], constraint=constraints.positive)
    pyro.sample('p_B', dist.Beta(b[:, 0], b[:, 1]).to_event(1))
    c = pyro.param('c', prior['C'], constraint=constraints.positive)
    pyro.sample('p_C', dist.Beta(c[:, 0], c[:, 1]).to_event(1))
def train(prior, data, num_steps, num_obs):
    """Run SVI for `num_steps`, plot the loss curve, and return the learned params.

    Returns a dict of numpy arrays keyed by the guide's param names
    ('a', 'b', 'c'), with 'a' reshaped to match the 2-column layout of the
    others.
    """
    pyro.clear_param_store()
    # max_plate_nesting = 1 because there is a single plate in the model
    loss_func = pyro.infer.TraceEnum_ELBO(max_plate_nesting=1)
    svi = pyro.infer.SVI(model,
                         guide,
                         pyro.optim.Adam({'lr': .01}),
                         loss=loss_func
                         )
    losses = []
    for _ in tqdm(range(num_steps)):
        loss = svi.step(prior, data, num_obs)
        losses.append(loss)
    plt.figure()
    plt.plot(losses)
    plt.show()
    posterior_params = {k: np.array(v.data) for k, v in pyro.get_param_store().items()}
    posterior_params['a'] = posterior_params['a'][None, :]  # reshape to same as other variables
    return posterior_params
def evaluate(CPDs, posterior_params):
    """Print the true versus learned Bernoulli parameters for A, B and C."""
    sections = [
        ('\np_A = True', CPDs['p_A'], posterior_params['a']),
        ('\np_B = True | A = False/True', CPDs['p_B'], posterior_params['b']),
        ('\np_C = True | B = False/True', CPDs['p_C'], posterior_params['c']),
    ]
    for header, cpd, params in sections:
        true_p, pred_p = get_true_pred_CPDs(cpd, params)
        print(header)
        print('actual: ', true_p)
        print('predicted:', pred_p)
def get_true_pred_CPDs(CPD, posterior_param):
    """Return (true, predicted) Bernoulli parameters.

    The prediction is the Beta posterior mean a / (a + b), where column 0
    of `posterior_param` holds a and column 1 holds b.
    """
    actual = CPD.numpy()
    alpha = posterior_param[:, 0]
    predicted = alpha / posterior_param.sum(axis=1)
    return actual, predicted
if __name__ == "__main__":
    assert pyro.__version__.startswith('1.5.1')
    # CLI: number of SVI steps and number of synthetic observations.
    parser = argparse.ArgumentParser(description="Toy mixture model")
    parser.add_argument("-n", "--num-steps", default=4000, type=int)
    parser.add_argument("-o", "--num-obs", default=10000, type=int)
    args = parser.parse_args()
    main(args)
| 38.977778 | 110 | 0.644812 |
ff4ccbfd416a30473e79e449f9e9fefaf2c52a02 | 688 | py | Python | setup.py | andyhu4023/backtest_pkg | 00f57244307a740245c6419c8a52cb07e80f171a | [
"MIT"
] | 3 | 2020-04-22T09:27:33.000Z | 2021-02-04T14:55:13.000Z | setup.py | andyhu4023/backtest_pkg | 00f57244307a740245c6419c8a52cb07e80f171a | [
"MIT"
] | 1 | 2021-02-04T14:54:06.000Z | 2021-02-04T14:54:06.000Z | setup.py | andyhu4023/backtest_pkg | 00f57244307a740245c6419c8a52cb07e80f171a | [
"MIT"
] | 1 | 2021-12-18T10:03:13.000Z | 2021-12-18T10:03:13.000Z | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="backtest_pkg",
version="0.2.0",
author="Andy Hu",
author_email="andyhu2014@gmail.com",
description="A package for backtesting portfolio strategies and trading signals",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/andyhu4023/backtest_pkg.git",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires='>=3.6',
) | 31.272727 | 85 | 0.678779 |
d60d16ff42d4d6a8eb0c612e97dcc58cfbde350e | 270 | py | Python | sololearn/thatsodd.py | ehlodex/Python3 | 126c4662d1371ec6cbc1f257bd3de5c1dcdc86a6 | [
"MIT"
] | null | null | null | sololearn/thatsodd.py | ehlodex/Python3 | 126c4662d1371ec6cbc1f257bd3de5c1dcdc86a6 | [
"MIT"
] | null | null | null | sololearn/thatsodd.py | ehlodex/Python3 | 126c4662d1371ec6cbc1f257bd3de5c1dcdc86a6 | [
"MIT"
] | null | null | null | #!/usr/bin/env/ python3
"""SoloLearn > Code Coach > That's odd..."""
quantity = int(input('How many numbers? '))
sum = int(0)
for i in range(0, quantity):
number = int(input('Please type a number: '))
if number % 2 == 0:
sum = sum + number
print(sum)
| 20.769231 | 49 | 0.592593 |
0a7756d1b411f0ed732bac1d896d18b99b98f80a | 7,816 | py | Python | tests/test_managers.py | kmmbvnr/django-polymodels | 7a9b64b1851fea23a64d3d9421a69911e1669a49 | [
"MIT"
] | 1 | 2020-09-29T09:27:30.000Z | 2020-09-29T09:27:30.000Z | tests/test_managers.py | kmmbvnr/django-polymodels | 7a9b64b1851fea23a64d3d9421a69911e1669a49 | [
"MIT"
] | null | null | null | tests/test_managers.py | kmmbvnr/django-polymodels | 7a9b64b1851fea23a64d3d9421a69911e1669a49 | [
"MIT"
] | null | null | null | from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from polymodels.managers import PolymorphicManager
from .base import TestCase
from .models import Animal, BigSnake, HugeSnake, Mammal, Monkey, Snake, Zoo
class PolymorphicQuerySetTest(TestCase):
    """Behavioral tests for PolymorphicQuerySet's subclass selection helpers."""

    def test_select_subclasses(self):
        """select_subclasses() should downcast rows, optionally filtered by subclass."""
        Animal.objects.create(name='animal')
        Mammal.objects.create(name='mammal')
        Monkey.objects.create(name='monkey')
        Snake.objects.create(name='snake', length=10)
        BigSnake.objects.create(name='big snake', length=101)
        HugeSnake.objects.create(name='huge snake', length=155)
        # Assert `select_subclasses` correctly calls `select_related` and `filter`.
        animals = Animal.objects.select_subclasses()
        animals_expected_query_select_related = {
            'mammal': {'monkey': {}},
            'snake': {},
        }
        self.assertEqual(animals.query.select_related, animals_expected_query_select_related)
        with self.assertNumQueries(1):
            self.assertQuerysetEqual(animals.all(),
                                     ['<Animal: animal>',
                                      '<Mammal: mammal>',
                                      '<Monkey: monkey>',
                                      '<Snake: snake>',
                                      '<BigSnake: big snake>',
                                      '<HugeSnake: huge snake>'])
        with self.assertNumQueries(1):
            self.assertQuerysetEqual(animals.iterator(),
                                     ['<Animal: animal>',
                                      '<Mammal: mammal>',
                                      '<Monkey: monkey>',
                                      '<Snake: snake>',
                                      '<BigSnake: big snake>',
                                      '<HugeSnake: huge snake>'])
        # Filter out non-mammal (direct subclass)
        animal_mammals = Animal.objects.select_subclasses(Mammal)
        animal_mammals_expected_query_select_related = {
            'mammal': {'monkey': {}}
        }
        self.assertEqual(
            animal_mammals.query.select_related,
            animal_mammals_expected_query_select_related
        )
        with self.assertNumQueries(1):
            self.assertQuerysetEqual(animal_mammals.all(),
                                     ['<Mammal: mammal>',
                                      '<Monkey: monkey>'])
        # Filter out non-snake (subclass through an abstract one)
        animal_snakes = Animal.objects.select_subclasses(Snake)
        self.assertEqual(animal_snakes.query.select_related, {'snake': {}})
        with self.assertNumQueries(1):
            self.assertQuerysetEqual(animal_snakes.all(),
                                     ['<Snake: snake>',
                                      '<BigSnake: big snake>',
                                      '<HugeSnake: huge snake>'])
        # Subclass with only proxies
        snakes = Snake.objects.select_subclasses()
        self.assertFalse(snakes.query.select_related)
        with self.assertNumQueries(1):
            self.assertQuerysetEqual(snakes.all(),
                                     ['<Snake: snake>',
                                      '<BigSnake: big snake>',
                                      '<HugeSnake: huge snake>'])
        # Subclass filter proxies
        snake_bigsnakes = Snake.objects.select_subclasses(BigSnake)
        self.assertFalse(snakes.query.select_related)
        with self.assertNumQueries(1):
            self.assertQuerysetEqual(snake_bigsnakes.all(),
                                     ['<BigSnake: big snake>',
                                      '<HugeSnake: huge snake>'])

    def test_select_subclasses_get(self):
        """get() on a select_subclasses queryset should return the downcast instance."""
        snake = Snake.objects.create(name='snake', length=10)
        self.assertEqual(Animal.objects.select_subclasses().get(), snake)

    def test_select_subclasses_values(self):
        """values_list() should still work after select_subclasses()."""
        Animal.objects.create(name='animal')
        self.assertQuerysetEqual(
            Animal.objects.select_subclasses().values_list('name', flat=True), ['animal'], lambda x: x
        )

    def test_exclude_subclasses(self):
        """exclude_subclasses() should keep only rows of the exact model class."""
        Animal.objects.create(name='animal')
        Mammal.objects.create(name='first mammal')
        Mammal.objects.create(name='second mammal')
        Monkey.objects.create(name='donkey kong')
        self.assertQuerysetEqual(Animal.objects.exclude_subclasses(),
                                 ['<Animal: animal>'])
        self.assertQuerysetEqual(Mammal.objects.exclude_subclasses(),
                                 ['<Mammal: first mammal>',
                                  '<Mammal: second mammal>'])
        self.assertQuerysetEqual(Monkey.objects.exclude_subclasses(),
                                 ['<Monkey: donkey kong>'])

    def test_select_subclasses_prefetch_related(self):
        """prefetch_related() should combine with select_subclasses() without extra queries."""
        zoo = Zoo.objects.create()
        animal = Animal.objects.create(name='animal')
        mammal = Mammal.objects.create(name='mammal')
        monkey = Monkey.objects.create(name='monkey')
        zoo.animals.add(animal, mammal, monkey)
        other_monkey = Monkey.objects.create(name='monkey')
        monkey.friends.add(other_monkey)
        queryset = Animal.objects.select_subclasses().prefetch_related('zoos')
        # One query for the rows, one for the zoos prefetch.
        with self.assertNumQueries(2):
            self.assertSequenceEqual(queryset, [
                animal,
                mammal,
                monkey,
                other_monkey,
            ])
            self.assertSequenceEqual(queryset[0].zoos.all(), [zoo])
            self.assertSequenceEqual(queryset[1].zoos.all(), [zoo])
            self.assertSequenceEqual(queryset[2].zoos.all(), [zoo])
        # Test prefetch related combination.
        queryset = Animal.objects.select_subclasses().prefetch_related(
            'zoos',
            'mammal__monkey__friends',
        )
        with self.assertNumQueries(3):
            self.assertSequenceEqual(queryset, [
                animal,
                mammal,
                monkey,
                other_monkey,
            ])
            self.assertSequenceEqual(queryset[0].zoos.all(), [zoo])
            self.assertSequenceEqual(queryset[1].zoos.all(), [zoo])
            self.assertSequenceEqual(queryset[2].zoos.all(), [zoo])
            self.assertSequenceEqual(queryset[2].friends.all(), [other_monkey])
            self.assertSequenceEqual(queryset[3].friends.all(), [monkey])
class PolymorphicManagerTest(TestCase):
    """Tests for PolymorphicManager attachment rules and proxy filtering."""

    def test_improperly_configured(self):
        """Attaching the manager to a non-polymorphic model must raise at class creation."""
        with self.assertRaisesMessage(
            ImproperlyConfigured, '`PolymorphicManager` can only be used on `BasePolymorphicModel` subclasses.'
        ):
            class NonPolymorphicModel(models.Model):
                objects = PolymorphicManager()

                class Meta:
                    app_label = 'polymodels'

    def test_proxy_filtering(self):
        """
        Make sure managers attached to proxy models returns a queryset of
        proxies only.
        """
        Snake.objects.create(name='snake', length=1)
        BigSnake.objects.create(name='big snake', length=10)
        HugeSnake.objects.create(name='huge snake', length=100)
        self.assertQuerysetEqual(Snake.objects.all(),
                                 ['<Snake: snake>',
                                  '<Snake: big snake>',
                                  '<Snake: huge snake>'])
        self.assertQuerysetEqual(BigSnake.objects.all(),
                                 ['<BigSnake: big snake>',
                                  '<BigSnake: huge snake>'])
        self.assertQuerysetEqual(HugeSnake.objects.all(),
                                 ['<HugeSnake: huge snake>'])
89f7676ecc366d8dd11ef6260f8d50c950ce28e7 | 2,073 | py | Python | predict.py | mandalbiswadip/hierarchical-attention-networks | c1d15db91fdad075267a9b6943e06a1da7b076ad | [
"MIT"
] | 1 | 2019-08-29T09:53:43.000Z | 2019-08-29T09:53:43.000Z | predict.py | mandalbiswadip/hierarchical-attention-networks | c1d15db91fdad075267a9b6943e06a1da7b076ad | [
"MIT"
] | 1 | 2022-02-10T02:00:50.000Z | 2022-02-10T02:00:50.000Z | predict.py | mandalbiswadip/hierarchical-attention-networks | c1d15db91fdad075267a9b6943e06a1da7b076ad | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
import numpy as np
import pandas as pd
from models.text import CustomTokenizer
from models.model import word_list, max_sentences, maxlen
print("load")
MODEL_PATH = ""
# Note: also change the layer type (lstm vs attention) while loading
#=================================================================================
# For attention + lstm:
# model = tf.keras.models.load_model(
# MODEL_PATH,
# custom_objects={"HierarchicalAttentionLayer": HierarchicalAttentionLayer}
# )
# For lstm:
# model = tf.keras.models.load_model(
# MODEL_PATH,
# custom_objects={"HierarchicalAttentionLayer": HierarchicalLSTMLayer}
# )
# =================================================================================
tokenizer = CustomTokenizer(word_list=word_list)
data = pd.read_csv("tagged_data.csv")
# data = data.iloc[:10]
# =================================================
import tensorflow as tf
inp = tokenizer.doc_to_sequences(data.text.tolist())
inputs = []
for doc in inp:
inputs.append(
tf.keras.preprocessing.sequence.pad_sequences(
doc, padding="post", value=0, maxlen=maxlen, dtype=None
)
)
a = np.zeros((len(inputs), max_sentences, maxlen))
for row, x in zip(a, inputs):
row[:len(x)] = x[:max_sentences]
# Define Model
from models.model import HierarchicalAttentionLayer, HierarchicalLSTMLayer
# from models.tuner import tuner
# model.compile(
# optimizer="adam",
# loss="categorical_crossentropy",
# metrics=["acc"]
# )
# y = pd.get_dummies(data.sentiment).values
y = data.sentiment.values
print("data shape {}".format(a.shape))
print("Beginning prediction.....")
model = tf.keras.models.load_model(
MODEL_PATH,
custom_objects={"HierarchicalAttentionLayer": HierarchicalAttentionLayer}
)
# model = tf.keras.models.load_model(
# MODEL_PATH,
# custom_objects={"HierarchicalAttentionLayer": HierarchicalLSTMLayer}
# )
prediction = model.predict(a)
data["prediction"] = np.argmax(prediction)
data.to_csv("prediction.csv", index=False)
| 25.280488 | 83 | 0.643029 |
42126c197db5e8f75f40ff7a856b2f56272010fb | 2,683 | py | Python | Python/readTLEwriteCSV.py | kgkIEEE/Gemini2 | 23ac265c1191664a93c394d1ab2b11ad050876f1 | [
"MIT"
] | 1 | 2020-05-29T06:22:10.000Z | 2020-05-29T06:22:10.000Z | Python/readTLEwriteCSV.py | kgkIEEE/Gemini2 | 23ac265c1191664a93c394d1ab2b11ad050876f1 | [
"MIT"
] | null | null | null | Python/readTLEwriteCSV.py | kgkIEEE/Gemini2 | 23ac265c1191664a93c394d1ab2b11ad050876f1 | [
"MIT"
] | null | null | null | import argparse
import csv
from skyfield import api
from skyfield.api import EarthSatellite
from skyfield.constants import AU_KM, AU_M
from skyfield.sgp4lib import TEME_to_ITRF
from skyfield.api import Topos, load
# Read TLE file and write key parameters in CSV format
def readTLE(tleFilename):
    """Parse a TLE file into a list of skyfield EarthSatellite objects.

    Lines starting with '0' are name lines (ignored for construction),
    '1' and '2' are the two TLE data lines; a satellite is appended once
    both data lines have been seen, then the pair is reset.
    """
    catalog = []
    line1 = None
    line2 = None
    # Fixed: open the file in a context manager so the handle is always
    # closed (the original opened it and never closed it).
    with open(tleFilename, 'r') as tleFile:
        for line in tleFile:
            if line[0] == '0':
                # Name line; not required to build the satellite.
                pass
            elif line[0] == '1':
                line1 = line
            elif line[0] == '2':
                line2 = line
            else:
                # Error - TLE lines start with 0, 1 or 2
                print("Error: line does not start with 0, 1 or 2: ", line)
            if line1 and line2:
                # TODO(review): verify the object number matches between
                # line 1 and line 2 before pairing them (comment existed in
                # the original but the check was never implemented).
                catalog.append(EarthSatellite(line1, line2))
                line1 = None
                line2 = None
    return catalog
def writeSatelliteCSV(catalog, tleDefaultCSVFilename):
    """Write one CSV row of orbital elements per satellite in `catalog`.

    Columns use descriptive names; the mapping below translates them to the
    SGP4 attribute names found on ``satellite.model``.
    """
    fieldnames = ['satnum', 'epochyr', 'epochdays', 'jdsatepoch', 'ndot',
                  'nddot', 'bstar', 'inclination', 'rightascension', 'eccentricity',
                  'argofperigee', 'meanmotion', 'meananomaly']
    # CSV column -> attribute on satellite.model.
    attr_for_field = {
        'satnum': 'satnum',
        'epochyr': 'epochyr',
        'epochdays': 'epochdays',
        'jdsatepoch': 'jdsatepoch',
        'ndot': 'ndot',
        'nddot': 'nddot',
        'bstar': 'bstar',
        'inclination': 'inclo',
        'rightascension': 'nodeo',
        'eccentricity': 'ecco',
        'argofperigee': 'argpo',
        'meanmotion': 'no',
        'meananomaly': 'mo',
    }
    with open(tleDefaultCSVFilename, 'w', newline='') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        for satellite in catalog:
            model = satellite.model
            writer.writerow({field: getattr(model, attr)
                             for field, attr in attr_for_field.items()})
# CLI entry: read a TLE file (default catalogTest.txt) and dump the parsed
# elements to catalogTest.csv.
parser = argparse.ArgumentParser(description='Read TLE files')
parser.add_argument("--tleFilename")
args = parser.parse_args()
tleDefaultFilename = 'catalogTest.txt'
tleDefaultCSVFilename = 'catalogTest.csv'
# Fall back to the bundled sample catalog when no file is given.
if args.tleFilename:
    tleFilename = args.tleFilename
else:
    tleFilename = tleDefaultFilename
catalog = readTLE(tleFilename)
writeSatelliteCSV(catalog,tleDefaultCSVFilename)
| 33.5375 | 80 | 0.619456 |
95de7f2588e260a3254cd84e47f4fd36478f50cd | 1,061 | py | Python | src/intensio_obfuscator/obfuscation_examples/python/intermediate/input/basicRAT-example/core/toolkit.py | bbhunter/Intensio-Obfuscator | f66a22b50c19793edac673cfd7dc319405205c39 | [
"MIT"
] | 553 | 2019-06-08T17:47:41.000Z | 2022-03-29T03:12:11.000Z | intensio/test/python/intermediate/input/basicRAT-example/core/toolkit.py | Chudry/Intensio-Obfuscator | 62f6c8871704693ca79342efb03dcd2530d7614e | [
"MIT"
] | 69 | 2019-06-08T13:25:47.000Z | 2022-02-15T08:34:07.000Z | intensio/test/python/intermediate/input/basicRAT-example/core/toolkit.py | Chudry/Intensio-Obfuscator | 62f6c8871704693ca79342efb03dcd2530d7614e | [
"MIT"
] | 130 | 2019-06-08T18:44:13.000Z | 2022-03-27T01:00:52.000Z | # -*- coding: utf-8 -*-
#---------------------------------------------------------- [Lib] -----------------------------------------------------------#
import datetime
import os
import urllib
import zipfile
#--------------------------------------------------- [Function(s)/Class] ----------------------------------------------------#
def unzip(f):
    """Extract zip archive `f` into the current directory; return a status string."""
    if not os.path.isfile(f):
        return 'Error: File not found.'
    try:
        with zipfile.ZipFile(f) as archive:
            archive.extractall('.')
    except zipfile.BadZipfile:
        return 'Error: Failed to unzip file.'
    return 'File {} extracted.'.format(f)
def wget(url):
    """Download `url` into the current directory; return a status string.

    Only http(s) URLs are accepted. The filename is the last URL path
    segment, with a timestamped fallback when the URL ends in '/'.
    """
    if not url.startswith('http'):
        return 'Error: URL must begin with http:// or https:// .'

    fname = url.split('/')[-1]
    if not fname:
        # Fixed: the original used 'file-'.format(...), which has no
        # placeholder, so the timestamp was silently discarded and the
        # file was always named just 'file-'.
        fname = 'file-{}'.format(str(datetime.datetime.now()).replace(' ', '-'))

    try:
        # NOTE(review): urllib.urlretrieve is Python 2 only; under Python 3
        # this would need urllib.request.urlretrieve — confirm target runtime.
        urllib.urlretrieve(url, fname)
    except IOError:
        return 'Error: Download failed.'

    return 'File {} downloaded.'.format(fname)
| 28.675676 | 126 | 0.447691 |
e5cdd72385da843dd82d7a55ede32f2e86c71d49 | 656 | py | Python | cmd/suggestion/random/v1alpha2/main.py | samplise/katib | 08234c3eccd43b11484fd78c9352eafbdc9152d8 | [
"Apache-2.0"
] | 1 | 2020-11-08T17:04:21.000Z | 2020-11-08T17:04:21.000Z | cmd/suggestion/random/v1alpha2/main.py | samplise/katib | 08234c3eccd43b11484fd78c9352eafbdc9152d8 | [
"Apache-2.0"
] | null | null | null | cmd/suggestion/random/v1alpha2/main.py | samplise/katib | 08234c3eccd43b11484fd78c9352eafbdc9152d8 | [
"Apache-2.0"
] | 1 | 2020-08-03T15:42:48.000Z | 2020-08-03T15:42:48.000Z | import grpc
import time
from pkg.apis.manager.v1alpha2.python import api_pb2_grpc
from pkg.suggestion.v1alpha2.random_service import RandomService
from concurrent import futures
_ONE_DAY_IN_SECONDS = 60 * 60 * 24
DEFAULT_PORT = "0.0.0.0:6789"
def serve():
    """Start the gRPC suggestion server and block until interrupted."""
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
    server.add_insecure_port(DEFAULT_PORT)
    print("Listening...")
    server.start()
    # grpc's server.start() does not block, so sleep forever and stop the
    # server cleanly on Ctrl-C.
    try:
        while True:
            time.sleep(_ONE_DAY_IN_SECONDS)
    except KeyboardInterrupt:
        server.stop(0)


if __name__ == "__main__":
    serve()
| 27.333333 | 74 | 0.734756 |
e0b44ffa75c375976690b9d37df676899875c15b | 14,948 | py | Python | src/ipyradiant/visualization/cytoscape/interactive.py | pattersoniv/ipyradiant | bcfb88c09cadc732b0accbdd7c07be0342e2b918 | [
"BSD-3-Clause"
] | 5 | 2021-02-12T14:55:49.000Z | 2022-03-21T21:30:54.000Z | src/ipyradiant/visualization/cytoscape/interactive.py | pattersoniv/ipyradiant | bcfb88c09cadc732b0accbdd7c07be0342e2b918 | [
"BSD-3-Clause"
] | 92 | 2020-07-24T20:17:22.000Z | 2022-02-18T13:35:56.000Z | src/ipyradiant/visualization/cytoscape/interactive.py | pattersoniv/ipyradiant | bcfb88c09cadc732b0accbdd7c07be0342e2b918 | [
"BSD-3-Clause"
] | 6 | 2020-07-28T18:28:07.000Z | 2022-01-19T11:39:51.000Z | # Copyright (c) 2021 ipyradiant contributors.
# Distributed under the terms of the Modified BSD License.
import ipywidgets as W
import traitlets as T
from IPython.display import JSON, display
from networkx import Graph as NXGraph
from pandas import DataFrame
from rdflib.graph import Graph as RDFGraph
from rdflib.term import URIRef
from ipyradiant.basic_tools.custom_uri_ref import CustomURIRef
from ipyradiant.rdf2nx.converter import RDF2NX
from ipyradiant.visualization.cytoscape import style
from ipyradiant.visualization.cytoscape.viewer import CytoscapeViewer
# Ranked by pairwise distance (generated by https://mokole.com/palette.html)
COLOR_LIST = [
(47, 79, 79),
(85, 107, 47),
(139, 69, 19),
(34, 139, 34),
(72, 61, 139),
(184, 134, 11),
(70, 130, 180),
(0, 0, 128),
(127, 0, 127),
(143, 188, 143),
(176, 48, 96),
(255, 69, 0),
(255, 255, 0),
(0, 255, 0),
(138, 43, 226),
(0, 255, 127),
(220, 20, 60),
(0, 255, 255),
(0, 0, 255),
(173, 255, 47),
(218, 112, 214),
(255, 127, 80),
(255, 0, 255),
(30, 144, 255),
(144, 238, 144),
(173, 216, 230),
(255, 20, 147),
(123, 104, 238),
(255, 222, 173),
(255, 192, 203),
]
def get_color_list_css(color_list):
    """Convert (r, g, b) tuples into CSS ``rgb(r,g,b)`` strings (ipycytoscape format)."""
    return ["rgb({},{},{})".format(r, g, b) for r, g, b in color_list]
def get_desc(uri, namespaces, count=None):
    """Get a shorthand way to describe a URI and its counts.

    :param uri: the URI to describe
    :param namespaces: namespace data passed to CustomURIRef to shorten the URI
    :param count: optional occurrence count to append as ``[count]``
    :return: ``"shorthand [count]"`` when count is truthy, else just the shorthand
    """
    shorthand = str(CustomURIRef(uri, namespaces=namespaces))
    if count:
        return f"{shorthand} [{count}]"
    # Bug fix: previously fell through and implicitly returned None when no
    # (or a zero) count was supplied; return the plain shorthand instead.
    return shorthand
def get_type_counts(klass, graph: NXGraph) -> DataFrame:
    """Tally ``rdf:type`` values across all nodes of a networkx graph.

    The first positional slot absorbs the instance when this function is
    installed as a class attribute (e.g. ``InteractiveViewer.type_count_callable``)
    and called through ``self``; the value is unused here.

    :param klass: ignored (binding slot, see above)
    :param graph: networkx graph whose nodes carry an ``rdf:type`` attribute
    :return: DataFrame with columns ``type_``/``count`` sorted by count descending
    :raises ValueError: if any node lacks a truthy ``rdf:type`` attribute
    """
    counts = {}
    for _node, attrs in graph.nodes(data=True):
        # the attribute may be a single value or a list/tuple of values
        declared = attrs.get("rdf:type")
        if not declared:
            raise ValueError(f"Node has no 'rdf:type': {attrs.keys()}")
        if isinstance(declared, (list, tuple)):
            node_types = declared
        else:
            node_types = [declared]
        for node_type in node_types:
            counts[node_type] = counts.get(node_type, 0) + 1
    return DataFrame(counts.items(), columns=["type_", "count"]).sort_values(
        by=["count"], ascending=False
    )
def get_predicate_counts(klass, graph: NXGraph) -> DataFrame:
    """Tally ``predicate`` values across all edges of a networkx graph.

    The first positional slot absorbs the instance when this function is
    installed as a class attribute (``predicate_count_callable``); unused here.

    :param klass: ignored (binding slot, see above)
    :param graph: networkx graph whose edges carry a ``predicate`` attribute
    :return: DataFrame with columns ``predicate``/``count`` sorted by count descending
    :raises ValueError: if any edge lacks a truthy ``predicate`` attribute
    """
    tally = {}
    for src, dst, attrs in graph.edges(data=True):
        predicate = attrs.get("predicate")
        if not predicate:
            raise ValueError(f"Edge ({src}, {dst}) has no predicate attribute.")
        tally[predicate] = tally.get(predicate, 0) + 1
    return DataFrame(tally.items(), columns=["predicate", "count"]).sort_values(
        by=["count"], ascending=False
    )
class InteractiveViewer(W.GridspecLayout):
    """Graph visualization for viewing RDF graphs as LPGs. The InteractiveViewer
    provides a method for reducing the amount of displayed information through a
    multi-select widget for `rdf:type` and all predicates in the graph. Users
    can choose which types/edges they want to see, and the visualization will
    update the corresponding nodes/edges.
    Note: we use a separate RDF2NX converter to avoid issues with larger graphs
    TODO document how users can extend the count functions
    :param rdf_graph: the rdflib.graph.Graph to display
    :param allow_large_graphs: boolean flag to allow graphs over the allowed size
    :param type_count_callable: the function used to collect valid rdf:types
    :param predicate_count_callable: the function used to collect valid predicates
    """
    # traitlets: when True, palette/renderer limits are relaxed (see assign_css_classes)
    allow_large_graphs = T.Bool(default_value=False)
    # the source RDF graph; assigning it triggers update_rdf_graph below
    rdf_graph = T.Instance(RDFGraph, kw={})
    # multi-select widgets controlling which node types / edge predicates are shown
    type_selector = T.Instance(W.SelectMultiple)
    predicate_selector = T.Instance(W.SelectMultiple)
    viewer = T.Instance(CytoscapeViewer)
    json_output = T.Instance(W.Output)
    # plain functions installed as class attributes: calling through self binds
    # the instance into their first (ignored) parameter
    type_count_callable = get_type_counts
    predicate_count_callable = get_predicate_counts
    uri_to_string_type = {}  # map: rdf:type URI -> css class name (":" replaced by "-")
    iri_to_node = {}  # map: str(node IRI) -> cytoscape node instance
    _rdf_converter: RDF2NX = RDF2NX()
    _nx_graph: NXGraph = NXGraph()
    def __init__(self, n_rows=4, n_columns=5, **kwargs):
        """Initialize the underlying GridspecLayout (4x5 grid by default)."""
        super().__init__(n_rows=n_rows, n_columns=n_columns, **kwargs)
    def load_json(self, node):
        """Render the clicked cytoscape node's data as JSON in the output panel."""
        data = node["data"]
        # drop internal bookkeeping keys before display (mutates the incoming dict)
        data.pop("_label", None)
        data.pop("_attrs", None)
        with self.json_output:
            self.json_output.clear_output()
            display(JSON(data))
    def _ipython_display_(self, **kwargs):
        """Display the widget, then (re)apply the grid layout."""
        super()._ipython_display_(**kwargs)
        self._set_layout()
    def _set_layout(self):
        """Place the selectors, graph viewer, and JSON output within the grid."""
        layout = self.layout
        layout.height = "80vh"
        layout.width = "auto"
        # left column: the two multi-selects stacked with their labels
        self[0:3, :1] = W.VBox(
            [
                W.VBox(
                    [
                        W.Label("Types:"),
                        self.type_selector,
                        W.Label("Edges:"),
                        self.predicate_selector,
                    ]
                ),
            ]
        )
        self[0:3, 1:] = self.viewer
        self[3, 1:] = self.json_output
        # let each child stretch to its grid cell rather than a fixed size
        for widget in (
            self.type_selector,
            self.predicate_selector,
            self.viewer,
        ):
            widget.layout.height = "auto"
            widget.layout.width = "auto"
            widget.layout.min_height = None
            widget.layout.max_height = None
            widget.layout.max_width = None
            widget.layout.min_width = None
        self.layout = layout
    def assign_css_classes(self):
        """Build cytoscape style entries coloring nodes by their ``type_`` value.

        Raises ValueError when there are more types than distinct palette
        colors and ``allow_large_graphs`` is False.
        """
        # assign colors to css classes
        color_list = COLOR_LIST.copy()
        n_to_add = len(self.uri_to_string_type.keys()) - len(color_list)
        if n_to_add > 0 and self.allow_large_graphs:
            # pad the palette with white so zip() below does not truncate
            color_list.extend([(255, 255, 255)] * n_to_add)
        elif n_to_add > 0:
            raise ValueError(
                f"Cannot render more than {len(COLOR_LIST)} visually distinct colors."
            )
        # NOTE(review): "multi-type" is already a value of uri_to_string_type
        # (set in update_rdf_graph), so it may be zipped with two different
        # colors here — confirm whether that duplication is intended.
        color_type_map = list(
            zip(
                [*self.uri_to_string_type.values(), "multi-type"],
                get_color_list_css(color_list),
            )
        )
        # use css data attribute style to color based on type
        color_classes = []
        for class_name, rgb_code in color_type_map:
            color_classes.append(
                {
                    "selector": f"node[type_ = '{class_name}']",
                    "style": {
                        "background-color": f"{rgb_code}",
                    },
                }
            )
        return color_classes
    def update_iri_map(self):
        """Updates the internal mapping from IRI to cytoscape node instances."""
        # node_iri to node (for mapping to edges)
        # TODO is there a way to enhance/use the adjacency matrix?
        self.iri_to_node = {
            str(node.data["iri"]): node
            for node in self.viewer.cytoscape_widget.graph.nodes
        }
    def update_classes(self, change):
        """Updates the CSS classes for nodes/edges.
        TODO optimize so that we don't have to iterate through every node/edge
        """
        self.update_iri_map()
        # use selectors to determine visible nodes/edges
        visible_node_types = set(self.type_selector.value)
        visible_edge_types = set(self.predicate_selector.value)
        # set visibility for all nodes (only needed for node changes)
        try:
            # selector widgets carry a `type_` tag; fall back to updating both
            change_type = getattr(change.owner, "type_", "both")
        except AttributeError:
            # change=None (programmatic call) has no .owner
            change_type = "both"
        if change_type in {"node_type", "both"}:
            for node in self.viewer.cytoscape_widget.graph.nodes:
                raw_types = node.data["rdf:type"]
                types = raw_types if type(raw_types) is tuple else (raw_types,)
                # a node stays visible if ANY of its types is selected
                if not any([_type in visible_node_types for _type in types]):
                    node.classes = "invisible"
                else:
                    node.classes = ""
        # set visibility for all edges (needed for node and edge changes)
        for edge in self.viewer.cytoscape_widget.graph.edges:
            source_node = self.iri_to_node[edge.data["source"]]
            target_node = self.iri_to_node[edge.data["target"]]
            if edge.data["predicate"] not in visible_edge_types:
                edge.classes = "invisible"
            elif (
                "invisible" in source_node.classes or "invisible" in target_node.classes
            ):
                # hide edges touching a hidden endpoint
                edge.classes = "invisible"
            else:
                edge.classes = "directed"
        # update front-end (set_style must receive a copy)
        self.viewer.cytoscape_widget.set_style(
            list(self.viewer.cytoscape_widget.get_style())
        )
    def apply_node_styling(self, change):
        """Iterates through cytoscape nodes and sets the node data 'type_'
        based on the 'rdf:type'.
        """
        self.update_classes(change=None)
        # assign CSS classes to nodes based on their rdf:type
        # TODO add types instead of replacing once we figure out how to make partial matches of css classes in ipycytoscape
        for node in self.viewer.cytoscape_widget.graph.nodes:
            node_types = node.data.get("rdf:type", [])
            if type(node_types) == URIRef:
                # normalize a single URIRef to a 1-tuple
                node_types = (node_types,)
            if len(node_types) == 1:
                # assign specific class to node
                assert node_types[0] in self.uri_to_string_type
                css_class = self.uri_to_string_type[node_types[0]]
                node.data["type_"] = css_class
            else:
                # zero or multiple types share the generic "multi-type" class
                node.data["type_"] = "multi-type"
    @T.default("json_output")
    def _make_default_json_output(self):
        """Default factory for the JSON output panel."""
        widget = W.Output()
        # Prevent resizing the JSON output from changing other widgets
        widget.layout.overflow_y = "auto"
        widget.layout.width = "auto"
        return widget
    @T.default("viewer")
    def _make_default_viewer(self):
        """Default factory for the CytoscapeViewer, wired to this widget's callbacks."""
        widget = CytoscapeViewer()
        widget.allow_disconnected = True
        # Change networkx graph label (because we are converting from RDF)
        widget._nx_label = "rdfs:label"
        widget._render_large_graphs = self.allow_large_graphs
        widget.cytoscape_widget.on("node", "click", self.load_json)
        widget.allow_disc_check.disabled = True
        # When the cytoscape widget is updated, have to re-apply node/edge style
        widget.observe(self.apply_node_styling, "cytoscape_widget")
        return widget
    @T.default("type_selector")
    def _make_default_type_selector(self):
        """Default factory for the rdf:type multi-select."""
        widget = W.SelectMultiple()
        # set a type for the observer to read
        widget.type_ = "node_type"
        widget.observe(self.update_classes, "value")
        return widget
    @T.default("predicate_selector")
    def _make_default_predicate_selector(self):
        """Default factory for the predicate multi-select."""
        widget = W.SelectMultiple()
        # set a type for the observer to read
        widget.type_ = "predicate"
        widget.observe(self.update_classes, "value")
        return widget
    @T.validate("children")
    def validate_children(self, proposal):
        """
        Validate method for default children.
        This is necessary because @trt.default does not work on children.
        """
        children = proposal.value
        if not children:
            children = (
                W.HBox(
                    [
                        W.VBox(
                            [
                                W.Label("Types:"),
                                self.type_selector,
                                W.Label("Edges:"),
                                self.predicate_selector,
                            ]
                        ),
                        self.viewer,
                    ]
                ),
                self.json_output,
            )
        return children
    @T.observe("rdf_graph")
    def update_rdf_graph(self, change):
        """When the graph is changed, updates the corresponding widget elements.
        TODO break up method into smaller parts?
        TODO is it possible to only add types/predicates for the converted graph (e.g. post RDF2NX)
        """
        rdf_graph = change.new
        # Run the converter (let viewer do the post-processing)
        self._nx_graph = self._rdf_converter.convert(rdf_graph)
        # force update now TODO remove?
        self.viewer.graph = self._nx_graph
        # needed for type/predicate count methods
        self.update_iri_map()
        # run type and predicate counter functions
        type_count = self.type_count_callable(self._nx_graph)
        predicate_count = self.predicate_count_callable(self._nx_graph)
        # map type URIs to their css class name for ipycytoscape
        self.uri_to_string_type = {
            uri: str(CustomURIRef(uri, namespaces=rdf_graph.namespace_manager)).replace(
                ":", "-"
            )
            for uri in type_count.type_
        }
        self.uri_to_string_type["multi-type"] = "multi-type"
        # build options for the type MultiSelect
        select_options = []
        for uri, count in type_count.values:
            description = get_desc(uri, rdf_graph.namespace_manager, count)
            select_options.append((description, uri))
        # set options, value, and row counts
        self.type_selector.options = select_options
        self.type_selector.value = tuple(uri for _, uri in select_options)
        self.type_selector.rows = len(select_options) + 1
        # build options for the predicate MultiSelect
        select_options = []
        for uri, count in predicate_count.values:
            description = get_desc(uri, rdf_graph.namespace_manager, count)
            select_options.append((description, uri))
        # set options, value, and row counts
        self.predicate_selector.options = select_options
        self.predicate_selector.value = tuple(uri for _, uri in select_options)
        self.predicate_selector.rows = len(select_options) + 1
        color_classes = self.assign_css_classes()
        self.apply_node_styling(change=None)
        # change the cytoscape widget style
        old_style = list(self.viewer.cytoscape_widget.get_style())  # must be a copy!
        old_style.extend([*color_classes, style.INVISIBLE_NODE, style.INVISIBLE_EDGE])
        self.viewer.cytoscape_widget.set_style(old_style)
| 35.760766 | 123 | 0.603559 |
672e3ab8a1a6fee1351eaceaaba2bb202e67a285 | 910 | py | Python | backend/clubs/migrations/0065_auto_20201218_1830.py | pennlabs/penn-clubs | 6165e56ee5745295adc14fe114c4973173c2cb43 | [
"MIT"
] | 23 | 2020-01-15T20:11:06.000Z | 2022-01-01T12:47:50.000Z | backend/clubs/migrations/0065_auto_20201218_1830.py | pennlabs/penn-clubs | 6165e56ee5745295adc14fe114c4973173c2cb43 | [
"MIT"
] | 397 | 2020-01-17T03:42:30.000Z | 2022-03-07T23:37:16.000Z | backend/clubs/migrations/0065_auto_20201218_1830.py | pennlabs/penn-clubs | 6165e56ee5745295adc14fe114c4973173c2cb43 | [
"MIT"
] | 7 | 2020-01-29T05:11:38.000Z | 2022-01-03T19:41:59.000Z | # Generated by Django 3.1.4 on 2020-12-18 23:30
import uuid
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add ICS calendar-import fields: ``ics_import_url`` on club (and its
    historical table), and ``ics_uuid`` / ``is_ics_event`` on event."""
    dependencies = [
        ("clubs", "0064_auto_20201215_1904"),
    ]
    operations = [
        migrations.AddField(
            model_name="club",
            name="ics_import_url",
            field=models.URLField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name="event",
            name="ics_uuid",
            field=models.UUIDField(default=uuid.uuid4),
        ),
        migrations.AddField(
            model_name="event",
            name="is_ics_event",
            field=models.BooleanField(blank=True, default=False),
        ),
        migrations.AddField(
            model_name="historicalclub",
            name="ics_import_url",
            field=models.URLField(blank=True, null=True),
        ),
    ]
fd5f45ce54dbf36e6050a421f773bb06e628f2b4 | 10,721 | py | Python | packages/python/plotly/plotly/tests/test_optional/test_utils/test_utils.py | miriad/plotly.py | f083bea25691ff64a30008f46f77fc1edc11ad63 | [
"MIT"
] | 1 | 2020-08-08T21:56:11.000Z | 2020-08-08T21:56:11.000Z | packages/python/plotly/plotly/tests/test_optional/test_utils/test_utils.py | pythonthings/plotly.py | b770468d392bf89cbfc630e2c02bd54ec5db97a7 | [
"MIT"
] | null | null | null | packages/python/plotly/plotly/tests/test_optional/test_utils/test_utils.py | pythonthings/plotly.py | b770468d392bf89cbfc630e2c02bd54ec5db97a7 | [
"MIT"
] | null | null | null | """
Module to test plotly.utils with optional dependencies.
"""
from __future__ import absolute_import
import datetime
import math
import decimal
from datetime import datetime as dt
from unittest import TestCase
import numpy as np
import pandas as pd
import pytz
from nose.plugins.attrib import attr
from pandas.util.testing import assert_series_equal
import json as _json
from plotly import optional_imports, utils
from plotly.graph_objs import Scatter, Scatter3d, Figure, Data
# plotly.matplotlylib is optional; only pull in matplotlib and the exporter
# machinery when it is importable (guards the matplotlib-dependent test below).
matplotlylib = optional_imports.get_module("plotly.matplotlylib")
if matplotlylib:
    import matplotlib.pyplot as plt
    from plotly.matplotlylib import Exporter, PlotlyRenderer
## JSON encoding
# Module-level fixtures shared by the encoder tests below.
numeric_list = [1, 2, 3]
# numpy array mixing ints, NaN/Inf sentinels, and a datetime
np_list = np.array([1, 2, 3, np.NaN, np.NAN, np.Inf, dt(2014, 1, 5)])
mixed_list = [
    1,
    "A",
    dt(2014, 1, 5),
    dt(2014, 1, 5, 1, 1, 1),
    dt(2014, 1, 5, 1, 1, 1, 1),
]
dt_list = [dt(2014, 1, 5), dt(2014, 1, 5, 1, 1, 1), dt(2014, 1, 5, 1, 1, 1, 1)]
# single-column frame with int, datetime, NaT, NaN, and Inf entries
df = pd.DataFrame(
    columns=["col 1"], data=[1, 2, 3, dt(2014, 1, 5), pd.NaT, np.NaN, np.Inf]
)
# two hourly timestamps and a small series indexed by them
rng = pd.date_range("1/1/2011", periods=2, freq="H")
ts = pd.Series([1.5, 2.5], index=rng)
class TestJSONEncoder(TestCase):
    """Exercises utils.PlotlyJSONEncoder's per-type encode_as_* helpers and
    full json.dumps round-trips over datetimes, pandas and numpy objects."""
    def test_encode_as_plotly(self):
        """encode_as_plotly: NotEncodable without to_plotly_json, else delegates."""
        # should *fail* when object doesn't have `to_plotly_json` attribute
        objs_without_attr = [1, "one", set(["a", "set"]), {"a": "dict"}, ["a", "list"]]
        for obj in objs_without_attr:
            self.assertRaises(
                utils.NotEncodable, utils.PlotlyJSONEncoder.encode_as_plotly, obj
            )
        # should return without exception when obj has `to_plotly_json` attr
        expected_res = "wedidit"
        class ObjWithAttr(object):
            def to_plotly_json(self):
                return expected_res
        res = utils.PlotlyJSONEncoder.encode_as_plotly(ObjWithAttr())
        self.assertEqual(res, expected_res)
    def test_encode_as_list(self):
        """encode_as_list: NotEncodable without tolist, else uses tolist()."""
        # should *fail* when object doesn't have `tolist` method
        objs_without_attr = [1, "one", set(["a", "set"]), {"a": "dict"}, ["a", "list"]]
        for obj in objs_without_attr:
            self.assertRaises(
                utils.NotEncodable, utils.PlotlyJSONEncoder.encode_as_list, obj
            )
        # should return without exception when obj has `tolist` attr
        expected_res = ["some", "list"]
        class ObjWithAttr(object):
            def tolist(self):
                return expected_res
        res = utils.PlotlyJSONEncoder.encode_as_list(ObjWithAttr())
        self.assertEqual(res, expected_res)
    def test_encode_as_pandas(self):
        """encode_as_pandas: only pandas sentinels (NaT) are encodable."""
        # should *fail* on things that are not specific pandas objects
        not_pandas = ["giraffe", 6, float("nan"), ["a", "list"]]
        for obj in not_pandas:
            self.assertRaises(
                utils.NotEncodable, utils.PlotlyJSONEncoder.encode_as_pandas, obj
            )
        # should succeed when we've got specific pandas thingies
        res = utils.PlotlyJSONEncoder.encode_as_pandas(pd.NaT)
        self.assertTrue(res is None)
    def test_encode_as_numpy(self):
        """encode_as_numpy: only numpy sentinels (masked) are encodable."""
        # should *fail* on non-numpy-y things
        not_numpy = ["hippo", 8, float("nan"), {"a": "dict"}]
        for obj in not_numpy:
            self.assertRaises(
                utils.NotEncodable, utils.PlotlyJSONEncoder.encode_as_numpy, obj
            )
        # should succeed with numpy-y-thingies
        res = utils.PlotlyJSONEncoder.encode_as_numpy(np.ma.core.masked)
        self.assertTrue(math.isnan(res))
    def test_encode_as_datetime(self):
        """encode_as_datetime: plain datetime renders as ISO 8601."""
        # should succeed with 'utcoffset', 'isoformat' and '__sub__' attrs
        res = utils.PlotlyJSONEncoder.encode_as_datetime(datetime.datetime(2013, 10, 1))
        self.assertEqual(res, "2013-10-01T00:00:00")
    def test_encode_as_datetime_with_microsecond(self):
        """encode_as_datetime: microseconds only appear when non-zero."""
        # should not include extraneous microsecond info if DNE
        res = utils.PlotlyJSONEncoder.encode_as_datetime(
            datetime.datetime(2013, 10, 1, microsecond=0)
        )
        self.assertEqual(res, "2013-10-01T00:00:00")
        # should include microsecond info if present
        res = utils.PlotlyJSONEncoder.encode_as_datetime(
            datetime.datetime(2013, 10, 1, microsecond=10)
        )
        self.assertEqual(res, "2013-10-01T00:00:00.000010")
    def test_encode_as_datetime_with_localized_tz(self):
        """encode_as_datetime: tz-aware datetimes keep their UTC offset."""
        # should convert tzinfo to utc. Note that in october, we're in EDT!
        # therefore the 4 hour difference is correct.
        naive_datetime = datetime.datetime(2013, 10, 1)
        aware_datetime = pytz.timezone("US/Eastern").localize(naive_datetime)
        res = utils.PlotlyJSONEncoder.encode_as_datetime(aware_datetime)
        self.assertEqual(res, "2013-10-01T00:00:00-04:00")
    def test_encode_as_date(self):
        """encode_as_date: dates and naive datetimes; NotEncodable otherwise."""
        # should *fail* without 'utcoffset' and 'isoformat' and '__sub__' attrs
        non_datetimes = ["noon", 56, "00:00:00"]
        for obj in non_datetimes:
            self.assertRaises(
                utils.NotEncodable, utils.PlotlyJSONEncoder.encode_as_date, obj
            )
        # should work with a date
        a_date = datetime.date(2013, 10, 1)
        res = utils.PlotlyJSONEncoder.encode_as_date(a_date)
        self.assertEqual(res, "2013-10-01")
        # should also work with a date time without a utc offset!
        res = utils.PlotlyJSONEncoder.encode_as_date(
            datetime.datetime(2013, 10, 1, microsecond=10)
        )
        self.assertEqual(res, "2013-10-01 00:00:00.000010")
    def test_encode_as_decimal(self):
        """encode_as_decimal: Decimal becomes an equivalent float."""
        # should work with decimal values
        res = utils.PlotlyJSONEncoder.encode_as_decimal(decimal.Decimal(1.023452))
        self.assertAlmostEqual(res, 1.023452) # Checks upto 7 decimal places
        self.assertIsInstance(res, float)
    def test_figure_json_encoding(self):
        """Full figure serialization, and that the inputs are not mutated."""
        df = pd.DataFrame(columns=["col 1"], data=[1, 2, 3])
        s1 = Scatter3d(x=numeric_list, y=np_list, z=mixed_list)
        s2 = Scatter(x=df["col 1"])
        data = Data([s1, s2])
        figure = Figure(data=data)
        js1 = _json.dumps(s1, cls=utils.PlotlyJSONEncoder, sort_keys=True)
        js2 = _json.dumps(s2, cls=utils.PlotlyJSONEncoder, sort_keys=True)
        assert (
            js1 == '{"type": "scatter3d", "x": [1, 2, 3], '
            '"y": [1, 2, 3, null, null, null, "2014-01-05T00:00:00"], '
            '"z": [1, "A", "2014-01-05T00:00:00", '
            '"2014-01-05T01:01:01", "2014-01-05T01:01:01.000001"]}'
        )
        assert js2 == '{"type": "scatter", "x": [1, 2, 3]}'
        # Test JSON encoding works
        _json.dumps(data, cls=utils.PlotlyJSONEncoder, sort_keys=True)
        _json.dumps(figure, cls=utils.PlotlyJSONEncoder, sort_keys=True)
        # Test data wasn't mutated
        np_array = np.array([1, 2, 3, np.NaN, np.NAN, np.Inf, dt(2014, 1, 5)])
        for k in range(len(np_array)):
            if k in [3, 4]:
                # check NaN
                assert np.isnan(np_list[k]) and np.isnan(np_array[k])
            else:
                # non-NaN
                assert np_list[k] == np_array[k]
        assert set(data[0]["z"]) == set(
            [
                1,
                "A",
                dt(2014, 1, 5),
                dt(2014, 1, 5, 1, 1, 1),
                dt(2014, 1, 5, 1, 1, 1, 1),
            ]
        )
    def test_datetime_json_encoding(self):
        """Datetimes serialize to ISO strings, alone and inside containers."""
        j1 = _json.dumps(dt_list, cls=utils.PlotlyJSONEncoder)
        assert (
            j1 == '["2014-01-05T00:00:00", '
            '"2014-01-05T01:01:01", '
            '"2014-01-05T01:01:01.000001"]'
        )
        j2 = _json.dumps({"x": dt_list}, cls=utils.PlotlyJSONEncoder)
        assert (
            j2 == '{"x": ["2014-01-05T00:00:00", '
            '"2014-01-05T01:01:01", '
            '"2014-01-05T01:01:01.000001"]}'
        )
    def test_pandas_json_encoding(self):
        """Series/Index/NaT serialization, and that the fixtures stay intact."""
        j1 = _json.dumps(df["col 1"], cls=utils.PlotlyJSONEncoder)
        print (j1)
        print ("\n")
        assert j1 == '[1, 2, 3, "2014-01-05T00:00:00", null, null, null]'
        # Test that data wasn't mutated
        assert_series_equal(
            df["col 1"],
            pd.Series([1, 2, 3, dt(2014, 1, 5), pd.NaT, np.NaN, np.Inf], name="col 1"),
        )
        j2 = _json.dumps(df.index, cls=utils.PlotlyJSONEncoder)
        assert j2 == "[0, 1, 2, 3, 4, 5, 6]"
        nat = [pd.NaT]
        j3 = _json.dumps(nat, cls=utils.PlotlyJSONEncoder)
        assert j3 == "[null]"
        assert nat[0] is pd.NaT
        j4 = _json.dumps(rng, cls=utils.PlotlyJSONEncoder)
        assert j4 == '["2011-01-01T00:00:00", "2011-01-01T01:00:00"]'
        j5 = _json.dumps(ts, cls=utils.PlotlyJSONEncoder)
        assert j5 == "[1.5, 2.5]"
        assert_series_equal(ts, pd.Series([1.5, 2.5], index=rng))
        j6 = _json.dumps(ts.index, cls=utils.PlotlyJSONEncoder)
        assert j6 == '["2011-01-01T00:00:00", "2011-01-01T01:00:00"]'
    def test_numpy_masked_json_encoding(self):
        """numpy masked values serialize to null."""
        l = [1, 2, np.ma.core.masked]
        j1 = _json.dumps(l, cls=utils.PlotlyJSONEncoder)
        print (j1)
        assert j1 == "[1, 2, null]"
    def test_numpy_dates(self):
        """numpy datetime64 arrays serialize to date strings."""
        a = np.arange(np.datetime64("2011-07-11"), np.datetime64("2011-07-18"))
        j1 = _json.dumps(a, cls=utils.PlotlyJSONEncoder)
        assert (
            j1 == '["2011-07-11", "2011-07-12", "2011-07-13", '
            '"2011-07-14", "2011-07-15", "2011-07-16", '
            '"2011-07-17"]'
        )
    def test_datetime_dot_date(self):
        """datetime.date values serialize to YYYY-MM-DD strings."""
        a = [datetime.date(2014, 1, 1), datetime.date(2014, 1, 2)]
        j1 = _json.dumps(a, cls=utils.PlotlyJSONEncoder)
        assert j1 == '["2014-01-01", "2014-01-02"]'
# Only defined when plotly.matplotlylib (and therefore matplotlib) is available.
if matplotlylib:
    @attr("matplotlib")
    def test_masked_constants_example():
        """Exported matplotlib figures keep None for masked/NaN y-values."""
        # example from: https://gist.github.com/tschaume/d123d56bf586276adb98
        data = {
            "esN": [0, 1, 2, 3],
            "ewe_is0": [-398.11901997, -398.11902774, -398.11897111, -398.11882215],
            "ewe_is1": [-398.11793027, -398.11792966, -398.11786308, None],
            "ewe_is2": [-398.11397008, -398.11396421, None, None],
        }
        df = pd.DataFrame.from_dict(data)
        plotopts = {"x": "esN"}
        fig, ax = plt.subplots(1, 1)
        df.plot(ax=ax, **plotopts)
        renderer = PlotlyRenderer()
        Exporter(renderer).run(fig)
        # whole-figure dump must not raise
        _json.dumps(renderer.plotly_fig, cls=utils.PlotlyJSONEncoder)
        jy = _json.dumps(
            renderer.plotly_fig["data"][1]["y"], cls=utils.PlotlyJSONEncoder
        )
        print (jy)
        array = _json.loads(jy)
        assert array == [-398.11793027, -398.11792966, -398.11786308, None]
| 35.035948 | 88 | 0.594348 |
c051411636fc1556fa881099adaa7e199ce80376 | 525 | py | Python | src/action/migrations/0016_auto_20180522_2146.py | japesone/ontask_b | 17af441f9893c521d2e14011e7790ba4077e3318 | [
"MIT"
] | 3 | 2018-08-24T10:48:40.000Z | 2020-05-29T06:33:23.000Z | src/action/migrations/0016_auto_20180522_2146.py | japesone/ontask_b | 17af441f9893c521d2e14011e7790ba4077e3318 | [
"MIT"
] | null | null | null | src/action/migrations/0016_auto_20180522_2146.py | japesone/ontask_b | 17af441f9893c521d2e14011e7790ba4077e3318 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-05-22 12:16
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Clear the ``condition`` model's Meta options and drop its
    ``unique_together`` constraint."""
    dependencies = [
        ('action', '0015_condition_n_rows_selected_update'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='condition',
            options={},
        ),
        migrations.AlterUniqueTogether(
            name='condition',
            unique_together=set([]),
        ),
    ]
| 21.875 | 60 | 0.6 |
d4b0359d949225a3d7c1aa0294cc6d5e19dc1fa7 | 10,718 | py | Python | tests/contrib/utils/base_gcp_system_test_case.py | FlyrInc/airflow-1 | 74b22337b45a1eb25585d52e35694e6b0eb81f03 | [
"Apache-2.0"
] | 2 | 2019-01-26T06:04:11.000Z | 2019-01-26T12:54:21.000Z | tests/contrib/utils/base_gcp_system_test_case.py | FlyrInc/airflow-1 | 74b22337b45a1eb25585d52e35694e6b0eb81f03 | [
"Apache-2.0"
] | 1 | 2019-02-19T01:15:12.000Z | 2019-02-19T01:15:12.000Z | tests/contrib/utils/base_gcp_system_test_case.py | FlyrInc/airflow-1 | 74b22337b45a1eb25585d52e35694e6b0eb81f03 | [
"Apache-2.0"
] | 2 | 2020-04-24T10:51:17.000Z | 2020-05-26T01:50:29.000Z | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import subprocess
import unittest
from glob import glob
from shutil import move
from tempfile import mkdtemp
from airflow.utils import db as db_utils
from airflow import models, AirflowException, LoggingMixin
from airflow.utils.timezone import datetime
from tests.contrib.utils.gcp_authenticator import GcpAuthenticator
from tests.contrib.utils.run_once_decorator import run_once
# Root of the airflow source tree (three levels up from this file).
AIRFLOW_MAIN_FOLDER = os.path.realpath(os.path.join(
    os.path.dirname(os.path.realpath(__file__)),
    os.pardir, os.pardir, os.pardir))
# Directory three levels above the airflow tree, expected to hold the
# environment-variable retriever script used by RetrieveVariables below.
AIRFLOW_PARENT_FOLDER = os.path.realpath(os.path.join(AIRFLOW_MAIN_FOLDER,
                                                      os.pardir, os.pardir, os.pardir))
ENV_FILE_RETRIEVER = os.path.join(AIRFLOW_PARENT_FOLDER,
                                  "get_system_test_environment_variables.py")
# Retrieve environment variables from parent directory retriever - it should be
# in the path ${AIRFLOW_SOURCE_DIR}/../../get_system_test_environment_variables.py
# and it should print all the variables in form of key=value to the stdout
class RetrieveVariables:
    """Loads system-test environment variables at module import time."""
    @staticmethod
    @run_once
    def retrieve_variables():
        """Run the external retriever script once and copy its KEY=VALUE stdout
        lines into os.environ. Silently does nothing when the script is absent.
        """
        if os.path.isfile(ENV_FILE_RETRIEVER):
            if os.environ.get('AIRFLOW__CORE__UNIT_TEST_MODE'):
                raise Exception("Please unset the AIRFLOW__CORE__UNIT_TEST_MODE")
            variables = subprocess.check_output([ENV_FILE_RETRIEVER]).decode("utf-8")
            print("Applying variables retrieved")
            for line in variables.split("\n"):
                try:
                    variable, key = line.split("=")
                except ValueError:
                    # skip lines without exactly one '=' separator
                    continue
                print("{}={}".format(variable, key))
                os.environ[variable] = key
# Executed at import time so all system tests see the retrieved variables.
RetrieveVariables.retrieve_variables()
DEFAULT_DATE = datetime(2015, 1, 1)
CONTRIB_OPERATORS_EXAMPLES_DAG_FOLDER = os.path.join(
AIRFLOW_MAIN_FOLDER, "airflow", "contrib", "example_dags")
OPERATORS_EXAMPLES_DAG_FOLDER = os.path.join(
AIRFLOW_MAIN_FOLDER, "airflow", "example_dags")
AIRFLOW_HOME = os.environ.get('AIRFLOW_HOME',
os.path.join(os.path.expanduser('~'), 'airflow'))
DAG_FOLDER = os.path.join(AIRFLOW_HOME, "dags")
SKIP_TEST_WARNING = """
The test is only run when the test is run in with GCP-system-tests enabled
environment. You can enable it in one of two ways:
* Set GCP_CONFIG_DIR environment variable to point to the GCP configuration
directory which keeps variables.env file with environment variables to set
and keys directory which keeps service account keys in .json format
* Run this test within automated environment variable workspace where
config directory is checked out next to the airflow one.
""".format(__file__)
class BaseGcpSystemTestCase(unittest.TestCase, LoggingMixin):
    """Base class for GCP system tests: wraps each test with
    store/authenticate/revoke/restore of GCP service-account credentials."""
    def __init__(self,
                 method_name,
                 gcp_key,
                 project_extra=None):
        super(BaseGcpSystemTestCase, self).__init__(methodName=method_name)
        self.gcp_authenticator = GcpAuthenticator(gcp_key=gcp_key,
                                                  project_extra=project_extra)
        # lets subclasses verify that super().setUp() was actually called
        self.setup_called = False
    @staticmethod
    def skip_check(key_name):
        """Return True when no key file exists for key_name (test should skip)."""
        return GcpAuthenticator(key_name).full_key_path is None
    def setUp(self):
        self.gcp_authenticator.gcp_store_authentication()
        self.gcp_authenticator.gcp_authenticate()
        # We checked that authentication works. Next we revoke it to make
        # sure we are not relying on the default authentication
        self.gcp_authenticator.gcp_revoke_authentication()
        self.setup_called = True
    # noinspection PyPep8Naming
    def tearDown(self):
        self.gcp_authenticator.gcp_restore_authentication()
class DagGcpSystemTestCase(BaseGcpSystemTestCase):
def __init__(self,
method_name,
dag_id,
gcp_key,
dag_name=None,
require_local_executor=False,
example_dags_folder=CONTRIB_OPERATORS_EXAMPLES_DAG_FOLDER,
project_extra=None):
super(DagGcpSystemTestCase, self).__init__(method_name=method_name,
gcp_key=gcp_key,
project_extra=project_extra)
self.dag_id = dag_id
self.dag_name = self.dag_id + '.py' if not dag_name else dag_name
self.example_dags_folder = example_dags_folder
self.require_local_executor = require_local_executor
self.temp_dir = None
@staticmethod
def _get_dag_folder():
return DAG_FOLDER
@staticmethod
def _get_files_to_link(path):
"""
Returns all file names (note - file names not paths)
that have the same base name as the .py dag file (for example dag_name.sql etc.)
:param path: path to the dag file.
:return: list of files matching the base name
"""
prefix, ext = os.path.splitext(path)
assert ext == '.py', "Dag name should be a .py file and is {} file".format(ext)
files_to_link = []
for file in glob(prefix + ".*"):
files_to_link.append(os.path.basename(file))
return files_to_link
def _symlink_dag_and_associated_files(self, remove=False):
target_folder = self._get_dag_folder()
source_path = os.path.join(self.example_dags_folder, self.dag_name)
for file_name in self._get_files_to_link(source_path):
source_path = os.path.join(self.example_dags_folder, file_name)
target_path = os.path.join(target_folder, file_name)
if remove:
try:
self.log.info("Remove symlink: {} -> {} ".format(
target_path, source_path))
os.remove(target_path)
except OSError:
pass
else:
if not os.path.exists(target_path):
self.log.info("Symlink: {} -> {} ".format(target_path, source_path))
os.symlink(source_path, target_path)
else:
self.log.info("Symlink {} already exists. Not symlinking it.".
format(target_path))
def _store_dags_to_temporary_directory(self):
dag_folder = self._get_dag_folder()
self.temp_dir = mkdtemp()
self.log.info("Storing DAGS from {} to temporary directory {}".
format(dag_folder, self.temp_dir))
try:
os.mkdir(dag_folder)
except OSError:
pass
for file in os.listdir(dag_folder):
move(os.path.join(dag_folder, file), os.path.join(self.temp_dir, file))
def _restore_dags_from_temporary_directory(self):
dag_folder = self._get_dag_folder()
self.log.info("Restoring DAGS to {} from temporary directory {}"
.format(dag_folder, self.temp_dir))
for file in os.listdir(self.temp_dir):
move(os.path.join(self.temp_dir, file), os.path.join(dag_folder, file))
def _run_dag(self):
self.log.info("Attempting to run DAG: {}".format(self.dag_id))
if not self.setup_called:
raise AirflowException("Please make sure to call super.setUp() in your "
"test class!")
dag_folder = self._get_dag_folder()
dag_bag = models.DagBag(dag_folder=dag_folder, include_examples=False)
self.args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
dag = dag_bag.get_dag(self.dag_id)
if dag is None:
raise AirflowException(
"The Dag {} could not be found. It's either an import problem or "
"the dag {} was not symlinked to the DAGs folder. "
"The content of the {} folder is {}".
format(self.dag_id,
self.dag_id + ".py",
dag_folder,
os.listdir(dag_folder)))
dag.clear(reset_dag_runs=True)
dag.run(ignore_first_depends_on_past=True, verbose=True)
@staticmethod
def _check_local_executor_setup():
postgres_path = os.path.realpath(os.path.join(
AIRFLOW_MAIN_FOLDER,
"tests", "contrib", "operators", "postgres_local_executor.cfg"))
if postgres_path != os.environ.get('AIRFLOW_CONFIG'):
raise AirflowException(
"""
Please set AIRFLOW_CONFIG variable to '{}'
and make sure you have a Postgres server running locally and
airflow/airflow.db database created.
You can create the database via these commands:
'createuser root'
'createdb airflow/airflow.db`
""".format(postgres_path))
# noinspection PyPep8Naming
def setUp(self):
if self.require_local_executor:
self._check_local_executor_setup()
try:
# We want to avoid random errors while database got reset - those
# Are apparently triggered by parser trying to parse DAGs while
# The tables are dropped. We move the dags temporarily out of the dags folder
# and move them back after reset
self._store_dags_to_temporary_directory()
try:
db_utils.upgradedb()
db_utils.resetdb()
finally:
self._restore_dags_from_temporary_directory()
self._symlink_dag_and_associated_files()
super(DagGcpSystemTestCase, self).setUp()
except Exception as e:
# In case of any error during setup - restore the authentication
self.gcp_authenticator.gcp_restore_authentication()
raise e
    def tearDown(self):
        """Remove the DAG symlinks created in setUp, then run base teardown."""
        self._symlink_dag_and_associated_files(remove=True)
        super(DagGcpSystemTestCase, self).tearDown()
| 41.065134 | 89 | 0.64387 |
c7fa4e3ad3ae8075ddf7bf92b09c6d1045af716f | 1,242 | py | Python | xldlib/definitions/modules.py | Alexhuszagh/XLDiscoverer | 60937b1f7f2e23af4219eb26519d6b83fb4232d6 | [
"Apache-2.0",
"MIT"
] | null | null | null | xldlib/definitions/modules.py | Alexhuszagh/XLDiscoverer | 60937b1f7f2e23af4219eb26519d6b83fb4232d6 | [
"Apache-2.0",
"MIT"
] | null | null | null | xldlib/definitions/modules.py | Alexhuszagh/XLDiscoverer | 60937b1f7f2e23af4219eb26519d6b83fb4232d6 | [
"Apache-2.0",
"MIT"
] | null | null | null | '''
Definitions/modules
___________________
Simplifies imports between Python2 and 3 by specifying module
imports for specific types.
:copyright: (c) 2015 The Regents of the University of California.
:license: GNU GPL, see licenses/GNU GPLv3.txt for more details.
'''
# load modules/submodules
import six

try:
    # re2 has superior performance generally speaking to re, however,
    # this is somewhat limited in a few cases
    # Performance tests:
    #   time python -c "import re2; re2.match(\"^(a?){25}(a){25}$\", 'a'*25)"
    #   0m0.010s
    #   time python -c "import re; re.match(\"^(a?){25}(a){25}$\", 'a'*25)"
    #   0m10.206s
    # Or, re2 is 1000x faster due to the exponential growth in Python SREs
    # The DFAs in re2 have a linear or polynomial expansion, making them
    # much faster.
    import re2 as re
    # set this for proper pattern checking, to test if compiled
    re._pattern_type = re.Pattern
except (NameError, ImportError):
    # basestring isn't properly defined on re2
    import re

# load objects/functions
# Select pickle and the http client by interpreter version so downstream code
# can use a single name on both Python 2 and Python 3.
if six.PY2:
    import cPickle as pickle
    import httplib
else:
    import pickle
    import http.client as httplib

# Public re-exports of the version-normalized modules chosen above.
__all__ = [
    're',
    'pickle',
    'httplib'
]
| 24.352941 | 75 | 0.668277 |
3c192923f719474b74af0e068cf308915dd6ba8e | 13,737 | py | Python | tensorflow_probability/python/distributions/pareto_test.py | mjul/tensorflow-probability | c733f06bccceb983f3e9db8e6e3c98b3bd4d23c9 | [
"Apache-2.0"
] | 1 | 2020-04-13T12:31:12.000Z | 2020-04-13T12:31:12.000Z | tensorflow_probability/python/distributions/pareto_test.py | mjul/tensorflow-probability | c733f06bccceb983f3e9db8e6e3c98b3bd4d23c9 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/distributions/pareto_test.py | mjul/tensorflow-probability | c733f06bccceb983f3e9db8e6e3c98b3bd4d23c9 | [
"Apache-2.0"
] | 1 | 2020-06-04T23:26:31.000Z | 2020-06-04T23:26:31.000Z | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from scipy import stats
import tensorflow.compat.v1 as tf1
import tensorflow.compat.v2 as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import test_util
tfd = tfp.distributions
@test_util.test_all_tf_execution_regimes
class ParetoTest(test_util.TestCase):
  """Tests for `tfd.Pareto` validated against `scipy.stats.pareto`."""

  def _scipy_pareto(self, concentration, scale):
    # In scipy pareto is defined with scale = 1, so we need to scale.
    return stats.pareto(concentration, scale=scale)

  def testParetoShape(self):
    scale = tf.constant([2.] * 5)
    concentration = tf.constant([2.] * 5)
    pareto = tfd.Pareto(concentration, scale, validate_args=True)

    self.assertEqual(self.evaluate(pareto.batch_shape_tensor()), (5,))
    self.assertEqual(pareto.batch_shape, tf.TensorShape([5]))
    self.assertAllEqual(self.evaluate(pareto.event_shape_tensor()), [])
    self.assertEqual(pareto.event_shape, tf.TensorShape([]))

  def testParetoShapeBroadcast(self):
    # Batch shape should broadcast to (3, 2) from (1, 2) x (3, 1) params.
    scale = tf.constant([[3., 2.]])
    concentration = tf.constant([[4.], [5.], [6.]])

    pareto = tfd.Pareto(concentration, scale, validate_args=True)

    self.assertAllEqual(self.evaluate(pareto.batch_shape_tensor()), (3, 2))
    self.assertAllEqual(pareto.batch_shape, tf.TensorShape([3, 2]))
    self.assertAllEqual(self.evaluate(pareto.event_shape_tensor()), [])
    self.assertEqual(pareto.event_shape, tf.TensorShape([]))

  def testInvalidScale(self):
    invalid_scales = [-.01, 0., -2.]
    concentration = 3.
    for scale in invalid_scales:
      with self.assertRaisesOpError('`scale` must be positive'):
        pareto = tfd.Pareto(concentration, scale, validate_args=True)
        self.evaluate(pareto.mean())

  def testInvalidConcentration(self):
    scale = 1.
    invalid_concentrations = [-.01, 0., -2.]
    for concentration in invalid_concentrations:
      with self.assertRaisesOpError('`concentration` must be positive'):
        pareto = tfd.Pareto(concentration, scale, validate_args=True)
        self.evaluate(pareto.mean())

  def testParetoLogPdf(self):
    batch_size = 6
    scale = tf.constant([3.] * batch_size)
    scale_v = 3.
    concentration = tf.constant([2.])
    concentration_v = 2.
    x = [3., 3.1, 4., 5., 6., 7.]
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    log_prob = pareto.log_prob(x)
    self.assertEqual(log_prob.shape, (6,))
    self.assertAllClose(
        self.evaluate(log_prob),
        self._scipy_pareto(concentration_v, scale_v).logpdf(x))

    pdf = pareto.prob(x)
    self.assertEqual(pdf.shape, (6,))
    self.assertAllClose(
        self.evaluate(pdf),
        self._scipy_pareto(concentration_v, scale_v).pdf(x))

  def testParetoLogPdfValidateArgs(self):
    batch_size = 3
    scale = tf.constant([2., 3., 4.])
    concentration = tf.constant([2.] * batch_size)
    pareto = tfd.Pareto(concentration, scale, validate_args=True)

    # Each case has at least one x value below its scale, which is outside
    # the Pareto support and must trigger a validation error.
    with self.assertRaisesOpError('not in the support'):
      x = tf1.placeholder_with_default([2., 3., 3.], shape=[3])
      log_prob = pareto.log_prob(x)
      self.evaluate(log_prob)

    with self.assertRaisesOpError('not in the support'):
      x = tf1.placeholder_with_default([2., 2., 5.], shape=[3])
      log_prob = pareto.log_prob(x)
      self.evaluate(log_prob)

    with self.assertRaisesOpError('not in the support'):
      x = tf1.placeholder_with_default([1., 3., 5.], shape=[3])
      log_prob = pareto.log_prob(x)
      self.evaluate(log_prob)

  def testParetoLogPdfMultidimensional(self):
    batch_size = 6
    scale = tf.constant([[2., 4., 5.]] * batch_size)
    scale_v = [2., 4., 5.]
    concentration = tf.constant([[1.]] * batch_size)
    concentration_v = 1.

    x = np.array([[6., 7., 9.2, 5., 6., 7.]], dtype=np.float32).T

    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    log_prob = pareto.log_prob(x)
    self.assertEqual(log_prob.shape, (6, 3))
    self.assertAllClose(
        self.evaluate(log_prob),
        self._scipy_pareto(concentration_v, scale_v).logpdf(x))

    prob = pareto.prob(x)
    self.assertEqual(prob.shape, (6, 3))
    self.assertAllClose(
        self.evaluate(prob),
        self._scipy_pareto(concentration_v, scale_v).pdf(x))

  def testParetoLogCdf(self):
    batch_size = 6
    scale = tf.constant([3.] * batch_size)
    scale_v = 3.
    concentration = tf.constant([2.])
    concentration_v = 2.
    x = [3., 3.1, 4., 5., 6., 7.]
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    log_cdf = pareto.log_cdf(x)
    self.assertEqual(log_cdf.shape, (6,))
    self.assertAllClose(
        self.evaluate(log_cdf),
        self._scipy_pareto(concentration_v, scale_v).logcdf(x))

    cdf = pareto.cdf(x)
    self.assertEqual(cdf.shape, (6,))
    self.assertAllClose(
        self.evaluate(cdf),
        self._scipy_pareto(concentration_v, scale_v).cdf(x))

  def testParetoLogCdfMultidimensional(self):
    batch_size = 6
    scale = tf.constant([[2., 4., 5.]] * batch_size)
    scale_v = [2., 4., 5.]
    concentration = tf.constant([[1.]] * batch_size)
    concentration_v = 1.

    x = np.array([[6., 7., 9.2, 5., 6., 7.]], dtype=np.float32).T

    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    log_cdf = pareto.log_cdf(x)
    self.assertEqual(log_cdf.shape, (6, 3))
    self.assertAllClose(
        self.evaluate(log_cdf),
        self._scipy_pareto(concentration_v, scale_v).logcdf(x))

    cdf = pareto.cdf(x)
    self.assertEqual(cdf.shape, (6, 3))
    self.assertAllClose(
        self.evaluate(cdf),
        self._scipy_pareto(concentration_v, scale_v).cdf(x))

  def testParetoPDFGradientZeroOutsideSupport(self):
    scale = tf.constant(1.)
    concentration = tf.constant(3.)
    # Check the gradient on the undefined portion.
    x = scale - 1
    self.assertAlmostEqual(
        self.evaluate(
            tfp.math.value_and_gradient(
                tfd.Pareto(concentration, scale, validate_args=False).prob,
                x)[1]), 0.)

  def testParetoCDFGradientZeroOutsideSupport(self):
    scale = tf.constant(1.)
    concentration = tf.constant(3.)
    # Check the gradient on the undefined portion.
    x = scale - 1
    self.assertAlmostEqual(
        self.evaluate(
            tfp.math.value_and_gradient(
                tfd.Pareto(concentration, scale, validate_args=False).cdf,
                x)[1]), 0.)

  def testParetoMean(self):
    scale = [1.4, 2., 2.5]
    concentration = [2., 3., 2.5]
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    self.assertEqual(pareto.mean().shape, (3,))
    self.assertAllClose(
        self.evaluate(pareto.mean()),
        self._scipy_pareto(concentration, scale).mean())

  def testParetoMeanInf(self):
    # The mean is infinite when concentration <= 1.
    scale = [1.4, 2., 2.5]
    concentration = [0.4, 0.9, 0.99]
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    self.assertEqual(pareto.mean().shape, (3,))

    self.assertTrue(
        np.all(np.isinf(self.evaluate(pareto.mean()))))

  def testParetoVariance(self):
    scale = [1.4, 2., 2.5]
    concentration = [2., 3., 2.5]
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    self.assertEqual(pareto.variance().shape, (3,))
    self.assertAllClose(
        self.evaluate(pareto.variance()),
        self._scipy_pareto(concentration, scale).var())

  def testParetoVarianceInf(self):
    # The variance is infinite when concentration <= 2.
    scale = [1.4, 2., 2.5]
    concentration = [0.4, 0.9, 0.99]
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    self.assertEqual(pareto.variance().shape, (3,))
    self.assertTrue(
        np.all(np.isinf(self.evaluate(pareto.variance()))))

  def testParetoStd(self):
    scale = [1.4, 2., 2.5]
    concentration = [2., 3., 2.5]
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    self.assertEqual(pareto.stddev().shape, (3,))
    self.assertAllClose(
        self.evaluate(pareto.stddev()),
        self._scipy_pareto(concentration, scale).std())

  def testParetoMode(self):
    # The mode of a Pareto distribution is its scale.
    scale = [0.4, 1.4, 2., 2.5]
    concentration = [1., 2., 3., 2.5]
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    self.assertEqual(pareto.mode().shape, (4,))
    self.assertAllClose(self.evaluate(pareto.mode()), scale)

  def testParetoSampleMean(self):
    scale = 4.
    concentration = 3.
    n = int(100e3)
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    samples = pareto.sample(n, seed=test_util.test_seed())
    sample_values = self.evaluate(samples)
    self.assertEqual(samples.shape, (n,))
    self.assertEqual(sample_values.shape, (n,))
    self.assertAllClose(
        sample_values.mean(),
        self._scipy_pareto(concentration, scale).mean(),
        rtol=.01,
        atol=0)

  def testParetoSampleVariance(self):
    scale = 1.
    concentration = 3.
    n = int(6e5)
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    samples = pareto.sample(
        n, seed=test_util.test_seed(hardcoded_seed=123456))
    sample_values = self.evaluate(samples)
    self.assertEqual(samples.shape, (n,))
    self.assertEqual(sample_values.shape, (n,))
    self.assertAllClose(
        sample_values.var(),
        self._scipy_pareto(concentration, scale).var(),
        rtol=.05,
        atol=0)

  def testParetoSampleMultidimensionalMean(self):
    scale = np.array([np.arange(1, 21, dtype=np.float32)])
    concentration = 3.
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    n = int(2e5)
    samples = pareto.sample(n, seed=test_util.test_seed())
    sample_values = self.evaluate(samples)
    self.assertEqual(samples.shape, (n, 1, 20))
    self.assertEqual(sample_values.shape, (n, 1, 20))
    self.assertAllClose(
        sample_values.mean(axis=0),
        self._scipy_pareto(concentration, scale).mean(),
        rtol=.01,
        atol=0)

  def testParetoSampleMultidimensionalVariance(self):
    scale = np.array([np.arange(1, 11, dtype=np.float32)])
    concentration = 4.
    pareto = tfd.Pareto(concentration, scale, validate_args=True)
    n = int(800e3)
    samples = pareto.sample(
        n, seed=test_util.test_seed(hardcoded_seed=123456))
    sample_values = self.evaluate(samples)
    self.assertEqual(samples.shape, (n, 1, 10))
    self.assertEqual(sample_values.shape, (n, 1, 10))

    self.assertAllClose(
        sample_values.var(axis=0),
        self._scipy_pareto(concentration, scale).var(),
        rtol=.05,
        atol=0)

  def testParetoParetoKLFinite(self):
    a_scale = np.arange(1., 5.)
    a_concentration = 1.
    b_scale = 1.
    b_concentration = np.arange(2., 10., 2)

    a = tfd.Pareto(
        concentration=a_concentration, scale=a_scale, validate_args=True)
    b = tfd.Pareto(
        concentration=b_concentration, scale=b_scale, validate_args=True)

    # Closed-form KL between two Paretos (finite when a.scale >= b.scale).
    true_kl = (b_concentration * (np.log(a_scale) - np.log(b_scale)) +
               np.log(a_concentration) - np.log(b_concentration) +
               b_concentration / a_concentration - 1.)

    kl = tfd.kl_divergence(a, b)

    x = a.sample(
        int(3e5),
        seed=test_util.test_seed(hardcoded_seed=0, set_eager_seed=False))
    kl_sample = tf.reduce_mean(a.log_prob(x) - b.log_prob(x), axis=0)

    kl_, kl_sample_ = self.evaluate([kl, kl_sample])
    self.assertAllEqual(true_kl, kl_)
    self.assertAllClose(true_kl, kl_sample_, atol=0., rtol=1e-2)

    zero_kl = tfd.kl_divergence(a, a)
    true_zero_kl_, zero_kl_ = self.evaluate([tf.zeros_like(true_kl), zero_kl])
    self.assertAllEqual(true_zero_kl_, zero_kl_)

  def testParetoParetoKLInfinite(self):
    # KL is infinite when b's support is a strict subset of a's.
    a = tfd.Pareto(concentration=1.0, scale=1., validate_args=True)
    b = tfd.Pareto(concentration=1.0, scale=2., validate_args=True)

    kl = tfd.kl_divergence(a, b)
    kl_ = self.evaluate(kl)
    self.assertAllEqual(np.inf, kl_)

  def testConcentrationVariable(self):
    c = tf.Variable([1., 2.])
    self.evaluate(c.initializer)
    d = tfd.Pareto(concentration=c, scale=1., validate_args=True)
    self.assertIs(d.concentration, c)
    self.evaluate(d.mean())
    # Re-validation must fire when the variable is assigned a bad value.
    with self.assertRaisesOpError('`concentration` must be positive'):
      with tf.control_dependencies([c.assign([-1, 2.])]):
        self.evaluate(d.mean())

  def testScaleVariable(self):
    s = tf.Variable([1., 2.])
    self.evaluate(s.initializer)
    d = tfd.Pareto(concentration=1., scale=s, validate_args=True)
    self.assertIs(d.scale, s)
    self.evaluate(d.mean())
    with self.assertRaisesOpError('`scale` must be positive'):
      with tf.control_dependencies([s.assign([-1, 2.])]):
        self.evaluate(d.mean())

  def testSupportBijectorOutsideRange(self):
    dist = tfd.Pareto(
        concentration=1., scale=[2., 5., 12.], validate_args=True)
    eps = 1e-6
    # Points just below (and well below) each scale are outside the support;
    # the support bijector's inverse should map them to NaN.
    x = np.array([[2. - eps, 5. - eps, 12. - eps], [-0.5, 2.3, 10.]])
    bijector_inverse_x = dist._experimental_default_event_space_bijector(
        ).inverse(x)
    self.assertAllNan(self.evaluate(bijector_inverse_x))
# Run the TensorFlow test harness when executed as a script.
if __name__ == '__main__':
  tf.test.main()
| 35.773438 | 78 | 0.665284 |
626479c978a12ab0814b9ddce1934cc5f5097416 | 2,998 | py | Python | experiments/ashvin/gcirl/airl/gail1.py | Asap7772/railrl_evalsawyer | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | [
"MIT"
] | null | null | null | experiments/ashvin/gcirl/airl/gail1.py | Asap7772/railrl_evalsawyer | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | [
"MIT"
] | null | null | null | experiments/ashvin/gcirl/airl/gail1.py | Asap7772/railrl_evalsawyer | baba8ce634d32a48c7dfe4dc03b123e18e96e0a3 | [
"MIT"
] | null | null | null | from rlkit.launchers.arglauncher import run_variants
import rlkit.misc.hyperparameter as hyp
from rlkit.launchers.experiments.goal_distribution.irl_launcher import (
irl_experiment,
process_args
)
from rlkit.launchers.launcher_util import run_experiment
from multiworld.envs.pygame import PickAndPlaceEnv
from rlkit.torch.irl.gail_trainer import GAILTrainer
if __name__ == '__main__':
    # Rendered video/image size in pixels (square frames).
    imsize = 200

    # Base experiment configuration; individual keys are overridden by the
    # hyperparameter sweep below.
    variant = dict(
        algo_kwargs=dict(
            num_epochs=1001,
            batch_size=128,
            num_eval_steps_per_epoch=1000,
            num_expl_steps_per_train_loop=1000,
            num_trains_per_train_loop=1000, #4000,
            min_num_steps_before_training=1000,
            eval_epoch_freq=1,
        ),
        max_path_length=100,
        trainer_kwargs=dict(
            soft_target_tau=1e-3,
            target_update_period=1,
            use_automatic_entropy_tuning=True,
            reward_scale=100,
            discount=0.99,
        ),
        contextual_replay_buffer_kwargs=dict(
            max_size=int(1E6),
            fraction_future_context=0.0,
            fraction_distribution_context=0.0,
            fraction_replay_buffer_context=0.0,
            # recompute_rewards=True,
        ),
        qf_kwargs=dict(
            hidden_sizes=[400, 300],
        ),
        policy_kwargs=dict(
            hidden_sizes=[400, 300],
        ),
        observation_key='observation',
        context_keys=[],
        save_env_in_snapshot=False,
        save_video=True,
        dump_video_kwargs=dict(
            rows=1,
            columns=8,
            pad_color=0,
            pad_length=0,
            subpad_length=1,
        ),
        save_video_period=50,
        renderer_kwargs=dict(
            width=imsize,
            height=imsize,
        ),
        env_id="HalfCheetah-v2",
        logger_config=dict(
            snapshot_gap=50,
            snapshot_mode='gap_and_last',
        ),
        reward_trainer_kwargs=dict(
            data_split=0.5,
            train_split=0.5,
        ),
        add_env_demos=True,
        path_loader_kwargs=dict(
            do_preprocess=False,
        ),
        score_fn_kwargs=dict(
            hidden_sizes=[64, 64, ],
        ),
        launcher_config=dict(
            unpack_variant=True,
            num_exps_per_instance=1,
            region='us-west-2',
        ),
        # Use the GAIL discriminator as the learned reward.
        reward_trainer_class=GAILTrainer,
    )

    # Cartesian-product sweep: 3 seeds x 6 environments x 3 reward scales.
    search_space = {
        'seedid': range(3),
        'env_id': [
            'HalfCheetah-v2', 'Ant-v2', 'Walker2d-v2',
            "pen-binary-v0",
            "door-binary-v0", "relocate-binary-v0",
        ],
        'trainer_kwargs.reward_scale': [1, 10, 100],
    }
    sweeper = hyp.DeterministicHyperparameterSweeper(
        search_space, default_parameters=variant,
    )

    variants = []
    for variant in sweeper.iterate_hyperparameters():
        variants.append(variant)

    run_variants(irl_experiment, variants, process_args, run_id=0)
| 28.552381 | 72 | 0.585057 |
fd9dbc6ba72bd1f11f9ebfff33507edc325002dc | 3,453 | py | Python | tensorflow_probability/python/experimental/__init__.py | rmothukuru/probability | 24352279e5e255e054bfe9c7bdc7080ecb280fba | [
"Apache-2.0"
] | 1 | 2021-07-21T15:54:17.000Z | 2021-07-21T15:54:17.000Z | tensorflow_probability/python/experimental/__init__.py | rmothukuru/probability | 24352279e5e255e054bfe9c7bdc7080ecb280fba | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/experimental/__init__.py | rmothukuru/probability | 24352279e5e255e054bfe9c7bdc7080ecb280fba | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""TensorFlow Probability API-unstable package.
This package contains potentially useful code which is under active development
with the intention eventually migrate to TFP proper. All code in
`tfp.experimental` should be of production quality, i.e., idiomatically
consistent, well tested, and extensively documented. `tfp.experimental` code
relaxes the TFP non-experimental contract in two regards:
1. `tfp.experimental` has no API stability guarantee. The public footprint of
`tfp.experimental` code may change without notice or warning.
2. Code outside `tfp.experimental` cannot depend on code within
`tfp.experimental`.
You are welcome to try any of this out (and tell us how well it works for you!).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_probability.python.experimental import auto_batching
from tensorflow_probability.python.experimental import bijectors
from tensorflow_probability.python.experimental import distribute
from tensorflow_probability.python.experimental import distributions
from tensorflow_probability.python.experimental import lazybones
from tensorflow_probability.python.experimental import linalg
from tensorflow_probability.python.experimental import marginalize
from tensorflow_probability.python.experimental import mcmc
from tensorflow_probability.python.experimental import nn
from tensorflow_probability.python.experimental import parallel_filter
from tensorflow_probability.python.experimental import sequential
from tensorflow_probability.python.experimental import stats
from tensorflow_probability.python.experimental import substrates
from tensorflow_probability.python.experimental import util
from tensorflow_probability.python.experimental import vi
from tensorflow_probability.python.experimental.composite_tensor import as_composite
from tensorflow_probability.python.experimental.composite_tensor import register_composite
from tensorflow_probability.python.internal import all_util
from tensorflow_probability.python.internal.auto_composite_tensor import auto_composite_tensor
from tensorflow_probability.python.internal.auto_composite_tensor import AutoCompositeTensor
from tensorflow_probability.python.internal.auto_composite_tensor import register_type_spec
# Names explicitly exported from `tfp.experimental`; `remove_undocumented`
# below strips everything else from the public namespace.
# NOTE(review): 'unnest' is whitelisted here but no corresponding import is
# visible in this module -- confirm it is brought into scope elsewhere.
_allowed_symbols = [
    'auto_batching',
    'as_composite',
    'auto_composite_tensor',
    'AutoCompositeTensor',
    'bijectors',
    'distribute',
    'distributions',
    'lazybones',
    'linalg',
    'marginalize',
    'mcmc',
    'nn',
    'parallel_filter',
    'register_composite',
    'register_type_spec',
    'sequential',
    'stats',
    'substrates',
    'unnest',
    'util',
    'vi',
]

all_util.remove_undocumented(__name__, _allowed_symbols)
| 42.109756 | 94 | 0.797857 |
cff160213c801e80e6cd02a7d91970a71f42b6a4 | 37,600 | py | Python | plugins/camtrawl/python/viame/processes/camtrawl/algos.py | readicculus/VIAME | 3e65fa4ec9139f73f8f9f4b3c0f1b18ba6209733 | [
"BSD-3-Clause"
] | null | null | null | plugins/camtrawl/python/viame/processes/camtrawl/algos.py | readicculus/VIAME | 3e65fa4ec9139f73f8f9f4b3c0f1b18ba6209733 | [
"BSD-3-Clause"
] | null | null | null | plugins/camtrawl/python/viame/processes/camtrawl/algos.py | readicculus/VIAME | 3e65fa4ec9139f73f8f9f4b3c0f1b18ba6209733 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Reimplementation of matlab algorithms for fishlength detection in python.
The next step is to move them into kwiver.
TODO:
fix hard coded paths for doctests
"""
from __future__ import division, print_function
from collections import namedtuple
import cv2
import itertools as it
import numpy as np
import scipy.optimize
from .imutils import (imscale, ensure_grayscale, from_homog, to_homog)
from os.path import splitext
import ubelt as ub
import logging
import warnings
from six.moves import zip
logger = logging.getLogger(__name__)
OrientedBBox = namedtuple('OrientedBBox', ('center', 'extent', 'angle'))
__OPENCV_VERSION_2__ = cv2.__version__.startswith('2')
if __OPENCV_VERSION_2__:
warnings.warn('Using an old OpenCV version. '
'This may cause unexpected results. '
'Please update to 3.x')
import skimage # NOQA
import skimage.measure # NOQA
def dict_subset(dict_, keys):
    """Return a new dict containing only the given ``keys`` from ``dict_``.

    Raises:
        KeyError: if any key in ``keys`` is missing from ``dict_``.
    """
    subset = {}
    for key in keys:
        subset[key] = dict_[key]
    return subset
def dict_update_subset(dict_, other):
    """
    Copy the items of `other` into `dict_`, but refuse to introduce new keys:
    every key in `other` must already be present in `dict_`, otherwise a
    KeyError is raised for the offending key.
    """
    for key in other:
        if key not in dict_:
            raise KeyError(key)
        dict_[key] = other[key]
class ParamInfo(object):
    """Lightweight record describing one configurable algorithm parameter.

    Attributes:
        name: parameter identifier used as the config-dict key
        default: value used when the caller supplies no override
        doc: optional human-readable description
    """

    def __init__(self, name, default, doc=None):
        self.name, self.default, self.doc = name, default, doc
class BoundingBox(ub.NiceRepr):
    """
    Axis-aligned bounding box stored as ``coords = [xmin, ymin, xmax, ymax]``
    in image (x, y) coordinates.
    """

    def __init__(self, coords):
        """
        Args:
            coords: array-like of [xmin, ymin, xmax, ymax]
        """
        self.coords = coords

    def __nice__(self):
        return 'center={}, wh={}'.format(self.center, (self.width, self.height))

    @classmethod
    def from_coords(cls, xmin, ymin, xmax, ymax):
        """Construct from four corner coordinates.

        Fixed: the first parameter was misnamed ``self`` and the body
        hard-coded ``BoundingBox``; using ``cls`` lets subclasses construct
        instances of their own type.
        """
        coords = np.array([xmin, ymin, xmax, ymax])
        return cls(coords)

    @property
    def xmin(self):
        return self.coords[0]

    @property
    def ymin(self):
        return self.coords[1]

    @property
    def xmax(self):
        return self.coords[2]

    @property
    def ymax(self):
        return self.coords[3]

    @property
    def width(self):
        return self.xmax - self.xmin

    @property
    def height(self):
        return self.ymax - self.ymin

    @property
    def center(self):
        # (cx, cy) midpoint of the box; None guard kept from original code.
        if self.coords is None:
            return None
        (xmin, ymin, xmax, ymax) = self.coords
        cx = (xmax + xmin) / 2
        cy = (ymax + ymin) / 2
        return cx, cy

    def scale(self, factor):
        """
        inplace upscaling of bounding boxes and points
        (masks are not upscaled)
        """
        self.coords = np.array(self.coords) * factor
class DetectedObject(ub.NiceRepr):
    """
    A single connected-component detection: an axis-aligned bounding box in
    image coordinates plus a binary mask stored in its own (bbox-local)
    coordinate frame. `bbox_factor` records the scale from mask coordinates
    back to image coordinates.

    Example:
        >>> from viame.processes.camtrawl.algos import *
        >>> cc_mask = np.zeros((11, 11), dtype=np.uint8)
        >>> cc_mask[3:5, 2:7] = 1
        >>> self = DetectedObject.from_connected_component(cc_mask)
        >>> print(self)
        <DetectedObject(center=(4.0, 3.5), wh=(4, 1))>
    """

    def __init__(self, bbox, mask):
        # bbox is kept in the image coordinate frame
        self.bbox = bbox
        # mask is kept in its own coordinate frame
        self.mask = mask
        # keep track of the scale factor from mask to bbox
        self.bbox_factor = 1.0

    def __nice__(self):
        return self.bbox.__nice__()

    def num_pixels(self):
        """
        Number of object pixels, scaled to original-image resolution.

        Example:
            >>> from viame.processes.camtrawl.algos import *
            >>> cc_mask = np.zeros((11, 11), dtype=np.uint8)
            >>> cc_mask[3:5, 2:7] = 1
            >>> self = DetectedObject.from_connected_component(cc_mask)
            >>> print(str(self.num_pixels()))
            10
        """
        # number of pixels in the mask
        # scale to the number that would be in the original image
        n_pixels = int((self.mask > 0).sum() * (self.bbox_factor ** 2))
        return n_pixels

    def hull(self):
        """
        Convex hull of the mask's foreground pixels, in image coordinates.

        Example:
            >>> from viame.processes.camtrawl.algos import *
            >>> cc_mask = np.zeros((11, 11), dtype=np.uint8)
            >>> cc_mask[3:5, 2:7] = 1
            >>> self = DetectedObject.from_connected_component(cc_mask)
            >>> print(self.hull().tolist())
            [[[6, 4]], [[2, 4]], [[2, 3]], [[6, 3]]]
        """
        cc_y, cc_x = np.where(self.mask)
        points = np.vstack([cc_x, cc_y]).T
        # Find a minimum oriented bounding box around the points
        hull = cv2.convexHull(points)
        # move points from mask coordinates to image coordinates
        if self.bbox_factor != 1.0:
            hull = hull * self.bbox_factor
        hull = hull + [[self.bbox.xmin, self.bbox.ymin]]
        # FIX: `np.int` was a deprecated alias for the builtin `int` and was
        # removed in NumPy 1.24; `astype(int)` is the exact equivalent.
        hull = np.round(hull).astype(int)
        return hull

    def oriented_bbox(self):
        """
        Minimum-area rotated rectangle enclosing the hull.

        Example:
            >>> from viame.processes.camtrawl.algos import *
            >>> cc_mask = np.zeros((11, 11), dtype=np.uint8)
            >>> cc_mask[3:5, 2:7] = 1
            >>> self = DetectedObject.from_connected_component(cc_mask)
            >>> print(str(self.oriented_bbox()))
            OrientedBBox(center=(4.0, 3.5), extent=(1.0, 4.0), angle=-90.0)
        """
        hull = self.hull()
        oriented_bbox = OrientedBBox(*cv2.minAreaRect(hull))
        return oriented_bbox

    def box_points(self):
        """
        The four corner points of the oriented bounding box.

        CommandLine:
            python -m viame.processes.camtrawl.algos DetectedObject.box_points

        Example:
            >>> from viame.processes.camtrawl.algos import *
            >>> _, B = 0, 1
            >>> cc_mask = np.array([
            >>>     [_, _, _, _, _, _, _, _],
            >>>     [_, _, _, B, _, _, _, _],
            >>>     [_, _, B, B, B, _, _, _],
            >>>     [_, B, B, B, B, B, _, _],
            >>>     [_, _, B, B, B, B, B, _],
            >>>     [_, _, _, B, B, B, _, _],
            >>>     [_, _, _, _, B, _, _, _],
            >>>     [_, _, _, _, _, _, _, _],
            >>> ])
            >>> self = DetectedObject.from_connected_component(cc_mask)
            >>> points = self.box_points()
            >>> print(ub.repr2(points.tolist(), precision=2, nl=0))
            [[4.00, 6.00], [1.00, 3.00], [3.00, 1.00], [6.00, 4.00]]
        """
        if __OPENCV_VERSION_2__:
            # OpenCV 2.x exposed BoxPoints under the old `cv2.cv` namespace
            return np.array(cv2.cv.BoxPoints(self.oriented_bbox()),
                            dtype=np.float32)
        else:
            return cv2.boxPoints(self.oriented_bbox())

    def scale(self, factor):
        """ inplace rescale of the bbox (the mask itself is not resampled) """
        if factor != 1.0:
            self.bbox_factor *= factor
            self.bbox.scale(factor)

    @classmethod
    def from_connected_component(DetectedObject, cc_mask):
        """Build a detection from a binary connected-component mask."""
        # note, `np.where` returns coords in (r, c)
        ys, xs = np.where(cc_mask)
        xmin, xmax = xs.min(), xs.max()
        ymin, ymax = ys.min(), ys.max()
        bbox = BoundingBox.from_coords(xmin, ymin, xmax, ymax)
        yslice = slice(bbox.ymin, bbox.ymax + 1)
        xslice = slice(bbox.xmin, bbox.xmax + 1)
        mask = cc_mask[yslice, xslice]
        self = DetectedObject(bbox, mask)
        return self
class GMMForegroundObjectDetector(object):
"""
Uses background subtraction and 4-way connected compoments algorithm to
detect potential fish objects. Objects are filtered by size, aspect ratio,
and closeness to the image border to remove bad detections.
References:
https://stackoverflow.com/questions/37300698/gaussian-mixture-model
http://docs.opencv.org/trunk/db/d5c/tutorial_py_bg_subtraction.html
"""
    @staticmethod
    def default_params():
        """Declare tunable parameters, grouped by pipeline stage.

        Returns:
            dict: maps group name ('preproc', 'gmm', 'filter') to a list of
                ParamInfo records; the 'filter' group is delegated to
                DetectionShapeFilter so the two stay in sync.
        """
        default_params = {
            'preproc': [
                ParamInfo(name='factor', default=1.0,
                          doc='image downsample factor'),
                ParamInfo(name='smooth_ksize', default=(10, 10),
                          doc=('postprocessing filter size for noise removal '
                               '(wrt orig image size)'))
            ],
            'gmm': [
                ParamInfo(name='n_startup_frames', default=3,
                          doc=('number of frames before the background model '
                               'will wait before returning any detections')),
                ParamInfo(name='n_training_frames', default=300,
                          doc='number of frames to use for training'),
                ParamInfo(name='gmm_thresh', default=30,
                          doc='GMM variance threshold'),
            ],
            'filter': DetectionShapeFilter.default_params()['filter'],
        }
        return default_params
    def __init__(detector, **kwargs):
        """Build the detector config, background model, and shape filter.

        Args:
            **kwargs: overrides for any parameter declared in
                `default_params`; unknown keys raise KeyError via
                `dict_update_subset`.
        """
        # setup default config
        detector.config = {}
        default_params = detector.default_params()
        for pinfos in default_params.values():
            detector.config.update({pi.name: pi.default for pi in pinfos})
        # modify based on user args
        dict_update_subset(detector.config, kwargs)

        # Setup GMM background subtraction algorithm
        logger.debug('Using GMM from cv2.__version__ = {}'.format(cv2.__version__))
        if cv2.__version__.startswith('2'):
            # not sure about these params
            detector.background_model = cv2.BackgroundSubtractorMOG2(
                history=detector.config['n_training_frames'],
                varThreshold=detector.config['gmm_thresh'],
                bShadowDetection=False
            )
        else:
            detector.background_model = cv2.createBackgroundSubtractorMOG2(
                history=detector.config['n_training_frames'],
                varThreshold=detector.config['gmm_thresh'],
                detectShadows=False)

        # Setup detection filter algorithm
        filter_config = dict_subset(
            detector.config, [pi.name for pi in default_params['filter']])
        detector.filter = DetectionShapeFilter(**filter_config)

        detector.n_iters = 0
        # masks from previous iter are kept in memory for visualization
        detector._masks = {}
    def detect(detector, img):
        """
        Main algorithm step.
        Detects the objects in the image and update the background model.

        Args:
            img (ndarray): image to perform detection on

        Returns:
            detections : list of DetectedObjects

        CommandLine:
            python -m viame.processes.camtrawl.algos GMMForegroundObjectDetector.detect

        Doctest:
            >>> import matplotlib as mpl
            >>> mpl.use('agg')
            >>> from viame.processes.camtrawl.algos import *
            >>> from viame.processes.camtrawl.demo import *
            >>> detector, img = demodata_detections(target_step='detect', target_frame_num=7)
            >>> detections = detector.detect(img)
            >>> print('detections = {!r}'.format(detections))
            >>> masks = detector._masks
            >>> # xdoc: REQUIRES(--show)
            >>> draw_img = DrawHelper.draw_detections(img, detections, masks)
            >>> fpath = ub.ensure_app_cache_dir('camtrawl') + '/GMMForegroundObjectDetector.detect.png'
            >>> cv2.imwrite(fpath, draw_img)
            >>> ub.startfile(fpath)
            >>> #from matplotlib import pyplot as plt
            >>> #plt.imshow(cv2.cvtColor(draw_img, cv2.COLOR_BGR2RGB))
            >>> #plt.gca().grid(False)
            >>> #plt.show()
        """
        detector._masks = {}

        # Downsample and convert to grayscale
        # NOTE(review): upfactor is currently unused here -- detections appear
        # to stay in the preprocessed coordinate frame; confirm with callers.
        img_, upfactor = detector.preprocess_image(img)

        # Run detection / update background model
        logger.debug('detect background')
        mask = detector.background_model.apply(img_)
        detector._masks['orig'] = mask.copy()

        if detector.n_iters < detector.config['n_startup_frames']:
            # Skip the first few frames while the model is learning
            detections = []
        else:
            # Remove noise
            if detector.config['smooth_ksize'] is not None:
                mask = detector.postprocess_mask(mask)
                detector._masks['post'] = mask.copy()

            # Find detections using CC algorithm
            detectgen = detector.detections_in_mask(mask)

            # Filter detections by shape and size
            img_dsize = tuple(img.shape[0:2][::-1])
            detectgen = detector.filter.filter_detections(detectgen, img_dsize)

            # return detections in a list
            detections = list(detectgen)

        detector.n_iters += 1
        logger.debug('made {} detections'.format(len(detections)))
        return detections
def preprocess_image(detector, img):
    """
    Convert an input frame to grayscale and optionally downsample it.

    Args:
        img (ndarray): raw input frame

    Returns:
        Tuple[ndarray, float]: ``(img_, upfactor)`` -- the preprocessed
            (grayscale, possibly downsampled) image, and the factor needed
            to scale detection coordinates back to the input resolution.
    """
    logger.debug('preprocess image before detect')
    # Convert to grayscale
    img_ = ensure_grayscale(img)
    # Downsample image before running detection
    factor = detector.config['factor']
    if factor != 1.0:
        downfactor_ = 1 / factor
        # BUGFIX: downsample the grayscale image ``img_``; previously
        # ``imscale(img, ...)`` was called on the raw input, silently
        # discarding the grayscale conversion above.
        img_, downfactor = imscale(img_, downfactor_)
        upfactor = 1 / downfactor[0]
    else:
        # BUGFIX: keep the grayscale image here as well (the old code
        # re-assigned ``img_ = img``, also discarding the conversion).
        upfactor = 1.0
    return img_, upfactor
def postprocess_mask(detector, mask):
    """
    Remove small noise from a foreground mask via morphological ops.

    Args:
        mask (ndarray): raw foreground mask from the background model

    Returns:
        ndarray: the cleaned mask (modified in place via ``dst=mask``)
    """
    logger.debug('postprocess mask')
    ksize = np.array(detector.config['smooth_ksize'])
    # Scale the kernel to the downsampled working resolution.
    # NOTE: use the builtin ``int``; the ``np.int`` alias was deprecated
    # in NumPy 1.20 and removed in NumPy 1.24.
    ksize = tuple(np.round(ksize / detector.config['factor']).astype(int))
    kernel = cv2.getStructuringElement(cv2.MORPH_ELLIPSE, ksize)
    # opening is erosion followed by dilation (removes speckle noise)
    mask = cv2.morphologyEx(mask, cv2.MORPH_OPEN, kernel, dst=mask)
    # Do a second dilation to re-grow the surviving blobs
    mask = cv2.dilate(src=mask, kernel=kernel, dst=mask)
    return mask
def detections_in_mask(detector, mask):
"""
Find pixel locs of each cc and determine if its a valid detection
Args:
mask (ndarray): mask where non-zero pixels indicate all candidate
objects
Yields:
DetectedObject: one detection per sufficiently large connected
component, scaled back to original image coordinates.
Example:
>>> from viame.processes.camtrawl.algos import *
>>> detector = GMMForegroundObjectDetector()
>>> detector.config['min_num_pixels'] = 2
>>> x, y = np.indices((10, 10))
>>> s = 2
>>> mask = (((x // s) % s == 0) & ((y // s) % s == 0)).astype(np.uint8)
>>> mask[0, -2:] = 0
>>> mask[1, -1] = 0
>>> mask[2, 2] = 1
>>> mask[-1, 2] = 1
>>> mask[-5:-1, -3] = 1
>>> detections = list(detector.detections_in_mask(mask))
>>> assert len(detections) == 7
"""
# 4-way connected compoment algorithm
if __OPENCV_VERSION_2__:
# opencv2 doesnt have a builtin CC algo, need to use skimage
cc_mask, n_ccs_sk = skimage.measure.label(mask, neighbors=8,
background=0,
return_num=True)
# Be consistent with opencv, which always includes the background
# label in the num (even if no background exists).
n_ccs = n_ccs_sk + 1
else:
n_ccs, cc_mask = cv2.connectedComponents(mask, connectivity=8)
factor = detector.config['factor']
# Process only labels with enough points
min_num_pixels = detector.config['min_num_pixels']
if min_num_pixels is None:
# NOTE(review): opencv labels run 0..n_ccs-1, so this range appears
# to include one label past the maximum -- confirm intended (an
# empty label would yield an all-False cc below).
valid_labels = np.arange(1, n_ccs + 1)
else:
# speed optimization: quickly determine num pixels for each cc
# using a histogram instead of checking in the filter func
hist, bins = np.histogram(cc_mask[cc_mask > 0].ravel(),
bins=np.arange(1, n_ccs + 1))
# Compensate for downsampling: areas in the working image are
# smaller than in the original by factor**2.
min_num_pixels_ = min_num_pixels / (factor ** 2)
# only consider large enough regions
valid_labels = bins[0:-1][hist >= min_num_pixels_]
# Filter ccs to generate only "good" detections
# We may be able to speed this up using ndimage.find_objects
for cc_label in valid_labels:
cc = (cc_mask == cc_label)
detection = DetectedObject.from_connected_component(cc)
# Upscale back to input img coords (to agree with camera calib)
detection.scale(factor)
yield detection
class DetectionShapeFilter(object):
    """
    Filters masked detections based on their size and shape.

    Detections are rejected when they are too small, touch the image
    border (within ``edge_trim``), or have an aspect ratio outside the
    configured ``aspect_thresh`` range.
    """

    @staticmethod
    def default_params():
        """Return the default filter configuration as ParamInfo lists."""
        default_params = {
            'filter': [
                ParamInfo(name='min_num_pixels', default=800,
                          doc=('keep detections only with the number of pixels '
                               'wrt original image size')),
                ParamInfo(name='edge_trim', default=(12, 12),
                          doc=('limits accepable targets to be within the region '
                               '[padx, pady, img_w - padx, img_h - pady]. '
                               'These are wrt the original image size')),
                ParamInfo(name='aspect_thresh', default=(3.5, 7.5,),
                          doc='range of valid aspect ratios for detections')
            ]
        }
        return default_params

    def __init__(self, **kwargs):
        """Build the config from defaults, overridden by ``kwargs``."""
        # setup default config
        self.config = {}
        default_params = self.default_params()
        for pinfos in default_params.values():
            self.config.update({pi.name: pi.default for pi in pinfos})
        # modify based on user args
        dict_update_subset(self.config, kwargs)

    def filter_detections(self, detections, img_dsize=None):
        """Yield only the detections that pass :meth:`is_valid`.

        Args:
            detections (Iterable[DetectedObject]): candidate detections
            img_dsize (tuple): (width, height) of the original image

        Yields:
            DetectedObject: detections satisfying all constraints
        """
        logger.debug('filter detections')
        for detection in detections:
            if self.is_valid(detection, img_dsize):
                yield detection

    def is_valid(self, detection, img_dsize=None):
        """
        Checks if the detection passes filtering constraints
        Args:
            detection (DetectedObject): mask where non-zero pixels indicate a single
                candidate object
            img_dsize (tuple): w/h of the original image
        Returns:
            bool: True if the detection is valid else False
        """
        if self.config['min_num_pixels'] is not None:
            # Remove small regions
            if detection.num_pixels() < self.config['min_num_pixels']:
                return False
        if self.config['edge_trim'] is not None:
            # Define thresholds to filter edges
            img_width, img_height = img_dsize
            xmin_lim, ymin_lim = self.config['edge_trim']
            xmax_lim = img_width - (xmin_lim)
            ymax_lim = img_height - (ymin_lim)
            # Filter objects detected on the edge of the image region
            (xmin, ymin, xmax, ymax) = detection.bbox.coords
            if any([xmin < xmin_lim, xmax > xmax_lim,
                    ymin < ymin_lim, ymax > ymax_lim]):
                # BUGFIX: return False (the documented bool) rather than
                # None, keeping the return type consistent.
                return False
        # Find a minimum oriented bounding box around the points
        w, h = detection.oriented_bbox().extent
        if w == 0 or h == 0:
            # Degenerate (zero-area) oriented box cannot be a valid target
            return False
        # Filter objects without fishy aspect ratios
        ar = max([(w / h), (h / w)])
        min_aspect, max_aspect = self.config['aspect_thresh']
        if any([ar < min_aspect, ar > max_aspect]):
            return False
        return True
class FishStereoMeasurments(object):
"""
Algo for matching detections in left and right camera and determining the
fish length in millimeters.

Note: the class name's spelling ("Measurments") is kept for backwards
compatibility with existing callers.
"""
@staticmethod
def default_params():
# Default thresholds used when matching left/right detections.
default_params = {
'thresholds': [
ParamInfo('max_err', (6, 14), doc=(
'Threshold for errors between before & after projected '
'points to make matches between left and right')),
ParamInfo('small_len', 150, doc=(
'length (in milimeters) to switch between high and low '
'max error thresholds ')),
]
}
return default_params
def __init__(self, **kwargs):
# setup default config
self.config = {}
default_params = self.default_params()
for pinfos in default_params.values():
self.config.update({pi.name: pi.default for pi in pinfos})
# modify based on user args
dict_update_subset(self.config, kwargs)
def triangulate(self, cal, det1, det2):
"""
Assuming, det1 matches det2, we determine 3d-coordinates of each
detection and measure the reprojection error.

Returns:
    Tuple: ``(pts1_3d, pts2_3d, errors, fishlen)`` -- (N, 3) points in
    each camera's reference frame, per-point squared reprojection
    errors, and the estimated fish length (calibration units).

References:
http://answers.opencv.org/question/117141
https://gist.github.com/royshil/7087bc2560c581d443bc
https://stackoverflow.com/a/29820184/887074
Doctest:
>>> # Rows are detections in img1, cols are detections in img2
>>> from viame.processes.camtrawl.algos import *
>>> from viame.processes.camtrawl.demo import *
>>> detections1, detections2, cal = demodata_detections(target_step='triangulate', target_frame_num=6)
>>> det1, det2 = detections1[0], detections2[0]
>>> self = FishStereoMeasurments()
>>> assignment, assign_data, cand_errors = self.triangulate(cal, det1, det2)
"""
logger.debug('triangulate')
_debug = 0
if _debug:
# Use 4 corners and center to ensure matrix math is good
# (hard to debug when ndims == npts, so make npts >> ndims)
pts1 = np.vstack([det1.box_points(), det1.oriented_bbox().center])
pts2 = np.vstack([det2.box_points(), det2.oriented_bbox().center])
else:
# Use only the corners of the bbox
pts1 = det1.box_points()[[0, 2]]
pts2 = det2.box_points()[[0, 2]]
# Move into opencv point format (num x 1 x dim)
pts1_cv = pts1[:, None, :]
pts2_cv = pts2[:, None, :]
# Grab camera parameters
K1, K2 = cal.intrinsic_matrices()
kc1, kc2 = cal.distortions()
rvec1, tvec1, rvec2, tvec2 = cal.extrinsic_vecs()
# Make extrincic matrices
R1 = cv2.Rodrigues(rvec1)[0]
R2 = cv2.Rodrigues(rvec2)[0]
T1 = tvec1[:, None]
T2 = tvec2[:, None]
RT1 = np.hstack([R1, T1])
RT2 = np.hstack([R2, T2])
# Undistort points
# This puts points in "normalized camera coordinates" making them
# independent of the intrinsic parameters. Moving to world coordinates
# can now be done using only the RT transform.
unpts1_cv = cv2.undistortPoints(pts1_cv, K1, kc1)
unpts2_cv = cv2.undistortPoints(pts2_cv, K2, kc2)
# note: trinagulatePoints docs say that it wants a 3x4 projection
# matrix (ie K.dot(RT)), but we only need to use the RT extrinsic
# matrix because the undistorted points already account for the K
# intrinsic matrix.
world_pts_homog = cv2.triangulatePoints(RT1, RT2, unpts1_cv, unpts2_cv)
world_pts = from_homog(world_pts_homog)
# Compute distance between 3D bounding box points
if _debug:
corner1, corner2 = world_pts.T[[0, 2]]
else:
corner1, corner2 = world_pts.T
# Length is in milimeters
fishlen = np.linalg.norm(corner1 - corner2)
# Reproject points
world_pts_cv = world_pts.T[:, None, :]
proj_pts1_cv = cv2.projectPoints(world_pts_cv, rvec1, tvec1, K1, kc1)[0]
proj_pts2_cv = cv2.projectPoints(world_pts_cv, rvec2, tvec2, K2, kc2)[0]
# Check error
# Squared distance between original and reprojected 2D points.
err1 = ((proj_pts1_cv - pts1_cv)[:, 0, :] ** 2).sum(axis=1)
err2 = ((proj_pts2_cv - pts2_cv)[:, 0, :] ** 2).sum(axis=1)
errors = np.hstack([err1, err2])
# Get 3d points in each camera's reference frame
# Note RT1 is the identity and RT are 3x4, so no need for `from_homog`
# Return points in with shape (N,3)
pts1_3d = RT1.dot(to_homog(world_pts)).T
pts2_3d = RT2.dot(to_homog(world_pts)).T
return pts1_3d, pts2_3d, errors, fishlen
@staticmethod
def minimum_weight_assignment(cost_errors):
"""
Finds optimal assignment of left-camera to right-camera detections
Doctest:
>>> # Rows are detections in img1, cols are detections in img2
>>> from viame.processes.camtrawl.algos import *
>>> self = FishStereoMeasurments()
>>> cost_errors = np.array([
>>> [9, 2, 1, 9],
>>> [4, 1, 5, 5],
>>> [9, 9, 2, 4],
>>> ])
>>> assign1 = self.minimum_weight_assignment(cost_errors)
>>> assign2 = self.minimum_weight_assignment(cost_errors.T)
"""
n1, n2 = cost_errors.shape
n = max(n1, n2)
# Embed the [n1 x n2] matrix in a padded (with inf) [n x n] matrix
cost_matrix = np.full((n, n), fill_value=np.inf)
cost_matrix[0:n1, 0:n2] = cost_errors
# Find an effective infinite value for infeasible assignments
is_infeasible = np.isinf(cost_matrix)
is_positive = cost_matrix > 0
feasible_vals = cost_matrix[~(is_infeasible & is_positive)]
large_val = (n + feasible_vals.sum()) * 2
# replace infinite values with effective infinite values
cost_matrix[is_infeasible] = large_val
# Solve munkres problem for minimum weight assignment
indexes = list(zip(*scipy.optimize.linear_sum_assignment(cost_matrix)))
# Return only the feasible assignments
assignment = [(i, j) for (i, j) in indexes
if cost_matrix[i, j] < large_val]
return assignment
def find_matches(self, cal, detections1, detections2):
"""
Match detections from the left camera to detections in the right camera

Returns:
    Tuple[list, list, ndarray]: ``(assignment, assign_data,
    cand_errors)`` -- index pairs of matched detections, per-match
    info dicts, and the dense candidate reprojection-error matrix.

Doctest:
>>> # Rows are detections in img1, cols are detections in img2
>>> from viame.processes.camtrawl.algos import *
>>> detections1, detections2, cal = demodata_detections(target_step='triangulate', target_frame_num=6)
>>> self = FishStereoMeasurments()
>>> assignment, assign_data, cand_errors = self.find_matches(cal, detections1, detections2)
"""
logger.debug('find matches')
n_detect1, n_detect2 = len(detections1), len(detections2)
cand_data = {}
# Initialize matrix of reprojection errors
cost_errors = np.full((n_detect1, n_detect2), fill_value=np.inf)
cand_errors = np.full((n_detect1, n_detect2), fill_value=np.inf)
# Find the liklihood that each pair of detections matches by
# triangulating and then measuring the reprojection error.
for (i, det1), (j, det2) in it.product(enumerate(detections1),
enumerate(detections2)):
# Triangulate assuming det1 and det2 match, but return the
# reprojection error so we can check if this assumption holds
pts1_3d, pts2_3d, errors, fishlen = self.triangulate(cal, det1, det2)
error = errors.mean()
# Mark the pair (i, j) as a potential candidate match
cand_errors[i, j] = error
# Record information about this candidate match
cand_data[(i, j)] = {
'world_pts': pts1_3d,
'error': error,
'fishlen': fishlen,
'range': pts1_3d.T[2].mean(),
'dz': np.abs(np.diff(pts1_3d.T[2]))[0],
'box_pts1': det1.box_points(),
'box_pts2': det2.box_points(),
}
# Check chirality
# Both Z-coordinates must be positive (i.e. in front the cameras)
z_coords1 = pts1_3d.T[2]
z_coords2 = pts2_3d.T[2]
both_in_front = np.all(z_coords1 > 0) and np.all(z_coords2 > 0)
if not both_in_front:
# Ignore out-of-view correspondences
continue
# Check if reprojection error is too high
max_error = self.config['max_err']
small_len = self.config['small_len']  # hardcoded to 15cm in matlab
if len(max_error) == 2:
# Small fish get the stricter threshold, larger fish the looser one.
error_thresh = max_error[0] if fishlen <= small_len else max_error[1]
else:
error_thresh = max_error[0]
if error >= error_thresh:
# Ignore correspondences with high reprojection error
continue
cost_errors[i, j] = error
# Find the matching with minimum reprojection error, such that each
# detection in one camera can match at most one detection in the other.
assignment = self.minimum_weight_assignment(cost_errors)
# get associated data with each assignment
assign_data = []
for i, j in assignment:
data = {'ij': (i, j)}
data.update(cand_data[(i, j)])
assign_data.append(data)
return assignment, assign_data, cand_errors
class StereoCalibration(object):
"""
Helper class for reading / accessing stereo camera calibration params

The world coordinate frame is centered on the left camera (see
``_from_flat_dict`` for the layout of ``data``).
Doctest:
>>> from viame.processes.camtrawl.algos import *
>>> cal_fpath = '/home/joncrall/data/camtrawl_stereo_sample_data/201608_calibration_data/selected/Camtrawl_2016.npz'
>>> cal = StereoCalibration.from_file(cal_fpath)
"""
def __init__(cal, data=None):
# Nested dict of per-camera intrinsic/extrinsic parameters.
cal.data = data
# NOTE: the literal spelling 'milimeters' is preserved; callers may
# compare against it.
cal.unit = 'milimeters'
def __str__(cal):
return '{}({})'.format(cal.__class__.__name__, cal.data)
def extrinsic_vecs(cal):
rvec1 = cal.data['left']['extrinsic']['om']
tvec1 = cal.data['right']['extrinsic']['om']
rvec2 = cal.data['right']['extrinsic']['om']
tvec2 = cal.data['right']['extrinsic']['T']
return rvec1, tvec1, rvec2, tvec2
def distortions(cal):
kc1 = cal.data['right']['intrinsic']['kc']
kc2 = cal.data['left']['intrinsic']['kc']
return kc1, kc2
def intrinsic_matrices(cal):
K1 = cal._make_intrinsic_matrix(cal.data['left']['intrinsic'])
K2 = cal._make_intrinsic_matrix(cal.data['right']['intrinsic'])
return K1, K2
@staticmethod
def _make_intrinsic_matrix(intrin):
""" convert intrinsic dict to matrix """
fc = intrin['fc']
cc = intrin['cc']
alpha_c = intrin['alpha_c']
KK = np.array([
[fc[0], alpha_c * fc[0], cc[0]],
[ 0, fc[1], cc[1]],
[ 0, 0, 1],
])
return KK
@staticmethod
def _make_intrinsic_params(K):
""" convert intrinsic matrix to dict """
intrin = {}
fc = intrin['fc'] = np.zeros(2)
cc = intrin['cc'] = np.zeros(2)
[[fc[0], alpha_c_fc0, cc[0]],
[ _, fc[1], cc[1]],
[ _, _, _]] = K
intrin['alpha_c'] = np.array([alpha_c_fc0 / fc[0]])
return intrin
@classmethod
def from_file(StereoCalibration, cal_fpath):
"""
Loads a camera calebration from a .mat or .npz file
SeeAlso:
from_npzfile
from_matfile
"""
ext = splitext(cal_fpath)[1].lower()
if ext == '.mat':
return StereoCalibration.from_matfile(cal_fpath)
elif ext == '.npz':
return StereoCalibration.from_npzfile(cal_fpath)
else:
raise ValueError('unknown extension {}'.format(ext))
@classmethod
def from_npzfile(StereoCalibration, cal_fpath):
"""
For the npz file the root object should be a dict with the following
keys and values:
R: extrinsic rotation matrix
T: extrinsic translation
cameraMatrixL: dict of intrinsict parameters for the left camera
fc: focal length
cc: principle point
alpha_c: skew
cameraMatrixR: dict of intrinsict parameters for the right camera
fc: focal length
cc: principle point
alpha_c: skew
distCoeffsL: distortion coefficients for the left camera
distCoeffsR: distortion coefficients for the right camera
"""
logger.debug('Loading npzfile {}'.format(cal_fpath))
data = dict(np.load(cal_fpath))
flat_dict = {}
# Convert the rotation matrix R into a rodrigues rotation vector 'om'
flat_dict['om'] = cv2.Rodrigues(data['R'])[0].ravel()
flat_dict['T'] = data['T'].ravel()
K1 = data['cameraMatrixL']
intrin1 = StereoCalibration._make_intrinsic_params(K1)
flat_dict['fc_left'] = intrin1['fc']
flat_dict['cc_left'] = intrin1['cc']
flat_dict['alpha_c_left'] = intrin1['alpha_c']
flat_dict['kc_left'] = data['distCoeffsL'].ravel()
K2 = data['cameraMatrixR']
intrin2 = StereoCalibration._make_intrinsic_params(K2)
flat_dict['fc_right'] = intrin2['fc']
flat_dict['cc_right'] = intrin2['cc']
flat_dict['alpha_c_right'] = intrin2['alpha_c']
flat_dict['kc_right'] = data['distCoeffsR'].ravel()
return StereoCalibration._from_flat_dict(flat_dict)
def from_cameras(StereoCalibration, camera1, camera2):
# NOTE(review): unimplemented stub; it also appears to be missing a
# @classmethod decorator (unlike its siblings) -- confirm before use.
pass
@classmethod
def from_matfile(StereoCalibration, cal_fpath):
"""
Loads a matlab camera calibration file from disk
For the mat file, the root structure should be a dict with the key
`Cal` whose value is a dict with the following items:
om: extrinsic rotation vector (note rotation matrix is rodrigues(om))
T: extrinsic translation
fc_left: focal length of the left camera
cc_left: principle point
alpha_c_left: skew
kc_left: distortion coefficients for the left camera
fc_right: focal length of the right camera
cc_right: principle point
alpha_c_right: skew
kc_right: distortion coefficients for the right camera
References:
http://www.vision.caltech.edu/bouguetj/calib_doc/htmls/parameters.html
http://www.vision.caltech.edu/bouguetj/calib_doc/htmls/example5.html
Doctest:
>>> from viame.processes.camtrawl.algos import *
>>> from viame.processes.camtrawl.demo import *
>>> cal_fpath = '/home/joncrall/data/autoprocess_test_set/cal_201608.mat'
>>> cal = StereoCalibration.from_matfile(cal_fpath)
>>> print('cal = {}'.format(cal))
"""
import scipy.io
logger.debug('Loading matfile {}'.format(cal_fpath))
cal_data = scipy.io.loadmat(cal_fpath)
keys = ['om', 'T', 'fc_left', 'fc_right', 'cc_left', 'cc_right',
'kc_left', 'kc_right', 'alpha_c_left', 'alpha_c_right']
if isinstance(cal_data, dict) and 'Cal' in cal_data:
# Toolbox layout: values nested under 'Cal', positionally aligned
# with ``keys``.
vals = cal_data['Cal'][0][0]
flat_dict = {k: v.ravel() for k, v in zip(keys, vals)}
else:
# Flat layout: each parameter is a top-level matfile variable.
flat_dict = {key: cal_data[key].ravel() for key in keys}
return StereoCalibration._from_flat_dict(flat_dict)
@classmethod
def _from_flat_dict(StereoCalibration, flat_dict):
""" helper used by matlab and numpy readers """
data = {
'left': {
'extrinsic': {
# Center wold on the left camera
'om': np.zeros(3), # rotation vector
'T': np.zeros(3), # translation vector
},
'intrinsic': {
'fc': flat_dict['fc_left'], # focal point
'cc': flat_dict['cc_left'], # principle point
'alpha_c': flat_dict['alpha_c_left'][0], # skew
'kc': flat_dict['kc_left'], # distortion
}
},
'right': {
'extrinsic': {
'om': flat_dict['om'], # rotation vector
'T': flat_dict['T'], # translation vector
},
'intrinsic': {
'fc': flat_dict['fc_right'], # focal point
'cc': flat_dict['cc_right'], # principle point
'alpha_c': flat_dict['alpha_c_right'][0], # skew
'kc': flat_dict['kc_right'], # distortion
}
},
}
cal = StereoCalibration()
cal.data = data
return cal
# if __name__ == '__main__':
# from . import demo
# logging.basicConfig()
# demo.demo()
# # import camtrawl_demo
# # camtrawl_demo.demo()
# # import utool as ut
# # ut.dump_profile_text()
# # import pytest
# # pytest.main([__file__, '--doctest-modules'])
if __name__ == '__main__':
r"""
CommandLine:
python -m viame.processes.camtrawl.algos
Ignore:
workon_py2
source ~/code/VIAME/build/install/setup_viame.sh
export SPROKIT_PYTHON_MODULES=camtrawl_processes:kwiver.processes:viame.processes
export PYTHONPATH=$(pwd):$PYTHONPATH
python ~/code/VIAME/plugins/camtrawl/python/camtrawl_demo.py
ffmpeg -y -f image2 -i out_haul83/%*.png -vcodec mpeg4 -vf "setpts=10*PTS" haul83-results.avi
"""
# Run this module's doctests when executed directly.
import xdoctest
xdoctest.doctest_module(__file__)
| 37.375746 | 124 | 0.572261 |
47c57b7ac4e38744b834409caec1fecf789f3f90 | 2,710 | py | Python | DeepCFR/workers/la/buffers/AdvReservoirBuffer.py | mohamedun/Deep-CFR | ec3a7fb06e11bd6cc65bb2bf6f16108ee41f7234 | [
"MIT"
] | 192 | 2019-06-19T13:11:04.000Z | 2022-03-30T05:55:57.000Z | DeepCFR/workers/la/buffers/AdvReservoirBuffer.py | mohamedun/Deep-CFR | ec3a7fb06e11bd6cc65bb2bf6f16108ee41f7234 | [
"MIT"
] | 7 | 2019-09-18T19:50:10.000Z | 2021-06-21T02:27:12.000Z | DeepCFR/workers/la/buffers/AdvReservoirBuffer.py | mohamedun/Deep-CFR | ec3a7fb06e11bd6cc65bb2bf6f16108ee41f7234 | [
"MIT"
] | 52 | 2019-07-07T23:08:37.000Z | 2022-01-08T08:11:25.000Z | import torch
from DeepCFR.workers.la.buffers._ReservoirBufferBase import ReservoirBufferBase as _ResBufBase
class AdvReservoirBuffer(_ResBufBase):
"""Reservoir buffer that additionally stores per-action advantage
targets for training a Deep CFR advantage network."""
def __init__(self, owner, nn_type, max_size, env_bldr, iter_weighting_exponent):
super().__init__(owner=owner, max_size=max_size, env_bldr=env_bldr, nn_type=nn_type,
iter_weighting_exponent=iter_weighting_exponent)
# One advantage value per action for each stored sample.
self._adv_buffer = torch.zeros((max_size, env_bldr.N_ACTIONS), dtype=torch.float32, device=self.device)
def add(self, pub_obs, range_idx, legal_action_mask, adv, iteration):
# Reservoir sampling: fill sequentially until capacity, then
# overwrite a random slot when _should_add() says so.
if self.size < self._max_size:
self._add(idx=self.size,
pub_obs=pub_obs,
range_idx=range_idx,
legal_action_mask=legal_action_mask,
adv=adv,
iteration=iteration)
self.size += 1
elif self._should_add():
self._add(idx=self._random_idx(),
pub_obs=pub_obs,
range_idx=range_idx,
legal_action_mask=legal_action_mask,
adv=adv,
iteration=iteration)
self.n_entries_seen += 1
def sample(self, batch_size, device):
# Uniform random minibatch; iteration weights are normalized by the
# most recent CFR iteration seen so they lie in (0, 1].
indices = torch.randint(0, self.size, (batch_size,), dtype=torch.long, device=self.device)
if self._nn_type == "recurrent":
obses = self._pub_obs_buffer[indices.cpu().numpy()]
elif self._nn_type == "feedforward":
obses = self._pub_obs_buffer[indices].to(device)
else:
raise NotImplementedError
return \
obses, \
self._range_idx_buffer[indices].to(device), \
self._legal_action_mask_buffer[indices].to(device), \
self._adv_buffer[indices].to(device), \
self._iteration_buffer[indices].to(device) / self._last_cfr_iteration_seen
def _add(self, idx, pub_obs, range_idx, legal_action_mask, adv, iteration):
# Write one sample into buffer slot ``idx``.
if self._nn_type == "feedforward":
pub_obs = torch.from_numpy(pub_obs)
self._pub_obs_buffer[idx] = pub_obs
self._range_idx_buffer[idx] = range_idx
self._legal_action_mask_buffer[idx] = legal_action_mask
self._adv_buffer[idx] = adv
# Later CFR iterations get larger weights (linear-CFR style).
self._iteration_buffer[idx] = float(iteration) ** self._iter_weighting_exponent
self._last_cfr_iteration_seen = iteration
def state_dict(self):
# Serialize the base buffers plus the advantage tensor.
return {
"base": super().state_dict(),
"adv": self._adv_buffer,
}
def load_state_dict(self, state):
super().load_state_dict(state["base"])
self._adv_buffer = state["adv"]
| 37.123288 | 111 | 0.615867 |
241800e279dc3c8fafca63a9aa991cc7b9b0e02a | 1,188 | py | Python | counter_ordered.py | akiselev1/hackerrank-solutions | 53c2a76c71c9b3553c077ccfde5178b27594ae72 | [
"MIT"
] | null | null | null | counter_ordered.py | akiselev1/hackerrank-solutions | 53c2a76c71c9b3553c077ccfde5178b27594ae72 | [
"MIT"
] | null | null | null | counter_ordered.py | akiselev1/hackerrank-solutions | 53c2a76c71c9b3553c077ccfde5178b27594ae72 | [
"MIT"
] | null | null | null | """
Created by akiselev on 2019-06-12
Input Format
A single line of input containing the string
.
Constraints
Output Format
Print the three most common characters along with their occurrence count each on a separate line.
Sort output in descending order of occurrence count.
If the occurrence count is the same, sort the characters in alphabetical order.
Sample Input 0
aabbbccde
Sample Output 0
b 3
a 2
c 2
Explanation 0
Here, b occurs 3 times. It is printed first.
Both a and c occur 2 times. So, a is printed in the second line and c in the third line because a comes before c in the alphabet.
"""
from collections import Counter
# Print the three most common characters of the input; sorting first
# makes most_common() break count-ties alphabetically.
for each in Counter(sorted(input())).most_common(3):
    print(*each)
# The triple-quoted block below is dead code: alternative solutions kept
# for reference only (it is a bare string literal and is never executed).
"""
from collections import Counter, OrderedDict
class OrderedCounter(Counter, OrderedDict):
pass
[print(*c) for c in OrderedCounter(sorted(input())).most_common(3)]
if __name__ == '__main__':
s = list(input().strip())
#print(s)
s=sorted(s, key = s.count, reverse = True)
#print(s)
for _ in range(3):
print("{} {}".format(s[0], s.count(s[0])))
for item in range(s.count(s[0])):
s.remove(s[0])
#print(s)
"""
0e964bfdf519e06f1f1ea45c1fd188d418b3936a | 5,528 | py | Python | main.py | KL35-Ronaldo/Calculator-Bot | c9f08cf626876269acc2b5910bf75c2c0df8bf75 | [
"MIT"
] | null | null | null | main.py | KL35-Ronaldo/Calculator-Bot | c9f08cf626876269acc2b5910bf75c2c0df8bf75 | [
"MIT"
] | null | null | null | main.py | KL35-Ronaldo/Calculator-Bot | c9f08cf626876269acc2b5910bf75c2c0df8bf75 | [
"MIT"
] | null | null | null | from os import getenv
from pyrogram.types import *
from pyrogram import Client, filters
from traceback import format_exc as exc
Bot = Client(
session_name=getenv("SESSION_NAME", "Calculator Bot"),
api_id=int(getenv("API_ID", "0")),
api_hash=getenv("API_HASH", ""),
bot_token = getenv("BOT_TOKEN", ""),
sleep_threshold=int(getenv("SLEEP_THRESHOLD", "5"))
)
START_TEXT = """Hai **__{}__** 👋,
__I'm a Simple and Powerful Telegram **--Calculator--** Bot. Send **--/calculator--** to Start Calculating.__
__Made by **--@KL35Ronaldo--**__"""
START_BUTTONS = InlineKeyboardMarkup(
[
[
InlineKeyboardButton('😎 Master 😎', user_id=1790509785)
]
]
)
CALCULATE_TEXT = "__Made by --**@KL35Ronaldo**--__"
CALCULATE_BUTTONS = InlineKeyboardMarkup(
[
[
InlineKeyboardButton("DEL", callback_data="DEL"),
InlineKeyboardButton("AC", callback_data="AC"),
InlineKeyboardButton("(", callback_data="("),
InlineKeyboardButton(")", callback_data=")")
],
[
InlineKeyboardButton("7", callback_data="7"),
InlineKeyboardButton("8", callback_data="8"),
InlineKeyboardButton("9", callback_data="9"),
InlineKeyboardButton("÷", callback_data="/")
],
[
InlineKeyboardButton("4", callback_data="4"),
InlineKeyboardButton("5", callback_data="5"),
InlineKeyboardButton("6", callback_data="6"),
InlineKeyboardButton("×", callback_data="*")
],
[
InlineKeyboardButton("1", callback_data="1"),
InlineKeyboardButton("2", callback_data="2"),
InlineKeyboardButton("3", callback_data="3"),
InlineKeyboardButton("-", callback_data="-"),
],
[
InlineKeyboardButton(".", callback_data="."),
InlineKeyboardButton("0", callback_data="0"),
InlineKeyboardButton("=", callback_data="="),
InlineKeyboardButton("+", callback_data="+"),
],
[
InlineKeyboardButton("√", callback_data="sqrt"),
InlineKeyboardButton("^", callback_data="**")
]
]
)
CALC_BTN = InlineKeyboardMarkup(
[
[
InlineKeyboardButton("🔢 Calculate 🔢")
]
]
)
@Bot.on_message(filters.command(["start"]))
async def start(c: Client, m: Message):
# /start handler; a deep-link payload of "calculate" opens the
# calculator keyboard directly, otherwise show the welcome message.
if len(m.command) != 1 and m.command[1] == "calculate":
await m.reply(CALCULATE_TEXT, True, reply_markup=CALCULATE_BUTTONS)
return
await m.reply(START_TEXT.format(m.from_user.mention), True, reply_markup=START_BUTTONS)
@Bot.on_message(filters.private & filters.command(["calc", "calculate", "calculator"]))
async def calculate(c: Client, m: Message):
# Send a fresh calculator keyboard; "#" is the empty-display sentinel.
await m.reply("#", True, reply_markup=CALCULATE_BUTTONS)
@Bot.on_callback_query()
async def cb_data(c: Client, q: CallbackQuery):
    """Handle presses on the inline calculator keyboard.

    The message text is the calculator display; "#" means empty.
    Each button press re-renders the display via ``m.edit``.
    """
    m, d = q.message, q.data
    try:
        t = m.text.strip()
        if d == "calculate":
            # Reset to an empty display.
            await m.edit("#", reply_markup=CALCULATE_BUTTONS)
            # BUGFIX: return here -- previously execution fell through
            # to the final ``m.edit`` below with ``text`` unbound,
            # raising NameError (caught and shown as an error message).
            return
        if d == "=":
            # NOTE(security): eval() runs on the message text. Inputs
            # are normally produced by this keyboard, but this is still
            # evaluation of message content -- consider replacing with a
            # safe arithmetic parser.
            text = float(eval(t))
        elif d == "DEL":
            # Remove last char; restore the empty sentinel if exhausted.
            text = t[:-1] if not len(t) == 1 else "#"
        elif d == "AC":
            text = "#"
        elif d == "sqrt":
            text = "sqrt()"
        else:
            # Append the pressed symbol, replacing the empty sentinel.
            text = str(t + d) if t[0] != "#" else d
        await m.edit(f"{text}", reply_markup=CALCULATE_BUTTONS)
    except:
        # Telegram rejects edits that do not change the message; treat
        # that case as a no-op, everything else as an error display.
        if "you tried to edit it using the same content" in exc():
            await q.answer()
        else:
            await m.edit(f"Sorry Something Went Wrong..!\n\n\n`{exc()}`\n\n\nPlease Try Again.")
        print(exc())
@Bot.on_inline_query()
async def inline(c: Client, q: InlineQuery):
# Inline-mode calculator: evaluate the typed expression and answer
# with an inline article containing the result.
d = q.query
if len(d) == 0:
try:
answers = [
InlineQueryResultArticle(
"Calculator",
InputTextMessageContent(CALCULATE_TEXT),
None,
CALCULATE_BUTTONS,
None,
"A Simple Calculator Made By Ronaldo Fan.",
"https://telegra.ph/file/3ed71fa60172e09e96794.jpg"
)
]
except:
print(exc())
else:
try:
# Drop anything after '=' so "2+2=" still evaluates, then
# normalize multiplication/division glyphs.
t = d.strip().split("=")[0].strip() if "=" in d else d.strip()
dd = t.replace("×", "*").replace("x", "*").replace("X", "*").replace("÷", "/")
# NOTE(security): eval() runs on arbitrary user-supplied inline
# query text -- this is remote code execution; replace with a
# safe arithmetic expression parser.
t = float(eval(dd))
answers = [
InlineQueryResultArticle(
"Results",
InputTextMessageContent(f"{dd} = {t}"),
None,
None,
None,
"A Simple Calculator Made By Ronaldo Fan.",
"https://telegra.ph/file/3ed71fa60172e09e96794.jpg"
)
]
except:
print(exc())
answers = [
InlineQueryResultArticle(
"Error",
InputTextMessageContent(f"Sorry Something Went Wrong..!\n\n\n`{exc()}`\n\n\nPlease Try Again."),
None,
None,
None,
"A Simple Calculator Made By Ronaldo Fan.",
"https://telegra.ph/file/3ed71fa60172e09e96794.jpg"
)
]
await q.answer(answers, 300, False, False, "", "🔥 A Simple Calculator Made By Ronaldo Fan. 🔥", "calculate")
| 33.101796 | 116 | 0.52623 |
cbdba2fff9f67c5eb1304b70ee0b2ee734adcf3b | 811 | py | Python | office365/directory/subscribedSku.py | andrewcchoi/Office365-REST-Python-Client | 43db12ae532c804c75a3a34f7b0d7d79e30fdac3 | [
"MIT"
] | null | null | null | office365/directory/subscribedSku.py | andrewcchoi/Office365-REST-Python-Client | 43db12ae532c804c75a3a34f7b0d7d79e30fdac3 | [
"MIT"
] | null | null | null | office365/directory/subscribedSku.py | andrewcchoi/Office365-REST-Python-Client | 43db12ae532c804c75a3a34f7b0d7d79e30fdac3 | [
"MIT"
] | null | null | null | from office365.directory.servicePlanInfo import ServicePlanInfo
from office365.entity import Entity
from office365.entity_collection import EntityCollection
from office365.runtime.client_value_collection import ClientValueCollection
class SubscribedSku(Entity):
"""Contains information about a service SKU that a company is subscribed to."""
@property
def servicePlans(self):
"""Information about the service plans that are available with the SKU. Not nullable"""
# Falls back to an empty ClientValueCollection when the property has
# not been loaded from the service yet.
return self.properties.get('servicePlans',
ClientValueCollection(ServicePlanInfo))
class SubscribedSkuCollection(EntityCollection):
"""Collection of :class:`SubscribedSku` entities."""
def __init__(self, context, resource_path=None):
super(SubscribedSkuCollection, self).__init__(context, SubscribedSku, resource_path)
7e9a5868eaad5db08481423f21eea9fdb30d734a | 454 | py | Python | product/migrations/0012_auto_20200622_1430.py | IHIMEKPEN/Pegasus-beta | f2c1daf97225ef0bc21c0754de5717de65216bce | [
"MIT"
] | 2 | 2022-01-07T15:06:34.000Z | 2022-03-15T20:17:05.000Z | product/migrations/0012_auto_20200622_1430.py | IHIMEKPEN/Pegasus-beta | f2c1daf97225ef0bc21c0754de5717de65216bce | [
"MIT"
] | null | null | null | product/migrations/0012_auto_20200622_1430.py | IHIMEKPEN/Pegasus-beta | f2c1daf97225ef0bc21c0754de5717de65216bce | [
"MIT"
] | null | null | null | # Generated by Django 2.2 on 2020-06-22 14:30
import autoslug.fields
from django.db import migrations
class Migration(migrations.Migration):
# Auto-generated Django migration: converts Order.slug to a
# non-editable AutoSlugField populated from customer username + id.
dependencies = [
('product', '0011_auto_20200622_1401'),
]
operations = [
migrations.AlterField(
model_name='order',
name='slug',
field=autoslug.fields.AutoSlugField(editable=False, populate_from=['customer__username', 'id']),
),
]
c6de4da30bf6784d860cf4044b877678439697a0 | 414 | py | Python | bot/main.py | NixProGaming/hyper | ff4de188c8dc3ee5c69830530c92e8a1bd8d8890 | [
"MIT"
] | null | null | null | bot/main.py | NixProGaming/hyper | ff4de188c8dc3ee5c69830530c92e8a1bd8d8890 | [
"MIT"
] | null | null | null | bot/main.py | NixProGaming/hyper | ff4de188c8dc3ee5c69830530c92e8a1bd8d8890 | [
"MIT"
] | null | null | null | import discord
import os
#import pynacl
#import dnspython
import server
from discord.ext import commands
bot = commands.Bot(command_prefix="!")

# BUGFIX: ``os.getenv`` takes the *name* of an environment variable, not the
# value. The previous code passed the literal bot token as the variable name,
# so ``getenv`` returned None and ``bot.run(None)`` failed. (That token was
# also committed to source control and must be considered leaked/revoked.)
# Store the token in an environment variable named TOKEN instead.
TOKEN = os.getenv("TOKEN")


@bot.event
async def on_ready():
    """Log a message once the bot has connected to Discord."""
    print(f"Logged in as {bot.user.name}({bot.user.id})")


@bot.command()
async def ping(ctx):
    """Reply with "pong" -- a simple liveness check."""
    await ctx.send("pong")


# Start the keep-alive web server, then block on the Discord client.
server.server()
bot.run(TOKEN)
32adc1d216f3c5a9f9995d65040305508429f520 | 6,481 | py | Python | homeassistant/components/nuheat/climate.py | VirtualL/home-assistant | 301829d02be8d865ab46c8901ac046d060849320 | [
"Apache-2.0"
] | null | null | null | homeassistant/components/nuheat/climate.py | VirtualL/home-assistant | 301829d02be8d865ab46c8901ac046d060849320 | [
"Apache-2.0"
] | 3 | 2021-09-08T03:34:57.000Z | 2022-03-12T00:59:48.000Z | homeassistant/components/nuheat/climate.py | VirtualL/home-assistant | 301829d02be8d865ab46c8901ac046d060849320 | [
"Apache-2.0"
] | null | null | null | """Support for NuHeat thermostats."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.climate import ClimateDevice
from homeassistant.components.climate.const import (
DOMAIN, STATE_AUTO, STATE_HEAT, STATE_IDLE, SUPPORT_HOLD_MODE,
SUPPORT_OPERATION_MODE, SUPPORT_TARGET_TEMPERATURE)
from homeassistant.const import (
ATTR_ENTITY_ID, ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT)
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
from . import DOMAIN as NUHEAT_DOMAIN
DEPENDENCIES = ["nuheat"]
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:thermometer"
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)
# Hold modes
MODE_AUTO = STATE_AUTO # Run device schedule
MODE_HOLD_TEMPERATURE = "temperature"
MODE_TEMPORARY_HOLD = "temporary_temperature"
OPERATION_LIST = [STATE_HEAT, STATE_IDLE]
SCHEDULE_HOLD = 3
SCHEDULE_RUN = 1
SCHEDULE_TEMPORARY_HOLD = 2
SERVICE_RESUME_PROGRAM = "nuheat_resume_program"
RESUME_PROGRAM_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids
})
SUPPORT_FLAGS = (SUPPORT_TARGET_TEMPERATURE | SUPPORT_HOLD_MODE |
SUPPORT_OPERATION_MODE)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the NuHeat thermostat(s).

    Only runs when loaded via discovery from the nuheat component.
    Creates one NuHeatThermostat per configured serial number and
    registers the nuheat_resume_program service.
    """
    if discovery_info is None:
        return
    temperature_unit = hass.config.units.temperature_unit
    api, serial_numbers = hass.data[NUHEAT_DOMAIN]
    thermostats = [
        NuHeatThermostat(api, serial_number, temperature_unit)
        for serial_number in serial_numbers
    ]
    add_entities(thermostats, True)

    def resume_program_set_service(service):
        """Resume the program on the target thermostats."""
        entity_id = service.data.get(ATTR_ENTITY_ID)
        if entity_id:
            # Restrict to the entities named in the service call.
            target_thermostats = [device for device in thermostats
                                  if device.entity_id in entity_id]
        else:
            # No entity_id supplied: apply to every NuHeat thermostat.
            target_thermostats = thermostats
        for thermostat in target_thermostats:
            thermostat.resume_program()
            thermostat.schedule_update_ha_state(True)

    hass.services.register(
        DOMAIN, SERVICE_RESUME_PROGRAM, resume_program_set_service,
        schema=RESUME_PROGRAM_SCHEMA)
class NuHeatThermostat(ClimateDevice):
    """Representation of a NuHeat Thermostat."""

    def __init__(self, api, serial_number, temperature_unit):
        """Initialize the thermostat."""
        self._thermostat = api.get_thermostat(serial_number)
        self._temperature_unit = temperature_unit
        # When True, the next update() bypasses the 5-minute throttle.
        self._force_update = False

    @property
    def name(self):
        """Return the name of the thermostat."""
        return self._thermostat.room

    @property
    def icon(self):
        """Return the icon to use in the frontend."""
        return ICON

    @property
    def supported_features(self):
        """Return the list of supported features."""
        return SUPPORT_FLAGS

    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        if self._temperature_unit == "C":
            return TEMP_CELSIUS
        return TEMP_FAHRENHEIT

    @property
    def current_temperature(self):
        """Return the current temperature."""
        if self._temperature_unit == "C":
            return self._thermostat.celsius
        return self._thermostat.fahrenheit

    @property
    def current_operation(self):
        """Return current operation. ie. heat, idle."""
        if self._thermostat.heating:
            return STATE_HEAT
        return STATE_IDLE

    @property
    def min_temp(self):
        """Return the minimum supported temperature for the thermostat."""
        if self._temperature_unit == "C":
            return self._thermostat.min_celsius
        return self._thermostat.min_fahrenheit

    @property
    def max_temp(self):
        """Return the maximum supported temperature for the thermostat."""
        if self._temperature_unit == "C":
            return self._thermostat.max_celsius
        return self._thermostat.max_fahrenheit

    @property
    def target_temperature(self):
        """Return the currently programmed temperature."""
        if self._temperature_unit == "C":
            return self._thermostat.target_celsius
        return self._thermostat.target_fahrenheit

    @property
    def current_hold_mode(self):
        """Return current hold mode."""
        # Map the device's numeric schedule mode onto HA hold modes;
        # anything unrecognized falls back to auto (run schedule).
        schedule_mode = self._thermostat.schedule_mode
        if schedule_mode == SCHEDULE_RUN:
            return MODE_AUTO
        if schedule_mode == SCHEDULE_HOLD:
            return MODE_HOLD_TEMPERATURE
        if schedule_mode == SCHEDULE_TEMPORARY_HOLD:
            return MODE_TEMPORARY_HOLD
        return MODE_AUTO

    @property
    def operation_list(self):
        """Return list of possible operation modes."""
        return OPERATION_LIST

    def resume_program(self):
        """Resume the thermostat's programmed schedule."""
        self._thermostat.resume_schedule()
        self._force_update = True

    def set_hold_mode(self, hold_mode):
        """Update the hold mode of the thermostat."""
        # NOTE(review): an unrecognized hold_mode leaves schedule_mode
        # unbound and raises UnboundLocalError below - presumably callers
        # only ever pass the three known modes; confirm.
        if hold_mode == MODE_AUTO:
            schedule_mode = SCHEDULE_RUN
        if hold_mode == MODE_HOLD_TEMPERATURE:
            schedule_mode = SCHEDULE_HOLD
        if hold_mode == MODE_TEMPORARY_HOLD:
            schedule_mode = SCHEDULE_TEMPORARY_HOLD
        self._thermostat.schedule_mode = schedule_mode
        self._force_update = True

    def set_temperature(self, **kwargs):
        """Set a new target temperature."""
        temperature = kwargs.get(ATTR_TEMPERATURE)
        if self._temperature_unit == "C":
            self._thermostat.target_celsius = temperature
        else:
            self._thermostat.target_fahrenheit = temperature
        _LOGGER.debug(
            "Setting NuHeat thermostat temperature to %s %s",
            temperature, self.temperature_unit)
        self._force_update = True

    def update(self):
        """Get the latest state from the thermostat."""
        # After a local write (temperature/hold/resume) fetch immediately
        # instead of waiting out the throttle window.
        if self._force_update:
            self._throttled_update(no_throttle=True)
            self._force_update = False
        else:
            self._throttled_update()

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def _throttled_update(self, **kwargs):
        """Get the latest state from the thermostat with a throttle."""
        self._thermostat.get_data()
| 30.00463 | 74 | 0.678753 |
8c2b2bddd67300af1624023c84463fb9487121e6 | 3,372 | py | Python | homeassistant/components/zha/core/channels/lighting.py | mikan-megane/core | 837220cce40890e296920d33a623adbc11bd15a6 | [
"Apache-2.0"
] | 5 | 2018-10-23T14:15:05.000Z | 2021-11-26T06:38:44.000Z | homeassistant/components/zha/core/channels/lighting.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 79 | 2020-07-23T07:13:37.000Z | 2022-03-22T06:02:37.000Z | homeassistant/components/zha/core/channels/lighting.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 3 | 2022-01-17T20:10:54.000Z | 2022-01-17T20:17:22.000Z | """Lighting channels module for Zigbee Home Automation."""
from __future__ import annotations
from collections.abc import Coroutine
from contextlib import suppress
import zigpy.zcl.clusters.lighting as lighting
from .. import registries
from ..const import REPORT_CONFIG_DEFAULT
from .base import ClientChannel, ZigbeeChannel
@registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Ballast.cluster_id)
class Ballast(ZigbeeChannel):
    """Ballast channel (registration only; inherits all ZigbeeChannel behavior)."""
@registries.CLIENT_CHANNELS_REGISTRY.register(lighting.Color.cluster_id)
class ColorClientChannel(ClientChannel):
    """Color client channel (registration only; inherits all ClientChannel behavior)."""
@registries.BINDABLE_CLUSTERS.register(lighting.Color.cluster_id)
@registries.ZIGBEE_CHANNEL_REGISTRY.register(lighting.Color.cluster_id)
class ColorChannel(ZigbeeChannel):
    """Color channel."""

    # Bit flags inside the ZCL color_capabilities attribute.
    CAPABILITIES_COLOR_XY = 0x08
    CAPABILITIES_COLOR_TEMP = 0x10
    # ZCL status code for an unsupported attribute.
    UNSUPPORTED_ATTRIBUTE = 0x86
    REPORT_CONFIG = (
        {"attr": "current_x", "config": REPORT_CONFIG_DEFAULT},
        {"attr": "current_y", "config": REPORT_CONFIG_DEFAULT},
        {"attr": "color_temperature", "config": REPORT_CONFIG_DEFAULT},
    )
    # Fallbacks used when the device does not report its physical limits.
    MAX_MIREDS: int = 500
    MIN_MIREDS: int = 153

    @property
    def color_capabilities(self) -> int:
        """Return color capabilities of the light."""
        # Prefer the device-reported capabilities; otherwise infer from
        # whether a color_temperature attribute is cached.
        with suppress(KeyError):
            return self.cluster["color_capabilities"]
        if self.cluster.get("color_temperature") is not None:
            return self.CAPABILITIES_COLOR_XY | self.CAPABILITIES_COLOR_TEMP
        return self.CAPABILITIES_COLOR_XY

    @property
    def color_loop_active(self) -> int | None:
        """Return cached value of the color_loop_active attribute."""
        return self.cluster.get("color_loop_active")

    @property
    def color_temperature(self) -> int | None:
        """Return cached value of color temperature."""
        return self.cluster.get("color_temperature")

    @property
    def current_x(self) -> int | None:
        """Return cached value of the current_x attribute."""
        return self.cluster.get("current_x")

    @property
    def current_y(self) -> int | None:
        """Return cached value of the current_y attribute."""
        return self.cluster.get("current_y")

    @property
    def min_mireds(self) -> int:
        """Return the coldest color_temp that this channel supports."""
        return self.cluster.get("color_temp_physical_min", self.MIN_MIREDS)

    @property
    def max_mireds(self) -> int:
        """Return the warmest color_temp that this channel supports."""
        return self.cluster.get("color_temp_physical_max", self.MAX_MIREDS)

    def async_configure_channel_specific(self) -> Coroutine:
        """Configure channel."""
        return self.fetch_color_capabilities(False)

    def async_initialize_channel_specific(self, from_cache: bool) -> Coroutine:
        """Initialize channel."""
        return self.fetch_color_capabilities(True)

    async def fetch_color_capabilities(self, from_cache: bool) -> None:
        """Get the color configuration."""
        attributes = [
            "color_temp_physical_min",
            "color_temp_physical_max",
            "color_capabilities",
            "color_temperature",
        ]
        # just populates the cache, if not already done
        await self.get_attributes(attributes, from_cache=from_cache)
| 34.762887 | 79 | 0.696619 |
2b83755bedf49d4a7ae7e69cab0a7d4277534490 | 347 | py | Python | examples/hello_world.py | web-i/ripple | d028fbf13bd3c2438c03ae896ac211c1f0c587b3 | [
"MIT"
] | 2 | 2015-01-28T13:20:12.000Z | 2015-03-28T14:58:33.000Z | examples/hello_world.py | web-i/ripple | d028fbf13bd3c2438c03ae896ac211c1f0c587b3 | [
"MIT"
] | null | null | null | examples/hello_world.py | web-i/ripple | d028fbf13bd3c2438c03ae896ac211c1f0c587b3 | [
"MIT"
] | null | null | null | '''
A simple webi application, run by wsgi adaptor.
'''
def hello(environ):
return 200, {'Content-Type': 'text/html'}, '<h3>Hello, from Ripple.</h3>'
if __name__ == '__main__':
import ripple.adaptors
from werkzeug.serving import run_simple
run_simple('localhost', 4000, ripple.adaptors.wsgi(hello), use_debugger=True, use_reloader=True)
| 28.916667 | 98 | 0.723343 |
53f4f57290a59682491bbb6907798347bc03bb10 | 12,900 | py | Python | folding_v3.py | ctada/napCAD | 59377bcd2e958b48d7ded820b8a43db632049a2b | [
"MIT"
] | 7 | 2015-05-03T05:26:42.000Z | 2022-02-12T20:38:24.000Z | folding_v3.py | ctada/napCAD | 59377bcd2e958b48d7ded820b8a43db632049a2b | [
"MIT"
] | 1 | 2020-02-28T11:04:07.000Z | 2020-02-28T11:05:51.000Z | folding_v3.py | ctada/napCAD | 59377bcd2e958b48d7ded820b8a43db632049a2b | [
"MIT"
] | 4 | 2016-12-07T04:58:36.000Z | 2020-07-13T09:19:09.000Z | """Shivali Chandra
Restructured code that takes coordinates of sides and axes of rotation, and rotates the sides
until they meet, forming a closed 3D shape. As of 5/3/15 can fold cubes, square pyramids, and
other prisms with square bases.
"""
import numpy as np
import math
import collections
def find_intersection_distances(p1, y1, x1, y2, x2):
    """Return the distance from point p1 = (x, y) to the fold axis
    through (x1, y1)-(x2, y2).

    Horizontal and vertical axes are handled directly; the general
    diagonal branch projects p1 onto the axis and measures to the foot
    of the perpendicular.
    """
    if y2 - y1 == 0:
        # Horizontal axis: distance is the vertical offset.
        dist = math.fabs(p1[1] - y1)
    elif x2 - x1 == 0:
        # Vertical axis: distance is the horizontal offset.
        dist = math.fabs(p1[0] - x1)
    else:
        # Diagonal axis (not exercised by the current callers): find the
        # foot of the perpendicular from p1 and measure to it.
        axis_line_slope = (y2 - y1) / (x2 - x1)
        perp_slope = -1 / axis_line_slope
        c1 = axis_line_slope * x1 - y1
        c2 = perp_slope * p1[0] - p1[1]
        x_intersect = (c2 - c1) / (perp_slope - axis_line_slope)
        y_intersect = perp_slope * x_intersect + c2
        dist = math.sqrt((x_intersect - p1[0]) ** 2 + (y_intersect - p1[1]) ** 2)
    # Removed: an unused `hypotenuse` local that was computed and discarded.
    return dist
def move_to_actual_coord(old_side,side_dict,side_num,theta):
    """Change the xy coordinates of the sides to the correct values from the original image
    Input: folded side coordinates (normalized to the origin), sides dictionary,
    side number, fold angle theta (radians - passed through from transform_side)
    Output: sides dictionary with the translated coordinates appended for this side
    """
    final_side = list()
    #y1,x1,y2,x2 are coordinates of axis normalized to zero
    y1 = old_side[0][1]
    x1 = old_side[0][0]
    y2 = old_side[len(old_side)-1][1]
    x2 = old_side[len(old_side)-1][0]
    #variables are coordinates of actual axis (taken from the original image)
    new_xaxis = side_dict[side_num][1][len(old_side)-1][0]
    new_xaxis1 = side_dict[side_num][1][0][0]
    new_yaxis = side_dict[side_num][1][len(old_side)-1][1]
    new_yaxis1 = side_dict[side_num][1][0][1]
    #iterate through each point of a side
    for i,j in enumerate(old_side):
        x = side_dict[side_num][1][i][0]
        y = side_dict[side_num][1][i][1]
        intersections = find_intersection_distances((j[0],j[1]),y1,x1,y2,x2)
        dist = intersections
        # Candidate placements of the point relative to the actual axis:
        # 1/2 offset the point vertically from a horizontal axis,
        # 3/4 offset it horizontally from a vertical axis.
        coordinates = {1:(x,(dist+new_yaxis)),2:(x,(new_yaxis-dist)),3:((new_xaxis-dist),y),4:((new_xaxis+dist),y)}
        # NOTE(review): this recomputes the distance already held in `dist`;
        # the re-assigned value is never used afterwards.
        intersections = find_intersection_distances((j[0],j[1]),y1,x1,y2,x2)
        #check which direction the fold needs to be (inwards or outwards) depending on axis orientation
        if (new_xaxis>new_xaxis1) and (new_yaxis-new_yaxis1==0):
            # Horizontal axis pointing right.
            if (math.degrees(theta) <= 90):
                if (new_yaxis<0):
                    [fin_x,fin_y] = coordinates[1]
                else:
                    [fin_x,fin_y] = coordinates[2]
            else:
                # Past 90 degrees the fold crosses the axis, so the
                # placement flips to the other vertical candidate.
                if (new_yaxis<0):
                    [fin_x,fin_y] = coordinates[2]
                else:
                    [fin_x,fin_y] = coordinates[1]
        elif (new_xaxis<new_xaxis1) and (new_yaxis-new_yaxis1==0):
            # Horizontal axis pointing left: mirror of the case above.
            if math.degrees(theta) <= 90:
                if (new_yaxis<0):
                    [fin_x,fin_y] = coordinates[2]
                else:
                    [fin_x,fin_y] = coordinates[1]
            else:
                if (new_yaxis<0):
                    [fin_x,fin_y] = coordinates[1]
                else:
                    [fin_x,fin_y] = coordinates[2]
        elif (new_yaxis>new_yaxis1) and (new_xaxis-new_xaxis1==0):
            # Vertical axis pointing up.
            if math.degrees(theta) <= 90:
                if (new_xaxis<0):
                    [fin_x,fin_y] = coordinates[3]
                else:
                    [fin_x,fin_y] = coordinates[4]
            else:
                if (new_xaxis<0):
                    [fin_x,fin_y] = coordinates[4]
                else:
                    [fin_x,fin_y] = coordinates[3]
        else:
            # Vertical axis pointing down (fallback branch).
            if math.degrees(theta) <=90:
                if (new_xaxis<0):
                    [fin_x,fin_y] = coordinates[4]
                else:
                    [fin_x,fin_y] = coordinates[3]
            else:
                if (new_xaxis<0):
                    [fin_x,fin_y] = coordinates[3]
                else:
                    [fin_x,fin_y] = coordinates[4]
        # z survives the translation unchanged; only x/y are re-anchored.
        z = j[2]
        #add coordinates to final_side
        final_coord = fin_x/1.0,fin_y/1.0,z
        final_coord = list(final_coord)
        final_side.append(final_coord)
    #add final_side to dictionary of sides (entry becomes a 3-element list:
    #normalized side, actual side, translated folded side)
    side_dict[side_num] = list(side_dict[side_num])
    side_dict[side_num].append(final_side)
    return side_dict
def transform_side(side_num,side_dict,theta):
    """Transform the coordinates of the side onto the perpendicular plane using Euler-Rodrigues formula
    Input: side number, sides dictionary, theta in DEGREES
    Output: sides dictionary updated (via move_to_actual_coord) with the folded coordinates
    """
    side = side_dict[side_num][0]
    new_side = list()
    #calculating axis of rotation (x-extent of the normalized side; y and z are zero)
    axis = side[len(side)-1][0]-side[0][0],0.0,0.0
    #converting theta to radians
    rad = math.radians(theta)
    for i in side:
        #calculating vector for each point in side
        side_vector = i[0],i[1],0.0
        #Euler-Rodrigues formula to rotate vectors
        # NOTE(review): `axis` and `theta` are rebound inside the loop -
        # from the first iteration on, `theta` holds the RADIAN value as a
        # numpy scalar (this is also what move_to_actual_coord receives).
        axis = np.asarray(axis)
        theta = np.asarray(rad)
        axis = axis/math.sqrt(np.dot(axis, axis))
        a = math.cos(theta/2.0)
        b, c, d = -axis*math.sin(theta/2.0)
        aa, bb, cc, dd = a*a, b*b, c*c, d*d
        bc, ad, ac, ab, bd, cd = b*c, a*d, a*c, a*b, b*d, c*d
        # Rotation matrix built from the Euler-Rodrigues parameters.
        multiplier = np.array([[aa+bb-cc-dd, 2.0*(bc+ad), 2.0*(bd-ac)],
                    [2.0*(bc-ad), aa+cc-bb-dd, 2.0*(cd+ab)],
                    [2.0*(bd+ac), 2.0*(cd-ab), aa+dd-bb-cc]])
        transform_vector = (np.dot(multiplier, side_vector))
        #round points to nearest whole number, add to list of transformed side coordinates
        folded_vector = round(transform_vector[0]),round(transform_vector[1]),round(transform_vector[2])
        new_side.append(folded_vector)
    #call function to translate the x,y coordinates with relation to the actual side coordinates
    moved_side = move_to_actual_coord(new_side,side_dict,side_num,theta)
    return moved_side
def output(theta, final_list):
    """Flatten every folded point into parallel x, y and z lists.

    `theta` is accepted only for signature compatibility/testing and is
    not used. `final_list` maps side number -> entry whose index 2 holds
    the translated folded coordinates for that side.
    Returns three lists: all x values, all y values, all z values.
    """
    xs = []
    ys = []
    zs = []
    for side_key in final_list:
        for point in final_list[side_key][2]:
            xs.append(point[0])
            ys.append(point[1])
            zs.append(point[2])
    return xs, ys, zs
def check_sides(run,temp,theta,fin):
    """Recursive function takes in each side and continuously checks for sides to meet up
    Input: sides that are being folded, temporary dict with sides to check against, theta,
    full list of sides_old_coordinates
    Output: final (theta, sides dict) once all sides meet, or -1 if theta exceeds 360
    """
    #create list and dictionaries for storing points
    vector_sides = temp
    data = []
    redo_sides = {}
    new_run = {}
    #iterate through dictionary of the sides still being folded
    for i in run:
        position = run[i][2]
        #create keys of the sets of two points that define each line in a side
        for j in range(1,len(position)):
            # The key concatenates both endpoint coordinates, so two sides
            # sharing a folded edge produce the same key.
            if ((position[j][0]==position[j-1][0]) or (position[j][1]==position[j-1][1])):
                key = str(position[j][0])+str(position[j][1])+str(position[j-1][0])+str(position[j-1][1])
            elif ((position[j][0]>position[j-1][0]) or (position[j][1]>position[j-1][1])):
                # NOTE(review): this branch builds the identical key as the
                # branch above; only the final else reverses the point order.
                key = str(position[j][0])+str(position[j][1])+str(position[j-1][0])+str(position[j-1][1])
            else:
                key = str(position[j-1][0])+str(position[j-1][1])+str(position[j][0])+str(position[j][1])
            #add keys, side numbers as values to a dictionary
            if key in vector_sides:
                vector_sides[key].append(i)
            else:
                vector_sides[key] = [i]
    for k in vector_sides:
        #if a key has more than one value, two sides have folded to meet up
        if len(vector_sides[k])>1:
            data.append(vector_sides[k][0])
            data.append(vector_sides[k][1])
    #check to see if all the sides have met another side, new_side_list returns those that have not
    new_side_list = list(set(run.keys())-set(data))
    #if there are no sides left to fold, return the final list of coordinates
    if not new_side_list:
        return theta,run
    else:
        #if there are, add to theta to have the side fold again
        new_t = theta+1
        #create a new dictionary with only the sides that need to be folded
        for i in run.keys():
            if i in new_side_list:
                redo_sides[i] = [run[i][0]]
                redo_sides[i].append(run[i][1])
            else:
                #create a temp dictionary of sides that don't need to be folded (to compare coordinates with)
                new_run[i] = run[i]
            #call function to fold sides again
            # NOTE(review): indentation reconstructed from a whitespace-mangled
            # source. If a side has already matched (else branch above), this
            # call looks up a key that is not in redo_sides; it only works
            # when every remaining side refolds in lockstep (symmetric
            # shapes). Confirm against the original file.
            redone = transform_side(i,redo_sides,new_t)
        #add the completed and the newly folded sides to a new list
        final = redone.copy()
        final.update(new_run)
        #call this function again to check if all sides have met another side
        if theta < 360:
            return check_sides(redone,new_run,new_t,final)
        else:
            return -1
def make_sides(sides, theta):
    """Fold every side in `sides` by `theta` degrees.

    transform_side mutates the shared `sides` dict as it goes, so the
    value returned from the final iteration is the fully updated dict.
    (Removed two unused locals, `length` and `side`, from the original.)

    Raises NameError if `sides` is empty (unchanged behavior: there is
    then no transformed result to return).
    """
    for side_num in sides:
        # Each call appends this side's folded coordinates into `sides`.
        transformed = transform_side(side_num, sides, theta)
    return transformed
def main(sides,xy_coord):
    """Main function that creates a dictionary with the two lists and calls other functions
    Input: list of sides normalized to 0,0, and list of actual side coordinates
    Output: the list of x,y,z coordinates of the final folded shape,
    or an error message plus xy_coord if the sides never meet
    """
    # Initial fold angle in degrees; check_sides increments it by 1 per pass.
    theta = 20
    sides_old_coordinates = {}
    #iterate through the sides to create a dictionary with side number as key,
    #val 1 as the side normalized to 0, and val 2 as the actual side coordinates
    for i in range(0,len(sides)):
        val1 = sides[i]
        val2 = xy_coord[i]
        if i not in sides_old_coordinates:
            sides_old_coordinates[i] = val1,val2
    #call function to iterate through sides and fold each one
    run_fxn = make_sides(sides_old_coordinates,theta)
    #call function to check that sides have met up (prism has been folded)
    final_sides = check_sides(run_fxn,{},theta,{})
    #call function to output the final lists of x,y,z coordinates
    if final_sides == -1:
        return "Sorry, these sides never meet up!Here is what I am trying to fold:",xy_coord
    else:
        return output(final_sides[0],final_sides[1])
if __name__ == "__main__":
#following hard-coded shapes are for testing purposes
"""CUBE (WORKS)"""
#side_coordinates = (([0,0],[0,6],[6,6],[6,0]),([0,0],[0,6],[6,6],[6,0]),([0,0],[0,6],[6,6],[6,0]),([0,0],[0,6],[6,6],[6,0]))
#actual_coordinates = (([6,6],[0,6],[0,12],[6,12]),([6,12],[6,18],[12,18],[12,12]),([12,12],[18,12],[18,6],[12,6]),([12,6],[12,0],[6,0],[6,6]))
"""side_coordinates = (([0.0, 0.0], [0.0, 194.0], [176.00000000000003, 193.99999999999997], [176.0, 0.0]), ([0.0, 0.0], [0.0, 176.0], [173.0, 176.0], [173.0, 0.0]), ([0.0, 0.0], [0.0, 194.0], [165.00000000000003, 193.99999999999997], [165.0, 0.0]), ([0.0, 0.0], [0.0, 176.0], [147.99999999999997, 176.00000000000003], [148.0, 0.0]))
actual_coordinates = (([381, 451], [381, 645], [557, 645], [557, 451]), ([205, 451], [381, 451], [381, 278], [205, 278]), ([205, 645], [205, 451], [40, 451], [40, 645]), ([381, 645], [205, 645], [205, 793], [381, 793]))
print main(side_coordinates,actual_coordinates)"""
"""side_coordinates = (([0,0],[0,110],[100,100],[100,0]),([0,0],[0,110],[100,100],[100,0]),([0,0],[0,115],[100,100],[100,0]),([0,0],[0,110],[100,100],[100,0]))
actual_coordinates = (([100,200],[100,310],[200,300],[200,200]),([200,200],[310,200],[300,100],[200,100]),([200,100],[200,-15],[100,0],[100,100]),([100,100],[-10,100],[0,200],[100,200]))
print main(side_coordinates,actual_coordinates)"""
side_coordinates = (([0.0, 0.0], [0.0, 173.0], [176.00000000000003, 172.99999999999997], [176.0, 0.0]), ([0.0, 0.0], [0.0, 165.0], [194.0, 165.0], [194.0, 0.0]), ([0.0, 0.0], [0.0, 148.0], [176.0, 148.0], [176.0, 0.0]), ([0.0, 0.0], [0.0, 176.0], [194.0, 176.0], [194.0, 0.0]))
actual_coordinates = (([381, 451], [381, 278], [205, 278], [205, 451]), ([205, 451], [40, 451], [40, 645], [205, 645]), ([205, 645], [205, 793], [381, 793], [381, 645]), ([381, 645], [557, 645], [557, 451], [381, 451]))
print main(side_coordinates,actual_coordinates)
"""RECTANGULAR PRISM
side_coordinates = (([0.0, 0.4], [0.0, 2.0], [1.5, 2.0], [1.0, 0.0]), ([0.0, 0.0], [0.0, 2.0], [1.0, 2.0], [1.0, 0.0]), ([0.0, 0.0], [0.0, 2.0], [0.9999999999999998, 2.0], [1.0, 0.0]), ([0.0, 0.0], [0.0, 2.0], [0.9999999999999999, 2.0], [1.0, 0.0]))
actual_coordinates = (([3, 2.2], [3, 0], [2, 0], [2, 2]), ([3, 3], [5, 3], [5, 2], [3, 2]), ([2, 3], [2, 5], [3, 5], [3, 3]), ([2, 2], [0, 2], [0, 3], [2, 3]))
print main(side_coordinates,actual_coordinates)"""
"""SQUARE PYRAMID (WORKS)
side_coordinates = (([0,0],[3,6],[6,0]),([0,0],[3,6],[6,0]),([0,0],[3,6],[6,0]),([0,0],[3,6],[6,0]))
actual_coordinates = (([6,6],[0,9],[6,12]),([6,12],[9,18],[12,12]),([12,12],[18,9],[12,6]),([12,6],[9,0],[6,6]))
print main(side_coordinates,actual_coordinates)"""
"""TRIANGULAR PRISM (WIP)
side_coordinates = (([0,0],[0,6],[6,6],[6,0]),([0,0],[3,6],[6,0]),([0,0],[0,6],[6,6],[6,0]),([0,0],[3,6],[6,0]))
actual_coordinates = (([6,6],[0,6],[0,12],[6,12]),([6,12],[9,18],[12,12]),([12,12],[18,12],[18,6],[12,6]),([12,6],[9,0],[6,6]))
print main(side_coordinates,actual_coordinates)"""
"""PYRAMID (DOESN'T WORK)
side_coordinates = (([0,0],[3,6],[6,0]),([0,0],[3,6],[6,0]),([0,0],[3,6],[6,0]))
actual_coordinates = (([6,0],[0,0],[3,6]),([3,6],[6,12],[9,6]),([9,6],[12,0],[6,0]))
print main(side_coordinates,actual_coordinates)""" | 41.4791 | 333 | 0.653721 |
69b971f81b4dc949fa0d88234559f961465f9fdd | 19,335 | py | Python | AmosPy/amosTokens.py | dschwen/AmosPy | 87deee623ca806e8acb08b85935de2b56a0e3585 | [
"OML",
"CNRI-Python-GPL-Compatible"
] | 1 | 2018-01-16T00:45:49.000Z | 2018-01-16T00:45:49.000Z | AmosPy/amosTokens.py | dschwen/AmosPy | 87deee623ca806e8acb08b85935de2b56a0e3585 | [
"OML",
"CNRI-Python-GPL-Compatible"
] | null | null | null | AmosPy/amosTokens.py | dschwen/AmosPy | 87deee623ca806e8acb08b85935de2b56a0e3585 | [
"OML",
"CNRI-Python-GPL-Compatible"
] | null | null | null | """Token table and code to deal with special cases."""
import struct
def readRem(byteStream):
    """Read a Rem/remark (comment) token.

    Layout: one unused byte, a one-byte comment length, then the
    NUL-padded comment text.

    Returns (bytes consumed, comment text as str).
    """
    commentLength = struct.unpack('bb', byteStream.read(2))[1]
    bytesRead = 2
    raw = struct.unpack('%ds' % commentLength, byteStream.read(commentLength))[0]
    # BUG fix: the original called rstrip("\x00") on the unpacked bytes,
    # which raises TypeError on Python 3. Strip with a bytes argument (as
    # readString/readLabelType already do) and decode to text.
    comment = raw.rstrip(b"\x00").decode("latin-1")
    bytesRead += commentLength
    return bytesRead, comment
def readVal(byteStream):
    """Read a 4-byte big-endian signed integer value token.

    Returns (bytes consumed, value).
    """
    (value,) = struct.unpack('>i', byteStream.read(4))
    return 4, value
def readFloatVal(byteStream):
    """Read a 4-byte big-endian IEEE-754 single-precision float token.

    Returns (bytes consumed, value).
    """
    (value,) = struct.unpack(">f", byteStream.read(4))
    return 4, value
def readLabelType(byteStream):
    """Read a label/variable/procedure-name token.

    Layout: 2 unknown bytes, a one-byte name length, a one-byte type
    flags field, then the NUL-padded name.  Flag bit 0 marks a float
    variable (AMOS '#' suffix), flag bit 1 a string variable ('$').

    Returns (bytes consumed, decorated name as str).
    """
    unknown, length, flags = struct.unpack("Hbb", byteStream.read(4))
    bytesRead = 4
    raw = struct.unpack("%ds" % length, byteStream.read(length))[0]
    # BUG fix: the original wrapped the bytes in str(), which on Python 3
    # produces "b'name'"; decode instead.
    name = raw.rstrip(b"\x00").decode("latin-1")
    if flags & 1:
        name += "#"  # float variable in AMOS
    elif flags & 2:
        # BUG fix: was `flags and 2`, which is truthy for ANY non-zero
        # flags value; only bit 1 marks a string variable.
        name += "$"
    bytesRead += length
    return bytesRead, name
def unknownSize(size):
    """Return a reader that skips `size` bytes of unknown token payload.

    Some tokens carry extra bytes whose meaning is unknown; the returned
    callable consumes them and reports (size, None).
    """
    def _skip(byteStream):
        """Consume and discard the fixed-size payload."""
        byteStream.read(size)
        return size, None
    return _skip
def readString(byteStream):
    """Read a string-constant token.

    Layout: a 2-byte big-endian length, then the data padded with NULs
    to the next 68k word (even-byte) boundary.

    Returns (bytes consumed, string data as bytes with NULs stripped).
    """
    (length,) = struct.unpack(">h", byteStream.read(2))
    # Round up to an even number of bytes (word alignment).
    padded = length + (length % 2)
    text = struct.unpack("%ds" % padded, byteStream.read(padded))[0].rstrip(b"\x00")
    return 2 + padded, text
def readProcedure(byteStream):
    """Read a Procedure declaration token.

    Layout: 4-byte big-endian size-to-end, 2-byte encryption seed, a
    flag byte, and a second seed byte.  Compiled procedure bodies are
    opaque, so their remaining bytes are consumed and skipped wholesale.

    Returns (bytes consumed, dict with bytesToEnd, encSeed pair and the
    decoded flag-name set).
    """
    bytesToEnd, encSeed, flagsB, encSeed2 = struct.unpack(">ihbb", byteStream.read(8))
    bytesRead = 8
    flagBits = ((0x80, 'folded'), (0x40, 'locked'), (0x20, 'encrypted'), (0x10, 'compiled'))
    flags = {name for bit, name in flagBits if flagsB & bit}
    if 'compiled' in flags:
        # Nothing useful can be decoded from a compiled body - skip it.
        byteStream.read(bytesToEnd)
        bytesRead += bytesToEnd
    return bytesRead, {'bytesToEnd': bytesToEnd, 'encSeed': (encSeed, encSeed2), 'flags': flags}
def readExtension(byteStream):
    """Read an extension-instruction token: extension slot number, an
    unused byte, then a 16-bit big-endian token offset within that
    extension.  See extensions.py for the (extNo, extToken) mappings.

    NOTE(review): only 4 bytes are consumed here but 6 is reported as
    the size - presumably the caller advances by the reported size to
    skip trailing payload; confirm against the AMOS token format.
    """
    extNo, unused, extToken = struct.unpack('>2bH', byteStream.read(4))
    return 6, (extNo, extToken)
#Given majority have no extra, a simple string, or length 1 tuple is the default
token_map = {
0x0000: (None,),
0x0006: ('Variable', readLabelType),
0x000c: ('Label', readLabelType),
0x0012: ('Call', readLabelType),
0x0018: ('Goto Label Ref', readLabelType),
0x001e: ('BinVal', readVal),
0x0026: ('Dbl Str', readString),
0x002e: ('Sgl Str', readString),
0x0036: ('HexVal', readVal),
0x003e: ('DecVal', readVal),
0x0046: ('Float', readFloatVal),
0x004e: ('Extension', readExtension),
0x0054: ':',
0x005c: ',',
0x0064: ';',
0x006c: '#',
0x0074: '(',
0x007c: ')',
0x0084: '[',
0x008c: ']',
0x0094: 'To',
0x009c: 'Not',
0x00a6: 'Swap',
0x00b0: 'Def Fn',
0x00bc: 'Fn',
0x00c4: 'Follow Off',
0x00d4: 'Follow',
0x00e0: 'Resume Next',
0x00f2: 'Inkey$',
0x00fe: 'Repeat$',
0x010e: 'Zone$',
0x011c: 'Border$',
0x012c: 'Double Buffer',
0x0140: 'Start',
0x014c: 'Length',
0x015a: 'Doke',
0x0168: 'On Menu Del',
0x017a: 'On Menu On',
0x018a: 'On Menu Off',
0x019c: 'Every On',
0x01aa: 'Every Off',
0x01ba: 'Logbase',
0x01c8: 'Logic',
0x01d4: 'Logic',
0x01dc: 'Asc',
0x01e6: 'As',
0x01ee: 'Call',
0x01f8: 'Execall',
0x0206: 'Gfxcall',
0x0214: 'Doscall',
0x0222: 'Intcall',
0x0230: 'Freeze',
0x023c: ('For', unknownSize(2)),
0x0246: 'Next',
0x0250: ('Repeat', unknownSize(2)),
0x025c: 'Until',
0x0268: ('While', unknownSize(2)),
0x0274: 'Wend',
0x027e: ('Do', unknownSize(2)),
0x0286: 'Loop',
0x0290: ('Exit If', unknownSize(4)),
0x029e: ('Exit', unknownSize(4)),
0x02a8: 'Goto',
0x02b2: 'Gosub',
0x02be: ('If', unknownSize(2)),
0x02c6: 'Then',
0x02d0: ('Else', unknownSize(2)),
0x02da: 'EndIf',
0x02e6: 'On Error',
0x02f4: 'On Break Proc',
0x0308: 'On Menu',
0x0316: ('On', unknownSize(4)),
0x031e: 'Resume Label',
0x0330: 'Resume',
0x033c: 'Pop Proc',
0x034a: 'Every',
0x0356: 'Step',
0x0360: 'Return',
0x036c: 'Pop',
0x0376: ('Procedure', readProcedure),
0x0386: 'Proc',
0x0390: 'End Proc',
0x039e: 'Shared',
0x03aa: 'Global',
0x03b6: 'End',
0x03c0: 'Stop',
0x03ca: 'Param#',
0x03d6: 'Param$',
0x03e2: 'Param',
0x03ee: 'Error',
0x03fa: 'Errn',
0x0404: ('Data', unknownSize(2)),
0x040e: 'Read',
0x0418: 'Restore',
0x0426: 'Break Off',
0x0436: 'Break On',
0x0444: 'Inc',
0x044e: 'Dec',
0x0458: 'Add',
0x0462: 'Add',
0x046a: 'Print #',
0x0476: 'Print',
0x0482: 'Lprint',
0x048e: 'Input$',
0x049c: 'Input$',
0x04a6: 'Using',
0x04b2: 'Input #',
0x04be: 'Line Input #',
0x04d0: 'Input',
0x04dc: 'Line Input',
0x04ec: 'Run',
0x04f6: 'Run',
0x04fe: 'Set Buffer',
0x050e: 'Mid$',
0x051e: 'Mid$',
0x0528: 'Left$',
0x0536: 'Right$',
0x0546: 'Flip$',
0x0552: 'Chr$',
0x055e: 'Space$',
0x056c: 'String$',
0x057c: 'Upper$',
0x058a: 'Lower$',
0x0598: 'Str$',
0x05a4: 'Val',
0x05ae: 'Bin$',
0x05ba: 'Bin$',
0x05c4: 'Hex$',
0x05d0: 'Hex$',
0x05da: 'Len',
0x05e4: 'Instr$',
0x05f4: 'Instr$',
0x0600: 'Tab$',
0x060a: 'Free',
0x0614: 'Varptr',
0x0620: 'Remember X',
0x0630: 'Remember Y',
0x0640: 'Dim',
0x064a: ('Rem', readRem),
0x0652: ("'", readRem),
0x0658: 'Sort',
0x0662: 'Match',
0x0670: 'Edit',
0x067a: 'Direct',
0x0686: 'Rnd',
0x0690: 'Randomize',
0x06a0: 'Sgn',
0x06aa: 'Abs',
0x06b4: 'Int',
0x06be: 'Radian',
0x06ca: 'Degree',
0x06d6: 'Pi#',
0x06e0: 'Fix',
0x06ea: 'Min',
0x06f6: 'Max',
0x0702: 'Sin',
0x070c: 'Cos',
0x0716: 'Tan',
0x0720: 'Asin',
0x072c: 'Acos',
0x0738: 'Atan',
0x0744: 'Hsin',
0x0750: 'Hcos',
0x075c: 'Htan',
0x0768: 'Sqrt',
0x0772: 'Log',
0x077c: 'Ln',
0x0786: 'Exp',
0x0790: 'Menu To Bank',
0x07a4: 'Bank To Menu',
0x07b8: 'Menu On',
0x07c6: 'Menu Off',
0x07d4: 'Menu Calc',
0x07e4: 'Menu Mouse On',
0x07f8: 'Menu Mouse Off',
0x080c: 'Menu Base',
0x081e: 'Set Menu',
0x0832: 'X Menu',
0x0840: 'Y Menu',
0x084e: 'Menu Key',
0x0862: 'Menu Bar',
0x0872: 'Menu Line',
0x0882: 'Menu Tline',
0x0894: 'Menu Movable',
0x08a8: 'Menu Static',
0x08ba: 'Menu Item Movable',
0x08d2: 'Menu Item Static',
0x08ea: 'Menu Active',
0x08fc: 'Menu Inactive',
0x0910: 'Menu Separate',
0x0924: 'Menu Link',
0x0934: 'Menu Called',
0x0946: 'Menu Once',
0x0956: 'Menu Del',
0x0964: 'Menu$',
0x0970: 'Choice',
0x097e: 'Choice',
0x0986: 'Screen Copy',
0x099c: 'Screen Copy',
0x09a8: 'Screen Copy',
0x09be: 'Screen Copy',
0x09d6: 'Screen Clone',
0x09ea: 'Screen Open',
0x0a04: 'Screen Close',
0x0a18: 'Screen Display',
0x0a36: 'Screen Offset',
0x0a4e: 'Screen Size',
0x0a5e: 'Screen Colour',
0x0a72: 'Screen To Front',
0x0a88: 'Screen To Front',
0x0a90: 'Screen To Back',
0x0aa6: 'Screen To Back',
0x0aae: 'Screen Hide',
0x0ac0: 'Screen Hide',
0x0ac8: 'Screen Show',
0x0ada: 'Screen Show',
0x0ae2: 'Screen Swap',
0x0af4: 'Screen Swap',
0x0afc: 'Save If',
0x0b0c: 'Save Iff',
0x0b16: 'View',
0x0b20: 'Auto View Off',
0x0b34: 'Auto View On',
0x0b46: 'Screen Base',
0x0b58: 'Screen Width',
0x0b6c: 'Screen Width',
0x0b74: 'Screen Height',
0x0b88: 'Screen Height',
0x0b90: 'Get Palette',
0x0ba4: 'Get Palette',
0x0bae: 'Cls',
0x0bb8: 'Cls',
0x0bc0: 'Cls',
0x0bd0: 'Def Scroll',
0x0bee: 'X Hard',
0x0bfc: 'X Hard',
0x0c06: 'Y Hard',
0x0c14: 'Y Hard',
0x0c1e: 'X Screen',
0x0c2e: 'X Screen',
0x0c38: 'Y Screen',
0x0c48: 'Y Screen',
0x0c52: 'X Text',
0x0c60: 'Y Text',
0x0c6e: 'Screen',
0x0c7c: 'Screen',
0x0c84: 'Hires',
0x0c90: 'Lowres',
0x0c9c: 'Dual Playfield',
0x0cb4: 'Dual Priority',
0x0cca: 'Wait Vbl',
0x0cd8: 'Default Palette',
0x0cee: 'Default',
0x0cfc: 'Palette',
0x0d0a: 'Colour Back',
0x0d1c: 'Colour',
0x0d2c: 'Colour',
0x0d34: 'Flash Off',
0x0d44: 'Flash',
0x0d52: 'Shift Off',
0x0d62: 'Shift Up',
0x0d78: 'Shift Down',
0x0d90: 'Set Rainbow',
0x0dae: 'Set Rainbow',
0x0dc2: 'Rainbow Del',
0x0dd4: 'Rainbow Del',
0x0ddc: 'Rainbow',
0x0df0: 'Rain',
0x0dfe: 'Fade',
0x0e08: 'Phybase',
0x0e16: 'Physic',
0x0e24: 'Physic',
0x0e2c: 'Autoback',
0x0e3c: 'Plot',
0x0e4a: 'Plot',
0x0e56: 'Point',
0x0e64: 'Draw To',
0x0e74: 'Draw',
0x0e86: 'Ellipse',
0x0e9a: 'Circle',
0x0eac: 'Polyline to',
0x0eba: 'Polygon',
0x0ec8: 'Bar',
0x0ed8: 'Box',
0x0ee8: 'Paint',
0x0ef8: 'Paint',
0x0f04: 'Gr Locate',
0x0f16: 'Text Length',
0x0f28: 'Text Style',
0x0f38: 'Text Base',
0x0f3a: 'Text Base',
0x0f4a: 'Text',
0x0f5a: 'Set Text',
0x0f6a: 'Set Paint',
0x0f7a: 'Get Fonts',
0x0f8a: 'Get Disc Fonts',
0x0f9e: 'Get Rom Fonts',
0x0fb2: 'Set Font',
0x0fc2: 'Font',
0x0fce: 'HSlider',
0x0fe8: 'VSlider',
0x1002: 'Set Slider',
0x1022: 'Set Pattern',
0x1034: 'Set Line',
0x1044: 'Ink',
0x1050: 'Ink',
0x105a: 'Ink',
0x1066: 'Gr Writing',
0x1078: 'Clip',
0x1084: 'Clip',
0x1092: 'Set Tempras',
0x10a4: 'Set Tempras',
0x10ac: 'Set Tempras',
0x10b6: 'Appear',
0x10c8: 'Appear',
0x10d6: 'Zoom',
0x10f4: 'Get Cblock',
0x110e: 'Put Cblock',
0x1120: 'Put Cblock',
0x112c: 'Del Cblock',
0x113e: 'Del Cblock',
0x1146: 'Get Block',
0x1160: 'Get Block',
0x1172: 'Put Block',
0x1184: 'Put Block',
0x1190: 'Put Block',
0x119e: 'Put Block',
0x11ae: 'Del Block',
0x11be: 'Del Block',
0x11c6: 'Key Speed',
0x11d8: 'Key State',
0x11e8: 'Key Shift',
0x11f8: 'Joy',
0x1202: 'Jup',
0x120c: 'Jdown',
0x1218: 'Jleft',
0x1224: 'Jright',
0x1232: 'Fire',
0x123e: 'True',
0x1248: 'False',
0x1254: 'Put Key',
0x1262: 'Scancode',
0x1270: 'Scanshift',
0x1280: 'Clear Key',
0x1290: 'Wait Key',
0x129e: 'Wait',
0x12aa: 'Key$',
0x12b6: 'Scan$',
0x12bc: 'Scan$',
0x12c4: 'Scan$',
0x12ce: 'Timer',
0x12da: 'Wind Open',
0x12f4: 'Wind Open',
0x1306: 'Wind Open',
0x131a: 'Wind Close',
0x132a: 'Wind Save',
0x133a: 'Wind Move',
0x134c: 'Wind Size',
0x135e: 'Window',
0x136c: 'Windon',
0x1378: 'Locate',
0x1388: 'Clw',
0x1392: 'Home',
0x139c: 'Curs Pen',
0x13ac: 'Pen$',
0x13b8: 'Paper$',
0x13c6: 'At',
0x13d2: 'Pen',
0x13dc: 'Paper',
0x13e8: 'Center',
0x13f6: 'Border',
0x1408: 'Writing',
0x1418: 'Writing',
0x1422: 'Title Top',
0x1432: 'Title Bottom',
0x1446: 'Curs Off',
0x1454: 'Curs On',
0x1462: 'Inverse Off',
0x1474: 'Inverse On',
0x1484: 'Under Off',
0x1494: 'Under On',
0x14a2: 'Shade Off',
0x14b2: 'Shade On',
0x14c0: 'Scroll Off',
0x14d0: 'Scroll On',
0x14e0: 'Scroll',
0x14ee: 'Cup$',
0x14f8: 'CDown$',
0x1504: 'CLeft$',
0x1510: 'CRight$',
0x151e: 'Cup',
0x1528: 'Cdown',
0x1534: 'Cleft',
0x1540: 'Cright',
0x154c: 'Memorize X',
0x155c: 'Memorize Y',
0x156c: 'Cmove$',
0x157c: 'CMove',
0x158a: 'Cline',
0x1596: 'Cline',
0x159e: 'Hscroll',
0x15ac: 'Vscroll',
0x15ba: 'Set Tab',
0x15c8: 'Set Curs',
0x15e6: 'X Curs',
0x15f2: 'Y Curs',
0x15fe: 'X Graphics',
0x160e: 'Y Graphics',
0x161e: 'Xgr',
0x1628: 'Ygr',
0x1632: 'Reserve Zone',
0x1646: 'Reserve Zone',
0x164e: 'Reset Zone',
0x1660: 'Reset Zone',
0x1668: 'Set Zone',
0x1680: 'Zone',
0x168e: 'Zone',
0x169a: 'HZone',
0x16aa: 'Hzone',
0x16b6: 'Scin',
0x16c4: 'Scin',
0x16d0: 'Mouse Screen',
0x16e2: 'Mouse Zone',
0x16f2: 'Set Input',
0x1704: 'Close Workbench',
0x171a: 'Close Editor',
0x172c: 'Dir First',
0x173e: 'Dir Next',
0x174e: 'Exist',
0x175a: 'Dir$',
0x1766: 'Ldir/w',
0x1774: 'Ldir/w',
0x177c: 'Dir/w',
0x1788: 'Dir/w',
0x1790: 'Ldir',
0x179c: 'Ldir',
0x17a4: 'Dir',
0x17ae: 'Dir',
0x17b6: 'Set Dir',
0x17c4: 'Set Dir',
0x17d4: 'Load Iff',
0x17e4: 'Load Iff',
0x17ee: 'Mask Iff',
0x17fe: 'Picture',
0x180c: 'Bload',
0x181a: 'Bsave',
0x1820: 'Pload',
0x182a: 'Pload',
0x1838: 'Save',
0x1844: 'Save',
0x184e: 'Load',
0x185a: 'Load',
0x1864: 'Dfree',
0x1870: 'Mkdir',
0x187c: 'Lof',
0x1886: 'Eof',
0x1890: 'Pof',
0x189c: 'Port',
0x18a8: 'Open Random',
0x18bc: 'Open In',
0x18cc: 'Open Out',
0x18de: 'Open Port',
0x18f0: 'Append',
0x1900: 'Close',
0x190c: 'Close',
0x1914: 'Parent',
0x1920: 'Rename',
0x1930: 'Kill',
0x193c: 'Drive',
0x1948: 'Field',
0x1954: 'Fsel$',
0x1962: 'Fsel$',
0x196c: 'Fsel$',
0x1978: 'Fsel$',
0x1986: 'Set Sprite Buffer',
0x199e: 'Sprite Off',
0x19b0: 'Sprite Off',
0x19b8: 'Sprite Priority',
0x19ce: 'Sprite Update Off',
0x19e6: 'Sprite Update On',
0x19fc: 'Sprite Update',
0x1a10: 'Spritebob Col',
0x1a26: 'Spritebob Col',
0x1a32: 'Sprite Col',
0x1a44: 'Sprite Col',
0x1a50: 'Set Hardcol',
0x1a64: 'Hardcol',
0x1a72: 'Sprite Base',
0x1a84: 'Icon Base',
0x1a94: 'Sprite',
0x1aa8: 'Bob Off',
0x1ab6: 'Bob Off',
0x1abe: 'Bob Update Off',
0x1ad2: 'Bob Update On',
0x1ae6: 'Bob Update',
0x1af6: 'Bob Clear',
0x1b06: 'Bob Draw',
0x1b14: 'Bobsprite Col',
0x1b2a: 'Bobsprite Col',
0x1b36: 'Bob Col',
0x1b46: 'Bob Col',
0x1b52: 'Col',
0x1b5c: 'Limit Bob',
0x1b64: 'Limit Bob',
0x1b6c: 'Limit Bob',
0x1b7a: 'Limit Bob',
0x1b8a: 'Set Bob',
0x1b9e: 'Bob',
0x1bae: 'Get Sprite Palette',
0x1bc8: 'Get Sprite Palette',
0x1bd0: 'Get Sprite',
0x1bea: 'Get Sprite',
0x1bfc: 'Get Bob',
0x1c14: 'Get Bob',
0x1c26: 'Del Sprite',
0x1c38: 'Del Sprite',
0x1c42: 'Del Bob',
0x1c52: 'Del Bob',
0x1c5c: 'Del Icon',
0x1c6c: 'Del Icon',
0x1c76: 'Ins Sprite',
0x1c88: 'Ins Bob',
0x1c96: 'Ins Icon',
0x1ca6: 'Get Icon Palette',
0x1cbe: 'Get Icon Palette',
0x1cc6: 'Get Icon',
0x1cde: 'Get Icon',
0x1cf0: 'Put Bob',
0x1cfe: 'Paste Bob',
0x1d12: 'Paste Icon',
0x1d28: 'Make Mask',
0x1d38: 'Make Mask',
0x1d40: 'No Mask',
0x1d4e: 'No Mask',
0x1d56: 'Make Icon Mask',
0x1d6c: 'Make Icon Mask',
0x1d74: 'No Icon Mask',
0x1d88: 'No Icon Mask',
0x1d90: 'Hot Spot',
0x1da2: 'Hot Spot',
0x1dae: 'Priority On',
0x1dc0: 'Priority Off',
0x1dd2: 'Hide On',
0x1de0: 'Hide',
0x1dea: 'Show On',
0x1df8: 'Show',
0x1e02: 'Change Mouse',
0x1e16: 'X Mouse',
0x1e24: 'Y Mouse',
0x1e32: 'Mouse Key',
0x1e42: 'Mouse Click',
0x1e54: 'Limit Mouse',
0x1e66: 'Limit Mouse',
0x1e6e: 'Limit Mouse',
0x1e7c: 'Unfreeze',
0x1e8a: 'Move X',
0x1e9a: 'Move X',
0x1ea6: 'Move Y',
0x1eb6: 'Move y',
0x1ec2: 'Move Off',
0x1ed2: 'Move Off',
0x1eda: 'Move On',
0x1ee8: 'Move On',
0x1ef0: 'Move Freeze',
0x1f02: 'Move Freeze',
0x1f0a: 'Anim Off',
0x1f1a: 'Anim Off',
0x1f22: 'Anim On',
0x1f30: 'Anim On',
0x1f38: 'Anim Freeze',
0x1f4a: 'Anim Freeze',
0x1f52: 'Anim',
0x1f60: 'Anim',
0x1f6c: 'Movon',
0x1f78: 'Chanan',
0x1f86: 'Chanmv',
0x1f94: 'Channel',
0x1fa2: 'Amreg',
0x1fb0: 'Amreg',
0x1fbc: 'Amal On',
0x1fca: 'Amal On',
0x1fd2: 'Amal Off',
0x1fe2: 'Amal Off',
0x1fea: 'Amal Freeze',
0x1ffc: 'Amal Freeze',
0x2004: 'Amalerr',
0x2012: 'Amal',
0x2020: 'Amal',
0x202c: 'Amplay',
0x203c: 'Amplay',
0x204a: 'Synchro On',
0x205a: 'Synchro Off',
0x206c: 'Synchro',
0x207a: 'Update Off',
0x208a: 'Update On',
0x209a: 'Update Every',
0x20ae: 'Update',
0x20ba: 'X Bob',
0x20c6: 'Y Bob',
0x20d2: 'X Sprite',
0x20e2: 'Y Sprite',
0x20f2: 'Reserve As Work',
0x210a: 'Reserve As Chip Work',
0x2128: 'Reserve As Data',
0x2140: 'Reserve As Chip Data',
0x215e: 'Erase',
0x216a: 'List Bank',
0x217a: 'Chip Free',
0x218a: 'Fast Free',
0x219a: 'Fill',
0x21aa: 'Copy',
0x21ba: 'Hunt',
0x21ca: 'Poke',
0x21d8: 'Loke',
0x21e6: 'Peek',
0x21f2: 'Deek',
0x21fe: 'Leek',
0x220a: 'Bset.<>',
0x2218: 'Bclr',
0x2226: 'Bchg',
0x2234: 'Btst',
0x2242: 'Ror.<>',
0x2250: 'Ror.w',
0x225e: 'Ror.l',
0x226c: 'Rol.<>',
0x227a: 'Rol.w',
0x2288: 'Rol.l',
0x2296: 'Areg',
0x22a2: 'Dreg',
0x22ae: 'Copper On',
0x22be: 'Copper Off',
0x22ce: 'Cop Swap',
0x22dc: 'Cop Reset',
0x22ec: 'Cop Wait',
0x22fe: 'Cop Wait',
0x230c: 'Cop Movel',
0x231e: 'Cop Move',
0x2330: 'Cop Logic',
0x2340: 'Prg First$',
0x2352: 'Prg Next$',
0x2362: 'Psel$',
0x2370: 'Psel$',
0x237a: 'Psel$',
0x2386: 'Psel$',
0x2394: 'Prun',
0x23a0: 'Bgrab',
0x23ac: 'Put',
0x23b8: 'Get',
0x23c4: 'System',
0x23d0: 'Multi Wait',
0x23e0: 'I Bob',
0x23ec: 'I Sprite',
0x23fc: 'Priority Reverse On',
0x2416: 'Priority Reverse Off',
0x2430: 'Dev First',
0x2442: 'Dev Next',
0x2452: 'Hrev Block',
0x2464: 'Vrev Block',
0x2476: 'Hrev',
0x2482: 'Vrev',
0x248e: 'Rev',
0x2498: 'Bank Swap',
0x24aa: 'Amos To Front',
0x24be: 'Amos To Back',
0x24d0: 'Amos Here',
0x24e0: 'Amos Lock',
0x24f0: 'Amos Unlock',
0x2502: 'Display Height',
0x2516: 'Ntsc',
0x2520: 'Laced',
0x252c: 'Prg State',
0x253c: 'Command Line$',
0x2550: 'Disc Info$',
0x292a: 'Read Text',
0x2ac0: 'Arexx Open',
0x2ad2: 'Arexx Close',
0x2ae4: 'Arexx Exist',
0x2af6: 'Arexx',
0x2b02: 'Arexx$',
0x2b10: 'Arexx Wait',
0x2b34: 'Arexx Answer',
0x2b3e: 'Exec',
0xff3e: 'Xor',
0xff4c: 'Or',
0xff58: 'And',
0xff66: '<>',
0xff70: '><',
0xff7a: '<=',
0xff84: '=<',
0xff8e: '>=',
0xff98: '=>',
0xffa2: '=',
0xffac: '<',
0xffb6: '>',
0xffc0: '+',
0xffca: '-',
0xffd4: 'Mod',
0xffe2: '*',
0xffec: '/',
0xfff6: '^',
}
| 24.443742 | 105 | 0.555883 |
208c3be07f867c7a15a47556bb33749a94550fee | 667 | py | Python | AltitudeProfile.py | arodland/IRI2016 | ab9873e597cc78cec40f11e133b83efdbe61ae7c | [
"MIT"
] | 1 | 2020-12-18T11:49:18.000Z | 2020-12-18T11:49:18.000Z | AltitudeProfile.py | arodland/IRI2016 | ab9873e597cc78cec40f11e133b83efdbe61ae7c | [
"MIT"
] | null | null | null | AltitudeProfile.py | arodland/IRI2016 | ab9873e597cc78cec40f11e133b83efdbe61ae7c | [
"MIT"
] | 1 | 2020-12-18T11:49:20.000Z | 2020-12-18T11:49:20.000Z | #!/usr/bin/env python
""" Height Profile Example """
import iri2016 as iri
from argparse import ArgumentParser
from matplotlib.pyplot import show
import iri2016.plots as piri
def main():
    """Command-line entry point: run IRI2016 and plot an altitude profile."""
    parser = ArgumentParser()
    parser.add_argument('time', help='time of simulation')
    parser.add_argument(
        'latlon',
        help='geodetic latitude, longitude (degrees)',
        type=float,
        nargs=2,
    )
    parser.add_argument(
        '-alt_km',
        help='altitude START STOP STEP (km)',
        type=float,
        nargs=3,
        default=(80, 1000, 10),
    )
    args = parser.parse_args()
    # Run the ionosphere model over the requested altitude grid, then plot it.
    sim = iri.IRI(args.time, args.alt_km, *args.latlon)
    piri.altprofile(sim)
    show()


if __name__ == '__main__':
    main()
| 24.703704 | 75 | 0.643178 |
1e38a6a36bc36b6f446a015b928cd6ad70e2b2e8 | 367 | py | Python | setup.py | petr-fedorov/btfxwss | 42f00f0c78fb9962d47e6d24132a753b1823c54b | [
"MIT"
] | 1 | 2018-06-08T08:41:01.000Z | 2018-06-08T08:41:01.000Z | setup.py | qichwi/btfxwss | 42f00f0c78fb9962d47e6d24132a753b1823c54b | [
"MIT"
] | null | null | null | setup.py | qichwi/btfxwss | 42f00f0c78fb9962d47e6d24132a753b1823c54b | [
"MIT"
] | null | null | null | from distutils.core import setup
# Package metadata for the btfxwss distribution.
# NOTE(review): license='LICENCSE' looks like a typo for 'LICENSE' — confirm
# against the actual file name in the repository before changing it.
setup(
    name='btfxwss',
    version='1.1.15',
    author='Nils Diefenbach',
    author_email='23okrs20+pypi@mykolab.com',
    url="https://github.com/nlsdfnbch/bitfinex_wss",
    license='LICENCSE',
    packages=['btfxwss'],
    install_requires=['websocket-client'],
    description="Python 3.5+ Websocket Client for the Bitfinex WSS API.",
)
84d6b58977a829de03cd97f433a8d631bf754889 | 1,444 | py | Python | senlin/drivers/base.py | chenyb4/senlin | 8b9ec31566890dc9989fe08e221172d37c0451b4 | [
"Apache-2.0"
] | null | null | null | senlin/drivers/base.py | chenyb4/senlin | 8b9ec31566890dc9989fe08e221172d37c0451b4 | [
"Apache-2.0"
] | null | null | null | senlin/drivers/base.py | chenyb4/senlin | 8b9ec31566890dc9989fe08e221172d37c0451b4 | [
"Apache-2.0"
] | null | null | null | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from oslo_config import cfg
from senlin.engine import environment
CONF = cfg.CONF
class DriverBase(object):
    '''Base class for all drivers.'''
    def __init__(self, params):
        # Deep-copy the connection parameters so that later mutation of the
        # caller's dict cannot leak into this driver instance.
        self.conn_params = copy.deepcopy(params)
class SenlinDriver(object):
    '''Generic driver class'''

    def __init__(self, backend_name=None):
        """Resolve a cloud backend and expose its service drivers.

        :param backend_name: name of the backend to load; when omitted the
            ``cloud_backend`` configuration option is used.
        """
        name = backend_name if backend_name is not None else \
            cfg.CONF.cloud_backend
        backend = environment.global_env().get_driver(name)
        # Re-export each service driver of the backend as an attribute so
        # callers can use e.g. ``SenlinDriver().compute`` without knowing
        # which backend is active.
        for service in ('compute', 'loadbalancing', 'network', 'octavia',
                        'orchestration', 'identity', 'message', 'workflow',
                        'block_storage'):
            setattr(self, service, getattr(backend, service))
bc48384ff225fbea060eeed0a66e2d8319596c49 | 2,429 | py | Python | libs/labelDialog.py | cclauss/labelImg | 7cef42c8dfab32147ab29526e7ed6d73df16943e | [
"MIT"
] | 67 | 2018-02-03T01:25:55.000Z | 2022-03-08T09:02:20.000Z | libs/labelDialog.py | cclauss/labelImg | 7cef42c8dfab32147ab29526e7ed6d73df16943e | [
"MIT"
] | 1 | 2018-04-03T19:31:42.000Z | 2018-04-05T15:48:38.000Z | libs/labelDialog.py | cclauss/labelImg | 7cef42c8dfab32147ab29526e7ed6d73df16943e | [
"MIT"
] | 35 | 2018-07-18T07:39:00.000Z | 2022-03-08T12:13:42.000Z | try:
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
except ImportError:
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from libs.lib import newIcon, labelValidator
BB = QDialogButtonBox
class LabelDialog(QDialog):
def __init__(self, text="Enter object label", parent=None, listItem=None):
super(LabelDialog, self).__init__(parent)
self.edit = QLineEdit()
self.edit.setText(text)
self.edit.setValidator(labelValidator())
self.edit.editingFinished.connect(self.postProcess)
layout = QVBoxLayout()
layout.addWidget(self.edit)
self.buttonBox = bb = BB(BB.Ok | BB.Cancel, Qt.Horizontal, self)
bb.button(BB.Ok).setIcon(newIcon('done'))
bb.button(BB.Cancel).setIcon(newIcon('undo'))
bb.accepted.connect(self.validate)
bb.rejected.connect(self.reject)
layout.addWidget(bb)
if listItem is not None and len(listItem) > 0:
self.listWidget = QListWidget(self)
for item in listItem:
self.listWidget.addItem(item)
self.listWidget.itemDoubleClicked.connect(self.listItemClick)
layout.addWidget(self.listWidget)
self.setLayout(layout)
def validate(self):
try:
if self.edit.text().trimmed():
self.accept()
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
if self.edit.text().strip():
self.accept()
def postProcess(self):
try:
self.edit.setText(self.edit.text().trimmed())
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
self.edit.setText(self.edit.text())
def popUp(self, text='', move=True):
self.edit.setText(text)
self.edit.setSelection(0, len(text))
self.edit.setFocus(Qt.PopupFocusReason)
if move:
self.move(QCursor.pos())
return self.edit.text() if self.exec_() else None
def listItemClick(self, tQListWidgetItem):
try:
text = tQListWidgetItem.text().trimmed()
except AttributeError:
# PyQt5: AttributeError: 'str' object has no attribute 'trimmed'
text = tQListWidgetItem.text().strip()
self.edit.setText(text)
self.validate()
| 33.736111 | 78 | 0.619185 |
cd2c632f74286975f7175a4186724ecbfe4be4f5 | 7,565 | py | Python | lib/historiad.py | HistoriaOffical/sentinel | 345fd0579ecd1ebab7ebfa9749e976709f023fda | [
"MIT"
] | null | null | null | lib/historiad.py | HistoriaOffical/sentinel | 345fd0579ecd1ebab7ebfa9749e976709f023fda | [
"MIT"
] | null | null | null | lib/historiad.py | HistoriaOffical/sentinel | 345fd0579ecd1ebab7ebfa9749e976709f023fda | [
"MIT"
] | 1 | 2019-06-05T00:37:57.000Z | 2019-06-05T00:37:57.000Z | """
historiad JSONRPC interface
"""
import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'lib'))
import config
import base58
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from masternode import Masternode
from decimal import Decimal
import time
class HistoriaDaemon():
    """Thin JSON-RPC client wrapper around a running historiad node.

    All queries go through :meth:`rpc_command`; a few expensive results
    (governance info, per-object vote lists) are memoized per instance.
    """
    def __init__(self, **kwargs):
        # Connection credentials; host/port default handling is done here,
        # user/password/port may be None if not supplied.
        host = kwargs.get('host', '127.0.0.1')
        user = kwargs.get('user')
        password = kwargs.get('password')
        port = kwargs.get('port')
        self.creds = (user, password, host, port)
        # memoize calls to some historiad methods
        self.governance_info = None
        self.gobject_votes = {}
    @property
    def rpc_connection(self):
        # A fresh AuthServiceProxy per access; the URL embeds the credentials.
        return AuthServiceProxy("http://{0}:{1}@{2}:{3}".format(*self.creds))
    @classmethod
    def from_historia_conf(self, historia_dot_conf):
        """Build a daemon client from RPC credentials in a historia.conf file."""
        from historia_config import HistoriaConfig
        config_text = HistoriaConfig.slurp_config_file(historia_dot_conf)
        creds = HistoriaConfig.get_rpc_creds(config_text, config.network)
        creds[u'host'] = config.rpc_host
        return self(**creds)
    def rpc_command(self, *params):
        """Invoke RPC method params[0] with the remaining params as arguments."""
        return self.rpc_connection.__getattr__(params[0])(*params[1:])
    # common RPC convenience methods
    def get_masternodes(self):
        """Return the full masternode list as Masternode objects."""
        mnlist = self.rpc_command('masternodelist', 'full')
        return [Masternode(k, v) for (k, v) in mnlist.items()]
    def get_current_masternode_vin(self):
        """Return this node's masternode vin string, or None if unavailable."""
        from historialib import parse_masternode_status_vin
        my_vin = None
        try:
            status = self.rpc_command('masternode', 'status')
            # Newer daemons report 'outpoint'; fall back to legacy 'vin'.
            mn_outpoint = status.get('outpoint') or status.get('vin')
            my_vin = parse_masternode_status_vin(mn_outpoint)
        except JSONRPCException as e:
            # Not a masternode (or RPC failure): deliberately best-effort.
            pass
        return my_vin
    def governance_quorum(self):
        """Return the vote quorum: max(config minimum, 10% of enabled MNs)."""
        # TODO: expensive call, so memoize this
        total_masternodes = self.rpc_command('masternode', 'count', 'enabled')
        min_quorum = self.govinfo['governanceminquorum']
        # the minimum quorum is calculated based on the number of masternodes
        quorum = max(min_quorum, (total_masternodes // 10))
        return quorum
    @property
    def govinfo(self):
        # Lazily fetch and cache 'getgovernanceinfo' for the process lifetime.
        if (not self.governance_info):
            self.governance_info = self.rpc_command('getgovernanceinfo')
        return self.governance_info
    # governance info convenience methods
    def superblockcycle(self):
        """Number of blocks between superblocks (from governance info)."""
        return self.govinfo['superblockcycle']
    def last_superblock_height(self):
        """Height of the most recent superblock (rounded down to the cycle)."""
        height = self.rpc_command('getblockcount')
        cycle = self.superblockcycle()
        return cycle * (height // cycle)
    def next_superblock_height(self):
        """Height at which the next superblock will occur."""
        return self.last_superblock_height() + self.superblockcycle()
    def is_masternode(self):
        """True when this daemon reports a masternode vin."""
        return not (self.get_current_masternode_vin() is None)
    def is_synced(self):
        """True when masternode sync completed without failure."""
        mnsync_status = self.rpc_command('mnsync', 'status')
        synced = (mnsync_status['IsSynced'] and not mnsync_status['IsFailed'])
        return synced
    def current_block_hash(self):
        """Hash of the current chain tip."""
        height = self.rpc_command('getblockcount')
        block_hash = self.rpc_command('getblockhash', height)
        return block_hash
    def get_superblock_budget_allocation(self, height=None):
        """Superblock budget at `height` (default: current tip) as Decimal."""
        if height is None:
            height = self.rpc_command('getblockcount')
        return Decimal(self.rpc_command('getsuperblockbudget', height))
    def next_superblock_max_budget(self):
        """Maximum budget available for the next superblock."""
        cycle = self.superblockcycle()
        current_block_height = self.rpc_command('getblockcount')
        last_superblock_height = (current_block_height // cycle) * cycle
        next_superblock_height = last_superblock_height + cycle
        # NOTE(review): last_allocation is computed but unused below — the
        # returned budget is the allocation at the *next* superblock height.
        last_allocation = self.get_superblock_budget_allocation(last_superblock_height)
        next_allocation = self.get_superblock_budget_allocation(next_superblock_height)
        next_superblock_max_budget = next_allocation
        return next_superblock_max_budget
    # "my" votes refers to the current running masternode
    # memoized on a per-run, per-object_hash basis
    def get_my_gobject_votes(self, object_hash):
        """Return this masternode's parsed votes for a governance object."""
        import historialib
        if not self.gobject_votes.get(object_hash):
            my_vin = self.get_current_masternode_vin()
            # if we can't get MN vin from output of `masternode status`,
            # return an empty list
            if not my_vin:
                return []
            (txid, vout_index) = my_vin.split('-')
            cmd = ['gobject', 'getcurrentvotes', object_hash, txid, vout_index]
            raw_votes = self.rpc_command(*cmd)
            self.gobject_votes[object_hash] = historialib.parse_raw_votes(raw_votes)
        return self.gobject_votes[object_hash]
    def is_govobj_maturity_phase(self):
        """True when we are within the maturity window before the next superblock."""
        # 3-day period for govobj maturity
        maturity_phase_delta = 1662 # ~(60*24*3)/2.6
        if config.network == 'testnet':
            maturity_phase_delta = 24 # testnet
        event_block_height = self.next_superblock_height()
        maturity_phase_start_block = event_block_height - maturity_phase_delta
        current_height = self.rpc_command('getblockcount')
        event_block_height = self.next_superblock_height()
        # print "current_height = %d" % current_height
        # print "event_block_height = %d" % event_block_height
        # print "maturity_phase_delta = %d" % maturity_phase_delta
        # print "maturity_phase_start_block = %d" % maturity_phase_start_block
        return (current_height >= maturity_phase_start_block)
    def we_are_the_winner(self):
        """True when this masternode wins the deterministic superblock election."""
        import historialib
        # find the elected MN vin for superblock creation...
        current_block_hash = self.current_block_hash()
        mn_list = self.get_masternodes()
        winner = historialib.elect_mn(block_hash=current_block_hash, mnlist=mn_list)
        my_vin = self.get_current_masternode_vin()
        # print "current_block_hash: [%s]" % current_block_hash
        # print "MN election winner: [%s]" % winner
        # print "current masternode VIN: [%s]" % my_vin
        return (winner == my_vin)
    def estimate_block_time(self, height):
        import historialib
        # NOTE: the triple-quoted string below follows the import, so it is a
        # plain expression statement rather than the method's docstring.
        """
        Called by block_height_to_epoch if block height is in the future.
        Call `block_height_to_epoch` instead of this method.
        DO NOT CALL DIRECTLY if you don't want a "Oh Noes." exception.
        """
        current_block_height = self.rpc_command('getblockcount')
        diff = height - current_block_height
        if (diff < 0):
            raise Exception("Oh Noes.")
        future_seconds = historialib.blocks_to_seconds(diff)
        estimated_epoch = int(time.time() + future_seconds)
        return estimated_epoch
    def block_height_to_epoch(self, height):
        """
        Get the epoch for a given block height, or estimate it if the block hasn't
        been mined yet. Call this method instead of `estimate_block_time`.
        """
        epoch = -1
        try:
            bhash = self.rpc_command('getblockhash', height)
            block = self.rpc_command('getblock', bhash)
            epoch = block['time']
        except JSONRPCException as e:
            if e.message == 'Block height out of range':
                # Block not mined yet: estimate from average block time.
                epoch = self.estimate_block_time(height)
            else:
                print("error: %s" % e)
                raise e
        return epoch
| 35.186047 | 87 | 0.660145 |
f512d465b9f81f3dfb42c9052c989a132310ce31 | 2,702 | py | Python | day01.py | zzjwx/PythonByzzj | 16ce2eefd3042a771e4901cb6d1d6c86eb0a2b42 | [
"Apache-2.0"
] | null | null | null | day01.py | zzjwx/PythonByzzj | 16ce2eefd3042a771e4901cb6d1d6c86eb0a2b42 | [
"Apache-2.0"
] | null | null | null | day01.py | zzjwx/PythonByzzj | 16ce2eefd3042a771e4901cb6d1d6c86eb0a2b42 | [
"Apache-2.0"
] | null | null | null | # a = int(input('输入一个年份'))
# if (a % 4 ==0 and a % 100 !=0) or (a % 400 ==0):
# print ('%d 是闰年'%a )
# else:
# print ('%d 不是闰年'%a )
# 1
# celsius = float(input('请输入摄氏温度'))
# fahrenheit = 1.8 * celsius + 32
# print('%.1f摄氏度 = %.1f华氏度' % (celsius,fahrenheit))
# 2
# import math
# π = math.pi
# radius = float(input('请输入半径:'))
# high = float(input('请输入高:'))
# area = radius * radius * π
# volume = area * high
# print('底面积:%f'%area)
# print('体积:%f'%volume)
# 3
# feet = float(input('请输入英尺数:'))
# maters = feet/0.305
# print ('%.1ffeet is %.4f methers'%(feet,maters))
# 4
# M=float(input('输入水量'))
# f=float(input('输入最终温度'))
# i=float(input('输入开始温度'))
# Q= M*(f-i)*4184
# print('输出你所需要的能量%.1f'%Q)
# 5
# c=float(input('输入差额'))
# n=float(input('输入年利率'))
# l=c*(n/1200)
# print('输出利息为%.6f'%l)
# 6
# v0=float(input('输入v0'))
# v1=float(input('输入v1'))
# t=float(input('输入时间t'))
# a=(v1-v0)/t
# print('输出平均加速度a%.4f'%a)
# 7
# total = 0
# for x in range(0,6):
# total = (total+100)*(1+0.00417)
# print (total)
# money = float(input('请输入每个月存款数'))
# a = money * ( 1 + 0.00417)
# b = ( a + money) * ( 1 + 0.00417)
# c = ( b + money) * ( 1 + 0.00417)
# d = ( c + money) * ( 1 + 0.00417)
# e = ( d + money) * ( 1 + 0.00417)
# f = ( e + money) * ( 1 + 0.00417)
# print('六个月后账户余额为',f)
# n=float(input('请输入每月存款数:'))
# m=1
# a=n*1.00417
# print(a)
# while m<=5:
# a=(a+100)*1.00417
# m+=1
# print('六个月后的账户总额:', a)
# 8
# number = int (input('请输入0到1000的数字:'))
# bai = number//100
# shi = number//10%10
# ge = number%10
# sum_ = bai + shi + ge
# print('the sum of the digits is:%d'%sum_)
# 9
# import math
# π = math.pi
# tan = math.tan
# sin = math.sin
# r = float(input('输入五边形定点到中心的距离:'))
# s = 2 * r *sin (π /5)
# area = 5 * s * s /(4*tan(π/5))
# print('五边形的面积:%f'%area)
# 10
# import math
# π = math.pi
# tan = math.tan
# s = float(input('输入五角形的边'))
# area = 5 * s * s/(4 * tan(π/5))
# print('五角形的面积为:%f'%area)
# 11
# import math
# π = math.pi
# tan = math.tan
# s = float(input('输入边长:'))
# n = int(input('输入边数:'))
# area = n * s * s/(4 * tan(π/n))
# print('正多边形的面积为:%f'%area)
# 12
# a = int(input('请输入一个数:'))
# b = chr(a)
# print ('输出的字符:%s'%b)
# 13
# name = str(input('请输入姓名:'))
# hours = float(input('一周工作时间:'))
# pay = float (input('每小时报酬:'))
# tax = float (input('联邦预扣税率'))
# a_tax = (input('州预扣税率'))
# print ('雇员名字:%s'%name)
# print('工作小时:%f'%hours)
# print('每小时报酬:%f'%tax)
14
# number = str(input('请输入一个数:'))
# print(number[::-1])
| 14.526882 | 52 | 0.492228 |
609663bba5c57af263d925ecab10273866907ed8 | 204 | py | Python | python_modules/dagster/dagster_tests/cli_tests/command_tests/file_with_local_import.py | rpatil524/dagster | 6f918d94cbd543ab752ab484a65e3a40fd441716 | [
"Apache-2.0"
] | 4,606 | 2018-06-21T17:45:20.000Z | 2022-03-31T23:39:42.000Z | python_modules/dagster/dagster_tests/cli_tests/command_tests/file_with_local_import.py | rpatil524/dagster | 6f918d94cbd543ab752ab484a65e3a40fd441716 | [
"Apache-2.0"
] | 6,221 | 2018-06-12T04:36:01.000Z | 2022-03-31T21:43:05.000Z | python_modules/dagster/dagster_tests/cli_tests/command_tests/file_with_local_import.py | rpatil524/dagster | 6f918d94cbd543ab752ab484a65e3a40fd441716 | [
"Apache-2.0"
] | 619 | 2018-08-22T22:43:09.000Z | 2022-03-31T22:48:06.000Z | import dummy_local_file # pylint:disable=import-error,unused-import
from dagster_tests.cli_tests.command_tests.test_cli_commands import ( # pylint:disable=unused-import
foo_pipeline,
qux_job,
)
| 34 | 101 | 0.803922 |
7b5fce4c359ae57d6a893349812735d131b5afcc | 4,072 | py | Python | neuralogic/core/settings/__init__.py | LukasZahradnik/PyNeurologic | 99025a92e86f772b139369acf75d08a3b506994d | [
"MIT"
] | null | null | null | neuralogic/core/settings/__init__.py | LukasZahradnik/PyNeurologic | 99025a92e86f772b139369acf75d08a3b506994d | [
"MIT"
] | null | null | null | neuralogic/core/settings/__init__.py | LukasZahradnik/PyNeurologic | 99025a92e86f772b139369acf75d08a3b506994d | [
"MIT"
] | null | null | null | from typing import Any, Optional
import weakref
from neuralogic.core.settings.settings_proxy import SettingsProxy
from neuralogic.core.enums import Optimizer, Initializer, ErrorFunction, Activation
class Settings:
    """User-facing training/settings container.

    All values live in ``self.params`` keyed by parameter name; property
    setters funnel through :meth:`_update`, which also propagates the change
    to every live :class:`SettingsProxy` created via :meth:`create_proxy`
    (tracked weakly, so garbage-collected proxies stop receiving updates).
    """
    def __init__(
        self,
        *,
        optimizer: Optimizer = Optimizer.ADAM,
        learning_rate: Optional[float] = None,
        epochs: int = 3000,
        error_function: ErrorFunction = ErrorFunction.SQUARED_DIFF,
        initializer: Initializer = Initializer.UNIFORM,
        initializer_const: float = 0.1,
        initializer_uniform_scale: float = 2.0,
        rule_neuron_activation: Activation = Activation.TANH,
        relation_neuron_activation: Activation = Activation.TANH,
    ):
        # Snapshot all keyword arguments into the params dict by name.
        self.params = locals().copy()
        self.params.pop("self")
        # Weak references: proxies are updated while alive, never kept alive.
        self._proxies = weakref.WeakSet()
        if learning_rate is None:
            # Optimizer-dependent default learning rate.
            self.params["learning_rate"] = 0.1 if optimizer == Optimizer.SGD else 0.001
    # NOTE(review): 'iso_value_compression' and 'seed' are never placed into
    # self.params by __init__, so these two getters raise KeyError and the
    # setters raise NotImplementedError (via _update) — confirm whether the
    # keys were meant to be constructor parameters.
    @property
    def iso_value_compression(self) -> bool:
        return self.params["iso_value_compression"]
    @iso_value_compression.setter
    def iso_value_compression(self, iso_value_compression: bool):
        self._update("iso_value_compression", iso_value_compression)
    @property
    def seed(self) -> int:
        return self.params["seed"]
    @seed.setter
    def seed(self, seed: int):
        self._update("seed", seed)
    @property
    def learning_rate(self) -> float:
        return self.params["learning_rate"]
    @learning_rate.setter
    def learning_rate(self, learning_rate: float):
        self._update("learning_rate", learning_rate)
    @property
    def optimizer(self) -> Optimizer:
        return self.params["optimizer"]
    @optimizer.setter
    def optimizer(self, optimizer: Optimizer):
        self._update("optimizer", optimizer)
    @property
    def initializer_const(self) -> float:
        return self.params["initializer_const"]
    @initializer_const.setter
    def initializer_const(self, value: float):
        self._update("initializer_const", value)
    @property
    def initializer_uniform_scale(self) -> float:
        return self.params["initializer_uniform_scale"]
    @initializer_uniform_scale.setter
    def initializer_uniform_scale(self, value: float):
        self._update("initializer_uniform_scale", value)
    @property
    def error_function(self) -> ErrorFunction:
        return self.params["error_function"]
    @error_function.setter
    def error_function(self, error_function: ErrorFunction):
        self._update("error_function", error_function)
    @property
    def epochs(self) -> int:
        return self.params["epochs"]
    @epochs.setter
    def epochs(self, epochs: int):
        self._update("epochs", epochs)
    @property
    def initializer(self) -> Initializer:
        return self.params["initializer"]
    @initializer.setter
    def initializer(self, initializer: Initializer):
        self._update("initializer", initializer)
    @property
    def relation_neuron_activation(self) -> Activation:
        return self.params["relation_neuron_activation"]
    @relation_neuron_activation.setter
    def relation_neuron_activation(self, value: Activation):
        self._update("relation_neuron_activation", value)
    @property
    def rule_neuron_activation(self) -> Activation:
        return self.params["rule_neuron_activation"]
    @rule_neuron_activation.setter
    def rule_neuron_activation(self, value: Activation):
        self._update("rule_neuron_activation", value)
    def create_proxy(self) -> SettingsProxy:
        """Create a proxy that tracks future updates to these settings."""
        proxy = SettingsProxy(**self.params)
        self._proxies.add(proxy)
        return proxy
    def create_disconnected_proxy(self) -> SettingsProxy:
        """Create a one-off proxy snapshot that does NOT receive updates."""
        return SettingsProxy(**self.params)
    def _update(self, parameter: str, value: Any) -> None:
        # Reject unknown parameter names; known ones are fanned out to every
        # live proxy (iterate over a copy: setattr may mutate the WeakSet).
        if parameter not in self.params:
            raise NotImplementedError
        self.params[parameter] = value
        for proxy in self._proxies.copy():
            proxy.__setattr__(parameter, value)
d938dd20e67eafd942f071adfe1e1aec04796648 | 9,219 | py | Python | homeassistant/components/cloud/__init__.py | boojew/home-assistant | 697c331903f8a440a4ce324a4fb0788351dc86c3 | [
"Apache-2.0"
] | 3 | 2019-01-24T20:32:14.000Z | 2022-03-22T14:25:48.000Z | homeassistant/components/cloud/__init__.py | boojew/home-assistant | 697c331903f8a440a4ce324a4fb0788351dc86c3 | [
"Apache-2.0"
] | 6 | 2021-02-08T21:02:40.000Z | 2022-03-12T00:52:16.000Z | homeassistant/components/cloud/__init__.py | boojew/home-assistant | 697c331903f8a440a4ce324a4fb0788351dc86c3 | [
"Apache-2.0"
] | 1 | 2019-08-13T11:54:30.000Z | 2019-08-13T11:54:30.000Z | """
Component to integrate the Home Assistant cloud.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/cloud/
"""
from datetime import datetime, timedelta
import json
import logging
import os
import voluptuous as vol
from homeassistant.const import (
EVENT_HOMEASSISTANT_START, CLOUD_NEVER_EXPOSED_ENTITIES, CONF_REGION,
CONF_MODE, CONF_NAME)
from homeassistant.helpers import entityfilter, config_validation as cv
from homeassistant.util import dt as dt_util
from homeassistant.components.alexa import smart_home as alexa_sh
from homeassistant.components.google_assistant import helpers as ga_h
from homeassistant.components.google_assistant import const as ga_c
from . import http_api, iot, auth_api, prefs, cloudhooks
from .const import CONFIG_DIR, DOMAIN, SERVERS
REQUIREMENTS = ['warrant==0.6.1']
_LOGGER = logging.getLogger(__name__)
CONF_ALEXA = 'alexa'
CONF_ALIASES = 'aliases'
CONF_COGNITO_CLIENT_ID = 'cognito_client_id'
CONF_ENTITY_CONFIG = 'entity_config'
CONF_FILTER = 'filter'
CONF_GOOGLE_ACTIONS = 'google_actions'
CONF_RELAYER = 'relayer'
CONF_USER_POOL_ID = 'user_pool_id'
CONF_GOOGLE_ACTIONS_SYNC_URL = 'google_actions_sync_url'
CONF_SUBSCRIPTION_INFO_URL = 'subscription_info_url'
CONF_CLOUDHOOK_CREATE_URL = 'cloudhook_create_url'
DEFAULT_MODE = 'production'
DEPENDENCIES = ['http']
MODE_DEV = 'development'
ALEXA_ENTITY_SCHEMA = vol.Schema({
vol.Optional(alexa_sh.CONF_DESCRIPTION): cv.string,
vol.Optional(alexa_sh.CONF_DISPLAY_CATEGORIES): cv.string,
vol.Optional(alexa_sh.CONF_NAME): cv.string,
})
GOOGLE_ENTITY_SCHEMA = vol.Schema({
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_ALIASES): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(ga_c.CONF_ROOM_HINT): cv.string,
})
ASSISTANT_SCHEMA = vol.Schema({
vol.Optional(CONF_FILTER, default={}): entityfilter.FILTER_SCHEMA,
})
ALEXA_SCHEMA = ASSISTANT_SCHEMA.extend({
vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: ALEXA_ENTITY_SCHEMA}
})
GACTIONS_SCHEMA = ASSISTANT_SCHEMA.extend({
vol.Optional(CONF_ENTITY_CONFIG): {cv.entity_id: GOOGLE_ENTITY_SCHEMA},
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_MODE, default=DEFAULT_MODE):
vol.In([MODE_DEV] + list(SERVERS)),
# Change to optional when we include real servers
vol.Optional(CONF_COGNITO_CLIENT_ID): str,
vol.Optional(CONF_USER_POOL_ID): str,
vol.Optional(CONF_REGION): str,
vol.Optional(CONF_RELAYER): str,
vol.Optional(CONF_GOOGLE_ACTIONS_SYNC_URL): str,
vol.Optional(CONF_SUBSCRIPTION_INFO_URL): str,
vol.Optional(CONF_CLOUDHOOK_CREATE_URL): str,
vol.Optional(CONF_ALEXA): ALEXA_SCHEMA,
vol.Optional(CONF_GOOGLE_ACTIONS): GACTIONS_SCHEMA,
}),
}, extra=vol.ALLOW_EXTRA)
async def async_setup(hass, config):
    """Initialize the Home Assistant cloud."""
    # Start from the user's cloud config, or the bare default mode.
    kwargs = dict(config[DOMAIN]) if DOMAIN in config else {CONF_MODE: DEFAULT_MODE}

    # The raw Alexa section is consumed here and replaced below by a runtime
    # alexa_sh.Config object; Google Actions config defaults to an empty one.
    alexa_user_conf = kwargs.pop(CONF_ALEXA, None) or ALEXA_SCHEMA({})
    kwargs.setdefault(CONF_GOOGLE_ACTIONS, GACTIONS_SCHEMA({}))

    kwargs[CONF_ALEXA] = alexa_sh.Config(
        endpoint=None,
        async_get_access_token=None,
        should_expose=alexa_user_conf[CONF_FILTER],
        entity_config=alexa_user_conf.get(CONF_ENTITY_CONFIG),
    )

    cloud = Cloud(hass, **kwargs)
    hass.data[DOMAIN] = cloud
    # Connect to the cloud once Home Assistant has fully started.
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, cloud.async_start)

    await http_api.async_setup(hass)
    return True
class Cloud:
    """Store the configuration of the cloud connection.

    Holds authentication tokens, the IoT relayer connection and the
    per-assistant (Alexa / Google Actions) configuration objects.
    """

    def __init__(self, hass, mode, alexa, google_actions,
                 cognito_client_id=None, user_pool_id=None, region=None,
                 relayer=None, google_actions_sync_url=None,
                 subscription_info_url=None, cloudhook_create_url=None):
        """Create an instance of Cloud.

        In development mode all server parameters come from the keyword
        arguments; for any other mode they are looked up in the SERVERS
        preset table.
        """
        self.hass = hass
        self.mode = mode
        self.alexa_config = alexa
        self.google_actions_user_conf = google_actions
        self._gactions_config = None  # built lazily by gactions_config
        self.prefs = prefs.CloudPreferences(hass)
        # Auth tokens: set on login, loaded from disk in async_start.
        self.id_token = None
        self.access_token = None
        self.refresh_token = None
        self.iot = iot.CloudIoT(self)
        self.cloudhooks = cloudhooks.Cloudhooks(self)
        if mode == MODE_DEV:
            self.cognito_client_id = cognito_client_id
            self.user_pool_id = user_pool_id
            self.region = region
            self.relayer = relayer
            self.google_actions_sync_url = google_actions_sync_url
            self.subscription_info_url = subscription_info_url
            self.cloudhook_create_url = cloudhook_create_url
        else:
            info = SERVERS[mode]
            self.cognito_client_id = info['cognito_client_id']
            self.user_pool_id = info['user_pool_id']
            self.region = info['region']
            self.relayer = info['relayer']
            self.google_actions_sync_url = info['google_actions_sync_url']
            self.subscription_info_url = info['subscription_info_url']
            self.cloudhook_create_url = info['cloudhook_create_url']

    @property
    def is_logged_in(self):
        """Get if cloud is logged in (an id token is present)."""
        return self.id_token is not None

    @property
    def subscription_expired(self):
        """Return a boolean if the subscription has expired.

        A 7-day grace period is added on top of the expiration date.
        """
        return dt_util.utcnow() > self.expiration_date + timedelta(days=7)

    @property
    def expiration_date(self):
        """Return the subscription expiration as a UTC datetime object.

        Parsed from the 'custom:sub-exp' claim of the id token (midnight UTC
        of that date).
        """
        return datetime.combine(
            dt_util.parse_date(self.claims['custom:sub-exp']),
            datetime.min.time()).replace(tzinfo=dt_util.UTC)

    @property
    def claims(self):
        """Return the claims from the id token."""
        return self._decode_claims(self.id_token)

    @property
    def user_info_path(self):
        """Get path to the stored auth file (one file per mode)."""
        return self.path('{}_auth.json'.format(self.mode))

    @property
    def gactions_config(self):
        """Return the Google Assistant config (built and cached on first use)."""
        if self._gactions_config is None:
            conf = self.google_actions_user_conf

            def should_expose(entity):
                """If an entity should be exposed."""
                if entity.entity_id in CLOUD_NEVER_EXPOSED_ENTITIES:
                    return False
                return conf['filter'](entity.entity_id)

            self._gactions_config = ga_h.Config(
                should_expose=should_expose,
                allow_unlock=self.prefs.google_allow_unlock,
                agent_user_id=self.claims['cognito:username'],
                entity_config=conf.get(CONF_ENTITY_CONFIG),
            )
        return self._gactions_config

    def path(self, *parts):
        """Get config path inside cloud dir.
        Async friendly.
        """
        return self.hass.config.path(CONFIG_DIR, *parts)

    async def fetch_subscription_info(self):
        """Fetch subscription info from the cloud backend.

        Refreshes the auth token first (blocking call run in the executor),
        then returns the raw aiohttp response.
        """
        await self.hass.async_add_executor_job(auth_api.check_token, self)
        websession = self.hass.helpers.aiohttp_client.async_get_clientsession()
        return await websession.get(
            self.subscription_info_url, headers={
                'authorization': self.id_token
            })

    async def logout(self):
        """Close connection and remove all credentials (memory and disk)."""
        await self.iot.disconnect()
        self.id_token = None
        self.access_token = None
        self.refresh_token = None
        self._gactions_config = None
        await self.hass.async_add_job(
            lambda: os.remove(self.user_info_path))

    def write_user_info(self):
        """Write user info to a file. Blocking; call from an executor."""
        with open(self.user_info_path, 'wt') as file:
            file.write(json.dumps({
                'id_token': self.id_token,
                'access_token': self.access_token,
                'refresh_token': self.refresh_token,
            }, indent=4))

    async def async_start(self, _):
        """Start the cloud component.

        Loads stored credentials from disk (if any) and, when found, kicks
        off the IoT relayer connection.
        """
        def load_config():
            """Load config from disk (blocking; runs in executor)."""
            # Ensure config dir exists
            path = self.hass.config.path(CONFIG_DIR)
            if not os.path.isdir(path):
                os.mkdir(path)
            user_info = self.user_info_path
            if not os.path.isfile(user_info):
                return None
            with open(user_info, 'rt') as file:
                return json.loads(file.read())
        info = await self.hass.async_add_job(load_config)
        await self.prefs.async_initialize()
        if info is None:
            return
        self.id_token = info['id_token']
        self.access_token = info['access_token']
        self.refresh_token = info['refresh_token']
        self.hass.add_job(self.iot.connect())

    def _decode_claims(self, token):  # pylint: disable=no-self-use
        """Decode the claims in a token (no signature verification)."""
        from jose import jwt
        return jwt.get_unverified_claims(token)
| 33.893382 | 79 | 0.66569 |
7ca541c5a956aab370689451dd38bc323ed4404b | 380 | py | Python | yatube/posts/migrations/0003_auto_20220402_1623.py | DKSenior/yatube_project | 31a11cab68fc3dcfd8a88234a5bdbeda21de2891 | [
"MIT"
] | null | null | null | yatube/posts/migrations/0003_auto_20220402_1623.py | DKSenior/yatube_project | 31a11cab68fc3dcfd8a88234a5bdbeda21de2891 | [
"MIT"
] | null | null | null | yatube/posts/migrations/0003_auto_20220402_1623.py | DKSenior/yatube_project | 31a11cab68fc3dcfd8a88234a5bdbeda21de2891 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.19 on 2022-04-02 13:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('posts', '0002_auto_20220331_1829'),
]
operations = [
migrations.AlterField(
model_name='group',
name='slug',
field=models.SlugField(unique=True),
),
]
| 20 | 48 | 0.592105 |
1c58e38cc53cdc621b9daae10b4646c09fadcc81 | 1,157 | py | Python | core/entity.py | kaiiyer/uba | c99bd0bf1c664bf4666963d9e87f1a0dcdd018b9 | [
"MIT"
] | null | null | null | core/entity.py | kaiiyer/uba | c99bd0bf1c664bf4666963d9e87f1a0dcdd018b9 | [
"MIT"
] | 1 | 2019-11-29T09:58:10.000Z | 2019-11-29T09:58:10.000Z | core/entity.py | kaiiyer/uba | c99bd0bf1c664bf4666963d9e87f1a0dcdd018b9 | [
"MIT"
] | null | null | null | '''
Copyright 2019-Present The OpenUB Platform Authors
This file is part of the OpenUB Platform library.
The OpenUB Platform is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The OpenUB Platform is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with the OpenUB Platform. If not, see <http://www.gnu.org/licenses/>.
'''
'''
@name entity
@description purposed for entity management
'''
import logging
class Entity:
    """Entity manager: tracks entities and adjusts their scores.

    Fix: the original placed the method description in a bare string
    literal between methods, which is a no-op expression attached to
    nothing; it is now the docstring of perform_entity_analysis.
    """

    def __init__(self):
        """Log that the entity manager has been created."""
        logging.info("Entity is initiated")

    def perform_entity_analysis(self):
        """Run one entity-analysis pass.

        Intended steps (not yet implemented):
        check for new entities
        check for dormant entities
        adjust scores
        """
        # check for new entities
        # for each new entity, register in entity db
        pass
| 30.447368 | 75 | 0.74503 |
c05719d8e888db4e52c513c4a5780b6ccee44bed | 9,137 | py | Python | D_PostProcessSOMResults/accuracy_calculator.py | marccasian/KaryML-Framework | 367549f397f2d1d245358988670572a0546538f2 | [
"MIT"
] | 4 | 2020-06-12T07:56:26.000Z | 2021-03-29T22:37:21.000Z | D_PostProcessSOMResults/accuracy_calculator.py | marccasian/KaryML-Framework | 367549f397f2d1d245358988670572a0546538f2 | [
"MIT"
] | null | null | null | D_PostProcessSOMResults/accuracy_calculator.py | marccasian/KaryML-Framework | 367549f397f2d1d245358988670572a0546538f2 | [
"MIT"
] | null | null | null | import datetime
import traceback
import math
from D_PostProcessSOMResults.expected_karyotype_loader import ExpectedKaryotype
from a_Common.my_logger import LOGGER
from D_PostProcessSOMResults.SOM_results_constants import *
class AccuracyCalculator:
    """Score a SOM karyotype classification against the expected pairing.

    Loads an expected-karyotype pairs file and a chromosome distance matrix,
    then assigns each chromosome an accuracy value (on a linear and an
    exponential scale) based on where its expected partner(s) rank in its
    distance-sorted matching list.

    Fix vs. original: the bare ``except:`` in the matrix loader also caught
    KeyboardInterrupt/SystemExit; narrowed to ``except Exception``.
    """

    def __init__(self, pairs_file_path, dist_matrix_file_path):
        """Store the input file paths; nothing is loaded until get_accuracy()."""
        self.pairs_file_path = pairs_file_path
        self.dist_matrix_file_path = dist_matrix_file_path
        self.pairs = list()
        self.dist_matrix = list()
        self.exponential_scale_accuracy = list()
        self.normal_scale_accuracy = list()
        self.chromosomes_matching_lists = list()

    def __load_dist_matrix_from_file(self):
        """
        Populate self.dist_matrix from self.dist_matrix_file_path.

        File format:
        n,m
        n lines and c columns
        :return:
        """
        try:
            with open(self.dist_matrix_file_path, "r") as f:
                content = f.read()
            lines = content.split("\n")
            first_line = lines[0]
            lines = lines[1:]
            n = int(first_line.split(",")[0].strip())
            m = int(first_line.split(",")[1].strip())
            self.dist_matrix = list()
            for i in range(0, n):
                current_dist_matrix_line = list()
                for elem in lines[i].split(CH_DIST_MATRIX_FILE_VALUES_SEPARATOR):
                    dist = float(elem.strip())
                    current_dist_matrix_line.append(dist)
                if len(current_dist_matrix_line) == 0:
                    # skip blank lines
                    continue
                if len(current_dist_matrix_line) != m:
                    raise ValueError("Invalid file format!")
                self.dist_matrix.append(current_dist_matrix_line)
        except Exception:
            # Best-effort loader: report the failure but keep running with
            # whatever was parsed so far.
            print("Exception occurred while trying to load dist matrix from file. Traceback: %s"
                  % traceback.format_exc())

    def __load_data_from_files(self):
        """Load both the distance matrix and the expected karyotype pairs."""
        self.__load_dist_matrix_from_file()
        self.pairs = ExpectedKaryotype(self.pairs_file_path).load()

    def get_accuracy(self):
        """Fill the per-chromosome accuracy lists.

        Every chromosome starts at the mid-scale value; chromosomes that
        appear in an expected pair (or triple) get the accuracy value at the
        rank of their partner(s) in their distance-sorted matching list.
        """
        if self.pairs == list() or self.dist_matrix == list():
            self.__load_data_from_files()
        self.__compute_chromosomes_matching_lists()
        norm_scale_accuracy_values = self.__get_normal_scale_accuracy_values(len(self.dist_matrix))
        exp_scale_accuracy_values = self.__get_exponential_scale_accuracy_values(len(self.dist_matrix))
        # Default every chromosome to the middle of the scale.
        self.exponential_scale_accuracy = [exp_scale_accuracy_values[len(exp_scale_accuracy_values) // 2]
                                           for _ in range(len(self.dist_matrix))]
        self.normal_scale_accuracy = [norm_scale_accuracy_values[len(norm_scale_accuracy_values) // 2]
                                      for _ in range(len(self.dist_matrix))]
        for pair in self.pairs:
            if len(pair) == 2:
                # Rank of each chromosome's partner in the other's matching list.
                pair_1_0_index = self.chromosomes_matching_lists[pair[1]].index(pair[0])
                pair_0_1_index = self.chromosomes_matching_lists[pair[0]].index(pair[1])
                self.exponential_scale_accuracy[pair[0]] = exp_scale_accuracy_values[pair_0_1_index]
                self.normal_scale_accuracy[pair[0]] = norm_scale_accuracy_values[pair_0_1_index]
                self.exponential_scale_accuracy[pair[1]] = exp_scale_accuracy_values[pair_1_0_index]
                self.normal_scale_accuracy[pair[1]] = norm_scale_accuracy_values[pair_1_0_index]
            if len(pair) == 3:
                # Triples: each member is scored by the average of the ranks
                # of its two partners; when the other partner ranks earlier,
                # its slot is discounted (index decremented).
                pair_0_1_index = self.chromosomes_matching_lists[pair[0]].index(pair[1])
                pair_0_2_index = self.chromosomes_matching_lists[pair[0]].index(pair[2])
                if pair_0_1_index > pair_0_2_index:
                    pair_0_1_index -= 1
                else:
                    pair_0_2_index -= 1
                pair_1_0_index = self.chromosomes_matching_lists[pair[1]].index(pair[0])
                pair_1_2_index = self.chromosomes_matching_lists[pair[1]].index(pair[2])
                if pair_1_0_index > pair_1_2_index:
                    pair_1_0_index -= 1
                else:
                    pair_1_2_index -= 1
                pair_2_0_index = self.chromosomes_matching_lists[pair[2]].index(pair[0])
                pair_2_1_index = self.chromosomes_matching_lists[pair[2]].index(pair[1])
                if pair_2_0_index > pair_2_1_index:
                    pair_2_0_index -= 1
                else:
                    pair_2_1_index -= 1
                self.exponential_scale_accuracy[pair[0]] = (exp_scale_accuracy_values[pair_0_1_index] +
                                                            exp_scale_accuracy_values[pair_0_2_index]) / 2
                self.normal_scale_accuracy[pair[0]] = (norm_scale_accuracy_values[pair_0_1_index] +
                                                       norm_scale_accuracy_values[pair_0_2_index]) / 2
                self.exponential_scale_accuracy[pair[1]] = (exp_scale_accuracy_values[pair_1_0_index] +
                                                            exp_scale_accuracy_values[pair_1_2_index]) / 2
                self.normal_scale_accuracy[pair[1]] = (norm_scale_accuracy_values[pair_1_0_index] +
                                                       norm_scale_accuracy_values[pair_1_2_index]) / 2
                self.exponential_scale_accuracy[pair[2]] = (exp_scale_accuracy_values[pair_2_0_index] +
                                                            exp_scale_accuracy_values[pair_2_1_index]) / 2
                self.normal_scale_accuracy[pair[2]] = (norm_scale_accuracy_values[pair_2_0_index] +
                                                       norm_scale_accuracy_values[pair_2_1_index]) / 2

    def __compute_chromosomes_matching_lists(self):
        """Build a distance-sorted candidate list for every chromosome."""
        for i in range(len(self.dist_matrix)):
            self.chromosomes_matching_lists.append(self.__get_chromosome_matching_list(i))

    def __get_chromosome_matching_list(self, ch_index):
        """Indices of all other chromosomes sorted by distance to ch_index."""
        a = sorted(range(len(self.dist_matrix[ch_index])), key=lambda x: self.dist_matrix[ch_index][x])
        a.remove(ch_index)
        return a

    @staticmethod
    def __get_exponential_scale_accuracy_values(instances):
        """Accuracy values 100..~0 falling off exponentially with rank."""
        step = math.log(100) / instances
        val = [100 - math.exp(i * step) for i in range(instances)]
        val[0] = 100
        return val

    @staticmethod
    def __get_normal_scale_accuracy_values(instances):
        """Accuracy values 100..~0 falling off linearly with rank."""
        step = 100 / instances
        return [100 - i * step for i in range(instances)]
def init_logger():
    """Return a child of the shared LOGGER dedicated to accuracy reporting."""
    return LOGGER.getChild("Accuracy Calculator")
def compute_accuracy(pairs_file,
                     dist_matrix_file,
                     features_file="",
                     neurons_file="",
                     deserialize=False,
                     timestamp_str=None):
    """Run the accuracy calculation and write a timestamped ``.acc`` report.

    Creates a ``Notes`` directory next to ``pairs_file``, logs the pairs,
    matching lists and per-chromosome accuracies to console/logger/file, and
    returns the average exponential-scale accuracy (PreCKar). When a neurons
    file exists and no explicit timestamp was given, the neurons file is
    renamed to embed the achieved accuracy.

    Fixes vs. original: the bare ``except:`` around the rename fallback is
    narrowed to ``except OSError`` (what os.rename raises), and the
    "Successfully_ renamed" log typo is corrected.
    """
    logger = init_logger()
    obj = AccuracyCalculator(pairs_file, dist_matrix_file)
    import os
    acc_dir = os.path.join(os.path.dirname(pairs_file), "Notes")
    # Report file name: caller-supplied timestamp (sanitized) or "now".
    if timestamp_str:
        acc_file = os.path.join(acc_dir, '%s.acc' % (timestamp_str.replace(" ", "_").replace(":", "-")))
    else:
        acc_file = os.path.join(acc_dir, '%s.acc' % (datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')))
    if not os.path.exists(acc_dir):
        os.makedirs(acc_dir)
    obj.get_accuracy()
    with open(acc_file, 'w') as f:
        __triple_log_msg('\n'.join([str(i) for i in obj.pairs]), f, logger)
        __triple_log_msg("=============================\n", f, logger)
        __triple_log_msg('\n'.join([str(i) for i in enumerate(obj.chromosomes_matching_lists)]), f, logger)
        __triple_log_msg("=============================\n", f, logger)
        __triple_log_msg('\n'.join([str(i) for i in enumerate(obj.exponential_scale_accuracy)]), f, logger)
        # PreCKar = mean exponential-scale accuracy over all chromosomes.
        preckar = sum(obj.exponential_scale_accuracy) / float(len(obj.exponential_scale_accuracy))
        avg_str = str(preckar)
        __triple_log_msg("neurons file: %s\n" % neurons_file, f, logger)
        __triple_log_msg("features file: %s\n" % features_file, f, logger)
        __triple_log_msg("deserialize: %s\n" % str(deserialize), f, logger)
        __triple_log_msg("AVG = %s\n" % avg_str, f, logger)
        if os.path.exists(neurons_file) and os.path.exists(features_file) and not deserialize and timestamp_str is None:
            try:
                os.rename(neurons_file, features_file + "_%s.neurons" % avg_str.replace(".", "_"))
                __triple_log_msg("Successfully renamed neurons file", f, logger)
            except OSError:
                # Target may already exist (or be locked); retry with the
                # current date/time appended to make the name unique.
                __triple_log_msg("Failed to rename neurons file using os.rename to features_file_path_acc.neurons, "
                                 "will try to add current date_time to neurons file name", f, logger)
                os.rename(neurons_file, features_file + "_%s_%s.neurons"
                          % (avg_str.replace(".", "_"), datetime.datetime.now().strftime('%Y-%m-%d_%H-%M-%S')))
    return preckar
def __triple_log_msg(msg, f, logger):
    """Echo ``msg`` to stdout, the logger (info level) and an open file."""
    for emit in (print, logger.info, f.write):
        emit(msg)
| 50.480663 | 120 | 0.600744 |
4ec8ad935201b0ed564ae201ec8a5eabb0edc26e | 910 | py | Python | dev/test_package_by_doc/explore_dash_graph_object.py | StefanHeng/test_ECG-Signal-Viewer | 4fc1849aa5d8ff9bf18d3662bc405d7f20663b64 | [
"MIT"
] | null | null | null | dev/test_package_by_doc/explore_dash_graph_object.py | StefanHeng/test_ECG-Signal-Viewer | 4fc1849aa5d8ff9bf18d3662bc405d7f20663b64 | [
"MIT"
] | null | null | null | dev/test_package_by_doc/explore_dash_graph_object.py | StefanHeng/test_ECG-Signal-Viewer | 4fc1849aa5d8ff9bf18d3662bc405d7f20663b64 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# import base64
# import io
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objs as go
# import plotly.express as px
# import pandas as pd
# import os
# import numpy as np
# from dash.dependencies import Input, Output # , State
# from scipy import stats
import ecg_record
def get_ecg_plot(segment, lead):
    """Build a plotly Figure with one ECG trace: time on x, signal on y."""
    trace = go.Scatter(
        x=segment.counts_to_time_values(),
        y=lead.get_ecg_values(),
    )
    return go.Figure(data=trace)
if __name__ == "__main__":
    # Build a Dash app that renders one ECG segment/lead from the bundled
    # example record.
    # NOTE(review): this rebinds the `ecg_record` module name to the record
    # instance, shadowing the import above.
    ecg_record, seg, lead = ecg_record.EcgRecord.example()
    app = dash.Dash(
        __name__
    )
    server = app.server
    app.title = "Explore graph object"
    app.layout = html.Div(children=[
        dcc.Graph(
            id='graph-signal',
            figure=get_ecg_plot(seg, lead),
        )
    ])
    server.run(debug=True)
| 19.361702 | 58 | 0.636264 |
7d5714acd1b2f18bff09330a6e4a55ab885aedb5 | 3,983 | py | Python | 1D_Diffusion.py | bennski/1D_Diffusion | d811547eb5af1119b513b957ebae0acbdc76261f | [
"MIT"
] | 1 | 2021-11-25T21:56:04.000Z | 2021-11-25T21:56:04.000Z | 1D_Diffusion.py | bennski/1D_Diffusion | d811547eb5af1119b513b957ebae0acbdc76261f | [
"MIT"
] | null | null | null | 1D_Diffusion.py | bennski/1D_Diffusion | d811547eb5af1119b513b957ebae0acbdc76261f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mon Jul 4 16:10:54 2016
@author: bennski
-----------------------------------------------------------------------------
Backward time, centered space (BTCS) scheme to solve the 1D diffusion equation
for molecules outflowing from a meteorite into a warm little pond.
The equation solved is
du d du
Phi -- = D -- --
dt dx dx
Phi: porosity
D: diffusion coefficient
x = 0: center of meteorite
x = xmax: edge of meteorite
-----------------------------------------------------------------------------
"""
from matplotlib import pyplot as plt
import matplotlib.animation as animation
import numpy as np
import numpy.matlib
# Initialize writer
# ffmpeg must be available on PATH for matplotlib's animation export.
Writer = animation.writers['ffmpeg']
writer = Writer(fps=30, metadata=dict(artist='Me'), bitrate=1800)
# Initialization function for animation
def init():
    # Blank frame drawn by FuncAnimation before the first real frame;
    # returning the artists is required for blitting.
    line.set_data([], [])
    time_text.set_text('')
    return line, time_text
# Set the diffusion of mass coefficient
# NOTE(review): `xrange` below means this script targets Python 2.
D = (5.36e-11/0.25)*3600.*24. # diffusion/porosity [m^2 day^-1]
T = 273.15+65  # temperature [K]; not referenced elsewhere in this script
xmax = 1e-2 # 1 cm radius meteorite
tmax = 10 # days
level = 8  # grid refinement level; nx and nt derive from it
nx = (2**level) + 1
nt = (2**(level+1)) + 1 # choosing nt to have twice as many grid points as nx
# Concentration array: u[time step, spatial index]
u = np.matlib.zeros(shape=(nt, nx))
# Arrays for plotting
x = np.linspace(0,xmax,nx)
t = np.linspace(0,tmax,nt)
# Calculate delta_x, delta_t (linspace spacing is uniform, so any adjacent
# pair gives the step size)
delta_x = x[1] - x[0]
delta_t = t[2] - t[1]
# Initial condition: near-uniform concentration with a smoothed outer edge;
# the last node (k = nx-1) is left at 0 (the formula is 0 there anyway).
maxconc = 234
smooth = 1.
sum_init = 0 # sum of the initial concentration (used to normalize plots)
for k in xrange(0,nx-1):
    u[0,k] = maxconc*(1-np.e**(-smooth*(nx-1-k))) # smoothed outer edge
    sum_init = sum_init + u[0,k]
# For plotting initial condition
r1 = np.zeros(shape=(nx))
for r in xrange(0,nx):
    r1[r] = u[0,r]
# Arrays for making system of equations (RHS vector s, LHS matrix c)
s = np.matlib.zeros(shape=(nx, 1))
c = np.matlib.zeros(shape=(nx, nx))
# Set up animation
ylim = 1.1
fig = plt.figure()
ax = fig.add_subplot(111, autoscale_on=False, xlim=(0, xmax*1e2), ylim=(0, ylim)) #units to cm
line, = ax.plot([], [], '-', color='#550000', lw=2)
time_template = '%.1f days'
time_text = ax.text(0.01, 0.96, '', ha='left', va='center', transform=plt.gca().transAxes, fontsize=14, weight='bold', color='black')
# System of equations solver of the finite difference approximation of the
# diffusion equation (BTCS: backward time, centered space). One call per
# animation frame; frame n advances the solution from time step n to n+1.
def animate(n):
    if n == -1:
        # Frame -1 just draws the (normalized) initial condition.
        line.set_data(x*1e2,r1*nx/sum_init) # units to cm
        time_text.set_text(time_template % (0))
    else:
        # Input Neumann Boundary Condition at x = 0 (meteorite center)
        c[0,0] = 1 + D*delta_t/(delta_x**2)
        s[0] = u[n,0]
        c[0,1] = -(D*delta_t)/(delta_x**2)
        # Input Open Boundary Condition at x = nx-1 (meteorite edge)
        c[nx-1,nx-1] = 1
        s[nx-1] = u[n,nx-1]
        # Interior nodes: tridiagonal BTCS coefficients.
        for j in xrange(1,nx-1):
            # Terms on RHS
            s[j] = u[n,j]
            # Coefficients on LHS (constant D, so all interior rows match)
            c[j,j-1] = -D*delta_t/(delta_x**2)
            c[j,j] = (1 + 2*D*delta_t/(delta_x**2))
            c[j,j+1] = -D*delta_t/(delta_x**2)
        # Compute the values of the solution using matrix division (i.e. cu = s -> u = c^-1*s)
        # NOTE(review): `*` is matrix multiplication here because c and s are
        # np.matlib matrices; inverting c every frame is the dominant cost.
        sol = np.linalg.inv(c)*s
        # Input solutions into the solution array at time n+1
        for k in xrange(0,nx):
            u[n+1, k] = sol[k]
        line.set_data(x*1e2, sol*nx/sum_init) # units to cm
        time_text.set_text(time_template % ((n+1)*delta_t))
    return line, time_text
# Frame index -1 (prepended via np.arange(-1, nt-1)) draws the initial
# condition; each later frame advances the BTCS solution one time step.
ani = animation.FuncAnimation(fig, animate, np.arange(-1,nt-1),
                              interval=1, save_count=25, blit=True, init_func=init)
plt.xticks(fontsize=14)
plt.yticks(fontsize=14)
# NOTE(review): "Molcules" typo in the displayed title text.
plt.title(r'1D Diffusion of Molcules from a Meteorite',fontsize=14)
plt.ylabel('Fraction of total initial concentration',fontsize=14)
plt.xlabel(r'r (cm)',fontsize=14)
ani.save('1D_Diffusion.mp4', dpi = 300, writer=writer)
| 30.638462 | 133 | 0.592016 |
a2ef8a8b0d8d7292d2343f6a68e34bb712d7e975 | 117 | py | Python | kns/test_empty.py | Daiiqi/horikun_toulove | e506e399ea48816921c9ef9a8eea3538fec44bee | [
"Apache-2.0"
] | null | null | null | kns/test_empty.py | Daiiqi/horikun_toulove | e506e399ea48816921c9ef9a8eea3538fec44bee | [
"Apache-2.0"
] | null | null | null | kns/test_empty.py | Daiiqi/horikun_toulove | e506e399ea48816921c9ef9a8eea3538fec44bee | [
"Apache-2.0"
] | null | null | null | # 这是一段空代码,仅创建一个循环并输出log
# Announce the run, then emit "Hello Love!" three times with an 800 ms
# pause after each line. `log` and `wait` are builtins of the game's DSL.
log("接下来将输出3次”Hello Love!“")
for _ in range(3):
    log("Hello Love!")
    wait(800)
| 16.714286 | 29 | 0.606838 |
ecafb6d02833e32df5aec25da3e5bcf5a9f80713 | 3,250 | py | Python | Robinhood/endpoints.py | apockill/Robinhood | 3df46da8d9bd31d27adcae9bc5d306282bb8a3a4 | [
"MIT"
] | null | null | null | Robinhood/endpoints.py | apockill/Robinhood | 3df46da8d9bd31d27adcae9bc5d306282bb8a3a4 | [
"MIT"
] | null | null | null | Robinhood/endpoints.py | apockill/Robinhood | 3df46da8d9bd31d27adcae9bc5d306282bb8a3a4 | [
"MIT"
] | null | null | null | api_url = "https://api.robinhood.com"
crypto_api_url = "https://nummus.robinhood.com"


def login():
    """OAuth2 token endpoint."""
    return "{}/oauth2/token/".format(api_url)


def logout():
    """OAuth2 token revocation endpoint."""
    return "{}/oauth2/revoke_token/".format(api_url)


def investment_profile():
    """Investment profile endpoint."""
    return "{}/user/investment_profile/".format(api_url)


def accounts():
    """Accounts endpoint."""
    return "{}/accounts/".format(api_url)


def ach(option):
    '''
    Combination of 3 ACH endpoints. Options include:
    * iav
    * relationships
    * transfers
    '''
    if option == "iav":
        return api_url + "/ach/iav/auth/"
    return api_url + "/ach/{_option}/".format(_option=option)
def applications():
    """Applications endpoint."""
    return api_url + "/applications/"


def dividends():
    """Dividends endpoint."""
    return api_url + "/dividends/"


def settled_transactions():
    """Settled cash-management transactions (Minerva service)."""
    return "https://minerva.robinhood.com/history/settled_transactions/"


def crypto_orders():
    """Crypto orders endpoint (Nummus service)."""
    return crypto_api_url + "/orders/"


def crypto_quotes(currency_pair_id: str):
    """Market-data quote for one crypto currency pair."""
    return api_url + f"/marketdata/forex/quotes/{currency_pair_id}/"


def crypto_holdings():
    """Crypto holdings endpoint (Nummus service)."""
    return crypto_api_url + "/holdings/"


def sweeps():
    """Account sweeps endpoint.

    Fix: ``accounts()`` already ends with a slash, so the original
    ``accounts() + "/sweeps/"`` produced ".../accounts//sweeps/" with a
    double slash.
    """
    return accounts() + "sweeps/"


def edocuments():
    """Account documents endpoint."""
    return api_url + "/documents/"
def instruments(instrumentId=None, option=None):
    '''
    Return information about a specific instrument by providing its instrument id.
    Add extra options for additional information such as "popularity"
    '''
    url = api_url + "/instruments/"
    if instrumentId:
        url += "{id}/".format(id=instrumentId)
    if option:
        url += "{_option}/".format(_option=option)
    return url
def margin_upgrades():
    """Margin account upgrade endpoint."""
    return api_url + "/margin/upgrades/"
def markets():
    """Markets listing endpoint."""
    return api_url + "/markets/"
def notifications():
    """Notifications endpoint."""
    return api_url + "/notifications/"
def orders(orderId=None):
    """Orders endpoint; appends the order id when one is given."""
    return api_url + "/orders/" + (
        "{id}/".format(id=orderId) if orderId else "")
def password_reset():
    """Password reset request endpoint."""
    return api_url + "/password_reset/request/"
def portfolios():
    """Portfolios endpoint."""
    return api_url + "/portfolios/"
def positions():
    """Positions endpoint."""
    return api_url + "/positions/"
def quotes():
    """Equity quotes endpoint."""
    return api_url + "/quotes/"
def historicals():
    """Historical quotes endpoint."""
    return api_url + "/quotes/historicals/"
def document_requests():
    """Document upload requests endpoint."""
    return api_url + "/upload/document_requests/"
def user():
    """User profile endpoint."""
    return api_url + "/user/"
def watchlists():
    """Watchlists endpoint."""
    return api_url + "/watchlists/"
def news(stock):
    """News feed for a given stock symbol."""
    return api_url + "/midlands/news/{_stock}/".format(_stock=stock)
def fundamentals(stock):
    """Fundamentals for a given stock symbol."""
    return api_url + "/fundamentals/{_stock}/".format(_stock=stock)
def tags(tag=None):
    '''
    Returns endpoint with tag concatenated.
    '''
    # NOTE(review): a missing tag renders literally as ".../tag/None/" --
    # confirm callers always supply a tag.
    return api_url + "/midlands/tags/tag/{_tag}/".format(_tag=tag)
def chain(instrumentid):
    """Option chains for an equity instrument id."""
    return api_url + "/options/chains/?equity_instrument_ids={_instrumentid}".format(
        _instrumentid=instrumentid)
def options(chainid, dates, option_type):
    """Tradable active option instruments for a chain, dates and type."""
    return api_url + "/options/instruments/?chain_id={_chainid}&expiration_dates={_dates}&state=active&tradability=tradable&type={_type}".format(
        _chainid=chainid, _dates=dates, _type=option_type)
def market_data(optionid):
    """Market data for a single option instrument."""
    return api_url + "/marketdata/options/{_optionid}/".format(
        _optionid=optionid)
def convert_token():
    """OAuth2 token migration endpoint."""
    return api_url + "/oauth2/migrate_token/"
| 20.967742 | 145 | 0.670462 |
df966786039d7f23e45fc41909c5a76b8ce9c400 | 2,194 | py | Python | neutron/tests/unit/vmware/__init__.py | SnabbCo/neutron | a657c06d10f2171149c6b1863df36522bdc11cd7 | [
"Apache-2.0"
] | 7 | 2015-03-27T15:41:04.000Z | 2021-12-12T19:22:47.000Z | neutron/tests/unit/vmware/__init__.py | SnabbCo/neutron | a657c06d10f2171149c6b1863df36522bdc11cd7 | [
"Apache-2.0"
] | 12 | 2015-01-08T18:30:45.000Z | 2015-03-13T21:04:15.000Z | neutron/tests/unit/vmware/__init__.py | SnabbCo/neutron | a657c06d10f2171149c6b1863df36522bdc11cd7 | [
"Apache-2.0"
] | 7 | 2015-02-05T10:23:52.000Z | 2019-05-18T17:11:19.000Z | # Copyright 2013 OpenStack Foundation.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from neutron.plugins.vmware.api_client import client as nsx_client
from neutron.plugins.vmware.api_client import eventlet_client
from neutron.plugins.vmware import extensions
import neutron.plugins.vmware.plugin as neutron_plugin
from neutron.plugins.vmware.vshield.common import VcnsApiClient as vcnsapi
from neutron.plugins.vmware.vshield import vcns
import neutron.plugins.vmware.vshield.vcns_driver as vcnsdriver
plugin = neutron_plugin.NsxPlugin
service_plugin = neutron_plugin.NsxServicePlugin
api_client = nsx_client.NsxApiClient
evt_client = eventlet_client.EventletApiClient
vcns_class = vcns.Vcns
vcns_driver = vcnsdriver.VcnsDriver
vcns_api_helper = vcnsapi.VcnsApiHelper
STUBS_PATH = os.path.join(os.path.dirname(__file__), 'etc')
NSXEXT_PATH = os.path.dirname(extensions.__file__)
NSXAPI_NAME = '%s.%s' % (api_client.__module__, api_client.__name__)
PLUGIN_NAME = '%s.%s' % (plugin.__module__, plugin.__name__)
SERVICE_PLUGIN_NAME = '%s.%s' % (service_plugin.__module__,
service_plugin.__name__)
CLIENT_NAME = '%s.%s' % (evt_client.__module__, evt_client.__name__)
VCNS_NAME = '%s.%s' % (vcns_class.__module__, vcns_class.__name__)
VCNS_DRIVER_NAME = '%s.%s' % (vcns_driver.__module__, vcns_driver.__name__)
VCNSAPI_NAME = '%s.%s' % (vcns_api_helper.__module__, vcns_api_helper.__name__)
def get_fake_conf(filename):
return os.path.join(STUBS_PATH, filename)
def nsx_method(method_name, module_name='nsxlib'):
return '%s.%s.%s' % ('neutron.plugins.vmware', module_name, method_name)
| 40.62963 | 79 | 0.767548 |
e6a5fd4ab28312b6a21cde9b843cd62ad86b3ded | 1,484 | py | Python | Codewars_Python/sudoku_solver.py | nlantau/Codewars_2020_2021 | 055fbf8785ddd52b9f8e8c2b59294ead01852467 | [
"MIT"
] | null | null | null | Codewars_Python/sudoku_solver.py | nlantau/Codewars_2020_2021 | 055fbf8785ddd52b9f8e8c2b59294ead01852467 | [
"MIT"
] | null | null | null | Codewars_Python/sudoku_solver.py | nlantau/Codewars_2020_2021 | 055fbf8785ddd52b9f8e8c2b59294ead01852467 | [
"MIT"
] | null | null | null | # nlantau, 2021-02-01
import numpy as np
puzzle = [[5,3,0,0,7,0,0,0,0],
[6,0,0,1,9,5,0,0,0],
[0,9,8,0,0,0,0,6,0],
[8,0,0,0,6,0,0,0,3],
[4,0,0,8,0,3,0,0,1],
[7,0,0,0,2,0,0,0,6],
[0,6,0,0,0,0,2,8,0],
[0,0,0,4,1,9,0,0,5],
[0,0,0,0,8,0,0,7,9]]
solution = [[5,3,4,6,7,8,9,1,2],
[6,7,2,1,9,5,3,4,8],
[1,9,8,3,4,2,5,6,7],
[8,5,9,7,6,1,4,2,3],
[4,2,6,8,5,3,7,9,1],
[7,1,3,9,2,4,8,5,6],
[9,6,1,5,3,7,2,8,4],
[2,8,7,4,1,9,6,3,5],
[3,4,5,2,8,6,1,7,9]]
p = list()
def sudoku(puzzle):
global p
for r in range(9):
for c in range(9):
if puzzle[r][c] == 0:
for n in range(1,10):
if possible(puzzle,r,c,n):
puzzle[r][c] = n
sudoku(puzzle)
puzzle[r][c] = 0
return
p.append(puzzle)
print(np.matrix(puzzle))
return p
def possible(p,r,c,n):
for i in range(0,9):
if p[r][i] == n:
return False
for i in range(0,9):
if p[i][c] == n:
return False
c0 = (c//3)*3
r0 = (r//3)*3
for i in range(0,3):
for j in range(0,3):
if p[r0+i][c0+j] == n:
return False
return True
if __name__ == "__main__":
print(np.matrix(puzzle))
print(np.matrix(sudoku(puzzle)))
| 23.1875 | 46 | 0.390836 |
c0edf4d006b03d5811a9abae8d27db41bab4b844 | 5,653 | py | Python | ambari-common/src/main/python/ambari_commons/os_utils.py | fangxingli/mambari | 6da9f6090d4d42623529b73413c8feb8b7f6fe45 | [
"Apache-2.0",
"MIT"
] | null | null | null | ambari-common/src/main/python/ambari_commons/os_utils.py | fangxingli/mambari | 6da9f6090d4d42623529b73413c8feb8b7f6fe45 | [
"Apache-2.0",
"MIT"
] | null | null | null | ambari-common/src/main/python/ambari_commons/os_utils.py | fangxingli/mambari | 6da9f6090d4d42623529b73413c8feb8b7f6fe45 | [
"Apache-2.0",
"MIT"
] | null | null | null | #!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import os
import shutil
import string
from ambari_commons import OSCheck
# `pwd` only exists on POSIX systems; skip it on Windows.
if OSCheck.is_windows_family():
  pass
else:
  import pwd
# Select the platform-specific implementations of the os_* helpers that the
# proxy functions further down in this module delegate to.
if OSCheck.is_windows_family():
  from ambari_commons.os_windows import os_change_owner, os_getpass, os_is_root, os_run_os_command, \
    os_set_open_files_limit, os_set_file_permissions
else:
  # MacOS not supported
  from ambari_commons.os_linux import os_change_owner, os_getpass, os_is_root, os_run_os_command, \
    os_set_open_files_limit, os_set_file_permissions
  pass
from ambari_commons.exceptions import FatalException
from ambari_commons.logging_utils import print_info_msg, print_warning_msg
def is_valid_filepath(filepath):
  """Return True when `filepath` names an existing file (not a directory)."""
  if not filepath or not os.path.exists(filepath) or os.path.isdir(filepath):
    print 'Invalid path, please provide the absolute file path.'
    return False
  else:
    return True
def quote_path(filepath):
  """Wrap `filepath` in double quotes when it contains a space."""
  if ' ' in filepath:
    return '"' + filepath + '"'
  return filepath


def trim_uri(file_uri):
  """Convert a file:/// URI into a native path; other strings pass through."""
  prefix = "file:///"
  if not file_uri.startswith(prefix):
    return file_uri
  return file_uri[len(prefix):].replace("/", os.sep)
def _search_file(filename, search_path, pathsep):
  """Return the absolute path of the first `filename` found in
  `search_path` (a pathsep-separated list of directories), or None.

  Fix: uses the str.split method (works on Python 2 and 3) instead of the
  Python-2-only string.split() module function.
  """
  for path in search_path.split(pathsep):
    candidate = os.path.join(path, filename)
    if os.path.exists(candidate):
      return os.path.abspath(candidate)
  return None


def search_file(filename, search_path, pathsep=os.pathsep):
  """ Given a search path, find file with requested name """
  return _search_file(filename, search_path, pathsep)
def copy_file(src, dest_file):
  """Copy `src` over `dest_file`; raise FatalException on any failure."""
  try:
    shutil.copyfile(src, dest_file)
  except Exception, e:
    err = "Can not copy file {0} to {1} due to: {2} . Please check file " \
              "permissions and free disk space.".format(src, dest_file, e.message)
    raise FatalException(1, err)
def copy_files(files, dest_dir):
  """Copy every path in `files` into `dest_dir`.

  Returns 0 on success, -1 when `dest_dir` is not a directory.
  """
  if not os.path.isdir(dest_dir):
    return -1
  for src in files:
    shutil.copy(src, dest_dir)
  return 0
def remove_file(filePath):
  """Delete `filePath` if it exists; return 0 on success, 1 on failure."""
  if os.path.exists(filePath):
    try:
      os.remove(filePath)
    except Exception, e:
      print_warning_msg('Unable to remove file: ' + str(e))
      return 1
    pass
  return 0
def set_file_permissions(file, mod, user, recursive):
  """Apply mode/ownership to `file` via the platform-specific helper."""
  if os.path.exists(file):
    os_set_file_permissions(file, mod, recursive, user)
  else:
    print_info_msg("File %s does not exist" % file)


def run_os_command(cmd, env=None, cwd=None):
  """Run a command without a shell; returns the platform helper's result."""
  return os_run_os_command(cmd, env, False, cwd)


def run_in_shell(cmd, env=None, cwd=None):
  """Run a command through the shell; returns the platform helper's result."""
  return os_run_os_command(cmd, env, True, cwd)


def is_root():
  """True when running with administrative/root privileges."""
  return os_is_root()


# Proxy to the os implementation
def change_owner(filePath, user, recursive):
  os_change_owner(filePath, user, recursive)


# Proxy to the os implementation
def set_open_files_limit(maxOpenFiles):
  os_set_open_files_limit(maxOpenFiles)


def get_password(prompt):
  """Prompt the user for a password via the platform-specific helper."""
  return os_getpass(prompt)
def find_in_path(file):
  """Return the absolute path of *file* located via the PATH environment
  variable; raise Exception when it cannot be found."""
  located = _search_file(file, os.environ["PATH"], os.pathsep)
  if located is not None:
    return located
  raise Exception("File {0} not found in PATH".format(file))
def extract_path_component(path, path_fragment):
  """Return the ``os.pathsep``-delimited component of *path* containing
  *path_fragment*, or None when the fragment is absent.

  Example (POSIX, os.pathsep == ':'):
    extract_path_component('a:b:c', 'b') -> 'b'

  Fixed: the original loop assigned the separator index itself
  (``iComponentStart = iComponentStartTemp``), so ``str.find`` kept
  returning the same position — an infinite loop whenever a separator
  preceded the fragment — and the separator would have been included in
  the result. The start index must advance one past the separator.
  """
  iFragment = path.find(path_fragment)
  if iFragment == -1:
    return None
  # Walk separator positions left of the fragment; the component starts
  # just after the last separator preceding it (0 when there is none).
  iComponentStart = 0
  while iComponentStart < iFragment:
    iSep = path.find(os.pathsep, iComponentStart)
    if iSep == -1 or iSep > iFragment:
      break
    iComponentStart = iSep + 1
  iComponentEnd = path.find(os.pathsep, iFragment)
  if iComponentEnd == -1:
    iComponentEnd = len(path)
  return path[iComponentStart:iComponentEnd]
# Gets the full path of the ambari repo file for the current OS
def get_ambari_repo_file_full_name():
  """Return the package-repository file path for this host's OS family;
  raise Exception when the OS family is unrecognised."""
  if OSCheck.is_ubuntu_family():
    return "/etc/apt/sources.list.d/ambari.list"
  if OSCheck.is_redhat_family():
    return "/etc/yum.repos.d/ambari.repo"
  if OSCheck.is_suse_family():
    return "/etc/zypp/repos.d/ambari.repo"
  if OSCheck.is_windows_family():
    return os.path.join(os.environ[ChocolateyConsts.CHOCOLATEY_INSTALL_VAR_NAME],
                        ChocolateyConsts.CHOCOLATEY_CONFIG_DIR,
                        ChocolateyConsts.CHOCOLATEY_CONFIG_FILENAME)
  raise Exception('Ambari repo file path not set for current OS.')
# Gets the owner of the specified file
def get_file_owner(file_full_name):
  """Return the login name that owns *file_full_name* ('' on Windows,
  where there is no pwd database)."""
  if OSCheck.is_windows_family():
    return ""
  owner_uid = os.stat(file_full_name).st_uid
  return pwd.getpwuid(owner_uid).pw_name
#
# Chocolatey package manager constants for Windows
#
class ChocolateyConsts:
  """Chocolatey package-manager constants (Windows only).

  Fixed: removed stray dataset-residue columns fused onto the final
  assignment line (which made it syntactically invalid) and the
  "Chololatey" typo in the header comment.
  """
  # Environment variable holding the Chocolatey install root.
  CHOCOLATEY_INSTALL_VAR_NAME = "ChocolateyInstall"
  # Config directory (relative to the install root) and its file name.
  CHOCOLATEY_CONFIG_DIR = "config"
  CHOCOLATEY_CONFIG_FILENAME = "chocolatey.config"
95134bc42c51accf06c11af11701014a5dbbc73d | 5,523 | py | Python | gluon/sanitizer.py | arsfeld/fog-web2py | 32263a03d4183dcaf7537c87edcb4e574d4bec6e | [
"BSD-3-Clause"
] | null | null | null | gluon/sanitizer.py | arsfeld/fog-web2py | 32263a03d4183dcaf7537c87edcb4e574d4bec6e | [
"BSD-3-Clause"
] | null | null | null | gluon/sanitizer.py | arsfeld/fog-web2py | 32263a03d4183dcaf7537c87edcb4e574d4bec6e | [
"BSD-3-Clause"
] | 1 | 2019-03-13T08:20:25.000Z | 2019-03-13T08:20:25.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
::
# from http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/496942
# Title: Cross-site scripting (XSS) defense
# Submitter: Josh Goldfoot (other recipes)
# Last Updated: 2006/08/05
# Version no: 1.0
"""
from htmllib import HTMLParser
from cgi import escape
from urlparse import urlparse
from formatter import AbstractFormatter
from htmlentitydefs import entitydefs
from xml.sax.saxutils import quoteattr
__all__ = ['sanitize']
def xssescape(text):
    """Gets rid of < and > and & and, for good measure, :

    Fixed: the colon replacement had been garbled into the no-op
    ``.replace(':', ':')``; per the docstring's contract the colon must
    be neutralised as the HTML entity ``&#58;`` (blocks e.g.
    ``javascript:`` payloads surviving in escaped text).
    """
    return escape(text, quote=True).replace(':', '&#58;')
class XssCleaner(HTMLParser):
    """SAX-style HTML parser that rebuilds its input into ``self.result``,
    keeping only whitelisted tags/attributes and entity-escaping everything
    else (cross-site-scripting defense).

    Whitelist entries ending in '/' (e.g. 'br/') are treated as
    self-closing tags and are never pushed on the open-tag stack.
    """

    def __init__(
        self,
        permitted_tags=[
            'a',
            'b',
            'blockquote',
            'br/',
            'i',
            'li',
            'ol',
            'ul',
            'p',
            'cite',
            'code',
            'pre',
            'img/',
            ],
        allowed_attributes={'a': ['href', 'title'], 'img': ['src', 'alt'
                            ], 'blockquote': ['type']},
        fmt=AbstractFormatter,
        ):
        # NOTE(review): the mutable defaults above are shared between all
        # instances; they are only read here, so this is currently safe.
        HTMLParser.__init__(self, fmt)
        # Sanitized output accumulator.
        self.result = ''
        # Stack of currently open permitted tags (most recent first).
        self.open_tags = []
        # Split the whitelist into normal and self-closing (trailing '/')
        # tags; both kinds end up in permitted_tags.
        self.permitted_tags = [i for i in permitted_tags if i[-1] != '/']
        self.requires_no_close = [i[:-1] for i in permitted_tags
                                  if i[-1] == '/']
        self.permitted_tags += self.requires_no_close
        self.allowed_attributes = allowed_attributes

        # The only schemes allowed in URLs (for href and src attributes).
        # Adding "javascript" or "vbscript" to this list would not be smart.
        self.allowed_schemes = ['http', 'https', 'ftp']

    def handle_data(self, data):
        # Plain text: escape and append.
        if data:
            self.result += xssescape(data)

    def handle_charref(self, ref):
        # Numeric character references: pass short all-digit refs through,
        # escape anything suspicious (overlong or non-numeric).
        if len(ref) < 7 and ref.isdigit():
            self.result += '&#%s;' % ref
        else:
            self.result += xssescape('&#%s' % ref)

    def handle_entityref(self, ref):
        # Named entities: keep known ones, escape unknown ones.
        if ref in entitydefs:
            self.result += '&%s;' % ref
        else:
            self.result += xssescape('&%s' % ref)

    def handle_comment(self, comment):
        # HTML comments are escaped wholesale (they can hide payloads).
        if comment:
            self.result += xssescape('<!--%s-->' % comment)

    def handle_starttag(
        self,
        tag,
        method,
        attrs,
        ):
        # Non-whitelisted tags are re-emitted escaped (visible but inert).
        if tag not in self.permitted_tags:
            self.result += xssescape('<%s>' % tag)
        else:
            bt = '<' + tag
            if tag in self.allowed_attributes:
                attrs = dict(attrs)
                # Keep only whitelisted, non-empty attributes.
                self.allowed_attributes_here = [x for x in
                    self.allowed_attributes[tag] if x in attrs
                    and len(attrs[x]) > 0]
                for attribute in self.allowed_attributes_here:
                    if attribute in ['href', 'src', 'background']:
                        # URL-bearing attributes must pass the scheme check.
                        if self.url_is_acceptable(attrs[attribute]):
                            bt += ' %s="%s"' % (attribute,
                                    attrs[attribute])
                    else:
                        bt += ' %s=%s' % (xssescape(attribute),
                                          quoteattr(attrs[attribute]))
            # An <a> or <img> that retained no attributes is dropped
            # entirely (a bare anchor/image is useless).
            if bt == '<a' or bt == '<img':
                return
            if tag in self.requires_no_close:
                bt += ' /'
            bt += '>'
            self.result += bt
            self.open_tags.insert(0, tag)

    def handle_endtag(self, tag, attrs):
        bracketed = '</%s>' % tag
        # Only close tags that are permitted and were actually opened;
        # escape every other closing tag.
        if tag not in self.permitted_tags:
            self.result += xssescape(bracketed)
        elif tag in self.open_tags:
            self.result += bracketed
            self.open_tags.remove(tag)

    def unknown_starttag(self, tag, attributes):
        # htmllib routes unrecognised tags here; reuse the start handler.
        self.handle_starttag(tag, None, attributes)

    def unknown_endtag(self, tag):
        self.handle_endtag(tag, None)

    def url_is_acceptable(self, url):
        """
        Requires all URLs to be \"absolute.\"
        """
        parsed = urlparse(url)
        # Scheme must be whitelisted and the netloc must look like a domain.
        return parsed[0] in self.allowed_schemes and '.' in parsed[1]

    def strip(self, rawstring):
        """
        Returns the argument stripped of potentially harmful
        HTML or Javascript code
        """
        # Normalise '<br/>' style input to '<br />' so the parser sees it.
        for tag in self.requires_no_close:
            rawstring = rawstring.replace("<%s/>" % tag, "<%s />" % tag)
        self.result = ''
        self.feed(rawstring)
        # Close any permitted tags the input left dangling.
        for endtag in self.open_tags:
            if endtag not in self.requires_no_close:
                self.result += '</%s>' % endtag
        return self.result

    def xtags(self):
        """
        Returns a printable string informing the user which tags are allowed
        """
        tg = ''
        for x in sorted(self.permitted_tags):
            tg += '<' + x
            if x in self.allowed_attributes:
                for y in self.allowed_attributes[x]:
                    tg += ' %s=""' % y
            tg += '> '
        return xssescape(tg.strip())
def sanitize(text, permitted_tags=None, allowed_attributes=None):
    """Return *text* stripped of potentially harmful HTML/Javascript.

    :param text: raw HTML string to clean.
    :param permitted_tags: whitelist of tag names; entries ending in '/'
        are self-closing. Defaults to the module's historical whitelist.
    :param allowed_attributes: mapping tag -> list of attribute names to
        keep. Defaults to the module's historical mapping.

    Fixed: the defaults were a shared mutable list/dict (classic mutable
    default-argument hazard); ``None`` sentinels now select fresh,
    identical defaults per call.
    """
    if permitted_tags is None:
        permitted_tags = [
            'a',
            'b',
            'blockquote',
            'br/',
            'i',
            'li',
            'ol',
            'ul',
            'p',
            'cite',
            'code',
            'pre',
            'img/',
            ]
    if allowed_attributes is None:
        allowed_attributes = {'a': ['href', 'title'], 'img': ['src', 'alt'],
                              'blockquote': ['type']}
    return XssCleaner(permitted_tags=permitted_tags,
                      allowed_attributes=allowed_attributes).strip(text)
| 28.91623 | 76 | 0.505703 |
b369048027eb471b57fa4d78c03b533ea14561d5 | 6,496 | py | Python | figures/code/Figure2.py | milo-lab/anthropogenic_mass | 5a0170a51d164c1cc98b232452e86feb1e4ee334 | [
"MIT"
] | 24 | 2020-12-09T19:09:52.000Z | 2022-03-26T14:04:32.000Z | figures/code/Figure2.py | milo-lab/anthropogenic_mass | 5a0170a51d164c1cc98b232452e86feb1e4ee334 | [
"MIT"
] | null | null | null | figures/code/Figure2.py | milo-lab/anthropogenic_mass | 5a0170a51d164c1cc98b232452e86feb1e4ee334 | [
"MIT"
] | 5 | 2020-12-10T03:40:12.000Z | 2021-06-27T11:53:48.000Z | # -*- coding: utf-8 -*-
import pandas as pd
import os
file_path = os.path.dirname(os.path.realpath(__file__))
# File uploads - figure2
# Anthropogenic-mass tables: first 7 columns are the in-use categories,
# column 8 (index 7) is waste; biomass tables come with uncertainty files.
anthro = (pd.read_excel(file_path + "/../../data/anthropogenic_mass_2015.xlsx", index_col='Year')).iloc[:,:7]
anthro_ext = (pd.read_excel(file_path + "/../../data/anthropogenic_mass_2037.xlsx", index_col='Year')).iloc[:,:7]
biomass_dry = pd.read_excel(file_path + "/../../data/biomass_dry.xlsx")
biomass_dry_uc = pd.read_excel(file_path + "/../../data/biomass_dry_uc.xlsx")
biomass_wet = pd.read_excel(file_path + "/../../data/biomass_wet.xlsx")
biomass_wet_uc = pd.read_excel(file_path + "/../../data/biomass_wet_uc.xlsx")
# Initializing with 1900 values
# Shift the series one year forward, drop the now-empty first row, and
# prepend the hard-coded 1900 row; append the first extended-series row
# so the historical and projected curves join up.
anthro1900 = [1900,0.002258563,0.016639681,0.011143095,0 ,0.000838412,0.004274367, 0]
anthro = anthro.shift(periods=1)[1:].reset_index()
anthro_ext = anthro_ext.shift(periods=1)[1:].reset_index()
anthro = pd.concat([pd.DataFrame([anthro1900], columns = list(anthro.columns)),anthro])
anthro = pd.concat([anthro, pd.DataFrame([list(anthro_ext.iloc[0])], columns = list(anthro.columns))])
# Combine categories
# Collapse the six in-use categories into one 'in-use' series; keep waste.
anthro_wet = pd.concat([anthro["Year"],anthro.iloc[:, 1:7].sum(axis=1), anthro.iloc[:, 7]], keys=["Year", 'in-use', 'waste'], axis=1)
anthro_wet_ext = pd.concat([anthro_ext["Year"], anthro_ext.iloc[:, 1:7].sum(axis=1), anthro_ext.iloc[:, 7]], keys=["Year", 'in-use', 'waste'], axis=1)
# Adding and subtracting standard deviation
# Build the +/- 1 std envelopes (uncertainty files give std in percent);
# only the first 95 rows are used for the envelope curves.
biomass_dryh = pd.DataFrame({'year': biomass_dry['year'][:95], 'biomass (Tt)': biomass_dry['biomass (Tt)'][:95] + ((biomass_dry['biomass (Tt)'][:95]* biomass_dry_uc['biomass std (%)'][:95])/100)})
biomass_dryl = pd.DataFrame({'year': biomass_dry['year'][:95], 'biomass (Tt)': biomass_dry['biomass (Tt)'][:95] - ((biomass_dry['biomass (Tt)'][:95]* biomass_dry_uc['biomass std (%)'][:95])/100)})
biomass_weth = pd.DataFrame({'year': biomass_wet['year'][:95], 'biomass (Tt)': biomass_wet['biomass (Tt)'][:95] + ((biomass_wet['biomass (Tt)'][:95]* biomass_wet_uc['biomass std (%)'][:95])/100)})
biomass_wetl = pd.DataFrame({'year': biomass_wet['year'][:95], 'biomass (Tt)': biomass_wet['biomass (Tt)'][:95] - ((biomass_wet['biomass (Tt)'][:95]* biomass_wet_uc['biomass std (%)'][:95])/100)})
# Smooth all biomass series with a 5-point rolling mean (min_periods=1
# keeps the first points instead of producing NaN).
biomass_dry['biomass (Tt)'] = biomass_dry['biomass (Tt)'].rolling(window=5, min_periods=1).mean()
biomass_wet['biomass (Tt)'] = biomass_wet['biomass (Tt)'].rolling(window=5, min_periods=1).mean()
biomass_dryh['biomass (Tt)'][:89] = biomass_dryh['biomass (Tt)'][:89].rolling(window=5, min_periods=1).mean()
biomass_dryl['biomass (Tt)'][:89] = biomass_dryl['biomass (Tt)'][:89].rolling(window=5, min_periods=1).mean()
biomass_weth['biomass (Tt)'][:89] = biomass_weth['biomass (Tt)'][:89].rolling(window=5, min_periods=1).mean()
biomass_wetl['biomass (Tt)'][:89] = biomass_wetl['biomass (Tt)'][:89].rolling(window=5, min_periods=1).mean()
# Plotting
# Stacked area chart of anthropogenic mass (in-use + waste), with biomass
# (wet and dry) curves and their uncertainty envelopes overlaid.
bx = anthro_wet.plot(x='Year', legend='reverse', xlim=(1900, 2037), ylim=(0, 3.300),xticks=[1900, 1920, 1940, 1960, 1980, 2000, 2020], yticks=[0.0, 1.0, 2.0, 3.0],color=['#352a86', '#969696'], kind= 'area', lw=0)
bx.set_xticklabels([1900, 1920, 1940, 1960, 1980, 2000, 2020], rotation=0, fontsize=6)
bx.set_yticklabels([0, 1, 2, 3], rotation=0, fontsize=6)
# Solid lines: observed biomass; dotted continuation: projected segment.
biomass_wet[:95].plot(x='year', ax=bx, legend=None, color='#006400',lw =1)
biomass_wet[94:].plot(x='year',ax=bx, legend=None, color='#006400', lw =1, linestyle=':', alpha = 0.5, dashes=(0.5, 0.5))
biomass_dry[:95].plot(x='year', ax=bx, legend=None, color='#006400', lw =1)
biomass_dry[94:98].plot(x='year',ax=bx, legend=None, color='#006400', lw =1, linestyle=':', alpha = 0.5,dashes=(0.5, 0.5))
# Light dashed lines: +/- 1 std envelopes around each biomass curve.
biomass_dryh[:90].plot(x='year',ax=bx, legend=None, color='#a9ddb5', lw =0.5, linestyle='--',alpha = 0.4,dashes=(2, 4, 2,4))
biomass_dryh[90:95].plot(x='year',ax=bx, legend=None, color='#a9ddb5', lw =0.5, linestyle='--',alpha = 0.4,dashes=(2, 4, 2,4))
biomass_dryl[:90].plot(x='year',ax=bx, legend=None, color='#a9ddb5', lw=0.5, linestyle='--', alpha = 0.4,dashes=(2, 4, 2,4))
biomass_dryl[90:95].plot(x='year',ax=bx, legend=None, color='#a9ddb5', lw=0.5, linestyle='--', alpha = 0.4,dashes=(2, 4, 2,4))
biomass_weth[:90].plot(x='year',ax=bx, legend=None, color='#a9ddb5', lw =0.5, linestyle='--',alpha = 0.4)
biomass_weth[90:95].plot(x='year',ax=bx, legend=None, color='#a9ddb5', lw =0.5, linestyle='--',alpha = 0.4)
biomass_wetl[:90].plot(x='year',ax=bx, legend=None, color='#a9ddb5', lw=0.5, linestyle='--', alpha = 0.4)
biomass_wetl[90:95].plot(x='year',ax=bx, legend=None, color='#a9ddb5', lw=0.5, linestyle='--', alpha = 0.4)
# Semi-transparent continuation of the anthropogenic stack (projection).
anthro_wet_ext.plot(x='Year', ax=bx, legend=None, color=['#352a86', '#969696'], alpha=0.5, kind= 'area', lw=0)
bx.set_xlabel('year', fontsize=7)
bx.set_ylabel('weight (Teratonnes)', fontsize=7)
bx.text(1915.5, 2.380, 'biomass (wet)', rotation=0, fontsize=7)
bx.text(1915.5, 1.230, 'biomass (dry)', rotation=0, fontsize=7)
# White dashed guides at the crossover years (anthropogenic == biomass).
bx.axvline(x=2037.5, ymax=2250.0 / 3300, linestyle='--', linewidth=0.5, color='w', alpha=0.7)
bx.axvline(x=2020.0, ymax=1120.0 / 3300, linestyle='--', linewidth=0.5, color='w', alpha=0.7)
bx.axvline(x=2031.5, ymax=2250.0 / 3300, linestyle='--', linewidth=0.5, color='w', alpha=0.7)
bx.axvline(x=2013.0, ymax=1125.0 / 3300, linestyle='--', linewidth=0.5, color='w', alpha=0.7)
handles, labels = bx.get_legend_handles_labels()
bx.legend(reversed(handles[12:14]),['anthropogenic mass waste', 'anthropogenic mass'],prop={'size': 6},bbox_to_anchor=(0, 0.150/4.200), loc="lower left",frameon=False)
bx.spines['right'].set_visible(False)
bx.spines['top'].set_visible(False)
# Crossover-year annotations with their uncertainty ranges.
bx.text(2019.7, 2.350, '2037'r'$\pm$'+'10', size=6)
bx.text(2013.6, 2.030, '2031'+r'$\pm$'+'9', size=6)
bx.text(2006.0, 1.250, '2020'+r'$\pm$'+'6', size=6)
bx.text(1995.3, 0.900, '2013'+r'$\pm$'+'5', size=6)
bx.scatter(2037.0, 2.245, color='black', s=6, zorder = 10, clip_on=False)
bx.scatter(2031.5, 2.245, color='black', s=6, zorder = 10)
bx.scatter(2020.0, 1.122, color='black', s=6, zorder = 10)
bx.scatter(2013.0, 1.122, color='black', s=6, zorder = 10)
bx.figure.set_figheight(2.8)
bx.figure.set_figwidth(3.5)
# Save the figure in raster (png) and vector (svg, pdf) formats.
# Fixed: stray dataset-residue columns fused onto the final savefig line
# (which made it syntactically invalid) have been removed.
file_out_name = file_path + '/../output/figure2'
bx.figure.savefig(file_out_name+'.png', bbox_inches='tight', pad_inches = 0.05, dpi = 600)
bx.figure.savefig(file_out_name+'.svg', bbox_inches='tight', pad_inches = 0.05)
bx.figure.savefig(file_out_name+'.pdf', bbox_inches='tight', pad_inches = 0.05)
3d431a43a274af265fe39b66fd4b56d42f27c264 | 57 | py | Python | heat/nn/tests/__init__.py | shssf/heat | 9db0a936c92491fa5aa862f558cb385c9916216b | [
"MIT"
] | 105 | 2018-05-18T11:34:03.000Z | 2022-03-29T06:37:23.000Z | heat/nn/tests/__init__.py | shssf/heat | 9db0a936c92491fa5aa862f558cb385c9916216b | [
"MIT"
] | 909 | 2018-05-18T07:50:26.000Z | 2022-03-31T20:16:30.000Z | heat/nn/tests/__init__.py | shssf/heat | 9db0a936c92491fa5aa862f558cb385c9916216b | [
"MIT"
] | 28 | 2018-05-24T14:39:18.000Z | 2022-03-31T19:18:47.000Z | from .test_nn import *
from .test_data_parallel import *
| 19 | 33 | 0.789474 |
a6660a6697389f29b57cfdf909ec6c3269e862df | 21,266 | py | Python | azure-mgmt-network/azure/mgmt/network/v2017_09_01/network_management_client.py | wawon-msft/azure-sdk-for-python | 8004d3ac11f4b5d7a43a955c79527d21ebd68850 | [
"MIT"
] | 1 | 2018-07-23T08:59:24.000Z | 2018-07-23T08:59:24.000Z | azure-mgmt-network/azure/mgmt/network/v2017_09_01/network_management_client.py | wawon-msft/azure-sdk-for-python | 8004d3ac11f4b5d7a43a955c79527d21ebd68850 | [
"MIT"
] | null | null | null | azure-mgmt-network/azure/mgmt/network/v2017_09_01/network_management_client.py | wawon-msft/azure-sdk-for-python | 8004d3ac11f4b5d7a43a955c79527d21ebd68850 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import SDKClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller, NoPolling
from msrestazure.polling.arm_polling import ARMPolling
import uuid
from .operations.application_gateways_operations import ApplicationGatewaysOperations
from .operations.application_security_groups_operations import ApplicationSecurityGroupsOperations
from .operations.available_endpoint_services_operations import AvailableEndpointServicesOperations
from .operations.express_route_circuit_authorizations_operations import ExpressRouteCircuitAuthorizationsOperations
from .operations.express_route_circuit_peerings_operations import ExpressRouteCircuitPeeringsOperations
from .operations.express_route_circuits_operations import ExpressRouteCircuitsOperations
from .operations.express_route_service_providers_operations import ExpressRouteServiceProvidersOperations
from .operations.load_balancers_operations import LoadBalancersOperations
from .operations.load_balancer_backend_address_pools_operations import LoadBalancerBackendAddressPoolsOperations
from .operations.load_balancer_frontend_ip_configurations_operations import LoadBalancerFrontendIPConfigurationsOperations
from .operations.inbound_nat_rules_operations import InboundNatRulesOperations
from .operations.load_balancer_load_balancing_rules_operations import LoadBalancerLoadBalancingRulesOperations
from .operations.load_balancer_network_interfaces_operations import LoadBalancerNetworkInterfacesOperations
from .operations.load_balancer_probes_operations import LoadBalancerProbesOperations
from .operations.network_interfaces_operations import NetworkInterfacesOperations
from .operations.network_interface_ip_configurations_operations import NetworkInterfaceIPConfigurationsOperations
from .operations.network_interface_load_balancers_operations import NetworkInterfaceLoadBalancersOperations
from .operations.network_security_groups_operations import NetworkSecurityGroupsOperations
from .operations.security_rules_operations import SecurityRulesOperations
from .operations.default_security_rules_operations import DefaultSecurityRulesOperations
from .operations.network_watchers_operations import NetworkWatchersOperations
from .operations.packet_captures_operations import PacketCapturesOperations
from .operations.operations import Operations
from .operations.public_ip_addresses_operations import PublicIPAddressesOperations
from .operations.route_filters_operations import RouteFiltersOperations
from .operations.route_filter_rules_operations import RouteFilterRulesOperations
from .operations.route_tables_operations import RouteTablesOperations
from .operations.routes_operations import RoutesOperations
from .operations.bgp_service_communities_operations import BgpServiceCommunitiesOperations
from .operations.usages_operations import UsagesOperations
from .operations.virtual_networks_operations import VirtualNetworksOperations
from .operations.subnets_operations import SubnetsOperations
from .operations.virtual_network_peerings_operations import VirtualNetworkPeeringsOperations
from .operations.virtual_network_gateways_operations import VirtualNetworkGatewaysOperations
from .operations.virtual_network_gateway_connections_operations import VirtualNetworkGatewayConnectionsOperations
from .operations.local_network_gateways_operations import LocalNetworkGatewaysOperations
from . import models
class NetworkManagementClientConfiguration(AzureConfiguration):
    """Configuration holder for :class:`NetworkManagementClient`.

    Every constructor parameter is saved as an instance attribute.

    :param credentials: Credentials needed for the client to connect to
     Azure.
    :type credentials: :mod:`A msrestazure Credentials
     object<msrestazure.azure_active_directory>`
    :param subscription_id: The subscription credentials which uniquely
     identify the Microsoft Azure subscription; the subscription ID forms
     part of the URI for every service call.
    :type subscription_id: str
    :param str base_url: Service URL
    """

    def __init__(
            self, credentials, subscription_id, base_url=None):

        # Both required parameters must be provided.
        for param_name, param_value in (('credentials', credentials),
                                        ('subscription_id', subscription_id)):
            if param_value is None:
                raise ValueError("Parameter '%s' must not be None." % param_name)

        super(NetworkManagementClientConfiguration, self).__init__(
            base_url or 'https://management.azure.com')

        self.add_user_agent('azure-mgmt-network/{}'.format(VERSION))
        self.add_user_agent('Azure-SDK-For-Python')

        self.credentials = credentials
        self.subscription_id = subscription_id
class NetworkManagementClient(SDKClient):
"""Network Client
:ivar config: Configuration for client.
:vartype config: NetworkManagementClientConfiguration
:ivar application_gateways: ApplicationGateways operations
:vartype application_gateways: azure.mgmt.network.v2017_09_01.operations.ApplicationGatewaysOperations
:ivar application_security_groups: ApplicationSecurityGroups operations
:vartype application_security_groups: azure.mgmt.network.v2017_09_01.operations.ApplicationSecurityGroupsOperations
:ivar available_endpoint_services: AvailableEndpointServices operations
:vartype available_endpoint_services: azure.mgmt.network.v2017_09_01.operations.AvailableEndpointServicesOperations
:ivar express_route_circuit_authorizations: ExpressRouteCircuitAuthorizations operations
:vartype express_route_circuit_authorizations: azure.mgmt.network.v2017_09_01.operations.ExpressRouteCircuitAuthorizationsOperations
:ivar express_route_circuit_peerings: ExpressRouteCircuitPeerings operations
:vartype express_route_circuit_peerings: azure.mgmt.network.v2017_09_01.operations.ExpressRouteCircuitPeeringsOperations
:ivar express_route_circuits: ExpressRouteCircuits operations
:vartype express_route_circuits: azure.mgmt.network.v2017_09_01.operations.ExpressRouteCircuitsOperations
:ivar express_route_service_providers: ExpressRouteServiceProviders operations
:vartype express_route_service_providers: azure.mgmt.network.v2017_09_01.operations.ExpressRouteServiceProvidersOperations
:ivar load_balancers: LoadBalancers operations
:vartype load_balancers: azure.mgmt.network.v2017_09_01.operations.LoadBalancersOperations
:ivar load_balancer_backend_address_pools: LoadBalancerBackendAddressPools operations
:vartype load_balancer_backend_address_pools: azure.mgmt.network.v2017_09_01.operations.LoadBalancerBackendAddressPoolsOperations
:ivar load_balancer_frontend_ip_configurations: LoadBalancerFrontendIPConfigurations operations
:vartype load_balancer_frontend_ip_configurations: azure.mgmt.network.v2017_09_01.operations.LoadBalancerFrontendIPConfigurationsOperations
:ivar inbound_nat_rules: InboundNatRules operations
:vartype inbound_nat_rules: azure.mgmt.network.v2017_09_01.operations.InboundNatRulesOperations
:ivar load_balancer_load_balancing_rules: LoadBalancerLoadBalancingRules operations
:vartype load_balancer_load_balancing_rules: azure.mgmt.network.v2017_09_01.operations.LoadBalancerLoadBalancingRulesOperations
:ivar load_balancer_network_interfaces: LoadBalancerNetworkInterfaces operations
:vartype load_balancer_network_interfaces: azure.mgmt.network.v2017_09_01.operations.LoadBalancerNetworkInterfacesOperations
:ivar load_balancer_probes: LoadBalancerProbes operations
:vartype load_balancer_probes: azure.mgmt.network.v2017_09_01.operations.LoadBalancerProbesOperations
:ivar network_interfaces: NetworkInterfaces operations
:vartype network_interfaces: azure.mgmt.network.v2017_09_01.operations.NetworkInterfacesOperations
:ivar network_interface_ip_configurations: NetworkInterfaceIPConfigurations operations
:vartype network_interface_ip_configurations: azure.mgmt.network.v2017_09_01.operations.NetworkInterfaceIPConfigurationsOperations
:ivar network_interface_load_balancers: NetworkInterfaceLoadBalancers operations
:vartype network_interface_load_balancers: azure.mgmt.network.v2017_09_01.operations.NetworkInterfaceLoadBalancersOperations
:ivar network_security_groups: NetworkSecurityGroups operations
:vartype network_security_groups: azure.mgmt.network.v2017_09_01.operations.NetworkSecurityGroupsOperations
:ivar security_rules: SecurityRules operations
:vartype security_rules: azure.mgmt.network.v2017_09_01.operations.SecurityRulesOperations
:ivar default_security_rules: DefaultSecurityRules operations
:vartype default_security_rules: azure.mgmt.network.v2017_09_01.operations.DefaultSecurityRulesOperations
:ivar network_watchers: NetworkWatchers operations
:vartype network_watchers: azure.mgmt.network.v2017_09_01.operations.NetworkWatchersOperations
:ivar packet_captures: PacketCaptures operations
:vartype packet_captures: azure.mgmt.network.v2017_09_01.operations.PacketCapturesOperations
:ivar operations: Operations operations
:vartype operations: azure.mgmt.network.v2017_09_01.operations.Operations
:ivar public_ip_addresses: PublicIPAddresses operations
:vartype public_ip_addresses: azure.mgmt.network.v2017_09_01.operations.PublicIPAddressesOperations
:ivar route_filters: RouteFilters operations
:vartype route_filters: azure.mgmt.network.v2017_09_01.operations.RouteFiltersOperations
:ivar route_filter_rules: RouteFilterRules operations
:vartype route_filter_rules: azure.mgmt.network.v2017_09_01.operations.RouteFilterRulesOperations
:ivar route_tables: RouteTables operations
:vartype route_tables: azure.mgmt.network.v2017_09_01.operations.RouteTablesOperations
:ivar routes: Routes operations
:vartype routes: azure.mgmt.network.v2017_09_01.operations.RoutesOperations
:ivar bgp_service_communities: BgpServiceCommunities operations
:vartype bgp_service_communities: azure.mgmt.network.v2017_09_01.operations.BgpServiceCommunitiesOperations
:ivar usages: Usages operations
:vartype usages: azure.mgmt.network.v2017_09_01.operations.UsagesOperations
:ivar virtual_networks: VirtualNetworks operations
:vartype virtual_networks: azure.mgmt.network.v2017_09_01.operations.VirtualNetworksOperations
:ivar subnets: Subnets operations
:vartype subnets: azure.mgmt.network.v2017_09_01.operations.SubnetsOperations
:ivar virtual_network_peerings: VirtualNetworkPeerings operations
:vartype virtual_network_peerings: azure.mgmt.network.v2017_09_01.operations.VirtualNetworkPeeringsOperations
:ivar virtual_network_gateways: VirtualNetworkGateways operations
:vartype virtual_network_gateways: azure.mgmt.network.v2017_09_01.operations.VirtualNetworkGatewaysOperations
:ivar virtual_network_gateway_connections: VirtualNetworkGatewayConnections operations
:vartype virtual_network_gateway_connections: azure.mgmt.network.v2017_09_01.operations.VirtualNetworkGatewayConnectionsOperations
:ivar local_network_gateways: LocalNetworkGateways operations
:vartype local_network_gateways: azure.mgmt.network.v2017_09_01.operations.LocalNetworkGatewaysOperations
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param subscription_id: The subscription credentials which uniquely
identify the Microsoft Azure subscription. The subscription ID forms part
of the URI for every service call.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self, credentials, subscription_id, base_url=None):
self.config = NetworkManagementClientConfiguration(credentials, subscription_id, base_url)
super(NetworkManagementClient, self).__init__(self.config.credentials, self.config)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.application_gateways = ApplicationGatewaysOperations(
self._client, self.config, self._serialize, self._deserialize)
self.application_security_groups = ApplicationSecurityGroupsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.available_endpoint_services = AvailableEndpointServicesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.express_route_circuit_authorizations = ExpressRouteCircuitAuthorizationsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.express_route_circuit_peerings = ExpressRouteCircuitPeeringsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.express_route_circuits = ExpressRouteCircuitsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.express_route_service_providers = ExpressRouteServiceProvidersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.load_balancers = LoadBalancersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.load_balancer_backend_address_pools = LoadBalancerBackendAddressPoolsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.load_balancer_frontend_ip_configurations = LoadBalancerFrontendIPConfigurationsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.inbound_nat_rules = InboundNatRulesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.load_balancer_load_balancing_rules = LoadBalancerLoadBalancingRulesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.load_balancer_network_interfaces = LoadBalancerNetworkInterfacesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.load_balancer_probes = LoadBalancerProbesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.network_interfaces = NetworkInterfacesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.network_interface_ip_configurations = NetworkInterfaceIPConfigurationsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.network_interface_load_balancers = NetworkInterfaceLoadBalancersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.network_security_groups = NetworkSecurityGroupsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.security_rules = SecurityRulesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.default_security_rules = DefaultSecurityRulesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.network_watchers = NetworkWatchersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.packet_captures = PacketCapturesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.operations = Operations(
self._client, self.config, self._serialize, self._deserialize)
self.public_ip_addresses = PublicIPAddressesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.route_filters = RouteFiltersOperations(
self._client, self.config, self._serialize, self._deserialize)
self.route_filter_rules = RouteFilterRulesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.route_tables = RouteTablesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.routes = RoutesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.bgp_service_communities = BgpServiceCommunitiesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.usages = UsagesOperations(
self._client, self.config, self._serialize, self._deserialize)
self.virtual_networks = VirtualNetworksOperations(
self._client, self.config, self._serialize, self._deserialize)
self.subnets = SubnetsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.virtual_network_peerings = VirtualNetworkPeeringsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.virtual_network_gateways = VirtualNetworkGatewaysOperations(
self._client, self.config, self._serialize, self._deserialize)
self.virtual_network_gateway_connections = VirtualNetworkGatewayConnectionsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.local_network_gateways = LocalNetworkGatewaysOperations(
self._client, self.config, self._serialize, self._deserialize)
    def check_dns_name_availability(
            self, location, domain_name_label, custom_headers=None, raw=False, **operation_config):
        """Checks whether a domain name in the cloudapp.azure.com zone is
        available for use.

        :param location: The location of the domain name.
        :type location: str
        :param domain_name_label: The domain name to be verified. It must
         conform to the following regular expression:
         ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
        :type domain_name_label: str
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: DnsNameAvailabilityResult or ClientRawResponse if raw=true
        :rtype:
         ~azure.mgmt.network.v2017_09_01.models.DnsNameAvailabilityResult or
         ~msrest.pipeline.ClientRawResponse
        :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
        """
        # API version is pinned by the generated client, not configurable.
        api_version = "2017-09-01"

        # Construct URL from the route template attached below as metadata.
        url = self.check_dns_name_availability.metadata['url']
        path_format_arguments = {
            'location': self._serialize.url("location", location, 'str'),
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query-string parameters.
        query_parameters = {}
        query_parameters['domainNameLabel'] = self._serialize.query("domain_name_label", domain_name_label, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers; caller-supplied headers may override defaults.
        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            # Correlation id for client-side request tracing.
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')

        # Construct and send request.
        request = self._client.get(url, query_parameters)
        response = self._client.send(request, header_parameters, stream=False, **operation_config)

        if response.status_code not in [200]:
            # Any non-200 status is surfaced as a CloudError carrying the
            # service-side request id for support/diagnostics.
            exp = CloudError(response)
            exp.request_id = response.headers.get('x-ms-request-id')
            raise exp

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('DnsNameAvailabilityResult', response)

        if raw:
            # Caller asked for the transport response alongside the model.
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    check_dns_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/CheckDnsNameAvailability'}
| 63.861862 | 159 | 0.791545 |
5be0bbbc171b7699601d7f8987142307d669b5c7 | 2,757 | py | Python | pythonx/LanguageClient_wrapper.py | SpaceVim/LanguageClient-neovim | fdd50f8453fda24455a12b41582996b452095448 | [
"MIT"
] | 5 | 2018-11-02T00:48:13.000Z | 2021-10-15T03:46:14.000Z | pythonx/LanguageClient_wrapper.py | SpaceVim/LanguageClient-neovim | fdd50f8453fda24455a12b41582996b452095448 | [
"MIT"
] | null | null | null | pythonx/LanguageClient_wrapper.py | SpaceVim/LanguageClient-neovim | fdd50f8453fda24455a12b41582996b452095448 | [
"MIT"
] | 3 | 2018-01-17T16:50:08.000Z | 2021-12-25T14:57:20.000Z | from LanguageClient import LanguageClient
import vim
# Single shared LanguageClient instance; every wrapper below forwards the
# positional arguments it received from Vim as one tuple, matching the
# `*_vim(args)`-style entry points on LanguageClient.
lc = LanguageClient(vim)


def getState(*args):
    return lc.getState_vim(args)


def registerServerCommands(*args):
    return lc.registerServerCommands(args)


def alive_vim(*args):
    # NOTE(review): delegates to alive_nvim — presumably the nvim handler is
    # shared for the vim code path; confirm against LanguageClient.
    return lc.alive_nvim(args)


def setLoggingLevel(*args):
    return lc.setLoggingLevel_vim(args)


def start(*args):
    return lc.start(args)


def stop(*args):
    return lc.stop(args)


def initialize(*args):
    # Unlike the other wrappers this unpacks args: lc.initialize takes
    # positional parameters rather than a single tuple.
    return lc.initialize(*args)


def handle_BufReadPost(*args):
    return lc.handle_BufReadPost(args)


def textDocument_didOpen(*args):
    return lc.textDocument_didOpen(args)


def textDocument_didClose(*args):
    return lc.textDocument_didClose(args)


def workspace_didChangeConfiguration(*args):
    # Fixed: this module-level wrapper previously declared a stray `self`
    # parameter, which swallowed the first argument passed from Vim.
    return lc.workspace_didChangeConfiguration_vim(args)


def textDocument_hover(*args):
    return lc.textDocument_hover(args)


def textDocument_definition(*args):
    return lc.textDocument_definition(args)


def textDocument_rename(*args):
    return lc.textDocument_rename(args)


def textDocument_documentSymbol(*args):
    return lc.textDocument_documentSymbol(args)


def workspace_symbol(*args):
    return lc.workspace_symbol(args)


def textDocument_references(*args):
    return lc.textDocument_references(args)


def rustDocument_implementations(*args):
    return lc.rustDocument_implementations(args)


def handle_TextChanged(*args):
    # NOTE(review): intentionally routed to handle_BufReadPost — a text
    # change re-triggers the same document-sync path; confirm upstream.
    return lc.handle_BufReadPost(args)


def handle_TextChangedI(*args):
    return lc.handle_TextChangedI(args)


def textDocument_didChange(*args):
    return lc.textDocument_didChange(args)


def handle_BufWritePost(*args):
    return lc.handle_BufWritePost(args)


def textDocument_didSave(*args):
    return lc.textDocument_didSave(args)


def textDocument_completion(*args):
    return lc.textDocument_completion(args)


def textDocument_completionOmnifunc(*args):
    return lc.textDocument_completionOmnifunc(args)


def completionManager_refresh(*args):
    return lc.completionManager_refresh(args)


def exit(*args):
    return lc.exit(args)


def handle_CursorMoved(*args):
    return lc.handle_CursorMoved(args)


def completionItem_resolve(*args):
    return lc.completionItem_resolve(args)


def textDocument_signatureHelp(*args):
    return lc.textDocument_signatureHelp(args)


def textDocument_codeAction(*args):
    return lc.textDocument_codeAction(args)


def workspace_executeCommand(*args):
    return lc.workspace_executeCommand(args)


def textDocument_formatting(*args):
    return lc.textDocument_formatting(args)


def textDocument_rangeFormatting(*args):
    return lc.textDocument_rangeFormatting(args)


def call_vim(*args):
    return lc.call_vim(args)


def notify_vim(*args):
    return lc.notify_vim(args)
| 18.503356 | 56 | 0.772216 |
bf9ff09c74561c85c0e0140c50592080a1bc9106 | 1,938 | py | Python | nucleus/istore/i_method_fetch.py | 1x-eng/PROTON | 2f27352f7eb9b46642325d800fcdb98ba5c99596 | [
"BSD-3-Clause"
] | 31 | 2018-09-28T05:00:02.000Z | 2021-11-09T11:06:57.000Z | nucleus/istore/i_method_fetch.py | PruthviKumarBK/PROTON | 2f27352f7eb9b46642325d800fcdb98ba5c99596 | [
"BSD-3-Clause"
] | 23 | 2019-05-17T08:48:07.000Z | 2020-01-20T22:34:28.000Z | nucleus/istore/i_method_fetch.py | 1x-eng/PROTON | 2f27352f7eb9b46642325d800fcdb98ba5c99596 | [
"BSD-3-Clause"
] | 7 | 2018-09-28T16:57:35.000Z | 2019-11-23T07:36:41.000Z | # BSD 3-Clause License
#
# Copyright (c) 2018, Pruthvi Kumar All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this list of conditions and the following
# disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
#
# Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Package metadata for the PROTON istore module.
__author__ = "Pruthvi Kumar, pruthvikumar.123@gmail.com"
__copyright__ = "Copyright (C) 2018 Pruthvi Kumar | http://www.apricity.co.in"
__license__ = "BSD 3-Clause License"
__version__ = "1.0"
class IFetch(object):
    """Interface stub for fetch-style controller methods in the istore layer.

    ``extract_controller_methods`` currently yields no methods; subclasses
    or later revisions are expected to populate the list.
    """

    def __init__(self):
        super(IFetch, self).__init__()

    def extract_controller_methods(self):
        """Return the (currently empty) list of fetch controller methods."""
        return []
bc7c3c45eb1369ecefff23026e276416d81c25c1 | 16,835 | py | Python | example/Funny_Strings/change_setting_gui.py | MoeinMavini/sema | 219cd1c7e04a03422ebb41a982d936da3ba18e6c | [
"MIT"
] | 1 | 2021-08-02T07:00:02.000Z | 2021-08-02T07:00:02.000Z | example/Funny_Strings/change_setting_gui.py | MoeinMavini/sema | 219cd1c7e04a03422ebb41a982d936da3ba18e6c | [
"MIT"
] | null | null | null | example/Funny_Strings/change_setting_gui.py | MoeinMavini/sema | 219cd1c7e04a03422ebb41a982d936da3ba18e6c | [
"MIT"
] | null | null | null | from sema.common import check, get, set
from sema import extract
import sys
import textwrap
import tkinter as tk
from tkinter import messagebox
import tkinter.ttk as ttk
import os
# If a "sema_files" file exists in this directory, the setting files listed in it will be used.
# Otherwise the list defined below will be used.
dot_setting_list = []#List of .setting files associated to this program
class ChangeSettingMainApp:
    """Tkinter GUI for editing sema ``.setting`` files.

    The window is a stack of mutually exclusive frames that are packed /
    pack_forget'ed to simulate pages:

    * ``frame1`` — pick a .setting file
    * ``inside_file_frame`` — pick an option inside the file
    * ``possible_values_frame`` — pick one of the declared values
    * ``generic_value_frame`` — free-text value entry
    * ``numeric_value_frame`` — spinbox for ranged numeric values
    """

    # Prefixes reused when composing label texts.
    file_description_part_1 = 'Setting file description: '
    current_value_part_1 = 'Current value is: '
    # Candidate .setting files (class-level default; run_main may replace it
    # with the contents of a "sema_files" file).
    dot_setting_list = dot_setting_list
    # Subset of dot_setting_list that actually exists on disk.
    approved_dot_setting_list = []
    # True when the selected .setting has a companion .setting.xml file.
    has_dot_xml = False
    # Values offered for the currently selected option.
    possible_values = []

    def __init__(self, master=None):
        """Build all widgets; only the file-selection frame starts visible."""
        # build ui
        self.toplevel1 = tk.Tk() if master is None else tk.Toplevel(master)
        # main frame: file selection
        self.frame1 = ttk.Frame(self.toplevel1)
        self.file_label = ttk.Label(self.frame1)
        self.file_label.configure(text='Select the setting file')
        self.file_label.pack(padx='80', pady='5', side='top')
        self.file_combobox = ttk.Combobox(self.frame1)
        self.file_combobox.configure(justify='left', state='readonly')
        self.file_combobox.pack(expand='true', fill='x', padx='5', side='top')
        self.file_button = ttk.Button(self.frame1)
        self.file_button.configure(text='Accept')
        self.file_button.pack(pady='5', side='top')
        self.file_button.bind('<Button-1>', lambda event: ChangeSettingMainApp.switch_to_inside_file(self))
        self.frame1.configure(height='200', width='200')
        self.frame1.pack(fill='both', side='top')
        self.toplevel1.configure(height='0', width='0')
        self.toplevel1.title('sema setting maker - change setting')
        # inside_file_frame: option selection within the chosen file
        self.inside_file_frame = ttk.Frame()
        self.back_to_main_button = ttk.Button(self.inside_file_frame)
        self.back_to_main_button.configure(text='Back')
        self.back_to_main_button.pack(anchor='w', side='top')
        self.back_to_main_button.bind('<Button-1>', lambda event: ChangeSettingMainApp.switch_to_main(self))
        self.inside_file_sparator = ttk.Separator(self.inside_file_frame)
        self.inside_file_sparator.configure(orient='horizontal')
        self.inside_file_sparator.pack(expand='true', fill='x', pady='5 0', side='top')
        self.select_setting_labelframe = ttk.Labelframe(self.inside_file_frame)
        self.select_setting_combobox = ttk.Combobox(self.select_setting_labelframe)
        self.select_setting_combobox.configure(state='readonly')
        self.select_setting_combobox.pack(fill='x', side='top')
        self.select_setting_button = ttk.Button(self.select_setting_labelframe)
        self.select_setting_button.configure(text='Accept')
        self.select_setting_button.pack(side='bottom')
        self.select_setting_button.bind('<Button-1>', lambda event: ChangeSettingMainApp.switch_to_possible_values(self))
        self.select_setting_labelframe.configure(height='200', text="Select the option you want to change it's value", width='200')
        self.select_setting_labelframe.pack(padx='7', side='bottom')
        self.file_description_label = ttk.Label(self.inside_file_frame)
        self.file_description_label.configure(text=self.file_description_part_1)
        self.file_description_label.pack(padx='7', pady='7 12', side='left')
        self.inside_file_frame.configure(height='500', width='500')
        self.inside_file_frame.pack(side='top')
        self.inside_file_frame.pack_forget()
        # possible_values: choose from declared values of an option
        self.possible_values_frame = ttk.Frame()
        self.back_to_inside_file_button = ttk.Button(self.possible_values_frame)
        self.back_to_inside_file_button.configure(text='Back')
        self.back_to_inside_file_button.pack(anchor='w', side='top')
        self.back_to_inside_file_button.bind('<Button-1>', lambda event: ChangeSettingMainApp.switch_to_inside_file(self))
        self.possible_value_sparator = ttk.Separator(self.possible_values_frame)
        self.possible_value_sparator.configure(orient='horizontal')
        self.possible_value_sparator.pack(expand='true', fill='x', pady='5 0', side='top')
        self.possible_values_current_label = ttk.Label(self.possible_values_frame)
        self.possible_values_current_label.configure(text='Current value is: ')
        self.possible_values_current_label.pack(anchor='w', expand='true', padx='7', pady='5 0', side='top')
        self.select_value_labelframe = ttk.Labelframe(self.possible_values_frame)
        self.select_value_combobox = ttk.Combobox(self.select_value_labelframe)
        self.select_value_combobox.configure(state='readonly')
        self.select_value_combobox.pack(fill='x', side='top')
        self.select_value_button = ttk.Button(self.select_value_labelframe)
        self.select_value_button.configure(text='Accept')
        self.select_value_button.bind('<Button-1>', lambda event: ChangeSettingMainApp.select_possible_value_event(self))
        self.select_value_button.pack(side='bottom')
        self.select_value_labelframe.configure(height='200', text='Choose the new value', width='200')
        self.select_value_labelframe.pack(padx='7', pady='15', side='bottom')
        self.possible_values_frame.configure(height='500', width='500')
        self.possible_values_frame.pack(side='top')
        self.possible_values_frame.pack_forget()
        # generic_value: free-text entry page
        self.generic_value_frame = ttk.Frame()
        self.back_from_generic_value_button = ttk.Button(self.generic_value_frame)
        self.back_from_generic_value_button.configure(text='Back')
        self.back_from_generic_value_button.pack(anchor='w', side='top')
        self.back_from_generic_value_button.bind('<Button-1>', lambda event: ChangeSettingMainApp.switch_to_before_set_value(self))
        self.separator1 = ttk.Separator(self.generic_value_frame)
        self.separator1.configure(orient='horizontal')
        self.separator1.pack(fill='x', pady='5 0', side='top')
        self.generic_value_label = ttk.Label(self.generic_value_frame)
        self.generic_value_label.configure(text='Enter the new value:')
        self.generic_value_label.pack(padx='15', pady='8', side='top')
        self.generic_value_entery = ttk.Entry(self.generic_value_frame)
        self.generic_value_entery.pack(side='top')
        self.generic_value_submit_button = ttk.Button(self.generic_value_frame)
        self.generic_value_submit_button.configure(text='Submit')
        self.generic_value_submit_button.pack(pady='7', side='top')
        self.generic_value_submit_button.bind('<Button-1>', lambda event: ChangeSettingMainApp.submit_generic_value(self))
        self.generic_value_frame.configure(height='500', width='500')
        self.generic_value_frame.pack(side='top')
        self.generic_value_frame.pack_forget()
        # numeric_value: spinbox page for ranged numeric values
        self.numeric_value_frame = ttk.Frame()
        self.back_from_numeric_value_button = ttk.Button(self.numeric_value_frame)
        self.back_from_numeric_value_button.configure(text='Back')
        self.back_from_numeric_value_button.pack(anchor='w', side='top')
        self.back_from_numeric_value_button.bind('<Button-1>', lambda event: ChangeSettingMainApp.switch_to_before_set_value(self))
        self.separator2 = ttk.Separator(self.numeric_value_frame)
        self.separator2.configure(orient='horizontal')
        self.separator2.pack(fill='x', pady='5 0', side='top')
        self.numeric_value_spinbox = ttk.Spinbox(self.numeric_value_frame)
        _text_ = '''Choose the new number'''
        self.numeric_value_spinbox.delete('0', 'end')
        self.numeric_value_spinbox.insert('0', _text_)
        self.numeric_value_spinbox.pack(padx='15', pady='8', side='top')
        self.numeric_value_submit_button = ttk.Button(self.numeric_value_frame)
        self.numeric_value_submit_button.configure(text='Submit')
        self.numeric_value_submit_button.pack(pady='0 5', side='top')
        self.numeric_value_submit_button.bind('<Button-1>', lambda event: ChangeSettingMainApp.submit_numeric_value(self))
        self.numeric_value_frame.configure(height='500', width='500')
        self.numeric_value_frame.pack(side='top')
        self.numeric_value_frame.pack_forget()
        # Main widget
        self.mainwindow = self.toplevel1

    def run_main(self, show_error=True):
        """Validate the candidate file list, populate the combobox, and
        enter the Tk main loop.

        When a "sema_files" file exists, its non-blank lines replace
        ``dot_setting_list``. Missing files are reported via warning
        dialogs when *show_error* is true.
        """
        if os.path.isfile('sema_files'):
            files = open('sema_files', 'r')
            self.dot_setting_list = []
            for line in files.readlines():
                if line.strip() != '\n' and line.strip() != '':
                    self.dot_setting_list.append(line.strip())
            files.close()
        error_list = ''
        for item in self.dot_setting_list:
            if os.path.isfile(item):
                self.approved_dot_setting_list.append(item)
            elif show_error:
                error_list += 'Caution: ' + item + ' not found!\n'
        if len(error_list) != 0 and show_error:
            tk.messagebox.showwarning(title='File not found', message=error_list)
        if len(self.approved_dot_setting_list) == 0:
            if show_error:
                tk.messagebox.showwarning(title='No Setting File Found', message='No Setting File Found!')
        else:
            self.file_combobox.configure(values=self.approved_dot_setting_list)
            self.file_combobox.current(0)
        self.mainwindow.mainloop()

    def switch_to_main(self):
        """Return to the file-selection page."""
        self.inside_file_frame.pack_forget()
        self.frame1.pack(side='top')

    def switch_to_inside_file(self):
        """Show the option-selection page for the chosen .setting file."""
        self.possible_values_frame.pack_forget()
        path = self.file_combobox.get()
        option_list = get.option_names_in_file(path)
        if len(option_list) == 0:
            tk.messagebox.showwarning(title='No options', message='This file has no options!')
        else:
            # A companion <path>.xml carries descriptions and value metadata.
            if os.path.isfile(path + '.xml'):
                self.has_dot_xml = True
                file_description = get.file_description(path)
                if file_description != None:
                    file_description = self.file_description_part_1 + file_description
                    # Flatten newlines, then re-wrap to the label width.
                    file_description = file_description.replace('\n', ' ')
                    file_description = textwrap.fill(file_description, width=75)
                else:
                    file_description = ''
                self.file_description_label.configure(text=file_description)
            else:
                self.file_description_label.configure(text='Caution: .setting.xml file missing, no additional data is provided for this setting!')
            self.frame1.pack_forget()
            # Append each option's comment (if any) to its combobox entry.
            for i in range(len(option_list)):
                comment = get.option_comment(path, option_list[i])
                if comment != None:
                    option_list[i] = option_list[i] + ': ' + comment
            self.select_setting_combobox.configure(values=option_list)
            self.select_setting_combobox.current(0)
            self.inside_file_frame.pack(side='top')

    def switch_to_possible_values(self):
        """Show either the declared-values page (when metadata exists) or
        the free-text entry page for the selected option."""
        path = self.file_combobox.get()
        option = get.option_names_in_file(path)[self.select_setting_combobox.current()]
        self.possible_values_current_label.configure(text=self.current_value_part_1 + extract.get_value(path, option)['Value'])
        self.possible_values = []
        self.inside_file_frame.pack_forget()
        if self.has_dot_xml:
            i = 1
            for value in get.option_values(path, option):
                comment = get.possible_value_by_number(path, option, i - 1)['comment']
                if comment == None:
                    comment = ''
                else:
                    comment = ': ' + comment
                self.possible_values.append(value + comment)
                i += 1
            # Last entry always lets the user type a value manually.
            self.possible_values.append('Manual Value')
            self.select_value_combobox.configure(values=self.possible_values)
            self.select_value_combobox.current(0)
            self.possible_values_frame.pack(side='top')
        else:
            self.generic_value_entery.delete(0, 'end')
            self.generic_value_frame.pack(side='top')

    def switch_to_before_set_value(self):
        """Leave either value-entry page and go back to option selection."""
        self.generic_value_frame.pack_forget()
        self.numeric_value_frame.pack_forget()
        self.switch_to_inside_file()

    def submit_generic_value(self):
        """Validate the free-text value with sema and write it on success.

        check.general_value codes: 700 = contains a comma, 701 = contains a
        newline, 200 = OK (per the branches below).
        """
        value = self.generic_value_entery.get()
        check_value = check.general_value(value)
        if check_value == 700:
            tk.messagebox.showwarning(title='Bad Input', message="Value must not contain ','")
        elif check_value == 701:
            tk.messagebox.showwarning(title='Bad Input', message="Error: Value includes new line")
        elif check_value != 200:
            tk.messagebox.showwarning(title="Unspecified Error", message="Unspecified Error")
        else:
            path = self.file_combobox.get()
            option = get.option_names_in_file(path)[self.select_setting_combobox.current()]
            response = set.option_value(path, option, value)
            if response == 200:
                self.switch_to_before_set_value()
            elif response == 701:
                tk.messagebox.showwarning(title="Error", message="701 Error")

    def select_possible_value_event(self):
        """Apply the chosen declared value, or route to the manual-entry or
        numeric-spinbox page depending on the value's metadata."""
        self.possible_values_frame.pack_forget()
        if self.select_value_combobox.current() == len(self.possible_values) - 1:
            # Last combobox entry is always 'Manual Value'.
            self.generic_value_entery.delete(0, 'end')
            self.generic_value_frame.pack(side='top')
        else:
            path = self.file_combobox.get()
            option = get.option_names_in_file(path)[self.select_setting_combobox.current()]
            value = get.possible_value_by_number(path, option, self.select_value_combobox.current())
            if 'min' in value:  # Check if value is ranged
                self.numeric_value_spinbox.delete(0, 'end')
                # NOTE(review): `min`/`max` shadow the builtins here; local
                # to this branch, so harmless but worth renaming eventually.
                min = max = step = initial = 0
                style = ''
                if value['min'] == None or value['min'] == '':
                    min = -sys.maxsize
                else:
                    initial = min = float(value['min'])
                if value['max'] == None or value['max'] == '':
                    max = sys.maxsize
                else:
                    max = float(value['max'])
                if value['step'] == None or value['step'] == '':
                    step = 1
                else:
                    step = float(value['step'])
                    # Display format keeps as many decimals as the step has.
                    style = '% ' + str(len(str(step).split('.')[1])) + 'f'
                self.numeric_value_spinbox.configure(from_=min, to=max, format=style, increment=step)
                self.numeric_value_spinbox.set(initial)
                self.numeric_value_frame.pack(side='top')
            elif 'name' in value:  # Value is a single choice
                response = set.option_value(path, option, value['name'])
                if response == 200:
                    self.switch_to_before_set_value()
                elif response == 701:
                    tk.messagebox.showwarning(title="Error", message="701 Error")

    def submit_numeric_value(self):
        """Range/step-check the spinbox number and write it on success."""
        number = float(self.numeric_value_spinbox.get())
        path = self.file_combobox.get()
        option = get.option_names_in_file(path)[self.select_setting_combobox.current()]
        value = get.possible_value_by_number(path, option, self.select_value_combobox.current())
        diff_from_step = check.diff_to_step(value['min'], value['max'], value['step'], number)
        if value['max'] != None and value['max'] != '' and number > float(value['max']):
            tk.messagebox.showwarning(title='Out of range', message="Number is bigger than max")
        elif value['min'] != None and value['min'] != '' and number < float(value['min']):
            tk.messagebox.showwarning(title='Out of range', message="Number is less than min")
        elif value['step'] != None and value['step'] != '' and diff_from_step != 0:
            tk.messagebox.showwarning(title='Out of range', message='Number fails the step constraint by ' + str(diff_from_step))
        else:
            response = set.option_value(path, option, number)
            if response == 200:
                self.switch_to_before_set_value()
            elif response == 701:
                tk.messagebox.showwarning(title="Error", message="701 Error")
# Launch the GUI when run as a script; True enables missing-file warnings.
if __name__ == '__main__':
    app = ChangeSettingMainApp()
    app.run_main(True)
| 46.25 | 146 | 0.647461 |
d14739bf64e477e8cd12aad239d1869487c516c5 | 5,146 | py | Python | Router/routersploit/test/test_completer.py | dendisuhubdy/grokmachine | 120a21a25c2730ed356739231ec8b99fc0575c8b | [
"BSD-3-Clause"
] | 46 | 2017-05-15T11:15:08.000Z | 2018-07-02T03:32:52.000Z | Router/routersploit/test/test_completer.py | dendisuhubdy/grokmachine | 120a21a25c2730ed356739231ec8b99fc0575c8b | [
"BSD-3-Clause"
] | null | null | null | Router/routersploit/test/test_completer.py | dendisuhubdy/grokmachine | 120a21a25c2730ed356739231ec8b99fc0575c8b | [
"BSD-3-Clause"
] | 24 | 2017-05-17T03:26:17.000Z | 2018-07-09T07:00:50.000Z | import unittest
import os
import pexpect
class RoutersploitCompleterTest(unittest.TestCase):
    """End-to-end tests for the routersploit CLI tab-completion.

    Each test spawns ``rsf.py`` under pexpect, sends keystrokes
    (``\\t\\t`` triggers completion display) and asserts on the exact
    terminal output, ANSI escape codes included.
    """

    def __init__(self, methodName='runTest'):
        super(RoutersploitCompleterTest, self).__init__(methodName)
        # Absolute path to rsf.py three directories above this test file.
        self.cli_path = os.path.abspath(os.path.join(__file__, os.pardir, os.pardir, os.pardir, 'rsf.py'))
        # Prompt strings carry ANSI underline / color escapes.
        self.raw_prompt = "\033[4mrsf\033[0m > "
        self.module_prompt = lambda x: "\033[4mrsf\033[0m (\033[91m{}\033[0m) > ".format(x)

    def setUp(self):
        self.rsf = pexpect.spawn('python {}'.format(self.cli_path))
        self.rsf.send('\r\n')
        self.assertPrompt(self.raw_prompt)

    def tearDown(self):
        self.rsf.terminate(force=True)

    def assertPrompt(self, *args):
        """Expect the exact concatenation of *args* within 1 second."""
        value = ''.join(args)
        self.rsf.expect_exact(value, timeout=1)

    def _complete(self, keys, *expected):
        """Send *keys* to the CLI and assert the expected completion output.

        Extracted helper: every test below previously repeated the same
        send/assertPrompt pair verbatim.
        """
        self.rsf.send(keys)
        self.assertPrompt(*expected)

    def set_module(self):
        """Load the FTP bruteforce module so module-scoped commands exist."""
        self._complete("use creds/ftp_bruteforce\r\n", self.module_prompt('FTP Bruteforce'))

    def test_raw_commands_no_module(self):
        self._complete("\t\t", 'debug exit use \r\n', self.raw_prompt)

    def test_complete_use_raw(self):
        self._complete("u\t\t", self.raw_prompt, 'use ')

    def test_complete_use(self):
        self._complete("use \t\t", 'creds exploits scanners \r\n', self.raw_prompt, 'use ')

    def test_complete_use_creds(self):
        self._complete("use cr\t\t", self.raw_prompt, 'use creds/')

    def test_complete_use_creds_2(self):
        self._complete("use creds/\t\t", 'creds/http_basic_default')

    def test_complete_use_exploits(self):
        self._complete("use ex\t\t", self.raw_prompt, 'use exploits/')

    def test_complete_use_exploits_2(self):
        self._complete("use exploits/\t\t", 'exploits/dlink/')

    def test_complete_use_exploits_3(self):
        self._complete("use exploits/dli\t", self.raw_prompt, 'use exploits/dlink/')

    def test_complete_use_exploits_4(self):
        self._complete("use exploits/dlink/dir_300_320_\t\t\t", 'exploits/dlink/dir_300_320_615_auth_bypass')

    def test_raw_commands_with_module(self):
        self.set_module()
        self._complete("\t\t", 'back check debug exit run set show \r\n', self.module_prompt('FTP Bruteforce'))

    def test_complete_back_raw(self):
        self.set_module()
        self._complete("b\t\t", self.module_prompt('FTP Bruteforce'), 'back')

    def test_complete_check_raw(self):
        self.set_module()
        self._complete("c\t\t", self.module_prompt('FTP Bruteforce'), 'check')

    def test_complete_run_raw(self):
        self.set_module()
        self._complete("r\t\t", self.module_prompt('FTP Bruteforce'), 'run')

    def test_complete_set_raw(self):
        self.set_module()
        self._complete("s\t\t", 'set show \r\n', self.module_prompt('FTP Bruteforce'))

    def test_complete_set_raw_2(self):
        self.set_module()
        self._complete("se\t\t", self.module_prompt('FTP Bruteforce'), 'set ')

    def test_complete_set(self):
        self.set_module()
        self._complete("set \t\t", 'passwords port target threads usernames verbosity \r\n',
                       self.module_prompt('FTP Bruteforce'), 'set ')

    def test_complete_set_2(self):
        self.set_module()
        self._complete("set u\t\t", self.module_prompt('FTP Bruteforce'), 'set usernames ')

    def test_complete_show_raw(self):
        self.set_module()
        self._complete("sh\t\t", self.module_prompt('FTP Bruteforce'), 'show ')

    def test_complete_show(self):
        self.set_module()
        self._complete("show \t\t", 'info options \r\n', self.module_prompt('FTP Bruteforce'))

    def test_complete_show_info(self):
        self.set_module()
        self._complete("show i\t\t", self.module_prompt('FTP Bruteforce'), 'show info')

    def test_complete_show_options(self):
        self.set_module()
        self._complete("show o\t\t", self.module_prompt('FTP Bruteforce'), 'show options')
# Standard unittest entry point when run as a script.
if __name__ == '__main__':
    unittest.main()
7cc39bc13ba90ca3d39f420e5ffd733ba6bf3310 | 987 | py | Python | admin_panel_finder.py | TeamSOTD/Admin_Finder | 5b391e9f19d831391952f246b0e5cb7b12520a28 | [
"Apache-2.0"
] | null | null | null | admin_panel_finder.py | TeamSOTD/Admin_Finder | 5b391e9f19d831391952f246b0e5cb7b12520a28 | [
"Apache-2.0"
] | null | null | null | admin_panel_finder.py | TeamSOTD/Admin_Finder | 5b391e9f19d831391952f246b0e5cb7b12520a28 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from urllib2 import Request, urlopen, URLError, HTTPError
def Space(j):
    # Print j+1 spaces (Python 2 trailing-comma print: no newline) to
    # left-pad the banner text that follows.
    i = 0
    while i <= j:
        print " ",
        i += 1
def findAdmin():
f = open("link.txt","r");
link = raw_input("Silahkan Masukan Link Korban yg ingin di entot guys \n(ex : example.com or www.example.com ): ")
print "\n\nAvilable links : \n"
while True:
sub_link = f.readline()
if not sub_link:
break
req_link = "http://"+link+"/"+sub_link
req = Request(req_link)
try:
response = urlopen(req)
except HTTPError as e:
continue
except URLError as e:
continue
else:
print "OK => ",req_link
def Credit():
    # Print the credits banner, each line left-padded by Space(9).
    Space(9); print "#####################################"
    Space(9); print "# *** Admin Panel Finder *** #"
    Space(9); print "# Script by Cyber Indonesian #"
    Space(9); print "# Thanks By GhostName #"
    Space(9); print "# Stay llegal #"
    Space(9); print "#####################################"
# Script entry: show the banner, then run the scan.
Credit()
findAdmin()
| 23.5 | 115 | 0.568389 |
0a0c132d0e0dfca95664bb3b225282eb45078e1b | 22,694 | py | Python | pnc_cli/swagger_client/apis/products_api.py | vibe13/pnc-cli | 9020462cac5254bdd40cc7d8fa239433242cce45 | [
"Apache-2.0"
] | 2 | 2016-05-18T15:01:34.000Z | 2016-08-11T14:04:17.000Z | pnc_cli/swagger_client/apis/products_api.py | vibe13/pnc-cli | 9020462cac5254bdd40cc7d8fa239433242cce45 | [
"Apache-2.0"
] | 47 | 2016-06-23T19:58:40.000Z | 2020-03-10T17:58:11.000Z | pnc_cli/swagger_client/apis/products_api.py | vibe13/pnc-cli | 9020462cac5254bdd40cc7d8fa239433242cce45 | [
"Apache-2.0"
] | 21 | 2016-05-30T20:34:17.000Z | 2021-09-07T13:22:20.000Z | # coding: utf-8
"""
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ProductsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def create_new(self, **kwargs):
"""
Creates a new Product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_new(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param ProductRest body:
:return: ProductSingleton
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_new_with_http_info(**kwargs)
else:
(data) = self.create_new_with_http_info(**kwargs)
return data
    def create_new_with_http_info(self, **kwargs):
        """
        Creates a new Product
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_new_with_http_info(callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param ProductRest body:
        :return: ProductSingleton
            If the method is called asynchronously,
            returns the request thread.
        """
        # Keywords the generated client accepts in addition to 'body'.
        all_params = ['body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # Reject any keyword argument the operation does not declare.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_new" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # The Product payload is sent as the JSON request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']

        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])

        # Authentication setting (none required for this operation).
        auth_settings = []

        return self.api_client.call_api('/products', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ProductSingleton',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def get_all(self, **kwargs):
"""
Gets all Products
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_all(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page_index: Page Index
:param int page_size: Pagination size
:param str sort: Sorting RSQL
:param str q: RSQL Query
:return: ProductPage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_all_with_http_info(**kwargs)
else:
(data) = self.get_all_with_http_info(**kwargs)
return data
    def get_all_with_http_info(self, **kwargs):
        """
        Gets all Products
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_all_with_http_info(callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int page_index: Page Index
        :param int page_size: Pagination size
        :param str sort: Sorting RSQL
        :param str q: RSQL Query
        :return: ProductPage
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this generated endpoint.
        all_params = ['page_index', 'page_size', 'sort', 'q']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # `params` starts as this frame's locals; validated kwargs are
        # flattened into it below.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_all" % key
                )
            params[key] = val
        del params['kwargs']
        collection_formats = {}
        path_params = {}
        query_params = []
        # Optional paging/filter parameters are only sent when supplied.
        if 'page_index' in params:
            query_params.append(('pageIndex', params['page_index']))
        if 'page_size' in params:
            query_params.append(('pageSize', params['page_size']))
        if 'sort' in params:
            query_params.append(('sort', params['sort']))
        if 'q' in params:
            query_params.append(('q', params['q']))
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = []
        return self.api_client.call_api('/products', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ProductPage',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def get_product_versions(self, id, **kwargs):
"""
Get all versions for a Product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_product_versions(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product id (required)
:param int page_index: Page Index
:param int page_size: Pagination size
:param str sort: Sorting RSQL
:param str q: RSQL Query
:return: ProductVersionPage
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_product_versions_with_http_info(id, **kwargs)
else:
(data) = self.get_product_versions_with_http_info(id, **kwargs)
return data
    def get_product_versions_with_http_info(self, id, **kwargs):
        """
        Get all versions for a Product
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_product_versions_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int id: Product id (required)
        :param int page_index: Page Index
        :param int page_size: Pagination size
        :param str sort: Sorting RSQL
        :param str q: RSQL Query
        :return: ProductVersionPage
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this generated endpoint.
        all_params = ['id', 'page_index', 'page_size', 'sort', 'q']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_product_versions" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_product_versions`")
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        # Optional paging/filter parameters are only sent when supplied.
        if 'page_index' in params:
            query_params.append(('pageIndex', params['page_index']))
        if 'page_size' in params:
            query_params.append(('pageSize', params['page_size']))
        if 'sort' in params:
            query_params.append(('sort', params['sort']))
        if 'q' in params:
            query_params.append(('q', params['q']))
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = []
        return self.api_client.call_api('/products/{id}/product-versions', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ProductVersionPage',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def get_specific(self, id, **kwargs):
"""
Get specific Product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_specific(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product id (required)
:return: ProductSingleton
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_specific_with_http_info(id, **kwargs)
else:
(data) = self.get_specific_with_http_info(id, **kwargs)
return data
    def get_specific_with_http_info(self, id, **kwargs):
        """
        Get specific Product
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_specific_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int id: Product id (required)
        :return: ProductSingleton
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Keyword arguments accepted by this generated endpoint.
        all_params = ['id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_specific" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_specific`")
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = []
        return self.api_client.call_api('/products/{id}', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ProductSingleton',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def update(self, id, **kwargs):
"""
Updates an existing Product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int id: Product id (required)
:param ProductRest body:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_with_http_info(id, **kwargs)
else:
(data) = self.update_with_http_info(id, **kwargs)
return data
    def update_with_http_info(self, id, **kwargs):
        """
        Updates an existing Product
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.update_with_http_info(id, callback=callback_function)
        :param callback function: The callback function
            for asynchronous request. (optional)
        :param int id: Product id (required)
        :param ProductRest body:
        :return: None
        """
        # Keyword arguments accepted by this generated endpoint.
        all_params = ['id', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `update`")
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = []
        # PUT /products/{id}; the server returns no body (response_type None).
        return self.api_client.call_api('/products/{id}', 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type=None,
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
| 38.0134 | 105 | 0.547149 |
eb9f75ea37dc261d04dd1cf21e12dbc61512bb09 | 886 | py | Python | tests/test_plugin_oldlivestream.py | NghiemTrung/livecli | 6a21b1b144b045963b6d1db8d4d8dc8471b62737 | [
"BSD-2-Clause"
] | 1 | 2019-12-04T11:54:52.000Z | 2019-12-04T11:54:52.000Z | tests/test_plugin_oldlivestream.py | NghiemTrung/livecli | 6a21b1b144b045963b6d1db8d4d8dc8471b62737 | [
"BSD-2-Clause"
] | null | null | null | tests/test_plugin_oldlivestream.py | NghiemTrung/livecli | 6a21b1b144b045963b6d1db8d4d8dc8471b62737 | [
"BSD-2-Clause"
] | null | null | null | import unittest
from livecli.plugins.oldlivestream import OldLivestream
class TestPluginOldLivestream(unittest.TestCase):
    """URL-matching tests for the OldLivestream plugin."""

    def test_can_handle_url(self):
        # Embed/channel URLs on the cdn/original subdomains belong to this
        # plugin.
        should_match = [
            "https://cdn.livestream.com/embed/channel",
            "https://original.livestream.com/embed/channel",
            "https://original.livestream.com/channel",
        ]
        for url in should_match:
            self.assertTrue(OldLivestream.can_handle_url(url))
        # Bare hosts are rejected, and livestream.com itself is handled by the
        # other plugin (livestream.py).
        should_not_match = [
            "https://cdn.livestream.com",
            "https://original.livestream.com",
            "https://livestream.com",
            "https://www.livestream.com",
        ]
        for url in should_not_match:
            self.assertFalse(OldLivestream.can_handle_url(url))
61835aa31f935e567a3233ad70cb56e534c75a2c | 5,310 | py | Python | src/models/replay_buffer.py | rudyn2/tsad_v2 | 5ab48872b807b775e70465123a46b9bc9d7b6bbc | [
"MIT"
] | null | null | null | src/models/replay_buffer.py | rudyn2/tsad_v2 | 5ab48872b807b775e70465123a46b9bc9d7b6bbc | [
"MIT"
] | null | null | null | src/models/replay_buffer.py | rudyn2/tsad_v2 | 5ab48872b807b775e70465123a46b9bc9d7b6bbc | [
"MIT"
] | null | null | null | import numpy as np
import torch
import random
from collections import deque, namedtuple
def batch_to_torch(batch, device):
    """Convert every numpy array in *batch* to a torch tensor on *device*.

    Returns a new dict with the same keys; transfers use non_blocking=True so
    host-to-device copies can overlap when pinned memory is used.
    """
    converted = {}
    for key, array in batch.items():
        converted[key] = torch.from_numpy(array).to(device=device, non_blocking=True)
    return converted
class ReplayBuffer(object):
    """FIFO experience-replay buffer of (s, s', a, r, done, hlc) transitions.

    Each field is stored as a float32 numpy array with a leading batch axis of
    size 1, so sampling can simply concatenate the stored arrays.
    """

    def __init__(self, max_size):
        self._memory = deque([], maxlen=max_size)
        self._max_size = max_size
        # Template returned (as a copy) by sample() when the buffer is empty;
        # its key order also fixes the field order of the Transition tuple.
        self._empty_transition = {
            "observations": np.array([]),
            "next_observations": np.array([]),
            "actions": np.array([]),
            "rewards": np.array([]),
            "dones": np.array([]),
            "hlcs": np.array([]),
        }
        self._Transition = namedtuple(
            "Transition", tuple(self._empty_transition.keys())
        )
        self._total_steps = 0

    def empty(self):
        """Drop every stored transition (total_steps is left untouched)."""
        self._memory.clear()

    def push(self, *args):
        """Append one Transition built from *args* (field order as above)."""
        self._memory.append(self._Transition(*args))

    def __len__(self):
        return len(self._memory)

    def add_sample(self, observation, action, reward, next_observation, done, hlc):
        """Store one transition, promoting each field to a batched float32 array."""
        self.push(
            np.array(observation, dtype=np.float32)[np.newaxis, :],
            np.array(next_observation, dtype=np.float32)[np.newaxis, :],
            np.array(action, dtype=np.float32)[np.newaxis, :],
            np.array([reward], dtype=np.float32),
            np.array([done], dtype=np.float32),
            np.array([hlc], dtype=np.float32),
        )
        self._total_steps += 1

    def add_traj(self, observations, actions, rewards, next_observations, dones, hlcs):
        """Store a whole trajectory element-wise."""
        for o, a, r, no, d, h in zip(observations, actions, rewards,
                                     next_observations, dones, hlcs):
            self.add_sample(o, a, r, no, d, h)

    def sample(self, batch_size):
        """Return a dict of stacked arrays for up to *batch_size* random transitions.

        Bug fix: the empty-buffer path used to return the shared
        ``_empty_transition`` dict itself, so a caller mutating the result
        corrupted every subsequent empty sample. A fresh copy is returned now.
        """
        size = min(len(self), batch_size)
        if size == 0:
            return dict(self._empty_transition)
        batch = random.sample(self._memory, size)
        batch = self._Transition(*zip(*batch))
        return self._unpack(batch)

    def _unpack(self, sample):
        """Concatenate each field of a Transition-of-tuples along the batch axis."""
        unpacked = {}
        for name, value in sample._asdict().items():
            unpacked[name] = np.concatenate(value, axis=0)
        return unpacked

    def torch_sample(self, batch_size, device):
        """Sample a batch and move it to *device* as torch tensors."""
        return batch_to_torch(self.sample(batch_size), device)

    def sample_generator(self, batch_size, n_batchs=None):
        """Yield random batches; endless when *n_batchs* is None."""
        i = 0
        while n_batchs is None or i < n_batchs:
            yield self.sample(batch_size)
            i += 1

    def torch_sample_generator(self, batch_size, device, n_batchs=None):
        """Yield random batches converted to torch tensors on *device*."""
        for batch in self.sample_generator(batch_size, n_batchs):
            yield batch_to_torch(batch, device)

    @property
    def total_steps(self):
        """Number of transitions ever added (not capped by max_size)."""
        return self._total_steps
class ReplayBufferHLC(object):
    """A set of per-HLC (high-level command) replay buffers behind one interface.

    Each value in *hlcs* gets its own ReplayBuffer; transitions are routed by
    their hlc and samples are drawn from every sub-buffer and merged.
    """

    def __init__(self, max_size, hlcs=(0, 1, 2, 3)):
        # Sub-buffers are keyed by the *string* form of the hlc value.
        self._buffers = {str(hlc): ReplayBuffer(max_size) for hlc in hlcs}
        self._hlcs = hlcs

    def __len__(self):
        return int(np.sum([len(buffer) for buffer in self._buffers.values()]))

    def add_sample(self, observation, action, reward, next_observation, done, hlc):
        """Route one transition to the buffer of its *hlc*."""
        self._buffers[str(hlc)].add_sample(
            observation,
            action,
            reward,
            next_observation,
            done,
            hlc,
        )

    def add_traj(self, observations, actions, rewards, next_observations, dones, hlcs):
        """Split a trajectory by hlc and store each slice in its sub-buffer.

        Bug fix: the buffers are keyed by ``str(hlc)`` but the old code
        compared the numeric ``hlcs`` array against those *string* keys
        (``hlcs == i`` with ``i = str(hlc)``), so the boolean masks never
        selected anything and the trajectory was silently dropped. Compare
        against the numeric hlc values instead.
        """
        observations = np.asarray(observations)
        actions = np.asarray(actions)
        rewards = np.asarray(rewards)
        next_observations = np.asarray(next_observations)
        dones = np.asarray(dones)
        hlcs = np.asarray(hlcs)
        for hlc in self._hlcs:
            mask = hlcs == hlc
            self._buffers[str(hlc)].add_traj(
                observations[mask],
                actions[mask],
                rewards[mask],
                next_observations[mask],
                dones[mask],
                hlcs[mask],
            )

    def sample(self, batch_size):
        """Sample up to *batch_size* transitions from every sub-buffer and merge."""
        samples = {}
        for buffer in self._buffers.values():
            for key, value in buffer.sample(batch_size).items():
                if key not in samples or len(samples[key].shape) == 0 or samples[key].shape[0] == 0:
                    # First (or so-far empty) occurrence of this field.
                    samples[key] = value
                elif len(value.shape) > 0:
                    # Concatenate batched fields from the remaining buffers.
                    samples[key] = np.concatenate((value, samples[key]), axis=0)
        return samples

    def torch_sample(self, batch_size, device):
        """Sample a merged batch and move it to *device* as torch tensors."""
        return batch_to_torch(self.sample(batch_size), device)

    def sample_generator(self, batch_size, n_batchs=None):
        """Yield merged random batches; endless when *n_batchs* is None."""
        i = 0
        while n_batchs is None or i < n_batchs:
            yield self.sample(batch_size)
            i += 1

    def torch_sample_generator(self, batch_size, device, n_batchs=None):
        """Yield merged random batches converted to torch tensors on *device*."""
        for batch in self.sample_generator(batch_size, n_batchs):
            yield batch_to_torch(batch, device)

    @property
    def total_steps(self):
        """Total transitions ever added across all sub-buffers.

        Bug fix: this was computed once in ``__init__`` (always 0) and never
        refreshed; it is now computed on demand.
        """
        return int(np.sum([buffer.total_steps for buffer in self._buffers.values()]))
1ce0e540c13b20af0031c47687a4edd0560bbb2d | 2,739 | py | Python | l-systems/model.py | complexbear/tinkering | 6ad260f065aa9938a760051816d916347271974d | [
"Apache-2.0"
] | null | null | null | l-systems/model.py | complexbear/tinkering | 6ad260f065aa9938a760051816d916347271974d | [
"Apache-2.0"
] | null | null | null | l-systems/model.py | complexbear/tinkering | 6ad260f065aa9938a760051816d916347271974d | [
"Apache-2.0"
] | null | null | null | '''
This module applies Rules to the document in iterative generations.
There are two modes in which this can be done
* Edge replacement
* Node replacement
Both generate the same document that describes the structure according
to the Rules used.
Node generation may be easier to use in the renderer to keep track of
which generation produced a given piece of the structure. This can be
then used to vary the thickness of branches between generations.
'''
import logging
from rules import *
class EdgeGenerate(object):
    """Edge-replacement expansion: each generation rewrites the whole document
    string by applying the Rules directly to its symbols."""

    logger = logging.getLogger('EdgeGenerate')

    def _applyRules(self, rules, doc, generation):
        # NOTE(review): `generation` is unused here; presumably kept for
        # signature symmetry with the node-based generator -- confirm before
        # removing.
        newDoc = ''
        idx = 0
        while idx < len(doc):
            ruleMatched = False
            for rule in rules:
                # rule.apply() returns (chars_consumed, replacement_text);
                # a falsy `consumed` means the rule did not match at `idx`.
                consumed, ruleDoc = rule.apply(doc[idx:])
                if consumed:
                    idx += consumed
                    ruleMatched = True
                    newDoc += ruleDoc
            # NOTE(review): there is no `break` after a match, so later rules
            # in the same pass are tried at the *advanced* index -- confirm
            # this multi-rule-per-position behaviour is intended.
            if not ruleMatched:
                # No rule consumed anything: copy the symbol through unchanged.
                newDoc += doc[idx]
                idx += 1
        return newDoc

    def __call__(self, program, generations):
        """Expand program.initiator for `generations` passes and return the
        resulting document string."""
        doc = program.initiator
        self.logger.debug('generation %s: %s' % (0, doc))
        for n in range(generations):
            doc = self._applyRules(program.rules, doc, n)
            self.logger.debug('generation %s: %s' %(n+1, doc))
        self.logger.debug('doc: %s' %doc)
        return doc
# A tree node of the expanded document: `data` is a list of symbols and/or
# nested Nodes; `generation` is the generation index that produced it.
Node = namedtuple('Node', 'data generation')
class NodeGenerate(object):
    """Node-replacement expansion: the document is kept as a tree of Node
    records so every piece remembers the generation that produced it (useful
    e.g. for varying branch thickness per generation when rendering)."""

    logger = logging.getLogger('NodeGenerate')

    def _applyRules(self, rules, node):
        """Return a new Node with the rules applied to node.data, recursing
        into nested Nodes."""
        newData = []
        idx = 0
        self.logger.debug('apply rule: %s' %node.data)
        while idx < len(node.data):
            if type(node.data[idx]) == Node:
                # Nested node: recurse; it keeps its own generation tag.
                subNode = node.data[idx]
                newData.append(self._applyRules(rules, subNode))
                idx += 1
            else:
                ruleMatched = False
                for rule in rules:
                    # rule.apply() returns (chars_consumed, replacement_text).
                    consumed, ruleDoc = rule.apply(node.data[idx])
                    if consumed:
                        idx += consumed
                        ruleMatched = True
                        # Rule output becomes a child one generation deeper.
                        newData.append(Node(ruleDoc, node.generation+1))
                if not ruleMatched:
                    # Unmatched symbol is carried through unchanged.
                    newData.append(node.data[idx])
                    idx += 1
        return Node(newData, node.generation)

    def __call__(self, program, generations):
        root = Node(list(program.initiator), 1)
        for n in range(generations):
            self.logger.info('generation %s' %n)
            root = self._applyRules(program.rules, root)
        # The initiator symbols are prepended to the expanded tree;
        # NOTE(review): confirm this duplication of the axiom is intentional.
        return Node(list(program.initiator) + [root], 1)
| 32.223529 | 74 | 0.557138 |
01d064a995afa8d3edb007b012298a64611351b5 | 3,028 | py | Python | nettrix/benchmarks/bert/implementations/implementation_closed/convert_tf_checkpoint.py | CaoZhongZ/training_results_v1.0 | 8200377f425ae24b6ed6c2816b9273aab0996d43 | [
"Apache-2.0"
] | 27 | 2021-07-01T00:34:52.000Z | 2022-03-29T08:49:53.000Z | nettrix/benchmarks/bert/implementations/implementation_closed/convert_tf_checkpoint.py | CaoZhongZ/training_results_v1.0 | 8200377f425ae24b6ed6c2816b9273aab0996d43 | [
"Apache-2.0"
] | 21 | 2021-08-31T08:34:50.000Z | 2022-03-17T11:42:10.000Z | nettrix/benchmarks/bert/implementations/implementation_closed/convert_tf_checkpoint.py | CaoZhongZ/training_results_v1.0 | 8200377f425ae24b6ed6c2816b9273aab0996d43 | [
"Apache-2.0"
] | 39 | 2021-07-02T00:46:14.000Z | 2022-03-13T16:59:55.000Z | # Copyright (c) 2019 NVIDIA CORPORATION. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import argparse
from modeling import BertForPretraining, BertConfig
def parse_arguments():
    """Build and parse the command-line arguments for the conversion script."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--bert_model",
        type=str,
        default="bert-large-uncased",
        help="Bert pre-trained model selected in the list: bert-base-uncased, "
             "bert-large-uncased, bert-base-cased, bert-base-multilingual, bert-base-chinese.",
    )
    parser.add_argument(
        "--tf_checkpoint",
        type=str,
        default="/google_bert_data",
        help="Path to directory containing TF checkpoint",
    )
    parser.add_argument(
        "--bert_config_path",
        type=str,
        default="/workspace/phase1",
        help="Path bert_config.json is located in",
    )
    parser.add_argument(
        "--output_checkpoint",
        type=str,
        default="./checkpoint.pt",
        help="Path to output PyT checkpoint",
    )
    return parser.parse_args()
def prepare_model(args, device):
    """Build a BertForPretraining model with weights loaded from the TF
    checkpoint referenced by *args*.

    NOTE(review): *device* is accepted but not used in this function --
    confirm the caller is expected to place the model itself.
    """
    # Prepare model
    config = BertConfig.from_json_file(args.bert_config_path)
    # Padding for divisibility by 8
    if config.vocab_size % 8 != 0:
        config.vocab_size += 8 - (config.vocab_size % 8)
        print('padded vocab size to: {}'.format(config.vocab_size))
    # Set some options that the config file is expected to have (but don't need to be set properly
    # at this point)
    config.pad = False
    config.unpad = False
    config.dense_seq_output = False
    config.fused_mha = False
    config.fused_gelu_bias = False
    config.fuse_qkv = False
    config.fuse_scale = False
    config.fuse_mask = False
    config.fuse_dropout = False
    config.apex_softmax = False
    config.enable_stream = False
    config.unpad_fmha = False
    config.fused_dropout_add = False
    # NOTE(review): given the hard-coded False values just above, only the
    # middle branch (pad == False -> enable_stream) can fire here -- confirm
    # these derived settings are still wanted in this conversion script.
    if config.fuse_mask == True: config.apex_softmax = True
    if config.pad == False: config.enable_stream = True
    if config.unpad == True: config.fused_mha = False
    #Load from TF checkpoint
    model = BertForPretraining.from_pretrained(args.tf_checkpoint, from_tf=True, config=config)
    return model
def main():
    """Convert the TF checkpoint named on the command line into a PyTorch
    checkpoint holding the model's state_dict under the 'model' key."""
    args = parse_arguments()
    # NOTE(review): `device` is passed to prepare_model but unused there --
    # confirm whether the model is meant to be moved to CUDA before saving.
    device = torch.device("cuda")
    model = prepare_model(args, device)
    torch.save({'model' : model.state_dict() }, args.output_checkpoint)
if __name__ == "__main__":
main()
| 35.623529 | 111 | 0.667437 |
1263d67e76e8dd014422fe7ab4ab2d65bf80fa25 | 15,096 | py | Python | Lib/site-packages/pycparser/c_generator.py | ldepaula3/TextAnalyticsApp | cd87f2017cf301266a82355d4c781de67b9c6ac9 | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/pycparser/c_generator.py | ldepaula3/TextAnalyticsApp | cd87f2017cf301266a82355d4c781de67b9c6ac9 | [
"bzip2-1.0.6"
] | null | null | null | Lib/site-packages/pycparser/c_generator.py | ldepaula3/TextAnalyticsApp | cd87f2017cf301266a82355d4c781de67b9c6ac9 | [
"bzip2-1.0.6"
] | null | null | null | #------------------------------------------------------------------------------
# pycparser: c_generator.py
#
# C code generator from pycparser AST nodes.
#
# Eli Bendersky [https://eli.thegreenplace.net/]
# License: BSD
#------------------------------------------------------------------------------
from . import c_ast
class CGenerator(object):
""" Uses the same visitor pattern as c_ast.NodeVisitor, but modified to
return a value from each visit method, using string accumulation in
generic_visit.
"""
    def __init__(self):
        """Create a generator; emitted statements start at indentation 0."""
        # Statements start with indentation of self.indent_level spaces, using
        # the _make_indent method
        #
        self.indent_level: int = 0
def _make_indent(self):
return ' ' * self.indent_level
def visit(self, node):
method = 'visit_' + node.__class__.__name__
return getattr(self, method, self.generic_visit)(node)
def generic_visit(self, node):
#~ print('generic:', type(node))
if node is None:
return ''
else:
return ''.join(self.visit(c) for c_name, c in node.children())
    def visit_Constant(self, n):
        # Constant nodes keep their literal source spelling in `value`.
        return n.value
    def visit_ID(self, n):
        # Identifiers are emitted as their name, verbatim.
        return n.name
def visit_Pragma(self, n):
ret = '#pragma'
if n.string:
ret += ' ' + n.string
return ret
    def visit_ArrayRef(self, n):
        # The array expression is parenthesized unless it is a simple node.
        arrref = self._parenthesize_unless_simple(n.name)
        return arrref + '[' + self.visit(n.subscript) + ']'
    def visit_StructRef(self, n):
        # n.type carries the access token itself (presumably '.' or '->' --
        # confirm against the c_ast definition) and is emitted verbatim.
        sref = self._parenthesize_unless_simple(n.name)
        return sref + n.type + self.visit(n.field)
    def visit_FuncCall(self, n):
        # The callee is parenthesized unless simple. A call with no arguments
        # has n.args None, which self.visit renders as '' via generic_visit.
        fref = self._parenthesize_unless_simple(n.name)
        return fref + '(' + self.visit(n.args) + ')'
def visit_UnaryOp(self, n):
operand = self._parenthesize_unless_simple(n.expr)
if n.op == 'p++':
return '%s++' % operand
elif n.op == 'p--':
return '%s--' % operand
elif n.op == 'sizeof':
# Always parenthesize the argument of sizeof since it can be
# a name.
return 'sizeof(%s)' % self.visit(n.expr)
else:
return '%s%s' % (n.op, operand)
    def visit_BinaryOp(self, n):
        # Each side is parenthesized unless it is a "simple" node; this
        # appears to over-parenthesize rather than track C operator
        # precedence -- confirm against _is_simple_node.
        lval_str = self._parenthesize_if(n.left,
                            lambda d: not self._is_simple_node(d))
        rval_str = self._parenthesize_if(n.right,
                            lambda d: not self._is_simple_node(d))
        return '%s %s %s' % (lval_str, n.op, rval_str)
    def visit_Assignment(self, n):
        # A nested assignment on the right-hand side is parenthesized so the
        # generated C reads unambiguously, e.g. `a = (b = c)`.
        rval_str = self._parenthesize_if(
            n.rvalue,
            lambda n: isinstance(n, c_ast.Assignment))
        return '%s %s %s' % (self.visit(n.lvalue), n.op, rval_str)
def visit_IdentifierType(self, n):
return ' '.join(n.names)
def _visit_expr(self, n):
if isinstance(n, c_ast.InitList):
return '{' + self.visit(n) + '}'
elif isinstance(n, c_ast.ExprList):
return '(' + self.visit(n) + ')'
else:
return self.visit(n)
def visit_Decl(self, n, no_type=False):
# no_type is used when a Decl is part of a DeclList, where the type is
# explicitly only for the first declaration in a list.
#
s = n.name if no_type else self._generate_decl(n)
if n.bitsize: s += ' : ' + self.visit(n.bitsize)
if n.init:
s += ' = ' + self._visit_expr(n.init)
return s
def visit_DeclList(self, n):
s = self.visit(n.decls[0])
if len(n.decls) > 1:
s += ', ' + ', '.join(self.visit_Decl(decl, no_type=True)
for decl in n.decls[1:])
return s
def visit_Typedef(self, n):
s = ''
if n.storage: s += ' '.join(n.storage) + ' '
s += self._generate_type(n.type)
return s
def visit_Cast(self, n):
s = '(' + self._generate_type(n.to_type) + ')'
return s + ' ' + self._parenthesize_unless_simple(n.expr)
def visit_ExprList(self, n):
visited_subexprs = []
for expr in n.exprs:
visited_subexprs.append(self._visit_expr(expr))
return ', '.join(visited_subexprs)
def visit_InitList(self, n):
visited_subexprs = []
for expr in n.exprs:
visited_subexprs.append(self._visit_expr(expr))
return ', '.join(visited_subexprs)
def visit_Enum(self, n):
return self._generate_struct_union_enum(n, name='enum')
def visit_Enumerator(self, n):
if not n.value:
return '{indent}{name},\n'.format(
indent=self._make_indent(),
name=n.name,
)
else:
return '{indent}{name} = {value},\n'.format(
indent=self._make_indent(),
name=n.name,
value=self.visit(n.value),
)
def visit_FuncDef(self, n):
decl = self.visit(n.decl)
self.indent_level = 0
body = self.visit(n.body)
if n.param_decls:
knrdecls = ';\n'.join(self.visit(p) for p in n.param_decls)
return decl + '\n' + knrdecls + ';\n' + body + '\n'
else:
return decl + '\n' + body + '\n'
def visit_FileAST(self, n):
s = ''
for ext in n.ext:
if isinstance(ext, c_ast.FuncDef):
s += self.visit(ext)
elif isinstance(ext, c_ast.Pragma):
s += self.visit(ext) + '\n'
else:
s += self.visit(ext) + ';\n'
return s
def visit_Compound(self, n):
s = self._make_indent() + '{\n'
self.indent_level += 2
if n.block_items:
s += ''.join(self._generate_stmt(stmt) for stmt in n.block_items)
self.indent_level -= 2
s += self._make_indent() + '}\n'
return s
def visit_CompoundLiteral(self, n):
return '(' + self.visit(n.type) + '){' + self.visit(n.init) + '}'
def visit_EmptyStatement(self, n):
return ';'
def visit_ParamList(self, n):
return ', '.join(self.visit(param) for param in n.params)
def visit_Return(self, n):
s = 'return'
if n.expr: s += ' ' + self.visit(n.expr)
return s + ';'
def visit_Break(self, n):
return 'break;'
def visit_Continue(self, n):
return 'continue;'
def visit_TernaryOp(self, n):
s = '(' + self._visit_expr(n.cond) + ') ? '
s += '(' + self._visit_expr(n.iftrue) + ') : '
s += '(' + self._visit_expr(n.iffalse) + ')'
return s
def visit_If(self, n):
s = 'if ('
if n.cond: s += self.visit(n.cond)
s += ')\n'
s += self._generate_stmt(n.iftrue, add_indent=True)
if n.iffalse:
s += self._make_indent() + 'else\n'
s += self._generate_stmt(n.iffalse, add_indent=True)
return s
def visit_For(self, n):
s = 'for ('
if n.init: s += self.visit(n.init)
s += ';'
if n.cond: s += ' ' + self.visit(n.cond)
s += ';'
if n.next: s += ' ' + self.visit(n.next)
s += ')\n'
s += self._generate_stmt(n.stmt, add_indent=True)
return s
def visit_While(self, n):
s = 'while ('
if n.cond: s += self.visit(n.cond)
s += ')\n'
s += self._generate_stmt(n.stmt, add_indent=True)
return s
def visit_DoWhile(self, n):
s = 'do\n'
s += self._generate_stmt(n.stmt, add_indent=True)
s += self._make_indent() + 'while ('
if n.cond: s += self.visit(n.cond)
s += ');'
return s
def visit_Switch(self, n):
s = 'switch (' + self.visit(n.cond) + ')\n'
s += self._generate_stmt(n.stmt, add_indent=True)
return s
def visit_Case(self, n):
s = 'case ' + self.visit(n.expr) + ':\n'
for stmt in n.stmts:
s += self._generate_stmt(stmt, add_indent=True)
return s
def visit_Default(self, n):
s = 'default:\n'
for stmt in n.stmts:
s += self._generate_stmt(stmt, add_indent=True)
return s
def visit_Label(self, n):
return n.name + ':\n' + self._generate_stmt(n.stmt)
def visit_Goto(self, n):
return 'goto ' + n.name + ';'
def visit_EllipsisParam(self, n):
return '...'
def visit_Struct(self, n):
return self._generate_struct_union_enum(n, 'struct')
def visit_Typename(self, n):
return self._generate_type(n.type)
def visit_Union(self, n):
return self._generate_struct_union_enum(n, 'union')
def visit_NamedInitializer(self, n):
s = ''
for name in n.name:
if isinstance(name, c_ast.ID):
s += '.' + name.name
else:
s += '[' + self.visit(name) + ']'
s += ' = ' + self._visit_expr(n.expr)
return s
def visit_FuncDecl(self, n):
return self._generate_type(n)
    def _generate_struct_union_enum(self, n, name):
        """ Generates code for structs, unions, and enums. name should be
            'struct', 'union', or 'enum'.
        """
        if name in ('struct', 'union'):
            members = n.decls
            body_function = self._generate_struct_union_body
        else:
            assert name == 'enum'
            # n.values may be None for a forward declaration like 'enum E;'.
            members = None if n.values is None else n.values.enumerators
            body_function = self._generate_enum_body
        # n.name may be None for an anonymous struct/union/enum.
        s = name + ' ' + (n.name or '')
        if members is not None:
            # None means no members
            # Empty sequence means an empty list of members
            s += '\n'
            s += self._make_indent()
            self.indent_level += 2
            s += '{\n'
            s += body_function(members)
            self.indent_level -= 2
            s += self._make_indent() + '}'
        return s
def _generate_struct_union_body(self, members):
return ''.join(self._generate_stmt(decl) for decl in members)
    def _generate_enum_body(self, members):
        """ Emit the enumerator list of an enum body.  Presumably each
            visited enumerator ends with ',\\n' (visitor not shown here);
            the slice drops the trailing comma before re-adding a newline.
        """
        # `[:-2] + '\n'` removes the final `,` from the enumerator list
        return ''.join(self.visit(value) for value in members)[:-2] + '\n'
    def _generate_stmt(self, n, add_indent=False):
        """ Generation from a statement node. This method exists as a wrapper
            for individual visit_* methods to handle different treatment of
            some statements in this context.
        """
        typ = type(n)
        # Temporarily bump the indent just to compute this statement's
        # leading indentation; the level itself is restored immediately.
        if add_indent: self.indent_level += 2
        indent = self._make_indent()
        if add_indent: self.indent_level -= 2
        if typ in (
                c_ast.Decl, c_ast.Assignment, c_ast.Cast, c_ast.UnaryOp,
                c_ast.BinaryOp, c_ast.TernaryOp, c_ast.FuncCall, c_ast.ArrayRef,
                c_ast.StructRef, c_ast.Constant, c_ast.ID, c_ast.Typedef,
                c_ast.ExprList):
            # These can also appear in an expression context so no semicolon
            # is added to them automatically
            #
            return indent + self.visit(n) + ';\n'
        elif typ in (c_ast.Compound,):
            # No extra indentation required before the opening brace of a
            # compound - because it consists of multiple lines it has to
            # compute its own indentation.
            #
            return self.visit(n)
        else:
            # All other statements get their own line and newline.
            return indent + self.visit(n) + '\n'
    def _generate_decl(self, n):
        """ Generation from a Decl node.
        """
        s = ''
        # Function specifiers (e.g. 'inline') come first, then storage
        # classes ('static', 'extern', ...), then the declarator/type.
        if n.funcspec: s = ' '.join(n.funcspec) + ' '
        if n.storage: s += ' '.join(n.storage) + ' '
        s += self._generate_type(n.type)
        return s
    def _generate_type(self, n, modifiers=[]):
        """ Recursive generation from a type node. n is the type node.
            modifiers collects the PtrDecl, ArrayDecl and FuncDecl modifiers
            encountered on the way down to a TypeDecl, to allow proper
            generation from it.

            NOTE(review): the mutable default `modifiers=[]` is safe here
            only because it is never mutated in place (recursion builds new
            lists via `modifiers + [n]`).
        """
        typ = type(n)
        #~ print(n, modifiers)
        if typ == c_ast.TypeDecl:
            s = ''
            if n.quals: s += ' '.join(n.quals) + ' '
            s += self.visit(n.type)
            nstr = n.declname if n.declname else ''
            # Resolve modifiers.
            # Wrap in parens to distinguish pointer to array and pointer to
            # function syntax.
            #
            for i, modifier in enumerate(modifiers):
                if isinstance(modifier, c_ast.ArrayDecl):
                    if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)):
                        nstr = '(' + nstr + ')'
                    nstr += '[' + self.visit(modifier.dim) + ']'
                elif isinstance(modifier, c_ast.FuncDecl):
                    if (i != 0 and isinstance(modifiers[i - 1], c_ast.PtrDecl)):
                        nstr = '(' + nstr + ')'
                    nstr += '(' + self.visit(modifier.args) + ')'
                elif isinstance(modifier, c_ast.PtrDecl):
                    # Pointer qualifiers ('const', 'volatile') go between
                    # the '*' and the declarator name.
                    if modifier.quals:
                        nstr = '* %s %s' % (' '.join(modifier.quals), nstr)
                    else:
                        nstr = '*' + nstr
            if nstr: s += ' ' + nstr
            return s
        elif typ == c_ast.Decl:
            return self._generate_decl(n.type)
        elif typ == c_ast.Typename:
            return self._generate_type(n.type)
        elif typ == c_ast.IdentifierType:
            return ' '.join(n.names) + ' '
        elif typ in (c_ast.ArrayDecl, c_ast.PtrDecl, c_ast.FuncDecl):
            # Accumulate the modifier and keep descending toward TypeDecl.
            return self._generate_type(n.type, modifiers + [n])
        else:
            return self.visit(n)
def _parenthesize_if(self, n, condition):
""" Visits 'n' and returns its string representation, parenthesized
if the condition function applied to the node returns True.
"""
s = self._visit_expr(n)
if condition(n):
return '(' + s + ')'
else:
return s
    def _parenthesize_unless_simple(self, n):
        """ Common use case for _parenthesize_if
        """
        # Wrap in parens unless the node is "simple" (binds tighter than
        # any operator) -- see _is_simple_node.
        return self._parenthesize_if(n, lambda d: not self._is_simple_node(d))
    def _is_simple_node(self, n):
        """ Returns True for nodes that are "simple" - i.e. nodes that always
            have higher precedence than operators.
        """
        # Constants, identifiers, array refs, struct refs and calls never
        # need defensive parenthesization.
        return isinstance(n, (c_ast.Constant, c_ast.ID, c_ast.ArrayRef,
                              c_ast.StructRef, c_ast.FuncCall))
| 35.188811 | 81 | 0.514043 |
6bc441464e52d38ef20c9c50ddb7196f7804c634 | 941 | py | Python | hooks/extensions.py | n3storm/django-hooks | 26ea2150c9be110e90b9ee60fbfd1065ac30ab1d | [
"MIT"
] | 13 | 2015-04-24T01:30:08.000Z | 2022-03-31T04:34:44.000Z | hooks/extensions.py | n3storm/django-hooks | 26ea2150c9be110e90b9ee60fbfd1065ac30ab1d | [
"MIT"
] | 7 | 2015-05-06T09:26:40.000Z | 2021-01-02T12:01:20.000Z | hooks/extensions.py | n3storm/django-hooks | 26ea2150c9be110e90b9ee60fbfd1065ac30ab1d | [
"MIT"
] | 2 | 2017-11-18T17:31:47.000Z | 2019-05-21T08:58:46.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
# Public API of this module.
__all__ = [
    'autodiscover',
    'apps',
    'urls'
]
# Module-level registries (re)populated by autodiscover(): dotted import
# paths of the discovered extension app-configs and url modules.
apps = []
urls = []
def autodiscover(import_path, app_config='Extension'):
    """Scan the package at *import_path* (dotted, resolved relative to the
    current working directory) and repopulate the module-level ``apps`` and
    ``urls`` lists.

    A sub-directory is registered as an app when it contains an ``apps.py``
    (yielding '<path>.<app>.apps.<app_config>'); its urls module is
    registered additionally when a ``urls.py`` is present.
    """
    global apps, urls
    extensions_dir = os.path.join(os.getcwd(), *import_path.split('.'))

    found_apps = []
    found_urls = []
    for app in os.listdir(extensions_dir):
        app_import_path = '.'.join((import_path, app))
        if not os.path.isfile(os.path.join(extensions_dir, app, 'apps.py')):
            continue
        found_apps.append('.'.join((app_import_path, 'apps', app_config)))
        if os.path.isfile(os.path.join(extensions_dir, app, 'urls.py')):
            found_urls.append('.'.join((app_import_path, 'urls')))

    apps = found_apps
    urls = found_urls
| 20.456522 | 76 | 0.585547 |
281d872dcff27854a25eac22ee09d0e140b3ef0a | 4,902 | py | Python | zerver/management/commands/send_to_email_mirror.py | alexagillman/zulip | 35c5e00f9d8443a435fb93e0d809980ce2e3285e | [
"Apache-2.0"
] | null | null | null | zerver/management/commands/send_to_email_mirror.py | alexagillman/zulip | 35c5e00f9d8443a435fb93e0d809980ce2e3285e | [
"Apache-2.0"
] | null | null | null | zerver/management/commands/send_to_email_mirror.py | alexagillman/zulip | 35c5e00f9d8443a435fb93e0d809980ce2e3285e | [
"Apache-2.0"
] | null | null | null | import email
import os
from email.message import Message
from email.mime.text import MIMEText
from typing import Dict, Optional
import ujson
from django.conf import settings
from django.core.management.base import CommandParser
from zerver.lib.email_mirror import mirror_email_message
from zerver.lib.email_mirror_helpers import encode_email_address
from zerver.lib.management import CommandError, ZulipBaseCommand
from zerver.models import Realm, get_realm, get_stream
# This command loads an email from a specified file and sends it
# to the email mirror. Simple emails can be passed in a JSON file,
# Look at zerver/tests/fixtures/email/1.json for an example of how
# it should look. You can also pass a file which has the raw email,
# for example by writing an email.message.Message type object
# to a file using as_string() or as_bytes() methods, or copy-pasting
# the content of "Show original" on an email in Gmail.
# See zerver/tests/fixtures/email/1.txt for a very simple example,
# but anything that the message_from_binary_file function
# from the email library can parse should work.
# Value of the TO: header doesn't matter, as it is overriden
# by the command in order for the email to be sent to the correct stream.
class Command(ZulipBaseCommand):
    # Loads an email from a fixture file (JSON or raw RFC822 text) and
    # feeds it to Zulip's email mirror as if it were addressed to a stream.
    help = """
Send specified email from a fixture file to the email mirror
Example:
./manage.py send_to_email_mirror --fixture=zerver/tests/fixtures/emails/filename
"""
    def add_arguments(self, parser: CommandParser) -> None:
        """Register the --fixture/--stream options plus the shared realm args."""
        parser.add_argument('-f', '--fixture',
                            dest='fixture',
                            type=str,
                            help='The path to the email message you\'d like to send '
                                 'to the email mirror.\n'
                                 'Accepted formats: json or raw email file. '
                                 'See zerver/tests/fixtures/email/ for examples')
        parser.add_argument('-s', '--stream',
                            dest='stream',
                            type=str,
                            help='The name of the stream to which you\'d like to send '
                                 'the message. Default: Denmark')
        self.add_realm_args(parser, help="Specify which realm to connect to; default is zulip")
    def handle(self, **options: Optional[str]) -> None:
        """Entry point: resolve option defaults, parse the fixture, rewrite
        its recipients to the stream's mirror address, and deliver it.
        """
        if options['fixture'] is None:
            self.print_help('./manage.py', 'send_to_email_mirror')
            raise CommandError
        if options['stream'] is None:
            stream = "Denmark"
        else:
            stream = options['stream']
        realm = self.get_realm(options)
        if realm is None:
            realm = get_realm("zulip")
        # Fixture paths are interpreted relative to the deployment root.
        full_fixture_path = os.path.join(settings.DEPLOY_ROOT, options['fixture'])
        # parse the input email into Message type and prepare to process_message() it
        message = self._parse_email_fixture(full_fixture_path)
        self._prepare_message(message, realm, stream)
        data = {}  # type: Dict[str, str]
        data['recipient'] = str(message['To'])  # Need str() here to avoid mypy throwing an error
        data['msg_text'] = message.as_string()
        mirror_email_message(data)
    def _does_fixture_path_exist(self, fixture_path: str) -> bool:
        """Return True if the fixture file exists on disk."""
        return os.path.exists(fixture_path)
    def _parse_email_json_fixture(self, fixture_path: str) -> Message:
        """Build a plain-text MIME message from a JSON fixture whose first
        list element supplies 'body', 'from' and 'subject'.
        """
        with open(fixture_path) as fp:
            json_content = ujson.load(fp)[0]
        message = MIMEText(json_content['body'])
        message['From'] = json_content['from']
        message['Subject'] = json_content['subject']
        return message
    def _parse_email_fixture(self, fixture_path: str) -> Message:
        """Dispatch on the fixture extension: '.json' fixtures go through
        _parse_email_json_fixture; anything else is parsed as a raw email.
        """
        if not self._does_fixture_path_exist(fixture_path):
            raise CommandError('Fixture {} does not exist'.format(fixture_path))
        if fixture_path.endswith('.json'):
            message = self._parse_email_json_fixture(fixture_path)
        else:
            with open(fixture_path, "rb") as fp:
                message = email.message_from_binary_file(fp)
        return message
    def _prepare_message(self, message: Message, realm: Realm, stream_name: str) -> None:
        """Rewrite every recipient-like header so the message is addressed
        to the given stream's email-mirror address.
        """
        stream = get_stream(stream_name, realm)
        # The block below ensures that the imported email message doesn't have any recipient-like
        # headers that are inconsistent with the recipient we want (the stream address).
        recipient_headers = ["X-Gm-Original-To", "Delivered-To", "Envelope-To",
                             "Resent-To", "Resent-CC", "CC"]
        for header in recipient_headers:
            if header in message:
                del message[header]
                message[header] = encode_email_address(stream)
        if 'To' in message:
            del message['To']
        message['To'] = encode_email_address(stream)
| 41.897436 | 97 | 0.645655 |
667e25a73c00d4df89b5e0d97f743fecb052ea78 | 2,042 | py | Python | setup.py | BigEd/py65 | 57d5e7191362006c1d6fa20662da3e4854f1b7c2 | [
"BSD-3-Clause"
] | 1 | 2021-04-04T09:55:04.000Z | 2021-04-04T09:55:04.000Z | setup.py | BigEd/py65 | 57d5e7191362006c1d6fa20662da3e4854f1b7c2 | [
"BSD-3-Clause"
] | null | null | null | setup.py | BigEd/py65 | 57d5e7191362006c1d6fa20662da3e4854f1b7c2 | [
"BSD-3-Clause"
] | null | null | null | __version__ = '0.10-dev'
import os
import sys
# Bail out early on unsupported interpreters with a readable message
# instead of a confusing failure later during install.
if sys.version_info[:2] < (2, 4):
    msg = ("Py65 requires Python 2.4 or better, you are attempting to "
           "install it using version %s. Please install with a "
           "supported version" % sys.version)
    sys.stderr.write(msg)
    sys.exit(1)
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
DESC = """\
Simulate 6502-based microcomputer systems in Python."""
CLASSIFIERS = [
    'Development Status :: 3 - Alpha',
    'Environment :: Console',
    'Intended Audience :: Developers',
    'Natural Language :: English',
    'Operating System :: POSIX',
    'Programming Language :: Assembly',
    'Topic :: Software Development :: Assemblers',
    'Topic :: Software Development :: Disassemblers',
    'Topic :: Software Development :: Debuggers',
    'Topic :: Software Development :: Embedded Systems',
    'Topic :: Software Development :: Interpreters',
    'Topic :: System :: Emulators',
    'Topic :: System :: Hardware'
]
# Package metadata; __version__ is defined at the top of this file.
setup(
    name = 'py65',
    version = __version__,
    license = 'License :: OSI Approved :: BSD License',
    url = 'http://github.com/mnaberez/py65',
    description = '6502 microprocessor simulation package',
    long_description= DESC,
    classifiers = CLASSIFIERS,
    author = "Mike Naberezny",
    author_email = "mike@naberezny.com",
    maintainer = "Mike Naberezny",
    maintainer_email = "mike@naberezny.com",
    package_dir = {'':'src'},
    packages = find_packages(os.path.join(here, 'src')),
    # put data files in egg 'doc' dir
    data_files=[ ('doc', [
        'CHANGES.txt',
        'LICENSE.txt',
        'README.markdown',
        'TODO.txt',
        ]
    )],
    install_requires = [],
    extras_require = {},
    tests_require = [],
    include_package_data = True,
    zip_safe = False,
    namespace_packages = ['py65'],
    test_suite = "py65.tests",
    entry_points = {
        'console_scripts': [
            'py65mon = py65.monitor:main',
        ],
    },
)
| 29.171429 | 71 | 0.62047 |
a7992632672da0168630fcf72a6b224079b74cbb | 1,304 | py | Python | porn-sites/stripchat.com/stripchat_com-download-for-windows.py | lbry-lab/dirty-scripts | e781ed3c3738c8b788f79592e1b46aedc1ae5cce | [
"Unlicense"
] | null | null | null | porn-sites/stripchat.com/stripchat_com-download-for-windows.py | lbry-lab/dirty-scripts | e781ed3c3738c8b788f79592e1b46aedc1ae5cce | [
"Unlicense"
] | null | null | null | porn-sites/stripchat.com/stripchat_com-download-for-windows.py | lbry-lab/dirty-scripts | e781ed3c3738c8b788f79592e1b46aedc1ae5cce | [
"Unlicense"
] | null | null | null | import requests
import re
from datetime import datetime
import os
import time
headers = {'User-Agent': 'Mozilla/5.0 (X11; OpenBSD i386) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36',
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
}
while True:
url = input(">>> ")
#print(url)
'''try:
r = requests.get(url, headers=headers).text
room_id = re.search("(\d+)_webp", r)[1]
star_id = re.search("zh\.stripchat\.com/([\w\-_]+)", r)[1]
except:
pass'''
#format_time = datetime.now().strftime("%Y-%m-%d %H_%M_%S")
cmd = 'start yt-dlp {url}'
#linux_cmd = '(streamlink https://b-hls-19.strpst.com/hls/{room_id}/master/{room_id}_auto.m3u8 best --output "{star_id} {format_time}-{star_id}.mp4"> /dev/null 2>&1 )&'
#tammarra_ 2022-02-08 19_19-_tammarra_.mp4
#print(cmd.format(room_id = room_id, star_id = star_id, format_time = format_time))
excute_cmd = cmd.format(url = url)
os.system(excute_cmd)
time.sleep(5)
#excute_cmd = linux_cmd.format(room_id = room_id, star_id = star_id, format_time = format_time)
#print(excute_cmd)
#print("excute_time: ", format_time, "\n")
| 42.064516 | 172 | 0.645706 |
75ccd24c10c2ecf4df25c72edd7edb5b13a7eedb | 1,750 | py | Python | 084/test_monopoly.py | bsamseth/project-euler | 60d70b117960f37411935bc18eab5bb2fca220e2 | [
"MIT"
] | null | null | null | 084/test_monopoly.py | bsamseth/project-euler | 60d70b117960f37411935bc18eab5bb2fca220e2 | [
"MIT"
] | null | null | null | 084/test_monopoly.py | bsamseth/project-euler | 60d70b117960f37411935bc18eab5bb2fca220e2 | [
"MIT"
] | null | null | null | from monopoly import *
def test_regmove():
    # A plain 5-step move from GO lands on R1 and records exactly one
    # visit on the whole board.
    game = Game()
    game.move(5)
    assert game.pos == squares['R1']
    assert game.board[game.pos] == 1
    assert sum(game.board) == 1
def test_chest_GO():
    # Put 'advance to GO' on top of the Community Chest deck, land on the
    # chest square, and verify the teleport: the player ends on GO and the
    # chest square itself records no visit.
    game = Game()
    game.move(1)
    assert game.pos == squares['A1']
    card = squares['GO']
    game.CC.remove(card)
    game.CC.appendleft(card)
    assert game.CC[0] == card
    game.move(1)
    assert game.pos == card
    assert game.board[game.pos] == 1
    assert game.board[squares['CC1']] == 0
def test_chest_JAIL():
    # Put 'go to JAIL' on top of the Community Chest deck and check that
    # drawing it moves the player to JAIL, with no visit recorded on the
    # chest square.
    game = Game()
    game.move(1)
    assert game.pos == squares['A1']
    card = squares['JAIL']
    game.CC.remove(card)
    game.CC.appendleft(card)
    assert game.CC[0] == card
    game.move(1)
    assert game.pos == card
    assert game.board[game.pos] == 1
    assert game.board[squares['CC1']] == 0
def test_chance_GO():
    # 'Advance to GO' on top of the Chance deck: the 7-step move draws it
    # and the player ends on GO with a single recorded visit.
    game = Game()
    card = squares['GO']
    game.CH.remove(card)
    game.CH.appendleft(card)
    assert game.CH[0] == card
    game.move(7)
    assert game.pos == card
    assert game.board[game.pos] == 1
    assert sum(game.board) == 1
def test_chance_next_R():
    # 'Go to next railroad' on top of the Chance deck: from square 36 the
    # next railroad wraps around to R1.
    game = Game()
    card = game.next_R
    game.CH.remove(card)
    game.CH.appendleft(card)
    assert game.CH[0] == card
    game.move(36)
    assert game.pos == squares['R1']
    assert game.board[game.pos] == 1
    assert sum(game.board) == 1
def test_chance_next_U():
    # 'Go to next utility' on top of the Chance deck: from square 36 the
    # next utility wraps around to U1.
    game = Game()
    card = game.next_U
    game.CH.remove(card)
    game.CH.appendleft(card)
    assert game.CH[0] == card
    game.move(36)
    assert game.pos == squares['U1']
    assert game.board[game.pos] == 1
    assert sum(game.board) == 1
def test_chance_back_3():
    # 'Go back 3 squares' on top of the Chance deck: drawn on the first
    # Chance square (7), the player steps back to T1.
    game = Game()
    card = game.back_3
    game.CH.remove(card)
    game.CH.appendleft(card)
    assert game.CH[0] == card
    game.move(7)
    assert game.pos == squares['T1']
    assert game.board[game.pos] == 1
    assert sum(game.board) == 1
d520a10f766d07576679ab8451fcaf38a9d8411e | 6,236 | py | Python | astroquery/utils/schema.py | HarrietAkot/astroquery | ebeb1dd1d0abeed161aecf8c064ada1194b1b6a0 | [
"BSD-3-Clause"
] | null | null | null | astroquery/utils/schema.py | HarrietAkot/astroquery | ebeb1dd1d0abeed161aecf8c064ada1194b1b6a0 | [
"BSD-3-Clause"
] | null | null | null | astroquery/utils/schema.py | HarrietAkot/astroquery | ebeb1dd1d0abeed161aecf8c064ada1194b1b6a0 | [
"BSD-3-Clause"
] | null | null | null | # Licensed under a 3-clause BSD style license - see LICENSE.rst
__version__ = '0.2.0'
class SchemaError(Exception):
    """Error raised when Schema validation fails.

    Carries two parallel message lists: ``autos`` (auto-generated
    messages) and ``errors`` (user-supplied messages).  User-supplied
    messages take precedence in the rendered ``code``.
    """

    def __init__(self, autos, errors):
        # Normalize both arguments to lists so `code` can treat them
        # uniformly; any non-list value is wrapped in a singleton list.
        self.autos = autos if type(autos) is list else [autos]
        self.errors = errors if type(errors) is list else [errors]
        Exception.__init__(self, self.code)

    @property
    def code(self):
        """Render the error text: user errors if any exist, otherwise the
        auto-generated ones, de-duplicated with order preserved.
        """
        def dedupe(items):
            seen = set()
            kept = []
            for item in items:
                if item not in seen:
                    seen.add(item)
                    kept.append(item)
            return kept

        user_msgs = dedupe(i for i in self.errors if i is not None)
        if user_msgs:
            return '\n'.join(user_msgs)
        auto_msgs = dedupe(i for i in self.autos if i is not None)
        return '\n'.join(auto_msgs)
class And:
    """Validates data against every sub-schema in turn, threading the
    (possibly transformed) value from one schema into the next.
    """

    def __init__(self, *args, **kw):
        self._args = args
        # 'error' (a custom failure message) is the only accepted keyword.
        assert list(kw) in (['error'], [])
        self._error = kw.get('error')

    def __repr__(self):
        args_repr = ', '.join(repr(a) for a in self._args)
        return '%s(%s)' % (self.__class__.__name__, args_repr)

    def validate(self, data):
        """Run *data* through each sub-schema; each may transform it."""
        for wrapped in [Schema(s, error=self._error) for s in self._args]:
            data = wrapped.validate(data)
        return data
class Or(And):
    """Validates data against alternatives, accepting the first that matches."""
    def validate(self, data):
        # Try each alternative in order and return the first successful
        # (possibly transformed) result; if every alternative fails,
        # re-raise the last failure wrapped with this Or's context.
        x = SchemaError([], [])
        for s in [Schema(s, error=self._error) for s in self._args]:
            try:
                return s.validate(data)
            except SchemaError as _x:
                x = _x
        raise SchemaError(['%r did not validate %r' % (self, data)] + x.autos,
                          [self._error] + x.errors)
class Use:
    """Wraps a callable; validating applies the callable to the data and
    returns its result, converting any failure into a SchemaError.
    """

    def __init__(self, callable_, error=None):
        assert callable(callable_)
        self._callable = callable_
        self._error = error

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self._callable)

    def validate(self, data):
        """Apply the wrapped callable to *data* and return its result."""
        try:
            return self._callable(data)
        except SchemaError as exc:
            # Propagate nested schema failures, prepending our context.
            raise SchemaError([None] + exc.autos, [self._error] + exc.errors)
        except BaseException as exc:
            # Any other exception becomes a SchemaError describing the call.
            fname = self._callable.__name__
            raise SchemaError('%s(%r) raised %r' % (fname, data, exc),
                              self._error)
def priority(s):
    """Return priority for a give object.

    Higher values (1-6) mean a more specific schema; used to order
    candidate keys during dict validation.

    :rtype: int
    """
    kind = type(s)
    if kind in (list, tuple, set, frozenset):
        return 6
    if kind is dict:
        return 5
    # Validatable objects rank above plain types.  Optional is excluded
    # here so that a catch-all rule like plain `str` does not take
    # precedence over an optional field, which would be unintuitive.
    if hasattr(s, 'validate') and kind is not Optional:
        return 4
    if kind is type:
        return 3
    if callable(s):
        return 2
    return 1
class Schema:
    """Wraps an arbitrary schema object (container, dict, validatable,
    type, callable, or literal) and validates data against it.
    """
    def __init__(self, schema, error=None):
        self._schema = schema
        self._error = error
    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self._schema)
    def validate(self, data):
        """Validate *data* against the wrapped schema, returning the
        (possibly transformed) value or raising SchemaError.
        """
        s = self._schema
        e = self._error
        # Containers: data must be the same container type, and every
        # element must match at least one of the container's sub-schemas.
        if type(s) in (list, tuple, set, frozenset):
            data = Schema(type(s), error=e).validate(data)
            return type(s)(Or(*s, error=e).validate(d) for d in data)
        # Dicts: each data key is matched against schema keys in priority
        # order; non-Optional schema keys must all be covered.
        if type(s) is dict:
            data = Schema(dict, error=e).validate(data)
            new = type(data)()
            x = None
            coverage = set()  # non-optional schema keys that were matched
            sorted_skeys = list(sorted(s, key=priority))
            for key, value in data.items():
                valid = False
                skey = None
                for skey in sorted_skeys:
                    svalue = s[skey]
                    try:
                        nkey = Schema(skey, error=e).validate(key)
                    except SchemaError:
                        # This schema key doesn't match; try the next one.
                        pass
                    else:
                        try:
                            nvalue = Schema(svalue, error=e).validate(value)
                        except SchemaError as _x:
                            # Key matched but value failed: hard error.
                            x = _x
                            raise
                        else:
                            coverage.add(skey)
                            valid = True
                            break
                if valid:
                    new[nkey] = nvalue
                elif skey is not None:
                    if x is not None:
                        raise SchemaError(['key %r is required' % key] +
                                          x.autos, [e] + x.errors)
                    else:
                        raise SchemaError('key %r is required' % skey, e)
            # Optional keys are allowed to be absent from the data.
            coverage = set(k for k in coverage if type(k) is not Optional)
            required = set(k for k in s if type(k) is not Optional)
            if coverage != required:
                raise SchemaError('missed keys %r' % (required - coverage), e)
            if len(new) != len(data):
                raise SchemaError('wrong keys %r in %r' % (new, data), e)
            return new
        # Validatable objects (Schema, And, Or, Use, ...): delegate.
        if hasattr(s, 'validate'):
            try:
                return s.validate(data)
            except SchemaError as x:
                raise SchemaError([None] + x.autos, [e] + x.errors)
            except BaseException as x:
                raise SchemaError('%r.validate(%r) raised %r' % (s, data, x),
                                  self._error)
        # Plain types: isinstance check.
        if type(s) is type:
            if isinstance(data, s):
                return data
            else:
                raise SchemaError('%r should be instance of %r' % (data, s), e)
        # Callables: predicate must evaluate truthy.
        if callable(s):
            f = s.__name__
            try:
                if s(data):
                    return data
            except SchemaError as x:
                raise SchemaError([None] + x.autos, [e] + x.errors)
            except BaseException as x:
                raise SchemaError('%s(%r) raised %r' % (f, data, x),
                                  self._error)
            raise SchemaError('%s(%r) should evaluate to True' % (f, data), e)
        # Anything else: literal equality.
        if s == data:
            return data
        else:
            raise SchemaError('%r does not match %r' % (s, data), e)
class Optional(Schema):
    """Marker for an optional part of Schema."""
    # No behavior of its own: `priority` and Schema's dict handling test
    # `type(key) is Optional` to decide whether a dict key may be absent.
| 33.170213 | 79 | 0.496793 |
32dc79531616b686da457760e97f2f549502cce5 | 2,288 | py | Python | movieflix/src/authorization.py | konvoulgaris/university-projects | dfb5dfa00d329c6714ec46cc476c2138e593afee | [
"Apache-2.0"
] | 5 | 2021-12-05T17:45:00.000Z | 2022-01-11T14:08:53.000Z | movieflix/src/authorization.py | konvoulgaris/university-projects | dfb5dfa00d329c6714ec46cc476c2138e593afee | [
"Apache-2.0"
] | null | null | null | movieflix/src/authorization.py | konvoulgaris/university-projects | dfb5dfa00d329c6714ec46cc476c2138e593afee | [
"Apache-2.0"
] | null | null | null | from flask import Blueprint, request, render_template, redirect, url_for, g, session
authorization = Blueprint("authorization", __name__)
@authorization.route("/login", methods=["GET", "POST"])
def login():
    """Render the login form (GET) or authenticate a user (POST).

    On success the user's email is stored in the session; any failure
    redirects back to the login form with ?fail=1.
    """
    # Redirect if already logged in
    if g.user:
        return redirect("/")

    if request.method == "GET":
        return render_template("login.html", fail=("fail" in request.args))
    else:
        if not ("email" in request.form and "password" in request.form):
            return redirect(url_for("authorization.login", fail=1))

        email = request.form["email"]
        password = request.form["password"]

        # Verify that the user exists
        match = g.users.find_one({
            "email": email
        })

        if match:
            # Verify credentials
            # SECURITY NOTE(review): passwords are compared in plaintext;
            # they should be hashed (e.g. werkzeug.security) -- confirm
            # with how register() stores them before changing.
            if password == match["password"]:
                session["email"] = email
                return redirect("/")
            else:
                return redirect(url_for("authorization.login", fail=1))
        else:
            return redirect(url_for("authorization.login", fail=1))
@authorization.route("/logout", methods=["GET"])
def logout():
    """Log the current user out by dropping their session email, then go home."""
    session.pop("email")
    return redirect("/")
@authorization.route("/register", methods=["GET", "POST"])
def register():
    """Render the registration form (GET) or create a new account (POST).

    POST requires 'name', 'email' and 'password' form fields and refuses
    an email that already exists.  A successful registration logs the
    user in by storing their email in the session.
    """
    # Redirect if already logged in
    if g.user:
        return redirect("/")

    if request.method == "GET":
        return render_template("register.html", fail=("fail" in request.args))
    else:
        if not ("name" in request.form and "email" in request.form and "password" in request.form):
            # Bug fix: incomplete submissions previously redirected to the
            # *login* endpoint; send the user back to the register form so
            # the failure flag is shown in the right place (matching the
            # duplicate-email branch below).
            return redirect(url_for("authorization.register", fail=1))

        name = request.form["name"]
        email = request.form["email"]
        # SECURITY NOTE(review): passwords are stored in plaintext; they
        # should be hashed -- left unchanged because login() compares raw
        # values against this field.
        password = request.form["password"]

        # Verify that the user doesn't exist
        match = g.users.find_one({
            "email": email
        })

        if not match:
            g.users.insert_one({
                "name": name,
                "email": email,
                "password": password,
                "category": "user"
            })
            session["email"] = email
            return redirect("/")
        else:
            return redirect(url_for("authorization.register", fail=1))
| 29.714286 | 99 | 0.557255 |
643916a6e5a6bc0f380a3eba3a51566a06041851 | 242 | py | Python | trigger/__init__.py | RheingoldRiver/misc-cogs | 3013af713999fa3ba5ae70f387febb8b43b33840 | [
"MIT"
] | null | null | null | trigger/__init__.py | RheingoldRiver/misc-cogs | 3013af713999fa3ba5ae70f387febb8b43b33840 | [
"MIT"
] | null | null | null | trigger/__init__.py | RheingoldRiver/misc-cogs | 3013af713999fa3ba5ae70f387febb8b43b33840 | [
"MIT"
] | null | null | null | from .trigger import Trigger
__red_end_user_data_statement__ = "Triggers are stored persistantly."
def setup(bot):
    """Red cog entry point: build the Trigger cog, schedule its background
    tasks on the bot's event loop, and register it with the bot.
    """
    cog = Trigger(bot)
    bot.loop.create_task(cog.save_stats())
    bot.loop.create_task(cog.load_triggers())
    bot.add_cog(cog)
| 24.2 | 69 | 0.735537 |
cb9875faee12d77eedc7f33d2c9cdb3c42fd0a28 | 2,015 | py | Python | commands.py | christopher-roelofs/7dtd-server-manager | 55a18def54516993f5f1664def2112b4b68a133e | [
"MIT"
] | null | null | null | commands.py | christopher-roelofs/7dtd-server-manager | 55a18def54516993f5f1664def2112b4b68a133e | [
"MIT"
] | null | null | null | commands.py | christopher-roelofs/7dtd-server-manager | 55a18def54516993f5f1664def2112b4b68a133e | [
"MIT"
] | null | null | null | __author__ = 'christopher'
import time
import telconn
import runtime
import logger
import memorydb
def teleport(player, location):
    # Send the teleport command three times, one second apart -- the
    # original repeated it verbatim, presumably so the teleport sticks
    # while the destination loads; confirm against server behavior.
    command = "tele " + player + " " + location
    telconn.write_out(command)
    for _ in range(2):
        time.sleep(1)
        telconn.write_out(command)
# Teleport player1 to player2: passes player2 as the 'location' argument;
# the server's tele command presumably accepts a player name as the
# destination -- confirm.
def p2p_teleport(player1,player2):
    teleport(player1,player2)
def say(message):
    # Broadcast a quoted chat message to the whole server.
    telconn.write_out('say "%s"' % message)
# Whisper a quoted private message to a single player.
# NOTE(review): failures are swallowed after printing to stdout
# (Python 2 style: print statement and e.message).
def pm(player,message):
    try:
        telconn.write_out( "pm " + player + " " + '"' + message + '"')
    except Exception as e:
        print "pm error: "+e.message
# Instantly kill the named player via the console 'kill' command.
def kill_player(player):
    telconn.write_out("kill " + player)
# Whisper the list of available chat commands to a player.
# NOTE(review): this shadows the builtin help(); left as-is because other
# modules call commands.help by this name.
def help(player):
    pm(player,"The following are the available commands")
    pm(player,"/home : Teleports you to your set home location")
    pm(player,"/setpoi <name> : Creates a new poi at the given location")
    pm(player,"/poi <name> : Teleports you to the named poi")
    pm(player,"/rpoi <name> : Removes the named poi")
    pm(player,"/listpoi or /lpoi : Lists all of your pois")
    pm(player,"/clearpoi : Clears all of your pois")
    pm(player,"/killme : Instantly kills you")
    pm(player,"/goto <player> : Teleports you to the named player")
    pm(player,"/bag : Teleports you your last death location")
    pm(player,"/where : Gives your position on the map")
    pm(player,"/drop : Displays a list of airdrops that have not been claimed")
    pm(player,"/claim : claims any airdrop in your radius")
# Send the message-of-the-day to a freshly connected player.  The 5-second
# delay presumably lets the client finish joining before the whisper --
# confirm.  Failures are logged at debug level.
def send_motd(player):
    try:
        time.sleep(5)
        pm(player,runtime.motd)
        pm(player,"Type /help for a list of available commands")
    except Exception as e:
        logger.log_debug("send_motd error: " + e.message)
# Ask the server for fresh player data ('lp' -- presumably 'list players').
def update_players():
    telconn.write_out("lp")
# One-shot delayed refresh: wait 5s (presumably for the telnet session to
# settle) then request player data once.
def update_player_objects_timed():
    time.sleep(5)
    update_players()
# Module-level polling loop: runs at import time until runtime.run is
# cleared, refreshing player data whenever anyone is online.
# NOTE(review): executing this at import blocks the importing thread --
# presumably this module runs on a dedicated thread; confirm.
while runtime.run:
    if len(memorydb.online_players)>0:
        update_players()
time.sleep(1) | 29.632353 | 79 | 0.653102 |
e51bfd75db5fef1ccaea2cd8bc4e1ba170aecf43 | 710 | py | Python | exercises/02-caproto.py | mattgibbs/melbourne2018workshop | c79a2e6dcef9ecfe92fdaac5149e87908ee75a17 | [
"BSD-3-Clause"
] | null | null | null | exercises/02-caproto.py | mattgibbs/melbourne2018workshop | c79a2e6dcef9ecfe92fdaac5149e87908ee75a17 | [
"BSD-3-Clause"
] | null | null | null | exercises/02-caproto.py | mattgibbs/melbourne2018workshop | c79a2e6dcef9ecfe92fdaac5149e87908ee75a17 | [
"BSD-3-Clause"
] | 1 | 2018-11-18T21:45:37.000Z | 2018-11-18T21:45:37.000Z | #!/usr/bin/env python3
from caproto.server import pvproperty, PVGroup, ioc_arg_parser, run
import time
class ForeverCounter(PVGroup):
    """Caproto IOC group exposing a single integer PV ('val')."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
    # make unsigned
    val = pvproperty(value=0, mock_record='ai')
    # increment no more than once a second
    @val.scan(period=.1, use_scan_field=True)
    async def val(self, instance, async_lib):
        # NOTE(review): this scan hook only samples the clock and never
        # writes a new value to the PV, so 'val' never actually counts.
        # Presumably this exercise file is meant to be completed -- confirm.
        current_time = time.monotonic()
if __name__ == '__main__':
    # Parse the standard caproto IOC command-line options (PV prefix,
    # async library, interfaces, ...) and build the IOC instance.
    ioc_options, run_options = ioc_arg_parser(
        default_prefix='counter:',
        desc="Counts up to infinity (well, integer oveflow)")
    ioc = ForeverCounter(**ioc_options)
| 28.4 | 67 | 0.68169 |
c572d238d8264e2ef24f5cf35dbbd244af9c7bb8 | 573 | py | Python | testDB.py | thebillington/DBManager | 22f7066c4860688e352c0b79a7b6ca589b7ceaf2 | [
"MIT"
] | null | null | null | testDB.py | thebillington/DBManager | 22f7066c4860688e352c0b79a7b6ca589b7ceaf2 | [
"MIT"
] | null | null | null | testDB.py | thebillington/DBManager | 22f7066c4860688e352c0b79a7b6ca589b7ceaf2 | [
"MIT"
] | null | null | null | # Import the DBManager
from DBManager import Database
# Create a new database object
db = Database("database.db", True)
# Execute the create tables sql file
# This should only be run the first time the code is executed
db.fTransaction("create_tables.sql")
# Get a username and password
# NOTE(review): the password is echoed to the terminal; getpass would be
# safer, and it is stored in plaintext.
uname = input("Enter your username: ")
pword = input("Enter your password: ")
# Execute sql statement
# SECURITY NOTE(review): user input is formatted directly into SQL --
# classic injection risk.  Use parameterized queries if the Database API
# supports them; confirm against DBManager.
db.transaction("INSERT into Users VALUES ('{}', '{}');".format(uname,pword))
# Execute a statement to select all users
print(db.fTransaction("fetch_users.sql"))
| 28.65 | 77 | 0.719023 |
f8cc1c8d06d668fab33f6153ae53935782b61787 | 2,660 | py | Python | scripts/create_fluseverity_figs/ILINet_F2_OR_time.py | eclee25/flu-SDI-exploratory-age | 2f5a4d97b84d2116e179e85fe334edf4556aa946 | [
"MIT"
] | 3 | 2018-03-29T23:02:43.000Z | 2020-08-10T12:01:50.000Z | scripts/create_fluseverity_figs/ILINet_F2_OR_time.py | eclee25/flu-SDI-exploratory-age | 2f5a4d97b84d2116e179e85fe334edf4556aa946 | [
"MIT"
] | null | null | null | scripts/create_fluseverity_figs/ILINet_F2_OR_time.py | eclee25/flu-SDI-exploratory-age | 2f5a4d97b84d2116e179e85fe334edf4556aa946 | [
"MIT"
] | null | null | null | #!/usr/bin/python
##############################################
###Python template
###Author: Elizabeth Lee
###Date: 6/18/14
###Function: OR of incidence in children to incidence in adults vs. week number. Incidence in children and adults is normalized by the size of the child and adult populations in the second calendar year of the flu season. ILINet data
###Import data: CDC_Source/Import_Data/all_cdc_source_data.csv, Census/Import_Data/totalpop_age_Census_98-14.csv
###Command Line: python ILINet_F2_OR_time.py
##############################################
### notes ###
# Incidence per 100,000 is normalized by total population by second calendar year of the flu season
# 2013-14 ILINet data is normalized by estimated population size from December 2013 because 2014 estimates are not available at this time
### packages/modules ###
import csv
import matplotlib.pyplot as plt
## local modules ##
import functions as fxn
### data structures ###
### functions ###
### data files ###
# NOTE(review): hard-coded absolute paths; neither file handle is closed.
incidin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/CDC_Source/Import_Data/all_cdc_source_data.csv','r')
incidin.readline() # remove header
incid = csv.reader(incidin, delimiter=',')
popin = open('/home/elee/Dropbox/Elizabeth_Bansal_Lab/Census/Import_Data/totalpop_age_Census_98-14.csv', 'r')
pop = csv.reader(popin, delimiter=',')
### called/local plotting parameters ###
ps = fxn.pseasons
fw = fxn.gp_fluweeks
sl = fxn.gp_ILINet_seasonlabels
colvec = fxn.gp_ILINet_colors
wklab = fxn.gp_weeklabels
fs = 24
fssml = 16
### program ###
# import data
# d_wk[week] = seasonnum, d_incid53ls[seasonnum] = [ILI wk 40 per 100000, ILI wk 41 per 100000,...], d_OR53ls[seasonnum] = [OR wk 40, OR wk 41, ...], d_zOR53ls[seasonnum] = [zOR wk 40, zOR wk 41, ...]
d_wk, d_incid, d_OR = fxn.ILINet_week_OR_processing(incid, pop)
d_zOR = fxn.week_zOR_processing(d_wk, d_OR)
d_incid53ls, d_OR53ls, d_zOR53ls = fxn.week_plotting_dicts(d_wk, d_incid, d_OR, d_zOR)
# plot values
fig = plt.figure()
ax = plt.subplot(111)
# One line per season, restricted to the first `fw` flu weeks.
for s, i in zip(ps, xrange(len(ps))):
	ax.plot(xrange(fw), d_OR53ls[s][:fw], marker = 'o', color = colvec[i], label = sl[i], linewidth = 2)
plt.xlim([0, fw-1])
plt.xticks(range(fw)[::5], wklab[:fw:5])
plt.ylim([0, 8])
plt.xlabel('Week Number', fontsize=fs)
plt.ylabel('OR, child:adult', fontsize=fs)
# shrink current axis by 10%
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width*0.9, box.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('/home/elee/Dropbox/Elizabeth_Bansal_Lab/Manuscripts/Age_Severity/fluseverity_figs/ILINet/OR_time.png', transparent=False, bbox_inches='tight', pad_inches=0)
plt.close()
| 37.464789 | 233 | 0.711278 |
79c8070d184064952a2f8ad057d52c6a8a4c8e3b | 676 | py | Python | tests/binpacking/solver/test_factory.py | Jxtopher/binpacking | 6ce2a1cd071a0660c32f17f05298dde42942a2d9 | [
"MIT"
] | 1 | 2021-12-27T12:37:58.000Z | 2021-12-27T12:37:58.000Z | tests/binpacking/solver/test_factory.py | Jxtopher/binpacking | 6ce2a1cd071a0660c32f17f05298dde42942a2d9 | [
"MIT"
] | null | null | null | tests/binpacking/solver/test_factory.py | Jxtopher/binpacking | 6ce2a1cd071a0660c32f17f05298dde42942a2d9 | [
"MIT"
] | null | null | null | from tests.base import BaseTestCase
from binpacking.solver.factory import Factory
class FactoryTest(BaseTestCase):
def test_factory_csp_backtracking(self) -> None:
config = {
"seed": 0,
"dataStructure": "Domains",
"OptimizationAlgorithm": "Backtracking",
"Backtracking": {
"BinPacking2D_overload": {"capacity": [2, 2], "items": [[2, 1], [1, 1], [1, 1]]},
"Statistics": {},
"StopCriteria": {},
},
}
instance = Factory.build_config(config)
results = Factory.run_solver(config, instance)
self.assertEqual(len(results), 49)
| 30.727273 | 97 | 0.557692 |
efbe11a40a1212c87317e9fb93fb320106655efb | 1,734 | py | Python | app/api/v1/views/house_views.py | Cyrus-Muchiri/Kukodi | f6c2066f7676ea59733cdcd095e4cc5c770a7287 | [
"MIT"
] | null | null | null | app/api/v1/views/house_views.py | Cyrus-Muchiri/Kukodi | f6c2066f7676ea59733cdcd095e4cc5c770a7287 | [
"MIT"
] | null | null | null | app/api/v1/views/house_views.py | Cyrus-Muchiri/Kukodi | f6c2066f7676ea59733cdcd095e4cc5c770a7287 | [
"MIT"
] | 1 | 2020-06-20T10:30:15.000Z | 2020-06-20T10:30:15.000Z | import re
import psycopg2
from flask import Blueprint, request, jsonify
from app.api.v1.models.house_models import HouseRecords
from app.api.v1.models.database import init_db
from app.api.v1.utils.validators import validate_house_data
from app.api.v1.utils.token import login_required
INIT_DB = init_db()
HOUSE = Blueprint('house', __name__)
HOUSE_RECORDS = HouseRecords()
@HOUSE.route('/houses', methods=['POST'])
@login_required
def house_registration():
    '''Register a new house (POST /houses).

    Expects a JSON body with "house_number", "house_type" and "rent_amount".
    Returns 400 when a key is missing or the house number already exists.
    '''
    try:
        data = request.get_json()
        house_number = data["house_number"]
        house_type = data["house_type"]
        rent_amount = data["rent_amount"]
        validate_house_data(house_number, house_type, rent_amount)
        cur = INIT_DB.cursor()
        # Parameterized query: the original interpolated house_number straight
        # into the SQL string, which allowed SQL injection.
        cur.execute("SELECT house_number FROM houses WHERE house_number = %s", (house_number,))
        existing = cur.fetchone()
        if existing is not None:
            return jsonify({"message": "house already exists"}), 400
        try:
            return HOUSE_RECORDS.register_house(house_number, house_type, rent_amount)
        except psycopg2.Error as error:
            # jsonify(error) on a raw exception object is not serializable;
            # report the message text with an explicit server-error status.
            return jsonify({"error": str(error)}), 500
    except KeyError:
        return jsonify({"error": "a key is missing"}), 400
@HOUSE.route('/houses', methods=['GET'])
def view_all():
    '''Return all registered houses (GET /houses).'''
    # Delegates straight to the HouseRecords model layer.
    return HOUSE_RECORDS.view_houses()
@HOUSE.route('/houses/<int:house_id>', methods=['GET'])
def view_one(house_id):
    '''Return a single house looked up by its numeric id (GET /houses/<id>).'''
    return HOUSE_RECORDS.view_house(house_id)
@HOUSE.route('/houses/<string:house_no>', methods=['GET'])
def view_one_by_number(house_no):
    '''Return a single house looked up by its house number string.'''
    return HOUSE_RECORDS.view_house_by_number(house_no)
5a6a9c92ad1605e19de1033d70bf19b77ffb2d0f | 548 | py | Python | FitnessWatcherUI/manage.py | srasool2/SWDV-691-FitnessWatcherUI | a65e32329b98a217077cb5d1708d0afee9f5d3e1 | [
"MIT"
] | null | null | null | FitnessWatcherUI/manage.py | srasool2/SWDV-691-FitnessWatcherUI | a65e32329b98a217077cb5d1708d0afee9f5d3e1 | [
"MIT"
] | 13 | 2019-03-30T14:45:44.000Z | 2020-06-05T20:22:16.000Z | FitnessWatcherUI/manage.py | srasool2/SWDV-691-FitnessWatcherUI | a65e32329b98a217077cb5d1708d0afee9f5d3e1 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'FitnessWatcherUI.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| 34.25 | 80 | 0.691606 |
e6b25a7d356bed0835fadef42ed9be730739bdf3 | 14,741 | py | Python | pyanp/limitmatrix.py | cznewt/pyanp | 2b90363ba3f9d60aef1e4f74c6cda9f5fda83561 | [
"MIT"
] | 1 | 2021-01-02T10:31:12.000Z | 2021-01-02T10:31:12.000Z | pyanp/limitmatrix.py | ArdalanMarandi/pyanp | 731fa2c6090e274843f854081610599a2001e00a | [
"MIT"
] | null | null | null | pyanp/limitmatrix.py | ArdalanMarandi/pyanp | 731fa2c6090e274843f854081610599a2001e00a | [
"MIT"
] | null | null | null | '''
Contains all limit matrix calculations
.. moduleauthor: Dr. Bill Adams
'''
import numpy as np
import pandas as pd
from copy import deepcopy
from pyanp.general import get_matrix
def _mat_pow2(mat, power):
'''
Calculates :math:`mat^N` where :math:`N \geq power` and N is a power of 2.
It does this by squaring mat, and squaring that, etc, until it reaches
the desired level. It takes at most floor(log_2(power))+1 matrix
multiplications to do this, which is much preferred for large powers.
:param mat: The numpy array to raise to a power.
:param power: The power to be greater than or equal to
:return: The resulting power of the matrix
'''
last = deepcopy(mat)
nextm = deepcopy(mat)
count=1
while count <= power:
np.matmul(last, last, nextm)
tmp = last
last = nextm
nextm = tmp
count *= 2
return last
def normalize(mat, inplace=False):
    '''
    Makes the columns of a matrix add to 1 (unless a column sums to zero,
    in which case it is left unchanged) by dividing each column by its sum.

    :param mat: The matrix to normalize.
    :param inplace: If True, normalizes the given matrix in place; otherwise
        the input is left alone and a normalized copy is returned.
    :return: The normalized matrix when inplace=False, otherwise None.
    :raises ValueError: If inplace=True and mat has an integer dtype (integer
        division would silently truncate the result).
    '''
    div = mat.sum(axis=0)
    # Zero-sum columns are divided by 1 so they pass through unchanged.
    div[div == 0] = 1.0
    if not inplace:
        return mat / div
    # Explicit dtype check replaces the original write-1/3-and-read-back
    # probe, which temporarily mutated mat[0,0] even on the success path.
    if np.issubdtype(mat.dtype, np.integer):
        raise ValueError("Matrix cannot be integer type for inplace normalization.")
    np.divide(mat, div, out=mat)
def hiearhcy_formula(mat):
    '''
    Uses the hierarchy formula to calculate the limit matrix: the
    column-normalized sum mat + mat^2 + ... + mat^(n-1).

    :param mat: A square numpy array expected to be a hierarchy supermatrix.
    :return: The limit matrix, or None if the matrix was not a hierarchy.
    '''
    n = len(mat)
    # A hierarchy matrix is nilpotent: a sufficiently high power vanishes.
    # If it does not, this was not a hierarchy.
    if np.count_nonzero(_mat_pow2(mat, n + 1)) != 0:
        return None
    total = deepcopy(mat)
    current = deepcopy(mat)
    for _ in range(n - 2):
        current = np.matmul(current, mat)
        total = total + current
    return normalize(total)
def calculus(mat, error=1e-10, max_iters=5000, use_hierarchy_formula=True, col_scale_type=None):
    '''
    Calculates the 'Calculus Type' limit matrix from superdecisions.

    Jumps to a large starting power of the matrix, then keeps multiplying by
    ``mat``, comparing the last ``size`` retained powers until two agree to
    within ``error`` after column scaling.

    :param mat: The scaled supermatrix to calculate the limit matrix of
    :param error: The maximum error to allow between iterations
    :param max_iters: The maximum number of iterations before we give up,
        after we calculate the start power
    :param use_hierarchy_formula: If True and the matrix is for a hierarchy
        we use that formula instead.
    :param col_scale_type: A string; if 'all' it scales by the matrix-wide
        max, otherwise by column (see normalize_cols_dist).
    :return: The calculated limit matrix as a numpy array.
    :raises ValueError: If no convergence within max_iters iterations.
    '''
    size = len(mat)
    diff = 0.0
    start_pow = 20*size * size +10
    start = _mat_pow2(mat, start_pow)
    if use_hierarchy_formula and (np.max(abs(start))==0):
        # This matrix may be for a hierarchy; re-check at a lower power to
        # distinguish a true hierarchy from round-off underflow.
        start_pow = size
        start = _mat_pow2(mat, start_pow)
        if np.max(abs(start)) == 0:
            # It truly was a hierarchy, use the closed-form formula.
            return hiearhcy_formula(mat)
    # Temporary storage matrices reused by every normalize_cols_dist call.
    tmp1 = deepcopy(mat)
    tmp2 = deepcopy(mat)
    tmp3 = deepcopy(mat)
    # Keep the last `size` powers so convergence can be checked against any
    # of them (handles cyclic behavior of the powers).
    pows = [start]
    for i in range(size - 1):
        # Add next power
        pows.append(np.matmul(mat, pows[-1]))
        diff = normalize_cols_dist(pows[-1], pows[-2], tmp1, tmp2, tmp3, col_scale_type)
        if diff < error:
            # Already converged, done: column-normalize the result
            # (zero-sum columns are divided by 1 so they pass through).
            mysum = pows[-1].sum(axis=0)
            for i in range(len(mysum)):
                if mysum[i]==0:
                    mysum[i]=1
            return pows[-1] / mysum
    for count in range(max_iters):
        # Reuse the oldest buffer for the next power and rotate the window.
        nextp = pows[0]
        np.matmul(pows[-1], mat, nextp)
        for i in range(len(pows) - 1):
            pows[i] = pows[i + 1]
        pows[-1] = nextp
        # Check convergence against every retained power.
        for i in range(len(pows) - 1):
            diff = normalize_cols_dist(pows[i], nextp, tmp1, tmp2, tmp3, col_scale_type)
            if diff < error:
                mysum = nextp.sum(axis=0)
                for i in range(len(mysum)):
                    if mysum[i] == 0:
                        mysum[i] = 1
                # NOTE: the original printed the iteration count here; that
                # debug print has been removed from this library function.
                return nextp / mysum
    # If we make it here, we never converged
    raise ValueError("Did not converge within "+str(max_iters)+" iterations")
def normalize_cols_dist(mat1, mat2, tmp1=None, tmp2=None, tmp3=None, col_scale_type=None):
    '''
    Calculates the max-abs distance between mat1 and mat2 after each has been
    scaled (by column max, or by global max when col_scale_type == "all").

    tmp1/tmp2/tmp3 are optional preallocated matrices of the same shape; limit
    matrix loops call this many times, so reusing buffers avoids repeated
    allocation. Leave them as None to allocate internally.

    :param mat1: First matrix to compare
    :param mat2: The other matrix to compare
    :param tmp1: Temporary storage matrix, same shape as mat1/mat2, or None.
    :param tmp2: Temporary storage matrix, same shape as mat1/mat2, or None.
    :param tmp3: Temporary storage matrix, same shape as mat1/mat2, or None.
    :param col_scale_type: If 'all', scale each matrix by its single largest
        entry; otherwise scale each column by that column's max.
    :return: The maximum absolute difference between the scaled matrices.
    '''
    tmp1 = tmp1 if tmp1 is not None else deepcopy(mat1)
    tmp2 = tmp2 if tmp2 is not None else deepcopy(mat1)
    tmp3 = tmp3 if tmp3 is not None else deepcopy(mat1)
    if col_scale_type == "all":
        # Fixed: the original called builtin max() on a 2-d array (a
        # ValueError) and then iterated the scalar result. Scale by the
        # global maximum of each matrix, guarding against all-zero input.
        div1 = np.max(mat1)
        div2 = np.max(mat2)
        if div1 == 0:
            div1 = 1
        if div2 == 0:
            div2 = 1
    else:
        div1 = mat1.max(axis=0)
        div2 = mat2.max(axis=0)
        # All-zero columns are divided by 1 so they pass through unchanged.
        for i in range(len(div1)):
            if div1[i] == 0:
                div1[i] = 1
            if div2[i] == 0:
                div2[i] = 1
    np.divide(mat1, div1, tmp1)
    np.divide(mat2, div2, tmp2)
    np.subtract(tmp1, tmp2, tmp3)
    np.absolute(tmp3, tmp3)
    return np.max(tmp3)
def zero_cols(full_mat, non_zero=False):
    '''
    Returns the list of indices of columns that are zero or non-zero,
    depending on the parameter non_zero.

    :param full_mat: The (square) matrix to search over.
    :param non_zero: If False, return indices of all-zero columns; otherwise
        return indices of columns with at least one non-zero entry.
    :return: A list of column indices of the requested kind.
    '''
    size = len(full_mat)
    # For each column, record whether it holds any non-zero entry.
    has_entry = [
        any(full_mat[row, col] != 0 for row in range(size))
        for col in range(size)
    ]
    return [col for col, flag in enumerate(has_entry) if flag == non_zero]
def hierarchy_nodes(mat):
    '''
    Returns the indices of the nodes that are hierarchy ones (their columns
    vanish in a high power of the matrix). The others are network nodes.

    :param mat: A supermatrix (scaled or non-scaled, both work).
    :return: List of indices of the nodes which are hierarchical.
    '''
    size = len(mat)
    start_pow = size
    # _mat_pow2 raises mat to a power-of-two exponent >= size; columns that
    # only feed acyclic (hierarchy) chains are zero at that power.
    full_mat = _mat_pow2(mat, start_pow)
    return zero_cols(full_mat)
def two_two_breakdown(mat, upper_right_indices):
    '''
    Splits ``mat`` into the 2x2 block structure

    == ==
    A  B
    C  D
    == ==

    where A is indexed by ``upper_right_indices`` on both axes and D by the
    remaining indices. Useful for limit matrix calculations that treat the
    hierarchical and network components separately.

    :param mat: The square matrix to break down.
    :param upper_right_indices: Row/column indices selecting the A block.
    :return: A tuple (A, B, C, D) of float numpy arrays.
    '''
    total_n = len(mat)
    rest = [i for i in range(total_n) if i not in upper_right_indices]
    sel = np.asarray(upper_right_indices, dtype=int)
    other = np.asarray(rest, dtype=int)
    # np.ix_ builds an open mesh so each block is extracted in one indexing
    # operation; astype(float) matches the original float64 output blocks.
    A = mat[np.ix_(sel, sel)].astype(float)
    B = mat[np.ix_(sel, other)].astype(float)
    C = mat[np.ix_(other, sel)].astype(float)
    D = mat[np.ix_(other, other)].astype(float)
    return (A, B, C, D)
def limit_sinks(mat, straight_normalizer=True):
    '''
    Performs the limit with sinks calculation. We break the matrix up into
    sink columns (all-zero) and nonsink columns and combine the limits of
    the pieces.

    :param mat: The matrix to do the limit sinks calculation on.
    :param straight_normalizer: If False we normalize the nonsink limit
        before combining; if True we normalize the combined result at
        the end.
    :return: The resulting numpy array.
    '''
    n = len(mat)
    nonsinks = zero_cols(mat, non_zero=True)
    sinks = zero_cols(mat, non_zero=False)
    if len(nonsinks) == n:
        # There are no sinks, return calculus type instead
        return calculus(mat)
    # Okay we made it here, we need to get the A and B portions.
    # NOTE(review): two_two_breakdown returns block order (A, B, C, D) of
    # [[A, B], [C, D]]; here B is the nonsink->nonsink block and A the
    # nonsink->sink block; z1/z2 are presumably zero because sink columns
    # are zero by construction — TODO confirm.
    (B, z1, A, z2) = two_two_breakdown(mat, nonsinks)
    # Make sure z1 and z2 are zero
    limitB = calculus(B)
    if not straight_normalizer:
        limitB = normalize(limitB)
    # Sink rows receive A * limit(B): influence flowing from nonsinks.
    axblimit = np.matmul(A, limitB)
    rval = np.zeros([n, n])
    # Scatter the nonsink-block limit back to original coordinates.
    for i in range(len(nonsinks)):
        orig_row = nonsinks[i]
        for j in range(len(nonsinks)):
            orig_col = nonsinks[j]
            rval[orig_row, orig_col] = limitB[i, j]
    # Scatter the sink rows (sink columns themselves stay zero).
    for i in range(len(sinks)):
        orig_row = sinks[i]
        for j in range(len(nonsinks)):
            orig_col=nonsinks[j]
            rval[orig_row, orig_col] = axblimit[i, j]
    if straight_normalizer:
        rval = normalize(rval)
    return rval
def limit_newhierarchy(mat, with_limit=False, error=1e-10, col_scale_type = None, max_count = 1000):
    '''
    Performs the new hierarchy limit matrix calculation: the network block is
    limited with calculus(), and the hierarchy rows are rebuilt from
    A*limit(B) + C*A (optionally iterated until stable when with_limit=True).

    :param mat: The matrix to perform the calculation on.
    :param with_limit: If True, iterate the lower-left update (up to
        max_count times) until it changes by less than error.
    :param error: Convergence tolerance for the with_limit iteration.
    :param col_scale_type: Passed through to normalize_cols_dist.
    :param max_count: Iteration cap for the with_limit loop.
    :return: The resulting numpy array.
    '''
    n = len(mat)
    hier_nodes = hierarchy_nodes(mat)
    net_nodes = [i for i in range(n) if i not in hier_nodes]
    # Block order from two_two_breakdown is [[B, z1], [A, C]] with the
    # network nodes selecting the upper-left block.
    (B, z1, A, C) = two_two_breakdown(mat, net_nodes)
    if len(net_nodes) == n:
        # Pure network: plain calculus limit. (The original also printed the
        # node list here; that debug print has been removed.)
        return calculus(mat)
    elif len(hier_nodes) == n:
        return hiearhcy_formula(mat)
    limitB = calculus(B)
    limitC = calculus(C)
    lowerLeftCorner = np.matmul(A, limitB) + np.matmul(C, A)
    lowerLeftCorner = normalize(lowerLeftCorner)
    if with_limit:
        laststep = lowerLeftCorner
        diff = 1
        tmp1 = deepcopy(mat)
        tmp2 = deepcopy(mat)
        tmp3 = deepcopy(mat)
        count = 0
        # NOTE(review): the update below uses limitC (not C as in the
        # non-iterated path) and does not depend on laststep, so it
        # stabilizes after one step — preserved as written; verify intent.
        while (diff > error) and (count < max_count):
            nextstep = np.matmul(A, limitB) + np.matmul(limitC, A)
            # diff = normalize_cols_dist(laststep, nextstep, tmp1, tmp2, tmp3, col_scale_type=col_scale_type)
            diff = normalize_cols_dist(laststep, nextstep, None, None, None, col_scale_type=col_scale_type)
            laststep = nextstep
            count+=1
        lowerLeftCorner = nextstep
    # Reassemble the full-size matrix in original coordinates.
    rval = np.zeros([n, n])
    for i in range(len(net_nodes)):
        orig_row = net_nodes[i]
        for j in range(len(net_nodes)):
            orig_col = net_nodes[j]
            rval[orig_row, orig_col] = limitB[i, j]
    for i in range(len(hier_nodes)):
        orig_row = hier_nodes[i]
        for j in range(len(net_nodes)):
            orig_col=net_nodes[j]
            rval[orig_row, orig_col] = lowerLeftCorner[i, j]
        for j in range(len(hier_nodes)):
            orig_col=hier_nodes[j]
            rval[orig_row, orig_col] = C[i, j]
    rval = normalize(rval)
    return rval
def priority_from_limit(limit_matrix):
    '''
    Extracts the priority vector from a limit matrix: each row is summed and
    the resulting vector is scaled to sum to 1 (left as-is when the total
    is zero).

    :param limit_matrix: The matrix to extract the priority from.
    :return: 1d numpy array of the priority.
    '''
    row_sums = limit_matrix.sum(axis=1)
    total = row_sums.sum()
    if total == 0:
        return row_sums
    return row_sums / total
def priority(matrix, limit_calc=calculus):
    '''
    Calculates the limit matrix and extracts the priority from it. Really
    just a convenience function combining limit_calc and
    priority_from_limit.

    :param matrix: The scaled supermatrix to calculate the priority for
    :param limit_calc: The limit matrix calculation to use (defaults to
        calculus)
    :return: The priority as a 1d numpy array
    '''
    lmat = limit_calc(matrix)
    return priority_from_limit(lmat)
db1c79475fb499942a8d0678e912185baf2d5906 | 1,609 | py | Python | setup.py | darthghandi/pytest-slickqa | 7c491702b8c43093f4fa20bfec8e95080de819c5 | [
"Apache-2.0"
] | 1 | 2016-12-05T21:13:30.000Z | 2016-12-05T21:13:30.000Z | setup.py | darthghandi/pytest-slickqa | 7c491702b8c43093f4fa20bfec8e95080de819c5 | [
"Apache-2.0"
] | null | null | null | setup.py | darthghandi/pytest-slickqa | 7c491702b8c43093f4fa20bfec8e95080de819c5 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import codecs
from setuptools import setup
def read(fname):
    """Return the UTF-8 decoded contents of *fname* relative to this file."""
    path = os.path.join(os.path.dirname(__file__), fname)
    # Context manager closes the handle (the return value is unchanged).
    with codecs.open(path, encoding='utf-8') as handle:
        return handle.read()
# Package metadata for the pytest-slickqa plugin. The 'pytest11' entry point
# below is what makes pytest auto-discover and load the plugin on install.
setup(
    name='pytest-slickqa',
    version='0.1.4',
    author='Chris Saxey',
    author_email='darthghandi@gmail.com',
    maintainer='Chris Saxey',
    maintainer_email='darthghandi@gmail.com',
    license='Apache Software License 2.0',
    url='https://github.com/darthghandi/pytest-slickqa',
    description='A Pytest plugin that reports results to Slickqa',
    long_description=read('README.rst'),
    py_modules=['pytest_slickqa'],
    install_requires=['pytest>=2.9.1', 'slickqa'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Framework :: Pytest',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Testing',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        'Operating System :: OS Independent',
        'License :: OSI Approved :: Apache Software License',
    ],
    entry_points={
        'pytest11': [
            'slickqa = pytest_slickqa',
        ],
    },
)
| 32.18 | 70 | 0.619018 |
386aa26547c0e609e55c4249a453d1636d7b0738 | 628 | py | Python | runs/10KB/par-bro-iter06000.cfg.py | janpawellek/broeval | 57e31aa6e354d0bba88103b44910483e8d982d00 | [
"MIT"
] | null | null | null | runs/10KB/par-bro-iter06000.cfg.py | janpawellek/broeval | 57e31aa6e354d0bba88103b44910483e8d982d00 | [
"MIT"
] | null | null | null | runs/10KB/par-bro-iter06000.cfg.py | janpawellek/broeval | 57e31aa6e354d0bba88103b44910483e8d982d00 | [
"MIT"
] | null | null | null |
# Evaluation-run configuration consumed by the broeval harness; plain
# module-level constants only.

# Write results to this file
OUTFILE = 'runs/10KB/par-bro-iter06000.result.csv'

# Source computers for the requests
SOURCE = ['10.0.0.1']

# Should Bro be enabled on the source machines?
SOURCE_BRO = [True]

# Target machines for the requests (aka server)
TARGET = ['10.0.0.2']

# Should Bro be enabled on the target machines?
TARGET_BRO = [True]

# Connection mode (par = parallel, seq = sequential)
MODE = 'par'

# Number of evaluation repetitions to run
EPOCHS = 100

# Number of iterations to be run in each evaluation repetition
ITER = 6000

# Size of the file to be downloaded from target (in Bytes * 10^SIZE)
SIZE = 4
54e8519371470f857757c3983a01ad173f6fe7cf | 659 | py | Python | SOLID/es2/srp_step3.py | nick87ds/MaterialeSerate | 51627e47ff1d3c3ecfc9ce6741c04b91b3295359 | [
"MIT"
] | 12 | 2021-12-12T22:19:52.000Z | 2022-03-18T11:45:17.000Z | SOLID/es2/srp_step3.py | nick87ds/MaterialeSerate | 51627e47ff1d3c3ecfc9ce6741c04b91b3295359 | [
"MIT"
] | 1 | 2022-03-23T13:58:33.000Z | 2022-03-23T14:05:08.000Z | SOLID/es2/srp_step3.py | nick87ds/MaterialeSerate | 51627e47ff1d3c3ecfc9ce6741c04b91b3295359 | [
"MIT"
] | 7 | 2021-02-01T22:09:14.000Z | 2021-06-22T08:30:16.000Z | """
The Animal class acts as the entry point/facade (Facade) for managing the
animal database and the animals' properties.
"""
class Animal:
    """Facade over AnimalDB: single entry point for animal persistence."""
    def __init__(self, name: str):
        self.name = name
        # Owns its persistence backend; callers never touch AnimalDB directly.
        self.db = AnimalDB()
    def get_name(self) -> str:
        """Return the animal's name."""
        return self.name
    def get(self, id) -> "Animal":
        """Load an animal from the database by id."""
        return self.db.get_animal(id)
    def save(self) -> None:
        """Persist this animal through the database layer."""
        self.db.save(animal=self)
class AnimalDB:
    """Persistence-layer stub for Animal objects (implementation omitted)."""
    def get_animal(self, id) -> Animal:
        """Load an Animal by its database id."""
        pass
    def save(self, animal: Animal):
        """Persist the given Animal."""
        pass
"""
The most important methods live in the Animal class, which is used as a
"facade" for the smaller functions.
"""
| 19.382353 | 77 | 0.647951 |
1f176144b8302cea5a842a60f138b88ed990114a | 500 | py | Python | server/vcr-server/api/v2/migrations/0004_auto_20180821_2221.py | brianorwhatever/aries-vcr | 96bb31a2f96406dfa2832dbd7790c46b60981e13 | [
"Apache-2.0"
] | 38 | 2019-01-07T02:49:55.000Z | 2020-01-27T17:26:09.000Z | server/vcr-server/api/v2/migrations/0004_auto_20180821_2221.py | brianorwhatever/aries-vcr | 96bb31a2f96406dfa2832dbd7790c46b60981e13 | [
"Apache-2.0"
] | 364 | 2019-01-07T20:22:15.000Z | 2020-03-10T21:59:23.000Z | server/vcr-server/api/v2/migrations/0004_auto_20180821_2221.py | brianorwhatever/aries-vcr | 96bb31a2f96406dfa2832dbd7790c46b60981e13 | [
"Apache-2.0"
] | 34 | 2019-01-04T19:16:04.000Z | 2020-02-20T19:24:25.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-08-21 22:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated: give credential.effective_date a default of now()."""

    dependencies = [("api_v2", "0003_auto_20180821_1840")]

    operations = [
        migrations.AlterField(
            model_name="credential",
            name="effective_date",
            # timezone.now is passed as a callable so the default is
            # evaluated at row-creation time, not at migration time.
            field=models.DateTimeField(default=django.utils.timezone.now),
        )
    ]
| 25 | 74 | 0.672 |
c2929fa156a593eb7d44f63cafc55da05b7c3dae | 1,415 | py | Python | tests/test_datatypes.py | vishwas1234567/finn | cbef52d20fce8574b358e7aee01cf4a8c26c5adf | [
"BSD-3-Clause"
] | null | null | null | tests/test_datatypes.py | vishwas1234567/finn | cbef52d20fce8574b358e7aee01cf4a8c26c5adf | [
"BSD-3-Clause"
] | null | null | null | tests/test_datatypes.py | vishwas1234567/finn | cbef52d20fce8574b358e7aee01cf4a8c26c5adf | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
import finn.core.datatype as dt
def test_datatypes():
    """Check DataType.allowed() accepts in-range values and rejects others.

    Covers the special single-bit types (BIPOLAR, BINARY), then unsigned and
    signed integer widths; the INT16/INT32 cases also verify non-integer
    floats are rejected.
    """
    # single-bit encodings
    assert dt.DataType.BIPOLAR.allowed(-1)
    assert dt.DataType.BIPOLAR.allowed(0) is False
    assert dt.DataType.BINARY.allowed(-1) is False
    assert dt.DataType.BINARY.allowed(1)
    # unsigned integer ranges
    assert dt.DataType.UINT2.allowed(2)
    assert dt.DataType.UINT2.allowed(10) is False
    assert dt.DataType.UINT3.allowed(5)
    assert dt.DataType.UINT3.allowed(-7) is False
    assert dt.DataType.UINT4.allowed(15)
    assert dt.DataType.UINT4.allowed(150) is False
    assert dt.DataType.UINT8.allowed(150)
    assert dt.DataType.UINT8.allowed(777) is False
    assert dt.DataType.UINT16.allowed(14500)
    assert dt.DataType.UINT16.allowed(-1) is False
    assert dt.DataType.UINT32.allowed(2 ** 10)
    assert dt.DataType.UINT32.allowed(-1) is False
    # signed integer ranges
    assert dt.DataType.INT2.allowed(-1)
    assert dt.DataType.INT2.allowed(-10) is False
    assert dt.DataType.INT3.allowed(5) is False
    assert dt.DataType.INT3.allowed(-2)
    assert dt.DataType.INT4.allowed(15) is False
    assert dt.DataType.INT4.allowed(-5)
    assert dt.DataType.INT8.allowed(150) is False
    assert dt.DataType.INT8.allowed(-127)
    # non-integer floats are rejected even when in range
    assert dt.DataType.INT16.allowed(-1.04) is False
    assert dt.DataType.INT16.allowed(-7777)
    assert dt.DataType.INT32.allowed(7.77) is False
    assert dt.DataType.INT32.allowed(-5)
    assert dt.DataType.INT32.allowed(5)
f1375a730648c93a44d9bd01738dc018485aa9f2 | 842 | py | Python | gbasm/core/constants.py | MrCairo/pygbasm | 96acf1f5dec168ff084b1095db8ca372fc17e7be | [
"MIT"
] | 2 | 2019-05-08T00:29:09.000Z | 2019-05-08T00:29:12.000Z | gbasm/core/constants.py | MrCairo/pygbasm | 96acf1f5dec168ff084b1095db8ca372fc17e7be | [
"MIT"
] | null | null | null | gbasm/core/constants.py | MrCairo/pygbasm | 96acf1f5dec168ff084b1095db8ca372fc17e7be | [
"MIT"
] | null | null | null | """
Commonly used constants
"""
from enum import Enum, IntEnum
# Short string tags used throughout the assembler's tokenized output.
DIR = "directive"
TOK = "tokens"
EXT = "extra"
NODE = "node" # Represents an internal tokenized node.
MULT = "MULTIPLE"
EQU = "EQU"
LBL = "LABEL"
INST = "INSTRUCTION"
STOR = "STORAGE"
SEC = "SECTION"
BAD = "INVALID"
class NodeType(Enum):
    """Enumeration of node categories; see NODE_TYPES for the string tags."""
    NODE = 1
    EQU = 2
    LBL = 3
    INST = 4
    STOR = 5
    SEC = 6
    DIR = 7
# Maps each NodeType member to its string tag defined above.
NODE_TYPES = {
    NodeType.NODE: NODE,
    NodeType.EQU: EQU,
    NodeType.LBL: LBL,
    NodeType.INST: INST,
    NodeType.STOR: STOR,
    NodeType.SEC: SEC,
    NodeType.DIR: DIR
}

# Assembler directives recognized by the parser.
DIRECTIVES = [
    "EQU", "SET", "SECTION", "EQUS", "MACRO", "ENDM", "EXPORT", "GLOBAL",
    "PURGE", "INCBIN", "UNION", "NEXTU", "ENDU"
]

# Directives that reserve or define storage.
STORAGE_DIRECTIVES = ["DS", "DB", "DW", "DL"]
class Lexical(IntEnum):
warning = 1
syntax_error = 2
unknown_error
| 17.183673 | 73 | 0.595012 |
4773858c1cb33b9b79478f4869531aee084fab58 | 6,974 | py | Python | histoqc/AnnotationModule.py | kaczmarj/HistoQC | d31c11781f8ee86be64a08fd5200c6ef04b71365 | [
"BSD-3-Clause-Clear"
] | 140 | 2018-05-25T04:04:04.000Z | 2022-03-30T15:38:24.000Z | histoqc/AnnotationModule.py | kaczmarj/HistoQC | d31c11781f8ee86be64a08fd5200c6ef04b71365 | [
"BSD-3-Clause-Clear"
] | 300 | 2018-05-21T18:38:58.000Z | 2022-03-09T14:19:33.000Z | histoqc/AnnotationModule.py | kaczmarj/HistoQC | d31c11781f8ee86be64a08fd5200c6ef04b71365 | [
"BSD-3-Clause-Clear"
] | 66 | 2018-05-24T22:13:36.000Z | 2022-03-28T16:57:14.000Z | import logging
from histoqc.BaseImage import printMaskHelper
from skimage import io, img_as_ubyte
from skimage.draw import polygon
import os
from pathlib import PurePosixPath, Path
import json
import xml.etree.ElementTree as ET
import numpy as np
def get_points_from_xml(xml_fname):
    """
    Parses an ImageScope-format annotation XML file into lists of vertices.

    The file must follow the ImageScope layout, whose minimal shape is::

        <Annotations>
          <Annotation>
            <Regions>
              <Region>
                <Vertices>
                  <Vertex X="56657.4765625" Y="78147.3984375"/>
                </Vertices>
              </Region>
            </Regions>
          </Annotation>
        </Annotations>

    Additional <Annotation> or <Region> blocks simply contribute more vertex
    lists; there is no functional difference between the two.

    :param xml_fname: Path to the annotation XML file.
    :return: List of vertex lists, one per <Vertices> element, with each
        vertex as an (int(x), int(y)) tuple (coordinates are truncated).
    """
    root = ET.parse(xml_fname).getroot()
    point_sets = []
    # The path expression matches exactly the Annotation/Regions/Region/
    # Vertices nesting that the nested findall loops walked before.
    for vertices in root.findall('Annotation/Regions/Region/Vertices'):
        point_sets.append([
            (int(float(vertex.get('X'))), int(float(vertex.get('Y'))))
            for vertex in vertices.findall('Vertex')
        ])
    return point_sets
def get_points_from_geojson(s, fname):
    """
    Parses a typical GeoJSON file containing Polygon, MultiPolygon, or
    LineString features (the preferred serialization for QuPath annotations;
    see https://qupath.readthedocs.io/en/latest/docs/scripting/overview.html#serialization-json).

    Unsupported geometry types are skipped with a warning appended to
    s["warnings"].

    :param s: BaseImage-like state dict; reads "filename" and appends to
        "warnings".
    :param fname: Path to the GeoJSON file.
    :return: List of vertex lists, each vertex an (x, y) tuple.
    """
    with open(fname) as fh:
        annotations = json.load(fh)

    def as_tuples(coords):
        # Convert [[x, y], ...] coordinate pairs into (x, y) tuples.
        return [(c[0], c[1]) for c in coords]

    point_sets = []
    for annot in annotations:
        geometry = annot['geometry']
        geom_type = geometry['type']
        coords = geometry['coordinates']
        if geom_type == 'MultiPolygon':
            for poly in coords:
                for ring in poly:
                    point_sets.append(as_tuples(ring))
        elif geom_type == 'Polygon':
            for ring in coords:
                point_sets.append(as_tuples(ring))
        elif geom_type == 'LineString':
            point_sets.append(as_tuples(coords))
        else:
            msg = f"Skipping {geom_type} geometry in {fname}. Only Polygon, MultiPolygon, and LineString annotation types can be used."
            logging.warning(s['filename'] + ' - ' + msg)
            s["warnings"].append(msg)
    return point_sets
def resize_points(points, resize_factor):
    """Scale every vertex by resize_factor, truncating to int.

    Mutates ``points`` in place (each vertex list is replaced with a scaled
    copy) and returns a shallow copy of the outer list.
    """
    for idx in range(len(points)):
        scaled = []
        for p in points[idx]:
            scaled.append((int(p[0] * resize_factor), int(p[1] * resize_factor)))
        points[idx] = scaled
    return points.copy()
def mask_out_annotation(s, point_sets):
    """Rasterize annotation polygons into a binary uint8 mask.

    Vertex coordinates arrive at base-image resolution and are rescaled to
    the working mask resolution first (note: resize_points mutates
    point_sets in place).

    :param s: BaseImage-like state dict; reads "img_mask_use" (for the
        output shape) and "image_base_size".
    :param point_sets: List of (x, y) vertex lists at base resolution.
    :return: uint8 numpy array, 1 inside any annotated polygon, 0 elsewhere.
    """
    resize_factor = np.shape(s["img_mask_use"])[1] / s["image_base_size"][0]
    point_sets = resize_points(point_sets, resize_factor)
    mask = np.zeros((np.shape(s["img_mask_use"])[0],np.shape(s["img_mask_use"])[1]),dtype=np.uint8)
    for pointSet in point_sets:
        poly = np.asarray(pointSet)
        # skimage.draw.polygon takes (row, col) order, hence (y, x) here.
        rr, cc = polygon(poly[:,1],poly[:,0],mask.shape)
        mask[rr,cc] = 1
    return mask
def xmlMask(s, params):
    """Restrict s["img_mask_use"] to the regions annotated in an ImageScope XML file.

    Looks for <stem><xml_suffix>.xml under params["xml_filepath"] (falling
    back to the slide's own directory), rasterizes the annotation regions,
    saves the mask image, and ANDs it into the usable-tissue mask. Records a
    warning and returns early when the annotation file does not exist or the
    resulting tissue mask is empty.
    """
    logging.info(f"{s['filename']} - \txmlMask")
    # (Removed an unused `mask = s["img_mask_use"]` local from the original.)
    xml_basepath = params.get("xml_filepath",None)
    xml_suffix = params.get("xml_suffix", "")
    # Default to the image's own directory when no path was configured.
    if not xml_basepath:
        xml_basepath = s["dir"]
    xml_fname = xml_basepath + os.sep + PurePosixPath(s['filename']).stem + xml_suffix + '.xml'
    if not Path(xml_fname).is_file():
        msg = f"Annotation file {xml_fname} does not exist. Skipping."
        logging.warning(f"{s['filename']} - {msg}")
        s["warnings"].append(msg)
        return
    logging.info(f"{s['filename']} - \tusing {xml_fname}")
    point_sets = get_points_from_xml(xml_fname)
    annotationMask = mask_out_annotation(s, point_sets) > 0
    io.imsave(s["outdir"] + os.sep + s["filename"] + "_xmlMask.png", img_as_ubyte(annotationMask))
    prev_mask = s["img_mask_use"]
    s["img_mask_use"] = prev_mask & annotationMask
    s.addToPrintList("xmlMask",
                     printMaskHelper(params.get("mask_statistics", s["mask_statistics"]), prev_mask, s["img_mask_use"]))
    if len(s["img_mask_use"].nonzero()[0]) == 0:  # add warning in case the final tissue is empty
        logging.warning(
            f"{s['filename']} - After AnnotationModule.xmlMask NO tissue remains detectable! Downstream modules likely to be incorrect/fail")
        s["warnings"].append(
            f"After AnnotationModule.xmlMask NO tissue remains detectable! Downstream modules likely to be incorrect/fail")
    return
def geoJSONMask(s, params):
    """Restrict s["img_mask_use"] to the regions annotated in a GeoJSON file.

    Looks for <stem><geojson_suffix>.json under params["geojson_filepath"]
    (falling back to the slide's own directory), rasterizes the annotation
    geometries, saves the mask image, and ANDs it into the usable-tissue
    mask. Records a warning and returns early when the annotation file does
    not exist or the resulting tissue mask is empty.
    """
    logging.info(f"{s['filename']} - \tgeoJSONMask")
    # (Removed an unused `mask = s["img_mask_use"]` local from the original.)
    geojson_basepath = params.get("geojson_filepath",None)
    geojson_suffix = params.get("geojson_suffix", "")
    # Default to the image's own directory when no path was configured.
    if not geojson_basepath:
        geojson_basepath = s["dir"]
    fname = geojson_basepath + os.sep + PurePosixPath(s['filename']).stem + geojson_suffix + '.json'
    if not Path(fname).is_file():
        msg = f"Annotation file {fname} does not exist. Skipping."
        logging.warning(f"{s['filename']} - {msg}")
        s["warnings"].append(msg)
        return
    logging.info(f"{s['filename']} - \tusing {fname}")
    point_sets = get_points_from_geojson(s, fname)
    annotationMask = mask_out_annotation(s, point_sets) > 0
    io.imsave(s["outdir"] + os.sep + s["filename"] + "_geoJSONMask.png", img_as_ubyte(annotationMask))
    prev_mask = s["img_mask_use"]
    s["img_mask_use"] = prev_mask & annotationMask
    s.addToPrintList("geoJSONMask",
                     printMaskHelper(params.get("mask_statistics", s["mask_statistics"]), prev_mask, s["img_mask_use"]))
    if len(s["img_mask_use"].nonzero()[0]) == 0:  # add warning in case the final tissue is empty
        logging.warning(
            f"{s['filename']} - After AnnotationModule.geoJSONMask NO tissue remains detectable! Downstream modules likely to be incorrect/fail")
        s["warnings"].append(
            f"After AnnotationModule.geoJSONMask NO tissue remains detectable! Downstream modules likely to be incorrect/fail")
    return
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.