| column | dtype | per-column stats |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 972 |
| max_stars_repo_name | string | length 6 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 3 to 972 |
| max_issues_repo_name | string | length 6 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 116k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 3 to 972 |
| max_forks_repo_name | string | length 6 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |
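The records below follow this schema. As a minimal sketch, assuming the rows have been loaded into a pandas DataFrame (the `rows` variable and the `top_python_files` helper are hypothetical names, not part of the export), they could be filtered like this:

```python
import pandas as pd


def top_python_files(rows: pd.DataFrame, min_stars: int = 1) -> pd.DataFrame:
    """Return Python rows with at least `min_stars` stars, best-starred first."""
    subset = rows[
        (rows["ext"] == "py") & (rows["max_stars_count"].fillna(0) >= min_stars)
    ]
    return subset.sort_values("max_stars_count", ascending=False)[
        ["hexsha", "max_stars_repo_name", "max_stars_repo_path", "max_stars_count", "size"]
    ]
```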
hexsha: 898072de5f9e47e21b41b92c73b1d8e7e7c47ece | size: 22,046 | ext: py | lang: Python
max_stars: homeassistant/components/mqtt/mixins.py | CantankerousBullMoose/core @ 2178e27fb4c62271d4872e16838331defed82226 | licenses: ["Apache-2.0"] | count: 1 | events: 2021-03-12T20:46:40.000Z → 2021-03-12T20:46:40.000Z
max_issues: homeassistant/components/mqtt/mixins.py | CantankerousBullMoose/core @ 2178e27fb4c62271d4872e16838331defed82226 | licenses: ["Apache-2.0"] | count: 46 | events: 2020-12-18T07:15:15.000Z → 2022-03-31T06:04:00.000Z
max_forks: homeassistant/components/mqtt/mixins.py | CantankerousBullMoose/core @ 2178e27fb4c62271d4872e16838331defed82226 | licenses: ["Apache-2.0"] | count: 2 | events: 2021-03-22T21:42:48.000Z → 2021-04-12T12:26:39.000Z
content:
"""MQTT component mixins and helpers."""
from abc import abstractmethod
import json
import logging
from typing import Optional
import voluptuous as vol
from homeassistant.const import CONF_DEVICE, CONF_ICON, CONF_NAME, CONF_UNIQUE_ID
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import ConfigType
from . import CONF_TOPIC, DATA_MQTT, debug_info, publish, subscription
from .const import (
ATTR_DISCOVERY_HASH,
ATTR_DISCOVERY_PAYLOAD,
ATTR_DISCOVERY_TOPIC,
CONF_QOS,
DEFAULT_PAYLOAD_AVAILABLE,
DEFAULT_PAYLOAD_NOT_AVAILABLE,
DOMAIN,
MQTT_CONNECTED,
MQTT_DISCONNECTED,
)
from .debug_info import log_messages
from .discovery import (
MQTT_DISCOVERY_DONE,
MQTT_DISCOVERY_NEW,
MQTT_DISCOVERY_UPDATED,
clear_discovery_hash,
set_discovery_hash,
)
from .models import Message
from .subscription import async_subscribe_topics, async_unsubscribe_topics
from .util import valid_subscribe_topic
_LOGGER = logging.getLogger(__name__)
AVAILABILITY_ALL = "all"
AVAILABILITY_ANY = "any"
AVAILABILITY_LATEST = "latest"
AVAILABILITY_MODES = [AVAILABILITY_ALL, AVAILABILITY_ANY, AVAILABILITY_LATEST]
CONF_AVAILABILITY = "availability"
CONF_AVAILABILITY_MODE = "availability_mode"
CONF_AVAILABILITY_TOPIC = "availability_topic"
CONF_PAYLOAD_AVAILABLE = "payload_available"
CONF_PAYLOAD_NOT_AVAILABLE = "payload_not_available"
CONF_JSON_ATTRS_TOPIC = "json_attributes_topic"
CONF_JSON_ATTRS_TEMPLATE = "json_attributes_template"
CONF_IDENTIFIERS = "identifiers"
CONF_CONNECTIONS = "connections"
CONF_MANUFACTURER = "manufacturer"
CONF_MODEL = "model"
CONF_SW_VERSION = "sw_version"
CONF_VIA_DEVICE = "via_device"
CONF_DEPRECATED_VIA_HUB = "via_hub"
MQTT_AVAILABILITY_SINGLE_SCHEMA = vol.Schema(
{
vol.Exclusive(CONF_AVAILABILITY_TOPIC, "availability"): valid_subscribe_topic,
vol.Optional(
CONF_PAYLOAD_AVAILABLE, default=DEFAULT_PAYLOAD_AVAILABLE
): cv.string,
vol.Optional(
CONF_PAYLOAD_NOT_AVAILABLE, default=DEFAULT_PAYLOAD_NOT_AVAILABLE
): cv.string,
}
)
MQTT_AVAILABILITY_LIST_SCHEMA = vol.Schema(
{
vol.Optional(CONF_AVAILABILITY_MODE, default=AVAILABILITY_LATEST): vol.All(
cv.string, vol.In(AVAILABILITY_MODES)
),
vol.Exclusive(CONF_AVAILABILITY, "availability"): vol.All(
cv.ensure_list,
[
{
vol.Optional(CONF_TOPIC): valid_subscribe_topic,
vol.Optional(
CONF_PAYLOAD_AVAILABLE, default=DEFAULT_PAYLOAD_AVAILABLE
): cv.string,
vol.Optional(
CONF_PAYLOAD_NOT_AVAILABLE,
default=DEFAULT_PAYLOAD_NOT_AVAILABLE,
): cv.string,
}
],
),
}
)
MQTT_AVAILABILITY_SCHEMA = MQTT_AVAILABILITY_SINGLE_SCHEMA.extend(
MQTT_AVAILABILITY_LIST_SCHEMA.schema
)
def validate_device_has_at_least_one_identifier(value: ConfigType) -> ConfigType:
"""Validate that a device info entry has at least one identifying value."""
if value.get(CONF_IDENTIFIERS) or value.get(CONF_CONNECTIONS):
return value
raise vol.Invalid(
"Device must have at least one identifying value in "
"'identifiers' and/or 'connections'"
)
MQTT_ENTITY_DEVICE_INFO_SCHEMA = vol.All(
cv.deprecated(CONF_DEPRECATED_VIA_HUB, CONF_VIA_DEVICE),
vol.Schema(
{
vol.Optional(CONF_IDENTIFIERS, default=list): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_CONNECTIONS, default=list): vol.All(
cv.ensure_list, [vol.All(vol.Length(2), [cv.string])]
),
vol.Optional(CONF_MANUFACTURER): cv.string,
vol.Optional(CONF_MODEL): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_SW_VERSION): cv.string,
vol.Optional(CONF_VIA_DEVICE): cv.string,
}
),
validate_device_has_at_least_one_identifier,
)
MQTT_ENTITY_COMMON_SCHEMA = MQTT_AVAILABILITY_SCHEMA.extend(
{
vol.Optional(CONF_DEVICE): MQTT_ENTITY_DEVICE_INFO_SCHEMA,
vol.Optional(CONF_ICON): cv.icon,
vol.Optional(CONF_JSON_ATTRS_TOPIC): valid_subscribe_topic,
vol.Optional(CONF_JSON_ATTRS_TEMPLATE): cv.template,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
)
async def async_setup_entry_helper(hass, domain, async_setup, schema):
"""Set up entity, automation or tag creation dynamically through MQTT discovery."""
async def async_discover(discovery_payload):
"""Discover and add an MQTT entity, automation or tag."""
discovery_data = discovery_payload.discovery_data
try:
config = schema(discovery_payload)
await async_setup(config, discovery_data=discovery_data)
except Exception:
discovery_hash = discovery_data[ATTR_DISCOVERY_HASH]
clear_discovery_hash(hass, discovery_hash)
async_dispatcher_send(
hass, MQTT_DISCOVERY_DONE.format(discovery_hash), None
)
raise
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(domain, "mqtt"), async_discover
)
class MqttAttributes(Entity):
"""Mixin used for platforms that support JSON attributes."""
def __init__(self, config: dict) -> None:
"""Initialize the JSON attributes mixin."""
self._attributes = None
self._attributes_sub_state = None
self._attributes_config = config
async def async_added_to_hass(self) -> None:
"""Subscribe MQTT events."""
await super().async_added_to_hass()
await self._attributes_subscribe_topics()
async def attributes_discovery_update(self, config: dict):
"""Handle updated discovery message."""
self._attributes_config = config
await self._attributes_subscribe_topics()
async def _attributes_subscribe_topics(self):
"""(Re)Subscribe to topics."""
attr_tpl = self._attributes_config.get(CONF_JSON_ATTRS_TEMPLATE)
if attr_tpl is not None:
attr_tpl.hass = self.hass
@callback
@log_messages(self.hass, self.entity_id)
def attributes_message_received(msg: Message) -> None:
try:
payload = msg.payload
if attr_tpl is not None:
payload = attr_tpl.async_render_with_possible_json_value(payload)
json_dict = json.loads(payload)
if isinstance(json_dict, dict):
self._attributes = json_dict
self.async_write_ha_state()
else:
_LOGGER.warning("JSON result was not a dictionary")
self._attributes = None
except ValueError:
_LOGGER.warning("Erroneous JSON: %s", payload)
self._attributes = None
self._attributes_sub_state = await async_subscribe_topics(
self.hass,
self._attributes_sub_state,
{
CONF_JSON_ATTRS_TOPIC: {
"topic": self._attributes_config.get(CONF_JSON_ATTRS_TOPIC),
"msg_callback": attributes_message_received,
"qos": self._attributes_config.get(CONF_QOS),
}
},
)
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._attributes_sub_state = await async_unsubscribe_topics(
self.hass, self._attributes_sub_state
)
@property
def extra_state_attributes(self):
"""Return the state attributes."""
return self._attributes
class MqttAvailability(Entity):
"""Mixin used for platforms that report availability."""
def __init__(self, config: dict) -> None:
"""Initialize the availability mixin."""
self._availability_sub_state = None
self._available = {}
self._available_latest = False
self._availability_setup_from_config(config)
async def async_added_to_hass(self) -> None:
"""Subscribe MQTT events."""
await super().async_added_to_hass()
await self._availability_subscribe_topics()
self.async_on_remove(
async_dispatcher_connect(self.hass, MQTT_CONNECTED, self.async_mqtt_connect)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass, MQTT_DISCONNECTED, self.async_mqtt_connect
)
)
async def availability_discovery_update(self, config: dict):
"""Handle updated discovery message."""
self._availability_setup_from_config(config)
await self._availability_subscribe_topics()
def _availability_setup_from_config(self, config):
"""(Re)Setup."""
self._avail_topics = {}
if CONF_AVAILABILITY_TOPIC in config:
self._avail_topics[config[CONF_AVAILABILITY_TOPIC]] = {
CONF_PAYLOAD_AVAILABLE: config[CONF_PAYLOAD_AVAILABLE],
CONF_PAYLOAD_NOT_AVAILABLE: config[CONF_PAYLOAD_NOT_AVAILABLE],
}
if CONF_AVAILABILITY in config:
for avail in config[CONF_AVAILABILITY]:
self._avail_topics[avail[CONF_TOPIC]] = {
CONF_PAYLOAD_AVAILABLE: avail[CONF_PAYLOAD_AVAILABLE],
CONF_PAYLOAD_NOT_AVAILABLE: avail[CONF_PAYLOAD_NOT_AVAILABLE],
}
self._avail_config = config
async def _availability_subscribe_topics(self):
"""(Re)Subscribe to topics."""
@callback
@log_messages(self.hass, self.entity_id)
def availability_message_received(msg: Message) -> None:
"""Handle a new received MQTT availability message."""
topic = msg.topic
if msg.payload == self._avail_topics[topic][CONF_PAYLOAD_AVAILABLE]:
self._available[topic] = True
self._available_latest = True
elif msg.payload == self._avail_topics[topic][CONF_PAYLOAD_NOT_AVAILABLE]:
self._available[topic] = False
self._available_latest = False
self.async_write_ha_state()
self._available = {topic: False for topic in self._avail_topics}
topics = {
f"availability_{topic}": {
"topic": topic,
"msg_callback": availability_message_received,
"qos": self._avail_config[CONF_QOS],
}
for topic in self._avail_topics
}
self._availability_sub_state = await async_subscribe_topics(
self.hass,
self._availability_sub_state,
topics,
)
@callback
def async_mqtt_connect(self):
"""Update state on connection/disconnection to MQTT broker."""
if not self.hass.is_stopping:
self.async_write_ha_state()
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._availability_sub_state = await async_unsubscribe_topics(
self.hass, self._availability_sub_state
)
@property
def available(self) -> bool:
"""Return if the device is available."""
if not self.hass.data[DATA_MQTT].connected and not self.hass.is_stopping:
return False
if not self._avail_topics:
return True
if self._avail_config[CONF_AVAILABILITY_MODE] == AVAILABILITY_ALL:
return all(self._available.values())
if self._avail_config[CONF_AVAILABILITY_MODE] == AVAILABILITY_ANY:
return any(self._available.values())
return self._available_latest
async def cleanup_device_registry(hass, device_id):
"""Remove device registry entry if there are no remaining entities or triggers."""
# Local import to avoid circular dependencies
# pylint: disable=import-outside-toplevel
from . import device_trigger, tag
device_registry = await hass.helpers.device_registry.async_get_registry()
entity_registry = await hass.helpers.entity_registry.async_get_registry()
if (
device_id
and not hass.helpers.entity_registry.async_entries_for_device(
entity_registry, device_id, include_disabled_entities=True
)
and not await device_trigger.async_get_triggers(hass, device_id)
and not tag.async_has_tags(hass, device_id)
):
device_registry.async_remove_device(device_id)
class MqttDiscoveryUpdate(Entity):
"""Mixin used to handle updated discovery message."""
def __init__(self, discovery_data, discovery_update=None) -> None:
"""Initialize the discovery update mixin."""
self._discovery_data = discovery_data
self._discovery_update = discovery_update
self._remove_signal = None
self._removed_from_hass = False
async def async_added_to_hass(self) -> None:
"""Subscribe to discovery updates."""
await super().async_added_to_hass()
self._removed_from_hass = False
discovery_hash = (
self._discovery_data[ATTR_DISCOVERY_HASH] if self._discovery_data else None
)
async def _async_remove_state_and_registry_entry(self) -> None:
"""Remove entity's state and entity registry entry.
Remove entity from entity registry if it is registered, this also removes the state.
If the entity is not in the entity registry, just remove the state.
"""
entity_registry = (
await self.hass.helpers.entity_registry.async_get_registry()
)
if entity_registry.async_is_registered(self.entity_id):
entity_entry = entity_registry.async_get(self.entity_id)
entity_registry.async_remove(self.entity_id)
await cleanup_device_registry(self.hass, entity_entry.device_id)
else:
await self.async_remove(force_remove=True)
async def discovery_callback(payload):
"""Handle discovery update."""
_LOGGER.info(
"Got update for entity with hash: %s '%s'",
discovery_hash,
payload,
)
old_payload = self._discovery_data[ATTR_DISCOVERY_PAYLOAD]
debug_info.update_entity_discovery_data(self.hass, payload, self.entity_id)
if not payload:
# Empty payload: Remove component
_LOGGER.info("Removing component: %s", self.entity_id)
self._cleanup_discovery_on_remove()
await _async_remove_state_and_registry_entry(self)
elif self._discovery_update:
if old_payload != self._discovery_data[ATTR_DISCOVERY_PAYLOAD]:
# Non-empty, changed payload: Notify component
_LOGGER.info("Updating component: %s", self.entity_id)
await self._discovery_update(payload)
else:
# Non-empty, unchanged payload: Ignore to avoid changing states
_LOGGER.info("Ignoring unchanged update for: %s", self.entity_id)
async_dispatcher_send(
self.hass, MQTT_DISCOVERY_DONE.format(discovery_hash), None
)
if discovery_hash:
debug_info.add_entity_discovery_data(
self.hass, self._discovery_data, self.entity_id
)
# Set in case the entity has been removed and is re-added, for example when changing entity_id
set_discovery_hash(self.hass, discovery_hash)
self._remove_signal = async_dispatcher_connect(
self.hass,
MQTT_DISCOVERY_UPDATED.format(discovery_hash),
discovery_callback,
)
async_dispatcher_send(
self.hass, MQTT_DISCOVERY_DONE.format(discovery_hash), None
)
async def async_removed_from_registry(self) -> None:
"""Clear retained discovery topic in broker."""
if not self._removed_from_hass:
discovery_topic = self._discovery_data[ATTR_DISCOVERY_TOPIC]
publish(self.hass, discovery_topic, "", retain=True)
@callback
def add_to_platform_abort(self) -> None:
"""Abort adding an entity to a platform."""
if self._discovery_data:
discovery_hash = self._discovery_data[ATTR_DISCOVERY_HASH]
clear_discovery_hash(self.hass, discovery_hash)
async_dispatcher_send(
self.hass, MQTT_DISCOVERY_DONE.format(discovery_hash), None
)
super().add_to_platform_abort()
async def async_will_remove_from_hass(self) -> None:
"""Stop listening to signal and cleanup discovery data.."""
self._cleanup_discovery_on_remove()
def _cleanup_discovery_on_remove(self) -> None:
"""Stop listening to signal and cleanup discovery data."""
if self._discovery_data and not self._removed_from_hass:
debug_info.remove_entity_data(self.hass, self.entity_id)
clear_discovery_hash(self.hass, self._discovery_data[ATTR_DISCOVERY_HASH])
self._removed_from_hass = True
if self._remove_signal:
self._remove_signal()
self._remove_signal = None
def device_info_from_config(config):
"""Return a device description for device registry."""
if not config:
return None
info = {
"identifiers": {(DOMAIN, id_) for id_ in config[CONF_IDENTIFIERS]},
"connections": {tuple(x) for x in config[CONF_CONNECTIONS]},
}
if CONF_MANUFACTURER in config:
info["manufacturer"] = config[CONF_MANUFACTURER]
if CONF_MODEL in config:
info["model"] = config[CONF_MODEL]
if CONF_NAME in config:
info["name"] = config[CONF_NAME]
if CONF_SW_VERSION in config:
info["sw_version"] = config[CONF_SW_VERSION]
if CONF_VIA_DEVICE in config:
info["via_device"] = (DOMAIN, config[CONF_VIA_DEVICE])
return info
class MqttEntityDeviceInfo(Entity):
"""Mixin used for mqtt platforms that support the device registry."""
def __init__(self, device_config: Optional[ConfigType], config_entry=None) -> None:
"""Initialize the device mixin."""
self._device_config = device_config
self._config_entry = config_entry
async def device_info_discovery_update(self, config: dict):
"""Handle updated discovery message."""
self._device_config = config.get(CONF_DEVICE)
device_registry = await self.hass.helpers.device_registry.async_get_registry()
config_entry_id = self._config_entry.entry_id
device_info = self.device_info
if config_entry_id is not None and device_info is not None:
device_info["config_entry_id"] = config_entry_id
device_registry.async_get_or_create(**device_info)
@property
def device_info(self):
"""Return a device description for device registry."""
return device_info_from_config(self._device_config)
class MqttEntity(
MqttAttributes,
MqttAvailability,
MqttDiscoveryUpdate,
MqttEntityDeviceInfo,
):
"""Representation of an MQTT entity."""
def __init__(self, hass, config, config_entry, discovery_data):
"""Init the MQTT Entity."""
self.hass = hass
self._config = config
self._unique_id = config.get(CONF_UNIQUE_ID)
self._sub_state = None
# Load config
self._setup_from_config(self._config)
# Initialize mixin classes
MqttAttributes.__init__(self, config)
MqttAvailability.__init__(self, config)
MqttDiscoveryUpdate.__init__(self, discovery_data, self.discovery_update)
MqttEntityDeviceInfo.__init__(self, config.get(CONF_DEVICE), config_entry)
async def async_added_to_hass(self):
"""Subscribe mqtt events."""
await super().async_added_to_hass()
await self._subscribe_topics()
async def discovery_update(self, discovery_payload):
"""Handle updated discovery message."""
config = self.config_schema()(discovery_payload)
self._config = config
self._setup_from_config(self._config)
await self.attributes_discovery_update(config)
await self.availability_discovery_update(config)
await self.device_info_discovery_update(config)
await self._subscribe_topics()
self.async_write_ha_state()
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._sub_state = await subscription.async_unsubscribe_topics(
self.hass, self._sub_state
)
await MqttAttributes.async_will_remove_from_hass(self)
await MqttAvailability.async_will_remove_from_hass(self)
await MqttDiscoveryUpdate.async_will_remove_from_hass(self)
@staticmethod
@abstractmethod
def config_schema():
"""Return the config schema."""
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
@abstractmethod
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
@property
def icon(self):
"""Return icon of the entity if any."""
return self._config.get(CONF_ICON)
@property
def name(self):
"""Return the name of the device if any."""
return self._config.get(CONF_NAME)
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
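As a rough illustration of how the abstract hooks above fit together, here is a minimal, hypothetical subclass of MqttEntity. The `HypotheticalMqttSensor` name, the `state_topic` key, and the `_state` attribute are illustrative only, and the sketch assumes the module context shown above (its imports and schemas):

```python
class HypotheticalMqttSensor(MqttEntity):
    """Minimal sketch of a platform entity built on MqttEntity."""

    @staticmethod
    def config_schema():
        # Extend the shared entity schema with one required topic (illustrative key).
        return MQTT_ENTITY_COMMON_SCHEMA.extend(
            {vol.Required("state_topic"): valid_subscribe_topic}
        )

    async def _subscribe_topics(self):
        @callback
        def message_received(msg: Message) -> None:
            # Store the raw payload and push the new state to Home Assistant.
            self._state = msg.payload
            self.async_write_ha_state()

        self._sub_state = await subscription.async_subscribe_topics(
            self.hass,
            self._sub_state,
            {
                "state_topic": {
                    "topic": self._config["state_topic"],
                    "msg_callback": message_received,
                    "qos": self._config.get(CONF_QOS),
                }
            },
        )
```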
avg_line_length: 36.621262 | max_line_length: 106 | alphanum_fraction: 0.658623
hexsha: 613bf7fe2495ca54b58358a5a4925941fdad71d2 | size: 65 | ext: py | lang: Python
max_stars: LeitorPython.py | amrals/Python @ 711194814d9a4862abb18dd60814870555085d5d | licenses: ["MIT"] | count: null | events: null
max_issues: LeitorPython.py | amrals/Python @ 711194814d9a4862abb18dd60814870555085d5d | licenses: ["MIT"] | count: null | events: null
max_forks: LeitorPython.py | amrals/Python @ 711194814d9a4862abb18dd60814870555085d5d | licenses: ["MIT"] | count: null | events: null
content:
with open(r"C:\Matheus\Arquivos/oi.txt", "r") as arq2:
    print(arq2.read())
avg_line_length: 21.666667 | max_line_length: 45 | alphanum_fraction: 0.661538
hexsha: d872011661142c20ba20eced2cb7c4c0d63041ef | size: 235 | ext: py | lang: Python
max_stars: models/news_model.py | ismdeep/jxust-news-monitor @ a5dc6ba689a21e85e9e44847dbbd4d6ed02a3d34 | licenses: ["MIT"] | count: null | events: null
max_issues: models/news_model.py | ismdeep/jxust-news-monitor @ a5dc6ba689a21e85e9e44847dbbd4d6ed02a3d34 | licenses: ["MIT"] | count: null | events: null
max_forks: models/news_model.py | ismdeep/jxust-news-monitor @ a5dc6ba689a21e85e9e44847dbbd4d6ed02a3d34 | licenses: ["MIT"] | count: null | events: null
content:
class NewsModel:
title = None
url = None
def __init__(self, __title__, __url__):
self.title = __title__
self.url = __url__
def __str__(self):
return '''["%s", "%s"]''' % (self.title, self.url)
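A brief, hypothetical usage of the NewsModel class above (the title and URL values are placeholders):

```python
# Build a news item and render it with the class's __str__ format.
news = NewsModel("Campus notice", "https://example.com/news/1")
print(news.title)  # Campus notice
print(news)        # ["Campus notice", "https://example.com/news/1"]
```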
avg_line_length: 21.363636 | max_line_length: 58 | alphanum_fraction: 0.557447
hexsha: 4adb038433ffab1ba1473d0365e278bbd5a2edff | size: 1,891 | ext: py | lang: Python
max_stars: runners/mlcommons_box_ssh/mlcommons_box_ssh/__main__.py | vibhatha/mlbox @ f76aa0043175988109d020be42dad5f32003190f | licenses: ["Apache-2.0"] | count: 3 | events: 2021-02-06T06:47:32.000Z → 2021-08-11T10:05:29.000Z
max_issues: runners/mlcommons_box_ssh/mlcommons_box_ssh/__main__.py | vibhatha/mlbox @ f76aa0043175988109d020be42dad5f32003190f | licenses: ["Apache-2.0"] | count: null | events: null
max_forks: runners/mlcommons_box_ssh/mlcommons_box_ssh/__main__.py | vibhatha/mlbox @ f76aa0043175988109d020be42dad5f32003190f | licenses: ["Apache-2.0"] | count: null | events: null
content:
import os
import click
from mlcommons_box import parse # Do not remove (it registers schemas on import)
from mlcommons_box.common import mlbox_metadata
from mlcommons_box_ssh import ssh_metadata
from mlcommons_box_ssh.ssh_run import SSHRun
@click.group(name='mlcommons_box_ssh')
def cli():
"""
MLCommons-Box SSH Runner runs boxes (packaged Machine Learning (ML) workloads) in the remote environment.
"""
pass
@cli.command(name='configure', help='Configure remote environment for MLCommons-Box ML workload.')
@click.option('--mlbox', required=True, type=click.Path(exists=True), help='Path to MLBox directory.')
@click.option('--platform', required=True, type=click.Path(exists=True), help='Path to MLBox Platform definition file.')
def configure(mlbox: str, platform: str):
mlbox: mlbox_metadata.MLBox = mlbox_metadata.MLBox(path=mlbox)
mlbox.platform = ssh_metadata.Platform(path=platform, mlbox=mlbox)
print(mlbox)
runner = SSHRun(mlbox)
runner.configure()
@cli.command(name='run', help='Run MLCommons-Box ML workload in the remote environment.')
@click.option('--mlbox', required=True, type=click.Path(exists=True), help='Path to MLBox directory.')
@click.option('--platform', required=True, type=click.Path(exists=True), help='Path to MLBox Platform definition file.')
@click.option('--task', required=True, type=click.Path(exists=True), help='Path to MLBox Task definition file.')
def run(mlbox: str, platform: str, task: str):
mlbox: mlbox_metadata.MLBox = mlbox_metadata.MLBox(path=mlbox)
mlbox.platform = ssh_metadata.Platform(path=platform, mlbox=mlbox)
mlbox.invoke = mlbox_metadata.MLBoxInvoke(task)
mlbox.task = mlbox_metadata.MLBoxTask(os.path.join(mlbox.tasks_path, f'{mlbox.invoke.task_name}.yaml'))
print(mlbox)
runner = SSHRun(mlbox)
runner.run(task_file=task)
if __name__ == '__main__':
cli()
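A hedged smoke-test sketch for the click group above, using click's built-in test runner. The import path and the `--mlbox`/`--platform` paths are assumptions, and `click.Path(exists=True)` requires those paths to exist:

```python
from click.testing import CliRunner

from mlcommons_box_ssh.__main__ import cli  # assumed import path

runner = CliRunner()
result = runner.invoke(
    cli, ["configure", "--mlbox", "./my_mlbox", "--platform", "./ssh_platform.yaml"]
)
print(result.exit_code, result.output)
```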
avg_line_length: 41.108696 | max_line_length: 120 | alphanum_fraction: 0.742993
hexsha: c32dddbd6b2fcd643516ea430a4b02656a2ed988 | size: 9,897 | ext: py | lang: Python
max_stars: ndm/model_cnn12_mp_bn_att_a_w2t.py | oplatek/ndm @ d32bd9d685902d9da52b7e7abd286fb5d9c7274a | licenses: ["Apache-2.0"] | count: 2 | events: 2016-01-26T15:29:30.000Z → 2016-12-07T23:36:23.000Z
max_issues: ndm/model_cnn12_mp_bn_att_a_w2t.py | oplatek/ndm @ d32bd9d685902d9da52b7e7abd286fb5d9c7274a | licenses: ["Apache-2.0"] | count: 1 | events: 2018-05-10T08:03:32.000Z → 2018-05-10T08:03:32.000Z
max_forks: ndm/model_cnn12_mp_bn_att_a_w2t.py | oplatek/ndm @ d32bd9d685902d9da52b7e7abd286fb5d9c7274a | licenses: ["Apache-2.0"] | count: 1 | events: 2016-02-25T14:35:47.000Z → 2016-02-25T14:35:47.000Z
content:
#!/usr/bin/env python3
import sys
import tensorflow as tf
from model import ModelW2T
from tfx.bricks import embedding, dense_to_one_hot, linear, conv2d, multicolumn_embedding, \
glorot_mul, reduce_max, dropout, conv2d_bn, batch_norm_lin, pow_1, max_pool
class Model(ModelW2T):
def __init__(self, data, FLAGS):
super(Model, self).__init__(data, FLAGS)
database_column_embedding_size = 8
n_database_columns = len(data.database_columns)
conv_mul = 2
histories_embedding_size = 16
histories_vocabulary_length = len(data.idx2word_history)
histories_utterance_length = data.train_set['histories'].shape[2]
history_length = data.train_set['histories'].shape[1]
histories_arguments_embedding_size = 8
histories_arguments_vocabulary_length = len(data.idx2word_history_arguments)
n_histories_arguments = data.train_set['histories_arguments'].shape[1]
action_templates_vocabulary_length = len(data.idx2word_action_template)
with tf.name_scope('data'):
database = tf.Variable(data.database, name='database', trainable=False)
batch_histories = tf.Variable(data.batch_histories, name='histories', trainable=False)
batch_histories_arguments = tf.Variable(data.batch_histories_arguments, name='histories_arguments', trainable=False)
batch_actions_template = tf.Variable(data.batch_actions_template, name='actions',
trainable=False)
histories = tf.gather(batch_histories, self.batch_idx)
histories_arguments = tf.gather(batch_histories_arguments, self.batch_idx)
actions_template = tf.gather(batch_actions_template, self.batch_idx)
# inference model
with tf.name_scope('model'):
with tf.variable_scope("batch_size"):
batch_size = tf.shape(histories)[0]
database_embedding = multicolumn_embedding(
columns=database,
lengths=[len(i2w) for i2w in [data.database_idx2word[column] for column in data.database_columns]],
sizes=[database_column_embedding_size for column in data.database_columns],
# all columns have the same size
name='database_embedding'
)
histories_embedding = embedding(
input=histories,
length=histories_vocabulary_length,
size=histories_embedding_size,
name='histories_embedding'
)
histories_arguments_embedding = embedding(
input=histories_arguments,
length=histories_arguments_vocabulary_length,
size=histories_arguments_embedding_size,
name='histories_arguments_embedding'
)
with tf.name_scope("UtterancesEncoder"):
conv3 = histories_embedding
# conv3 = dropout(conv3, pow_1(self.dropout_keep_prob, 2))
conv3 = conv2d_bn(
input=conv3,
filter=[1, 3, conv3.size, conv3.size * conv_mul],
phase_train=self.phase_train,
name='conv_utt_size_3_layer_1'
)
encoded_utterances = reduce_max(conv3, [2], keep_dims=True, name='encoded_utterances')
with tf.name_scope("HistoryEncoder"):
conv3 = encoded_utterances
conv3 = dropout(conv3, pow_1(self.dropout_keep_prob, 2))
conv3 = conv2d_bn(
input=conv3,
filter=[3, 1, conv3.size, conv3.size * conv_mul],
phase_train=self.phase_train,
name='conv_hist_size_3_layer_1'
)
conv3 = max_pool(conv3, ksize=[1, 2, 1, 1], strides=[1, 2, 1, 1])
conv3 = dropout(conv3, pow_1(self.dropout_keep_prob, 2))
conv3 = conv2d_bn(
input=conv3,
filter=[3, 1, conv3.size, conv3.size * conv_mul],
phase_train=self.phase_train,
name='conv_hist_size_3_layer_2'
)
encoded_history = reduce_max(conv3, [1, 2], name='encoded_history')
# print(encoded_history)
with tf.name_scope("DatabaseAttention"):
histories_arguments_embedding = tf.reshape(
histories_arguments_embedding,
[-1, n_histories_arguments * histories_arguments_embedding_size],
name='histories_arguments_embedding'
)
# print(histories_arguments_embedding)
history_predicate = tf.concat(
1,
[encoded_history, histories_arguments_embedding],
name='history_predicate'
)
print(history_predicate)
att_W_nx = conv3.size + n_histories_arguments * histories_arguments_embedding_size
att_W_ny = n_database_columns * database_column_embedding_size
att_W = tf.get_variable(
name='attention_W',
shape=[att_W_nx, att_W_ny],
initializer=tf.random_uniform_initializer(
-glorot_mul(att_W_nx, att_W_ny),
glorot_mul(att_W_nx, att_W_ny)
),
)
hp_x_att_W = tf.matmul(history_predicate, att_W)
attention_scores = tf.matmul(hp_x_att_W, database_embedding, transpose_b=True)
attention = tf.nn.softmax(attention_scores, name="attention_softmax")
print(attention)
attention_max = tf.reduce_max(attention, reduction_indices=1, keep_dims=True)
attention_min = tf.reduce_min(attention, reduction_indices=1, keep_dims=True)
attention_mean = tf.reduce_mean(attention_scores, reduction_indices=1, keep_dims=True)
attention_feat = tf.concat(1, [attention_max, attention_mean, attention_min], name='attention_feat')
attention_feat_size = 3
print(attention_feat)
db_result = tf.matmul(attention, database_embedding, name='db_result')
db_result_size = att_W_ny
print(db_result)
with tf.name_scope("Decoder"):
second_to_last_user_utterance = encoded_utterances[:, history_length - 3, 0, :]
last_system_utterance = encoded_utterances[:, history_length - 2, 0, :]
last_user_utterance = encoded_utterances[:, history_length - 1, 0, :]
dialogue_state = tf.concat(
1,
[
encoded_history,
last_user_utterance,
last_system_utterance,
second_to_last_user_utterance,
attention_feat,
db_result
],
name='dialogue_state'
)
dialogue_state_size = conv3.size + \
3 * histories_embedding_size * conv_mul + \
attention_feat_size + \
db_result_size
activation = tf.nn.relu(dialogue_state)
activation = dropout(activation, self.dropout_keep_prob)
projection = linear(
input=activation,
input_size=dialogue_state_size,
output_size=dialogue_state_size,
name='linear_projection_1'
)
projection = batch_norm_lin(projection, dialogue_state_size, self.phase_train, name='linear_projection_1_bn')
activation = tf.nn.relu(projection)
activation = dropout(activation, self.dropout_keep_prob)
projection = linear(
input=activation,
input_size=dialogue_state_size,
output_size=dialogue_state_size,
name='linear_projection_2'
)
projection = batch_norm_lin(projection, dialogue_state_size, self.phase_train, name='linear_projection_2_bn')
activation = tf.nn.relu(projection)
activation = dropout(activation, self.dropout_keep_prob)
projection = linear(
input=activation,
input_size=dialogue_state_size,
output_size=action_templates_vocabulary_length,
name='linear_projection_3'
)
self.predictions = tf.nn.softmax(projection, name="predictions")
# print(self.predictions)
if FLAGS.print_variables:
for v in tf.trainable_variables():
print(v.name)
with tf.name_scope('loss'):
one_hot_labels = dense_to_one_hot(actions_template, action_templates_vocabulary_length)
self.loss = tf.reduce_mean(- one_hot_labels * tf.log(tf.clip_by_value(self.predictions, 1e-10, 1.0)), name='loss')
tf.scalar_summary('loss', self.loss)
with tf.name_scope('accuracy'):
correct_prediction = tf.equal(tf.argmax(one_hot_labels, 1), tf.argmax(self.predictions, 1))
self.accuracy = tf.reduce_mean(tf.cast(correct_prediction, 'float'))
tf.scalar_summary('accuracy', self.accuracy)
avg_line_length: 47.128571 | max_line_length: 128 | alphanum_fraction: 0.565929
hexsha: 9c345f3f0902c8558da5b28d6f3c02dfc8a73732 | size: 1,306 | ext: py | lang: Python
max_stars: py/g1/http/http2_servers/g1/http/http2_servers/__init__.py | clchiou/garage @ 446ff34f86cdbd114b09b643da44988cf5d027a3 | licenses: ["MIT"] | count: 3 | events: 2016-01-04T06:28:52.000Z → 2020-09-20T13:18:40.000Z
max_issues: py/g1/http/http2_servers/g1/http/http2_servers/__init__.py | clchiou/garage @ 446ff34f86cdbd114b09b643da44988cf5d027a3 | licenses: ["MIT"] | count: null | events: null
max_forks: py/g1/http/http2_servers/g1/http/http2_servers/__init__.py | clchiou/garage @ 446ff34f86cdbd114b09b643da44988cf5d027a3 | licenses: ["MIT"] | count: null | events: null
content:
__all__ = [
'HttpServer',
]
import logging
import socket
import ssl
from . import nghttp2 as ng
from . import wsgi
logging.getLogger(__name__).addHandler(logging.NullHandler())
VERSION = '%s/nghttp2=%s' % (
__name__,
# pylint: disable=no-member
ng.F.nghttp2_version(0).contents.version_str.decode('utf-8'),
)
class HttpServer:
def __init__(self, server_socket, application):
address = server_socket.getsockname()
is_ssl = isinstance(server_socket.target, ssl.SSLSocket)
self._environ = {
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'https' if is_ssl else 'http',
'wsgi.multithread': True,
'wsgi.multiprocess': False,
'wsgi.run_once': False,
'SERVER_SOFTWARE': VERSION,
'SERVER_NAME': socket.getfqdn(address[0]),
'SERVER_PORT': address[1],
'SERVER_PROTOCOL': 'HTTP/2.0',
}
self._application = application
async def __call__(self, client_socket, address):
environ = self._environ.copy()
environ['REMOTE_ADDR'] = address[0]
environ['REMOTE_PORT'] = address[1]
session = wsgi.HttpSession(
client_socket, address, self._application, environ
)
return await session.serve()
avg_line_length: 27.787234 | max_line_length: 65 | alphanum_fraction: 0.61562
hexsha: 3cc63e801d166f36909f35dc299de7ba4fac34ef | size: 20,694 | ext: py | lang: Python
max_stars: luna/gateware/soc/simplesoc.py | TomKeddie/luna @ 6688f7ae2a1b506eb6498cc2ad15dddcef0cc06a | licenses: ["BSD-3-Clause"] | count: 2 | events: 2020-11-04T10:54:15.000Z → 2022-03-17T20:38:21.000Z
max_issues: luna/gateware/soc/simplesoc.py | hxkrrzq/luna @ e56a3eef6a9fa138755512bec1252725601425c1 | licenses: ["BSD-3-Clause"] | count: null | events: null
max_forks: luna/gateware/soc/simplesoc.py | hxkrrzq/luna @ e56a3eef6a9fa138755512bec1252725601425c1 | licenses: ["BSD-3-Clause"] | count: 2 | events: 2021-06-26T06:06:52.000Z → 2022-01-19T22:36:19.000Z
content:
#
# This file is part of LUNA.
#
# Copyright (c) 2020 Great Scott Gadgets <info@greatscottgadgets.com>
# SPDX-License-Identifier: BSD-3-Clause
""" Simple SoC abstraction for LUNA examples."""
import os
import datetime
import logging
from nmigen import Elaboratable, Module
from nmigen_soc import wishbone
from lambdasoc.soc.cpu import CPUSoC
from lambdasoc.cpu.minerva import MinervaCPU
from lambdasoc.periph.intc import GenericInterruptController
from lambdasoc.periph.serial import AsyncSerialPeripheral
from lambdasoc.periph.sram import SRAMPeripheral
from lambdasoc.periph.timer import TimerPeripheral
from .memory import WishboneRAM, WishboneROM
from ..utils.cdc import synchronize
class SimpleSoC(CPUSoC, Elaboratable):
""" Class used for building simple, example system-on-a-chip architectures.
Intended to facilitate demonstrations (and very simple USB devices) by providing
a wrapper that can be updated as the nMigen-based-SoC landscape changes. Hopefully,
this will eventually be filled by e.g. nMigen-compatible-LiteX. :)
    SimpleSoC devices integrate:
- A simple riscv32i processor.
- One or more read-only or read-write memories.
- A number of nmigen-soc peripherals.
The current implementation uses a single, 32-bit wide Wishbone bus
as the system's backend; and uses lambdasoc as its backing technology.
This is subject to change.
"""
BUS_ADDRESS_WIDTH = 30
def __init__(self, clock_frequency=int(60e6)):
"""
Parameters:
            clock_frequency -- The frequency of our `sync` domain, in Hz.
"""
self.clk_freq = clock_frequency
self._main_rom = None
self._main_ram = None
self._uart_baud = None
# Keep track of our created peripherals and interrupts.
self._submodules = []
self._irqs = {}
self._next_irq_index = 0
# By default, don't attach any debug hardware; or build a BIOS.
self._auto_debug = False
self._build_bios = False
#
# Create our core hardware.
# We'll create this hardware early, so it can be used for e.g. code generation without
# fully elaborating our design.
#
# Create our CPU.
self.cpu = MinervaCPU(with_debug=False)
# Create our interrupt controller.
self.intc = GenericInterruptController(width=32)
# Create our bus decoder and set up our memory map.
self.bus_decoder = wishbone.Decoder(addr_width=30, data_width=32, granularity=8, features={"cti", "bte"})
self.memory_map = self.bus_decoder.bus.memory_map
def add_rom(self, data, size, addr=0, is_main_rom=True):
""" Creates a simple ROM and adds it to the design.
Parameters:
data -- The data to fill the relevant ROM.
size -- The size for the rom that should be created.
addr -- The address at which the ROM should reside.
"""
# Figure out how many address bits we'll need to address the given memory size.
addr_width = (size - 1).bit_length()
rom = WishboneROM(data, addr_width=addr_width)
if self._main_rom is None and is_main_rom:
self._main_rom = rom
return self.add_peripheral(rom, addr=addr)
def add_ram(self, size: int, addr: int = None, is_main_mem: bool = True):
""" Creates a simple RAM and adds it to our design.
Parameters:
size -- The size of the RAM, in bytes. Will be rounded up to the nearest power of two.
addr -- The address at which to place the RAM.
"""
# Figure out how many address bits we'll need to address the given memory size.
addr_width = (size - 1).bit_length()
# ... and add it as a peripheral.
ram = WishboneRAM(addr_width=addr_width)
if self._main_ram is None and is_main_mem:
self._main_ram = ram
return self.add_peripheral(ram, addr=addr)
def add_peripheral(self, p, *, as_submodule=True, **kwargs):
""" Adds a peripheral to the SoC.
For now, this is identical to adding a peripheral to the SoC's wishbone bus.
For convenience, returns the peripheral provided.
"""
# Add the peripheral to our bus...
interface = getattr(p, 'bus')
self.bus_decoder.add(interface, **kwargs)
# ... add its IRQs to the IRQ controller...
try:
irq_line = getattr(p, 'irq')
self.intc.add_irq(irq_line, self._next_irq_index)
self._irqs[self._next_irq_index] = p
self._next_irq_index += 1
except (AttributeError, NotImplementedError):
# If the object has no associated IRQs, continue anyway.
# This allows us to add devices with only Wishbone interfaces to our SoC.
pass
# ... and keep track of it for later.
if as_submodule:
self._submodules.append(p)
return p
def add_debug_port(self):
""" Adds an automatically-connected Debug port to our SoC. """
self._auto_debug = True
def add_bios_and_peripherals(self, uart_pins, uart_baud_rate=115200, fixed_addresses=False):
""" Adds a simple BIOS that allows loading firmware, and the requisite peripherals.
Automatically adds the following peripherals:
self.uart -- An AsyncSerialPeripheral used for serial I/O.
self.timer -- A TimerPeripheral used for BIOS timing.
self.rom -- A ROM memory used for the BIOS.
self.ram -- The RAM used by the BIOS; not typically the program RAM.
Parameters:
uart_pins -- The UARTResource to be used for UART communications; or an equivalent record.
uart_baud_rate -- The baud rate to be used by the BIOS' uart.
"""
self._build_bios = True
self._uart_baud = uart_baud_rate
# Add our RAM and ROM.
# Note that these names are from CPUSoC, and thus must not be changed.
#
# Here, we're using SRAMPeripherals instead of our more flexible ones,
# as that's what the lambdasoc BIOS expects. These are effectively internal.
#
addr = 0x0000_0000 if fixed_addresses else None
self.rom = SRAMPeripheral(size=0x4000, writable=False)
self.add_peripheral(self.rom, addr=addr)
addr = 0x0001_0000 if fixed_addresses else None
self.ram = SRAMPeripheral(size=0x1000)
self.add_peripheral(self.ram, addr=addr)
# Add our UART and Timer.
# Again, names are fixed.
addr = 0x0002_0000 if fixed_addresses else None
self.timer = TimerPeripheral(width=32)
self.add_peripheral(self.timer, addr=addr)
addr = 0x0003_0000 if fixed_addresses else None
self.uart = AsyncSerialPeripheral(divisor=int(self.clk_freq // uart_baud_rate), pins=uart_pins)
self.add_peripheral(self.uart, addr=addr)
def elaborate(self, platform):
m = Module()
# Add our core CPU, and create its main system bus.
# Note that our default implementation uses a single bus for code and data,
# so this is both the instruction bus (ibus) and data bus (dbus).
m.submodules.cpu = self.cpu
m.submodules.bus = self.bus_decoder
# Create a basic programmable interrupt controller for our CPU.
m.submodules.pic = self.intc
# Add each of our peripherals to the bus.
for peripheral in self._submodules:
m.submodules += peripheral
# Merge the CPU's data and instruction busses. This essentially means taking the two
# separate bus masters (the CPU ibus master and the CPU dbus master), and connecting them
# to an arbiter, so they both share use of the single bus.
# Create the arbiter around our main bus...
m.submodules.bus_arbiter = arbiter = wishbone.Arbiter(addr_width=30, data_width=32, granularity=8, features={"cti", "bte"})
m.d.comb += arbiter.bus.connect(self.bus_decoder.bus)
# ... and connect it to the CPU instruction and data busses.
arbiter.add(self.cpu.ibus)
arbiter.add(self.cpu.dbus)
# Connect up our CPU interrupt lines.
m.d.comb += self.cpu.ip.eq(self.intc.ip)
# If we're automatically creating a debug connection, do so.
if self._auto_debug:
m.d.comb += [
self.cpu._cpu.jtag.tck .eq(synchronize(m, platform.request("user_io", 0, dir="i").i)),
self.cpu._cpu.jtag.tms .eq(synchronize(m, platform.request("user_io", 1, dir="i").i)),
self.cpu._cpu.jtag.tdi .eq(synchronize(m, platform.request("user_io", 2, dir="i").i)),
platform.request("user_io", 3, dir="o").o .eq(self.cpu._cpu.jtag.tdo)
]
return m
def resources(self, omit_bios_mem=True):
""" Creates an iterator over each of the device's addressable resources.
Yields (resource, address, size) for each resource.
Parameters:
omit_bios_mem -- If True, BIOS-related memories are skipped when generating our
resource listings. This hides BIOS resources from the application.
"""
# Grab the memory map for this SoC...
memory_map = self.bus_decoder.bus.memory_map
# ... find each addressable peripheral...
for peripheral, (peripheral_start, _end, _granularity) in memory_map.windows():
resources = peripheral.all_resources()
# ... find the peripheral's resources...
for resource, (register_offset, register_end_offset, _local_granularity) in resources:
if self._build_bios and omit_bios_mem:
# If we're omitting bios resources, skip the BIOS ram/rom.
if (self.ram._mem is resource) or (self.rom._mem is resource):
continue
# ... and extract the peripheral's range/vitals...
size = register_end_offset - register_offset
yield resource, peripheral_start + register_offset, size
def build(self, name=None, build_dir="build"):
""" Builds any internal artifacts necessary to create our CPU.
This is usually used for e.g. building our BIOS.
        Parameters:
name -- The name for the SoC design.
build_dir -- The directory where our main nMigen build is being performed.
We'll build in a subdirectory of it.
"""
# If we're building a BIOS, let our superclass build a BIOS for us.
if self._build_bios:
logging.info("Building SoC BIOS...")
super().build(name=name, build_dir=os.path.join(build_dir, 'soc'), do_build=True, do_init=True)
logging.info("BIOS build complete. Continuing with SoC build.")
self.log_resources()
def _range_for_peripheral(self, target_peripheral):
""" Returns size information for the given peripheral.
Returns:
addr, size -- if the given size is known; or
None, None if not
"""
# Grab the memory map for this SoC...
memory_map = self.bus_decoder.bus.memory_map
# Search our memory map for the target peripheral.
for peripheral, (start, end, _granularity) in memory_map.all_resources():
if peripheral is target_peripheral:
return start, (end - start)
return None, None
def _emit_minerva_basics(self, emit):
""" Emits the standard Minerva RISC-V CSR functionality.
Parameters
----------
emit: callable(str)
The function used to print the code lines to the output stream.
"""
emit("#ifndef read_csr")
emit("#define read_csr(reg) ({ unsigned long __tmp; \\")
emit(" asm volatile (\"csrr %0, \" #reg : \"=r\"(__tmp)); \\")
emit(" __tmp; })")
emit("#endif")
emit("")
emit("#ifndef write_csr")
emit("#define write_csr(reg, val) ({ \\")
emit(" asm volatile (\"csrw \" #reg \", %0\" :: \"rK\"(val)); })")
emit("#endif")
emit("")
emit("#ifndef set_csr")
emit("#define set_csr(reg, bit) ({ unsigned long __tmp; \\")
emit(" asm volatile (\"csrrs %0, \" #reg \", %1\" : \"=r\"(__tmp) : \"rK\"(bit)); \\")
emit(" __tmp; })")
emit("#endif")
emit("")
emit("#ifndef clear_csr")
emit("#define clear_csr(reg, bit) ({ unsigned long __tmp; \\")
emit(" asm volatile (\"csrrc %0, \" #reg \", %1\" : \"=r\"(__tmp) : \"rK\"(bit)); \\")
emit(" __tmp; })")
emit("#endif")
emit("")
emit("#ifndef MSTATUS_MIE")
emit("#define MSTATUS_MIE 0x00000008")
emit("#endif")
emit("")
emit("//")
emit("// Minerva headers")
emit("//")
emit("")
emit("static inline uint32_t irq_getie(void)")
emit("{")
emit(" return (read_csr(mstatus) & MSTATUS_MIE) != 0;")
emit("}")
emit("")
emit("static inline void irq_setie(uint32_t ie)")
emit("{")
emit(" if (ie) {")
emit(" set_csr(mstatus, MSTATUS_MIE);")
emit(" } else {")
emit(" clear_csr(mstatus, MSTATUS_MIE);")
emit(" }")
emit("}")
emit("")
emit("static inline uint32_t irq_getmask(void)")
emit("{")
emit(" return read_csr(0x330);")
emit("}")
emit("")
emit("static inline void irq_setmask(uint32_t value)")
emit("{")
emit(" write_csr(0x330, value);")
emit("}")
emit("")
emit("static inline uint32_t pending_irqs(void)")
emit("{")
emit(" return read_csr(0x360);")
emit("}")
emit("")
def generate_c_header(self, macro_name="SOC_RESOURCES", file=None, platform_name="Generic Platform"):
""" Generates a C header file that simplifies access to the platform's resources.
Parameters:
macro_name -- Optional. The name of the guard macro for the C header, as a string without spaces.
file -- Optional. If provided, this will be treated as the file= argument to the print()
function. This can be used to generate file content instead of printing to the terminal.
"""
def emit(content):
""" Utility function that emits a string to the targeted file. """
print(content, file=file)
# Create a mapping that maps our register sizes to C types.
types_for_size = {
4: 'uint32_t',
2: 'uint16_t',
1: 'uint8_t'
}
# Emit a warning header.
emit("/*")
emit(" * Automatically generated by LUNA; edits will be discarded on rebuild.")
emit(" * (Most header files phrase this 'Do not edit.'; be warned accordingly.)")
emit(" *")
emit(f" * Generated: {datetime.datetime.now()}.")
emit(" */")
emit("\n")
emit(f"#ifndef __{macro_name}_H__")
emit(f"#define __{macro_name}_H__")
emit("")
emit("#include <stdint.h>\n")
emit("#include <stdbool.h>")
emit("")
emit("//")
emit("// Environment Information")
emit("//")
emit("")
emit(f"#define PLATFORM_NAME \"{platform_name}\"")
emit("")
# Emit our constant data for all Minerva CPUs.
self._emit_minerva_basics(emit)
emit("//")
emit("// Peripherals")
emit("//")
for resource, address, size in self.resources():
# Always generate a macro for the resource's ADDRESS and size.
name = resource.name
emit(f"#define {name.upper()}_ADDRESS (0x{address:08x}U)")
emit(f"#define {name.upper()}_SIZE ({size})")
# If we have information on how to access this resource, generate convenience
# macros for reading and writing it.
if hasattr(resource, 'access'):
c_type = types_for_size[size]
# Generate a read stub, if useful...
if resource.access.readable():
emit(f"static inline {c_type} {name}_read(void) {{")
emit(f" volatile {c_type} *reg = ({c_type} *){name.upper()}_ADDRESS;")
emit(f" return *reg;")
emit(f"}}")
# ... and a write stub.
if resource.access.writable():
emit(f"static inline void {name}_write({c_type} value) {{")
emit(f" volatile {c_type} *reg = ({c_type} *){name.upper()}_ADDRESS;")
emit(f" *reg = value;")
emit(f"}}")
emit("")
emit("//")
emit("// Interrupts")
emit("//")
for irq, peripheral in self._irqs.items():
# Function that determines if a given unit has an IRQ pending.
emit(f"static inline bool {peripheral.name}_interrupt_pending(void) {{")
emit(f" return pending_irqs() & (1 << {irq});")
emit(f"}}")
# IRQ masking
emit(f"static inline void {peripheral.name}_interrupt_enable(void) {{")
emit(f" irq_setmask(irq_getmask() | (1 << {irq}));")
emit(f"}}")
emit(f"static inline void {peripheral.name}_interrupt_disable(void) {{")
emit(f" irq_setmask(irq_getmask() & ~(1 << {irq}));")
emit(f"}}")
emit("#endif")
emit("")
def generate_ld_script(self, file=None):
""" Generates an ldscript that holds our primary RAM and ROM regions.
Parameters:
file -- Optional. If provided, this will be treated as the file= argument to the print()
function. This can be used to generate file content instead of printing to the terminal.
"""
def emit(content):
""" Utility function that emits a string to the targeted file. """
print(content, file=file)
# Insert our automatically generated header.
emit("/**")
emit(" * Linker memory regions.")
emit(" *")
emit(" * Automatically generated by LUNA; edits will be discarded on rebuild.")
emit(" * (Most header files phrase this 'Do not edit.'; be warned accordingly.)")
emit(" *")
emit(f" * Generated: {datetime.datetime.now()}.")
emit(" */")
emit("")
emit("MEMORY")
emit("{")
# Add regions for our main ROM and our main RAM.
for memory in [self._main_rom, self._main_ram]:
# Figure out our fields: a region name, our start, and our size.
name = "ram" if (memory is self._main_ram) else "rom"
start, size = self._range_for_peripheral(memory)
if size:
emit(f" {name} : ORIGIN = 0x{start:08x}, LENGTH = 0x{size:08x}")
emit("}")
emit("")
def log_resources(self):
""" Logs a summary of our resource utilization to our running logs. """
# Resource addresses:
logging.info("Physical address allocations:")
for peripheral, (start, end, _granularity) in self.memory_map.all_resources():
logging.info(f" {start:08x}-{end:08x}: {peripheral}")
logging.info("")
# IRQ numbers
logging.info("IRQ allocations:")
for irq, peripheral in self._irqs.items():
logging.info(f" {irq}: {peripheral.name}")
logging.info("")
# Main memory.
if self._build_bios:
memory_location = self.main_ram_address()
logging.info(f"Main memory at 0x{memory_location:08x}; upload using:")
logging.info(f" flterm --kernel <your_firmware> --kernel-addr 0x{memory_location:08x} --speed {self._uart_baud}")
logging.info("or")
logging.info(f" lxterm --kernel <your_firmware> --kernel-adr 0x{memory_location:08x} --speed {self._uart_baud}")
logging.info("")
def main_ram_address(self):
""" Returns the address of the main system RAM. """
start, _ = self._range_for_peripheral(self._main_ram)
return start
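A hypothetical sketch of assembling a design with the SimpleSoC class above and emitting its firmware support files. The `firmware_bytes` value, the sizes, the addresses, and the output file names are placeholders:

```python
firmware_bytes = b"\x00" * 1024  # placeholder firmware image

soc = SimpleSoC(clock_frequency=int(60e6))

# Main ROM holding the firmware image, plus a small working RAM.
soc.add_rom(data=firmware_bytes, size=0x4000, addr=0x0000_0000)
soc.add_ram(size=0x1000, addr=0x0001_0000)

# Emit the generated C header and linker regions for the firmware build.
with open("resources.h", "w") as header_file:
    soc.generate_c_header(file=header_file)
with open("soc.ld", "w") as ldscript_file:
    soc.generate_ld_script(file=ldscript_file)
```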
avg_line_length: 36.561837 | max_line_length: 131 | alphanum_fraction: 0.585242
hexsha: c1c661e766b1775fc3ccbb9da940a4850bc8fcc3 | size: 5,859 | ext: py | lang: Python
max_stars: keyring/backends/_OS_X_API.py | mathstuf/python-keyring @ be19f913cf85e829b1b8f718e047535823acf2ff | licenses: ["MIT"] | count: 7 | events: 2018-01-19T05:34:48.000Z → 2022-01-19T01:53:18.000Z
max_issues: keyring/backends/_OS_X_API.py | mathstuf/python-keyring @ be19f913cf85e829b1b8f718e047535823acf2ff | licenses: ["MIT"] | count: 20 | events: 2021-05-03T18:02:23.000Z → 2022-03-12T12:01:04.000Z
max_forks: Lib/site-packages/hackedit/vendor/keyring/backends/_OS_X_API.py | fochoao/cpython @ 3dc84b260e5bced65ebc2c45c40c8fa65f9b5aa9 | licenses: ["bzip2-1.0.6", "0BSD"] | count: 4 | events: 2018-01-19T05:34:30.000Z → 2021-09-08T08:38:21.000Z
content:
import contextlib
import ctypes
from ctypes import c_void_p, c_uint16, c_uint32, c_int32, c_char_p, POINTER
sec_keychain_ref = sec_keychain_item_ref = c_void_p
OS_status = c_int32
class error:
item_not_found = -25300
fw = '/System/Library/Frameworks/{name}.framework/Versions/A/{name}'.format
_sec = ctypes.CDLL(fw(name='Security'))
_core = ctypes.CDLL(fw(name='CoreServices'))
SecKeychainOpen = _sec.SecKeychainOpen
SecKeychainOpen.argtypes = (
c_char_p,
POINTER(sec_keychain_ref),
)
SecKeychainOpen.restype = OS_status
SecKeychainCopyDefault = _sec.SecKeychainCopyDefault
SecKeychainCopyDefault.argtypes = POINTER(sec_keychain_ref),
SecKeychainCopyDefault.restype = OS_status
class Error(Exception):
@classmethod
def raise_for_status(cls, status, msg):
if status == 0:
return
raise cls(status, msg)
class NotFound(Error):
@classmethod
def raise_for_status(cls, status, msg):
if status == error.item_not_found:
raise cls(status, msg)
Error.raise_for_status(status, msg)
@contextlib.contextmanager
def open(name):
ref = sec_keychain_ref()
if name is None:
status = SecKeychainCopyDefault(ref)
msg = "Unable to open default keychain"
else:
status = SecKeychainOpen(name.encode('utf-8'), ref)
msg = "Unable to open keychain {name}".format(**locals())
Error.raise_for_status(status, msg)
try:
yield ref
finally:
_core.CFRelease(ref)
SecKeychainFindGenericPassword = _sec.SecKeychainFindGenericPassword
SecKeychainFindGenericPassword.argtypes = (
sec_keychain_ref,
c_uint32,
c_char_p,
c_uint32,
c_char_p,
POINTER(c_uint32), # passwordLength
POINTER(c_void_p), # passwordData
POINTER(sec_keychain_item_ref), # itemRef
)
SecKeychainFindGenericPassword.restype = OS_status
def find_generic_password(kc_name, service, username):
username = username.encode('utf-8')
service = service.encode('utf-8')
with open(kc_name) as keychain:
length = c_uint32()
data = c_void_p()
status = SecKeychainFindGenericPassword(
keychain,
len(service),
service,
len(username),
username,
length,
data,
None,
)
msg = "Can't fetch password from system"
NotFound.raise_for_status(status, msg)
password = ctypes.create_string_buffer(length.value)
ctypes.memmove(password, data.value, length.value)
SecKeychainItemFreeContent(None, data)
return password.raw.decode('utf-8')
SecKeychainFindInternetPassword = _sec.SecKeychainFindInternetPassword
SecKeychainFindInternetPassword.argtypes = (
sec_keychain_ref, # keychainOrArray
c_uint32, # serverNameLength
c_char_p, # serverName
c_uint32, # securityDomainLength
c_char_p, # securityDomain
c_uint32, # accountNameLength
c_char_p, # accountName
c_uint32, # pathLength
c_char_p, # path
c_uint16, # port
c_void_p, # SecProtocolType protocol,
c_void_p, # SecAuthenticationType authenticationType,
POINTER(c_uint32), # passwordLength
POINTER(c_void_p), # passwordData
POINTER(sec_keychain_item_ref), # itemRef
)
SecKeychainFindInternetPassword.restype = OS_status
SecKeychainAddGenericPassword = _sec.SecKeychainAddGenericPassword
SecKeychainAddGenericPassword.argtypes = (
sec_keychain_ref,
c_uint32,
c_char_p,
c_uint32,
c_char_p,
c_uint32,
c_char_p,
POINTER(sec_keychain_item_ref),
)
SecKeychainAddGenericPassword.restype = OS_status
def set_generic_password(name, service, username, password):
username = username.encode('utf-8')
service = service.encode('utf-8')
password = password.encode('utf-8')
with open(name) as keychain:
item = sec_keychain_item_ref()
status = SecKeychainFindGenericPassword(
keychain,
len(service), service,
len(username), username, None,
None, item)
if status:
if status == error.item_not_found:
status = SecKeychainAddGenericPassword(
keychain,
len(service), service,
len(username), username,
len(password), password, None)
else:
status = SecKeychainItemModifyAttributesAndData(
item, None, len(password), password)
_core.CFRelease(item)
NotFound.raise_for_status(status, "Unable to set password")
SecKeychainItemModifyAttributesAndData = _sec.SecKeychainItemModifyAttributesAndData
SecKeychainItemModifyAttributesAndData.argtypes = (
sec_keychain_item_ref, c_void_p, c_uint32, c_void_p,
)
SecKeychainItemModifyAttributesAndData.restype = OS_status
SecKeychainItemFreeContent = _sec.SecKeychainItemFreeContent
SecKeychainItemFreeContent.argtypes = (
c_void_p, c_void_p,
)
SecKeychainItemFreeContent.restype = OS_status
SecKeychainItemDelete = _sec.SecKeychainItemDelete
SecKeychainItemDelete.argtypes = sec_keychain_item_ref,
SecKeychainItemDelete.restype = OS_status
def delete_generic_password(name, service, username):
username = username.encode('utf-8')
service = service.encode('utf-8')
with open(name) as keychain:
length = c_uint32()
data = c_void_p()
item = sec_keychain_item_ref()
status = SecKeychainFindGenericPassword(
keychain,
len(service),
service,
len(username),
username,
length,
data,
item,
)
Error.raise_for_status(status, "Unable to delete password")
SecKeychainItemDelete(item)
_core.CFRelease(item)
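A hypothetical round trip through the helpers above against the default keychain (keychain name `None`); the service, account, and password values are placeholders:

```python
# Store, read back, and delete one generic password entry.
set_generic_password(None, "example-service", "alice", "s3cret")
print(find_generic_password(None, "example-service", "alice"))  # s3cret
delete_generic_password(None, "example-service", "alice")
```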
avg_line_length: 29.442211 | max_line_length: 84 | alphanum_fraction: 0.676566
hexsha: 954fd9b37a1d4d6ceca6c17d863ba22404a38986 | size: 2,658 | ext: py | lang: Python
max_stars: week4/trainClassifier.py | linkian209/search_with_machine_learning_course @ 402b4686de85b86bde96d068ebdda3b766b14b12 | licenses: ["Apache-2.0"] | count: null | events: null
max_issues: week4/trainClassifier.py | linkian209/search_with_machine_learning_course @ 402b4686de85b86bde96d068ebdda3b766b14b12 | licenses: ["Apache-2.0"] | count: null | events: null
max_forks: week4/trainClassifier.py | linkian209/search_with_machine_learning_course @ 402b4686de85b86bde96d068ebdda3b766b14b12 | licenses: ["Apache-2.0"] | count: null | events: null
content:
import fasttext
import itertools
import sys
import pandas as pd
from multiprocessing import Pool
output_dir = '/workspace/datasets'
base_model = 'query_data'
training_file_base = output_dir + '/labeled_query_data_mq_{}.train'
testing_file_base = output_dir + '/labeled_query_data_mq_{}.test'
output_file = output_dir + '/training_results.h5'
min_queries_list = [1, 100, 1000]
epochs = [5, 10, 15, 20, 25]
lrs = [.40, .35, .25, .15, .05]
wordNgrams = [1, 2]
tests = [1, 3, 5]
results = []
'''
# Single Threaded
for(min_queries, epoch, lr, wordNgram) in itertools.product(min_queries_list, epochs, lrs, wordNgrams):
print('-----------------------------------------')
print(f'Min Queries: {min_queries} Epochs: {epoch} Learning Rate: {lr} wordNgrams: {wordNgram}')
cur_result = {'Minimum Queries': min_queries, 'Epochs': epoch, 'Learning Rate': lr, 'wordNgrams': wordNgram}
loop = True
while(loop):
try:
model = fasttext.train_supervised(input=training_file_base.format(min_queries), lr=lr, epoch=epoch, wordNgrams=wordNgram, verbose=0)
loop = False
except:
print('Trying again...')
for test in tests:
(recs, precision, recall) = model.test(testing_file_base.format(min_queries), k=test)
cur_result[f'Recs@{test}'] = recs
cur_result[f'P@{test}'] = precision
cur_result[f'R@{test}'] = recall
results.append(cur_result)
print(pd.DataFrame([cur_result]))
'''
# Multi Threaded
def thread_task(args: tuple) -> dict:
(min_queries, epoch, lr, wordNgram) = args
print('-----------------------------------------')
print(f'Min Queries: {min_queries} Epochs: {epoch} Learning Rate: {lr} wordNgrams: {wordNgram}')
cur_result = {'Minimum Queries': min_queries, 'Epochs': epoch, 'Learning Rate': lr, 'wordNgrams': wordNgram}
loop = True
while(loop):
try:
model = fasttext.train_supervised(input=training_file_base.format(min_queries), lr=lr, epoch=epoch, wordNgrams=wordNgram, verbose=0)
loop = False
        except Exception:  # retry until training succeeds
print('Trying again...')
for test in tests:
(recs, precision, recall) = model.test(testing_file_base.format(min_queries), k=test)
cur_result[f'Recs@{test}'] = recs
cur_result[f'P@{test}'] = precision
cur_result[f'R@{test}'] = recall
return cur_result
combos = list(itertools.product(min_queries_list, epochs, lrs, wordNgrams))
with Pool(5) as p:
results = p.map(thread_task, combos)
print(f'Saving results to {output_file}')
store = pd.HDFStore(output_file)
store['df'] = pd.DataFrame(results)
store.close()
print('Done!')
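# Hedged follow-up sketch (added; not in the original script): the stored grid-search
# results can be reloaded for inspection with pandas, for example
#   df = pd.read_hdf(output_file, 'df')
#   print(df.sort_values('P@1', ascending=False).head())
# where 'P@1' is one of the precision columns written by thread_task above.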
| 36.916667 | 144 | 0.64936 |
96ecbbb699caccfa18335ee9fb0eb2acce344d46 | 1,307 | py | Python | py_client/communication/response_processing.py | sma-software/openviriato.algorithm-platform.py-client | 73d4cf89aa6f4d02ab15b5504d92107848742325 | ["Apache-2.0"] | 2 | 2021-06-21T06:50:29.000Z | 2021-06-30T15:58:02.000Z | py_client/communication/response_processing.py | sma-software/openviriato.algorithm-platform.py-client | 73d4cf89aa6f4d02ab15b5504d92107848742325 | ["Apache-2.0"] | null | null | null | py_client/communication/response_processing.py | sma-software/openviriato.algorithm-platform.py-client | 73d4cf89aa6f4d02ab15b5504d92107848742325 | ["Apache-2.0"] | null | null | null |
import requests
def raise_if_unsuccessful_response_code(api_response: requests.Response) -> None:
try:
api_response.raise_for_status()
except requests.HTTPError:
if api_response.text != '':
algorithm_platform_error_information = api_response.json()
raise AlgorithmPlatformHTTPError(
algorithm_platform_error_information['statusCode'],
algorithm_platform_error_information['message']
)
else:
api_response.raise_for_status()
def extract_json_if_possible(api_response) -> (dict, list, None):
raise_if_unsuccessful_response_code(api_response)
return extract_json_or_none(api_response)
def extract_json_or_none(api_response) -> (dict, list, None):
if api_response.text != '':
return api_response.json()
else:
return None
class AlgorithmPlatformHTTPError(Exception):
def __init__(self, status_code: str, message: str):
self.status_code = 'HTTPError {0}'.format(status_code)
self.message = message
class AlgorithmPlatformConversionError(Exception):
def __init__(self, message: str, wrapped_exception: Exception):
self.wrapped_exception = wrapped_exception
self.message = message
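# Hedged usage sketch (added; not part of the original module): a caller would pass
# a requests.Response straight through, e.g.
#   response = requests.get('https://example.org/api/some-endpoint')  # illustrative URL
#   payload = extract_json_if_possible(response)
# AlgorithmPlatformHTTPError is raised when the platform reports an error with a JSON
# body; error responses without a body re-raise the underlying requests.HTTPError.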
| 31.878049 | 82 | 0.680184 |
de42762bcaff5d184eb80990b9729b98752814cc | 4,175 | py | Python | atomic_reactor/plugins/post_rpmqa.py | tveverka-RH/atomic-reactor | e3fca7dd435250cb06565f19c8758a908f7f3c62 | ["BSD-3-Clause"] | 16 | 2019-07-04T16:00:13.000Z | 2022-01-28T19:51:58.000Z | atomic_reactor/plugins/post_rpmqa.py | tveverka-RH/atomic-reactor | e3fca7dd435250cb06565f19c8758a908f7f3c62 | ["BSD-3-Clause"] | 577 | 2019-06-03T07:53:16.000Z | 2022-03-31T04:01:23.000Z | atomic_reactor/plugins/post_rpmqa.py | ben-alkov/atomic-reactor | be6b626b7d822f77999f245193fefcc00c501ca9 | ["BSD-3-Clause"] | 17 | 2020-02-21T13:30:06.000Z | 2022-02-15T10:42:32.000Z |
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
import os
import subprocess
import tempfile
import tarfile
from atomic_reactor.plugin import PostBuildPlugin
from atomic_reactor.utils.rpm import parse_rpm_output
from atomic_reactor.utils.rpm import rpm_qf_args
RPMDB_PATH = '/var/lib/rpm'
RPMDB_DIR_NAME = 'rpm'
__all__ = ('PostBuildRPMqaPlugin', )
class PostBuildRPMqaPlugin(PostBuildPlugin):
key = "all_rpm_packages"
is_allowed_to_fail = False
sep = ';'
def __init__(self, workflow, image_id, ignore_autogenerated_gpg_keys=True):
"""
constructor
:param workflow: DockerBuildWorkflow instance
"""
# call parent constructor
super(PostBuildRPMqaPlugin, self).__init__(workflow)
self.image_id = image_id
self.ignore_autogenerated_gpg_keys = ignore_autogenerated_gpg_keys
self._container_ids = []
def run(self):
# If another component has already filled in the image component list, skip
if self.workflow.image_components is not None:
return None
plugin_output = self.gather_output()
# OSBS2 TBD
# we won't need to clean container because we will use just oc image extract
# if self.workflow.dockerfile_images.base_from_scratch:
# if not plugin_output:
# self.tasker.cleanup_containers(*self._container_ids)
# return None
# gpg-pubkey are autogenerated packages by rpm when you import a gpg key
# these are of course not signed, let's ignore those by default
if self.ignore_autogenerated_gpg_keys:
self.log.debug("ignore rpms 'gpg-pubkey'")
plugin_output = [x for x in plugin_output if not x.startswith("gpg-pubkey" + self.sep)]
# self.tasker.cleanup_containers(*self._container_ids)
self.workflow.image_components = parse_rpm_output(plugin_output)
return plugin_output
def gather_output(self):
# OSBS2 TBD
# just use oc image extract to get image content
# container_dict = self.tasker.create_container(self.image_id, command=['/bin/bash'])
# container_id = container_dict['Id']
# self._container_ids.append(container_id)
# try:
# bits, _ = self.tasker.get_archive(container_id, RPMDB_PATH)
# except APIError as ex:
# self.log.info('Could not extract rpmdb in %s : %s', RPMDB_PATH, ex)
# if self.workflow.dockerfile_images.base_from_scratch:
# return None
# raise RuntimeError(ex) from ex
# except Exception as ex:
# self.log.info('Get archive failed while extracting rpmdb in %s : %s', RPMDB_PATH, ex)
# raise RuntimeError(ex) from ex
bits = []
with tempfile.NamedTemporaryFile() as rpmdb_archive:
for chunk in bits:
rpmdb_archive.write(chunk)
rpmdb_archive.flush()
tar_archive = tarfile.TarFile(rpmdb_archive.name)
with tempfile.TemporaryDirectory() as rpmdb_dir:
tar_archive.extractall(rpmdb_dir)
rpmdb_path = os.path.join(rpmdb_dir, RPMDB_DIR_NAME)
if not os.listdir(rpmdb_path):
self.log.info('rpmdb directory %s is empty', RPMDB_PATH)
if self.workflow.dockerfile_images.base_from_scratch:
return None
raise RuntimeError(f'rpmdb directory {RPMDB_PATH} is empty')
rpm_cmd = 'rpm --dbpath {} {}'.format(rpmdb_path, rpm_qf_args())
try:
self.log.info('getting rpms from rpmdb: %s', rpm_cmd)
rpm_output = subprocess.check_output(rpm_cmd,
shell=True, universal_newlines=True) # nosec
except Exception as e:
self.log.error("Failed to get rpms from rpmdb: %s", e)
raise e
rpm_output = [line for line in rpm_output.splitlines() if line]
return rpm_output
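# Summary comment (added for clarity; not in the original plugin): gather_output
# unpacks the image's rpmdb into a temporary directory and shells out to
# `rpm --dbpath <tmpdir>/rpm ...` to list installed packages, while run() drops the
# autogenerated gpg-pubkey entries and records the parsed component list on the
# workflow. The actual archive retrieval is currently stubbed out (bits = []) pending
# the OSBS2 rework noted in the comments above.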
| 36.622807 | 99 | 0.64024 |
242506ea341b335247b363883619a0d2c6a43503 | 726 | py | Python | interview/leet/897_Increasing_Order_Search_Tree.py | eroicaleo/LearningPython | 297d46eddce6e43ce0c160d2660dff5f5d616800 | ["MIT"] | 1 | 2020-10-12T13:33:29.000Z | 2020-10-12T13:33:29.000Z | interview/leet/897_Increasing_Order_Search_Tree.py | eroicaleo/LearningPython | 297d46eddce6e43ce0c160d2660dff5f5d616800 | ["MIT"] | null | null | null | interview/leet/897_Increasing_Order_Search_Tree.py | eroicaleo/LearningPython | 297d46eddce6e43ce0c160d2660dff5f5d616800 | ["MIT"] | 1 | 2016-11-09T07:28:45.000Z | 2016-11-09T07:28:45.000Z |
#!/usr/bin/env python3
from tree import *
class Solution:
def increasingBST(self, root):
stack, node = [], root
new_root = dummy = TreeNode(None)
while stack or node:
while node:
stack, node = stack+[node], node.left
node = stack.pop()
            print(f'popping node {node.val}')
dummy.right = node
node = node.right
dummy = dummy.right
dummy.left = dummy.right = None
return new_root.right
node_string = '[2,1,3]'
node_string = '[5,3,6,2,4,null,8,1,null,null,null,7,9]'
root = treeBuilder(node_string)
traverse(root)
sol = Solution()
print('After converting:')
traverse(sol.increasingBST(root))
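# Note (added for clarity; not in the original file): for the sample tree
# '[5,3,6,2,4,null,8,1,null,null,null,7,9]' the rebuilt tree is a right-leaning
# chain that visits 1, 2, 3, ..., 9 in increasing order, each node keeping only
# a right child.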
| 25.928571 | 55 | 0.582645 |
13ac6430fa7f5b9a69d70676db79b2bfeafbf8b5 | 145,404 | py | Python | sympy/physics/continuum_mechanics/beam.py | SirAbhi13/sympy | 5868aa1cc649f048cdbddd0082be67f2b65f0d95 | ["BSD-3-Clause"] | null | null | null | sympy/physics/continuum_mechanics/beam.py | SirAbhi13/sympy | 5868aa1cc649f048cdbddd0082be67f2b65f0d95 | ["BSD-3-Clause"] | null | null | null | sympy/physics/continuum_mechanics/beam.py | SirAbhi13/sympy | 5868aa1cc649f048cdbddd0082be67f2b65f0d95 | ["BSD-3-Clause"] | null | null | null |
"""
This module can be used to solve 2D beam bending problems with
singularity functions in mechanics.
"""
from sympy.core import S, Symbol, diff, symbols
from sympy.solvers import linsolve
from sympy.printing import sstr
from sympy.functions import SingularityFunction, Piecewise, factorial
from sympy.core import sympify
from sympy.integrals import integrate
from sympy.series import limit
from sympy.plotting import plot, PlotGrid
from sympy.geometry.entity import GeometryEntity
from sympy.external import import_module
from sympy.core.add import Add
from sympy.utilities.lambdify import lambdify
from sympy.utilities.decorator import doctest_depends_on
from sympy.utilities.iterables import iterable
numpy = import_module('numpy', import_kwargs={'fromlist':['arange']})
class Beam:
"""
A Beam is a structural element that is capable of withstanding load
primarily by resisting against bending. Beams are characterized by
    their cross-sectional profile (second moment of area), their length
and their material.
.. note::
A consistent sign convention must be used while solving a beam
bending problem; the results will
automatically follow the chosen sign convention. However, the
chosen sign convention must respect the rule that, on the positive
side of beam's axis (in respect to current section), a loading force
giving positive shear yields a negative moment, as below (the
curved arrow shows the positive moment and rotation):
.. image:: allowed-sign-conventions.png
Examples
========
There is a beam of length 4 meters. A constant distributed load of 6 N/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. The deflection of the beam at the end is restricted.
Using the sign convention of downwards forces being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols, Piecewise
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(4, E, I)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(6, 2, 0)
>>> b.apply_load(R2, 4, -1)
>>> b.bc_deflection = [(0, 0), (4, 0)]
>>> b.boundary_conditions
{'deflection': [(0, 0), (4, 0)], 'slope': []}
>>> b.load
R1*SingularityFunction(x, 0, -1) + R2*SingularityFunction(x, 4, -1) + 6*SingularityFunction(x, 2, 0)
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.load
-3*SingularityFunction(x, 0, -1) + 6*SingularityFunction(x, 2, 0) - 9*SingularityFunction(x, 4, -1)
>>> b.shear_force()
3*SingularityFunction(x, 0, 0) - 6*SingularityFunction(x, 2, 1) + 9*SingularityFunction(x, 4, 0)
>>> b.bending_moment()
3*SingularityFunction(x, 0, 1) - 3*SingularityFunction(x, 2, 2) + 9*SingularityFunction(x, 4, 1)
>>> b.slope()
(-3*SingularityFunction(x, 0, 2)/2 + SingularityFunction(x, 2, 3) - 9*SingularityFunction(x, 4, 2)/2 + 7)/(E*I)
>>> b.deflection()
(7*x - SingularityFunction(x, 0, 3)/2 + SingularityFunction(x, 2, 4)/4 - 3*SingularityFunction(x, 4, 3)/2)/(E*I)
>>> b.deflection().rewrite(Piecewise)
(7*x - Piecewise((x**3, x > 0), (0, True))/2
- 3*Piecewise(((x - 4)**3, x > 4), (0, True))/2
+ Piecewise(((x - 2)**4, x > 2), (0, True))/4)/(E*I)
"""
def __init__(self, length, elastic_modulus, second_moment, area=Symbol('A'), variable=Symbol('x'), base_char='C'):
"""Initializes the class.
Parameters
==========
length : Sympifyable
A Symbol or value representing the Beam's length.
elastic_modulus : Sympifyable
A SymPy expression representing the Beam's Modulus of Elasticity.
It is a measure of the stiffness of the Beam material. It can
also be a continuous function of position along the beam.
second_moment : Sympifyable or Geometry object
Describes the cross-section of the beam via a SymPy expression
representing the Beam's second moment of area. It is a geometrical
property of an area which reflects how its points are distributed
with respect to its neutral axis. It can also be a continuous
function of position along the beam. Alternatively ``second_moment``
can be a shape object such as a ``Polygon`` from the geometry module
representing the shape of the cross-section of the beam. In such cases,
it is assumed that the x-axis of the shape object is aligned with the
bending axis of the beam. The second moment of area will be computed
from the shape object internally.
area : Symbol/float
Represents the cross-section area of beam
variable : Symbol, optional
A Symbol object that will be used as the variable along the beam
while representing the load, shear, moment, slope and deflection
curve. By default, it is set to ``Symbol('x')``.
base_char : String, optional
A String that will be used as base character to generate sequential
symbols for integration constants in cases where boundary conditions
are not sufficient to solve them.
"""
self.length = length
self.elastic_modulus = elastic_modulus
if isinstance(second_moment, GeometryEntity):
self.cross_section = second_moment
else:
self.cross_section = None
self.second_moment = second_moment
self.variable = variable
self._base_char = base_char
self._boundary_conditions = {'deflection': [], 'slope': []}
self._load = 0
self._area = area
self._applied_supports = []
self._support_as_loads = []
self._applied_loads = []
self._reaction_loads = {}
self._ild_reactions = {}
self._ild_shear = 0
self._ild_moment = 0
# _original_load is a copy of _load equations with unsubstituted reaction
# forces. It is used for calculating reaction forces in case of I.L.D.
self._original_load = 0
self._composite_type = None
self._hinge_position = None
def __str__(self):
shape_description = self._cross_section if self._cross_section else self._second_moment
str_sol = 'Beam({}, {}, {})'.format(sstr(self._length), sstr(self._elastic_modulus), sstr(shape_description))
return str_sol
@property
def reaction_loads(self):
""" Returns the reaction forces in a dictionary."""
return self._reaction_loads
@property
def ild_shear(self):
""" Returns the I.L.D. shear equation."""
return self._ild_shear
@property
def ild_reactions(self):
""" Returns the I.L.D. reaction forces in a dictionary."""
return self._ild_reactions
@property
def ild_moment(self):
""" Returns the I.L.D. moment equation."""
return self._ild_moment
@property
def length(self):
"""Length of the Beam."""
return self._length
@length.setter
def length(self, l):
self._length = sympify(l)
@property
def area(self):
"""Cross-sectional area of the Beam. """
return self._area
@area.setter
def area(self, a):
self._area = sympify(a)
@property
def variable(self):
"""
A symbol that can be used as a variable along the length of the beam
while representing load distribution, shear force curve, bending
moment, slope curve and the deflection curve. By default, it is set
to ``Symbol('x')``, but this property is mutable.
Examples
========
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I, A = symbols('E, I, A')
>>> x, y, z = symbols('x, y, z')
>>> b = Beam(4, E, I)
>>> b.variable
x
>>> b.variable = y
>>> b.variable
y
>>> b = Beam(4, E, I, A, z)
>>> b.variable
z
"""
return self._variable
@variable.setter
def variable(self, v):
if isinstance(v, Symbol):
self._variable = v
else:
raise TypeError("""The variable should be a Symbol object.""")
@property
def elastic_modulus(self):
"""Young's Modulus of the Beam. """
return self._elastic_modulus
@elastic_modulus.setter
def elastic_modulus(self, e):
self._elastic_modulus = sympify(e)
@property
def second_moment(self):
"""Second moment of area of the Beam. """
return self._second_moment
@second_moment.setter
def second_moment(self, i):
self._cross_section = None
if isinstance(i, GeometryEntity):
raise ValueError("To update cross-section geometry use `cross_section` attribute")
else:
self._second_moment = sympify(i)
@property
def cross_section(self):
"""Cross-section of the beam"""
return self._cross_section
@cross_section.setter
def cross_section(self, s):
if s:
self._second_moment = s.second_moment_of_area()[0]
self._cross_section = s
@property
def boundary_conditions(self):
"""
Returns a dictionary of boundary conditions applied on the beam.
        The dictionary has two keywords, namely 'slope' and 'deflection'.
        The value of each keyword is a list of tuples, where each tuple
        contains the location and value of a boundary condition in the
        format (location, value).
Examples
========
        There is a beam of length 4 meters. The slope of the beam should be 1
        at 0 and the deflection should be 2 at 0; both conditions are set
        through the boundary condition attributes below.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(4, E, I)
>>> b.bc_deflection = [(0, 2)]
>>> b.bc_slope = [(0, 1)]
>>> b.boundary_conditions
{'deflection': [(0, 2)], 'slope': [(0, 1)]}
Here the deflection of the beam should be ``2`` at ``0``.
Similarly, the slope of the beam should be ``1`` at ``0``.
"""
return self._boundary_conditions
@property
def bc_slope(self):
return self._boundary_conditions['slope']
@bc_slope.setter
def bc_slope(self, s_bcs):
self._boundary_conditions['slope'] = s_bcs
@property
def bc_deflection(self):
return self._boundary_conditions['deflection']
@bc_deflection.setter
def bc_deflection(self, d_bcs):
self._boundary_conditions['deflection'] = d_bcs
def join(self, beam, via="fixed"):
"""
This method joins two beams to make a new composite beam system.
Passed Beam class instance is attached to the right end of calling
object. This method can be used to form beams having Discontinuous
values of Elastic modulus or Second moment.
Parameters
==========
beam : Beam class object
The Beam object which would be connected to the right of calling
object.
via : String
States the way two Beam object would get connected
- For axially fixed Beams, via="fixed"
- For Beams connected via hinge, via="hinge"
Examples
========
There is a cantilever beam of length 4 meters. For first 2 meters
its moment of inertia is `1.5*I` and `I` for the other end.
A pointload of magnitude 4 N is applied from the top at its free end.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b1 = Beam(2, E, 1.5*I)
>>> b2 = Beam(2, E, I)
>>> b = b1.join(b2, "fixed")
>>> b.apply_load(20, 4, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 0, -2)
>>> b.bc_slope = [(0, 0)]
>>> b.bc_deflection = [(0, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.load
80*SingularityFunction(x, 0, -2) - 20*SingularityFunction(x, 0, -1) + 20*SingularityFunction(x, 4, -1)
>>> b.slope()
(-((-80*SingularityFunction(x, 0, 1) + 10*SingularityFunction(x, 0, 2) - 10*SingularityFunction(x, 4, 2))/I + 120/I)/E + 80.0/(E*I))*SingularityFunction(x, 2, 0)
- 0.666666666666667*(-80*SingularityFunction(x, 0, 1) + 10*SingularityFunction(x, 0, 2) - 10*SingularityFunction(x, 4, 2))*SingularityFunction(x, 0, 0)/(E*I)
+ 0.666666666666667*(-80*SingularityFunction(x, 0, 1) + 10*SingularityFunction(x, 0, 2) - 10*SingularityFunction(x, 4, 2))*SingularityFunction(x, 2, 0)/(E*I)
"""
x = self.variable
E = self.elastic_modulus
new_length = self.length + beam.length
if self.second_moment != beam.second_moment:
new_second_moment = Piecewise((self.second_moment, x<=self.length),
(beam.second_moment, x<=new_length))
else:
new_second_moment = self.second_moment
if via == "fixed":
new_beam = Beam(new_length, E, new_second_moment, x)
new_beam._composite_type = "fixed"
return new_beam
if via == "hinge":
new_beam = Beam(new_length, E, new_second_moment, x)
new_beam._composite_type = "hinge"
new_beam._hinge_position = self.length
return new_beam
def apply_support(self, loc, type="fixed"):
"""
This method applies support to a particular beam object.
Parameters
==========
loc : Sympifyable
Location of point at which support is applied.
type : String
Determines type of Beam support applied. To apply support structure
with
- zero degree of freedom, type = "fixed"
- one degree of freedom, type = "pin"
- two degrees of freedom, type = "roller"
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(30, E, I)
>>> b.apply_support(10, 'roller')
>>> b.apply_support(30, 'roller')
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(120, 30, -2)
>>> R_10, R_30 = symbols('R_10, R_30')
>>> b.solve_for_reaction_loads(R_10, R_30)
>>> b.load
-8*SingularityFunction(x, 0, -1) + 6*SingularityFunction(x, 10, -1)
+ 120*SingularityFunction(x, 30, -2) + 2*SingularityFunction(x, 30, -1)
>>> b.slope()
(-4*SingularityFunction(x, 0, 2) + 3*SingularityFunction(x, 10, 2)
+ 120*SingularityFunction(x, 30, 1) + SingularityFunction(x, 30, 2) + 4000/3)/(E*I)
"""
loc = sympify(loc)
self._applied_supports.append((loc, type))
if type in ("pin", "roller"):
reaction_load = Symbol('R_'+str(loc))
self.apply_load(reaction_load, loc, -1)
self.bc_deflection.append((loc, 0))
else:
reaction_load = Symbol('R_'+str(loc))
reaction_moment = Symbol('M_'+str(loc))
self.apply_load(reaction_load, loc, -1)
self.apply_load(reaction_moment, loc, -2)
self.bc_deflection.append((loc, 0))
self.bc_slope.append((loc, 0))
self._support_as_loads.append((reaction_moment, loc, -2, None))
self._support_as_loads.append((reaction_load, loc, -1, None))
def apply_load(self, value, start, order, end=None):
"""
This method adds up the loads given to a particular beam object.
Parameters
==========
value : Sympifyable
            The value inserted should have the units [Force/(Distance**(n+1))]
where n is the order of applied load.
Units for applied loads:
- For moments, unit = kN*m
- For point loads, unit = kN
- For constant distributed load, unit = kN/m
- For ramp loads, unit = kN/m/m
- For parabolic ramp loads, unit = kN/m/m/m
- ... so on.
start : Sympifyable
The starting point of the applied load. For point moments and
point forces this is the location of application.
order : Integer
The order of the applied load.
- For moments, order = -2
- For point loads, order =-1
- For constant distributed load, order = 0
- For ramp loads, order = 1
- For parabolic ramp loads, order = 2
- ... so on.
end : Sympifyable, optional
An optional argument that can be used if the load has an end point
within the length of the beam.
Examples
========
There is a beam of length 4 meters. A moment of magnitude 3 Nm is
applied in the clockwise direction at the starting point of the beam.
A point load of magnitude 4 N is applied from the top of the beam at
2 meters from the starting point and a parabolic ramp load of magnitude
2 N/m is applied below the beam starting from 2 meters to 3 meters
away from the starting point of the beam.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(4, E, I)
>>> b.apply_load(-3, 0, -2)
>>> b.apply_load(4, 2, -1)
>>> b.apply_load(-2, 2, 2, end=3)
>>> b.load
-3*SingularityFunction(x, 0, -2) + 4*SingularityFunction(x, 2, -1) - 2*SingularityFunction(x, 2, 2) + 2*SingularityFunction(x, 3, 0) + 4*SingularityFunction(x, 3, 1) + 2*SingularityFunction(x, 3, 2)
"""
x = self.variable
value = sympify(value)
start = sympify(start)
order = sympify(order)
self._applied_loads.append((value, start, order, end))
self._load += value*SingularityFunction(x, start, order)
self._original_load += value*SingularityFunction(x, start, order)
if end:
# load has an end point within the length of the beam.
self._handle_end(x, value, start, order, end, type="apply")
def remove_load(self, value, start, order, end=None):
"""
This method removes a particular load present on the beam object.
Returns a ValueError if the load passed as an argument is not
present on the beam.
Parameters
==========
value : Sympifyable
The magnitude of an applied load.
start : Sympifyable
The starting point of the applied load. For point moments and
point forces this is the location of application.
order : Integer
The order of the applied load.
- For moments, order= -2
- For point loads, order=-1
- For constant distributed load, order=0
- For ramp loads, order=1
- For parabolic ramp loads, order=2
- ... so on.
end : Sympifyable, optional
An optional argument that can be used if the load has an end point
within the length of the beam.
Examples
========
There is a beam of length 4 meters. A moment of magnitude 3 Nm is
applied in the clockwise direction at the starting point of the beam.
A pointload of magnitude 4 N is applied from the top of the beam at
2 meters from the starting point and a parabolic ramp load of magnitude
2 N/m is applied below the beam starting from 2 meters to 3 meters
away from the starting point of the beam.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(4, E, I)
>>> b.apply_load(-3, 0, -2)
>>> b.apply_load(4, 2, -1)
>>> b.apply_load(-2, 2, 2, end=3)
>>> b.load
-3*SingularityFunction(x, 0, -2) + 4*SingularityFunction(x, 2, -1) - 2*SingularityFunction(x, 2, 2) + 2*SingularityFunction(x, 3, 0) + 4*SingularityFunction(x, 3, 1) + 2*SingularityFunction(x, 3, 2)
>>> b.remove_load(-2, 2, 2, end = 3)
>>> b.load
-3*SingularityFunction(x, 0, -2) + 4*SingularityFunction(x, 2, -1)
"""
x = self.variable
value = sympify(value)
start = sympify(start)
order = sympify(order)
if (value, start, order, end) in self._applied_loads:
self._load -= value*SingularityFunction(x, start, order)
self._original_load -= value*SingularityFunction(x, start, order)
self._applied_loads.remove((value, start, order, end))
else:
msg = "No such load distribution exists on the beam object."
raise ValueError(msg)
if end:
# load has an end point within the length of the beam.
self._handle_end(x, value, start, order, end, type="remove")
def _handle_end(self, x, value, start, order, end, type):
"""
        This function handles the optional `end` value in the
        `apply_load` and `remove_load` methods. When a value for
        `end` is provided, this function is executed.
"""
if order.is_negative:
msg = ("If 'end' is provided the 'order' of the load cannot "
"be negative, i.e. 'end' is only valid for distributed "
"loads.")
raise ValueError(msg)
# NOTE : A Taylor series can be used to define the summation of
# singularity functions that subtract from the load past the end
# point such that it evaluates to zero past 'end'.
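        # Illustrative example (added comment): a constant load w applied from
        # a to b enters the load as w*SingularityFunction(x, a, 0), and the loop
        # below cancels it past the end point by subtracting
        # w*SingularityFunction(x, b, 0); a ramp load additionally needs a step
        # term at x = b for the cancellation to be exact.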
f = value*x**order
if type == "apply":
# iterating for "apply_load" method
for i in range(0, order + 1):
self._load -= (f.diff(x, i).subs(x, end - start) *
SingularityFunction(x, end, i)/factorial(i))
self._original_load -= (f.diff(x, i).subs(x, end - start) *
SingularityFunction(x, end, i)/factorial(i))
elif type == "remove":
# iterating for "remove_load" method
for i in range(0, order + 1):
self._load += (f.diff(x, i).subs(x, end - start) *
SingularityFunction(x, end, i)/factorial(i))
self._original_load += (f.diff(x, i).subs(x, end - start) *
SingularityFunction(x, end, i)/factorial(i))
@property
def load(self):
"""
Returns a Singularity Function expression which represents
the load distribution curve of the Beam object.
Examples
========
There is a beam of length 4 meters. A moment of magnitude 3 Nm is
applied in the clockwise direction at the starting point of the beam.
A point load of magnitude 4 N is applied from the top of the beam at
2 meters from the starting point and a parabolic ramp load of magnitude
2 N/m is applied below the beam starting from 3 meters away from the
starting point of the beam.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(4, E, I)
>>> b.apply_load(-3, 0, -2)
>>> b.apply_load(4, 2, -1)
>>> b.apply_load(-2, 3, 2)
>>> b.load
-3*SingularityFunction(x, 0, -2) + 4*SingularityFunction(x, 2, -1) - 2*SingularityFunction(x, 3, 2)
"""
return self._load
@property
def applied_loads(self):
"""
Returns a list of all loads applied on the beam object.
Each load in the list is a tuple of form (value, start, order, end).
Examples
========
There is a beam of length 4 meters. A moment of magnitude 3 Nm is
applied in the clockwise direction at the starting point of the beam.
A pointload of magnitude 4 N is applied from the top of the beam at
2 meters from the starting point. Another pointload of magnitude 5 N
is applied at same position.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(4, E, I)
>>> b.apply_load(-3, 0, -2)
>>> b.apply_load(4, 2, -1)
>>> b.apply_load(5, 2, -1)
>>> b.load
-3*SingularityFunction(x, 0, -2) + 9*SingularityFunction(x, 2, -1)
>>> b.applied_loads
[(-3, 0, -2, None), (4, 2, -1, None), (5, 2, -1, None)]
"""
return self._applied_loads
def _solve_hinge_beams(self, *reactions):
"""Method to find integration constants and reactional variables in a
composite beam connected via hinge.
This method resolves the composite Beam into its sub-beams and then
equations of shear force, bending moment, slope and deflection are
evaluated for both of them separately. These equations are then solved
for unknown reactions and integration constants using the boundary
conditions applied on the Beam. Equal deflection of both sub-beams
at the hinge joint gives us another equation to solve the system.
Examples
========
        A combined beam, with constant flexural rigidity E*I, is formed by joining
        a Beam of length 2*l to the right of another Beam of length l. The whole
        beam is fixed at both of its ends. A point load of magnitude P is applied
        from the top at a distance of 2*l from the starting point.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> l=symbols('l', positive=True)
>>> b1=Beam(l, E, I)
>>> b2=Beam(2*l, E, I)
>>> b=b1.join(b2,"hinge")
>>> M1, A1, M2, A2, P = symbols('M1 A1 M2 A2 P')
>>> b.apply_load(A1,0,-1)
>>> b.apply_load(M1,0,-2)
>>> b.apply_load(P,2*l,-1)
>>> b.apply_load(A2,3*l,-1)
>>> b.apply_load(M2,3*l,-2)
>>> b.bc_slope=[(0,0), (3*l, 0)]
>>> b.bc_deflection=[(0,0), (3*l, 0)]
>>> b.solve_for_reaction_loads(M1, A1, M2, A2)
>>> b.reaction_loads
{A1: -5*P/18, A2: -13*P/18, M1: 5*P*l/18, M2: -4*P*l/9}
>>> b.slope()
(5*P*l*SingularityFunction(x, 0, 1)/18 - 5*P*SingularityFunction(x, 0, 2)/36 + 5*P*SingularityFunction(x, l, 2)/36)*SingularityFunction(x, 0, 0)/(E*I)
- (5*P*l*SingularityFunction(x, 0, 1)/18 - 5*P*SingularityFunction(x, 0, 2)/36 + 5*P*SingularityFunction(x, l, 2)/36)*SingularityFunction(x, l, 0)/(E*I)
+ (P*l**2/18 - 4*P*l*SingularityFunction(-l + x, 2*l, 1)/9 - 5*P*SingularityFunction(-l + x, 0, 2)/36 + P*SingularityFunction(-l + x, l, 2)/2
- 13*P*SingularityFunction(-l + x, 2*l, 2)/36)*SingularityFunction(x, l, 0)/(E*I)
>>> b.deflection()
(5*P*l*SingularityFunction(x, 0, 2)/36 - 5*P*SingularityFunction(x, 0, 3)/108 + 5*P*SingularityFunction(x, l, 3)/108)*SingularityFunction(x, 0, 0)/(E*I)
- (5*P*l*SingularityFunction(x, 0, 2)/36 - 5*P*SingularityFunction(x, 0, 3)/108 + 5*P*SingularityFunction(x, l, 3)/108)*SingularityFunction(x, l, 0)/(E*I)
+ (5*P*l**3/54 + P*l**2*(-l + x)/18 - 2*P*l*SingularityFunction(-l + x, 2*l, 2)/9 - 5*P*SingularityFunction(-l + x, 0, 3)/108 + P*SingularityFunction(-l + x, l, 3)/6
- 13*P*SingularityFunction(-l + x, 2*l, 3)/108)*SingularityFunction(x, l, 0)/(E*I)
"""
x = self.variable
l = self._hinge_position
E = self._elastic_modulus
I = self._second_moment
if isinstance(I, Piecewise):
I1 = I.args[0][0]
I2 = I.args[1][0]
else:
I1 = I2 = I
load_1 = 0 # Load equation on first segment of composite beam
load_2 = 0 # Load equation on second segment of composite beam
# Distributing load on both segments
for load in self.applied_loads:
if load[1] < l:
load_1 += load[0]*SingularityFunction(x, load[1], load[2])
if load[2] == 0:
load_1 -= load[0]*SingularityFunction(x, load[3], load[2])
elif load[2] > 0:
load_1 -= load[0]*SingularityFunction(x, load[3], load[2]) + load[0]*SingularityFunction(x, load[3], 0)
elif load[1] == l:
load_1 += load[0]*SingularityFunction(x, load[1], load[2])
load_2 += load[0]*SingularityFunction(x, load[1] - l, load[2])
elif load[1] > l:
load_2 += load[0]*SingularityFunction(x, load[1] - l, load[2])
if load[2] == 0:
load_2 -= load[0]*SingularityFunction(x, load[3] - l, load[2])
elif load[2] > 0:
load_2 -= load[0]*SingularityFunction(x, load[3] - l, load[2]) + load[0]*SingularityFunction(x, load[3] - l, 0)
h = Symbol('h') # Force due to hinge
load_1 += h*SingularityFunction(x, l, -1)
load_2 -= h*SingularityFunction(x, 0, -1)
eq = []
shear_1 = integrate(load_1, x)
shear_curve_1 = limit(shear_1, x, l)
eq.append(shear_curve_1)
bending_1 = integrate(shear_1, x)
moment_curve_1 = limit(bending_1, x, l)
eq.append(moment_curve_1)
shear_2 = integrate(load_2, x)
shear_curve_2 = limit(shear_2, x, self.length - l)
eq.append(shear_curve_2)
bending_2 = integrate(shear_2, x)
moment_curve_2 = limit(bending_2, x, self.length - l)
eq.append(moment_curve_2)
C1 = Symbol('C1')
C2 = Symbol('C2')
C3 = Symbol('C3')
C4 = Symbol('C4')
slope_1 = S.One/(E*I1)*(integrate(bending_1, x) + C1)
def_1 = S.One/(E*I1)*(integrate((E*I)*slope_1, x) + C1*x + C2)
slope_2 = S.One/(E*I2)*(integrate(integrate(integrate(load_2, x), x), x) + C3)
def_2 = S.One/(E*I2)*(integrate((E*I)*slope_2, x) + C4)
for position, value in self.bc_slope:
if position<l:
eq.append(slope_1.subs(x, position) - value)
else:
eq.append(slope_2.subs(x, position - l) - value)
for position, value in self.bc_deflection:
if position<l:
eq.append(def_1.subs(x, position) - value)
else:
eq.append(def_2.subs(x, position - l) - value)
eq.append(def_1.subs(x, l) - def_2.subs(x, 0)) # Deflection of both the segments at hinge would be equal
constants = list(linsolve(eq, C1, C2, C3, C4, h, *reactions))
reaction_values = list(constants[0])[5:]
self._reaction_loads = dict(zip(reactions, reaction_values))
self._load = self._load.subs(self._reaction_loads)
# Substituting constants and reactional load and moments with their corresponding values
slope_1 = slope_1.subs({C1: constants[0][0], h:constants[0][4]}).subs(self._reaction_loads)
def_1 = def_1.subs({C1: constants[0][0], C2: constants[0][1], h:constants[0][4]}).subs(self._reaction_loads)
slope_2 = slope_2.subs({x: x-l, C3: constants[0][2], h:constants[0][4]}).subs(self._reaction_loads)
def_2 = def_2.subs({x: x-l,C3: constants[0][2], C4: constants[0][3], h:constants[0][4]}).subs(self._reaction_loads)
self._hinge_beam_slope = slope_1*SingularityFunction(x, 0, 0) - slope_1*SingularityFunction(x, l, 0) + slope_2*SingularityFunction(x, l, 0)
self._hinge_beam_deflection = def_1*SingularityFunction(x, 0, 0) - def_1*SingularityFunction(x, l, 0) + def_2*SingularityFunction(x, l, 0)
def solve_for_reaction_loads(self, *reactions):
"""
Solves for the reaction forces.
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(30, E, I)
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(R1, 10, -1) # Reaction force at x = 10
>>> b.apply_load(R2, 30, -1) # Reaction force at x = 30
>>> b.apply_load(120, 30, -2)
>>> b.bc_deflection = [(10, 0), (30, 0)]
>>> b.load
R1*SingularityFunction(x, 10, -1) + R2*SingularityFunction(x, 30, -1)
- 8*SingularityFunction(x, 0, -1) + 120*SingularityFunction(x, 30, -2)
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.reaction_loads
{R1: 6, R2: 2}
>>> b.load
-8*SingularityFunction(x, 0, -1) + 6*SingularityFunction(x, 10, -1)
+ 120*SingularityFunction(x, 30, -2) + 2*SingularityFunction(x, 30, -1)
"""
if self._composite_type == "hinge":
return self._solve_hinge_beams(*reactions)
x = self.variable
l = self.length
C3 = Symbol('C3')
C4 = Symbol('C4')
shear_curve = limit(self.shear_force(), x, l)
moment_curve = limit(self.bending_moment(), x, l)
slope_eqs = []
deflection_eqs = []
slope_curve = integrate(self.bending_moment(), x) + C3
for position, value in self._boundary_conditions['slope']:
eqs = slope_curve.subs(x, position) - value
slope_eqs.append(eqs)
deflection_curve = integrate(slope_curve, x) + C4
for position, value in self._boundary_conditions['deflection']:
eqs = deflection_curve.subs(x, position) - value
deflection_eqs.append(eqs)
solution = list((linsolve([shear_curve, moment_curve] + slope_eqs
+ deflection_eqs, (C3, C4) + reactions).args)[0])
solution = solution[2:]
self._reaction_loads = dict(zip(reactions, solution))
self._load = self._load.subs(self._reaction_loads)
def shear_force(self):
"""
Returns a Singularity Function expression which represents
the shear force curve of the Beam object.
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(30, E, I)
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(R1, 10, -1)
>>> b.apply_load(R2, 30, -1)
>>> b.apply_load(120, 30, -2)
>>> b.bc_deflection = [(10, 0), (30, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.shear_force()
8*SingularityFunction(x, 0, 0) - 6*SingularityFunction(x, 10, 0) - 120*SingularityFunction(x, 30, -1) - 2*SingularityFunction(x, 30, 0)
"""
x = self.variable
return -integrate(self.load, x)
def max_shear_force(self):
"""Returns maximum Shear force and its coordinate
in the Beam object."""
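        # Usage sketch (added comment; values illustrative): after calling
        # b.solve_for_reaction_loads(...), ``point, value = b.max_shear_force()``
        # returns the location and magnitude of the largest absolute shear force.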
from sympy.core.mul import Mul
from sympy.sets.sets import Interval
from sympy.solvers.solvers import solve
shear_curve = self.shear_force()
x = self.variable
terms = shear_curve.args
singularity = [] # Points at which shear function changes
for term in terms:
if isinstance(term, Mul):
term = term.args[-1] # SingularityFunction in the term
singularity.append(term.args[1])
singularity.sort()
singularity = list(set(singularity))
intervals = [] # List of Intervals with discrete value of shear force
shear_values = [] # List of values of shear force in each interval
for i, s in enumerate(singularity):
if s == 0:
continue
try:
shear_slope = Piecewise((float("nan"), x<=singularity[i-1]),(self._load.rewrite(Piecewise), x<s), (float("nan"), True))
points = solve(shear_slope, x)
val = []
for point in points:
val.append(abs(shear_curve.subs(x, point)))
points.extend([singularity[i-1], s])
val += [abs(limit(shear_curve, x, singularity[i-1], '+')), abs(limit(shear_curve, x, s, '-'))]
max_shear = max(val)
shear_values.append(max_shear)
intervals.append(points[val.index(max_shear)])
# If shear force in a particular Interval has zero or constant
# slope, then above block gives NotImplementedError as
# solve can't represent Interval solutions.
except NotImplementedError:
initial_shear = limit(shear_curve, x, singularity[i-1], '+')
final_shear = limit(shear_curve, x, s, '-')
# If shear_curve has a constant slope(it is a line).
if shear_curve.subs(x, (singularity[i-1] + s)/2) == (initial_shear + final_shear)/2 and initial_shear != final_shear:
shear_values.extend([initial_shear, final_shear])
intervals.extend([singularity[i-1], s])
else: # shear_curve has same value in whole Interval
shear_values.append(final_shear)
intervals.append(Interval(singularity[i-1], s))
shear_values = list(map(abs, shear_values))
maximum_shear = max(shear_values)
point = intervals[shear_values.index(maximum_shear)]
return (point, maximum_shear)
def bending_moment(self):
"""
Returns a Singularity Function expression which represents
the bending moment curve of the Beam object.
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(30, E, I)
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(R1, 10, -1)
>>> b.apply_load(R2, 30, -1)
>>> b.apply_load(120, 30, -2)
>>> b.bc_deflection = [(10, 0), (30, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.bending_moment()
8*SingularityFunction(x, 0, 1) - 6*SingularityFunction(x, 10, 1) - 120*SingularityFunction(x, 30, 0) - 2*SingularityFunction(x, 30, 1)
"""
x = self.variable
return integrate(self.shear_force(), x)
def max_bmoment(self):
"""Returns maximum Shear force and its coordinate
in the Beam object."""
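        # Usage sketch (added comment): ``point, value = b.max_bmoment()`` returns
        # the location and magnitude of the largest absolute bending moment, found
        # by the same interval-by-interval search used in max_shear_force above.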
from sympy.core.mul import Mul
from sympy.sets.sets import Interval
from sympy.solvers.solvers import solve
bending_curve = self.bending_moment()
x = self.variable
terms = bending_curve.args
singularity = [] # Points at which bending moment changes
for term in terms:
if isinstance(term, Mul):
term = term.args[-1] # SingularityFunction in the term
singularity.append(term.args[1])
singularity.sort()
singularity = list(set(singularity))
intervals = [] # List of Intervals with discrete value of bending moment
moment_values = [] # List of values of bending moment in each interval
for i, s in enumerate(singularity):
if s == 0:
continue
try:
moment_slope = Piecewise((float("nan"), x<=singularity[i-1]),(self.shear_force().rewrite(Piecewise), x<s), (float("nan"), True))
points = solve(moment_slope, x)
val = []
for point in points:
val.append(abs(bending_curve.subs(x, point)))
points.extend([singularity[i-1], s])
val += [abs(limit(bending_curve, x, singularity[i-1], '+')), abs(limit(bending_curve, x, s, '-'))]
max_moment = max(val)
moment_values.append(max_moment)
intervals.append(points[val.index(max_moment)])
# If bending moment in a particular Interval has zero or constant
# slope, then above block gives NotImplementedError as solve
# can't represent Interval solutions.
except NotImplementedError:
initial_moment = limit(bending_curve, x, singularity[i-1], '+')
final_moment = limit(bending_curve, x, s, '-')
# If bending_curve has a constant slope(it is a line).
if bending_curve.subs(x, (singularity[i-1] + s)/2) == (initial_moment + final_moment)/2 and initial_moment != final_moment:
moment_values.extend([initial_moment, final_moment])
intervals.extend([singularity[i-1], s])
else: # bending_curve has same value in whole Interval
moment_values.append(final_moment)
intervals.append(Interval(singularity[i-1], s))
moment_values = list(map(abs, moment_values))
maximum_moment = max(moment_values)
point = intervals[moment_values.index(maximum_moment)]
return (point, maximum_moment)
def point_cflexure(self):
"""
Returns a Set of point(s) with zero bending moment and
where bending moment curve of the beam object changes
its sign from negative to positive or vice versa.
Examples
========
        There is a 10 meter long overhanging beam. There are
        two simple supports below the beam, one at the start
        and another one at a distance of 6 meters from the start.
        Point loads of magnitude 10 KN and 20 KN are applied at
        2 meters and 4 meters from the start respectively. A uniformly
        distributed load of magnitude 3 KN/m is also applied on top,
        starting from 6 meters away from the starting point till the end.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> b = Beam(10, E, I)
>>> b.apply_load(-4, 0, -1)
>>> b.apply_load(-46, 6, -1)
>>> b.apply_load(10, 2, -1)
>>> b.apply_load(20, 4, -1)
>>> b.apply_load(3, 6, 0)
>>> b.point_cflexure()
[10/3]
"""
from sympy.solvers.solvers import solve
# To restrict the range within length of the Beam
moment_curve = Piecewise((float("nan"), self.variable<=0),
(self.bending_moment(), self.variable<self.length),
(float("nan"), True))
points = solve(moment_curve.rewrite(Piecewise), self.variable,
domain=S.Reals)
return points
def slope(self):
"""
Returns a Singularity Function expression which represents
        the slope of the elastic curve of the Beam object.
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(30, E, I)
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(R1, 10, -1)
>>> b.apply_load(R2, 30, -1)
>>> b.apply_load(120, 30, -2)
>>> b.bc_deflection = [(10, 0), (30, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.slope()
(-4*SingularityFunction(x, 0, 2) + 3*SingularityFunction(x, 10, 2)
+ 120*SingularityFunction(x, 30, 1) + SingularityFunction(x, 30, 2) + 4000/3)/(E*I)
"""
x = self.variable
E = self.elastic_modulus
I = self.second_moment
if self._composite_type == "hinge":
return self._hinge_beam_slope
if not self._boundary_conditions['slope']:
return diff(self.deflection(), x)
if isinstance(I, Piecewise) and self._composite_type == "fixed":
args = I.args
slope = 0
prev_slope = 0
prev_end = 0
for i in range(len(args)):
if i != 0:
prev_end = args[i-1][1].args[1]
slope_value = -S.One/E*integrate(self.bending_moment()/args[i][0], (x, prev_end, x))
if i != len(args) - 1:
slope += (prev_slope + slope_value)*SingularityFunction(x, prev_end, 0) - \
(prev_slope + slope_value)*SingularityFunction(x, args[i][1].args[1], 0)
else:
slope += (prev_slope + slope_value)*SingularityFunction(x, prev_end, 0)
prev_slope = slope_value.subs(x, args[i][1].args[1])
return slope
C3 = Symbol('C3')
slope_curve = -integrate(S.One/(E*I)*self.bending_moment(), x) + C3
bc_eqs = []
for position, value in self._boundary_conditions['slope']:
eqs = slope_curve.subs(x, position) - value
bc_eqs.append(eqs)
constants = list(linsolve(bc_eqs, C3))
slope_curve = slope_curve.subs({C3: constants[0][0]})
return slope_curve
def deflection(self):
"""
Returns a Singularity Function expression which represents
the elastic curve or deflection of the Beam object.
Examples
========
There is a beam of length 30 meters. A moment of magnitude 120 Nm is
applied in the clockwise direction at the end of the beam. A pointload
of magnitude 8 N is applied from the top of the beam at the starting
point. There are two simple supports below the beam. One at the end
and another one at a distance of 10 meters from the start. The
deflection is restricted at both the supports.
Using the sign convention of upward forces and clockwise moment
being positive.
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> E, I = symbols('E, I')
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(30, E, I)
>>> b.apply_load(-8, 0, -1)
>>> b.apply_load(R1, 10, -1)
>>> b.apply_load(R2, 30, -1)
>>> b.apply_load(120, 30, -2)
>>> b.bc_deflection = [(10, 0), (30, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.deflection()
(4000*x/3 - 4*SingularityFunction(x, 0, 3)/3 + SingularityFunction(x, 10, 3)
+ 60*SingularityFunction(x, 30, 2) + SingularityFunction(x, 30, 3)/3 - 12000)/(E*I)
"""
x = self.variable
E = self.elastic_modulus
I = self.second_moment
if self._composite_type == "hinge":
return self._hinge_beam_deflection
if not self._boundary_conditions['deflection'] and not self._boundary_conditions['slope']:
if isinstance(I, Piecewise) and self._composite_type == "fixed":
args = I.args
prev_slope = 0
prev_def = 0
prev_end = 0
deflection = 0
for i in range(len(args)):
if i != 0:
prev_end = args[i-1][1].args[1]
slope_value = -S.One/E*integrate(self.bending_moment()/args[i][0], (x, prev_end, x))
recent_segment_slope = prev_slope + slope_value
deflection_value = integrate(recent_segment_slope, (x, prev_end, x))
if i != len(args) - 1:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0) \
- (prev_def + deflection_value)*SingularityFunction(x, args[i][1].args[1], 0)
else:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0)
prev_slope = slope_value.subs(x, args[i][1].args[1])
prev_def = deflection_value.subs(x, args[i][1].args[1])
return deflection
base_char = self._base_char
constants = symbols(base_char + '3:5')
return S.One/(E*I)*integrate(-integrate(self.bending_moment(), x), x) + constants[0]*x + constants[1]
elif not self._boundary_conditions['deflection']:
base_char = self._base_char
constant = symbols(base_char + '4')
return integrate(self.slope(), x) + constant
elif not self._boundary_conditions['slope'] and self._boundary_conditions['deflection']:
if isinstance(I, Piecewise) and self._composite_type == "fixed":
args = I.args
prev_slope = 0
prev_def = 0
prev_end = 0
deflection = 0
for i in range(len(args)):
if i != 0:
prev_end = args[i-1][1].args[1]
slope_value = -S.One/E*integrate(self.bending_moment()/args[i][0], (x, prev_end, x))
recent_segment_slope = prev_slope + slope_value
deflection_value = integrate(recent_segment_slope, (x, prev_end, x))
if i != len(args) - 1:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0) \
- (prev_def + deflection_value)*SingularityFunction(x, args[i][1].args[1], 0)
else:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0)
prev_slope = slope_value.subs(x, args[i][1].args[1])
prev_def = deflection_value.subs(x, args[i][1].args[1])
return deflection
base_char = self._base_char
C3, C4 = symbols(base_char + '3:5') # Integration constants
slope_curve = -integrate(self.bending_moment(), x) + C3
deflection_curve = integrate(slope_curve, x) + C4
bc_eqs = []
for position, value in self._boundary_conditions['deflection']:
eqs = deflection_curve.subs(x, position) - value
bc_eqs.append(eqs)
constants = list(linsolve(bc_eqs, (C3, C4)))
deflection_curve = deflection_curve.subs({C3: constants[0][0], C4: constants[0][1]})
return S.One/(E*I)*deflection_curve
if isinstance(I, Piecewise) and self._composite_type == "fixed":
args = I.args
prev_slope = 0
prev_def = 0
prev_end = 0
deflection = 0
for i in range(len(args)):
if i != 0:
prev_end = args[i-1][1].args[1]
slope_value = S.One/E*integrate(self.bending_moment()/args[i][0], (x, prev_end, x))
recent_segment_slope = prev_slope + slope_value
deflection_value = integrate(recent_segment_slope, (x, prev_end, x))
if i != len(args) - 1:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0) \
- (prev_def + deflection_value)*SingularityFunction(x, args[i][1].args[1], 0)
else:
deflection += (prev_def + deflection_value)*SingularityFunction(x, prev_end, 0)
prev_slope = slope_value.subs(x, args[i][1].args[1])
prev_def = deflection_value.subs(x, args[i][1].args[1])
return deflection
C4 = Symbol('C4')
deflection_curve = integrate(self.slope(), x) + C4
bc_eqs = []
for position, value in self._boundary_conditions['deflection']:
eqs = deflection_curve.subs(x, position) - value
bc_eqs.append(eqs)
constants = list(linsolve(bc_eqs, C4))
deflection_curve = deflection_curve.subs({C4: constants[0][0]})
return deflection_curve
def max_deflection(self):
"""
Returns point of max deflection and its corresponding deflection value
in a Beam object.
"""
from sympy.solvers.solvers import solve
# To restrict the range within length of the Beam
slope_curve = Piecewise((float("nan"), self.variable<=0),
(self.slope(), self.variable<self.length),
(float("nan"), True))
points = solve(slope_curve.rewrite(Piecewise), self.variable,
domain=S.Reals)
deflection_curve = self.deflection()
deflections = [deflection_curve.subs(self.variable, x) for x in points]
deflections = list(map(abs, deflections))
if len(deflections) != 0:
max_def = max(deflections)
return (points[deflections.index(max_def)], max_def)
else:
return None
def shear_stress(self):
"""
Returns an expression representing the Shear Stress
curve of the Beam object.
"""
return self.shear_force()/self._area
def plot_shear_stress(self, subs=None):
"""
Returns a plot of shear stress present in the beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters and area of cross section 2 square
meters. A constant distributed load of 10 KN/m is applied from half of
the beam till the end. There are two simple supports below the beam,
one at the starting point and another at the ending point of the beam.
A pointload of magnitude 5 KN is also applied from top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6), 2)
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.plot_shear_stress()
Plot object containing:
[0]: cartesian line: 6875*SingularityFunction(x, 0, 0) - 2500*SingularityFunction(x, 2, 0)
- 5000*SingularityFunction(x, 4, 1) + 15625*SingularityFunction(x, 8, 0)
+ 5000*SingularityFunction(x, 8, 1) for x over (0.0, 8.0)
"""
shear_stress = self.shear_stress()
x = self.variable
length = self.length
if subs is None:
subs = {}
for sym in shear_stress.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('value of %s was not passed.' %sym)
if length in subs:
length = subs[length]
# Returns Plot of Shear Stress
        return plot(shear_stress.subs(subs), (x, 0, length),
title='Shear Stress', xlabel=r'$\mathrm{x}$', ylabel=r'$\tau$',
line_color='r')
def plot_shear_force(self, subs=None):
"""
Returns a plot for Shear force present in the Beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters. A constant distributed load of 10 KN/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. A pointload of magnitude 5 KN is also applied from top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6))
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.plot_shear_force()
Plot object containing:
[0]: cartesian line: 13750*SingularityFunction(x, 0, 0) - 5000*SingularityFunction(x, 2, 0)
- 10000*SingularityFunction(x, 4, 1) + 31250*SingularityFunction(x, 8, 0)
+ 10000*SingularityFunction(x, 8, 1) for x over (0.0, 8.0)
"""
shear_force = self.shear_force()
if subs is None:
subs = {}
for sym in shear_force.atoms(Symbol):
if sym == self.variable:
continue
if sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(shear_force.subs(subs), (self.variable, 0, length), title='Shear Force',
xlabel=r'$\mathrm{x}$', ylabel=r'$\mathrm{V}$', line_color='g')
def plot_bending_moment(self, subs=None):
"""
Returns a plot for Bending moment present in the Beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters. A constant distributed load of 10 KN/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. A point load of magnitude 5 KN is also applied from the top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6))
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.plot_bending_moment()
Plot object containing:
[0]: cartesian line: 13750*SingularityFunction(x, 0, 1) - 5000*SingularityFunction(x, 2, 1)
- 5000*SingularityFunction(x, 4, 2) + 31250*SingularityFunction(x, 8, 1)
+ 5000*SingularityFunction(x, 8, 2) for x over (0.0, 8.0)
"""
bending_moment = self.bending_moment()
if subs is None:
subs = {}
for sym in bending_moment.atoms(Symbol):
if sym == self.variable:
continue
if sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(bending_moment.subs(subs), (self.variable, 0, length), title='Bending Moment',
xlabel=r'$\mathrm{x}$', ylabel=r'$\mathrm{M}$', line_color='b')
def plot_slope(self, subs=None):
"""
Returns a plot for slope of deflection curve of the Beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters. A constant distributed load of 10 KN/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. A point load of magnitude 5 KN is also applied from the top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6))
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.plot_slope()
Plot object containing:
[0]: cartesian line: -8.59375e-5*SingularityFunction(x, 0, 2) + 3.125e-5*SingularityFunction(x, 2, 2)
+ 2.08333333333333e-5*SingularityFunction(x, 4, 3) - 0.0001953125*SingularityFunction(x, 8, 2)
- 2.08333333333333e-5*SingularityFunction(x, 8, 3) + 0.00138541666666667 for x over (0.0, 8.0)
"""
slope = self.slope()
if subs is None:
subs = {}
for sym in slope.atoms(Symbol):
if sym == self.variable:
continue
if sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(slope.subs(subs), (self.variable, 0, length), title='Slope',
xlabel=r'$\mathrm{x}$', ylabel=r'$\theta$', line_color='m')
def plot_deflection(self, subs=None):
"""
Returns a plot for deflection curve of the Beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters. A constant distributed load of 10 KN/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. A point load of magnitude 5 KN is also applied from the top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6))
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> b.plot_deflection()
Plot object containing:
[0]: cartesian line: 0.00138541666666667*x - 2.86458333333333e-5*SingularityFunction(x, 0, 3)
+ 1.04166666666667e-5*SingularityFunction(x, 2, 3) + 5.20833333333333e-6*SingularityFunction(x, 4, 4)
- 6.51041666666667e-5*SingularityFunction(x, 8, 3) - 5.20833333333333e-6*SingularityFunction(x, 8, 4)
for x over (0.0, 8.0)
"""
deflection = self.deflection()
if subs is None:
subs = {}
for sym in deflection.atoms(Symbol):
if sym == self.variable:
continue
if sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(deflection.subs(subs), (self.variable, 0, length),
title='Deflection', xlabel=r'$\mathrm{x}$', ylabel=r'$\delta$',
line_color='r')
def plot_loading_results(self, subs=None):
"""
Returns a subplot of Shear Force, Bending Moment,
Slope and Deflection of the Beam object.
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 8 meters. A constant distributed load of 10 KN/m
is applied from half of the beam till the end. There are two simple supports
below the beam, one at the starting point and another at the ending point
of the beam. A point load of magnitude 5 KN is also applied from the top of the
beam, at a distance of 4 meters from the starting point.
Take E = 200 GPa and I = 400*(10**-6) meter**4.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> b = Beam(8, 200*(10**9), 400*(10**-6))
>>> b.apply_load(5000, 2, -1)
>>> b.apply_load(R1, 0, -1)
>>> b.apply_load(R2, 8, -1)
>>> b.apply_load(10000, 4, 0, end=8)
>>> b.bc_deflection = [(0, 0), (8, 0)]
>>> b.solve_for_reaction_loads(R1, R2)
>>> axes = b.plot_loading_results()
"""
length = self.length
variable = self.variable
if subs is None:
subs = {}
for sym in self.deflection().atoms(Symbol):
if sym == self.variable:
continue
if sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if length in subs:
length = subs[length]
ax1 = plot(self.shear_force().subs(subs), (variable, 0, length),
title="Shear Force", xlabel=r'$\mathrm{x}$', ylabel=r'$\mathrm{V}$',
line_color='g', show=False)
ax2 = plot(self.bending_moment().subs(subs), (variable, 0, length),
title="Bending Moment", xlabel=r'$\mathrm{x}$', ylabel=r'$\mathrm{M}$',
line_color='b', show=False)
ax3 = plot(self.slope().subs(subs), (variable, 0, length),
title="Slope", xlabel=r'$\mathrm{x}$', ylabel=r'$\theta$',
line_color='m', show=False)
ax4 = plot(self.deflection().subs(subs), (variable, 0, length),
title="Deflection", xlabel=r'$\mathrm{x}$', ylabel=r'$\delta$',
line_color='r', show=False)
return PlotGrid(4, 1, ax1, ax2, ax3, ax4)
def _solve_for_ild_equations(self):
"""
Helper function for I.L.D. It takes the unsubstituted
copy of the load equation and uses it to calculate shear force and bending
moment equations.
"""
x = self.variable
shear_force = -integrate(self._original_load, x)
bending_moment = integrate(shear_force, x)
return shear_force, bending_moment
def solve_for_ild_reactions(self, value, *reactions):
"""
Determines the Influence Line Diagram equations for reaction
forces under the effect of a moving load.
Parameters
==========
value : Integer
Magnitude of moving load
reactions :
The reaction forces applied on the beam.
Examples
========
There is a beam of length 10 meters. There are two simple supports
below the beam, one at the starting point and another at the ending
point of the beam. Calculate the I.L.D. equations for reaction forces
under the effect of a moving load of magnitude 1kN.
.. image:: ildreaction.png
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_10 = symbols('R_0, R_10')
>>> b = Beam(10, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(10, 'roller')
>>> b.solve_for_ild_reactions(1,R_0,R_10)
>>> b.ild_reactions
{R_0: x/10 - 1, R_10: -x/10}
"""
shear_force, bending_moment = self._solve_for_ild_equations()
x = self.variable
l = self.length
C3 = Symbol('C3')
C4 = Symbol('C4')
shear_curve = limit(shear_force, x, l) - value
moment_curve = limit(bending_moment, x, l) - value*(l-x)
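# shear_curve and moment_curve express the overall force balance and the
# moment balance about the far end of the beam with the moving load placed
# at x; together with the boundary-condition equations built below they are
# solved for the reactions.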
slope_eqs = []
deflection_eqs = []
slope_curve = integrate(bending_moment, x) + C3
for position, value in self._boundary_conditions['slope']:
eqs = slope_curve.subs(x, position) - value
slope_eqs.append(eqs)
deflection_curve = integrate(slope_curve, x) + C4
for position, value in self._boundary_conditions['deflection']:
eqs = deflection_curve.subs(x, position) - value
deflection_eqs.append(eqs)
# Solve the equilibrium and boundary-condition equations for the constants and reactions.
solution = list((linsolve([shear_curve, moment_curve] + slope_eqs
+ deflection_eqs, (C3, C4) + reactions).args)[0])
# Discard the integration constants C3 and C4; keep only the reaction values.
solution = solution[2:]
self._ild_reactions = dict(zip(reactions, solution))
def plot_ild_reactions(self, subs=None):
"""
Plots the Influence Line Diagram of Reaction Forces
under the effect of a moving load. This function
should be called after calling solve_for_ild_reactions().
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 10 meters. A point load of magnitude 5 KN
is also applied from the top of the beam, at a distance of 4 meters
from the starting point. There are two simple supports below the
beam, located at the starting point and at a distance of 7 meters
from the starting point. Plot the I.L.D. equations for reactions
at both support points under the effect of a moving load
of magnitude 1kN.
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_7 = symbols('R_0, R_7')
>>> b = Beam(10, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(7, 'roller')
>>> b.apply_load(5,4,-1)
>>> b.solve_for_ild_reactions(1,R_0,R_7)
>>> b.ild_reactions
{R_0: x/7 - 22/7, R_7: -x/7 - 20/7}
>>> b.plot_ild_reactions()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: x/7 - 22/7 for x over (0.0, 10.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -x/7 - 20/7 for x over (0.0, 10.0)
"""
if not self._ild_reactions:
raise ValueError("I.L.D. reaction equations not found. Please use solve_for_ild_reactions() to generate the I.L.D. reaction equations.")
x = self.variable
ildplots = []
if subs is None:
subs = {}
for reaction in self._ild_reactions:
for sym in self._ild_reactions[reaction].atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
for sym in self._length.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
for reaction in self._ild_reactions:
ildplots.append(plot(self._ild_reactions[reaction].subs(subs),
(x, 0, self._length.subs(subs)), title='I.L.D. for Reactions',
xlabel=x, ylabel=reaction, line_color='blue', show=False))
return PlotGrid(len(ildplots), 1, *ildplots)
def solve_for_ild_shear(self, distance, value, *reactions):
"""
Determines the Influence Line Diagram equations for shear at a
specified point under the effect of a moving load.
Parameters
==========
distance : Integer
Distance of the point from the start of the beam
for which equations are to be determined
value : Integer
Magnitude of moving load
reactions :
The reaction forces applied on the beam.
Examples
========
There is a beam of length 12 meters. There are two simple supports
below the beam, one at the starting point and another at a distance
of 8 meters. Calculate the I.L.D. equations for Shear at a distance
of 4 meters under the effect of a moving load of magnitude 1kN.
.. image:: ildshear.png
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_8 = symbols('R_0, R_8')
>>> b = Beam(12, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(8, 'roller')
>>> b.solve_for_ild_reactions(1, R_0, R_8)
>>> b.solve_for_ild_shear(4, 1, R_0, R_8)
>>> b.ild_shear
Piecewise((x/8, x < 4), (x/8 - 1, x > 4))
"""
x = self.variable
l = self.length
shear_force, _ = self._solve_for_ild_equations()
shear_curve1 = value - limit(shear_force, x, distance)
shear_curve2 = (limit(shear_force, x, l) - limit(shear_force, x, distance)) - value
for reaction in reactions:
shear_curve1 = shear_curve1.subs(reaction,self._ild_reactions[reaction])
shear_curve2 = shear_curve2.subs(reaction,self._ild_reactions[reaction])
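# The I.L.D. is assembled piecewise: shear_curve1 holds while the moving
# load lies before the section at `distance`, shear_curve2 once it has
# crossed it.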
shear_eq = Piecewise((shear_curve1, x < distance), (shear_curve2, x > distance))
self._ild_shear = shear_eq
def plot_ild_shear(self, subs=None):
"""
Plots the Influence Line Diagram for Shear under the effect
of a moving load. This function should be called after
calling solve_for_ild_shear().
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 12 meters. There are two simple supports
below the beam, one at the starting point and another at a distance
of 8 meters. Plot the I.L.D. for Shear at a distance
of 4 meters under the effect of a moving load of magnitude 1kN.
.. image:: ildshear.png
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_8 = symbols('R_0, R_8')
>>> b = Beam(12, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(8, 'roller')
>>> b.solve_for_ild_reactions(1, R_0, R_8)
>>> b.solve_for_ild_shear(4, 1, R_0, R_8)
>>> b.ild_shear
Piecewise((x/8, x < 4), (x/8 - 1, x > 4))
>>> b.plot_ild_shear()
Plot object containing:
[0]: cartesian line: Piecewise((x/8, x < 4), (x/8 - 1, x > 4)) for x over (0.0, 12.0)
"""
if not self._ild_shear:
raise ValueError("I.L.D. shear equation not found. Please use solve_for_ild_shear() to generate the I.L.D. shear equations.")
x = self.variable
l = self._length
if subs is None:
subs = {}
for sym in self._ild_shear.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
for sym in self._length.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
return plot(self._ild_shear.subs(subs), (x, 0, l), title='I.L.D. for Shear',
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{V}$', line_color='blue',show=True)
def solve_for_ild_moment(self, distance, value, *reactions):
"""
Determines the Influence Line Diagram equations for moment at a
specified point under the effect of a moving load.
Parameters
==========
distance : Integer
Distance of the point from the start of the beam
for which equations are to be determined
value : Integer
Magnitude of moving load
reactions :
The reaction forces applied on the beam.
Examples
========
There is a beam of length 12 meters. There are two simple supports
below the beam, one at the starting point and another at a distance
of 8 meters. Calculate the I.L.D. equations for Moment at a distance
of 4 meters under the effect of a moving load of magnitude 1kN.
.. image:: ildshear.png
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_8 = symbols('R_0, R_8')
>>> b = Beam(12, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(8, 'roller')
>>> b.solve_for_ild_reactions(1, R_0, R_8)
>>> b.solve_for_ild_moment(4, 1, R_0, R_8)
>>> b.ild_moment
Piecewise((-x/2, x < 4), (x/2 - 4, x > 4))
"""
x = self.variable
l = self.length
_, moment = self._solve_for_ild_equations()
moment_curve1 = value*(distance-x) - limit(moment, x, distance)
moment_curve2 = (limit(moment, x, l) - limit(moment, x, distance)) - value*(l-x)
for reaction in reactions:
moment_curve1 = moment_curve1.subs(reaction, self._ild_reactions[reaction])
moment_curve2 = moment_curve2.subs(reaction, self._ild_reactions[reaction])
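# As for shear, the moment I.L.D. is piecewise: moment_curve1 applies while
# the moving load is before the section at `distance`, moment_curve2 after
# it has crossed it.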
moment_eq = Piecewise((moment_curve1, x < distance), (moment_curve2, x > distance))
self._ild_moment = moment_eq
def plot_ild_moment(self, subs=None):
"""
Plots the Influence Line Diagram for Moment under the effect
of a moving load. This function should be called after
calling solve_for_ild_moment().
Parameters
==========
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 12 meters. There are two simple supports
below the beam, one at the starting point and another at a distance
of 8 meters. Plot the I.L.D. for Moment at a distance
of 4 meters under the effect of a moving load of magnitude 1kN.
.. image:: ildshear.png
Using the sign convention of downwards forces being positive.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy import symbols
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> E, I = symbols('E, I')
>>> R_0, R_8 = symbols('R_0, R_8')
>>> b = Beam(12, E, I)
>>> b.apply_support(0, 'roller')
>>> b.apply_support(8, 'roller')
>>> b.solve_for_ild_reactions(1, R_0, R_8)
>>> b.solve_for_ild_moment(4, 1, R_0, R_8)
>>> b.ild_moment
Piecewise((-x/2, x < 4), (x/2 - 4, x > 4))
>>> b.plot_ild_moment()
Plot object containing:
[0]: cartesian line: Piecewise((-x/2, x < 4), (x/2 - 4, x > 4)) for x over (0.0, 12.0)
"""
if not self._ild_moment:
raise ValueError("I.L.D. moment equation not found. Please use solve_for_ild_moment() to generate the I.L.D. moment equations.")
x = self.variable
if subs is None:
subs = {}
for sym in self._ild_moment.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
for sym in self._length.atoms(Symbol):
if sym != x and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
return plot(self._ild_moment.subs(subs), (x, 0, self._length), title='I.L.D. for Moment',
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{M}$', line_color='blue', show=True)
@doctest_depends_on(modules=('numpy',))
def draw(self, pictorial=True):
"""
Returns a plot object representing the beam diagram of the beam.
.. note::
The user must be careful while entering load values.
The draw function assumes a sign convention which is used
for plotting loads.
Given a right handed coordinate system with XYZ coordinates,
the beam's length is assumed to be along the positive X axis.
The draw function recognizes positive loads (with n > -2) as loads
acting along the negative Y direction and positive moments as acting
along the positive Z direction.
Parameters
==========
pictorial: Boolean (default=True)
Setting ``pictorial=True`` creates a pictorial (scaled) view of the
beam diagram rather than one with exact dimensions, while setting
``pictorial=False`` creates the beam diagram with the exact dimensions
on the plot.
Examples
========
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam
>>> from sympy import symbols
>>> R1, R2 = symbols('R1, R2')
>>> E, I = symbols('E, I')
>>> b = Beam(50, 20, 30)
>>> b.apply_load(10, 2, -1)
>>> b.apply_load(R1, 10, -1)
>>> b.apply_load(R2, 30, -1)
>>> b.apply_load(90, 5, 0, 23)
>>> b.apply_load(10, 30, 1, 50)
>>> b.apply_support(50, "pin")
>>> b.apply_support(0, "fixed")
>>> b.apply_support(20, "roller")
>>> p = b.draw()
>>> p
Plot object containing:
[0]: cartesian line: 25*SingularityFunction(x, 5, 0) - 25*SingularityFunction(x, 23, 0)
+ SingularityFunction(x, 30, 1) - 20*SingularityFunction(x, 50, 0)
- SingularityFunction(x, 50, 1) + 5 for x over (0.0, 50.0)
[1]: cartesian line: 5 for x over (0.0, 50.0)
>>> p.show()
"""
if not numpy:
raise ImportError("To use this function numpy module is required")
x = self.variable
# checking whether length is an expression in terms of any Symbol.
from sympy.core.expr import Expr
if isinstance(self.length, Expr):
l = list(self.length.atoms(Symbol))
# assigning every Symbol a default value of 10
l = {i:10 for i in l}
length = self.length.subs(l)
else:
l = {}
length = self.length
height = length/10
rectangles = []
rectangles.append({'xy':(0, 0), 'width':length, 'height': height, 'facecolor':"brown"})
annotations, markers, load_eq,load_eq1, fill = self._draw_load(pictorial, length, l)
support_markers, support_rectangles = self._draw_supports(length, l)
rectangles += support_rectangles
markers += support_markers
sing_plot = plot(height + load_eq, height + load_eq1, (x, 0, length),
xlim=(-height, length + height), ylim=(-length, 1.25*length), annotations=annotations,
markers=markers, rectangles=rectangles, line_color='brown', fill=fill, axis=False, show=False)
return sing_plot
def _draw_load(self, pictorial, length, l):
loads = list(set(self.applied_loads) - set(self._support_as_loads))
height = length/10
x = self.variable
annotations = []
markers = []
load_args = []
scaled_load = 0
load_args1 = []
scaled_load1 = 0
load_eq = 0 # For positive valued higher order loads
load_eq1 = 0 # For negative valued higher order loads
fill = None
plus = 0 # For positive valued higher order loads
minus = 0 # For negative valued higher order loads
for load in loads:
# check if the position of load is in terms of the beam length.
if l:
pos = load[1].subs(l)
else:
pos = load[1]
# point loads
if load[2] == -1:
if isinstance(load[0], Symbol) or load[0].is_negative:
annotations.append({'text':'', 'xy':(pos, 0), 'xytext':(pos, height - 4*height), 'arrowprops':dict(width= 1.5, headlength=5, headwidth=5, facecolor='black')})
else:
annotations.append({'text':'', 'xy':(pos, height), 'xytext':(pos, height*4), 'arrowprops':dict(width= 1.5, headlength=4, headwidth=4, facecolor='black')})
# moment loads
elif load[2] == -2:
if load[0].is_negative:
markers.append({'args':[[pos], [height/2]], 'marker': r'$\circlearrowright$', 'markersize':15})
else:
markers.append({'args':[[pos], [height/2]], 'marker': r'$\circlearrowleft$', 'markersize':15})
# higher order loads
elif load[2] >= 0:
# `fill` will be assigned only when higher order loads are present
value, start, order, end = load
# Positive loads have their separate equations
if value > 0:
plus = 1
# if pictorial is True we rebuild the load equation with
# constant magnitude values.
if pictorial:
value = 10**(1-order) if order > 0 else length/2
scaled_load += value*SingularityFunction(x, start, order)
if end:
f2 = 10**(1-order)*x**order if order > 0 else length/2*x**order
for i in range(0, order + 1):
scaled_load -= (f2.diff(x, i).subs(x, end - start)*
SingularityFunction(x, end, i)/factorial(i))
if pictorial:
if isinstance(scaled_load, Add):
load_args = scaled_load.args
else:
# when the load equation consists of only a single term
load_args = (scaled_load,)
load_eq = [i.subs(l) for i in load_args]
else:
if isinstance(self.load, Add):
load_args = self.load.args
else:
load_args = (self.load,)
load_eq = [i.subs(l) for i in load_args if list(i.atoms(SingularityFunction))[0].args[2] >= 0]
load_eq = Add(*load_eq)
# filling higher order loads with colour
expr = height + load_eq.rewrite(Piecewise)
y1 = lambdify(x, expr, 'numpy')
# For loads with negative value
else:
minus = 1
# if pictorial is True we rebuild the load equation with
# constant magnitude values.
if pictorial:
value = 10**(1-order) if order > 0 else length/2
scaled_load1 += value*SingularityFunction(x, start, order)
if end:
f2 = 10**(1-order)*x**order if order > 0 else length/2*x**order
for i in range(0, order + 1):
scaled_load1 -= (f2.diff(x, i).subs(x, end - start)*
SingularityFunction(x, end, i)/factorial(i))
if pictorial:
if isinstance(scaled_load1, Add):
load_args1 = scaled_load1.args
else:
# when the load equation consists of only a single term
load_args1 = (scaled_load1,)
load_eq1 = [i.subs(l) for i in load_args1]
else:
if isinstance(self.load, Add):
load_args1 = self.load.args
else:
load_args1 = (self.load,)
load_eq1 = [i.subs(l) for i in load_args1 if list(i.atoms(SingularityFunction))[0].args[2] >= 0]
load_eq1 = -Add(*load_eq1)-height
# filling higher order loads with colour
expr = height + load_eq1.rewrite(Piecewise)
y1_ = lambdify(x, expr, 'numpy')
y = numpy.arange(0, float(length), 0.001)
y2 = float(height)
if plus == 1 and minus == 1:
fill = {'x': y, 'y1': y1(y), 'y2': y1_(y), 'color':'darkkhaki'}
elif plus == 1:
fill = {'x': y, 'y1': y1(y), 'y2': y2, 'color':'darkkhaki'}
else:
fill = {'x': y, 'y1': y1_(y), 'y2': y2, 'color':'darkkhaki'}
return annotations, markers, load_eq, load_eq1, fill
def _draw_supports(self, length, l):
height = float(length/10)
support_markers = []
support_rectangles = []
for support in self._applied_supports:
if l:
pos = support[0].subs(l)
else:
pos = support[0]
if support[1] == "pin":
support_markers.append({'args':[pos, [0]], 'marker':6, 'markersize':13, 'color':"black"})
elif support[1] == "roller":
support_markers.append({'args':[pos, [-height/2.5]], 'marker':'o', 'markersize':11, 'color':"black"})
elif support[1] == "fixed":
if pos == 0:
support_rectangles.append({'xy':(0, -3*height), 'width':-length/20, 'height':6*height + height, 'fill':False, 'hatch':'/////'})
else:
support_rectangles.append({'xy':(length, -3*height), 'width':length/20, 'height': 6*height + height, 'fill':False, 'hatch':'/////'})
return support_markers, support_rectangles
class Beam3D(Beam):
"""
This class handles loads applied in any direction of a 3D space along
with unequal values of Second moment along different axes.
.. note::
A consistent sign convention must be used while solving a beam
bending problem; the results will
automatically follow the chosen sign convention.
This class assumes that any kind of distributed load/moment is
applied throughout the span of the beam.
Examples
========
There is a beam of length l meters. A constant distributed load of magnitude q
is applied along the y-axis from the start till the end of the beam. A constant
distributed moment of magnitude m is also applied along the z-axis from the start
till the end of the beam. The beam is fixed at both of its ends, so the deflection
of the beam at both ends is restricted.
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols, simplify, collect, factor
>>> l, E, G, I, A = symbols('l, E, G, I, A')
>>> b = Beam3D(l, E, G, I, A)
>>> x, q, m = symbols('x, q, m')
>>> b.apply_load(q, 0, 0, dir="y")
>>> b.apply_moment_load(m, 0, -1, dir="z")
>>> b.shear_force()
[0, -q*x, 0]
>>> b.bending_moment()
[0, 0, -m*x + q*x**2/2]
>>> b.bc_slope = [(0, [0, 0, 0]), (l, [0, 0, 0])]
>>> b.bc_deflection = [(0, [0, 0, 0]), (l, [0, 0, 0])]
>>> b.solve_slope_deflection()
>>> factor(b.slope())
[0, 0, x*(-l + x)*(-A*G*l**3*q + 2*A*G*l**2*q*x - 12*E*I*l*q
- 72*E*I*m + 24*E*I*q*x)/(12*E*I*(A*G*l**2 + 12*E*I))]
>>> dx, dy, dz = b.deflection()
>>> dy = collect(simplify(dy), x)
>>> dx == dz == 0
True
>>> dy == (x*(12*E*I*l*(A*G*l**2*q - 2*A*G*l*m + 12*E*I*q)
... + x*(A*G*l*(3*l*(A*G*l**2*q - 2*A*G*l*m + 12*E*I*q) + x*(-2*A*G*l**2*q + 4*A*G*l*m - 24*E*I*q))
... + A*G*(A*G*l**2 + 12*E*I)*(-2*l**2*q + 6*l*m - 4*m*x + q*x**2)
... - 12*E*I*q*(A*G*l**2 + 12*E*I)))/(24*A*E*G*I*(A*G*l**2 + 12*E*I)))
True
References
==========
.. [1] http://homes.civil.aau.dk/jc/FemteSemester/Beams3D.pdf
"""
def __init__(self, length, elastic_modulus, shear_modulus, second_moment,
area, variable=Symbol('x')):
"""Initializes the class.
Parameters
==========
length : Sympifyable
A Symbol or value representing the Beam's length.
elastic_modulus : Sympifyable
A SymPy expression representing the Beam's Modulus of Elasticity.
It is a measure of the stiffness of the Beam material.
shear_modulus : Sympifyable
A SymPy expression representing the Beam's Modulus of rigidity.
It is a measure of rigidity of the Beam material.
second_moment : Sympifyable or list
A list of two elements having SymPy expressions representing the
Beam's second moment of area. The first value represents the second
moment about the y-axis and the second about the z-axis.
A single SymPy expression can be passed if both values are the same.
area : Sympifyable
A SymPy expression representing the Beam's cross-sectional area
in a plane perpendicular to the length of the Beam.
variable : Symbol, optional
A Symbol object that will be used as the variable along the beam
while representing the load, shear, moment, slope and deflection
curve. By default, it is set to ``Symbol('x')``.
"""
super().__init__(length, elastic_modulus, second_moment, variable)
self.shear_modulus = shear_modulus
self._area = area
self._load_vector = [0, 0, 0]
self._moment_load_vector = [0, 0, 0]
self._load_Singularity = [0, 0, 0]
self._slope = [0, 0, 0]
self._deflection = [0, 0, 0]
@property
def shear_modulus(self):
"""Young's Modulus of the Beam. """
return self._shear_modulus
@shear_modulus.setter
def shear_modulus(self, e):
self._shear_modulus = sympify(e)
@property
def second_moment(self):
"""Second moment of area of the Beam. """
return self._second_moment
@second_moment.setter
def second_moment(self, i):
if isinstance(i, list):
i = [sympify(x) for x in i]
self._second_moment = i
else:
self._second_moment = sympify(i)
@property
def area(self):
"""Cross-sectional area of the Beam. """
return self._area
@area.setter
def area(self, a):
self._area = sympify(a)
@property
def load_vector(self):
"""
Returns a three element list representing the load vector.
"""
return self._load_vector
@property
def moment_load_vector(self):
"""
Returns a three element list representing moment loads on Beam.
"""
return self._moment_load_vector
@property
def boundary_conditions(self):
"""
Returns a dictionary of boundary conditions applied on the beam.
The dictionary has two keywords, namely 'slope' and 'deflection'.
The value of each keyword is a list of tuples, where each tuple
contains the location and value of a boundary condition in the format
(location, value). Each value is itself a list of the slope or
deflection values along the three axes at that location.
Examples
========
There is a beam of length 4 meters. The slope at 0 should be 4 along
the x-axis and 0 along the other axes. At the other end of the beam, the
deflection along all the three axes should be zero.
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(30, E, G, I, A, x)
>>> b.bc_slope = [(0, (4, 0, 0))]
>>> b.bc_deflection = [(4, [0, 0, 0])]
>>> b.boundary_conditions
{'deflection': [(4, [0, 0, 0])], 'slope': [(0, (4, 0, 0))]}
Here the deflection of the beam should be ``0`` along all the three axes at ``4``.
Similarly, the slope of the beam should be ``4`` along x-axis and ``0``
along y and z axis at ``0``.
"""
return self._boundary_conditions
def polar_moment(self):
"""
Returns the polar moment of area of the beam
about the X axis with respect to the centroid.
Examples
========
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A = symbols('l, E, G, I, A')
>>> b = Beam3D(l, E, G, I, A)
>>> b.polar_moment()
2*I
>>> I1 = [9, 15]
>>> b = Beam3D(l, E, G, I1, A)
>>> b.polar_moment()
24
"""
if not iterable(self.second_moment):
return 2*self.second_moment
return sum(self.second_moment)
def apply_load(self, value, start, order, dir="y"):
"""
This method adds a force load to the beam object.
Parameters
==========
value : Sympifyable
The magnitude of an applied load.
start : Sympifyable
The starting point of the applied load. For a point load this is
simply the point at which it acts.
dir : String
Axis along which load is applied.
order : Integer
The order of the applied load.
- For point loads, order=-1
- For constant distributed load, order=0
- For ramp loads, order=1
- For parabolic ramp loads, order=2
- ... so on.
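
Examples
========

As a brief illustration (the numbers below are arbitrary), a point load
of magnitude 4 applied 2 meters from the start along the y-axis and a
ramp load of slope 3 applied along the z-axis could be added as:

>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A = symbols('l, E, G, I, A')
>>> b = Beam3D(l, E, G, I, A)
>>> b.apply_load(4, start=2, order=-1, dir="y")
>>> b.apply_load(3, start=0, order=1, dir="z")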
"""
x = self.variable
value = sympify(value)
start = sympify(start)
order = sympify(order)
if dir == "x":
if not order == -1:
self._load_vector[0] += value
self._load_Singularity[0] += value*SingularityFunction(x, start, order)
elif dir == "y":
if not order == -1:
self._load_vector[1] += value
self._load_Singularity[1] += value*SingularityFunction(x, start, order)
else:
if not order == -1:
self._load_vector[2] += value
self._load_Singularity[2] += value*SingularityFunction(x, start, order)
def apply_moment_load(self, value, start, order, dir="y"):
"""
This method adds a moment load to the beam object.
Parameters
==========
value : Sympifyable
The magnitude of an applied moment.
start : Sympifyable
The starting point of the applied moment load.
dir : String
Axis along which moment is applied.
order : Integer
The order of the applied load.
- For point moments, order=-2
- For constant distributed moment, order=-1
- For ramp moments, order=0
- For parabolic ramp moments, order=1
- ... so on.
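
Examples
========

As a brief illustration (the values below are arbitrary), a point moment
of magnitude 6 applied 4 meters from the start along the z-axis could be
added as:

>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A = symbols('l, E, G, I, A')
>>> b = Beam3D(l, E, G, I, A)
>>> b.apply_moment_load(6, start=4, order=-2, dir="z")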
"""
x = self.variable
value = sympify(value)
start = sympify(start)
order = sympify(order)
if dir == "x":
if not order == -2:
self._moment_load_vector[0] += value
self._load_Singularity[0] += value*SingularityFunction(x, start, order)
elif dir == "y":
if not order == -2:
self._moment_load_vector[1] += value
self._load_Singularity[0] += value*SingularityFunction(x, start, order)
else:
if not order == -2:
self._moment_load_vector[2] += value
self._load_Singularity[0] += value*SingularityFunction(x, start, order)
def apply_support(self, loc, type="fixed"):
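# A pin or roller support introduces a reaction force symbol and a zero
# deflection boundary condition at `loc`; any other type is treated as fixed
# and additionally introduces a reaction moment symbol and a zero slope
# boundary condition.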
if type in ("pin", "roller"):
reaction_load = Symbol('R_'+str(loc))
self._reaction_loads[reaction_load] = reaction_load
self.bc_deflection.append((loc, [0, 0, 0]))
else:
reaction_load = Symbol('R_'+str(loc))
reaction_moment = Symbol('M_'+str(loc))
self._reaction_loads[reaction_load] = [reaction_load, reaction_moment]
self.bc_deflection.append((loc, [0, 0, 0]))
self.bc_slope.append((loc, [0, 0, 0]))
def solve_for_reaction_loads(self, *reaction):
"""
Solves for the reaction forces.
Examples
========
There is a beam of length 30 meters. It is supported by rollers at both
of its ends. A constant distributed load of magnitude 8 N is applied
from the start till its end along the y-axis. Another linear load having
slope equal to 9 is applied along the z-axis.
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(30, E, G, I, A, x)
>>> b.apply_load(8, start=0, order=0, dir="y")
>>> b.apply_load(9*x, start=0, order=0, dir="z")
>>> b.bc_deflection = [(0, [0, 0, 0]), (30, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="y")
>>> b.apply_load(R2, start=30, order=-1, dir="y")
>>> b.apply_load(R3, start=0, order=-1, dir="z")
>>> b.apply_load(R4, start=30, order=-1, dir="z")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.reaction_loads
{R1: -120, R2: -120, R3: -1350, R4: -2700}
"""
x = self.variable
l = self.length
q = self._load_Singularity
shear_curves = [integrate(load, x) for load in q]
moment_curves = [integrate(shear, x) for shear in shear_curves]
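# For each axis, the shear and moment curves evaluated at the far end of the
# beam (x = l) give two equilibrium equations which are solved below for the
# reactions acting along that axis.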
for i in range(3):
react = [r for r in reaction if (shear_curves[i].has(r) or moment_curves[i].has(r))]
if len(react) == 0:
continue
shear_curve = limit(shear_curves[i], x, l)
moment_curve = limit(moment_curves[i], x, l)
sol = list((linsolve([shear_curve, moment_curve], react).args)[0])
sol_dict = dict(zip(react, sol))
reaction_loads = self._reaction_loads
# Check if any of the evaluated reactions also appears in another direction;
# if it does, it must have the same value there.
for key in sol_dict:
if key in reaction_loads and sol_dict[key] != reaction_loads[key]:
raise ValueError("Ambiguous solution for %s in different directions." % key)
self._reaction_loads.update(sol_dict)
def shear_force(self):
"""
Returns a list of three expressions which represent the shear force
curve of the Beam object along all three axes.
"""
x = self.variable
q = self._load_vector
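# Each component is V_i(x) = -integrate(q_i, x), with q_i the distributed
# load along that axis.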
return [integrate(-q[0], x), integrate(-q[1], x), integrate(-q[2], x)]
def axial_force(self):
"""
Returns the expression of the axial force present inside the Beam object.
"""
return self.shear_force()[0]
def shear_stress(self):
"""
Returns a list of three expressions which represent the shear stress
curve of the Beam object along all three axes.
"""
return [self.shear_force()[0]/self._area, self.shear_force()[1]/self._area, self.shear_force()[2]/self._area]
def axial_stress(self):
"""
Returns the expression of the axial stress present inside the Beam object.
"""
return self.axial_force()/self._area
def bending_moment(self):
"""
Returns a list of three expressions which represent the bending moment
curve of the Beam object along all three axes.
"""
x = self.variable
m = self._moment_load_vector
shear = self.shear_force()
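# M_x integrates only the axial distributed moment, while the y and z
# components also pick up the shear force components V_z and V_y
# respectively, as written in the return expression below.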
return [integrate(-m[0], x), integrate(-m[1] + shear[2], x),
integrate(-m[2] - shear[1], x) ]
def torsional_moment(self):
"""
Returns the expression of the torsional moment present inside the Beam object.
"""
return self.bending_moment()[0]
def solve_slope_deflection(self):
from sympy.core.function import (Derivative, Function)
from sympy.core.relational import Eq
from sympy.solvers.ode.ode import dsolve
x = self.variable
l = self.length
E = self.elastic_modulus
G = self.shear_modulus
I = self.second_moment
if isinstance(I, list):
I_y, I_z = I[0], I[1]
else:
I_y = I_z = I
A = self._area
load = self._load_vector
moment = self._moment_load_vector
defl = Function('defl')
theta = Function('theta')
# Finding deflection along x-axis(and corresponding slope value by differentiating it)
# Equation used: Derivative(E*A*Derivative(def_x(x), x), x) + load_x = 0
eq = Derivative(E*A*Derivative(defl(x), x), x) + load[0]
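# The axial problem decouples from bending: dsolve integrates
# Derivative(E*A*defl'(x), x) + load_x = 0, and the two constants are then
# fixed by requiring zero axial displacement at both ends (x = 0 and x = l).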
def_x = dsolve(Eq(eq, 0), defl(x)).args[1]
# Solving constants originated from dsolve
C1 = Symbol('C1')
C2 = Symbol('C2')
constants = list((linsolve([def_x.subs(x, 0), def_x.subs(x, l)], C1, C2).args)[0])
def_x = def_x.subs({C1:constants[0], C2:constants[1]})
slope_x = def_x.diff(x)
self._deflection[0] = def_x
self._slope[0] = slope_x
# Finding deflection along y-axis and slope across z-axis. System of equation involved:
# 1: Derivative(E*I_z*Derivative(theta_z(x), x), x) + G*A*(Derivative(defl_y(x), x) - theta_z(x)) + moment_z = 0
# 2: Derivative(G*A*(Derivative(defl_y(x), x) - theta_z(x)), x) + load_y = 0
C_i = Symbol('C_i')
# Substitute value of `G*A*(Derivative(defl_y(x), x) - theta_z(x))` from (2) in (1)
eq1 = Derivative(E*I_z*Derivative(theta(x), x), x) + (integrate(-load[1], x) + C_i) + moment[2]
slope_z = dsolve(Eq(eq1, 0)).args[1]
# Solve for constants originated from using dsolve on eq1
constants = list((linsolve([slope_z.subs(x, 0), slope_z.subs(x, l)], C1, C2).args)[0])
slope_z = slope_z.subs({C1:constants[0], C2:constants[1]})
# Put value of slope obtained back in (2) to solve for `C_i` and find deflection across y-axis
eq2 = G*A*(Derivative(defl(x), x)) + load[1]*x - C_i - G*A*slope_z
def_y = dsolve(Eq(eq2, 0), defl(x)).args[1]
# Solve for constants originated from using dsolve on eq2
constants = list((linsolve([def_y.subs(x, 0), def_y.subs(x, l)], C1, C_i).args)[0])
self._deflection[1] = def_y.subs({C1:constants[0], C_i:constants[1]})
self._slope[2] = slope_z.subs(C_i, constants[1])
# Finding deflection along z-axis and slope across y-axis. System of equation involved:
# 1: Derivative(E*I_y*Derivative(theta_y(x), x), x) - G*A*(Derivative(defl_z(x), x) + theta_y(x)) + moment_y = 0
# 2: Derivative(G*A*(Derivative(defl_z(x), x) + theta_y(x)), x) + load_z = 0
# Substitute value of `G*A*(Derivative(defl_z(x), x) + theta_y(x))` from (2) in (1)
eq1 = Derivative(E*I_y*Derivative(theta(x), x), x) + (integrate(load[2], x) - C_i) + moment[1]
slope_y = dsolve(Eq(eq1, 0)).args[1]
# Solve for constants originated from using dsolve on eq1
constants = list((linsolve([slope_y.subs(x, 0), slope_y.subs(x, l)], C1, C2).args)[0])
slope_y = slope_y.subs({C1:constants[0], C2:constants[1]})
# Put value of slope obtained back in (2) to solve for `C_i` and find deflection across z-axis
eq2 = G*A*(Derivative(defl(x), x)) + load[2]*x - C_i + G*A*slope_y
def_z = dsolve(Eq(eq2,0)).args[1]
# Solve for constants originated from using dsolve on eq2
constants = list((linsolve([def_z.subs(x, 0), def_z.subs(x, l)], C1, C_i).args)[0])
self._deflection[2] = def_z.subs({C1:constants[0], C_i:constants[1]})
self._slope[1] = slope_y.subs(C_i, constants[1])
def slope(self):
"""
Returns a three element list representing slope of deflection curve
along all the three axes.
"""
return self._slope
def deflection(self):
"""
Returns a three element list representing deflection curve along all
the three axes.
"""
return self._deflection
def _plot_shear_force(self, dir, subs=None):
shear_force = self.shear_force()
if dir == 'x':
dir_num = 0
color = 'r'
elif dir == 'y':
dir_num = 1
color = 'g'
elif dir == 'z':
dir_num = 2
color = 'b'
if subs is None:
subs = {}
for sym in shear_force[dir_num].atoms(Symbol):
if sym != self.variable and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(shear_force[dir_num].subs(subs), (self.variable, 0, length), show = False, title='Shear Force along %c direction'%dir,
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{V(%c)}$'%dir, line_color=color)
def plot_shear_force(self, dir="all", subs=None):
"""
Returns a plot for Shear force along all three directions
present in the Beam object.
Parameters
==========
dir : string (default : "all")
Direction along which shear force plot is required.
If no direction is specified, all plots are displayed.
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 20 meters. It is supported by rollers
at both of its ends. A linear load having slope equal to 12 is applied
along the y-axis. A constant distributed load of magnitude 15 N is
applied from the start till its end along the z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, E, G, I, A, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.plot_shear_force()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: 0 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -6*x**2 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: -15*x for x over (0.0, 20.0)
"""
dir = dir.lower()
# For shear force along x direction
if dir == "x":
Px = self._plot_shear_force('x', subs)
return Px.show()
# For shear force along y direction
elif dir == "y":
Py = self._plot_shear_force('y', subs)
return Py.show()
# For shear force along z direction
elif dir == "z":
Pz = self._plot_shear_force('z', subs)
return Pz.show()
# For shear force along all direction
else:
Px = self._plot_shear_force('x', subs)
Py = self._plot_shear_force('y', subs)
Pz = self._plot_shear_force('z', subs)
return PlotGrid(3, 1, Px, Py, Pz)
def _plot_bending_moment(self, dir, subs=None):
bending_moment = self.bending_moment()
if dir == 'x':
dir_num = 0
color = 'g'
elif dir == 'y':
dir_num = 1
color = 'c'
elif dir == 'z':
dir_num = 2
color = 'm'
if subs is None:
subs = {}
for sym in bending_moment[dir_num].atoms(Symbol):
if sym != self.variable and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(bending_moment[dir_num].subs(subs), (self.variable, 0, length), show = False, title='Bending Moment along %c direction'%dir,
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{M(%c)}$'%dir, line_color=color)
def plot_bending_moment(self, dir="all", subs=None):
"""
Returns a plot for bending moment along all three directions
present in the Beam object.
Parameters
==========
dir : string (default : "all")
Direction along which bending moment plot is required.
If no direction is specified, all plots are displayed.
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 20 meters. It is supported by rollers
at both of its ends. A linear load having slope equal to 12 is applied
along the y-axis. A constant distributed load of magnitude 15 N is
applied from the start till its end along the z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, E, G, I, A, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.plot_bending_moment()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: 0 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -15*x**2/2 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: 2*x**3 for x over (0.0, 20.0)
"""
dir = dir.lower()
# For bending moment along x direction
if dir == "x":
Px = self._plot_bending_moment('x', subs)
return Px.show()
# For bending moment along y direction
elif dir == "y":
Py = self._plot_bending_moment('y', subs)
return Py.show()
# For bending moment along z direction
elif dir == "z":
Pz = self._plot_bending_moment('z', subs)
return Pz.show()
# For bending moment along all direction
else:
Px = self._plot_bending_moment('x', subs)
Py = self._plot_bending_moment('y', subs)
Pz = self._plot_bending_moment('z', subs)
return PlotGrid(3, 1, Px, Py, Pz)
def _plot_slope(self, dir, subs=None):
slope = self.slope()
if dir == 'x':
dir_num = 0
color = 'b'
elif dir == 'y':
dir_num = 1
color = 'm'
elif dir == 'z':
dir_num = 2
color = 'g'
if subs is None:
subs = {}
for sym in slope[dir_num].atoms(Symbol):
if sym != self.variable and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(slope[dir_num].subs(subs), (self.variable, 0, length), show = False, title='Slope along %c direction'%dir,
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{\theta(%c)}$'%dir, line_color=color)
def plot_slope(self, dir="all", subs=None):
"""
Returns a plot for Slope along all three directions
present in the Beam object.
Parameters
==========
dir : string (default : "all")
Direction along which Slope plot is required.
If no direction is specified, all plots are displayed.
subs : dictionary
Python dictionary containing Symbols as keys and their
corresponding values.
Examples
========
There is a beam of length 20 meters. It is supported by rollers
at both of its ends. A linear load having slope equal to 12 is applied
along the y-axis. A constant distributed load of magnitude 15 N is
applied from the start till its end along the z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, 40, 21, 100, 25, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.solve_slope_deflection()
>>> b.plot_slope()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: 0 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -x**3/1600 + 3*x**2/160 - x/8 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: x**4/8000 - 19*x**2/172 + 52*x/43 for x over (0.0, 20.0)
"""
dir = dir.lower()
# For Slope along x direction
if dir == "x":
Px = self._plot_slope('x', subs)
return Px.show()
# For Slope along y direction
elif dir == "y":
Py = self._plot_slope('y', subs)
return Py.show()
# For Slope along z direction
elif dir == "z":
Pz = self._plot_slope('z', subs)
return Pz.show()
# For Slope along all direction
else:
Px = self._plot_slope('x', subs)
Py = self._plot_slope('y', subs)
Pz = self._plot_slope('z', subs)
return PlotGrid(3, 1, Px, Py, Pz)
def _plot_deflection(self, dir, subs=None):
deflection = self.deflection()
if dir == 'x':
dir_num = 0
color = 'm'
elif dir == 'y':
dir_num = 1
color = 'r'
elif dir == 'z':
dir_num = 2
color = 'c'
if subs is None:
subs = {}
for sym in deflection[dir_num].atoms(Symbol):
if sym != self.variable and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(deflection[dir_num].subs(subs), (self.variable, 0, length), show = False, title='Deflection along %c direction'%dir,
xlabel=r'$\mathrm{X}$', ylabel=r'$\mathrm{\delta(%c)}$'%dir, line_color=color)
def plot_deflection(self, dir="all", subs=None):
"""
Returns a plot for Deflection along all three directions
present in the Beam object.
Parameters
==========
dir : string (default : "all")
Direction along which deflection plot is required.
If no direction is specified, all plots are displayed.
subs : dictionary
Python dictionary containing Symbols as keys and their
corresponding values.
Examples
========
There is a beam of length 20 meters. It is supported by rollers
at both of its ends. A linear load having slope equal to 12 is applied
along the y-axis. A constant distributed load of magnitude 15 N is
applied from the start till its end along the z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, 40, 21, 100, 25, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.solve_slope_deflection()
>>> b.plot_deflection()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: 0 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: x**5/40000 - 4013*x**3/90300 + 26*x**2/43 + 1520*x/903 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: x**4/6400 - x**3/160 + 27*x**2/560 + 2*x/7 for x over (0.0, 20.0)
"""
dir = dir.lower()
# For deflection along x direction
if dir == "x":
Px = self._plot_deflection('x', subs)
return Px.show()
# For deflection along y direction
elif dir == "y":
Py = self._plot_deflection('y', subs)
return Py.show()
# For deflection along z direction
elif dir == "z":
Pz = self._plot_deflection('z', subs)
return Pz.show()
# For deflection along all direction
else:
Px = self._plot_deflection('x', subs)
Py = self._plot_deflection('y', subs)
Pz = self._plot_deflection('z', subs)
return PlotGrid(3, 1, Px, Py, Pz)
def plot_loading_results(self, dir='x', subs=None):
"""
Returns a subplot of Shear Force, Bending Moment,
Slope and Deflection of the Beam object along the direction specified.
Parameters
==========
dir : string (default : "x")
Direction along which plots are required.
If no direction is specified, plots along x-axis are displayed.
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
There is a beam of length 20 meters. It is supported by rollers
at both of its ends. A linear load having slope equal to 12 is applied
along the y-axis. A constant distributed load of magnitude 15 N is
applied from the start till its end along the z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, E, G, I, A, x)
>>> subs = {E:40, G:21, I:100, A:25}
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.solve_slope_deflection()
>>> b.plot_loading_results('y',subs)
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: -6*x**2 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -15*x**2/2 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: -x**3/1600 + 3*x**2/160 - x/8 for x over (0.0, 20.0)
Plot[3]:Plot object containing:
[0]: cartesian line: x**5/40000 - 4013*x**3/90300 + 26*x**2/43 + 1520*x/903 for x over (0.0, 20.0)
"""
dir = dir.lower()
if subs is None:
subs = {}
ax1 = self._plot_shear_force(dir, subs)
ax2 = self._plot_bending_moment(dir, subs)
ax3 = self._plot_slope(dir, subs)
ax4 = self._plot_deflection(dir, subs)
return PlotGrid(4, 1, ax1, ax2, ax3, ax4)
def _plot_shear_stress(self, dir, subs=None):
shear_stress = self.shear_stress()
if dir == 'x':
dir_num = 0
color = 'r'
elif dir == 'y':
dir_num = 1
color = 'g'
elif dir == 'z':
dir_num = 2
color = 'b'
if subs is None:
subs = {}
for sym in shear_stress[dir_num].atoms(Symbol):
if sym != self.variable and sym not in subs:
raise ValueError('Value of %s was not passed.' %sym)
if self.length in subs:
length = subs[self.length]
else:
length = self.length
return plot(shear_stress[dir_num].subs(subs), (self.variable, 0, length), show = False, title='Shear stress along %c direction'%dir,
xlabel=r'$\mathrm{X}$', ylabel=r'$\tau(%c)$'%dir, line_color=color)
def plot_shear_stress(self, dir="all", subs=None):
"""
Returns a plot for Shear Stress along all three directions
present in the Beam object.
Parameters
==========
dir : string (default : "all")
Direction along which shear stress plot is required.
If no direction is specified, all plots are displayed.
subs : dictionary
Python dictionary containing Symbols as key and their
corresponding values.
Examples
========
        There is a beam of length 20 meters and an area of cross section of
        2 square meters. It is supported by rollers at both of its ends. A
        linear load having a slope equal to 12 is applied along the y-axis.
        A constant distributed load of magnitude 15 N is applied from the
        start till its end along the z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, E, G, I, 2, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.plot_shear_stress()
PlotGrid object containing:
Plot[0]:Plot object containing:
[0]: cartesian line: 0 for x over (0.0, 20.0)
Plot[1]:Plot object containing:
[0]: cartesian line: -3*x**2 for x over (0.0, 20.0)
Plot[2]:Plot object containing:
[0]: cartesian line: -15*x/2 for x over (0.0, 20.0)
"""
dir = dir.lower()
# For shear stress along x direction
if dir == "x":
Px = self._plot_shear_stress('x', subs)
return Px.show()
# For shear stress along y direction
elif dir == "y":
Py = self._plot_shear_stress('y', subs)
return Py.show()
# For shear stress along z direction
elif dir == "z":
Pz = self._plot_shear_stress('z', subs)
return Pz.show()
# For shear stress along all direction
else:
Px = self._plot_shear_stress('x', subs)
Py = self._plot_shear_stress('y', subs)
Pz = self._plot_shear_stress('z', subs)
return PlotGrid(3, 1, Px, Py, Pz)
def _max_shear_force(self, dir):
"""
Helper function for max_shear_force().
"""
dir = dir.lower()
if dir == 'x':
dir_num = 0
elif dir == 'y':
dir_num = 1
elif dir == 'z':
dir_num = 2
from sympy.solvers.solvers import solve
if not self.shear_force()[dir_num]:
return (0,0)
# To restrict the range within length of the Beam
load_curve = Piecewise((float("nan"), self.variable<=0),
(self._load_vector[dir_num], self.variable<self.length),
(float("nan"), True))
points = solve(load_curve.rewrite(Piecewise), self.variable,
domain=S.Reals)
points.append(0)
points.append(self.length)
shear_curve = self.shear_force()[dir_num]
shear_values = [shear_curve.subs(self.variable, x) for x in points]
shear_values = list(map(abs, shear_values))
max_shear = max(shear_values)
return (points[shear_values.index(max_shear)], max_shear)
def max_shear_force(self):
"""
Returns point of max shear force and its corresponding shear value
along all directions in a Beam object as a list.
solve_for_reaction_loads() must be called before using this function.
Examples
========
        There is a beam of length 20 meters. It is supported by rollers
        at both of its ends. A linear load having a slope equal to 12 is
        applied along the y-axis. A constant distributed load of magnitude
        15 N is applied from the start till its end along the z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, 40, 21, 100, 25, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.max_shear_force()
[(0, 0), (20, 2400), (20, 300)]
"""
max_shear = []
max_shear.append(self._max_shear_force('x'))
max_shear.append(self._max_shear_force('y'))
max_shear.append(self._max_shear_force('z'))
return max_shear
def _max_bending_moment(self, dir):
"""
Helper function for max_bending_moment().
"""
dir = dir.lower()
if dir == 'x':
dir_num = 0
elif dir == 'y':
dir_num = 1
elif dir == 'z':
dir_num = 2
from sympy.solvers.solvers import solve
if not self.bending_moment()[dir_num]:
return (0,0)
# To restrict the range within length of the Beam
shear_curve = Piecewise((float("nan"), self.variable<=0),
(self.shear_force()[dir_num], self.variable<self.length),
(float("nan"), True))
points = solve(shear_curve.rewrite(Piecewise), self.variable,
domain=S.Reals)
points.append(0)
points.append(self.length)
bending_moment_curve = self.bending_moment()[dir_num]
bending_moments = [bending_moment_curve.subs(self.variable, x) for x in points]
bending_moments = list(map(abs, bending_moments))
max_bending_moment = max(bending_moments)
return (points[bending_moments.index(max_bending_moment)], max_bending_moment)
def max_bending_moment(self):
"""
Returns point of max bending moment and its corresponding bending moment value
along all directions in a Beam object as a list.
solve_for_reaction_loads() must be called before using this function.
Examples
========
        There is a beam of length 20 meters. It is supported by rollers
        at both of its ends. A linear load having a slope equal to 12 is
        applied along the y-axis. A constant distributed load of magnitude
        15 N is applied from the start till its end along the z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, 40, 21, 100, 25, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.max_bending_moment()
[(0, 0), (20, 3000), (20, 16000)]
"""
max_bmoment = []
max_bmoment.append(self._max_bending_moment('x'))
max_bmoment.append(self._max_bending_moment('y'))
max_bmoment.append(self._max_bending_moment('z'))
return max_bmoment
max_bmoment = max_bending_moment
def _max_deflection(self, dir):
"""
        Helper function for max_deflection().
"""
dir = dir.lower()
if dir == 'x':
dir_num = 0
elif dir == 'y':
dir_num = 1
elif dir == 'z':
dir_num = 2
from sympy.solvers.solvers import solve
if not self.deflection()[dir_num]:
return (0,0)
# To restrict the range within length of the Beam
slope_curve = Piecewise((float("nan"), self.variable<=0),
(self.slope()[dir_num], self.variable<self.length),
(float("nan"), True))
points = solve(slope_curve.rewrite(Piecewise), self.variable,
domain=S.Reals)
points.append(0)
points.append(self._length)
deflection_curve = self.deflection()[dir_num]
deflections = [deflection_curve.subs(self.variable, x) for x in points]
deflections = list(map(abs, deflections))
max_def = max(deflections)
return (points[deflections.index(max_def)], max_def)
def max_deflection(self):
"""
Returns point of max deflection and its corresponding deflection value
along all directions in a Beam object as a list.
solve_for_reaction_loads() and solve_slope_deflection() must be called
before using this function.
Examples
========
        There is a beam of length 20 meters. It is supported by rollers
        at both of its ends. A linear load having a slope equal to 12 is
        applied along the y-axis. A constant distributed load of magnitude
        15 N is applied from the start till its end along the z-axis.
.. plot::
:context: close-figs
:format: doctest
:include-source: True
>>> from sympy.physics.continuum_mechanics.beam import Beam3D
>>> from sympy import symbols
>>> l, E, G, I, A, x = symbols('l, E, G, I, A, x')
>>> b = Beam3D(20, 40, 21, 100, 25, x)
>>> b.apply_load(15, start=0, order=0, dir="z")
>>> b.apply_load(12*x, start=0, order=0, dir="y")
>>> b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
>>> R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
>>> b.apply_load(R1, start=0, order=-1, dir="z")
>>> b.apply_load(R2, start=20, order=-1, dir="z")
>>> b.apply_load(R3, start=0, order=-1, dir="y")
>>> b.apply_load(R4, start=20, order=-1, dir="y")
>>> b.solve_for_reaction_loads(R1, R2, R3, R4)
>>> b.solve_slope_deflection()
>>> b.max_deflection()
[(0, 0), (10, 495/14), (-10 + 10*sqrt(10793)/43, (10 - 10*sqrt(10793)/43)**3/160 - 20/7 + (10 - 10*sqrt(10793)/43)**4/6400 + 20*sqrt(10793)/301 + 27*(10 - 10*sqrt(10793)/43)**2/560)]
"""
max_def = []
max_def.append(self._max_deflection('x'))
max_def.append(self._max_deflection('y'))
max_def.append(self._max_deflection('z'))
return max_def
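# Illustrative helper (not part of the original module): an end-to-end sketch that
# mirrors the doctests above -- build the doubly supported Beam3D, solve for the
# reactions and the slope/deflection, then query the maxima along each axis. The
# expected values in the trailing comment come straight from the doctests of
# max_shear_force() and max_bending_moment().
def _beam3d_maxima_example():
    from sympy import symbols
    x = symbols('x')
    b = Beam3D(20, 40, 21, 100, 25, x)
    b.apply_load(15, start=0, order=0, dir="z")
    b.apply_load(12*x, start=0, order=0, dir="y")
    b.bc_deflection = [(0, [0, 0, 0]), (20, [0, 0, 0])]
    R1, R2, R3, R4 = symbols('R1, R2, R3, R4')
    b.apply_load(R1, start=0, order=-1, dir="z")
    b.apply_load(R2, start=20, order=-1, dir="z")
    b.apply_load(R3, start=0, order=-1, dir="y")
    b.apply_load(R4, start=20, order=-1, dir="y")
    b.solve_for_reaction_loads(R1, R2, R3, R4)
    b.solve_slope_deflection()
    # Expected: [(0, 0), (20, 2400), (20, 300)] and [(0, 0), (20, 3000), (20, 16000)]
    return b.max_shear_force(), b.max_bending_moment(), b.max_deflection()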
| 40.661074
| 206
| 0.560542
|
a539ff701301d789acf314da41d57c26fb8a7ee2
| 2,364
|
py
|
Python
|
sandy-disaster-recovery/old_file_delete_handler.py
|
toddjcrane/crisiscleanup-legacy
|
74dbad143ebc3bfae4cc5afc478e43ab4033ff69
|
[
"Apache-2.0"
] | 1
|
2017-01-07T21:44:21.000Z
|
2017-01-07T21:44:21.000Z
|
sandy-disaster-recovery/old_file_delete_handler.py
|
aarontitus/crisiscleanup-legacy
|
74dbad143ebc3bfae4cc5afc478e43ab4033ff69
|
[
"Apache-2.0"
] | 1
|
2021-03-26T00:25:19.000Z
|
2021-03-26T00:25:19.000Z
|
sandy-disaster-recovery/old_file_delete_handler.py
|
toddjcrane/crisiscleanup-legacy
|
74dbad143ebc3bfae4cc5afc478e43ab4033ff69
|
[
"Apache-2.0"
] | 1
|
2017-09-07T09:52:15.000Z
|
2017-09-07T09:52:15.000Z
|
#!/usr/bin/env python
#
# Copyright 2013 Chris Wood
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
from google.appengine.ext import blobstore
from google.appengine.api import app_identity
import cloudstorage
from cron_utils import AbstractCronHandler
# constants
APP_ID = app_identity.get_application_id()
BUCKET_NAME = '/' + APP_ID
MAX_FILE_LIFESPAN_DAYS = 14
# handler
class OldFileDeleteHandler(AbstractCronHandler):
def get(self):
# delete blobs
query = blobstore.BlobInfo.all()
blobs = query.fetch(10000)
blob_deletion_count = 0
for blob in blobs:
age = datetime.datetime.utcnow() - blob.creation
# delete CSV & HTML blobs
should_delete = (
blob.filename is not None
and (blob.filename.endswith('.csv') or blob.filename.endswith('.html'))
and age.days > MAX_FILE_LIFESPAN_DAYS
)
if should_delete:
blob.delete()
blob_deletion_count += 1
# delete from GCS
gcs_deletion_count = 0
for file_stat in cloudstorage.listbucket(BUCKET_NAME):
age = datetime.datetime.utcnow() - \
datetime.datetime.utcfromtimestamp(file_stat.st_ctime)
should_delete = (
('.csv' in file_stat.filename or '.html' in file_stat.filename)
and age.days > MAX_FILE_LIFESPAN_DAYS
)
if should_delete:
cloudstorage.delete(file_stat.filename)
gcs_deletion_count += 1
# write outcome
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write('Deleted %d blobs from blobstore.\n' % blob_deletion_count)
self.response.out.write('Deleted %d files from GCS.\n' % gcs_deletion_count)
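# Illustrative note (not part of the original handler): handlers built on
# AbstractCronHandler are normally wired up through App Engine's cron.yaml. The URL
# and schedule below are assumptions for illustration only and are not taken from
# this repository.
#
#   cron:
#   - description: purge exported CSV/HTML files older than MAX_FILE_LIFESPAN_DAYS
#     url: /old_file_delete
#     schedule: every 24 hours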
| 31.52
| 91
| 0.653976
|
36c55f6ecf57f6fb1c1a06698edfff54117c57d9
| 3,246
|
py
|
Python
|
tests/ut/python/dataset/test_random_vertical_flip.py
|
ythlml/mindspore
|
028ae212624164044cfaa84f347fc502cb7fcb0f
|
[
"Apache-2.0"
] | 7
|
2020-05-24T03:19:26.000Z
|
2020-05-24T03:20:00.000Z
|
tests/ut/python/dataset/test_random_vertical_flip.py
|
ythlml/mindspore
|
028ae212624164044cfaa84f347fc502cb7fcb0f
|
[
"Apache-2.0"
] | null | null | null |
tests/ut/python/dataset/test_random_vertical_flip.py
|
ythlml/mindspore
|
028ae212624164044cfaa84f347fc502cb7fcb0f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
Testing the random vertical flip op in DE
"""
import matplotlib.pyplot as plt
import numpy as np
import mindspore.dataset as ds
import mindspore.dataset.transforms.vision.c_transforms as vision
from mindspore import log as logger
DATA_DIR = ["../data/dataset/test_tf_file_3_images/train-0000-of-0001.data"]
SCHEMA_DIR = "../data/dataset/test_tf_file_3_images/datasetSchema.json"
def v_flip(image):
"""
Apply the random_vertical
"""
# with the seed provided in this test case, it will always flip.
# that's why we flip here too
image = image[::-1, :, :]
return image
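# Note (illustrative, not part of the original test): for an H x W x C image the
# manual slice in v_flip() above is equivalent to numpy's flipud, i.e.
# np.array_equal(v_flip(img), np.flipud(img)) holds for any 3-D array img.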
def visualize(image_de_random_vertical, image_pil_random_vertical, mse, image_original):
"""
visualizes the image using DE op and Numpy op
"""
plt.subplot(141)
plt.imshow(image_original)
plt.title("Original image")
plt.subplot(142)
plt.imshow(image_de_random_vertical)
plt.title("DE random_vertical image")
plt.subplot(143)
plt.imshow(image_pil_random_vertical)
plt.title("vertically flipped image")
plt.subplot(144)
plt.imshow(image_de_random_vertical - image_pil_random_vertical)
plt.title("Difference image, mse : {}".format(mse))
plt.show()
def test_random_vertical_op():
"""
Test random_vertical
"""
logger.info("Test random_vertical")
# First dataset
data1 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
decode_op = vision.Decode()
random_vertical_op = vision.RandomVerticalFlip()
data1 = data1.map(input_columns=["image"], operations=decode_op)
data1 = data1.map(input_columns=["image"], operations=random_vertical_op)
# Second dataset
data2 = ds.TFRecordDataset(DATA_DIR, SCHEMA_DIR, columns_list=["image"], shuffle=False)
data2 = data2.map(input_columns=["image"], operations=decode_op)
num_iter = 0
for item1, item2 in zip(data1.create_dict_iterator(), data2.create_dict_iterator()):
# with the seed value, we can only guarantee the first number generated
if num_iter > 0:
break
image_v_flipped = item1["image"]
image = item2["image"]
image_v_flipped_2 = v_flip(image)
diff = image_v_flipped - image_v_flipped_2
mse = np.sum(np.power(diff, 2))
logger.info("image_{}, mse: {}".format(num_iter + 1, mse))
# Uncomment below line if you want to visualize images
# visualize(image_v_flipped, image_v_flipped_2, mse, image)
num_iter += 1
if __name__ == "__main__":
test_random_vertical_op()
| 32.138614
| 91
| 0.69008
|
af9fafb506b56702f39203c961ce1da1db92b7e7
| 2,571
|
py
|
Python
|
web/middleware.py
|
baronpan/SysmonHunter
|
0ec6e28c07386c9e8f470bf7dedce097a899dde5
|
[
"MIT"
] | 187
|
2019-07-30T08:11:02.000Z
|
2022-03-28T06:19:03.000Z
|
web/middleware.py
|
l3ngd0n/SysmonHunter
|
0ec6e28c07386c9e8f470bf7dedce097a899dde5
|
[
"MIT"
] | 2
|
2020-05-15T07:10:48.000Z
|
2021-06-05T13:55:56.000Z
|
web/middleware.py
|
l3ngd0n/SysmonHunter
|
0ec6e28c07386c9e8f470bf7dedce097a899dde5
|
[
"MIT"
] | 61
|
2019-08-08T09:39:25.000Z
|
2021-12-06T05:59:43.000Z
|
import pandas
import server
from data.sysmon import SysmonData
from core import rule
from core.behavior import *
from db import esapi
import analyst.statistic as ast
from core.attck import *
def daterange_format(d):
start = d.split(' - ')[0]
end = d.split(' - ')[1]
start_date = '{}-{}-{}'.format(start.split('/')[2], start.split('/')[0], start.split('/')[1])
end_date = '{}-{}-{}'.format(end.split('/')[2], end.split('/')[0], end.split('/')[1])
return (start_date, end_date)
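# Illustrative helper (not part of the original module): shows the
# "MM/DD/YYYY - MM/DD/YYYY" picker string that daterange_format() expects and the
# ISO-style tuple it returns. The concrete dates are assumptions used only to
# demonstrate the shape.
def _daterange_format_example():
    assert daterange_format('01/31/2019 - 02/15/2019') == ('2019-01-31', '2019-02-15')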
def get_event(event_type, behav_type, timerange, callfunc, **kwargs):
_tr = daterange_format(timerange)
behavs = []
if behav_type == 'All':
for behav_cls in BEHAVIOR_SETS:
behavs.extend(get_behaviors(event_type + behav_cls.__name__.lower(), _tr, callfunc, **kwargs))
else:
behavs = get_behaviors(event_type + behav_type.lower(), _tr, callfunc, **kwargs)
result = []
for behav in behavs:
result.append({
'epid': behav.endpoint['uuid'],
'timestamp': behav.date,
'behavior': behav.getname(),
'attck': '{}\t{}'.format(behav.attck_ids, get_attcks_name(behav.attck_ids, server.ATTCK_TECHS)),
'value': behav.get_value(),
})
return result
def get_behaviors(_index, timerange, callfunc, **kwargs):
_es = server.ES_INSTANCE
data = callfunc(_es, _index, timerange, **kwargs)
    data = data.drop_duplicates(subset=['timestamp', 'value'])  # keep the de-duplicated frame
behavs = []
for _, en in data.iterrows():
behavs.append(BaseBehavior.deserialize(en))
return behavs
def get_statistic_data(event_type, behav_type, timerange, st_func, **kwargs):
_es = server.ES_INSTANCE
data = esapi.esapi_behavior_by_range(_es, event_type + behav_type.lower(), daterange_format(timerange))
if data.shape[0] == 0:
return []
stdata = st_func(data, **kwargs)
return ast.st_output(stdata)
def get_st_details_data(event_type, behav_type, timerange, conds):
_es = server.ES_INSTANCE
data = esapi.esapi_propconds_by_range(_es, event_type + behav_type.lower(), daterange_format(timerange), conds)
if data.shape[0] == 0:
return []
result = []
for k, en in data.iterrows():
result.append({
'epid': en['endpoint.uuid'],
'timestamp': en['timestamp'],
'behavior': en['behaviortype'],
'attck': '{}\t{}'.format(en['attckids'], get_attcks_name(en['attckids'], server.ATTCK_TECHS)),
'value': en['value'],
})
return result
| 33.828947
| 115
| 0.622326
|
7b031b549854c8a94b7a644d628f093ecda7355a
| 2,093
|
py
|
Python
|
examples/ad_manager/v201805/placement_service/deactivate_placements.py
|
beamc83/python-googleads
|
6039d08e2d85850a46a70f24359d362ffde2f7ed
|
[
"Apache-2.0"
] | 2
|
2019-07-11T13:01:56.000Z
|
2019-07-11T13:01:58.000Z
|
examples/ad_manager/v201805/placement_service/deactivate_placements.py
|
SoungMo/googleads-python-lib
|
fe86335c416e0571328c0a481c4b0cff863c01d9
|
[
"Apache-2.0"
] | null | null | null |
examples/ad_manager/v201805/placement_service/deactivate_placements.py
|
SoungMo/googleads-python-lib
|
fe86335c416e0571328c0a481c4b0cff863c01d9
|
[
"Apache-2.0"
] | 1
|
2020-07-19T14:24:05.000Z
|
2020-07-19T14:24:05.000Z
|
#!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example deactivates all active placements.
To determine which placements exist, run get_all_placements.py.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
PLACEMENT_ID = 'INSERT_PLACEMENT_ID_HERE'
def main(client, placement_id):
# Initialize appropriate service.
placement_service = client.GetService('PlacementService', version='v201805')
# Create query.
statement = (ad_manager.StatementBuilder(version='v201805')
.Where('id = :placementId')
.WithBindVariable('placementId', long(placement_id))
.Limit(1))
# Get placements by statement.
placements = placement_service.getPlacementsByStatement(
statement.ToStatement())
for placement in placements:
print ('Placement with id "%s", name "%s", and status "%s" will be '
'deactivated.' % (placement['id'], placement['name'],
placement['status']))
# Perform action.
result = placement_service.performPlacementAction(
{'xsi_type': 'DeactivatePlacements'}, statement.ToStatement())
# Display results.
if result and int(result['numChanges']) > 0:
print 'Number of placements deactivated: %s' % result['numChanges']
else:
print 'No placements were deactivated.'
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client, PLACEMENT_ID)
| 33.222222
| 78
| 0.716675
|
88a41edfbc4cb45f2256a7cc83e0f49b4721eb5f
| 6,278
|
py
|
Python
|
df_generator.py
|
NTerlemes/opportunity
|
6444b1779261ce594bab5d804fb3d0ab36affc13
|
[
"MIT"
] | 1
|
2021-05-21T15:17:54.000Z
|
2021-05-21T15:17:54.000Z
|
df_generator.py
|
NTerlemes/opportunity
|
6444b1779261ce594bab5d804fb3d0ab36affc13
|
[
"MIT"
] | null | null | null |
df_generator.py
|
NTerlemes/opportunity
|
6444b1779261ce594bab5d804fb3d0ab36affc13
|
[
"MIT"
] | null | null | null |
import os
import json
import re
import pandas as pd
import numpy as np
from pathlib import Path
from typing import List, Tuple
from tqdm import tqdm
import shutil
data_folder = os.path.join(os.path.dirname(os.path.dirname(__file__)), '../Data/OpportunityUCIDataset/dataset')
results_folder = os.path.join(os.path.dirname(os.path.dirname(__file__)), '../results/pickles')
def rename_dict(df):
column_file = Path('Data/OpportunityUCIDataset/dataset/column_names.txt').resolve()
column_pattern = "^Column: (\d+) ([A-Za-z]+) ([A-Za-z\d^_-]*) ([A-Za-z]*)"
with open(column_file, 'r') as f:
for line in f:
pattern_match = re.match(string=line, pattern=column_pattern)
if pattern_match:
print(line)
def metadata2columns(metadata: pd.DataFrame, what_we_want: dict) -> List[int]:
"""Given a description of the signal based on the 3 basic attributes
Signal, Location and Sensor in the form of a dictonary with
key the wanted attribute and value the wanted value of it,
it returns the column of this signal.
Arguments:
metadata {pd.DataFrame} -- It retains the required information associating columns(signals) to their attributes.
what_we_want {dict} -- Dictionary that specifies our query
Returns:
List[int] -- List of integers representing the columns(signals) that satisfy our query
"""
attributes = list(what_we_want.keys())
supported_attr = ['Signal', 'Location', 'Sensor']
filtered_attr = [x for x in attributes if x in supported_attr]
result = [True for i in range(metadata.shape[0])]
for x in filtered_attr:
temp_result = [False for i in range(metadata.shape[0])]
for value in what_we_want[x]:
t = metadata[x] == value
temp_result = temp_result | t
result = result & temp_result
return np.where(result == True)[0].tolist()
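# Illustrative sketch (not part of the original module): a query dict for
# metadata2columns(). The attribute keys are the ones the function supports; the
# concrete sensor/location values are assumptions, since the valid values depend on
# column_names.txt from the OPPORTUNITY dataset.
#
#   wanted = {'Sensor': ['Accelerometer'], 'Location': ['RKN^']}
#   columns = metadata2columns(metadata, wanted)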
def label_handler(data_folder: str) -> pd.DataFrame:
column_pattern = "^(\d+)\s*-\s*([A-Za-z_]+)\s*-\s*([A-Za-z_ 1-9()]+)"
label_list = []
with open(os.path.join(data_folder, 'label_legend.txt'),'r') as f:
for line in f:
label_match = re.match(string = line.strip(), pattern = column_pattern)
if label_match:
label_list.append(pd.DataFrame(data = [[label_match.group(1), label_match.group(2), label_match.group(3)]],
columns = ['Code', 'Class', 'Label']))
return pd.concat(label_list)
def labels2index(labels, motion_class, motion_description):
"""
Gets 3 inputs.
A dataframe that includes the necessary data
and the motion_class and description in order to find the index that describes it.
Both motion_class and motion_description can be string or list of strings.
The logical relationship applied between the class and description is AND.
"""
if type(motion_class) is str:
m_class = []
m_class.append(motion_class)
elif type(motion_class) is list:
m_class = motion_class
if type(motion_description) is str:
m_description = []
m_description.append(motion_description)
elif type(motion_description) is list:
m_description = motion_description
result = labels[labels['Class'].isin(m_class) & (labels['Label'].isin(m_description))]
return result.to_dict('records')
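# Illustrative sketch (not part of the original module): labels2index() accepts a
# string or a list of strings for both arguments and ANDs the two filters. The class
# name matches the label columns renamed in dat_reader() below; the activity
# descriptions are assumptions.
#
#   matches = labels2index(labels, 'ML_Both_Arms', ['Open Door 1', 'Close Door 1'])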
def column_name_handler(data_folder: str) -> pd.DataFrame:
column_pattern = "^Column: (\d+) ([A-Za-z]+) ([A-Za-z\\d^_-]*) ([A-Za-z]*)"
metadata = pd.DataFrame(data= [['Time','Time', 'Time']],columns = ['Sensor','Location','Signal'])
with open(os.path.join(data_folder, 'column_names.txt'),'r') as f:
for line in f:
ptrn_match = re.match(string=line.strip(), pattern = column_pattern)
if ptrn_match:
temp_df = pd.DataFrame(data = [[ptrn_match.group(2), ptrn_match.group(3), ptrn_match.group(4)]],
columns = ['Sensor','Location','Signal'])
metadata = metadata.append(temp_df, ignore_index=True)
return metadata
def dat_reader(full_filepath: str) -> pd.DataFrame:
basename = os.path.basename(full_filepath)
filename_pattern = '^S(\d)-(ADL\d|Drill).*'
file_re = re.match(pattern=filename_pattern, string=basename)
df = pd.read_csv(full_filepath, sep=' ', header=None)
# Hack to search labels easier
df.rename({243:'Locomotion',
244:'HL_Activity',
245:'LL_Left_Arm',
246:'LL_Left_Arm_Object',
247:'LL_Right_Arm',
248:'LL_Right_Arm_Object',
249:'ML_Both_Arms'}, axis='columns', inplace=True)
df['file'] = basename
df['PID'] = file_re.group(1)
df['RunID'] = file_re.group(2)
return df
def df_generator() -> Tuple[pd.DataFrame, pd.DataFrame]:
data_files = list(filter(lambda x: x.endswith('.dat'), os.listdir(data_folder)))
df_list = []
metadata = column_name_handler(data_folder)
# Filename pattern for retrieving run_metadata
# Metadata:
# PID : Participant ID
# RunID : Which run and iteration this file represents
for file in tqdm(data_files):
temp_df = dat_reader(os.path.abspath(os.path.join(data_folder,file)))
df_list.append(temp_df)
full_df = pd.concat(df_list, sort=False)
return (full_df, metadata)
if __name__ == "__main__":
data_pickle_path = os.path.join(results_folder, 'data.pkl')
metadata_pickle_path = os.path.join(results_folder, 'metadata.pkl')
labels_pickle_path = os.path.join(results_folder, 'labels.pkl')
if not os.path.exists(results_folder):
os.makedirs(results_folder)
if not os.path.isfile(data_pickle_path) or not os.path.isfile(metadata_pickle_path):
shutil.rmtree(results_folder)
os.makedirs(results_folder)
df, metadata = df_generator()
labels = label_handler(data_folder)
labels.to_pickle(labels_pickle_path)
df.to_pickle(data_pickle_path)
metadata.to_pickle(metadata_pickle_path)
else:
print('Already existing pickles')
df = pd.read_pickle(data_pickle_path)
metadata = pd.read_pickle(metadata_pickle_path)
labels = pd.read_pickle(labels_pickle_path)
| 41.302632
| 123
| 0.660083
|
f971be029b8cea596ecacf841ffc35d863658bcf
| 1,881
|
py
|
Python
|
python/raydp/tests/conftest.py
|
KepingYan/raydp
|
7d9f0e88907addeb32dcd5af04c57c7b69da8025
|
[
"Apache-2.0"
] | null | null | null |
python/raydp/tests/conftest.py
|
KepingYan/raydp
|
7d9f0e88907addeb32dcd5af04c57c7b69da8025
|
[
"Apache-2.0"
] | null | null | null |
python/raydp/tests/conftest.py
|
KepingYan/raydp
|
7d9f0e88907addeb32dcd5af04c57c7b69da8025
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import pytest
from pyspark.sql import SparkSession
import ray
import raydp
def quiet_logger():
py4j_logger = logging.getLogger("py4j")
py4j_logger.setLevel(logging.WARNING)
koalas_logger = logging.getLogger("koalas")
koalas_logger.setLevel(logging.WARNING)
@pytest.fixture(scope="function")
def spark_session(request):
spark = SparkSession.builder.master("local[2]").appName("RayDP test").getOrCreate()
request.addfinalizer(lambda: spark.stop())
quiet_logger()
return spark
@pytest.fixture(scope="function", params=["localhost:6379", "ray://localhost:10001"])
def ray_cluster(request):
ray.shutdown()
ray.init(address=request.param)
request.addfinalizer(lambda: ray.shutdown())
@pytest.fixture(scope="function", params=["localhost:6379", "ray://localhost:10001"])
def spark_on_ray_small(request):
ray.shutdown()
ray.init(address=request.param)
spark = raydp.init_spark("test", 1, 1, "500 M")
def stop_all():
raydp.stop_spark()
ray.shutdown()
request.addfinalizer(stop_all)
return spark
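# Illustrative sketch (not part of the original conftest): a test module simply takes
# one of the fixtures above as an argument; pytest then runs it once per parametrized
# Ray address. The DataFrame used here is an assumption.
#
#   def test_init_spark_smoke(spark_on_ray_small):
#       df = spark_on_ray_small.range(10)
#       assert df.count() == 10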
| 31.35
| 87
| 0.738969
|
398a932361b092daca65c60d697b8f436d108009
| 7,405
|
py
|
Python
|
cachier/pickle_core.py
|
shaoyucheng/cachier
|
278e395cedc2a10616eed208682c296d01985074
|
[
"MIT"
] | null | null | null |
cachier/pickle_core.py
|
shaoyucheng/cachier
|
278e395cedc2a10616eed208682c296d01985074
|
[
"MIT"
] | null | null | null |
cachier/pickle_core.py
|
shaoyucheng/cachier
|
278e395cedc2a10616eed208682c296d01985074
|
[
"MIT"
] | null | null | null |
"""A pickle-based caching core for cachier."""
# This file is part of Cachier.
# https://github.com/shaypal5/cachier
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT-license
# Copyright (c) 2016, Shay Palachy <shaypal5@gmail.com>
import os
import pickle # for local caching
from datetime import datetime
import threading
import portalocker # to lock on pickle cache IO
from watchdog.observers import Observer
from watchdog.events import PatternMatchingEventHandler
# Altenative: https://github.com/WoLpH/portalocker
from .base_core import _BaseCore
DEF_CACHIER_DIR = '~/.cachier/'
class _PickleCore(_BaseCore):
"""The pickle core class for cachier.
Parameters
----------
stale_after : datetime.timedelta, optional
See _BaseCore documentation.
next_time : bool, optional
See _BaseCore documentation.
pickle_reload : bool, optional
See core.cachier() documentation.
cache_dir : str, optional.
See core.cachier() documentation.
"""
class CacheChangeHandler(PatternMatchingEventHandler):
"""Handles cache-file modification events."""
def __init__(self, filename, core, key):
PatternMatchingEventHandler.__init__(
self,
patterns=["*" + filename],
ignore_patterns=None,
ignore_directories=True,
case_sensitive=False,
)
self.core = core
self.key = key
self.observer = None
self.value = None
def inject_observer(self, observer):
"""Inject the observer running this handler."""
self.observer = observer
def _check_calculation(self):
# print('checking calc')
entry = self.core.get_entry_by_key(self.key, True)[1]
# print(self.key)
# print(entry)
try:
if not entry['being_calculated']:
# print('stoping observer!')
self.value = entry['value']
self.observer.stop()
# else:
# print('NOT stoping observer... :(')
except TypeError:
self.value = None
self.observer.stop()
def on_created(self, event): # skipcq: PYL-W0613
self._check_calculation() # pragma: no cover
def on_modified(self, event): # skipcq: PYL-W0613
self._check_calculation()
def __init__(self, stale_after, next_time, reload, cache_dir, module_name=None):
_BaseCore.__init__(self, stale_after, next_time)
self.cache = None
self.reload = reload
self.cache_dir = DEF_CACHIER_DIR
if cache_dir is not None:
self.cache_dir = cache_dir
self.expended_cache_dir = os.path.expanduser(self.cache_dir)
self.lock = threading.RLock()
self.cache_fname = None
self.cache_fpath = None
# used for construction of cache file name
self.module_name = module_name
def _cache_fname(self):
module_tag = self.func.__module__
if self.module_name is not None:
module_tag = self.module_name
self.cache_fname = '.{}.{}'.format(
module_tag, self.func.__name__
)
return self.cache_fname
def _cache_fpath(self):
# print(EXPANDED_CACHIER_DIR)
if not os.path.exists(self.expended_cache_dir):
os.makedirs(self.expended_cache_dir)
self.cache_fpath = os.path.abspath(
os.path.join(
os.path.realpath(self.expended_cache_dir),
self._cache_fname(),
)
)
return self.cache_fpath
def _reload_cache(self):
with self.lock:
fpath = self._cache_fpath()
# print(fpath)
try:
with portalocker.Lock(fpath, mode='rb') as cache_file:
try:
self.cache = pickle.load(cache_file)
except EOFError:
self.cache = {}
except FileNotFoundError:
self.cache = {}
def _get_cache(self):
with self.lock:
if not self.cache:
self._reload_cache()
return self.cache
def _save_cache(self, cache):
with self.lock:
self.cache = cache
fpath = self._cache_fpath()
with portalocker.Lock(fpath, mode='wb') as cache_file:
pickle.dump(cache, cache_file, protocol=4)
self._reload_cache()
def get_entry_by_key(self, key, reload=False): # pylint: disable=W0221
with self.lock:
# print('{}, {}'.format(self.reload, reload))
if self.reload or reload:
self._reload_cache()
return key, self._get_cache().get(key, None)
def get_entry(self, args, kwds, hash_params):
key = args + tuple(sorted(kwds.items())) if hash_params is None else hash_params(args, kwds)
# print('key type={}, key={}'.format(type(key), key))
return self.get_entry_by_key(key)
def set_entry(self, key, func_res):
with self.lock:
cache = self._get_cache()
cache[key] = {
'value': func_res,
'time': datetime.now(),
'stale': False,
'being_calculated': False,
}
self._save_cache(cache)
def mark_entry_being_calculated(self, key):
with self.lock:
cache = self._get_cache()
try:
cache[key]['being_calculated'] = True
except KeyError:
cache[key] = {
'value': None,
'time': datetime.now(),
'stale': False,
'being_calculated': True,
}
self._save_cache(cache)
def mark_entry_not_calculated(self, key):
with self.lock:
cache = self._get_cache()
try:
cache[key]['being_calculated'] = False
self._save_cache(cache)
except KeyError:
pass # that's ok, we don't need an entry in that case
def wait_on_entry_calc(self, key):
with self.lock:
self._reload_cache()
entry = self._get_cache()[key]
if not entry['being_calculated']:
return entry['value']
event_handler = _PickleCore.CacheChangeHandler(
filename=self._cache_fname(), core=self, key=key
)
observer = Observer()
event_handler.inject_observer(observer)
observer.schedule(
event_handler, path=self.expended_cache_dir, recursive=True
)
observer.start()
observer.join(timeout=1.0)
if observer.is_alive():
# print('Timedout waiting. Starting again...')
return self.wait_on_entry_calc(key)
# print("Returned value: {}".format(event_handler.value))
return event_handler.value
def clear_cache(self):
self._save_cache({})
def clear_being_calculated(self):
with self.lock:
cache = self._get_cache()
for key in cache:
cache[key]['being_calculated'] = False
self._save_cache(cache)
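# Illustrative sketch (not part of this module): the pickle core above is normally
# exercised through the public ``cachier`` decorator rather than instantiated
# directly. The decorator import and the example function below are assumptions
# based on the project's documented usage, not on this file.
if __name__ == '__main__':  # pragma: no cover
    from datetime import timedelta
    from cachier import cachier
    @cachier(stale_after=timedelta(days=3))
    def _expensive(arg):
        return arg * 2
    print(_expensive(21))  # computed once, then served from the pickle cache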
| 33.206278
| 100
| 0.567319
|
b14ef1df8ff4c660b9b6f2abfd5df6572d10b1e8
| 12,018
|
py
|
Python
|
tensorflow/contrib/eager/python/examples/resnet50/resnet50_test.py
|
tianyapiaozi/tensorflow
|
fb3ce0467766a8e91f1da0ad7ada7c24fde7a73a
|
[
"Apache-2.0"
] | 71
|
2017-05-25T16:02:15.000Z
|
2021-06-09T16:08:08.000Z
|
tensorflow/contrib/eager/python/examples/resnet50/resnet50_test.py
|
shrikunjsarda/tensorflow
|
7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae
|
[
"Apache-2.0"
] | 133
|
2017-04-26T16:49:49.000Z
|
2019-10-15T11:39:26.000Z
|
tensorflow/contrib/eager/python/examples/resnet50/resnet50_test.py
|
shrikunjsarda/tensorflow
|
7e8927e7af0c51ac20a63bd4eab6ff83df1a39ae
|
[
"Apache-2.0"
] | 31
|
2018-09-11T02:17:17.000Z
|
2021-12-15T10:33:35.000Z
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests and benchmarks for the ResNet50 model, executed eagerly."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gc
import tempfile
import time
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
import tensorflow.contrib.eager as tfe
from tensorflow.contrib.eager.python.examples.resnet50 import resnet50
from tensorflow.contrib.summary import summary_test_util
from tensorflow.python.client import device_lib
def device_and_data_format():
return ('/gpu:0', 'channels_first') if tfe.num_gpus() else ('/cpu:0',
'channels_last')
def random_batch(batch_size, data_format):
shape = (3, 224, 224) if data_format == 'channels_first' else (224, 224, 3)
shape = (batch_size,) + shape
num_classes = 1000
images = tf.random_uniform(shape)
labels = tf.random_uniform(
[batch_size], minval=0, maxval=num_classes, dtype=tf.int32)
one_hot = tf.one_hot(labels, num_classes)
return images, one_hot
def compute_gradients(model, images, labels):
with tf.GradientTape() as tape:
logits = model(images, training=True)
loss = tf.losses.softmax_cross_entropy(
logits=logits, onehot_labels=labels)
tf.contrib.summary.scalar(name='loss', tensor=loss)
return tape.gradient(loss, model.variables)
def apply_gradients(model, optimizer, gradients):
optimizer.apply_gradients(zip(gradients, model.variables))
class ResNet50Test(tf.test.TestCase):
def _apply(self, defun=False, execution_mode=None):
device, data_format = device_and_data_format()
model = resnet50.ResNet50(data_format)
if defun:
model.call = tfe.defun(model.call)
with tf.device(device), tfe.execution_mode(execution_mode):
images, _ = random_batch(2, data_format)
output = model(images, training=False)
tfe.async_wait()
self.assertEqual((2, 1000), output.shape)
def test_apply(self):
self._apply(defun=False)
def test_apply_async(self):
self._apply(defun=False, execution_mode=tfe.ASYNC)
def test_apply_with_defun(self):
self._apply(defun=True)
def test_apply_with_defun_async(self):
self._apply(defun=True, execution_mode=tfe.ASYNC)
def test_apply_no_top(self):
device, data_format = device_and_data_format()
model = resnet50.ResNet50(data_format, include_top=False)
with tf.device(device):
images, _ = random_batch(2, data_format)
output = model(images, training=False)
output_shape = ((2, 2048, 1, 1)
if data_format == 'channels_first' else (2, 1, 1, 2048))
self.assertEqual(output_shape, output.shape)
def test_apply_with_pooling(self):
device, data_format = device_and_data_format()
model = resnet50.ResNet50(data_format, include_top=False, pooling='avg')
with tf.device(device):
images, _ = random_batch(2, data_format)
output = model(images, training=False)
self.assertEqual((2, 2048), output.shape)
def _test_train(self, execution_mode=None):
device, data_format = device_and_data_format()
model = resnet50.ResNet50(data_format)
tf.train.get_or_create_global_step()
logdir = tempfile.mkdtemp()
with tf.contrib.summary.create_file_writer(
logdir, max_queue=0,
name='t0').as_default(), tf.contrib.summary.always_record_summaries():
with tf.device(device), tfe.execution_mode(execution_mode):
optimizer = tf.train.GradientDescentOptimizer(0.1)
images, labels = random_batch(2, data_format)
apply_gradients(model, optimizer,
compute_gradients(model, images, labels))
self.assertEqual(320, len(model.variables))
tfe.async_wait()
events = summary_test_util.events_from_logdir(logdir)
self.assertEqual(len(events), 2)
self.assertEqual(events[1].summary.value[0].tag, 'loss')
def test_train(self):
self._test_train()
def test_train_async(self):
self._test_train(execution_mode=tfe.ASYNC)
def test_no_garbage(self):
device, data_format = device_and_data_format()
model = resnet50.ResNet50(data_format)
optimizer = tf.train.GradientDescentOptimizer(0.1)
with tf.device(device):
images, labels = random_batch(2, data_format)
gc.disable()
# Warm up. Note that this first run does create significant amounts of
# garbage to be collected. The hope is that this is a build-only effect,
# and a subsequent training loop will create nothing which needs to be
# collected.
apply_gradients(model, optimizer,
compute_gradients(model, images, labels))
gc.collect()
previous_gc_debug_flags = gc.get_debug()
gc.set_debug(gc.DEBUG_SAVEALL)
for _ in range(2):
# Run twice to ensure that garbage that is created on the first
# iteration is no longer accessible.
apply_gradients(model, optimizer,
compute_gradients(model, images, labels))
gc.collect()
# There should be no garbage requiring collection.
self.assertEqual(0, len(gc.garbage))
gc.set_debug(previous_gc_debug_flags)
gc.enable()
class MockIterator(object):
def __init__(self, tensors):
self._tensors = [tf.identity(x) for x in tensors]
def next(self):
return self._tensors
class ResNet50Benchmarks(tf.test.Benchmark):
def _train_batch_sizes(self):
"""Choose batch sizes based on GPU capability."""
for device in device_lib.list_local_devices():
if tf.DeviceSpec.from_string(device.name).device_type == 'GPU':
# Avoid OOM errors with larger batch sizes, which seem to cause errors
# later on even if caught.
#
# TODO(allenl): Base this on device memory; memory limit information
# during the test seems to exclude the amount TensorFlow has allocated,
# which isn't useful.
if 'K20' in device.physical_device_desc:
return (16,)
if 'P100' in device.physical_device_desc:
return (16, 32, 64)
if tf.DeviceSpec.from_string(device.name).device_type == 'TPU':
return (32,)
return (16, 32)
def _report(self, label, start, num_iters, device, batch_size, data_format):
avg_time = (time.time() - start) / num_iters
dev = tf.DeviceSpec.from_string(device).device_type.lower()
name = '%s_%s_batch_%d_%s' % (label, dev, batch_size, data_format)
extras = {'examples_per_sec': batch_size / avg_time}
self.report_benchmark(
iters=num_iters, wall_time=avg_time, name=name, extras=extras)
def _force_device_sync(self):
# If this function is called in the context of a non-CPU device
# (e.g., inside a 'with tf.device("/gpu:0")' block)
# then this will force a copy from CPU->NON_CPU_DEVICE->CPU,
# which forces a sync. This is a roundabout way, yes.
tf.constant(1.).cpu()
def _benchmark_eager_apply(self, label, device_and_format, defun=False,
execution_mode=None, compiled=False):
with tfe.execution_mode(execution_mode):
device, data_format = device_and_format
model = resnet50.ResNet50(data_format)
if defun:
model.call = tfe.defun(model.call, compiled=compiled)
batch_size = 64
num_burn = 5
num_iters = 30
with tf.device(device):
images, _ = random_batch(batch_size, data_format)
for _ in xrange(num_burn):
model(images, training=False).cpu()
if execution_mode:
tfe.async_wait()
gc.collect()
start = time.time()
for _ in xrange(num_iters):
model(images, training=False).cpu()
if execution_mode:
tfe.async_wait()
self._report(label, start, num_iters, device, batch_size, data_format)
def benchmark_eager_apply_sync(self):
self._benchmark_eager_apply('eager_apply', device_and_data_format(),
defun=False)
def benchmark_eager_apply_async(self):
self._benchmark_eager_apply(
'eager_apply_async', device_and_data_format(), defun=False,
execution_mode=tfe.ASYNC)
def benchmark_eager_apply_with_defun(self):
self._benchmark_eager_apply('eager_apply_with_defun',
device_and_data_format(), defun=True)
def _benchmark_eager_train(self,
label,
make_iterator,
device_and_format,
defun=False,
execution_mode=None,
compiled=False):
with tfe.execution_mode(execution_mode):
device, data_format = device_and_format
for batch_size in self._train_batch_sizes():
(images, labels) = random_batch(batch_size, data_format)
model = resnet50.ResNet50(data_format)
optimizer = tf.train.GradientDescentOptimizer(0.1)
apply_grads = apply_gradients
if defun:
model.call = tfe.defun(model.call, compiled=compiled)
apply_grads = tfe.defun(apply_gradients, compiled=compiled)
num_burn = 3
num_iters = 10
with tf.device(device):
iterator = make_iterator((images, labels))
for _ in xrange(num_burn):
(images, labels) = iterator.next()
apply_grads(model, optimizer,
compute_gradients(model, images, labels))
if execution_mode:
tfe.async_wait()
self._force_device_sync()
gc.collect()
start = time.time()
for _ in xrange(num_iters):
(images, labels) = iterator.next()
apply_grads(model, optimizer,
compute_gradients(model, images, labels))
if execution_mode:
tfe.async_wait()
self._force_device_sync()
self._report(label, start, num_iters, device, batch_size, data_format)
def benchmark_eager_train_sync(self):
self._benchmark_eager_train('eager_train', MockIterator,
device_and_data_format(), defun=False)
def benchmark_eager_train_async(self):
self._benchmark_eager_train(
'eager_train_async',
MockIterator,
device_and_data_format(),
defun=False,
execution_mode=tfe.ASYNC)
def benchmark_eager_train_with_defun(self):
self._benchmark_eager_train(
'eager_train_with_defun', MockIterator,
device_and_data_format(), defun=True)
def benchmark_eager_train_datasets(self):
def make_iterator(tensors):
with tf.device('/device:CPU:0'):
ds = tf.data.Dataset.from_tensors(tensors).repeat()
return tfe.Iterator(ds)
self._benchmark_eager_train(
'eager_train_dataset', make_iterator,
device_and_data_format(), defun=False)
def benchmark_eager_train_datasets_with_defun(self):
def make_iterator(tensors):
with tf.device('/device:CPU:0'):
ds = tf.data.Dataset.from_tensors(tensors).repeat()
return tfe.Iterator(ds)
self._benchmark_eager_train(
'eager_train_dataset_with_defun', make_iterator,
device_and_data_format(), defun=True)
if __name__ == '__main__':
tfe.enable_eager_execution()
tf.test.main()
| 36.752294
| 80
| 0.670078
|
48cec7034cfb639fce5eadf2956d392a73fa44cf
| 1,414
|
py
|
Python
|
sciencebeam_parser/document/tei/citation.py
|
elifesciences/sciencebeam-parser
|
66964f283612b8d6fa8a23ad8790292c1ec07651
|
[
"MIT"
] | 13
|
2021-08-04T12:11:17.000Z
|
2022-03-28T20:41:20.000Z
|
sciencebeam_parser/document/tei/citation.py
|
elifesciences/sciencebeam-parser
|
66964f283612b8d6fa8a23ad8790292c1ec07651
|
[
"MIT"
] | 33
|
2021-08-05T08:37:59.000Z
|
2022-03-29T18:42:09.000Z
|
sciencebeam_parser/document/tei/citation.py
|
elifesciences/sciencebeam-parser
|
66964f283612b8d6fa8a23ad8790292c1ec07651
|
[
"MIT"
] | 1
|
2022-01-05T14:53:06.000Z
|
2022-01-05T14:53:06.000Z
|
from typing import (
Any,
Mapping,
Type
)
from lxml import etree
from sciencebeam_parser.document.semantic_document import (
SemanticCitation,
SemanticContentWrapper,
SemanticFigureCitation,
SemanticReferenceCitation,
SemanticTableCitation
)
from sciencebeam_parser.document.tei.common import (
TEI_E
)
from sciencebeam_parser.document.tei.factory import (
SingleElementTeiElementFactory,
TeiElementFactoryContext
)
CITATION_TYPE_BY_SEMANTIC_CLASS: Mapping[Type[Any], str] = {
SemanticFigureCitation: 'figure',
SemanticTableCitation: 'table',
SemanticReferenceCitation: 'bibr'
}
class CitationTeiElementFactory(SingleElementTeiElementFactory):
def get_tei_element_for_semantic_content(
self,
semantic_content: SemanticContentWrapper,
context: TeiElementFactoryContext
) -> etree.ElementBase:
assert isinstance(semantic_content, SemanticCitation)
citation = semantic_content
citation_type = CITATION_TYPE_BY_SEMANTIC_CLASS.get(type(citation))
attributes = {}
if citation_type:
attributes['type'] = citation_type
if citation.target_content_id:
attributes['target'] = '#' + citation.target_content_id
return TEI_E(
'ref',
attributes,
*context.iter_layout_block_tei_children(citation.merged_block)
)
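# Illustrative note (not part of the original module): the factory above resolves the
# TEI ``ref`` @type attribute purely from the semantic class of the citation, e.g.
#   CITATION_TYPE_BY_SEMANTIC_CLASS.get(SemanticFigureCitation)    -> 'figure'
#   CITATION_TYPE_BY_SEMANTIC_CLASS.get(SemanticReferenceCitation) -> 'bibr'
# A plain SemanticCitation maps to None, in which case no @type attribute is emitted.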
| 27.72549
| 75
| 0.713579
|
9ad7f79c73ee093c6aad22ea5e5fa007fd7c93a9
| 826
|
py
|
Python
|
handler/weui.py
|
hulingfeng211/weixin
|
e3b869ffa2bf54366d561535c84e118db0e9827e
|
[
"Apache-2.0"
] | null | null | null |
handler/weui.py
|
hulingfeng211/weixin
|
e3b869ffa2bf54366d561535c84e118db0e9827e
|
[
"Apache-2.0"
] | null | null | null |
handler/weui.py
|
hulingfeng211/weixin
|
e3b869ffa2bf54366d561535c84e118db0e9827e
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#===================================================
# creater:george
# email:hulingfeng211@163.com
# create time:2016-06-06 15:07
# description:
#
#===================================================
from tornado.web import RequestHandler,url
class IndexHandler(RequestHandler):
def get(self,*args,**kwargs):
self.render('weui/index.html')
pass
class Index2Handler(RequestHandler):
def get(self,*args,**kwargs):
self.render('weui/jquery-weui-demos/index.html')
class PageHandler(RequestHandler):
#(r"/(P?<nickname>.*)/article/details/(P?<postid>.*)", MainHandler),
def get(self,*args):
#self.write(args[0])
self.render('weui/jquery-weui-demos/%s'%args[0])
routes = [
url(r'/weui',IndexHandler,name='weui.home'),
url(r'/weui2',Index2Handler),
url(r'/(.*)',PageHandler), #
]
| 22.324324
| 69
| 0.599274
|
a3e6980802b41287b739767c4b278aff6ec89c11
| 2,079
|
py
|
Python
|
day5/server/controllers/users/controllers.py
|
smalljiny/raspi-class-example
|
7f92f34d366b94f9ae3c7da5ebeacdeb628be446
|
[
"Apache-2.0"
] | null | null | null |
day5/server/controllers/users/controllers.py
|
smalljiny/raspi-class-example
|
7f92f34d366b94f9ae3c7da5ebeacdeb628be446
|
[
"Apache-2.0"
] | null | null | null |
day5/server/controllers/users/controllers.py
|
smalljiny/raspi-class-example
|
7f92f34d366b94f9ae3c7da5ebeacdeb628be446
|
[
"Apache-2.0"
] | null | null | null |
from flask import Blueprint, abort, request, json, Response
import pymysql.cursors
from pymysql.err import IntegrityError
from encryption import password
from error import db_error
users = Blueprint('users', __name__)
# 사용자 리스트 조회, 사용자 추가
@users.route('', methods=['GET', 'POST'])
def route_user():
if request.method == 'POST':
return create_new_user()
else:
return get_users()
# DB 연결
def get_db_connection():
conn = pymysql.connect(host='192.168.25.60',
user='pi',
password='Wkddmsgk0613!',
db='raspi',
charset='utf8')
return conn;
# 응답 - 패스워드 오류
def invalid_password():
data = {
'code': 400,
'debug': 'Invalid password'
}
js = json.dumps(data)
resp = Response(js, status=400, mimetype='application/json')
return resp
# 응답 - 성공
def send_response(body):
data = {
'code': 200,
'body': body
}
js = json.dumps(data)
resp = Response(js, status=200, mimetype='application/json')
return resp
# 신규 사용자 가입
def create_new_user():
data = request.json
email = data["email"]
passwd = data["password"]
confirm = data["confirm"]
if (passwd != confirm):
return invalid_password()
conn = get_db_connection()
try:
with conn.cursor() as cursor:
sql = 'INSERT INTO `users` (`email`, `password`) VALUES (%s, %s);'
cursor.execute(sql, (email, password.get(passwd)))
conn.commit()
except IntegrityError as e:
return db_error.response(e)
finally:
conn.close()
return send_response('OK')
# 사용자 리스트
def get_users():
conn = get_db_connection()
try:
with conn.cursor() as cursor:
sql = 'SELECT `email` FROM users;'
cursor.execute(sql)
db_result = cursor.fetchall()
except IntegrityError as e:
return db_error.response(e)
finally:
conn.close()
result = [];
for row in db_result:
result.append({'email':row[0]})
return send_response(result)
| 23.359551
| 78
| 0.597403
|
e0f3b2a4ffec4df78702ca35140b23de9b38cfcb
| 1,166
|
py
|
Python
|
peek_agent/plugin/PeekAgentPlatformHook.py
|
Synerty/peek-agent
|
12ebeaa2e8017760d926e3d41dbd6112f8fdf142
|
[
"MIT"
] | null | null | null |
peek_agent/plugin/PeekAgentPlatformHook.py
|
Synerty/peek-agent
|
12ebeaa2e8017760d926e3d41dbd6112f8fdf142
|
[
"MIT"
] | null | null | null |
peek_agent/plugin/PeekAgentPlatformHook.py
|
Synerty/peek-agent
|
12ebeaa2e8017760d926e3d41dbd6112f8fdf142
|
[
"MIT"
] | 1
|
2016-12-12T21:44:39.000Z
|
2016-12-12T21:44:39.000Z
|
from typing import Optional
from peek_platform import PeekPlatformConfig
from peek_plugin_base.agent.PeekAgentPlatformHookABC import PeekAgentPlatformHookABC
class PeekAgentPlatformHook(PeekAgentPlatformHookABC):
def getOtherPluginApi(self, pluginName: str) -> Optional[object]:
pluginLoader = PeekPlatformConfig.pluginLoader
otherPlugin = pluginLoader.pluginEntryHook(pluginName)
if not otherPlugin:
return None
from peek_plugin_base.agent.PluginAgentEntryHookABC import PluginAgentEntryHookABC
assert isinstance(otherPlugin, PluginAgentEntryHookABC), (
"Not an instance of PluginAgentEntryHookABC")
return otherPlugin.publishedAgentApi
@property
def peekServerHttpPort(self) -> int:
from peek_platform import PeekPlatformConfig
return PeekPlatformConfig.config.peekServerHttpPort
@property
def peekServerHost(self) -> str:
from peek_platform import PeekPlatformConfig
return PeekPlatformConfig.config.peekServerHost
@property
def serviceId(self) -> str:
import socket
return "agent|" + socket.gethostname()
| 33.314286
| 90
| 0.745283
|
4486f02418f841ca5dfd6f54e11b77061a649af4
| 56,328
|
py
|
Python
|
legacy/autodiff.py
|
chao-ji/np-auto-diff
|
4b21a8895755072f8e0390ce4cc96b17308f8a90
|
[
"MIT"
] | 2
|
2021-03-07T09:43:38.000Z
|
2021-09-09T21:20:19.000Z
|
legacy/autodiff.py
|
chao-ji/reverse-mode-auto-differentiation
|
4b21a8895755072f8e0390ce4cc96b17308f8a90
|
[
"MIT"
] | null | null | null |
legacy/autodiff.py
|
chao-ji/reverse-mode-auto-differentiation
|
4b21a8895755072f8e0390ce4cc96b17308f8a90
|
[
"MIT"
] | null | null | null |
import numpy as np
class _BaseOp(object):
"""Base class of all Op classes. An Op emits a tensor of a specific shape, and contains the
following attributes:
sess: `Session`;
The session in which the Op is defined.
shape: `numpy.ndarray`;
1D array, e.g. [2, 3, 3, 2], specifying the shape of the emitted tensor.
parent_total: integer;
Total number of Op's for which the current Op is an argument; this is determined when
the data flow graph was defined in the beginning.
parent_acc: integer;
Initialized to zero; it keeps track of the number of parent Op's that have backpropped
gradients to the current Op in an iteration.
is_terminal: bool;
Initialized to False; indicates if the Op is terminal node (i.e. has no child node).
_cache_data: dict;
Caches data that are needed in both forward and backward pass to avoid recomputing.
Parameters
----------
sess: `Session`;
The session in which the Op is defined.
"""
def __init__(self, sess=None):
self.sess = sess
self.shape = ()
self.parent_total = 0
self.parent_acc = 0
self.is_terminal = False
self._cache_data = {}
self.sess.variables.append(self)
def eval(self, feed_dict):
"""Forward pass. Evaluate the current Op.
`_eval_func` is implemented by each Op separately to compute the tensor value.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}.
Returns
-------
`numpy.ndarray`; value of the tensor.
"""
if id(self) not in self.sess.values:
self.sess.values[id(self)] = self._eval_func(feed_dict)
return self.sess.values[id(self)]
def grad(self, feed_dict, backprop):
"""Backward pass. Update the gradient w.r.t. the current Op (`backprop`), and propagate
gradient down to child Op's. `_grad_func` is implemented by each Op separately to propagate
gradient down to child Op's.
NOTE: `grad` is invoked when a parent Op propagates gradient (`backprop`) back to the current
Op. When `grad` is invoked, the gradient is accumulated and `parent_acc` is incremented, which
maintains the number of parent Op's that have already backpropped gradients. The computation of
the gradient w.r.t. the current Op is finished when `parent_acc` == `parent_total`, and this
gradient is further propagated down to child Op's of the current Op by invoking
`self._grad_func(feed_dict)`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}.
backprop: `numpy.ndarray`;
Gradient backpropped from a parent Op. Has the SAME shape as the shape of the current Op
(i.e. `self.shape`).
"""
if self.is_terminal and not self.is_variable:
return
if id(self) not in self.sess.gradients:
self.sess.gradients[id(self)] = np.zeros(self.shape)
self.sess.gradients[id(self)] += backprop
self.parent_acc += 1
if self.parent_acc == self.parent_total and not self.is_terminal:
self._grad_func(feed_dict)
def __repr__(self):
"""Display the representation with tensor shape."""
return super(_BaseOp, self).__repr__()[:-1] + ", shape=" + str(self.shape) + ">"
class PlaceholderOp(_BaseOp):
"""Creates placeholder for input or parameters.
  NOTE: placeholders must be terminal nodes of the data flow graph, so they do not implement
the `_grad_func()` method.
Parameters
----------
shape: `numpy.array`;
1D array specifying shape of tensor
sess: `Session`;
The session in which the Op is defined
is_variable: bool;
Indicates if the placeholder holds trainable parameters or not
"""
def __init__(self, shape, is_variable=True, sess=None):
super(PlaceholderOp, self).__init__(sess)
self.is_terminal = True
self.is_variable = is_variable
self.shape = shape
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; value of the tensor
"""
return feed_dict[self]
class SigmoidOp(_BaseOp):
"""Op that applies sigmoid function componentwise to the input `X`.
Parameters
----------
X: `_BaseOp`;
The input tensor
sess: `Session`;
The session in which the Op is defined
"""
def __init__(self, X, sess=None):
super(SigmoidOp, self).__init__(sess)
self.shape = X.shape
self.X = X
self.X.parent_total += 1
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; value of the tensor
"""
X_val = self.X.eval(feed_dict)
return 1. / (1. + np.exp(-X_val))
def _grad_func(self, feed_dict):
"""Propagate gradient down to child `X`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
val = self.eval(feed_dict)
val = val * (1. - val)
grad_val = self.sess.gradients[id(self)]
dX_val = grad_val * val
self.X.grad(feed_dict, dX_val)
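# A minimal standalone sketch (illustrative, with arbitrary test values) of the identity used in
# SigmoidOp._grad_func above: d/dx sigmoid(x) = sigmoid(x) * (1 - sigmoid(x)), checked against a
# central finite difference. It only uses NumPy and does not touch the classes in this file.
def _check_sigmoid_grad(eps=1e-6):
    x = np.array([-2.0, -0.5, 0.0, 1.5, 3.0])
    s = 1. / (1. + np.exp(-x))
    analytic = s * (1. - s)
    numeric = (1. / (1. + np.exp(-(x + eps))) - 1. / (1. + np.exp(-(x - eps)))) / (2 * eps)
    assert np.allclose(analytic, numeric, atol=1e-6)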
class ReluOp(_BaseOp):
"""Op that applies the Rectified Linear Unit (i.e. ReLU) componentwise to the input `X`.
Parameters
----------
X: `_BaseOp`;
The input tensor
sess: `Session`;
The session in which the Op is defined
"""
def __init__(self, X, sess=None):
super(ReluOp, self).__init__(sess)
self.shape = X.shape
self.X = X
self.X.parent_total += 1
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; value of the tensor
"""
X_val = self.X.eval(feed_dict)
return np.maximum(0, X_val)
def _grad_func(self, feed_dict):
"""Propagate gradient down to child `X`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
X_val = self.X.eval(feed_dict)
grad_val = self.sess.gradients[id(self)]
grad_val[X_val <= 0] = 0
dX_val = grad_val
self.X.grad(feed_dict, dX_val)
class L2LossOp(_BaseOp):
"""Op that performs l2-regularization over the parameter tensor `W`. Specifically, it computes
`0.5 * reg * sum(|W|^2)`.
Parameters
----------
W: `_BaseOp`;
The input tensor containing parameters
reg: float;
Float between 0.0 and 1.0, controls the strength of regularization
sess: `Session`;
The session in which the Op is defined
"""
def __init__(self, W, reg, sess=None):
super(L2LossOp, self).__init__(sess)
self.shape = 1, 1
self.W = W
self.W.parent_total += 1
self.reg = reg
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`: value of tensor
"""
W_val = self.W.eval(feed_dict)
return .5 * self.reg * np.sum(W_val * W_val)
def _grad_func(self, feed_dict):
"""Propagate gradient down to `W`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
W_val = self.W.eval(feed_dict)
grad_val = self.sess.gradients[id(self)]
dW_val = self.reg * grad_val * W_val
self.W.grad(feed_dict, dW_val)
class AddOp(_BaseOp):
"""Op that computes the sum of two input tensors `X1` and `X2`.
NOTE: `X1` and `X2` must have the same shape.
Parameters
----------
X1: `_BaseOp`;
Input tensor
X2: `_BaseOp`;
Input tensor
sess: `Session`;
The session in which the Op is defined
"""
def __init__(self, X1, X2, sess=None):
super(AddOp, self).__init__(sess)
self.shape = X1.shape
self.X1 = X1
self.X2 = X2
self.X1.parent_total += 1
self.X2.parent_total += 1
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`: value of tensor
"""
return self.X1.eval(feed_dict) + self.X2.eval(feed_dict)
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X1` and `X2`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
grad_val = self.sess.gradients[id(self)]
self.X1.grad(feed_dict, grad_val)
self.X2.grad(feed_dict, grad_val)
class MatMulOp(_BaseOp):
"""Op that computes the matrix multiplication of input tensors `X1` and `X2`.
NOTE: the shapes of `X1` and `X2` must be compatible.
Parameters
----------
X1: `_BaseOp`;
Input 2D tensor, matrix
X2: `_BaseOp`;
Input 2D tensor, matrix
sess: `Session`;
The session in which the Op is defined
"""
def __init__(self, X1, X2, sess=None):
super(MatMulOp, self).__init__(sess)
self.shape = X1.shape[0], X2.shape[1]
self.X1 = X1
self.X2 = X2
self.X1.parent_total += 1
self.X2.parent_total += 1
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; value of tensor
"""
return np.dot(self.X1.eval(feed_dict), self.X2.eval(feed_dict))
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X1` and `X2`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
grad_val = self.sess.gradients[id(self)]
X1_val = self.X1.eval(feed_dict)
X2_val = self.X2.eval(feed_dict)
dX1_val = np.dot(grad_val, X2_val.T)
dX2_val = np.dot(X1_val.T, grad_val)
self.X1.grad(feed_dict, dX1_val)
self.X2.grad(feed_dict, dX2_val)
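# A small standalone sketch (assumed shapes and random data) of the matrix calculus used in
# MatMulOp._grad_func above: for a scalar objective L = sum(G * (X1 @ X2)) with upstream gradient
# G, dL/dX1 = G @ X2.T and dL/dX2 = X1.T @ G. One entry is checked by a finite difference.
def _check_matmul_grad(eps=1e-6):
    rng = np.random.RandomState(0)
    X1, X2 = rng.randn(3, 4), rng.randn(4, 2)
    G = rng.randn(3, 2)  # stands in for the gradient backpropped from a parent Op
    dX1 = np.dot(G, X2.T)
    X1_pert = X1.copy()
    X1_pert[1, 2] += eps
    numeric = (np.sum(G * np.dot(X1_pert, X2)) - np.sum(G * np.dot(X1, X2))) / eps
    assert np.allclose(numeric, dX1[1, 2], atol=1e-4)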
class _2dKernelOp(_BaseOp):
"""Base class of 2D filter Op's (`Conv2dOp` and `MaxPool2dOp`).
It provides methods for
1. Setting the padding hyperparameters (`_set_padding()`);
2. Padding zeros (`_pad_X()`) for the input 4D tensor;
3. Removing padded zeros (`_depad_dX()`) for input 4D tensor;
4. Computing the height and width coordinates of the upper left pixel of all image patches
(`_img_col_index()`);
5. Converting input 4D tensor into the format of "patch matrix" (`_X_tensor2patchmat()`);
6. Converting the gradient w.r.t. input tensor `X` in the format of "patch matrix" to a 4D
tensor with the same shape as `X` (`_dX_patchmat2tensor()`).
Parameters
----------
X: `_BaseOp`;
Input 4D tensor, of dimensions in [batch, in_height, in_width, in_channels]
fsize: `numpy.ndarray`;
1D array of length 2, specifying filter sizes along height and width axes, e.g. [3, 3]
strides: `numpy.ndarray`;
1D array of length 2, specifying strides along height and width axes, e.g. [2, 2]
padding: string;
Either "SAME" or "VALID", specifying the padding algorithm
sess: `Session`;
The session in which the Op is defined
"""
def __init__(self, X, fsize, strides, padding, sess=None):
super(_2dKernelOp, self).__init__(sess)
self.X = X
self.X.parent_total += 1
self.fsize = fsize
self.strides = strides
self.padding = padding
self._out_height, self._out_width, self._padding, self._X_shape_pad = self._set_padding()
self._img_col_index_val = self._img_col_index()
def _set_padding(self):
"""Set the padding hyperparameters according to the algorithm:
https://www.tensorflow.org/versions/r1.3/api_guides/python/nn#Convolution
Returns
-------
`tuple`; Tuple containing the 2D output dimensions (`_out_height` and `_out_width`), padding
hyperparameters (`_padding`), and the shape of the padded `X` (`_X_shape_pad`)
"""
X, strides, fsize = self.X, self.strides, self.fsize
if self.padding == "SAME":
_out_height = int(np.ceil(float(X.shape[1]) / strides[0]))
_out_width = int(np.ceil(float(X.shape[2]) / strides[1]))
pad_along_height = max(fsize[0] - strides[0], 0) if X.shape[1] % strides[0] == 0 \
else max(fsize[0] - X.shape[1] % strides[0], 0)
pad_along_width = max(fsize[1] - strides[1], 0) if X.shape[2] % strides[1] == 0 \
else max(fsize[1] - X.shape[2] % strides[1], 0)
_padding = pad_along_height // 2, pad_along_height - pad_along_height // 2, \
pad_along_width // 2, pad_along_width - pad_along_width // 2
elif self.padding == "VALID":
_out_height = int(np.ceil(float(X.shape[1] - fsize[0] + 1) / strides[0]))
_out_width = int(np.ceil(float(X.shape[2] - fsize[1] + 1) / strides[1]))
_padding = 0, 0, 0, 0
else:
raise ValueError("Padding scheme should be 'SAME' or 'VALID'.")
_X_shape_pad = X.shape[0], X.shape[1] + _padding[0] + _padding[1], \
X.shape[2] + _padding[2] + _padding[3], X.shape[3]
return _out_height, _out_width, _padding, _X_shape_pad
def _pad_X(self, feed_dict):
"""Pad a 4D tensor with zeros (or specified value) along `in_height` and `in_width` axes.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; the input 4D array with zero padded
"""
_, in_height_pad, in_width_pad, _ = self._X_shape_pad
pad_top, pad_bot, pad_left, pad_right = self._padding
X_pad = np.ones(self._X_shape_pad) * self._pad_value if hasattr(self, "_pad_value") \
else np.zeros(self._X_shape_pad)
X_pad[:,
pad_top : in_height_pad - pad_bot,
pad_left : in_width_pad - pad_right, :] = self.X.eval(feed_dict)
return X_pad
def _depad_dX(self, dX):
"""Remove padded zeros in a 4D tensor.
Parameters
----------
dX: `numpy.ndarray`;
4D array containing gradients w.r.t. zero-padded `X`
Returns
-------
`numpy.ndarray`; 4D array containing the gradient w.r.t. the input array `X` (`dX`) where padded
zeros are removed
"""
_, in_height_pad, in_width_pad, _ = self._X_shape_pad
pad_top, pad_bot, pad_left, pad_right = self._padding
dX = dX[:,
pad_top : in_height_pad - pad_bot,
pad_left : in_width_pad - pad_right, :]
return dX
def _img_col_index(self):
"""Compute the height and width coordinates of the upper left pixel of all image patches.
Returns
-------
`numpy.ndarray`; 2D array containing 4-tuples (h, w, h_idx, w_idx), where `h` and `w`
correspond to the height and width indexes of the upper left pixel of each patch within the
input 2D image, and `h_idx` and `w_idx` the height and width indexes of each patch within the
output 2D image.
Given 4px-by-4px image and 2px-by-2px filter with 2px strides along height and width
X[0,0] X[0,1] X[0,2] X[0,3]
X[1,0] X[1,1] X[1,2] X[1,3]
X[2,0] X[2,1] X[2,2] X[2,3]
X[3,0] X[3,1] X[3,2] X[3,3]
[(h, w, h_idx, w_idx)] =
[(0, 0, 0, 0),
(0, 2, 0, 1),
(2, 0, 1, 0),
(2, 2, 1, 1)]
"""
filter_height, filter_width = self.fsize
stride_height, stride_width = self.strides
_, in_height_pad, in_width_pad, _ = self._X_shape_pad
img_col_index = np.array([(h, w, h_idx, w_idx)
for h_idx, h in enumerate(np.arange(0, in_height_pad - filter_height + 1,
stride_height))
for w_idx, w in enumerate(np.arange(0, in_width_pad - filter_width + 1,
stride_width))])
return img_col_index
def _X_tensor2patchmat(self, feed_dict, flat_batch=False, in_channels=None):
"""PRE-PROCESSING step that converts input 4D tensor into 2D tensor in the "patch matrix"
format.
Input 4D tensor `X` has dimensions [batch, in_height, in_width, in_channels] = [2, 3, 3, 2]
X[0,0,0,0] X[0,0,1,0] X[0,0,2,0] X[0,0,0,1] X[0,0,1,1] X[0,0,2,1]
X[0,1,0,0] X[0,1,1,0] X[0,1,2,0] X[0,1,0,1] X[0,1,1,1] X[0,1,2,1]
X[0,2,0,0] X[0,2,1,0] X[0,2,2,0] X[0,2,0,1] X[0,2,1,1] X[0,2,2,1]
X[1,0,0,0] X[1,0,1,0] X[1,0,2,0] X[1,0,0,1] X[1,0,1,1] X[1,0,2,1]
X[1,1,0,0] X[1,1,1,0] X[1,1,2,0] X[1,1,0,1] X[1,1,1,1] X[1,1,2,1]
X[1,2,0,0] X[1,2,1,0] X[1,2,2,0] X[1,2,0,1] X[1,2,1,1] X[1,2,2,1]
Each 3px-by-3px submatrix corresponds to an image of dimensions [in_height, in_width], and the
four smaller submatrices form a 2-by-2 "matrix" whose rows correspond to `batch` and whose
columns correspond to `in_channels`.
Given geometric hyperparameters `filter_height`=2 and `filter_width`=2, `X` is converted
into 2D array in the "patch matrix" format of dimensions [out_height * out_width * batch,
in_channels * filter_height * filter_width] where `out_height`=2 and `out_width`=2.
X[0,0,0,0] X[0,0,1,0] X[0,1,0,0] X[0,1,1,0] X[0,0,0,1] X[0,0,1,1] X[0,1,0,1] X[0,1,1,1]
X[1,0,0,0] X[1,0,1,0] X[1,1,0,0] X[1,1,1,0] X[1,0,0,1] X[1,0,1,1] X[1,1,0,1] X[1,1,1,1]
X[0,0,1,0] X[0,0,2,0] X[0,1,1,0] X[0,1,2,0] X[0,0,1,1] X[0,0,2,1] X[0,1,1,1] X[0,1,2,1]
X[1,0,1,0] X[1,0,2,0] X[1,1,1,0] X[1,1,2,0] X[1,0,1,1] X[1,0,2,1] X[1,1,1,1] X[1,1,2,1]
X[0,1,0,0] X[0,1,1,0] X[0,2,0,0] X[0,2,1,0] X[0,1,0,1] X[0,1,1,1] X[0,2,0,1] X[0,2,1,1]
X[1,1,0,0] X[1,1,1,0] X[1,2,0,0] X[1,2,1,0] X[1,1,0,1] X[1,1,1,1] X[1,2,0,1] X[1,2,1,1]
X[0,1,1,0] X[0,1,2,0] X[0,2,1,0] X[0,2,2,0] X[0,1,1,1] X[0,1,2,1] X[0,2,1,1] X[0,2,2,1]
X[1,1,1,0] X[1,1,2,0] X[1,2,1,0] X[1,2,2,0] X[1,1,1,1] X[1,1,2,1] X[1,2,1,1] X[1,2,2,1]
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray` 2D array in the "patch matrix" format
"""
if "X_val_mat" in self._cache_data:
return self._cache_data["X_val_mat"]
X_val = self._pad_X(feed_dict)
filter_height, filter_width = self.fsize
batch = self.X.shape[0]
X_val_mat = np.vstack([X_val[:, h:h+filter_height, w:w+filter_width, :]
.transpose(0, 3, 1, 2).reshape((batch, -1))
for h, w, _, _ in self._img_col_index_val])
if flat_batch and in_channels is not None:
out_height, out_width = self._out_height, self._out_width
X_val_mat = X_val_mat.reshape((out_height, out_width, -1, in_channels, filter_height,
filter_width)).reshape((-1, filter_height * filter_width))
self._cache_data["X_val_mat"] = X_val_mat
return self._cache_data["X_val_mat"]
def _dX_patchmat2tensor(self, dX_val_mat):
"""POST-PROCESSING step that converts the gradient w.r.t. the "patch matrix" into a 4D tensor
with the same shape as `X`.
1. `dX_val_mat` of dimensions [out_height * out_width * batch, in_channels * filter_height *
filter_width] is reshaped into 6D array of dimensions [out_height, out_width, batch,
in_channels, filter_height, filter_width];
2. The dimensions of the 6D array are reordered as [out_height, out_width, batch, filter_height,
filter_width, in_channels], as in `dX_val_tmp`;
3. Initialize a zero tensor of shape `self._X_shape_pad`, as in `dX_val`;
4. The gradient in each patch of `dX_val_tmp` (of dimensions [batch, filter_height,
filter_width, in_channels]) is added to the corresponding subarray in `dX_val`;
5. Remove padded zeros in `dX_val`.
Parameters
----------
dX_val_mat: `numpy.ndarray`
2D array, the patch matrix, of dimensions [out_height * out_width * batch,
in_channels * filter_height * filter_width]
Returns
-------
`numpy.ndarray`; 4D tensor, of dimensions [batch, in_height, in_width, in_channels]
"""
filter_height, filter_width = self.fsize
out_height, out_width = self._out_height, self._out_width
in_channels = self.X.shape[3]
dX_val_tmp = dX_val_mat.reshape((out_height, out_width, -1, in_channels, filter_height,\
filter_width)).transpose(0, 1, 2, 4, 5, 3)
dX_val = np.zeros(self._X_shape_pad)
for h, w, h_idx, w_idx in self._img_col_index_val:
dX_val[:, h:h+filter_height, w:w+filter_width, :] += dX_val_tmp[h_idx, w_idx]
dX_val = self._depad_dX(dX_val)
return dX_val
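# An illustrative standalone sketch (toy sizes only, not a call into the class) of the "patch
# matrix" (im2col) layout described in the docstrings above, for the 4x4 image / 2x2 filter /
# stride-2 example used in `_img_col_index`: one row per patch, each patch flattened row-major.
def _toy_patch_matrix():
    X = np.arange(16).reshape(4, 4)  # a single 4x4 image with one channel
    filter_height, filter_width, stride = 2, 2, 2
    patches = [X[h:h + filter_height, w:w + filter_width].ravel()
               for h in range(0, 4 - filter_height + 1, stride)
               for w in range(0, 4 - filter_width + 1, stride)]
    return np.stack(patches)  # shape (4, 4): 4 patches, each flattened to 4 values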
class Conv2dOp(_2dKernelOp):
def __init__(self, X, W, strides, padding, sess=None):
"""Op that performs 2D convolution between a 4D input tensor `X` and a 4D filter tensor `W`.
Parameters
----------
X: `_BaseOp`;
Input 4D tensor, of dimensions in [batch, in_height, in_width, in_channels]
W: `_BaseOp`;
Filter 4D tensor, of dimensions in [filter_height, filter_width, in_channels, out_channels]
strides: `numpy.ndarray`;
1D array of length 2, specifying strides along height and width axes, e.g. [2, 2]
padding: string;
Either "SAME" or "VALID", specifying the padding algorithm
sess: `Session`;
The session in which the Op is defined
"""
fsize = W.shape[0], W.shape[1]
super(Conv2dOp, self).__init__(X, fsize, strides, padding, sess)
self.shape = X.shape[0], self._out_height, self._out_width, W.shape[3]
self.W = W
self.W.parent_total += 1
def _W_tensor2patchmat(self, feed_dict):
"""PRE-PROCESSING step that converts the 4D filter tensor into the "patch matrix" format.
Filter 4D tensor `W` has dimensions [filter_height, filter_width, in_channels, out_channels]
= [2, 2, 2, 2]
W[0,0,0,0] W[0,1,0,0] W[0,0,1,0] W[0,1,1,0]
W[1,0,0,0] W[1,1,0,0] W[1,0,1,0] W[1,1,1,0]
W[0,0,0,1] W[0,1,0,1] W[0,0,1,1] W[0,1,1,1]
W[1,0,0,1] W[1,1,0,1] W[1,0,1,1] W[1,1,1,1]
Each 2px-by-2px submatrix corresponds to a 2D array of dimensions [filter_height, filter_width],
and the four smaller submatrices form a 2-by-2 "matrix" whose rows correspond to
`out_channels` and whose columns correspond to `in_channels`.
`W` is converted to 2D array in the "patch matrix" format of dimensions [in_channels *
filter_height * filter_width, out_channels]
W[0,0,0,0] W[0,0,0,1]
W[0,1,0,0] W[0,1,0,1]
W[1,0,0,0] W[1,0,0,1]
W[1,1,0,0] W[1,1,0,1]
W[0,0,1,0] W[0,0,1,1]
W[0,1,1,0] W[0,1,1,1]
W[1,0,1,0] W[1,0,1,1]
W[1,1,1,0] W[1,1,1,1]
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; 2D array, matrix
"""
if "W_val_mat" in self._cache_data:
return self._cache_data["W_val_mat"]
W_val = self.W.eval(feed_dict)
out_channels = self.W.shape[3]
self._cache_data["W_val_mat"] = W_val.transpose(2, 0, 1, 3).reshape((-1, out_channels))
return self._cache_data["W_val_mat"]
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Given `X` and `W` in "patch matrix" format `X_val_mat` and `W_val_mat`, right-multiplying
`W_val_mat` with `X_val_mat` produces 2D array of dimensions [out_height * out_width * batch,
out_channels] `C_val_mat`. `C_val_mat` is then reshaped into 4D tensor of dimensions [out_height
, out_width, batch, out_channels], which is then reordered as [batch, out_height, out_width,
out_channels].
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; 4D tensor of dimensions [batch, out_height, out_width, out_channels]
"""
batch, out_height, out_width, out_channels = self.shape
X_val_mat = self._X_tensor2patchmat(feed_dict)
W_val_mat = self._W_tensor2patchmat(feed_dict)
C_val_mat = np.dot(X_val_mat, W_val_mat)
return C_val_mat.reshape((out_height, out_width, batch, out_channels)).transpose(2, 0, 1, 3)
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X` and `W`.
Given the 4D tensor `grad_val` (i.e. gradient w.r.t. Op's output) of dimensions [batch,
out_height, out_width, out_channels], it is reordered into [out_height, out_width, batch,
out_channels] and then reshaped into 2D array `grad_val_mat` of dimensions [out_height *
out_width * batch, out_channels].
The gradient w.r.t `W` in the "patch matrix" format is computed by right-multiplying
`grad_val_mat` with `X_val_mat.T` (of dimensions [in_channels * filter_height * filter_width,
out_height * out_width * batch]), producing 2D array of dimensions [in_channels * filter_height
* filter_width, out_channels].
The gradient w.r.t. `X` in the "patch matrix" format is computed by right-multiplying
`W_val_mat.T` (of dimensions [out_channels, in_channels * filter_height * filter_width]) with
`grad_val_mat`, producing a 2D array of dimensions [out_height * out_width * batch, in_channels *
filter_height * filter_width].
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
filter_height, filter_width, in_channels, out_channels = self.W.shape
grad_val = self.sess.gradients[id(self)]
grad_val_mat = grad_val.transpose(1, 2, 0, 3).reshape((-1, out_channels))
X_val_mat = self._X_tensor2patchmat(feed_dict)
W_val_mat = self._W_tensor2patchmat(feed_dict)
dW_val_mat = np.dot(X_val_mat.T, grad_val_mat)
dW_val = dW_val_mat.reshape((in_channels, filter_height, filter_width, out_channels)).\
transpose(1, 2, 0, 3)
dX_val_mat = np.dot(grad_val_mat, W_val_mat.T)
dX_val = self._dX_patchmat2tensor(dX_val_mat)
self.X.grad(feed_dict, dX_val)
self.W.grad(feed_dict, dW_val)
class MaxPool2dOp(_2dKernelOp):
def __init__(self, X, fsize, strides, padding, sess=None):
"""Op that performs 2D max-pooling on a 4D tensor.
Parameters
----------
X: `_BaseOp`;
Input 4D tensor, of dimensions in [batch, in_height, in_width, in_channels]
fsize: `numpy.ndarray`
1D array of length 2, specifying filter size along height and width axes, e.g. [2, 2]
strides: `numpy.ndarray`;
1D array of length 2, specifying strides along height and width axes, e.g. [2, 2]
padding: string;
Either "SAME" or "VALID", specifying the padding algorithm
sess: `Session`;
The session that the Op is associated with
"""
super(MaxPool2dOp, self).__init__(X, fsize, strides, padding, sess)
self.shape = X.shape[0], self._out_height, self._out_width, X.shape[3]
self._pad_value = np.nan
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; 4D tensor, the output of MaxPool(X)
"""
batch, out_height, out_width, in_channels = self.shape
X_val_mat = self._X_tensor2patchmat(feed_dict, flat_batch=True, in_channels=in_channels)
_argmax = self._argmax(X_val_mat)
P_val = X_val_mat[np.arange(X_val_mat.shape[0]), _argmax].\
reshape((out_height, out_width, batch, in_channels)).\
transpose(2, 0, 1, 3)
return P_val
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X`.
`X_val_mat` is a 2D array of dimensions [out_height * out_width * batch * in_channels,
filter_height * filter_width], where each row is an indicator vector that indicates the index
of the maximum-intensity pixel.
Given the 4D tensor `grad_val` (i.e. gradient w.r.t. Op's output) of dimensions [batch,
out_height, out_width, out_channels], it is reordered into [out_height, out_width, batch,
out_channels] and then reshaped into 2D array of dimensions [out_height * out_width * batch
* out_channels, 1]. The 2D array is duplicated (`np.tile`) along the width axis by a factor of
`X_val_mat.shape[1]` (i.e. `filter_height` * `filter_width`), producing `grad_val_mat`.
`X_val_mat` is component-wise multiplied by `grad_val_mat`, which contains the gradient w.r.t.
`X` in the "patch matrix" format.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
filter_height, filter_width = self.fsize
batch, out_height, out_width, in_channels = self.shape
X_val_mat = self._X_tensor2patchmat(feed_dict, flat_batch=True, in_channels=in_channels)
_argmax = self._argmax(X_val_mat)
ind_mat = np.zeros_like(X_val_mat)
ind_mat[np.arange(ind_mat.shape[0]), _argmax] = 1
grad_val = self.sess.gradients[id(self)]
grad_val_mat = np.tile(grad_val.transpose(1, 2, 0, 3).reshape((-1, 1)), (1, ind_mat.shape[1]))
dX_val_mat = ind_mat * grad_val_mat
dX_val_mat = dX_val_mat.reshape((out_height * out_width * batch, in_channels * filter_height *
filter_width))
dX_val = self._dX_patchmat2tensor(dX_val_mat)
self.X.grad(feed_dict, dX_val)
def _argmax(self, X_val_mat):
"""Compute the indexes of the largest element of each flattened image patch per depth (channel)
of length `filter_height` * `filter_width`.
Parameters
----------
`X_val_mat`: `numpy.array`;
2-D array of dimensions [out_height * out_width * batch * in_channels,
filter_height * filter_width]
"""
if "argmax" not in self._cache_data:
self._cache_data["argmax"] = np.nanargmax(X_val_mat, axis=1)
return self._cache_data["argmax"]
class AvgPool2dOp(_2dKernelOp):
def __init__(self, X, fsize, strides, padding, sess=None):
super(AvgPool2dOp, self).__init__(X, fsize, strides, padding, sess)
self.shape = X.shape[0], self._out_height, self._out_width, X.shape[3]
self._pad_value = np.nan
def _eval_func(self, feed_dict):
batch, out_height, out_width, in_channels = self.shape
X_val_mat = self._X_tensor2patchmat(feed_dict, flat_batch=True, in_channels=in_channels)
X_val_mat = np.nanmean(X_val_mat, axis=1)
P_val = X_val_mat.reshape((out_height, out_width, batch, in_channels)).transpose(2, 0, 1, 3)
return P_val
def _grad_func(self, feed_dict):
filter_height, filter_width = self.fsize
batch, out_height, out_width, in_channels = self.shape
X_val_mat = self._X_tensor2patchmat(feed_dict, flat_batch=True, in_channels=in_channels)
div = np.logical_not(np.isnan(X_val_mat)).astype(float).sum(axis=1, keepdims=True)
ind_mat = np.ones_like(X_val_mat) / div
grad_val = self.sess.gradients[id(self)]
grad_val_mat = np.tile(grad_val.transpose(1, 2, 0, 3).reshape((-1, 1)), (1, ind_mat.shape[1]))
dX_val_mat = ind_mat * grad_val_mat
dX_val_mat = dX_val_mat.reshape((out_height * out_width * batch, in_channels * filter_height *
filter_width))
dX_val = self._dX_patchmat2tensor(dX_val_mat)
self.X.grad(feed_dict, dX_val)
class PadOp(_BaseOp):
def __init__(self, X, paddings, constant_value=0, sess=None):
super(PadOp, self).__init__(sess)
self.shape = self._shape(X, paddings)
self.X = X
self.paddings = paddings
self.constant_value = constant_value
self._slice = self._slice(self.X, self.paddings)
def _eval_func(self, feed_dict):
X_val = self.X.eval(feed_dict)
Pad_val = np.ones(self.shape) * self.constant_value
Pad_val[self._slice] = X_val
return Pad_val
def _grad_func(self, feed_dict):
grad_val = self.sess.gradients[id(self)]
dX_val = grad_val[self._slice]
self.X.grad(feed_dict, dX_val)
def _shape(self, X, paddings):
_shape = tuple(np.sum(paddings, axis=1) + X.shape)
return _shape
def _slice(self, X, paddings):
# Use a tuple of slices: indexing a NumPy array with a list of slices is no longer supported.
_slice = tuple(slice(p[0], p[0] + i) for p, i in zip(paddings, X.shape))
return _slice
class BiasAddOp(_BaseOp):
"""Op that adds bias `B` to a tensor `X`.
`X` may have arbitrary shape. `B` must be 1D tensor with length matching the last dim of `X`. For
example, given `B` = array([0.1, 0.2, 0.4]) and `X` =
array([[[0, 4, 0],
[3, 9, 9],
[6, 6, 6]],
[[7, 0, 4],
[7, 1, 7],
[7, 1, 1]]])
`X` + `B` would be
array([[[ 0.1, 4.2, 0.4],
[ 3.1, 9.2, 9.4],
[ 6.1, 6.2, 6.4]],
[[ 7.1, 0.2, 4.4],
[ 7.1, 1.2, 7.4],
[ 7.1, 1.2, 1.4]]])
Gradients are backpropped to both `X` and `B`.
Parameters
----------
X: `_BaseOp`;
Input tensor with arbitrary shape
B: `_BaseOp`;
Input 1D tensor containing bias
sess: `Session`;
The session in which the Op is defined
"""
def __init__(self, X, B, sess=None):
if len(B.shape) != 1 or X.shape[-1] != B.shape[0]:
raise ValueError("`B` must be a 1D array with length matching the last dim of `X`.\
B.shape[0]: %d, X.shape[-1]: %d" % (B.shape[0], X.shape[-1]))
super(BiasAddOp, self).__init__(sess)
self.shape = X.shape
self.X = X
self.B = B
self.X.parent_total += 1
self.B.parent_total += 1
self._ones = np.ones((np.prod(self.shape[:-1]), 1))
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; value of broadcast `B` added to `X`
"""
return self.X.eval(feed_dict) + self.B.eval(feed_dict)
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X` and `B`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
grad_val = self.sess.gradients[id(self)]
dX_val = grad_val
dB_val = np.dot(self._ones.T, grad_val.reshape((-1, self.B.shape[0]))).reshape(self.B.shape[0])
self.X.grad(feed_dict, dX_val)
self.B.grad(feed_dict, dB_val)
class ReshapeOp(_BaseOp):
"""Op that reshapes input tensor.
Parameters
----------
X: `_BaseOp`;
Input tensor to be reshaped
shape: `np.ndarray`;
The shape that `X` will be reshaped to
sess: `Session`;
The session that the Op is associated with
"""
def __init__(self, X, shape, sess=None):
super(ReshapeOp, self).__init__(sess)
self.shape = shape
self.X = X
self.X.parent_total += 1
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; the n-D array containing the values of the reshaped tensor
"""
return self.X.eval(feed_dict).reshape(self.shape)
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
grad_val = self.sess.gradients[id(self)]
dX_val = grad_val.reshape(self.X.shape)
self.X.grad(feed_dict, dX_val)
class DropoutOp(_BaseOp):
"""Op that implements dropout.
http://www.jmlr.org/papers/volume15/srivastava14a/srivastava14a.pdf
In input tensor `X`, a random subset of units are forced to output zero with probability
`1 - KEEP_PROB`, while the output of the remaining units is scaled up by `1 / self.KEEP_PROB`.
Parameters
----------
X: `_BaseOp`;
Input tensor in which a random subset of units is forced to output zero
KEEP_PROB: `_BaseOp`;
The probability with which each unit in `X` is kept (i.e. not forced to output zero)
sess: `Session`;
The session that the Op is associated with
"""
def __init__(self, X, KEEP_PROB, sess=None):
super(DropoutOp, self).__init__(sess)
self.shape = X.shape
self.X = X
self.KEEP_PROB = KEEP_PROB
self.X.parent_total += 1
self.KEEP_PROB.parent_total += 1
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; the n-D array containing the values of `self.X` after dropout
"""
X_val = self.X.eval(feed_dict)
X_dropout_val = X_val / self.KEEP_PROB.eval(feed_dict)
_mask = self._mask(feed_dict)
X_dropout_val[_mask] = 0.
return X_dropout_val
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
grad_val = self.sess.gradients[id(self)]
dX_val = grad_val / self.KEEP_PROB.eval(feed_dict)
_mask = self._mask(feed_dict)
dX_val[_mask] = 0.
self.X.grad(feed_dict, dX_val)
def _mask(self, feed_dict):
"""Compute a boolean-valued tensor the same shape as `X` containing indicator variables
(True if the component is to be dropped, False if the component is to be kept).
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; the n-D array with the same size as `X` containing indicator variables
"""
if "mask" not in self._cache_data:
self._cache_data["mask"] = np.random.binomial(1, 1. - self.KEEP_PROB.eval(feed_dict),
size=self.shape).astype(bool)
return self._cache_data["mask"]
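# A standalone sketch (assumed keep probability and seed) of the "inverted dropout" scaling that
# DropoutOp implements above: surviving units are divided by keep_prob so the expected activation
# at training time matches the unscaled activation used at test time.
def _toy_inverted_dropout(x, keep_prob=0.8, seed=0):
    rng = np.random.RandomState(seed)
    mask = rng.binomial(1, keep_prob, size=x.shape)  # 1 = keep, 0 = drop
    return x * mask / keep_prob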
class SoftmaxCrossEntropyWithLogitsOp(_BaseOp):
"""Op that computes the cross-entropy.
Logits are first transformed into probabilities by applying the softmax function to each row of
`logits`; the cross-entropy is then computed from these probabilities and the true class labels.
Emits tensor of dimension [batch].
Parameters
----------
labels: `_BaseOp`;
2D tensor of dimensions [batch, num_of_classes]
logits: `_BaseOp`;
2D tensor of dimensions [batch, num_of_classes]
sess: `Session`;
The session that the Op is associated with
"""
def __init__(self, labels, logits, sess=None):
super(SoftmaxCrossEntropyWithLogitsOp, self).__init__(sess)
self.shape = (logits.shape[0],)
self.labels = labels
self.logits = logits
self.labels.parent_total += 1
self.logits.parent_total += 1
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; 1D array of dimension [batch]
"""
logits_probs_val = self._softmax(feed_dict)
labels_val = self.labels.eval(feed_dict)
cross_entropy = np.sum(-np.log(logits_probs_val) * labels_val, axis=1)
return cross_entropy
def _grad_func(self, feed_dict):
"""Propagate gradient down to `logits`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
grad_val = self.sess.gradients[id(self)]
logits_probs_val = self._softmax(feed_dict)
labels_val = self.labels.eval(feed_dict)
dlogits_val = logits_probs_val - labels_val
dlogits_val = grad_val.reshape((-1, 1)) * dlogits_val
self.logits.grad(feed_dict, dlogits_val)
def _softmax(self, feed_dict):
"""Transform `logits` into probabilities by applying softmax function.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; 2D array of dimensions [batch, num_of_classes]
"""
if "softmax" in self._cache_data:
return self._cache_data["softmax"]
logits_val = self.logits.eval(feed_dict)
# Shift logits by their row-wise max before exponentiating for numerical stability.
logits_probs_val = np.exp(logits_val - logits_val.max(axis=1, keepdims=True))
self._cache_data["softmax"] = logits_probs_val / logits_probs_val.sum(axis=1, keepdims=True)
return self._cache_data["softmax"]
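# An illustrative standalone check (arbitrary logits and one-hot labels) of the gradient identity
# used in SoftmaxCrossEntropyWithLogitsOp._grad_func above: with p = softmax(z) and one-hot y,
# d(cross_entropy)/dz = p - y. One component is verified by a forward finite difference.
def _check_softmax_xent_grad(eps=1e-6):
    z = np.array([[1.0, 2.0, 0.5]])
    y = np.array([[0.0, 1.0, 0.0]])
    def softmax(a):
        e = np.exp(a - a.max(axis=1, keepdims=True))
        return e / e.sum(axis=1, keepdims=True)
    analytic = softmax(z) - y
    z_pert = z.copy()
    z_pert[0, 0] += eps
    numeric = (np.sum(-np.log(softmax(z_pert)) * y) - np.sum(-np.log(softmax(z)) * y)) / eps
    assert np.allclose(numeric, analytic[0, 0], atol=1e-4)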
class ReduceMeanOp(_BaseOp):
"""Op that reduces a tensor to its mean (average) along an axis.
Parameters
----------
X: `_BaseOp`;
Input tensor
axis: integer;
The axis along which `X` is averaged
sess: `Session`;
The session that the Op is associated with
"""
def __init__(self, X, axis, sess=None):
super(ReduceMeanOp, self).__init__(sess)
self.shape = X.shape[:axis] + X.shape[axis + 1:]
self.X = X
self.X.parent_total += 1
self.axis = axis
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; N-D array with one less dimension than `X`
"""
return self.X.eval(feed_dict).mean(axis=self.axis)
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
axis, shape = self.axis, self.shape
grad_val = self.sess.gradients[id(self)] / self.X.shape[axis]
grad_val = grad_val.reshape((int(np.prod(shape[:axis])), int(np.prod(shape[axis:]))))
grad_val = np.tile(grad_val, (1, self.X.shape[axis]))
dX_val = grad_val.reshape(self.X.shape)
self.X.grad(feed_dict, dX_val)
class ReduceSumOp(_BaseOp):
"""Op that reduces a tensor to its sum along an axis.
Parameters
----------
X: `_BaseOp`;
Input tensor
axis: integer;
The axis along which `X` is summed
sess: `Session`;
The session that the Op is associated with
"""
def __init__(self, X, axis, sess=None):
super(ReduceSumOp, self).__init__(sess)
self.shape = X.shape[:axis] + X.shape[axis + 1:]
self.X = X
self.X.parent_total += 1
self.axis = axis
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; N-D array with one less dimension than `X`
"""
return self.X.eval(feed_dict).sum(axis=self.axis)
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
axis, shape = self.axis, self.shape
grad_val = self.sess.gradients[id(self)]
grad_val = grad_val.reshape((int(np.prod(shape[:axis])), int(np.prod(shape[axis:]))))
grad_val = np.tile(grad_val, (1, self.X.shape[axis]))
dX_val = grad_val.reshape(self.X.shape)
self.X.grad(feed_dict, dX_val)
class LRNOp(_BaseOp):
"""Op that performs Local Response Normalization.
Parameters
----------
X: `_BaseOp`;
Input 4D tensor
sess: `Session`;
The session that the Op is associated with
depth_radius: integer, defaults to 5;
Half-width of the 1D normalization window
bias: float, defaults to 1.;
Offset
alpha: float, defaults to 1.;
Scale factor
beta: float, defaults to .5;
Exponent
"""
def __init__(self, X, depth_radius, bias, alpha, beta, sess=None):
super(LRNOp, self).__init__(sess)
self.shape = X.shape
self.X = X
self.X.parent_total += 1
self.depth_radius = depth_radius
self.bias = bias
self.alpha = alpha
self.beta = beta
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; value of local response normalization performed on `X`
"""
X_val = self.X.eval(feed_dict)
weighted_sqr_sum = self._weighted_sqr_sum(feed_dict)
X_val = X_val / weighted_sqr_sum ** self.beta
return X_val
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
radius = self.depth_radius
scale = -2 * self.alpha * self.beta
_matrix_shape = -1, self.shape[-1]
X_val = self.X.eval(feed_dict).reshape(_matrix_shape)
_WSS = self._weighted_sqr_sum(feed_dict).reshape(_matrix_shape)
M = X_val * np.power(_WSS, -self.beta - 1)
WSS_p = np.power(_WSS, -self.beta)
grad_val = self.sess.gradients[id(self)].reshape(_matrix_shape)
def _func(i):
X_val_row, M_row, WSS_p_row, grad_val_row = X_val[i], M[i], WSS_p[i], grad_val[i]
def _func1(j):
vec_k = np.zeros(grad_val.shape[1])
def _func2(k):
update = scale * M_row[j] * X_val_row[k]
update = update + WSS_p_row[j] if k == j else update
return update * grad_val_row[j]
indexes = range(max(0, j - radius), min(j + radius + 1, grad_val.shape[1]))
# Wrap map() in list() so this also runs under Python 3, where map is lazy.
vec_k[indexes] += np.array(list(map(_func2, indexes)))
return vec_k
return np.array(list(map(_func1, range(grad_val.shape[1])))).sum(axis=0)
dX_val = np.array(list(map(_func, range(grad_val.shape[0])))).reshape(self.shape)
self.X.grad(feed_dict, dX_val)
def _weighted_sqr_sum(self, feed_dict):
"""Computes the weighted squared sum of local response normalization.
The output, raised to the power of `beta`, is used to normalize the input tensor `X`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; array with the same shape as `X` containing the weighted sum of squares
"""
if "WSS" in self._cache_data:
return self._cache_data["WSS"]
X_val = self.X.eval(feed_dict)
radius = self.depth_radius
X_val = X_val.reshape((-1, self.shape[-1]))
def _func(x):
return [np.sum(np.square(x[max(0, i - radius):
min(i + radius + 1, self.shape[-1])])) for i in range(len(x))]
sqr_sum = np.apply_along_axis(_func, 1, X_val)
weighted_sqr_sum = self.bias + self.alpha * sqr_sum
self._cache_data["WSS"] = weighted_sqr_sum.reshape(self.shape)
return self._cache_data["WSS"]
class MomentsOp(_BaseOp):
"""Op that emits the mean and variance of input tensor `X` over axes `axes`.
The dimensions of `X` are kept in the mean and variance. For example, if `X` has shape
[2, 3, 5, 4] and `axes_drop` = [0, 3], then the resulting mean and variance have shape
[1, 3, 5, 1], and the output of `_eval_func` has shape [2, 1, 3, 5, 1].
Parameters
----------
X: `_BaseOp`;
Input tensor
axes_drop: `list`;
The axes that are normalized over
sess: `Session`;
The session that the Op is associated with
"""
def __init__(self, X, axes_drop, sess=None):
super(MomentsOp, self).__init__(sess)
self._axes_keep = [i for i in range(len(X.shape)) if i not in axes_drop]
self._shape_keep = [X.shape[i] for i in self._axes_keep]
self._shape_drop = [X.shape[i] for i in axes_drop]
self._average_over = int(np.prod(self._shape_drop))
self._shape = np.array(X.shape)
self._shape[axes_drop] = 1
self.shape = tuple([2] + list(self._shape))
self.X = X
self.X.parent_total += 1
self.axes_drop = axes_drop
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; value of the ndarray containing mean and variance of `X`
"""
X_val = self.X.eval(feed_dict)
X_val = X_val.transpose(self.axes_drop + self._axes_keep).\
reshape((-1, int(np.prod(self._shape_keep))))
mean_val, var_val = X_val.mean(axis=0), X_val.var(axis=0)
mean_val, var_val = mean_val.reshape(self._shape), var_val.reshape(self._shape)
return np.array([mean_val, var_val])
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
grad_val = self.sess.gradients[id(self)]
dX_m_val = grad_val[0] / self._average_over
X_val = self.X.eval(feed_dict)
mean_val = self.eval(feed_dict)[0]
dX_v_val = grad_val[1] * (2 * (X_val - mean_val) / self._average_over)
self.X.grad(feed_dict, dX_m_val + dX_v_val)
class BatchNormOp(_BaseOp):
"""Op that performs batch normalization.
http://arxiv.org/abs/1502.03167
Parameters
----------
X: `_BaseOp`;
Input tensor to be batch-normalized
mean_var: `_BaseOp`;
Tensor containing mean and variance of `X`. Emitted by `MomentsOp`.
offset: `_BaseOp`;
Offset tensor (i.e. `beta` in the paper). Has the same shape as mean.
scale: `_BaseOp`;
Scale tensor (i.e. `gamma` in the paper). Has the same shape as var.
epsilon: float;
Small float to avoid dividing by zero
sess: `Session`;
The session that the Op is associated with
"""
def __init__(self, X, mean_var, offset, scale, epsilon, sess=None):
super(BatchNormOp, self).__init__(sess)
_indexes = np.arange(len(X.shape))
self.axes_drop = list(_indexes[np.array(X.shape) != np.array(offset.shape)])
self._axes_keep = list(_indexes[np.array(X.shape) == np.array(offset.shape)])
self._shape_keep = [X.shape[i] for i in self._axes_keep]
self.shape = X.shape
self.X = X
self.X.parent_total += 1
self.mean_var = mean_var
self.mean_var.parent_total += 1
self.offset = offset
self.offset.parent_total += 1
self.scale = scale
self.scale.parent_total += 1
self.epsilon = epsilon
def _eval_func(self, feed_dict):
"""Function that outputs the value of the tensor.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; value of batch normalization performed on `X`
"""
X_val, epsilon = self.X.eval(feed_dict), self.epsilon
mean_val, var_val = self.mean_var.eval(feed_dict)
offset_val, scale_val = self.offset.eval(feed_dict), self.scale.eval(feed_dict)
standard_X_val = self._standard_X(X_val, mean_val, var_val, epsilon)
val = scale_val * standard_X_val + offset_val
return val
def _grad_func(self, feed_dict):
"""Propagate gradient down to `X`, `mean_var`, `offset`, and `scale`.
Parameters
----------
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
grad_val = self.sess.gradients[id(self)]
X_val, epsilon = self.X.eval(feed_dict), self.epsilon
mean_val, var_val = self.mean_var.eval(feed_dict)
offset_val, scale_val = self.offset.eval(feed_dict), self.scale.eval(feed_dict)
dStandard_X = grad_val * scale_val
d_mean_val = dStandard_X * np.power(var_val + epsilon, -0.5)
d_mean_val = -self._postprocess(d_mean_val, self.mean_var.shape[1:])
d_var_val = dStandard_X * (X_val - mean_val) * np.power(var_val + epsilon, -1.5)
d_var_val = -0.5 * self._postprocess(d_var_val, self.mean_var.shape[1:])
self.mean_var.grad(feed_dict, np.array([d_mean_val, d_var_val]))
dX_val = dStandard_X * np.power(var_val + epsilon, -0.5)
self.X.grad(feed_dict, dX_val)
d_offset_val = self._postprocess(grad_val, self.offset.shape)
self.offset.grad(feed_dict, d_offset_val)
d_scale_val = self._standard_X(X_val, mean_val, var_val, epsilon) * grad_val
d_scale_val = self._postprocess(d_scale_val, self.scale.shape)
self.scale.grad(feed_dict, d_scale_val)
def _standard_X(self, X_val, mean_val, var_val, epsilon):
"""Computes the standardized `X` (i.e. Zero mean and unit variance).
Parameters
----------
X_val: `numpy.ndarray`;
Input array
mean_val: `numpy.ndarray`;
Mean of `X_val`
var_val: `numpy.ndarray`;
Variance of `X_val`
Returns
-------
`numpy.ndarray`; standardized `X`
"""
if "standard_X" not in self._cache_data:
self._cache_data["standard_X"] = (X_val - mean_val) / np.sqrt(var_val + epsilon)
return self._cache_data["standard_X"]
def _postprocess(self, array, shape):
"""Postprocess input `array` for computing gradients.
Parameters
----------
array: `numpy.ndarray`;
2D input array to be processed
shape: `list`;
The desired shape
Returns
-------
`numpy.ndarray`; the post-processed array
"""
array = array.transpose(self.axes_drop + self._axes_keep).\
reshape((-1, int(np.prod(self._shape_keep)))).sum(axis=0).\
reshape(shape)
return array
class Session(object):
""" Session that keeps track of the following info of all the Operations (Op) in a data flow
graph across iterations of backpropagation:
`variables`: Op's
`values`: Value of each node (i.e. tensor emitted by `Op`) in the graph
`gradients`: Gradients w.r.t. each node (i.e. tensor emitted by `Op`) in the graph
`dtype`: Data type of arrays
"""
def __init__(self, dtype=np.float32):
self.variables = []
self.values = {}
self.gradients = {}
self.dtype = dtype
def _start(self, obj_tensor, feed_dict):
"""Set the objective tensor and kick off the gradient computation.
This function sets the `parent_total` of `obj_tensor` to 1, initializes the gradient w.r.t. each
component of `obj_tensor` to 1., and backprops this gradient THROUGHOUT THE ENTIRE DATA FLOW
GRAPH by invoking the `grad()` and `_grad_func()` methods of each Op recursively. In the end the
gradient w.r.t each Op (except for tensors containing constant input data) is computed and
stored in the dict `sess.gradients`.
Parameters
----------
obj_tensor: `_BaseOp`;
The objective function to be optimized (e.g. loss)
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
obj_tensor.parent_total = 1
obj_tensor.grad(feed_dict, np.ones(obj_tensor.shape))
def sgd_update(self, params, obj_tensor, feed_dict):
"""Update the tunable parameters using Stochastic Gradient Descent (SGD) algorithm.
Parameters
----------
params: `params`;
dict: containing hyperparameters
obj_tensor: `_BaseOp`;
The objective function to be optimized (e.g. loss), with shape (1, 1) or ()
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
self._start(obj_tensor, feed_dict)
alpha = params["alpha"]
tensor_value_list = [(tensor, value) for tensor, value in zip(feed_dict.keys(),
feed_dict.values()) if tensor.is_variable]
updated_value_list = [(tensor, value - alpha * self.gradients[id(tensor)])
for tensor, value in tensor_value_list if id(tensor) in self.gradients]
for tensor, value in updated_value_list:
feed_dict[tensor] = value
self._reset()
def adam_update(self, params, obj_tensor, feed_dict):
"""Update the tunable parameters using Adaptive Moment Estimation (Adam) algorithm.
http://arxiv.org/abs/1412.6980
Parameters
----------
params: `params`;
dict: containing hyperparameters
obj_tensor: `_BaseOp`;
The objective function to be optimized (e.g. loss), with shape (1, 1) or ()
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
"""
self._start(obj_tensor, feed_dict)
alpha, beta1, beta2 = params["alpha"], params["beta1"], params["beta2"]
epsilon, t, m, v = params["epsilon"], params["t"], params["m"], params["v"]
alpha_t = alpha if t < 1 else alpha * np.sqrt(1 - np.power(beta2, t)) / (1 - np.power(beta1, t))
for tensor in feed_dict.keys():
# Skip constant inputs and any tensor that did not receive a gradient in this iteration.
if not tensor.is_variable or id(tensor) not in self.gradients:
continue
g = self.gradients[id(tensor)]
m[tensor] = beta1 * m[tensor] + (1 - beta1) * g
v[tensor] = beta2 * v[tensor] + (1 - beta2) * g * g
feed_dict[tensor] += -alpha_t * m[tensor] / (np.sqrt(v[tensor]) + epsilon)
params["m"], params["v"] = m, v
params["t"] += 1
self._reset()
def eval_tensor(self, tensor, feed_dict):
"""Evaluate a tensor.
Parameters
----------
tensor: `_BaseOp`;
The tensor whose value is to be computed
feed_dict: `dict`;
dict: {id(`Op`): `numpy.ndarray`}
Returns
-------
`numpy.ndarray`; n-D array containing the value of `tensor`
"""
tensor_val = tensor.eval(feed_dict)
self._reset()
return tensor_val
def _reset(self):
"""Reset data associated with Op's in each iteration."""
self.values = {}
self.gradients = {}
for op in self.variables:
op.parent_acc = 0
op._cache_data = {}
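# A minimal end-to-end sketch of how the Ops and Session defined in this file compose, assuming
# the API exactly as written above. Shapes, data, and hyperparameters are illustrative
# assumptions: a one-layer softmax classifier takes a single SGD step and returns its loss.
def _demo_one_sgd_step():
    sess = Session()
    X = PlaceholderOp((4, 3), is_variable=False, sess=sess)       # batch of 4, 3 features
    W = PlaceholderOp((3, 2), is_variable=True, sess=sess)        # trainable weights
    labels = PlaceholderOp((4, 2), is_variable=False, sess=sess)  # one-hot targets
    logits = MatMulOp(X, W, sess=sess)
    loss = ReduceMeanOp(
        SoftmaxCrossEntropyWithLogitsOp(labels, logits, sess=sess), axis=0, sess=sess)
    rng = np.random.RandomState(0)
    feed_dict = {X: rng.randn(4, 3),
                 W: 0.01 * rng.randn(3, 2),
                 labels: np.eye(2)[[0, 1, 0, 1]]}
    sess.sgd_update({"alpha": 0.1}, loss, feed_dict)  # backprop + one gradient step on W
    return sess.eval_tensor(loss, feed_dict)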
| 33.648746
| 100
| 0.635723
|
17b6b0e20f79b58c3892da663d62d9ef39e2d612
| 1,565
|
py
|
Python
|
csv_Output_match_norm.py
|
maxcurie1996/SLiM
|
bdb480f7e73ce9d9b3ff58e4b0245c514e2c64ed
|
[
"MIT"
] | null | null | null |
csv_Output_match_norm.py
|
maxcurie1996/SLiM
|
bdb480f7e73ce9d9b3ff58e4b0245c514e2c64ed
|
[
"MIT"
] | null | null | null |
csv_Output_match_norm.py
|
maxcurie1996/SLiM
|
bdb480f7e73ce9d9b3ff58e4b0245c514e2c64ed
|
[
"MIT"
] | null | null | null |
import pandas as pd
input_para='./Test_files/Output_test/parameter_list.csv'
input_calc='./Test_files/Output_test/0Disperson_calc.csv'
output_name='./Test_files/Output_test/joint_and_normed.csv'
mode=1 #mode 1 is for results from the MPI calculations (normalized frequency and growth rate)
#mode 2 is for results calculated on a PC (frequency in kHz and growth rate in cs/a)
df_para=pd.read_csv(input_para)
df_calc=pd.read_csv(input_calc)
df_para_key=df_para.keys()
df_calc_key=df_calc.keys()
for i in range(len(df_para)):
for j in range(len(df_calc)):
if df_para['nu'][i]==df_calc['nu'][j]\
and df_para['zeff'][i]==df_calc['zeff'][j]\
and df_para['eta'][i]==df_calc['eta'][j]\
and df_para['ky'][i]==df_calc['ky'][j]\
and df_para['mu'][i]==df_calc['mu'][j]\
and df_para['xstar'][i]==df_calc['xstar'][j]\
and df_para['ModIndex'][i]==df_calc['ModIndex'][j]:
if mode==1:
#use .loc so the assignment modifies df_para in place instead of a chained copy
df_para.loc[i,'omega_plasma_kHz']=df_calc['omega_omega_n'][j]*df_para['omega_n_kHz'][i]
df_para.loc[i,'gamma_cs_a']=df_calc['gamma_omega_n'][j]*df_para['omega_n_cs_a'][i]
df_para.loc[i,'omega_lab_kHz']=df_para['omega_plasma_kHz'][i]\
-df_para['omega_e_plasma_kHz'][i]\
+df_para['omega_e_lab_kHz'][i]
elif mode==2:
df_para.loc[i,'omega_plasma_kHz']=df_calc['omega_plasma_kHz'][j]
df_para.loc[i,'omega_lab_kHz']=df_calc['omega_lab_kHz'][j]
df_para.loc[i,'gamma_cs_a']=df_calc['gamma_cs_a'][j]
df_out=pd.DataFrame(df_para, columns=df_para.keys()) #construct the panda dataframe
df_out.to_csv(output_name,index=False)
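#A hedged alternative sketch (not used by the script above): the nested-loop matching can be
#expressed as a single vectorized merge on the shared key columns; the mode-specific arithmetic
#would then be applied column-wise on the joined frame. The suffix names are assumptions.
key_cols=['nu','zeff','eta','ky','mu','xstar','ModIndex']
df_joint=pd.merge(df_para,df_calc,on=key_cols,how='left',suffixes=('','_calc'))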
| 38.170732
| 101
| 0.703514
|
dd4b2190b28f08b4704bb017661253da96d012f4
| 3,463
|
py
|
Python
|
django_extensions/tests/test_dumpscript.py
|
watc/django-extensions
|
695af4098355148c477382425dc4e20d3e09a30d
|
[
"MIT"
] | 1
|
2016-11-29T21:31:59.000Z
|
2016-11-29T21:31:59.000Z
|
django_extensions/tests/test_dumpscript.py
|
watc/django-extensions
|
695af4098355148c477382425dc4e20d3e09a30d
|
[
"MIT"
] | null | null | null |
django_extensions/tests/test_dumpscript.py
|
watc/django-extensions
|
695af4098355148c477382425dc4e20d3e09a30d
|
[
"MIT"
] | null | null | null |
import sys
# conditional imports for python 3
try:
import compiler # NOQA
from StringIO import StringIO # NOQA
except ImportError:
import ast as compiler # NOQA
from io import StringIO # NOQA
from django.test import TestCase
from django.core.management import call_command
from django_extensions.tests.models import Name, Note, Person
from django.conf import settings
from django.db.models import loading
class DumpScriptTests(TestCase):
def setUp(self):
self.real_stdout = sys.stdout
self.real_stderr = sys.stderr
sys.stdout = StringIO()
sys.stderr = StringIO()
self.original_installed_apps = settings.INSTALLED_APPS
settings.INSTALLED_APPS = list(settings.INSTALLED_APPS)
settings.INSTALLED_APPS.append('django_extensions.tests')
loading.cache.loaded = False
call_command('syncdb', verbosity=0)
def tearDown(self):
sys.stdout = self.real_stdout
sys.stderr = self.real_stderr
settings.INSTALLED_APPS.remove('django_extensions.tests')
settings.INSTALLED_APPS = self.original_installed_apps
loading.cache.loaded = False
def test_runs(self):
# lame test...does it run?
n = Name(name='Gabriel')
n.save()
call_command('dumpscript', 'tests')
self.assertTrue('Gabriel' in sys.stdout.getvalue())
#----------------------------------------------------------------------
def test_replaced_stdout(self):
# check if stdout can be replaced
sys.stdout = StringIO()
n = Name(name='Mike')
n.save()
tmp_out = StringIO()
call_command('dumpscript', 'tests', stdout=tmp_out)
self.assertTrue('Mike' in tmp_out.getvalue()) # script should go to tmp_out
self.assertEqual(0, len(sys.stdout.getvalue())) # there should not be any output to sys.stdout
tmp_out.close()
#----------------------------------------------------------------------
def test_replaced_stderr(self):
# check if stderr can be replaced, without changing stdout
n = Name(name='Fred')
n.save()
tmp_err = StringIO()
sys.stderr = StringIO()
call_command('dumpscript', 'tests', stderr=tmp_err)
self.assertTrue('Fred' in sys.stdout.getvalue()) # script should still go to stdout
self.assertTrue('Name' in tmp_err.getvalue()) # error output should go to tmp_err
self.assertEqual(0, len(sys.stderr.getvalue())) # there should not be any output to sys.stderr
tmp_err.close()
#----------------------------------------------------------------------
def test_valid_syntax(self):
n1 = Name(name='John')
n1.save()
p1 = Person(name=n1, age=40)
p1.save()
n2 = Name(name='Jane')
n2.save()
p2 = Person(name=n2, age=18)
p2.save()
p2.children.add(p1)
note1 = Note(note="This is the first note.")
note1.save()
note2 = Note(note="This is the second note.")
note2.save()
p2.notes.add(note1, note2)
tmp_out = StringIO()
call_command('dumpscript', 'tests', stdout=tmp_out)
ast_syntax_tree = compiler.parse(tmp_out.getvalue())
if hasattr(ast_syntax_tree, 'body'):
self.assertTrue(len(ast_syntax_tree.body) > 1)
else:
self.assertTrue(len(ast_syntax_tree.asList()) > 1)
tmp_out.close()
| 36.072917
| 103
| 0.596881
|
69cabf467d85a31676a6a93e76a9ce9a8b553d61
| 4,466
|
py
|
Python
|
gamestonk_terminal/common/technical_analysis/trend_indicators_view.py
|
Aerex/GamestonkTerminal
|
680e0cd278f0d8e45031cdc9d51f247e9aa90ce1
|
[
"MIT"
] | 3
|
2021-02-28T09:54:47.000Z
|
2021-03-11T17:42:35.000Z
|
gamestonk_terminal/common/technical_analysis/trend_indicators_view.py
|
Aerex/GamestonkTerminal
|
680e0cd278f0d8e45031cdc9d51f247e9aa90ce1
|
[
"MIT"
] | 3
|
2022-02-28T03:37:52.000Z
|
2022-02-28T03:37:53.000Z
|
gamestonk_terminal/common/technical_analysis/trend_indicators_view.py
|
Aerex/GamestonkTerminal
|
680e0cd278f0d8e45031cdc9d51f247e9aa90ce1
|
[
"MIT"
] | 1
|
2021-11-20T16:09:48.000Z
|
2021-11-20T16:09:48.000Z
|
"""Trend Indicators View"""
__docformat__ = "numpy"
import os
import matplotlib.pyplot as plt
import pandas as pd
from pandas.plotting import register_matplotlib_converters
from gamestonk_terminal import feature_flags as gtff
from gamestonk_terminal.common.technical_analysis import trend_indicators_model
from gamestonk_terminal.config_plot import PLOT_DPI
from gamestonk_terminal.helper_funcs import export_data, plot_autoscale
register_matplotlib_converters()
def plot_adx(
s_ticker: str,
s_interval: str,
df_stock: pd.DataFrame,
length: int,
scalar: int,
drift: int,
export: str,
):
"""Plot ADX indicator
Parameters
----------
s_ticker : str
Ticker
s_interval : str
Interval for data
df_stock : pd.DataFrame
Dataframe of prices
length : int
Length of window
scalar : int
Scalar variable
drift : int
Drift variable
export: str
Format to export data
"""
df_ta = trend_indicators_model.adx(s_interval, df_stock, length, scalar, drift)
fig, ax = plt.subplots(2, 1, figsize=plot_autoscale(), dpi=PLOT_DPI)
ax0 = ax[0]
ax0.plot(df_stock.index, df_stock["Close"].values, "k", lw=2)
ax0.set_title(f"Average Directional Movement Index (ADX) on {s_ticker}")
ax0.set_xlim(df_stock.index[0], df_stock.index[-1])
ax0.set_ylabel("Share Price ($)")
ax0.grid(b=True, which="major", color="#666666", linestyle="-")
ax1 = ax[1]
ax1.plot(df_ta.index, df_ta.iloc[:, 0].values, "b", lw=2)
ax1.plot(df_ta.index, df_ta.iloc[:, 1].values, "g", lw=1)
ax1.plot(df_ta.index, df_ta.iloc[:, 2].values, "r", lw=1)
ax1.set_xlim(df_stock.index[0], df_stock.index[-1])
ax1.axhline(25, linewidth=3, color="k", ls="--")
ax1.legend(
[
f"ADX ({df_ta.columns[0]})",
f"+DI ({df_ta.columns[1]})",
f"- DI ({df_ta.columns[2]})",
],
loc="upper left",
)
ax1.set_xlabel("Time")
ax1.grid(b=True, which="major", color="#666666", linestyle="-")
ax1.set_ylim([0, 100])
if gtff.USE_ION:
plt.ion()
fig.tight_layout()
plt.gcf().autofmt_xdate()
plt.show()
print("")
export_data(
export,
os.path.dirname(os.path.abspath(__file__)).replace("common", "stocks"),
"adx",
df_ta,
)
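# Hedged usage sketch (not part of this module): one way `plot_adx` might be called with a daily
# price dataframe. The yfinance download is an assumption used only for illustration; any
# dataframe with a DatetimeIndex and "Close"/"Adj Close" columns should behave the same way.
def _example_plot_adx():
    import yfinance as yf  # assumption: external package, not a dependency of this module
    df_aapl = yf.download("AAPL", period="1y")
    plot_adx(s_ticker="AAPL", s_interval="1440min", df_stock=df_aapl,
             length=14, scalar=100, drift=1, export="")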
def plot_aroon(
s_ticker: str,
s_interval: str,
df_stock: pd.DataFrame,
length: int,
scalar: int,
export: str,
):
"""Plot Aroon indicator
Parameters
----------
s_ticker : str
Ticker
s_interval: str
Interval of price data
df_stock : pd.DataFrame
Dataframe of prices
length : int
Length of window
scalar : int
Scalar variable
export : str
Format to export data
"""
df_ta = trend_indicators_model.aroon(df_stock, length, scalar)
fig, ax = plt.subplots(3, 1, figsize=plot_autoscale(), dpi=PLOT_DPI)
ax0 = ax[0]
# Daily
if s_interval == "1440min":
ax0.plot(df_stock.index, df_stock["Adj Close"].values, "k", lw=2)
# Intraday
else:
ax0.plot(df_stock.index, df_stock["Close"].values, "k", lw=2)
ax0.set_title(f"Aroon on {s_ticker}")
ax0.set_xlim(df_stock.index[0], df_stock.index[-1])
ax0.set_ylabel("Share Price ($)")
ax0.grid(b=True, which="major", color="#666666", linestyle="-")
ax1 = ax[1]
ax1.plot(df_ta.index, df_ta.iloc[:, 0].values, "r", lw=2)
ax1.plot(df_ta.index, df_ta.iloc[:, 1].values, "g", lw=2)
ax1.set_xlim(df_stock.index[0], df_stock.index[-1])
ax1.axhline(50, linewidth=1, color="k", ls="--")
ax1.legend(
[f"Aroon DOWN ({df_ta.columns[0]})", f"Aroon UP ({df_ta.columns[1]})"],
loc="upper left",
)
ax1.grid(b=True, which="major", color="#666666", linestyle="-")
ax1.set_ylim([0, 100])
ax2 = ax[2]
ax2.plot(df_ta.index, df_ta.iloc[:, 2].values, "b", lw=2)
ax2.set_xlim(df_stock.index[0], df_stock.index[-1])
ax2.set_xlabel("Time")
ax2.legend([f"Aroon OSC ({df_ta.columns[2]})"], loc="upper left")
ax2.grid(b=True, which="major", color="#666666", linestyle="-")
ax2.set_ylim([-100, 100])
if gtff.USE_ION:
plt.ion()
fig.tight_layout(pad=1)
plt.show()
plt.gcf().autofmt_xdate()
print("")
export_data(
export,
os.path.dirname(os.path.abspath(__file__)).replace("common", "stocks"),
"aroon",
df_ta,
)
| 27.066667
| 83
| 0.610614
|
2b6b4656e11013f2f963a379fad6c8b7a6d48495
| 1,466
|
py
|
Python
|
tests/test_settlers_of_the_north_pole.py
|
nullus/advent2018
|
b7b139d0128c74e3a744c14367ebb34775ffe747
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_settlers_of_the_north_pole.py
|
nullus/advent2018
|
b7b139d0128c74e3a744c14367ebb34775ffe747
|
[
"BSD-2-Clause"
] | 1
|
2021-06-10T04:56:55.000Z
|
2021-06-10T04:56:55.000Z
|
tests/test_settlers_of_the_north_pole.py
|
nullus/advent2018
|
b7b139d0128c74e3a744c14367ebb34775ffe747
|
[
"BSD-2-Clause"
] | null | null | null |
# coding: utf-8
#
# Copyright (c) 2018, Dylan Perry <dylan.perry@gmail.com>. All rights reserved.
# Licensed under BSD 2-Clause License. See LICENSE file for full license.
from pytest import mark
from advent.input import text
from advent.settlers_of_the_north_pole import parse, resource_value, next_state, part1, part2
test_area_initial_str = r'''
.#.#...|#.
.....#|##|
.|..|...#.
..|#.....#
#.#|||#|#|
...#.||...
.|....|...
||...#|.#|
|.||||..|.
...#.|..|.
'''.strip()
test_area_step1_str = r'''
.......##.
......|###
.|..|...#.
..|#||...#
..##||.|#|
...#||||..
||...|||..
|||||.||.|
||||||||||
....||..|.
'''.strip()
test_area_step10_str = r'''
.||##.....
||###.....
||##......
|##.....##
|##.....##
|##....##|
||##.####|
||#####|||
||||#|||||
||||||||||
'''.strip()
def test_parse_map():
# Sample some regions in the test area
map_ = parse(test_area_initial_str)
assert '.' == map_[0][0]
assert '#' == map_[2][8]
assert '|' == map_[8][0]
def test_resource_value():
assert 1147 == resource_value(parse(test_area_step10_str))
def test_next_state():
assert parse(test_area_step1_str) == next_state(parse(test_area_initial_str))
def test_part1():
assert 1147 == part1(test_area_initial_str)
def test_part1_with_puzzle_input():
assert 355918 == part1(text("settlers_of_the_north_pole"))
@mark.slow
def test_part2_with_puzzle_input():
assert 202806 == part2(text("settlers_of_the_north_pole"), 1_000_000_000)
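# Hedged note (assumes the project registers a custom "slow" pytest marker):
# the marked test above can be deselected for a quick local run, e.g.
#
#     pytest tests/test_settlers_of_the_north_pole.py -m "not slow"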
| 18.325
| 93
| 0.562756
|
04877f255c3cbf2af46bba902e4d6856170065de
| 16,493
|
py
|
Python
|
wagtail/tests/demosite/models.py
|
balkantechnologies/BalkanCMS_core
|
68625199028fc96abb175e410a4a7a92c02cb261
|
[
"BSD-3-Clause"
] | 1
|
2021-09-21T00:06:52.000Z
|
2021-09-21T00:06:52.000Z
|
wagtail/tests/demosite/models.py
|
balkantechnologies/BalkanCMS_core
|
68625199028fc96abb175e410a4a7a92c02cb261
|
[
"BSD-3-Clause"
] | 1
|
2021-02-24T08:25:30.000Z
|
2021-02-24T08:25:30.000Z
|
wagtail/tests/demosite/models.py
|
balkantechnologies/BalkanCMS_core
|
68625199028fc96abb175e410a4a7a92c02cb261
|
[
"BSD-3-Clause"
] | 1
|
2020-11-24T10:21:24.000Z
|
2020-11-24T10:21:24.000Z
|
from datetime import date
from django.db import models
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from modelcluster.fields import ParentalKey
from modelcluster.tags import ClusterTaggableManager
from taggit.models import TaggedItemBase
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailadmin.edit_handlers import FieldPanel, MultiFieldPanel, \
InlinePanel, PageChooserPanel
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
from wagtail.wagtaildocs.edit_handlers import DocumentChooserPanel
from wagtail.wagtailsearch import index
# ABSTRACT MODELS
# =============================
class AbstractLinkFields(models.Model):
link_external = models.URLField("External link", blank=True)
link_page = models.ForeignKey(
'wagtailcore.Page',
null=True,
blank=True,
related_name='+'
)
link_document = models.ForeignKey(
'wagtaildocs.Document',
null=True,
blank=True,
related_name='+'
)
@property
def link(self):
if self.link_page:
return self.link_page.url
elif self.link_document:
return self.link_document.url
else:
return self.link_external
api_fields = ('link', )
panels = [
FieldPanel('link_external'),
PageChooserPanel('link_page'),
DocumentChooserPanel('link_document'),
]
class Meta:
abstract = True
class AbstractRelatedLink(AbstractLinkFields):
title = models.CharField(max_length=255, help_text="Link title")
api_fields = ('title', ) + AbstractLinkFields.api_fields
panels = [
FieldPanel('title'),
MultiFieldPanel(AbstractLinkFields.panels, "Link"),
]
class Meta:
abstract = True
class AbstractCarouselItem(AbstractLinkFields):
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
embed_url = models.URLField("Embed URL", blank=True)
caption = models.CharField(max_length=255, blank=True)
api_fields = (
'image',
'embed_url',
'caption',
) + AbstractLinkFields.api_fields
panels = [
ImageChooserPanel('image'),
FieldPanel('embed_url'),
FieldPanel('caption'),
MultiFieldPanel(AbstractLinkFields.panels, "Link"),
]
class Meta:
abstract = True
class ContactFieldsMixin(models.Model):
telephone = models.CharField(max_length=20, blank=True)
email = models.EmailField(blank=True)
address_1 = models.CharField(max_length=255, blank=True)
address_2 = models.CharField(max_length=255, blank=True)
city = models.CharField(max_length=255, blank=True)
country = models.CharField(max_length=255, blank=True)
post_code = models.CharField(max_length=10, blank=True)
api_fields = (
'telephone',
'email',
'address_1',
'address_2',
'city',
'country',
'post_code',
)
panels = [
FieldPanel('telephone'),
FieldPanel('email'),
FieldPanel('address_1'),
FieldPanel('address_2'),
FieldPanel('city'),
FieldPanel('country'),
FieldPanel('post_code'),
]
class Meta:
abstract = True
# PAGE MODELS
# =============================
# Home page
class HomePage(Page):
page_ptr = models.OneToOneField(Page, parent_link=True, related_name='+')
body = RichTextField(blank=True)
api_fields = (
'body',
'carousel_items',
'related_links',
)
search_fields = Page.search_fields + (
index.SearchField('body'),
)
class Meta:
verbose_name = "Homepage"
class HomePageCarouselItem(Orderable, AbstractCarouselItem):
page = ParentalKey('HomePage', related_name='carousel_items')
class HomePageRelatedLink(Orderable, AbstractRelatedLink):
page = ParentalKey('HomePage', related_name='related_links')
HomePage.content_panels = Page.content_panels + [
FieldPanel('body', classname="full"),
InlinePanel('carousel_items', label="Carousel items"),
InlinePanel('related_links', label="Related links"),
]
# Standard pages
class StandardPage(Page):
page_ptr = models.OneToOneField(Page, parent_link=True, related_name='+')
intro = RichTextField(blank=True)
body = RichTextField(blank=True)
feed_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
api_fields = (
'intro',
'body',
'feed_image',
'carousel_items',
'related_links',
)
search_fields = Page.search_fields + (
index.SearchField('intro'),
index.SearchField('body'),
)
class StandardPageCarouselItem(Orderable, AbstractCarouselItem):
page = ParentalKey('StandardPage', related_name='carousel_items')
class StandardPageRelatedLink(Orderable, AbstractRelatedLink):
page = ParentalKey('StandardPage', related_name='related_links')
StandardPage.content_panels = Page.content_panels + [
FieldPanel('intro', classname="full"),
InlinePanel('carousel_items', label="Carousel items"),
FieldPanel('body', classname="full"),
InlinePanel('related_links', label="Related links"),
]
StandardPage.promote_panels = [
MultiFieldPanel(Page.promote_panels, "Common page configuration"),
ImageChooserPanel('feed_image'),
]
class StandardIndexPage(Page):
page_ptr = models.OneToOneField(Page, parent_link=True, related_name='+')
intro = RichTextField(blank=True)
feed_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
api_fields = (
'intro',
'feed_image',
'related_links',
)
search_fields = Page.search_fields + (
index.SearchField('intro'),
)
class StandardIndexPageRelatedLink(Orderable, AbstractRelatedLink):
page = ParentalKey('StandardIndexPage', related_name='related_links')
StandardIndexPage.content_panels = Page.content_panels + [
FieldPanel('intro', classname="full"),
InlinePanel('related_links', label="Related links"),
]
StandardIndexPage.promote_panels = [
MultiFieldPanel(Page.promote_panels, "Common page configuration"),
ImageChooserPanel('feed_image'),
]
# Blog pages
class BlogEntryPage(Page):
page_ptr = models.OneToOneField(Page, parent_link=True, related_name='+')
body = RichTextField()
tags = ClusterTaggableManager(through='BlogEntryPageTag', blank=True)
date = models.DateField("Post date")
feed_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
api_fields = (
'body',
'tags',
'date',
'feed_image',
'carousel_items',
'related_links',
)
search_fields = Page.search_fields + (
index.SearchField('body'),
)
def get_blog_index(self):
# Find closest ancestor which is a blog index
        return BlogIndexPage.objects.ancestor_of(self).last()
class BlogEntryPageCarouselItem(Orderable, AbstractCarouselItem):
page = ParentalKey('BlogEntryPage', related_name='carousel_items')
class BlogEntryPageRelatedLink(Orderable, AbstractRelatedLink):
page = ParentalKey('BlogEntryPage', related_name='related_links')
class BlogEntryPageTag(TaggedItemBase):
content_object = ParentalKey('BlogEntryPage', related_name='tagged_items')
BlogEntryPage.content_panels = Page.content_panels + [
FieldPanel('date'),
FieldPanel('body', classname="full"),
InlinePanel('carousel_items', label="Carousel items"),
InlinePanel('related_links', label="Related links"),
]
BlogEntryPage.promote_panels = [
MultiFieldPanel(Page.promote_panels, "Common page configuration"),
ImageChooserPanel('feed_image'),
FieldPanel('tags'),
]
class BlogIndexPage(Page):
page_ptr = models.OneToOneField(Page, parent_link=True, related_name='+')
intro = RichTextField(blank=True)
api_fields = (
'intro',
'related_links',
)
search_fields = Page.search_fields + (
index.SearchField('intro'),
)
def get_blog_entries(self):
# Get list of live blog pages that are descendants of this page
entries = BlogEntryPage.objects.descendant_of(self).live()
# Order by most recent date first
entries = entries.order_by('-date')
return entries
def get_context(self, request):
# Get blog entries
entries = self.get_blog_entries()
# Filter by tag
tag = request.GET.get('tag')
if tag:
entries = entries.filter(tags__name=tag)
# Pagination
page = request.GET.get('page')
paginator = Paginator(entries, 10) # Show 10 entries per page
try:
entries = paginator.page(page)
except PageNotAnInteger:
entries = paginator.page(1)
except EmptyPage:
entries = paginator.page(paginator.num_pages)
# Update template context
context = super(BlogIndexPage, self).get_context(request)
context['entries'] = entries
return context
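# Hedged usage note (illustrative, not part of the original module): get_context()
# above reads the optional "tag" and "page" query parameters, so a request such as
# /blog/?tag=django&page=2 renders the second page of live blog entries tagged
# "django" via context["entries"].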
class BlogIndexPageRelatedLink(Orderable, AbstractRelatedLink):
page = ParentalKey('BlogIndexPage', related_name='related_links')
BlogIndexPage.content_panels = Page.content_panels + [
FieldPanel('intro', classname="full"),
InlinePanel('related_links', label="Related links"),
]
# Events pages
class EventPage(Page):
page_ptr = models.OneToOneField(Page, parent_link=True, related_name='+')
AUDIENCE_CHOICES = (
('public', "Public"),
('private', "Private"),
)
date_from = models.DateField("Start date")
date_to = models.DateField(
"End date",
null=True,
blank=True,
help_text="Not required if event is on a single day"
)
time_from = models.TimeField("Start time", null=True, blank=True)
time_to = models.TimeField("End time", null=True, blank=True)
audience = models.CharField(max_length=255, choices=AUDIENCE_CHOICES)
location = models.CharField(max_length=255)
body = RichTextField(blank=True)
cost = models.CharField(max_length=255)
signup_link = models.URLField(blank=True)
feed_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
api_fields = (
'date_from',
'date_to',
'time_from',
'time_to',
'audience',
'location',
'body',
'cost',
'signup_link',
'feed_image',
'carousel_items',
'related_links',
'speakers',
)
search_fields = Page.search_fields + (
index.SearchField('get_audience_display'),
index.SearchField('location'),
index.SearchField('body'),
)
def get_event_index(self):
# Find closest ancestor which is an event index
        return EventIndexPage.objects.ancestor_of(self).last()
class EventPageCarouselItem(Orderable, AbstractCarouselItem):
page = ParentalKey('EventPage', related_name='carousel_items')
class EventPageRelatedLink(Orderable, AbstractRelatedLink):
page = ParentalKey('EventPage', related_name='related_links')
class EventPageSpeaker(Orderable, AbstractLinkFields):
page = ParentalKey('EventPage', related_name='speakers')
first_name = models.CharField("Name", max_length=255, blank=True)
last_name = models.CharField("Surname", max_length=255, blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
api_fields = (
'first_name',
'last_name',
'image',
)
panels = [
FieldPanel('first_name'),
FieldPanel('last_name'),
ImageChooserPanel('image'),
MultiFieldPanel(AbstractLinkFields.panels, "Link"),
]
EventPage.content_panels = Page.content_panels + [
FieldPanel('date_from'),
FieldPanel('date_to'),
FieldPanel('time_from'),
FieldPanel('time_to'),
FieldPanel('location'),
FieldPanel('audience'),
FieldPanel('cost'),
FieldPanel('signup_link'),
InlinePanel('carousel_items', label="Carousel items"),
FieldPanel('body', classname="full"),
InlinePanel('speakers', label="Speakers"),
InlinePanel('related_links', label="Related links"),
]
EventPage.promote_panels = [
MultiFieldPanel(Page.promote_panels, "Common page configuration"),
ImageChooserPanel('feed_image'),
]
class EventIndexPage(Page):
page_ptr = models.OneToOneField(Page, parent_link=True, related_name='+')
intro = RichTextField(blank=True)
api_fields = (
'intro',
'related_links',
)
search_fields = Page.search_fields + (
index.SearchField('intro'),
)
def get_events(self):
# Get list of live event pages that are descendants of this page
events = EventPage.objects.descendant_of(self).live()
# Filter events list to get ones that are either
# running now or start in the future
events = events.filter(date_from__gte=date.today())
# Order by date
events = events.order_by('date_from')
return events
class EventIndexPageRelatedLink(Orderable, AbstractRelatedLink):
page = ParentalKey('EventIndexPage', related_name='related_links')
EventIndexPage.content_panels = Page.content_panels + [
FieldPanel('intro', classname="full"),
InlinePanel('related_links', label="Related links"),
]
# Person page
class PersonPage(Page, ContactFieldsMixin):
page_ptr = models.OneToOneField(Page, parent_link=True, related_name='+')
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
intro = RichTextField(blank=True)
biography = RichTextField(blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
feed_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
api_fields = (
'first_name',
'last_name',
'intro',
'biography',
'image',
'feed_image',
'related_links',
) + ContactFieldsMixin.api_fields
search_fields = Page.search_fields + (
index.SearchField('first_name'),
index.SearchField('last_name'),
index.SearchField('intro'),
index.SearchField('biography'),
)
class PersonPageRelatedLink(Orderable, AbstractRelatedLink):
page = ParentalKey('PersonPage', related_name='related_links')
PersonPage.content_panels = Page.content_panels + [
FieldPanel('first_name'),
FieldPanel('last_name'),
FieldPanel('intro', classname="full"),
FieldPanel('biography', classname="full"),
ImageChooserPanel('image'),
MultiFieldPanel(ContactFieldsMixin.panels, "Contact"),
InlinePanel('related_links', label="Related links"),
]
PersonPage.promote_panels = [
MultiFieldPanel(Page.promote_panels, "Common page configuration"),
ImageChooserPanel('feed_image'),
]
# Contact page
class ContactPage(Page, ContactFieldsMixin):
page_ptr = models.OneToOneField(Page, parent_link=True, related_name='+')
body = RichTextField(blank=True)
feed_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
api_fields = (
'body',
'feed_image',
) + ContactFieldsMixin.api_fields
search_fields = Page.search_fields + (
index.SearchField('body'),
)
ContactPage.content_panels = Page.content_panels + [
FieldPanel('body', classname="full"),
MultiFieldPanel(ContactFieldsMixin.panels, "Contact"),
]
ContactPage.promote_panels = [
MultiFieldPanel(Page.promote_panels, "Common page configuration"),
ImageChooserPanel('feed_image'),
]
| 27.719328
| 78
| 0.659795
|
6706748301b7dca2f051a9b6eb408e0cb775b34a
| 3,702
|
py
|
Python
|
load_atari_model.py
|
ac-93/soft-actor-critic
|
32d637f7da03fe427ada1db325825bc48507a1dc
|
[
"MIT"
] | 81
|
2020-02-20T13:14:25.000Z
|
2022-03-30T12:36:05.000Z
|
load_atari_model.py
|
ac-93/soft-actor-critic
|
32d637f7da03fe427ada1db325825bc48507a1dc
|
[
"MIT"
] | 3
|
2020-07-28T07:10:05.000Z
|
2021-04-29T18:37:43.000Z
|
load_atari_model.py
|
ac-93/soft-actor-critic
|
32d637f7da03fe427ada1db325825bc48507a1dc
|
[
"MIT"
] | 7
|
2020-07-06T21:46:05.000Z
|
2021-09-15T13:11:55.000Z
|
import sys, os
import numpy as np
import time
import gym
import tensorflow as tf
from spinup.utils.logx import *
from image_observation.sac_discrete_kl_atari.common_utils import *
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.6)
tf_config = tf.compat.v1.ConfigProto(gpu_options=gpu_options)
tf_config.gpu_options.allow_growth = True
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
def load_json_obj(name):
with open(name + '.json', 'r') as fp:
return json.load(fp)
def load_and_test_model(model_dir, model_save_name):
sess = tf.compat.v1.Session(config=tf_config)
model = restore_tf_graph(sess=sess, fpath=os.path.join(model_dir, model_save_name))
config = load_json_obj(os.path.join(model_dir, 'config'))
test_env = gym.make(config['rl_params']['env_name'])
x_ph = model['x']
mu = model['mu']
pi = model['pi']
obs_dim = config['network_params']['input_dims']
test_state_buffer = StateBuffer(m=obs_dim[2])
max_ep_len = config['rl_params']['max_ep_len']
max_noop = config['rl_params']['max_noop']
thresh = config['rl_params']['thresh']
def get_action(state, deterministic=False):
state = state.astype('float32') / 255.
act_op = mu if deterministic else pi
return sess.run(act_op, feed_dict={x_ph: [state]})[0]
def reset(env, state_buffer):
o, r, d, ep_ret, ep_len = env.reset(), 0, False, 0, 0
# fire to start game and perform no-op for some frames to randomise start
o, _, _, _ = env.step(1) # Fire action to start game
for _ in range(np.random.randint(1, max_noop)):
o, _, _, _ = env.step(0) # Action 'NOOP'
o = process_image_observation(o, obs_dim, thresh)
r = process_reward(r)
old_lives = env.ale.lives()
state = state_buffer.init_state(init_obs=o)
return o, r, d, ep_ret, ep_len, old_lives, state
def test_agent(n=10, render=True):
global sess, mu, pi, q1, q2
for j in range(n):
o, r, d, ep_ret, ep_len, test_old_lives, test_state = reset(test_env, test_state_buffer)
terminal_life_lost_test = False
if render: test_env.render()
while not(d or (ep_len == max_ep_len)):
# start by firing
if terminal_life_lost_test:
a = 1
else:
                    # Sample a stochastic action from the current policy at test
                    # time; pass deterministic=True to use the mean action instead.
                    a = get_action(test_state, False)
o, r, d, _ = test_env.step(a)
o = process_image_observation(o, obs_dim, thresh)
r = process_reward(r)
test_state = test_state_buffer.append_state(o)
ep_ret += r
ep_len += 1
if test_env.ale.lives() < test_old_lives:
test_old_lives = test_env.ale.lives()
terminal_life_lost_test = True
else:
terminal_life_lost_test = False
if render: test_env.render()
if render: test_env.close()
print('Ep Return: ', ep_ret)
test_agent(n=5, render=True)
test_env.close()
if __name__ == '__main__':
# model_dir = 'saved_models/sac_discrete_kl_atari_BreakoutDeterministic-v4/sac_discrete_kl_atari_BreakoutDeterministic-v4_s1/'
model_dir = 'saved_models/sac_discrete_pc_atari_BreakoutDeterministic-v4/sac_discrete_pc_atari_BreakoutDeterministic-v4_s2/'
model_save_name = 'simple_save48'
load_and_test_model(model_dir, model_save_name)
| 35.257143
| 130
| 0.634792
|
24018f18a80fb0058f94c89a6ffa6f34c9c5a1f2
| 5,667
|
py
|
Python
|
sumy/evaluation/__main__.py
|
mapado/sumy
|
046aa6302d996f65db064d291d47a158af4b6861
|
[
"Apache-2.0"
] | 1
|
2015-07-28T12:45:55.000Z
|
2015-07-28T12:45:55.000Z
|
sumy/evaluation/__main__.py
|
mapado/sumy
|
046aa6302d996f65db064d291d47a158af4b6861
|
[
"Apache-2.0"
] | null | null | null |
sumy/evaluation/__main__.py
|
mapado/sumy
|
046aa6302d996f65db064d291d47a158af4b6861
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf8 -*-
"""
Sumy - evaluation of automatic text summary.
Usage:
sumy_eval (random | luhn | edmundson | lsa) <reference_summary> [--length=<length>]
sumy_eval (random | luhn | edmundson | lsa) <reference_summary> [--length=<length>] --url=<url>
sumy_eval (random | luhn | edmundson | lsa) <reference_summary> [--length=<length>] --file=<file_path> --format=<file_format>
sumy_eval --version
sumy_eval --help
Options:
<reference_summary> Path to the file with reference summary.
  --url=<url>          URL address of the summarized message.
  --file=<file>        Path to the file with the summarized text.
  --format=<format>    Format of the input file. [default: plaintext]
  --length=<length>    Length of the summarized text. It may be a count of sentences
                       or a percentage of the input text. [default: 20%]
--version Displays version of application.
--help Displays this text.
"""
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
import sys
from itertools import chain
from docopt import docopt
from .. import __version__
from ..utils import ItemsCount, get_stop_words
from ..models import TfDocumentModel
from .._compat import urllib, to_string
from ..nlp.tokenizers import Tokenizer
from ..parsers.html import HtmlParser
from ..parsers.plaintext import PlaintextParser
from ..summarizers.random import RandomSummarizer
from ..summarizers.luhn import LuhnSummarizer
from ..summarizers.edmundson import EdmundsonSummarizer
from ..summarizers.lsa import LsaSummarizer
from ..nlp.stemmers.cs import stem_word
from . import precision, recall, f_score, cosine_similarity, unit_overlap
HEADERS = {
"User-Agent": "Sumy (Automatic text summarizer) Version/%s" % __version__,
}
PARSERS = {
"html": HtmlParser,
"plaintext": PlaintextParser,
}
def build_random(parser):
return RandomSummarizer()
def build_luhn(parser):
summarizer = LuhnSummarizer(stem_word)
summarizer.stop_words = get_stop_words("cs")
return summarizer
def build_edmundson(parser):
summarizer = EdmundsonSummarizer(stem_word)
summarizer.null_words = get_stop_words("cs")
summarizer.bonus_words = parser.significant_words
summarizer.stigma_words = parser.stigma_words
return summarizer
def build_lsa(parser):
summarizer = LsaSummarizer(stem_word)
summarizer.stop_words = get_stop_words("cs")
return summarizer
def evaluate_cosine_similarity(evaluated_sentences, reference_sentences):
evaluated_words = tuple(chain(*(s.words for s in evaluated_sentences)))
reference_words = tuple(chain(*(s.words for s in reference_sentences)))
evaluated_model = TfDocumentModel(evaluated_words)
reference_model = TfDocumentModel(reference_words)
return cosine_similarity(evaluated_model, reference_model)
def evaluate_unit_overlap(evaluated_sentences, reference_sentences):
evaluated_words = tuple(chain(*(s.words for s in evaluated_sentences)))
reference_words = tuple(chain(*(s.words for s in reference_sentences)))
evaluated_model = TfDocumentModel(evaluated_words)
reference_model = TfDocumentModel(reference_words)
return unit_overlap(evaluated_model, reference_model)
AVAILABLE_METHODS = {
"random": build_random,
"luhn": build_luhn,
"edmundson": build_edmundson,
"lsa": build_lsa,
}
AVAILABLE_EVALUATIONS = (
("Precision", False, precision),
("Recall", False, recall),
("F-score", False, f_score),
("Cosine similarity", False, evaluate_cosine_similarity),
("Cosine similarity (document)", True, evaluate_cosine_similarity),
("Unit overlap", False, evaluate_unit_overlap),
("Unit overlap (document)", True, evaluate_unit_overlap),
)
def main(args=None):
args = docopt(to_string(__doc__), args, version=__version__)
summarizer, document, items_count, reference_summary = handle_arguments(args)
evaluated_sentences = summarizer(document, items_count)
reference_document = PlaintextParser.from_string(reference_summary, Tokenizer("czech"))
reference_sentences = reference_document.document.sentences
for name, evaluate_document, evaluate in AVAILABLE_EVALUATIONS:
if evaluate_document:
result = evaluate(evaluated_sentences, document.sentences)
else:
result = evaluate(evaluated_sentences, reference_sentences)
print("%s: %f" % (name, result))
def handle_arguments(args):
parser = PARSERS["plaintext"]
input_stream = sys.stdin
if args["--url"] is not None:
parser = PARSERS["html"]
request = urllib.Request(args["--url"], headers=HEADERS)
input_stream = urllib.urlopen(request)
elif args["--file"] is not None:
parser = PARSERS.get(args["--format"], PlaintextParser)
input_stream = open(args["--file"], "rb")
summarizer_builder = AVAILABLE_METHODS["luhn"]
for method, builder in AVAILABLE_METHODS.items():
if args[method]:
summarizer_builder = builder
break
items_count = ItemsCount(args["--length"])
parser = parser(input_stream.read(), Tokenizer("czech"))
if input_stream is not sys.stdin:
input_stream.close()
with open(args["<reference_summary>"], "rb") as file:
        reference_summary = file.read().decode("utf8")
    return summarizer_builder(parser), parser.document, items_count, reference_summary
if __name__ == "__main__":
try:
exit_code = main()
exit(exit_code)
except KeyboardInterrupt:
exit(1)
except Exception as e:
print(e)
exit(1)
| 32.757225
| 129
| 0.712546
|
791a31e5afa76509dfd6b875145a69ef97e61ca0
| 4,410
|
py
|
Python
|
src/utils.py
|
catrionamurray/chromatic_fitting
|
4903373b385abbf0e67d6ea3ba15d7621f44f2dd
|
[
"MIT"
] | null | null | null |
src/utils.py
|
catrionamurray/chromatic_fitting
|
4903373b385abbf0e67d6ea3ba15d7621f44f2dd
|
[
"MIT"
] | null | null | null |
src/utils.py
|
catrionamurray/chromatic_fitting
|
4903373b385abbf0e67d6ea3ba15d7621f44f2dd
|
[
"MIT"
] | null | null | null |
from .imports import *
def rainbow_to_vector(r, timeformat='h'):
""" Convert Rainbow object to np.arrays
Parameters
----------
r : Rainbow object
chromatic Rainbow object to convert into array format
timeformat : str
(optional, default='hours')
The time format to use (seconds, minutes, hours, days etc.)
Returns
----------
rflux : np.array
flux (MJy/sr) [n_wavelengths x n_integrations]
rfluxe : np.array
flux error (MJy/sr) [n_wavelengths x n_integrations]
rtime : np.array
        time (BJD_TDB, hours) [n_integrations]
rwavel : np.array
wavelength (microns) [n_wavelengths]
"""
secondformat = ['second', 'seconds', 'sec', 's']
minuteformat = ['minute', 'minutes', 'min', 'm']
hourformat = ['hour', 'hours', 'h']
dayformat = ['day', 'days', 'd']
yearformat = ['year', 'years', 'y']
rflux = r.fluxlike['flux'] # flux (MJy/sr) : [n_wavelengths x n_integrations]
rfluxe = r.fluxlike['uncertainty'] # flux error (MJy/sr) : [n_wavelengths x n_integrations]
rtime = r.timelike['time'] # time (BJD_TDB, hours) : [n_integrations]
rwavel = r.wavelike['wavelength'] # wavelength (microns) : [n_wavelengths]
# change the time array into the requested format (e.g. seconds, minutes, days etc.)
if timeformat in secondformat:
rtime = rtime * 3600
elif timeformat in minuteformat:
rtime = rtime * 60
elif timeformat in hourformat:
# hours is the default time setting
pass
elif timeformat in dayformat:
rtime = rtime / 24.
elif timeformat in yearformat:
rtime = rtime / (24 * 365.)
else:
warnings.warn("Unrecognised Time Format!")
return
return rflux, rfluxe, rtime, rwavel
def rainbow_to_df(r, timeformat='h'):
""" Convert Rainbow object to pandas dataframe
Parameters
----------
r : Rainbow object
chromatic Rainbow object to convert into pandas df format
timeformat : str
(optional, default='hours')
The time format to use (seconds, minutes, hours, days etc.)
Returns
----------
pd.DataFrame
"""
rflux, rfluxe, rtime, rwavel = rainbow_to_vector(r, timeformat)
x, y = np.meshgrid(rtime.to_value(), rwavel.to_value())
rainbow_dict = {f"Time ({timeformat})": x.ravel(), "Wavelength (microns)": y.ravel(), "Flux": rflux.ravel(),
"Flux Error": rfluxe.ravel()}
df = pd.DataFrame(rainbow_dict)
return df
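# Hedged usage sketch (assumes a chromatic Rainbow object `r` is already in hand;
# the variable names are illustrative only):
#
#     flux, flux_err, t, wavelength = rainbow_to_vector(r, timeformat='min')
#     df = rainbow_to_df(r, timeformat='h')
#     df.head()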
def bin_data(jd, y, mins_jd):
t = np.array(jd)
split = []
sorted_t = t
sorted_y = y
start = sorted_t[0]
nextbin = sorted_t[np.absolute(sorted_t - start) > mins_jd]
    while len(nextbin) > 0:
start = start + mins_jd
ind_st = np.argmax(sorted_t > start)
if len(split) > 0:
if ind_st != split[-1]:
split.append(ind_st)
time = sorted_t[ind_st:]
# need to add defn for time here?
else:
split.append(ind_st)
time = sorted_t[ind_st:]
nextbin = time[np.absolute(time - start) > mins_jd]
times = np.split(sorted_t, split)
ys = np.split(sorted_y, split)
bins = np.zeros(len(times))
binned_y = np.zeros(len(times))
binned_err = np.zeros(len(times))
for i in range(len(times)):
if len(ys[i]) > 0:
try:
bins[i] = np.nanmean(times[i])
binned_y[i] = np.nanmean(ys[i])
n = len(times[i])
# standard error in the median:
# binned_err[i] = 1.253 * np.nanstd(ys[i]) / np.sqrt(n)
binned_err[i] = np.nanstd(ys[i]) / np.sqrt(n)
except Exception as e:
print(e)
pass
bin_t = bins[binned_y != 0]
bin_e = binned_err[binned_y != 0]
bin_y = binned_y[binned_y != 0]
return bin_t, bin_y, bin_e
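# Hedged usage sketch for bin_data (illustrative values; assumes numpy is already
# imported via .imports): bin a noisy light curve into 0.01-day bins, where the
# returned bin_e is the standard error of the mean flux within each bin.
#
#     t = np.linspace(0.0, 0.5, 500)            # time in days
#     y = 1.0 + 0.001 * np.random.randn(500)    # normalised flux
#     bin_t, bin_y, bin_e = bin_data(t, y, mins_jd=0.01)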
def find_nearest(array, value):
# array = np.asarray(array)
idx = (np.abs(array - value)).argmin()
return idx
def remove_nans(arr_with_nans, *otherarrs):
nanfree_arrs = []
for arr in otherarrs:
nanfree_arrs.append(arr[~np.isnan(arr_with_nans)])
arr_without_nans = arr_with_nans[~np.isnan(arr_with_nans)]
return arr_without_nans, nanfree_arrs
| 32.910448
| 112
| 0.578005
|
6800edf8be729fbd47336e54a83bae77e97aee5a
| 10,181
|
py
|
Python
|
interval.py
|
theodox/disempower
|
3e8df240a9b63250b6f78e2ec9bd5414d63cea07
|
[
"MIT"
] | 1
|
2019-05-20T05:28:32.000Z
|
2019-05-20T05:28:32.000Z
|
interval.py
|
theodox/disempower
|
3e8df240a9b63250b6f78e2ec9bd5414d63cea07
|
[
"MIT"
] | 1
|
2019-09-04T04:52:15.000Z
|
2019-09-04T04:52:15.000Z
|
interval.py
|
theodox/disempower
|
3e8df240a9b63250b6f78e2ec9bd5414d63cea07
|
[
"MIT"
] | null | null | null |
from datetime import datetime, timedelta
from collections import defaultdict
import itertools
import pickle
import time
import logging
logger = logging.getLogger("disempower.interval")
CREDITS = defaultdict(int) # per-user bank
INTERVALS = defaultdict(list) # per-user allowable times
BLACKOUTS = defaultdict(list) # per-user forbidden times
CAPS = dict() # per-user max credits
DAILY_BANK = defaultdict(int) # add every day at 00:00
WEEKLY_BANK = defaultdict(int) # add every week at 00:00 on Monday
# last tick time per user; -1 if user is inactive
ACTIVE = defaultdict(int)
LAST_TICK = -1
LAST_TOPOFF = 17990
WEEK = 10080
DAY = 1440
HR = 60
# Daylight saving time boundaries, from the Python tz example code
DSTSTART = datetime(1, 4, 1, 2)
DSTEND = datetime(1, 10, 25, 1)
# these could be configured for other time zones
OFFSET_DST = -420
OFFSET_ST = -480
def get_time_offset(utc):
def first_sunday_on_or_after(dt):
days_to_go = 6 - dt.weekday()
if days_to_go:
dt += timedelta(days_to_go)
return dt
start = first_sunday_on_or_after(DSTSTART.replace(year=utc.year))
end = first_sunday_on_or_after(DSTEND.replace(year=utc.year))
if start < utc < end:
return OFFSET_DST
else:
return OFFSET_ST
def to_minutes(day, hour, minute):
    '''Convert (week-day, hour, minute) to minutes of the week; days are 0-6.'''
minute_overage = minute // 60
hour += minute_overage
minute %= 60
hour_overage = hour // 24
day += hour_overage
hour %= 24
day %= 7
return (day * DAY) + (hour * HR) + minute
def from_minutes(mins):
"""
converts a minute value into a day-hour-minute tuple
"""
day = mins // 1440
remainder = mins - (day * 1440)
hour = remainder // 60
minute = remainder - (hour * 60)
return day, hour, minute
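# Hedged worked example (illustrative values): Tuesday 09:30 is
# to_minutes(1, 9, 30) == 1 * DAY + 9 * HR + 30 == 2010, and
# from_minutes(2010) == (1, 9, 30), so the two helpers round-trip.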
def _add(user, start_tuple, end_tuple, target=INTERVALS):
# ignore zero-length interval
if start_tuple == end_tuple:
return
sd, sh, sm = start_tuple
ed, eh, em = end_tuple
wrap = ed < sd
if wrap:
start_one = to_minutes(sd, sh, sm)
end_one = WEEK
start_two = 0
end_two = to_minutes(ed, eh, em)
segments = ((start_two, end_two), (start_one, end_one),)
else:
segments = ((to_minutes(sd, sh, sm), to_minutes(ed, eh, em)), )
for s in segments:
if s not in target[user]:
target[user].append(s)
target[user].sort()
def add_interval(user, st, en):
_add(user, st, en, INTERVALS)
def add_blackout(user, st, en):
_add(user, st, en, BLACKOUTS)
def get_ui_blackouts(user):
as_dates = ((from_minutes(s), from_minutes(e))
for s, e in BLACKOUTS[user])
return list(as_dates)
def clear_blackouts(user):
BLACKOUTS[user].clear()
def clear_intervals(user):
INTERVALS[user].clear()
def _remove(user, interval, dictionary):
idx = dictionary[user].index(interval)
del dictionary[user][idx]
def remove(user, interval):
_remove(user, interval, INTERVALS)
def remove_blackout(user, interval):
_remove(user, interval, BLACKOUTS)
def add_credit(user, amount):
total = CREDITS.get(user, 0)
total += amount
CREDITS[user] = total
def set_credit(user, amount):
CREDITS[user] = amount
def set_cap(user, amount):
CAPS[user] = amount
def check(user):
"""
returns positive # of minutes remaining,
0 if not in an active interval,
"""
now_minute = tick(user)
remaining = 0
for i_start, i_end in get_intervals(user):
if i_start <= now_minute <= i_end:
remaining = max(remaining, i_end - now_minute)
user_total = CREDITS.get(user, 0)
# FIX: as written this will return a false
# countdown to midnight on Sunday if there
# is a wraparound interval to Monday morning...
return min(remaining, user_total)
def server_time(now):
local_time_offset = get_time_offset(now)
# server runs UTC, but the minute conversion is hard-coded to
# a simplified version of Pacific time : -7 during the
# PST interval, -8 the rest of the time
now_minute = to_minutes(now.weekday() % 7, now.hour, now.minute)
now_minute += local_time_offset
now_minute %= WEEK
return now_minute
def tick(user):
now = datetime.utcnow()
daily_topoff(now)
now_minute = server_time(now)
recent = ACTIVE[user]
if recent == -1:
recent = now_minute
delta = now_minute - recent
msg = "time: {}, {}, {}, 'delta', {}, 'credits' {} "
logger.info(msg.format(now, from_minutes(now_minute), ACTIVE[user], delta, CREDITS[user]))
# wraparounds for normalized time
if delta < 0:
delta += WEEK
# assume longer interval = dropped connection
# expect multiple polls per minute
if delta > 3:
delta = 0
logger.warning("dropped connection")
CREDITS[user] -= delta
CREDITS[user] = max(0, CREDITS[user])
ACTIVE[user] = now_minute
return now_minute
def daily_topoff(today_datetime):
global LAST_TOPOFF
current_day_timestamp = today_datetime.timestamp()
current_day_serial = int(current_day_timestamp // 86400)
for d in range(LAST_TOPOFF + 1, current_day_serial + 1):
next_day = datetime.utcfromtimestamp(d * 86400)
logger.info(str(next_day))
local_time_offset = get_time_offset(next_day)
now_minute = to_minutes(next_day.weekday() %
7, next_day.hour, next_day.minute)
now_minute += local_time_offset
now_minute %= WEEK
day_number = now_minute // DAY
logger.info("topoff {} {}".format(d, day_number))
for u in DAILY_BANK:
daily = DAILY_BANK[u] or 0
logger.info('daily {}: {}'.format(daily, DAILY_BANK))
CREDITS[u] += daily
if day_number == 0:
weekly = WEEKLY_BANK[u] or 0
logger.info('weekly {}: {}'.format(weekly, WEEKLY_BANK))
CREDITS[u] += weekly
CREDITS[u] = min(CREDITS[u], CAPS.get(u, 180))
LAST_TOPOFF = current_day_serial
def set_cap(user, amount):
CAPS[user] = amount
def get_cap(user):
return CAPS.get(user, 180)
def set_daily_allowance(user, amount):
DAILY_BANK[user] = amount
def get_daily_allowance(user):
return DAILY_BANK.get(user)
def set_weekly_allowance(user, amount):
WEEKLY_BANK[user] = amount
def get_weekly_allowance(user):
return WEEKLY_BANK.get(user)
def set_credits(user, amount):
CREDITS[user] = amount
def get_credits(user):
return CREDITS.get(user, 0)
def get_users():
return INTERVALS.keys()
def delete_user(user):
for each_dict in (INTERVALS, CREDITS, WEEKLY_BANK, DAILY_BANK, CAPS, BLACKOUTS):
try:
each_dict.pop(user)
except KeyError:
pass
def get_intervals(user):
logger.debug(str([(from_minutes(s), from_minutes(e)) for s, e in BLACKOUTS[user]]))
ints = (k for k in INTERVALS[user])
for b in BLACKOUTS[user]:
ints = (j for j in blackout_filter(ints, b))
return (tuple(map(round, t)) for t in ints)
def get_ui_intervals(user):
as_dates = ((from_minutes(s), from_minutes(e))
for s, e in get_intervals(user))
return list(as_dates)
def blackout_filter(interval_stream, blackout):
for interval in interval_stream:
if max(*interval) <= min(*blackout):
yield interval
continue
if min(*interval) >= max(*blackout):
yield interval
continue
if min(*blackout) > min(*interval):
yield ((min(*interval), min(*blackout)))
if max(*blackout) < max(*interval):
yield (max(*blackout), max(*interval))
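# Hedged worked example (illustrative numbers): for an allowed interval of
# (480, 720) (Monday 08:00-12:00) and a blackout of (540, 600) (09:00-10:00),
# blackout_filter yields (480, 540) and (600, 720), i.e. the interval is split
# around the blackout.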
def save(filename):
state = {
'CREDITS': CREDITS,
'INTERVALS': INTERVALS,
'BLACKOUTS': BLACKOUTS,
'DAILY_BANK': DAILY_BANK,
'WEEKLY_BANK': WEEKLY_BANK,
'CAPS': CAPS
}
with open(filename, 'wb') as handle:
pickle.dump(state, handle)
def load(filename):
with open(filename, 'rb') as handle:
state = pickle.load(handle)
CREDITS.clear()
CREDITS.update(state['CREDITS'])
BLACKOUTS.clear()
BLACKOUTS.update(state['BLACKOUTS'])
INTERVALS.clear()
INTERVALS.update(state['INTERVALS'])
ACTIVE.clear()
DAILY_BANK.clear()
DAILY_BANK.update(state['DAILY_BANK'])
WEEKLY_BANK.clear()
WEEKLY_BANK.update(state['WEEKLY_BANK'])
CAPS.clear()
CAPS.update(state['CAPS'])
for u in CREDITS:
ACTIVE[u] = -1
if __name__ == '__main__':
# testing time shift
now = datetime.utcnow()
pacific_time_offset = get_time_offset(now)
now_minute = to_minutes(now.weekday() % 7, now.hour, now.minute)
print ("UTC", now)
print ("raw", now_minute)
print ("offset", pacific_time_offset)
now_minute += pacific_time_offset
now_minute %= WEEK
print ("pst", now_minute)
now = datetime.now()
confirm = to_minutes(now.weekday() % 7, now.hour, now.minute)
print ("confirm", confirm)
CAPS['nicky'] = 120
CREDITS['nicky'] = 0
ACTIVE['nicky'] = -1
DAILY_BANK['nicky'] = 10
WEEKLY_BANK['nicky'] = 5
DAILY_BANK['helen'] = 7
add_interval('nicky', (6, 23, 0), (0, 8, 0))
add_interval('nicky', (0, 9, 30), (0, 13, 30))
add_blackout('nicky', (0, 7, 0), (0, 9, 45))
add_blackout('nicky', (0, 11, 0), (0, 12, 00))
add_blackout('nicky', (0, 13, 15), (0, 20, 20))
print (INTERVALS['nicky'])
print (BLACKOUTS['nicky'])
print (get_intervals('nicky'))
print (get_ui_intervals('nicky'))
# save("test_db")
| 25.076355
| 95
| 0.600923
|
0cda0d81fc7f687db0c38a696b3e71ffb2c30984
| 1,599
|
py
|
Python
|
e2e/Tests/Merit/LockedMerit/PendingDieRegainTest.py
|
kayabaNerve/Currency
|
260ebc20f1704f42ad6183fee39ad58ec6d07961
|
[
"CC0-1.0"
] | 66
|
2019-01-14T08:39:52.000Z
|
2022-01-06T11:39:15.000Z
|
e2e/Tests/Merit/LockedMerit/PendingDieRegainTest.py
|
kayabaNerve/Currency
|
260ebc20f1704f42ad6183fee39ad58ec6d07961
|
[
"CC0-1.0"
] | 228
|
2019-01-16T15:42:44.000Z
|
2022-02-05T07:48:07.000Z
|
e2e/Tests/Merit/LockedMerit/PendingDieRegainTest.py
|
kayabaNerve/Currency
|
260ebc20f1704f42ad6183fee39ad58ec6d07961
|
[
"CC0-1.0"
] | 19
|
2019-01-14T08:53:04.000Z
|
2021-11-03T20:19:28.000Z
|
from typing import Dict, Any
import json
from e2e.Meros.RPC import RPC
from e2e.Meros.Liver import Liver
from e2e.Tests.Errors import TestError
correctVectorsHeight: bool = False
def PendingDieRegainTest(
rpc: RPC
) -> None:
def verifyCorrectlyLocked(
height: int
) -> None:
merit: Dict[str, Any] = rpc.call("merit", "getMerit", {"nick": 0})
merit = {
"merit": merit["merit"],
"status": merit["status"]
}
if height < 9:
if merit != {
"merit": 1,
"status": "Unlocked"
}:
raise TestError("Merit was locked early.")
elif height < 100:
if merit != {
"merit": 1,
"status": "Locked"
}:
raise TestError("Merit wasn't locked.")
elif height == 100:
if merit != {
"merit": 1,
"status": "Pending"
}:
raise TestError("Merit wasn't pending.")
elif height == 101:
if merit != {
"merit": 0,
"status": "Unlocked"
}:
raise TestError("Merit didn't die and become unlocked.")
elif height == 102:
#pylint: disable=global-statement
global correctVectorsHeight
correctVectorsHeight = True
if merit != {
"merit": 1,
"status": "Unlocked"
}:
raise TestError("Didn't regain Merit which was unlocked.")
with open("e2e/Vectors/Merit/LockedMerit/PendingDieRegain.json", "r") as file:
Liver(rpc, json.loads(file.read()), everyBlock=verifyCorrectlyLocked).live()
if not correctVectorsHeight:
raise Exception("PendingDieRegain vectors have an invalid length.")
| 26.213115
| 80
| 0.594747
|
c56207b5708566bd33f4e0a0d69f73959d9031c8
| 2,259
|
py
|
Python
|
ds2_arxiv/tools/my_proxy_complicated.py
|
liusida/ds2_arxiv
|
1ee8a3f65cfb662a1af6dca29cde3e07ec5b322b
|
[
"MIT"
] | null | null | null |
ds2_arxiv/tools/my_proxy_complicated.py
|
liusida/ds2_arxiv
|
1ee8a3f65cfb662a1af6dca29cde3e07ec5b322b
|
[
"MIT"
] | null | null | null |
ds2_arxiv/tools/my_proxy_complicated.py
|
liusida/ds2_arxiv
|
1ee8a3f65cfb662a1af6dca29cde3e07ec5b322b
|
[
"MIT"
] | null | null | null |
import random
import urllib3
from urllib3 import ProxyManager
from urllib3.contrib.socks import SOCKSProxyManager
class MyProxy:
def __init__(self, proxy_txt_filename="proxies.txt", proxy_disabled=False):
self.proxy_disabled = proxy_disabled
self.bad_proxies = []
self.proxies = []
self.proxies_cursor = 0
self.proxy = None
        self.proxy_count = 999  # rotation happens every 90 calls; start high so the first use rotates the proxy
self.proxy_txt_filename = proxy_txt_filename
self.rotate_proxy()
    def rotate_proxy(self, report_bad_proxy=False):
        # remember the proxy that was in use so it (not a fresh random pick) can be reported as bad
        previous_proxy = self.proxies[self.proxies_cursor] if self.proxies else None
        # dynamically load newest file
        with open(self.proxy_txt_filename, "r") as f:
            self.proxies = f.read().split("\n")
        try:
            with open("config/bad_proxies.txt", "r") as f:
                self.bad_proxies = f.read().split("\n")
        except FileNotFoundError:
            self.bad_proxies = []
        if report_bad_proxy and previous_proxy and previous_proxy != "p.webshare.io:9999":
            with open("config/bad_proxies.txt", "a") as f:
                f.write(previous_proxy + "\n")
while True:
self.proxies_cursor = random.randrange(0, len(self.proxies))
if self.proxies[self.proxies_cursor] not in self.bad_proxies:
break
print(f"Skipping bad proxy {self.proxies_cursor}: {self.proxies[self.proxies_cursor]}")
proxy_string = f"socks5://{self.proxies[self.proxies_cursor]}"
self.custom_proxy(proxy_string)
return self.proxy
def current_proxy(self):
self.proxy_count += 1
        if self.proxy is None or self.proxy_count > 90:
self.rotate_proxy()
return self.proxy
def custom_proxy(self, proxy_string):
if self.proxy_disabled:
self.proxy = urllib3.PoolManager()
else:
print(f"Using proxy: {proxy_string}")
self.proxy = SOCKSProxyManager(proxy_string)
self.proxy_count = 0
return self.proxy
if __name__ == "__main__":
myproxy = MyProxy()
url = "https://star-lab.ai/"
c = myproxy.current_proxy().request('GET', url, timeout=3).data
print(c)
| 37.032787
| 99
| 0.631695
|
048d5da96044a07ef609106e181c24e425560862
| 20,221
|
py
|
Python
|
azure-mgmt-network/azure/mgmt/network/v2017_08_01/operations/load_balancers_operations.py
|
v-Ajnava/azure-sdk-for-python
|
a1f6f80eb5869c5b710e8bfb66146546697e2a6f
|
[
"MIT"
] | 4
|
2016-06-17T23:25:29.000Z
|
2022-03-30T22:37:45.000Z
|
azure-mgmt-network/azure/mgmt/network/v2017_08_01/operations/load_balancers_operations.py
|
v-Ajnava/azure-sdk-for-python
|
a1f6f80eb5869c5b710e8bfb66146546697e2a6f
|
[
"MIT"
] | 2
|
2016-09-30T21:40:24.000Z
|
2017-11-10T18:16:18.000Z
|
azure-mgmt-network/azure/mgmt/network/v2017_08_01/operations/load_balancers_operations.py
|
v-Ajnava/azure-sdk-for-python
|
a1f6f80eb5869c5b710e8bfb66146546697e2a6f
|
[
"MIT"
] | 3
|
2016-05-03T20:49:46.000Z
|
2017-10-05T21:05:27.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.exceptions import DeserializationError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class LoadBalancersOperations(object):
"""LoadBalancersOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
:ivar api_version: Client API version. Constant value: "2017-08-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2017-08-01"
self.config = config
def _delete_initial(
self, resource_group_name, load_balancer_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, load_balancer_name, custom_headers=None, raw=False, **operation_config):
"""Deletes the specified load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns None or
ClientRawResponse if raw=true
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
load_balancer_name=load_balancer_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def get(
self, resource_group_name, load_balancer_name, expand=None, custom_headers=None, raw=False, **operation_config):
"""Gets the specified load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param expand: Expands referenced resources.
:type expand: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: LoadBalancer or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2017_08_01.models.LoadBalancer or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('LoadBalancer', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def _create_or_update_initial(
self, resource_group_name, load_balancer_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'LoadBalancer')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('LoadBalancer', response)
if response.status_code == 201:
deserialized = self._deserialize('LoadBalancer', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, load_balancer_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Creates or updates a load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:param parameters: Parameters supplied to the create or update load
balancer operation.
:type parameters: ~azure.mgmt.network.v2017_08_01.models.LoadBalancer
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns LoadBalancer
or ClientRawResponse if raw=true
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2017_08_01.models.LoadBalancer]
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
load_balancer_name=load_balancer_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 201]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = self._deserialize('LoadBalancer', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
def list_all(
self, custom_headers=None, raw=False, **operation_config):
"""Gets all the load balancers in a subscription.
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of LoadBalancer
:rtype:
~azure.mgmt.network.v2017_08_01.models.LoadBalancerPaged[~azure.mgmt.network.v2017_08_01.models.LoadBalancer]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/providers/Microsoft.Network/loadBalancers'
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.LoadBalancerPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.LoadBalancerPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
def list(
self, resource_group_name, custom_headers=None, raw=False, **operation_config):
"""Gets all the load balancers in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of LoadBalancer
:rtype:
~azure.mgmt.network.v2017_08_01.models.LoadBalancerPaged[~azure.mgmt.network.v2017_08_01.models.LoadBalancer]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers'
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.LoadBalancerPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.LoadBalancerPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
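The docstrings above describe a long-running create/update flow and a lazily paged list. A minimal sketch of driving both through the msrestazure-era management client follows; the resource group, load balancer name, and location are placeholders, and the credential object is assumed to come from the caller.

# Minimal sketch, not part of the generated module above; names are placeholders.
from azure.mgmt.network import NetworkManagementClient

def demo_load_balancers(credentials, subscription_id):
    client = NetworkManagementClient(credentials, subscription_id)

    # list_all() returns a LoadBalancerPaged iterator that fetches pages lazily.
    for lb in client.load_balancers.list_all():
        print(lb.name)

    # create_or_update() returns an AzureOperationPoller; result() blocks until
    # the long-running operation finishes and yields the deserialized LoadBalancer.
    poller = client.load_balancers.create_or_update(
        'my-resource-group', 'my-lb', {'location': 'westus'})
    print(poller.result().provisioning_state)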
avg_line_length: 44.441758 | max_line_length: 144 | alphanum_fraction: 0.657139

hexsha: fa110e615caee73b6504e7f18629456025242704 | size: 3,193 | ext: py | lang: Python
path: quaternary_FOM_stackedtern10.py | repo: johnmgregoire/PythonCompositionPlots | head_hexsha: e105c575463b7d4512d9aac18c7330d1a0dc2c14 | licenses: ["BSD-3-Clause"]
stars: 4 (2018-03-05T09:34:49.000Z to 2022-02-01T15:33:54.000Z) | issues: null | forks: 2 (2016-01-24T19:09:21.000Z to 2019-10-11T12:43:07.000Z)
import matplotlib.cm as cm
import numpy
import pylab
import operator, copy, os
#pylab.rc('font',**{'family':'serif', 'serif':['Times New Roman']})
#pylab.rcParams['font.family']='serif'
#pylab.rcParams['font.serif']='Times New Roman'
pylab.rc('font', family='serif', serif='Times New Roman')
#os.chdir('C:/Users/Gregoire/Documents/PythonCode/ternaryplot')
from myternaryutility import TernaryPlot
from myquaternaryutility import QuaternaryPlot
def make10ternaxes(ellabels=['A', 'B', 'C', 'D'], fig=None, fontsize=17):
if fig is None:
fig=pylab.figure(figsize=(12, 8))
ax_xc=[]
ax_yc=[]
xcdel=[.18, .19, .065, .1, .04, .05, .055, .03, .02, .02]
ax_yc=[.49, .68, .30, .74, .48, .24, .78, .58, .39, .21]
for i in range(10):
if i==0:
ax_xc+=[xcdel[i]]
else:
ax_xc+=[ax_xc[-1]+xcdel[i]]
#ax_yc+=[.5+((i%2)*2.-1.)*((i>0)*.1+.072*i/10)]
shape1=numpy.array([.35, 1.])
scales=[.82, 0.51, 0.39, 0.3, 0.22, 0.2, 0.17, 0.14, 0.11, 0.09]
axl=[]
for sc, xc, yc in zip(scales, ax_xc, ax_yc):
w, l=shape1*sc
axl+=[fig.add_axes([xc-w/2, yc-l/2, w, l])]
stpl=[]
xpos=[.27]*10
xpos[0:3]=[.38, .36, .33]
xpos[-1]=.18
for count, (ax, xp) in enumerate(zip(axl, xpos)):
stp=TernaryPlot(ax, ellabels=ellabels[:3], offset=.03)
if not fontsize is None:
stp.label(fontsize=fontsize)#,fontdict={'fontname':'Times New Roman'})
stpl+=[stp]
if not fontsize is None:
if count<9:
stp.ax.text(xp, .8, '%s$_{%.2f-%.2f}$' %(ellabels[3], (count*.1), ((count+1)*.1)-.01), ha='right', va='center', fontsize=fontsize)
else:
stp.ax.text(xp, .8, '%s$_{%.2f-%d}$' %(ellabels[3], (count*.1), 1), ha='right', va='center', fontsize=fontsize)
return axl, stpl
def scatter_10axes(comps, fom, stpl, s=18, cb=False, cbrect=(.85, .3, .04, .4), cblabel='', **kwargs):# for colorbar must pass kwargs norm and cmap and optionally cblabel
abc=comps[:, :3]
abc[abc.sum(axis=1)==0.]=numpy.array([1., 1., 1.])/3.
abc=numpy.array([c/c.sum() for c in abc])
d=comps[:, 3]
d30=numpy.round(d*30.)
dlims=numpy.array([0., 1., 2., 3.])
marks=[('o', 1., 1.), ('D', .9, .7),('s', .8, .5)]
sl=s*numpy.array([6.9, 3., 2.1, 1.5, 1.2, 1.35, 1.5, 1.8, 2.4, 3., 4.5])
scplots=[]
for i, (stp, sv) in enumerate(zip(stpl, sl)):
dl=dlims+(i*3.)
if i==9:
dl[-1]+=.01
for a, b, (m, sf, al) in zip(dl, dl[1:], marks):
inds=numpy.where((d30>=a) & (d30<b))[0]
#print a, b, len(inds)
if len(inds)>0:
scplots+=[stp.scatter(abc[inds], c=fom[inds], marker=m, s=sv*sf, alpha=al, **kwargs)]
if cb:
cbax=stp.ax.figure.add_axes(cbrect)
if 'extend' in kwargs.keys():
sm=cm.ScalarMappable(norm=kwargs['norm'], cmap=kwargs['cmap'], extend=kwargs['extend'])
else:
sm=cm.ScalarMappable(norm=kwargs['norm'], cmap=kwargs['cmap'])
sm.set_array(fom)
cb=stp.ax.figure.colorbar(sm, cax=cbax)
cb.set_label(cblabel, fontsize=18)
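A hedged usage sketch for make10ternaxes and scatter_10axes, assuming the myternaryutility/myquaternaryutility dependencies above are importable; the compositions and figure of merit below are synthetic, and the norm/cmap keywords are required whenever a colorbar is requested.

# Hypothetical driver: random quaternary compositions colored by a synthetic FOM.
if __name__ == '__main__':
    comps = numpy.random.rand(200, 4)
    comps /= comps.sum(axis=1)[:, numpy.newaxis]   # project onto the composition simplex
    fom = comps[:, 0] + 0.5 * comps[:, 3]          # synthetic figure of merit
    axl, stpl = make10ternaxes(ellabels=['A', 'B', 'C', 'D'])
    scatter_10axes(comps, fom, stpl, s=20, cb=True, cblabel='FOM',
                   norm=pylab.Normalize(vmin=fom.min(), vmax=fom.max()),
                   cmap=cm.jet)
    pylab.show()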
avg_line_length: 36.284091 | max_line_length: 170 | alphanum_fraction: 0.541497

hexsha: d792558e8cf671406d503429159a49a429a004d8 | size: 3,395 | ext: py | lang: Python
path: pyEpiabm/pyEpiabm/property/spatial_foi.py | repo: Saketkr21/epiabm | head_hexsha: 3ec0dcbc78d3fd4114ed3c6bdd78ef39f0013d2f | licenses: ["BSD-3-Clause"]
stars: null | issues: null | forks: 1 (2022-03-14T06:00:30.000Z to 2022-03-14T06:00:30.000Z)
#
# Calculate spatial force of infection based on Covidsim code
#
import pyEpiabm.core
class SpatialInfection:
"""Class to calculate the infectiousness and susceptibility
parameters for the force of infection parameter, between cells.
"""
@staticmethod
def cell_inf(inf_cell, time: float):
"""Calculate the infectiousness of one cell
towards its neighbouring cells. Does not include interventions such
as isolation, or whether individual is a carehome resident.
Parameters
----------
inf_cell : Cell
Cell causing the infection
time : float
Current simulation time
Returns
-------
int
Average number of infection events from the cell
"""
R_0 = pyEpiabm.core.Parameters.instance().basic_reproduction_num
total_infectors = inf_cell.number_infectious()
average_number_to_infect = total_infectors * R_0
# This gives the expected number of infection events
# caused by people within this cell.
return average_number_to_infect
@staticmethod
def space_inf(inf_cell, infector,
time: float):
"""Calculate the infectiousness between cells, dependent on the
infectious people in it. Does not include interventions such as
isolation, whether individual is a carehome resident, or age
dependance on spatial contact.
Parameters
----------
inf_cell : Cell
Cell causing the infection
infector : Person
Infector
time : float
Current simulation time
Returns
-------
float
Infectiousness parameter of cell
"""
return infector.infectiousness
@staticmethod
def space_susc(susc_cell, infectee,
time: float):
"""Calculate the susceptibility of one cell towards its neighbouring cells.
Does not include interventions such as isolation, age of individual
or whether individual is a carehome resident.
Parameters
----------
susc_cell : Cell
Cell receiving infections
infectee : Person
Infectee
time : float
Current simulation time
Returns
-------
float
Susceptibility parameter of cell
"""
return 1.0
@staticmethod
def space_foi(inf_cell, susc_cell, infector,
infectee, time: float):
"""Calculate the force of infection between cells, for a particular
infector and infectee.
Parameters
----------
inf_cell : Cell
Cell doing infecting
susc_cell : Cell
Cell receiving infections
infector : Person
Infector
infectee : Person
Infectee
time : float
Current simulation time
Returns
-------
float
Force of infection parameter of cell
"""
infectiousness = SpatialInfection.space_inf(inf_cell, infector,
time)
susceptibility = SpatialInfection.space_susc(susc_cell, infectee,
time)
return (infectiousness * susceptibility)
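A small, hedged sketch of how the three static methods compose; `_Cell` and `_Person` below are hypothetical stand-ins with only the attributes SpatialInfection touches, not the real pyEpiabm classes.

# Hypothetical stand-ins; the real Cell and Person classes live elsewhere in pyEpiabm.
class _Cell:
    def number_infectious(self):
        return 5

class _Person:
    infectiousness = 0.8

inf_cell, susc_cell = _Cell(), _Cell()
infector, infectee = _Person(), _Person()

# space_foi = space_inf * space_susc = 0.8 * 1.0
print(SpatialInfection.space_foi(inf_cell, susc_cell, infector, infectee, time=1.0))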
avg_line_length: 28.529412 | max_line_length: 83 | alphanum_fraction: 0.582032

hexsha: 28af5d7714017e57b17ec52269a7e9a005b9d28e | size: 467 | ext: py | lang: Python
path: tag/models.py | repo: AICAN-Research/learn-pathology | head_hexsha: 663f9c5f125857badf5bb41b6bfa2d9100578e2e | licenses: ["MIT"]
stars: 2 (2021-09-16T08:38:10.000Z to 2021-09-16T10:46:53.000Z) | issues: 6 (2021-09-20T10:56:21.000Z to 2022-01-05T08:25:17.000Z) | forks: null
from django.db import models
from django.forms import ModelForm
class Tag(models.Model):
name = models.CharField(max_length=255)
is_organ = models.BooleanField(help_text='Is this an organ tag?', default=False)
is_stain = models.BooleanField(help_text='Is this a stain tag?', default=False)
def __str__(self):
return self.name
class TagForm(ModelForm):
class Meta:
model = Tag
fields = ['name', 'is_organ', 'is_stain']
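A brief, hedged example of using the form above from within a configured Django project; the field values are made up.

# Hypothetical usage, e.g. in a view or the Django shell.
form = TagForm(data={'name': 'Lung', 'is_organ': True, 'is_stain': False})
if form.is_valid():
    tag = form.save()      # persists a Tag row
    print(str(tag))        # "Lung", via Tag.__str__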
avg_line_length: 25.944444 | max_line_length: 84 | alphanum_fraction: 0.689507

hexsha: 67b7e26991e36436e5063706c53daba481bea7a4 | size: 622 | ext: py | lang: Python
path: var/spack/repos/builtin/packages/py-funcsigs/package.py | repo: MiddelkoopT/spack | head_hexsha: 4d94c4c4600f42a7a3bb3d06ec879140bc259304 | licenses: ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"]
stars: null | issues: null | forks: null
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyFuncsigs(PythonPackage):
"""Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2."""
pypi = "funcsigs/funcsigs-1.0.2.tar.gz"
version('1.0.2', sha256='a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50')
version('0.4', sha256='d83ce6df0b0ea6618700fe1db353526391a8a3ada1b7aba52fed7a61da772033')
depends_on('py-setuptools@17.1:', type='build')
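For context, a hedged sketch of what the packaged library provides once installed; the inspected function is arbitrary.

# funcsigs backports PEP 362 signature objects to Python 2.6/2.7/3.2.
from funcsigs import signature

def greet(name, punctuation='!'):
    return name + punctuation

sig = signature(greet)
print(sig)                   # (name, punctuation='!')
print(list(sig.parameters))  # ['name', 'punctuation']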
avg_line_length: 34.555556 | max_line_length: 95 | alphanum_fraction: 0.755627

hexsha: 37ef57b4935d10b4c5c193bcd4cc8e493d204014 | size: 1,331 | ext: py | lang: Python
path: supplement/dataPro2.py | repo: 5966466/software-curriculum-design | head_hexsha: 42af2401371da30faf6b461d6c29a3c37f691656 | licenses: ["MIT"]
stars: null | issues: null | forks: null
import sqlite3
import os, shutil
import numpy as np
import pickle
class DataPro():
def __init__(self, pre):
self.pre = pre
self.path = os.path.join(pre.setpath, pre.setName)
dm = "SELECT COUNT(*) FROM %s" % self.pre.tableName
data = self.pre.execute_set(dm)
if data[0][0] == 0:
self.st_msg = (False, "There is no datum to analyze")
os.remove(os.path.join(pre.setpath, pre.setName+'.db'))
os.rmdir(self.path)
else:
self.st_msg = (True, None)
def acc_Img(self):
dm = "SELECT DISTINCT(labelled_img) FROM %s" % self.pre.tableName
data = self.execute_set(dm)
img_dict = {i[0]:[] for i in data}
for i in img_dict.keys():
dm = "SELECT interfered_img, state FROM %s WHERE labelled_img = '%s'"\
% (self.pre.tableName, i)
data = self.execute_set(dm)
img_dict[i] = np.array(data)
self.save(img_dict, 'Img')
def execute_set(self, demand):
self.pre.cursor.execute(demand)
self.pre.connection.commit()
data = self.pre.cursor.fetchall()
return data
def save(self, data, name):
with open(os.path.join(self.path, name+'.pkl'), 'wb') as f:
pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
def add_msg(self, msg):
if self.st_msg[0]:
self.st_msg = (False, msg)
def acc_all(self):
try:
self.acc_Img()
except Exception as e:
self.add_msg(str(e))
print(str(e))
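DataPro expects a `pre` object exposing set paths, a table name, and an open sqlite connection. The stub below is a hypothetical stand-in, not part of the project, showing the minimum interface needed to run acc_all.

# Hypothetical stand-in for the `pre` object; uses a throwaway sqlite database.
import os, sqlite3, tempfile

class _StubPre:
    def __init__(self):
        self.setpath = tempfile.mkdtemp()
        self.setName = 'demo_set'
        self.tableName = 'results'
        os.mkdir(os.path.join(self.setpath, self.setName))
        self.connection = sqlite3.connect(
            os.path.join(self.setpath, self.setName + '.db'))
        self.cursor = self.connection.cursor()
        self.cursor.execute(
            'CREATE TABLE results (labelled_img TEXT, interfered_img TEXT, state INTEGER)')
        self.cursor.execute("INSERT INTO results VALUES ('a.png', 'a_1.png', 1)")
        self.connection.commit()

    def execute_set(self, demand):
        self.cursor.execute(demand)
        self.connection.commit()
        return self.cursor.fetchall()

pro = DataPro(_StubPre())
if pro.st_msg[0]:
    pro.acc_all()        # writes Img.pkl into the set directory
print(pro.st_msg)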
avg_line_length: 23.767857 | max_line_length: 73 | alphanum_fraction: 0.663411

hexsha: d0e5902b0f039d444afee0c1be3669e6f7a5c6ad | size: 2,185 | ext: py | lang: Python
path: pytestgen/cli/pytestgen.py | repo: notionparallax/pytestgen | head_hexsha: 52821ac1ed3aa4864fa47af9dd1825f92d4367d7 | licenses: ["MIT"]
stars: 5 (2019-10-20T19:58:50.000Z to 2021-12-15T00:44:41.000Z) | issues: 2 (2020-02-02T12:23:37.000Z to 2021-12-13T23:58:42.000Z) | forks: 2 (2020-05-18T13:56:30.000Z to 2021-12-15T00:44:46.000Z)
"""pytestgen.py
This is the CLI of pytestgen.
Author:
Figglewatts <me@figglewatts.co.uk>
"""
import logging
from os.path import isdir, exists
import click
from pytestgen import load
from pytestgen import parse
from pytestgen import output
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("path", nargs=-1, type=str, required=True)
@click.option("--output-dir",
"-o",
default="tests",
type=str,
show_default=True,
metavar="PATH",
help="The path to generate tests in.")
@click.option(
"--include",
"-i",
multiple=True,
default=[],
metavar="FUNC",
help="Function names to generate tests for. You can use this multiple times."
)
def cli(path, output_dir, include):
"""Generate pytest unit tests from your Python source code.
\b
Examples:
# generate tests for directory 'my_package' in 'tests/' directory
$ pytestgen my_package
\b
# generate tests for some python files and directory 'the_package'
$ pytestgen my_module_a.py another_module.py the_package
\b
# generate tests for directory 'cool_app' in 'cool_tests/' directory
$ pytestgen cool_app -o cool_tests
\b
# generate tests for functions 'foo' and 'bar' in 'functionality.py'
$ pytestgen functionality.py -i foo -i bar
"""
logging.basicConfig(level=logging.INFO, format="%(message)s")
for path_element in path:
if not exists(path_element):
logging.error(f"ERROR: path '{path_element}' did not exist")
input_set = None
if isdir(path_element):
input_set = load.directory(path_element, output_dir)
else:
try:
input_set = load.filename(path_element, output_dir)
except ValueError as err:
logging.error("ERROR: " + str(err))
raise SystemExit(1)
parsed_set = parse.parse_input_set(input_set)
output.output_tests(parsed_set, include=include)
if __name__ == "__main__":
    cli()
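One hedged way to exercise this command without touching sys.argv is click's test runner; the package path and flags below are placeholders.

# Hypothetical invocation through click's testing utilities.
from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(cli, ['my_package', '-o', 'generated_tests', '-i', 'foo'])
print(result.exit_code)
print(result.output)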
avg_line_length: 28.376623 | max_line_length: 81 | alphanum_fraction: 0.632494

hexsha: b7579543feed4f0925f6c1d88059c9e008118fde | size: 1,936 | ext: py | lang: Python
path: src/storage-preview/azext_storage_preview/vendored_sdks/azure_mgmt_storage/v2018_07_01/models/encryption.py | repo: mayank88mahajan/azure-cli-extensions | head_hexsha: 8bd389a1877bffd14052bec5519ce75dc6fc34cf | licenses: ["MIT"]
stars: 1 (2019-05-10T19:58:09.000Z to 2019-05-10T19:58:09.000Z) | issues: 2 (2019-10-02T23:37:38.000Z to 2020-10-02T01:17:31.000Z) | forks: 1 (2020-07-16T23:49:49.000Z to 2020-07-16T23:49:49.000Z)
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Encryption(Model):
"""The encryption settings on the storage account.
All required parameters must be populated in order to send to Azure.
:param services: List of services which support encryption.
:type services: ~azure.mgmt.storage.v2018_07_01.models.EncryptionServices
:param key_source: Required. The encryption keySource (provider). Possible
values (case-insensitive): Microsoft.Storage, Microsoft.Keyvault.
Possible values include: 'Microsoft.Storage', 'Microsoft.Keyvault'.
Default value: "Microsoft.Storage" .
:type key_source: str or ~azure.mgmt.storage.v2018_07_01.models.KeySource
:param key_vault_properties: Properties provided by key vault.
:type key_vault_properties:
~azure.mgmt.storage.v2018_07_01.models.KeyVaultProperties
"""
_validation = {
'key_source': {'required': True},
}
_attribute_map = {
'services': {'key': 'services', 'type': 'EncryptionServices'},
'key_source': {'key': 'keySource', 'type': 'str'},
'key_vault_properties': {'key': 'keyvaultproperties', 'type': 'KeyVaultProperties'},
}
def __init__(self, **kwargs):
super(Encryption, self).__init__(**kwargs)
self.services = kwargs.get('services', None)
self.key_source = kwargs.get('key_source', "Microsoft.Storage")
self.key_vault_properties = kwargs.get('key_vault_properties', None)
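A small, hedged illustration of constructing the generated model above; the values are placeholders and only the kwargs pattern shown in __init__ is relied on.

# Hypothetical construction; key_source defaults to "Microsoft.Storage".
enc = Encryption()
print(enc.key_source)              # Microsoft.Storage

enc_kv = Encryption(key_source='Microsoft.Keyvault')
print(enc_kv.key_source)           # Microsoft.Keyvault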
avg_line_length: 41.191489 | max_line_length: 92 | alphanum_fraction: 0.654442

hexsha: 063db16adb90266f3d5599ee61db97655a1a6425 | size: 1,024 | ext: py | lang: Python
path: Incomplete/build-hybrid.py | repo: andrewginns/CycleGAN-Tensorflow-PyTorch | head_hexsha: 2715ca0ef611b8e3031e2fa4dda5b1c84b2011c9 | licenses: ["MIT"]
stars: 1 (2018-12-11T09:07:26.000Z to 2018-12-11T09:07:26.000Z) | issues: null | forks: null
import tensorflow as tf
import sys
from tensorflow.core.framework import graph_pb2
import copy
INPUT_GRAPH_DEF_FILE = 'C:/Users/ag17634/Desktop/optimized-graph.pb'
OUTPUT_GRAPH_DEF_FILE = 'C:/Users/ag17634/Desktop/hybrid-graph.pb'
# load our graph
def load_graph(filename):
graph_def = tf.GraphDef()
with tf.gfile.FastGFile(filename, 'rb') as f:
graph_def.ParseFromString(f.read())
return graph_def
graph_def = load_graph(INPUT_GRAPH_DEF_FILE)
target_node_name = 'a2b_generator/Conv_7/Relu'
c = tf.constant(False, dtype=bool, shape=[], name=target_node_name)
# Create new graph, and rebuild it from original one
# replacing phase train node def with constant
new_graph_def = graph_pb2.GraphDef()
for node in graph_def.node:
if node.name == target_node_name:
new_graph_def.node.extend([c.op.node_def])
else:
new_graph_def.node.extend([copy.deepcopy(node)])
# save new graph
with tf.gfile.GFile(OUTPUT_GRAPH_DEF_FILE, "wb") as f:
f.write(new_graph_def.SerializeToString())
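A hedged sanity check that the rewritten GraphDef still parses and contains the replacement node; it reuses the helper and paths defined above.

# Hypothetical verification step: reload the written graph and inspect it.
check_def = load_graph(OUTPUT_GRAPH_DEF_FILE)
print('nodes in rewritten graph:', len(check_def.node))
print('replacement node present:',
      any(n.name == target_node_name for n in check_def.node))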
avg_line_length: 32 | max_line_length: 68 | alphanum_fraction: 0.753906

hexsha: 0484b724b52fc8183fbe1f9221a4ca27066cc920 | size: 403 | ext: py | lang: Python
path: farmer/urls.py | repo: davidiweala/Precision-agriculture-project | head_hexsha: b371221ea5b7e85311ed362b6c45af66995f1407 | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null
from django.urls import path
from . import views
from django.conf import settings
urlpatterns = [
path('farmerlogin/', views.farmerlogin, name="farmerlogin"),
path('farmerregister/', views.register, name="farmerregister"),
path('farmerdash/', views.dashboard, name="farmerdash"),
path('logout/', views.logoutpage, name="logout"),
path('farmerhome/', views.home, name="farmerhome")
]
avg_line_length: 36.636364 | max_line_length: 67 | alphanum_fraction: 0.712159

hexsha: b6bcef2adf280c9e297c7e1fc90df8cd52d39046 | size: 21,227 | ext: py | lang: Python
path: lib/util.py | repo: bitcoinnano/btcnano-wallet-client-desktop | head_hexsha: a368d86b38582c09aa1ec1a8fe27f574056db065 | licenses: ["MIT"]
stars: 3 (2018-01-16T09:45:41.000Z to 2018-01-27T04:07:10.000Z) | issues: null | forks: null
# Electrum - lightweight Bitcoin client
# Copyright (C) 2011 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import binascii
import os, sys, re, json
from collections import defaultdict
from datetime import datetime
from decimal import Decimal
import traceback
import urllib
import threading
from .i18n import _
import urllib.request, urllib.parse, urllib.error
import queue
def inv_dict(d):
return {v: k for k, v in d.items()}
base_units = {'Nano':8, 'mNano':5, 'uNano':2}
fee_levels = [_('Within 25 blocks'), _('Within 10 blocks'), _('Within 5 blocks'), _('Within 2 blocks'), _('In the next block')]
def normalize_version(v):
return [int(x) for x in re.sub(r'(\.0+)*$','', v).split(".")]
class NotEnoughFunds(Exception): pass
class InvalidPassword(Exception):
def __str__(self):
return _("Incorrect password")
# Throw this exception to unwind the stack like when an error occurs.
# However unlike other exceptions the user won't be informed.
class UserCancelled(Exception):
'''An exception that is suppressed from the user'''
pass
class MyEncoder(json.JSONEncoder):
def default(self, obj):
from .transaction import Transaction
if isinstance(obj, Transaction):
return obj.as_dict()
return super(MyEncoder, self).default(obj)
class PrintError(object):
'''A handy base class'''
def diagnostic_name(self):
return self.__class__.__name__
def print_error(self, *msg):
print_error("[%s]" % self.diagnostic_name(), *msg)
def print_msg(self, *msg):
print_msg("[%s]" % self.diagnostic_name(), *msg)
class ThreadJob(PrintError):
"""A job that is run periodically from a thread's main loop. run() is
called from that thread's context.
"""
def run(self):
"""Called periodically from the thread"""
pass
class DebugMem(ThreadJob):
'''A handy class for debugging GC memory leaks'''
def __init__(self, classes, interval=30):
self.next_time = 0
self.classes = classes
self.interval = interval
def mem_stats(self):
import gc
self.print_error("Start memscan")
gc.collect()
objmap = defaultdict(list)
for obj in gc.get_objects():
for class_ in self.classes:
if isinstance(obj, class_):
objmap[class_].append(obj)
for class_, objs in objmap.items():
self.print_error("%s: %d" % (class_.__name__, len(objs)))
self.print_error("Finish memscan")
def run(self):
if time.time() > self.next_time:
self.mem_stats()
self.next_time = time.time() + self.interval
class DaemonThread(threading.Thread, PrintError):
""" daemon thread that terminates cleanly """
def __init__(self):
threading.Thread.__init__(self)
self.parent_thread = threading.currentThread()
self.running = False
self.running_lock = threading.Lock()
self.job_lock = threading.Lock()
self.jobs = []
def add_jobs(self, jobs):
with self.job_lock:
self.jobs.extend(jobs)
def run_jobs(self):
# Don't let a throwing job disrupt the thread, future runs of
# itself, or other jobs. This is useful protection against
# malformed or malicious server responses
with self.job_lock:
for job in self.jobs:
try:
job.run()
except Exception as e:
traceback.print_exc(file=sys.stderr)
def remove_jobs(self, jobs):
with self.job_lock:
for job in jobs:
self.jobs.remove(job)
def start(self):
with self.running_lock:
self.running = True
return threading.Thread.start(self)
def is_running(self):
with self.running_lock:
return self.running and self.parent_thread.is_alive()
def stop(self):
with self.running_lock:
self.running = False
def on_stop(self):
if 'ANDROID_DATA' in os.environ:
import jnius
jnius.detach()
self.print_error("jnius detach")
self.print_error("stopped")
# TODO: disable
is_verbose = True
def set_verbosity(b):
global is_verbose
is_verbose = b
def print_error(*args):
if not is_verbose: return
print_stderr(*args)
def print_stderr(*args):
args = [str(item) for item in args]
sys.stderr.write(" ".join(args) + "\n")
sys.stderr.flush()
def print_msg(*args):
# Stringify args
args = [str(item) for item in args]
sys.stdout.write(" ".join(args) + "\n")
sys.stdout.flush()
def json_encode(obj):
try:
s = json.dumps(obj, sort_keys = True, indent = 4, cls=MyEncoder)
except TypeError:
s = repr(obj)
return s
def json_decode(x):
try:
return json.loads(x, parse_float=Decimal)
except:
return x
# decorator that prints execution time
def profiler(func):
def do_profile(func, args, kw_args):
n = func.__name__
t0 = time.time()
o = func(*args, **kw_args)
t = time.time() - t0
print_error("[profiler]", n, "%.4f"%t)
return o
return lambda *args, **kw_args: do_profile(func, args, kw_args)
def android_ext_dir():
import jnius
env = jnius.autoclass('android.os.Environment')
return env.getExternalStorageDirectory().getPath()
def android_data_dir():
import jnius
PythonActivity = jnius.autoclass('org.kivy.android.PythonActivity')
return PythonActivity.mActivity.getFilesDir().getPath() + '/data'
def android_headers_dir():
d = android_ext_dir() + '/org.electron.electron'
if not os.path.exists(d):
os.mkdir(d)
return d
def android_check_data_dir():
""" if needed, move old directory to sandbox """
ext_dir = android_ext_dir()
data_dir = android_data_dir()
old_electrum_dir = ext_dir + '/electrum'
if not os.path.exists(data_dir) and os.path.exists(old_electrum_dir):
import shutil
new_headers_path = android_headers_dir() + '/blockchain_headers'
old_headers_path = old_electrum_dir + '/blockchain_headers'
if not os.path.exists(new_headers_path) and os.path.exists(old_headers_path):
print_error("Moving headers file to", new_headers_path)
shutil.move(old_headers_path, new_headers_path)
print_error("Moving data to", data_dir)
shutil.move(old_electrum_dir, data_dir)
return data_dir
def get_headers_dir(config):
return android_headers_dir() if 'ANDROID_DATA' in os.environ else config.path
def assert_bytes(*args):
"""
porting helper, assert args type
"""
try:
for x in args:
assert isinstance(x, (bytes, bytearray))
except:
print('assert bytes failed', list(map(type, args)))
raise
def assert_str(*args):
"""
porting helper, assert args type
"""
for x in args:
assert isinstance(x, str)
def to_string(x, enc):
if isinstance(x, (bytes, bytearray)):
return x.decode(enc)
if isinstance(x, str):
return x
else:
raise TypeError("Not a string or bytes like object")
def to_bytes(something, encoding='utf8'):
"""
cast string to bytes() like object, but for python2 support it's bytearray copy
"""
if isinstance(something, bytes):
return something
if isinstance(something, str):
return something.encode(encoding)
elif isinstance(something, bytearray):
return bytes(something)
else:
raise TypeError("Not a string or bytes like object")
bfh = bytes.fromhex
hfu = binascii.hexlify
def bh2u(x):
"""
str with hex representation of a bytes-like object
>>> x = bytes((1, 2, 10))
>>> bh2u(x)
    '01020a'
:param x: bytes
:rtype: str
"""
#v = hfu(x)
#v1 = v.decode('ascii')
#return v1
return hfu(x).decode('ascii')
def user_dir():
if 'ANDROID_DATA' in os.environ:
return android_check_data_dir()
elif os.name == 'posix':
return os.path.join(os.environ["HOME"], ".bitcoinnano")
elif "APPDATA" in os.environ:
return os.path.join(os.environ["APPDATA"], "BitcoinNano")
elif "LOCALAPPDATA" in os.environ:
return os.path.join(os.environ["LOCALAPPDATA"], "BitcoinNano")
else:
#raise Exception("No home directory found in environment variables.")
return
def format_satoshis_plain(x, decimal_point = 8):
"""Display a satoshi amount scaled. Always uses a '.' as a decimal
point and has no thousands separator"""
scale_factor = pow(10, decimal_point)
return "{:.8f}".format(Decimal(x) / scale_factor).rstrip('0').rstrip('.')
def format_satoshis(x, is_diff=False, num_zeros = 0, decimal_point = 8, whitespaces=False):
from locale import localeconv
if x is None:
return 'unknown'
x = int(x) # Some callers pass Decimal
scale_factor = pow (10, decimal_point)
integer_part = "{:n}".format(int(abs(x) / scale_factor))
if x < 0:
integer_part = '-' + integer_part
elif is_diff:
integer_part = '+' + integer_part
dp = localeconv()['decimal_point']
fract_part = ("{:0" + str(decimal_point) + "}").format(abs(x) % scale_factor)
fract_part = fract_part.rstrip('0')
if len(fract_part) < num_zeros:
fract_part += "0" * (num_zeros - len(fract_part))
result = integer_part + dp + fract_part
if whitespaces:
result += " " * (decimal_point - len(fract_part))
result = " " * (15 - len(result)) + result
return result
def timestamp_to_datetime(timestamp):
try:
return datetime.fromtimestamp(timestamp)
except:
return None
def format_time(timestamp):
date = timestamp_to_datetime(timestamp)
return date.isoformat(' ')[:-3] if date else _("Unknown")
# Takes a timestamp and returns a string with the approximation of the age
def age(from_date, since_date = None, target_tz=None, include_seconds=False):
if from_date is None:
return "Unknown"
from_date = datetime.fromtimestamp(from_date)
if since_date is None:
since_date = datetime.now(target_tz)
td = time_difference(from_date - since_date, include_seconds)
return td + " ago" if from_date < since_date else "in " + td
def time_difference(distance_in_time, include_seconds):
#distance_in_time = since_date - from_date
distance_in_seconds = int(round(abs(distance_in_time.days * 86400 + distance_in_time.seconds)))
distance_in_minutes = int(round(distance_in_seconds/60))
if distance_in_minutes <= 1:
if include_seconds:
for remainder in [5, 10, 20]:
if distance_in_seconds < remainder:
return "less than %s seconds" % remainder
if distance_in_seconds < 40:
return "half a minute"
elif distance_in_seconds < 60:
return "less than a minute"
else:
return "1 minute"
else:
if distance_in_minutes == 0:
return "less than a minute"
else:
return "1 minute"
elif distance_in_minutes < 45:
return "%s minutes" % distance_in_minutes
elif distance_in_minutes < 90:
return "about 1 hour"
elif distance_in_minutes < 1440:
return "about %d hours" % (round(distance_in_minutes / 60.0))
elif distance_in_minutes < 2880:
return "1 day"
elif distance_in_minutes < 43220:
return "%d days" % (round(distance_in_minutes / 1440))
elif distance_in_minutes < 86400:
return "about 1 month"
elif distance_in_minutes < 525600:
return "%d months" % (round(distance_in_minutes / 43200))
elif distance_in_minutes < 1051200:
return "about 1 year"
else:
return "over %d years" % (round(distance_in_minutes / 525600))
mainnet_block_explorers = {
'btcnano.org': ('http://explorer.btcnano.org',
{'tx': 'transaction', 'addr': 'address'}),
}
testnet_block_explorers = {
'btcnano.org': ('http://explorer.btcnano.org',
{'tx': 'tx', 'addr': 'address'}),
'system default': ('blockchain:',
{'tx': 'tx', 'addr': 'address'}),
}
def block_explorer_info():
from . import bitcoin
return testnet_block_explorers if bitcoin.NetworkConstants.TESTNET else mainnet_block_explorers
def block_explorer(config):
return config.get('block_explorer', 'btcnano.org')
def block_explorer_tuple(config):
return block_explorer_info().get(block_explorer(config))
def block_explorer_URL(config, kind, item):
be_tuple = block_explorer_tuple(config)
if not be_tuple:
return
kind_str = be_tuple[1].get(kind)
if not kind_str:
return
url_parts = [be_tuple[0], kind_str, item]
return "/".join(url_parts)
# URL decode
#_ud = re.compile('%([0-9a-hA-H]{2})', re.MULTILINE)
#urldecode = lambda x: _ud.sub(lambda m: chr(int(m.group(1), 16)), x)
def parse_URI(uri, on_pr=None):
from . import bitcoin
from .bitcoin import COIN
if ':' not in uri:
if not bitcoin.is_address(uri):
raise BaseException("Not a bitcoin address")
return {'address': uri}
u = urllib.parse.urlparse(uri)
if u.scheme != 'bitcoinnano':
raise BaseException("Not a bitcoinnano URI")
address = u.path
# python for android fails to parse query
if address.find('?') > 0:
address, query = u.path.split('?')
pq = urllib.parse.parse_qs(query)
else:
pq = urllib.parse.parse_qs(u.query)
for k, v in pq.items():
if len(v)!=1:
raise Exception('Duplicate Key', k)
out = {k: v[0] for k, v in pq.items()}
if address:
if not bitcoin.is_address(address):
raise BaseException("Invalid bitcoin address:" + address)
out['address'] = address
if 'amount' in out:
am = out['amount']
m = re.match('([0-9\.]+)X([0-9])', am)
if m:
k = int(m.group(2)) - 8
amount = Decimal(m.group(1)) * pow( Decimal(10) , k)
else:
amount = Decimal(am) * COIN
out['amount'] = int(amount)
if 'message' in out:
out['message'] = out['message']
out['memo'] = out['message']
if 'time' in out:
out['time'] = int(out['time'])
if 'exp' in out:
out['exp'] = int(out['exp'])
if 'sig' in out:
out['sig'] = bh2u(bitcoin.base_decode(out['sig'], None, base=58))
r = out.get('r')
sig = out.get('sig')
name = out.get('name')
if on_pr and (r or (name and sig)):
def get_payment_request_thread():
from . import paymentrequest as pr
if name and sig:
s = pr.serialize_request(out).SerializeToString()
request = pr.PaymentRequest(s)
else:
request = pr.get_payment_request(r)
if on_pr:
on_pr(request)
t = threading.Thread(target=get_payment_request_thread)
t.setDaemon(True)
t.start()
return out
def create_URI(addr, amount, message):
from . import bitcoin
if not bitcoin.is_address(addr):
return ""
query = []
if amount:
query.append('amount=%s'%format_satoshis_plain(amount))
if message:
query.append('message=%s'%urllib.parse.quote(message))
p = urllib.parse.ParseResult(scheme='bitcoinnano', netloc='', path=addr, params='', query='&'.join(query), fragment='')
return urllib.parse.urlunparse(p)
# Python bug (http://bugs.python.org/issue1927) causes raw_input
# to be redirected improperly between stdin/stderr on Unix systems
#TODO: py3
def raw_input(prompt=None):
if prompt:
sys.stdout.write(prompt)
return builtin_raw_input()
import builtins
builtin_raw_input = builtins.input
builtins.input = raw_input
def parse_json(message):
# TODO: check \r\n pattern
n = message.find(b'\n')
if n==-1:
return None, message
try:
j = json.loads(message[0:n].decode('utf8'))
except:
j = None
return j, message[n+1:]
class timeout(Exception):
pass
import socket
import json
import ssl
import time
class SocketPipe:
def __init__(self, socket):
self.socket = socket
self.message = b''
self.set_timeout(0.1)
self.recv_time = time.time()
def set_timeout(self, t):
self.socket.settimeout(t)
def idle_time(self):
return time.time() - self.recv_time
def get(self):
while True:
response, self.message = parse_json(self.message)
if response is not None:
return response
try:
data = self.socket.recv(1024)
except socket.timeout:
raise timeout
except ssl.SSLError:
raise timeout
except socket.error as err:
if err.errno == 60:
raise timeout
elif err.errno in [11, 35, 10035]:
print_error("socket errno %d (resource temporarily unavailable)"% err.errno)
time.sleep(0.2)
raise timeout
else:
print_error("pipe: socket error", err)
data = b''
except:
traceback.print_exc(file=sys.stderr)
data = b''
if not data: # Connection closed remotely
return None
self.message += data
self.recv_time = time.time()
def send(self, request):
out = json.dumps(request) + '\n'
out = out.encode('utf8')
self._send(out)
def send_all(self, requests):
out = b''.join(map(lambda x: (json.dumps(x) + '\n').encode('utf8'), requests))
self._send(out)
def _send(self, out):
while out:
try:
sent = self.socket.send(out)
out = out[sent:]
except ssl.SSLError as e:
print_error("SSLError:", e)
time.sleep(0.1)
continue
except OSError as e:
print_error("OSError", e)
time.sleep(0.1)
continue
class QueuePipe:
def __init__(self, send_queue=None, get_queue=None):
self.send_queue = send_queue if send_queue else queue.Queue()
self.get_queue = get_queue if get_queue else queue.Queue()
self.set_timeout(0.1)
def get(self):
try:
return self.get_queue.get(timeout=self.timeout)
except queue.Empty:
raise timeout
def get_all(self):
responses = []
while True:
try:
r = self.get_queue.get_nowait()
responses.append(r)
except queue.Empty:
break
return responses
def set_timeout(self, t):
self.timeout = t
def send(self, request):
self.send_queue.put(request)
def send_all(self, requests):
for request in requests:
self.send(request)
def check_www_dir(rdir):
import urllib, shutil, os
if not os.path.exists(rdir):
os.mkdir(rdir)
index = os.path.join(rdir, 'index.html')
if not os.path.exists(index):
print_error("copying index.html")
src = os.path.join(os.path.dirname(__file__), 'www', 'index.html')
shutil.copy(src, index)
files = [
"https://code.jquery.com/jquery-1.9.1.min.js",
"https://raw.githubusercontent.com/davidshimjs/qrcodejs/master/qrcode.js",
"https://code.jquery.com/ui/1.10.3/jquery-ui.js",
"https://code.jquery.com/ui/1.10.3/themes/smoothness/jquery-ui.css"
]
for URL in files:
path = urllib.parse.urlsplit(URL).path
filename = os.path.basename(path)
path = os.path.join(rdir, filename)
if not os.path.exists(path):
print_error("downloading ", URL)
urllib.request.urlretrieve(URL, path)
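A hedged illustration of the formatting helpers defined in this module; the amounts are arbitrary and the expected output assumes the default C locale, with the module's definitions in scope.

# Hypothetical usage of the helpers above.
print(format_satoshis_plain(123450000))        # 1.2345
print(format_satoshis(123450000))              # 1.2345 (locale decimal point)
print(format_satoshis(-5000, is_diff=True))    # -0.00005
print(age(time.time() - 90))                   # 2 minutes ago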
avg_line_length: 30.674855 | max_line_length: 127 | alphanum_fraction: 0.616055

hexsha: bf70bd98d77e44ac07dd2a3a40b47116bcdb975c | size: 2,014 | ext: py | lang: Python
path: docs/conf.py | repo: spam128/notes | head_hexsha: 100008b7e0a2afa5677c15826588105027f52883 | licenses: ["MIT"]
stars: null | issues: null | forks: null
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
import django
sys.path.insert(0, os.path.abspath(".."))
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
django.setup()
# -- Project information -----------------------------------------------------
project = "notes"
copyright = """2020, Dawid Lesniak"""
author = "Dawid Lesniak"
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.napoleon",
]
# Add any paths that contain templates here, relative to this directory.
# templates_path = ["_templates"]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "alabaster"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ["_static"]
avg_line_length: 34.724138 | max_line_length: 79 | alphanum_fraction: 0.666832

hexsha: 0e6390c5886e8a827c4d5636acb7eb595483e169 | size: 1,406 | ext: py | lang: Python
path: Submission_Assignment_3_CV/Source/getIntrinsic.py | repo: rahuljain1310/Augmented-Reality-Application | head_hexsha: 6a464151fc08af45197b35a68734bc613ed2a7db | licenses: ["MIT"]
stars: null | issues: null | forks: null
import os,glob,math,argparse
import cv2
import numpy as np
def getGrayImage(fname,shape):
img = cv2.imread(fname)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
gray = cv2.resize(gray, shape)
return gray
def getK():
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
# prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
objp = np.zeros((7*7,3), np.float32)
objp[:,:2] = np.mgrid[0:7,0:7].T.reshape(-1,2)
# Arrays to store object points and image points from all the images.
objpoints = [] # 3d point in real world space
imgpoints = [] # 2d points in image plane.
images = glob.glob('CalibrationImages/Set4/*.jpg')
Shape = None
for fname in images:
gray = getGrayImage(fname,(640,352))
Shape = gray.shape[::-1]
ret, corners = cv2.findChessboardCorners(gray, (7,7),None)
if ret == True:
print(fname)
objpoints.append(objp)
corners2 = cv2.cornerSubPix(gray,corners,(11,11),(-1,-1),criteria)
imgpoints.append(corners2)
# Draw and display the corners
img = cv2.drawChessboardCorners(gray, (7,7), corners2, ret )
# cv2.imshow('img',gray)
# cv2.waitKey(1000)
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, Shape, None,None)
if ret:
print("Image Size:")
print(Shape)
print("Camera Matrix:")
print(mtx)
return mtx
else:
print("No Solution Found")
return None
cv2.destroyAllWindows()
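A hedged follow-up assuming the chessboard calibration images referenced above actually exist on disk; the output filename is a placeholder.

# Hypothetical entry point: persist the intrinsic matrix for the AR pipeline.
if __name__ == '__main__':
    K = getK()
    if K is not None:
        np.save('intrinsic_matrix.npy', K)
        print('Focal lengths:', K[0, 0], K[1, 1])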
avg_line_length: 29.291667 | max_line_length: 91 | alphanum_fraction: 0.686344

hexsha: b09126c70b5dd5df0a2e63be61fa49e2ce60c511 | size: 2,254 | ext: py | lang: Python
path: data/task_scripts/main/task00215.py | repo: aallaire91/phyre | head_hexsha: ee882194c12bae5561c25ec65f95a7c0944f8129 | licenses: ["Apache-2.0"]
stars: 432 (2019-08-15T15:45:43.000Z to 2022-02-26T23:13:34.000Z) | issues: 38 (2019-09-06T15:39:03.000Z to 2022-03-12T00:11:25.000Z) | forks: 69 (2019-08-16T02:08:41.000Z to 2022-01-27T23:23:03.000Z)
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Template task with a ball that must fall on the other side of a jar."""
import phyre.creator as creator_lib
__JAR_XS = [val * 0.1 for val in range(3, 7)]
__JAR_SCALES = [val * 0.1 for val in range(2, 6)]
__BALL_XS = [val * 0.1 for val in range(2, 8)]
__BALL_YS = [val * 0.1 for val in range(5, 8)]
@creator_lib.define_task_template(
jar_x=__JAR_XS, jar_scale=__JAR_SCALES, ball_x=__BALL_XS, ball_y=__BALL_YS, version='2')
def build_task(C, jar_x, jar_scale, ball_x, ball_y):
# Add jar.
jar = C.add('dynamic jar', scale=jar_scale) \
.set_left(jar_x * C.scene.width) \
.set_bottom(0.)
if jar.left < 0. or jar.right > C.scene.width:
raise creator_lib.SkipTemplateParams
# Add ball that is not hovering over jar.
ball = C.add('dynamic ball', scale=0.1) \
.set_center_x(ball_x * C.scene.width) \
.set_bottom(0.9 * C.scene.height)
# Add a floor bar into two parts: target part and non-target part.
if ball.left > jar.right: # ball is right of jar
bottom_wall = C.add('static bar', 1.0, bottom=0, right=jar.left)
C.add('static bar', 1.0, bottom=0, left=bottom_wall.right)
elif ball.right < jar.left: # ball is left of jar
bottom_wall = C.add('static bar', 1.0, bottom=0, left=jar.right)
C.add('static bar', 1.0, bottom=0, right=bottom_wall.left)
else:
raise creator_lib.SkipTemplateParams
jar.set_bottom(bottom_wall.top)
# Create assignment.
C.update_task(body1=ball,
body2=bottom_wall,
relationships=[C.SpatialRelationship.TOUCHING])
C.set_meta(C.SolutionTier.PRE_TWO_BALLS)
avg_line_length: 39.54386 | max_line_length: 92 | alphanum_fraction: 0.679681

hexsha: d8633272c320677f64ea6c3ce926d2e2b42b06a1 | size: 1,546 | ext: py | lang: Python
path: samples/generated_samples/aiplatform_generated_aiplatform_v1beta1_tensorboard_service_get_tensorboard_run_sync.py | repo: sakagarwal/python-aiplatform | head_hexsha: 62b4a1ea589235910c6e87f027899a29bf1bacb1 | licenses: ["Apache-2.0"]
stars: 1 (2022-03-30T05:23:29.000Z to 2022-03-30T05:23:29.000Z) | issues: null | forks: null
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for GetTensorboardRun
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_generated_aiplatform_v1beta1_TensorboardService_GetTensorboardRun_sync]
from google.cloud import aiplatform_v1beta1
def sample_get_tensorboard_run():
# Create a client
client = aiplatform_v1beta1.TensorboardServiceClient()
# Initialize request argument(s)
request = aiplatform_v1beta1.GetTensorboardRunRequest(
name="name_value",
)
# Make the request
response = client.get_tensorboard_run(request=request)
# Handle the response
print(response)
# [END aiplatform_generated_aiplatform_v1beta1_TensorboardService_GetTensorboardRun_sync]
avg_line_length: 33.608696 | max_line_length: 91 | alphanum_fraction: 0.774256

hexsha: d589bbe25ff2ed16a904c8051b6ad3bca7da05ae | size: 9,191 | ext: py | lang: Python
path: google/cloud/redis_v1beta1/services/cloud_redis/transports/base.py | repo: googleapis/python-redis | head_hexsha: 9c0cd3dc619557e6f73e418f0af16f38cfdcfc44 | licenses: ["Apache-2.0"]
stars: 9 (2020-09-19T11:31:25.000Z to 2022-03-18T04:43:06.000Z) | issues: 50 (2020-02-03T19:18:21.000Z to 2022-03-15T21:27:39.000Z) | forks: 4 (2020-02-03T19:00:51.000Z to 2022-01-29T08:13:18.000Z)
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import pkg_resources
import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.redis_v1beta1.types import cloud_redis
from google.longrunning import operations_pb2 # type: ignore
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-cloud-redis",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class CloudRedisTransport(abc.ABC):
"""Abstract transport class for CloudRedis."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
DEFAULT_HOST: str = "redis.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.list_instances: gapic_v1.method.wrap_method(
self.list_instances, default_timeout=600.0, client_info=client_info,
),
self.get_instance: gapic_v1.method.wrap_method(
self.get_instance, default_timeout=600.0, client_info=client_info,
),
self.create_instance: gapic_v1.method.wrap_method(
self.create_instance, default_timeout=600.0, client_info=client_info,
),
self.update_instance: gapic_v1.method.wrap_method(
self.update_instance, default_timeout=600.0, client_info=client_info,
),
self.upgrade_instance: gapic_v1.method.wrap_method(
self.upgrade_instance, default_timeout=600.0, client_info=client_info,
),
self.import_instance: gapic_v1.method.wrap_method(
self.import_instance, default_timeout=600.0, client_info=client_info,
),
self.export_instance: gapic_v1.method.wrap_method(
self.export_instance, default_timeout=600.0, client_info=client_info,
),
self.failover_instance: gapic_v1.method.wrap_method(
self.failover_instance, default_timeout=600.0, client_info=client_info,
),
self.delete_instance: gapic_v1.method.wrap_method(
self.delete_instance, default_timeout=600.0, client_info=client_info,
),
}
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def operations_client(self):
"""Return the client designed to process long-running operations."""
raise NotImplementedError()
@property
def list_instances(
self,
) -> Callable[
[cloud_redis.ListInstancesRequest],
Union[
cloud_redis.ListInstancesResponse,
Awaitable[cloud_redis.ListInstancesResponse],
],
]:
raise NotImplementedError()
@property
def get_instance(
self,
) -> Callable[
[cloud_redis.GetInstanceRequest],
Union[cloud_redis.Instance, Awaitable[cloud_redis.Instance]],
]:
raise NotImplementedError()
@property
def create_instance(
self,
) -> Callable[
[cloud_redis.CreateInstanceRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def update_instance(
self,
) -> Callable[
[cloud_redis.UpdateInstanceRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def upgrade_instance(
self,
) -> Callable[
[cloud_redis.UpgradeInstanceRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def import_instance(
self,
) -> Callable[
[cloud_redis.ImportInstanceRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def export_instance(
self,
) -> Callable[
[cloud_redis.ExportInstanceRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def failover_instance(
self,
) -> Callable[
[cloud_redis.FailoverInstanceRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def delete_instance(
self,
) -> Callable[
[cloud_redis.DeleteInstanceRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
__all__ = ("CloudRedisTransport",)
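For context, a hedged sketch of the concrete client built on top of this transport; the project path below is a placeholder and Application Default Credentials are assumed.

# Hypothetical usage of the generated client that wraps this transport.
from google.cloud import redis_v1beta1

client = redis_v1beta1.CloudRedisClient()       # selects a gRPC transport by default
parent = 'projects/my-project/locations/-'      # placeholder project, all locations
for instance in client.list_instances(parent=parent):
    print(instance.name, instance.state)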
avg_line_length: 36.328063 | max_line_length: 101 | alphanum_fraction: 0.656403

hexsha: cdb1af5c3d133ed31beb95e15f951f66ed9c9659 | size: 13,487 | ext: py | lang: Python
path: Src/StdLib/Lib/test/test_gzip.py | repo: cwensley/ironpython2 | head_hexsha: f854444e1e08afc8850cb7c1a739a7dd2d10d32a | licenses: ["Apache-2.0"]
stars: 1,078 (2016-07-19T02:48:30.000Z to 2022-03-30T21:22:34.000Z) | issues: 576 (2017-05-21T12:36:48.000Z to 2022-03-30T13:47:03.000Z) | forks: 269 (2017-05-21T04:44:47.000Z to 2022-03-31T16:18:13.000Z)
"""Test script for the gzip module.
"""
import unittest
from test import test_support
import os
import io
import struct
import tempfile
gzip = test_support.import_module('gzip')
data1 = """ int length=DEFAULTALLOC, err = Z_OK;
PyObject *RetVal;
int flushmode = Z_FINISH;
unsigned long start_total_out;
"""
data2 = """/* zlibmodule.c -- gzip-compatible data compression */
/* See http://www.gzip.org/zlib/
/* See http://www.winimage.com/zLibDll for Windows */
"""
class TestGzip(unittest.TestCase):
filename = test_support.TESTFN
def setUp(self):
test_support.unlink(self.filename)
def tearDown(self):
test_support.unlink(self.filename)
def write_and_read_back(self, data, mode='b'):
b_data = memoryview(data).tobytes()
with gzip.GzipFile(self.filename, 'w'+mode) as f:
l = f.write(data)
self.assertEqual(l, len(b_data))
with gzip.GzipFile(self.filename, 'r'+mode) as f:
self.assertEqual(f.read(), b_data)
@test_support.requires_unicode
def test_unicode_filename(self):
unicode_filename = test_support.TESTFN_UNICODE
try:
unicode_filename.encode(test_support.TESTFN_ENCODING)
except (UnicodeError, TypeError):
self.skipTest("Requires unicode filenames support")
self.filename = unicode_filename
with gzip.GzipFile(unicode_filename, "wb") as f:
f.write(data1 * 50)
with gzip.GzipFile(unicode_filename, "rb") as f:
self.assertEqual(f.read(), data1 * 50)
# Sanity check that we are actually operating on the right file.
with open(unicode_filename, 'rb') as fobj, \
gzip.GzipFile(fileobj=fobj, mode="rb") as f:
self.assertEqual(f.read(), data1 * 50)
def test_write(self):
with gzip.GzipFile(self.filename, 'wb') as f:
f.write(data1 * 50)
# Try flush and fileno.
f.flush()
f.fileno()
if hasattr(os, 'fsync'):
os.fsync(f.fileno())
f.close()
# Test multiple close() calls.
f.close()
# The following test_write_xy methods test that write accepts
# the corresponding bytes-like object type as input
# and that the data written equals bytes(xy) in all cases.
def test_write_memoryview(self):
self.write_and_read_back(memoryview(data1 * 50))
def test_write_incompatible_type(self):
# Test that non-bytes-like types raise TypeError.
# Issue #21560: attempts to write incompatible types
# should not affect the state of the fileobject
with gzip.GzipFile(self.filename, 'wb') as f:
with self.assertRaises(UnicodeEncodeError):
f.write(u'\xff')
with self.assertRaises(TypeError):
f.write([1])
f.write(data1)
with gzip.GzipFile(self.filename, 'rb') as f:
self.assertEqual(f.read(), data1)
def test_read(self):
self.test_write()
# Try reading.
with gzip.GzipFile(self.filename, 'r') as f:
d = f.read()
self.assertEqual(d, data1*50)
def test_read_universal_newlines(self):
# Issue #5148: Reading breaks when mode contains 'U'.
self.test_write()
with gzip.GzipFile(self.filename, 'rU') as f:
d = f.read()
self.assertEqual(d, data1*50)
def test_io_on_closed_object(self):
# Test that I/O operations on closed GzipFile objects raise a
# ValueError, just like the corresponding functions on file objects.
# Write to a file, open it for reading, then close it.
self.test_write()
f = gzip.GzipFile(self.filename, 'r')
f.close()
with self.assertRaises(ValueError):
f.read(1)
with self.assertRaises(ValueError):
f.seek(0)
with self.assertRaises(ValueError):
f.tell()
# Open the file for writing, then close it.
f = gzip.GzipFile(self.filename, 'w')
f.close()
with self.assertRaises(ValueError):
f.write('')
with self.assertRaises(ValueError):
f.flush()
def test_append(self):
self.test_write()
# Append to the previous file
with gzip.GzipFile(self.filename, 'ab') as f:
f.write(data2 * 15)
with gzip.GzipFile(self.filename, 'rb') as f:
d = f.read()
self.assertEqual(d, (data1*50) + (data2*15))
def test_many_append(self):
# Bug #1074261 was triggered when reading a file that contained
# many, many members. Create such a file and verify that reading it
# works.
with gzip.open(self.filename, 'wb', 9) as f:
f.write('a')
for i in range(0, 200):
with gzip.open(self.filename, "ab", 9) as f: # append
f.write('a')
# Try reading the file
with gzip.open(self.filename, "rb") as zgfile:
contents = ""
while 1:
ztxt = zgfile.read(8192)
contents += ztxt
if not ztxt: break
self.assertEqual(contents, 'a'*201)
def test_buffered_reader(self):
# Issue #7471: a GzipFile can be wrapped in a BufferedReader for
# performance.
self.test_write()
with gzip.GzipFile(self.filename, 'rb') as f:
with io.BufferedReader(f) as r:
lines = [line for line in r]
self.assertEqual(lines, 50 * data1.splitlines(True))
def test_readline(self):
self.test_write()
# Try .readline() with varying line lengths
with gzip.GzipFile(self.filename, 'rb') as f:
line_length = 0
while 1:
L = f.readline(line_length)
if not L and line_length != 0: break
self.assertTrue(len(L) <= line_length)
line_length = (line_length + 1) % 50
def test_readlines(self):
self.test_write()
# Try .readlines()
with gzip.GzipFile(self.filename, 'rb') as f:
L = f.readlines()
with gzip.GzipFile(self.filename, 'rb') as f:
while 1:
L = f.readlines(150)
if L == []: break
def test_seek_read(self):
self.test_write()
# Try seek, read test
with gzip.GzipFile(self.filename) as f:
while 1:
oldpos = f.tell()
line1 = f.readline()
if not line1: break
newpos = f.tell()
f.seek(oldpos) # negative seek
if len(line1)>10:
amount = 10
else:
amount = len(line1)
line2 = f.read(amount)
self.assertEqual(line1[:amount], line2)
f.seek(newpos) # positive seek
def test_seek_whence(self):
self.test_write()
# Try seek(whence=1), read test
with gzip.GzipFile(self.filename) as f:
f.read(10)
f.seek(10, whence=1)
y = f.read(10)
self.assertEqual(y, data1[20:30])
def test_seek_write(self):
# Try seek, write test
with gzip.GzipFile(self.filename, 'w') as f:
for pos in range(0, 256, 16):
f.seek(pos)
f.write('GZ\n')
def test_mode(self):
self.test_write()
with gzip.GzipFile(self.filename, 'r') as f:
self.assertEqual(f.myfileobj.mode, 'rb')
def test_1647484(self):
for mode in ('wb', 'rb'):
with gzip.GzipFile(self.filename, mode) as f:
self.assertTrue(hasattr(f, "name"))
self.assertEqual(f.name, self.filename)
def test_mtime(self):
mtime = 123456789
with gzip.GzipFile(self.filename, 'w', mtime = mtime) as fWrite:
fWrite.write(data1)
with gzip.GzipFile(self.filename) as fRead:
dataRead = fRead.read()
self.assertEqual(dataRead, data1)
self.assertTrue(hasattr(fRead, 'mtime'))
self.assertEqual(fRead.mtime, mtime)
def test_metadata(self):
mtime = 123456789
with gzip.GzipFile(self.filename, 'w', mtime = mtime) as fWrite:
fWrite.write(data1)
with open(self.filename, 'rb') as fRead:
# see RFC 1952: http://www.faqs.org/rfcs/rfc1952.html
idBytes = fRead.read(2)
self.assertEqual(idBytes, '\x1f\x8b') # gzip ID
cmByte = fRead.read(1)
self.assertEqual(cmByte, '\x08') # deflate
flagsByte = fRead.read(1)
self.assertEqual(flagsByte, '\x08') # only the FNAME flag is set
mtimeBytes = fRead.read(4)
self.assertEqual(mtimeBytes, struct.pack('<i', mtime)) # little-endian
xflByte = fRead.read(1)
self.assertEqual(xflByte, '\x02') # maximum compression
osByte = fRead.read(1)
self.assertEqual(osByte, '\xff') # OS "unknown" (OS-independent)
# Since the FNAME flag is set, the zero-terminated filename follows.
# RFC 1952 specifies that this is the name of the input file, if any.
# However, the gzip module defaults to storing the name of the output
# file in this field.
expected = self.filename.encode('Latin-1') + '\x00'
nameBytes = fRead.read(len(expected))
self.assertEqual(nameBytes, expected)
# Since no other flags were set, the header ends here.
# Rather than process the compressed data, let's seek to the trailer.
fRead.seek(os.stat(self.filename).st_size - 8)
crc32Bytes = fRead.read(4) # CRC32 of uncompressed data [data1]
self.assertEqual(crc32Bytes, '\xaf\xd7d\x83')
isizeBytes = fRead.read(4)
self.assertEqual(isizeBytes, struct.pack('<i', len(data1)))
def test_with_open(self):
# GzipFile supports the context management protocol
with gzip.GzipFile(self.filename, "wb") as f:
f.write(b"xxx")
f = gzip.GzipFile(self.filename, "rb")
f.close()
try:
with f:
pass
except ValueError:
pass
else:
self.fail("__enter__ on a closed file didn't raise an exception")
try:
with gzip.GzipFile(self.filename, "wb") as f:
1 // 0
except ZeroDivisionError:
pass
else:
self.fail("1 // 0 didn't raise an exception")
def test_zero_padded_file(self):
with gzip.GzipFile(self.filename, "wb") as f:
f.write(data1 * 50)
# Pad the file with zeroes
with open(self.filename, "ab") as f:
f.write("\x00" * 50)
with gzip.GzipFile(self.filename, "rb") as f:
d = f.read()
self.assertEqual(d, data1 * 50, "Incorrect data in file")
def test_fileobj_from_fdopen(self):
# Issue #13781: Creating a GzipFile using a fileobj from os.fdopen()
# should not embed the fake filename "<fdopen>" in the output file.
fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT)
with os.fdopen(fd, "wb") as f:
with gzip.GzipFile(fileobj=f, mode="w") as g:
self.assertEqual(g.name, "")
def test_fileobj_from_io_open(self):
fd = os.open(self.filename, os.O_WRONLY | os.O_CREAT)
with io.open(fd, "wb") as f:
with gzip.GzipFile(fileobj=f, mode="w") as g:
self.assertEqual(g.name, "")
def test_fileobj_mode(self):
gzip.GzipFile(self.filename, "wb").close()
with open(self.filename, "r+b") as f:
with gzip.GzipFile(fileobj=f, mode='r') as g:
self.assertEqual(g.mode, gzip.READ)
with gzip.GzipFile(fileobj=f, mode='w') as g:
self.assertEqual(g.mode, gzip.WRITE)
with gzip.GzipFile(fileobj=f, mode='a') as g:
self.assertEqual(g.mode, gzip.WRITE)
with self.assertRaises(IOError):
gzip.GzipFile(fileobj=f, mode='z')
for mode in "rb", "r+b":
with open(self.filename, mode) as f:
with gzip.GzipFile(fileobj=f) as g:
self.assertEqual(g.mode, gzip.READ)
for mode in "wb", "ab":
with open(self.filename, mode) as f:
with gzip.GzipFile(fileobj=f) as g:
self.assertEqual(g.mode, gzip.WRITE)
def test_read_with_extra(self):
# Gzip data with an extra field
gzdata = (b'\x1f\x8b\x08\x04\xb2\x17cQ\x02\xff'
b'\x05\x00Extra'
b'\x0bI-.\x01\x002\xd1Mx\x04\x00\x00\x00')
with gzip.GzipFile(fileobj=io.BytesIO(gzdata)) as f:
self.assertEqual(f.read(), b'Test')
def test_fileobj_without_name(self):
# Issue #33038: GzipFile should not assume that file objects that have
# a .name attribute use a non-None value.
with tempfile.SpooledTemporaryFile() as f:
with gzip.GzipFile(fileobj=f, mode='wb') as archive:
archive.write(b'data')
self.assertEqual(archive.name, '')
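# Hedged illustration (not one of the original test cases): the eight trailer bytes
# checked in test_metadata() are just the little-endian CRC32 and length (ISIZE) of
# the uncompressed payload, as specified by RFC 1952.
def _example_expected_trailer(payload=data1):
    import zlib
    crc32 = struct.pack('<I', zlib.crc32(payload) & 0xffffffff)
    isize = struct.pack('<I', len(payload) & 0xffffffff)
    return crc32 + isize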
def test_main(verbose=None):
test_support.run_unittest(TestGzip)
if __name__ == "__main__":
test_main(verbose=True)
| 35.306283
| 82
| 0.571884
|
15245dd767567eec3488665c510e0a6aa2660ded
| 1,127
|
py
|
Python
|
setup.py
|
quickdata-team/panamah-sdk-python
|
746f3fb7ebcf01810917bf9afa8e7ff5a4efad21
|
[
"MIT"
] | null | null | null |
setup.py
|
quickdata-team/panamah-sdk-python
|
746f3fb7ebcf01810917bf9afa8e7ff5a4efad21
|
[
"MIT"
] | null | null | null |
setup.py
|
quickdata-team/panamah-sdk-python
|
746f3fb7ebcf01810917bf9afa8e7ff5a4efad21
|
[
"MIT"
] | null | null | null |
import setuptools
import json
with open("README.md", "r") as fh:
long_description = fh.read()
with open('Pipfile.lock') as fd:
lock_data = json.load(fd)
install_requires = [
package_name + package_data['version']
for package_name, package_data in lock_data['default'].items()
]
tests_require = [
package_name + package_data['version']
for package_name, package_data in lock_data['develop'].items()
]
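# Hedged illustration (not used by the build): the comprehensions above assume
# Pipfile.lock entries of the shape {"<name>": {"version": "==x.y.z", ...}}, which
# concatenate into pip-style requirement strings.
_example_lock_section = {"requests": {"version": "==2.25.1"}}
_example_requirements = [
    name + data["version"] for name, data in _example_lock_section.items()
]
# _example_requirements == ["requests==2.25.1"]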
setuptools.setup(
name="panamah-sdk-python",
version="1.0.4",
author="Casa Magalhães",
author_email="contato@casamagalhaes.com.br",
description="Panamah Software Development Kit for Python",
long_description="APIs and models for Panamah services",
long_description_content_type="text/markdown",
url="https://github.com/casamagalhaes/panamah-sdk-python",
packages=setuptools.find_packages(),
install_requires=install_requires,
tests_require=tests_require,
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
| 32.2
| 70
| 0.681455
|
2e72720dfb831ca3a8cbe8bdddf92b58602d17b8
| 24,950
|
py
|
Python
|
myven/lib/python3.8/site-packages/ansible/modules/system/cron.py
|
baltham/dne-dna-code
|
4a13309a790a670d2f07e635c9264a0c29976c6a
|
[
"MIT"
] | 1
|
2021-04-02T08:08:39.000Z
|
2021-04-02T08:08:39.000Z
|
myven/lib/python3.8/site-packages/ansible/modules/system/cron.py
|
baltham/dne-dna-code
|
4a13309a790a670d2f07e635c9264a0c29976c6a
|
[
"MIT"
] | null | null | null |
myven/lib/python3.8/site-packages/ansible/modules/system/cron.py
|
baltham/dne-dna-code
|
4a13309a790a670d2f07e635c9264a0c29976c6a
|
[
"MIT"
] | 1
|
2020-05-03T01:13:16.000Z
|
2020-05-03T01:13:16.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Dane Summers <dsummers@pinedesk.biz>
# Copyright: (c) 2013, Mike Grozak <mike.grozak@gmail.com>
# Copyright: (c) 2013, Patrick Callahan <pmc@patrickcallahan.com>
# Copyright: (c) 2015, Evan Kaufman <evan@digitalflophouse.com>
# Copyright: (c) 2015, Luca Berruti <nadirio@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: cron
short_description: Manage cron.d and crontab entries
description:
- Use this module to manage crontab and environment variable entries. This module allows
you to create, update, or delete environment variables and named crontab entries.
- 'When crontab jobs are managed: the module includes one line with the description of the
crontab entry C("#Ansible: <name>") corresponding to the "name" passed to the module,
which is used by future ansible/module calls to find/check the state. The "name"
parameter should be unique, and changing the "name" value will result in a new cron
task being created (or a different one being removed).'
- 'When environment variables are managed: no comment line is added, but, when the module
needs to find/check the state, it uses the "name" parameter to find the environment
variable definition line.'
- 'When using symbols such as %, they must be properly escaped.'
version_added: "0.9"
options:
name:
description:
- Description of a crontab entry or, if env is set, the name of environment variable.
Required if state=absent. Note that if name is not set and state=present, then a
new crontab entry will always be created, regardless of existing ones.
user:
description:
- The specific user whose crontab should be modified.
default: root
job:
description:
- The command to execute or, if env is set, the value of environment variable.
The command should not contain line breaks.
Required if state=present.
aliases: [ value ]
state:
description:
- Whether to ensure the job or environment variable is present or absent.
choices: [ absent, present ]
default: present
cron_file:
description:
- If specified, uses this file instead of an individual user's crontab.
If this is a relative path, it is interpreted with respect to
/etc/cron.d. (If it is absolute, it will typically be /etc/crontab).
Many linux distros expect (and some require) the filename portion to consist solely
of upper- and lower-case letters, digits, underscores, and hyphens.
To use the C(cron_file) parameter you must specify the C(user) as well.
backup:
description:
- If set, create a backup of the crontab before it is modified.
The location of the backup is returned in the C(backup_file) variable by this module.
type: bool
default: 'no'
minute:
description:
- Minute when the job should run ( 0-59, *, */2, etc )
default: "*"
hour:
description:
- Hour when the job should run ( 0-23, *, */2, etc )
default: "*"
day:
description:
- Day of the month the job should run ( 1-31, *, */2, etc )
default: "*"
aliases: [ dom ]
month:
description:
- Month of the year the job should run ( 1-12, *, */2, etc )
default: "*"
weekday:
description:
- Day of the week that the job should run ( 0-6 for Sunday-Saturday, *, etc )
default: "*"
aliases: [ dow ]
reboot:
description:
- If the job should be run at reboot. This option is deprecated. Users should use special_time.
version_added: "1.0"
type: bool
default: "no"
special_time:
description:
- Special time specification nickname.
choices: [ reboot, yearly, annually, monthly, weekly, daily, hourly ]
version_added: "1.3"
disabled:
description:
- If the job should be disabled (commented out) in the crontab.
- Only has effect if C(state=present).
type: bool
default: 'no'
version_added: "2.0"
env:
description:
- If set, manages a crontab's environment variable. New variables are added on top of crontab.
"name" and "value" parameters are the name and the value of environment variable.
type: bool
default: "no"
version_added: "2.1"
insertafter:
description:
- Used with C(state=present) and C(env). If specified, the environment variable will be
inserted after the declaration of specified environment variable.
version_added: "2.1"
insertbefore:
description:
- Used with C(state=present) and C(env). If specified, the environment variable will be
inserted before the declaration of specified environment variable.
version_added: "2.1"
requirements:
- cron
author:
- Dane Summers (@dsummersl)
- Mike Grozak
- Patrick Callahan
- Evan Kaufman (@EvanK)
- Luca Berruti (@lberruti)
"""
EXAMPLES = '''
# Ensure a job that runs at 2 and 5 exists.
# Creates an entry like "0 5,2 * * ls -alh > /dev/null"
- cron:
name: "check dirs"
minute: "0"
hour: "5,2"
job: "ls -alh > /dev/null"
# Ensure an old job is no longer present. Removes any job that is prefixed
# by "#Ansible: an old job" from the crontab
- cron:
name: "an old job"
state: absent
# Creates an entry like "@reboot /some/job.sh"
- cron:
name: "a job for reboot"
special_time: reboot
job: "/some/job.sh"
# Creates an entry like "PATH=/opt/bin" on top of crontab
- cron:
name: PATH
env: yes
value: /opt/bin
# Creates an entry like "APP_HOME=/srv/app" and insert it after PATH
# declaration
- cron:
name: APP_HOME
env: yes
value: /srv/app
insertafter: PATH
# Creates a cron file under /etc/cron.d
- cron:
name: yum autoupdate
weekday: 2
minute: 0
hour: 12
user: root
job: "YUMINTERACTIVE: 0 /usr/sbin/yum-autoupdate"
cron_file: ansible_yum-autoupdate
# Removes a cron file from under /etc/cron.d
- cron:
name: "yum autoupdate"
cron_file: ansible_yum-autoupdate
state: absent
# Removes "APP_HOME" environment variable from crontab
- cron:
name: APP_HOME
env: yes
state: absent
'''
import os
import platform
import pipes
import pwd
import re
import sys
import tempfile
from ansible.module_utils.basic import AnsibleModule, get_platform
CRONCMD = "/usr/bin/crontab"
class CronTabError(Exception):
pass
class CronTab(object):
"""
CronTab object to write time based crontab file
user - the user of the crontab (defaults to root)
cron_file - a cron file under /etc/cron.d, or an absolute path
"""
def __init__(self, module, user=None, cron_file=None):
self.module = module
self.user = user
self.root = (os.getuid() == 0)
self.lines = None
self.ansible = "#Ansible: "
self.existing = ''
if cron_file:
if os.path.isabs(cron_file):
self.cron_file = cron_file
else:
self.cron_file = os.path.join('/etc/cron.d', cron_file)
else:
self.cron_file = None
self.read()
def read(self):
# Read in the crontab from the system
self.lines = []
if self.cron_file:
# read the cronfile
try:
f = open(self.cron_file, 'r')
self.existing = f.read()
self.lines = self.existing.splitlines()
f.close()
except IOError:
# cron file does not exist
return
except:
raise CronTabError("Unexpected error:", sys.exc_info()[0])
else:
# using safely quoted shell for now, but this really should be two non-shell calls instead. FIXME
(rc, out, err) = self.module.run_command(self._read_user_execute(), use_unsafe_shell=True)
if rc != 0 and rc != 1: # 1 can mean that there are no jobs.
raise CronTabError("Unable to read crontab")
self.existing = out
lines = out.splitlines()
count = 0
for l in lines:
if count > 2 or (not re.match(r'# DO NOT EDIT THIS FILE - edit the master and reinstall.', l) and
not re.match(r'# \(/tmp/.*installed on.*\)', l) and
not re.match(r'# \(.*version.*\)', l)):
self.lines.append(l)
else:
pattern = re.escape(l) + '[\r\n]?'
self.existing = re.sub(pattern, '', self.existing, 1)
count += 1
def is_empty(self):
if len(self.lines) == 0:
return True
else:
return False
def write(self, backup_file=None):
"""
Write the crontab to the system. Saves all information.
"""
if backup_file:
fileh = open(backup_file, 'w')
elif self.cron_file:
fileh = open(self.cron_file, 'w')
else:
filed, path = tempfile.mkstemp(prefix='crontab')
os.chmod(path, int('0644', 8))
fileh = os.fdopen(filed, 'w')
fileh.write(self.render())
fileh.close()
# return if making a backup
if backup_file:
return
# Add the entire crontab back to the user crontab
if not self.cron_file:
# quoting shell args for now but really this should be two non-shell calls. FIXME
(rc, out, err) = self.module.run_command(self._write_execute(path), use_unsafe_shell=True)
os.unlink(path)
if rc != 0:
self.module.fail_json(msg=err)
# set SELinux permissions
if self.module.selinux_enabled() and self.cron_file:
self.module.set_default_selinux_context(self.cron_file, False)
def do_comment(self, name):
return "%s%s" % (self.ansible, name)
def add_job(self, name, job):
# Add the comment
self.lines.append(self.do_comment(name))
# Add the job
self.lines.append("%s" % (job))
def update_job(self, name, job):
return self._update_job(name, job, self.do_add_job)
def do_add_job(self, lines, comment, job):
lines.append(comment)
lines.append("%s" % (job))
def remove_job(self, name):
return self._update_job(name, "", self.do_remove_job)
def do_remove_job(self, lines, comment, job):
return None
def add_env(self, decl, insertafter=None, insertbefore=None):
if not (insertafter or insertbefore):
self.lines.insert(0, decl)
return
if insertafter:
other_name = insertafter
elif insertbefore:
other_name = insertbefore
other_decl = self.find_env(other_name)
if len(other_decl) > 0:
if insertafter:
index = other_decl[0] + 1
elif insertbefore:
index = other_decl[0]
self.lines.insert(index, decl)
return
self.module.fail_json(msg="Variable named '%s' not found." % other_name)
def update_env(self, name, decl):
return self._update_env(name, decl, self.do_add_env)
def do_add_env(self, lines, decl):
lines.append(decl)
def remove_env(self, name):
return self._update_env(name, '', self.do_remove_env)
def do_remove_env(self, lines, decl):
return None
def remove_job_file(self):
try:
os.unlink(self.cron_file)
return True
except OSError:
# cron file does not exist
return False
except:
raise CronTabError("Unexpected error:", sys.exc_info()[0])
def find_job(self, name, job=None):
# attempt to find job by 'Ansible:' header comment
comment = None
for l in self.lines:
if comment is not None:
if comment == name:
return [comment, l]
else:
comment = None
elif re.match(r'%s' % self.ansible, l):
comment = re.sub(r'%s' % self.ansible, '', l)
# failing that, attempt to find job by exact match
if job:
for i, l in enumerate(self.lines):
if l == job:
# if no leading ansible header, insert one
if not re.match(r'%s' % self.ansible, self.lines[i - 1]):
self.lines.insert(i, self.do_comment(name))
return [self.lines[i], l, True]
# if a leading blank ansible header AND job has a name, update header
elif name and self.lines[i - 1] == self.do_comment(None):
self.lines[i - 1] = self.do_comment(name)
return [self.lines[i - 1], l, True]
return []
def find_env(self, name):
for index, l in enumerate(self.lines):
if re.match(r'^%s=' % name, l):
return [index, l]
return []
def get_cron_job(self, minute, hour, day, month, weekday, job, special, disabled):
# normalize any leading/trailing newlines (ansible/ansible-modules-core#3791)
job = job.strip('\r\n')
if disabled:
disable_prefix = '#'
else:
disable_prefix = ''
if special:
if self.cron_file:
return "%s@%s %s %s" % (disable_prefix, special, self.user, job)
else:
return "%s@%s %s" % (disable_prefix, special, job)
else:
if self.cron_file:
return "%s%s %s %s %s %s %s %s" % (disable_prefix, minute, hour, day, month, weekday, self.user, job)
else:
return "%s%s %s %s %s %s %s" % (disable_prefix, minute, hour, day, month, weekday, job)
def get_jobnames(self):
jobnames = []
for l in self.lines:
if re.match(r'%s' % self.ansible, l):
jobnames.append(re.sub(r'%s' % self.ansible, '', l))
return jobnames
def get_envnames(self):
envnames = []
for l in self.lines:
if re.match(r'^\S+=', l):
envnames.append(l.split('=')[0])
return envnames
def _update_job(self, name, job, addlinesfunction):
ansiblename = self.do_comment(name)
newlines = []
comment = None
for l in self.lines:
if comment is not None:
addlinesfunction(newlines, comment, job)
comment = None
elif l == ansiblename:
comment = l
else:
newlines.append(l)
self.lines = newlines
if len(newlines) == 0:
return True
else:
return False # TODO add some more error testing
def _update_env(self, name, decl, addenvfunction):
newlines = []
for l in self.lines:
if re.match(r'^%s=' % name, l):
addenvfunction(newlines, decl)
else:
newlines.append(l)
self.lines = newlines
def render(self):
"""
Render this crontab as it would be in the crontab.
"""
crons = []
for cron in self.lines:
crons.append(cron)
result = '\n'.join(crons)
if result:
result = result.rstrip('\r\n') + '\n'
return result
def _read_user_execute(self):
"""
Returns the command line for reading a crontab
"""
user = ''
if self.user:
if platform.system() == 'SunOS':
return "su %s -c '%s -l'" % (pipes.quote(self.user), pipes.quote(CRONCMD))
elif platform.system() == 'AIX':
return "%s -l %s" % (pipes.quote(CRONCMD), pipes.quote(self.user))
elif platform.system() == 'HP-UX':
return "%s %s %s" % (CRONCMD, '-l', pipes.quote(self.user))
elif pwd.getpwuid(os.getuid())[0] != self.user:
user = '-u %s' % pipes.quote(self.user)
return "%s %s %s" % (CRONCMD, user, '-l')
def _write_execute(self, path):
"""
Return the command line for writing a crontab
"""
user = ''
if self.user:
if platform.system() in ['SunOS', 'HP-UX', 'AIX']:
return "chown %s %s ; su '%s' -c '%s %s'" % (pipes.quote(self.user), pipes.quote(path), pipes.quote(self.user), CRONCMD, pipes.quote(path))
elif pwd.getpwuid(os.getuid())[0] != self.user:
user = '-u %s' % pipes.quote(self.user)
return "%s %s %s" % (CRONCMD, user, pipes.quote(path))
def main():
# The following example playbooks:
#
# - cron: name="check dirs" hour="5,2" job="ls -alh > /dev/null"
#
# - name: do the job
# cron: name="do the job" hour="5,2" job="/some/dir/job.sh"
#
# - name: no job
# cron: name="an old job" state=absent
#
# - name: sets env
# cron: name="PATH" env=yes value="/bin:/usr/bin"
#
# Would produce:
# PATH=/bin:/usr/bin
# # Ansible: check dirs
# * * 5,2 * * ls -alh > /dev/null
# # Ansible: do the job
# * * 5,2 * * /some/dir/job.sh
module = AnsibleModule(
argument_spec=dict(
name=dict(type='str'),
user=dict(type='str'),
job=dict(type='str', aliases=['value']),
cron_file=dict(type='str'),
state=dict(type='str', default='present', choices=['present', 'absent']),
backup=dict(type='bool', default=False),
minute=dict(type='str', default='*'),
hour=dict(type='str', default='*'),
day=dict(type='str', default='*', aliases=['dom']),
month=dict(type='str', default='*'),
weekday=dict(type='str', default='*', aliases=['dow']),
reboot=dict(type='bool', default=False),
special_time=dict(type='str', choices=["reboot", "yearly", "annually", "monthly", "weekly", "daily", "hourly"]),
disabled=dict(type='bool', default=False),
env=dict(type='bool'),
insertafter=dict(type='str'),
insertbefore=dict(type='str'),
),
supports_check_mode=True,
mutually_exclusive=[
['reboot', 'special_time'],
['insertafter', 'insertbefore'],
],
)
name = module.params['name']
user = module.params['user']
job = module.params['job']
cron_file = module.params['cron_file']
state = module.params['state']
backup = module.params['backup']
minute = module.params['minute']
hour = module.params['hour']
day = module.params['day']
month = module.params['month']
weekday = module.params['weekday']
reboot = module.params['reboot']
special_time = module.params['special_time']
disabled = module.params['disabled']
env = module.params['env']
insertafter = module.params['insertafter']
insertbefore = module.params['insertbefore']
do_install = state == 'present'
changed = False
res_args = dict()
warnings = list()
if cron_file:
cron_file_basename = os.path.basename(cron_file)
if not re.search(r'^[A-Z0-9_-]+$', cron_file_basename, re.I):
warnings.append('Filename portion of cron_file ("%s") should consist' % cron_file_basename +
' solely of upper- and lower-case letters, digits, underscores, and hyphens')
# Ensure all files generated are only writable by the owning user. Primarily relevant for the cron_file option.
os.umask(int('022', 8))
crontab = CronTab(module, user, cron_file)
module.debug('cron instantiated - name: "%s"' % name)
if module._diff:
diff = dict()
diff['before'] = crontab.existing
if crontab.cron_file:
diff['before_header'] = crontab.cron_file
else:
if crontab.user:
diff['before_header'] = 'crontab for user "%s"' % crontab.user
else:
diff['before_header'] = 'crontab'
# --- user input validation ---
if (special_time or reboot) and \
(True in [(x != '*') for x in [minute, hour, day, month, weekday]]):
module.fail_json(msg="You must specify time and date fields or special time.")
# cannot support special_time on solaris
if (special_time or reboot) and get_platform() == 'SunOS':
module.fail_json(msg="Solaris does not support special_time=... or @reboot")
if cron_file and do_install:
if not user:
module.fail_json(msg="To use cron_file=... parameter you must specify user=... as well")
if job is None and do_install:
module.fail_json(msg="You must specify 'job' to install a new cron job or variable")
if (insertafter or insertbefore) and not env and do_install:
module.fail_json(msg="Insertafter and insertbefore parameters are valid only with env=yes")
if reboot:
special_time = "reboot"
# if requested make a backup before making a change
if backup and not module.check_mode:
(backuph, backup_file) = tempfile.mkstemp(prefix='crontab')
crontab.write(backup_file)
if crontab.cron_file and not name and not do_install:
if module._diff:
diff['after'] = ''
diff['after_header'] = '/dev/null'
else:
diff = dict()
if module.check_mode:
changed = os.path.isfile(crontab.cron_file)
else:
changed = crontab.remove_job_file()
module.exit_json(changed=changed, cron_file=cron_file, state=state, diff=diff)
if env:
if ' ' in name:
module.fail_json(msg="Invalid name for environment variable")
decl = '%s="%s"' % (name, job)
old_decl = crontab.find_env(name)
if do_install:
if len(old_decl) == 0:
crontab.add_env(decl, insertafter, insertbefore)
changed = True
if len(old_decl) > 0 and old_decl[1] != decl:
crontab.update_env(name, decl)
changed = True
else:
if len(old_decl) > 0:
crontab.remove_env(name)
changed = True
else:
if do_install:
for char in ['\r', '\n']:
if char in job.strip('\r\n'):
warnings.append('Job should not contain line breaks')
break
job = crontab.get_cron_job(minute, hour, day, month, weekday, job, special_time, disabled)
old_job = crontab.find_job(name, job)
if len(old_job) == 0:
crontab.add_job(name, job)
changed = True
if len(old_job) > 0 and old_job[1] != job:
crontab.update_job(name, job)
changed = True
if len(old_job) > 2:
crontab.update_job(name, job)
changed = True
else:
old_job = crontab.find_job(name)
if len(old_job) > 0:
crontab.remove_job(name)
changed = True
# no changes to env/job, but existing crontab needs a terminating newline
if not changed and not crontab.existing == '':
if not (crontab.existing.endswith('\r') or crontab.existing.endswith('\n')):
changed = True
res_args = dict(
jobs=crontab.get_jobnames(),
envs=crontab.get_envnames(),
warnings=warnings,
changed=changed
)
if changed:
if not module.check_mode:
crontab.write()
if module._diff:
diff['after'] = crontab.render()
if crontab.cron_file:
diff['after_header'] = crontab.cron_file
else:
if crontab.user:
diff['after_header'] = 'crontab for user "%s"' % crontab.user
else:
diff['after_header'] = 'crontab'
res_args['diff'] = diff
# retain the backup only if crontab or cron file have changed
if backup and not module.check_mode:
if changed:
res_args['backup_file'] = backup_file
else:
os.unlink(backup_file)
if cron_file:
res_args['cron_file'] = cron_file
module.exit_json(**res_args)
# --- should never get here
module.exit_json(msg="Unable to execute cron task.")
if __name__ == '__main__':
main()
| 33.355615
| 155
| 0.576433
|
7269619203c127b21552a8c5cb5999fc501a6f6f
| 226
|
py
|
Python
|
1stMonth{ImageManipulation}/Files/17edge.py
|
KariukiKirubi/computer-vision-ai-saturdays
|
e18c7557bc29a00c0586411f019fd33d2eb5ebb4
|
[
"MIT"
] | 2
|
2019-11-28T16:11:08.000Z
|
2019-11-28T16:14:19.000Z
|
1stMonth{ImageManipulation}/Files/17edge.py
|
KariukiKirubi/computer-vision-ai-saturdays
|
e18c7557bc29a00c0586411f019fd33d2eb5ebb4
|
[
"MIT"
] | null | null | null |
1stMonth{ImageManipulation}/Files/17edge.py
|
KariukiKirubi/computer-vision-ai-saturdays
|
e18c7557bc29a00c0586411f019fd33d2eb5ebb4
|
[
"MIT"
] | null | null | null |
import numpy as np
import cv2
img = cv2.imread('../Images/love.jpg',cv2.IMREAD_COLOR)
edges = cv2.Canny(img,100,200)
cv2.imwrite('../Images/17edge.jpg', edges)
cv2.imshow('image',edges)
cv2.waitKey(0)
cv2.destroyAllWindows()
| 22.6
| 55
| 0.734513
|
15e0d569c221d7f197678adce673b465835174b0
| 3,389
|
py
|
Python
|
src/recipes/textcat/textcat_teach.py
|
bastiancy/prodigy-multi-annotator
|
69fbe523dac6b7d7abc72e55932568e228ac3b5c
|
[
"MIT"
] | 12
|
2018-12-03T23:22:04.000Z
|
2022-03-16T10:03:12.000Z
|
textcat/textcat_teach.py
|
vitojph/prodigy-recipes
|
ca14bd7e4c59e47a2fedc62f12fef7ee5bbf89e8
|
[
"MIT"
] | 1
|
2020-08-13T06:52:55.000Z
|
2020-08-13T06:52:55.000Z
|
textcat/textcat_teach.py
|
vitojph/prodigy-recipes
|
ca14bd7e4c59e47a2fedc62f12fef7ee5bbf89e8
|
[
"MIT"
] | 4
|
2019-06-18T18:58:42.000Z
|
2022-01-24T15:59:20.000Z
|
# coding: utf8
from __future__ import unicode_literals
import prodigy
from prodigy.components.loaders import JSONL
from prodigy.models.textcat import TextClassifier
from prodigy.models.matcher import PatternMatcher
from prodigy.components.sorters import prefer_uncertain
from prodigy.util import combine_models, split_string
import spacy
# Recipe decorator with argument annotations: (description, argument type,
# shortcut, type / converter function called on value before it's passed to
# the function). Descriptions are also shown when typing --help.
@prodigy.recipe('textcat.teach',
dataset=("The dataset to use", "positional", None, str),
spacy_model=("The base model", "positional", None, str),
source=("The source data as a JSONL file", "positional", None, str),
label=("One or more comma-separated labels", "option", "l", split_string),
patterns=("Optional match patterns", "option", "p", str),
exclude=("Names of datasets to exclude", "option", "e", split_string),
long_text=("Enable long-text classification mode", "flag", "L", bool)
)
def textcat_teach(dataset, spacy_model, source, label=None, patterns=None,
exclude=None, long_text=False):
"""
Collect the best possible training data for a text classification model
with the model in the loop. Based on your annotations, Prodigy will decide
which questions to ask next.
"""
# Load the stream from a JSONL file and return a generator that yields a
# dictionary for each example in the data.
stream = JSONL(source)
# Load the spaCy model
nlp = spacy.load(spacy_model)
# Initialize Prodigy's text classifier model, which outputs
# (score, example) tuples
model = TextClassifier(nlp, label, long_text=long_text)
if patterns is None:
# No patterns are used, so just use the model to suggest examples
# and only use the model's update method as the update callback
predict = model
update = model.update
else:
# Initialize the pattern matcher and load in the JSONL patterns.
# Set the matcher to not label the highlighted spans, only the text.
matcher = PatternMatcher(nlp, prior_correct=5., prior_incorrect=5.,
label_span=False, label_task=True)
matcher = matcher.from_disk(patterns)
# Combine the text classification model and the matcher and interleave their
# suggestions and update both at the same time
predict, update = combine_models(model, matcher)
# Use the prefer_uncertain sorter to focus on suggestions that the model
# is most uncertain about (i.e. with a score closest to 0.5). The model
# yields (score, example) tuples and the sorter yields just the example
stream = prefer_uncertain(predict(stream))
return {
'view_id': 'classification', # Annotation interface to use
'dataset': dataset, # Name of dataset to save annotations
'stream': stream, # Incoming stream of examples
'update': update, # Update callback, called with batch of answers
'exclude': exclude, # List of dataset names to exclude
'config': { # Additional config settings, mostly for app UI
'lang': nlp.lang,
'label': ', '.join(label) if label is not None else 'n/a'
}
}
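# Hedged illustration (not part of the recipe): a minimal JSONL source of the shape the
# JSONL loader above consumes -- one JSON object per line; a plain "text" field is
# assumed here for text classification tasks.
def _write_example_source(path="example_source.jsonl"):
    import json
    rows = [{"text": "The delivery was late again."},
            {"text": "Great support, quick response."}]
    with open(path, "w") as f:
        for row in rows:
            f.write(json.dumps(row) + "\n")
    return path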
| 45.797297
| 84
| 0.680142
|
e74467ae8609417d97f8d81ebb8dde12c4ec9fd2
| 4,788
|
py
|
Python
|
sdk/resources/azure-mgmt-resource/azure/mgmt/resource/subscriptions/v2016_06_01/operations/_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 2,728
|
2015-01-09T10:19:32.000Z
|
2022-03-31T14:50:33.000Z
|
sdk/resources/azure-mgmt-resource/azure/mgmt/resource/subscriptions/v2016_06_01/operations/_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 17,773
|
2015-01-05T15:57:17.000Z
|
2022-03-31T23:50:25.000Z
|
sdk/resources/azure-mgmt-resource/azure/mgmt/resource/subscriptions/v2016_06_01/operations/_operations.py
|
rsdoherty/azure-sdk-for-python
|
6bba5326677468e6660845a703686327178bb7b1
|
[
"MIT"
] | 1,916
|
2015-01-19T05:05:41.000Z
|
2022-03-31T19:36:44.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class Operations(object):
"""Operations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.resource.subscriptions.v2016_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.OperationListResult"]
"""Lists all of the available Microsoft.Resources REST API operations.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either OperationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.resource.subscriptions.v2016_06_01.models.OperationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.OperationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('OperationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/providers/Microsoft.Resources/operations'} # type: ignore
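# Hedged usage sketch (not part of the generated code): this operation group is normally
# reached through a service client, and list() returns an ItemPaged iterator that can be
# consumed with a plain for-loop. The credential and client names below assume
# azure-identity and the package-level SubscriptionClient are available.
def _example_list_operations():
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.resource import SubscriptionClient
    client = SubscriptionClient(DefaultAzureCredential())
    return [op.name for op in client.operations.list()]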
| 43.527273
| 133
| 0.658521
|
fabfc8a560c0b2e8510aac66aab9ff6a417d7686
| 6,050
|
py
|
Python
|
custom_components/samsungtv_encrypted/PySmartCrypto/pysmartcrypto.py
|
nickcj931/ha-samsungtv-encrypted
|
a3b65e0bce4c37a21f302053089c793b2113cead
|
[
"Apache-2.0"
] | 1
|
2020-04-12T22:39:46.000Z
|
2020-04-12T22:39:46.000Z
|
custom_components/samsungtv_encrypted/PySmartCrypto/pysmartcrypto.py
|
nickcj931/ha-samsungtv-encrypted
|
a3b65e0bce4c37a21f302053089c793b2113cead
|
[
"Apache-2.0"
] | null | null | null |
custom_components/samsungtv_encrypted/PySmartCrypto/pysmartcrypto.py
|
nickcj931/ha-samsungtv-encrypted
|
a3b65e0bce4c37a21f302053089c793b2113cead
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
from . import crypto
import sys
import re
from .command_encryption import AESCipher
import requests
import time
import websocket
import threading
class PySmartCrypto():
UserId = "654321"
AppId = "12345"
deviceId = "7e509404-9d7c-46b4-8f6a-e2a9668ad184"
def disconnectCallback(self): self.close()
def getFullUrl(self, urlPath):
return "http://" + self._host + ":" + self._port + urlPath
def GetFullRequestUri(self, step, appId, deviceId):
return self.getFullUrl("/ws/pairing?step="+str(step)+"&app_id="+appId+"&device_id="+deviceId)
def ShowPinPageOnTv(self):
requests.post(self.getFullUrl("/ws/apps/CloudPINPage"), "pin4")
def CheckPinPageOnTv(self):
full_url = self.getFullUrl("/ws/apps/CloudPINPage")
page = requests.get(full_url).text
output = re.search('state>([^<>]*)</state>', page, flags=re.IGNORECASE)
if output is not None:
state = output.group(1)
print("Current state: "+state)
if state == "stopped":
return True
return False
def FirstStepOfPairing(self):
firstStepURL = self.GetFullRequestUri(0, self.AppId, self.deviceId)+"&type=1"
firstStepResponse = requests.get(firstStepURL).text
def StartPairing(self):
self._lastRequestId=0
if self.CheckPinPageOnTv():
print("Pin NOT on TV")
self.ShowPinPageOnTv()
else:
print("Pin ON TV");
def HelloExchange(self, pin):
hello_output = crypto.generateServerHello(self.UserId,pin)
if not hello_output:
return False
content = "{\"auth_Data\":{\"auth_type\":\"SPC\",\"GeneratorServerHello\":\"" + hello_output['serverHello'].hex().upper() + "\"}}"
secondStepURL = self.GetFullRequestUri(1, self.AppId, self.deviceId)
secondStepResponse = requests.post(secondStepURL, content).text
print('secondStepResponse: ' + secondStepResponse)
output = re.search('request_id.*?(\d).*?GeneratorClientHello.*?:.*?(\d[0-9a-zA-Z]*)', secondStepResponse, flags=re.IGNORECASE)
if output is None:
return False
requestId = output.group(1)
clientHello = output.group(2)
self._lastRequestId = int(requestId)
return crypto.parseClientHello(clientHello, hello_output['hash'], hello_output['AES_key'], self.UserId)
def AcknowledgeExchange(self, SKPrime):
serverAckMessage = crypto.generateServerAcknowledge(SKPrime)
content="{\"auth_Data\":{\"auth_type\":\"SPC\",\"request_id\":\"" + str(self._lastRequestId) + "\",\"ServerAckMsg\":\"" + serverAckMessage + "\"}}"
thirdStepURL = self.GetFullRequestUri(2, self.AppId, self.deviceId)
thirdStepResponse = requests.post(thirdStepURL, content).text
if "secure-mode" in thirdStepResponse:
print("TODO: Implement handling of encryption flag!!!!")
sys.exit(-1)
output = re.search('ClientAckMsg.*?:.*?(\d[0-9a-zA-Z]*).*?session_id.*?(\d)', thirdStepResponse, flags=re.IGNORECASE)
if output is None:
print("Unable to get session_id and/or ClientAckMsg!!!");
sys.exit(-1)
clientAck = output.group(1)
if not crypto.parseClientAcknowledge(clientAck, SKPrime):
print("Parse client ac message failed.")
sys.exit(-1)
sessionId=output.group(2)
print("sessionId: "+sessionId)
return sessionId
def ClosePinPageOnTv(self):
full_url = self.getFullUrl("/ws/apps/CloudPINPage/run");
requests.delete(full_url)
return False
def connect(self):
millis = int(round(time.time() * 1000))
step4_url = 'http://' + self._host + ':8000/socket.io/1/?t=' + str(millis)
websocket_response = requests.get(step4_url)
websocket_url = 'ws://' + self._host + ':8000/socket.io/1/websocket/' + websocket_response.text.split(':')[0]
# print(websocket_url)
# pairs to this app with this command.
connection = websocket.create_connection(websocket_url)
connection.send('1::/com.samsung.companion')
return connection
def control(self, key_command):
self._connection.send(self._aesLib.generate_command(key_command))
# need sleeps because sending commands too quickly makes them fail
time.sleep(0.1)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def close(self):
"""Close the connection."""
self._connection.close()
def __init__(self, host, port, token=None, sessionid=None, command=None):
self._lastRequestId = 0
self._host = host
self._port = port
self._connection = self.connect()
self._timer = threading.Timer(10, self.disconnectCallback)
self._timer.start()
if token is None and sessionid is None:
self.StartPairing()
token = False
SKPrime = False
while not token:
tvPIN = input("Please enter pin from tv: ")
print("Got pin: '"+tvPIN+"'\n")
self.FirstStepOfPairing()
output = self.HelloExchange(tvPIN)
if output:
token = output['ctx'].hex()
SKPrime = output['SKPrime']
print("ctx: " + token)
print("Pin accepted :)\n")
else:
print("Pin incorrect. Please try again...\n")
sessionid = self.AcknowledgeExchange(SKPrime)
print("SessionID: " + str(sessionid))
self.ClosePinPageOnTv()
print("Authorization successfull :)\n")
self._token = token
self._sessionid = sessionid
self._aesLib = AESCipher(self._token.upper(), self._sessionid)
if command is not None:
print('Attempting to send command to tv')
self.control(command)
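# Hedged usage sketch (not part of the library): the host, pairing port, stored
# token/sessionid and the "KEY_VOLUP" key code below are illustrative assumptions only.
def _example_send_key(host="192.168.0.10", port="8080", token=None, sessionid=None):
    tv = PySmartCrypto(host, port, token, sessionid)  # pairs first when no token is given
    tv.control("KEY_VOLUP")  # assumed Samsung key code
    tv.close()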
| 38.291139
| 155
| 0.610083
|
b0b8832cd79d013420fc63dddf98aac5e2d4a77a
| 1,167
|
py
|
Python
|
google/cloud/deploy/v1/deploy-v1-py/google/cloud/deploy_v1/services/cloud_deploy/transports/__init__.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 7
|
2021-02-21T10:39:41.000Z
|
2021-12-07T07:31:28.000Z
|
google/cloud/deploy/v1/deploy-v1-py/google/cloud/deploy_v1/services/cloud_deploy/transports/__init__.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 6
|
2021-02-02T23:46:11.000Z
|
2021-11-15T01:46:02.000Z
|
google/cloud/deploy/v1/deploy-v1-py/google/cloud/deploy_v1/services/cloud_deploy/transports/__init__.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 4
|
2021-01-28T23:25:45.000Z
|
2021-08-30T01:55:16.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import CloudDeployTransport
from .grpc import CloudDeployGrpcTransport
from .grpc_asyncio import CloudDeployGrpcAsyncIOTransport
# Compile a registry of transports.
_transport_registry = OrderedDict() # type: Dict[str, Type[CloudDeployTransport]]
_transport_registry['grpc'] = CloudDeployGrpcTransport
_transport_registry['grpc_asyncio'] = CloudDeployGrpcAsyncIOTransport
__all__ = (
'CloudDeployTransport',
'CloudDeployGrpcTransport',
'CloudDeployGrpcAsyncIOTransport',
)
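# Hedged illustration (not part of the module): the registry above simply maps the
# transport name a client is constructed with (e.g. transport="grpc") to the concrete
# transport class.
assert _transport_registry["grpc"] is CloudDeployGrpcTransport
assert _transport_registry["grpc_asyncio"] is CloudDeployGrpcAsyncIOTransport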
| 34.323529
| 82
| 0.782348
|
8d1017d0d40e64625ee4cb9616fa6aca17421239
| 6,464
|
py
|
Python
|
odtlearn/utils/StrongTreeBendersOCT.py
|
nathanljustin/decision_tree_estimators
|
84b37197e88d85f32d10c0412b369e94f563f155
|
[
"BSD-3-Clause"
] | 2
|
2022-03-09T01:08:00.000Z
|
2022-03-09T07:15:26.000Z
|
odtlearn/utils/StrongTreeBendersOCT.py
|
nathanljustin/decision_tree_estimators
|
84b37197e88d85f32d10c0412b369e94f563f155
|
[
"BSD-3-Clause"
] | null | null | null |
odtlearn/utils/StrongTreeBendersOCT.py
|
nathanljustin/decision_tree_estimators
|
84b37197e88d85f32d10c0412b369e94f563f155
|
[
"BSD-3-Clause"
] | 3
|
2022-02-07T18:12:08.000Z
|
2022-02-16T00:17:15.000Z
|
"""
This module formulate the BendersOCT problem in gurobipy.
"""
from gurobipy import Model, GRB, quicksum, LinExpr
import numpy as np
import pandas as pd
class BendersOCT:
def __init__(
self,
X,
y,
tree,
X_col_labels,
labels,
_lambda,
time_limit,
num_threads,
obj_mode,
verbose,
):
"""
:param X: numpy matrix of covariates
:param y: numpy array of class labels
:param tree: Tree object
:param X_col_labels: column labels of the covariates in X
:param labels: the set of class labels
:param _lambda: The regularization parameter in the objective
:param time_limit: The given time limit for solving the MIP
:param num_threads: Number of threads Gurobi is allowed to use
:param obj_mode: if obj_mode=acc we maximize the accuracy; if obj_mode=balance we maximize the balanced accuracy
:param verbose: Display Gurobi model output
"""
self.X = pd.DataFrame(X, columns=X_col_labels)
self.y = y
self.X_col_labels = X_col_labels
self.labels = labels
# datapoints contains the indices of our training data
self.datapoints = np.arange(0, self.X.shape[0])
self.tree = tree
self._lambda = _lambda
self.obj_mode = obj_mode
# Decision Variables
self.g = 0
self.b = 0
self.p = 0
self.w = 0
# Gurobi model
self.model = Model("BendersOCT")
if not verbose:
# supress all logging
self.model.params.OutputFlag = 0
# The cuts we add in the callback function would be treated as lazy constraints
self.model.params.LazyConstraints = 1
if num_threads is not None:
self.model.params.Threads = num_threads
self.model.params.TimeLimit = time_limit
"""
The following variables are used for the Benders problem to keep track
of the times we call the callback.
- counter_integer tracks number of times we call the callback from an
integer node in the branch-&-bound tree
- time_integer tracks the associated time spent in the
callback for these calls
- counter_general tracks number of times we call the callback from
a non-integer node in the branch-&-bound tree
- time_general tracks the associated time spent in the callback for
these calls
the ones ending with success are related to success calls.
By success we mean ending up adding a lazy constraint
to the model
"""
self.model._total_callback_time_integer = 0
self.model._total_callback_time_integer_success = 0
self.model._total_callback_time_general = 0
self.model._total_callback_time_general_success = 0
self.model._callback_counter_integer = 0
self.model._callback_counter_integer_success = 0
self.model._callback_counter_general = 0
self.model._callback_counter_general_success = 0
# We also pass the following information to the model as we need them in the callback
self.model._main_grb_obj = self
def create_main_problem(self):
"""
This function creates and returns a Gurobi model
formulating the BendersOCT problem
:return: gurobi model object with the BendersOCT formulation
"""
###########################################################
# Define Variables
###########################################################
# g[i] is the objective value for the sub-problem[i]
self.g = self.model.addVars(
self.datapoints, vtype=GRB.CONTINUOUS, ub=1, name="g"
)
# b[n,f] ==1 iff at node n we branch on feature f
self.b = self.model.addVars(
self.tree.Nodes, self.X_col_labels, vtype=GRB.BINARY, name="b"
)
# p[n] == 1 iff at node n we do not branch and we make a prediction
self.p = self.model.addVars(
self.tree.Nodes + self.tree.Leaves, vtype=GRB.BINARY, name="p"
)
# w[n,k]=1 iff at node n we predict class k
self.w = self.model.addVars(
self.tree.Nodes + self.tree.Leaves,
self.labels,
vtype=GRB.CONTINUOUS,
lb=0,
name="w",
)
# we need these in the callback to have access to the value of the decision variables
self.model._vars_g = self.g
self.model._vars_b = self.b
self.model._vars_p = self.p
self.model._vars_w = self.w
###########################################################
# Define Constraints
###########################################################
# sum(b[n,f], f) + p[n] + sum(p[m], m in A(n)) = 1 forall n in Nodes
self.model.addConstrs(
(
quicksum(self.b[n, f] for f in self.X_col_labels)
+ self.p[n]
+ quicksum(self.p[m] for m in self.tree.get_ancestors(n))
== 1
)
for n in self.tree.Nodes
)
# sum(w[n,k], k in labels) = p[n]
self.model.addConstrs(
(quicksum(self.w[n, k] for k in self.labels) == self.p[n])
for n in self.tree.Nodes + self.tree.Leaves
)
# p[n] + sum(p[m], m in A(n)) = 1 forall n in Leaves
self.model.addConstrs(
(self.p[n] + quicksum(self.p[m] for m in self.tree.get_ancestors(n)) == 1)
for n in self.tree.Leaves
)
###########################################################
# Define the Objective
###########################################################
obj = LinExpr(0)
for n in self.tree.Nodes:
for f in self.X_col_labels:
obj.add(-1 * self._lambda * self.b[n, f])
if self.obj_mode == "acc":
for i in self.datapoints:
obj.add((1 - self._lambda) * self.g[i])
elif self.obj_mode == "balance":
for i in self.datapoints:
obj.add(
(1 - self._lambda)
* (1 / self.y[self.y == self.y[i]].shape[0] / self.labels.shape[0])
* self.g[i]
)
else:
raise ValueError("Wrong objective mode. obj_mode should be one of 'acc' or 'balance'.")
self.model.setObjective(obj, GRB.MAXIMIZE)
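# Hedged worked example (not part of the class): in "balance" mode each datapoint i is
# weighted by 1 / (#samples sharing y_i's class) / (#labels), mirroring the expression
# in the objective above; e.g. a 3-vs-1 split over two labels gives weights
# 1/6, 1/6, 1/6 and 1/2.
def _example_balance_weights():
    y = np.array([0, 0, 0, 1])
    labels = np.array([0, 1])
    return [1 / y[y == y[i]].shape[0] / labels.shape[0] for i in range(y.shape[0])]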
| 35.322404
| 112
| 0.538985
|
89d169c9eef51cf323009fdfccbdcc9c74a8f7fc
| 230
|
py
|
Python
|
ThinkPython/chap5/ex2.py
|
sokolowskik/Tutorials
|
d2681d4f18b03e00f90f9132c77f0b23b74d2629
|
[
"MIT"
] | null | null | null |
ThinkPython/chap5/ex2.py
|
sokolowskik/Tutorials
|
d2681d4f18b03e00f90f9132c77f0b23b74d2629
|
[
"MIT"
] | null | null | null |
ThinkPython/chap5/ex2.py
|
sokolowskik/Tutorials
|
d2681d4f18b03e00f90f9132c77f0b23b74d2629
|
[
"MIT"
] | null | null | null |
def print_string(s):
print s
def do_n(f, s, n):
if n <= 0:
print 'done!'
else:
f(s)
do_n(f, s, n-1)
do_n(print_string, 'Hello', 2)
do_n(print_string, 'Nana', 4)
do_n(print_string, 'Dada', 8)
| 15.333333
| 30
| 0.534783
|
0853c301fa397e5e18bc2d3e7ce60223bd775cb0
| 3,860
|
py
|
Python
|
Fora 2.0/commands/leveling.py
|
Dolsity/meet-fora
|
4b2bce766c7420fd0c324dd6f5ea1f612df1cb09
|
[
"Unlicense"
] | 1
|
2022-03-07T18:16:20.000Z
|
2022-03-07T18:16:20.000Z
|
Fora 2.0/commands/leveling.py
|
radiantdinosaurs/meet-fora
|
00997ab82e04a2e057a08b2d5d23ffcc85817bd5
|
[
"Unlicense"
] | null | null | null |
Fora 2.0/commands/leveling.py
|
radiantdinosaurs/meet-fora
|
00997ab82e04a2e057a08b2d5d23ffcc85817bd5
|
[
"Unlicense"
] | 1
|
2022-03-05T03:17:11.000Z
|
2022-03-05T03:17:11.000Z
|
from discord import File, Member
from discord.ext import commands
from easy_pil import Editor, load_image_async, Font, Canvas
from PIL import Image, ImageDraw, ImageFont
from database.levels import *
class Level(commands.Cog):
def __init__(self, bot) -> None:
self.bot = bot
# Check your rank
@commands.command()
async def rank(self, ctx, member: Member = None):
if not member:
member = ctx.author
await ctx.trigger_typing()
user_data = await get_user_data_guild(self.bot.db, member.id, ctx.guild.id)
# rank = await get_rank(self.bot.db, member.id, ctx.guild.id) # in case of using rank
next_level_xp = (user_data["level"] + 1) * 100
current_level_xp = user_data["level"] * 100
xp_need = next_level_xp - current_level_xp
xp_have = user_data["xp"] - current_level_xp
percentage = (xp_need / 100) * xp_have
## Rank card
background = Editor("assets/14.jpg")
profile = await load_image_async(str(member.avatar_url))
profile = Editor(profile).resize((150, 150)).circle_image()
poppins = Font().poppins(size=40)
poppins_small = Font().poppins(size=30)
background.paste(profile.image, (30, 30))
background.rectangle((30, 220), width=650, height=40, fill="white", radius=20)
background.bar(
(30, 220),
max_width=650,
height=40,
percentage=percentage,
fill="#3256a8",
radius=20,
)
background.text((200, 40), str(member), font=poppins, color="white")
background.rectangle((200, 100), width=350, height=2, fill="#3256a8")
background.text(
(200, 130),
f"Level : {user_data['level']}"
+ f" XP : {user_data['xp']} / {(user_data['level'] + 1) * 100}",
font=poppins_small,
color="white",
)
file = File(fp=background.image_bytes, filename="card.png")
await ctx.send(file=file)
# Check your global rank
@commands.command()
async def globalrank(self, ctx, member: Member = None):
if not member:
member = ctx.author
await ctx.trigger_typing()
user_data = await get_user_data_global(self.bot.db, member.id)
# rank = await get_rank(self.bot.db, member.id) # in case of using rank
next_level_xp = (user_data["level"] + 1) * 100
current_level_xp = user_data["level"] * 100
xp_need = next_level_xp - current_level_xp
xp_have = user_data["xp"] - current_level_xp
percentage = (xp_need / 100) * xp_have
## Rank card
background = Editor("assets/14.jpg")
profile = await load_image_async(str(member.avatar_url))
profile = Editor(profile).resize((150, 150)).circle_image()
poppins = Font().poppins(size=40)
poppins_small = Font().poppins(size=30)
background.paste(profile.image, (30, 30))
background.rectangle((30, 220), width=650, height=40, fill="white", radius=20)
background.bar(
(30, 220),
max_width=650,
height=40,
percentage=percentage,
fill="#3256a8",
radius=20,
)
background.text((200, 40), str(member), font=poppins, color="white")
background.rectangle((200, 100), width=350, height=2, fill="#3256a8")
background.text(
(200, 130),
f"Level : {user_data['level']}"
+ f" XP : {user_data['xp']} / {(user_data['level'] + 1) * 100}",
font=poppins_small,
color="white",
)
file = File(fp=background.image_bytes, filename="card.png")
await ctx.send(file=file)
# TEAMSEAS -- #0E5F93
def setup(bot):
bot.add_cog(Level(bot))
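# --- Illustrative sketch (not part of the original cog) ---
# The progress-bar math in rank()/globalrank() works because every level spans
# exactly 100 XP, so (xp_need / 100) * xp_have equals the XP gained within the
# current level. This hypothetical helper states the general formula explicitly.
def _sketch_level_progress(xp, level, xp_per_level=100):
    """Return progress through the current level as a percentage (0-100)."""
    current_level_xp = level * xp_per_level
    next_level_xp = (level + 1) * xp_per_level
    return (xp - current_level_xp) / (next_level_xp - current_level_xp) * 100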
| 32.436975
| 93
| 0.585233
|
970357ef6c34e067dbba07696d2a84e09960d012
| 4,671
|
py
|
Python
|
src/wfleet/scraper/utils/utilities_http.py
|
dmitry-ed-gusev/fleet-infoservice
|
cb96dc42b8eedacc4055ff4f92d27e4c6afdacc6
|
[
"MIT"
] | 1
|
2021-05-21T20:49:09.000Z
|
2021-05-21T20:49:09.000Z
|
src/wfleet/scraper/utils/utilities_http.py
|
dmitry-ed-gusev/fleet-infoservice
|
cb96dc42b8eedacc4055ff4f92d27e4c6afdacc6
|
[
"MIT"
] | 16
|
2021-05-21T20:40:14.000Z
|
2022-01-19T21:52:24.000Z
|
src/wfleet/scraper/utils/utilities_http.py
|
dmitry-ed-gusev/fleet-infoservice
|
cb96dc42b8eedacc4055ff4f92d27e4c6afdacc6
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""
HTTP / network related utilities module for Fleet DB Scraper.
Useful resources:
- (download file) https://stackoverflow.com/questions/7243750/download-file-from-web-in-python-3
Created: Dmitrii Gusev, 01.06.2021
Modified: Dmitrii Gusev, 16.06.2021
"""
import ssl
import logging
import shutil
from urllib import request, response, parse, error
from pathlib import Path
from . import constants as const
# init module logger
log = logging.getLogger(const.LOGGING_UTILITIES_HTTP_LOGGER)
# todo: add perform_http_get_request() method + appropriately rename the method below
def perform_http_get_request(url: str) -> str: # todo: refactor - generalize
""""""
# log.debug() # <- too much output
if url is None or len(url.strip()) == 0: # fail-fast - empty URL
raise ValueError("Provided empty URL, can't perform the request!")
# todo: implementation!
return ""
def perform_http_post_request(url: str, request_params: dict, retry_count: int = 0) -> str:
"""Perform one HTTP POST request with one form parameter for search.
:param url:
:param request_params:
:param retry_count: number of retries. 0 -> no retries (a single request), negative -> no requests at all,
    positive -> (retry_count + 1) requests in total
:return: HTML output with found data
"""
if url is None or len(url.strip()) == 0: # fail-fast - empty URL
raise ValueError("Provided empty URL, can't perform the request!")
data = parse.urlencode(request_params).encode(
const.DEFAULT_ENCODING
) # perform encoding of request params
req = request.Request(url, data=data) # this will make the method "POST" request (with data load)
context = ssl.SSLContext() # new SSLContext -> to bypass security certificate check
tries_counter: int = 0
response_ok: bool = False
my_response = None
while tries_counter <= retry_count and not response_ok: # perform specified number of requests
log.debug(f"HTTP POST: URL: {url}, data: {request_params}, try #{tries_counter}/{retry_count}.")
try:
my_response = request.urlopen(req, context=context, timeout=const.TIMEOUT_URLLIB_URLOPEN)
response_ok = True # after successfully done request we should stop requests
except (TimeoutError, error.URLError) as e:
log.error(
f"We got error -> URL: {url}, data: {request_params}, try: #{tries_counter}/{retry_count}, "
f"error: {e}."
)
tries_counter += 1
if my_response is not None:
result = my_response.read().decode(const.DEFAULT_ENCODING) # read response and perform decode
else:
result = None
return result
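# --- Illustrative sketch (not part of the original module) ---
# The loop above issues at most (retry_count + 1) POST requests and stops at the
# first success. The hypothetical helper below shows the same retry pattern in a
# generic form for any zero-argument callable `func`.
def _sketch_retry(func, attempts=3):
    """Call func() up to `attempts` times and return the first successful result."""
    for try_number in range(attempts):
        try:
            return func()
        except Exception as e:  # broad catch only for the sake of the sketch
            log.error(f"Attempt #{try_number + 1}/{attempts} failed: {e}")
    return None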
def perform_file_download_over_http(url: str, target_dir: str, target_file: str = None) -> str:
"""Downloads file via HTTP protocol.
:param url: URL for file download, shouldn't be empty.
:param target_dir: local dir to save file, if empty - save to the current dir
:param target_file: local file name to save, if empty - file name will be derived from URL
:return: path to locally saved file, that was downloaded
"""
log.debug(
f"perform_file_download_over_http(): downloading link: {url}, target dir: {target_dir}, "
f"target_file: {target_file}."
)
if url is None or len(url.strip()) == 0: # fail-fast check for provided url
raise ValueError("Provided empty URL!")
# check target dir name - if not empty we will create all missing dirs in the path
if target_dir is not None and len(target_dir.strip()) > 0:
Path(target_dir).mkdir(parents=True, exist_ok=True) # create necessary parent dirs in path
log.debug(f"Created all missing dirs in path: {target_dir}")
else:
log.debug(f"Provided empty target dir - file will be saved in the current directory.")
# pick a target file name
if target_file is None or len(target_file.strip()) == 0:
local_file_name: str = Path(url).name
else:
local_file_name: str = target_file
log.debug(f"Target file name: {local_file_name}")
# construct the full local target path
local_path: str = target_dir + "/" + local_file_name
log.debug(f"Generated local full path: {local_path}")
# download the file from the provided `url` and save it locally under certain `file_name`:
with request.urlopen(url) as my_response, open(local_path, "wb") as out_file:
shutil.copyfileobj(my_response, out_file)
log.info(f"Downloaded file: {url} and put here: {local_path}")
return local_path
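# --- Illustrative usage sketch (not part of the original module) ---
# Hypothetical call pattern for the download helper above: the local file name is
# derived from the URL, so this would save ./downloads/report.csv. The URL and
# directory are made-up examples; the function is only referenced, never invoked here.
def _sketch_download_example():
    return perform_file_download_over_http(
        "https://example.com/data/report.csv", "downloads")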
| 39.923077
| 108
| 0.677371
|
423082f680750f21f9c0fc85ed6dcfbe12c33c4f
| 2,581
|
py
|
Python
|
tests/test_report.py
|
mrchapp/squad-client
|
06612961fc16c5f4af9983fcfe6f378e7159f965
|
[
"MIT"
] | 3
|
2020-03-30T18:34:57.000Z
|
2020-04-12T07:51:25.000Z
|
tests/test_report.py
|
mrchapp/squad-client
|
06612961fc16c5f4af9983fcfe6f378e7159f965
|
[
"MIT"
] | 127
|
2020-03-05T23:20:37.000Z
|
2022-03-23T18:44:19.000Z
|
tests/test_report.py
|
mrchapp/squad-client
|
06612961fc16c5f4af9983fcfe6f378e7159f965
|
[
"MIT"
] | 17
|
2020-02-14T14:37:38.000Z
|
2022-03-17T15:59:56.000Z
|
import unittest
from unittest.mock import patch
from io import StringIO
from squad_client.report import Report, ReportContext, ReportGenerator
from squad_client.exceptions import InvalidSquadObject
class ReportContextTest(unittest.TestCase):
def test_basics(self):
context = {
'var1': {
'type': 'Build',
'filters': {
'param1': 'val1'
}
}
}
report_context = ReportContext(context)
self.assertEqual(1, len(report_context.context))
c = report_context.context[0]
self.assertEqual('var1', c.name)
self.assertEqual('Build', c.type)
self.assertEqual({'param1': 'val1'}, c.filters)
def test_invalid_object_type(self):
context = {
'var1': {
'type': 'InvalidType',
}
}
report_context = ReportContext(context)
with self.assertRaises(InvalidSquadObject):
report_context.fill()
class ReportTest(unittest.TestCase):
def test_basic_report_generation(self):
template = 'This is the most basic template'
report = Report(template)
generated = report.generate()
self.assertEqual(template, generated)
@patch('squad_client.core.models.Squad.fetch')
def test_basic_report_generation_with_context(self, squad_fetch):
squad_fetch.return_value = 'fetched string'
template = 'Report: {{ dummy }}'
context = ReportContext({
'dummy': {
'type': 'Test',
'filters': {
'param1': 'val1'
}
}
})
report = Report(template, context=context)
generated = report.generate()
self.assertEqual('Report: fetched string', generated)
class ReportGeneratorTest(unittest.TestCase):
@patch('squad_client.core.models.Squad.fetch')
def test_basics(self, squad_fetch):
squad_fetch.return_value = 'fetched string'
template = 'Report: {{ dummy }}'
output = StringIO()
context = {
'dummy': {
'type': 'Test',
'filters': {
'param1': 'val1'
}
}
}
generator = ReportGenerator('http://example.com')
generator.add_report('dummy report', template, output=output, context=context)
reports = generator.generate()
self.assertEqual(1, len(reports))
self.assertEqual('Report: fetched string', output.getvalue())
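# --- Illustrative sketch (not part of the original tests) ---
# The context format exercised above: each key names a template variable, 'type'
# names a SQUAD object type (e.g. 'Build' as used in ReportContextTest), and
# 'filters' holds query parameters. Purely hypothetical example data.
EXAMPLE_REPORT_CONTEXT = {
    'builds': {
        'type': 'Build',
        'filters': {'version': 'v1.0'},
    },
}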
| 29.666667
| 86
| 0.573421
|
20c3702b4a04a72faffb724f41bede4fd438a773
| 126
|
py
|
Python
|
pycmakeserver/errors.py
|
anarthal/py-cmake-server
|
480f3b45d8ec99a4c5c822dcc5252ee3def88c6b
|
[
"MIT"
] | null | null | null |
pycmakeserver/errors.py
|
anarthal/py-cmake-server
|
480f3b45d8ec99a4c5c822dcc5252ee3def88c6b
|
[
"MIT"
] | null | null | null |
pycmakeserver/errors.py
|
anarthal/py-cmake-server
|
480f3b45d8ec99a4c5c822dcc5252ee3def88c6b
|
[
"MIT"
] | null | null | null |
class CmakeError(Exception):
pass
class CommunicationError(CmakeError):
pass
class ErrorReply(CmakeError):
pass
| 14
| 37
| 0.746032
|
1f7dcb55d2eb8ca17761f2802d541bb49500d0cc
| 59,407
|
py
|
Python
|
sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner_test.py
|
jphalip/beam
|
26f694d2c8a8791ea0357ba44c7580c78f617bf8
|
[
"Apache-2.0"
] | 1
|
2021-08-14T15:28:41.000Z
|
2021-08-14T15:28:41.000Z
|
sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner_test.py
|
RocMarshal/beam
|
34c58c42f14d2534a2f72f9194ddf9cc69138eea
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 5
|
2020-11-13T19:06:10.000Z
|
2021-11-10T19:56:12.000Z
|
sdks/python/apache_beam/runners/portability/fn_api_runner/fn_runner_test.py
|
espv/beam-plus-wrapper
|
901b762c0b331c31217c0a66a830a6050b7cd257
|
[
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
from __future__ import absolute_import
from __future__ import print_function
import collections
import logging
import os
import random
import shutil
import sys
import tempfile
import threading
import time
import traceback
import typing
import unittest
import uuid
from builtins import range
from typing import Dict
# patches unittest.TestCase to be python3 compatible
import hamcrest # pylint: disable=ungrouped-imports
from hamcrest.core.matcher import Matcher
from hamcrest.core.string_description import StringDescription
from nose.plugins.attrib import attr
from tenacity import retry
from tenacity import stop_after_attempt
import apache_beam as beam
from apache_beam.io import restriction_trackers
from apache_beam.io.watermark_estimators import ManualWatermarkEstimator
from apache_beam.metrics import monitoring_infos
from apache_beam.metrics.execution import MetricKey
from apache_beam.metrics.metricbase import MetricName
from apache_beam.options.pipeline_options import DebugOptions
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.runners.portability import fn_api_runner
from apache_beam.runners.portability.fn_api_runner import fn_runner
from apache_beam.runners.sdf_utils import RestrictionTrackerView
from apache_beam.runners.worker import data_plane
from apache_beam.runners.worker import sdk_worker
from apache_beam.runners.worker import statesampler
from apache_beam.testing.synthetic_pipeline import SyntheticSDFAsSource
from apache_beam.testing.test_stream import TestStream
from apache_beam.testing.util import assert_that
from apache_beam.testing.util import equal_to
from apache_beam.tools import utils
from apache_beam.transforms import environments
from apache_beam.transforms import userstate
from apache_beam.transforms import window
from apache_beam.utils import timestamp
if statesampler.FAST_SAMPLER:
DEFAULT_SAMPLING_PERIOD_MS = statesampler.DEFAULT_SAMPLING_PERIOD_MS
else:
DEFAULT_SAMPLING_PERIOD_MS = 0
_LOGGER = logging.getLogger(__name__)
def _matcher_or_equal_to(value_or_matcher):
"""Pass-thru for matchers, and wraps value inputs in an equal_to matcher."""
if value_or_matcher is None:
return None
if isinstance(value_or_matcher, Matcher):
return value_or_matcher
return hamcrest.equal_to(value_or_matcher)
def has_urn_and_labels(mi, urn, labels):
"""Returns true if it the monitoring_info contains the labels and urn."""
def contains_labels(mi, labels):
# Check all the labels and their values exist in the monitoring_info
return all(item in mi.labels.items() for item in labels.items())
return contains_labels(mi, labels) and mi.urn == urn
class FnApiRunnerTest(unittest.TestCase):
def create_pipeline(self):
return beam.Pipeline(runner=fn_api_runner.FnApiRunner())
def test_assert_that(self):
# TODO: figure out a way for fn_api_runner to parse and raise the
# underlying exception.
if sys.version_info < (3, 2):
assertRaisesRegex = self.assertRaisesRegexp
else:
assertRaisesRegex = self.assertRaisesRegex
with assertRaisesRegex(Exception, 'Failed assert'):
with self.create_pipeline() as p:
assert_that(p | beam.Create(['a', 'b']), equal_to(['a']))
def test_create(self):
with self.create_pipeline() as p:
assert_that(p | beam.Create(['a', 'b']), equal_to(['a', 'b']))
def test_pardo(self):
with self.create_pipeline() as p:
res = (
p
| beam.Create(['a', 'bc'])
| beam.Map(lambda e: e * 2)
| beam.Map(lambda e: e + 'x'))
assert_that(res, equal_to(['aax', 'bcbcx']))
def test_pardo_side_outputs(self):
def tee(elem, *tags):
for tag in tags:
if tag in elem:
yield beam.pvalue.TaggedOutput(tag, elem)
with self.create_pipeline() as p:
xy = (
p
| 'Create' >> beam.Create(['x', 'y', 'xy'])
| beam.FlatMap(tee, 'x', 'y').with_outputs())
assert_that(xy.x, equal_to(['x', 'xy']), label='x')
assert_that(xy.y, equal_to(['y', 'xy']), label='y')
def test_pardo_side_and_main_outputs(self):
def even_odd(elem):
yield elem
yield beam.pvalue.TaggedOutput('odd' if elem % 2 else 'even', elem)
with self.create_pipeline() as p:
ints = p | beam.Create([1, 2, 3])
named = ints | 'named' >> beam.FlatMap(even_odd).with_outputs(
'even', 'odd', main='all')
assert_that(named.all, equal_to([1, 2, 3]), label='named.all')
assert_that(named.even, equal_to([2]), label='named.even')
assert_that(named.odd, equal_to([1, 3]), label='named.odd')
unnamed = ints | 'unnamed' >> beam.FlatMap(even_odd).with_outputs()
unnamed[None] | beam.Map(id) # pylint: disable=expression-not-assigned
assert_that(unnamed[None], equal_to([1, 2, 3]), label='unnamed.all')
assert_that(unnamed.even, equal_to([2]), label='unnamed.even')
assert_that(unnamed.odd, equal_to([1, 3]), label='unnamed.odd')
def test_pardo_side_inputs(self):
def cross_product(elem, sides):
for side in sides:
yield elem, side
with self.create_pipeline() as p:
main = p | 'main' >> beam.Create(['a', 'b', 'c'])
side = p | 'side' >> beam.Create(['x', 'y'])
assert_that(
main | beam.FlatMap(cross_product, beam.pvalue.AsList(side)),
equal_to([('a', 'x'), ('b', 'x'), ('c', 'x'), ('a', 'y'), ('b', 'y'),
('c', 'y')]))
def test_pardo_windowed_side_inputs(self):
with self.create_pipeline() as p:
# Now with some windowing.
pcoll = p | beam.Create(list(
range(10))) | beam.Map(lambda t: window.TimestampedValue(t, t))
# Intentionally choosing non-aligned windows to highlight the transition.
main = pcoll | 'WindowMain' >> beam.WindowInto(window.FixedWindows(5))
side = pcoll | 'WindowSide' >> beam.WindowInto(window.FixedWindows(7))
res = main | beam.Map(
lambda x, s: (x, sorted(s)), beam.pvalue.AsList(side))
assert_that(
res,
equal_to([
# The window [0, 5) maps to the window [0, 7).
(0, list(range(7))),
(1, list(range(7))),
(2, list(range(7))),
(3, list(range(7))),
(4, list(range(7))),
# The window [5, 10) maps to the window [7, 14).
(5, list(range(7, 10))),
(6, list(range(7, 10))),
(7, list(range(7, 10))),
(8, list(range(7, 10))),
(9, list(range(7, 10)))
]),
label='windowed')
def test_flattened_side_input(self, with_transcoding=True):
with self.create_pipeline() as p:
main = p | 'main' >> beam.Create([None])
side1 = p | 'side1' >> beam.Create([('a', 1)])
side2 = p | 'side2' >> beam.Create([('b', 2)])
if with_transcoding:
# Also test non-matching coder types (transcoding required)
third_element = [('another_type')]
else:
third_element = [('b', 3)]
side3 = p | 'side3' >> beam.Create(third_element)
side = (side1, side2) | beam.Flatten()
assert_that(
main | beam.Map(lambda a, b: (a, b), beam.pvalue.AsDict(side)),
equal_to([(None, {
'a': 1, 'b': 2
})]),
label='CheckFlattenAsSideInput')
assert_that((side, side3) | 'FlattenAfter' >> beam.Flatten(),
equal_to([('a', 1), ('b', 2)] + third_element),
label='CheckFlattenOfSideInput')
def test_gbk_side_input(self):
with self.create_pipeline() as p:
main = p | 'main' >> beam.Create([None])
side = p | 'side' >> beam.Create([('a', 1)]) | beam.GroupByKey()
assert_that(
main | beam.Map(lambda a, b: (a, b), beam.pvalue.AsDict(side)),
equal_to([(None, {
'a': [1]
})]))
def test_multimap_side_input(self):
with self.create_pipeline() as p:
main = p | 'main' >> beam.Create(['a', 'b'])
side = (
p | 'side' >> beam.Create([('a', 1), ('b', 2), ('a', 3)])
# TODO(BEAM-4782): Obviate the need for this map.
| beam.Map(lambda kv: (kv[0], kv[1])))
assert_that(
main | beam.Map(
lambda k, d: (k, sorted(d[k])), beam.pvalue.AsMultiMap(side)),
equal_to([('a', [1, 3]), ('b', [2])]))
def test_multimap_multiside_input(self):
# A test where two transforms in the same stage consume the same PCollection
# twice as side input.
with self.create_pipeline() as p:
main = p | 'main' >> beam.Create(['a', 'b'])
side = (
p | 'side' >> beam.Create([('a', 1), ('b', 2), ('a', 3)])
# TODO(BEAM-4782): Obviate the need for this map.
| beam.Map(lambda kv: (kv[0], kv[1])))
assert_that(
main | 'first map' >> beam.Map(
lambda k,
d,
l: (k, sorted(d[k]), sorted([e[1] for e in l])),
beam.pvalue.AsMultiMap(side),
beam.pvalue.AsList(side))
| 'second map' >> beam.Map(
lambda k,
d,
l: (k[0], sorted(d[k[0]]), sorted([e[1] for e in l])),
beam.pvalue.AsMultiMap(side),
beam.pvalue.AsList(side)),
equal_to([('a', [1, 3], [1, 2, 3]), ('b', [2], [1, 2, 3])]))
def test_multimap_side_input_type_coercion(self):
with self.create_pipeline() as p:
main = p | 'main' >> beam.Create(['a', 'b'])
# The type of this side-input is forced to Any (overriding type
# inference). Without type coercion to Tuple[Any, Any], the usage of this
# side-input in AsMultiMap() below should fail.
side = (
p | 'side' >> beam.Create([('a', 1), ('b', 2),
('a', 3)]).with_output_types(typing.Any))
assert_that(
main | beam.Map(
lambda k, d: (k, sorted(d[k])), beam.pvalue.AsMultiMap(side)),
equal_to([('a', [1, 3]), ('b', [2])]))
def test_pardo_unfusable_side_inputs(self):
def cross_product(elem, sides):
for side in sides:
yield elem, side
with self.create_pipeline() as p:
pcoll = p | beam.Create(['a', 'b'])
assert_that(
pcoll | beam.FlatMap(cross_product, beam.pvalue.AsList(pcoll)),
equal_to([('a', 'a'), ('a', 'b'), ('b', 'a'), ('b', 'b')]))
with self.create_pipeline() as p:
pcoll = p | beam.Create(['a', 'b'])
derived = ((pcoll, ) | beam.Flatten()
| beam.Map(lambda x: (x, x))
| beam.GroupByKey()
| 'Unkey' >> beam.Map(lambda kv: kv[0]))
assert_that(
pcoll | beam.FlatMap(cross_product, beam.pvalue.AsList(derived)),
equal_to([('a', 'a'), ('a', 'b'), ('b', 'a'), ('b', 'b')]))
def test_pardo_state_only(self):
index_state_spec = userstate.CombiningValueStateSpec('index', sum)
# TODO(ccy): State isn't detected with Map/FlatMap.
class AddIndex(beam.DoFn):
def process(self, kv, index=beam.DoFn.StateParam(index_state_spec)):
k, v = kv
index.add(1)
yield k, v, index.read()
inputs = [('A', 'a')] * 2 + [('B', 'b')] * 3
expected = [('A', 'a', 1), ('A', 'a', 2), ('B', 'b', 1), ('B', 'b', 2),
('B', 'b', 3)]
with self.create_pipeline() as p:
# TODO(BEAM-8893): Allow the reshuffle.
assert_that(
p | beam.Create(inputs, reshuffle=False) | beam.ParDo(AddIndex()),
equal_to(expected))
@unittest.skip('TestStream not yet supported')
def test_teststream_pardo_timers(self):
timer_spec = userstate.TimerSpec('timer', userstate.TimeDomain.WATERMARK)
class TimerDoFn(beam.DoFn):
def process(self, element, timer=beam.DoFn.TimerParam(timer_spec)):
unused_key, ts = element
timer.set(ts)
timer.set(2 * ts)
@userstate.on_timer(timer_spec)
def process_timer(self):
yield 'fired'
ts = (
TestStream().add_elements([('k1', 10)]) # Set timer for 20
.advance_watermark_to(100).add_elements([('k2', 100)
]) # Set timer for 200
.advance_watermark_to(1000))
with self.create_pipeline() as p:
_ = (
p
| ts
| beam.ParDo(TimerDoFn())
| beam.Map(lambda x, ts=beam.DoFn.TimestampParam: (x, ts)))
#expected = [('fired', ts) for ts in (20, 200)]
#assert_that(actual, equal_to(expected))
def test_pardo_timers(self):
timer_spec = userstate.TimerSpec('timer', userstate.TimeDomain.WATERMARK)
state_spec = userstate.CombiningValueStateSpec('num_called', sum)
class TimerDoFn(beam.DoFn):
def process(self, element, timer=beam.DoFn.TimerParam(timer_spec)):
unused_key, ts = element
timer.set(ts)
timer.set(2 * ts)
@userstate.on_timer(timer_spec)
def process_timer(
self,
ts=beam.DoFn.TimestampParam,
timer=beam.DoFn.TimerParam(timer_spec),
state=beam.DoFn.StateParam(state_spec)):
if state.read() == 0:
state.add(1)
timer.set(timestamp.Timestamp(micros=2 * ts.micros))
yield 'fired'
with self.create_pipeline() as p:
actual = (
p
| beam.Create([('k1', 10), ('k2', 100)])
| beam.ParDo(TimerDoFn())
| beam.Map(lambda x, ts=beam.DoFn.TimestampParam: (x, ts)))
expected = [('fired', ts) for ts in (20, 200, 40, 400)]
assert_that(actual, equal_to(expected))
def test_pardo_timers_clear(self):
if type(self).__name__ != 'FlinkRunnerTest':
# FnApiRunner fails to wire multiple timer collections
# this method can replace test_pardo_timers when the issue is fixed
self.skipTest('BEAM-7074: Multiple timer definitions not supported.')
timer_spec = userstate.TimerSpec('timer', userstate.TimeDomain.WATERMARK)
clear_timer_spec = userstate.TimerSpec(
'clear_timer', userstate.TimeDomain.WATERMARK)
class TimerDoFn(beam.DoFn):
def process(
self,
element,
timer=beam.DoFn.TimerParam(timer_spec),
clear_timer=beam.DoFn.TimerParam(clear_timer_spec)):
unused_key, ts = element
timer.set(ts)
timer.set(2 * ts)
clear_timer.set(ts)
clear_timer.clear()
@userstate.on_timer(timer_spec)
def process_timer(self):
yield 'fired'
@userstate.on_timer(clear_timer_spec)
def process_clear_timer(self):
yield 'should not fire'
with self.create_pipeline() as p:
actual = (
p
| beam.Create([('k1', 10), ('k2', 100)])
| beam.ParDo(TimerDoFn())
| beam.Map(lambda x, ts=beam.DoFn.TimestampParam: (x, ts)))
expected = [('fired', ts) for ts in (20, 200)]
assert_that(actual, equal_to(expected))
def test_pardo_state_timers(self):
self._run_pardo_state_timers(False)
def test_windowed_pardo_state_timers(self):
self._run_pardo_state_timers(True)
def _run_pardo_state_timers(self, windowed):
state_spec = userstate.BagStateSpec('state', beam.coders.StrUtf8Coder())
timer_spec = userstate.TimerSpec('timer', userstate.TimeDomain.WATERMARK)
elements = list('abcdefgh')
buffer_size = 3
class BufferDoFn(beam.DoFn):
def process(
self,
kv,
ts=beam.DoFn.TimestampParam,
timer=beam.DoFn.TimerParam(timer_spec),
state=beam.DoFn.StateParam(state_spec)):
_, element = kv
state.add(element)
buffer = state.read()
# For real use, we'd keep track of this size separately.
if len(list(buffer)) >= 3:
state.clear()
yield buffer
else:
timer.set(ts + 1)
@userstate.on_timer(timer_spec)
def process_timer(self, state=beam.DoFn.StateParam(state_spec)):
buffer = state.read()
state.clear()
yield buffer
def is_buffered_correctly(actual):
# Pickling self in the closure for asserts gives errors (only on jenkins).
self = FnApiRunnerTest('__init__')
# Actual should be a grouping of the inputs into batches of size
# at most buffer_size, but the actual batching is nondeterministic
# based on ordering and trigger firing timing.
self.assertEqual(sorted(sum((list(b) for b in actual), [])), elements)
self.assertEqual(max(len(list(buffer)) for buffer in actual), buffer_size)
if windowed:
# Elements were assigned to windows based on their parity.
# Assert that each grouping consists of elements belonging to the
# same window to ensure states and timers were properly partitioned.
for b in actual:
parity = set(ord(e) % 2 for e in b)
self.assertEqual(1, len(parity), b)
with self.create_pipeline() as p:
actual = (
p
# TODO(BEAM-8893): Allow the reshuffle.
| beam.Create(elements, reshuffle=False)
# Send even and odd elements to different windows.
| beam.Map(lambda e: window.TimestampedValue(e, ord(e) % 2))
| beam.WindowInto(
window.FixedWindows(1) if windowed else window.GlobalWindows())
| beam.Map(lambda x: ('key', x))
| beam.ParDo(BufferDoFn()))
assert_that(actual, is_buffered_correctly)
def test_sdf(self):
class ExpandingStringsDoFn(beam.DoFn):
def process(
self,
element,
restriction_tracker=beam.DoFn.RestrictionParam(
ExpandStringsProvider())):
assert isinstance(restriction_tracker, RestrictionTrackerView)
cur = restriction_tracker.current_restriction().start
while restriction_tracker.try_claim(cur):
yield element[cur]
cur += 1
with self.create_pipeline() as p:
data = ['abc', 'defghijklmno', 'pqrstuv', 'wxyz']
actual = (p | beam.Create(data) | beam.ParDo(ExpandingStringsDoFn()))
assert_that(actual, equal_to(list(''.join(data))))
def test_sdf_with_check_done_failed(self):
class ExpandingStringsDoFn(beam.DoFn):
def process(
self,
element,
restriction_tracker=beam.DoFn.RestrictionParam(
ExpandStringsProvider())):
assert isinstance(restriction_tracker, RestrictionTrackerView)
cur = restriction_tracker.current_restriction().start
while restriction_tracker.try_claim(cur):
yield element[cur]
cur += 1
return
with self.assertRaises(Exception):
with self.create_pipeline() as p:
data = ['abc', 'defghijklmno', 'pqrstuv', 'wxyz']
_ = (p | beam.Create(data) | beam.ParDo(ExpandingStringsDoFn()))
def test_sdf_with_watermark_tracking(self):
class ExpandingStringsDoFn(beam.DoFn):
def process(
self,
element,
restriction_tracker=beam.DoFn.RestrictionParam(
ExpandStringsProvider()),
watermark_estimator=beam.DoFn.WatermarkEstimatorParam(
ManualWatermarkEstimator.default_provider())):
cur = restriction_tracker.current_restriction().start
while restriction_tracker.try_claim(cur):
watermark_estimator.set_watermark(timestamp.Timestamp(cur))
assert (
watermark_estimator.current_watermark() == timestamp.Timestamp(
cur))
yield element[cur]
if cur % 2 == 1:
restriction_tracker.defer_remainder(timestamp.Duration(micros=5))
return
cur += 1
with self.create_pipeline() as p:
data = ['abc', 'defghijklmno', 'pqrstuv', 'wxyz']
actual = (p | beam.Create(data) | beam.ParDo(ExpandingStringsDoFn()))
assert_that(actual, equal_to(list(''.join(data))))
def test_sdf_with_sdf_initiated_checkpointing(self):
counter = beam.metrics.Metrics.counter('ns', 'my_counter')
class ExpandStringsDoFn(beam.DoFn):
def process(
self,
element,
restriction_tracker=beam.DoFn.RestrictionParam(
ExpandStringsProvider())):
assert isinstance(restriction_tracker, RestrictionTrackerView)
cur = restriction_tracker.current_restriction().start
while restriction_tracker.try_claim(cur):
counter.inc()
yield element[cur]
if cur % 2 == 1:
restriction_tracker.defer_remainder()
return
cur += 1
with self.create_pipeline() as p:
data = ['abc', 'defghijklmno', 'pqrstuv', 'wxyz']
actual = (p | beam.Create(data) | beam.ParDo(ExpandStringsDoFn()))
assert_that(actual, equal_to(list(''.join(data))))
if isinstance(p.runner, fn_api_runner.FnApiRunner):
res = p.runner._latest_run_result
counters = res.metrics().query(beam.metrics.MetricsFilter())['counters']
self.assertEqual(1, len(counters))
self.assertEqual(counters[0].committed, len(''.join(data)))
def test_group_by_key(self):
with self.create_pipeline() as p:
res = (
p
| beam.Create([('a', 1), ('a', 2), ('b', 3)])
| beam.GroupByKey()
| beam.Map(lambda k_vs: (k_vs[0], sorted(k_vs[1]))))
assert_that(res, equal_to([('a', [1, 2]), ('b', [3])]))
# Runners may special case the Reshuffle transform urn.
def test_reshuffle(self):
with self.create_pipeline() as p:
assert_that(
p | beam.Create([1, 2, 3]) | beam.Reshuffle(), equal_to([1, 2, 3]))
def test_flatten(self, with_transcoding=True):
with self.create_pipeline() as p:
if with_transcoding:
# Additional element which does not match with the first type
additional = [ord('d')]
else:
additional = ['d']
res = (
p | 'a' >> beam.Create(['a']),
p | 'bc' >> beam.Create(['b', 'c']),
p | 'd' >> beam.Create(additional)) | beam.Flatten()
assert_that(res, equal_to(['a', 'b', 'c'] + additional))
def test_flatten_same_pcollections(self, with_transcoding=True):
with self.create_pipeline() as p:
pc = p | beam.Create(['a', 'b'])
assert_that((pc, pc, pc) | beam.Flatten(), equal_to(['a', 'b'] * 3))
def test_combine_per_key(self):
with self.create_pipeline() as p:
res = (
p
| beam.Create([('a', 1), ('a', 2), ('b', 3)])
| beam.CombinePerKey(beam.combiners.MeanCombineFn()))
assert_that(res, equal_to([('a', 1.5), ('b', 3.0)]))
def test_read(self):
# Can't use NamedTemporaryFile as a context
# due to https://bugs.python.org/issue14243
temp_file = tempfile.NamedTemporaryFile(delete=False)
try:
temp_file.write(b'a\nb\nc')
temp_file.close()
with self.create_pipeline() as p:
assert_that(
p | beam.io.ReadFromText(temp_file.name), equal_to(['a', 'b', 'c']))
finally:
os.unlink(temp_file.name)
def test_windowing(self):
with self.create_pipeline() as p:
res = (
p
| beam.Create([1, 2, 100, 101, 102])
| beam.Map(lambda t: window.TimestampedValue(('k', t), t))
| beam.WindowInto(beam.transforms.window.Sessions(10))
| beam.GroupByKey()
| beam.Map(lambda k_vs1: (k_vs1[0], sorted(k_vs1[1]))))
assert_that(res, equal_to([('k', [1, 2]), ('k', [100, 101, 102])]))
@unittest.skip('BEAM-9119: test is flaky')
def test_large_elements(self):
with self.create_pipeline() as p:
big = (
p
| beam.Create(['a', 'a', 'b'])
|
beam.Map(lambda x: (x, x * data_plane._DEFAULT_SIZE_FLUSH_THRESHOLD)))
side_input_res = (
big
| beam.Map(
lambda x,
side: (x[0], side.count(x[0])),
beam.pvalue.AsList(big | beam.Map(lambda x: x[0]))))
assert_that(
side_input_res,
equal_to([('a', 2), ('a', 2), ('b', 1)]),
label='side')
gbk_res = (big | beam.GroupByKey() | beam.Map(lambda x: x[0]))
assert_that(gbk_res, equal_to(['a', 'b']), label='gbk')
def test_error_message_includes_stage(self):
with self.assertRaises(BaseException) as e_cm:
with self.create_pipeline() as p:
def raise_error(x):
raise RuntimeError('x')
# pylint: disable=expression-not-assigned
(
p
| beam.Create(['a', 'b'])
| 'StageA' >> beam.Map(lambda x: x)
| 'StageB' >> beam.Map(lambda x: x)
| 'StageC' >> beam.Map(raise_error)
| 'StageD' >> beam.Map(lambda x: x))
message = e_cm.exception.args[0]
self.assertIn('StageC', message)
self.assertNotIn('StageB', message)
def test_error_traceback_includes_user_code(self):
def first(x):
return second(x)
def second(x):
return third(x)
def third(x):
raise ValueError('x')
try:
with self.create_pipeline() as p:
p | beam.Create([0]) | beam.Map(first) # pylint: disable=expression-not-assigned
except Exception: # pylint: disable=broad-except
message = traceback.format_exc()
else:
raise AssertionError('expected exception not raised')
self.assertIn('first', message)
self.assertIn('second', message)
self.assertIn('third', message)
def test_no_subtransform_composite(self):
class First(beam.PTransform):
def expand(self, pcolls):
return pcolls[0]
with self.create_pipeline() as p:
pcoll_a = p | 'a' >> beam.Create(['a'])
pcoll_b = p | 'b' >> beam.Create(['b'])
assert_that((pcoll_a, pcoll_b) | First(), equal_to(['a']))
def test_metrics(self, check_gauge=True):
p = self.create_pipeline()
counter = beam.metrics.Metrics.counter('ns', 'counter')
distribution = beam.metrics.Metrics.distribution('ns', 'distribution')
gauge = beam.metrics.Metrics.gauge('ns', 'gauge')
pcoll = p | beam.Create(['a', 'zzz'])
# pylint: disable=expression-not-assigned
pcoll | 'count1' >> beam.FlatMap(lambda x: counter.inc())
pcoll | 'count2' >> beam.FlatMap(lambda x: counter.inc(len(x)))
pcoll | 'dist' >> beam.FlatMap(lambda x: distribution.update(len(x)))
pcoll | 'gauge' >> beam.FlatMap(lambda x: gauge.set(3))
res = p.run()
res.wait_until_finish()
c1, = res.metrics().query(beam.metrics.MetricsFilter().with_step('count1'))[
'counters']
self.assertEqual(c1.committed, 2)
c2, = res.metrics().query(beam.metrics.MetricsFilter().with_step('count2'))[
'counters']
self.assertEqual(c2.committed, 4)
dist, = res.metrics().query(beam.metrics.MetricsFilter().with_step('dist'))[
'distributions']
self.assertEqual(
dist.committed.data, beam.metrics.cells.DistributionData(4, 2, 1, 3))
self.assertEqual(dist.committed.mean, 2.0)
if check_gauge:
gaug, = res.metrics().query(
beam.metrics.MetricsFilter().with_step('gauge'))['gauges']
self.assertEqual(gaug.committed.value, 3)
def test_callbacks_with_exception(self):
elements_list = ['1', '2']
def raise_exception():
raise Exception('raise exception when calling callback')
class FinalizableDoFnWithException(beam.DoFn):
def process(
self, element, bundle_finalizer=beam.DoFn.BundleFinalizerParam):
bundle_finalizer.register(raise_exception)
yield element
with self.create_pipeline() as p:
res = (
p
| beam.Create(elements_list)
| beam.ParDo(FinalizableDoFnWithException()))
assert_that(res, equal_to(['1', '2']))
def test_register_finalizations(self):
event_recorder = EventRecorder(tempfile.gettempdir())
elements_list = ['2', '1']
class FinalizableDoFn(beam.DoFn):
def process(
self, element, bundle_finalizer=beam.DoFn.BundleFinalizerParam):
bundle_finalizer.register(lambda: event_recorder.record(element))
yield element
with self.create_pipeline() as p:
res = (p | beam.Create(elements_list) | beam.ParDo(FinalizableDoFn()))
assert_that(res, equal_to(elements_list))
results = event_recorder.events()
event_recorder.cleanup()
self.assertEqual(results, sorted(elements_list))
def test_sdf_synthetic_source(self):
common_attrs = {
'key_size': 1,
'value_size': 1,
'initial_splitting_num_bundles': 2,
'initial_splitting_desired_bundle_size': 2,
'sleep_per_input_record_sec': 0,
'initial_splitting': 'const'
}
num_source_description = 5
min_num_record = 10
max_num_record = 20
# pylint: disable=unused-variable
source_descriptions = ([
dict({'num_records': random.randint(min_num_record, max_num_record)},
**common_attrs) for i in range(0, num_source_description)
])
total_num_records = 0
for source in source_descriptions:
total_num_records += source['num_records']
with self.create_pipeline() as p:
res = (
p
| beam.Create(source_descriptions)
| beam.ParDo(SyntheticSDFAsSource())
| beam.combiners.Count.Globally())
assert_that(res, equal_to([total_num_records]))
# These tests are kept in a separate group so that they are
# not run in the FnApiRunnerTestWithBundleRepeat, which repeats
# bundle processing. This breaks the byte sampling metrics, as
# it makes the probability of sampling far too small
# upon repeating bundle processing due to unnecessarily incrementing
# the sampling counter.
class FnApiRunnerMetricsTest(unittest.TestCase):
def assert_has_counter(
self, mon_infos, urn, labels, value=None, ge_value=None):
# TODO(ajamato): Consider adding a matcher framework
found = 0
matches = []
for mi in mon_infos:
if has_urn_and_labels(mi, urn, labels):
extracted_value = monitoring_infos.extract_counter_value(mi)
if ge_value is not None:
if extracted_value >= ge_value:
found = found + 1
elif value is not None:
if extracted_value == value:
found = found + 1
else:
found = found + 1
ge_value_str = {'ge_value': ge_value} if ge_value else ''
value_str = {'value': value} if value else ''
self.assertEqual(
1,
found,
"Found (%s, %s) Expected only 1 monitoring_info for %s." % (
found,
matches,
(urn, labels, value_str, ge_value_str),
))
def assert_has_distribution(
self, mon_infos, urn, labels, sum=None, count=None, min=None, max=None):
# TODO(ajamato): Consider adding a matcher framework
sum = _matcher_or_equal_to(sum)
count = _matcher_or_equal_to(count)
min = _matcher_or_equal_to(min)
max = _matcher_or_equal_to(max)
found = 0
description = StringDescription()
for mi in mon_infos:
if has_urn_and_labels(mi, urn, labels):
(extracted_count, extracted_sum, extracted_min,
extracted_max) = monitoring_infos.extract_distribution(mi)
increment = 1
if sum is not None:
description.append_text(' sum: ')
sum.describe_to(description)
if not sum.matches(extracted_sum):
increment = 0
if count is not None:
description.append_text(' count: ')
count.describe_to(description)
if not count.matches(extracted_count):
increment = 0
if min is not None:
description.append_text(' min: ')
min.describe_to(description)
if not min.matches(extracted_min):
increment = 0
if max is not None:
description.append_text(' max: ')
max.describe_to(description)
if not max.matches(extracted_max):
increment = 0
found += increment
self.assertEqual(
1,
found,
"Found (%s) Expected only 1 monitoring_info for %s." % (
found,
(urn, labels, str(description)),
))
def create_pipeline(self):
return beam.Pipeline(runner=fn_api_runner.FnApiRunner())
def test_element_count_metrics(self):
class GenerateTwoOutputs(beam.DoFn):
def process(self, element):
yield str(element) + '1'
yield beam.pvalue.TaggedOutput('SecondOutput', str(element) + '2')
yield beam.pvalue.TaggedOutput('SecondOutput', str(element) + '2')
yield beam.pvalue.TaggedOutput('ThirdOutput', str(element) + '3')
class PassThrough(beam.DoFn):
def process(self, element):
yield element
p = self.create_pipeline()
# Produce enough elements to make sure byte sampling occurs.
num_source_elems = 100
pcoll = p | beam.Create(['a%d' % i for i in range(num_source_elems)],
reshuffle=False)
# pylint: disable=expression-not-assigned
pardo = (
'StepThatDoesTwoOutputs' >> beam.ParDo(
GenerateTwoOutputs()).with_outputs(
'SecondOutput', 'ThirdOutput', main='FirstAndMainOutput'))
# Actually feed pcollection to pardo
second_output, third_output, first_output = (pcoll | pardo)
# Consume some of the elements.
merged = ((first_output, second_output, third_output) | beam.Flatten())
merged | ('PassThrough') >> beam.ParDo(PassThrough())
second_output | ('PassThrough2') >> beam.ParDo(PassThrough())
res = p.run()
res.wait_until_finish()
result_metrics = res.monitoring_metrics()
counters = result_metrics.monitoring_infos()
# All element count and byte count metrics must have a PCOLLECTION_LABEL.
self.assertFalse([
x for x in counters if x.urn in [
monitoring_infos.ELEMENT_COUNT_URN,
monitoring_infos.SAMPLED_BYTE_SIZE_URN
] and monitoring_infos.PCOLLECTION_LABEL not in x.labels
])
try:
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_1'
}
self.assert_has_counter(
counters, monitoring_infos.ELEMENT_COUNT_URN, labels, 1)
# Create output.
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_3'
}
self.assert_has_counter(
counters,
monitoring_infos.ELEMENT_COUNT_URN,
labels,
num_source_elems)
self.assert_has_distribution(
counters,
monitoring_infos.SAMPLED_BYTE_SIZE_URN,
labels,
min=hamcrest.greater_than(0),
max=hamcrest.greater_than(0),
sum=hamcrest.greater_than(0),
count=hamcrest.greater_than(0))
# GenerateTwoOutputs, main output.
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_4'
}
self.assert_has_counter(
counters,
monitoring_infos.ELEMENT_COUNT_URN,
labels,
num_source_elems)
self.assert_has_distribution(
counters,
monitoring_infos.SAMPLED_BYTE_SIZE_URN,
labels,
min=hamcrest.greater_than(0),
max=hamcrest.greater_than(0),
sum=hamcrest.greater_than(0),
count=hamcrest.greater_than(0))
# GenerateTwoOutputs, "SecondOutput" output.
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_5'
}
self.assert_has_counter(
counters,
monitoring_infos.ELEMENT_COUNT_URN,
labels,
2 * num_source_elems)
self.assert_has_distribution(
counters,
monitoring_infos.SAMPLED_BYTE_SIZE_URN,
labels,
min=hamcrest.greater_than(0),
max=hamcrest.greater_than(0),
sum=hamcrest.greater_than(0),
count=hamcrest.greater_than(0))
# GenerateTwoOutputs, "ThirdOutput" output.
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_6'
}
self.assert_has_counter(
counters,
monitoring_infos.ELEMENT_COUNT_URN,
labels,
num_source_elems)
self.assert_has_distribution(
counters,
monitoring_infos.SAMPLED_BYTE_SIZE_URN,
labels,
min=hamcrest.greater_than(0),
max=hamcrest.greater_than(0),
sum=hamcrest.greater_than(0),
count=hamcrest.greater_than(0))
# Skipping other pcollections due to non-deterministic naming for multiple
# outputs.
# Flatten/Read, main output.
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_7'
}
self.assert_has_counter(
counters,
monitoring_infos.ELEMENT_COUNT_URN,
labels,
4 * num_source_elems)
self.assert_has_distribution(
counters,
monitoring_infos.SAMPLED_BYTE_SIZE_URN,
labels,
min=hamcrest.greater_than(0),
max=hamcrest.greater_than(0),
sum=hamcrest.greater_than(0),
count=hamcrest.greater_than(0))
# PassThrough, main output
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_8'
}
self.assert_has_counter(
counters,
monitoring_infos.ELEMENT_COUNT_URN,
labels,
4 * num_source_elems)
self.assert_has_distribution(
counters,
monitoring_infos.SAMPLED_BYTE_SIZE_URN,
labels,
min=hamcrest.greater_than(0),
max=hamcrest.greater_than(0),
sum=hamcrest.greater_than(0),
count=hamcrest.greater_than(0))
# PassThrough2, main output
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_9'
}
self.assert_has_counter(
counters,
monitoring_infos.ELEMENT_COUNT_URN,
labels,
num_source_elems)
self.assert_has_distribution(
counters,
monitoring_infos.SAMPLED_BYTE_SIZE_URN,
labels,
min=hamcrest.greater_than(0),
max=hamcrest.greater_than(0),
sum=hamcrest.greater_than(0),
count=hamcrest.greater_than(0))
except:
print(res._monitoring_infos_by_stage)
raise
def test_non_user_metrics(self):
p = self.create_pipeline()
pcoll = p | beam.Create(['a', 'zzz'])
# pylint: disable=expression-not-assigned
pcoll | 'MyStep' >> beam.FlatMap(lambda x: None)
res = p.run()
res.wait_until_finish()
result_metrics = res.monitoring_metrics()
all_metrics_via_monitoring_infos = result_metrics.query()
def assert_counter_exists(metrics, namespace, name, step):
found = 0
metric_key = MetricKey(step, MetricName(namespace, name))
for m in metrics['counters']:
if m.key == metric_key:
found = found + 1
self.assertEqual(
1, found, "Did not find exactly 1 metric for %s." % metric_key)
urns = [
monitoring_infos.START_BUNDLE_MSECS_URN,
monitoring_infos.PROCESS_BUNDLE_MSECS_URN,
monitoring_infos.FINISH_BUNDLE_MSECS_URN,
monitoring_infos.TOTAL_MSECS_URN,
]
for urn in urns:
split = urn.split(':')
namespace = split[0]
name = ':'.join(split[1:])
assert_counter_exists(
all_metrics_via_monitoring_infos,
namespace,
name,
step='Create/Impulse')
assert_counter_exists(
all_metrics_via_monitoring_infos, namespace, name, step='MyStep')
# Due to the somewhat non-deterministic nature of state sampling and sleep,
# this test is flaky when state duration is low.
# Since increasing state duration significantly would also slow down
# the test suite, we are retrying twice on failure as a mitigation.
@retry(reraise=True, stop=stop_after_attempt(3))
def test_progress_metrics(self):
p = self.create_pipeline()
_ = (
p
| beam.Create([0, 0, 0, 5e-3 * DEFAULT_SAMPLING_PERIOD_MS],
reshuffle=False)
| beam.Map(time.sleep)
| beam.Map(lambda x: ('key', x))
| beam.GroupByKey()
| 'm_out' >> beam.FlatMap(
lambda x: [
1,
2,
3,
4,
5,
beam.pvalue.TaggedOutput('once', x),
beam.pvalue.TaggedOutput('twice', x),
beam.pvalue.TaggedOutput('twice', x)
]))
res = p.run()
res.wait_until_finish()
def has_mi_for_ptransform(mon_infos, ptransform):
for mi in mon_infos:
if ptransform in mi.labels[monitoring_infos.PTRANSFORM_LABEL]:
return True
return False
try:
# Test the new MonitoringInfo monitoring format.
self.assertEqual(2, len(res._monitoring_infos_by_stage))
pregbk_mis, postgbk_mis = list(res._monitoring_infos_by_stage.values())
if not has_mi_for_ptransform(pregbk_mis, 'Create/Map(decode)'):
# The monitoring infos above are actually unordered. Swap.
pregbk_mis, postgbk_mis = postgbk_mis, pregbk_mis
# pregbk monitoring infos
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_3'
}
self.assert_has_counter(
pregbk_mis, monitoring_infos.ELEMENT_COUNT_URN, labels, value=4)
self.assert_has_distribution(
pregbk_mis, monitoring_infos.SAMPLED_BYTE_SIZE_URN, labels)
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_4'
}
self.assert_has_counter(
pregbk_mis, monitoring_infos.ELEMENT_COUNT_URN, labels, value=4)
self.assert_has_distribution(
pregbk_mis, monitoring_infos.SAMPLED_BYTE_SIZE_URN, labels)
labels = {monitoring_infos.PTRANSFORM_LABEL: 'Map(sleep)'}
self.assert_has_counter(
pregbk_mis,
monitoring_infos.TOTAL_MSECS_URN,
labels,
ge_value=4 * DEFAULT_SAMPLING_PERIOD_MS)
# postgbk monitoring infos
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_8'
}
self.assert_has_counter(
postgbk_mis, monitoring_infos.ELEMENT_COUNT_URN, labels, value=1)
self.assert_has_distribution(
postgbk_mis, monitoring_infos.SAMPLED_BYTE_SIZE_URN, labels)
labels = {
monitoring_infos.PCOLLECTION_LABEL: 'ref_PCollection_PCollection_9'
}
self.assert_has_counter(
postgbk_mis, monitoring_infos.ELEMENT_COUNT_URN, labels, value=5)
self.assert_has_distribution(
postgbk_mis, monitoring_infos.SAMPLED_BYTE_SIZE_URN, labels)
except:
print(res._monitoring_infos_by_stage)
raise
class FnApiRunnerTestWithGrpc(FnApiRunnerTest):
def create_pipeline(self):
return beam.Pipeline(
runner=fn_api_runner.FnApiRunner(
default_environment=environments.EmbeddedPythonGrpcEnvironment()))
class FnApiRunnerTestWithDisabledCaching(FnApiRunnerTest):
def create_pipeline(self):
return beam.Pipeline(
runner=fn_api_runner.FnApiRunner(
default_environment=environments.EmbeddedPythonGrpcEnvironment(
state_cache_size=0, data_buffer_time_limit_ms=0)))
class FnApiRunnerTestWithMultiWorkers(FnApiRunnerTest):
def create_pipeline(self):
pipeline_options = PipelineOptions(direct_num_workers=2)
p = beam.Pipeline(
runner=fn_api_runner.FnApiRunner(), options=pipeline_options)
#TODO(BEAM-8444): Fix these tests..
p.options.view_as(DebugOptions).experiments.remove('beam_fn_api')
return p
def test_metrics(self):
raise unittest.SkipTest("This test is for a single worker only.")
def test_sdf_with_sdf_initiated_checkpointing(self):
raise unittest.SkipTest("This test is for a single worker only.")
def test_sdf_with_watermark_tracking(self):
raise unittest.SkipTest("This test is for a single worker only.")
class FnApiRunnerTestWithGrpcAndMultiWorkers(FnApiRunnerTest):
def create_pipeline(self):
pipeline_options = PipelineOptions(
direct_num_workers=2, direct_running_mode='multi_threading')
p = beam.Pipeline(
runner=fn_api_runner.FnApiRunner(), options=pipeline_options)
#TODO(BEAM-8444): Fix these tests..
p.options.view_as(DebugOptions).experiments.remove('beam_fn_api')
return p
def test_metrics(self):
raise unittest.SkipTest("This test is for a single worker only.")
def test_sdf_with_sdf_initiated_checkpointing(self):
raise unittest.SkipTest("This test is for a single worker only.")
def test_sdf_with_watermark_tracking(self):
raise unittest.SkipTest("This test is for a single worker only.")
class FnApiRunnerTestWithBundleRepeat(FnApiRunnerTest):
def create_pipeline(self):
return beam.Pipeline(runner=fn_api_runner.FnApiRunner(bundle_repeat=3))
def test_register_finalizations(self):
raise unittest.SkipTest("TODO: Avoid bundle finalizations on repeat.")
class FnApiRunnerTestWithBundleRepeatAndMultiWorkers(FnApiRunnerTest):
def create_pipeline(self):
pipeline_options = PipelineOptions(direct_num_workers=2)
p = beam.Pipeline(
runner=fn_api_runner.FnApiRunner(bundle_repeat=3),
options=pipeline_options)
p.options.view_as(DebugOptions).experiments.remove('beam_fn_api')
return p
def test_register_finalizations(self):
raise unittest.SkipTest("TODO: Avoid bundle finalizations on repeat.")
def test_metrics(self):
raise unittest.SkipTest("This test is for a single worker only.")
def test_sdf_with_sdf_initiated_checkpointing(self):
raise unittest.SkipTest("This test is for a single worker only.")
def test_sdf_with_watermark_tracking(self):
raise unittest.SkipTest("This test is for a single worker only.")
class FnApiRunnerSplitTest(unittest.TestCase):
def create_pipeline(self):
# Must be gRPC so we can send data and split requests concurrently
# with the bundle process request.
return beam.Pipeline(
runner=fn_api_runner.FnApiRunner(
default_environment=environments.EmbeddedPythonGrpcEnvironment()))
def test_checkpoint(self):
# This split manager will get re-invoked on each smaller split,
# so N times for N elements.
element_counter = ElementCounter()
def split_manager(num_elements):
# Send at least one element so it can make forward progress.
element_counter.reset()
breakpoint = element_counter.set_breakpoint(1)
# Cede control back to the runner so data can be sent.
yield
breakpoint.wait()
# Split as close to current as possible.
split_result = yield 0.0
# Verify we split at exactly the first element.
self.verify_channel_split(split_result, 0, 1)
# Continue processing.
breakpoint.clear()
self.run_split_pipeline(split_manager, list('abc'), element_counter)
def test_split_half(self):
total_num_elements = 25
seen_bundle_sizes = []
element_counter = ElementCounter()
def split_manager(num_elements):
seen_bundle_sizes.append(num_elements)
if num_elements == total_num_elements:
element_counter.reset()
breakpoint = element_counter.set_breakpoint(5)
yield
breakpoint.wait()
# Split the remainder (20, then 10, elements) in half.
split1 = yield 0.5
self.verify_channel_split(split1, 14, 15) # remainder is 15 to end
split2 = yield 0.5
self.verify_channel_split(split2, 9, 10) # remainder is 10 to end
breakpoint.clear()
self.run_split_pipeline(
split_manager, range(total_num_elements), element_counter)
self.assertEqual([25, 15], seen_bundle_sizes)
def run_split_pipeline(self, split_manager, elements, element_counter=None):
with fn_runner.split_manager('Identity', split_manager):
with self.create_pipeline() as p:
res = (
p
| beam.Create(elements)
| beam.Reshuffle()
| 'Identity' >> beam.Map(lambda x: x)
| beam.Map(lambda x: element_counter.increment() or x))
assert_that(res, equal_to(elements))
def test_nosplit_sdf(self):
def split_manager(num_elements):
yield
elements = [1, 2, 3]
expected_groups = [[(e, k) for k in range(e)] for e in elements]
self.run_sdf_split_pipeline(
split_manager, elements, ElementCounter(), expected_groups)
def test_checkpoint_sdf(self):
element_counter = ElementCounter()
def split_manager(num_elements):
if num_elements > 0:
element_counter.reset()
breakpoint = element_counter.set_breakpoint(1)
yield
breakpoint.wait()
yield 0
breakpoint.clear()
# Everything should be perfectly split.
elements = [2, 3]
expected_groups = [[(2, 0)], [(2, 1)], [(3, 0)], [(3, 1)], [(3, 2)]]
self.run_sdf_split_pipeline(
split_manager, elements, element_counter, expected_groups)
def test_split_half_sdf(self):
element_counter = ElementCounter()
is_first_bundle = [True] # emulate nonlocal for Python 2
def split_manager(num_elements):
if is_first_bundle and num_elements > 0:
del is_first_bundle[:]
breakpoint = element_counter.set_breakpoint(1)
yield
breakpoint.wait()
split1 = yield 0.5
split2 = yield 0.5
split3 = yield 0.5
self.verify_channel_split(split1, 0, 1)
self.verify_channel_split(split2, -1, 1)
self.verify_channel_split(split3, -1, 1)
breakpoint.clear()
elements = [4, 4]
expected_groups = [[(4, 0)], [(4, 1)], [(4, 2), (4, 3)], [(4, 0), (4, 1),
(4, 2), (4, 3)]]
self.run_sdf_split_pipeline(
split_manager, elements, element_counter, expected_groups)
def test_split_crazy_sdf(self, seed=None):
if seed is None:
seed = random.randrange(1 << 20)
r = random.Random(seed)
element_counter = ElementCounter()
def split_manager(num_elements):
if num_elements > 0:
element_counter.reset()
wait_for = r.randrange(num_elements)
breakpoint = element_counter.set_breakpoint(wait_for)
yield
breakpoint.wait()
yield r.random()
yield r.random()
breakpoint.clear()
try:
elements = [r.randrange(5, 10) for _ in range(5)]
self.run_sdf_split_pipeline(split_manager, elements, element_counter)
except Exception:
_LOGGER.error('test_split_crazy_sdf.seed = %s', seed)
raise
def run_sdf_split_pipeline(
self, split_manager, elements, element_counter, expected_groups=None):
# Define an SDF that for each input x produces [(x, k) for k in range(x)].
class EnumerateProvider(beam.transforms.core.RestrictionProvider):
def initial_restriction(self, element):
return restriction_trackers.OffsetRange(0, element)
def create_tracker(self, restriction):
return restriction_trackers.OffsetRestrictionTracker(restriction)
def split(self, element, restriction):
# Don't do any initial splitting to simplify test.
return [restriction]
def restriction_size(self, element, restriction):
return restriction.size()
class EnumerateSdf(beam.DoFn):
def process(
self,
element,
restriction_tracker=beam.DoFn.RestrictionParam(EnumerateProvider())):
to_emit = []
cur = restriction_tracker.current_restriction().start
while restriction_tracker.try_claim(cur):
to_emit.append((element, cur))
element_counter.increment()
cur += 1
# Emitting in batches for tighter testing.
yield to_emit
expected = [(e, k) for e in elements for k in range(e)]
with fn_runner.split_manager('SDF', split_manager):
with self.create_pipeline() as p:
grouped = (
p
| beam.Create(elements, reshuffle=False)
| 'SDF' >> beam.ParDo(EnumerateSdf()))
flat = grouped | beam.FlatMap(lambda x: x)
assert_that(flat, equal_to(expected))
if expected_groups:
assert_that(grouped, equal_to(expected_groups), label='CheckGrouped')
def verify_channel_split(self, split_result, last_primary, first_residual):
self.assertEqual(1, len(split_result.channel_splits), split_result)
channel_split, = split_result.channel_splits
self.assertEqual(last_primary, channel_split.last_primary_element)
self.assertEqual(first_residual, channel_split.first_residual_element)
# There should be a primary and residual application for each element
# not covered above.
self.assertEqual(
first_residual - last_primary - 1,
len(split_result.primary_roots),
split_result.primary_roots)
self.assertEqual(
first_residual - last_primary - 1,
len(split_result.residual_roots),
split_result.residual_roots)
class ElementCounter(object):
"""Used to wait until a certain number of elements are seen."""
def __init__(self):
self._cv = threading.Condition()
self.reset()
def reset(self):
with self._cv:
self._breakpoints = collections.defaultdict(list)
self._count = 0
def increment(self):
with self._cv:
self._count += 1
self._cv.notify_all()
breakpoints = list(self._breakpoints[self._count])
for breakpoint in breakpoints:
breakpoint.wait()
def set_breakpoint(self, value):
with self._cv:
event = threading.Event()
self._breakpoints[value].append(event)
class Breakpoint(object):
@staticmethod
def wait(timeout=10):
with self._cv:
start = time.time()
while self._count < value:
elapsed = time.time() - start
if elapsed > timeout:
raise RuntimeError('Timed out waiting for %s' % value)
self._cv.wait(timeout - elapsed)
@staticmethod
def clear():
event.set()
return Breakpoint()
def __reduce__(self):
# Ensure we get the same counter instance back through a pickling round-trip.
name = uuid.uuid4().hex
_pickled_element_counters[name] = self
return _unpickle_element_counter, (name, )
_pickled_element_counters = {} # type: Dict[str, ElementCounter]
def _unpickle_element_counter(name):
return _pickled_element_counters[name]
class EventRecorder(object):
"""Used to be registered as a callback in bundle finalization.
Records are written to a temporary file because an in-memory collection
cannot retain callback records once it has been passed into a DoFn.
"""
def __init__(self, tmp_dir):
self.tmp_dir = os.path.join(tmp_dir, uuid.uuid4().hex)
os.mkdir(self.tmp_dir)
def record(self, content):
file_path = os.path.join(self.tmp_dir, uuid.uuid4().hex + '.txt')
with open(file_path, 'w') as f:
f.write(content)
def events(self):
content = []
record_files = [
f for f in os.listdir(self.tmp_dir)
if os.path.isfile(os.path.join(self.tmp_dir, f))
]
for file in record_files:
with open(os.path.join(self.tmp_dir, file), 'r') as f:
content.append(f.read())
return sorted(content)
def cleanup(self):
shutil.rmtree(self.tmp_dir)
class ExpandStringsProvider(beam.transforms.core.RestrictionProvider):
"""A RestrictionProvider that used for sdf related tests."""
def initial_restriction(self, element):
return restriction_trackers.OffsetRange(0, len(element))
def create_tracker(self, restriction):
return restriction_trackers.OffsetRestrictionTracker(restriction)
def split(self, element, restriction):
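# Using half of the restriction's size as the desired chunk size splits each
# element's restriction into roughly two pieces.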
desired_bundle_size = restriction.size() // 2
return restriction.split(desired_bundle_size)
def restriction_size(self, element, restriction):
return restriction.size()
class FnApiBasedLullLoggingTest(unittest.TestCase):
def create_pipeline(self):
return beam.Pipeline(
runner=fn_api_runner.FnApiRunner(
default_environment=environments.EmbeddedPythonGrpcEnvironment(),
progress_request_frequency=0.5))
def test_lull_logging(self):
# TODO(BEAM-1251): Remove this test skip after dropping Py 2 support.
if sys.version_info < (3, 4):
self.skipTest('Log-based assertions are supported after Python 3.4')
try:
utils.check_compiled('apache_beam.runners.worker.opcounters')
except RuntimeError:
self.skipTest('Cython is not available')
with self.assertLogs(level='WARNING') as logs:
with self.create_pipeline() as p:
sdk_worker.DEFAULT_LOG_LULL_TIMEOUT_NS = 1000 * 1000 # Lull after 1 ms
_ = (p | beam.Create([1]) | beam.Map(time.sleep))
self.assertRegex(
''.join(logs.output),
'.*Operation ongoing for over.*',
'Unable to find a lull logged for this job.')
class StateBackedTestElementType(object):
live_element_count = 0
def __init__(self, num_elements, unused):
self.num_elements = num_elements
StateBackedTestElementType.live_element_count += 1
# Because a state-backed iterable is used, we expect only a few instances
# to be alive at any given time.
if StateBackedTestElementType.live_element_count > 5:
raise RuntimeError('Too many live instances.')
def __del__(self):
StateBackedTestElementType.live_element_count -= 1
def __reduce__(self):
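# The second constructor argument is pure padding; it makes each pickled
# element large while the value itself is ignored by __init__.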
return (self.__class__, (self.num_elements, 'x' * self.num_elements))
@attr('ValidatesRunner')
class FnApiBasedStateBackedCoderTest(unittest.TestCase):
def create_pipeline(self):
return beam.Pipeline(
runner=fn_api_runner.FnApiRunner(use_state_iterables=True))
def test_gbk_many_values(self):
with self.create_pipeline() as p:
# The number of integers could be a knob to test against
# different runners' default settings on page size.
VALUES_PER_ELEMENT = 300
NUM_OF_ELEMENTS = 200
r = (
p
| beam.Create([None])
| beam.FlatMap(
lambda x: ((1, StateBackedTestElementType(VALUES_PER_ELEMENT, _))
for _ in range(NUM_OF_ELEMENTS)))
| beam.GroupByKey()
| beam.MapTuple(lambda _, vs: sum(e.num_elements for e in vs)))
assert_that(r, equal_to([VALUES_PER_ELEMENT * NUM_OF_ELEMENTS]))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
| 35.36131
| 89
| 0.64218
|
9d201abac3b691c2352ce49c740540fec78b5ebb
| 7,117
|
py
|
Python
|
enaml/qt/qt_ipython_console.py
|
jwiggins/enaml
|
1c8793ba5390c52e119423684753fc3b1b893ae2
|
[
"BSD-3-Clause-Clear"
] | 26
|
2016-04-01T18:49:31.000Z
|
2020-07-21T22:19:46.000Z
|
enaml/qt/qt_ipython_console.py
|
jwiggins/enaml
|
1c8793ba5390c52e119423684753fc3b1b893ae2
|
[
"BSD-3-Clause-Clear"
] | 29
|
2016-02-22T17:40:55.000Z
|
2018-08-21T18:18:36.000Z
|
enaml/qt/qt_ipython_console.py
|
jwiggins/enaml
|
1c8793ba5390c52e119423684753fc3b1b893ae2
|
[
"BSD-3-Clause-Clear"
] | 4
|
2016-08-29T13:07:19.000Z
|
2018-11-04T01:31:46.000Z
|
#------------------------------------------------------------------------------
# Copyright (c) 2014, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
from atom.api import Typed
from enaml.widgets.ipython_console import ProxyIPythonConsole
from IPython.qt.console.rich_ipython_widget import RichIPythonWidget
from IPython.qt.inprocess import QtInProcessKernelManager
from .QtGui import QFrame, QVBoxLayout
from . import focus_registry
from .q_deferred_caller import deferredCall
from .qt_control import QtControl
class QtIPythonConsole(QtControl, ProxyIPythonConsole):
""" A Qt4 implementation of an Enaml IPythonConsole.
"""
#: The wrapper widget created by the proxy. A wrapper is necessary
#: since the IPython widget overrides critical Qt API methods which
#: render the widget incompatible with the ancestor hierarchy.
widget = Typed(QFrame)
#: The internal IPython console widget.
ipy_widget = Typed(RichIPythonWidget)
#--------------------------------------------------------------------------
# Lifecycle API
#--------------------------------------------------------------------------
def create_widget(self):
""" Create the underlying widget.
"""
self.widget = QFrame(self.parent_widget())
self.ipy_widget = RichIPythonWidget()
assert self.page_control is not None # always use paging
def init_widget(self):
""" Initialize the underlying widget.
"""
super(QtIPythonConsole, self).init_widget()
self._setup_kernel()
focus_registry.register(self.text_control, self)
focus_registry.register(self.page_control, self)
self.update_ns(self.declaration.initial_ns)
self.ipy_widget.exit_requested.connect(self._on_exit_requested)
def init_layout(self):
""" Initialize the underlying widget layout.
"""
super(QtIPythonConsole, self).init_layout()
layout = QVBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
layout.addWidget(self.ipy_widget)
self.widget.setLayout(layout)
def destroy(self):
""" Destroy the underlying widget.
"""
self._teardown_kernel()
focus_registry.unregister(self.text_control)
focus_registry.unregister(self.page_control)
super(QtIPythonConsole, self).destroy()
#--------------------------------------------------------------------------
# Private API
#--------------------------------------------------------------------------
def _setup_kernel(self):
""" Setup the kernel for the widget.
"""
kernel_manager = QtInProcessKernelManager()
kernel_manager.start_kernel()
kernel_client = kernel_manager.client()
kernel_client.start_channels()
ipy_widget = self.ipy_widget
ipy_widget.kernel_manager = kernel_manager
ipy_widget.kernel_client = kernel_client
def _teardown_kernel(self):
""" Teardown the kernel for the widget.
"""
ipy_widget = self.ipy_widget
ipy_widget.kernel_client.stop_channels()
ipy_widget.kernel_manager.shutdown_kernel()
def _on_exit_requested(self, obj):
""" Handle the 'exit_requested' signal on the widget.
"""
deferredCall(self.declaration.exit_requested)
#--------------------------------------------------------------------------
# Protected API
#--------------------------------------------------------------------------
def focus_target(self):
""" Returns the current focus target for the widget.
"""
page = self.page_control
if page.isVisibleTo(self.widget):
return page
return self.text_control
def hook_focus_events(self):
""" Hook the focus events for the underlyling widget.
"""
text = self.text_control
text.focusInEvent = self.textFocusInEvent
text.focusOutEvent = self.textFocusOutEvent
page = self.page_control
page.focusInEvent = self.pageFocusInEvent
page.focusOutEvent = self.pageFocusOutEvent
def unhook_focus_events(self):
""" Unhook the focus events for the underling widget.
"""
text = self.text_control
del text.focusInEvent
del text.focusOutEvent
page = self.page_control
del page.focusInEvent
del page.focusOutEvent
def textFocusInEvent(self, event):
""" Handle the focusInEvent for the text widget.
"""
self.handleFocusInEvent(self.text_control, event)
def textFocusOutEvent(self, event):
""" Handle the focusOutEvent for the text widget.
"""
self.handleFocusOutEvent(self.text_control, event)
def pageFocusInEvent(self, event):
""" Handle the focusInEvent for the page widget.
"""
self.handleFocusInEvent(self.page_control, event)
def pageFocusOutEvent(self, event):
""" Handle the focusOutEvent for the page widget.
"""
self.handleFocusOutEvent(self.page_control, event)
def handleFocusInEvent(self, widget, event):
""" Handle the focusInEvent for the given widget.
"""
type(widget).focusInEvent(widget, event)
self.declaration.focus_gained()
def handleFocusOutEvent(self, widget, event):
""" Handle the focusOutEvent for the given widget.
"""
type(widget).focusOutEvent(widget, event)
self.declaration.focus_lost()
#--------------------------------------------------------------------------
# Public API
#--------------------------------------------------------------------------
@property
def text_control(self):
""" Return the text control for the IPython widget.
Returns
-------
result : QTextEdit
The text control for the IPython widget.
"""
return self.ipy_widget._control
@property
def page_control(self):
""" Return the page control for the IPython widget.
Returns
-------
result : QTextEdit
The page control for the IPython widget.
"""
return self.ipy_widget._page_control
#--------------------------------------------------------------------------
# ProxyIPythonConsole API
#--------------------------------------------------------------------------
def get_var(self, name, default):
""" Get a variable from the console namespace.
"""
kernel = self.ipy_widget.kernel_manager.kernel
return kernel.shell.user_ns.get(name, default)
def update_ns(self, ns):
""" Update the namespace of the underlying console.
"""
if len(ns) > 0:
kernel = self.ipy_widget.kernel_manager.kernel
kernel.shell.push(ns)
| 32.20362
| 79
| 0.567795
|
4522524af481a64849c54040c7dba2f2494167b8
| 129
|
py
|
Python
|
info.py
|
Immain/PurchasingBot-Oct2021-MacOS
|
340de3ea37c4c14a9c2e8c6f2e5ce6e35ae32291
|
[
"MIT"
] | null | null | null |
info.py
|
Immain/PurchasingBot-Oct2021-MacOS
|
340de3ea37c4c14a9c2e8c6f2e5ce6e35ae32291
|
[
"MIT"
] | null | null | null |
info.py
|
Immain/PurchasingBot-Oct2021-MacOS
|
340de3ea37c4c14a9c2e8c6f2e5ce6e35ae32291
|
[
"MIT"
] | null | null | null |
# BEST BUY LOGIN CREDENTIALS MUST MATCH BESTBUY.COM WEBSITE
email = "Example@Example.com"
password = "LoginPassword"
cvv = "000"
| 25.8
| 59
| 0.75969
|
94ce9cf8a75f697fc0cead2dd85da1fb571faccd
| 2,382
|
py
|
Python
|
primitive_tests/feature_analysis/StatisticalMeanAbs_pipeline.py
|
1326899446/tods
|
2bf27fab2d8bab80ec222beb8f615800d77a01a4
|
[
"Apache-2.0"
] | 544
|
2020-09-21T06:02:33.000Z
|
2022-03-27T07:16:32.000Z
|
primitive_tests/feature_analysis/StatisticalMeanAbs_pipeline.py
|
1326899446/tods
|
2bf27fab2d8bab80ec222beb8f615800d77a01a4
|
[
"Apache-2.0"
] | 35
|
2020-09-21T06:33:13.000Z
|
2022-03-11T14:20:21.000Z
|
primitive_tests/feature_analysis/StatisticalMeanAbs_pipeline.py
|
1326899446/tods
|
2bf27fab2d8bab80ec222beb8f615800d77a01a4
|
[
"Apache-2.0"
] | 86
|
2020-09-21T16:44:33.000Z
|
2022-03-11T18:20:22.000Z
|
from d3m import index
from d3m.metadata.base import ArgumentType
from d3m.metadata.pipeline import Pipeline, PrimitiveStep
# Creating pipeline
pipeline_description = Pipeline()
pipeline_description.add_input(name='inputs')
# Step 0: dataset_to_dataframe
primitive_0 = index.get_primitive('d3m.primitives.tods.data_processing.dataset_to_dataframe')
step_0 = PrimitiveStep(primitive=primitive_0)
step_0.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='inputs.0')
step_0.add_output('produce')
pipeline_description.add_step(step_0)
# # Step 1: column_parser
primitive_1 = index.get_primitive('d3m.primitives.tods.data_processing.column_parser')
step_1 = PrimitiveStep(primitive=primitive_1)
step_1.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.0.produce')
step_1.add_output('produce')
pipeline_description.add_step(step_1)
# Step 2: extract_columns_by_semantic_types(attributes)
step_2 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.data_processing.extract_columns_by_semantic_types'))
step_2.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.1.produce')
step_2.add_output('produce')
step_2.add_hyperparameter(name='semantic_types', argument_type=ArgumentType.VALUE,
data=['https://metadata.datadrivendiscovery.org/types/Attribute'])
pipeline_description.add_step(step_2)
# # Step 3: statistical_mean_abs
step_3 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.tods.feature_analysis.statistical_mean_abs'))
step_3.add_hyperparameter(name='window_size', argument_type=ArgumentType.VALUE, data=4)
step_3.add_hyperparameter(name='use_semantic_types', argument_type=ArgumentType.VALUE, data=True)
step_3.add_hyperparameter(name='use_columns', argument_type=ArgumentType.VALUE, data=(5,))
step_3.add_hyperparameter(name='return_result', argument_type=ArgumentType.VALUE, data='append')
step_3.add_argument(name='inputs', argument_type=ArgumentType.CONTAINER, data_reference='steps.2.produce')
step_3.add_output('produce')
pipeline_description.add_step(step_3)
# Final Output
pipeline_description.add_output(name='output predictions', data_reference='steps.3.produce')
# Output to JSON
data = pipeline_description.to_json()
with open('example_pipeline.json', 'w') as f:
f.write(data)
print(data)
| 47.64
| 126
| 0.816961
|
1efb6c5f79c020f692535b33b366362534f7d77f
| 2,173
|
py
|
Python
|
neutronclient/osc/plugin.py
|
lmaycotte/python-neutronclient
|
5da767d36e8b2343fabab674dbacb75181efb774
|
[
"Apache-2.0"
] | null | null | null |
neutronclient/osc/plugin.py
|
lmaycotte/python-neutronclient
|
5da767d36e8b2343fabab674dbacb75181efb774
|
[
"Apache-2.0"
] | null | null | null |
neutronclient/osc/plugin.py
|
lmaycotte/python-neutronclient
|
5da767d36e8b2343fabab674dbacb75181efb774
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import logging
# TODO(rtheis/amotoki): Add functional test infrastructure for OSC
# plugin commands.
# TODO(amotoki): Add and update documentation on the OSC plugin.
from osc_lib import utils
LOG = logging.getLogger(__name__)
DEFAULT_API_VERSION = '2.0'
API_VERSION_OPTION = 'os_network_api_version'
# NOTE(rtheis): API_NAME must NOT be set to 'network' since
# 'network' is owned by OSC! The OSC 'network' client uses
# the OpenStack SDK.
API_NAME = 'neutronclient'
API_VERSIONS = {
'2.0': 'neutronclient.v2_0.client.Client',
'2': 'neutronclient.v2_0.client.Client',
}
def make_client(instance):
"""Returns an neutron client."""
neutron_client = utils.get_client_class(
API_NAME,
instance._api_version[API_NAME],
API_VERSIONS)
LOG.debug('Instantiating neutron client: %s', neutron_client)
# TODO(amotoki): Check the following arguments need to be passed
# to neutronclient class. Check keystoneauth code.
# - endpoint_type (do we need to specify it explicitly?)
# - auth (session object contains auth. Is it required?)
client = neutron_client(session=instance.session,
region_name=instance._region_name,
endpoint_type=instance._interface,
insecure=instance._insecure,
ca_cert=instance._cacert)
return client
def build_option_parser(parser):
"""Hook to add global options"""
# NOTE(amotoki): For now we register no options.
# OSC itself has an option for the Network API version and we refer to it.
return parser
| 35.048387
| 77
| 0.693971
|
05181bb03f5a012fe32b24a5d453dcff0d361ac9
| 1,311
|
py
|
Python
|
etldjango/etldata/management/commands/worker_init_bucket.py
|
DavidCastilloAlvarado/opencovid_ETL
|
0cd7afcb0e7e6247a01c0aced9aab02b8ad1edaf
|
[
"MIT"
] | 5
|
2021-05-21T20:02:34.000Z
|
2021-08-04T21:06:19.000Z
|
etldjango/etldata/management/commands/worker_init_bucket.py
|
DavidCastilloAlvarado/opencovid_ETL
|
0cd7afcb0e7e6247a01c0aced9aab02b8ad1edaf
|
[
"MIT"
] | 1
|
2021-06-04T06:17:17.000Z
|
2021-06-04T06:17:17.000Z
|
etldjango/etldata/management/commands/worker_init_bucket.py
|
DavidCastilloAlvarado/opencovid_ETL
|
0cd7afcb0e7e6247a01c0aced9aab02b8ad1edaf
|
[
"MIT"
] | null | null | null |
import os
from django.core.management.base import BaseCommand, CommandError
from etldjango.settings import GCP_PROJECT_ID, BUCKET_NAME, BUCKET_ROOT
from .utils.storage import Bucket_handler
from .utils.extractor import Data_Extractor
from datetime import datetime
from etldata.models import Logs_extractor
from tqdm import tqdm
import time
import glob
class Command(BaseCommand):
help = 'Load csv support files to gcp bucket'
bucket = Bucket_handler(project_id=GCP_PROJECT_ID)
def handle(self, *args, **options):
self.create_bucket()
self.uploading_files_to_bucket()
self.print_shell('Work Done!')
def create_bucket(self):
self.print_shell('Creating bucket if it doesn\'t exist ... ')
self.bucket.create_bucket(BUCKET_NAME)
def print_shell(self, text):
self.stdout.write(self.style.SUCCESS(text))
def uploading_files_to_bucket(self):
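# Upload every CSV found in data_source/ to the bucket, reusing the local
# path as the destination blob name.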
dir_files = glob.glob('data_source/**.csv')
for file_name in tqdm(dir_files, total=len(dir_files)):
destination = file_name
self.bucket.upload_blob(bucket_name=BUCKET_NAME,
source_file_name=file_name,
destination_blob_name=destination)
time.sleep(2)
#os.system("rm temp/*")
| 34.5
| 71
| 0.687262
|
6ab093ab85241601262e64152255955c7c19f3fa
| 92
|
py
|
Python
|
test/Manifest.py
|
hdl-util/image-processing
|
92f74d8a3894f365b2be6dd588351aff4e7b8565
|
[
"Apache-2.0",
"MIT"
] | 5
|
2020-09-15T09:47:32.000Z
|
2021-06-10T07:33:30.000Z
|
test/Manifest.py
|
hdl-util/image-processing
|
92f74d8a3894f365b2be6dd588351aff4e7b8565
|
[
"Apache-2.0",
"MIT"
] | null | null | null |
test/Manifest.py
|
hdl-util/image-processing
|
92f74d8a3894f365b2be6dd588351aff4e7b8565
|
[
"Apache-2.0",
"MIT"
] | 3
|
2020-07-07T10:10:01.000Z
|
2021-05-26T06:03:56.000Z
|
files = [
"malvar_he_cutler_demosaic_tb.sv"
]
modules = {
"local" : [ "../src/" ],
}
| 11.5
| 37
| 0.543478
|
ec567606d5894b23ef854872e5c4e423d8454b2c
| 15,359
|
py
|
Python
|
tests/data/challenge_response.py
|
yashdusing/evalai-cli
|
4649fd520e2d7c4477589cc23f54a56b30f62e98
|
[
"BSD-3-Clause"
] | null | null | null |
tests/data/challenge_response.py
|
yashdusing/evalai-cli
|
4649fd520e2d7c4477589cc23f54a56b30f62e98
|
[
"BSD-3-Clause"
] | null | null | null |
tests/data/challenge_response.py
|
yashdusing/evalai-cli
|
4649fd520e2d7c4477589cc23f54a56b30f62e98
|
[
"BSD-3-Clause"
] | null | null | null |
challenges = """
{
"count": 2,
"next": null,
"previous": null,
"results": [
{
"allowed_email_domains": [],
"anonymous_leaderboard": false,
"approved_by_admin": true,
"blocked_email_domains": [],
"creator": {
"created_by": "host",
"id": 2,
"team_name": "South Lisafurt Host Team"
},
"description": "Excepturi eligendi minus modi delectus doloreasperiores voluptatem. \
Aspernatur itaque vitae repellendus. Natus ut tenetur labore dolores ex repudiandae.",
"enable_forum": true,
"end_date": "2019-06-18T20:00:00Z",
"evaluation_details": "Amet officia saepe quis tempora magnam eum. Quidem ab \
consectetur exercitationem omnis. Nostrumconsequuntur architecto eaque mollitia \
ab minima expedita quam. Velit itaque voluptates suscipit aliquam perspiciatis \
itaque cupiditate.",
"id": 2,
"image": null,
"is_active": true,
"published": true,
"short_description": "Ratione laboriosam quae tempora. Temporibus porro repellat \
rem facere. In impedit cupiditate voluptatum aut omnis animi illo. Perferendis \
ratione dolores eaque nulla iustomollitia facere voluptatum. Earum dolor corporis \
quo enim quia optio.",
"start_date": "2018-02-02T18:56:42.747134Z",
"submission_guidelines": "Perspiciatis id sunt ab magni rerum laboriosam. Alias \
temporibus ratione est animi. Quisquam reiciendis impedit fugiat corporis nesciunt \
totam. Odit molestiae praesentium et fuga architecto suscipit. At deleniti fugiat \
necessitatibus vel provident qui perspiciatis.",
"terms_and_conditions": "Est vero fugiattemporibus necessitatibus. Ea nihil \
possimus consequuntur doloribus odio. Vero voluptates non et repellat \
perferendis ipsam. Ex dicta nemo numquam cupiditate recusandae impedit.",
"title": "Olivia Challenge"
},
{
"allowed_email_domains": [],
"anonymous_leaderboard": false,
"approved_by_admin": true,
"blocked_email_domains": [],
"creator": {
"created_by": "host",
"id": 2,
"team_name": "South Lisafurt Host Team"
},
"description": "Voluptates consequatur commodi odit repellendus quam. Id nemo \
provident ipsa cupiditate enim blanditiis autem. Recusandae veronecessitatibus \
debitis esse eveniet consequatur. Provident saepe officiis incidunt cum.",
"enable_forum": true,
"end_date": "2019-06-18T20:00:00Z",
"evaluation_details": "Adipisci possimus tenetur illum maiores. Laboriosam error \
nostrum illum nesciunt cumque officiis suscipit. Occaecati velit fugiat alias \
magnamvoluptas voluptatem ad. Repudiandae velit impedit veniam numquam.",
"id": 3,
"image": null,
"is_active": false,
"published": true,
"short_description": "Dicta tempore quia itaque ex quam. Quas sequi in voluptates \
esse aspernatur deleniti. In magnam ipsam totam ratione quidempraesentium eius \
distinctio.",
"start_date": "2016-12-29T18:56:42.752783Z",
"submission_guidelines": "Ullam vitae explicabo consequuntur odit fugiat pariatur \
doloribus ab. Qui ullam adipisci est corporis facilis. Quas excepturi \
delenitidolorum tempora necessitatibus.",
"terms_and_conditions": "Recusandae saepe ipsum saepe ullam aut. Cum eiusnihil \
blanditiis itaque. Fugiat sed quod nostrum.",
"title": "Jason Challenge"
}
]
}
"""
challenge_details = """
{
"allowed_email_domains": [],
"anonymous_leaderboard": false,
"approved_by_admin": true,
"blocked_email_domains": [],
"creator": {
"created_by": "host",
"id": 1,
"team_name": "Lake Cynthiabury Host Team",
"team_url": ""
},
"description": "Ex voluptatum accusantium dignissimos voluptatem eveniet enim non \
aspernatur. Expedita consequatur velit vitae enim. Vel asperiores deserunt suscipit \
non eaque veritatis labore. A atque illo fuga suscipit mollitia dignissimos assumenda.",
"enable_forum": true,
"end_date": "2019-11-11T06:31:31.594239Z",
"evaluation_details": "Perspiciatis harum molestias iste corporis \
aspernatur sit doloribus. Occaecati aliquid ullam odit aperiam in. Cupiditate consectetur \
ab doloremque dolore.",
"id": 1,
"image": null,
"is_active": true,
"published": true,
"short_description": "Nisi vero sint ipsam recusandae. Eveniet provident expedita iusto \
atque delectus et recusandae. Odio blanditiis qui alias autem minima blanditiis. Iste et \
ipsa minima placeat cupiditate fuga.",
"start_date": "2018-03-21T06:31:31.594224Z",
"submission_guidelines": "Ratione vitae dolor eos officia rem exercitationem. \
Ipsam pariatur a alias mollitia perspiciatis. Ipsa sit esse officiis quam eaque.",
"terms_and_conditions": "Officia dolores non labore nihil exercitationem minima. \
Esse repellendus accusamus minus nisi. Commodi cum adipisci molestias ipsum beatae qui \
enim porro. Cumque saepe distinctio repellendus et sed labore ratione aspernatur.",
"title": "Sarah Challenge"
}
"""
challenge_participant_teams = """
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"created_by": "host",
"id": 3,
"members": [
{
"member_id": 5,
"member_name": "host",
"status": "Self"
}
],
"team_name": "Test1"
}
]
}
"""
challenge_host_teams = """
{
"count": 1,
"next": null,
"previous": null,
"results": [
{
"created_by": "host",
"id": 2,
"members": [
{
"id": 2,
"permissions": "Admin",
"status": "Self",
"team_name": 2,
"user": "host"
}
],
"team_name": "South Lisafurt Host Team"
}
]
}
"""
challenge_phase_list = """
{
"count": 2,
"next": null,
"previous": null,
"results": [
{
"challenge": 10,
"codename": "phase1",
"description": "Ipsa id minima commodi quo itaque. Reprehenderit eos iusto\
maiores iusto impedit dolores. Nihil possimus repudiandae animi quasi nulla\
molestias reiciendis necessitatibus. Minus eos similique facilis accusamus\
reprehenderit in officiis.",
"end_date": "2019-09-25T18:56:42.789372Z",
"id": 19,
"is_active": false,
"is_public": true,
"leaderboard_public": true,
"max_submissions": 100000,
"max_submissions_per_day": 100000,
"name": "Kimberly Phase",
"start_date": "2018-08-21T18:56:42.789363Z"
},
{
"challenge": 10,
"codename": "phase2",
"description": "Est nobis consequatur quam sint in nemo distinctio magni.\
Eaque a natus laboriosam ipsa molestiae corrupti.",
"end_date": "2019-09-25T18:56:42.789372Z",
"id": 20,
"is_active": false,
"is_public": true,
"leaderboard_public": true,
"max_submissions": 100000,
"max_submissions_per_day": 100000,
"name": "Philip Phase",
"start_date": "2018-08-21T18:56:42.789363Z"
}
]
}
"""
challenge_phase_details = """
{
"challenge": 10,
"codename": "phase2",
"description": "Est nobis consequatur quam sint in nemo distinctio magni. \
Eaque a natus laboriosam ipsa molestiae corrupti.",
"end_date": "2019-09-25T18:56:42.789372Z",
"id": 20,
"is_active": false,
"is_public": true,
"leaderboard_public": true,
"max_submissions": 100000,
"max_submissions_per_day": 100000,
"name": "Philip Phase",
"start_date": "2018-08-21T18:56:42.789363Z"
}
"""
object_error = """
{
"error": "Sorry, the object does not exist."
}
"""
invalid_token = '{"detail": "Invalid token"}'
token_expired = '{"detail": "Token has expired"}'
challenge_phase_splits = """
[
{
"challenge_phase": 4,
"challenge_phase_name": "William Phase",
"dataset_split": 3,
"dataset_split_name": "Split 3",
"id": 7,
"visibility": 3
},
{
"challenge_phase": 4,
"challenge_phase_name": "William Phase",
"dataset_split": 4,
"dataset_split_name": "Split 4",
"id": 8,
"visibility": 3
},
{
"challenge_phase": 3,
"challenge_phase_name": "Scott Phase",
"dataset_split": 3,
"dataset_split_name": "Split 3",
"id": 5,
"visibility": 3
},
{
"challenge_phase": 3,
"challenge_phase_name": "Scott Phase",
"dataset_split": 4,
"dataset_split_name": "Split 4",
"id": 6,
"visibility": 3
}
]
"""
leaderboard = """
{
"count": 6,
"next": null,
"previous": null,
"results": [
{
"challenge_phase_split": 189,
"filtering_score": 0.8202264740335806,
"id": 26652,
"leaderboard__schema": {
"default_order_by": "score",
"labels": [
"score"
]
},
"result": [
0.8202264740335806
],
"submission__participant_team__team_name": "cyberagent",
"submission__submitted_at": "2018-05-25T05:45:26.215498Z"
},
{
"challenge_phase_split": 189,
"filtering_score": 0.686372510737993,
"id": 17372,
"leaderboard__schema": {
"default_order_by": "score",
"labels": [
"score"
]
},
"result": [
0.686372510737993
],
"submission__participant_team__team_name": "ADVISE (PITT)",
"submission__submitted_at": "2018-05-14T02:24:22.639441Z"
},
{
"challenge_phase_split": 189,
"filtering_score": 0.6226474033580632,
"id": 16133,
"leaderboard__schema": {
"default_order_by": "score",
"labels": [
"score"
]
},
"result": [
0.6226474033580632
],
"submission__participant_team__team_name": "VSE (PITT)",
"submission__submitted_at": "2018-05-11T21:37:15.490292Z"
},
{
"challenge_phase_split": 189,
"filtering_score": 0.5284654431862553,
"id": 27346,
"leaderboard__schema": {
"default_order_by": "score",
"labels": [
"score"
]
},
"result": [
0.5284654431862553
],
"submission__participant_team__team_name": "planb",
"submission__submitted_at": "2018-05-29T16:04:37.491494Z"
},
{
"challenge_phase_split": 189,
"filtering_score": 0.24709098008590394,
"id": 27407,
"leaderboard__schema": {
"default_order_by": "score",
"labels": [
"score"
]
},
"result": [
0.24709098008590394
],
"submission__participant_team__team_name": "titan",
"submission__submitted_at": "2018-05-30T09:45:49.672613Z"
},
{
"challenge_phase_split": 189,
"filtering_score": 0.20484185864896526,
"id": 15304,
"leaderboard__schema": {
"default_order_by": "score",
"labels": [
"score"
]
},
"result": [
0.20484185864896526
],
"submission__participant_team__team_name": "idxm",
"submission__submitted_at": "2018-05-09T08:51:10.900548Z"
}
]
}
"""
empty_leaderboard = """
{
"count": 6,
"next": null,
"previous": null,
"results": [
]
}
"""
| 38.883544
| 114
| 0.454978
|
9f8b57427258138600df40245a4348ca99b5274a
| 526
|
py
|
Python
|
push_data_to_kafka.py
|
Pahulpreet86/Pahulpreet86-Real-Time-Data-Pipeline-Using-Kafka-and-Spark
|
c7ff90b4b0399d95d70aad787de45075198474e1
|
[
"MIT"
] | 4
|
2020-07-23T20:23:47.000Z
|
2022-03-23T03:25:49.000Z
|
push_data_to_kafka.py
|
Pahulpreet86/Real-Time-Data-Pipeline-Using-Kafka-and-Spark
|
c7ff90b4b0399d95d70aad787de45075198474e1
|
[
"MIT"
] | null | null | null |
push_data_to_kafka.py
|
Pahulpreet86/Real-Time-Data-Pipeline-Using-Kafka-and-Spark
|
c7ff90b4b0399d95d70aad787de45075198474e1
|
[
"MIT"
] | null | null | null |
import time
import json
import requests
import datetime
from kafka import KafkaProducer, KafkaClient
from websocket import create_connection
def get_sensor_data_stream():
try:
url = 'http://0.0.0.0:3030/sensordata'
r = requests.get(url)
return r.text
except requests.exceptions.RequestException:
return "Error in Connection"
producer = KafkaProducer(bootstrap_servers=['localhost:9092'])
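# Poll the sensor endpoint once per second and publish each raw payload to
# the "RawSensorData" Kafka topic.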
while True:
msg = get_sensor_data_stream()
producer.send("RawSensorData", msg.encode('utf-8'))
time.sleep(1)
| 18.785714
| 62
| 0.692015
|
dfe9463100c9df51aa15e9de97cea539501e7868
| 4,267
|
py
|
Python
|
tests/workflow/scripts/test_qc_report_table.py
|
Monia234/NCI-GwasQc
|
9e3ca52085c891e1d4d7972e5337c4a1888f992c
|
[
"MIT"
] | null | null | null |
tests/workflow/scripts/test_qc_report_table.py
|
Monia234/NCI-GwasQc
|
9e3ca52085c891e1d4d7972e5337c4a1888f992c
|
[
"MIT"
] | 43
|
2021-03-02T04:10:01.000Z
|
2022-03-16T20:26:55.000Z
|
tests/workflow/scripts/test_qc_report_table.py
|
Monia234/NCI-GwasQc
|
9e3ca52085c891e1d4d7972e5337c4a1888f992c
|
[
"MIT"
] | 2
|
2021-03-02T12:27:00.000Z
|
2021-12-16T03:22:20.000Z
|
import pandas as pd
import pytest
from pandas.testing import assert_frame_equal
from cgr_gwas_qc.workflow.scripts import qc_report_table
@pytest.mark.real_data
def test_sample_qc(real_data_cache):
dtypes = {
"Contaminated": bool,
"Low Call Rate": bool,
"Sex Discordant": bool,
"Unexpected Replicate": bool,
}
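# Reduce both the legacy workbook and the dev pipeline output to the same
# boolean QC flags, indexed by Sample_ID, before comparing them.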
legacy_file = (
real_data_cache
/ "legacy_outputs/word_doc/SR0446-001_12_QC_Report_1011201995419_casecontrol_20191011.xlsx"
)
legacy_df = (
pd.read_excel(legacy_file, sheet_name="ALL_QC", engine="openpyxl")
.set_index("Sample_ID")
.sort_index()
.reindex(dtypes.keys(), axis=1)
.fillna(False)
.astype(dtypes)
)
dev_df = (
qc_report_table._sample_qc(
real_data_cache / "dev_outputs/cgr_sample_sheet.csv",
real_data_cache / "dev_outputs/sample_level/sample_qc.csv",
)
.set_index("Sample_ID")
.sort_index()
.reindex(dtypes.keys(), axis=1)
.fillna(False)
.astype(dtypes)
)
assert_frame_equal(legacy_df, dev_df)
@pytest.fixture
def graf_text(tmp_path):
outfile = tmp_path / "graf.txt"
outfile.write_text(
"DS No.\tSample\t#SNPs\tGD1\tGD2\tGD3\tGD4\tF(%)\tE(%)\tA(%)\tAfrican\tEuropean\tAsian\tMexican\tIndian-Pakistani\n"
"1\tSB0001\t3366\t1.611165\t1.446184\t0.831981\t0.024308\t3.65\t96.35\t0.00\t1.110809\t0.895608\t1.182723\t0.962664\t0.938356\n"
"1\tSB0002\t3369\t1.359497\t1.204182\t0.792694\t0.004533\t84.09\t13.00\t2.91\t0.853367\t1.113336\t1.247473\t1.100141\t1.095608\n"
"1\tSB0003\t3371\t1.618876\t1.447833\t0.836734\t0.037455\t2.14\t97.86\t0.00\t1.119291\t0.895800\t1.179998\t0.971486\t0.934031\n"
"1\tSB0004\t3371\t1.653709\t1.444371\t0.830352\t0.020863\t0.00\t94.62\t5.38\t1.134156\t0.891016\t1.149434\t0.949329\t0.928466\n"
)
return outfile
@pytest.mark.real_data
def test_ancestry(sample_qc_df, graf_text, tmp_path):
fake_qc = sample_qc_df.head(4)
fake_qc.Sample_ID = ["SB0001", "SB0002", "SB0003", "SB0004"]
fake_qc.to_csv(tmp_path / "fake.csv", index=False)
obs_df = qc_report_table._ancestry(tmp_path / "fake.csv", graf_text)
assert (4, 18) == obs_df.shape
@pytest.mark.real_data
def test_sample_concordance(real_data_cache):
sample_qc_csv = real_data_cache / "dev_outputs/sample_level/sample_qc.csv"
sample_concordance_csv = real_data_cache / "dev_outputs/sample_level/concordance/summary.csv"
obs_df = qc_report_table._sample_concordance(sample_qc_csv, sample_concordance_csv)
assert qc_report_table._SAMPLE_CONCORDANCE_COLUMNS == obs_df.columns.tolist()
@pytest.mark.real_data
def test_population_concordance(real_data_cache, tmp_path):
agg_population_concordance_csv = real_data_cache / "dev_outputs/subject_level/concordance.csv"
test_file = tmp_path / "test.xlsx"
with pd.ExcelWriter(test_file) as writer:
qc_report_table._population_concordance(agg_population_concordance_csv, writer)
obs_df = pd.read_excel(test_file, "European_IBD", engine="openpyxl")
assert qc_report_table._POPULATION_CONCORDANCE_COLUMNS == obs_df.columns.tolist()
assert obs_df.shape[0] == pd.read_csv(agg_population_concordance_csv).shape[0]
@pytest.mark.real_data
def test_pca(real_data_cache, tmp_path):
filename = real_data_cache / "dev_outputs/subject_level/population_qc.csv"
test_file = tmp_path / "test.xlsx"
with pd.ExcelWriter(test_file) as writer:
qc_report_table._pca(filename, writer)
obs_df = pd.read_excel(test_file, "European_PCA", engine="openpyxl")
assert qc_report_table._PCA_COLUMNS == obs_df.columns.tolist()
assert not obs_df.isna().all(axis=0).any() # no empty columns
@pytest.mark.real_data
def test_het(real_data_cache, tmp_path):
filename = real_data_cache / "dev_outputs/subject_level/population_qc.csv"
test_file = tmp_path / "test.xlsx"
with pd.ExcelWriter(test_file) as writer:
qc_report_table._het(filename, writer)
obs_df = pd.read_excel(test_file, "European_HET", engine="openpyxl")
assert qc_report_table._HET_COLUMNS == obs_df.columns.tolist()
assert not obs_df.isna().all(axis=0).any() # no empty columns
| 38.441441
| 137
| 0.719006
|
5370e8e53a36c78872545723c30128e493b7c154
| 2,631
|
py
|
Python
|
gedl/schema/validate-gedl.py
|
gaps-closure/capo
|
894d2f6d291ff79e18c77e0ca7073531147cbee8
|
[
"BSD-3-Clause"
] | 1
|
2021-04-20T18:43:44.000Z
|
2021-04-20T18:43:44.000Z
|
gedl/schema/validate-gedl.py
|
gaps-closure/capo
|
894d2f6d291ff79e18c77e0ca7073531147cbee8
|
[
"BSD-3-Clause"
] | 1
|
2021-09-23T14:55:43.000Z
|
2021-09-23T18:09:35.000Z
|
gedl/schema/validate-gedl.py
|
gaps-closure/capo
|
894d2f6d291ff79e18c77e0ca7073531147cbee8
|
[
"BSD-3-Clause"
] | 1
|
2020-05-21T03:12:16.000Z
|
2020-05-21T03:12:16.000Z
|
#!/usr/bin/env python3
"""
Check that the GEDL schema itself, and a test GEDL JSON document, are valid
according to JSON Schema Draft 7.
This requires the MIT-licensed jsonschema library, version 3.2.0 or newer (via pip).
For more information on jsonschema see:
github.com/Julian/jsonschema/
Author: Terrence Ezrol tezrol@perspectalabs.com
"""
import jsonschema
import json
import sys
import os.path
# the meta schema (JSON Schema Draft 7)
metaschema_file = "json-schema-draft7.json"
# the gedl schema
gedl_schema_file = "gedl-schema.json"
def validated_schema():
"""validates the gedl schema against the meta schema
thus is to split the exception in editing the gedl schema, as the schema is validated
in standard validation (thus this step)
"""
if(jsonschema.__version__ < "3.2.0"):
print("Please upgrade jsonschema, version 3.2.0 or newer is required")
raise(ModuleNotFoundError("Newer jsonschema version required"))
try:
meta = json.loads(
open(metaschema_file,"r",encoding="UTF-8").read()
)
except Exception as e:
print("Error reading meta schema %s"%(metaschema_file))
raise(e)
try:
gedl = json.loads(
open(gedl_schema_file,"r",encoding="UTF-8").read()
)
except Exception as e:
print("Error reading gedl schema %s"%(gedl_schema_file))
raise(e)
#schema loaded, validate and return schema
jsonschema.validate(gedl,meta)
return(gedl)
def validate_gedl(gedlfile,schema):
""" given in the gedl json, and the schema json validate the json with the json """
try:
test = json.loads(
open(gedlfile,"r",encoding="UTF-8").read()
)
except Exception as e:
print("Error provided gedl json %s"%(gedlfile))
raise(e)
jsonschema.validate(test,schema)
def main():
"""Run a test on the provided file sys.argv[1]"""
argc = len(sys.argv)
if(argc != 2 or sys.argv[1] == "--help"):
print("Usage: %s <gedl.json>"%(sys.argv[0]))
return(-1)
testfile = sys.argv[1]
if(not os.path.isfile(testfile)):
print("===== Test json (%s) not found/is not a file ====="%(testfile))
return(-4)
try:
schema = validated_schema()
except Exception as e:
print("===== Invalid GEDL Schema =====")
print(str(e))
return(-2)
try:
validate_gedl(testfile,schema)
except Exception as e:
print("==== Invalid GEDL ====")
print(str(e))
return(-3)
print("GEDL successfully validated")
return(0)
if(__name__ == "__main__"):
sys.exit(main())
| 26.575758
| 92
| 0.622577
|
73b607b5ad4c41cab9ae8818371009e1088fea68
| 43,214
|
py
|
Python
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/aio/operations/_public_ip_addresses_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2019-08-23T21:14:00.000Z
|
2021-09-07T18:32:34.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/aio/operations/_public_ip_addresses_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 4
|
2019-04-17T17:57:49.000Z
|
2020-04-24T21:11:22.000Z
|
sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_06_01/aio/operations/_public_ip_addresses_operations.py
|
beltr0n/azure-sdk-for-python
|
2f7fb8bee881b0fc0386a0ad5385755ceedd0453
|
[
"MIT"
] | 2
|
2021-05-23T16:46:31.000Z
|
2021-05-26T23:51:09.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PublicIPAddressesOperations:
"""PublicIPAddressesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
public_ip_address_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
public_ip_address_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified public IP address.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the subnet.
:type public_ip_address_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
async def get(
self,
resource_group_name: str,
public_ip_address_name: str,
expand: Optional[str] = None,
**kwargs
) -> "_models.PublicIPAddress":
"""Gets the specified public IP address in a specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the subnet.
:type public_ip_address_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PublicIPAddress, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_06_01.models.PublicIPAddress
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
public_ip_address_name: str,
parameters: "_models.PublicIPAddress",
**kwargs
) -> "_models.PublicIPAddress":
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'PublicIPAddress')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
public_ip_address_name: str,
parameters: "_models.PublicIPAddress",
**kwargs
) -> AsyncLROPoller["_models.PublicIPAddress"]:
"""Creates or updates a static or dynamic public IP address.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param parameters: Parameters supplied to the create or update public IP address operation.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.PublicIPAddress
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PublicIPAddress or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_06_01.models.PublicIPAddress]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
async def _update_tags_initial(
self,
resource_group_name: str,
public_ip_address_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> "_models.PublicIPAddress":
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
async def begin_update_tags(
self,
resource_group_name: str,
public_ip_address_name: str,
parameters: "_models.TagsObject",
**kwargs
) -> AsyncLROPoller["_models.PublicIPAddress"]:
"""Updates public IP address tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param public_ip_address_name: The name of the public IP address.
:type public_ip_address_name: str
:param parameters: Parameters supplied to update public IP address tags.
:type parameters: ~azure.mgmt.network.v2018_06_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either PublicIPAddress or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_06_01.models.PublicIPAddress]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
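# When no continuation token is supplied, issue the initial PATCH; otherwise the poller is rebuilt further below from the saved LRO state.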
if cont_token is None:
raw_result = await self._update_tags_initial(
resource_group_name=resource_group_name,
public_ip_address_name=public_ip_address_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
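# Pick the long-running-operation polling strategy: ARM polling by default, no polling when polling=False, or a caller-supplied AsyncPollingMethod.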
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses/{publicIpAddressName}'} # type: ignore
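# A minimal usage sketch (an illustration, not part of this module): it assumes an authenticated
# aio NetworkManagementClient for api-version 2018-06-01 and placeholder names for the
# subscription, resource group, and public IP.
#
#   client = NetworkManagementClient(credential, "<subscription-id>")
#   poller = await client.public_ip_addresses.begin_update_tags(
#       "<resource-group>", "<public-ip-name>", TagsObject(tags={"env": "dev"}))
#   updated_ip = await poller.result()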
def list_all(
self,
**kwargs
) -> AsyncIterable["_models.PublicIPAddressListResult"]:
"""Gets all the public IP addresses in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_06_01.models.PublicIPAddressListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddressListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
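# prepare_request builds the first-page GET from the operation URL and query parameters, or simply follows the service-provided next_link for later pages.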
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/publicIPAddresses'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["_models.PublicIPAddressListResult"]:
"""Gets all public IP addresses in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_06_01.models.PublicIPAddressListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddressListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/publicIPAddresses'} # type: ignore
def list_virtual_machine_scale_set_public_ip_addresses(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
**kwargs
) -> AsyncIterable["_models.PublicIPAddressListResult"]:
"""Gets information about all public IP addresses on a virtual machine scale set level.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_06_01.models.PublicIPAddressListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddressListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_public_ip_addresses.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_public_ip_addresses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/publicipaddresses'} # type: ignore
def list_virtual_machine_scale_set_vm_public_ip_addresses(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
ip_configuration_name: str,
**kwargs
) -> AsyncIterable["_models.PublicIPAddressListResult"]:
"""Gets information about all public IP addresses in a virtual machine IP configuration in a
virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The network interface name.
:type network_interface_name: str
:param ip_configuration_name: The IP configuration name.
:type ip_configuration_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PublicIPAddressListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_06_01.models.PublicIPAddressListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddressListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_vm_public_ip_addresses.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('PublicIPAddressListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_vm_public_ip_addresses.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses'} # type: ignore
async def get_virtual_machine_scale_set_public_ip_address(
self,
resource_group_name: str,
virtual_machine_scale_set_name: str,
virtualmachine_index: str,
network_interface_name: str,
ip_configuration_name: str,
public_ip_address_name: str,
expand: Optional[str] = None,
**kwargs
) -> "_models.PublicIPAddress":
"""Get the specified public IP address in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param ip_configuration_name: The name of the IP configuration.
:type ip_configuration_name: str
:param public_ip_address_name: The name of the public IP Address.
:type public_ip_address_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PublicIPAddress, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_06_01.models.PublicIPAddress
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.PublicIPAddress"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_public_ip_address.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'publicIpAddressName': self._serialize.url("public_ip_address_name", public_ip_address_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('PublicIPAddress', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_public_ip_address.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipconfigurations/{ipConfigurationName}/publicipaddresses/{publicIpAddressName}'} # type: ignore
avg_line_length: 51.32304 | max_line_length: 395 | alphanum_fraction: 0.674457
hexsha: d449a9f5dbb5bd14356719f92972c2579b305f51 | size: 731 | ext: py | lang: Python
max_stars_repo: cabocha/filter.py @ kenkov/cabocha (head 05baedbf66a928704d738c61738ba5065c1f0797, licenses ["MIT"], stars: 53, 2015-01-19T08:24:37.000Z to 2021-11-15T12:34:28.000Z)
max_issues_repo: ref_src/cabocha/filter.py @ samlet/stack (head 47db17fd4fdab264032f224dca31a4bb1d19b754, licenses ["Apache-2.0"], issues: 2, 2018-11-05T04:49:18.000Z to 2018-11-22T00:30:35.000Z)
max_forks_repo: cabocha/filter.py @ kenkov/cabocha (head 05baedbf66a928704d738c61738ba5065c1f0797, licenses ["MIT"], forks: 1, 2018-10-25T08:32:17.000Z to 2018-10-25T08:32:17.000Z)
content:
#! /usr/bin/env python
# coding:utf-8
class NotFoundException(Exception):
"""述語が見つからない時に発生する例外"""
def _is_function_token(token):
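# A token qualifies when its POS tag is 動詞 (verb), 形容詞 (adjective), or 名詞 (noun).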
return token.pos in {"動詞", "形容詞", "名詞"}
def _is_function_chunk(chunk):
for token in chunk:
if _is_function_token(token):
return True
return False
def find(iters, function):
res = []
for item in iters:
if function(item):
res.append(item)
return res
def rfind(iters, function):
for item in reversed(iters):
if function(item):
return item
raise NotFoundException()
def lfind(iters, function):
for item in iters:
if function(item):
return item
raise NotFoundException()
avg_line_length: 18.275 | max_line_length: 43 | alphanum_fraction: 0.618331
hexsha: 888622ff8081c8b71bbd497221d3a68c93f19d99 | size: 11,882 | ext: py | lang: Python
max_stars_repo: grr/server/grr_response_server/databases/mysql_users.py @ dekoder/grr (head 27ba38dc0f5ad4f3e0cdbfb146a0a789e3b0d27b, licenses ["Apache-2.0"], stars: 3, 2018-09-30T01:31:29.000Z to 2019-04-22T11:44:54.000Z)
max_issues_repo: grr/server/grr_response_server/databases/mysql_users.py @ tomchop/grr (head 27ba38dc0f5ad4f3e0cdbfb146a0a789e3b0d27b, licenses ["Apache-2.0"], issues: 1, 2022-03-02T09:58:05.000Z to 2022-03-02T09:58:05.000Z)
max_forks_repo: grr/server/grr_response_server/databases/mysql_users.py @ tomchop/grr (head 27ba38dc0f5ad4f3e0cdbfb146a0a789e3b0d27b, licenses ["Apache-2.0"], forks: null, dates: null)
content:
#!/usr/bin/env python
"""The MySQL database methods for GRR users and approval handling."""
from __future__ import unicode_literals
import MySQLdb
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
from grr_response_server import db
from grr_response_server.databases import mysql_utils
from grr_response_server.rdfvalues import objects as rdf_objects
def _IntToApprovalID(approval_id):
return u"%016x" % approval_id
def _ApprovalIDToInt(approval_id):
return int(approval_id, 16)
def _ResponseToApprovalsWithGrants(response):
"""Converts a generator with approval rows into ApprovalRequest objects."""
prev_triplet = None
cur_approval_request = None
for (approval_id_int, approval_timestamp, approval_request_bytes,
grantor_username, grant_timestamp) in response:
cur_triplet = (approval_id_int, approval_timestamp, approval_request_bytes)
if cur_triplet != prev_triplet:
prev_triplet = cur_triplet
if cur_approval_request:
yield cur_approval_request
cur_approval_request = mysql_utils.StringToRDFProto(
rdf_objects.ApprovalRequest, approval_request_bytes)
cur_approval_request.approval_id = _IntToApprovalID(approval_id_int)
cur_approval_request.timestamp = mysql_utils.MysqlToRDFDatetime(
approval_timestamp)
if grantor_username and grant_timestamp:
cur_approval_request.grants.append(
rdf_objects.ApprovalGrant(
grantor_username=grantor_username,
timestamp=mysql_utils.MysqlToRDFDatetime(grant_timestamp)))
if cur_approval_request:
yield cur_approval_request
class MySQLDBUsersMixin(object):
"""MySQLDB mixin for GRR users and approval related functions."""
@mysql_utils.WithTransaction()
def WriteGRRUser(self,
username,
password=None,
ui_mode=None,
canary_mode=None,
user_type=None,
cursor=None):
"""Writes user object for a user with a given name."""
columns = ["username"]
values = [username]
if password is not None:
columns.append("password")
values.append(password.SerializeToString())
if ui_mode is not None:
columns.append("ui_mode")
values.append(int(ui_mode))
if canary_mode is not None:
columns.append("canary_mode")
# TODO(amoser): This int conversion is dirty but necessary with
# the current MySQL driver.
values.append(int(bool(canary_mode)))
if user_type is not None:
columns.append("user_type")
values.append(int(user_type))
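# Build an upsert: insert the user row, or, when more than just the username was supplied,
# refresh those columns for an existing user via ON DUPLICATE KEY UPDATE.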
query = "INSERT INTO grr_users ({cols}) VALUES ({vals})".format(
cols=", ".join(columns), vals=", ".join(["%s"] * len(columns)))
if len(values) > 1:
updates = ", ".join(
["{c} = VALUES ({c})".format(c=col) for col in columns[1:]])
query += "ON DUPLICATE KEY UPDATE " + updates
cursor.execute(query, values)
def _RowToGRRUser(self, row):
"""Creates a GRR user object from a database result row."""
username, password, ui_mode, canary_mode, user_type = row
result = rdf_objects.GRRUser(
username=username,
ui_mode=ui_mode,
canary_mode=canary_mode,
user_type=user_type)
if password:
result.password.ParseFromString(password)
return result
@mysql_utils.WithTransaction(readonly=True)
def ReadGRRUser(self, username, cursor=None):
"""Reads a user object corresponding to a given name."""
cursor.execute(
"SELECT username, password, ui_mode, canary_mode, user_type "
"FROM grr_users WHERE username=%s", [username])
row = cursor.fetchone()
if row is None:
raise db.UnknownGRRUserError("User '%s' not found." % username)
return self._RowToGRRUser(row)
@mysql_utils.WithTransaction(readonly=True)
def ReadAllGRRUsers(self, cursor=None):
cursor.execute("SELECT username, password, ui_mode, canary_mode, user_type "
"FROM grr_users")
res = []
for row in cursor.fetchall():
res.append(self._RowToGRRUser(row))
return res
@mysql_utils.WithTransaction()
def WriteApprovalRequest(self, approval_request, cursor=None):
"""Writes an approval request object."""
# Copy the approval_request to ensure we don't modify the source object.
approval_request = approval_request.Copy()
# Generate random approval id.
approval_id_int = utils.PRNG.GetUInt64()
now_str = mysql_utils.RDFDatetimeToMysqlString(rdfvalue.RDFDatetime.Now())
grants = approval_request.grants
approval_request.grants = None
query = ("INSERT INTO approval_request (username, approval_type, "
"subject_id, approval_id, timestamp, expiration_time, "
"approval_request) VALUES (%s, %s, %s, %s, %s, %s, %s)")
args = [
approval_request.requestor_username,
int(approval_request.approval_type), approval_request.subject_id,
approval_id_int, now_str,
mysql_utils.RDFDatetimeToMysqlString(approval_request.expiration_time),
approval_request.SerializeToString()
]
cursor.execute(query, args)
for grant in grants:
grant_query = ("INSERT INTO approval_grant (username, approval_id, "
"grantor_username, timestamp) VALUES (%s, %s, %s, %s)")
grant_args = [
approval_request.requestor_username, approval_id_int,
grant.grantor_username, now_str
]
cursor.execute(grant_query, grant_args)
return _IntToApprovalID(approval_id_int)
@mysql_utils.WithTransaction()
def GrantApproval(self,
requestor_username,
approval_id,
grantor_username,
cursor=None):
"""Grants approval for a given request using given username."""
now_str = mysql_utils.RDFDatetimeToMysqlString(rdfvalue.RDFDatetime.Now())
grant_query = ("INSERT INTO approval_grant (username, approval_id, "
"grantor_username, timestamp) VALUES (%s, %s, %s, %s)")
grant_args = [
requestor_username,
_ApprovalIDToInt(approval_id), grantor_username, now_str
]
cursor.execute(grant_query, grant_args)
@mysql_utils.WithTransaction(readonly=True)
def ReadApprovalRequest(self, requestor_username, approval_id, cursor=None):
"""Reads an approval request object with a given id."""
query = ("SELECT approval_request.approval_id, approval_request.timestamp, "
"approval_request.approval_request, "
"approval_grant.grantor_username, approval_grant.timestamp "
"FROM approval_request "
"LEFT JOIN approval_grant USING (username, approval_id) "
"WHERE approval_request.approval_id=%s "
"AND approval_request.username=%s")
cursor.execute(query, [_ApprovalIDToInt(approval_id), requestor_username])
res = cursor.fetchall()
if not res:
raise db.UnknownApprovalRequestError(
"Approval '%s' not found." % approval_id)
approval_id_int, timestamp, approval_request_bytes, _, _ = res[0]
approval_request = mysql_utils.StringToRDFProto(rdf_objects.ApprovalRequest,
approval_request_bytes)
approval_request.approval_id = _IntToApprovalID(approval_id_int)
approval_request.timestamp = mysql_utils.MysqlToRDFDatetime(timestamp)
for _, _, _, grantor_username, timestamp in res:
if not grantor_username:
continue
# Note: serialized approval_request objects are guaranteed to not
# have any grants.
approval_request.grants.append(
rdf_objects.ApprovalGrant(
grantor_username=grantor_username,
timestamp=mysql_utils.MysqlToRDFDatetime(timestamp)))
return approval_request
@mysql_utils.WithTransaction(readonly=True)
def ReadApprovalRequests(self,
requestor_username,
approval_type,
subject_id=None,
include_expired=False,
cursor=None):
"""Reads approval requests of a given type for a given user."""
query = ("SELECT ar.approval_id, ar.timestamp, ar.approval_request, "
"ag.grantor_username, ag.timestamp "
"FROM approval_request ar "
"LEFT JOIN approval_grant AS ag USING (username, approval_id) "
"WHERE ar.username=%s AND ar.approval_type=%s")
args = [requestor_username, int(approval_type)]
if subject_id:
query += " AND ar.subject_id = %s"
args.append(subject_id)
query += " ORDER BY ar.approval_id"
ret = []
now = rdfvalue.RDFDatetime.Now()
cursor.execute(query, args)
for approval_request in _ResponseToApprovalsWithGrants(cursor.fetchall()):
if include_expired or approval_request.expiration_time >= now:
ret.append(approval_request)
return ret
@mysql_utils.WithTransaction()
def WriteUserNotification(self, notification, cursor=None):
"""Writes a notification for a given user."""
# Copy the notification to ensure we don't modify the source object.
notification = notification.Copy()
if not notification.timestamp:
notification.timestamp = rdfvalue.RDFDatetime.Now()
query = ("INSERT INTO user_notification (username, timestamp, "
"notification_state, notification) "
"VALUES (%s, %s, %s, %s)")
args = [
notification.username,
mysql_utils.RDFDatetimeToMysqlString(notification.timestamp),
int(notification.state),
notification.SerializeToString()
]
try:
cursor.execute(query, args)
except MySQLdb.IntegrityError:
raise db.UnknownGRRUserError("User %s not found!" % notification.username)
@mysql_utils.WithTransaction(readonly=True)
def ReadUserNotifications(self,
username,
state=None,
timerange=None,
cursor=None):
"""Reads notifications scheduled for a user within a given timerange."""
query = ("SELECT timestamp, notification_state, notification "
"FROM user_notification "
"WHERE username=%s ")
args = [username]
if state is not None:
query += "AND notification_state = %s "
args.append(int(state))
if timerange is not None:
time_from, time_to = timerange # pylint: disable=unpacking-non-sequence
if time_from is not None:
query += "AND timestamp >= %s "
args.append(mysql_utils.RDFDatetimeToMysqlString(time_from))
if time_to is not None:
query += "AND timestamp <= %s "
args.append(mysql_utils.RDFDatetimeToMysqlString(time_to))
query += "ORDER BY timestamp DESC "
ret = []
cursor.execute(query, args)
for timestamp, state, notification_ser in cursor.fetchall():
n = rdf_objects.UserNotification.FromSerializedString(notification_ser)
n.timestamp = mysql_utils.MysqlToRDFDatetime(timestamp)
n.state = state
ret.append(n)
return ret
@mysql_utils.WithTransaction()
def UpdateUserNotifications(self,
username,
timestamps,
state=None,
cursor=None):
"""Updates existing user notification objects."""
query = ("UPDATE user_notification n "
"SET n.notification_state = %s "
"WHERE n.username = %s AND n.timestamp IN ({})").format(", ".join(
["%s"] * len(timestamps)))
args = [
int(state),
username,
] + [mysql_utils.RDFDatetimeToMysqlString(t) for t in timestamps]
cursor.execute(query, args)
avg_line_length: 35.57485 | max_line_length: 80 | alphanum_fraction: 0.660158
hexsha: 2849a58cfd3f0a3f71c796f6a6b58d790f42576a | size: 4,725 | ext: py | lang: Python
max_stars_repo: toontown/catalog/CatalogEmoteItemNotWorking.py @ CrankySupertoon01/Toontown-2 (head 60893d104528a8e7eb4aced5d0015f22e203466d, licenses ["MIT"], stars: 1, 2021-02-13T22:40:50.000Z to 2021-02-13T22:40:50.000Z)
max_issues_repo: toontown/catalog/CatalogEmoteItemNotWorking.py @ CrankySupertoonArchive/Toontown-2 (head 60893d104528a8e7eb4aced5d0015f22e203466d, licenses ["MIT"], issues: 1, 2018-07-28T20:07:04.000Z to 2018-07-30T18:28:34.000Z)
max_forks_repo: toontown/catalog/CatalogEmoteItemNotWorking.py @ CrankySupertoonArchive/Toontown-2 (head 60893d104528a8e7eb4aced5d0015f22e203466d, licenses ["MIT"], forks: 2, 2019-12-02T01:39:10.000Z to 2021-02-13T22:41:00.000Z)
content:
import CatalogItem
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from otp.otpbase import OTPLocalizer
from direct.interval.IntervalGlobal import *
class CatalogEmoteItem(CatalogItem.CatalogItem):
sequenceNumber = 0
pictureToon = None
def makeNewItem(self, emoteIndex, isSpecial = False):
self.emoteIndex = emoteIndex
self.isSpecial = isSpecial
CatalogItem.CatalogItem.makeNewItem(self)
def getPurchaseLimit(self):
return 1
def reachedPurchaseLimit(self, avatar):
if self in avatar.onOrder or self in avatar.mailboxContents or self in avatar.onGiftOrder:
return 1
if self.emoteIndex >= len(avatar.emoteAccess):
return 0
return avatar.emoteAccess[self.emoteIndex] != 0
def getAcceptItemErrorText(self, retcode):
if retcode == ToontownGlobals.P_ItemAvailable:
return TTLocalizer.CatalogAcceptEmote
return CatalogItem.CatalogItem.getAcceptItemErrorText(self, retcode)
def saveHistory(self):
return 1
def getTypeName(self):
return TTLocalizer.EmoteTypeName
def getName(self):
return OTPLocalizer.EmoteList[self.emoteIndex]
def recordPurchase(self, avatar, optional):
if self.emoteIndex < 0 or self.emoteIndex > len(avatar.emoteAccess):
self.notify.warning('Invalid emote access: %s for avatar %s' % (self.emoteIndex, avatar.doId))
return ToontownGlobals.P_InvalidIndex
avatar.emoteAccess[self.emoteIndex] = 1
avatar.d_setEmoteAccess(avatar.emoteAccess)
return ToontownGlobals.P_ItemAvailable
def getPicture(self, avatar):
from toontown.toon import Toon
from toontown.toon import ToonHead
from toontown.toon import TTEmote
from otp.avatar import Emote
self.hasPicture = True
if self.emoteIndex in Emote.globalEmote.getHeadEmotes():
toon = ToonHead.ToonHead()
toon.setupHead(avatar.style, forGui=1)
else:
toon = Toon.Toon()
toon.setDNA(avatar.style)
toon.loop('neutral')
toon.setH(180)
model, ival = self.makeFrameModel(toon, 0)
track, duration = Emote.globalEmote.doEmote(toon, self.emoteIndex, volume=self.volume)
if duration == None:
duration = 0
name = 'emote-item-%s' % self.sequenceNumber
CatalogEmoteItem.sequenceNumber += 1
if track != None:
track = Sequence(Sequence(track, duration=0), Wait(duration + 2), name=name)
else:
track = Sequence(Func(Emote.globalEmote.doEmote, toon, self.emoteIndex), Wait(duration + 4), name=name)
self.pictureToon = toon
return (model, track)
def changeIval(self, volume):
from toontown.toon import Toon
from toontown.toon import ToonHead
from toontown.toon import TTEmote
from otp.avatar import Emote
self.volume = volume
if not self.pictureToon:
return Sequence()
track, duration = Emote.globalEmote.doEmote(self.pictureToon, self.emoteIndex, volume=self.volume)
if duration == None:
duration = 0
name = 'emote-item-%s' % self.sequenceNumber
CatalogEmoteItem.sequenceNumber += 1
if track != None:
track = Sequence(Sequence(track, duration=0), Wait(duration + 2), name=name)
else:
track = Sequence(Func(Emote.globalEmote.doEmote, self.pictureToon, self.emoteIndex), Wait(duration + 4), name=name)
return track
def cleanupPicture(self):
CatalogItem.CatalogItem.cleanupPicture(self)
self.pictureToon.emote.finish()
self.pictureToon.emote = None
self.pictureToon.delete()
self.pictureToon = None
return
def output(self, store = -1):
return 'CatalogEmoteItem(%s%s)' % (self.emoteIndex, self.formatOptionalData(store))
def compareTo(self, other):
return self.emoteIndex - other.emoteIndex
def getHashContents(self):
return self.emoteIndex
def getBasePrice(self):
return 550
def decodeDatagram(self, di, versionNumber, store):
CatalogItem.CatalogItem.decodeDatagram(self, di, versionNumber, store)
self.emoteIndex = di.getUint8()
self.isSpecial = di.getBool()
if self.emoteIndex > len(OTPLocalizer.EmoteList):
raise ValueError
def encodeDatagram(self, dg, store):
CatalogItem.CatalogItem.encodeDatagram(self, dg, store)
dg.addUint8(self.emoteIndex)
dg.addBool(self.isSpecial)
def isGift(self):
return not self.getEmblemPrices()
avg_line_length: 36.914063 | max_line_length: 115 | alphanum_fraction: 0.660317
hexsha: f66de99d96b46d730d75b4e207da96f396a26e41 | size: 16,504 | ext: py | lang: Python
max_stars_repo: pyannote/audio/applications/pyannote_audio.py @ emmaducos/pyannote-audio (head 52cd867b5ed6a19fafd79a8dfa365d067234c2d0, licenses ["MIT"], stars: 1, 2022-02-23T08:46:10.000Z to 2022-02-23T08:46:10.000Z)
max_issues_repo: pyannote/audio/applications/pyannote_audio.py @ emmaducos/pyannote-audio (head 52cd867b5ed6a19fafd79a8dfa365d067234c2d0, licenses ["MIT"], issues: null, dates: null)
max_forks_repo: pyannote/audio/applications/pyannote_audio.py @ emmaducos/pyannote-audio (head 52cd867b5ed6a19fafd79a8dfa365d067234c2d0, licenses ["MIT"], forks: null, dates: null)
content:
#!/usr/bin/env python
# encoding: utf-8
# The MIT License (MIT)
# Copyright (c) 2019-2020 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# AUTHORS
# Hervé BREDIN - http://herve.niderb.fr
"""
Neural building blocks for speaker diarization
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Usage:
pyannote-audio (sad | scd | ovl | emb | dom) train [--cpu | --gpu] [options] <root> <protocol>
pyannote-audio (sad | scd | ovl | emb | dom) validate [--cpu | --gpu] [options] <train> <protocol>
pyannote-audio (sad | scd | ovl | emb | dom) apply [--cpu | --gpu] [options] <validate> <protocol>
pyannote-audio -h | --help
pyannote-audio --version
This command line tool can be used to train, validate, and apply neural networks
for the following blocks of a speaker diarization pipeline:
* (sad) speech activity detection consists in detecting speech regions in
an audio recording.
* (scd) speaker change detection consists in detecting timestamps of
speaker change points.
* (ovl) overlapped speech detection consists in detecting regions with two
or more simultaneous speakers.
* (emb) speaker embedding consists in projecting audio chunk into a
(usually high-dimensional) vector space where same speaker
embeddings are close to each other, and different speaker embeddings
are not.
* (dom) domain classification consists in predicting the domain of an
audio recording
Running a complete speech activity detection experiment on the provided
"debug" dataset would go like this:
* Run experiment on this pyannote.database protocol
$ export DATABASE=Debug.SpeakerDiarization.Debug
* This directory will contain experiments artifacts:
$ mkdir my_experiment && cd my_experiment
* A unique configuration file describes the experiment hyper-parameters
(see "Configuration file" below for details):
$ edit config.yml
* This will train the model on the training set:
$ pyannote-audio sad train ${PWD} ${DATABASE}
* Training artifacts (including model weights) are stored in a sub-directory
whose name makes it clear which dataset and subset (train, by default)
were used for training the model.
$ cd train/${DATABASE}.train
* This will validate the model on the development set:
$ pyannote-audio sad validate ${PWD} ${DATABASE}
* Validation artifacts (including the selection of the best epoch) are
stored in a sub-directory named after the dataset and subset (development,
by default) used for validating the model.
$ cd validate/${DATABASE}.development
* This will apply the best model (according to the validation step) to the
test set:
$ pyannote-audio sad apply ${PWD} ${DATABASE}
* Inference artifacts are stored in a sub-directory whose name makes it
clear which epoch has been used (e.g. apply/0125). Artifacts include:
* raw output of the best model (one numpy array per file that can be
loaded with pyannote.audio.features.Precomputed API and handled with
pyannote.core.SlidingWindowFeature API)
* (depending on the task) a file "${DATABASE}.test.rttm" containing the
post-processing of raw output.
* (depending on the task) a file "${DATABASE}.test.eval" containing the
evaluation result computed with pyannote.metrics.
pyannote.database support
~~~~~~~~~~~~~~~~~~~~~~~~~
PYANNOTE_DATABASE_CONFIG=
Configuration file <root>/config.yml
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Reproducible research is facilitated by the systematic use of configuration
files stored in <root>/config.yml in YAML format.
.......................... <root>/config.yml ..........................
task:
name:
params:
feature_extraction:
name:
params:
data_augmentation:
name:
params:
architecture:
name:
params:
scheduler:
name:
params:
preprocessors:
callbacks:
...................................................................
File <root>/config.yml is mandatory, unless option --pretrained is used.
When fine-tuning a model with option --pretrained=<model>, one can omit it
and the original <model> configuration file is used instead. If (a possibly
partial) <root>/config.yml file is provided anyway, it is used to override
<model> configuration file.
Tensorboard support
~~~~~~~~~~~~~~~~~~~
A bunch of metrics are logged during training and validation (e.g. loss,
learning rate, computation time, validation metric). They can be visualized
using tensorboard:
$ tensorboard --logdir=<root>
Common options
~~~~~~~~~~~~~~
<root> Experiment root directory. Should contain config.yml
configuration file, unless --pretrained option is
used (for which config.yml is optional).
<protocol> Name of protocol to use for training, validation, or
inference. Have a look at pyannote.database
documentation for instructions on how to define a
protocol with your own dataset:
https://github.com/pyannote/pyannote-database#custom-protocols
<train> Path to <root> sub-directory containing training
artifacts (e.g. <root>/train/<protocol>.train)
<validate> Path to <train> sub-directory containing validation
artifacts (e.g. <train>/validate/<protocol>.development)
In case option --pretrained=<model> is used, the
output of the pretrained model is dumped into the
<validate> directory.
--subset=<subset> Subset to use for training (resp. validation,
inference). Defaults to "train" (resp. "development",
"test") for strict enforcement of machine learning
good practices.
--gpu Run on GPU. When multiple GPUs are available, use
CUDA_AVAILABLE_DEVICES environment variable to force
using a specific one. Defaults to using CPU if no GPU
is available.
--cpu Run on CPU. Defaults to using GPU when available.
--debug Run using PyTorch's anomaly detection. This will throw
an error if a NaN value is produced, and the stacktrace
will point to the origin of it. This option can
considerably slow execution.
--from=<epoch> Start training (resp. validating) at epoch <epoch>.
Use --from=last to start from last available epoch at
launch time. Not used for inference [default: 0].
--to=<epoch> End training (resp. validating) at epoch <epoch>.
Use --to=last to validate until last available epoch
at launch time. Not used for inference [default: 100].
--batch=<size> Set batch size used for validation and inference.
Has no effect when training as this parameter should
be defined in the configuration file [default: 32].
--step=<ratio> Ratio of audio chunk duration used as step between
two consecutive audio chunks [default: 0.25]
--parallel=<n_jobs> Use at most that many threads for generating training
samples or validating files. Defaults to using all
CPUs but one.
Speaker embedding
~~~~~~~~~~~~~~~~~
--duration=<duration> Use audio chunks with that duration. Defaults to the
fixed duration used during training, when available.
--metric=<metric> Use this metric (e.g. "cosine" or "euclidean") to
compare embeddings. Defaults to the metric defined in
<root>/config.yml configuration file.
Pretrained model options
~~~~~~~~~~~~~~~~~~~~~~~~
--pretrained=<model> Warm start training with pre-trained model. Can be
either a path to an existing checkpoint (e.g.
<train>/weights/0050.pt) or the name of a model
available in torch.hub.list('pyannote/pyannote.audio')
This option can also be used to apply a pretrained
model. See description of <validate> for more details.
Validation options
~~~~~~~~~~~~~~~~~~
--every=<epoch> Validate model every <epoch> epochs [default: 1].
--evergreen Prioritize validation of most recent epoch.
For speech activity and overlapped speech detection, validation consists in
looking for the value of the detection threshold that maximizes the f-score
of recall and precision.
For speaker change detection, validation consists in looking for the value of
the peak detection threshold that maximizes the f-score of purity and
coverage:
--diarization Use diarization purity and coverage instead of
(default) segmentation purity and coverage.
For speaker embedding and verification protocols, validation runs the actual
speaker verification experiment (representing each recording by its average
embedding) and reports equal error rate.
For speaker embedding and diarization protocols, validation runs a speaker
diarization pipeline based on oracle segmentation and "pool-linkage"
agglomerative clustering of speech turns (represented by their average
embedding), and looks for the threshold that maximizes the f-score of purity
and coverage.
"""
import sys
import warnings
from docopt import docopt
from pathlib import Path
import multiprocessing
import torch
from .base import apply_pretrained
from .speech_detection import SpeechActivityDetection
from .change_detection import SpeakerChangeDetection
from .overlap_detection import OverlapDetection
from .speaker_embedding import SpeakerEmbedding
from .domain_classification import DomainClassification
def main():
# TODO: update version automatically
arg = docopt(__doc__, version='pyannote-audio 2.0')
params = {}
if arg['sad']:
Application = SpeechActivityDetection
elif arg['scd']:
Application = SpeakerChangeDetection
elif arg['ovl']:
Application = OverlapDetection
elif arg['emb']:
Application = SpeakerEmbedding
elif arg['dom']:
Application = DomainClassification
device = 'cuda' if torch.cuda.is_available() else 'cpu'
if arg['--gpu'] and device == 'cpu':
msg = 'No GPU is available. Using CPU instead.'
warnings.warn(msg)
if arg['--cpu'] and device == 'cuda':
device = 'cpu'
params['device'] = torch.device(device)
protocol = arg['<protocol>']
subset = arg['--subset']
if arg['--debug']:
msg = 'Debug mode is enabled, this option might slow execution considerably.'
warnings.warn(msg, RuntimeWarning)
torch.autograd.set_detect_anomaly(True)
n_jobs = arg['--parallel']
if n_jobs is None:
n_jobs = max(1, multiprocessing.cpu_count() - 1)
params['n_jobs'] = int(n_jobs)
if arg['train']:
params['subset'] = 'train' if subset is None else subset
# start training at this epoch (defaults to 0, but 'last' is supported)
warm_start = arg['--from']
if warm_start != 'last':
warm_start = int(warm_start)
# or start from pretrained model
pretrained = arg['--pretrained']
pretrained_config_yml = None
if pretrained is not None:
# start from an existing model checkpoint
# (from a different experiment)
if Path(pretrained).exists():
warm_start = Path(pretrained)
else:
try:
warm_start = torch.hub.load(
# TODO. change to 'pyannote/pyannote-audio'
# after 2.0 release
'pyannote/pyannote-audio:develop',
pretrained).weights_pt_
except Exception as e:
msg = (
f'Could not load "{warm_start}" model from torch.hub.'
f'The following exception was raised:\n\n{e}\n\n')
sys.exit(msg)
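# For a checkpoint laid out as <root>/train/<protocol>.train/weights/<epoch>.pt (the layout
# described in the module docstring), parents[3] is the experiment <root> that holds config.yml.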
pretrained_config_yml = warm_start.parents[3] / 'config.yml'
params['warm_start'] = warm_start
# stop training at this epoch (docopt default: 100)
params['epochs'] = int(arg['--to'])
root_dir = Path(arg['<root>']).expanduser().resolve(strict=True)
app = Application(root_dir, training=True,
pretrained_config_yml=pretrained_config_yml)
app.train(protocol, **params)
if arg['validate']:
train_dir = Path(arg['<train>']).expanduser().resolve(strict=True)
app = Application.from_train_dir(train_dir, training=False)
params['subset'] = 'development' if subset is None else subset
start = arg['--from']
if start != 'last':
start = int(start)
params['start'] = start
end = arg['--to']
if end != 'last':
end = int(end)
params['end'] = end
params['every'] = int(arg['--every'])
params['chronological'] = not arg['--evergreen']
params['batch_size'] = int(arg['--batch'])
params['diarization'] = arg['--diarization']
duration = arg['--duration']
if duration is None:
duration = getattr(app.task_, 'duration', None)
if duration is None:
msg = ("Task has no 'duration' defined. "
"Use '--duration' option to provide one.")
raise ValueError(msg)
else:
duration = float(duration)
params['duration'] = duration
params['step'] = float(arg['--step'])
if arg['emb']:
metric = arg['--metric']
if metric is None:
metric = getattr(app.task_, 'metric', None)
if metric is None:
msg = ("Approach has no 'metric' defined. "
"Use '--metric' option to provide one.")
raise ValueError(msg)
params['metric'] = metric
# FIXME: parallel is broken in pyannote.metrics
params['n_jobs'] = 1
app.validate(protocol, **params)
if arg['apply']:
validate_dir = Path(arg['<validate>']).expanduser().resolve(strict=True)
params['subset'] = 'test' if subset is None else subset
params['batch_size'] = int(arg['--batch'])
duration = arg['--duration']
if duration is not None:
duration = float(duration)
params['duration'] = duration
params['step'] = float(arg['--step'])
params['Pipeline'] = getattr(Application, 'Pipeline', None)
params['pretrained'] = arg['--pretrained']
apply_pretrained(validate_dir, protocol, **params)
avg_line_length: 37.94023 | max_line_length: 103 | alphanum_fraction: 0.61373
hexsha: b2ce03c30a249309be4681b95f5589aa50717a03 | size: 2,524 | ext: py | lang: Python
max_stars_repo: nanodlna/cli.py @ Nightdavisao/nano-dlna (head 1489bf2cba1f3fd52fdf95c2f9d298900ea56397, licenses ["MIT"], stars: null, dates: null)
max_issues_repo: nanodlna/cli.py @ Nightdavisao/nano-dlna (head 1489bf2cba1f3fd52fdf95c2f9d298900ea56397, licenses ["MIT"], issues: null, dates: null)
max_forks_repo: nanodlna/cli.py @ Nightdavisao/nano-dlna (head 1489bf2cba1f3fd52fdf95c2f9d298900ea56397, licenses ["MIT"], forks: null, dates: null)
content:
#!/usr/bin/env python3
from __future__ import print_function
import argparse
import json
import os
import sys
from . import devices, dlna, streaming
def get_subtitle(file_video):
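# A subtitle is assumed to be an .srt file with the same basename as the video; return None if it does not exist.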
video, extension = os.path.splitext(file_video)
file_subtitle = "{0}.srt".format(video)
if not os.path.exists(file_subtitle):
return None
return file_subtitle
def list_devices(args):
my_devices = devices.get_devices(args.timeout)
for i, device in enumerate(my_devices, 1):
print("Device {0}:\n{1}\n\n".format(i, json.dumps(device, indent=4)))
def play(args):
# Get video and subtitle file names
files = {"file_video": args.file_video}
if args.use_subtitle:
if not args.file_subtitle:
args.file_subtitle = get_subtitle(args.file_video)
if args.file_subtitle:
files["file_subtitle"] = args.file_subtitle
# Select device to play
device = None
if args.device_url:
device = devices.register_device(args.device_url)
else:
my_devices = devices.get_devices(args.timeout)
if len(my_devices) > 0:
if args.device_query:
device = [
device for device in my_devices
if args.device_query.lower() in str(device).lower()][0]
else:
device = my_devices[0]
if not device:
sys.exit("No devices found.")
# Configure streaming server
target_ip = device["hostname"]
serve_ip = streaming.get_serve_ip(target_ip)
files_urls = streaming.start_server(files, serve_ip)
# Play the video through DLNA protocol
dlna.play(files_urls, device)
def run():
parser = argparse.ArgumentParser(
description="A minimal UPnP/DLNA media streamer.")
parser.add_argument("-t", "--timeout", type=float, default=5)
subparsers = parser.add_subparsers(dest="subparser_name")
p_list = subparsers.add_parser('list')
p_list.set_defaults(func=list_devices)
p_play = subparsers.add_parser('play')
p_play.add_argument("-d", "--device", dest="device_url")
p_play.add_argument("-q", "--query-device", dest="device_query")
p_play.add_argument("-s", "--subtitle", dest="file_subtitle")
p_play.add_argument("-n", "--no-subtitle",
dest="use_subtitle", action="store_false")
p_play.add_argument("file_video")
p_play.set_defaults(func=play)
args = parser.parse_args()
args.func(args)
if __name__ == "__main__":
run()
avg_line_length: 24.745098 | max_line_length: 77 | alphanum_fraction: 0.650158
hexsha: e07d5198ebec6f7388646a14a3d25e22b1a181d3 | size: 1,070 | ext: py | lang: Python
max_stars_repo: python/graphs/binary_indexed_tree.py @ tachyonsoftware/algorithms (head 29149a55de173b42dfa461838877fa66d018629f, licenses ["MIT"], stars: 17, 2016-06-29T08:41:51.000Z to 2022-02-04T13:39:13.000Z)
max_issues_repo: python/graphs/binary_indexed_tree.py @ tachyonsoftware/algorithms (head 29149a55de173b42dfa461838877fa66d018629f, licenses ["MIT"], issues: null, dates: null)
max_forks_repo: python/graphs/binary_indexed_tree.py @ tachyonsoftware/algorithms (head 29149a55de173b42dfa461838877fa66d018629f, licenses ["MIT"], forks: 4, 2016-05-09T19:02:47.000Z to 2021-02-22T02:05:41.000Z)
content:
class BinaryIndexedTree(object):
def __init__(self, nums=tuple()):
self.bit = [0] * (len(nums) + 1)
for index, val in enumerate(nums):
self.update(index, val)
def update(self, index, val):
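# Point update (1-based internally): index & -index isolates the lowest set bit, which is the
# jump to the next tree node whose range also covers this position.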
index += 1
while index < len(self.bit):
self.bit[index] += val
index = index + (index & -index)
def sum(self, start, end):
if start > end:
raise ValueError('Start %d is greater than end %d' % (start, end))
elif start < 0 or start >= len(self.bit) - 1:
raise IndexError('Start %d is out of bound' % start)
elif end < 0 or end >= len(self.bit) - 1:
raise IndexError('End %d is out of bound' % end)
return self._sum(end) - self._sum(start - 1)
def _sum(self, end):
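# Prefix sum of nums[0..end]: end & (end - 1) clears the lowest set bit, stepping to the
# previous partial range until the index reaches 0.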
end += 1
total = 0
while end > 0:
total += self.bit[end]
end = end & (end - 1)
return total
if __name__ == '__main__':
nums = [1, 2, 3, 4, 5]
bit = BinaryIndexedTree(nums)
for i in range(len(nums)):
for j in range(i, len(nums)):
print((i, j), bit.sum(i, j) == sum(nums[i: j + 1]))
avg_line_length: 27.435897 | max_line_length: 84 | alphanum_fraction: 0.578505
hexsha: 96c4e9ec76a5ab40d9c8e43603ad70354ba4c394 | size: 6,831 | ext: py | lang: Python
max_stars_repo: src/python/grpcio_tests/tests/unit/_auth_context_test.py @ samotarnik/grpc (head 3278bdceda8030d5aa130f12765e5f07263c860d, licenses ["Apache-2.0"], stars: 2,151, 2020-04-18T07:31:17.000Z to 2022-03-31T08:39:18.000Z)
max_issues_repo: src/python/grpcio_tests/tests/unit/_auth_context_test.py @ samotarnik/grpc (head 3278bdceda8030d5aa130f12765e5f07263c860d, licenses ["Apache-2.0"], issues: 395, 2020-04-18T08:22:18.000Z to 2021-12-08T13:04:49.000Z)
max_forks_repo: src/python/grpcio_tests/tests/unit/_auth_context_test.py @ samotarnik/grpc (head 3278bdceda8030d5aa130f12765e5f07263c860d, licenses ["Apache-2.0"], forks: 338, 2020-04-18T08:03:10.000Z to 2022-03-29T12:33:22.000Z)
content:
# Copyright 2017 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests exposure of SSL auth context"""
import pickle
import unittest
import logging
import grpc
from grpc import _channel
from grpc.experimental import session_cache
import six
from tests.unit import test_common
from tests.unit import resources
_REQUEST = b'\x00\x00\x00'
_RESPONSE = b'\x00\x00\x00'
_UNARY_UNARY = '/test/UnaryUnary'
_SERVER_HOST_OVERRIDE = 'foo.test.google.fr'
_CLIENT_IDS = (
b'*.test.google.fr',
b'waterzooi.test.google.be',
b'*.test.youtube.com',
b'192.168.1.3',
)
_ID = 'id'
_ID_KEY = 'id_key'
_AUTH_CTX = 'auth_ctx'
_PRIVATE_KEY = resources.private_key()
_CERTIFICATE_CHAIN = resources.certificate_chain()
_TEST_ROOT_CERTIFICATES = resources.test_root_certificates()
_SERVER_CERTS = ((_PRIVATE_KEY, _CERTIFICATE_CHAIN),)
_PROPERTY_OPTIONS = ((
'grpc.ssl_target_name_override',
_SERVER_HOST_OVERRIDE,
),)
def handle_unary_unary(request, servicer_context):
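# The server handler pickles what it saw (peer identities, identity key, auth context) so the
# client-side assertions can inspect the server's view of the connection.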
return pickle.dumps({
_ID: servicer_context.peer_identities(),
_ID_KEY: servicer_context.peer_identity_key(),
_AUTH_CTX: servicer_context.auth_context()
})
class AuthContextTest(unittest.TestCase):
def testInsecure(self):
handler = grpc.method_handlers_generic_handler('test', {
'UnaryUnary':
grpc.unary_unary_rpc_method_handler(handle_unary_unary)
})
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
port = server.add_insecure_port('[::]:0')
server.start()
with grpc.insecure_channel('localhost:%d' % port) as channel:
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
server.stop(None)
auth_data = pickle.loads(response)
self.assertIsNone(auth_data[_ID])
self.assertIsNone(auth_data[_ID_KEY])
self.assertDictEqual({}, auth_data[_AUTH_CTX])
def testSecureNoCert(self):
handler = grpc.method_handlers_generic_handler('test', {
'UnaryUnary':
grpc.unary_unary_rpc_method_handler(handle_unary_unary)
})
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
port = server.add_secure_port('[::]:0', server_cred)
server.start()
channel_creds = grpc.ssl_channel_credentials(
root_certificates=_TEST_ROOT_CERTIFICATES)
channel = grpc.secure_channel(
'localhost:{}'.format(port),
channel_creds,
options=_PROPERTY_OPTIONS)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
channel.close()
server.stop(None)
auth_data = pickle.loads(response)
self.assertIsNone(auth_data[_ID])
self.assertIsNone(auth_data[_ID_KEY])
self.assertDictEqual({
'transport_security_type': [b'ssl'],
'ssl_session_reused': [b'false'],
}, auth_data[_AUTH_CTX])
def testSecureClientCert(self):
handler = grpc.method_handlers_generic_handler('test', {
'UnaryUnary':
grpc.unary_unary_rpc_method_handler(handle_unary_unary)
})
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
server_cred = grpc.ssl_server_credentials(
_SERVER_CERTS,
root_certificates=_TEST_ROOT_CERTIFICATES,
require_client_auth=True)
port = server.add_secure_port('[::]:0', server_cred)
server.start()
channel_creds = grpc.ssl_channel_credentials(
root_certificates=_TEST_ROOT_CERTIFICATES,
private_key=_PRIVATE_KEY,
certificate_chain=_CERTIFICATE_CHAIN)
channel = grpc.secure_channel(
'localhost:{}'.format(port),
channel_creds,
options=_PROPERTY_OPTIONS)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
channel.close()
server.stop(None)
auth_data = pickle.loads(response)
auth_ctx = auth_data[_AUTH_CTX]
six.assertCountEqual(self, _CLIENT_IDS, auth_data[_ID])
self.assertEqual('x509_subject_alternative_name', auth_data[_ID_KEY])
self.assertSequenceEqual([b'ssl'], auth_ctx['transport_security_type'])
self.assertSequenceEqual([b'*.test.google.com'],
auth_ctx['x509_common_name'])
def _do_one_shot_client_rpc(self, channel_creds, channel_options, port,
expect_ssl_session_reused):
channel = grpc.secure_channel(
'localhost:{}'.format(port), channel_creds, options=channel_options)
response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
auth_data = pickle.loads(response)
self.assertEqual(expect_ssl_session_reused,
auth_data[_AUTH_CTX]['ssl_session_reused'])
channel.close()
def testSessionResumption(self):
# Set up a secure server
handler = grpc.method_handlers_generic_handler('test', {
'UnaryUnary':
grpc.unary_unary_rpc_method_handler(handle_unary_unary)
})
server = test_common.test_server()
server.add_generic_rpc_handlers((handler,))
server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
port = server.add_secure_port('[::]:0', server_cred)
server.start()
# Create a cache for TLS session tickets
cache = session_cache.ssl_session_cache_lru(1)
channel_creds = grpc.ssl_channel_credentials(
root_certificates=_TEST_ROOT_CERTIFICATES)
channel_options = _PROPERTY_OPTIONS + (
('grpc.ssl_session_cache', cache),)
# Initial connection has no session to resume
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port,
expect_ssl_session_reused=[b'false'])
# Subsequent connections resume sessions
self._do_one_shot_client_rpc(
channel_creds,
channel_options,
port,
expect_ssl_session_reused=[b'true'])
server.stop(None)
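# Illustrative sketch (not exercised by the test run above): the client-side wiring
# needed for TLS session resumption, mirroring testSessionResumption. The target
# address below is a hypothetical placeholder; a compatible secure server is assumed
# to be listening there.
def _example_resumption_channel():
    cache = session_cache.ssl_session_cache_lru(1)
    channel_creds = grpc.ssl_channel_credentials(
        root_certificates=_TEST_ROOT_CERTIFICATES)
    options = _PROPERTY_OPTIONS + (('grpc.ssl_session_cache', cache),)
    return grpc.secure_channel('localhost:50051', channel_creds, options=options)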
if __name__ == '__main__':
logging.basicConfig()
unittest.main(verbosity=2)
| 35.030769 | 80 | 0.669302 |
7d36bb9ea203eff6282835edce65b7f78c8c6e35 | 2,152 | py | Python | services/workshop/utils/messages.py | jay-nanduri/crAPI | 082c3effd4171e077d2dd61786faf9e3f95a072f | ["Apache-2.0", "0BSD"] | 89 | 2021-02-09T16:30:26.000Z | 2022-03-27T17:14:18.000Z | services/workshop/utils/messages.py | jay-nanduri/crAPI | 082c3effd4171e077d2dd61786faf9e3f95a072f | ["Apache-2.0", "0BSD"] | 12 | 2021-07-28T03:05:25.000Z | 2022-02-28T01:27:30.000Z | services/workshop/utils/messages.py | jay-nanduri/crAPI | 082c3effd4171e077d2dd61786faf9e3f95a072f | ["Apache-2.0", "0BSD"] | 38 | 2021-02-10T07:11:00.000Z | 2022-03-28T13:58:58.000Z |
# Copyright 2020 Traceable, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
contains all constant messages
which can be used in views
"""
METHOD_NOT_ALLOWED = "Method Not Allowed!"
BAD_REQUEST = "Bad Request!"
JWT_REQUIRED = "JWT Token required!"
INVALID_TOKEN = "Invalid JWT Token!"
TOKEN_EXPIRED = "Token Expired!"
EMAIL_ALREADY_EXISTS = "Email already Registered!"
MEC_CODE_ALREADY_EXISTS = "Mechanic Code already exists!"
MEC_CREATED = "Mechanic created with email: {}"
NO_OF_REPEATS_EXCEEDED = "Service unavailable. Seems like you caused layer 7 DoS :)"
MIN_NO_OF_REPEATS_FAILED = " 'number_of_repeats' should be between 1 and 100."
ERROR_UPLOADING_FILE = "Error Uploading File!"
PRODUCT_SAVED = "Product saved with id {}"
INSUFFICIENT_BALANCE = "Insufficient Balance. Please apply coupons to get more balance!"
ORDER_CREATED = "Order sent successfully."
ORDER_RETURNED_PENDING = "This order is already requested for returning!"
ORDER_ALREADY_RETURNED = "This order is already returned!"
ORDER_RETURNING = "Please use the following QR code to return your order to a UPS store!"
COUPON_ALREADY_APPLIED = "This coupon code is already claimed by you!! Please try with another coupon code"
COUPON_APPLIED = "Coupon successfully applied!"
RESTRICTED = "You are not allowed to access this resource!"
INVALID_STATUS = "The value of 'status' has to be 'delivered','return pending' or 'returned'"
REPORT_ID_MISSING = "Please enter the report_id value."
INVALID_REPORT_ID = "Please enter a valid report_id value."
REPORT_DOES_NOT_EXIST = "The Report does not exist for given report_id."
COULD_NOT_CONNECT = "Could not connect to mechanic api."
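# Illustrative sketch (not used by the service): formatting a couple of the constants
# above into a response payload. The email address and product id are hypothetical
# placeholders.
def _example_payload():
    return {
        "mechanic": MEC_CREATED.format("mechanic@example.com"),
        "product": PRODUCT_SAVED.format(42),
    }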
| 47.822222 | 107 | 0.777416 |
e748bf6fbc038f5225f40130b420b883e1aff89d | 17,470 | py | Python | lib/src/cgcloud/lib/ec2.py | ompcloud/cgcloud | ec97c3e6df2df549ebf45c69f16fb6d118877d9c | ["Apache-2.0"] | null | null | null | lib/src/cgcloud/lib/ec2.py | ompcloud/cgcloud | ec97c3e6df2df549ebf45c69f16fb6d118877d9c | ["Apache-2.0"] | null | null | null | lib/src/cgcloud/lib/ec2.py | ompcloud/cgcloud | ec97c3e6df2df549ebf45c69f16fb6d118877d9c | ["Apache-2.0"] | null | null | null |
import errno
import logging
import time
from collections import Iterator
from operator import attrgetter
from bd2k.util.exceptions import panic
from bd2k.util.retry import retry
from boto.ec2.ec2object import TaggedEC2Object
from boto.ec2.instance import Instance
from boto.ec2.spotinstancerequest import SpotInstanceRequest
from boto.exception import EC2ResponseError
from cgcloud.lib.util import UserError
a_short_time = 5
a_long_time = 60 * 60
log = logging.getLogger( __name__ )
def not_found( e ):
return e.error_code.endswith( '.NotFound' )
def retry_ec2( retry_after=a_short_time, retry_for=10 * a_short_time, retry_while=not_found ):
t = retry_after
return retry( delays=(t,t,t*2,t*4), timeout=retry_for, predicate=retry_while )
class EC2VolumeHelper( object ):
"""
A helper for creating, looking up and attaching an EBS volume in EC2
"""
def __init__( self, ec2, name, size, availability_zone, volume_type="standard" ):
"""
:param ec2: the Boto EC2 connection object
:type ec2: boto.ec2.connection.EC2Connection
"""
super( EC2VolumeHelper, self ).__init__( )
self.availability_zone = availability_zone
self.ec2 = ec2
self.name = name
self.volume_type = volume_type
volume = self.__lookup( )
if volume is None:
log.info( "Creating volume %s, ...", self.name )
volume = self.ec2.create_volume( size, availability_zone, volume_type=self.volume_type )
self.__wait_transition( volume, { 'creating' }, 'available' )
volume.add_tag( 'Name', self.name )
log.info( '... created %s.', volume.id )
volume = self.__lookup( )
self.volume = volume
def attach( self, instance_id, device ):
if self.volume.attach_data.instance_id == instance_id:
log.info( "Volume '%s' already attached to instance '%s'." %
(self.volume.id, instance_id) )
else:
self.__assert_attachable( )
self.ec2.attach_volume( volume_id=self.volume.id,
instance_id=instance_id,
device=device )
self.__wait_transition( self.volume, { 'available' }, 'in-use' )
if self.volume.attach_data.instance_id != instance_id:
raise UserError( "Volume %s is not attached to this instance." )
def __lookup( self ):
"""
Ensure that an EBS volume of the given name is available in the current availability zone.
If the EBS volume exists but has been placed into a different zone, or if it is not
available, an exception will be thrown.
:rtype: boto.ec2.volume.Volume
"""
volumes = self.ec2.get_all_volumes( filters={ 'tag:Name': self.name } )
if len( volumes ) < 1:
return None
if len( volumes ) > 1:
raise UserError( "More than one EBS volume named %s" % self.name )
return volumes[ 0 ]
@staticmethod
def __wait_transition( volume, from_states, to_state ):
wait_transition( volume, from_states, to_state, attrgetter( 'status' ) )
def __assert_attachable( self ):
if self.volume.status != 'available':
raise UserError( "EBS volume %s is not available." % self.name )
expected_zone = self.availability_zone
if self.volume.zone != expected_zone:
raise UserError( "Availability zone of EBS volume %s is %s but should be %s."
% (self.name, self.volume.zone, expected_zone) )
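# Illustrative sketch (not called anywhere): how EC2VolumeHelper might be used to
# create-or-look-up a 100 GB volume and attach it to an instance. The zone, instance
# id and device name are hypothetical placeholders; ec2 is an open boto EC2 connection.
def _example_volume_helper( ec2 ):
    helper = EC2VolumeHelper( ec2=ec2,
                              name='my-data-volume',
                              size=100,
                              availability_zone='us-west-2a',
                              volume_type='gp2' )
    helper.attach( instance_id='i-0123456789abcdef0', device='/dev/sdf' )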
class UnexpectedResourceState( Exception ):
def __init__( self, resource, to_state, state ):
super( UnexpectedResourceState, self ).__init__(
"Expected state of %s to be '%s' but got '%s'" %
(resource, to_state, state) )
def wait_transition( resource, from_states, to_state, state_getter=attrgetter( 'state' ) ):
"""
Wait until the specified EC2 resource (instance, image, volume, ...) transitions from any
of the given 'from' states to the specified 'to' state. If the instance is found in a state
    other than the to state or any of the from states, an exception will be thrown.
:param resource: the resource to monitor
:param from_states:
a set of states that the resource is expected to be in before the transition occurs
:param to_state: the state of the resource when this method returns
"""
state = state_getter( resource )
while state in from_states:
time.sleep( a_short_time )
for attempt in retry_ec2( ):
with attempt:
resource.update( validate=True )
state = state_getter( resource )
if state != to_state:
raise UnexpectedResourceState( resource, to_state, state )
def running_on_ec2( ):
try:
with open( '/sys/hypervisor/uuid' ) as f:
return f.read( 3 ) == 'ec2'
except IOError as e:
if e.errno == errno.ENOENT:
return False
else:
raise
from collections import namedtuple
InstanceType = namedtuple( 'InstanceType', [
'name', # the API name of the instance type
'cores', # the number of cores
'ecu', # the computational power of the core times the number of cores
'memory', # RAM in GB
'virtualization_types', # the supported virtualization types, in order of preference
'disks', # the number of ephemeral (aka 'instance store') volumes
'disk_type', # the type of ephemeral volume
'disk_capacity', # the capacity of each ephemeral volume in GB
'spot_availability' # can this instance type be used on the spot market?
] )
hvm = 'hvm' # hardware virtualization
pv = 'paravirtual' # para-virtualization
ssd = 'SSD' # solid-state disk
hdd = 'HDD' # spinning disk
variable_ecu = -1 # variable ecu
_ec2_instance_types = [
# current generation instance types
InstanceType( 't2.micro', 1, variable_ecu, 1, [ hvm ], 0, None, 0, False ),
InstanceType( 't2.small', 1, variable_ecu, 2, [ hvm ], 0, None, 0, False ),
InstanceType( 't2.medium', 2, variable_ecu, 4, [ hvm ], 0, None, 0, False ),
InstanceType( 't2.large', 2, variable_ecu, 8, [ hvm ], 0, None, 0, False ),
InstanceType( 'm3.medium', 1, 3, 3.75, [ hvm, pv ], 1, ssd, 4, True ),
InstanceType( 'm3.large', 2, 6.5, 7.5, [ hvm, pv ], 1, ssd, 32, True ),
InstanceType( 'm3.xlarge', 4, 13, 15, [ hvm, pv ], 2, ssd, 40, True ),
InstanceType( 'm3.2xlarge', 8, 26, 30, [ hvm, pv ], 2, ssd, 80, True ),
InstanceType( 'm4.large', 2, 6.5, 8, [ hvm ], 0, None, 0, True ),
InstanceType( 'm4.xlarge', 4, 13, 16, [ hvm ], 0, None, 0, True ),
InstanceType( 'm4.2xlarge', 8, 26, 32, [ hvm ], 0, None, 0, True ),
InstanceType( 'm4.4xlarge', 16, 53.5, 64, [ hvm ], 0, None, 0, True ),
InstanceType( 'm4.10xlarge', 40, 124.5, 160, [ hvm ], 0, None, 0, True ),
InstanceType( 'c4.large', 2, 8, 3.75, [ hvm ], 0, None, 0, True ),
InstanceType( 'c4.xlarge', 4, 16, 7.5, [ hvm ], 0, None, 0, True ),
InstanceType( 'c4.2xlarge', 8, 31, 15, [ hvm ], 0, None, 0, True ),
InstanceType( 'c4.4xlarge', 16, 62, 30, [ hvm ], 0, None, 0, True ),
InstanceType( 'c4.8xlarge', 36, 132, 60, [ hvm ], 0, None, 0, True ),
InstanceType( 'c3.large', 2, 7, 3.75, [ hvm, pv ], 2, ssd, 16, True ),
InstanceType( 'c3.xlarge', 4, 14, 7.5, [ hvm, pv ], 2, ssd, 40, True ),
InstanceType( 'c3.2xlarge', 8, 28, 15, [ hvm, pv ], 2, ssd, 80, True ),
InstanceType( 'c3.4xlarge', 16, 55, 30, [ hvm, pv ], 2, ssd, 160, True ),
InstanceType( 'c3.8xlarge', 32, 108, 60, [ hvm, pv ], 2, ssd, 320, True ),
InstanceType( 'g2.2xlarge', 8, 26, 15, [ hvm ], 1, ssd, 60, True ),
InstanceType( 'r3.large', 2, 6.5, 15, [ hvm ], 1, ssd, 32, True ),
InstanceType( 'r3.xlarge', 4, 13, 30.5, [ hvm ], 1, ssd, 80, True ),
InstanceType( 'r3.2xlarge', 8, 26, 61, [ hvm ], 1, ssd, 160, True ),
InstanceType( 'r3.4xlarge', 16, 52, 122, [ hvm ], 1, ssd, 320, True ),
InstanceType( 'r3.8xlarge', 32, 104, 244, [ hvm ], 2, ssd, 320, True ),
InstanceType( 'i2.xlarge', 4, 14, 30.5, [ hvm ], 1, ssd, 800, False ),
InstanceType( 'i2.2xlarge', 8, 27, 61, [ hvm ], 2, ssd, 800, False ),
InstanceType( 'i2.4xlarge', 16, 53, 122, [ hvm ], 4, ssd, 800, False ),
InstanceType( 'i2.8xlarge', 32, 104, 244, [ hvm ], 8, ssd, 800, False ),
InstanceType( 'd2.xlarge', 4, 14, 30.5, [ hvm ], 3, hdd, 2000, True ),
InstanceType( 'd2.2xlarge', 8, 28, 61, [ hvm ], 6, hdd, 2000, True ),
InstanceType( 'd2.4xlarge', 16, 56, 122, [ hvm ], 12, hdd, 2000, True ),
InstanceType( 'd2.8xlarge', 36, 116, 244, [ hvm ], 24, hdd, 2000, True ),
# previous generation instance types
InstanceType( 'm1.small', 1, 1, 1.7, [ pv ], 1, hdd, 160, True ),
InstanceType( 'm1.medium', 1, 2, 3.75, [ pv ], 1, hdd, 410, True ),
InstanceType( 'm1.large', 2, 4, 7.5, [ pv ], 2, hdd, 420, True ),
InstanceType( 'm1.xlarge', 4, 8, 15, [ pv ], 4, hdd, 420, True ),
InstanceType( 'c1.medium', 2, 5, 1.7, [ pv ], 1, hdd, 350, True ),
InstanceType( 'c1.xlarge', 8, 20, 7, [ pv ], 4, hdd, 420, True ),
InstanceType( 'cc2.8xlarge', 32, 88, 60.5, [ hvm ], 4, hdd, 840, True ),
InstanceType( 'm2.xlarge', 2, 6.5, 17.1, [ pv ], 1, hdd, 420, True ),
InstanceType( 'm2.2xlarge', 4, 13, 34.2, [ pv ], 1, hdd, 850, True ),
InstanceType( 'm2.4xlarge', 8, 26, 68.4, [ pv ], 2, hdd, 840, True ),
InstanceType( 'cr1.8xlarge', 32, 88, 244, [ hvm ], 2, ssd, 120, True ),
InstanceType( 'hi1.4xlarge', 16, 35, 60.5, [ hvm, pv ], 2, ssd, 1024, True ),
InstanceType( 'hs1.8xlarge', 16, 35, 117, [ hvm, pv ], 24, hdd, 2048, False ),
InstanceType( 't1.micro', 1, variable_ecu, 0.615, [ pv ], 0, None, 0, True ) ]
ec2_instance_types = dict( (_.name, _) for _ in _ec2_instance_types )
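# Illustrative sketch (not called anywhere): looking up the catalogue above. The
# chosen type name is only an example; the asserted values come from the table.
def _example_instance_type_lookup( ):
    m3_large = ec2_instance_types[ 'm3.large' ]
    assert m3_large.cores == 2 and m3_large.memory == 7.5
    assert hvm in m3_large.virtualization_types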
def wait_instances_running( ec2, instances ):
"""
Wait until no instance in the given iterable is 'pending'. Yield every instance that
entered the running state as soon as it does.
:param boto.ec2.connection.EC2Connection ec2: the EC2 connection to use for making requests
:param Iterator[Instance] instances: the instances to wait on
:rtype: Iterator[Instance]
"""
running_ids = set( )
other_ids = set( )
while True:
pending_ids = set( )
for i in instances:
if i.state == 'pending':
pending_ids.add( i.id )
elif i.state == 'running':
assert i.id not in running_ids
running_ids.add( i.id )
yield i
else:
assert i.id not in other_ids
other_ids.add( i.id )
yield i
log.info( '%i instance(s) pending, %i running, %i other.',
*map( len, (pending_ids, running_ids, other_ids) ) )
if not pending_ids:
break
seconds = max( a_short_time, min( len( pending_ids ), 10 * a_short_time ) )
log.info( 'Sleeping for %is', seconds )
time.sleep( seconds )
for attempt in retry_ec2( ):
with attempt:
instances = ec2.get_only_instances( list( pending_ids ) )
def wait_spot_requests_active( ec2, requests, timeout=None, tentative=False ):
"""
Wait until no spot request in the given iterator is in the 'open' state or, optionally,
a timeout occurs. Yield spot requests as soon as they leave the 'open' state.
:param Iterator[SpotInstanceRequest] requests:
:param float timeout: Maximum time in seconds to spend waiting or None to wait forever. If a
timeout occurs, the remaining open requests will be cancelled.
:param bool tentative: if True, give up on a spot request at the earliest indication of it
not being fulfilled immediately
:rtype: Iterator[list[SpotInstanceRequest]]
"""
if timeout is not None:
timeout = time.time( ) + timeout
active_ids = set( )
other_ids = set( )
open_ids = None
def cancel( ):
log.warn( 'Cancelling remaining %i spot requests.', len( open_ids ) )
ec2.cancel_spot_instance_requests( list( open_ids ) )
def spot_request_not_found( e ):
error_code = 'InvalidSpotInstanceRequestID.NotFound'
return isinstance( e, EC2ResponseError ) and e.error_code == error_code
try:
while True:
open_ids, eval_ids, fulfill_ids = set( ), set( ), set( )
batch = [ ]
for r in requests:
if r.state == 'open':
open_ids.add( r.id )
if r.status.code == 'pending-evaluation':
eval_ids.add( r.id )
elif r.status.code == 'pending-fulfillment':
fulfill_ids.add( r.id )
else:
log.info( 'Request %s entered status %s indicating that it will not be '
'fulfilled anytime soon.', r.id, r.status.code )
elif r.state == 'active':
assert r.id not in active_ids
active_ids.add( r.id )
batch.append( r )
else:
assert r.id not in other_ids
other_ids.add( r.id )
batch.append( r )
if batch:
yield batch
log.info( '%i spot requests(s) are open (%i of which are pending evaluation and %i '
'are pending fulfillment), %i are active and %i are in another state.',
*map( len, (open_ids, eval_ids, fulfill_ids, active_ids, other_ids) ) )
if not open_ids or tentative and not eval_ids and not fulfill_ids:
break
sleep_time = 2 * a_short_time
if timeout is not None and time.time( ) + sleep_time >= timeout:
log.warn( 'Timed out waiting for spot requests.' )
break
log.info( 'Sleeping for %is', sleep_time )
time.sleep( sleep_time )
for attempt in retry_ec2( retry_while=spot_request_not_found ):
with attempt:
requests = ec2.get_all_spot_instance_requests( list( open_ids ) )
except:
if open_ids:
with panic( log ):
cancel( )
raise
else:
if open_ids:
cancel( )
def create_spot_instances( ec2, price, image_id, spec,
num_instances=1, timeout=None, tentative=False ):
"""
:rtype: Iterator[list[Instance]]
"""
for attempt in retry_ec2( retry_for=a_long_time,
retry_while=inconsistencies_detected ):
with attempt:
requests = ec2.request_spot_instances( price, image_id, count=num_instances, **spec )
num_active, num_other = 0, 0
# noinspection PyUnboundLocalVariable,PyTypeChecker
# request_spot_instances's type annotation is wrong
for batch in wait_spot_requests_active( ec2,
requests,
timeout=timeout,
tentative=tentative ):
instance_ids = [ ]
for request in batch:
if request.state == 'active':
instance_ids.append( request.instance_id )
num_active += 1
else:
log.info( 'Request %s in unexpected state %s.', request.id, request.state )
num_other += 1
if instance_ids:
# This next line is the reason we batch. It's so we can get multiple instances in
# a single request.
yield ec2.get_only_instances( instance_ids )
if not num_active:
message = 'None of the spot requests entered the active state'
if tentative:
log.warn( message + '.' )
else:
raise RuntimeError( message )
if num_other:
log.warn( '%i request(s) entered a state other than active.', num_other )
def inconsistencies_detected( e ):
if e.code == 'InvalidGroup.NotFound': return True
m = e.error_message.lower( )
return 'invalid iam instance profile' in m or 'no associated iam roles' in m
def create_ondemand_instances( ec2, image_id, spec, num_instances=1 ):
"""
Requests the RunInstances EC2 API call but accounts for the race between recently created
instance profiles, IAM roles and an instance creation that refers to them.
:rtype: list[Instance]
"""
instance_type = spec[ 'instance_type' ]
log.info( 'Creating %s instance(s) ... ', instance_type )
for attempt in retry_ec2( retry_for=a_long_time,
retry_while=inconsistencies_detected ):
with attempt:
return ec2.run_instances( image_id,
min_count=num_instances,
max_count=num_instances,
**spec ).instances
def tag_object_persistently( tagged_ec2_object, tags_dict ):
"""
Object tagging occasionally fails with "NotFound" types of errors so we need to
retry a few times. Sigh ...
:type tagged_ec2_object: TaggedEC2Object
"""
for attempt in retry_ec2( ):
with attempt:
tagged_ec2_object.add_tags( tags_dict )
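# Illustrative sketch (not called anywhere): combining the helpers above to launch an
# on-demand instance, wait for it to reach the running state and tag it. The image id,
# keypair and tag value are hypothetical placeholders; ec2 is an open boto connection.
def _example_launch_and_tag( ec2 ):
    spec = dict( instance_type='m3.large', key_name='my-keypair' )
    instances = create_ondemand_instances( ec2, 'ami-0123456789abcdef0', spec )
    for instance in wait_instances_running( ec2, instances ):
        tag_object_persistently( instance, dict( Name='example-box' ) )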
| 41.794258 | 100 | 0.5917 |
73c45d840a7f3512a27b0f7cd808368a4b77c418 | 10,115 | py | Python | gcloud/iam_auth/conf.py | wkma/bk-sops | 8fb5609c0c4495c28d588fbafa9d9f5f2976929b | ["Apache-2.0"] | 2 | 2021-07-28T01:48:31.000Z | 2021-11-17T11:02:26.000Z | gcloud/iam_auth/conf.py | wkma/bk-sops | 8fb5609c0c4495c28d588fbafa9d9f5f2976929b | ["Apache-2.0"] | null | null | null | gcloud/iam_auth/conf.py | wkma/bk-sops | 8fb5609c0c4495c28d588fbafa9d9f5f2976929b | ["Apache-2.0"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from iam import meta
SYSTEM_ID = settings.BK_IAM_SYSTEM_ID
SYSTEM_INFO = [
{"id": SYSTEM_ID, "name": settings.APP_NAME},
]
SEARCH_INSTANCE_CACHE_TIME = 60 * 10
RESOURCES = [
{"id": "project", "name": _("项目"), "parent_id": None},
{"id": "flow", "name": _("流程模板"), "parent_id": "project"},
{"id": "task", "name": _("任务实例"), "parent_id": "project"},
{"id": "common_flow", "name": _("公共流程"), "parent_id": None},
{"id": "mini_app", "name": _("轻应用"), "parent_id": "project"},
{"id": "periodic_task", "name": _("周期任务"), "parent_id": "project"},
]
ACTIONS = [
{
"id": "project_create",
"name": _("创建项目"),
"relate_resources": ["project"],
"relate_actions": [],
"resource_topo": ["project"],
},
{
"id": "project_view",
"name": _("查看项目"),
"relate_resources": ["project"],
"relate_actions": [],
"resource_topo": ["project"],
},
{
"id": "project_edit",
"name": _("编辑项目"),
"relate_resources": ["project"],
"relate_actions": ["project_view"],
"resource_topo": ["project"],
},
{
"id": "flow_create",
"name": _("创建流程"),
"relate_resources": ["project"],
"relate_actions": ["project_view"],
"resource_topo": ["project", "flow"],
},
{
"id": "project_fast_create_task",
"name": _("快速新建一次性任务"),
"relate_resources": ["project"],
"relate_actions": ["project_view"],
"resource_topo": ["project", "flow"],
},
{
"id": "flow_view",
"name": _("查看流程"),
"relate_resources": ["flow"],
"relate_actions": [],
"resource_topo": ["project", "flow"],
},
{
"id": "flow_edit",
"name": _("编辑流程"),
"relate_resources": ["flow"],
"relate_actions": ["flow_view"],
"resource_topo": ["project", "flow"],
},
{
"id": "flow_delete",
"name": _("删除流程"),
"relate_resources": ["flow"],
"relate_actions": ["flow_view"],
"resource_topo": ["project", "flow"],
},
{
"id": "flow_create_task",
"name": _("使用流程新建任务"),
"relate_resources": ["flow"],
"relate_actions": ["flow_view"],
"resource_topo": ["project", "flow"],
},
{
"id": "flow_create_mini_app",
"name": _("新建轻应用"),
"relate_resources": ["flow"],
"relate_actions": ["flow_view"],
"resource_topo": ["project", "flow"],
},
{
"id": "flow_create_periodic_task",
"name": _("新建周期任务"),
"relate_resources": ["flow"],
"relate_actions": ["flow_view"],
"resource_topo": ["project", "flow"],
},
{
"id": "task_view",
"name": _("查看任务"),
"relate_resources": ["task"],
"relate_actions": [],
"resource_topo": ["project", "task"],
},
{
"id": "task_edit",
"name": _("编辑任务"),
"relate_resources": ["task"],
"relate_actions": ["task_view"],
"resource_topo": ["project", "task"],
},
{
"id": "task_operate",
"name": _("操作任务"),
"relate_resources": ["task"],
"relate_actions": ["task_view"],
"resource_topo": ["project", "task"],
},
{
"id": "task_claim",
"name": _("认领任务"),
"relate_resources": ["task"],
"relate_actions": ["task_view"],
"resource_topo": ["project", "task"],
},
{
"id": "task_delete",
"name": _("删除任务"),
"relate_resources": ["task"],
"relate_actions": ["task_view"],
"resource_topo": ["project", "task"],
},
{
"id": "task_clone",
"name": _("克隆任务"),
"relate_resources": ["task"],
"relate_actions": ["task_view"],
"resource_topo": ["project", "task"],
},
{
"id": "common_flow_create",
"name": _("新建公共流程"),
"relate_resources": [],
"relate_actions": [],
"resource_topo": [],
},
{
"id": "common_flow_view",
"name": _("查看公共流程"),
"relate_resources": ["common_flow"],
"relate_actions": [],
"resource_topo": ["common_flow"],
},
{
"id": "common_flow_edit",
"name": _("编辑公共流程"),
"relate_resources": ["common_flow"],
"relate_actions": ["common_flow_view"],
"resource_topo": ["common_flow"],
},
{
"id": "common_flow_delete",
"name": _("删除公共流程"),
"relate_resources": ["common_flow"],
"relate_actions": ["common_flow_view"],
"resource_topo": ["common_flow"],
},
{
"id": "common_flow_create_task",
"name": _("使用公共流程新建任务"),
"relate_resources": ["common_flow", "project"],
"relate_actions": ["common_flow_view", "project_view"],
"resource_topo": ["common_flow"],
},
{
"id": "common_flow_create_periodic_task",
"name": _("使用公共流程新建周期任务"),
"relate_resources": ["common_flow", "project"],
"relate_actions": ["common_flow_view", "project_view"],
"resource_topo": ["common_flow"],
},
{
"id": "mini_app_view",
"name": _("查看轻应用"),
"relate_resources": ["mini_app"],
"relate_actions": [],
"resource_topo": ["project", "mini_app"],
},
{
"id": "mini_app_edit",
"name": _("编辑轻应用"),
"relate_resources": ["mini_app"],
"relate_actions": ["mini_app_view"],
"resource_topo": ["project", "mini_app"],
},
{
"id": "mini_app_delete",
"name": _("删除轻应用"),
"relate_resources": ["mini_app"],
"relate_actions": ["mini_app_view"],
"resource_topo": ["project", "mini_app"],
},
{
"id": "mini_app_create_task",
"name": _("使用轻应用创建任务"),
"relate_resources": ["mini_app"],
"relate_actions": ["mini_app_view"],
"resource_topo": ["project", "mini_app"],
},
{
"id": "periodic_task_view",
"name": _("查看周期任务"),
"relate_resources": ["periodic_task"],
"relate_actions": [],
"resource_topo": ["project", "periodic_task"],
},
{
"id": "periodic_task_edit",
"name": _("编辑周期任务"),
"relate_resources": ["periodic_task"],
"relate_actions": ["periodic_task_view"],
"resource_topo": ["project", "periodic_task"],
},
{
"id": "periodic_task_delete",
"name": _("删除周期任务"),
"relate_resources": ["periodic_task"],
"relate_actions": ["periodic_task_view"],
"resource_topo": ["project", "periodic_task"],
},
{"id": "admin_view", "name": _("后台管理查看"), "relate_resources": [], "relate_actions": []},
{"id": "admin_edit", "name": _("后台管理编辑"), "relate_resources": [], "relate_actions": []},
{"id": "audit_view", "name": _("查看审计中心"), "relate_resources": [], "relate_actions": []},
{"id": "function_view", "name": _("查看职能化中心"), "relate_resources": [], "relate_actions": []},
{"id": "statistics_view", "name": _("查看数据统计"), "relate_resources": [], "relate_actions": []},
]
class IAMMeta(object):
SYSTEM_ID = SYSTEM_ID
PROJECT_RESOURCE = "project"
FLOW_RESOURCE = "flow"
TASK_RESOURCE = "task"
COMMON_FLOW_RESOURCE = "common_flow"
MINI_APP_RESOURCE = "mini_app"
PERIODIC_TASK_RESOURCE = "periodic_task"
PROJECT_VIEW_ACTION = "project_view"
PROJECT_EDIT_ACTION = "project_edit"
PROJECT_FAST_CREATE_TASK_ACTION = "project_fast_create_task"
FLOW_CREATE_ACTION = "flow_create"
FLOW_VIEW_ACTION = "flow_view"
FLOW_EDIT_ACTION = "flow_edit"
FLOW_DELETE_ACTION = "flow_delete"
FLOW_CREATE_TASK_ACTION = "flow_create_task"
FLOW_CREATE_MINI_APP_ACTION = "flow_create_mini_app"
FLOW_CREATE_PERIODIC_TASK_ACTION = "flow_create_periodic_task"
TASK_VIEW_ACTION = "task_view"
TASK_EDIT_ACTION = "task_edit"
TASK_OPERATE_ACTION = "task_operate"
TASK_CLAIM_ACTION = "task_claim"
TASK_DELETE_ACTION = "task_delete"
TASK_CLONE_ACTION = "task_clone"
COMMON_FLOW_CREATE_TASK_ACTION = "common_flow_create_task"
COMMON_FLOW_CREATE_ACTION = "common_flow_create"
COMMON_FLOW_VIEW_ACTION = "common_flow_view"
COMMON_FLOW_EDIT_ACTION = "common_flow_edit"
COMMON_FLOW_DELETE_ACTION = "common_flow_delete"
COMMON_FLOW_CREATE_PERIODIC_TASK_ACTION = "common_flow_create_periodic_task"
MINI_APP_VIEW_ACTION = "mini_app_view"
MINI_APP_EDIT_ACTION = "mini_app_edit"
MINI_APP_DELETE_ACTION = "mini_app_delete"
MINI_APP_CREATE_TASK_ACTION = "mini_app_create_task"
PERIODIC_TASK_VIEW_ACTION = "periodic_task_view"
PERIODIC_TASK_EDIT_ACTION = "periodic_task_edit"
PERIODIC_TASK_DELETE_ACTION = "periodic_task_delete"
ADMIN_VIEW_ACTION = "admin_view"
ADMIN_EDIT_ACTION = "admin_edit"
AUDIT_VIEW_ACTION = "audit_view"
FUNCTION_VIEW_ACTION = "function_view"
STATISTICS_VIEW_ACTION = "statistics_view"
for system in SYSTEM_INFO:
meta.setup_system(system["id"], system["name"])
for resource in RESOURCES:
meta.setup_resource(SYSTEM_ID, resource["id"], resource["name"])
for action in ACTIONS:
meta.setup_action(SYSTEM_ID, action["id"], action["name"])
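# Illustrative sketch (not called anywhere): a quick sanity check, using only names
# defined in this module, that every *_ACTION constant on IAMMeta refers to an action
# registered in ACTIONS above.
def _check_iam_meta_actions():
    registered = {action["id"] for action in ACTIONS}
    declared = {value for name, value in vars(IAMMeta).items() if name.endswith("_ACTION")}
    missing = declared - registered
    assert not missing, "IAMMeta refers to unregistered actions: {}".format(missing)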
| 32.009494 | 115 | 0.581809 |
0690ea2fa92764711ec8101b43b1138d2377dcde | 271 | py | Python | playerhand.py | dlavery/pokerpractice | 80f89974be4a08fc8f7d1c94b0ebfb7d5758b3e4 | ["MIT"] | null | null | null | playerhand.py | dlavery/pokerpractice | 80f89974be4a08fc8f7d1c94b0ebfb7d5758b3e4 | ["MIT"] | null | null | null | playerhand.py | dlavery/pokerpractice | 80f89974be4a08fc8f7d1c94b0ebfb7d5758b3e4 | ["MIT"] | null | null | null |
class PlayerHand:
# player's hand
def __init__(self):
self.__hand = []
def deal(self, card):
# receive a card
self.__hand.append(card)
def gethand(self):
return self.__hand
def burnhand(self):
self.__hand = []
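# Illustrative sketch (runs only when the module is executed directly): dealing a
# couple of cards and clearing the hand. The card codes are arbitrary placeholders.
if __name__ == "__main__":
    hand = PlayerHand()
    hand.deal("AS")  # ace of spades
    hand.deal("KD")  # king of diamonds
    print(hand.gethand())  # ['AS', 'KD']
    hand.burnhand()
    print(hand.gethand())  # []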
| 15.941176 | 32 | 0.564576 |
a27d44ad455fbc99f3e825520cfd1566a03d1296 | 542 | py | Python | packages/pyright-internal/src/tests/samples/unpack1.py | steverice/pyright | 27fb622a5e87597e03d1e204eae1ef1382ab5193 | ["MIT"] | null | null | null | packages/pyright-internal/src/tests/samples/unpack1.py | steverice/pyright | 27fb622a5e87597e03d1e204eae1ef1382ab5193 | ["MIT"] | 3 | 2022-03-03T03:03:24.000Z | 2022-03-25T14:43:54.000Z | packages/pyright-internal/src/tests/samples/unpack1.py | isabella232/pyright | a192486099503353413e02078c41d0d82bd696e8 | ["MIT"] | null | null | null |
# This sample tests the type checker's handling of the unpack operator.
# pyright: strict
class Foo: ...
class Bar: ...
a = [1, 'hello', 3.4, Foo()]
b = [*a]
def int_only(a: int): ...
for c in b:
if not isinstance(c, (float, str)):
        # This should generate an error because c can
        # be an int or Foo.
int_only(c)
if not isinstance(c, Foo):
# This should not generate an error.
int_only(c)
# This should generate an error
x1 = *(1, 2, 3)
x2 = 2, *(1, 2, 3)
x3 = *(1, 2, 3), 2
| 18.066667 | 71 | 0.555351 |
57c6406a8f4b7dd0a901391e79d085141903ac3c | 481 | py | Python | back-end/quiz/migrations/0003_auto_20201203_1410.py | Lukmanhakim112/guro | 6be3448f076c28f5495252f8da91545553e79d01 | ["MIT"] | null | null | null | back-end/quiz/migrations/0003_auto_20201203_1410.py | Lukmanhakim112/guro | 6be3448f076c28f5495252f8da91545553e79d01 | ["MIT"] | null | null | null | back-end/quiz/migrations/0003_auto_20201203_1410.py | Lukmanhakim112/guro | 6be3448f076c28f5495252f8da91545553e79d01 | ["MIT"] | null | null | null |
# Generated by Django 3.1.3 on 2020-12-03 14:10
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('quiz', '0002_score_score'),
]
operations = [
migrations.AlterField(
model_name='question',
name='quiz',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='question', to='quiz.quiz'),
),
]
| 24.05 | 122 | 0.636175 |
7683debdb79f13052c5c801fb12af4a140a9628e | 1,995 | py | Python | src/users/models/microsoftgraphworkbook.py | peombwa/Sample-Graph-Python-Client | 3396f531fbe6bb40a740767c4e31aee95a3b932e | ["MIT"] | null | null | null | src/users/models/microsoftgraphworkbook.py | peombwa/Sample-Graph-Python-Client | 3396f531fbe6bb40a740767c4e31aee95a3b932e | ["MIT"] | null | null | null | src/users/models/microsoftgraphworkbook.py | peombwa/Sample-Graph-Python-Client | 3396f531fbe6bb40a740767c4e31aee95a3b932e | ["MIT"] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class Microsoftgraphworkbook(Model):
"""Microsoftgraphworkbook.
:param id:
:type id: str
:param application:
:type application: ~users.models.MicrosoftgraphworkbookApplication
:param names:
:type names: list[~users.models.MicrosoftgraphworkbookNamedItem]
:param tables:
:type tables: list[~users.models.MicrosoftgraphworkbookTable]
:param worksheets:
:type worksheets: list[~users.models.MicrosoftgraphworkbookWorksheet]
:param comments:
:type comments: list[~users.models.MicrosoftgraphworkbookComment]
:param functions:
:type functions: ~users.models.MicrosoftgraphworkbookFunctions
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'application': {'key': 'application', 'type': 'MicrosoftgraphworkbookApplication'},
'names': {'key': 'names', 'type': '[MicrosoftgraphworkbookNamedItem]'},
'tables': {'key': 'tables', 'type': '[MicrosoftgraphworkbookTable]'},
'worksheets': {'key': 'worksheets', 'type': '[MicrosoftgraphworkbookWorksheet]'},
'comments': {'key': 'comments', 'type': '[MicrosoftgraphworkbookComment]'},
'functions': {'key': 'functions', 'type': 'MicrosoftgraphworkbookFunctions'},
}
def __init__(self, id=None, application=None, names=None, tables=None, worksheets=None, comments=None, functions=None):
super(Microsoftgraphworkbook, self).__init__()
self.id = id
self.application = application
self.names = names
self.tables = tables
self.worksheets = worksheets
self.comments = comments
self.functions = functions
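# Illustrative sketch (not called anywhere): constructing the model with a few of the
# keyword arguments declared above and reading them back. The id value is a placeholder.
def _example_workbook():
    workbook = Microsoftgraphworkbook(id='01ABCDEF', names=[], tables=[])
    return workbook.id, workbook.names, workbook.tables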
| 40.714286 | 123 | 0.629574 |
b49a8dfbd135e68688630e12f1566b990248f71e | 7,372 | py | Python | kitectl/release.py | cihangir/kite | 17e712e6fea3cae07fe3c792d3e0d6b8225c1125 | ["MIT"] | 3,107 | 2015-01-04T15:00:31.000Z | 2022-03-31T05:59:01.000Z | kitectl/release.py | cihangir/kite | 17e712e6fea3cae07fe3c792d3e0d6b8225c1125 | ["MIT"] | 176 | 2019-12-27T09:01:42.000Z | 2021-08-03T06:19:39.000Z | kitectl/release.py | cihangir/kite | 17e712e6fea3cae07fe3c792d3e0d6b8225c1125 | ["MIT"] | 356 | 2015-01-13T04:05:46.000Z | 2022-03-04T06:01:38.000Z |
#!/usr/bin/env python2.7
"""
A script for packaging and releasing kite tool for OS X and Linux platforms.
It can also upload the generated package file to S3 if you provide --upload flag.
usage: release.py [-h] [--upload]
Run it at the same directory as this script. It will put the generated files
into the current working directory.
On OS X, the brew formula can be installed with the following command:
brew install kite.rb
On Linux, the deb package can be installed with the following command:
dpkg -i kite-0.0.1-linux.deb
"""
import argparse
import hashlib
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile
import boto
from boto.s3.key import Key
BREW_FORMULA = """\
require 'formula'
class Kite < Formula
homepage 'http://kite.koding.com'
# url and sha1 needs to be changed after new binary is uploaded.
url '{url}'
sha1 '{sha1}'
def install
bin.install "kite"
end
def test
system "#{{bin}}/kite", "version"
end
end
"""
DEB_CONTROL = """\
Package: kite
Version: {version}
Section: utils
Priority: optional
Architecture: amd64
Essential: no
Maintainer: Koding Developers <hello@koding.com>
Description: Kite command-line tool.
"""
def build_osx(binpath, version):
print "Making tar file..."
tarname = "kite-%s-osx.tar.gz" % version
with tarfile.open(tarname, "w:gz") as tar:
tar.add(binpath, arcname="kite")
return tarname
def build_linux(binpath, version):
workdir = tempfile.mkdtemp()
try:
debname = "kite-%s-linux" % version
packagedir = os.path.join(workdir, debname)
os.mkdir(packagedir)
debiandir = os.path.join(packagedir, "DEBIAN")
os.mkdir(debiandir)
controlpath = os.path.join(debiandir, "control")
with open(controlpath, "w") as f:
f.write(DEB_CONTROL.format(version=version))
usrdir = os.path.join(packagedir, "usr")
os.mkdir(usrdir)
bindir = os.path.join(usrdir, "bin")
os.mkdir(bindir)
shutil.move(binpath, bindir)
debfile = "%s.deb" % debname
subprocess.check_call(["fakeroot", "dpkg-deb", "--build",
packagedir, debfile])
return debfile
finally:
shutil.rmtree(workdir)
def postbuild_osx(package_name, args, bucket, package_s3_key):
if args.upload:
url = package_s3_key.generate_url(expires_in=0, query_auth=False)
else:
# For testing "brew install" locally
url = "http://127.0.0.1:8000/%s" % package_name
print "Generating formula..."
sha1 = sha1_file(package_name)
formula_str = BREW_FORMULA.format(url=url, sha1=sha1)
with open("kite.rb", "w") as f:
f.write(formula_str)
if args.upload:
print "Uploading new brew formula..."
formula_key = Key(bucket)
formula_key.key = "kite.rb"
formula_key.set_contents_from_string(formula_str)
formula_key.make_public()
formula_url = formula_key.generate_url(expires_in=0, query_auth=False)
print "kite tool has been uplaoded successfully.\n" \
"Users can install it with:\n " \
"brew install \"%s\"" % formula_url
else:
print "Did not upload to S3. " \
"If you want to upload, run with --upload flag."
def postbuild_linux(package_name, args, bucket, package_s3_key):
if args.upload:
print "Uploading again as kite-latest.linux.deb ..."
latest = Key(bucket)
latest.key = "kite-latest-linux.deb"
latest.set_contents_from_filename(package_name)
latest.make_public()
print "Uploaded:", latest.generate_url(expires_in=0, query_auth=False)
def main():
parser = argparse.ArgumentParser(
description="Compile kite tool and upload to S3.")
parser.add_argument('--upload', action='store_true', help="upload to s3")
parser.add_argument('--overwrite', action='store_true', help="overwrite existing package")
args = parser.parse_args()
if args.upload:
aws_key = os.environ['AWS_KEY']
aws_secret = os.environ['AWS_SECRET']
workdir = tempfile.mkdtemp()
try:
tardir = os.path.join(workdir, "kite") # dir to be tarred
os.mkdir(tardir)
binpath = os.path.join(tardir, "kite")
cmd = "go build -o %s %s" % (binpath, "kite/main.go")
env = os.environ.copy()
env["GOARCH"] = "amd64" # we only build for 64-bit
env["CGO_ENABLED"] = "1" # cgo must be enabled for some functions to run correctly
# Decide on platform (osx, linux, etc.)
if sys.platform.startswith("linux"):
env["GOOS"] = "linux"
platform = "linux"
elif sys.platform.startswith("darwin"):
env["GOOS"] = "darwin"
platform = "osx"
else:
print "%s platform is not supported" % sys.platform
sys.exit(1)
# Compile kite tool source code
print "Building for platform: %s" % platform
try:
subprocess.check_call(cmd.split(), env=env)
except subprocess.CalledProcessError:
print "Cannot compile kite tool. Try manually."
sys.exit(1)
# Get the version number from compiled binary
version = subprocess.check_output([binpath, "version"]).strip()
assert len(version.split(".")) == 3, "Please use 3-digits versioning"
print "Version:", version
# Build platform specific package
build_function = globals()["build_%s" % platform]
package = build_function(binpath, version)
if not os.path.exists(package):
print "Build is unsuccessful."
sys.exit(1)
print "Generated package:", package
# Upload to Amazon S3
bucket = package_key = None
if args.upload:
print "Uploading to Amazon S3..."
s3_connection = boto.connect_s3(aws_key, aws_secret)
bucket = s3_connection.get_bucket('kite-cli')
package_key = Key(bucket)
package_key.key = package
if package_key.exists() and not args.overwrite:
print "This version is already uploaded. " \
"Please do not overwrite the uploaded version, " \
"increment the version number and upload it again. " \
"If you must, you can use --overwrite option."
sys.exit(1)
package_key.set_contents_from_filename(package)
package_key.make_public()
url = package_key.generate_url(expires_in=0, query_auth=False)
print "Package is uploaded to S3:", url
# Run post-build actions
postbuild_function = globals().get("postbuild_%s" % platform)
if postbuild_function:
postbuild_function(package, args, bucket, package_key)
finally:
shutil.rmtree(workdir)
def sha1_file(path):
"""Calculate sha1 of path. Read file in chunks."""
assert os.path.isfile(path)
chunk_size = 1024 * 1024 # 1M
sha1_checksum = hashlib.sha1()
with open(path, "rb") as f:
byte = f.read(chunk_size)
while byte:
sha1_checksum.update(byte)
byte = f.read(chunk_size)
return sha1_checksum.hexdigest()
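# Illustrative sketch (not called anywhere): how the templates above get filled in.
# The version string, download URL and tarball path are hypothetical placeholders.
def _example_render_templates(tarname):
    control = DEB_CONTROL.format(version="0.0.1")
    formula = BREW_FORMULA.format(url="http://127.0.0.1:8000/%s" % tarname,
                                  sha1=sha1_file(tarname))
    return control, formula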
if __name__ == "__main__":
main()
| 31.775862 | 94 | 0.625475 |
5fd789836e68e5500bac265a50945d9cfa65eb4d | 2,369 | py | Python | examples/com.xxjr.cfs/test_login.py | 100440175/facebook-wda | d335b5506c210e15a220d6734510f9c40d11261d | ["MIT"] | null | null | null | examples/com.xxjr.cfs/test_login.py | 100440175/facebook-wda | d335b5506c210e15a220d6734510f9c40d11261d | ["MIT"] | null | null | null | examples/com.xxjr.cfs/test_login.py | 100440175/facebook-wda | d335b5506c210e15a220d6734510f9c40d11261d | ["MIT"] | null | null | null |
#coding=utf-8
import os
import time
import unittest
import wda
bundle_id = 'com.xxjr.cfsApp'
c = wda.Client('http://localhost:8100')
# USERNAME = os.getenv('4080')
# PASSWORD = os.getenv('12345678')
USERNAME = '4080'
PASSWORD = '12345678'
def setup_function():
    # Before each test, make sure the account is logged in
global d
d = create_session()
account_login(d)
def teardown_function():
account_logout(d)
    d.close()  # close the app once the test is finished
# s = create_session()
# account_logout(s)
# s.close()
def alert_callback(session):
btns = set([u'不再提醒', 'OK', u'知道了', 'Allow', u'允许']).intersection(session.alert.buttons())
if len(btns) == 0:
raise RuntimeError("Alert can not handled, buttons: " + ', '.join(session.alert.buttons()))
session.alert.click(list(btns)[0])
def create_session():
d = c.session(bundle_id)
d.set_alert_callback(alert_callback)
return d
def account_login(d):
    # Enter the username and password and verify that login succeeded
d(type="TextField").clear_text()
d(type="TextField").set_text(USERNAME)
d(label=u"return").tap()
d(type="SecureTextField").tap()
d(type="SecureTextField").set_text(PASSWORD)
d(label=u"Done").tap()
d(label=u"登录", className='Button').tap()
assert d(label=u"首页").wait() # 等待8s
print("登录成功")
def account_logout(d):
d(label=u"user head", type='Button').tap() # not support \s, wired
# d(label=u"退出登录").scroll().tap()
d(label=u"退出登录").tap()
d.alert.click(u'确定')
# d(label=u"确定").tap()
assert d(label=u"登录").wait()
print("已退出登陆")
def test_new_loan():
"""
测试 新建报单
"""
assert d(label=u"首页").wait() # 等待8s
print("开始新建报单>>>")
print("点击快速报单>>>")
assert d(label=u"快速报单").wait()
d(name=u"home_icon_augment").tap() # 默认会寻找10s,所以不用担心点不到
print("点击快速报单成功>>>")
# assert d(label=u"报单信息").wait(timeout=120)
d(label=u"新增用户").tap()
# assert d(label=u"新增客户").wait()
print("开始新建客户>>>")
"""
error:输入框这里没有唯一的元素值,无法定位。 只能开发添加元素值才能定位
"""
d(type="TextField", instance=1).set_text("测试名")
d(type="TextField", instance=3).set_text("18012341185")
d(type="TextField", instance=7).set_text("360782198911215514")
d(label=u"保存").tap()
print("新建客户完成>>>")
d(type="TextField", instance=11).set_text("测试数据")
d(type="TextView", instance=1).set_text("测试数据")
d(label=u"保存").tap()
print("新建报单完成>>>")
| 22.140187 | 99 | 0.617982 |
6e134e1b390faffee93427e6af2f7c6532134c62 | 7,524 | py | Python | src/monitor.py | javiermarcon/xmr-stak-monitor | cfdc9271da4e05cc605423d5255396e4f77ab6a6 | ["MIT"] | null | null | null | src/monitor.py | javiermarcon/xmr-stak-monitor | cfdc9271da4e05cc605423d5255396e4f77ab6a6 | ["MIT"] | null | null | null | src/monitor.py | javiermarcon/xmr-stak-monitor | cfdc9271da4e05cc605423d5255396e4f77ab6a6 | ["MIT"] | null | null | null |
import chardet
import datetime
import eventlet
import json
import os
import requests
#import shlex
import subprocess
import threading
import traceback
class Command(object):
"""
code from: https://gist.github.com/kirpit/1306188
Enables to run subprocess commands in a different thread with TIMEOUT option.
Based on jcollado's solution:
http://stackoverflow.com/questions/1191374/subprocess-with-timeout/4825933#4825933
"""
command = None
process = None
status = None
output, error = '', ''
def __init__(self, command, logger):
#if isinstance(command, basestring):
# command = shlex.split(command)
self.command = command
self.logger = logger
self.logger.debug("init de Command")
def run(self, timeout=None, **kwargs):
""" Run a command then return: (status, output, error). """
self.logger.debug("Inicio run en command")
def target(**kwargs):
try:
self.logger.debug("inicio process en command")
self.process = subprocess.Popen(self.command, **kwargs)
self.logger.debug("---1---")
self.output, self.error = self.process.communicate()
self.logger.debug("---2---")
self.status = self.process.returncode
self.logger.debug("fin process en command")
except:
self.logger.debug("error en Command")
self.error = traceback.format_exc()
self.status = -1
# default stdout and stderr
if 'stdout' not in kwargs:
self.logger.debug("---stdout---")
kwargs['stdout'] = subprocess.PIPE
if 'stderr' not in kwargs:
self.logger.debug("---stderr---")
kwargs['stderr'] = subprocess.PIPE
# thread
self.logger.debug("---instancio thread---")
thread = threading.Thread(target=target, kwargs=kwargs)
self.logger.debug("---inicio thread---")
thread.start()
self.logger.debug("---join thread---")
thread.join(timeout)
if thread.is_alive():
self.logger.debug("---alive thread, terminate---")
self.process.terminate()
self.logger.debug("---join 1 thread---")
thread.join()
self.logger.debug("---return command---")
return self.status, self.output, self.error
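# Illustrative sketch (not called anywhere): running a short command with a timeout
# through the Command wrapper above. The command line and logger name are placeholders.
def _example_command():
    import logging
    logger = logging.getLogger("command-example")
    status, output, error = Command(["python", "--version"], logger).run(timeout=10)
    return status, output, error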
class Monitor():
def __init__(self, options, logger):
self.logger = logger
self.options = options
self.timeout = int(options["common"]["page_timeout"])
self.nspath = os.path.abspath(options["common"]["nssm_exe"]).replace('\\', '\\\\')
self.jobs = self.make_jobs_dict()
def make_jobs_dict(self):
jobs = {}
for service in self.options["services"]:
if not "type" in service:
service["type"] = "xmrig"
check_content = True
if str(service["type"]).lower().strip() != "xmrig":
check_content = False
service["xmr_url"] = "{}/h".format(service["xmr_url"])
params = {}
params["url"] = service["xmr_url"]
params["check_content"] = check_content
for action in ["restart", "status"]:
cmd = [self.nspath, action, service["xmr_service"]]
params["cmd_{}".format(action)] = Command(cmd, self.logger)
jobs[service["xmr_service"]] = params
return jobs
def do_restart(self, svc):
self.logger.debug("primer call: {}".format(svc))
try:
self.logger.debug("inicio_exe")
(a, b, c) = self.jobs[svc]["cmd_restart"].run(timeout=self.timeout)
self.logger.debug(a)
self.logger.debug(self.bytes_decoder(b))
self.logger.debug(self.bytes_decoder(c))
#os.system(joined_cmd)
#subprocess.run(cmd, check=False)
            # For Python 2 use subprocess.call(cmd)
            self.logger.debug("exec finished")
except Exception as ee:
self.logger.debug(ee)
def bytes_decoder(self, byte_string):
if byte_string == b'':
return ''
enc = chardet.detect(byte_string)
return byte_string.decode(enc['encoding'])
def run(self):
self.logger.debug("I'm working...")
for svc in self.jobs:
url = self.jobs[svc]["url"]
pag = self.get_web_page(url)
self.logger.debug("got page {} for {}".format(url, svc))
if not self.check_http_response(pag, self.jobs[svc]["check_content"]):
self.logger.warn("page failed at {}.".format(datetime.datetime.now()))
self.logger.debug(pag)
self.do_restart(svc)
self.logger.debug("restarted..")
else:
self.logger.debug("pag ok.")
def get_web_page(self, url, http_method="GET"):
"""
gets a web page
:param url: string with the url to retrieve, for example: https://pepe.com:8000/sarasa
:param http_method: string with the method to use ("GET" or "POST")
:return: a dictionary with:
state: "ok" or "failure"
status_code: http response number (200=ok, 404=not found, etc.)
            content: the web page html content in case of success or the error in case of error.
"""
status = "FAILURE"
status_code = -1
self.logger.debug("Inicio Request ({})a {}".format(http_method, url))
try:
eventlet.monkey_patch()
with eventlet.Timeout(self.timeout):
if (http_method.lower().strip() == 'get'):
resp = requests.get(url, verify=False)
self.logger.debug("Fin request Get")
else:
resp = requests.post(url, verify=False)
self.logger.debug("Fin Request POST")
content = resp.content
status_code = resp.status_code
if status_code == 200:
status = "OK"
self.logger.debug("status:{} resp_code{}".format(status, status_code))
return {
"status": status,
"status_code": status_code,
"content": content
}
except (Exception, eventlet.timeout.Timeout) as ex:
self.logger.error("error trayendo web page")
self.logger.error(ex)
return {
"status": status,
"status_code": status_code,
"content": str(ex)
}
def check_http_response(self, response, do_check):
if response["status"] != "OK":
self.logger.debug("http response failed. Not OK.")
return False
if not do_check:
self.logger.debug("http response ok, no content check..")
return True
try:
jresp = json.loads(response["content"])
for thread in jresp["hashrate"]["threads"]:
if thread[0] == 0.0:
self.logger.debug("http response failed, thread on 0.0")
return False
self.logger.debug("http response ok with content checked.")
return True
except ValueError as er:
self.logger.debug("Error checking http response: {}".format(er))
return False
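# Illustrative sketch (not called anywhere): the options structure Monitor expects,
# based only on the keys read in __init__ and make_jobs_dict above. Every value is a
# hypothetical placeholder.
def _example_monitor(logger):
    options = {
        "common": {"page_timeout": "10", "nssm_exe": "C:\\nssm\\nssm.exe"},
        "services": [
            {"xmr_service": "xmrig-svc", "xmr_url": "http://127.0.0.1:8080", "type": "xmrig"},
        ],
    }
    return Monitor(options, logger)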
| 39.6 | 107 | 0.548113 |
a9f3713351de123631df3e3a7dff5517f135d6a6 | 29,820 | py | Python | main.py | sourcery-ai-bot/discord-bot | 528fcfb8909296b012cae5f416c3d90c84c25910 | ["MIT"] | null | null | null | main.py | sourcery-ai-bot/discord-bot | 528fcfb8909296b012cae5f416c3d90c84c25910 | ["MIT"] | null | null | null | main.py | sourcery-ai-bot/discord-bot | 528fcfb8909296b012cae5f416c3d90c84c25910 | ["MIT"] | null | null | null |
# Aurexine 2020
## IMPORTS
import os
import shutil
import json
from datetime import datetime
from sqlitedict import SqliteDict
from discord import Guild, Message, User, Member, Embed, Game, TextChannel
from discord.ext import commands
from discord.ext.commands import Context
from helpers import *
VERSION = "2.2.1b1"
## FILESYSTEM
# Get the filesystem in ship-shape
try:
# Generate a default config if it doesn't exist
if not (os.path.exists("config") and os.path.exists("config/config.json")):
os.makedirs("config")
default_config = {
"Database": "database.sql",
"BackupDB": True,
"Botmasters": ["Discord user IDS", "Go here WITH QUOTES"],
"Prefix": "~",
"Token": "Bot token goes here",
"CommandsOnEdit": True,
"DeleteCommands": False,
"LogFile": "bot.log",
"LogMessages": True,
"LogEdits": True,
"LogDeletes": True,
"LogCommands": True
}
with open("config/config.json", "w") as gen:
gen.write(json.dumps(default_config, indent=4))
if not os.path.exists("db"):
os.makedirs("db")
if not os.path.exists("db/backups"):
os.makedirs("db/backups")
if not os.path.exists("plugins"):
os.makedirs("plugins")
except IOError as e:
print(e)
exit()
## CLASSES
class DiscordBot:
"""Data class to hold bot and config information."""
def __init__(self, description):
# Discord library initializations
self.description = description
self.bot = None
self.app_info = None
try:
with open("config/config.json") as cfg:
config = json.load(cfg)
self.database = config["Database"]
self.backup_db = config["BackupDB"]
self.config_prefix = config["Prefix"]
self.config_token = config["Token"]
self.cmd_on_edit = config["CommandsOnEdit"]
self.delete_cmds = config["DeleteCommands"]
self.log_file = config["LogFile"]
self.log_messages = config["LogMessages"]
self.log_edits = config["LogEdits"]
self.log_deletes = config["LogDeletes"]
self.log_commands = config["LogCommands"]
self.botmasters = config["Botmasters"]
except IOError as e:
print(e)
exit()
# Non-config initializations
self.blacklist = []
self.plugins = []
self.servers = {}
self.accounts = {}
self.first_launch = True
# Return current information
    def mission_control(self) -> list:
        if self.bot is None:
            return ["Bot not initialized."]
        else:
            server_names = [i.name for i in self.bot.guilds]
            return [
                "[------------------------STATUS------------------------]",
                "Source: https://github.com/Aurexine/discord-bot",
                f"Time: {datetime.now()}",
                f"Version: {VERSION}",
                f"Logged in as {self.bot.user} ({self.bot.user.id})",
                f"Loaded plugins - {self.plugins}",
                f"Joined {len(self.bot.guilds)} server(s) - {server_names}",
                "[------------------------STATUS------------------------]",
            ]
# Function to build an account level embed
async def account_embed(member: Member, level: int) -> Embed:
tag = f"{member.name}#{member.discriminator}"
embed = Embed(title=f"{tag}'s Account", color=0x7289DA)
embed.set_thumbnail(url=str(member.avatar_url))
embed.add_field(name="Level", value=str(level), inline=False)
return embed
## INITIALIZATION
# This is hacky and bad, but that's this whole bot at this point
# I've learned a lot through making this and would do it quite differently next time
inst = DiscordBot("Extensible bot using Discord.py's Cogs.")
def initialize(instance: DiscordBot) -> commands.Bot:
"""Get the bot, database, and logger ready"""
# Logger
log = get_logger(instance.log_file)
# Discord.py's commands.ext Bot
bot = commands.Bot(
commands.when_mentioned_or(instance.config_prefix),
description=instance.description
)
instance.bot = bot
# Make any required backup and initialize the database
db_file = f"db/{instance.database}"
if os.path.exists(db_file) and instance.backup_db:
timestamp = f"{pretty_datetime(datetime.now(), display='FILE')}"
try:
shutil.copyfile(db_file, f"db/backups/{instance.database}-{timestamp}.sql")
except IOError as e:
error_file = f"db/backups/{instance.database}-{timestamp}.sql"
log.error(f"Unable to create file {error_file}\n - {e}")
db = SqliteDict(
filename=f"db/{instance.database}",
tablename="discord-bot",
encode=json.dumps,
decode=json.loads,
autocommit=True
)
if "blacklist" not in db:
db["blacklist"] = []
if "servers" not in db:
db["servers"] = {}
if "accounts" not in db:
db["accounts"] = {}
instance.blacklist = db["blacklist"]
instance.servers = db["servers"]
instance.accounts = db["accounts"]
## CHECKS
# Local check for if the user is a botmaster
def is_botmaster():
async def predicate(ctx: Context):
return str(ctx.author.id) in instance.botmasters
return commands.check(predicate)
# Global check for if the user is blacklisted
@bot.check
async def allowed(ctx: Context):
return str(ctx.author.id) not in instance.blacklist
# Local check for the user's bot account level
def level(required=0):
async def predicate(ctx: Context):
uid = str(ctx.author.id)
sid = str(ctx.guild.id)
# User doesn't have an account
if sid not in instance.accounts or uid not in instance.accounts[sid]:
return False
else:
return instance.accounts[sid][uid] >= required
return commands.check(predicate)
# Global check for if the plugin is enabled on the current server
@bot.check
async def plugin_enabled(ctx: Context):
try:
sid = str(ctx.guild.id)
# Assume all plugins are available in a direct message
except AttributeError:
return True
# Not a plugin
if ctx.cog is None:
return True
try:
return instance.servers[sid][ctx.cog.name]
except KeyError:
# Plugin will default to enabled if not set by a server admin
return True
# Global check used in a hacky way to delete command invokes if the config says so
@bot.check
async def delete_invokes(ctx: Context):
if instance.delete_cmds:
try:
await ctx.message.delete(delay=2)
except Exception as e:
log.warning(f"Unable to delete command message:\n - {e}")
return True
## EVENTS
@bot.event
async def on_ready():
# If this is the first launch (Not a reconnection from disconnect)
if instance.first_launch:
# Load all available plugins
load_plugins(bot, log, instance.plugins)
# Set the DiscordBot instance's application info
instance.app_info = await bot.application_info()
instance.first_launch = False
# Print mission control to the console
log.info("\n".join(instance.mission_control()))
# Register the servers that the bot has joined
for server in bot.guilds:
sid = str(server.id)
if sid not in instance.servers:
instance.servers[sid] = {}
update_db(db, instance.servers, "servers")
@bot.event
async def on_guild_join(guild: Guild):
sid = str(guild.id)
log.info(f"[JOIN] {guild.name}")
if sid not in instance.servers:
instance.servers[sid] = {}
update_db(db, instance.servers, "servers")
@bot.event
async def on_guild_remove(guild: Guild):
sid = str(guild.id)
log.info(f"[LEAVE] {guild.name}")
if sid in instance.servers:
instance.servers.pop(sid)
update_db(db, instance.servers, "servers")
@bot.event
async def on_message(msg: Message):
# Log messages to the console/log file if enabled
if instance.log_messages:
timestamp = pretty_datetime(datetime.now(), display="TIME")
message = f"[{msg.guild} - #{msg.channel}] <{msg.author}>: {msg.content}"
log.info(f"-{timestamp}- {message}")
# Process the commands from the message afterwards
await bot.process_commands(msg)
@bot.event
async def on_message_edit(former: Message, latter: Message):
sid = str(former.guild.id)
# Embeds cause message edit events even if the user didn't edit them
if former.content == latter.content and former.embeds != latter.embeds:
return
# Log the edit to the console/log file if enabled
if instance.log_edits:
timestamp = pretty_datetime(datetime.now(), display="TIME")
log.info(f"-{timestamp}- [EDIT] [{former.guild}] #{former.channel}")
log.info(f"[BEFORE] <{former.author}>: {former.content}")
log.info(f"[AFTER] <{latter.author}>: {latter.content}")
# Log the edit to a channel if the server has it set up
try:
if instance.servers[sid]["log_edits"]:
guild = former.guild
channel = guild.get_channel(int(instance.servers[sid]["log_channel"]))
embed = Embed(title="Message Edited", color=0xff0000)
embed.add_field(
name=f"Edited by {former.author.name}#{former.author.discriminator}",
value=f"Edited in {former.channel.mention}. UID: {former.author.id}"
)
embed.add_field(name="Before", value=former.content, inline=False)
embed.add_field(name="After", value=latter.content, inline=False)
await channel.send(embed=embed)
except KeyError:
pass
# Process the commands from the message afterwards if enabled
if instance.cmd_on_edit:
await bot.process_commands(latter)
@bot.event
async def on_message_delete(msg: Message):
sid = str(msg.guild.id)
# Log the delete to the console/log file if enabled
if instance.log_deletes:
timestamp = pretty_datetime(datetime.now(), display="TIME")
header = f"-{timestamp}- [DELETE] "
content = f"[{msg.guild}] #{msg.channel} <{msg.author}>: {msg.content}"
log.info(f"{header} {content}")
# Log the delete to a channel if the server has it set up
try:
if instance.servers[sid]["log_deletes"]:
guild = msg.guild
channel = guild.get_channel(int(instance.servers[sid]["log_channel"]))
embed = Embed(title="Message Deleted", color=0xff0000)
embed.add_field(
name=f"Author - {msg.author.name}#{msg.author.discriminator}",
value=f"Deleted from {msg.channel.mention} - UID: {msg.author.id}"
)
embed.add_field(name="Message", value=msg.content, inline=False)
await channel.send(embed=embed)
except KeyError:
pass
@bot.event
async def on_command(ctx: Context):
# Log the command to the console/log file if enabled
if instance.log_commands:
timestamp = pretty_datetime(datetime.now(), display="TIME")
command = ctx.message.content
author = ctx.author
location = f"[{ctx.guild}] - #{ctx.message.channel}"
header = f"-{timestamp}- [COMMAND] `{command}`"
log.info(f"{header} by `{author}` in `{location}`")
@bot.event
async def on_command_error(ctx: Context, error):
# Just send the error to the command's channel
await ctx.send(f":anger: Error: {error}")
@bot.event
async def on_error(error, *args, **kwargs):
log.error(f"[ERROR] {error} {args}")
## COMMANDS
# Basic
@bot.command(name="shutdown")
@is_botmaster()
async def cmd_shutdown(ctx: Context):
"""Shut the bot down compeletely.
Botmaster required.
"""
await ctx.send(":desktop: Shutting down.")
await bot.logout()
@bot.command(name="ping")
async def cmd_ping(ctx: Context):
"""Ping/pong test."""
await ctx.send(f":ping_pong: Pong {ctx.author.mention}")
@bot.command(name="echo")
async def cmd_echo(ctx: Context, *, message: str):
"""Echo command."""
await ctx.send(message)
@bot.command(name="status")
@is_botmaster()
async def cmd_status(ctx: Context, *, status: str):
"""Set the bot to 'Playing <Status>'."""
activity = Game(name=status)
await bot.change_presence(activity=activity)
@bot.command(name="info")
async def cmd_info(ctx: Context):
"""Show the bot's mission control."""
embed = Embed(title="Status", color=0x7289DA)
embed.add_field(name="Time", value=pretty_datetime(datetime.now(), "FULL"))
embed.add_field(name="Version", value=VERSION)
embed.add_field(
name="User",
value=f"{instance.bot.user} ({instance.bot.user.id})",
inline=False
)
embed.add_field(name="Plugins", value=f"[{', '.join(instance.plugins)}]")
embed.add_field(name="Servers", value=str(len(instance.servers)))
# Just in case something happened initializing the app info
if instance.app_info is not None:
embed.set_author(
name=instance.app_info.name,
icon_url=instance.app_info.icon_url
)
embed.set_footer(text="https://github.com/Aurexine/discord-bot")
await ctx.send(embed=embed)
@bot.command(name="blacklist", aliases=["bl", "block"])
@is_botmaster()
async def cmd_blacklist(ctx: Context, target: User, blacklist: bool = True):
"""Add or remove a user from the blacklist.
Botmaster required.
"""
uid = str(target.id)
if uid in instance.blacklist:
# Trying to blacklist a user who is already blacklisted
if blacklist:
await ctx.send(f":anger: {target.name} is already blacklisted.")
# Remove a user from the blacklist
else:
instance.blacklist.remove(uid)
await ctx.send(
f":white_check_mark: {target.name} removed from blacklist."
)
else:
# Add a user to the blacklist
if blacklist:
instance.blacklist.append(uid)
await ctx.send(f":white_check_mark: {target.name} added to blacklist.")
# Trying to remove a user who is not blacklisted
else:
await ctx.send(f":anger: {target.name} is not blacklisted.")
update_db(db, instance.blacklist, "blacklist")
@bot.group(name="logs")
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def cmd_logs(ctx: Context):
"""Toggle logging message edits and deletes to a channel in your server.
Running the command without arguments will display your server's currect settings.
MUST HAVE SERVER ADMINISTRATOR PERMISSION
"""
if ctx.invoked_subcommand is None:
embed = Embed(title="Log Settings", color=0x7289DA)
sid = str(ctx.guild.id)
try:
guild = ctx.bot.get_guild(int(sid))
channel = guild.get_channel(int(instance.servers[sid]["log_channel"]))
embed.add_field(
name="Log Edits",
value=str(instance.servers[sid]["log_edits"])
)
embed.add_field(
name="Log Deletes",
value=str(instance.servers[sid]["log_deletes"])
)
embed.add_field(
name="Log Channel",
value=channel.mention
)
except KeyError:
await ctx.send("Server is not set up or channels have been changed.")
return
await ctx.send(embed=embed)
@cmd_logs.command(name="edits")
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def cmd_logs_edits(ctx: Context, enabled: bool):
"""Set logging of message edits to the server's log channel.
MUST HAVE SERVER ADMINISTRATOR PERMISSION
"""
sid = str(ctx.guild.id)
if sid not in instance.servers:
instance.servers[sid] = {}
instance.servers[sid]["log_edits"] = enabled
update_db(db, instance.servers, "servers")
await ctx.send(f":white_check_mark: Logging message edits set to {enabled}.")
@cmd_logs.command(name="deletes")
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def cmd_logs_deletes(ctx: Context, enabled: bool):
"""Set logging of message deletes to the server's log channel.
MUST HAVE SERVER ADMINISTRATOR PERMISSION
"""
sid = str(ctx.guild.id)
if sid not in instance.servers:
instance.servers[sid] = {}
instance.servers[sid]["log_deletes"] = enabled
update_db(db, instance.servers, "servers")
await ctx.send(f":white_check_mark: Logging message deletes set to {enabled}.")
@cmd_logs.command(name="channel")
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def cmd_logs_channel(ctx: Context, channel: TextChannel):
"""Set the message edit/delete logging channel.
MUST HAVE SERVER ADMINISTRATOR PERMISSION
"""
sid = str(ctx.guild.id)
if sid not in instance.servers:
instance.servers[sid] = {}
instance.servers[sid]["log_channel"] = str(channel.id)
update_db(db, instance.servers, "servers")
await ctx.send(f":white_check_mark: Logging channel set to {channel.mention}.")
# Accounts
@bot.group(name="account", aliases=["accounts", "accs"])
@commands.guild_only()
async def cmd_account(ctx: Context):
"""Add/remove/update accounts.
Running the command without arguments will display your current account level.
"""
if ctx.invoked_subcommand is None:
uid = str(ctx.author.id)
sid = str(ctx.guild.id)
if sid not in instance.accounts:
# Server hasn't been set up
await ctx.send(":anger: Server has no accounts.")
await ctx.send_help("account genesis")
elif uid not in instance.accounts[sid]:
# User has no account
await ctx.send(":anger: You do not have an account for this server.")
else:
# Send an embed with account information
embed = await account_embed(ctx.author, instance.accounts[sid][uid])
await ctx.send(embed=embed)
@cmd_account.command(name="search", aliases=["lookup", "find"])
@commands.guild_only()
async def account_search(ctx: Context, target: Member):
"""Look up a member's account."""
uid = str(target.id)
sid = str(ctx.guild.id)
if sid in instance.accounts and uid in instance.accounts[sid]:
embed = await account_embed(target, instance.accounts[sid][uid])
await ctx.send(embed=embed)
else:
await ctx.send(":anger: User has no account for this server.")
@cmd_account.command(name="add", aliases=["create", "new"])
@commands.guild_only()
@level(10)
async def account_add(ctx: Context, target: Member, level: int):
"""Add an account for a member.
Level 10 required.
"""
uid = str(target.id)
sid = str(ctx.guild.id)
if sid not in instance.accounts:
# Server hasn't been set up
await ctx.send(":anger: Server has no accounts.")
return
if uid not in instance.accounts[sid]:
instance.accounts[sid][uid] = level
await ctx.send(":white_check_mark: Account created.")
update_db(db, instance.accounts, "accounts")
else:
await ctx.send(":anger: User already has an account for this server.")
@cmd_account.command(name="remove", aliases=["delete", "destroy"])
@commands.guild_only()
@level(10)
async def account_remove(ctx: Context, target: Member):
"""Remove a member's account from the server.
Level 10 required.
"""
uid = str(target.id)
sid = str(ctx.guild.id)
if sid not in instance.accounts:
await ctx.send(":anger: Server has no accounts.")
return
elif uid not in instance.accounts[sid]:
await ctx.send(":anger: User has no account for this server.")
return
else:
instance.accounts[sid].pop(uid)
await ctx.send(":white_check_mark: Account removed.")
update_db(db, instance.accounts, "accounts")
@cmd_account.command(name="update", aliases=["change", "modify"])
@commands.guild_only()
@level(10)
async def account_update(ctx: Context, target: Member, level: int):
"""Change a member's account level.
Level 10 required.
"""
uid = str(target.id)
sid = str(ctx.guild.id)
if sid not in instance.accounts:
await ctx.send(":anger: Server has no accounts.")
return
elif uid not in instance.accounts[sid]:
await ctx.send(":anger User has no account for this server.")
return
else:
instance.accounts[sid][uid] = level
update_db(db, instance.accounts, "accounts")
await ctx.send(":white_check_mark: Account updated.")
@cmd_account.command(name="genesis")
@commands.guild_only()
@commands.has_permissions(administrator=True)
async def account_admin(ctx: Context):
"""Set yourself as an administrator of the server to create accounts.
MUST HAVE SERVER ADMINISTRATOR PERMISSION
"""
uid = str(ctx.author.id)
sid = str(ctx.guild.id)
if sid not in instance.accounts:
instance.accounts[sid] = {}
if uid not in instance.accounts[sid]:
instance.accounts[sid][uid] = 10
await ctx.send(":white_check_mark: Admin account created.")
update_db(db, instance.accounts, "accounts")
else:
await ctx.send(":anger: You already have an account.")
# Plugins
@bot.group(name="plugins", aliases=["pl", "cogs"])
async def cmd_plugins(ctx: Context):
"""Plugin handling.
Running the command without arguments will list loaded plugins.
"""
if ctx.invoked_subcommand is None:
embed = Embed(title="Loaded Plugins", color=0x7289DA)
for i in range(len(instance.plugins)):
embed.add_field(name=str(i + 1), value=instance.plugins[i])
await ctx.send(embed=embed)
@cmd_plugins.command(name="load")
@is_botmaster()
async def cmd_plugins_load(ctx: Context, name: str):
"""Load plugin (Cog). Do not include file extension.
Botmaster required.
"""
if name in instance.plugins:
await ctx.send(f":anger: Plugin {name}.py already loaded.")
return
if not os.path.isfile(f"plugins/{name}.py"):
await ctx.send(f":anger: Cannot find plugins/{name}.py")
else:
try:
bot.load_extension(f"plugins.{name}")
instance.plugins.append(name)
update_db(db, instance.plugins, "plugins")
await ctx.send(
f":white_check_mark: Plugin {name}.py successfully loaded."
)
except Exception as e:
exc = f"{type(e).__name__}, {e}"
await ctx.send(f":anger: Error loading {name}.py:\n```py\n{exc}\n```")
@cmd_plugins.command(name="unload")
@is_botmaster()
async def cmd_plugins_unload(ctx: Context, name: str):
"""Unload plugin (Cog). Do not include file extension.
Botmaster required.
"""
if name not in instance.plugins:
await ctx.send(f":anger: Plugin {name}.py is not loaded.")
else:
try:
bot.unload_extension(f"plugins.{name}")
instance.plugins.remove(name)
update_db(db, instance.plugins, "plugins")
await ctx.send(
f":white_check_mark: Plugin {name}.py successfully unloaded."
)
except Exception as e:
exc = f"{type(e).__name__}, {e}"
await ctx.send(f":anger: Error unloading {name}.py:\n```py\n{exc}\n```")
@cmd_plugins.command(name="reload")
@is_botmaster()
async def cmd_plugins_reload(ctx: Context, name: str):
"""Reload plugin (Cog). Do not include file extension.
Botmaster required.
"""
if name not in instance.plugins:
await ctx.send(f":anger: Plugin {name}.py is not loaded.")
else:
try:
bot.unload_extension(f"plugins.{name}")
instance.plugins.remove(name)
update_db(db, instance.plugins, "plugins")
await ctx.send(
f":white_check_mark: Plugin {name}.py successfully unloaded."
)
bot.load_extension(f"plugins.{name}")
instance.plugins.append(name)
update_db(db, instance.plugins, "plugins")
await ctx.send(f":white_check_mark: Plugin {name}.py successfully loaded.")
except Exception as e:
exc = f"{type(e).__name__}, {e}"
await ctx.send(
f":anger: Error reloading {name}.py:\n```py\n{exc}\n```"
)
@cmd_plugins.command(name="enable")
@level(10)
async def cmd_plugins_enable(ctx: Context, name: str):
"""Enable a loaded plugin (Cog) on your server.
Level 10 required.
"""
sid = str(ctx.guild.id)
if name not in instance.plugins:
# There is a distinction between server-loaded and bot-loaded plugins,
# so the .py extension is intentionally not included here
await ctx.send(f":anger: No plugin {name} is loaded.")
return
else:
instance.servers[sid][name] = True
update_db(db, instance.servers, "servers")
await ctx.send(f":white_check_mark: Plugin {name} enabled on your server.")
@cmd_plugins.command(name="disable")
@level(10)
async def cmd_plugins_disable(ctx: Context, name: str):
"""Disable a loaded plugin (Cog) on your server.
Level 10 required.
"""
sid = str(ctx.guild.id)
if name not in instance.plugins:
await ctx.send(f":anger: No plugin {name} is loaded.")
return
else:
instance.servers[sid][name] = False
update_db(db, instance.servers, "servers")
await ctx.send(f":white_check_mark: Plugin {name} disabled on your server.")
# Ensure there is at least one botmaster present before starting the bot
if instance.botmasters is None:
raise Exception("No botmasters defined.")
return bot
# Get user's account level (Not technically a check but needs to be here)
def get_account(server: Guild, member: Member) -> int:
"""Return the account level for a given user.
Intended for export to Cogs.
"""
uid = str(member.id)
sid = str(server.id)
# Get a temporary instance of the main database
database = SqliteDict(
filename=f"db/{inst.database}",
tablename="discord-bot",
encode=json.dumps,
decode=json.loads
)
if "accounts" not in database:
database.close()
raise KeyError("Database not initialized.")
db_dict = database["accounts"]
database.close()
if sid not in db_dict:
raise KeyError("Server has no accounts.")
if uid not in db_dict[sid]:
raise KeyError("User does not have an account for this server.")
else:
return db_dict[sid][uid]
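# Illustrative sketch (hypothetical plugin code, not part of this file): a Cog
# command could call get_account() to branch on the caller's level, e.g.
#     acct_level = get_account(ctx.guild, ctx.author)
#     if acct_level >= 5:
#         await ctx.send("You may use this feature.")
# get_account() raises KeyError when the server or user has no account, so a
# caller is expected to handle that case itself.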
# Exportable version of account level check
def is_level(required=0):
async def predicate(ctx: Context):
uid = str(ctx.author.id)
sid = str(ctx.guild.id)
db_dict = get_db_dict(f"db/{inst.database}", "discord-bot", "accounts")
if sid not in db_dict or uid not in db_dict[sid]:
return False
else:
return db_dict[sid][uid] >= required
return commands.check(predicate)
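# Illustrative sketch (hypothetical usage, assuming a Cog imports is_level from
# this module): decorate a plugin command to require account level 5 or higher.
#     @commands.command(name="purge")
#     @is_level(5)
#     async def cmd_purge(self, ctx: Context):
#         ...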
def main():
bot = initialize(inst)
bot.run(inst.config_token)
if __name__ == "__main__":
main()
| 35.331754 | 158 | 0.585714 |
e71d81f2b79048d093d2b47dc0c439e0c978dfd0 | 9,890 | py | Python | homeassistant/components/nfandroidtv/notify.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | ["Apache-2.0"] | 6 | 2017-08-02T19:26:39.000Z | 2020-03-14T22:47:41.000Z | homeassistant/components/nfandroidtv/notify.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | ["Apache-2.0"] | 60 | 2020-08-03T07:32:56.000Z | 2022-03-31T06:02:07.000Z | homeassistant/components/nfandroidtv/notify.py | tbarbette/core | 8e58c3aa7bc8d2c2b09b6bd329daa1c092d52d3c | ["Apache-2.0"] | 14 | 2018-08-19T16:28:26.000Z | 2021-09-02T18:26:53.000Z |
"""Notifications for Android TV notification service."""
import base64
import io
import logging
import requests
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_HOST, CONF_TIMEOUT, HTTP_OK, PERCENTAGE
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_DURATION = "duration"
CONF_FONTSIZE = "fontsize"
CONF_POSITION = "position"
CONF_TRANSPARENCY = "transparency"
CONF_COLOR = "color"
CONF_INTERRUPT = "interrupt"
DEFAULT_DURATION = 5
DEFAULT_FONTSIZE = "medium"
DEFAULT_POSITION = "bottom-right"
DEFAULT_TRANSPARENCY = "default"
DEFAULT_COLOR = "grey"
DEFAULT_INTERRUPT = False
DEFAULT_TIMEOUT = 5
DEFAULT_ICON = (
"iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAQAAAC1HAwCAAAAC0lEQVR4nGP6zwAAAgcBApo"
"cMXEAAAAASUVORK5CYII="
)
ATTR_DURATION = "duration"
ATTR_FONTSIZE = "fontsize"
ATTR_POSITION = "position"
ATTR_TRANSPARENCY = "transparency"
ATTR_COLOR = "color"
ATTR_BKGCOLOR = "bkgcolor"
ATTR_INTERRUPT = "interrupt"
ATTR_IMAGE = "filename2"
ATTR_FILE = "file"
# Attributes contained in file
ATTR_FILE_URL = "url"
ATTR_FILE_PATH = "path"
ATTR_FILE_USERNAME = "username"
ATTR_FILE_PASSWORD = "password"
ATTR_FILE_AUTH = "auth"
# Any other value or absence of 'auth' leads to basic authentication being used
ATTR_FILE_AUTH_DIGEST = "digest"
FONTSIZES = {"small": 1, "medium": 0, "large": 2, "max": 3}
POSITIONS = {
"bottom-right": 0,
"bottom-left": 1,
"top-right": 2,
"top-left": 3,
"center": 4,
}
TRANSPARENCIES = {
"default": 0,
f"0{PERCENTAGE}": 1,
f"25{PERCENTAGE}": 2,
f"50{PERCENTAGE}": 3,
f"75{PERCENTAGE}": 4,
f"100{PERCENTAGE}": 5,
}
COLORS = {
"grey": "#607d8b",
"black": "#000000",
"indigo": "#303F9F",
"green": "#4CAF50",
"red": "#F44336",
"cyan": "#00BCD4",
"teal": "#009688",
"amber": "#FFC107",
"pink": "#E91E63",
}
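# Illustrative sketch (values assumed for demonstration, not taken from this
# file): a notify call can override the defaults above via service data, e.g.
#     data = {
#         "duration": 10,
#         "fontsize": "large",
#         "position": "center",
#         "transparency": "25%",
#         "color": "red",
#         "interrupt": True,
#         "file": {"url": "http://example.com/image.png", "auth": "digest",
#                  "username": "user", "password": "pass"},
#     }
# send_message() below maps these keys onto the HTTP payload it posts.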
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_DURATION, default=DEFAULT_DURATION): vol.Coerce(int),
vol.Optional(CONF_FONTSIZE, default=DEFAULT_FONTSIZE): vol.In(FONTSIZES.keys()),
vol.Optional(CONF_POSITION, default=DEFAULT_POSITION): vol.In(POSITIONS.keys()),
vol.Optional(CONF_TRANSPARENCY, default=DEFAULT_TRANSPARENCY): vol.In(
TRANSPARENCIES.keys()
),
vol.Optional(CONF_COLOR, default=DEFAULT_COLOR): vol.In(COLORS.keys()),
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): vol.Coerce(int),
vol.Optional(CONF_INTERRUPT, default=DEFAULT_INTERRUPT): cv.boolean,
}
)
def get_service(hass, config, discovery_info=None):
"""Get the Notifications for Android TV notification service."""
remoteip = config.get(CONF_HOST)
duration = config.get(CONF_DURATION)
fontsize = config.get(CONF_FONTSIZE)
position = config.get(CONF_POSITION)
transparency = config.get(CONF_TRANSPARENCY)
color = config.get(CONF_COLOR)
interrupt = config.get(CONF_INTERRUPT)
timeout = config.get(CONF_TIMEOUT)
return NFAndroidTVNotificationService(
remoteip,
duration,
fontsize,
position,
transparency,
color,
interrupt,
timeout,
hass.config.is_allowed_path,
)
class NFAndroidTVNotificationService(BaseNotificationService):
"""Notification service for Notifications for Android TV."""
def __init__(
self,
remoteip,
duration,
fontsize,
position,
transparency,
color,
interrupt,
timeout,
is_allowed_path,
):
"""Initialize the service."""
self._target = f"http://{remoteip}:7676"
self._default_duration = duration
self._default_fontsize = fontsize
self._default_position = position
self._default_transparency = transparency
self._default_color = color
self._default_interrupt = interrupt
self._timeout = timeout
self._icon_file = io.BytesIO(base64.b64decode(DEFAULT_ICON))
self.is_allowed_path = is_allowed_path
def send_message(self, message="", **kwargs):
"""Send a message to a Android TV device."""
_LOGGER.debug("Sending notification to: %s", self._target)
payload = {
"filename": (
"icon.png",
self._icon_file,
"application/octet-stream",
{"Expires": "0"},
),
"type": "0",
"title": kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT),
"msg": message,
"duration": "%i" % self._default_duration,
"fontsize": "%i" % FONTSIZES.get(self._default_fontsize),
"position": "%i" % POSITIONS.get(self._default_position),
"bkgcolor": "%s" % COLORS.get(self._default_color),
"transparency": "%i" % TRANSPARENCIES.get(self._default_transparency),
"offset": "0",
"app": ATTR_TITLE_DEFAULT,
"force": "true",
"interrupt": "%i" % self._default_interrupt,
}
data = kwargs.get(ATTR_DATA)
if data:
if ATTR_DURATION in data:
duration = data.get(ATTR_DURATION)
try:
payload[ATTR_DURATION] = "%i" % int(duration)
except ValueError:
_LOGGER.warning("Invalid duration-value: %s", str(duration))
if ATTR_FONTSIZE in data:
fontsize = data.get(ATTR_FONTSIZE)
if fontsize in FONTSIZES:
payload[ATTR_FONTSIZE] = "%i" % FONTSIZES.get(fontsize)
else:
_LOGGER.warning("Invalid fontsize-value: %s", str(fontsize))
if ATTR_POSITION in data:
position = data.get(ATTR_POSITION)
if position in POSITIONS:
payload[ATTR_POSITION] = "%i" % POSITIONS.get(position)
else:
_LOGGER.warning("Invalid position-value: %s", str(position))
if ATTR_TRANSPARENCY in data:
transparency = data.get(ATTR_TRANSPARENCY)
if transparency in TRANSPARENCIES:
payload[ATTR_TRANSPARENCY] = "%i" % TRANSPARENCIES.get(transparency)
else:
_LOGGER.warning("Invalid transparency-value: %s", str(transparency))
if ATTR_COLOR in data:
color = data.get(ATTR_COLOR)
if color in COLORS:
payload[ATTR_BKGCOLOR] = "%s" % COLORS.get(color)
else:
_LOGGER.warning("Invalid color-value: %s", str(color))
if ATTR_INTERRUPT in data:
interrupt = data.get(ATTR_INTERRUPT)
try:
payload[ATTR_INTERRUPT] = "%i" % cv.boolean(interrupt)
except vol.Invalid:
_LOGGER.warning("Invalid interrupt-value: %s", str(interrupt))
filedata = data.get(ATTR_FILE) if data else None
if filedata is not None:
# Load from file or URL
file_as_bytes = self.load_file(
url=filedata.get(ATTR_FILE_URL),
local_path=filedata.get(ATTR_FILE_PATH),
username=filedata.get(ATTR_FILE_USERNAME),
password=filedata.get(ATTR_FILE_PASSWORD),
auth=filedata.get(ATTR_FILE_AUTH),
)
if file_as_bytes:
payload[ATTR_IMAGE] = (
"image",
file_as_bytes,
"application/octet-stream",
{"Expires": "0"},
)
try:
_LOGGER.debug("Payload: %s", str(payload))
response = requests.post(self._target, files=payload, timeout=self._timeout)
if response.status_code != HTTP_OK:
_LOGGER.error("Error sending message: %s", str(response))
except requests.exceptions.ConnectionError as err:
_LOGGER.error("Error communicating with %s: %s", self._target, str(err))
def load_file(
self, url=None, local_path=None, username=None, password=None, auth=None
):
"""Load image/document/etc from a local path or URL."""
try:
if url is not None:
# Check whether authentication parameters are provided
if username is not None and password is not None:
# Use digest or basic authentication
if ATTR_FILE_AUTH_DIGEST == auth:
auth_ = HTTPDigestAuth(username, password)
else:
auth_ = HTTPBasicAuth(username, password)
# Load file from URL with authentication
req = requests.get(url, auth=auth_, timeout=DEFAULT_TIMEOUT)
else:
# Load file from URL without authentication
req = requests.get(url, timeout=DEFAULT_TIMEOUT)
return req.content
if local_path is not None:
# Check whether path is whitelisted in configuration.yaml
if self.is_allowed_path(local_path):
return open(local_path, "rb")
_LOGGER.warning("'%s' is not secure to load data from!", local_path)
else:
_LOGGER.warning("Neither URL nor local path found in params!")
except OSError as error:
_LOGGER.error("Can't load from url or local path: %s", error)
return None
| 35.57554 | 88 | 0.595349 |
e30887350809a016f498d3221d64f929e913fb1b | 31,440 | py | Python | grr/core/grr_response_core/lib/parsers/linux_file_parser.py | magnologan/grr | 06eeb071e9a925b34f67caf776c3330b39154850 | ["Apache-2.0"] | null | null | null | grr/core/grr_response_core/lib/parsers/linux_file_parser.py | magnologan/grr | 06eeb071e9a925b34f67caf776c3330b39154850 | ["Apache-2.0"] | null | null | null | grr/core/grr_response_core/lib/parsers/linux_file_parser.py | magnologan/grr | 06eeb071e9a925b34f67caf776c3330b39154850 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
"""Simple parsers for Linux files."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import logging
import os
import re
from future.builtins import zip
from future.utils import iteritems
from future.utils import itervalues
from typing import Optional
from typing import Text
from grr_response_core import config
from grr_response_core.lib import parser
from grr_response_core.lib import utils
from grr_response_core.lib.parsers import config_file
from grr_response_core.lib.rdfvalues import anomaly as rdf_anomaly
from grr_response_core.lib.rdfvalues import client as rdf_client
from grr_response_core.lib.rdfvalues import protodict as rdf_protodict
from grr_response_core.lib.util import precondition
class PCIDevicesInfoParser(parser.FileMultiParser):
"""Parser for PCI devices' info files located in /sys/bus/pci/devices/*/*."""
output_types = [rdf_client.PCIDevice]
supported_artifacts = ["PCIDevicesInfoFiles"]
def ParseMultiple(self, stats, file_objects, unused_knowledge_base):
# Each file gives us only partial information for a particular PCI device.
# Iterate through all the files first to create a dictionary encapsulating
# complete information for each of the PCI device on the system. We need
# all information for a PCI device before a proto for it can be created.
# We will store data in a dictionary of dictionaries that looks like this:
# data = { '0000:7f:0d.0': { 'class': '0x088000',
# 'vendor': '0x8086',
# 'device': '0x0ee1' } }
# The key is location of PCI device on system in extended B/D/F notation
# and value is a dictionary containing filename:data pairs for each file
# returned by artifact collection for that PCI device.
# Extended B/D/F is of form "domain:bus:device.function". Compile a regex
# so we can use it to skip parsing files that don't match it.
hc = r"[0-9A-Fa-f]"
bdf_regex = re.compile(r"^%s+:%s+:%s+\.%s+" % (hc, hc, hc, hc))
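# Illustrative check (folder names assumed for demonstration): an extended
# B/D/F entry such as "0000:7f:0d.0" matches bdf_regex, while an unrelated
# sibling name like "firmware_node" does not:
#     bdf_regex.match("0000:7f:0d.0")   # -> match object
#     bdf_regex.match("firmware_node")  # -> None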
# This will make sure that when a non-existing 'key' (PCI location)
# is accessed for the first time a new 'key':{} pair is auto-created
data = collections.defaultdict(dict)
for stat, file_obj in zip(stats, file_objects):
filename = stat.pathspec.Basename()
# Location of PCI device is the name of parent directory of returned file.
bdf = stat.pathspec.Dirname().Basename()
# Make sure we only parse files that are under a valid B/D/F folder
if bdf_regex.match(bdf):
# Remove newlines from all files except config. Config contains raw data
# so we don't want to touch it even if it has a newline character.
file_data = file_obj.read()
if filename != "config":
file_data = file_data.rstrip(b"\n")
data[bdf][filename] = file_data
# Now that we've captured all information for each PCI device, convert
# the dictionary into a list of PCIDevice protos.
for bdf, bdf_filedata in iteritems(data):
pci_device = rdf_client.PCIDevice()
bdf_split = bdf.split(":")
df_split = bdf_split[2].split(".")
# We'll convert the hex into decimal to store in the protobuf.
pci_device.domain = int(bdf_split[0], 16)
pci_device.bus = int(bdf_split[1], 16)
pci_device.device = int(df_split[0], 16)
pci_device.function = int(df_split[1], 16)
pci_device.class_id = bdf_filedata.get("class")
pci_device.vendor = bdf_filedata.get("vendor")
pci_device.vendor_device_id = bdf_filedata.get("device")
pci_device.config = bdf_filedata.get("config")
yield pci_device
class PasswdParser(parser.FileParser):
"""Parser for passwd files. Yields User semantic values."""
output_types = [rdf_client.User]
supported_artifacts = ["UnixPasswd"]
@classmethod
def ParseLine(cls, index, line):
precondition.AssertType(line, Text)
fields = "username,password,uid,gid,fullname,homedir,shell".split(",")
try:
if not line:
return None
dat = dict(zip(fields, line.split(":")))
user = rdf_client.User(
username=dat["username"],
uid=int(dat["uid"]),
homedir=dat["homedir"],
shell=dat["shell"],
gid=int(dat["gid"]),
full_name=dat["fullname"])
return user
except (IndexError, KeyError):
raise parser.ParseError(
"Invalid passwd file at line %d. %s" % ((index + 1), line))
def Parse(self, stat, file_object, knowledge_base):
"""Parse the passwd file."""
_, _ = stat, knowledge_base
lines = [
l.strip()
for l in utils.ReadFileBytesAsUnicode(file_object).splitlines()
]
for index, line in enumerate(lines):
user = self.ParseLine(index, line)
if user is not None:
yield user
class PasswdBufferParser(parser.GrepParser):
"""Parser for lines grepped from passwd files."""
output_types = [rdf_client.User]
supported_artifacts = ["LinuxPasswdHomedirs", "NssCacheLinuxPasswdHomedirs"]
def Parse(self, filefinderresult, knowledge_base):
_ = knowledge_base
lines = [x.data.decode("utf-8") for x in filefinderresult.matches]
for index, line in enumerate(lines):
user = PasswdParser.ParseLine(index, line.strip())
if user is not None:
yield user
class UtmpStruct(utils.Struct):
"""Parse wtmp file from utmp.h."""
_fields = [
("h", "ut_type"),
("i", "pid"),
("32s", "line"),
("4s", "id"),
("32s", "user"),
("256s", "host"),
("i", "exit"),
("i", "session"),
("i", "sec"),
("i", "usec"),
("i", "ip_1"),
("i", "ip_2"),
("i", "ip_3"),
("i", "ip_4"),
("20s", "nothing"),
]
class LinuxWtmpParser(parser.FileParser):
"""Simplified parser for linux wtmp files.
Yields User semantic values for USER_PROCESS events.
"""
output_types = [rdf_client.User]
supported_artifacts = ["LinuxWtmp"]
def Parse(self, stat, file_object, knowledge_base):
"""Parse the wtmp file."""
_, _ = stat, knowledge_base
users = {}
wtmp = file_object.read()
while wtmp:
try:
record = UtmpStruct(wtmp)
except utils.ParsingError:
break
wtmp = wtmp[record.size:]
# Users only appear for USER_PROCESS events, others are system.
if record.ut_type != 7:
continue
# Lose the null termination
record.user = record.user.split(b"\x00", 1)[0]
# Store the latest login time.
# TODO(user): remove the 0 here once RDFDatetime can support times
# pre-epoch properly.
try:
users[record.user] = max(users[record.user], record.sec, 0)
except KeyError:
users[record.user] = record.sec
for user, last_login in iteritems(users):
yield rdf_client.User(
username=utils.SmartUnicode(user), last_logon=last_login * 1000000)
class NetgroupParser(parser.FileParser):
"""Parser that extracts users from a netgroup file."""
output_types = [rdf_client.User]
supported_artifacts = ["NetgroupConfiguration"]
# From useradd man page
USERNAME_REGEX = r"^[a-z_][a-z0-9_-]{0,30}[$]?$"
@classmethod
def ParseLines(cls, lines):
users = set()
filter_regexes = [
re.compile(x)
for x in config.CONFIG["Artifacts.netgroup_filter_regexes"]
]
username_regex = re.compile(cls.USERNAME_REGEX)
blacklist = config.CONFIG["Artifacts.netgroup_user_blacklist"]
for index, line in enumerate(lines):
if line.startswith("#"):
continue
splitline = line.split(" ")
group_name = splitline[0]
if filter_regexes:
filter_match = False
for regex in filter_regexes:
if regex.search(group_name):
filter_match = True
break
if not filter_match:
continue
for member in splitline[1:]:
if member.startswith("("):
try:
_, user, _ = member.split(",")
if user not in users and user not in blacklist:
if not username_regex.match(user):
yield rdf_anomaly.Anomaly(
type="PARSER_ANOMALY",
symptom="Invalid username: %s" % user)
else:
users.add(user)
yield rdf_client.User(username=utils.SmartUnicode(user))
except ValueError:
raise parser.ParseError(
"Invalid netgroup file at line %d: %s" % (index + 1, line))
def Parse(self, stat, file_object, knowledge_base):
"""Parse the netgroup file and return User objects.
Lines are of the form:
group1 (-,user1,) (-,user2,) (-,user3,)
Groups are ignored, we return users in lines that match the filter regexes,
or all users in the file if no filters are specified.
We assume usernames are in the default regex format specified in the adduser
man page. Notably no non-ASCII characters.
Args:
stat: unused statentry
file_object: netgroup VFSFile
knowledge_base: unused
Returns:
rdf_client.User
"""
_, _ = stat, knowledge_base
lines = [
l.strip()
for l in utils.ReadFileBytesAsUnicode(file_object).splitlines()
]
return self.ParseLines(lines)
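# Illustrative example (made-up data; assumes no filter regexes and an empty
# user blacklist are configured): a netgroup line such as
#     "admins (-,alice,) (-,bob,)"
# run through ParseLines() yields User(username="alice") and
# User(username="bob"); a member that fails USERNAME_REGEX is reported as a
# PARSER_ANOMALY instead.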
class NetgroupBufferParser(parser.GrepParser):
"""Parser for lines grepped from /etc/netgroup files."""
output_types = [rdf_client.User]
def Parse(self, filefinderresult, knowledge_base):
_ = knowledge_base
return NetgroupParser.ParseLines(
[x.data.decode("utf-8").strip() for x in filefinderresult.matches])
class LinuxBaseShadowParser(parser.FileMultiParser):
"""Base parser to process user/groups with shadow files."""
# A list of hash types and hash matching expressions.
hashes = [("SHA512", re.compile(r"\$6\$[A-z\d\./]{0,16}\$[A-z\d\./]{86}$")),
("SHA256", re.compile(r"\$5\$[A-z\d\./]{0,16}\$[A-z\d\./]{43}$")),
("DISABLED", re.compile(r"!.*")), ("UNSET", re.compile(r"\*.*")),
("MD5", re.compile(r"\$1\$([A-z\d\./]{1,8}\$)?[A-z\d\./]{22}$")),
("DES", re.compile(r"[A-z\d\./]{2}.{11}$")),
("BLOWFISH", re.compile(r"\$2a?\$\d\d\$[A-z\d\.\/]{22}$")),
("NTHASH", re.compile(r"\$3\$")), ("UNUSED", re.compile(r"\$4\$"))]
# Prevents this from automatically registering.
__abstract = True # pylint: disable=g-bad-name
base_store = None
shadow_store = None
def __init__(self, *args, **kwargs):
super(LinuxBaseShadowParser, self).__init__(*args, **kwargs)
# Entries as defined by "getent", i.e. account databases used by nsswitch.
self.entry = {}
# Shadow files
self.shadow = {}
def GetPwStore(self, pw_attr):
"""Decide if the passwd field is a passwd or a reference to shadow.
Evaluates the contents of the password field to determine how the password
is stored.
- If blank, either no password is required or no access is granted.
This behavior is system and application dependent.
- If 'x', the encrypted password is stored in /etc/shadow.
- Otherwise, the field contains the encrypted password itself.
Args:
pw_attr: The password field as a string.
Returns:
An enum indicating the location of the password store.
"""
# PwEntry.PwStore enum values.
if pw_attr == "x":
return self.shadow_store
return self.base_store
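# Illustrative examples (field values assumed): a passwd/group line whose
# password column is "x" makes GetPwStore() return the shadow store enum,
# while a line embedding a crypt string directly (e.g. "$6$...") returns the
# base store so the hash is classified where it was found.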
def GetHashType(self, hash_str):
"""Identify the type of hash in a hash string.
Args:
hash_str: A string value that may be a hash.
Returns:
A string description of the type of hash.
"""
# Return the type of the first matching hash.
for hash_type, hash_re in self.hashes:
if hash_re.match(hash_str):
return hash_type
# No hash matched.
return "EMPTY"
def _ParseFile(self, file_obj, line_parser):
"""Process a file line by line.
Args:
file_obj: The file to parse.
line_parser: The parser method used to process and store line content.
Raises:
parser.ParseError if the parser is unable to process the line.
"""
lines = [
l.strip() for l in utils.ReadFileBytesAsUnicode(file_obj).splitlines()
]
try:
for index, line in enumerate(lines):
if line:
line_parser(line)
except (IndexError, KeyError) as e:
raise parser.ParseError("Invalid file at line %d: %s" % (index + 1, e))
def ReconcileShadow(self, store_type):
"""Verify that entries that claim to use shadow files have a shadow entry.
If the entries of the non-shadowed file indicate that a shadow file is used,
check that there is actually an entry for that file in shadow.
Args:
store_type: The type of password store that should be used (e.g.
/etc/shadow or /etc/gshadow)
"""
for k, v in iteritems(self.entry):
if v.pw_entry.store == store_type:
shadow_entry = self.shadow.get(k)
if shadow_entry is not None:
v.pw_entry = shadow_entry
else:
v.pw_entry.store = "UNKNOWN"
def _Anomaly(self, msg, found):
return rdf_anomaly.Anomaly(
type="PARSER_ANOMALY", symptom=msg, finding=found)
@staticmethod
def MemberDiff(data1, set1_name, data2, set2_name):
"""Helper method to perform bidirectional set differences."""
set1 = set(data1)
set2 = set(data2)
diffs = []
msg = "Present in %s, missing in %s: %s"
if set1 != set2:
in_set1 = set1 - set2
in_set2 = set2 - set1
if in_set1:
diffs.append(msg % (set1_name, set2_name, ",".join(in_set1)))
if in_set2:
diffs.append(msg % (set2_name, set1_name, ",".join(in_set2)))
return diffs
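# Illustrative example (made-up data): MemberDiff(["root", "alice"], "group",
# ["root", "bob"], "gshadow") returns two findings, one noting "alice" is
# present in group but missing in gshadow and one noting the reverse for "bob".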
def ParseMultiple(self, stats, file_objs, kb):
"""Process files together."""
fileset = {stat.pathspec.path: obj for stat, obj in zip(stats, file_objs)}
return self.ParseFileset(fileset)
class LinuxSystemGroupParser(LinuxBaseShadowParser):
"""Parser for group files. Yields Group semantic values."""
output_types = [rdf_client.Group]
supported_artifacts = ["LoginPolicyConfiguration"]
base_store = rdf_client.PwEntry.PwStore.GROUP
shadow_store = rdf_client.PwEntry.PwStore.GSHADOW
def __init__(self, *args, **kwargs):
super(LinuxSystemGroupParser, self).__init__(*args, **kwargs)
self.gshadow_members = {}
def ParseGshadowEntry(self, line):
"""Extract the members of each group from /etc/gshadow.
Identifies the groups in /etc/gshadow and several attributes of the group,
including how the password is crypted (if set).
gshadow files have the format group_name:passwd:admins:members
admins are both group members and can manage passwords and memberships.
Args:
line: An entry in gshadow.
"""
fields = ("name", "passwd", "administrators", "members")
if line:
rslt = dict(zip(fields, line.split(":")))
# Add the shadow state to the internal store.
name = rslt["name"]
pw_entry = self.shadow.setdefault(name, rdf_client.PwEntry())
pw_entry.store = self.shadow_store
pw_entry.hash_type = self.GetHashType(rslt["passwd"])
# Add the members to the internal store.
members = self.gshadow_members.setdefault(name, set())
for accts in rslt["administrators"], rslt["members"]:
if accts:
members.update(accts.split(","))
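# Illustrative example (made-up line): "sudo:!::alice,bob" records a gshadow
# pw_entry for "sudo" with hash_type "DISABLED" and members {"alice", "bob"}.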
def ParseGroupEntry(self, line):
"""Extract the members of a group from /etc/group."""
fields = ("name", "passwd", "gid", "members")
if line:
rslt = dict(zip(fields, line.split(":")))
name = rslt["name"]
group = self.entry.setdefault(name, rdf_client.Group(name=name))
group.pw_entry.store = self.GetPwStore(rslt["passwd"])
if group.pw_entry.store == self.base_store:
group.pw_entry.hash_type = self.GetHashType(rslt["passwd"])
# If the group contains NIS entries, they may not have a gid.
if rslt["gid"]:
group.gid = int(rslt["gid"])
group.members = set(rslt["members"].split(","))
def MergeMembers(self):
"""Add shadow group members to the group if gshadow is used.
Normally group and shadow should be in sync, but no guarantees. Merges the
two stores as membership in either file may confer membership.
"""
for group_name, members in iteritems(self.gshadow_members):
group = self.entry.get(group_name)
if group and group.pw_entry.store == self.shadow_store:
group.members = members.union(group.members)
def FindAnomalies(self):
"""Identify any anomalous group attributes or memberships."""
for grp_name, group in iteritems(self.entry):
shadow = self.shadow.get(grp_name)
gshadows = self.gshadow_members.get(grp_name, [])
if shadow is not None:
diff = self.MemberDiff(group.members, "group", gshadows, "gshadow")
if diff:
msg = "Group/gshadow members differ in group: %s" % grp_name
yield self._Anomaly(msg, diff)
diff = self.MemberDiff(self.entry, "group", self.gshadow_members, "gshadow")
if diff:
yield self._Anomaly("Mismatched group and gshadow files.", diff)
def ParseFileset(self, fileset=None):
"""Process linux system group and gshadow files.
Orchestrates collection of account entries from /etc/group and /etc/gshadow.
The group and gshadow entries are reconciled and member users are added to
the entry.
Args:
fileset: A dict of files mapped from path to an open file.
Yields:
- A series of Group entries, each of which is populated with group
memberships and indications of the shadow state of any group password.
- A series of anomalies in cases where there are mismatches between group
and gshadow states.
"""
# Get relevant shadow attributes.
gshadow = fileset.get("/etc/gshadow")
if gshadow:
self._ParseFile(gshadow, self.ParseGshadowEntry)
else:
logging.debug("No /etc/gshadow file.")
group = fileset.get("/etc/group")
if group:
self._ParseFile(group, self.ParseGroupEntry)
else:
logging.debug("No /etc/group file.")
self.ReconcileShadow(self.shadow_store)
# Identify any anomalous group/shadow entries.
# This needs to be done before memberships are merged: merged memberships
# are the *effective* membership regardless of weird configurations.
for anom in self.FindAnomalies():
yield anom
# Then add shadow group members to the group membership.
self.MergeMembers()
for group in itervalues(self.entry):
yield group
class LinuxSystemPasswdParser(LinuxBaseShadowParser):
"""Parser for local accounts."""
output_types = [rdf_client.User]
supported_artifacts = ["LoginPolicyConfiguration"]
base_store = rdf_client.PwEntry.PwStore.PASSWD
shadow_store = rdf_client.PwEntry.PwStore.SHADOW
def __init__(self, *args, **kwargs):
super(LinuxSystemPasswdParser, self).__init__(*args, **kwargs)
self.groups = {} # Groups mapped by name.
self.memberships = {} # Group memberships per user.
self.uids = {} # Assigned uids
self.gids = {} # Assigned gids
def ParseShadowEntry(self, line):
"""Extract the user accounts in /etc/shadow.
Identifies the users in /etc/shadow and several attributes of their account,
including how their password is crypted and password aging characteristics.
Args:
line: An entry of the shadow file.
"""
fields = ("login", "passwd", "last_change", "min_age", "max_age",
"warn_time", "inactivity", "expire", "reserved")
if line:
rslt = dict(zip(fields, line.split(":")))
pw_entry = self.shadow.setdefault(rslt["login"], rdf_client.PwEntry())
pw_entry.store = self.shadow_store
pw_entry.hash_type = self.GetHashType(rslt["passwd"])
# Tread carefully here in case these values aren't set.
last_change = rslt.get("last_change")
if last_change:
pw_entry.age = int(last_change)
max_age = rslt.get("max_age")
if max_age:
pw_entry.max_age = int(max_age)
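# Illustrative example (made-up line): "alice:$6$salt$<hash>:18000:0:99999:7:::"
# is keyed on "alice" with store SHADOW, age 18000 and max_age 99999; the
# password column's prefix decides the reported hash_type.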
def ParsePasswdEntry(self, line):
"""Process the passwd entry fields and primary group memberships."""
fields = ("uname", "passwd", "uid", "gid", "fullname", "homedir", "shell")
if line:
rslt = dict(zip(fields, line.split(":")))
user = self.entry.setdefault(rslt["uname"], rdf_client.User())
user.username = rslt["uname"]
user.pw_entry.store = self.GetPwStore(rslt["passwd"])
if user.pw_entry.store == self.base_store:
user.pw_entry.hash_type = self.GetHashType(rslt["passwd"])
# If the passwd file contains NIS entries they may not have uid/gid set.
if rslt["uid"]:
user.uid = int(rslt["uid"])
if rslt["gid"]:
user.gid = int(rslt["gid"])
user.homedir = rslt["homedir"]
user.shell = rslt["shell"]
user.full_name = rslt["fullname"]
# Map uid numbers to detect duplicates.
uids = self.uids.setdefault(user.uid, set())
uids.add(user.username)
# Map primary group memberships to populate memberships.
gid = self.gids.setdefault(user.gid, set())
gid.add(user.username)
def _Members(self, group):
"""Unify members of a group and accounts with the group as primary gid."""
group.members = set(group.members).union(self.gids.get(group.gid, []))
return group
def AddGroupMemberships(self):
"""Adds aggregate group membership from group, gshadow and passwd."""
self.groups = {g.name: self._Members(g) for g in itervalues(self.groups)}
# Map the groups a user is a member of, irrespective of primary/extra gid.
for g in itervalues(self.groups):
for user in g.members:
membership = self.memberships.setdefault(user, set())
membership.add(g.gid)
# Now add the completed membership to the user account.
for user in itervalues(self.entry):
user.gids = self.memberships.get(user.username)
def FindAnomalies(self):
"""Identify anomalies in the password/shadow and group/gshadow data."""
# Find anomalous group entries.
findings = []
group_entries = {g.gid for g in itervalues(self.groups)}
for gid in set(self.gids) - group_entries:
undefined = ",".join(self.gids.get(gid, []))
findings.append(
"gid %d assigned without /etc/groups entry: %s" % (gid, undefined))
if findings:
yield self._Anomaly("Accounts with invalid gid.", findings)
# Find any shared user IDs.
findings = []
for uid, names in iteritems(self.uids):
if len(names) > 1:
findings.append("uid %d assigned to multiple accounts: %s" %
(uid, ",".join(sorted(names))))
if findings:
yield self._Anomaly("Accounts with shared uid.", findings)
# Find privileged groups with unusual members.
findings = []
root_grp = self.groups.get("root")
if root_grp is not None:
root_members = sorted([m for m in root_grp.members if m != "root"])
if root_members:
findings.append("Accounts in 'root' group: %s" % ",".join(root_members))
if findings:
yield self._Anomaly("Privileged group with unusual members.", findings)
# Find accounts without passwd/shadow entries.
diffs = self.MemberDiff(self.entry, "passwd", self.shadow, "shadow")
if diffs:
yield self._Anomaly("Mismatched passwd and shadow files.", diffs)
def AddPassword(self, fileset):
"""Add the passwd entries to the shadow store."""
passwd = fileset.get("/etc/passwd")
if passwd:
self._ParseFile(passwd, self.ParsePasswdEntry)
else:
logging.debug("No /etc/passwd file.")
def AddShadow(self, fileset):
"""Add the shadow entries to the shadow store."""
shadow = fileset.get("/etc/shadow")
if shadow:
self._ParseFile(shadow, self.ParseShadowEntry)
else:
logging.debug("No /etc/shadow file.")
def ParseFileset(self, fileset=None):
"""Process linux system login files.
Orchestrates collection of account entries from /etc/passwd and
/etc/shadow. The passwd and shadow entries are reconciled and group
memberships are mapped to the account.
Args:
fileset: A dict of files mapped from path to an open file.
Yields:
- A series of User entries, each of which is populated with
group memberships and indications of the shadow state of the account.
- A series of anomalies in cases where there are mismatches between passwd
and shadow state.
"""
self.AddPassword(fileset)
self.AddShadow(fileset)
self.ReconcileShadow(self.shadow_store)
# Get group memberships using the files that were already collected.
# Separate out groups and anomalies.
for rdf in LinuxSystemGroupParser().ParseFileset(fileset):
if isinstance(rdf, rdf_client.Group):
self.groups[rdf.name] = rdf
else:
yield rdf
self.AddGroupMemberships()
for user in itervalues(self.entry):
yield user
for grp in itervalues(self.groups):
yield grp
for anom in self.FindAnomalies():
yield anom
class PathParser(parser.FileParser):
"""Parser for dotfile entries.
Extracts path attributes from dotfiles to infer effective paths for users.
This parser doesn't attempt or expect to determine path state for all cases,
rather, it is a best effort attempt to detect common misconfigurations. It is
not intended to detect maliciously obfuscated path modifications.
"""
output_types = [rdf_protodict.AttributedDict]
# TODO(user): Modify once a decision is made on contextual selection of
# parsed results for artifact data.
supported_artifacts = [
"GlobalShellConfigs", "RootUserShellConfigs", "UsersShellConfigs"
]
# https://cwe.mitre.org/data/definitions/426.html
_TARGETS = ("CLASSPATH", "LD_AOUT_LIBRARY_PATH", "LD_AOUT_PRELOAD",
"LD_LIBRARY_PATH", "LD_PRELOAD", "MODULE_PATH", "PATH",
"PERL5LIB", "PERLLIB", "PYTHONPATH", "RUBYLIB")
_SH_CONTINUATION = ("{", "}", "||", "&&", "export")
_CSH_FILES = (".login", ".cshrc", ".tcsh", "csh.cshrc", "csh.login",
"csh.logout")
# This matches "set a = (b . ../../.. )", "set a=(. b c)" etc.
_CSH_SET_RE = re.compile(r"(\w+)\s*=\s*\((.*)\)$")
# This matches $PATH, ${PATH}, "$PATH" and "${ PATH }" etc.
# Omits more fancy parameter expansion e.g. ${unset_val:=../..}
_SHELLVAR_RE = re.compile(r'"?\$\{?\s*(\w+)\s*\}?"?')
def __init__(self, *args, **kwargs):
super(PathParser, self).__init__(*args, **kwargs)
# Terminate entries on ";" to capture multiple values on one line.
self.parser = config_file.FieldParser(term=r"[\r\n;]")
def _ExpandPath(self, target, vals, paths):
"""Extract path information, interpolating current path values as needed."""
if target not in self._TARGETS:
return
expanded = []
for val in vals:
# Null entries specify the current directory, so :a::b:c: is equivalent
# to .:a:.:b:c:.
shellvar = self._SHELLVAR_RE.match(val)
if not val:
expanded.append(".")
elif shellvar:
# The value may actually be in braces as well. Always convert to upper
# case so we deal with stuff like lowercase csh path.
existing = paths.get(shellvar.group(1).upper())
if existing:
expanded.extend(existing)
else:
expanded.append(val)
else:
expanded.append(val)
paths[target] = expanded
def _ParseShVariables(self, lines):
"""Extract env_var and path values from sh derivative shells.
Iterates over each line, word by word searching for statements that set the
path. These are either variables, or conditions that would allow a variable
to be set later in the line (e.g. export).
Args:
lines: A list of lines, each of which is a list of space separated words.
Returns:
a dictionary of path names and values.
"""
paths = {}
for line in lines:
for entry in line:
if "=" in entry:
# Pad out the list so that it's always 2 elements, even if the split
# failed.
target, vals = (entry.split("=", 1) + [""])[:2]
if vals:
path_vals = vals.split(":")
else:
path_vals = []
self._ExpandPath(target, path_vals, paths)
elif entry not in self._SH_CONTINUATION:
# Stop processing the line unless the entry might allow paths to still
# be set, e.g.
# reserved words: "export"
# conditions: { PATH=VAL } && PATH=:$PATH || PATH=.
break
return paths
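# Illustrative example (made-up input): for
#     lines = [["export", "PATH=/usr/bin:/bin"], ["FOO=bar"]]
# the method returns {"PATH": ["/usr/bin", "/bin"]}; FOO is ignored because it
# is not one of the _TARGETS variables.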
def _ParseCshVariables(self, lines):
"""Extract env_var and path values from csh derivative shells.
Path attributes can be set several ways:
- setenv takes the form "setenv PATH_NAME COLON:SEPARATED:LIST"
- set takes the form "set path_name=(space separated list)" and is
automatically exported for several types of files.
The first entry in each stanza is used to decide what context to use.
Other entries are used to identify the path name and any assigned values.
Args:
lines: A list of lines, each of which is a list of space separated words.
Returns:
a dictionary of path names and values.
"""
paths = {}
for line in lines:
if len(line) < 2:
continue
action = line[0]
if action == "setenv":
target = line[1]
path_vals = []
if line[2:]:
path_vals = line[2].split(":")
self._ExpandPath(target, path_vals, paths)
elif action == "set":
set_vals = self._CSH_SET_RE.search(" ".join(line[1:]))
if set_vals:
target, vals = set_vals.groups()
# Automatically exported to ENV vars.
if target in ("path", "term", "user"):
target = target.upper()
path_vals = vals.split()
self._ExpandPath(target, path_vals, paths)
return paths
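# Illustrative example (made-up input): ["setenv", "PATH", "/usr/bin:/bin"]
# yields {"PATH": ["/usr/bin", "/bin"]}, and ["set", "path=(/usr/bin /bin)"]
# yields the same result because "path" is auto-exported and upper-cased.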
def Parse(self, stat, file_obj, knowledge_base):
"""Identifies the paths set within a file.
Expands paths within the context of the file, but does not infer fully
expanded paths from external states. There are plenty of cases where path
attributes are unresolved, e.g. sourcing other files.
Lines are not handled literally. A field parser is used to:
- Break lines with multiple distinct statements into separate lines (e.g.
lines with a ';' separating stanzas).
- Strip out comments.
- Handle line continuations to capture multi-line configurations into one
statement.
Args:
stat: statentry
file_obj: VFSFile
knowledge_base: unused
Yields:
An attributed dict for each env var. 'name' contains the path name, and
'vals' contains its values.
"""
_ = knowledge_base
lines = self.parser.ParseEntries(utils.ReadFileBytesAsUnicode(file_obj))
if os.path.basename(stat.pathspec.path) in self._CSH_FILES:
paths = self._ParseCshVariables(lines)
else:
paths = self._ParseShVariables(lines)
for path_name, path_vals in iteritems(paths):
yield rdf_protodict.AttributedDict(
config=stat.pathspec.path, name=path_name, vals=path_vals)
| 35.890411 | 80 | 0.653085 |
5f6a16e9d80c068b3155f507c3440fe2487aec77 | 474 | py | Python | convolut_telegram/settings.py | convolut/convolut-telegram | 4a32484496aff293aca47b3454fa929fe3dc77a7 | ["Apache-2.0"] | null | null | null | convolut_telegram/settings.py | convolut/convolut-telegram | 4a32484496aff293aca47b3454fa929fe3dc77a7 | ["Apache-2.0"] | null | null | null | convolut_telegram/settings.py | convolut/convolut-telegram | 4a32484496aff293aca47b3454fa929fe3dc77a7 | ["Apache-2.0"] | null | null | null |
import os
from .constants import TelegramMode
from convolut.settings import GLOBAL_PREFIX
LOGGER_TELEGRAM_TOKEN = os.environ.get(f"{GLOBAL_PREFIX}LOGGER_TELEGRAM_TOKEN", None)
LOGGER_TELEGRAM_CHAT_ID = os.environ.get(f"{GLOBAL_PREFIX}LOGGER_TELEGRAM_CHAT_ID", None)
LOGGER_TELEGRAM_MODE = os.environ.get(f"{GLOBAL_PREFIX}LOGGER_TELEGRAM_MODE", TelegramMode.Basic)
LOGGER_TELEGRAM_PROXY = os.environ.get(f"{GLOBAL_PREFIX}LOGGER_TELEGRAM_PROXY", 'https://api.telegram.org')
| 47.4 | 107 | 0.835443 |