hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0b17dc1ba266c3902679d315702c8fa0f208d5e0 | 15,862 | py | Python | carpyncho.py | DrDub/carpyncho-py | 6394faf9da6dfa393d3d06bfff004f10c7e4cd29 | [
"BSD-3-Clause"
] | null | null | null | carpyncho.py | DrDub/carpyncho-py | 6394faf9da6dfa393d3d06bfff004f10c7e4cd29 | [
"BSD-3-Clause"
] | null | null | null | carpyncho.py | DrDub/carpyncho-py | 6394faf9da6dfa393d3d06bfff004f10c7e4cd29 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Juan B Cabral
# License: BSD-3-Clause
# Full Text: https://github.com/carpyncho/carpyncho-py/blob/master/LICENSE
# =============================================================================
# DOCS
# =============================================================================
"""Python client for Carpyncho VVV dataset collection.
This code access as a Pandas DataFrame all the data of the web version of
Carpyncho https://carpyncho.github.io/.
"""
__all__ = ["Carpyncho", "CARPYNCHOPY_DATA_PATH"]
__version__ = "0.0.5"
# =============================================================================
# IMPORTS
# =============================================================================
import os
import io
import bz2
import pathlib
import typing as t
import inspect
import hashlib
import functools
import urllib
import json
import attr
import diskcache as dcache
import tqdm
import humanize
import requests
import clize
import pandas as pd
# =============================================================================
# CONSTANTS
# =============================================================================
VERSION = __version__
#: Location of the entire dataset index.
CARPYNCHO_INDEX_URL = "https://raw.githubusercontent.com/carpyncho/carpyncho-py/master/data/index.json" # noqa
#: Google drive location.
DRIVE_URL = "https://docs.google.com/uc?export=download"
#: Where carpyncho gonna store the entire data.
CARPYNCHOPY_DATA_PATH = pathlib.Path(
os.path.expanduser(os.path.join('~', 'carpyncho_py_data')))
#: Chunk size when the library are download the big files of Carpyncho.
CHUNK_SIZE = 32768
#: Maximun cache size (10TB)
DEFAULT_CACHE_SIZE_LIMIT = int(1e10)
#: The location of the cache catabase and files.
DEFAULT_CACHE_DIR = CARPYNCHOPY_DATA_PATH / "_cache_"
# =============================================================================
# CACHE ORCHESTRATION
# =============================================================================
def from_cache(
    cache, tag, function, cache_expire,
    force=False, *args, **kwargs
):
    """Simplify cache orchestration.

    Parameters
    ----------
    cache: ``diskcache.Cache`` or ``diskcache.FanoutCache``
        The cache instance used to store the function result.
    tag: str
        Normally every function call the cache with their own tag.
        We sugest "module.function" or "module.Class.function"
    function: callable
        The function to be cached
    force: bool (default=False)
        If the value of the cache must be ignored and re-execute the
        function.
    cache_expire: bool or None
        Time in seconds to expire the function call
    args and kwargs:
        All the parameters needed to execute the function.

    Returns
    -------
    The result of calling the function or the cached version of the same value.

    """
    # Build a deterministic key from the tag plus *all* call parameters so
    # different arguments never collide in the cache.
    key = dcache.core.args_to_key(
        base=("carpyncho", tag), args=args, kwargs=kwargs, typed=False)

    with cache as c:
        # Evict any expired entries before the lookup.
        c.expire()

        # ENOVAL is the sentinel for "not cached"; forcing skips the lookup.
        value = (
            dcache.core.ENOVAL if force else
            c.get(key, default=dcache.core.ENOVAL, retry=True))

        if value is dcache.core.ENOVAL:
            # BUG FIX: positional ``args`` were part of the cache key but were
            # never forwarded to ``function`` (only ``**kwargs`` was passed),
            # so positional callers would get wrong results. Forward both.
            value = function(*args, **kwargs)
            c.set(
                key, value, expire=cache_expire,
                tag=f"carpyncho.{tag}", retry=True)

    return value
# =============================================================================
# CLIENT
# =============================================================================
@attr.s(hash=False, frozen=True)
class Carpyncho:
    """Client to access the *Carpyncho VVV dataset collection*.

    This code access as a Pandas Dataframe all the data of the web version of
    Carpyncho. https://carpyncho.github.io/.

    Parameters
    ----------
    cache : ``diskcache.Cache``, ``diskcache.Fanout``,
        or ``None`` (default: ``None``)
        Any instance of ``diskcache.Cache``, ``diskcache.Fanout`` or
        ``None`` (Default). If it's ``None`` a ``diskcache.Cache``
        instance is created with the parameter
        ``directory = carpyncho.DEFAULT_CACHE_DIR``.
        More information: http://www.grantjenks.com/docs/diskcache
    cache_expire : ``float`` or None (default=``None``)
        Seconds until item expires (default ``None``, no expiry)
        More information: http://www.grantjenks.com/docs/diskcache

    """

    #: Local cache of the carpyncho database.
    cache: t.Union[dcache.Cache, dcache.FanoutCache] = attr.ib()

    #: Default timeout of the catalog-cache.
    #: Try to always set to None (default), the catalogs are big and mostly
    #: never change.
    cache_expire: float = attr.ib(default=None, repr=False)

    #: Location of the carpyncho index (useful for development)
    index_url: str = attr.ib(default=CARPYNCHO_INDEX_URL)

    # =========================================================================
    # ATTRS ORCHESTRATION
    # =========================================================================

    @cache.default
    def _cache_default(self):
        # Lazily build the default on-disk cache when the caller does not
        # provide one explicitly.
        return dcache.Cache(
            directory=DEFAULT_CACHE_DIR, size_limit=DEFAULT_CACHE_SIZE_LIMIT)

    # =========================================================================
    # UTILITIES FOR CHECK THE REMOTE DATA
    # =========================================================================

    def retrieve_index(self, reset):
        """Access the remote index of the Carpyncho-Dataset.

        The index is stored internally for 1 hr.

        Parameters
        ----------
        reset: bool
            If its True the entire cache is ignored and a new index is
            downloaded and cached.

        Returns
        -------
        dict with the index structure.

        """
        def get_json_data(url):
            # Remote URLs are fetched over the network; any other scheme is
            # treated as a local file path (handy for development/testing).
            parsed = urllib.parse.urlparse(url)
            if parsed.scheme in ("http", "https", "ftp"):
                response = requests.get(
                    url, headers={'Cache-Control': 'no-cache'})
                return response.json()
            with open(url) as fp:
                return json.load(fp)

        # Cache the index for one hour (cache_expire=3600).
        return from_cache(
            cache=self.cache,
            tag="get_index",
            function=get_json_data,
            cache_expire=3600,
            force=reset,
            url=self.index_url)

    @property
    def index_(self):
        """Structure of the Carpyncho dataset information as a Python-dict."""
        return self.retrieve_index(reset=False)

    def list_tiles(self):
        """Retrieve available tiles with catalogs as a tuple of str."""
        index = self.index_
        # Keys starting with "_" are index metadata, not tiles.
        return tuple(k for k in index.keys() if not k.startswith("_"))

    def list_catalogs(self, tile):
        """Retrieve the available catalogs for a given tile.

        Parameters
        ----------
        tile: str
            The name of the tile to retrieve the catalogs.

        Returns
        -------
        tuple of str:
            The names of available catalogs in the given tile.

        Raises
        ------
        ValueError:
            If the tile is not found.

        """
        index = self.index_
        if tile not in index:
            raise ValueError(f"Tile {tile} not found")
        return tuple(index[tile])

    def has_catalog(self, tile, catalog):
        """Check if a given catalog and tile exists.

        Parameters
        ----------
        tile: str
            The name of the tile.
        catalog:
            The name of the catalog.

        Returns
        -------
        bool:
            True if the combination tile+catalog exists.

        """
        cat = self.index_.get(tile, {}).get(catalog)
        return bool(cat)

    def catalog_info(self, tile, catalog):
        """Retrieve the information about a given catalog.

        Parameters
        ----------
        tile: str
            The name of the tile.
        catalog:
            The name of the catalog.

        Returns
        -------
        dict:
            The entire information of the given catalog file. This include
            drive-id, md5 checksum, size in bytes, number of total records,
            etc.

        Raises
        ------
        ValueError:
            If the tile or the catalog is not found.

        """
        index = self.index_
        if tile not in index:
            raise ValueError(f"Tile {tile} not found")

        # BUG FIX: the original re-assigned ``tile`` to the tile *dict*
        # before formatting the error below, so the message printed the
        # whole dict instead of the tile name. Keep the name intact.
        tile_data = index[tile]
        if catalog not in tile_data:
            raise ValueError(
                f"Catalog {catalog} for tile {tile} not found")

        return tile_data[catalog]

    # =========================================================================
    # THE DOWNLOAD PART
    # =========================================================================

    def _gdrive_download(self, tile, catalog, driveid, size, md5sum):
        """Download a catalog file from Google Drive, verify and parse it.

        The remote file is a bz2-compressed parquet; it is decompressed on the
        fly, its md5 (of the *compressed* stream) is checked against
        ``md5sum``, and the result is returned as a ``pandas.DataFrame``.
        """
        # https://stackoverflow.com/a/39225272
        # https://stackoverflow.com/a/27508615

        # prepare the parameters and download the token
        params = {'id': driveid}
        session = requests.Session()
        response = session.get(
            DRIVE_URL, params=params, stream=True,
            headers={'Cache-Control': 'no-cache'})

        # retrieve the token from gdrive page (big files trigger a
        # "can't scan for viruses" confirmation page).
        token = None
        for key, value in response.cookies.items():
            if key.startswith('download_warning'):
                token = value
                break

        # if we have token add to the parameters
        if token:
            params['confirm'] = token

        # make the real deal request
        response = session.get(
            DRIVE_URL, params=params, stream=True,
            headers={'Cache-Control': 'no-cache'})

        # progress bar
        pbar = tqdm.tqdm(
            total=size, initial=0, unit='B',
            unit_scale=True, desc=f"{tile}-{catalog}")

        # the file is a bz2 file, we are going to decompress and store
        # the raw parquet data into a BytesIO
        decompressor = bz2.BZ2Decompressor()
        parquet_stream = io.BytesIO()

        # we also need to verify that the md5 is ok
        file_hash = hashlib.md5()

        # retrieve all the data one chunk at the time
        for chunk in response.iter_content(CHUNK_SIZE):
            if not chunk:
                break
            parquet_stream.write(decompressor.decompress(chunk))
            file_hash.update(chunk)
            # BUG FIX: advance by the *actual* chunk size; the last chunk is
            # usually smaller than CHUNK_SIZE and the bar over-counted.
            pbar.update(len(chunk))

        # stop the progress bar
        pbar.close()

        # check if the file was downloaded correctly
        if file_hash.hexdigest() != md5sum:
            raise IOError(
                f"'{tile}-{catalog}' incorrect download.\n"
                f"expected: {md5sum}\n"
                f"caclulated: {file_hash.hexdigest()}")

        # read the entire stream into a dataframe
        df = pd.read_parquet(parquet_stream)
        return df

    # Backwards-compatibility alias for the old (misspelled) private name.
    _grive_download = _gdrive_download

    def get_catalog(self, tile, catalog, force=False):
        """Retrieve a catalog from the carpyncho dataset.

        Parameters
        ----------
        tile: str
            The name of the tile.
        catalog:
            The name of the catalog.
        force: bool (default=False)
            If its True, the cached version of the catalog is ignored and
            redownloaded. Try to always set force to False.

        Returns
        -------
        pandas.DataFrame:
            The columns of the DataFrame changes between the different catalog.

        Raises
        ------
        ValueError:
            If the tile or the catalog is not found.
        IOError:
            If the checksum not match.

        """
        info = self.catalog_info(tile, catalog)
        driveid, size = info["driveid"], info["size"]

        # The index stores the md5 as "<hexdigest> <filename>"; keep only the
        # digest and normalize the case for comparison.
        md5sum = info["md5sum"].split()[0].strip().lower()

        df = from_cache(
            cache=self.cache,
            tag="get_catalog",
            function=self._gdrive_download,
            cache_expire=None,
            force=force,

            # params to _gdrive_download
            tile=tile, catalog=catalog,
            driveid=driveid, size=size, md5sum=md5sum)

        return df
# =============================================================================
# CLI
# =============================================================================
@attr.s(hash=False, frozen=True)
class CLI:
    """Carpyncho console client.

    Explore and download the entire https://carpyncho.github.io/
    catalogs from your command line.

    """

    #: Static footer appended to every help screen.
    footnotes = "\n".join([
        "This software is under the BSD 3-Clause License.",
        "Copyright (c) 2020, Juan Cabral.",
        "For bug reporting or other instructions please check:"
        " https://github.com/carpyncho/carpyncho-py"])

    #: Carpyncho client.
    client = attr.ib()

    def get_commands(self):
        """Collect every public bound method as a CLI command.

        Private names and this collector itself are excluded.
        """
        methods = {}
        for k in dir(self):
            if k.startswith("_"):
                continue
            v = getattr(self, k)
            if inspect.ismethod(v) and k != "get_commands":
                methods[k] = v
        return methods

    def version(self):
        """Print Carpyncho version."""
        print(VERSION)

    def list_tiles(self):
        """Show available tiles."""
        for tile in self.client.list_tiles():
            print(f"- {tile}")

    def list_catalogs(self, tile):
        """Show the available catalogs for a given tile.

        tile:
            The name of the tile to retrieve the catalogs.

        """
        print(f"Tile {tile}")
        for catalog in self.client.list_catalogs(tile=tile):
            print(f" - {catalog}")

    def has_catalog(self, tile, catalog):
        """Check if a given catalog and tile exists.

        tile:
            The name of the tile.
        catalog:
            The name of the catalog.

        """
        has = "" if self.client.has_catalog(tile, catalog) else "NO "
        # BUG FIX: message said "or tile"; it describes a catalog OF a tile.
        print(f"Catalog '{catalog}' of tile '{tile}': {has}exists")

    def catalog_info(self, tile, catalog):
        """Retrieve the information about a given catalog.

        tile:
            The name of the tile.
        catalog:
            The name of the catalog.

        """
        # Humanize the byte-size and record-count fields; everything else is
        # printed with plain str().
        FORMATTERS = {
            "size": functools.partial(humanize.naturalsize, binary=True),
            "records": humanize.intcomma
        }
        print(f"Catalog {tile}-{catalog}")
        for k, v in self.client.catalog_info(tile, catalog).items():
            fmt = FORMATTERS.get(k, str)
            print(f" - {k}: {fmt(v)}")

    def download_catalog(self, tile, catalog, *, force=False, out):
        """Retrieves a catalog from the Carpyncho dataset collection.

        tile:
            The name of the tile.
        catalog:
            The name of the catalog.
        out:
            Path to store the catalog. The extension of the file determines
            the format. Options are ".xlsx" (Excel), ".csv",
            ".pkl" (Python pickle) and ".parquet".
        force:
            Force to ignore the cached value and redownload the catalog.
            Try to always set force to False.

        """
        PARSERS = {
            ".xlsx": pd.DataFrame.to_excel,
            ".csv": pd.DataFrame.to_csv,
            ".pkl": pd.DataFrame.to_pickle,
            ".parquet": pd.DataFrame.to_parquet}

        # Validate the output format *before* the potentially long download.
        ext = os.path.splitext(out)[-1].lower()
        if ext not in PARSERS:
            raise clize.UserError(f"format '{ext}' not recognized")

        # BUG FIX: ``force`` was accepted but never forwarded, so the
        # --force flag had no effect.
        df = self.client.get_catalog(tile, catalog, force=force)

        print(f"Writing {out}...")
        parser = PARSERS[ext]
        parser(df, out)
def main():
    """Run the carpyncho CLI interface."""
    interface = CLI(client=Carpyncho())
    clize.run(
        *interface.get_commands().values(),
        description=interface.__doc__,
        footnotes=interface.footnotes)


if __name__ == "__main__":
    main()
| 28.683544 | 111 | 0.532468 | 11,809 | 0.744484 | 0 | 0 | 11,875 | 0.748645 | 0 | 0 | 9,105 | 0.574013 |
0b18861c6f4339a1ca04dfd8300878200d15cac5 | 498 | py | Python | visual_data/app/utils.py | SaidLopez/django_start | 2932d4433b022a729b2afaaf13baf678aafc4b0a | [
"MIT"
] | null | null | null | visual_data/app/utils.py | SaidLopez/django_start | 2932d4433b022a729b2afaaf13baf678aafc4b0a | [
"MIT"
] | null | null | null | visual_data/app/utils.py | SaidLopez/django_start | 2932d4433b022a729b2afaaf13baf678aafc4b0a | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import base64
from io import BytesIO
def get_graph():
    """Encode the current matplotlib figure as a base64 PNG string."""
    stream = BytesIO()
    # Render whatever figure is currently active into the in-memory buffer.
    plt.savefig(stream, format='png')
    stream.seek(0)
    encoded = base64.b64encode(stream.getvalue()).decode('utf-8')
    stream.close()
    return encoded
def get_plot(x, y, title='xyz'):
    """Plot *y* against *x* and return the chart as a base64 PNG string.

    Parameters
    ----------
    x, y:
        Sequences of coordinates passed straight to ``plt.plot``.
    title: str (default='xyz')
        Chart title. Default preserves the previously hard-coded value.
    """
    # AGG is a non-interactive backend, safe for server-side rendering.
    plt.switch_backend('AGG')
    plt.figure(figsize=(10, 10))
    plt.title(title)
    plt.plot(x, y)
    plt.tight_layout()
    graph = get_graph()
    # BUG FIX: close the figure once encoded; matplotlib keeps every figure
    # alive until closed, so repeated calls leaked memory.
    plt.close()
    return graph
0b1891b092271e40a79284fe9042e306de9c61a6 | 26,953 | py | Python | qradar_utilities.py | intel471/titan_qradar_sync | 43d2d2bfcd18c3383e8f4f0377788a0d2f3844a7 | [
"MIT"
] | 1 | 2021-08-23T08:41:56.000Z | 2021-08-23T08:41:56.000Z | qradar_utilities.py | intel471/titan_qradar_sync | 43d2d2bfcd18c3383e8f4f0377788a0d2f3844a7 | [
"MIT"
] | null | null | null | qradar_utilities.py | intel471/titan_qradar_sync | 43d2d2bfcd18c3383e8f4f0377788a0d2f3844a7 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3.8
import time
from typing import List, Dict
import json
import requests
from requests.exceptions import HTTPError
from urllib3.exceptions import InsecureRequestWarning
from json_utilities import json_get
from titan_qradar_sync_config import TitanQRadarSyncConfig
class QRadarUtilities:
    def __init__(self, config: TitanQRadarSyncConfig):
        # Shared configuration: QRadar credentials/URLs, logger, and tuning
        # knobs (TTLs, batch settings) read by every method below.
        self.config = config
        # All QRadar requests below use verify=False (appliances commonly run
        # self-signed certificates), so silence the resulting urllib3 warnings.
        requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning)
def first_not_none_or_default(self, object_list: List, default):
result = default
try:
for item in object_list:
if item:
result = item
break
except Exception as e:
result = default
return result
def get_qradar_details(self) -> Dict:
qradar_details: Dict = None
try:
self.config.logger.info("Attempting to get QRadar details.")
request: str = self.config.qradar_base_url + "system/about"
self.config.logger.info("Sending request: %s", request)
headers = {}
if self.config.qradar_user_agent:
headers = {"User-Agent": self.config.qradar_user_agent}
response = requests.get(request, headers=headers, auth=(self.config.qradar_username, self.config.qradar_password), verify=False)
if response.status_code == 200:
qradar_details = response.json()
else:
self.config.logger.info("Unable to obtain QRadar details.")
except HTTPError as http_err:
qradar_details = None
self.config.logger.error("Unable to get QRadar details: %s", {http_err})
except Exception as e:
qradar_details = None
self.config.logger.error("Unable to get QRadar details: %s", {e})
return qradar_details
def create_reference_set(self, set_name: str, element_type: str):
success: bool = True
try:
params: Dict = {
"name": set_name,
"element_type": element_type,
"time_to_live": self.config.qradar_reference_set_time_to_live,
"timeout_type": self.config.qradar_reference_set_timeout_type
}
self.config.logger.info("Attempting to create " + set_name + " reference set.")
request: str = self.config.qradar_base_url + "reference_data/sets"
self.config.logger.info("Sending request: %s", request)
headers = {}
if self.config.qradar_user_agent:
headers = {"User-Agent": self.config.qradar_user_agent}
response = requests.post(request, headers=headers, auth=(self.config.qradar_username, self.config.qradar_password), verify=False, data=params)
if response.status_code == 201:
self.config.logger.info("Successfully created " + set_name + " reference set.")
else:
self.config.logger.info(response.content)
success = False
self.config.logger.info("Unable to create " + set_name + " reference set.")
except Exception as e:
success = False
self.config.logger.error("Unable to create " + set_name + " reference set: %s", {e})
return success
    def create_reference_table(self, table_name: str):
        """Create the malware-indicator reference table in QRadar.

        The table is keyed by indicator UID (outer key label "UID") and holds
        one inner key per column declared in ``key_name_types`` below.
        Returns True when QRadar answers 201 Created, False otherwise.
        """
        success: bool = True
        try:
            # QRadar expects key_name_types as a *string-encoded* JSON array of
            # {"element_type", "key_name"} objects. It is built by literal
            # concatenation to keep the exact payload bytes under control.
            # Note the DATE columns are named "Activity First"/"Activity Last".
            key_name_types = (
                "[{\"element_type\": \"ALNIC\", " +
                "\"key_name\": \"Malware Family\"}, " +
                "{\"element_type\": \"ALNIC\", " +
                "\"key_name\": \"Malware Family Titan URL\"}, " +
                "{\"element_type\": \"ALNIC\", " +
                "\"key_name\": \"Type\"}, " +
                "{\"element_type\": \"ALNIC\", " +
                "\"key_name\": \"Indicator\"}, " +
                "{\"element_type\": \"ALNIC\", " +
                "\"key_name\": \"Indicator Titan URL\"}, " +
                "{\"element_type\": \"ALNIC\", " +
                "\"key_name\": \"Confidence Level\"}, " +
                "{\"element_type\": \"ALNIC\", " +
                "\"key_name\": \"Context\"}, " +
                "{\"element_type\": \"ALNIC\", " +
                "\"key_name\": \"GIRs\"}, " +
                "{\"element_type\": \"ALNIC\", " +
                "\"key_name\": \"Mitre Tactics\"}, " +
                "{\"element_type\": \"DATE\", " +
                "\"key_name\": \"Activity First\"}, " +
                "{\"element_type\": \"DATE\", " +
                "\"key_name\": \"Activity Last\"}, " +
                "{\"element_type\": \"DATE\", " +
                "\"key_name\": \"Expires\"}]"
            )
            params: Dict = {
                "name": table_name,
                "outer_key_label": "UID",
                "key_name_types": key_name_types,
                "element_type": "ALNIC",
                "time_to_live": self.config.qradar_reference_table_time_to_live,
                "timeout_type": self.config.qradar_reference_table_timeout_type
            }
            self.config.logger.info("Attempting to create " + table_name + " reference table.")
            request: str = self.config.qradar_base_url + "reference_data/tables"
            self.config.logger.info("Sending request: %s", request)
            headers = {}
            if self.config.qradar_user_agent:
                headers = {"User-Agent": self.config.qradar_user_agent}
            # QRadar uses self-signed certs (verify=False) and basic auth.
            response = requests.post(request, headers=headers, auth=(self.config.qradar_username, self.config.qradar_password), verify=False, data=params)
            if response.status_code == 201:
                self.config.logger.info("Successfully created " + table_name + " reference table.")
            else:
                self.config.logger.info(response.content)
                success = False
                self.config.logger.info("Unable to create " + table_name + " reference table.")
        except Exception as e:
            success = False
            self.config.logger.error("Unable to create " + table_name + " reference table: %s", {e})
        return success
def check_create_reference_set(self, set_name: str, element_type: str) -> bool:
success: bool = True
try:
self.config.logger.info("Checking " + set_name + " reference set.")
request: str = self.config.qradar_base_url + "reference_data/sets/" + set_name
self.config.logger.info("Sending request: %s", request)
headers = {}
if self.config.qradar_user_agent:
headers = {"User-Agent": self.config.qradar_user_agent}
response = requests.get(request, headers=headers, auth=(self.config.qradar_username, self.config.qradar_password), verify=False)
if response.status_code == 200:
self.config.logger.info(set_name + " reference set detected.")
else:
self.config.logger.info(set_name + " reference set not detected.")
success = self.create_reference_set(set_name, element_type)
except Exception as e:
success = False
self.config.logger.error("Unable to check/create reference set: %s", {e})
return success
def check_create_reference_table(self, table_name: str) -> bool:
success: bool = True
try:
self.config.logger.info("Checking " + table_name + " reference table.")
request: str = self.config.qradar_base_url + "reference_data/tables/" + table_name
self.config.logger.info("Sending request: %s", request)
headers = {}
if self.config.qradar_user_agent:
headers = {"User-Agent": self.config.qradar_user_agent}
response = requests.get(request, headers= headers, auth=(self.config.qradar_username, self.config.qradar_password), verify=False)
if response.status_code == 200:
self.config.logger.info(table_name + " reference table detected.")
else:
self.config.logger.info(table_name + " reference table not detected.")
success = self.create_reference_table(table_name)
except Exception as e:
success = False
self.config.logger.error("Unable to check/create reference table: %s", {e})
return success
def check_create_reference_data_structures(self) -> bool:
success: bool = True
try:
# Reference sets.
if self.config.qradar_populate_malware_indicators_sets:
self.check_create_reference_set(self.config.qradar_malware_indicators_set_ip_medium_confidence, "IP")
self.check_create_reference_set(self.config.qradar_malware_indicators_set_ip_high_confidence, "IP")
self.check_create_reference_set(self.config.qradar_malware_indicators_set_hash_medium_confidence, "ALNIC")
self.check_create_reference_set(self.config.qradar_malware_indicators_set_hash_high_confidence, "ALNIC")
self.check_create_reference_set(self.config.qradar_malware_indicators_set_url_medium_confidence, "ALNIC")
self.check_create_reference_set(self.config.qradar_malware_indicators_set_url_high_confidence, "ALNIC")
# Reference tables.
if self.config.qradar_populate_malware_indicators_tables:
self.check_create_reference_table(self.config.qradar_malware_indicators_table)
except Exception as e:
success = False
self.config.logger.error("Unable to check/create reference data structures: %s", {e})
return success
def submit_indicator_batch_reference_set(self, indicator_batch_reference_set: List, set_name: str):
success: bool = True
try:
if len(indicator_batch_reference_set) > 0:
request: str = self.config.qradar_base_url + "reference_data/sets/bulk_load/" + set_name
self.config.logger.info("Sending request: %s", request)
headers = {}
if self.config.qradar_user_agent:
headers = {"User-Agent": self.config.qradar_user_agent}
response = requests.post(request, headers=headers, auth=(self.config.qradar_username, self.config.qradar_password), verify=False, data=json.dumps(indicator_batch_reference_set))
if response.status_code == 200:
self.config.logger.info("Successfully submitted reference set indicator batch.")
else:
self.config.logger.info(response.content)
success = False
self.config.logger.info("Unable to submit reference set indicator batch.")
except Exception as e:
success = False
self.config.logger.error("Unable to submit reference set indicator batch: %s", {e})
return success
def submit_indicator_batch_reference_table(self, indicator_batch_reference_table: Dict, table_name: str):
success: bool = True
try:
request: str = self.config.qradar_base_url + "reference_data/tables/bulk_load/" + table_name
self.config.logger.info("Sending request: %s", request)
headers = {}
if self.config.qradar_user_agent:
headers = {"User-Agent": self.config.qradar_user_agent}
response = requests.post(request, headers=headers, auth=(self.config.qradar_username, self.config.qradar_password), verify=False, data=json.dumps(indicator_batch_reference_table))
if response.status_code == 200:
self.config.logger.info("Successfully submitted reference table indicator batch.")
else:
self.config.logger.info(response.content)
success = False
self.config.logger.info("Unable to submit reference table indicator batch.")
except Exception as e:
success = False
self.config.logger.error("Unable to submit reference table indicator batch: %s", {e})
return success
def create_indicator(self, indicator_context: str, indicator_type: str, indicator_girs: str, indicator_confidence_level: str, indicator_malware_family: str, indicator_malware_family_titan_url: str, indicator_expires: str, indicator_mitre_tactics: str, indicator_activity_first: str, indicator_activity_last: str, indicator_value: str, indicator_titan_url: str):
indicator: Dict = {}
try:
indicator = {
"Context": indicator_context,
"Type": indicator_type,
"GIRs": indicator_girs,
"Confidence Level": indicator_confidence_level,
"Malware Family": indicator_malware_family,
"Malware Family Titan URL": indicator_malware_family_titan_url,
"Expires": indicator_expires,
"Mitre Tactics": indicator_mitre_tactics,
"Indicator": indicator_value,
"Indicator Titan URL": indicator_titan_url,
"First Activity": indicator_activity_first,
"Last Activity": indicator_activity_last
}
except Exception as e:
indicator = {}
self.config.logger.error("Unable to create indicator: %s", {e})
return indicator
def process_indicators(self, indicators: List, reference_object_type: str) -> bool:
success: bool = True
try:
current_time: int = int(round(time.time() * 1000))
if reference_object_type == "Reference Sets":
indicator_batch_ip_medium = []
indicator_batch_ip_high = []
indicator_batch_hash_medium = []
indicator_batch_hash_high = []
indicator_batch_url_medium = []
indicator_batch_url_high = []
for indicator in indicators:
indicator_type: str = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_type"]), "")
indicator_confidence_level: str = self.first_not_none_or_default(json_get(indicator, ["data", "confidence"]), "")
indicator_expiration: int = self.first_not_none_or_default(json_get(indicator, ["data", "expiration"]), 0)
process_indicator: bool = True
if self.config.qradar_ignore_expired_malware_indicators_sets:
if indicator_expiration <= current_time:
process_indicator = False
if process_indicator:
if indicator_type == "ipv4":
indicator_value_ipv4 = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_data", "address"]), "")
if indicator_value_ipv4:
if indicator_confidence_level == "high":
indicator_batch_ip_high.append(indicator_value_ipv4) if indicator_value_ipv4 not in indicator_batch_ip_high else indicator_batch_ip_high
else:
indicator_batch_ip_medium.append(indicator_value_ipv4) if indicator_value_ipv4 not in indicator_batch_ip_medium else indicator_batch_ip_medium
if indicator_type == "url":
indicator_value_url = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_data", "url"]), "")
if indicator_value_url:
if indicator_confidence_level == "high":
indicator_batch_url_high.append(indicator_value_url) if indicator_value_url not in indicator_batch_url_high else indicator_batch_url_high
else:
indicator_batch_url_medium.append(indicator_value_url) if indicator_value_url not in indicator_batch_url_medium else indicator_batch_url_medium
if indicator_type == "file":
indicator_value_md5 = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_data", "file", "md5"]), "")
if indicator_value_md5:
if indicator_confidence_level == "high":
indicator_batch_hash_high.append(indicator_value_md5) if indicator_value_md5 not in indicator_batch_hash_high else indicator_batch_hash_high
else:
indicator_batch_hash_medium.append(indicator_value_md5) if indicator_value_md5 not in indicator_batch_hash_medium else indicator_batch_hash_medium
indicator_value_sha1 = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_data", "file", "sha1"]), "")
if indicator_value_sha1:
if indicator_confidence_level == "high":
indicator_batch_hash_high.append(indicator_value_sha1) if indicator_value_sha1 not in indicator_batch_hash_high else indicator_batch_hash_high
else:
indicator_batch_hash_medium.append(indicator_value_sha1) if indicator_value_sha1 not in indicator_batch_hash_medium else indicator_batch_hash_medium
indicator_value_sha256 = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_data", "file", "sha256"]), "")
if indicator_value_sha256:
if indicator_confidence_level == "high":
indicator_batch_hash_high.append(indicator_value_sha256) if indicator_value_sha256 not in indicator_batch_hash_high else indicator_batch_hash_high
else:
indicator_batch_hash_medium.append(indicator_value_sha256) if indicator_value_sha256 not in indicator_batch_hash_medium else indicator_batch_hash_medium
if self.config.qradar_populate_malware_indicators_sets:
self.submit_indicator_batch_reference_set(indicator_batch_ip_medium, self.config.qradar_malware_indicators_set_ip_medium_confidence)
self.submit_indicator_batch_reference_set(indicator_batch_ip_high, self.config.qradar_malware_indicators_set_ip_high_confidence)
self.submit_indicator_batch_reference_set(indicator_batch_hash_medium, self.config.qradar_malware_indicators_set_hash_medium_confidence)
self.submit_indicator_batch_reference_set(indicator_batch_hash_high, self.config.qradar_malware_indicators_set_hash_high_confidence)
self.submit_indicator_batch_reference_set(indicator_batch_url_medium, self.config.qradar_malware_indicators_set_url_medium_confidence)
self.submit_indicator_batch_reference_set(indicator_batch_url_high, self.config.qradar_malware_indicators_set_url_high_confidence)
if reference_object_type == "Reference Tables":
indicator_batch = {}
for indicator in indicators:
indicator_uid_raw: str = self.first_not_none_or_default(json_get(indicator, ["data", "uid"]), "")
indicator_context: str = self.first_not_none_or_default(json_get(indicator, ["data", "context", "description"]), "")
indicator_type: str = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_type"]), "")
indicator_girs_list: str = self.first_not_none_or_default(json_get(indicator, ["data", "intel_requirements"]), [])
indicator_girs = ""
for gir in indicator_girs_list:
gir_name = ""
for gir_ref in self.config.girs:
if gir_ref[1] == gir:
gir_name = gir_ref[3]
break
if len(indicator_girs) > 0:
indicator_girs += ","
indicator_girs += "'" + gir + " - " + gir_name + "'"
indicator_girs = "[" + indicator_girs + "]"
indicator_confidence_level: str = self.first_not_none_or_default(json_get(indicator, ["data", "confidence"]), "")
indicator_malware_family: str = self.first_not_none_or_default(json_get(indicator, ["data", "threat", "data", "family"]), "")
indicator_malware_family_titan_url: str = self.config.titan_portal_base_url + "malware/" + self.first_not_none_or_default(json_get(indicator, ["data", "threat", "data", "malware_family_profile_uid"]), "")
indicator_expires: str = self.first_not_none_or_default(json_get(indicator, ["data", "expiration"]), "")
indicator_mitre_tactics: str = self.first_not_none_or_default(json_get(indicator, ["data", "mitre_tactics"]), "")
indicator_activity_first: str = self.first_not_none_or_default(json_get(indicator, ["activity", "first"]), "")
indicator_activity_last: str = self.first_not_none_or_default(json_get(indicator, ["activity", "last"]), "")
indicator_expiration: int = self.first_not_none_or_default(json_get(indicator, ["data", "expiration"]), 0)
process_indicator: bool = True
if self.config.qradar_ignore_expired_malware_indicators_tables:
if indicator_expiration <= current_time:
process_indicator = False
if process_indicator:
indicator_value: str = ""
if indicator_type == "ipv4":
indicator_value = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_data", "address"]), "")
indicator_titan_url = self.config.titan_portal_base_url + "malware/indicator/" + self.first_not_none_or_default(json_get(indicator, ["uid"]), "")
indicator_uid = "ipv4-" + indicator_uid_raw
indicator_created = self.create_indicator(indicator_context, indicator_type, indicator_girs, indicator_confidence_level, indicator_malware_family, indicator_malware_family_titan_url, indicator_expires, indicator_mitre_tactics, indicator_activity_first, indicator_activity_last, indicator_value, indicator_titan_url)
if indicator_created:
indicator_batch[indicator_uid] = indicator_created
if indicator_type == "url":
indicator_value = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_data", "url"]), "")
indicator_titan_url = self.config.titan_portal_base_url + "malware/indicator/" + self.first_not_none_or_default(json_get(indicator, ["uid"]), "")
indicator_uid = "url-" + indicator_uid_raw
indicator_created = self.create_indicator(indicator_context, indicator_type, indicator_girs, indicator_confidence_level, indicator_malware_family, indicator_malware_family_titan_url, indicator_expires, indicator_mitre_tactics, indicator_activity_first, indicator_activity_last, indicator_value, indicator_titan_url)
if indicator_created:
indicator_batch[indicator_uid] = indicator_created
if indicator_type == "file":
indicator_value = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_data", "file", "md5"]), "")
indicator_titan_url = self.config.titan_portal_base_url + "malware/indicator/" + self.first_not_none_or_default(json_get(indicator, ["uid"]), "")
indicator_uid = "md5-" + indicator_uid_raw
indicator_created = self.create_indicator(indicator_context, indicator_type, indicator_girs, indicator_confidence_level, indicator_malware_family, indicator_malware_family_titan_url, indicator_expires, indicator_mitre_tactics, indicator_activity_first, indicator_activity_last, indicator_value, indicator_titan_url)
if indicator_created:
indicator_batch[indicator_uid] = indicator_created
indicator_value = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_data", "file", "sha1"]), "")
indicator_titan_url = self.config.titan_portal_base_url + "malware/indicator/" + self.first_not_none_or_default(json_get(indicator, ["uid"]), "")
indicator_uid = "sha1-" + indicator_uid_raw
indicator_created = self.create_indicator(indicator_context, indicator_type, indicator_girs, indicator_confidence_level, indicator_malware_family, indicator_malware_family_titan_url, indicator_expires, indicator_mitre_tactics, indicator_activity_first, indicator_activity_last, indicator_value, indicator_titan_url)
if indicator_created:
indicator_batch[indicator_uid] = indicator_created
indicator_value = self.first_not_none_or_default(json_get(indicator, ["data", "indicator_data", "file", "sha256"]), "")
indicator_titan_url = self.config.titan_portal_base_url + "malware/indicator/" + self.first_not_none_or_default(json_get(indicator, ["uid"]), "")
indicator_uid = "sha256-" + indicator_uid_raw
indicator_created = self.create_indicator(indicator_context, indicator_type, indicator_girs, indicator_confidence_level, indicator_malware_family, indicator_malware_family_titan_url, indicator_expires, indicator_mitre_tactics, indicator_activity_first, indicator_activity_last, indicator_value, indicator_titan_url)
if indicator_created:
indicator_batch[indicator_uid] = indicator_created
if self.config.qradar_populate_malware_indicators_tables:
self.submit_indicator_batch_reference_table(indicator_batch, self.config.qradar_malware_indicators_table)
except Exception as e:
success = False
self.config.logger.error("Unable to process indicators: %s", {e})
return success
| 59.630531 | 365 | 0.61559 | 26,662 | 0.989203 | 0 | 0 | 0 | 0 | 0 | 0 | 3,759 | 0.139465 |
0b19361cb7bec29a8393dd2d4d6342be97e9c458 | 1,664 | py | Python | src/polls/polls/project/migrations/0002_shoppingitem_shoppinglist.py | Valeriu92/Shopping_list | 3614dfc6691c28cf88db8af77ba246a9c4943794 | [
"MIT"
] | null | null | null | src/polls/polls/project/migrations/0002_shoppingitem_shoppinglist.py | Valeriu92/Shopping_list | 3614dfc6691c28cf88db8af77ba246a9c4943794 | [
"MIT"
] | 6 | 2021-03-19T08:45:06.000Z | 2021-09-22T19:19:21.000Z | src/polls/polls/project/migrations/0002_shoppingitem_shoppinglist.py | Valeriu92/Shopping_list | 3614dfc6691c28cf88db8af77ba246a9c4943794 | [
"MIT"
] | null | null | null | # Generated by Django 3.0.5 on 2020-06-21 14:52
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the ``ShoppingList`` and ``ShoppingItem`` tables.

    Each ``ShoppingItem`` belongs to a ``ShoppingList`` (FK ``list``) and
    each ``ShoppingList`` belongs to a ``project.Users`` row (FK ``owner``);
    both foreign keys cascade on delete.
    """

    dependencies = [
        # Must run after the initial migration that creates project.Users.
        ('project', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='ShoppingList',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                # Timestamps maintained automatically by Django.
                ('date_created', models.DateTimeField(auto_now_add=True)),
                ('date_updated', models.DateTimeField(auto_now=True)),
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='shopping_lists', to='project.Users')),
            ],
        ),
        migrations.CreateModel(
            name='ShoppingItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('quantity', models.IntegerField(default=0)),
                # gramaj (weight) is optional; marca/magazin are brand/store.
                ('gramaj', models.CharField(max_length=20, null=True)),
                ('marca', models.CharField(max_length=50)),
                ('magazin', models.CharField(max_length=50)),
                ('date_created', models.DateTimeField(auto_now_add=True)),
                ('date_updated', models.DateTimeField(auto_now=True)),
                ('list', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='shopping_item', to='project.ShoppingList')),
            ],
        ),
    ]
| 42.666667 | 146 | 0.597356 | 1,538 | 0.924279 | 0 | 0 | 0 | 0 | 0 | 0 | 297 | 0.178486 |
0b195f0b6c15b4019237a4ae8b5b8c84db5c27fd | 2,766 | py | Python | utils.py | cswzhang/RESD | 87370182b8717a7fde007937c6e3e2fff02fb593 | [
"Apache-2.0"
] | 1 | 2021-11-29T09:32:48.000Z | 2021-11-29T09:32:48.000Z | utils.py | cswzhang/RESD | 87370182b8717a7fde007937c6e3e2fff02fb593 | [
"Apache-2.0"
] | null | null | null | utils.py | cswzhang/RESD | 87370182b8717a7fde007937c6e3e2fff02fb593 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
import logging
import numpy as np
import pandas as pd
from sklearn import metrics
from sklearn.linear_model import LogisticRegression
from sklearn.multiclass import OneVsRestClassifier
from sklearn.preprocessing import LabelBinarizer
from sklearn.utils import shuffle
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.ERROR)
def classification(embedding, lbl_path, split_ratio=0.7, loop=100):
    """Evaluate *embedding* by node classification.

    Repeatedly (``loop`` times) shuffles the labelled nodes, trains a
    one-vs-rest logistic regression on the first ``split_ratio`` fraction
    and scores the rest.  Prints and returns the averaged metrics
    (accuracy, micro-F1, macro-F1), rounded to 4 decimals.
    """
    raw_labels = pd.read_csv(lbl_path, header=None, sep=' ').values
    acc_sum = 0.0
    micro_sum = 0.0
    macro_sum = 0.0
    for _ in range(loop):
        shuffled = shuffle(raw_labels)
        node_ids = shuffled[:, 0]
        node_classes = shuffled[:, 1]
        binarizer = LabelBinarizer()
        onehot = binarizer.fit_transform(node_classes)
        split = int(shuffled.shape[0] * split_ratio)
        feats = embedding[node_ids]
        train_x, test_x = feats[:split, :], feats[split:, :]
        train_y, test_y = onehot[:split, :], onehot[split:, :]
        model = OneVsRestClassifier(
            LogisticRegression(class_weight='balanced', solver='liblinear', n_jobs=-1))
        model.fit(train_x, train_y)
        # Predict class probabilities, then re-binarize the argmax so the
        # comparison below is done on one-hot matrices.
        proba = model.predict_proba(test_x)
        pred = binarizer.transform(np.argmax(proba, 1))
        pred_cls = np.argmax(pred, 1)
        true_cls = np.argmax(test_y, 1)
        acc_sum += np.sum(pred_cls == true_cls) / len(pred)
        micro_sum += metrics.f1_score(true_cls, pred_cls, average='micro')
        macro_sum += metrics.f1_score(true_cls, pred_cls, average='macro')
    eval_dict = {
        'acc': round(1.0 * acc_sum / loop, 4),
        'f1-micro': round(1.0 * micro_sum / loop, 4),
        'f1-macro': round(1.0 * macro_sum / loop, 4),
    }
    print('split_ratio: {}'.format(split_ratio))
    print(eval_dict)
    return eval_dict
def _k_precision(embedding, lbl_path, k, lbl):
    """Mean precision@k for the nodes carrying label *lbl*.

    For every node with label ``lbl`` (column 2 of ``lbl_path``), take its
    ``k`` nearest neighbours in embedding space (Euclidean distance,
    excluding the node itself, ties broken by ascending index as before)
    and measure which fraction also carries ``lbl``.  Returns the average
    over all such nodes.
    """
    label = pd.read_csv(lbl_path, header=None, sep=' ').values
    nodes = label[np.where(label[:, 1] == lbl)][:, 0]
    acc = 0.0
    for node in nodes:
        # Vectorized distances to every node at once instead of a Python
        # loop + dict sort (same result, O(d·n) per node instead of O(n log n)
        # Python-level work).
        dists = np.linalg.norm(embedding - embedding[node], axis=1)
        # Stable sort keeps the original tie-breaking (ascending index).
        order = np.argsort(dists, kind='stable')
        order = order[order != node][:k]
        acc += np.isin(order, nodes).sum() / k
    acc /= len(nodes)
    return acc
def k_precision(embedding, lbl_path, k=50):
    """Print and return precision@k for the "bots" (1) and "admins" (2) labels.

    Fix: the original only printed ``eval_dict`` and implicitly returned
    ``None``; it now returns the dict, consistent with ``classification``.
    """
    eval_dict = {
        'precision': k,
        'bots_acc': _k_precision(embedding, lbl_path, k, 1),
        'admins_acc': _k_precision(embedding, lbl_path, k, 2),
    }
    print(eval_dict)
    return eval_dict
| 35.922078 | 93 | 0.60376 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 182 | 0.065799 |
0b1a7603f0853665e4cb6bd2c490cfde38e7a664 | 2,505 | py | Python | misc/make_struct.py | kwrobert/nanowire | 2dd003e9e1898b2ae9f78a261b3ce1406ec717b8 | [
"Apache-2.0",
"MIT"
] | null | null | null | misc/make_struct.py | kwrobert/nanowire | 2dd003e9e1898b2ae9f78a261b3ce1406ec717b8 | [
"Apache-2.0",
"MIT"
] | null | null | null | misc/make_struct.py | kwrobert/nanowire | 2dd003e9e1898b2ae9f78a261b3ce1406ec717b8 | [
"Apache-2.0",
"MIT"
] | 1 | 2019-06-23T01:26:44.000Z | 2019-06-23T01:26:44.000Z | import os
import glob
import argparse as ap
import shutil as sh
import re
def main():
    """For each frequency, copy the simulation run that used the minimum
    number of basis terms into a fresh directory tree.

    CLI arguments:
        min_file: CSV with frequency (col 1) and minimum basis terms
            (col 2); the first line is a header and is skipped.
        s4_dir:   top level dir containing the ``frequency*`` subdirs.
        dest_dir: top level dir the selected runs are copied into.
    """
    parser = ap.ArgumentParser(description="""Uses minimum basis term file to extract the data for a
    simulation that used the minimum number of basis terms for each frequency""")
    parser.add_argument('min_file', type=str, help="""Path to file containing frequency in first
    column and minimum number of basis terms in second column. Comma separated""")
    parser.add_argument('s4_dir', type=str, help="""Path to top level dir containing all the frequency
    subdirectories""")
    parser.add_argument('dest_dir', type=str, help="""Path to top level dir which all files will be
    moved to""")
    args = parser.parse_args()

    s4_dir = os.path.abspath(args.s4_dir)
    min_file = os.path.abspath(args.min_file)
    dest_dir = os.path.abspath(args.dest_dir)
    # fix: test the normalized path (the original checked args.s4_dir).
    if not os.path.isdir(s4_dir):
        print("S4 dir does not exist")
        quit()
    if not os.path.isfile(min_file):
        print('Min file does not exists')
        quit()
    try:
        os.makedirs(dest_dir)
    except OSError:
        # destination already exists
        pass

    # Parse (frequency, min basis) pairs; frequencies are normalized through
    # the same {:G} formatting used in the directory names.
    with open(min_file, 'r') as f:
        data = [('{:G}'.format(float(line.split(',')[0])),
                 line.split(',')[1].strip('\n'))
                for line in f.readlines()[1:]]
    print(data)

    freq_dirs = glob.glob(os.path.join(s4_dir, "frequency*"))
    for fdir in freq_dirs:
        for freq, numbasis in data:
            print('Frequency {} has minimum basis of {}'.format(freq, numbasis))
            # Match a directory containing this frequency: only the first 4
            # characters of the mantissa are compared, any digits may follow.
            mantissa, exponent = freq.split('E')
            # fix: use a raw string for the regex escape; the old '\.'
            # literal is an invalid string escape (DeprecationWarning W605).
            regex = mantissa[0:4] + "[0-9]+E\\" + exponent
            regex = regex.replace('.', r'\.')
            m = re.search(regex, fdir)
            if m:
                print(m.group(0))
                print('Frequency {} found in directory {}'.format(freq, fdir))
                basis_glob = os.path.join(fdir, 'numbasis_{}*'.format(numbasis))
                basis_path = glob.glob(basis_glob)
                # Exactly one run should match the minimal basis count.
                assert len(basis_path) == 1
                basis_path = basis_path[0]
                if os.path.isdir(basis_path):
                    print('Found min basis path {}'.format(basis_path))
                    new_path = os.path.join(dest_dir, os.path.basename(fdir))
                    print('Copying {} to {}'.format(basis_path, new_path))
                    sh.copytree(basis_path, new_path)
                else:
                    print('Missing {} !!!!'.format(basis_path))
                break
# Entry point: run the extraction only when executed as a script.
if __name__ == '__main__':
    main()
| 40.403226 | 125 | 0.591617 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 691 | 0.275848 |
0b1af5ee826d4553c7214ee6c50908fbb0aba15f | 1,291 | py | Python | rurina5/test2.py | TeaCondemns/rurina | 43725ebea5872953125271a9abb300a4e3a80a64 | [
"MIT"
] | null | null | null | rurina5/test2.py | TeaCondemns/rurina | 43725ebea5872953125271a9abb300a4e3a80a64 | [
"MIT"
] | null | null | null | rurina5/test2.py | TeaCondemns/rurina | 43725ebea5872953125271a9abb300a4e3a80a64 | [
"MIT"
] | null | null | null | from input import flip
from utilities.surface import *
import utilities.time as time
from nodes import Control, init
from shape import draw
import pygame
import pygame.key as key
from event import get, typename2
from input import map
# Manual test script: draws a circle-masked gradient and a Control node,
# then spins the pygame event loop forever (close the window process to stop).
screen = pygame.display.set_mode((800, 800), pygame.RESIZABLE)

# Build a 100x100 gradient and clip it to a circle via an alpha mask.
_mask = AlphaSurface((100, 100))
_gradient = gradient((0, 0, 100, 100), [(200, 20, 20), (20, 200, 20), (20, 20, 200)], surface=pygame.Surface((100, 100)), offset=1)
pygame.draw.circle(_mask, (40, 240, 40), (50, 50), 50)
_gradient = mask(_gradient, _mask)

# Initialize the node system and create a control with a custom focus cursor.
init()
control = Control()
control.focused_cursor = 2

print(pygame.BUTTON_LEFT, pygame.BUTTON_MIDDLE, pygame.BUTTON_RIGHT)

while True:
    screen.fill((255, 255, 255))
    # Advance the input and time subsystems once per frame.
    flip()
    time.flip()
    pygame.display.get_surface().blit(_gradient, (0, 0))
    draw(control.rect)
    control.input(get(False))
    # Debugging helpers kept for manual testing:
    # for e in get(False):
    #     print(typename2(e))
    # if control.pressed:
    #     print('actions:', map._actions)
    #     print('events:', get(False))
    #
    # for e in get(False):
    #     if e.type == pygame.MOUSEBUTTONUP:
    #         if e.button == pygame.BUTTON_LEFT:
    #             exit()
    # Drain pygame's queue so the window stays responsive.
    pygame.event.get()
    pygame.display.flip()
    # print(time.dt)
    # print(time.fps)
0b1b43897511e51e58442813b0108b29ef96226a | 15,379 | py | Python | Lab3/turingmachine.py | PedroDeSanti/PCS3616 | f39454013f14aba01155f145ba48385cb102c942 | [
"MIT"
] | null | null | null | Lab3/turingmachine.py | PedroDeSanti/PCS3616 | f39454013f14aba01155f145ba48385cb102c942 | [
"MIT"
] | null | null | null | Lab3/turingmachine.py | PedroDeSanti/PCS3616 | f39454013f14aba01155f145ba48385cb102c942 | [
"MIT"
] | null | null | null | # turingmachine.py - implementation of the Turing machine model
#
# Copyright 2014 Jeffrey Finkelstein.
#
# This file is part of turingmachine.
#
# turingmachine is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# turingmachine is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# turingmachine. If not, see <http://www.gnu.org/licenses/>.
"""Provides an implementation of the Turing machine model."""
import logging
import os.path
# Create and configure the logger which logs debugging information by default;
# it prints each machine configuration while logs[0] is True.
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter('[%(levelname)s] %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(level=logging.DEBUG)

#: Represents a movement of the read/write head of the Turing machine to the
#: left.
L = -1

#: Represents a movement of the read/write head of the Turing machine to the
#: right.
R = +1

# Mutable one-element "slots" shared across the module:
#   tm[0]     - the currently loaded TuringMachine (set by load())
#   output[0] - final tape contents of the last accepting run
#   logs[0]   - whether per-step logging is enabled
tm = [None]
output = [None]
logs = [True]
class UnknownSymbol(Exception):
    """Raised when the Turing machine encounters a symbol that does not
    appear in the transition dictionary."""


class UnknownState(Exception):
    """Raised when the Turing machine enters a state that does not appear
    in the transition dictionary."""


class BadSymbol(Exception):
    """Raised when a tape-alphabet entry is not a string of length one."""


class BadFile(Exception):
    """Raised when a machine-description file violates the expected
    format."""


class NoMachine(Exception):
    """Raised when the user tries to run without having loaded any
    machine."""
def load(file, max_steps=10000):
    '''Import a Turing machine description from the given file.

    The file must have the following structure:
        1st line:  "ATM"
        2nd line:  any comment you might want
        3rd line:  input alphabet
        4th line:  tape alphabet
        5th line:  number of tapes (must be 1 yet)
        6th line:  number of trails on each tape (must be 1 yet)
        7th line:  number of directions on which the tapes are infinite
                   (must be 2 yet)
        8th line:  initial state
        9th line:  final state
        10th line and beyond, one transition per line:
            current state, symbol read, next state, symbol written,
            direction to move (R or L)
        last line: "end"
    Comments are made with "//".

    The resulting machine is stored in the module-level ``tm[0]`` slot.

    Raises:
        BadFile: if the file is missing or violates the format above.
    '''
    if not os.path.exists(file):
        raise BadFile("File does not exist.")
    # fix: use a context manager; the original never closed the file.
    with open(file, "r") as handle:
        lines = handle.read().split("\n")

    # Strip "//" comments.  str.find also fixes an IndexError the original
    # char-by-char scan hit when a line ended with a single "/".
    raw = []
    for line in lines:
        cut = line.find("//")
        raw.append(line if cut < 0 else line[:cut])

    # Tokenize: tabs behave like spaces; empty tokens and blank lines are
    # discarded.  (fix: the original assigned into a string — line[i] = " " —
    # which raised TypeError whenever a tab was present.)
    desc = []
    for line in raw:
        tokens = [tok for tok in line.replace("\t", " ").split(" ") if tok]
        if tokens:
            desc.append(tokens)

    # Validate the fixed part of the header.
    if desc[0][0] != "ATM":
        raise BadFile("First line is wrong.")
    if desc[4][0] != "1":
        raise BadFile("Number of tapes must be 1.")
    if desc[5][0] != "1":
        raise BadFile("Number of trails must be 1.")
    if desc[6][0] != "2":
        raise BadFile("Number of infinite directions must be 2.")
    if desc[-1][0] != "end":
        raise BadFile("Last line is wrong.")

    # Alphabets and distinguished states.
    in_alph = desc[2]
    tp_alph = desc[3]
    if len(desc[7]) > 1:
        raise BadFile("Initial state must be one only thing.")
    ini = desc[7][0]
    if len(desc[8]) > 1:
        raise BadFile("Final state must be one only thing.")
    fin = desc[8][0]

    # Everything between the header and the trailing "end" is a transition.
    transitions = {}
    states = []
    for number, rule in enumerate(desc[9:-1], start=1):
        if len(rule) != 5:
            raise BadFile("Transition " + str(number) + " badly formulated.")
        state, read, nxt, write, direction = rule
        # fix: the original message said "input alphabet" although the tape
        # alphabet is what is actually checked here.
        if read not in tp_alph or write not in tp_alph:
            raise BadFile("All symbols must be declared in tape alphabet.")
        if direction != "R" and direction != "L":
            raise BadFile("Directions must be either R or L.")
        if state not in states:
            states.append(state)
            transitions[state] = {}
        transitions[state][read] = (nxt, write, R if direction == "R" else L)
        # Make sure the destination state exists even if it has no outgoing
        # transitions of its own.
        if nxt not in states:
            states.append(nxt)
            transitions[nxt] = {}

    if ini not in states:
        raise BadFile("Initial state must be in transitions.")
    if fin not in states:
        raise BadFile("Final state must be in transitions")
    tm[0] = TuringMachine(states, in_alph, ini, fin, transitions, max_steps)
def run(string):
    '''Run the loaded Turing machine on the given string.

    Prints the accept/reject verdict and also returns it, so callers can
    use the outcome programmatically (the original returned None).

    Raises:
        NoMachine: if no machine has been loaded with :func:`load`.
    '''
    # fix: identity check for None instead of "== None".
    if tm[0] is None:
        raise NoMachine("You must first load an machine.")
    result = tm[0](string)
    print(result)
    return result
def test(io_file):
    '''Run every test case in the given file and print/return the score.

    Each non-empty line that does not start with "#" must contain three
    whitespace-separated fields: input string, a separator, and the
    expected tape contents (or "STOP_FAIL" when the machine is expected
    not to accept).  Prints a colored verdict per case and a final grade
    out of 10, which is also returned (the docstring promised a return
    value that the original never produced).
    '''
    if not os.path.exists(io_file):
        raise BadFile(io_file + " does not exist.")
    # fix: close the file; the original leaked the handle.
    with open(io_file) as handle:
        content = handle.read().split("\n")

    # Tokenize, dropping blank lines and "#" comment lines.
    cases = []
    for text in content:
        tokens = [tok for tok in text.split(" ") if tok]
        if tokens and not tokens[0].startswith("#"):
            # fields: input word, separator (ignored), expected output
            cases.append([tokens[0], tokens[2]])

    logs[0] = False  # silence per-step logging while testing
    correct = 0
    try:
        for number, (word, expected) in enumerate(cases, start=1):
            accepted = tm[0](word)
            if accepted:
                # fix: strip("B") also copes with an all-blank tape, where
                # the original character-by-character loops raised IndexError.
                ok = output[0].strip("B") == expected
            else:
                ok = expected == "STOP_FAIL"
            if ok:
                correct += 1
                print("test " + str(number) + ": " + "\033[92m {}\033[00m" .format("Correct"))
            else:
                print("test " + str(number) + ": " + "\033[91m {}\033[00m" .format("Wrong"))
    finally:
        # fix: restore logging even if a case raises.
        logs[0] = True
    score = round(10 * correct / len(cases), 2)
    print(f"Nota final: {score}")
    return score
class TuringMachine(object):
    """An implementation of the Turing machine model.

    Once instantiated, the Turing machine can be executed by calling it
    with an input string.

    `states` is a set of states. A "state" can be anything, but usually simple
    integers suffice.

    `in_alph` is the input alphabet; input strings may only contain these
    symbols.

    `initial_state` is the state of the machine before it starts reading
    symbols from an input string. This state must be a member of `states`.

    `accept_state` is the state that will cause the machine to halt and accept
    (that is, return ``True``). It must be a member of `states`.

    `transition` is a two-dimensional dictionary specifying how the
    "configuration" of the machine (that is, the head location, state, and
    string) changes each time it reads from its input string. The dictionary
    is indexed first by state, then by symbol. Each entry must be a
    three-tuple, *(new_state, new_symbol, direction)*, where *new_state* is
    the next state, *new_symbol* is the symbol written at the current head
    location, and *direction* is either :data:`L` or :data:`R`, representing
    movement of the head left or right, respectively.

    `max_steps` is the maximum number of steps the machine can walk before it
    is considered an infinite loop and the call returns ``False``.

    The transition dictionary need not have an entry for the accept state,
    because :meth:`__call__` checks for it and halts immediately.
    """

    def __init__(self, states, in_alph, initial_state, accept_state,
                 transition, max_steps, *args, **kw):
        self.states = states
        self.in_alph = in_alph
        self.accept_state = accept_state
        self.initial_state = initial_state
        self.transition = transition
        self.max_steps = max_steps

    def _log_state(self, string, head_location, current_state):
        """Logs a visual representation of the current head location, state,
        and contents of the tape of this Turing machine.

        For example, with ``'_010_'`` on the tape, state ``4`` and the head
        over the ``1``, this logs::

            _010_
              ^
              4

        The caret represents the current location of the read/write head,
        and the number beneath it represents the current state. Called from
        :meth:`__call__` while ``logs[0]`` is enabled.
        """
        logger.debug('')
        logger.debug(string)
        logger.debug(' ' * head_location + '^')
        logger.debug(' ' * head_location + str(current_state))

    def __call__(self, string):
        """Run the machine on `string` and return the accept/reject verdict.

        `string` must consist only of input-alphabet symbols; it is padded
        with ``'B'`` blanks on both sides internally. Returns ``True`` if
        the machine reaches the accept state (the final tape is then stored
        in the module-level ``output[0]``) and ``False`` if it has no
        applicable transition or exceeds ``max_steps`` steps.

        Raises:
            UnknownSymbol: if `string` contains a symbol outside the input
                alphabet.
            UnknownState: if the machine enters a state with no entry in
                the transition dictionary.
            BadSymbol: if a transition writes a symbol whose length is not 1.
        """
        # Check if string is valid
        for char in string:
            if not char in self.in_alph: raise UnknownSymbol("String does not correspond to given input alphabet.")
        current_state = self.initial_state
        string = "B" + string + "B"
        # We assume that all strings will be input with one blank on the left
        # and one blank on the right, so the head is initially at position 1.
        head_location = 1
        steps = 0
        # may loop forever if accept or reject state are never found
        while True:
            # If the head has moved too far to the left or right, add a blank
            # to the current string in the appropriate location. If a blank is
            # added to the left, the head location must be incremented, since
            # the string has now essentially been shifted right by one cell.
            if head_location < 0:
                string = 'B' + string
                head_location += 1
            elif head_location >= len(string):
                string += 'B'
            if logs[0]: self._log_state(string, head_location, current_state)
            # check for accepting or step-limit configurations
            if current_state == self.accept_state:
                output[0] = string
                return True
            if steps > self.max_steps:
                return False
            # for the sake of brevity, rename some verbose variables
            h = head_location
            q = current_state
            s = string[h]
            # if the current_state is not in the transition table, raise error
            if q not in self.transition:
                raise UnknownState('{} is not in transition'
                                   ' dictionary'.format(q))
            # check if the transition table has an entry for the current symbol
            if s not in self.transition[q]:
                return False
            # compute the new configuration from the transition function
            new_state, new_symbol, direction = self.transition[q][s]
            # assert that the symbol to write is a string of length one
            if len(new_symbol) != 1:
                raise BadSymbol('tape alphabet must only include symbols of'
                                ' length 1 ({})'.format(new_symbol))
            # write the specified new symbol to the string; Python strings are
            # immutable, so we must create a new one
            string = string[:h] + new_symbol + string[h + 1:]
            # set the new state and head location
            current_state = new_state
            # direction is either L or R, which are defined to be -1 and +1
            head_location += direction
            steps += 1
        # NOTE: unreachable — the while True loop above only exits via return.
        raise Exception('Turing machine somehow halted without accepting or'
                        ' rejecting.')
0b1b5ca00bb0a54148f31c9c2c856cd992807448 | 14,274 | py | Python | stdlib2-src/dist-packages/quodlibet/formats/xiph.py | ch1huizong/Scode | c34fb9d0f9b73fe199ba370f2e3ffb30b8f70895 | [
"MIT"
] | null | null | null | stdlib2-src/dist-packages/quodlibet/formats/xiph.py | ch1huizong/Scode | c34fb9d0f9b73fe199ba370f2e3ffb30b8f70895 | [
"MIT"
] | null | null | null | stdlib2-src/dist-packages/quodlibet/formats/xiph.py | ch1huizong/Scode | c34fb9d0f9b73fe199ba370f2e3ffb30b8f70895 | [
"MIT"
] | null | null | null | # Copyright 2004-2005 Joe Wreschnig, Michael Urman
# 2009-2014 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation
import sys
import base64
import mutagen
from mutagen.flac import Picture, error as FLACError
from quodlibet import config
from quodlibet import const
from quodlibet.config import RATINGS
from quodlibet.util.path import get_temp_cover_file
from quodlibet.formats._audio import AudioFile
from quodlibet.formats._image import EmbeddedImage, APICType
# Migrate old layout
sys.modules["formats.flac"] = sys.modules[__name__]
sys.modules["formats.oggvorbis"] = sys.modules[__name__]
class MutagenVCFile(AudioFile):
    """Base class for mutagen-backed formats tagged with vorbis comments."""

    # Human-readable format name; concrete subclasses override this.
    format = "Unknown Mutagen + vorbiscomment"
    # mutagen file class used to open files; set by concrete subclasses.
    MutagenType = None
    # Vorbis comments can embed cover art, so image editing is supported.
    can_change_images = True
def __init__(self, filename, audio=None):
# If we're done a type probe, use the results of that to avoid
# reopening the file.
if audio is None:
audio = self.MutagenType(filename)
self["~#length"] = int(audio.info.length)
try:
self["~#bitrate"] = int(audio.info.bitrate / 1000)
except AttributeError:
pass
# mutagen keys are lower cased
for key, value in (audio.tags or {}).items():
self[key] = "\n".join(value)
self.__post_read()
self.sanitize(filename)
    def __post_read_total(self, main, fallback, single):
        """Fold separate total-count tags into one "part/total" value.

        E.g. for ("tracktotal", "totaltracks", "tracknumber"): the part
        comes from an existing "x/y" value in *single*; the total prefers
        *main*, then *fallback*, then the "/y" suffix of *single*.  All
        source tags are removed and *single* is rewritten.
        """
        one = None
        total = None

        # Split an existing "part/total" value stored in the single tag.
        if single in self:
            parts = self[single].split("/", 1)
            if parts[0]:
                one = parts[0]
            if len(parts) > 1:
                total = parts[1]
            del self[single]

        # Dedicated total tags override the "/total" suffix; main wins
        # over fallback.
        if main in self:
            total = self[main]
            del self[main]
        else:
            if fallback in self:
                total = self[fallback]
                del self[fallback]

        # Reassemble "part", "/total" or "part/total".
        final = None
        if one is not None:
            final = one

        if total is not None:
            if final is None:
                final = "/" + total
            else:
                final += "/" + total

        if final is not None:
            self[single] = final
def __post_read(self):
email = config.get("editing", "save_email").strip()
maps = {"rating": float, "playcount": int}
for keyed_key, func in maps.items():
for subkey in ["", ":" + const.EMAIL, ":" + email]:
key = keyed_key + subkey
if key in self:
try:
self["~#" + keyed_key] = func(self[key])
except ValueError:
pass
del(self[key])
if "metadata_block_picture" in self:
self.has_images = True
del(self["metadata_block_picture"])
if "coverart" in self:
self.has_images = True
del(self["coverart"])
if "coverartmime" in self:
del(self["coverartmime"])
self.__post_read_total("tracktotal", "totaltracks", "tracknumber")
self.__post_read_total("disctotal", "totaldiscs", "discnumber")
def get_images(self):
try:
audio = self.MutagenType(self["~filename"])
except EnvironmentError:
return []
# metadata_block_picture
images = []
for data in audio.get("metadata_block_picture", []):
try:
cover = Picture(base64.b64decode(data))
except (TypeError, FLACError):
continue
f = get_temp_cover_file(cover.data)
images.append(EmbeddedImage(
f, cover.mime, cover.width, cover.height, cover.depth,
cover.type))
# coverart + coverartmime
cover = audio.get("coverart")
try:
cover = cover and base64.b64decode(cover[0])
except TypeError:
cover = None
if cover:
mime = audio.get("coverartmime")
mime = (mime and mime[0]) or "image/"
f = get_temp_cover_file(cover)
images.append(EmbeddedImage(f, mime))
images.sort(key=lambda c: c.sort_key)
return images
    def get_primary_image(self):
        """Returns the primary embedded image.

        Prefers the first COVER_FRONT picture from metadata_block_picture,
        then the first picture of any type, then the legacy coverart tag.
        Returns None if nothing usable is found (and clears has_images in
        the legacy-tag fallback path).
        """
        try:
            audio = self.MutagenType(self["~filename"])
        except EnvironmentError:
            return None
        pictures = []
        for data in audio.get("metadata_block_picture", []):
            try:
                pictures.append(Picture(base64.b64decode(data)))
            except (TypeError, FLACError):
                # Skip undecodable/corrupt picture blocks.
                pass
        cover = None
        for pic in pictures:
            if pic.type == APICType.COVER_FRONT:
                cover = pic
                break
            # Remember the first picture seen as a fallback.
            cover = cover or pic
        if cover:
            f = get_temp_cover_file(cover.data)
            return EmbeddedImage(
                f, cover.mime, cover.width, cover.height, cover.depth,
                cover.type)
        # Legacy coverart tag (base64 payload, optional coverartmime).
        cover = audio.get("coverart")
        try:
            cover = cover and base64.b64decode(cover[0])
        except TypeError:
            cover = None
        if not cover:
            self.has_images = False
            return
        mime = audio.get("coverartmime")
        mime = (mime and mime[0]) or "image/"
        f = get_temp_cover_file(cover)
        return EmbeddedImage(f, mime)
def clear_images(self):
"""Delete all embedded images"""
if not self.has_images:
return
try:
audio = self.MutagenType(self["~filename"])
except EnvironmentError:
return
audio.pop("metadata_block_picture", None)
audio.pop("coverart", None)
audio.pop("coverartmime", None)
audio.save()
self.has_images = False
    def set_image(self, image):
        """Replaces all embedded images by the passed image.

        Builds a FLAC Picture block from `image` and stores it base64
        encoded under metadata_block_picture, removing the legacy
        coverart/coverartmime tags.
        """
        try:
            audio = self.MutagenType(self["~filename"])
            data = image.file.read()
        except EnvironmentError:
            return
        pic = Picture()
        pic.data = data
        pic.type = APICType.COVER_FRONT
        pic.mime = image.mime_type
        pic.width = image.width
        pic.height = image.height
        pic.depth = image.color_depth
        # Drop the legacy tags so only one image representation remains.
        audio.pop("coverart", None)
        audio.pop("coverartmime", None)
        # NOTE(review): b64encode returns bytes; Vorbis comment values are
        # text on Python 3 -- confirm whether .decode("ascii") is needed here.
        audio["metadata_block_picture"] = base64.b64encode(pic.write())
        audio.save()
        self.has_images = True
def can_change(self, k=None):
if k is None:
return super(MutagenVCFile, self).can_change(None)
else:
l = k.lower()
return (super(MutagenVCFile, self).can_change(k) and
l not in ["rating", "playcount",
"metadata_block_picture",
"coverart", "coverartmime"] and
not l.startswith("rating:") and
not l.startswith("playcount:"))
    def __prep_write(self, comments):
        """Prepare the on-disk comment dict before writing our tags.

        Removes every tag we manage (all except the image tags and
        rating/playcount entries belonging to other users), then re-adds
        rating/playcount for the configured email if saving to songs is
        enabled.
        """
        email = config.get("editing", "save_email").strip()
        # NOTE(review): this deletes entries while iterating comments.keys();
        # safe only if keys() returns a snapshot (mutagen's VCommentDict) --
        # on a plain py3 dict this would raise RuntimeError. Confirm.
        for key in comments.keys():
            if key.startswith("rating:") or key.startswith("playcount:"):
                # Only wipe rating/playcount entries that belong to us.
                if key.split(":", 1)[1] in [const.EMAIL, email]:
                    del(comments[key])
            elif key not in ["metadata_block_picture", "coverart",
                             "coverartmime"]:
                del(comments[key])
        if config.getboolean("editing", "save_to_songs"):
            email = email or const.EMAIL
            rating = self("~#rating")
            if rating != RATINGS.default:
                comments["rating:" + email] = str(rating)
            playcount = self.get("~#playcount", 0)
            if playcount != 0:
                comments["playcount:" + email] = str(playcount)
    def __prep_write_total(self, comments, main, fallback, single):
        """Split the internal "part/total" tag back into separate
        on-disk tags (inverse of __post_read_total).

        `single` receives the part, `main` the total; an explicit `main`
        or `fallback` tag in our data overrides the parsed total.
        """
        lower = self.as_lowercased()
        # Start from a clean slate for the three related tags.
        for k in [main, fallback, single]:
            if k in comments:
                del comments[k]
        if single in lower:
            parts = lower[single].split("/", 1)
            if parts[0]:
                comments[single] = [parts[0]]
            if len(parts) > 1:
                comments[main] = [parts[1]]
        if main in lower:
            comments[main] = lower.list(main)
        if fallback in lower:
            # Keep the fallback tag only if a main total already exists;
            # otherwise promote it to the main tag.
            if main in comments:
                comments[fallback] = lower.list(fallback)
            else:
                comments[main] = lower.list(fallback)
def write(self):
audio = self.MutagenType(self["~filename"])
if audio.tags is None:
audio.add_tags()
self.__prep_write(audio.tags)
lower = self.as_lowercased()
for key in lower.realkeys():
audio.tags[key] = lower.list(key)
self.__prep_write_total(audio.tags,
"tracktotal", "totaltracks", "tracknumber")
self.__prep_write_total(audio.tags,
"disctotal", "totaldiscs", "discnumber")
audio.save()
self.sanitize()
# Build the list of supported extensions and mutagen format classes.
# Every decoder import is optional: a missing decoder leaves its name
# bound to None, the matching class below keeps MutagenType = None, and
# it is filtered out of `types`.
extensions = []
ogg_formats = []

try:
    from mutagen.oggvorbis import OggVorbis
except ImportError:
    OggVorbis = None
else:
    extensions.append(".ogg")
    extensions.append(".oga")
    ogg_formats.append(OggVorbis)

try:
    from mutagen.flac import FLAC, FLACNoHeaderError
except ImportError:
    FLAC = None
else:
    extensions.append(".flac")
    ogg_formats.append(FLAC)

try:
    from mutagen.oggflac import OggFLAC
except ImportError:
    OggFLAC = None
else:
    extensions.append(".oggflac")
    ogg_formats.append(OggFLAC)

try:
    from mutagen.oggspeex import OggSpeex
except ImportError:
    OggSpeex = None
else:
    extensions.append(".spx")
    ogg_formats.append(OggSpeex)

# BUG FIX: this import was the only unguarded one, so a mutagen build
# without Theora support made the whole module fail to import.
try:
    from mutagen.oggtheora import OggTheora
except ImportError:
    OggTheora = None
else:
    extensions.append(".ogv")
    ogg_formats.append(OggTheora)

try:
    from mutagen.oggopus import OggOpus
except ImportError:
    OggOpus = None
else:
    extensions.append(".opus")
    ogg_formats.append(OggOpus)

try:
    from mutagen.id3 import ID3
except ImportError:
    ID3 = None
class OggFile(MutagenVCFile):
    """Ogg Vorbis audio file; tags handled as Vorbis comments."""
    format = "Ogg Vorbis"
    mimes = ["audio/vorbis", "audio/ogg; codecs=vorbis"]
    MutagenType = OggVorbis
class OggFLACFile(MutagenVCFile):
    """FLAC stream inside an Ogg container."""
    format = "Ogg FLAC"
    mimes = ["audio/x-oggflac", "audio/ogg; codecs=flac"]
    MutagenType = OggFLAC
class OggSpeexFile(MutagenVCFile):
    """Speex voice codec inside an Ogg container."""
    format = "Ogg Speex"
    mimes = ["audio/x-speex", "audio/ogg; codecs=speex"]
    MutagenType = OggSpeex
class OggTheoraFile(MutagenVCFile):
    """Theora video inside an Ogg container."""
    format = "Ogg Theora"
    mimes = ["video/x-theora", "video/ogg; codecs=theora"]
    MutagenType = OggTheora
class OggOpusFile(MutagenVCFile):
    """Opus audio inside an Ogg container."""
    format = "Ogg Opus"
    mimes = ["audio/ogg; codecs=opus"]
    MutagenType = OggOpus
class FLACFile(MutagenVCFile):
    """Native FLAC file.

    Unlike the Ogg classes, FLAC stores pictures in dedicated picture
    blocks, so the image methods extend/override the Vorbis-comment
    based implementations from MutagenVCFile.
    """
    format = "FLAC"
    mimes = ["audio/x-flac", "application/x-flac"]
    MutagenType = FLAC
    def __init__(self, filename, audio=None):
        # `audio` lets callers (e.g. info()) reuse an already-parsed file.
        if audio is None:
            audio = FLAC(filename)
        super(FLACFile, self).__init__(filename, audio)
        if audio.pictures:
            self.has_images = True
    def get_images(self):
        """Return images from both Vorbis comments and FLAC picture blocks."""
        images = super(FLACFile, self).get_images()
        try:
            tag = FLAC(self["~filename"])
        except EnvironmentError:
            return images
        for cover in tag.pictures:
            fileobj = get_temp_cover_file(cover.data)
            images.append(EmbeddedImage(
                fileobj, cover.mime, cover.width, cover.height, cover.depth,
                cover.type))
        images.sort(key=lambda c: c.sort_key)
        return images
    def get_primary_image(self):
        """Returns the primary embedded image.

        FLAC picture blocks win over Vorbis-comment images; they are
        ranked by APICType so a front cover is preferred.
        """
        try:
            tag = FLAC(self["~filename"])
        except EnvironmentError:
            return None
        covers = tag.pictures
        if not covers:
            return super(FLACFile, self).get_primary_image()
        covers.sort(key=lambda c: APICType.sort_key(c.type))
        cover = covers[0]
        fileobj = get_temp_cover_file(cover.data)
        return EmbeddedImage(
            fileobj, cover.mime, cover.width, cover.height, cover.depth,
            cover.type)
    def clear_images(self):
        """Delete all embedded images (picture blocks and comment tags)."""
        try:
            tag = FLAC(self["~filename"])
        except EnvironmentError:
            return
        tag.clear_pictures()
        tag.save()
        # clear vcomment tags
        super(FLACFile, self).clear_images()
        self.has_images = False
    def set_image(self, image):
        """Replaces all embedded images by the passed image.

        NOTE(review): existing FLAC picture blocks are not cleared before
        add_picture(), so repeated calls may accumulate pictures -- confirm
        whether tag.clear_pictures() should be called first.
        """
        try:
            tag = FLAC(self["~filename"])
            data = image.file.read()
        except EnvironmentError:
            return
        pic = Picture()
        pic.data = data
        pic.type = APICType.COVER_FRONT
        pic.mime = image.mime_type
        pic.width = image.width
        pic.height = image.height
        pic.depth = image.color_depth
        tag.add_picture(pic)
        tag.save()
        # clear vcomment tags
        super(FLACFile, self).clear_images()
        self.has_images = True
    def write(self):
        """Write tags, stripping any stray ID3 tag FLAC files sometimes carry."""
        if ID3 is not None:
            ID3().delete(filename=self["~filename"])
        super(FLACFile, self).write()
# Register every class in this module that wraps a usable mutagen type
# (classes whose decoder failed to import have MutagenType = None).
types = [var for var in globals().values()
         if getattr(var, 'MutagenType', None)]
def info(filename):
    """Open *filename* with mutagen and wrap it in the matching format
    class defined in this module.

    Raises IOError when no supported format recognises the file.
    """
    try:
        audio = mutagen.File(filename, options=ogg_formats)
    except AttributeError:
        # Very old mutagen without the `options` keyword argument.
        audio = OggVorbis(filename)

    # A FLAC stream prefixed with an ID3 tag is not detected above.
    if audio is None and FLAC is not None:
        try:
            audio = FLAC(filename)
        except FLACNoHeaderError:
            pass

    if audio is None:
        raise IOError("file type could not be determined")

    mutagen_type = type(audio)
    for candidate in globals().values():
        if getattr(candidate, 'MutagenType', None) is mutagen_type:
            return candidate(filename, audio)
    raise IOError("file type could not be determined")
| 28.153846 | 76 | 0.570478 | 11,748 | 0.823035 | 0 | 0 | 0 | 0 | 0 | 0 | 2,150 | 0.150624 |
0b1b93c744caf57fd123170dd118feda4553c189 | 413 | py | Python | test find weather.py | jacblo/tests-and-early-projects | 16ca33498fe336b089e24981e148ad81e57adb13 | [
"CC0-1.0"
] | null | null | null | test find weather.py | jacblo/tests-and-early-projects | 16ca33498fe336b089e24981e148ad81e57adb13 | [
"CC0-1.0"
] | null | null | null | test find weather.py | jacblo/tests-and-early-projects | 16ca33498fe336b089e24981e148ad81e57adb13 | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 5 23:04:10 2020
@author: y3
749e8aa818a63c61c31acd7ee948d6d8
"""
import requests
api_address = "https://api.openweathermap.org/data/2.5/weather?q="
api_key_url = "&APPID=749e8aa818a63c61c31acd7ee948d6d8"
city_name = "Bet shemesh,IL"
weather_data = requests.get(api_address+city_name+api_key_url).json()
print(weather_data["main"]["temp"]) | 24.294118 | 69 | 0.745763 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 256 | 0.619855 |
0b1c2d8897a8a76a29c7ff91737c1d331580a06a | 17,750 | py | Python | customer_match/cli.py | Esquire-Digital/customer-match-translator | 7b1530bed0fc1a7a9bfc898e31daad6dacc30ddd | [
"MIT"
] | null | null | null | customer_match/cli.py | Esquire-Digital/customer-match-translator | 7b1530bed0fc1a7a9bfc898e31daad6dacc30ddd | [
"MIT"
] | null | null | null | customer_match/cli.py | Esquire-Digital/customer-match-translator | 7b1530bed0fc1a7a9bfc898e31daad6dacc30ddd | [
"MIT"
] | null | null | null | #!/usr/bin/python3
import os
import click
import sys
import csv
import time
import pandas as pd
import country_converter as coco
import hashlib
import phonenumbers
from tqdm import tqdm
from uszipcode import SearchEngine
# Direct translations from known vendor CSV field names to Google's
# Customer Match header names (applied before snake_case conversion).
HEADER_TRANSLATIONS = {
    "email1": "Email",
    "phone1": "Phone",
    "person_country": "Country",
}
# Headers Google requires in every Customer Match upload.
REQUIRED_HEADERS = {"First Name", "Last Name", "Phone", "Email", "Country", "Zip"}
OPTIONAL_HEADERS = set()  # TODO: Add optional headers that can be uploaded.
# All headers that can be in a Customer Match CSV.
ALL_HEADERS = REQUIRED_HEADERS.union(OPTIONAL_HEADERS)
# Columns uploaded in plain text even when --hash is given.
DO_NOT_HASH = {"Country", "Zip"}
# ANSI codes to color/format terminal prints.
ANSI = {
    "YELLOW": "\u001b[33m",
    "RED": "\u001b[31m",
    "CYAN": "\u001b[36m",
    "BOLD": "\u001b[1m",
    "RESET": "\u001b[0m",
}
class Error(ValueError):
    """Base class for this tool's custom exceptions (a ValueError so the
    CLI's top-level handler catches them uniformly)."""
    pass
class FormatError(Error):
    """Raised when a file is not in the correct format."""
    pass
class NoZipError(FormatError):
    """Raised when a zip code column is not found in a spreadsheet.

    Sometimes recoverable: the caller may derive zips from city/state.
    The partially-built field map is passed as the exception argument.
    """
    pass
# ==========================
# Formatted console prints
# ==========================
def warn(message: str):
    """Print *message* with a bold yellow WARNING prefix (tqdm-safe)."""
    prefix = ANSI["BOLD"] + ANSI["YELLOW"] + "WARNING:" + ANSI["RESET"]
    tqdm.write(f"{prefix} {message}")
def notify(message: str):
    """Print *message* with a bold cyan INFO prefix (tqdm-safe)."""
    prefix = ANSI["BOLD"] + ANSI["CYAN"] + "INFO:" + ANSI["RESET"]
    tqdm.write(f"{prefix} {message}")
def check_path(filepath: str):
    """Checks that the directory portion of a file path exists.

    To check if a path to the file and the file itself exists, use
    check_csv.

    Args:
        filepath (str): The path to the file

    Raises:
        ValueError: If the path to the file does not exist
    """
    parent = os.path.dirname(filepath)
    # A bare filename (empty/blank dirname) is always acceptable.
    if not parent.strip():
        return
    if not os.path.exists(parent):
        raise ValueError(f"The path {parent} does not exist.")
def check_csv(filepath: str) -> csv.Dialect:
    """Runs checks on a CSV file, such as whether it exists and if it can
    be parsed, and returns its dialect object.

    Args:
        filepath (str): Path to the CSV file

    Raises:
        ValueError: If the path does not exist, or the file cannot be read as a CSV

    Returns:
        csv.Dialect: Parsed CSV dialect from the file
    """
    # Check that the file exists, and is a file.
    basename = os.path.basename(filepath)
    if not os.path.exists(filepath):
        raise ValueError(f"The path {filepath} does not exist.")
    if not os.path.isfile(filepath):
        raise ValueError(f"{basename} is not a file.")
    # Sniff the dialect from the first 100kB.  Using `with` fixes a file
    # handle leak: the original never closed the file when sniffing failed.
    try:
        with open(filepath, encoding="utf8") as file:
            return csv.Sniffer().sniff(file.read(100000))
    except csv.Error:
        raise ValueError(
            f"Could not get a CSV dialect for file {basename}. Is it a CSV file? Is it maybe too large?"
        )
def parse_google_fields(filepath: str, ignore_zip: bool = False) -> dict:
    """Parse the header of the CSV to get the Google field names.

    Args:
        filepath (str): Path to the CSV file.
        ignore_zip (bool): Flag to ignore the zip code column, and not throw an error if it is missing.

    Raises:
        NoZipError: If only the Zip header is missing (recoverable; carries the partial field map).
        FormatError: If any other required header cannot be found.

    Returns:
        dict: A map from the field name that was found in the CSV to Google's field name.
            eg: "first_name": "First Name"
    """
    field_map = {}
    # Reverse lookup (Google header -> CSV field kept) used both for
    # duplicate detection and for reporting which column was kept.
    header_to_field = {}
    with open(filepath, "r", encoding="utf8") as file:
        reader = csv.DictReader(file)
        field_names = reader.fieldnames
        # For each field in the header column, try to translate
        # them to a header recognized by Google.
        for field in field_names:
            header = None
            # Check if there is a direct translation first:
            if field in HEADER_TRANSLATIONS:
                header = HEADER_TRANSLATIONS[field]
            # Otherwise attempt to translate snake case:
            elif (translated_field := field.replace("_", " ").title()) in ALL_HEADERS:
                header = translated_field
            if header is None:
                continue
            if header not in header_to_field:
                notify(f"Detected header name '{header}' as '{field}' in CSV file")
                field_map[field] = header
                header_to_field[header] = field
            else:
                # BUG FIX: the original indexed field_map by the Google
                # header (one of its *values*), which raised KeyError the
                # first time a duplicate header was encountered.
                warn(
                    f"Duplicate header name '{header}' was extracted as '{field}'. Keeping column with header '{header_to_field[header]}'"
                )
    # Check if we have all required headers.
    # All required headers are found if the required headers set is a subset of the headers found.
    if not REQUIRED_HEADERS.issubset(field_map.values()):
        missing_headers = REQUIRED_HEADERS.difference(field_map.values())
        # Missing only Zip is recoverable via city/state lookup.
        if len(missing_headers) == 1 and list(missing_headers)[0] == "Zip":
            if not ignore_zip:
                raise NoZipError(field_map)
        else:
            raise FormatError(
                f"Not all required headers found. Missing: {', '.join(missing_headers)}"
            )
    return field_map
def parse_location_fields(filepath: str) -> dict:
    """Parse a header of a CSV file to get the state and city columns.

    Args:
        filepath (str): Path to the CSV file

    Raises:
        FormatError: If the city, state or both columns cannot be found.

    Returns:
        dict: A map from the (lowercased) field name found in the CSV to
            the standardized name. eg: "person_city": "city"
    """
    WANTED_FIELDS = {"state", "city"}
    field_map = {}
    with open(filepath, "r", encoding="utf8") as file:
        header_fields = csv.DictReader(file).fieldnames
    for raw_field in header_fields:
        lowered = raw_field.lower()
        # SalesQL CSVs prefix state and city with "person_"; accept both
        # the raw name and the de-prefixed one.
        candidates = {lowered, lowered.replace("person_", "")}
        matches = WANTED_FIELDS.intersection(candidates)
        if matches:
            translation = list(matches)[0]
            notify(f"Detected header name '{translation}' as '{lowered}' in CSV file")
            field_map[lowered] = translation
    if not WANTED_FIELDS.issubset(field_map.values()):
        missing_fields = WANTED_FIELDS.difference(field_map.values())
        raise FormatError(
            f"Could not find state and city columns. Missing: {', '.join(missing_fields)}"
        )
    return field_map
def hash_element(element: any) -> str:
    """Produces a sha256 hash of an element of data (via its str() form).

    Args:
        element (any): The data to be hashed

    Returns:
        str: The sha256 hash hex digest
    """
    digest = hashlib.sha256(str(element).encode("utf-8"))
    return digest.hexdigest()
def hash_series(series: pd.Series):
    """Hashes a series, usually representing a column in a CSV.

    Columns whose name appears in DO_NOT_HASH (e.g. Zip, Country) are
    returned unchanged; every other column has each element replaced by
    its sha256 hex digest.

    Args:
        series (pd.Series): The column to hash

    Returns:
        pd.Series: The (possibly hashed) column
    """
    # If the name of the series is a field
    # that shouldn't be hashed (eg: Zip), don't hash it.
    if series.name in DO_NOT_HASH:
        return series
    else:
        return series.map(hash_element)
def hash_dataframe(dataframe: pd.DataFrame) -> pd.DataFrame:
    """Hashes all elements in a Pandas dataframe (column by column).

    Args:
        dataframe (pd.DataFrame): The dataframe to be hashed

    Returns:
        pd.DataFrame: The dataframe with all elements hashed
    """
    notify(f"Hashing {dataframe.size} elements...")
    started_at = time.time()
    hashed = dataframe.apply(hash_series, axis=0)
    notify(
        f"Finished hashing {dataframe.size} elements in {time.time() - started_at} seconds."
    )
    return hashed
def get_dataframe(filepath: str) -> pd.DataFrame:
    """Gets a dataframe for a given CSV file.

    Args:
        filepath (str): Path to the CSV file.

    Returns:
        pd.DataFrame: The file parsed as all-string columns, using the
            sniffed delimiter; malformed rows are skipped.
    """
    dialect = check_csv(filepath)
    # NOTE(review): warn_bad_lines/error_bad_lines were deprecated in
    # pandas 1.3 and removed in 1.4 (replaced by on_bad_lines); this call
    # will raise TypeError on modern pandas -- confirm the pinned version.
    return pd.read_csv(
        filepath,
        warn_bad_lines=False,
        error_bad_lines=False,
        sep=dialect.delimiter,
        low_memory=False,
        dtype=str,
    )
def translate_dataframe(dataframe: pd.DataFrame, field_map: dict) -> pd.DataFrame:
    """Translates a CSV file to use Google's desired field names in the
    header.

    Any columns with field names that are not keys of *field_map* are
    dropped; the remaining columns are renamed to Google's headers.

    Args:
        dataframe (pd.DataFrame): The DataFrame of the CSV file.
        field_map (dict): Map from CSV field name to Google header name.

    Returns:
        pd.DataFrame: The translated dataframe. Can be exported to a CSV
            with the save_csv function.
    """
    # Keep only the recognized columns, then rename them in one chain.
    return dataframe[list(field_map)].rename(columns=field_map)
def save_csv(dataframe: pd.DataFrame, output: str):
    """Saves a dataframe to a CSV file (UTF-8, no index column).

    Args:
        dataframe (pd.DataFrame): The dataframe to be saved
        output (str): The filepath to be saved to
    """
    dataframe.to_csv(output, encoding="utf-8", index=False)
    notify(f"Succesfully saved Customer Match data file to {os.path.abspath(output)}.")
def get_zip(row: pd.Series, search: SearchEngine) -> str:
    """Get the zip code for a row in a dataframe with the city and state.

    Args:
        row (pd.Series): A series containing a city and state field.
        search (SearchEngine): The search engine object to lookup the zipcode.

    Returns:
        str: The zipcode if found, an empty string otherwise.
    """
    try:
        # row.count() counts non-NaN values: both city and state must be set.
        if row.count() == 2:
            res = search.by_city_and_state(city=row["city"], state=row["state"])
            return res[0].zipcode
        else:
            warn(f"NaN detected for {row['city']}, {row['state']}.")
            return ""
    # AttributeError/IndexError: no match, or a result without a zipcode.
    except (AttributeError, IndexError):
        warn(f"Zip lookup for {row['city']}, {row['state']} failed.")
        return ""
def get_zips(dataframe: pd.DataFrame) -> pd.Series:
    """Gets the zips for a dataframe with city and state columns.

    Args:
        dataframe (pd.DataFrame): The dataframe, must have city and state columns.

    Returns:
        pd.Series: A series of zip codes correlating to the zips for each city and state.
    """
    engine = SearchEngine()
    tqdm.pandas(desc="Getting zipcodes")

    def lookup(row):
        return get_zip(row, engine)

    return dataframe.progress_apply(lookup, axis=1).rename("Zip")
def convert_to_iso(dataframe: pd.DataFrame) -> pd.DataFrame:
    """Converts a dataframe's Country column to ISO2 format (United States => US)

    Args:
        dataframe (pd.DataFrame): A dataframe with a Country column.

    Returns:
        pd.DataFrame: The dataframe with the Country column in ISO2 format.
    """
    notify(f"Converting {len(dataframe.index)} countries to ISO2 format...")
    started_at = time.time()
    converted = coco.convert(names=dataframe["Country"], to="ISO2", not_found=None)
    dataframe["Country"] = pd.Series(converted)
    notify(
        f"Finished converting countries to ISO2 format in {time.time() - started_at} seconds."
    )
    return dataframe
def normalize_series(column: pd.Series) -> pd.Series:
    """Formats a series (usually a column) of strings to be all lowercase
    and without surrounding whitespace.

    Args:
        column (pd.Series): The series of strings to be normalized

    Returns:
        pd.Series: The same series, with normalized strings.
    """
    def _normalize(el: str) -> str:
        # Renamed from `format`, which shadowed the builtin of that name.
        return el.strip().lower()

    return column.map(_normalize)
def get_e164(row: pd.Series) -> str:
    """Takes a series containing a Phone and Country column and returns the
    phone number in E.164 format.

    Args:
        row (pd.Series): A series containing at least a Phone and Country column.

    Returns:
        str: The phone number in E.164 format, if it could be formatted.
            None otherwise.
    """
    # row.count() counts non-NaN values: both Phone and Country must be set.
    if row.count() == 2:
        try:
            number = phonenumbers.parse(row["Phone"], row["Country"])
            return phonenumbers.format_number(
                number, phonenumbers.PhoneNumberFormat.E164
            )
        except phonenumbers.NumberParseException:
            warn(
                f"Can't parse phone number {row['Phone']} for country {row['Country']}. It is not recognized as a valid number."
            )
            return None
    else:
        # Missing phone and/or country: skip silently (a per-row warning
        # here was too noisy and was previously commented out).
        return None
def convert_to_e164(dataframe: pd.DataFrame) -> pd.DataFrame:
    """Converts a dataframe's Phone column to E.164. Requires a Country column.

    Args:
        dataframe (pd.DataFrame): A dataframe with a Phone and Country column

    Returns:
        pd.DataFrame: The same dataframe with the Phone column reformatted to E.164.
    """
    tqdm.pandas(desc="Converting phone numbers to E.164 format")
    subset = dataframe[["Country", "Phone"]]
    dataframe["Phone"] = subset.progress_apply(get_e164, axis=1)
    return dataframe
def format_for_hashing(dataframe: pd.DataFrame) -> pd.DataFrame:
    """Performs formatting on a dataframe necessary for accurate hashing.

    Normalizes all strings, converts the Country column to ISO2 and the
    Phone column to E.164 format.

    Args:
        dataframe (pd.DataFrame): A dataframe to be formatted

    Returns:
        pd.DataFrame: The same dataframe formatted. May have many NaN values!
    """
    notify("Formatting file for hashing...")
    formatted = dataframe.apply(normalize_series, axis=0)
    formatted = convert_to_iso(formatted)
    formatted = convert_to_e164(formatted)
    notify("Done formatting file.")
    return formatted
def prune(dataframe: pd.DataFrame) -> pd.DataFrame:
    """Drops any rows in a dataframe that contain NaN, and prints
    how many rows were affected.

    Args:
        dataframe (pd.DataFrame): Dataframe to be pruned

    Returns:
        pd.DataFrame: Same dataframe without rows that have NaN.
    """
    before = len(dataframe.index)
    notify(f"Removing rows with empty values...")
    cleaned = dataframe.dropna()
    notify(f"Removed {before - len(cleaned.index)} rows with empty values.")
    return cleaned
@click.command(
    help="Generates a Google Ads Customer Match compliant CSV file from a (potentially large) CSV file in another format."
)
@click.option("-o", "--output", default="result.csv", help="Path to output file.")
@click.option(
    "--hash",
    "do_hash",
    help="SHA256 hash each element in the resulting CSV.",
    is_flag=True,
)
@click.option(
    "--ignore-empty",
    help="Don't remove rows with empty elements.",
    is_flag=True,
)
@click.option(
    "--format",
    help="Format the document as it would before hashing with E.164 phone numbers and lowercase names. Will remove a significant amount of rows.",
    is_flag=True,
)
@click.argument("filepath")
def main(
    filepath: str, output: str, do_hash: bool, ignore_empty: bool, format: bool
):
    """CLI entry point: load, translate, optionally hash, and save a
    Customer Match CSV.  Any ValueError is reported and exits non-zero."""
    try:
        file = None
        # Attempt to translate to Google's standard.
        try:
            check_path(output)
            file = get_dataframe(filepath)
            field_map = parse_google_fields(filepath)
            file = translate_dataframe(file, field_map)
        # If no zip column is found, it may be possible to look up zip
        # codes from city/state. Ask the user if they want to try.
        except NoZipError:
            warn(
                "A zip code column could not be found in the CSV file. If there is a state and city column, the zip codes may be able to be automatically detected. This may take hours, depending on your file size."
            )
            if click.confirm("Would you like to try to detect zip codes?"):
                field_map = parse_location_fields(filepath)
                states_and_cities = translate_dataframe(file, field_map)
                zip_codes = get_zips(states_and_cities)
                # Re-parse ignoring Zip, then attach the derived column.
                field_map = parse_google_fields(filepath, ignore_zip=True)
                translated = translate_dataframe(file, field_map)
                file = pd.concat([translated, zip_codes], axis=1)
            else:
                sys.exit()
        if not ignore_empty:
            file = prune(file)
        # Format the file for hashing if we are going to hash.
        # Country codes are converted to ISO as a step in hashing, so
        # we only have to convert if we are not hashing.
        if do_hash or format:
            file = format_for_hashing(file)
        else:
            file = convert_to_iso(file)
        # Check again for empty values, if phone numbers can't be formatted
        # or ISO formats can't be found.
        if not ignore_empty:
            file = prune(file)
        # Hashing must be the last step, or else NaN will be hashed.
        if do_hash:
            file = hash_dataframe(file)
        save_csv(file, output)
        return 0
    except ValueError as e:
        sys.exit(f"{ANSI['BOLD'] + ANSI['RED']}ERROR:{ANSI['RESET']} {e}")
if __name__ == "__main__":
main() | 32.809612 | 214 | 0.633915 | 304 | 0.017127 | 0 | 0 | 2,797 | 0.157577 | 0 | 0 | 9,421 | 0.530761 |
0b1c55f58b7ea7cb439e436ac338143526ad5ff4 | 2,383 | py | Python | pwndbg/color/__init__.py | R2S4X/pwndbg | 351d479f08a43c71d30a8d8c098b7657bbb9ef0e | [
"MIT"
] | 287 | 2015-03-23T17:22:49.000Z | 2022-01-06T19:57:21.000Z | pwndbg/color/__init__.py | R2S4X/pwndbg | 351d479f08a43c71d30a8d8c098b7657bbb9ef0e | [
"MIT"
] | 28 | 2015-04-13T19:59:44.000Z | 2016-05-27T19:09:55.000Z | pwndbg/color/__init__.py | R2S4X/pwndbg | 351d479f08a43c71d30a8d8c098b7657bbb9ef0e | [
"MIT"
] | 42 | 2015-04-17T18:13:00.000Z | 2020-07-23T08:37:51.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import re
import pwndbg.memoize
# ANSI SGR escape sequences used to color terminal output.
NORMAL = "\x1b[0m"
# Standard 8 foreground colors (30-37) plus the default foreground (39).
BLACK = "\x1b[30m"
RED = "\x1b[31m"
GREEN = "\x1b[32m"
YELLOW = "\x1b[33m"
BLUE = "\x1b[34m"
PURPLE = "\x1b[35m"
CYAN = "\x1b[36m"
LIGHT_GREY = LIGHT_GRAY = "\x1b[37m"
FOREGROUND = "\x1b[39m"
# Bright variants (90-97).
GREY = GRAY = "\x1b[90m"
LIGHT_RED = "\x1b[91m"
LIGHT_GREEN = "\x1b[92m"
LIGHT_YELLOW = "\x1b[93m"
LIGHT_BLUE = "\x1b[94m"
LIGHT_PURPLE = "\x1b[95m"
LIGHT_CYAN = "\x1b[96m"
WHITE = "\x1b[97m"
# Text attributes.
BOLD = "\x1b[1m"
UNDERLINE = "\x1b[4m"
# Convenience wrappers: each stringifies its argument and wraps it in the
# corresponding SGR color/attribute code via colorize().
def none(x):
    """Return *x* stringified, with no color applied."""
    return str(x)

def normal(x):
    return colorize(x, NORMAL)

def black(x):
    return colorize(x, BLACK)

def red(x):
    return colorize(x, RED)

def green(x):
    return colorize(x, GREEN)

def yellow(x):
    return colorize(x, YELLOW)

def blue(x):
    return colorize(x, BLUE)

def purple(x):
    return colorize(x, PURPLE)

def cyan(x):
    return colorize(x, CYAN)

def light_gray(x):
    return colorize(x, LIGHT_GRAY)

def foreground(x):
    return colorize(x, FOREGROUND)

def gray(x):
    return colorize(x, GRAY)

def light_red(x):
    return colorize(x, LIGHT_RED)

def light_green(x):
    return colorize(x, LIGHT_GREEN)

def light_yellow(x):
    return colorize(x, LIGHT_YELLOW)

def light_blue(x):
    return colorize(x, LIGHT_BLUE)

def light_purple(x):
    return colorize(x, LIGHT_PURPLE)

def light_cyan(x):
    return colorize(x, LIGHT_CYAN)

def white(x):
    return colorize(x, WHITE)

def bold(x):
    return colorize(x, BOLD)

def underline(x):
    return colorize(x, UNDERLINE)

def colorize(x, color):
    """Color *x* with *color*, keeping the color active across embedded
    reset sequences and resetting at the end."""
    return color + terminateWith(str(x), color) + NORMAL
@pwndbg.memoize.reset_on_stop
def generateColorFunctionInner(old, new):
    """Compose two color functions: the returned wrapper applies *old*
    first, then *new*.  Memoized (per (old, new) pair) until the next
    debugger stop event."""
    def wrapper(text):
        return new(old(text))
    return wrapper
def generateColorFunction(config):
    """Build a composed color function from a comma-separated config
    value such as "bold,light-red" (hyphens map to underscores, names
    resolve to the color functions defined in this module)."""
    def composed(x):
        return x
    for name in str(config).split(','):
        color_func = globals()[name.lower().replace('-', '_')]
        composed = generateColorFunctionInner(composed, color_func)
    return composed
def strip(x):
    """Remove all ANSI SGR color sequences (e.g. "\\x1b[31m") from *x*."""
    escape = re.compile('\x1b\\[\\d+m')
    return escape.sub('', x)
def terminateWith(x, color):
    """Re-apply *color* after every reset sequence embedded in *x*, so
    nested colorized substrings do not cancel the outer color."""
    return x.replace('\x1b[0m', NORMAL + color)
def ljust_colored(x, length, char=' '):
    """Left-justify *x* to *length* visible characters, not counting
    ANSI escape sequences toward the width."""
    visible = len(strip(x))
    return x + char * (length - visible)
| 31.355263 | 99 | 0.666807 | 0 | 0 | 0 | 0 | 143 | 0.060008 | 0 | 0 | 279 | 0.117079 |
0b1d9491c3c3b996bb1b47ef4c74d5c4882e9c92 | 1,473 | py | Python | private_inference/app/commons.py | xiyueyiwan/private-ml-for-health | c77181d0628b3a04c411a01f7e402fccb4d34e09 | [
"MIT"
] | 28 | 2020-11-01T09:19:37.000Z | 2022-03-31T07:23:42.000Z | private_inference/app/commons.py | HongleGuo/private-ml-for-health | 98b66920c659422bc963d70964f780b76fa67a48 | [
"MIT"
] | 2 | 2021-03-19T11:39:04.000Z | 2021-04-21T12:48:52.000Z | private_inference/app/commons.py | HongleGuo/private-ml-for-health | 98b66920c659422bc963d70964f780b76fa67a48 | [
"MIT"
] | 11 | 2021-03-17T02:28:08.000Z | 2022-03-31T07:23:48.000Z | import io
from PIL import Image
from torchvision import models
import torch
import torchvision.transforms as transforms
import torch.nn as nn
import torch.nn.functional as F
import urllib
import os
def get_model_from_global_agent():
global_model = models.squeezenet1_1(pretrained=True)
global_model.classifier[1] = nn.Conv2d(512, 5, kernel_size=(1,1), stride=(1,1))
global_model.num_classes = 5
global_model.to(torch.device('cpu'))
map_location=torch.device('cpu')
model_weights_link = 'https://drive.google.com/uc?id=11pb2yJKXgyYC9XnB9cd6HlNCFNxnlY1D'
model_weights_path = './model/squeezenet_0.pt'
urllib.request.urlretrieve(model_weights_link, model_weights_path)
global_model.load_state_dict(torch.load("./model/squeezenet_0.pt", map_location=torch.device('cpu')))
os.remove(model_weights_path)
global_model.eval()
return global_model
def transform_image(image_bytes):
apply_transform = transforms.Compose([transforms.Resize(265),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
image = Image.open(io.BytesIO(image_bytes)).convert('RGB')
return apply_transform(image).unsqueeze(0)
# change to DR dataset format
def format_class_name(class_name):
class_name = class_name.replace('_', ' ')
class_name = class_name.title()
return class_name
| 35.926829 | 105 | 0.706721 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 171 | 0.11609 |
9bc4cf79003c2ee2dd4622a00c59cd89da0d7b15 | 1,599 | py | Python | eg-retracepoly.py | Cocodidou/projetFusee | 9ce85fb3463b1039e4a2e414c055b680ba909fb8 | [
"Unlicense"
] | null | null | null | eg-retracepoly.py | Cocodidou/projetFusee | 9ce85fb3463b1039e4a2e414c055b680ba909fb8 | [
"Unlicense"
] | null | null | null | eg-retracepoly.py | Cocodidou/projetFusee | 9ce85fb3463b1039e4a2e414c055b680ba909fb8 | [
"Unlicense"
] | null | null | null | # line drawing as shape, where drawing doesn't "lift pen from paper"
# coords are given as a list from (0,0), and sublists can be used to
# branch off then return to a particular point
#
# drawing the outline of a polygon, then retracing the steps backwards,
# renders the polygon outline as opposed to filling it
import turtle
import engine
# Screen dimensions in pixels.
WIDTH = 640
HEIGHT = 480
class Tree(engine.GameObject):
    """Stationary game object rendered with the 'tree' shape at the origin."""
    def __init__(self):
        # x=0, y=0, zero velocity, 'tree' shape, black color.
        super().__init__(0, 0, 0, 0, 'tree', 'black')
    def heading(self):
        """Fixed heading so the shape is always drawn upright."""
        return 90
def makeshape_r(S, L, scale):
    """Recursively trace the coordinate list *L*, scaled by *scale*.

    *S* is the stack of points visited at this level; nested lists are
    branches that are drawn and retraced back to their branch point, so
    the pen never lifts.  After drawing, every point in *S* is revisited
    in reverse, which renders the outline instead of a filled polygon.
    Leaf elements must be (x, y) tuples.
    """
    # isinstance() replaces the brittle `type(x) == type([])` checks.
    assert isinstance(L, list)
    for elem in L:
        if isinstance(elem, list):
            # Branch off from the current point, then return to it.
            makeshape_r([S[-1]], elem, scale)
        else:
            assert isinstance(elem, tuple)
            assert len(elem) == 2
            point = (scale * elem[0], scale * elem[1])
            turtle.goto(point)
            S.append(point)
    # now unwind backwards
    while len(S) > 0:
        turtle.goto(S.pop())
def makeshape(name, scale, L):
    """Trace coordinate list *L* (scaled) as a polygon and register it
    as a turtle shape called *name*."""
    turtle.home()
    turtle.begin_poly()
    # Start the trace stack at the origin.
    makeshape_r([(0, 0)], L, scale)
    turtle.end_poly()
    turtle.register_shape(name, turtle.get_poly())
# Entry point: set up the screen, register the hand-traced tree shape,
# and run the game engine with a single Tree object.
if __name__ == '__main__':
    engine.init_screen(WIDTH, HEIGHT)
    engine.init_engine()
    # Coordinates of the tree outline; nested lists are branches that are
    # retraced so the outline is drawn without lifting the pen.
    makeshape('tree', 5, [
        (-3,-3), (0.5,0), (4,-2.7), (0.4,-0.5), (0.4,-3),
        (-3.2,-5), (0.4,-3.5), (3.8,-5), (0.45,-4), (0.45,-5.8),
        (-2,-8.2), (0.51,-6.3), (3.9,-8), (0.51,-7),
        # trunk
        (0.55,-8), (0.6,-9), (0.7,-10), (1,-11), (1.1,-12),
        # ground
        (-2,-12.2), (-0.5,-12.5), [ (3,-12.1) ], [ (2,-12) ],
        [ (-1,-12.75), [ (-2.8,-12.6) ], (0.8,-12.7), (2.5,-12.5) ]
    ])
    tree = Tree()
    engine.add_obj(tree)
    engine.engine()
| 25.790323 | 71 | 0.594121 | 131 | 0.081926 | 0 | 0 | 0 | 0 | 0 | 0 | 374 | 0.233896 |
9bc4ea9a9e8106e96e6d58d6d3b3f711f0e09666 | 1,671 | py | Python | migrations/versions/25276717e6b8_.py | myfreeweb/crawllog | e86bda8adb7201e4382b774332e51b26297d008c | [
"Unlicense"
] | 3 | 2016-04-04T22:19:45.000Z | 2020-11-08T03:11:12.000Z | migrations/versions/25276717e6b8_.py | unrelentingtech/crawllog | e86bda8adb7201e4382b774332e51b26297d008c | [
"Unlicense"
] | 1 | 2016-04-05T14:36:36.000Z | 2016-06-18T17:59:16.000Z | migrations/versions/25276717e6b8_.py | myfreeweb/crawllog | e86bda8adb7201e4382b774332e51b26297d008c | [
"Unlicense"
] | null | null | null | """Initial schema
Revision ID: 25276717e6b8
Revises: None
Create Date: 2016-03-25 17:58:19.963883
"""
# revision identifiers, used by Alembic.
revision = '25276717e6b8'
down_revision = None
from alembic import op
import sqlalchemy as sa
def downgrade():
    """Drop all tables created by upgrade(), children before parents.

    Tables with foreign keys must be dropped before the tables they
    reference; the original dropped 'server' and 'user' while
    'user_on_server' (which references both) still existed, which fails
    on FK-enforcing backends.
    """
    op.drop_table('server_log')
    op.drop_table('user_on_server')
    op.drop_table('user')
    op.drop_table('server')
def upgrade():
    """Create the initial schema: server, user, user_on_server, server_log.

    Parent tables ('server', 'user') are created before the tables that
    reference them through foreign keys.
    """
    op.create_table('server',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('name', sa.TEXT(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('user',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('uri', sa.TEXT(), nullable=True),
    sa.Column('micropub_uri', sa.TEXT(), nullable=True),
    sa.Column('access_token', sa.TEXT(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # Membership of a user on a server, with per-membership settings.
    op.create_table('user_on_server',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('name', sa.TEXT(), nullable=True),
    sa.Column('auto_pub_threshold', sa.INTEGER(), nullable=True),
    sa.Column('server_id', sa.INTEGER(), nullable=True),
    sa.Column('user_id', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['server_id'], ['server.id'], ),
    sa.ForeignKeyConstraint(['user_id'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    # Per-server log entry holding a 'position' marker — presumably a crawl
    # bookmark into the server's feed; confirm against the crawler code.
    op.create_table('server_log',
    sa.Column('id', sa.INTEGER(), nullable=False),
    sa.Column('uri', sa.TEXT(), nullable=True),
    sa.Column('position', sa.INTEGER(), nullable=True),
    sa.Column('server_id', sa.INTEGER(), nullable=True),
    sa.ForeignKeyConstraint(['server_id'], ['server.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
| 30.381818 | 65 | 0.661281 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 446 | 0.266906 |
9bc53ea3709915443ca7298e421e40de7c28933a | 911 | py | Python | Day 15/FLOW016.py | arpit1920/Python-Codechef-Problems | a8d2d48140c93c70fc2de9da90a967f3d43036a1 | [
"MIT"
] | null | null | null | Day 15/FLOW016.py | arpit1920/Python-Codechef-Problems | a8d2d48140c93c70fc2de9da90a967f3d43036a1 | [
"MIT"
] | null | null | null | Day 15/FLOW016.py | arpit1920/Python-Codechef-Problems | a8d2d48140c93c70fc2de9da90a967f3d43036a1 | [
"MIT"
] | null | null | null | """
This Code is Contributed by Arpit Bhushan Sharma
Codechef-@koderarpit
Github - @arpit1920
Kaggle - arpit3043
Mail - arpit3043@gmail.com
Two integers A and B are the inputs. Write a program to find GCD and LCM of A and B.
Input
The first line contains an integer T, total number of testcases.
Then follow T lines, each line contains an integer A and B.
Output
Display the GCD and LCM of A and B separated by space respectively.
The answer for each test case must be displayed in a new line.
Constraints
1 ≤ T ≤ 1000
1 ≤ A,B ≤ 1000000
Example
Input
3
120 140
10213 312
10 30
Output
20 840
1 3186456
10 30
"""
# cook your dish here
def gcd(a, b):
    """Greatest common divisor of a and b via the Euclidean algorithm.

    Iterative form of the original recursive version; *b* must be nonzero.
    """
    while a % b != 0:
        a, b = b, a % b
    return b
# Read T test cases; for each pair print "HCF LCM" on its own line.
for _ in range(int(input())):
    x, y = map(int, input().split())
    # Ensure the larger value is passed first, as gcd() expects.
    if x < y:
        x, y = y, x
    hcf = gcd(x, y)
    # LCM via the identity a*b == gcd(a, b) * lcm(a, b).
    print(hcf, (x * y) // hcf)
9bc5c0aaba0c3abcdcd825f0ec975266b287ad70 | 4,270 | py | Python | recupero/migrations/0001_initial.py | cluster311/ggg | 262173c66fe40ada30083d439a79f16f841f5772 | [
"BSD-3-Clause"
] | 6 | 2020-03-16T02:51:16.000Z | 2020-11-10T00:58:01.000Z | recupero/migrations/0001_initial.py | cluster311/ggg | 262173c66fe40ada30083d439a79f16f841f5772 | [
"BSD-3-Clause"
] | 204 | 2019-09-19T02:00:57.000Z | 2022-02-10T10:48:52.000Z | recupero/migrations/0001_initial.py | cluster311/ggg | 262173c66fe40ada30083d439a79f16f841f5772 | [
"BSD-3-Clause"
] | 3 | 2019-09-16T22:59:24.000Z | 2022-03-21T22:52:44.000Z | # Generated by Django 2.2.4 on 2019-10-22 00:36
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the ``recupero`` (billing recovery) app.

    Creates DocumentoAnexo, Factura, TipoDocumentoAnexo, TipoPrestacion and
    Prestacion.  The FK from DocumentoAnexo to TipoDocumentoAnexo is added
    last because TipoDocumentoAnexo is created after DocumentoAnexo.
    """

    # First migration of this app.
    initial = True

    dependencies = [
        ('cie10_django', '0001_initial'),
        ('nhpgd_django', '0001_initial'),
        ('profesionales', '0006_profesional_dni'),
        ('centros_de_salud', '0005_auto_20191014_0001'),
        ('obras_sociales', '0001_initial'),
    ]

    operations = [
        # Attached file; its 'tipo' FK is wired up in the AddField below.
        migrations.CreateModel(
            name='DocumentoAnexo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('documento_adjunto', models.FileField(upload_to='documentos_anexos')),
            ],
        ),
        # Invoice: links a health centre, an insurer, optional professional
        # and the main/extra CIE-10 diagnosis codes.
        migrations.CreateModel(
            name='Factura',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fecha', models.DateField(default=django.utils.timezone.now)),
                ('especialidad', models.CharField(max_length=50)),
                ('centro_de_salud', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='facturas', to='centros_de_salud.CentroDeSalud')),
                ('cies_extras', models.ManyToManyField(blank=True, related_name='extras_facturas', to='cie10_django.CIE10')),
                ('codigo_cie_principal', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='facturas', to='cie10_django.CIE10')),
                ('obra_social', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='facturas', to='obras_sociales.ObraSocial')),
                ('profesional', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='facturas', to='profesionales.Profesional')),
            ],
        ),
        # Catalogue of attachment document types.
        migrations.CreateModel(
            name='TipoDocumentoAnexo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(help_text='Tipo de documento', max_length=30)),
            ],
        ),
        # Service type: consultation (100), practice (200) or admission
        # (300), with the document types it requires/suggests.
        migrations.CreateModel(
            name='TipoPrestacion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(help_text='Tipo de atención', max_length=30)),
                ('tipo', models.PositiveIntegerField(choices=[(100, 'Consulta'), (200, 'Práctica'), (300, 'Internación')], default=100)),
                ('documentos_requeridos', models.ManyToManyField(blank=True, related_name='requerido_en_tipos', to='recupero.TipoDocumentoAnexo')),
                ('documentos_sugeridos', models.ManyToManyField(blank=True, related_name='sugerido_en_tipos', to='recupero.TipoDocumentoAnexo')),
            ],
        ),
        # One service line inside an invoice.
        migrations.CreateModel(
            name='Prestacion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fecha', models.DateField(default=django.utils.timezone.now, help_text='Fecha de la prestación')),
                ('cantidad', models.IntegerField(default=1)),
                ('documentos_adjuntados', models.ManyToManyField(blank=True, to='recupero.DocumentoAnexo')),
                ('factura', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='prestaciones', to='recupero.Factura')),
                ('nomenclador', models.ForeignKey(help_text='Servicio realizado o entregado', on_delete=django.db.models.deletion.CASCADE, related_name='prestaciones', to='nhpgd_django.NomencladorHPGD')),
                ('tipo', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='recupero.TipoPrestacion')),
            ],
        ),
        # Late FK hookup (see class docstring).
        migrations.AddField(
            model_name='documentoanexo',
            name='tipo',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='recupero.TipoDocumentoAnexo'),
        ),
    ]
| 56.184211 | 204 | 0.638642 | 4,119 | 0.963734 | 0 | 0 | 0 | 0 | 0 | 0 | 1,185 | 0.277258 |
9bc97edd815322dc6a747c2ab760525b0de68666 | 1,057 | py | Python | main(terminal).py | fcondo/GUI-sudoku-solver | 6d5af3790d1e1c13402bdc6fa6f8ae4ed79121d2 | [
"Apache-2.0"
] | null | null | null | main(terminal).py | fcondo/GUI-sudoku-solver | 6d5af3790d1e1c13402bdc6fa6f8ae4ed79121d2 | [
"Apache-2.0"
] | null | null | null | main(terminal).py | fcondo/GUI-sudoku-solver | 6d5af3790d1e1c13402bdc6fa6f8ae4ed79121d2 | [
"Apache-2.0"
] | null | null | null | """
main(terminal).py
Author: Fabio Condomitti
"""
from solver import print_grid, solve
def main():
sudoku_grid = [ [0,8,0, 0,0,0, 2,0,0],
[0,0,0, 0,8,4, 0,9,0],
[0,0,6, 3,2,0, 0,1,0],
[0,9,7, 0,0,0, 0,8,0],
[8,0,0, 9,0,3, 0,0,2],
[0,1,0, 0,0,0, 9,5,0],
[0,7,0, 0,4,5, 8,0,0],
[0,3,0, 7,1,0, 0,0,0],
[0,0,8, 0,0,0, 0,4,0]
]
sudoku_grid = [
[2,5,0, 0,9,7, 3,0,6],
[0,0,7, 3,0,0, 1,0,2],
[0,3,1, 4,0,5, 8,0,0],
[0,6,0, 8,0,0, 0,2,7],
[0,2,4, 0,0,1, 0,3,8],
[0,8,0, 9,0,0, 6,1,0],
[3,0,5, 0,0,4, 0,0,1],
[0,0,6, 0,0,9, 7,0,0],
[0,7,0, 5,1,0, 4,0,3]
]
print_grid(sudoku_grid)
print('....................................')
copy = sudoku_grid
solve(copy, 9)
print_grid(copy)
if __name__ == "__main__":
main() | 27.102564 | 49 | 0.325449 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 107 | 0.10123 |
9bc9a25d3b23e0b184104f975e4e6a31a9862183 | 2,239 | py | Python | win/ass_commander.py | janakhpon/PersonalAssistant | bacd6743d23d139af1199df12c7bf99d092764b1 | [
"MIT"
] | null | null | null | win/ass_commander.py | janakhpon/PersonalAssistant | bacd6743d23d139af1199df12c7bf99d092764b1 | [
"MIT"
] | null | null | null | win/ass_commander.py | janakhpon/PersonalAssistant | bacd6743d23d139af1199df12c7bf99d092764b1 | [
"MIT"
] | null | null | null | import subprocess
import os
import requests
import pyttsx3
from bs4 import BeautifulSoup
class Commander:
    """Tiny rule-based assistant: matches keywords in the user's text and
    answers from canned replies, a Bing answer-box scrape, or Wikipedia.

    Responses are printed and spoken aloud via pyttsx3.
    """

    def __init__(self):
        # Canned affirmative / negative phrase lists.  Not referenced inside
        # this class — presumably consumed by the surrounding dialogue loop;
        # TODO confirm against the caller.
        self.confirm = ["yes", "ok", "go on", "sure", "do it", "yeah", "yaa", "Imm", "confirm", "of course"]
        self.cancel = ["nope", "no", "noo", "not yet", "don't", "do not", "stop", "wait", "hold on", "not now"]

    def discover(self, text):
        """Dispatch *text* to the matching keyword handler.

        :param text: the user's utterance (plain string).
        """
        if "what" in text:
            if "my name" in text:
                self.respond("You haven't told me your name yet")
            elif "your name" in text:
                # Bug fix: this used to be an independent `if`, so the web
                # search in the `else` also fired for "my name" questions.
                self.respond(" I am Personal assistant. May i help you ? ")
            else:
                # Unknown "what" question: scrape Bing's answer box.
                params = {"q": text}
                r = requests.get("https://www.bing.com/search", params=params)
                soup = BeautifulSoup(r.text, "html.parser")
                results = soup.find_all("div", class_="dc_mn")
                for result in results:
                    print(result.get_text())
        if "tell me about" in text:
            # Everything after the third space is the topic, e.g.
            # "tell me about black holes" -> "black holes".
            con = text.split(" ", 3)[-1]
            self.respond("Wait a minute, let me think about " + con)
            self.respond("Ok, i got it ")
            URL = 'https://en.wikipedia.org/wiki/' + con
            content = requests.get(URL)
            soup = BeautifulSoup(content.text, 'html.parser')
            try:
                results = soup.find('div', id='mw-content-text').find('div', class_="mw-parser-output").find_all('p', limit=5)
            except Exception:
                # Article missing / unexpected layout: find() returned None.
                # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
                results = ""
            if results == "":
                self.respond("Sorry, try asking something else")
            else:
                # Read the first paragraphs of the article aloud.
                for result in results:
                    self.respond(result.get_text().rstrip())
        if "I don't like you" in text:
            self.respond("Ok go on, i don't give a fuck!")
        if "*** you" in text:
            self.respond("So am I, fuck you triple x time")

    def respond(self, response):
        """Print *response* and speak it through text-to-speech.

        A fresh engine is created per call — presumably to avoid pyttsx3's
        "run loop already started" state between utterances; TODO confirm,
        as re-initialising is costly.
        """
        print(response)
        engine = pyttsx3.init()
        engine.setProperty('rate', 150)  # Speed percent (can go over 100)
        engine.setProperty('volume', 0.9)  # Volume 0-1
        engine.say(response)
        engine.runAndWait()
| 38.603448 | 126 | 0.529701 | 2,147 | 0.95891 | 0 | 0 | 0 | 0 | 0 | 0 | 696 | 0.310853 |
9bc9bcdd30f3d72eeeddaa6891ef97194fa18480 | 251 | py | Python | circular.py | beninato8/pokemon-go | 35c24a8fe948ebea7e4471282f9fe23888b91a7c | [
"MIT"
] | null | null | null | circular.py | beninato8/pokemon-go | 35c24a8fe948ebea7e4471282f9fe23888b91a7c | [
"MIT"
] | 4 | 2021-06-02T00:54:46.000Z | 2022-03-12T00:58:36.000Z | circular.py | beninato8/pokemon-go | 35c24a8fe948ebea7e4471282f9fe23888b91a7c | [
"MIT"
] | null | null | null | def rotations(l):
out = []
for i in range(len(l)):
a = shift(l, i)
out += [a]
return out
def shift(l, n):
    """Rotate sequence *l* left by *n* positions (works on lists, tuples
    and strings alike, since it only uses slicing and concatenation)."""
    tail, head = l[n:], l[:n]
    return tail + head
if __name__ == '__main__':
    # Demo: print each left rotation of a small sequence.
    seq = [0, 1, 2, 3, 4]
    for rotated in rotations(seq):
        print(rotated)
9bca087d33ec78af86014248cb1d2a83ed4c9f78 | 423 | py | Python | GetDataFrame.py | Adhmir/mcdm | c1d8bec4f3628f5d95ee7cd3bfdfb9ff54783dce | [
"MIT"
] | null | null | null | GetDataFrame.py | Adhmir/mcdm | c1d8bec4f3628f5d95ee7cd3bfdfb9ff54783dce | [
"MIT"
] | null | null | null | GetDataFrame.py | Adhmir/mcdm | c1d8bec4f3628f5d95ee7cd3bfdfb9ff54783dce | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Dec 28 17:41:18 2021
@author: Adhmir Renan Voltoni Gomes
"""
import pandas as pd
import MCDM_V_001 as mcdm
def pegar_dados():
    """Load the dataset currently selected in the GUI into a DataFrame.

    The file path is read from ``mcdm.label_file["text"]``.  Files ending
    in ``.csv`` (any letter case) are parsed with :func:`pandas.read_csv`;
    anything else is assumed to be an Excel workbook.

    :return: pandas.DataFrame with the loaded data.
    """
    file_path = mcdm.label_file["text"]
    excel_filename = "{}".format(file_path)
    # Case-insensitive extension check: the old `[-4:] == ".csv"` test
    # missed files named e.g. "DATA.CSV".
    if excel_filename.lower().endswith(".csv"):
        df = pd.read_csv(excel_filename)
    else:
        df = pd.read_excel(excel_filename)
    return df
| 23.5 | 45 | 0.617021 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 125 | 0.295508 |
9bca5af1c759f195927e0d826898e9abc033b944 | 742 | py | Python | Individual task.py | IsSveshuD/lab_2_11 | db922488c813610b42272bd93b1975956a8d986f | [
"MIT"
] | null | null | null | Individual task.py | IsSveshuD/lab_2_11 | db922488c813610b42272bd93b1975956a8d986f | [
"MIT"
] | null | null | null | Individual task.py | IsSveshuD/lab_2_11 | db922488c813610b42272bd93b1975956a8d986f | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# _*_ coding: utf-8 -*-
# Используя замыкания функций, объявите внутреннюю функцию,
# которая принимает в качестве аргумента коллекцию (список
# или кортеж) и возвращает или минимальное значение, или
# максимальное, в зависимости от значения параметра type внешней
# функции. Если type равен «max», то возвращается максимальное
# значение, иначе – минимальное. По умолчанию type должно
# принимать значение «max». Вызовите внутреннюю функцию
# замыкания и отобразите на экране результат ее работы.
def fun1(type_='max'):
def fun2(lst):
return eval(f'{type_}(lst)')
print(type_)
return fun2
a = [1, 2, 34, 54, 36, 7, 8]
max_fun = fun1()
min_fun = fun1('min')
print(max_fun(a))
print(min_fun(a))
| 28.538462 | 64 | 0.722372 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 906 | 0.814748 |
9bcb27a7d1f03b098af4f51756f0b10851a103ba | 2,561 | py | Python | cmsplugin_remote_form/migrations/0005_auto_20200429_1442.py | georgmzimmer/cmsplugin-remote-form | 804e67ccc18964223247b39cc6e359f977a16556 | [
"BSD-3-Clause"
] | null | null | null | cmsplugin_remote_form/migrations/0005_auto_20200429_1442.py | georgmzimmer/cmsplugin-remote-form | 804e67ccc18964223247b39cc6e359f977a16556 | [
"BSD-3-Clause"
] | 1 | 2020-02-13T17:30:13.000Z | 2020-02-13T17:30:13.000Z | cmsplugin_remote_form/migrations/0005_auto_20200429_1442.py | georgmzimmer/cmsplugin-remote-form | 804e67ccc18964223247b39cc6e359f977a16556 | [
"BSD-3-Clause"
] | 4 | 2020-01-16T03:52:18.000Z | 2020-04-29T19:35:16.000Z | # Generated by Django 2.2.12 on 2020-04-29 18:42
from django.db import migrations, models
class Migration(migrations.Migration):
    """Field alterations for ExtraField and RemoteForm.

    Widens/retypes several CharFields (choice lists, lengths, defaults and
    help texts); no tables are added or removed.
    """

    dependencies = [
        ('cmsplugin_remote_form', '0004_remoteform_notification_emails'),
    ]

    operations = [
        # Extend the field-type choice list (adds ReCaptcha among others).
        migrations.AlterField(
            model_name='extrafield',
            name='fieldType',
            field=models.CharField(choices=[('CharField', 'CharField'), ('BooleanField', 'BooleanField'), ('EmailField', 'EmailField'), ('DecimalField', 'DecimalField'), ('FloatField', 'FloatField'), ('IntegerField', 'IntegerField'), ('FileField', 'FileField'), ('ImageField', 'ImageField'), ('USStateSelect', 'US State Selector'), ('IPAddressField', 'IPAddressField'), ('MathCaptcha', 'Math Captcha'), ('auto_Textarea', 'CharField as Textarea'), ('auto_hidden_input', 'CharField as HiddenInput'), ('auto_referral_page', 'Referral page as HiddenInput'), ('auto_GET_parameter', 'GET parameter as HiddenInput'), ('CharFieldWithValidator', 'CharFieldWithValidator'), ('ChoiceField', 'ChoiceField'), ('ReCaptcha', 'reCAPTCHA')], max_length=100),
        ),
        migrations.AlterField(
            model_name='extrafield',
            name='initial',
            field=models.CharField(blank=True, max_length=4096, null=True),
        ),
        migrations.AlterField(
            model_name='extrafield',
            name='name',
            field=models.CharField(default='', max_length=100, verbose_name='Name'),
        ),
        migrations.AlterField(
            model_name='remoteform',
            name='error_notification_emails',
            field=models.CharField(blank=True, help_text='multiple emails separated by commas', max_length=250, null=True, verbose_name='Email Errors To:'),
        ),
        migrations.AlterField(
            model_name='remoteform',
            name='on_submit',
            field=models.CharField(blank=True, help_text='Google Analytics Code', max_length=400, null=True),
        ),
        migrations.AlterField(
            model_name='remoteform',
            name='post_url',
            field=models.CharField(default='#remoteURL', max_length=200, null=True, verbose_name='Remote URL'),
        ),
        migrations.AlterField(
            model_name='remoteform',
            name='template',
            field=models.CharField(choices=[('cmsplugin_remote_form_templates/default.html', 'Default'), ('cmsplugin_remote_form_templates/vertical_onecol.html', 'Vertical - One Col')], default='cmsplugin_remote_form/default.html', max_length=255),
        ),
    ]
9bcb2e6ea72e15c7c10a699c8ebe55ab4cd553e3 | 17,348 | py | Python | model.py | bfMendonca/CarND-Behavioral-Cloning-P3 | 564b8e0c542292acdc6daf3829522cdcd98a1c95 | [
"MIT"
] | null | null | null | model.py | bfMendonca/CarND-Behavioral-Cloning-P3 | 564b8e0c542292acdc6daf3829522cdcd98a1c95 | [
"MIT"
] | null | null | null | model.py | bfMendonca/CarND-Behavioral-Cloning-P3 | 564b8e0c542292acdc6daf3829522cdcd98a1c95 | [
"MIT"
] | null | null | null | import csv
import cv2
import numpy as np
import pandas as pd
import sys
from datetime import datetime
from numpy.random import RandomState
import keras
import tensorflow as tf
from keras.models import Sequential
from keras.callbacks import ModelCheckpoint
from keras.layers import Flatten, Dense, Lambda, Cropping2D, Conv2D, Dropout, MaxPool2D
def DrivingNetV1():
    """Trivial baseline: crop, normalize, flatten, one linear output unit.

    Note: unlike the NVIDIANet builders this crops *before* normalizing and
    returns the model *uncompiled*.
    """
    net = Sequential()
    # Drop 90 px of sky and 20 px of hood from each 160x320 RGB frame.
    net.add(Cropping2D(cropping=((90, 20), (0, 0)), input_shape=(160, 320, 3)))
    # Scale pixels to the [-0.5, 0.5] range.
    net.add(Lambda(lambda px: (px / 255.0) - 0.5))
    net.add(Flatten())
    net.add(Dense(1))
    return net
def NVIDIANetV0( lr=1e-3):
    """NVIDIA end-to-end steering CNN with a linear dense head.

    Five conv layers (24/36/48 filters @5x5 stride 2, then 64/64 @3x3)
    followed by 100-50-10-1 linear dense layers predicting the steering
    value directly.

    :param lr: learning rate for the Adam optimizer.
    :return: compiled Keras Sequential model (MSE loss).
    """
    model = Sequential( name="NVIDIANetV0" )
    # Normalize pixels to [-0.5, 0.5], then crop sky (top 70 px) and car
    # hood (bottom 25 px).
    model.add( Lambda( lambda x: (x/255.0) - 0.5, input_shape=( 160, 320, 3 ) ) )
    model.add( Cropping2D( cropping=( (70,25), (0,0) ) ) )
    model.add( Conv2D( 24, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 36, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 48, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Flatten( ) )
    model.add( Dense(100, activation='linear' ) )
    model.add( Dense(50, activation='linear' ) )
    model.add( Dense(10, activation='linear' ) )
    model.add( Dense(1, activation='linear') )
    opt = keras.optimizers.Adam(learning_rate=lr )
    model.compile( loss='mse', optimizer=opt )
    return model
def NVIDIANetV1( lr=1e-3):
    """NVIDIANetV0 variant: tanh dense head, output mapped to degrees.

    The single network output is interpreted as path curvature 1/R and
    converted to a steering angle in degrees by the final Lambda layer.

    :param lr: learning rate for the Adam optimizer.
    :return: compiled Keras Sequential model (MSE loss on the angle).
    """
    model = Sequential( name="NVIDIANetV1" )
    model.add( Lambda( lambda x: (x/255.0) - 0.5, input_shape=( 160, 320, 3 ) ) )
    model.add( Cropping2D( cropping=( (70,25), (0,0) ) ) )
    model.add( Conv2D( 24, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 36, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 48, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Flatten( ) )
    model.add( Dense(100, activation='tanh' ) )
    model.add( Dense(50, activation='tanh' ) )
    model.add( Dense(10, activation='tanh' ) )
    model.add( Dense(1, activation='linear') )
    # Converting curvature to angle, assuming a wheelbase of 2 meters, then
    # going from rad to deg.  The network output is 1/R (r); the steering
    # angle is alpha = atan(l*r)*57.3 with l = wheelbase = 2 m.
    model.add( Lambda( lambda x: tf.multiply( tf.atan( tf.multiply( x, 2 ) ), 57.3 ) ) )
    opt = keras.optimizers.Adam(learning_rate=lr )
    model.compile( loss='mse', optimizer=opt )
    return model
def NVIDIANetV2( lr=1e-3):
    """NVIDIANetV1 variant with a *linear* dense head (no tanh).

    Output is curvature 1/R, converted to a steering angle in degrees by
    the final Lambda layer.

    :param lr: learning rate for the Adam optimizer.
    :return: compiled Keras Sequential model (MSE loss on the angle).
    """
    model = Sequential( name="NVIDIANetV2" )
    model.add( Lambda( lambda x: (x/255.0) - 0.5, input_shape=( 160, 320, 3 ) ) )
    model.add( Cropping2D( cropping=( (70,25), (0,0) ) ) )
    model.add( Conv2D( 24, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 36, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 48, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Flatten( ) )
    model.add( Dense(100, activation='linear' ) )
    model.add( Dense(50, activation='linear' ) )
    model.add( Dense(10, activation='linear' ) )
    model.add( Dense(1, activation='linear') )
    # Converting curvature to angle, assuming a wheelbase of 2 meters, then
    # going from rad to deg: alpha = atan(l*r)*57.3 with l = 2 m.
    model.add( Lambda( lambda x: tf.multiply( tf.atan( tf.multiply( x, 2 ) ), 57.3 ) ) )
    opt = keras.optimizers.Adam(learning_rate=lr )
    model.compile( loss='mse', optimizer=opt )
    return model
def NVIDIANetV3( lr=1e-3):
    """NVIDIANetV1 variant with Dropout(0.5) after each hidden dense layer.

    Output is curvature 1/R, converted to a steering angle in degrees.

    :param lr: learning rate for the Adam optimizer.
    :return: compiled Keras Sequential model (MSE loss on the angle).
    """
    model = Sequential( name="NVIDIANetV3" )
    model.add( Lambda( lambda x: (x/255.0) - 0.5, input_shape=( 160, 320, 3 ) ) )
    model.add( Cropping2D( cropping=( (70,25), (0,0) ) ) )
    model.add( Conv2D( 24, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 36, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 48, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Flatten( ) )
    model.add( Dense(100, activation='tanh' ) )
    model.add( Dropout(0.5) )
    model.add( Dense(50, activation='tanh' ) )
    model.add( Dropout(0.5) )
    model.add( Dense(10, activation='tanh' ) )
    model.add( Dropout(0.5) )
    model.add( Dense(1, activation='linear') )
    # Converting curvature to angle, assuming a wheelbase of 2 meters, then
    # going from rad to deg: alpha = atan(l*r)*57.3 with l = 2 m.
    model.add( Lambda( lambda x: tf.multiply( tf.atan( tf.multiply( x, 2 ) ), 57.3 ) ) )
    opt = keras.optimizers.Adam(learning_rate=lr )
    model.compile( loss='mse', optimizer=opt )
    return model
def NVIDIANetV4( lr=1e-3):
    """NVIDIANetV3 variant with decaying dropout rates (0.5/0.25/0.125).

    Output is curvature 1/R, converted to a steering angle in degrees.

    :param lr: learning rate for the Adam optimizer.
    :return: compiled Keras Sequential model (MSE loss on the angle).
    """
    model = Sequential( name="NVIDIANetV4" )
    model.add( Lambda( lambda x: (x/255.0) - 0.5, input_shape=( 160, 320, 3 ) ) )
    model.add( Cropping2D( cropping=( (70,25), (0,0) ) ) )
    model.add( Conv2D( 24, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 36, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 48, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Flatten( ) )
    model.add( Dense(100, activation='tanh' ) )
    model.add( Dropout(0.5) )
    model.add( Dense(50, activation='tanh' ) )
    model.add( Dropout(0.25) )
    model.add( Dense(10, activation='tanh' ) )
    model.add( Dropout(0.125) )
    model.add( Dense(1, activation='linear') )
    # Converting curvature to angle, assuming a wheelbase of 2 meters, then
    # going from rad to deg: alpha = atan(l*r)*57.3 with l = 2 m.
    model.add( Lambda( lambda x: tf.multiply( tf.atan( tf.multiply( x, 2 ) ), 57.3 ) ) )
    opt = keras.optimizers.Adam(learning_rate=lr )
    model.compile( loss='mse', optimizer=opt )
    return model
def NVIDIANetV5( lr=1e-3):
    """NVIDIANetV4 variant: dropout only after the first two dense layers
    (0.5 then 0.25), none after the 10-unit layer.

    Output is curvature 1/R, converted to a steering angle in degrees.

    :param lr: learning rate for the Adam optimizer.
    :return: compiled Keras Sequential model (MSE loss on the angle).
    """
    model = Sequential( name="NVIDIANetV5" )
    model.add( Lambda( lambda x: (x/255.0) - 0.5, input_shape=( 160, 320, 3 ) ) )
    model.add( Cropping2D( cropping=( (70,25), (0,0) ) ) )
    model.add( Conv2D( 24, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 36, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 48, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Flatten( ) )
    model.add( Dense(100, activation='tanh' ) )
    model.add( Dropout(0.5) )
    model.add( Dense(50, activation='tanh' ) )
    model.add( Dropout(0.25) )
    model.add( Dense(10, activation='tanh' ) )
    model.add( Dense(1, activation='linear') )
    # Converting curvature to angle, assuming a wheelbase of 2 meters, then
    # going from rad to deg: alpha = atan(l*r)*57.3 with l = 2 m.
    model.add( Lambda( lambda x: tf.multiply( tf.atan( tf.multiply( x, 2 ) ), 57.3 ) ) )
    opt = keras.optimizers.Adam(learning_rate=lr )
    model.compile( loss='mse', optimizer=opt )
    return model
def NVIDIANetV6( lr=1e-3):
    """NVIDIANetV5 variant with an extra Dropout(0.5) right after Flatten.

    Output is curvature 1/R, converted to a steering angle in degrees.

    :param lr: learning rate for the Adam optimizer.
    :return: compiled Keras Sequential model (MSE loss on the angle).
    """
    model = Sequential( name="NVIDIANetV6" )
    model.add( Lambda( lambda x: (x/255.0) - 0.5, input_shape=( 160, 320, 3 ) ) )
    model.add( Cropping2D( cropping=( (70,25), (0,0) ) ) )
    model.add( Conv2D( 24, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 36, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 48, 5, 2, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Conv2D( 64, 3, activation='relu', padding='valid' ) )
    model.add( Flatten( ) )
    model.add( Dropout(0.5) )
    model.add( Dense(100, activation='tanh' ) )
    model.add( Dropout(0.5) )
    model.add( Dense(50, activation='tanh' ) )
    model.add( Dropout(0.25) )
    model.add( Dense(10, activation='tanh' ) )
    model.add( Dense(1, activation='linear') )
    # Converting curvature to angle, assuming a wheelbase of 2 meters, then
    # going from rad to deg: alpha = atan(l*r)*57.3 with l = 2 m.
    model.add( Lambda( lambda x: tf.multiply( tf.atan( tf.multiply( x, 2 ) ), 57.3 ) ) )
    opt = keras.optimizers.Adam(learning_rate=lr )
    model.compile( loss='mse', optimizer=opt )
    return model
def ModNVIDIANetV1( lr=1e-3):
    """Modified NVIDIANet: stride-1 'same' convs + 2x2 max-pooling between
    them, an extra 300-unit tanh dense layer, curvature output in degrees.

    :param lr: learning rate for the Adam optimizer.
    :return: compiled Keras Sequential model (MSE loss on the angle).
    """
    model = Sequential( name = "ModNVIDIANetV1" )
    model.add( Lambda( lambda x: (x/255.0) - 0.5, input_shape=( 160, 320, 3 ) ) )
    model.add( Cropping2D( cropping=( (70,25), (0,0) ) ) )
    # Keeping padding as "same" and applying a max-pool for downsampling
    # instead of strided convolutions.
    model.add( Conv2D( 24, 5, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 36, 5, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 48, 5, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 64, 3, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 64, 3, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( pool_size=(4, 2) ) ) # forcing this output to become "flat"
    model.add( Flatten( ) )
    model.add( Dense(300, activation='tanh' ) )
    model.add( Dense(100, activation='tanh' ) )
    model.add( Dense(50, activation='tanh' ) )
    model.add( Dense(10, activation='tanh' ) )
    model.add( Dense(1, activation='linear' ) )
    # Converting curvature to angle, assuming a wheelbase of 2 meters, then
    # going from rad to deg: alpha = atan(l*r)*57.3 with l = 2 m.
    model.add( Lambda( lambda x: tf.multiply( tf.atan( tf.multiply( x, 2 ) ), 57.3 ) ) )
    opt = keras.optimizers.Adam(learning_rate=lr )
    model.compile( loss='mse', optimizer=opt )
    return model
def ModNVIDIANetV2( lr=1e-3):
    """ModNVIDIANetV1 variant with a fully *linear* dense head.

    Output is curvature 1/R, converted to a steering angle in degrees.

    :param lr: learning rate for the Adam optimizer.
    :return: compiled Keras Sequential model (MSE loss on the angle).
    """
    model = Sequential( name = "ModNVIDIANetV2" )
    model.add( Lambda( lambda x: (x/255.0) - 0.5, input_shape=( 160, 320, 3 ) ) )
    model.add( Cropping2D( cropping=( (70,25), (0,0) ) ) )
    # Keeping padding as "same" and applying a max-pool for downsampling.
    model.add( Conv2D( 24, 5, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 36, 5, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 48, 5, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 64, 3, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 64, 3, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( pool_size=(4, 2) ) ) # forcing this output to become "flat"
    model.add( Flatten( ) )
    model.add( Dense(300, activation='linear' ) )
    model.add( Dense(100, activation='linear' ) )
    model.add( Dense(50, activation='linear' ) )
    model.add( Dense(10, activation='linear' ) )
    model.add( Dense(1, activation='linear' ) )
    # Converting curvature to angle, assuming a wheelbase of 2 meters, then
    # going from rad to deg: alpha = atan(l*r)*57.3 with l = 2 m.
    model.add( Lambda( lambda x: tf.multiply( tf.atan( tf.multiply( x, 2 ) ), 57.3 ) ) )
    opt = keras.optimizers.Adam(learning_rate=lr )
    model.compile( loss='mse', optimizer=opt )
    return model
def ModNVIDIANetV3( lr=1e-3):
    """ModNVIDIANetV1 variant: no 300-unit layer, tanh head with dropout
    (0.5 after Dense(100), 0.25 after Dense(50)).

    Output is curvature 1/R, converted to a steering angle in degrees.

    :param lr: learning rate for the Adam optimizer.
    :return: compiled Keras Sequential model (MSE loss on the angle).
    """
    model = Sequential( name = "ModNVIDIANetV3" )
    model.add( Lambda( lambda x: (x/255.0) - 0.5, input_shape=( 160, 320, 3 ) ) )
    model.add( Cropping2D( cropping=( (70,25), (0,0) ) ) )
    # Keeping padding as "same" and applying a max-pool for downsampling.
    model.add( Conv2D( 24, 5, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 36, 5, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 48, 5, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 64, 3, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( ) )
    model.add( Conv2D( 64, 3, 1, activation='relu', padding='same' ) )
    model.add( MaxPool2D( pool_size=(4, 2) ) ) # forcing this output to become "flat"
    model.add( Flatten( ) )
    model.add( Dense(100, activation='tanh' ) )
    model.add( Dropout(0.5) )
    model.add( Dense(50, activation='tanh' ) )
    model.add( Dropout(0.25) )
    model.add( Dense(10, activation='tanh' ) )
    model.add( Dense(1, activation='linear') )
    # Converting curvature to angle, assuming a wheelbase of 2 meters, then
    # going from rad to deg: alpha = atan(l*r)*57.3 with l = 2 m.
    model.add( Lambda( lambda x: tf.multiply( tf.atan( tf.multiply( x, 2 ) ), 57.3 ) ) )
    opt = keras.optimizers.Adam(learning_rate=lr )
    model.compile( loss='mse', optimizer=opt )
    return model
# Hyper parameters
BATCH_SIZE = 64
LEARNING_RATE = 1e-4
EPOCHS = 5

# The architecture is chosen by name on the command line.
model_name = sys.argv[1]

# Dispatch table instead of a long if/elif chain; every builder takes the
# learning rate and returns a compiled model.
_MODEL_BUILDERS = {
    'NVIDIANetV0': NVIDIANetV0,
    'NVIDIANetV1': NVIDIANetV1,
    'NVIDIANetV2': NVIDIANetV2,
    'NVIDIANetV3': NVIDIANetV3,
    'NVIDIANetV4': NVIDIANetV4,
    'NVIDIANetV5': NVIDIANetV5,
    'NVIDIANetV6': NVIDIANetV6,
    'ModNVIDIANetV1': ModNVIDIANetV1,
    'ModNVIDIANetV2': ModNVIDIANetV2,
    'ModNVIDIANetV3': ModNVIDIANetV3,
}

if model_name not in _MODEL_BUILDERS:
    raise Exception('Invalid model name')
model = _MODEL_BUILDERS[model_name](LEARNING_RATE)
#Load data. Split data into train and validation
# driving_log.csv has no header row, so columns are named explicitly.
# Only 'center' and 'measurement' (steering) are used below; columns
# '1','2','3' presumably hold throttle/brake/speed — confirm against the
# simulator's log format.
df = pd.read_csv('data/driving_log.csv', names=['center', 'left', 'right', 'measurement', '1', '2', '3'])
rng = RandomState()
# 70/30 random train/validation split; `valid` is the complement of `train`.
train = df.sample( frac=0.7, random_state=rng )
valid = df.loc[~df.index.isin(train.index) ]
NUM_TRAIN_IMAGES = train.shape[0]
NUM_TEST_IMAGES = valid.shape[0]
#Deffining the generator
def load_data( df, batch_size, augument=False ):
    """Infinite generator yielding (images, steering) training batches.

    Reads the center-camera image referenced by each row of *df* from
    ./data/IMG/ and pairs it with the steering measurement.  When
    *augument* is True, each sample also contributes a horizontally
    flipped copy with negated steering.  Wraps around *df* forever.

    :param df: DataFrame with 'center' (image path) and 'measurement' columns.
    :param batch_size: number of samples per yielded batch.
    :param augument: add flipped copies of each image when True.
    :yield: tuple (np.array of images, np.array of steering values).
    """
    i = 0
    while True:
        images = []
        measurements = []
        while len(images) < batch_size:
            image_path = df.iloc[i, :]['center'].split('/')[-1]
            current_path = './data/IMG/' + image_path
            measurement = float(df.iloc[i, :]['measurement'])
            image = cv2.imread(current_path)
            measurements.append(measurement)
            images.append(image)
            # Bug fix: the old code always appended the flipped copy, so an
            # augmented batch could end up with batch_size + 1 samples.
            if augument and len(images) < batch_size:
                # The mirrored frame steers the opposite way.
                flipped_image = cv2.flip(image, 1)
                images.append(flipped_image)
                measurements.append(-1.0 * measurement)
            i += 1
            if i == df.shape[0]:
                # Start over from the first row; generators never terminate.
                i = 0
        yield (np.array(images), np.array(measurements))
#Define the generators
trainGen = load_data( train, BATCH_SIZE, True)  # augmented training stream
validGen = load_data( valid, BATCH_SIZE )  # un-augmented validation stream
# Flip augmentation doubles the effective number of training images.
NUM_TRAIN_IMAGES = 2*NUM_TRAIN_IMAGES
NUM_TEST_IMAGES = NUM_TEST_IMAGES  # NOTE(review): self-assignment, no effect
print(model.summary())
#Using tensorboard
logdir = "logs/scalars/" + model.name
#defining tensorboard callback
tensorboard_callback = keras.callbacks.TensorBoard(log_dir=logdir)
model.fit(
    x=trainGen,
    steps_per_epoch=NUM_TRAIN_IMAGES//BATCH_SIZE,
    verbose=1,
    validation_data=validGen,
    validation_steps=NUM_TEST_IMAGES//BATCH_SIZE, epochs=EPOCHS,
    callbacks=[tensorboard_callback] )
model.save( model.name + '.h5')
| 37.468683 | 112 | 0.654427 | 0 | 0 | 1,141 | 0.065771 | 0 | 0 | 0 | 0 | 4,898 | 0.282338 |
9bcf66aa506518f09109e3c3f1caf8194c4a6b3a | 1,418 | py | Python | tools/debug_discovery.py | s1rd4v3/homebridge-tuya-web-es6-js | 4d02d05117f88e4a4251158716ef0677a2af92db | [
"MIT"
] | 172 | 2020-05-17T10:51:17.000Z | 2022-03-21T08:54:00.000Z | tools/debug_discovery.py | s1rd4v3/homebridge-tuya-web-es6-js | 4d02d05117f88e4a4251158716ef0677a2af92db | [
"MIT"
] | 303 | 2020-05-17T20:42:57.000Z | 2022-03-30T07:37:32.000Z | tools/debug_discovery.py | s1rd4v3/homebridge-tuya-web-es6-js | 4d02d05117f88e4a4251158716ef0677a2af92db | [
"MIT"
] | 85 | 2020-05-02T13:24:22.000Z | 2022-03-23T15:48:04.000Z | # The script is intended to get a list of all devices available via Tuya Home Assistant API endpoint.
import requests
import pprint
# CHANGE THIS - BEGINNING
USERNAME = ""
PASSWORD = ""
REGION = "eu" # cn, eu, us
COUNTRY_CODE = "1" # Your account country code, e.g., 1 for USA or 86 for China
BIZ_TYPE = "smart_life" # tuya, smart_life, jinvoo_smart
FROM = "tuya" # you likely don't need to touch this
# CHANGE THIS - END
# NO NEED TO CHANGE ANYTHING BELOW
TUYACLOUDURL = "https://px1.tuya{}.com"
pp = pprint.PrettyPrinter(indent=4)
print("Getting credentials")
auth_response = requests.post(
(TUYACLOUDURL + "/homeassistant/auth.do").format(REGION),
data={
"userName": USERNAME,
"password": PASSWORD,
"countryCode": COUNTRY_CODE,
"bizType": BIZ_TYPE,
"from": FROM,
},
)
print("Got credentials")
auth_response = auth_response.json()
pp.pprint(auth_response)
header = {"name": "Discovery", "namespace": "discovery", "payloadVersion": 1}
payload = {"accessToken": auth_response["access_token"]}
data = {"header": header, "payload": payload}
print("Getting devices")
discovery_response = requests.post(
(TUYACLOUDURL + "/homeassistant/skill").format(REGION), json=data
)
print("Got devices")
discovery_response = discovery_response.json()
pp.pprint(discovery_response)
print("!!! NOW REMOVE THIS FILE, SO YOUR CREDENTIALS (username, password) WON'T LEAK !!!")
| 31.511111 | 101 | 0.703808 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 717 | 0.505642 |
9bd1df98ec0a7f71b2cc5017976bf6fbcc6f9846 | 3,562 | py | Python | yamtbx/dataproc/adxv.py | 7l2icj/kamo_clone | 5f4a5eed3cd9d91a021d805e46125c19cc2ed1b6 | [
"BSD-3-Clause"
] | 16 | 2016-05-20T11:19:40.000Z | 2021-01-01T19:44:23.000Z | yamtbx/dataproc/adxv.py | 7l2icj/kamo_clone | 5f4a5eed3cd9d91a021d805e46125c19cc2ed1b6 | [
"BSD-3-Clause"
] | 4 | 2017-03-10T00:51:11.000Z | 2021-02-07T17:18:46.000Z | yamtbx/dataproc/adxv.py | 7l2icj/kamo_clone | 5f4a5eed3cd9d91a021d805e46125c19cc2ed1b6 | [
"BSD-3-Clause"
] | 9 | 2016-12-15T16:00:06.000Z | 2021-09-10T08:34:14.000Z | """
(c) RIKEN 2015. All rights reserved.
Author: Keitaro Yamashita
This software is released under the new BSD License; see LICENSE.
"""
import socket
import subprocess
import time
import os
import getpass
import tempfile
class Adxv:
    """Controller for an external adxv image-viewer process via its socket API.

    Starts adxv with "-socket <port>", connects a TCP socket to it, and sends
    text commands (load_image, define_type, load_spots, ...). Written for
    Python 2 (print statements, xrange).
    """
    def __init__(self, adxv_bin=None, no_adxv_beam_center=True):
        self.adxv_bin = adxv_bin
        self.no_adxv_beam_center = no_adxv_beam_center
        if self.adxv_bin is None: self.adxv_bin = "adxv"
        self.adxv_proc = None # subprocess object
        self.adxv_port = 8100 # adxv's default port. overridden later.
        self.sock = None
        self.spot_type_counter = -1  # incremented by define_spot()
    # __init__()

    def start(self, cwd=None):
        """Launch adxv (if not already running) and connect to its socket."""
        adxv_comm = self.adxv_bin + " -socket %d"
        if self.no_adxv_beam_center: adxv_comm += " -no_adxv_beam_center"

        if not self.is_alive():
            # find available port number: bind to port 0 and read back the
            # kernel-assigned free port, then release it for adxv to use.
            sock_test = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock_test.bind(("localhost", 0))
            self.adxv_port = sock_test.getsockname()[1]
            sock_test.close()
            # start adxv
            self.adxv_proc = subprocess.Popen(adxv_comm%self.adxv_port, shell=True, cwd=cwd)

            for i in xrange(10): # try for 5 seconds.
                try:
                    self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # On OSX(?), need to re-create object when failed
                    self.sock.connect(("localhost", self.adxv_port))
                    break
                except socket.error:
                    time.sleep(.5)
                    continue
    # start()

    def is_alive(self):
        """Return True while the spawned adxv process is still running."""
        return self.adxv_proc is not None and self.adxv_proc.poll() is None # None means still running.
    # is_alive()

    def open_image(self, imgfile, raise_window=True):
        """Ask adxv to display imgfile, optionally raising its windows."""
        self.start(cwd=os.path.dirname(imgfile))
        sent = self.sock.send("load_image %s\n"%imgfile)
        if sent == 0:
            raise RuntimeError("adxv load failed! Close adxv and double-click again.")

        if raise_window:
            sent = self.sock.send("raise_window Control\n") # raise_window is available from adxv 1.9.9
            sent = self.sock.send("raise_window Image\n")
        #sock.close()
    # open_image()

    def open_hdf5(self, h5file, frameno_or_path, tmpdir=None, raise_window=True, binning=1):
        """Extract one frame of an Eiger HDF5 file to a temporary minicbf and show it."""
        from yamtbx.dataproc import eiger
        if tmpdir is None:
            # Prefer the RAM-backed /dev/shm when available to avoid disk I/O.
            tmpdir = "/dev/shm" if os.path.isdir("/dev/shm") else tempfile.gettempdir()

        imgfile = os.path.join(tmpdir, "adxvtmp-%s-%s.cbf"%(getpass.getuser(), os.getpid()))
        eiger.extract_to_minicbf(h5file, frameno_or_path, imgfile, binning=binning)
        self.open_image(imgfile, raise_window=raise_window)
    # open_hdf5()

    def define_spot(self, color, radius=0, box=0):
        """Register a new spot type in adxv; returns the type index for load_spots()."""
        self.spot_type_counter += 1
        sent = self.sock.send("box %d %d\n" % (box,box)) # seems ignored?
        sent = self.sock.send("define_type %d color %s radius %d\n"%(self.spot_type_counter, color, radius))
        print sent
        if sent == 0:
            print "define_spot failed!"
        sent = self.sock.send("box 20 20\n")
        return self.spot_type_counter
    # define_spot()

    def load_spots(self, spots):
        """Send (x, y, type) spot triplets to adxv for overlay display."""
        if len(spots) == 0:
            return

        sent = self.sock.send("load_spots %d\n" % len(spots))
        for x, y, t in spots:
            sent = self.sock.send("%.2f %.2f %d\n" % (x, y, t))
        sent = self.sock.send("end_of_pack\n")
    # load_spots()
# class Adxv
| 33.603774 | 131 | 0.609208 | 3,322 | 0.932622 | 0 | 0 | 0 | 0 | 0 | 0 | 858 | 0.240876 |
9bd2380bf4dfb9713249af00b966084de05b5ade | 4,661 | py | Python | model/net.py | JiazeWang/Luna16 | 5ef7f4b539cc1ca72291e93d17cc18f408a3119d | [
"MIT"
] | 47 | 2020-08-04T19:06:15.000Z | 2022-03-17T08:46:48.000Z | model/net.py | JiazeWang/Luna16 | 5ef7f4b539cc1ca72291e93d17cc18f408a3119d | [
"MIT"
] | 10 | 2020-08-06T13:07:58.000Z | 2021-09-08T02:22:48.000Z | model/net.py | JiazeWang/Luna16 | 5ef7f4b539cc1ca72291e93d17cc18f408a3119d | [
"MIT"
] | 22 | 2020-08-06T13:21:58.000Z | 2022-03-16T04:14:04.000Z | import torch
from torch import nn
from configs import ANCHOR_SIZES
class PostRes(nn.Module):
    """3D residual block: conv-BN-ReLU-conv-BN plus an identity or 1x1x1
    projection shortcut, with a final ReLU on the sum."""

    def __init__(self, n_in, n_out, stride=1):
        super(PostRes, self).__init__()
        # Main path. Submodule creation order is kept stable so random
        # parameter initialization stays reproducible under a fixed seed.
        self.conv1 = nn.Conv3d(n_in, n_out, kernel_size=3, stride=stride, padding=1)
        self.bn1 = nn.BatchNorm3d(n_out)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv3d(n_out, n_out, kernel_size=3, padding=1)
        self.bn2 = nn.BatchNorm3d(n_out)
        # A projection shortcut is only needed when the spatial size or the
        # channel count changes; otherwise the identity is used.
        if stride == 1 and n_in == n_out:
            self.shortcut = None
        else:
            self.shortcut = nn.Sequential(
                nn.Conv3d(n_in, n_out, kernel_size=1, stride=stride),
                nn.BatchNorm3d(n_out))

    def forward(self, x):
        identity = x if self.shortcut is None else self.shortcut(x)
        h = self.relu(self.bn1(self.conv1(x)))
        h = self.bn2(self.conv2(h))
        return self.relu(h + identity)
class Net(nn.Module):
    """U-Net-style 3D detection network built from PostRes residual blocks.

    Encoder path: preBlock then forw1..forw4 with max-pooling between stages.
    Decoder path: two transposed-conv upsamplings (path1/path2) whose outputs
    are concatenated with the matching encoder features (and the `coord`
    tensor at the second merge) before back3/back2. The head emits
    5 * len(ANCHOR_SIZES) values per spatial location, reshaped to
    (batch, D, H, W, num_anchors, 5).
    """
    def __init__(self):
        super(Net, self).__init__()
        # Stem: two 3x3x3 conv-BN-ReLU layers, 1 -> 24 channels.
        self.preBlock = nn.Sequential(
            nn.Conv3d(1, 24, kernel_size=3, padding=1),
            nn.BatchNorm3d(24),
            nn.ReLU(inplace=True),
            nn.Conv3d(24, 24, kernel_size=3, padding=1),
            nn.BatchNorm3d(24),
            nn.ReLU(inplace=True))

        # Residual block counts and channel widths per stage.
        num_blocks_forw = [2, 2, 3, 3]
        num_blocks_back = [3, 3]
        self.featureNum_forw = [24, 32, 64, 64, 64]
        self.featureNum_back = [128, 64, 64]
        # Encoder stages forw1..forw4: first block of each stage changes the
        # channel count, the rest keep it.
        for i in range(len(num_blocks_forw)):
            blocks = []
            for j in range(num_blocks_forw[i]):
                if j == 0:
                    blocks.append(PostRes(self.featureNum_forw[i], self.featureNum_forw[i + 1]))
                else:
                    blocks.append(PostRes(self.featureNum_forw[i + 1], self.featureNum_forw[i + 1]))
            setattr(self, 'forw' + str(i + 1), nn.Sequential(*blocks))

        # Decoder stages back3/back2: the first block consumes the upsampled
        # features concatenated with the skip connection; `addition = 3`
        # accounts for the 3 extra `coord` channels at the back2 merge (i == 0
        # here builds back2's input width, used via 'back' + str(i + 2)).
        for i in range(len(num_blocks_back)):
            blocks = []
            for j in range(num_blocks_back[i]):
                if j == 0:
                    if i == 0:
                        addition = 3
                    else:
                        addition = 0
                    blocks.append(PostRes(self.featureNum_back[i + 1] + self.featureNum_forw[i + 2] + addition,
                                          self.featureNum_back[i]))
                else:
                    blocks.append(PostRes(self.featureNum_back[i], self.featureNum_back[i]))
            setattr(self, 'back' + str(i + 2), nn.Sequential(*blocks))

        # Pooling layers return indices for the (currently unused) unpoolers.
        self.maxpool1 = nn.MaxPool3d(kernel_size=2, stride=2, return_indices=True)
        self.maxpool2 = nn.MaxPool3d(kernel_size=2, stride=2, return_indices=True)
        self.maxpool3 = nn.MaxPool3d(kernel_size=2, stride=2, return_indices=True)
        self.maxpool4 = nn.MaxPool3d(kernel_size=2, stride=2, return_indices=True)
        self.unmaxpool1 = nn.MaxUnpool3d(kernel_size=2, stride=2)
        self.unmaxpool2 = nn.MaxUnpool3d(kernel_size=2, stride=2)

        # Learned 2x upsampling for the decoder path.
        self.path1 = nn.Sequential(
            nn.ConvTranspose3d(64, 64, kernel_size=2, stride=2),
            nn.BatchNorm3d(64),
            nn.ReLU(inplace=True))
        self.path2 = nn.Sequential(
            nn.ConvTranspose3d(64, 64, kernel_size=2, stride=2),
            nn.BatchNorm3d(64),
            nn.ReLU(inplace=True))
        self.drop = nn.Dropout3d(p=0.5, inplace=False)
        # Detection head: per-voxel, per-anchor regression/confidence outputs.
        self.output = nn.Sequential(nn.Conv3d(self.featureNum_back[0], 64, kernel_size=1),
                                    nn.ReLU(),
                                    nn.Conv3d(64, 5 * len(ANCHOR_SIZES), kernel_size=1))

    def forward(self, x, coord):
        out = self.preBlock(x)  # 16
        out_pool, indices0 = self.maxpool1(out)
        out1 = self.forw1(out_pool)  # 32
        out1_pool, indices1 = self.maxpool2(out1)
        out2 = self.forw2(out1_pool)  # 64
        out2_pool, indices2 = self.maxpool3(out2)
        out3 = self.forw3(out2_pool)  # 96
        out3_pool, indices3 = self.maxpool4(out3)
        out4 = self.forw4(out3_pool)  # 96
        # Decoder: upsample and fuse with encoder skip connections.
        rev3 = self.path1(out4)
        comb3 = self.back3(torch.cat((rev3, out3), 1))  # 96+96
        rev2 = self.path2(comb3)
        comb2 = self.back2(torch.cat((rev2, out2, coord), 1))  # 64+64
        comb2 = self.drop(comb2)
        out = self.output(comb2)
        # Reshape (B, 5*A, D, H, W) -> (B, D, H, W, A, 5).
        size = out.size()
        out = out.view(out.size(0), out.size(1), -1)
        out = out.transpose(1, 2).contiguous().view(size[0], size[2], size[3], size[4], len(ANCHOR_SIZES), 5)
        return out
| 40.885965 | 111 | 0.556104 | 4,588 | 0.984338 | 0 | 0 | 0 | 0 | 0 | 0 | 46 | 0.009869 |
9bd2b6d1890b4393a8dfbeef61de65778cef6198 | 5,743 | py | Python | multiWindowTest.py | LukasHegenbarth/lableImg | 82d96193889cd41d25cc38b3425172a47034baf9 | [
"MIT"
] | 2 | 2021-01-31T16:24:17.000Z | 2021-11-06T14:52:34.000Z | multiWindowTest.py | LukasHegenbarth/lableImg | 82d96193889cd41d25cc38b3425172a47034baf9 | [
"MIT"
] | null | null | null | multiWindowTest.py | LukasHegenbarth/lableImg | 82d96193889cd41d25cc38b3425172a47034baf9 | [
"MIT"
] | 1 | 2021-07-09T06:47:03.000Z | 2021-07-09T06:47:03.000Z | import codecs
import distutils.spawn
import os.path
import platform
import re
import subprocess
import sys
from collections import defaultdict
from functools import partial
try:
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWidgets import *
from PyQt5.QtWebEngineWidgets import QWebEngineView
except ImportError:
# needed for py3+qt4
# Ref:
# http://pyqt.sourceforge.net/Docs/PyQt4/incompatible_apis.html
# http://stackoverflow.com/questions/21217399/pyqt4-qtcore-qvariant-object-instead-of-a-string
if sys.version_info.major >= 3:
import sip
sip.setapi('QVariant', 2)
from PyQt4.QtGui import *
from PyQt4.QtCore import *
class stackedWindow(QWidget):
    """Three-page stacked widget with a row of navigation buttons.

    Page 1 is a placeholder for the labelImg app, page 2 hosts training
    parameters plus an embedded TensorBoard view, page 3 is empty. The
    active page's button is highlighted green (#0cdd8c).
    """
    def __init__(self):
        super(stackedWindow, self).__init__()
        # Navigation buttons; button1 starts highlighted (page 1 active).
        self.button1 = QPushButton('Data \n Labeling')
        self.button1.setStyleSheet("background-color: #0cdd8c")
        self.button2 = QPushButton('Neural Network \n Training')
        self.button2.setStyleSheet("background-color: #eeeeee")
        self.button3 = QPushButton('Third Page \n')
        self.button3.setStyleSheet("background-color: #eeeeee")
        # One container widget per page, populated by the stackNUI helpers.
        self.stack1 = QWidget()
        self.stack2 = QWidget()
        self.stack3 = QWidget()
        self.stack1UI()
        self.stack2UI()
        self.stack3UI()
        self.Stack = QStackedWidget(self)
        self.Stack.addWidget(self.stack1)
        self.Stack.addWidget(self.stack2)
        self.Stack.addWidget(self.stack3)
        # Buttons on top, stacked pages below.
        self.mainVBox = QVBoxLayout(self)
        self.button_layout = QHBoxLayout(self)
        self.button_layout.addWidget(self.button1)
        self.button_layout.addWidget(self.button2)
        self.button_layout.addWidget(self.button3)
        self.button_layout.setAlignment(Qt.AlignLeft)
        self.window_layout = QVBoxLayout(self)
        self.window_layout.addWidget(self.Stack)
        self.mainVBox.addLayout(self.button_layout)
        self.mainVBox.addLayout(self.window_layout)
        self.setLayout(self.mainVBox)
        self.button1.clicked.connect(self.button1_fcn)
        self.button2.clicked.connect(self.button2_fcn)
        self.button3.clicked.connect(self.button3_fcn)
        self.setGeometry(300, 50, 10, 10)
        # self.setWindowTitle('Training Data Pipeline')

    def stack1UI(self):
        """Page 1: placeholder for the data-labeling tool."""
        layout = QFormLayout()
        label = QLabel('labelImg app will be included here')
        layout.addWidget(label)
        self.stack1.setLayout(layout)

    def stack2UI(self):
        """Page 2: training-parameter form on the left, TensorBoard on the right."""
        hbox = QHBoxLayout(self)
        #TODO add param list for training config
        paramVBox = QVBoxLayout(self)
        #add all necessary param fields
        self.addParamLine(paramVBox, 'batch size')
        self.addParamLine(paramVBox, 'learning rate')
        self.addParamLine(paramVBox, 'total steps')
        self.addParamLine(paramVBox, 'warmup steps')
        self.addParamLine(paramVBox, 'warmup learning rate')
        self.trainingButton = QPushButton('start Training')
        self.trainingStatus = QLabel('Training Status')
        self.trainingStatus.setMaximumWidth(250)
        self.trainingStatus.setStyleSheet("background-color: transparent")
        paramVBox.addWidget(self.trainingButton)
        paramVBox.addWidget(self.trainingStatus)
        paramVBox.setAlignment(Qt.AlignTop)
        # Embedded browser showing the local TensorBoard instance.
        self.webEngineView = QWebEngineView()
        self.loadPage()
        hbox.addLayout(paramVBox)
        hbox.addWidget(self.webEngineView)
        self.stack2.setLayout(hbox)

    def stack3UI(self):
        """Page 3: currently an empty placeholder."""
        layout = QFormLayout()
        label = QLabel('This page shows nothing yet')
        layout.addWidget(label)
        self.stack3.setLayout(layout)

    def addParamLine(self, layout, param):
        """Append one 'label + line edit' row named `param` to `layout`."""
        paramBox = QHBoxLayout(self)
        paramName = QLabel(param)
        paramName.setMaximumWidth(150)
        paramName.setStyleSheet("background-color: transparent")
        paramBox.addWidget(paramName)
        lineEdit = QLineEdit()
        lineEdit.setMaximumWidth(200)
        lineEdit.setStyleSheet("background-color: #ffffff")
        paramBox.addWidget(lineEdit)
        layout.addLayout(paramBox)

    def loadPage(self):
        # Assumes TensorBoard is serving on its default local port 6006.
        self.webEngineView.load(QUrl('http://localhost:6006/'))

    def button1_fcn(self):
        """Show page 1 and highlight its button."""
        self.Stack.setCurrentIndex(0)
        self.button1.setStyleSheet("background-color: #0cdd8c")
        self.button2.setStyleSheet("background-color: #eeeeee")
        self.button3.setStyleSheet("background-color: #eeeeee")

    def button2_fcn(self):
        """Show page 2 and highlight its button."""
        self.Stack.setCurrentIndex(1)
        self.button1.setStyleSheet("background-color: #eeeeee")
        self.button2.setStyleSheet("background-color: #0cdd8c")
        self.button3.setStyleSheet("background-color: #eeeeee")

    def button3_fcn(self):
        """Show page 3 and highlight its button."""
        self.Stack.setCurrentIndex(2)
        self.button1.setStyleSheet("background-color: #eeeeee")
        self.button2.setStyleSheet("background-color: #eeeeee")
        self.button3.setStyleSheet("background-color: #0cdd8c")
class MainWindow(QMainWindow):
    """Top-level application window hosting the stacked page widget."""

    def __init__(self, *args, **kwargs):
        super(MainWindow, self).__init__(*args, **kwargs)
        self.setWindowTitle("Data Pipeline")
        # The stacked widget supplies the Labeling/Training/Third pages.
        self.setCentralWidget(stackedWindow())
def main():
    """Create the Qt application, show the main window, and run the event loop."""
    app = QApplication(sys.argv)
    window = MainWindow()
    window.show()
    # exec_() blocks until the GUI exits; propagate its status code.
    sys.exit(app.exec_())


if __name__ == '__main__':
    main()
| 35.233129 | 99 | 0.673516 | 4,577 | 0.79697 | 0 | 0 | 0 | 0 | 0 | 0 | 1,286 | 0.223925 |
9bd2ef61ca9a089dc9825ad4627fb4388dc0d5d8 | 571 | py | Python | api/accounts/urls.py | paurushofficial/InvitationApi | 35a33fab52bf9d2269b804d8b5e9958b41f7506c | [
"Apache-2.0"
] | null | null | null | api/accounts/urls.py | paurushofficial/InvitationApi | 35a33fab52bf9d2269b804d8b5e9958b41f7506c | [
"Apache-2.0"
] | null | null | null | api/accounts/urls.py | paurushofficial/InvitationApi | 35a33fab52bf9d2269b804d8b5e9958b41f7506c | [
"Apache-2.0"
] | null | null | null | from django.urls import path
from rest_framework_simplejwt.views import (
TokenObtainPairView,
TokenRefreshView,
TokenVerifyView,
)
from . views import *
urlpatterns = [
    # Account lifecycle endpoints.
    path('register/', UserRegisterView.as_view()),
    path('logout/', LogoutView.as_view()),
    # SimpleJWT token endpoints: obtain an access/refresh pair, refresh an
    # access token, and verify a token's validity.
    path('token/', TokenObtainPairView.as_view(), name='token_obtain_pair'),
    path('token/refresh/', TokenRefreshView.as_view(), name='token_refresh'),
    path('token/verify/', TokenVerifyView.as_view(), name='token_verify'),
    path('change_password/', ChangePasswordView.as_view()),
]
| 30.052632 | 77 | 0.718039 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 125 | 0.218914 |
9bd32e9affe9d4798bdc0f0e2c528ef20a36adfd | 3,450 | py | Python | PhyTestOnline/PhyTestOnline.py | JerryLife/PhyTestOnline | c3ca3ec396195e587b7409b492e3848daffca6fd | [
"MIT"
] | 1 | 2016-12-12T06:09:29.000Z | 2016-12-12T06:09:29.000Z | PhyTestOnline/PhyTestOnline.py | JerryLife/PhyTestOnline | c3ca3ec396195e587b7409b492e3848daffca6fd | [
"MIT"
] | null | null | null | PhyTestOnline/PhyTestOnline.py | JerryLife/PhyTestOnline | c3ca3ec396195e587b7409b492e3848daffca6fd | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2016 Jerry Life
import urllib
import urllib2
import re
import xlwt
START_PAGE = 0 # start page
ALL_PAGE = 82 # number of all pages
class PhyTestOnline(object):
    """
    This class is specially for a HUST Physics Test Online providing a crawler to download
    the Keys and to save them as Excel(.xls). For convenience, you can just use PhyTestOnline.main()
    to finish the whole procedure.
    Attention: This is a simple practice in crawler, which is only for study and communication.
    It should never be used for illegal or improper ways like cheating. If so, the one who did it is
    responsible for his own behavior instead of the author.
    """
    # Written for Python 2 (print statements, urllib2, raw_input).
    def __init__(self, baseURL="http://115.156.215.236/admin/menu/query/queryandchoose/xianshi?paperNum=0"):
        self.baseURL = baseURL

    def getFirstPage(self, url=None):
        """Fetch one page (default: baseURL) and return it GBK-decoded, or None on failure."""
        if not url:
            url = self.baseURL
        try:
            # Spoof a desktop browser user agent for the request.
            user_agent = 'Mozilla/5.0 (Windows NT 10.0; WOW64) ' \
                         'AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.99 Safari/537.36'
            headers = {'User-Agent': user_agent}
            data = urllib.urlencode({})
            request = urllib2.Request(url, data, headers)
            response = urllib2.urlopen(request, timeout=10)
            content = response.read()
            return content.decode('GBK')
        except urllib2.URLError, e:
            if hasattr(e, "reason"):
                print "Fail to connect to PhysicsTestOnline:", e.reason
            return None

    def getText(self, url=None):
        """Scrape one page; return [(image_name, answer), ...] or None if counts mismatch."""
        textModel = re.compile('<td width="146">([0-9]+\.jpg)')
        ansModel = re.compile('<td width="41">(.*)</td>')
        html = self.getFirstPage(url)
        if not html:
            return None
        text = re.findall(textModel, html)
        ans = re.findall(ansModel, html)
        if len(text) == len(ans):
            print "%d Got" % len(ans)
            return zip(text, ans)
        else:
            print "Answer or picture lost!"
            return None

    def getAll(self, allPage=ALL_PAGE):
        """Iterate pages START_PAGE..allPage and collect every (image, answer) pair."""
        # Pages differ only in the trailing paperNum digit, so strip the last
        # character of baseURL and append the page index.
        startPage = self.baseURL[0:-1]
        ansList = []
        for i in range(START_PAGE, allPage+1):
            url = startPage + str(i)
            ans = self.getText(url)
            if not ans:
                pass
            else:
                print "Page%d finished.%d%%" % (i, i*100/allPage)
                ansList += ans
        print "Program complete."
        return ansList

    def saveAns(self, ansList, fileName='D:\TestAnswer.xls'):
        """Write the collected pairs to an .xls workbook, one pair per row."""
        ans = xlwt.Workbook()
        sheet = ans.add_sheet('Sheet1')
        numOfProblems = len(ansList)
        for i in range(numOfProblems):
            sheet.write(i, 0, ansList[i][0])
            sheet.write(i, 1, ansList[i][1])
            print "Line %d saved.%d%% Finished" % (i+1, (i+1)*100/numOfProblems)
        # need protect?
        ans.protect = True
        ans.wnd_protect = True
        ans.obj_protect = True
        ans.save(fileName)
        print "All saved."
        return None

    def main(self):
        """Crawl everything, then optionally save to Excel on user confirmation."""
        ansList = self.getAll()
        iSave = raw_input('Save now? y/n: ')
        if iSave == 'y':
            self.saveAns(ansList)
        else:
            return None
        return True
ans = PhyTestOnline()
ans.main() | 34.5 | 109 | 0.553623 | 3,223 | 0.934203 | 0 | 0 | 0 | 0 | 0 | 0 | 1,088 | 0.315362 |
9bd4563be99dc04513758c49ad620fb919be44b1 | 7,554 | py | Python | examples/pytorch/DROCC/main_tabular.py | KimSangYeon-DGU/EdgeML | a1021dfa1238b764e33476318d299206f20599e2 | [
"MIT"
] | null | null | null | examples/pytorch/DROCC/main_tabular.py | KimSangYeon-DGU/EdgeML | a1021dfa1238b764e33476318d299206f20599e2 | [
"MIT"
] | null | null | null | examples/pytorch/DROCC/main_tabular.py | KimSangYeon-DGU/EdgeML | a1021dfa1238b764e33476318d299206f20599e2 | [
"MIT"
] | null | null | null | from __future__ import print_function
import os
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import DataLoader, Dataset
from collections import OrderedDict
import numpy as np
from edgeml_pytorch.trainer.drocc_trainer import DROCCTrainer
class MLP(nn.Module):
    """One-hidden-layer perceptron: Linear -> ReLU -> Linear."""

    def __init__(self,
                 input_dim=2,
                 num_classes=1,
                 num_hidden_nodes=20):
        super(MLP, self).__init__()
        self.input_dim = input_dim
        self.num_classes = num_classes
        self.num_hidden_nodes = num_hidden_nodes
        # Hidden projection followed by an in-place ReLU.
        self.feature_extractor = nn.Sequential(OrderedDict([
            ('fc', nn.Linear(input_dim, num_hidden_nodes)),
            ('relu1', nn.ReLU(True)),
        ]))
        self.size_final = num_hidden_nodes
        # Linear read-out producing raw (unnormalized) scores.
        self.classifier = nn.Sequential(OrderedDict([
            ('fc1', nn.Linear(num_hidden_nodes, num_classes)),
        ]))

    def forward(self, input):
        hidden = self.feature_extractor(input)
        # Flatten to (batch, hidden) before the read-out layer.
        return self.classifier(hidden.view(-1, self.size_final))
def adjust_learning_rate(epoch, total_epochs, only_ce_epochs, learning_rate, optimizer):
    """Apply a piecewise-constant learning-rate decay to the optimizer.

    The initial `only_ce_epochs` pretraining epochs are excluded from the
    schedule; across the remaining DROCC epochs the rate decays by a factor
    of 10 at 30%, 60% and 90% of that phase.

    Parameters
    ----------
    epoch: Current training epoch.
    total_epochs: Total number of epochs for training.
    only_ce_epochs: Number of epochs for initial pretraining.
    learning_rate: Initial learning rate for training.
    optimizer: Optimizer whose param_groups are updated in place.
    """
    # Shift to a DROCC-phase-relative epoch counter.
    epoch = epoch - only_ce_epochs
    drocc_epochs = total_epochs - only_ce_epochs
    if epoch <= 0.30 * drocc_epochs:
        lr = learning_rate
    elif epoch <= 0.60 * drocc_epochs:
        lr = learning_rate * 0.1
    elif epoch <= 0.90 * drocc_epochs:
        lr = learning_rate * 0.01
    elif epoch <= drocc_epochs:
        lr = learning_rate * 0.001
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr
    return optimizer
class CustomDataset(Dataset):
    """Wraps numpy arrays of samples and labels for use with a DataLoader."""

    def __init__(self, data, labels):
        self.data = data
        self.labels = labels

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        # DataLoader samplers may hand us a tensor index; normalize it first.
        key = idx.tolist() if torch.is_tensor(idx) else idx
        sample = torch.from_numpy(self.data[key])
        # The trailing tensor([0]) is a placeholder third element per item.
        return sample, self.labels[key], torch.tensor([0])
def load_data(path):
    """Load train/test .npy files from `path` and return standardized datasets.

    Returns (train_dataset, test_dataset, num_features). Training data is
    assumed to be all-positive (labels set to ones); test labels come from
    test_labels.npy.
    """
    train_data = np.load(os.path.join(path, 'train_data.npy'), allow_pickle = True)
    train_lab = np.ones((train_data.shape[0])) #All positive labelled data points collected
    test_data = np.load(os.path.join(path, 'test_data.npy'), allow_pickle = True)
    test_lab = np.load(os.path.join(path, 'test_labels.npy'), allow_pickle = True)

    ## preprocessing: per-feature standardization; the small epsilon guards
    ## against zero-variance features. Train statistics are deliberately
    ## reused for the test split to avoid information leakage.
    mean=np.mean(train_data,0)
    std=np.std(train_data,0)
    train_data=(train_data-mean)/ (std + 1e-4)
    num_features = train_data.shape[1]
    test_data = (test_data - mean)/(std + 1e-4)
    train_samples = train_data.shape[0]
    test_samples = test_data.shape[0]
    print("Train Samples: ", train_samples)
    print("Test Samples: ", test_samples)
    return CustomDataset(train_data, train_lab), CustomDataset(test_data, test_lab), num_features
def main():
    """Build the MLP, wrap it in a DROCCTrainer, and run training.

    Relies on the module-level `args` (argparse namespace) and `device`
    globals set in the __main__ block.
    """
    train_dataset, test_dataset, num_features = load_data(args.data_path)
    train_loader = DataLoader(train_dataset, args.batch_size, shuffle=True)
    test_loader = DataLoader(test_dataset, args.batch_size, shuffle=True)

    model = MLP(input_dim=num_features, num_hidden_nodes=args.hd, num_classes=1).to(device)
    # Optimizer selection: 1 -> SGD with momentum, anything else -> Adam.
    if args.optim == 1:
        optimizer = optim.SGD(model.parameters(),
                                  lr=args.lr,
                                  momentum=args.mom)
        print("using SGD")
    else:
        optimizer = optim.Adam(model.parameters(),
                               lr=args.lr)
        print("using Adam")

    # Training the model
    trainer = DROCCTrainer(model, optimizer, args.lamda, args.radius, args.gamma, device)

    # Restore from checkpoint
    if args.restore == 1:
        if os.path.exists(os.path.join(args.model_dir, 'model.pt')):
            trainer.load(args.model_dir)
            print("Saved Model Loaded")

    trainer.train(train_loader, test_loader, args.lr, adjust_learning_rate, args.epochs,
        metric=args.metric, ascent_step_size=args.ascent_step_size, only_ce_epochs = args.only_ce_epochs)

    trainer.save(args.model_dir)
if __name__ == '__main__':
    torch.set_printoptions(precision=5)

    # Command-line interface: training hyperparameters, DROCC loss settings,
    # checkpointing, optimizer choice and data location.
    parser = argparse.ArgumentParser(description='PyTorch Simple Training')
    parser.add_argument('--batch_size', type=int, default=128, metavar='N',
                        help='batch size for training')
    parser.add_argument('--epochs', type=int, default=100, metavar='N',
                        help='number of epochs to train')
    parser.add_argument('-oce,', '--only_ce_epochs', type=int, default=50, metavar='N',
                        help='number of epochs to train with only CE loss')
    parser.add_argument('--ascent_num_steps', type=int, default=50, metavar='N',
                        help='Number of gradient ascent steps')
    parser.add_argument('--hd', type=int, default=128, metavar='N',
                        help='Number of hidden nodes for LSTM model')
    parser.add_argument('--lr', type=float, default=0.001, metavar='LR',
                        help='learning rate')
    parser.add_argument('--ascent_step_size', type=float, default=0.001, metavar='LR',
                        help='step size of gradient ascent')
    parser.add_argument('--mom', type=float, default=0.99, metavar='M',
                        help='momentum')
    parser.add_argument('--model_dir', default='log',
                        help='path where to save checkpoint')
    parser.add_argument('--one_class_adv', type=int, default=1, metavar='N',
                        help='adv loss to be used or not, 1:use 0:not use(only CE)')
    parser.add_argument('--radius', type=float, default=0.2, metavar='N',
                        help='radius corresponding to the definition of set N_i(r)')
    parser.add_argument('--lamda', type=float, default=1, metavar='N',
                        help='Weight to the adversarial loss')
    parser.add_argument('--reg', type=float, default=0, metavar='N',
                        help='weight reg')
    parser.add_argument('--restore', type=int, default=0, metavar='N',
                        help='whether to load a pretrained model, 1: load 0: train from scratch')
    parser.add_argument('--optim', type=int, default=0, metavar='N',
                        help='0 : Adam 1: SGD')
    parser.add_argument('--gamma', type=float, default=2.0, metavar='N',
                        help='r to gamma * r projection for the set N_i(r)')
    parser.add_argument('-d', '--data_path', type=str, default='.')
    parser.add_argument('--metric', type=str, default='F1')
    args = parser.parse_args()

    # settings
    #Checkpoint store path: created up front so trainer.save() cannot fail.
    model_dir = args.model_dir
    if not os.path.exists(model_dir):
        os.makedirs(model_dir)
    # Prefer GPU when available; main() reads this module-level `device`.
    use_cuda = torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")
    main()
| 41.966667 | 105 | 0.622452 | 1,281 | 0.169579 | 0 | 0 | 0 | 0 | 0 | 0 | 1,625 | 0.215118 |
9bd475cdfb843175aafda09a546901e32a2f92de | 1,518 | py | Python | funcs.py | amsynist/gmail_counter_deleter | 44c7ff780a13d7aae7a1ccee96abfc3b63dcc77a | [
"MIT"
] | null | null | null | funcs.py | amsynist/gmail_counter_deleter | 44c7ff780a13d7aae7a1ccee96abfc3b63dcc77a | [
"MIT"
] | null | null | null | funcs.py | amsynist/gmail_counter_deleter | 44c7ff780a13d7aae7a1ccee96abfc3b63dcc77a | [
"MIT"
] | null | null | null | import re
import imaplib
def checkinbox(user,passw,imapserver):
    """Log in over IMAP-SSL, prompt for a folder name, and print its mail count."""
    imap = imaplib.IMAP4_SSL(imapserver)
    try:
        imap.login(user,passw)
        print("Connecting and Fetching required info,Please Wait..")
        select_folder = input("Enter the folder : ")
        # NOTE(review): the folder is selected twice; only the second call's
        # result is used -- the first select looks redundant.
        imap.select(select_folder)
        status, messages = imap.select(select_folder)
        # select() returns the message count as a bytes value in messages[0].
        messages = int(messages[0])
        print(f"Total Number of Mails in {select_folder} : {messages} ")
    except Exception as err:
        # Any failure (not only authentication) lands here.
        print(' -- !! AUTHENTICATION-FAILED !! --')
        print("It seems that password was incorrect.")
def checkemail(user, passw, imapserver):
    """Check whether `user` looks like a valid email address.

    Prints the outcome and returns True when the address matches, False
    otherwise. (Previously the result was only printed and None returned;
    returning a bool is backward compatible since callers ignored it.)
    `passw` and `imapserver` are unused but kept for signature compatibility.
    """
    # Raw string avoids invalid-escape warnings for \w and \. in the pattern.
    regex = r'^(\w|\.|\_|\-)+[@](\w|\_|\-|\.)+[.]\w{2,3}$'
    if re.search(regex, user):
        print("Valid Email Entered")
        return True
    print("!! Enter a Valid Email and Try Again !!")
    return False
def deleteEmail(user, passw, imapserver):
    """Delete every message in the inbox of the given IMAP account.

    Connects over SSL, flags all inbox messages as \\Deleted, then expunges
    them. Failures are reported on stdout rather than raised.
    """
    checkemail(user, passw, imapserver)
    mail = imaplib.IMAP4_SSL(imapserver)
    try:
        # Fix: login now happens inside the try block, so authentication
        # failures are reported by the handler below (as its message claims)
        # instead of propagating as an uncaught exception.
        mail.login(user, passw)
        print("Connecting and Fetching required info,Please Wait..")
        mail.select("inbox")
        print(f"Deleting all Emails from the inbox")
        typ, data = mail.search(None, 'ALL')
        for num in data[0].split():
            # Flag each message; the actual removal happens at expunge().
            mail.store(num, '+FLAGS', r'(\Deleted)')
            print("Deleting....")
        mail.expunge()
        print("Emails Deleted")
        mail.close()
        mail.logout()
    except Exception as err:
        print(' -- !! AUTHENTICATION-FAILED !! --')
        print("It seems that password was incorrect.")
| 34.5 | 70 | 0.614625 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 541 | 0.35639 |
9bd59710cf5b8329b4da0192b5e1698bf07e999d | 13,736 | py | Python | plugin-python/proto/pyvcloudprovider_pb2_grpc.py | srinarayanant/terraform-provider-vcloud-director-1 | 1e805550e69fb5284d4a746a2f86326ec72c565f | [
"BSD-2-Clause"
] | null | null | null | plugin-python/proto/pyvcloudprovider_pb2_grpc.py | srinarayanant/terraform-provider-vcloud-director-1 | 1e805550e69fb5284d4a746a2f86326ec72c565f | [
"BSD-2-Clause"
] | null | null | null | plugin-python/proto/pyvcloudprovider_pb2_grpc.py | srinarayanant/terraform-provider-vcloud-director-1 | 1e805550e69fb5284d4a746a2f86326ec72c565f | [
"BSD-2-Clause"
] | null | null | null | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from proto import catalog_item_pb2 as proto_dot_catalog__item__pb2
from proto import pyvcloudprovider_pb2 as proto_dot_pyvcloudprovider__pb2
from proto import vapp_pb2 as proto_dot_vapp__pb2
class PyVcloudProviderStub(object):
    """Client-side stub for the PyVcloudProvider gRPC service.

    Generated by the gRPC Python protocol compiler plugin -- regenerate from
    the .proto definition instead of editing by hand. Each attribute is a
    unary-unary callable bound to one RPC of the service.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Login = channel.unary_unary(
            '/proto.PyVcloudProvider/Login',
            request_serializer=proto_dot_pyvcloudprovider__pb2.
            LoginCredentials.SerializeToString,
            response_deserializer=proto_dot_pyvcloudprovider__pb2.LoginResult.
            FromString,
        )
        self.ReadCatalog = channel.unary_unary(
            '/proto.PyVcloudProvider/ReadCatalog',
            request_serializer=proto_dot_pyvcloudprovider__pb2.Catalog.
            SerializeToString,
            response_deserializer=proto_dot_pyvcloudprovider__pb2.
            ReadCatalogResult.FromString,
        )
        self.CreateCatalog = channel.unary_unary(
            '/proto.PyVcloudProvider/CreateCatalog',
            request_serializer=proto_dot_pyvcloudprovider__pb2.Catalog.
            SerializeToString,
            response_deserializer=proto_dot_pyvcloudprovider__pb2.
            CreateCatalogResult.FromString,
        )
        self.DeleteCatalog = channel.unary_unary(
            '/proto.PyVcloudProvider/DeleteCatalog',
            request_serializer=proto_dot_pyvcloudprovider__pb2.Catalog.
            SerializeToString,
            response_deserializer=proto_dot_pyvcloudprovider__pb2.
            DeleteCatalogResult.FromString,
        )
        self.CatalogUploadMedia = channel.unary_unary(
            '/proto.PyVcloudProvider/CatalogUploadMedia',
            request_serializer=proto_dot_catalog__item__pb2.
            CatalogUploadMediaInfo.SerializeToString,
            response_deserializer=proto_dot_catalog__item__pb2.
            CatalogUploadMediaResult.FromString,
        )
        self.CatalogUploadOva = channel.unary_unary(
            '/proto.PyVcloudProvider/CatalogUploadOva',
            request_serializer=proto_dot_catalog__item__pb2.
            CatalogUploadOvaInfo.SerializeToString,
            response_deserializer=proto_dot_catalog__item__pb2.
            CatalogUploadOvaResult.FromString,
        )
        self.OvaCheckResolved = channel.unary_unary(
            '/proto.PyVcloudProvider/OvaCheckResolved',
            request_serializer=proto_dot_catalog__item__pb2.
            CatalogCheckResolvedInfo.SerializeToString,
            response_deserializer=proto_dot_pyvcloudprovider__pb2.
            CheckResolvedResult.FromString,
        )
        self.DeleteCatalogItem = channel.unary_unary(
            '/proto.PyVcloudProvider/DeleteCatalogItem',
            request_serializer=proto_dot_catalog__item__pb2.
            DeleteCatalogItemInfo.SerializeToString,
            response_deserializer=proto_dot_catalog__item__pb2.
            DeleteCatalogItemResult.FromString,
        )
        self.isPresentCatalogItem = channel.unary_unary(
            '/proto.PyVcloudProvider/isPresentCatalogItem',
            request_serializer=proto_dot_catalog__item__pb2.
            IsPresentCatalogItemInfo.SerializeToString,
            response_deserializer=proto_dot_catalog__item__pb2.
            IsPresentCatalogItemResult.FromString,
        )
        self.CaptureVapp = channel.unary_unary(
            '/proto.PyVcloudProvider/CaptureVapp',
            request_serializer=proto_dot_catalog__item__pb2.CaptureVAppInfo.
            SerializeToString,
            response_deserializer=proto_dot_catalog__item__pb2.
            CaptureVAppResult.FromString,
        )
        self.CreateVApp = channel.unary_unary(
            '/proto.PyVcloudProvider/CreateVApp',
            request_serializer=proto_dot_vapp__pb2.CreateVAppInfo.
            SerializeToString,
            response_deserializer=proto_dot_vapp__pb2.CreateVAppResult.
            FromString,
        )
        self.DeleteVApp = channel.unary_unary(
            '/proto.PyVcloudProvider/DeleteVApp',
            request_serializer=proto_dot_vapp__pb2.DeleteVAppInfo.
            SerializeToString,
            response_deserializer=proto_dot_vapp__pb2.DeleteVAppResult.
            FromString,
        )
        self.ReadVApp = channel.unary_unary(
            '/proto.PyVcloudProvider/ReadVApp',
            request_serializer=proto_dot_vapp__pb2.ReadVAppInfo.
            SerializeToString,
            response_deserializer=proto_dot_vapp__pb2.ReadVAppResult.
            FromString,
        )
        self.StopPlugin = channel.unary_unary(
            '/proto.PyVcloudProvider/StopPlugin',
            request_serializer=proto_dot_pyvcloudprovider__pb2.StopInfo.
            SerializeToString,
            response_deserializer=proto_dot_pyvcloudprovider__pb2.StopResult.
            FromString,
        )
class PyVcloudProviderServicer(object):
"""Interface exported by the server.
"""
def Login(self, request, context):
"""Tenant Loging to VCD
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ReadCatalog(self, request, context):
"""check if catalog is preset and return true and the catalog details
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateCatalog(self, request, context):
"""create a new catalog
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteCatalog(self, request, context):
"""delete a catalog
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CatalogUploadMedia(self, request, context):
"""catalog upload Media - anything other than ova
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CatalogUploadOva(self, request, context):
"""catalog upload ova
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def OvaCheckResolved(self, request, context):
"""check resolved after upload
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteCatalogItem(self, request, context):
"""catalog item delete
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def isPresentCatalogItem(self, request, context):
"""check if catalog item is preset and return true
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CaptureVapp(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def CreateVApp(self, request, context):
"""create vApp
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DeleteVApp(self, request, context):
"""delete VApp
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ReadVApp(self, request, context):
"""Read VApp
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def StopPlugin(self, request, context):
"""remote stop interface for the plugin
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_PyVcloudProviderServicer_to_server(servicer, server):
rpc_method_handlers = {
'Login':
grpc.unary_unary_rpc_method_handler(
servicer.Login,
request_deserializer=proto_dot_pyvcloudprovider__pb2.
LoginCredentials.FromString,
response_serializer=proto_dot_pyvcloudprovider__pb2.LoginResult.
SerializeToString,
),
'ReadCatalog':
grpc.unary_unary_rpc_method_handler(
servicer.ReadCatalog,
request_deserializer=proto_dot_pyvcloudprovider__pb2.Catalog.
FromString,
response_serializer=proto_dot_pyvcloudprovider__pb2.
ReadCatalogResult.SerializeToString,
),
'CreateCatalog':
grpc.unary_unary_rpc_method_handler(
servicer.CreateCatalog,
request_deserializer=proto_dot_pyvcloudprovider__pb2.Catalog.
FromString,
response_serializer=proto_dot_pyvcloudprovider__pb2.
CreateCatalogResult.SerializeToString,
),
'DeleteCatalog':
grpc.unary_unary_rpc_method_handler(
servicer.DeleteCatalog,
request_deserializer=proto_dot_pyvcloudprovider__pb2.Catalog.
FromString,
response_serializer=proto_dot_pyvcloudprovider__pb2.
DeleteCatalogResult.SerializeToString,
),
'CatalogUploadMedia':
grpc.unary_unary_rpc_method_handler(
servicer.CatalogUploadMedia,
request_deserializer=proto_dot_catalog__item__pb2.
CatalogUploadMediaInfo.FromString,
response_serializer=proto_dot_catalog__item__pb2.
CatalogUploadMediaResult.SerializeToString,
),
'CatalogUploadOva':
grpc.unary_unary_rpc_method_handler(
servicer.CatalogUploadOva,
request_deserializer=proto_dot_catalog__item__pb2.
CatalogUploadOvaInfo.FromString,
response_serializer=proto_dot_catalog__item__pb2.
CatalogUploadOvaResult.SerializeToString,
),
'OvaCheckResolved':
grpc.unary_unary_rpc_method_handler(
servicer.OvaCheckResolved,
request_deserializer=proto_dot_catalog__item__pb2.
CatalogCheckResolvedInfo.FromString,
response_serializer=proto_dot_pyvcloudprovider__pb2.
CheckResolvedResult.SerializeToString,
),
'DeleteCatalogItem':
grpc.unary_unary_rpc_method_handler(
servicer.DeleteCatalogItem,
request_deserializer=proto_dot_catalog__item__pb2.
DeleteCatalogItemInfo.FromString,
response_serializer=proto_dot_catalog__item__pb2.
DeleteCatalogItemResult.SerializeToString,
),
'isPresentCatalogItem':
grpc.unary_unary_rpc_method_handler(
servicer.isPresentCatalogItem,
request_deserializer=proto_dot_catalog__item__pb2.
IsPresentCatalogItemInfo.FromString,
response_serializer=proto_dot_catalog__item__pb2.
IsPresentCatalogItemResult.SerializeToString,
),
'CaptureVapp':
grpc.unary_unary_rpc_method_handler(
servicer.CaptureVapp,
request_deserializer=proto_dot_catalog__item__pb2.CaptureVAppInfo.
FromString,
response_serializer=proto_dot_catalog__item__pb2.CaptureVAppResult.
SerializeToString,
),
'CreateVApp':
grpc.unary_unary_rpc_method_handler(
servicer.CreateVApp,
request_deserializer=proto_dot_vapp__pb2.CreateVAppInfo.FromString,
response_serializer=proto_dot_vapp__pb2.CreateVAppResult.
SerializeToString,
),
'DeleteVApp':
grpc.unary_unary_rpc_method_handler(
servicer.DeleteVApp,
request_deserializer=proto_dot_vapp__pb2.DeleteVAppInfo.FromString,
response_serializer=proto_dot_vapp__pb2.DeleteVAppResult.
SerializeToString,
),
'ReadVApp':
grpc.unary_unary_rpc_method_handler(
servicer.ReadVApp,
request_deserializer=proto_dot_vapp__pb2.ReadVAppInfo.FromString,
response_serializer=proto_dot_vapp__pb2.ReadVAppResult.
SerializeToString,
),
'StopPlugin':
grpc.unary_unary_rpc_method_handler(
servicer.StopPlugin,
request_deserializer=proto_dot_pyvcloudprovider__pb2.StopInfo.
FromString,
response_serializer=proto_dot_pyvcloudprovider__pb2.StopResult.
SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'proto.PyVcloudProvider', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler, ))
| 40.759644 | 79 | 0.683241 | 8,604 | 0.626383 | 0 | 0 | 0 | 0 | 0 | 0 | 2,238 | 0.16293 |
9bd599d6657b8c94098c98e04397485d867aa089 | 1,820 | py | Python | bot/triggers/commands/math.py | elihschiff/Rubber-Duck-Python | 24dea3b64a8a46368cd8dd995c800375f355b55e | [
"MIT"
] | 7 | 2020-07-07T20:58:14.000Z | 2021-12-23T02:51:20.000Z | bot/triggers/commands/math.py | elihschiff/Rubber-Duck-Python | 24dea3b64a8a46368cd8dd995c800375f355b55e | [
"MIT"
] | null | null | null | bot/triggers/commands/math.py | elihschiff/Rubber-Duck-Python | 24dea3b64a8a46368cd8dd995c800375f355b55e | [
"MIT"
] | 1 | 2020-03-29T13:36:43.000Z | 2020-03-29T13:36:43.000Z | from . import Command
from .. import utils
import wolframalpha
class Math(Command):
names = ["math", "calc", "calculate", "solve"]
description = "Solves a math problem"
usage = "!math [expression]"
examples = "!math d/dx sin(x)^2"
show_in_help = True
async def execute_command(self, client, msg, content, **kwargs):
if not content:
await utils.delay_send(msg.channel, f"Usage: {self.usage}", reply_to=msg)
return
wolfram = wolframalpha.Client(client.config["wolfram_id"])
query_res = wolfram.query(content)
try:
for pod in query_res.pods:
if (
pod.title.startswith("Result")
or pod.title.startswith("Exact result")
or pod.title.startswith("Power of 10 representation")
or pod.title.startswith("Decimal approximation")
):
await msg.channel.send(
f"The answer for `{content}` is: `{utils.sanitized(pod.text)}`",
reference=msg,
mention_author=True,
)
return
if pod.title.startswith("Plot"):
await msg.channel.send(
# TODO: this should be more robust
f"The answer for `{content}` is: {list(list(pod.subpod)[0].img)[0]['@src']}",
reference=msg,
mention_author=True,
)
return
except (KeyError, AttributeError):
pass
await msg.channel.send(
f"I could not find an answer for `{utils.sanitized(content)}`",
reference=msg,
mention_author=True,
)
| 37.142857 | 101 | 0.503297 | 1,754 | 0.963736 | 0 | 0 | 0 | 0 | 1,541 | 0.846703 | 442 | 0.242857 |
9bd5d5dda5851509b8acc46153917b825b7624b6 | 854 | py | Python | home/forms.py | Legaeldan/siobhan-mcgowan-photography | 76ad93baf9792f194a48b7e3d42d274f35152fc0 | [
"W3C"
] | null | null | null | home/forms.py | Legaeldan/siobhan-mcgowan-photography | 76ad93baf9792f194a48b7e3d42d274f35152fc0 | [
"W3C"
] | 4 | 2021-06-08T21:02:03.000Z | 2022-01-13T02:21:06.000Z | home/forms.py | Legaeldan/siobhan-mcgowan-photography | 76ad93baf9792f194a48b7e3d42d274f35152fc0 | [
"W3C"
] | 1 | 2020-03-31T14:39:06.000Z | 2020-03-31T14:39:06.000Z | from django import forms
class ContactForm(forms.Form):
user_name = forms.CharField(max_length=60, label='', required=True, widget=forms.TextInput(attrs={'placeholder': 'Your Name'}))
user_email = forms.EmailField(label='', required=True)
message = forms.CharField(label='', required=True, widget=forms.Textarea(attrs={'placeholder': 'Message', 'rows': '4'}))
def __init__(self, *args, **kwargs):
super(ContactForm, self).__init__(*args, **kwargs)
for visible in self.visible_fields():
if visible.name == "user_email":
visible.field.widget.attrs['class'] = 'validate-required validate-email field-error'
visible.field.widget.attrs['placeholder'] = 'Email Address'
else:
visible.field.widget.attrs['class'] = 'validate-required field-error'
| 42.7 | 131 | 0.651054 | 820 | 0.960187 | 0 | 0 | 0 | 0 | 0 | 0 | 192 | 0.224824 |
9bd5f6eb4686ccd15c9d2237b780310a0cf0d3f6 | 6,517 | py | Python | lasttester/components/configs/ftp.py | gitdachong/lasttester | 4c637d7704e9d577050e666f6ce01fc5d3752044 | [
"MIT"
] | null | null | null | lasttester/components/configs/ftp.py | gitdachong/lasttester | 4c637d7704e9d577050e666f6ce01fc5d3752044 | [
"MIT"
] | null | null | null | lasttester/components/configs/ftp.py | gitdachong/lasttester | 4c637d7704e9d577050e666f6ce01fc5d3752044 | [
"MIT"
] | null | null | null | #coding:utf-8
import ftplib
import os
from ...core import constants
from . import base
class Configurer(base.Configurer):
def __init__(self,config):
self._config = config
self._key = constants.KEY_CONFIGURER_INSTANCES
self._results = {}
self.instance = ftplib.FTP()
self.instance.set_debuglevel(0)
self.lines = []
self._ftp_log = []
def connect(self,_config):
self._ftp_log.append(self.instance.connect(_config.get('host'),int(_config.get('port',21))))
_pasv_mode =True if _config.get('mode') ==1 else False
self.instance.set_pasv(_pasv_mode)
self._ftp_log.append(self.instance.login(_config.get('username'),_config.get('password')))
if 'defaut_path' in _config:
self.opendir(_config.get('defaut_path'))
self.__current_dir = self.instance.pwd()
def parse(self):
_config = self._config.get('config_body')
self.connect(_config)
self._results[self._config.get('name')] = self
return [(self._key,self._results)]
def close(self):
if self.instance:
self.instance.quit()
def upload(self,remotepath, localpath):
self._ftp_log = []
if os.path.isdir(localpath):
_files = os.listdir(localpath)
_dirs = []
for _file in _files:
_path = os.path.join(localpath,_file)
_remotepath = '{}/{}'.format(remotepath.rstrip('/'),_file)
if os.path.isdir(_path):
_dirs.append((_remotepath,_path))
else:
self.__upload_file(_remotepath,_path)
#解决目录顺序混乱导致多次重复打开子目录和父目录因此的效率问题
for _remotepath,_path in _dirs:
self.upload(_remotepath, _path)
else:
self.__upload_file(remotepath, localpath)
def __upload_file(self,remotepath, localpath):
bufsize = 1024
dirname,basename = self.__split(remotepath)
self.opendir(dirname)
fp = open(localpath, 'rb')
_result = self.instance.storbinary('STOR ' + basename, fp, bufsize)
print('__upload_file_1',type(_result),_result)
fp.close()
def download(self,remotepath, localpath):
self._ftp_log = []
try:
_result = self.instance.cwd(remotepath)
print('download',type(_result), _result)
self.__download_dir(remotepath,localpath)
except ftplib.error_perm:
self.__download_file(remotepath, localpath)
def __download_file(self,remotepath, localpath):
bufsize = 1024
dirname, basename = self.__split(remotepath)
self.opendir(dirname)
fp = open(localpath, 'wb')
_result = self.instance.retrbinary('RETR ' + basename, fp.write, bufsize)
print('__download_file_1', type(_result), _result)
fp.close()
def __download_dir(self,remotepath, localpath):
if not os.path.exists(localpath):
os.makedirs(localpath)
self.__clear_lines()
self.opendir(remotepath)
_result = self.instance.retrlines("LIST", callback=self.__save_line)
print('__download_dir_1', type(_result), _result)
for line in self.lines:
name = line.split(" ")[-1]
if name in ['.','..']:
continue
_remote_path = '{}/{}'.format(remotepath.rstrip('/'),name)
_local_path = os.path.join(localpath,name)
if line[0] == "d":
self.__download_dir(_remote_path,_local_path)
else:
self.__download_file(_remote_path,_local_path)
def delete(self,remotepath):
self._ftp_log = []
try:
self.instance.cwd(remotepath)
self.delete_dir(remotepath)
except ftplib.error_perm:
_result = self.instance.delete(remotepath)
print('__delete_1', type(_result), _result)
def delete_dir(self,remotepath):
self.__clear_lines()
self.opendir(remotepath)
self.instance.retrlines("LIST", callback=self.__save_line)
for line in self.lines:
name = line.split(" ")[-1]
if name in ['.','..']:
continue
_path = remotepath + "/" + name
if line[0] == "d":
self.delete_dir(_path)
else:
_result = self.instance.delete(_path)
print('delete_dir1', type(_result), _result)
if remotepath !='/':
_result = self.instance.rmd(remotepath)
print('delete_dir_2', type(_result), _result)
def opendir(self,remotepath):
remotepath = remotepath.rstrip(r'/')
if not remotepath or self.__compare_path(remotepath,self.__current_dir):
return True
if self.__current_dir.find(remotepath) ==0:
self.instance.cwd(remotepath)
else:
_diff = remotepath.lstrip(self.__current_dir)
_common = remotepath.rstrip(_diff)
if _common and not self.__compare_path(_common,self.__current_dir):
self._ftp_log.append(self.instance.cwd(_common))
dir_lists = _diff.split(r'/')
for _dir in dir_lists:
try:
self._ftp_log.append(self.instance.cwd(_dir))
except ftplib.error_perm:
try:
_result = self.instance.mkd(_dir)
self._ftp_log.append('mkdir dir {} successful'.format(_result))
except ftplib.error_perm:
pass
self.instance.cwd(_dir)
self.__current_dir = self.instance.pwd()
return True
def __clear_lines(self):
self.lines = []
def __save_line(self, line):
self.lines.append(line)
def __split(self,path):
while(path.find('//') !=-1):
path = path.replace('//','/')
index = path.rfind('/')
if index == -1:
return '',path
return path[:index],path[index + 1:]
def __common_path(self,remotepath,oldpath):
if len(remotepath) < oldpath:
remotepath,oldpath = oldpath,remotepath
for _idnex,_char in enumerate(remotepath):
if _char != oldpath[_idnex]:
break
return remotepath[:_idnex].rstrip(r'/')
def __compare_path(self,path,t_path):
return path.rstrip(r'/') == t_path.rstrip(r'/')
| 35.612022 | 100 | 0.577566 | 6,488 | 0.986468 | 0 | 0 | 0 | 0 | 0 | 0 | 437 | 0.066444 |
9bd6f70a62ad5148b40fc54e7119d371615bc893 | 4,248 | py | Python | util/load.py | juvalen/mb-checker | d1c869cf38af4bed409b901fe260feb54523bd6e | [
"MIT"
] | 12 | 2020-02-01T18:43:51.000Z | 2022-03-18T17:53:43.000Z | util/load.py | juvalen/mb-checker | d1c869cf38af4bed409b901fe260feb54523bd6e | [
"MIT"
] | 6 | 2019-10-19T18:32:16.000Z | 2021-09-16T13:34:13.000Z | util/load.py | juvalen/mb-checker | d1c869cf38af4bed409b901fe260feb54523bd6e | [
"MIT"
] | null | null | null | # Name: load.py
# Date: June 2019
# Function: goes trough a bookmark file checking the status of each URL
# Input: bookmark file in json format
# Output: new text and json files including those URLs according with their status
import os
import ast
try:
import requests
except:
sys.stderr.write("%s: Please install the required module 'requests'.\n" % sys.argv[0])
sys.exit(1)
try:
import json
except:
# Python < 2.6
try:
import simplejson as json
except:
sys.stderr.write("%s: Please install the required module 'simplejson'.\n" % sys.argv[0])
sys.exit(1)
DIRNAME = "output/"
JSONIN = DIRNAME + "chrome_bookmarks.json"
JSONOK = DIRNAME + "OK.json"
URLERROR = DIRNAME + "error.url"
URL404 = DIRNAME + "404.url"
URLOK = DIRNAME + "OK.url"
RED='\033[0;31m'
NC='\033[0m' # No Color
# Read source bookmark file
input_filename = open(JSONIN, "r")
bookmark_data = json.load(input_filename)
input_filename.close()
# Compute number of elements, including categories and end nodes
elements = len(bookmark_data)
print("Checking", str(elements), "entries in bookmark data")
# Create output/ directory if not exists
try:
os.mkdir(DIRNAME)
print("Directory" , DIRNAME , "created ")
except:
print("Directory" , DIRNAME , "preserved")
# Defining output files
urlError = open(URLERROR,"w")
jsonOK = open(JSONOK,"w")
urlOK = open(URLOK,"w")
url404 = open(URL404,"w")
jsonOK.write("[")
count = 1
for dict in bookmark_data:
# Shredding dict into variables
id = str(dict["id"])
dateAddedLocal = str(dict["dateAddedLocal"])
dateAddedUTC = str(dict["dateAddedUTC"])
index = str(dict["index"])
parentId = dict["parentId"]
string = str(dict["title"])
try:
url = dict["url"]
except:
url = ""
# Tweak title here
title = string.replace('"', '')
#
print("@@@@@@@@@@@@", id)
#print(" L ", dateAddedLocal)
#print(" U ", dateAddedUTC)
#print(" I ", index)
#print(" P ", parentId)
# if there is something in url
if url:
print(" T ", title)
# Try here to access that URL
try:
try:
folder = parent[parentId]
except:
folder = "1"
print(" > [", folder, "] ", url)
req = requests.head(url, timeout=10)
# Attends all & timeout
except:
print(RED + "XXX" + NC)
urlError.write(url + "\n")
else:
status = req.status_code
if status == 404:
print(RED + "404" + NC)
url404.write(url + "\n")
else:
print(" + ", status)
# Original json entries pasted here
# Approach from scratch
# Write to file
jsonOK.write('{\n')
jsonOK.write(' "id": ' + id + ',\n')
jsonOK.write(' "dateAddedLocal": "' + dateAddedLocal + '",\n')
jsonOK.write(' "dateAddedUTC": "' + dateAddedUTC + '",\n')
jsonOK.write(' "index": ' + index + ',\n')
jsonOK.write(' "parentId": ' + parentId + ',\n')
jsonOK.write(' "title": "' + title + '",\n')
jsonOK.write(' "url": "' + url + '"\n')
if count<elements:
jsonOK.write('},\n')
else:
jsonOK.write('}]\n')
urlOK.write(url + '\n')
# When it is only a bookmark folder
# Original json entries be pasted here
else:
lastTitle = "[" + title + "]"
print(lastTitle)
# Create parent dictionary
parent = {}
parent[id] = title
# Write to file
jsonOK.write('{\n')
jsonOK.write(' "id": ' + id + ',\n')
jsonOK.write(' "dateAddedLocal": "' + dateAddedLocal + '",\n')
jsonOK.write(' "dateAddedUTC": "' + dateAddedUTC + '",\n')
jsonOK.write(' "index": ' + index + ',\n')
jsonOK.write(' "parentId": ' + parentId + ',\n')
jsonOK.write(' "title": "' + title + '"\n')
if count<elements:
jsonOK.write('},\n')
else:
jsonOK.write('}]\n')
urlOK.write(url + '\n')
count += 1
jsonOK.close()
urlError.close()
url404.close()
urlOK.close()
| 28.897959 | 96 | 0.545433 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,590 | 0.374294 |
9bd75770d8f779d1f6be976a5153f3df87d94f4e | 1,248 | py | Python | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/errors.py | jeikabu/lumberyard | 07228c605ce16cbf5aaa209a94a3cb9d6c1a4115 | [
"AML"
] | 8 | 2019-10-07T16:33:47.000Z | 2020-12-07T03:59:58.000Z | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/errors.py | jeikabu/lumberyard | 07228c605ce16cbf5aaa209a94a3cb9d6c1a4115 | [
"AML"
] | 1 | 2018-04-03T22:37:40.000Z | 2018-04-03T23:53:43.000Z | dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/cuda/errors.py | jeikabu/lumberyard | 07228c605ce16cbf5aaa209a94a3cb9d6c1a4115 | [
"AML"
] | 5 | 2020-08-27T20:44:18.000Z | 2021-08-21T22:54:11.000Z | from __future__ import print_function, absolute_import
import numbers
class KernelRuntimeError(RuntimeError):
def __init__(self, msg, tid=None, ctaid=None):
self.tid = tid
self.ctaid = ctaid
self.msg = msg
t = ("An exception was raised in thread=%s block=%s\n"
"\t%s")
msg = t % (self.tid, self.ctaid, self.msg)
super(KernelRuntimeError, self).__init__(msg)
def normalize_kernel_dimensions(griddim, blockdim):
"""
Normalize and validate the user-supplied kernel dimensions.
"""
def check_dim(dim, name):
if not isinstance(dim, (tuple, list)):
dim = [dim]
else:
dim = list(dim)
if len(dim) > 3:
raise ValueError('%s must be a sequence of 1, 2 or 3 integers, got %r'
% (name, dim))
for v in dim:
if not isinstance(v, numbers.Integral):
raise TypeError('%s must be a sequence of integers, got %r'
% (name, dim))
while len(dim) < 3:
dim.append(1)
return dim
griddim = check_dim(griddim, 'griddim')
blockdim = check_dim(blockdim, 'blockdim')
return griddim, blockdim
| 29.714286 | 82 | 0.563301 | 352 | 0.282051 | 0 | 0 | 0 | 0 | 0 | 0 | 245 | 0.196314 |
9bd8035c6b3b5723ba2c49f36471229439b947c4 | 1,013 | py | Python | gitrack/exceptions.py | AuHau/giTrack | 802ee23513d60b2379f0f5968e595288d5b6c31d | [
"MIT"
] | 5 | 2019-02-19T10:56:56.000Z | 2020-11-28T11:37:45.000Z | gitrack/exceptions.py | AuHau/giTrack | 802ee23513d60b2379f0f5968e595288d5b6c31d | [
"MIT"
] | 63 | 2019-01-21T21:44:28.000Z | 2022-03-21T14:01:11.000Z | gitrack/exceptions.py | AuHau/giTrack | 802ee23513d60b2379f0f5968e595288d5b6c31d | [
"MIT"
] | 2 | 2019-01-04T19:31:52.000Z | 2020-12-10T21:40:09.000Z |
class GitrackException(Exception):
"""
General giTrack's exception
"""
pass
class ConfigException(GitrackException):
"""
Exception related to Config functionality.
"""
pass
class InitializedRepoException(GitrackException):
"""
Raised when user tries to initialized repo that has been already initialized before.
"""
pass
class UninitializedRepoException(GitrackException):
"""
Raised when giTrack invoke in Git repository that has not been initialized.
"""
pass
class UnknownShell(GitrackException):
pass
class PromptException(GitrackException):
pass
class ProviderException(GitrackException):
def __init__(self, provider_name, message, *args, **kwargs):
self.message = message
self.provider_name = provider_name
super().__init__(*args, **kwargs)
def __str__(self):
return 'Provider \'{}\': {}'.format(self.provider_name, self.message)
class RunningEntry(ProviderException):
pass
| 19.862745 | 88 | 0.691017 | 990 | 0.977295 | 0 | 0 | 0 | 0 | 0 | 0 | 313 | 0.308983 |
9bd93ddac308f79528b349ef3589bc678b64c99a | 4,617 | py | Python | Protheus_WebApp/Modules/SIGAPLS/PLSA809TESTCASE.py | 98llm/tir-script-samples | 0bff8393b79356aa562e9e6512c11ee6e039b177 | [
"MIT"
] | 17 | 2018-09-24T17:27:08.000Z | 2021-09-16T19:09:46.000Z | Protheus_WebApp/Modules/SIGAPLS/PLSA809TESTCASE.py | 98llm/tir-script-samples | 0bff8393b79356aa562e9e6512c11ee6e039b177 | [
"MIT"
] | 4 | 2018-09-24T17:30:32.000Z | 2022-01-03T11:39:30.000Z | Protheus_WebApp/Modules/SIGAPLS/PLSA809TESTCASE.py | 98llm/tir-script-samples | 0bff8393b79356aa562e9e6512c11ee6e039b177 | [
"MIT"
] | 18 | 2019-06-07T17:41:34.000Z | 2022-01-31T18:17:31.000Z | from tir import Webapp
import unittest
from tir.technologies.apw_internal import ApwInternal
import datetime
import time
DateSystem = datetime.datetime.today().strftime('%d/%m/%Y')
DateVal = datetime.datetime(2120, 5, 17)
"""-------------------------------------------------------------------
/*/{Protheus.doc} PLSA809TestCase
TIR - Casos de testes da rotina Indicacao de Prestador via CallCenter
@author Silvia SantAnna
@since 10/2020
@version 12
-------------------------------------------------------------------"""
class PLSA809(unittest.TestCase):
@classmethod
def setUpClass(inst):
inst.oHelper = Webapp()
inst.oHelper.Setup("SIGAPLS","13/10/2020","T1","M SP 01","33")
inst.oHelper.Program("PLSA809")
inst.oHelper.AddParameter("MV_PLCALPG","" , "2")
inst.oHelper.AddParameter("MV_PL809VL","" , ".F.")
inst.oHelper.SetParameters()
def test_PLSA809_001(self):
# INCLUIR
self.oHelper.SetButton("Incluir")
self.oHelper.SetBranch("M SP 01 ")
self.oHelper.SetValue("B9Y_CARTEI","00010100000001024", check_value = False)
self.oHelper.SetValue("B9Y_CRMCGC","41226834671", check_value = False)
time.sleep(10)
self.oHelper.SetValue("B9Y_NOME","PLS DSAUPC TIR INCLUSAO")
self.oHelper.SetValue("B9Y_EMAIL","DSAUPC@EMAIL.COM")
self.oHelper.SetValue("B9Y_TEL","11332220000", check_value = False)
self.oHelper.SetValue("B9Y_TIPOAT", "3 - Ambos")
self.oHelper.SetValue("B9Y_OBS", "TESTE 2 TIR INCLUSAO")
# Grid Enderecos
self.oHelper.ClickGridCell("Cód Logr",row=1, grid_number=1)
self.oHelper.SetKey("Enter", grid=True, grid_number=1)
self.oHelper.SetValue("B9V_CODLOG","008")
self.oHelper.ClickGridCell("Endereço",row=1, grid_number=1)
self.oHelper.SetKey("Enter", grid=True, grid_number=1)
time.sleep(10)
self.oHelper.SetValue("B9V_ENDER","ALBERT BARTHOLOME")
self.oHelper.ClickGridCell("Nº",row=1, grid_number=1)
self.oHelper.SetKey("Enter", grid=True, grid_number=1)
time.sleep(10)
self.oHelper.SetValue("B9V_NUMERO","434")
#self.oHelper.ClickGridCell("Complemento",row=1, grid_number=1)
#self.oHelper.SetKey("Enter", grid=True, grid_number=1)
time.sleep(30)
#self.oHelper.SetValue("B9V_COMEND","SALA 10")
#self.oHelper.ClickGridCell("Bairro",row=1, grid_number=1)
#self.oHelper.SetKey("Enter", grid=True, grid_number=1)
time.sleep(30)
#self.oHelper.SetValue("B9V_BAIRRO","BUTANTA")
#self.oHelper.ClickGridCell("Cód Cidade",row=1, grid_number=1)
#self.oHelper.SetKey("Enter", grid=True, grid_number=1)
time.sleep(30)
#self.oHelper.SetValue("B9V_CODCID","3550308")
#self.oHelper.ClickGridCell("CEP",row=1, grid_number=1)
#self.oHelper.SetKey("Enter", grid=True, grid_number=1)
time.sleep(30)
#self.oHelper.SetValue("B9V_CEP","05541000", check_value = False)
# Grid Especialidades
self.oHelper.ClickGridCell("Cod Espec",row=1, grid_number=2)
self.oHelper.SetKey("Enter", grid=True, grid_number=2)
time.sleep(10)
self.oHelper.SetValue("B9Q_CODESP","002")
self.oHelper.SetButton("Confirmar")
self.oHelper.SetButton("Fechar") # "O beneficiário não possui email cadastrado na base de dados, favor informar o protocolo a ele para que seja possível acompanhar a indicação feita"
self.oHelper.SetButton("Fechar") # "Registro inserido com sucesso."
# VISUALIZAR
self.oHelper.SetButton("Visualizar")
self.oHelper.CheckResult("B9Y_CRMCGC","41226834671")
self.oHelper.SetButton("Fechar")
# INCLUSÃO COM MESMO CRM/CNPJ
self.oHelper.SetButton("Incluir")
self.oHelper.SetBranch("M SP 01 ")
self.oHelper.SetValue("B9Y_CARTEI","00010100000001024", check_value = False)
self.oHelper.SetValue("B9Y_CRMCGC","41226834671", check_value = False)
time.sleep(10)
self.oHelper.SetValue("B9Y_NOME","PLS DSAUPC TIR INCLUSAO 2")
self.oHelper.SetValue("B9Y_EMAIL","DSAUPC2@EMAIL.COM")
self.oHelper.SetValue("B9Y_TEL","11333331234", check_value = False)
self.oHelper.SetValue("B9Y_TIPOAT", "2 - Assistencial")
self.oHelper.SetValue("B9Y_OBS", "TESTE 2 TIR INCLUSAO COM MESMO CRM/CNPJ")
# Grid Especialidades
self.oHelper.ClickGridCell("Indicar",row=1, grid_number=2)
self.oHelper.SetKey("Enter", grid=True, grid_number=2)
self.oHelper.SetButton("Confirmar")
self.oHelper.SetButton("Fechar") # "O beneficiário não possui email cadastrado na base de dados, favor informar o protocolo a ele para que seja possível acompanhar a indicação feita"
self.oHelper.SetButton("Fechar") # "Registro inserido com sucesso."
self.oHelper.SetButton('x')
self.oHelper.AssertTrue()
@classmethod
def tearDownClass(inst):
inst.oHelper.TearDown()
if __name__ == '__main__':
unittest.main() | 41.594595 | 185 | 0.718648 | 4,066 | 0.877807 | 0 | 0 | 359 | 0.077504 | 0 | 0 | 2,275 | 0.491149 |
9bda521421d444cb1361c910a70adc7a47fdcd9f | 80,233 | py | Python | sympy/integrals/rubi/rubi_tests/tests/test_hyperbolic_sine.py | Michal-Gagala/sympy | 3cc756c2af73b5506102abaeefd1b654e286e2c8 | [
"MIT"
] | null | null | null | sympy/integrals/rubi/rubi_tests/tests/test_hyperbolic_sine.py | Michal-Gagala/sympy | 3cc756c2af73b5506102abaeefd1b654e286e2c8 | [
"MIT"
] | null | null | null | sympy/integrals/rubi/rubi_tests/tests/test_hyperbolic_sine.py | Michal-Gagala/sympy | 3cc756c2af73b5506102abaeefd1b654e286e2c8 | [
"MIT"
] | null | null | null | import sys
from sympy.external import import_module
matchpy = import_module("matchpy")
if not matchpy:
#bin/test will not execute any tests now
disabled = True
if sys.version_info[:2] < (3, 6):
disabled = True
from sympy.integrals.rubi.utility_function import (
sympy_op_factory, Int, Sum, Set, With, Module, Scan, MapAnd, FalseQ,
ZeroQ, NegativeQ, NonzeroQ, FreeQ, NFreeQ, List, Log, PositiveQ,
PositiveIntegerQ, NegativeIntegerQ, IntegerQ, IntegersQ,
ComplexNumberQ, PureComplexNumberQ, RealNumericQ, PositiveOrZeroQ,
NegativeOrZeroQ, FractionOrNegativeQ, NegQ, Equal, Unequal, IntPart,
FracPart, RationalQ, ProductQ, SumQ, NonsumQ, Subst, First, Rest,
SqrtNumberQ, SqrtNumberSumQ, LinearQ, Sqrt, ArcCosh, Coefficient,
Denominator, Hypergeometric2F1, Not, Simplify, FractionalPart,
IntegerPart, AppellF1, EllipticPi, EllipticE, EllipticF, ArcTan,
ArcCot, ArcCoth, ArcTanh, ArcSin, ArcSinh, ArcCos, ArcCsc, ArcSec,
ArcCsch, ArcSech, Sinh, Tanh, Cosh, Sech, Csch, Coth, LessEqual, Less,
Greater, GreaterEqual, FractionQ, IntLinearcQ, Expand, IndependentQ,
PowerQ, IntegerPowerQ, PositiveIntegerPowerQ, FractionalPowerQ, AtomQ,
ExpQ, LogQ, Head, MemberQ, TrigQ, SinQ, CosQ, TanQ, CotQ, SecQ, CscQ,
Sin, Cos, Tan, Cot, Sec, Csc, HyperbolicQ, SinhQ, CoshQ, TanhQ, CothQ,
SechQ, CschQ, InverseTrigQ, SinCosQ, SinhCoshQ, LeafCount, Numerator,
NumberQ, NumericQ, Length, ListQ, Im, Re, InverseHyperbolicQ,
InverseFunctionQ, TrigHyperbolicFreeQ, InverseFunctionFreeQ, RealQ,
EqQ, FractionalPowerFreeQ, ComplexFreeQ, PolynomialQ, FactorSquareFree,
PowerOfLinearQ, Exponent, QuadraticQ, LinearPairQ, BinomialParts,
TrinomialParts, PolyQ, EvenQ, OddQ, PerfectSquareQ, NiceSqrtAuxQ,
NiceSqrtQ, Together, PosAux, PosQ, CoefficientList, ReplaceAll,
ExpandLinearProduct, GCD, ContentFactor, NumericFactor,
NonnumericFactors, MakeAssocList, GensymSubst, KernelSubst,
ExpandExpression, Apart, SmartApart, MatchQ,
PolynomialQuotientRemainder, FreeFactors, NonfreeFactors,
RemoveContentAux, RemoveContent, FreeTerms, NonfreeTerms,
ExpandAlgebraicFunction, CollectReciprocals, ExpandCleanup,
AlgebraicFunctionQ, Coeff, LeadTerm, RemainingTerms, LeadFactor,
RemainingFactors, LeadBase, LeadDegree, Numer, Denom, hypergeom, Expon,
MergeMonomials, PolynomialDivide, BinomialQ, TrinomialQ,
GeneralizedBinomialQ, GeneralizedTrinomialQ, FactorSquareFreeList,
PerfectPowerTest, SquareFreeFactorTest, RationalFunctionQ,
RationalFunctionFactors, NonrationalFunctionFactors, Reverse,
RationalFunctionExponents, RationalFunctionExpand, ExpandIntegrand,
SimplerQ, SimplerSqrtQ, SumSimplerQ, BinomialDegree, TrinomialDegree,
CancelCommonFactors, SimplerIntegrandQ, GeneralizedBinomialDegree,
GeneralizedBinomialParts, GeneralizedTrinomialDegree,
GeneralizedTrinomialParts, MonomialQ, MonomialSumQ,
MinimumMonomialExponent, MonomialExponent, LinearMatchQ,
PowerOfLinearMatchQ, QuadraticMatchQ, CubicMatchQ, BinomialMatchQ,
TrinomialMatchQ, GeneralizedBinomialMatchQ, GeneralizedTrinomialMatchQ,
QuotientOfLinearsMatchQ, PolynomialTermQ, PolynomialTerms,
NonpolynomialTerms, PseudoBinomialParts, NormalizePseudoBinomial,
PseudoBinomialPairQ, PseudoBinomialQ, PolynomialGCD, PolyGCD,
AlgebraicFunctionFactors, NonalgebraicFunctionFactors,
QuotientOfLinearsP, QuotientOfLinearsParts, QuotientOfLinearsQ,
Flatten, Sort, AbsurdNumberQ, AbsurdNumberFactors,
NonabsurdNumberFactors, SumSimplerAuxQ, Prepend, Drop,
CombineExponents, FactorInteger, FactorAbsurdNumber,
SubstForInverseFunction, SubstForFractionalPower,
SubstForFractionalPowerOfQuotientOfLinears,
FractionalPowerOfQuotientOfLinears, SubstForFractionalPowerQ,
SubstForFractionalPowerAuxQ, FractionalPowerOfSquareQ,
FractionalPowerSubexpressionQ, Apply, FactorNumericGcd,
MergeableFactorQ, MergeFactor, MergeFactors, TrigSimplifyQ,
TrigSimplify, TrigSimplifyRecur, Order, FactorOrder, Smallest,
OrderedQ, MinimumDegree, PositiveFactors, Sign, NonpositiveFactors,
PolynomialInAuxQ, PolynomialInQ, ExponentInAux, ExponentIn,
PolynomialInSubstAux, PolynomialInSubst, Distrib, DistributeDegree,
FunctionOfPower, DivideDegreesOfFactors, MonomialFactor, FullSimplify,
FunctionOfLinearSubst, FunctionOfLinear, NormalizeIntegrand,
NormalizeIntegrandAux, NormalizeIntegrandFactor,
NormalizeIntegrandFactorBase, NormalizeTogether,
NormalizeLeadTermSigns, AbsorbMinusSign, NormalizeSumFactors,
SignOfFactor, NormalizePowerOfLinear, SimplifyIntegrand, SimplifyTerm,
TogetherSimplify, SmartSimplify, SubstForExpn, ExpandToSum, UnifySum,
UnifyTerms, UnifyTerm, CalculusQ, FunctionOfInverseLinear,
PureFunctionOfSinhQ, PureFunctionOfTanhQ, PureFunctionOfCoshQ,
IntegerQuotientQ, OddQuotientQ, EvenQuotientQ, FindTrigFactor,
FunctionOfSinhQ, FunctionOfCoshQ, OddHyperbolicPowerQ, FunctionOfTanhQ,
FunctionOfTanhWeight, FunctionOfHyperbolicQ, SmartNumerator,
SmartDenominator, SubstForAux, ActivateTrig, ExpandTrig, TrigExpand,
SubstForTrig, SubstForHyperbolic, InertTrigFreeQ, LCM,
SubstForFractionalPowerOfLinear, FractionalPowerOfLinear,
InverseFunctionOfLinear, InertTrigQ, InertReciprocalQ, DeactivateTrig,
FixInertTrigFunction, DeactivateTrigAux, PowerOfInertTrigSumQ,
PiecewiseLinearQ, KnownTrigIntegrandQ, KnownSineIntegrandQ,
KnownTangentIntegrandQ, KnownCotangentIntegrandQ,
KnownSecantIntegrandQ, TryPureTanSubst, TryTanhSubst, TryPureTanhSubst,
AbsurdNumberGCD, AbsurdNumberGCDList, ExpandTrigExpand,
ExpandTrigReduce, ExpandTrigReduceAux, NormalizeTrig, TrigToExp,
ExpandTrigToExp, TrigReduce, FunctionOfTrig, AlgebraicTrigFunctionQ,
FunctionOfHyperbolic, FunctionOfQ, FunctionOfExpnQ, PureFunctionOfSinQ,
PureFunctionOfCosQ, PureFunctionOfTanQ, PureFunctionOfCotQ,
FunctionOfCosQ, FunctionOfSinQ, OddTrigPowerQ, FunctionOfTanQ,
FunctionOfTanWeight, FunctionOfTrigQ, FunctionOfDensePolynomialsQ,
FunctionOfLog, PowerVariableExpn, PowerVariableDegree,
PowerVariableSubst, EulerIntegrandQ, FunctionOfSquareRootOfQuadratic,
SquareRootOfQuadraticSubst, Divides, EasyDQ, ProductOfLinearPowersQ,
Rt, NthRoot, AtomBaseQ, SumBaseQ, NegSumBaseQ, AllNegTermQ,
SomeNegTermQ, TrigSquareQ, RtAux, TrigSquare, IntSum, IntTerm, Map2,
ConstantFactor, SameQ, ReplacePart, CommonFactors,
MostMainFactorPosition, FunctionOfExponentialQ, FunctionOfExponential,
FunctionOfExponentialFunction, FunctionOfExponentialFunctionAux,
FunctionOfExponentialTest, FunctionOfExponentialTestAux, stdev,
rubi_test, If, IntQuadraticQ, IntBinomialQ, RectifyTangent,
RectifyCotangent, Inequality, Condition, Simp, SimpHelp, SplitProduct,
SplitSum, SubstFor, SubstForAux, FresnelS, FresnelC, Erfc, Erfi, Gamma,
FunctionOfTrigOfLinearQ, ElementaryFunctionQ, Complex, UnsameQ,
_SimpFixFactor, SimpFixFactor, _FixSimplify, FixSimplify,
_SimplifyAntiderivativeSum, SimplifyAntiderivativeSum,
_SimplifyAntiderivative, SimplifyAntiderivative, _TrigSimplifyAux,
TrigSimplifyAux, Cancel, Part, PolyLog, D, Dist, Sum_doit, PolynomialQuotient, Floor,
PolynomialRemainder, Factor, PolyLog, CosIntegral, SinIntegral, LogIntegral, SinhIntegral,
CoshIntegral, Rule, Erf, PolyGamma, ExpIntegralEi, ExpIntegralE, LogGamma , UtilityOperator, Factorial,
Zeta, ProductLog, DerivativeDivides, HypergeometricPFQ, IntHide, OneQ
)
from sympy.core.add import Add
from sympy.core.mod import Mod
from sympy.core.mul import Mul
from sympy.core.numbers import (Float, I, Integer)
from sympy.core.power import Pow
from sympy.core.singleton import S
from sympy.functions.elementary.complexes import Abs
from sympy.functions.elementary.miscellaneous import sqrt
from sympy.integrals.integrals import Integral as Integrate
from sympy.logic.boolalg import (And, Or)
from sympy.simplify.simplify import simplify
from sympy.integrals.rubi.symbol import WC
from sympy.core.symbol import symbols, Symbol
from sympy.functions import (sin, cos, tan, cot, csc, sec, sqrt, erf, exp, log)
from sympy.functions.elementary.hyperbolic import (acosh, asinh, atanh, acoth, acsch, asech, cosh, sinh, tanh, coth, sech, csch)
from sympy.functions.elementary.trigonometric import (atan, acsc, asin, acot, acos, asec)
from sympy.integrals.rubi.rubimain import rubi_integrate
from sympy.core.numbers import pi as Pi
# Generic symbolic variables used throughout the integration tests below.
a, b, c, d, e, f, m, n, x, u , k, p, r, s, t, i, j= symbols('a b c d e f m n x u k p r s t i j')
# NOTE(review): this second call re-binds a, b, c, d, e, f, m, n, p, u from the
# line above (redundant but harmless), and shadows names imported earlier
# (e.g. D) — presumably intentional for these auto-generated tests; confirm
# before consolidating.
A, B, C, D, a, b, c, d, e, f, g, h, y, z, m, n, p, q, u, v, w, F = symbols('A B C D a b c d e f g h y z m n p q u v w F', )
def test_1():
assert rubi_test(rubi_integrate((c + d*x)**S(4)*sinh(a + b*x), x), x, (c + d*x)**S(4)*cosh(a + b*x)/b - S(4)*d*(c + d*x)**S(3)*sinh(a + b*x)/b**S(2) + S(12)*d**S(2)*(c + d*x)**S(2)*cosh(a + b*x)/b**S(3) - S(24)*d**S(3)*(c + d*x)*sinh(a + b*x)/b**S(4) + S(24)*d**S(4)*cosh(a + b*x)/b**S(5), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)*sinh(a + b*x), x), x, (c + d*x)**S(3)*cosh(a + b*x)/b - S(3)*d*(c + d*x)**S(2)*sinh(a + b*x)/b**S(2) + S(6)*d**S(2)*(c + d*x)*cosh(a + b*x)/b**S(3) - S(6)*d**S(3)*sinh(a + b*x)/b**S(4), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)*sinh(a + b*x), x), x, (c + d*x)**S(2)*cosh(a + b*x)/b - S(2)*d*(c + d*x)*sinh(a + b*x)/b**S(2) + S(2)*d**S(2)*cosh(a + b*x)/b**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)*sinh(a + b*x), x), x, (c + d*x)*cosh(a + b*x)/b - d*sinh(a + b*x)/b**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x), x), x, CoshIntegral(b*c/d + b*x)*sinh(a - b*c/d)/d + SinhIntegral(b*c/d + b*x)*cosh(a - b*c/d)/d, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x)**S(2), x), x, b*CoshIntegral(b*c/d + b*x)*cosh(a - b*c/d)/d**S(2) + b*SinhIntegral(b*c/d + b*x)*sinh(a - b*c/d)/d**S(2) - sinh(a + b*x)/(d*(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x)**S(3), x), x, b**S(2)*CoshIntegral(b*c/d + b*x)*sinh(a - b*c/d)/(S(2)*d**S(3)) + b**S(2)*SinhIntegral(b*c/d + b*x)*cosh(a - b*c/d)/(S(2)*d**S(3)) - b*cosh(a + b*x)/(S(2)*d**S(2)*(c + d*x)) - sinh(a + b*x)/(S(2)*d*(c + d*x)**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(4)*sinh(a + b*x)**S(2), x), x, -(c + d*x)**S(5)/(S(10)*d) + (c + d*x)**S(4)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - d*(c + d*x)**S(3)*sinh(a + b*x)**S(2)/b**S(2) - d*(c + d*x)**S(3)/(S(2)*b**S(2)) + S(3)*d**S(2)*(c + d*x)**S(2)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b**S(3)) - S(3)*d**S(4)*x/(S(4)*b**S(4)) - S(3)*d**S(3)*(c + d*x)*sinh(a + b*x)**S(2)/(S(2)*b**S(4)) + S(3)*d**S(4)*sinh(a + b*x)*cosh(a + b*x)/(S(4)*b**S(5)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)*sinh(a + b*x)**S(2), x), x, -(c + d*x)**S(4)/(S(8)*d) + (c + d*x)**S(3)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - S(3)*c*d**S(2)*x/(S(4)*b**S(2)) - S(3)*d**S(3)*x**S(2)/(S(8)*b**S(2)) - S(3)*d*(c + d*x)**S(2)*sinh(a + b*x)**S(2)/(S(4)*b**S(2)) + S(3)*d**S(2)*(c + d*x)*sinh(a + b*x)*cosh(a + b*x)/(S(4)*b**S(3)) - S(3)*d**S(3)*sinh(a + b*x)**S(2)/(S(8)*b**S(4)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)*sinh(a + b*x)**S(2), x), x, -(c + d*x)**S(3)/(S(6)*d) + (c + d*x)**S(2)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - d**S(2)*x/(S(4)*b**S(2)) - d*(c + d*x)*sinh(a + b*x)**S(2)/(S(2)*b**S(2)) + d**S(2)*sinh(a + b*x)*cosh(a + b*x)/(S(4)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)*sinh(a + b*x)**S(2), x), x, -c*x/S(2) - d*x**S(2)/S(4) + (c + d*x)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - d*sinh(a + b*x)**S(2)/(S(4)*b**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x), x), x, CoshIntegral(S(2)*b*c/d + S(2)*b*x)*cosh(S(2)*a - S(2)*b*c/d)/(S(2)*d) + SinhIntegral(S(2)*b*c/d + S(2)*b*x)*sinh(S(2)*a - S(2)*b*c/d)/(S(2)*d) - log(c + d*x)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**S(2), x), x, b*CoshIntegral(S(2)*b*c/d + S(2)*b*x)*sinh(S(2)*a - S(2)*b*c/d)/d**S(2) + b*SinhIntegral(S(2)*b*c/d + S(2)*b*x)*cosh(S(2)*a - S(2)*b*c/d)/d**S(2) - sinh(a + b*x)**S(2)/(d*(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**S(3), x), x, b**S(2)*CoshIntegral(S(2)*b*c/d + S(2)*b*x)*cosh(S(2)*a - S(2)*b*c/d)/d**S(3) + b**S(2)*SinhIntegral(S(2)*b*c/d + S(2)*b*x)*sinh(S(2)*a - S(2)*b*c/d)/d**S(3) - b*sinh(a + b*x)*cosh(a + b*x)/(d**S(2)*(c + d*x)) - sinh(a + b*x)**S(2)/(S(2)*d*(c + d*x)**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**S(4), x), x, S(2)*b**S(3)*CoshIntegral(S(2)*b*c/d + S(2)*b*x)*sinh(S(2)*a - S(2)*b*c/d)/(S(3)*d**S(4)) + S(2)*b**S(3)*SinhIntegral(S(2)*b*c/d + S(2)*b*x)*cosh(S(2)*a - S(2)*b*c/d)/(S(3)*d**S(4)) - S(2)*b**S(2)*sinh(a + b*x)**S(2)/(S(3)*d**S(3)*(c + d*x)) - b**S(2)/(S(3)*d**S(3)*(c + d*x)) - b*sinh(a + b*x)*cosh(a + b*x)/(S(3)*d**S(2)*(c + d*x)**S(2)) - sinh(a + b*x)**S(2)/(S(3)*d*(c + d*x)**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(4)*sinh(a + b*x)**S(3), x), x, (c + d*x)**S(4)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)**S(4)*cosh(a + b*x)/(S(3)*b) - S(4)*d*(c + d*x)**S(3)*sinh(a + b*x)**S(3)/(S(9)*b**S(2)) + S(8)*d*(c + d*x)**S(3)*sinh(a + b*x)/(S(3)*b**S(2)) + S(4)*d**S(2)*(c + d*x)**S(2)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(9)*b**S(3)) - S(80)*d**S(2)*(c + d*x)**S(2)*cosh(a + b*x)/(S(9)*b**S(3)) - S(8)*d**S(3)*(c + d*x)*sinh(a + b*x)**S(3)/(S(27)*b**S(4)) + S(160)*d**S(3)*(c + d*x)*sinh(a + b*x)/(S(9)*b**S(4)) + S(8)*d**S(4)*cosh(a + b*x)**S(3)/(S(81)*b**S(5)) - S(488)*d**S(4)*cosh(a + b*x)/(S(27)*b**S(5)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)*sinh(a + b*x)**S(3), x), x, (c + d*x)**S(3)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)**S(3)*cosh(a + b*x)/(S(3)*b) - d*(c + d*x)**S(2)*sinh(a + b*x)**S(3)/(S(3)*b**S(2)) + S(2)*d*(c + d*x)**S(2)*sinh(a + b*x)/b**S(2) + S(2)*d**S(2)*(c + d*x)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(9)*b**S(3)) - S(40)*d**S(2)*(c + d*x)*cosh(a + b*x)/(S(9)*b**S(3)) - S(2)*d**S(3)*sinh(a + b*x)**S(3)/(S(27)*b**S(4)) + S(40)*d**S(3)*sinh(a + b*x)/(S(9)*b**S(4)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)*sinh(a + b*x)**S(3), x), x, (c + d*x)**S(2)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*d*(c + d*x)*sinh(a + b*x)**S(3)/(S(9)*b**S(2)) + S(4)*d*(c + d*x)*sinh(a + b*x)/(S(3)*b**S(2)) + S(2)*d**S(2)*cosh(a + b*x)**S(3)/(S(27)*b**S(3)) - S(14)*d**S(2)*cosh(a + b*x)/(S(9)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)*sinh(a + b*x)**S(3), x), x, (c + d*x)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)*cosh(a + b*x)/(S(3)*b) - d*sinh(a + b*x)**S(3)/(S(9)*b**S(2)) + S(2)*d*sinh(a + b*x)/(S(3)*b**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x), x), x, -S(3)*CoshIntegral(b*c/d + b*x)*sinh(a - b*c/d)/(S(4)*d) + CoshIntegral(S(3)*b*c/d + S(3)*b*x)*sinh(S(3)*a - S(3)*b*c/d)/(S(4)*d) - S(3)*SinhIntegral(b*c/d + b*x)*cosh(a - b*c/d)/(S(4)*d) + SinhIntegral(S(3)*b*c/d + S(3)*b*x)*cosh(S(3)*a - S(3)*b*c/d)/(S(4)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x)**S(2), x), x, -S(3)*b*CoshIntegral(b*c/d + b*x)*cosh(a - b*c/d)/(S(4)*d**S(2)) + S(3)*b*CoshIntegral(S(3)*b*c/d + S(3)*b*x)*cosh(S(3)*a - S(3)*b*c/d)/(S(4)*d**S(2)) - S(3)*b*SinhIntegral(b*c/d + b*x)*sinh(a - b*c/d)/(S(4)*d**S(2)) + S(3)*b*SinhIntegral(S(3)*b*c/d + S(3)*b*x)*sinh(S(3)*a - S(3)*b*c/d)/(S(4)*d**S(2)) - sinh(a + b*x)**S(3)/(d*(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x)**S(3), x), x, -S(3)*b**S(2)*CoshIntegral(b*c/d + b*x)*sinh(a - b*c/d)/(S(8)*d**S(3)) + S(9)*b**S(2)*CoshIntegral(S(3)*b*c/d + S(3)*b*x)*sinh(S(3)*a - S(3)*b*c/d)/(S(8)*d**S(3)) - S(3)*b**S(2)*SinhIntegral(b*c/d + b*x)*cosh(a - b*c/d)/(S(8)*d**S(3)) + S(9)*b**S(2)*SinhIntegral(S(3)*b*c/d + S(3)*b*x)*cosh(S(3)*a - S(3)*b*c/d)/(S(8)*d**S(3)) - S(3)*b*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(2)*d**S(2)*(c + d*x)) - sinh(a + b*x)**S(3)/(S(2)*d*(c + d*x)**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/sinh(a + b*x), x), x, -S(2)*(c + d*x)**S(3)*atanh(exp(a + b*x))/b - S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), -exp(a + b*x))/b**S(2) + S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), exp(a + b*x))/b**S(2) + S(6)*d**S(2)*(c + d*x)*PolyLog(S(3), -exp(a + b*x))/b**S(3) - S(6)*d**S(2)*(c + d*x)*PolyLog(S(3), exp(a + b*x))/b**S(3) - S(6)*d**S(3)*PolyLog(S(4), -exp(a + b*x))/b**S(4) + S(6)*d**S(3)*PolyLog(S(4), exp(a + b*x))/b**S(4), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/sinh(a + b*x), x), x, -S(2)*(c + d*x)**S(2)*atanh(exp(a + b*x))/b - S(2)*d*(c + d*x)*PolyLog(S(2), -exp(a + b*x))/b**S(2) + S(2)*d*(c + d*x)*PolyLog(S(2), exp(a + b*x))/b**S(2) + S(2)*d**S(2)*PolyLog(S(3), -exp(a + b*x))/b**S(3) - S(2)*d**S(2)*PolyLog(S(3), exp(a + b*x))/b**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/sinh(a + b*x), x), x, -S(2)*(c + d*x)*atanh(exp(a + b*x))/b - d*PolyLog(S(2), -exp(a + b*x))/b**S(2) + d*PolyLog(S(2), exp(a + b*x))/b**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)*sinh(a + b*x)), x), x, Integrate(S(1)/((c + d*x)*sinh(a + b*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)), x), x, Integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/sinh(a + b*x)**S(2), x), x, -(c + d*x)**S(3)/b - (c + d*x)**S(3)/(b*tanh(a + b*x)) + S(3)*d*(c + d*x)**S(2)*log(-exp(S(2)*a + S(2)*b*x) + S(1))/b**S(2) + S(3)*d**S(2)*(c + d*x)*PolyLog(S(2), exp(S(2)*a + S(2)*b*x))/b**S(3) - S(3)*d**S(3)*PolyLog(S(3), exp(S(2)*a + S(2)*b*x))/(S(2)*b**S(4)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/sinh(a + b*x)**S(2), x), x, -(c + d*x)**S(2)/b - (c + d*x)**S(2)/(b*tanh(a + b*x)) + S(2)*d*(c + d*x)*log(-exp(S(2)*a + S(2)*b*x) + S(1))/b**S(2) + d**S(2)*PolyLog(S(2), exp(S(2)*a + S(2)*b*x))/b**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/sinh(a + b*x)**S(2), x), x, -(c + d*x)/(b*tanh(a + b*x)) + d*log(sinh(a + b*x))/b**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)*sinh(a + b*x)**S(2)), x), x, Integrate(S(1)/((c + d*x)*sinh(a + b*x)**S(2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)**S(2)), x), x, Integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)**S(2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/sinh(a + b*x)**S(3), x), x, (c + d*x)**S(3)*atanh(exp(a + b*x))/b - (c + d*x)**S(3)/(S(2)*b*sinh(a + b*x)*tanh(a + b*x)) + S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), -exp(a + b*x))/(S(2)*b**S(2)) - S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), exp(a + b*x))/(S(2)*b**S(2)) - S(3)*d*(c + d*x)**S(2)/(S(2)*b**S(2)*sinh(a + b*x)) - S(3)*d**S(2)*(c + d*x)*PolyLog(S(3), -exp(a + b*x))/b**S(3) + S(3)*d**S(2)*(c + d*x)*PolyLog(S(3), exp(a + b*x))/b**S(3) - S(6)*d**S(2)*(c + d*x)*atanh(exp(a + b*x))/b**S(3) - S(3)*d**S(3)*PolyLog(S(2), -exp(a + b*x))/b**S(4) + S(3)*d**S(3)*PolyLog(S(2), exp(a + b*x))/b**S(4) + S(3)*d**S(3)*PolyLog(S(4), -exp(a + b*x))/b**S(4) - S(3)*d**S(3)*PolyLog(S(4), exp(a + b*x))/b**S(4), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/sinh(a + b*x)**S(3), x), x, (c + d*x)**S(2)*atanh(exp(a + b*x))/b - (c + d*x)**S(2)/(S(2)*b*sinh(a + b*x)*tanh(a + b*x)) + d*(c + d*x)*PolyLog(S(2), -exp(a + b*x))/b**S(2) - d*(c + d*x)*PolyLog(S(2), exp(a + b*x))/b**S(2) - d*(c + d*x)/(b**S(2)*sinh(a + b*x)) - d**S(2)*PolyLog(S(3), -exp(a + b*x))/b**S(3) + d**S(2)*PolyLog(S(3), exp(a + b*x))/b**S(3) - d**S(2)*atanh(cosh(a + b*x))/b**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/sinh(a + b*x)**S(3), x), x, (c + d*x)*atanh(exp(a + b*x))/b - (c + d*x)/(S(2)*b*sinh(a + b*x)*tanh(a + b*x)) + d*PolyLog(S(2), -exp(a + b*x))/(S(2)*b**S(2)) - d*PolyLog(S(2), exp(a + b*x))/(S(2)*b**S(2)) - d/(S(2)*b**S(2)*sinh(a + b*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)*sinh(a + b*x)**S(3)), x), x, Integrate(S(1)/((c + d*x)*sinh(a + b*x)**S(3)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)**S(3)), x), x, Integrate(S(1)/((c + d*x)**S(2)*sinh(a + b*x)**S(3)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(5)/2)*sinh(a + b*x), x), x, -S(15)*sqrt(Pi)*d**(S(5)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(16)*b**(S(7)/2)) - S(15)*sqrt(Pi)*d**(S(5)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(16)*b**(S(7)/2)) + (c + d*x)**(S(5)/2)*cosh(a + b*x)/b - S(5)*d*(c + d*x)**(S(3)/2)*sinh(a + b*x)/(S(2)*b**S(2)) + S(15)*d**S(2)*sqrt(c + d*x)*cosh(a + b*x)/(S(4)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(3)/2)*sinh(a + b*x), x), x, -S(3)*sqrt(Pi)*d**(S(3)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(8)*b**(S(5)/2)) + S(3)*sqrt(Pi)*d**(S(3)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(8)*b**(S(5)/2)) + (c + d*x)**(S(3)/2)*cosh(a + b*x)/b - S(3)*d*sqrt(c + d*x)*sinh(a + b*x)/(S(2)*b**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(c + d*x)*sinh(a + b*x), x), x, -sqrt(Pi)*sqrt(d)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(4)*b**(S(3)/2)) - sqrt(Pi)*sqrt(d)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(4)*b**(S(3)/2)) + sqrt(c + d*x)*cosh(a + b*x)/b, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/sqrt(c + d*x), x), x, -sqrt(Pi)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(2)*sqrt(b)*sqrt(d)) + sqrt(Pi)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(2)*sqrt(b)*sqrt(d)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x)**(S(3)/2), x), x, sqrt(Pi)*sqrt(b)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/d**(S(3)/2) + sqrt(Pi)*sqrt(b)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/d**(S(3)/2) - S(2)*sinh(a + b*x)/(d*sqrt(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x)**(S(5)/2), x), x, -S(2)*sqrt(Pi)*b**(S(3)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(3)*d**(S(5)/2)) + S(2)*sqrt(Pi)*b**(S(3)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(3)*d**(S(5)/2)) - S(4)*b*cosh(a + b*x)/(S(3)*d**S(2)*sqrt(c + d*x)) - S(2)*sinh(a + b*x)/(S(3)*d*(c + d*x)**(S(3)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)/(c + d*x)**(S(7)/2), x), x, S(4)*sqrt(Pi)*b**(S(5)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(15)*d**(S(7)/2)) + S(4)*sqrt(Pi)*b**(S(5)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(15)*d**(S(7)/2)) - S(8)*b**S(2)*sinh(a + b*x)/(S(15)*d**S(3)*sqrt(c + d*x)) - S(4)*b*cosh(a + b*x)/(S(15)*d**S(2)*(c + d*x)**(S(3)/2)) - S(2)*sinh(a + b*x)/(S(5)*d*(c + d*x)**(S(5)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(5)/2)*sinh(a + b*x)**S(2), x), x, S(15)*sqrt(S(2))*sqrt(Pi)*d**(S(5)/2)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(512)*b**(S(7)/2)) - S(15)*sqrt(S(2))*sqrt(Pi)*d**(S(5)/2)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(512)*b**(S(7)/2)) - (c + d*x)**(S(7)/2)/(S(7)*d) + (c + d*x)**(S(5)/2)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - S(5)*d*(c + d*x)**(S(3)/2)*sinh(a + b*x)**S(2)/(S(8)*b**S(2)) - S(5)*d*(c + d*x)**(S(3)/2)/(S(16)*b**S(2)) + S(15)*d**S(2)*sqrt(c + d*x)*sinh(S(2)*a + S(2)*b*x)/(S(64)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(3)/2)*sinh(a + b*x)**S(2), x), x, S(3)*sqrt(S(2))*sqrt(Pi)*d**(S(3)/2)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(128)*b**(S(5)/2)) + S(3)*sqrt(S(2))*sqrt(Pi)*d**(S(3)/2)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(128)*b**(S(5)/2)) - (c + d*x)**(S(5)/2)/(S(5)*d) + (c + d*x)**(S(3)/2)*sinh(a + b*x)*cosh(a + b*x)/(S(2)*b) - S(3)*d*sqrt(c + d*x)*sinh(a + b*x)**S(2)/(S(8)*b**S(2)) - S(3)*d*sqrt(c + d*x)/(S(16)*b**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(c + d*x)*sinh(a + b*x)**S(2), x), x, sqrt(S(2))*sqrt(Pi)*sqrt(d)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(32)*b**(S(3)/2)) - sqrt(S(2))*sqrt(Pi)*sqrt(d)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(32)*b**(S(3)/2)) - (c + d*x)**(S(3)/2)/(S(3)*d) + sqrt(c + d*x)*sinh(S(2)*a + S(2)*b*x)/(S(4)*b), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/sqrt(c + d*x), x), x, sqrt(S(2))*sqrt(Pi)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(8)*sqrt(b)*sqrt(d)) + sqrt(S(2))*sqrt(Pi)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(8)*sqrt(b)*sqrt(d)) - sqrt(c + d*x)/d, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**(S(3)/2), x), x, -sqrt(S(2))*sqrt(Pi)*sqrt(b)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(2)*d**(S(3)/2)) + sqrt(S(2))*sqrt(Pi)*sqrt(b)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(2)*d**(S(3)/2)) - S(2)*sinh(a + b*x)**S(2)/(d*sqrt(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**(S(5)/2), x), x, S(2)*sqrt(S(2))*sqrt(Pi)*b**(S(3)/2)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(3)*d**(S(5)/2)) + S(2)*sqrt(S(2))*sqrt(Pi)*b**(S(3)/2)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(3)*d**(S(5)/2)) - S(8)*b*sinh(a + b*x)*cosh(a + b*x)/(S(3)*d**S(2)*sqrt(c + d*x)) - S(2)*sinh(a + b*x)**S(2)/(S(3)*d*(c + d*x)**(S(3)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**(S(7)/2), x), x, -S(8)*sqrt(S(2))*sqrt(Pi)*b**(S(5)/2)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(15)*d**(S(7)/2)) + S(8)*sqrt(S(2))*sqrt(Pi)*b**(S(5)/2)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(15)*d**(S(7)/2)) - S(32)*b**S(2)*sinh(a + b*x)**S(2)/(S(15)*d**S(3)*sqrt(c + d*x)) - S(16)*b**S(2)/(S(15)*d**S(3)*sqrt(c + d*x)) - S(8)*b*sinh(a + b*x)*cosh(a + b*x)/(S(15)*d**S(2)*(c + d*x)**(S(3)/2)) - S(2)*sinh(a + b*x)**S(2)/(S(5)*d*(c + d*x)**(S(5)/2)), expand=True, _diff=True, _numerical=True)
# taking long time assert rubi_test(rubi_integrate(sinh(a + b*x)**S(2)/(c + d*x)**(S(9)/2), x), x, S(32)*sqrt(S(2))*sqrt(Pi)*b**(S(7)/2)*Erf(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(2)*a + S(2)*b*c/d)/(S(105)*d**(S(9)/2)) + S(32)*sqrt(S(2))*sqrt(Pi)*b**(S(7)/2)*Erfi(sqrt(S(2))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(2)*a - S(2)*b*c/d)/(S(105)*d**(S(9)/2)) - S(128)*b**S(3)*sinh(a + b*x)*cosh(a + b*x)/(S(105)*d**S(4)*sqrt(c + d*x)) - S(32)*b**S(2)*sinh(a + b*x)**S(2)/(S(105)*d**S(3)*(c + d*x)**(S(3)/2)) - S(16)*b**S(2)/(S(105)*d**S(3)*(c + d*x)**(S(3)/2)) - S(8)*b*sinh(a + b*x)*cosh(a + b*x)/(S(35)*d**S(2)*(c + d*x)**(S(5)/2)) - S(2)*sinh(a + b*x)**S(2)/(S(7)*d*(c + d*x)**(S(7)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(5)/2)*sinh(a + b*x)**S(3), x), x, S(45)*sqrt(Pi)*d**(S(5)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(64)*b**(S(7)/2)) - S(5)*sqrt(S(3))*sqrt(Pi)*d**(S(5)/2)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(1728)*b**(S(7)/2)) + S(45)*sqrt(Pi)*d**(S(5)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(64)*b**(S(7)/2)) - S(5)*sqrt(S(3))*sqrt(Pi)*d**(S(5)/2)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(1728)*b**(S(7)/2)) + (c + d*x)**(S(5)/2)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)**(S(5)/2)*cosh(a + b*x)/(S(3)*b) - S(5)*d*(c + d*x)**(S(3)/2)*sinh(a + b*x)**S(3)/(S(18)*b**S(2)) + S(5)*d*(c + d*x)**(S(3)/2)*sinh(a + b*x)/(S(3)*b**S(2)) - S(45)*d**S(2)*sqrt(c + d*x)*cosh(a + b*x)/(S(16)*b**S(3)) + S(5)*d**S(2)*sqrt(c + d*x)*cosh(S(3)*a + S(3)*b*x)/(S(144)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**(S(3)/2)*sinh(a + b*x)**S(3), x), x, S(9)*sqrt(Pi)*d**(S(3)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(32)*b**(S(5)/2)) - sqrt(S(3))*sqrt(Pi)*d**(S(3)/2)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(288)*b**(S(5)/2)) - S(9)*sqrt(Pi)*d**(S(3)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(32)*b**(S(5)/2)) + sqrt(S(3))*sqrt(Pi)*d**(S(3)/2)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(288)*b**(S(5)/2)) + (c + d*x)**(S(3)/2)*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(3)*b) - S(2)*(c + d*x)**(S(3)/2)*cosh(a + b*x)/(S(3)*b) - d*sqrt(c + d*x)*sinh(a + b*x)**S(3)/(S(6)*b**S(2)) + d*sqrt(c + d*x)*sinh(a + b*x)/b**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(c + d*x)*sinh(a + b*x)**S(3), x), x, S(3)*sqrt(Pi)*sqrt(d)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(16)*b**(S(3)/2)) - sqrt(S(3))*sqrt(Pi)*sqrt(d)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(144)*b**(S(3)/2)) + S(3)*sqrt(Pi)*sqrt(d)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(16)*b**(S(3)/2)) - sqrt(S(3))*sqrt(Pi)*sqrt(d)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(144)*b**(S(3)/2)) - S(3)*sqrt(c + d*x)*cosh(a + b*x)/(S(4)*b) + sqrt(c + d*x)*cosh(S(3)*a + S(3)*b*x)/(S(12)*b), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/sqrt(c + d*x), x), x, S(3)*sqrt(Pi)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(8)*sqrt(b)*sqrt(d)) - sqrt(S(3))*sqrt(Pi)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(24)*sqrt(b)*sqrt(d)) - S(3)*sqrt(Pi)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(8)*sqrt(b)*sqrt(d)) + sqrt(S(3))*sqrt(Pi)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(24)*sqrt(b)*sqrt(d)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x)**(S(3)/2), x), x, -S(3)*sqrt(Pi)*sqrt(b)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(4)*d**(S(3)/2)) + sqrt(S(3))*sqrt(Pi)*sqrt(b)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(4)*d**(S(3)/2)) - S(3)*sqrt(Pi)*sqrt(b)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(4)*d**(S(3)/2)) + sqrt(S(3))*sqrt(Pi)*sqrt(b)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(4)*d**(S(3)/2)) - S(2)*sinh(a + b*x)**S(3)/(d*sqrt(c + d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x)**(S(5)/2), x), x, sqrt(Pi)*b**(S(3)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(2)*d**(S(5)/2)) - sqrt(S(3))*sqrt(Pi)*b**(S(3)/2)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(2)*d**(S(5)/2)) - sqrt(Pi)*b**(S(3)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(2)*d**(S(5)/2)) + sqrt(S(3))*sqrt(Pi)*b**(S(3)/2)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(2)*d**(S(5)/2)) - S(4)*b*sinh(a + b*x)**S(2)*cosh(a + b*x)/(d**S(2)*sqrt(c + d*x)) - S(2)*sinh(a + b*x)**S(3)/(S(3)*d*(c + d*x)**(S(3)/2)), expand=True, _diff=True, _numerical=True)
# long time assert rubi_test(rubi_integrate(sinh(a + b*x)**S(3)/(c + d*x)**(S(7)/2), x), x, -sqrt(Pi)*b**(S(5)/2)*Erf(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-a + b*c/d)/(S(5)*d**(S(7)/2)) + S(3)*sqrt(S(3))*sqrt(Pi)*b**(S(5)/2)*Erf(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(-S(3)*a + S(3)*b*c/d)/(S(5)*d**(S(7)/2)) - sqrt(Pi)*b**(S(5)/2)*Erfi(sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(a - b*c/d)/(S(5)*d**(S(7)/2)) + S(3)*sqrt(S(3))*sqrt(Pi)*b**(S(5)/2)*Erfi(sqrt(S(3))*sqrt(b)*sqrt(c + d*x)/sqrt(d))*exp(S(3)*a - S(3)*b*c/d)/(S(5)*d**(S(7)/2)) - S(24)*b**S(2)*sinh(a + b*x)**S(3)/(S(5)*d**S(3)*sqrt(c + d*x)) - S(16)*b**S(2)*sinh(a + b*x)/(S(5)*d**S(3)*sqrt(c + d*x)) - S(4)*b*sinh(a + b*x)**S(2)*cosh(a + b*x)/(S(5)*d**S(2)*(c + d*x)**(S(3)/2)) - S(2)*sinh(a + b*x)**S(3)/(S(5)*d*(c + d*x)**(S(5)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((d*x)**(S(3)/2)*sinh(f*x), x), x, -S(3)*sqrt(Pi)*d**(S(3)/2)*Erf(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(8)*f**(S(5)/2)) + S(3)*sqrt(Pi)*d**(S(3)/2)*Erfi(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(8)*f**(S(5)/2)) - S(3)*d*sqrt(d*x)*sinh(f*x)/(S(2)*f**S(2)) + (d*x)**(S(3)/2)*cosh(f*x)/f, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(d*x)*sinh(f*x), x), x, -sqrt(Pi)*sqrt(d)*Erf(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(4)*f**(S(3)/2)) - sqrt(Pi)*sqrt(d)*Erfi(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(4)*f**(S(3)/2)) + sqrt(d*x)*cosh(f*x)/f, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(f*x)/sqrt(d*x), x), x, -sqrt(Pi)*Erf(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(2)*sqrt(d)*sqrt(f)) + sqrt(Pi)*Erfi(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(2)*sqrt(d)*sqrt(f)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(f*x)/(d*x)**(S(3)/2), x), x, sqrt(Pi)*sqrt(f)*Erf(sqrt(f)*sqrt(d*x)/sqrt(d))/d**(S(3)/2) + sqrt(Pi)*sqrt(f)*Erfi(sqrt(f)*sqrt(d*x)/sqrt(d))/d**(S(3)/2) - S(2)*sinh(f*x)/(d*sqrt(d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(f*x)/(d*x)**(S(5)/2), x), x, -S(2)*sqrt(Pi)*f**(S(3)/2)*Erf(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(3)*d**(S(5)/2)) + S(2)*sqrt(Pi)*f**(S(3)/2)*Erfi(sqrt(f)*sqrt(d*x)/sqrt(d))/(S(3)*d**(S(5)/2)) - S(2)*sinh(f*x)/(S(3)*d*(d*x)**(S(3)/2)) - S(4)*f*cosh(f*x)/(S(3)*d**S(2)*sqrt(d*x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(c + d*x)/sinh(a + b*x), x), x, Integrate(sqrt(c + d*x)/sinh(a + b*x), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/(sqrt(c + d*x)*sinh(a + b*x)), x), x, Integrate(S(1)/(sqrt(c + d*x)*sinh(a + b*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sinh(x)**(S(3)/2)/x**S(3), x), x, S(3)*Integrate(S(1)/(x*sqrt(sinh(x))), x)/S(8) + S(9)*Integrate(sinh(x)**(S(3)/2)/x, x)/S(8) - S(3)*sqrt(sinh(x))*cosh(x)/(S(4)*x) - sinh(x)**(S(3)/2)/(S(2)*x**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(-x*sqrt(sinh(x)) + x/sinh(x)**(S(3)/2), x), x, -S(2)*x*cosh(x)/sqrt(sinh(x)) + S(4)*sqrt(sinh(x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x/(S(3)*sqrt(sinh(x))) + x/sinh(x)**(S(5)/2), x), x, -S(2)*x*cosh(x)/(S(3)*sinh(x)**(S(3)/2)) - S(4)/(S(3)*sqrt(sinh(x))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(3)*x*sqrt(sinh(x))/S(5) + x/sinh(x)**(S(7)/2), x), x, S(6)*x*cosh(x)/(S(5)*sqrt(sinh(x))) - S(2)*x*cosh(x)/(S(5)*sinh(x)**(S(5)/2)) - S(12)*sqrt(sinh(x))/S(5) - S(4)/(S(15)*sinh(x)**(S(3)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(-x**S(2)*sqrt(sinh(x)) + x**S(2)/sinh(x)**(S(3)/2), x), x, -S(2)*x**S(2)*cosh(x)/sqrt(sinh(x)) + S(8)*x*sqrt(sinh(x)) - S(16)*I*EllipticE(Pi/S(4) - I*x/S(2), S(2))*sqrt(sinh(x))/sqrt(I*sinh(x)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((b*sinh(e + f*x))**n*(c + d*x)**m, x), x, Integrate((b*sinh(e + f*x))**n*(c + d*x)**m, x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*sinh(a + b*x)**S(3), x), x, S(3)**(-m + S(-1))*(-b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(3)*b*(c + d*x)/d)*exp(S(3)*a - S(3)*b*c/d)/(S(8)*b) + S(3)**(-m + S(-1))*(b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(3)*b*(c + d*x)/d)*exp(-S(3)*a + S(3)*b*c/d)/(S(8)*b) - S(3)*(-b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -b*(c + d*x)/d)*exp(a - b*c/d)/(S(8)*b) - S(3)*(b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), b*(c + d*x)/d)*exp(-a + b*c/d)/(S(8)*b), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*sinh(a + b*x)**S(2), x), x, S(2)**(-m + S(-3))*(-b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(2)*b*(c + d*x)/d)*exp(S(2)*a - S(2)*b*c/d)/b - S(2)**(-m + S(-3))*(b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(2)*b*(c + d*x)/d)*exp(-S(2)*a + S(2)*b*c/d)/b - (c + d*x)**(m + S(1))/(S(2)*d*(m + S(1))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*sinh(a + b*x), x), x, (-b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -b*(c + d*x)/d)*exp(a - b*c/d)/(S(2)*b) + (b*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), b*(c + d*x)/d)*exp(-a + b*c/d)/(S(2)*b), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/sinh(a + b*x), x), x, Integrate((c + d*x)**m/sinh(a + b*x), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/sinh(a + b*x)**S(2), x), x, Integrate((c + d*x)**m/sinh(a + b*x)**S(2), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(3))*sinh(a + b*x), x), x, -x**m*(-b*x)**(-m)*Gamma(m + S(4), -b*x)*exp(a)/(S(2)*b**S(4)) + x**m*(b*x)**(-m)*Gamma(m + S(4), b*x)*exp(-a)/(S(2)*b**S(4)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(2))*sinh(a + b*x), x), x, x**m*(-b*x)**(-m)*Gamma(m + S(3), -b*x)*exp(a)/(S(2)*b**S(3)) + x**m*(b*x)**(-m)*Gamma(m + S(3), b*x)*exp(-a)/(S(2)*b**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(1))*sinh(a + b*x), x), x, -x**m*(-b*x)**(-m)*Gamma(m + S(2), -b*x)*exp(a)/(S(2)*b**S(2)) + x**m*(b*x)**(-m)*Gamma(m + S(2), b*x)*exp(-a)/(S(2)*b**S(2)), expand=True, _diff=True, _numerical=True)
# --- x**m * sinh(a + b*x)**p antiderivatives -------------------------------
# Auto-generated Rubi regression tests: each assert checks that
# rubi_integrate's result matches a known closed form expressed via the
# upper incomplete gamma function Gamma(m+k, ±b*x) (and, for sinh**2,
# Gamma(m+k, ±2*b*x)).  Do not hand-edit the expressions; they are
# machine-verified antiderivatives (expand/diff/numerical checks enabled).
assert rubi_test(rubi_integrate(x**m*sinh(a + b*x), x), x, x**m*(-b*x)**(-m)*Gamma(m + S(1), -b*x)*exp(a)/(S(2)*b) + x**m*(b*x)**(-m)*Gamma(m + S(1), b*x)*exp(-a)/(S(2)*b), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-1))*sinh(a + b*x), x), x, -x**m*(-b*x)**(-m)*Gamma(m, -b*x)*exp(a)/S(2) + x**m*(b*x)**(-m)*Gamma(m, b*x)*exp(-a)/S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-2))*sinh(a + b*x), x), x, b*x**m*(-b*x)**(-m)*Gamma(m + S(-1), -b*x)*exp(a)/S(2) + b*x**m*(b*x)**(-m)*Gamma(m + S(-1), b*x)*exp(-a)/S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-3))*sinh(a + b*x), x), x, -b**S(2)*x**m*(-b*x)**(-m)*Gamma(m + S(-2), -b*x)*exp(a)/S(2) + b**S(2)*x**m*(b*x)**(-m)*Gamma(m + S(-2), b*x)*exp(-a)/S(2), expand=True, _diff=True, _numerical=True)
# sinh(a + b*x)**2 cases: closed forms use Gamma(m+k, ±2*b*x) plus a
# polynomial term, reflecting sinh**2 = (cosh(2u) - 1)/2.
assert rubi_test(rubi_integrate(x**(m + S(3))*sinh(a + b*x)**S(2), x), x, -S(2)**(-m + S(-6))*x**m*(-b*x)**(-m)*Gamma(m + S(4), -S(2)*b*x)*exp(S(2)*a)/b**S(4) - S(2)**(-m + S(-6))*x**m*(b*x)**(-m)*Gamma(m + S(4), S(2)*b*x)*exp(-S(2)*a)/b**S(4) - x**(m + S(4))/(S(2)*m + S(8)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(2))*sinh(a + b*x)**S(2), x), x, S(2)**(-m + S(-5))*x**m*(-b*x)**(-m)*Gamma(m + S(3), -S(2)*b*x)*exp(S(2)*a)/b**S(3) - S(2)**(-m + S(-5))*x**m*(b*x)**(-m)*Gamma(m + S(3), S(2)*b*x)*exp(-S(2)*a)/b**S(3) - x**(m + S(3))/(S(2)*m + S(6)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(1))*sinh(a + b*x)**S(2), x), x, -S(2)**(-m + S(-4))*x**m*(-b*x)**(-m)*Gamma(m + S(2), -S(2)*b*x)*exp(S(2)*a)/b**S(2) - S(2)**(-m + S(-4))*x**m*(b*x)**(-m)*Gamma(m + S(2), S(2)*b*x)*exp(-S(2)*a)/b**S(2) - x**(m + S(2))/(S(2)*m + S(4)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**m*sinh(a + b*x)**S(2), x), x, S(2)**(-m + S(-3))*x**m*(-b*x)**(-m)*Gamma(m + S(1), -S(2)*b*x)*exp(S(2)*a)/b - S(2)**(-m + S(-3))*x**m*(b*x)**(-m)*Gamma(m + S(1), S(2)*b*x)*exp(-S(2)*a)/b - x**(m + S(1))/(S(2)*m + S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-1))*sinh(a + b*x)**S(2), x), x, -S(2)**(-m + S(-2))*x**m*(-b*x)**(-m)*Gamma(m, -S(2)*b*x)*exp(S(2)*a) - S(2)**(-m + S(-2))*x**m*(b*x)**(-m)*Gamma(m, S(2)*b*x)*exp(-S(2)*a) - x**m/(S(2)*m), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-2))*sinh(a + b*x)**S(2), x), x, S(2)**(-m + S(-1))*b*x**m*(-b*x)**(-m)*Gamma(m + S(-1), -S(2)*b*x)*exp(S(2)*a) - S(2)**(-m + S(-1))*b*x**m*(b*x)**(-m)*Gamma(m + S(-1), S(2)*b*x)*exp(-S(2)*a) + x**(m + S(-1))/(-S(2)*m + S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**(m + S(-3))*sinh(a + b*x)**S(2), x), x, x**(m + S(-2))/(-S(2)*m + S(4)) - S(2)**(-m)*b**S(2)*x**m*(-b*x)**(-m)*Gamma(m + S(-2), -S(2)*b*x)*exp(S(2)*a) - S(2)**(-m)*b**S(2)*x**m*(b*x)**(-m)*Gamma(m + S(-2), S(2)*b*x)*exp(-S(2)*a), expand=True, _diff=True, _numerical=True)
# --- x**k times fractional powers of csch(x) = 1/sinh(x) -------------------
# Each integrand is a linear combination chosen so the antiderivative is
# elementary (cosh/csch terms); the last case also involves the
# incomplete elliptic integral EllipticF.  Expressions are machine-generated
# and verified; keep them byte-exact.
assert rubi_test(rubi_integrate(x*sqrt(S(1)/sinh(x))/S(3) + x/(S(1)/sinh(x))**(S(3)/2), x), x, S(2)*x*cosh(x)/(S(3)*sqrt(S(1)/sinh(x))) - S(4)/(S(9)*(S(1)/sinh(x))**(S(3)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(3)*x/(S(5)*sqrt(S(1)/sinh(x))) + x/(S(1)/sinh(x))**(S(5)/2), x), x, S(2)*x*cosh(x)/(S(5)*(S(1)/sinh(x))**(S(3)/2)) - S(4)/(S(25)*(S(1)/sinh(x))**(S(5)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(-S(5)*x*sqrt(S(1)/sinh(x))/S(21) + x/(S(1)/sinh(x))**(S(7)/2), x), x, -S(10)*x*cosh(x)/(S(21)*sqrt(S(1)/sinh(x))) + S(2)*x*cosh(x)/(S(7)*(S(1)/sinh(x))**(S(5)/2)) + S(20)/(S(63)*(S(1)/sinh(x))**(S(3)/2)) - S(4)/(S(49)*(S(1)/sinh(x))**(S(7)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(x**S(2)*sqrt(S(1)/sinh(x))/S(3) + x**S(2)/(S(1)/sinh(x))**(S(3)/2), x), x, S(2)*x**S(2)*cosh(x)/(S(3)*sqrt(S(1)/sinh(x))) - S(8)*x/(S(9)*(S(1)/sinh(x))**(S(3)/2)) - S(16)*I*sqrt(I*sinh(x))*sqrt(S(1)/sinh(x))*EllipticF(Pi/S(4) - I*x/S(2), S(2))/S(27) + S(16)*cosh(x)/(S(27)*sqrt(S(1)/sinh(x))), expand=True, _diff=True, _numerical=True)
# --- (c + d*x)**n * (a + I*a*sinh(e + f*x))**p -----------------------------
# Tests for products of a linear factor raised to integer powers with
# (a + I*a*sinh)**p.  Closed forms involve sinh/cosh, CoshIntegral /
# SinhIntegral (for negative powers of the linear factor), PolyLog
# (for negative powers of the sinh factor), and the half-angle shift
# I*Pi/4 + e/2 + f*x/2 coming from 1 + I*sinh(u) = 2*cosh(I*Pi/4 + u/2)**2.
# Cases whose expected value is Integrate(...) itself assert that Rubi
# deliberately returns the integral unevaluated.
assert rubi_test(rubi_integrate((c + d*x)**S(3)*(I*a*sinh(e + f*x) + a), x), x, -S(6)*I*a*d**S(3)*sinh(e + f*x)/f**S(4) + S(6)*I*a*d**S(2)*(c + d*x)*cosh(e + f*x)/f**S(3) - S(3)*I*a*d*(c + d*x)**S(2)*sinh(e + f*x)/f**S(2) + I*a*(c + d*x)**S(3)*cosh(e + f*x)/f + a*(c + d*x)**S(4)/(S(4)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)*(I*a*sinh(e + f*x) + a), x), x, S(2)*I*a*d**S(2)*cosh(e + f*x)/f**S(3) - S(2)*I*a*d*(c + d*x)*sinh(e + f*x)/f**S(2) + I*a*(c + d*x)**S(2)*cosh(e + f*x)/f + a*(c + d*x)**S(3)/(S(3)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)*(I*a*sinh(e + f*x) + a), x), x, -I*a*d*sinh(e + f*x)/f**S(2) + I*a*(c + d*x)*cosh(e + f*x)/f + a*(c + d*x)**S(2)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)/(c + d*x), x), x, I*a*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d + I*a*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d + a*log(c + d*x)/d, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)/(c + d*x)**S(2), x), x, -I*a*sinh(e + f*x)/(d*(c + d*x)) - a/(d*(c + d*x)) + I*a*f*CoshIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(2) + I*a*f*SinhIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)/(c + d*x)**S(3), x), x, -I*a*sinh(e + f*x)/(S(2)*d*(c + d*x)**S(2)) - a/(S(2)*d*(c + d*x)**S(2)) - I*a*f*cosh(e + f*x)/(S(2)*d**S(2)*(c + d*x)) + I*a*f**S(2)*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/(S(2)*d**S(3)) + I*a*f**S(2)*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/(S(2)*d**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)*(I*a*sinh(e + f*x) + a)**S(2), x), x, S(3)*a**S(2)*c*d**S(2)*x/(S(4)*f**S(2)) + S(3)*a**S(2)*d**S(3)*x**S(2)/(S(8)*f**S(2)) + S(3)*a**S(2)*d**S(3)*sinh(e + f*x)**S(2)/(S(8)*f**S(4)) - S(12)*I*a**S(2)*d**S(3)*sinh(e + f*x)/f**S(4) - S(3)*a**S(2)*d**S(2)*(c + d*x)*sinh(e + f*x)*cosh(e + f*x)/(S(4)*f**S(3)) + S(12)*I*a**S(2)*d**S(2)*(c + d*x)*cosh(e + f*x)/f**S(3) + S(3)*a**S(2)*d*(c + d*x)**S(2)*sinh(e + f*x)**S(2)/(S(4)*f**S(2)) - S(6)*I*a**S(2)*d*(c + d*x)**S(2)*sinh(e + f*x)/f**S(2) - a**S(2)*(c + d*x)**S(3)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f) + S(2)*I*a**S(2)*(c + d*x)**S(3)*cosh(e + f*x)/f + S(3)*a**S(2)*(c + d*x)**S(4)/(S(8)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)*(I*a*sinh(e + f*x) + a)**S(2), x), x, a**S(2)*d**S(2)*x/(S(4)*f**S(2)) - a**S(2)*d**S(2)*sinh(e + f*x)*cosh(e + f*x)/(S(4)*f**S(3)) + S(4)*I*a**S(2)*d**S(2)*cosh(e + f*x)/f**S(3) + a**S(2)*d*(c + d*x)*sinh(e + f*x)**S(2)/(S(2)*f**S(2)) - S(4)*I*a**S(2)*d*(c + d*x)*sinh(e + f*x)/f**S(2) - a**S(2)*(c + d*x)**S(2)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f) + S(2)*I*a**S(2)*(c + d*x)**S(2)*cosh(e + f*x)/f + a**S(2)*(c + d*x)**S(3)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)*(I*a*sinh(e + f*x) + a)**S(2), x), x, a**S(2)*c*x/S(2) + a**S(2)*d*x**S(2)/S(4) + a**S(2)*d*sinh(e + f*x)**S(2)/(S(4)*f**S(2)) - S(2)*I*a**S(2)*d*sinh(e + f*x)/f**S(2) - a**S(2)*(c + d*x)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f) + S(2)*I*a**S(2)*(c + d*x)*cosh(e + f*x)/f + a**S(2)*(c + d*x)**S(2)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)**S(2)/(c + d*x), x), x, S(2)*I*a**S(2)*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d - a**S(2)*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/(S(2)*d) + S(2)*I*a**S(2)*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d - a**S(2)*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/(S(2)*d) + S(3)*a**S(2)*log(c + d*x)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)**S(2)/(c + d*x)**S(2), x), x, -S(4)*a**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(4)/(d*(c + d*x)) + S(2)*I*a**S(2)*f*CoshIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(2) - a**S(2)*f*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/d**S(2) + S(2)*I*a**S(2)*f*SinhIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(2) - a**S(2)*f*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/d**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)**S(2)/(c + d*x)**S(3), x), x, -S(2)*a**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(4)/(d*(c + d*x)**S(2)) - S(4)*a**S(2)*f*sinh(I*Pi/S(4) + e/S(2) + f*x/S(2))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(3)/(d**S(2)*(c + d*x)) + I*a**S(2)*f**S(2)*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(3) - a**S(2)*f**S(2)*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/d**S(3) + I*a**S(2)*f**S(2)*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(3) - a**S(2)*f**S(2)*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/d**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/(I*a*sinh(e + f*x) + a), x), x, S(12)*d**S(3)*PolyLog(S(3), -exp(I*Pi/S(2) + e + f*x))/(a*f**S(4)) - S(12)*d**S(2)*(c + d*x)*PolyLog(S(2), -exp(I*Pi/S(2) + e + f*x))/(a*f**S(3)) - S(6)*d*(c + d*x)**S(2)*log(exp(I*Pi/S(2) + e + f*x) + S(1))/(a*f**S(2)) + (c + d*x)**S(3)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f) + (c + d*x)**S(3)/(a*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/(I*a*sinh(e + f*x) + a), x), x, -S(4)*d**S(2)*PolyLog(S(2), -exp(I*Pi/S(2) + e + f*x))/(a*f**S(3)) - S(4)*d*(c + d*x)*log(exp(I*Pi/S(2) + e + f*x) + S(1))/(a*f**S(2)) + (c + d*x)**S(2)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f) + (c + d*x)**S(2)/(a*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/(I*a*sinh(e + f*x) + a), x), x, -S(2)*d*log(cosh(I*Pi/S(4) + e/S(2) + f*x/S(2)))/(a*f**S(2)) + (c + d*x)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)*(I*a*sinh(e + f*x) + a)), x), x, Integrate(S(1)/((c + d*x)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)), x)/(S(2)*a), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)**S(2)*(I*a*sinh(e + f*x) + a)), x), x, Integrate(S(1)/((c + d*x)**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)), x)/(S(2)*a), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/(I*a*sinh(e + f*x) + a)**S(2), x), x, S(4)*d**S(3)*PolyLog(S(3), -exp(I*Pi/S(2) + e + f*x))/(a**S(2)*f**S(4)) + S(4)*d**S(3)*log(cosh(I*Pi/S(4) + e/S(2) + f*x/S(2)))/(a**S(2)*f**S(4)) - S(4)*d**S(2)*(c + d*x)*PolyLog(S(2), -exp(I*Pi/S(2) + e + f*x))/(a**S(2)*f**S(3)) - S(2)*d**S(2)*(c + d*x)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a**S(2)*f**S(3)) - S(2)*d*(c + d*x)**S(2)*log(exp(I*Pi/S(2) + e + f*x) + S(1))/(a**S(2)*f**S(2)) + d*(c + d*x)**S(2)/(S(2)*a**S(2)*f**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)) + (c + d*x)**S(3)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(3)*a**S(2)*f) + (c + d*x)**S(3)/(S(3)*a**S(2)*f) + (c + d*x)**S(3)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(6)*a**S(2)*f*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/(I*a*sinh(e + f*x) + a)**S(2), x), x, -S(4)*d**S(2)*PolyLog(S(2), -exp(I*Pi/S(2) + e + f*x))/(S(3)*a**S(2)*f**S(3)) - S(2)*d**S(2)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(3)*a**S(2)*f**S(3)) - S(4)*d*(c + d*x)*log(exp(I*Pi/S(2) + e + f*x) + S(1))/(S(3)*a**S(2)*f**S(2)) + d*(c + d*x)/(S(3)*a**S(2)*f**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)) + (c + d*x)**S(2)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(3)*a**S(2)*f) + (c + d*x)**S(2)/(S(3)*a**S(2)*f) + (c + d*x)**S(2)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(6)*a**S(2)*f*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/(I*a*sinh(e + f*x) + a)**S(2), x), x, -S(2)*d*log(cosh(I*Pi/S(4) + e/S(2) + f*x/S(2)))/(S(3)*a**S(2)*f**S(2)) + d/(S(6)*a**S(2)*f**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)) + (c + d*x)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(3)*a**S(2)*f) + (c + d*x)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(6)*a**S(2)*f*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)*(I*a*sinh(e + f*x) + a)**S(2)), x), x, Integrate(S(1)/((c + d*x)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(4)), x)/(S(4)*a**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((c + d*x)**S(2)*(I*a*sinh(e + f*x) + a)**S(2)), x), x, Integrate(S(1)/((c + d*x)**S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(4)), x)/(S(4)*a**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(I*a*sinh(e + f*x) + a)/x, x), x, sqrt(I*a*sinh(e + f*x) + a)*CoshIntegral(f*x/S(2))*cosh(I*Pi/S(4) + e/S(2))/cosh(I*Pi/S(4) + e/S(2) + f*x/S(2)) + sqrt(I*a*sinh(e + f*x) + a)*SinhIntegral(f*x/S(2))*sinh(I*Pi/S(4) + e/S(2))/cosh(I*Pi/S(4) + e/S(2) + f*x/S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(I*a*sinh(e + f*x) + a)/x**S(2), x), x, f*sqrt(I*a*sinh(e + f*x) + a)*CoshIntegral(f*x/S(2))*sinh(I*Pi/S(4) + e/S(2))/(S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))) + f*sqrt(I*a*sinh(e + f*x) + a)*SinhIntegral(f*x/S(2))*cosh(I*Pi/S(4) + e/S(2))/(S(2)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))) - sqrt(I*a*sinh(e + f*x) + a)/x, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(sqrt(I*a*sinh(e + f*x) + a)/x**S(3), x), x, f**S(2)*sqrt(I*a*sinh(e + f*x) + a)*CoshIntegral(f*x/S(2))*cosh(I*Pi/S(4) + e/S(2))/(S(8)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))) + f**S(2)*sqrt(I*a*sinh(e + f*x) + a)*SinhIntegral(f*x/S(2))*sinh(I*Pi/S(4) + e/S(2))/(S(8)*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))) - f*sqrt(I*a*sinh(e + f*x) + a)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(4)*x) - sqrt(I*a*sinh(e + f*x) + a)/(S(2)*x**S(2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/(x*sqrt(I*a*sinh(e + f*x) + a)), x), x, Integrate(S(1)/(x*sqrt(I*a*sinh(e + f*x) + a)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/(x**S(2)*sqrt(I*a*sinh(e + f*x) + a)), x), x, Integrate(S(1)/(x**S(2)*sqrt(I*a*sinh(e + f*x) + a)), x), expand=True, _diff=True, _numerical=True)
# The triple-quoted string below is used as a block comment to disable
# three slow ("long time") test cases; the asserts inside it are additionally
# prefixed with '#'.  The string literal itself is a harmless bare-expression
# statement and must not be reformatted.
''' long time
# assert rubi_test(rubi_integrate(x**S(3)/(I*a*sinh(e + f*x) + a)**(S(3)/2), x), x, x**S(3)*ArcTan(exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f*sqrt(I*a*sinh(e + f*x) + a)) + x**S(3)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(2)*a*f*sqrt(I*a*sinh(e + f*x) + a)) - S(3)*I*x**S(2)*PolyLog(S(2), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + S(3)*I*x**S(2)*PolyLog(S(2), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + S(3)*x**S(2)/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) - S(24)*x*ArcTan(exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)) + S(12)*I*x*PolyLog(S(3), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)) - S(12)*I*x*PolyLog(S(3), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)) + S(24)*I*PolyLog(S(2), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(4)*sqrt(I*a*sinh(e + f*x) + a)) - S(24)*I*PolyLog(S(2), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(4)*sqrt(I*a*sinh(e + f*x) + a)) - S(24)*I*PolyLog(S(4), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(4)*sqrt(I*a*sinh(e + f*x) + a)) + S(24)*I*PolyLog(S(4), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(4)*sqrt(I*a*sinh(e + f*x) + a)), expand=True, _diff=True, _numerical=True)
# assert rubi_test(rubi_integrate(x**S(2)/(I*a*sinh(e + f*x) + a)**(S(3)/2), x), x, x**S(2)*ArcTan(exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f*sqrt(I*a*sinh(e + f*x) + a)) + x**S(2)*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(2)*a*f*sqrt(I*a*sinh(e + f*x) + a)) - S(2)*I*x*PolyLog(S(2), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + S(2)*I*x*PolyLog(S(2), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + S(2)*x/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) - S(4)*ArcTan(sinh(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)) + S(4)*I*PolyLog(S(3), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)) - S(4)*I*PolyLog(S(3), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(3)*sqrt(I*a*sinh(e + f*x) + a)), expand=True, _diff=True, _numerical=True)
# assert rubi_test(rubi_integrate(x/(I*a*sinh(e + f*x) + a)**(S(3)/2), x), x, x*ArcTan(exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f*sqrt(I*a*sinh(e + f*x) + a)) + x*tanh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(S(2)*a*f*sqrt(I*a*sinh(e + f*x) + a)) - I*PolyLog(S(2), -I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + I*PolyLog(S(2), I*exp(I*Pi/S(4) + e/S(2) + f*x/S(2)))*cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)) + S(1)/(a*f**S(2)*sqrt(I*a*sinh(e + f*x) + a)), expand=True, _diff=True, _numerical=True)
'''
# --- Remaining (I*a*sinh + a)**n cases, mostly unevaluated -----------------
# Most expected values here are Integrate(...) expressions: these asserts
# pin the fact that Rubi leaves these integrals (or their cosh**k reduced
# forms) unevaluated.  The symbolic-exponent cases (powers m, n) reduce to
# incomplete gamma forms Gamma(m+1, ±k*f*(c + d*x)/d).
assert rubi_test(rubi_integrate(S(1)/(x*(I*a*sinh(e + f*x) + a)**(S(3)/2)), x), x, Integrate(S(1)/(x*(I*a*sinh(e + f*x) + a)**(S(3)/2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/(x**S(2)*(I*a*sinh(e + f*x) + a)**(S(3)/2)), x), x, Integrate(S(1)/(x**S(2)*(I*a*sinh(e + f*x) + a)**(S(3)/2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/(x*(I*a*sinh(c + d*x) + a)**(S(5)/2)), x), x, Integrate(S(1)/(x*(I*a*sinh(c + d*x) + a)**(S(5)/2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((I*a*sinh(e + f*x) + a)**(S(1)/3)/x, x), x, Integrate((I*a*sinh(e + f*x) + a)**(S(1)/3)/x, x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*(I*a*sinh(e + f*x) + a)**n, x), x, Integrate((c + d*x)**m*(I*a*sinh(e + f*x) + a)**n, x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*(I*a*sinh(e + f*x) + a)**S(3), x), x, -S(3)*S(2)**(-m + S(-3))*a**S(3)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(2)*f*(c + d*x)/d)*exp(-S(2)*c*f/d + S(2)*e)/f + S(3)*S(2)**(-m + S(-3))*a**S(3)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(2)*f*(c + d*x)/d)*exp(S(2)*c*f/d - S(2)*e)/f - S(3)**(-m + S(-1))*I*a**S(3)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(3)*f*(c + d*x)/d)*exp(-S(3)*c*f/d + S(3)*e)/(S(8)*f) - S(3)**(-m + S(-1))*I*a**S(3)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(3)*f*(c + d*x)/d)*exp(S(3)*c*f/d - S(3)*e)/(S(8)*f) + S(15)*I*a**S(3)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/(S(8)*f) + S(15)*I*a**S(3)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/(S(8)*f) + S(5)*a**S(3)*(c + d*x)**(m + S(1))/(S(2)*d*(m + S(1))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*(I*a*sinh(e + f*x) + a)**S(2), x), x, -S(2)**(-m + S(-3))*a**S(2)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(2)*f*(c + d*x)/d)*exp(-S(2)*c*f/d + S(2)*e)/f + S(2)**(-m + S(-3))*a**S(2)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(2)*f*(c + d*x)/d)*exp(S(2)*c*f/d - S(2)*e)/f + I*a**S(2)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/f + I*a**S(2)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/f + S(3)*a**S(2)*(c + d*x)**(m + S(1))/(S(2)*d*(m + S(1))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m*(I*a*sinh(e + f*x) + a), x), x, I*a*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/(S(2)*f) + I*a*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/(S(2)*f) + a*(c + d*x)**(m + S(1))/(d*(m + S(1))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/(I*a*sinh(e + f*x) + a), x), x, Integrate((c + d*x)**m/cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(2), x)/(S(2)*a), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/(I*a*sinh(e + f*x) + a)**S(2), x), x, Integrate((c + d*x)**m/cosh(I*Pi/S(4) + e/S(2) + f*x/S(2))**S(4), x)/(S(4)*a**S(2)), expand=True, _diff=True, _numerical=True)
# --- (a + b*sinh(e + f*x))**p * (c + d*x)**n, general a, b -----------------
# Closed forms for positive n use sinh/cosh; n < 0 brings in CoshIntegral /
# SinhIntegral, and negative powers of (a + b*sinh) bring in PolyLog with
# arguments -b*exp(e + f*x)/(a ∓ sqrt(a**2 + b**2)).  Cases whose expected
# value is Integrate(...) assert the integral stays unevaluated.
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))*(c + d*x)**S(3), x), x, a*(c + d*x)**S(4)/(S(4)*d) - S(6)*b*d**S(3)*sinh(e + f*x)/f**S(4) + S(6)*b*d**S(2)*(c + d*x)*cosh(e + f*x)/f**S(3) - S(3)*b*d*(c + d*x)**S(2)*sinh(e + f*x)/f**S(2) + b*(c + d*x)**S(3)*cosh(e + f*x)/f, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))*(c + d*x)**S(2), x), x, a*(c + d*x)**S(3)/(S(3)*d) + S(2)*b*d**S(2)*cosh(e + f*x)/f**S(3) - S(2)*b*d*(c + d*x)*sinh(e + f*x)/f**S(2) + b*(c + d*x)**S(2)*cosh(e + f*x)/f, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))*(c + d*x), x), x, a*(c + d*x)**S(2)/(S(2)*d) - b*d*sinh(e + f*x)/f**S(2) + b*(c + d*x)*cosh(e + f*x)/f, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))/(c + d*x), x), x, a*log(c + d*x)/d + b*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d + b*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d, expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))/(c + d*x)**S(2), x), x, -a/(d*(c + d*x)) - b*sinh(e + f*x)/(d*(c + d*x)) + b*f*CoshIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(2) + b*f*SinhIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))/(c + d*x)**S(3), x), x, -a/(S(2)*d*(c + d*x)**S(2)) - b*sinh(e + f*x)/(S(2)*d*(c + d*x)**S(2)) - b*f*cosh(e + f*x)/(S(2)*d**S(2)*(c + d*x)) + b*f**S(2)*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/(S(2)*d**S(3)) + b*f**S(2)*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/(S(2)*d**S(3)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)*(c + d*x)**S(3), x), x, a**S(2)*(c + d*x)**S(4)/(S(4)*d) - S(12)*a*b*d**S(3)*sinh(e + f*x)/f**S(4) + S(12)*a*b*d**S(2)*(c + d*x)*cosh(e + f*x)/f**S(3) - S(6)*a*b*d*(c + d*x)**S(2)*sinh(e + f*x)/f**S(2) + S(2)*a*b*(c + d*x)**S(3)*cosh(e + f*x)/f - S(3)*b**S(2)*c*d**S(2)*x/(S(4)*f**S(2)) - S(3)*b**S(2)*d**S(3)*x**S(2)/(S(8)*f**S(2)) - S(3)*b**S(2)*d**S(3)*sinh(e + f*x)**S(2)/(S(8)*f**S(4)) + S(3)*b**S(2)*d**S(2)*(c + d*x)*sinh(e + f*x)*cosh(e + f*x)/(S(4)*f**S(3)) - S(3)*b**S(2)*d*(c + d*x)**S(2)*sinh(e + f*x)**S(2)/(S(4)*f**S(2)) + b**S(2)*(c + d*x)**S(3)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f) - b**S(2)*(c + d*x)**S(4)/(S(8)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)*(c + d*x)**S(2), x), x, a**S(2)*(c + d*x)**S(3)/(S(3)*d) + S(4)*a*b*d**S(2)*cosh(e + f*x)/f**S(3) - S(4)*a*b*d*(c + d*x)*sinh(e + f*x)/f**S(2) + S(2)*a*b*(c + d*x)**S(2)*cosh(e + f*x)/f - b**S(2)*d**S(2)*x/(S(4)*f**S(2)) + b**S(2)*d**S(2)*sinh(e + f*x)*cosh(e + f*x)/(S(4)*f**S(3)) - b**S(2)*d*(c + d*x)*sinh(e + f*x)**S(2)/(S(2)*f**S(2)) + b**S(2)*(c + d*x)**S(2)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f) - b**S(2)*(c + d*x)**S(3)/(S(6)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)*(c + d*x), x), x, a**S(2)*(c + d*x)**S(2)/(S(2)*d) - S(2)*a*b*d*sinh(e + f*x)/f**S(2) + S(2)*a*b*(c + d*x)*cosh(e + f*x)/f - b**S(2)*c*x/S(2) - b**S(2)*d*x**S(2)/S(4) - b**S(2)*d*sinh(e + f*x)**S(2)/(S(4)*f**S(2)) + b**S(2)*(c + d*x)*sinh(e + f*x)*cosh(e + f*x)/(S(2)*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)/(c + d*x), x), x, a**S(2)*log(c + d*x)/d + S(2)*a*b*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d + S(2)*a*b*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d + b**S(2)*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/(S(2)*d) + b**S(2)*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/(S(2)*d) - b**S(2)*log(c + d*x)/(S(2)*d), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)/(c + d*x)**S(2), x), x, -a**S(2)/(d*(c + d*x)) - S(2)*a*b*sinh(e + f*x)/(d*(c + d*x)) + S(2)*a*b*f*CoshIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(2) + S(2)*a*b*f*SinhIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(2) - b**S(2)*sinh(e + f*x)**S(2)/(d*(c + d*x)) + b**S(2)*f*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/d**S(2) + b**S(2)*f*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/d**S(2), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)/(c + d*x)**S(3), x), x, -a**S(2)/(S(2)*d*(c + d*x)**S(2)) - a*b*sinh(e + f*x)/(d*(c + d*x)**S(2)) - a*b*f*cosh(e + f*x)/(d**S(2)*(c + d*x)) + a*b*f**S(2)*CoshIntegral(c*f/d + f*x)*sinh(-c*f/d + e)/d**S(3) + a*b*f**S(2)*SinhIntegral(c*f/d + f*x)*cosh(-c*f/d + e)/d**S(3) - b**S(2)*sinh(e + f*x)**S(2)/(S(2)*d*(c + d*x)**S(2)) - b**S(2)*f*sinh(e + f*x)*cosh(e + f*x)/(d**S(2)*(c + d*x)) + b**S(2)*f**S(2)*CoshIntegral(S(2)*c*f/d + S(2)*f*x)*cosh(-S(2)*c*f/d + S(2)*e)/d**S(3) + b**S(2)*f**S(2)*SinhIntegral(S(2)*c*f/d + S(2)*f*x)*sinh(-S(2)*c*f/d + S(2)*e)/d**S(3), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/(a + b*sinh(e + f*x)), x), x, S(6)*d**S(3)*PolyLog(S(4), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(4)*sqrt(a**S(2) + b**S(2))) - S(6)*d**S(3)*PolyLog(S(4), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(4)*sqrt(a**S(2) + b**S(2))) - S(6)*d**S(2)*(c + d*x)*PolyLog(S(3), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*sqrt(a**S(2) + b**S(2))) + S(6)*d**S(2)*(c + d*x)*PolyLog(S(3), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*sqrt(a**S(2) + b**S(2))) + S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) - S(3)*d*(c + d*x)**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) + (c + d*x)**S(3)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))) - (c + d*x)**S(3)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/(a + b*sinh(e + f*x)), x), x, -S(2)*d**S(2)*PolyLog(S(3), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*sqrt(a**S(2) + b**S(2))) + S(2)*d**S(2)*PolyLog(S(3), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*sqrt(a**S(2) + b**S(2))) + S(2)*d*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) - S(2)*d*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) + (c + d*x)**S(2)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))) - (c + d*x)**S(2)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/(a + b*sinh(e + f*x)), x), x, d*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) - d*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*sqrt(a**S(2) + b**S(2))) + (c + d*x)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))) - (c + d*x)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*sqrt(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(e + f*x))*(c + d*x)), x), x, Integrate(S(1)/((a + b*sinh(e + f*x))*(c + d*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(e + f*x))*(c + d*x)**S(2)), x), x, Integrate(S(1)/((a + b*sinh(e + f*x))*(c + d*x)**S(2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(3)/(a + b*sinh(e + f*x))**S(2), x), x, S(6)*a*d**S(3)*PolyLog(S(4), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(4)*(a**S(2) + b**S(2))**(S(3)/2)) - S(6)*a*d**S(3)*PolyLog(S(4), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(4)*(a**S(2) + b**S(2))**(S(3)/2)) - S(6)*a*d**S(2)*(c + d*x)*PolyLog(S(3), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) + S(6)*a*d**S(2)*(c + d*x)*PolyLog(S(3), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) + S(3)*a*d*(c + d*x)**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) - S(3)*a*d*(c + d*x)**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) + a*(c + d*x)**S(3)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - a*(c + d*x)**S(3)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - b*(c + d*x)**S(3)*cosh(e + f*x)/(f*(a + b*sinh(e + f*x))*(a**S(2) + b**S(2))) - S(6)*d**S(3)*PolyLog(S(3), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(4)*(a**S(2) + b**S(2))) - S(6)*d**S(3)*PolyLog(S(3), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(4)*(a**S(2) + b**S(2))) + S(6)*d**S(2)*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))) + S(6)*d**S(2)*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))) + S(3)*d*(c + d*x)**S(2)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f**S(2)*(a**S(2) + b**S(2))) + S(3)*d*(c + d*x)**S(2)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f**S(2)*(a**S(2) + b**S(2))) - (c + d*x)**S(3)/(f*(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**S(2)/(a + b*sinh(e + f*x))**S(2), x), x, -S(2)*a*d**S(2)*PolyLog(S(3), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) + S(2)*a*d**S(2)*PolyLog(S(3), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) + S(2)*a*d*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) - S(2)*a*d*(c + d*x)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) + a*(c + d*x)**S(2)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - a*(c + d*x)**S(2)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - b*(c + d*x)**S(2)*cosh(e + f*x)/(f*(a + b*sinh(e + f*x))*(a**S(2) + b**S(2))) + S(2)*d**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))) + S(2)*d**S(2)*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(3)*(a**S(2) + b**S(2))) + S(2)*d*(c + d*x)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f**S(2)*(a**S(2) + b**S(2))) + S(2)*d*(c + d*x)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f**S(2)*(a**S(2) + b**S(2))) - (c + d*x)**S(2)/(f*(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)/(a + b*sinh(e + f*x))**S(2), x), x, a*d*PolyLog(S(2), -b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) - a*d*PolyLog(S(2), -b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))))/(f**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) + a*(c + d*x)*log(b*exp(e + f*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - a*(c + d*x)*log(b*exp(e + f*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(f*(a**S(2) + b**S(2))**(S(3)/2)) - b*(c + d*x)*cosh(e + f*x)/(f*(a + b*sinh(e + f*x))*(a**S(2) + b**S(2))) + d*log(a + b*sinh(e + f*x))/(f**S(2)*(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(e + f*x))**S(2)*(c + d*x)), x), x, Integrate(S(1)/((a + b*sinh(e + f*x))**S(2)*(c + d*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(e + f*x))**S(2)*(c + d*x)**S(2)), x), x, Integrate(S(1)/((a + b*sinh(e + f*x))**S(2)*(c + d*x)**S(2)), x), expand=True, _diff=True, _numerical=True)
# Disabled "long time" test case for (e + f*x)**2 / (a + b*sinh(c + d*x))**3.
# BUG FIX: the commented-out assert spilled onto a second physical line that
# was NOT comment-prefixed, leaving an orphaned expression fragment
# (`b**S(2))**(S(3)/2)) - ...`) that is a SyntaxError.  Both lines are now
# comments, preserving the original text for future re-enablement.
# long time assert rubi_test(rubi_integrate((e + f*x)**S(2)/(a + b*sinh(c + d*x))**S(3), x), x, S(3)*a**S(2)*(e + f*x)**S(2)*log(b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a**S(2)*(e + f*x)**S(2)*log(b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(5)/2)) + S(3)*a**S(2)*f*(e + f*x)*PolyLog(S(2), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(d**S(2)*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a**S(2)*f*(e + f*x)*PolyLog(S(2), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(d**S(2)*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a**S(2)*f**S(2)*PolyLog(S(3), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**(S(5)/2)) + S(3)*a**S(2)*f**S(2)*PolyLog(S(3), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a*b*(e + f*x)**S(2)*cosh(c + d*x)/(S(2)*d*(a + b*sinh(c + d*x))*(a**S(2) + b**S(2))**S(2)) - S(3)*a*(e + f*x)**S(2)/(S(2)*d*(a**S(2) + b**S(2))**S(2)) + S(3)*a*f*(e + f*x)*log(b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(d**S(2)*(a**S(2) + b**S(2))**S(2)) + S(3)*a*f*(e + f*x)*log(b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(d**S(2)*(a**S(2) + b**S(2))**S(2)) + S(3)*a*f**S(2)*PolyLog(S(2), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**S(2)) + S(3)*a*f**S(2)*PolyLog(S(2), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**S(2)) - b*(e + f*x)**S(2)*cosh(c + d*x)/(S(2)*d*(a + b*sinh(c + d*x))**S(2)*(a**S(2) + b**S(2))) - (e + f*x)**S(2)*log(b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(3)/2)) + (e + f*x)**S(2)*log(b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(3)/2)) - f*(e + f*x)*PolyLog(S(2), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(d**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) + f*(e + f*x)*PolyLog(S(2), -b*exp(c + d*x)/(a + sqrt(a**S(2) + 
# b**S(2))**(S(3)/2)) - f*(e + f*x)/(d**S(2)*(a + b*sinh(c + d*x))*(a**S(2) + b**S(2))) + f**S(2)*PolyLog(S(3), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) - f**S(2)*PolyLog(S(3), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(d**S(3)*(a**S(2) + b**S(2))**(S(3)/2)) - S(2)*f**S(2)*atanh((-a*tanh(c/S(2) + d*x/S(2)) + b)/sqrt(a**S(2) + b**S(2)))/(d**S(3)*(a**S(2) + b**S(2))**(S(3)/2)), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((e + f*x)/(a + b*sinh(c + d*x))**S(3), x), x, S(3)*a**S(2)*(e + f*x)*log(b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a**S(2)*(e + f*x)*log(b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(5)/2)) + S(3)*a**S(2)*f*PolyLog(S(2), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(S(2)*d**S(2)*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a**S(2)*f*PolyLog(S(2), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(S(2)*d**S(2)*(a**S(2) + b**S(2))**(S(5)/2)) - S(3)*a*b*(e + f*x)*cosh(c + d*x)/(S(2)*d*(a + b*sinh(c + d*x))*(a**S(2) + b**S(2))**S(2)) + S(3)*a*f*log(a + b*sinh(c + d*x))/(S(2)*d**S(2)*(a**S(2) + b**S(2))**S(2)) - b*(e + f*x)*cosh(c + d*x)/(S(2)*d*(a + b*sinh(c + d*x))**S(2)*(a**S(2) + b**S(2))) - (e + f*x)*log(b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(3)/2)) + (e + f*x)*log(b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))) + S(1))/(S(2)*d*(a**S(2) + b**S(2))**(S(3)/2)) - f*PolyLog(S(2), -b*exp(c + d*x)/(a - sqrt(a**S(2) + b**S(2))))/(S(2)*d**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) + f*PolyLog(S(2), -b*exp(c + d*x)/(a + sqrt(a**S(2) + b**S(2))))/(S(2)*d**S(2)*(a**S(2) + b**S(2))**(S(3)/2)) - f/(S(2)*d**S(2)*(a + b*sinh(c + d*x))*(a**S(2) + b**S(2))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(c + d*x))**S(3)*(e + f*x)), x), x, Integrate(S(1)/((a + b*sinh(c + d*x))**S(3)*(e + f*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate(S(1)/((a + b*sinh(c + d*x))**S(3)*(e + f*x)**S(2)), x), x, Integrate(S(1)/((a + b*sinh(c + d*x))**S(3)*(e + f*x)**S(2)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**n*(c + d*x)**m, x), x, Integrate((a + b*sinh(e + f*x))**n*(c + d*x)**m, x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(3)*(c + d*x)**m, x), x, S(3)*S(2)**(-m + S(-3))*a*b**S(2)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(2)*f*(c + d*x)/d)*exp(-S(2)*c*f/d + S(2)*e)/f - S(3)*S(2)**(-m + S(-3))*a*b**S(2)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(2)*f*(c + d*x)/d)*exp(S(2)*c*f/d - S(2)*e)/f + S(3)**(-m + S(-1))*b**S(3)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(3)*f*(c + d*x)/d)*exp(-S(3)*c*f/d + S(3)*e)/(S(8)*f) + S(3)**(-m + S(-1))*b**S(3)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(3)*f*(c + d*x)/d)*exp(S(3)*c*f/d - S(3)*e)/(S(8)*f) + a**S(3)*(c + d*x)**(m + S(1))/(d*(m + S(1))) + S(3)*a**S(2)*b*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/(S(2)*f) + S(3)*a**S(2)*b*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/(S(2)*f) - S(3)*a*b**S(2)*(c + d*x)**(m + S(1))/(S(2)*d*(m + S(1))) - S(3)*b**S(3)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/(S(8)*f) - S(3)*b**S(3)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/(S(8)*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))**S(2)*(c + d*x)**m, x), x, S(2)**(-m + S(-3))*b**S(2)*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -S(2)*f*(c + d*x)/d)*exp(-S(2)*c*f/d + S(2)*e)/f - S(2)**(-m + S(-3))*b**S(2)*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), S(2)*f*(c + d*x)/d)*exp(S(2)*c*f/d - S(2)*e)/f + a**S(2)*(c + d*x)**(m + S(1))/(d*(m + S(1))) + a*b*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/f + a*b*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/f - b**S(2)*(c + d*x)**(m + S(1))/(S(2)*d*(m + S(1))), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((a + b*sinh(e + f*x))*(c + d*x)**m, x), x, a*(c + d*x)**(m + S(1))/(d*(m + S(1))) + b*(-f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), -f*(c + d*x)/d)*exp(-c*f/d + e)/(S(2)*f) + b*(f*(c + d*x)/d)**(-m)*(c + d*x)**m*Gamma(m + S(1), f*(c + d*x)/d)*exp(c*f/d - e)/(S(2)*f), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/(a + b*sinh(e + f*x)), x), x, Integrate((c + d*x)**m/(a + b*sinh(e + f*x)), x), expand=True, _diff=True, _numerical=True)
assert rubi_test(rubi_integrate((c + d*x)**m/(a + b*sinh(e + f*x))**S(2), x), x, Integrate((c + d*x)**m/(a + b*sinh(e + f*x))**S(2), x), expand=True, _diff=True, _numerical=True)
| 251.514107 | 2,469 | 0.511161 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7,540 | 0.093976 |
9bdab67b4c98abd166793cc86942cb2809dc9195 | 3,759 | py | Python | P13pt/mascril/modules/lockin2gates.py | green-mercury/P13pt | 38c4f079ec071b5b2651415d7bcb03be7a8a1dab | [
"MIT"
] | 3 | 2019-08-27T16:32:29.000Z | 2019-09-28T06:56:14.000Z | P13pt/mascril/modules/lockin2gates.py | DamienFruleux/P13pt | a2cd74e97b8b2e5fb518886237575ae769229e49 | [
"MIT"
] | 4 | 2018-10-29T22:06:54.000Z | 2019-02-28T12:39:03.000Z | P13pt/mascril/modules/lockin2gates.py | DamienFruleux/P13pt | a2cd74e97b8b2e5fb518886237575ae769229e49 | [
"MIT"
] | 1 | 2019-02-13T13:29:18.000Z | 2019-02-13T13:29:18.000Z | from __future__ import print_function
from P13pt.mascril.measurement import MeasurementBase
from P13pt.mascril.parameter import Sweep, String, Folder, Boolean
from P13pt.drivers.bilt import Bilt, BiltVoltageSource, BiltVoltMeter
from P13pt.drivers.zilockin import ZILockin
import time
import numpy as np
import os
class Measurement(MeasurementBase):
    """Two-gate sweep with a lock-in readout.

    Sweeps Vg1 (inner loop) and Vg2 (outer loop) via two Bilt voltage
    sources, reads back the gate voltages with Bilt voltmeters and the
    drain-source response with a Zurich Instruments lock-in, and saves
    one row of observables per (Vg1, Vg2) point.
    """

    # Default parameters shown in the measurement GUI.
    params = {
        'Vg1s': Sweep([0.0]),          # gate-1 sweep values [V]
        'Vg2s': Sweep([0.0]),          # gate-2 sweep values [V]
        'commongate': Boolean(False),  # NOTE(review): accepted by measure() but never used there
        'Rg1': 100e3,                  # series resistance on gate 1 [Ohm], used for leakage calc
        'Rg2': 100e3,                  # series resistance on gate 2 [Ohm], used for leakage calc
        'Rds': 2.2e3,                  # drain-source series resistance [Ohm], used for Rs calc
        'stabilise_time': 0.5,         # settle time after setting Vg1 [s]
        'comment': String(''),
        'data_dir': Folder(r'D:\meso\Desktop\testdata')
    }

    # Quantities picked out of measure()'s locals() and written per row.
    observables = ['Vg1', 'Vg1m', 'Ileak1', 'Vg2', 'Vg2m', 'Ileak2', 'Vds', 'Vdsm', 'Vdsm_std', 'Rs']

    # Expressions evaluated on each row; CALLCOPS aborts on gate leakage.
    alarms = [
        ['np.abs(Ileak1) > 1e-8', MeasurementBase.ALARM_CALLCOPS],
        ['np.abs(Ileak2) > 1e-8', MeasurementBase.ALARM_CALLCOPS],
        ['np.abs(Vg1-Vg2)', MeasurementBase.ALARM_SHOWVALUE]     # useful if we just want to know how much voltage
                                                                 # is applied between the two gates
    ]

    def measure(self, data_dir, Vg1s, Vg2s, commongate, Rg1, Rg2, Rds, stabilise_time, **kwargs):
        """Run the acquisition; returns locals() so the framework can log them."""
        print("===================================")
        print("Starting acquisition script...")

        # initialise instruments
        try:
            print("Setting up DC sources and voltmeters...")
            bilt = Bilt('TCPIP0::192.168.0.2::5025::SOCKET')
            # Stored on self as well so tidy_up() can drive them back to zero.
            self.sourceVg1 = sourceVg1 = BiltVoltageSource(bilt, "I2", "12", "1", 0.01, "Vg1")
            self.sourceVg2 = sourceVg2 = BiltVoltageSource(bilt, "I3", "12", "1", 0.01, "Vg2")
            self.meterVg1 = meterVg1 = BiltVoltMeter(bilt, "I5;C2", "2", "Vg1m")
            self.meterVg2 = meterVg2 = BiltVoltMeter(bilt, "I5;C3", "2", "Vg2m")
            print("DC sources and voltmeters are set up.")
        except:
            print("There has been an error setting up DC sources and voltmeters.")
            raise

        try:
            print("Setting up lock-in amplifier")
            self.lockin = lockin = ZILockin()
            print("Lock in amplifier is set up.")
        except:
            print("There has been an error setting up the lock-in amplifier.")
            raise

        timestamp = time.strftime('%Y-%m-%d_%Hh%Mm%Ss')

        # save lock in settings (in case we need to check something later)
        lockin.save_settings(os.path.join(data_dir, 'ZIsettings', timestamp+'.ZIsettings.txt'))

        # prepare saving data
        filename = timestamp + '.txt'
        self.prepare_saving(os.path.join(data_dir, filename))

        # loops
        Vds = lockin.rms_amp          # excitation amplitude applied to the sample
        for Vg2 in Vg2s:
            sourceVg2.set_voltage(Vg2)
            for Vg1 in Vg1s:
                # Bail out cleanly if the user pressed "stop" in the GUI.
                if self.flags['quit_requested']:
                    return locals()
                sourceVg1.set_voltage(Vg1)

                # stabilise
                time.sleep(stabilise_time)

                # measure
                Vg1m = meterVg1.get_voltage()
                Vg2m = meterVg2.get_voltage()
                Vdsm, Vdsm_std = lockin.poll_data()

                # do calculations
                # Gate leakage from the drop across the series resistors.
                Ileak1 = (Vg1-Vg1m)/Rg1
                Ileak2 = (Vg2-Vg2m)/Rg2
                # Sample resistance from the voltage divider with Rds.
                Rs = Rds*Vdsm/(Vds-Vdsm)

                # save data
                self.save_row(locals())

        print("Acquisition done.")

        return locals()

    def tidy_up(self):
        """Close the data file and drive all outputs back to a safe state."""
        self.end_saving()
        print("Driving all voltages back to zero...")
        self.sourceVg1.set_voltage(0.)
        self.sourceVg2.set_voltage(0.)
        self.lockin.tidy_up()
if __name__ == "__main__":
    # Allow running the acquisition directly as a script.
    m = Measurement()
    m.run()
9bdb227b8986247b3e187af1afb2dd0572b47533 | 1,281 | py | Python | scripts/parse_coredump_bin.py | lucasdietrich/AVRTOS | a8a3b7890ed54fbb1bd718fd13b0eb620ecf9b13 | [
"Apache-2.0"
] | 3 | 2021-12-10T21:16:03.000Z | 2022-03-20T08:09:53.000Z | scripts/parse_coredump_bin.py | Adecy/atmega328p-multithreading | b0f2da9f31fba7bcccf5f06f827af87494a0bcb3 | [
"Apache-2.0"
] | null | null | null | scripts/parse_coredump_bin.py | Adecy/atmega328p-multithreading | b0f2da9f31fba7bcccf5f06f827af87494a0bcb3 | [
"Apache-2.0"
] | null | null | null | import re
import struct
from typing import List
class Core:
    """Snapshot of AVR CPU registers extracted from one core-dump record.

    ``registers`` is filled by the parser as (SP, PC, r0..r31, SREG).
    """

    def __init__(self):
        # Populated externally with the unpacked struct values.
        self.registers = []

    def SP(self) -> int:
        """Stack pointer: first 16-bit word of the record."""
        return self.registers[0]

    def PC(self) -> int:
        """Program counter: second 16-bit word of the record."""
        return self.registers[1]

    def SREG(self) -> int:
        """Status register: last byte of the record."""
        return self.registers[-1]

    def r(self, i: int) -> int:
        """General-purpose register r<i>, stored right after SP and PC."""
        return self.registers[2 + i]

    def __repr__(self):
        regs = " ".join(f"r{n}={self.r(n)}" for n in range(32))
        header = f" SP={self.SP():04X} PC={self.PC():04X} SREG={self.SREG():02X} "
        return "Core\n" + header + regs
# A dump record is "<<<<" + 37 payload bytes + ">>>>".  re.DOTALL is required:
# without it '.' does not match a newline (0x0A) byte, so any dump whose
# payload happens to contain the value 10 was silently skipped.
re_coredump = re.compile(b"<<<<.{37}>>>>", re.DOTALL)


def parse_core(filename: str) -> List[Core]:
    """Extract every core-dump record found in *filename*.

    The 37-byte payload unpacks (native byte order) as two 16-bit words
    (SP, PC) followed by 33 bytes (r0..r31, SREG), matching Core's accessors.
    """
    with open(filename, "br") as fp:
        content = fp.read()
    match = re_coredump.findall(content)
    cores = []
    print(match)
    for parsed in match:
        data = parsed[4:41]  # drop the 4-byte "<<<<" marker, keep the payload
        # https://docs.python.org/3/library/struct.html
        core = Core()
        core.registers = struct.unpack("HHB" + "B" * 32, data)
        cores.append(core)
        print(core.registers, core)
    return cores
def decode(core: bytes):
    """Placeholder for decoding a raw record payload; not implemented yet."""
    return
if __name__ == "__main__":
    import sys
    # Usage: parse_coredump_bin.py <dump-file>
    if len(sys.argv) == 2:
        parse_core(sys.argv[1])
    else:
        raise Exception("argument problem")
9bdd49b2261226a6e0a3e0a8388b9e1d582f8dd9 | 1,046 | py | Python | src/utils/time_helpers.py | pdkary/black-scholes-plus | 1df6d0e18416900ce9380e5428da58af6bb785fa | [
"MIT"
] | 2 | 2021-02-18T04:22:55.000Z | 2021-02-20T23:40:29.000Z | src/utils/time_helpers.py | pdkary/black-scholes-plus | 1df6d0e18416900ce9380e5428da58af6bb785fa | [
"MIT"
] | null | null | null | src/utils/time_helpers.py | pdkary/black-scholes-plus | 1df6d0e18416900ce9380e5428da58af6bb785fa | [
"MIT"
] | null | null | null | from datetime import datetime,timedelta
import re
def get_time_to_expiry(maturity):
    """Whole days from now until *maturity* ("YYYY-MM-DD").

    Returns 0 when maturity is the literal 0 or lies in the past.
    """
    if maturity == 0:
        return 0
    expiry = datetime.strptime(maturity, "%Y-%m-%d")
    remaining_days = (expiry - datetime.today()).days
    return max(remaining_days, 0)
def get_d_m_y(maturity):
    """Split a "YYYY-MM-DD" string into a (day, month, year) tuple."""
    parsed = datetime.strptime(maturity, "%Y-%m-%d")
    return parsed.day, parsed.month, parsed.year
def interval_to_timedelta(interval: str):
    """Convert an interval string ("5m", "2h", "1d", "3mo", "1y") to a timedelta.

    Months count as 4 weeks, years as 52 weeks. Returns None for an
    unrecognised unit suffix.
    """
    match = re.search(r"(\d+)(\w+)", interval)
    # Bugfix: the original called match.group1(1), which raised
    # AttributeError on every input; the correct accessor is group(1).
    val = int(match.group(1))
    typ = match.group(2)
    if typ == "m":
        return timedelta(minutes=val)
    elif typ == "h":
        return timedelta(hours=val)
    elif typ == "d":
        return timedelta(days=val)
    elif typ == "mo":
        return timedelta(weeks=4 * val)
    elif typ == "y":
        return timedelta(weeks=52 * val)
def timestamp_to_string(stmp):
    """Format a datetime/timestamp-like object as "YYYY-MM-DD"."""
    return format(stmp, "%Y-%m-%d")
9bdd52b91453871245afa987abaad8fffe050c55 | 484 | py | Python | scripts/publish_source.py | apguerrera/DreamFrames | ad6c7c081378f02010583dbdcb33e8ff112dd94b | [
"MIT"
] | 2 | 2020-06-09T02:12:21.000Z | 2021-02-06T07:33:31.000Z | scripts/publish_source.py | apguerrera/DreamFrames | ad6c7c081378f02010583dbdcb33e8ff112dd94b | [
"MIT"
] | null | null | null | scripts/publish_source.py | apguerrera/DreamFrames | ad6c7c081378f02010583dbdcb33e8ff112dd94b | [
"MIT"
] | 2 | 2019-05-01T01:53:42.000Z | 2020-05-11T14:11:56.000Z | from brownie import *
from .contract_addresses import *
import time
def publish():
    """Return True when contract source should be published.

    Publishing is skipped on the local "development" network.
    """
    if network.show_active() == "development":
        return False
    else:
        return True
def main():
    # Script entry point: publish the DreamFramesNFT contract source.
    publish_Goober_nft()
def publish_Goober_nft():
    """Publish the DreamFramesNFT contract source for the active network.

    Looks up the deployed address in CONTRACTS; does nothing when no
    address is configured for this network.
    """
    dream_frames_NFT_address = CONTRACTS[network.show_active()]["dream_frames_NFT"]
    if dream_frames_NFT_address != '':
        # Bugfix: the original passed the undefined name `goober_nft_address`
        # to DreamFramesNFT.at(), raising NameError; use the address we just read.
        goober_nft = DreamFramesNFT.at(dream_frames_NFT_address)
        DreamFramesNFT.publish_source(goober_nft)
| 24.2 | 83 | 0.706612 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 33 | 0.068182 |
9bde10aee3baf3325c77d739728c73601b1e2704 | 1,787 | py | Python | examples/demo_classic.py | ria02/InquirerPy | e748d21846ae91553cfb78f61464e35432e04bea | [
"MIT"
] | 120 | 2021-01-17T08:57:14.000Z | 2022-03-19T16:17:41.000Z | examples/demo_classic.py | ria02/InquirerPy | e748d21846ae91553cfb78f61464e35432e04bea | [
"MIT"
] | 37 | 2021-01-29T12:10:11.000Z | 2022-03-08T04:47:39.000Z | examples/demo_classic.py | ria02/InquirerPy | e748d21846ae91553cfb78f61464e35432e04bea | [
"MIT"
] | 5 | 2021-04-29T21:57:14.000Z | 2022-02-01T12:34:21.000Z | # NOTE: Following example requires boto3 package.
import boto3
from InquirerPy import prompt
from InquirerPy.exceptions import InvalidArgument
from InquirerPy.validator import PathValidator
client = boto3.client("s3")
def get_bucket(_):
    """Return the names of all S3 buckets visible to the client (prompt choices)."""
    buckets = client.list_buckets()["Buckets"]
    return [entry["Name"] for entry in buckets]
def walk_s3_bucket(result):
    """Return every object key in the bucket chosen in result["bucket"].

    *result* is the InquirerPy answers mapping collected so far.
    """
    keys = []
    paginator = client.get_paginator("list_objects")
    # Fix: the original reused the name `result` as the page loop variable,
    # shadowing the answers parameter. Also guard against pages without a
    # "Contents" key (empty bucket) which previously raised KeyError.
    for page in paginator.paginate(Bucket=result["bucket"]):
        for obj in page.get("Contents", []):
            keys.append(obj["Key"])
    return keys
def is_upload(result):
    """True when the first prompt answer selected the "Upload" action."""
    action = result[0]
    return action == "Upload"
# Prompt definitions for the classic, dict-based InquirerPy `prompt` API.
questions = [
    {
        # Step 1: choose the S3 action.
        "message": "Select an S3 action:",
        "type": "list",
        "choices": ["Upload", "Download"],
    },
    {
        # Upload branch only: pick a local file to upload.
        "message": "Enter the filepath to upload:",
        "type": "filepath",
        "when": is_upload,
        "validate": PathValidator(),
        "only_files": True,
    },
    {
        # Bucket list is fetched lazily; spinner shows while boto3 responds.
        "message": "Select a bucket:",
        "type": "fuzzy",
        "choices": get_bucket,
        "name": "bucket",
        "spinner_enable": True,
    },
    {
        # Download branch only: pick one or more keys from the chosen bucket.
        "message": "Select files to download:",
        "type": "fuzzy",
        "when": lambda _: not is_upload(_),
        "choices": walk_s3_bucket,
        "multiselect": True,
        "spinner_enable": True,
    },
    {
        # Download branch only: local destination directory.
        "message": "Enter destination folder:",
        "type": "filepath",
        "when": lambda _: not is_upload(_),
        "only_directories": True,
        "validate": PathValidator(),
    },
    {"message": "Confirm?", "type": "confirm", "default": False},
]

# InquirerPy raises InvalidArgument when a choices callable yields nothing.
try:
    result = prompt(questions, vi_mode=True)
except InvalidArgument:
    print("No available choices")
| 24.819444 | 74 | 0.593173 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 639 | 0.357583 |
9bdfb44396698ccfce25133f1a6c45d37c79360e | 1,999 | py | Python | python/level1_single_api/9_amct/amct_pytorch/tensor_decompose/src/common/model.py | Ascend/samples | 5e060ddf8c502cf0e248ecbe1c8986e95351cbbd | [
"Apache-2.0"
] | 25 | 2020-11-20T09:01:35.000Z | 2022-03-29T10:35:38.000Z | python/level1_single_api/9_amct/amct_pytorch/tensor_decompose/src/common/model.py | Ascend/samples | 5e060ddf8c502cf0e248ecbe1c8986e95351cbbd | [
"Apache-2.0"
] | 5 | 2021-02-28T20:49:37.000Z | 2022-03-04T21:50:27.000Z | python/level1_single_api/9_amct/amct_pytorch/tensor_decompose/src/common/model.py | Ascend/samples | 5e060ddf8c502cf0e248ecbe1c8986e95351cbbd | [
"Apache-2.0"
] | 16 | 2020-12-06T07:26:13.000Z | 2022-03-01T07:51:55.000Z | """
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
import torch.nn as nn
import torch.nn.functional as F
class Net(nn.Module):
    """Small two-convolution classifier for single-channel images.

    Two 3x3 valid convolutions (each shrinking the spatial size by 2)
    followed by a single fully connected layer; outputs log-probabilities.
    """

    def __init__(self, c_in=1, h_in=28, w_in=28, classes=10):
        """
        Build and initialise the network.

        Args:
            c_in: Input tensor channels.
            h_in: Input tensor height.
            w_in: Input tensor width.
            classes: Number of classes.
        """
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(c_in, 128, 3, 1)
        self.conv2 = nn.Conv2d(self.conv1.out_channels, 128, 3, 1)
        self.relu = nn.ReLU(inplace=True)
        self.flatten = nn.Flatten(1)
        # Two valid 3x3 convs reduce each spatial dimension by 4 in total.
        self.fc = nn.Linear((h_in - 4) * (w_in - 4) * 128, classes)
        # Kaiming init for conv weights, zero biases, small-normal fc weights.
        for conv in (self.conv1, self.conv2):
            nn.init.kaiming_normal_(conv.weight, mode='fan_out')
            nn.init.constant_(conv.bias, 0)
        nn.init.normal_(self.fc.weight, 0, 0.01)
        nn.init.constant_(self.fc.bias, 0)

    def forward(self, x):
        """
        Run a forward pass.

        Args:
            x: Input tensor of images.

        Returns:
            Per-class log-probabilities (log-softmax over dim 1).
        """
        features = self.relu(self.conv1(x))
        features = self.relu(self.conv2(features))
        logits = self.fc(self.flatten(features))
        return F.log_softmax(logits, dim=1)
9bdfc1ceca2e6426e5eb5dda16444705d6a08581 | 7,211 | py | Python | layers/poky/scripts/lib/argparse_oe.py | dtischler/px30-test | 55dce0b7aff1c4a7dea3ac94f94cc9c67fba7c9f | [
"Apache-2.0"
] | 53 | 2018-02-28T08:51:32.000Z | 2022-02-28T06:49:23.000Z | layers/poky/scripts/lib/argparse_oe.py | dtischler/px30-test | 55dce0b7aff1c4a7dea3ac94f94cc9c67fba7c9f | [
"Apache-2.0"
] | 27 | 2018-01-25T00:26:53.000Z | 2020-08-09T05:20:04.000Z | layers/poky/scripts/lib/argparse_oe.py | dtischler/px30-test | 55dce0b7aff1c4a7dea3ac94f94cc9c67fba7c9f | [
"Apache-2.0"
] | 51 | 2018-02-21T04:46:08.000Z | 2022-03-02T04:20:41.000Z | import sys
import argparse
from collections import defaultdict, OrderedDict
class ArgumentUsageError(Exception):
    """Raised (and caught) to display the help text for a usage problem.

    ``subcommand`` optionally names the subparser whose help should be shown.
    """

    def __init__(self, message, subcommand=None):
        # Stored for the handler that catches this and prints the help itself.
        self.message, self.subcommand = message, subcommand
class ArgumentParser(argparse.ArgumentParser):
    """Our own version of argparse's ArgumentParser

    Adds grouped/ordered subcommand help (see add_subparser_group) and
    routes usage errors to the subparser the user actually invoked.
    """
    def __init__(self, *args, **kwargs):
        # Use the grouping-aware help formatter unless the caller overrides it.
        kwargs.setdefault('formatter_class', OeHelpFormatter)
        # Maps group name -> (description, sort order); filled by add_subparser_group().
        self._subparser_groups = OrderedDict()
        super(ArgumentParser, self).__init__(*args, **kwargs)
        self._positionals.title = 'arguments'
        self._optionals.title = 'options'

    def error(self, message):
        """error(message: string)

        Prints a help message incorporating the message to stderr and
        exits.
        """
        self._print_message('%s: error: %s\n' % (self.prog, message), sys.stderr)
        self.print_help(sys.stderr)
        sys.exit(2)

    def error_subcommand(self, message, subcommand):
        """Report *message* via the named subcommand's parser (exits).

        Falls back to this parser's own error() when *subcommand* is falsy
        or unknown.
        """
        if subcommand:
            action = self._get_subparser_action()
            try:
                subparser = action._name_parser_map[subcommand]
            except KeyError:
                self.error('no subparser for name "%s"' % subcommand)
            else:
                subparser.error(message)
        self.error(message)

    def add_subparsers(self, *args, **kwargs):
        """Like argparse's add_subparsers(), but wired to our subclasses."""
        if 'dest' not in kwargs:
            # Record the chosen subcommand on the namespace so _get_subparser()
            # can locate the right parser afterwards.
            kwargs['dest'] = '_subparser_name'
        ret = super(ArgumentParser, self).add_subparsers(*args, **kwargs)
        # Need a way of accessing the parent parser
        ret._parent_parser = self
        # Ensure our class gets instantiated
        ret._parser_class = ArgumentSubParser
        # Hacky way of adding a method to the subparsers object
        ret.add_subparser_group = self.add_subparser_group
        return ret

    def add_subparser_group(self, groupname, groupdesc, order=0):
        """Register a help group (description + sort order) for subcommands."""
        self._subparser_groups[groupname] = (groupdesc, order)

    def parse_args(self, args=None, namespace=None):
        """Parse arguments, using the correct subparser to show the error."""
        args, argv = self.parse_known_args(args, namespace)
        if argv:
            message = 'unrecognized arguments: %s' % ' '.join(argv)
            if self._subparsers:
                subparser = self._get_subparser(args)
                subparser.error(message)
            else:
                self.error(message)
            sys.exit(2)
        return args

    def _get_subparser(self, args):
        """Return the subparser selected in the parsed *args* (exits on failure)."""
        action = self._get_subparser_action()
        if action.dest == argparse.SUPPRESS:
            self.error('cannot get subparser, the subparser action dest is suppressed')
        name = getattr(args, action.dest)
        try:
            return action._name_parser_map[name]
        except KeyError:
            self.error('no subparser for name "%s"' % name)

    def _get_subparser_action(self):
        """Return the _SubParsersAction holding the subcommands (exits if none)."""
        if not self._subparsers:
            self.error('cannot return the subparser action, no subparsers added')
        for action in self._subparsers._group_actions:
            if isinstance(action, argparse._SubParsersAction):
                return action
class ArgumentSubParser(ArgumentParser):
    """Parser instantiated for each subcommand.

    Accepts extra 'group'/'order' keyword arguments that place the
    subcommand in the grouped help listing, and works around argparse's
    handling of optional positionals mixed with options.
    """
    def __init__(self, *args, **kwargs):
        # 'group'/'order' are consumed here; OeHelpFormatter reads them back.
        if 'group' in kwargs:
            self._group = kwargs.pop('group')
        if 'order' in kwargs:
            self._order = kwargs.pop('order')
        super(ArgumentSubParser, self).__init__(*args, **kwargs)

    def parse_known_args(self, args=None, namespace=None):
        # This works around argparse not handling optional positional arguments being
        # intermixed with other options. A pretty horrible hack, but we're not left
        # with much choice given that the bug in argparse exists and it's difficult
        # to subclass.
        # Borrowed from http://stackoverflow.com/questions/20165843/argparse-how-to-handle-variable-number-of-arguments-nargs
        # with an extra workaround (in format_help() below) for the positional
        # arguments disappearing from the --help output, as well as structural tweaks.
        # Originally simplified from http://bugs.python.org/file30204/test_intermixed.py
        positionals = self._get_positional_actions()
        for action in positionals:
            # deactivate positionals
            action.save_nargs = action.nargs
            action.nargs = 0

        # First pass: parse all options with positionals disabled.
        namespace, remaining_args = super(ArgumentSubParser, self).parse_known_args(args, namespace)
        for action in positionals:
            # remove the empty positional values from namespace
            if hasattr(namespace, action.dest):
                delattr(namespace, action.dest)
        for action in positionals:
            action.nargs = action.save_nargs
        # parse positionals
        # Second pass: parse only what was left over, now with positionals active.
        namespace, extras = super(ArgumentSubParser, self).parse_known_args(remaining_args, namespace)
        return namespace, extras

    def format_help(self):
        # Quick, restore the positionals!
        positionals = self._get_positional_actions()
        for action in positionals:
            if hasattr(action, 'save_nargs'):
                action.nargs = action.save_nargs
        # NOTE(review): super(ArgumentParser, self) deliberately skips our
        # ArgumentParser subclass in the MRO and calls argparse's own
        # format_help -- confirm this is intended before changing it.
        return super(ArgumentParser, self).format_help()
class OeHelpFormatter(argparse.HelpFormatter):
    """Help formatter that renders subcommands grouped and sorted according
    to the metadata registered via ArgumentParser.add_subparser_group()."""
    def _format_action(self, action):
        if hasattr(action, '_get_subactions'):
            # subcommands list
            groupmap = defaultdict(list)  # group name -> subactions in that group
            ordermap = {}                 # subcommand name -> sort order within group
            subparser_groups = action._parent_parser._subparser_groups
            # Groups sorted by their registered order, highest first.
            groups = sorted(subparser_groups.keys(), key=lambda item: subparser_groups[item][1], reverse=True)
            for subaction in self._iter_indented_subactions(action):
                parser = action._name_parser_map[subaction.dest]
                group = getattr(parser, '_group', None)
                groupmap[group].append(subaction)
                if group not in groups:
                    groups.append(group)
                order = getattr(parser, '_order', 0)
                ordermap[subaction.dest] = order

            lines = []
            # Only indent under group headings when there is more than one group.
            if len(groupmap) > 1:
                groupindent = '  '
            else:
                groupindent = ''
            for group in groups:
                subactions = groupmap[group]
                if not subactions:
                    continue
                if groupindent:
                    if not group:
                        group = 'other'
                    groupdesc = subparser_groups.get(group, (group, 0))[0]
                    lines.append('  %s:' % groupdesc)
                # Subcommands within a group sorted by their order, highest first.
                for subaction in sorted(subactions, key=lambda item: ordermap[item.dest], reverse=True):
                    lines.append('%s%s' % (groupindent, self._format_action(subaction).rstrip()))
            return '\n'.join(lines)
        else:
            return super(OeHelpFormatter, self)._format_action(action)
def int_positive(value):
    """argparse `type=` helper: parse *value* as an int and require it > 0."""
    parsed = int(value)
    if parsed <= 0:
        raise argparse.ArgumentTypeError(
            "%s is not a positive int value" % value)
    return parsed
| 40.740113 | 125 | 0.61947 | 6,937 | 0.962002 | 0 | 0 | 0 | 0 | 0 | 0 | 1,605 | 0.222577 |
9be072e436a510815bc7fcb98815452de49a5068 | 491 | py | Python | datahub/omis/invoice/migrations/0006_invoice_contact_email.py | Staberinde/data-hub-api | 3d0467dbceaf62a47158eea412a3dba827073300 | [
"MIT"
] | 6 | 2019-12-02T16:11:24.000Z | 2022-03-18T10:02:02.000Z | datahub/omis/invoice/migrations/0006_invoice_contact_email.py | Staberinde/data-hub-api | 3d0467dbceaf62a47158eea412a3dba827073300 | [
"MIT"
] | 1,696 | 2019-10-31T14:08:37.000Z | 2022-03-29T12:35:57.000Z | datahub/omis/invoice/migrations/0006_invoice_contact_email.py | Staberinde/data-hub-api | 3d0467dbceaf62a47158eea412a3dba827073300 | [
"MIT"
] | 9 | 2019-11-22T12:42:03.000Z | 2021-09-03T14:25:05.000Z | # Generated by Django 2.0.1 on 2018-01-03 15:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('omis_invoice', '0005_populate_billing_fields'),
]
operations = [
migrations.AddField(
model_name='invoice',
name='contact_email',
field=models.EmailField(blank=True, help_text='Email address of the contact at the time of invoice creation.', max_length=255),
),
]
| 25.842105 | 139 | 0.645621 | 398 | 0.810591 | 0 | 0 | 0 | 0 | 0 | 0 | 178 | 0.362525 |
9be124ef67f3b453a9c7cec4c9b6b95772c4ee79 | 71 | py | Python | src/waldur_openstack/openstack/__init__.py | geant-multicloud/MCMS-mastermind | 81333180f5e56a0bc88d7dad448505448e01f24e | [
"MIT"
] | 26 | 2017-10-18T13:49:58.000Z | 2021-09-19T04:44:09.000Z | src/waldur_openstack/openstack/__init__.py | geant-multicloud/MCMS-mastermind | 81333180f5e56a0bc88d7dad448505448e01f24e | [
"MIT"
] | 14 | 2018-12-10T14:14:51.000Z | 2021-06-07T10:33:39.000Z | src/waldur_openstack/openstack/__init__.py | geant-multicloud/MCMS-mastermind | 81333180f5e56a0bc88d7dad448505448e01f24e | [
"MIT"
] | 32 | 2017-09-24T03:10:45.000Z | 2021-10-16T16:41:09.000Z | default_app_config = 'waldur_openstack.openstack.apps.OpenStackConfig'
| 35.5 | 70 | 0.873239 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 0.690141 |
9be66a9b0c5bd06f8460d9a3b8c3707b90aaf766 | 1,513 | py | Python | bin/system-setup.py | filipecosta90/readies | ffd48b39cd7c124e08eb3d770a0e0581445ffa37 | [
"BSD-3-Clause"
] | 9 | 2019-12-17T17:57:54.000Z | 2022-02-04T15:43:35.000Z | bin/system-setup.py | filipecosta90/readies | ffd48b39cd7c124e08eb3d770a0e0581445ffa37 | [
"BSD-3-Clause"
] | 87 | 2021-01-06T08:59:37.000Z | 2022-02-16T05:10:03.000Z | bin/system-setup.py | filipecosta90/readies | ffd48b39cd7c124e08eb3d770a0e0581445ffa37 | [
"BSD-3-Clause"
] | 11 | 2019-12-11T13:30:23.000Z | 2022-01-06T12:21:26.000Z | #!/bin/sh
''''[ ! -z $VIRTUAL_ENV ] && exec python -u -- "$0" ${1+"$@"}; command -v python3 > /dev/null && exec python3 -u -- "$0" ${1+"$@"}; exec python2 -u -- "$0" ${1+"$@"} # '''
import sys
import os
import argparse
HERE = os.path.dirname(__file__)
ROOT = os.path.abspath(os.path.join(HERE, ".."))
sys.path.insert(0, ROOT)
import paella
#----------------------------------------------------------------------------------------------
class SystemSetup(paella.Setup):
    """Per-platform setup hooks invoked by paella; each one currently just
    reports that it ran (placeholders for real package installation)."""

    def __init__(self, nop=False):
        paella.Setup.__init__(self, nop)

    def common_first(self):
        # Place for common prerequisites, e.g.:
        #   self.install("..."), self.group_install("..."),
        #   self.setup_pip(), self.pip_install("...")
        print("common_first")

    def debian_compat(self):
        print("debian_compat")

    def redhat_compat(self):
        print("redhat_compat")

    def fedora(self):
        print("fedora")

    def macos(self):
        print("macos")

    def common_last(self):
        print("common_last")
#----------------------------------------------------------------------------------------------
# Command-line interface: run the platform setup, optionally as a dry run.
parser = argparse.ArgumentParser(description='Set up system for build.')
parser.add_argument('-n', '--nop', action="store_true", help='no operation')
# parser.add_argument('--bool', action="store_true", help="flag")
# parser.add_argument('--int', type=int, default=1, help='number')
# parser.add_argument('--str', type=str, default='str', help='string')
args = parser.parse_args()
SystemSetup(nop = args.nop).setup()
| 29.666667 | 171 | 0.540648 | 556 | 0.367482 | 0 | 0 | 0 | 0 | 0 | 0 | 792 | 0.523463 |
9be827bbd8f9d8f8bfa67b4e37d5499806d0fe07 | 1,261 | py | Python | server/block-pyhook.py | Strangemother/python-websocket-server | d52ae1708ddc13e81360ce3dd01d826db81e0204 | [
"MIT"
] | null | null | null | server/block-pyhook.py | Strangemother/python-websocket-server | d52ae1708ddc13e81360ce3dd01d826db81e0204 | [
"MIT"
] | null | null | null | server/block-pyhook.py | Strangemother/python-websocket-server | d52ae1708ddc13e81360ce3dd01d826db81e0204 | [
"MIT"
] | null | null | null | import pyHook
from threading import Timer
import win32gui
import logging
class blockInput():
    """Temporarily block keyboard/mouse input on Windows via pyHook.

    block() installs low-level hooks whose callbacks return False (which
    discards the events) and arms a Timer so the block always lifts after
    a timeout -- otherwise the machine could be locked out permanently.
    """
    def OnKeyboardEvent(self,event):
        # Returning False tells pyHook to swallow the event.
        return False

    def OnMouseEvent(self,event):
        # Returning False tells pyHook to swallow the event.
        return False

    def unblock(self):
        """Cancel the safety timer and remove any installed hooks."""
        logging.info(" -- Unblock!")
        if self.t.is_alive():
            self.t.cancel()
        # Unhook raises if the corresponding hook was never installed;
        # swallow that so unblock() is safe to call in any state.
        try: self.hm.UnhookKeyboard()
        except: pass
        try: self.hm.UnhookMouse()
        except: pass

    def block(self, timeout = 10, keyboard = True, mouse = True):
        """Block the selected input devices for at most *timeout* seconds."""
        # Safety net: automatically unblock once the timeout expires.
        self.t = Timer(timeout, self.unblock)
        self.t.start()
        logging.info(" -- Block!")
        if mouse:
            self.hm.MouseAll = self.OnMouseEvent
            self.hm.HookMouse()
        if keyboard:
            self.hm.KeyAll = self.OnKeyboardEvent
            self.hm.HookKeyboard()
        # Pump pending Windows messages so the hooks take effect.
        win32gui.PumpWaitingMessages()

    def __init__(self):
        self.hm = pyHook.HookManager()
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    block = blockInput()
    block.block()
    import time
    # Demo: keep the process alive ~10 s while input is blocked, then unblock.
    t0 = time.time()
    while time.time() - t0 < 10:
        time.sleep(1)
        print(time.time() - t0)
    block.unblock()
    logging.info("Done.")
| 24.25 | 66 | 0.563045 | 877 | 0.69548 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.0341 |
9be85a856102dd5c673bbdbba790b94ac201e3f7 | 302 | py | Python | algorithms/implementation/cut_the_sticks.py | avenet/hackerrank | e522030a023af4ff50d5fc64bd3eba30144e006c | [
"MIT"
] | null | null | null | algorithms/implementation/cut_the_sticks.py | avenet/hackerrank | e522030a023af4ff50d5fc64bd3eba30144e006c | [
"MIT"
] | null | null | null | algorithms/implementation/cut_the_sticks.py | avenet/hackerrank | e522030a023af4ff50d5fc64bd3eba30144e006c | [
"MIT"
] | null | null | null | def make_cut(l):
smallest = min(l)
return [
x - smallest
for x
in l
if x - smallest > 0
]
# First input line: stick count (consumed but otherwise unused here).
length = int(input())
# Second input line: the stick lengths.
current = list(
    map(
        int,
        input().split()
    )
)
# Repeatedly cut by the shortest stick, printing how many sticks remain
# before each cut.
while current:
    print(len(current))
    current = make_cut(current)
| 12.583333 | 31 | 0.490066 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
9be888d23e35c264f22caee9146fdcf281c9f560 | 2,631 | py | Python | datasets.py | RAMitchell/GLM-Benchmarks | 68a5e2a45f299231dded8227cb6b40cbfefd589a | [
"MIT"
] | 1 | 2020-12-28T09:27:24.000Z | 2020-12-28T09:27:24.000Z | datasets.py | RAMitchell/GLM-Benchmarks | 68a5e2a45f299231dded8227cb6b40cbfefd589a | [
"MIT"
] | null | null | null | datasets.py | RAMitchell/GLM-Benchmarks | 68a5e2a45f299231dded8227cb6b40cbfefd589a | [
"MIT"
] | null | null | null | import gzip
import numpy as np
import os
import pandas as pd
import shutil
import sys
import tarfile
import urllib
import zipfile
from scipy.sparse import vstack
from sklearn import datasets
from sklearn.externals.joblib import Memory
if sys.version_info[0] >= 3:
from urllib.request import urlretrieve
else:
from urllib import urlretrieve
mem = Memory("./mycache")
@mem.cache
def get_higgs(num_rows=None):
    """Download (once) and load the UCI HIGGS dataset.

    Returns (X, y); column 0 of the CSV is the label. Optionally truncated
    to the first *num_rows* rows. Results are disk-cached by joblib.
    """
    url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/00280/HIGGS.csv.gz'
    filename = 'HIGGS.csv'
    if not os.path.isfile(filename):
        # Fetch the gzipped CSV and decompress it next to the script.
        urlretrieve(url, filename + '.gz')
        with gzip.open(filename + '.gz', 'rb') as f_in:
            with open(filename, 'wb') as f_out:
                shutil.copyfileobj(f_in, f_out)
    higgs = pd.read_csv(filename)
    X = higgs.iloc[:, 1:].values
    y = higgs.iloc[:, 0].values
    if num_rows is not None:
        X = X[0:num_rows]
        y = y[0:num_rows]
    return X, y
@mem.cache
def get_cover_type(num_rows=None):
    """Fetch the UCI covertype dataset via scikit-learn.

    Returns (X, y), optionally truncated to the first *num_rows* rows.
    Results are disk-cached by joblib.
    """
    covtype = datasets.fetch_covtype()
    X, y = covtype.data, covtype.target
    if num_rows is not None:
        X, y = X[:num_rows], y[:num_rows]
    return X, y
@mem.cache
def get_synthetic_regression(num_rows=None):
    """Generate a synthetic dense regression problem (default: 10M samples).

    Returns (X, y) from sklearn's make_regression; disk-cached by joblib.
    """
    n_samples = 10000000 if num_rows is None else num_rows
    return datasets.make_regression(n_samples=n_samples, bias=100, noise=1.0)
@mem.cache
def get_year(num_rows=None):
    """Download (once) and load the UCI YearPredictionMSD dataset.

    Returns (X, y); column 0 of the file is the release year. Optionally
    truncated to the first *num_rows* rows. Results are disk-cached by joblib.
    """
    url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/00203/YearPredictionMSD.txt.zip'
    filename = 'YearPredictionMSD.txt'
    if not os.path.isfile(filename):
        # Fetch and unzip the text file next to the script.
        urlretrieve(url, filename + '.zip')
        zip_ref = zipfile.ZipFile(filename + '.zip', 'r')
        zip_ref.extractall()
        zip_ref.close()
    year = pd.read_csv('YearPredictionMSD.txt', header=None)
    X = year.iloc[:, 1:].values
    y = year.iloc[:, 0].values
    if num_rows is not None:
        X = X[0:num_rows]
        y = y[0:num_rows]
    return X, y
@mem.cache
def get_url(num_rows=None):
    """Download (if missing) and load the URL reputation dataset (svmlight files).

    Returns (X, y) where X is a sparse matrix stacked over 120 daily files and
    negative labels are remapped to 0.0.
    """
    url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/url/url_svmlight.tar.gz'
    filename = 'url_svmlight.tar.gz'
    if not os.path.isfile(filename):
        urlretrieve(url, filename)
        tar = tarfile.open(filename, "r:gz")
        tar.extractall()
        tar.close()
    num_files = 120
    files = ['url_svmlight/Day{}.svm'.format(day) for day in range(num_files)]
    # load_svmlight_files returns [X0, y0, X1, y1, ...]; split by striding.
    data = datasets.load_svmlight_files(files)
    X = vstack(data[::2])
    y = np.concatenate(data[1::2])
    y[y < 0.0] = 0.0  # normalize the -1/+1 labels to 0/1
    if num_rows is not None:
        X = X[0:num_rows]
        y = y[0:num_rows]
    return X, y
| 26.049505 | 101 | 0.645382 | 0 | 0 | 0 | 0 | 2,242 | 0.852147 | 0 | 0 | 404 | 0.153554 |
9bebc0cc96c6c240408e6ad6840dd2690e533560 | 2,458 | py | Python | lv_set/Main.py | Ramesh-X/Level-Set | 95e01171a284f2aadfd1eb88dde1c26e4f4878e9 | [
"MIT"
] | 122 | 2017-11-10T09:31:17.000Z | 2022-03-30T03:33:02.000Z | lv_set/Main.py | Ramesh-X/Level-Set | 95e01171a284f2aadfd1eb88dde1c26e4f4878e9 | [
"MIT"
] | 1 | 2018-11-05T05:38:50.000Z | 2018-11-05T07:44:08.000Z | lv_set/Main.py | Ramesh-X/Level-Set | 95e01171a284f2aadfd1eb88dde1c26e4f4878e9 | [
"MIT"
] | 61 | 2017-11-06T04:49:25.000Z | 2022-03-10T08:41:26.000Z | """
This python code demonstrates an edge-based active contour model as an application of the
Distance Regularized Level Set Evolution (DRLSE) formulation in the following paper:
C. Li, C. Xu, C. Gui, M. D. Fox, "Distance Regularized Level Set Evolution and Its Application to Image Segmentation",
IEEE Trans. Image Processing, vol. 19 (12), pp. 3243-3254, 2010.
Author: Ramesh Pramuditha Rathnayake
E-mail: rsoft.ramesh@gmail.com
Released Under MIT License
"""
import numpy as np
from skimage.io import imread
from lv_set.find_lsf import find_lsf
from lv_set.potential_func import *
from lv_set.show_fig import draw_all
def gourd_params():
    """Build the DRLSE parameter set for the 'gourd.bmp' demo image."""
    image = imread('gourd.bmp', True)
    # Rescale intensities into the 0-255 range expected downstream.
    image = np.interp(image, [image.min(), image.max()], [0, 255])

    # Initial level-set function: a binary step at +c0 everywhere,
    # flipped to -c0 inside two seed rectangles (the initial region R0).
    c0 = 2
    phi0 = c0 * np.ones(image.shape)
    phi0[24:35, 19:25] = -c0
    phi0[24:35, 39:50] = -c0

    return dict(
        img=image,
        initial_lsf=phi0,
        timestep=1,                      # time step
        iter_inner=10,
        iter_outer=30,
        lmda=5,                          # weight of the length term L(phi)
        alfa=-3,                         # weight of the area term A(phi)
        epsilon=1.5,                     # width of the Dirac delta approximation
        sigma=0.8,                       # Gaussian kernel scale
        potential_function=DOUBLE_WELL,
    )
def two_cells_params():
    """Build the DRLSE parameter set for the 'twocells.bmp' demo image."""
    image = imread('twocells.bmp', True)
    # Rescale intensities into the 0-255 range expected downstream.
    image = np.interp(image, [image.min(), image.max()], [0, 255])

    # Initial level-set function: a binary step at +c0 everywhere,
    # flipped to -c0 inside a single seed rectangle (the initial region R0).
    c0 = 2
    phi0 = c0 * np.ones(image.shape)
    phi0[9:55, 9:75] = -c0

    return dict(
        img=image,
        initial_lsf=phi0,
        timestep=5,                      # time step
        iter_inner=5,
        iter_outer=40,
        lmda=5,                          # weight of the length term L(phi)
        alfa=1.5,                        # weight of the area term A(phi)
        epsilon=1.5,                     # width of the Dirac delta approximation
        sigma=1.5,                       # Gaussian kernel scale
        potential_function=DOUBLE_WELL,
    )
# Choose the demo: gourd (two seed rectangles) or, by swapping the comment,
# the two-cells image.
params = gourd_params()
# params = two_cells_params()
phi = find_lsf(**params)
print('Show final output')
draw_all(phi, params['img'], 10)
| 31.113924 | 120 | 0.650936 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,361 | 0.553702 |
9becb4cf172ca0bea4ede6f78c4348b44117361b | 2,421 | py | Python | object_detection/handlers.py | pavelbystrov1/detection_bot | b48c360ae0ddf93ad87edd5609f00e6f1b57a5c6 | [
"Apache-2.0"
] | null | null | null | object_detection/handlers.py | pavelbystrov1/detection_bot | b48c360ae0ddf93ad87edd5609f00e6f1b57a5c6 | [
"Apache-2.0"
] | null | null | null | object_detection/handlers.py | pavelbystrov1/detection_bot | b48c360ae0ddf93ad87edd5609f00e6f1b57a5c6 | [
"Apache-2.0"
] | null | null | null | """ bot handlers """
import os
from pymongo import MongoClient
from utils import main_keyboard
import settings
from db import get_or_create_user, save_detected_defects, save_car_counts
from db import defects_stat, cars_stat
from processing import process_picture
from cars_counting import detect_all_autos
from dl import CARS_RCNN_MODEL, DEFECTS_MODEL, LABEL_ENCODER
# Shared MongoDB connection/database used by every handler in this module.
CLIENT = MongoClient(settings.MONGO_LINK)
DB = CLIENT["testdb"]
def detect_defects(update, context):
    """Bot handler: classify the user's last uploaded image for road defects.

    Requires a previously uploaded image in context.user_data; otherwise asks
    the user (in Russian) to upload one. Saves the prediction to MongoDB and
    replies with the detected class.
    """
    get_or_create_user(DB, update.effective_user, update.message.chat.id)
    os.makedirs("downloads", exist_ok=True)
    if "last_image" not in context.user_data:
        update.message.reply_text("Загрyзите изображение")
        return
    image = context.user_data["last_image"]
    print("Ищем дефекты")
    result, y_pred = process_picture(DEFECTS_MODEL, LABEL_ENCODER, image)
    user_id = update.effective_user.id
    save_detected_defects(DB, user_id, y_pred, result, img_name=image)
    update.message.reply_text("Это " + result, reply_markup=main_keyboard())
def get_stats(update, context):
    """Bot handler: reply with aggregate defect/car statistics from MongoDB."""
    results = defects_stat(DB)
    total = cars_stat(DB)
    # NOTE(review): assumes defects_stat returns counts indexed as
    # [asphalt, defect, foreign object] — confirm against db.defects_stat.
    text = f"""
    всего найдено машин: {total}
    изображений с асфальтом: {results[0]}
    изображений с дефектом: {results[1]}
    изображений с посторонним предметом: {results[2]}"""
    update.message.reply_text(text, reply_markup=main_keyboard())
def count_cars(update, context):
    """Bot handler: run car detection on the user's last uploaded image.

    Sends back the annotated image plus a message with the detected count,
    and records the count in MongoDB.
    """
    get_or_create_user(DB, update.effective_user, update.message.chat.id)
    os.makedirs("downloads", exist_ok=True)
    if "last_image" not in context.user_data:
        update.message.reply_text("Загрyзите изображение")
        return
    update.message.reply_text("Пожалуйста подождите - идет обработка")
    print("Ищем машины на " + context.user_data["last_image"])
    car_count, msg, out_file = detect_all_autos(CARS_RCNN_MODEL, context.user_data["last_image"])
    user_id = update.effective_user.id
    save_car_counts(DB, user_id, car_count, 0.0, img_name=context.user_data["last_image"])
    chat_id = update.effective_chat.id
    # Send the annotated output image back to the same chat.
    with open(out_file, "rb") as img:
        context.bot.send_photo(chat_id=chat_id, photo=img)
    update.message.reply_text(msg, reply_markup=main_keyboard())
def start(update, context):
    """Bot handler for /start: greet the user and show the main keyboard."""
    update.message.reply_text("Hello", reply_markup=main_keyboard())
| 37.246154 | 97 | 0.736473 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 697 | 0.267357 |
9bee7b095f164de13e05289910aabe44e6f21162 | 873 | py | Python | src/utils.py | Martins6/stats-img-processing | 0254b451543770319e7b629dd00dd09cf763a3a5 | [
"Unlicense"
] | null | null | null | src/utils.py | Martins6/stats-img-processing | 0254b451543770319e7b629dd00dd09cf763a3a5 | [
"Unlicense"
] | 1 | 2022-01-13T03:51:44.000Z | 2022-01-13T03:51:44.000Z | src/utils.py | Martins6/stats-img-processing | 0254b451543770319e7b629dd00dd09cf763a3a5 | [
"Unlicense"
] | null | null | null | import os
import matplotlib.pyplot as plt
def biplot(score,coeff,pcax_index,pcay_index,labels=None):
    """Draw a PCA biplot on the current matplotlib axes.

    score: sample scores (rows = samples, columns = components);
    coeff: loadings indexed as coeff[variable, component];
    pcax_index/pcay_index: 1-based component numbers for the x and y axes;
    labels: optional variable names for the arrows (defaults to Var1, Var2, ...).
    """
    pcax=pcax_index
    pcay=pcay_index
    # Convert 1-based PC numbers to 0-based column indices.
    pca1=pcax-1
    pca2=pcay-1
    xs = score[:,pca1]
    ys = score[:,pca2]
    # NOTE(review): n is taken from score.shape[1] (number of components) but
    # is used to iterate variables in coeff — confirm the intended dimension.
    n=score.shape[1]
    # Normalize scores into roughly [-1, 1] so they share axes with loadings.
    scalex = 1.0/(xs.max()- xs.min())
    scaley = 1.0/(ys.max()- ys.min())
    plt.scatter(xs*scalex,ys*scaley)
    # One arrow per variable, labelled slightly beyond its tip.
    for i in range(n):
        plt.arrow(0, 0, coeff[i,pca1], coeff[i,pca2],color='r',alpha=0.5)
        if labels is None:
            plt.text(coeff[i,pca1]* 1.15, coeff[i,pca2] * 1.15, "Var"+str(i+1), color='g', ha='center',
            va='center')
        else:
            plt.text(coeff[i,pca1]* 1.15, coeff[i,pca2] * 1.15, labels[i], color='g', ha='center',
            va='center')
    plt.xlim(-1,1)
    plt.ylim(-1,1)
    plt.xlabel("PC{}".format(pcax))
    plt.ylabel("PC{}".format(pcay))
    plt.grid()
| 29.1 | 103 | 0.562428 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 58 | 0.066438 |
9beffefe216348049fb0a4b50410d3ac25cedf08 | 176 | py | Python | minigame/button.py | AndrewCarterUK/MiniGame | 6d699c045e84ee3834f23eb0483245195438eff7 | [
"MIT"
] | null | null | null | minigame/button.py | AndrewCarterUK/MiniGame | 6d699c045e84ee3834f23eb0483245195438eff7 | [
"MIT"
] | null | null | null | minigame/button.py | AndrewCarterUK/MiniGame | 6d699c045e84ee3834f23eb0483245195438eff7 | [
"MIT"
] | null | null | null | import abc
class Button(abc.ABC):
    """Abstract interface for a push-button input device."""

    @abc.abstractmethod
    def register_callback(self, callback):
        """Register a callable to be invoked when the button is activated.

        (Fixes the original parameter-name typo 'callabck'.)
        """

    @abc.abstractmethod
    def pressed(self):
        """Return whether the button is currently pressed."""
| 16 | 42 | 0.647727 | 163 | 0.926136 | 0 | 0 | 130 | 0.738636 | 0 | 0 | 0 | 0 |
9bf1a15848cc787927ffbf31caa854fbcd128fdd | 4,384 | py | Python | Polynomial_vec.py | chapman-phys227-2016s/hw-7-malfa100 | 8002e45f8435aeb66a93b4f30331b21ddaf99ef5 | [
"MIT"
] | null | null | null | Polynomial_vec.py | chapman-phys227-2016s/hw-7-malfa100 | 8002e45f8435aeb66a93b4f30331b21ddaf99ef5 | [
"MIT"
] | null | null | null | Polynomial_vec.py | chapman-phys227-2016s/hw-7-malfa100 | 8002e45f8435aeb66a93b4f30331b21ddaf99ef5 | [
"MIT"
] | null | null | null | """
File: Polynomial_vec.py
Copyright (c) 2016 Andrew Malfavon
License: MIT
Exercise 7.27
Description: Use vectorized expressions in the Polynomial class.
"""
import numpy as np
import unittest as ut
class Polynomial:
    """A polynomial whose coefficient array coeff[i] multiplies x**i.

    Arithmetic is NumPy-vectorized; several methods mutate self.coeff in
    place, which the accompanying unit tests depend on.
    """
    def __init__(self, coefficients):
        # Accept either an ndarray (stored as-is, not copied) or any
        # sequence (copied element-wise into a float array).
        if isinstance(coefficients, np.ndarray):
            self.coeff = coefficients
        else:
            self.coeff = np.zeros(len(coefficients))
            for i in range(len(coefficients)):
                self.coeff[i] = coefficients[i]
    def __call__(self, x):
        """Evaluate the polynomial at x via a dot product with [1, x, x**2, ...]."""
        x_array = np.zeros(len(self.coeff))
        for i in range(len(x_array)):
            x_array[i] = x**i
        return np.dot(self.coeff, x_array)
    def __add__(self, other):
        """Return a new Polynomial that is the coefficient-wise sum."""
        minimum = min(len(self.coeff), len(other.coeff))
        added = np.zeros(minimum)
        for i in range(minimum):
            added[i] = self.coeff[i] + other.coeff[i]
        """concatenate the original arrays to the end of the 'added' array.
        only concatenates after the 'minimum' value which will be nothing for the smaller array."""
        result_coeff = np.concatenate((added, self.coeff[minimum:], other.coeff[minimum:]), axis = 0)
        return Polynomial(result_coeff)
    def differentiate(self):
        """Differentiate in place using vectorized coefficient shifts.

        NOTE(review): n is never updated inside the loop, so the `while`
        condition never changes; for polynomials of length > 2 the second
        iteration hits a broadcast-shape error. Verify whether the original
        intent was `if n > 1:` (or a `return` inside the loop).
        """
        n = len(self.coeff)
        while n > 1:#prevents linspace freaking out if n-1 is negative
            self.coeff[:-1] = np.linspace(1, n-1, n-1) * self.coeff[1:]
            self.coeff = self.coeff[:-1]
        return Polynomial(self.coeff)
    def __mul__(self, other):
        """Return the product polynomial via coefficient convolution."""
        #same as nonvectorized __mull__
        c = self.coeff
        d = other.coeff
        M = len(c) - 1
        N = len(d) - 1
        result_coeff = np.zeros(M + N + 1)
        for i in range(0, M + 1):
            for j in range(0, N + 1):
                result_coeff[i + j] += c[i] * d[j]
        return Polynomial(result_coeff)
    def derivative(self):
        """Return the derivative as a new Polynomial, leaving self untouched."""
        dpdx = Polynomial(self.coeff[:])
        dpdx.differentiate()
        return dpdx
    def __call_nonvec__(self, x):
        """Non-vectorized evaluation (reference implementation)."""
        s = 0
        for i in range(len(self.coeff)):
            s += self.coeff[i] * x**i
        return s
    def __add_nonvec__(self, other):
        """Non-vectorized addition (reference implementation)."""
        if len(self.coeff) > len(other.coeff):
            result_coeff = self.coeff[:]#copy
            for i in range(len(other.coeff)):
                result_coeff[i] += other.coeff[i]
        else:
            result_coeff = other.coeff[:]#copy
            for i in range(len(self.coeff)):
                result_coeff[i] += self.coeff[i]
        return Polynomial(result_coeff)
    def differentiate_nonvec(self):
        """Non-vectorized in-place differentiation.

        NOTE(review): after shifting coefficients this overwrites
        self.coeff[0] with 1 and never drops the last element, which does
        not match the vectorized `differentiate` — flagged for review.
        """
        for i in range(1, len(self.coeff)):
            self.coeff[i-1] = i*self.coeff[i]
        self.coeff[0] = 1
        return Polynomial(self.coeff)
    def __str__(self):
        # Build " + c*x^i" terms for nonzero coefficients, then clean up
        # signs, the x^0/x^1 special cases, and unit coefficients.
        s = ""
        for i in range(len(self.coeff)):
            if(self.coeff[i] != 0):
                s += " + %g*x^%d" % (self.coeff[i], i)
        s = s.replace("+ -", "- ")
        s = s.replace("x^0", "1")
        s = s.replace(" 1*", " ")
        s = s.replace("x^1 ", "x ")
        if s[0:3] == " + ":
            s = s[3:]
        if s[0:3] == " - ":
            s = "-" + s[3:]
        return s
# Sample polynomials shared by the unit tests below: p1 = 1 - x,
# p2 = x - 6x^4 - x^5.
p1 = Polynomial([1, -1])
p2 = Polynomial([0, 1, 0, 0, -6, -1])
class test_Polynomial(ut.TestCase):
    """Unit tests comparing vectorized Polynomial methods to expected values."""
    def test_add(self):
        """Vectorized __add__ of p1 and p2 matches the hand-computed sum."""
        p_add = p1.__add__(p2)
        p_add_exact = Polynomial([1, 0, 0, 0, -6, -1])
        assert np.array_equal(p_add.coeff, p_add_exact.coeff)
    def test_derivative(self):
        """derivative() leaves the source polynomial intact and is correct."""
        p_derivative_1 = p1.derivative()
        p_derivative_2 = p2.derivative()
        p_derivative_exact_1= Polynomial([-1])
        p_derivative_exact_2 = Polynomial([1, 0, 0, -24, -5])
        assert np.array_equal(p_derivative_1.coeff, p_derivative_exact_1.coeff)
        assert np.array_equal(p_derivative_2.coeff, p_derivative_exact_2.coeff)
    """
    #I left this out because the assertion is failing but if I print separately it prints 'True'
    def test_mul(self):
        p_mull = p1.__mul__(p2)
        p_mull_exact = Polynomial([0, 1, -1, 0, -6, 5, 1])
        assert np.array_equal(p_mull.coeff, p_mull_exact.coeff)
    """
    def test_differentiate(self):
        """Vectorized and non-vectorized in-place differentiation agree.

        NOTE(review): both calls mutate the same p3 instance in sequence,
        so the comparison depends on that mutation order.
        """
        p3 = Polynomial([0, 1 ,2])
        p_differentiate = p3.differentiate()
        p_differentiate_nonvec = p3.differentiate_nonvec()
        assert np.array_equal(p_differentiate.coeff, p_differentiate_nonvec.coeff)
9bf1fa19b0bbd61d2f84487789a9ba6eac519047 | 18,637 | py | Python | OOP/Shop_OOP.py | Deego88/MPP_Assignment | f28ec02d1dbfb0f6079b87a99510771502e46c78 | [
"MIT"
] | null | null | null | OOP/Shop_OOP.py | Deego88/MPP_Assignment | f28ec02d1dbfb0f6079b87a99510771502e46c78 | [
"MIT"
] | null | null | null | OOP/Shop_OOP.py | Deego88/MPP_Assignment | f28ec02d1dbfb0f6079b87a99510771502e46c78 | [
"MIT"
] | null | null | null | # Student:Richard Deegan
# Student NUmber: G00387896
# Import libraries
import os
import csv
#****** CREATE DATA CLASS******#
# Create a data class for Product
class Product:
    """A shop item: a name plus a unit price (price defaults to 0)."""

    def __init__(self, name, price=0):
        self.name, self.price = name, price

    def __repr__(self):
        return "Product: {}; \tPrice: {:.2f}".format(self.name, self.price)
# Create a data class for ProductStock
class Product_stock:
    """A Product together with the quantity of it currently held."""

    def __init__(self, product, quantity):
        self.product = product
        self.quantity = quantity

    # Convenience accessors so callers can treat an instance like the
    # product it wraps.
    def name(self):
        """Name of the wrapped product."""
        return self.product.name

    def unit_price(self):
        """Unit price of the wrapped product."""
        return self.product.price

    def cost(self):
        """Total value of the held stock: unit price times quantity."""
        return self.quantity * self.unit_price()

    def __repr__(self):
        return "{} \tAvailable amount: {:.0f}".format(self.product, self.quantity)
# Create a data class for Customer
class Customer:
    """A shopper loaded from CSV: first row is (name, budget), the rest are
    (product name, quantity) shopping-list entries."""
    def __init__(self, path):
        self.shopping_list = []
        with open(path) as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=',')
            first_row = next(csv_reader)
            self.name = first_row[0]
            self.budget = float(first_row[1])
            for row in csv_reader:
                name = row[0]
                quantity = float(row[1])
                # Price is unknown until matched against shop stock.
                p = Product(name)
                ps = Product_stock(p, quantity)
                self.shopping_list.append(ps)
    def get_costs(self, price_list):
        """Price the shopping list against the shop's stock list.

        NOTE(review): `total_cost = + sub_total` is a unary plus (plain
        assignment), not accumulation, and the `return print(...)` exits
        after the first matching item and returns None — flagged for review.
        """
        total_cost = 0
        for list_item in self.shopping_list:
            for shop_item in price_list:
                if (list_item.name() == shop_item.name()): # the product is in stock
                    list_item.product.price = shop_item.unit_price()
                    sub_total = list_item.quantity * list_item.product.price
                    total_cost = + sub_total
                    return print(
                        f"(test: {list_item.name()}) there is enough of the product and sub-total would be €{sub_total}")
                else:
                    print("not in stock")
        pass
    def check_quantity(self, stock_list):
        """Placeholder: not implemented."""
        pass
    def order_cost(self):
        """Sum the cost of every shopping-list item (using known prices)."""
        cost = 0
        for list_item in self.shopping_list:
            cost += list_item.cost()
        return cost
    # Customer's shopping list
    def evaluate_order(self, sh):
        """Print the order breakdown against shop `sh` and build
        self.total_cost / self.total_order_list for later processing."""
        # Show customers details
        print("**********************************************************************************")
        print(
            f"\nThe customer name is: {self.name}, the customer budget is: €{self.budget:.2f}")
        print("**********************************************************************************")
        print(f"{self.name} wants the following products: ")
        # initialise
        self.total_cost = 0
        self.total_order_list = []
        # Create a for loop to loop over shopping list
        for cust_item in self.shopping_list:
            # Show customers details
            print(
                f" -{cust_item.product.name}, quantity {cust_item.quantity:.0f}. ", end="")
            # initialise
            sub_total = 0
            # control the messages about the customer
            customer_stock_state = 0  # stock check
            # loop over the stock list to find a match
            for shop_item in sh.stock:
                # check if match exists
                if (cust_item.name() == shop_item.name()) and (cust_item.quantity <= shop_item.quantity):
                    # get the product price form shop
                    cust_item.product.price = shop_item.unit_price()
                    sub_total_full = cust_item.quantity * cust_item.product.price
                    # update total cost
                    # NOTE(review): unary plus — plain assignment, not "+=".
                    sub_total = + sub_total_full
                    # update list of items that are making it for purchasing
                    n = cust_item.name()  # product name variable
                    q = cust_item.quantity  # product quantity variable
                    p = Product(n)  # new instance of class
                    sub_order = Product_stock(p, q)  # a new instance
                    self.total_order_list.append(
                        sub_order)  # append the item
                    # stock check
                    customer_stock_state = 1
                # not enough stock for customer roder
                elif (cust_item.name() == shop_item.name()) and (cust_item.quantity > shop_item.quantity):
                    # check how many can be bought
                    partial_order_qty = cust_item.quantity - \
                        (cust_item.quantity -
                         shop_item.quantity)  # purchase all stock
                    # Cost of the (i-th) item from the customer's shopping list
                    sub_total_partial = partial_order_qty * \
                        shop_item.product.price
                    # update total cost
                    sub_total = + sub_total_partial
                    # as above
                    n = cust_item.name()
                    q = partial_order_qty
                    p = Product(n)
                    sub_order = Product_stock(p, q)
                    self.total_order_list.append(
                        sub_order)  # append
                    # stock check
                    customer_stock_state = 2
                # none in stock
                elif ((cust_item.name() == shop_item.name()) and (shop_item.quantity <= 0)):
                    # Prints out cost of all items of the product
                    customer_stock_state = 0  # none in stock
                # else:
                # customer_stock_state = 0  # none in stock
            # addition of sub totals
            self.total_cost = self.total_cost + sub_total
            if customer_stock_state == 1:
                # stock check - all quantity can satisfied
                print(
                    f"\tThe shop has stock and sub-total cost would be €{sub_total_full:.2f}")
            elif customer_stock_state == 2:
                # stock check - partial quantity can satisfied
                print(
                    f"\Sorry only {partial_order_qty:.0f} is available and sub-total cost for that many would be €{sub_total_partial:.2f}.")
            elif customer_stock_state == 0:
                # stock check - item not available
                print(
                    f"\tSorry but this product is not available. Sub-total cost will be €{sub_total:.2f}.")
        print(
            f"Total shopping cost will be: €{self.total_cost:.2f}. \n")
        # NOTE(review): the two bare expressions below are no-ops.
        self.total_cost
        self.total_order_list
    def __repr__(self):
        # NOTE(review): `str` shadows the builtin and is reset inside the
        # loop, so only the last shopping-list item's line survives.
        for item in self.shopping_list:
            cost = item.cost()
            str = ""
            str += f"\n{item}"
            if (cost == 0):
                str += f" {self.name} doesn't know how much that costs :("
            else:
                str += f" COST: {cost:.2f}"
        str += f"\nThe cost would be: {self.order_cost():.2f}, he would have {self.budget - self.order_cost():.2f} left"
        return str
#****** SHOP_DETAILS ******#
class Shop:
    """The shop: cash on hand plus a stock list, loaded from CSV where the
    first row is the cash balance and the rest are (name, price, quantity)."""
    def __init__(self, path):
        self.stock = []
        with open(path) as csv_file:
            csv_reader = csv.reader(csv_file, delimiter=',')
            first_row = next(csv_reader)
            self.cash = float(first_row[0])
            for row in csv_reader:
                p = Product(row[0], float(row[1]))
                ps = Product_stock(p, float(row[2]))
                self.stock.append(ps)
    #****** SHOP_DETAILS_ORDER ******#
    def process_order(self, cust, sh, total_cost, total_order_list):
        """Commit a priced order: deduct stock, move cash from customer to shop.

        Expects cust.total_order_list to have been built by
        Customer.evaluate_order. Rejects the order if the budget is short.
        """
        # Check whether the customer can afford the desired items
        if (cust.budget < total_cost):  # customer is short of money
            print(
                f"Sorry, you do not have enough funds, you require €{(total_cost - cust.budget):.2f}. ", end="")
        # else customer has enough money
        else:
            # loop over the items in the customer shopping list
            for cust_item in cust.total_order_list:
                # Initialise (no match=0)
                match_exist = 0
                # Assign the (i-th) product from the customer schopping list as a shorthand
                cust_item_name = cust_item.product.name
                # loop over the stock list to find a match
                for sh_item in sh.stock:
                    # assign the (j-th) product from the shop stock list as a shorthand
                    sh_item_name = sh_item.product.name
                    # check if there is a match
                    if (cust_item_name == sh_item_name):
                        # NOTE(review): unary plus — sets 1, does not count matches.
                        match_exist = + 1
                        # IF sufficient amount exists do the following
                        if (cust_item.quantity <= sh_item.quantity):
                            # update the shop stock
                            sh_item.quantity = sh_item.quantity - cust_item.quantity
                            print(
                                f"Stock quantity of {cust_item.product.name} is now updated to: {sh_item.quantity:.0f}")
                        else:  # customer wants more than in stock
                            # buy whole stock of the current item
                            partial_order_qty = cust_item.quantity - \
                                (cust_item.quantity - sh_item.quantity)
                            # update the shop stock
                            sh_item.quantity = sh_item.quantity - partial_order_qty
                            print(
                                f"Shop product {cust_item.product.name} is now updated to {sh_item.quantity:.0f}.")
                # IF product is not in the shop, there is no match
                if (match_exist == 0):
                    print(
                        f"\tSorry the shop doesn't have this product.")
            # update shop and customer
            sh.cash = sh.cash + total_cost
            cust.budget = cust.budget - total_cost
            print(f"\nThe shop now has €{sh.cash:.2f} in cash. ")
            # updated customer's budget
            print(f"{cust.name} has €{cust.budget:.2f} remaining for shopping.")
            print("")
        return
    # ****** LIVE_MODE ******
    def interactive_mode(self, sh, budget):
        """Interactive shopping loop: prompt for product names/quantities and
        sell from sh's stock against the given budget until 'x' is entered."""
        # print stock
        print(f"\nThis is a list of products for sale in the shop:")
        print(self)
        # initialise
        product_name = ""
        quantity = 0
        # initialise a forever loop forcing the user to exit only with an x
        while product_name != "x":
            print()
            # Request input from the user, assign to the variable
            product_name = input(
                "Please enter your product name (press x to exit): ")
            # initialise (0 = no match)
            match_exist = 0
            # loop over shop stock list looking for a match from customer's list
            for sh_item in sh.stock:
                # initialise
                sub_total = 0
                # assign the (j-th) product from the shop stock list as a shorthand
                sh_item_name = sh_item.product.name
                # IF there is a match
                if (product_name == sh_item_name):
                    match_exist += 1  # set match
                    quantity = int(
                        input("Please enter your requested quantity: "))
                    # check products availability
                    if (quantity <= sh_item.quantity):
                        # check product price and calculate sub-total cost
                        sub_total = sh_item.product.price * quantity
                        # IF customer has enough funds
                        if (budget >= sub_total):
                            # update customer's funds
                            budget = budget - sub_total
                            print(
                                f"Congrats! you bought the product. Sub total cost was €{sub_total:.2f}. Your funds are now €{budget:.2f}.")
                            # update the shop stock and cash
                            sh_item.quantity = sh_item.quantity - quantity
                            # update the shop cash
                            sh.cash = sh.cash + sub_total
                            print(
                                f"Shop quantity of {sh_item_name} in now: {sh_item.quantity:.0f}. The shop has now: {sh.cash:.2f}.")
                        else:  # customer cannot afford all
                            print(
                                f"Sorry you do not have enough funds, you require €{(sub_total - budget):.2f} extra. ", end="")
                    # customer wants more than in stock
                    else:
                        # check how many can be bought and buy all that is in stock
                        partial_order_qty = quantity - \
                            (quantity - sh_item.quantity)
                        # perform the sub-total cost for the item
                        sub_total_partial = partial_order_qty * \
                            sh_item.product.price
                        # Prints out cost of all items of the product
                        print(
                            f"Only {partial_order_qty:.0f} is available. Sub-total cost was €{sub_total_partial:.2f}. ")
                        # update customer's budget
                        budget = budget - sub_total_partial
                        print(
                            f"Customer budget is: €{budget:.2f} after buying this item.")
                        # update the shop stock adn cash
                        sh_item.quantity = sh_item.quantity - partial_order_qty
                        sh.cash = sh.cash + sub_total_partial
                        print(
                            f"This product is not avilable in the shop: {sh_item.quantity:.0f}). Cash in shop now: {sh.cash:.2f}.")
            if (match_exist == 0):  # product not available in stock
                print("Product unavailable.")
    # ****** SHOP_MENU ******
    def display_menu(self):
        """Main menu loop: run demo customer scenarios, live mode, or exit."""
        while True:  # this is a 'forever' loop, unless interupted (break)
            # Main menu screen
            print("******************************\n")
            print("Welcome to the Shop Main Menu:\n")
            print("******************************\n")
            print("[1] - Shop Details\n")
            print("[2] - Customer A: good case\n")
            print("[3] - Customer B: Broke funds case\n")
            print("[4] - Customer C: exceeding order case\n")
            print("[5] - Live Mode\n")
            print("[9] - Exit\n")
            print("******************************\n")
            # Request user input
            choice = input("Please enter your choice: ")
            if (choice == "1"):
                print(self)
            elif (choice == "2"):
                # create customer A- good case csv
                customer_A = Customer("../Data/customer_good.csv")
                # print customer details
                customer_A.evaluate_order(self)
                # total_cost = evaluate_order(customer_A, shop)
                # process customer's shopping list by calling relevant method
                self.process_order(customer_A, self,
                                   customer_A.total_cost, customer_A.total_order_list)
            elif (choice == "3"):
                # create customer B- broke case
                customer_B = Customer(
                    "../Data/customer_broke.csv")
                # print customer details and evaluate shopping list
                customer_B.evaluate_order(self)
                # process customer's shopping list by calling relevant method
                self.process_order(customer_B, self,
                                   customer_B.total_cost, customer_B.total_order_list)
            elif (choice == "4"):
                # create customer C- exceeding case
                # read data from a file
                customer_C = Customer("../Data/customer_exceeding_order.csv")
                # print customer details and evaluate shopping list
                customer_C.evaluate_order(self)
                # process customer's shopping list by calling relevant method
                self.process_order(customer_C, self,
                                   customer_C.total_cost, customer_C.total_order_list)
            elif (choice == "5"):
                # Live Mode welcome message
                print("-------------------------")
                print("You are now in Live Mode")
                print("-------------------------")
                # get user's name
                self.customer_name = input("Enter your name please: ")
                print(
                    f"Welcome, {self.customer_name} to the live shopping experience. ")
                # get user's budget
                self.budget = float(
                    input("Please tell me your shopping budget: "))
                # go to the interactive mode
                self.interactive_mode(self, self.budget)
            elif (choice == "9"):  # Exit condition
                print("")
                break
    #******SHOP_SELF******#
    def __repr__(self):
        # NOTE(review): `str` shadows the builtin inside this method.
        str = ""
        str += f"\nShop has {self.cash:.2f} in cash \n==== ==== ==== ==== ==== ==== ==== ==== ==== ==== ==== ==== ==== ====\n"
        for item in self.stock:
            str += f"{item}\n"
        return str
def main():
    """Entry point: clear the terminal, run the shop menu, then demo the
    default customer's price check."""
    # Clear the screen with the command appropriate for the host OS.
    # (The original called os.system("cls") twice; "cls" is Windows-only —
    # on Linux/macOS the command is "clear".)
    os.system("cls" if os.name == "nt" else "clear")
    shop_one = Shop("../Data/shop_stock.csv")
    # Interactive menu loop; returns when the user chooses to exit.
    shop_one.display_menu()
    # Default customer demo: price the shopping list against current stock.
    c = Customer("../Data/customer.csv")
    c.get_costs(shop_one.stock)
    print(c)


if __name__ == "__main__":
    # execute only if run as a script
    main()
| 37.803245 | 141 | 0.489403 | 17,927 | 0.960461 | 0 | 0 | 0 | 0 | 0 | 0 | 6,964 | 0.373105 |
9bf23ce8f874e49f7dbc801b56d74fadb35d9f6f | 2,607 | py | Python | 7/assembly.py | Keilan/advent-of-code-2018 | 3f3b4952c3633df4008e734da15e219fa67ec635 | [
"MIT"
] | null | null | null | 7/assembly.py | Keilan/advent-of-code-2018 | 3f3b4952c3633df4008e734da15e219fa67ec635 | [
"MIT"
] | null | null | null | 7/assembly.py | Keilan/advent-of-code-2018 | 3f3b4952c3633df4008e734da15e219fa67ec635 | [
"MIT"
] | null | null | null | import sys
import copy
def find_available(graph, steps):
    """Return the steps that have no remaining prerequisites.

    A step is available exactly when it is absent from `graph`, whose keys
    are steps that still have unmet dependencies. Input order is preserved.
    """
    available = []
    for step in steps:
        if step not in graph:
            available.append(step)
    return available
def trim_graph(graph, item):
    """Mark `item` as completed: remove it from every prerequisite list.

    Mutates `graph` in place; any step left with an empty prerequisite list
    is deleted from the graph (making it available). Returns the same dict.
    """
    now_unblocked = []
    for step, prereqs in graph.items():
        if item in prereqs:
            prereqs.remove(item)
        if not prereqs:
            now_unblocked.append(step)
    for step in now_unblocked:
        del graph[step]
    return graph
def assembly():
    """Solve Advent of Code 2018 day 7 from stdin.

    Part 1: print the alphabetical-tiebreak topological order of the steps.
    Part 2: simulate 5 workers where step X takes 60 + (ord(X) - 64) seconds
    and print the total elapsed time.
    """
    # Parse "Step A must be finished before step B can begin." lines:
    # words[1] is the prerequisite, words[7] the dependent step.
    steps = set()
    rules = []
    for directions in sys.stdin:
        parts = directions.split()
        rules.append((parts[1], parts[7]))
        steps.add(parts[1])
        steps.add(parts[7])
    # graph maps each step to the list of prerequisites it still needs.
    graph = {}
    for requirement, step in rules:
        if step in graph:
            graph[step].append(requirement)
        else:
            graph[step] = [requirement]
    # Iterate through until no steps are remaining
    solution = ""
    current_graph = copy.deepcopy(graph)
    remaining_steps = steps.copy()
    while(current_graph):
        options = find_available(current_graph, remaining_steps)
        # Alphabetical tiebreak among available steps.
        choice = min(options)
        #Add to solution and remove from graph and choices
        solution += choice
        remaining_steps.remove(choice)
        current_graph = trim_graph(current_graph, choice)
    # Add final item
    solution += remaining_steps.pop()
    print(f'The correct order is {solution}')
    # Setup variables to simulate progress
    base_seconds = 60
    workers = 5
    total_seconds = 0
    # Reuse `workers` as the slot list: 'free' or the step being worked on.
    workers = ['free']*workers
    # Seconds each step takes: base + letter index (A=1, B=2, ...).
    remaining_time = {step: ord(step)-64+base_seconds for step in steps}
    # Process until all steps are completed
    while(steps):
        # Assign workers
        if 'free' in workers:
            options = sorted(find_available(graph, steps))
            for step in options:
                if 'free' in workers and step not in workers:
                    workers[workers.index('free')] = step
        # Decrement time on all active steps
        for step in remaining_time:
            #Only increment steps being worked on
            if step not in workers:
                continue
            remaining_time[step] -= 1
            # If the step is finished, remove it from the graph
            if remaining_time[step] == 0:
                #print(f'Finished {step}')
                steps.remove(step)
                graph = trim_graph(graph, step)
                workers[workers.index(step)] = 'free'
        print(total_seconds,workers,remaining_time)
        total_seconds += 1
    print(f'Assembly took {total_seconds} seconds')
# Runs immediately when the module executes (reads puzzle input from stdin).
assembly()
9bf246a2909d52a0002e7bef877a273448d9ca53 | 14,502 | py | Python | server.py | andersonsso/vogon | dd6b812c0a48c2ff99bb7690536e0b2e46f226bc | [
"Apache-2.0"
] | 98 | 2015-01-20T05:57:01.000Z | 2022-01-04T12:03:39.000Z | server.py | andersonsso/vogon | dd6b812c0a48c2ff99bb7690536e0b2e46f226bc | [
"Apache-2.0"
] | 13 | 2016-08-28T05:58:59.000Z | 2021-01-14T10:55:01.000Z | server.py | andersonsso/vogon | dd6b812c0a48c2ff99bb7690536e0b2e46f226bc | [
"Apache-2.0"
] | 57 | 2015-01-24T09:23:34.000Z | 2021-12-09T06:18:27.000Z | #!/usr/bin/python
# vim: set fileencoding=utf-8 :
# Copyright 2019 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Web-based Vogon configuration file editor.
This utility starts a web server and opens a local web page in the user's
default browser, which contains a GUI to edit the JSON configuration file.
"""
from os import path
import sys
# Make the bundled third-party copy of Bottle importable before
# `from bottle import ...` below.
program_dir = path.abspath(path.dirname(__file__))
sys.path.insert(0, program_dir + '/third_party/bottle/')
import argparse
from bottle import get, post, delete, request, route, run, static_file, response
import codecs
from io import StringIO
import csv
from distutils.dir_util import copy_tree
import http.client
import json
import os
import platform
import re
import shutil
import subprocess
import tempfile
import threading
import time
import urllib
import zipfile
import vogon
import yt_api
import google_ads_editor_csv as g_ads_editor
################################################################################
# YOUTUBE AUTHENTICATION
################################################################################
@post('/api/youtube_auth/get_device_code')
def get_device_code():
    """Begin the OAuth2 device flow; relay YouTube's device-code payload."""
    yt_status, yt_response = yt_api.get_device_code()
    response.status = yt_status  # mirror the upstream HTTP status to our client
    return yt_response
@post('/api/youtube_auth/check_device_authorization')
def check_device_authorization():
    """Poll whether the user authorized the device code from the JSON body."""
    yt_status, yt_response = yt_api.check_device_authorization(request.json['code'])
    response.status = yt_status  # mirror the upstream HTTP status to our client
    return yt_response
@post('/api/youtube/list_channels')
def list_channels():
    """Refresh the access token, then list the account's YouTube channels.

    The refreshed access token and the original refresh token are merged
    into the response body so the client can reuse them on later calls.
    """
    _, refresh_token_response = yt_api.refresh_access_token(request.json['refresh_token'])
    new_access_token = json.loads(refresh_token_response)['access_token']
    yt_status, yt_response = yt_api.list_channels(new_access_token)
    yt_response_content = json.loads(yt_response)
    response.status = yt_status
    yt_response_content['access_token'] = new_access_token
    yt_response_content['refresh_token'] = request.json['refresh_token']
    return yt_response_content
@post('/api/youtube/start_video_upload')
def start_video_upload():
    """Kick off uploading generated videos to YouTube (delegated to yt_api)."""
    return yt_api.start_video_upload(request.json)
@post('/api/youtube/remove_uploaded_videos')
def remove_uploaded_videos():
    """Delete previously uploaded videos from YouTube (delegated to yt_api)."""
    return yt_api.remove_uploaded_videos(request.json)
@get('/api/youtube/read_log/<project_id>')
def read_log(project_id):
    """Return the upload log for a project.

    NOTE(review): the second argument `1` is passed positionally — presumably
    a line/offset parameter; confirm against yt_api.read_log.
    """
    return yt_api.read_log(project_id, 1)
################################################################################
# CONFIG ACTIONS
################################################################################
@get('/api/projects/<project_folder>/config')
def get_config(project_folder):
    """Serve the project's config.json as a static file."""
    config_file = os.path.join("projects", project_folder, "config.json")
    return static_file(config_file, root='./')
@get('/api/sheets_client_id')
def get_secrest_json():
    """Return the OAuth2 client id (JSON-encoded) from the local secret file."""
    secret_file = "credentials/oauth_2_client_secret.json"
    with open(secret_file) as s_file:
        # json.load reads the stream directly; the 'with' block closes it
        # (the original also called s_file.close() redundantly inside it).
        ctn = json.load(s_file)
    return json.dumps(ctn["web"]["client_id"])
@post('/api/projects/<project_folder>/config')
def post_config(project_folder):
    """Persist the posted JSON body as the project's config.json."""
    config_file = os.path.join("projects", project_folder, "config.json")
    with open(config_file, 'w') as f:
        # The context manager closes the file; the original also called
        # f.close() redundantly inside the block.
        json.dump(request.json, f, indent=2)
################################################################################
# VIDEO GENERATION ACTIONS
################################################################################
@get('/api/projects/<project_folder>/preview/row/<index>')
def generate_preview(project_folder, index):
    """Render a single preview video for one feed row and serve it."""
    config_file = os.path.join("projects", project_folder, "config.json")
    video = vogon.generate_preview(config_file, int(index),
                                   project_dir=project_folder)
    return static_file(video, root='./', download=video)


@post('/api/projects/<project_id>/generate_all_videos')
def generate_all_variations(project_id):
    """Start generating every video variation on a background thread."""
    arg = (project_id,)
    t = threading.Thread(target=vogon.generate_all_video_variations, args=arg)
    t.start()
    return json.dumps("Started")


@get('/api/projects/<project_id>/cancel_video_generation')
def cancel_video_generation(project_id):
    """Ask vogon to stop an in-progress video generation run."""
    vogon.stop_video_generation(project_id)
    return json.dumps("Canceled")


@get('/api/projects/<project_id>/update_on_video_generation')
def update_on_video_generation(project_id):
    """Report generation progress as JSON (start time + current state)."""
    started_at, current_state = vogon.get_video_generation_percent(project_id)
    # current_state is decoded unless it is the "--" placeholder value.
    current_state = current_state.decode('utf-8') if current_state != "--" else ""
    return json.dumps({
        "started_at": str(started_at),
        "current_state": current_state
    })
################################################################################
# PROJECT MANAGEMENT ACTIONS
################################################################################
@get('/api/projects/list')
def get_available_projects():
    """Return a JSON list of project names with their disk usage."""
    if not os.path.exists("projects"):
        os.makedirs("projects")
    # Hidden entries (leading dot) are not projects.
    listing = [
        {"name": name, "size": du("projects/" + name)}
        for name in os.listdir("projects")
        if name[0] != "."
    ]
    return json.dumps(listing)
@post('/api/projects/new/name/<project_folder>')
def copy_base_project(project_folder):
    """Create a new project by copying base_project/ into projects/<name>.

    The name is sanitized to word characters, the config template's
    {{project_id}} placeholder is substituted, and a JSON status object
    reports whether the name was already taken.
    """
    # Strip anything that is not a word character.
    project_folder = re.sub(r'[^\w_]', '', project_folder)
    project_dir = os.path.join("projects", project_folder)
    base_dir = "base_project/"
    is_taken = os.path.isdir(project_dir)
    if not is_taken:
        # copies base project
        # NOTE(review): distutils.dir_util.copy_tree is deprecated
        # (distutils removed in Python 3.12); shutil.copytree is the
        # modern replacement.
        copy_tree(base_dir, project_dir)
        # fixes config file
        conf_file_path = os.path.join(project_dir, "config.json")
        data = ""
        with open(conf_file_path, 'r') as config_file:
            data = config_file.read().replace("{{project_id}}", project_folder)
            config_file.close()
        with open(conf_file_path, 'w') as config_file:
            config_file.write(data)
        # Presumably lets the copy settle before responding — TODO confirm.
        time.sleep(2)
        # returns success
        return json.dumps({"success":True, "project": project_folder})
    else:
        return json.dumps({"success":False, "project": project_folder})
@post('/api/projects/<project_folder>/clear')
def clear_project(project_folder):
    """Delete and recreate the project's output/ directory."""
    project_folder = os.path.join("projects", project_folder, "output")
    shutil.rmtree(project_folder)
    os.mkdir(project_folder)
    return json.dumps("True")


@post('/api/projects/<project_folder>/delete')
def delete_project(project_folder):
    """Remove the entire project directory tree."""
    project_folder = os.path.join("projects", project_folder)
    shutil.rmtree(project_folder)
    return json.dumps("True")
################################################################################
# ASSETS MANAGEMENT ACTIONS
################################################################################
@get('/api/projects/<project_id>/google_ads_editor_file')
def generate_and_download_editor_file(project_id):
    """Build the Google Ads Editor CSV for a project and serve it.

    Returns a JSON error message if the CSV could not be generated or
    if some videos have not been uploaded yet; otherwise streams the
    CSV as an attachment.
    """
    (uploaded, missing), error = g_ads_editor.build_csv(project_id)
    if error is not None:
        return json.dumps({"msg": "ERROR generating CSV: %s" % error})
    elif missing > 0:
        # NOTE(review): the message interpolates (uploaded, missing) but
        # reads like "uploaded of total" — confirm the intended wording.
        return json.dumps({
            "msg": "CSV file has %s of %s videos, please make sure to generate "
                   "all videos and upload all of them to YouTube Before "
                   "downloading the Editor CSV." % (uploaded, missing)
        })
    else:
        feed_name = "google_ads_editor.csv"
        feed_path = os.path.join("projects", project_id)
        file_path = os.path.join(feed_path, feed_name)
        filename = str(os.path.basename(file_path))
        # renders to browser as file to download, not to display.
        response.headers['Content-Type'] = 'application/octet-stream'
        response.headers['Content-Disposition'] = 'attachment; filename="%s"'
        response.headers['Content-Disposition'] %= (filename)
        return static_file(feed_name, root=feed_path, download=filename)
@post('/api/projects/<project_id>/feed_content_upload')
def feed_content_upload(project_id):
    """Write the posted feed rows to projects/<id>/feed.csv as UTF-8.

    Rows are serialized through an in-memory csv writer and pushed
    through an incremental UTF-8 encoder one row at a time.
    """
    feed_uri = "projects/%s/feed.csv" % project_id
    feed_data = json.loads(request.body.read())['feed_data']
    queue = StringIO()
    writer = csv.writer(queue, dialect=csv.excel)
    encoder = codecs.getincrementalencoder('utf-8')()
    with open(feed_uri, 'wb') as feed_file:
        for feed_row in feed_data:
            writer.writerow([v for v in feed_row])
            data = queue.getvalue()
            feed_file.write(encoder.encode(data))
            # BUG FIX: truncate(0) alone leaves the stream position at the
            # end of the now-empty buffer, so subsequent rows were padded
            # with NUL characters; rewind before truncating.
            queue.seek(0)
            queue.truncate(0)
    return json.dumps({'success': True})
@get('/api/projects/<project_id>/fonts')
def get_font_list(project_id):
    """Return a sorted JSON list of [display name, path] pairs for fonts.

    Searches the project's assets folder plus common system font
    directories for .ttf/.otf files.
    """
    matches = []
    font_dirs = [
        "projects/%s/assets" % project_id,
        "/Library/Fonts",
        "/System/Library/Fonts",
        "/usr/share/fonts",
        # BUG FIX: '~' must be expanded explicitly; os.walk("~/fonts")
        # silently yields nothing.
        os.path.expanduser("~/fonts"),
        os.path.expanduser("~/.fonts"),
    ]
    for font_dir in font_dirs:
        try:
            for root, dirnames, filenames in os.walk(font_dir):
                for filename in filenames:
                    font_file = os.path.join(root, filename)
                    if filename[-4:] in (".ttf", ".otf"):
                        # Build a human-friendly name: split CamelCase,
                        # falling back to capitalizing each word.
                        beaut_name = filename[:-4].replace("_", " ").replace("-", " ")
                        beaut_name = " ".join(re.findall('[A-Z][^A-Z]*', beaut_name))
                        if not beaut_name:
                            beaut_name = filename[:-4].replace("_", " ").replace("-", " ").split(" ")
                            beaut_name = " ".join([w.capitalize() for w in beaut_name])
                        matches.append([beaut_name, font_file])
        except Exception as e:
            # BUG FIX: "..." + e raised TypeError; convert explicitly.
            print("error loading fonts: " + str(e))
    return json.dumps(sorted(matches))
@get('/api/projects/<project_id>/assets')
def get_assets_list(project_id):
    """Return a sorted JSON list of asset paths relative to the assets dir."""
    assets_path = "projects/%s/assets" % project_id
    found = []
    for parent, _dirs, names in os.walk(assets_path):
        for name in names:
            full_path = os.path.join(parent, name)
            # Drop the assets prefix plus the leading path separator.
            found.append(full_path.replace(assets_path, '')[1:])
    return json.dumps(sorted(found))
@post('/api/projects/<project_id>/assets')
def post_single_asset(project_id):
    """Accept an uploaded file; unzip archives, otherwise copy it in place.

    Returns the refreshed asset listing for the project.
    """
    assets_path = "projects/%s/assets/" % project_id
    with PostedFileWriter(request) as file_path:
        if file_path[-4:] == '.zip':
            # Zip archives are expanded directly into the assets folder.
            with zipfile.ZipFile(file_path, 'r') as f:
                f.extractall(assets_path)
        else:
            _, asset_name = os.path.split(file_path)
            new_asset_path = assets_path + asset_name
            shutil.copy(file_path, new_asset_path)
    return get_assets_list(project_id)
def move_file(origin_path, dest_path):
    """Save the uploaded 'file' field of the current request to dest_path.

    NOTE(review): the original referenced the undefined name ``bottle``
    (only names were imported *from* bottle) and contained unreachable
    file-copy code after the return; both are fixed/removed here. The
    ``origin_path`` parameter is kept for interface compatibility but is
    unused, as in the original's reachable code.
    """
    upload = request.files.get('file')
    upload.save(dest_path)
    return 1
@delete('/api/projects/<project_id>/assets/')
def delete_asset(project_id):
    """Delete one asset (named in the query string) and return the listing."""
    asset_name = request.query['asset_path']
    try:
        assets_path = "projects/%s/assets/" % project_id
        asset_full_path = os.path.join(assets_path, asset_name)
        os.unlink(asset_full_path)
    except Exception as e:  # pylint: disable=broad-except
        # Best-effort delete: log the failure and still return the listing.
        error_msg = 'Error unlinking file %s.\nError: %s.'
        error_msg %= (request.body, e)
        print(error_msg)
    return get_assets_list(project_id)


@get('/api/projects/<project_id>/download/assets/')
def download_asset(project_id):
    """Serve one asset (named in the query string) as an attachment."""
    asset_name = request.query['asset_path']
    assets_path = "projects/%s/assets/" % project_id
    file_path = os.path.join(assets_path, asset_name)
    filename = str(os.path.basename(file_path))
    # renders to browser as file to download, not to display.
    response.headers['Content-Type'] = 'application/octet-stream'
    response.headers['Content-Disposition'] = 'attachment; filename="%s"'
    response.headers['Content-Disposition'] %= (filename)
    return static_file(asset_name, root=assets_path, download=filename)
################################################################################
# Main page Actions
################################################################################
@get('/#!/project/<project_folder>')
def get_index(project_folder):
    """Serve the single-page app for a deep project link."""
    filename = 'html/index.html'
    return get_static(filename)


@get('/')
def get_index():
    """Serve the single-page app from the site root."""
    # NOTE(review): this redefinition shadows the function above at module
    # level; both routes still work because bottle registered each handler
    # at decoration time.
    filename = 'html/index.html'
    return get_static(filename)


@get('/static/<filepath:path>')
def get_static(filepath):
    """Serve files from the static/ directory.

    NOTE(review): ``program_dir`` is not defined in this chunk; it is
    presumably a module-level global set elsewhere — confirm.
    """
    static_dir = program_dir + '/static/'
    return static_file(filepath, root=static_dir)
################################################################################
# Helpers
################################################################################
def du(path):
    """Disk usage of *path* in human readable format (e.g. '2,1GB')."""
    output = subprocess.check_output(['du', '-sh', path])
    return output.split()[0].decode('utf-8')
class PostedFileWriter(object):
    """Defines a resource to use on 'with' statements to clean up after upload."""

    def __init__(self, request):
        # Handle on the bottle request carrying the uploaded file.
        self.request = request

    def __enter__(self):
        """Stage the uploaded 'file' field in a temp dir; return its path.

        Returns None when the request carries no named upload.
        """
        self.temp_dir = tempfile.mkdtemp()
        # ngFileUpload sends the content in the "file" parameter by default
        # NOTE(review): reads the module-level `request`, not self.request.
        input_file = request.files.get("file")
        if input_file.filename:
            file_name = input_file.filename
            file_path = os.path.join(self.temp_dir, file_name)
            buffer_size = 2**16  # 65k
            # Stream the upload to disk in chunks to bound memory use.
            with open(file_path, 'wb') as output_file:
                buf = input_file.file.read(buffer_size)
                while buf:
                    output_file.write(buf)
                    buf = input_file.file.read(buffer_size)
                output_file.close()
            return file_path
        else:
            return None

    def __exit__(self, type_, value, traceback_):
        # Remove the temp dir (and the staged upload) on scope exit.
        shutil.rmtree(self.temp_dir)
################################################################################
# Main
################################################################################
def main():
    """Parse command line flags and start the bottle web server."""
    # BUG FIX: argparse was never imported at module level; import it here
    # so the module remains importable without change elsewhere.
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("--debug",
                        help="Enable debug mode",
                        action="store_true")
    args = parser.parse_args()
    run(host='0.0.0.0', port=8080, debug=args.debug)


if __name__ == '__main__':
    main()
| 35.284672 | 88 | 0.635912 | 901 | 0.062129 | 0 | 0 | 9,761 | 0.67308 | 0 | 0 | 5,339 | 0.368156 |
9bf2b2dc723f4b7d00b4319b8189354d02aff030 | 4,318 | py | Python | 3/konks_lab_3.py | exucutional/study_comp_math | c73b6f00e86b2d19c4cb81c377ca4706d70b2831 | [
"MIT"
] | null | null | null | 3/konks_lab_3.py | exucutional/study_comp_math | c73b6f00e86b2d19c4cb81c377ca4706d70b2831 | [
"MIT"
] | null | null | null | 3/konks_lab_3.py | exucutional/study_comp_math | c73b6f00e86b2d19c4cb81c377ca4706d70b2831 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# # Lab 02
#
# ## Solving a system of nonlinear equations
#
# ### Konks Eric, Б01-818
#
# IV.12.7.д
# $$\begin{cases} x^7 - 5x^2y^4 + 1510 = 0 \\ y^3 - 3x^4y - 105 = 0 \end{cases}$$
# $$\begin{cases} x_{n+1} = \sqrt{\frac{x_n^7 + 1510}{5y_n^4}} \\ y_{n+1} = \sqrt[3]{3x_{n}^4y_{n}+105} \end{cases}$$
# $$J=\begin{pmatrix}7x^6-10xy^4 & -20x^2y^3\\-12x^3y & 3y^2-3x^4\end{pmatrix}$$
# In[1]:
import unittest
import logging
import numpy as np
import pandas as pd
# In[2]:
#logging.basicConfig(level=logging.DEBUG)
# In[3]:
class FPI:
    """Fixed point iteration solver for systems of the form x = f(x)."""

    def __init__(self, f_vec):
        self._funcs = f_vec
        self.iter = 0
        self.log = logging.getLogger("FPI")

    def _converged(self, new_x, old_x, q, delta):
        """Stop once the l1 step size is within delta * (1 - q)."""
        if new_x == old_x:
            # Initial state: old and new coincide, keep iterating.
            return False
        step = sum(np.abs(new_x[i] - old_x[i]) for i in range(len(old_x)))
        return step <= delta * (1 - q)

    def solve(self, init_x, q, delta):
        """Iterate x_{n+1} = f(x_n) from init_x until convergence.

        Note: self.iter accumulates across calls (matching the original).
        """
        old_x = init_x
        new_x = init_x
        while not self._converged(new_x, old_x, q, delta):
            old_x = new_x
            new_x = old_x[:]
            # Jacobi-style update: every component uses the previous vector.
            for idx, func in enumerate(self._funcs):
                new_x[idx] = func(old_x)
            self.log.debug(f"Iter[{self.iter}]: Init: {old_x} Next: {new_x}")
            self.iter = self.iter + 1
        return new_x
# In[4]:
class Newton:
    """Newton's method solver for f(x) = 0 given the inverse Jacobian."""

    def __init__(self, f_vec, J):
        self._funcs = f_vec
        self._inv_jacobian = J  # matrix of callables: J^{-1}(x) entries
        self.iter = 0
        self.log = logging.getLogger("Newton")

    def _newton_step(self, x, row):
        """Component `row` of J^{-1}(x) applied to f(x)."""
        return sum(self._inv_jacobian[row][col](x) * self._funcs[col](x)
                   for col in range(len(self._funcs)))

    def _converged(self, new_x, old_x, M2, m1, delta):
        """Stop once the l1 step is below sqrt(2 * delta * m1 / M2)."""
        if new_x == old_x:
            # Initial state: old and new coincide, keep iterating.
            return False
        step = sum(np.abs(new_x[i] - old_x[i]) for i in range(len(old_x)))
        return step < np.sqrt(2 * delta * m1 / M2)

    def solve(self, init_x, M2, m1, delta):
        """Run Newton iterations from init_x until the step test passes."""
        self.iter = 0
        old_x = init_x
        new_x = init_x
        while not self._converged(new_x, old_x, M2, m1, delta):
            old_x = new_x
            new_x = old_x[:]
            for i in range(len(self._funcs)):
                new_x[i] = old_x[i] - self._newton_step(old_x, i)
            self.log.debug(f"Iter[{self.iter}]: Init: {old_x} Next: {new_x}")
            self.iter = self.iter + 1
        return new_x
# In[5]:
def fpi_f1(x):
    """Fixed-point update for x: sqrt((x^7 + 1510) / (5 y^4))."""
    numerator = x[0] ** 7 + 1510
    denominator = 5 * x[1] ** 4
    return np.sqrt(numerator / denominator)


def fpi_f2(x):
    """Fixed-point update for y: cbrt(3 x^4 y + 105)."""
    return np.cbrt(3 * x[0] ** 4 * x[1] + 105)
fpi = FPI([fpi_f1, fpi_f2])
# In[6]:
def newton_f1(x):
    """First residual: x^7 - 5 x^2 y^4 + 1510."""
    return x[0] ** 7 - 5 * x[0] ** 2 * x[1] ** 4 + 1510


def newton_f2(x):
    """Second residual: y^3 - 3 x^4 y - 105."""
    return x[1] ** 3 - 3 * x[0] ** 4 * x[1] - 105


def J00(x):
    """dF1/dx."""
    return 7 * x[0] ** 6 - 10 * x[0] * x[1] ** 4


def J01(x):
    """dF1/dy."""
    return -20 * x[0] ** 2 * x[1] ** 3


def J10(x):
    """dF2/dx."""
    return -12 * x[0] ** 3 * x[1]


def J11(x):
    """dF2/dy."""
    return 3 * x[1] ** 2 - 3 * x[0] ** 4


def J(x):
    """Jacobian matrix evaluated at x."""
    return [[J00(x), J01(x)], [J10(x), J11(x)]]


def _det(x):
    """Determinant of the Jacobian at x."""
    return J00(x) * J11(x) - J10(x) * J01(x)


def J00_inv(x):
    return J11(x) / _det(x)


def J01_inv(x):
    return -J01(x) / _det(x)


def J10_inv(x):
    return -J10(x) / _det(x)


def J11_inv(x):
    return J00(x) / _det(x)


# Inverse Jacobian as a matrix of callables (used by the Newton solver).
J_inv = [[J00_inv, J01_inv], [J10_inv, J11_inv]]
newton = Newton([newton_f1, newton_f2], J_inv)
# In[7]:
log = logging.getLogger()
# Initial guesses for each method; Newton is exercised from two extra seeds.
x_init_vec_fpi = [[1,5], [3, -4], [-1, 5]]
x_init_vec_newton = [[1,5], [3, -4], [-1, 5], [-4, 0], [-2, -2]]
# Solver constants: delta is the target accuracy; q enters the FPI stop
# test as delta * (1 - q); m1/M2 enter Newton's as sqrt(2*delta*m1/M2).
delta = 10**-5
q = 0.5
m1 = 1
M2 = 1
fpi_results = []
fpi_iterations = []
newton_results = []
newton_iterations = []
# Run each solver from every starting point, recording the answer and
# the iteration count it took.
for x in x_init_vec_fpi:
    fpi_results.append(fpi.solve(x, q, delta))
    fpi_iterations.append(fpi.iter)
for x in x_init_vec_newton:
    newton_results.append(newton.solve(x, M2, m1, delta))
    newton_iterations.append(newton.iter)
# In[8]:
# Tabulate and print both result sets (column headers are in Russian:
# "initial guess", "result", "iterations").
fpi_dt = pd.DataFrame({"Начальное приближение": x_init_vec_fpi, "Результат": fpi_results, "Итераций": fpi_iterations})
newton_dt = pd.DataFrame({"Начальное приближение": x_init_vec_newton, "Результат": newton_results, "Итераций": newton_iterations})
print("Метод простых итераций")
print(fpi_dt)
print("\nМетод Ньютона")
print(newton_dt)
| 22.968085 | 130 | 0.556739 | 1,944 | 0.439223 | 0 | 0 | 0 | 0 | 0 | 0 | 865 | 0.195436 |
9bf3343933efdd880c3c62f4485b50df56cf0721 | 9,035 | py | Python | tictactoe.py | pauloue/tictactoe-turtle | 017127dee455828ed355dac29c9b9be4439d5cdb | [
"MIT"
] | 1 | 2021-02-12T03:08:27.000Z | 2021-02-12T03:08:27.000Z | tictactoe.py | pauloue/tictactoe-turtle | 017127dee455828ed355dac29c9b9be4439d5cdb | [
"MIT"
] | null | null | null | tictactoe.py | pauloue/tictactoe-turtle | 017127dee455828ed355dac29c9b9be4439d5cdb | [
"MIT"
] | 1 | 2018-05-23T09:29:24.000Z | 2018-05-23T09:29:24.000Z | #!/usr/bin/env python3
"""A tic tac toe game with an unbeatable AI that uses the Python turtle
module. The AI player chooses it's move using the minimax algorithm.
"""
from ttt_util import TicTacToeUI, HumanPlayer, BotPlayer
from random import shuffle
class TicTacToe:
    """A turtle-based tic tac toe game supporting human and bot players."""

    def __init__(self, player1, player2):
        """Initialize a game of tic tac toe with the given players
        (HumanPlayer or BotPlayer objects). The player with the first
        move alternates each game, starting with player1.
        """
        if player1.mark == player2.mark:
            raise ValueError("players must not use the same mark")
        self.p1 = player1
        self.p2 = player2
        self.turn_order = [self.p1, self.p2]
        # Board is a flat list of 9 cells: None (empty) or a player mark.
        self.board = [None] * 9
        self.ties = 0
        # All 8 winning triples: rows, columns, diagonals.
        self.win_lines = [[0, 1, 2], [3, 4, 5], [6, 7, 8], [0, 3, 6],
                          [1, 4, 7], [2, 5, 8], [0, 4, 8], [2, 4, 6]]
        self.UI = TicTacToeUI()
        self.UI.draw_grid()
        self.print_stats()
        self.start_game()
        self.UI.wn.mainloop()

    def start_game(self):
        """Call the correct game function based on the player types."""
        self.players = self.turn_order.copy()
        first = self.players[0]
        if all(player.player_type == 'human' for player in self.players):
            self.UI.display(first.name, 'top', first.color)
            self.UI.wn.onclick(self.human_game)
        elif all(player.player_type == 'bot' for player in self.players):
            self.bot_game()
        else:
            if first.player_type == 'bot':
                # Bot moves first, then the human becomes players[0].
                self.bot_take_turn(first)
                self.players.reverse()
            self.UI.wn.onclick(self.human_bot_game)

    def human_game(self, x, y):
        """Start a game with two human players. Accepts the coordinates
        of a user click passed by an onclick call.
        """
        # Remove event binding
        self.UI.wn.onclick(None)
        # Get the board section of the clicked point
        pos = self.get_position(x, y)
        # Exit if click is outside the grid or the grid section isn't empty
        if pos is None or self.board[pos] is not None:
            self.UI.wn.onclick(self.human_game)
            return
        player = self.players[0]
        self.take_turn(player, pos)
        if self.check_if_won(self.board, player.mark):
            self.end_game(player)
            return
        elif None not in self.board:
            self.end_game('tie')
            return
        self.players.reverse()
        # Reactivate event binding and display who's turn it is
        self.UI.wn.onclick(self.human_game)
        self.UI.display(self.players[0].name, 'top', self.players[0].color)

    def bot_game(self):
        """Start a game with two bot players. Should result in a tie
        every time.
        """
        while None in self.board:
            self.bot_take_turn(self.players[0])
            if self.check_if_won(self.board, self.players[0].mark):
                self.end_game(self.players[0])
                return
            self.players.reverse()
        self.end_game('tie')

    def human_bot_game(self, x, y):
        """Start a game with a human and bot player. Accepts the
        coordinates of a user click passed by an onclick call.
        """
        self.UI.wn.onclick(None)
        usr_pos = self.get_position(x, y)
        if usr_pos is None or self.board[usr_pos] is not None:
            self.UI.wn.onclick(self.human_bot_game)
            return
        # Human plays at the clicked cell, then the bot responds.
        for player in self.players:
            if player.player_type == 'human':
                self.take_turn(player, usr_pos)
            else:
                self.bot_take_turn(player)
            if self.check_if_won(self.board, player.mark):
                self.end_game(player)
                return
            elif None not in self.board:
                self.end_game('tie')
                return
        self.UI.wn.onclick(self.human_bot_game)

    def print_stats(self):
        """Print or update the stats text."""
        stats_text = (
            f"{self.p1.name} ({self.p1.mark}): {self.p1.wins} Ties: "
            f"{self.ties} {self.p2.name} ({self.p2.mark}): {self.p2.wins}")
        self.UI.display(stats_text, 'bottom')

    def get_position(self, x, y):
        """Return the grid section (0-8) of the given coordinates."""
        # Grid cells are 150px squares centered on the origin.
        if x > -225 and x < -75 and y > 75 and y < 225:
            position = 0
        elif x > -75 and x < 75 and y > 75 and y < 225:
            position = 1
        elif x > 75 and x < 225 and y > 75 and y < 225:
            position = 2
        elif x > -225 and x < -75 and y > -75 and y < 75:
            position = 3
        elif x > -75 and x < 75 and y > -75 and y < 75:
            position = 4
        elif x > 75 and x < 225 and y > -75 and y < 75:
            position = 5
        elif x > -225 and x < -75 and y > -225 and y < -75:
            position = 6
        elif x > -75 and x < 75 and y > -225 and y < -75:
            position = 7
        elif x > 75 and x < 225 and y > -225 and y < -75:
            position = 8
        else:
            position = None
        return position

    def take_turn(self, player, position):
        """Update the board with player's move at the given position."""
        self.board[position] = player.mark
        print(player.name, "marks section", position)
        self.UI.mark(player.mark, position, player.color)

    def bot_take_turn(self, player):
        """Take a turn with the given player at the position chosen by
        the minimax algorithm.
        """
        self.minimax_calls = 0
        pos, score = self.minimax_choose_pos(self.board, player.mark)
        print(f"Minimax score: {score}, function calls: {self.minimax_calls}")
        self.take_turn(player, pos)

    def minimax_choose_pos(self, board, turn):
        """Return a tuple with the best position for the player with the
        given mark to play on the given board and the score associated
        with that move. A score of 1 means the player has a guaranteed
        win, -1 indicates a loss if the opponent plays well, and 0 means
        the game will end in a tie.

        How it works:
        1. Play in each empty position on a copy of the board.
        2. If the board is at a terminal state, score it for the current
        player: 1 for a win, -1 for a loss, 0 for a tie.
        3. If the board is not at a terminal state, recursively run the
        function on the new board for the opponent until a terminal
        state is reached.
        4. Return the maximum score along with the position that
        resulted in that score.
        """
        self.minimax_calls += 1
        opponent = 'o' if turn == 'x' else 'x'
        empty_pos = [pos for pos in range(9) if not board[pos]]
        # Shuffle so equally-scored moves vary between games.
        shuffle(empty_pos)
        max_score = -10
        for pos in empty_pos:
            # Play on a new board
            new_board = board.copy()
            new_board[pos] = turn
            # Score the board
            if self.check_if_won(new_board, turn):
                score = 1
            elif self.check_if_won(new_board, opponent):
                score = -1
            elif None not in new_board:
                score = 0
            else:
                # Game is not over, recursively check child nodes
                score = -self.minimax_choose_pos(new_board, opponent)[1]
            # Maximize the score
            if score == 1:
                # 1 is the best possible score so we can stop searching
                return (pos, score)
            if score > max_score:
                best_pos = pos
                max_score = score
        return (best_pos, max_score)

    def check_if_won(self, board, mark):
        """Return True if the player with the given mark has won."""
        return any(all(board[p] == mark for p in l) for l in self.win_lines)

    def end_game(self, winner):
        """Show game over text and update the stats based on the winner
        (a player object or 'tie').
        Bind a screen click event to reset().
        """
        if winner == 'tie':
            self.ties += 1
            msg = "Tie Game"
            color = 'black'
        else:
            winner.wins += 1
            msg = "{} Wins!".format(winner.name)
            color = winner.color
        self.UI.display(msg, 'top', color)
        print(msg, "\n")
        self.print_stats()
        self.UI.wn.onclick(self.reset)

    def reset(self, *_):  # Ignore coordinates from onclick call
        """Clear game over text, reset board, and start a new game."""
        self.UI.wn.onclick(None)
        self.UI.t_top_text.clear()
        self.UI.t_marks.clear()
        self.board = [None] * 9
        # Alternate who moves first each game.
        self.turn_order.reverse()
        self.start_game()
def main():
    """Initialize two players and a game of tic tac toe."""
    human = HumanPlayer("Player", 'x')
    bot = BotPlayer("Bot", 'o')
    game = TicTacToe(human, bot)


if __name__ == '__main__':
    main()
| 37.962185 | 78 | 0.563365 | 8,568 | 0.948312 | 0 | 0 | 0 | 0 | 0 | 0 | 2,956 | 0.327172 |
9bf3bc1a17de012e467867d34b900bdba5a0b239 | 21,722 | py | Python | cjktools/tests/resources/test_tatoeba.py | pganssle/cjktools | 853dec98177b5552eab4ea18ca2ce481aff28d2d | [
"BSD-3-Clause"
] | 17 | 2016-04-09T00:47:53.000Z | 2020-06-09T10:16:10.000Z | cjktools/tests/resources/test_tatoeba.py | pganssle/cjktools | 853dec98177b5552eab4ea18ca2ce481aff28d2d | [
"BSD-3-Clause"
] | 7 | 2016-04-19T20:14:11.000Z | 2016-12-23T11:09:56.000Z | cjktools/tests/resources/test_tatoeba.py | pganssle/cjktools | 853dec98177b5552eab4ea18ca2ce481aff28d2d | [
"BSD-3-Clause"
] | 6 | 2015-12-10T02:28:32.000Z | 2021-10-17T05:23:14.000Z | # -*- coding: utf-8 -*-
#
# test_tatoeba.py
# cjktools
#
from __future__ import unicode_literals
import os
from .._common import to_unicode_stream, to_string_stream
import unittest
from six import text_type
from functools import partial
from datetime import datetime
from cjktools.resources import tatoeba, auto_format, cjkdata
from cjktools.common import sopen
from nose_parameterized import parameterized
def get_data_loc(key, suffix='_00', extension='.csv'):
    """Build the path to a sample-data fixture for the given resource key."""
    base_names = {
        'sentences': 'sentences',
        'jpn_indices': 'jpn_indices',
        'links': 'links',
        'sentences_detailed': 'sentences_detailed',
        'edict': 'je_edict',
    }
    here = os.path.dirname(os.path.realpath(__file__))
    sample_dir = os.path.join(here, 'sample_data/')
    return os.path.join(sample_dir, base_names[key] + suffix + extension)
def get_edict():
    """Load the sample edict dictionary, memoizing it on the function object."""
    if getattr(get_edict, '_cached', None) is not None:
        return get_edict._cached
    with sopen(get_data_loc('edict', extension=''), mode='r') as edf:
        edict = auto_format.load_dictionary(edf)
    # Cache on the function itself so repeated test calls reuse the parse.
    get_edict._cached = edict
    return edict
class ReaderBaseCase(unittest.TestCase):
    """Shared scaffolding for reader tests.

    Subclasses must set ``_resource_name`` (sample-data key) and
    ``ReaderClass`` (the reader type under test).
    """

    def resource_fpath(self):
        # Path to this test's sample-data fixture.
        return get_data_loc(self._resource_name)

    def load_file(self, resource=None, **kwargs):
        """
        Loads the file into a TatoebaSentenceReader object.
        """
        if resource is None:
            resource = self.resource_fpath()
        reader = self.ReaderClass(resource, **kwargs)
        return reader
class FileObjReaderMixin(object):
    """
    Mixin to run the existing tests using an already opened file object.
    """

    def load_file(self, resource=None, **kwargs):
        if resource is None:
            resource = self.resource_fpath()
        with open(self.resource_fpath(), mode='r') as resource_file:
            try:
                # Readers may use .filename for repr/bookkeeping; plain
                # file objects don't always allow setting it.
                resource_file.filename = self.resource_fpath()
            except AttributeError:
                class FileWrapper(object):
                    """Proxy adding a .filename attribute to a file object."""

                    def __init__(self, fin, fpath):
                        self.f = fin
                        self.filename = fpath

                    def __getattr__(self, attr):
                        return getattr(self.f, attr)

                    def __iter__(self):
                        return iter(self.f)

                resource_file = FileWrapper(resource_file,
                                            self.resource_fpath())
            r = super(FileObjReaderMixin, self).load_file(resource_file,
                                                          **kwargs)
        return r
###
# Tatoeba Reader base class test
class TatoebaReaderTest(unittest.TestCase):
    """The abstract base reader must refuse direct instantiation."""

    def test_not_implemented(self):
        with self.assertRaises(NotImplementedError):
            tatoeba.TatoebaReader()
###
# Tatoeba Sentence Reader tests
class TatoebaSentenceReaderTest(ReaderBaseCase):
    """Tests for TatoebaSentenceReader against the sentences fixture."""

    _resource_name = 'sentences'
    ReaderClass = tatoeba.TatoebaSentenceReader

    def test_basic(self):
        # Test that all the sentence IDs load properly
        sr = self.load_file()
        sent_ids = [
            6381, 29390, 36809, 46235, 54432, 62093, 68807, 82526, 93620,
            109744, 112733, 156245, 192227, 199398, 208975, 224758, 231440, 258289,
            290943, 293946, 310087, 321190, 410787, 508870, 723598, 817971,
            2031040, 2031042, 2172488
        ]
        self.assertEqual(sorted(sr.keys()), sent_ids)
        self.assertEqual(sorted(sr.keys()), sorted(sr.sentence_ids))

    def test_language_filter(self):
        # Only sentences in the requested languages should load.
        sr = self.load_file(languages={'fra', 'pol', 'rus'})
        sent_ids = [6381, 508870, 2172488]
        self.assertEqual(sorted(sr.keys()), sent_ids)

    def test_custom_filter_row(self):
        # A subclass can exclude rows by overriding filter_row.
        class TatoebaSentenceSubsetReader(tatoeba.TatoebaSentenceReader):
            def __init__(self, sentences, subset=None, **kwargs):
                self._subset = set(subset)
                super(TatoebaSentenceSubsetReader, self).__init__(
                    sentences, **kwargs)

            def filter_row(self, row):
                return int(row[0]) not in self._subset

        sr = TatoebaSentenceSubsetReader(
            sentences=self.resource_fpath(),
            subset={224758, 6381, 29390},
            languages={'eng', 'jpn'})
        sent_inds = [29390, 224758]
        self.assertEqual(sorted(sr.keys()), sent_inds)

    def test_language(self):
        sr = self.load_file()
        for sent_id in [82526, 93620, 109744, 208975]:
            self.assertEqual(sr.language(sent_id), 'jpn')
        for sent_id in [36809, 293946, 410787, 2031042]:
            self.assertEqual(sr.language(sent_id), 'eng')

    def test_language_error(self):
        sr = self.load_file()
        with self.assertRaises(tatoeba.InvalidIDError):
            sr.language(193)

    def test_sentence(self):
        sr = self.load_file()
        sentence_pairs = [
            (192227, 'ロールプレイングのテレビゲームは時間を食う。'),
            (29390, 'Role-playing video games are time consuming.'),
            (208975, 'その上手な運転手は車の列を縫うように車を走らせた。'),
            (46235, 'The good driver wove his way through the traffic.')
        ]
        for sent_id, sentence in sentence_pairs:
            self.assertEqual(sr[sent_id], sentence)

    def test_sentence_error(self):
        sr = self.load_file()
        with self.assertRaises(tatoeba.InvalidIDError):
            sr.sentence(24)

    def test_details_error(self):
        # The plain sentences fixture carries no detail columns at all.
        sr = self.load_file()
        # One that is in the data set
        with self.assertRaises(tatoeba.MissingDataError):
            sr.details(192227)
        # One that's not in the data set
        with self.assertRaises(tatoeba.MissingDataError):
            sr.details(0)

    def test_repr(self):
        sr = self.load_file()
        expected_fmt = "TatoebaSentenceReader(sentences='{}')"
        expected = expected_fmt.format(self.resource_fpath())
        self.assertEqual(repr(sr), expected)
class TatoebaSentenceReaderDetailedTest(TatoebaSentenceReaderTest):
    """Re-run the sentence reader tests against the detailed fixture."""

    _resource_name = 'sentences_detailed'

    def test_details(self):
        sr = self.load_file()
        # Note: I modified 410787 in the csv to make the added/modified
        # different.
        details_pairs = [
            (82526, (None, None, None)),
            (199398, (None, None, None)),
            (258289, ('CK', None, datetime(2010, 10, 7, 15, 55, 17))),
            (410787, ('CK',
                      datetime(2010, 6, 24, 14, 20, 10),
                      datetime(2010, 6, 24, 14, 20, 28)))
        ]
        for sent_id, details in details_pairs:
            self.assertEqual(sr.details(sent_id), details)

    def test_details_error(self):
        # With detail columns present, an unknown ID is an InvalidIDError
        # (overrides the MissingDataError expectation of the base class).
        sr = self.load_file()
        with self.assertRaises(tatoeba.InvalidIDError):
            sr.details(24)
class TatoebaSentenceReaderMiscTests(unittest.TestCase):
    """Malformed rows must raise InvalidFileError."""

    @parameterized.expand([
        # Too few columns
        ('too_few', '3444\teng\n3949\tjp\n'),
        # Too many for undetailed, too few for detailed
        ('in_between', '3444\teng\tThe boy ate a tiger\tMB'),
        # Too many even for detailed
        ('too_many', '3444\teng\tThe boy ate a tiger\tMB\t\\N\t\\N\t\\N'),
        # In between, but with unicode
        ('in_between_unicode', '3444\teng\tThe boy ate a 虎\tMB')
    ])
    def test_invalid_file(self, name, rowstr):
        invalid = to_string_stream(rowstr)
        with self.assertRaises(tatoeba.InvalidFileError):
            sr = tatoeba.TatoebaSentenceReader(invalid)
# Re-run the sentence reader suites with pre-opened file objects instead
# of path strings; the mixin swaps out load_file, the tests are inherited.
class TatoebaSentenceReaderFObjTest(FileObjReaderMixin,
                                    TatoebaSentenceReaderTest):
    pass


class TatoebaSentenceReaderDetailedFObjTest(FileObjReaderMixin,
                                            TatoebaSentenceReaderDetailedTest):
    pass
###
# Tatoeba Links Reader tests
class TatoebaLinksReaderTests(ReaderBaseCase):
    """Tests for TatoebaLinksReader against the links fixture."""

    _resource_name = 'links'
    ReaderClass = tatoeba.TatoebaLinksReader

    def test_basic(self):
        # Every sentence should map to its full translation group.
        lr = self.load_file()
        self.assertEqual(len(lr), 29)
        groups = [
            (6381, {6381, 156245, 258289, 817971}),
            (29390, {29390, 192227}),
            (36809, {36809, 54432, 199398, 410787}),
            (46235, {46235, 208975}),
            (62093, {62093, 224758, 723598, 2031040, 2031042}),
            (68807, {68807, 231440}),
            (82526, {82526, 321190, 508870}),
            (93620, {93620, 310087, 2172488}),
            (109744, {109744, 293946}),
            (112733, {112733, 290943})
        ]
        for sent_id, group in groups:
            self.assertEqual(lr[sent_id], group)
            self.assertEqual(lr.group(sent_id), group)

    def test_filter_both(self):
        # 'both' keeps a link only when both endpoints are in the subset.
        subset = {6381, 82526, 258289, 192227, 508870}
        lr = self.load_file(sentence_ids=subset, sentence_ids_filter='both')
        groups = [
            (6381, {6381, 258289}),
            (82526, {82526, 508870})
        ]
        for sent_id, group in groups:
            self.assertEqual(lr[sent_id], group)
        self.assertEqual(lr.sentence_ids_filter, 'both')

    def test_filter_sent_id(self):
        # Normally there isn't much difference between sent_id and
        # trans_id because the links file stores things redundantly, but
        # I've removed all but the 6381->817971 edge in the graph, so it
        # will show up in sent_id, but not in trans_id
        subset = {6381, 82526, 258289, 192227, 508870}
        lr = self.load_file(sentence_ids=subset, sentence_ids_filter='sent_id')
        groups = [
            (6381, {6381, 156245, 258289, 817971}),
            (82526, {82526, 321190, 508870}),
            (29390, {29390, 192227})
        ]
        for sent_id, group in groups:
            self.assertEqual(lr[sent_id], group)
        self.assertEqual(lr.sentence_ids_filter, 'sent_id')

    def test_filter_trans_id(self):
        subset = {6381, 82526, 258289, 192227, 508870}
        lr = self.load_file(sentence_ids=subset, sentence_ids_filter='trans_id')
        groups = [
            (6381, {6381, 156245, 258289}),
            (82526, {82526, 321190, 508870}),
            (29390, {29390, 192227})
        ]
        for sent_id, group in groups:
            self.assertEqual(lr[sent_id], group)
        self.assertEqual(lr.sentence_ids_filter, 'trans_id')

    def test_filter_error(self):
        # An unrecognized filter mode must be rejected.
        subset = {6381, 82526, 258289, 192227, 508870}
        with self.assertRaises(ValueError):
            lr = self.load_file(sentence_ids=subset,
                                sentence_ids_filter='banana')

    def test_group_error(self):
        lr = self.load_file()
        with self.assertRaises(tatoeba.InvalidIDError):
            lr.group(100)

    def test_groups(self):
        lr = self.load_file(sentence_ids={6381, 156245, 29390, 192227})
        groups = [
            {6381, 156245},
            {29390, 192227}
        ]
        # Sort by smallest member for a deterministic comparison.
        actual = sorted(lr.groups(), key=lambda x: min(x))
        self.assertEqual(groups, actual)

    def test_links(self):
        lr = self.load_file()
        self.assertEqual(lr.links, self.resource_fpath())

    def test_repr(self):
        lr = self.load_file()
        expected_fmt = "TatoebaLinksReader(links='{}')"
        expected = expected_fmt.format(self.resource_fpath())
        self.assertEqual(repr(lr), expected)
class TatoebaLinksReaderMiscTests(unittest.TestCase):
    """Malformed link rows must raise InvalidFileError."""

    @parameterized.expand([
        # Too few columns
        ('too_few', '3444\n3949\n'),
        # Too many columns
        ('too_many', '3444\teng\tThe boy ate a tiger\tMB'),
        # Too many columns, with unicode
        ('too_many', '3444\teng\tThe boy ate a tiger (虎)\tMB'),
    ])
    def test_invalid_file(self, name, rowstr):
        # Too few columns
        invalid = to_string_stream(rowstr)
        with self.assertRaises(tatoeba.InvalidFileError):
            sr = tatoeba.TatoebaLinksReader(invalid)
###
# Tanaka word tests
class TanakaWordTests(unittest.TestCase):
    """Tests for parsing, rendering and comparing Tanaka-corpus word tags."""
    # Class under test; exposed as an attribute so subclasses may override it.
    WordClass = tatoeba.TanakaWord
    def _default_args(self, args):
        """Pad *args* out to the 5-argument constructor signature.

        The positional order -- inferred from the test cases below -- is
        (headword, reading, sense, display, example-flag).
        """
        default_args = (None, None, None, None, False)
        return args + default_args[len(args):]
    @parameterized.expand([
        ('alone', 'は|1', ('は',)),
        ('after_reading', '度(ど)|1', ('度', 'ど')),
        ('before_sense', 'は|1[01]', ('は', None, 1)),
        ('before_disp', 'ばれる|1{ばれた}', ('ばれる', None, None, 'ばれた')),
        ('before_example', 'わっと|2~', ('わっと', None, None, None, True))
    ])
    def test_legacy_tag(self, name, tagstr, expected):
        """Legacy ``|n`` frequency tags parse into the expected word."""
        exp_word = self.WordClass(*self._default_args(expected))
        act = self.WordClass.from_text(tagstr)
        self.assertEqual(exp_word, act)
        # Also verify the parsed .display attribute, not just equality.
        self.assertEqual(exp_word.display, act.display)
    @parameterized.expand([
        ('headword', ('を',), 'を'),
        ('reading', ('時', 'とき'), '時(とき)'),
        ('sense', ('が', None, 3), 'が[03]'),
        ('read_sense', ('大学', 'だいがく', 1), '大学(だいがく)[01]'),
        ('display', ('である', None, None, 'であった'), 'である{であった}'),
        ('read_disp', ('為る', 'する', None, 'し'), '為る(する){し}'),
        ('sense_disp', ('其の', None, 1, 'その'), '其の[01]{その}'),
        ('read_sense_disp', ('其の', 'その', 1, 'その'), '其の(その)[01]{その}'),
        ('example', ('ロールプレイング', None, None, None, True),
         'ロールプレイング~'),
        ('read_ex', ('時', 'とき', None, None, True), '時(とき)~'),
        ('sense_ex', ('食う', None, 7, None, True), '食う[07]~'),
        ('read_sense_ex', ('彼', 'かれ', 1, None, True), '彼(かれ)[01]~'),
        ('disp_ex',
         ('ネイティブアメリカン', None, None, 'ネイティブ・アメリカン', True),
         'ネイティブアメリカン{ネイティブ・アメリカン}~'),
        ('read_disp_ex',
         ('喝采を送る', 'かっさいをおくる', None, '喝采を送った', True),
         '喝采を送る(かっさいをおくる){喝采を送った}~'),
        ('sense_disp_ex', ('ソフト', None, 1, 'ソフトな', True),
         'ソフト[01]{ソフトな}~'),
        ('read_sense_disp_ex', ('立て', 'たて', 2, 'たて', True),
         '立て(たて)[02]{たて}~'),
    ])
    def test_str(self, name, args, expected):
        """Each combination of fields round-trips to the expected tag text:
        reading in (), sense in [], display in {}, example marked with ~."""
        word = self.WordClass(*self._default_args(args))
        self.assertEqual(text_type(word), expected)
    @parameterized.expand([
        ('headword', ('を',), ('が',)),
        ('reading', ('時', 'とき'), ('時', 'じ')),
        ('sense', ('が', None, 3), ('が', None, 2)),
        ('display',
         ('飲ませる', None, None, '飲ませて'),
         ('飲ませる', None, None, '飲ませない')),
        ('example',
         ('ロールプレイング', None, None, None, True),
         ('ロールプレイング', None, None, None, False)),
    ])
    def test_neq(self, name, arg1, arg2):
        """Words differing in any single field compare unequal."""
        w1, w2 = (self.WordClass(*self._default_args(arg))
                  for arg in (arg1, arg2))
        self.assertNotEqual(w1, w2)
    def test_req(self):
        """Reflected == : a word compares equal against an object whose
        __eq__ matches on the word's string form, in both orders."""
        class StrEq(object):
            def __init__(self, base_str):
                self.base_str = base_str
            def __eq__(self, other):
                return text_type(other) == self.base_str
        w = self.WordClass('時', 'とき', None, None, False)
        self.assertTrue(w == StrEq('時(とき)'))
        self.assertTrue(StrEq('時(とき)') == w)
    def test_rneq(self):
        """Reflected != : the negated comparison also works in both orders."""
        class StrEq(object):
            def __init__(self, base_str):
                self.base_str = base_str
            def __eq__(self, other):
                return text_type(other) == self.base_str
        w = self.WordClass('時', 'とき', None, None, False)
        self.assertFalse(w != StrEq('時(とき)'))
        self.assertFalse(StrEq('時(とき)') != w)
###
# Tatoeba Index Reader tests
class TatoebaIndexReaderTests(ReaderBaseCase):
    """Tests for TatoebaIndexReader against the jpn_indices fixture."""
    _resource_name = 'jpn_indices'
    ReaderClass = tatoeba.TatoebaIndexReader
    WordClass = tatoeba.TanakaWord
    @property
    def sentences(self):
        """Expected parsed sentences keyed by sentence id.

        WordClass argument order is (headword, reading, sense, display,
        example-flag); without an edict lookup, most readings stay None.
        """
        _sentences = {
            109744: [
                self.WordClass('彼', 'かれ', 1, None, False),
                self.WordClass('は', None, None, None, False),
                self.WordClass('英語', None, None, None, False),
                self.WordClass('が', None, None, None, False),
                self.WordClass('苦手', None, None, None, False),
                self.WordClass('だ', None, None, None, False),
                self.WordClass('が', None, 3, None, False),
                self.WordClass('数学', None, None, None, False),
                self.WordClass('で', None, None, None, False),
                self.WordClass('は', None, None, None, False),
                self.WordClass('誰にも', None, None, None, False),
                self.WordClass('劣る', None, None, '劣らない', False)
            ],
            112733: [
                self.WordClass('彼', 'かれ', 1, None, False),
                self.WordClass('は', None, None, None, False),
                self.WordClass('其の', None, 1, 'その', False),
                self.WordClass('時', 'とき', None, None, False),
                self.WordClass('大学', None, None, None, False),
                self.WordClass('を', None, None, None, False),
                self.WordClass('卒業', None, 1, None, False),
                self.WordClass('為る', 'する', None, 'し', False),
                self.WordClass('立て', 'たて', 2, 'たて', True),
                self.WordClass('である', None, None, 'であった', False)
            ],
            192227: [
                self.WordClass('ロールプレイング', None, None, None, True),
                self.WordClass('の', None, None, None, False),
                self.WordClass('テレビゲーム', None, None, None, False),
                self.WordClass('は', None, None, None, False),
                self.WordClass('時間', None, None, None, False),
                self.WordClass('を', None, None, None, False),
                self.WordClass('食う', None, 7, None, False)
            ]
        }
        return _sentences
    def test_basic(self):
        """Indexing the reader by sentence id yields the parsed word list."""
        ir = self.load_file()
        # A selection of the sentences
        for sent_id, sent in self.sentences.items():
            exp_sent = sent
            act_sent = ir[sent_id]
            self.assertEqual(sent, act_sent)
            # In this case, we're going to make sure that the actual
            # .display property was actually set correctly as well
            for exp_word, act_word in zip(exp_sent, act_sent):
                self.assertEqual(exp_word.display, act_word.display)
    def test_link(self):
        """link() maps each Japanese sentence id to its English trans id."""
        ir = self.load_file()
        links = {
            82526: 321190, 93620: 310087, 109744: 293946, 112733: 290943,
            156245: 258289, 192227: 29390, 199398: 54432, 208975: 46235,
            224758: 62093, 231440: 68807
        }
        for sent_id, link_id in links.items():
            self.assertEqual(link_id, ir.link(sent_id))
    def test_link_error(self):
        """link() on an unknown sentence id raises InvalidIDError."""
        ir = self.load_file()
        with self.assertRaises(tatoeba.InvalidIDError):
            ir.link(1224)
    def test_jpn_indices(self):
        """The reader records the path of the jpn_indices file."""
        ir = self.load_file()
        self.assertEqual(ir.jpn_indices, self.resource_fpath())
    def test_subset(self):
        """A sentence_ids subset restricts which entries are loaded."""
        ir = self.load_file(sentence_ids=(112733, 109744))
        self.assertEqual(len(ir), 2)
        self.assertEqual(ir.sentence_id_subset, {112733, 109744})
        self.assertEqual(set(ir.keys()), {112733, 109744})
class TatoebaIndexReaderEdictTests(TatoebaIndexReaderTests):
    """Re-runs the index reader tests with an edict dictionary supplied;
    expected sentences then carry readings the plain run leaves as None."""
    ReaderClass = partial(tatoeba.TatoebaIndexReader, edict=get_edict())
    @property
    def sentences(self):
        # Same fixtures as the base class, but with readings filled in
        # (e.g. 英語 -> えいご) -- presumably looked up via edict; the
        # lookup itself happens inside the reader under test.
        _sentences = {
            109744: [
                self.WordClass('彼', 'かれ', 1, None, False),
                self.WordClass('は', None, None, None, False),
                self.WordClass('英語', 'えいご', None, None, False),
                self.WordClass('が', None, None, None, False),
                self.WordClass('苦手', 'にがて', None, None, False),
                self.WordClass('だ', None, None, None, False),
                self.WordClass('が', None, 3, None, False),
                self.WordClass('数学', 'すうがく', None, None, False),
                self.WordClass('で', None, None, None, False),
                self.WordClass('は', None, None, None, False),
                self.WordClass('誰にも', 'だれにも', None, None, False),
                self.WordClass('劣る', 'おとる', None, '劣らない', False)
            ],
            112733: [
                self.WordClass('彼', 'かれ', 1, None, False),
                self.WordClass('は', None, None, None, False),
                self.WordClass('其の', 'その', 1, 'その', False),
                self.WordClass('時', 'とき', None, None, False),
                self.WordClass('大学', 'だいがく', None, None, False),
                self.WordClass('を', None, None, None, False),
                self.WordClass('卒業', 'そつぎょう', 1, None, False),
                self.WordClass('為る', 'する', None, 'し', False),
                self.WordClass('立て', 'たて', 2, 'たて', True),
                self.WordClass('である', None, None, 'であった', False)
            ],
            192227: [
                self.WordClass('ロールプレイング', None, None, None, True),
                self.WordClass('の', None, None, None, False),
                self.WordClass('テレビゲーム', None, None, None, False),
                self.WordClass('は', None, None, None, False),
                self.WordClass('時間', 'じかん', None, None, False),
                self.WordClass('を', None, None, None, False),
                self.WordClass('食う', 'くう', 7, None, False)
            ]
        }
        return _sentences
class TatoebaIndexReaderMiscTests(unittest.TestCase):
    """Error-path tests for TatoebaIndexReader on malformed entries."""
    def test_invalid_file(self):
        # First word has display before sense.
        bad_entry = ('93620\t310087\t'
                     '彼女{テレビ}[01] は|1 一時間{1時間} 以内 に 戻る{戻ります}\n')
        with self.assertRaises(tatoeba.InvalidEntryError):
            tatoeba.TatoebaIndexReader(to_string_stream(bad_entry))
| 34.154088 | 83 | 0.564727 | 21,430 | 0.938924 | 0 | 0 | 8,578 | 0.375832 | 0 | 0 | 4,335 | 0.189932 |
9bf44b2d04766a196cb4f85df49ddd09eb5801af | 65 | py | Python | sql/__init__.py | realDragonium/small-flask-project | 5688210418997dbdf5b38e0ce77750eab7eefd3a | [
"Unlicense"
] | null | null | null | sql/__init__.py | realDragonium/small-flask-project | 5688210418997dbdf5b38e0ce77750eab7eefd3a | [
"Unlicense"
] | null | null | null | sql/__init__.py | realDragonium/small-flask-project | 5688210418997dbdf5b38e0ce77750eab7eefd3a | [
"Unlicense"
] | null | null | null | from .sql_models import SQLQuiz, SQLQuestion, SQLAnswerOption
| 13 | 61 | 0.815385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
9bf4cfbcb37e69da790172521eacefd9fe4daf66 | 899 | py | Python | setup.py | fabaff/penin | 6a93984357b4478907746225bfadea0bfc8b681c | [
"Apache-2.0"
] | 4 | 2019-06-04T04:14:42.000Z | 2021-04-02T17:25:28.000Z | setup.py | fabaff/penin | 6a93984357b4478907746225bfadea0bfc8b681c | [
"Apache-2.0"
] | null | null | null | setup.py | fabaff/penin | 6a93984357b4478907746225bfadea0bfc8b681c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
"""PenIn setup script."""
from setuptools import setup, find_packages
from penin.core.version import get_version
VERSION = get_version()

# Read the long description from the README.  A context manager guarantees
# the handle is closed even if read() fails, and an explicit encoding keeps
# the build independent of the locale default.
with open("README.md", "r", encoding="utf-8") as readme_file:
    LONG_DESCRIPTION = readme_file.read()

setup(
    name="penin",
    version=VERSION,
    description="Information gathering and penetration testing framework",
    long_description=LONG_DESCRIPTION,
    long_description_content_type="text/markdown",
    author="Fabian Affolter",
    author_email="fabian@affolter-engineering.ch",
    url="https://github.com/fabaff/penin",
    license="Apache 2.0",
    packages=find_packages(exclude=["ez_setup", "tests*"]),
    package_data={"penin": ["templates/*"]},
    include_package_data=True,
    install_requires=["cement", "pyyaml", "colorlog", "jinja2", "tinydb"],
    entry_points={"console_scripts": ["penin = penin.main:main"]},
)
| 32.107143 | 74 | 0.716352 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 356 | 0.395996 |
9bf5111951f211230177602f66faaa2faeecb8c9 | 65 | py | Python | tyled/tileset/orthogonal/__init__.py | kfields/tyled | 65f57c3f060c369d1e3875d94363a03a11fe7c3e | [
"MIT"
] | 1 | 2020-04-14T09:25:25.000Z | 2020-04-14T09:25:25.000Z | tyled/tileset/orthogonal/__init__.py | kfields/tyled | 65f57c3f060c369d1e3875d94363a03a11fe7c3e | [
"MIT"
] | 2 | 2021-09-08T01:52:49.000Z | 2022-01-13T02:32:08.000Z | tyled/tileset/orthogonal/__init__.py | kfields/tyled | 65f57c3f060c369d1e3875d94363a03a11fe7c3e | [
"MIT"
] | null | null | null | from tyled.tileset.orthogonal.orthogonal import OrthogonalTileset | 65 | 65 | 0.907692 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
9bf527aac7e8ff43497cfbcad130e15221237397 | 107 | py | Python | run_length_encoding.py | wolfd/jpeg-svg | 42370757ec98642b57486c7d2fd3fae7df9bc271 | [
"MIT"
] | null | null | null | run_length_encoding.py | wolfd/jpeg-svg | 42370757ec98642b57486c7d2fd3fae7df9bc271 | [
"MIT"
] | null | null | null | run_length_encoding.py | wolfd/jpeg-svg | 42370757ec98642b57486c7d2fd3fae7df9bc271 | [
"MIT"
] | null | null | null | import typing as T
import numpy as np
def decode_run_length(compressed: T.Iterable[int]):
    # NOTE(review): unfinished stub -- the bare expression below has no
    # effect and the function implicitly returns None.  The intended
    # run-length decoding is not implemented yet.
    compressed
| 15.285714 | 51 | 0.766355 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
9bf7c5e1835265f229893ed245fb8e8ced3ce5f3 | 3,556 | py | Python | pdf2data/data.py | inducer/pdf2data | 7554c998148ac29adc9993643dff649a9108619a | [
"MIT"
] | 11 | 2019-08-10T18:01:24.000Z | 2021-07-05T20:22:02.000Z | pdf2data/data.py | inducer/pdf2data | 7554c998148ac29adc9993643dff649a9108619a | [
"MIT"
] | null | null | null | pdf2data/data.py | inducer/pdf2data | 7554c998148ac29adc9993643dff649a9108619a | [
"MIT"
] | 6 | 2019-09-01T07:56:50.000Z | 2021-06-20T12:16:52.000Z | __copyright__ = "Copyright (C) 2019 Andreas Kloeckner"
__license__ = """
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
__doc__ = """
Data handling
-------------
This module contains tools for quickly inserting JSON-ish list-of-dicts data
into a DB-API v2-compliant database. Mildly specific to :mod:`sqlite3`, but
likely easily generalized.
.. autofunction:: set_up_table
.. autofunction:: row_to_db
"""
import sqlite3
# {{{ sqlite utilities
def set_up_table(db, table_name, col_names_and_types):
    """Create *table_name* with an ``id integer primary key`` column plus
    one column per ``(name, type)`` pair in *col_names_and_types*.

    Does nothing if the table already exists.  Table/column names are
    interpolated directly into the SQL, so they must be trusted input.
    """
    rowspecs = ", ".join(
        f"{name} {type}" for name, type in col_names_and_types)
    # "if not exists" replaces the previous fragile matching on the
    # OperationalError message text ("already exists"), which depends on
    # the sqlite version's exact wording.
    db.execute(
        f"create table if not exists {table_name} "
        f"(id integer primary key, {rowspecs})")
def key_to_col_name(key, col_name_mapping):
    """Translate a row key to a column name, preferring an explicit mapping
    and otherwise lowercasing and replacing spaces with underscores."""
    derived_name = key.lower().replace(" ", "_")
    return col_name_mapping.get(key, derived_name)
def row_to_db(
        db_cursor, table_name, row, *,
        col_name_mapping=None, value_converters=None,
        upsert_unique_cols=None):
    """Insert the dict *row* into *table_name* and return its row id.

    Keys are mapped to column names via :func:`key_to_col_name`; values may
    be transformed through *value_converters* (keyed by the original row
    key, not the column name).  If *upsert_unique_cols* is given, an
    existing row matching on those columns is reused instead of inserting;
    more than one match raises ``ValueError``.
    """
    if col_name_mapping is None:
        col_name_mapping = {}
    if value_converters is None:
        value_converters = {}

    # Build parallel lists of column names and (converted) values.
    col_names = []
    values = []
    for key, value in row.items():
        col_names.append(key_to_col_name(key, col_name_mapping))
        if key in value_converters:
            value = value_converters[key](value)
        values.append(value)

    if upsert_unique_cols:
        # Every unique column must actually be present in this row.
        for col in upsert_unique_cols:
            assert col in col_names
        where_clause = " and ".join(
            f"{col_name}=?"
            for col_name in col_names
            if col_name in upsert_unique_cols)
        upsert_values = [
            value for col_name, value in zip(col_names, values)
            if col_name in upsert_unique_cols]

        # "limit 2" is enough to distinguish zero/one/many matches.
        other_rows = list(db_cursor.execute(
            f"select id from {table_name} where {where_clause} limit 2",
            upsert_values))

        if len(other_rows) == 1:
            return other_rows[0][0]
        elif len(other_rows) > 1:
            raise ValueError(f"non-unique row in table {table_name}")
        else:
            # fall through to insertion below
            pass

    placeholders = ", ".join("?" for col in col_names)
    col_names = ", ".join(col_names)
    # Values are bound with "?" placeholders; only identifiers are
    # interpolated, so they must come from trusted input.
    db_cursor.execute(
        f"insert into {table_name} ({col_names}) values ({placeholders})",
        values)

    return db_cursor.lastrowid
# }}}
# vim: foldmethod=marker
| 31.192982 | 78 | 0.667885 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,731 | 0.486783 |
9bfb702a893a75e9d3f590535033334c0f14a5ea | 3,742 | py | Python | exact/exact/tagger_messages/management/commands/testdb.py | maubreville/Exact | 2f4ce50054bfe5350a106ef3fa1a2f03c90bbbef | [
"MIT"
] | 43 | 2020-01-29T17:19:21.000Z | 2022-03-29T11:11:32.000Z | exact/exact/tagger_messages/management/commands/testdb.py | maubreville/Exact | 2f4ce50054bfe5350a106ef3fa1a2f03c90bbbef | [
"MIT"
] | 41 | 2020-01-31T09:31:31.000Z | 2022-02-24T15:55:21.000Z | exact/exact/tagger_messages/management/commands/testdb.py | maubreville/Exact | 2f4ce50054bfe5350a106ef3fa1a2f03c90bbbef | [
"MIT"
] | 16 | 2020-02-11T18:26:32.000Z | 2021-07-30T09:05:15.000Z | from django.core.management.base import BaseCommand
from exact.users.models import User, Team, TeamMembership
from exact.tagger_messages.models import TeamMessage, GlobalMessage
from datetime import timedelta
from django.utils import timezone
from random import randint
from faker import Faker
import sys
faker = Faker()
class Command(BaseCommand):
    """Management command that wipes and repopulates users, teams, team
    messages and global announcements with fake data for load testing."""
    help = 'Test database performance for the messages'
    def handle(self, *args, **options):
        """Entry point: ask for confirmation, then rebuild all test data."""
        user_count = 5000
        team_count = 30
        message_count = 50000
        announcement_count = 5000
        if self.confirm():
            self.create_users(user_count)
            self.create_teams(team_count)
            self.create_messages(message_count)
            self.create_announcements(announcement_count)
    def create_users(self, user_count):
        """Delete all users, then create *user_count* users with unique
        fake usernames (re-rolling the name on collision)."""
        User.objects.all().delete()
        for i in range(user_count):
            fake_name = faker.user_name()
            while User.objects.filter(username=fake_name).exists():
                fake_name = faker.user_name()
            User.objects.create(username=fake_name)
    def create_teams(self, team_count):
        """Delete all teams/memberships, then create *team_count* teams,
        each with a random subset of up to half the users as members."""
        TeamMembership.objects.all().delete()
        Team.objects.all().delete()
        user_count = User.objects.all().count()
        for i in range(team_count):
            fake_team_name = faker.user_name()
            team = Team.objects.create(name=fake_team_name)
            added_users = set()
            for user in range(int(randint(0, user_count) / 2)):
                rand_user_id = randint(0, user_count - 1)
                # Skip duplicates so a user is only added to the team once.
                # NOTE(review): User.objects.all()[i] issues one query per
                # pick -- slow, but acceptable for a one-off test command.
                if rand_user_id not in added_users:
                    user_obj = User.objects.all()[rand_user_id]
                    TeamMembership.objects.create(user=user_obj, team=team)
                    added_users.add(rand_user_id)
    def create_messages(self, message_count):
        """Delete all team messages, then create *message_count* messages
        with random teams, creators and start/expire windows spread over
        roughly +/-200 days around now."""
        TeamMessage.objects.all().delete()
        user_count = User.objects.all().count()
        team_count = Team.objects.all().count()
        for i in range(message_count):
            team_obj = Team.objects.all()[randint(0, team_count - 1)]
            user_obj = User.objects.all()[randint(0, user_count - 1)]
            offset = timedelta(days=randint(-200, 200))
            start_date = timezone.now() + timedelta(days=randint(-20, 0)) + offset
            exp_date = timezone.now() + timedelta(days=randint(0, 30)) + offset
            TeamMessage.objects.create(title=faker.sentences(nb=1, ext_word_list=None)[0],
                                       content=faker.text(max_nb_chars=200, ext_word_list=None),
                                       team=team_obj, creator=user_obj, start_time=start_date,
                                       expire_time=exp_date)
    def create_announcements(self, announcement_count):
        """Delete all global messages, then create *announcement_count*
        announcements with random creators and time windows."""
        GlobalMessage.objects.all().delete()
        user_count = User.objects.all().count()
        for i in range(announcement_count):
            user_obj = User.objects.all()[randint(0, user_count - 1)]
            offset = timedelta(days=randint(-200, 200))
            start_date = timezone.now() + timedelta(days=randint(-20, 0)) + offset
            exp_date = timezone.now() + timedelta(days=randint(0, 30)) + offset
            GlobalMessage.objects.create(title=faker.sentences(nb=1, ext_word_list=None)[0], creator=user_obj, start_time=start_date, expire_time=exp_date)
    def confirm(self):
        """Interactively ask whether to flush the database; returns True
        for yes, False for no, and prints a hint otherwise (returns None)."""
        print("Do you realy want flush the Database!!! (yes/no)")
        yes = {'yes', 'y', 'ye', ''}
        no = {'no', 'n'}
        choice = input().lower()
        if choice in yes:
            return True
        elif choice in no:
            return False
        else:
            # NOTE(review): falls through returning None, which handle()
            # treats as "no" -- confirm whether a retry loop was intended.
            sys.stdout.write("Please respond with 'yes' or 'no'")
| 43.011494 | 155 | 0.615714 | 3,417 | 0.913148 | 0 | 0 | 0 | 0 | 0 | 0 | 150 | 0.040086 |
9bfd1efc408b89ae999c37acf631f9234b2c8e00 | 1,529 | py | Python | Libraries/Simplynet.py | frknayk/WeightVis | 255114ed0981467e29c5a28d0130af24373db1f1 | [
"MIT"
] | 2 | 2021-02-24T06:00:27.000Z | 2022-02-16T11:14:25.000Z | Libraries/Simplynet.py | frknayk/WeightVis | 255114ed0981467e29c5a28d0130af24373db1f1 | [
"MIT"
] | 1 | 2020-07-28T09:14:53.000Z | 2020-07-28T09:17:47.000Z | Libraries/Simplynet.py | frknayk/WeightVis | 255114ed0981467e29c5a28d0130af24373db1f1 | [
"MIT"
] | null | null | null | import numpy as np
import pickle
from Libraries.Enums import NNLibs
from Libraries.Reader import Reader
#TODO Complete read() function for SimplyNet
#TODO SimplyNet init(self,path) - path should go to read's parameter(like torch)
class SimplyNet(Reader):
    """Reader that loads SimplyNet weights pickled as a list of
    one-key dicts (layer name -> {'W': ..., 'b': ...})."""
    def __init__(self, path):
        # Populated by load_weights(): one weight matrix / bias vector
        # per layer, in file order.
        self.weights_list = []
        self.biases_list = []
        self.load_weights(path)
    def read(self):
        # NOTE(review): unimplemented stub (see module-level TODO).
        pass
    def get_weights(self, path):
        """Load weights and biases from ``<path>.pickle``.

        NOTE(review): pickle.load executes arbitrary code if the file is
        untrusted -- only load files you created yourself.
        """
        file_name = path + ".pickle"
        with open(file_name, 'rb') as handle:
            b = pickle.load(handle)
        return b
    def load_weights(self, path):
        """Get weights from list of layers and fill the instance lists."""
        weights_list = self.get_weights(path)
        for idx in range(len(weights_list)):
            # Layers are stored as dict where layer name is key in SimplyNet
            layer_dict = weights_list[idx]
            # Get the key (there is single key : layer name!)
            key_list = [thing for thing in layer_dict.keys()]
            # Get weights dictionary
            weigts_dict = layer_dict[key_list[0]]
            # Read weights and biases (transposed from the stored layout)
            W = weigts_dict['W'].T
            b = weigts_dict['b'].T
            # bias must be in the shape of (dim_bias,)
            # assumes b is 2-D with the bias along axis 1 after .T -- TODO confirm
            b = b.reshape(b.shape[1])
            self.weights_list.append(W)
            self.biases_list.append(b)
    def get_lib(self):
        """Get enumeration of lib"""
        return NNLibs.SimplyNet
| 30.58 | 80 | 0.589274 | 1,296 | 0.847613 | 0 | 0 | 0 | 0 | 0 | 0 | 443 | 0.289732 |
9bfe168e6a6c1653360f3b09a3512b52d2922a38 | 2,636 | py | Python | code/python/archive/c0103_timestamp_records.py | jesnyder/MeasuredStress | 8009529da326a66733c26983cc59af8619f6cb42 | [
"MIT"
] | null | null | null | code/python/archive/c0103_timestamp_records.py | jesnyder/MeasuredStress | 8009529da326a66733c26983cc59af8619f6cb42 | [
"MIT"
] | null | null | null | code/python/archive/c0103_timestamp_records.py | jesnyder/MeasuredStress | 8009529da326a66733c26983cc59af8619f6cb42 | [
"MIT"
] | null | null | null | from datetime import datetime
from dateutil import tz
import glob
import os
import pandas as pd
import sys
import time
def timestamp_records():
    """Attach unix timestamps to raw Empatica-style sensor exports.

    Reads ``source_list_02.csv`` for the record/wearable layout, then for
    every sensor (EDA, HR, TEMP) and record rewrites each source file into
    ``timestamped/<sensor>/<wearable> <record>.csv`` with one unix-time
    column computed from the file's header (start time) and sample rate.
    """
    # NOTE(review): message names the old function; see __main__ guard.
    print("running format_source")

    sensors = ['EDA', 'HR', 'TEMP']

    path_folders = os.path.join('..', '..', 'source_measurements', 'PMR', 'ref' )
    save_file = os.path.join(path_folders, 'source_list_02' + '.csv' )
    df = pd.read_csv(save_file)
    # Drop the stray index column written by a previous to_csv().
    del df['Unnamed: 0']
    print(df)

    timestamped_path = []

    for sensor in sensors:

        for record in df['record']:

            df_record = df[(df['record']==record)]
            print('df_record =')
            print(df_record)

            # NOTE(review): shared_start/shared_end are computed but never
            # used below -- confirm whether trimming to the shared window
            # was intended.
            shared_start = max(list(df_record['starts']))
            shared_end = min(list(df_record['ends']))

            for path in df_record['path_long']:

                wearable = list(df[(df['path_long']==path)]['wearable'])[0]

                source_path = os.path.join(path, sensor + '.csv')
                df_source = pd.read_csv(source_path)
                print('df_source = ')
                print(df_source)

                # Empatica layout: first row (the header) is the unix start
                # time, second row is the sample frequency in Hz.
                header = list(df_source.columns.values)[0]
                print('header = ')
                print(header)
                information = list(df_source[header])
                freq = information[0]
                print('frequency = ' + str(freq))
                information = information[1:]

                record_start = float(header)
                record_length = len(information)/freq
                record_end = record_start + record_length
                print('record start = ' + str(record_start) + ' record end = ' + str(record_end) + ' length = ' + str(record_length/60) )

                # One unix timestamp per sample, spaced 1/freq apart.
                time_unix = []
                for info in information:
                    time_unix.append(record_start + len(time_unix)/freq)

                df_timestamped = pd.DataFrame()
                df_timestamped[str(str(wearable) + '_time_unix')] = time_unix
                df_timestamped[str(str(wearable) + '_measurements')] = information

                path_folders = os.path.join('..', '..', 'source_measurements', 'PMR', 'timestamped', sensor )
                print('path folders = ' + str(path_folders))
                if not os.path.exists(path_folders):
                    os.mkdir(path_folders)

                save_file = os.path.join(path_folders, str(str(wearable) + ' ' + str(record).zfill(2) + '.csv'))
                # os.mkdir(save_file)
                df_timestamped.to_csv(save_file)
                timestamped_path.append(save_file)
if __name__ == "__main__":
    # Bug fix: this guard previously called format_source(), which is not
    # defined in this module and raised NameError; call the function that
    # this script actually defines.
    timestamp_records()
| 31.380952 | 137 | 0.546282 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 419 | 0.158953 |
9bfea3c6d3e9981e8fc5494581fd8d33faa52aee | 15,114 | py | Python | NVIDIA/benchmarks/ssd/implementations/pytorch/utils.py | goswamig/training_results_v0.7 | 4278ce8a0f3d4db6b5e6054277724ca36278d7a3 | [
"Apache-2.0"
] | 48 | 2020-07-29T18:09:23.000Z | 2021-10-09T01:53:33.000Z | NVIDIA/benchmarks/ssd/implementations/pytorch/utils.py | goswamig/training_results_v0.7 | 4278ce8a0f3d4db6b5e6054277724ca36278d7a3 | [
"Apache-2.0"
] | 9 | 2021-04-02T02:28:07.000Z | 2022-03-26T18:23:59.000Z | NVIDIA/benchmarks/ssd/implementations/pytorch/utils.py | lablup/training_results_v0.7 | f5bb59aa0f8b18b602763abe47d1d24d0d54b197 | [
"Apache-2.0"
] | 42 | 2020-08-01T06:41:24.000Z | 2022-01-20T10:33:08.000Z | # MIT License
#
# Copyright (c) 2018 kuangliu
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# ------------------------------------------------------------------------------
#
# MIT License
#
# Copyright (c) 2017 Max deGroot, Ellis Brown
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# ------------------------------------------------------------------------------
#
# Copyright (c) 2018-2019, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torchvision
import torchvision.transforms as transforms
import torch.utils.data as data
from PIL import Image
from xml.etree import ElementTree
import os
import glob
from pathlib import Path
import numpy as np
import random
import itertools
import torch.nn.functional as F
try:
import ujson as json
except ImportError:
import json
import gc
import time
import bz2
import pickle
from math import sqrt, ceil, cos, sin, pi
from mlperf_logging.mllog import constants
from mlperf_logger import log_event
from SSD import _C as C
from fused_color_jitter import FusedColorJitter
# This function is from https://github.com/kuangliu/pytorch-ssd
def calc_iou_tensor(box1, box2):
    """ Calculation of IoU based on two boxes tensor,
        Reference to https://github.com/kuangliu/pytorch-ssd
        input:
            box1 (N, 4): corner-form (left, top, right, bottom) boxes
            box2 (M, 4): corner-form (left, top, right, bottom) boxes
        output:
            IoU (N, M): pairwise intersection-over-union
    """
    N = box1.size(0)
    M = box2.size(0)

    # Broadcast both sets to (N, M, 4) so every pair is compared.
    be1 = box1.unsqueeze(1).expand(-1, M, -1)
    be2 = box2.unsqueeze(0).expand(N, -1, -1)

    # Intersection rectangle: max of left/top edges, min of right/bottom.
    lt = torch.max(be1[:, :, :2], be2[:, :, :2])
    rb = torch.min(be1[:, :, 2:], be2[:, :, 2:])

    # Negative extents mean the pair does not overlap; clamp to zero
    # (replaces in-place boolean masking and the dead commented-out code).
    delta = (rb - lt).clamp(min=0)
    intersect = delta[:, :, 0] * delta[:, :, 1]

    delta1 = be1[:, :, 2:] - be1[:, :, :2]
    area1 = delta1[:, :, 0] * delta1[:, :, 1]
    delta2 = be2[:, :, 2:] - be2[:, :, :2]
    area2 = delta2[:, :, 0] * delta2[:, :, 1]

    # IoU = intersection / union.
    iou = intersect / (area1 + area2 - intersect)
    return iou
# This class is from https://github.com/chauhan-utk/ssd.DomainAdaptation
class SSDCropping(object):
    """ Cropping for SSD, according to original paper
        Choose between following 3 conditions:
        1. Preserve the original image
        2. Random crop minimum IoU is among 0.1, 0.3, 0.5, 0.7, 0.9
        3. Random crop
        Reference to https://github.com/chauhan-utk/ssd.DomainAdaptation
    """
    def __init__(self):
        # Each option is either None (keep the original image) or a
        # (min IoU, max IoU) constraint for the sampled crop.
        self.sample_options = (
            # Do nothing
            None,
            # min IoU, max IoU
            (0.1, None),
            (0.3, None),
            (0.5, None),
            (0.7, None),
            (0.9, None),
            # no IoU requirements
            (None, None),
        )
        # Implementation uses 1 iteration to find a possible candidate, this
        # was shown to produce the same mAP as using more iterations.
        self.num_cropping_iterations = 1
        log_event(key=constants.MAX_SAMPLES,
                  value=self.num_cropping_iterations)

    def __call__(self, img, img_size, bboxes, labels):
        """Sample a crop of *img*; bboxes are normalized ltrb coordinates.

        Returns (img, (htot, wtot), bboxes, labels) with the boxes clipped
        and re-normalized to the crop, and boxes/labels whose centers fall
        outside the crop discarded.
        """
        # Ensure always return cropped image
        while True:
            mode = random.choice(self.sample_options)

            if mode is None:
                return img, img_size, bboxes, labels

            htot, wtot = img_size

            min_iou, max_iou = mode
            # None means "no bound" on that side.
            min_iou = float("-inf") if min_iou is None else min_iou
            max_iou = float("+inf") if max_iou is None else max_iou

            # Implementation use 50 iteration to find possible candidate
            for _ in range(self.num_cropping_iterations):
                # size of each sampled patch in [0.1, 1]: 0.3*0.3 approx. 0.1
                w = random.uniform(0.3 , 1.0)
                h = random.uniform(0.3 , 1.0)

                # Reject crops with aspect ratio outside [0.5, 2].
                if w/h < 0.5 or w/h > 2:
                    continue

                # left 0 ~ wtot - w, top 0 ~ htot - h
                left = random.uniform(0, 1.0 - w)
                top = random.uniform(0, 1.0 - h)

                right = left + w
                bottom = top + h

                ious = calc_iou_tensor(bboxes, torch.tensor([[left, top, right, bottom]]))

                # tailor all the bboxes and return
                if not ((ious > min_iou) & (ious < max_iou)).all():
                    continue

                # discard any bboxes whose center not in the cropped image
                xc = 0.5*(bboxes[:, 0] + bboxes[:, 2])
                yc = 0.5*(bboxes[:, 1] + bboxes[:, 3])

                masks = (xc > left) & (xc < right) & (yc > top) & (yc < bottom)

                # if no such boxes, continue searching again
                if not masks.any():
                    continue

                # Clip surviving boxes to the crop rectangle.
                bboxes[bboxes[:, 0] < left, 0] = left
                bboxes[bboxes[:, 1] < top, 1] = top
                bboxes[bboxes[:, 2] > right, 2] = right
                bboxes[bboxes[:, 3] > bottom, 3] = bottom

                #print(left, top, right, bottom)
                #print(labels, bboxes, masks)
                bboxes = bboxes[masks, :]
                labels = labels[masks]

                # Crop the PIL image to the selected pixel rectangle.
                left_idx = int(left*wtot)
                top_idx = int(top*htot)
                right_idx = int(right*wtot)
                bottom_idx = int(bottom*htot)
                #print(left_idx,top_idx,right_idx,bottom_idx)

                #img = img[:, top_idx:bottom_idx, left_idx:right_idx]
                img = img.crop((left_idx, top_idx, right_idx, bottom_idx))

                # Re-normalize box coordinates relative to the crop.
                bboxes[:, 0] = (bboxes[:, 0] - left)/w
                bboxes[:, 1] = (bboxes[:, 1] - top)/h
                bboxes[:, 2] = (bboxes[:, 2] - left)/w
                bboxes[:, 3] = (bboxes[:, 3] - top)/h

                htot = bottom_idx - top_idx
                wtot = right_idx - left_idx
                return img, (htot, wtot), bboxes, labels
# Don't need to cast to float, already there (from FusedColorJitter)
class ToTensor(object):
    """Convert an HWC image (PIL image or ndarray) into a CHW float tensor
    with values scaled from [0, 255] into [0, 1]."""

    def __init__(self):
        pass

    def __call__(self, img):
        tensor = torch.Tensor(np.array(img))
        # HWC -> CHW, then scale 8-bit values into [0, 1].
        return tensor.permute(2, 0, 1).div(255)
class RandomHorizontalFlip(object):
    """Flip a PIL image and its normalized ltrb boxes left-right with
    probability *p*."""

    def __init__(self, p=0.5):
        self.p = p

    def __call__(self, image, bboxes):
        if random.random() >= self.p:
            return image, bboxes
        # Mirror the boxes horizontally: new left = 1 - old right and
        # new right = 1 - old left (coordinates are normalized to [0, 1]).
        bboxes[:, 0], bboxes[:, 2] = 1.0 - bboxes[:, 2], 1.0 - bboxes[:, 0]
        return image.transpose(Image.FLIP_LEFT_RIGHT), bboxes
# Do data augumentation
class SSDTransformer(object):
    """ SSD Data Augumentation, according to original paper
        Composed by several steps:
        Cropping
        Resize
        Flipping
        Jittering
    """
    def __init__(self, size = (300, 300), val=False):
        # size: target (H, W); val=True switches to the evaluation
        # pipeline (resize + normalize only, no augmentation).
        self.size = size
        self.val = val

        self.crop = SSDCropping()
        # Training pipeline: resize, fused color jitter, HWC->CHW tensor.
        self.img_trans = transforms.Compose([
            transforms.Resize(self.size),
            #transforms.ColorJitter(brightness=0.125, contrast=0.5,
            #                       saturation=0.5, hue=0.05
            #),
            #transforms.ToTensor(),
            FusedColorJitter(),
            ToTensor(),
        ])
        self.hflip = RandomHorizontalFlip()

        # All Pytorch Tensor will be normalized
        # https://discuss.pytorch.org/t/how-to-preprocess-input-for-pre-trained-networks/683
        normalization_mean = [0.485, 0.456, 0.406]
        normalization_std = [0.229, 0.224, 0.225]
        self.normalize = transforms.Normalize(mean=normalization_mean,
                                              std=normalization_std)
        # Evaluation pipeline: deterministic resize + normalize.
        self.trans_val = transforms.Compose([
            transforms.Resize(self.size),
            transforms.ToTensor(),
            self.normalize,])

    def __call__(self, img, img_size, bbox=None, label=None, max_num=200):
        """Transform one sample; in val mode, boxes/labels are padded out
        to *max_num* entries so batches have a fixed shape."""
        #img = torch.tensor(img)
        if self.val:
            bbox_out = torch.zeros(max_num, 4)
            label_out = torch.zeros(max_num, dtype=torch.long)
            bbox_out[:bbox.size(0), :] = bbox
            label_out[:label.size(0)] = label
            return self.trans_val(img), img_size, bbox_out, label_out
        # random crop
        img, img_size, bbox, label = self.crop(img, img_size, bbox, label)
        # random horiz. flip
        img, bbox = self.hflip(img, bbox)

        # [Resize, ColorJitter, ToTensor]
        img = self.img_trans(img).contiguous()
        img = self.normalize(img)

        return img, img_size, bbox, label
# Implement a datareader for COCO dataset
class COCODetection(data.Dataset):
    """COCO-style detection dataset.

    Parses a COCO annotation JSON (or an already-parsed ``data`` dict) and
    serves ``(img, img_id, (h, w), bboxes, labels)`` tuples, where bboxes are
    (l, t, r, b) normalized to the image size.  Images without any annotation
    are dropped.  Label 0 is reserved for the background class.
    """

    def __init__(self, img_folder, annotate_file, transform=None, data=None):
        """
        img_folder    -- directory containing the image files
        annotate_file -- COCO annotation JSON path (ignored if data is given)
        transform     -- optional callable (img, size, bboxes, labels) -> same
        data          -- optional pre-parsed annotation dict
        """
        self.img_folder = img_folder
        self.annotate_file = annotate_file

        if data:
            self.data = data
        else:
            with open(annotate_file) as fin:
                # loading huge json files tends to make the gc (cycle
                # collector) waste a lot of time, so disable it while parsing
                gc_old = gc.isenabled()
                gc.disable()
                try:
                    self.data = json.load(fin)
                finally:
                    # re-enable gc even if the JSON is malformed
                    if gc_old:
                        gc.enable()

        self.images = {}
        self.label_map = {}
        self.label_info = {}

        # label 0 stands for the background
        cnt = 0
        self.label_info[cnt] = "background"
        for cat in self.data["categories"]:
            cnt += 1
            self.label_map[cat["id"]] = cnt
            self.label_info[cnt] = cat["name"]

        # build an index for the images: id -> (file name, (h, w), bboxes)
        for img in self.data["images"]:
            img_id = img["id"]
            img_name = img["file_name"]
            img_size = (img["height"], img["width"])
            if img_id in self.images:
                raise Exception("duplicated image record")
            self.images[img_id] = (img_name, img_size, [])

        # attach each annotation's bbox and mapped label to its image
        for bboxes in self.data["annotations"]:
            img_id = bboxes["image_id"]
            bbox = bboxes["bbox"]
            bbox_label = self.label_map[bboxes["category_id"]]
            self.images[img_id][2].append((bbox, bbox_label))

        # drop images that carry no annotations at all
        for k, v in list(self.images.items()):
            if len(v[2]) == 0:
                self.images.pop(k)

        self.img_keys = list(self.images.keys())
        self.transform = transform

    @property
    def labelnum(self):
        """Number of labels, including the background class."""
        return len(self.label_info)

    @staticmethod
    def load(pklfile):
        """Load a pickled, bz2-compressed dataset from pklfile."""
        with bz2.open(pklfile, "rb") as fin:
            return pickle.load(fin)

    def save(self, pklfile):
        """Pickle this dataset, bz2-compressed, to pklfile."""
        with bz2.open(pklfile, "wb") as fout:
            pickle.dump(self, fout)

    def __len__(self):
        return len(self.images)

    def __getitem__(self, idx):
        img_id = self.img_keys[idx]
        img_name, (htot, wtot), annotations = self.images[img_id]
        img_path = os.path.join(self.img_folder, img_name)
        img = Image.open(img_path).convert("RGB")

        bbox_sizes = []
        bbox_labels = []
        # annotations are COCO-style (left, top, width, height) in pixels;
        # convert to (l, t, r, b) normalized to the image size
        for (l, t, w, h), bbox_label in annotations:
            r = l + w
            b = t + h
            # filter out zero-size bboxes
            if l == r or t == b:
                continue
            bbox_sizes.append((l / wtot, t / htot, r / wtot, b / htot))
            bbox_labels.append(bbox_label)

        bbox_sizes = torch.tensor(bbox_sizes)
        bbox_labels = torch.tensor(bbox_labels)

        if self.transform is not None:
            img, (htot, wtot), bbox_sizes, bbox_labels = \
                self.transform(img, (htot, wtot), bbox_sizes, bbox_labels)
        return img, img_id, (htot, wtot), bbox_sizes, bbox_labels
| 34.905312 | 92 | 0.580918 | 10,181 | 0.673614 | 0 | 0 | 254 | 0.016806 | 0 | 0 | 5,931 | 0.392418 |
9bfeddde243de4b1d35ced3b1d4b4c7363007028 | 90 | py | Python | src/rubinlander/parsers/lsstdoc/__init__.py | lsst-sqre/rubin-lander-plugin | f570acb92629493e640baf632bd7ceee78516efd | [
"MIT"
] | null | null | null | src/rubinlander/parsers/lsstdoc/__init__.py | lsst-sqre/rubin-lander-plugin | f570acb92629493e640baf632bd7ceee78516efd | [
"MIT"
] | 6 | 2021-04-19T06:07:06.000Z | 2022-03-07T03:02:22.000Z | src/rubinlander/parsers/lsstdoc/__init__.py | lsst-sqre/rubin-lander-plugin | f570acb92629493e640baf632bd7ceee78516efd | [
"MIT"
] | null | null | null | from rubinlander.parsers.lsstdoc.parser import LsstDocParser
__all__ = ["LsstDocParser"]
| 22.5 | 60 | 0.822222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 0.166667 |
9bfeed4ac7968ce26851e608ac8b9ee8b625a069 | 961 | py | Python | network/__init__.py | nivilo/Chase | 921e4b0bf2ca2cbd1a315156287f2af0e8a19c6c | [
"MIT"
] | 68 | 2018-07-23T03:49:17.000Z | 2022-01-11T04:56:32.000Z | network/__init__.py | mpalaiokostas/Chase | 921e4b0bf2ca2cbd1a315156287f2af0e8a19c6c | [
"MIT"
] | null | null | null | network/__init__.py | mpalaiokostas/Chase | 921e4b0bf2ca2cbd1a315156287f2af0e8a19c6c | [
"MIT"
] | 30 | 2018-07-24T20:35:20.000Z | 2021-11-23T00:56:07.000Z | import abc
from config import MODEL_DIR
class Network(abc.ABC):
    """Abstract base class for forecasting network models.

    NOTE: the original code set ``__metaclass__ = abc.ABCMeta``, which is
    Python-2 syntax and is silently ignored on Python 3 — the
    ``@abc.abstractmethod`` markers were therefore never enforced.  Deriving
    from ``abc.ABC`` restores that enforcement.
    """

    @abc.abstractmethod
    def __init__(self):
        """Initialise common state; subclasses must call super().__init__()."""
        # the underlying model object and its training history
        self.model = None
        self.model_history = None
        # train/test splits, populated by set_train_test_split()
        self.X_train = None
        self.X_test = None
        self.y_train = None
        self.y_test = None

    @abc.abstractmethod
    def set_train_test_split(self):
        """Split the data into train/test sets (fills the X_/y_ members)."""

    @abc.abstractmethod
    def build_model(self):
        """Construct self.model."""

    @abc.abstractmethod
    def train_model(self):
        """Fit self.model, storing the history in self.model_history."""

    @abc.abstractmethod
    def forecast_model(self, start_datetime, end_datetime, freq):
        """Forecast values between start_datetime and end_datetime at freq."""

    @abc.abstractmethod
    def visualize_output(self):
        """Visualize the model output."""

    def evaluate_model(self):
        """Evaluate self.model on the held-out test set."""
        return self.model.evaluate(self.X_test, self.y_test)

    def save_model(self, filename):
        """Save the model weights under MODEL_DIR + filename."""
        self.model.save_weights(MODEL_DIR + filename)

    def load_model(self, filename):
        """Load the model weights from MODEL_DIR + filename."""
        self.model.load_weights(MODEL_DIR + filename)
| 20.891304 | 65 | 0.650364 | 917 | 0.954214 | 0 | 0 | 561 | 0.583767 | 0 | 0 | 0 | 0 |
9bffb8d4ff59f69668f6f2eebe842833a4a032ab | 3,000 | py | Python | rdfdatabank/lib/reqclassifier.py | dataflow/RDFDatabank | 8a3abd28fefc62cbbfb9f77e7ddc920e23794f34 | [
"MIT"
] | 4 | 2016-01-10T09:05:22.000Z | 2019-09-09T09:57:25.000Z | rdfdatabank/lib/reqclassifier.py | dataflow/RDFDatabank | 8a3abd28fefc62cbbfb9f77e7ddc920e23794f34 | [
"MIT"
] | null | null | null | rdfdatabank/lib/reqclassifier.py | dataflow/RDFDatabank | 8a3abd28fefc62cbbfb9f77e7ddc920e23794f34 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Copyright (c) 2012 University of Oxford
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from webob import Request
import zope.interface
from repoze.who.classifiers import default_request_classifier
from repoze.who.interfaces import IRequestClassifier
import ConfigParser
from pylons import config
def custom_request_classifier(environ):
    """Return one of the classifiers 'app', 'browser', or any standard
    classifier returned by repoze.who.classifiers:default_request_classifier.

    Requests classified as 'browser' are downgraded to 'app' unless they
    target one of the login/logout URLs, so non-interactive clients are not
    redirected to the login form.
    """
    classifier = default_request_classifier(environ)
    if classifier == 'browser':
        # defaults, overridden below by the repoze.who config file if present
        login_form_url = '/login'
        login_handler = '/login_handler'
        logout_handler = '/logout_handler'
        logout_url = '/logout'
        if "who.config_file" in config:
            config_file = config["who.config_file"]
            config_who = ConfigParser.ConfigParser()
            # close the config file once parsed (the original leaked it)
            with open(config_file) as fp:
                config_who.readfp(fp)
            login_form_url = config_who.get("plugin:friendlyform", "login_form_url")
            login_handler = config_who.get("plugin:friendlyform", "login_handler_path")
            logout_handler = config_who.get("plugin:friendlyform", "logout_handler_path")
            logout_url = config_who.get("plugin:friendlyform", "post_logout_url")
        path_info = environ['PATH_INFO']
        if path_info not in [login_form_url, login_handler, logout_handler, logout_url]:
            # In our view, any client who hasn't come in from the login url
            # is an app
            classifier = 'app'
    return classifier

# mark the classifier so repoze.who recognises it as an IRequestClassifier
zope.interface.directlyProvides(custom_request_classifier, IRequestClassifier)
| 44.776119 | 89 | 0.726333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,897 | 0.632333 |
50007b22b789b5bf2347dd946c7516260c7c35b4 | 31,291 | py | Python | gui/apod_linux_config.py | danahynes/APOD_Linux | fe2bb52e77be77628dc99251cb2c43ec30ddea69 | [
"WTFPL"
] | null | null | null | gui/apod_linux_config.py | danahynes/APOD_Linux | fe2bb52e77be77628dc99251cb2c43ec30ddea69 | [
"WTFPL"
] | null | null | null | gui/apod_linux_config.py | danahynes/APOD_Linux | fe2bb52e77be77628dc99251cb2c43ec30ddea69 | [
"WTFPL"
] | null | null | null | #!/usr/bin/env python3
#------------------------------------------------------------------------------#
# Filename: apod_linux_config.py / \ #
# Project : APOD_Linux | () | #
# Date : 06/23/2021 | | #
# Author : Dana Hynes | \____/ | #
# License : WTFPLv2 \ / #
#------------------------------------------------------------------------------#
# imports
import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk
import logging
import os
import subprocess
# program name used for the hidden config dir, conf file, and log file
str_prog_name = "apod_linux"
# find the config file (~/.apod_linux/apod_linux.conf)
home_dir = os.path.expanduser("~")
pic_dir = os.path.join(home_dir, "." + str_prog_name)
conf_file = os.path.join(pic_dir, str_prog_name + ".conf")
# get log file name (~/.apod_linux/apod_linux.log)
log_file = os.path.join(pic_dir, str_prog_name + ".log")
# set up logging
logging.basicConfig(filename = log_file, level = logging.DEBUG,
format = "%(asctime)s - %(message)s")
# set defaults for first tab (General)
def_enabled = True
def_delay = 30          # seconds to wait for a network connection
def_caption = True
def_position = "BR"     # caption position; one of the keys of position_map
# set defaults for second tab (Colors): caption text RGBA
def_text_r = 255
def_text_g = 255
def_text_b = 255
def_text_a = 100        # alpha in percent
# caption background RGBA
def_bg_r = 0
def_bg_g = 0
def_bg_b = 0
def_bg_a = 75           # alpha in percent
# set defaults for third tab (Sizes)
def_width = 500
def_font_size = 15
def_corner = 15
def_border = 20
def_top_pad = 50
def_bottom_pad = 10
def_side_pad = 10
# default strings (user-visible labels, tooltips, and tab titles)
str_title = "APOD_Linux"
str_label_enabled = "Enable " + str_title + ":"
str_tooltip_enabled = "Enables or disables the " + str_title + " program"
str_label_delay = "Delay (0-60):"
str_tooltip_delay = "How long to wait (in seconds) for an internet connection \
before downloading"
str_label_caption = "Use caption:"
str_tooltip_caption = "Enables or disables the caption on top of the wallpaper"
str_tab_general = "General"
str_label_text = "<b>Text</b>"
str_label_text_r = "Red (0-255):"
str_tooltip_text_r = "The red value for the caption text"
str_label_text_g = "Green (0-255):"
str_tooltip_text_g = "The green value for the caption text"
str_label_text_b = "Blue (0-255):"
str_tooltip_text_b = "The blue value for the caption text"
str_label_text_a = "Alpha % (0-100):"
str_tooltip_text_a = "The alpha (transparency) value for the caption text"
str_label_bg = "<b>Background</b>"
str_label_bg_r = "Red (0-255):"
str_tooltip_bg_r = "The red value for the caption background"
str_label_bg_g = "Green (0-255):"
str_tooltip_bg_g = "The green value for the caption background"
str_label_bg_b = "Blue (0-255):"
str_tooltip_bg_b = "The blue value for the caption background"
str_label_bg_a = "Alpha % (0-100):"
str_tooltip_bg_a = "The alpha (transparency) value for the caption background"
str_tab_colors = "Colors"
str_label_position = "Position:"
str_tooltip_position = "The position of the caption relative to the screen"
str_label_width = "Width (0-1000):"
str_tooltip_width = "The width of the caption bubble"
str_label_font_size = "Font size (0-50):"
str_tooltip_font_size = "The font size of the caption"
str_label_corner = "Corner radius (0-50):"
str_tooltip_corner = "The corner radius of the caption bubble"
str_label_border = "Border (0-50):"
str_tooltip_border = "The spacing between the caption text and the background \
bubble"
str_label_top_pad = "Top padding (0-100):"
str_tooltip_top_pad = "The spacing between the caption and the top of the \
screen"
str_label_bottom_pad = "Bottom padding (0-100):"
str_tooltip_bottom_pad = "The spacing between the caption and the bottom of \
the screen"
str_label_side_pad = "Side padding (0-100):"
str_tooltip_side_pad = "The spacing between the caption and the sides of the \
screen"
str_tab_sizes = "Sizes"
str_button_ok = "OK"
str_button_cancel = "Cancel"
str_button_apply = "Apply"
str_tl = "Top Left"
str_tr = "Top Right"
str_bl = "Bottom Left"
str_br = "Bottom Right"
str_c = "Center"
# map short position codes (stored in the conf file) to display strings
position_map = {
"TL" : str_tl,
"TR" : str_tr,
"BL" : str_bl,
"BR" : str_br,
"C" : str_c
}
# command launched by MyWindow.run_prog()
# NOTE(review): "&" and "disown" are shell tokens; run_prog() splits this
# string and passes them to the script as plain arguments (Popen uses no
# shell) -- confirm they are intentional leftovers
run_prog_cmd = "python3 /usr/bin/apod_linux.py & disown"
# the main window class
class MyWindow(Gtk.Window):
# constructor
def __init__(self):
# call super constructor
Gtk.Window.__init__(self, title=str_title)
# the padding between the window edge and the content
self.set_border_width(20)
# set new width and default (fit) height
self.set_default_size(600, -1)
# don't allow resizing of window
self.set_resizable(False)
# create the stack BEFORE switcher and set props
stack = Gtk.Stack()
stack.set_transition_type(Gtk.StackTransitionType.NONE)
# create the switcher and attach stack
stack_switcher = Gtk.StackSwitcher()
stack_switcher.set_stack(stack)
# create a box for the switcher that keeps it centered horizontally
# resize box to fill parent but do not resize child (switcher)
# in an hbox, the child automatically fills vertically but is centered
# horizontally
hbox_switcher = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
hbox_switcher.pack_start(stack_switcher, True, False, 0)
# the first tab
# create a grid with inter-spacig
grid_general = Gtk.Grid()
grid_general.set_row_spacing(20)
grid_general.set_column_spacing(20)
# add a label and switch
label_enabled = Gtk.Label(label=str_label_enabled)
label_enabled.set_alignment(1, 0)
grid_general.attach(label_enabled, 0, 0, 1, 1)
self.switch_enabled = Gtk.Switch()
self.switch_enabled.connect("notify::active",
self.switch_enabled_clicked)
self.switch_enabled.set_tooltip_text(str_tooltip_enabled)
hbox_enabled = Gtk.Box(orientation = Gtk.Orientation.HORIZONTAL)
hbox_enabled.pack_start(self.switch_enabled, False, False, 0)
grid_general.attach(hbox_enabled, 1, 0, 1, 1)
# add a label
label_delay = Gtk.Label(label=str_label_delay)
label_delay.set_alignment(1, 0)
grid_general.attach(label_delay, 0, 1, 1, 1)
# add a spinbox that grows horizontally
adj_delay = Gtk.Adjustment(
0.0,
0.0,
60.0,
1.0,
5.0,
0.0
)
self.spin_delay = Gtk.SpinButton(adjustment=adj_delay, hexpand=True)
self.spin_delay.set_numeric(True)
self.spin_delay.set_tooltip_text(str_tooltip_delay)
grid_general.attach(self.spin_delay, 1, 1, 1, 1)
# add a label and switch
label_caption = Gtk.Label(label=str_label_caption)
label_caption.set_alignment(1, 0)
grid_general.attach(label_caption, 0, 2, 1, 1)
self.switch_caption = Gtk.Switch()
self.switch_caption.connect("notify::active",
self.switch_caption_clicked)
self.switch_caption.set_tooltip_text(str_tooltip_caption)
hbox_caption = Gtk.Box(orientation = Gtk.Orientation.HORIZONTAL)
hbox_caption.pack_start(self.switch_caption, False, False, 0)
grid_general.attach(hbox_caption, 1, 2, 1, 1)
label_position = Gtk.Label(label=str_label_position)
label_position.set_alignment(1, 0)
grid_general.attach(label_position, 0, 3, 1, 1)
# combos can take keys and vals and will only diplay vals
self.combo_position = Gtk.ComboBoxText()
self.combo_position.set_tooltip_text(str_tooltip_position)
grid_general.attach(self.combo_position, 1, 3, 1, 1)
for key, val in position_map.items():
self.combo_position.append(key, val)
# add the grid to the stack with a name and a title
stack.add_titled(grid_general, "general", str_tab_general)
# the second tab
# create a grid with inter-spacig
grid_colors = Gtk.Grid()
grid_colors.set_row_spacing(20)
grid_colors.set_column_spacing(20)
label_text = Gtk.Label()
label_text.set_markup(str_label_text)
sep_text = Gtk.HSeparator()
# a box to vertically center the separator
# resize the box to fill the cell but do not resize child
# in a vbox, the child automatically fills the width but is centered
# vertically
vbox_sep_text = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
vbox_sep_text.pack_start(sep_text, True, False, 0)
# a box to contain label and separator box
# label is F, F to make it as small as possible
# box is T, T to make it as big as possible
hbox_text = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=5)
hbox_text.pack_start(label_text, False, False, 0)
hbox_text.pack_start(vbox_sep_text, True, True, 0)
grid_colors.attach(hbox_text, 0, 0, 2, 1)
# right-align labels, set spin min/max, and numeric only
label_text_r = Gtk.Label(label=str_label_text_r)
label_text_r.set_alignment(1, 0)
grid_colors.attach(label_text_r, 0, 1, 1, 1)
adj_text_r = Gtk.Adjustment(
0.0,
0.0,
255.0,
1.0,
5.0,
0.0
)
self.spin_text_r = Gtk.SpinButton(adjustment=adj_text_r, hexpand=True)
self.spin_text_r.set_numeric(True)
self.spin_text_r.set_tooltip_text(str_tooltip_text_r)
grid_colors.attach(self.spin_text_r, 1, 1, 1, 1)
label_text_g = Gtk.Label(label=str_label_text_g)
label_text_g.set_alignment(1, 0)
grid_colors.attach(label_text_g, 0, 2, 1, 1)
adj_text_g = Gtk.Adjustment(
0.0,
0.0,
255.0,
1.0,
5.0,
0.0
)
self.spin_text_g = Gtk.SpinButton(adjustment=adj_text_g, hexpand=True)
self.spin_text_g.set_numeric(True)
self.spin_text_g.set_tooltip_text(str_tooltip_text_g)
grid_colors.attach(self.spin_text_g, 1, 2, 1, 1)
label_text_b = Gtk.Label(label=str_label_text_b)
label_text_b.set_alignment(1, 0)
grid_colors.attach(label_text_b, 0, 3, 1, 1)
adj_text_b = Gtk.Adjustment(
0.0,
0.0,
255.0,
1.0,
5.0,
0.0
)
self.spin_text_b = Gtk.SpinButton(adjustment=adj_text_b, hexpand=True)
self.spin_text_b.set_numeric(True)
self.spin_text_b.set_tooltip_text(str_tooltip_text_b)
grid_colors.attach(self.spin_text_b, 1, 3, 1, 1)
label_text_a = Gtk.Label(label=str_label_text_a)
label_text_a.set_alignment(1, 0)
grid_colors.attach(label_text_a, 0, 4, 1, 1)
adj_text_a = Gtk.Adjustment(
0.0,
0.0,
100.0,
1.0,
5.0,
0.0
)
self.spin_text_a = Gtk.SpinButton(adjustment=adj_text_a, hexpand=True)
self.spin_text_a.set_numeric(True)
self.spin_text_a.set_tooltip_text(str_tooltip_text_a)
grid_colors.attach(self.spin_text_a, 1, 4, 1, 1)
label_bg = Gtk.Label()
label_bg.set_markup(str_label_bg)
sep_bg = Gtk.HSeparator()
# a box to vertically center the separator
# resize the box to fill the cell but do not resize child
# in a vbox, the child automatically fills the width but is centered
# vertically
vbox_sep_bg = Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
vbox_sep_bg.pack_start(sep_bg, True, False, 0)
# a box to contain label and separator box
# label is F, F to make it as small as possible
# box is T, T to make it as big as possible
hbox_bg = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL, spacing=5)
hbox_bg.pack_start(label_bg, False, False, 0)
hbox_bg.pack_start(vbox_sep_bg, True, True, 0)
grid_colors.attach(hbox_bg, 0, 5, 2, 1)
# right-align labels, set spin min/max, and numeric only
label_bg_r = Gtk.Label(label=str_label_bg_r)
label_bg_r.set_alignment(1, 0)
grid_colors.attach(label_bg_r, 0, 6, 1, 1)
adj_bg_r = Gtk.Adjustment(
0.0,
0.0,
255.0,
1.0,
5.0,
0.0
)
self.spin_bg_r = Gtk.SpinButton(adjustment=adj_bg_r, hexpand=True)
self.spin_bg_r.set_numeric(True)
self.spin_bg_r.set_tooltip_text(str_tooltip_bg_r)
grid_colors.attach(self.spin_bg_r, 1, 6, 1, 1)
label_bg_g = Gtk.Label(label=str_label_bg_g)
label_bg_g.set_alignment(1, 0)
grid_colors.attach(label_bg_g, 0, 7, 1, 1)
adj_bg_g = Gtk.Adjustment(
0.0,
0.0,
255.0,
1.0,
5.0,
0.0
)
self.spin_bg_g = Gtk.SpinButton(adjustment=adj_bg_g, hexpand=True)
self.spin_bg_g.set_numeric(True)
self.spin_bg_g.set_tooltip_text(str_tooltip_bg_g)
grid_colors.attach(self.spin_bg_g, 1, 7, 1, 1)
label_bg_b = Gtk.Label(label=str_label_bg_b)
label_bg_b.set_alignment(1, 0)
grid_colors.attach(label_bg_b, 0, 8, 1, 1)
adj_bg_b = Gtk.Adjustment(
0.0,
0.0,
255.0,
1.0,
5.0,
0.0
)
self.spin_bg_b = Gtk.SpinButton(adjustment=adj_bg_b, hexpand=True)
self.spin_bg_b.set_numeric(True)
self.spin_bg_b.set_tooltip_text(str_tooltip_bg_b)
grid_colors.attach(self.spin_bg_b, 1, 8, 1, 1)
label_bg_a = Gtk.Label(label=str_label_bg_a)
label_bg_a.set_alignment(1, 0)
grid_colors.attach(label_bg_a, 0, 9, 1, 1)
adj_bg_a = Gtk.Adjustment(
0.0,
0.0,
100.0,
1.0,
5.0,
0.0
)
self.spin_bg_a = Gtk.SpinButton(adjustment=adj_bg_a, hexpand=True)
self.spin_bg_a.set_numeric(True)
self.spin_bg_a.set_tooltip_text(str_tooltip_bg_a)
grid_colors.attach(self.spin_bg_a, 1, 9, 1, 1)
# add the grid to the stack with a name and a title
stack.add_titled(grid_colors, "colors", str_tab_colors)
# the third tab
# create a grid with inter-spacig
grid_sizes = Gtk.Grid()
grid_sizes.set_row_spacing(20)
grid_sizes.set_column_spacing(20)
# create all the labels and spins with adjustments and numeric only
label_width = Gtk.Label(label=str_label_width)
label_width.set_alignment(1, 0)
grid_sizes.attach(label_width, 0, 0, 1, 1)
adj_width = Gtk.Adjustment(
0.0,
0.0,
1000.0,
1.0,
5.0,
0.0
)
self.spin_width = Gtk.SpinButton(adjustment=adj_width, hexpand=True)
self.spin_width.set_numeric(True)
self.spin_width.set_tooltip_text(str_tooltip_width)
grid_sizes.attach(self.spin_width, 1, 0, 1, 1)
label_font_size = Gtk.Label(label=str_label_font_size)
label_font_size.set_alignment(1, 0)
grid_sizes.attach(label_font_size, 0, 1, 1, 1)
adj_font_size = Gtk.Adjustment(
0.0,
0.0,
50.0,
1.0,
5.0,
0.0
)
self.spin_font_size = Gtk.SpinButton(adjustment=adj_font_size,
hexpand=True)
self.spin_font_size.set_numeric(True)
self.spin_font_size.set_tooltip_text(str_tooltip_font_size)
grid_sizes.attach(self.spin_font_size, 1, 1, 1, 1)
label_corner = Gtk.Label(label=str_label_corner)
label_corner.set_alignment(1, 0)
grid_sizes.attach(label_corner, 0, 2, 1, 1)
adj_corner = Gtk.Adjustment(
0.0,
0.0,
50.0,
1.0,
5.0,
0.0
)
self.spin_corner = Gtk.SpinButton(adjustment=adj_corner, hexpand=True)
self.spin_corner.set_numeric(True)
self.spin_corner.set_tooltip_text(str_tooltip_corner)
grid_sizes.attach(self.spin_corner, 1, 2, 1, 1)
label_border = Gtk.Label(label=str_label_border)
label_border.set_alignment(1, 0)
grid_sizes.attach(label_border, 0, 3, 1, 1)
adj_border = Gtk.Adjustment(
0.0,
0.0,
50.0,
1.0,
5.0,
0.0
)
self.spin_border = Gtk.SpinButton(adjustment=adj_border, hexpand=True)
self.spin_border.set_numeric(True)
self.spin_border.set_tooltip_text(str_tooltip_border)
grid_sizes.attach(self.spin_border, 1, 3, 1, 1)
label_top_pad = Gtk.Label(label=str_label_top_pad)
label_top_pad.set_alignment(1, 0)
grid_sizes.attach(label_top_pad, 0, 4, 1, 1)
adj_top_pad = Gtk.Adjustment(
0.0,
0.0,
100.0,
1.0,
5.0,
0.0
)
self.spin_top_pad = Gtk.SpinButton(adjustment=adj_top_pad, hexpand=True)
self.spin_top_pad.set_numeric(True)
self.spin_top_pad.set_tooltip_text(str_tooltip_top_pad)
grid_sizes.attach(self.spin_top_pad, 1, 4, 1, 1)
label_bottom_pad = Gtk.Label(label=str_label_bottom_pad)
label_bottom_pad.set_alignment(1, 0)
grid_sizes.attach(label_bottom_pad, 0, 5, 1, 1)
adj_bottom_pad = Gtk.Adjustment(
0.0,
0.0,
100.0,
1.0,
5.0,
0.0
)
self.spin_bottom_pad = Gtk.SpinButton(adjustment=adj_bottom_pad,
hexpand=True)
self.spin_bottom_pad.set_numeric(True)
self.spin_bottom_pad.set_tooltip_text(str_tooltip_bottom_pad)
grid_sizes.attach(self.spin_bottom_pad, 1, 5, 1, 1)
label_side_pad = Gtk.Label(label=str_label_side_pad)
label_side_pad.set_alignment(1, 0)
grid_sizes.attach(label_side_pad, 0, 6, 1, 1)
adj_side_pad = Gtk.Adjustment(
0.0,
0.0,
100.0,
1.0,
5.0,
0.0
)
self.spin_side_pad = Gtk.SpinButton(adjustment=adj_side_pad,
hexpand=True)
self.spin_side_pad.set_numeric(True)
self.spin_side_pad.set_tooltip_text(str_tooltip_side_pad)
grid_sizes.attach(self.spin_side_pad, 1, 6, 1, 1)
# add the grid to the stack with a name and a title
stack.add_titled(grid_sizes, "sizes", str_tab_sizes)
# create a box for the buttons
hbox_buttons = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL,
spacing=20)
# create the buttons
button_ok = Gtk.Button(label=str_button_ok)
button_ok.connect("clicked", self.button_ok_clicked)
hbox_buttons.pack_start(button_ok, True, True, 0)
button_cancel = Gtk.Button(label=str_button_cancel)
button_cancel.connect("clicked", self.button_cancel_clicked)
hbox_buttons.pack_start(button_cancel, True, True, 0)
button_apply = Gtk.Button(label=str_button_apply)
button_apply.connect("clicked", self.button_apply_clicked)
hbox_buttons.pack_start(button_apply, True, True, 0)
# create a vbox for the switcher box, stack, and button box and add it
# as main window's content
vbox_content = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=50)
self.add(vbox_content)
# add the switcher's box, the stack, and button box as content
# do not resize switcher's box (horizontal fill is implicit)
# fully resize stack
# do not resize button box either
vbox_content.pack_start(hbox_switcher, False, False, 0)
vbox_content.pack_start(stack, True, True, 0)
vbox_content.pack_start(hbox_buttons, False, False, 0)
# load props or defaults
self.load_config()
# do switch routines
self.switch_caption_clicked(self.switch_caption, 0)
self.switch_enabled_clicked(self.switch_enabled, 0)
# load values from config file
def load_config(self):
# set defaults
self.switch_enabled.set_active(int(def_enabled))
self.spin_delay.set_value(int(def_delay))
self.switch_caption.set_active(int(def_caption))
self.spin_text_r.set_value(int(def_text_r))
self.spin_text_g.set_value(int(def_text_g))
self.spin_text_b.set_value(int(def_text_b))
self.spin_text_a.set_value(int(def_text_a))
self.spin_bg_r.set_value(int(def_bg_r))
self.spin_bg_g.set_value(int(def_bg_g))
self.spin_bg_b.set_value(int(def_bg_b))
self.spin_bg_a.set_value(int(def_bg_a))
for short_pos, long_pos in position_map.items():
if def_position == short_pos:
self.combo_position.set_active_id(short_pos)
self.spin_width.set_value(int(def_width))
self.spin_font_size.set_value(int(def_font_size))
self.spin_corner.set_value(int(def_corner))
self.spin_border.set_value(int(def_border))
self.spin_top_pad.set_value(int(def_top_pad))
self.spin_bottom_pad.set_value(int(def_bottom_pad))
self.spin_side_pad.set_value(int(def_side_pad))
# check if config file exists
if os.path.exists(conf_file):
# open config file and get all lines
with open(conf_file, "r") as f:
lines = f.readlines()
# try to find a value in the conf file
for line in lines:
line_clean = line.strip().upper()
# ignore comment lines
if line_clean.startswith("#") or line_clean == "":
continue
# split key off at equals
key_val = line_clean.split("=")
key = key_val[0].strip()
# split val off ignoring trailing comments
val = ""
if (len(key_val) > 1):
val_array = key_val[1].split("#")
val = val_array[0].strip()
# set values for keys
if key == "ENABLED":
if val != "":
self.switch_enabled.set_active(int(val))
if key == "DELAY":
if val != "":
self.spin_delay.set_value(int(val))
if key == "CAPTION":
if val != "":
self.switch_caption.set_active(int(val))
if key == "POSITION":
for short_pos, long_pos in position_map.items():
if val == short_pos:
self.combo_position.set_active_id(short_pos)
if key == "TEXT_R":
if val != "":
self.spin_text_r.set_value(int(val))
if key == "TEXT_G":
if val != "":
self.spin_text_g.set_value(int(val))
if key == "TEXT_B":
if val != "":
self.spin_text_b.set_value(int(val))
if key == "TEXT_A":
if val != "":
self.spin_text_a.set_value(int(val))
if key == "BG_R":
if val != "":
self.spin_bg_r.set_value(int(val))
if key == "BG_G":
if val != "":
self.spin_bg_g.set_value(int(val))
if key == "BG_B" in key:
if val != "":
self.spin_bg_b.set_value(int(val))
if key == "BG_A":
if val != "":
self.spin_bg_a.set_value(int(val))
if key == "WIDTH":
if val != "":
self.spin_width.set_value(int(val))
if key == "FONT_SIZE":
if val != "":
self.spin_font_size.set_value(int(val))
if key == "CORNER_RADIUS":
if val != "":
self.spin_corner.set_value(int(val))
if key == "BORDER":
if val != "":
self.spin_border.set_value(int(val))
if key == "TOP_PADDING":
if val != "":
if val != "":
self.spin_top_pad.set_value(int(val))
if key == "BOTTOM_PADDING":
if val != "":
self.spin_bottom_pad.set_value(int(val))
if key == "SIDE_PADDING":
if val != "":
self.spin_side_pad.set_value(int(val))
def save_config(self):
# open or create config file
with open(conf_file, "w+") as f:
# TODO: find line for key, replace value instead of overwriting
# whole file
f.write("# DO NOT EDIT THIS FILE BY HAND!\n\n")
# start writing options
f.write("ENABLED=" + str(int(self.switch_enabled.get_active())) +
"\n")
f.write("DELAY=" + str(int(self.spin_delay.get_value())) + "\n")
f.write("CAPTION=" + str(int(self.switch_caption.get_active())) +
"\n")
# fudge the position option from the array
val = self.combo_position.get_active_text()
for short_pos, long_pos in position_map.items():
if val == long_pos:
f.write("POSITION=" + short_pos + "\n")
break
f.write("TEXT_R=" + str(int(self.spin_text_r.get_value())) + "\n")
f.write("TEXT_G=" + str(int(self.spin_text_g.get_value())) + "\n")
f.write("TEXT_B=" + str(int(self.spin_text_b.get_value())) + "\n")
f.write("TEXT_A=" + str(int(self.spin_text_a.get_value())) + "\n")
f.write("BG_R=" + str(int(self.spin_bg_r.get_value())) + "\n")
f.write("BG_G=" + str(int(self.spin_bg_g.get_value())) + "\n")
f.write("BG_B=" + str(int(self.spin_bg_b.get_value())) + "\n")
f.write("BG_A=" + str(int(self.spin_bg_a.get_value())) + "\n")
f.write("WIDTH=" + str(int(self.spin_width.get_value())) + "\n")
f.write("FONT_SIZE=" + str(int(self.spin_font_size.get_value())) +
"\n")
f.write("CORNER_RADIUS=" + str(int(self.spin_corner.get_value())) +
"\n")
f.write("BORDER=" + str(int(self.spin_border.get_value())) + "\n")
f.write("TOP_PADDING=" + str(int(self.spin_top_pad.get_value())) +
"\n")
f.write("BOTTOM_PADDING=" +
str(int(self.spin_bottom_pad.get_value())) + "\n")
f.write("SIDE_PADDING=" + str(int(self.spin_side_pad.get_value())) +
"\n")
def run_prog(self):
logging.debug('GUI')
# only run once, no listener
cmd_array = run_prog_cmd.split()
# non-blocking subprocess
subprocess.Popen(cmd_array)
def switch_enabled_clicked(self, widget, gparam):
if widget.get_active():
self.spin_delay.set_sensitive(True)
self.switch_caption.set_sensitive(True)
self.switch_caption_clicked(self.switch_caption, 0)
else:
self.spin_delay.set_sensitive(False)
self.switch_caption.set_sensitive(False)
self.spin_text_r.set_sensitive(False)
self.spin_text_g.set_sensitive(False)
self.spin_text_b.set_sensitive(False)
self.spin_text_a.set_sensitive(False)
self.spin_bg_r.set_sensitive(False)
self.spin_bg_g.set_sensitive(False)
self.spin_bg_b.set_sensitive(False)
self.spin_bg_a.set_sensitive(False)
self.combo_position.set_sensitive(False)
self.spin_width.set_sensitive(False)
self.spin_font_size.set_sensitive(False)
self.spin_corner.set_sensitive(False)
self.spin_border.set_sensitive(False)
self.spin_top_pad.set_sensitive(False)
self.spin_bottom_pad.set_sensitive(False)
self.spin_side_pad.set_sensitive(False)
def switch_caption_clicked(self, widget, gparam):
if widget.get_active():
self.spin_text_r.set_sensitive(True)
self.spin_text_g.set_sensitive(True)
self.spin_text_b.set_sensitive(True)
self.spin_text_a.set_sensitive(True)
self.spin_bg_r.set_sensitive(True)
self.spin_bg_g.set_sensitive(True)
self.spin_bg_b.set_sensitive(True)
self.spin_bg_a.set_sensitive(True)
self.combo_position.set_sensitive(True)
self.spin_width.set_sensitive(True)
self.spin_font_size.set_sensitive(True)
self.spin_corner.set_sensitive(True)
self.spin_border.set_sensitive(True)
self.spin_top_pad.set_sensitive(True)
self.spin_bottom_pad.set_sensitive(True)
self.spin_side_pad.set_sensitive(True)
else:
self.spin_text_r.set_sensitive(False)
self.spin_text_g.set_sensitive(False)
self.spin_text_b.set_sensitive(False)
self.spin_text_a.set_sensitive(False)
self.spin_bg_r.set_sensitive(False)
self.spin_bg_g.set_sensitive(False)
self.spin_bg_b.set_sensitive(False)
self.spin_bg_a.set_sensitive(False)
self.combo_position.set_sensitive(False)
self.spin_width.set_sensitive(False)
self.spin_font_size.set_sensitive(False)
self.spin_corner.set_sensitive(False)
self.spin_border.set_sensitive(False)
self.spin_top_pad.set_sensitive(False)
self.spin_bottom_pad.set_sensitive(False)
self.spin_side_pad.set_sensitive(False)
    def button_ok_clicked(self, widget):
        # "OK": persist the current settings, (re)launch the program, close dialog.
        self.save_config()
        self.run_prog()
        self.destroy()
    def button_cancel_clicked(self, widget):
        # "Cancel": discard any changes and close the window.
        self.destroy()
    def button_apply_clicked(self, widget):
        # "Apply": save and relaunch, but keep the dialog open for further edits.
        self.save_config()
        self.run_prog()
# Build the settings window, wire window-close to quitting the GTK main
# loop, and hand control to GTK.
win = MyWindow()
win.connect("destroy", Gtk.main_quit)
win.show_all()
Gtk.main()
# -)
| 36.769683 | 80 | 0.580934 | 26,967 | 0.861813 | 0 | 0 | 0 | 0 | 0 | 0 | 5,544 | 0.177176 |
50038d02e89c3e78533be863fdaf0e1d485f0ce8 | 1,083 | py | Python | setup.py | marianaalbano/python_mail | 4f222894a2faa0714b2211ee9210af8f5cb5f1ed | [
"MIT"
] | null | null | null | setup.py | marianaalbano/python_mail | 4f222894a2faa0714b2211ee9210af8f5cb5f1ed | [
"MIT"
] | null | null | null | setup.py | marianaalbano/python_mail | 4f222894a2faa0714b2211ee9210af8f5cb5f1ed | [
"MIT"
] | null | null | null | #!/usr/bin/python3
#-*- coding: utf-8 -*-
import setuptools
# Long description shown on PyPI; also documents the install command.
longdesc = """
Esse módulo foi criado com o objetivo de realizar a busca de mensagens na caixa de e-mail de forma
simplificada e intuitiva utilizando o módulo imaplib para conexão.
A instalação pode ser feita utilizando:
``pip install git+https://github.com/marianaalbano/python_mail.git``.
"""
# Bug fix: ``longdesc`` was defined but never used; the same text was
# duplicated inline as ``long_description``.  Reuse the variable so the
# two cannot drift apart.
setuptools.setup(
    name="python_mail",
    version="1.0.2",
    author="Mariana Albano",
    author_email="mariana.albano@outlook.com",
    description="Management email module",
    license='MIT License',
    long_description=longdesc,
    long_description_content_type="text/markdown",
    url="https://github.com/marianaalbano/python_mail.git",
    packages=setuptools.find_packages(),
    # setuptools expects a list of classifier strings.
    classifiers=[
        "Programming Language :: Python :: 3.5",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
5003a46189d875f28555bd972e68fab9ee40bd4b | 2,156 | py | Python | visualize/MultiPersonVisualizer/Scene.py | ys1998/motion-forecast | ef8fa9d597906a756f28952a731f6bc8d178f2bf | [
"MIT"
] | null | null | null | visualize/MultiPersonVisualizer/Scene.py | ys1998/motion-forecast | ef8fa9d597906a756f28952a731f6bc8d178f2bf | [
"MIT"
] | null | null | null | visualize/MultiPersonVisualizer/Scene.py | ys1998/motion-forecast | ef8fa9d597906a756f28952a731f6bc8d178f2bf | [
"MIT"
] | null | null | null | import vtk
class Scene(object):
    """Static VTK scene: a huge textured ground plane plus three lights.

    Pipeline objects are kept in parallel lists (sources/mappers/actors/
    lights) so a renderer elsewhere can pick them up.  Reads the texture
    from ``blackandwhite.jpg`` in the current working directory.
    """

    def __init__(self):
        # Containers for every VTK object this scene owns.
        self.sceneSources = list()
        self.sceneMappers = list()
        self.sceneActors = list()
        self.sceneLights = list()
        # Ground plane: a 50000 x 50000 box only 5 units thick.
        self.sceneSources.append(vtk.vtkCubeSource())
        self.sceneSources[-1].SetXLength(50000)
        self.sceneSources[-1].SetYLength(50000)
        self.sceneSources[-1].SetZLength(5)
        # self.sceneMappers.append(vtk.vtkPolyDataMapper())
        # self.sceneMappers[-1].SetInputConnection(self.sceneSources[-1].GetOutputPort())
        # Texture source image; must exist in the working directory.
        reader = vtk.vtkJPEGReader()
        reader.SetFileName("blackandwhite.jpg")
        # reader.SetFileName("white.jpg")
        # Create texture object
        texture = vtk.vtkTexture()
        texture.SetInputConnection(reader.GetOutputPort())
        texture.RepeatOn()
        # Map texture coordinates onto the plane geometry.
        map_to_plane = vtk.vtkTextureMapToPlane()
        map_to_plane.SetInputConnection(self.sceneSources[-1].GetOutputPort())
        # Create mapper and set the mapped texture as input
        mapperplane = vtk.vtkPolyDataMapper()
        mapperplane.SetInputConnection(map_to_plane.GetOutputPort())
        # Floor actor: rotated to lie horizontally; the position offsets are
        # presumably tuned for this particular capture setup -- confirm
        # before reusing elsewhere.
        self.sceneActors.append(vtk.vtkActor())
        self.sceneActors[-1].RotateX(90)
        self.sceneActors[-1].SetPosition(1300,-800,2500) # -1200
        self.sceneActors[-1].SetMapper(mapperplane)
        self.sceneActors[-1].SetTexture(texture)
        # self.sceneActors[-1].GetProperty().SetColor(1,1,1)
        # One stronger light with shadow attenuation and two fill lights.
        self.addLight(1.0, 1.0, 1.0, 1000, 1000, -1000, 0.75, 180, 0.75)
        self.addLight(1.0, 1.0, 1.0, -1000, 500, 1000, 0.5, 180, 0.0)
        self.addLight(1.0, 1.0, 1.0, -1000, 500,- 1000, 0.5, 180, 0.0)

    def addLight(self, cR, cG, cB, pX, pY, pZ, Intensity, ConeAngle, Attenuation):
        """Append a scene light.

        cR/cG/cB: colour components (0-1); pX/pY/pZ: position; Intensity,
        ConeAngle (degrees) and shadow Attenuation are passed straight
        through to the underlying vtkLight.
        """
        self.sceneLights.append(vtk.vtkLight())
        self.sceneLights[-1].SetColor(cR, cG, cB)
        self.sceneLights[-1].SetPosition(pX, pY, pZ)
        self.sceneLights[-1].SetIntensity(Intensity)
        self.sceneLights[-1].SetConeAngle(ConeAngle)
        self.sceneLights[-1].SetShadowAttenuation(Attenuation)
        self.sceneLights[-1].SetLightTypeToSceneLight()
| 38.5 | 89 | 0.651206 | 2,143 | 0.99397 | 0 | 0 | 0 | 0 | 0 | 0 | 341 | 0.158163 |
5003c44eefcd2839eee365d7fdc0530270ef3daa | 7,856 | py | Python | db-scripts/refseq.py | Zorino/BacterialDB-Fetcher | dbdf0a2e82f742ac642230d122675f7dafc21add | [
"CC-BY-4.0"
] | 6 | 2017-12-29T22:47:55.000Z | 2021-05-09T20:35:36.000Z | db-scripts/refseq.py | Zorino/BacterialDB-Fetcher | dbdf0a2e82f742ac642230d122675f7dafc21add | [
"CC-BY-4.0"
] | 1 | 2018-08-20T08:48:49.000Z | 2018-08-20T08:48:49.000Z | db-scripts/refseq.py | Zorino/BacterialDB-Fetcher | dbdf0a2e82f742ac642230d122675f7dafc21add | [
"CC-BY-4.0"
] | 4 | 2017-02-23T20:35:50.000Z | 2021-05-07T20:52:06.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# author: maxime déraspe
# email: maximilien1er@gmail.com
# date: 2017-08-24
# version: 0.01
from subprocess import call
import os
import argparse
import gzip
import re
import glob
from Bio import SeqIO
#from Bio.SeqFeature import FeatureLocation
def open_file(f):
    """Open *f* for text reading, transparently decompressing ``.gz`` files."""
    if f[-3:] == ".gz":
        return gzip.open(f, "rt")
    return open(f, "rt")
# search for a feature
def search_feature(opt):
    """Print FASTA records for every annotation feature matching ``opt.ft_type``.

    Walks the GenBank file (``opt.genbank``) in lockstep with the matching
    FASTA file (``opt.fna``); both must list records in the same order.
    With ``opt.complete`` set, truncated features (location containing
    '<' or '>') are skipped.  Writes one ``>id product`` header plus the
    sequence to stdout per match and returns 0.
    """
    handle_gbf = open_file(opt.genbank)
    handle_fna = open_file(opt.fna)
    rec_gbf = SeqIO.parse(handle_gbf, "genbank")
    rec_fna = SeqIO.parse(handle_fna, "fasta")
    for r_gbf in rec_gbf:
        # Assumes exactly one FASTA record per GenBank record, same order
        # -- TODO confirm for the input data source.
        r_fna = next(rec_fna)
        for ft in r_gbf.features:
            # Substring match: e.g. "CDS" also matches any type containing it.
            if opt.ft_type in ft.type:
                if opt.complete and (">" in str(ft.location) or "<" in str(ft.location)):
                    continue
                seq = r_fna.seq[ft.location.start:ft.location.end]
                if ft.location.strand != 1:
                    # NOTE(review): this complements without reversing; for a
                    # minus-strand feature reverse_complement() may be the
                    # intent -- confirm the expected output orientation.
                    seq = seq.complement()
                print(">%s %s" %(r_gbf.id, ft.qualifiers['product'][0]))
                print(seq)
    handle_gbf.close()
    handle_fna.close()
    return 0
def create_ancestor_path(taxa, tax_id):
    """Return the taxon ids from the root down to *tax_id* (inclusive)."""
    lineage = [tax_id]
    current = tax_id
    # Walk parent links until a node with an empty parent (the root).
    while taxa[current]['parent'] != "":
        current = taxa[current]['parent']
        lineage.append(current)
    # Collected leaf-to-root; callers expect root-first order.
    return lineage[::-1]
def create_ancestor_path_all(taxa):
    """Fill each taxon's 'path' with its root-to-node ids joined by '/'."""
    for taxon_id in taxa:
        taxa[taxon_id]['path'] = "/".join(create_ancestor_path(taxa, taxon_id))
def load_taxonomy(nodes_file, division):
    """Parse an NCBI nodes.dmp into ``{tax_id: {child/parent/rank/path}}``.

    Only rows whose division id (field 4) equals *division* are kept; the
    root-to-node 'path' strings are then filled in for every taxon.
    """
    # NOTE: the character class [\s+\t+] matches one whitespace or '+'
    # character; kept verbatim to preserve the exact splitting behaviour.
    separator = re.compile("[\s+\t+]\|[\s+\t+]")
    taxa = {}

    def blank_node():
        return {'child': [], 'parent': "", 'rank': "", 'path': ""}

    with open(nodes_file) as nodes:
        for row in nodes:
            fields = separator.split(row.strip())
            if fields[4] != division:
                continue
            child_id, parent_id, rank = fields[0], fields[1], fields[2]
            taxa.setdefault(parent_id, blank_node())
            taxa.setdefault(child_id, blank_node())
            taxa[parent_id]['child'].append(child_id)
            taxa[child_id]['parent'] = parent_id
            taxa[child_id]['rank'] = rank
    create_ancestor_path_all(taxa)
    return taxa
def create_taxonomy_directories(taxa):
    """Create one directory per taxon path and write its ``rank`` file.

    Each taxon's 'path' (e.g. "2/1224/1236") becomes a nested directory,
    relative to the current working directory, containing a ``rank`` file
    with the taxon's rank.  Returns 0.

    NOTE: as in the original logic, once a taxon has appeared as an
    ancestor in another taxon's path it is marked done, so its own rank
    file is only written if it is processed before its descendants.
    """
    taxon_done = set()  # set instead of list: O(1) membership tests
    for t in taxa:
        path_str = taxa[t]['path']
        if t not in taxon_done:
            taxon_done.update(path_str.split("/"))
            print("create dir %s" % path_str)
            # os.makedirs replaces the former `mkdir -p` subprocess call:
            # portable and no process spawned per taxon.
            os.makedirs(path_str, exist_ok=True)
            with open(path_str + "/rank", "w") as rank_file:
                rank_file.write(taxa[t]['rank'])
    return 0
def create_names(taxa, names_file):
    """Write NCBI names.dmp entries into each known taxon's directory.

    The scientific name overwrites ``<path>/name``; every other name class
    is appended to ``<path>/name_other`` as "name<TAB>class" lines.  Rows
    whose tax id is unknown are skipped.  Returns 0.
    """
    # NOTE: class [\s*\t*] matches a single whitespace or '*' character;
    # kept verbatim so rows split exactly as before.
    separator = re.compile("[\s*\t*]\|[\s*\t*]")
    with open(names_file) as names:
        for row in names:
            fields = separator.split(row)
            tax_id = fields[0]
            if tax_id not in taxa:
                continue
            taxon_path = taxa[tax_id]['path']
            print("writing names into %s" % taxon_path)
            if fields[3] == "scientific name":
                with open(taxon_path + "/name", "w") as name_file:
                    name_file.write(fields[1])
            else:
                with open(taxon_path + "/name_other", "a") as name_file:
                    name_file.write(fields[1] + "\t" + fields[3] + "\n")
    return 0
def load_ids(ids_file, taxa):
    """Map genome accession -> tax id from comma-separated id files.

    Each file has tab-separated rows; only rows whose tax id (column 2)
    exists in *taxa* are kept.
    """
    genomes = {}
    for ids_path in ids_file.split(","):
        print(ids_path)
        handle = open_file(ids_path)
        for row in handle:
            columns = row.strip().split("\t")
            if columns[2] in taxa:
                genomes[columns[0]] = columns[2]
        handle.close()
    return genomes
def create_sequences(genome_ids, taxa, fna_dir, gbf_dir):
    """Split paired ``.gbff.gz``/``.fna.gz`` files into per-BioSample files.

    For every GenBank record the matching FASTA record is appended to
    ``<taxon path>/samples/<biosample>.fasta`` and the GenBank record to
    ``<biosample>.gb``.  Records whose accession is not in *genome_ids*
    go under ``./unidentified``.  Returns 0.
    """
    all_fna = glob.glob(fna_dir + "/*.fna.gz")
    all_gbf = glob.glob(gbf_dir + "/*.gbff.gz")
    for gbf in all_gbf:
        with open_file(gbf) as f:
            rec_gbf = SeqIO.parse(f, "genbank")
            # First two dot-separated filename parts, used to locate the
            # matching .fna.gz by substring.
            file_id = ".".join(gbf.split("/")[-1:][0].split(".")[:2]) + "."
            file_fna = [s for s in all_fna if file_id in s][0]
            handle_fna = open_file(file_fna)
            rec_fna = SeqIO.parse(handle_fna, "fasta")
            for r_gbf in rec_gbf:
                # Assumes the FASTA records come in the same order as the
                # GenBank records -- TODO confirm for the download pipeline.
                r_fna = next(rec_fna)
                biosample = ""
                # Prefer the BioSample cross-reference, then Assembly, then
                # a catch-all bucket.
                biosample_xref = [s for s in r_gbf.dbxrefs if "BioSample" in s]
                biosample_assembly = [s for s in r_gbf.dbxrefs if "Assembly" in s]
                if len(biosample_xref) > 0:
                    biosample= biosample_xref[0].replace("BioSample:","")
                elif len(biosample_assembly) > 0:
                    biosample = biosample_assembly[0].replace("Assembly:","")
                else:
                    biosample = "OTHER_SAMPLES"
                    # continue
                print("%s\t%s\t%s" % (biosample, r_gbf.id, ";".join(r_gbf.dbxrefs)))
                genome_id = r_gbf.id.split(".")[0]
                if genome_id in genome_ids:
                    path_str = taxa[genome_ids[genome_id]]['path']
                else:
                    path_str = "./unidentified"
                samples_path = path_str + "/samples"
                if not os.path.exists(samples_path):
                    call(['mkdir', '-p', samples_path ])
                print(samples_path)
                # Append mode: several records may share one biosample.
                with open(samples_path+"/"+biosample+".fasta","a") as f:
                    f.write(r_fna.format("fasta"))
                with open(samples_path+"/"+biosample+".gb","a") as f:
                    f.write(r_gbf.format("genbank"))
            handle_fna.close()
    return 0
def taxonomy(opt):
    """'taxonomy' sub-command: build the taxon directory tree on disk.

    Loads the nodes file, creates the per-taxon directories (plus optional
    name files), then distributes every genome's sequences into the tree.
    Returns 0.
    """
    print("Loading taxonomy nodes..")
    taxa = load_taxonomy(opt.nodes, opt.div)
    print("Creating taxonomy directories..")
    create_taxonomy_directories(taxa)
    if opt.names:
        print("Creating names for each taxon..")
        create_names(taxa, opt.names)
    print("Loading genome ids..")
    genome_ids = load_ids(opt.ids, taxa)
    print("Dumping sequences to taxon directory..")
    # NOTE(review): opt.fna is passed for both the FASTA and the GenBank
    # directory arguments -- confirm whether a separate option was intended.
    create_sequences(genome_ids, taxa, opt.fna, opt.fna)
    return 0
# Main #
if __name__ == "__main__":
    # Command-line front-end with two sub-commands: 'ft_search' and 'taxonomy'.
    parser = argparse.ArgumentParser(prog='refseq.py')
    subparsers = parser.add_subparsers()
    # sub-command: extract matching features from a GenBank/FASTA pair
    parser_search = subparsers.add_parser('ft_search', help='search for a feature in genbank file')
    parser_search.add_argument("--ft-type", "-t", type=str, default="", metavar="FEATURE_TYPE")
    parser_search.add_argument("--genbank", "-g", type=str, required=True)
    parser_search.add_argument("--fna", "-f", type=str, required=True)
    parser_search.add_argument("--complete", action="store_true", help="need a complete sequence, default: false")
    parser_search.set_defaults(which='search')
    # sub-command: group sequences into a taxonomy directory tree
    parser_search = subparsers.add_parser('taxonomy', help='group sequence by taxonomic ids inside directories')
    parser_search.add_argument("--ids", type=str, required=True, metavar="IDS-TAXA")
    parser_search.add_argument("--nodes", "-n", type=str, required=True, metavar="nodes.dmp")
    parser_search.add_argument("--names", type=str, metavar="names.dmp")
    parser_search.add_argument("--div", "-d", type=str, required=True, metavar="bacteria(0), phages(3), viruses(9)..")
    parser_search.add_argument("--fna", "-f", type=str, required=False, metavar="FNA_Directory")
    parser_search.set_defaults(which='taxonomy')
    args = parser.parse_args()
    # Dispatch on the marker installed by set_defaults; with no sub-command
    # given, 'which' is absent and the help text is printed instead.
    if hasattr(args, "which"):
        if args.which == "search":
            search_feature(args)
        elif args.which == "taxonomy":
            taxonomy(args)
        else:
            parser.print_help()
    else:
        # print(ValueError)
        parser.print_help()
| 30.332046 | 118 | 0.565046 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,465 | 0.186458 |
5004ed5f387e0978045f2498de54cc45f0d3b5ef | 2,875 | py | Python | cobras_ts/notebookquerier_images.py | achenbachsven/learningSkript | 7af067cbf0c8d7eed010806923f8af2e38977be2 | [
"BSD-3-Clause"
] | null | null | null | cobras_ts/notebookquerier_images.py | achenbachsven/learningSkript | 7af067cbf0c8d7eed010806923f8af2e38977be2 | [
"BSD-3-Clause"
] | 3 | 2020-03-24T15:59:06.000Z | 2022-02-10T01:53:37.000Z | cobras_ts/notebookquerier_images.py | achenbachsven/learningSkript | 7af067cbf0c8d7eed010806923f8af2e38977be2 | [
"BSD-3-Clause"
] | null | null | null | import sys
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import random
from cobras_ts.querier import Querier
from IPython import display
def _query_yes_no(question, default="yes"):
"""Ask a yes/no question via raw_input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is True for "yes" or False for "no".
Taken from: http://code.activestate.com/recipes/577058/
"""
valid = {"yes": True, "y": True, "ye": True,
"no": False, "n": False}
if default is None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
sys.stdout.write(question + prompt)
choice = input().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
sys.stdout.write("Please respond with 'yes' or 'no' "
"(or 'y' or 'n').\n")
class NotebookQuerierImages(Querier):
    """COBRAS querier that shows image pairs/clusters inline in a notebook.

    ``fns`` maps instance index -> image file path; must-link/cannot-link
    answers are collected from the user via stdin yes/no prompts.
    """

    def __init__(self, fns):
        super(NotebookQuerierImages, self).__init__()
        self.fns = fns
        # One large figure, reused (via clf) for all subsequent queries.
        plt.figure(figsize=(20,20))

    def query_points(self, idx1, idx2):
        # Show the two candidate instances side by side, then ask the user.
        plt.clf()
        plt.subplot(1,2,1)
        print(idx1)
        img = mpimg.imread(self.fns[idx1])
        imgplot = plt.imshow(img)
        plt.subplot(1,2,2)
        img = mpimg.imread(self.fns[idx2])
        imgplot = plt.imshow(img)
        # Refresh the notebook output cell in place.
        display.clear_output(wait=True)
        display.display(plt.gcf())
        return _query_yes_no(
            "Should the following instances be in the same cluster? " + str(idx1) + " and " + str(idx2))

    def update_clustering(self, clustering):
        # Preview the current clustering: up to 5 random images per cluster,
        # one grid row per cluster, then ask whether to keep querying.
        plt.clf()
        plt.subplots_adjust(wspace=0.2, hspace=0.0)
        n_clusters = len(clustering.clusters)
        for cluster_idx, cluster in enumerate(clustering.clusters):
            idxs = cluster.get_all_points()
            n_to_plot = min(5, len(idxs))
            random_selection = random.sample(idxs, n_to_plot)
            for idx, pt_idx in enumerate(random_selection):
                plt.subplot(len(clustering.clusters),5,cluster_idx * 5 + idx+1)
                img = mpimg.imread(self.fns[pt_idx])
                imgplot = plt.imshow(img)
            #plt.subplot(1,n_clusters,cluster_idx+1)
            #plt.plot(self.fns[clusterid, :], alpha=0.5)
        display.clear_output(wait=True)
        display.display(plt.gcf())
        return _query_yes_no("Continue querying?")
50079bd502e213830c9687dc496863d92ba09cde | 4,669 | py | Python | train.py | yjang43/WaveNet | fb0edafa813ea2c5be706aceb7ac41802a5d423b | [
"MIT"
] | null | null | null | train.py | yjang43/WaveNet | fb0edafa813ea2c5be706aceb7ac41802a5d423b | [
"MIT"
] | null | null | null | train.py | yjang43/WaveNet | fb0edafa813ea2c5be706aceb7ac41802a5d423b | [
"MIT"
] | null | null | null | import os
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from dataclasses import dataclass
from argparse import ArgumentParser
from tqdm import tqdm
from torch.utils.data import DataLoader
from data import VCTKAudio
from model import WaveNet
def set_option():
    """Build and parse the command-line options for WaveNet training."""
    arg_parser = ArgumentParser()
    # Runtime / optimisation settings.
    arg_parser.add_argument('--DEVICE', default='cuda', type=str)
    arg_parser.add_argument('--epoch', default=20, type=int)
    arg_parser.add_argument('--lr', default=1e-3, type=float)
    arg_parser.add_argument('--batch_sz', default=32, type=int)
    arg_parser.add_argument('--num_class', default=256, type=int)
    arg_parser.add_argument('--clip', default=1.0, type=float)
    arg_parser.add_argument('--max_itr', default=10_000, type=int)
    # Dataset windowing.
    arg_parser.add_argument('--src_len', default=1024 + 64, type=int)
    arg_parser.add_argument('--tgt_len', default=64, type=int)
    # Model architecture.
    arg_parser.add_argument('--num_block', default=4, type=int)
    arg_parser.add_argument('--num_layer', default=10, type=int)
    arg_parser.add_argument('--residual_dim', default=32, type=int)
    arg_parser.add_argument('--dilation_dim', default=128, type=int)
    arg_parser.add_argument('--skip_dim', default=256, type=int)
    arg_parser.add_argument('--kernel_size', default=2, type=int)
    # NOTE(review): type=bool treats any non-empty string (even "False")
    # as True -- confirm this is the intended CLI behaviour.
    arg_parser.add_argument('--bias', default=False, type=bool)
    # Logging / checkpointing.
    arg_parser.add_argument('--loss_update_itr', default=100, type=int)
    arg_parser.add_argument('--ckpt_dir', default='ckpt', type=str)
    arg_parser.add_argument('--ckpt_name', default='', type=str)
    arg_parser.add_argument('--dataset_path', default='dataset.npz', type=str)
    return arg_parser.parse_args()
def save_ckpt(ckpt_path, model, optimizer, misc=None):
    """Save model/optimizer state dicts plus epoch/loss history to *ckpt_path*.

    The model is switched to eval mode and moved to CPU for serialisation,
    then restored to its previous mode and device.  *misc*, when given, is
    a dict with 'epoch' and 'losses' entries; both are stored as None when
    *misc* is omitted.
    """
    if misc is None:
        # Bug fix: the original indexed misc['epoch'] unconditionally and
        # crashed with TypeError when the default None was used.
        misc = {}
    is_train = model.training
    device = next(model.parameters()).device
    # eval mode and cpu
    model.eval()
    model.cpu()
    # save checkpoint
    torch.save({
        'model_state_dict': model.state_dict(),
        'optimizer_state_dict': optimizer.state_dict(),
        'last_epoch': misc.get('epoch'),
        'losses': misc.get('losses'),
    }, ckpt_path)
    # recover mode and device (bug fix: the device was previously only
    # restored when the model had been in training mode)
    if is_train:
        model.train()
    model.to(device)
if __name__ == '__main__':
    # Training entry point: parse options, build data pipeline and model,
    # optionally resume, then run the optimisation loop with per-epoch
    # checkpointing.
    opt = set_option()
    os.makedirs(opt.ckpt_dir, exist_ok=True)
    # prepare dataset and dataloader
    dataset = VCTKAudio(opt.dataset_path, opt.src_len, opt.tgt_len, opt.num_class) # TODO: give parameter accordingly
    dataloader = DataLoader(dataset,
                            batch_size=opt.batch_sz,
                            shuffle=True,
                            num_workers=2)
    # progress bar sized for every optimisation step across all epochs
    pbar = tqdm(range(opt.epoch * min(opt.max_itr, len(dataloader))))
    # prepare model
    model = WaveNet(
        num_block = opt.num_block,
        num_layer = opt.num_layer, # 10,
        class_dim = opt.num_class,
        residual_dim = opt.residual_dim,
        dilation_dim = opt.dilation_dim,
        skip_dim = opt.skip_dim,
        kernel_size = opt.kernel_size,
        bias=opt.bias
    )
    # prepare optimizer
    loss_fn = nn.CrossEntropyLoss()
    optimizer = optim.Adam(model.parameters(), lr=opt.lr)
    losses = []
    last_epoch = 0
    # resume from checkpoint when a name was supplied
    if opt.ckpt_name:
        ckpt = torch.load(os.path.join(opt.ckpt_dir, opt.ckpt_name))
        model.load_state_dict(ckpt['model_state_dict'])
        optimizer.load_state_dict(ckpt['optimizer_state_dict'])
        last_epoch = ckpt['last_epoch']
        losses = ckpt['losses']
    # load model to device
    model.train()
    model.to(opt.DEVICE)
    # train
    for e in range(last_epoch, opt.epoch):
        accum_loss = 0
        for idx, batch in enumerate(dataloader):
            src, tgt = batch['src'].to(opt.DEVICE), batch['tgt'].to(opt.DEVICE)
            # only the last tgt_len output positions are scored against the
            # targets (see the slicing below)
            pred = model(src)[:, :, -opt.tgt_len: ]
            loss = loss_fn(pred, tgt)
            optimizer.zero_grad()
            loss.backward()
            # clip gradients before the step
            torch.nn.utils.clip_grad_norm_(model.parameters(), opt.clip)
            optimizer.step()
            accum_loss += loss.item()
            pbar.update()
            # report/record the running average every loss_update_itr steps
            if (idx + 1) % opt.loss_update_itr == 0:
                avg_loss = accum_loss / opt.loss_update_itr
                pbar.set_description(f"Epoch {round(e + idx / min(opt.max_itr, len(dataloader)), 2)} | Loss: {round(avg_loss, 5)}")
                losses.append(avg_loss)
                accum_loss = 0
            if idx + 1 == opt.max_itr:
                break
        # save checkpoint once per epoch
        save_ckpt(os.path.join(opt.ckpt_dir, str(e) + '.pt'),
                  model,
                  optimizer,
                  {"epoch": e + 1,
                   "losses": losses})
| 32.880282 | 131 | 0.618762 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 773 | 0.16556 |
5008bf50f1c889a0cb8f758f821ceff1b2688937 | 1,933 | py | Python | swmmio/version_control/tests/compare_inp.py | jennwuu/swmmio | 6918ecfb69c10333cbc65ce0ab6554f8a04ef8f9 | [
"MIT"
] | 76 | 2016-04-26T14:04:02.000Z | 2022-03-24T10:10:29.000Z | swmmio/version_control/tests/compare_inp.py | Kimi-Monica/swmmio | 54dd6c1f7a3e47db5702b1f703beca0a8945a250 | [
"MIT"
] | 94 | 2016-05-06T15:32:51.000Z | 2022-02-10T08:03:30.000Z | swmmio/version_control/tests/compare_inp.py | Kimi-Monica/swmmio | 54dd6c1f7a3e47db5702b1f703beca0a8945a250 | [
"MIT"
] | 26 | 2016-09-01T22:51:47.000Z | 2022-02-09T09:13:23.000Z | import os
def remove_comments_and_crlf(inp_path, comment_string=';', overwrite=False):
    """Strip comments and blank lines from an INP file.

    Writes the cleaned text next to the input as ``<name>_mod.inp``; with
    ``overwrite=True`` the original file is replaced instead.  Full-line
    comments and blank lines are dropped; inline comments are cut at the
    first *comment_string* character.

    Bug fix: the marker was previously hard-coded to ';' inside the loop,
    so a custom *comment_string* argument was silently ignored.
    """
    tmpfilename = os.path.splitext(os.path.basename(inp_path))[0] + '_mod.inp'
    tmpfilepath = os.path.join(os.path.dirname(inp_path), tmpfilename)
    with open(inp_path) as oldf:
        with open(tmpfilepath, 'w') as newf:
            for line in oldf:
                if comment_string in line:
                    if line.strip()[0] == comment_string:
                        # the whole line is a comment: skip it
                        pass
                    else:
                        # keep only the text left of the inline comment
                        non_comment_line = line.split(comment_string)[0]
                        newf.write(non_comment_line + '\n')
                elif line == '\n':
                    # drop blank lines
                    pass
                else:
                    newf.write(line)
    if overwrite:
        os.remove(inp_path)
        os.rename(tmpfilepath, inp_path)
def line_by_line(path1, path2, outfile):
    """
    given paths to two INP files, write a text file showing where differences
    occur in line-by-line fashion. If the order of elements do not match, this
    will be recorded as a difference.

    ignores any spaces in a file such that lines with more or less white space
    having the same non-whitespace will be considered equal.

    Bug fix: the original advanced both files with bare next() calls and
    crashed with an uncaught StopIteration once either file was exhausted;
    zip() compares the same line pairs but stops cleanly at the end of the
    shorter file.
    """
    with open(outfile, 'w') as diff_file:
        with open(path1) as f1, open(path2) as f2:
            for line1, line2 in zip(f1, f2):
                # compare with all spaces removed: content only
                if line1.replace(" ", "") != line2.replace(" ", ""):
                    diff_file.write(line1)
50098859afc4b632919855885ac4d97901250f2d | 221 | py | Python | codeforces/math数学/1000/1023B两个玩具.py | yofn/pyacm | e573f8fdeea77513711f00c42f128795cbba65a6 | [
"Apache-2.0"
] | null | null | null | codeforces/math数学/1000/1023B两个玩具.py | yofn/pyacm | e573f8fdeea77513711f00c42f128795cbba65a6 | [
"Apache-2.0"
] | null | null | null | codeforces/math数学/1000/1023B两个玩具.py | yofn/pyacm | e573f8fdeea77513711f00c42f128795cbba65a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# https://codeforces.com/problemset/problem/1023/A
def f(ll):
n,k = ll #1e14
f = k//2 + 1
e = min(k-1,n)
return max(0,e-f+1)
l = list(map(int,input().split()))
print(f(l))
| 17 | 50 | 0.552036 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 77 | 0.348416 |
500afac65e9dcc07d02e6f844d28cdea7099e875 | 9,956 | py | Python | nwb/combine_messages.py | bendichter/api-python | 52e97e7642021913ae6505ab63b7cc77d2622d76 | [
"BSD-3-Clause"
] | 32 | 2015-08-21T14:14:44.000Z | 2017-08-31T09:33:14.000Z | nwb/combine_messages.py | bendichter/api-python | 52e97e7642021913ae6505ab63b7cc77d2622d76 | [
"BSD-3-Clause"
] | 24 | 2015-11-18T11:17:04.000Z | 2019-12-31T19:44:18.000Z | nwb/combine_messages.py | bendichter/api-python | 52e97e7642021913ae6505ab63b7cc77d2622d76 | [
"BSD-3-Clause"
] | 18 | 2015-10-07T03:04:41.000Z | 2022-03-11T18:52:20.000Z | import re
import pprint
pp = pprint.PrettyPrinter(indent=4)
from sys import version_info # py3, for checking type of input
def combine_messages(messages):
""" Combines messages that have one or more integers in them, such as
"trial001" "trial002", into a single message like "trial# (#=1-2)".
This is to reduce the number of messages required to be displayed.
Operates by creating the following structure, named "ti" for "template info":
{
't2tn': {} - maps each template (containing "#") to a template number (tn)
'tn2t': [] - list of templates, indexed by the template number
'm2tns': {} - maps each message number (index in messages) to
array of template numbers (tns)
'tn2dm': {} - maps each template number to a dictionary that has as keys the digits
used to make the template, and with value the message number used to make the template
with those digits. i.e.:
{ tn1: {d1: m1, d2: m2}, tn2: {d3: m3, d4: m4}, tn2: { ...}}
where:
tn - template number
d: m - digits used to make template from message number m
'tn2md': {} - maps each template number of a dictionary that has keys the message number
and value the digits used to make the message. These reverse the key-values in 'tn2dm', e.g.:
{ tn1: {m1: d1, m2: d2}, tn2: {m3: d3, m4: d4}, tn2: { ...}}
where:
tn - template number
d: m - digits used to make template from message number m
This array is used to dynamically remove entries in 'tn2dm' as each message in a
template is displayed so that structure always has an accurate list of remaining messages.
'mout': [] - messages to display (output), formed by combining messages
'mfin': [] - set of message numbers "finished" (already included in mout).
}
This function works by first creating everything except mout and mfin, then
going through each message, finding the template numbers that have the most
digits, and using those to make the combined message.
"""
ti = {}
ti['t2tn'] = {}
ti['tn2t'] = []
ti['m2tns'] = {}
ti['tn2dm'] = {}
ti['tn2md'] = {}
# debug_msg = "/acquisition/timeseries/fov_15002_17/data"
# debug_mn = -1
for mn in range(len(messages)):
msg = messages[mn]
if version_info[0] > 2:
assert isinstance(msg, str), "in Python 3, messages must be str (unicode) type"
# if msg.startswith(debug_msg):
# debug_mn = mn
found_nums = re.findall("\d+", msg)
if not found_nums:
# no numbers found, don't process
continue
# remove any duplicates
found_nums = list(set(found_nums))
for digits in found_nums:
pattern = "(?<!\d)%s(?!\d)" % digits # substitute only if digits not surrounded by other digits
template = re.sub(pattern, "#", msg) # make template for this message and digits
if template not in ti['t2tn']:
tn = len(ti['tn2t']) # template number
ti['tn2t'].append(template) # add template to list of templates
ti['t2tn'][template] = tn # add entry to map of template to template number
else:
tn = ti['t2tn'][template]
# save template number (tn) in 'm2tns'
if mn not in ti['m2tns']:
ti['m2tns'][mn] = [tn,]
else:
ti['m2tns'][mn].append(tn)
# save template number, digits and message number in 'tn2dm'
idigits = int(digits)
if tn not in ti['tn2dm']:
ti['tn2dm'][tn] = {idigits: mn}
ti['tn2md'][tn] = {mn: idigits}
else:
if digits in ti['tn2dm'][tn]:
print ("duplicate message found: %s" % msg)
break
ti['tn2dm'][tn][idigits] = mn
ti['tn2md'][tn][mn] = idigits
# done building needed structures. Now generate 'output' (i.e. ti['mfin'] and ti['mout']
ti['mout'] = []
ti['mfin'] = set([])
for mn in range(len(messages)):
# if mn == debug_mn:
# print ("found mn %i '%s'" % (debug_mn, debug_msg))
# import pdb; pdb.set_trace()
if mn in ti['mfin']:
# message has already been displayed (using a template)
continue
if mn not in ti['m2tns']:
# no digits found in this message, just display as is
ti['mout'].append(messages[mn])
ti['mfin'].add(mn)
continue
# this message has at least one pattern. Find template with largest number of other messages
# that have not been displayed yet
# build list of pairs, (a, b); a - template number, b - number of messages in template
tn_nm_pairs = [ (tn, len(ti['tn2dm'][tn])) for tn in ti['m2tns'][mn] ]
# get those pairs that have the largest number of messages
ltn_nm_pairs = largest_pairs(tn_nm_pairs)
# nmax = 0
# for tn in ti['m2tns'][mn]:
# dm = ti['tn2dm'][tn]
# num_messages = len(ti['tn2dm'][tn]) # num messages associated with this template
# if num_messages > nmax:
# max_tn = [tn]
# nmax = num_messages
# elif num_messages == nmax:
# # multiple templates have the same number of messages, will need to select
# # one in a deterministic way
# max_tn.append(tn)
# # if no other messages use pattern, just display as is
# if nmax == 1:
if ltn_nm_pairs[0][1] == 1:
# only one messages uses pattern, just display as is
ti['mout'].append(messages[mn])
ti['mfin'].add(mn)
continue
# if len(max_tn) > 1:
if len(ltn_nm_pairs) == 1:
# only one template found that has maximal number of messages. use it.
max_tn = ltn_nm_pairs[0][0]
else:
# multiple templates have the same maximal number of messages. Select the one
# with the rightmost position of '#' in the template
# build list of pairs, (a,b): a - template number, b - index of '#' in template
tn_ix_pairs = [ (ltn_nm_pairs[i][0], ti['tn2t'][ltn_nm_pairs[i][0]].index('#'))
for i in range(len(ltn_nm_pairs))]
tn_ix_pairs = largest_pairs(tn_ix_pairs)
if len(tn_ix_pairs) > 1:
# should never happen since templates made for the same message cannot have
# the same position for the '#'
sys.exit("found multiple templates with same maximal number of messages and same template")
# use the template found
max_tn = tn_ix_pairs[0][0]
# other messages use this template. Get list message numbers and digits that share this template
s_digits = list(ti['tn2dm'][max_tn].keys()) # shared digits
s_mns = list(ti['tn2dm'][max_tn].values()) # shared message numbers
# update tn2dm to remove messages that will be displayed shortly (in this template)
for mn in s_mns:
for tn in ti['m2tns'][mn]:
idigit = ti['tn2md'][tn][mn]
del ti['tn2dm'][tn][idigit]
# make new message by combining shared digits with template
template = ti['tn2t'][max_tn]
# convert digits from string to int
# i_digits = sorted([int(i) for i in s_digits])
i_digits = sorted(s_digits)
# make string representing ranges of digits
prevn = i_digits[0] # initialize previous number to first
sr = str(prevn) # string of ranges being generated
in_range = False
for i in range(1, len(i_digits)):
newn = i_digits[i]
if newn == prevn + 1:
# in a range
in_range = True
else:
# not in a range. But if was previously save end of previous range
if in_range:
sr = "%s-%i" % (sr, prevn)
in_range = False
# save new number
sr = "%s,%i" % (sr, newn)
prevn = newn
# append final number if in range
if in_range:
sr = "%s-%i" % (sr, newn)
new_message = template + " (#=%s)" % sr
ti['mout'].append(new_message)
# add all messages that share this template to ti['mfin'] so they are not displayed again
ti['mfin'].update(s_mns)
# return list of combined messages
return ti['mout']
def largest_pairs(pairs):
    """Return the pairs whose second element equals the maximum 2nd element.

    Input is a list of two-element tuples, e.g. [(5, 4), (2, 7)]; output is
    the sub-list with the largest second element, e.g. [(2, 7)].

    Bug fixes: an empty input now returns [] (previously NameError on the
    unbound result), and a None sentinel replaces the old ``largest = -1``,
    which mishandled inputs whose second elements are all below -1.
    """
    lpairs = []
    largest = None
    for pair in pairs:
        _, score = pair
        if largest is None or score > largest:
            largest = score
            lpairs = [pair]
        elif score == largest:
            lpairs.append(pair)
    return lpairs
def test_combine_messages():
    """ tests combine_messages function"""
    # A mix of messages with shared digit patterns (the trial23/trial27
    # families, the 32-bits run 32-35), negative-looking ids, and
    # digit-free text, exercising template selection and range building.
    messages = [
        "some prefix trial-none",
        "some prefix trial23",
        "some prefix trial23/timestamps",
        "some prefix trial23 timestamps",
        "some prefix trial23\ntimestamps",
        "some prefix 32-bits, trial32",
        "some prefix 32-bits, trial33",
        "some prefix 32-bits, trial34",
        "some prefix 32-bits, trial35",
        "some prefix trial-11",
        "some prefix trial23 and trial23 again",
        "some prefix trial27",
        "some prefix trial27/timestamps",
        "some prefix trial27 timestamps",
        "some prefix trial27\ntimestamps",
        "some prefix 32-bits, trial27",
        "some prefix trial27 and trial27 again"]
    cm = combine_messages(messages)
    pp.pprint(cm)
# Run the demo when executed as a script.
if __name__ == '__main__':
    test_combine_messages()
| 44.64574 | 108 | 0.569004 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 6,077 | 0.610386 |
500cdcbd8b9588b7badc6c30cf7210ed1d19b9b3 | 639 | py | Python | webapp/app/encryptioncontext/migrations/0001_initial.py | aws-samples/aws-secrets-manager-credential-rotation-without-container-restart | 11ad22e8f1d55bf48af219fecdd4ba208c88dff4 | [
"MIT-0"
] | 3 | 2021-08-10T21:05:32.000Z | 2021-11-08T10:25:57.000Z | webapp/app/encryptioncontext/migrations/0001_initial.py | aws-samples/aws-secrets-manager-credential-rotation-without-container-restart | 11ad22e8f1d55bf48af219fecdd4ba208c88dff4 | [
"MIT-0"
] | null | null | null | webapp/app/encryptioncontext/migrations/0001_initial.py | aws-samples/aws-secrets-manager-credential-rotation-without-container-restart | 11ad22e8f1d55bf48af219fecdd4ba208c88dff4 | [
"MIT-0"
] | 1 | 2021-08-10T21:05:33.000Z | 2021-08-10T21:05:33.000Z | # Generated by Django 3.0.1 on 2020-01-02 22:21
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the CustomerProfile table."""
    # First migration in this app's history.
    initial = True
    # No prior migrations to depend on.
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='CustomerProfile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('account_number', models.CharField(max_length=8)),
                ('userid', models.CharField(max_length=6)),
                # Binary column of up to 4096 bytes; name suggests it holds
                # an encrypted blob -- see the app's encryption code.
                ('account_encrypted', models.BinaryField(max_length=4096)),
            ],
        ),
    ]
| 26.625 | 114 | 0.588419 | 546 | 0.85446 | 0 | 0 | 0 | 0 | 0 | 0 | 115 | 0.179969 |
500def4cbc4626bb2afe20baf06a6df2008bf456 | 509 | py | Python | spec/python/test_cast_nested.py | DarkShadow44/kaitai_struct_tests | 4bb13cef82965cca66dda2eb2b77cd64e9f70a12 | [
"MIT"
] | 11 | 2018-04-01T03:58:15.000Z | 2021-08-14T09:04:55.000Z | spec/python/test_cast_nested.py | DarkShadow44/kaitai_struct_tests | 4bb13cef82965cca66dda2eb2b77cd64e9f70a12 | [
"MIT"
] | 73 | 2016-07-20T10:27:15.000Z | 2020-12-17T18:56:46.000Z | spec/python/test_cast_nested.py | DarkShadow44/kaitai_struct_tests | 4bb13cef82965cca66dda2eb2b77cd64e9f70a12 | [
"MIT"
] | 37 | 2016-08-15T08:25:56.000Z | 2021-08-28T14:48:46.000Z | # Autogenerated from KST: please remove this line if doing any edits by hand!
import unittest
from cast_nested import CastNested
class TestCastNested(unittest.TestCase):
    """Autogenerated KST spec test for the CastNested parser (see file header)."""
    def test_cast_nested(self):
        # Parse the binary fixture, then compare the `.value` of each parsed
        # opcode instance with the corresponding pre-computed *_value attribute.
        with CastNested.from_file('src/switch_opcodes.bin') as r:
            self.assertEqual(r.opcodes_0_str.value, u"foobar")
            self.assertEqual(r.opcodes_0_str_value, u"foobar")
            self.assertEqual(r.opcodes_1_int.value, 66)
            self.assertEqual(r.opcodes_1_int_value, 66)
| 36.357143 | 77 | 0.719057 | 376 | 0.738703 | 0 | 0 | 0 | 0 | 0 | 0 | 119 | 0.233792 |
500ef1d473debd5ed56d175f4b097ca7c05550eb | 6,390 | py | Python | person/person_processor.py | RuizheYang/deeplearning-proj | f2b0a11cb622a43f55038da577661ba9656e0573 | [
"Apache-2.0"
] | null | null | null | person/person_processor.py | RuizheYang/deeplearning-proj | f2b0a11cb622a43f55038da577661ba9656e0573 | [
"Apache-2.0"
] | null | null | null | person/person_processor.py | RuizheYang/deeplearning-proj | f2b0a11cb622a43f55038da577661ba9656e0573 | [
"Apache-2.0"
] | null | null | null | """
Model Description
@author: Xiayu Li
@contact: xiayu_li@shannonai.com
@version: 0.1
@license: Apache Licence
@file: person_processor.py
@time: 2019/11/14 3:08 PM
"""
import json
import os
from typing import List, Tuple, Dict
from allennlp.predictors.predictor import Predictor
from allennlp.modules.token_embedders.bert_token_embedder import PretrainedBertModel
from extractor.common.base_processor import BaseProcessor, EntityProcessor
from extractor.common.collector_input_output_record import PersonInputRecord
from extractor.person.ner_predictor import PersonNERPredictor
from extractor.person.identity_predictor import PersonIdentityPredcitor
from extractor.person.person_collector import PersonCollector
from extractor.utils.text_process import find_sentence
from extractor.common import CONFIG_PATH
class PersonProcessor(EntityProcessor):
    """Person extraction pipeline: NER, identity classification and collection.

    Wires together a BERT-based person NER predictor, a person identity
    classifier and a PersonCollector, and exposes batch processing over
    (title, content) article pairs.
    """

    def __init__(self, config_path, cuda_device=0, batch_size=64):
        """Load both predictors and the result collector.

        Args:
            config_path: path to the extractor configuration.
            cuda_device: GPU index used by both predictors.
            batch_size: prediction batch size used by both predictors.
        """
        super().__init__(config_path, cuda_device=cuda_device, batch_size=batch_size)
        self.ner_engine = PersonNERPredictor(model_path=getattr(self.config.model_path, "person_ner"),
                                             cuda_device=cuda_device,
                                             batch_size=batch_size,
                                             bert_path=self.config.chinese_bert_path)
        # Drop the cached pretrained BERT weights before loading the second
        # model so both backbones are not kept resident simultaneously.
        PretrainedBertModel._cache.clear()
        self.identify_engine = PersonIdentityPredcitor(model_path=getattr(self.config.model_path, "person_classifier"),
                                                       cuda_device=cuda_device,
                                                       batch_size=batch_size)
        self.collector = PersonCollector(config_path)

    @staticmethod
    def _as_input_record(result, title):
        """Convert one raw predictor result dict into a PersonInputRecord."""
        return PersonInputRecord(PER_NAME=result['name'],
                                 HINT_SENT=result['hint_sent'],
                                 PER_ATTRIBUTE=result['attribute'],
                                 SOURCE_FIELD=result['source_field'],
                                 ARTICLE_TYPE=result['article_type'],
                                 TITLE=title,
                                 CONFIDENCE=result['confidence'])

    def process_batch_title_content(self, title_content_aritcle_type: List[Tuple[str, str]], meta_data: List[str]=None):
        """Process a batch of (title, content) pairs and collect person records.

        Args:
            title_content_aritcle_type: list of (title, content) pairs.
            meta_data: article type per pair, indexed positionally; it must
                be the same length as the batch (passing None raises, as in
                the original implementation).

        Returns:
            One list of collected person results per input pair.
        """
        merged_results = []
        for i, (title, content) in enumerate(title_content_aritcle_type):
            article_type = meta_data[i]
            title_result, content_result = self.process_title_content(title, content, article_type)
            # Title and content results share the same record layout, so a
            # single conversion pass replaces the former duplicated loops.
            title_content_results = [self._as_input_record(result, title)
                                     for result in title_result + content_result]
            title_content_results = self.collector.result_collect(title_content_results)
            merged_results.append(title_content_results)
        return merged_results
if __name__ == "__main__":
person_processor = PersonProcessor(CONFIG_PATH, cuda_device=1, batch_size=64)
ans = person_processor.process_content(content='习近平 在 参加 党 的 十九大 贵州省 代表团 讨论 时 强调 万众一心 开拓进取 把 新时代 中国 特色 社会主义 推向 前进 | 习近平在参加党的十九大贵州省代表团讨论时强调万众一心开拓进取把新时代中国特色社会主义推向前进10月19日,习近平同志参加党的十九大贵州省代表团讨论。新华社记者李涛摄习近平同志19日上午在参加党的十九大贵州省代表团讨论时强调,党的十九大报告进一步指明了党和国家事业的前进方向,是我们党团结带领全国各族人民在新时代坚持和发展中国特色社会主义的政治宣言和行动纲领。要深刻学习领会中国特色社会主义进入新时代的新论断,深刻学习领会我国社会主要矛盾发生变化的新特点,深刻学习领会分两步走全面建设社会主义现代化国家的新目标,深刻学习领会党的建设的新要求,激励全党全国各族人民万众一心,开拓进取,把新时代中国特色社会主义推向前进。贵州省代表团讨论气氛热烈。孙志刚、谌贻琴、余留芬、潘克刚、周建琨、钟晶、杨波、张蜀新、黄俊琼等9位代表分别结合实际,对报告发表了意见,畅谈了认识体会。大家认为,党的十九大报告是一个实事求是、与时俱进,凝心聚力、催人奋进的报告,是一个动员和激励全党为决胜全面建成小康社会,夺取新时代中国特色社会主义伟大胜利,实现中华民族伟大复兴的中国梦不懈奋斗的报告,一致表示拥护这个报告。习近平边听边记,同代表们深入讨论。六盘水市盘州市淤泥乡岩博村党委书记余留芬发言时说,广大农民对党的十九大报告提出土地承包到期后再延长30年的政策十分满意,习近平听了十分高兴,说这是要给广大农民吃个“定心丸”。遵义市播州区枫香镇花茂村党总支书记潘克刚讲到乡村农家乐旅游成为乡亲致富新路,习近平说既要鼓励发展乡村农家乐,也要对乡村旅游作分析和预测,提前制定措施,确保乡村旅游可持续发展。毕节市委书记周建琨讲到把支部建在生产小组上、发展脱贫攻坚讲习所,习近平强调,新时代的农民讲习所是一个创新,党的根基在基层,一定要抓好基层党建,在农村始终坚持党的领导。黔西南州贞丰县龙场镇龙河村卫生室医生钟晶讲到农村医疗保障问题,习近平详细询问现在农民一年交多少医疗保险费、贫困乡村老百姓生产生活条件有没有改善。贵州六盘水市钟山区大湾镇海嘎村党支部第一书记杨波谈了自己连续8年坚持当驻村第一书记、带领乡亲脱贫致富的体会,习近平表示,对在脱贫攻坚一线的基层干部要关心爱护,各方面素质好、条件具备的要提拔使用,同时要鼓励年轻干部到脱贫攻坚一线去历练。习近平还对黔东南州镇远县江古镇中心小学教师黄俊琼说,老少边穷地区的教育培训工作要加大力度,让更多乡村和基层教师受到专业培训。在认真听取代表发言后,习近平表示,很高兴作为贵州省代表团的代表参加讨论。习近平向在座各位代表和贵州全省各族干部群众致以诚挚的问候。习近平指出,5年来,贵州认真贯彻落实党中央决策部署,各方面工作不断有新进展。综合实力显著提升,脱贫攻坚成效显著,生态环境持续改善,改革开放取得重大进展,人民群众获得感不断增强,政治生态持续向好。贵州取得的成绩,是党的十八大以来党和国家事业大踏步前进的一个缩影。这从一个角度说明了党的十八大以来党中央确定的大政方针和工作部署是完全正确的。习近平希望贵州的同志全面贯彻落实党的十九大精神,大力培育和弘扬团结奋进、拼搏创新、苦干实干、后发赶超的精神,守好发展和生态两条底线,创新发展思路,发挥后发优势,决战脱贫攻坚,决胜同步小康,续写新时代贵州发展新篇章,开创百姓富、生态美的多彩贵州新未来。习近平指出,中国特色社会主义进入了新时代,这是我国发展新的历史方位。作出这个重大政治判断,是一项关系全局的战略考量,我们必须按照新时代的要求,完善发展战略和各项政策,推进和落实各项工作。我国社会主要矛盾的变化是关系全局的历史性变化,对党和国家工作提出了许多新要求,我们要深入贯彻新发展理念,着力解决好发展不平衡不充分问题,更好满足人民多方面日益增长的需要,更好推动人的全面发展、全体人民共同富裕。我们要紧密结合党的十九大对我国未来发展作出的战略安排,推进党和国家各项工作,特别是要保持各项战略、工作、政策、措施的连续性和前瞻性,一步接一步,连续不断朝着我们确定的目标前进。习近平强调,办好中国的事情,关键在党。全面从严治党不仅是党长期执政的根本要求,也是实现中华民族伟大复兴的根本保证。我们党要团结带领人民进行伟大斗争、推进伟大事业、实现伟大梦想,必须毫不动摇把党建设得更加坚强有力。全面从严治党永远在路上。在全面从严治党这个问题上,我们不能有差不多了,该松口气、歇歇脚的想法,不能有打好一仗就一劳永逸的想法,不能有初见成效就见好就收的想法。必须持之以恒、善作善成,把管党治党的螺丝拧得更紧,把全面从严治党的思路举措
搞得更加科学、更加严密、更加有效,推动全面从严治党向纵深发展。各级党组织和全体党员、各级领导干部必须坚决维护党中央权威,坚决服从党中央集中统一领导,把“四个意识”落实在岗位上、落实在行动上,不折不扣执行党中央决策部署,始终在思想上政治上行动上同党中央保持高度一致。习近平指出,大会之后,要认真组织好党的十九大精神宣传教育工作和学习培训工作,注重宣传各地区各部门学习贯彻的具体举措和实际行动,注重反映基层干部群众学习贯彻的典型事迹和良好风貌。要充分利用各种宣传形式和手段,采取人民群众喜闻乐见的形式,推动党的十九大精神进企业、进农村、进机关、进校园、进社区、进军营,让干部鼓足干劲。要组织好集中宣讲活动,把党的十九大精神讲清楚、讲明白,让老百姓听得懂、能领会、可落实。',
article_type='新闻报道_讲话稿')
print(ans)
| 86.351351 | 2,387 | 0.684507 | 2,987 | 0.27869 | 0 | 0 | 0 | 0 | 0 | 0 | 7,019 | 0.65488 |
500f0349827ded93b1215069df6a905dce9513ac | 1,130 | py | Python | application/__init__.py | ppawlo97/si-summer-2020 | ddbb4e29ba9da9af9aaf658df07f891e36737d10 | [
"MIT"
] | null | null | null | application/__init__.py | ppawlo97/si-summer-2020 | ddbb4e29ba9da9af9aaf658df07f891e36737d10 | [
"MIT"
] | 3 | 2021-05-21T16:19:13.000Z | 2022-02-10T00:50:32.000Z | application/__init__.py | ppawlo97/si-summer-2020 | ddbb4e29ba9da9af9aaf658df07f891e36737d10 | [
"MIT"
] | null | null | null | import logging
logging.basicConfig(level=logging.INFO)
from flask import Flask
from application.config import Config
# Create and configure the package-level Flask application object.
app = Flask(__name__)
app.config.from_object(Config)
from application.models.classifiers.CNNClassifier import CNNClassifier
from application.models.classifiers.MLPClassifier import MLPClassifier
from application.models.classifiers.NaiveBayesClassifier import NaiveBayesClassifier
from application.models.classifiers.SVMClassifier import SVMClassifier
from application.models.detectors.CasClasDetector import CasClasDetector
from application.models.detectors.MTCNNDetector import MTCNNDetector
from application.utils import get_urls_list
logging.info("Loading models...")
# Instantiate every detector/classifier once at import time; all weight
# paths come from the Flask configuration.
MODELS = {"mtcnn": MTCNNDetector(),
          "casclas": CasClasDetector(app.config["PRETRAINED_CASCLAS"]),
          "mlp": MLPClassifier(app.config["MLP_WEIGHTS"]),
          "svm": SVMClassifier(app.config["SVM"]),
          "cnn": CNNClassifier(app.config["CNN_WEIGHTS"]),
          "nb": NaiveBayesClassifier(app.config["CATEGORICAL_NB"])}
# Image URL list built from the configured OFFLINE_IMG_URLS source
# (presumably used when serving images offline -- see application.utils).
IMG_URLS = get_urls_list(app.config["OFFLINE_IMG_URLS"])
# Imported last, after `app` and MODELS exist -- the usual Flask pattern to
# avoid a circular import (routes presumably imports from this package).
from application import routes
| 35.3125 | 84 | 0.79115 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 139 | 0.123009 |
5010b73121964b1e2039f7c97a78c65b09242001 | 21,089 | py | Python | CalibTracker/SiStripChannelGain/test/Cosmic_B38/InputFiles_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | CalibTracker/SiStripChannelGain/test/Cosmic_B38/InputFiles_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | CalibTracker/SiStripChannelGain/test/Cosmic_B38/InputFiles_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | 'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/80C4285C-779E-DD11-9889-001617E30CA4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/A83BF5EE-6E9E-DD11-8082-000423D94700.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/8266853E-999E-DD11-8B73-001D09F2432B.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/2AAFE9A9-A19E-DD11-821B-000423D99F3E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/067E98F3-489F-DD11-B309-000423D996B4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/64C1D6F5-489F-DD11-90B7-000423D986A8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0012/3C084F93-679C-DD11-A361-000423D9989E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/9C14E69F-069D-DD11-AC41-001617DBCF1E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/5439B4CA-309D-DD11-84E5-000423D944F8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/D20BB375-AE9D-DD11-BF49-000423D944FC.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/E2E8FE03-A69D-DD11-8699-000423D98750.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/B40C29EC-B69D-DD11-A665-000423D6A6F4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/843C7874-1F9F-DD11-8E03-000423D98804.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/7EB3DF8E-0E9F-DD11-A451-001D09F29146.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/CEB001F9-169F-DD11-A5E6-000423D94494.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/382BAEE2-D39E-DD11-A0A4-000423D98EC8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/CC5B37A1-A99E-DD11-816F-001617DBD230.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/6EDD168B-2F9F-DD11-ADF5-001617C3B79A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/EE4B4C82-999C-DD11-86EC-000423D99F3E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/1CC8332F-459E-DD11-BFE1-001617C3B65A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/7A6A133C-999E-DD11-9155-001D09F2462D.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/F292BE7F-409F-DD11-883A-001617C3B6FE.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/B870AA81-409F-DD11-B549-001617C3B78C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/9003F328-899C-DD11-83D7-000423D986C4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/500B13D3-6F9C-DD11-8745-001617DC1F70.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/4CBAEDCC-309D-DD11-A617-001617E30D06.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/AED19458-399D-DD11-B9AC-000423D9A2AE.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/A6688D1F-959D-DD11-B5B7-000423D6A6F4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/F0076F20-F59E-DD11-8B57-000423D944F0.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/EC6C6EA4-499D-DD11-AC7D-000423D98DB4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/DA099105-639D-DD11-9C3E-001617E30F50.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/2E40EDED-1A9E-DD11-9014-001617DBD5AC.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/7647004C-F19D-DD11-8BAA-001617DBD224.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/38881706-5E9E-DD11-B487-000423D98868.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/1098901B-569E-DD11-BE60-000423D985E4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/5E4E7508-919C-DD11-AEB1-000423D9853C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/060DD475-179D-DD11-A003-000423D94908.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/8A563E55-289D-DD11-BA24-000423D6BA18.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/545F9B54-D09D-DD11-A58B-000423D6B5C4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/68795DEE-D79D-DD11-ADB7-000423D98DB4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/3AD49E1B-F59E-DD11-81C4-000423D94700.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/548891AB-8C9D-DD11-8989-001617C3B69C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/745CD91D-529D-DD11-8908-000423D6B48C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/3EF1CC87-2F9F-DD11-9EFC-001617DF785A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/FCB4F2BA-3C9E-DD11-82C7-000423D99160.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/ECC4D018-569E-DD11-80C4-001617C3B6FE.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/20C97175-669E-DD11-8ADD-00161757BF42.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/52683098-A99E-DD11-BCD0-000423D94AA8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/F6C17BA7-A19E-DD11-B57C-000423D98634.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/D844B765-519F-DD11-96F9-001617E30D0A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/02EB3FD3-6F9C-DD11-8C35-001617C3B6FE.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/0EB355C8-309D-DD11-85B7-001617C3B64C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/8E478481-BA9E-DD11-9573-000423D6B358.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/A4775BE3-739D-DD11-843D-001617C3B778.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/8E8B21C6-F99D-DD11-BF05-000423D986A8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/0EF20D52-139E-DD11-9473-000423D6B5C4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/7C00B404-389F-DD11-AB81-000423D985E4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/AE67CFF1-279F-DD11-B6DC-000423D98804.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/7400A101-389F-DD11-B540-000423D60FF6.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/2A630CF2-279F-DD11-942A-000423D985E4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/1CD3DEA6-F59C-DD11-986D-000423D98BC4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/D809EECD-7F9E-DD11-B4D7-00161757BF42.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/64451F65-779E-DD11-869D-001617E30D40.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/BA532E6C-519F-DD11-8DE7-000423D98FBC.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/021AEBFE-7A9F-DD11-863E-0019DB29C620.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/CA5A90F6-489F-DD11-8F60-000423D6B2D8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/028578E0-809C-DD11-AF7D-001617C3B6E8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0012/08A6038E-679C-DD11-A4B9-001617E30D0A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0012/18BA3290-679C-DD11-B9A1-001617C3B77C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/B0CD45DB-D39E-DD11-BC03-000423D985E4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/125FE86B-CB9E-DD11-B054-000423DD2F34.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/6E783236-849D-DD11-A9FF-001617C3B654.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/800FA4BD-5A9D-DD11-ACBB-001617DBD5AC.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/760FB963-7C9D-DD11-B812-001D09F231C9.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/52CBD0DE-0A9E-DD11-B583-000423D6B358.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/905B1953-349E-DD11-8022-001D09F2AD7F.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/7A7A6D05-389F-DD11-9D08-000423D98804.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/C223029D-E59C-DD11-A125-001617E30D40.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/3293D2A6-4D9E-DD11-81D1-000423D98B5C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/4E5AEDFC-5D9E-DD11-BD7D-001617C3B5F4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/2A9CA4B8-909E-DD11-857B-001617E30D38.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/9E30F47D-409F-DD11-A947-001617C3B6E8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/745BB069-519F-DD11-A8F9-000423D94700.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/4493CD28-899C-DD11-AF14-000423D6CA02.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/0085D14F-289D-DD11-862E-000423D6006E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/841A2D63-E09D-DD11-BDA5-001617DF785A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/5658C98B-9D9D-DD11-9B46-000423D99F1E.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/3ABEFDFB-169F-DD11-94E3-000423D98BC4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/FC20EEFC-059F-DD11-A7CA-001617C3B5F4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/CE7B883A-ED9E-DD11-A737-0019DB29C614.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/4261BA6C-CB9E-DD11-AE94-000423D986A8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/9A134C3C-849D-DD11-8A1C-000423D98C20.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/FE7A7A73-1F9F-DD11-A841-001617DBD230.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/6097BB22-FE9C-DD11-AA3C-000423D944F0.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/F4AE9DE3-DC9C-DD11-9223-000423D6B42C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/3CA664CA-309D-DD11-A642-000423D951D4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/2C9D3EE0-C79D-DD11-AAF0-000423D94534.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/7E81C76A-BF9D-DD11-9970-001617E30F50.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/4EA98C8F-0E9F-DD11-A48E-001D09F253FC.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/0A4BBAF5-C29E-DD11-967D-0016177CA778.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/4253601B-B29E-DD11-9725-001617DBD224.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/E00BC4D5-D39E-DD11-861A-001617C3B5E4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/EA733333-419D-DD11-9B49-000423D99660.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/40A87664-239E-DD11-8ABC-000423D944F8.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/DA448F60-239E-DD11-8347-000423D98DD4.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/0E9D6303-389F-DD11-8C22-001617E30D0A.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/8A40053E-889E-DD11-9442-000423D944F0.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/082ED767-999E-DD11-962C-0019B9F70607.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/205DAE07-0F9D-DD11-9FD4-000423D9890C.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/041B05FD-059F-DD11-871E-001617E30D52.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/FE7823F2-C29E-DD11-81F1-0019DB29C614.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/BC834BD5-2B9E-DD11-A8D9-001617C3B706.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0013/966DFADC-E89D-DD11-A90E-000423D99264.root',
'rfio:///?svcClass=cmscaf&path=/castor/cern.ch/cms/store/data/Commissioning08/Cosmics/ALCARECO/CRAFT_V2P_StreamALCARECOTkAlCosmics0T_v7/0014/B4FD3F7C-409F-DD11-8F2B-001617DBCF90.root'
| 183.382609 | 184 | 0.843283 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 20,862 | 0.989236 |
5011d21366dcb69f240f7e58a464b1188024fc1b | 1,410 | py | Python | mpf/devices/ball_device/ball_device_state_handler.py | Wolfmarsh/mpf | ad71f381ce8a0e65f28958e51cf8a8b38a6154fb | [
"MIT"
] | null | null | null | mpf/devices/ball_device/ball_device_state_handler.py | Wolfmarsh/mpf | ad71f381ce8a0e65f28958e51cf8a8b38a6154fb | [
"MIT"
] | null | null | null | mpf/devices/ball_device/ball_device_state_handler.py | Wolfmarsh/mpf | ad71f381ce8a0e65f28958e51cf8a8b38a6154fb | [
"MIT"
] | null | null | null | """Base class for ball device handlers."""
import asyncio
from mpf.core.utility_functions import Util
class BallDeviceStateHandler:

    """Common base for handlers attached to a ball device."""

    __slots__ = ["ball_device", "machine", "_task"]

    def __init__(self, ball_device):
        """Remember the parent device and its machine.

        Args:
            ball_device(mpf.devices.ball_device.ball_device.BallDevice): parent ball device
        """
        self.ball_device = ball_device
        self.machine = ball_device.machine
        self._task = None

    async def initialise(self):
        """Start the handler's run task and hook up exception reporting."""
        task = self.machine.clock.loop.create_task(self._run())
        task.add_done_callback(Util.raise_exceptions)
        self._task = task

    def stop(self):
        """Cancel the run task (if any) and wait until it has terminated."""
        if not self._task:
            return
        self._task.cancel()
        try:
            self.machine.clock.loop.run_until_complete(self._task)
        except asyncio.CancelledError:
            pass

    def debug_log(self, *args, **kwargs):
        """Forward a debug message to the parent device's logger."""
        self.ball_device.debug_log(*args, **kwargs)

    def info_log(self, *args, **kwargs):
        """Forward an info message to the parent device's logger."""
        self.ball_device.info_log(*args, **kwargs)

    def warning_log(self, *args, **kwargs):
        """Forward a warning message to the parent device's logger."""
        self.ball_device.warning_log(*args, **kwargs)

    async def _run(self):
        # Subclasses implement the handler's main coroutine.
        raise NotImplementedError()
| 27.647059 | 91 | 0.611348 | 1,304 | 0.924823 | 0 | 0 | 0 | 0 | 248 | 0.175887 | 346 | 0.24539 |
501249edf828430cf33dfa53f6c8ad7a2568f14d | 500 | py | Python | valtypes/__init__.py | vanburgerberg/constypes | e1a527d21a30782e1e5bfc1005f18791e29fdeb2 | [
"MIT"
] | 2 | 2021-08-30T20:08:49.000Z | 2021-11-28T13:16:54.000Z | valtypes/__init__.py | vanburgerberg/valtypes | e1a527d21a30782e1e5bfc1005f18791e29fdeb2 | [
"MIT"
] | null | null | null | valtypes/__init__.py | vanburgerberg/valtypes | e1a527d21a30782e1e5bfc1005f18791e29fdeb2 | [
"MIT"
] | null | null | null | from .validator import (
And,
Attr,
Chain,
Const,
Contains,
ExcMax,
ExcMin,
Float,
Macro,
Max,
Min,
MultipleOf,
Not,
Or,
Pattern,
Proto,
Type,
Validator,
Xor,
)
__all__ = [
"And",
"Attr",
"Chain",
"Const",
"Validator",
"Contains",
"ExcMax",
"ExcMin",
"Float",
"Macro",
"Max",
"Min",
"MultipleOf",
"Not",
"Or",
"Pattern",
"Proto",
"Type",
"Xor",
]
| 11.363636 | 24 | 0.436 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 134 | 0.268 |
5012f3d859038bf6c7551a9b09f30a8dc7a2a8ae | 7,549 | py | Python | ml_techniques/tests/tests.py | tgquintela/ml_techniques_in_python | 242ff12b32edb313c983394f3153a8b7eb8ce183 | [
"MIT"
] | null | null | null | ml_techniques/tests/tests.py | tgquintela/ml_techniques_in_python | 242ff12b32edb313c983394f3153a8b7eb8ce183 | [
"MIT"
] | null | null | null | ml_techniques/tests/tests.py | tgquintela/ml_techniques_in_python | 242ff12b32edb313c983394f3153a8b7eb8ce183 | [
"MIT"
] | null | null | null | import numpy as np
import unittest
from itertools import product
from ml_techniques.svm import *
class PermutationDataTest(unittest.TestCase):
    """Checks that permut_data preserves array shapes."""

    def testpropershape(self):
        """Permuting data (and labels) must leave their shapes unchanged."""
        features = np.random.random((10, 4))
        targets = np.random.randint(0, 2, 10)*2-1
        shuffled = permut_data(features)
        self.assertEqual(shuffled.shape, features.shape)
        shuffled, shuffled_targets = permut_data(features, targets)
        self.assertEqual(shuffled.shape, features.shape)
        self.assertEqual(shuffled_targets.shape, targets.shape)
class BatchCreatorTest(unittest.TestCase):
    """Smoke test for the mini-batch index iterator."""

    def test_run_batch_iterator(self):
        """Every (init, endit) slice is non-empty and properly ordered."""
        # Case 1: batch size does not divide the data size evenly.
        data_size = 100
        batch_size = 9
        for init, endit in batch_size_iter(data_size, batch_size):
            self.assertTrue(init != endit)
            self.assertTrue(init < endit)
        # NOTE(review): indentation reconstructed; this check is assumed to
        # apply to the *last* yielded pair (the final slice must end exactly
        # at data_size) -- confirm against the original file.
        self.assertEqual(endit, data_size)
        # Case 2: batch size divides the data size evenly.
        data_size = 100
        batch_size = 10
        for init, endit in batch_size_iter(data_size, batch_size):
            self.assertTrue(init != endit)
            self.assertTrue(init < endit)
        self.assertEqual(endit, data_size)
class RegularizationTest(unittest.TestCase):
    """Smoke tests for the regularization factory and concrete penalties."""

    def assert_regularization(self, reg):
        """Exercise the common Regularization interface on `reg`."""
        reg.parameters
        reg.regularize(np.random.randn(10), 1)
        reg.gradient_regularization(np.random.randn(10), 1)

    def test_abstractregularization(self):
        """The factory accepts a name, an existing instance and a class."""
        reg = Regularization.create_regularization('l2', 1.)
        self.assert_regularization(reg)
        reg = Regularization.create_regularization(reg)
        self.assert_regularization(reg)
        reg = Regularization.create_regularization(Null_Regularization)
        self.assert_regularization(reg)

    def test_l2_regularization(self):
        # Bug fix: this test previously instantiated L1_Regularization
        # (copy-paste from test_l1_regularization), so the L2 penalty was
        # never exercised.  Build it through the factory, which the test
        # above already shows supports the 'l2' name.
        reg = Regularization.create_regularization('l2', 1.)
        self.assert_regularization(reg)

    def test_l1_regularization(self):
        reg = L1_Regularization(1.)
        self.assert_regularization(reg)
class AccuracyFunctionTest(unittest.TestCase):
    """Property-style checks for the accuracy metric."""

    def test_order_independency(self):
        """Shuffling both label vectors identically must not change accuracy."""
        size, trials = 10, 20
        for _ in range(trials):
            truth = np.random.randint(0, 2, size)
            pred = np.random.randint(0, 2, size)
            order = np.random.permutation(size)
            self.assertEqual(accuracy(truth, pred),
                             accuracy(truth[order], pred[order]))

    def test_symetry(self):
        """accuracy(a, b) must equal accuracy(b, a)."""
        size, trials = 10, 20
        for _ in range(trials):
            truth = np.random.randint(0, 2, size)
            pred = np.random.randint(0, 2, size)
            self.assertEqual(accuracy(truth, pred), accuracy(pred, truth))
class LossFunctionTest(unittest.TestCase):
    """Smoke tests for the loss-function factory and the Hinge loss."""

    def _generator_labels(self, n):
        # Random labels in {-1, +1}, the convention used by the SVM code.
        return np.random.randint(0, 2, n)*2-1

    def test_abstractloss(self):
        """The factory accepts a name, an existing instance and a class."""
        lossf = LossFunction.create_lossfunction('Hinge')
        lossf = LossFunction.create_lossfunction(lossf)
        lossf = LossFunction.create_lossfunction(Hinge)

    def test_loss(self):
        """Hinge.loss runs for several threshold settings."""
        n = 20
        y0 = np.random.random(n)*2-1
        y1 = self._generator_labels(n)
        thresholds = [0, 1, 2]
        for thr in thresholds:
            lossf = Hinge(thr)
            lossf.loss(y0, y1)

    def test_gradient(self):
        """Hinge.gradient_loss returns one weight gradient per feature."""
        n, n_feats = 20, 10
        y0 = np.random.random(n)*2-1
        y1 = self._generator_labels(n)
        x = np.random.random((n, n_feats))
        thresholds = [0, 1, 2]
        for thr in thresholds:
            lossf = Hinge(thr)
            grad_w, grad_w0 = lossf.gradient_loss(y0, y1, x)
            self.assertEqual(len(grad_w), n_feats)
class Modeltest(unittest.TestCase):
    """Smoke tests for the model factory and the linear model."""

    def setUp(self):
        n = 100
        # Factory for random design matrices with `n` samples.
        self.create_X = lambda n_feats: np.random.random((n, n_feats))

    def assert_linearmodel(self, linearmodel):
        """Exercise the common model interface on `linearmodel`."""
        w, w0 = linearmodel.parameters
        if w is not None:
            linearmodel.compute(self.create_X(len(w)))
        # NOTE(review): indentation reconstructed; reset_model() is assumed
        # to run for every model, initialised or not -- confirm upstream.
        linearmodel.reset_model()

    def test_abstractmodel(self):
        """Model.create_model accepts a name, an instance and a class."""
        mod = Model.create_model('svm', np.random.randn(10), 0.)
        Model.create_model(mod)
        Model.create_model(LinearModel, np.random.randn(10), 0.)

    def test_linearmodel(self):
        """LinearModel supports empty, explicit and factory-made weights."""
        lm = LinearModel(None)
        self.assert_linearmodel(lm)
        lm = LinearModel(np.random.randn(10), 0.)
        self.assert_linearmodel(lm)
        lm = LinearModel.weights_initialization(10, 'gauss')
        self.assert_linearmodel(lm)
        lm = LinearModel.weights_initialization(10, 'zeros')
        self.assert_linearmodel(lm)
class SVMTest(unittest.TestCase):
    """Integration-style tests sweeping SVM hyper-parameter combinations."""

    def setUp(self):
        # One list of candidate values per SVM constructor argument; the
        # tests below iterate over the cartesian product of all of them.
        loss = ['Hinge', Hinge()]
        reg_pars = [0.01, 1., 10.]
        batch_size = [10]
        n_epochs = [0, 100]
        learning_rate = [0.001, 1.]
        stop_step = [.00001, 100]
        history = [True, False]
        verbose = [True, False]
        self.var_names = ['loss', 'reg_pars', 'batch_size', 'n_epochs',
                          'learning_rate', 'stop_step', 'history', 'verbose']
        self.possibilities = [loss, reg_pars, batch_size, n_epochs,
                              learning_rate, stop_step, history, verbose]

    def test_initialization(self):
        """Constructor stores parameters; helpers run for every combination."""
        n, n_feats = 100, 20
        data = np.random.random((n, n_feats))
        labels = np.random.randint(0, 2, n)*2-1
        # NOTE(review): loop/if nesting reconstructed from mangled
        # indentation -- confirm against the original file.
        for p in product(*self.possibilities):
            solver = SVM(**dict(zip(self.var_names, p)))
            ## General asserts: constructor arguments are stored verbatim.
            self.assertEqual(solver.optimizer, 'SGD')
            self.assertEqual(solver.batch_size, p[2])
            self.assertEqual(solver.n_epochs, p[3])
            self.assertEqual(solver.learning_rate, p[4])
            self.assertEqual(solver.stop_step, p[5])
            ## Special cases: no history tracking when the history flag is off.
            if not p[6]:
                self.assertIsNone(solver.train_loss_history)
                self.assertIsNone(solver.test_loss_history)
                self.assertIsNone(solver.train_accuracy_history)
                self.assertIsNone(solver.test_accuracy_history)
            ## Weights initialization
            solver.model = solver.model.weights_initialization(n_feats)
            solver._reset_history()
            ## Batch creation testing
            for x_batch, y_batch in solver._batch_generator(data, labels):
                self.assertTrue(len(x_batch) >= p[2])
            ## Computer functions (only exercised in verbose mode, p[7])
            if p[7]:
                # model._initialization_weights(n_feats, init_type='gauss')
                solver.compute_epoch_measures(data, labels, None, None)
                solver.compute_epoch_measures(data, labels, data, labels)

    def test_fitmodel(self):
        """fit/predict/score run end-to-end; history lengths match epochs."""
        n, n_feats = 100, 5
        data = np.random.random((n, n_feats))
        labels = np.random.randint(0, 2, n)*2-1
        for p in product(*self.possibilities):
            solver = SVM(**dict(zip(self.var_names, p)))
            solver.report_results()
            solver.n_epochs = 100
            solver.fit(data, labels)
            solver.fit(data, labels, data, labels)
            solver.predict(data)
            solver.score(data, labels)
            if p[6]:
                # With history enabled, one entry is recorded per epoch.
                self.assertEqual(solver.epoch_learned,
                                 len(solver.train_loss_history))
                self.assertEqual(solver.epoch_learned,
                                 len(solver.train_accuracy_history))
                self.assertEqual(solver.epoch_learned,
                                 len(solver.test_loss_history))
                self.assertEqual(solver.epoch_learned,
                                 len(solver.test_accuracy_history))
            solver.report_results()
| 33.700893 | 77 | 0.601537 | 7,430 | 0.984236 | 0 | 0 | 0 | 0 | 0 | 0 | 303 | 0.040138 |
501312c476ded79ec6cb9b0965b516133cb44842 | 430 | bzl | Python | deps/deno/repository.bzl | y0psolo/YAD | 0f1f9c5140687345dee591667793d6f8ed6e29e5 | [
"Apache-2.0"
] | 1 | 2021-11-05T09:13:57.000Z | 2021-11-05T09:13:57.000Z | deps/deno/repository.bzl | y0psolo/YAD | 0f1f9c5140687345dee591667793d6f8ed6e29e5 | [
"Apache-2.0"
] | 9 | 2021-12-02T13:25:52.000Z | 2022-01-26T14:24:05.000Z | deps/deno/repository.bzl | y0psolo/YAD | 0f1f9c5140687345dee591667793d6f8ed6e29e5 | [
"Apache-2.0"
] | null | null | null | load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def deno_repository():
    # Fetch the prebuilt Deno v1.17.3 binary for x86_64 Linux as an
    # http_archive; the BUILD file is supplied by this repo (//ext/deno).
    http_archive(
        name = "deno-amd64",
        build_file = "//ext/deno:BUILD",
        # Pins the archive contents; update together with the version in `urls`.
        sha256 = "7b883e3c638d21dd1875f0108819f2f13647b866ff24965135e679c743013f46",
        type = "zip",
        urls = ["https://github.com/denoland/deno/releases/download/v1.17.3/deno-x86_64-unknown-linux-gnu.zip"],
    )
| 35.833333 | 112 | 0.669767 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 273 | 0.634884 |
501610d9ff0b5e179cb5002235857a130ed0cb4e | 8,184 | py | Python | src/oci/ai_vision/models/detected_language.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/ai_vision/models/detected_language.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/ai_vision/models/detected_language.py | LaudateCorpus1/oci-python-sdk | b0d3ce629d5113df4d8b83b7a6502b2c5bfa3015 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class DetectedLanguage(object):
    """
    Language detected in a document.
    """

    #: Full set of values accepted by the ``language_code`` property.
    #: DRY fix: defined once so the setter's validation and the per-code
    #: constants below cannot drift apart.
    _LANGUAGE_CODE_ALLOWED_VALUES = (
        "ENG", "CES", "DAN", "NLD", "FIN", "FRA", "DEU", "ELL", "HUN", "ITA",
        "NOR", "POL", "POR", "RON", "RUS", "SLK", "SPA", "SWE", "TUR", "ARA",
        "CHI_SIM", "HIN", "JPN", "KOR", "OTHERS")

    #: Constants usable with the language_code property of a DetectedLanguage,
    #: one per allowed code above.
    LANGUAGE_CODE_ENG = "ENG"
    LANGUAGE_CODE_CES = "CES"
    LANGUAGE_CODE_DAN = "DAN"
    LANGUAGE_CODE_NLD = "NLD"
    LANGUAGE_CODE_FIN = "FIN"
    LANGUAGE_CODE_FRA = "FRA"
    LANGUAGE_CODE_DEU = "DEU"
    LANGUAGE_CODE_ELL = "ELL"
    LANGUAGE_CODE_HUN = "HUN"
    LANGUAGE_CODE_ITA = "ITA"
    LANGUAGE_CODE_NOR = "NOR"
    LANGUAGE_CODE_POL = "POL"
    LANGUAGE_CODE_POR = "POR"
    LANGUAGE_CODE_RON = "RON"
    LANGUAGE_CODE_RUS = "RUS"
    LANGUAGE_CODE_SLK = "SLK"
    LANGUAGE_CODE_SPA = "SPA"
    LANGUAGE_CODE_SWE = "SWE"
    LANGUAGE_CODE_TUR = "TUR"
    LANGUAGE_CODE_ARA = "ARA"
    LANGUAGE_CODE_CHI_SIM = "CHI_SIM"
    LANGUAGE_CODE_HIN = "HIN"
    LANGUAGE_CODE_JPN = "JPN"
    LANGUAGE_CODE_KOR = "KOR"
    LANGUAGE_CODE_OTHERS = "OTHERS"

    def __init__(self, **kwargs):
        """
        Initializes a new DetectedLanguage object with values from keyword arguments.
        The following keyword arguments are supported (corresponding to the getters/setters of this class):

        :param language_code:
            The value to assign to the language_code property of this DetectedLanguage.
            Allowed values are the codes in ``_LANGUAGE_CODE_ALLOWED_VALUES``
            plus 'UNKNOWN_ENUM_VALUE'.  Any unrecognized values returned by a
            service will be mapped to 'UNKNOWN_ENUM_VALUE'.
        :type language_code: str

        :param confidence:
            The value to assign to the confidence property of this DetectedLanguage.
        :type confidence: float
        """
        # Metadata consumed by the OCI SDK's (de)serialization machinery.
        self.swagger_types = {
            'language_code': 'str',
            'confidence': 'float'
        }

        self.attribute_map = {
            'language_code': 'languageCode',
            'confidence': 'confidence'
        }

        self._language_code = None
        self._confidence = None

    @property
    def language_code(self):
        """
        **[Required]** Gets the language_code of this DetectedLanguage.
        Language of the document, abbreviated according to ISO 639-2.

        Allowed values are the codes in ``_LANGUAGE_CODE_ALLOWED_VALUES`` plus
        'UNKNOWN_ENUM_VALUE'.  Any unrecognized values returned by a service
        will be mapped to 'UNKNOWN_ENUM_VALUE'.

        :return: The language_code of this DetectedLanguage.
        :rtype: str
        """
        return self._language_code

    @language_code.setter
    def language_code(self, language_code):
        """
        Sets the language_code of this DetectedLanguage.
        Language of the document, abbreviated according to ISO 639-2.

        :param language_code: The language_code of this DetectedLanguage.
        :type: str
        """
        # Validate against the single class-level tuple instead of a
        # duplicated inline list that could fall out of sync.
        if not value_allowed_none_or_none_sentinel(
                language_code, self._LANGUAGE_CODE_ALLOWED_VALUES):
            language_code = 'UNKNOWN_ENUM_VALUE'
        self._language_code = language_code

    @property
    def confidence(self):
        """
        **[Required]** Gets the confidence of this DetectedLanguage.
        Confidence score between 0 to 1.

        :return: The confidence of this DetectedLanguage.
        :rtype: float
        """
        return self._confidence

    @confidence.setter
    def confidence(self, confidence):
        """
        Sets the confidence of this DetectedLanguage.
        Confidence score between 0 to 1.

        :param confidence: The confidence of this DetectedLanguage.
        :type: float
        """
        self._confidence = confidence

    def __repr__(self):
        return formatted_flat_dict(self)

    def __eq__(self, other):
        if other is None:
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
| 38.971429 | 253 | 0.669599 | 7,644 | 0.934018 | 0 | 0 | 7,674 | 0.937683 | 0 | 0 | 5,946 | 0.72654 |
501674bfb30dc18bcd6f3983353a1c018cbc6af0 | 1,905 | py | Python | source-code/Word Ladder 127.py | ttungl/Coding-Interview-Challenge | d80c3e15468d50b42ee53fcc73e9326c6c816495 | [
"MIT"
] | null | null | null | source-code/Word Ladder 127.py | ttungl/Coding-Interview-Challenge | d80c3e15468d50b42ee53fcc73e9326c6c816495 | [
"MIT"
] | null | null | null | source-code/Word Ladder 127.py | ttungl/Coding-Interview-Challenge | d80c3e15468d50b42ee53fcc73e9326c6c816495 | [
"MIT"
] | null | null | null | # 127. Word Ladder
# ttungl@gmail.com
# Given two words (beginWord and endWord), and a dictionary's word list, find the length of shortest transformation sequence from beginWord to endWord, such that:
# Only one letter can be changed at a time.
# Each transformed word must exist in the word list. Note that beginWord is not a transformed word.
# For example,
# Given:
# beginWord = "hit"
# endWord = "cog"
# wordList = ["hot","dot","dog","lot","log","cog"]
# As one shortest transformation is "hit" -> "hot" -> "dot" -> "dog" -> "cog",
# return its length 5.
# Note:
# Return 0 if there is no such transformation sequence.
# All words have the same length.
# All words contain only lowercase alphabetic characters.
# You may assume no duplicates in the word list.
# You may assume beginWord and endWord are non-empty and are not the same.
# UPDATE (2017/1/20):
# The wordList parameter had been changed to a list of strings (instead of a set of strings). Please reload the code definition to get the latest changes.
class Solution(object):
    def ladderLength(self, beginWord, endWord, wordList):
        """Length of the shortest hit->...->cog style transformation ladder.

        BFS over one-letter mutations; each queue entry carries the word and
        the length of the ladder that reached it.  Returns 0 when no ladder
        exists.

        :type beginWord: str
        :type endWord: str
        :type wordList: List[str]
        :rtype: int
        """
        # Fix: the visible file never imports these modules; import locally
        # so the method is self-contained.
        import collections
        import string

        words = set(wordList)
        # Early exit: unreachable target.  (Guard on beginWord != endWord
        # keeps behavior identical to the plain BFS when they coincide.)
        if beginWord != endWord and endWord not in words:
            return 0
        queue = collections.deque([(beginWord, 1)])
        while queue:
            word, length = queue.popleft()
            if word == endWord:
                return length
            for i in range(len(word)):
                for c in string.ascii_lowercase:
                    if c == word[i]:
                        continue  # skip the no-op mutation
                    candidate = word[:i] + c + word[i + 1:]
                    if candidate in words:
                        # Remove on first visit so each word enters the
                        # queue at most once (keeps BFS O(n*m*26)).
                        words.remove(candidate)
                        queue.append((candidate, length + 1))
        return 0
| 32.288136 | 162 | 0.607874 | 862 | 0.452493 | 0 | 0 | 0 | 0 | 0 | 0 | 1,198 | 0.628871 |
5017d91a4f43fb84e8e4e3ab54b9dd80d44b71a3 | 2,530 | py | Python | lexer/groovylexer.py | rhyolight/pygments-groovy | b80282f873d09d5709508078179a41a69a8c3ed4 | [
"BSD-3-Clause"
] | 1 | 2016-05-08T23:59:33.000Z | 2016-05-08T23:59:33.000Z | lexer/groovylexer.py | rhyolight/pygments-groovy | b80282f873d09d5709508078179a41a69a8c3ed4 | [
"BSD-3-Clause"
] | null | null | null | lexer/groovylexer.py | rhyolight/pygments-groovy | b80282f873d09d5709508078179a41a69a8c3ed4 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Lexer for the Groovy programming language
http://groovy.codehaus.org
"""
from pygments.lexer import RegexLexer
class GroovyLexer(RegexLexer):
    """
    For `Groovy <http://groovy.codehaus.org/>`_ source code.
    """

    name = 'Groovy'
    aliases = ['groovy']
    filenames = ['*.groovy']
    mimetypes = ['text/x-groovy']

    # NOTE(review): `re` and the pygments helpers/tokens used below (bygroups,
    # using, this, Text, Comment, Operator, Keyword, Name, String, Number) are
    # not imported in the visible part of this file -- presumably imported
    # near the top of the module; verify.
    flags = re.MULTILINE | re.DOTALL

    #: optional Comment or Whitespace
    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'

    tokens = {
        'root': [
            # method declarations: return type(s), method name, '(' opener
            (r'^(\s*(?:[a-zA-Z_][a-zA-Z0-9_\.\[\]]*\s+)+?)'  # return arguments
             r'([a-zA-Z_][a-zA-Z0-9_]*)'                     # method name
             r'(\s*)(\()',                                   # signature start
             bygroups(using(this), Name.Function, Text, Operator)),
            (r'[^\S\n]+', Text),
            # line and block comments
            (r'//.*?\n', Comment),
            (r'/\*.*?\*/', Comment),
            (r'@[a-zA-Z_][a-zA-Z0-9_\.]*', Name.Decorator),
            # statement keywords
            (r'(assert|break|case|catch|continue|default|do|else|finally|for|'
             r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
             Keyword),
            # declaration modifiers
            (r'(abstract|const|enum|extends|final|implements|native|private|'
             r'protected|public|static|strictfp|super|synchronized|throws|'
             r'transient|volatile)\b', Keyword.Declaration),
            # builtin types (plus Groovy's dynamic `def`)
            (r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
             Keyword.Type),
            (r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
            (r'(true|false|null)\b', Keyword.Constant),
            # class/interface and import headers switch lexer state
            (r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text), 'class'),
            (r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
            (r'"(\\\\|\\"|[^"])*"', String),
            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
            (r'(\.)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(Operator, Name.Attribute)),
            (r'[a-zA-Z_][a-zA-Z0-9_]*:', Name.Label),
            (r'[a-zA-Z_\$][a-zA-Z0-9_]*', Name),
            (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?-]', Operator),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-f]+', Number.Hex),
            (r'[0-9]+L?', Number.Integer),
            (r'\n', Text)
        ],
        'class': [
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
        ],
        'import': [
            (r'[a-zA-Z0-9_.]+\*?', Name.Namespace, '#pop')
        ],
    } | 39.53125 | 86 | 0.462451 | 2,381 | 0.941107 | 0 | 0 | 0 | 0 | 0 | 0 | 1,249 | 0.493676 |
5019020d7b680c6e2d3cae1add3c2247c0136f62 | 10,243 | py | Python | official/nlp/modeling/layers/cls_head.py | e10101/models | 5c3e08b7697f0035b8731607277dc4e47e18317c | [
"Apache-2.0"
] | 1 | 2021-12-03T00:11:14.000Z | 2021-12-03T00:11:14.000Z | official/nlp/modeling/layers/cls_head.py | e10101/models | 5c3e08b7697f0035b8731607277dc4e47e18317c | [
"Apache-2.0"
] | 2 | 2021-03-24T23:09:46.000Z | 2021-03-25T18:42:10.000Z | official/nlp/modeling/layers/cls_head.py | e10101/models | 5c3e08b7697f0035b8731607277dc4e47e18317c | [
"Apache-2.0"
] | 1 | 2021-03-26T02:26:42.000Z | 2021-03-26T02:26:42.000Z | # Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Classification head layer which is common used with sequence encoders."""
import tensorflow as tf
from official.modeling import tf_utils
from official.nlp.modeling.layers import gaussian_process
from official.nlp.modeling.layers import spectral_normalization
class ClassificationHead(tf.keras.layers.Layer):
  """Pooling head for sentence-level classification tasks."""

  def __init__(self,
               inner_dim,
               num_classes,
               cls_token_idx=0,
               activation="tanh",
               dropout_rate=0.0,
               initializer="glorot_uniform",
               **kwargs):
    """Initializes the `ClassificationHead`.

    Args:
      inner_dim: The dimensionality of inner projection layer. If 0 or `None`
        then only the output projection layer is created.
      num_classes: Number of output classes.
      cls_token_idx: The index inside the sequence to pool.
      activation: Dense layer activation.
      dropout_rate: Dropout probability.
      initializer: Initializer for dense layer kernels.
      **kwargs: Keyword arguments.
    """
    super().__init__(**kwargs)
    self.dropout_rate = dropout_rate
    self.inner_dim = inner_dim
    self.num_classes = num_classes
    self.activation = tf_utils.get_activation(activation)
    self.initializer = tf.keras.initializers.get(initializer)
    self.cls_token_idx = cls_token_idx

    if self.inner_dim:
      # The inner "pooler" projection only exists when a non-zero inner
      # dimension is requested; otherwise features go straight to `out_proj`.
      self.dense = tf.keras.layers.Dense(
          units=self.inner_dim,
          activation=self.activation,
          kernel_initializer=self.initializer,
          name="pooler_dense")
    self.dropout = tf.keras.layers.Dropout(rate=self.dropout_rate)
    self.out_proj = tf.keras.layers.Dense(
        units=num_classes, kernel_initializer=self.initializer, name="logits")

  def call(self, features):
    """Pools the CLS position (when inner_dim is set) and projects to logits."""
    if not self.inner_dim:
      x = features
    else:
      x = features[:, self.cls_token_idx, :]  # take <CLS> token.
      x = self.dense(x)
    x = self.dropout(x)
    x = self.out_proj(x)
    return x

  def get_config(self):
    config = {
        "cls_token_idx": self.cls_token_idx,
        "dropout_rate": self.dropout_rate,
        "num_classes": self.num_classes,
        "inner_dim": self.inner_dim,
        "activation": tf.keras.activations.serialize(self.activation),
        "initializer": tf.keras.initializers.serialize(self.initializer),
    }
    config.update(super(ClassificationHead, self).get_config())
    return config

  @classmethod
  def from_config(cls, config, custom_objects=None):
    return cls(**config)

  @property
  def checkpoint_items(self):
    """Objects to track in checkpoints, keyed by layer name.

    Bug fix: `self.dense` is only built when `inner_dim` is truthy, so this
    property previously raised AttributeError for heads constructed with
    inner_dim=0/None.  Such heads simply have nothing extra to track.
    """
    if not self.inner_dim:
      return {}
    return {self.dense.name: self.dense}
class MultiClsHeads(tf.keras.layers.Layer):
  """Pooling heads sharing the same pooling stem."""

  def __init__(self,
               inner_dim,
               cls_list,
               cls_token_idx=0,
               activation="tanh",
               dropout_rate=0.0,
               initializer="glorot_uniform",
               **kwargs):
    """Initializes the `MultiClsHeads`.

    Args:
      inner_dim: The dimensionality of inner projection layer. If 0 or `None`
        then only the output projection layer is created.
      cls_list: A list of (classification problem name, number of classes)
        pairs; one output projection is created per entry.
      cls_token_idx: The index inside the sequence to pool.
      activation: Dense layer activation.
      dropout_rate: Dropout probability.
      initializer: Initializer for dense layer kernels.
      **kwargs: Keyword arguments.
    """
    super().__init__(**kwargs)
    self.dropout_rate = dropout_rate
    self.inner_dim = inner_dim
    self.cls_list = cls_list
    self.activation = tf_utils.get_activation(activation)
    self.initializer = tf.keras.initializers.get(initializer)
    self.cls_token_idx = cls_token_idx

    if self.inner_dim:
      # Shared "pooler" stem; only built when a non-zero inner dim is asked.
      self.dense = tf.keras.layers.Dense(
          units=inner_dim,
          activation=self.activation,
          kernel_initializer=self.initializer,
          name="pooler_dense")
    self.dropout = tf.keras.layers.Dropout(rate=self.dropout_rate)
    self.out_projs = []
    # One logits projection per classification problem, named after it.
    for name, num_classes in cls_list:
      self.out_projs.append(
          tf.keras.layers.Dense(
              units=num_classes, kernel_initializer=self.initializer,
              name=name))

  def call(self, features):
    # Pool the CLS position through the shared stem, then fan out to every
    # per-problem projection; returns a dict keyed by projection name.
    if not self.inner_dim:
      x = features
    else:
      x = features[:, self.cls_token_idx, :]  # take <CLS> token.
      x = self.dense(x)
    x = self.dropout(x)
    outputs = {}
    for proj_layer in self.out_projs:
      outputs[proj_layer.name] = proj_layer(x)
    return outputs

  def get_config(self):
    config = {
        "dropout_rate": self.dropout_rate,
        "cls_token_idx": self.cls_token_idx,
        "cls_list": self.cls_list,
        "inner_dim": self.inner_dim,
        "activation": tf.keras.activations.serialize(self.activation),
        "initializer": tf.keras.initializers.serialize(self.initializer),
    }
    config.update(super().get_config())
    return config

  @classmethod
  def from_config(cls, config, custom_objects=None):
    return cls(**config)

  @property
  def checkpoint_items(self):
    # NOTE(review): `self.dense` only exists when inner_dim is truthy, so
    # this raises AttributeError for heads built with inner_dim=0/None --
    # same latent issue as ClassificationHead.checkpoint_items; verify.
    items = {self.dense.name: self.dense}
    items.update({v.name: v for v in self.out_projs})
    return items
class GaussianProcessClassificationHead(ClassificationHead):
  """Gaussian process-based pooling head for sentence classification.

  This class implements a classifier head for BERT encoder that is based on the
  spectral-normalized neural Gaussian process (SNGP) [1]. SNGP is a simple
  method to improve a neural network's uncertainty quantification ability
  without sacrificing accuracy or lantency. It applies spectral normalization to
  the hidden pooler layer, and then replaces the dense output layer with a
  Gaussian process.


  [1]: Jeremiah Liu et al. Simple and Principled Uncertainty Estimation with
       Deterministic Deep Learning via Distance Awareness.
       In _Neural Information Processing Systems_, 2020.
       https://arxiv.org/abs/2006.10108
  """

  def __init__(self,
               inner_dim,
               num_classes,
               cls_token_idx=0,
               activation="tanh",
               dropout_rate=0.0,
               initializer="glorot_uniform",
               use_spec_norm=True,
               use_gp_layer=True,
               **kwargs):
    """Initializes the `GaussianProcessClassificationHead`.

    Args:
      inner_dim: The dimensionality of inner projection layer. If 0 or `None`
        then only the output projection layer is created.
      num_classes: Number of output classes.
      cls_token_idx: The index inside the sequence to pool.
      activation: Dense layer activation.
      dropout_rate: Dropout probability.
      initializer: Initializer for dense layer kernels.
      use_spec_norm: Whether to apply spectral normalization to pooler layer.
      use_gp_layer: Whether to use Gaussian process as the output layer.
      **kwargs: Additional keyword arguments.
    """
    # Collects spectral normalization and Gaussian process args from kwargs.
    # This must happen before super().__init__ so the extracted keys are not
    # forwarded to the Keras Layer constructor.
    self.use_spec_norm = use_spec_norm
    self.use_gp_layer = use_gp_layer
    self.spec_norm_kwargs = extract_spec_norm_kwargs(kwargs)
    self.gp_layer_kwargs = extract_gp_layer_kwargs(kwargs)

    super().__init__(
        inner_dim=inner_dim,
        num_classes=num_classes,
        cls_token_idx=cls_token_idx,
        activation=activation,
        dropout_rate=dropout_rate,
        initializer=initializer,
        **kwargs)

    # Applies spectral normalization to the dense pooler layer (the parent
    # only builds `dense` when inner_dim is truthy, hence the hasattr check).
    if self.use_spec_norm and hasattr(self, "dense"):
      self.dense = spectral_normalization.SpectralNormalization(
          self.dense, inhere_layer_name=True, **self.spec_norm_kwargs)

    # Replace Dense output layer with the Gaussian process layer.
    if use_gp_layer:
      self.out_proj = gaussian_process.RandomFeatureGaussianProcess(
          self.num_classes,
          kernel_initializer=self.initializer,
          name="logits",
          **self.gp_layer_kwargs)

  def get_config(self):
    # Flatten the extracted spec-norm / GP kwargs back into the config so
    # from_config can round-trip them through __init__'s **kwargs.
    config = dict(
        use_spec_norm=self.use_spec_norm, use_gp_layer=self.use_gp_layer)
    config.update(self.spec_norm_kwargs)
    config.update(self.gp_layer_kwargs)
    config.update(super(GaussianProcessClassificationHead, self).get_config())
    return config
def extract_gp_layer_kwargs(kwargs):
  """Extracts Gaussian process layer configs from a given kwarg.

  Pops every GP-layer option out of `kwargs` (mutating it in place) and
  returns those options as their own dict, filling in the library defaults
  for any absent key.
  """
  gp_defaults = (
      ("num_inducing", 1024),
      ("normalize_input", True),
      ("gp_cov_momentum", 0.999),
      ("gp_cov_ridge_penalty", 1e-6),
      ("scale_random_features", False),
      ("l2_regularization", 0.),
      ("gp_cov_likelihood", "gaussian"),
      ("return_gp_cov", True),
      ("return_random_features", False),
      ("use_custom_random_features", True),
      ("custom_random_features_initializer", "random_normal"),
      ("custom_random_features_activation", None),
  )
  return {name: kwargs.pop(name, default) for name, default in gp_defaults}
def extract_spec_norm_kwargs(kwargs):
  """Extracts spectral normalization configs from a given kwarg.

  Pops the spectral-normalization keys out of `kwargs` (mutating it in
  place) and returns them as a separate dict with defaults applied.
  """
  sn_defaults = (("iteration", 1), ("norm_multiplier", .99))
  return {name: kwargs.pop(name, default) for name, default in sn_defaults}
| 35.689895 | 80 | 0.691399 | 8,131 | 0.79381 | 0 | 0 | 412 | 0.040223 | 0 | 0 | 4,097 | 0.39998 |
501a6e4b0542dfab3ece831a797cbcd4d5a9b0c2 | 7,119 | py | Python | oembed/migrations/0001_initial.py | EightMedia/djangoembed | ee325f7375c48405f9c3e7e2c0fa7f5a08fafd48 | [
"MIT"
] | 8 | 2015-02-06T19:18:49.000Z | 2021-01-01T05:46:02.000Z | oembed/migrations/0001_initial.py | EightMedia/djangoembed | ee325f7375c48405f9c3e7e2c0fa7f5a08fafd48 | [
"MIT"
] | null | null | null | oembed/migrations/0001_initial.py | EightMedia/djangoembed | ee325f7375c48405f9c3e7e2c0fa7f5a08fafd48 | [
"MIT"
] | 5 | 2015-03-15T11:41:26.000Z | 2018-03-08T09:45:26.000Z | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.conf import settings
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the ``oembed`` app.

    Creates the three tables backing the app's models — ``StoredOEmbed``
    (cached oEmbed responses), ``StoredProvider`` (endpoint/regex
    registrations) and ``AggregateMedia`` (URL-to-object mapping) — and
    the composite unique constraint on ``StoredOEmbed``.
    """
    def forwards(self, orm):
        """Create the oembed tables and the StoredOEmbed unique constraint."""
        # Adding model 'StoredOEmbed'
        db.create_table('oembed_storedoembed', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('match', self.gf('django.db.models.fields.TextField')()),
            ('response_json', self.gf('django.db.models.fields.TextField')()),
            ('resource_type', self.gf('django.db.models.fields.CharField')(max_length=8)),
            ('date_added', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('date_expires', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('maxwidth', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('maxheight', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('object_id', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='related_storedoembed', null=True, to=orm['contenttypes.ContentType'])),
        ))
        db.send_create_signal('oembed', ['StoredOEmbed'])
        # Adding unique constraint on 'StoredOEmbed', fields ['match', 'maxwidth', 'maxheight']
        # Skipped on MySQL — presumably because MySQL cannot build a unique
        # index over the TEXT 'match' column without a key-length prefix;
        # TODO(review): confirm the original motivation.
        if 'mysql' not in settings.DATABASES['default']['ENGINE']:
            db.create_unique('oembed_storedoembed', ['match', 'maxwidth', 'maxheight'])
        # Adding model 'StoredProvider'
        db.create_table('oembed_storedprovider', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('endpoint_url', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('regex', self.gf('django.db.models.fields.CharField')(max_length=255)),
            ('wildcard_regex', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
            ('resource_type', self.gf('django.db.models.fields.CharField')(max_length=8)),
            ('active', self.gf('django.db.models.fields.BooleanField')(default=False, blank=True)),
            ('provides', self.gf('django.db.models.fields.BooleanField')(default=False, blank=True)),
            ('scheme_url', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
        ))
        db.send_create_signal('oembed', ['StoredProvider'])
        # Adding model 'AggregateMedia'
        db.create_table('oembed_aggregatemedia', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('url', self.gf('django.db.models.fields.TextField')()),
            ('object_id', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
            ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='aggregate_media', null=True, to=orm['contenttypes.ContentType'])),
        ))
        db.send_create_signal('oembed', ['AggregateMedia'])
    def backwards(self, orm):
        """Drop everything created by :meth:`forwards`, mirroring its
        MySQL-specific handling of the unique constraint."""
        # Deleting model 'StoredOEmbed'
        db.delete_table('oembed_storedoembed')
        # Removing unique constraint on 'StoredOEmbed', fields ['match', 'maxwidth', 'maxheight']
        # Mirrors the conditional in forwards(): on MySQL the constraint was
        # never created, so there is nothing to drop.
        if 'mysql' not in settings.DATABASES['default']['ENGINE']:
            db.delete_unique('oembed_storedoembed', ['match', 'maxwidth', 'maxheight'])
        # Deleting model 'StoredProvider'
        db.delete_table('oembed_storedprovider')
        # Deleting model 'AggregateMedia'
        db.delete_table('oembed_aggregatemedia')
    # Frozen ORM snapshot used by South to reconstruct model state at this
    # point in history. Do not edit by hand — regenerate with South if the
    # models change.
    models = {
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'oembed.aggregatemedia': {
            'Meta': {'object_name': 'AggregateMedia'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'aggregate_media'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'url': ('django.db.models.fields.TextField', [], {})
        },
        'oembed.storedoembed': {
            'Meta': {'ordering': "('-date_added',)", 'unique_together': "(('match', 'maxwidth', 'maxheight'),)", 'object_name': 'StoredOEmbed'},
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'related_storedoembed'", 'null': 'True', 'to': "orm['contenttypes.ContentType']"}),
            'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_expires': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'match': ('django.db.models.fields.TextField', [], {}),
            'maxheight': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'maxwidth': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'resource_type': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
            'response_json': ('django.db.models.fields.TextField', [], {})
        },
        'oembed.storedprovider': {
            'Meta': {'ordering': "('endpoint_url', 'resource_type', 'wildcard_regex')", 'object_name': 'StoredProvider'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'endpoint_url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'provides': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'regex': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'resource_type': ('django.db.models.fields.CharField', [], {'max_length': '8'}),
            'scheme_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'wildcard_regex': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
        }
    }
    # Apps whose frozen models above are complete for this migration.
    complete_apps = ['oembed']
| 62.447368 | 197 | 0.606827 | 6,958 | 0.977384 | 0 | 0 | 0 | 0 | 0 | 0 | 4,227 | 0.593763 |