blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
class Solution:
    def minPathSum(self, grid: List[List[int]]) -> int:
        """Return the minimal sum along any top-left to bottom-right path
        moving only right or down (LeetCode 64).

        Uses in-place dynamic programming: grid[i][j] is overwritten with
        the cheapest cost to reach cell (i, j). Note this mutates *grid*.
        Returns 0 for an empty grid.
        """
        if not grid or not grid[0]:
            return 0
        rows, cols = len(grid), len(grid[0])
        # First column: only reachable from above.
        for i in range(1, rows):
            grid[i][0] += grid[i - 1][0]
        # First row: only reachable from the left.
        for j in range(1, cols):
            grid[0][j] += grid[0][j - 1]
        # Interior cells: cheapest of coming from above or from the left.
        # (range(1, n) is empty when n <= 1, so no extra guards are needed.)
        for i in range(1, rows):
            for j in range(1, cols):
                grid[i][j] += min(grid[i - 1][j], grid[i][j - 1])
        return grid[-1][-1]
return grid[-1][-1] | [
"iven00000000@gmail.com"
] | iven00000000@gmail.com |
038769006e9dcbff4aa1248ab9f5b7c86a38959a | 5cd04ee165edb98c80fdfab4ca2ceaf3352f3a60 | /cflearn/models/ddr/loss.py | 4ae8915ad1fab6995fceed631a5eb62fe2106b0f | [
"MIT"
] | permissive | adbmd/carefree-learn | f99e620ead71e15d7e91c0a74bb564e05afa8ba5 | 10970de9e9b96673f56104bf410bbd4927e86334 | refs/heads/master | 2022-12-21T07:48:28.780174 | 2020-08-01T02:37:23 | 2020-08-01T02:37:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,539 | py | import torch
import logging
import torch.nn as nn
from typing import *
from cftool.ml import Anneal
from cftool.misc import LoggingMixin
from ...misc.toolkit import tensor_dict_type
from ...modules.auxiliary import MTL
class DDRLoss(nn.Module, LoggingMixin):
    def __init__(self,
                 config: Dict[str, Any],
                 device: torch.device):
        """Build the DDR joint loss.

        Reads training switches from *config* (joint training, dynamic dual
        loss weights, annealing) and, when annealing is enabled, constructs
        one ``Anneal`` schedule per loss term. ``config`` is mutated via
        ``setdefault`` so missing keys are filled with defaults.
        """
        super().__init__()
        self._joint_training = config["joint_training"]
        self._use_dynamic_dual_loss_weights = config["use_dynamic_weights"]
        self._use_anneal, self._anneal_step = config["use_anneal"], config["anneal_step"]
        # Pressure pushes quantile outputs away from zero around the median;
        # the inverse is used as the opposing slope in the pressure loss.
        self._median_pressure = config.setdefault("median_pressure", 3.)
        self._median_pressure_inv = 1. / self._median_pressure
        # Multi-task-learning combiner over (up to) 16 loss terms.
        self.mtl = MTL(16, config["mtl_method"])
        self._target_loss_warned = False
        # Reusable zero tensor returned when no loss term is produced.
        self._zero = torch.zeros([1], dtype=torch.float32).to(device)
        if self._use_anneal:
            anneal_config = config.setdefault("anneal_config", {})
            anneal_methods = anneal_config.setdefault("methods", {})
            anneal_ratios = anneal_config.setdefault("ratios", {})
            anneal_floors = anneal_config.setdefault("floors", {})
            anneal_ceilings = anneal_config.setdefault("ceilings", {})
            # Per-term defaults: schedule shape, fraction of the total anneal
            # step the schedule spans, and its start/end multipliers.
            default_anneal_methods = {
                "median_anneal": "linear", "main_anneal": "linear",
                "monotonous_anneal": "sigmoid", "anchor_anneal": "linear",
                "dual_anneal": "sigmoid", "recover_anneal": "sigmoid", "pressure_anneal": "sigmoid"
            }
            default_anneal_ratios = {
                "median_anneal": 0.25, "main_anneal": 0.25,
                "monotonous_anneal": 0.2, "anchor_anneal": 0.2,
                "dual_anneal": 0.75, "recover_anneal": 0.75, "pressure_anneal": 0.5
            }
            default_anneal_floors = {
                "median_anneal": 1., "main_anneal": 0.,
                "monotonous_anneal": 0., "anchor_anneal": 0.,
                "dual_anneal": 0., "recover_anneal": 0., "pressure_anneal": 0.
            }
            default_anneal_ceilings = {
                "median_anneal": 2.5, "main_anneal": 0.8,
                "monotonous_anneal": 2.5, "anchor_anneal": 2.,
                "dual_anneal": 0.1, "recover_anneal": 0.1, "pressure_anneal": 1.,
            }
            for anneal in default_anneal_methods:
                anneal_methods.setdefault(anneal, default_anneal_methods[anneal])
                anneal_ratios.setdefault(anneal, default_anneal_ratios[anneal])
                anneal_floors.setdefault(anneal, default_anneal_floors[anneal])
                anneal_ceilings.setdefault(anneal, default_anneal_ceilings[anneal])
            # Expose each schedule as self._<name>; a method of None disables
            # that schedule (the attribute is set to None).
            for anneal in default_anneal_methods:
                attr = f"_{anneal}"
                if anneal_methods[anneal] is None:
                    setattr(self, attr, None)
                else:
                    setattr(self, attr, Anneal(
                        anneal_methods[anneal], round(self._anneal_step * anneal_ratios[anneal]),
                        anneal_floors[anneal], anneal_ceilings[anneal]
                    ))
def forward(self,
predictions: tensor_dict_type,
target: torch.Tensor,
*,
check_monotonous_only: bool = False) -> Tuple[torch.Tensor, tensor_dict_type]:
# anneal
if not self._use_anneal or not self.training or check_monotonous_only:
main_anneal = median_anneal = None
monotonous_anneal = anchor_anneal = None
dual_anneal = recover_anneal = pressure_anneal = None
else:
main_anneal = None if self._main_anneal is None else self._main_anneal.pop()
median_anneal = None if self._median_anneal is None else self._median_anneal.pop()
monotonous_anneal = None if self._monotonous_anneal is None else self._monotonous_anneal.pop()
anchor_anneal = None if self._median_anneal is None else self._anchor_anneal.pop()
dual_anneal = None if self._median_anneal is None else self._dual_anneal.pop()
recover_anneal = None if self._median_anneal is None else self._recover_anneal.pop()
pressure_anneal = None if self._pressure_anneal is None else self._pressure_anneal.pop()
self._last_main_anneal, self._last_pressure_anneal = main_anneal, pressure_anneal
if self._use_anneal and check_monotonous_only:
main_anneal, pressure_anneal = self._last_main_anneal, self._last_pressure_anneal
# median
median = predictions["predictions"]
median_loss = nn.functional.l1_loss(median, target)
if median_anneal is not None:
median_loss = median_loss * median_anneal
# get
anchor_batch, cdf_raw = map(predictions.get, ["anchor_batch", "cdf_raw"])
sampled_anchors, sampled_cdf_raw = map(predictions.get, ["sampled_anchors", "sampled_cdf_raw"])
quantile_batch, median_residual, quantile_residual, quantile_sign = map(
predictions.get, ["quantile_batch", "median_residual", "quantile_residual", "quantile_sign"])
sampled_quantiles, sampled_quantile_residual = map(
predictions.get, ["sampled_quantiles", "sampled_quantile_residual"])
cdf_gradient, quantile_residual_gradient = map(
predictions.get, ["cdf_gradient", "quantile_residual_gradient"])
dual_quantile, quantile_cdf_raw = map(predictions.get, ["dual_quantile", "quantile_cdf_raw"])
dual_cdf, cdf_quantile_residual = map(predictions.get, ["dual_cdf", "cdf_quantile_residual"])
# cdf
fetch_cdf = cdf_raw is not None
cdf_anchor_loss = cdf_monotonous_loss = None
if not fetch_cdf or check_monotonous_only:
cdf_loss = cdf_losses = None
else:
cdf_losses = self._get_cdf_loss(target, cdf_raw, anchor_batch, False)
if main_anneal is not None:
cdf_losses = cdf_losses * main_anneal
cdf_loss = cdf_losses.mean()
if sampled_cdf_raw is not None:
cdf_anchor_loss = self._get_cdf_loss(target, sampled_cdf_raw, sampled_anchors, True)
if anchor_anneal is not None:
cdf_anchor_loss = cdf_anchor_loss * anchor_anneal
# cdf monotonous
if cdf_gradient is not None:
cdf_monotonous_loss = nn.functional.relu(-cdf_gradient).mean()
if anchor_anneal is not None:
cdf_monotonous_loss = cdf_monotonous_loss * monotonous_anneal
# quantile
fetch_quantile = quantile_residual is not None
quantile_anchor_loss = quantile_monotonous_loss = None
if not fetch_quantile or check_monotonous_only:
median_residual_loss = quantile_loss = quantile_losses = None
else:
target_median_residual = target - predictions["median_detach"]
median_residual_loss = self._get_median_residual_loss(
target_median_residual, median_residual, quantile_sign)
if anchor_anneal is not None:
median_residual_loss = median_residual_loss * anchor_anneal
quantile_losses = self._get_quantile_residual_loss(
target_median_residual, quantile_residual, quantile_batch, False)
quantile_loss = quantile_losses.mean() + median_residual_loss
if main_anneal is not None:
quantile_loss = quantile_loss * main_anneal
if sampled_quantile_residual is not None:
quantile_anchor_loss = self._get_quantile_residual_loss(
target_median_residual, sampled_quantile_residual,
sampled_quantiles, True
)
if anchor_anneal is not None:
quantile_anchor_loss = quantile_anchor_loss * anchor_anneal
# median pressure
if not fetch_quantile:
median_pressure_loss = None
else:
median_pressure_loss = self._get_median_pressure_loss(predictions)
if pressure_anneal is not None:
median_pressure_loss = median_pressure_loss * pressure_anneal
# quantile monotonous
quantile_monotonous_losses = []
if quantile_residual_gradient is not None:
quantile_monotonous_losses.append(nn.functional.relu(-quantile_residual_gradient).mean())
if median_residual is not None and quantile_sign is not None:
quantile_monotonous_losses.append(
self._get_median_residual_monotonous_loss(median_residual, quantile_sign))
if quantile_monotonous_losses:
quantile_monotonous_loss = sum(quantile_monotonous_losses)
if anchor_anneal is not None:
quantile_monotonous_loss = quantile_monotonous_loss * monotonous_anneal
# dual
if not self._joint_training or not fetch_cdf or not fetch_quantile or check_monotonous_only:
dual_cdf_loss = dual_quantile_loss = None
cdf_recover_loss = quantile_recover_loss = None
else:
# dual cdf (cdf -> quantile [recover loss] -> cdf [dual loss])
quantile_recover_loss, quantile_recover_losses, quantile_recover_loss_weights = \
self._get_dual_recover_loss(dual_quantile, anchor_batch, cdf_losses)
if quantile_cdf_raw is None:
dual_quantile_loss = None
else:
dual_quantile_losses = self._get_cdf_loss(target, quantile_cdf_raw, anchor_batch, False)
if quantile_recover_losses is None or not self._use_dynamic_dual_loss_weights:
dual_quantile_loss_weights = 1.
else:
quantile_recover_losses_detach = quantile_recover_losses.detach()
dual_quantile_loss_weights = 0.5 * (
quantile_recover_loss_weights + 1 / (1 + 2 * torch.tanh(quantile_recover_losses_detach)))
dual_quantile_loss = (dual_quantile_losses * dual_quantile_loss_weights).mean()
# dual quantile (quantile -> cdf [recover loss] -> quantile [dual loss])
cdf_recover_loss, cdf_recover_losses, cdf_recover_loss_weights = \
self._get_dual_recover_loss(dual_cdf, quantile_batch, quantile_losses)
if cdf_quantile_residual is None:
dual_cdf_loss = None
else:
dual_cdf_losses = self._get_quantile_residual_loss(
target, cdf_quantile_residual, quantile_batch, False)
if cdf_recover_losses is None or not self._use_dynamic_dual_loss_weights:
dual_cdf_loss_weights = 1.
else:
cdf_recover_losses_detach = cdf_recover_losses.detach()
dual_cdf_loss_weights = 0.5 * (
cdf_recover_loss_weights + 1 / (1 + 10 * cdf_recover_losses_detach))
dual_cdf_loss = (dual_cdf_losses * dual_cdf_loss_weights).mean() + median_residual_loss
if dual_anneal is not None:
if dual_cdf_loss is not None:
dual_cdf_loss = dual_cdf_loss * dual_anneal
if dual_quantile_loss is not None:
dual_quantile_loss = dual_quantile_loss * dual_anneal
if recover_anneal is not None:
if cdf_recover_loss is not None:
cdf_recover_loss = cdf_recover_loss * recover_anneal
if quantile_recover_loss is not None:
quantile_recover_loss = quantile_recover_loss * recover_anneal
# combine
if check_monotonous_only:
losses = {}
else:
losses = {"median": median_loss}
if not self._joint_training:
if cdf_anchor_loss is not None:
losses["cdf_anchor"] = cdf_anchor_loss
if quantile_anchor_loss is not None:
losses["quantile_anchor"] = quantile_anchor_loss
else:
if fetch_cdf:
losses["cdf"] = cdf_loss
if cdf_anchor_loss is not None:
losses["cdf_anchor"] = cdf_anchor_loss
if fetch_quantile:
losses["quantile"] = quantile_loss
if quantile_anchor_loss is not None:
losses["quantile_anchor"] = quantile_anchor_loss
if fetch_cdf and fetch_quantile:
losses["quantile_recover"], losses["cdf_recover"] = quantile_recover_loss, cdf_recover_loss
losses["dual_quantile"], losses["dual_cdf"] = dual_quantile_loss, dual_cdf_loss
if median_residual_loss is not None:
losses["median_residual_loss"] = median_residual_loss
if median_pressure_loss is not None:
key = "synthetic_median_pressure_loss" if check_monotonous_only else "median_pressure_loss"
losses[key] = median_pressure_loss
if cdf_monotonous_loss is not None:
key = "synthetic_cdf_monotonous" if check_monotonous_only else "cdf_monotonous"
losses[key] = cdf_monotonous_loss
if quantile_monotonous_loss is not None:
key = "synthetic_quantile_monotonous" if check_monotonous_only else "quantile_monotonous"
losses[key] = quantile_monotonous_loss
if not losses:
return self._zero, {"loss": self._zero}
if not self.mtl.registered:
self.mtl.register(losses.keys())
return self.mtl(losses), losses
def _get_dual_recover_loss(self, dual_prediction, another_input_batch, another_losses):
if dual_prediction is None:
recover_loss = recover_losses = recover_loss_weights = None
else:
recover_losses = torch.abs(another_input_batch - dual_prediction)
if not self._use_dynamic_dual_loss_weights:
recover_loss_weights = 1.
else:
another_losses_detach = another_losses.detach()
recover_loss_weights = 1 / (1 + 2 * torch.tanh(another_losses_detach))
recover_loss = (recover_losses * recover_loss_weights).mean()
return recover_loss, recover_losses, recover_loss_weights
@staticmethod
def _get_cdf_loss(target, cdf_raw, anchor_batch, reduce):
indicative = (target <= anchor_batch).to(torch.float32)
cdf_losses = -indicative * cdf_raw + nn.functional.softplus(cdf_raw)
return cdf_losses if not reduce else cdf_losses.mean()
@staticmethod
def _get_median_residual_monotonous_loss(median_residual, quantile_sign):
return nn.functional.relu(-median_residual * quantile_sign).mean()
@staticmethod
def _get_quantile_residual_loss(target_residual, quantile_residual, quantile_batch, reduce):
quantile_error = target_residual - quantile_residual
quantile_losses = torch.max(quantile_batch * quantile_error, (quantile_batch - 1) * quantile_error)
return quantile_losses if not reduce else quantile_losses.mean()
def _get_median_residual_loss(self, target_median_residual, median_residual, quantile_sign):
same_sign_mask = quantile_sign * torch.sign(target_median_residual) > 0
tmr, mr = map(lambda tensor: tensor[same_sign_mask], [target_median_residual, median_residual])
median_residual_mae = self._median_pressure * torch.abs(tmr - mr).mean()
residual_monotonous_loss = DDRLoss._get_median_residual_monotonous_loss(median_residual, quantile_sign)
return median_residual_mae + residual_monotonous_loss
def _get_median_pressure_loss(self, predictions):
pressure_pos_dict, pressure_neg_dict = map(
predictions.get, map(lambda attr: f"pressure_sub_quantile_{attr}_dict", ["pos", "neg"]))
additive_pos, additive_neg = pressure_pos_dict["add"], pressure_neg_dict["add"]
multiply_pos, multiply_neg = pressure_pos_dict["mul"], pressure_neg_dict["mul"]
# additive net & multiply net are tend to be zero here
# because median pressure batch receives 0.5 as input
return sum(
torch.max(
-self._median_pressure * sub_quantile,
self._median_pressure_inv * sub_quantile
).mean()
for sub_quantile in [
additive_pos, -additive_neg,
multiply_pos, multiply_neg
]
)
__all__ = ["DDRLoss"]
| [
"syameimaru_kurumi@pku.edu.cn"
] | syameimaru_kurumi@pku.edu.cn |
beaa4c42310beb20c73ad1cf96be7aa287176ab4 | b1d90fa399c2f4cb1f5eba0846d60d72044fc4b9 | /wASmaster/configure_was/configure_servers_and_cluster.py | c1bbfa285f3c144a3c45f9b43aa241d97a38b4e4 | [] | no_license | igmatovina/webSphere-automatization | a497b0ec70b1bee833082c58410ed4409e1ae84b | c4ec7fdba9d57ce176b7186dfd6697c95ebd6214 | refs/heads/main | 2023-01-02T00:34:02.523424 | 2020-10-19T11:10:29 | 2020-10-19T11:10:29 | 305,355,822 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,126 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# exit()
from java.lang import System as jvm
import sys
sys.modules['AdminConfig'] = AdminConfig
sys.modules['AdminControl'] = AdminControl
sys.modules['AdminApp'] = AdminApp
sys.modules['AdminTask'] = AdminTask
sys.modules['Help'] = Help
import myfunctions as fl
import xml.etree.ElementTree as ET
# Load the cluster definition; abort the wsadmin script with the error
# details if the file is missing or malformed.
try:
    tree = ET.parse('config/new_cluster.xml')
    root = tree.getroot()
    scopes = root.findall('.//scope')
except Exception:
    # was a bare "except:", which also swallowed SystemExit/KeyboardInterrupt;
    # Exception still covers parse and I/O errors
    e = sys.exc_info()
    sys.exit(e)
def createserver(node, server_name):
    """Create an application server named *server_name* on *node* (wsadmin)."""
    task_args = ['-name', server_name]
    AdminTask.createApplicationServer(node, task_args)
def converttocluster(server_scope, cluster_name):
    """Convert the server identified by *server_scope* into the first member
    of a new cluster named *cluster_name* (wsadmin).
    """
    AdminConfig.convertToCluster(server_scope, cluster_name)
def createclustermember(cluster, node, server_name):
    """Add a new member named *server_name* on *node* to *cluster* (wsadmin)."""
    member_attrs = [['memberName', server_name]]
    AdminConfig.createClusterMember(cluster, node, member_attrs)
# For every <scope> in the XML: ensure its cluster exists, then ensure every
# <member> server exists in it. The first member of a missing cluster is
# created as a stand-alone server and converted into the cluster; later
# members are added with createClusterMember. Note: wsadmin helpers return
# the *string* 'false', hence the string comparisons below.
for scope in scopes:
    scope_attributes = scope.attrib
    cluster_scope_type = fl.getScopeType(scope_attributes)
    cluster_name = scope.attrib['Cluster']
    print cluster_name
    members = scope.findall('.//member')
    for member in members:
        member_attributes = member.attrib
        node_name = member.attrib['Node']
        server_name = member.attrib['Server']
        scope_type = fl.getScopeType(member_attributes)
        if AdminClusterManagement.checkIfClusterExists(cluster_name) \
                == 'false':
            # cluster missing entirely: create the server, then convert it
            # into the new cluster (it becomes the template member)
            createserver(node_name, server_name)
            server_id = fl.getScopeId(scope_type, member_attributes)
            converttocluster(server_id, cluster_name)
        elif AdminClusterManagement.checkIfClusterMemberExists(cluster_name,
                server_name) == 'false':
            # cluster exists but this member does not: add it
            clusterid = fl.getScopeId(cluster_scope_type,
                    scope_attributes)
            node_id = fl.getNodeId(node_name)
            createclustermember(clusterid, node_id, server_name)
        else:
            print ''
            print 'Cluster ' + cluster_name + ' and cluster member ' \
                  + server_name + ' already exist'
            print ''
# persist all configuration changes made above
AdminConfig.save()
| [
"noreply@github.com"
] | noreply@github.com |
b9950dd4f6bb688de78a9a92c88f0ae70755ed6e | 8f6a9ff4c63fd24d145088077d5da1c3e4caaa3a | /programming trade/easyhistory - download 备份修改/easyhistroy/history.py | ea5e4c058c768ff89e5b70d20e111adb96f0d2fc | [] | no_license | liaofuwei/pythoncoding | 6fd2afba0d27c4a4bbb4b2d321b3fa402a60d6fe | 966bd99459be933cf48287412a40e0c7a3d0b8e5 | refs/heads/master | 2021-07-15T10:34:57.701528 | 2017-10-10T05:27:13 | 2017-10-10T05:27:13 | 107,651,470 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,621 | py | # coding:utf-8
import os
import pandas as pd
import talib
class Indicator(object):
    """Wraps one stock's day-level history frame and lazily computes TA-Lib
    indicators on attribute access.

    ``ind.SMA(5)`` resolves through ``__getattr__`` to ``talib.SMA`` applied
    to the 'close' column; the result is cached as a new column named
    ``"SMA5"`` on the history frame.
    """

    def __init__(self, stock_code, history):
        self.stock_code = stock_code
        self.history = history
        # Fix: load_csv_files stored into self.market, but no such attribute
        # was ever created — the lookup fell through to __getattr__ and
        # returned the talib closure, raising TypeError on item assignment.
        self.market = {}

    def load_csv_files(self, path):
        """Load every '*.csv' in *path* into self.market keyed by stock code."""
        file_list = [f for f in os.listdir(path) if f.endswith('.csv')]
        for stock_csv in file_list:
            csv_ext_index_start = -4  # strip the '.csv' extension
            stock_code = stock_csv[:csv_ext_index_start]
            # Fix: the file name must be joined with *path* (the original
            # read relative to the CWD; History.load_csv_files already joins).
            csv_path = os.path.join(path, stock_csv)
            self.market[stock_code] = pd.read_csv(csv_path, index_col='date')

    def __getattr__(self, item):
        # Only reached for attributes not found normally; *item* is assumed
        # to be a TA-Lib function name (e.g. 'SMA', 'MACD').
        def talib_func(*args, **kwargs):
            str_args = ''.join(map(str, args))
            # cached column, e.g. 'SMA5' for SMA(5)
            if self.history.get(item + str_args) is not None:
                return self.history
            func = getattr(talib, item)
            res_arr = func(self.history['close'].values, *args, **kwargs)
            self.history[item + str_args] = res_arr
            return self.history
        return talib_func
class History(object):
    """Container of day-level stock histories, keyed by stock code.

    On construction it scans ``<path>/day/data`` for '*.csv' files and wraps
    each one in an :class:`Indicator`. ``history['600000']`` returns the
    Indicator for that code. The *dtype* parameter is kept for interface
    compatibility but is not used.
    """

    def __init__(self, dtype='D', path='history'):
        self.market = {}
        self.load_csv_files(os.path.join(path, 'day', 'data'))

    def load_csv_files(self, path):
        """Populate self.market from every '*.csv' file found in *path*."""
        for file_name in os.listdir(path):
            if not file_name.endswith('.csv'):
                continue
            code = file_name[:-4]  # strip '.csv'
            frame = pd.read_csv(os.path.join(path, file_name), index_col='date')
            self.market[code] = Indicator(code, frame)

    def __getitem__(self, item):
        return self.market[item]
| [
"459193023@qq.com"
] | 459193023@qq.com |
5957fddb7c3f6e6aa3a69b0ba94279abc367d105 | d09b14a13e05adcd3d0f1714384b3ab65be4aa7c | /controller/UserRoleManagementDialog.py | 88b9e2a569b3cea650f01750526b2b90f2bedf97 | [] | no_license | ankhbold/lm2 | bd61a353b95d6d8e351cf4b0af48b1b936db8b9f | 30dfbeced57f123d39a69cb4d643a15429b8bfde | refs/heads/master | 2021-07-24T20:57:16.534659 | 2017-11-03T16:33:43 | 2017-11-03T16:33:43 | 90,375,636 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64,172 | py | __author__ = 'ankhaa'
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from sqlalchemy import exc, or_
from sqlalchemy.exc import DatabaseError, SQLAlchemyError
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy import func, or_, and_, desc,extract
from inspect import currentframe
from ..view.Ui_UserRoleManagementDialog import *
from ..model.SetRole import *
from ..model.AuLevel1 import *
from ..model.AuLevel2 import *
from ..model.LM2Exception import LM2Exception
from ..model.DialogInspector import DialogInspector
from ..model.ClPositionType import *
from ..model.ClGroupRole import *
from ..model.SetPositionGroupRole import *
from ..model.SetUserPosition import *
from ..model.SetUserGroupRole import *
from ..utils.PluginUtils import *
from ..controller.UserRoleManagementDetialDialog import *
from uuid import getnode as get_mac
import commands
import datetime
import socket
import sys
import struct
INTERFACE_NAME = "eth0"
class UserRoleManagementDialog(QDialog, Ui_UserRoleManagementDialog):
GROUP_SEPARATOR = '-----'
PW_PLACEHOLDER = '0123456789'
    def __init__(self, has_privilege , user, parent=None):
        """Build the user/role management dialog.

        :param has_privilege: True when the logged-on user may administer
            other users; otherwise most controls are disabled.
        :param user: user name of the currently logged-on user.
        """
        super(UserRoleManagementDialog, self).__init__(parent)
        self.setupUi(self)
        self.db_session = SessionHandler().session_instance()
        self.has_privilege = has_privilege
        self.__username = user
        # lock down the UI first, then fill the widgets from the database
        self.__privilage()
        self.__setup_combo_boxes()
        self.__populate_user_role_lwidget()
        self.__populate_group_lwidget()
        self.__populate_au_level1_cbox()
        self.close_button.clicked.connect(self.reject)

        # permit only alphanumeric characters for the username
        # (pattern: four lowercase letters followed by six digits)
        reg_ex = QRegExp(u"[a-z]{4}[0-9]{6}")
        validator = QRegExpValidator(reg_ex, None)

        # passwords: lowercase letters, digits and underscore only
        reg_ex = QRegExp(u"[a-z_0-9]+")
        validator_pass = QRegExpValidator(reg_ex, None)

        # suggest a default user name derived from the database name suffix
        database = QSettings().value(SettingsConstants.DATABASE_NAME)
        self.username_edit.setText('user'+ database[-4:])
        self.username_edit.setValidator(validator)
        self.password_edit.setValidator(validator_pass)
        self.retype_password_edit.setValidator(validator_pass)

        self.__setup_validators()
        self.selected_user = None
        # self.mac_address = self.get_mac_address()
        # self.mac_address_edit.setText(self.mac_address)
        self.__setup_twidget()
        self.__load_default_ritht_grud()
    def __setup_twidget(self):
        """Configure selection mode, editability, sorting and column widths
        of the dialog's table widgets (users, positions, rights/CRUD grids).
        """
        self.user_twidget.setSelectionMode(QAbstractItemView.SingleSelection)
        self.user_twidget.setSelectionBehavior(QAbstractItemView.SelectRows)
        self.user_twidget.setEditTriggers(QAbstractItemView.NoEditTriggers)
        self.user_twidget.setSortingEnabled(True)

        self.position_twidget.setSelectionMode(QAbstractItemView.SingleSelection)
        self.position_twidget.setSelectionBehavior(QAbstractItemView.SelectRows)
        self.position_twidget.setEditTriggers(QAbstractItemView.NoEditTriggers)
        self.position_twidget.setSortingEnabled(True)

        self.settings_position_twidget.setAlternatingRowColors(True)
        self.settings_position_twidget.setSelectionMode(QAbstractItemView.SingleSelection)
        self.settings_position_twidget.setSelectionBehavior(QAbstractItemView.SelectRows)

        self.settings_right_grud_twidget.setSelectionMode(QAbstractItemView.SingleSelection)
        self.settings_right_grud_twidget.setSelectionBehavior(QAbstractItemView.SelectRows)
        self.settings_right_grud_twidget.setEditTriggers(QAbstractItemView.NoEditTriggers)
        self.settings_right_grud_twidget.setSortingEnabled(True)
        # first two columns hold names; the narrow 45px columns are the
        # create/read/update/delete checkbox columns
        self.settings_right_grud_twidget.setColumnWidth(0, 170)
        self.settings_right_grud_twidget.setColumnWidth(1, 170)
        self.settings_right_grud_twidget.setColumnWidth(2, 45)
        self.settings_right_grud_twidget.setColumnWidth(3, 45)
        self.settings_right_grud_twidget.setColumnWidth(4, 45)
        self.settings_right_grud_twidget.setColumnWidth(5, 45)

        self.right_grud_twidget.setSelectionMode(QAbstractItemView.SingleSelection)
        self.right_grud_twidget.setSelectionBehavior(QAbstractItemView.SelectRows)
        self.right_grud_twidget.setEditTriggers(QAbstractItemView.NoEditTriggers)
        self.right_grud_twidget.setSortingEnabled(True)
        self.right_grud_twidget.setColumnWidth(0, 170)
        self.right_grud_twidget.setColumnWidth(1, 45)
        self.right_grud_twidget.setColumnWidth(2, 45)
        self.right_grud_twidget.setColumnWidth(3, 45)
        self.right_grud_twidget.setColumnWidth(4, 45)
    @pyqtSlot(int)
    def on_get_mac_checkbox_stateChanged(self, state):
        """Fill the MAC edit with this machine's address while the checkbox
        is checked; clear it when unchecked.
        """
        if state == Qt.Checked:
            self.mac_address = self.get_mac_address()
            self.mac_address_edit.setText(self.mac_address)
        else:
            self.mac_address_edit.clear()
def __setup_validators(self):
self.mac_validator = QRegExpValidator(
QRegExp("[a-zA-Z0-9]{2}:[a-zA-Z0-9]{2}:[a-zA-Z0-9]{2}:[a-zA-Z0-9]{2}:[a-zA-Z0-9]{2}:[a-zA-Z0-9]{2}"),
None)
self.mac_address_edit.setValidator(self.mac_validator)
def get_mac_address(self):
if sys.platform == 'win32':
for line in os.popen("ipconfig /all"):
if line.lstrip().startswith('Physical Address'):
mac = line.split(':')[1].strip().replace('-', ':')
if len(mac) == 17:
mac = line.split(':')[1].strip().replace('-', ':')
break
else:
for line in os.popen("/sbin/ifconfig"):
if line.find('Ether') > -1:
mac = line.split()[4]
if len(mac) == 17:
mac = line.split(':')[1].strip().replace('-', ':')
break
return mac
    def get_macaddress(self, host):
        """ Returns the MAC address of a network host, requires >= WIN2K. """
        # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/347812
        # Windows-only: resolves the host's MAC via the IP Helper SendARP API.
        import ctypes
        import socket
        import struct

        # Check for api availability
        try:
            SendARP = ctypes.windll.Iphlpapi.SendARP
        except:
            raise NotImplementedError('Usage only on Windows 2000 and above')

        # Doesn't work with loopbacks, but let's try and help.
        if host == '127.0.0.1' or host.lower() == 'localhost':
            host = socket.gethostname()

        # gethostbyname blocks, so use it wisely.
        # inet_addr returns 0 / -1 (INADDR_NONE) for a non-dotted-quad input,
        # in which case the name is resolved via DNS first.
        try:
            inetaddr = ctypes.windll.wsock32.inet_addr(host)
            if inetaddr in (0, -1):
                raise Exception
        except:
            hostip = socket.gethostbyname(host)
            inetaddr = ctypes.windll.wsock32.inet_addr(hostip)

        # SendARP fills a 6-byte buffer with the resolved hardware address
        buffer = ctypes.c_buffer(6)
        addlen = ctypes.c_ulong(ctypes.sizeof(buffer))
        if SendARP(inetaddr, 0, ctypes.byref(buffer), ctypes.byref(addlen)) != 0:
            raise WindowsError('Retreival of mac address(%s) - failed' % host)

        # Convert binary data into a string.
        # NOTE(review): hex() yields '0x1f' / '0x5', so bytes below 16 are
        # rendered without a leading zero (e.g. '0:1A:...') — looks like a
        # known quirk of the recipe; confirm callers tolerate it.
        macaddr = ''
        for intval in struct.unpack('BBBBBB', buffer):
            if intval > 15:
                replacestr = '0x'
            else:
                replacestr = 'x'
            if macaddr != '':
                macaddr = ':'.join([macaddr, hex(intval).replace(replacestr, '')])
            else:
                macaddr = ''.join([macaddr, hex(intval).replace(replacestr, '')])

        return macaddr.upper()
def __privilage(self):
if not self.has_privilege:
self.groupBox_2.setEnabled(False)
self.add_button.setEnabled(False)
self.delete_button.setEnabled(False)
self.username_edit.setEnabled(False)
self.phone_edit.setEnabled(False)
self.surname_edit.setEnabled(False)
self.firstname_edit.setEnabled(False)
self.email_edit.setEnabled(False)
self.position_cbox.setEnabled(False)
self.mac_address_edit.setEnabled(False)
self.groupBox_3.setEnabled(False)
    def __setup_combo_boxes(self):
        """Fill the position combo box from the cl_position_type table;
        shows an error dialog and leaves the box empty on query failure.
        """
        try:
            positions = self.db_session.query(ClPositionType).all()
            for position in positions:
                # description is the visible text, code the item data
                self.position_cbox.addItem(position.description, position.code)
        except SQLAlchemyError, e:
            PluginUtils.show_error(self, self.tr("Query Error"), self.tr("Error in line {0}: {1}").format(currentframe().f_lineno, e.message))
            return
# def set_username(self, username):
#
# self.__username = username
    def __populate_user_role_lwidget(self):
        """Fill the user list: all users for privileged callers, otherwise
        only the logged-on user's own entry.
        """
        self.user_role_lwidget.clear()
        if self.has_privilege:
            users = self.db_session.query(SetRole.user_name).order_by(SetRole.user_name).group_by(SetRole.user_name)
        else:
            users = self.db_session.query(SetRole.user_name).filter(SetRole.user_name == self.__username).group_by(SetRole.user_name).all()
        try:
            for user in users:
                item = QListWidgetItem(QIcon(":/plugins/lm2/person.png"), user.user_name)
                # if user.user_name == self.__logged_on_user():
                item.setForeground(Qt.blue)
                # if self.__is_db_role(user.user_name):
                self.user_role_lwidget.addItem(item)
        except (DatabaseError, SQLAlchemyError), e:
            PluginUtils.show_error(self, self.tr("Database Error"), e.message)
def __is_db_role(self, user_name):
try:
sql = "SELECT count(*) FROM pg_roles WHERE rolname = '{0}' and rolcanlogin = true".format(user_name)
count = self.db_session.execute(sql).fetchone()
return True if count[0] == 1 else False
except SQLAlchemyError, e:
PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
def __populate_group_lwidget(self):
self.group_lwidget.clear()
self.member_lwidget.clear()
QListWidgetItem("land_office_administration", self.group_lwidget)
QListWidgetItem("db_creation", self.group_lwidget)
QListWidgetItem("role_management", self.group_lwidget)
QListWidgetItem(self.GROUP_SEPARATOR, self.group_lwidget)
QListWidgetItem("application_view", self.group_lwidget)
QListWidgetItem("application_update", self.group_lwidget)
QListWidgetItem("cadastre_view", self.group_lwidget)
QListWidgetItem("cadastre_update", self.group_lwidget)
QListWidgetItem("contracting_view", self.group_lwidget)
QListWidgetItem("contracting_update", self.group_lwidget)
QListWidgetItem("reporting", self.group_lwidget)
QListWidgetItem("log_view", self.member_lwidget)
    def __populate_au_level1_cbox(self):
        """Fill the Aimag (first-level admin unit) combo box; errors are
        reported in a dialog and leave the box unfilled.
        """
        try:
            PluginUtils.populate_au_level1_cbox(self.aimag_cbox, True, False, False)
        except DatabaseError, e:
            PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
    @pyqtSlot()
    def on_aimag_lwidget_itemSelectionChanged(self):
        """Refresh the Soum combo box with the second-level units of the
        currently selected Aimag; always keeps a wildcard '*' entry first.
        """
        try:
            self.soum_cbox.clear()
            self.soum_cbox.addItem("*", "*")
            if self.aimag_lwidget.currentItem() is None:
                return
            # if self.aimag_lwidget.count() > 1:
            #     return
            au_level1_code = self.aimag_lwidget.currentItem().data(Qt.UserRole)
            PluginUtils.populate_au_level2_cbox(self.soum_cbox, au_level1_code, True, False, False)
        except DatabaseError, e:
            PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
    @pyqtSlot()
    def on_user_role_lwidget_itemSelectionChanged(self):
        """Load the selected user's details into the form: identity fields,
        group memberships and administrative-unit restrictions.
        """
        self.selected_user = self.user_role_lwidget.currentItem().text()
        user_name = self.user_role_lwidget.currentItem().text()
        try:
            # a user may have several SetRole rows; prefer the active one
            user_c = self.db_session.query(SetRole). \
                filter(SetRole.user_name == user_name).count()
            if user_c == 1:
                user = self.db_session.query(SetRole). \
                    filter(SetRole.user_name == user_name).one()
            else:
                user = self.db_session.query(SetRole).\
                    filter(SetRole.user_name == user_name).\
                    filter(SetRole.is_active == True).one()
        except NoResultFound:
            return
        self.username_real_lbl.setText(user.user_name_real)
        self.username_edit.setText(user.user_name)
        self.surname_edit.setText(user.surname)
        self.firstname_edit.setText(user.first_name)
        self.email_edit.setText(user.email)
        self.position_cbox.setCurrentIndex(self.position_cbox.findData(user.position))
        # self.position_edit.setText(user.position)
        self.phone_edit.setText(user.phone)
        self.mac_address_edit.setText(user.mac_addresses)
        # never show the real password; the placeholder signals "unchanged"
        self.password_edit.setText(self.PW_PLACEHOLDER)
        self.retype_password_edit.setText(self.PW_PLACEHOLDER)
        self.register_edit.setText(user.user_register)

        # populate groups: move the user's groups from 'available' to 'member'
        self.__populate_group_lwidget()
        groups = self.__groupsByUser(user_name)
        for group in groups:
            group_name = group[0]
            items = self.group_lwidget.findItems(group_name, Qt.MatchExactly)
            if len(items) > 0:
                item = items[0]
                self.member_lwidget.addItem(item.text())
                self.group_lwidget.takeItem(self.group_lwidget.row(item))

        # populate admin units from the comma-separated restriction columns
        self.aimag_lwidget.clear()
        self.soum_lwidget.clear()

        restriction_au_level1 = user.restriction_au_level1
        aimag_codes = restriction_au_level1.split(',')
        try:
            if len(aimag_codes) == self.db_session.query(AuLevel1).count():  # all Aimags
                # full coverage is displayed as a single wildcard entry
                item = QListWidgetItem("*")
                item.setData(Qt.UserRole, "*")
                self.aimag_lwidget.addItem(item)
                self.soum_lwidget.addItem(item)
            else:
                for code in aimag_codes:
                    code = code.strip()
                    aimag = self.db_session.query(AuLevel1).filter(AuLevel1.code == code).one()
                    item = QListWidgetItem(aimag.name)
                    item.setData(Qt.UserRole, aimag.code)
                    self.aimag_lwidget.addItem(item)
                restriction_au_level2 = user.restriction_au_level2
                soum_codes = restriction_au_level2.split(',')
                # Find districts among the Aimags:
                # codes starting with '1'/'01' denote city districts, not
                # true Aimags, and are excluded from the coverage check
                l1_district_entries = filter(lambda x: x.startswith('1') or x.startswith('01'), aimag_codes)
                l2_district_entries = filter(lambda x: x.startswith('1') or x.startswith('01'), soum_codes)
                true_aimags = filter(lambda x: not x.startswith('1') and not x.startswith('01'), aimag_codes)
                # one true Aimag whose every Soum is covered -> wildcard
                if len(aimag_codes)-len(l1_district_entries) == 1 and \
                        len(soum_codes)-len(l2_district_entries) == self.db_session.query(AuLevel2)\
                        .filter(AuLevel2.code.startswith(true_aimags[0]))\
                        .count():
                    item = QListWidgetItem("*")
                    item.setData(Qt.UserRole, "*")
                    self.soum_lwidget.addItem(item)
                else:
                    for code in soum_codes:
                        code = code.strip()
                        soum = self.db_session.query(AuLevel2).filter(AuLevel2.code == code).one()
                        item = QListWidgetItem(soum.name+'_'+soum.code)
                        item.setData(Qt.UserRole, soum.code)
                        self.soum_lwidget.addItem(item)
        except NoResultFound:
            # stale admin-unit code in the restriction column: leave the
            # lists partially populated rather than crash
            pass
def reject(self):
    """Tear down the shared DB session before closing the dialog."""
    session_handler = SessionHandler()
    session_handler.destroy_session()
    QDialog.reject(self)
@pyqtSlot()
def on_add_button_clicked(self):
    """Create a new user role; roll back and report on a database error."""
    try:
        created = self.__add_or_update_role()
    except DatabaseError as e:
        self.db_session.rollback()
        PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
        return
    if created:
        PluginUtils.show_message(self, self.tr("User Role Management"), self.tr('New user created.'))
@pyqtSlot()
def on_update_button_clicked(self):
    """Apply changes to the selected user; roll back and report on a database error."""
    try:
        updated = self.__add_or_update_role('UPDATE')
    except DatabaseError as e:
        self.db_session.rollback()
        PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
        return
    if updated:
        PluginUtils.show_message(self, self.tr("User Role Management"), self.tr('User information updated.'))
def __add_or_update_role(self, mode='ADD'):
    """Create ('ADD') or update ('UPDATE') a PostgreSQL login and its SetRole record.

    Validates the form, then (when self.has_privilege) creates/alters the
    database role, re-grants the selected group memberships, grants the
    per-Soum roles ('s<code>') implied by the Aimag/Soum lists, and finally
    writes the SetRole row. Returns True on success; False or None when
    validation fails or the operator aborts.

    NOTE(review): role/grant SQL is assembled via str.format from form input
    with unquoted identifiers — tolerable only because this dialog is
    admin-restricted; consider quoting/parameterizing.
    """
    if not self.__validate_user_input(mode):
        return False
    # Gather the raw form values.
    user_name = self.username_edit.text().strip()
    surname = self.surname_edit.text().strip()
    first_name = self.firstname_edit.text().strip()
    user_register = self.register_edit.text().strip()
    phone = self.phone_edit.text().strip()
    # position = self.position_edit.text().strip()
    position = self.position_cbox.itemData(self.position_cbox.currentIndex())
    mac_addresses = self.mac_address_edit.text().strip()
    password = self.password_edit.text().strip()
    email = ''
    if self.email_edit.text():
        email = self.email_edit.text().strip()
    if self.has_privilege:
        # All role DDL below requires membership in 'role_management'.
        try:
            self.db_session.execute("SET ROLE role_management")
        except DatabaseError, e:
            self.db_session.rollback()
            PluginUtils.show_error(self, self.tr("Database Query Error"),
                                   self.tr("You must login different username with member of role management"))
            return
        if mode == 'ADD':
            # Create the login role unless one with that name already exists.
            sql = "SELECT count(*) FROM pg_roles WHERE rolname = '{0}' and rolcanlogin = true".format(user_name)
            count = self.db_session.execute(sql).fetchone()
            if count[0] == 0:
                self.db_session.execute(u"CREATE ROLE {0} login PASSWORD '{1}'".format(user_name, password))
            else:
                # Role exists: let the operator decide whether to proceed
                # (re-connecting the selected Soums to the existing role).
                message_box = QMessageBox()
                message_box.setText(self.tr("Could not execute: {0} already exists. Do you want to connect selected soums?").format(user_name))
                yes_button = message_box.addButton(self.tr("Yes"), QMessageBox.ActionRole)
                message_box.addButton(self.tr("Cancel"), QMessageBox.ActionRole)
                message_box.exec_()
                if not message_box.clickedButton() == yes_button:
                    return
        else:
            # UPDATE: change the password only if the operator typed one
            # (the edit still holding PW_PLACEHOLDER means "unchanged").
            if password != self.PW_PLACEHOLDER:
                self.db_session.execute(u"ALTER ROLE {0} PASSWORD '{1}'".format(user_name, password))
            # Revoke all current group memberships; they are re-granted below.
            groups = self.__groupsByUser(user_name)
            for group in groups:
                self.db_session.execute(u"REVOKE {0} FROM {1}".format(group[0], user_name))
        # Grant every group shown in the membership list.
        for index in range(self.member_lwidget.count()):
            item = self.member_lwidget.item(index)
            sql = "SELECT count(*) FROM pg_roles WHERE rolname = '{0}' and rolcanlogin = true".format(user_name)
            count = self.db_session.execute(sql).fetchone()
            if count[0] == 0:
                self.db_session.execute(u"CREATE ROLE {0} login PASSWORD '{1}'".format(user_name, password))
            self.db_session.execute(u"GRANT {0} TO {1}".format(item.text(), user_name))
        self.db_session.execute("RESET ROLE")
        # Build the comma-separated admin-unit restriction strings and grant
        # the matching per-Soum roles.
        restriction_au_level1 = ''
        restriction_au_level2 = ''
        is_first = 0
        for index in range(self.aimag_lwidget.count()):
            item = self.aimag_lwidget.item(index)
            if item.text() == '*':  # wildcard: grant every Aimag and every Soum
                for index2 in range(self.aimag_cbox.count()):
                    au_level1_code = str(self.aimag_cbox.itemData(index2, Qt.UserRole))
                    if au_level1_code != '*':
                        restriction_au_level1 += au_level1_code + ','
                        # Special treatment for UB's districts:
                        if au_level1_code.startswith('1') or au_level1_code.startswith('01'):
                            restriction_au_level2 += au_level1_code + '00' + ','
                            self.db_session.execute("SET ROLE role_management")
                            self.db_session.execute(u"GRANT s{0}00 TO {1}".format(au_level1_code, user_name))
                            self.db_session.execute("RESET ROLE")
                        for au_level2 in self.db_session.query(AuLevel2).filter(AuLevel2.code.startswith(au_level1_code))\
                                .order_by(AuLevel2.code):
                            restriction_au_level2 += au_level2.code + ','
                            self.db_session.execute("SET ROLE role_management")
                            self.db_session.execute(u"GRANT s{0} TO {1}".format(au_level2.code, user_name))
                            # NOTE(review): duplicate of the GRANT above — harmless but likely unintended.
                            self.db_session.execute(u"GRANT s{0} TO {1}".format(au_level2.code, user_name))
                            self.db_session.execute("RESET ROLE")
                break
            else:
                au_level1_code = str(item.data(Qt.UserRole))
                restriction_au_level1 += au_level1_code + ','
                # Special treatment for UB's districts:
                # if au_level1_code.startswith('1') or au_level1_code.startswith('01'):
                #     restriction_au_level2 += au_level1_code + '00' + ','
                #     self.db_session.execute("SET ROLE role_management")
                #     self.db_session.execute(u"GRANT s{0}00 TO {1}".format(au_level1_code, user_name))
                #     self.db_session.execute("RESET ROLE")
                # The Soum list is processed only once, against the first
                # Aimag in the list (guarded by is_first).
                if is_first == 0:
                    is_first = 1
                    for index2 in range(self.soum_lwidget.count()):
                        item = self.soum_lwidget.item(index2)
                        if item.text() == '*':
                            # Wildcard: grant every Soum of this Aimag.
                            for au_level2 in self.db_session.query(AuLevel2).filter(AuLevel2.code.startswith(au_level1_code))\
                                    .order_by(AuLevel2.code):
                                restriction_au_level2 += au_level2.code + ','
                                self.db_session.execute("SET ROLE role_management")
                                self.db_session.execute(u"GRANT s{0} TO {1}".format(au_level2.code, user_name))
                                self.db_session.execute("RESET ROLE")
                        else:
                            try:
                                au_level2_code = str(item.data(Qt.UserRole))
                                restriction_au_level2 += au_level2_code + ','
                                self.db_session.execute("SET ROLE role_management")
                                self.db_session.execute(u"GRANT s{0} TO {1}".format(au_level2_code, user_name))
                                self.db_session.execute("RESET ROLE")
                            except DatabaseError, e:
                                self.db_session.rollback()
                                PluginUtils.show_error(self, self.tr("Database Query Error"),
                                                       self.tr("You must login different username with member of role management"))
                                return
        # Drop the trailing comma from both restriction strings.
        restriction_au_level1 = restriction_au_level1[:len(restriction_au_level1)-1]
        restriction_au_level2 = restriction_au_level2[:len(restriction_au_level2)-1]
        pa_from = datetime.datetime.today()
        pa_till = datetime.date.max
        role_c = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).count()
        if self.register_edit.text() == None or self.register_edit.text() == '':
            PluginUtils.show_message(None, self.tr("None register"),
                                     self.tr("Register not null!"))
            return
        if mode == 'ADD':
            if role_c != 0:
                # NOTE(review): role_count is computed but the duplicate-user
                # rejection below is commented out, so it is currently unused.
                role_count = self.db_session.query(SetRole).filter(
                    SetRole.user_register == self.register_edit.text()).count()
                # if role_count > 0:
                #     PluginUtils.show_message(None, self.tr("Duplicate user"),
                #                              self.tr("This user already registered!"))
                #     return
            # A new SetRole row becomes active only when no active row exists
            # for this login yet.
            is_active_user = False
            if role_c == 0:
                is_active_user = True
            else:
                active_role_count = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).filter(SetRole.is_active == True).count()
                if active_role_count == 0:
                    is_active_user = True
                else:
                    is_active_user = False
            try:
                count = self.db_session.query(SetRole) \
                    .filter(SetRole.user_name == user_name) \
                    .order_by(func.substr(SetRole.user_name_real, 11, 12).desc()).count()
            except SQLAlchemyError, e:
                PluginUtils.show_error(self, self.tr("File Error"),
                                       self.tr("Error in line {0}: {1}").format(currentframe().f_lineno, e.message))
                return
            if count > 0:
                # Derive the next two-digit suffix for user_name_real from the
                # highest existing one.
                try:
                    max_number_user = self.db_session.query(SetRole) \
                        .filter(SetRole.user_name == user_name) \
                        .order_by(func.substr(SetRole.user_name_real, 11, 12).desc()).first()
                except SQLAlchemyError, e:
                    PluginUtils.show_error(self, self.tr("File Error"),
                                           self.tr("Error in line {0}: {1}").format(currentframe().f_lineno,
                                                                                    e.message))
                    return
                user_numbers = max_number_user.user_name_real[-2:]
                # NOTE(review): only the second digit of the suffix is
                # incremented — breaks past '...9'; confirm intended range.
                new_user_number = (str(int(user_numbers[1]) + 1).zfill(2))
                last_user_name = user_name[:10] + new_user_number
                user_name_real = last_user_name
                employee_type = 2
            else:
                user_name_real = self.username_edit.text()+'01'
                employee_type = 1
            role = SetRole(user_name=user_name, surname=surname, first_name=first_name, phone=phone, user_register=user_register,
                           mac_addresses=mac_addresses, position=position, restriction_au_level1=restriction_au_level1, user_name_real = user_name_real,
                           employee_type = employee_type, restriction_au_level2=restriction_au_level2, pa_from=pa_from, pa_till=pa_till, is_active=is_active_user, email=email)
            self.db_session.add(role)
        else:
            # UPDATE: locate the row to modify — the single active one if it
            # exists, otherwise the row matching the displayed user_name_real.
            active_role_count = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).filter(
                SetRole.is_active == True).count()
            if active_role_count == 1:
                role = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).filter(SetRole.is_active == True).one()
            else:
                role = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).filter(SetRole.user_name_real == self.username_real_lbl.text()).one()
            # for role in roles:
            #     print role.user_name_real
            role.surname = surname
            role.first_name = first_name
            role.phone = phone
            role.user_register = user_register
            role.mac_addresses = mac_addresses
            if active_role_count == 0:
                role.is_active = True
            role.position = position
            role.restriction_au_level1 = restriction_au_level1
            role.restriction_au_level2 = restriction_au_level2
            role.email = email
        self.db_session.commit()
        # Refresh the list and keep the edited user selected.
        self.__populate_user_role_lwidget()
        item = self.user_role_lwidget.findItems(user_name, Qt.MatchExactly)[0]
        row = self.user_role_lwidget.row(item)
        self.user_role_lwidget.setCurrentRow(row)
        return True
    else:
        # Without role-management privilege only the password can be changed.
        if password != self.PW_PLACEHOLDER:
            self.db_session.execute(u"ALTER ROLE {0} PASSWORD '{1}'".format(user_name, password))
        self.db_session.commit()
        self.__populate_user_role_lwidget()
        item = self.user_role_lwidget.findItems(user_name, Qt.MatchExactly)[0]
        row = self.user_role_lwidget.row(item)
        self.user_role_lwidget.setCurrentRow(row)
        return True
def __validate_user_input(self, mode='ADD'):
    """Validate the user form before an ADD or UPDATE.

    Shows a message box describing the first failed check and returns False;
    returns True when every check passes.

    :param mode: 'ADD' for a new user, 'UPDATE' for an existing one.
    """
    if mode == 'UPDATE':
        # The login name is the identity of the role; it must not change.
        if self.username_edit.text().strip() != self.selected_user:
            PluginUtils.show_message(None, self.tr("Username can't be modified"),
                                     self.tr("The username of an existing user cannot be modified!"))
            self.username_edit.setText(self.selected_user)
            return False
    if self.username_edit.text().strip() == 'role_manager' \
            and not self.member_lwidget.findItems('role_management', Qt.MatchExactly):
        PluginUtils.show_message(self, self.tr("Required group"),
                                 self.tr("The user 'role_manager' must be member of group 'role_management'."))
        return False
    if len(self.username_edit.text().strip()) == 0:
        PluginUtils.show_message(self, self.tr("No Username"), self.tr("Provide a valid username!"))
        return False
    # NOTE(review): in UPDATE mode the edit may still hold PW_PLACEHOLDER;
    # presumably the placeholder is >= 8 characters — confirm.
    if len(self.password_edit.text().strip()) < 8:
        PluginUtils.show_message(self, self.tr("Invalid Password"),
                                 self.tr("Provide a valid password that consists of 8 characters or more!"))
        return False
    if self.password_edit.text().strip() != self.retype_password_edit.text().strip():
        PluginUtils.show_message(self, self.tr("Passwords Not Matching"),
                                 self.tr("Password and retyped password are not identical!"))
        return False
    if len(self.surname_edit.text().strip()) == 0:
        PluginUtils.show_message(self, self.tr("No Surname"), self.tr("Provide a valid surname!"))
        return False
    if len(self.firstname_edit.text().strip()) == 0:
        PluginUtils.show_message(self, self.tr("No First Name"), self.tr("Provide a valid first name!"))
        return False
    if len(self.email_edit.text().strip()) == 0:
        PluginUtils.show_message(self, self.tr("No Email"), self.tr("Provide a valid email!"))
        return False
    # BUG FIX: this check previously re-tested firstname_edit under the
    # "No Position" message, so an empty position was never caught; validate
    # the position combo box (the widget __add_or_update_role reads) instead.
    if len(self.position_cbox.currentText().strip()) == 0:
        PluginUtils.show_message(self, self.tr("No Position"), self.tr("Provide a valid position!"))
        return False
    if self.member_lwidget.count() == 0:
        PluginUtils.show_message(self, self.tr("No Group Membership"),
                                 self.tr("The user must be member of at least one group!"))
        return False
    # Administrative groups are exempt from the admin-unit requirements.
    if not self.member_lwidget.findItems('role_management', Qt.MatchExactly) \
            and not self.member_lwidget.findItems('db_creation', Qt.MatchExactly):
        if self.aimag_lwidget.count() == 0:
            PluginUtils.show_message(self, self.tr("No Aimag/Duureg"),
                                     self.tr("The user must be granted at least one Aimag/Duureg!"))
            return False
        if self.soum_lwidget.count() == 0:
            PluginUtils.show_message(self, self.tr("No Soum"),
                                     self.tr("The user must granted at least one Soum!"))
            return False
    return True
@pyqtSlot()
def on_down_groups_button_clicked(self):
    """Move the selected group from the available list into the membership list."""
    current = self.group_lwidget.currentItem()
    if current is None:
        return
    group = current.text()
    # Separator rows are decorative and cannot be granted.
    if group.find(self.GROUP_SEPARATOR) != -1:
        return
    self.group_lwidget.takeItem(self.group_lwidget.row(current))
    self.member_lwidget.addItem(group)
    # land_office_administration implies contracting_update membership.
    if group == 'land_office_administration':
        if not self.member_lwidget.findItems('contracting_update', Qt.MatchExactly):
            implied = self.group_lwidget.findItems('contracting_update', Qt.MatchExactly)[0]
            self.group_lwidget.takeItem(self.group_lwidget.row(implied))
            self.member_lwidget.addItem(implied.text())
@pyqtSlot()
def on_up_groups_button_clicked(self):
    """Move the selected group out of the membership list back to the available list."""
    current = self.member_lwidget.currentItem()
    if current is None:
        return
    group = current.text()
    if group == 'log_view':  # mandatory membership; cannot be revoked here
        return
    self.member_lwidget.takeItem(self.member_lwidget.row(current))
    # Administrative groups are kept at the top of the available list.
    if group in ('role_management', 'db_creation', 'land_office_administration'):
        self.group_lwidget.insertItem(0, group)
    else:
        self.group_lwidget.addItem(group)
@pyqtSlot()
def on_down_aimag_button_clicked(self):
    """Add the Aimag chosen in the combo box to the granted-Aimag list.

    Skips duplicates and does nothing when the '*' wildcard entry is
    already present. Choosing '*' replaces both lists with wildcards;
    adding a second Aimag resets the Soum list to '*'.
    """
    au_level1_name = self.aimag_cbox.currentText()
    au_level1_code = self.aimag_cbox.itemData(self.aimag_cbox.currentIndex(), Qt.UserRole)
    if len(self.aimag_lwidget.findItems(au_level1_name, Qt.MatchExactly)) == 0:
        if len(self.aimag_lwidget.findItems("*", Qt.MatchExactly)) == 0:
            if au_level1_name == '*':
                self.aimag_lwidget.clear()
                self.soum_lwidget.clear()
                item = QListWidgetItem("*")
                item.setData(Qt.UserRole, "*")
                self.soum_lwidget.addItem(item)
            item = QListWidgetItem(au_level1_name)
            item.setData(Qt.UserRole, au_level1_code)
            self.aimag_lwidget.addItem(item)
            self.aimag_lwidget.setCurrentItem(item)
            # With more than one Aimag granted, per-Soum selection is not
            # supported; fall back to the '*' wildcard for Soums.
            if self.aimag_lwidget.count() > 1:
                self.soum_lwidget.clear()
                item = QListWidgetItem("*")
                item.setData(Qt.UserRole, "*")
                self.soum_lwidget.addItem(item)
@pyqtSlot()
def on_up_aimag_button_clicked(self):
    """Remove the selected Aimag and reset the Soum list accordingly."""
    current_row = self.aimag_lwidget.row(self.aimag_lwidget.currentItem())
    self.aimag_lwidget.takeItem(current_row)
    if self.aimag_lwidget.count() > 0:
        first = self.aimag_lwidget.item(0)
        self.aimag_lwidget.setItemSelected(first, False)
        self.aimag_lwidget.setCurrentItem(first)
    self.soum_lwidget.clear()
@pyqtSlot()
def on_down_soum_button_clicked(self):
    """Add the Soum chosen in the combo box to the granted-Soum list.

    Duplicates are skipped and nothing is added when the '*' wildcard entry
    is already present. Choosing '*' clears the list first so the wildcard
    is the only entry. (Fixed: removed the unused 'itemsList' local.)
    """
    au_level2_name = self.soum_cbox.currentText()
    au_level2_code = self.soum_cbox.itemData(self.soum_cbox.currentIndex(), Qt.UserRole)
    # Entries are displayed as '<name>_<code>'.
    if len(self.soum_lwidget.findItems(au_level2_name + '_' + au_level2_code, Qt.MatchExactly)) == 0:
        if len(self.soum_lwidget.findItems("*", Qt.MatchExactly)) == 0:
            if au_level2_name == '*':
                self.soum_lwidget.clear()
            item = QListWidgetItem(au_level2_name + '_' + au_level2_code)
            item.setData(Qt.UserRole, au_level2_code)
            self.soum_lwidget.addItem(item)
@pyqtSlot()
def on_up_soum_button_clicked(self):
    """Remove the currently selected Soum from the granted-Soum list."""
    selected = self.soum_lwidget.currentItem()
    self.soum_lwidget.takeItem(self.soum_lwidget.row(selected))
@pyqtSlot()
def on_delete_button_clicked(self):
    """Delete the selected user: both the SetRole row and the PostgreSQL role.

    Refuses to delete the mandatory 'role_manager' role and the currently
    logged-on user; asks for confirmation before dropping anything.
    """
    item = self.user_role_lwidget.currentItem()
    if item is None:
        return
    user_name = item.text()
    if user_name == 'role_manager':
        PluginUtils.show_message(self, self.tr("Delete User"),
                                 self.tr("The user 'role_manager' is a required role and cannot be deleted."))
        return
    # The user logged on must not delete himself:
    if self.__logged_on_user() == user_name:
        PluginUtils.show_message(self, self.tr("Delete User"),
                                 self.tr("The user currently logged on cannot be deleted."))
        return
    message = "Delete user role {0}".format(user_name)
    if QMessageBox.No == QMessageBox.question(self, self.tr("Delete User Role"),
                                              message, QMessageBox.Yes | QMessageBox.No, QMessageBox.No):
        return
    try:
        user_role = self.db_session.query(SetRole).filter(SetRole.user_name == user_name).one()
        self.db_session.delete(user_role)
        # Dropping a role requires the role_management group.
        self.db_session.execute("SET ROLE role_management")
        self.db_session.execute(u"DROP ROLE {0}".format(user_name))
        self.db_session.execute("RESET ROLE")
        self.db_session.commit()
        self.__populate_user_role_lwidget()
        PluginUtils.show_message(self, self.tr("User Role Management"), self.tr('User role deleted.'))
    except DatabaseError, e:
        self.db_session.rollback()
        PluginUtils.show_error(self, self.tr("Database Query Error"), self.tr("Could not execute: {0}").format(e.message))
def __groupsByUser(self, user_name):
    """Return the list of (rolname,) rows for every group the login belongs to."""
    sql = ("select rolname from pg_user join pg_auth_members on (pg_user.usesysid=pg_auth_members.member) "
           "join pg_roles on (pg_roles.oid=pg_auth_members.roleid) where pg_user.usename=:bindName")
    return self.db_session.execute(sql, {'bindName': user_name}).fetchall()
def __logged_on_user(self):
    """Return the name of the database role currently logged on."""
    row = self.db_session.execute("SELECT USER").fetchone()
    return row[0]
@pyqtSlot()
def on_help_button_clicked(self):
    """Open the CHM help viewer at the user-role-management topic.

    NOTE(review): Windows-only ('hh.exe'). The backslashes in the literal
    survive only because '\o' and '\h' are not escape sequences; a raw
    string or os.path.join would be safer — confirm before changing.
    """
    os.system("hh.exe "+ str(os.path.dirname(os.path.realpath(__file__))[:-10]) +"help\output\help_lm2.chm::/html/user_role_management.htm")
@pyqtSlot(QListWidgetItem)
def on_user_role_lwidget_itemDoubleClicked(self, item):
    """Open the detail dialog for the double-clicked user."""
    detail_dlg = UserRoleManagementDetialDialog(item.text())
    detail_dlg.exec_()
@pyqtSlot()
def on_settings_button_clicked(self):
    """Open the detail dialog for the currently selected user, if any."""
    selected = self.user_role_lwidget.currentItem()
    if selected is None:
        return
    detail_dlg = UserRoleManagementDetialDialog(selected.text())
    detail_dlg.exec_()
def __load_default_ritht_grud(self):
    """Populate the settings position table with every known position type.

    Fixed: removed the unused 'aa' ClGroupRole query — a wasted database
    round-trip whose result was never read.

    NOTE(review): the method name looks like a typo for
    '__load_default_right_grud'; kept unchanged for callers.
    """
    positions = self.db_session.query(ClPositionType).all()
    for position in positions:
        row = self.settings_position_twidget.rowCount()
        self.settings_position_twidget.insertRow(row)
        item = QTableWidgetItem(u'{0}'.format(position.description))
        item.setData(Qt.UserRole, position.code)
        self.settings_position_twidget.setItem(row, 0, item)
@pyqtSlot()
def on_load_users_button_clicked(self):
    """Reload the user table from the SetRole records."""
    self.__load_user_roles()
def __load_user_roles(self):
    """Fill the user table with every SetRole whose login starts with 'user'."""
    self.user_twidget.setRowCount(0)
    pattern = "user" + "%"
    for user in self.db_session.query(SetRole).filter(SetRole.user_name.like(pattern)).all():
        row = self.user_twidget.rowCount()
        self.user_twidget.insertRow(row)
        # Display as "(<real name>) S.Firstname".
        display = '(' + user.user_name_real + ') ' + user.surname[:1] + '.' + user.first_name
        cell = QTableWidgetItem(u'{0}'.format(display))
        cell.setData(Qt.UserRole, user.user_name_real)
        self.user_twidget.setItem(row, 0, cell)
@pyqtSlot()
def on_load_position_button_clicked(self):
    """Reload the position checkbox list for the selected user."""
    self.__load_all_positions()
def __load_all_positions(self):
    """List every position type, check-marking those assigned to the selected user."""
    self.position_twidget.setRowCount(0)
    if len(self.user_twidget.selectedItems()) == 0:
        PluginUtils.show_message(self, self.tr("Selection"), self.tr("Please select user."))
        return
    user_item = self.user_twidget.item(self.user_twidget.currentRow(), 0)
    user_name_real = user_item.data(Qt.UserRole)
    for position in self.db_session.query(ClPositionType).all():
        row = self.position_twidget.rowCount()
        self.position_twidget.insertRow(row)
        assigned = self.db_session.query(SetUserPosition).\
            filter(SetUserPosition.user_name_real == user_name_real).\
            filter(SetUserPosition.position == position.code).count()
        cell = QTableWidgetItem(u'{0}'.format(position.description))
        cell.setData(Qt.UserRole, position.code)
        cell.setCheckState(Qt.Checked if assigned else Qt.Unchecked)
        self.position_twidget.setItem(row, 0, cell)
@pyqtSlot(QTableWidgetItem)
def on_user_twidget_itemClicked(self, item):
    """Refresh the position and rights tables for the clicked user row."""
    self.position_twidget.setRowCount(0)
    self.right_grud_twidget.setRowCount(0)
    clicked = self.user_twidget.item(self.user_twidget.currentRow(), 0)
    user_name_real = clicked.data(Qt.UserRole)
    self.__load_user_positions(user_name_real)
    self.__load_user_right_types(user_name_real)
def __load_user_right_types(self, user_name_real):
    """List the user's stored group-role rights, one checkbox column per CRUD flag.

    Column 0 holds the right description; columns 1-4 hold the
    view/add/remove/update check states read from SetUserGroupRole.
    Roles without exactly one stored row are skipped.
    """
    for right_type in self.db_session.query(ClGroupRole).all():
        stored = self.db_session.query(SetUserGroupRole). \
            filter(SetUserGroupRole.user_name_real == user_name_real). \
            filter(SetUserGroupRole.group_role == right_type.code)
        if stored.count() != 1:
            continue
        user_right = stored.one()
        row = self.right_grud_twidget.rowCount()
        self.right_grud_twidget.insertRow(row)
        name_item = QTableWidgetItem(u'{0}'.format(right_type.description))
        name_item.setData(Qt.UserRole, right_type.code)
        self.right_grud_twidget.setItem(row, 0, name_item)
        flags = (user_right.r_view, user_right.r_add,
                 user_right.r_remove, user_right.r_update)
        for column, flag in enumerate(flags, 1):
            check_item = QTableWidgetItem()
            check_item.setData(Qt.UserRole, right_type.code)
            check_item.setCheckState(Qt.Checked if flag else Qt.Unchecked)
            self.right_grud_twidget.setItem(row, column, check_item)
def __load_user_positions(self, user_name_real):
    """List the user's positions: the SetRole default first (if unstored), then stored rows."""
    user_positions = self.db_session.query(SetUserPosition). \
        filter(SetUserPosition.user_name_real == user_name_real).all()
    set_role = self.db_session.query(SetRole).filter(SetRole.user_name_real == user_name_real).one()
    default_position = self.db_session.query(ClPositionType). \
        filter(ClPositionType.code == set_role.position).one()
    default_stored = self.db_session.query(SetUserPosition). \
        filter(SetUserPosition.user_name_real == user_name_real). \
        filter(SetUserPosition.position == default_position.code).count()
    if default_stored == 0:
        # The role's default position has no SetUserPosition row yet;
        # show it pre-checked at the top.
        row = self.position_twidget.rowCount()
        self.position_twidget.insertRow(row)
        cell = QTableWidgetItem(u'{0}'.format(default_position.description))
        cell.setData(Qt.UserRole, default_position.code)
        cell.setCheckState(Qt.Checked)
        self.position_twidget.setItem(row, 0, cell)
    for user_position in user_positions:
        position = self.db_session.query(ClPositionType). \
            filter(ClPositionType.code == user_position.position).one()
        stored = self.db_session.query(SetUserPosition). \
            filter(SetUserPosition.user_name_real == user_name_real). \
            filter(SetUserPosition.position == position.code).count()
        row = self.position_twidget.rowCount()
        self.position_twidget.insertRow(row)
        cell = QTableWidgetItem(u'{0}'.format(position.description))
        cell.setData(Qt.UserRole, position.code)
        cell.setCheckState(Qt.Unchecked if stored == 0 else Qt.Checked)
        self.position_twidget.setItem(row, 0, cell)
@pyqtSlot()
def on_load_default_settings_button_clicked(self):
    """Fill the rights table with the defaults defined for the user's position.

    Column 0 holds the role description; columns 1-4 hold the
    view/add/remove/update check states from SetPositionGroupRole.
    """
    self.right_grud_twidget.setRowCount(0)
    user_item = self.user_twidget.item(self.user_twidget.currentRow(), 0)
    user_name_real = user_item.data(Qt.UserRole)
    user = self.db_session.query(SetRole).filter_by(user_name_real=user_name_real).one()
    defaults = self.db_session.query(SetPositionGroupRole). \
        filter(SetPositionGroupRole.position == user.position).all()
    for default in defaults:
        group_role = self.db_session.query(ClGroupRole).filter(ClGroupRole.code == default.group_role).one()
        row = self.right_grud_twidget.rowCount()
        self.right_grud_twidget.insertRow(row)
        name_item = QTableWidgetItem(u'{0}'.format(group_role.description))
        name_item.setData(Qt.UserRole, group_role.code)
        self.right_grud_twidget.setItem(row, 0, name_item)
        flags = (default.r_view, default.r_add, default.r_remove, default.r_update)
        for column, flag in enumerate(flags, 1):
            check_item = QTableWidgetItem()
            check_item.setData(Qt.UserRole, group_role.code)
            check_item.setCheckState(Qt.Checked if flag else Qt.Unchecked)
            self.right_grud_twidget.setItem(row, column, check_item)
@pyqtSlot(QTableWidgetItem)
def on_settings_position_twidget_itemClicked(self, item):
    """Show the CRUD-right matrix for the clicked position.

    One row per group role; columns 0/1 are the English/local descriptions,
    columns 2-5 are the view/add/remove/update checkboxes. Roles without a
    stored SetPositionGroupRole row are shown fully unchecked.

    Fixed: removed an up-front query for all SetPositionGroupRole rows of
    the position whose result was never used (each role is queried
    individually below); also collapsed the duplicated per-column code.
    """
    self.settings_right_grud_twidget.setRowCount(0)
    cur_row = self.settings_position_twidget.currentRow()
    position_item = self.settings_position_twidget.item(cur_row, 0)
    position_code = position_item.data(Qt.UserRole)
    for group_role in self.db_session.query(ClGroupRole).all():
        stored = self.db_session.query(SetPositionGroupRole). \
            filter(SetPositionGroupRole.position == position_code). \
            filter(SetPositionGroupRole.group_role == group_role.code)
        if stored.count() == 1:
            position_grud = stored.one()
            flags = (position_grud.r_view, position_grud.r_add,
                     position_grud.r_remove, position_grud.r_update)
        else:
            # No stored rights for this role under the position.
            flags = (False, False, False, False)
        row = self.settings_right_grud_twidget.rowCount()
        self.settings_right_grud_twidget.insertRow(row)
        for column, text in ((0, group_role.description_en), (1, group_role.description)):
            cell = QTableWidgetItem(u'{0}'.format(text))
            cell.setData(Qt.UserRole, group_role.code)
            self.settings_right_grud_twidget.setItem(row, column, cell)
        for column, flag in enumerate(flags, 2):
            cell = QTableWidgetItem()
            cell.setData(Qt.UserRole, group_role.code)
            cell.setCheckState(Qt.Checked if flag else Qt.Unchecked)
            self.settings_right_grud_twidget.setItem(row, column, cell)
def __start_fade_out_timer(self):
    """Start a 10 ms timer that gradually fades the status label out."""
    self.time_counter = 500
    self.timer = QTimer()
    self.timer.timeout.connect(self.__fade_status_message)
    self.timer.start(10)
def __fade_status_message(self):
    """Timer slot: step the status label toward transparency.

    time_counter starts at 500, so the alpha value starts at 250 and
    drops by 0.5 per tick; the timer is stopped when the counter
    reaches zero.
    """
    opacity = int(self.time_counter * 0.5)
    self.status_label.setStyleSheet("QLabel {color: rgba(255,0,0," + str(opacity) + ");}")
    self.status_label.setText(self.tr('Changes applied successfully.'))
    if self.time_counter == 0:
        self.timer.stop()
    self.time_counter -= 1
def __save_settings(self):
    """Persist all three settings tables; return True on success, False on a SQL error."""
    try:
        self.__save_right_settings()
        self.__save_user_positions()
        self.__save_user_right_type()
    except exc.SQLAlchemyError as e:
        PluginUtils.show_error(self, self.tr("SQL Error"), e.message)
        return False
    return True
def __save_user_right_type(self):
    """Write the per-user CRUD checkbox columns back to SetUserGroupRole rows.

    Creates a row when none exists, updates it when exactly one exists,
    and leaves ambiguous duplicates untouched.
    """
    if len(self.user_twidget.selectedItems()) == 0:
        return
    user_item = self.user_twidget.item(self.user_twidget.currentRow(), 0)
    user_name_real = user_item.data(Qt.UserRole)
    flag_attrs = ('r_view', 'r_add', 'r_remove', 'r_update')
    for row in range(self.right_grud_twidget.rowCount()):
        group_role = self.right_grud_twidget.item(row, 0).data(Qt.UserRole)
        existing = self.db_session.query(SetUserGroupRole). \
            filter(SetUserGroupRole.group_role == group_role). \
            filter(SetUserGroupRole.user_name_real == user_name_real)
        existing_count = existing.count()
        if existing_count == 0:
            user_right = SetUserGroupRole()
            user_right.user_name_real = user_name_real
            user_right.group_role = group_role
        elif existing_count == 1:
            user_right = existing.one()
        else:
            continue
        # Columns 1-4 hold the view/add/remove/update checkboxes.
        for column, attr in enumerate(flag_attrs, 1):
            checked = self.right_grud_twidget.item(row, column).checkState() == Qt.Checked
            setattr(user_right, attr, checked)
        if existing_count == 0:
            self.db_session.add(user_right)
def __save_user_positions(self):
selected_items = self.user_twidget.selectedItems()
if len(selected_items) == 0:
return
cur_row = self.user_twidget.currentRow()
item = self.user_twidget.item(cur_row, 0)
user_name_real = item.data(Qt.UserRole)
for row in range(self.position_twidget.rowCount()):
check_item = self.position_twidget.item(row, 0)
position_code = check_item.data(Qt.UserRole)
user_positions_count = self.db_session.query(SetUserPosition).\
filter(SetUserPosition.position == position_code) .\
filter(SetUserPosition.user_name_real == user_name_real).count()
if check_item.checkState() == Qt.Checked:
if user_positions_count == 0:
user_position = SetUserPosition()
user_position.user_name_real = user_name_real
user_position.position = position_code
self.db_session.add(user_position)
else:
if user_positions_count == 1:
self.db_session.query(SetUserPosition). \
filter(SetUserPosition.position == position_code). \
filter(SetUserPosition.user_name_real == user_name_real).delete()
def __save_right_settings(self):
selected_items = self.settings_position_twidget.selectedItems()
if len(selected_items) == 0:
return
cur_row = self.settings_position_twidget.currentRow()
item = self.settings_position_twidget.item(cur_row, 0)
position_code = item.data(Qt.UserRole)
for row in range(self.settings_right_grud_twidget.rowCount()):
group_role = self.settings_right_grud_twidget.item(row, 0).data(Qt.UserRole)
position_gruds_c = self.db_session.query(SetPositionGroupRole). \
filter(SetPositionGroupRole.position == position_code). \
filter(SetPositionGroupRole.group_role == group_role).count()
if position_gruds_c == 1:
position_gruds = self.db_session.query(SetPositionGroupRole).\
filter(SetPositionGroupRole.position == position_code). \
filter(SetPositionGroupRole.group_role == group_role).one()
check_view_item = self.settings_right_grud_twidget.item(row, 2)
check_add_item = self.settings_right_grud_twidget.item(row, 3)
check_delete_item = self.settings_right_grud_twidget.item(row, 4)
check_update_item = self.settings_right_grud_twidget.item(row, 5)
if check_view_item.checkState() == Qt.Checked:
position_gruds.r_view = True
else:
position_gruds.r_view = False
if check_add_item.checkState() == Qt.Checked:
position_gruds.r_add = True
else:
position_gruds.r_add = False
if check_delete_item.checkState() == Qt.Checked:
position_gruds.r_remove = True
else:
position_gruds.r_remove = False
if check_update_item.checkState() == Qt.Checked:
position_gruds.r_update = True
else:
position_gruds.r_update = False
else:
position_gruds = SetPositionGroupRole()
position_gruds.group_role = group_role
position_gruds.position = position_code
check_view_item = self.settings_right_grud_twidget.item(row, 2)
check_add_item = self.settings_right_grud_twidget.item(row, 3)
check_delete_item = self.settings_right_grud_twidget.item(row, 4)
check_update_item = self.settings_right_grud_twidget.item(row, 5)
if check_view_item.checkState() == Qt.Checked:
position_gruds.r_view = True
else:
position_gruds.r_view = False
if check_add_item.checkState() == Qt.Checked:
position_gruds.r_add = True
else:
position_gruds.r_add = False
if check_delete_item.checkState() == Qt.Checked:
position_gruds.r_remove = True
else:
position_gruds.r_remove = False
if check_update_item.checkState() == Qt.Checked:
position_gruds.r_update = True
else:
position_gruds.r_update = False
self.db_session.add(position_gruds)
    @pyqtSlot()
    def on_apply_button_clicked(self):
        # Stage all pending edits; abort without committing if saving fails.
        if not self.__save_settings():
            return
        self.db_session.commit()
        # Fade the window out once the settings are safely persisted.
        self.__start_fade_out_timer()
| [
"aagii_csms@yahoo.com"
] | aagii_csms@yahoo.com |
c3c372e355b0b1fee36defc54ab53ac4f7d61fc6 | dda2f6f4f823ec3571f0a8474a51a3498166b1f9 | /rolle/migrations/0002_auto_20190920_1650.py | d2c3ede126b44ad962435e1f8736da9c4f8bd889 | [] | no_license | prauscher/thwin | 3761eef2e779491d52c5093ca0ce9841d218e743 | bab06bc5659d3778e81b92995e46b826da9cbd68 | refs/heads/master | 2020-07-30T08:31:45.522366 | 2019-09-22T21:07:08 | 2019-09-22T21:07:08 | 210,156,570 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 725 | py | # Generated by Django 2.2.5 on 2019-09-20 16:50
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: redefines Freigabe's foreign keys so that
    # Berechtigung and Rolle each gain a reverse accessor named "freigaben".

    dependencies = [
        ('rolle', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='freigabe',
            name='berechtigung',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='freigaben', to='rolle.Berechtigung'),
        ),
        migrations.AlterField(
            model_name='freigabe',
            name='rolle',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='freigaben', to='rolle.Rolle'),
        ),
    ]
| [
"prauscher@prauscher.de"
] | prauscher@prauscher.de |
5f6a1e686ceb5a0fd3ce59392ce011e48b736289 | e427906785f3076ea7cf5f0bc87ba2edffb926b6 | /Bakhteev2019/projects/Bakhteev2017Hypergrad/code/pyfos/hyperoptimizers/hoag_optimize.py | b606cc4e8b1163da2cca8fdc76fe02847bda54d1 | [] | no_license | rigof80/PhDThesis | d9e03b84b8118f8c9fd677622126bef88ea2eda8 | 0dfc331608059427ab2bc6fe61ac127b5dbd0fe3 | refs/heads/master | 2022-01-09T04:32:09.310722 | 2019-05-01T17:59:45 | 2019-05-01T17:59:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,817 | py | import sys
sys.path.append('.')
import theano
import theano.tensor as T
import numpy as np
import random
from structures import HyperparameterOptimization
import random
import time
from scipy.optimize import minimize
import gc
def hoag_optimize(trainig_criterion, model_constructor, param_optimizer, trial_num , batch_size, batch_size2, train_iteration_num, X_data, Y_data, hyperparams, internal_optimize_learning_rate=10**(-5), internal_optimize_eps = 0.98, limits=None, max_abs_err = 10**10,
        lr=0.0, verbose=0):
    """HOAG-style hyperparameter optimization with approximate hypergradients.

    Per trial: (1) train the model parameters, (2) approximately solve the
    linear system Hq = g_1 by gradient descent on ||Hq - g_1||^2,
    (3) form the hypergradient g_2 - (d2C/dlambda dw) q, and (4) take a
    projected step on the hyperparameters, shrinking `lr` on bad updates.

    NOTE(review): `lr` defaults to 0.0 but is zipped/indexed below as a
    *list* of per-hyperparameter step sizes -- callers must pass a list.
    `batch_size` and `max_abs_err` are not used in the current body.
    Only a single hyperparameter tensor is supported (checked below).
    Returns a HyperparameterOptimization whose best_values are the *last*
    trial's hyperparameter values, not the best-scoring ones.
    """
    history = []  # one (hyperparam values, validation score) entry per trial
    dataset_size = np.array(X_data).shape[0]
    X_datas = theano.shared(X_data)
    Y_datas = theano.shared(Y_data)
    if len(hyperparams)>1:
        raise NotImplementedError('Sorry, not implemented: num of hyperparams > 1')
    training_procedure = trainig_criterion( model_constructor, param_optimizer,X_data, Y_data )
    k = len(training_procedure.models)  # number of submodels (e.g. CV folds)
    """
    1. solve
    2. make hessian optimization
    3. make derivatives
    4. correct
    """
    # Symbolic per-submodel train/validation inputs; the `indices*` vectors
    # select minibatch rows out of the shared dataset via `givens`.
    Xs = [T.matrix() for _ in xrange(k)]
    Ys = [T.vector(dtype=Y_data.dtype) for _ in xrange(k)]
    indices = [T.ivector() for _ in xrange(k)]
    Xs2 = [T.matrix() for _ in xrange(k)]
    Ys2 = [T.vector(dtype=Y_data.dtype) for _ in xrange(k)]
    indices2 = [T.ivector() for _ in xrange(k)]
    costs = []
    givens = []
    costs_valid = []
    all_params = []
    for X,Y, index, model, X2, Y2, index2 in zip(Xs, Ys, indices, training_procedure.models,Xs2, Ys2, indices2):
        train_cost = model.cost(X, Y)
        all_params.append(model.params)
        validation_cost = model.validation(X2, Y2)
        givens.append((X,X_datas[index]))
        givens.append((Y,Y_datas[index]))
        givens.append((X2,X_datas[index2]))
        givens.append((Y2,Y_datas[index2]))
        costs.append(-train_cost) #using negative for article correspondence
        costs_valid.append(-validation_cost) #using negative for article correspondence
    valid_cost = T.mean(costs_valid)
    cost = T.mean(costs)
    # q approximates the solution of the linear system H q = g_1.
    q = [theano.shared(np.zeros(len(all_params[0].eval())).astype(theano.config.floatX)) for _ in xrange(k)]
    h_2 = T.grad(cost,all_params)
    Hq = T.Rop(h_2, all_params, q) #submodels are independent
    g_1 = T.grad(valid_cost, all_params) #test: 2x2
    g_2 = T.grad(valid_cost, hyperparams, disconnected_inputs='ignore')
    g_2 = theano.function(indices2, g_2, givens=givens, on_unused_input='ignore')
    h_1 = T.grad(cost, all_params) #test: 2x2
    #h_1_2s = [theano.gradient.jacobian(h_1_, hyperparams, disconnected_inputs='ignore' ) for h_1_, q_ in zip(h_1, q)]
    #h_1_2s_conc = T.concatenate([h_[0] for h_ in h_1_2s], axis=0) #test expecting: 4x2
    #h_1_2_q = theano.function(indices, T.dot(h_1_2s_conc.T, T.concatenate(q)), givens=givens)
    h_1_2s = T.Lop(T.concatenate(h_1), hyperparams, T.concatenate(q))
    h_1_2_q = theano.function(indices, h_1_2s[0], givens=givens)
    # Gradient-descent objective for step (2): minimize ||Hq - g_1||^2.
    internal_cost = T.mean((T.concatenate(Hq) - T.concatenate(g_1))**2)
    internal_grad = T.grad(internal_cost, q)
    updates = [(q_, q_-internal_optimize_learning_rate*internal_grad_) for q_, internal_grad_ in zip(q, internal_grad)]
    internal_update = theano.function(indices+indices2, internal_cost, givens=givens, updates= updates+model.train_updates)
    internal_monitor = theano.function(indices+indices2, internal_cost, givens=givens)
    #internal_update = theano.function(indices+indices2, Hq, givens=givens, on_unused_input='ignore')
    for trial in xrange(trial_num):
        gc.collect()
        for m in training_procedure.models:
            m.respawn()  # reset submodel parameters for a fresh training run
        if verbose>=0 :
            print 'trial ', trial
        # --- step 1: train the model parameters ---
        for i in xrange(train_iteration_num):
            res = training_procedure.do_train()
            if verbose>=0 and (verbose==0 or i%verbose==0):
                print 'iteration {0}, internal loss={1}'.format(str(i), str(res))
        valid_score = training_procedure.do_validation()
        history.append(([h.eval() for h in hyperparams], valid_score))
        if verbose>=0:
            print 'validation score: ', valid_score
        if verbose>=0:
            print 'internal optimization'
        # --- step 2: approximately solve Hq = g_1; stop once the loss
        # plateaus (ratio of consecutive losses >= internal_optimize_eps) ---
        err = None
        rel_err = -1# -1
        attemp_num = 10
        while rel_err<internal_optimize_eps:
            sample1 = [random.sample(ti,batch_size2) for ti in training_procedure.train_indices]
            sample2 = [random.sample(vi,batch_size2) for vi in training_procedure.validation_indices]
            err_new = internal_update(*(sample1+sample2))
            gc.collect()
            if err is not None:
                rel_err = min(err_new,err)/max(err_new,err)
            #qs = [q_.eval() for q_ in q]
            #if attemp_num> 0 and (np.isnan(np.mean(qs)) or np.isinf(np.mean(qs)) or (err_new > max_abs_err)) :
            #    attemp_num-=1
            #    print 'bad internal learning rate', err_new, err, np.mean(qs)
            #    if internal_optimize_learning_rate/10 > 0:
            #        internal_optimize_learning_rate = internal_optimize_learning_rate/10
            #        for q_ in q:
            #            q_.set_value(np.zeros(len(all_params[0].eval())))
            #        err = None
            #        print 'updating learning rate', internal_optimize_learning_rate
            err = err_new#abs(err_new - err)/(err + err_new)
            if verbose>=0:
                print rel_err, err
        # --- step 3: hypergradient = g_2 - (d2C/dlambda dw) q,
        # estimated on fresh minibatches ---
        sample_t = [random.sample(ti, batch_size2) for ti in training_procedure.train_indices]
        sample_v = [random.sample(vi, batch_size2) for vi in training_procedure.validation_indices]
        time_s = time.time()
        #print len(g_2(*sample1))
        #print len(h_1_2_q(*sample2))
        grads = g_2(*sample_v) - h_1_2_q(*sample_t)
        #print 'TIME', time_s - time.time()
        #g_2(*training_procedure.validation_indices)##g_2(*training_procedure.validation_indices)# - h_1_2_q(*training_procedure.train_indices)
        #print 'grads', grads
        #print h_1_2_q(*training_procedure.train_indices)
        # --- step 4: hyperparameter step, retried with a 10x smaller lr
        # whenever the update leaves the allowed `limits` box ---
        good_update = False
        attemp_num = 10
        ####TODO
        while not good_update and attemp_num>0:
            good_update = True
            if limits:
                old_values = [h.eval() for h in hyperparams]
            for h,l,g in zip(hyperparams, lr,grads):
                #print (g).dtype, (h.eval()).dtype, type(l)
                h.set_value(h.eval() - l * g)
            if limits:
                h_id = -1
                for h, l in zip(hyperparams, limits):
                    h_id+=1
                    he = h.eval()
                    if np.max((he))>l[1] or np.min((he))<l[0] or np.isnan(np.max(he)) or np.isinf(np.max(he)):
                        print 'bad hyperparam update'
                        print he,' vs limit ',l
                        if np.isnan(np.max(he)) or np.isinf(np.max(he)):
                            # NOTE(review): `o` is only bound by the loop
                            # below (or a previous iteration); reaching this
                            # branch first raises NameError -- verify.
                            h.set_value(o)
                        else:
                            h.set_value(np.minimum(l[1], np.maximum(l[0], he)))
                        # Roll every hyperparameter back to its pre-step value.
                        for h2,o in zip(hyperparams, old_values):
                            print 'returning value', o
                            h2.set_value(o)
                        lr[h_id]= lr[h_id] / 10.0
                        print 'new lr', lr
                        attemp_num -= 1
                        good_update = False
        if verbose>=0:
            print 'hypergrads', grads
    return HyperparameterOptimization(best_values=history[-1][0], history=history)
if __name__=='__main__':
    # Smoke-test / demo driver for hoag_optimize on a small linear dataset.
    from generic.optimizer import gd_optimizer
    from pyfos.models.var_feedforward import build_var_feedforward
    from tc.cv import cv_tc
    from functools import partial
    matrix = np.load('../../data/matrix.npy')
    X, Y = np.load('../../data/linearx.npy'), np.load('../../data/lineary.npy')
    X_train = X[:100]
    Y_train = Y[:100]
    X_test = X[100:]
    Y_test = Y[100:]
    lr = theano.shared(10**(-3))
    log_alphas = theano.shared(np.array([.0, .0]))
    optimizer = partial(gd_optimizer, learning_rate=lr)
    # BUGFIX: only build_var_feedforward is imported; the previous reference
    # to the undefined name build_feedforward raised NameError.
    # TODO confirm build_var_feedforward accepts the same kwargs.
    model_build = partial(build_var_feedforward, structure=[2, 1],
                          nonlinearity=lambda x: x, log_alphas=log_alphas,
                          bias=False)
    # BUGFIX: hoag_optimize takes (..., trial_num, batch_size, batch_size2,
    # train_iteration_num, ...); the old call passed one positional argument
    # too few.  batch_size is unused inside hoag_optimize, so 75 is supplied
    # for both batch arguments -- TODO confirm the intended sizes.
    hoag_optimize(partial(cv_tc, k=3, batch_size=75),
                  model_build,
                  optimizer,
                  25, 75, 75,
                  100,
                  X_train, Y_train, [log_alphas], lr=[0.01], verbose=10)
| [
"bakhteev@phystech.edu"
] | bakhteev@phystech.edu |
fa46caa1f1f3f3becac7ffa22defea3a911bab75 | ade138f110f2a311a6d2b425be33b5691fe8bd11 | /src/regex/patterns.py | f07bf814760ef45cc07ba4723857880fd075dde6 | [] | no_license | colinbazzano/learning-python | 68f6c7435691834b7c6e2dd6ac43872b76c94aee | 118c51d8b4a16c571584457bbadf97041c23953d | refs/heads/master | 2022-12-10T12:59:52.122819 | 2020-03-22T17:26:29 | 2020-03-22T17:26:29 | 247,514,583 | 1 | 0 | null | 2022-12-08T03:51:47 | 2020-03-15T17:21:34 | Python | UTF-8 | Python | false | false | 543 | py | """patterns
patterns are a powerful way to use Regex
to learn more
regex101.com
to practice
https://regexone.com/
email validation r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)"
"""
import re

# Email-shaped strings: local part, "@", then a domain with at least one dot.
pattern = re.compile(r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)")
# Passwords: at least 8 characters drawn from letters, digits, or $%#@.
pattern2 = re.compile(r"[a-zA-Z0-9$%#@]{8,}")

string = 'fake_email@email.org'
password = 'difaj$wEDJO%sjdi'

# search() finds the email anywhere; fullmatch() requires the whole
# password string to satisfy the pattern.
a = pattern.search(string)
print(a)
check = pattern2.fullmatch(password)
print(check)
| [
"colinbazzano@Colins-MacBook-Pro.local"
] | colinbazzano@Colins-MacBook-Pro.local |
afd0e272f53f664ee3b9139fa436e06067281f3b | fe8da1d3efa5bcc5b9833bd1358275fb517f1060 | /facenet-pytorch/models/tensorflow2pytorch.py | 359e6d245ca30b8a48d0f5f7db120f22cd4d1dd8 | [
"BSD-3-Clause",
"BSD-2-Clause",
"MIT"
] | permissive | juliagong/sketch2face | d57a95a4eea9bcdafff88f801cee7b0bb740049e | 40b7f1ee129dc0ff14c4d3a4e3479a7ee5439296 | refs/heads/master | 2023-05-27T08:22:12.110124 | 2020-06-30T02:51:15 | 2020-06-30T02:51:15 | 192,847,872 | 13 | 2 | NOASSERTION | 2023-05-22T22:16:07 | 2019-06-20T04:22:22 | Jupyter Notebook | UTF-8 | Python | false | false | 11,619 | py | from dependencies.facenet.src import facenet
from dependencies.facenet.src.models import inception_resnet_v1 as tf_mdl
import tensorflow as tf
import torch
import json
import os
from models.inception_resnet_v1 import InceptionResNetV1
def import_tf_params(tf_mdl_dir, sess):
    """Import tensorflow model from save directory.

    Arguments:
        tf_mdl_dir {str} -- Location of protobuf, checkpoint, meta files.
        sess {tensorflow.Session} -- Tensorflow session object.

    Returns:
        (list, list, list) -- Tuple of lists containing the layer names,
            parameter arrays as numpy ndarrays, parameter shapes.
    """
    print('\nLoading tensorflow model\n')
    facenet.load_model(tf_mdl_dir)

    print('\nGetting model weights\n')
    tf_layers = tf.trainable_variables()
    tf_params = sess.run(tf_layers)

    tf_shapes = [p.shape for p in tf_params]
    # Rebind tf_layers from variable objects to their string names.
    tf_layers = [l.name for l in tf_layers]

    # Persist a name -> shape mapping next to the checkpoint for inspection.
    with open(os.path.join(tf_mdl_dir, 'layer_description.json'), 'w') as f:
        json.dump({l: s for l, s in zip(tf_layers, tf_shapes)}, f)

    return tf_layers, tf_params, tf_shapes
def get_layer_indices(layer_lookup, tf_layers):
    """Giving a lookup of model layer attribute names and tensorflow variable names,
    find matching parameters.

    Arguments:
        layer_lookup {dict} -- Dictionary mapping pytorch attribute names to (partial)
            tensorflow variable names. Expects dict of the form {'attr': ['tf_name', ...]}
            where the '...'s are ignored.
        tf_layers {list} -- List of tensorflow variable names.

    Returns:
        dict -- The input dictionary with the list of matching inds appended to each item.
    """
    return {
        # Append the positions of every tf variable whose name contains
        # the (partial) name stored in value[0].
        attr: value + [[idx for idx, layer_name in enumerate(tf_layers)
                        if value[0] in layer_name]]
        for attr, value in layer_lookup.items()
    }
def load_tf_batchNorm(weights, layer):
    """Load tensorflow weights into nn.BatchNorm object.

    Expected order (see assignments): weights[0] -> bias, weights[1] ->
    running_mean, weights[2] -> running_var.  The scale (weight) is not
    imported; it is forced to ones.

    Arguments:
        weights {list} -- Tensorflow parameters.
        layer {torch.nn.Module} -- nn.BatchNorm.
    """
    offset, mean, variance = weights[0], weights[1], weights[2]
    layer.bias.data = torch.tensor(offset).view(layer.bias.data.shape)
    # Scale is fixed at one rather than taken from the checkpoint.
    layer.weight.data = torch.ones_like(layer.weight.data)
    layer.running_mean = torch.tensor(mean).view(layer.running_mean.shape)
    layer.running_var = torch.tensor(variance).view(layer.running_var.shape)
def load_tf_conv2d(weights, layer):
    """Load tensorflow weights into nn.Conv2d object.

    Arguments:
        weights {list} -- Tensorflow parameters.  Either a bare kernel
            array, or a two-element list [kernel, bias].
        layer {torch.nn.Module} -- nn.Conv2d.
    """
    if isinstance(weights, list) and len(weights) == 2:
        kernel, bias = weights
        layer.bias.data = torch.tensor(bias).view(layer.bias.data.shape)
        weights = kernel
    # permute(3, 2, 0, 1): TF kernels are stored (kH, kW, in, out);
    # torch Conv2d expects (out, in, kH, kW).
    layer.weight.data = (
        torch.tensor(weights)
        .permute(3, 2, 0, 1)
        .view(layer.weight.data.shape)
    )
def load_tf_basicConv2d(weights, layer):
    """Load tensorflow weights into grouped Conv2d+BatchNorm object.

    weights[0] feeds the conv kernel; the remaining arrays feed the
    batch-norm terms.

    Arguments:
        weights {list} -- Tensorflow parameters.
        layer {torch.nn.Module} -- Object containing Conv2d+BatchNorm.
    """
    conv_weights, bn_weights = weights[0], weights[1:]
    load_tf_conv2d(conv_weights, layer.conv)
    load_tf_batchNorm(bn_weights, layer.bn)
def load_tf_linear(weights, layer):
    """Load tensorflow weights into nn.Linear object.

    Arguments:
        weights {list} -- Tensorflow parameters.  Either a bare weight
            matrix, or a two-element list [matrix, bias].
        layer {torch.nn.Module} -- nn.Linear.
    """
    if isinstance(weights, list) and len(weights) == 2:
        matrix, bias = weights
        layer.bias.data = torch.tensor(bias).view(layer.bias.data.shape)
        weights = matrix
    # permute(1, 0): TF dense kernels are (in, out); torch Linear
    # stores (out, in).
    layer.weight.data = (
        torch.tensor(weights)
        .permute(1, 0)
        .view(layer.weight.data.shape)
    )
# High-level parameter-loading functions:
def load_tf_block35(weights, layer):
    # Consumes 26 arrays: six BasicConv2d units (4 arrays each: conv kernel
    # plus three batch-norm terms) across three branches, then the final
    # projection conv2d (kernel + bias).
    load_tf_basicConv2d(weights[:4], layer.branch0)
    load_tf_basicConv2d(weights[4:8], layer.branch1[0])
    load_tf_basicConv2d(weights[8:12], layer.branch1[1])
    load_tf_basicConv2d(weights[12:16], layer.branch2[0])
    load_tf_basicConv2d(weights[16:20], layer.branch2[1])
    load_tf_basicConv2d(weights[20:24], layer.branch2[2])
    load_tf_conv2d(weights[24:26], layer.conv2d)
def load_tf_block17_8(weights, layer):
    # Shared loader for Block17 and Block8 (18 arrays): one BasicConv2d in
    # branch0, three in branch1, then the projection conv2d (kernel + bias).
    load_tf_basicConv2d(weights[:4], layer.branch0)
    load_tf_basicConv2d(weights[4:8], layer.branch1[0])
    load_tf_basicConv2d(weights[8:12], layer.branch1[1])
    load_tf_basicConv2d(weights[12:16], layer.branch1[2])
    load_tf_conv2d(weights[16:18], layer.conv2d)
def load_tf_mixed6a(weights, layer):
    # Mixed_6a expects exactly 16 arrays: four BasicConv2d units of 4 arrays
    # each (one in branch0, three in branch1).
    if len(weights) != 16:
        raise ValueError(f'Number of weight arrays ({len(weights)}) not equal to 16')
    load_tf_basicConv2d(weights[:4], layer.branch0)
    load_tf_basicConv2d(weights[4:8], layer.branch1[0])
    load_tf_basicConv2d(weights[8:12], layer.branch1[1])
    load_tf_basicConv2d(weights[12:16], layer.branch1[2])
def load_tf_mixed7a(weights, layer):
    # Mixed_7a expects exactly 28 arrays: seven BasicConv2d units of
    # 4 arrays each, split 2/2/3 across branch0/branch1/branch2.
    if len(weights) != 28:
        raise ValueError(f'Number of weight arrays ({len(weights)}) not equal to 28')
    load_tf_basicConv2d(weights[:4], layer.branch0[0])
    load_tf_basicConv2d(weights[4:8], layer.branch0[1])
    load_tf_basicConv2d(weights[8:12], layer.branch1[0])
    load_tf_basicConv2d(weights[12:16], layer.branch1[1])
    load_tf_basicConv2d(weights[16:20], layer.branch2[0])
    load_tf_basicConv2d(weights[20:24], layer.branch2[1])
    load_tf_basicConv2d(weights[24:28], layer.branch2[2])
def load_tf_repeats(weights, layer, rptlen, subfun):
    """Split *weights* into consecutive chunks of *rptlen* arrays and load
    chunk i into the sub-layer named str(i) via *subfun*."""
    if len(weights) % rptlen != 0:
        raise ValueError(f'Number of weight arrays ({len(weights)}) not divisible by {rptlen}')
    for block_idx in range(len(weights) // rptlen):
        chunk = weights[block_idx * rptlen:(block_idx + 1) * rptlen]
        # Repeat containers expose their units as attributes "0", "1", ...
        subfun(chunk, getattr(layer, str(block_idx)))
def load_tf_repeat_1(weights, layer):
    # Repeated Block35 units: 26 weight arrays per block.
    load_tf_repeats(weights, layer, 26, load_tf_block35)
def load_tf_repeat_2(weights, layer):
    # Repeated Block17 units: 18 weight arrays per block.
    load_tf_repeats(weights, layer, 18, load_tf_block17_8)
def load_tf_repeat_3(weights, layer):
    # Repeated Block8 units: 18 weight arrays per block (same layout as Block17).
    load_tf_repeats(weights, layer, 18, load_tf_block17_8)
def test_loaded_params(mdl, tf_params, tf_layers):
    """Check each parameter in a pytorch model for an equivalent parameter
    in a list of tensorflow variables.

    Diagnostic only: results are printed, nothing is returned or asserted.
    Two parameters are considered "equivalent" when their means agree to
    within 1e-8 -- a heuristic that can produce false matches (e.g. for
    all-zero tensors).

    Arguments:
        mdl {torch.nn.Module} -- Pytorch model.
        tf_params {list} -- List of ndarrays representing tensorflow variables.
        tf_layers {list} -- Corresponding list of tensorflow variable names.
    """
    tf_means = torch.stack([torch.tensor(p).mean() for p in tf_params])
    for name, param in mdl.named_parameters():
        pt_mean = param.data.mean()
        matching_inds = ((tf_means - pt_mean).abs() < 1e-8).nonzero()
        print(f'{name} equivalent to {[tf_layers[i] for i in matching_inds]}')
def compare_model_outputs(pt_mdl, sess, test_data):
    """Given some testing data, compare the output of pytorch and tensorflow models.

    Prints both embeddings and the norm of their difference; diagnostic only.

    Arguments:
        pt_mdl {torch.nn.Module} -- Pytorch model.
        sess {tensorflow.Session} -- Tensorflow session object.
        test_data {torch.Tensor} -- Pytorch tensor.
    """
    print('\nPassing test data through TF model\n')
    # Fetch the frozen graph's input/output tensors by their well-known names.
    images_placeholder = tf.get_default_graph().get_tensor_by_name("input:0")
    phase_train_placeholder = tf.get_default_graph().get_tensor_by_name("phase_train:0")
    embeddings = tf.get_default_graph().get_tensor_by_name("embeddings:0")
    feed_dict = {images_placeholder: test_data.numpy(), phase_train_placeholder: False}
    tf_output = torch.tensor(sess.run(embeddings, feed_dict=feed_dict))
    print(tf_output)

    print('\nPassing test data through PT model\n')
    # The TF graph takes channels-last input; permute to channels-first
    # for the pytorch model.
    pt_output = pt_mdl(test_data.permute(0, 3, 1, 2))
    print(pt_output)

    distance = (tf_output - pt_output).norm()
    print(f'\nDistance {distance}\n')
def load_tf_model_weights(mdl, layer_lookup, tf_mdl_dir):
    """Load tensorflow parameters into a pytorch model.

    Arguments:
        mdl {torch.nn.Module} -- Pytorch model.
        layer_lookup {[type]} -- Dictionary mapping pytorch attribute names to (partial)
            tensorflow variable names, and a function suitable for loading weights.
            Expects dict of the form {'attr': ['tf_name', function]}.
        tf_mdl_dir {str} -- Location of protobuf, checkpoint, meta files.
    """
    tf.reset_default_graph()
    with tf.Session() as sess:
        tf_layers, tf_params, tf_shapes = import_tf_params(tf_mdl_dir, sess)
        layer_info = get_layer_indices(layer_lookup, tf_layers)

        for layer_name, info in layer_info.items():
            print(f'Loading {info[0]}/* into {layer_name}')
            # info = [tf_name, loader_function, matching_variable_indices]
            weights = [tf_params[i] for i in info[2]]
            layer = getattr(mdl, layer_name)
            info[1](weights, layer)

        test_loaded_params(mdl, tf_params, tf_layers)

        # Sanity check: both frameworks should embed the same random batch
        # to (nearly) identical outputs.
        compare_model_outputs(mdl, sess, torch.randn(5, 160, 160, 3).detach())
def tensorflow2pytorch():
    """Convert both pretrained facenet TF checkpoints to pytorch .pt files.

    For each checkpoint (VGGFace2 and CASIA-Webface trained) this loads the
    TF weights into an InceptionResNetV1, then saves three state dicts:
    the full model, the logits layer alone, and the feature extractor
    (everything except the logits).  The two conversions previously
    duplicated ~20 lines each; they now share one helper.
    """
    layer_lookup = {
        'conv2d_1a': ['InceptionResnetV1/Conv2d_1a_3x3', load_tf_basicConv2d],
        'conv2d_2a': ['InceptionResnetV1/Conv2d_2a_3x3', load_tf_basicConv2d],
        'conv2d_2b': ['InceptionResnetV1/Conv2d_2b_3x3', load_tf_basicConv2d],
        'conv2d_3b': ['InceptionResnetV1/Conv2d_3b_1x1', load_tf_basicConv2d],
        'conv2d_4a': ['InceptionResnetV1/Conv2d_4a_3x3', load_tf_basicConv2d],
        'conv2d_4b': ['InceptionResnetV1/Conv2d_4b_3x3', load_tf_basicConv2d],
        'repeat_1': ['InceptionResnetV1/Repeat/block35', load_tf_repeat_1],
        'mixed_6a': ['InceptionResnetV1/Mixed_6a', load_tf_mixed6a],
        'repeat_2': ['InceptionResnetV1/Repeat_1/block17', load_tf_repeat_2],
        'mixed_7a': ['InceptionResnetV1/Mixed_7a', load_tf_mixed7a],
        'repeat_3': ['InceptionResnetV1/Repeat_2/block8', load_tf_repeat_3],
        'block8': ['InceptionResnetV1/Block8', load_tf_block17_8],
        'last_linear': ['InceptionResnetV1/Bottleneck/weights', load_tf_linear],
        'last_bn': ['InceptionResnetV1/Bottleneck/BatchNorm', load_tf_batchNorm],
        'logits': ['Logits', load_tf_linear],
    }

    def _convert(description, num_classes, tf_mdl_dir, data_name):
        # Load one TF checkpoint into a fresh model and save the three .pt files.
        print(f'\nLoad {description}-trained weights and save\n')
        mdl = InceptionResNetV1(num_classes=num_classes).eval()
        load_tf_model_weights(mdl, layer_lookup, tf_mdl_dir)
        state_dict = mdl.state_dict()
        torch.save(state_dict, f'{tf_mdl_dir}-{data_name}.pt')
        torch.save(
            {
                'logits.weight': state_dict['logits.weight'],
                'logits.bias': state_dict['logits.bias'],
            },
            f'{tf_mdl_dir}-{data_name}-logits.pt'
        )
        # Drop the classifier head to produce the feature-extractor dict.
        state_dict.pop('logits.weight')
        state_dict.pop('logits.bias')
        torch.save(state_dict, f'{tf_mdl_dir}-{data_name}-features.pt')

    _convert('VGGFace2', 8631, 'data/20180402-114759', 'vggface2')
    _convert('CASIA-Webface', 10575, 'data/20180408-102900', 'casia-webface')
| [
"juliaxgong@gmail.com"
] | juliaxgong@gmail.com |
54375b15c50675fc6a4c8d7cd3c9ca6202d57faa | 07cd7d98765ffd3a11342155fb21fd1f209a4c9a | /examples/simple_user_interface.py | c25bfe062f4299ae0c71dd2229b4935b49fe2214 | [
"Apache-2.0"
] | permissive | fooelisa/network_traffic_modeler_py3 | 1ce439b938289c02bfb4c0950b8fee9fefda8fde | 9c5151c066331536b6864f7c5e152de3c352282f | refs/heads/master | 2020-05-07T09:46:29.567195 | 2019-07-24T15:41:31 | 2019-07-24T15:41:31 | 180,391,380 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28,910 | py | """Simple, menu-driven UI for network_modeling module.
Allows users to interact with and relate between associated
demands, interfaces, and nodes."""
from pyNTM import Model
from pyNTM import ModelException
from pyNTM import Interface
from pyNTM import Demand
from pyNTM import Node
from graph_network import graph_network
from graph_network import graph_network_interactive
from tkinter import ttk as ttk
from tkinter import *
from tkinter import filedialog
import re
import pdb
background_color = 'tan'
def open_file():
    """Opens the file that describes the Model and allows user to save
    a diagram of the network graph"""
    # Prompt for a model file only if none has been chosen yet; this
    # function is also re-entered as a refresh after graphing.
    if selected_model_file.get() == '':
        selected_model_file.set(filedialog.askopenfilename(initialdir="/",
                                                           title="Select file",
                                                           filetypes=(("csv files", "*.csv"),
                                                                      ("all files", "*.*"))))
    global model
    selected_file_label = ttk.Label(label_frame,
                                    text="Network Model file is:")
    selected_file_label.grid(row=1, column=0, sticky='W')
    # First assignment is immediately overwritten; it only pads the cell.
    selected_file_display = ttk.Label(label_frame, text=' ' * 30)
    selected_file_display = ttk.Label(label_frame,
                                      text=selected_model_file.get())
    selected_file_display.grid(row=6, column=0)
    # NOTE(review): nesting below reconstructed from mangled indentation --
    # everything that needs a loaded model is kept inside this branch.
    if selected_model_file.get() != '':
        model = Model.load_model_file(selected_model_file.get())
        model.update_simulation()
        model_status_label = ttk.Label(label_frame, text="Model is:")
        model_status_label.grid(row=8, column=0, sticky='W')
        model_file_display = ttk.Label(label_frame, text=model)
        model_file_display.grid(row=9, column=0, sticky='W')
        # Update the Node Explorer tab
        examine_selected_node()
        # Update the Demand Explorer tab
        examine_selected_demand()
        # Update the Interface Explorer tab
        examine_selected_interface()
        # Update the Path Explorer tab
        examine_paths()
    # Create a button to produce a network graph
    graph_network_button = Button(label_frame)
    graph_network_button.grid(row=12, column=0, padx=5, pady=5, sticky='W')
    graph_network_button["text"] = "Push to create network graph"
    # Don't add the trailing () or this will execute immediately/automatically
    graph_network_button["command"] = create_interactive_network_graph_and_refresh
    if network_graph_file.get() != '':
        graph_label_text = "Graph file saved at: " + network_graph_file.get()
        graph_file_label = Label(label_frame, text=graph_label_text)
        graph_file_label.grid(row=13, column=0, sticky='W')
def create_network_graph():
    """Prompt for an output path and save a static utilization graph there."""
    target_path = filedialog.asksaveasfilename(initialdir="/",
                                               title="Select or Create file:")
    network_graph_file.set(target_path)
    graph_network.make_utilization_graph_neat(model,
                                              network_graph_file.get(),
                                              display_plot=False)
def create_network_graph_and_refresh():
    """Save a static utilization graph to a user-chosen file, then
    redraw the model-file tab so the new location is shown."""
    chosen_path = filedialog.asksaveasfilename(initialdir="/",
                                               title="Select or Create file:")
    network_graph_file.set(chosen_path)
    graph_network.make_utilization_graph_neat(model,
                                              network_graph_file.get(),
                                              display_plot=False)
    open_file()
def create_interactive_network_graph_and_refresh():
    """Makes an interactive network graph and refreshes open_file_tab"""
    # NOTE(review): despite the name, this only launches the interactive
    # view; it neither writes a file nor calls open_file() to refresh.
    graph_network_interactive.make_interactive_network_graph(model)
def set_active_interface_from_listbox(event):
    """Sets the selected interface value from a listbox to the
    active_interface"""
    w = event.widget
    # curselection() returns the selected row indices; fetch that row's text.
    # (The previous duplicate, unused `value` binding -- flagged by the old
    # TODO -- has been removed.)
    value_position = (w.curselection())
    value_in_position = w.get(value_position)
    # The listbox rows hold interface reprs, so the row text itself is the
    # new selected-interface value.
    selected_interface.set(value_in_position)
    # Refresh the explorer tabs so they reflect the new active interface.
    examine_selected_node()
    examine_selected_demand()
    examine_selected_interface()
def set_active_demand_from_listbox(event):
    """Sets the selected demand value from a listbox to the active_demand"""
    w = event.widget
    # Position of the current selection; the unused duplicate
    # curselection() call has been removed.
    value_position = (w.curselection())
    selected_demand.set(w.get(value_position))
    # Clear the widgets currently gridded on each explorer tab so stale
    # demand-path info is not left behind.
    for thing in demand_tab.grid_slaves():
        thing.destroy()
    for thing in node_tab.grid_slaves():
        thing.destroy()
    for thing in interface_tab.grid_slaves():
        thing.destroy()
    # Refresh the Node Info and Demand Info tabs
    examine_selected_node()
    examine_selected_demand()
    examine_selected_interface()
def set_active_object_from_option_menu(event):
    """Refreshes the tabs with the new active object info and displays
    the info based on the new active object"""
    # Wipe everything currently gridded on the explorer tabs.
    for tab in (demand_tab, node_tab, interface_tab):
        for widget in tab.grid_slaves():
            widget.destroy()
    # Rebuild each explorer view against the newly selected object.
    examine_selected_node()
    examine_selected_demand()
    examine_selected_interface()
    examine_paths()
def get_demand_object_from_repr(demand_repr):
    """Returns demand object with an input of the demand's repr"""
    try:
        # Tokenize the repr on ', ', single spaces, and ')'.  Positions
        # 2/5/11 are taken as source, dest, and quoted demand name --
        # TODO confirm these offsets against Demand.__repr__.
        demand_info = re.split(', | |\)', demand_repr)
        demand_source = demand_info[2]
        demand_dest = demand_info[5]
        demand_name = demand_info[11][1:-1]  # strip the surrounding quotes
        demand_object = model.get_demand_object(demand_source, demand_dest,
                                                demand_name=demand_name)
        return demand_object
    except IndexError:
        # Malformed or empty repr: fall through and implicitly return None.
        pass
def get_demands_on_interface(interface):
    """Returns a list of demands on the specified interface"""
    # Display demands on interface
    try:
        # `interface` is an Interface repr string; the quoted tokens at
        # positions 1 and 3 are the interface name and its node's name.
        interface_data = interface.split("'")
        interface_name = interface_data[1]
        node_name = interface_data[3]
        interface_object = model.get_interface_object(interface_name,
                                                      node_name)
        demands_on_interface = interface_object.demands(model)
    except (ModelException, IndexError):
        # No valid interface selected: fall back to an empty demand list.
        interface_object = None
        demands_on_interface = []
    return demands_on_interface
def display_selected_objects(canvas_object, row_, column_):
    """Displays the selected objects"""
    # Default statuses shown when an object cannot be resolved in the model.
    node_status = 'Unknown'
    interface_status = 'Unknown'
    demand_status = 'Unknown'
    interface_info = 'Unknown'
    # Resolve the selected node's failure state, if any node is selected.
    try:
        node_failed = model.get_node_object(selected_node.get()).failed
        if node_failed == True:
            node_status = 'Failed'
        else:
            node_status = 'Not Failed'
    except ModelException:
        pass
    # Resolve the selected interface; the quoted tokens at positions 1 and 3
    # of its repr are the interface name and node name.
    try:
        selected_interface_name = selected_interface.get().split("'")[1]
        selected_interface_node = selected_interface.get().split("'")[3]
        interface_object = model.get_interface_object(selected_interface_name,
                                                      selected_interface_node)
        interface_failed = interface_object.failed
        # Utilization is displayed as a percentage rounded to one decimal.
        interface_util = str(round((interface_object.utilization * 100), 1))
        interface_info = interface_object
        if interface_failed == True:
            interface_status = 'Failed'
        else:
            interface_status = interface_util + "% utilized"
    except (ModelException, AttributeError, IndexError) as e:
        pass
    # Resolve the selected demand's routed/unrouted state.
    try:
        demand_object = get_demand_object_from_repr(selected_demand.get())
        demand_routed = demand_object.path
        if demand_routed == 'Unrouted':
            demand_status = 'Unrouted'
        else:
            demand_status = 'Routed'
    except (ModelException, AttributeError):
        pass
    # Build the summary frame showing name + status for each selection.
    selected_object_frame = LabelFrame(canvas_object, background=background_color,
                                       text="Selected Interface, Demand, and Node")
    selected_object_frame.grid(column=column_, row=row_, columnspan=3, pady=10)
    # NOTE(review): column_width is not a tkinter option; this only stores
    # a plain attribute on the frame object.
    selected_object_frame.column_width = 40
    selected_object_frame.columnconfigure(0, weight=1)
    selected_object_frame.columnconfigure(1, weight=2)
    selected_object_frame.columnconfigure(2, weight=1)
    Label(selected_object_frame, text='Name',
          background=background_color).grid(row=row_ + 1, column=1)
    Label(selected_object_frame, text='Status',
          background=background_color).grid(row=row_ + 1, column=2)
    Label(selected_object_frame, text="Selected Node:",
          background=background_color).grid(row=row_ + 2, column=0, sticky='W')
    Label(selected_object_frame, text=selected_node.get(), width=52,
          borderwidth=1, relief="solid").grid(row=row_ + 2, column=1)
    Label(selected_object_frame, text=node_status,
          background=background_color).grid(row=row_ + 2, column=2, sticky='E')
    Label(selected_object_frame, text="Selected Interface:",
          background=background_color).grid(row=row_ + 3, column=0, sticky='W')
    Label(selected_object_frame, text=selected_interface.get(),
          width=52, justify=LEFT, wraplength=450,
          borderwidth=1, relief="solid").grid(row=row_ + 3, column=1)
    Label(selected_object_frame, text=interface_status,
          background=background_color).grid(row=row_ + 3, column=2, sticky='E')
    Label(selected_object_frame, text="Selected Demand:",
          background=background_color).grid(row=row_ + 4, column=0, sticky='W')
    Label(selected_object_frame, text=selected_demand.get(), width=52,
          borderwidth=1, wraplength=450, relief="solid").grid(row=row_ + 4, column=1)
    Label(selected_object_frame, text=demand_status,
          background=background_color).grid(row=row_ + 4, column=2, sticky='E')
def display_demands(label_info, canvas_object, list_of_demands, row_,
                    column_,):
    """Display a labeled, single-select listbox of demands on canvas_object.

    A LabelFrame gridded at (row_, column_) holds a caption with
    label_info, the entries from list_of_demands, and horizontal/vertical
    scrollbars.  Selecting (or double-clicking) an entry calls
    set_active_demand_from_listbox.
    """
    demands_frame = LabelFrame(canvas_object)
    demands_frame.grid(row=row_, column=column_, pady=10)
    Label(demands_frame, text=label_info).grid(row=0,
                                               column=0, sticky='W', padx=10)
    # Horizontal scrollbar - TODO create decorator for the scrollbar?
    horizontal_scrollbar = Scrollbar(demands_frame, orient=HORIZONTAL)
    horizontal_scrollbar.grid(row=3, column=0, sticky=E + W)
    # Vertical scrollbar
    vertical_scrollbar = Scrollbar(demands_frame, orient=VERTICAL)
    vertical_scrollbar.grid(row=1, column=1, sticky=N + S)
    demand_listbox = Listbox(demands_frame, selectmode='single', height=10,
                             width=40, xscrollcommand=horizontal_scrollbar.set,
                             yscrollcommand=vertical_scrollbar.set)
    demand_listbox.grid(row=1, column=0, sticky='W', padx=10)
    vertical_scrollbar.config(command=demand_listbox.yview)
    horizontal_scrollbar.config(command=demand_listbox.xview)
    # Insert demands in display order (listbox indices starting at 1).
    for demand_counter, demand in enumerate(list_of_demands, start=1):
        demand_listbox.insert(demand_counter, demand)
    # BUG FIX: the Tk virtual event is spelled "<<ListboxSelect>>"
    # (lowercase 'b'); "<<ListBoxSelect>>" never fires, so single-click
    # selection handling was dead and only double-click worked.
    demand_listbox.bind("<<ListboxSelect>>", set_active_demand_from_listbox)
    demand_listbox.bind("<Double-Button-1>", set_active_demand_from_listbox)
def display_interfaces(label_info, canvas_object, list_of_interfaces,
                       row_, column_):
    """Display list_of_interfaces in a single-selectable listbox.

    A caption with label_info appears above the listbox; horizontal and
    vertical scrollbars are attached.  Selecting (or double-clicking) an
    entry calls set_active_interface_from_listbox.

    Returns the Listbox widget so callers can adjust its grid options.
    """
    # Display Node's Interfaces Label
    Label(canvas_object, text=label_info).grid(row=row_, column=column_,
                                               sticky='W', padx=5)
    # Vertical scrollbar
    vertical_scrollbar = Scrollbar(canvas_object, orient=VERTICAL)
    vertical_scrollbar.grid(row=row_ + 1, column=column_ + 2, sticky=N + S)
    # Horizontal scrollbar - TODO create decorator for the scrollbar?
    horizontal_scrollbar = Scrollbar(canvas_object, orient=HORIZONTAL)
    horizontal_scrollbar.grid(row=(row_ + 2), column=column_, sticky=E + W,
                              columnspan=2)
    # Create a listbox with the available interfaces for the Node
    interfaces_listbox = Listbox(canvas_object, selectmode='single',
                                 height=8, width=40, xscrollcommand=horizontal_scrollbar.set,
                                 yscrollcommand=vertical_scrollbar.set)
    interfaces_listbox.grid(row=row_ + 1, column=column_, columnspan=2,
                            sticky='W', padx=5)
    horizontal_scrollbar.config(command=interfaces_listbox.xview)
    vertical_scrollbar.config(command=interfaces_listbox.yview)
    # Insert interfaces in display order (listbox indices starting at 1).
    for intf_counter, intf_name in enumerate(list_of_interfaces, start=1):
        interfaces_listbox.insert(intf_counter, intf_name)
    # BUG FIX: the Tk virtual event is spelled "<<ListboxSelect>>"
    # (lowercase 'b'); "<<ListBoxSelect>>" never fires, so single-click
    # selection handling was dead and only double-click worked.
    interfaces_listbox.bind("<<ListboxSelect>>", set_active_interface_from_listbox)
    interfaces_listbox.bind("<Double-Button-1>", set_active_interface_from_listbox)
    return interfaces_listbox
def examine_selected_node(*args):
    """Populate node_tab with information about the node in selected_node.

    Builds the node chooser, the node's interface listbox, the demands
    sourced from / destined to the node, the demands egressing the
    currently selected interface, and the selected-objects summary frame.
    """
    #### Frame to choose a node ####
    choose_node_frame = LabelFrame(node_tab)
    choose_node_frame.grid(row=0, column=0, padx=10, pady=10)
    # Label for choosing node
    Label(choose_node_frame, text="Choose a node:").grid(row=0, column=0, sticky='W',
                                                         pady=10)
    # Dropdown menu to choose a node
    node_choices_list = [node.name for node in model.node_objects]
    node_choices_list.sort()
    # This option menu calls set_active_object_from_option_menu when a
    # choice is made.  (Renamed local: the old name shadowed the
    # module-level node_dropdown_select() helper.)
    node_select_menu = OptionMenu(choose_node_frame, selected_node,
                                  *node_choices_list,
                                  command=set_active_object_from_option_menu)
    node_select_menu.grid(row=0, column=1, sticky='E')
    # Label to confirm selected Node
    Label(choose_node_frame, text="Selected node is:").grid(row=1, column=0, sticky='W')
    # Display the selected Node (dashes act as a placeholder underneath)
    Label(choose_node_frame, text='-----------------------------------').\
        grid(row=1, column=1, sticky='E')
    Label(choose_node_frame, text=selected_node.get()).grid(row=1, column=1, sticky='E')
    # Get selected node's Interfaces and display them in a listbox.
    # BUG FIX: the bare "except:" silently swallowed every exception type
    # (even KeyboardInterrupt); catch only the expected failures when no
    # model is loaded (AttributeError on None) or the node is unknown.
    try:
        interface_choices = model.get_node_object(selected_node.get()).interfaces(model)
    except (ModelException, AttributeError):
        interface_choices = []
    #### Frame to display node's interfaces ####
    node_intf_frame = LabelFrame(node_tab)
    node_intf_frame.grid(row=0, column=1)
    interface_info = [str(round((interface.utilization * 100), 1)) + '% ' + interface.__repr__() for
                      interface in interface_choices]
    interface_listbox = display_interfaces("Node's Interfaces", node_intf_frame,
                                           interface_info, 0, 2)
    #### Create a frame to show node demand info ####
    demands_frame = LabelFrame(node_tab, text="Node Demand Info")
    demands_frame.grid(column=0, row=4, columnspan=4, sticky='W', pady=15)
    # Display Demands Sourced From Node
    source_demand_choices = \
        model.get_demand_objects_source_node(selected_node.get())
    display_demands("Demands sourced from node", demands_frame,
                    source_demand_choices, 0, 0)
    # Display Demands Destined To Node
    dest_demand_choices = model.get_demand_objects_dest_node(selected_node.get())
    display_demands("Demands destined to node", demands_frame,
                    dest_demand_choices, 0, 1)
    #### Create a frame to show interface demand info ####
    intf_demands_frame = LabelFrame(node_tab, text="Interface Demand Info")
    intf_demands_frame.grid(column=5, row=4, columnspan=2, sticky='W',
                            padx=15, pady=15)
    # Display demands on interface; fall back to an empty list when no
    # interface is selected or the lookup fails.
    try:
        demands_on_interface = get_demands_on_interface(selected_interface.get())
    except (ModelException, IndexError):
        demands_on_interface = []
    display_demands("Demands Egressing Selected Interface", intf_demands_frame,
                    demands_on_interface, 0, 1)
    #### Create a frame to show selected object info ####
    display_selected_objects(node_tab, 0, 4)
    # TODO - fail selected interface or node
def examine_selected_demand(*args):
    """Populate demand_tab with information about the demand in selected_demand.

    Builds the demand chooser, the selected-objects summary, the selected
    demand's path(s) (all paths are shown for ECMP demands), and the
    demands egressing the currently selected interface.
    """
    # Label for choosing demand
    Label(demand_tab,
          text="Choose a demand:").grid(row=0, column=0, sticky='W', pady=10)
    # Dropdown menu to choose a demand, ordered by source node name
    demand_choices_list = [demand for demand in model.demand_objects]
    demand_choices_list_sorted = sorted(demand_choices_list,
                                        key=lambda demand: demand.source_node_object.name)
    demand_dropdown_select = OptionMenu(demand_tab, selected_demand,
                                        *demand_choices_list_sorted,
                                        command=set_active_object_from_option_menu)
    demand_dropdown_select.grid(row=0, column=1, sticky='EW')
    # Display the selected objects
    display_selected_objects(demand_tab, 0, 3)
    #### Display the selected demand's path(s) ####
    demand_path_frame = LabelFrame(demand_tab,
                                   text="Demand Path Info (Ordered hops from source to destination); Displays all paths for ECMP demands.")
    demand_path_frame.grid(row=3, column=0, columnspan=10, sticky='W',
                           padx=10, pady=10)
    # BUG FIX: dmd_paths was previously left unbound when the lookup
    # failed, and the resulting UnboundLocalError was caught below to skip
    # the hop loop.  Initialise it explicitly instead.
    dmd_paths = []
    try:
        demand_object = get_demand_object_from_repr(selected_demand.get())
        dmd_paths = demand_object.path
    except (IndexError, AttributeError):
        pass
    # BUG FIX: an unrouted demand reports the string 'Unrouted' as its
    # path (see display_selected_objects); iterating it would walk the
    # characters and crash on .utilization.
    if dmd_paths == 'Unrouted':
        dmd_paths = []
    column_num = 0
    for path in dmd_paths:
        label_info = "Demand hops ordered from source to dest"
        interface_info = [str(round((interface.utilization * 100), 1))
                          + '% ' + interface.__repr__() for interface in path]
        display_interfaces(label_info, demand_path_frame,
                           interface_info, 0, column_num)
        column_num += 3
    demands_on_interface = get_demands_on_interface(selected_interface.get())
    display_demands("Demands Egressing Selected Interface", demand_tab,
                    demands_on_interface, 4, 3)
def examine_selected_interface(*args):
    """Let the user browse interfaces filtered by a minimum utilization %."""
    #### Filter to interfaces above a certain utilization ####
    utilization_frame = LabelFrame(interface_tab)
    utilization_frame.grid(row=0, column=0)
    percent_choices = list(range(0, 100))
    # Caption for the percent-utilization selector
    pct_label = Label(utilization_frame, text="Display interfaces with \
utilization % greater than:")
    pct_label.grid(row=0, column=0, columnspan=2, sticky='W')
    pct_label.config(width=50)
    # Dropdown menu for the utilization threshold
    pct_dropdown_select = OptionMenu(utilization_frame, min_pct,
                                     *percent_choices, command=set_active_object_from_option_menu)
    pct_dropdown_select.grid(row=0, column=4, sticky='W')
    threshold = min_pct.get()
    msg = "Interfaces above " + str(threshold) + "% utilization"
    # Collect every interface at or above the threshold, rendered as
    # "<pct>% <repr>", then order by ascending utilization.
    over_threshold = []
    for interface in model.interface_objects:
        pct = interface.utilization * 100
        if pct >= threshold:
            over_threshold.append(str(round(pct, 1)) + '% ' + interface.__repr__())
    over_threshold.sort(key=lambda entry: float(entry.split('%')[0]))
    int_util = display_interfaces(msg, utilization_frame, over_threshold, 2, 1)
    int_util.grid(sticky='W')
    # Selected-objects summary frame
    selected_objects_int_tab = LabelFrame(interface_tab)
    selected_objects_int_tab.grid(row=0, column=6, padx=10, sticky='W')
    display_selected_objects(selected_objects_int_tab, 0, 8)
    # Demands egressing the currently selected interface
    egress_demands = get_demands_on_interface(selected_interface.get())
    display_demands("Demands Egressing Selected Interface", interface_tab,
                    egress_demands, 6, 0)
def examine_paths(*args):
    """Allows user to examine shortest paths and all paths between the
    selected source and destination nodes in the Model"""
    #### Select source and dest nodes ####
    node_choices = [node.name for node in model.node_objects]
    node_choices.sort()
    src_node_select_frame = node_dropdown_select("Select a source node",
                                                 node_choices, source_node, 0, 0)
    src_node_select_frame.grid(sticky='W')
    dest_node_select = node_dropdown_select("Select a dest node",
                                            node_choices, dest_node, 1, 0)
    dest_node_select.grid(sticky='W')
    #### Display shortest path(s) ####
    # Find shortest paths.  The two get_node_object() lookups validate the
    # selected names: an invalid/unset name raises ModelException, which
    # aborts this whole section via the except below.
    try:
        source_node_object = model.get_node_object(source_node.get())
        dest_node_object = model.get_node_object(dest_node.get())
        shortest_path = model.get_shortest_path(source_node.get(),
                                                dest_node.get())
        # shortest_path is a dict with 'path' (list of paths) and 'cost'.
        paths = shortest_path['path']
        cost = shortest_path['cost']
        # Create a frame to hold the shortest path(s)
        shortest_path_frame = LabelFrame(path_tab, text="Shortest Paths")
        shortest_path_frame.grid(row=2, column=0, sticky='W', padx=10)
        # One listbox per equal-cost shortest path, laid out left to right.
        column_counter = 0
        path_counter = 0
        for path in paths:
            list_of_interfaces = path
            label = "Shortest Path %s, cost = %s" % (str(path_counter),
                                                     str(cost))
            display_interfaces(label, shortest_path_frame, list_of_interfaces,
                               1, column_counter)
            column_counter += 2
            path_counter += 1
    except ModelException:
        # No valid source/dest selected yet; leave the section empty.
        pass
    #### Display all paths ####
    # NOTE: making one horizontal scrollbar drive multiple listboxes
    # requires the canvas/frame/create_window arrangement below, and the
    # update/config ordering at the end of the try block is significant.
    try:
        # Validation lookups, as above (assigned values are otherwise unused).
        source_node_object = model.get_node_object(source_node.get())
        dest_node_object = model.get_node_object(dest_node.get())
        all_paths = model.get_feasible_paths(source_node.get(),
                                             dest_node.get())
        # Create label frame to hold the feasible path(s) # frame_canvas
        feasible_path_frame = LabelFrame(path_tab, text="All Paths")
        feasible_path_frame.grid(row=3, column=0, padx=10, pady=10)
        feasible_path_frame.grid_rowconfigure(0, weight=1)
        feasible_path_frame.grid_columnconfigure(0, weight=1)
        feasible_path_frame.grid_propagate(False)
        # canvas
        feasible_path_canvas = Canvas(feasible_path_frame)
        feasible_path_canvas.grid(row=0, column=0, sticky='news')
        # Horizontal Scrollbar scrolls the canvas that hosts every listbox
        horizontal_scrollbar = Scrollbar(feasible_path_frame, orient=HORIZONTAL,
                                         command=feasible_path_canvas.xview)
        horizontal_scrollbar.grid(row=4, column=0, sticky='ew')
        feasible_path_canvas.configure(xscrollcommand=horizontal_scrollbar.set)
        # Create a frame to house the path(s)
        path_frame = Frame(feasible_path_canvas)  # frame_buttons
        feasible_path_canvas.create_window((0, 0), window=path_frame,
                                           anchor='nw')
        # One listbox per feasible path, laid out left to right.
        column_counter = 0
        path_counter = 0
        for path in all_paths:
            list_of_interfaces = path
            label = "Feasible Path %s" % (str(path_counter))
            display_interfaces(label, path_frame, list_of_interfaces,
                               1, column_counter)
            column_counter += 2
            path_counter += 1
        # These next 3 calls must stay in this order or the horizontal
        # scrollbar for the multiple listboxes doesn't work: realise the
        # inner frame, fix the outer frame's size, then set the canvas
        # scrollregion from the realised bounding box.
        path_frame.update_idletasks()
        feasible_path_frame.config(width=1200, height=300)
        feasible_path_canvas.config(scrollregion=feasible_path_canvas.bbox("all"))
    except ModelException:
        # No valid source/dest selected yet; leave the section empty.
        pass
def node_dropdown_select(label, node_choices, target_variable, row_, column_):
    """Create a LabelFrame on path_tab holding a node-select OptionMenu.

    label           : caption shown beside the menu
    node_choices    : list of node names to offer
    target_variable : tkinter StringVar the selection is written to
    row_, column_   : grid position of the frame on path_tab

    Returns the LabelFrame so the caller can adjust its grid options.
    """
    #### Frame to choose a node ####
    choose_node_frame = LabelFrame(path_tab)
    choose_node_frame.grid(row=row_, column=column_, padx=10, pady=10)
    # Label for choosing node
    Label(choose_node_frame, text=label).grid(row=0, column=0, sticky='W',
                                              pady=10)
    # The option menu calls set_active_object_from_option_menu when a
    # choice is made.  (Renamed local: the old name shadowed this very
    # function, and an unused node_choices_list copy was removed.)
    dropdown = OptionMenu(choose_node_frame, target_variable,
                          *node_choices,
                          command=set_active_object_from_option_menu)
    dropdown.grid(row=0, column=1, sticky='E')
    # Label to confirm selected Node
    Label(choose_node_frame, text="Selected node is:").grid(row=1, column=0, sticky='W')
    # Display the selected Node (dashes act as a placeholder underneath)
    Label(choose_node_frame, text='-----------------------------------').\
        grid(row=1, column=1, sticky='E')
    Label(choose_node_frame, text=target_variable.get()).grid(row=1, column=1, sticky='E')
    return choose_node_frame
# --- Module-level UI bootstrap: builds the main window, the notebook of
# --- tabs, and the shared Tk variables, then enters the event loop.
# Establish the canvas
ui_window = Tk()
ui_window.title('Network modeler UI')
ui_window.geometry('1600x750')
ui_window.resizable(1, 1)
# Create a tabbed notebook in the canvas ui_window
nb = ttk.Notebook(ui_window)  # Creates ttk notebook in ui window
# Establish names for selected demand, node, and interface in the notebook;
# these are shared by the examine_* callbacks above.
selected_demand = StringVar(nb)
selected_node = StringVar(nb)
selected_interface = StringVar(nb)
selected_model_file = StringVar(nb)
source_node = StringVar(nb)
dest_node = StringVar(nb)
network_graph_file = StringVar(nb)
# selected_model_file.set(None)
# The loaded network model; populated by open_file() from the first tab.
model = None
min_pct = IntVar(nb)  # Min percent utilization to search over interfaces for
# Notebook grid spans 70 columns and 69 rows and spreads out the notebook
# in all directions
nb.grid(row=1, column=0, columnspan=70, rowspan=69, sticky='NESW')
# Give every row/column weight so the notebook stretches with the window.
rows = 0
while rows < 70:
    ui_window.rowconfigure(rows, weight=1)
    ui_window.columnconfigure(rows, weight=1)
    rows += 1
#### File Open Tab ####
# Open a model file
open_file_tab = ttk.Frame(nb)
nb.add(open_file_tab, text="Open Model File")
# Establish a frame label
label_frame = ttk.LabelFrame(open_file_tab, text="Select a Network Model File")
label_frame.grid(column=0, row=0, padx=8, pady=8, sticky='W')
# Make a button to load a file
load_file_button = ttk.Button(open_file_tab)
load_file_button["text"] = "Push button to load network model file"
load_file_button.grid(row=11, column=0, sticky='W')
load_file_button["command"] = open_file
#### Node Tab ####
# Create a new tab and add it to the notebook
node_tab = ttk.Frame(nb)
nb.add(node_tab, text="Node Explorer")
#### Demand Tab ####
# Create a new tab and add it to the notebook
demand_tab = ttk.Frame(nb)
nb.add(demand_tab, text="Demand Explorer")
# TODO - Interface Tab with list of top utilized interfaces
# and be able to set utilization % and see all ints that exceed it
#### Interface Tab ####
interface_tab = ttk.Frame(nb)
nb.add(interface_tab, text="Interface Explorer")
#### Create Paths Tab ####
path_tab = ttk.Frame(nb)
nb.add(path_tab, text="Path Explorer")
# Hand control to tkinter's event loop (blocks until the window closes).
ui_window.mainloop()
| [
"elisa@jasinska.de"
] | elisa@jasinska.de |
2b8904d38acfeffb87691cb317edd7a9494fbc11 | 21f05b45dbb43667007f3063d1a33082e122bec6 | /src/NIMSU_Modules/DataType_Results.py | 1073a7061d10b06d877ccfdcc43e4b35d6fd655b | [] | no_license | DanAyres/NIMSU | 6f328f4b98a5eb34277be347fa1a2bb331bd87f0 | 6fe378c73d25aa58951de75d50841864268d389b | refs/heads/master | 2020-05-02T11:18:06.087070 | 2015-05-27T09:27:17 | 2015-05-27T09:27:17 | 34,388,261 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,820 | py | '''
Created on 14 Apr 2015
@author: daiel
'''
class Data():
def __init__(self,val,listt=[],hdf5='none'):
self.val=val
self.list=listt
self.hdf5=hdf5
def __add__(self,other):
pass
def __sub__(self,other):
pass
def __mul__(self,other):
pass
def __div__(self,other):
pass
def __radd__(self,other):
pass
class singleData(Data):
def __add__(self,other):
try:
return singleData( self.val + other.val)
except AttributeError:
return singleData( self.val + other)
def __radd__(self,other):
return singleData( self.val + other)
def __sub__(self,other):
try:
return singleData( self.val - other.val)
except AttributeError:
return singleData( self.val - other)
def __mul__(self,other):
try:
return singleData( self.val * other.val)
except AttributeError:
return singleData( self.val * other)
def __rmul__(self,other):
return singleData( self.val * other)
def __div__(self,other):
try:
return singleData( self.val / other.val)
except AttributeError:
return singleData( self.val / other)
def __pow__(self,other):
return singleData(self.val**other)
class listData(Data):
    """Result carrying a scalar .val and an element-wise .list.

    NOTE(review): the element-wise '.list' arithmetic (e.g. self.list +
    other, self.list ** other) only behaves element-wise when .list is a
    numpy array; a plain Python list would concatenate or raise — confirm
    what callers store in .list.
    """

    def __add__(self, other):
        try:
            return listData(self.val + other.val, listt=(self.list + other.list))
        except AttributeError:
            return listData(self.val + other, listt=(self.list + other))

    def __radd__(self, other):
        return listData(self.val + other, listt=(self.list + other))

    def __sub__(self, other):
        try:
            return listData(self.val - other.val, listt=(self.list - other.list))
        except AttributeError:
            return listData(self.val - other, listt=(self.list - other))

    def __mul__(self, other):
        try:
            return listData(self.val * other.val, listt=(self.list * other.list))
        except AttributeError:
            return listData(self.val * other, listt=(self.list * other))

    def __rmul__(self, other):
        return listData(self.val * other, listt=(self.list * other))

    def __div__(self, other):
        try:
            return listData(self.val / other.val, listt=(self.list / other.list))
        except AttributeError:
            return listData(self.val / other, listt=(self.list / other))

    # BUG FIX: Python 3's '/' operator uses __truediv__; __div__ alone was
    # never called.
    __truediv__ = __div__

    def __pow__(self, other):
        # BUG FIX: previously returned a singleData; exponentiation of a
        # listData should stay a listData, consistent with every other
        # operator in this class.
        return listData(self.val ** other, listt=(self.list ** other))
class hdf5Data(Data):
    """Placeholder for results backed by an HDF5 file (not yet implemented)."""

    # BUG FIX: binary operator methods take (self, other); the original
    # __add__(self) would raise TypeError the moment 'a + b' was evaluated.
    def __add__(self, other):
        pass
"daiel@daiel-XPS-L421X"
] | daiel@daiel-XPS-L421X |
6612f576f17ed5f3dd743c78d2b75c72608b9c56 | 5722f0056c3066fcfe71eabb66d1830c714626c3 | /Week 3/ex29.py | cdf29f241855439e36a5a90a9ceedc58a5a2c418 | [] | no_license | Leorodr501/idh3034leo | e14928d7126a9a946c61d2083b3bb43de833afbe | 38596ca48bf945c5a8891fb9aa258d6b40edd9ca | refs/heads/master | 2020-04-02T05:28:16.132218 | 2018-12-09T01:25:23 | 2018-12-09T01:25:23 | 154,079,280 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 489 | py | people = 20
cats = 30
dogs = 15
if people < cats:
print("Too many cats! The world is doomed!")
if people > cats:
print("Not many cats! The world is saved!")
if people < dogs:
print("The world is drooled on!")
if people > dogs:
print("The world is dry!")
dogs += 5
if people >= dogs:
print("People are greater than or equal to dogs.")
if people <= dogs:
print("People are less than or equal to dogs.")
if people == dogs:
print("People are dogs.")
| [
"root@instance-2.us-east4-c.c.mad-libs-221518.internal"
] | root@instance-2.us-east4-c.c.mad-libs-221518.internal |
96da18240353d57e20908d2a0b7b3f23721bc1cd | 89148623fc5a85684da41c8a8d7c04543f21e93e | /designer/formWindow.py | 0f7729ba9b5bdeb96c35f445ab9262093931bf11 | [] | no_license | lllllllai27/PyQt5_GUI | e2d9151fbac21b066e31d1f509740123411ec13c | 0f858bbf058f975fb5db925c277fad73ecbef54f | refs/heads/master | 2020-08-02T19:53:59.705009 | 2019-09-29T14:03:19 | 2019-09-29T14:03:19 | 211,487,236 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,600 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'formWindow.ui'
#
# Created by: PyQt5 UI code generator 5.13.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
    """pyuic5-generated UI builder for a five-field form window
    (id, name, age, position, salary — see retranslateUi) with
    confirm/cancel buttons.

    NOTE: generated from 'formWindow.ui' (see header warning); manual
    edits are lost when the .ui file is recompiled.
    """

    def setupUi(self, MainWindow):
        """Create all widgets and lay them out on MainWindow."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(800, 600)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Container widget that hosts the form layout and the button row.
        self.widget = QtWidgets.QWidget(self.centralwidget)
        self.widget.setGeometry(QtCore.QRect(250, 90, 227, 181))
        self.widget.setObjectName("widget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.widget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        # Form layout: one (label, line-edit) row per field.
        self.formLayout = QtWidgets.QFormLayout()
        self.formLayout.setObjectName("formLayout")
        self.label = QtWidgets.QLabel(self.widget)
        self.label.setObjectName("label")
        self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label)
        self.lineEdit = QtWidgets.QLineEdit(self.widget)
        self.lineEdit.setObjectName("lineEdit")
        self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.lineEdit)
        self.label_2 = QtWidgets.QLabel(self.widget)
        self.label_2.setObjectName("label_2")
        self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_2)
        self.lineEdit_2 = QtWidgets.QLineEdit(self.widget)
        self.lineEdit_2.setObjectName("lineEdit_2")
        self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.lineEdit_2)
        self.label_3 = QtWidgets.QLabel(self.widget)
        self.label_3.setObjectName("label_3")
        self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.label_3)
        self.lineEdit_3 = QtWidgets.QLineEdit(self.widget)
        self.lineEdit_3.setObjectName("lineEdit_3")
        self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.lineEdit_3)
        self.label_4 = QtWidgets.QLabel(self.widget)
        self.label_4.setObjectName("label_4")
        self.formLayout.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_4)
        self.lineEdit_4 = QtWidgets.QLineEdit(self.widget)
        self.lineEdit_4.setObjectName("lineEdit_4")
        self.formLayout.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.lineEdit_4)
        self.label_5 = QtWidgets.QLabel(self.widget)
        self.label_5.setObjectName("label_5")
        self.formLayout.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.label_5)
        self.lineEdit_5 = QtWidgets.QLineEdit(self.widget)
        self.lineEdit_5.setObjectName("lineEdit_5")
        self.formLayout.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.lineEdit_5)
        self.verticalLayout.addLayout(self.formLayout)
        # Horizontal row holding the confirm and cancel push buttons.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.pushButton = QtWidgets.QPushButton(self.widget)
        self.pushButton.setObjectName("pushButton")
        self.horizontalLayout.addWidget(self.pushButton)
        self.pushButton_2 = QtWidgets.QPushButton(self.widget)
        self.pushButton_2.setObjectName("pushButton_2")
        self.horizontalLayout.addWidget(self.pushButton_2)
        self.verticalLayout.addLayout(self.horizontalLayout)
        MainWindow.setCentralWidget(self.centralwidget)
        # Standard menu bar and status bar for the main window.
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 26))
        self.menubar.setObjectName("menubar")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Set all user-visible strings (Qt translation entry point)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.label.setText(_translate("MainWindow", "序号:"))
        self.label_2.setText(_translate("MainWindow", "姓名:"))
        self.label_3.setText(_translate("MainWindow", "年龄:"))
        self.label_4.setText(_translate("MainWindow", "职位:"))
        self.label_5.setText(_translate("MainWindow", "薪水:"))
        self.pushButton.setText(_translate("MainWindow", "确定"))
        self.pushButton_2.setText(_translate("MainWindow", "取消"))
| [
"31854329+lllllllai27@users.noreply.github.com"
] | 31854329+lllllllai27@users.noreply.github.com |
0099c7138f3d1228d6bced756f44e3f4ed25ed66 | 975e63eb3d9fd2617699a8dd447ed281a5225f27 | /simulation_utils.py | 0f2b84cac2fc73955902888e3297994687a5d916 | [
"MIT"
] | permissive | meirelon/baseball-season-simulation | 06813be8021105e388b28412f7a3313a3568500e | 835283029844023ee528b4a52b771f10a4b622b5 | refs/heads/master | 2022-07-22T03:13:22.404375 | 2022-07-19T05:15:47 | 2022-07-19T05:15:47 | 175,287,540 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | SCHEDULE_COLUMNS = [
"date",
"number_of_games",
"day_of_week",
"visiting_team",
"away_league",
"away_game_number",
"home_team",
"home_league",
"home_game_number",
"game_time",
]
# Full candidate set once considered:
# DISTRIBUTIONS = ["beta", "normal", "lognormal", "gamma", "weibull"]
# Distributions currently enabled for the simulation.
DISTRIBUTIONS = ["normal", "lognormal"]
# Team abbreviations grouped by MLB division.
# NOTE(review): the name is misspelled ("DIVISONS"); kept as-is because
# callers import it under this name — renaming would break the interface.
MLB_DIVISONS = {
    "al_east": ["NYA", "BOS", "BAL", "TOR", "TBA"],
    "al_central": ["MIN", "CLE", "KCA", "CHA", "DET"],
    "al_west": ["ANA", "HOU", "OAK", "SEA", "TEX"],
    "nl_east": ["NYN", "PHI", "ATL", "MIA", "WAS"],
    "nl_central": ["SLN", "CHN", "PIT", "MIL", "CIN"],
    "nl_west": ["ARI", "LAD", "COL", "SFN", "SDN"],
}
| [
"nestelm@gmail.com"
] | nestelm@gmail.com |
1591371310314083e2b8c2848fe393223b425cc9 | b50d4539a08d18839f7260dd5283eced8a40f932 | /project/urls.py | 29be03a36c2098adf0b01931dddd988f236ccb3f | [] | no_license | walid-brahim/django-class | 6b8739b1c370489182e2662d8f06e5c750ab65e5 | 80ad2264c09adb3fec287806be7b79a109abe9e2 | refs/heads/master | 2022-11-17T05:51:00.875416 | 2020-07-15T11:43:41 | 2020-07-15T11:43:41 | 279,705,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 801 | py | """project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path , include
# URL routes for the project: Django admin plus the blog app's URLconf.
urlpatterns = [
    # Built-in Django admin site.
    path('admin/', admin.site.urls),
    # All /blog/ routes are delegated to the 'post' app's urls module.
    path('blog/', include('post.urls')),
]
| [
"wbrahim1899@gmail.com"
] | wbrahim1899@gmail.com |
de48c66359d85fb9300d2d9bf9851a4d1a883f0d | 034adbabe1f0243452e19a8313b23cc6950b4ed1 | /check_version.py | faff1a4f08a8bd052d0bd50e437c484f88a96ca1 | [] | no_license | MrLiuYS/JSONFormat4Flutter | e2204b136b7165400dffd8605277f87d80194d7a | aeb265abba99ddd74f65c354f436b2d0ab83f1be | refs/heads/master | 2020-09-26T15:03:51.973447 | 2019-12-11T02:19:09 | 2019-12-11T02:19:09 | 226,279,167 | 0 | 0 | null | 2019-12-06T08:17:14 | 2019-12-06T08:17:13 | null | UTF-8 | Python | false | false | 2,812 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Filename : check_version.py
# @Date : 18-8-20 上午1:52
# @Author : DebuggerX
import configparser
import os
import ssl
import sys
from urllib import request
from json import loads
from PyQt5 import QtGui, QtCore
from PyQt5.QtCore import QThread, pyqtSignal
from PyQt5.QtWidgets import QMessageBox
from tools import msg_box_ui
# Version number of this build, compared against the remote version manifest.
code = 0.7
# Highest version the user chose to ignore; loaded from .ignore.cfg.
ignore_code = 0.0
# Module-level handle to the background version-check thread
# (assigned by check_version()).
check_last_version_thread = None
def get_exe_path():
    """Return the directory containing the running program.

    When frozen into a standalone executable (sys.frozen is set), that is
    the executable's directory; otherwise it is this module's directory.
    """
    frozen = getattr(sys, 'frozen', False)
    base = sys.executable if frozen else __file__
    return os.path.dirname(base)
def _check_ignore_version():
config = configparser.ConfigParser()
global ignore_code
# noinspection PyBroadException
try:
config.read(os.path.join(get_exe_path(), '.ignore.cfg'))
ignore_code = float(config.get('version', 'code'))
except Exception:
pass
class CheckLastVersion(QThread):
    """Background thread that fetches the remote version manifest.

    Emits ``trigger`` with the parsed JSON dict when the remote version is
    newer than both the running version (``code``) and the user-ignored
    version (``ignore_code``).
    """

    # Emitted with the remote manifest; handlers read 'code' and 'desc'.
    trigger = pyqtSignal(dict)

    def run(self):
        res_json = None
        # noinspection PyBroadException
        try:
            # SECURITY NOTE(review): certificate verification is disabled
            # via ssl._create_unverified_context(), so a man-in-the-middle
            # could spoof the update manifest.  Consider using a default
            # (verifying) SSL context instead.
            res = request.urlopen('https://raw.githubusercontent.com/debuggerx01/JSONFormat4Flutter/master/version',
                                  context=ssl._create_unverified_context())
            res_json = loads(res.read().decode())
        except Exception:
            # Network/parse failures are ignored; no update prompt is shown.
            pass
        if res_json is not None:
            global code
            # Only notify for versions newer than both the running build
            # and any version the user previously chose to ignore.
            if res_json['code'] > code and res_json['code'] > ignore_code:
                self.trigger.emit(res_json)
def check_last_version_handler(json_obj):
    """Show the new-version dialog and act on the user's choice.

    json_obj: remote manifest with 'code' (version number) and 'desc'
    (changelog text).  Per the dialog text: [确定] opens the download
    page, [忽略] suppresses further prompts for this version, [关闭]
    just closes the dialog.
    """
    msg_box = QMessageBox()
    msg_box.addButton('确定', QMessageBox.AcceptRole)
    msg_box.addButton('忽略', QMessageBox.NoRole)
    msg_box.addButton('关闭', QMessageBox.RejectRole)
    msg_box.setParent(msg_box_ui)
    msg_box.setWindowTitle("有新版本更新!")
    msg_box.setText("新新版本(v%s)更新内容:\n%s\n\n点击[确定]转跳到下载页,点击[忽略]忽略该版本提醒,点击[关闭]退出本提示框" % (json_obj['code'], json_obj['desc']))
    # NOTE(review): for custom buttons QMessageBox.exec() does not return
    # ButtonRole constants; comparing res against roles may match only by
    # numeric coincidence — verify which button each branch really handles
    # (the RejectRole branch below is the one that writes the ignore file).
    res = msg_box.exec()
    if res == QMessageBox.RejectRole:
        # Remember this version in .ignore.cfg so it is not offered again.
        config = configparser.ConfigParser()
        config.add_section('version')
        config.set('version', 'code', str(json_obj['code']))
        with open(os.path.join(get_exe_path(), '.ignore.cfg'), 'w') as configfile:
            config.write(configfile)
    elif res == QMessageBox.AcceptRole:
        # Open the GitHub releases page in the default browser.
        QtGui.QDesktopServices.openUrl(QtCore.QUrl('https://github.com/debuggerx01/JSONFormat4Flutter/releases'))
def check_version():
    """Start the asynchronous update check and return the running version.

    Loads any previously ignored version, then launches CheckLastVersion
    in the background; its trigger signal is wired to
    check_last_version_handler.  The thread object is stored in a module
    global (presumably so it outlives this call rather than being
    collected with a local).
    """
    _check_ignore_version()
    global check_last_version_thread
    check_last_version_thread = CheckLastVersion()
    check_last_version_thread.trigger.connect(check_last_version_handler)
    check_last_version_thread.start()
    return code
| [
"dx8917312@163.com"
] | dx8917312@163.com |
269ba1cfc40017b082ebff0bdbc3320c353476f2 | febd53417e07f52dc4a717cf241967be6e977913 | /DataRendering/structures.py | de84e5e948b5a1f2fe43f99a24c790b2bebcbaad | [] | no_license | DynamicsAndNeuralSystems/DevelopingMouse | 1d2d9d1e14fcee22f1032876e37fdd09334fd64f | 551d68ade7e522a62534293d004e24dc05ccb804 | refs/heads/master | 2023-03-21T09:25:22.200676 | 2021-03-19T02:24:54 | 2021-03-19T02:24:54 | 93,137,008 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,026 | py | # import sys
# print('\n'.join(sys.path))
from allensdk.api.queries.rma_api import RmaApi
import pandas as pd
import os
api = RmaApi()
MOUSE_GRAPH_ID = 17
def getStructureInfo(structure_level, other_criteria):
STRUCTURE_LEVEL=structure_level
OTHER_CRITERIA=other_criteria
structures = pd.DataFrame(
api.model_query('Structure',
criteria=('[graph_id$eq%d]' % MOUSE_GRAPH_ID)+\
('[st_level$eq%d]' % STRUCTURE_LEVEL)+\
(str(OTHER_CRITERIA)),
num_rows='all'))
return structures
def getStructureInfo_AdultMouse():
structures = pd.DataFrame(
api.model_query('Structure',
criteria='[graph_id$eq1]',
num_rows='all'))
return structures
def getCentreCoordinates_DevMouse(structure_level):
STRUCTURE_LEVEL=structure_level
structure_centers = pd.DataFrame(
api.model_query('StructureCenter',
criteria='structure'+\
('[st_level$eq%d]' % STRUCTURE_LEVEL)+\
('[graph_id$eq%d]' % MOUSE_GRAPH_ID),
num_rows='all'))
return structure_centers
def getCentreCoordinates_AdultMouse():
structure_centers_adult = pd.DataFrame(
api.model_query('StructureCenter',
criteria='structure[graph_id$eq1]',
num_rows='all'))
return structure_centers_adult
def getAcronymPath(structure_level, other_criteria):
STRUCTURE_LEVEL=structure_level
OTHER_CRITERIA=other_criteria
OntologyNode = pd.DataFrame(
api.model_query('OntologyNode',
criteria='structure'+\
('[st_level$eq%d]' % STRUCTURE_LEVEL)+\
('[graph_id$eq%d]' % MOUSE_GRAPH_ID)+\
(str(OTHER_CRITERIA)),
num_rows='all'))
return OntologyNode
def main():
print('hi')
#os.chdir(r'D:\Data\DevelopingAllenMouseAPI-master\Git') # user input the Git directory as on their computer here
# download level 5 structures of developing mouse
other_criteria_level5 = '[parent_structure_id$ne126651574]\
[parent_structure_id$ne126651586]\
[parent_structure_id$ne126651606]\
[parent_structure_id$ne126651618]\
[parent_structure_id$ne126651642]\
[parent_structure_id$ne126651654]\
[parent_structure_id$ne126651670]\
[parent_structure_id$ne126651682]\
[parent_structure_id$ne126651698]\
[parent_structure_id$ne126651710]\
[parent_structure_id$ne126651730]\
[parent_structure_id$ne126651742]\
[parent_structure_id$ne126651758]\
[parent_structure_id$ne126651770]\
[parent_structure_id$ne126651790]\
[parent_structure_id$ne126651810]\
[parent_structure_id$ne126651830]\
[parent_structure_id$ne126651854]\
[parent_structure_id$ne126651874]\
[parent_structure_id$ne126651898]\
[parent_structure_id$ne126651918]\
[parent_structure_id$ne126651942]\
[parent_structure_id$ne126651962]\
[parent_structure_id$ne126651982]\
[parent_structure_id$ne126652002]\
[parent_structure_id$ne126652022]\
[parent_structure_id$ne17651]\
[parent_structure_id$ne126652042]'
structures=getStructureInfo(structure_level=5, other_criteria=other_criteria_level5)
STRUCTURE_LEVEL = 5
# specify the directories
abs_dir = os.path.dirname(__file__)
rel_dir = os.path.join(abs_dir, '..','Data','API','Structures')
data = os.path.join(rel_dir, 'structureData_level%d.csv' % STRUCTURE_LEVEL)
structures.to_csv(data)
# download level 3 structures pf developing mouse
other_criteria_level3 = '[parent_structure_id$ne126651566]\
[parent_structure_id$ne126651634]\
[parent_structure_id$ne126651722]\
[parent_structure_id$ne126651786]\
[parent_structure_id$ne126651850]\
[parent_structure_id$ne126651894]\
[parent_structure_id$ne126651938]'
structures=getStructureInfo(structure_level=3, other_criteria=other_criteria_level3)
STRUCTURE_LEVEL = 3
data = os.path.join(rel_dir, 'structureData_level%d.csv' % STRUCTURE_LEVEL)
structures.to_csv(data)
# download adult mouse structure info
structures = getStructureInfo_AdultMouse()
data = os.path.join(rel_dir, 'structureData_adult.csv')
structures.to_csv(data)
# Download coordinates of centre of developing mouse structures
structure_centers=getCentreCoordinates_DevMouse(structure_level=5)
STRUCTURE_LEVEL = 5
data = os.path.join(rel_dir, 'structureCenters_level%d.csv' % STRUCTURE_LEVEL)
structure_centers.to_csv(data)
# Download coordinates of centre of adult mouse structures
structure_centers_adult=getCentreCoordinates_AdultMouse()
data = os.path.join(rel_dir, 'structureCenters_adult.csv')
structure_centers_adult.to_csv(data)
# download acronym path for developing mouse
OntologyNode=getAcronymPath(structure_level=5, other_criteria=other_criteria_level5)
STRUCTURE_LEVEL = 5
data = os.path.join(rel_dir, 'AcronymPath_level%d.csv' % STRUCTURE_LEVEL)
OntologyNode.to_csv(data)
if __name__ == '__main__':
main()
| [
"lauhoiyangladys@gmail.com"
] | lauhoiyangladys@gmail.com |
0f606882ce06c84c1d476970b5e536632bad59e4 | b5aea44506bd21fcd7b259abb372d93cca5837a4 | /income_classification.py | 167d494b05429e52f77aec7108da545266c93fff | [] | no_license | programmingknowledege/MachineLearning | b5135c996c6c103852023a6a33f01d5d930baa17 | 37c523a3dd107fa8a22ef268a9ee9f64c90afaca | refs/heads/master | 2022-11-10T08:48:15.519752 | 2020-06-24T17:23:05 | 2020-06-24T17:23:05 | 274,725,891 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | import pandas as pd
from sklearn.preprocessing import LabelEncoder
data=pd.read_csv("C:\\Users\\kunal\\Downloads\\income-classification\\income_evaluation.csv")
print(data.columns)
le_income=LabelEncoder()
data[" income"]=le_income.fit_transform(data[" income"])
print(data[" income"].unique()) | [
"bafnakunal1998@gmail.com"
] | bafnakunal1998@gmail.com |
33889b44e80246dbdece176c1b56e7f53f3baae2 | f64a580208cfd7fa332dc1df9cb9e776f9581216 | /jobportal/views.py | df0025f809db53e36768749963ad5a71e1de3618 | [] | no_license | pravsp/job-portal | 66f0bb3051643da6af73a5ea3328cd61ad22254a | 84c2a18a1ce54374b107de18d73f2184565f1658 | refs/heads/master | 2022-12-23T09:09:38.763835 | 2020-10-01T16:45:32 | 2020-10-01T16:45:32 | 299,570,940 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 137 | py | from jobportal import app
from flask import jsonify
@app.route('/')
def home():
return jsonify({'message': 'Welcome to rusteez!!!'}) | [
"praveen.kumar.sp@gmail.com"
] | praveen.kumar.sp@gmail.com |
d510a984109e30d272424766c0f4ceedc20d77e2 | ec5c35ac5163c4e81262a81a6a6c46667c01733d | /server/api.py | dfdfa338713c8c53b8fe3fb180871a407ed32b13 | [] | no_license | kotawiw/bytedance-exercise-2 | 27b32d81aa7e8040c1c8448acbe9c4ff20ff5b26 | 8db190487a6490ec852d8418d93ba62251a5437f | refs/heads/master | 2022-12-24T00:04:53.047395 | 2020-09-23T11:48:13 | 2020-09-23T11:48:13 | 297,948,510 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,826 | py | from flask import Blueprint, request, abort, jsonify
from flask import g
from server.auth import login_required
from server.models.users import User
from server.models.events import Event
from server.models.events import EventRegistration
bp = Blueprint("api", __name__, url_prefix="/api")
@bp.route("/events", methods=("GET",))
def query_events():
offset = request.args.get("offset", default=0, type=int)
limit = request.args.get("limit", default=10, type=int)
total_count, events = Event.query_events(offset=offset, limit=limit)
return jsonify(
totalCount=total_count,
values=[event_output(e) for e in events])
@bp.route("/events", methods=("POST",))
@login_required
def create_event():
user = g.user
event = Event.create(
user, request.json
)
return event_output(event)
@bp.route("/event/<string:event_id>", methods=("GET",))
def get_event(event_id):
event = Event.by_identifier(event_id)
if not event:
return abort(404, 'Event not found')
return event_output(event)
@bp.route("/event/<string:event_id>/registrations", methods=("GET",))
def get_registrations(event_id):
event = Event.by_identifier(event_id)
if not event:
return abort(404, 'Event not found')
registrations = EventRegistration.by_event(event)
return jsonify([registration_output(r) for r in registrations])
@bp.route("/event/<string:event_id>/registrations", methods=("PUT",))
def register_event(event_id):
event = Event.by_identifier(event_id)
if not event:
return abort(404, 'Event not found')
user = g.user
if not user:
return abort(401, 'Please login to register for an event')
register = EventRegistration.register(event, user)
return registration_output(register)
@bp.route("/event/<string:event_id>/registrations", methods=("DELETE",))
def unregister_event(event_id):
user = g.user
if not user:
return abort(401, 'Please login to unregister for an event')
event = Event.by_identifier(event_id)
if not event:
return abort(404, 'Event not found')
register = EventRegistration.by_event_user(event, user)
if not register:
return abort(404, 'Event registration not found')
EventRegistration.unregister(register)
return registration_output(register)
def event_output(event: Event):
return dict(
id=event.identifier,
name=event.name,
location=event.location,
description=event.description,
startTimestamp=event.start_timestamp,
endTimestamp=event.end_timestamp)
def registration_output(registration: EventRegistration):
# Todo: De-normalize registration info to include user email
user = User.query.get(registration.user_id)
return dict(
email=user.email
)
| [
"you@example.com"
] | you@example.com |
f0c5be2e54cfd6b4c05e9c5ed5ce3fd666c97b30 | ce2feffb3b8c3433eefb596aacbb9b73ff8bb3bb | /Desktop/TAGTOSHOP/Chatbot/qrcode-bot/fb_chatbot/models.py | e932d9c0e6e0c6d56aa0984ccaa6a386132d3f42 | [] | no_license | RamonRR93/MicrocontrollerPrisonProject | a6fe0fa8de6b05a490514bec3857918639afe138 | 3f7e65e29440744fa37e178737e18edc157c7f70 | refs/heads/master | 2020-12-24T09:56:22.248517 | 2017-09-29T14:50:38 | 2017-09-29T14:50:38 | 73,256,369 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,074 | py | from __future__ import unicode_literals
from django.db import models
# Create your models here.
class FacebookUser(models.Model):
fb_id = models.IntegerField(primary_key=True)
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
profile_pic = models.CharField(max_length=255)
locale = models.CharField(max_length=255)
gender = models.CharField(max_length=255)
timezone = models.CharField(max_length=255)
has_fb_data = models.BooleanField(default=False)
def __str__(self): # __unicode__ on Python 2
return self.first_name
class QRCode(models.Model):
style = models.CharField(max_length=255, default="Text")
url = models.CharField(max_length=255)
data = models.CharField(max_length=255)
def __str__(self): # __unicode__ on Python 2
return self.name
class Scan(models.Model):
fbuser = models.ForeignKey(FacebookUser, db_index=False)
qrcode = models.ForeignKey(QRCode, db_index=False)
date = models.DateTimeField(auto_now=True)
| [
"rr3088@columbia.edu"
] | rr3088@columbia.edu |
6e8f312ce8d26da7d371c9bd295ee0598f010704 | 5cc1296f10af0d65691fd01a23221d6d85f4deff | /cotizacion/migrations/0009_auto_20150805_1400.py | f213b7ccb1a13cf363c1195baf3b10f04e54fea3 | [] | no_license | yusnelvy/mtvmcotizacion | e52b58fe8c50d3921d36490084de328c52e4e9ea | 07d2bd5f36350b149c16a0aa514bb610b0cd3e18 | refs/heads/master | 2016-09-05T23:31:15.800940 | 2015-11-07T13:12:30 | 2015-11-07T13:12:30 | 35,440,629 | 0 | 0 | null | 2015-12-18T16:16:23 | 2015-05-11T18:01:47 | JavaScript | UTF-8 | Python | false | false | 1,033 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('cotizacion', '0008_auto_20150727_1207'),
]
operations = [
migrations.AlterField(
model_name='tiempo_carga',
name='peso_max',
field=models.DecimalField(blank=True, default=0.0, max_digits=8, decimal_places=3),
),
migrations.AlterField(
model_name='tiempo_carga',
name='peso_min',
field=models.DecimalField(blank=True, default=0.0, max_digits=8, decimal_places=3),
),
migrations.AlterField(
model_name='vehiculo',
name='capacidad_peso',
field=models.DecimalField(max_digits=8, decimal_places=3),
),
migrations.AlterField(
model_name='vehiculo',
name='capacidad_volumen',
field=models.DecimalField(max_digits=8, decimal_places=3),
),
]
| [
"yusnelvy@gmail.com"
] | yusnelvy@gmail.com |
a03b1ddee9bd276eb940bb87b27069497c127011 | b5c33e768a9845ae242026cb2a85ec4f073c4aa4 | /2_create-topic-model/sparkutils.py | ea2bc40e68ad0e69fe65c29a9bff4ba918b2b465 | [] | no_license | htrc/ACS-samayoa | 560e292db3d90fa1f4c1228d3239a7933863e4fb | 3a4652eef53e86e5ac6e566a6fdf62da88525854 | refs/heads/master | 2023-05-14T20:07:16.282745 | 2021-06-03T20:01:43 | 2021-06-03T20:01:43 | 362,940,964 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | import sys
from typing import Union
from pyspark import SparkConf, SparkContext
from pyspark.sql import SparkSession
def config_spark(app_name: str, num_cores: int = None) -> SparkSession:
spark_conf = SparkConf().setAppName(app_name)
if num_cores is not None:
spark_conf.setMaster(f"local[{num_cores}]")
spark = SparkSession.builder \
.config(conf=spark_conf) \
.getOrCreate()
return spark
def stop_spark_and_exit(spark: Union[SparkSession, SparkContext], exit_code: int = 0):
try:
spark.stop()
finally:
sys.exit(exit_code)
| [
"capitanu@illinois.edu"
] | capitanu@illinois.edu |
a69567cb312181e925b480f018bcfda89912d788 | 28642c0afd5a78640b713c4562d950ea40e0147a | /scripts/common.py | 1e929bbe059b4d552b5c6f55549fcaef6dfad70b | [
"Apache-2.0"
] | permissive | Kevin-Mok/kogito-images | e4764327f36983fd3f545089d83b35549b304121 | a814fe35e4a8c7bd32849bef934c7f6f57faf1b3 | refs/heads/master | 2023-06-12T09:24:35.854019 | 2021-07-05T12:59:09 | 2021-07-05T12:59:09 | 269,133,713 | 0 | 1 | Apache-2.0 | 2020-06-03T16:04:24 | 2020-06-03T16:04:23 | null | UTF-8 | Python | false | false | 13,810 | py | #!/usr/bin/python3
# This script defines some common function that are used by manage-kogito-version.py and push-staging.py script
import os
import re
from ruamel.yaml import YAML
# All kogito-image modules that have the kogito version.
MODULES = {"kogito-data-index-common", "kogito-data-index-mongodb",
"kogito-data-index-infinispan", "kogito-data-index-postgresql",
"kogito-trusty-common", "kogito-trusty-infinispan",
"kogito-trusty-redis", "kogito-explainability",
"kogito-image-dependencies", "kogito-jobs-service-common",
"kogito-jobs-service-ephemeral", "kogito-jobs-service-infinispan",
"kogito-jobs-service-mongodb", "kogito-jobs-service-postgresql",
"kogito-trusty-ui", "kogito-jq",
"kogito-kubernetes-client", "kogito-launch-scripts",
"kogito-logging", "kogito-management-console",
"kogito-task-console", "kogito-persistence",
"kogito-runtime-native", "kogito-runtime-jvm",
"kogito-builder", "kogito-s2i-core",
"kogito-system-user", "kogito-jit-runner",
"kogito-custom-truststore"}
MODULE_FILENAME = "module.yaml"
MODULES_DIR = "modules"
# imagestream file that contains all images, this file aldo needs to be updated.
IMAGE_STREAM_FILENAME = "kogito-imagestream.yaml"
# image.yaml file definition that needs to be updated
IMAGE_FILENAME = "image.yaml"
ARTIFACTS_VERSION_ENV_KEY = "KOGITO_VERSION"
# behave tests that needs to be updated
BEHAVE_BASE_DIR = 'tests/features'
CLONE_REPO_SCRIPT = 'tests/test-apps/clone-repo.sh'
def yaml_loader():
"""
default yaml Loader
:return: yaml object
"""
yaml = YAML()
yaml.preserve_quotes = True
yaml.width = 1024
yaml.indent(mapping=2, sequence=4, offset=2)
return yaml
def update_image_version(target_version):
"""
Update image.yaml version tag.
:param target_version: version used to update the image.yaml file
"""
print("Updating Image main file version from file {0} to version {1}".format(IMAGE_FILENAME, target_version))
try:
with open(IMAGE_FILENAME) as image:
data = yaml_loader().load(image)
if 'version' in data:
data['version'] = target_version
else:
print("Field version not found, returning...")
return
with open(IMAGE_FILENAME, 'w') as image:
yaml_loader().dump(data, image)
except TypeError as err:
print("Unexpected error:", err)
def update_image_stream(target_version):
"""
Update the imagestream file, it will update the tag name, version and image tag.
:param target_version: version used to update the imagestream file;
"""
print("Updating ImageStream images version from file {0} to version {1}".format(IMAGE_STREAM_FILENAME,
target_version))
try:
with open(IMAGE_STREAM_FILENAME) as imagestream:
data = yaml_loader().load(imagestream)
for item_index, item in enumerate(data['items'], start=0):
for tag_index, tag in enumerate(item['spec']['tags'], start=0):
data['items'][item_index]['spec']['tags'][tag_index]['name'] = target_version
data['items'][item_index]['spec']['tags'][tag_index]['annotations']['version'] = target_version
image_dict = str.split(data['items'][item_index]['spec']['tags'][tag_index]['from']['name'], ':')
# image name + new version
updated_image_name = image_dict[0] + ':' + target_version
data['items'][item_index]['spec']['tags'][tag_index]['from']['name'] = updated_image_name
with open(IMAGE_STREAM_FILENAME, 'w') as imagestream:
yaml_loader().dump(data, imagestream)
except TypeError:
raise
def get_all_module_dirs():
"""
Retrieve the module directories
"""
modules = []
# r=>root, d=>directories, f=>files
for r, d, f in os.walk(MODULES_DIR):
for item in f:
if MODULE_FILENAME == item:
modules.append(os.path.dirname(os.path.join(r, item)))
return modules
def get_kogito_module_dirs():
"""
Retrieve the Kogito module directories
"""
modules = []
for moduleName in MODULES:
modules.append(os.path.join(MODULES_DIR, moduleName))
return modules
def get_all_images():
"""
Retrieve the Kogito images' names
"""
images = []
# r=>root, d=>directories, f=>files
for r, d, f in os.walk("."):
for item in f:
if re.compile(r'.*-overrides.yaml').match(item):
images.append(item.replace("-overrides.yaml", ''))
return images
def update_modules_version(target_version):
"""
Update every Kogito module.yaml to the given version.
:param target_version: version used to update all Kogito module.yaml files
"""
for module_dir in get_kogito_module_dirs():
update_module_version(module_dir, target_version)
def update_module_version(module_dir, target_version):
"""
Set Kogito module.yaml to given version.
:param module_dir: directory where cekit modules are hold
:param target_version: version to set into the module
"""
try:
module_file = os.path.join(module_dir, "module.yaml")
with open(module_file) as module:
data = yaml_loader().load(module)
print(
"Updating module {0} version from {1} to {2}".format(data['name'], data['version'], target_version))
data['version'] = target_version
with open(module_file, 'w') as module:
yaml_loader().dump(data, module)
except TypeError:
raise
def retrieve_artifacts_version():
"""
Retrieve the artifacts version from envs in main image.yaml
"""
try:
with open(IMAGE_FILENAME) as imageFile:
data = yaml_loader().load(imageFile)
for index, env in enumerate(data['envs'], start=0):
if env['name'] == ARTIFACTS_VERSION_ENV_KEY:
return data['envs'][index]['value']
except TypeError:
raise
def update_artifacts_version_env_in_image(artifacts_version):
"""
Update `KOGITO_VERSION` env var in image.yaml.
:param artifacts_version: kogito version used to update image.yaml which contains the `KOGITO_VERSION` env var
"""
try:
with open(IMAGE_FILENAME) as imageFile:
data = yaml_loader().load(imageFile)
for index, env in enumerate(data['envs'], start=0):
if env['name'] == ARTIFACTS_VERSION_ENV_KEY:
print("Updating image.yaml env var {0} with value {1}".format(ARTIFACTS_VERSION_ENV_KEY,
artifacts_version))
data['envs'][index]['value'] = artifacts_version
with open(IMAGE_FILENAME, 'w') as imageFile:
yaml_loader().dump(data, imageFile)
except TypeError:
raise
def update_examples_ref_in_behave_tests(examples_ref):
"""
Update examples git reference into behave tests
:param examples_ref: kogito-examples reference
"""
print("Set examples_ref {} in behave tests".format(examples_ref))
# this pattern will look for any occurrences of using master or using x.x.x
pattern = re.compile(r'(using nightly-master)|(using nightly-\s*([\d.]+.x))|(using \s*([\d.]+))')
replacement = 'using {}'.format(examples_ref)
update_in_behave_tests(pattern, replacement)
def update_examples_uri_in_behave_tests(examples_uri):
"""
Update examples uri into behave tests
:param examples_uri: kogito-examples uri
"""
print("Set examples_uri {} in behave tests".format(examples_uri))
# pattern to get the default examples uri
pattern = re.compile(r'(https://github.com/kiegroup/kogito-examples.git)')
replacement = examples_uri
update_in_behave_tests(pattern, replacement)
def update_artifacts_version_in_behave_tests(artifacts_version):
"""
Update artifacts version into behave tests
:param artifacts_version: artifacts version to set
"""
print("Set artifacts_version {} in behave tests".format(artifacts_version))
# pattern to change the KOGITO_VERSION
pattern = re.compile('\|[\s]*KOGITO_VERSION[\s]*\|[\s]*(([\d.]+.x)|([\d.]+)[\s]*|([\d.]+-SNAPSHOT)|([\d.]+.Final))[\s]*\|')
replacement = '| KOGITO_VERSION | {} | '.format(artifacts_version)
update_in_behave_tests(pattern, replacement)
def update_runtime_image_in_behave_tests(runtime_image_name, image_suffix):
"""
Update a runtime image into behave tests
:param runtime_image_name: new full tag name of the runtime image
:param image_suffix: suffix of the runtime image to update
"""
print("Set {0} runtime image to {1} in behave tests".format(image_suffix, runtime_image_name))
# pattern to change the KOGITO_VERSION
pattern = re.compile(r'(runtime-image quay.io/kiegroup/kogito-runtime-{}:latest)'.format(image_suffix))
replacement = 'runtime-image {}'.format(runtime_image_name)
update_in_behave_tests(pattern, replacement)
pattern = re.compile(r'(runtime-image rhpam-7/rhpam-kogito-runtime-{}-rhel8:latest)'.format(image_suffix))
replacement = 'runtime-image {}'.format(runtime_image_name)
update_in_behave_tests(pattern, replacement)
def update_maven_repo_in_behave_tests(repo_url, replaceJbossRepository):
"""
Update maven repository into behave tests
:param repo_url: Maven repository url
:param replaceJbossRepository: Set to true if default Jboss repository needs to be overriden
"""
print("Set maven repo {} in behave tests".format(repo_url))
pattern = re.compile('\|\s*variable[\s]*\|[\s]*value[\s]*\|')
env_var_key = "MAVEN_REPO_URL"
if replaceJbossRepository:
env_var_key = "JBOSS_MAVEN_REPO_URL"
replacement = "| variable | value |\n | {} | {} |\n | MAVEN_DOWNLOAD_OUTPUT | true |".format(env_var_key,
repo_url)
update_in_behave_tests(pattern, replacement)
def ignore_maven_self_signed_certificate_in_behave_tests():
"""
Sets the environment variable to ignore the self-signed certificates in maven
"""
print("Setting MAVEN_IGNORE_SELF_SIGNED_CERTIFICATE env in behave tests")
pattern = re.compile('\|\s*variable[\s]*\|[\s]*value[\s]*\|')
replacement = "| variable | value |\n | MAVEN_IGNORE_SELF_SIGNED_CERTIFICATE | true |"
update_in_behave_tests(pattern, replacement)
def update_in_behave_tests(pattern, replacement):
"""
Update all behave tests files
:param pattern: Pattern to look for into file
:param replacement: What to put instead if pattern found
"""
for f in os.listdir(BEHAVE_BASE_DIR):
if f.endswith('.feature'):
update_in_file(os.path.join(BEHAVE_BASE_DIR, f), pattern, replacement)
def update_examples_ref_in_clone_repo(examples_ref):
"""
Update examples git reference into clone-repo.sh script
:param examples_ref: kogito-examples reference
"""
print("Set examples_ref {} in clone-repo script".format(examples_ref))
pattern = re.compile(r'(git checkout.*)')
replacement = "git checkout master"
if examples_ref != 'master':
replacement = "git checkout -b {0} origin/{1}".format(examples_ref, examples_ref)
update_in_file(CLONE_REPO_SCRIPT, pattern, replacement)
def update_examples_uri_in_clone_repo(examples_uri):
"""
Update examples uri into clone-repo.sh script
:param examples_uri: kogito-examples uri
"""
print("Set examples_uri {} in clone-repo script".format(examples_uri))
pattern = re.compile(r'(git clone.*)')
replacement = "git clone {}".format(examples_uri)
update_in_file(CLONE_REPO_SCRIPT, pattern, replacement)
def update_maven_repo_in_clone_repo(repo_url, replace_jboss_repository):
"""
Update maven repository into clone-repo.sh script
:param repo_url: Maven repository url
:param replace_jboss_repository: Set to true if default Jboss repository needs to be overridden
"""
print("Set maven repo {} in clone-repo script".format(repo_url))
pattern = ""
replacement = ""
if replace_jboss_repository:
pattern = re.compile(r'(export JBOSS_MAVEN_REPO_URL=.*)')
replacement = 'export JBOSS_MAVEN_REPO_URL="{}"'.format(repo_url)
else:
pattern = re.compile(r'(# export MAVEN_REPO_URL=.*)')
replacement = 'export MAVEN_REPO_URL="{}"'.format(repo_url)
update_in_file(CLONE_REPO_SCRIPT, pattern, replacement)
def ignore_maven_self_signed_certificate_in_clone_repo():
"""
Sets the environment variable to ignore the self-signed certificates in maven
"""
print("Setting MAVEN_IGNORE_SELF_SIGNED_CERTIFICATE env in clone repo")
pattern = re.compile(r'(# MAVEN_IGNORE_SELF_SIGNED_CERTIFICATE=.*)')
replacement = "MAVEN_IGNORE_SELF_SIGNED_CERTIFICATE=true"
update_in_file(CLONE_REPO_SCRIPT, pattern, replacement)
def update_in_file(file, pattern, replacement):
"""
Update in given file
:param file: file to update
:param pattern: Pattern to look for into file
:param replacement: What to put instead if pattern found
"""
with open(file) as fe:
updated_value = pattern.sub(replacement, fe.read())
with open(file, 'w') as fe:
fe.write(updated_value)
if __name__ == "__main__":
for m in get_kogito_module_dirs():
print("module {}".format(m))
| [
"noreply@github.com"
] | noreply@github.com |
ff4ddb936b2b3872ee557d4c51e532d34037b10d | 38b5dff792caa9246b9cb0a92c74c713878f1a19 | /Python/0804_UniqueMorseCodeWords/uniqueMorseRepresentations.py | 3cfca6e6817a9ceae5e7bbc481b0cf325ad0971f | [] | no_license | mtmmy/Leetcode | 12b108f81a3b3cee4f2801353e8fc8e9ec5e791e | 75aef2f6c42aeb51261b9450a24099957a084d51 | refs/heads/master | 2020-12-02T06:43:22.419495 | 2019-04-07T18:30:45 | 2019-04-07T18:30:45 | 96,882,649 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 674 | py | import string
class Solution:
def uniqueMorseRepresentations(self, words):
"""
:type words: List[str]
:rtype: int
"""
morseCode = [".-","-...","-.-.","-..",".","..-.","--.","....","..",".---","-.-",".-..","--","-.","---",".--.","--.-",".-.","...","-","..-","...-",".--","-..-","-.--","--.."]
alphabet = string.ascii_lowercase
morseDict = dict(zip(alphabet, morseCode))
result = set()
for word in words:
codedMorse = ""
for c in word:
codedMorse += morseDict[c]
result.add(codedMorse)
return len(result) | [
"mtmmy@users.noreply.github.com"
] | mtmmy@users.noreply.github.com |
0282347967f78e46d8770816ad02f6e2e4a5cc3d | 1f2494e856352873edba6dd24dcd5736a6c05351 | /pystart/pystart/spiders/Hue.py | 530c13f5d2cf578f79eadaa8e8868f7c8f2b2221 | [] | no_license | namvh145/Nam-Scott | c0f5ae4b2e0e9cabe7b7896d5a2153ac2b5e7cbc | a9c7c84f85bb3cecd622ec07f44e05efdf955c50 | refs/heads/master | 2021-07-04T06:54:24.790942 | 2021-05-11T09:36:02 | 2021-05-11T09:36:02 | 234,663,416 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,033 | py | import re
import scrapy
class QuangTri(scrapy.Spider):
name = "Hue"
start_urls = [
"https://toplist.vn/top-list/diem-du-lich-noi-tieng-xu-hue-382.htm"
]
def parse(self, response, **kwargs):
address = ".media-body li::text , .media-body p::text , strong::text , .item_dsp_row .media-heading::text"
source = response.css(address).extract()
title = response.css(".item_dsp_row .media-heading::text").extract()
n = len(source)
name = ""
description = ""
for i in range(n):
if source[i] in title:
name = source[i]
title.remove(name)
description = ""
else:
if (i + 1 < n and source[i + 1] in title) or i + 1 == n:
description += source[i]
yield {
"Name": name,
"Description": description
}
else:
description += source[i] | [
"noreply@github.com"
] | noreply@github.com |
c02a678107f5e807bc54b95fb1bc038e46931756 | f338eb32c45d8d5d002a84798a7df7bb0403b3c4 | /DQM/DTMonitorModule/test/DTkFactValidation_1_TEMPL_cfg.py | 28873b4aebd3900356c5f720350f92f2c2e3d464 | [] | permissive | wouf/cmssw | 0a8a8016e6bebc611f1277379e12bef130464afb | 60da16aec83a0fc016cca9e2a5ed0768ba3b161c | refs/heads/CMSSW_7_3_X | 2022-06-30T04:35:45.380754 | 2015-05-08T17:40:17 | 2015-05-08T17:40:17 | 463,028,972 | 0 | 0 | Apache-2.0 | 2022-02-24T06:05:30 | 2022-02-24T06:05:26 | null | UTF-8 | Python | false | false | 3,607 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("PROD")
process.MessageLogger = cms.Service("MessageLogger",
debugModules = cms.untracked.vstring('resolutionTest_step1',
'resolutionTest_step2',
'resolutionTest_step3'),
cout = cms.untracked.PSet(
threshold = cms.untracked.string('ERROR'),
default = cms.untracked.PSet(
limit = cms.untracked.int32(0)
),
resolution = cms.untracked.PSet(
limit = cms.untracked.int32(10000000)
),
noLineBreaks = cms.untracked.bool(True)
),
categories = cms.untracked.vstring('resolution'),
destinations = cms.untracked.vstring('cout')
)
process.load("Configuration.StandardSequences.Geometry_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.GlobalTag.globaltag = "GLOBALTAGTEMPLATE"
process.load("CondCore.DBCommon.CondDBSetup_cfi")
process.load("DQMServices.Core.DQM_cfg")
process.load("RecoLocalMuon.Configuration.RecoLocalMuonCosmics_cff")
process.source = cms.Source("PoolSource",
debugFlag = cms.untracked.bool(True),
debugVebosity = cms.untracked.uint32(10),
fileNames = cms.untracked.vstring()
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
process.calibDB = cms.ESSource("PoolDBESSource",
process.CondDBSetup,
timetype = cms.string('runnumber'),
toGet = cms.VPSet(cms.PSet(
record = cms.string('DTTtrigRcd'),
tag = cms.string('ttrig')
)),
connect = cms.string('sqlite_file:/afs/cern.ch/cms/CAF/CMSALCA/ALCA_MUONCALIB/DTCALIB/RUNPERIODTEMPLATE/ttrig/ttrig_DUMPDBTEMPL_RUNNUMBERTEMPLATE.db'),
authenticationMethod = cms.untracked.uint32(0)
)
process.es_prefer_calibDB = cms.ESPrefer('PoolDBESSource','calibDB')
# if read from RAW
process.load("EventFilter.DTRawToDigi.dtunpacker_cfi")
process.eventInfoProvider = cms.EDFilter("EventCoordinatesSource",
eventInfoFolder = cms.untracked.string('EventInfo/')
)
process.DTkFactValidation = cms.EDAnalyzer("DTCalibValidation",
# Write the histos on file
OutputMEsInRootFile = cms.bool(True),
# Lable to retrieve 2D segments from the event
segment2DLabel = cms.untracked.string('dt2DSegments'),
OutputFileName = cms.string('residuals.root'),
# Lable to retrieve 4D segments from the event
segment4DLabel = cms.untracked.string('dt4DSegments'),
debug = cms.untracked.bool(False),
# Lable to retrieve RecHits from the event
recHits1DLabel = cms.untracked.string('dt1DRecHits')
)
process.FEVT = cms.OutputModule("PoolOutputModule",
outputCommands = cms.untracked.vstring('drop *',
'keep *_MEtoEDMConverter_*_*'),
fileName = cms.untracked.string('DQM.root')
)
process.load("DQMServices.Components.MEtoEDMConverter_cff")
process.dummyProducer = cms.EDProducer("ThingWithMergeProducer")
# if read from RAW
#process.firstStep = cms.Sequence(process.muonDTDigis*process.dt1DRecHits*process.dt2DSegments*process.dt4DSegments*process.DTkFactValidation)
process.firstStep = cms.Sequence(process.dummyProducer + process.muonDTDigis*process.dt1DRecHits*process.dt2DSegments*process.dt4DSegments*process.DTkFactValidation*process.MEtoEDMConverter)
#process.firstStep = cms.Sequence(process.dummyProducer + process.dt1DRecHits*process.dt2DSegments*process.dt4DSegments*process.DTkFactValidation*process.MEtoEDMConverter)
process.p = cms.Path(process.firstStep)
process.outpath = cms.EndPath(process.FEVT)
process.DQM.collectorHost = ''
| [
"giulio.eulisse@gmail.com"
] | giulio.eulisse@gmail.com |
339cf3d15e438d86c084c9aa0f60c0b938831e58 | 3a910ca48f778e57cb0ab9a42f3e57ba2f527e6b | /reduceDimension/plotMethods.py | 1d0c4ca6a23a62cb52ac8e19fcea7b522952998d | [] | no_license | DaveGeneral/tensorflow_manage_nets | d7daec9f6fbe466b5257ad2add5de8733df35ef0 | 1ed8e657a593ffd37eb15b1ec2dfb1c9e7dff4ea | refs/heads/master | 2021-06-19T11:41:14.760786 | 2017-07-18T01:14:38 | 2017-07-18T01:14:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,554 | py | import sys, os
import numpy as np
import matplotlib.pyplot as plt
from itertools import cycle
switch_server = True
testdir = os.path.dirname('__file__')
srcdir = '..'
sys.path.insert(0, os.path.abspath(os.path.join(testdir, srcdir)))
if switch_server is True:
from tools import utils
from nets import net_aencoder as AE
from tools.dataset_csv import Dataset_csv
else:
from tensorflow_manage_nets.tools import utils
from tensorflow_manage_nets.nets import net_aencoder as AE
from tensorflow_manage_nets.tools.dataset_csv import Dataset_csv
def get_data_plot(opc):
    """Return hard-coded (dim, fractal) curves for one benchmark dataset.

    opc selects the dataset: 0 -> MNIST, 1 -> CIFAR10, 2 -> SVHN, 3 -> AG News.
    Returns a tuple (dx, name, original_dim) where dx maps a
    dimensionality-reduction method key ('ae', 'dct', 'pca', ...) to
    {'dim': [...], 'fractal': [...]} of equal length — presumably the reduced
    dimensionality and the measured fractal dimension at that size (TODO
    confirm the semantics against the code that produced these numbers).

    NOTE(review): any other opc value falls through every branch and the
    final `return` raises NameError (dx/name/original_dim unbound).
    """
    data = {}
    if opc == 0:
        # MNIST
        data['mnist'] = {}
        data['mnist']['ae'] = {}
        data['mnist']['ae']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474]
        data['mnist']['ae']['fractal'] = [3.1633,4.5932,6.2643,7.8174,0.1364,0.2681,0.4716,1.0265,1.8659,1.9534,25.5753,25.5753,25.5753]
        data['mnist']['dct'] = {}
        data['mnist']['dct']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474]
        data['mnist']['dct']['fractal'] = [3.5165,4.4513,4.7163,4.981,5.4558,5.2292,3.4751,19.0054,22.5753,25.5753,25.5753,25.5753,25.5753]
        data['mnist']['ipla'] = {}
        data['mnist']['ipla']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474]
        data['mnist']['ipla']['fractal'] = [2.0956,3.4231,3.613,4.5425,4.2206,4.4101,4.0596,16.7204,19.8748,23.2534,25.5753,25.5753,25.5753]
        # data['mnist']['sax'] = {}
        # data['mnist']['sax']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474]
        # data['mnist']['sax']['fractal'] = [2.5403,3.5612,4.7923,6.0755,6.1693,6.6266,13.7531,17.0834,21.3274,25.5753,25.5753,25.5753,25.5753]
        data['mnist']['pca'] = {}
        data['mnist']['pca']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474]
        data['mnist']['pca']['fractal'] = [3.7764,3.633,4.3696,4.7597,4.1067,4.5098,18.7939,1.7925,25.5753,25.5753,25.5753,25.5753,25.5753]
        data['mnist']['svd'] = {}
        data['mnist']['svd']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474]
        data['mnist']['svd']['fractal'] = [3.5072,2.9827,3.6737,4.8881,4.3186,4.8956,3.033,22.5753,25.5753,25.5753,25.5753,25.5753,25.5753]
        data['mnist']['paa'] = {}
        data['mnist']['paa']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474]
        data['mnist']['paa']['fractal'] = [3.7688,4.2507,3.8278,3.8522,5.2186,4.6841,4.8219,5.1059,4.2068,19.2354,23.5753,25.5753,25.5753]
        data['mnist']['dwt'] = {}
        data['mnist']['dwt']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474]
        data['mnist']['dwt']['fractal'] = [3.4016,4.5113,4.7749,4.4473,4.4599,4.5201,3.9403,4.6369,19.598,22.5753,25.5753,25.5753,25.5753]
        # data['mnist']['cp'] = {}
        # data['mnist']['cp']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474]
        # data['mnist']['cp']['fractal'] = [3.4246,4.0078,4.4739,4.8383,4.3009,5.4709,15.1808,17.9606,2.0437,24.5753,25.5753,25.5753,25.5753]
        dx = data['mnist']
        name = 'mnist'
        original_dim = 800
    elif opc == 1:
        # CIFAR10 - 1.6494
        data['cifar10'] = {}
        data['cifar10']['ae'] = {}
        data['cifar10']['ae']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474,711,1066,1599,2398,3597]
        data['cifar10']['ae']['fractal'] = [2.2365,2.6026,3.48,0.1136,0.119,0.1171,0.1697,0.2356,0.1557,0.2513,23.9903,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753]
        data['cifar10']['dct'] = {}
        data['cifar10']['dct']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['cifar10']['dct']['fractal'] = [3.9918,5.0943,6.243,2.8814,3.2774,4.645,6.1401,15.0665,3.3972,23.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753]
        data['cifar10']['ipla'] = {}
        data['cifar10']['ipla']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['cifar10']['ipla']['fractal'] = [1.9384,2.9256,3.8386,5.0608,5.8173,6.1881,8.9843,8.9674,9.8928,11.6136,12.7876,15.8204,20.5753,25.5753,25.5753,25.5753,25.5753,25.5753]
        # data['cifar10']['sax'] = {}
        # data['cifar10']['sax']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        # data['cifar10']['sax']['fractal'] = [0.4879,0.546,0.6239,0.7161,0.8361,0.9713,1.2115,1.5869,2.1696,2.9898,4.2449,6.7758,10.2027,16.8038,25.5753,25.5753,25.5753,25.5753]
        data['cifar10']['pca'] = {}
        data['cifar10']['pca']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['cifar10']['pca']['fractal'] = [3.3653,4.0906,3.9554,1.5782,3.9994,6.2009,5.3791,6.9906,18.8888,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753]
        data['cifar10']['svd'] = {}
        data['cifar10']['svd']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['cifar10']['svd']['fractal'] = [3.634,2.7057,4.6587,2.6081,5.9719,2.5707,6.5027,6.2395,21.8748,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753]
        data['cifar10']['paa'] = {}
        data['cifar10']['paa']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['cifar10']['paa']['fractal'] = [3.5931,4.3796,5.6623,8.6333,0.3525,10.1474,9.4041,0.1516,0.2149,10.5511,9.9956,9.3387,0.6065,0.7336,10.6043,10.1417,1.4088,1.5912]
        data['cifar10']['dwt'] = {}
        data['cifar10']['dwt']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['cifar10']['dwt']['fractal'] = [3.4213,4.8521,5.6459,2.9588,3.3272,5.485,6.7866,12.7766,14.5139,20.3658,23.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753]
        # data['cifar10']['cp'] = {}
        # data['cifar10']['cp']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        # data['cifar10']['cp']['fractal'] = [3.5199,4.6375,1.0774,6.3398,8.323,4.7599,8.2534,10.9358,14.926,20.9903,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753,25.5753]
        dx = data['cifar10']
        name = 'cifar10'
        original_dim = 4096
    elif opc == 2:
        # SVHN = 28.3359
        data['svhn'] = {}
        data['svhn']['pca'] = {}
        data['svhn']['pca']['dim'] = [4,6,9,13,19,28,42,63,94,141,211]
        data['svhn']['pca']['fractal'] = [3.6906,5.1793,6.4785,7.2137,12.435,5.422,2.4771,28.3359,28.3359,28.3359,28.3359]
        data['svhn']['paa'] = {}
        data['svhn']['paa']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211]
        data['svhn']['paa']['fractal'] = [3.6458,5.5113,4.277,5.9883,11.0742,8.8174,9.3821,10.8159,11.9396,14.168,14.168]
        data['svhn']['dwt'] = {}
        data['svhn']['dwt']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211]
        data['svhn']['dwt']['fractal'] = [2.9093,4.6779,5.5817,7.5835,8.0256,14.9638,3.4771,25.751,28.3359,28.3359,28.3359]
        data['svhn']['dct'] = {}
        data['svhn']['dct']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211]
        data['svhn']['dct']['fractal'] = [3.8896,5.515,6.5078,7.5054,15.8202,21.736,27.3359,28.3359,28.3359,28.3359,28.3359]
        data['svhn']['svd'] = {}
        data['svhn']['svd']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211]
        data['svhn']['svd']['fractal'] = [3.8101,5.0242,6.5251,7.9185,12.2805,5.5287,22.7814,28.3359,28.3359,28.3359,28.3359]
        data['svhn']['ipla'] = {}
        data['svhn']['ipla']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211]
        data['svhn']['ipla']['fractal'] = [1.8991,2.8153,3.7822,5.4587,7.7079,13.4563,24.751,28.3359,28.3359,28.3359,28.3359]
        dx = data['svhn']
        name = 'svhn'
        original_dim = 1152
    elif opc == 3:
        # AgNews - 30.4943
        data['agnews'] = {}
        data['agnews']['dwt'] = {}
        data['agnews']['dwt']['dim'] = [4,6,9,13,19,28,42,63,94,141,211,316,474,711,1066,1599,2398,3597]
        data['agnews']['dwt']['fractal'] = [0.9801,0.9747,1.7576,1.758,2.6681,2.5881,4.0368,4.0368,2.132,2.2803,3.9624,7.6197,7.6192,8.0396,7.4786,10.2216,12.3825,12.3825]
        data['agnews']['ipla'] = {}
        data['agnews']['ipla']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['agnews']['ipla']['fractal'] = [0.8858,0.8713,0.7968,1.5267,1.8855,1.94,2.6152,3.2711,2.5011,2.5171,3.1752,3.8608,6.2208,6.9321,10.0606,9.9789,9.9536,10.5323]
        data['agnews']['paa'] = {}
        data['agnews']['paa']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['agnews']['paa']['fractal'] = [0.8103,1.3728,1.7259,1.8405,2.7076,1.6259,3.195,5.4737,6.2383,6.8174,8.5794,9.333,0.4478,10.298,9.3606,8.1446,10.3471,11.1106]
        data['agnews']['svd'] = {}
        data['agnews']['svd']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['agnews']['svd']['fractal'] = [2.448,2.7537,2.9912,3.3966,3.6956,4.8341,4.5793,4.6637,17.3139,22.4432,24.7651,24.7651,24.7651,24.7651,24.7651,24.7651,24.7651,24.7651]
        data['agnews']['pca'] = {}
        data['agnews']['pca']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['agnews']['pca']['fractal'] = [2.5685,2.3968,2.8437,2.2094,3.805,4.4401,5.4467,5.8244,4.3857,24.7651,24.7651,24.7651,24.7651,24.7651,24.7651,24.7651,24.7651,24.7651]
        data['agnews']['dct'] = {}
        data['agnews']['dct']['dim'] = [4, 6, 9, 13, 19, 28, 42, 63, 94, 141, 211, 316, 474, 711, 1066, 1599, 2398, 3597]
        data['agnews']['dct']['fractal'] = [1.9761,2.43,2.5922,2.9844,3.9418,4.3012,4.111,2.1457,3.7315,4.676,5.0242,16.3515,1.5,24.7651,24.7651,24.7651,24.7651,24.7651]
        dx = data['agnews']
        name = 'agnews'
        original_dim = 8704
    return dx, name, original_dim
# One color per reduction method; cycle() wraps around if methods outnumber colors.
colors = cycle(['navy', 'turquoise', 'darkorange', 'cornflowerblue', 'teal','red', 'yellow', 'magenta', 'gray'])
# Draw one Dim-vs-Fractal figure per dataset (opc 0..3, see get_data_plot).
for i in range(4):
    data, name, originalD = get_data_plot(i)
    # Reuses figure 1 each iteration; plt.show() below blocks until the
    # window is closed, then the next dataset is drawn.
    plt.figure(1)
    for met, color in zip(data, colors):
        dset = data  # alias kept from the original multi-dataset loop
        plt.plot(dset[met]['dim'], dset[met]['fractal'], color=color, lw=1,
                 label='Method - {0}'.format(met))
    plt.xlabel('Dim')
    plt.ylabel('Fractal')
    plt.title('Dim-Fractal - Dataset ' + name + '-' + str(originalD))
    plt.legend(loc="lower right")
    plt.show()
| [
"rikardo.corp@gmail.com"
] | rikardo.corp@gmail.com |
4869b01a3ceb8a2fe7ccd3a7f8289efc0306cd9a | 4b8100df8c29a3676f3ed3f9cf4ce19f83fac6d5 | /images/urls.py | 038957d622f8315d93fcfe38b4d4c39f1b274bb3 | [] | no_license | mtali/bookmark | bc96818764e18865ef18142ab075d512cd028cfa | f568e2463599b7d4333074d4dc0de28ca67f76ce | refs/heads/master | 2021-04-12T01:45:48.934820 | 2018-03-18T19:43:34 | 2018-03-18T19:43:34 | 125,758,566 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 382 | py | from django.urls import path
from . import views
app_name = 'images'
urlpatterns = [
path('', views.image_list, name='list'),
path('create/', views.image_create, name='images'),
path('detail/<int:id>/<slug:slug>/', views.image_detail, name='detail'),
path('like/', views.image_like, name='like'),
path('ranking/', views.image_ranking, name='image_ranking'),
]
| [
"emmanuelmtali@yahoo.com"
] | emmanuelmtali@yahoo.com |
77e0b14f23eeea5597c04077de558897a24bc4d1 | d62ab6cb9243e790ac9537274f5bfa424b154d45 | /04-Flappy_bird_single/Brid_DQN.py | 7c1eec3fe592b3fdb4acf78eb6e18efab6b481d3 | [] | no_license | HeatedMajin/RL_experiment_note | cd06916b296f8ffcce96ebfe8250d14cf81ecf0b | 445a76c9c85895fe5bcda83cd0d37d7cbfd0107f | refs/heads/master | 2022-11-29T23:56:45.188342 | 2020-08-10T12:52:56 | 2020-08-10T12:52:56 | 278,829,649 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,562 | py | from collections import deque
import numpy as np
import torch
from torch import nn
from torch.autograd import Variable
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
class Bird_DQN(nn.Module):
    """Deep Q-network for Flappy Bird.

    Given a stack of 3 grayscale game frames (72x128 each, see
    ``empty_frame``), the network outputs a Q-value per action
    (2 actions: flap / do nothing). Also owns the epsilon-greedy policy
    and the experience-replay buffer.
    """
    # A single blank 72x128 frame, and the 3-frame "no history" start state.
    empty_frame = np.zeros((72, 128), dtype=np.float32)
    empty_state = np.stack((empty_frame, empty_frame, empty_frame), axis=0)

    def __init__(self, epsilon, mem_size):
        """epsilon: exploration probability; mem_size: replay-buffer capacity."""
        super(Bird_DQN, self).__init__()
        self.epsilon = epsilon
        self.actions_num = 2
        self.buildDQN()
        self.trainable = None
        self.mem_size = mem_size
        self.replay_mem = deque()
        self.time_step = 0

    def buildDQN(self):
        """Construct the conv->fc layers (each moved to `device` individually)."""
        # Flattened conv-output size; 32*16*9 matches the conv stack applied
        # to a 3x72x128 input — TODO confirm if the frame size ever changes.
        self.map_size = (32, 16, 9)
        self.conv1 = nn.Conv2d(3, 16, kernel_size=8, stride=4, padding=2).to(device)
        self.relu1 = nn.LeakyReLU(inplace=True).to(device)
        self.conv2 = nn.Conv2d(16, 32, kernel_size=4, stride=2, padding=1).to(device)
        self.relu2 = nn.LeakyReLU(inplace=True).to(device)
        self.fc1 = nn.Linear(self.map_size[0] * self.map_size[1] * self.map_size[2], 128).to(device)
        self.relu3 = nn.LeakyReLU(inplace=True).to(device)
        self.fc2 = nn.Linear(128, self.actions_num).to(device)

    def set_trainable(self, trainable):
        # Stored flag only; nothing in this class reads it.
        self.trainable = trainable

    def set_initial_state(self, obs=None):
        """
        Set initial state
        state: initial state. if None, use `BrainDQN.empty_state`
        """
        if obs is None:
            self.current_state = Bird_DQN.empty_state
        else:
            # Shift the frame history: drop the oldest frame, append obs.
            self.current_state = np.append(Bird_DQN.empty_state[1:, :, :], obs.reshape((1,) + obs.shape),
                                           axis=0)

    def forward(self, obs):
        # get Q estimation
        out = self.conv1(obs)
        out = self.relu1(out)
        out = self.conv2(out)
        out = self.relu2(out)
        out = out.view(out.size()[0], -1)
        out = self.fc1(out)
        out = self.relu3(out)
        out = self.fc2(out)
        return out

    def optimal_action(self):  # greedy choose (exploitation)
        """Return the one-hot action with the highest Q-value for current_state."""
        state = self.current_state
        state_var = Variable(torch.from_numpy(state), requires_grad=False).unsqueeze(0).to(device)
        q_values = self.forward(state_var)
        _, actions_index = torch.max(q_values, dim=1)
        action_index = actions_index.data[0]
        action = np.zeros(self.actions_num, dtype=np.float32)
        action[action_index] = 1
        return action

    def random_action(self):  # random choose (exploration)
        """Return a random one-hot action, biased 80/20 toward action 0."""
        action = np.zeros(self.actions_num, dtype=np.float32)
        action_index = 0 if np.random.random() < 0.8 else 1
        action[action_index] = 1
        return action

    def take_action(self):
        """Epsilon-greedy policy: explore with prob. epsilon, else exploit."""
        if np.random.random() < self.epsilon:
            return self.random_action()
        else:
            return self.optimal_action()

    def store_trans(self, action, reward, next_obs, finish):
        """Append (s, a, r, s', done) to the replay buffer and advance state."""
        next_state = np.append(self.current_state[1:, :, :], next_obs.reshape((1,) + next_obs.shape), axis=0)
        self.replay_mem.append((self.current_state, action, reward, next_state, finish))
        # Evict the oldest transition once over capacity.
        if (len(self.replay_mem) > self.mem_size):
            self.replay_mem.popleft()
        if (finish):
            self.set_initial_state()
        else:
            self.current_state = next_state

    def increase_timestep(self):
        self.time_step += 1
| [
"a145926@163.com"
] | a145926@163.com |
ead64974c7f331db8bd57afaac4d4c6e4eb8d533 | 98a288ad0496c484a777d31ffaaa0cd8678e6452 | /plusfriend/urls.py | afa16ed72fcced5aec68292880dc2565ef9d9d58 | [] | no_license | WHWH10/Askbot_ex | 24224b78cfb3f5e5d8b15987075ff675396f3690 | 4bc0fb3f7c81470010617ffb927eaa886496a4b1 | refs/heads/master | 2021-06-26T06:46:26.077884 | 2017-09-11T07:47:35 | 2017-09-11T07:47:35 | 103,101,059 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 316 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^keyboard$', views.on_init),
url(r'^friend$', views.on_added),
url(r'^friend/(?P<user_key>[\w-]+)$', views.on_block),
url(r'^chat_room/(?P<user_key>[\w-]+)$', views.on_leave),
url(r'^message$', views.on_message),
]
| [
"eyet010@gmail.com"
] | eyet010@gmail.com |
fb6f67a97a02f948ad2aa2760275229fce44c0f1 | 1abe01b916df738f6597fd3d754e0caf5bba82db | /Books/contact/views.py | 486198a07c7f7a2686fb8f898ce2ddaa145b8fda | [] | no_license | alegoriyas/Books | 58cb711b3b35ca9069802e0437f7a28a4e8db8f0 | b8fc2e0a5db88b921bd1b22965e95012feaadffd | refs/heads/master | 2022-12-24T10:08:09.362715 | 2020-10-08T08:31:06 | 2020-10-08T08:31:06 | 273,240,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,495 | py | from django.shortcuts import render
from django.core.mail import send_mail
def index(request):
    """Render the contact index page.

    NOTE(review): this sends a hard-coded email on *every* request with
    fail_silently=False, so a mail-server outage makes the page 500.
    Looks like leftover send_mail smoke-testing — consider removing.
    """
    send_mail('Hello from SteblinaEE',
              'Hello there. This is an automated message.',
              'steblinaee@gmail.com',
              ['egorinkas@gmail.com'],
              fail_silently=False)
    return render(request, 'contact/index.html')
def contact_form(request):
    """Render the contact form; on POST, email the submitted message.

    GET  -> empty form.
    POST -> sends the message via send_mail, then re-renders the template
            with the submitter's name and email in the context.
    """
    if request.method == 'POST':
        # .get() with a default avoids a 500 (MultiValueDictKeyError) when a
        # field is missing from the POST body — the original indexed directly.
        message_name = request.POST.get('message-name', '')
        message = request.POST.get('message', '')
        message_email = request.POST.get('message-email', '')
        # NOTE(review): using the visitor-supplied address as the SMTP "from"
        # is commonly rejected by providers (SPF/DKIM); prefer a fixed
        # from_email plus reply_to on an EmailMessage.
        send_mail(
            message_name,             # subject
            message,                  # message body
            message_email,            # from email
            ['egorinkas@gmail.com'],  # to email
        )
        return render(request, 'contact/contact_form.html',
                      {'message_name': message_name,
                       'message_email': message_email})
    return render(request, 'contact/contact_form.html', {})
"alegoriyass@yandex.ru"
] | alegoriyass@yandex.ru |
bc4df46eb7872b86c36c55931baed6b9e64e0e68 | 0b2590e8c6783a0ba9d811d4467e59241b037767 | /python3_programming_tricks/ch09/9-4.py | 7d550afaa409e34ca6df96184d15899b3b21c182 | [] | no_license | tuxnotes/python_notes | f070dffb46f34eefd341c47fdb2e414e4f85261c | f031490b3a208898a45fec67d83bf75f6ad91b8e | refs/heads/master | 2020-05-20T20:39:29.239620 | 2019-12-04T06:34:35 | 2019-12-04T06:34:35 | 185,746,541 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,023 | py | import time
import logging
import random
def warn_timeout(timeout):
    """Decorator factory: log a warning when the wrapped call exceeds `timeout`.

    The returned wrapper gains a `set_timeout(new_timeout)` attribute that
    rebinds the threshold at runtime (via the closure's `nonlocal`).
    """
    # Parameterized decorator, so this is a factory that builds one.
    def decorator(func):
        def wrap(*args, **kwargs):
            # Bug fix: the original read `timem.time()` (NameError) here.
            t0 = time.time()
            res = func(*args, **kwargs)
            used = time.time() - t0
            if used > timeout:
                logging.warning("%s: %s > %s", func.__name__, used, timeout)
            return res

        # Allow callers to change the threshold after decoration; `timeout`
        # is a closure variable, so rebinding needs `nonlocal`.
        def set_timeout(new_timeout):
            nonlocal timeout
            timeout = new_timeout

        wrap.set_timeout = set_timeout  # expose the setter on the wrapper
        return wrap
    return decorator
@warn_timeout(1.5)
def f(i):
    """Print the call index, then sleep in 0.6 s steps while a coin flip is 1."""
    print('in f [%s]' % i)
    while random.randint(0, 1):
        # Bug fix: the original called `time.spleep` (AttributeError).
        time.sleep(0.6)
# Demo: run f 30 times with the 1.5 s threshold, then tighten it to 1 s
# via the set_timeout hook and run another 30 times.
for i in range(30):
    f(i)
f.set_timeout(1)
for i in range(30):
    f(i)
| [
"vodaka@126.com"
] | vodaka@126.com |
1a24d817c3f129cc65696bef11cd602b33c56141 | edde0db6d7df34dce6b68b64bd88b382300d5661 | /021.py | 1a4a7697905445a6524d39963a4f8134345e59b0 | [] | no_license | Trietptm-on-Coding-Algorithms/eulerproject | 2af378c2464130e097e410ca981d11e8de743d7b | c794ae2528a516bd62069d59f9375210e4376969 | refs/heads/master | 2021-01-14T02:29:38.582949 | 2013-04-05T05:03:44 | 2013-04-05T05:03:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 487 | py | #! /usr/bin/env python
def memo(func):
    """Memoizing decorator: caches results keyed by the positional arguments."""
    results = {}

    def wrapper(*args):
        # EAFP: a cache hit is the common case after warm-up.
        try:
            return results[args]
        except KeyError:
            value = results[args] = func(*args)
            return value

    return wrapper
@memo
def divisors(n):
    """Return the proper divisors of n (all divisors < n) in ascending order.

    Pair enumeration up to sqrt(n) replaces the original O(n) scan with
    O(sqrt(n)) work; the output order and contents are unchanged.
    Returns [] for n < 2.
    """
    if n < 2:
        return []
    small, large = [], []
    d = 1
    while d * d <= n:
        if n % d == 0:
            small.append(d)
            q = n // d
            # Skip the square root duplicate and n itself (proper divisors only).
            if q != d and q != n:
                large.append(q)
        d += 1
    return small + large[::-1]
def d(n):
    """Sum of the proper divisors of n — the d() from the amicable-pair definition."""
    return sum(divisors(n))

#print d(284),d(220) #220,284
# Project Euler 21: sum all amicable numbers below 10000.
# A pair (i, j) is amicable when i != j, d(i) == j and d(j) == i.
# NOTE(review): Python 2 syntax (`print` statement on the last line);
# `if i in res` is an O(len(res)) list scan, cheap here since res stays tiny.
res = []
for i in range(1,10000):
    if i in res: continue  # already recorded as the partner of an earlier i
    j = d(i)
    if i!=j and d(j) == i:
        res.append(i)
        res.append(j)
print sum(res)
| [
"kennel209@gmail.com"
] | kennel209@gmail.com |
5f90d9616705b9653e695c4d316f9a93a74866bd | 080ed9d2b5ff76c5613d8dd42cb4f33bd9b3a82e | /code/sheet_cleaner/sheet_processor.py | f41e8a41c99357f2914903acd8abb13763aa675a | [
"MIT"
] | permissive | garain/nCoV2019 | 2114b9031baa13a52a93c30e92995ccfb01ea075 | 9cd6829ada1fd193908b213e4716f536253dbbe4 | refs/heads/master | 2022-12-12T01:21:18.883025 | 2020-09-09T05:01:52 | 2020-09-09T05:01:52 | 260,449,278 | 2 | 0 | MIT | 2020-05-01T12:03:27 | 2020-05-01T12:03:26 | null | UTF-8 | Python | false | false | 6,509 | py | import logging
import os
from datetime import datetime
from typing import List
import configparser
import pandas as pd
from geocoding import csv_geocoder
from spreadsheet import GoogleSheet
from functions import (duplicate_rows_per_column, fix_na, fix_sex,
generate_error_tables, trim_df, values2dataframe)
class SheetProcessor:
    """Cleans case data from Google Sheets, geocodes it, writes CSVs, and
    optionally pushes the outputs to a git repository."""

    def __init__(self, sheets: List[GoogleSheet], geocoder: csv_geocoder.CSVGeocoder, config: configparser.ConfigParser):
        # Paths accumulated by process() that push_to_github() will commit.
        self.for_github = []
        self.sheets = sheets
        self.geocoder = geocoder
        self.config = config

    def process(self):
        """Does all the heavy handling of spreadsheets, writing output to CSV files."""
        for s in self.sheets:
            logging.info("Processing sheet %s", s.name)

            ### Clean Private Sheet Entries. ###
            # note : private sheet gets updated on the fly and redownloaded to ensure continuity between fixes (granted its slower).
            range_ = f'{s.name}!A:AG'
            data = values2dataframe(s.read_values(range_))

            # Expand aggregated cases into one row each.
            logging.info("Rows before expansion: %d", len(data))
            if len(data) > 150000:
                logging.warning("Sheet %s has more than 150K rows, it should be split soon", s.name)
            data.aggregated_num_cases = pd.to_numeric(data.aggregated_num_cases, errors='coerce')
            data = duplicate_rows_per_column(data, "aggregated_num_cases")
            logging.info("Rows after expansion: %d", len(data))

            # Generate IDs for each row sequentially following the sheet_id-inc_int pattern.
            data['ID'] = s.base_id + "-" + pd.Series(range(1, len(data)+1)).astype(str)

            # Remove whitespace.
            data = trim_df(data)

            # Fix columns that can be fixed easily.
            data.sex = fix_sex(data.sex)

            # fix N/A => NA
            for col in data.select_dtypes("string"):
                data[col] = fix_na(data[col])

            # Regex fixes
            fixable, non_fixable = generate_error_tables(data)
            if len(fixable) > 0:
                logging.info('fixing %d regexps', len(fixable))
                s.fix_cells(fixable)
                # Re-read after writing fixes back so `data` reflects the sheet.
                data = values2dataframe(s.read_values(range_))

            # ~ negates, here clean = data with IDs not in non_fixable IDs.
            clean = data[~data.ID.isin(non_fixable.ID)]
            clean = clean.drop('row', axis=1)
            clean.sort_values(by='ID')
            s.data = clean
            non_fixable = non_fixable.sort_values(by='ID')

            # Save error_reports
            # These are separated by Sheet.
            logging.info('Saving error reports')
            directory = self.config['FILES']['ERRORS']
            file_name = f'{s.name}.error-report.csv'
            error_file = os.path.join(directory, file_name)
            non_fixable.to_csv(error_file, index=False, header=True, encoding="utf-8")
            self.for_github.append(error_file)

        # Combine data from all sheets into a single datafile
        all_data = []
        for s in self.sheets:
            logging.info("sheet %s had %d rows", s.name, len(s.data))
            all_data.append(s.data)
        all_data = pd.concat(all_data, ignore_index=True)
        all_data = all_data.sort_values(by='ID')
        logging.info("all_data has %d rows", len(all_data))

        # Fill geo columns.
        geocode_matched = 0
        for i, row in all_data.iterrows():
            geocode = self.geocoder.geocode(row.city, row.province, row.country)
            if not geocode:
                continue
            geocode_matched += 1
            all_data.at[i, 'latitude'] = geocode.lat
            all_data.at[i, 'longitude'] = geocode.lng
            all_data.at[i, 'geo_resolution'] = geocode.geo_resolution
            all_data.at[i, 'location'] = geocode.location
            all_data.at[i, 'admin3'] = geocode.admin3
            all_data.at[i, 'admin2'] = geocode.admin2
            all_data.at[i, 'admin1'] = geocode.admin1
            all_data.at[i, 'admin_id'] = geocode.admin_id
            all_data.at[i, 'country_new'] = geocode.country_new
        logging.info("Geocode matched %d/%d", geocode_matched, len(all_data))
        logging.info("Top 10 geocode misses: %s", self.geocoder.misses.most_common(10))
        with open("geocode_misses.csv", "w") as f:
            self.geocoder.write_misses_to_csv(f)
        logging.info("Wrote all geocode misses to geocode_misses.csv")

        # Reorganize csv columns so that they are in the same order as when we
        # used to have those geolocation within the spreadsheet.
        # This is to avoid breaking latestdata.csv consumers.
        all_data = all_data[["ID","age","sex","city","province","country","latitude","longitude","geo_resolution","date_onset_symptoms","date_admission_hospital","date_confirmation","symptoms","lives_in_Wuhan","travel_history_dates","travel_history_location","reported_market_exposure","additional_information","chronic_disease_binary","chronic_disease","source","sequence_available","outcome","date_death_or_discharge","notes_for_discussion","location","admin3","admin2","admin1","country_new","admin_id","data_moderator_initials","travel_history_binary"]]

        # save
        logging.info("Saving files to disk")
        dt = datetime.now().strftime('%Y-%m-%dT%H%M%S')
        file_name = self.config['FILES']['DATA'].replace('TIMESTAMP', dt)
        latest_name = os.path.join(self.config['FILES']['LATEST'], 'latestdata.csv')
        all_data.to_csv(file_name, index=False, encoding="utf-8")
        all_data.to_csv(latest_name, index=False, encoding="utf-8")
        logging.info("Wrote %s, %s", file_name, latest_name)
        self.for_github.extend([file_name, latest_name])

    def push_to_github(self):
        """Pushes csv files created by Process to Github."""
        logging.info("Pushing to github")
        # Create script for uploading to github
        # NOTE(review): builds a shell script and runs it via os.system;
        # file paths are interpolated unquoted, so paths with spaces break.
        script = 'set -e\n'
        script += 'cd {}\n'.format(self.config['GIT']['REPO'])
        script += 'git pull origin master\n'
        for g in self.for_github:
            script += f'git add {g}\n'
        script += 'git commit -m "data update"\n'
        script += 'git push origin master\n'
        script += f'cd {os.getcwd()}\n'
        print(script)
        os.system(script)
| [
"thomas.brewer@childrens.harvard.edu"
] | thomas.brewer@childrens.harvard.edu |
1a4bb46bd52734b636a9289f60533e348c0cd036 | 6a7e2db41319fc6a4470a06258bf487234055021 | /venv/Scripts/pip3.8-script.py | fa9e30e2c16968d373a28069a08af45f14f409f1 | [] | no_license | AthanasiosChaloudis/Data-Science-for-Web-Applications | 4cd448f4b9dd4d5df01be3ef12a4c00591a100e0 | a99bfd0fad92c6b6238ee48b10d7543fa32de92f | refs/heads/main | 2023-01-30T00:33:36.762972 | 2020-12-13T10:37:19 | 2020-12-13T10:37:19 | 308,966,948 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | #!C:\Users\Thanos\PycharmProjects\test1\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.8'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.8')()
)
| [
"61148755+thanosuhh@users.noreply.github.com"
] | 61148755+thanosuhh@users.noreply.github.com |
d3169a5021ad5997b3bd8cbd3b310c67d027bb0e | 53356576265cd15a98837e0f7ba60ce2b6a8d687 | /getemails/pipelines.py | 8cc50f657318fe68120fd1b906966134090cffa1 | [] | no_license | cnoott/getemails | 87abd44533d315f8929b00dc55ee23f5d328e424 | 2465a01f058ff4d47b57bf5184537023d54fe834 | refs/heads/master | 2022-11-28T15:37:36.620549 | 2020-08-11T16:37:55 | 2020-08-11T16:37:55 | 286,796,440 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | # Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
class GetemailsPipeline:
    """Scrapy item pipeline stub: passes every item through unchanged."""

    def process_item(self, item, spider):
        # No processing yet; returning the item keeps it flowing downstream.
        return item
| [
"liamamadio@gmail.com"
] | liamamadio@gmail.com |
b84f468a68c639807ccd982bd0207469b605c051 | d0571268b8b7fa8e6621d138217f6a98a418ca93 | /_1327/main/migrations/0005_add_anonymous_group.py | c8edc82430a8a8a4e4e7e3ee4d329e6563f138d4 | [
"MIT"
] | permissive | xasetl/1327 | 743a4a8b7c9b1984d3b8b434c4db4b6799a5ddb7 | 71a9d3adac0f01fb87612c24bb8d0f1b945cc703 | refs/heads/master | 2020-04-05T11:44:44.422539 | 2017-08-21T16:11:34 | 2017-08-21T16:11:34 | 35,056,504 | 2 | 0 | null | 2015-05-04T20:04:55 | 2015-05-04T20:04:55 | null | UTF-8 | Python | false | false | 596 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.contrib.auth.models import Group
from guardian.management import create_anonymous_user
from guardian.utils import get_anonymous_user
def add_anonymous_group(apps, schema_editor):
    """Forward migration: create the guardian anonymous user, an "Anonymous"
    group, and put that user into the group.

    NOTE(review): uses the live Group model and guardian helpers rather than
    apps.get_model(), so it depends on the current model state.
    """
    create_anonymous_user(None)
    group = Group.objects.create(name="Anonymous")
    user = get_anonymous_user()
    user.groups.add(group)
class Migration(migrations.Migration):
    # Runs after the university-network group migration; forward-only
    # (no reverse function supplied to RunPython).
    dependencies = [
        ('main', '0004_add_university_network_group'),
    ]
    operations = [
        migrations.RunPython(add_anonymous_group),
    ]
| [
"steffen.koette@gmail.com"
] | steffen.koette@gmail.com |
cba48a755863db9c8468e5ef1df1f3d89d1a79df | 06cb3c450c322f6b01ee44c1f4de0a883a5a785e | /Echoo/__init__.py | 09fa2ddd18cdb10f827e8356184c130e6e0f7acb | [
"MIT"
] | permissive | Lyken17/Echoo | 239077c1791e61745e3dda97ca6661a5e9eca962 | ae604ee42ee3cae9afe9d3583ede568567c7dbe8 | refs/heads/master | 2023-06-22T01:48:22.873260 | 2023-06-15T15:30:38 | 2023-06-15T15:30:38 | 205,288,960 | 5 | 2 | MIT | 2019-08-30T02:59:53 | 2019-08-30T02:26:11 | Python | UTF-8 | Python | false | false | 24 | py | from .echoo import Echoo | [
"ligeng@mit.edu"
] | ligeng@mit.edu |
8dda7acc6045272a3c4408b6132cc59c2ad575dc | 97b5e5caedf0931b00fdce1df7bbdbad692bdd0b | /pyscf/cc/ec_ccsd_t.py | add7818fc1ad1071e15d9c022971f6ae9c6e6573 | [
"Apache-2.0"
] | permissive | seunghoonlee89/pyscf-ecCC-TCC | c08cd333d7bf9accfb340ad3c1397b5d8a354b26 | 2091566fb83c1474e40bf74f271be2ce4611f60c | refs/heads/main | 2023-08-21T08:22:12.188933 | 2021-10-21T17:54:50 | 2021-10-21T17:54:50 | 326,832,474 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 18,463 | py | #!/usr/bin/env python
# Copyright 2014-2018 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
clone of UCCSD(T) code for ecRCCSD(T)
'''
import time
import ctypes
import numpy
from pyscf import lib
from pyscf.lib import logger
from pyscf.cc import _ccsd
def kernel(mycc, eris, coeff, t1=None, t2=None, verbose=logger.NOTE, ecTCCSD=False):
    """Compute the externally-corrected (T) energy correction for RCCSD.

    Adapted from the UCCSD(T) driver: the restricted t1/t2 are expanded into
    spin blocks (t1a == t1b, t2aa == t2bb) and only the aaa and baa spin cases
    are contracted; their sum is halved at the end (the bbb/abb code paths are
    kept commented out below). `coeff` supplies the external-correction
    projection matrices (Paaa, Pbaa) and active-space dimensions.

    Returns the real part of the (T) energy correction.
    """
    cpu1 = cpu0 = (time.clock(), time.time())
    log = logger.new_logger(mycc, verbose)

    mem_now = lib.current_memory()[0]
    max_memory = max(0, mycc.max_memory - mem_now)
    log.debug('max_memory %d MB (%d MB in use)', max_memory, mem_now)
    if t1 is None: t1 = mycc.t1
    if t2 is None: t2 = mycc.t2

    if ecTCCSD: raise NotImplementedError
    else: coeff.exclude_t_ecCCSDt() # off diag Pmat cas
    #else: mycc.coeff.get_Pmat_ccsdt_cas() # full Pmat cas
    #else: mycc.coeff.get_Pmat_ccsdt() # full Pmat

    # Active-space sizes from the external-correction coefficients.
    nocc_cas = int(coeff.nocc_cas)
    nvir_cas = int(coeff.nvir_cas)
    nocc_iact= int(coeff.nocc_iact)
    nocc2 = int(nocc_cas*(nocc_cas-1)/2)
    nocc3 = int(nocc_cas*(nocc_cas-1)*(nocc_cas-2)/6 )

    # Restricted amplitudes: alpha == beta blocks by construction.
    t1a = t1
    t1b = t1
    t2ab= t2
    t2aa= t2 - t2.transpose(0,1,3,2)
    t2bb= t2aa
    nocca, nvira = t1.shape
    noccb, nvirb = t1.shape
    nmoa = nocca + nvira
    nmob = noccb + nvirb

    if mycc.incore_complete:
        ftmp = None
    else:
        ftmp = lib.H5TmpFile()  # scratch file for the sorted integrals
    t1aT = t1a.T.copy()
    t1bT = t1aT
    t2aaT = t2aa.transpose(2,3,0,1).copy()
    t2bbT = t2aaT

    eris_vooo = numpy.asarray(eris.ovoo).transpose(1,3,0,2).conj().copy()
    eris_vvop = _sort_eri(mycc, eris, ftmp, log)
    cpu1 = log.timer_debug1('ecCCSD(T) sort_eri', *cpu1)

    dtype = numpy.result_type(t1a.dtype, t2aa.dtype, eris_vooo.dtype)
    et_sum = numpy.zeros(1, dtype=dtype)
    mem_now = lib.current_memory()[0]
    max_memory = max(0, mycc.max_memory - mem_now)
    # aaa
    # Block the (a,b) virtual-pair loops so the integral cache fits in memory.
    bufsize = max(8, int((max_memory*.5e6/8-nocca**3*3*lib.num_threads())*.4/(nocca*nmoa)))
    log.debug('max_memory %d MB (%d MB in use)', max_memory, mem_now)
    orbsym = numpy.zeros(nocca, dtype=int)
    contract = _gen_contract_aaa(t1aT, t2aaT, eris_vooo, eris.fock,
                                 eris.mo_energy, orbsym, coeff.Paaa,
                                 nocc_iact, nvir_cas, nocc3, log)
    with lib.call_in_background(contract, sync=not mycc.async_io) as ctr:
        for a0, a1 in reversed(list(lib.prange_tril(0, nvira, bufsize))):
            cache_row_a = numpy.asarray(eris_vvop[a0:a1,:a1], order='C')
            if a0 == 0:
                cache_col_a = cache_row_a
            else:
                cache_col_a = numpy.asarray(eris_vvop[:a0,a0:a1], order='C')
            ctr(et_sum, a0, a1, a0, a1, (cache_row_a,cache_col_a,
                                         cache_row_a,cache_col_a))

            for b0, b1 in lib.prange_tril(0, a0, bufsize/8):
                cache_row_b = numpy.asarray(eris_vvop[b0:b1,:b1], order='C')
                if b0 == 0:
                    cache_col_b = cache_row_b
                else:
                    cache_col_b = numpy.asarray(eris_vvop[:b0,b0:b1], order='C')
                ctr(et_sum, a0, a1, b0, b1, (cache_row_a,cache_col_a,
                                             cache_row_b,cache_col_b))
    cpu1 = log.timer_debug1('contract_aaa', *cpu1)
    et_aaa = et_sum[0]*0.5
    print('ecCCSD(T) aaa contribution =',et_sum[0]*0.5)

#    # bbb
#    bufsize = max(8, int((max_memory*.5e6/8-noccb**3*3*lib.num_threads())*.4/(noccb*nmob)))
#    log.debug('max_memory %d MB (%d MB in use)', max_memory, mem_now)
#    orbsym = numpy.zeros(noccb, dtype=int)
#    contract = _gen_contract_aaa(t1bT, t2bbT, eris_VOOO, eris.fockb,
#                                 eris.mo_energy[1], orbsym, log)
#    with lib.call_in_background(contract, sync=not mycc.async_io) as ctr:
#        for a0, a1 in reversed(list(lib.prange_tril(0, nvirb, bufsize))):
#            cache_row_a = numpy.asarray(eris_VVOP[a0:a1,:a1], order='C')
#            if a0 == 0:
#                cache_col_a = cache_row_a
#            else:
#                cache_col_a = numpy.asarray(eris_VVOP[:a0,a0:a1], order='C')
#            ctr(et_sum, a0, a1, a0, a1, (cache_row_a,cache_col_a,
#                                         cache_row_a,cache_col_a))
#
#            for b0, b1 in lib.prange_tril(0, a0, bufsize/8):
#                cache_row_b = numpy.asarray(eris_VVOP[b0:b1,:b1], order='C')
#                if b0 == 0:
#                    cache_col_b = cache_row_b
#                else:
#                    cache_col_b = numpy.asarray(eris_VVOP[:b0,b0:b1], order='C')
#                ctr(et_sum, a0, a1, b0, b1, (cache_row_a,cache_col_a,
#                                             cache_row_b,cache_col_b))
#    cpu1 = log.timer_debug1('contract_bbb', *cpu1)

    # Cache t2abT in t2ab to reduce memory footprint
    assert(t2ab.flags.c_contiguous)
    t2abT = lib.transpose(t2ab.copy().reshape(nocca*noccb,nvira*nvirb), out=t2ab)
    t2abT = t2abT.reshape(nvira,nvirb,nocca,noccb)
    # baa
    bufsize = int(max(12, (max_memory*.5e6/8-noccb*nocca**2*5)*.7/(nocca*nmob)))
    ts = t1aT, t1bT, t2aaT, t2abT
    fock = eris.fock
    vooo = eris_vooo
    contract = _gen_contract_baa(ts, vooo, fock, eris.mo_energy, orbsym,
                                 coeff.Pbaa, nocc_cas, nvir_cas, nocc_iact, nocc2, log)
    with lib.call_in_background(contract, sync=not mycc.async_io) as ctr:
        for a0, a1 in lib.prange(0, nvirb, int(bufsize/nvira+1)):
            cache_row_a = numpy.asarray(eris_vvop[a0:a1,:], order='C')
            cache_col_a = numpy.asarray(eris_vvop[:,a0:a1], order='C')
            for b0, b1 in lib.prange_tril(0, nvira, bufsize/6/2):
                cache_row_b = numpy.asarray(eris_vvop[b0:b1,:b1], order='C')
                cache_col_b = numpy.asarray(eris_vvop[:b0,b0:b1], order='C')
                ctr(et_sum, a0, a1, b0, b1, (cache_row_a,cache_col_a,
                                             cache_row_b,cache_col_b))
    cpu1 = log.timer_debug1('contract_baa', *cpu1)
    print('ecCCSD(T) baa contribution =',0.5*et_sum[0]-et_aaa)

#    t2baT = numpy.ndarray((nvirb,nvira,noccb,nocca), buffer=t2abT,
#                          dtype=t2abT.dtype)
#    t2baT[:] = t2abT.copy().transpose(1,0,3,2)
#    # abb
#    ts = t1bT, t1aT, t2bbT, t2baT
#    fock = (eris.fockb, eris.focka)
#    mo_energy = (eris.mo_energy[1], eris.mo_energy[0])
#    vooo = (eris_VOOO, eris_VoOo, eris_vOoO)
#    contract = _gen_contract_baa(ts, vooo, fock, mo_energy, orbsym, log)
#    for a0, a1 in lib.prange(0, nvira, int(bufsize/nvirb+1)):
#        with lib.call_in_background(contract, sync=not mycc.async_io) as ctr:
#            cache_row_a = numpy.asarray(eris_vVoP[a0:a1,:], order='C')
#            cache_col_a = numpy.asarray(eris_VvOp[:,a0:a1], order='C')
#            for b0, b1 in lib.prange_tril(0, nvirb, bufsize/6/2):
#                cache_row_b = numpy.asarray(eris_VVOP[b0:b1,:b1], order='C')
#                cache_col_b = numpy.asarray(eris_VVOP[:b0,b0:b1], order='C')
#                ctr(et_sum, a0, a1, b0, b1, (cache_row_a,cache_col_a,
#                                             cache_row_b,cache_col_b))
#    cpu1 = log.timer_debug1('contract_abb', *cpu1)
#
#    # Restore t2ab
#    lib.transpose(t2baT.transpose(1,0,3,2).copy().reshape(nvira*nvirb,nocca*noccb),
#                  out=t2ab)
    et_sum *= .5
    if abs(et_sum[0].imag) > 1e-4:
        logger.warn(mycc, 'Non-zero imaginary part of ecCCSD(T) energy was found %s',
                    et_sum[0])
    et = et_sum[0].real
    mem_now = lib.current_memory()[0]
    max_memory = max(0, mycc.max_memory - mem_now)
    log.debug('max_memory %d MB (%d MB in use)', max_memory, mem_now)
    log.timer('ecCCSD(T)', *cpu0)
    log.note('ecCCSD(T) correction = %.15g', et)
    return et
def _gen_contract_aaa(t1T, t2T, vooo, fock, mo_energy, orbsym, paaa, nocc_iact, nvir_cas, nocc3, log):
    """Build the contraction closure for the aaa-spin block of ecCCSD(T).

    Returns ``contract(et_sum, a0, a1, b0, b1, cache)``, which calls the C
    kernel ``CCecccsd_t_aaa`` to accumulate the triples-correction energy for
    the virtual-index tile [a0,a1) x [b0,b1) into ``et_sum`` (a one-element
    array, mutated in place).
    """
    nvir, nocc = t1T.shape
    mo_energy = numpy.asarray(mo_energy, order='C')
    fvo = fock[nocc:,:nocc].copy()
    # Fix: time.clock() was removed in Python 3.8; process_time() is the
    # documented replacement for CPU-time stamps used by log.timer_debug1.
    cpu2 = [time.process_time(), time.time()]
    # Symmetry bookkeeping: irrep offset tables for occupied, virtual, and
    # occupied-pair index ranges (minlength=8 covers D2h and its subgroups).
    orbsym = numpy.hstack((numpy.sort(orbsym[:nocc]),numpy.sort(orbsym[nocc:])))
    o_ir_loc = numpy.append(0, numpy.cumsum(numpy.bincount(orbsym[:nocc], minlength=8)))
    v_ir_loc = numpy.append(0, numpy.cumsum(numpy.bincount(orbsym[nocc:], minlength=8)))
    o_sym = orbsym[:nocc]
    oo_sym = (o_sym[:,None] ^ o_sym).ravel()
    oo_ir_loc = numpy.append(0, numpy.cumsum(numpy.bincount(oo_sym, minlength=8)))
    nirrep = max(oo_sym) + 1
    # The C kernel expects 32-bit integer index arrays
    orbsym   = orbsym.astype(numpy.int32)
    o_ir_loc = o_ir_loc.astype(numpy.int32)
    v_ir_loc = v_ir_loc.astype(numpy.int32)
    oo_ir_loc = oo_ir_loc.astype(numpy.int32)
    dtype = numpy.result_type(t2T.dtype, vooo.dtype, fock.dtype)
    # Fix: numpy.complex (an alias of the builtin) was removed in NumPy 1.24;
    # compare against the concrete complex128 dtype instead.
    if dtype == numpy.complex128:
        # Complex-valued kernel (CCuccsd_t_zaaa) is not hooked up here.
        raise NotImplementedError
    else:
        drv = _ccsd.libcc.CCecccsd_t_aaa
    def contract(et_sum, a0, a1, b0, b1, cache):
        """Accumulate the aaa triples energy for tile [a0,a1) x [b0,b1)."""
        cache_row_a, cache_col_a, cache_row_b, cache_col_b = cache
        drv(et_sum.ctypes.data_as(ctypes.c_void_p),
            mo_energy.ctypes.data_as(ctypes.c_void_p),
            t1T.ctypes.data_as(ctypes.c_void_p),
            t2T.ctypes.data_as(ctypes.c_void_p),
            vooo.ctypes.data_as(ctypes.c_void_p),
            fvo.ctypes.data_as(ctypes.c_void_p),
            paaa.ctypes.data_as(ctypes.c_void_p),
            ctypes.c_int(nocc), ctypes.c_int(nvir),
            ctypes.c_int(nocc_iact), ctypes.c_int(nvir_cas),
            ctypes.c_int(nocc3),
            ctypes.c_int(a0), ctypes.c_int(a1),
            ctypes.c_int(b0), ctypes.c_int(b1),
            ctypes.c_int(nirrep),
            o_ir_loc.ctypes.data_as(ctypes.c_void_p),
            v_ir_loc.ctypes.data_as(ctypes.c_void_p),
            oo_ir_loc.ctypes.data_as(ctypes.c_void_p),
            orbsym.ctypes.data_as(ctypes.c_void_p),
            cache_row_a.ctypes.data_as(ctypes.c_void_p),
            cache_col_a.ctypes.data_as(ctypes.c_void_p),
            cache_row_b.ctypes.data_as(ctypes.c_void_p),
            cache_col_b.ctypes.data_as(ctypes.c_void_p))
        cpu2[:] = log.timer_debug1('contract %d:%d,%d:%d'%(a0,a1,b0,b1), *cpu2)
    return contract
def _gen_contract_baa(ts, vooo, fock, mo_energy, orbsym,
                      pbaa, nocc_cas, nvir_cas, nocc_iact, nocc2, log):
    """Build the contraction closure for the baa-spin block of ecCCSD(T).

    ``ts`` supplies (t1aT, t1bT, t2aaT, t2abT). Returns
    ``contract(et_sum, a0, a1, b0, b1, cache)``, which calls the C kernel
    ``CCecccsd_t_baa`` to accumulate the triples energy for the tile
    [a0,a1) x [b0,b1) into ``et_sum`` (mutated in place).
    """
    t1aT, t1bT, t2aaT, t2abT = ts
    # NOTE(review): alpha and beta blocks deliberately share the same fock,
    # vooo and mo_energy inputs here (restricted-reference case) — confirm
    # against the caller before extending to unrestricted references.
    focka = fock
    fockb = fock
    vOoO = vooo
    VoOo = vooo
    nvira, nocca = t1aT.shape
    nvirb, noccb = t1bT.shape
    mo_ea = numpy.asarray(mo_energy, order='C')
    mo_eb = mo_ea
    fvo = focka[nocca:,:nocca].copy()
    fVO = fockb[noccb:,:noccb].copy()
    # Fix: time.clock() was removed in Python 3.8; use process_time().
    cpu2 = [time.process_time(), time.time()]
    dtype = numpy.result_type(t2aaT.dtype, vooo.dtype)
    # Fix: numpy.complex alias was removed in NumPy 1.24; compare against
    # the concrete complex128 dtype instead.
    if dtype == numpy.complex128:
        # Complex-valued kernel (CCuccsd_t_zbaa) is not hooked up here.
        raise NotImplementedError
    else:
        drv = _ccsd.libcc.CCecccsd_t_baa
    def contract(et_sum, a0, a1, b0, b1, cache):
        """Accumulate the baa triples energy for tile [a0,a1) x [b0,b1)."""
        cache_row_a, cache_col_a, cache_row_b, cache_col_b = cache
        drv(et_sum.ctypes.data_as(ctypes.c_void_p),
            mo_ea.ctypes.data_as(ctypes.c_void_p),
            mo_eb.ctypes.data_as(ctypes.c_void_p),
            t1aT.ctypes.data_as(ctypes.c_void_p),
            t1bT.ctypes.data_as(ctypes.c_void_p),
            t2aaT.ctypes.data_as(ctypes.c_void_p),
            t2abT.ctypes.data_as(ctypes.c_void_p),
            vooo.ctypes.data_as(ctypes.c_void_p),
            vOoO.ctypes.data_as(ctypes.c_void_p),
            VoOo.ctypes.data_as(ctypes.c_void_p),
            fvo.ctypes.data_as(ctypes.c_void_p),
            fVO.ctypes.data_as(ctypes.c_void_p),
            pbaa.ctypes.data_as(ctypes.c_void_p),
            ctypes.c_int(nocca), ctypes.c_int(noccb),
            ctypes.c_int(nvira), ctypes.c_int(nvirb),
            ctypes.c_int(nocc_cas), ctypes.c_int(nvir_cas),
            ctypes.c_int(nocc_iact), ctypes.c_int(nocc2),
            ctypes.c_int(a0), ctypes.c_int(a1),
            ctypes.c_int(b0), ctypes.c_int(b1),
            cache_row_a.ctypes.data_as(ctypes.c_void_p),
            cache_col_a.ctypes.data_as(ctypes.c_void_p),
            cache_row_b.ctypes.data_as(ctypes.c_void_p),
            cache_col_b.ctypes.data_as(ctypes.c_void_p))
        cpu2[:] = log.timer_debug1('contract %d:%d,%d:%d'%(a0,a1,b0,b1), *cpu2)
    return contract
def _sort_eri(mycc, eris, h5tmp, log):
    """Repack (ov|ov) and (ov|vv) integrals into a (nvir,nvir,nocc,nmo)
    'vvop' array, held in memory when ``mycc.incore_complete`` (or no HDF5
    temp file is given), otherwise streamed into an HDF5 dataset.

    Returns the vvop array/dataset consumed by the (T) contraction loops.
    """
    # Fix: time.clock() was removed in Python 3.8; use process_time().
    cpu1 = (time.process_time(), time.time())
    nocc = eris.nocc
    nmo = eris.fock.shape[0]
    nvir = nmo - nocc
    if mycc.t2 is None:
        dtype = eris.ovov.dtype
    else:
        dtype = numpy.result_type(mycc.t2[0], eris.ovov.dtype)
    if mycc.incore_complete or h5tmp is None:
        eris_vvop = numpy.empty((nvir,nvir,nocc,nmo), dtype)
    else:
        eris_vvop = h5tmp.create_dataset('vvop', (nvir,nvir,nocc,nmo), dtype)
    # Block over the second virtual index so the transpose buffers fit in
    # (at most ~8 GB of) the remaining memory budget.
    max_memory = max(2000, mycc.max_memory - lib.current_memory()[0])
    max_memory = min(8000, max_memory*.9)
    blksize = min(nvir, max(16, int(max_memory*1e6/8/(nvir*nocc*nmo))))
    with lib.call_in_background(eris_vvop.__setitem__, sync=not mycc.async_io) as save:
        # Double-buffer so the background save can overlap the next fill.
        bufopv = numpy.empty((nocc,nmo,nvir), dtype=dtype)
        buf1 = numpy.empty_like(bufopv)
        for j0, j1 in lib.prange(0, nvir, blksize):
            ovov = numpy.asarray(eris.ovov[:,j0:j1])
            ovvv = eris.get_ovvv(slice(None), slice(j0,j1))
            for j in range(j0,j1):
                bufopv[:,:nocc,:] = ovov[:,j-j0].conj()
                bufopv[:,nocc:,:] = ovvv[:,j-j0].conj()
                save(j, bufopv.transpose(2,0,1))
                bufopv, buf1 = buf1, bufopv
            ovov = ovvv = None
            cpu1 = log.timer_debug1('transpose %d:%d'%(j0,j1), *cpu1)
    return eris_vvop
if __name__ == '__main__':
    # Self-test driver: compares kernel() results against hard-coded
    # reference values from earlier runs (printed differences should be ~0).
    from pyscf import gto
    from pyscf import scf
    from pyscf import cc
    # Test 1: closed-shell water, spin-adapted RCCSD amplitudes fed to UCCSD
    mol = gto.Mole()
    mol.atom = [
        [8 , (0. , 0. , 0.)],
        [1 , (0. , -.757 , .587)],
        [1 , (0. , .757 , .587)]]
    mol.basis = '631g'
    mol.build()
    rhf = scf.RHF(mol)
    rhf.conv_tol = 1e-14
    rhf.scf()
    mcc = cc.CCSD(rhf)
    mcc.conv_tol = 1e-12
    mcc.ccsd()
    t1a = t1b = mcc.t1
    t2ab = mcc.t2
    # Antisymmetrize the ab amplitudes to get the same-spin blocks
    t2aa = t2bb = t2ab - t2ab.transpose(1,0,2,3)
    mycc = cc.UCCSD(scf.addons.convert_to_uhf(rhf))
    eris = mycc.ao2mo()
    e3a = kernel(mycc, eris, (t1a,t1b), (t2aa,t2ab,t2bb))
    print(e3a - -0.00099642337843278096)
    # Test 2: open-shell system with randomized (non-physical) MO
    # coefficients and amplitudes, checked against a stored value
    mol = gto.Mole()
    mol.atom = [
        [8 , (0. , 0. , 0.)],
        [1 , (0. , -.757 , .587)],
        [1 , (0. , .757 , .587)]]
    mol.spin = 2
    mol.basis = '3-21g'
    mol.build()
    mf = scf.UHF(mol).run(conv_tol=1e-14)
    nao, nmo = mf.mo_coeff[0].shape
    numpy.random.seed(10)
    mf.mo_coeff = numpy.random.random((2,nao,nmo))
    numpy.random.seed(12)
    nocca, noccb = mol.nelec
    nmo = mf.mo_occ[0].size
    nvira = nmo - nocca
    nvirb = nmo - noccb
    # Random amplitudes, antisymmetrized where required by spin statistics
    t1a = .1 * numpy.random.random((nocca,nvira))
    t1b = .1 * numpy.random.random((noccb,nvirb))
    t2aa = .1 * numpy.random.random((nocca,nocca,nvira,nvira))
    t2aa = t2aa - t2aa.transpose(0,1,3,2)
    t2aa = t2aa - t2aa.transpose(1,0,2,3)
    t2bb = .1 * numpy.random.random((noccb,noccb,nvirb,nvirb))
    t2bb = t2bb - t2bb.transpose(0,1,3,2)
    t2bb = t2bb - t2bb.transpose(1,0,2,3)
    t2ab = .1 * numpy.random.random((nocca,noccb,nvira,nvirb))
    t1 = t1a, t1b
    t2 = t2aa, t2ab, t2bb
    mycc = cc.UCCSD(mf)
    eris = mycc.ao2mo(mf.mo_coeff)
    e3a = kernel(mycc, eris, [t1a,t1b], [t2aa, t2ab, t2bb])
    print(e3a - 9877.2780859693339)
    # Cross-check: the same amplitudes in the spin-orbital (GCCSD)
    # formulation should reproduce the same reference value
    mycc = cc.GCCSD(scf.addons.convert_to_ghf(mf))
    eris = mycc.ao2mo()
    t1 = mycc.spatial2spin(t1, eris.orbspin)
    t2 = mycc.spatial2spin(t2, eris.orbspin)
    from pyscf.cc import gccsd_t_slow
    et = gccsd_t_slow.kernel(mycc, eris, t1, t2)
    print(et - 9877.2780859693339)
| [
"slee89@caltech.edu"
] | slee89@caltech.edu |
5d009ec1750156835ab05bd369cef58aeaed239e | b4c93bad8ccc9007a7d3e7e1d1d4eb8388f6e988 | /farmercoupon/migrations/0048_auto_20210322_1046.py | 45f4e7b616e00e32a923afc76da686935d36cabb | [] | no_license | flashdreiv/fis | 39b60c010d0d989a34c01b39ea88f7fc3be0a87d | b93277785d6ad113a90a011f7c43b1e3e9209ec5 | refs/heads/main | 2023-04-02T12:46:32.249800 | 2021-03-31T00:27:29 | 2021-03-31T00:27:29 | 343,431,800 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 748 | py | # Generated by Django 3.1.7 on 2021-03-22 02:46
from django.db import migrations, models
import multiselectfield.db.fields
class Migration(migrations.Migration):
    """Auto-generated Django migration: adds 'crop' (multi-select) and
    'land_area' (integer, default 0) fields to the farmer model."""
    # Must apply after migration 0047 of the farmercoupon app
    dependencies = [
        ('farmercoupon', '0047_auto_20210321_1524'),
    ]
    operations = [
        migrations.AddField(
            model_name='farmer',
            name='crop',
            # NOTE(review): choices look like placeholder labels
            # ('Item title 2.x') — confirm real crop names before release.
            field=multiselectfield.db.fields.MultiSelectField(blank=True, choices=[(1, 'Item title 2.1'), (2, 'Item title 2.2'), (3, 'Item title 2.3'), (4, 'Item title 2.4'), (5, 'Item title 2.5')], max_length=9, null=True),
        ),
        migrations.AddField(
            model_name='farmer',
            name='land_area',
            field=models.IntegerField(default=0),
        ),
    ]
| [
"dreivan.orprecio@gmail.com"
] | dreivan.orprecio@gmail.com |
94cb36fc55af1eb504fcbf88f2c20c31038bd4dc | 917b85156ddfb653592b3b0994e7e7e9802a9eed | /ejerXML.py | c8789ca346bf35fd1f02bff24c1534fdec3609d4 | [] | no_license | antoniogomezvarela/XML | 3d2f2e8e1949b4a7f335a0b7c6ea229544d816a4 | c6dfeed3d782c4a28e56c7992414accf9fdcc660 | refs/heads/master | 2021-01-22T03:25:47.441160 | 2015-03-06T07:28:34 | 2015-03-06T07:28:34 | 31,011,138 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,135 | py | # -*- coding: utf-8 -*-
# Python 2 console script: loads an XML file of scholarships/prizes
# ("becas_premios.xml") and offers a small lookup menu.
from lxml import etree
from datetime import date
tree = etree.parse('becas_premios.xml')
documento = tree.getroot()
# MENU: print the available options and read the user's choice
print "1- Buscar beca o premio por teclado"
print "2- Mostrar becas y enlaces"
print "3- Buscar las becas y premios que su fecha de publicación este entre febrero y abril"
print "4- Contar cuantas becas y premios se han dado."
print "5- Mostrar las id de las becas y añadir cuantos dias ha estado abierta"
opcion= raw_input("Elige una opción: ")
# Option 1: look up a single record by its ID (child [0] of each entry)
if opcion == '1':
    encontrado = False
    identificacion = raw_input("Introduce una id: ")
    for i in documento:
        if i[0].text==identificacion:
            encontrado = True
            print "ID: ",i[0].text
            print "Titulo: ",i[1].text
            print "Fecha: ",i[2].text
            print "Descripción: ",i[3].text
            print "Estado: ",i[5].text
    if encontrado == False:
        print "Esa ID no existe"
# Option 2: list every record's ID together with its link (child [4])
elif opcion == '2':
    for i in documento:
        print "ID: ",i[0].text,", Enlace: ",i[4].text
# Option 3: records published between February and April
# (date is "YYYY-MM-DD"; zero-padded month makes string compare valid)
elif opcion == '3':
    for i in documento:
        fecha1=i[2].text
        fecha2=fecha1.split("-")
        if fecha2[1] >= "02" and fecha2[1] <= "04":
            print "ID: ",i[0].text,", Fecha: ",i[2].text
# Option 4: count entries whose title starts with "Becas" vs "Premios"
elif opcion == '4':
    becas = 0
    premios = 0
    for i in documento:
        titulo = i[1].text
        titulo = titulo.split(" ")
        if titulo[0] == "Becas":
            becas += 1
        elif titulo[0] == "Premios":
            premios += 1
    print "Número de becas concedidas: ",becas
    print "Número de premios concedidos: ",premios
# Option 5: days each record stayed open (final - initial date)
elif opcion == '5':
    # NOTE(review): date_format is never used below
    date_format = "%Y/%m/%d"
    for i in documento:
        incial = i.findall("plazopresentacion/plazopresentacion_item/incial")
        final = i.findall("plazopresentacion/plazopresentacion_item/final")
        inicial= str(incial[0].text)
        final= str(final[0].text)
        # NOTE(review): 'or' lets a single missing date through and then
        # int() raises ValueError — this likely should be 'and'; confirm.
        if inicial != "None" or final != "None":
            # Dates come as ISO "YYYY-MM-DDTHH:MM:SS"; strip time, split fields
            inicial = inicial.split("T")
            final = final.split("T")
            inicial = inicial[0].split("-")
            final = final[0].split("-")
            d0 = date(int(inicial[0]),int(inicial[1]),int(inicial[2]))
            d1 = date(int(final[0]),int(final[1]),int(final[2]))
            dias = d1-d0
            print "la beca ",i[0].text," estuvo abierta ",dias.days," dias"
else:
    print "Elige una opción correcta"
"root@debian"
] | root@debian |
ff6271a955e1cb503a3d11d4da07cb233f75e69b | 897e63cc3e19882e50e76fc4abb3b1e7c5408001 | /manage.py | 7ded26bba34bfae49570ed0facc1d25773c241ff | [] | no_license | sasirekha3/molecular-search-system | 277d67330532401bdb886845430f4dca3623fad4 | eccb7722034bcbe1fa1d173ae91bdf1a98dba83b | refs/heads/master | 2020-12-04T14:10:09.412422 | 2020-01-04T17:24:02 | 2020-01-04T17:24:02 | 231,795,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 542 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Standard Django manage.py entry point: point Django at this project's
    # settings module, then hand the command line over to Django.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DjangoProj.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint while chaining the original error
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
| [
"ksasirekha@gmail.com"
] | ksasirekha@gmail.com |
85307e709e74ac64e17f146241b6cfd3567f4f85 | cf3f8baeb9e431ac9c070dbfa2cf28e748dc40a9 | /Database/JsonToIPC.py | d4f1b93c4a808463082bd56f4e9b0c74d72b6523 | [] | no_license | ericstoneking/42 | b2eb7cb0a2c8c87b092a0b64bd9204f79013a98c | d4547dae44270876657aec009fe59980082ed999 | refs/heads/master | 2023-09-03T17:34:48.025904 | 2023-05-26T12:58:46 | 2023-05-26T12:58:46 | 32,602,944 | 198 | 92 | null | 2023-03-30T07:25:21 | 2015-03-20T19:30:47 | C | UTF-8 | Python | false | false | 36,023 | py | import sys
import os
import re # Regular Expressions
import json
# Set up globals
# Shared state configured by main() before the code generators run:
Prog = ""        # target program: "Sim" or "App"
Verb = ""        # direction of the generated function: "WriteTo" or "ReadFrom"
Pipe = ""        # IPC transport: "Socket", "Gmsec", "File", or "Cmd"
outfile = 0      # open file handle the generated C source is written to
EchoString = ""  # C expression controlling echo printf()s in generated code
ParmPass = 0     # 1 when generating the parameter ("PRM") pass
########################################################################
def WriteProlog():
    """Emit the header of the generated C WriteTo* function: file banner,
    #includes, the function signature matching Prog/Pipe, local variable
    declarations, and (Sim only) the TIME line plus the prefix loop opener."""
    global Prog, Verb, Pipe, outfile, EchoString
    outfile.write("/* Note: This file was autogenerated by Database/JsonToIPC.py */\n")
    outfile.write("/* See Database/Readme.txt for details. */\n")
    outfile.write("/* If you are hand-modifying this file, keep in mind that */\n")
    outfile.write("/* your work will be lost if you run JsonToIPC.py again. */\n\n")
    if Prog == "Sim":
       outfile.write("#include \"42.h\"\n\n")
    elif Prog == "App":
       outfile.write("#include \"Ac.h\"\n\n")
    #endif
    if Pipe == "Gmsec":
       outfile.write("#include \"gmseckit.h\"\n")
    #endif
    outfile.write("/**********************************************************************/\n")
    # Function signature depends on both the transport and the target program
    if Pipe == "Socket":
       if Prog == "Sim":
          outfile.write("void WriteToSocket(SOCKET Socket, char **Prefix, long Nprefix, long EchoEnabled)\n")
       else:
          outfile.write("void WriteToSocket(SOCKET Socket, struct AcType *AC)\n")
       #endif
    elif Pipe == "Gmsec":
       if Prog == "Sim":
          outfile.write("void WriteToGmsec(GMSEC_ConnectionMgr ConnMgr,GMSEC_Status status, char **Prefix, long Nprefix, long EchoEnabled)\n")
       else:
          outfile.write("void WriteToGmsec(GMSEC_ConnectionMgr ConnMgr,GMSEC_Status status, struct AcType *AC)\n")
       #endif
    elif Pipe == "File":
       if Prog == "Sim":
          outfile.write("void WriteToFile(FILE *StateFile, char **Prefix, long Nprefix, long EchoEnabled)\n")
       else:
          outfile.write("void WriteToFile(FILE *StateFile, struct AcType *AC)\n")
       #endif
    #endif
    outfile.write("{\n\n")
    outfile.write("      long Isc,Iorb,Iw,Ipfx,i;\n")
    # Socket and Gmsec accumulate lines into one Msg buffer before sending
    if Pipe == "Socket":
       outfile.write("      char AckMsg[5] = \"Ack\\n\";\n")
       outfile.write("      char Msg[16384];\n")
       outfile.write("      long MsgLen = 0;\n")
       outfile.write("      long LineLen;\n")
    elif Pipe == "Gmsec":
       outfile.write("      char Header[40] = \"GMSEC.42.TX.MSG.LOG\";\n")
       outfile.write("      GMSEC_Message AckMsg;\n")
       outfile.write("      char Msg[16384];\n")
       outfile.write("      long MsgLen = 0;\n")
       outfile.write("      long LineLen;\n")
    #endif
    outfile.write("      long PfxLen;\n")
    outfile.write("      char line[512];\n\n")
    if Prog == "App":
       outfile.write("      Isc = AC->ID;\n\n")
    #endif
    if Prog == "Sim":
       # Sim side stamps the message with the current UTC time, then opens
       # the loop over message prefixes that the code blocks filter against
       outfile.write("      sprintf(line,\"TIME %ld-%03ld-%02ld:%02ld:%012.9lf\\n\",\n")
       outfile.write("         UTC.Year,UTC.doy,UTC.Hour,UTC.Minute,UTC.Second);\n")
       if Pipe == "Socket":
          outfile.write("      LineLen = strlen(line);\n")
          outfile.write("      memcpy(&Msg[MsgLen],line,LineLen);\n")
          outfile.write("      MsgLen += LineLen;\n")
       elif Pipe == "Gmsec":
          outfile.write("      LineLen = strlen(line);\n")
          outfile.write("      memcpy(&Msg[MsgLen],line,LineLen);\n")
          outfile.write("      MsgLen += LineLen;\n")
       elif Pipe == "File":
          outfile.write("      fprintf(StateFile,\"%s\",line);\n")
       #endif
       outfile.write("      if ("+EchoString+") printf(\"%s\",line);\n\n")
       outfile.write("      for(Ipfx=0;Ipfx<Nprefix;Ipfx++) {\n")
       outfile.write("         PfxLen = strlen(Prefix[Ipfx]);\n\n")
    #endif
########################################################################
def ReadProlog():
    """Emit the header of the generated C ReadFrom* function: file banner,
    #includes, the signature matching Prog/Pipe, locals, the message-receive
    and line-splitting loop opener, and the TIME-line parser."""
    global Prog, Verb, Pipe, outfile, EchoString
    outfile.write("/* Note: This file was autogenerated by Database/JsonToIPC.py */\n")
    outfile.write("/* See Database/Readme.txt for details. */\n")
    outfile.write("/* If you are hand-modifying this file, keep in mind that */\n")
    outfile.write("/* your work will be lost if you run JsonToIPC.py again. */\n\n")
    if Prog == "Sim":
       outfile.write("#include \"42.h\"\n\n")
    elif Prog == "App":
       outfile.write("#include \"Ac.h\"\n\n")
    #endif
    if Pipe == "Gmsec":
       outfile.write("#include \"gmseckit.h\"\n")
    #endif
    outfile.write("/**********************************************************************/\n")
    # Function signature depends on both the transport and the target program
    if Pipe == "Socket":
       if Prog == "Sim":
          outfile.write("void ReadFromSocket(SOCKET Socket, long EchoEnabled)\n")
       else:
          outfile.write("void ReadFromSocket(SOCKET Socket, struct AcType *AC)\n")
       #endif
    elif Pipe == "Gmsec":
       if Prog == "Sim":
          outfile.write("void ReadFromGmsec(GMSEC_ConnectionMgr ConnMgr,GMSEC_Status status, long EchoEnabled)\n")
       else:
          outfile.write("void ReadFromGmsec(GMSEC_ConnectionMgr ConnMgr,GMSEC_Status status,struct AcType *AC)\n")
    elif Pipe == "File":
       if Prog == "Sim":
          outfile.write("void ReadFromFile(FILE *StateFile, long EchoEnabled)\n")
       else:
          outfile.write("void ReadFromFile(FILE *StateFile, struct AcType *AC)\n")
       #endif
    elif Pipe == "Cmd":
       outfile.write("void ReadFromCmd(void)\n");
    #endif
    outfile.write("{\n\n")
    outfile.write("      struct SCType *S;\n")
    outfile.write("      struct OrbitType *O;\n")
    outfile.write("      struct DynType *D;\n")
    outfile.write("      long Isc,Iorb,Iw,i;\n")
    outfile.write("      char line[512] = \"Blank\";\n")
    outfile.write("      long RequestTimeRefresh = 0;\n")
    outfile.write("      long Done;\n")
    if Pipe == "Gmsec":
       outfile.write("      char Msg[16384];\n")
       outfile.write("      GMSEC_Message GsMsg;\n")
       outfile.write("      GMSEC_Field Field;\n")
       outfile.write("      char AckMsg[5] = \"Ack\\n\";\n")
       outfile.write("      long Imsg,Iline;\n")
    elif Pipe == "Socket":
       outfile.write("      char Msg[16384];\n")
       outfile.write("      char AckMsg[5] = \"Ack\\n\";\n")
       outfile.write("      long Imsg,Iline;\n")
       outfile.write("      int NumBytes;\n")
    #endif
    # Scratch arrays the generated sscanf lines unpack values into
    outfile.write("      double DbleVal[30];\n")
    outfile.write("      long LongVal[30];\n\n")
    outfile.write("      long Year,doy,Hour,Minute;\n")
    outfile.write("      double Second;\n")
    if Prog == "App":
       outfile.write("      long Month,Day;\n")
    #endif
    # Receive the whole message, then split it into newline-delimited lines
    if Pipe == "Socket":
       outfile.write("      \n")
       outfile.write("      memset(Msg,'\\0',16384);\n")
       outfile.write("      NumBytes = recv(Socket,Msg,16384,0);\n")
       outfile.write("      if (NumBytes <= 0) return; /* Bail out if no message */\n\n")
       outfile.write("      Done = 0;\n")
       outfile.write("      Imsg = 0;\n")
       outfile.write("      while(!Done) {\n")
       outfile.write("         /* Parse lines from Msg, newline-delimited */\n")
       outfile.write("         Iline = 0;\n")
       outfile.write("         memset(line,'\\0',512);\n")
       outfile.write("         while((Msg[Imsg] != '\\n') && (Iline < 511) && (Imsg < 16383)) {\n")
       outfile.write("            line[Iline++] = Msg[Imsg++];\n")
       outfile.write("         }\n")
       outfile.write("         line[Iline++] = Msg[Imsg++];\n")
    elif Pipe == "Gmsec":
       outfile.write("      GsMsg = connectionManagerReceive(ConnMgr,GMSEC_WAIT_FOREVER,status);\n")
       outfile.write("      CheckGmsecStatus(status);\n")
       outfile.write("      Field = messageGetField(GsMsg,\"MSG-TEXT\",status);\n")
       outfile.write("      CheckGmsecStatus(status);\n")
       outfile.write("      strcpy(Msg,stringFieldGetValue(Field,status));\n")
       outfile.write("      CheckGmsecStatus(status);\n\n")
       outfile.write("      Done = 0;\n")
       outfile.write("      Imsg = 0;\n")
       outfile.write("      while(!Done) {\n")
       outfile.write("         /* Parse lines from Msg, newline-delimited */\n")
       outfile.write("         Iline = 0;\n")
       outfile.write("         memset(line,'\\0',512);\n")
       outfile.write("         while(Msg[Imsg] != '\\n') {\n")
       outfile.write("            line[Iline++] = Msg[Imsg++];\n")
       outfile.write("         }\n")
       outfile.write("         line[Iline++] = Msg[Imsg++];\n")
    elif Pipe == "File":
       outfile.write("      Done = 0;\n")
       outfile.write("      while(!Done) {\n")
       outfile.write("         fgets(line,511,StateFile);\n")
    elif Pipe == "Cmd":
       outfile.write("\n\n")
    #endif
    outfile.write("         if ("+EchoString+") printf(\"%s\",line);\n\n")
    # First recognized line: the TIME stamp written by WriteProlog's output
    outfile.write("         if (sscanf(line,\"TIME %ld-%ld-%ld:%ld:%lf\\n\",\n")
    outfile.write("            &Year,&doy,&Hour,&Minute,&Second) == 5) {\n")
    outfile.write("            RequestTimeRefresh = 1;\n")
    outfile.write("         }\n\n")
########################################################################
def WriteEpilog():
    """Emit the tail of the generated C WriteTo* function: close the Sim
    prefix loop, append the [EOF] terminator line, flush the message over
    the selected transport, wait for the Ack, and close the function."""
    global Prog, Verb, Pipe, outfile, EchoString
    if Prog == "Sim":
       # Close the for(Ipfx...) loop opened in WriteProlog
       outfile.write("   }\n\n")
    #endif
    outfile.write("      sprintf(line,\"[EOF]\\n\\n\");\n")
    outfile.write("      if ("+EchoString+") printf(\"%s\",line);\n\n")
    if Pipe == "Socket":
       outfile.write("      LineLen = strlen(line);\n")
       outfile.write("      memcpy(&Msg[MsgLen],line,LineLen);\n")
       outfile.write("      MsgLen += LineLen;\n")
       outfile.write("      send(Socket,Msg,MsgLen,0);\n\n")
       outfile.write("      /* Wait for Ack */\n");
       outfile.write("      recv(Socket,AckMsg,5,0);\n")
    elif Pipe == "Gmsec":
       outfile.write("      LineLen = strlen(line);\n")
       outfile.write("      memcpy(&Msg[MsgLen],line,LineLen);\n")
       outfile.write("      MsgLen += LineLen;\n")
       outfile.write("      GmsecSend(Header,Msg,ConnMgr,status);\n")
       outfile.write("      /* Wait for ack */\n")
       outfile.write("      AckMsg = connectionManagerReceive(ConnMgr,GMSEC_WAIT_FOREVER,status);\n")
       outfile.write("      CheckGmsecStatus(status);\n")
       outfile.write("      messageDestroy(&AckMsg);\n")
    elif Pipe == "File":
       outfile.write("      fprintf(StateFile,\"%s\",line);\n")
    #endif
    outfile.write("}\n")
########################################################################
def ReadEpilog():
    """Emit the tail of the generated C ReadFrom* line loop: detect the
    [EOF] terminator, close the while(!Done) loop, and send the Ack back
    over the selected transport."""
    global Prog, Verb, Pipe, outfile
    outfile.write("\n")
    outfile.write("         if (!strncmp(line,\"[EOF]\",5)) {\n")
    outfile.write("            Done = 1;\n")
    outfile.write("            sprintf(line,\"[EOF] reached\\n\");\n")
    outfile.write("         }\n")
    if Pipe == "Socket":
       # Also bail out if the line scanner ran off the end of the Msg buffer
       outfile.write("         if (Imsg >= 16383) {\n")
       outfile.write("            Done = 1;\n")
       outfile.write("            printf(\"Imsg limit exceeded\\n\");\n")
       outfile.write("         }\n")
       outfile.write("      }\n\n")
       outfile.write("      /* Acknowledge receipt */\n")
       outfile.write("      send(Socket,AckMsg,strlen(AckMsg),0);\n\n")
    elif Pipe == "Gmsec":
       outfile.write("         messageDestroy(&GsMsg);\n")
       outfile.write("      }\n\n")
       outfile.write("      /* Acknowledge receipt */\n")
       outfile.write("      GmsecSend(\"GMSEC.42.RX.MSG.LOG\",AckMsg,ConnMgr,status);\n\n")
    elif Pipe == "File":
       outfile.write("      }\n\n")
    elif Pipe == "Cmd":
       # Cmd pipe has no enclosing while loop and no ack to send
       outfile.write("      \n\n")
       #outfile.write("   }\n\n")
    #endif
########################################################################
def TimeRefreshCode():
    """Emit the C block that acts on RequestTimeRefresh: the Sim variant
    rebuilds all simulation time variables from the received UTC stamp;
    the App variant just recomputes AC->Time."""
    global outfile,Prog
    if Prog == "Sim":
       outfile.write("      if (RequestTimeRefresh) {\n")
       outfile.write("         /* Update time variables */\n")
       outfile.write("         UTC.Year = Year;\n")
       outfile.write("         UTC.doy = doy;\n")
       outfile.write("         UTC.Hour = Hour;\n")
       outfile.write("         UTC.Minute = Minute;\n")
       outfile.write("         UTC.Second = Second;\n")
       outfile.write("         DOY2MD(UTC.Year,UTC.doy,&UTC.Month,&UTC.Day);\n")
       outfile.write("         CivilTime = DateToTime(UTC.Year,UTC.Month,UTC.Day,UTC.Hour,UTC.Minute,UTC.Second);\n")
       outfile.write("         AtomicTime = CivilTime + LeapSec;\n")
       outfile.write("         DynTime = AtomicTime + 32.184;\n")
       outfile.write("         TT.JulDay = TimeToJD(DynTime);\n")
       outfile.write("         TimeToDate(DynTime,&TT.Year,&TT.Month,&TT.Day,\n")
       outfile.write("            &TT.Hour,&TT.Minute,&TT.Second,DTSIM);\n")
       outfile.write("         TT.doy = MD2DOY(TT.Year,TT.Month,TT.Day);\n")
       outfile.write("         UTC.JulDay = TimeToJD(CivilTime);\n")
       outfile.write("         JDToGpsTime(TT.JulDay,&GpsRollover,&GpsWeek,&GpsSecond);\n")
       outfile.write("         SimTime = DynTime-DynTime0;\n")
       outfile.write("      }\n\n")
    else:
       outfile.write("      if (RequestTimeRefresh) {\n")
       outfile.write("         /* Update AC->Time */\n")
       outfile.write("         DOY2MD(Year,doy,&Month,&Day);\n")
       outfile.write("         AC->Time = DateToTime(Year,Month,Day,Hour,Minute,Second);\n")
       outfile.write("      }\n\n")
    #endif
########################################################################
def StateRefreshCode():
    """Emit the C block that refreshes dependent spacecraft states (orbit
    ephemeris, body dynamics) for every SC flagged RequestStateRefresh by
    the generated sscanf blocks."""
    global outfile
    outfile.write("\n/* .. Refresh SC states that depend on inputs */\n\n")
    outfile.write("      for(Isc=0;Isc<Nsc;Isc++) {\n")
    outfile.write("         if (SC[Isc].RequestStateRefresh) {\n")
    outfile.write("            S = &SC[Isc];\n")
    outfile.write("            S->RequestStateRefresh = 0;\n")
    outfile.write("            if (S->Exists) {\n")
    outfile.write("               /* Update RefOrb */\n")
    outfile.write("               O = &Orb[S->RefOrb];\n")
    outfile.write("               O->Epoch = DynTime;\n")
    outfile.write("               for(i=0;i<3;i++) {\n")
    outfile.write("                  S->PosN[i] = O->PosN[i] + S->PosR[i];\n")
    outfile.write("                  S->VelN[i] = O->VelN[i] + S->VelR[i];\n")
    outfile.write("               }\n")
    outfile.write("               RV2Eph(O->Epoch,O->mu,O->PosN,O->VelN,\n")
    outfile.write("                  &O->SMA,&O->ecc,&O->inc,&O->RAAN,\n")
    outfile.write("                  &O->ArgP,&O->anom,&O->tp,\n")
    outfile.write("                  &O->SLR,&O->alpha,&O->rmin,\n")
    outfile.write("                  &O->MeanMotion,&O->Period);\n")
    outfile.write("               FindCLN(O->PosN,O->VelN,O->CLN,O->wln);\n\n")
    outfile.write("               /* Update Dyn */\n")
    outfile.write("               MapJointStatesToStateVector(S);\n")
    outfile.write("               D = &S->Dyn;\n")
    outfile.write("               MapStateVectorToBodyStates(D->u,D->x,D->h,D->a,D->uf,D->xf,S);\n")
    outfile.write("               MotionConstraints(S);\n")
    outfile.write("            }\n")
    outfile.write("         }\n")
    outfile.write("      }\n")
########################################################################
def WriteCodeBlock(Indent,FmtPrefix,ArrayIdx,ArgPrefix,VarString,IdxLen,Ni,Nj,StructIdxString,FormatString):
    """Emit one generated C sprintf-and-send block for a single variable.

    Builds a sprintf() whose format is FmtPrefix+VarString followed by
    Ni*Nj occurrences of FormatString (scalar, vector, or matrix), with
    the matching argument list, then emits the transport-specific code
    that appends the line to the outgoing message (Sim side filters on
    the message prefix first)."""
    global Prog, Verb, Pipe, outfile, EchoString
    line = Indent+"      sprintf(line,\""
    line += FmtPrefix
    line += VarString
    line += " ="
    # One format specifier per element of the (possibly 1x1) Ni x Nj array
    for i in range (0,Ni):
       for j in range (0,Nj):
          line += " "+FormatString
       #next j
    #next i
    line += "\\n\",\n"+"         "+Indent+ArrayIdx+StructIdxString
    # Argument list: matrix, vector, or scalar access into the C struct
    if Nj > 1:
       for i in range (0,Ni):
          for j in range (0,Nj):
             line += ",\n"+"         "+Indent+ArgPrefix+VarString+"["+str(i)+"]["+str(j)+"]"
          #next j
       #next i
    elif Ni > 1:
       for i in range (0,Ni):
          line += ",\n"+"         "+Indent+ArgPrefix+VarString+"["+str(i)+"]"
       #next i
    else:
       line += ",\n"+"         "+Indent+ArgPrefix+VarString
    #endif
    line += ");\n"
    outfile.write(line)
    if Prog == "Sim":
       # Sim only sends lines whose start matches the current prefix filter
       if Pipe == "Socket":
          outfile.write("      "+Indent+"if (!strncmp(line,Prefix[Ipfx],PfxLen)) {\n")
          outfile.write("      "+Indent+"   LineLen = strlen(line);\n")
          outfile.write("      "+Indent+"   memcpy(&Msg[MsgLen],line,LineLen);\n")
          outfile.write("      "+Indent+"   MsgLen += LineLen;\n")
          outfile.write("      "+Indent+"   if ("+EchoString+") printf(\"%s\",line);\n")
          outfile.write("      "+Indent+"}\n\n")
       elif Pipe == "Gmsec":
          outfile.write("      "+Indent+"if (!strncmp(line,Prefix[Ipfx],PfxLen)) {\n")
          outfile.write("      "+Indent+"   LineLen = strlen(line);\n")
          outfile.write("      "+Indent+"   memcpy(&Msg[MsgLen],line,LineLen);\n")
          outfile.write("      "+Indent+"   MsgLen += LineLen;\n\n")
          outfile.write("      "+Indent+"   if ("+EchoString+") printf(\"%s\",line);\n")
          outfile.write("      "+Indent+"}\n\n")
       elif Pipe == "File":
          outfile.write("      "+Indent+"if (!strncmp(line,Prefix[Ipfx],PfxLen)) {\n")
          outfile.write("      "+Indent+"   fprintf(StateFile,\"%s\",line);\n")
          outfile.write("      "+Indent+"   if ("+EchoString+") printf(\"%s\",line);\n")
          outfile.write("      "+Indent+"}\n\n")
       #endif
    else:
       # App sends every line unconditionally
       if Pipe == "Socket":
          outfile.write("      "+Indent+"LineLen = strlen(line);\n")
          outfile.write("      "+Indent+"memcpy(&Msg[MsgLen],line,LineLen);\n")
          outfile.write("      "+Indent+"MsgLen += LineLen;\n")
          outfile.write("      "+Indent+"if ("+EchoString+") printf(\"%s\",line);\n\n")
       elif Pipe == "Gmsec":
          outfile.write("      "+Indent+"LineLen = strlen(line);\n")
          outfile.write("      "+Indent+"memcpy(&Msg[MsgLen],line,LineLen);\n")
          outfile.write("      "+Indent+"MsgLen += LineLen;\n\n")
          outfile.write("      "+Indent+"if ("+EchoString+") printf(\"%s\",line);\n\n")
       elif Pipe == "File":
          outfile.write("      "+Indent+"fprintf(StateFile,\"%s\",line);\n")
          outfile.write("      "+Indent+"if ("+EchoString+") printf(\"%s\",line);\n\n")
       #endif
    #endif
########################################################################
def ReadCodeBlock(Indent,FmtPrefix,ArrayIdx,ArgPrefix,ArgString,VarString,IdxLen,Ni,Nj,StructIdxString,Narg,FormatString):
    """Emit one generated C sscanf-and-assign block for a single variable.

    Builds an sscanf() that parses FmtPrefix+VarString plus Ni*Nj values
    into the scratch array named by ArgString (DbleVal/LongVal), guarded
    by a check that exactly Narg fields matched, then copies the scratch
    values into the destination struct.  App builds also guard on
    Isc == AC->ID; SC-targeted Sim reads set RequestStateRefresh."""
    global Prog, outfile
    line = Indent+"if (sscanf(line,\""
    line += FmtPrefix
    line += VarString
    line += " ="
    # One format specifier per element of the (possibly 1x1) Ni x Nj array
    for i in range (0,Ni):
       for j in range (0,Nj):
          line += " "+FormatString
       #next j
    #next i
    line += "\","+"\n      "+Indent+"&"+ArrayIdx+StructIdxString
    # Destinations: scratch-array slots, flattened row-major for matrices
    if Nj > 1:
       for i in range (0,Ni):
          for j in range (0,Nj):
             line += ","+"\n      "+Indent+"&"+ArgString+"["+str(Nj*i+j)+"]"
          #next j
       #next i
    elif Ni > 1:
       for i in range (0,Ni):
          line += ","+"\n      "+Indent+"&"+ArgString+"["+str(i)+"]"
       #next i
    else:
       line += ","+"\n      "+Indent+"&"+ArgString+"[0]"
    #endif
    line += ") == "+str(Narg)+") {"
    if Prog == "App":
       # App only accepts lines addressed to its own spacecraft ID
       line += "\n   "+Indent+"if (Isc == AC->ID) {"
       Indent += "   "
    #endif
    # Copy parsed scratch values into the destination struct fields
    if Nj > 1:
       for i in range (0,Ni):
          for j in range (0,Nj):
             line += "\n   "+Indent+ArgPrefix+VarString+"["+str(i)+"]["+str(j)+"] = "+ArgString+"["+str(Nj*i+j)+"];"
          #next j
       #next i
    elif Ni > 1:
       for i in range (0,Ni):
          line += "\n   "+Indent+ArgPrefix+VarString+"["+str(i)+"] = "+ArgString+"["+str(i)+"];"
       #next i
    else:
       line += "\n   "+Indent+ArgPrefix+VarString+" = "+ArgString+"[0];"
    #endif
    if Prog == "App":
       Indent = Indent[0:-3]
       line += "\n   "+Indent+"}"
    #endif
    if ArgPrefix.startswith("SC") and ArgPrefix.count("AC") == 0:
       # Writes into SC state must trigger the dependent-state refresh pass
       line += "\n      "+Indent+"SC[Isc].RequestStateRefresh = 1;"
    #endif
    line += "\n"+Indent+"}\n\n"
    outfile.write(line)
########################################################################
def ParseStruct(StructList,Struct,Indent,FmtPrefix,ArrayIdx,ArgPrefix,StructIdxString,Narg):
    """Recursively walk one JSON table (Struct) and emit read/write C code.

    Primitive members ("long"/"double") are routed to WriteCodeBlock or
    ReadCodeBlock according to the global Verb, the member's Read/Write
    flags for the current Prog, and the ParmPass/Packet-Role selection.
    Struct-typed members recurse with extended format/argument prefixes
    and (for arrays) extra %ld index fields appended to StructIdxString;
    Narg tracks the running sscanf field count for the guard comparison."""
    global Prog, Verb, Pipe, outfile, ParmPass
    Primitives = {"long","double"}
    VarList = Struct["Table Data"]
    for Var in VarList:
       DataType = Var["Data Type"]
       if DataType in Primitives:
          VarString = Var["Variable Name"]
          # Decode the "[Ni,Nj]" / "[Ni]" / absent Array Size into Ni x Nj
          if "Array Size" in Var:
             IdxString = Var["Array Size"].strip(r"[]")
             IdxList = IdxString.split(",")
             IdxLen = len(IdxList)
             if IdxLen == 2:
                Ni = int(IdxList[0])
                Nj = int(IdxList[1])
             elif IdxList[0].isnumeric():
                Ni = int(IdxList[0])
                Nj = 1
             else:
                # Symbolic size (runtime-dimensioned): treat as scalar here
                Ni = 1
                Nj = 1
             #endif
          else:
             IdxLen = 0
             Ni = 1
             Nj = 1
          #endif
          if DataType == "long":
             WriteFormatString = "%ld"
             ReadFormatString = "%ld"
             ArgString = "LongVal"
          else:
             WriteFormatString = "%18.12le"
             ReadFormatString = "%le"
             ArgString = "DbleVal"
          #endif
          if Prog == "Sim":
             ReadWrite = Var["Sim Read/Write"]
          elif Prog == "App":
             ReadWrite = Var["App Read/Write"]
          else:
             ReadWrite = ""
          #endif
          PktRole = Var["Packet Role"]
          if ParmPass == 1:
             # Parameter pass: only members tagged PRM with no R/W flag
             if Verb == "WriteTo" and PktRole == "PRM" and ReadWrite == "":
                WriteCodeBlock(Indent,FmtPrefix,ArrayIdx,ArgPrefix,VarString,IdxLen,Ni,Nj,StructIdxString,WriteFormatString)
             #endif
             if Verb == "ReadFrom" and PktRole == "PRM" and ReadWrite == "":
                ReadCodeBlock(Indent,FmtPrefix,ArrayIdx,ArgPrefix,ArgString,VarString,IdxLen,Ni,Nj,StructIdxString,Narg+Ni*Nj,ReadFormatString)
             #endif
          else:
             # Normal pass: honor the member's READ/WRITE flags
             if Verb == "WriteTo" and ReadWrite in ["WRITE","READ_WRITE"]:
                WriteCodeBlock(Indent,FmtPrefix,ArrayIdx,ArgPrefix,VarString,IdxLen,Ni,Nj,StructIdxString,WriteFormatString)
             #endif
             if Verb == "ReadFrom" and ReadWrite in ["READ","READ_WRITE"]:
                ReadCodeBlock(Indent,FmtPrefix,ArrayIdx,ArgPrefix,ArgString,VarString,IdxLen,Ni,Nj,StructIdxString,Narg+Ni*Nj,ReadFormatString)
             #endif
             if Prog == "Sim" and Verb == "ReadFrom" and Pipe == "Cmd" and Var["Cmd Read"] == "READ":
                ReadCodeBlock(Indent,FmtPrefix,ArrayIdx,ArgPrefix,ArgString,VarString,IdxLen,Ni,Nj,StructIdxString,Narg+Ni*Nj,ReadFormatString)
             #endif
          #endif
       else: # struct
          # Find the member's table definition and recurse into it
          for SubStruct in StructList:
             if SubStruct["Table Name"] == Var["Data Type"]:
                LocalFmtPrefix = FmtPrefix + Var["Variable Name"]
                LocalArgPrefix = ArgPrefix + Var["Variable Name"]
                LocalStructIdxString = StructIdxString
                if "Array Size" in Var:
                   IdxString = Var["Array Size"].strip(r"[]")
                   IdxList = IdxString.split(",")
                   IdxLen = len(IdxList)
                   if IdxString.isalpha():
                      # Runtime-sized array of structs: WriteTo wraps the
                      # recursion in a generated C for(i...) loop; ReadFrom
                      # parses the index out of each incoming line instead
                      if Verb == "WriteTo":
                         outfile.write(Indent+"      for(i=0;i<"+ArgPrefix+IdxString+";i++) {\n")
                         LocalIndent = Indent+"   "
                         if Prog == "Sim":
                            LocalStructIdxString += ",i"
                         else:
                            LocalStructIdxString += ",i"
                         #endif
                      else:
                         LocalIndent = Indent
                         LocalStructIdxString += ",&i"
                      #endif
                      LocalFmtPrefix += "[%ld]."
                      LocalArgPrefix += "[i]."
                      LocalNarg = Narg+1
                      ParseStruct(StructList,SubStruct,LocalIndent,LocalFmtPrefix,ArrayIdx,LocalArgPrefix,LocalStructIdxString,LocalNarg)
                      if Verb == "WriteTo":
                         outfile.write(Indent+"      }\n\n")
                      #endif
                   elif IdxLen == 2:
                      LocalFmtPrefix += "[%ld][%ld]."
                      LocalArgPrefix += "[i][j]."
                      if Verb == "WriteTo":
                         LocalStructIdxString += ",i,j"
                      else:
                         LocalStructIdxString += ",&i,&j"
                      #endif
                      LocalNarg = Narg+2
                      LocalIndent = Indent+""
                      ParseStruct(StructList,SubStruct,LocalIndent,LocalFmtPrefix,ArrayIdx,LocalArgPrefix,LocalStructIdxString,LocalNarg)
                   else:
                      LocalFmtPrefix += "[%ld]."
                      LocalArgPrefix += "[i]."
                      if Verb == "WriteTo":
                         LocalStructIdxString += ",i"
                      else:
                         LocalStructIdxString += ",&i"
                      #endif
                      LocalNarg = Narg+1
                      LocalIndent = Indent+""
                      ParseStruct(StructList,SubStruct,LocalIndent,LocalFmtPrefix,ArrayIdx,LocalArgPrefix,LocalStructIdxString,LocalNarg)
                   #endif
                else:
                   # Scalar nested struct: just extend the prefixes
                   LocalFmtPrefix += "."
                   LocalArgPrefix += "."
                   LocalIndent = Indent+""
                   ParseStruct(StructList,SubStruct,LocalIndent,LocalFmtPrefix,ArrayIdx,LocalArgPrefix,LocalStructIdxString,Narg)
                #endif
             #endif
          #next SubStruct
       #endif
    #next Var
########################################################################
def StripEmptyLoops(infile,outfile):
    """Copy *infile* to *outfile*, dropping empty C for-loops.

    A line whose stripped text starts with ``for`` that is immediately
    followed by a line starting with ``}`` is treated as an empty loop:
    both lines, plus the single (assumed blank) line after them, are
    omitted.  Every other line is copied through unchanged.
    """
    line = infile.readline()
    while line != '':  # readline() returns '' only at end of file
        if line.strip().startswith('for'):
            body = infile.readline()
            if body.strip().startswith('}'):
                # Empty loop: discard the "for", the "}", and the
                # trailing blank line that the generator emits after it.
                infile.readline()
            else:
                # Non-empty loop: keep the header and the first body line.
                outfile.write(line)
                outfile.write(body)
        else:
            outfile.write(line)
        line = infile.readline()
########################################################################
def main():
    """Regenerate the C IPC marshalling sources for the 42 Sim and App.

    For each valid (Prog, Verb, Pipe) combination, walks the struct tables
    loaded from 42.json and emits field read/write code into TempIpc.c,
    then filters it through StripEmptyLoops() into
    ../Source/IPC/<Prog><Verb><Pipe>.c.  ParmPass 0 covers the state
    tables (SCType/OrbitType/WorldType, Sim only); pass 1 covers the AC
    parameter table (AcType).

    Depends on module-level helpers (WriteProlog, ReadProlog, ParseStruct,
    WriteEpilog, ReadEpilog, TimeRefreshCode, StateRefreshCode) and on the
    module globals named in the ``global`` statement below.
    """
    global Prog, Verb, Pipe, outfile, EchoString, ParmPass
    ProgList = {"Sim","App"}
    VerbList = {"WriteTo","ReadFrom"}
    PipeList = {"Socket","Gmsec","File","Cmd"}
    # 42.json carries the frozen table definitions describing every field.
    infile = open('42.json')
    StructDict = json.load(infile)
    infile.close()
    for Prog in ProgList:
        if Prog == "Sim":
            EchoString = "EchoEnabled"
        else:
            EchoString = "AC->EchoEnabled"
        #endif
        for Verb in VerbList:
            for Pipe in PipeList:
                # The WriteTo+Cmd and App+Gmsec combinations are never generated.
                if not(Verb == "WriteTo" and Pipe == "Cmd") and not (Prog == "App" and Pipe == "Gmsec"):
                    outfile = open("TempIpc.c","w")
                    if Verb == "WriteTo":
                        WriteProlog()
                    elif Verb == "ReadFrom":
                        ReadProlog()
                    #endif
                    for ParmPass in [0,1]:
                        StructList = StructDict["Table Definition"]
                        for Struct in StructList:
                            Indent = " "
                            if Prog == "Sim":
                                # Sim side, pass 0: emit per-spacecraft, per-orbit and
                                # per-world loops around the state tables.
                                if Struct["Table Name"] == "SCType":
                                    if ParmPass == 0:
                                        if Verb == "WriteTo":
                                            outfile.write(Indent+"for(Isc=0;Isc<Nsc;Isc++) {\n")
                                            outfile.write(Indent+" if (SC[Isc].Exists) {\n")
                                        #endif
                                        ParseStruct(StructList,Struct,Indent+" ","SC[%ld].","Isc","SC[Isc].","",1)
                                        if Verb == "WriteTo":
                                            outfile.write(Indent+" }\n")
                                            outfile.write(Indent+"}\n\n")
                                        #endif
                                    #endif
                                #endif
                                if Struct["Table Name"] == "OrbitType":
                                    if ParmPass == 0:
                                        if Verb == "WriteTo":
                                            outfile.write(Indent+"for(Iorb=0;Iorb<Norb;Iorb++) {\n")
                                            outfile.write(Indent+" if (Orb[Iorb].Exists) {\n")
                                        #endif
                                        ParseStruct(StructList,Struct,Indent+" ","Orb[%ld].","Iorb","Orb[Iorb].","",1)
                                        if Verb == "WriteTo":
                                            outfile.write(Indent+" }\n")
                                            outfile.write(Indent+"}\n\n")
                                        #endif
                                    #endif
                                #endif
                                if Struct["Table Name"] == "WorldType":
                                    if ParmPass == 0:
                                        if Verb == "WriteTo":
                                            outfile.write(Indent+"for(Iw=1;Iw<NWORLD;Iw++) {\n")
                                            outfile.write(Indent+" if (World[Iw].Exists) {\n")
                                        #endif
                                        ParseStruct(StructList,Struct,Indent+" ","World[%ld].","Iw","World[Iw].","",1)
                                        if Verb == "WriteTo":
                                            outfile.write(Indent+" }\n")
                                            outfile.write(Indent+"}\n\n")
                                        #endif
                                    #endif
                                #endif
                                # Sim side, pass 1: the AC parameter table, wrapped in the
                                # spacecraft's ParmLoadEnabled/ParmDumpEnabled guards.
                                if Struct["Table Name"] == "AcType":
                                    if ParmPass == 1:
                                        if Verb == "WriteTo":
                                            Indent = " "
                                            outfile.write(Indent+"for(Isc=0;Isc<Nsc;Isc++) {\n")
                                            outfile.write(Indent+" if (SC[Isc].Exists) {\n")
                                            outfile.write(Indent+" if (SC[Isc].AC.ParmLoadEnabled) {\n")
                                        #endif
                                        else:
                                            Indent = " "
                                            outfile.write(Indent+"for(Isc=0;Isc<Nsc;Isc++) {\n")
                                            outfile.write(Indent+" if (SC[Isc].Exists) {\n")
                                            outfile.write(Indent+" if (SC[Isc].AC.ParmDumpEnabled) {\n")
                                            Indent += " "
                                        #endif
                                        ParseStruct(StructList,Struct,Indent+" ","SC[%ld].AC.","Isc","SC[Isc].AC.","",1)
                                        if Verb == "WriteTo":
                                            outfile.write(Indent+" }\n")
                                            outfile.write(Indent+" }\n")
                                            outfile.write(Indent+"}\n\n")
                                        else:
                                            Indent = Indent[0:-3]
                                            outfile.write(Indent+" }\n")
                                            outfile.write(Indent+" }\n")
                                            outfile.write(Indent+"}\n\n")
                                        #endif
                                    #endif
                                #endif
                            else:
                                # App side: only the AC table is marshalled; the app works
                                # through an AC-> pointer instead of the SC[] array.
                                if Struct["Table Name"] == "AcType":
                                    if ParmPass == 1:
                                        if Verb == "WriteTo":
                                            Indent = " "
                                            outfile.write(Indent+" if (AC->ParmDumpEnabled) {\n")
                                        #endif
                                        else:
                                            Indent = " "
                                            outfile.write(Indent+"if (AC->ParmLoadEnabled) {\n")
                                        #endif
                                        ParseStruct(StructList,Struct,Indent+" ","SC[%ld].AC.","Isc","AC->","",1)
                                        if Verb == "WriteTo":
                                            outfile.write(Indent+" }\n\n")
                                        else:
                                            outfile.write(Indent+"}\n\n")
                                        #endif
                                    else:
                                        if Verb == "WriteTo":
                                            Indent = " "
                                        #endif
                                        else:
                                            Indent = " "
                                        #endif
                                        ParseStruct(StructList,Struct,Indent,"SC[%ld].AC.","Isc","AC->","",1)
                                    #endif
                                #endif
                            #endif
                        #next Struct
                    #next ParmPass
                    if Verb == "WriteTo":
                        WriteEpilog()
                    elif Verb == "ReadFrom":
                        ReadEpilog()
                        TimeRefreshCode()
                        if Prog == "Sim":
                            StateRefreshCode()
                        #endif
                        outfile.write("}\n")
                    #endif
                    # Post-process: strip loops that ended up empty, install the
                    # result under ../Source/IPC/, and remove the temp file.
                    outfile.close()
                    infile = open("TempIpc.c")
                    outfile = open("../Source/IPC/"+Prog+Verb+Pipe+".c","w")
                    StripEmptyLoops(infile,outfile)
                    infile.close()
                    outfile.close()
                    os.remove("TempIpc.c")
                #endif
            #next Pipe
        #next Verb
    #next Prog
########################################################################
# Script entry point: regenerate all Sim/App IPC read/write C sources.
if __name__ == '__main__': main()
| [
"Eric.T.Stoneking@nasa.gov"
] | Eric.T.Stoneking@nasa.gov |
17b8933019a86e9c2ffbcdaa3cb4a887f9d66940 | 7411152e1618fe463d170e78fc8df594de9ce88e | /web_scrapper.py | b22663f1377bfd8bab6de148fe86b01a03892fbe | [] | no_license | neramas1221/Bird_web_scrapper | 31aa7f92df2223966359d9235feb452ea3f0d78a | 11fb372932b5443f5586e0af7eb5ce88afbc25ce | refs/heads/master | 2020-04-01T14:54:06.181993 | 2019-03-19T13:43:48 | 2019-03-19T13:43:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,201 | py | import requests
from bs4 import BeautifulSoup
import time
import numpy as np
import pandas as pd
import re
# --- scraper configuration -------------------------------------------------
page_counter = 8040 # start page; 8575 was apparently the last page at the time
page_url = "https://www.xeno-canto.org/explore?dir=0&order=xc&pg="  # listing URL; page number is appended
download_url = "https://www.xeno-canto.org"  # base for the relative download hrefs found in the table
folder = "/media/neramas1221/Maxtor/sounds/"  # destination directory for the .mp3 downloads
row_Data = []    # parsed rows of the current results page
col_Data = []    # parsed cells of the row currently being processed
cont = True      # cleared once a short (final) results page is seen
table_data = np.array([])  # only used by the commented-out pandas export below
# Fetch one results page per iteration: scrape its table, append the rows to
# a quoted, comma-separated text dump, and download each row's recording.
while cont:
    print("loop " + str(page_counter))
    row_Data = []
    page = requests.get(page_url + str(page_counter))
    soup = BeautifulSoup(page.text, 'lxml')
    table = soup.find(class_="results")
    rows = table.find_all('tr')
    # Skip the header row (index 0) and the first cell of each row.
    for row in range(1, len(rows)):
        cols = rows[row].find_all('td')
        col_Data = []
        for col in range(1, len(cols)):
            # Default to the raw cell content, or a blank placeholder.
            if cols[col].contents !=[]:
                info = cols[col].contents[0]
            else:
                info = " "
            # Prefer nested <p> text; otherwise take an <a> (download
            # href, or the link's text for ordinary links).
            if cols[col].find_all('p') != []:
                info = cols[col].find('p')
                info = info.contents[0]
            elif cols[col].find_all('a') != []:
                info = cols[col].find('a')
                if 'download' in str(info):
                    info = info['href']
                else:
                    info = info.contents[0]
            # Rating widgets contribute an extra cell value of their own.
            if cols[col].find_all(class_='rating') != []:
                if cols[col].find_all(class_='selected') != []:
                    section = cols[col].find(class_='selected')
                    rating = section.contents[0]
                    rating = rating.contents[0]
                    col_Data.append(rating)
                else:
                    col_Data.append(" ")
            # Collapse internal whitespace to single spaces.
            info = " ".join(str(info).split())
            col_Data.append(info)
        row_Data.append(col_Data)
    # Append this page's rows to the running text dump.
    # NOTE(review): `cols` below is left over from the *last* row parsed
    # above — this assumes every row has the same cell count; verify.
    f = open("/media/neramas1221/Maxtor/bird_data_text.txt", "a")
    for i in range(0, len(row_Data)):
        for j in range(0, len(cols)):
            # Strip characters that would break the quoting/CSV format.
            row_Data[i][j] = re.sub('[!@#$",]', '', row_Data[i][j])
            f.write('"' + str(row_Data[i][j]) + '"' + ",")
        f.write("\n")
    f.close()
    # Download each row's recording.
    # NOTE(review): indices 11 and 12 presumably hold the download link and
    # the recording ID — depends on a fixed column layout; confirm.
    for i in range(0, len(row_Data)):
        print("Downloading...")
        if "img" not in (download_url + str(row_Data[i][11])):
            r = requests.get(download_url + str(row_Data[i][11]), stream=True)
            with open(folder+str(row_Data[i][12])+".mp3", 'wb') as f:
                for chunk in r.iter_content(chunk_size=1024):
                    if chunk:
                        f.write(chunk)
                f.close()
            print("...Done")
            time.sleep(0.1)
    # if page_counter == 1:
    #     table_data = np.array(row_Data)
    # else:
    #     table_data = np.vstack((table_data, row_Data))
    # A full page has 30 rows; a short page means the last one was scraped.
    if len(row_Data) != 30:
        cont = False
    else:
        page_counter = page_counter + 1
#output = pd.DataFrame(table_data,columns = ['Common name','Length','Recordist','Date','Time','Country',
#                                    'Location','Elev. (m)','Type','Remarks','Rating','Download link',
#                                    'ID'])
#output.to_csv("data_set.csv")
print(table_data.size)
print(table_data.shape)
| [
"connorwheeler1997@gmail.com"
] | connorwheeler1997@gmail.com |
38ec4f2c3c29b218a061ec89a9850e93af9664f6 | c0057c0bfa216ec991894352700c8e7bca52ad4a | /test2.py | 9638768a9f5438f8d5af7319ec5bf987a0ec0be7 | [] | no_license | syurituu/exercise | f040150d2a734fbc458038de2834b84585c5adaa | c7d4f5e16b79305c513c6576d9edf3da4b051974 | refs/heads/master | 2020-12-13T18:45:07.937811 | 2020-01-17T07:48:50 | 2020-01-17T07:48:50 | 234,498,432 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 361 | py | import jieba
def splitfile(inputFile, outputFile):
    """Segment a Chinese text file with jieba, one line at a time.

    Each line of *inputFile* is whitespace-stripped, segmented with
    jieba, and written to *outputFile* with the resulting tokens
    separated by single spaces (one output line per input line).

    Args:
        inputFile: path of the raw text file to read.
        outputFile: path of the file the segmented text is written to.
    """
    # Fix: the original opened/closed the files by hand, which leaks both
    # handles if jieba (or a write) raises; 'with' guarantees cleanup.
    with open(inputFile, 'r', errors='ignore') as fin, \
         open(outputFile, 'w', errors='ignore') as fout:
        for line in fin:
            # Strip the trailing newline before segmenting, then re-join
            # the tokens with spaces and terminate the output line.
            outstr = " ".join(jieba.cut(line.strip()))
            fout.write(outstr + '\n')
splitfile('msr_test.txt','result2.txt') | [
"zhutom1996@gmail.com"
] | zhutom1996@gmail.com |
60860eacc8024b7eec8832f1bace9276b752943b | 9af43f9f52ab8726caacdd594980d5e0bf462c40 | /flask_transmute/decorators.py | 29e4c8463f456ffad3e1540e4880e4cebb3c4467 | [] | no_license | elindell/flask-transmute | 3b28509fee071e606be0021bfdc63bff85b51a38 | bd3c103c5eca9a5e4071f71be4a12460acddfd26 | refs/heads/master | 2021-01-22T09:16:45.945064 | 2016-04-04T08:49:08 | 2016-04-04T08:49:08 | 67,669,319 | 0 | 0 | null | 2016-09-08T04:48:59 | 2016-09-08T04:48:59 | null | UTF-8 | Python | false | false | 769 | py | def updates(f):
"""
this labels a function as one that updates data.
"""
f.updates = True
return f
def creates(f):
    """Mark *f* as a function that creates data.

    Stores a ``creates`` flag on the function object and hands the
    function back unchanged, so this works as a plain decorator.
    """
    setattr(f, "creates", True)
    return f
def deletes(f):
    """Mark *f* as a function that deletes data.

    Stores a ``deletes`` flag on the function object and hands the
    function back unchanged, so this works as a plain decorator.
    """
    setattr(f, "deletes", True)
    return f
def annotate(annotations):
    """Attach a parameter-annotation mapping to a function.

    Python 2 has no syntax for parameter annotations, so this decorator
    assigns the given mapping to ``__annotations__`` directly::

        @annotate({"a": str, "b": int, "return": bool})
        def foo(a, b):
            ...
    """
    def apply_annotations(func):
        func.__annotations__ = annotations
        return func
    return apply_annotations
| [
"tsutsumi.yusuke@gmail.com"
] | tsutsumi.yusuke@gmail.com |
468c319fce38acce24ace0e88fc5325e3bdc9b49 | 4b56e86b33a52d2d1808d6b80f13a169f3f6159a | /ImproveDeepNN/OptimizationMethods/Momentum.py | c93deca6fb6eb05c5f1e573354eb5b638d4451d0 | [] | no_license | vandeppce/DeepLearning | d922d4780afab5b733cc7a87b0c167bc4b2cfc81 | 5e2d4a34b8a5a1c81e132a1a70e2b503859b1f7d | refs/heads/master | 2021-04-03T04:13:07.250159 | 2018-05-24T04:49:51 | 2018-05-24T04:49:51 | 124,764,156 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,219 | py | import numpy as np
import matplotlib.pyplot as plt
import scipy.io
import math
import sklearn
import sklearn.datasets
from ImproveDeepNN.OptimizationMethods.opt_utils import *
from ImproveDeepNN.OptimizationMethods.testCases import *
def initialize_velocity(parameters):
    """Create zero-filled momentum velocities, one per parameter tensor.

    Arguments:
    parameters -- dict with keys "W1", "b1", ..., "WL", "bL" mapping to
                  numpy arrays.

    Returns:
    v -- dict with keys "dW1", "db1", ..., "dWL", "dbL" mapping to numpy
         arrays of zeros shaped like the matching parameter.
    """
    num_layers = len(parameters) // 2
    velocity = {}
    for layer in range(1, num_layers + 1):
        velocity["dW%d" % layer] = np.zeros(parameters["W%d" % layer].shape)
        velocity["db%d" % layer] = np.zeros(parameters["b%d" % layer].shape)
    return velocity
'''
parameters = initialize_velocity_test_case()
v = initialize_velocity(parameters)
print("v[\"dW1\"] = " + str(v["dW1"]))
print("v[\"db1\"] = " + str(v["db1"]))
print("v[\"dW2\"] = " + str(v["dW2"]))
print("v[\"db2\"] = " + str(v["db2"]))
'''
def update_parameters_with_momentum(parameters, grads, v, beta, learning_rate):
    """Perform one gradient-descent-with-momentum step over all layers.

    Each velocity is updated as an exponentially weighted average of the
    gradients, v = beta * v + (1 - beta) * grad, and each parameter is
    then moved against its velocity scaled by the learning rate.

    Arguments:
    parameters -- dict of "W1", "b1", ... numpy arrays
    grads -- dict of "dW1", "db1", ... numpy arrays (matching shapes)
    v -- dict of current velocities "dW1", "db1", ...
    beta -- momentum coefficient, scalar
    learning_rate -- step size, scalar

    Returns:
    parameters -- the updated parameter dict
    v -- the updated velocity dict
    """
    num_layers = len(parameters) // 2
    for layer in range(1, num_layers + 1):
        # Treat the weight matrix and bias vector of each layer uniformly.
        for prefix in ("W", "b"):
            param_key = prefix + str(layer)
            grad_key = "d" + param_key
            v[grad_key] = beta * v[grad_key] + (1.0 - beta) * grads[grad_key]
            parameters[param_key] = parameters[param_key] - learning_rate * v[grad_key]
    return parameters, v
'''
parameters, grads, v = update_parameters_with_momentum_test_case()
parameters, v = update_parameters_with_momentum(parameters, grads, v, beta = 0.9, learning_rate = 0.01)
print("W1 = " + str(parameters["W1"]))
print("b1 = " + str(parameters["b1"]))
print("W2 = " + str(parameters["W2"]))
print("b2 = " + str(parameters["b2"]))
print("v[\"dW1\"] = " + str(v["dW1"]))
print("v[\"db1\"] = " + str(v["db1"]))
print("v[\"dW2\"] = " + str(v["dW2"]))
print("v[\"db2\"] = " + str(v["db2"]))
'''
| [
"sdtczyj@163.com"
] | sdtczyj@163.com |
41ad7dfb509fba61890e0aea60cd3b110cef1c09 | 01362c32c4f28774d35eb040fce84e1bdc1dbe36 | /programming/migrations/0006_auto__add_field_film_picture__add_field_festival_picture__add_field_gi.py | f9215083aa7020ad553feb308aef4229099fee4b | [] | no_license | marcusvalentine/starandshadow | 90c434e04829248a69c6a4d9cf6b32eb395d42c9 | 93a74542bc5d949238232632ee85de66438f78bb | refs/heads/master | 2020-03-29T16:24:36.163141 | 2018-05-10T13:32:08 | 2018-05-10T13:32:08 | 150,112,777 | 0 | 0 | null | 2018-09-24T14:10:08 | 2018-09-24T14:10:07 | null | UTF-8 | Python | false | false | 17,082 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration: add an optional ``picture`` ForeignKey (to
    ``fileupload.Picture``) on Film, Festival, Gig and Season, and
    backfill it from each row's existing ``graphic`` image field."""

    def forwards(self, orm):
        # Schema change: add the nullable picture FK column to all four tables.
        # Adding field 'Film.picture'
        db.add_column('programming_film', 'picture', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['fileupload.Picture'], null=True, blank=True), keep_default=False)

        # Adding field 'Festival.picture'
        db.add_column('programming_festival', 'picture', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['fileupload.Picture'], null=True, blank=True), keep_default=False)

        # Adding field 'Gig.picture'
        db.add_column('programming_gig', 'picture', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['fileupload.Picture'], null=True, blank=True), keep_default=False)

        # Adding field 'Season.picture'
        db.add_column('programming_season', 'picture', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['fileupload.Picture'], null=True, blank=True), keep_default=False)

        # Data backfill: for every row with a non-empty graphic, link an
        # existing Picture with the same file path or create a new one.
        # NOTE(review): the bare ``except`` treats *any* failure as
        # "no matching Picture" and creates one; presumably only
        # DoesNotExist is intended here.
        if not db.dry_run:
            for event in orm.Film.objects.all():
                if event.graphic.name != '':
                    try:
                        event.picture = orm['fileupload.Picture'].objects.get(file=event.graphic.name)
                    except:
                        p = orm['fileupload.Picture'](file=event.graphic.name)
                        p.save()
                        event.picture = p
                    event.save()
            for event in orm.Festival.objects.all():
                if event.graphic.name != '':
                    try:
                        event.picture = orm['fileupload.Picture'].objects.get(file=event.graphic.name)
                    except:
                        p = orm['fileupload.Picture'](file=event.graphic.name)
                        p.save()
                        event.picture = p
                    event.save()
            for event in orm.Gig.objects.all():
                if event.graphic.name != '':
                    try:
                        event.picture = orm['fileupload.Picture'].objects.get(file=event.graphic.name)
                    except:
                        p = orm['fileupload.Picture'](file=event.graphic.name)
                        p.save()
                        event.picture = p
                    event.save()
            for event in orm.Season.objects.all():
                if event.graphic.name != '':
                    try:
                        event.picture = orm['fileupload.Picture'].objects.get(file=event.graphic.name)
                    except:
                        p = orm['fileupload.Picture'](file=event.graphic.name)
                        p.save()
                        event.picture = p
                    event.save()

    def backwards(self, orm):
        # Reverse migration: drop the FK columns (backfilled Pictures are kept).
        # Deleting field 'Film.picture'
        db.delete_column('programming_film', 'picture_id')

        # Deleting field 'Festival.picture'
        db.delete_column('programming_festival', 'picture_id')

        # Deleting field 'Gig.picture'
        db.delete_column('programming_gig', 'picture_id')

        # Deleting field 'Season.picture'
        db.delete_column('programming_season', 'picture_id')

    # Frozen model definitions consumed via orm[...] above; do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'fileupload.picture': {
            'Meta': {'object_name': 'Picture'},
            'file': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '200', 'blank': 'True'})
        },
        'programming.event': {
            'Meta': {'ordering': "['start']", 'object_name': 'Event'},
            'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'end': ('django.db.models.fields.TimeField', [], {}),
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'picture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['fileupload.Picture']", 'null': 'True', 'blank': 'True'}),
            'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'programmer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['programming.Programmer']"}),
            'start': ('django.db.models.fields.DateTimeField', [], {}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'programming.festival': {
            'Meta': {'ordering': "['start']", 'object_name': 'Festival'},
            'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'end': ('django.db.models.fields.DateTimeField', [], {}),
            'events': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['programming.Event']", 'symmetrical': 'False', 'blank': 'True'}),
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'films': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['programming.Film']", 'symmetrical': 'False', 'blank': 'True'}),
            'gigs': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['programming.Gig']", 'symmetrical': 'False', 'blank': 'True'}),
            'graphic': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'picture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['fileupload.Picture']", 'null': 'True', 'blank': 'True'}),
            'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'programmer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['programming.Programmer']"}),
            'start': ('django.db.models.fields.DateTimeField', [], {}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'programming.film': {
            'Meta': {'ordering': "['start']", 'object_name': 'Film'},
            'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'certificate': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['programming.Rating']"}),
            'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'director': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'filmFormat': ('django.db.models.fields.CharField', [], {'default': "'UK'", 'max_length': '15'}),
            'graphic': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'lang': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
            'length': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'picture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['fileupload.Picture']", 'null': 'True', 'blank': 'True'}),
            'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'programmer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['programming.Programmer']"}),
            'season': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['programming.Season']"}),
            'start': ('django.db.models.fields.DateTimeField', [], {}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'year': ('django.db.models.fields.CharField', [], {'max_length': '150', 'blank': 'True'})
        },
        'programming.gig': {
            'Meta': {'ordering': "['start']", 'object_name': 'Gig'},
            'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'end': ('django.db.models.fields.TimeField', [], {}),
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'graphic': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'picture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['fileupload.Picture']", 'null': 'True', 'blank': 'True'}),
            'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'programmer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['programming.Programmer']"}),
            'start': ('django.db.models.fields.DateTimeField', [], {}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
        },
        'programming.meeting': {
            'Meta': {'ordering': "['start']", 'object_name': 'Meeting'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'programmer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['programming.Programmer']"}),
            'start': ('django.db.models.fields.DateTimeField', [], {}),
            'title': ('django.db.models.fields.CharField', [], {'default': "'General Meeting'", 'max_length': '150'})
        },
        'programming.programmer': {
            'Meta': {'ordering': "['name']", 'object_name': 'Programmer'},
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'homePhone': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'mobilePhone': ('django.db.models.fields.CharField', [], {'max_length': '15', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'photo': ('django.db.models.fields.files.ImageField', [], {'default': "'img/programmer/ron1-small.jpg'", 'max_length': '100', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
        },
        'programming.rating': {
            'Meta': {'ordering': "['name']", 'object_name': 'Rating'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'largeImage': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
            'smallImage': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'})
        },
        'programming.season': {
            'Meta': {'ordering': "['start']", 'object_name': 'Season'},
            'body': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'confirmed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'end': ('django.db.models.fields.DateField', [], {}),
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'graphic': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'picture': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['fileupload.Picture']", 'null': 'True', 'blank': 'True'}),
            'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'programmer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['programming.Programmer']"}),
            'start': ('django.db.models.fields.DateField', [], {}),
            'summary': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '150'})
        }
    }
    complete_apps = ['programming']
| [
"sjk@psimonkey.org.uk"
] | sjk@psimonkey.org.uk |
7c818c5e714842a6d73b7c92f35e9830888d1a26 | c6a37df4e40530dd87f89de5c8f6d98b10173c63 | /concolic.py | b182cfc1bac7d0fa89b9d437352d1a084534721a | [] | no_license | KevinBender/AutomatedConcolic | ed88fc7cbe122d76679093b6945214c0ecd4a0b6 | 7c5e50f434bff0d3e25c4f346790915a55397d7a | refs/heads/master | 2023-05-01T04:42:08.827665 | 2021-05-18T14:50:42 | 2021-05-18T14:50:42 | 361,894,492 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,125 | py | import z3
import traceback
from inspect import currentframe, signature
from dataclasses import dataclass
"""
This file implements a concolic execution engine. It assumes that
a program under test is instrumented and will use the API
concolic.guard() to collect path constraints, and the API
concolic.set() and concolic.set() to manipulate a symbolic store.
The main algorithm in this file is from the DART paper:
Patrice Godefroid, Nils Klarlund, and Koushik Sen. 2005.
DART: Directed Automated Random Testing. In Proceedings of
the 2005 ACM SIGPLAN conference on Programming Language
Design and Implementation (PLDI '05).
DOI: https://doi.org/10.1145/1065010.1065036
The entry point to the engine is concolic.run().
"""
@dataclass
class PathRecord:
    """One branch point seen during an execution; used to replay/negate paths."""
    line: int   # source line of the concolic.guard() call that recorded the branch
    done: bool  # True once the negated side of this branch has been explored
class ConcolicException(Exception):
    """Raised when a replayed execution reaches an unexpected branch (unsoundness)."""
    pass
# Globals: module-level engine state shared by the instrumentation hooks below.
symbols = None       # var name -> z3.Int symbol, created once per session by init()
store = None         # var name -> symbolic expression (the concolic store)
current_path = None  # list of z3 guards collected along the current execution
path_record = None   # list of PathRecord, one per branch seen (drives replay)
solver = None        # z3.Solver holding the current path constraint
def init(vars):
    """Create fresh z3 integer symbols for *vars* and reset the engine state."""
    global path_record, symbols
    symbols = {name: z3.Int(name) for name in vars}
    path_record = []
    reset()
def reset():
    """Begin a new execution: fresh store copy, empty path, empty solver."""
    global store, current_path, solver
    store = dict(symbols)
    current_path = []
    solver = z3.Solver()
def get(x):
    """Return the concolic store mapping of var `x` (0 when testing is inactive)."""
    return store[x] if store is not None else 0
def set(x, a):
    """Map var `x` to expression `a` in the concolic store (no-op when inactive).

    Note: the name shadows the builtin ``set`` at module scope; it is kept
    for compatibility with the instrumented programs that call it.
    """
    if store is None:
        return
    store[x] = a
def guard(g):
    """Add `g` to the current path constraint.

    Also checks the branch against the replay record: the k-th guard of a
    run is expected at the source line stored for position k of
    `path_record`.  A mismatch while replaying a negated path means the
    symbolic model was unsound, and raises ConcolicException.
    """
    if solver is None:
        return # Concolic testing is not running
    solver.append(g)
    assert solver.check()
    # Get line number of guard (the caller's line, i.e. the instrumented branch)
    line = int(currentframe().f_back.f_lineno)
    # We are just seeing the k-th branch in this execution
    k = len(current_path)
    # Append to current path
    current_path.append(g)
    # Check if we have an expected k-th branch in the path record else add to it
    if k < len(path_record):
        if k == len(path_record)-1:
            # We just got to the last negated guard
            if path_record[k].line == line:
                # Negating the constraint should have steered execution away
                # from this line; landing on the same line again is unsound.
                raise ConcolicException(("Unsoundness! Current path is %s and I'm back on line %d, " +\
                    "but I was expecting to have negated this branch") % (current_path, line))
            else:
                # The negation worked: record the new line and mark this
                # branch as fully explored.
                path_record[k].line = line
                path_record[k].done = True
        elif path_record[k].line != line:
            # We got to an unexpected branch
            raise ConcolicException(("Unsoundness! Current path is %s and I'm on line %d, " +\
                "but I was expecting to go to line %d.") % (current_path, line, path_record[k].line))
        # else: do nothing, we saw an expected branch
    else:
        path_record.append(PathRecord(line, False)) # Set `done`=False initially
def dump_path():
    """Print the collected Z3 path constraints in Z3's short-hand form."""
    if solver is None:
        return
    print(solver)
def dump_smt():
    """Print the collected Z3 path constraints in SMT-LIB 2 format."""
    if solver is None:
        return
    print(solver.to_smt2())
# Top-level runner
def run(func, vars):
    """Concolically executes `func` with parameters `vars` and returns
    (total_paths:int, bug_found:bool).

    Implements the DART directed-search loop (see module docstring): run
    `func` on concrete inputs, collect the path constraint via guard(),
    then repeatedly negate the deepest unexplored branch and solve for new
    inputs until every recorded branch has been explored both ways.
    """
    global store, current_path, path_record, solver
    # Initialize state
    inputs = {str(v): 0 for v in vars} # Could also be random
    init(vars)
    total_runs = 0
    bug_found = False
    while True:
        # Run concolically
        try :
            print("Running with inputs %s" % inputs)
            total_runs += 1
            func(**inputs)
        except AssertionError as e:
            # An assert in the program under test fired: report the bug but
            # keep exploring the remaining paths.
            traceback.print_exc()
            print("*** Assertion violation found! Inputs are: %s" % inputs)
            bug_found = True
        finally:
            print("... Path collected: %s" % current_path)
        # print("Path Record: %s" % path_record)
        # Figure out the next guard to negate (`next` shadows the builtin here)
        next = len(current_path)-1
        while True:
            # Skip branches whose negation has already been explored.
            while next >= 0 and path_record[next].done:
                next = next - 1
            if next == -1:
                print("Concolic execution complete! %d paths explored." % total_runs)
                # TODO: Actually do a random restart if there was any unsoundness observed
                return total_runs, bug_found
            else:
                # print("next idx=%d" % next)
                # Create a new path constraint up to `next` with the condition at index `next` negated
                current_path = current_path[:next] + [z3.Not(current_path[next])]
                path_record = path_record[:next+1]
                solver.reset()
                solver.insert(current_path)
                # print("Path Record: %s" % path_record)
                print("... Negating the condition at line %d...." % path_record[-1].line)
                print("...... New candidate path: %s" % current_path)
                is_sat = solver.check()
                if is_sat == z3.sat:
                    # Feasible: extract a concrete input assignment for the next run.
                    model = solver.model()
                    inputs = {var_name: model.eval(var_symbol, model_completion=True).as_long()
                              for var_name, var_symbol in symbols.items()}
                    print("...... SAT! New inputs are: %s" % inputs)
                    reset()
                    print()
                    break
                elif is_sat == z3.unsat:
                    print("...... UNSAT!")
                    next = next - 1
                    continue # Go look for the next branch to negate
                else:
                    raise Exception("You should not get a z3 result of %s." % is_sat)
    return
| [
"theguykevin@gmail.com"
] | theguykevin@gmail.com |
a6cb5f2f4b102661f20b0783836f1b11b3805ee9 | 3ebe732756194704043bb353c768ebb26cfed22e | /enbuyuksayi.py | e0d3d117bb893497f93897df0bc0708ec1b5cef0 | [] | no_license | BarisKamis/CLARUSWAY | 68ddf303d992d91268e0b5ef472fab13d1bdd554 | 63b2e0b3e119fdcbc27c956b436226134bec57b7 | refs/heads/main | 2023-06-09T16:18:25.579005 | 2021-07-01T20:48:01 | 2021-07-01T20:48:01 | 382,150,801 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | sayilar=[]
# Read five integers into `sayilar` (initialised above) and track the
# largest one in `enbuyuk` for the final print below.
# Fixes: the manual `i=i+1` statements inside both `for` loops were dead
# code (the loop variable is reassigned each iteration), and the running
# maximum was seeded from sayilar[1] — correct here only because the whole
# list is rescanned, and an IndexError for lists shorter than two —
# instead of sayilar[0].
for _ in range(1, 6):
    print("bir sayi girin")
    sayilar.append(int(input()))
enbuyuk = sayilar[0]
for sayi in sayilar:
    if sayi > enbuyuk:
        enbuyuk = sayi
print ("en buyuk sayi:", enbuyuk) | [
"noreply@github.com"
] | noreply@github.com |
be7efb58da4aaf5477a500e8e99560403997a417 | f8200c79eeddbf64f39ca21ac0afcaa1c235703a | /debug.py | dc93da203b599c61812c22b41857c85bd4143eb9 | [] | no_license | glanton/robocritic | dbe98eb2ec7d8f79fa482f547f480e780b0688db | afd9545dfa158957e7ca9d40fcf59b410d192dcb | refs/heads/master | 2016-08-05T03:52:24.779438 | 2015-05-01T20:57:25 | 2015-05-01T20:57:25 | 34,097,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 817 | py | # functions and data used for debugging purposes
# Maps each counter name to the count it will report next.
_count_dict = {}
# Tick the named counter. The first call for a name announces the counter
# at zero and starts it at 1; every later call prints the stored count only
# when it is a multiple of `frequency`, then increments it.
def run_counter(name, frequency):
    if name not in _count_dict:
        # New counter: announce it and start counting.
        print(name + ": 0")
        _count_dict[name] = 1
        return
    count = _count_dict[name]
    if count % frequency == 0:
        print(" - " + name + ": " + str(count))
    _count_dict[name] = count + 1
# Zero out an existing counter so it can be reused; unknown names are ignored.
def reset_counter(name):
    if name not in _count_dict:
        return
    _count_dict[name] = 0
| [
"classay@gmail.com"
] | classay@gmail.com |
1262b3cda999d37cac7c4fdb37305365b0c404ab | 38df8849ad0b43594dafb94008fd0036951dde85 | /regexquerytool/regexquerytool/wsgi.py | 505c9396070827e73bb9d9157051f0d3b84ff44c | [] | no_license | yasminhillis/regex_query_tool | ffebdcfeb312e9ec164f4ca429aa23c14a58823d | 10e713755a9b35350ab467f515cbb541af891f0d | refs/heads/main | 2023-01-05T00:03:23.064281 | 2020-11-08T09:33:21 | 2020-11-08T09:33:21 | 306,550,614 | 1 | 0 | null | 2020-11-08T09:33:22 | 2020-10-23T06:40:27 | JavaScript | UTF-8 | Python | false | false | 683 | py | """
WSGI config for regexquerytool project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
import sys

from django.core.wsgi import get_wsgi_application

# Select the Django settings module before the application object is built.
# The explicit assignment below always wins, which made the trailing
# os.environ.setdefault(...) call that used to follow it a no-op, so only
# this form is kept; the commented-out sys.path manipulation was likewise
# dead code and has been removed.
os.environ['DJANGO_SETTINGS_MODULE'] = 'regexquerytool.settings'

# The WSGI callable that application servers import from this module.
application = get_wsgi_application()
| [
"jasminehillis94@gmail.com"
] | jasminehillis94@gmail.com |
5fc9f33ad9117e2363e5ed12f9e8c613c93c79bf | 3ac9007691cacf0620530baf04dda9dd85fee556 | /usr/bin/gooroom-update | db947198d61acf88ab08e6d71ed22d8f717e9f75 | [] | no_license | ozun215/gooroom-update | 406cec20414486113d04867ad8f5ec9fc455378d | 1b69e58b001c36569557f51767310f33fa1a2dc8 | refs/heads/master | 2022-12-01T11:36:19.672171 | 2020-07-08T11:10:11 | 2020-07-08T11:10:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | #!/usr/bin/python
import os
import commands  # NOTE(review): imported but never used here — candidate for removal
# Shell command that opens the Gooroom update window; the trailing "&"
# makes the shell run it in the background so this script returns at once.
command="/usr/lib/gooroom/gooroomUpdate/gooroomUpdate.py show &"
os.system(command)  # executed via the shell; the command string is fixed, not user input
| [
"gooroom@gooroom.kr"
] | gooroom@gooroom.kr | |
e0e32be403a6963887949ef4f1269a652f11e196 | 89e6c3548fbdd06178aae712de1ff19004bc2faa | /my_django/contrib/localflavor/sk/forms.py | f5428d879572000d4ed3f57df9882da6f007f378 | [] | no_license | bhgv/ublog_git.hg.repo-django.python-engine | a3f3cdcbacc95ec98f022f9719d3b300dd6541d4 | 74cdae100bff5e8ab8fb9c3e8ba95623333c2d43 | refs/heads/master | 2020-03-23T01:04:07.431749 | 2018-07-25T12:59:21 | 2018-07-25T12:59:21 | 140,899,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,560 | py | """
Slovak-specific form helpers
"""
from __future__ import absolute_import
from my_django.contrib.localflavor.sk.sk_districts import DISTRICT_CHOICES
from my_django.contrib.localflavor.sk.sk_regions import REGION_CHOICES
from my_django.forms.fields import Select, RegexField
from my_django.utils.translation import ugettext_lazy as _
class SKRegionSelect(Select):
    """
    A select widget with list of Slovak regions as choices.
    """
    def __init__(self, attrs=None):
        # REGION_CHOICES comes from the localflavor data module imported above.
        super(SKRegionSelect, self).__init__(attrs, choices=REGION_CHOICES)
class SKDistrictSelect(Select):
    """
    A select widget with list of Slovak districts as choices.
    """
    def __init__(self, attrs=None):
        # DISTRICT_CHOICES comes from the localflavor data module imported above.
        super(SKDistrictSelect, self).__init__(attrs, choices=DISTRICT_CHOICES)
class SKPostalCodeField(RegexField):
    """
    A form field that validates Slovak postal codes.
    Accepted formats are XXXXX and XXX XX, where each X is a digit.
    """
    default_error_messages = {
        'invalid': _(u'Enter a postal code in the format XXXXX or XXX XX.'),
    }
    def __init__(self, max_length=None, min_length=None, *args, **kwargs):
        super(SKPostalCodeField, self).__init__(r'^\d{5}$|^\d{3} \d{2}$',
            max_length, min_length, *args, **kwargs)
    def clean(self, value):
        """
        Validate the input and return a string containing only digits (the
        optional separating space is stripped); empty values clean to ''.
        """
        cleaned = super(SKPostalCodeField, self).clean(value)
        return cleaned.replace(' ', '')
| [
"bhgv.empire@gmail.com"
] | bhgv.empire@gmail.com |
e784cfeb07b1b4b44de67e5f78c4e17cfbf1338b | 1d717c797e93b451f7da7c810a0fb4075b1050d5 | /src/data/dataset/basic_dataset.py | bc875ea6516703ea40caa5028c2b7984ad5dd2fa | [] | no_license | jessie0624/nlp-task | 32338b08051a3ea192db2bf74c9c969bdff1f6ad | aaeeed86341356d9fd061664f6f7bccf2ac353d0 | refs/heads/master | 2023-01-24T12:06:13.323646 | 2020-12-10T08:38:23 | 2020-12-10T08:38:23 | 292,151,135 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,072 | py |
import numpy as np
from src.base import BaseDataset
class BasicDataset(BaseDataset):
    """A minimal in-memory dataset pairing inputs `x` with labels `y`."""
    def __init__(self, x: list, y: list, callbacks=None):
        super().__init__(callbacks=callbacks)
        self.x = x
        self.y = y
        self.sample() # build the candidate index pool first — presumably defined on BaseDataset; confirm
    def get_index_pool(self):
        '''
        The index pool stores the index list returned by each lookup.
        :return: list of one-element index lists
        '''
        # Defaults to the length of x; kept two-dimensional for uniformity,
        # i.e. [[0], [1], [2], ...]
        index_pool = np.expand_dims(range(len(self.x)), axis=1).tolist()
        return index_pool
    def sort(self):
        '''
        Reorder the index pool by the length of the corresponding x entries.
        '''
        # NOTE(review): self._index_pool appears to be maintained by BaseDataset — confirm.
        old_index_pool = self._index_pool
        lengths = [len(item) for item in self.x]
        sort_index = np.argsort(lengths)
        self._index_pool = [old_index_pool[index] for index in sort_index]
    def __getitem__(self, item: int):
        # Fire the per-batch callback hook before handing the pair back.
        x, y = self.x[item], self.y[item]
        self._handle_callback_on_batch(x, y)
return x, y | [
"jessie_lijie@126.com"
] | jessie_lijie@126.com |
321d28bf716d6a7c7adbd903db11e67dbdfd4a8b | 743be419d9af6be760a4c9754a9fb946b84827ec | /manage.py | 55a16d5a5082fbd03c4d4c1723d5af460278aaa9 | [] | no_license | mathbeal/videomembership-django | f76c9debaef1b00171d79e8fd1e9409e24705f68 | 3fa779458197a245aacb82d00ff0e7102a3cb831 | refs/heads/master | 2021-06-04T20:13:38.457946 | 2016-09-13T13:37:05 | 2016-09-13T13:37:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Default the settings module; an externally set value takes precedence.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "videomembership.settings")
    from django.core.management import execute_from_command_line
    # Dispatch the CLI arguments to Django's management commands.
    execute_from_command_line(sys.argv)
| [
"leo.maltrait@gmail.com"
] | leo.maltrait@gmail.com |
3baa1450535e83cf7f724f6e11344aa79973106c | 18f5ea1689c88eac0a8e68f1697d04b9f4028e19 | /manuel-ortiz-fortis-leonardo-Jimenez-miguel-rodriguez.py | 66b9c13a76ea9db5d44f53949579900476c2621e | [] | no_license | MiguelAngelR95/programas-automatas | 44dc69ff4fee2a807417de18e6cbdb946193ed2f | 60881b805c01b09850691c4741e3ea6456d866c2 | refs/heads/master | 2021-01-18T22:55:27.817884 | 2016-08-05T03:47:27 | 2016-08-05T03:47:27 | 62,854,122 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,063 | py |
mensaje = "ingrese los arreglos"
print mensaje
t = raw_input()
lista = []
for i in range(0, int(t)):
lista.append(raw_input("Ingrese elemento -> "))
print "A^0 = E"
#---------------------------------------------------------------
a = "{"
for i in range(0, len(lista)):
a = a + lista[i]+","
a = a + "}"
print "A^1 = "+a
#---------------------------------------------------------------
b = "{"
for i in range(0, len(lista)):
for j in range(0, len(lista)):
b = b + lista[i]+lista[j]+","
b = b + "}"
print "A^2 = "+b
#---------------------------------------------------------------
c = "{"
for i in range(0, len(lista)):
for j in range(0, len(lista)):
for k in range(0, len(lista)):
c = c + lista[i]+lista[j]+lista[k]+","
c = c + "}"
print "A^3 = "+c
#---------------------------------------------------------------
d = "{"
for i in range(0, len(lista)):
for j in range(0, len(lista)):
for k in range(0, len(lista)):
for l in range(0, len(lista)):
d = d + lista[i]+lista[j]+lista[k]+lista[l]+","
#------------------------------------------------------------------
e = "{"
for i in range(0, len(lista)):
for j in range(0, len(lista)):
for k in range(0, len(lista)):
for l in range(0, len(lista)):
for m in range(0, len(lista)):
e = e + lista[i]+lista[j]+lista[k]+lista[m]+","
e = e + "}"
print "A^5 = "+e
#---------------------------------------------------------------
f = "{"
for i in range(0, len(lista)):
for j in range(0, len(lista)):
for k in range(0, len(lista)):
for l in range(0, len(lista)):
for m in range(0, len(lista)):
for n in range(0, len(lista)):
e = e + lista[i]+lista[j]+lista[k]+lista[m]+lista[m]+","
f = f + "}"
print "A^ = "+f
#---------------------------------------------------------------
a = len(d)
d[a-2].replace(",","}")
#d = d + "}"
print "A^4 = "+d
#print "------------------------------"
#print len(cadena)
#for i in range(0, 10,2):
| [
"noreply@github.com"
] | noreply@github.com |
1c54602c168cdfc90d1c47fa582a445c1da70afa | 730d9b6251cfb911250626b21c8476c30f5729c0 | /day04/code/4-xpath-language.py | 87de8b71941e2a1cc8923144e991fd9861a31ac6 | [] | no_license | Wuhuaxing2017/spider_notes | 7b6d6e9e26e97d9b67dda85fd4833f82ef793f58 | 69cb18e38f54c839e4d0ebaa7199f984dfbcf5da | refs/heads/master | 2020-03-19T08:09:51.723802 | 2018-06-05T14:32:46 | 2018-06-05T14:32:46 | 136,180,732 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,153 | py | from lxml import etree
html = '''<div>
<ul>
<li class="item-0"><a href="link1.html">first item</a></li>
<li class="item-1"><a href="link2.html">second item</a></li>
<li class="item-inactive"><a href="link3.html" class="linkjfdlsfjls">third item</a></li>
<li class="shfs-inactive"><a href="link4.html">third item</a></li>
<li class="isjfls-inactive"><a href="link5.html">third item</a></li>
<li class="qwert-inactive"><a href="link6.html">third item</a></li>
<li class="item-1"><a href="link4.html">fourth item</a></li>
<li class="item-0"><a href="link5.html">fifth item</a>
</ul>
</div>'''
# 数据转换成标签树方式一
html_tree = etree.HTML(html)
# 方式二,可以将文件中的直接进行转换
html_tree2 = etree.parse('./data.html')
# print(html_tree,html_tree2)
# print(etree.tostring(html_tree).decode('utf-8'))
# 获取文件中所有的标签li
# xpath返回的数据是列表,标签<Element 内存地址>
li = html_tree.xpath('//li')
# print(li)
li = html_tree.xpath('//li[@class="item-1"]')
# print(li[0].xpath('..//a/text()'))
# 查询class属性不等于“item-1” 标签
li = html_tree.xpath('//li[@class!="item-1"]')
# print(li)
# 查询li标签,class 包含inactive 字符串
li = html_tree.xpath('//li[contains(@class,"inactive")]')
# print(li)
# print(li[0].xpath('./a/@*'))
# 查询li标签,class 不包含inactive字符串
li = html_tree.xpath('//li[not(contains(@class,"inactive"))]')
# print(li)
# print(etree.tostring(li[0]).decode('utf-8'))
# 查询li标签,class 不包含inactive字符串 同时包含class =item-1
li = html_tree.xpath('//li[not(contains(@class,"inactive"))][@class="item-1"]')
# print(li)
# print(etree.tostring(li[-1]).decode('utf-8'))
# 查询li标签,最后一个
# print(etree.tostring(html_tree).decode('utf-8'))
li = html_tree.xpath('/html/body/div/ul/li')
li = html_tree.xpath('//li[last()-1]')
# print(li,etree.tostring(li[0]))
# 查询位置小于4的标签
li = html_tree.xpath('//li[position()<4]')
print(li)
| [
"Wuhuaxing2017@qq.com"
] | Wuhuaxing2017@qq.com |
0b5713449027037d0ab2ad412af79684d0153c48 | 1a166165ab8287d01cbb377a13efdb5eff5dfef0 | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_06_01/aio/operations/_vpn_site_links_operations.py | 5637893f1b1e6e52468412705bc4b471675b3407 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | manoj0806/azure-sdk-for-python | 7a14b202ff80f528abd068bf50334e91001a9686 | aab999792db1132232b2f297c76800590a901142 | refs/heads/master | 2023-04-19T16:11:31.984930 | 2021-04-29T23:19:49 | 2021-04-29T23:19:49 | 363,025,016 | 1 | 0 | MIT | 2021-04-30T04:23:35 | 2021-04-30T04:23:35 | null | UTF-8 | Python | false | false | 8,712 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VpnSiteLinksOperations:
    """VpnSiteLinksOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_06_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client plus (de)serializers supplied by the generated service client.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def get(
        self,
        resource_group_name: str,
        vpn_site_name: str,
        vpn_site_link_name: str,
        **kwargs
    ) -> "_models.VpnSiteLink":
        """Retrieves the details of a VPN site link.
        :param resource_group_name: The resource group name of the VpnSite.
        :type resource_group_name: str
        :param vpn_site_name: The name of the VpnSite.
        :type vpn_site_name: str
        :param vpn_site_link_name: The name of the VpnSiteLink being retrieved.
        :type vpn_site_link_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VpnSiteLink, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_06_01.models.VpnSiteLink
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VpnSiteLink"]
        # Callers may extend/override the status-code -> exception mapping via kwargs.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'vpnSiteName': self._serialize.url("vpn_site_name", vpn_site_name, 'str'),
            'vpnSiteLinkName': self._serialize.url("vpn_site_link_name", vpn_site_link_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        # Send the request through the client's pipeline.
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # Translate the error payload into the appropriate ARM exception.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize(_models.Error, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VpnSiteLink', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}/vpnSiteLinks/{vpnSiteLinkName}'}  # type: ignore
    def list_by_vpn_site(
        self,
        resource_group_name: str,
        vpn_site_name: str,
        **kwargs
    ) -> AsyncIterable["_models.ListVpnSiteLinksResult"]:
        """Lists all the vpnSiteLinks in a resource group for a vpn site.
        :param resource_group_name: The resource group name of the VpnSite.
        :type resource_group_name: str
        :param vpn_site_name: The name of the VpnSite.
        :type vpn_site_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListVpnSiteLinksResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_06_01.models.ListVpnSiteLinksResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ListVpnSiteLinksResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-06-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Builds the first-page request, or the follow-up request when a
            # next_link URL was returned by the previous page.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list_by_vpn_site.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'vpnSiteName': self._serialize.url("vpn_site_name", vpn_site_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (continuation token, items).
            deserialized = self._deserialize('ListVpnSiteLinksResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize(_models.Error, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        # AsyncItemPaged drives get_next/extract_data lazily as the caller iterates.
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_vpn_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}/vpnSiteLinks'}  # type: ignore
| [
"noreply@github.com"
] | noreply@github.com |
3ebc4805f20af93c3c9a5846bf9c7185e1c7a7c0 | f2852e75af2c15d59cb962d7d5c81fa1e5cbe374 | /home/migrations/0015_contactpage_map_address.py | b413c8f57fffec09fcecc85728e290124f5f5250 | [
"Unlicense"
] | permissive | jesuispaulbonnet/technic-alu | 27f4cb95a736894de7588d8d5a42efaa42a0ddb8 | 697b948108bdda3c2f538f88d747b5cd50e21254 | refs/heads/master | 2021-05-11T11:05:54.110018 | 2019-01-20T14:23:26 | 2019-01-20T14:23:26 | 118,121,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 516 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-02-23 12:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``map_address`` CharField to the ``contactpage`` model.

    Auto-generated migration; the 'technic-alu' default with
    ``preserve_default=False`` exists to back-fill existing rows during
    this migration only.
    """
    dependencies = [
        ('home', '0014_auto_20180223_1254'),
    ]
    operations = [
        migrations.AddField(
            model_name='contactpage',
            name='map_address',
            field=models.CharField(default='technic-alu', max_length=250),
            preserve_default=False,
        ),
    ]
| [
"paul.bonnet@tuware.com"
] | paul.bonnet@tuware.com |
a29090ef119e51b024e2fc4af969d65ecaef476a | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_155/1805.py | f216188bcb5e778686fc1da1297901988727a426 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 566 | py |
def get_min_members(smax, audience):
standing = 0
friends = 0
i = 1
standing += audience[0]
while i <= smax:
if standing < i:
new_friends = i - standing
standing += new_friends
friends += new_friends
standing += audience[i]
i += 1
return friends
# cases = [(4, "11111"), (1, "09"), (5, "110011"), (0, "1")]
t = input()
for i in range(t):
smax, audience = raw_input().split()
result = get_min_members(int(smax), map(int, audience))
print "Case #%d: %d" % (i+1, result)
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
c3469d4c16126daa8fbccf787da1442ae647afdf | 450021885a28d498f309b656973f1afd2ae538ef | /Mnist_UseNN.py | c232b0a1668431936e280db93cbd467d5cdeb590 | [] | no_license | kimwoojoo/DeepLearningStudy | 5f1f2e859b4f926c461847fafab02c855646ed71 | 521816261a2538cb6cb51b9b1019d27ca7e9a0b8 | refs/heads/master | 2020-04-27T11:57:33.578479 | 2019-03-08T01:59:53 | 2019-03-08T01:59:53 | 174,315,975 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,160 | py | # Lab 7 Learning rate and Evaluation
import tensorflow as tf
import matplotlib.pyplot as plt
import random
from PIL import Image  # NOTE(review): unused in this script — candidate for removal
tf.set_random_seed(777)  # for reproducibility
from tensorflow.examples.tutorials.mnist import input_data
# Check out https://www.tensorflow.org/get_started/mnist/beginners for
# more information about the mnist dataset
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
nb_classes = 10
# MNIST data image of shape 28 * 28 = 784
X = tf.placeholder(tf.float32, [None, 784])
# 0 - 9 digits recognition = 10 classes
Y = tf.placeholder(tf.float32, [None, nb_classes])
# Two ReLU hidden layers (784 -> 256 -> 128) followed by a 10-way output.
W1 = tf.Variable(tf.random_normal([784, 256]))
b1 = tf.Variable(tf.random_normal([256]))
layer1 = tf.nn.relu(tf.matmul(X,W1) + b1)
W2 = tf.Variable(tf.random_normal([256, 128]))
b2 = tf.Variable(tf.random_normal([128]))
layer2 = tf.nn.relu(tf.matmul(layer1,W2) + b2)
W3 = tf.Variable(tf.random_normal([128, 10]))
b3 = tf.Variable(tf.random_normal([10]))
# Raw logits; the softmax itself is applied inside the loss op below.
hypothesis = tf.matmul(layer2, W3) + b3
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
    logits=hypothesis, labels=Y))
train = tf.train.GradientDescentOptimizer(learning_rate=0.001).minimize(cost)
# Test model
is_correct = tf.equal(tf.argmax(hypothesis, 1), tf.argmax(Y, 1))
# Calculate accuracy
accuracy = tf.reduce_mean(tf.cast(is_correct, tf.float32))
# parameters
num_epochs = 15
batch_size = 100
num_iterations = int(mnist.train.num_examples / batch_size)
with tf.Session() as sess:
    # Initialize TensorFlow variables
    sess.run(tf.global_variables_initializer())
    # Training cycle
    for epoch in range(num_epochs):
        avg_cost = 0
        for i in range(num_iterations):
            batch_xs, batch_ys = mnist.train.next_batch(batch_size)
            _, cost_val = sess.run([train, cost], feed_dict={X: batch_xs, Y: batch_ys})
            avg_cost += cost_val / num_iterations
        print("Epoch: {:04d}, Cost: {:.9f}".format(epoch + 1, avg_cost))
    print("Learning finished")
    # Test the model using test sets
    print(
        "Accuracy: ",
        accuracy.eval(
            session=sess, feed_dict={X: mnist.test.images, Y: mnist.test.labels}
        ),
    )
    # Get one and predict
    r = random.randint(0, mnist.test.num_examples - 1)
    print("Label: ", sess.run(tf.argmax(mnist.test.labels[r : r + 1], 1)))
    print(
        "Prediction: ",
        sess.run(tf.argmax(hypothesis, 1), feed_dict={X: mnist.test.images[r : r + 1]}),
    )
    # Show the sampled test digit so the prediction can be eyeballed.
    plt.imshow(
        mnist.test.images[r : r + 1].reshape(28, 28),
        cmap="Greys",
        interpolation="nearest",
    )
    plt.show()
'''
Epoch: 0001, Cost: 2.826302672
Epoch: 0002, Cost: 1.061668952
Epoch: 0003, Cost: 0.838061315
Epoch: 0004, Cost: 0.733232745
Epoch: 0005, Cost: 0.669279885
Epoch: 0006, Cost: 0.624611836
Epoch: 0007, Cost: 0.591160344
Epoch: 0008, Cost: 0.563868987
Epoch: 0009, Cost: 0.541745171
Epoch: 0010, Cost: 0.522673578
Epoch: 0011, Cost: 0.506782325
Epoch: 0012, Cost: 0.492447643
Epoch: 0013, Cost: 0.479955837
Epoch: 0014, Cost: 0.468893674
Epoch: 0015, Cost: 0.458703488
Learning finished
Accuracy: 0.8951
''' | [
"kwj1217@gmail.com"
] | kwj1217@gmail.com |
7ceee000b032290546e28faeaac57035bee55b29 | 9a65ca76a29102c3a74433a0d11a29e7d369b1b3 | /Assignment03/single_layer_nn.py | 1e93e03133650f348f4b4d9a593a02a3946dcb27 | [
"MIT"
] | permissive | Ericbrod10/Deep-Learning | bd8ffe7d17f2275c885f7550b3394a8969f5c705 | 5b0a01597ce19f2da5bf45b76023b898c494f46a | refs/heads/main | 2023-02-05T11:52:12.677261 | 2020-12-26T23:01:50 | 2020-12-26T23:01:50 | 324,647,676 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,253 | py | #Eric Broderick
#eab37
#Assignment 3
import numpy as np
import sys
# Command: python single_layer_nn.py <train> <test> <n>
# sys.argv[1] = <train> = training data
# sys.argv[2] = <test> = test data
# sys.argv[3] = <n> = number of nodes
## Read in data ##
# Each data row is: label followed by the feature columns.
f = open(sys.argv[1])
data = np.loadtxt(f)
train = data[:,1:]
trainlabels = data[:,0]
# Append a constant-1 column so the last hidden-layer weight acts as a bias.
onearray = np.ones((train.shape[0],1))
train = np.append(train,onearray,axis = 1)
# print("train = ",train)
# print("train shape = ",train.shape)
f = open(sys.argv[2])
data = np.loadtxt(f)
test = data[:,1:]
testlabels = data[:,0]
# NOTE(review): the bias column is NOT appended to the test data (the two
# lines below are commented out), so `test` and `train` have different widths.
# onearray = np.ones((test.shape[0],1))
# test = np.append(test,onearray,axis = 1)
rows = train.shape[0]
cols = train.shape[1]
hidden_nodes = int(sys.argv[3])
hidden_nodes = 3 # NOTE(review): overrides the <n> CLI argument parsed just above
## Initialize all weights ##
w = np.random.rand(hidden_nodes)
W = np.random.rand(hidden_nodes, cols)
# NOTE(review): S, U, V below are initialised but never used afterwards.
s = np.random.rand(hidden_nodes)
S = np.random.rand(hidden_nodes, cols)
u = np.random.rand(hidden_nodes)
U = np.random.rand(hidden_nodes, cols)
v = np.random.rand(hidden_nodes)
V = np.random.rand(hidden_nodes, cols)
eta = 0.1 # learning rate
epochs = 1000
stop = 0 # minimum objective improvement required to keep iterating
prevobj = np.inf
i = 0
## Calculate objective ##
hidden_layer = np.matmul(train, np.transpose(W))
# print("hidden_layer = ",hidden_layer)
# print("hidden_layer shape = ",hidden_layer.shape)
##sigmoid function ##
sigmoid = lambda x: 1/(1+np.exp(-x))
hidden_layer = np.array([sigmoid(xi) for xi in hidden_layer])
# print("hidden_layer = ",hidden_layer)
# print("hidden_layer shape = ",hidden_layer.shape)
output_layer = np.matmul(hidden_layer, np.transpose(w))
# print("output_layer = ",output_layer)
# Squared-error objective over the training set.
obj = np.sum(np.square(output_layer - trainlabels))
# print("obj = ",obj)
#obj = np.sum(np.square(np.matmul(train, np.transpose(w)) - trainlabels))
#print("Obj = ",obj)
## Begin gradient descent ##
## stop = 0
while(prevobj - obj > stop and i < epochs):
#while(prevobj - obj > 0):
    #Update previous objective
    prevobj = obj
    #Calculate gradient update for final layer (w)
    #dellw is the same dimension as w
    # print(hidden_layer[0,:].shape, w.shape)
    dellw = (np.dot(hidden_layer[0,:],w)-trainlabels[0])*hidden_layer[0,:]
    for j in range(1, rows):
        dellw += (np.dot(hidden_layer[j,:],np.transpose(w))-trainlabels[j])*hidden_layer[j,:]
    #Update w
    w = w - eta*dellw
    # print("w = ",w)
    # print("dellf = ",dellf)
    #Calculate gradient update for hidden layer weights (W)
    #dellW has to be of same dimension as W
    #Let's first calculate dells. After that we do dellu and dellv.
    #Here s, u, and v are the three hidden nodes
    #dells = df/dz1 * (dz1/ds1, dz1,ds2)
    dells = np.sum(np.dot(hidden_layer[0,:],w)-trainlabels[0])*w[0] * (hidden_layer[0,0])*(1-hidden_layer[0,0])*train[0]
    for j in range(1, rows):
        dells += np.sum(np.dot(hidden_layer[j,:],w)-trainlabels[j])*w[0] * (hidden_layer[j,0])*(1-hidden_layer[j,0])*train[j]
    s = s - eta*dells
    #TODO: dellu = ?
    dellu = np.sum(np.dot(hidden_layer[0,:],w)-trainlabels[0])*w[1] * (hidden_layer[0,1])*(1-hidden_layer[0,1])*train[0]
    for j in range(1, rows):
        dellu += np.sum(np.dot(hidden_layer[j,:],w)-trainlabels[j])*w[1] * (hidden_layer[j,1])*(1-hidden_layer[j,1])*train[j]
    u = u - eta*dellu
    #TODO: dellv = ?
    dellv = np.sum(np.dot(hidden_layer[0,:],w)-trainlabels[0])*w[2] * (hidden_layer[0,2])*(1-hidden_layer[0,2])*train[0]
    for j in range(1, rows):
        dellv += np.sum(np.dot(hidden_layer[j,:],w)-trainlabels[j])*w[2] * (hidden_layer[j,2])*(1-hidden_layer[j,2])*train[j]
    v = v - eta*dellv
    #TODO: Put dells, dellu, and dellv as rows of dellW
    # NOTE(review): stacking exactly three rows assumes hidden_nodes == 3,
    # matching the hard-coded value above but not a CLI-supplied <n>.
    dellW = np.empty((0,cols),float)
    dellW = np.vstack((dellW,dells,dellu,dellv))
    W = W - eta*dellW
    # Recompute the forward pass and the objective with the updated weights.
    hidden_layer = np.matmul(train, np.transpose(W))
    hidden_layer = np.array([sigmoid(xi) for xi in hidden_layer])
    output_layer = np.matmul(hidden_layer, np.transpose(w))
    obj = np.sum(np.square(output_layer - trainlabels))
    i = i + 1
    print("i = %s Objective = %s " % (i,obj))
# Do final predictions
# NOTE(review): predictions are computed on the TRAINING data but compared
# against testlabels — this only works if both sets have equal length;
# confirm whether `test` was meant to be used here.
final = np.matmul(train, np.transpose(W))
predictions = np.sign(np.matmul(sigmoid(final), np.transpose(w)))
error = (1 - (predictions == testlabels).mean()) * 100
print('Predictions: %s' % predictions)
print('Error: ', error)
print('w = %s' % w)
print('s = %s' % s)
print('u = %s' % u)
print('v = %s' % v) | [
"56181235+Ericbrod10@users.noreply.github.com"
] | 56181235+Ericbrod10@users.noreply.github.com |
03eb716acba6ddc2f77eb15d4d74e33f46e68cc5 | a9bbcbb4d6142c0b290ad7177d29085354d3a1b1 | /upstream/python-bitstring/test/test_bitstream.py | ebd92078b09f342167699b6492d4421e71d5e798 | [
"MIT"
] | permissive | nssllc/carver | 44caf9181824215e3c317485d93ad1ed92e0d2f6 | 89ce21ee8df216ce8485adb5f0b413050a4b3cce | refs/heads/master | 2021-01-13T01:41:23.561399 | 2011-10-26T13:03:22 | 2011-10-26T13:03:22 | 2,506,499 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135,708 | py | #!/usr/bin/env python
import unittest
import sys
sys.path.insert(0, '..')
import bitstring
import copy
import os
import collections
from bitstring import BitStream, ConstBitStream, pack
from bitstring.bitstore import ByteStore, offsetcopy
class FlexibleInitialisation(unittest.TestCase):
    """Tests for the flexible token-string initialisation syntax of BitStream."""

    def testFlexibleInitialisation(self):
        # Whitespace around ':' and '=' in a token string is ignored.
        a = BitStream('uint:8=12')
        c = BitStream(' uint : 8 = 12')
        self.assertTrue(a == c == BitStream(uint=12, length=8))
        a = BitStream(' int:2= -1')
        b = BitStream('int :2 = -1')
        c = BitStream(' int: 2 =-1 ')
        self.assertTrue(a == b == c == BitStream(int=-1, length=2))

    def testFlexibleInitialisation2(self):
        h = BitStream('hex=12')
        o = BitStream('oct=33')
        b = BitStream('bin=10')
        self.assertEqual(h, '0x12')
        self.assertEqual(o, '0o33')
        self.assertEqual(b, '0b10')

    def testFlexibleInitialisation3(self):
        # Signed/unsigned exp-Golomb tokens also tolerate surrounding whitespace.
        for s in ['se=-1', ' se = -1 ', 'se = -1']:
            a = BitStream(s)
            self.assertEqual(a.se, -1)
        for s in ['ue=23', 'ue =23', 'ue = 23']:
            a = BitStream(s)
            self.assertEqual(a.ue, 23)

    def testMultipleStringInitialisation(self):
        # Comma-separated tokens concatenate into one stream.
        a = BitStream('0b1 , 0x1')
        self.assertEqual(a, '0b10001')
        a = BitStream('ue=5, ue=1, se=-2')
        self.assertEqual(a.read('ue'), 5)
        self.assertEqual(a.read('ue'), 1)
        self.assertEqual(a.read('se'), -2)
        b = BitStream('uint:32 = 12, 0b11') + 'int:100=-100, 0o44'
        self.assertEqual(b.read(32).uint, 12)
        self.assertEqual(b.read(2).bin, '11')
        self.assertEqual(b.read(100).int, -100)
class Reading(unittest.TestCase):
    """Tests for reading bits, bytes and exp-Golomb codes from a stream."""

    def testReadBits(self):
        s = BitStream(bytes=b'\x4d\x55')
        self.assertEqual(s.read(4).hex, '4')
        self.assertEqual(s.read(8).hex, 'd5')
        self.assertEqual(s.read(1), [0])
        self.assertEqual(s.read(3).bin, '101')
        self.assertFalse(s.read(0))

    def testReadByte(self):
        s = BitStream(hex='4d55')
        self.assertEqual(s.read(8).hex, '4d')
        self.assertEqual(s.read(8).hex, '55')

    def testReadBytes(self):
        s = BitStream(hex='0x112233448811')
        self.assertEqual(s.read(3 * 8).hex, '112233')
        self.assertRaises(ValueError, s.read, -2 * 8)
        s.bitpos += 1
        self.assertEqual(s.read(2 * 8).bin, '1000100100010000')

    def testReadUE(self):
        self.assertRaises(bitstring.InterpretError, BitStream('')._getue)
        # The numbers 0 to 8 as unsigned Exponential-Golomb codes
        s = BitStream(bin='1 010 011 00100 00101 00110 00111 0001000 0001001')
        self.assertEqual(s.pos, 0)
        for i in range(9):
            self.assertEqual(s.read('ue'), i)
        self.assertRaises(bitstring.ReadError, s.read, 'ue')

    def testReadSE(self):
        s = BitStream(bin='010 00110 0001010 0001000 00111')
        self.assertEqual(s.read('se'), 1)
        self.assertEqual(s.read('se'), 3)
        self.assertEqual(s.readlist(3 * ['se']), [5, 4, -3])
class Find(unittest.TestCase):
    """Tests for find(), with and without byte alignment, and its pos side effects."""

    def testFind1(self):
        s = ConstBitStream(bin='0b0000110110000')
        self.assertTrue(s.find(BitStream(bin='11011'), False))
        self.assertEqual(s.bitpos, 4)
        self.assertEqual(s.read(5).bin, '11011')
        s.bitpos = 0
        self.assertFalse(s.find('0b11001', False))

    def testFind2(self):
        s = BitStream(bin='0')
        self.assertTrue(s.find(s, False))
        self.assertEqual(s.pos, 0)
        self.assertFalse(s.find('0b00', False))
        self.assertRaises(ValueError, s.find, BitStream(), False)

    def testFindWithOffset(self):
        s = BitStream(hex='0x112233')[4:]
        self.assertTrue(s.find('0x23', False))
        self.assertEqual(s.pos, 8)

    def testFindCornerCases(self):
        s = BitStream(bin='000111000111')
        self.assertTrue(s.find('0b000'))
        self.assertEqual(s.pos, 0)
        self.assertTrue(s.find('0b000'))
        self.assertEqual(s.pos, 0)
        self.assertTrue(s.find('0b0111000111'))
        self.assertEqual(s.pos, 2)
        self.assertTrue(s.find('0b000', start=2))
        self.assertEqual(s.pos, 6)
        self.assertTrue(s.find('0b111', start=6))
        self.assertEqual(s.pos, 9)
        s.pos += 2
        self.assertTrue(s.find('0b1', start=s.pos))

    def testFindBytes(self):
        s = BitStream('0x010203040102ff')
        self.assertFalse(s.find('0x05', bytealigned=True))
        self.assertTrue(s.find('0x02', bytealigned=True))
        self.assertEqual(s.read(16).hex, '0203')
        self.assertTrue(s.find('0x02', start=s.bitpos, bytealigned=True))
        s.read(1)
        self.assertFalse(s.find('0x02', start=s.bitpos, bytealigned=True))

    def testFindBytesAlignedCornerCases(self):
        s = BitStream('0xff')
        self.assertTrue(s.find(s))
        self.assertFalse(s.find(BitStream(hex='0x12')))
        self.assertFalse(s.find(BitStream(hex='0xffff')))

    def testFindBytesBitpos(self):
        # A failed find must leave pos unchanged.
        s = BitStream(hex='0x1122334455')
        s.pos = 2
        s.find('0x66', bytealigned=True)
        self.assertEqual(s.pos, 2)
        s.pos = 38
        s.find('0x66', bytealigned=True)
        self.assertEqual(s.pos, 38)

    def testFindByteAligned(self):
        s = BitStream(hex='0x12345678')
        self.assertTrue(s.find(BitStream(hex='0x56'), bytealigned=True))
        self.assertEqual(s.bytepos, 2)
        s.pos = 0
        self.assertFalse(s.find(BitStream(hex='0x45'), bytealigned=True))
        s = BitStream('0x1234')
        s.find('0x1234')
        self.assertTrue(s.find('0x1234'))
        s += '0b111'
        s.pos = 3
        s.find('0b1', start=17, bytealigned=True)
        self.assertFalse(s.find('0b1', start=17, bytealigned=True))
        self.assertEqual(s.pos, 3)

    def testFindByteAlignedWithOffset(self):
        s = BitStream(hex='0x112233')[4:]
        self.assertTrue(s.find(BitStream(hex='0x23')))

    def testFindByteAlignedErrors(self):
        # Searching for an empty pattern is an error.
        s = BitStream(hex='0xffff')
        self.assertRaises(ValueError, s.find, '')
        self.assertRaises(ValueError, s.find, BitStream())
class Rfind(unittest.TestCase):
    """Tests for rfind() (reverse search) and its start/end/bytealigned options."""

    def testRfind(self):
        a = BitStream('0b001001001')
        b = a.rfind('0b001')
        self.assertEqual(b, (6,))
        self.assertEqual(a.pos, 6)
        big = BitStream(length=100000) + '0x12' + BitStream(length=10000)
        found = big.rfind('0x12', bytealigned=True)
        self.assertEqual(found, (100000,))
        self.assertEqual(big.pos, 100000)

    def testRfindByteAligned(self):
        a = BitStream('0x8888')
        b = a.rfind('0b1', bytealigned=True)
        self.assertEqual(b, (8,))
        self.assertEqual(a.pos, 8)

    def testRfindStartbit(self):
        a = BitStream('0x0000ffffff')
        b = a.rfind('0x0000', start=1, bytealigned=True)
        self.assertEqual(b, ())
        self.assertEqual(a.pos, 0)
        b = a.rfind('0x00', start=1, bytealigned=True)
        self.assertEqual(b, (8,))
        self.assertEqual(a.pos, 8)

    def testRfindEndbit(self):
        a = BitStream('0x000fff')
        b = a.rfind('0b011', bytealigned=False, start=0, end=14)
        self.assertEqual(bool(b), True)
        b = a.rfind('0b011', False, 0, 13)
        self.assertEqual(b, ())

    def testRfindErrors(self):
        # Empty pattern and out-of-range start/end values must raise.
        a = BitStream('0x43234234')
        self.assertRaises(ValueError, a.rfind, '', bytealigned=True)
        self.assertRaises(ValueError, a.rfind, '0b1', start=-99, bytealigned=True)
        self.assertRaises(ValueError, a.rfind, '0b1', end=33, bytealigned=True)
        self.assertRaises(ValueError, a.rfind, '0b1', start=10, end=9, bytealigned=True)
class Shift(unittest.TestCase):
    """Tests for <<, >>, <<= and >>= (logical shifts that zero-fill)."""

    def testShiftLeft(self):
        s = BitStream('0b1010')
        t = s << 1
        self.assertEqual(s.bin, '1010')
        self.assertEqual(t.bin, '0100')
        t = t << 0
        self.assertEqual(t, '0b0100')
        # Over-shifting just produces all zeros, not an error.
        t = t << 100
        self.assertEqual(t.bin, '0000')

    def testShiftLeftErrors(self):
        s = BitStream()
        self.assertRaises(ValueError, s.__lshift__, 1)
        s = BitStream('0xf')
        self.assertRaises(ValueError, s.__lshift__, -1)

    def testShiftRight(self):
        s = BitStream('0b1010')
        t = s >> 1
        self.assertEqual(s.bin, '1010')
        self.assertEqual(t.bin, '0101')
        q = s >> 0
        self.assertEqual(q, '0b1010')
        # q is a copy; mutating it must not touch s.
        q.replace('0b1010', '')
        s = s >> 100
        self.assertEqual(s.bin, '0000')

    def testShiftRightErrors(self):
        s = BitStream()
        self.assertRaises(ValueError, s.__rshift__, 1)
        s = BitStream('0xf')
        self.assertRaises(ValueError, s.__rshift__, -1)

    def testShiftRightInPlace(self):
        s = BitStream('0xffff')[4:12]
        s >>= 1
        self.assertEqual(s, '0b01111111')
        s = BitStream('0b11011')
        s >>= 2
        self.assertEqual(s.bin, '00110')
        s >>= 100000000000000
        self.assertEqual(s.bin, '00000')
        s = BitStream('0xff')
        s >>= 1
        self.assertEqual(s, '0x7f')
        s >>= 0
        self.assertEqual(s, '0x7f')

    def testShiftRightInPlaceErrors(self):
        s = BitStream()
        self.assertRaises(ValueError, s.__irshift__, 1)
        s += '0b11'
        self.assertRaises(ValueError, s.__irshift__, -1)

    def testShiftLeftInPlace(self):
        s = BitStream('0xffff')
        t = s[4:12]
        t <<= 2
        self.assertEqual(t, '0b11111100')
        s = BitStream('0b11011')
        s <<= 2
        self.assertEqual(s.bin, '01100')
        s <<= 100000000000000000000
        self.assertEqual(s.bin, '00000')
        s = BitStream('0xff')
        s <<= 1
        self.assertEqual(s, '0xfe')
        s <<= 0
        self.assertEqual(s, '0xfe')

    def testShiftLeftInPlaceErrors(self):
        s = BitStream()
        self.assertRaises(ValueError, s.__ilshift__, 1)
        s += '0b11'
        self.assertRaises(ValueError, s.__ilshift__, -1)
class Replace(unittest.TestCase):
    """Tests for replace(): counts, alignment, pos side effects and errors."""

    def testReplace1(self):
        a = BitStream('0b1')
        n = a.replace('0b1', '0b0', bytealigned=True)
        self.assertEqual(a.bin, '0')
        self.assertEqual(n, 1)
        n = a.replace('0b1', '0b0', bytealigned=True)
        self.assertEqual(n, 0)

    def testReplace2(self):
        a = BitStream('0b00001111111')
        n = a.replace('0b1', '0b0', bytealigned=True)
        self.assertEqual(a.bin, '00001111011')
        self.assertEqual(n, 1)
        n = a.replace('0b1', '0b0', bytealigned=False)
        self.assertEqual(a.bin, '00000000000')
        self.assertEqual(n, 6)

    def testReplace3(self):
        # Replacement may grow or shrink the stream.
        a = BitStream('0b0')
        n = a.replace('0b0', '0b110011111', bytealigned=True)
        self.assertEqual(n, 1)
        self.assertEqual(a.bin, '110011111')
        n = a.replace('0b11', '', bytealigned=False)
        self.assertEqual(n, 3)
        self.assertEqual(a.bin, '001')

    def testReplace4(self):
        a = BitStream('0x00114723ef4732344700')
        n = a.replace('0x47', '0x00', bytealigned=True)
        self.assertEqual(n, 3)
        self.assertEqual(a.hex, '00110023ef0032340000')
        a.replace('0x00', '', bytealigned=True)
        self.assertEqual(a.hex, '1123ef3234')
        a.replace('0x11', '', start=1, bytealigned=True)
        self.assertEqual(a.hex, '1123ef3234')
        a.replace('0x11', '0xfff', end=7, bytealigned=True)
        self.assertEqual(a.hex, '1123ef3234')
        a.replace('0x11', '0xfff', end=8, bytealigned=True)
        self.assertEqual(a.hex, 'fff23ef3234')

    def testReplace5(self):
        a = BitStream('0xab')
        b = BitStream('0xcd')
        c = BitStream('0xabef')
        c.replace(a, b)
        self.assertEqual(c, '0xcdef')
        self.assertEqual(a, '0xab')
        self.assertEqual(b, '0xcd')
        a = BitStream('0x0011223344')
        a.pos = 12
        a.replace('0x11', '0xfff', bytealigned=True)
        self.assertEqual(a.pos, 8)
        self.assertEqual(a, '0x00fff223344')

    def testReplaceWithSelf(self):
        a = BitStream('0b11')
        a.replace('0b1', a)
        self.assertEqual(a, '0xf')
        a.replace(a, a)
        self.assertEqual(a, '0xf')

    def testReplaceCount(self):
        a = BitStream('0x223344223344223344')
        n = a.replace('0x2', '0x0', count=0, bytealigned=True)
        self.assertEqual(n, 0)
        self.assertEqual(a.hex, '223344223344223344')
        n = a.replace('0x2', '0x0', count=1, bytealigned=True)
        self.assertEqual(n, 1)
        self.assertEqual(a.hex, '023344223344223344')
        n = a.replace('0x33', '', count=2, bytealigned=True)
        self.assertEqual(n, 2)
        self.assertEqual(a.hex, '02442244223344')
        n = a.replace('0x44', '0x4444', count=1435, bytealigned=True)
        self.assertEqual(n, 3)
        self.assertEqual(a.hex, '02444422444422334444')

    def testReplaceBitpos(self):
        a = BitStream('0xff')
        a.bitpos = 8
        a.replace('0xff', '', bytealigned=True)
        self.assertEqual(a.bitpos, 0)
        a = BitStream('0b0011110001')
        a.bitpos = 4
        a.replace('0b1', '0b000')
        self.assertEqual(a.bitpos, 8)
        a = BitStream('0b1')
        a.bitpos = 1
        a.replace('0b1', '0b11111', bytealigned=True)
        self.assertEqual(a.bitpos, 5)
        a.replace('0b11', '0b0', False)
        self.assertEqual(a.bitpos, 3)
        a.append('0b00')
        a.replace('0b00', '0xffff')
        self.assertEqual(a.bitpos, 17)

    def testReplaceErrors(self):
        a = BitStream('0o123415')
        self.assertRaises(ValueError, a.replace, '', '0o7', bytealigned=True)
        self.assertRaises(ValueError, a.replace, '0b1', '0b1', start=-100, bytealigned=True)
        self.assertRaises(ValueError, a.replace, '0b1', '0b1', end=19, bytealigned=True)
class SliceAssignmentWithStep(unittest.TestCase):
    """Tests for extended slice assignment (with step) and its pos side effects."""

    def testSetSliceStep(self):
        # Step multiplies the slice indices, so [0:0:12] addresses bits 0..0*12.
        a = BitStream()
        a[0:0:12] = '0xabcdef'
        self.assertEqual(a.bytepos, 3)
        a[1:4:4] = ''
        self.assertEqual(a, '0xaef')
        self.assertEqual(a.bitpos, 4)
        a[1::8] = '0x00'
        self.assertEqual(a, '0xae00')
        self.assertEqual(a.bytepos, 2)
        a += '0xf'
        a[1::8] = '0xe'
        self.assertEqual(a, '0xaee')
        self.assertEqual(a.bitpos, 12)
        b = BitStream()
        b[0:100:8] = '0xffee'
        self.assertEqual(b, '0xffee')
        b[1:12:4] = '0xeed123'
        self.assertEqual(b, '0xfeed123')
        b[-100:2:4] = '0x0000'
        self.assertEqual(b, '0x0000ed123')
        a = BitStream('0xabcde')
        self.assertEqual(a[-100:-90:4], '')
        self.assertEqual(a[-100:-4:4], '0xa')
        a[-100:-4:4] = '0x0'
        self.assertEqual(a, '0x0bcde')
        self.assertRaises(ValueError, a.__setitem__, slice(2, 0, 4), '0x33')

    def testSetSliceNegativeStep(self):
        # A negative step assigns the replacement bits in reverse order.
        a = BitStream('0x000000')
        a[1::-8] = '0x1122'
        self.assertEqual(a, '0x221100')
        a[-1:-3:-4] = '0xaeebb'
        self.assertEqual(a, '0x2211bbeea')
        a[-1::-8] = '0xffdd'
        self.assertEqual(a, '0xddff')
        self.assertRaises(ValueError, a.__setitem__, slice(3, 4, -1), '0x12')
        b = BitStream('0x00')
        b[::-1] = '0b10001111'
        self.assertEqual(b, '0xf1')

    def testInsertingUsingSetItem(self):
        # A zero-width slice inserts rather than overwrites.
        a = BitStream()
        a[0:0] = '0xdeadbeef'
        self.assertEqual(a, '0xdeadbeef')
        self.assertEqual(a.bytepos, 4)
        a[4:4:4] = '0xfeed'
        self.assertEqual(a, '0xdeadfeedbeef')
        self.assertEqual(a.bytepos, 4)
        a[14232:442232:0] = '0xa'
        self.assertEqual(a, '0xadeadfeedbeef')
        self.assertEqual(a.bitpos, 4)
        a.bytepos = 6
        a[0:0] = '0xff'
        self.assertEqual(a.bytepos, 1)
        a[8:0] = '0x00000'
        self.assertTrue(a.startswith('0xff00000adead'))

    def testSliceAssignmentBitPos(self):
        # pos tracks deletions/insertions that happen before it.
        a = BitStream('int:64=-1')
        a.pos = 64
        a[0:8] = ''
        self.assertEqual(a.pos, 0)
        a.pos = 52
        a[48:56] = '0x0000'
        self.assertEqual(a.pos, 64)
        a[10:10] = '0x0'
        self.assertEqual(a.pos, 14)
        a[56:68] = '0x000'
        self.assertEqual(a.pos, 14)
class Pack(unittest.TestCase):
    """Tests for the module-level pack() function and its format strings."""

    def testPack1(self):
        s = bitstring.pack('uint:6, bin, hex, int:6, se, ue, oct', 10, '0b110', 'ff', -1, -6, 6, '54')
        t = BitStream('uint:6=10, 0b110, 0xff, int:6=-1, se=-6, ue=6, oct=54')
        self.assertEqual(s, t)
        self.assertRaises(bitstring.CreationError, pack, 'tomato', '0')
        self.assertRaises(bitstring.CreationError, pack, 'uint', 12)
        self.assertRaises(bitstring.CreationError, pack, 'hex', 'penguin')
        self.assertRaises(bitstring.CreationError, pack, 'hex12', '0x12')

    def testPackWithLiterals(self):
        s = bitstring.pack('0xf')
        self.assertEqual(s, '0xf')
        # NOTE(review): assertTrue with two args treats the second as a message,
        # so this never fails; assertIs(type(s), BitStream) was likely intended.
        self.assertTrue(type(s), BitStream)
        s = pack('0b1')
        self.assertEqual(s, '0b1')
        s = pack('0o7')
        self.assertEqual(s, '0o7')
        s = pack('int:10=-1')
        self.assertEqual(s, '0b1111111111')
        s = pack('uint:10=1')
        self.assertEqual(s, '0b0000000001')
        s = pack('ue=12')
        self.assertEqual(s.ue, 12)
        s = pack('se=-12')
        self.assertEqual(s.se, -12)
        s = pack('bin=01')
        self.assertEqual(s.bin, '01')
        s = pack('hex=01')
        self.assertEqual(s.hex, '01')
        s = pack('oct=01')
        self.assertEqual(s.oct, '01')

    def testPackWithDict(self):
        a = pack('uint:6=width, se=height', height=100, width=12)
        w, h = a.unpack('uint:6, se')
        self.assertEqual(w, 12)
        self.assertEqual(h, 100)
        d = {}
        d['w'] = '0xf'
        d['300'] = 423
        d['e'] = '0b1101'
        # Keyword names are substituted wherever they appear, even '300'.
        a = pack('int:100=300, bin=e, uint:12=300', **d)
        x, y, z = a.unpack('int:100, bin, uint:12')
        self.assertEqual(x, 423)
        self.assertEqual(y, '1101')
        self.assertEqual(z, 423)

    def testPackWithDict2(self):
        a = pack('int:5, bin:3=b, 0x3, bin=c, se=12', 10, b='0b111', c='0b1')
        b = BitStream('int:5=10, 0b111, 0x3, 0b1, se=12')
        self.assertEqual(a, b)
        a = pack('bits:3=b', b=BitStream('0b101'))
        self.assertEqual(a, '0b101')
        a = pack('bits:24=b', b=BitStream('0x001122'))
        self.assertEqual(a, '0x001122')

    def testPackWithDict3(self):
        s = pack('hex:4=e, hex:4=0xe, hex:4=e', e='f')
        self.assertEqual(s, '0xfef')
        s = pack('sep', sep='0b00')
        self.assertEqual(s, '0b00')

    def testPackWithDict4(self):
        s = pack('hello', hello='0xf')
        self.assertEqual(s, '0xf')
        s = pack('x, y, x, y, x', x='0b10', y='uint:12=100')
        t = BitStream('0b10, uint:12=100, 0b10, uint:12=100, 0b10')
        self.assertEqual(s, t)
        a = [1, 2, 3, 4, 5]
        s = pack('int:8, div,' * 5, *a, **{'div': '0b1'})
        t = BitStream('int:8=1, 0b1, int:8=2, 0b1, int:8=3, 0b1, int:8=4, 0b1, int:8=5, 0b1')
        self.assertEqual(s, t)

    def testPackWithLocals(self):
        width = 352
        height = 288
        s = pack('uint:12=width, uint:12=height', **locals())
        self.assertEqual(s, '0x160120')

    def testPackWithLengthRestriction(self):
        # A token's value must match its declared length exactly.
        s = pack('bin:3', '0b000')
        self.assertRaises(bitstring.CreationError, pack, 'bin:3', '0b0011')
        self.assertRaises(bitstring.CreationError, pack, 'bin:3', '0b11')
        self.assertRaises(bitstring.CreationError, pack, 'bin:3=0b0011')
        self.assertRaises(bitstring.CreationError, pack, 'bin:3=0b11')
        s = pack('hex:4', '0xf')
        self.assertRaises(bitstring.CreationError, pack, 'hex:4', '0b111')
        self.assertRaises(bitstring.CreationError, pack, 'hex:4', '0b11111')
        self.assertRaises(bitstring.CreationError, pack, 'hex:8=0xf')
        s = pack('oct:6', '0o77')
        self.assertRaises(bitstring.CreationError, pack, 'oct:6', '0o1')
        self.assertRaises(bitstring.CreationError, pack, 'oct:6', '0o111')
        self.assertRaises(bitstring.CreationError, pack, 'oct:3', '0b1')
        self.assertRaises(bitstring.CreationError, pack, 'oct:3=hello', hello='0o12')
        s = pack('bits:3', BitStream('0b111'))
        self.assertRaises(bitstring.CreationError, pack, 'bits:3', BitStream('0b11'))
        self.assertRaises(bitstring.CreationError, pack, 'bits:3', BitStream('0b1111'))
        self.assertRaises(bitstring.CreationError, pack, 'bits:12=b', b=BitStream('0b11'))

    def testPackNull(self):
        # Empty tokens and stray commas are ignored.
        s = pack('')
        self.assertFalse(s)
        s = pack(',')
        self.assertFalse(s)
        s = pack(',,,,,0b1,,,,,,,,,,,,,0b1,,,,,,,,,,')
        self.assertEqual(s, '0b11')
        s = pack(',,uint:12,,bin:3,', 100, '100')
        a, b = s.unpack(',,,uint:12,,,,bin:3,,,')
        self.assertEqual(a, 100)
        self.assertEqual(b, '100')

    def testPackDefaultUint(self):
        # A bare integer token defaults to uint of that length.
        s = pack('10, 5', 1, 2)
        a, b = s.unpack('10, 5')
        self.assertEqual((a, b), (1, 2))
        s = pack('10=150, 12=qee', qee=3)
        self.assertEqual(s, 'uint:10=150, uint:12=3')
        t = BitStream('100=5')
        self.assertEqual(t, 'uint:100=5')

    def testPackDefualtUintErrors(self):
        self.assertRaises(bitstring.CreationError, BitStream, '5=-1')

    def testPackingLongKeywordBitstring(self):
        s = pack('bits=b', b=BitStream(128000))
        self.assertEqual(s, BitStream(128000))

    def testPackingWithListFormat(self):
        # A list of tokens behaves like the comma-joined string.
        f = ['bin', 'hex', 'uint:10']
        a = pack(','.join(f), '00', '234', 100)
        b = pack(f, '00', '234', 100)
        self.assertEqual(a, b)
class Unpack(unittest.TestCase):
    """Tests for unpack(): multiple token types, and that pos is left untouched."""

    def testUnpack1(self):
        s = BitStream('uint:13=23, hex=e, bin=010, int:41=-554, 0o44332, se=-12, ue=4')
        s.pos = 11
        a, b, c, d, e, f, g = s.unpack('uint:13, hex:4, bin:3, int:41, oct:15, se, ue')
        self.assertEqual(a, 23)
        self.assertEqual(b, 'e')
        self.assertEqual(c, '010')
        self.assertEqual(d, -554)
        self.assertEqual(e, '44332')
        self.assertEqual(f, -12)
        self.assertEqual(g, 4)
        # unpack() must not move the stream position.
        self.assertEqual(s.pos, 11)

    def testUnpack2(self):
        s = BitStream('0xff, 0b000, uint:12=100')
        a, b, c = s.unpack('bits:8, bits, uint:12')
        self.assertEqual(type(s), BitStream)
        self.assertEqual(a, '0xff')
        self.assertEqual(type(s), BitStream)
        self.assertEqual(b, '0b000')
        self.assertEqual(c, 100)
        a, b = s.unpack(['bits:11', 'uint'])
        self.assertEqual(a, '0xff, 0b000')
        self.assertEqual(b, 100)

    def testUnpackNull(self):
        s = pack('0b1, , , 0xf,')
        a, b = s.unpack('bin:1,,,hex:4,')
        self.assertEqual(a, '1')
        self.assertEqual(b, 'f')
class FromFile(unittest.TestCase):
    """Tests for file-backed bitstrings.

    Relies on the fixture files 'smalltestfile' and 'test.m1v' being present
    in the working directory.
    """

    def testCreationFromFileOperations(self):
        s = BitStream(filename='smalltestfile')
        s.append('0xff')
        self.assertEqual(s.hex, '0123456789abcdefff')
        s = ConstBitStream(filename='smalltestfile')
        t = BitStream('0xff') + s
        self.assertEqual(t.hex, 'ff0123456789abcdef')
        s = BitStream(filename='smalltestfile')
        del s[:1]
        self.assertEqual((BitStream('0b0') + s).hex, '0123456789abcdef')
        s = BitStream(filename='smalltestfile')
        del s[:7 * 8]
        self.assertEqual(s.hex, 'ef')
        s = BitStream(filename='smalltestfile')
        s.insert('0xc', 4)
        self.assertEqual(s.hex, '0c123456789abcdef')
        s = BitStream(filename='smalltestfile')
        s.prepend('0xf')
        self.assertEqual(s.hex, 'f0123456789abcdef')
        s = BitStream(filename='smalltestfile')
        s.overwrite('0xaaa', 12)
        self.assertEqual(s.hex, '012aaa6789abcdef')
        s = BitStream(filename='smalltestfile')
        s.reverse()
        self.assertEqual(s.hex, 'f7b3d591e6a2c480')
        s = BitStream(filename='smalltestfile')
        del s[-60:]
        self.assertEqual(s.hex, '0')
        s = BitStream(filename='smalltestfile')
        del s[:60]
        self.assertEqual(s.hex, 'f')

    def testFileProperties(self):
        s = ConstBitStream(filename='smalltestfile')
        self.assertEqual(s.hex, '0123456789abcdef')
        self.assertEqual(s.uint, 81985529216486895)
        self.assertEqual(s.int, 81985529216486895)
        self.assertEqual(s.bin, '0000000100100011010001010110011110001001101010111100110111101111')
        self.assertEqual(s[:-1].oct, '002215053170465363367')
        s.bitpos = 0
        self.assertEqual(s.read('se'), -72)
        s.bitpos = 0
        self.assertEqual(s.read('ue'), 144)
        self.assertEqual(s.bytes, b'\x01\x23\x45\x67\x89\xab\xcd\xef')
        self.assertEqual(s.tobytes(), b'\x01\x23\x45\x67\x89\xab\xcd\xef')

    def testCreationFromFileWithLength(self):
        s = ConstBitStream(filename='test.m1v', length=32)
        self.assertEqual(s.length, 32)
        self.assertEqual(s.hex, '000001b3')
        s = ConstBitStream(filename='test.m1v', length=0)
        self.assertFalse(s)
        self.assertRaises(bitstring.CreationError, BitStream, filename='smalltestfile', length=65)
        self.assertRaises(bitstring.CreationError, ConstBitStream, filename='smalltestfile', length=64, offset=1)
        # self.assertRaises(bitstring.CreationError, ConstBitStream, filename='smalltestfile', offset=65)
        f = open('smalltestfile', 'rb')
        # self.assertRaises(bitstring.CreationError, ConstBitStream, auto=f, offset=65)
        self.assertRaises(bitstring.CreationError, ConstBitStream, auto=f, length=65)
        self.assertRaises(bitstring.CreationError, ConstBitStream, auto=f, offset=60, length=5)

    def testCreationFromFileWithOffset(self):
        a = BitStream(filename='test.m1v', offset=4)
        self.assertEqual(a.peek(4 * 8).hex, '00001b31')
        b = BitStream(filename='test.m1v', offset=28)
        self.assertEqual(b.peek(8).hex, '31')

    def testFileSlices(self):
        s = BitStream(filename='smalltestfile')
        # NOTE(review): t is unused — the same slice is re-taken on the next line.
        t = s[-2::8]
        self.assertEqual(s[-2::8].hex, 'cdef')

    def testCreataionFromFileErrors(self):
        self.assertRaises(IOError, BitStream, filename='Idonotexist')

    def testFindInFile(self):
        s = BitStream(filename='test.m1v')
        self.assertTrue(s.find('0x160120'))
        self.assertEqual(s.bytepos, 4)
        s3 = s.read(3 * 8)
        self.assertEqual(s3.hex, '160120')
        s.bytepos = 0
        self.assertTrue(s._pos == 0)
        self.assertTrue(s.find('0x0001b2'))
        self.assertEqual(s.bytepos, 13)

    def testHexFromFile(self):
        s = BitStream(filename='test.m1v')
        self.assertEqual(s[0:32].hex, '000001b3')
        self.assertEqual(s[-32:].hex, '000001b7')
        s.hex = '0x11'
        self.assertEqual(s.hex, '11')

    def testFileOperations(self):
        s1 = BitStream(filename='test.m1v')
        s2 = BitStream(filename='test.m1v')
        self.assertEqual(s1.read(32).hex, '000001b3')
        self.assertEqual(s2.read(32).hex, '000001b3')
        s1.bytepos += 4
        self.assertEqual(s1.read(8).hex, '02')
        self.assertEqual(s2.read(5 * 8).hex, '1601208302')
        s1.pos = s1.len
        try:
            s1.pos += 1
            self.assertTrue(False)
        except ValueError:
            pass

    def testFileBitGetting(self):
        s = ConstBitStream(filename='smalltestfile', offset=16, length=8) # 0x45
        b = s[1]
        self.assertTrue(b)
        b = s.any(0, [-1, -2, -3])
        self.assertTrue(b)
        b = s.all(0, [0, 1, 2])
        self.assertFalse(b)

    def testVeryLargeFiles(self):
        # This uses an 11GB file which isn't distributed for obvious reasons
        # and so this test won't work for anyone except me!
        try:
            s = ConstBitStream(filename='11GB.mkv')
        except IOError:
            return
        self.assertEqual(s.len, 11743020505 * 8)
        self.assertEqual(s[1000000000:1000000100].hex, 'bdef7335d4545f680d669ce24')
        self.assertEqual(s[-4::8].hex, 'bbebf7a1')
class CreationErrors(unittest.TestCase):
    """Invalid binary/hex strings must raise CreationError when assigned."""

    def testIncorrectBinAssignment(self):
        stream = BitStream()
        with self.assertRaises(bitstring.CreationError):
            stream._setbin_safe('0010020')

    def testIncorrectHexAssignment(self):
        stream = BitStream()
        with self.assertRaises(bitstring.CreationError):
            stream._sethex('0xabcdefg')
class Length(unittest.TestCase):
    """Checks of the .len property and length/offset creation validation."""

    def testLengthZero(self):
        self.assertEqual(BitStream('').len, 0)

    def testLength(self):
        self.assertEqual(BitStream('0x80').len, 8)

    def testLengthErrors(self):
        # TODO: Lots of new checks, for various inits which now disallow
        # length and offset.
        pass
        # self.assertRaises(ValueError, BitStream, bin='111', length=-1)
        # self.assertRaises(ValueError, BitStream, bin='111', length=4)

    def testOffsetLengthError(self):
        with self.assertRaises(bitstring.CreationError):
            BitStream(hex='0xffff', offset=-1)
class SimpleConversions(unittest.TestCase):
    """Conversions between bit patterns and their uint/int/hex interpretations."""

    def testConvertToUint(self):
        for literal, expected in (('0x10', 16), ('0b000111', 7)):
            self.assertEqual(BitStream(literal).uint, expected)

    def testConvertToInt(self):
        for literal, expected in (('0x10', 16), ('0b11110', -2)):
            self.assertEqual(BitStream(literal).int, expected)

    def testConvertToHex(self):
        self.assertEqual(BitStream(bytes=b'\x00\x12\x23\xff').hex, '001223ff')
        # A length that isn't a whole number of nibbles has no hex form.
        stream = BitStream('0b11111')
        with self.assertRaises(bitstring.InterpretError):
            stream._gethex()
class Empty(unittest.TestCase):
    """Behaviour of zero-length bitstrings."""

    def testEmptyBitstring(self):
        empty = BitStream()
        with self.assertRaises(bitstring.ReadError):
            empty.read(1)
        self.assertEqual(empty.bin, '')
        self.assertEqual(empty.hex, '')
        # An empty stream has no integer interpretation.
        with self.assertRaises(bitstring.InterpretError):
            empty._getint()
        with self.assertRaises(bitstring.InterpretError):
            empty._getuint()
        self.assertFalse(empty)

    def testNonEmptyBitStream(self):
        single = BitStream(bin='0')
        self.assertFalse(not single.len)
class Position(unittest.TestCase):
    """Tests for the pos/bitpos/bytepos properties and their validation."""

    def testBitPosition(self):
        s = BitStream(bytes=b'\x00\x00\x00')
        self.assertEqual(s.bitpos, 0)
        s.read(5)
        self.assertEqual(s.pos, 5)
        s.pos = s.len
        self.assertRaises(bitstring.ReadError, s.read, 1)

    def testBytePosition(self):
        s = BitStream(bytes=b'\x00\x00\x00')
        self.assertEqual(s.bytepos, 0)
        s.read(10)
        # bytepos is only defined when pos is on a byte boundary.
        self.assertRaises(bitstring.ByteAlignError, s._getbytepos)
        s.read(6)
        self.assertEqual(s.bytepos, 2)

    def testSeekToBit(self):
        s = BitStream(bytes=b'\x00\x00\x00\x00\x00\x00')
        s.bitpos = 0
        self.assertEqual(s.bitpos, 0)
        self.assertRaises(ValueError, s._setbitpos, -1)
        self.assertRaises(ValueError, s._setbitpos, 6 * 8 + 1)
        s.bitpos = 6 * 8
        self.assertEqual(s.bitpos, 6 * 8)

    def testSeekToByte(self):
        s = BitStream(bytes=b'\x00\x00\x00\x00\x00\xab')
        s.bytepos = 5
        self.assertEqual(s.read(8).hex, 'ab')

    def testAdvanceBitsAndBytes(self):
        s = BitStream(bytes=b'\x00\x00\x00\x00\x00\x00\x00\x00')
        s.pos += 5
        self.assertEqual(s.pos, 5)
        s.bitpos += 16
        self.assertEqual(s.pos, 2 * 8 + 5)
        s.pos -= 8
        self.assertEqual(s.pos, 8 + 5)

    def testRetreatBitsAndBytes(self):
        a = BitStream(length=100)
        a.pos = 80
        a.bytepos -= 5
        self.assertEqual(a.bytepos, 5)
        a.pos -= 5
        self.assertEqual(a.pos, 35)
class Offset(unittest.TestCase):
    """Tests that a bit offset into the backing bytes is handled transparently."""

    def testOffset1(self):
        s = BitStream(bytes=b'\x00\x1b\x3f', offset=4)
        self.assertEqual(s.read(8).bin, '00000001')
        self.assertEqual(s.length, 20)

    def testOffset2(self):
        # offsetcopy shifts the underlying datastore; the visible value
        # must be identical for every offset.
        s1 = BitStream(bytes=b'\xf1\x02\x04')
        s2 = BitStream(bytes=b'\xf1\x02\x04', length=23)
        for i in [1, 2, 3, 4, 5, 6, 7, 6, 5, 4, 3, 2, 1, 0, 7, 3, 5, 1, 4]:
            s1._datastore = offsetcopy(s1._datastore, i)
            self.assertEqual(s1.hex, 'f10204')
            s2._datastore = offsetcopy(s2._datastore, i)
            self.assertEqual(s2.bin, '11110001000000100000010')
class Append(unittest.TestCase):
    """Appending one bitstring to another, including self-append and offsets."""

    def testAppend(self):
        stream = BitStream('0b00000')
        stream.append(BitStream(bool=True))
        self.assertEqual(stream.bin, '000001')
        joined = BitStream('0x0102') + BitStream('0x0304')
        self.assertEqual(joined.hex, '01020304')

    def testAppendSameBitstring(self):
        # Appending a stream to itself must not corrupt the data.
        stream = BitStream('0xf0')[:6]
        stream.append(stream)
        self.assertEqual(stream.bin, '111100111100')

    def testAppendWithOffset(self):
        stream = BitStream(bytes=b'\x28\x28', offset=1)
        stream.append('0b0')
        self.assertEqual(stream.hex, '5050')
class ByteAlign(unittest.TestCase):
    """Tests for bytealign() and byte-aligned insertion."""

    def testByteAlign(self):
        s = BitStream(hex='0001ff23')
        s.bytealign()
        self.assertEqual(s.bytepos, 0)
        s.pos += 11
        s.bytealign()
        self.assertEqual(s.bytepos, 2)
        s.pos -= 10
        s.bytealign()
        self.assertEqual(s.bytepos, 1)

    def testByteAlignWithOffset(self):
        s = BitStream(hex='0112233')
        s._datastore = offsetcopy(s._datastore, 3)
        # The datastore offset is internal; pos 0 is already byte-aligned.
        bitstoalign = s.bytealign()
        self.assertEqual(bitstoalign, 0)
        self.assertEqual(s.read(5).bin, '00001')

    def testInsertByteAligned(self):
        s = BitStream('0x0011')
        s.insert(BitStream('0x22'), 8)
        self.assertEqual(s.hex, '002211')
        s = BitStream(0)
        s.insert(BitStream(bin='101'), 0)
        self.assertEqual(s.bin, '101')
class Truncate(unittest.TestCase):
    """Tests for truncating from the start (del s[:n]) and end (del s[-n:])."""

    def testTruncateStart(self):
        s = BitStream('0b1')
        del s[:1]
        self.assertFalse(s)
        s = BitStream(hex='1234')
        self.assertEqual(s.hex, '1234')
        del s[:4]
        self.assertEqual(s.hex, '234')
        del s[:9]
        self.assertEqual(s.bin, '100')
        del s[:2]
        self.assertEqual(s.bin, '0')
        self.assertEqual(s.len, 1)
        del s[:1]
        self.assertFalse(s)

    def testTruncateEnd(self):
        s = BitStream('0b1')
        del s[-1:]
        self.assertFalse(s)
        s = BitStream(bytes=b'\x12\x34')
        self.assertEqual(s.hex, '1234')
        del s[-4:]
        self.assertEqual(s.hex, '123')
        del s[-9:]
        self.assertEqual(s.bin, '000')
        del s[-3:]
        self.assertFalse(s)
        s = BitStream('0b001')
        del s[:2]
        del s[-1:]
        self.assertFalse(s)
class Slice(unittest.TestCase):
    """Tests for bit-level slicing of a stream."""

    def testByteAlignedSlice(self):
        s = BitStream(hex='0x123456')
        self.assertEqual(s[8:16].hex, '34')
        s = s[8:24]
        self.assertEqual(s.len, 16)
        self.assertEqual(s.hex, '3456')
        s = s[0:8]
        self.assertEqual(s.hex, '34')
        s.hex = '0x123456'
        self.assertEqual(s[8:24][0:8].hex, '34')

    def testSlice(self):
        s = BitStream(bin='000001111100000')
        s1 = s[0:5]
        s2 = s[5:10]
        s3 = s[10:15]
        self.assertEqual(s1.bin, '00000')
        self.assertEqual(s2.bin, '11111')
        self.assertEqual(s3.bin, '00000')
class Insert(unittest.TestCase):
    """Tests for insert(): position handling, null inserts and self-insert."""

    def testInsert(self):
        s1 = BitStream(hex='0x123456')
        s2 = BitStream(hex='0xff')
        s1.bytepos = 1
        s1.insert(s2)
        # pos moves past the inserted bits.
        self.assertEqual(s1.bytepos, 2)
        self.assertEqual(s1.hex, '12ff3456')
        s1.insert('0xee', 24)
        self.assertEqual(s1.hex, '12ff34ee56')
        self.assertEqual(s1.bitpos, 32)
        self.assertRaises(ValueError, s1.insert, '0b1', -1000)
        self.assertRaises(ValueError, s1.insert, '0b1', 1000)

    def testInsertNull(self):
        s = BitStream(hex='0x123').insert(BitStream(), 3)
        self.assertEqual(s.hex, '123')

    def testInsertBits(self):
        one = BitStream(bin='1')
        zero = BitStream(bin='0')
        s = BitStream(bin='00')
        s.insert(one, 0)
        self.assertEqual(s.bin, '100')
        s.insert(zero, 0)
        self.assertEqual(s.bin, '0100')
        s.insert(one, s.len)
        self.assertEqual(s.bin, '01001')
        # Inserting a stream into itself must use a copy of the data.
        s.insert(s, 2)
        self.assertEqual(s.bin, '0101001001')
class Resetting(unittest.TestCase):
    """Tests for wholesale reassignment via the hex/bin properties."""

    def testSetHex(self):
        s = BitStream()
        s.hex = '0'
        self.assertEqual(s.hex, '0')
        s.hex = '0x010203045'
        self.assertEqual(s.hex, '010203045')
        self.assertRaises(bitstring.CreationError, s._sethex, '0x002g')

    def testSetBin(self):
        s = BitStream(bin="000101101")
        self.assertEqual(s.bin, '000101101')
        self.assertEqual(s.len, 9)
        s.bin = '0'
        self.assertEqual(s.bin, '0')
        self.assertEqual(s.len, 1)

    def testSetEmptyBin(self):
        s = BitStream(hex='0x000001b3')
        s.bin = ''
        self.assertEqual(s.len, 0)
        self.assertEqual(s.bin, '')

    def testSetInvalidBin(self):
        s = BitStream()
        self.assertRaises(bitstring.CreationError, s._setbin_safe, '00102')
class Overwriting(unittest.TestCase):
    """Tests for overwrite(): limits, null overwrites, pos and self-overwrite."""

    def testOverwriteBit(self):
        s = BitStream(bin='0')
        s.overwrite(BitStream(bin='1'), 0)
        self.assertEqual(s.bin, '1')

    def testOverwriteLimits(self):
        s = BitStream(bin='0b11111')
        s.overwrite(BitStream(bin='000'), 0)
        self.assertEqual(s.bin, '00011')
        s.overwrite('0b000', 2)
        self.assertEqual(s.bin, '00000')

    def testOverwriteNull(self):
        s = BitStream(hex='342563fedec')
        s2 = BitStream(s)
        s.overwrite(BitStream(bin=''), 23)
        self.assertEqual(s.bin, s2.bin)

    def testOverwritePosition(self):
        s1 = BitStream(hex='0123456')
        s2 = BitStream(hex='ff')
        s1.bytepos = 1
        s1.overwrite(s2)
        self.assertEqual((s1.hex, s1.bytepos), ('01ff456', 2))
        s1.overwrite('0xff', 0)
        self.assertEqual((s1.hex, s1.bytepos), ('ffff456', 1))

    def testOverwriteWithSelf(self):
        s = BitStream('0x123')
        s.overwrite(s)
        self.assertEqual(s, '0x123')
class Split(unittest.TestCase):
    """Exercise BitStream.split on byte-aligned delimiters and corner cases."""

    def testSplitByteAlignedCornerCases(self):
        # Splitting an empty stream yields a single empty section.
        empty = BitStream()
        sections = empty.split(BitStream(hex='0xff'))
        self.assertEqual(next(sections).hex, '')
        self.assertRaises(StopIteration, next, sections)
        stream = BitStream(hex='aabbcceeddff')
        # An empty delimiter is an error, raised lazily on first next().
        sections = stream.split(BitStream())
        self.assertRaises(ValueError, next, sections)
        # A delimiter that never occurs returns the whole stream as one piece.
        sections = stream.split(BitStream(hex='11'))
        self.assertEqual(next(sections).hex, stream.hex)

    def testSplitByteAligned(self):
        stream = BitStream(hex='0x1234aa1234bbcc1234ffff')
        delim = BitStream(hex='1234')
        pieces = stream.split(delim)
        self.assertEqual([piece.hex for piece in pieces], ['', '1234aa', '1234bbcc', '1234ffff'])
        # Splitting must not move the stream position.
        self.assertEqual(stream.pos, 0)

    def testSplitByteAlignedWithIntialBytes(self):
        stream = BitStream(hex='aa471234fedc43 47112233 47 4723 472314')
        delim = BitStream(hex='47')
        # find() moves the position; split(start=0) must not disturb it.
        stream.find(delim)
        self.assertEqual(stream.bytepos, 1)
        pieces = stream.split(delim, start=0)
        self.assertEqual([piece.hex for piece in pieces], ['aa', '471234fedc43', '47112233',
                                                           '47', '4723', '472314'])
        self.assertEqual(stream.bytepos, 1)

    def testSplitByteAlignedWithOverlappingDelimiter(self):
        # Overlapping delimiter occurrences: only non-overlapping matches split.
        stream = BitStream(hex='aaffaaffaaffaaffaaff')
        pieces = stream.split(BitStream(hex='aaffaa'))
        self.assertEqual([piece.hex for piece in pieces], ['', 'aaffaaff', 'aaffaaffaaff'])
class Adding(unittest.TestCase):
def testAdding(self):
s1 = BitStream(hex='0x0102')
s2 = BitStream(hex='0x0304')
s3 = s1 + s2
self.assertEqual(s1.hex, '0102')
self.assertEqual(s2.hex, '0304')
self.assertEqual(s3.hex, '01020304')
s3 += s1
self.assertEqual(s3.hex, '010203040102')
self.assertEqual(s2[9:16].bin, '0000100')
self.assertEqual(s1[0:9].bin, '000000010')
s4 = BitStream(bin='000000010') +\
BitStream(bin='0000100')
self.assertEqual(s4.bin, '0000000100000100')
s2p = s2[9:16]
s1p = s1[0:9]
s5p = s1p + s2p
s5 = s1[0:9] + s2[9:16]
self.assertEqual(s5.bin, '0000000100000100')
def testMoreAdding(self):
s = BitStream(bin='00') + BitStream(bin='') + BitStream(bin='11')
self.assertEqual(s.bin, '0011')
s = '0b01'
s += BitStream('0b11')
self.assertEqual(s.bin, '0111')
s = BitStream('0x00')
t = BitStream('0x11')
s += t
self.assertEqual(s.hex, '0011')
self.assertEqual(t.hex, '11')
s += s
self.assertEqual(s.hex, '00110011')
def testRadd(self):
s = '0xff' + BitStream('0xee')
self.assertEqual(s.hex, 'ffee')
def testTruncateAsserts(self):
s = BitStream('0x001122')
s.bytepos = 2
del s[-s.len:]
self.assertEqual(s.bytepos, 0)
s.append('0x00')
s.append('0x1122')
s.bytepos = 2
del s[:s.len]
self.assertEqual(s.bytepos, 0)
s.append('0x00')
def testOverwriteErrors(self):
s = BitStream(bin='11111')
self.assertRaises(ValueError, s.overwrite, BitStream(bin='1'), -10)
self.assertRaises(ValueError, s.overwrite, BitStream(bin='1'), 6)
self.assertRaises(ValueError, s.overwrite, BitStream(bin='11111'), 1)
def testDeleteBits(self):
s = BitStream(bin='000111100000')
s.bitpos = 4
del s[4:8]
self.assertEqual(s.bin, '00010000')
del s[4:1004]
self.assertTrue(s.bin, '0001')
def testDeleteBitsWithPosition(self):
s = BitStream(bin='000111100000')
del s[4:8]
self.assertEqual(s.bin, '00010000')
def testDeleteBytes(self):
s = BitStream('0x00112233')
del s[8:8]
self.assertEqual(s.hex, '00112233')
self.assertEqual(s.pos, 0)
del s[8:16]
self.assertEqual(s.hex, '002233')
self.assertEqual(s.bytepos, 0)
del s[:3:8]
self.assertFalse(s)
self.assertEqual(s.pos, 0)
def testGetItemWithPositivePosition(self):
s = BitStream(bin='0b1011')
self.assertEqual(s[0], True)
self.assertEqual(s[1], False)
self.assertEqual(s[2], True)
self.assertEqual(s[3], True)
self.assertRaises(IndexError, s.__getitem__, 4)
def testGetItemWithNegativePosition(self):
s = BitStream(bin='1011')
self.assertEqual(s[-1], True)
self.assertEqual(s[-2], True)
self.assertEqual(s[-3], False)
self.assertEqual(s[-4], True)
self.assertRaises(IndexError, s.__getitem__, -5)
def testSlicing(self):
s = ConstBitStream(hex='0123456789')
self.assertEqual(s[0:8].hex, '01')
self.assertFalse(s[0:0])
self.assertFalse(s[23:20])
self.assertEqual(s[8:12].bin, '0010')
self.assertEqual(s[8:20:4], '0x89')
def testNegativeSlicing(self):
s = ConstBitStream(hex='0x012345678')
self.assertEqual(s[:-8].hex, '0123456')
self.assertEqual(s[-16:-8].hex, '56')
self.assertEqual(s[-24:].hex, '345678')
self.assertEqual(s[-1000:-6:4], '0x012')
def testLen(self):
s = BitStream()
self.assertEqual(len(s), 0)
s.append(BitStream(bin='001'))
self.assertEqual(len(s), 3)
def testJoin(self):
s1 = BitStream(bin='0')
s2 = BitStream(bin='1')
s3 = BitStream(bin='000')
s4 = BitStream(bin='111')
strings = [s1, s2, s1, s3, s4]
s = BitStream().join(strings)
self.assertEqual(s.bin, '010000111')
def testJoin2(self):
s1 = BitStream(hex='00112233445566778899aabbccddeeff')
s2 = BitStream(bin='0b000011')
bsl = [s1[0:32], s1[4:12], s2, s2, s2, s2]
s = ConstBitStream().join(bsl)
self.assertEqual(s.hex, '00112233010c30c3')
bsl = [BitStream(uint=j, length=12) for j in range(10) for i in range(10)]
s = BitStream().join(bsl)
self.assertEqual(s.length, 1200)
def testPos(self):
s = BitStream(bin='1')
self.assertEqual(s.bitpos, 0)
s.read(1)
self.assertEqual(s.bitpos, 1)
def testWritingData(self):
strings = [BitStream(bin=x) for x in ['0', '001', '0011010010', '010010', '1011']]
s = BitStream().join(strings)
s2 = BitStream(bytes=s.bytes)
self.assertEqual(s2.bin, '000100110100100100101011')
s2.append(BitStream(bin='1'))
s3 = BitStream(bytes=s2.tobytes())
self.assertEqual(s3.bin, '00010011010010010010101110000000')
def testWritingDataWithOffsets(self):
s1 = BitStream(bytes=b'\x10')
s2 = BitStream(bytes=b'\x08\x00', length=8, offset=1)
s3 = BitStream(bytes=b'\x04\x00', length=8, offset=2)
self.assertTrue(s1 == s2)
self.assertTrue(s2 == s3)
self.assertTrue(s1.bytes == s2.bytes)
self.assertTrue(s2.bytes == s3.bytes)
def testVariousThings1(self):
hexes = ['12345678', '87654321', 'ffffffffff', 'ed', '12ec']
bins = ['001010', '1101011', '0010000100101110110110', '11', '011']
bsl = []
for (hex, bin) in list(zip(hexes, bins)) * 5:
bsl.append(BitStream(hex=hex))
bsl.append(BitStream(bin=bin))
s = BitStream().join(bsl)
for (hex, bin) in list(zip(hexes, bins)) * 5:
h = s.read(4 * len(hex))
b = s.read(len(bin))
self.assertEqual(h.hex, hex)
self.assertEqual(b.bin, bin)
def testVariousThings2(self):
s1 = BitStream(hex="0x1f08")[:13]
self.assertEqual(s1.bin, '0001111100001')
s2 = BitStream(bin='0101')
self.assertEqual(s2.bin, '0101')
s1.append(s2)
self.assertEqual(s1.length, 17)
self.assertEqual(s1.bin, '00011111000010101')
s1 = s1[3:8]
self.assertEqual(s1.bin, '11111')
def testVariousThings3(self):
s1 = BitStream(hex='0x012480ff')[2:27]
s2 = s1 + s1
self.assertEqual(s2.length, 50)
s3 = s2[0:25]
s4 = s2[25:50]
self.assertEqual(s3.bin, s4.bin)
def testPeekBit(self):
s = BitStream(bin='01')
self.assertEqual(s.peek(1), [0])
self.assertEqual(s.peek(1), [0])
self.assertEqual(s.read(1), [0])
self.assertEqual(s.peek(1), [1])
self.assertEqual(s.peek(1), [1])
s = BitStream(bytes=b'\x1f', offset=3)
self.assertEqual(s.len, 5)
self.assertEqual(s.peek(5).bin, '11111')
self.assertEqual(s.peek(5).bin, '11111')
s.pos += 1
self.assertRaises(bitstring.ReadError, s.peek, 5)
s = BitStream(hex='001122334455')
self.assertEqual(s.peek(8).hex, '00')
self.assertEqual(s.read(8).hex, '00')
s.pos += 33
self.assertRaises(bitstring.ReadError, s.peek, 8)
s = BitStream(hex='001122334455')
self.assertEqual(s.peek(8 * 2).hex, '0011')
self.assertEqual(s.read(8 * 3).hex, '001122')
self.assertEqual(s.peek(8 * 3).hex, '334455')
self.assertRaises(bitstring.ReadError, s.peek, 25)
def testAdvanceBit(self):
s = BitStream(hex='0xff')
s.bitpos = 6
s.pos += 1
self.assertEqual(s.bitpos, 7)
s.bitpos += 1
try:
s.pos += 1
self.assertTrue(False)
except ValueError:
pass
def testAdvanceByte(self):
s = BitStream(hex='0x010203')
s.bytepos += 1
self.assertEqual(s.bytepos, 1)
s.bytepos += 1
self.assertEqual(s.bytepos, 2)
s.bytepos += 1
try:
s.bytepos += 1
self.assertTrue(False)
except ValueError:
pass
def testRetreatBit(self):
s = BitStream(hex='0xff')
try:
s.pos -= 1
self.assertTrue(False)
except ValueError:
pass
s.pos = 5
s.pos -= 1
self.assertEqual(s.pos, 4)
def testRetreatByte(self):
s = BitStream(hex='0x010203')
try:
s.bytepos -= 1
self.assertTrue(False)
except ValueError:
pass
s.bytepos = 3
s.bytepos -= 1
self.assertEqual(s.bytepos, 2)
self.assertEqual(s.read(8).hex, '03')
def testCreationByAuto(self):
s = BitStream('0xff')
self.assertEqual(s.hex, 'ff')
s = BitStream('0b00011')
self.assertEqual(s.bin, '00011')
self.assertRaises(bitstring.CreationError, BitStream, 'hello')
s1 = BitStream(bytes=b'\xf5', length=3, offset=5)
s2 = BitStream(s1, length=1, offset=1)
self.assertEqual(s2, '0b0')
s = BitStream(bytes=b'\xff', offset=2)
t = BitStream(s, offset=2)
self.assertEqual(t, '0b1111')
self.assertRaises(TypeError, BitStream, auto=1.2)
def testCreationByAuto2(self):
s = BitStream('bin=001')
self.assertEqual(s.bin, '001')
s = BitStream('oct=0o007')
self.assertEqual(s.oct, '007')
s = BitStream('hex=123abc')
self.assertEqual(s, '0x123abc')
s = BitStream('bin:2=01')
self.assertEqual(s, '0b01')
for s in ['bin:1=01', 'bits:4=0b1', 'oct:3=000', 'hex:4=0x1234']:
self.assertRaises(bitstring.CreationError, BitStream, s)
def testInsertUsingAuto(self):
s = BitStream('0xff')
s.insert('0x00', 4)
self.assertEqual(s.hex, 'f00f')
self.assertRaises(ValueError, s.insert, 'ff')
def testOverwriteUsingAuto(self):
s = BitStream('0x0110')
s.overwrite('0b1')
self.assertEqual(s.hex, '8110')
s.overwrite('')
self.assertEqual(s.hex, '8110')
self.assertRaises(ValueError, s.overwrite, '0bf')
def testFindUsingAuto(self):
s = BitStream('0b000000010100011000')
self.assertTrue(s.find('0b101'))
self.assertEqual(s.pos, 7)
def testFindbytealignedUsingAuto(self):
s = BitStream('0x00004700')
self.assertTrue(s.find('0b01000111', bytealigned=True))
self.assertEqual(s.bytepos, 2)
def testAppendUsingAuto(self):
s = BitStream('0b000')
s.append('0b111')
self.assertEqual(s.bin, '000111')
s.append('0b0')
self.assertEqual(s.bin, '0001110')
def testSplitByteAlignedUsingAuto(self):
s = BitStream('0x000143563200015533000123')
sections = s.split('0x0001')
self.assertEqual(next(sections).hex, '')
self.assertEqual(next(sections).hex, '0001435632')
self.assertEqual(next(sections).hex, '00015533')
self.assertEqual(next(sections).hex, '000123')
self.assertRaises(StopIteration, next, sections)
def testSplitByteAlignedWithSelf(self):
s = BitStream('0x1234')
sections = s.split(s)
self.assertEqual(next(sections).hex, '')
self.assertEqual(next(sections).hex, '1234')
self.assertRaises(StopIteration, next, sections)
def testPrepend(self):
s = BitStream('0b000')
s.prepend('0b11')
self.assertEqual(s.bin, '11000')
s.prepend(s)
self.assertEqual(s.bin, '1100011000')
s.prepend('')
self.assertEqual(s.bin, '1100011000')
def testNullSlice(self):
s = BitStream('0x111')
t = s[1:1]
self.assertEqual(t._datastore.bytelength, 0)
def testMultipleAutos(self):
s = BitStream('0xa')
s.prepend('0xf')
s.append('0xb')
self.assertEqual(s, '0xfab')
s.prepend(s)
s.append('0x100')
s.overwrite('0x5', 4)
self.assertEqual(s, '0xf5bfab100')
def testReverse(self):
s = BitStream('0b0011')
s.reverse()
self.assertEqual(s.bin, '1100')
s = BitStream('0b10')
s.reverse()
self.assertEqual(s.bin, '01')
s = BitStream()
s.reverse()
self.assertEqual(s.bin, '')
def testInitWithConcatenatedStrings(self):
s = BitStream('0xff 0Xee 0xd 0xcc')
self.assertEqual(s.hex, 'ffeedcc')
s = BitStream('0b0 0B111 0b001')
self.assertEqual(s.bin, '0111001')
s += '0b1' + '0B1'
self.assertEqual(s.bin, '011100111')
s = BitStream(hex='ff0xee')
self.assertEqual(s.hex, 'ffee')
s = BitStream(bin='000b0b11')
self.assertEqual(s.bin, '0011')
s = BitStream(' 0o123 0O 7 0 o1')
self.assertEqual(s.oct, '12371')
s += ' 0 o 332'
self.assertEqual(s.oct, '12371332')
def testEquals(self):
s1 = BitStream('0b01010101')
s2 = BitStream('0b01010101')
self.assertTrue(s1 == s2)
s3 = BitStream()
s4 = BitStream()
self.assertTrue(s3 == s4)
self.assertFalse(s3 != s4)
s5 = BitStream(bytes=b'\xff', offset=2, length=3)
s6 = BitStream('0b111')
self.assertTrue(s5 == s6)
def testLargeEquals(self):
s1 = BitStream(10000000)
s2 = BitStream(10000000)
s1.set(True, [0, 55, 53214, 5342111, 9999999])
s2.set(True, [0, 55, 53214, 5342111, 9999999])
self.assertEqual(s1, s2)
s1.set(True, 8000000)
self.assertNotEqual(s1, s2)
def testNotEquals(self):
s1 = BitStream('0b0')
s2 = BitStream('0b1')
self.assertTrue(s1 != s2)
self.assertFalse(s1 != BitStream('0b0'))
def testEqualityWithAutoInitialised(self):
a = BitStream('0b00110111')
self.assertTrue(a == '0b00110111')
self.assertTrue(a == '0x37')
self.assertTrue('0b0011 0111' == a)
self.assertTrue('0x3 0x7' == a)
self.assertFalse(a == '0b11001000')
self.assertFalse('0x3737' == a)
def testInvertSpecialMethod(self):
s = BitStream('0b00011001')
self.assertEqual((~s).bin, '11100110')
self.assertEqual((~BitStream('0b0')).bin, '1')
self.assertEqual((~BitStream('0b1')).bin, '0')
self.assertTrue(~~s == s)
def testInvertBitPosition(self):
s = ConstBitStream('0xefef')
s.pos = 8
t = ~s
self.assertEqual(s.pos, 8)
self.assertEqual(t.pos, 0)
def testInvertSpecialMethodErrors(self):
s = BitStream()
self.assertRaises(bitstring.Error, s.__invert__)
def testJoinWithAuto(self):
s = BitStream().join(['0xf', '0b00', BitStream(bin='11')])
self.assertEqual(s, '0b11110011')
def testAutoBitStringCopy(self):
s = BitStream('0xabcdef')
t = BitStream(s)
self.assertEqual(t.hex, 'abcdef')
del s[-8:]
self.assertEqual(t.hex, 'abcdef')
def testMultiplication(self):
a = BitStream('0xff')
b = a * 8
self.assertEqual(b, '0xffffffffffffffff')
b = 4 * a
self.assertEqual(b, '0xffffffff')
self.assertTrue(1 * a == a * 1 == a)
c = a * 0
self.assertFalse(c)
a *= 3
self.assertEqual(a, '0xffffff')
a *= 0
self.assertFalse(a)
one = BitStream('0b1')
zero = BitStream('0b0')
mix = one * 2 + 3 * zero + 2 * one * 2
self.assertEqual(mix, '0b110001111')
q = BitStream()
q *= 143
self.assertFalse(q)
q += [True, True, False]
q.pos += 2
q *= 0
self.assertFalse(q)
self.assertEqual(q.bitpos, 0)
def testMultiplicationWithFiles(self):
a = BitStream(filename='test.m1v')
b = a.len
a *= 3
self.assertEqual(a.len, 3 * b)
def testMultiplicationErrors(self):
a = BitStream('0b1')
b = BitStream('0b0')
self.assertRaises(ValueError, a.__mul__, -1)
self.assertRaises(ValueError, a.__imul__, -1)
self.assertRaises(ValueError, a.__rmul__, -1)
self.assertRaises(TypeError, a.__mul__, 1.2)
self.assertRaises(TypeError, a.__rmul__, b)
self.assertRaises(TypeError, a.__imul__, b)
def testFileAndMemEquivalence(self):
a = ConstBitStream(filename='smalltestfile')
b = BitStream(filename='smalltestfile')
self.assertTrue(isinstance(a._datastore._rawarray, bitstring.bits.MmapByteArray))
self.assertTrue(isinstance(b._datastore._rawarray, bytearray))
self.assertEqual(a._datastore.getbyte(0), b._datastore.getbyte(0))
self.assertEqual(a._datastore.getbyteslice(1, 5), bytearray(b._datastore.getbyteslice(1, 5)))
def testByte2Bits(self):
for i in range(256):
s = BitStream(bin=bitstring.bits.BYTE_TO_BITS[i])
self.assertEqual(i, s.uint)
self.assertEqual(s.length, 8)
def testBitwiseAnd(self):
a = BitStream('0b01101')
b = BitStream('0b00110')
self.assertEqual((a & b).bin, '00100')
self.assertEqual((a & '0b11111'), a)
self.assertRaises(ValueError, a.__and__, '0b1')
self.assertRaises(ValueError, b.__and__, '0b110111111')
c = BitStream('0b0011011')
c.pos = 4
d = c & '0b1111000'
self.assertEqual(d.pos, 0)
self.assertEqual(d.bin, '0011000')
d = '0b1111000' & c
self.assertEqual(d.bin, '0011000')
def testBitwiseOr(self):
a = BitStream('0b111001001')
b = BitStream('0b011100011')
self.assertEqual((a | b).bin, '111101011')
self.assertEqual((a | '0b000000000'), a)
self.assertRaises(ValueError, a.__or__, '0b0000')
self.assertRaises(ValueError, b.__or__, a + '0b1')
a = '0xff00' | BitStream('0x00f0')
self.assertEqual(a.hex, 'fff0')
def testBitwiseXor(self):
a = BitStream('0b111001001')
b = BitStream('0b011100011')
self.assertEqual((a ^ b).bin, '100101010')
self.assertEqual((a ^ '0b111100000').bin, '000101001')
self.assertRaises(ValueError, a.__xor__, '0b0000')
self.assertRaises(ValueError, b.__xor__, a + '0b1')
a = '0o707' ^ BitStream('0o777')
self.assertEqual(a.oct, '070')
def testSplit(self):
a = BitStream('0b0 010100111 010100 0101 010')
a.pos = 20
subs = [i.bin for i in a.split('0b010')]
self.assertEqual(subs, ['0', '010100111', '010100', '0101', '010'])
self.assertEqual(a.pos, 20)
def testSplitCornerCases(self):
a = BitStream('0b000000')
bsl = a.split('0b1', False)
self.assertEqual(next(bsl), a)
self.assertRaises(StopIteration, next, bsl)
b = BitStream()
bsl = b.split('0b001', False)
self.assertFalse(next(bsl))
self.assertRaises(StopIteration, next, bsl)
def testSplitErrors(self):
a = BitStream('0b0')
b = a.split('', False)
self.assertRaises(ValueError, next, b)
def testPositionInSlice(self):
a = BitStream('0x00ffff00')
a.bytepos = 2
b = a[8:24]
self.assertEqual(b.bytepos, 0)
def testSliceWithOffset(self):
a = BitStream(bytes=b'\x00\xff\x00', offset=7)
b = a[7:12]
self.assertEqual(b.bin, '11000')
def testSplitWithMaxsplit(self):
a = BitStream('0xaabbccbbccddbbccddee')
self.assertEqual(len(list(a.split('0xbb', bytealigned=True))), 4)
bsl = list(a.split('0xbb', count=1, bytealigned=True))
self.assertEqual((len(bsl), bsl[0]), (1, '0xaa'))
bsl = list(a.split('0xbb', count=2, bytealigned=True))
self.assertEqual(len(bsl), 2)
self.assertEqual(bsl[0], '0xaa')
self.assertEqual(bsl[1], '0xbbcc')
def testSplitMore(self):
s = BitStream('0b1100011001110110')
for i in range(10):
a = list(s.split('0b11', False, count=i))
b = list(s.split('0b11', False))[:i]
self.assertEqual(a, b)
b = s.split('0b11', count=-1)
self.assertRaises(ValueError, next, b)
def testFindByteAlignedWithBits(self):
a = BitStream('0x00112233445566778899')
a.find('0b0001', bytealigned=True)
self.assertEqual(a.bitpos, 8)
def testFindStartbitNotByteAligned(self):
a = BitStream('0b0010000100')
found = a.find('0b1', start=4)
self.assertEqual((found, a.bitpos), ((7,), 7))
found = a.find('0b1', start=2)
self.assertEqual((found, a.bitpos), ((2,), 2))
found = a.find('0b1', bytealigned=False, start=8)
self.assertEqual((found, a.bitpos), ((), 2))
def testFindEndbitNotByteAligned(self):
a = BitStream('0b0010010000')
found = a.find('0b1', bytealigned=False, end=2)
self.assertEqual((found, a.bitpos), ((), 0))
found = a.find('0b1', end=3)
self.assertEqual((found, a.bitpos), ((2,), 2))
found = a.find('0b1', bytealigned=False, start=3, end=5)
self.assertEqual((found, a.bitpos), ((), 2))
found = a.find('0b1', start=3, end=6)
self.assertEqual((found[0], a.bitpos), (5, 5))
def testFindStartbitByteAligned(self):
a = BitStream('0xff001122ff0011ff')
a.pos = 40
found = a.find('0x22', start=23, bytealigned=True)
self.assertEqual((found, a.bytepos), ((24,), 3))
a.bytepos = 4
found = a.find('0x22', start=24, bytealigned=True)
self.assertEqual((found, a.bytepos), ((24,), 3))
found = a.find('0x22', start=25, bytealigned=True)
self.assertEqual((found, a.pos), ((), 24))
found = a.find('0b111', start=40, bytealigned=True)
self.assertEqual((found, a.pos), ((56,), 56))
def testFindEndbitByteAligned(self):
a = BitStream('0xff001122ff0011ff')
found = a.find('0x22', end=31, bytealigned=True)
self.assertFalse(found)
self.assertEqual(a.pos, 0)
found = a.find('0x22', end=32, bytealigned=True)
self.assertTrue(found)
self.assertEqual(a.pos, 24)
self.assertEqual(found[0], 24)
def testFindStartEndbitErrors(self):
a = BitStream('0b00100')
self.assertRaises(ValueError, a.find, '0b1', bytealigned=False, start=-100)
self.assertRaises(ValueError, a.find, '0b1', end=6)
self.assertRaises(ValueError, a.find, '0b1', start=4, end=3)
b = BitStream('0x0011223344')
self.assertRaises(ValueError, a.find, '0x22', bytealigned=True, start=-100)
self.assertRaises(ValueError, a.find, '0x22', end=41, bytealigned=True)
def testSplitStartbit(self):
a = BitStream('0b0010101001000000001111')
bsl = a.split('0b001', bytealigned=False, start=1)
self.assertEqual([x.bin for x in bsl], ['010101', '001000000', '001111'])
b = a.split('0b001', start=-100)
self.assertRaises(ValueError, next, b)
b = a.split('0b001', start=23)
self.assertRaises(ValueError, next, b)
b = a.split('0b1', start=10, end=9)
self.assertRaises(ValueError, next, b)
def testSplitStartbitByteAligned(self):
a = BitStream('0x00ffffee')
bsl = list(a.split('0b111', start=9, bytealigned=True))
self.assertEqual([x.bin for x in bsl], ['1111111', '11111111', '11101110'])
def testSplitEndbit(self):
a = BitStream('0b000010001001011')
bsl = list(a.split('0b1', bytealigned=False, end=14))
self.assertEqual([x.bin for x in bsl], ['0000', '1000', '100', '10', '1'])
self.assertEqual(list(a[4:12].split('0b0', False)), list(a.split('0b0', start=4, end=12)))
# Shouldn't raise ValueError
bsl = list(a.split('0xffee', end=15))
# Whereas this one will when we call next()
bsl = a.split('0xffee', end=16)
self.assertRaises(ValueError, next, bsl)
def testSplitEndbitByteAligned(self):
a = BitStream('0xff00ff')[:22]
bsl = list(a.split('0b 0000 0000 111', end=19))
self.assertEqual([x.bin for x in bsl], ['11111111', '00000000111'])
bsl = list(a.split('0b 0000 0000 111', end=18))
self.assertEqual([x.bin for x in bsl], ['111111110000000011'])
def testSplitMaxSplit(self):
a = BitStream('0b1' * 20)
for i in range(10):
bsl = list(a.split('0b1', count=i))
self.assertEqual(len(bsl), i)
def testPrependAndAppendAgain(self):
c = BitStream('0x1122334455667788')
c.bitpos = 40
c.prepend('0b1')
self.assertEqual(c.bitpos, 41)
c = BitStream()
c.prepend('0x1234')
self.assertEqual(c.bytepos, 2)
c = BitStream()
c.append('0x1234')
self.assertEqual(c.bytepos, 0)
s = BitStream(bytes=b'\xff\xff', offset=2)
self.assertEqual(s.length, 14)
t = BitStream(bytes=b'\x80', offset=1, length=2)
s.prepend(t)
self.assertEqual(s, '0x3fff')
def testFindAll(self):
a = BitStream('0b11111')
p = a.findall('0b1')
self.assertEqual(list(p), [0, 1, 2, 3, 4])
p = a.findall('0b11')
self.assertEqual(list(p), [0, 1, 2, 3])
p = a.findall('0b10')
self.assertEqual(list(p), [])
a = BitStream('0x4733eeff66554747335832434547')
p = a.findall('0x47', bytealigned=True)
self.assertEqual(list(p), [0, 6 * 8, 7 * 8, 13 * 8])
p = a.findall('0x4733', bytealigned=True)
self.assertEqual(list(p), [0, 7 * 8])
a = BitStream('0b1001001001001001001')
p = a.findall('0b1001', bytealigned=False)
self.assertEqual(list(p), [0, 3, 6, 9, 12, 15])
self.assertEqual(a.pos, 15)
def testFindAllGenerator(self):
a = BitStream('0xff1234512345ff1234ff12ff')
p = a.findall('0xff', bytealigned=True)
self.assertEqual(next(p), 0)
self.assertEqual(next(p), 6 * 8)
self.assertEqual(next(p), 9 * 8)
self.assertEqual(next(p), 11 * 8)
self.assertRaises(StopIteration, next, p)
def testFindAllCount(self):
s = BitStream('0b1') * 100
for i in [0, 1, 23]:
self.assertEqual(len(list(s.findall('0b1', count=i))), i)
b = s.findall('0b1', bytealigned=True, count=-1)
self.assertRaises(ValueError, next, b)
def testContains(self):
a = BitStream('0b1') + '0x0001dead0001'
self.assertTrue('0xdead' in a)
self.assertEqual(a.pos, 0)
self.assertFalse('0xfeed' in a)
def testRepr(self):
max = bitstring.bits.MAX_CHARS
bls = ['', '0b1', '0o5', '0x43412424f41', '0b00101001010101']
for bs in bls:
a = BitStream(bs)
b = eval(a.__repr__())
self.assertTrue(a == b)
for f in [ConstBitStream(filename='test.m1v'),
ConstBitStream(filename='test.m1v', length=17),
ConstBitStream(filename='test.m1v', length=23, offset=23102)]:
f2 = eval(f.__repr__())
self.assertEqual(f._datastore._rawarray.source.name, f2._datastore._rawarray.source.name)
self.assertTrue(f2 == f)
a = BitStream('0b1')
self.assertEqual(repr(a), "BitStream('0b1')")
a += '0b11'
self.assertEqual(repr(a), "BitStream('0b111')")
a += '0b1'
self.assertEqual(repr(a), "BitStream('0xf')")
a *= max
self.assertEqual(repr(a), "BitStream('0x" + "f" * max + "')")
a += '0xf'
self.assertEqual(repr(a), "BitStream('0x" + "f" * max + "...') # length=%d" % (max * 4 + 4))
def testPrint(self):
s = BitStream(hex='0x00')
self.assertEqual('0x' + s.hex, s.__str__())
s = BitStream(filename='test.m1v')
self.assertEqual('0x' + s[0:bitstring.bits.MAX_CHARS * 4].hex + '...', s.__str__())
self.assertEqual(BitStream().__str__(), '')
def testIter(self):
a = BitStream('0b001010')
b = BitStream()
for bit in a:
b.append(ConstBitStream(bool=bit))
self.assertEqual(a, b)
def testDelitem(self):
a = BitStream('0xffee')
del a[0:8]
self.assertEqual(a.hex, 'ee')
del a[0:8]
self.assertFalse(a)
del a[10:12]
self.assertFalse(a)
def testNonZeroBitsAtStart(self):
a = BitStream(bytes=b'\xff', offset=2)
b = BitStream('0b00')
b += a
self.assertTrue(b == '0b0011 1111')
#self.assertEqual(a._datastore.rawbytes, b'\xff')
self.assertEqual(a.tobytes(), b'\xfc')
def testNonZeroBitsAtEnd(self):
a = BitStream(bytes=b'\xff', length=5)
#self.assertEqual(a._datastore.rawbytes, b'\xff')
b = BitStream('0b00')
a += b
self.assertTrue(a == '0b1111100')
self.assertEqual(a.tobytes(), b'\xf8')
self.assertRaises(ValueError, a._getbytes)
#def testLargeOffsets(self):
#a = BitStream('0xffffffff', offset=32)
#self.assertFalse(a)
#b = BitStream(bytes=b'\xff\xff\xff\xfd', offset=30, length=1)
#self.assertEqual(b, '0b0')
#o = BitStream(oct='123456707', offset=24)
#self.assertEqual(o, '0o7')
#d = BitStream(bytes=b'\x00\x00\x00\x00\x0f', offset=33, length=5)
#self.assertEqual(d, '0b00011')
def testNewOffsetErrors(self):
self.assertRaises(bitstring.CreationError, BitStream, hex='ff', offset=-1)
self.assertRaises(bitstring.CreationError, BitStream, '0xffffffff', offset=33)
def testSliceStep(self):
a = BitStream('0x3')
b = a[::1]
self.assertEqual(a, b)
self.assertEqual(a[1:2:2], '0b11')
self.assertEqual(a[0:1:2], '0b00')
self.assertEqual(a[:1:3], '0o1')
self.assertEqual(a[::4], a)
self.assertFalse(a[::5])
a = BitStream('0x0011223344556677')
self.assertEqual(a[3:5:8], '0x3344')
self.assertEqual(a[5::8], '0x556677')
self.assertEqual(a[-1::8], '0x77')
self.assertEqual(a[-2::4], '0x77')
self.assertEqual(a[:-3:8], '0x0011223344')
self.assertEqual(a[-1000:-3:8], '0x0011223344')
a.append('0b1')
self.assertEqual(a[5::8], '0x556677')
self.assertEqual(a[5:100:8], '0x556677')
def testSliceNegativeStep(self):
a = BitStream('0o 01 23 45 6')
self.assertEqual(a[::-3], '0o6543210')
self.assertFalse(a[1:3:-6])
self.assertEqual(a[2:0:-6], '0o4523')
self.assertEqual(a[2::-6], '0o452301')
b = a[::-1]
a.reverse()
self.assertEqual(b, a)
b = BitStream('0x01020408') + '0b11'
self.assertEqual(b[::-8], '0x08040201')
self.assertEqual(b[::-4], '0x80402010')
self.assertEqual(b[::-2], '0b11' + BitStream('0x20108040'))
self.assertEqual(b[::-33], b[:33])
self.assertEqual(b[::-34], b)
self.assertFalse(b[::-35])
self.assertEqual(b[-1:-3:-8], '0x0402')
def testInsertionOrderAndBitpos(self):
b = BitStream()
b[0:0] = '0b0'
b[0:0] = '0b1'
self.assertEqual(b, '0b10')
self.assertEqual(b.bitpos, 1)
a = BitStream()
a.insert('0b0')
a.insert('0b1')
self.assertEqual(a, '0b01')
self.assertEqual(a.bitpos, 2)
def testOverwriteOrderAndBitpos(self):
a = BitStream('0xff')
a.overwrite('0xa')
self.assertEqual(a, '0xaf')
self.assertEqual(a.bitpos, 4)
a.overwrite('0xb')
self.assertEqual(a, '0xab')
self.assertEqual(a.bitpos, 8)
self.assertRaises(ValueError, a.overwrite, '0b1')
a.overwrite('0xa', 4)
self.assertEqual(a, '0xaa')
self.assertEqual(a.bitpos, 8)
def testInitSliceWithInt(self):
a = BitStream(length=8)
a[:] = 100
self.assertEqual(a.uint, 100)
a[0] = 1
self.assertEqual(a.bin, '11100100')
a[1] = 0
self.assertEqual(a.bin, '10100100')
a[-1] = -1
self.assertEqual(a.bin, '10100101')
a[-3:] = -2
self.assertEqual(a.bin, '10100110')
def testInitSliceWithIntErrors(self):
a = BitStream('0b0000')
self.assertRaises(ValueError, a.__setitem__, slice(0, 4), 16)
self.assertRaises(ValueError, a.__setitem__, slice(0, 4), -9)
self.assertRaises(ValueError, a.__setitem__, 0, 2)
self.assertRaises(ValueError, a.__setitem__, 0, -2)
def testReverseWithSlice(self):
a = BitStream('0x0012ff')
a.reverse()
self.assertEqual(a, '0xff4800')
a.reverse(8, 16)
self.assertEqual(a, '0xff1200')
b = a[8:16]
b.reverse()
a[8:16] = b
self.assertEqual(a, '0xff4800')
def testReverseWithSliceErrors(self):
a = BitStream('0x123')
self.assertRaises(ValueError, a.reverse, -1, 4)
self.assertRaises(ValueError, a.reverse, 10, 9)
self.assertRaises(ValueError, a.reverse, 1, 10000)
def testInitialiseFromList(self):
a = BitStream([])
self.assertFalse(a)
a = BitStream([True, False, [], [0], 'hello'])
self.assertEqual(a, '0b10011')
a += []
self.assertEqual(a, '0b10011')
a += [True, False, True]
self.assertEqual(a, '0b10011101')
a.find([12, 23])
self.assertEqual(a.pos, 3)
self.assertEqual([1, 0, False, True], BitStream('0b1001'))
a = [True] + BitStream('0b1')
self.assertEqual(a, '0b11')
def testInitialiseFromTuple(self):
a = BitStream(())
self.assertFalse(a)
a = BitStream((0, 1, '0', '1'))
self.assertEqual('0b0111', a)
a.replace((True, True), [])
self.assertEqual(a, (False, True))
def testCut(self):
a = BitStream('0x00112233445')
b = list(a.cut(8))
self.assertEqual(b, ['0x00', '0x11', '0x22', '0x33', '0x44'])
b = list(a.cut(4, 8, 16))
self.assertEqual(b, ['0x1', '0x1'])
b = list(a.cut(4, 0, 44, 4))
self.assertEqual(b, ['0x0', '0x0', '0x1', '0x1'])
a = BitStream()
b = list(a.cut(10))
self.assertTrue(not b)
def testCutErrors(self):
a = BitStream('0b1')
b = a.cut(1, 1, 2)
self.assertRaises(ValueError, next, b)
b = a.cut(1, -2, 1)
self.assertRaises(ValueError, next, b)
b = a.cut(0)
self.assertRaises(ValueError, next, b)
b = a.cut(1, count=-1)
self.assertRaises(ValueError, next, b)
def testCutProblem(self):
s = BitStream('0x1234')
for n in list(s.cut(4)):
s.prepend(n)
self.assertEqual(s, '0x43211234')
def testJoinFunctions(self):
a = BitStream().join(['0xa', '0xb', '0b1111'])
self.assertEqual(a, '0xabf')
a = BitStream('0b1').join(['0b0' for i in range(10)])
self.assertEqual(a, '0b0101010101010101010')
a = BitStream('0xff').join([])
self.assertFalse(a)
def testAddingBitpos(self):
a = BitStream('0xff')
b = BitStream('0x00')
a.bitpos = b.bitpos = 8
c = a + b
self.assertEqual(c.bitpos, 0)
def testIntelligentRead1(self):
a = BitStream(uint=123, length=23)
u = a.read('uint:23')
self.assertEqual(u, 123)
self.assertEqual(a.pos, a.len)
b = BitStream(int=-12, length=44)
i = b.read('int:44')
self.assertEqual(i, -12)
self.assertEqual(b.pos, b.len)
u2, i2 = (a + b).readlist('uint:23, int:44')
self.assertEqual((u2, i2), (123, -12))
def testIntelligentRead2(self):
a = BitStream(ue=822)
u = a.read('ue')
self.assertEqual(u, 822)
self.assertEqual(a.pos, a.len)
b = BitStream(se=-1001)
s = b.read('se')
self.assertEqual(s, -1001)
self.assertEqual(b.pos, b.len)
s, u1, u2 = (b + 2 * a).readlist('se, ue, ue')
self.assertEqual((s, u1, u2), (-1001, 822, 822))
def testIntelligentRead3(self):
a = BitStream('0x123') + '0b11101'
h = a.read('hex:12')
self.assertEqual(h, '123')
b = a.read('bin: 5')
self.assertEqual(b, '11101')
c = '0b' + b + a
b, h = c.readlist('bin:5, hex:12')
self.assertEqual((b, h), ('11101', '123'))
def testIntelligentRead4(self):
a = BitStream('0o007')
o = a.read('oct:9')
self.assertEqual(o, '007')
self.assertEqual(a.pos, a.len)
def testIntelligentRead5(self):
a = BitStream('0x00112233')
c0, c1, c2 = a.readlist('bits:8, bits:8, bits:16')
self.assertEqual((c0, c1, c2), (BitStream('0x00'), BitStream('0x11'), BitStream('0x2233')))
a.pos = 0
c = a.read('bits:16')
self.assertEqual(c, BitStream('0x0011'))
def testIntelligentRead6(self):
a = BitStream('0b000111000')
b1, b2, b3 = a.readlist('bin :3, int: 3, int:3')
self.assertEqual(b1, '000')
self.assertEqual(b2, -1)
self.assertEqual(b3, 0)
def testIntelligentRead7(self):
a = BitStream('0x1234')
a1, a2, a3, a4 = a.readlist('bin:0, oct:0, hex:0, bits:0')
self.assertTrue(a1 == a2 == a3 == '')
self.assertFalse(a4)
self.assertRaises(ValueError, a.read, 'int:0')
self.assertRaises(ValueError, a.read, 'uint:0')
self.assertEqual(a.pos, 0)
def testIntelligentRead8(self):
a = BitStream('0x123456')
for t in ['hex:1', 'oct:1', 'hex4', '-5', 'fred', 'bin:-2',
'uint:p', 'uint:-2', 'int:u', 'int:-3', 'ses', 'uee', '-14']:
self.assertRaises(ValueError, a.read, t)
def testIntelligentRead9(self):
a = BitStream('0xff')
self.assertEqual(a.read('intle'), -1)
def testFillerReads1(self):
s = BitStream('0x012345')
t = s.read('bits')
self.assertEqual(s, t)
s.pos = 0
a, b = s.readlist('hex:8, hex')
self.assertEqual(a, '01')
self.assertEqual(b, '2345')
self.assertTrue(isinstance(b, str))
s.bytepos = 0
a, b = s.readlist('bin, hex:20')
self.assertEqual(a, '0000')
self.assertEqual(b, '12345')
self.assertTrue(isinstance(a, str))
def testFillerReads2(self):
s = BitStream('0xabcdef')
self.assertRaises(bitstring.Error, s.readlist, 'bits, se')
self.assertRaises(bitstring.Error, s.readlist, 'hex:4, bits, ue, bin:4')
def testIntelligentPeek(self):
a = BitStream('0b01, 0x43, 0o4, uint:23=2, se=5, ue=3')
b, c, e = a.peeklist('bin:2, hex:8, oct:3')
self.assertEqual((b, c, e), ('01', '43', '4'))
self.assertEqual(a.pos, 0)
a.pos = 13
f, g, h = a.peeklist('uint:23, se, ue')
self.assertEqual((f, g, h), (2, 5, 3))
self.assertEqual(a.pos, 13)
def testReadMultipleBits(self):
s = BitStream('0x123456789abcdef')
a, b = s.readlist([4, 4])
self.assertEqual(a, '0x1')
self.assertEqual(b, '0x2')
c, d, e = s.readlist([8, 16, 8])
self.assertEqual(c, '0x34')
self.assertEqual(d, '0x5678')
self.assertEqual(e, '0x9a')
def testPeekMultipleBits(self):
s = BitStream('0b1101, 0o721, 0x2234567')
a, b, c, d = s.peeklist([2, 1, 1, 9])
self.assertEqual(a, '0b11')
self.assertEqual(bool(b), False)
self.assertEqual(bool(c), True)
self.assertEqual(d, '0o721')
self.assertEqual(s.pos, 0)
a, b = s.peeklist([4, 9])
self.assertEqual(a, '0b1101')
self.assertEqual(b, '0o721')
s.pos = 13
a, b = s.peeklist([16, 8])
self.assertEqual(a, '0x2234')
self.assertEqual(b, '0x56')
self.assertEqual(s.pos, 13)
def testDifficultPrepends(self):
a = BitStream('0b1101011')
b = BitStream()
for i in range(10):
b.prepend(a)
self.assertEqual(b, a * 10)
def testPackingWrongNumberOfThings(self):
self.assertRaises(bitstring.CreationError, pack, 'bin:1')
self.assertRaises(bitstring.CreationError, pack, '', 100)
def testPackWithVariousKeys(self):
a = pack('uint10', uint10='0b1')
self.assertEqual(a, '0b1')
b = pack('0b110', **{'0b110': '0xfff'})
self.assertEqual(b, '0xfff')
    def testPackWithVariableLength(self):
        """A token's length may be a keyword parameter: 'uint:n' with n=i."""
        for i in range(1, 11):
            a = pack('uint:n', 0, n=i)
            self.assertEqual(a.bin, '0' * i)
    def testToBytes(self):
        """tobytes() pads any final partial byte with zero bits."""
        a = BitStream(bytes=b'\xab\x00')
        b = a.tobytes()
        self.assertEqual(a.bytes, b)
        # Removing up to 7 trailing (zero) bits still rounds up to the same two bytes.
        for i in range(7):
            del a[-1:]
            self.assertEqual(a.tobytes(), b'\xab\x00')
        # The 8th deletion leaves exactly 8 bits -> a single byte.
        del a[-1:]
        self.assertEqual(a.tobytes(), b'\xab')
def testToFile(self):
a = BitStream('0x0000ff')[:17]
f = open('temp_bitstring_unit_testing_file', 'wb')
a.tofile(f)
f.close()
b = BitStream(filename='temp_bitstring_unit_testing_file')
self.assertEqual(b, '0x000080')
a = BitStream('0x911111')
del a[:1]
self.assertEqual(a + '0b0', '0x222222')
f = open('temp_bitstring_unit_testing_file', 'wb')
a.tofile(f)
f.close()
b = BitStream(filename='temp_bitstring_unit_testing_file')
self.assertEqual(b, '0x222222')
os.remove('temp_bitstring_unit_testing_file')
#def testToFileWithLargerFile(self):
# a = BitStream(length=16000000)
# a[1] = '0b1'
# a[-2] = '0b1'
# f = open('temp_bitstring_unit_testing_file' ,'wb')
# a.tofile(f)
# f.close()
# b = BitStream(filename='temp_bitstring_unit_testing_file')
# self.assertEqual(b.len, 16000000)
# self.assertEqual(b[1], True)
#
# f = open('temp_bitstring_unit_testing_file' ,'wb')
# a[1:].tofile(f)
# f.close()
# b = BitStream(filename='temp_bitstring_unit_testing_file')
# self.assertEqual(b.len, 16000000)
# self.assertEqual(b[0], True)
# os.remove('temp_bitstring_unit_testing_file')
def testTokenParser(self):
tp = bitstring.constbitstream.tokenparser
self.assertEqual(tp('hex'), (True, [('hex', None, None)]))
self.assertEqual(tp('hex=14'), (True, [('hex', None, '14')]))
self.assertEqual(tp('se'), (False, [('se', None, None)]))
self.assertEqual(tp('ue=12'), (False, [('ue', None, '12')]))
self.assertEqual(tp('0xef'), (False, [('0x', None, 'ef')]))
self.assertEqual(tp('uint:12'), (False, [('uint', 12, None)]))
self.assertEqual(tp('int:30=-1'), (False, [('int', 30, '-1')]))
self.assertEqual(tp('bits:10'), (False, [('bits', 10, None)]))
self.assertEqual(tp('bits:10'), (False, [('bits', 10, None)]))
self.assertEqual(tp('123'), (False, [('uint', 123, None)]))
self.assertEqual(tp('123'), (False, [('uint', 123, None)]))
self.assertRaises(ValueError, tp, 'hex12')
self.assertEqual(tp('hex12', ('hex12',)), (False, [('hex12', None, None)]))
self.assertEqual(tp('2*bits:6'), (False, [('bits', 6, None), ('bits', 6, None)]))
def testAutoFromFileObject(self):
with open('test.m1v', 'rb') as f:
s = ConstBitStream(f, offset=32, length=12)
self.assertEqual(s.uint, 352)
t = ConstBitStream('0xf') + f
self.assertTrue(t.startswith('0xf000001b3160'))
s2 = ConstBitStream(f)
t2 = BitStream('0xc')
t2.prepend(s2)
self.assertTrue(t2.startswith('0x000001b3'))
self.assertTrue(t2.endswith('0xc'))
with open('test.m1v', 'rb') as b:
u = ConstBitStream(bytes=b.read())
self.assertEqual(u, f)
def testFileBasedCopy(self):
with open('smalltestfile', 'rb') as f:
s = BitStream(f)
t = BitStream(s)
s.prepend('0b1')
self.assertEqual(s[1:], t)
s = BitStream(f)
t = copy.copy(s)
t.append('0b1')
self.assertEqual(s, t[:-1])
def testBigEndianSynonyms(self):
s = BitStream('0x12318276ef')
self.assertEqual(s.int, s.intbe)
self.assertEqual(s.uint, s.uintbe)
s = BitStream(intbe=-100, length=16)
self.assertEqual(s, 'int:16=-100')
s = BitStream(uintbe=13, length=24)
self.assertEqual(s, 'int:24=13')
s = BitStream('uintbe:32=1000')
self.assertEqual(s, 'uint:32=1000')
s = BitStream('intbe:8=2')
self.assertEqual(s, 'int:8=2')
self.assertEqual(s.read('intbe'), 2)
s.pos = 0
self.assertEqual(s.read('uintbe'), 2)
def testBigEndianSynonymErrors(self):
self.assertRaises(bitstring.CreationError, BitStream, uintbe=100, length=15)
self.assertRaises(bitstring.CreationError, BitStream, intbe=100, length=15)
self.assertRaises(bitstring.CreationError, BitStream, 'uintbe:17=100')
self.assertRaises(bitstring.CreationError, BitStream, 'intbe:7=2')
s = BitStream('0b1')
self.assertRaises(bitstring.InterpretError, s._getintbe)
self.assertRaises(bitstring.InterpretError, s._getuintbe)
self.assertRaises(ValueError, s.read, 'uintbe')
self.assertRaises(ValueError, s.read, 'intbe')
def testLittleEndianUint(self):
s = BitStream(uint=100, length=16)
self.assertEqual(s.uintle, 25600)
s = BitStream(uintle=100, length=16)
self.assertEqual(s.uint, 25600)
self.assertEqual(s.uintle, 100)
s.uintle += 5
self.assertEqual(s.uintle, 105)
s = BitStream('uintle:32=999')
self.assertEqual(s.uintle, 999)
self.assertEqual(s[::-8].uint, 999)
s = pack('uintle:24', 1001)
self.assertEqual(s.uintle, 1001)
self.assertEqual(s.length, 24)
self.assertEqual(s.read('uintle'), 1001)
def testLittleEndianInt(self):
s = BitStream(int=100, length=16)
self.assertEqual(s.intle, 25600)
s = BitStream(intle=100, length=16)
self.assertEqual(s.int, 25600)
self.assertEqual(s.intle, 100)
s.intle += 5
self.assertEqual(s.intle, 105)
s = BitStream('intle:32=999')
self.assertEqual(s.intle, 999)
self.assertEqual(s[::-8].int, 999)
s = pack('intle:24', 1001)
self.assertEqual(s.intle, 1001)
self.assertEqual(s.length, 24)
self.assertEqual(s.read('intle'), 1001)
def testLittleEndianErrors(self):
self.assertRaises(bitstring.CreationError, BitStream, 'uintle:15=10')
self.assertRaises(bitstring.CreationError, BitStream, 'intle:31=-999')
self.assertRaises(bitstring.CreationError, BitStream, uintle=100, length=15)
self.assertRaises(bitstring.CreationError, BitStream, intle=100, length=15)
s = BitStream('0xfff')
self.assertRaises(bitstring.InterpretError, s._getintle)
self.assertRaises(bitstring.InterpretError, s._getuintle)
self.assertRaises(ValueError, s.read, 'uintle')
self.assertRaises(ValueError, s.read, 'intle')
def testStructTokens1(self):
self.assertEqual(pack('<b', 23), BitStream('intle:8=23'))
self.assertEqual(pack('<B', 23), BitStream('uintle:8=23'))
self.assertEqual(pack('<h', 23), BitStream('intle:16=23'))
self.assertEqual(pack('<H', 23), BitStream('uintle:16=23'))
self.assertEqual(pack('<l', 23), BitStream('intle:32=23'))
self.assertEqual(pack('<L', 23), BitStream('uintle:32=23'))
self.assertEqual(pack('<q', 23), BitStream('intle:64=23'))
self.assertEqual(pack('<Q', 23), BitStream('uintle:64=23'))
self.assertEqual(pack('>b', 23), BitStream('intbe:8=23'))
self.assertEqual(pack('>B', 23), BitStream('uintbe:8=23'))
self.assertEqual(pack('>h', 23), BitStream('intbe:16=23'))
self.assertEqual(pack('>H', 23), BitStream('uintbe:16=23'))
self.assertEqual(pack('>l', 23), BitStream('intbe:32=23'))
self.assertEqual(pack('>L', 23), BitStream('uintbe:32=23'))
self.assertEqual(pack('>q', 23), BitStream('intbe:64=23'))
self.assertEqual(pack('>Q', 23), BitStream('uintbe:64=23'))
self.assertRaises(bitstring.CreationError, pack, '<B', -1)
self.assertRaises(bitstring.CreationError, pack, '<H', -1)
self.assertRaises(bitstring.CreationError, pack, '<L', -1)
self.assertRaises(bitstring.CreationError, pack, '<Q', -1)
def testStructTokens2(self):
endianness = sys.byteorder
sys.byteorder = 'little'
self.assertEqual(pack('@b', 23), BitStream('intle:8=23'))
self.assertEqual(pack('@B', 23), BitStream('uintle:8=23'))
self.assertEqual(pack('@h', 23), BitStream('intle:16=23'))
self.assertEqual(pack('@H', 23), BitStream('uintle:16=23'))
self.assertEqual(pack('@l', 23), BitStream('intle:32=23'))
self.assertEqual(pack('@L', 23), BitStream('uintle:32=23'))
self.assertEqual(pack('@q', 23), BitStream('intle:64=23'))
self.assertEqual(pack('@Q', 23), BitStream('uintle:64=23'))
sys.byteorder = 'big'
self.assertEqual(pack('@b', 23), BitStream('intbe:8=23'))
self.assertEqual(pack('@B', 23), BitStream('uintbe:8=23'))
self.assertEqual(pack('@h', 23), BitStream('intbe:16=23'))
self.assertEqual(pack('@H', 23), BitStream('uintbe:16=23'))
self.assertEqual(pack('@l', 23), BitStream('intbe:32=23'))
self.assertEqual(pack('@L', 23), BitStream('uintbe:32=23'))
self.assertEqual(pack('@q', 23), BitStream('intbe:64=23'))
self.assertEqual(pack('@Q', 23), BitStream('uintbe:64=23'))
sys.byteorder = endianness
def testNativeEndianness(self):
s = pack('@2L', 40, 40)
if sys.byteorder == 'little':
self.assertEqual(s, pack('<2L', 40, 40))
else:
self.assertEqual(sys.byteorder, 'big')
self.assertEqual(s, pack('>2L', 40, 40))
def testStructTokens2(self):
s = pack('>hhl', 1, 2, 3)
a, b, c = s.unpack('>hhl')
self.assertEqual((a, b, c), (1, 2, 3))
s = pack('<QL, >Q \tL', 1001, 43, 21, 9999)
self.assertEqual(s.unpack('<QL, >QL'), [1001, 43, 21, 9999])
def testStructTokensMultiplicativeFactors(self):
s = pack('<2h', 1, 2)
a, b = s.unpack('<2h')
self.assertEqual((a, b), (1, 2))
s = pack('<100q', *range(100))
self.assertEqual(s.len, 100 * 64)
self.assertEqual(s[44:45:64].uintle, 44)
s = pack('@L0B2h', 5, 5, 5)
self.assertEqual(s.unpack('@Lhh'), [5, 5, 5])
def testStructTokensErrors(self):
for f in ['>>q', '<>q', 'q>', '2q', 'q', '>-2q', '@a', '>int:8', '>q2']:
self.assertRaises(bitstring.CreationError, pack, f, 100)
def testImmutableBitStreams(self):
a = ConstBitStream('0x012345')
self.assertEqual(a, '0x012345')
b = BitStream('0xf') + a
self.assertEqual(b, '0xf012345')
try:
a.append(b)
self.assertTrue(False)
except AttributeError:
pass
try:
a.prepend(b)
self.assertTrue(False)
except AttributeError:
pass
try:
a[0] = '0b1'
self.assertTrue(False)
except TypeError:
pass
try:
del a[5]
self.assertTrue(False)
except TypeError:
pass
try:
a.replace('0b1', '0b0')
self.assertTrue(False)
except AttributeError:
pass
try:
a.insert('0b11', 4)
self.assertTrue(False)
except AttributeError:
pass
try:
a.reverse()
self.assertTrue(False)
except AttributeError:
pass
try:
a.reversebytes()
self.assertTrue(False)
except AttributeError:
pass
self.assertEqual(a, '0x012345')
self.assertTrue(isinstance(a, ConstBitStream))
def testReverseBytes(self):
a = BitStream('0x123456')
a.byteswap()
self.assertEqual(a, '0x563412')
b = a + '0b1'
b.byteswap()
self.assertEqual('0x123456, 0b1', b)
a = BitStream('0x54')
a.byteswap()
self.assertEqual(a, '0x54')
a = BitStream()
a.byteswap()
self.assertFalse(a)
def testReverseBytes2(self):
a = BitStream()
a.byteswap()
self.assertFalse(a)
a = BitStream('0x00112233')
a.byteswap(0, 0, 16)
self.assertEqual(a, '0x11002233')
a.byteswap(0, 4, 28)
self.assertEqual(a, '0x12302103')
a.byteswap(start=0, end=18)
self.assertEqual(a, '0x30122103')
self.assertRaises(ValueError, a.byteswap, 0, 10, 2)
self.assertRaises(ValueError, a.byteswap, 0, -4, 4)
self.assertRaises(ValueError, a.byteswap, 0, 24, 48)
a.byteswap(0, 24)
self.assertEqual(a, '0x30122103')
a.byteswap(0, 11, 11)
self.assertEqual(a, '0x30122103')
def testCapitalsInPack(self):
a = pack('A', A='0b1')
self.assertEqual(a, '0b1')
format = 'bits:4=BL_OFFT, uint:12=width, uint:12=height'
d = {'BL_OFFT': '0b1011', 'width': 352, 'height': 288}
s = bitstring.pack(format, **d)
self.assertEqual(s, '0b1011, uint:12=352, uint:12=288')
a = pack('0X0, uint:8, hex', 45, '0XABcD')
self.assertEqual(a, '0x0, uint:8=45, 0xabCD')
def testOtherCapitals(self):
a = ConstBitStream('0XABC, 0O0, 0B11')
self.assertEqual(a, 'hex=0Xabc, oct=0, bin=0B11')
def testEfficientOverwrite(self):
a = BitStream(1000000000)
a.overwrite([1], 123456)
self.assertEqual(a[123456], True)
a.overwrite('0xff', 1)
self.assertEqual(a[0:4:8], '0x7f800000')
b = BitStream('0xffff')
b.overwrite('0x0000')
self.assertEqual(b, '0x0000')
self.assertEqual(b.pos, 16)
c = BitStream(length=1000)
c.overwrite('0xaaaaaaaaaaaa', 81)
self.assertEqual(c[81:81 + 6 * 8], '0xaaaaaaaaaaaa')
self.assertEqual(len(list(c.findall('0b1'))), 24)
s = BitStream(length=1000)
s = s[5:]
s.overwrite('0xffffff', 500)
s.pos = 500
self.assertEqual(s.read(4 * 8), '0xffffff00')
s.overwrite('0xff', 502)
self.assertEqual(s[502:518], '0xffff')
def testPeekAndReadListErrors(self):
a = BitStream('0x123456')
self.assertRaises(ValueError, a.read, 'hex:8, hex:8')
self.assertRaises(ValueError, a.peek, 'hex:8, hex:8')
self.assertRaises(TypeError, a.read, 10, 12)
self.assertRaises(TypeError, a.peek, 12, 14)
self.assertRaises(TypeError, a.read, 8, 8)
self.assertRaises(TypeError, a.peek, 80, 80)
def testStartswith(self):
a = BitStream()
self.assertTrue(a.startswith(BitStream()))
self.assertFalse(a.startswith('0b0'))
a = BitStream('0x12ff')
self.assertTrue(a.startswith('0x1'))
self.assertTrue(a.startswith('0b0001001'))
self.assertTrue(a.startswith('0x12ff'))
self.assertFalse(a.startswith('0x12ff, 0b1'))
self.assertFalse(a.startswith('0x2'))
def testStartswithStartEnd(self):
s = BitStream('0x123456')
self.assertTrue(s.startswith('0x234', 4))
self.assertFalse(s.startswith('0x123', end=11))
self.assertTrue(s.startswith('0x123', end=12))
self.assertTrue(s.startswith('0x34', 8, 16))
self.assertFalse(s.startswith('0x34', 7, 16))
self.assertFalse(s.startswith('0x34', 9, 16))
self.assertFalse(s.startswith('0x34', 8, 15))
def testEndswith(self):
a = BitStream()
self.assertTrue(a.endswith(''))
self.assertFalse(a.endswith(BitStream('0b1')))
a = BitStream('0xf2341')
self.assertTrue(a.endswith('0x41'))
self.assertTrue(a.endswith('0b001'))
self.assertTrue(a.endswith('0xf2341'))
self.assertFalse(a.endswith('0x1f2341'))
self.assertFalse(a.endswith('0o34'))
def testEndswithStartEnd(self):
s = BitStream('0x123456')
self.assertTrue(s.endswith('0x234', end=16))
self.assertFalse(s.endswith('0x456', start=13))
self.assertTrue(s.endswith('0x456', start=12))
self.assertTrue(s.endswith('0x34', 8, 16))
self.assertTrue(s.endswith('0x34', 7, 16))
self.assertFalse(s.endswith('0x34', 9, 16))
self.assertFalse(s.endswith('0x34', 8, 15))
def testUnhashability(self):
s = BitStream('0xf')
self.assertRaises(TypeError, set, [s])
self.assertRaises(TypeError, hash, [s])
def testConstBitStreamSetCreation(self):
sl = [ConstBitStream(uint=i, length=7) for i in range(15)]
s = set(sl)
self.assertEqual(len(s), 15)
s.add(ConstBitStream('0b0000011'))
self.assertEqual(len(s), 15)
self.assertRaises(TypeError, s.add, BitStream('0b0000011'))
def testConstBitStreamFunctions(self):
s = ConstBitStream('0xf, 0b1')
self.assertEqual(type(s), ConstBitStream)
t = copy.copy(s)
self.assertEqual(type(t), ConstBitStream)
a = s + '0o3'
self.assertEqual(type(a), ConstBitStream)
b = a[0:4]
self.assertEqual(type(b), ConstBitStream)
b = a[4:3]
self.assertEqual(type(b), ConstBitStream)
b = a[5:2:-1]
self.assertEqual(type(b), ConstBitStream)
b = ~a
self.assertEqual(type(b), ConstBitStream)
b = a << 2
self.assertEqual(type(b), ConstBitStream)
b = a >> 2
self.assertEqual(type(b), ConstBitStream)
b = a * 2
self.assertEqual(type(b), ConstBitStream)
b = a * 0
self.assertEqual(type(b), ConstBitStream)
b = a & ~a
self.assertEqual(type(b), ConstBitStream)
b = a | ~a
self.assertEqual(type(b), ConstBitStream)
b = a ^ ~a
self.assertEqual(type(b), ConstBitStream)
b = a._slice(4, 4)
self.assertEqual(type(b), ConstBitStream)
b = a.read(4)
self.assertEqual(type(b), ConstBitStream)
def testConstBitStreamProperties(self):
a = ConstBitStream('0x123123')
try:
a.hex = '0x234'
self.assertTrue(False)
except AttributeError:
pass
try:
a.oct = '0o234'
self.assertTrue(False)
except AttributeError:
pass
try:
a.bin = '0b101'
self.assertTrue(False)
except AttributeError:
pass
try:
a.ue = 3453
self.assertTrue(False)
except AttributeError:
pass
try:
a.se = -123
self.assertTrue(False)
except AttributeError:
pass
try:
a.int = 432
self.assertTrue(False)
except AttributeError:
pass
try:
a.uint = 4412
self.assertTrue(False)
except AttributeError:
pass
try:
a.intle = 123
self.assertTrue(False)
except AttributeError:
pass
try:
a.uintle = 4412
self.assertTrue(False)
except AttributeError:
pass
try:
a.intbe = 123
self.assertTrue(False)
except AttributeError:
pass
try:
a.uintbe = 4412
self.assertTrue(False)
except AttributeError:
pass
try:
a.intne = 123
self.assertTrue(False)
except AttributeError:
pass
try:
a.uintne = 4412
self.assertTrue(False)
except AttributeError:
pass
try:
a.bytes = b'hello'
self.assertTrue(False)
except AttributeError:
pass
def testConstBitStreamMisc(self):
a = ConstBitStream('0xf')
b = a
a += '0xe'
self.assertEqual(b, '0xf')
self.assertEqual(a, '0xfe')
c = BitStream(a)
self.assertEqual(a, c)
a = ConstBitStream('0b1')
a._append(a)
self.assertEqual(a, '0b11')
self.assertEqual(type(a), ConstBitStream)
a._prepend(a)
self.assertEqual(a, '0b1111')
self.assertEqual(type(a), ConstBitStream)
def testConstBitStreamHashibility(self):
a = ConstBitStream('0x1')
b = ConstBitStream('0x2')
c = ConstBitStream('0x1')
c.pos = 3
s = set((a, b, c))
self.assertEqual(len(s), 2)
self.assertEqual(hash(a), hash(c))
def testConstBitStreamCopy(self):
a = ConstBitStream('0xabc')
a.pos = 11
b = copy.copy(a)
b.pos = 4
self.assertEqual(id(a._datastore), id(b._datastore))
self.assertEqual(a.pos, 11)
self.assertEqual(b.pos, 4)
def testPython26stuff(self):
s = BitStream('0xff')
self.assertTrue(isinstance(s.tobytes(), bytes))
self.assertTrue(isinstance(s.bytes, bytes))
def testReadFromBits(self):
a = ConstBitStream('0xaabbccdd')
b = a.read(8)
self.assertEqual(b, '0xaa')
self.assertEqual(a[0:8], '0xaa')
self.assertEqual(a[-1], True)
a.pos = 0
self.assertEqual(a.read(4).uint, 10)
class Set(unittest.TestCase):
    """Tests for BitStream.set(): setting and clearing individual bits."""

    def testSet(self):
        """Single-bit set() with positive and negative indices; out-of-range raises IndexError."""
        a = BitStream(length=16)
        a.set(True, 0)
        self.assertEqual(a, '0b10000000 00000000')
        a.set(1, 15)
        self.assertEqual(a, '0b10000000 00000001')
        b = a[4:12]
        b.set(True, 1)
        self.assertEqual(b, '0b01000000')
        b.set(True, -1)
        self.assertEqual(b, '0b01000001')
        b.set(1, -8)
        self.assertEqual(b, '0b11000001')
        self.assertRaises(IndexError, b.set, True, -9)
        self.assertRaises(IndexError, b.set, True, 8)

    def testFileBasedSetUnset(self):
        """set() works on streams created from a file."""
        a = BitStream(filename='test.m1v')
        a.set(True, (0, 1, 2, 3, 4))
        self.assertEqual(a[0:4:8], '0xf80001b3')
        a = BitStream(filename='test.m1v')
        a.set(False, (28, 29, 30, 31))
        self.assertTrue(a.startswith('0x000001b0'))

    def testSetList(self):
        """set() accepts an iterable of positions."""
        a = BitStream(length=18)
        a.set(True, range(18))
        self.assertEqual(a.int, -1)
        a.set(False, range(18))
        self.assertEqual(a.int, 0)

    def testUnset(self):
        """set(False/0, pos) clears bits; mirrors testSet via ~ inversion."""
        a = BitStream(length=16, int=-1)
        a.set(False, 0)
        self.assertEqual(~a, '0b10000000 00000000')
        a.set(0, 15)
        self.assertEqual(~a, '0b10000000 00000001')
        b = a[4:12]
        b.set(False, 1)
        self.assertEqual(~b, '0b01000000')
        b.set(False, -1)
        self.assertEqual(~b, '0b01000001')
        b.set(False, -8)
        self.assertEqual(~b, '0b11000001')
        self.assertRaises(IndexError, b.set, False, -9)
        self.assertRaises(IndexError, b.set, False, 8)

    def testSetWholeBitStream(self):
        """set() with no positions affects every bit."""
        a = BitStream(14)
        a.set(1)
        self.assertTrue(a.all(1))
        a.set(0)
        self.assertTrue(a.all(0))
class Invert(unittest.TestCase):
    """Tests for invert() and the in-place logical operators |=, &=, ^=."""

    def testInvertBits(self):
        """invert() flips the bits at the given positions (negative indices allowed)."""
        a = BitStream('0b111000')
        a.invert(range(a.len))
        self.assertEqual(a, '0b000111')
        a.invert([0, 1, -1])
        self.assertEqual(a, '0b110110')

    def testInvertWholeBitStream(self):
        """invert() with no argument flips every bit."""
        a = BitStream('0b11011')
        a.invert()
        self.assertEqual(a, '0b00100')

    #######################
    def testIor(self):
        """In-place OR; slices are independent, so the other operand is unchanged."""
        a = BitStream('0b1101001')
        a |= '0b1110000'
        self.assertEqual(a, '0b1111001')
        b = a[2:]
        c = a[1:-1]
        b |= c
        self.assertEqual(c, '0b11100')
        self.assertEqual(b, '0b11101')

    def testIand(self):
        """In-place AND, including on a file-backed slice."""
        a = BitStream('0b0101010101000')
        a &= '0b1111110000000'
        self.assertEqual(a, '0b0101010000000')
        s = BitStream(filename='test.m1v', offset=26, length=24)
        s &= '0xff00ff'
        self.assertEqual(s, '0xcc0004')

    def testIxor(self):
        """In-place XOR."""
        a = BitStream('0b11001100110011')
        a ^= '0b11111100000010'
        self.assertEqual(a, '0b00110000110001')
class AllAndAny(unittest.TestCase):
def testAll(self):
a = BitStream('0b0111')
self.assertTrue(a.all(True, (1, 3)))
self.assertFalse(a.all(True, (0, 1, 2)))
self.assertTrue(a.all(True, [-1]))
self.assertFalse(a.all(True, [0]))
def testFileBasedAll(self):
a = BitStream(filename='test.m1v')
self.assertTrue(a.all(True, [31]))
a = BitStream(filename='test.m1v')
self.assertTrue(a.all(False, (0, 1, 2, 3, 4)))
def testFileBasedAny(self):
a = BitStream(filename='test.m1v')
self.assertTrue(a.any(True, (31, 12)))
a = BitStream(filename='test.m1v')
self.assertTrue(a.any(False, (0, 1, 2, 3, 4)))
def testAny(self):
a = BitStream('0b10011011')
self.assertTrue(a.any(True, (1, 2, 3, 5)))
self.assertFalse(a.any(True, (1, 2, 5)))
self.assertTrue(a.any(True, (-1,)))
self.assertFalse(a.any(True, (1,)))
def testAllFalse(self):
a = BitStream('0b0010011101')
self.assertTrue(a.all(False, (0, 1, 3, 4)))
self.assertFalse(a.all(False, (0, 1, 2, 3, 4)))
def testAnyFalse(self):
a = BitStream('0b01001110110111111111111111111')
self.assertTrue(a.any(False, (4, 5, 6, 2)))
self.assertFalse(a.any(False, (1, 15, 20)))
def testAnyEmptyBitstring(self):
a = ConstBitStream()
self.assertFalse(a.any(True))
self.assertFalse(a.any(False))
def testAllEmptyBitStream(self):
a = ConstBitStream()
self.assertTrue(a.all(True))
self.assertTrue(a.all(False))
def testAnyWholeBitstring(self):
a = ConstBitStream('0xfff')
self.assertTrue(a.any(True))
self.assertFalse(a.any(False))
def testAllWholeBitstring(self):
a = ConstBitStream('0xfff')
self.assertTrue(a.all(True))
self.assertFalse(a.all(False))
###################
def testFloatInitialisation(self):
for f in (0.0000001, -1.0, 1.0, 0.2, -3.1415265, 1.331e32):
a = BitStream(float=f, length=64)
a.pos = 6
self.assertEqual(a.float, f)
a = BitStream('float:64=%s' % str(f))
a.pos = 6
self.assertEqual(a.float, f)
a = BitStream('floatbe:64=%s' % str(f))
a.pos = 6
self.assertEqual(a.floatbe, f)
a = BitStream('floatle:64=%s' % str(f))
a.pos = 6
self.assertEqual(a.floatle, f)
a = BitStream('floatne:64=%s' % str(f))
a.pos = 6
self.assertEqual(a.floatne, f)
b = BitStream(float=f, length=32)
b.pos = 6
self.assertAlmostEqual(b.float / f, 1.0)
b = BitStream('float:32=%s' % str(f))
b.pos = 6
self.assertAlmostEqual(b.float / f, 1.0)
b = BitStream('floatbe:32=%s' % str(f))
b.pos = 6
self.assertAlmostEqual(b.floatbe / f, 1.0)
b = BitStream('floatle:32=%s' % str(f))
b.pos = 6
self.assertAlmostEqual(b.floatle / f, 1.0)
b = BitStream('floatne:32=%s' % str(f))
b.pos = 6
self.assertAlmostEqual(b.floatne / f, 1.0)
a = BitStream('0x12345678')
a.pos = 6
a.float = 23
self.assertEqual(a.float, 23.0)
def testFloatInitStrings(self):
for s in ('5', '+0.0001', '-1e101', '4.', '.2', '-.65', '43.21E+32'):
a = BitStream('float:64=%s' % s)
self.assertEqual(a.float, float(s))
def testFloatPacking(self):
a = pack('>d', 0.01)
self.assertEqual(a.float, 0.01)
self.assertEqual(a.floatbe, 0.01)
self.assertEqual(a[::-8].floatle, 0.01)
b = pack('>f', 1e10)
self.assertAlmostEqual(b.float / 1e10, 1.0)
c = pack('<f', 10.3)
self.assertAlmostEqual(c.floatle / 10.3, 1.0)
d = pack('>5d', 10.0, 5.0, 2.5, 1.25, 0.1)
self.assertEqual(d.unpack('>5d'), [10.0, 5.0, 2.5, 1.25, 0.1])
def testFloatReading(self):
a = BitStream('floatle:64=12, floatbe:64=-0.01, floatne:64=3e33')
x, y, z = a.readlist('floatle:64, floatbe:64, floatne:64')
self.assertEqual(x, 12.0)
self.assertEqual(y, -0.01)
self.assertEqual(z, 3e33)
a = BitStream('floatle:32=12, floatbe:32=-0.01, floatne:32=3e33')
x, y, z = a.readlist('floatle:32, floatbe:32, floatne:32')
self.assertAlmostEqual(x / 12.0, 1.0)
self.assertAlmostEqual(y / -0.01, 1.0)
self.assertAlmostEqual(z / 3e33, 1.0)
a = BitStream('0b11, floatle:64=12, 0xfffff')
a.pos = 2
self.assertEqual(a.read('floatle:64'), 12.0)
def testFloatErrors(self):
a = BitStream('0x3')
self.assertRaises(bitstring.InterpretError, a._getfloat)
self.assertRaises(bitstring.CreationError, a._setfloat, -0.2)
for l in (8, 10, 12, 16, 30, 128, 200):
self.assertRaises(ValueError, BitStream, float=1.0, length=l)
    def testReadErrorChangesPos(self):
        """A failed read() must leave the bit position unchanged.

        NOTE(review): despite the name, this asserts that pos is *not*
        changed (still 0) after the invalid read -- confirm intent.
        """
        a = BitStream('0x123123')
        try:
            a.read('10, 5')  # invalid format string -> ValueError
        except ValueError:
            pass
        self.assertEqual(a.pos, 0)
def testRor(self):
a = BitStream('0b11001')
a.ror(0)
self.assertEqual(a, '0b11001')
a.ror(1)
self.assertEqual(a, '0b11100')
a.ror(5)
self.assertEqual(a, '0b11100')
a.ror(101)
self.assertEqual(a, '0b01110')
a = BitStream('0b1')
a.ror(1000000)
self.assertEqual(a, '0b1')
def testRorErrors(self):
a = BitStream()
self.assertRaises(bitstring.Error, a.ror, 0)
a += '0b001'
self.assertRaises(ValueError, a.ror, -1)
def testRol(self):
a = BitStream('0b11001')
a.rol(0)
self.assertEqual(a, '0b11001')
a.rol(1)
self.assertEqual(a, '0b10011')
a.rol(5)
self.assertEqual(a, '0b10011')
a.rol(101)
self.assertEqual(a, '0b00111')
a = BitStream('0b1')
a.rol(1000000)
self.assertEqual(a, '0b1')
def testRolFromFile(self):
a = BitStream(filename='test.m1v')
l = a.len
a.rol(1)
self.assertTrue(a.startswith('0x000003'))
self.assertEqual(a.len, l)
self.assertTrue(a.endswith('0x0036e'))
def testRorFromFile(self):
a = BitStream(filename='test.m1v')
l = a.len
a.ror(1)
self.assertTrue(a.startswith('0x800000'))
self.assertEqual(a.len, l)
self.assertTrue(a.endswith('0x000db'))
def testRolErrors(self):
a = BitStream()
self.assertRaises(bitstring.Error, a.rol, 0)
a += '0b001'
self.assertRaises(ValueError, a.rol, -1)
def testBytesToken(self):
a = BitStream('0x010203')
b = a.read('bytes:1')
self.assertTrue(isinstance(b, bytes))
self.assertEqual(b, b'\x01')
x, y, z = a.unpack('4, bytes:2, uint')
self.assertEqual(x, 0)
self.assertEqual(y, b'\x10\x20')
self.assertEqual(z, 3)
s = pack('bytes:4', b'abcd')
self.assertEqual(s.bytes, b'abcd')
def testBytesTokenMoreThoroughly(self):
a = BitStream('0x0123456789abcdef')
a.pos += 16
self.assertEqual(a.read('bytes:1'), b'\x45')
self.assertEqual(a.read('bytes:3'), b'\x67\x89\xab')
x, y, z = a.unpack('bits:28, bytes, bits:12')
self.assertEqual(y, b'\x78\x9a\xbc')
def testDedicatedReadFunctions(self):
a = BitStream('0b11, uint:43=98798798172, 0b11111')
x = a._readuint(43, 2)
self.assertEqual(x, 98798798172)
self.assertEqual(a.pos, 0)
x = a._readint(43, 2)
self.assertEqual(x, 98798798172)
self.assertEqual(a.pos, 0)
a = BitStream('0b11, uintbe:48=98798798172, 0b11111')
x = a._readuintbe(48, 2)
self.assertEqual(x, 98798798172)
self.assertEqual(a.pos, 0)
x = a._readintbe(48, 2)
self.assertEqual(x, 98798798172)
self.assertEqual(a.pos, 0)
a = BitStream('0b111, uintle:40=123516, 0b111')
self.assertEqual(a._readuintle(40, 3), 123516)
b = BitStream('0xff, uintle:800=999, 0xffff')
self.assertEqual(b._readuintle(800, 8), 999)
a = BitStream('0b111, intle:48=999999999, 0b111111111111')
self.assertEqual(a._readintle(48, 3), 999999999)
b = BitStream('0xff, intle:200=918019283740918263512351235, 0xfffffff')
self.assertEqual(b._readintle(200, 8), 918019283740918263512351235)
a = BitStream('0b111, floatbe:64=-5.32, 0xffffffff')
self.assertEqual(a._readfloat(64, 3), -5.32)
a = BitStream('0b111, floatle:64=9.9998, 0b111')
self.assertEqual(a._readfloatle(64, 3), 9.9998)
def testAutoInitWithInt(self):
a = BitStream(0)
self.assertFalse(a)
a = BitStream(1)
self.assertEqual(a, '0b0')
a = BitStream(1007)
self.assertEqual(a, BitStream(length=1007))
self.assertRaises(bitstring.CreationError, BitStream, -1)
a = 6 + ConstBitStream('0b1') + 3
self.assertEqual(a, '0b0000001000')
a += 1
self.assertEqual(a, '0b00000010000')
self.assertEqual(ConstBitStream(13), 13)
def testReadingProblems(self):
a = BitStream('0x000001')
b = a.read('uint:24')
self.assertEqual(b, 1)
a.pos = 0
self.assertRaises(bitstring.ReadError, a.read, 'bytes:4')
def testAddVersesInPlaceAdd(self):
a1 = ConstBitStream('0xabc')
b1 = a1
a1 += '0xdef'
self.assertEqual(a1, '0xabcdef')
self.assertEqual(b1, '0xabc')
a2 = BitStream('0xabc')
b2 = a2
c2 = a2 + '0x0'
a2 += '0xdef'
self.assertEqual(a2, '0xabcdef')
self.assertEqual(b2, '0xabcdef')
self.assertEqual(c2, '0xabc0')
def testAndVersesInPlaceAnd(self):
a1 = ConstBitStream('0xabc')
b1 = a1
a1 &= '0xf0f'
self.assertEqual(a1, '0xa0c')
self.assertEqual(b1, '0xabc')
a2 = BitStream('0xabc')
b2 = a2
c2 = a2 & '0x00f'
a2 &= '0xf0f'
self.assertEqual(a2, '0xa0c')
self.assertEqual(b2, '0xa0c')
self.assertEqual(c2, '0x00c')
def testOrVersesInPlaceOr(self):
a1 = ConstBitStream('0xabc')
b1 = a1
a1 |= '0xf0f'
self.assertEqual(a1, '0xfbf')
self.assertEqual(b1, '0xabc')
a2 = BitStream('0xabc')
b2 = a2
c2 = a2 | '0x00f'
a2 |= '0xf0f'
self.assertEqual(a2, '0xfbf')
self.assertEqual(b2, '0xfbf')
self.assertEqual(c2, '0xabf')
def testXorVersesInPlaceXor(self):
a1 = ConstBitStream('0xabc')
b1 = a1
a1 ^= '0xf0f'
self.assertEqual(a1, '0x5b3')
self.assertEqual(b1, '0xabc')
a2 = BitStream('0xabc')
b2 = a2
c2 = a2 ^ '0x00f'
a2 ^= '0xf0f'
self.assertEqual(a2, '0x5b3')
self.assertEqual(b2, '0x5b3')
self.assertEqual(c2, '0xab3')
def testMulVersesInPlaceMul(self):
a1 = ConstBitStream('0xabc')
b1 = a1
a1 *= 3
self.assertEqual(a1, '0xabcabcabc')
self.assertEqual(b1, '0xabc')
a2 = BitStream('0xabc')
b2 = a2
c2 = a2 * 2
a2 *= 3
self.assertEqual(a2, '0xabcabcabc')
self.assertEqual(b2, '0xabcabcabc')
self.assertEqual(c2, '0xabcabc')
def testLshiftVersesInPlaceLshift(self):
a1 = ConstBitStream('0xabc')
b1 = a1
a1 <<= 4
self.assertEqual(a1, '0xbc0')
self.assertEqual(b1, '0xabc')
a2 = BitStream('0xabc')
b2 = a2
c2 = a2 << 8
a2 <<= 4
self.assertEqual(a2, '0xbc0')
self.assertEqual(b2, '0xbc0')
self.assertEqual(c2, '0xc00')
def testRshiftVersesInPlaceRshift(self):
a1 = ConstBitStream('0xabc')
b1 = a1
a1 >>= 4
self.assertEqual(a1, '0x0ab')
self.assertEqual(b1, '0xabc')
a2 = BitStream('0xabc')
b2 = a2
c2 = a2 >> 8
a2 >>= 4
self.assertEqual(a2, '0x0ab')
self.assertEqual(b2, '0x0ab')
self.assertEqual(c2, '0x00a')
    def testAutoFromBool(self):
        # True/False are treated as ints here (stream *lengths*), so
        # True + False + True contributes 1 + 0 + 1 zero bits -> '0b00'.
        # The commented-out assertions below document a previously expected
        # bool-as-bit behaviour that is currently disabled.
        a = ConstBitStream() + True + False + True
        self.assertEqual(a, '0b00')
        # self.assertEqual(a, '0b101')
        # b = ConstBitStream(False)
        # self.assertEqual(b, '0b0')
        # c = ConstBitStream(True)
        # self.assertEqual(c, '0b1')
        # self.assertEqual(b, False)
        # self.assertEqual(c, True)
        # self.assertEqual(b & True, False)
class Bugs(unittest.TestCase):
def testBugInReplace(self):
s = BitStream('0x00112233')
l = list(s.split('0x22', start=8, bytealigned=True))
self.assertEqual(l, ['0x11', '0x2233'])
s = BitStream('0x00112233')
s.replace('0x22', '0xffff', start=8, bytealigned=True)
self.assertEqual(s, '0x0011ffff33')
s = BitStream('0x0123412341234')
s.replace('0x23', '0xf', start=9, bytealigned=True)
self.assertEqual(s, '0x012341f41f4')
    def testTruncateStartBug(self):
        """Regression test: _truncatestart on a slice with a bit offset keeps the right bits."""
        a = BitStream('0b000000111')[2:]
        a._truncatestart(6)
        self.assertEqual(a, '0b1')
def testNullBits(self):
s = ConstBitStream(bin='')
t = ConstBitStream(oct='')
u = ConstBitStream(hex='')
v = ConstBitStream(bytes=b'')
self.assertFalse(s)
self.assertFalse(t)
self.assertFalse(u)
self.assertFalse(v)
def testMultiplicativeFactorsCreation(self):
s = BitStream('1*0b1')
self.assertEqual(s, '0b1')
s = BitStream('4*0xc')
self.assertEqual(s, '0xcccc')
s = BitStream('0b1, 0*0b0')
self.assertEqual(s, '0b1')
s = BitStream('0b1, 3*uint:8=34, 2*0o755')
self.assertEqual(s, '0b1, uint:8=34, uint:8=34, uint:8=34, 0o755755')
s = BitStream('0*0b1001010')
self.assertFalse(s)
def testMultiplicativeFactorsReading(self):
s = BitStream('0xc') * 5
a, b, c, d, e = s.readlist('5*4')
self.assertTrue(a == b == c == d == e == 12)
s = ConstBitStream('2*0b101, 4*uint:7=3')
a, b, c, d, e = s.readlist('2*bin:3, 3*uint:7')
self.assertTrue(a == b == '101')
self.assertTrue(c == d == e == 3)
def testMultiplicativeFactorsPacking(self):
s = pack('3*bin', '1', '001', '101')
self.assertEqual(s, '0b1001101')
s = pack('hex, 2*se=-56, 3*uint:37', '34', 1, 2, 3)
a, b, c, d, e, f = s.unpack('hex:8, 2*se, 3*uint:37')
self.assertEqual(a, '34')
self.assertEqual(b, -56)
self.assertEqual(c, -56)
self.assertEqual((d, e, f), (1, 2, 3))
# This isn't allowed yet. See comment in tokenparser.
#s = pack('fluffy*uint:8', *range(3), fluffy=3)
#a, b, c = s.readlist('2*uint:8, 1*uint:8, 0*uint:8')
#self.assertEqual((a, b, c), (0, 1, 2))
def testMultiplicativeFactorsUnpacking(self):
s = ConstBitStream('0b10111')
a, b, c, d = s.unpack('3*bool, bin')
self.assertEqual((a, b, c), (True, False, True))
self.assertEqual(d, '11')
def testPackingDefaultIntWithKeyword(self):
s = pack('12', 100)
self.assertEqual(s.unpack('12')[0], 100)
s = pack('oh_no_not_the_eyes=33', oh_no_not_the_eyes=17)
self.assertEqual(s.uint, 33)
self.assertEqual(s.len, 17)
def testInitFromIterable(self):
self.assertTrue(isinstance(range(10), collections.Iterable))
s = ConstBitStream(range(12))
self.assertEqual(s, '0x7ff')
def testFunctionNegativeIndices(self):
# insert
s = BitStream('0b0111')
s.insert('0b0', -1)
self.assertEqual(s, '0b01101')
self.assertRaises(ValueError, s.insert, '0b0', -1000)
# reverse
s.reverse(-2)
self.assertEqual(s, '0b01110')
t = BitStream('0x778899abcdef')
t.reverse(-12, -4)
self.assertEqual(t, '0x778899abc7bf')
# reversebytes
t.byteswap(0, -40, -16)
self.assertEqual(t, '0x77ab9988c7bf')
# overwrite
t.overwrite('0x666', -20)
self.assertEqual(t, '0x77ab998666bf')
# find
found = t.find('0x998', bytealigned=True, start=-31)
self.assertFalse(found)
found = t.find('0x998', bytealigned=True, start=-32)
self.assertTrue(found)
self.assertEqual(t.pos, 16)
t.pos = 0
found = t.find('0x988', bytealigned=True, end=-21)
self.assertFalse(found)
found = t.find('0x998', bytealigned=True, end=-20)
self.assertTrue(found)
self.assertEqual(t.pos, 16)
#findall
s = BitStream('0x1234151f')
l = list(s.findall('0x1', bytealigned=True, start=-15))
self.assertEqual(l, [24])
l = list(s.findall('0x1', bytealigned=True, start=-16))
self.assertEqual(l, [16, 24])
l = list(s.findall('0x1', bytealigned=True, end=-5))
self.assertEqual(l, [0, 16])
l = list(s.findall('0x1', bytealigned=True, end=-4))
self.assertEqual(l, [0, 16, 24])
# rfind
found = s.rfind('0x1f', end=-1)
self.assertFalse(found)
found = s.rfind('0x12', start=-31)
self.assertFalse(found)
# cut
s = BitStream('0x12345')
l = list(s.cut(4, start=-12, end=-4))
self.assertEqual(l, ['0x3', '0x4'])
# split
s = BitStream('0xfe0012fe1200fe')
l = list(s.split('0xfe', bytealigned=True, end=-1))
self.assertEqual(l, ['', '0xfe0012', '0xfe1200f, 0b111'])
l = list(s.split('0xfe', bytealigned=True, start=-8))
self.assertEqual(l, ['', '0xfe'])
# startswith
self.assertTrue(s.startswith('0x00f', start=-16))
self.assertTrue(s.startswith('0xfe00', end=-40))
self.assertFalse(s.startswith('0xfe00', end=-41))
# endswith
self.assertTrue(s.endswith('0x00fe', start=-16))
self.assertFalse(s.endswith('0x00fe', start=-15))
self.assertFalse(s.endswith('0x00fe', end=-1))
self.assertTrue(s.endswith('0x00f', end=-4))
# replace
s.replace('0xfe', '', end=-1)
self.assertEqual(s, '0x00121200fe')
s.replace('0x00', '', start=-24)
self.assertEqual(s, '0x001212fe')
def testRotateStartAndEnd(self):
a = BitStream('0b110100001')
a.rol(1, 3, 6)
self.assertEqual(a, '0b110001001')
a.ror(1, start=-4)
self.assertEqual(a, '0b110001100')
a.rol(202, end=-5)
self.assertEqual(a, '0b001101100')
a.ror(3, end=4)
self.assertEqual(a, '0b011001100')
self.assertRaises(ValueError, a.rol, 5, start=-4, end=-6)
def testByteSwapInt(self):
s = pack('5*uintle:16', *range(10, 15))
self.assertEqual(list(range(10, 15)), s.unpack('5*uintle:16'))
swaps = s.byteswap(2)
self.assertEqual(list(range(10, 15)), s.unpack('5*uintbe:16'))
self.assertEqual(swaps, 5)
s = BitStream('0xf234567f')
swaps = s.byteswap(1, start=4)
self.assertEqual(swaps, 3)
self.assertEqual(s, '0xf234567f')
s.byteswap(2, start=4)
self.assertEqual(s, '0xf452367f')
s.byteswap(2, start=4, end=-4)
self.assertEqual(s, '0xf234567f')
s.byteswap(3)
self.assertEqual(s, '0x5634f27f')
s.byteswap(2, repeat=False)
self.assertEqual(s, '0x3456f27f')
swaps = s.byteswap(5)
self.assertEqual(swaps, 0)
swaps = s.byteswap(4, repeat=False)
self.assertEqual(swaps, 1)
self.assertEqual(s, '0x7ff25634')
def testByteSwapPackCode(self):
s = BitStream('0x0011223344556677')
swaps = s.byteswap('b')
self.assertEqual(s, '0x0011223344556677')
self.assertEqual(swaps, 8)
swaps = s.byteswap('>3h', repeat=False)
self.assertEqual(s, '0x1100332255446677')
self.assertEqual(swaps, 1)
def testByteSwapIterable(self):
s = BitStream('0x0011223344556677')
swaps = s.byteswap(range(1, 4), repeat=False)
self.assertEqual(swaps, 1)
self.assertEqual(s, '0x0022115544336677')
swaps = s.byteswap([2], start=8)
self.assertEqual(s, '0x0011224455663377')
self.assertEqual(3, swaps)
swaps = s.byteswap([2, 3], start=4)
self.assertEqual(swaps, 1)
self.assertEqual(s, '0x0120156452463377')
def testByteSwapErrors(self):
s = BitStream('0x0011223344556677')
self.assertRaises(ValueError, s.byteswap, 'z')
self.assertRaises(ValueError, s.byteswap, -1)
self.assertRaises(ValueError, s.byteswap, [-1])
self.assertRaises(ValueError, s.byteswap, [1, 'e'])
self.assertRaises(ValueError, s.byteswap, '!h')
self.assertRaises(ValueError, s.byteswap, 2, start=-1000)
def testByteSwapFromFile(self):
s = BitStream(filename='smalltestfile')
swaps = s.byteswap('2bh')
self.assertEqual(s, '0x0123674589abefcd')
self.assertEqual(swaps, 2)
def testBracketExpander(self):
be = bitstring.bits.expand_brackets
self.assertEqual(be('hello'), 'hello')
self.assertEqual(be('(hello)'), 'hello')
self.assertEqual(be('1*(hello)'), 'hello')
self.assertEqual(be('2*(hello)'), 'hello,hello')
self.assertEqual(be('1*(a, b)'), 'a,b')
self.assertEqual(be('2*(a, b)'), 'a,b,a,b')
self.assertEqual(be('2*(a), 3*(b)'), 'a,a,b,b,b')
self.assertEqual(be('2*(a, b, 3*(c, d), e)'), 'a,b,c,d,c,d,c,d,e,a,b,c,d,c,d,c,d,e')
def testBracketTokens(self):
s = BitStream('3*(0x0, 0b1)')
self.assertEqual(s, '0x0, 0b1, 0x0, 0b1, 0x0, 0b1')
s = pack('2*(uint:12, 3*(7, 6))', *range(3, 17))
a = s.unpack('12, 7, 6, 7, 6, 7, 6, 12, 7, 6, 7, 6, 7, 6')
self.assertEqual(a, list(range(3, 17)))
b = s.unpack('2*(12,3*(7,6))')
self.assertEqual(a, b)
def testPackCodeDicts(self):
self.assertEqual(sorted(bitstring.bits.REPLACEMENTS_BE.keys()),
sorted(bitstring.bits.REPLACEMENTS_LE.keys()))
self.assertEqual(sorted(bitstring.bits.REPLACEMENTS_BE.keys()),
sorted(bitstring.bits.PACK_CODE_SIZE.keys()))
for key in bitstring.bits.PACK_CODE_SIZE:
be = pack(bitstring.bits.REPLACEMENTS_BE[key], 0)
le = pack(bitstring.bits.REPLACEMENTS_LE[key], 0)
self.assertEqual(be.len, bitstring.bits.PACK_CODE_SIZE[key] * 8)
self.assertEqual(le.len, be.len)
# These tests don't compile for Python 3, so they're commented out to save me stress.
#def testUnicode(self):
#a = ConstBitStream(u'uint:12=34')
#self.assertEqual(a.uint, 34)
#a += u'0xfe'
#self.assertEqual(a[12:], '0xfe')
#a = BitStream('0x1122')
#c = a.byteswap(u'h')
#self.assertEqual(c, 1)
#self.assertEqual(a, u'0x2211')
#def testLongInt(self):
#a = BitStream(4L)
#self.assertEqual(a, '0b0000')
#a[1:3] = -1L
#self.assertEqual(a, '0b0110')
#a[0] = 1L
#self.assertEqual(a, '0b1110')
#a *= 4L
#self.assertEqual(a, '0xeeee')
#c = a.byteswap(2L)
#self.assertEqual(c, 1)
#a = BitStream('0x11223344')
#a.byteswap([1, 2L])
#self.assertEqual(a, '0x11332244')
#b = a*2L
#self.assertEqual(b, '0x1133224411332244')
#s = pack('uint:12', 46L)
#self.assertEqual(s.uint, 46)
class UnpackWithDict(unittest.TestCase):
def testLengthKeywords(self):
a = ConstBitStream('2*13=100, 0b111')
x, y, z = a.unpack('n, uint:m, bin:q', n=13, m=13, q=3)
self.assertEqual(x, 100)
self.assertEqual(y, 100)
self.assertEqual(z, '111')
def testLengthKeywordsWithStretch(self):
a = ConstBitStream('0xff, 0b000, 0xf')
x, y, z = a.unpack('hex:a, bin, hex:b', a=8, b=4)
self.assertEqual(y, '000')
def testUnusedKeyword(self):
a = ConstBitStream('0b110')
x, = a.unpack('bin:3', notused=33)
self.assertEqual(x, '110')
def testLengthKeywordErrors(self):
a = pack('uint:p=33', p=12)
self.assertRaises(ValueError, a.unpack, 'uint:p')
self.assertRaises(ValueError, a.unpack, 'uint:p', p='a_string')
class ReadWithDict(unittest.TestCase):
def testLengthKeywords(self):
s = BitStream('0x0102')
x, y = s.readlist('a, hex:b', a=8, b=4)
self.assertEqual((x, y), (1, '0'))
self.assertEqual(s.pos, 12)
class PeekWithDict(unittest.TestCase):
def testLengthKeywords(self):
s = BitStream('0x0102')
x, y = s.peeklist('a, hex:b', a=8, b=4)
self.assertEqual((x, y), (1, '0'))
self.assertEqual(s.pos, 0)
##class Miscellany(unittest.TestCase):
##
## def testNumpyInt(self):
## try:
## import numpy
## a = ConstBitStream(uint=numpy.uint8(5), length=3)
## self.assertEqual(a.uint, 5)
## except ImportError:
## # Not to worry
## pass
class BoolToken(unittest.TestCase):
def testInterpretation(self):
a = ConstBitStream('0b1')
self.assertEqual(a.bool, True)
self.assertEqual(a.read('bool'), True)
self.assertEqual(a.unpack('bool')[0], True)
b = ConstBitStream('0b0')
self.assertEqual(b.bool, False)
self.assertEqual(b.peek('bool'), False)
self.assertEqual(b.unpack('bool')[0], False)
def testPack(self):
a = pack('bool=True')
b = pack('bool=False')
self.assertEqual(a.bool, True)
self.assertEqual(b.bool, False)
c = pack('4*bool', False, True, 'False', 'True')
self.assertEqual(c, '0b0101')
def testAssignment(self):
a = BitStream()
a.bool = True
self.assertEqual(a.bool, True)
a.hex = 'ee'
a.bool = False
self.assertEqual(a.bool, False)
a.bool = 'False'
self.assertEqual(a.bool, False)
a.bool = 'True'
self.assertEqual(a.bool, True)
a.bool = 0
self.assertEqual(a.bool, False)
a.bool = 1
self.assertEqual(a.bool, True)
def testErrors(self):
self.assertRaises(bitstring.CreationError, pack, 'bool', 'hello')
self.assertRaises(bitstring.CreationError, pack, 'bool=true')
self.assertRaises(bitstring.CreationError, pack, 'True')
self.assertRaises(bitstring.CreationError, pack, 'bool', 2)
a = BitStream('0b11')
self.assertRaises(bitstring.InterpretError, a._getbool)
b = BitStream()
self.assertRaises(bitstring.InterpretError, a._getbool)
self.assertRaises(bitstring.CreationError, a._setbool, 'false')
def testLengthWithBoolRead(self):
a = ConstBitStream('0xf')
self.assertRaises(ValueError, a.read, 'bool:0')
self.assertRaises(ValueError, a.read, 'bool:1')
self.assertRaises(ValueError, a.read, 'bool:2')
class ReadWithIntegers(unittest.TestCase):
def testReadInt(self):
a = ConstBitStream('0xffeedd')
b = a.read(8)
self.assertEqual(b.hex, 'ff')
self.assertEqual(a.pos, 8)
b = a.peek(8)
self.assertEqual(b.hex, 'ee')
self.assertEqual(a.pos, 8)
b = a.peek(1)
self.assertEqual(b, '0b1')
b = a.read(1)
self.assertEqual(b, '0b1')
def testReadIntList(self):
a = ConstBitStream('0xab, 0b110')
b, c = a.readlist([8, 3])
self.assertEqual(b.hex, 'ab')
self.assertEqual(c.bin, '110')
class FileReadingStrategy(unittest.TestCase):
def testBitStreamIsAlwaysRead(self):
a = BitStream(filename='smalltestfile')
self.assertTrue(isinstance(a._datastore, bitstring.bitstream.ByteStore))
f = open('smalltestfile', 'rb')
b = BitStream(f)
self.assertTrue(isinstance(b._datastore, bitstring.bitstream.ByteStore))
def testBitsIsNeverRead(self):
a = ConstBitStream(filename='smalltestfile')
self.assertTrue(isinstance(a._datastore._rawarray, bitstring.bits.MmapByteArray))
f = open('smalltestfile', 'rb')
b = ConstBitStream(f)
self.assertTrue(isinstance(b._datastore._rawarray, bitstring.bits.MmapByteArray))
class Count(unittest.TestCase):
def testCount(self):
a = ConstBitStream('0xf0f')
self.assertEqual(a.count(True), 8)
self.assertEqual(a.count(False), 4)
b = BitStream()
self.assertEqual(b.count(True), 0)
self.assertEqual(b.count(False), 0)
def testCountWithOffsetData(self):
a = ConstBitStream('0xff0120ff')
b = a[1:-1]
self.assertEqual(b.count(1), 16)
self.assertEqual(b.count(0), 14)
class ZeroBitReads(unittest.TestCase):
def testInteger(self):
a = ConstBitStream('0x123456')
self.assertRaises(bitstring.InterpretError, a.read, 'uint:0')
self.assertRaises(bitstring.InterpretError, a.read, 'float:0')
#class EfficientBitsCopies(unittest.TestCase):
#
# def testBitsCopy(self):
# a = ConstBitStream('0xff')
# b = ConstBitStream(a)
# c = a[:]
# d = copy.copy(a)
# self.assertTrue(a._datastore is b._datastore)
# self.assertTrue(a._datastore is c._datastore)
# self.assertTrue(a._datastore is d._datastore)
class InitialiseFromBytes(unittest.TestCase):
def testBytesBehaviour(self):
a = ConstBitStream(b'uint:5=2')
b = ConstBitStream(b'')
c = ConstBitStream(bytes=b'uint:5=2')
if b'' == '':
# Python 2
self.assertEqual(a, 'uint:5=2')
self.assertFalse(b)
self.assertEqual(c.bytes, b'uint:5=2')
else:
self.assertEqual(a.bytes, b'uint:5=2')
self.assertFalse(b)
self.assertEqual(c, b'uint:5=2')
def testBytearrayBehaviour(self):
a = ConstBitStream(bytearray(b'uint:5=2'))
b = ConstBitStream(bytearray(4))
c = ConstBitStream(bytes=bytearray(b'uint:5=2'))
self.assertEqual(a.bytes, b'uint:5=2')
self.assertEqual(b, '0x00000000')
self.assertEqual(c.bytes, b'uint:5=2')
class CoverageCompletionTests(unittest.TestCase):
def testUeReadError(self):
s = ConstBitStream('0b000000001')
self.assertRaises(bitstring.ReadError, s.read, 'ue')
def testOverwriteWithSelf(self):
s = BitStream('0b1101')
s.overwrite(s)
self.assertEqual(s, '0b1101')
| [
"dwindsor@networksecurityservicesllc.com"
] | dwindsor@networksecurityservicesllc.com |
3df5d2864b2526594da536cb97351fd21b615766 | f8ee3224c99732805a09bd2cb4af787c47d6bc5f | /src/map/Menu.py | 46c1d1c2cc713c69b77073a5d18d639b1ca0b9fc | [] | no_license | q3226257/FreeFantasy_v2 | b57b2e5b4431f8d730ef7d4e6dafb1bf535be138 | d1b5ba206b03a4b57a28a539618772c19f91169e | refs/heads/master | 2020-04-13T15:50:52.044653 | 2019-01-01T15:36:39 | 2019-01-01T15:36:39 | 163,303,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 893 | py | import pygame
import thorpy
from info.App import AppInfo
from main.Constant import *
from info.Items import *
import main.Main
from interface.EventDeal import Update
a = [Big(), Big(), Big()]
class Menu(Update):
def __init__(self, screen: pygame.Surface):
self.screen = screen
rect = pygame.Rect(screen.get_width() / 2, 0, screen.get_width() / 2, screen.get_height())
menu_screen = self.screen.subsurface(rect)
self.table_list: thorpy.TableList = thorpy.TableList(
menu_screen, column_num=5, row_num=10)
def update(self, clock, *params):
# while AppInfo.current_stat == STAT.MENU_STAT:
fps = clock.tick(60)
self.table_list.update(fps, *a)
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
main.Main.change_stat(STAT.MAP_STAT)
| [
"616784622@qq.com"
] | 616784622@qq.com |
7acf6cb2e275ff6ff0ce9cbc15c9a5046904a862 | c8dee6f9c6885a8005765726673062f00cb18565 | /main.py | bb748e27c54729621348c5365dd91804f2f2b9b0 | [] | no_license | bhavanasrini/NFLFinal | 6e41aab7c667cb073b4964d7ea906baa8076393d | 5a877b336834b7ac341da3d8806481e9a0e6c0ab | refs/heads/master | 2021-08-24T13:29:13.029930 | 2017-12-10T01:07:05 | 2017-12-10T01:07:05 | 113,593,079 | 0 | 1 | null | 2017-12-08T18:52:06 | 2017-12-08T16:12:06 | Python | UTF-8 | Python | false | false | 31,166 | py |
def teamname1():
    """Return the ESPN abbreviation of the first team to compare ("GB")."""
    first_team = "GB"
    return first_team
def teamname2():
    """Return the ESPN abbreviation of the second team to compare ("DET")."""
    second_team = "DET"
    return second_team
def list(teamname):
    """Return the ESPN "matchup" (box-score) page URLs for a team's games.

    Args:
        teamname: An ESPN-style team abbreviation such as "KC", "DET", "GB".

    Returns:
        A list of matchup-page URL strings for that team's games (hard-coded
        per team below), or None (implicitly) when the abbreviation does not
        match any branch.

    Note:
        This function shadows the built-in ``list``; code in this module that
        runs after this definition cannot use the ``list()`` constructor.
        The URL sets vary in length per team — presumably games played so
        far, bye weeks excluded — TODO confirm against the schedule.
    """
    # CHIEFS
    if teamname == "KC":
        return ["http://www.espn.com/nfl/matchup?gameId=400951566", "http://www.espn.com/nfl/matchup?gameId=400951636",
                "http://www.espn.com/nfl/matchup?gameId=400951634", "http://www.espn.com/nfl/matchup?gameId=400951752",
                "http://www.espn.com/nfl/matchup?gameId=400951664", "http://www.espn.com/nfl/matchup?gameId=400951776",
                "http://www.espn.com/nfl/matchup?gameId=400951571", "http://www.espn.com/nfl/matchup?gameId=400951737",
                "http://www.espn.com/nfl/matchup?gameId=400951786", "http://www.espn.com/nfl/matchup?gameId=400951595"]
    # LIONS
    if teamname == "DET":
        return ["http://www.espn.com/nfl/matchup?gameId=400951576", "http://www.espn.com/nfl/matchup?gameId=400951681",
                "http://www.espn.com/nfl/matchup?gameId=400951594", "http://www.espn.com/nfl/matchup?gameId=400951724",
                "http://www.espn.com/nfl/matchup?gameId=400951558", "http://www.espn.com/nfl/matchup?gameId=400951704",
                "http://www.espn.com/nfl/matchup?gameId=400951736", "http://www.espn.com/nfl/matchup?gameId=400951790",
                "http://www.espn.com/nfl/matchup?gameId=400951563", "http://www.espn.com/nfl/matchup?gameId=400951701",
                "http://www.espn.com/nfl/matchup?gameId=400951569"]
    # STEELERS
    if teamname == "PIT":
        return ["http://www.espn.com/nfl/matchup?gameId=400951574", "http://www.espn.com/nfl/matchup?gameId=400951643",
                "http://www.espn.com/nfl/matchup?gameId=400951708", "http://www.espn.com/nfl/matchup?gameId=400951655",
                "http://www.espn.com/nfl/matchup?gameId=400951776", "http://www.espn.com/nfl/matchup?gameId=400951609",
                "http://www.espn.com/nfl/matchup?gameId=400951736", "http://www.espn.com/nfl/matchup?gameId=400951565",
                "http://www.espn.com/nfl/matchup?gameId=400951698", "http://www.espn.com/nfl/matchup?gameId=400951633"]
    # RAIDERS
    if teamname == "OAK":
        return ["http://www.espn.com/nfl/matchup?gameId=400951584", "http://www.espn.com/nfl/matchup?gameId=400951669",
                "http://www.espn.com/nfl/matchup?gameId=400951644", "http://www.espn.com/nfl/matchup?gameId=400951744",
                "http://www.espn.com/nfl/matchup?gameId=400951659", "http://www.espn.com/nfl/matchup?gameId=400951779",
                "http://www.espn.com/nfl/matchup?gameId=400951571", "http://www.espn.com/nfl/matchup?gameId=400951706",
                "http://www.espn.com/nfl/matchup?gameId=400951787", "http://www.espn.com/nfl/matchup?gameId=400951815",
                "http://www.espn.com/nfl/matchup?gameId=400951629"]
    # BRONCOS
    if teamname == "DEN":
        return ["http://www.espn.com/nfl/matchup?gameId=400951615","http://www.espn.com/nfl/matchup?gameId=400951673",
                "http://www.espn.com/nfl/matchup?gameId=400951583", "http://www.espn.com/nfl/matchup?gameId=400951744",
                "http://www.espn.com/nfl/matchup?gameId=400951782", "http://www.espn.com/nfl/matchup?gameId=400951624",
                "http://www.espn.com/nfl/matchup?gameId=400951737", "http://www.espn.com/nfl/matchup?gameId=400951760",
                "http://www.espn.com/nfl/matchup?gameId=400951690", "http://www.espn.com/nfl/matchup?gameId=400951810",
                "http://www.espn.com/nfl/matchup?gameId=400951629"]
    # EAGLES
    if teamname == "PHI":
        return ["http://www.espn.com/nfl/matchup?gameId=400951592", "http://www.espn.com/nfl/matchup?gameId=400951636",
                "http://www.espn.com/nfl/matchup?gameId=400951616", "http://www.espn.com/nfl/matchup?gameId=400951738",
                "http://www.espn.com/nfl/matchup?gameId=400951695", "http://www.espn.com/nfl/matchup?gameId=400951641",
                "http://www.espn.com/nfl/matchup?gameId=400951723", "http://www.espn.com/nfl/matchup?gameId=400951651",
                "http://www.espn.com/nfl/matchup?gameId=400951760", "http://www.espn.com/nfl/matchup?gameId=400951817",
                "http://www.espn.com/nfl/matchup?gameId=400951610"]
    # BUCCANEERS
    if teamname == "TB":
        return["http://www.espn.com/nfl/matchup?gameId=400951552","http://www.espn.com/nfl/matchup?gameId=400951645",
               "http://www.espn.com/nfl/matchup?gameId=400951604","http://www.espn.com/nfl/matchup?gameId=400951742",
               "http://www.espn.com/nfl/matchup?gameId=400951770","http://www.espn.com/nfl/matchup?gameId=400951575",
               "http://www.espn.com/nfl/matchup?gameId=400951725", "http://www.espn.com/nfl/matchup?gameId=400951755",
               "http://www.espn.com/nfl/matchup?gameId=400951653", "http://www.espn.com/nfl/matchup?gameId=400981391",
               "http://www.espn.com/nfl/matchup?gameId=400951586"]
    # PANTHERS
    if teamname == "CAR":
        return ["http://www.espn.com/nfl/matchup?gameId=400951605","http://www.espn.com/nfl/matchup?gameId=400951630",
                "http://www.espn.com/nfl/matchup?gameId=400951587","http://www.espn.com/nfl/matchup?gameId=400951727",
                "http://www.espn.com/nfl/matchup?gameId=400951558","http://www.espn.com/nfl/matchup?gameId=400951695",
                "http://www.espn.com/nfl/matchup?gameId=400951578", "http://www.espn.com/nfl/matchup?gameId=400951725",
                "http://www.espn.com/nfl/matchup?gameId=400951749", "http://www.espn.com/nfl/matchup?gameId=400951693",
                "http://www.espn.com/nfl/matchup?gameId=400951606"]
    # BILLS
    if teamname == "BUF":
        return ["http://www.espn.com/nfl/matchup?gameId=400951567","http://www.espn.com/nfl/matchup?gameId=400951630",
                "http://www.espn.com/nfl/matchup?gameId=400951583", "http://www.espn.com/nfl/matchup?gameId=400951685",
                "http://www.espn.com/nfl/matchup?gameId=400951554", "http://www.espn.com/nfl/matchup?gameId=400951575",
                "http://www.espn.com/nfl/matchup?gameId=400951706", "http://www.espn.com/nfl/matchup?gameId=400951743",
                "http://www.espn.com/nfl/matchup?gameId=400951555", "http://www.espn.com/nfl/matchup?gameId=400951807",
                "http://www.espn.com/nfl/matchup?gameId=400951595"]
    # CHARGERS
    if teamname == "LAC":
        return ["http://www.espn.com/nfl/matchup?gameId=400951615", "http://www.espn.com/nfl/matchup?gameId=400951666",
                "http://www.espn.com/nfl/matchup?gameId=400951634", "http://www.espn.com/nfl/matchup?gameId=400951738",
                "http://www.espn.com/nfl/matchup?gameId=400951650", "http://www.espn.com/nfl/matchup?gameId=400951779",
                "http://www.espn.com/nfl/matchup?gameId=400951624", "http://www.espn.com/nfl/matchup?gameId=400951715",
                "http://www.espn.com/nfl/matchup?gameId=400951807", "http://www.espn.com/nfl/matchup?gameId=400951573"]
    # JETS
    if teamname == "NYJ":
        return ["http://www.espn.com/nfl/matchup?gameId=400951567","http://www.espn.com/nfl/matchup?gameId=400951669",
                "http://www.espn.com/nfl/matchup?gameId=400951611","http://www.espn.com/nfl/matchup?gameId=400951556",
                "http://www.espn.com/nfl/matchup?gameId=400951766", "http://www.espn.com/nfl/matchup?gameId=400951598",
                "http://www.espn.com/nfl/matchup?gameId=400951721", "http://www.espn.com/nfl/matchup?gameId=400951743",
                "http://www.espn.com/nfl/matchup?gameId=400951653", "http://www.espn.com/nfl/matchup?gameId=400951606"]
    # FALCONS
    if teamname == "ATL":
        return ["http://www.espn.com/nfl/matchup?gameId=400951570", "http://www.espn.com/nfl/matchup?gameId=400951679",
                "http://www.espn.com/nfl/matchup?gameId=400951594", "http://www.espn.com/nfl/matchup?gameId=400951685",
                "http://www.espn.com/nfl/matchup?gameId=400951697", "http://www.espn.com/nfl/matchup?gameId=400951638",
                "http://www.espn.com/nfl/matchup?gameId=400951721", "http://www.espn.com/nfl/matchup?gameId=400951749",
                "http://www.espn.com/nfl/matchup?gameId=400951686", "http://www.espn.com/nfl/matchup?gameId=400951818",
                "http://www.espn.com/nfl/matchup?gameId=400951586"]
    # SAINTS
    if teamname == "NO":
        # NOTE(review): first entry uses the espn.co.uk domain, unlike the
        # others — presumably still resolves to the same matchup page; verify.
        return ["http://www.espn.co.uk/nfl/matchup?gameId=400951612", "http://www.espn.com/nfl/matchup?gameId=400951639",
                "http://www.espn.com/nfl/matchup?gameId=400951587", "http://www.espn.com/nfl/matchup?gameId=400950241",
                "http://www.espn.com/nfl/matchup?gameId=400951704", "http://www.espn.com/nfl/matchup?gameId=400951585",
                "http://www.espn.com/nfl/matchup?gameId=400951717", "http://www.espn.com/nfl/matchup?gameId=400951755",
                "http://www.espn.com/nfl/matchup?gameId=400951555", "http://www.espn.com/nfl/matchup?gameId=400951614"]
    # VIKINGS
    if teamname == "MIN":
        return ["http://www.espn.co.uk/nfl/matchup?gameId=400951612", "http://www.espn.com/nfl/matchup?gameId=400951643",
                "http://www.espn.com/nfl/matchup?gameId=400951604", "http://www.espn.com/nfl/matchup?gameId=400951724",
                "http://www.espn.com/nfl/matchup?gameId=400951691", "http://www.espn.com/nfl/matchup?gameId=400951702",
                "http://www.espn.com/nfl/matchup?gameId=400951603", "http://www.espn.com/nfl/matchup?gameId=400951683",
                "http://www.espn.com/nfl/matchup?gameId=400951658", "http://www.espn.com/nfl/matchup?gameId=400951775",
                "http://www.espn.com/nfl/matchup?gameId=400951569"]
    # BENGALS
    if teamname == "CIN":
        return ["http://www.espn.com/nfl/matchup?gameId=400951572", "http://www.espn.com/nfl/matchup?gameId=400951620",
                "http://www.espn.com/nfl/matchup?gameId=400951712", "http://www.espn.com/nfl/matchup?gameId=400951554",
                "http://www.espn.com/nfl/matchup?gameId=400951609", "http://www.espn.com/nfl/matchup?gameId=400951711",
                "http://www.espn.com/nfl/matchup?gameId=400951753", "http://www.espn.com/nfl/matchup?gameId=400951656",
                "http://www.espn.com/nfl/matchup?gameId=400951810", "http://www.espn.com/nfl/matchup?gameId=400951588"]
    # RAVENS
    if teamname == "BAL":
        return ["http://www.espn.com/nfl/matchup?gameId=400951572", "http://www.espn.com/nfl/matchup?gameId=400951626",
                "http://www.espn.com/nfl/matchup?gameId=400951579", "http://www.espn.com/nfl/matchup?gameId=400951708",
                "http://www.espn.com/nfl/matchup?gameId=400951659", "http://www.espn.com/nfl/matchup?gameId=400951603",
                "http://www.espn.com/nfl/matchup?gameId=400951670", "http://www.espn.com/nfl/matchup?gameId=400951761",
                "http://www.espn.com/nfl/matchup?gameId=400951703", "http://www.espn.com/nfl/matchup?gameId=400951640"]
    # COLTS
    if teamname == "IND":
        return ['http://www.espn.com/nfl/matchup?gameId=400951597', "http://www.espn.com/nfl/matchup?gameId=400951599",
                "http://www.espn.com/nfl/matchup?gameId=400951747", "http://www.espn.com/nfl/matchup?gameId=400951785",
                "http://www.espn.com/nfl/matchup?gameId=400951589", "http://www.espn.com/nfl/matchup?gameId=400951711",
                "http://www.espn.com/nfl/matchup?gameId=400951751", "http://www.espn.com/nfl/matchup?gameId=400951565",
                "http://www.espn.com/nfl/matchup?gameId=400951591"]
    # RAMS
    if teamname == "LAR":
        return ['http://www.espn.com/nfl/matchup?gameId=400951597', "http://www.espn.com/nfl/matchup?gameId=400951674",
                "http://www.espn.com/nfl/matchup?gameId=400951568", "http://www.espn.com/nfl/matchup?gameId=400951716",
                "http://www.espn.com/nfl/matchup?gameId=400951657", "http://www.espn.com/nfl/matchup?gameId=400951773",
                "http://www.espn.com/nfl/matchup?gameId=400951593", "http://www.espn.com/nfl/matchup?gameId=400951758",
                "http://www.espn.com/nfl/matchup?gameId=400951663", "http://www.espn.com/nfl/matchup?gameId=400951775",
                "http://www.espn.com/nfl/matchup?gameId=400951614"]
    # REDSKINS
    if teamname == "WAS":
        return ["http://www.espn.com/nfl/matchup?gameId=400951592", "http://www.espn.com/nfl/matchup?gameId=400951644",
                "http://www.espn.com/nfl/matchup?gameId=400951674", "http://www.espn.com/nfl/matchup?gameId=400951752",
                "http://www.espn.com/nfl/matchup?gameId=400951767", "http://www.espn.com/nfl/matchup?gameId=400951641",
                "http://www.espn.com/nfl/matchup?gameId=400951732", "http://www.espn.com/nfl/matchup?gameId=400951765",
                "http://www.espn.com/nfl/matchup?gameId=400951658", "http://www.espn.com/nfl/matchup?gameId=400951577"]
    # BROWNS
    if teamname == "CLE":
        return ["http://www.espn.com/nfl/matchup?gameId=400951574","http://www.espn.com/nfl/matchup?gameId=400951626",
                "http://www.espn.com/nfl/matchup?gameId=400951599", "http://www.espn.com/nfl/matchup?gameId=400951712",
                "http://www.espn.com/nfl/matchup?gameId=400951556", "http://www.espn.com/nfl/matchup?gameId=400951700",
                "http://www.espn.com/nfl/matchup?gameId=400951683", "http://www.espn.com/nfl/matchup?gameId=400951563",
                "http://www.espn.com/nfl/matchup?gameId=400951769", "http://www.espn.com/nfl/matchup?gameId=400951588"]
    # PATRIOTS
    if teamname == "NE":
        return ["http://www.espn.com/nfl/matchup?gameId=400951566", "http://www.espn.com/nfl/matchup?gameId=400951639",
                "http://www.espn.com/nfl/matchup?gameId=400951607", "http://www.espn.com/nfl/matchup?gameId=400951727",
                "http://www.espn.com/nfl/matchup?gameId=400951552", "http://www.espn.com/nfl/matchup?gameId=400951766",
                "http://www.espn.com/nfl/matchup?gameId=400951638", "http://www.espn.com/nfl/matchup?gameId=400951715",
                "http://www.espn.com/nfl/matchup?gameId=400951690", "http://www.espn.com/nfl/matchup?gameId=400951815",
                "http://www.espn.com/nfl/matchup?gameId=400951600"]
    # BEARS
    if teamname == "CHI":
        return ["http://www.espn.com/nfl/matchup?gameId=400951570", "http://www.espn.com/nfl/matchup?gameId=400951645",
                "http://www.espn.com/nfl/matchup?gameId=400951678", "http://www.espn.com/nfl/matchup?gameId=400951691",
                "http://www.espn.com/nfl/matchup?gameId=400951578", "http://www.espn.com/nfl/matchup?gameId=400951717",
                "http://www.espn.com/nfl/matchup?gameId=400951559", "http://www.espn.com/nfl/matchup?gameId=400951701",
                "http://www.espn.com/nfl/matchup?gameId=400951610"]
    # TITANS
    if teamname == "TEN":
        return ["http://www.espn.com/nfl/matchup?gameId=400951584", "http://www.espn.com/nfl/matchup?gameId=400951635",
                "http://www.espn.com/nfl/matchup?gameId=400951623", "http://www.espn.com/nfl/matchup?gameId=400951720",
                "http://www.espn.com/nfl/matchup?gameId=400951646", "http://www.espn.com/nfl/matchup?gameId=400951785",
                "http://www.espn.com/nfl/matchup?gameId=400951761", "http://www.espn.com/nfl/matchup?gameId=400951656",
                "http://www.espn.com/nfl/matchup?gameId=400951698", "http://www.espn.com/nfl/matchup?gameId=400951591"]
    # JAGUARS
    if teamname == "JAX":
        return ["http://www.espn.com/nfl/matchup?gameId=400951580", "http://www.espn.com/nfl/matchup?gameId=400951635",
                "http://www.espn.com/nfl/matchup?gameId=400951579", "http://www.espn.com/nfl/matchup?gameId=400951655",
                "http://www.espn.com/nfl/matchup?gameId=400951773", "http://www.espn.com/nfl/matchup?gameId=400951589",
                "http://www.espn.com/nfl/matchup?gameId=400951753", "http://www.espn.com/nfl/matchup?gameId=400951622",
                "http://www.espn.com/nfl/matchup?gameId=400951769" ]
    # TEXANS
    if teamname == "HOU":
        return ["http://www.espn.com/nfl/matchup?gameId=400951580", "http://www.espn.com/nfl/matchup?gameId=400951620",
                "http://www.espn.com/nfl/matchup?gameId=400951607", "http://www.espn.com/nfl/matchup?gameId=400951720",
                "http://www.espn.com/nfl/matchup?gameId=400951664", "http://www.espn.com/nfl/matchup?gameId=400951700",
                "http://www.espn.com/nfl/matchup?gameId=400951729", "http://www.espn.com/nfl/matchup?gameId=400951751",
                "http://www.espn.com/nfl/matchup?gameId=400951663", "http://www.espn.com/nfl/matchup?gameId=400951771",
                "http://www.espn.com/nfl/matchup?gameId=400951640"]
    # COWBOYS
    if teamname == "DAL":
        return ["http://www.espn.com/nfl/matchup?gameId=400951608","http://www.espn.com/nfl/matchup?gameId=400951673",
                "http://www.espn.com/nfl/matchup?gameId=400951668", "http://www.espn.com/nfl/matchup?gameId=400951716",
                "http://www.espn.com/nfl/matchup?gameId=400951661", "http://www.espn.com/nfl/matchup?gameId=400951619",
                "http://www.espn.com/nfl/matchup?gameId=400951732", "http://www.espn.com/nfl/matchup?gameId=400951786",
                "http://www.espn.com/nfl/matchup?gameId=400951686", "http://www.espn.com/nfl/matchup?gameId=400951817",
                "http://www.espn.com/nfl/matchup?gameId=400951573"]
    # CARDINALS
    if teamname == "ARI":
        return ["http://www.espn.com/nfl/matchup?gameId=400951576", "http://www.espn.com/nfl/matchup?gameId=400951668",
                "http://www.espn.com/nfl/matchup?gameId=400951651", "http://www.espn.com/nfl/matchup?gameId=400951770",
                "http://www.espn.com/nfl/matchup?gameId=400951593", "http://www.espn.com/nfl/matchup?gameId=400951763",
                "http://www.espn.com/nfl/matchup?gameId=400951553", "http://www.espn.com/nfl/matchup?gameId=400951771",
                "http://www.espn.com/nfl/matchup?gameId=400951622"]
    # 49ERS
    if teamname == "SFO":
        return ["http://www.espn.com/nfl/matchup?gameId=400951605", "http://www.espn.com/nfl/matchup?gameId=400951676",
                "http://www.espn.com/nfl/matchup?gameId=400951568", "http://www.espn.com/nfl/matchup?gameId=400951767",
                "http://www.espn.com/nfl/matchup?gameId=400951619", "http://www.espn.com/nfl/matchup?gameId=400951723",
                "http://www.espn.com/nfl/matchup?gameId=400951763", "http://www.espn.com/nfl/matchup?gameId=400951688",
                "http://www.espn.com/nfl/matchup?gameId=400951618"]
    # SEAHAWKS
    if teamname == "SEA":
        return ["http://www.espn.com/nfl/matchup?gameId=400951601", "http://www.espn.com/nfl/matchup?gameId=400951676",
                "http://www.espn.com/nfl/matchup?gameId=400951623", "http://www.espn.com/nfl/matchup?gameId=400951747",
                "http://www.espn.com/nfl/matchup?gameId=400951657", "http://www.espn.com/nfl/matchup?gameId=400951628",
                "http://www.espn.com/nfl/matchup?gameId=400951729", "http://www.espn.com/nfl/matchup?gameId=400951765",
                "http://www.espn.com/nfl/matchup?gameId=400951553", "http://www.espn.com/nfl/matchup?gameId=400951818",
                "http://www.espn.com/nfl/matchup?gameId=400951618"]
    # PACKERS
    if teamname == "GB":
        return ["http://www.espn.com/nfl/matchup?gameId=400951601", "http://www.espn.com/nfl/matchup?gameId=400951679",
                "http://www.espn.com/nfl/matchup?gameId=400951678", "http://www.espn.com/nfl/matchup?gameId=400951661",
                "http://www.espn.com/nfl/matchup?gameId=400951702", "http://www.espn.com/nfl/matchup?gameId=400951585",
                "http://www.espn.com/nfl/matchup?gameId=400951790", "http://www.espn.com/nfl/matchup?gameId=400951559",
                "http://www.espn.com/nfl/matchup?gameId=400951703", "http://www.espn.com/nfl/matchup?gameId=400951633"]
    # GIANTS
    if teamname == "NYG":
        return ["http://www.espn.com/nfl/matchup?gameId=400951608", "http://www.espn.com/nfl/matchup?gameId=400951681",
                "http://www.espn.com/nfl/matchup?gameId=400951616", "http://www.espn.com/nfl/matchup?gameId=400951742",
                "http://www.espn.com/nfl/matchup?gameId=400951650", "http://www.espn.com/nfl/matchup?gameId=400951782",
                "http://www.espn.com/nfl/matchup?gameId=400951628", "http://www.espn.com/nfl/matchup?gameId=400951758",
                "http://www.espn.com/nfl/matchup?gameId=400951688", "http://www.espn.com/nfl/matchup?gameId=400951577"]
    # DOLPHINS
    if teamname == "MIA":
        return ["http://www.espn.com/nfl/matchup?gameId=400951666", "http://www.espn.com/nfl/matchup?gameId=400951611",
                "http://www.espn.com/nfl/matchup?gameId=400950241", "http://www.espn.com/nfl/matchup?gameId=400951646",
                "http://www.espn.com/nfl/matchup?gameId=400951697", "http://www.espn.com/nfl/matchup?gameId=400951598",
                "http://www.espn.com/nfl/matchup?gameId=400951670", "http://www.espn.com/nfl/matchup?gameId=400951787",
                "http://www.espn.com/nfl/matchup?gameId=400951693", "http://www.espn.com/nfl/matchup?gameId=400981391",
                "http://www.espn.com/nfl/matchup?gameId=400951600"]
# HAVE FOUR WEEKS
def masterlist():
    """Return ESPN box-score ("matchup") page URLs for the full set of tracked
    2017 NFL games.

    Each entry is keyed by an ESPN gameId query parameter.  The function is a
    hard-coded lookup table; the commented guard below suggests it was once a
    branch of the team-name dispatcher above.
    """
    ##if teamname() == "MASTER":
    return ["http://www.espn.com/nfl/matchup?gameId=400951574", "http://www.espn.com/nfl/matchup?gameId=400951566",
            "http://www.espn.com/nfl/matchup?gameId=400951570","http://www.espn.com/nfl/matchup?gameId=400951592",
            "http://www.espn.com/nfl/matchup?gameId=400951567","http://www.espn.com/nfl/matchup?gameId=400951584",
            "http://www.espn.com/nfl/matchup?gameId=400951580","http://www.espn.com/nfl/matchup?gameId=400951669",
            "http://www.espn.com/nfl/matchup?gameId=400951644","http://www.espn.com/nfl/matchup?gameId=400951615",
            "http://www.espn.com/nfl/matchup?gameId=400951673", "http://www.espn.com/nfl/matchup?gameId=400951583",
            "http://www.espn.com/nfl/matchup?gameId=400951576", "http://www.espn.com/nfl/matchup?gameId=400951572",
            "http://www.espn.com/nfl/matchup?gameId=400951597", "http://www.espn.com/nfl/matchup?gameId=400951605",
            "http://www.espn.com/nfl/matchup?gameId=400951601", "http://www.espn.com/nfl/matchup?gameId=400951608",
            "http://www.espn.com/nfl/matchup?gameId=400951612", "http://www.espn.com/nfl/matchup?gameId=400951620",
            "http://www.espn.com/nfl/matchup?gameId=400951636", "http://www.espn.com/nfl/matchup?gameId=400951635",
            "http://www.espn.com/nfl/matchup?gameId=400951645", "http://www.espn.com/nfl/matchup?gameId=400951630",
            "http://www.espn.com/nfl/matchup?gameId=400951626", "http://www.espn.com/nfl/matchup?gameId=400951643",
            "http://www.espn.com/nfl/matchup?gameId=400951639", "http://www.espn.com/nfl/matchup?gameId=400951666",
            "http://www.espn.com/nfl/matchup?gameId=400951676", "http://www.espn.com/nfl/matchup?gameId=400951674",
            "http://www.espn.com/nfl/matchup?gameId=400951679", "http://www.espn.com/nfl/matchup?gameId=400951681",
            "http://www.espn.com/nfl/matchup?gameId=400951568", "http://www.espn.com/nfl/matchup?gameId=400951579",
            "http://www.espn.com/nfl/matchup?gameId=400951594", "http://www.espn.com/nfl/matchup?gameId=400951599",
            "http://www.espn.com/nfl/matchup?gameId=400951587", "http://www.espn.com/nfl/matchup?gameId=400951616",
            "http://www.espn.com/nfl/matchup?gameId=400951611", "http://www.espn.com/nfl/matchup?gameId=400951607",
            "http://www.espn.com/nfl/matchup?gameId=400951604", "http://www.espn.com/nfl/matchup?gameId=400951623",
            "http://www.espn.com/nfl/matchup?gameId=400951634", "http://www.espn.com/nfl/matchup?gameId=400951668",
            "http://www.espn.com/nfl/matchup?gameId=400951678", "http://www.espn.com/nfl/matchup?gameId=400950241",
            "http://www.espn.com/nfl/matchup?gameId=400951720", "http://www.espn.com/nfl/matchup?gameId=400951716",
            "http://www.espn.com/nfl/matchup?gameId=400951712", "http://www.espn.com/nfl/matchup?gameId=400951685",
            "http://www.espn.com/nfl/matchup?gameId=400951708", "http://www.espn.com/nfl/matchup?gameId=400951727",
            "http://www.espn.com/nfl/matchup?gameId=400951724", "http://www.espn.com/nfl/matchup?gameId=400951742",
            "http://www.espn.com/nfl/matchup?gameId=400951738", "http://www.espn.com/nfl/matchup?gameId=400951744",
            "http://www.espn.com/nfl/matchup?gameId=400951747", "http://www.espn.com/nfl/matchup?gameId=400951752",
            "http://www.espn.com/nfl/matchup?gameId=400951552", "http://www.espn.com/nfl/matchup?gameId=400951558",
            "http://www.espn.com/nfl/matchup?gameId=400951556", "http://www.espn.com/nfl/matchup?gameId=400951554",
            "http://www.espn.com/nfl/matchup?gameId=400951655", "http://www.espn.com/nfl/matchup?gameId=400951651",
            "http://www.espn.com/nfl/matchup?gameId=400951650", "http://www.espn.com/nfl/matchup?gameId=400951646",
            "http://www.espn.com/nfl/matchup?gameId=400951657", "http://www.espn.com/nfl/matchup?gameId=400951659",
            "http://www.espn.com/nfl/matchup?gameId=400951661", "http://www.espn.com/nfl/matchup?gameId=400951664",
            "http://www.espn.com/nfl/matchup?gameId=400951691", "http://www.espn.com/nfl/matchup?gameId=400951695",
            "http://www.espn.com/nfl/matchup?gameId=400951700", "http://www.espn.com/nfl/matchup?gameId=400951767",
            "http://www.espn.com/nfl/matchup?gameId=400951697", "http://www.espn.com/nfl/matchup?gameId=400951766",
            "http://www.espn.com/nfl/matchup?gameId=400951704", "http://www.espn.com/nfl/matchup?gameId=400951702",
            "http://www.espn.com/nfl/matchup?gameId=400951773", "http://www.espn.com/nfl/matchup?gameId=400951770",
            "http://www.espn.com/nfl/matchup?gameId=400951776", "http://www.espn.com/nfl/matchup?gameId=400951779",
            "http://www.espn.com/nfl/matchup?gameId=400951782", "http://www.espn.com/nfl/matchup?gameId=400951785",
            "http://www.espn.com/nfl/matchup?gameId=400951571", "http://www.espn.com/nfl/matchup?gameId=400951578",
            "http://www.espn.com/nfl/matchup?gameId=400951575", "http://www.espn.com/nfl/matchup?gameId=400951593",
            "http://www.espn.com/nfl/matchup?gameId=400951603", "http://www.espn.com/nfl/matchup?gameId=400951585",
            "http://www.espn.com/nfl/matchup?gameId=400951589", "http://www.espn.com/nfl/matchup?gameId=400951619",
            "http://www.espn.com/nfl/matchup?gameId=400951624", "http://www.espn.com/nfl/matchup?gameId=400951609",
            "http://www.espn.com/nfl/matchup?gameId=400951598", "http://www.espn.com/nfl/matchup?gameId=400951628",
            "http://www.espn.com/nfl/matchup?gameId=400951638", "http://www.espn.com/nfl/matchup?gameId=400951641",
            "http://www.espn.com/nfl/matchup?gameId=400951670", "http://www.espn.com/nfl/matchup?gameId=400951683",
            "http://www.espn.com/nfl/matchup?gameId=400951711", "http://www.espn.com/nfl/matchup?gameId=400951706",
            "http://www.espn.com/nfl/matchup?gameId=400951725", "http://www.espn.com/nfl/matchup?gameId=400951723",
            "http://www.espn.com/nfl/matchup?gameId=400951721", "http://www.espn.com/nfl/matchup?gameId=400951715",
            "http://www.espn.com/nfl/matchup?gameId=400951717", "http://www.espn.com/nfl/matchup?gameId=400951729",
            "http://www.espn.com/nfl/matchup?gameId=400951732", "http://www.espn.com/nfl/matchup?gameId=400951736",
            "http://www.espn.com/nfl/matchup?gameId=400951737", "http://www.espn.com/nfl/matchup?gameId=400951743",
            "http://www.espn.com/nfl/matchup?gameId=400951749", "http://www.espn.com/nfl/matchup?gameId=400951760",
            "http://www.espn.com/nfl/matchup?gameId=400951761", "http://www.espn.com/nfl/matchup?gameId=400951758",
            "http://www.espn.com/nfl/matchup?gameId=400951755", "http://www.espn.com/nfl/matchup?gameId=400951753",
            "http://www.espn.com/nfl/matchup?gameId=400951751", "http://www.espn.com/nfl/matchup?gameId=400951763",
            "http://www.espn.com/nfl/matchup?gameId=400951765", "http://www.espn.com/nfl/matchup?gameId=400951786",
            "http://www.espn.com/nfl/matchup?gameId=400951787", "http://www.espn.com/nfl/matchup?gameId=400951790",
            "http://www.espn.com/nfl/matchup?gameId=400951553", "http://www.espn.com/nfl/matchup?gameId=400951656",
            "http://www.espn.com/nfl/matchup?gameId=400951563",
            "http://www.espn.com/nfl/matchup?gameId=400951565", "http://www.espn.com/nfl/matchup?gameId=400951559",
            "http://www.espn.com/nfl/matchup?gameId=400951555", "http://www.espn.com/nfl/matchup?gameId=400951658",
            "http://www.espn.com/nfl/matchup?gameId=400951653", "http://www.espn.com/nfl/matchup?gameId=400951663",
            "http://www.espn.com/nfl/matchup?gameId=400951686", "http://www.espn.com/nfl/matchup?gameId=400951688",
            "http://www.espn.com/nfl/matchup?gameId=400951690", "http://www.espn.com/nfl/matchup?gameId=400951693",
            "http://www.espn.com/nfl/matchup?gameId=400951698", "http://www.espn.com/nfl/matchup?gameId=400951775",
            "http://www.espn.com/nfl/matchup?gameId=400981391", "http://www.espn.com/nfl/matchup?gameId=400951771",
            "http://www.espn.com/nfl/matchup?gameId=400951703", "http://www.espn.com/nfl/matchup?gameId=400951769",
            "http://www.espn.com/nfl/matchup?gameId=400951701", "http://www.espn.com/nfl/matchup?gameId=400951807",
            "http://www.espn.com/nfl/matchup?gameId=400951810", "http://www.espn.com/nfl/matchup?gameId=400951815",
            "http://www.espn.com/nfl/matchup?gameId=400951817", "http://www.espn.com/nfl/matchup?gameId=400951818",
            "http://www.espn.com/nfl/matchup?gameId=400951569", "http://www.espn.com/nfl/matchup?gameId=400951573",
            "http://www.espn.com/nfl/matchup?gameId=400951577", "http://www.espn.com/nfl/matchup?gameId=400951600",
            "http://www.espn.com/nfl/matchup?gameId=400951595", "http://www.espn.com/nfl/matchup?gameId=400951591",
            "http://www.espn.com/nfl/matchup?gameId=400951588", "http://www.espn.com/nfl/matchup?gameId=400951586",
            "http://www.espn.com/nfl/matchup?gameId=400951610", "http://www.espn.com/nfl/matchup?gameId=400951606",
            "http://www.espn.com/nfl/matchup?gameId=400951618", "http://www.espn.com/nfl/matchup?gameId=400951622",
            "http://www.espn.com/nfl/matchup?gameId=400951614", "http://www.espn.com/nfl/matchup?gameId=400951629",
            "http://www.espn.com/nfl/matchup?gameId=400951633", "http://www.espn.com/nfl/matchup?gameId=400951640"]
| [
"crystinrodrick7@hotmail.com"
] | crystinrodrick7@hotmail.com |
fc286ee99bce63c3cf6dad314ec4c1925f581e0f | 7a98fb6d68f06f7926ed4a27e58ea0721d03e008 | /utils/utils.py | a81cc75cb506506a3f3fc833b1e91252bbd44802 | [] | no_license | d-c-a-i/DualVar | 6ae0515e1ec2a35f27aab33cea008a74bb614f0d | 244b19e29b9a8f67db94c41faeb50b91ec5f45f9 | refs/heads/master | 2023-08-31T13:02:07.917998 | 2021-10-22T17:09:44 | 2021-10-22T17:09:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,824 | py | import os
import glob
import math
import pickle
import numpy as np
import torch
from torchvision import transforms
import torch.distributed as dist
from datetime import datetime
from collections import deque
import time
from numba import jit
from shutil import copyfile
from dataloader import KVReader
def save_checkpoint(state, is_best=0, gap=1, filename='models/checkpoint.pth.tar', keep_all=False, is_save=True, save_latest=False):
    """Save a training checkpoint and maintain epoch / best / latest copies.

    Parameters:
        state:       dict to serialize; must contain an 'epoch' key.
        is_best:     truthy when this checkpoint is the best so far; writes a
                     'model_best_epoch<N>.pth.tar' copy and prunes so only the
                     five most recent best files are kept.
        gap:         epoch distance to the previous checkpoint; the file for
                     epoch (state['epoch'] - gap) is deleted unless keep_all.
        filename:    target path of the regular checkpoint.
        keep_all:    keep every per-epoch checkpoint instead of pruning.
        is_save:     actually write the regular checkpoint.
        save_latest: additionally overwrite '<dir>/latest.pth.tar'.
    """
    ckpt_dir = os.path.dirname(filename)
    last_epoch_path = os.path.join(ckpt_dir, 'epoch%s.pth.tar' % str(state['epoch'] - gap))
    if not keep_all:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit are
        # no longer swallowed; only filesystem errors (missing file) pass.
        try:
            os.remove(last_epoch_path)
        except OSError:
            pass
    if is_save:
        torch.save(state, filename)
        if save_latest:
            latest_filename = os.path.join(ckpt_dir, 'latest.pth.tar')
            if os.path.exists(latest_filename):
                os.remove(latest_filename)
            # NOTE(review): the pause before rewriting 'latest' looks like a
            # workaround for slow shared filesystems -- confirm before removing.
            time.sleep(3)
            torch.save(state, latest_filename)
    if is_best:
        # Sort past best checkpoints by the digits in their filenames (epoch
        # numbers) and drop the oldest once five are accumulated.
        past_best = glob.glob(os.path.join(ckpt_dir, 'model_best_*.pth.tar'))
        past_best = sorted(past_best, key=lambda x: int(''.join(filter(str.isdigit, x))))
        if len(past_best) >= 5:
            try:
                os.remove(past_best[0])
            except OSError:
                pass
        torch.save(state, os.path.join(ckpt_dir, 'model_best_epoch%s.pth.tar' % str(state['epoch'])))
def write_log(content, epoch, filename):
    """Append an epoch-stamped entry to a text log file.

    Parameters:
        content:  free-form text written below the header.
        epoch:    epoch number written in the '## Epoch N:' header.
        filename: log file path; created on first use.
    """
    # Mode 'a' already creates the file when it is missing, so the original
    # exists-check ('w' vs 'a') was redundant; the context manager guarantees
    # the handle is closed even if a write raises.
    with open(filename, 'a') as log_file:
        log_file.write('## Epoch %d:\n' % epoch)
        log_file.write('time: %s\n' % str(datetime.now()))
        log_file.write(content + '\n\n')
def denorm(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)):
    """Return a torchvision transform that inverts Normalize(mean, std).

    For Normalize, x' = (x - m) / s, so the inverse is another Normalize with
    mean' = -m/s and std' = 1/s.  Defaults are the ImageNet statistics.

    Fix: the defaults were mutable list arguments; tuples avoid the shared
    mutable-default pitfall while remaining backward compatible (callers may
    still pass lists).
    """
    assert len(mean) == len(std) == 3
    inv_mean = [-m / s for m, s in zip(mean, std)]
    inv_std = [1 / s for s in std]
    return transforms.Normalize(mean=inv_mean, std=inv_std)
def batch_denorm(tensor, mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225], channel=1):
    """Undo per-channel normalization on a batched tensor: x * std + mean,
    broadcasting along the given channel dimension."""
    # Broadcast shape: 1 everywhere except the 3-wide channel axis.
    view_shape = [1] * tensor.dim()
    view_shape[channel] = 3
    mean_t = torch.as_tensor(mean, dtype=tensor.dtype, device=tensor.device).view(view_shape)
    std_t = torch.as_tensor(std, dtype=tensor.dtype, device=tensor.device).view(view_shape)
    return tensor.mul(std_t).add(mean_t)
def calc_topk_accuracy(output, target, topk=(1,)):
    """Fraction of samples whose true label appears in the top-k predictions.

    output: (batch, classes) scores; target: (batch,) class indices.
    Returns one scalar tensor per requested k.
    """
    k_max = max(topk)
    n_samples = target.size(0)
    # Indices of the k_max highest-scoring classes, transposed to
    # (k_max, batch) so row slicing selects the top-k ranks.
    _, ranked = output.topk(k_max, 1, True, True)
    ranked = ranked.t()
    hits = ranked.eq(target.view(1, -1).expand_as(ranked)).contiguous()
    return [hits[:k].reshape(-1).float().sum(0).mul_(1 / n_samples) for k in topk]
def calc_mask_accuracy(output, target_mask, topk=(1,)):
    """Multi-label top-k accuracy: a sample counts as correct at rank k when
    any of its top-k predictions falls inside target_mask (0/1 matrix)."""
    k_max = max(topk)
    _, ranked = output.topk(k_max, 1, True, True)
    template = torch.zeros_like(target_mask).long()
    accum = torch.zeros_like(target_mask).long()
    results = []
    for rank in range(k_max):
        col = ranked[:, rank].unsqueeze(1)
        # Accumulate a one-hot of this rank's prediction into the running mask.
        accum = accum + template.scatter(1, col, 1)
        if rank + 1 in topk:
            hit_rate = ((accum * target_mask).sum(1) >= 1).float().mean(0)
            results.append(hit_rate)
    return results
def neq_load_customized(model, pretrained_dict, verbose=True, args=None):
    """Load a pretrained state dict into a partially modified model, keeping
    only the weights whose keys still exist, and log what was skipped."""
    assert args is not None
    log = args.logger.info
    current = model.state_dict()
    if verbose:
        log("\n=======Check Weights Loading======\nWeights not used from pretrained file:")
    matched = {}
    for key, value in pretrained_dict.items():
        if key in current:
            matched[key] = value
        else:
            log(key)
    log('---------------------------')
    log('Weights not loaded into new model:')
    for key in current:
        if key not in pretrained_dict:
            log(key)
    log('===================================\n')
    # Overwrite the matching entries and push the merged dict into the model.
    current.update(matched)
    model.load_state_dict(current)
    return model
def strfdelta(tdelta, fmt):
    """Format a timedelta with a str.format template using the keys
    d (days), h (hours), m (minutes) and s (seconds)."""
    hours, remainder = divmod(tdelta.seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    return fmt.format(d=tdelta.days, h=hours, m=minutes, s=seconds)
class Logger(object):
    """Append timestamped lines to a plain-text log file named after the
    creation time of the logger."""

    def __init__(self, path):
        # Remember creation time; every entry is stamped relative to it.
        self.birth_time = datetime.now()
        stamp = self.birth_time.strftime('%Y-%m-%d-%H:%M:%S')
        self.filepath = os.path.join(path, stamp + '.log')
        with open(self.filepath, 'a') as handle:
            handle.write(self.birth_time.strftime('%Y-%m-%d %H:%M:%S') + '\n')

    def log(self, string):
        """Write one line prefixed with the elapsed time since creation."""
        elapsed = datetime.now() - self.birth_time
        prefix = strfdelta(elapsed, "{d}-{h:02d}:{m:02d}:{s:02d}")
        with open(self.filepath, 'a') as handle:
            handle.write(prefix + '\t' + string + '\n')
class AverageMeter(object):
    """Computes and stores the average and current value"""
    def __init__(self, name='null', fmt=':.4f'):
        # name: label used when printing; fmt: format spec applied to values.
        self.name = name
        self.fmt = fmt
        self.reset()
    def reset(self):
        """Clear every running statistic."""
        self.val = 0            # most recent value
        self.avg = 0            # global running average (sum / count)
        self.sum = 0            # weighted sum of all values
        self.count = 0          # total weight seen so far
        self.local_history = deque([])   # sliding window of recent values
        self.local_avg = 0               # average over the sliding window
        self.history = []                # full value history (opt-in)
        self.dict = {} # save all data values here
        self.save_dict = {} # save mean and std here, for summary table
    def update(self, val, n=1, history=0, step=5):
        """Record a new value with weight n.

        history: truthy -> also append to the full history list.
        step:    sliding-window length for the local average (0 disables).
        """
        self.val = val
        self.sum += val * n
        self.count += n
        # Guard against a zero-weight update producing a division by zero.
        if n == 0: return
        self.avg = self.sum / self.count
        if history:
            self.history.append(val)
        if step > 0:
            self.local_history.append(val)
            if len(self.local_history) > step:
                self.local_history.popleft()
            self.local_avg = np.average(self.local_history)
    def dict_update(self, val, key):
        """Append val to the per-key data series in self.dict."""
        if key in self.dict.keys():
            self.dict[key].append(val)
        else:
            self.dict[key] = [val]
    def print_dict(self, title='IoU', save_data=False):
        """Print summary, clear self.dict and save mean+std in self.save_dict"""
        total = []
        for key in self.dict.keys():
            val = self.dict[key]
            avg_val = np.average(val)
            len_val = len(val)
            std_val = np.std(val)
            # Accumulate per-key (mean, std) pairs across successive calls.
            if key in self.save_dict.keys():
                self.save_dict[key].append([avg_val, std_val])
            else:
                self.save_dict[key] = [[avg_val, std_val]]
            print('Activity:%s, mean %s is %0.4f, std %s is %0.4f, length of data is %d' \
                % (key, title, avg_val, title, std_val, len_val))
            total.extend(val)
        # Per-key data is consumed once printed; the summary table persists.
        self.dict = {}
        avg_total = np.average(total)
        len_total = len(total)
        std_total = np.std(total)
        print('\nOverall: mean %s is %0.4f, std %s is %0.4f, length of data is %d \n' \
            % (title, avg_total, title, std_total, len_total))
        if save_data:
            # Writes to a hard-coded 'img/' directory, which must exist.
            print('Save %s pickle file' % title)
            with open('img/%s.pickle' % title, 'wb') as f:
                pickle.dump(self.save_dict, f)
    def __len__(self):
        return self.count
    def __str__(self):
        # Full form: "<name> <val> (<avg>)".
        fmtstr = '{name} {val' + self.fmt + '} ({avg' + self.fmt + '})'
        return fmtstr.format(**self.__dict__)
    def mini_str(self):
        """Short form: name and current value only."""
        fmtstr = '{name} {val' + self.fmt + '}'
        return fmtstr.format(**self.__dict__)
class ProgressMeter(object):
    """Pretty-print a '[batch/total]' counter followed by a set of meters."""

    def __init__(self, num_batches, meters, prefix="", logger=None):
        self.batch_fmtstr = self._get_batch_fmtstr(num_batches)
        self.meters = meters
        self.prefix = prefix
        self.logger = logger

    def display(self, batch):
        """Emit one progress line via the logger, or print when absent."""
        parts = [self.prefix + self.batch_fmtstr.format(batch)]
        for meter in self.meters:
            parts.append(meter.mini_str())
        line = ". ".join(parts)
        if self.logger:
            self.logger.info(line)
        else:
            print(line)

    def _get_batch_fmtstr(self, num_batches):
        # Width of the counter field equals the digit count of the total.
        width = len(str(num_batches // 1))
        cell = '{:' + str(width) + 'd}'
        return '[' + cell + '/' + cell.format(num_batches) + ']'
### from: https://github.com/pytorch/pytorch/issues/15849#issuecomment-518126031
class _RepeatSampler(object):
""" Sampler that repeats forever.
Args:
sampler (Sampler)
"""
def __init__(self, sampler):
self.sampler = sampler
def __iter__(self):
while True:
yield from iter(self.sampler)
class Timer():
    """Wall-clock stopwatch started at construction time."""

    def __init__(self):
        # Start timestamp (seconds since the epoch).
        self.o = time.time()

    def measure(self, p=1):
        """Return (elapsed / p) as a short human-readable string
        ('Nh', 'Nm' or 'Ns')."""
        elapsed = int((time.time() - self.o) / p)
        if elapsed >= 3600:
            return '{:.1f}h'.format(elapsed / 3600)
        if elapsed >= 60:
            return '{}m'.format(round(elapsed / 60))
        return '{}s'.format(elapsed)
# https://github.com/pytorch/pytorch/issues/15849#issuecomment-573921048
class FastDataLoader(torch.utils.data.dataloader.DataLoader):
    '''for reusing cpu workers, to save time'''
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # DataLoader.__setattr__ forbids replacing batch_sampler after
        # construction, so the check is bypassed via object.__setattr__.
        object.__setattr__(self, 'batch_sampler', _RepeatSampler(self.batch_sampler))
        # self.batch_sampler = _RepeatSampler(self.batch_sampler)
        # A single persistent iterator keeps the worker processes alive
        # across epochs instead of re-spawning them every epoch.
        self.iterator = super().__iter__()
    def __len__(self):
        # One epoch = one pass over the underlying (non-repeating) sampler.
        return len(self.batch_sampler.sampler)
    def __iter__(self):
        # Draw exactly one epoch's worth of batches from the endless stream.
        for i in range(len(self)):
            yield next(self.iterator)
def worker_init_fn(_):
    """DataLoader worker initializer: clear the dataset's KVReader handle.

    KVReader objects cannot be pickled, so the handle inherited from the
    parent process is dropped here; presumably the dataset re-opens it lazily
    on first use -- TODO confirm against the dataset implementation.
    """
    worker_info = torch.utils.data.get_worker_info()
    dataset = worker_info.dataset
    # Avoid "cannot pickle KVReader object" error
    dataset.reader = None # KVReader(dataset.db_path, dataset.num_readers)
class GatherLayer(torch.autograd.Function):
    """Gather tensors from all process, supporting backward propagation.
    """
    @staticmethod
    def forward(ctx, input):
        # Keep the local input so backward() knows this rank's shape/device.
        ctx.save_for_backward(input)
        # One receive buffer per rank; all_gather fills them with every
        # rank's copy of `input`.
        output = [torch.zeros_like(input) \
            for _ in range(dist.get_world_size())]
        dist.all_gather(output, input)
        return tuple(output)
    @staticmethod
    def backward(ctx, *grads):
        input, = ctx.saved_tensors
        grad_out = torch.zeros_like(input)
        # Route back only the gradient belonging to this rank's slice of the
        # gathered tuple; other ranks handle their own slices.
        grad_out[:] = grads[dist.get_rank()]
return grad_out | [
"bytedance@c02dw62hml85.sso.bytedance.com"
] | bytedance@c02dw62hml85.sso.bytedance.com |
1c8145007edb09d77a3b15de5c34d0bc86c0ba97 | f3f38df4c88ab9818bf9c8ef1fe4f7d2533d023c | /libwyag.py | b52610c487ba19ad0f1185c2d04c72475b8b4807 | [] | no_license | dipam7/own_git | 1c2c275f7873e2c09e04b5f8ca7f9ba12e82cd38 | 9c0598bd79cae02d3bb76c1d481593774f3ac1a1 | refs/heads/master | 2022-04-25T03:07:36.082607 | 2020-04-28T01:44:56 | 2020-04-28T01:44:56 | 258,418,031 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,967 | py | import argparse # for handling command line arguments
import collections # for container types like OrderedDict
import configparser
import hashlib # for SHA-1
import os
import re
import sys
import zlib # git compresses everything using zlib
argparser = argparse.ArgumentParser(description="The stupid content tracker")
# we don't just call git, we always call git command (init, add, clone)
# hence we need to add subparsers to our arg parser
# dest=command means the command we pass will be stored as a string
# in an attribute called command
argsubparsers = argparser.add_subparsers(title="Commands", dest="command")
argsubparsers.required = True
def main(argv=sys.argv[1:]):
    """Entry point: parse the command line and dispatch to the matching
    cmd_* handler.

    Parameters:
        argv: argument list without the program name; defaults to
              sys.argv[1:].
    """
    # Fixes two defects: the original parsed an undefined name `argv` while
    # its parameter was called `args` (guaranteed NameError), and the
    # ls-tree branch referenced the invalid identifier `cmd_ls-tree`
    # (silently parsed as the subtraction `cmd_ls - tree(args)`).
    args = argparser.parse_args(argv)

    if   args.command == "add"         : cmd_add(args)
    elif args.command == "cat-file"    : cmd_cat_file(args)
    elif args.command == "checkout"    : cmd_checkout(args)
    elif args.command == "commit"      : cmd_commit(args)
    elif args.command == "hash-object" : cmd_hash_object(args)
    elif args.command == "init"        : cmd_init(args)
    elif args.command == "log"         : cmd_log(args)
    elif args.command == "ls-tree"     : cmd_ls_tree(args)
    elif args.command == "merge"       : cmd_merge(args)
    elif args.command == "rebase"      : cmd_rebase(args)
    elif args.command == "rev-parse"   : cmd_rev_parse(args)
    elif args.command == "rm"          : cmd_rm(args)
    elif args.command == "show-ref"    : cmd_show_ref(args)
    elif args.command == "tag"         : cmd_tag(args)
# abstraction for a git repository
class GitRepository(object):
    """A git repository"""
    # a git repo contains 2 things, worktree which is the folder we want to apply version control on
    # and a .git repo where git stores its own things
    # the config file is stored in .git/config
    worktree = None
    gitdir = None
    conf = None
    # an additional force parameter to disable checks
    def __init__(self, path, force=False):
        """Open the repository whose worktree is `path`.

        path:  the worktree directory; its '.git' subdirectory is the gitdir.
        force: skip all validity checks -- used when creating a repository,
               before the on-disk structure exists.

        Raises Exception when path is not a git repository, when the config
        file is missing, or when the repository format version is unsupported.
        """
        self.worktree = path
        self.gitdir = os.path.join(path, ".git")
        if not (force or os.path.isdir(self.gitdir)):
            raise Exception("Not a git repository %s" % path)
        # Read configuration file in .git/config
        self.conf = configparser.ConfigParser()
        cf = repo_file(self, "config")
        if cf and os.path.exists(cf):
            self.conf.read([cf])
        elif not force:
            raise Exception("Configuration file missing")
        if not force:
            # Only format version 0 (the original, extension-free layout)
            # is supported.
            vers = int(self.conf.get("core", "repositoryformatversion"))
            if vers != 0:
                raise Exception("Unsupported repositoryformatversion %s " %vers)
# we will be doing a lot of path manipulations hence we will write some utility functions
def repo_path(repo, *path):
    """Join the given path components under the repository's gitdir (.git)."""
    return os.path.join(repo.gitdir, *path)
def repo_file(repo, *path, mkdir=False):
    """Same as repo_path, but creates dirname(*path) if absent. For example repo_file(r, "refs", "remotes", "origin")
    will create .git/refs/remotes."""
    # Only the *parent* directories (path[:-1]) are created; the final
    # component is returned as a file path.  Returns None when the parent
    # directory is absent and mkdir is False.
    if repo_dir(repo, *path[:-1], mkdir=mkdir):
        return repo_path(repo, *path)
def repo_dir(repo, *path, mkdir=False):
    """Resolve *path under the gitdir as a directory.

    Returns the directory path when it exists (or after creating it when
    mkdir is True); returns None when absent and mkdir is False.  Raises
    Exception when the path exists but is not a directory.
    """
    full = repo_path(repo, *path)
    if os.path.exists(full):
        if not os.path.isdir(full):
            raise Exception("Not a directory %s" % full)
        return full
    if not mkdir:
        return None
    os.makedirs(full)
    return full
# to create a new git repo, we create the following paths
# .git is the git repository
# .git/objects: the object store
# .git/refs: the reference store, it contains 2 subdirs heads and tags
# .git/HEAD: a reference to the current head
# .git/config: repository's configuration file
# .git/description: repository's description file
def repo_create(path):
    """Create a new repository at path.

    Builds the worktree (must be empty or absent), the .git skeleton
    (branches/, objects/, refs/tags/, refs/heads/) and the description,
    HEAD and config files.  Returns the new GitRepository.
    """
    # force=True: the object is constructed before the on-disk layout exists.
    repo = GitRepository(path, True)

    if os.path.exists(repo.worktree):
        if not os.path.isdir(repo.worktree):
            raise Exception("%s is not a directory!" % path)
        if os.listdir(repo.worktree):
            raise Exception("%s is not empty!" % path)
    else:
        os.makedirs(repo.worktree)

    # Fix: the original assert statements were each missing their closing
    # parenthesis, which made the whole file fail to parse.
    assert(repo_dir(repo, "branches", mkdir=True))
    assert(repo_dir(repo, "objects", mkdir=True))
    assert(repo_dir(repo, "refs", "tags", mkdir=True))
    assert(repo_dir(repo, "refs", "heads", mkdir=True))

    # .git/description
    with open(repo_file(repo, "description"), "w") as f:
        f.write("Unnamed repository: edit this file 'description' to name the repository.\n")

    # .git/HEAD
    with open(repo_file(repo, "HEAD"), "w") as f:
        f.write("ref: refs/heads/master\n")

    with open(repo_file(repo, "config"), "w") as f:
        config = repo_default_config()
        config.write(f)

    return repo
| [
"dipam44@gmail.com"
] | dipam44@gmail.com |
063873b31a1e996e64f6e6dbfa4b16583a381395 | 50784449f366b224f9e652ab49789fe7c148b4f5 | /environments/Env_RoleChangingZeroSum.py | ccd1da607ae956d4e8fe7fe91d0421252e6a3270 | [
"BSD-2-Clause"
] | permissive | wbarfuss/POLD | 655c5e3a499ed2b338d8826c52314a749f2d3175 | 0c12d3f937831770efa83e20d72c37df60c96882 | refs/heads/main | 2023-04-16T22:37:04.126714 | 2022-04-11T08:52:38 | 2022-04-11T08:52:38 | 470,242,430 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,230 | py | """
The 2-state Matching Pennies according to HennesEtAl2010
"""
# import sys
# from pathlib import Path
# base_dir = Path(__file__).resolve().parent.parent.parent
# sys.path.append(str(base_dir))
# from LearningDynamics.Envs.Env_Null import NullEnv
from .Env_Null import NullEnv
import numpy as np
class RoleChangingZeroSum(NullEnv):
    """Two-agent, two-state zero-sum matching-pennies environment in which
    the agents' roles (matcher vs. mismatcher) swap with the state.

    Based on the 2-state Matching Pennies of Hennes et al. (2010).
    """

    def __init__(self, obsnoise):
        # obsnoise: observation noise level, either a scalar (applied to both
        # agents) or a pair of per-agent values in [0, inf).
        if not hasattr(obsnoise, "__iter__"):
            self.noise = np.array([obsnoise, obsnoise])
        else:
            assert len(obsnoise) == 2
            self.noise = np.array(obsnoise)
        assert min(self.noise) >= 0.0
        self.N = 2                          # number of agents
        self.M = len(self.actions())        # number of actions per agent
        self.Z = len(self.states())         # number of environment states
        self.Q = len(self.observations())   # number of observations
        # --
        self.T = self.TransitionTensor()
        self.R = self.RewardTensor()
        self.state = 1  # initial state
    def actions(self):
        # Both agents share the same binary action set.
        acts = ['a', 'b']
        return acts
    def states(self):
        states = ['X', 'Y']
        return states
    def observations(self):
        # With noise above 0.5 for every agent the two states become
        # indistinguishable, so the observation space collapses to one symbol.
        if not np.all(self.noise > 0.5):
            obs = ['x', 'y']
        else:
            obs = ['z']
        return obs
    def FinalStates(self):
        return [0, 0]
    def TransitionTensor(self):
        """Get the Transition Tensor."""
        # T[s, a1, a2, s'] -- initialised to -1 so unset entries stand out.
        Tsas = np.ones((2, 2, 2, 2)) * (-1)
        # investigate
        # T1 = np.array([[1.0, 1.0],
        #                [0.0, 0.0]])
        # T2 = np.array([[0.0, 0.0],
        #                [1.0, 1.0]])
        # T1 = np.array([[0.0, 1.0], # from state 0 to state 1
        #                [1.0, 0.0]])
        # T2 = np.array([[1.0, 0.0], # from state 1 to state 0
        #                [0.0, 1.0]])
        T1 = np.array([[1.0, 1.0], # from state 0 to state 1
                       [0.0, 0.0]])
        T2 = np.array([[0.0, 0.0], # from state 1 to state 0
                       [1.0, 1.0]])
        Tsas[0, :, :, 1] = T1
        Tsas[0, :, :, 0] = 1-T1
        Tsas[1, :, :, 0] = T2
        Tsas[1, :, :, 1] = 1-T2
        return Tsas
    def RewardTensor(self):
        """Get the Reward Tensor R[i,s,a1,...,aN,s']."""
        R = np.zeros((2, 2, 2, 2, 2))
        # State X: agent 0 is rewarded for matching actions, agent 1 for
        # mismatching (zero-sum in each cell across agents).
        R[0, 0, :, :, 0] = [[1 , 0 ],
                            [0 , 1 ]]
        R[1, 0, :, :, 0] = [[0 , 1 ],
                            [1 , 0 ]]
        R[:, 0, :, :, 1] = R[:, 0, :, :, 0]
        # State Y: the roles are swapped.
        R[0, 1, :, :, 1] = [[0 , 1 ],
                            [1 , 0 ]]
        R[1, 1, :, :, 1] = [[1 , 0 ],
                            [0 , 1 ]]
        R[:, 1, :, :, 0] = R[:, 1, :, :, 1]
        return R
    def ObservationTensor(self):
        # O[i, s, o]: probability that agent i observes symbol o in state s.
        if np.all(self.noise > 0.5):
            #self.Q = 1
            # Single uninformative observation for everyone.
            Oiso = np.ones((self.N, self.Z, self.Q))
        else:
            #self.Q = self.Z
            # Noise is capped at 0.5 (beyond that a flip carries no more
            # information loss).
            Oiso = np.zeros((self.N, self.Z, self.Q))
            for i in range(self.N):
                Oiso[i,0,0] = 1 - min(self.noise[i], 0.5)
                Oiso[i,0,1] = 0 + min(self.noise[i], 0.5)
                Oiso[i,1,0] = 0 + min(self.noise[i], 0.5)
                Oiso[i,1,1] = 1 - min(self.noise[i], 0.5)
return Oiso | [
"noreply@github.com"
] | noreply@github.com |
f00c3e6b4362f9f605b96e38965bde24e43788d5 | eb10fc180021d020b0bf1ef73a37b96661270a25 | /dbms_submissions/dbms_assignment_006/query.py | be7bc1956e3a11e80d86453f463bcfe51ec1559c | [] | no_license | Venkataramana228/dbms | efdffc66433476d69117aaf26143a87b851ac330 | 94f70053014c6f79170ae8c05b8a03f2ec315e0c | refs/heads/master | 2022-09-08T16:42:57.615502 | 2020-05-28T09:19:12 | 2020-05-28T09:19:12 | 263,019,317 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | py | #Q1="SELECT fname,lname FROM Actor a INNER JOIN Cast c on a.id==c.pid WHERE c.mid==12148;"
# Q1: first and last names of every cast member of movie id 12148.
Q1="SELECT fname,lname FROM Actor INNER JOIN Cast on id==pid WHERE mid==12148;"
# Q2: number of movie appearances by the actor Harrison (I) Ford.
Q2="SELECT COUNT(mid) FROM Actor a INNER JOIN Cast c on a.id=c.pid WHERE fname='Harrison (I)' and lname='Ford';"
# Q3: distinct cast-member ids of movies whose title starts with 'Young Latin Girls'.
Q3="SELECT DISTINCT(pid) FROM Movie m INNER JOIN Cast c on m.id=c.mid where m.name LIKE 'Young Latin Girls%';"
Q4="SELECT COUNT(DISTINCT pid) FROM movie m INNER JOIN Cast a on m.id=a.mid where m.year BETWEEN 1990 and 2000;" | [
"ec2-user@ip-172-31-24-189.ap-southeast-1.compute.internal"
] | ec2-user@ip-172-31-24-189.ap-southeast-1.compute.internal |
04047ac8d8a9f25d66d99cc2fac1fb7c0d56021c | f8054fae8e496cb9859a363d2571b4ac94c1d7a2 | /Python/LineGrapg.py | c23e0235ad9bd49ab90ff0f8844f609f92732e79 | [] | no_license | 22aishwaryagoyal/Python | 54f5d394a88c64496291de6de8dfa6c36d87b5f0 | 1daa508a664af423ce28d92dc7e837a68312fdf1 | refs/heads/main | 2023-07-03T20:18:00.729924 | 2021-08-19T12:15:45 | 2021-08-19T12:15:45 | 397,931,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py | import pandas as pd
# Population figures per census year -- units not stated in the source;
# presumably crores/millions, TODO confirm.
data={'year':[1971,1981,1991,2001,2011],'pop':[50,73,81,98,111]}
df=pd.DataFrame(data,columns=['year','pop'])
# Draw population over time as a line chart (needs a matplotlib backend).
df.plot(x='year',y='pop',kind='line')
| [
"noreply@github.com"
] | noreply@github.com |
467dc99e87a1d24ba74fc20773404fbbbbab9966 | 828e541b8c218db557da35b9d9d7a66fae68485a | /answer5.py | e67977dadcd32fbf0c9906928ea5d8f8e7163257 | [] | no_license | arsh1807/Assignment-2 | 57b177e25dda5b41ce28382621c18c8731e71448 | 586c61138c4667ca651c53052170e9f2ea34edf8 | refs/heads/master | 2020-03-26T17:03:49.923704 | 2018-08-17T16:43:26 | 2018-08-17T16:43:26 | 145,139,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | #5
# Assignment answer 5: print a string, a course name and a fee, one per line.
s = "Acadview"
course = "Python"
fees = 5000
print("%s" % s)
print("%s" % course)
print("%d" % fees)
| [
"arshi.1807@gmail.com"
] | arshi.1807@gmail.com |
971dd6b3cb304f9c7d87eacd5e07e92e1786bc2e | f8d181f293ce950f1a70bef1d023139d9e70a2c7 | /tests/contrib/operators/test_gcp_vision_operator_system.py | 2b75642d6f3a3c93aab282d82e823a4a09d01087 | [
"Apache-2.0",
"BSD-3-Clause",
"Python-2.0",
"MIT",
"BSD-2-Clause"
] | permissive | Piboonsak/airflow | d242f79561d893111ad73b9e3481b9180adecfd4 | dce92a54190155898c75c0f3392d42fb28f1884a | refs/heads/master | 2020-04-29T15:16:06.779329 | 2019-03-18T05:16:14 | 2019-03-18T05:16:14 | 176,222,528 | 1 | 0 | Apache-2.0 | 2019-03-18T06:57:38 | 2019-03-18T06:57:38 | null | UTF-8 | Python | false | false | 1,397 | py | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from tests.contrib.utils.base_gcp_system_test_case import DagGcpSystemTestCase, SKIP_TEST_WARNING
from tests.contrib.utils.gcp_authenticator import GCP_AI_KEY
# Skipped entirely unless a GCP service-account key for the AI APIs is
# configured in the system-test environment.
@unittest.skipIf(DagGcpSystemTestCase.skip_check(GCP_AI_KEY), SKIP_TEST_WARNING)
class CloudVisionExampleDagsSystemTest(DagGcpSystemTestCase):
    """System test that runs the 'example_gcp_vision' example DAG end to end
    against real GCP services, authenticated with GCP_AI_KEY."""
    def __init__(self, method_name='runTest'):
        super(CloudVisionExampleDagsSystemTest, self).__init__(
            method_name, dag_id='example_gcp_vision', gcp_key=GCP_AI_KEY
        )
    def test_run_example_dag_function(self):
        # Executes the whole example DAG; the base class handles setup/teardown.
        self._run_dag()
| [
"kaxilnaik@gmail.com"
] | kaxilnaik@gmail.com |
21f87cfc4e94213f0943b398cd2ea89c8f3719cd | 268a5ff900334afe5a3201391857663cdda9b854 | /VectoElem.py | 5250eb381bfd722154c6b4d417290df90b45bda6 | [] | no_license | Albertillo/Mecanica-Orbital | b065f297bd84421f1e5b1471c53e45ef32060026 | 0185a2614805be0ad39a22aa19439798a4c8b3d1 | refs/heads/master | 2020-03-28T07:06:43.501474 | 2018-09-07T22:50:51 | 2018-09-07T22:50:51 | 147,881,512 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,524 | py | ###############################################################################################################################################################
# VectoElem.py: Esta función se ha desarrollado para uso académico. No se recomienda que se utilice con otros propósitos. #
# La función funciona de la siguiente manera: Se debe especificar un foco de la órbita y los vectores de posición y velocidad. La función calculará los #
# elementos orbitales: Semieje mayor "a" (km), inclinación "i" (rad), longitud del nodo ascendente "Omega" (rad), excentricidad "mode", argumento del #
# periastro "omega" (rad) y anomalía verdadera "theta" (rad). Los focos disponibles en este momento son "Sol", "Tierra" y "Jupiter", pero pueden añadirse #
# facilmente introduciendo las diferentes "mu" en el primer if. #
# Algoritmo de Howard D. Curtis, Orbital Mechanics for Engineering Students, First Edition, Elsevier Butterworth-Heinemann, Oxford (UK), 2005. #
###############################################################################################################################################################
import numpy as np
from math import *
#Elementos orbitales desde vectores de posición y velocidad.
def VectoElem(focpoint,rx,ry,rz,vx,vy,vz): #Posición en km y velocidad en km/s.
    """Compute the classical orbital elements from a state vector.

    ``focpoint`` selects the central body ("Sol", "Tierra" or "Jupiter");
    (rx, ry, rz) is the position in km and (vx, vy, vz) the velocity in km/s,
    both in an inertial frame centred on the focus.

    Returns ``(a, i, Omega, mode, omega, theta)``: semi-major axis (km),
    inclination, longitude of the ascending node, eccentricity magnitude,
    argument of periapsis and true anomaly (angles in rad).  Algorithm 4.2 of
    Curtis, *Orbital Mechanics for Engineering Students*.

    Raises ValueError for an unknown ``focpoint``.
    """
    mus = {"Sol": 132712439935.5, "Tierra": 398600.4, "Jupiter": 126711995.4}  # km^3/s^2
    if focpoint not in mus:
        # BUG FIX: the original only printed this message and then crashed
        # with a NameError because ``mu`` was never assigned.  Fail loudly.
        raise ValueError("ERROR, FOCO DE LA ÓRBITA NO VÁLIDO.")
    mu = mus[focpoint]
    r = np.array([rx, ry, rz])
    v = np.array([vx, vy, vz])
    modr = np.linalg.norm(r)
    modv = np.linalg.norm(v)
    a = mu/(2*mu/modr - modv*modv)          # vis-viva / energy equation
    h = np.cross(r, v)                      # specific angular momentum vector
    i = np.arccos(h[2]/np.linalg.norm(h))
    N = np.cross([0, 0, 1], h)              # node line
    if N[1] >= 0:
        Omega = np.arccos(N[0]/np.linalg.norm(N))
    else:
        Omega = 2*pi - np.arccos(N[0]/np.linalg.norm(N))
    vr = np.dot(r, v)/modr                  # radial velocity component
    e = 1/mu*((modv*modv - mu/modr)*r - modr*vr*v)
    mode = np.linalg.norm(e)
    # BUG FIX: the original branched on ``mode >= 0``, which is always true
    # (a norm is non-negative), so the 2*pi - arccos quadrant correction was
    # unreachable.  Per Curtis the check must use the z component of the
    # eccentricity vector.
    if e[2] >= 0:
        omega = np.arccos(np.dot(N, e)/(np.linalg.norm(N)*mode))
    else:
        omega = 2*pi - np.arccos(np.dot(N, e)/(np.linalg.norm(N)*mode))
    if vr >= 0:
        theta = np.arccos(np.dot(e, r)/(modr*mode))
    else:
        theta = 2*pi - np.arccos(np.dot(e, r)/(modr*mode))
    return a, i, Omega, mode, omega, theta
| [
"noreply@github.com"
] | noreply@github.com |
882b8ffb7a237ed542db1f3b35ec88de5b26db07 | 223a8859b801df7603f239a0ea3bdda0446591ce | /Corrector.py | 081999aa39ec3369043fa86d70f3a53a69e3a36b | [] | no_license | nthuepl/On-Pen-handwritten-Word-Recognition-Using-Long-Short-Term-Memory-Model | bc3db0cb254b818efbac543314f4e6175bf69766 | 7cb597aa4dc749c03115421570472e43865a1583 | refs/heads/master | 2020-03-27T13:27:53.992505 | 2018-09-10T10:30:54 | 2018-09-10T10:30:54 | 146,611,269 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,681 | py | from spell import correction
from Levenshtein import levenshtein, index
import numpy as np
class Lexicon():
    """Vocabulary loaded from ``20k.txt``, grouped by word length, plus the
    weighted-Levenshtein cost maps used to match recognizer output.

    ``word_list`` holds all words of length 1..17 in length order (each group
    sorted case-insensitively); ``page_table[n]`` is the index of the first
    word of length ``n`` inside ``word_list``.
    """
    def __init__(self):
        vocab_list = []
        # load from file
        fp = open('20k.txt', 'r')
        for line in fp.readlines():
            word = line[:-1]
            vocab_list.append(word)
        fp.close()
        self.word_list = []
        self.page_table = [0 for i in range(20)]
        self.cnt = -1  # iteration cursor for __iter__/__next__
        for length in range(1, 18):
            self.page_table[length] = len(self.word_list)
            word_n = []
            for i in range(len(vocab_list)):
                if len(vocab_list[i]) == length:
                    word_n.append(vocab_list[i])
            word_n = sorted(word_n, key=str.upper)
            self.word_list += word_n
        # Hand-tuned edit costs: cheap substitutions for visually confusable
        # letter pairs (a/u, b/p, c/e, f/l, h/n, r/v), uniform otherwise.
        au_w, bp_w, ce_w, fl_w, hn_w, rv_w = (0.136, 0.695, 0.628, \
                                              0.501, 0.917, 0.139)
        sub_w, del_w, ins_w = (1.389, 1.925, 1.954)
        self.del_map = np.array([del_w for i in range(26)])
        self.ins_map = np.array([ins_w for i in range(26)])
        self.sub_map = np.array([[sub_w for i in range(26)] for j in range(26)])
        self.sub_map[index('a'), index('u')] = au_w
        self.sub_map[index('p'), index('b')] = bp_w
        self.sub_map[index('e'), index('c')] = ce_w
        self.sub_map[index('l'), index('f')] = fl_w
        self.sub_map[index('r'), index('v')] = rv_w
        self.sub_map[index('n'), index('h')] = hn_w
    def __iter__(self):
        return self
    def __next__(self):
        # BUG FIX: the original read the undefined global ``cnt`` (NameError
        # on first use) and, because it tested the bound *before* advancing,
        # would also index one element past the end of word_list.
        self.cnt += 1
        if self.cnt >= len(self.word_list):
            raise StopIteration
        return str(self.word_list[self.cnt])
    def __str__(self):
        return "Lexicon:\n\t"+str(len(self.word_list))+" words\n\t"+"page table: "+str(self.page_table)
    def page(self, i):
        """Return the word stored at global index *i*."""
        return str(self.word_list[i])
    def index(self, i):
        """Return the word_list offset of the first word of length *i*."""
        return int(self.page_table[i])
    def tolist(self):
        return list(self.word_list)
    def leven_fit(self, word, area=None):
        """Return the vocabulary word closest to *word* under the weighted
        Levenshtein distance, optionally restricted to the half-open index
        range ``area = (head, tail)``."""
        answer = ''
        MIN = 20
        # BUG FIX: the default tail used to be len(word_list) - 1, which
        # silently excluded the last vocabulary word from the scan below.
        head, tail = 0, len(self.word_list)
        if area != None:
            head, tail = area
        for w in self.word_list[head:tail]:
            d = levenshtein(word, w, insert_costs=self.ins_map, delete_costs=self.del_map, substitute_costs=self.sub_map)
            if d < MIN:
                MIN = d
                answer = w
            if d == 0:  # exact match -- cannot do better
                break
        return answer
class Corrector():
    """Spelling corrector: statistical model first, lexicon fallback second."""
    def __init__(self, lexicon=None):
        # Build a default Lexicon unless the caller supplied one.
        self.lex = Lexicon() if lexicon is None else lexicon
        print(self.lex)
        # Per-letter mis-stroke rates and the distance weight used by spell.correction.
        self.MSR = {'r': 0.09, 'other': 0.514, 'n': 0.196, 'e': 0.139, 'l': 0.208, 'p': 0.227, 'a': 0.142}
        self.dw = 0.459
    def correction(self, word):
        """Return the best correction for *word*."""
        raw = str(word)
        guess = correction(word, MSR=self.MSR, distance_weight=self.dw)
        if guess is None:
            # Fall back to a weighted-Levenshtein scan over words whose
            # length is within one of the input's length.
            lo = self.lex.index(len(raw))
            hi = self.lex.index(len(raw) + 2)
            guess = self.lex.leven_fit(raw, area=(lo, hi))
        return guess
if __name__ == '__main__':
    # Smoke test: constructing a Corrector loads the lexicon from 20k.txt
    # and prints its summary.
    corrector = Corrector()
    print(corrector.lex)
| [
"gn02336853@gmail.com"
] | gn02336853@gmail.com |
13b67511127b138050e2016acbbbb75feb3e8ca5 | f153a36b5e211690ded1af00c0160eebd2add1ca | /PROGRAMMERS/Level 1/두 정수 사이의 합.py | 70d41e935d84b333409311cfd37d198781261bca | [] | no_license | KyungHoon0126/Algorithm | 47551bbe22c70eac04ed518c2c9c1f65d48ee5b9 | 8369f0e1103d282cdc138666add65dd0ca926e70 | refs/heads/master | 2021-08-17T08:32:09.970502 | 2021-06-22T12:52:22 | 2021-06-22T12:52:22 | 214,456,043 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | def solution(a, b):
answer = 0
first = 0
second = 0
first = a if a < b else b
second = b if b > a else a
print(first, second)
for i in range(first, second + 1):
answer += i
return answer | [
"kidi052812@gmail.com"
] | kidi052812@gmail.com |
f1a64d7a84c54f957d4f22b4840665e842377b13 | 5c052790b46d7b8e22dcf980c52bb3044b55a18f | /CSC308 Examples/Examples/Exceptions/UniversalExcept.py | ef4798f933a46e0471c8c6f950c2d51d5bbf2896 | [] | no_license | raarnoldy23/pythonExamples | 84ae05a20d962ae6f9ef3f2048260ce654e2baaa | 3f50625198fdc51315810ffd7ff6647fbba9fd8a | refs/heads/master | 2022-06-20T04:27:11.293644 | 2020-05-11T18:08:06 | 2020-05-11T18:08:06 | 263,116,977 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 334 | py |
def main():
    """Sum the numeric lines of text.txt and print the total to 2 decimals.

    Teaching example: the bare ``except`` below is a *universal* handler that
    catches any exception raised inside the try block.
    """
    total = 0.0
    try:
        # BUG FIX: the original opened 'text'.txt -- an attribute access on
        # the string literal, which raised AttributeError at runtime; the
        # intended file name is 'text.txt'.
        infile = open('text.txt', 'r')
        for line in infile:
            amount = float(line)
            total += amount
        infile.close()
        print(format(total, ".2f"))
    except:
        # Deliberately broad for this demo; real code should catch specific
        # exception types.
        print("AN ERROR OCCURED!!!")
# BUG FIX: the only call to main() was indented inside main() itself, so the
# script never actually ran; invoke it once at module level.
main()
| [
"ARN0644@calu.edu"
] | ARN0644@calu.edu |
2e21fbc4566ec48ec6d3e36c44da1af16c81e5ea | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/abc101/A/4927211.py | 4e4246f50058560e91aa73a6173b4e550e2b0b90 | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 51 | py | s = list(input())
# AtCoder ABC101 A: net result of the '+'/'-' operations in the input string.
print(s.count('+')-s.count('-'))
"kwnafi@yahoo.com"
] | kwnafi@yahoo.com |
31475d7e6cd976e2ad2ea6c3ecd3f56b4ae48fbc | 326a026bcc6bad962159677110d78d3d836532ed | /markote/api/notebook.py | e05023873ff40b79701ec2540061e8c2d53ca0e2 | [
"MIT"
] | permissive | Frederick-S/markote | f63a5007fd0a70ce4b3ae9d03425ae9f9c8b54f3 | 095dabe3da83b5d8809593758661eb78fa527f49 | refs/heads/master | 2023-03-04T16:50:30.541147 | 2022-08-12T01:24:43 | 2022-08-12T01:24:43 | 110,396,888 | 9 | 2 | MIT | 2023-03-04T13:11:38 | 2017-11-12T02:04:32 | Vue | UTF-8 | Python | false | false | 870 | py | from flask import jsonify, request
from markote.api.api_blueprint import api_blueprint
from markote.oauth import oauth
@api_blueprint.route('/notebooks', methods=['GET'])
def get_notebooks():
    """Return the signed-in user's OneNote notebooks (id and display name)."""
    client = oauth.microsoft_graph
    resp = client.get(
        'me/onenote/notebooks?$select=id,displayName')
    return jsonify(resp.json()), resp.status_code
@api_blueprint.route('/notebooks/<notebook_id>/sections', methods=['GET'])
def get_sections(notebook_id):
    """Return the sections of *notebook_id*, optionally filtered by the
    ``name`` query parameter (exact display-name match)."""
    name = request.args.get('name')
    if name:
        # OData string literals escape an embedded single quote by doubling
        # it; without this a name containing ' yields a malformed $filter.
        escaped_name = name.replace("'", "''")
        query_filter = '$filter=displayName eq \'{0}\''.format(escaped_name)
    else:
        query_filter = ''
    oauth_client = oauth.microsoft_graph
    response = oauth_client.get(
        'me/onenote/notebooks/{0}/sections?$select=id,displayName&{1}'.format(
            notebook_id, query_filter))
    return jsonify(response.json()), response.status_code
| [
"mao_xiaodan@hotmail.com"
] | mao_xiaodan@hotmail.com |
cfb58a7a49bde127229470f43e7c101d5f9d7168 | ba1ddbc6b364dc2fd55f83ea807b50bf45ce3d1a | /PageObject/VivaVideo/home.py | 23b61b58c20490654f07d632cf8e5bfc9c4414a4 | [] | no_license | zlmone/ATX-UI | 81c58fa722586fe6fb20cd39e3a85afa6057db93 | 44bfa67ed2274c2eeb36f905d5bd482fd96a6707 | refs/heads/master | 2022-05-28T09:03:40.380824 | 2020-05-06T11:39:39 | 2020-05-06T11:39:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,770 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from Public.Decorator import *
from Public.Test_data import *
# Module-level logger shared by every page action in this file.
log = Log()
class home_Page(BasePage):
    '''创作页首页 -- page object for the app's creation (home) tab.

    Every method performs one UI action through the uiautomator2 device
    handle ``self.d`` inherited from BasePage; the ``@teststep`` decorator
    logs the step.  (Long-dead commented-out helpers were removed.)
    '''
    @teststep
    def close_popup(self):
        """Dismiss the household-policy dialog shown on the home page."""
        log.i('关闭首页家庭政策弹窗')
        try:
            self.d(resourceId="com.quvideo.xiaoying:id/iv_close").click(3)
        except:
            log.i('弹窗未弹出或者已消除')
            pass
    @teststep
    def close_ad_popup(self, timeout=3):
        """Close the advertisement popup if it appears within *timeout* s."""
        log.i('关闭广告弹窗 ')
        self.d(resourceId="com.quvideo.xiaoying:id/tt_insert_dislike_icon_img").click_exists(timeout=timeout)
    @teststep
    def click_template_btn(self):
        """Tap the bottom-bar "template" tab."""
        log.i('点击底部拍同款按钮')
        self.d(resourceId="com.quvideo.xiaoying:id/tv_home_tab", text="拍同款").click()
    @teststep
    def click_home_btn(self):
        """Tap the bottom-bar "edit" tab."""
        log.i('点击底部剪辑按钮')
        self.d(resourceId="com.quvideo.xiaoying:id/tv_home_tab", text="剪辑").click()
    @teststep
    def click_me_btn(self):
        """Tap the bottom-bar "me" tab."""
        log.i('点击底部我按钮')
        self.d(resourceId="com.quvideo.xiaoying:id/tv_home_tab", text="我").click()
    @teststep
    def click_vip_btn(self):
        """Tap the VIP entry button."""
        log.i('点击VIP按钮')
        self.d(resourceId="com.quvideo.xiaoying:id/iv_vip_home8_cut").click()
    @teststep
    def click_edit_btn(self):
        """Open the video editor, dismissing the upgrade helper if shown."""
        log.i('点击视频剪辑')
        self.d(resourceId="com.quvideo.xiaoying:id/iv_edit_home8_cut").click()
        try:
            self.d(resourceId="com.quvideo.xiaoying:id/imgbtn_help_exit").implicitly_wait(3).click()
        except:
            log.i("立刻升级页面已消除")
            pass
    @teststep
    def click_mv_btn(self):
        """Open the album-MV feature."""
        log.i('点击相册MV')
        self.d(resourceId="com.quvideo.xiaoying:id/iv_mv_home8_cut").click()
    @teststep
    def click_draft_btn(self):
        """Open the drafts list."""
        log.i('点击草稿')
        self.d(resourceId="com.quvideo.xiaoying:id/tv_draft_icon_home8_cut",text= '草稿').click()
    @teststep
    def click_home_more(self):
        """Tap "see more" in the material center."""
        log.i('点击素材中心查看更多按钮')
        self.d(text="查看更多").click()
    @teststep
    def click_camera_btn(self):
        """Open the camera, accepting any permission dialogs, and tap to
        focus so the filter popup is dismissed."""
        log.i('点击拍摄按钮')
        self.watch_device('取消|允许|始终允许')
        self.d(resourceId="com.quvideo.xiaoying:id/ll_eight4_home8_cut").click()
        time.sleep(5)  # wait for the camera to finish loading
        self.d.click(0.5, 0.5)  # tap to focus and dismiss the filter popup
    @teststep
    def click_sec_addText(self):
        """Secondary feature row: open the subtitle tool."""
        log.i('点击次要功能位加字幕')
        self.d(resourceId="com.quvideo.xiaoying:id/ll_eight0_home8_cut").click()
    @teststep
    def click_sec_Mixer(self):
        """Secondary feature row: open picture-in-picture."""
        log.i('点击次要功能位画中画')
        self.d(resourceId="com.quvideo.xiaoying:id/ll_eight1_home8_cut").click()
    @teststep
    def click_sec_Mosaic(self):
        """Secondary feature row: open the mosaic tool."""
        log.i('点击次要功能位马赛克')
        # BUG FIX: the original line ended with a stray backslash, which
        # spliced the following "@teststep" line into this expression (the
        # "@" parsed as the matrix-multiply operator) and left click_sec_FAQ
        # undecorated.
        self.d(resourceId="com.quvideo.xiaoying:id/ll_eight2_home8_cut").click()
    @teststep
    def click_sec_FAQ(self):
        """Secondary feature row: open the beginner tutorial."""
        log.i('点击次要功能位新手教程')
        self.d(resourceId="com.quvideo.xiaoying:id/ll_eight3_home8_cut").click()
    @teststep
    def click_sec_Capture(self):
        """Secondary feature row: open the capture feature."""
        log.i('点击次要功能位拍摄')
        self.d(resourceId="com.quvideo.xiaoying:id/ll_eight4_home8_cut").click()
    @teststep
    def click_sec_musicExtraction(self):
        """Secondary feature row: open audio extraction."""
        log.i('点击次要功能位音频提取')
        self.d(resourceId="com.quvideo.xiaoying:id/ll_eight5_home8_cut").click()
if __name__ == '__main__':
    # Manual smoke test: wire up logging and a null driver, then exercise
    # one page action.
    from Public.Log import Log
    Log().set_logger('udid', './log.log')
    BasePage().set_driver(None)
    home_Page().close_ad_popup()
| [
"lixin.zhu@quvideo.com"
] | lixin.zhu@quvideo.com |
5d531cf3d38f4598b918f4a682fe7d61279880ef | 2e4d1da92dbf5b3d8a88322c19fc700f3bd1ef4e | /FDLNet-master/latency/rfnet/config.py | 28d993a8b6d6bdf0d30d823a1e0305b76247a83a | [] | no_license | iamwangyabin/hardnetNas | e0ad756134556dacb152f1a326014baa48d6d010 | ed2b22031971b5de15aa40844cab350ec1a1b8aa | refs/heads/master | 2022-09-26T11:33:40.607810 | 2022-09-19T01:49:23 | 2022-09-19T01:49:23 | 215,681,633 | 1 | 0 | null | 2022-09-19T02:02:11 | 2019-10-17T01:59:48 | Python | UTF-8 | Python | false | false | 3,115 | py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# `pip install easydict` if you don't have it
from easydict import EasyDict as edict
__C = edict()
# Importers access the configuration through ``cfg`` (an alias of __C).
cfg = __C
"""
Project options
"""
__C.PROJ = edict()
# whether to use gpu
__C.PROJ.USE_GPU = True
# seed for random
__C.PROJ.SEED = 0
# training, evaluate and test data
__C.PROJ.TRAIN = "view"
__C.PROJ.TRAIN_ALL = False
__C.PROJ.TRAIN_PPT = 0.8
__C.PROJ.EVAL = "view"
__C.PROJ.EVAL_ALL = False
__C.PROJ.EVAL_PPT = 0.1
__C.PROJ.TEST = "view"
__C.PROJ.TEST_ALL = False
__C.PROJ.TEST_PPT = 0.1
"""
Model options
"""
__C.MODEL = edict()
# gaussian kernel size
__C.MODEL.GAUSSIAN_KSIZE = 15
# gaussian kernel sigma
__C.MODEL.GAUSSIAN_SIGMA = 0.5
# Descriptor Threshold
__C.MODEL.DES_THRSH = 1.0
# Coordinate Threshold
__C.MODEL.COO_THRSH = 5.0
# Ksize
__C.MODEL.KSIZE = 3
# padding
__C.MODEL.padding = 1
# dilation
__C.MODEL.dilation = 1
# scale_list
__C.MODEL.scale_list = [3.0, 5.0, 7.0, 9.0, 11.0, 13.0, 15.0, 17.0, 19.0, 21.0]
"""
Loss options
"""
__C.LOSS = edict()
# score loss weight
__C.LOSS.SCORE = 1000
# pair loss weight
__C.LOSS.PAIR = 1
"""
Training options
"""
__C.TRAIN = edict()
# batch size
__C.TRAIN.BATCH_SIZE = 1
# Train epoch
__C.TRAIN.EPOCH_NUM = 201
# Train log interval
__C.TRAIN.LOG_INTERVAL = 5
# weight decay
__C.TRAIN.WEIGHT_DECAY = 1e-4
# detector learning rate
__C.TRAIN.DET_LR = 0.1
# descriptor learning rate
__C.TRAIN.DES_LR = 10
# detection optimizer (adam/sgd)
__C.TRAIN.DET_OPTIMIZER = "adam"
# adjust detection lr (sgd/exp)
__C.TRAIN.DET_LR_SCHEDULE = "exp"
# detector weight decay
__C.TRAIN.DET_WD = 0
# descriptor optimizer (adam/sgd)
__C.TRAIN.DES_OPTIMIZER = "adam"
# adjust descriptor lr (sgd/exp)
# __C.TRAIN.DES_LR_SCHEDULE = 'exp'
__C.TRAIN.DES_LR_SCHEDULE = "sgd"
# descriptor weight decay
__C.TRAIN.DES_WD = 0
# learning rate decay epoch
__C.TRAIN.LR_DECAY_EPOCH = 5
# learning rate base line
__C.TRAIN.LR_BASE = 0.0001
# score strength weight
__C.TRAIN.score_com_strength = 100.0
# scale strength weight
__C.TRAIN.scale_com_strength = 100.0
# non maximum suppression threshold
__C.TRAIN.NMS_THRESH = 0.0
# nms kernel size
__C.TRAIN.NMS_KSIZE = 5
# top k patch
__C.TRAIN.TOPK = 250
"""
Image data options
"""
# View train sequence Mean and Std
__C.view = edict()
__C.view.csv = "hpatch_view.csv"
__C.view.root = "../data/hpatch_v_sequence"
__C.view.MEAN = 0.4230204841414801
__C.view.STD = 0.25000138349993173
__C.view.NUM = 295
# illumination sequence Mean and Std
__C.illu = edict()
__C.illu.csv = "hpatch_illum.csv"
__C.illu.root = "../data/hpatch_i_sequence"
__C.illu.MEAN = 0.4337542740124942
__C.illu.STD = 0.2642307153894012
__C.illu.NUM = 285
# EF dataset Mean and Std (the original comment wrongly said "illumination")
__C.ef = edict()
__C.ef.csv = "EFDataset.csv"
__C.ef.root = "../data/EFDataset"
__C.ef.MEAN = 0.4630827743610772
__C.ef.STD = 0.24659232013004403
__C.ef.NUM = 293
"""
Patch options
"""
__C.PATCH = edict()
# patch size
__C.PATCH.SIZE = 32
"""
Hardnet options
"""
__C.HARDNET = edict()
# margin for hardnet loss
__C.HARDNET.MARGIN = 1.0
| [
"38123329+iamwangyabin@users.noreply.github.com"
] | 38123329+iamwangyabin@users.noreply.github.com |
54a1edcce616c00b9f5f2d3535a386ba07406025 | 4b9d121c8df2d1a9985bec2315aa639976889b81 | /Test de la manette.py | 84a21e43250fb8d5b77ed31b4d06307649601b5f | [] | no_license | totowarx/proberXY | 5d5d833a81b0e96b2e732922b6ede7f7ebcb4f0a | d0b40121d2795d413893b31a39ab2879ae1af980 | refs/heads/main | 2023-06-19T18:04:12.988081 | 2021-07-20T09:30:10 | 2021-07-20T09:30:10 | 360,071,861 | 1 | 0 | null | 2021-06-25T13:21:02 | 2021-04-21T07:28:07 | G-code | UTF-8 | Python | false | false | 1,480 | py | from __future__ import print_function
import xbox
# from xbox import Joystick
# change le nombre à virgule flottante au format de chaîne -x.xxx
def fmtFloat(n):
    """Render *n* right-aligned in a 6-character field with 3 decimals."""
    return format(n, '6.3f')
def show(*args):
    """Print all arguments on the current line, no separators, no newline."""
    print(*args, sep="", end="")
def showIf(boolean, ifTrue, ifFalse=" "):
    """Emit *ifTrue* when *boolean* is truthy, otherwise *ifFalse*."""
    show(ifTrue if boolean else ifFalse)
joy = xbox.Joystick()
# Buttons
print("Appuyez sur Back pour quitter")
# Poll the pad until Back is pressed, redrawing one status line per loop;
# the trailing carriage return rewinds the cursor for a single-line display.
while not joy.Back():
    show("Connecté!!!")
    showIf(joy.connected(), "Y", "N")
    # Right stick
    show(" Joystick droit:", fmtFloat(joy.rightX()), "/", fmtFloat(joy.rightY()))
    # Left stick
    show(" Joystick gauche:", fmtFloat(joy.leftX()), "/", fmtFloat(joy.leftY()))
    # Right trigger
    show(" Gachette droite:", fmtFloat(joy.rightTrigger()))
    # Left trigger
    show(" Gauchette gauche:", fmtFloat(joy.leftTrigger()))
    # A/B/X/Y
    show(" Boutons:")
    showIf(joy.A(), "A")
    showIf(joy.B(), "B")
    showIf(joy.X(), "X")
    showIf(joy.Y(), "Y")
    # Dpad U/D/L/R
    show(" Croix directionnel:")
    showIf(joy.dpadUp(), "U")
    showIf(joy.dpadDown(), "D")
    showIf(joy.dpadLeft(), "L")
    showIf(joy.dpadRight(), "R")
    # Left bumper
    show(" Bumper gauche:")
    showIf(joy.leftBumper(), "LB")
    # Right bumper (the original comment repeated "gauche" by copy/paste)
    show(" Bumper droit:")
    showIf(joy.rightBumper(), "RB")
    # Carriage return: back to the start of the line for the next refresh.
    show(chr(13))
# Done
joy.close()
| [
"noreply@github.com"
] | noreply@github.com |
4b9a62611c764cd8d705fcf54fd46f2a5624deae | d9e26e516ab3863b6e7d00c4e3cdecf1af7028eb | /src/oaklib/io/rollup_report_writer.py | e4644c058309aeb0aeae82b0c4cc2fa52f2b5e04 | [
"Apache-2.0"
] | permissive | INCATools/ontology-access-kit | 2f08a64b7308e8307d1aaac2a81764e7d98b5928 | 8d2a124f7af66fe2e796f9e0ece55585438796a5 | refs/heads/main | 2023-08-30T14:28:57.201198 | 2023-08-29T17:40:19 | 2023-08-29T17:40:19 | 475,072,415 | 67 | 15 | Apache-2.0 | 2023-09-07T01:06:04 | 2022-03-28T15:50:45 | Jupyter Notebook | UTF-8 | Python | false | false | 3,444 | py | from typing import Dict, List, TextIO
from airium import Airium
from linkml_runtime.dumpers import json_dumper, yaml_dumper
def format_object(curie, label):
    """Render an entity as ``label [curie]``, or just the curie when the
    label is missing/empty."""
    return f"{label} [{curie}]" if label else curie
def add_association_group(doc: Airium, associations: List[Dict], subject: str, header_label: str):
    """Append one labelled association group for *subject* to *doc*.

    Emits nothing when *subject* has no matching associations.
    """
    matching = [a for a in associations if a.get("subject") == subject]
    if not matching:
        return
    with doc.div(klass="association-group"):
        doc.div(_t=header_label, klass="association-group-header")
        with doc.ul(klass="association-group-list"):
            for assoc in matching:
                doc.li(_t=format_object(assoc.get("object"), assoc.get("object_label")))
def generate_html(subjects: List[str], groups: List[Dict]) -> str:
    """Render the rollup as a standalone HTML document.

    One column per subject and one row per top-level group; each cell lists
    the subject's associations per sub-group, with the remainder under
    an "Other" heading.
    """
    doc = Airium()
    doc("<!DOCTYPE html>")
    with doc.html(lang="en"):
        with doc.head():
            doc.meta(charset="utf-8")
            doc.title(_t="Rollup Table")
            doc.style(
                _t="""
            .rollup-table {
                border-collapse: collapse;
                width: 100%;
            }
            .rollup-table tr {
                vertical-align: top;
            }
            .rollup-table td {
                padding: 0.25rem;
                border-top: 1px solid black;
            }
            .primary-group-label {
                font-weight: bold;
            }
            .association-group {
                margin-bottom: 1rem;
            }
            .association-group-header {
                font-style: italic;
            }
            .association-group-list {
                margin: 0;
            }
            """
            )
        with doc.body():
            with doc.table(klass="rollup-table"):
                # Header row: "Subject" corner cell, then one cell per subject.
                with doc.tr():
                    doc.td(_t="Subject", klass="primary-group-label")
                    for subject in subjects:
                        doc.td(_t=subject)
                # One row per group; each subject gets a cell of its groups.
                for group in groups:
                    with doc.tr():
                        label = format_object(
                            group.get("group_object"), group.get("group_object_label")
                        )
                        doc.td(_t=label, klass="primary-group-label")
                        for subject in subjects:
                            with doc.td():
                                for sub_group in group.get("sub_groups", []):
                                    add_association_group(
                                        doc,
                                        sub_group.get("associations", []),
                                        subject,
                                        format_object(
                                            sub_group.get("group_object"),
                                            sub_group.get("group_object_label"),
                                        ),
                                    )
                                add_association_group(
                                    doc, group.get("associations", []), subject, "Other"
                                )
    return str(doc)
def write_report(subjects: List[str], groups: List[Dict], output: TextIO, format: str):
    """Serialize *groups* to *output* as json, yaml or html."""
    if format == "json":
        output.write(json_dumper.dumps(groups, inject_type=False))
        return
    if format == "yaml":
        output.write(yaml_dumper.dumps(groups))
        return
    if format == "html":
        output.write(generate_html(subjects, groups))
        return
    raise ValueError(f"Unsupported format: {format}")
| [
"noreply@github.com"
] | noreply@github.com |
aa2a3f50015d27ba5e13535865c82d7ddc196297 | 6a200b3c86329d501c9c8082fb69a512fa69228b | /tasksupervisor/endpoint/fiware_orion/orion_interface.py | de408a5dc5b0878f8f6a886bb523c3fd1e473f09 | [
"Apache-2.0"
] | permissive | iml130/mod.sw.tp.ts | 09792a33e4ec4e53ac89d59aa19cb4dae7762b90 | 4cda3ef0d3791eb204d5510631fdb9ec7ec57aab | refs/heads/develop | 2023-04-17T04:07:15.909996 | 2021-04-26T22:00:14 | 2021-04-26T22:00:14 | 335,564,272 | 0 | 1 | Apache-2.0 | 2021-04-26T22:00:15 | 2021-02-03T09:00:35 | Python | UTF-8 | Python | false | false | 6,795 | py | """ Contains OrionInterface class """
import threading
import logging
# import local libs
import tasksupervisor.my_globals as my_globals
from tasksupervisor.helpers import servercheck
from tasksupervisor.helpers.config_reader import ConfigReader
from tasksupervisor.endpoint.fiware_orion.flask.flask_setup import create_flask_app, FI_SUB_ID, FI_DATA
from tasksupervisor.endpoint.fiware_orion.entities.materialflow import Materialflow
from tasksupervisor.endpoint.fiware_orion.entities.sensor_agent_node import SensorAgent
from tasksupervisor.endpoint.fiware_orion.entities.materialflow_specification_state import MaterialflowSpecificationState
from tasksupervisor.endpoint.fiware_orion.entities.transport_order_update import TransportOrderUpdate
from tasksupervisor.endpoint.fiware_orion.entities.tasksupervisor_info import TaskSupervisorInfo
from tasksupervisor.endpoint.fiware_orion.entities.materialflow_update import MaterialflowUpdate
from tasksupervisor.endpoint.broker_interface import BrokerInterface
from tasksupervisor.endpoint.fiware_orion.contextbrokerhandler import ContextBrokerHandler
logger = logging.getLogger(__name__)
def callback_flask_server(flask_app):
    """ Thread target: serve the task-planner Flask application """
    logger.info("Starting thread_flask_server")
    cfg = my_globals.parsed_config_file
    flask_app.run(host=cfg.FLASK_HOST,
                  port=cfg.TASKPLANNER_PORT,
                  threaded=True, use_reloader=False, debug=True)
class OrionInterface(BrokerInterface):
    """ Implements the BrokerInterface for the Orion Context Broker """
    def __init__(self, broker_connector, broker_name = ""):
        # broker_name is accepted for interface compatibility but unused here.
        BrokerInterface.__init__(self, broker_connector)
        # Maps Orion subscription id -> subscribed entity class name; guarded
        # by self.lock because Flask callbacks run on worker threads.
        self.subscription_dict = {}
        self.flask_app = create_flask_app(self)
        self.lock = threading.Lock()
        config_file_path = "./tasksupervisor/config.ini"
        try:
            parsed_config_file = ConfigReader(config_file_path)
            parsed_config_file.is_valid()
        except Exception:
            raise Exception("Error while parsing Fiware config file")
        self.context_broker_handler = ContextBrokerHandler(parsed_config_file.get_fiware_server_address())
        # Two worker threads: one waits until the local web server answers,
        # the other runs the Flask notification endpoint itself.
        logger.info("Setting up thread_check_if_server_is_up")
        self.thread_check_if_server_is_up = threading.Thread(name="checkServerRunning",
                                                             target=servercheck.webserver_is_running,
                                                             args=("localhost", my_globals.parsed_config_file.TASKPLANNER_PORT,))
        logger.info("Setting up thread_flask_server")
        self.thread_flask_server = threading.Thread(name="callback_flask_server", target=callback_flask_server,
                                                    args=(self.flask_app,))
    def start_interface(self):
        """Start Flask and block until the server answers on localhost."""
        self.thread_check_if_server_is_up.start()
        self.thread_flask_server.start()
        logger.info("Starting Flask and wait")
        self.thread_check_if_server_is_up.join()
        logger.info("Flask is running")
    def subscribe(self, topic, opt_data=None, generic=False):
        """Create an Orion subscription for *topic* and return its id."""
        with self.lock:
            class_name = str(topic.__class__.__name__)
            description = class_name + " subscription"
            # Default notification URL is derived from the entity class name;
            # SensorAgent subscriptions are routed to a per-agent endpoint.
            notification = my_globals.parsed_config_file.get_taskplanner_address() + "/" + class_name.lower()
            if opt_data:
                description = opt_data.description
                if class_name == "SensorAgent":
                    notification = my_globals.parsed_config_file.get_taskplanner_address() + "/san/" + opt_data.to_id
            entities = [{"id": topic.id, "type": class_name}]
            sub_id = self.context_broker_handler.subscribe_to_entity(description, entities,
                                                                     notification, generic=generic)
            self.subscription_dict[sub_id] = class_name
            return sub_id
    def create(self, entity):
        """Create the Orion representation of *entity* in the broker."""
        fiware_entity = self.create_fiware_entity(entity)
        self.context_broker_handler.create_entity(fiware_entity)
    def update(self, entity):
        """Push an updated (re-timestamped) version of *entity* to Orion."""
        fiware_entity = self.create_fiware_entity(entity)
        fiware_entity.update_time()
        self.context_broker_handler.update_entity(fiware_entity)
    def delete(self, id_, delete_entity=True):
        """Delete an entity (default) or a subscription by *id_*."""
        with self.lock:
            if delete_entity:
                self.context_broker_handler.delete_entity(id_)
            else:
                self.context_broker_handler.delete_subscription_by_id(id_)
    def create_fiware_entity(self, entity):
        """Translate an API-level object into its Fiware/Orion counterpart.

        Raises ValueError for an unsupported entity class.
        """
        class_name = str(entity.__class__.__name__)
        fiware_entity = None
        if class_name == "MaterialflowSpecificationState":
            fiware_entity = MaterialflowSpecificationState.from_api_object(entity)
        elif class_name == "MaterialflowUpdate":
            fiware_entity = MaterialflowUpdate.from_api_object(entity)
        elif class_name == "TaskSupervisorInfo":
            fiware_entity = TaskSupervisorInfo.from_api_object(entity)
        elif class_name == "TransportOrderUpdate":
            fiware_entity = TransportOrderUpdate.from_api_object(entity)
        else:
            raise ValueError("Creation of fiware entity for unknown class was requested: {}".format(class_name))
        return fiware_entity
    def retreive(self, json_requests):
        # Note: "retreive" (sic) is the BrokerInterface method name and is
        # kept for compatibility.  Called by Flask on incoming notifications.
        with self.lock:
            subscription_id = json_requests[FI_SUB_ID]
            if subscription_id in self.subscription_dict:
                entity_type = self.subscription_dict[subscription_id]
                if entity_type == "Materialflow":
                    # it might be possible that there are multiple entities
                    # iterate over each json request
                    for temp_json_request in json_requests[FI_DATA]:
                        # create an entity from the json request
                        orion_materialflow = Materialflow.CreateObjectFromJson(temp_json_request)
                        api_materialflow = orion_materialflow.to_api_object()
                        self.broker_connector.retreive(api_materialflow, self)
                elif entity_type == "SensorAgent":
                    for temp_json_request in json_requests[FI_DATA]:
                        orion_sensor_agent = SensorAgent.create_object_from_json(temp_json_request)
                        api_sensor_agent = orion_sensor_agent.to_api_object()
                        self.broker_connector.retreive(api_sensor_agent, self)
                else:
                    raise ValueError("Data from an unknown subscription id was received: {}".format(subscription_id))
    def shutdown(self):
        """Forward shutdown to the context-broker handler."""
        self.context_broker_handler.shutdown()
| [
"noreply@github.com"
] | noreply@github.com |
fe1cc4e8b6b8201c08c79ccc09f50d705606c468 | 69e7dca194ab7b190e1a72928e28aa3821b47cfb | /Concepts/Strings/49.py | 579955f18e9b68d977d8b50ba8f8ff8b211b3947 | [] | no_license | Dinesh94Singh/PythonArchivedSolutions | a392891b431d47de0d5f606f7342a11b3127df4d | 80cca595dc688ca67c1ebb45b339e724ec09c374 | refs/heads/master | 2023-06-14T14:56:44.470466 | 2021-07-11T06:07:38 | 2021-07-11T06:07:38 | 384,871,541 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 611 | py | """
49. Group Anagrams
Given an array of strings, group anagrams together.
Example:
Input: ["eat", "tea", "tan", "ate", "nat", "bat"],
Output:
[
["ate","eat","tea"],
["nat","tan"],
["bat"]
]
Note:
All inputs will be in lowercase.
The order of your output does not matter.
"""
import collections
def group_anagrams(strs):
    """Group the words of *strs* that are anagrams of each other.

    Returns a list of lists; groups appear in first-occurrence order and
    words keep their input order within each group.
    """
    # Anagrams share the same multiset of letters, so the sorted letter
    # tuple is a canonical grouping key.
    dic = collections.defaultdict(list)
    for each_word in strs:
        dic[tuple(sorted(each_word))].append(each_word)
    # Idiom fix: return the grouped values directly instead of copying them
    # into an accumulator one by one (the key was unused).
    return list(dic.values())
group_anagrams(["eat", "tea", "tan", "ate", "nat", "bat"])
| [
"dinesh94singh@gmail.com"
] | dinesh94singh@gmail.com |
12360d4c69b79a4dda8833a2bc7d232357e2cee1 | d0a240a606f6de871197ab21ff911f94c6eb6e20 | /encoder.py | ba1215fdc88fed29ccbdbcc25a1cf565796be6cd | [] | no_license | PhucNguyen12038/RNN | e7bd2d8ed8fadab16af92257b9fc4b81b5e634e6 | e7c8fee7fca8e846ddc01e6c4e5a2adfb64fd65f | refs/heads/main | 2023-04-24T13:20:02.541340 | 2021-05-09T10:12:29 | 2021-05-09T10:12:29 | 349,739,483 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 375 | py | import json
import numpy as np
class NumpyEncoder(json.JSONEncoder):
    """JSON encoder that also serializes NumPy scalar and array types.

    Integers become Python ints, floats become Python floats, booleans
    become Python bools and arrays become (nested) lists; everything else
    falls through to the default encoder (which raises TypeError).
    """
    def default(self, obj):
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.bool_):
            # Generalization: np.bool_ is not a subclass of Python bool, so
            # the stock encoder rejects it.
            return bool(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return json.JSONEncoder.default(self, obj)
"nguyenhuuphuc12038@gmail.com"
] | nguyenhuuphuc12038@gmail.com |
6c04b7cbd139e06c8c9112a9e505b24f5d41fcbb | 3a9ed017ed45361811fee0980af2eaf1bd7e3624 | /homework3.py | 9b7384f0e0a60585a00be03fd7b61b7ea89d6117 | [] | no_license | harshsjani/State-space-search-Artificial-Intelligence | 058e97f1bda8ce2bc55f52aad9a8584f5a956944 | 5609dbaa92c44a8cc5c7df554f7edf0b69428cd0 | refs/heads/main | 2023-06-19T05:58:12.968683 | 2021-07-17T00:58:58 | 2021-07-17T00:58:58 | 337,557,565 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,604 | py | import os
from collections import deque
class Constants:
input_filename = "input.txt"
output_filename = "output.txt"
fail_output = "FAIL"
directions = [(0, 1), (1, 0), (0, -1), (-1, 0), (1, 1), (1, -1), (-1, 1), (-1, -1)]
class Solver:
def __init__(self):
self.parse_input()
self.paths_to_settling_sites = [None for _ in range(self.num_settling_sites)]
self.serialized_output = []
self.calls = {"BFS": self.bfs, "UCS": self.ucs, "A*": self.a_star}
def parse_input(self):
cwd = os.getcwd()
input_file_path = os.path.join(cwd, Constants.input_filename)
with open(input_file_path, 'r') as file:
self.algo = file.readline().strip("\n")
self.num_cols, self.num_rows = map(int, file.readline().split())
self.starting_x, self.starting_y = map(int, file.readline().split())
self.max_rock_height = int(file.readline())
self.num_settling_sites = int(file.readline())
self.settling_sites = []
for _ in range(self.num_settling_sites):
self.settling_sites.append((map(int, file.readline().split())))
self.grid = [[0 for _ in range(self.num_cols)] for _ in range(self.num_rows)]
for idx in range(self.num_rows):
col_vals = map(int, file.readline().split())
for widx, val in enumerate(col_vals):
self.grid[idx][widx] = val
def serialize_outputs(self):
for path in self.paths_to_settling_sites:
if path is None:
self.serialized_output.append(Constants.fail_output)
else:
self.serialized_output.append(" ".join(["%s,%s" % cell for cell in path]))
def write_output(self):
cwd = os.getcwd()
output_file_path = os.path.join(cwd, Constants.output_filename)
self.serialize_outputs()
with open(output_file_path, "w") as file:
file.writelines('\n'.join(self.serialized_output))
def show_input(self):
print("Algorithm: %s\nW H: %d %d\nStarting Position: %d %d\nMaximum Rock Height Difference: %d\nNumber of Settling Sites: %d\nSettling Sites: %s" % (
self.algo, self.num_cols, self.num_rows, self.starting_x, self.starting_y, self.max_rock_height, self.num_settling_sites, self.settling_sites
))
print("\nGrid:")
for row in self.grid:
print(row)
def get_valid_neighbors(self, x, y):
neighbors = []
for i, j in Constants.directions:
p = x + i
q = y + j
cost = 14 if abs(i) == abs(j) else 10
if 0 <= p < self.num_cols and 0 <= q < self.num_rows:
cur_height = self.grid[y][x]
new_height = self.grid[q][p]
height_dif = 0
if cur_height < 0:
if new_height < 0:
height_dif = abs(cur_height - new_height)
else:
height_dif = abs(cur_height)
elif new_height < 0:
height_dif = abs(new_height)
if height_dif <= self.max_rock_height:
neighbors.append((p, q, cost))
return neighbors
def bfs(self):
sx, sy = self.starting_x, self.starting_y
open = deque()
open.append((sx, sy))
visited = set([sx, sy])
parentpointer = {}
while open:
temp = deque()
while open:
nx, ny = open.popleft()
for p, q, _ in self.get_valid_neighbors(nx, ny):
if (p, q) not in visited:
visited.add((p, q))
parentpointer[(p, q)] = (nx, ny)
temp.append((p, q))
open = temp
for idx, (x, y) in enumerate(self.settling_sites):
path = []
if (x, y) not in parentpointer:
continue
while (x, y) != (sx, sy):
path.append((x, y))
x, y = parentpointer[(x, y)]
path.append((sx, sy))
self.paths_to_settling_sites[idx] = reversed(path)
    def ucs(self):
        """Uniform-cost search -- not implemented yet (placeholder)."""
        print("SOLVING UCS")
    def a_star(self):
        """A* search -- not implemented yet (placeholder)."""
        print("SOLVING A*!")
    def solve(self):
        """Dispatch to the algorithm named in the input file.

        Uses the self.calls lookup table -- presumably mapping algorithm
        names to the bound methods above; built elsewhere (confirm against
        __init__).
        """
        self.calls[self.algo]()
def main():
    """Build a Solver from the input file, run it, and write the output."""
    solver = Solver()
    solver.solve()
    solver.write_output()
    # solver.show_input()
if __name__ == "__main__":
    main()
| [
"whizkid@gmail.com"
] | whizkid@gmail.com |
00fb40dc657af4f71f40f6e500abe5ae5e629c29 | 66c8b9ee95b951a60847cfabad08250e65289812 | /src/stringMatching.py | 3d448a7161cd94aaf929827be219ed63195eda51 | [] | no_license | sampanayak/ccfl-elastic | 707206c2c08cc63d67bd9846fc38fce078b3b091 | c717cce1855de0dce6d4b9c3b709a94331db55c6 | refs/heads/master | 2021-07-24T15:14:29.855623 | 2020-04-28T19:10:01 | 2020-04-28T19:10:01 | 157,615,068 | 0 | 0 | null | 2018-11-14T21:44:36 | 2018-11-14T21:44:35 | null | UTF-8 | Python | false | false | 529 | py | #contents = []
import re

# gene.txt layout (inferred): line 0 is skipped, then lines come in pairs
# of (text, pattern).  For each pair, print both lines followed by the
# start index of every occurrence of the pattern in the text.
with open('gene.txt') as f:
    lines = f.readlines()
i = 1  # NOTE(review): line 0 is intentionally skipped -- presumably a header.
sizeOflines = len(lines)
# BUG FIX: bound on i + 1 so a trailing unpaired line cannot trigger an
# IndexError when lines[i] is read after the first increment.
while i + 1 < sizeOflines:
    # BUG FIX: strip the trailing newline; otherwise the pattern read from
    # the file ends in '\n' and (almost) never matches.
    fullString = lines[i].rstrip('\n')
    i += 1
    sub = lines[i].rstrip('\n')
    print(fullString)
    print(sub)
    i += 1
    # BUG FIX: the original called re.finditer('sub', 'fullString'), i.e.
    # it searched the literal text 'sub' inside the literal text
    # 'fullString' instead of using the variables.  re.escape keeps any
    # regex metacharacters in the pattern from being interpreted.
    for match in re.finditer(re.escape(sub), fullString):
        print(match.start())
| [
"31867379+sampanayak@users.noreply.github.com"
] | 31867379+sampanayak@users.noreply.github.com |
2d9c96d214896f6dc2b7daa6ac2d6b09a83c5fe6 | 00acd54857f007df1ab2cacfaf48954d7d8a1cd2 | /cp_2/sporysh_fb-95_cp2/lab2_encrypt.py | 1d270d92d444d9f042abce5c435ab6df7901af6e | [] | no_license | Zhek0nBek0n/fb-labs-2021 | 2cc4a20680148a6e6e296f3c37034da8d23d8d5e | 2c14c112ee23cc617227db41283799c3fb9271ad | refs/heads/master | 2023-09-05T20:27:27.289457 | 2021-11-03T15:25:26 | 2021-11-03T15:25:26 | 413,180,574 | 0 | 0 | null | 2021-10-03T19:46:06 | 2021-10-03T19:46:05 | null | UTF-8 | Python | false | false | 965 | py | import numpy as np
import time
import matplotlib.pyplot as plt
import os
from Decryptor import lab_2_Decryptor as L2D
from Cryptor import lab_2_Cryptor as L2C
if __name__ == '__main__':
start_time = time.time()
cryptor = L2C("./voina_i_mir_small.txt")
keys = "да нет киви жалко " \
"приветкиви " \
"приветжалко " \
"приветкивида " \
"приветжалкода " \
"приветжалконет " \
"приветкивижалко " \
"приветкивикивида " \
"приветкивикивинет " \
"приветкивижалконет " \
"приветжалкожалконет " \
"приветивижалкокивида"
keys = keys.split(" ")
for key in keys:
text = cryptor.encrypt(key)
with open(f"./examples/{key}.txt", 'wb') as outfile:
outfile.write(cryptor.toStr(text).encode("utf-8"))
print(cryptor.toStr(text)[:10])
| [
"sporyshzhenya@gmail.com"
] | sporyshzhenya@gmail.com |
bb926ed9470058408a9f838241a53266d6394661 | 3411c5b7b6821fb7ea5c836b47395cd6692cfc66 | /single.py | 9066023ec75cf1fab996795331007848d92db18d | [] | no_license | sparticlesteve/py-mt-example | 410dbd057e77bee83793ccf244a52ac7cbf5a5af | 349fb2e674388a0e50ad6dd881de0a57c0e72703 | refs/heads/master | 2020-04-28T23:18:10.610223 | 2015-04-04T22:19:44 | 2015-04-04T22:19:44 | 33,418,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 787 | py | #!/usr/bin/env python3
import logging
import os
from time import time
from download import setup_download_dir, get_links, download_link
logging.basicConfig(level=logging.DEBUG,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging.getLogger('requests').setLevel(logging.CRITICAL)
logger = logging.getLogger(__name__)
def main():
ts = time()
client_id = os.getenv('IMGUR_CLIENT_ID')
if not client_id:
raise Exception('Couldn\'t find IMGUR_CLIENT_ID environment variable!')
download_dir = setup_download_dir()
links = [l for l in get_links(client_id) if l.endswith('.jpg')]
for link in links:
download_link(download_dir, link)
print('Took {}s'.format(time() - ts))
if __name__ == '__main__':
main()
| [
"sfarrell@cern.ch"
] | sfarrell@cern.ch |
d2087a647ace24760336a7b0244273fd6458835a | 144151dba4a365018a0e109d3173c1af0ea8f149 | /scan.py | 0ed330ae2cb006f7353a05af74ff1225bda113aa | [] | no_license | miguelps/document_detection | 1e24035374e63d9234da066718527d5fffa190fb | 9f6afc62481dd7a90259e1064a8b4de65f30446d | refs/heads/master | 2021-10-26T16:01:11.459227 | 2019-04-13T17:07:48 | 2019-04-13T17:07:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,575 | py | from imutils.perspective import four_point_transform
from skimage.filters import threshold_local
#from ocr.helpers import implt
import numpy as np
import cv2
import imutils
img = cv2.imread('images/page.jpg')
ratio = img.shape[0] / 500.0
orig = img.copy()
img = imutils.resize(img, height = 500)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (5, 5), 0)
invGamma = 1.0 / 0.3
table = np.array([((i / 255.0) ** invGamma) * 255
for i in np.arange(0, 256)]).astype("uint8")
gray = cv2.LUT(gray, table)
ret,thresh1 = cv2.threshold(gray,50,255,cv2.THRESH_BINARY)
_, contours, hierarchy = cv2.findContours(thresh1, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
def biggestRectangle(contours):
    """Return (index, approx_polygon) for the largest contour.

    Contours with area <= 100 are ignored as noise.  The polygon is the
    Douglas-Peucker approximation at 10% of the contour perimeter.
    Returns (-1, None) when no contour qualifies.
    """
    best_index = -1
    best_poly = None
    best_area = 0
    for idx, contour in enumerate(contours):
        area = cv2.contourArea(contour)
        if area <= 100:  # skip tiny speckle contours
            continue
        perimeter = cv2.arcLength(contour, True)
        polygon = cv2.approxPolyDP(contour, 0.1 * perimeter, True)
        if area > best_area:  # the original also had len(approx)==4 here, commented out
            best_poly = polygon
            best_area = area
            best_index = idx
    return best_index, best_poly
# Draw the bounding box of the largest contour on the preview image.
# NOTE(review): if no contour exceeded area 100, indexReturn is -1 and
# contours[indexReturn] silently picks the *last* contour -- confirm the
# input always contains a page-sized contour.
indexReturn, biggest = biggestRectangle(contours)
x,y,w,h = cv2.boundingRect(contours[indexReturn])
cv2.rectangle(img,(x,y),(x+w,y+h),(0,255,0),2)
#implt(img, t='Result')
cv2.imwrite('test1.jpg',img)
# Perspective-correct the full-resolution original using the four corner
# points scaled back up by `ratio`.  Assumes the approximated polygon has
# exactly 4 points -- reshape(4, 2) raises otherwise.
warped = four_point_transform(orig, biggest.reshape(4, 2) * ratio)
#implt(warped, t='Result')
cv2.imwrite('test.jpg',warped)
| [
"noreply@github.com"
] | noreply@github.com |
0b6ed03178625556243a0a366ec1c4729202755e | 93a35763a4fe06d6d7daa17166f7a899420905c0 | /SRC/create_output.py | 43256507f3904e77134d8e6a0b5157adad20be70 | [] | no_license | uscoburgo/project-pipelines | 43308d392d4c515204065cb1dec114449c38feea | aba5808a4d93d20e582155610634cc80657c2055 | refs/heads/master | 2022-11-11T15:43:30.782726 | 2020-06-23T01:47:12 | 2020-06-23T01:47:12 | 273,896,097 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,792 | py | import pandas as pd
import numpy as np
import re
import requests
from bs4 import BeautifulSoup
from alpha_vantage.timeseries import TimeSeries
from alpha_vantage.techindicators import TechIndicators
from plots_stocks import plotSentiment
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
import os
from dotenv import load_dotenv
load_dotenv()
def CreateReport(stock=None):
    """Build a short sentiment/price report for one stock ticker.

    Reads the pre-computed analysis table ../INPUT/df_analysis.csv and, if
    `stock` matches a Ticker row, returns a two-line summary of tweet
    sentiment counts and the high/low price.  Returns an apology string
    for an unknown ticker.  When `stock` is falsy, execution falls through
    to leftover code that raises NameError (see notes below).
    """
    #Creates a report filtering the database according to the parameters given
    data = pd.read_csv("../INPUT/df_analysis.csv")
    df = pd.DataFrame(data)
    outputString=''
    if stock:
        try:
            df_stock = df[df['Ticker'] == stock]
            # Column layout assumed: 1 = positive count, 2 = negative
            # count, 3 = low price, 4 = high price -- TODO confirm against
            # the CSV header.
            positive=df_stock.iloc[0,1]
            negative=df_stock.iloc[0,2]
            low=df_stock.iloc[0,3]
            high=df_stock.iloc[0,4]
            outputString += f'For {stock}, we found {positive} tweets with positive sentiment and {negative} with negative sentiment.\n'
            outputString += f'The high price for {stock} was {high} and the low price was {low}.\n'
            return outputString
        except:
            # NOTE(review): this bare except also hides real failures (bad
            # CSV path, schema drift), not just unknown tickers.
            return "The ticker you entered has no sentiment analysis available"
    # NOTE(review): everything below is unreachable when `stock` is truthy
    # (both branches above return) and raises NameError (df_stock is
    # undefined) when it is falsy -- leftover/debug code from an earlier
    # project, kept as-is.
    #plot sentiment
    res=plotSentiment(df_stock)
    print(res)
    print("\n")
    """
    #-------------------------------------------------------------------------
    #basic statistics
    mean=df_analysis['DELAY'].mean()
    outputString += f'mean delay = {mean}\n'
    maxd=df_analysis['DELAY'].max()#show the maximum delay
    outputString += f'max delay = {maxd}\n'
    mind=df_analysis['DELAY'].min()#show the minimum delay
    outputString += f'min dealy = {mind}\n'
    stdd=df_analysis['DELAY'].std()
    outputString += f'std delay = {stdd}\n'
    #if the dataframe is empty it means the input parameters were not a valid input
    if df_analysis.shape[0]==0:
        outputString='There is no flight connection between these airports'
    print(outputString + '\n')
    """
    #------------------------------------------------------------------------------
    """
    #plotBestAirport
    if best:
        res=plotBestAirport(df_analysis)
        print(res)
        print('\n')
    else:
        res=plotWorstAirport(df_analysis)
        print(res)
        print('\n')
    ##################################################################################
    #PDF GENERATION
    generatePDF1()
    ##################################################################################
    #email
    filename = "OUTPUT/report.pdf"
    #address='tzvuccyseraf@gmail.com'
    address=input('insert your e-mail address: ')
    sendMail(address,filename,outputString)
    """
"uscoburgo@gmail.com"
] | uscoburgo@gmail.com |
0063e50243f7175fd73817b1c36e95c42296fd57 | a7c86066f6c5477b98cf82b9ead3532b2f942153 | /FP.py | 722088fcb732773535fb5f25399d590df4d551d2 | [] | no_license | TegarSU/K-Means | 171e261f22f4eb263eaca377efda0afee5256960 | ac15f22841c5d4f99606e7a528a5ae87095d3515 | refs/heads/master | 2020-04-24T12:40:52.238490 | 2019-02-22T18:34:08 | 2019-02-22T18:34:08 | 171,963,171 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,195 | py | import csv
import math
import copy
import random
def loadDataset(filename, k, dataSet=None):
    """Load every row of a CSV file into dataSet, ready for k-means.

    Each attribute column (all but the last, which holds the class label,
    e.g. "Iris-virginica") is min-max normalized by normalize() and
    converted to float, and a trailing 0 is appended to every row as the
    cluster-assignment slot.

    `k` is kept for interface compatibility but is not used here.
    Returns dataSet so the function is also usable without a pre-made
    list.  (The original computed `banyak`/`mulai` without using them;
    removed.)
    """
    # BUG FIX: the original default `dataSet=[]` is a mutable default
    # argument shared across calls; create a fresh list per call instead.
    if dataSet is None:
        dataSet = []
    with open(filename) as csvfile:
        rows = list(csv.reader(csvfile))
    normalize(rows)
    for row in rows:
        for col in range(len(row) - 1):  # skip the class-label column
            row[col] = float(row[col])
        row.append(0)  # slot for the cluster id assigned by k-means
        dataSet.append(row)
    return dataSet
def loadDataset2(filename, k, centroid=None):
    """Seed k initial centroids from evenly spaced rows of the CSV file.

    The file is normalized exactly as in loadDataset(); rows at indices
    0, step, 2*step, ... are used (step = len(rows) // k), except the last
    centroid, which is always the final row.  Each chosen row gets its
    attributes converted to float and a trailing 0 appended as the
    cluster-id slot.  Returns the centroid list.
    """
    # BUG FIX: the original default `centroid=[]` is a mutable default
    # argument shared across calls; create a fresh list per call instead.
    if centroid is None:
        centroid = []
    with open(filename) as csvfile:
        rows = list(csv.reader(csvfile))
    normalize(rows)
    step = len(rows) // k
    start = 0
    for c in range(k):
        row = rows[len(rows) - 1] if c == k - 1 else rows[start]
        for col in range(len(row) - 1):  # skip the class-label column
            row[col] = float(row[col])
        row.append(0)  # cluster-id slot
        centroid.append(row)
        start += step
    return centroid
def normalize(dataset):
    """Min-max scale every column except the last (class label), in place.

    Values are coerced to float.  A constant column is left at its float
    value unchanged, avoiding a division by zero.
    """
    num_attrs = len(dataset[0]) - 1
    for col in range(num_attrs):
        column = [float(row[col]) for row in dataset]
        lo, hi = min(column), max(column)
        span = hi - lo
        for row_idx, value in enumerate(column):
            dataset[row_idx][col] = value if span == 0 else (value - lo) / span
def carijarak(dataset, centroid):
    """Euclidean distance between a data row and a centroid.

    Only the first len(dataset) - 2 entries are compared: the final two
    slots hold the class label and the assigned cluster id.  (If the data
    had no class column, this would be len - 1 instead.)
    """
    n_attrs = len(dataset) - 2
    squared = sum((dataset[i] - centroid[i]) ** 2 for i in range(n_attrs))
    return math.sqrt(squared)
def carikelas(dataset, k, centroid):
    """Return the 1-based index of the centroid closest to the data row.

    Compares the row against the first k centroids using carijarak().
    Returns 0 only when k == 0 (no centroid considered).
    """
    # IDIOM FIX: use float('inf') as the "no distance seen yet" sentinel
    # instead of the magic constant 9223372036854775807 (2**63 - 1), which
    # would silently misbehave for any distance above it.
    terpendek = float('inf')
    kelas = 0
    for idx in range(k):
        jarak = carijarak(dataset, centroid[idx])
        if jarak < terpendek:
            terpendek = jarak
            kelas = idx + 1
    return kelas
def printdataset(dataset):
    """Print each row of the dataset (or centroid list) on its own line."""
    for row in dataset:
        print(row)
def updatecentroid(dataset,k,centroid=[]):
    """Recompute each centroid as the mean of the rows assigned to it.

    Mutates `centroid` in place.  Row layout: attributes ..., class label
    at index -2, cluster id at index -1; each centroid row reuses its last
    slot as the member counter while averaging.

    NOTE(review):
    - `centroid=[]` is a mutable default argument (shared across calls);
      callers here always pass the list explicitly, but this is fragile.
    - a cluster with zero members makes the final division raise
      ZeroDivisionError (the classic empty-cluster k-means case).
    - `awal` is assigned but never used.
    """
    awal=[]
    # Zero every centroid entry, including the last slot (reused below as
    # the per-cluster member counter).
    for x in range(k):
        for y in range(len(centroid[x])):
            centroid[x][y]=0
    atribut=len(dataset[0])
    #print atribut
    for x in range(len(dataset)):#accumulate per-cluster attribute totals
        kls=dataset[x][atribut-1]
        for y in range(atribut-2):#use -1 instead if the data has no class column
            centroid[kls-1][y]=centroid[kls-1][y]+dataset[x][y]
        centroid[kls-1][atribut-1]=centroid[kls-1][atribut-1]+1#member count in the last slot
    for x in range(k):#divide totals by member counts to get the means
        for y in range(atribut-2):#use -1 instead if the data has no class column
            centroid[x][y]=centroid[x][y]/centroid[x][atribut-1]
#def centroidakhir(oldcen[],centriod[]):
def main():
    """Interactive k-means driver: load a CSV, cluster it, print results.

    Prompts (in Indonesian) for the input file name and the cluster count
    k, seeds centroids from evenly spaced rows, then repeats the
    assign-clusters / update-centroids cycle until no assignment changes.
    """
    masuk=input("Masukkan Data : ")
    k=input("Jumlah Kelas yang Diinginkan : ")
    k=int(k)
    dataset=[]
    centroid=[]
    loadDataset(masuk,k,dataset)
    #######################Run the K Means############################################
    #loadDataset2(masuk,k,centroid)
    pick = input("Input 0 untuk Centroid random :")
    if(pick=="0"):
        loadDataset2(masuk,k,centroid)
    # NOTE(review): no else branch -- any answer other than "0" leaves
    # `centroid` empty and carikelas() below then raises IndexError.
    #else
    # Initial assignment pass: write each row's nearest cluster id into
    # its last slot.
    for x in range(len(dataset)):
        #print "---------start--------------"
        kelas=carikelas(dataset[x],k,centroid)
        dataset[x][len(dataset[x])-1]=kelas
        #print "----------end-------------"
    updatecentroid(dataset,k,centroid)#update the centroids
    while True:
        cek=1#1 = converged (no row changed cluster this pass)
        for x in range(len(dataset)):
            #print "---------start--------------"
            kelas=carikelas(dataset[x],k,centroid)
            if dataset[x][len(dataset[x])-1]!=kelas:
                cek=0
            dataset[x][len(dataset[x])-1]=kelas
            #print "----------end-------------"
        updatecentroid(dataset,k,centroid)#update the centroids
        #printdataset(centroid)
        if cek==1:
            #print "Converged"
            break
        #input()
    print ("===================Data Baru Setelah K Means============================")
    printdataset(dataset)
    print ("\nCentroid akhir :\n")
    printdataset(centroid)
main()
| [
"noreply@github.com"
] | noreply@github.com |
9da3525a508b015180d6cba65f119f57588df51e | 608900d5be9f4d45fdfebda9822a314405754913 | /config/settings/test.py | 0e7b2de859fa698aba87ce0873244f80f8d82f1b | [
"MIT"
] | permissive | brahimchougrani/e-sanad | 2a204cd4198ab702638ff247850850379e2760ad | 0b3869a9cd07dd037421de7f562ffdc70ddb867c | refs/heads/master | 2022-12-05T16:57:02.707895 | 2020-08-23T12:43:55 | 2020-08-23T12:43:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,702 | py | """
With these settings, tests run faster.
"""
from .base import *  # noqa
from .base import env
# GENERAL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
SECRET_KEY = env(
    "DJANGO_SECRET_KEY",
    default="0hRchob8ySF9ncjpDqq4RdtY5WgRdRfGR8uAHx2r1IVMXSw0JM5KdBi1lvXDrL32",
)
# https://docs.djangoproject.com/en/dev/ref/settings/#test-runner
TEST_RUNNER = "django.test.runner.DiscoverRunner"
# CACHES
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#caches
# Per-process in-memory cache: fast and isolated between test runs.
CACHES = {
    "default": {
        "BACKEND": "django.core.cache.backends.locmem.LocMemCache",
        "LOCATION": "",
    }
}
# PASSWORDS
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers
# MD5 is cryptographically weak but very fast -- acceptable only because
# this settings module is used exclusively for tests.
PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"]
# TEMPLATES
# ------------------------------------------------------------------------------
# Wrap the last engine's loaders in the cached loader so each template is
# compiled only once per test run.
TEMPLATES[-1]["OPTIONS"]["loaders"] = [  # type: ignore[index] # noqa F405
    (
        "django.template.loaders.cached.Loader",
        [
            "django.template.loaders.filesystem.Loader",
            "django.template.loaders.app_directories.Loader",
        ],
    )
]
# EMAIL
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
# Outgoing mail is stored in memory (django.core.mail.outbox) so tests can
# assert on it.
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
# Your stuff...
# ------------------------------------------------------------------------------
| [
"brahimchiugrani31@gmail.com"
] | brahimchiugrani31@gmail.com |
13fc5bfd35ccc53b0ba4c7f355f221b3b9619a9a | 73004bfe307af66fc0486e4ce4d79c9f4f9c1158 | /messenger/user_profile/migrations/0008_remove_user_nick.py | 0d13178b86c66410baf21327d8c652902f180b55 | [] | no_license | ArtemCoolAc/2019-2-Atom-Backend-A-Kutuzov | a618662882448a0058208f1165697fe774568c58 | 54254aee1a7ff0e3920d9205f3ba57c2f77f3c3a | refs/heads/master | 2022-09-06T00:14:35.129550 | 2019-12-01T15:19:51 | 2019-12-01T15:19:51 | 210,880,063 | 0 | 0 | null | 2022-08-23T18:07:02 | 2019-09-25T15:33:04 | JavaScript | UTF-8 | Python | false | false | 329 | py | # Generated by Django 2.2.5 on 2019-11-13 17:30
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('user_profile', '0007_auto_20191113_1535'),
]
operations = [
migrations.RemoveField(
model_name='user',
name='nick',
),
]
| [
"us210716@yandex.ru"
] | us210716@yandex.ru |
7e32fb23b50d259b671d2a54a962904278c56be9 | 4c4b5bae788c4ac2029e975e814acdb0eef89b35 | /news_blog/migrations/0003_auto_20180210_1858.py | c8ddcd0d4a11276bf7b90bf789849804273711e1 | [] | no_license | barabashka7/DjangoProject | db76b7a7ea8be8af776b52516af6546eff577cc3 | efb268ebb2247daf51e69a727c4f8b0d129cb1b8 | refs/heads/master | 2021-05-05T06:02:02.502404 | 2018-02-18T07:36:57 | 2018-02-18T07:36:57 | 118,735,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 736 | py | # Generated by Django 2.0.2 on 2018-02-10 11:58
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('news_blog', '0002_auto_20180210_1856'),
]
operations = [
migrations.AlterField(
model_name='author',
name='id',
field=models.UUIDField(default=uuid.UUID('cdc7c68e-28ab-4d5b-b01e-bb4845fdc69f'), editable=False, primary_key=True, serialize=False),
),
migrations.AlterField(
model_name='comment',
name='id',
field=models.UUIDField(default=uuid.UUID('1867856a-1826-4b04-90af-f53c90736f74'), editable=False, primary_key=True, serialize=False),
),
]
| [
"vadikpro7@mail.ru"
] | vadikpro7@mail.ru |
91cfc2c67452f91c2a9477b90f68a5ca951e0c4a | 71053f65cad20188ae5182fce4c77a074fde4309 | /background-backup/hardware.py | 71a5393a9447748f831f99a7812aa47e0a3a70b1 | [] | no_license | Chenzhiyong47/Hello-World | f446b6ae4429e976ecafb27dec4fe4de63bceab3 | 5f41681a1630487af21d01013dba0618f8aebac9 | refs/heads/master | 2021-01-13T01:17:38.448915 | 2020-08-07T08:14:09 | 2020-08-07T08:14:09 | 81,441,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,304 | py | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Writing in newfile.py:
from hardware import Hardware
hardware = Hardware()
# Press "Ctrl+C" to end a progress.
hardware.ultrasound_A.test_distance()
hardware.ultrasound_B.test_distance()
hardware.ultrasound_C.test_distance()
hardware.ultrasound_D.test_distance()
Input in commanding script:
python3 newfile.py
"""
import RPi.GPIO as GPIO
from time import time, sleep
GPIO.setwarnings(False)
# To control motor to start or stop.
class Motor:
    """Motor driver on a single GPIO pin, active-low: LOW = run, HIGH = stop
    (start() drives LOW, stop() drives HIGH)."""
    def __init__(self, Control):
        # BCM pin number wired to the motor's control line.
        self.Control = Control
        self.init()
    def init(self):
        # Configure the pin as an output, initially HIGH (motor stopped).
        GPIO.setup(self.Control, GPIO.OUT, initial=GPIO.HIGH)
        sleep(0.05)  # brief settle delay after (re)configuring the pin
    def start(self):
        # Drive the control line LOW to switch the motor on.
        GPIO.output(self.Control, GPIO.LOW)
        sleep(0.01)
    def stop(self):
        # Drive the control line HIGH to switch the motor off.
        GPIO.output(self.Control, GPIO.HIGH)
        sleep(0.01)
# To use ultrasound to measure the distance
class Ultrasound:
    """HC-SR04-style ultrasonic distance sensor on a Trig/Echo GPIO pair."""
    def __init__(self, Trig, Echo):
        self.Trig = Trig  # BCM pin used to trigger a measurement
        self.Echo = Echo  # BCM pin carrying the echo pulse back
        self.init()
    def init(self):
        GPIO.setup(self.Trig, GPIO.OUT, initial=GPIO.LOW)
        GPIO.setup(self.Echo, GPIO.IN)
        sleep(0.08)  # let the sensor settle after pin setup
    '''
    Measurement/conversion routine for the ultrasonic ranging module.
    A trigger pulse of at least 10 us is sent on TRIG; the module then
    emits 8 cycles at 40 kHz and listens for the reflection.  Once an echo
    is detected, ECHO goes high for a time proportional to the measured
    distance, hence:
        distance = high-level time * speed of sound (34000 cm/s) / 2.
    Returns the measurement in centimetres.
    Here:
        t1 is the instant ECHO is first seen high,
        t2 is the instant ECHO drops back low,
        so t2 - t1 is the duration of the echo pulse.
    '''
    def get_distance_cm(self):
        # Send a ~120 us HIGH pulse on TRIG to start one measurement.
        GPIO.output(self.Trig, GPIO.HIGH)
        sleep(0.00012)
        GPIO.output(self.Trig, GPIO.LOW)
        # NOTE(review): both busy-wait loops below block forever if the
        # sensor never answers (no timeout).
        while not GPIO.input(self.Echo):
            pass
        t1 = time()
        while GPIO.input(self.Echo):
            pass
        t2 = time()
        distance = (t2 - t1) * 34000 / 2
        return float('%.1f' % distance)  # rounded to one decimal place
    def test(self):
        # Convenience helper: take and print a single reading.
        print("distance: " + str(self.get_distance_cm()))
# Object the ultrasound and motor
class Hardware:
    """Aggregates the board's peripherals: four ultrasonic sensors (A-D)
    and four motors (A-D), each bound to fixed BCM pin numbers."""
    def __init__(self):
        self.setmode_BCM()
        # Four ultrasound: A, B, C, D
        self.ultrasound_A = Ultrasound(Trig=2, Echo=3)
        self.ultrasound_B = Ultrasound(Trig=17, Echo=27)
        self.ultrasound_C = Ultrasound(Trig=10, Echo=9)
        self.ultrasound_D = Ultrasound(Trig=5, Echo=6)
        # Four motor: A, B, C, D
        self.motor_A = Motor(Control=14)
        self.motor_B = Motor(Control=15)
        self.motor_C = Motor(Control=18)
        self.motor_D = Motor(Control=23)
    def setmode_BCM(self):
        # Use Broadcom (BCM) pin numbering for all GPIO calls.
        GPIO.setmode(GPIO.BCM)
    def clearmode(self):
        # Release every GPIO pin claimed by this process.
        GPIO.cleanup()
| [
"chenzhiyong47@163.com"
] | chenzhiyong47@163.com |
3df81fa28702b06dbef1125fae7633efa3dc8cc4 | f81cb2e289ea1554c09164908c05dda1714005fc | /readSql_execute_in_DB/readScript.py | fe3ec84372868d68a7183ec7fccdd9f665b1a9ee | [] | no_license | priya100697/Python_ | 1a1997f409039c2406d3efea534e3e91e2cc91cc | 55c62b9b2999022b4407d0ac9ccea56a4535d3dd | refs/heads/master | 2022-12-18T03:05:31.651468 | 2020-09-20T18:21:11 | 2020-09-20T18:21:11 | 297,124,710 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | def readfile(path):
data = list()
buffer = str()
with open(path, "r") as file:
for line in file:
if not line.isspace():
if line.strip()[-1] == ";":
data.append(buffer + line.strip())
buffer = str()
else:
buffer = buffer + line.strip() + "\n"
return data
| [
"pandaypriya926@gmail.com"
] | pandaypriya926@gmail.com |
4413bba5a363984d44c924cc07685156b01cd5a9 | 5820d1c5d784f95257834eb42da8069905e2c426 | /ocear/preprocess/normalize.py | 3e6ea57f421dfec08681f6ef6d7122d157f4df94 | [
"MIT"
] | permissive | bartdegoede/ocear | b976133d81ed92fd651e9161bccd2d023a570f37 | 8b155457a9085df72bb6a84c6549abeabebf27ba | refs/heads/master | 2020-03-29T12:00:06.598638 | 2018-09-28T12:55:28 | 2018-09-28T12:55:28 | 149,880,597 | 0 | 0 | MIT | 2018-09-28T12:49:49 | 2018-09-22T13:43:21 | Python | UTF-8 | Python | false | false | 270 | py | import numpy as np
def normalize(image):
    """Linearly rescale an image so its pixel values span [0.0, 1.0].

    Raises Exception for a missing (None) or constant-valued image, since
    a flat image cannot be rescaled without dividing by zero.
    """
    if image is None:
        raise Exception('No valid image provided')
    lo, hi = np.min(image), np.max(image)
    if hi == lo:
        raise Exception('No valid image provided')
    shifted = image - lo
    return shifted / np.max(shifted)
| [
"bdegoede@gmail.com"
] | bdegoede@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.