text stringlengths 8 6.05M |
|---|
# -*- coding: utf-8 -*-
"""
An autorun directive with ``nobel_prize`` as the default directory
"""
from subprocess import check_output
from docutils.parsers.rst.directives import flag, unchanged
from autorun import RunBlock, LinksMixin, AddVars
from writefile import WriteFile
class PrizeRun(RunBlock):
    """Run directive that executes in the ``nobel_prize`` directory by default."""
    default_cwd = 'nobel_prize'
class PrizeWrite(WriteFile):
    """Write-file directive that targets the ``nobel_prize`` directory by default."""
    default_cwd = 'nobel_prize'
def backtick(*args, **kwargs):
    """Run a command via ``check_output`` and return its output as a
    stripped string (decoded as latin1, matching the original behavior)."""
    return check_output(*args, **kwargs).decode('latin1').strip()
class PrizeCommit(RunBlock, LinksMixin):
    """ Do a runblock with a commit in it somewhere """
    # Directive takes three positional arguments: name, date, time.
    required_arguments = 3
    default_cwd = 'nobel_prize'
    option_spec = {
        'linenos': flag,
        'hide': flag,
        'cwd': unchanged,
        'links_file': unchanged,
    }

    def run(self):
        # Unpack the three required directive arguments.
        name, date, time = self.arguments[0:3]
        assert len(self.arguments) == 3 or self.arguments[3] == 'bash'
        # Clear arguments so the parent RunBlock does not re-process them.
        self.arguments[:] = []
        env = self.state.document.settings.env
        # Add lines setting git dates, so the commit gets deterministic
        # author/committer timestamps.
        self.default_exe_pre = \
            """export GIT_AUTHOR_DATE="{date}T{time}"
export GIT_COMMITTER_DATE="{date}T{time}"
""".format(date=date, time=time)
        # Execute code, return nodes to insert
        nodes = RunBlock.run(self)
        # Get git commit hash of the commit the block just created
        _, cwd = env.relfn2path(self.options.get('cwd', self.default_cwd))
        commit = backtick(['git', 'rev-parse', 'HEAD'], cwd=cwd)
        # Insert into names dict
        self.add_var(name, commit)
        # Write links: full hash plus an abbreviated 7-char form.
        self.add_links({name: commit, name + '-7': commit[:7]})
        return nodes
class PrizeVars(AddVars):
    """Variable-adding directive that defaults to the ``nobel_prize`` directory."""
    default_cwd = 'nobel_prize'
def setup(app):
    """Register the nobel_prize directives with the Sphinx application."""
    directive_table = {
        'prizerun': PrizeRun,
        'prizecommit': PrizeCommit,
        'prizevars': PrizeVars,
        'prizewrite': PrizeWrite,
    }
    for directive_name, directive_class in directive_table.items():
        app.add_directive(directive_name, directive_class)
# vim: set expandtab shiftwidth=4 softtabstop=4 :
|
# Parking fee calculator: the first two hours are free, then each started
# hour costs 2 euros.
parking_length = float(input("How long are you parking for? (hours) "))
if parking_length <= 2:
    print("It's free!")
else:
    # Whole started hours beyond the two free ones, at 2 euro each.
    price = int(parking_length - 2) * 2
    # BUG FIX: the currency sign was mojibake ("โฌ" — the euro sign
    # mis-decoded through a Thai codepage); restored to the intended "€".
    print("Parking is €", price)
# Read a yyyy/mm/dd date and print the era label: dates after 2019-04-30
# print 'TBD', earlier dates print 'Heisei'.
date_parts = input().split('/')
date_as_number = int(''.join(date_parts))
if date_as_number > 20190430:
    print('TBD')
else:
    print('Heisei')
|
# Build SharePoint app-only credentials from the local settings module.
from settings import settings
from office365.runtime.auth.client_credential import ClientCredential

# Site URL and client id/secret come from the project settings.
site_url = settings.get('url')
credentials = ClientCredential(settings.get('client_credentials').get('client_id'),
                               settings.get('client_credentials').get('client_secret'))
# Alternative client contexts, kept for reference:
# ctx = ListDataService(site_url)
# ctx = ClientContext(site_url).with_credentials(credentials)
# ctx.execute_query()
|
from pyramid.security import remember
from pyramid.security import forget
from pyramid.httpexceptions import HTTPFound
from pyramid.view import forbidden_view_config
from shared.LogParser import LogParser
from frontend_manager.py.utils.security import USERS
from frontend_manager.py.utils.security import check_password
# ------------------------------------------------------------------
class ViewManager:
    """Views for the frontend web addresses.

    Each ``view_*`` method either returns an ``HTTPFound`` redirect or a
    template-context dict.  The seven context keys shared by every view are
    built by :meth:`_view_context`; individual views add ``location`` or
    ``came_from`` on top.  (Refactored: the same dict was previously built
    inline four times.)
    """

    # ------------------------------------------------------------------
    def __init__(self, base_config, *args, **kwargs):
        self.log = LogParser(base_config=base_config, title=__name__)
        self.log.info([['y', " - ViewManager - "], ['g', base_config.site_type]])

        self.base_config = base_config
        self.app_prefix = base_config.app_prefix
        self.site_type = base_config.site_type
        self.websocket_route = base_config.websocket_route

    # ------------------------------------------------------------------
    def get_display_user_id(self, request):
        """Authenticated user id, or '' when not logged in."""
        user_id = request.authenticated_userid
        return ('' if user_id is None else user_id)

    # ------------------------------------------------------------------
    def get_display_user_group(self, request):
        """Name of the last 'group:'-prefixed principal, or ''."""
        user_group = ''
        for princ in request.effective_principals:
            if princ.startswith('group:'):
                user_group = princ[len('group:'):]
        return str(user_group)

    # ------------------------------------------------------------------
    def _view_context(self, request, view_name):
        """Template context keys common to all views."""
        return dict(
            ns_type=self.site_type,
            websocket_route=self.websocket_route['client'],
            widget_type=view_name,
            app_prefix=self.app_prefix,
            login=request.authenticated_userid,
            display_user_id=self.get_display_user_id(request),
            display_user_group=self.get_display_user_group(request),
        )

    # ------------------------------------------------------------------
    # login page with authentication - check the DB for
    # the given user_id/password
    # ------------------------------------------------------------------
    def view_login(self, request):
        view_name = "login"

        # if already logged in, go to the index
        if request.authenticated_userid is not None:
            return HTTPFound(location=request.route_url("index"))

        # perform the authentication check against the DB
        if 'user_name' in request.params and 'password' in request.params:
            user_name = request.params['user_name']
            password = request.params['password']
            hashed_pw = USERS.get(user_name)
            if hashed_pw and check_password(password, hashed_pw):
                headers = remember(request, user_name)
                return HTTPFound(location=request.route_url("index"), headers=headers)

        context = self._view_context(request, view_name)
        context['location'] = request.route_url(view_name)
        return context

    # ------------------------------------------------------------------
    def view_logout(self, request):
        """logout page with a redirect to the login
        """
        # forget the current logged-in user
        headers = forget(request)
        # redirect to the login page
        return HTTPFound(location=request.route_url("index"), headers=headers)

    # ------------------------------------------------------------------
    @forbidden_view_config()
    def view_forbidden(self, request):
        """forbidden view redirects to the login
        """
        return HTTPFound(location=request.route_url("login"))

    # ------------------------------------------------------------------
    def view_index(self, request):
        """index, empty, not-found
        """
        view_name = "index"
        context = self._view_context(request, view_name)
        context['came_from'] = request.route_url(view_name)
        return context

    # ------------------------------------------------------------------
    def view_empty(self, request):
        """redirects
        """
        return HTTPFound(location=request.route_url("index"))

    def view_not_found(self, request):
        """redirects
        """
        view_name = "not_found"
        context = self._view_context(request, view_name)
        context['location'] = request.route_url(view_name)
        return context

    # ------------------------------------------------------------------
    def view_common(self, request):
        """the widgets
        """
        view_name = request.matched_route.name
        context = self._view_context(request, view_name)
        context['came_from'] = request.route_url(view_name)
        return context
|
import torch
from torch import nn, distributions
from yarp.policies.base import Policy
from drivingenvs.envs.driving_env_with_vehicles import DrivingEnvWithVehicles
from drivingenvs.priors.lane_following import LaneFollowing
from drivingenvs.priors.idm import IDM
from drivingenvs.vehicles.ackermann import AckermannSteeredVehicle
class LaneFollowingIDM(Policy):
    """
    Prior policy for distance-keeping based on Intelligent Driver Model.

    Combines the IDM acceleration distribution with the lane-following
    steering distribution into a single 2-D Normal over [acc, steer].
    """
    def __init__(self, env, dist=5.0, headway=0.5, braking=2.0, acc_std=0.5, lookahead=10.0):
        self.idm = IDM(env, dist, headway, braking, acc_std)
        self.lane_following = LaneFollowing(env, lookahead)

    def forward(self, obs):
        """Combine the two sub-policies into one 2-D action distribution."""
        acc_dist = self.idm.forward(obs)
        steer_dist = self.lane_following.forward(obs)
        # Column 0 from IDM (acceleration), column 1 from lane following (steering).
        combined_mean = torch.stack([acc_dist.mean[:, 0], steer_dist.mean[:, 1]], dim=1)
        combined_scale = torch.stack([acc_dist.scale[:, 0], steer_dist.scale[:, 1]], dim=1)
        return distributions.normal.Normal(combined_mean, combined_scale)

    def action(self, obs, deterministic=False):
        """
        Given a single observation, return a single action.
        """
        dist = self.forward(obs.unsqueeze(0))
        chosen = dist.mean if deterministic else dist.sample()
        return chosen.squeeze()

    def actions(self, obs, deterministic=False):
        """
        Given observations in batch, return an action for each observation.
        """
        dist = self.forward(obs)
        return dist.mean if deterministic else dist.sample()

    def cuda(self):
        """
        Move policy to GPU.  No-op: this policy holds no learnable tensors.
        """

    def cpu(self):
        """
        Move policy to CPU.  No-op: this policy holds no learnable tensors.
        """
|
# Project Euler 11: greatest product of four adjacent numbers in a 20x20
# grid, in any of the four directions. Reads 20 space-separated rows from stdin.
grid = []
ans = []
for _ in range(20):
    grid.append(list(map(int, input().split())))
# Horizontal runs of four.
for i in grid:
    for j in range(3, 20):
        ans.append(i[j-3]*i[j-2]*i[j-1]*i[j])
# Vertical runs of four.
# BUG FIX: this loop previously recomputed horizontal products
# (grid[j][i-3]..grid[j][i]) so vertical runs were never examined.
for i in range(3, 20):
    for j in range(20):
        ans.append(grid[i-3][j]*grid[i-2][j]*grid[i-1][j]*grid[i][j])
# Down-right diagonals.
for i in range(17):
    for j in range(17):
        ans.append(grid[i][j]*grid[i+1][j+1]*grid[i+2][j+2]*grid[i+3][j+3])
# Down-left (anti) diagonals.
for i in range(3, 20):
    for j in range(17):
        ans.append(grid[i][j]*grid[i-1][j+1]*grid[i-2][j+2]*grid[i-3][j+3])
print(max(ans))
# Generated by Django 2.1.5 on 2020-04-01 21:14
from django.db import migrations
class Migration(migrations.Migration):
    # Renames the `ok` field of the `users` model to `user_lists`.
    # Schema-only change; no data transformation.

    dependencies = [
        ('API', '0002_auto_20200402_0229'),
    ]

    operations = [
        migrations.RenameField(
            model_name='users',
            old_name='ok',
            new_name='user_lists',
        ),
    ]
|
# Copyright 2020, Alex Badics, All Rights Reserved
from typing import Tuple, Dict, List, Type, ClassVar, Union
from pathlib import Path
from abc import ABC, abstractmethod
import gzip
import json
from collections import defaultdict
import yaml
import attr
from hun_law.structure import \
Act, Article, Paragraph, SubArticleElement, BlockAmendmentContainer, \
StructuralElement, Subtitle, Book,\
Reference, StructuralReference, SubtitleArticleComboType, \
EnforcementDate, \
SemanticData, Repeal, TextAmendment, ArticleTitleAmendment, BlockAmendment, \
SubArticleChildType
from hun_law.utils import Date, identifier_less
from hun_law.parsers.semantic_parser import ActSemanticsParser, SemanticParseState
from hun_law import dict2object
from ajdb.structure import ConcreteEnforcementDate, \
ArticleWM, ArticleWMProxy, ActWM, ParagraphWM,\
SaeWMType, SaeMetadata, add_metadata, WM_ABLE_SAE_CLASSES, SAE_WM_CLASSES, \
ActSet
from ajdb.utils import iterate_all_saes_of_act, first_matching_index, evolve_into
from ajdb.fixups import apply_fixups
# Placeholder text used for provisions that are not (or no longer) in force.
NOT_ENFORCED_TEXT = ' '
# Converter between Act objects and plain dicts (JSON/YAML (de)serialization).
act_converter = dict2object.get_converter(Act)
@attr.s(slots=True, frozen=True, auto_attribs=True)
class EnforcementDateSet:
    """All enforcement dates of a single act: one default date, plus
    special dates that override it for specific references within the act."""
    # The enforcement date that applies unless a special entry overrides it.
    default: ConcreteEnforcementDate
    # (reference, date) pairs that override the default for parts of the act.
    specials: Tuple[Tuple[Reference, ConcreteEnforcementDate], ...]

    @classmethod
    def from_act(cls, act: Act) -> 'EnforcementDateSet':
        """Collect every EnforcementDate found in the act's semantic data."""
        default = None
        specials = []
        for sae in iterate_all_saes_of_act(act):
            assert sae.semantic_data is not None
            for semantic_data_element in sae.semantic_data:
                if isinstance(semantic_data_element, EnforcementDate):
                    concrete_ed = ConcreteEnforcementDate.from_enforcement_date(semantic_data_element, act.publication_date)
                    if semantic_data_element.position is None:
                        # Exactly one position-less EnforcementDate is allowed:
                        # it becomes the act-wide default.
                        assert default is None
                        default = concrete_ed
                    else:
                        ref = attr.evolve(semantic_data_element.position, act=act.identifier)
                        specials.append((ref, concrete_ed))
        assert default is not None, act.identifier
        # Invariants: specials never precede the default date and never
        # carry an end date of their own.
        assert all(default.from_date <= special.from_date for _, special in specials)
        assert all(special.to_date is None for _, special in specials)
        return EnforcementDateSet(default, tuple(specials))

    def sae_modifier(self, reference: Reference, sae: SaeWMType) -> SaeWMType:
        """Return `sae` with the applicable enforcement date set in its metadata."""
        applicable_ced = self.default
        # The last special whose reference contains `reference` wins.
        for ced_reference, ced in self.specials:
            if ced_reference.contains(reference):
                applicable_ced = ced
        return attr.evolve(
            sae,
            metadata=attr.evolve(
                sae.metadata,
                enforcement_date=applicable_ced
            )
        )

    def article_modifier(self, reference: Reference, article: ArticleWM) -> ArticleWM:
        """Apply sae_modifier to every SAE of `article` (children first)."""
        return article.map_recursive_wm(reference, self.sae_modifier, children_first=True)

    def interesting_dates(self) -> Tuple[Date, ...]:
        """Dates at which some part of the act changes enforcement state."""
        result = set()
        result.add(self.default.from_date)
        if self.default.to_date is not None:
            result.add(self.default.to_date)
        result.update(special.from_date for _, special in self.specials)
        return tuple(result)
@attr.s(slots=True, auto_attribs=True)
class ModificationApplier(ABC):
    """Base class for appliers of a single SemanticData modification
    (text amendment, repeal, block amendment, ...) to an act."""
    # The modification to be applied.
    modification: SemanticData = attr.ib()
    # The sub-article element the modification originates from.
    source_sae: SaeWMType = attr.ib()
    # The date at which the modification is being applied.
    current_date: Date = attr.ib()
    # Set to True by apply() when the modification actually took effect.
    applied: bool = attr.ib(init=False, default=False)

    @classmethod
    @abstractmethod
    def can_apply(cls, modification: SemanticData) -> bool:
        """Return True if this applier class handles `modification`."""
        pass

    @abstractmethod
    def apply(self, act: ActWM) -> ActWM:
        """Return a new act with the modification applied."""
        pass

    @property
    def priority(self) -> int:
        # Mostly meaningful in TextReplacementApplier
        # Higher means it has to be applied sooner
        return 0
@attr.s(slots=True, auto_attribs=True)
class TextReplacementApplier(ModificationApplier):
    """Applies TextAmendments and text-scoped Repeals as plain text
    replacement in the referenced element's text/intro/wrap_up fields."""
    # All three derived below from self.modification via attrs defaults.
    position: Reference = attr.ib(init=False)
    original_text: str = attr.ib(init=False)
    replacement_text: str = attr.ib(init=False)

    @position.default
    def _position_default(self) -> Reference:
        if isinstance(self.modification, TextAmendment):
            return self.modification.position
        assert isinstance(self.modification, Repeal) and isinstance(self.modification.position, Reference)
        return self.modification.position

    @original_text.default
    def _original_text_default(self) -> str:
        if isinstance(self.modification, TextAmendment):
            return self.modification.original_text
        if isinstance(self.modification, Repeal):
            assert self.modification.text is not None
            return self.modification.text
        raise TypeError("Unknown SemanticData type in TextReplacementApplier")

    @replacement_text.default
    def _replacement_text_default(self) -> str:
        if isinstance(self.modification, TextAmendment):
            return self.modification.replacement_text
        if isinstance(self.modification, Repeal):
            # Repealed text is replaced with a marker, not removed outright.
            return NOT_ENFORCED_TEXT
        raise TypeError("Unknown SemanticData type in TextReplacementApplier")

    @classmethod
    def can_apply(cls, modification: SemanticData) -> bool:
        return isinstance(modification, TextAmendment) or (isinstance(modification, Repeal) and modification.text is not None)

    def text_replacer(self, _reference: Reference, sae: SaeWMType) -> SaeWMType:
        """Do the replacement on one SAE; clears the cached semantic fields
        when anything changed, since they are now stale."""
        new_text = sae.text.replace(self.original_text, self.replacement_text) if sae.text is not None else None
        new_intro = sae.intro.replace(self.original_text, self.replacement_text) if sae.intro is not None else None
        new_wrap_up = sae.wrap_up.replace(self.original_text, self.replacement_text) if sae.wrap_up is not None else None
        if sae.text == new_text and sae.intro == new_intro and sae.wrap_up == new_wrap_up:
            # Nothing matched here; leave the element (and `applied`) untouched.
            return sae
        self.applied = True
        return attr.evolve(
            sae,
            text=new_text,
            intro=new_intro,
            wrap_up=new_wrap_up,
            semantic_data=None,
            outgoing_references=None,
            act_id_abbreviations=None,
        )

    def apply(self, act: ActWM) -> ActWM:
        return act.map_saes(self.text_replacer, self.position)

    @property
    def priority(self) -> int:
        # Sorting these modifications is needed because of cases like:
        # Original text: "This is ABC, and also ABC is important for ABCD reasons"
        # Replacement 1: ABC => DEF
        # Replacement 2: ABCD => DEFG
        # In the wrong order, this produces "This is DEF, and also DEF is important for DEFD reasons"
        # Higher means it has to be applied sooner
        return len(self.original_text)
@attr.s(slots=True, auto_attribs=True)
class ArticleTitleAmendmentApplier(ModificationApplier):
    """Applies ArticleTitleAmendments by text replacement in an article title."""
    @classmethod
    def can_apply(cls, modification: SemanticData) -> bool:
        return isinstance(modification, ArticleTitleAmendment)

    def modifier(self, _reference: Reference, article: ArticleWM) -> ArticleWM:
        """Replace the amended text in the title of one article."""
        assert isinstance(self.modification, ArticleTitleAmendment)
        assert article.title is not None
        # `applied` reflects whether the original text was actually present.
        self.applied = self.modification.original_text in article.title
        new_title = article.title.replace(self.modification.original_text, self.modification.replacement_text)
        return attr.evolve(
            article,
            title=new_title,
        )

    def apply(self, act: ActWM) -> ActWM:
        assert isinstance(self.modification, ArticleTitleAmendment)
        # Title amendments may only target whole articles.
        _, reference_type = self.modification.position.last_component_with_type()
        assert reference_type is Article
        return act.map_articles(self.modifier, self.modification.position)
# Children sequences that block operations can cut and splice.
CuttableChildrenType = Tuple[Union[SubArticleChildType, ArticleWMProxy], ...]


def get_cut_points_for_structural_reference(position: StructuralReference, children: CuttableChildrenType) -> Tuple[int, int]:
    """Return the [start, end) index range in `children` spanned by the
    structural element (Book, Subtitle, ...) that `position` refers to."""
    structural_id, structural_type_nc = position.last_component_with_type()
    assert structural_id is not None
    assert structural_type_nc is not None
    assert issubclass(structural_type_nc, StructuralElement)
    # Pypy does not properly infer the type of structural_type without this explicit assignment
    structural_type: Type[StructuralElement] = structural_type_nc
    start_cut = 0
    if position.book is not None:
        # Restrict the search to the referenced Book: skip children before it.
        book_id = position.book
        start_cut = first_matching_index(children, lambda c: bool(isinstance(c, Book) and book_id == c.identifier))
        assert start_cut < len(children)
    start_cut = first_matching_index(
        children,
        lambda c: isinstance(c, structural_type) and structural_id in (c.identifier, c.title),
        start=start_cut
    )
    # TODO: Insertions should be legal though, but this is most likely a mistake, so
    # keep an error until I find an actual insertion. It will need to be handled separately.
    assert start_cut < len(children), ("Only replacements are supported for structural amendments or repeals", position)
    # The element ends where the next element of the same type (or any of
    # its parent types) begins.
    end_cut = first_matching_index(
        children,
        lambda c: isinstance(c, (structural_type, * structural_type.PARENT_TYPES)),
        start=start_cut + 1
    )
    return start_cut, end_cut
@attr.s(slots=True, auto_attribs=True)
class RepealApplier(ModificationApplier):
    """Applies whole-element repeals: of SAEs, articles, or structural elements."""
    @classmethod
    def can_apply(cls, modification: SemanticData) -> bool:
        # Text-scoped repeals are handled by TextReplacementApplier instead.
        return isinstance(modification, Repeal) and modification.text is None

    def create_new_metadata(self, sae: SaeWMType) -> SaeMetadata:
        """Metadata for the repealed element: enforcement ends at current_date."""
        assert sae.metadata.enforcement_date is not None
        return SaeMetadata(
            enforcement_date=ConcreteEnforcementDate(
                from_date=sae.metadata.enforcement_date.from_date,
                to_date=self.current_date,
            )
        )

    def sae_repealer(self, _reference: Reference, sae: SaeWMType) -> SaeWMType:
        """Replace the SAE's content with the not-enforced marker text."""
        self.applied = True
        return sae.__class__(
            identifier=sae.identifier,
            text=NOT_ENFORCED_TEXT,
            semantic_data=(),
            outgoing_references=(),
            act_id_abbreviations=(),
            metadata=self.create_new_metadata(sae),
        )

    def article_repealer(self, _reference: Reference, article: ArticleWM) -> ArticleWM:
        """Replace the whole article body with one empty marker paragraph."""
        first_paragraph = article.children[0]
        assert isinstance(first_paragraph, ParagraphWM)
        self.applied = True
        return ArticleWM(
            identifier=article.identifier,
            children=(
                ParagraphWM(
                    text=NOT_ENFORCED_TEXT,
                    semantic_data=(),
                    outgoing_references=(),
                    act_id_abbreviations=(),
                    metadata=self.create_new_metadata(first_paragraph),
                ),
            ),
        )

    def apply_to_act(self, act: ActWM) -> ActWM:
        """Handle repeals that target structural elements (e.g. a Subtitle)."""
        assert isinstance(self.modification, Repeal)
        assert isinstance(self.modification.position, StructuralReference)
        position: StructuralReference = self.modification.position
        if position.special is None:
            start_cut, end_cut = get_cut_points_for_structural_reference(position, act.children)
        else:
            assert position.special.position == SubtitleArticleComboType.BEFORE_WITHOUT_ARTICLE, \
                "Only BEFORE_WITHOUT_ARTICLE is supported for special subtitle repeals for now"
            article_id = position.special.article_id
            end_cut = first_matching_index(act.children, lambda c: isinstance(c, (Article, ArticleWMProxy)) and c.identifier == article_id)
            if end_cut >= len(act.children):
                # Not found: probably an error. Calling code will Warn probably.
                return act
            # The subtitle to repeal is the single child just before the article.
            start_cut = end_cut - 1
        self.applied = True
        # TODO: Repeal articles instead of deleting them.
        return attr.evolve(act, children=act.children[:start_cut] + act.children[end_cut:])

    def apply(self, act: ActWM) -> ActWM:
        assert isinstance(self.modification, Repeal)
        if isinstance(self.modification.position, Reference):
            _, reference_type = self.modification.position.last_component_with_type()
            if reference_type is Article:
                return act.map_articles(self.article_repealer, self.modification.position)
            return act.map_saes(self.sae_repealer, self.modification.position)
        # StructuralReference positions are handled at the act level.
        return self.apply_to_act(act)
@attr.s(slots=True, auto_attribs=True)
class BlockAmendmentApplier(ModificationApplier):
    """Applies BlockAmendments: replaces or inserts whole blocks of
    children (SAEs, articles, or structural elements) in the target act."""
    # New children parsed out of the amendment's BlockAmendmentContainer.
    new_children: CuttableChildrenType = attr.ib(init=False)
    # Where the amendment applies within the target act.
    position: Union[Reference, StructuralReference] = attr.ib(init=False)
    # True when the amendment only inserts, without replacing anything.
    pure_insertion: bool = attr.ib(init=False)

    def sae_metadata_adder(self, _reference: Reference, sae: SubArticleElement) -> SubArticleElement:
        """Attach fresh metadata (in force from current_date) to a plain SAE."""
        if not isinstance(sae, WM_ABLE_SAE_CLASSES):
            return sae
        assert not isinstance(sae, SAE_WM_CLASSES)
        children_metadata = SaeMetadata(
            enforcement_date=ConcreteEnforcementDate(from_date=self.current_date)
        )
        return add_metadata(sae, metadata=children_metadata)

    @new_children.default
    def _new_children_default(self) -> CuttableChildrenType:
        # The amendment body is a single BlockAmendmentContainer whose
        # children are the elements to splice in.
        assert self.source_sae.children is not None
        assert len(self.source_sae.children) == 1
        block_amendment_container = self.source_sae.children[0]
        assert isinstance(block_amendment_container, BlockAmendmentContainer)
        assert block_amendment_container.children is not None
        result = []
        for child in block_amendment_container.children:
            if isinstance(child, WM_ABLE_SAE_CLASSES):
                child = child.map_recursive(Reference(), self.sae_metadata_adder, children_first=True)
            if isinstance(child, Article):
                # Articles also get converted to their metadata-carrying form.
                child = child.map_recursive(Reference(), self.sae_metadata_adder, children_first=True)
                child = evolve_into(child, ArticleWM)
            result.append(child)
        return tuple(result)

    @position.default
    def _position_default(self) -> Union[Reference, StructuralReference]:
        assert isinstance(self.modification, BlockAmendment)
        return self.modification.position

    @pure_insertion.default
    def _pure_insertion_default(self) -> bool:
        assert isinstance(self.modification, BlockAmendment)
        return self.modification.pure_insertion

    @classmethod
    def can_apply(cls, modification: SemanticData) -> bool:
        return isinstance(modification, BlockAmendment)

    def get_cut_points_for_reference(self, parent_reference: Reference, children: CuttableChildrenType) -> Tuple[int, int]:
        """[start, end) indices of the children covered by a plain Reference
        position (possibly a range of elements)."""
        assert isinstance(self.position, Reference)
        start_ref = self.position.first_in_range()
        end_ref = self.position.last_in_range()
        start_cut = first_matching_index(
            children,
            lambda c: bool(hasattr(c, 'relative_reference') and start_ref <= c.relative_reference.relative_to(parent_reference))
        )
        end_cut = first_matching_index(
            children,
            lambda c: bool(not hasattr(c, 'relative_reference') or end_ref < c.relative_reference.relative_to(parent_reference)),
            start=start_cut
        )
        # TODO: assert between start_cut == end_cut and pure_insertion
        # However if there is an act that marked an amendment an insertion
        # or vica-versa, that will need to be fixed.
        if start_cut == end_cut:
            # This is a quick hack and should be handled way better
            # Insertions should come before all structural elements.
            while start_cut > 0 and isinstance(children[start_cut-1], StructuralElement):
                start_cut -= 1
                end_cut -= 1
        return start_cut, end_cut

    def get_cut_points_for_special_reference(self, children: CuttableChildrenType) -> Tuple[int, int]:
        """Cut points for Subtitle+Article combo positions (special cases)."""
        assert isinstance(self.position, StructuralReference)
        assert self.position.special is not None
        article_id = self.position.special.article_id
        # Find the first article at or after the anchor article id.
        start_cut = first_matching_index(
            children,
            lambda c: isinstance(c, (Article, ArticleWMProxy)) and not identifier_less(c.identifier, article_id)
        )
        if start_cut < len(children) and children[start_cut].identifier == article_id:
            article_found = True
            end_cut = start_cut + 1
        else:
            article_found = False
            # This is a quick hack and should be handled way better
            # Insertions should come before all structural elements.
            while start_cut > 0 and isinstance(children[start_cut-1], StructuralElement):
                start_cut -= 1
            end_cut = start_cut
        if self.position.special.position == SubtitleArticleComboType.BEFORE_WITH_ARTICLE:
            # TODO: assert between article_found and pure_insertion
            if article_found:
                start_cut -= 1
                assert isinstance(children[start_cut], Subtitle), self.position
        elif self.position.special.position == SubtitleArticleComboType.BEFORE_WITHOUT_ARTICLE:
            assert article_found, "BEFORE_WITHOUT_ARTICLE needs an existing article"
            if self.pure_insertion:
                # Move the end cut above the article
                end_cut -= 1
            else:
                assert isinstance(children[start_cut-1], Subtitle), self.position
                # Move the cutting frame to the Subtitle itself
                start_cut -= 1
                end_cut -= 1
        elif self.position.special.position == SubtitleArticleComboType.AFTER:
            assert article_found, "AFTER needs an existing article"
            if self.pure_insertion:
                # Move the end cut below the article
                start_cut += 1
            else:
                assert isinstance(children[start_cut + 1], Subtitle)
                # Move the cutting frame to the Subtitle itself
                start_cut += 1
                end_cut += 1
        else:
            raise ValueError("Unhandled SubtitleArticleComboType", self.position.special.position)
        return start_cut, end_cut

    def compute_new_children(self, parent_reference: Reference, children: CuttableChildrenType) -> CuttableChildrenType:
        """Splice self.new_children into `children` at the computed cut points."""
        if isinstance(self.position, Reference):
            start_cut_point, end_cut_point = self.get_cut_points_for_reference(parent_reference, children)
        elif isinstance(self.position, StructuralReference) and self.position.special is not None:
            start_cut_point, end_cut_point = self.get_cut_points_for_special_reference(children)
        elif isinstance(self.position, StructuralReference):
            start_cut_point, end_cut_point = get_cut_points_for_structural_reference(self.position, children)
        else:
            raise ValueError("Unknown amendment position type", self.position)
        assert start_cut_point <= end_cut_point
        self.applied = True
        return children[:start_cut_point] + self.new_children + children[end_cut_point:]

    def apply_to_sae(self, reference: Reference, sae: SaeWMType) -> SaeWMType:
        """Apply inside the SAE that is the direct parent of the position."""
        assert isinstance(self.position, Reference)
        if reference != self.position.parent():
            return sae
        assert sae.children is not None
        return attr.evolve(sae, children=self.compute_new_children(reference, sae.children))

    def apply_to_article(self, reference: Reference, article: ArticleWM) -> ArticleWM:
        """Apply at the paragraph level within one article."""
        new_children = []
        for child in self.compute_new_children(reference, article.children):
            assert isinstance(child, ParagraphWM)
            new_children.append(child)
        return attr.evolve(article, children=tuple(new_children))

    def apply_to_act(self, act: ActWM) -> ActWM:
        """Apply at the act level (articles and structural elements)."""
        new_children = []
        for child in self.compute_new_children(Reference(act.identifier), act.children):
            assert isinstance(child, (ArticleWM, ArticleWMProxy, StructuralElement))
            new_children.append(child)
        return attr.evolve(act, children=tuple(new_children))

    def apply(self, act: ActWM) -> ActWM:
        """Dispatch to the right application level based on the position type."""
        if isinstance(self.position, Reference):
            expected_type = self.position.last_component_with_type()[1]
            assert expected_type is not None
            if expected_type is Article:
                return self.apply_to_act(act)
            if expected_type is Paragraph:
                article_ref = Reference(act.identifier, self.position.article)
                return act.map_articles(self.apply_to_article, article_ref)
            if issubclass(expected_type, SubArticleElement):
                return act.map_saes(self.apply_to_sae, self.position.parent())
            raise ValueError("Unknown reference type", self.position)
        return self.apply_to_act(act)
@attr.s(slots=True, frozen=True, auto_attribs=True)
class ModificationSet:
    """A batch of extracted modifications that can be applied to one act."""
    # Applier classes, tried in this order for each modification.
    APPLIER_CLASSES: ClassVar[Tuple[Type[ModificationApplier], ...]] = (
        TextReplacementApplier,
        ArticleTitleAmendmentApplier,
        RepealApplier,
        BlockAmendmentApplier,
    )
    modifications: Tuple[Tuple[SaeWMType, SemanticData], ...]

    def apply_all(self, act: ActWM, current_date: Date) -> ActWM:
        """Instantiate an applier per modification, run them in priority
        order (highest first), and warn about any that had no effect."""
        appliers: List[ModificationApplier] = sorted(
            (
                applier_cls(modification, source_sae, current_date)
                for applier_cls in self.APPLIER_CLASSES
                for source_sae, modification in self.modifications
                if applier_cls.can_apply(modification)
            ),
            key=lambda applier: applier.priority,
            reverse=True,
        )
        for applier in appliers:
            act = applier.apply(act)
            if not applier.applied:
                print("WARN: Could not apply ", applier.modification)
        return act
@attr.s(slots=True, auto_attribs=True)
class AmendmentAndRepealExtractor:
    """Walks an act and collects the modifications that are in force at a
    given date, grouped by the identifier of the act they modify."""
    # Date for the in-force check.
    at_date: Date
    # Identifier of the act being walked (used for self-repeals).
    act_identifier: str
    # target act id -> list of (source SAE, modification) pairs.
    modifications_per_act: Dict[str, List[Tuple[SaeWMType, SemanticData]]] = \
        attr.ib(init=False, factory=lambda: defaultdict(list))

    def sae_walker(self, reference: Reference, sae: SaeWMType) -> SaeWMType:
        """Collect modifications from one SAE; always returns `sae` unchanged."""
        if sae.semantic_data is None:
            return sae
        assert sae.metadata.enforcement_date is not None
        if not sae.metadata.enforcement_date.is_in_force_at_date(self.at_date):
            return sae
        for semantic_data_element in sae.semantic_data:
            if isinstance(semantic_data_element, EnforcementDate):
                continue
            # Type is ignored here, since all subclasses except for EnforcementDate
            # have a position field. Maybe this should be solved by introducing a class
            # in the middle with a position, but it isn't worth it TBH.
            # This will fail very fast and very loudly if there is a problem.
            modified_ref = semantic_data_element.position  # type: ignore
            assert modified_ref.act is not None
            self.modifications_per_act[modified_ref.act].append((sae, semantic_data_element))
        # Also queue a repeal of the amending provision itself in its own act.
        self.modifications_per_act[self.act_identifier].append((sae, Repeal(position=reference)))
        return sae

    @classmethod
    def get_amendments_and_repeals(cls, act: ActWM, at_date: Date) -> Dict[str, List[Tuple[SaeWMType, SemanticData]]]:
        """Convenience wrapper: walk `act` and return the collected mapping."""
        instance = cls(at_date, act.identifier)
        act.map_saes(instance.sae_walker)
        return instance.modifications_per_act
class ActConverter:
    """Loads/saves serialized hun_law Acts and converts them into
    metadata-carrying ActWM objects."""
    @classmethod
    def sae_metadata_adder(cls, _reference: Reference, sae: SubArticleElement) -> SubArticleElement:
        # Wrap metadata-capable SAEs with (empty) metadata; leave others alone.
        if not isinstance(sae, WM_ABLE_SAE_CLASSES):
            return sae
        assert not isinstance(sae, SAE_WM_CLASSES)
        return add_metadata(sae)

    @classmethod
    def article_modifier(cls, article: Article) -> ArticleWM:
        """Convert one plain Article into an ArticleWM with metadata slots."""
        article = article.map_recursive(Reference(), cls.sae_metadata_adder, children_first=True)
        article_wm: ArticleWM = evolve_into(article, ArticleWM)
        return article_wm

    @classmethod
    def load_hun_law_act(cls, path: Path) -> Act:
        """Load an Act from a .json, .json.gz or .yaml file."""
        if path.suffix == '.gz':
            with gzip.open(path, 'rt') as f:
                the_dict = json.load(f)
        elif path.suffix == '.yaml':
            with open(path, 'rt') as f:
                # NOTE(review): yaml.Loader can construct arbitrary Python
                # objects — only safe for trusted local files, never for
                # untrusted input.
                the_dict = yaml.load(f, Loader=yaml.Loader)
        else:
            with open(path, 'rt') as f:
                the_dict = json.load(f)
        result: Act = act_converter.to_object(the_dict)
        return result

    @classmethod
    def save_hun_law_act_json_gz(cls, path: Path, act: Act) -> None:
        """Serialize `act` as gzipped, sorted-key JSON at `path`."""
        with gzip.open(path, 'wt') as f:
            json.dump(act_converter.to_dict(act), f, indent=' ', sort_keys=True, ensure_ascii=False)

    @classmethod
    def convert_hun_law_act(cls, act: Act) -> ActWM:
        """Convert a parsed Act into an ActWM with enforcement metadata."""
        act = apply_fixups(act)
        enforcement_set = EnforcementDateSet.from_act(act)
        new_children: List[Union[StructuralElement, ArticleWM]] = []
        for c in act.children:
            if isinstance(c, Article):
                new_children.append(cls.article_modifier(c))
            else:
                new_children.append(c)
        result = ActWM(
            identifier=act.identifier,
            publication_date=act.publication_date,
            subject=act.subject,
            preamble=act.preamble,
            children=tuple(new_children),
            interesting_dates=enforcement_set.interesting_dates(),
        )
        # TODO: Use something like EnforcementDateSet.update_act instead;
        # it should set the interesting dates
        result = result.map_articles(enforcement_set.article_modifier)
        return result
class ActSetAmendmentApplier:
    """Applies amendments prescribed by the acts of an ActSet at a date."""
    @classmethod
    def add_semantics_to_act(cls, act: ActWM) -> ActWM:
        # This needs to be an almost copy of ActSemanticsParser.add_semantics_to_act
        state = SemanticParseState()

        def article_semantics_adder(_reference: Reference, article: ArticleWM) -> ArticleWM:
            result = ActSemanticsParser.add_semantics_to_article(article, state)
            assert isinstance(result, ArticleWM)
            return result
        return act.map_articles(article_semantics_adder)

    @classmethod
    def apply_single_act(cls, act_set: ActSet, amending_act: ActWM, date: Date) -> ActSet:
        """Apply every modification prescribed by `amending_act` to the acts
        in `act_set`, returning the updated set."""
        extracted_modifications = AmendmentAndRepealExtractor.get_amendments_and_repeals(amending_act, date)
        if not extracted_modifications:
            return act_set
        modified_acts: List[ActWM] = []
        for act_id, modifications in extracted_modifications.items():
            if not act_set.has_act(act_id):
                # Target act is not part of the set; nothing to modify.
                continue
            act = act_set.act(act_id)
            if act.identifier != amending_act.identifier:
                print("AMENDING ", act.identifier, "WITH", amending_act.identifier)
            modification_set = ModificationSet(tuple(modifications))
            act = modification_set.apply_all(act, date)
            # Texts may have changed, so semantic parsing must be redone.
            act = cls.add_semantics_to_act(act)
            modified_acts.append(act)
        return act_set.replace_acts(modified_acts)

    @classmethod
    def apply_all_amendments(cls, act_set: ActSet, date: Date) -> ActSet:
        """Apply, in order, every act of the set that is interesting at `date`."""
        for act in act_set.interesting_acts_at_date(date):
            act_set = cls.apply_single_act(act_set, act, date)
        return act_set
|
#!/usr/bin/env python
# Python 2 batch script: resizes camera frame images under INPUT_DIR to
# 320x180 copies under OUTPUT_DIR using a pool of worker processes that
# consume from a shared queue.
import subprocess
import os, sys, multiprocessing, traceback
from os.path import join, dirname, basename, realpath, splitext, isdir, isfile
from collections import namedtuple
from PIL import Image
BASE_DIR = dirname(realpath(__file__))  # directory containing this script
#OUTPUT_DIR = '/opt/for-resize'
OUTPUT_DIR = '/mnt/uray_resized'    # resized frames are written here
INPUT_DIR = '/mnt/20170823_frames'  # source frame directories live here
# Work queue shared by all workers; each item is one camera directory path.
q = multiprocessing.Queue()
def resize_img(img, save_path, size_tuple):
    """Resize the image file *img* to *size_tuple* (width, height) pixels
    and save the result to *save_path*.

    Note: appears unused at the moment -- frame_resize shells out to ffmpeg
    instead of calling this helper.
    """
    # The original computed img_dir/img_base_name/img_name/img_ext here and
    # never used them; that dead code has been removed.
    im = Image.open(img)
    im.resize(size_tuple).save(save_path)
def frame_resize(plock):
    # Worker-process loop: pulls camera directories from the module-level
    # queue `q` and converts every .jpg under <dir>/normal and <dir>/zoomin
    # into a 320x180 copy under OUTPUT_DIR/new/<dir>/... via ffmpeg.
    # `plock` only serializes the progress printing between workers.
    process_name = multiprocessing.current_process().name
    plock.acquire()
    print ">>> Process [ %s ] started!" % process_name
    plock.release()
    while True:
        try:
            # Blocks for up to 5 seconds; when the queue stays empty the
            # Queue.Empty timeout falls into the except below and the
            # worker exits.
            frame_dir = q.get(timeout=5)
            plock.acquire()
            print ">>> Process [ %s ] handler [ %s ]!" % (process_name, basename(frame_dir))
            plock.release()
            #new_frame_dir = join(BASE_DIR, "new", basename(frame_dir))
            new_frame_dir = join(OUTPUT_DIR, "new", basename(frame_dir))
            if not isdir(new_frame_dir):
                os.makedirs(new_frame_dir)
                os.makedirs( join(new_frame_dir, 'normal') )
                os.makedirs( join(new_frame_dir, 'zoomin') )
            for class_name in ['normal', 'zoomin']:
                #frames = [join(frame_dir, f) for f in os.listdir(frame_dir) if splitext(f)[1] == '.jpg']
                real_frame_dir = join(frame_dir, class_name);
                frames = [join(real_frame_dir, f) for f in os.listdir(real_frame_dir) if splitext(f)[1] == '.jpg']
                frames.sort()
                for f in frames:
                    save_path = join(new_frame_dir, class_name, basename(f))
                    #resize_img(f, save_path, (320,180))
                    #ffmpeg -i each -vf scale=320:180 save_path
                    #os.system("ffmpeg -i {} -vf scale=320:180 {}".format(each, save_path))
                    command = ['ffmpeg','-i', f, '-vf','scale=320:180', save_path]
                    #ffmpeg = subprocess.Popen(command)
                    #ffmpeg.wait()
                    subprocess.call(command)
        except:
            # NOTE(review): bare except treats *any* failure (missing dir,
            # bad frame) as "queue drained" and kills the worker -- consider
            # catching Queue.Empty specifically.
            traceback.print_exc()
            break
# Lock used solely to keep the workers' progress output from interleaving.
plock = multiprocessing.Lock()
def main():
    # Enqueue all camera directories, then launch 4 worker processes that
    # drain the queue and exit once it is empty.
    #camera_list = [d for d in os.listdir(BASE_DIR) if isdir(d) and d.startswith('c1')]
    #camera_list = [d for d in os.listdir(INPUT_DIR) if isdir(d) and d.startswith('c1')]
    camera_list1 = ['c10', 'c11', 'c12', 'c13', 'c14', 'c15', 'c16', 'c17','c18','c19']
    camera_list2 = ['c10-c11-06','c11-c12-06','c12-c13-06','c13-c14-06','c14-c15-06','c15-c16-06','c16-c17-06','c17-c18-06','c18-c19-06']
    camera_list = camera_list1 + camera_list2
    print camera_list
    #camera_list = ['c10']
    camera_list.sort()
    for c in camera_list:
        #c = join(BASE_DIR, c)
        c = join(INPUT_DIR, c)
        q.put(c)
    process_list = []
    for i in range(4):
        p = multiprocessing.Process(target=frame_resize, args=(plock,))
        p.start()
        process_list.append(p)
    # Wait for every worker to finish before returning.
    for p in process_list:
        p.join()
if __name__ == '__main__':
    main()
|
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.feature_extraction import DictVectorizer
from sklearn import tree, svm
from sklearn.ensemble import RandomForestRegressor
from sklearn.neural_network import MLPClassifier
# Compare several models on a spreadsheet: features are all columns except
# "type_is", which is the prediction target.
# data = pd.read_excel("one_more_day.xlsx")
data = pd.read_excel("the_day.xlsx")
dv_train = DictVectorizer(sparse=False) # sparse=False
# NOTE(review): test_size=0.75 keeps only 25% of the rows for *training* --
# confirm this is intentional and not swapped with the train fraction.
test_size = 0.75
X = data.drop(["type_is"], axis=1) # dv_train.fit_transform(data.drop(["type_is"], axis=1))
# One-hot encode the target for the tree / forest / MLP models.
Y = dv_train.fit_transform([{"type_is": i} for i in data["type_is"]])
x_train, x_test, y_train, y_test = train_test_split(X, Y, random_state=42, test_size=test_size)
# Raw (non-encoded) labels for the SVC, which expects 1-D class labels.
Y_clf = data["type_is"]
x_train_clf, x_test_clf, y_train_clf, y_test_clf = train_test_split(X, Y_clf, random_state=42, test_size=test_size)
# Decision tree on the one-hot target; score() prints train then test score.
d_tree = tree.DecisionTreeClassifier(criterion="entropy")
d_tree.fit(x_train, y_train)
r_score = d_tree.score(x_train, y_train)
acc_score = d_tree.score(x_test, y_test)
print("dt")
print(r_score)
print(acc_score)
# Random forest *regressor* on the one-hot target; its score() is R^2,
# not classification accuracy.
regr = RandomForestRegressor(max_depth=5, random_state=42)
regr.fit(x_train, y_train)
r_score = regr.score(x_train, y_train)
acc_score = regr.score(x_test, y_test)
print("r_dt")
print(r_score)
print(acc_score)
# RBF-kernel SVM on the raw labels.
clf = svm.SVC(kernel="rbf", gamma="auto")
clf.fit(x_train_clf, y_train_clf)
r_score = clf.score(x_train_clf, y_train_clf)
acc_score = clf.score(x_test_clf, y_test_clf)
print("svc")
print(r_score)
print(acc_score)
print(clf.predict(x_train_clf))
# Small MLP; max_iter=10 is tiny, so it likely stops before converging.
mlp = MLPClassifier(hidden_layer_sizes=(50,), max_iter=10, alpha=1e-4,
                    solver='sgd', verbose=10, random_state=1,
                    learning_rate_init=.1)
mlp.fit(x_train, y_train)
r_score = mlp.score(x_train, y_train)
acc_score = mlp.score(x_test, y_test)
print("nn")
print(r_score)
print(acc_score)
|
#-*- coding: utf-8 -*-
# Kakao-style quiz chatbot server: three quiz categories, each with five
# answer buttons.  NOTE(review): the Korean string literals below are
# mojibake in this copy (some even contain stray line breaks); they are
# runtime data and therefore preserved byte-for-byte.
import os
from flask import Flask, request, jsonify
import json
app = Flask(__name__)
# Category buttons shown on the initial keyboard.
default_buttons = ['ํ์ด์ฌ ํด์ฆ', '์น ํด์ฆ', '์ปดํจํฐ ํด์ฆ']
quiz_dict = {'python_quiz_buttons' : ['Guido van Rossum', 'Mark Elliot Zuckerberg', 'Geoffrey Everest Hinton', 'Yann LeCun', 'Andrew Ng'],
'web_quiz_buttons': ["HTML", "XML", "XHTML", "MXML", "JSON"],
'computer_quiz_buttons' : ['์ ๋์
', "์๋์ญ", "์๋๋ฐ", "์ ๋๋ฐ","์ผ๋ฆฌ์
"]}
# Correct answers: python option 0 and web option 1.
# NOTE(review): no computer-quiz answer is listed, so every computer-quiz
# choice is judged wrong -- confirm this is intended.
answer_list = [quiz_dict['python_quiz_buttons'][0], quiz_dict['web_quiz_buttons'][1]]
# Flatten every answer button across all quizzes into one lookup list.
choice_list = list(quiz_dict.values())[0]
for i in list(quiz_dict.values())[1:]:
    choice_list = choice_list+ i
@app.route('/keyboard')
def keyboard():
    """Initial keyboard: present the three default quiz-category buttons."""
    payload = {
        'type': 'buttons',
        'buttons': default_buttons,
    }
    return jsonify(payload)
@app.route('/message', methods=["POST"])
def true_or_false():
    # Dispatch on the user's message: a category button sends back that
    # quiz's question + answer buttons; an answer button is judged against
    # answer_list and the default keyboard is restored.
    # (The broken-looking string literals are mojibake preserved as-is.)
    dataRecieve = request.get_json()
    user_input = dataRecieve["content"]
    if user_input == default_buttons[0]:
        # Python quiz selected.
        response_data = {
            'message' : {
                "text":'๋ค์ ์ธ๋ฌผ๋ค ์ค ํ์ด์ฌ์ ๊ฐ๋ฐ์๋ ๋๊ตฌ์
๋๊น?'
            },
            "keyboard" : {
                "buttons" : quiz_dict['python_quiz_buttons'],
                "type" : "buttons",
            }
        }
    elif user_input == default_buttons[1]:
        # Web quiz selected.
        response_data = {
            'message' : {
                "text":'๋ค์ ๋ณด๊ธฐ์ค ๋งํฌ์
์ธ์ด๊ฐ ์๋๊ฒ์ ๋ฌด์์
๋๊น?'
            },
            "keyboard" : {
                "type" : "buttons",
                "buttons" : quiz_dict['web_quiz_buttons']
            }
        }
    elif user_input == default_buttons[2]:
        # Computer quiz selected.
        response_data = {
            'message' : {
                "text":'๋ค์ ๋ณด๊ธฐ์ค ์ต์ด์ ์ปดํจํฐ๋ ๋ฌด์์
๋๊น?'
            },
            "keyboard" : {
                "type" : "buttons",
                "buttons" : quiz_dict['computer_quiz_buttons']
            }
        }
    elif user_input in choice_list :
        # The message is one of the answer buttons: grade it.
        if user_input in answer_list:
            response_data = {
                'message' : {
                    "text":'์ ๋ต์
๋๋ค. ๋ค๋ฅธ ํด์ฆ๋ ํ์ด๋ณด์๊ฒ ์ด์?'
                },
                "keyboard" : {
                    "type" : "buttons",
                    "buttons" : default_buttons
                }
            }
        else:
            response_data = {
                'message' : {
                    "text":'ํ๋ ธ์ต๋๋ค. ๋ค๋ฅธ ํด์ฆ๋ ํ์ด๋ณด์๊ฒ ์ด์?'},
                "keyboard" : {
                    "type" : "buttons",
                    "buttons" : default_buttons
                }
            }
    # NOTE(review): if user_input matches no branch, response_data is
    # unbound and this raises -- confirm the client can only send buttons.
    return jsonify(response_data)
# Listen on all interfaces, port 8888, when run directly.
if __name__=="__main__":
    app.run(host="0.0.0.0", port=8888)
|
# JTSK-350112
# test_complex.py
# Taiyr Begeyev
# t.begeyev@jacobs-university.de
from complex import Complex
# testing: exercise every arithmetic and comparison operator of Complex.
first = Complex(1, 2)
second = Complex(-1, -2)
print(f"c1 + c2 = {first + second}")
print(f"c1 - c2 = {first - second}")
print(f"c1 * c2 = {first * second}")
print(f"c1 / c2 = {first / second}")
print(f"c1 == c2 = {first == second}")
print(f"c1 != c2 = {first != second}")
|
class Student:
    """A student identified by name, holding a list of subject scores."""

    def __init__(self, name, subjscores):  # constructor
        # Note: the attribute is spelled `subscores`; kept for compatibility.
        self.name = name
        self.subscores = subjscores

    def average(self):
        """Return the arithmetic mean of the subject scores."""
        total = 0
        for score in self.subscores:
            total += score
        return total / len(self.subscores)
# Quick manual check: mean of [45, 55, 79] printed to 2 decimals (59.67).
stu_1 = Student("Lehan",[45,55,79])
print(f"{stu_1.average():.2f}") |
from django.apps import AppConfig
class UsersConfig(AppConfig):
    """Django application configuration for the `users` app."""
    name = 'users'

    def ready(self):
        """Register the app's signal handlers once the app is loaded.

        Importing users.signals is done purely for its side effect: it
        connects the handlers that create and save a profile together with
        a user.
        """
        import users.signals
|
class LRUCache:
    """Least-recently-used cache.

    `cache` maps key -> [value, node], where `node` is the entry's position
    in a doubly linked list (`index`).  The list is kept in recency order:
    head = least recently used, tail = most recently used.
    """

    class DLLNode:
        # One linked-list node; `val` holds the cache key it represents.
        def __init__(self, val=None):
            self.val = val
            self.prev = None
            self.next = None

    class DLL:
        # Bare doubly linked list: just head/tail pointers, no methods.
        def __init__(self):
            self.head = None
            self.tail = None

    def update_index(self, the_key):
        """Move the_key's node to the tail (mark it most recently used)."""
        if self.cache[the_key][1] != self.index.tail:
            # Unlink the node from its current position...
            if self.cache[the_key][1] == self.index.head:
                self.index.head = self.cache[the_key][1].next
                self.index.head.prev = None
            else:
                self.cache[the_key][1].prev.next = self.cache[the_key][1].next
                self.cache[the_key][1].next.prev = self.cache[the_key][1].prev
            # ...and re-append it at the tail.
            self.index.tail.next = self.cache[the_key][1]
            self.cache[the_key][1].prev = self.index.tail
            self.index.tail = self.cache[the_key][1]
            self.cache[the_key][1].next = None

    # @param capacity, an integer
    def __init__(self, capacity):
        self.cache = {}           # key -> [value, DLLNode]
        self.capacity = capacity  # maximum number of entries
        self.index = self.DLL()   # recency list (head = LRU, tail = MRU)

    # @return an integer
    def get(self, key):
        """Return key's value and refresh its recency; -1 when absent."""
        if key in self.cache:
            self.update_index(key)
            return self.cache[key][0]
        else:
            return -1

    # @param key, an integer
    # @param value, an integer
    # @return nothing
    def set(self, key, value):
        """Insert or update key, evicting the LRU entry when full."""
        if key in self.cache:
            # Existing key: overwrite value and refresh recency.
            self.cache[key][0] = value
            self.update_index(key)
        else:
            new_node = self.DLLNode(key)
            if len(self.cache) < self.capacity:
                # Room left: append the new node at the tail.
                if not self.index.head:
                    self.index.head = new_node
                    self.index.tail = new_node
                else:
                    self.index.tail.next = new_node
                    new_node.prev = self.index.tail
                    self.index.tail = new_node
            else:
                # Full: evict the head (least recently used) entry.
                del self.cache[self.index.head.val]
                if self.capacity == 1:
                    self.index.head = new_node
                    self.index.tail = new_node
                else:
                    self.index.head = self.index.head.next
                    self.index.head.prev = None
                    new_node.prev = self.index.tail
                    self.index.tail.next = new_node
                    self.index.tail = new_node
            self.cache[key] = [value, new_node]
|
import unittest
import main
import pygame
class TestCamera(unittest.TestCase):
    """Camera tests run against a freshly started Game instance."""

    def setUp(self):
        self.game = main.Game()
        self.game.start()

    def test_big_camera(self):
        camera = main.Camera(main.big_camera_configure,
                             self.game.total_level_width,
                             self.game.total_level_height)
        camera.update(self.game.hero)
        self.assertEqual(camera.state, pygame.Rect(0, 0, 2480, 960))

    def test_small_camera(self):
        # NOTE(review): expects the exact same rect as the big-camera test --
        # confirm the small camera really yields identical state after one
        # update, or fix the expected value; this looks like a copy-paste.
        camera = main.Camera(main.small_camera_configure,
                             self.game.total_level_width,
                             self.game.total_level_height)
        camera.update(self.game.hero)
        self.assertEqual(camera.state, pygame.Rect(0, 0, 2480, 960))
# Run the test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
|
import pygame
import os
import random
# --- Exam Simulator ---------------------------------------------------------
# A visual-novel state machine: `game_status` selects which while-loop
# "screen" below is active; dialogue and questions are read from .txt files
# in the script/ directory next to this file.
# NOTE(review): several resource paths use backslashes and one uses
# C:\WINDOWS -- the game only runs on Windows as written.
filepath = os.path.abspath(__file__)
filedir = os.path.dirname(filepath)
pygame.init()
pygame.font.init()
pygame.mixer.init()
pygame.mixer.music.load(filedir + str('\Music\Life Goes On - Persona 5.mp3'))
#Music reference: Meguro, S. (2017). Persona 5 Original Soundtrack: Life Goes On [CD]. Tokyo: Atlus.
game_status = 'start'  # which screen loop is currently active
scriptLine = 0         # index of the current dialogue line
questionLine = 0       # index of the line holding the current question
name = ''              # player name picked on the chooseName screen
Feelings = ''
questionsList = []     # comma-split fields of the current exam question
TeacherMood = ''       # reply mood chosen on the mood screen
generator = 0          # 0 = pick a new random question, 1 = keep current one
rightAnswer = ''
rightAnswerNo= 0       # which answer box (1-3) holds the correct answer
ExamScore = 0          # points earned so far
MaxExamScore = 0       # maximum points possible so far
# --- Title screen: wait for a click on the "Exam Simulator" button. ---------
while game_status == 'start':
    win = pygame.display.set_mode((800,600))
    pygame.display.set_caption("Exam Simulator")
    x = 250
    y = 250
    width = 350
    height = 50
    vol = 5
    background_image = pygame.image.load(filedir + str('\\pics\\bg Outside.png'))
    #Taken from wikipedia: Charles Darwin University. (2020). Retrieved 18 October 2020, from https://en.wikipedia.org/wiki/Charles_Darwin_University
    win.blit(background_image, [0, 0])
    pygame.draw.rect(win,(255,255,255), (x, y, width, height))
    font = pygame.font.Font('freesansbold.ttf', 32)
    text = font.render('Exam Simulator', True, (0,0,0))
    win.blit(text,(250,260))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.MOUSEBUTTONDOWN:
            mousex, mousey = event.pos
            # A click inside the button rectangle starts music + intro.
            if 550 > mousex > 250 and 300 > mousey > 250:
                pygame.mixer.music.play(-1)
                game_status = 'intro'
# --- Intro dialogue: advance with SPACE until a 'Question' line appears. ----
# NOTE(review): the script file is reopened every frame and never closed.
while game_status == 'intro':
    win = pygame.display.set_mode((800,600))
    win.blit(background_image, [0, 0])
    script = open(filedir + '/script/' + game_status + '.txt', 'r')
    lines = script.readlines()
    if lines[questionLine].startswith('Question'):
        # A question line was reached: show it and move to name selection.
        theQuestion = (lines[questionLine][13:].strip('\n'))
        font = pygame.font.Font('freesansbold.ttf', 32)
        text = font.render(str(theQuestion), True, (0,0,0))
        win.blit(text,(50,50))
        questions = lines[questionLine][10:].split()[0]
        game_status = 'chooseName'
    else:
        # Plain dialogue: render current line, remember its index.
        font = pygame.font.Font('freesansbold.ttf', 32)
        text = font.render(str(lines[scriptLine]).strip('\n'), True, (0,0,0))
        win.blit(text,(50,50))
        questionLine = scriptLine
    teacher = pygame.transform.scale(pygame.image.load(filedir + '\pics\P3_Edogawa_Render.png'), (350, 450))
    win.blit(teacher,(450,260))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                scriptLine = scriptLine + 1
# --- Name selection: three clickable boxes with names from the script. ------
while game_status == 'chooseName':
    win = pygame.display.set_mode((800,600))
    win.blit(background_image, [0, 0])
    win.blit(teacher,(450,260))
    qBoxWidth = 150
    qBoxLength = 50
    qBoxPosX = 50
    qboxPosy = 50
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, qboxPosy, qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 100), qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 200), qBoxWidth, qBoxLength))
    font = pygame.font.Font('freesansbold.ttf', 32)
    # The three lines after the question line are the name options.
    win.blit(font.render(lines[questionLine + 1], True, (0,0,0)),(50,60))
    win.blit(font.render(lines[questionLine + 2], True, (0,0,0)),(50,160))
    win.blit(font.render(lines[questionLine + 3], True, (0,0,0)),(50,260))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.MOUSEBUTTONDOWN:
            mousex, mousey = event.pos
            # Each box click picks the matching name option.
            if 200 > mousex > 50 and 100 > mousey > 50:
                name = str(lines[questionLine + 1])
                scriptLine = 0
                game_status = 'name choosen'
            elif 200 > mousex > 50 and 200 > mousey > 150:
                name = str(lines[questionLine + 2])
                scriptLine = 0
                game_status = 'name choosen'
            elif 200 > mousex > 50 and 300 > mousey > 250:
                name = str(lines[questionLine + 3])
                scriptLine = 0
                game_status = 'name choosen'
# --- Post-name dialogue: 'insName' in the script is replaced by the chosen
# name; running past the last line (IndexError) advances to the Exam Hall. --
while game_status == 'name choosen':
    win = pygame.display.set_mode((800,600))
    win.blit(background_image, [0, 0])
    teacher = pygame.transform.scale(pygame.image.load(filedir + '\pics\P3_Edogawa_Render.png'),(350, 450))
    #ripped from: Persona 3 FES(Playstation 2) [Video game]. (2006). Tokyo, Japan: Atlus Sega.
    win.blit(teacher,(450,260))
    script = open(filedir + '/script/' + game_status + '.txt', 'r')
    lines = script.readlines()
    font = pygame.font.Font('freesansbold.ttf', 32)
    try:
        text = font.render(str(lines[scriptLine]), True, (0,0,0))
        if 'insName' in str(lines[scriptLine]):
            namedLine = (str(lines[scriptLine]).replace('insName', name))
            text = font.render(namedLine, True, (0,0,0))
            win.blit(text,(50,50))
            pygame.display.update()
            for event in pygame.event.get():
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_SPACE:
                        scriptLine = scriptLine + 1
        else:
            win.blit(text,(50,50))
            pygame.display.update()
            for event in pygame.event.get():
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_SPACE:
                        scriptLine = scriptLine + 1
    except IndexError:
        # End of script file reached: move on to the next screen.
        game_status = 'Exam Hall'
        scriptLine = 0
# --- Exam Hall dialogue with the exam teacher sprite. -----------------------
while game_status == 'Exam Hall':
    win = pygame.display.set_mode((800,600))
    background_image = pygame.image.load(filedir + str('\\pics\\exam hall.png'))
    win.blit(background_image, [0, 0])
    teacher = pygame.transform.scale(pygame.image.load(filedir + '\pics\exam_teacher.png'), (300, 450))
    #ripped from: Persona 3 FES(Playstation 2) [Video game]. (2006). Tokyo, Japan: Atlus Sega.
    win.blit(teacher,(450,260))
    script = open(filedir + '/script/' + game_status + '.txt', 'r')
    lines = script.readlines()
    font = pygame.font.Font('freesansbold.ttf', 32)
    try:
        text = font.render(str(lines[scriptLine]), True, (255,255,255))
        if 'insName' in str(lines[scriptLine]):
            # Substitute the player's chosen name into the dialogue line.
            namedLine = (str(lines[scriptLine]).replace('insName', name))
            text = font.render(namedLine.strip('\n'), True, (255,255,255))
            win.blit(text,(50,50))
            pygame.display.update()
        else:
            win.blit(text,(50,50))
            pygame.display.update()
    except IndexError:
        # Script exhausted: ask for the teacher-mood reply next.
        game_status = 'Exam Teacher Mood'
        scriptLine = 0
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                scriptLine = scriptLine + 1
# --- Mood choice: the reply clicked sets TeacherMood and selects the
# starting line of the ExamTeach dialogue. -----------------------------------
while game_status == 'Exam Teacher Mood':
    script = open(filedir + '/script/' + game_status + '.txt', 'r')
    questionLine = 0
    lines = script.readlines()
    win = pygame.display.set_mode((800,600))
    # Fix: draw the background first, THEN the teacher sprite.  Previously
    # the teacher was blitted before the background, so the full-screen
    # background overwrote it and the sprite was never visible (every other
    # screen draws background -> sprite).
    win.blit(background_image, [0, 0])
    win.blit(teacher,(450,260))
    qBoxWidth = 500
    qBoxLength = 50
    qBoxPosX = 50
    qboxPosy = 50
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, qboxPosy, qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 100), qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 200), qBoxWidth, qBoxLength))
    font = pygame.font.Font('freesansbold.ttf', 32)
    win.blit(font.render(lines[questionLine + 1], True, (0,0,0)),(50,60))
    win.blit(font.render(lines[questionLine + 2], True, (0,0,0)),(50,160))
    win.blit(font.render(lines[questionLine + 3], True, (0,0,0)),(50,260))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.MOUSEBUTTONDOWN:
            mousex, mousey = event.pos
            # NOTE(review): each branch overwrites the player's `name` with
            # the clicked reply text -- confirm that is intended.
            if 500 > mousex > 50 and 100 > mousey > 50:
                name = str(lines[questionLine + 1])
                game_status = 'ExamTeach'
                TeacherMood = 'Angry'
                scriptLine = 0
            elif 500 > mousex > 50 and 200 > mousey > 150:
                name = str(lines[questionLine + 2])
                game_status = 'ExamTeach'
                TeacherMood = 'Neutral'
                scriptLine = 3
            elif 500 > mousex > 50 and 300 > mousey > 250:
                name = str(lines[questionLine + 3])
                game_status = 'ExamTeach'
                TeacherMood = 'Disappointed'
                scriptLine = 6
# --- Mood-specific teacher dialogue: each mood reads a 3-line slice of
# ExamTeach.txt (Angry 0-1, Neutral 3-4, Disappointed 6-7) and then starts
# the exam. ------------------------------------------------------------------
while game_status == 'ExamTeach':
    win = pygame.display.set_mode((800,600))
    # Fix: background first, then the teacher sprite on top.  The original
    # blitted the teacher before the full-screen background, which erased
    # the sprite every frame.
    win.blit(background_image, [0, 0])
    win.blit(teacher,(450,260))
    script = open(filedir + '/script/' + game_status + '.txt', 'r')
    lines = script.readlines()
    font = pygame.font.Font('freesansbold.ttf', 32)
    if TeacherMood == 'Angry':
        if scriptLine == 2:
            game_status = 'Exam start'
            scriptLine = 0
        else:
            text = font.render(str(lines[scriptLine]), True, (255,255,255))
            if 'insName' in str(lines[scriptLine]):
                namedLine = (str(lines[scriptLine]).replace('insName', name))
                text = font.render(namedLine.strip('\n'), True, (255,255,255))
                win.blit(text,(50,50))
                pygame.display.update()
            else:
                win.blit(text,(50,50))
                pygame.display.update()
    if TeacherMood == 'Neutral':
        if scriptLine == 5:
            game_status = 'Exam start'
            scriptLine = 0
        else:
            text = font.render(str(lines[scriptLine]), True, (255,255,255))
            if 'insName' in str(lines[scriptLine]):
                namedLine = (str(lines[scriptLine]).replace('insName', name))
                text = font.render(namedLine.strip('\n'), True, (255,255,255))
                win.blit(text,(50,50))
                pygame.display.update()
            else:
                win.blit(text,(50,50))
                pygame.display.update()
    if TeacherMood == 'Disappointed':
        if scriptLine == 8:
            game_status = 'Exam start'
            scriptLine = 0
        else:
            text = font.render(str(lines[scriptLine]), True, (255,255,255))
            if 'insName' in str(lines[scriptLine]):
                namedLine = (str(lines[scriptLine]).replace('insName', name))
                text = font.render(namedLine.strip('\n'), True, (255,255,255))
                win.blit(text,(50,50))
                pygame.display.update()
            else:
                win.blit(text,(50,50))
                pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                scriptLine = scriptLine + 1
# Switch the background music for the exam section.
pygame.mixer.music.stop()
pygame.mixer.music.load(filedir + '\music\So Boring - Persona 5.mp3')
#Meguro, S. (2017). Persona 5 Original Soundtrack: So Boring [CD]. Tokyo: Atlus.
pygame.mixer.music.play(-1)
# --- Exam intro text on the paper background, handwriting font. -------------
while game_status == 'Exam start':
    win = pygame.display.set_mode((800,600))
    # NOTE(review): this 'Exam started' text is drawn before the background
    # is blitted below, so the background covers it and it never shows.
    text = font.render('Exam started', True, (0,0,0))
    win.blit(text,(250,260))
    script = open(filedir + '/script/' + game_status + '.txt', 'r')
    lines = script.readlines()
    # Windows-only absolute font path.
    font = pygame.font.Font('C:\WINDOWS\FONTS\LHANDW.TTF', 32)
    background_image = pygame.image.load(filedir + str('\\pics\\paperback.png'))
    win.blit(background_image, [0, 0])
    pygame.display.update()
    try:
        text = font.render(str(lines[scriptLine]), True, (0,0,0))
        if 'insName' in str(lines[scriptLine]):
            namedLine = (str(lines[scriptLine]).replace('insName', name))
            text = font.render(namedLine.strip('\n'), True, (0,0,0))
            win.blit(text,(50,250))
            pygame.display.update()
        else:
            win.blit(text,(50,250))
            pygame.display.update()
    except IndexError:
        # Script exhausted: begin the first exam question.
        game_status = 'Exam Question 1'
        scriptLine = 0
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                scriptLine = scriptLine + 1
# Sound effects played on a correct / wrong answer.
rightJingle = pygame.mixer.Sound(filedir + '\music\Persona 4 - Social Link Jingle.wav')
#ripped from: Persona 4 Golden (PC/Steam version) [Video game]. (2020). Tokyo, Japan: Atlus Sega.
wrongJingle = pygame.mixer.Sound(filedir + '\music\Turning Page - Sound Effect .wav')
#sourced from: GFX Sounds. (2018). Turning Page - Sound Effect [HD] [Video]. Retrieved from https://www.youtube.com/watch?v=tuHaY1lwlEQ
# --- Question 1: pick a random line from the question file; each line is
# "question, ans1, ans2, ans3, points, image" with ' - right' marking the
# correct answer.  Clicking a box scores it and advances to question 2. ------
while game_status == 'Exam Question 1':
    while generator != 1:
        # Pick one of the first three lines at random; generator=1 keeps it
        # fixed until an answer is clicked.
        script = open(filedir + '/script/Exam questions/' + game_status +'.txt', 'r')
        lines = script.readlines()
        randNo = random.randint(0,2)
        questionsList = lines[randNo].split(', ')
        generator = 1
    win = pygame.display.set_mode((800,600))
    win.blit(background_image, [0, 0])
    qBoxWidth = 500
    qBoxLength = 50
    qBoxPosX = 50
    qboxPosy = 330
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, qboxPosy, qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 100), qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 200), qBoxWidth, qBoxLength))
    font = pygame.font.Font('freesansbold.ttf', 32)
    win.blit(font.render(questionsList[0], True, (0,0,0)),(50,230))
    questionImg = pygame.transform.scale(pygame.image.load(filedir + '/pics/' + questionsList[5].rstrip('\n')), (269, 187))
    win.blit(questionImg,(50,25))
    # The ' - right' suffix marks the correct option; strip it for display.
    if ' - right' in questionsList[1]:
        rightAnswer = questionsList[1].replace(" - right", "")
        win.blit(font.render(questionsList[1].replace(" - right", ""), True, (0,0,0)),(50,330))
        rightAnswerNo = 1
    else:
        win.blit(font.render(questionsList[1], True, (0,0,0)),(50,330))
    if ' - right' in questionsList[2]:
        rightAnswer = questionsList[2].replace(" - right", "")
        win.blit(font.render(questionsList[2].replace(" - right", ""), True, (0,0,0)),(50,430))
        rightAnswerNo = 2
    else:
        win.blit(font.render(questionsList[2], True, (0,0,0)),(50,430))
    if ' - right' in questionsList[3]:
        rightAnswer = questionsList[3].replace(" - right", "")
        win.blit(font.render(questionsList[3].replace(" - right", ""), True, (0,0,0)),(50,530))
        rightAnswerNo = 3
    else:
        win.blit(font.render(questionsList[3], True, (0,0,0)),(50,530))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.MOUSEBUTTONDOWN:
            mousex, mousey = event.pos
            # questionsList[4] holds the point value of this question.
            if 500 > mousex > 50 and (qboxPosy + 50) > mousey > qboxPosy:
                if rightAnswerNo == 1:
                    rightJingle.play(0)
                    generator = 0
                    ExamScore = ExamScore + int(questionsList[4])
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 2'
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 2'
            elif 500 > mousex > 50 and ((qboxPosy + 100) + 50) > mousey > (qboxPosy + 100):
                if rightAnswerNo == 2:
                    rightJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    ExamScore = ExamScore + int(questionsList[4])
                    game_status = 'Exam Question 2'
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 2'
            elif 500 > mousex > 50 and ((qboxPosy + 200) + 50) > mousey > (qboxPosy + 200):
                if rightAnswerNo == 3:
                    rightJingle.play(0)
                    generator = 0
                    ExamScore = ExamScore + int(questionsList[4])
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 2'
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 2'
#Pictures taken from:
#Python logo: Welcome to Python.org. (2001). Retrieved 16 October 2020, from https://www.python.org/
#SQL logo: Sql Server Logo - Unlimited Download. cleanpng.com. Retrieved 18 October 2020, from https://www.cleanpng.com/png-microsoft-sql-server-mysql-database-logo-2447831/
#Optical Illusion: Natalia, J. 10 Fun Visual Brain Teasers You Will Want To Share. Retrieved 18 October 2020, from https://www.ba-bamail.com/content.aspx?emailid=32451
# --- Question 2: same structure as question 1. ------------------------------
# Fix: the answer-box-3 branches previously advanced to 'Exam Question 4',
# silently skipping question 3, while boxes 1 and 2 advanced to
# 'Exam Question 3'.  All branches now go to 'Exam Question 3'.
while game_status == 'Exam Question 2':
    while generator != 1:
        # Pick one of the first three question lines at random.
        script = open(filedir + '/script/Exam questions/' + game_status +'.txt', 'r')
        lines = script.readlines()
        randNo = random.randint(0,2)
        questionsList = lines[randNo].split(', ')
        generator = 1
    win = pygame.display.set_mode((800,600))
    win.blit(background_image, [0, 0])
    qBoxWidth = 500
    qBoxLength = 50
    qBoxPosX = 50
    qboxPosy = 330
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, qboxPosy, qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 100), qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 200), qBoxWidth, qBoxLength))
    font = pygame.font.Font('freesansbold.ttf', 32)
    win.blit(font.render(questionsList[0], True, (0,0,0)),(50,230))
    questionImg = pygame.transform.scale(pygame.image.load(filedir + '/pics/' + questionsList[5].rstrip('\n')), (269, 187))
    win.blit(questionImg,(50,25))
    # ' - right' marks the correct option; strip the marker for display.
    if ' - right' in questionsList[1]:
        rightAnswer = questionsList[1].replace(" - right", "")
        win.blit(font.render(questionsList[1].replace(" - right", ""), True, (0,0,0)),(50,330))
        rightAnswerNo = 1
    else:
        win.blit(font.render(questionsList[1], True, (0,0,0)),(50,330))
    if ' - right' in questionsList[2]:
        rightAnswer = questionsList[2].replace(" - right", "")
        win.blit(font.render(questionsList[2].replace(" - right", ""), True, (0,0,0)),(50,430))
        rightAnswerNo = 2
    else:
        win.blit(font.render(questionsList[2], True, (0,0,0)),(50,430))
    if ' - right' in questionsList[3]:
        rightAnswer = questionsList[3].replace(" - right", "")
        win.blit(font.render(questionsList[3].replace(" - right", ""), True, (0,0,0)),(50,530))
        rightAnswerNo = 3
    else:
        win.blit(font.render(questionsList[3], True, (0,0,0)),(50,530))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.MOUSEBUTTONDOWN:
            mousex, mousey = event.pos
            if 500 > mousex > 50 and (qboxPosy + 50) > mousey > qboxPosy:
                if rightAnswerNo == 1:
                    rightJingle.play(0)
                    ExamScore = ExamScore + int(questionsList[4])
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 3'
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 3'
            elif 500 > mousex > 50 and ((qboxPosy + 100) + 50) > mousey > (qboxPosy + 100):
                if rightAnswerNo == 2:
                    rightJingle.play(0)
                    ExamScore = ExamScore + int(questionsList[4])
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 3'
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 3'
            elif 500 > mousex > 50 and ((qboxPosy + 200) + 50) > mousey > (qboxPosy + 200):
                if rightAnswerNo == 3:
                    rightJingle.play(0)
                    ExamScore = ExamScore + int(questionsList[4])
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 3'  # was 'Exam Question 4': skipped Q3
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 3'  # was 'Exam Question 4': skipped Q3
#Pictures taken from:
#Python logo: Welcome to Python.org. (2001). Retrieved 16 October 2020, from https://www.python.org/
#SQL logo: Sql Server Logo - Unlimited Download. cleanpng.com. Retrieved 18 October 2020, from https://www.cleanpng.com/png-microsoft-sql-server-mysql-database-logo-2447831/
#Optical Illusion: Natalia, J. 10 Fun Visual Brain Teasers You Will Want To Share. Retrieved 18 October 2020, from https://www.ba-bamail.com/content.aspx?emailid=32451
# --- Question 3: same structure as question 1; advances to question 4. ------
while game_status == 'Exam Question 3':
    while generator != 1:
        script = open(filedir + '/script/Exam questions/' + game_status +'.txt', 'r')
        lines = script.readlines()
        randNo = random.randint(0,2)
        questionsList = lines[randNo].split(', ')
        generator = 1
    win = pygame.display.set_mode((800,600))
    win.blit(background_image, [0, 0])
    qBoxWidth = 500
    qBoxLength = 50
    qBoxPosX = 50
    qboxPosy = 330
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, qboxPosy, qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 100), qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 200), qBoxWidth, qBoxLength))
    font = pygame.font.Font('freesansbold.ttf', 32)
    win.blit(font.render(questionsList[0], True, (0,0,0)),(50,230))
    questionImg = pygame.transform.scale(pygame.image.load(filedir + '/pics/' + questionsList[5].rstrip('\n')), (269, 187))
    win.blit(questionImg,(50,25))
    # ' - right' marks the correct option; strip the marker for display.
    if ' - right' in questionsList[1]:
        rightAnswer = questionsList[1].replace(" - right", "")
        win.blit(font.render(questionsList[1].replace(" - right", ""), True, (0,0,0)),(50,330))
        rightAnswerNo = 1
    else:
        win.blit(font.render(questionsList[1], True, (0,0,0)),(50,330))
    if ' - right' in questionsList[2]:
        rightAnswer = questionsList[2].replace(" - right", "")
        win.blit(font.render(questionsList[2].replace(" - right", ""), True, (0,0,0)),(50,430))
        rightAnswerNo = 2
    else:
        win.blit(font.render(questionsList[2], True, (0,0,0)),(50,430))
    if ' - right' in questionsList[3]:
        rightAnswer = questionsList[3].replace(" - right", "")
        win.blit(font.render(questionsList[3].replace(" - right", ""), True, (0,0,0)),(50,530))
        rightAnswerNo = 3
    else:
        win.blit(font.render(questionsList[3], True, (0,0,0)),(50,530))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.MOUSEBUTTONDOWN:
            mousex, mousey = event.pos
            if 500 > mousex > 50 and (qboxPosy + 50) > mousey > qboxPosy:
                if rightAnswerNo == 1:
                    rightJingle.play(0)
                    ExamScore = ExamScore + int(questionsList[4])
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 4'
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 4'
            elif 500 > mousex > 50 and ((qboxPosy + 100) + 50) > mousey > (qboxPosy + 100):
                if rightAnswerNo == 2:
                    rightJingle.play(0)
                    ExamScore = ExamScore + int(questionsList[4])
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 4'
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 4'
            elif 500 > mousex > 50 and ((qboxPosy + 200) + 50) > mousey > (qboxPosy + 200):
                if rightAnswerNo == 3:
                    rightJingle.play(0)
                    ExamScore = ExamScore + int(questionsList[4])
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 4'
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 4'
#Pictures taken Portugal. (2020). Retrieved 18 October 2020, from https://en.wikipedia.org/wiki/Portugal
#Pictures taken Spain. (2020). Retrieved 18 October 2020, from https://en.wikipedia.org/wiki/Spain
#Pictures taken Canada. (2020). Retrieved 18 October 2020, from https://en.wikipedia.org/wiki/Canada
# --- Question 4: same structure as question 1; advances to question 5. ------
while game_status == 'Exam Question 4':
    while generator != 1:
        script = open(filedir + '/script/Exam questions/' + game_status +'.txt', 'r')
        lines = script.readlines()
        randNo = random.randint(0,2)
        questionsList = lines[randNo].split(', ')
        generator = 1
    win = pygame.display.set_mode((800,600))
    win.blit(background_image, [0, 0])
    qBoxWidth = 500
    qBoxLength = 50
    qBoxPosX = 50
    qboxPosy = 330
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, qboxPosy, qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 100), qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 200), qBoxWidth, qBoxLength))
    font = pygame.font.Font('freesansbold.ttf', 32)
    win.blit(font.render(questionsList[0], True, (0,0,0)),(50,230))
    questionImg = pygame.transform.scale(pygame.image.load(filedir + '/pics/' + questionsList[5].rstrip('\n')), (269, 187))
    win.blit(questionImg,(50,25))
    # ' - right' marks the correct option; strip the marker for display.
    if ' - right' in questionsList[1]:
        rightAnswer = questionsList[1].replace(" - right", "")
        win.blit(font.render(questionsList[1].replace(" - right", ""), True, (0,0,0)),(50,330))
        rightAnswerNo = 1
    else:
        win.blit(font.render(questionsList[1], True, (0,0,0)),(50,330))
    if ' - right' in questionsList[2]:
        rightAnswer = questionsList[2].replace(" - right", "")
        win.blit(font.render(questionsList[2].replace(" - right", ""), True, (0,0,0)),(50,430))
        rightAnswerNo = 2
    else:
        win.blit(font.render(questionsList[2], True, (0,0,0)),(50,430))
    if ' - right' in questionsList[3]:
        rightAnswer = questionsList[3].replace(" - right", "")
        win.blit(font.render(questionsList[3].replace(" - right", ""), True, (0,0,0)),(50,530))
        rightAnswerNo = 3
    else:
        win.blit(font.render(questionsList[3], True, (0,0,0)),(50,530))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.MOUSEBUTTONDOWN:
            mousex, mousey = event.pos
            if 500 > mousex > 50 and (qboxPosy + 50) > mousey > qboxPosy:
                if rightAnswerNo == 1:
                    rightJingle.play(0)
                    ExamScore = ExamScore + int(questionsList[4])
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 5'
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 5'
            elif 500 > mousex > 50 and ((qboxPosy + 100) + 50) > mousey > (qboxPosy + 100):
                if rightAnswerNo == 2:
                    rightJingle.play(0)
                    ExamScore = ExamScore + int(questionsList[4])
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    generator = 0
                    game_status = 'Exam Question 5'
                else:
                    wrongJingle.play(0)
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    generator = 0
                    game_status = 'Exam Question 5'
            elif 500 > mousex > 50 and ((qboxPosy + 200) + 50) > mousey > (qboxPosy + 200):
                if rightAnswerNo == 3:
                    rightJingle.play(0)
                    ExamScore = ExamScore + int(questionsList[4])
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    generator = 0
                    game_status = 'Exam Question 5'
                else:
                    wrongJingle.play(0)
                    generator = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'Exam Question 5'
#Pictures taken Portugal. (2020). Retrieved 18 October 2020, from https://en.wikipedia.org/wiki/Portugal
#Pictures taken Spain. (2020). Retrieved 18 October 2020, from https://en.wikipedia.org/wiki/Spain
#Pictures taken Canada. (2020). Retrieved 18 October 2020, from https://en.wikipedia.org/wiki/Canada
# --- State: Exam Question 5 ---------------------------------------------
# Draws a randomly chosen multiple-choice question with three answer boxes;
# clicking an answer plays a jingle, updates the scores, and moves to the
# 'End Exam' state.
# NOTE(review): `script` file handles opened here are never closed, and
# `rightAnswer` is assigned but never used -- confirm both are intentional.
while game_status == 'Exam Question 5':
    # Load one random question only once per state (generator is a "loaded" flag).
    while generator != 1:
        script = open(filedir + '/script/Exam questions/' + game_status +'.txt', 'r')
        lines = script.readlines()
        randNo = random.randint(0,2)
        # Question file line format (comma separated):
        # question, answer1, answer2, answer3, points, image  (correct answer tagged " - right")
        questionsList = lines[randNo].split(', ')
        generator = 1
    win = pygame.display.set_mode((800,600))
    win.blit(background_image, [0, 0])
    # Three white answer boxes stacked 100px apart.
    qBoxWidth = 500
    qBoxLength = 50
    qBoxPosX = 50
    qboxPosy = 330
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, qboxPosy, qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 100), qBoxWidth, qBoxLength))
    pygame.draw.rect(win,(255,255,255), (qBoxPosX, (qboxPosy + 200), qBoxWidth, qBoxLength))
    font = pygame.font.Font('freesansbold.ttf', 32)
    win.blit(font.render(questionsList[0], True, (0,0,0)),(50,230))
    questionImg = pygame.transform.scale(pygame.image.load(filedir + '/pics/' + questionsList[5].rstrip('\n')), (269, 187))
    win.blit(questionImg,(50,25))
    # The " - right" tag marks the correct answer; strip it before rendering.
    if ' - right' in questionsList[1]:
        rightAnswer = questionsList[1].replace(" - right", "")
        win.blit(font.render(questionsList[1].replace(" - right", ""), True, (0,0,0)),(50,330))
        rightAnswerNo = 1
    else:
        win.blit(font.render(questionsList[1], True, (0,0,0)),(50,330))
    if ' - right' in questionsList[2]:
        rightAnswer = questionsList[2].replace(" - right", "")
        win.blit(font.render(questionsList[2].replace(" - right", ""), True, (0,0,0)),(50,430))
        rightAnswerNo = 2
    else:
        win.blit(font.render(questionsList[2], True, (0,0,0)),(50,430))
    if ' - right' in questionsList[3]:
        rightAnswer = questionsList[3].replace(" - right", "")
        win.blit(font.render(questionsList[3].replace(" - right", ""), True, (0,0,0)),(50,530))
        rightAnswerNo = 3
    else:
        win.blit(font.render(questionsList[3], True, (0,0,0)),(50,530))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.MOUSEBUTTONDOWN:
            mousex, mousey = event.pos
            # Hit-test each answer box; score only when the clicked box is the
            # tagged right answer. MaxExamScore always grows by the question's
            # point value so the final ratio reflects attempted questions.
            if 500 > mousex > 50 and (qboxPosy + 50) > mousey > qboxPosy:
                if rightAnswerNo == 1:
                    rightJingle.play(0)
                    scriptLine = 0
                    ExamScore = ExamScore + int(questionsList[4])
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'End Exam'
                else:
                    wrongJingle.play(0)
                    scriptLine = 0
                    game_status = 'End Exam'
                    MaxExamScore = MaxExamScore + int(questionsList[4])
            elif 500 > mousex > 50 and ((qboxPosy + 100) + 50) > mousey > (qboxPosy + 100):
                if rightAnswerNo == 2:
                    rightJingle.play(0)
                    scriptLine = 0
                    ExamScore = ExamScore + int(questionsList[4])
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'End Exam'
                else:
                    wrongJingle.play(0)
                    scriptLine = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'End Exam'
            elif 500 > mousex > 50 and ((qboxPosy + 200) + 50) > mousey > (qboxPosy + 200):
                if rightAnswerNo == 3:
                    rightJingle.play(0)
                    scriptLine = 0
                    ExamScore = ExamScore + int(questionsList[4])
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'End Exam'
                else:
                    wrongJingle.play(0)
                    scriptLine = 0
                    MaxExamScore = MaxExamScore + int(questionsList[4])
                    game_status = 'End Exam'
#liberty: Hudson, M. (2020). Why Is the Statue of Liberty a Woman?. Retrieved 18 October 2020, from https://www.britannica.com/story/why-is-the-statue-of-liberty-a-woman
#taj: Centre, U. (1992). Taj Mahal. Retrieved 18 October 2020, from https://whc.unesco.org/en/list/252/
#Juche: Juche Tower guide, North Korea. Retrieved 18 October 2020, from https://www.koreakonsult.com/Attraction_Pyongyang_juche_tower_eng.html
# --- State: End Exam -----------------------------------------------------
# Switch to the post-exam background music, then step through the
# 'End Exam.txt' dialogue one line per SPACE press; an IndexError (past the
# last line) advances the game to 'Exam results'.
pygame.mixer.music.stop()
pygame.mixer.music.load(filedir + '\music\My Homie - Persona 5.mp3')
#Meguro, S. (2017). Persona 5 Original Soundtrack: My Homie [CD]. Tokyo: Atlus.
pygame.mixer.music.play(-1)
while game_status == 'End Exam':
    win = pygame.display.set_mode((800,600))
    background_image = pygame.image.load(filedir + str('\\pics\\exam hall.png'))
    win.blit(background_image, [0, 0])
    # Re-read the whole dialogue file every frame; NOTE(review): handle is
    # never closed and this is wasteful -- consider loading once per state.
    script = open(filedir + '\\script\\' + game_status + '.txt', 'r')
    lines = script.readlines()
    font = pygame.font.Font('freesansbold.ttf', 32)
    try:
        text = font.render(str(lines[scriptLine]).strip('\n'), True, (255,255,255))
    except IndexError:
        # Ran past the last line: reset and move on to the results screen.
        # NOTE(review): `text` from the previous frame is re-blitted below;
        # this crashes if the file is empty on the first frame.
        scriptLine = 0
        game_status = 'Exam results'
    win.blit(text,(50,50))
    questionLine = scriptLine  # NOTE(review): assigned but apparently unused
    teacher = pygame.transform.scale(pygame.image.load(filedir + '\pics\exam_teacher.png'), (350, 450))
    win.blit(teacher,(450,260))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                scriptLine = scriptLine + 1
#Pics taken from:
# Donkey:Cyprus donkey. (2020). Retrieved 18 October 2020, from https://en.wikipedia.org/wiki/Cyprus_donkey
# --- State: Exam results -------------------------------------------------
# Same dialogue-stepping pattern as 'End Exam': SPACE advances a line,
# running past the end of 'Exam results.txt' moves to 'one week passes'.
scriptLine = 0
while game_status == 'Exam results':
    win = pygame.display.set_mode((800,600))
    win.blit(background_image, [0, 0])
    script = open(filedir + '\\script\\' + game_status + '.txt', 'r')
    lines = script.readlines()
    font = pygame.font.Font('freesansbold.ttf', 32)
    try:
        namedLine = str(lines[scriptLine])  # NOTE(review): assigned but unused
        text = font.render(str(lines[scriptLine]).strip('\n'), True, (255,255,255))
    except IndexError:
        scriptLine = 0
        game_status = 'one week passes'
    win.blit(text,(50,50))
    questionLine = scriptLine
    teacher = pygame.transform.scale(pygame.image.load(filedir + '\pics\exam_teacher.png'), (350, 450))
    win.blit(teacher,(450,260))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                scriptLine = scriptLine + 1
# --- State: one week passes ---------------------------------------------
# Interstitial screen; on SPACE, the exam score ratio picks the player's
# "feeling" for the next scene.
while game_status == 'one week passes':
    win = pygame.display.set_mode((800,600))
    font = pygame.font.Font('freesansbold.ttf', 32)
    text = font.render("One week passes", True, (255,255,255))
    win.blit(text,(50,50))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                # >74% confident, >49% okay, >24% not so confident.
                if (ExamScore/MaxExamScore) > 0.74:
                    Feelings = 'Confident'
                    game_status = 'Feels'
                    scriptLine = 0
                    generator = 0
                elif (ExamScore/MaxExamScore) > 0.49:
                    Feelings = 'Okay'
                    game_status = 'Feels'
                    scriptLine = 0
                    generator = 0
                elif (ExamScore/MaxExamScore) > 0.24:
                    Feelings = 'Not so confident'
                    game_status = 'Feels'
                    scriptLine = 0
                    generator = 0
                # NOTE(review): a ratio <= 0.24 matches no branch, so the game
                # is stuck on this screen forever -- a fourth branch looks missing.
# --- State: Feels --------------------------------------------------------
# Steps through 'Feels.txt'; when the current line starts with 'Feel' it is
# replaced by one of three feeling-specific lines (indices 4/5/6), and the
# SPACE press after that jumps to 'Results'.
while game_status == 'Feels':
    win = pygame.display.set_mode((800,600))
    win.blit(background_image, [0, 0])
    # NOTE(review): on the first frame this reads `lines` left over from the
    # previous state (the file is only re-read below) -- fragile ordering.
    if lines[scriptLine].startswith('Feel') == True:
        if Feelings == 'Confident':
            scriptLine = 4
            generator = 1
        elif Feelings == 'Okay':
            scriptLine = 5
            generator = 1
        elif Feelings == 'Not so confident':
            scriptLine = 6
            generator = 1
    script = open(filedir + '\\script\\' + game_status + '.txt', 'r')
    lines = script.readlines()
    # NOTE(review): `font` here is the instance from the previous state; it is
    # reassigned only after being used -- works, but the order looks accidental.
    text = font.render(str(lines[scriptLine]).strip('\n'), True, (255,255,255))
    font = pygame.font.Font('freesansbold.ttf', 32)
    win.blit(text,(50,50))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                scriptLine = scriptLine + 1
                # generator == 1 means the feeling line was shown; move on.
                # (Nesting under K_SPACE inferred from the mangled source --
                # the transition should wait for the player's keypress.)
                if generator == 1:
                    scriptLine = 0
                    game_status = 'Results'
# --- State: Results ------------------------------------------------------
# Final dialogue; the placeholder 'insExam' in a script line is substituted
# with "score/max". Running past the last line quits the game.
while game_status == 'Results':
    win = pygame.display.set_mode((800,600))
    background_image = pygame.image.load(filedir + str('\\pics\\exam hall.png'))
    win.blit(background_image, [0, 0])
    script = open(filedir + '\\script\\' + game_status + '.txt', 'r')
    lines = script.readlines()
    font = pygame.font.Font('freesansbold.ttf', 32)
    try:
        textHolder = str(lines[scriptLine]).strip('\n')
        if textHolder.startswith('insExam') == True:
            # Inject the actual score into the template line.
            textholder2 = textHolder.replace('insExam', str(ExamScore) + '/'+ str(MaxExamScore))
            text = font.render(textholder2, True, (255,255,255))
        else:
            text = font.render(textHolder, True, (255,255,255))
    except IndexError:
        # End of script: terminate the program.
        scriptLine = 0
        quit()
    win.blit(text,(50,50))
    questionLine = scriptLine
    teacher = pygame.transform.scale(pygame.image.load(filedir + '\pics\exam_teacher.png'), (350, 450))
    win.blit(teacher,(450,260))
    pygame.display.update()
    for event in pygame.event.get():
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                scriptLine = scriptLine + 1
|
import scipy.io
import numpy as np
import random
import sys
def fuzzyKmeans(data, k):
    """Perform one fuzzy k-means weight/center update on `data`.

    Parameters
    ----------
    data : np.ndarray of shape (n_points, n_features)
        The points to cluster. Not modified.
    k : int
        Number of clusters; must be <= n_points.

    Returns
    -------
    np.ndarray of shape (k, n_features)
        The updated cluster centers (weighted means of all points).
    """
    # Pick k distinct random points as the initial cluster centers.
    centers = []
    nums = []
    count = 0
    while count < k:
        # BUG FIX: random.randint is inclusive on both ends; the original used
        # data.shape[0] as the upper bound and could index one past the array.
        num = random.randint(0, data.shape[0] - 1)
        if num not in nums:
            nums.append(num)
            # BUG FIX: copy the row -- data[num] is a view, and the in-place
            # offset below would otherwise mutate the caller's `data`.
            centers.append(data[num].copy())
            count += 1
    # Offset centers slightly so no point-to-center distance is exactly zero
    # (a zero distance would make the weight undefined).
    for center in centers:
        center += 0.1
    centers = np.array(centers)
    # distances[c][p] = Euclidean distance between center c and point p.
    distances = []
    for i in range(k):
        distances.append([])
    for center in range(k):
        for point in data:
            dist = np.linalg.norm(centers[center] - point)
            distances[center].append(dist)
    distances = np.array([np.array(x) for x in distances])
    sumOfDist = np.sum(distances, axis=0)
    # weights[c][p] = inverse relative distance of point p to center c
    # (sys.maxsize stands in for "infinite" weight at zero distance).
    weights = []
    for i in range(k):
        weights.append([])
        for point in range(data.shape[0]):
            if distances[i][point] == 0:
                weights[i].append(sys.maxsize)
            else:
                weights[i].append(1 / (distances[i][point] / sumOfDist[point]))
    weights = np.array([np.array(x) for x in weights])
    # Normalize so each point's memberships across centers sum to 1.
    normalizeSums = np.sum(weights, axis=0)
    for center in weights:
        for point in range(data.shape[0]):
            center[point] /= normalizeSums[point]
    # Updated center = membership-weighted mean of all points.
    newCenters = np.zeros((k, data.shape[1]))
    centerSums = np.sum(weights, axis=1)
    for i in range(k):
        for point in range(data.shape[0]):
            newCenters[i] += data[point] * weights[i][point]
        newCenters[i] /= centerSums[i]
    # BUG FIX: the original computed the new centers but never returned them.
    return newCenters
def loadData(file, entry):
    """Load the variable named `entry` from the MATLAB .mat file at `file`."""
    return scipy.io.loadmat(file)[entry]
def reshape(a, b):
    """Return `a` trimmed to have the same number of columns as `b`.

    Trailing columns of `a` are dropped. Raises ValueError when `a` has fewer
    columns than `b` (the original implementation looped forever in that case).
    """
    if a.shape[1] < b.shape[1]:
        # BUG FIX: the original `while` never terminated for a narrower `a`.
        raise ValueError("`a` must have at least as many columns as `b`")
    # Deleting the last column repeatedly is equivalent to a single slice.
    return a[:, :b.shape[1]]
def main():
    """Load both channels, width-match them, stack them, and run fuzzy k-means."""
    # Load in matlab files and convert into numpy arrays.
    green = loadData("green_data.mat", "green_data")
    red = loadData("red_data.mat", "red_data")
    # Trim the wider array so the two channels can be stacked row-wise.
    if green.shape[1] > red.shape[1]:
        green = reshape(green, red)
    else:
        red = reshape(red, green)
    combine = np.concatenate((green, red))
    fuzzyKmeans(combine, 3)


if __name__ == "__main__":
    main()
from util import *
import re
class Database:
    """A database schema: a named collection of tables tied to a persistence unit."""

    def __init__(self, name, persistence_unit_name):
        self.name = name
        self.tables = []
        self.persistence_unit_name = persistence_unit_name
        # Java package name derived from the persistence unit name.
        self.package_name = camel_case(persistence_unit_name).lower()

    def get_name(self):
        return self.name

    def get_tables(self):
        return self.tables

    def add_table(self, table):
        self.tables.append(table)

    def get_persistence_unit_name(self):
        return self.persistence_unit_name

    def get_package_name(self):
        return self.package_name
class Table:
    """A database table: columns, indices, and naming metadata for code generation."""

    def __init__(self, name, persistence_unit_name):
        self.name = name
        self.camel_name = camel_case(name)
        self.class_name = pascal_case(name)  # generated entity class name
        self.columns = []
        # Columns the caller must supply on insert (NOT NULL, not auto-filled).
        self.not_null_columns = []
        self.indices = []
        self.persistence_unit_name = persistence_unit_name
        self.package_name = camel_case(persistence_unit_name).lower()

    def get_name(self):
        return self.name

    def get_class_name(self):
        return self.class_name

    def get_columns(self):
        return self.columns

    def add_column(self, column):
        self.columns.append(column)
        # Auto-increment ids and created_at/updated_at timestamps are filled by
        # the database, so they are excluded from the required-on-insert list.
        if column.is_not_null() and not column.auto_increment and column.name != "created_at" and column.name != "updated_at":
            self.not_null_columns.append(column)

    def get_indices(self):
        return self.indices

    def add_index(self, index):
        self.indices.append(index)

    def get_persistence_unit_name(self):
        return self.persistence_unit_name

    def get_package_name(self):
        return self.package_name

    def find_column_by_name(self, _column_name):
        """Return the column with the given SQL name, or None when absent."""
        for column in self.columns:
            if column.get_name() == _column_name:
                return column
        return None

    def get_primary_key(self):
        """Return the PRIMARY KEY index, or None when the table has none."""
        for index in self.indices:
            if index.get_type() == "PRIMARY KEY":
                return index
        return None

    def is_primary_key(self, _column):
        """True when `_column`'s name appears in the primary key's column names."""
        index = self.get_primary_key()
        if index:
            for column_name in index.get_column_names():
                if _column.get_name() == column_name:
                    return True
        return False

    def has_composite_primary_keys(self):
        """True when the primary key spans more than one column."""
        index = self.get_primary_key()
        return index and len(index.get_columns()) > 1
class Index:
    """A table index or key: an ordered set of columns plus a type string
    (e.g. "PRIMARY KEY", "UNIQUE")."""

    def __init__(self, _type):
        self.column_names = []
        self.columns = []
        self.type = _type

    def get_column_names(self):
        return self.column_names

    def set_column_names(self, _names):
        self.column_names = _names

    def get_columns(self):
        return self.columns

    def add_column(self, _column):
        self.columns.append(_column)

    def get_type(self):
        return self.type

    def get_combined_pascal_column_names(self):
        """Concatenate the PascalCase names of the member columns, in order."""
        return "".join(col.pascal_name for col in self.columns)

    def is_composite_pk(self):
        """True only for a PRIMARY KEY spanning two or more columns."""
        return self.type == "PRIMARY KEY" and len(self.columns) > 1
class Column:
    """A table column parsed from SQL DDL, with derived Java field metadata."""

    def __init__(self, _name, _type, _unsigned=None, _not_null=None, _default=None, _auto_increment=None):
        self.name = _name
        self.field_name = camel_case(_name)    # Java field name
        self.pascal_name = pascal_case(_name)  # PascalCase form for accessor names
        self.type = _type                      # raw SQL type, e.g. "varchar(255)"
        self.field_type = _convert_to_java_type(_type)
        self.field_size = _filter_size(self.field_type, _type)
        # Flags default to empty strings / False so templates can test truthiness.
        self.unsigned = ""
        if _unsigned:
            self.unsigned = _unsigned
        self.not_null = False
        if _not_null == 'NOT NULL':
            self.not_null = True
        self.default = ""
        if _default:
            self.default = _default
        self.auto_increment = ""
        if _auto_increment:
            self.auto_increment = _auto_increment
        # Set later by the schema builder once the indices are known.
        self.primary_key = False

    def get_name(self):
        return self.name

    def get_type(self):
        return self.type

    def get_unsigned(self):
        return self.unsigned

    def is_not_null(self):
        return self.not_null

    def get_default(self):
        return self.default

    def get_auto_increment(self):
        return self.auto_increment

    def is_primary_key(self):
        return self.primary_key

    def set_primary_key(self, _primary_key):
        self.primary_key = _primary_key
def _convert_to_java_type(_type):
_boolean = re.compile('^tinyint\(1\)', re.IGNORECASE)
_integer = re.compile('^(tinyint\([23]\)|smallint|mediumint|int)', re.IGNORECASE)
_long = re.compile('^bigint', re.IGNORECASE)
_double = re.compile('^double', re.IGNORECASE)
_string = re.compile('^varchar|^text', re.IGNORECASE)
_datetime = re.compile('^datetime')
_timestamp = re.compile('^timestamp')
if _boolean.match(_type):
return "boolean"
if _integer.match(_type):
return "Integer"
if _long.match(_type):
return "Long"
if _double.match(_type):
return "Double"
if _string.match(_type):
return "String"
if _datetime.match(_type):
return "ZonedDateTime"
if _timestamp.match(_type):
return "ZonedDateTime"
def _filter_size(_field_type, _type):
if _field_type == "String":
match = re.search('\((\d*?)\)', _type)
if match:
return match.group(1)
return None
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from flask import render_template, request, jsonify, current_app, render_template_string
from . import case
from flask_babel import gettext
from ..models import Case
@case.route('/')
def index():
    """Render the project list page with every Case in the database."""
    return render_template(
        "case/index.html",
        title=gettext('Project List'),
        cases=Case.query.all(),
    )
@case.route('/<int:case_id>/', methods=['GET', 'POST'])
def show_case(case_id):
    """GET: render the detail page for the case in the URL.

    POST: look up the case id sent in the form data (not the URL) and return
    its description as JSON. Both paths 404 when the case does not exist.
    """
    if request.method == 'POST':
        case_to_show = Case.query.filter_by(id=request.form['case_id']).first_or_404()
        return jsonify(status='success', description=case_to_show.description)
    cur_case = Case.query.filter_by(id=case_id).first_or_404()
    return render_template('case/case_detail.html',
                           title=gettext('Project Display'),
                           case=cur_case)
|
#!/usr/bin/python3
# Task 8. Search API
if __name__ == "__main__":
    import sys
    import requests

    # Authenticate against the GitHub API with HTTP basic auth (argv[1] is the
    # username, argv[2] the password/token) and print the numeric id of the
    # authenticated user. Prints None when authentication fails.
    username = sys.argv[1]
    password = sys.argv[2]
    response = requests.get("https://api.github.com/user",
                            auth=(username, password))
    print("{}".format(response.json().get("id")))
|
from collections import defaultdict
from lib.tree import BinaryTreeNode
from lib.list import List
def build_lists_recursive(node, depth, lists):
    """Collect node values per depth with an in-order traversal.

    `lists` maps depth -> a container supporting .add(); it is mutated in
    place and also returned (None when `node` is None).
    """
    if node is None:
        return None
    child_depth = depth + 1
    build_lists_recursive(node.left, child_depth, lists)
    lists[depth].add(node.data)
    build_lists_recursive(node.right, child_depth, lists)
    return lists
def build_lists(root):
    """Return a defaultdict mapping tree depth -> List of node values at that depth."""
    lists = defaultdict(List)
    build_lists_recursive(root, 0, lists)
    return lists
# Smoke test: depth-0 holds the root, deeper levels hold values in
# in-order (left subtree, node, right subtree) visiting order.
root = BinaryTreeNode(1,
                      BinaryTreeNode(3,
                                     BinaryTreeNode(7),
                                     BinaryTreeNode(10)),
                      BinaryTreeNode(2,
                                     BinaryTreeNode(9)))
lists = build_lists(root)
assert str(lists[0]) == '1'
assert str(lists[1]) == '3,2'
assert str(lists[2]) == '7,10,9'
|
import click
@click.group()
@click.option('--debug/--no-debug', default=False)
def cli(debug):
    """Root command group; reports whether debug mode is on."""
    click.echo('Debug mode is %s' % ('on' if debug else 'off'))


@cli.command()
@click.option('-n', '--name', help='synching of the tool')
def sync(name):
    """`sync` subcommand: prints the name being synched."""
    print("synching ", name)


@cli.command()
@click.option('-v', '--verbose', count=True)
def log(verbose):
    """`log` subcommand: -v may be repeated; echoes the verbosity count."""
    click.echo('Verbosity: %s' % verbose)


@cli.command()
@click.option('--pos', nargs=2, type=float)
def findme(pos):
    """`findme` subcommand: echoes the two-float position tuple."""
    click.echo('%s' % pos)


if __name__ == '__main__':
    cli()
|
from AutoFillOperator import AutoFillOperator
class AutoFillCallBackOperator(AutoFillOperator):
    """AutoFillOperator variant that wires the window's push buttons to
    the operator's callback methods."""

    def __init__(self, window, param_names, slider_text_pairs, model, configs_path):
        super().__init__(window, param_names, slider_text_pairs, model, configs_path)

    # overridden
    def ConnectCallBack(self):
        """Route each push button's clicked signal to its handler."""
        self.window.AutoFillpushButton.clicked.connect(self.AutoFill)
        self.window.SavePresetpushButton.clicked.connect(self.SavePreset)
        self.window.DeletePresetpushButton.clicked.connect(self.DeletePreset)

    # overridden
    def init_config_line_edit(self):
        """Expose the window's config-file-name line edit to the base class."""
        self.config_line_edit = self.window.ConfigFileNamelineEdit
# reference: https://github.com/LTS4/DeepFool
import torch.nn as nn
import torch.nn.functional as F
import torchvision
import torchvision.transforms as transforms
import numpy as np
import matplotlib.pyplot as plt
import torch
import torch.optim as optim
import torch.utils.data as data_utils
from torch.autograd import Variable
import math
import torchvision.models as models
from PIL import Image
from targeted_universal_adversarial_perturbations import targeted_universal_adversarial_perturbations
import os
import sys
TOTAL_IMAGES = 100  # number of correctly-classified images to collect
# change the data path to the directory with all the images from the same class
DATA_PATH = '***'  # NOTE(review): must end with a path separator -- joined with '+' below
CLEAN_CLASS = 88  # 88 for macaw
TARGET_CLASS = 130
MAX_ITER = 10
net = models.resnet34(pretrained=True)
# Switch to evaluation mode
net.eval()
# ImageNet channel statistics used for input normalization.
mean = [ 0.485, 0.456, 0.406 ]
std = [ 0.229, 0.224, 0.225 ]
# Remove the mean
transform = transforms.Compose([
    transforms.Resize(256),
    transforms.CenterCrop(224),
    transforms.ToTensor(),
    transforms.Normalize(mean = mean,
                         std = std)])
counter = 0
images = []
for image in os.listdir(DATA_PATH):
    if counter >= TOTAL_IMAGES:
        break
    # open the image and apply transform
    image = transform(Image.open(DATA_PATH + image))[None, :, :, :]
    # if it's predicted correctly, then add it to the images array
    if torch.argmax(net(image), dim=1).item() == CLEAN_CLASS:
        images.append(image)
        # Counter tracks *kept* images so the batch ends up with TOTAL_IMAGES
        # entries (nesting inferred from the mangled source -- confirm).
        counter += 1
# create a single batch of size 100
images = torch.cat(images, dim=0)
# Compute the targeted universal perturbation and persist it both as a
# tensor checkpoint and as a viewable image.
universal_v = targeted_universal_adversarial_perturbations(images, net, TARGET_CLASS, MAX_ITER)
torch.save(universal_v, 'universal_perturbation.pth')
torchvision.utils.save_image(universal_v, 'universal_perturbation.png')
# test the perturbation
# im1 = Image.open('***')
# im1 = transform(im1)[None, :, :, :]
# universal_v = torch.load('universal_perturbation.pth')
# print(torch.argmax(net(im1)))
# print(f'prediction: {torch.argmax(net(im1 + universal_v))}') |
image_size = 256  # presumably the square input image side length -- confirm against consumers
nb_class=6  # presumably the number of segmentation/classification classes -- confirm
|
# -*-coding:utf-8-*-
"""
A stack implemented with a Python list.

Instantiate as ``a = Stack()``.
=============================
isEmpty()  return True when the stack is empty, otherwise False
push(v)    push an element onto the stack
pop()      pop and return the top element
peek()     return the top element without removing it
size()     return the number of elements on the stack
=============================
example:
=============================
a = Stack()
length = a.size()
a.push(1)
top = a.peek()
a.pop()
============================
"""
class Stack(object):
    """A LIFO stack backed by a Python list (top of stack = end of list)."""

    def __init__(self):
        self.stack = []

    def isEmpty(self):
        """Return True when the stack holds no elements."""
        return self.stack == []

    def push(self, value):
        """Place `value` on top of the stack."""
        self.stack.append(value)

    def pop(self):
        """Remove and return the top element; raise IndexError when empty."""
        if self.isEmpty():
            raise IndexError('pop from empty stack')
        return self.stack.pop()

    def peek(self):
        """Return the top element without removing it; raise IndexError when empty."""
        if self.isEmpty():
            raise IndexError('peek from empty stack')
        return self.stack[-1]

    def size(self):
        """Return the number of stored elements."""
        return len(self.stack)
|
from django.db import models
import datetime
class User(models.Model):
    """Site user account with credentials, profile data, and an avatar path."""

    # (stored value, human label) choices for the `sex` field.
    gender = (
        ('male', 'Male'),
        ('female', 'Female'),
        ('None', 'Others'),
    )
    name = models.CharField(max_length=128, unique=True)
    # NOTE(review): plain CharField -- ensure the password is hashed before save.
    password = models.CharField(max_length=256)
    email = models.EmailField(unique=True)
    # NOTE(review): the default "ๆ " looks like mojibake and is not one of the
    # declared choices -- confirm the intended default value.
    sex = models.CharField(max_length=32, choices=gender, default="ๆ ")
    create_time = models.DateTimeField(auto_now_add=True)
    # BUG FIX: pass the callable, not datetime.datetime.now() -- calling it here
    # evaluates once at class-definition/migration time, freezing the default.
    time = models.DateTimeField(default=datetime.datetime.now)
    image = models.CharField(max_length=256, default="")

    def __str__(self):
        return self.name
|
# Read an upper bound and print p = end! (factorial), computed iteratively.
end = int(input("p: "))
i = 1
p = 1
while i <= end:
    p *= i
    i += 1
print(p)
# File: exercise0709.py
# Author: Kaiching Chang
# Date: July, 2014
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
# Takes in a labelled image (labelled mask) and outputs a copy of the labelled image but without the objects which are touching the image border.
# In[ ]:
import cv2
import numpy as np
# In[ ]:
def im_ClearBorder (labelled_img):
    """Return a copy of a labelled mask without objects touching the border.

    Parameters
    ----------
    labelled_img : 2-D integer array
        0 is background; objects are labelled 1..N.

    Returns
    -------
    Array of the same shape/dtype keeping only the objects whose bounding
    box lies strictly inside the image border.
    """
    n_objs = np.amax(labelled_img)
    assert n_objs>0, 'There has to be at least 1 object in the image!'
    canvas_h, canvas_w = labelled_img.shape
    labelled_ClearedBorder = np.zeros(labelled_img.shape, dtype=bool)
    for n in range(n_objs):
        obj_n_mask = labelled_img == n + 1
        # Bounding box via numpy instead of cv2.boundingRect -- removes the
        # OpenCV dependency and behaves identically for non-empty masks.
        ys, xs = np.nonzero(obj_n_mask)
        if ys.size == 0:
            # ROBUSTNESS: label ids need not be contiguous; skip unused ids
            # (cv2.boundingRect silently returned a zero box here).
            continue
        x1, x2 = xs.min(), xs.max()
        y1, y2 = ys.min(), ys.max()
        # Keep the object only when no side of its bounding box touches
        # row/column 0 or the last row/column.
        if x1 > 0 and x1 < canvas_w - 1 and x2 > 0 and x2 < canvas_w - 1 and y1 > 0 and y1 < canvas_h - 1 and y2 > 0 and y2 < canvas_h - 1:
            labelled_ClearedBorder |= obj_n_mask
    # Re-apply the original labels to the kept pixels.
    return labelled_ClearedBorder.astype(labelled_img.dtype) * labelled_img
|
#!/usr/bin/env python
# encoding: utf-8
"""
admin.py
Created by yang.zhou on 2012-09-13.
Copyright (c) 2012 zhouyang.me. All rights reserved.
"""
import json
import logging
from tornado.web import authenticated, asynchronous, HTTPError
from tornado.httpclient import HTTPRequest, AsyncHTTPClient
from tornado.httputil import HTTPHeaders
from mongoengine import Q
from core.base.base import BaseHandler
from core.base.route import route
from core.base.auth import authByLevel
from core.base.models import Settings, Topic, Member
from core.base.oauth2lib import TencentMixin, WeiboMixin, DoubanMixin
from urllib import urlencode
@route("^/admin/$", dict(current_page="admin.index", level=10))
class AdminIndexHandler(BaseHandler):
    """Admin dashboard: shows which OAuth access tokens are configured."""
    @authByLevel
    def get(self):
        weibo_access_token = None
        tencent_access_token = None
        douban_access_token = None
        print self.profiles  # NOTE(review): Python-2 debug print left in -- use logging
        if self.profiles:
            weibo_access_token = self.profiles.weibo_access_token
            tencent_access_token = self.profiles.tencent_access_token
            douban_access_token = self.profiles.douban_access_token
        self.render("admin/index.html",
                    weibo_access_token=weibo_access_token,
                    tencent_access_token=tencent_access_token,
                    douban_access_token=douban_access_token)
@route("^/admin/settings$", dict(current_page="admin.settings", level=10))
class AdminSettingsHandler(BaseHandler):
    """Render the admin settings page."""
    @authByLevel
    def get(self):
        self.render("admin/settings.html")
@route("^/admin/jobs$", dict(current_page="admin.jobs", level=10))
class AdminJobsHandler(BaseHandler):
    """List fetcher jobs for members whose fetcher is enabled or suspended."""
    @authByLevel
    def get(self):
        member_tasks = Member.objects(
            Q(has_fetcher="enabled")|Q(has_fetcher="suspend")).order_by("-created")
        self.render("admin/jobs.html", member_tasks=member_tasks)
@route("^/admin/jobs/operation$", dict(current_page="admin.jobs", level=10))
class AdminJobsOperation(BaseHandler):
    """
    change job status, is_enabled : True or False.
    if is_enabled is True, start a fetch job right now.
    """
    @authByLevel
    def post(self):
        data = json.loads(self.request.body)
        member = Member.getObjectByObjectId(data["data-id"])
        if member:
            member.has_fetcher = data["status"]
            member.save()
            member.reload()
            if member.has_fetcher == "enabled":
                # Tell the local fetcher daemon (TCP 127.0.0.1:5000) to start
                # this member's weibo fetch job immediately.
                import socket
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.connect(("127.0.0.1", 5000))
                print "Send start fetch weibo task"  # NOTE(review): Python-2 print -- use logging
                sock.send(json.dumps({'task':'single', 'port':'weibo', 'operation':'start', 'username':member.username}))
                sock.close()
            self.finishedMsg(status="success", info="change job to %s" %data["status"])
        else:
            self.finishedMsg(status="error", info="object not found")
@route("^/admin/weibo/redirect$", dict(current_page="login", level=10))
class WeiboAdminLogin(BaseHandler, WeiboMixin):
    """Weibo OAuth2 redirect endpoint: exchanges the auth code for a token."""
    @asynchronous
    def get(self):
        # Second leg: Weibo redirected back with ?code=...
        if self.get_argument("code", None):
            self.get_authenticated_user(
                redirect_uri='%s/admin/weibo/redirect' %self.prefix_url,
                client_id=self.settings['weibo_key'],
                client_secret=self.settings['weibo_secret'],
                code=self.get_argument("code"),
                extra_params={},
                callback=self.async_callback(self._on_auth)
            )
            return
        # First leg: send the user to Weibo's authorize page.
        self.authorize_redirect(
            redirect_uri='%s/admin/weibo/redirect' %self.prefix_url,
            client_id=self.settings["weibo_key"]
        )

    def _on_auth(self, user):
        """Persist the Weibo token/openid in Settings, then go to the dashboard."""
        next = self.get_argument("next", self.prefix_url)  # NOTE(review): computed but unused
        if not user:
            raise HTTPError(500, "Weibo auth failed")
        else:
            access_token = user.get("access_token")
            openid = user.get("oauth_id")
            if access_token and openid:
                settings, created = Settings.objects.get_or_create(name="settings")
                settings.weibo_access_token = access_token
                settings.weibo_openid = openid
                settings.save()
                self.update_profiles()
                self.redirect("/admin/")
            else:
                raise HTTPError(500, "Weibo auth failed")
@route("^/admin/tencent/redirect$", dict(current_page="login"))
class TencentAdminLogin(BaseHandler, TencentMixin):
    """Tencent OAuth2 redirect endpoint: exchanges the auth code for a token."""
    @asynchronous
    def get(self):
        # Second leg: Tencent redirected back with ?code=...&openid=...
        if self.get_argument("code", None):
            logging.info("code: %s" %self.get_argument("code"))
            self.get_authenticated_user(
                redirect_uri='%s/admin/tencent/redirect' %self.prefix_url,
                client_id=self.settings['tencent_key'],
                client_secret=self.settings['tencent_secret'],
                code=self.get_argument("code"),
                openid=self.get_argument("openid"),
                callback=self.async_callback(self._on_auth),
            )
            return
        # First leg: send the user to Tencent's authorize page.
        self.authorize_redirect(
            redirect_uri='%s/admin/tencent/redirect' %self.prefix_url,
            client_id=self.settings["tencent_key"],
            extra_params={"response_type":"code"}
        )

    def _on_auth(self, user):
        """Persist the Tencent token/openid in Settings, then go to the dashboard."""
        next = self.get_argument("next", self.prefix_url)  # NOTE(review): computed but unused
        if not user:
            # BUG FIX: message said "Weibo auth failed" (copy/paste from WeiboAdminLogin).
            raise HTTPError(500, "Tencent auth failed")
        else:
            access_token = user.get("access_token")
            openid = user.get("oauth_id")
            if access_token and openid:
                settings, created = Settings.objects.get_or_create(name="settings")
                settings.tencent_access_token = access_token
                settings.tencent_openid = openid
                settings.save()
                self.update_profiles()
                self.redirect("/admin/")
            else:
                # BUG FIX: same copy/paste error as above.
                raise HTTPError(500, "Tencent auth failed")
@route("^/admin/douban/redirect$", dict(current_page="login"))
class AdminDoubanLogin(BaseHandler, DoubanMixin):
    """Douban OAuth2 redirect endpoint: exchanges the auth code for a token."""
    @asynchronous
    def get(self):
        # Second leg: Douban redirected back with ?code=...
        if self.get_argument("code", None):
            logging.info("code: %s" %self.get_argument("code"))
            self.get_authenticated_user(
                redirect_uri='%s/admin/douban/redirect' %self.prefix_url,
                client_id=self.settings['douban_key'],
                client_secret=self.settings['douban_secret'],
                code=self.get_argument("code"),
                callback=self.async_callback(self._on_auth),
            )
            return
        # First leg: send the user to Douban's authorize page.
        self.authorize_redirect(
            redirect_uri='%s/admin/douban/redirect' %self.prefix_url,
            client_id=self.settings["douban_key"],
            extra_params={"response_type":"code"}
        )

    def _on_auth(self, user):
        """Persist the Douban token/openid in Settings, then go to the dashboard."""
        next = self.get_argument("next", self.prefix_url)  # NOTE(review): computed but unused
        if not user:
            # BUG FIX: message said "Weibo auth failed" (copy/paste from WeiboAdminLogin).
            raise HTTPError(500, "Douban auth failed")
        else:
            access_token = user.get("access_token")
            openid = user.get("oauth_id")
            if access_token and openid:
                settings, created = Settings.objects.get_or_create(name="settings")
                settings.douban_access_token = access_token
                settings.douban_openid = openid
                settings.save()
                self.update_profiles()
                self.redirect("/admin/")
            else:
                # BUG FIX: same copy/paste error as above.
                raise HTTPError(500, "Douban auth failed")
@route("^/admin/weibo/fetch$", dict(current_page="admin.fetcher", level=0))
class AdminWeiboFetch(BaseHandler):
    """POST endpoint that sends the client into the Weibo OAuth flow."""
    @authByLevel
    def post(self):
        self.finishedMsg(status="success", info="redirected soon", next="/admin/weibo/redirect")
@route("^/admin/tencent/fetch$", dict(current_page="admin.fetcher", level=0))
class AdminTencentFetch(BaseHandler):
    """POST endpoint that sends the client into the Tencent OAuth flow."""
    # NOTE(review): unlike AdminWeiboFetch, post() is not guarded by
    # @authByLevel -- confirm whether that is intentional.
    def post(self):
        self.finishedMsg(status="success", info="redirected soon", next="/admin/tencent/redirect")
@route("^/admin/douban/fetch$", dict(current_page="admin.fetcher", level=0))
class AdminDoubanFetch(BaseHandler):
    """POST endpoint that sends the client into the Douban OAuth flow."""
    # NOTE(review): unlike AdminWeiboFetch, post() is not guarded by
    # @authByLevel -- confirm whether that is intentional.
    def post(self):
        self.finishedMsg(status="success", info="redirected soon", next="/admin/douban/redirect")
# Generated by Django 2.1.3 on 2019-02-19 18:26
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.1.3) adding Product, Variation,
    VariationType, and VariationTypeAttribute tables.

    NOTE: generated code -- do not hand-edit the operations.
    """

    dependencies = [
        ('api', '0006_customer_image'),
    ]

    operations = [
        # A product belongs to a company; deleting the company cascades.
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(blank=True, default='', max_length=400)),
                ('description', models.TextField(blank=True, default='')),
                ('image', models.TextField(blank=True, default='')),
                ('is_active', models.BooleanField(default=False)),
                ('company', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Company')),
            ],
        ),
        # A concrete sellable variant of a product (own SKU, price, image).
        migrations.CreateModel(
            name='Variation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(blank=True, default='', max_length=400)),
                ('sku', models.CharField(blank=True, default='', max_length=400)),
                ('image', models.TextField(blank=True, default='')),
                ('price', models.FloatField(default=0)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Product')),
            ],
        ),
        # A dimension along which a product varies (e.g. size, color).
        migrations.CreateModel(
            name='VariationType',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(blank=True, default='', max_length=400)),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Product')),
            ],
        ),
        # A concrete value of a variation type (e.g. "XL" for size).
        migrations.CreateModel(
            name='VariationTypeAttribute',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('attribute', models.CharField(blank=True, default='', max_length=400)),
                ('variation_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.VariationType')),
            ],
        ),
    ]
|
#!usr/bin/python3
# Demonstrates that `global` is required to rebind a module-level name
# from inside a function.
num = 1


def fun1():
    """Print the module-level ``num``, rebind it to 123, and print again."""
    global num  # declare `num` as the module-level variable before rebinding
    print(num)  # prints 1
    num = 123
    print(num)  # prints 123


fun1()
def outer():
    """Demonstrate ``nonlocal``: inner() rebinds outer's local ``num``."""
    num = 10

    def inner():
        nonlocal num  # rebind the enclosing function's `num`, not a global
        num = 100
        print(num)  # prints 100

    inner()
    print(num)  # prints 100 -- inner's assignment changed outer's local
outer()
# Read a total amount, then 9 item prices, and print the remaining amount.
total = int(input())
prices = []
for i in range(9):
    prices.append(int(input()))
print(total - sum(prices))
|
""" Seismic Crop Batch. """
import string
import random
from copy import copy
from warnings import warn
import numpy as np
import cv2
from scipy.interpolate import interp1d
from scipy.ndimage import gaussian_filter1d
from scipy.signal import butter, lfilter, hilbert
from ..batchflow import DatasetIndex, Batch, action, inbatch_parallel, SkipBatchException, apply_parallel
from .labels import Horizon
from .plotters import plot_image
from .synthetic.generator import generate_synthetic
from .utils import compute_attribute, to_list, AugmentedDict
AFFIX = '___'  # separator between the cube path and the random postfix
SIZE_POSTFIX = 12  # number of random characters appended by `salt`
SIZE_SALT = len(AFFIX) + SIZE_POSTFIX  # total length `salt` adds to a path
CHARS = string.ascii_uppercase + string.digits  # alphabet for the postfix
class SeismicCropBatch(Batch):
    """ Batch with ability to generate 3d-crops of various shapes.

    The first action in any pipeline with this class should be `make_locations` to transform batch index from
    individual cubes into crop-based indices. The transformation uses randomly generated postfix (see `:meth:.salt`)
    to obtain unique elements.
    """
    # Components are created dynamically per pipeline rather than declared up front.
    components = None

    # Defaults for the `apply_parallel` machinery: sequential `for` target,
    # results reassembled with `_assemble`.
    apply_defaults = {
        'target': 'for',
        'post': '_assemble'
    }
def _init_component(self, *args, **kwargs):
    """ Make sure every component named in ``dst`` exists on the batch,
    preallocating missing ones with NaNs, and return batch indices.
    """
    _ = args
    dst = kwargs.get("dst")
    if dst is None:
        raise KeyError("dst argument must be specified")
    components = (dst,) if isinstance(dst, str) else dst
    for name in components:
        # Preallocate only when the attribute is absent: one NaN per batch item
        if not hasattr(self, name):
            self.add_components(name, np.full(len(self.index), np.nan))
    return self.indices
@action
def add_components(self, components, init=None):
    """ Add new components after verifying that the dataset has no attributes with the same names.

    Parameters
    ----------
    components : str or list
        new component names
    init : array-like
        initial component data

    Raises
    ------
    ValueError
        If a component or an attribute with the given name already exists in batch or dataset.
    """
    clashing = [name for name in to_list(components) if hasattr(self.dataset, name)]
    if clashing:
        component = clashing[0]
        msg = (f"Component with `{component}` name cannot be added to batch, "
               "since attribute with this name is already present in dataset.")
        raise ValueError(msg)
    super().add_components(components=components, init=init)
# Inner workings
@staticmethod
def salt(path):
    """ Append a random postfix of predefined length to a string.

    Parameters
    ----------
    path : str
        supplied string.

    Returns
    -------
    path : str
        supplied string with random postfix.

    Notes
    -----
    Action `make_locations` makes a new instance of SeismicCropBatch with different (enlarged) index.
    Items in that index should point to cube location to cut crops from.
    Since we can't store multiple copies of the same string in one index (due to internal usage of dictionary),
    we need to augment those strings with random postfix, which can be removed later.
    """
    postfix_chars = [random.choice(CHARS) for _ in range(SIZE_POSTFIX)]
    return path + AFFIX + ''.join(postfix_chars)
@staticmethod
def has_salt(path):
    """ Check whether the string carries a salt postfix (see :meth:`.salt`). """
    # A salted path has the rightmost `AFFIX` exactly `SIZE_POSTFIX` characters from its end
    return AFFIX in path and len(path) - path.rfind(AFFIX) - len(AFFIX) == SIZE_POSTFIX
@staticmethod
def unsalt(path):
    """ Remove the postfix added by the `salt` method.

    Parameters
    ----------
    path : str
        supplied string.

    Returns
    -------
    str
        string without postfix.
    """
    # Strings without the affix are returned untouched
    return path[:-SIZE_SALT] if AFFIX in path else path
def get(self, item=None, component=None):
    """ Custom access for batch attributes.

    If `component` is present in dataset and is an instance of `AugmentedDict`,
    then index it with item and return it.
    Otherwise retrieve `component` from batch itself and optionally index it with `item` position in `self.indices`.
    """
    # Dataset-level storage: `AugmentedDict` is keyed by unsalted cube names
    data = getattr(self.dataset, component, None)
    if isinstance(data, AugmentedDict):
        if isinstance(item, str) and self.has_salt(item):
            item = self.unsalt(item)
        return data[item]
    # Batch-level storage: `component` may also be passed as the data itself
    data = getattr(self, component) if isinstance(component, str) else component
    if item is not None:
        # Per-item components are indexed by the item's position in `self.indices`
        if isinstance(data, (np.ndarray, list)) and len(data) == len(self):
            pos = np.where(self.indices == item)[0][0]
            return data[pos]
        # Fall back to the base class access for everything else
        return super().get(item, component)
    return data
# Core actions
@action
def make_locations(self, generator, batch_size=None, passdown=None):
    """ Use `generator` to create `batch_size` locations.

    Each location defines position in a cube and can be used to retrieve data/create masks at this place.
    Generator can be either Sampler or Grid to make locations in a random or deterministic fashion.

    `generator` must be a callable and return (batch_size, 9+) array, where the first nine columns should be:
    (field_id, label_id, orientation, i_start, x_start, h_start, i_stop, x_stop, h_stop).
    `generator` must have `to_names` method to convert cube and label ids into actual strings.
    Field and label ids are transformed into names of actual fields and labels (horizons, faults, facies, etc).

    Then we create a completely new instance of `SeismicCropBatch`, where the new index is set to
    cube names with additional postfixes (see `:meth:.salt`), which is returned as the result of this action.

    After parsing contents of generated (batch_size, 9+) array we add following attributes:
        - `locations` with triplets of slices
        - `orientations` with crop orientation: 0 for iline direction, 1 for crossline direction
        - `shapes`
        - `label_names`
        - `generated` with originally generated data
    If `generator` creates more than 9 columns, they are not used, but stored in the `generated` attribute.

    Parameters
    ----------
    generator : callable
        Sampler or Grid to retrieve locations. Must be a callable from positive integer.
    batch_size : int
        Number of locations to generate.
    passdown : str or sequence of str
        Components to pass down to a newly created batch.

    Returns
    -------
    SeismicCropBatch
        A completely new instance of Batch.
    """
    # pylint: disable=protected-access
    generated = generator(batch_size)
    # Convert IDs to names, that are used in dataset
    field_names, label_names = generator.to_names(generated[:, [0, 1]]).T
    # Locations: 3D slices in the cube coordinates
    locations = [[slice(i_start, i_stop), slice(x_start, x_stop), slice(h_start, h_stop)]
                 for i_start, x_start, h_start, i_stop, x_stop, h_stop in generated[:, 3:9]]
    # Additional info: orientation column and per-crop shapes (stop minus start along each axis)
    orientations = generated[:, 2]
    shapes = generated[:, [6, 7, 8]] - generated[:, [3, 4, 5]]
    # Create a new SeismicCropBatch instance; salting makes duplicate cube names unique in the index
    new_index = [self.salt(ix) for ix in field_names]
    new_batch = type(self)(DatasetIndex.from_index(index=new_index))
    # Keep chosen components in the new batch
    if passdown:
        passdown = [passdown] if isinstance(passdown, str) else passdown
        for component in passdown:
            if hasattr(self, component):
                new_batch.add_components(component, getattr(self, component))
    new_batch.add_components(('locations', 'generated', 'shapes', 'orientations', 'label_names'),
                             (locations, generated, shapes, orientations, label_names))
    return new_batch
@action
def adaptive_reshape(self, src=None, dst=None):
    """ Transpose crops with crossline orientation into (x, i, h) order. """
    src_list = src if isinstance(src, (tuple, list)) else [src]
    dst_list = src_list if dst is None else dst
    if not isinstance(dst_list, (tuple, list)):
        dst_list = [dst_list]
    for source, target in zip(src_list, dst_list):
        items = []
        for ix in self.indices:
            crop = self.get(ix, source)
            # Crossline-oriented crops (orientation == 1) need their first two axes swapped
            if self.get(ix, 'orientations'):
                crop = crop.transpose(1, 0, 2)
            items.append(crop)
        setattr(self, target, np.stack(items))
    return self
# Loading of cube data and its derivatives
@action
@inbatch_parallel(init='indices', post='_assemble', target='for')
def load_cubes(self, ix, dst, native_slicing=False, src_geometry='geometry', **kwargs):
    """ Load data from cube for stored `locations`.

    Parameters
    ----------
    dst : str
        Component of batch to put loaded crops in.
    native_slicing : bool
        If False, use `load_crop` method to make crops.
        If True, crop will be loaded as a slice of geometry. Preferred for 3D crops to speed up loading.
    src_geometry : str
        Field attribute with desired geometry.
    """
    field = self.get(ix, 'fields')
    location = self.get(ix, 'locations')
    return field.load_seismic(location=location, native_slicing=native_slicing, src=src_geometry, **kwargs)
@action
@inbatch_parallel(init='indices', src='indices', target='for', post='_assemble',
dst=('cubes', 'masks', 'faults'))
def generate_synthetic(self, ix, dst=None, **kwargs):
""" Generate synthetic seismic, corresponding horizons' and faults'
masks and put it into batch-components.
Parameters
----------
dst : sequence
Sequence of length=3 to put created components in.
**kwargs : dict
All arguments of `generate_synthetic`-method. See the docstring for more
info.
"""
_, _ = self, ix
# if the requested shape is in fact 2d (for instance [1, 64, 128] or [64, 1, 128])
# performs `generate_synthetic` in 2d and then adds the missing axis
shape = np.array(kwargs['shape'])
if 1 in shape[:2]:
kwargs['shape'] = tuple(shape[shape != 1])
axis_num = 0 if shape[0] == 1 else 1
else:
axis_num = None
# generate synthetic crop, horizons and faults
crop, horizons, faults = generate_synthetic(**kwargs)
if axis_num is not None:
crop, horizons, faults = [np.expand_dims(array, axis=axis_num) for array in (crop, horizons, faults)]
# set locations if needed
# locations = (slice(0, crop.shape[0]), slice(0, crop.shape[1]), slice(0, crop.shape[2]))
return crop, horizons, faults
@action
@inbatch_parallel(init='indices', post='_assemble', target='for')
def normalize(self, ix, mode=None, itemwise=False, src=None, dst=None, q=(0.01, 0.99)):
    """ Normalize values in crop.

    Parameters
    ----------
    mode : callable or str
        If callable, then directly applied to data.
        If str, then :meth:`~SeismicGeometry.scaler` applied in one of the modes:
            - `minmax`: scaled to [0, 1] via minmax scaling.
            - `q` or `normalize`: affinely mapped so that the `q` quantiles land on -1 and 1.
              NOTE(review): the itemwise code rescales by quantile *range*, not by the maximum of
              absolute quantile values as the original docstring claimed — confirm intended behavior.
            - `q_clip`: clipped to 0.01 and 0.99 quantiles and then divided
              by the maximum of absolute values of the two. Quantiles can
              be changed by `q` parameter.
    itemwise : bool
        The way to compute 'min', 'max' and quantiles. If False, stats will be computed
        for the whole cubes. Otherwise, for each data item separately.
    q : tuple
        Left and right quantiles to use.
    """
    data = self.get(ix, src)
    if callable(mode):
        normalized = mode(data)
        return normalized
    if itemwise:
        # Adjust data based on the current item only
        if mode == 'minmax':
            min_, max_ = data.min(), data.max()
            # Degenerate (constant) crops are mapped to all-zeros to avoid division by zero
            normalized = (data - min_) / (max_ - min_) if (max_ != min_) else np.zeros_like(data)
        else:
            left, right = np.quantile(data, q)
            if mode in ['q', 'normalize']:
                normalized = 2 * (data - left) / (right - left) - 1 if right != left else np.zeros_like(data)
            elif mode == 'q_clip':
                normalized = np.clip(data, left, right) / max(abs(left), abs(right))
            else:
                raise ValueError(f'Unknown mode: {mode}')
    else:
        # Whole-cube statistics: delegate to the field's geometry
        field = self.get(ix, 'fields')
        normalized = field.geometry.normalize(data, mode=mode)
    return normalized
@action
@inbatch_parallel(init='indices', post='_assemble', target='for')
def compute_attribute(self, ix, dst, src='images', attribute='semblance', window=10, stride=1, device='cpu'):
""" Compute geological attribute.
Parameters
----------
dst : str
Destination batch component
src : str, optional
Source batch component, by default 'images'
attribute : str, optional
Attribute to compute, by default 'semblance'
window : int or tuple, optional
Window to compute attribute, by default 10 (for each axis)
stride : int, optional
Stride for windows, by default 1 (for each axis)
device : str, optional
Device to compute attribute, by default 'cpu'
Returns
-------
SeismicCropBatch
Batch with loaded masks in desired components.
"""
image = self.get(ix, src)
result = compute_attribute(image, window, device, attribute)
return result
# Loading of labels
@action
@inbatch_parallel(init='indices', post='_assemble', target='for')
def create_masks(self, ix, dst, indices='all', width=3, src_labels='labels'):
""" Create masks from labels in stored `locations`.
Parameters
----------
dst : str
Component of batch to put loaded masks in.
indices : str, int or sequence of ints
Which labels to use in mask creation.
If 'all', then use all labels.
If 'single' or `random`, then use one random label.
If int or array-like, then element(s) are interpreted as indices of desired labels.
width : int
Width of the resulting label.
src_labels : str
Dataset attribute with labels dict.
"""
field = self.get(ix, 'fields')
location = self.get(ix, 'locations')
return field.make_mask(location=location, width=width, indices=indices, src=src_labels)
@action
@inbatch_parallel(init='indices', post='_assemble', target='for')
def compute_label_attribute(self, ix, dst, src='amplitudes', atleast_3d=True, dtype=np.float32, **kwargs):
""" Compute requested attribute along label surface. Target labels are defined by sampled locations.
Parameters
----------
src : str
Keyword that defines label attribute to compute.
atleast_3d : bool
Whether add one more dimension to 2d result or not.
dtype : valid dtype compatible with requested attribute
A dtype that result must have.
kwargs : misc
Passed directly to one of attribute-evaluating methods.
Notes
-----
Correspondence between the attribute and the method that computes it
is defined by :attr:`~Horizon.ATTRIBUTE_TO_METHOD`.
"""
field = self.get(ix, 'fields')
location = self.get(ix, 'locations')
label_index = self.get(ix, 'generated')[1]
src = src.replace('*', str(label_index))
src_labels = src[:src.find(':')]
label = getattr(field, src_labels)[label_index]
label_name = self.get(ix, 'label_names')
if label.short_name != label_name:
msg = f"Name `{label.short_name}` of the label loaded by index {label_index} "\
f"from {src_labels} does not match label name {label_name} from batch."\
f"This might have happened due to items order change in {src_labels} "\
f"in between sampler creation and `make_locations` call."
raise ValueError(msg)
result = field.load_attribute(src=src, location=location, atleast_3d=atleast_3d, dtype=dtype, **kwargs)
return result
# More methods to work with labels
@action
@inbatch_parallel(init='indices', post='_post_mask_rebatch', target='for',
                  src='masks', threshold=0.8, passdown=None, axis=-1)
def mask_rebatch(self, ix, src='masks', threshold=0.8, passdown=None, axis=-1):
    """ Remove elements with masks area lesser than a threshold.

    Parameters
    ----------
    threshold : float
        Minimum percentage of covered area for a mask to be kept in the batch.
    passdown : sequence of str
        Components to filter in the batch.
    axis : int
        Axis to project masks to before computing mask area.
    """
    _ = threshold, passdown
    # Project the mask along `axis`; the covered fraction is the mean of the boolean projection
    covered = np.max(self.get(ix, src), axis=axis) > 0.0
    return covered.mean()
def _post_mask_rebatch(self, areas, *args, src=None, passdown=None, threshold=None, **kwargs):
    """ Re-index the batch, keeping only items whose mask area exceeds `threshold`,
    and filter the kept components accordingly.

    Raises
    ------
    SkipBatchException
        If no item passes the threshold.
    """
    #pylint: disable=protected-access, access-member-before-definition, attribute-defined-outside-init
    _ = args, kwargs
    keep = [i for i, area in enumerate(areas) if area > threshold]
    new_index = [self.indices[i] for i in keep]
    if len(new_index) > 0:
        self.index = DatasetIndex.from_index(index=new_index)
    else:
        raise SkipBatchException
    # Copy before extending: the caller's `passdown` list is reused by the decorator
    # across batches, so mutating it in place would make it grow on every call
    passdown = list(passdown) if passdown else []
    passdown.extend([src, 'locations', 'shapes', 'generated', 'orientations', 'label_names'])
    for compo in set(passdown):
        new_data = [getattr(self, compo)[i] for i in keep]
        setattr(self, compo, np.array(new_data))
    return self
@action
@inbatch_parallel(init='_init_component', post='_assemble', target='for')
def filter_out(self, ix, src=None, dst=None, expr=None, low=None, high=None, length=None, p=1.0):
    """ Zero out mask for horizon extension task.

    Parameters
    ----------
    src : str
        Component of batch with mask
    dst : str
        Component of batch to put cut mask in.
    expr : callable, optional.
        Some vectorized function. Accepts points in cube, returns either float.
        If not None, low or high/length should also be supplied.
    low : float, optional
        Lower bound on `expr` values for points to keep.
    high : float, optional
        Upper bound on `expr` values for points to keep.
    length : float, optional
        If given, keep points with `expr` values in `[low, low + length]`.
    p : float
        Probability of applying the transform. Default is 1.
    """
    if not (src and dst):
        raise ValueError('Src and dst must be provided')
    mask = self.get(ix, src)
    coords = np.where(mask > 0)
    # With probability 1-p, or when there is nothing to filter, return the mask untouched
    if np.random.binomial(1, 1 - p) or len(coords[0]) == 0 or expr is None:
        new_mask = mask
    else:
        new_mask = np.zeros_like(mask)
        # `np.float` was removed in NumPy 1.24; the builtin `float` is the documented replacement
        coords = np.array(coords).astype(float).T
        cond = np.ones(shape=coords.shape[0]).astype(bool)
        # Normalize coordinates to [0, 1] along each axis before applying `expr`
        coords /= np.reshape(mask.shape, newshape=(1, 3))
        if low is not None:
            cond &= np.greater_equal(expr(coords), low)
        if high is not None:
            cond &= np.less_equal(expr(coords), high)
        if length is not None:
            low = 0 if not low else low
            cond &= np.less_equal(expr(coords), low + length)
        # Map the kept points back to integer indices and copy their mask values over
        coords *= np.reshape(mask.shape, newshape=(1, 3))
        coords = np.round(coords).astype(np.int32)[cond]
        new_mask[coords[:, 0], coords[:, 1], coords[:, 2]] = mask[coords[:, 0],
                                                                  coords[:, 1],
                                                                  coords[:, 2]]
    return new_mask
@apply_parallel
def shift_masks(self, crop, n_segments=3, max_shift=4, min_len=5, max_len=10):
""" Randomly shift parts of the crop up or down.
Parameters
----------
n_segments : int
Number of segments to shift.
max_shift : int
Max size of shift along vertical axis.
min_len : int
Min size of shift along horizontal axis.
max_len : int
Max size of shift along horizontal axis.
"""
crop = np.copy(crop)
for _ in range(n_segments):
# Point of starting the distortion, its length and size
begin = np.random.randint(0, crop.shape[1])
length = np.random.randint(min_len, max_len)
shift = np.random.randint(-max_shift, max_shift)
# Apply shift
segment = crop[:, begin:min(begin + length, crop.shape[1]), :]
shifted_segment = np.zeros_like(segment)
if shift > 0:
shifted_segment[:, :, shift:] = segment[:, :, :-shift]
elif shift < 0:
shifted_segment[:, :, :shift] = segment[:, :, -shift:]
if shift != 0:
crop[:, begin:min(begin + length, crop.shape[1]), :] = shifted_segment
return crop
@apply_parallel
def bend_masks(self, crop, angle=10):
""" Rotate part of the mask on a given angle.
Must be used for crops in (xlines, heights, inlines) format.
Parameters
----------
angle : float
Rotation angle in degrees.
"""
shape = crop.shape
point_x = np.random.randint(0, shape[0])
point_h = np.argmax(crop[point_x, :, :])
if np.sum(crop[point_x, point_h, :]) == 0.0:
return crop
matrix = cv2.getRotationMatrix2D((point_h, point_x), angle, 1)
rotated = cv2.warpAffine(crop, matrix, (shape[1], shape[0])).reshape(shape)
combined = np.zeros_like(crop)
if point_x >= shape[0]//2:
combined[:point_x, :, :] = crop[:point_x, :, :]
combined[point_x:, :, :] = rotated[point_x:, :, :]
else:
combined[point_x:, :, :] = crop[point_x:, :, :]
combined[:point_x, :, :] = rotated[:point_x, :, :]
return combined
@apply_parallel
def linearize_masks(self, crop, n=3, shift=0, kind='random', width=None):
""" Sample `n` points from the original mask and create a new mask by interpolating them.
Parameters
----------
n : int
Number of points to sample.
shift : int
Maximum amplitude of random shift along the heights axis.
kind : {'random', 'linear', 'slinear', 'quadratic', 'cubic', 'previous', 'next'}
Type of interpolation to use. If 'random', then chosen randomly for each crop.
width : int
Width of interpolated lines.
"""
# Parse arguments
if kind == 'random':
kind = np.random.choice(['linear', 'slinear', 'quadratic', 'cubic', 'previous', 'next'])
if width is None:
width = np.sum(crop, axis=2)
width = int(np.round(np.mean(width[width!=0])))
# Choose the anchor points
axis = 1 - np.argmin(crop.shape)
*nz, _ = np.nonzero(crop)
min_, max_ = nz[axis][0], nz[axis][-1]
idx = [min_, max_]
step = (max_ - min_) // n
for i in range(0, max_-step, step):
idx.append(np.random.randint(i, i + step))
# Put anchors into new mask
mask_ = np.zeros_like(crop)
slc = (idx if axis == 0 else slice(None),
idx if axis == 1 else slice(None),
slice(None))
mask_[slc] = crop[slc]
*nz, y = np.nonzero(mask_)
# Shift heights randomly
x = nz[axis]
y += np.random.randint(-shift, shift + 1, size=y.shape)
# Sort and keep only unique values, based on `x` to remove width of original mask
sort_indices = np.argsort(x)
x, y = x[sort_indices], y[sort_indices]
_, unique_indices = np.unique(x, return_index=True)
x, y = x[unique_indices], y[unique_indices]
# Interpolate points; put into mask
interpolator = interp1d(x, y, kind=kind)
indices = np.arange(min_, max_, dtype=np.int32)
heights = interpolator(indices).astype(np.int32)
slc = (indices if axis == 0 else indices * 0,
indices if axis == 1 else indices * 0,
np.clip(heights, 0, crop.shape[2]-1))
mask_ = np.zeros_like(crop)
mask_[slc] = 1
# Make horizon wider
structure = np.ones((1, width), dtype=np.uint8)
shape = mask_.shape
mask_ = mask_.reshape((mask_.shape[axis], mask_.shape[2]))
mask_ = cv2.dilate(mask_, kernel=structure, iterations=1).reshape(shape)
return mask_
# Predictions
@action
@inbatch_parallel(init='indices', post=None, target='for')
def update_accumulator(self, ix, src, accumulator):
""" Update accumulator with data from crops.
Allows to gradually accumulate predicitons in a single instance, instead of
keeping all of them and assembling later.
Parameters
----------
src : str
Component with crops.
accumulator : Accumulator3D
Container for cube aggregation.
"""
crop = self.get(ix, src)
location = self.get(ix, 'locations')
if self.get(ix, 'orientations'):
crop = crop.transpose(1, 0, 2)
accumulator.update(crop, location)
return self
@action
@inbatch_parallel(init='indices', target='for', post='_masks_to_horizons_post')
def masks_to_horizons(self, ix, src_masks='masks', dst='predicted_labels',
threshold=0.5, mode='mean', minsize=0, mean_threshold=2.0,
adjacency=1, skip_merge=False, prefix='predict'):
""" Convert predicted segmentation mask to a list of Horizon instances.
Parameters
----------
src_masks : str
Component of batch that stores masks.
dst : str/object
Component of batch to store the resulting horizons.
threshold, mode, minsize, mean_threshold, adjacency, prefix
Passed directly to :meth:`Horizon.from_mask`.
"""
_ = dst, mean_threshold, adjacency, skip_merge
# Threshold the mask, transpose and rotate the mask if needed
mask = self.get(ix, src_masks)
if self.get(ix, 'orientations'):
mask = np.transpose(mask, (1, 0, 2))
field = self.get(ix, 'fields')
origin = [self.get(ix, 'locations')[k].start for k in range(3)]
horizons = Horizon.from_mask(mask, field=field, origin=origin, threshold=threshold,
mode=mode, minsize=minsize, prefix=prefix)
return horizons
def _masks_to_horizons_post(self, horizons_lists, *args, dst=None, skip_merge=False,
mean_threshold=2.0, adjacency=1, **kwargs):
""" Flatten list of lists of horizons, attempting to merge what can be merged. """
_, _ = args, kwargs
if dst is None:
raise ValueError("dst should be initialized with empty list.")
if skip_merge:
setattr(self, dst, [hor for hor_list in horizons_lists for hor in hor_list])
return self
for horizons in horizons_lists:
for horizon_candidate in horizons:
for horizon_target in dst:
merge_code, _ = Horizon.verify_merge(horizon_target, horizon_candidate,
mean_threshold=mean_threshold,
adjacency=adjacency)
if merge_code == 3:
merged = Horizon.overlap_merge(horizon_target, horizon_candidate, inplace=True)
elif merge_code == 2:
merged = Horizon.adjacent_merge(horizon_target, horizon_candidate, inplace=True,
adjacency=adjacency, mean_threshold=mean_threshold)
else:
merged = False
if merged:
break
else:
# If a horizon can't be merged to any of the previous ones, we append it as it is
dst.append(horizon_candidate)
return self
# More component actions
@action
def concat_components(self, src, dst, axis=-1):
    """ Concatenate a list of components and save results to `dst` component.

    Parameters
    ----------
    src : array-like
        List of components to concatenate of length more than one.
    dst : str
        Component of batch to put results in.
    axis : int
        The axis along which the arrays will be joined.
    """
    if len(src) == 1:
        warn("Since `src` contains only one component, concatenation not needed.")
    items = [self.get(None, attr) for attr in src]
    # Preallocate the destination with the summed length along the concatenation axis
    concat_axis_length = sum(item.shape[axis] for item in items)
    final_shape = [*items[0].shape]
    final_shape[axis] = concat_axis_length
    if axis < 0:
        axis = len(final_shape) + axis
    prealloc = np.empty(final_shape, dtype=np.float32)
    length_counter = 0
    slicing = [slice(None) for _ in range(axis + 1)]
    for item in items:
        length_shift = item.shape[axis]
        slicing[-1] = slice(length_counter, length_counter + length_shift)
        # NumPy requires a tuple of slices: indexing with a list raises in modern versions
        prealloc[tuple(slicing)] = item
        length_counter += length_shift
    setattr(self, dst, prealloc)
    return self
@action
def transpose(self, src, order):
    """ Change order of axes for the given components. """
    components = [src] if isinstance(src, str) else src
    # Shift every axis by one to account for the leading batch-items dimension
    full_order = (0, *(axis + 1 for axis in order))
    for name in components:
        setattr(self, name, np.transpose(self.get(component=name), full_order))
    return self
@apply_parallel
def rotate_axes(self, crop):
    """ The last shall be the first and the first last.

    Notes
    -----
    Actions `make_locations`, `load_cubes`, `create_mask` make data in [iline, xline, height] format.
    Since most of the TensorFlow models perceive ilines as channels, it might be convenient
    to change format to [xlines, height, ilines] via this action.
    """
    # One transpose is equivalent to the two successive axis swaps (0, 1) then (1, 2)
    return np.transpose(crop, (1, 2, 0))
# Augmentations
@apply_parallel
def add_axis(self, crop):
    """ Append a trailing axis to the crop.

    Notes
    -----
    Used in combination with `dice` and `ce` losses to tell model that input is
    3D entity, but 2D convolutions are used.
    """
    return np.expand_dims(crop, axis=-1)
@apply_parallel
def additive_noise(self, crop, scale):
    """ Add zero-centered gaussian noise to every entry of the crop.

    Parameters
    ----------
    scale : float
        Standard deviation of the normal distribution.
    """
    generator = np.random.default_rng()
    return crop + scale * generator.standard_normal(dtype=np.float32, size=crop.shape)
@apply_parallel
def multiplicative_noise(self, crop, scale):
    """ Multiply every entry of the crop by gaussian noise centered at 1.

    Parameters
    ----------
    scale : float
        Standard deviation of the normal distribution.
    """
    generator = np.random.default_rng()
    return crop * (1 + scale * generator.standard_normal(dtype=np.float32, size=crop.shape))
@apply_parallel
def cutout_2d(self, crop, patch_shape, n):
    """ Change patches of data to zeros.

    Parameters
    ----------
    patch_shape : array-like
        Shape of patches along each axis.
    n : float
        Number of patches to cut.
    """
    rnd = np.random.RandomState(int(n*100)).uniform
    # Accept any array-like (list, tuple, ndarray): the original `.astype` call required an ndarray
    patch_shape = np.asarray(patch_shape).astype(int)
    copy_ = copy(crop)
    for _ in range(int(n)):
        starts = [int(rnd(crop.shape[ax] - patch_shape[ax])) for ax in range(3)]
        stops = [starts[ax] + patch_shape[ax] for ax in range(3)]
        slices = [slice(start, stop) for start, stop in zip(starts, stops)]
        copy_[tuple(slices)] = 0
    return copy_
@apply_parallel
def rotate(self, crop, angle):
""" Rotate crop along the first two axes. Angles are defined as Tait-Bryan angles and the sequence of
extrinsic rotations axes is (axis_2, axis_0, axis_1).
Parameters
----------
angle : float or tuple of floats
Angles of rotation about each axes (axis_2, axis_0, axis_1). If float, angle of rotation
about the last axis.
"""
angle = angle if isinstance(angle, (tuple, list)) else (angle, 0, 0)
crop = self._rotate(crop, angle[0])
if angle[1] != 0:
crop = crop.transpose(1, 2, 0)
crop = self._rotate(crop, angle[1])
crop = crop.transpose(2, 0, 1)
if angle[2] != 0:
crop = crop.transpose(2, 0, 1)
crop = self._rotate(crop, angle[2])
crop = crop.transpose(1, 2, 0)
return crop
def _rotate(self, crop, angle):
    """ Rotate the first two axes of the crop by `angle` degrees around its center. """
    shape = crop.shape
    matrix = cv2.getRotationMatrix2D((shape[1]//2, shape[0]//2), angle, 1)
    # warpAffine drops trailing singleton dims, hence the reshape back to the original shape
    return cv2.warpAffine(crop, matrix, (shape[1], shape[0])).reshape(shape)
@apply_parallel
def flip(self, crop, axis=0, seed=0.1, threshold=0.5):
    """ Flip crop along the given axis with a fixed probability.

    Parameters
    ----------
    axis : int
        Axis to flip along
    seed : float
        Seed for the random state; multiplied by 100 and truncated to an int.
    threshold : float
        The flip is applied only when the drawn uniform value is >= `threshold`.
    """
    rnd = np.random.RandomState(int(seed*100)).uniform
    if rnd() >= threshold:
        return cv2.flip(crop, axis).reshape(crop.shape)
    return crop
@apply_parallel
def scale_2d(self, crop, scale):
""" Zoom in or zoom out along the first two axis.
Parameters
----------
scale : tuple or float
Zooming factor for the first two axis.
"""
scale = scale if isinstance(scale, (list, tuple)) else [scale] * 2
crop = self._scale(crop, [scale[0], scale[1]])
return crop
@apply_parallel
def scale(self, crop, scale):
""" Zoom in or zoom out along each axis of crop.
Parameters
----------
scale : tuple or float
Zooming factor for each axis.
"""
scale = scale if isinstance(scale, (list, tuple)) else [scale] * 3
crop = self._scale(crop, [scale[0], scale[1]])
crop = crop.transpose(1, 2, 0)
crop = self._scale(crop, [1, scale[-1]]).transpose(2, 0, 1)
return crop
def _scale(self, crop, scale):
    """ Zoom the first two axes of the crop by the given factors, keeping the center fixed. """
    shape = crop.shape
    matrix = np.zeros((2, 3))
    # Affine matrix: diagonal scaling plus a translation that keeps the image centered
    matrix[:, :-1] = np.diag([scale[1], scale[0]])
    matrix[:, -1] = np.array([shape[1], shape[0]]) * (1 - np.array([scale[1], scale[0]])) / 2
    return cv2.warpAffine(crop, matrix, (shape[1], shape[0])).reshape(shape)
@apply_parallel
def affine_transform(self, crop, alpha_affine=10):
""" Perspective transform. Moves three points to other locations.
Guaranteed not to flip image or scale it more than 2 times.
Parameters
----------
alpha_affine : float
Maximum distance along each axis between points before and after transform.
"""
rnd = np.random.RandomState(int(alpha_affine*100)).uniform
shape = np.array(crop.shape)[:2]
if alpha_affine >= min(shape)//16:
alpha_affine = min(shape)//16
center_ = shape // 2
square_size = min(shape) // 3
pts1 = np.float32([center_ + square_size,
center_ - square_size,
[center_[0] + square_size, center_[1] - square_size]])
pts2 = pts1 + rnd(-alpha_affine, alpha_affine, size=pts1.shape).astype(np.float32)
matrix = cv2.getAffineTransform(pts1, pts2)
return cv2.warpAffine(crop, matrix, (shape[1], shape[0])).reshape(crop.shape)
@apply_parallel
def perspective_transform(self, crop, alpha_persp):
""" Perspective transform. Moves four points to other four.
Guaranteed not to flip image or scale it more than 2 times.
Parameters
----------
alpha_persp : float
Maximum distance along each axis between points before and after transform.
"""
rnd = np.random.RandomState(int(alpha_persp*100)).uniform
shape = np.array(crop.shape)[:2]
if alpha_persp >= min(shape) // 16:
alpha_persp = min(shape) // 16
center_ = shape // 2
square_size = min(shape) // 3
pts1 = np.float32([center_ + square_size,
center_ - square_size,
[center_[0] + square_size, center_[1] - square_size],
[center_[0] - square_size, center_[1] + square_size]])
pts2 = pts1 + rnd(-alpha_persp, alpha_persp, size=pts1.shape).astype(np.float32)
matrix = cv2.getPerspectiveTransform(pts1, pts2)
return cv2.warpPerspective(crop, matrix, (shape[1], shape[0])).reshape(crop.shape)
@apply_parallel
def elastic_transform(self, crop, alpha=40, sigma=4):
""" Transform indexing grid of the first two axes.
Parameters
----------
alpha : float
Maximum shift along each axis.
sigma : float
Smoothening factor.
"""
rng = np.random.default_rng(seed=int(alpha*100))
shape_size = crop.shape[:2]
grid_scale = 4
alpha //= grid_scale
sigma //= grid_scale
grid_shape = (shape_size[0]//grid_scale, shape_size[1]//grid_scale)
blur_size = int(4 * sigma) | 1
rand_x = cv2.GaussianBlur(rng.random(size=grid_shape, dtype=np.float32) * 2 - 1,
ksize=(blur_size, blur_size), sigmaX=sigma) * alpha
rand_y = cv2.GaussianBlur(rng.random(size=grid_shape, dtype=np.float32) * 2 - 1,
ksize=(blur_size, blur_size), sigmaX=sigma) * alpha
if grid_scale > 1:
rand_x = cv2.resize(rand_x, shape_size[::-1])
rand_y = cv2.resize(rand_y, shape_size[::-1])
grid_x, grid_y = np.meshgrid(np.arange(shape_size[1]), np.arange(shape_size[0]))
grid_x = (grid_x.astype(np.float32) + rand_x)
grid_y = (grid_y.astype(np.float32) + rand_y)
distorted_img = cv2.remap(crop, grid_x, grid_y,
borderMode=cv2.BORDER_REFLECT_101,
interpolation=cv2.INTER_LINEAR)
return distorted_img.reshape(crop.shape)
@apply_parallel
def bandwidth_filter(self, crop, lowcut=None, highcut=None, fs=1, order=3):
""" Keep only frequences between lowcut and highcut.
Notes
-----
Use it before other augmentations, especially before ones that add lots of zeros.
Parameters
----------
lowcut : float
Lower bound for frequences kept.
highcut : float
Upper bound for frequences kept.
fs : float
Sampling rate.
order : int
Filtering order.
"""
nyq = 0.5 * fs
if lowcut is None:
b, a = butter(order, highcut / nyq, btype='high')
elif highcut is None:
b, a = butter(order, lowcut / nyq, btype='low')
else:
b, a = butter(order, [lowcut / nyq, highcut / nyq], btype='band')
return lfilter(b, a, crop, axis=1)
@apply_parallel
def sign(self, crop):
    """ Element-wise indication of the sign of a number: -1, 0 or +1 for each entry. """
    return np.sign(crop)
@apply_parallel
def analytic_transform(self, crop, axis=1, mode='phase'):
""" Compute instantaneous phase or frequency via the Hilbert transform.
Parameters
----------
axis : int
Axis of transformation. Intended to be used after `rotate_axes`, so default value
is to make transform along depth dimension.
mode : str
If 'phase', compute instantaneous phase.
If 'freq', compute instantaneous frequency.
"""
analytic = hilbert(crop, axis=axis)
phase = np.unwrap(np.angle(analytic))
if mode == 'phase':
return phase
if 'freq' in mode:
return np.diff(phase, axis=axis, prepend=0) / (2*np.pi)
raise ValueError('Unknown `mode` parameter.')
@apply_parallel
def gaussian_filter(self, crop, axis=1, sigma=2, order=0):
    """ Apply a 1d gaussian filter along the specified axis.

    Parameters
    ----------
    axis : int
        Axis to smooth along.
    sigma : float
        Standard deviation of the gaussian kernel.
    order : int
        Derivative order of the gaussian; 0 means plain smoothing.
    """
    return gaussian_filter1d(crop, sigma=sigma, axis=axis, order=order)
@apply_parallel
def central_crop(self, crop, shape):
    """ Central crop of defined shape.

    Parameters
    ----------
    shape : sequence of int
        Target shape, one entry per crop dimension; each entry must not
        exceed the corresponding crop dimension.

    Raises
    ------
    ValueError
        If any requested dimension exceeds the crop's dimension.
    """
    crop_shape = np.array(crop.shape)
    shape = np.array(shape)
    if (shape > crop_shape).any():
        # Original message read "can't be large then" — fixed grammar.
        raise ValueError(f"shape can't be larger than crop shape ({crop_shape}) but {shape} was given.")
    # Integer-divide both shapes so the crop is centered (biased one cell
    # towards the origin when sizes differ in parity).
    corner = crop_shape // 2 - shape // 2
    slices = tuple(slice(start, start+length) for start, length in zip(corner, shape))
    return crop[slices]
@apply_parallel
def translate(self, crop, shift=5, scale=0.0):
    """ Add and multiply values by uniformly sampled values.

    The additive offset is drawn from U(-shift, shift) and the multiplicative
    factor from U(1-scale, 1+scale), using the batch's RNG.
    """
    offset = self.random.uniform(-shift, shift)
    factor = self.random.uniform(1 - scale, 1 + scale)
    return (crop + offset) * factor
@action
def adaptive_expand(self, src, dst=None, channels='first'):
    """ Add channels dimension to 4D components if needed. If component data has shape `(batch_size, 1, n_x, n_d)`,
    it will be kept. If shape is `(batch_size, n_i, n_x, n_d)` and `n_i > 1`, a channels axis
    at position `axis` will be created.
    """
    dst = dst or src
    if isinstance(src, str):
        src = [src]
    if isinstance(dst, str):
        dst = [dst]
    # Channels-first inserts the new axis right after the batch axis.
    axis = 1 if channels in [0, 'first'] else -1
    for name_in, name_out in zip(src, dst):
        data = getattr(self, name_in)
        if data.ndim == 4 and data.shape[1] != 1:
            data = np.expand_dims(data, axis=axis)
        setattr(self, name_out, data)
    return self
@action
def adaptive_squeeze(self, src, dst=None, channels='first'):
    """ Remove channels dimension from 5D components if needed. If component data has shape
    `(batch_size, n_c, n_i, n_x, n_d)` for `channels='first'` or `(batch_size, n_i, n_x, n_d, n_c)`
    for `channels='last'` and `n_c > 1`, the shape is kept. If `n_c == 1`, the channels axis at
    position `axis` is squeezed.
    """
    dst = dst or src
    if isinstance(src, str):
        src = [src]
    if isinstance(dst, str):
        dst = [dst]
    axis = 1 if channels in [0, 'first'] else -1
    for name_in, name_out in zip(src, dst):
        data = getattr(self, name_in)
        # Only drop the axis when it is a genuine singleton channel axis.
        if data.ndim == 5 and data.shape[axis] == 1:
            data = np.squeeze(data, axis=axis)
        setattr(self, name_out, data)
    return self
def plot_components(self, *components, idx=0, slide=None, **kwargs):
    """ Plot components of batch.

    Parameters
    ----------
    components : str or sequence of str
        Components to get from batch and draw.
    idx : int or None
        If int, then index of desired image in list.
        If None, then no indexing is applied.
    slide : slice
        Indexing element for individual images.
    """
    # Get components data
    if idx is not None:
        data = [getattr(self, comp)[idx].squeeze() for comp in components]
    else:
        data = [getattr(self, comp).squeeze() for comp in components]
    if slide is not None:
        data = [item[slide] for item in data]
    # Get location: `l` is a triple of slices (inline, crossline, depth)
    # describing where this crop sits in the cube.
    l = self.locations[idx]
    field_name = self.unsalt(self.indices[idx])
    displayed_name = self.dataset[field_name].displayed_name
    # The axis whose slice has unit length determines the section orientation.
    if (l[0].stop - l[0].start) == 1:
        suptitle = f'INLINE {l[0].start} CROSSLINES {l[1].start}:{l[1].stop} DEPTH {l[2].start}:{l[2].stop}'
    elif (l[1].stop - l[1].start) == 1:
        suptitle = f'CROSSLINE {l[1].start} INLINES {l[0].start}:{l[0].stop} DEPTH {l[2].start}:{l[2].stop}'
    else:
        suptitle = f'DEPTH {l[2].start} INLINES {l[0].start}:{l[0].stop} CROSSLINES {l[1].start}:{l[1].stop}'
    suptitle = f'batch item {idx} {displayed_name}\n{suptitle}'
    # Plot parameters: user-supplied kwargs override the defaults below.
    kwargs = {
        'figsize': (8 * len(components), 8),
        'suptitle_label': suptitle,
        'title': list(components),
        'xlabel': 'xlines',
        'ylabel': 'depth',
        'cmap': ['gray'] + ['viridis'] * len(components),
        'bad_values': (),
        **kwargs
    }
    # `plot_image` is a module-level helper defined elsewhere in the package.
    return plot_image(data, **kwargs)
def show(self, n=1, separate=True, components=None, **kwargs):
    """ Plot `n` random batch items.

    Only components actually present on the batch are drawn; defaults to
    images/masks/predictions when `components` is not given.
    """
    candidates = components or ['images', 'masks', 'predictions']
    present = [name for name in candidates if hasattr(self, name)]
    n = min(n, len(self))
    for idx in self.random.choice(len(self), size=n, replace=False):
        self.plot_components(*present, idx=idx, separate=separate, **kwargs)
|
import pyautogui
import time

# Banner kept byte-identical to the original output.
print(""" __ _ _ _ _
/ _| | | | | | | |
| |_| | ___ ___ __| | ___ __ _| | __| |_ __
| _| |/ _ \ / _ \ / _` | / __|/ _` | |/ _` | '__|
| | | | (_) | (_) | (_| | \__ \ (_| | | (_| | |
|_| |_|\___/ \___/ \__,_| |___/\__,_|_|\__,_|_|
""")
print("developer = Ahmet Parmaksฤฑzoฤlu")
print("https://github.com/ahmetpar1")

# input() already returns a str; the original wrapped it in str() needlessly.
word = input("SALDIRIDA KULLANILACAK KELฤฐMEYฤฐ GฤฐRฤฐN >>> ")
count = int(input("KAรTANE KELฤฐME GรNDERSฤฐN >>> "))

print("5 saniye sonra baลlฤฑycak")
time.sleep(5)

# A bounded for-loop replaces the original `while True` with a hand-rolled
# counter and `quit()`; the script now simply ends after `count` messages.
for _ in range(count):
    pyautogui.typewrite(word)
    pyautogui.press('enter')
|
import sys
import time
from tello import Tello
from datetime import datetime
from command_mode import CommandMode
from free_mode import FreeMode
from tello_state import TelloState

# Timestamp used as the log file name for this session.
start_time = time.strftime("%Y%m%d-%H%M%S")
tello = Tello()
tello.send_command_await('command')

# Interactive mode loop: 'free', 'command' or 'exit'.
while True:
    print('Available Modes: Free, Command')
    print('Mode?....')
    mode = raw_input().rstrip().lower()
    try:
        if mode == 'free':
            print('Entered free mode')
            tello.print_thread_response = False
            mode = FreeMode(tello)
            mode.free_mode()
        elif mode == 'command':
            print('Entered command mode')
            mode = CommandMode(tello)
            mode.command_mode()
        elif mode == 'exit':
            break
    # The original used a bare `except:`, which also swallows SystemExit and
    # KeyboardInterrupt; Exception keeps the "retry on bad input" intent.
    except Exception:
        print('Invalid Command....')

# Dump the flight log; `with` guarantees the file is closed (the original
# never closed it) and the loop variable no longer shadows builtin `str`.
log = tello.get_log()
with open('C:/Users/Craig/Desktop/Tello-master/log/' + start_time + '.txt', 'w') as out:
    for stat in log:
        stat.print_stats()
        out.write(stat.return_stats())
# -*- coding: utf-8 -*-
# @Time : 2019-12-23
# @Author : mizxc
# @Email : xiangxianjiao@163.com
import re
import time
import datetime
from bs4 import BeautifulSoup
def strLength(str,min,max):
    """Return True when the length of ``str`` lies within [min, max].

    NOTE(review): the parameter names shadow builtins; they are kept
    unchanged because they are part of the public signature.
    """
    return min <= len(str) <= max
# Split a space-separated tag string into an array of unique tags, dropping
# empty strings. (The original Chinese comment here was broken across lines
# without '#' prefixes, which was a syntax error.)
def getTagsFormStrTag(str):
    """Return the unique, non-empty tags contained in a space-separated string."""
    tags = [tag for tag in str.strip().split(' ') if tag]
    # dict.fromkeys deduplicates while keeping first-seen order, making the
    # result deterministic (list(set(...)) returned an arbitrary order).
    return list(dict.fromkeys(tags))
#_______________________________________________
# Format the current time as a string
def getStrTime():
    """Return the current local time as a 'YYYY-MM-DD HH:MM:SS' string."""
    now = datetime.datetime.now()
    return now.strftime('%Y-%m-%d %H:%M:%S')
def datetimeToStr(datetime1):
    """Format a datetime object as 'YYYY-MM-DD HH:MM:SS'."""
    return datetime1.strftime('%Y-%m-%d %H:%M:%S')
# Convert a 'YYYY-MM-DD HH:MM:SS' string (local time) to a Unix timestamp
def strToTimestamp(str1):
    """Parse ``str1`` as local time and return the Unix timestamp (float)."""
    parsed = time.strptime(str1, '%Y-%m-%d %H:%M:%S')
    return time.mktime(parsed)
# Convert a datetime to a Unix timestamp
def datetimeToTimestamp(dt1):
    """Return the Unix timestamp of the naive local datetime ``dt1``.

    The original formatted the datetime to a string and re-parsed it with
    strptime; ``timetuple()`` yields the same struct_time directly
    (tm_isdst=-1 for naive datetimes), skipping the string detour.
    """
    return time.mktime(dt1.timetuple())
# Convert a Unix timestamp to a datetime (local time)
def timestampToDatetime(timestamp):
    """Return the local datetime corresponding to ``timestamp``."""
    return datetime.datetime.fromtimestamp(timestamp)
# Convert a Unix timestamp to a local-time string
def timestampToLocaltime(datetime1):
    """Format Unix timestamp ``datetime1`` as a local 'YYYY-MM-DD HH:MM:SS' string."""
    local = time.localtime(datetime1)
    return time.strftime('%Y-%m-%d %H:%M:%S', local)
# Convert a 'YYYY-MM-DD HH:MM:SS' string to a datetime object
def strToDatetime(str1):
    """Parse ``str1`` (local time) into a datetime via its Unix timestamp."""
    stamp = strToTimestamp(str1)
    return timestampToDatetime(stamp)
#_______________________________________________
# mongoengine pagination helper
def getPagingParameters(page,count=10):
    """Return [start, end] slice bounds for page ``page`` (default page size 10)."""
    page = int(page)
    start = page * count
    return [start, start + count]
# Extract the lead text (first 200 chars) and cover image URL from article HTML
def getLeadAndCover(s):
    """Return ``[lead, cover_src]`` extracted from HTML fragment ``s``.

    ``cover_src`` is None when the fragment contains no <img> tag.
    """
    soup = BeautifulSoup('<div>%s</div>' % s, "html.parser")
    lead = soup.div.get_text()[:200] + '...'
    images = soup.find_all('img')
    cover = images[0].get('src') if images else None
    return [lead, cover]
if __name__ == '__main__':
    # Quick manual check of the tag-splitting helper.
    sample = ' sdf sdf sdf sdf sfwefgw sfd wef sd sd d fewef '
    print(getTagsFormStrTag(sample))
import unittest
import mock
import numpy as np
from smqtk.algorithms.nn_index.hash_index.sklearn_balltree import \
SkLearnBallTreeHashIndex
from smqtk.representation.data_element.memory_element import DataMemoryElement
from smqtk.utils.bit_utils import int_to_bit_vector_large
class TestBallTreeHashIndex (unittest.TestCase):
    """Unit tests for ``SkLearnBallTreeHashIndex``.

    Covers: configuration defaults and round-trips, index build / update /
    removal semantics, element counting, nearest-neighbor query guards, and
    cache-element (de)serialization of the underlying ball tree.
    """

    def test_is_usable(self):
        # Should always be true because major dependency (sklearn) is a package
        # requirement.
        self.assertTrue(SkLearnBallTreeHashIndex.is_usable())

    def test_default_configuration(self):
        # Default config: null cache element, leaf_size 40, no fixed seed.
        c = SkLearnBallTreeHashIndex.get_default_config()
        self.assertEqual(len(c), 3)
        self.assertIsInstance(c['cache_element'], dict)
        self.assertIsNone(c['cache_element']['type'])
        self.assertEqual(c['leaf_size'], 40)
        self.assertIsNone(c['random_seed'])

    def test_init_without_cache(self):
        # Constructor stores parameters verbatim; no tree exists yet (bt is None).
        i = SkLearnBallTreeHashIndex(cache_element=None, leaf_size=52,
                                     random_seed=42)
        self.assertIsNone(i.cache_element)
        self.assertEqual(i.leaf_size, 52)
        self.assertEqual(i.random_seed, 42)
        self.assertIsNone(i.bt)

    def test_init_with_empty_cache(self):
        # An empty cache element is retained but yields no loaded model.
        empty_cache = DataMemoryElement()
        i = SkLearnBallTreeHashIndex(cache_element=empty_cache,
                                     leaf_size=52,
                                     random_seed=42)
        self.assertEqual(i.cache_element, empty_cache)
        self.assertEqual(i.leaf_size, 52)
        self.assertEqual(i.random_seed, 42)
        self.assertIsNone(i.bt)

    def test_get_config(self):
        # get_config mirrors the default configuration keys.
        bt = SkLearnBallTreeHashIndex()
        bt_c = bt.get_config()
        self.assertEqual(len(bt_c), 3)
        self.assertIn('cache_element', bt_c)
        self.assertIn('leaf_size', bt_c)
        self.assertIn('random_seed', bt_c)
        self.assertIsInstance(bt_c['cache_element'], dict)
        self.assertIsNone(bt_c['cache_element']['type'])

    def test_init_consistency(self):
        # Test that constructing an instance with a configuration yields the
        # same config via ``get_config``.
        # - Default config should be a valid configuration for this impl.
        c = SkLearnBallTreeHashIndex.get_default_config()
        self.assertEqual(
            SkLearnBallTreeHashIndex.from_config(c).get_config(),
            c
        )
        # With non-null cache element
        c['cache_element']['type'] = 'DataMemoryElement'
        self.assertEqual(
            SkLearnBallTreeHashIndex.from_config(c).get_config(),
            c
        )

    def test_build_index_no_input(self):
        # Building from an empty iterable must raise.
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        self.assertRaises(
            ValueError,
            bt.build_index, []
        )

    def test_build_index(self):
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        # Make 1000 random bit vectors of length 256
        m = np.random.randint(0, 2, 1000 * 256).reshape(1000, 256)
        bt.build_index(m)
        # deterministically sort index of built and source data to determine
        # that an index was built.
        self.assertIsNotNone(bt.bt)
        np.testing.assert_array_almost_equal(
            sorted(np.array(bt.bt.data).tolist()),
            sorted(m.tolist())
        )

    def test_update_index_no_input(self):
        # Updating with an empty iterable must raise, same as build.
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        self.assertRaises(
            ValueError,
            bt.update_index, []
        )

    def test_update_index_new_index(self):
        # Virtually the same as `test_build_index` but using update_index.
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        # Make 1000 random bit vectors of length 256
        m = np.random.randint(0, 2, 1000 * 256).reshape(1000, 256).astype(bool)
        bt.update_index(m)
        # deterministically sort index of built and source data to determine
        # that an index was built.
        self.assertIsNotNone(bt.bt)
        np.testing.assert_array_almost_equal(
            sorted(np.array(bt.bt.data).tolist()),
            sorted(m.tolist())
        )

    def test_update_index_additive(self):
        # Test updating an existing index, i.e. rebuilding using the union of
        # previous and new data.
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        # Make 1000 random bit vectors of length 256
        m1 = np.random.randint(0, 2, 1000 * 256).reshape(1000, 256)\
             .astype(bool)
        m2 = np.random.randint(0, 2, 100 * 256).reshape(100, 256).astype(bool)
        # Build initial index
        bt.build_index(m1)
        # Current model should only contain m1's data.
        np.testing.assert_array_almost_equal(
            sorted(np.array(bt.bt.data).tolist()),
            sorted(m1.tolist())
        )
        # "Update" index with new hashes
        bt.update_index(m2)
        # New model should contain the union of the data.
        np.testing.assert_array_almost_equal(
            sorted(np.array(bt.bt.data).tolist()),
            sorted(np.concatenate([m1, m2], 0).tolist())
        )

    def test_remove_from_index_no_index(self):
        # A key error should be raised if there is no ball-tree index yet.
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        rm_hash = np.random.randint(0, 2, 256)
        self.assertRaisesRegexp(
            KeyError,
            str(rm_hash[0]),
            bt.remove_from_index,
            [rm_hash]
        )

    def test_remove_from_index_invalid_key_single(self):
        # Removing a hash not in the index raises KeyError and leaves the
        # index data untouched.
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        index = np.ndarray((1000, 256), bool)
        for i in range(1000):
            index[i] = int_to_bit_vector_large(i, 256)
        bt.build_index(index)
        # Copy post-build index for checking no removal occurred
        bt_data = np.copy(bt.bt.data)
        self.assertRaises(
            KeyError,
            bt.remove_from_index, [
                int_to_bit_vector_large(1001, 256),
            ]
        )
        np.testing.assert_array_equal(
            bt_data,
            np.asarray(bt.bt.data)
        )

    def test_remove_from_index_invalid_key_multiple(self):
        # Test that mixed valid and invalid keys raises KeyError and does not
        # modify the index.
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        index = np.ndarray((1000, 256), bool)
        for i in range(1000):
            index[i] = int_to_bit_vector_large(i, 256)
        bt.build_index(index)
        # Copy post-build index for checking no removal occurred
        bt_data = np.copy(bt.bt.data)
        self.assertRaises(
            KeyError,
            bt.remove_from_index, [
                int_to_bit_vector_large(42, 256),
                int_to_bit_vector_large(1008, 256),
            ]
        )
        np.testing.assert_array_equal(
            bt_data,
            np.asarray(bt.bt.data)
        )

    def test_remove_from_index(self):
        # Test that we actually remove from the index.
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        index = np.ndarray((1000, 256), bool)
        for i in range(1000):
            index[i] = int_to_bit_vector_large(i, 256)
        bt.build_index(index)
        # Copy post-build index for checking no removal occurred
        bt_data = np.copy(bt.bt.data)
        bt.remove_from_index([
            int_to_bit_vector_large(42, 256),
            int_to_bit_vector_large(998, 256),
        ])
        # Make sure expected arrays are missing from data block.
        new_data = np.asarray(bt.bt.data)
        self.assertEqual(new_data.shape, (998, 256))
        new_data_set = set(tuple(r) for r in new_data.tolist())
        self.assertNotIn(tuple(int_to_bit_vector_large(42, 256)),
                         new_data_set)
        self.assertNotIn(tuple(int_to_bit_vector_large(998, 256)),
                         new_data_set)

    def test_remove_from_index_last_element(self):
        """
        Test removing the final the only element / final elements from the
        index.
        """
        # Add one hash, remove one hash.
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        index = np.ndarray((1, 256), bool)
        index[0] = int_to_bit_vector_large(1, 256)
        bt.build_index(index)
        self.assertEqual(bt.count(), 1)
        bt.remove_from_index(index)
        self.assertEqual(bt.count(), 0)
        self.assertIsNone(bt.bt)
        # Add many hashes, remove many hashes in batches until zero
        bt = SkLearnBallTreeHashIndex(random_seed=0)
        index = np.ndarray((1000, 256), bool)
        for i in range(1000):
            index[i] = int_to_bit_vector_large(i, 256)
        bt.build_index(index)
        # Remove first 250
        bt.remove_from_index(index[:250])
        self.assertEqual(bt.count(), 750)
        self.assertIsNotNone(bt.bt)
        # Remove second 250
        bt.remove_from_index(index[250:500])
        self.assertEqual(bt.count(), 500)
        self.assertIsNotNone(bt.bt)
        # Remove third 250
        bt.remove_from_index(index[500:750])
        self.assertEqual(bt.count(), 250)
        self.assertIsNotNone(bt.bt)
        # Remove final 250
        bt.remove_from_index(index[750:])
        self.assertEqual(bt.count(), 0)
        self.assertIsNone(bt.bt)

    def test_remove_from_index_last_element_with_cache(self):
        """
        Test removing final element also clears the cache element.
        """
        c = DataMemoryElement()
        bt = SkLearnBallTreeHashIndex(cache_element=c, random_seed=0)
        index = np.ndarray((1, 256), bool)
        index[0] = int_to_bit_vector_large(1, 256)
        bt.build_index(index)
        self.assertEqual(bt.count(), 1)
        self.assertFalse(c.is_empty())
        bt.remove_from_index(index)
        self.assertEqual(bt.count(), 0)
        self.assertTrue(c.is_empty())

    def test_count_empty(self):
        bt = SkLearnBallTreeHashIndex()
        self.assertEqual(bt.count(), 0)

    def test_count_nonempty(self):
        bt = SkLearnBallTreeHashIndex()
        # Make 1000 random bit vectors of length 256
        m = np.random.randint(0, 2, 234 * 256).reshape(234, 256)
        bt.build_index(m)
        self.assertEqual(bt.count(), 234)

    def test_nn_no_index(self):
        # Querying before any index is built must raise a descriptive error.
        i = SkLearnBallTreeHashIndex()
        self.assertRaisesRegexp(
            ValueError,
            "No index currently set to query from",
            i.nn, [0, 0, 0]
        )

    @mock.patch('smqtk.algorithms.nn_index.hash_index.sklearn_balltree.np'
                '.savez')
    def test_save_model_no_cache(self, m_savez):
        bt = SkLearnBallTreeHashIndex()
        m = np.random.randint(0, 2, 1000 * 256).reshape(1000, 256)
        bt._build_bt_internal(m)
        # Underlying serialization function should not have been called
        # because no cache element set.
        self.assertFalse(m_savez.called)

    def test_save_model_with_readonly_cache(self):
        # A read-only cache element cannot be written; build must raise.
        cache_element = DataMemoryElement(readonly=True)
        bt = SkLearnBallTreeHashIndex(cache_element)
        m = np.random.randint(0, 2, 1000 * 256).reshape(1000, 256)
        self.assertRaises(
            ValueError,
            bt._build_bt_internal, m
        )

    @mock.patch('smqtk.algorithms.nn_index.hash_index.sklearn_balltree.np'
                '.savez')
    def test_save_model_with_cache(self, m_savez):
        # With a writable cache, serialization happens exactly once per build.
        cache_element = DataMemoryElement()
        bt = SkLearnBallTreeHashIndex(cache_element, random_seed=0)
        m = np.random.randint(0, 2, 1000 * 256).reshape(1000, 256)
        bt._build_bt_internal(m)
        self.assertTrue(m_savez.called)
        self.assertEqual(m_savez.call_count, 1)

    def test_load_model(self):
        # Create two index instances, building model with one, and loading
        # the other with the cache of the first instance. Each should have
        # distinct model instances, but should otherwise have equal model
        # values and parameters.
        cache_element = DataMemoryElement()
        bt1 = SkLearnBallTreeHashIndex(cache_element, random_seed=0)
        m = np.random.randint(0, 2, 1000 * 256).reshape(1000, 256)
        bt1.build_index(m)
        bt2 = SkLearnBallTreeHashIndex(cache_element)
        self.assertIsNotNone(bt2.bt)
        q = np.random.randint(0, 2, 256).astype(bool)
        bt_neighbors, bt_dists = bt1.nn(q, 10)
        bt2_neighbors, bt2_dists = bt2.nn(q, 10)
        self.assertIsNot(bt1, bt2)
        self.assertIsNot(bt1.bt, bt2.bt)
        np.testing.assert_equal(bt2_neighbors, bt_neighbors)
        np.testing.assert_equal(bt2_dists, bt_dists)
|
import conn_db

# Dump every row of sbl_list. The original wrapped the whole thing in a bare
# `except:` that both swallowed the error and only closed the connection on
# failure; `finally` closes it on success and failure alike, and real errors
# now propagate instead of being silently hidden.
try:
    cur = conn_db.con.cursor()
    cur.execute("""select id, contact_id, phone_num, encoding, lang, offer_id, src_id, xml_text from sbl_list""")
    for res in cur:
        print(res)
finally:
    conn_db.con.close()
|
# 1st level Model Structure: Equation Block
# this module define the rules for constructing a kinetics block in the master block
# this is the global component set import, so that all modules uses the same set
from global_sets.component import m
from physics.bounds import kinetic_bounds
# data import
from data import kinetic_data as k
from utility.data_utility import cal_op
from pyomo import environ as pe
# import mean
from statistics import mean
# pre-processing
# op_ratio[i]: per-carbon-number split ratio computed from the raw O/P data —
# presumably the fraction routed to the first (paraffin, (2n+2)/2 H2) term
# below; TODO confirm against `cal_op`.
op_ratio = cal_op(k.op_ratio)
# H2 consumed per carbon number n = i+1, averaged over the two product routes
# ((2n+2)/2 and (2n)/2) weighted by op_ratio — NOTE(review): verify the
# stoichiometric weighting against the kinetic data source.
h2_consumption = [(2*(i+1)+2)/2*op_ratio[i] + (2*(i+1))/2*(1-op_ratio[i]) for i in range(len(op_ratio))]
# defile knietic block rule
def kinetic_block_rule(block):
    """Pyomo block rule for the Fischer-Tropsch (FT) + water-gas-shift (WGS)
    kinetics model.

    Declares local sets, bounded rate/equilibrium variables and the
    constraints tying them to the parent block's T, P, cat, f_V and
    r_total_comp components.
    """
    #-----------------------------------SETS-----------------------------------
    # local sets that will only be used in kinetics model
    # Carbon numbers C1..C56 tracked by the FT product distribution.
    block.C_NUMBER = pe.RangeSet(1,56)
    #-----------------------------GLOBAL VARIABLES-----------------------------
    # global variables
    # print('\t'*2,'Importing Kinetics Block......')
    # print('\t'*2,'Using the following parent variable:')
    # print('\t'*2,'-'*36)
    # print('\t'*2,block.parent_block().T.name)
    # print('\t'*2,block.parent_block().P.name)
    # print('\t'*2,block.parent_block().cat.name)
    # print('\t'*2,block.parent_block().f_V.name)
    # print('\t'*2,block.parent_block().r_total_comp.name)
    # print('\t'*2,'-'*36)
    # print('')
    #-----------------------------VARIABLES Bounds------------------------------
    # Each bound function widens the data-driven range by 10% on both sides.
    def k_FT_bounds(model):
        lower = min(kinetic_bounds['k_FT'])
        lower = lower - abs(lower)*0.1
        upper = max(kinetic_bounds['k_FT'])
        upper = upper + abs(upper)*0.1
        return (lower,upper)
    def g0_FT_bounds(model):
        lower = min(kinetic_bounds['g0_FT'])
        lower = lower - abs(lower)*0.1
        upper = max(kinetic_bounds['g0_FT'])
        upper = upper + abs(upper)*0.1
        return (lower,upper)
    def alpha_bounds(model):
        lower = min(kinetic_bounds['alpha'])
        lower = lower - abs(lower)*0.1
        upper = max(kinetic_bounds['alpha'])
        upper = upper + abs(upper)*0.1
        return (lower,upper)
    def k_WGS_bounds(model):
        lower = min(kinetic_bounds['k_WGS'])
        lower = lower - abs(lower)*0.1
        upper = max(kinetic_bounds['k_WGS'])
        upper = upper + abs(upper)*0.1
        return (lower,upper)
    def Ke_WGS_bounds(model):
        lower = min(kinetic_bounds['Ke_WGS'])
        lower = lower - abs(lower)*0.1
        upper = max(kinetic_bounds['Ke_WGS'])
        upper = upper + abs(upper)*0.1
        return (lower,upper)
    #------------------------------LOCAL VARIABLES------------------------------
    # FT Reaction
    block.k_FT = pe.Var(within=pe.PositiveReals,bounds=k_FT_bounds,initialize=5e-4)
    block.r_FT_total = pe.Var(within=pe.PositiveReals)
    block.g0_FT = pe.Var(within=pe.Reals,bounds=g0_FT_bounds)
    block.alpha = pe.Var(within=pe.PositiveReals,bounds=alpha_bounds,initialize=0.7)
    block.r_FT_cnum = pe.Var(block.C_NUMBER,within=pe.PositiveReals)
    block.r_FT_comp = pe.Var(m.COMP_TOTAL,within=pe.Reals) # kmol/s
    # WGS Reaction
    block.k_WGS = pe.Var(within=pe.PositiveReals,bounds=k_WGS_bounds,initialize=2e-4)
    block.Ke_WGS = pe.Var(within=pe.PositiveReals,bounds=Ke_WGS_bounds,initialize=3)
    block.r_WGS = pe.Var(within=pe.PositiveReals)
    block.r_WGS_comp = pe.Var(m.COMP_INORG,within=pe.Reals) # kmol/s
    # initialize these variable: 1/2(ub+lb)
    # NOTE(review): these assignments rely on Pyomo's implicit value
    # assignment for already-declared scalar Vars (attribute assignment of a
    # plain number sets the Var's value rather than replacing the component)
    # — TODO confirm against the Pyomo version in use.
    block.k_FT = mean(kinetic_bounds['k_FT'])
    block.g0_FT = mean(kinetic_bounds['g0_FT'])
    block.alpha = mean(kinetic_bounds['alpha'])
    block.k_WGS = mean(kinetic_bounds['k_WGS'])
    block.Ke_WGS = mean(kinetic_bounds['Ke_WGS'])
    print('>','Importing Kinetics Blocks......')
    print('>','Adding the following local variable:')
    print('-'*50)
    for i in block.component_objects(pe.Var,active=True):
        print('|',i)
    print('-'*50)
    print('')
    #---------------------------------Equations---------------------------------
    # FT Reaction
    # Langmuir-Hinshelwood-style rate, written multiplied through by the
    # denominator to avoid a division in the constraint.
    def r_FT_total_rule(block):
        return block.r_FT_total * (1+k.c_FT*(block.parent_block().f_V['CO'])**0.5+k.d_FT*(block.parent_block().f_V['H2'])**0.5)**2 == \
               (block.parent_block().cat*block.k_FT*block.parent_block().f_V['CO']**0.5*block.parent_block().f_V['H2']**0.5)
    block.r_FT_total_con = pe.Constraint(rule=r_FT_total_rule)
    # Arrhenius temperature dependence of the FT rate constant.
    def k_FT_rule(block):
        return block.k_FT == k.k0_FT * pe.exp(-k.E_FT*1e3/(k.R*block.parent_block().T))
    # NOTE(review): 'k_KT_con' looks like a typo for 'k_FT_con'; left
    # unchanged because external code may reference this attribute name.
    block.k_KT_con = pe.Constraint(rule=k_FT_rule)
    # ASF Distribution
    # Anderson-Schulz-Flory chain growth: rate at carbon number i is alpha
    # times the rate at i-1.
    def r_FT_cnum_rule1(block,i):
        if i == 1:
            return pe.Constraint.Skip
        else:
            return block.r_FT_cnum[i] == block.alpha*block.r_FT_cnum[i-1]
    block.r_FT_cnum_con1 = pe.Constraint(block.C_NUMBER, rule=r_FT_cnum_rule1)
    # Carbon balance ties the per-carbon-number rates to the total FT rate.
    def r_FT_cnum_rule2(block):
        return sum(i*block.r_FT_cnum[i] for i in block.C_NUMBER) == block.r_FT_total
    block.r_FT_cnum_con2 = pe.Constraint(rule=r_FT_cnum_rule2)
    # Linear-in-T Gibbs energy correlation for the growth probability.
    def g0_FT_rule(block):
        return block.g0_FT == k.g0_inter_FT + k.g0_slope_FT * block.parent_block().T
    block.g0_FT_con = pe.Constraint(rule=g0_FT_rule)
    def alpha_rule(block):
        return block.alpha**2 == (1-block.alpha) * pe.exp(block.g0_FT*1e3/(k.R*block.parent_block().T))
    block.alpha_con = pe.Constraint(rule=alpha_rule)
    # Apply O/P Ratio
    # NOTE(review): the local `k` below shadows the `kinetic_data` module
    # imported as `k` at file scope; harmless inside these rules (the module
    # is not used here) but easy to misread.
    def r_FT_para_rule(block,i):
        k = m.COMP_PARAFFIN.ord(i)
        return block.r_FT_comp[i] == op_ratio[k-1] * block.r_FT_cnum[k]
    block.r_FT_para_con = pe.Constraint(m.COMP_PARAFFIN, rule=r_FT_para_rule)
    def r_FT_ole_rule(block,i):
        k = m.COMP_OLEFIN.ord(i)+1
        return block.r_FT_comp[i] == (1-op_ratio[k-1]) * block.r_FT_cnum[k]
    block.r_FT_ole_con = pe.Constraint(m.COMP_OLEFIN, rule=r_FT_ole_rule)
    # Inorganic species balances for the FT reaction (CO consumed, H2O
    # produced 1:1 with total rate, H2 consumption uses the precomputed
    # per-carbon-number stoichiometry, CO2 untouched by FT).
    def r_FT_inorg_rule(block,i):
        if i == 'CO': return block.r_FT_comp[i] == -block.r_FT_total
        if i == 'H2O': return block.r_FT_comp[i] == block.r_FT_total
        if i == 'H2': return block.r_FT_comp[i] == -sum(h2_consumption[n-1]*block.r_FT_cnum[n] for n in block.C_NUMBER) - block.r_FT_total
        if i == 'CO2': return block.r_FT_comp[i] == 0
    block.r_FT_inorg_con = pe.Constraint(m.COMP_INORG, rule=r_FT_inorg_rule)
    # WGS Reaction
    # Reversible WGS rate, multiplied through by Ke*f_V['H2O'] to clear the
    # denominator.
    def r_WGS_rule(block):
        return block.r_WGS * (block.Ke_WGS*block.parent_block().f_V['H2O']) == 0.5*block.parent_block().cat*block.k_WGS*block.parent_block().P**0.75*(block.parent_block().f_V['CO']* \
               (block.Ke_WGS*block.parent_block().f_V['H2O']) - (block.parent_block().f_V['H2']*block.parent_block().f_V['CO2']) )
    block.r_WGS_con = pe.Constraint(rule=r_WGS_rule)
    # Arrhenius temperature dependence of the WGS rate constant.
    def k_WGS_rule(block):
        return block.k_WGS == k.k0_WGS * pe.exp(-k.E_WGS*1e3/(k.R*block.parent_block().T))
    block.k_WGS_con = pe.Constraint(rule=k_WGS_rule)
    # Empirical correlation for the WGS equilibrium constant in T.
    def Ke_WGS_rule(block):
        return block.Ke_WGS == 5*pe.exp((k.s1_WGS/block.parent_block().T + k.s2_WGS + k.s3_WGS*block.parent_block().T + k.s4_WGS*(block.parent_block().T**2))/k.R) * \
               block.parent_block().T**(k.s5_WGS/k.R)
    block.Ke_WGS_con = pe.Constraint(rule=Ke_WGS_rule)
    # WGS stoichiometry: CO + H2O -> CO2 + H2.
    def r_WGS_comp_rule(block,i):
        if i == 'CO' or i == 'H2O': return block.r_WGS_comp[i] == -block.r_WGS
        if i == 'CO2' or i == 'H2': return block.r_WGS_comp[i] == block.r_WGS
    block.r_WGS_comp_con = pe.Constraint(m.COMP_INORG, rule=r_WGS_comp_rule)
    # Combine both reactions
    # Parent-block totals: inorganics get FT + WGS contributions, organics
    # only FT.
    def r_total_comp_rule(block,i):
        if i in m.COMP_INORG: return block.parent_block().r_total_comp[i] == block.r_FT_comp[i] + block.r_WGS_comp[i]
        return block.parent_block().r_total_comp[i] == block.r_FT_comp[i]
    block.r_total_comp_con = pe.Constraint(m.COMP_TOTAL,rule=r_total_comp_rule)
|
from __future__ import unicode_literals
from django.db import models
class Post(models.Model):
    """A single blog post with title, sub-heading, body text and header image."""
    title = models.CharField(max_length=200)
    sub_heading = models.CharField(max_length=300)
    # Publication timestamp; verbose name 'date published' shows in admin.
    pub_date = models.DateTimeField('date published')
    text = models.TextField(default='Blog post goes here :)')
    # NOTE(review): FilePathField's `path` is relative here ('blog/img');
    # Django normally expects an absolute filesystem path — confirm against
    # the project settings / deployment layout.
    header_image_path = models.FilePathField(path='blog/img', default='zilla-bg.png')
|
# gevent monkey-patching must happen before any module that touches sockets
# or threading is imported (zerorpc, DynamoNode); the original patched only
# after those imports, so they could bind to the unpatched stdlib primitives.
from gevent import monkey
monkey.patch_all()

import logging
import sys

import zerorpc

from dynamo import DynamoNode

logging.basicConfig(level=logging.ERROR)

# Node address comes from the command line; config is currently hard-coded.
addr = sys.argv[1]
# config_file = sys.argv[2]
config_file = 'config_master'
server = DynamoNode(config_file, addr)
s = zerorpc.Server(server)
s.bind('tcp://' + addr)
s.run()
# In order to run the code for MC on lxplus
#[cmsRun IIHE.py DataProcessing='mc' dataset='RunIIFall15MiniAODv2' sample='TT_TuneCUETP8M1_13TeV-powheg-pythia8' address='MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12_ext4-v1/00000' file='F8D2CEAA-C5D1-E511-9895-001E675A6C2A.root' ]
#root://eoscms//cms/store/mc/RunIIFall15MiniAODv2/TT_TuneCUETP8M1_13TeV-powheg-pythia8/MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12_ext4-v1/00000/F8D2CEAA-C5D1-E511-9895-001E675A6C2A.root
# In order to run the code for DATA on lxplus
#[cmsRun IIHE.py DataProcessing='data' dataset='Run2015D' sample='SingleElectron' address='MINIAOD/16Dec2015-v1/20000' file='001E76A5-D3A6-E511-BC32-008CFA05E874.root' ]
#root://eoscms//cms/store/data/Run2015D/DoubleEG/MINIAOD/16Dec2015-v2/00000/F6E918C9-87A6-E511-B3D3-0CC47A4D76B2.root
import sys
import FWCore.ParameterSet.Config as cms
import FWCore.ParameterSet.VarParsing as opts
import copy
import os
# Command-line option registration: each register() call declares the option
# name, default value, multiplicity, type, and help text.
options = opts.VarParsing ('analysis')
options.register('sample',
                 '',
                 opts.VarParsing.multiplicity.singleton,
                 opts.VarParsing.varType.string,
                 'Sample to analyze')
options.register('address',
                 '',
                 opts.VarParsing.multiplicity.singleton,
                 opts.VarParsing.varType.string,
                 'address of sample in eos like: MINIAODSIM/PU25nsData2015v1_76X_mcRun2_asymptotic_v12_ext4-v1/00000')
options.register('file',
                 '',
                 opts.VarParsing.multiplicity.singleton,
                 opts.VarParsing.varType.string,
                 'file to analyze')
options.register('DataProcessing',
                 "data",
                 opts.VarParsing.multiplicity.singleton,
                 opts.VarParsing.varType.string,
                 'Data processing types. Options are:mc,data')
options.register('dataset',
                 "",
                 opts.VarParsing.multiplicity.singleton,
                 opts.VarParsing.varType.string,
                 'datasets to analyze: SingleElectron, DoubleEG')
options.register('grid',
                 False,
                 opts.VarParsing.multiplicity.singleton,
                 opts.VarParsing.varType.bool,
                 'If you run on grid or localy on eos')
options.parseArguments()
##########################################################################################
# Global tags #
##########################################################################################
# Pick the conditions global tag matching the processing mode.
if options.DataProcessing == "mc":
    globalTag = '76X_mcRun2_asymptotic_v12'
elif options.DataProcessing == "data":
    globalTag = '76X_dataRun2_v15'
##########################################################################################
# Start the sequences #
##########################################################################################
# Build the CMSSW process and load the standard geometry / field / conditions
# sequences, then attach the global tag chosen above.
process = cms.Process("IIHEAnalysis")
process.load("Configuration.StandardSequences.MagneticField_cff")
process.load("Configuration.Geometry.GeometryIdeal_cff")
process.load("Configuration.EventContent.EventContent_cff")
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_condDBv2_cff")
process.GlobalTag.globaltag = globalTag
# Python 2 print statement (this config targets CMSSW's Python 2 runtime).
print "Global Tag is ", process.GlobalTag.globaltag
# Skip events whose requested products are missing instead of aborting.
process.options = cms.untracked.PSet( SkipEvent = cms.untracked.vstring('ProductNotFound') )
# -1 = process all events in the input file.
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
process.load('FWCore.MessageService.MessageLogger_cfi')
process.MessageLogger.cerr.FwkReport.reportEvery = 10000
##########################################################################################
# Files #
##########################################################################################
# The EOS path layout is identical for "mc" and "data" — the original
# duplicated the exact same assignment in two branches. The guard is kept so
# an unsupported mode still leaves `path` undefined (NameError below), as
# before.
if options.DataProcessing in ("mc", "data"):
    path = 'root://eoscms//eos/cms/store/'+ options.DataProcessing + '/' + options.dataset + '/' + options.sample + '/' + options.address + '/' + options.file
process.source = cms.Source("PoolSource",
                            fileNames = cms.untracked.vstring())
process.source.fileNames.append( path )
#readFiles.extend([
#'file:/pnfs/iihe/cms/store/user/rgoldouz/ZToEE_NNPDF30_13TeV-powheg_M_2300_3500/C84500E1-6BB8-E511-A9D6-002590E505FE.root'
#])
# Output file name depends only on whether we run on the grid; the original
# duplicated identical grid/local logic for the mc and data branches. The
# mode guard preserves the original behavior for unsupported modes
# (filename_out stays undefined).
if options.DataProcessing in ("mc", "data"):
    if options.grid:
        filename_out = "outfile.root"
    else:
        filename_out = "file:/tmp/output_%s" % (options.sample + '_' + options.file)
process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(filename_out) )
process.TFileService = cms.Service("TFileService", fileName = cms.string(filename_out) )
##########################################################################################
#                                 IIHETree options                                       #
##########################################################################################
process.load("UserCode.IIHETree.IIHETree_cfi")
# Set pt or mass thresholds for the truth module here
# Setting thresholds reduces the size of the output files significantly
process.IIHEAnalysis.MCTruth_ptThreshold = cms.untracked.double(10.0)
process.IIHEAnalysis.MCTruth_mThreshold = cms.untracked.double(20.0)
# Common pT/ET threshold (GeV) applied to electrons, muons and lepton acceptance below.
pt_threshold = 15
# Only save some triggers.
process.IIHEAnalysis.TriggerResults = cms.InputTag('TriggerResults', '', 'HLT')
process.IIHEAnalysis.triggerEvent = cms.InputTag('selectedPatTrigger')
triggers = 'singleElectron;doubleElectron'
process.IIHEAnalysis.triggers = cms.untracked.string(triggers)
#process.IIHEAnalysis.triggers = cms.untracked.string('doubleElectron')
process.IIHEAnalysis.globalTag = cms.string(globalTag)
# Collections.
process.IIHEAnalysis.photonCollection = cms.InputTag('slimmedPhotons' )
process.IIHEAnalysis.electronCollection = cms.InputTag('slimmedElectrons')
process.IIHEAnalysis.muonCollection = cms.InputTag('slimmedMuons' )
process.IIHEAnalysis.METCollection = cms.InputTag('slimmedMETs' )
process.IIHEAnalysis.superClusterCollection = cms.InputTag('reducedEgamma', 'reducedSuperClusters')
process.IIHEAnalysis.reducedBarrelRecHitCollection = cms.InputTag('reducedEcalRecHitsEB')
process.IIHEAnalysis.reducedEndcapRecHitCollection = cms.InputTag('reducedEcalRecHitsEE')
process.IIHEAnalysis.eventRho = cms.InputTag('fixedGridRhoFastjetAll')
process.IIHEAnalysis.ebReducedRecHitCollection = cms.InputTag("reducedEgamma", "reducedEBRecHits")
process.IIHEAnalysis.eeReducedRecHitCollection = cms.InputTag("reducedEgamma", "reducedEERecHits")
process.IIHEAnalysis.esReducedRecHitCollection = cms.InputTag("reducedEgamma", "reducedESRecHits")
process.IIHEAnalysis.generatorLabel = cms.InputTag("generator")
process.IIHEAnalysis.PileUpSummaryInfo = cms.untracked.InputTag('slimmedAddPileupInfo')
process.IIHEAnalysis.genParticleSrc = cms.InputTag("prunedGenParticles")
process.IIHEAnalysis.pfCands = cms.InputTag("packedPFCandidates")
# Trigger matching stuff. 0.5 should be sufficient.
process.IIHEAnalysis.muon_triggerDeltaRThreshold = cms.untracked.double(0.5)
process.IIHEAnalysis.HEEP_triggerDeltaRThreshold = cms.untracked.double(0.5)
# In the absence of high ET electrons, only save events with really high Z candidates.
process.IIHEAnalysis.ZBosonZMassAcceptLower = cms.untracked.double(850)
# Don't bother with J/psi or Upsilon, they will only weigh us down!
# (Lower bound of 1e6 GeV effectively disables these candidate collections.)
process.IIHEAnalysis.ZBosonJPsiAcceptMassLower = cms.untracked.double(1e6)
process.IIHEAnalysis.ZBosonJPsiAcceptMassUpper = cms.untracked.double(1e6)
process.IIHEAnalysis.ZBosonUpsAcceptMassLower = cms.untracked.double(1e6)
process.IIHEAnalysis.ZBosonUpsAcceptMassUpper = cms.untracked.double(1e6)
# But make sure we save Z bosons from 50 GeV and up.
process.IIHEAnalysis.ZBosonZMassLowerCuttoff = cms.untracked.double( 50)
process.IIHEAnalysis.ZBosonDeltaRCut = cms.untracked.double(1e-3)
# Only save Z->ee, Z->em.
process.IIHEAnalysis.ZBosonEtThreshold = cms.untracked.double(pt_threshold)
process.IIHEAnalysis.ZBosonSaveZee = cms.untracked.bool(True )
process.IIHEAnalysis.ZBosonSaveZmm = cms.untracked.bool(True )
process.IIHEAnalysis.ZBosonSaveZem = cms.untracked.bool(True )
process.IIHEAnalysis.ZBosonSaveZeeg = cms.untracked.bool(False)
process.IIHEAnalysis.ZBosonSaveZmmg = cms.untracked.bool(False)
process.IIHEAnalysis.electrons_ETThreshold = cms.untracked.double(pt_threshold)
process.IIHEAnalysis.muon_pTThreshold = cms.untracked.double(pt_threshold)
process.IIHEAnalysis.LeptonsAccept_pTThreshold = cms.untracked.double(pt_threshold)
# Require at least two leptons...
process.IIHEAnalysis.LeptonsAccept_nLeptons = cms.untracked.double(2)
# ...at least one of which is an electron.
process.IIHEAnalysis.LeptonsAccept_nElectrons = cms.untracked.double(1)
# Module on/off switches.
process.IIHEAnalysis.includeLeptonsAcceptModule = cms.untracked.bool(True)
process.IIHEAnalysis.includeTriggerModule = cms.untracked.bool(True)
process.IIHEAnalysis.includeEventModule = cms.untracked.bool(True)
process.IIHEAnalysis.includeVertexModule = cms.untracked.bool(True)
process.IIHEAnalysis.includePhotonModule = cms.untracked.bool(True)
process.IIHEAnalysis.includeElectronModule = cms.untracked.bool(True)
process.IIHEAnalysis.includeMuonModule = cms.untracked.bool(True)
process.IIHEAnalysis.includeMETModule = cms.untracked.bool(True)
process.IIHEAnalysis.includeHEEPModule = cms.untracked.bool(True)
process.IIHEAnalysis.includeZBosonModule = cms.untracked.bool(True)
process.IIHEAnalysis.includeSuperClusterModule = cms.untracked.bool(False)
process.IIHEAnalysis.includeTracksModule = cms.untracked.bool(False)
# MC truth only makes sense for simulated samples.
process.IIHEAnalysis.includeMCTruthModule = cms.untracked.bool(('mc' in options.DataProcessing))
#change it to true if you want to save all events
process.IIHEAnalysis.includeAutoAcceptEventModule= cms.untracked.bool(False)
process.IIHEAnalysis.debug = cms.bool(False)
##########################################################################################
#                         Woohoo! We're ready to start!                                  #
##########################################################################################
#process.p1 = cms.Path(process.kt6PFJetsForIsolation+process.IIHEAnalysis)
process.p1 = cms.Path(process.IIHEAnalysis)
|
from tfcgp.problem import Problem
from tfcgp.config import Config
from tfcgp.evolver import Evolver
from tensorflow.contrib.keras import datasets
import argparse
import numpy as np
import os
# Command-line interface ---------------------------------------------------
parser = argparse.ArgumentParser(description='CGP with Tensorflow')
parser.add_argument('--no-learn', dest='learn', action='store_const', const=False,
                    default=True, help='Turn off learning')
parser.add_argument('--no-evo', dest='evo', action='store_const', const=False,
                    default=True, help='Turn off evolution')
parser.add_argument('--lamarck', dest='lamarck', action='store_const', const=True,
                    default=False, help='Turn on Lamarckian evolution')
parser.add_argument('--log', type=str, help='Log file')
parser.add_argument('--config', type=str, help='Config file', default='cfg/base.yaml')
parser.add_argument('--epochs', type=int, help='Number of epochs', default=1)
parser.add_argument('--seed', type=int, help='Random seed', default=0)
args = parser.parse_args()

# Build one flat dataset out of the MNIST train and test splits.
train, test = datasets.mnist.load_data()
data = np.concatenate((train[0], test[0]))
data = np.reshape(data, (data.shape[0], -1))  # flatten each 28x28 image to a vector
targets = np.concatenate((train[1], test[1]))

# Evolve until the configured evaluation budget is exhausted.
c = Config()
c.update(args.config)
p = Problem(data, targets, learn=args.learn, epochs=args.epochs)
e = Evolver(p, c, logname=args.log)
while p.eval_count < c.cfg["total_evals"]:
    e.step()
|
import requests
import sys
from bs4 import BeautifulSoup
filename = sys.argv[1]
programs_file = open(filename)
programs = programs_file.readlines()
programs_file.close()
programs = [program.rstrip('\n') for program in programs]
should_remove = []
not_found = []
more_than_one_program = []
for program in programs:
result = requests.get("http://www.shouldiremoveit.com/programs.aspx?q=%s" % program)
soup = BeautifulSoup(result.content)
program_found = soup.find_all(name="span", class_="programbartxt_remove")
if program_found:
if len(program_found) > 1:
more_than_one_program.append(program)
else:
for found in program_found:
percentage = found.text.split("% remove")
if float(percentage) > 80:
should_remove.append(program)
else:
not_found.append(program)
print "you should remove: %s" % str(should_remove).strip("[]")
print "These programs were not found: %s" % str(not_found).strip("[]")
print "These names match with more than one program: %s" % str(more_than_one_program).strip("[]")
|
import pytest
from prereise.cli.download.download_manager import generate_parser
# Shared fixture values used by every argument list below.
START_DATE = "2020-01-01"
END_DATE = "2021-01-01"
FILE_PATH = "./data.pkl"
REGION = "Texas"
EMAIL = "fakeemail@bev.com"
API_KEY = "FAKE_API_KEY"
STRING_YEAR_2020 = "2020"
METHOD = "sam"
# wind_data_rap: long-form and short-form flag spellings of the same invocation.
WIND_DATA_RAP_ARG_LIST_FULL_FLAGS = [
    "wind_data_rap",
    f"--start_date={START_DATE}",
    f"--end_date={END_DATE}",
    f"--file_path={FILE_PATH}",
    f"--region={REGION}",
]
WIND_DATA_RAP_ARG_LIST_SHORT_FLAGS = [
    "wind_data_rap",
    f"-sd={START_DATE}",
    f"-ed={END_DATE}",
    f"-fp={FILE_PATH}",
    f"-r={REGION}",
]
# solar_data_ga: adds the API key flag on top of the wind arguments.
SOLAR_DATA_GA_ARG_LIST_FULL_FLAGS = [
    "solar_data_ga",
    f"--start_date={START_DATE}",
    f"--end_date={END_DATE}",
    f"--file_path={FILE_PATH}",
    f"--region={REGION}",
    f"--key={API_KEY}",
]
SOLAR_DATA_GA_ARG_LIST_SHORT_FLAGS = [
    "solar_data_ga",
    f"-sd={START_DATE}",
    f"-ed={END_DATE}",
    f"-fp={FILE_PATH}",
    f"-r={REGION}",
    f"-k={API_KEY}",
]
# solar_data_nsrdb: year/email/method based interface instead of a date range.
SOLAR_DATA_NSRDB_ARG_LIST_FULL_FLAGS = [
    "solar_data_nsrdb",
    f"--year={STRING_YEAR_2020}",
    f"--email={EMAIL}",
    f"--file_path={FILE_PATH}",
    f"--region={REGION}",
    f"--key={API_KEY}",
    f"--method={METHOD}",
]
SOLAR_DATA_NSRDB_ARG_LIST_SHORT_FLAGS = [
    "solar_data_nsrdb",
    f"-y={STRING_YEAR_2020}",
    f"-e={EMAIL}",
    f"-fp={FILE_PATH}",
    f"-r={REGION}",
    f"-k={API_KEY}",
    f"-m={METHOD}",
]
# Every argument list, used to parametrize both tests below.
ALL_ARGUMENTS = [
    WIND_DATA_RAP_ARG_LIST_FULL_FLAGS,
    WIND_DATA_RAP_ARG_LIST_SHORT_FLAGS,
    SOLAR_DATA_GA_ARG_LIST_FULL_FLAGS,
    SOLAR_DATA_GA_ARG_LIST_SHORT_FLAGS,
    SOLAR_DATA_NSRDB_ARG_LIST_FULL_FLAGS,
    SOLAR_DATA_NSRDB_ARG_LIST_SHORT_FLAGS,
]
@pytest.mark.parametrize("args", ALL_ARGUMENTS)
def test_parser(args):
    """Every supported flag spelling parses without error."""
    generate_parser().parse_args(args)
@pytest.mark.parametrize("args", ALL_ARGUMENTS)
def test_parser_missing_flags(args):
    """Dropping any single argument must make parsing fail with SystemExit."""
    parser = generate_parser()
    for omitted in args:
        remaining = [item for item in args if item is not omitted]
        with pytest.raises(SystemExit):
            parser.parse_args(remaining)
|
# Generated by Django 3.0.3 on 2020-03-21 18:39
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: replace Membre's duplicated profile fields
    (mail/nom/prenom/pseudo/slug_pseudo) with a OneToOne link to the auth User
    plus a slug derived from the username."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('main_app', '0015_auto_20200318_1138'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='membre',
            options={},
        ),
        # Drop the fields now provided by the linked User model.
        migrations.RemoveField(
            model_name='membre',
            name='mail',
        ),
        migrations.RemoveField(
            model_name='membre',
            name='nom',
        ),
        migrations.RemoveField(
            model_name='membre',
            name='prenom',
        ),
        migrations.RemoveField(
            model_name='membre',
            name='pseudo',
        ),
        migrations.RemoveField(
            model_name='membre',
            name='slug_pseudo',
        ),
        migrations.AddField(
            model_name='membre',
            name='slug_username',
            field=models.SlugField(blank=True, default='', unique=True),
        ),
        # One-to-one link to the auth user; the '' default only exists to
        # satisfy the migration prompt (preserve_default=False).
        migrations.AddField(
            model_name='membre',
            name='user',
            field=models.OneToOneField(default='', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
            preserve_default=False,
        ),
    ]
|
#get user input
# Voyager's cruise parameters as of the 9/25/09 reference date.
current_speed = 38_241 #miles/hour
current_distance = 16_637_000_000 #miles
def get_user_input():
    """Prompt for the number of days since 9/25/09; return an int, or None on bad input."""
    try:
        number_of_days_str = input('Enter number of days since 9/25/09')
        number_of_days_int = int(number_of_days_str)
        return number_of_days_int
    except (TypeError, ValueError) as e:
        # Non-numeric input: report the error and signal failure to the caller.
        print(e)
        return None
#convert days to hours
def convert_days_to_hours(days_int):
    """Return the number of hours in *days_int* days."""
    return days_int * 24
def calculate_distance_in_miles(time_in_hours):
    """Total distance (miles) after travelling *time_in_hours* at current_speed,
    starting from current_distance."""
    travelled = current_speed * time_in_hours
    return current_distance + travelled
def calculate_distance_in_km(time_in_hours):
    """Same as calculate_distance_in_miles(), converted to kilometres."""
    miles = calculate_distance_in_miles(time_in_hours)
    return miles * 1.609344
def calculate_astronomical_ditance(time_in_hours):
    """Return the total distance in astronomical units (1 AU = 92955807.267433 miles)."""
    # BUG FIX: calculate_distance_in_miles() already includes current_distance, so
    # adding it again double-counted the starting distance (the km function does
    # not double-count; this now matches it).
    distance_in_au = calculate_distance_in_miles(time_in_hours) / 92955807.267433
    return distance_in_au
def calculate_radio_trip(time_in_hours):
    """Return the one-way travel time, in hours, of a radio signal covering the
    total distance (radio waves travel at the speed of light)."""
    # BUG FIX: the old code double-added current_distance (already included by
    # calculate_distance_in_miles) and divided miles by 1609.34 — a miles<->km
    # conversion factor, not a travel time.  Divide by the speed of light instead.
    distance_in_miles = calculate_distance_in_miles(time_in_hours)
    distance_in_radio_seconds = distance_in_miles / 186282.397  # speed of light, miles/s
    distance_in_radio_hours = distance_in_radio_seconds / 3600
    return distance_in_radio_hours
def main():
    """Prompt for the elapsed days and report Voyager's distance in several units."""
    #get input from user
    days = get_user_input()
    if days is None:
        # BUG FIX: invalid input used to crash convert_days_to_hours(None).
        return
    #convert days to hours
    days_in_hours = convert_days_to_hours(days)
    distance_in_miles = calculate_distance_in_miles(days_in_hours)
    distance_in_km = calculate_distance_in_km(days_in_hours)
    distance_in_au = calculate_astronomical_ditance(days_in_hours)
    radio_time_in_hours = calculate_radio_trip(days_in_hours)
    # BUG FIX: the computed values were never printed (only a bare label).
    print("Distance in miles is", distance_in_miles)
    print("Distance in km is", distance_in_km)
    print("Distance in AU is", distance_in_au)
    print("One-way radio signal time in hours is", radio_time_in_hours)
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jun 5 20:49:53 2019
@author: Dharmik joshi
"""
import cv2
# Open the default webcam and the bundled Haar cascade for frontal faces.
video = cv2.VideoCapture(0)
face_detection_data = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
while True:
    check,img = video.read()
    # Haar cascades operate on grayscale images.
    gray_img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
    #actual code
    # scaleFactor=1.1, minNeighbors=5: default-ish detection tuning.
    faces = face_detection_data.detectMultiScale(gray_img,1.1,5)
    for x,y,w,h in faces:
        # Draw a blue box and a "Face" label on each detection.
        img = cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),(4))
        print(cv2.putText(img,"Face",(x,y),cv2.FONT_HERSHEY_SIMPLEX,1,(0,255,0),2))
    cv2.imshow("Face Detection",img)
    key = cv2.waitKey(1)
    # Quit when the user presses 'e'.
    if key == ord('e'):
        break
video.release()
cv2.destroyAllWindows()
|
from django.contrib import admin
# Register your models here.
from django.contrib import admin
# Register your models here.
from .models import Product, Variation, ProductImage, Category, ProductFeatured
class ProductImageInline(admin.TabularInline):
    """Inline editor for a product's images (up to 10) on the Product admin page."""
    model = ProductImage
    extra = 0
    max_num = 10
class VariationInline(admin.TabularInline):
    """Inline editor for a product's variations (up to 10) on the Product admin page."""
    model = Variation
    extra = 0
    max_num = 10
class ProductAdmin(admin.ModelAdmin):
    """Product admin showing images and variations inline."""
    # '__unicode__' in list_display suggests this was written for Python 2 / old Django.
    list_display = ['__unicode__', 'price']
    inlines = [
        ProductImageInline,
        VariationInline,
    ]
    # NOTE(review): an inner Meta on a ModelAdmin has no effect in Django;
    # harmless, but likely copied from a ModelForm.
    class Meta:
        model = Product
# Register the models with the admin site (Variation is edited inline only).
admin.site.register(Product, ProductAdmin)
#admin.site.register(Variation)
admin.site.register(ProductImage)
admin.site.register(Category)
admin.site.register(ProductFeatured)
from newscrawler.sentiment.mongo_tool import MongoTool
from newscrawler.sentiment.processString import CharLevel_preProcess
import newscrawler.sentiment.FileIO as FIO
class MySentences(object):
    """Iterable that yields character-token lists from CSV news files (and,
    via the classmethod generators, from MongoDB collections) for training
    character-level models."""
    def __init__(self, whichType='csv',collection='news',skip=4000, limit=2000,
                 filepath=None,filename=None,encoding='gbk'):
        self.filepath = filepath
        self.filename = filename
        self.whichType = whichType
        self.encoding= encoding
        self.collection= collection
        self.skip = skip
        self.limit = limit

    def __iter__(self):
        """Yield one list of characters per CSV row that survives preprocessing."""
        # try:
        #     with MongoTool() as mon:
        #         mongoGen1 = mon.MongoReadAll(collection='news')
        #         for data in mongoGen1:
        #             # text preprocessing
        #             content = CharLevel_preProcess(data)
        #             if content:
        #                 charList = self.stringToList(content)
        #                 yield charList
        # except Exception as e:
        #     print(e)
        #     pass
        # try:
        #     with MongoTool() as mon:
        #         mongoGen2 = mon.MongoReadAll(collection='comment')
        #         for data in mongoGen2:
        #             # text preprocessing
        #             content = CharLevel_preProcess(data)
        #             if content:
        #                 charList = self.stringToList(content)
        #                 yield charList
        # except Exception as e:
        #     print(e)
        #     pass
        try:
            for rows in FIO.load_News_csv(parentpath=self.filepath,
                                          filename=self.filename,
                                          encoding=self.encoding):
                for col in rows:
                    data = col[0]
                    # text preprocessing
                    content = CharLevel_preProcess(data)
                    if content:
                        charList = self.stringToList(content)
                        yield charList
        except Exception as e:
            print(e)
            pass

    # NOTE(review): the three generators below are declared @classmethod but name
    # their first parameter 'self' (it receives the class); works, but misleading.
    @classmethod
    def csvToCharGen(self, parentpath ,filename ,encoding ):
        """Yield character lists from a CSV file (same pipeline as __iter__)."""
        for rows in FIO.load_News_csv(parentpath=parentpath, filename=filename,
                                      encoding=encoding):
            for col in rows:
                data = col[0]
                # text preprocessing
                content = CharLevel_preProcess(data)
                if content:
                    charList = self.stringToList(content)
                    yield charList

    @classmethod
    def DBlimitToCharGen(self,collection, skip=4000, limit=2000):
        """Yield character lists from a bounded slice of a MongoDB collection."""
        try:
            with MongoTool() as mon:
                mongoGen = mon.MongoReadLimit(collection=collection, skip=skip, limit=limit)
                for data in mongoGen:
                    # text preprocessing
                    content = CharLevel_preProcess(data)
                    if content:
                        charList = self.stringToList(content)
                        yield charList
        except Exception as e:
            print(e)
            pass

    @classmethod
    def DBAllToCharGen(self,collection):
        """Yield character lists from an entire MongoDB collection."""
        try:
            with MongoTool() as mon:
                mongoGen1 = mon.MongoReadAll(collection=collection)
                for data in mongoGen1:
                    # text preprocessing
                    content = CharLevel_preProcess(data)
                    if content:
                        charList = self.stringToList(content)
                        yield charList
        except Exception as e:
            print(e)

    @staticmethod
    def stringToList(content):
        """Split a string into a list of its characters (via a space-joined round trip)."""
        charListBlank = " ".join(content)
        charList = charListBlank.split(' ')
        return charList
# Drink recipes: required ingredient amounts (ml/g) and price in dollars.
MENU = {
    "espresso": {
        "ingredients": {
            "water": 50,
            "coffee": 18,
        },
        "cost": 1.5,
    },
    "latte": {
        "ingredients": {
            "water": 200,
            "milk": 150,
            "coffee": 24,
        },
        "cost": 2.5,
    },
    "cappuccino": {
        "ingredients": {
            "water": 250,
            "milk": 100,
            "coffee": 24,
        },
        "cost": 3.0,
    }
}
# Current stock of the machine; mutated by the check* functions below.
resources = {
    "water": 300,
    "milk": 200,
    "coffee": 100,
}
def checklatte(ingredients):
    """Deduct a latte's ingredients (200 water, 150 milk, 24 coffee) from resources.

    *ingredients* is the machine's resource pool; each ingredient is checked and
    deducted independently, printing a message for any shortage.
    """
    global resources
    # BUG FIX: use >= so an exactly-sufficient stock still makes the drink.
    if ingredients['water'] >= 200:
        resources['water'] = resources['water'] - 200
    else:
        print('Sorry there is not enough water.')
    if ingredients['milk'] >= 150:
        resources['milk'] = resources['milk'] - 150
    else:
        print('Sorry there is not enough milk.')
    if ingredients['coffee'] >= 24:
        resources['coffee'] = resources['coffee'] - 24
    else:
        print('Sorry there is not enough coffee.')
def checkespresso(ingredients):
    """Deduct an espresso's ingredients (50 water, 18 coffee) from resources.

    *ingredients* is the machine's resource pool; each ingredient is checked and
    deducted independently, printing a message for any shortage.
    """
    global resources
    # BUG FIX: use >= so an exactly-sufficient stock still makes the drink.
    if ingredients['water'] >= 50:
        resources['water'] = resources['water'] - 50
    else:
        print('Sorry there is not enough water.')
    if ingredients['coffee'] >= 18:
        resources['coffee'] = resources['coffee'] - 18
    else:
        print('Sorry there is not enough coffee.')
def checkcappuccino(ingredients):
    """Deduct a cappuccino's ingredients (250 water, 100 milk, 24 coffee) from resources.

    *ingredients* is the machine's resource pool; each ingredient is checked and
    deducted independently, printing a message for any shortage.
    """
    global resources
    # BUG FIX: use >= so an exactly-sufficient stock still makes the drink.
    if ingredients['water'] >= 250:
        resources['water'] = resources['water'] - 250
    else:
        print('Sorry there is not enough water.')
    if ingredients['milk'] >= 100:
        # BUG FIX: milk was deducted from the coffee counter (resources['coffee'] - 100).
        resources['milk'] = resources['milk'] - 100
    else:
        print('Sorry there is not enough milk.')
    if ingredients['coffee'] >= 24:
        resources['coffee'] = resources['coffee'] - 24
    else:
        print('Sorry there is not enough coffee.')
def total(n, q, d, p):
    """Return the dollar value of n nickels, q quarters, d dimes and p pennies."""
    # BUG FIX: a nickel is $0.05 (was 0.50) and a penny is $0.01 (was 0.10).
    return (n * 0.05) + (q * 0.25) + (d * 0.10) + (p * 0.01)
# Main serving loop: take an order, deduct ingredients, collect coins, give change.
user_input = False
while user_input == False:
    choice = input("What would you like? (espresso/latte/cappuccino): ").lower()
    if choice == 'report':
        # Show the remaining stock and go back to taking orders.
        print(resources)
        continue
    elif choice == 'espresso':
        checkespresso(resources)
    elif choice == 'latte':
        checklatte(resources)
    # BUG FIX: the branch was spelled 'cappuccinno' and could never match.
    elif choice == 'cappuccino':
        checkcappuccino(resources)
    else:
        # BUG FIX: an unrecognised choice used to fall through and crash at
        # MENU[choice] below with a KeyError.
        print("Unknown drink, please choose espresso, latte or cappuccino.")
        continue
    # Collect payment.
    print("Please insert coins.")
    nickles = float(input('how many nickles?: '))
    quarters = float(input('how many quarters?: '))
    dimes = float(input('how many dimes?: '))
    pennies = float(input('how many pennies?: '))
    # BUG FIX: the result used to be bound to the name 'total', destroying the
    # total() function after the first order.
    paid = total(nickles, quarters, dimes, pennies)
    change = round(paid - MENU[choice]['cost'], 2)
    if MENU[choice]['cost'] > paid:
        # Insufficient payment: refund and keep serving.  (The original set an
        # undefined 'user_end' flag here, which had no effect.)
        print("Sorry that's not enough money. Money refunded.")
    else:
        # BUG FIX: the message always said "latte" regardless of the order.
        print(f"Here is ${change} in change. Here is your {choice} ☕️. Enjoy!")
|
#!/usr/bin/python
import os, getopt
import sys
import logging
import traceback as tb
from webservice.provider import CaseClient
from webservice.ticketing.autotask import Autotask
from webservice.ticketing.jira import Jira
from webservice.ticketing.connectwise import ConnectWise
from webservice.ticketing.freshservice import FreshService
from webservice.ticketing.salesforce import SalesForce
from webservice.ticketing.servicenow import ServiceNow
from configuration import ProviderConfiguration
from logger import setup_logging
from webservice.provider import Case
from customer import *
from queue import Queue
from suds import WebFault
class Connector:
    """Python 2 bridge that pulls a case from the SIM provider, files it in the
    customer's ticketing system, and writes the resulting ticket number back.

    On any failure the case is parked in the retry queue (with the stage reached)
    and the script exits; 'stage' is 0 = nothing done, 1 = ticket partially
    created, 2 = ticket created."""
    def __init__(self, incident_id):
        self.incident_id = incident_id
        self.corelog = None               # logging.Logger, set in initialize()
        self.queue = None                 # retry Queue, set in initialize()
        self.provider_configuration = None
        self.customer_mapping = None
        self.ticketing_system = None
        self.stage = 0                    # progress marker stored with queued retries

    def add_case_to_queue(self, customer_id = 999999, customer_name = "SIM Provider", ticket_number=""):
        """Park this case in the retry queue; defaults identify an unknown customer."""
        try:
            self.queue.add(customer_id, customer_name, ticket_number, self.stage)
        except IOError as e:
            self.corelog.error("add_case_to_queue I/O error(%d): %s" % (e.errno, e.strerror))
        except Exception, e:
            self.corelog.error("*** Failed to add case %d in queue." % (self.incident_id) )
            self.corelog.exception(e)

    def remove_case_from_queue(self):
        """Drop this case from the retry queue after successful processing."""
        try:
            self.queue.remove(self.incident_id)
        except IOError as e:
            self.corelog.error("remove_case_from_queue I/O error(%d): %s" % (e.errno, e.strerror))
        except Exception, e:
            self.corelog.error("*** Failed to remove case %d from queue." % (self.incident_id) )
            self.corelog.exception(e)

    def initialize(self):
        """ Initialize file logger and the retry queue; exits on logger failure. """
        try:
            setup_logging("Notification", os.path.join("data", "log"), scrnlog = False)
            # setup_logging("suds.client", os.path.join("data", "log"), scrnlog = True)
            self.corelog = logging.getLogger("Notification")
        except Exception, e:
            tb.print_exc()
            exit("Error Initializing logger")
        self.corelog.info("Starting notifier script for incident id: %s" % self.incident_id)
        self.queue = Queue(self.incident_id)

    def read_provider_configuration(self):
        """Load the provider (SIM) connection settings; queue the case and exit on failure."""
        try:
            self.corelog.debug("Reading Provider Configuration")
            self.provider_configuration = ProviderConfiguration()
            self.provider_configuration.read_configuration(os.path.dirname(os.path.realpath(__file__)))
            return
        except IOError as e:
            self.corelog.error("read_customer_mapping I/O error(%d): %s" % (e.errno, e.strerror))
        except Exception, e:
            self.corelog.error("Exception caught in read_provider_configuration.")
            self.corelog.exception(e)
        self.add_case_to_queue()
        exit("Error Reading provider Configuration")

    def read_customer_mapping(self):
        """Load the customer-id -> ticketing-credentials mapping file; must be non-empty."""
        try:
            mapping_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                        self.provider_configuration.customer_mapping_path)
            self.corelog.debug("Reading Customer Mapping from %s"
                               % mapping_file)
            self.customer_mapping = CustomerMapping(mapping_file)
            if self.customer_mapping.read_mapping() <= 0:
                raise Exception ("No Customer information found at %s" %
                                 mapping_file)
            return
        except IOError as e:
            self.corelog.error("read_customer_mapping I/O error(%d): %s" % (e.errno, e.strerror))
        except Exception, e:
            self.corelog.error("Exception caught in read_customer_mapping.")
            self.corelog.exception(e)
        self.add_case_to_queue()
        exit("Error Reading Customer Mapping File")

    def get_case_details_from_provider(self):
        """Fetch the case record from the provider web service; returns a Case."""
        try:
            self.corelog.debug("Reading case (%s) details from provider"
                               % self.incident_id)
            caseclient = CaseClient(self.provider_configuration.url, self.provider_configuration.username,
                                    self.provider_configuration.password)
            providercase = caseclient.get_case_detail(self.incident_id)
            if providercase and len(providercase) > 0:
                """ Valid Response """
                case = Case(providercase[0])
                return case
            self.corelog.error("Failed to get case details for case %d." % self.incident_id)
        except WebFault, f:
            self.corelog.error("Failed to query case details for case %d. Fault: %s" % (self.incident_id, f.fault ))
        except Exception, e:
            self.corelog.error("Exception caught in get_case_details_from_provider.")
            self.corelog.exception(e)
        self.add_case_to_queue()
        exit("Error Reading Case Details from provider")

    def get_customer_details(self, customer_id, customer_name):
        """Look up the customer's ticketing credentials from the mapping data."""
        try:
            self.corelog.debug("Fetching customer's '%s(ID: %s)' details from mapping data"
                               % (customer_name, customer_id))
            customer = self.customer_mapping.get_customer(customer_id)
            if customer:
                customer.print_info()
                return customer
            self.corelog.error("Failed to get customer's '%s(ID: %s)' details from mapping data for case %s." %
                               (customer_name, customer_id, self.incident_id))
        except WebFault, f:
            self.corelog.error("Failed to query case details for case %d. Fault: %s" % (self.incident_id, f.fault ))
        except Exception, e:
            self.corelog.error("Exception caught in get_case_details_from_provider.")
            self.corelog.exception(e)
        self.add_case_to_queue()
        exit("Error Fetching customer details from mapping data")

    def update_ticketing_system(self, case_detail, customer_detail):
        """Create/update the ticket in the customer's system; returns the ticket number.

        The concrete backend is chosen from customer_detail.ticket_api; on partial
        failure the stage is recorded so a retry can resume."""
        try:
            ticket_number = ""
            self.corelog.debug("Updating (%s) Ticketing system for customer: %s, url: %s"
                               % (TICKET_API[customer_detail.ticket_api],
                                  case_detail.customer_name, customer_detail.url))
            if customer_detail.ticket_api == TICKETING_API_AUTOTASK:
                self.ticketing_system = Autotask(customer_detail.url, customer_detail.username,
                                                 customer_detail.password)
            elif customer_detail.ticket_api == TICKETING_API_JIRA:
                self.ticketing_system = Jira(customer_detail.url, customer_detail.username,
                                             customer_detail.password)
            elif customer_detail.ticket_api == TICKETING_API_CONNECTWISE:
                self.ticketing_system = ConnectWise(customer_detail.url, customer_detail.username,
                                                    customer_detail.password)
            elif customer_detail.ticket_api == TICKETING_API_FRESHSERVICE :
                self.ticketing_system = FreshService(customer_detail.url, customer_detail.username,
                                                     customer_detail.password)
            elif customer_detail.ticket_api == TICKETING_API_SALESFORCE :
                self.ticketing_system = SalesForce(customer_detail.url, customer_detail.username,
                                                   customer_detail.password)
            elif customer_detail.ticket_api == TICKETING_API_SERVICENOW:
                self.ticketing_system = ServiceNow(customer_detail.url, customer_detail.username,
                                                   customer_detail.password)
            if self.ticketing_system:
                self.ticketing_system.set_mandatory_fields(customer_detail.extra_info)
                self.ticketing_system.save(case_detail)
                self.stage = 2
                ticket_number = self.ticketing_system.ticket_number
                return ticket_number
            self.corelog.error("Failed to update ticketing system for case %d. Customer: %s" %
                               (self.incident_id, case_detail.customer_name) )
            customer_detail.print_info()
        except WebFault, f:
            self.corelog.error("Failed to update ticketing system for case %d. Fault: %s" % (self.incident_id, f.fault ))
        except Exception, e:
            self.corelog.error("Exception caught in update_ticketing_system")
            self.corelog.exception(e)
            # A ticket number exists, so the ticket was created but not finished.
            if(self.ticketing_system and self.ticketing_system.ticket_number and len(self.ticketing_system.ticket_number) > 0):
                self.stage = 1
        self.add_case_to_queue(customer_id=customer_detail.id,
                               customer_name=case_detail.customer_name,
                               ticket_number=ticket_number)
        exit("Error updating ticketing system")

    def update_case_details_to_provider(self, case):
        """Write the external ticket number back onto the provider's case record."""
        try:
            self.corelog.debug("Updating case (%s) details to provider with External ID: %s"
                               % (case.case_id, case.external_ticket) )
            caseclient = CaseClient(self.provider_configuration.url, self.provider_configuration.username,
                                    self.provider_configuration.password)
            status = caseclient.update_external_ticket(case)
            if status:
                return
            self.corelog.error("Failed to update case details for case %d." % case.case_id)
        except WebFault, f:
            self.corelog.error("Failed to update case details for case %d. Fault: %s"
                               % (case.case_id, f.fault ))
        except Exception, e:
            self.corelog.error("Exception caught in update_case_details_to_provider.")
            self.corelog.exception(e)
        self.add_case_to_queue(customer_id=case.customer_id,
                               customer_name=case.customer_name,
                               ticket_number=case.external_ticket)
        exit("Error updating Case Details back to provider")
def main(incident_id):
    """End-to-end flow for one incident: load config, fetch the case, file the
    ticket, write the ticket number back, and clear the retry queue entry."""
    connector = Connector(incident_id)
    connector.initialize()
    connector.read_provider_configuration()
    connector.corelog.setLevel(connector.provider_configuration.loglevel)
    connector.read_customer_mapping()
    case_detail = connector.get_case_details_from_provider()
    customer_detail = connector.get_customer_details(case_detail.customer_id, case_detail.customer_name)
    # Resume a previously-queued retry: reuse the ticket number created earlier.
    if connector.queue.data.stage > 0 and connector.queue.data.ticket_number and len(connector.queue.data.ticket_number) > 0:
        case_detail.external_ticket = connector.queue.data.ticket_number
    ticket_number = connector.update_ticketing_system(case_detail, customer_detail)
    if ticket_number and len(ticket_number) > 0:
        # Only push back to the provider when the ticket number actually changed.
        if case_detail.external_ticket is None or ( case_detail.external_ticket.lower() != ticket_number.lower() ):
            case_detail.external_ticket = ticket_number
            connector.update_case_details_to_provider(case_detail)
    connector.remove_case_from_queue()
def usage():
    """Print command-line usage and exit with status 1."""
    message = "%s -i <incident ID>" % __file__
    print (message)
    sys.exit(1)
if __name__ == '__main__':
    incident_id = 0
    # Run relative to the script's own directory (config paths are relative).
    os.chdir(os.path.dirname(os.path.abspath(sys.argv[0])))
    try:
        opts, args = getopt.getopt(sys.argv[1:],"hi:",["incident=", "help"])
    except getopt.GetoptError:
        usage()
    if(len(opts) <= 0):
        usage()
    for opt, arg in opts:
        if opt == '-h':
            usage()
        elif opt in ("-i", "--incident"):
            incident_id = arg.strip()
    # One instance per incident id: the singleton lock is keyed on the id.
    try:
        from tendo import singleton
        me = singleton.SingleInstance(flavor_id = incident_id)
    except Exception, e:
        print "Another instance of script is running. Exiting!!!"
        sys.exit(2)
    main(int(incident_id))
def even_fib_sum(limit=4000000):
    """Return the sum of the even Fibonacci numbers that do not exceed *limit*
    (Project Euler problem 2; sequence starts 1, 2, 3, 5, ...)."""
    a, b = 1, 2
    result = 0
    while a <= limit:
        if a % 2 == 0:
            result += a
        a, b = b, a + b
    return result

# BUG FIX: the original loop tested fib[-1] < 4000000 BEFORE generating the next
# term, so a term above the bound could still be summed; it also used a Python 2
# print statement.  The limit is now enforced on each term before adding it.
print(even_fib_sum())
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import json
import os
from xml.dom import minidom
from pants.base.build_environment import get_buildroot
from pants.util.contextutil import temporary_file
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class IdeaPluginIntegrationTest(PantsRunIntegrationTest):
    """Integration tests for the `idea-plugin` goal: run pants, then verify the
    generated project.iws records the targets, project path and plugin version."""
    RESOURCE = 'java-resource'
    TEST_RESOURCE = 'java-test-resource'

    def _do_check(self, project_dir_path, expected_project_path, expected_targets):
        """Check to see that the project contains the expected source folders."""
        iws_file = os.path.join(project_dir_path, 'project.iws')
        self.assertTrue(os.path.exists(iws_file))
        dom = minidom.parse(iws_file)
        self.assertEqual(1, len(dom.getElementsByTagName("project")))
        project = dom.getElementsByTagName("project")[0]
        self.assertEqual(1, len(project.getElementsByTagName('component')))
        component = project.getElementsByTagName('component')[0]
        actual_properties = component.getElementsByTagName('property')
        # 3 properties: targets, project_path, pants_idea_plugin_version
        self.assertEqual(3, len(actual_properties))
        # 'targets' is a JSON list of absolute target specs.
        self.assertEqual('targets', actual_properties[0].getAttribute('name'))
        actual_targets = json.loads(actual_properties[0].getAttribute('value'))
        abs_expected_target_specs = [os.path.join(get_buildroot(), relative_spec) for relative_spec in expected_targets]
        self.assertEqual(abs_expected_target_specs, actual_targets)
        self.assertEqual('project_path', actual_properties[1].getAttribute('name'))
        actual_project_path = actual_properties[1].getAttribute('value')
        self.assertEqual(os.path.join(get_buildroot(), expected_project_path), actual_project_path)
        self.assertEqual('pants_idea_plugin_version', actual_properties[2].getAttribute('name'))
        self.assertEqual('0.0.1', actual_properties[2].getAttribute('value'))

    def _get_project_dir(self, output_file):
        """Return the project directory the goal wrote as the first output line."""
        with open(output_file, 'r') as result:
            return result.readlines()[0]

    def _run_and_check(self, project_path, targets):
        """Run the idea-plugin goal on *targets* and validate the generated project."""
        with self.temporary_workdir() as workdir:
            with temporary_file(root_dir=workdir, cleanup=True) as output_file:
                pants_run = self.run_pants_with_workdir(
                    ['idea-plugin', '--output-file={}'.format(output_file.name), '--no-open'] + targets, workdir)
                self.assert_success(pants_run)
                project_dir = self._get_project_dir(output_file.name)
                self.assertTrue(os.path.exists(project_dir), "{} does not exist".format(project_dir))
                self._do_check(project_dir, project_path, targets)

    def test_idea_plugin_single_target(self):
        target = 'examples/src/scala/org/pantsbuild/example/hello:hello'
        project_path = "examples/src/scala/org/pantsbuild/example/hello"
        self._run_and_check(project_path, [target])

    def test_idea_plugin_single_directory(self):
        target = 'testprojects/src/python/antlr::'
        project_path = "testprojects/src/python/antlr"
        self._run_and_check(project_path, [target])

    def test_idea_plugin_multiple_targets(self):
        target_a = 'examples/src/scala/org/pantsbuild/example/hello:'
        target_b = 'testprojects/src/python/antlr::'
        # project_path is always the directory of the first target,
        # which is where intellij is going to zoom in at project view.
        project_path = 'examples/src/scala/org/pantsbuild/example/hello'
        self._run_and_check(project_path, [target_a, target_b])
|
import cv2
import os
import time
# Capture webcam frames every 0.5 s, save half-size JPEGs numbered 0001.jpg, 0002.jpg, ...
output_dir = r'C:/Users/zx/Desktop/photo'
cap = cv2.VideoCapture(0)
i = 1
while 1:
    ret, frame = cap.read()
    cv2.imshow('cap', frame)
    flag = cv2.waitKey(1)
    output_path = os.path.join(output_dir, "%04d.jpg" % i)
    print("successful save photo %04d.jpg" % i)
    # Halve the frame in both dimensions before saving.
    frame = cv2.resize(frame, None,
                       fx=0.5,
                       fy=0.5,
                       interpolation=cv2.INTER_CUBIC)
    cv2.imwrite(output_path, frame)
    time.sleep(0.5)
    i += 1
    if flag == 27:  # ESC key pressed
        break
|
# Read a line from the user and echo it back (note: input is not masked).
password = input("please enter:")
print(password)
|
# Minimal hello-world script.
print ("Hello Poland")
#! /usr/bin/python
import sys
import socket
import threading
import time
# Creat a TCP/IP socket
def recv_timeout(the_socket, timeout=2):
    """Receive everything available on *the_socket* within a quiet period.

    Puts the socket in non-blocking mode and keeps reading until either
    *timeout* seconds pass after the last received chunk, or ``2*timeout``
    seconds pass without any data at all.  Returns the concatenated
    payload as a byte string.
    """
    # make socket non blocking
    the_socket.setblocking(0)
    total_data = []      # received chunks, joined at the end
    begin = time.time()  # start of the current quiet period
    while 1:
        # if you got some data, then break after `timeout` quiet seconds
        if total_data and time.time() - begin > timeout:
            break
        # if you got no data at all, wait a little longer, twice the timeout
        elif time.time() - begin > timeout * 2:
            break
        try:
            data = the_socket.recv(8192)
            if data:
                total_data.append(data)
                # change the beginning time for measurement
                begin = time.time()
            else:
                # sleep for sometime to indicate a gap
                time.sleep(0.1)
        except socket.error:
            # Bug fix: the original bare `except: pass` both hid real errors
            # and busy-spun at 100% CPU while no data was ready (non-blocking
            # recv raises EWOULDBLOCK).  Catch only socket errors and sleep.
            time.sleep(0.1)
    # join all parts to make final string; b'' so this also works on
    # Python 3 where recv() returns bytes (on Python 2, b'' is just '').
    return b''.join(total_data)
class ClientThreading(threading.Thread):
    """Worker thread that connects to a TCP server, sends one fixed
    message and prints whatever the server answers.

    NOTE(review): the instance parameter is spelled `seft` instead of
    `self` throughout; it still works (the name is arbitrary in Python)
    but should be renamed for readability.
    """
    def __init__(seft,threadID,ip,port):
        threading.Thread.__init__(seft)
        seft.threadID = threadID
        seft.ip = ip;
        seft.port = port
        print '[+] New Thread %s started %s for : %s' %(threadID,ip,port )
    def run(seft):
        # Connect, send one message, read the reply with recv_timeout(),
        # and close the socket no matter what happens.
        sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
        #Bind the socket to the port
        server_address = (seft.ip,seft.port)
        print >> sys.stderr,'Starting up on %s port %s' %(seft.ip,seft.port)
        sock.connect(server_address)
        #get reply and print
        try:
            #send data
            message = 'This is message.It is important'
            print >> sys.stderr,'sending "%s" '%message
            sock.sendall(message)
            #Lock for the response
            print recv_timeout(sock)
        finally:
            print >>sys.stderr,'Closing socket from thread %s' %seft.threadID
            sock.close()
# Spawn nine client threads, all targeting the same server endpoint.
for thread_id in range(1, 10):
    worker = ClientThreading(thread_id, "192.168.1.65", 2000)
    worker.start()
# -*- coding: utf-8 -*-
from zeam.form.base import interfaces
from zeam.form.base.fields import Field as BaseField
from zeam.form.base.markers import NO_VALUE
from zeam.form.base.widgets import FieldWidget, WidgetExtractor
from zeam.form.ztk.interfaces import ISchemaField, IFieldCreatedEvent
from grokcore import component as grok
from zope import schema, component
from zope.event import notify
from zope.i18nmessageid import MessageFactory
from zope.interface import Interface, Invalid, implementer
from zope.schema import interfaces as schema_interfaces
from zope.schema._bootstrapinterfaces import IContextAwareDefaultFactory
import zope.interface.interfaces
_ = MessageFactory("zeam.form.base")
@implementer(IFieldCreatedEvent)
class FieldCreatedEvent:
    """Event fired whenever a form field has been created from a schema."""

    def __init__(self, field, interface=None, origin=None):
        self.field = field
        self.interface = interface
        self.origin = origin
@implementer(interfaces.IFieldFactory)
class SchemaFieldFactory:
    """Create form fields from a zope.schema field (by adapting it)."""

    def __init__(self, context):
        self.context = context

    def produce(self):
        schema_field = self.context
        interface = schema_field.interface
        if interface is None and not getattr(schema_field, '__name__', None):
            raise AssertionError("Field has no interface or __name__")
        form_field = interfaces.IField(schema_field)
        notify(FieldCreatedEvent(form_field, interface, schema_field))
        yield form_field
@implementer(interfaces.IFieldFactory)
class InterfaceSchemaFieldFactory:
    """Create a set of form fields from a zope.interface by adapting
    every zope.schema field defined on it, in schema order.
    """

    def __init__(self, context):
        self.context = context

    def produce(self):
        for _name, schema_field in schema.getFieldsInOrder(self.context):
            form_field = interfaces.IField(schema_field)
            notify(FieldCreatedEvent(form_field, self.context, schema_field))
            yield form_field
class Field(BaseField):
    """Base form field honouring ``defaultFactory`` when computing defaults."""

    def getDefaultValue(self, form):
        if self.defaultFactory is None:
            default = super(Field, self).getDefaultValue(form)
        elif IContextAwareDefaultFactory.providedBy(self.defaultFactory):
            # A context-aware factory needs the form's content to run.
            if form is None:
                raise TypeError('defaultFactory context required.')
            default = self.defaultFactory(form.getContent())
        else:
            default = self.defaultFactory()
        if default is NO_VALUE:
            default = self.defaultValue
        # None means "no default"; normalize it to NO_VALUE for callers.
        return NO_VALUE if default is None else default
@implementer(ISchemaField)
class SchemaField(BaseField):
    """A form field using a zope.schema field as settings.
    """

    def __init__(self, field):
        # Mirror the zope.schema field settings onto the form field.
        super(SchemaField, self).__init__(
            field.title or None, field.__name__,
            description=field.description,
            required=field.required,
            readonly=field.readonly,
            interface=field.interface)
        self._field = field

    def get_field(self):
        # Access to the underlying zope.schema field.
        return self._field

    def clone(self, new_identifier=None):
        # Copy every attribute (including later customizations) onto a
        # fresh instance built from the same zope.schema field.
        copy = self.__class__(self._field)
        copy.__dict__.update(self.__dict__)
        if new_identifier is not None:
            copy.identifier = new_identifier
        return copy

    def validate(self, value, form):
        """Run the base validation, then the zope.schema field validation
        bound to the form's context.  Return an error message or None."""
        error = super(SchemaField, self).validate(value, form)
        if error is not None:
            return error

        if value is not NO_VALUE:
            context = None
            if form is not None:
                context = form.context
            try:
                binded_field = self._field.bind(context)
                binded_field.validate(value)
            except schema_interfaces.ValidationError as error:
                # doc() is the user-readable message of the validation error.
                return error.doc()
            except Invalid as error:
                return error.args[0]
        return None

    def fromUnicode(self, value):
        # Convert a raw request string with the schema field when supported;
        # otherwise hand the value back unchanged.
        if schema_interfaces.IFromUnicode.providedBy(self._field):
            return self._field.fromUnicode(value)
        return value

    def getDefaultValue(self, form):
        # The form-level default wins; otherwise fall back to the
        # zope.schema field's own default.
        default = super(SchemaField, self).getDefaultValue(form)
        if default is not NO_VALUE:
            return default
        default = self._field.default
        if default is None:  # Zope schema use None to say no default
            return NO_VALUE
        return default
def registerSchemaField(factory, schema_field):
    """Register *factory* as the IField adapter for *schema_field*.

    We register it by hand to have the adapter available when loading ZCML.
    """
    component.provideAdapter(factory, (schema_field,), interfaces.IField)
class SchemaFieldWidget(FieldWidget):
    """Widget for schema-backed fields with schema-derived CSS classes."""
    grok.adapts(ISchemaField, Interface, Interface)

    def htmlClass(self):
        classes = ['field']
        if ISchemaField.providedBy(self.component):
            # Prevent old FieldWidget to fail if they haven't been
            # updated to the new API.
            zope_field = self.component._field
            classes.append('field-' + zope_field.__class__.__name__.lower())
        if self.required:
            classes.append('field-required')
        return ' '.join(classes)
class SchemaWidgetExtractor(WidgetExtractor):
    """Extract a request value and convert it with the zope.schema field.

    After the base extraction succeeds, the raw unicode value is converted
    via ``fromUnicode``; any conversion failure is returned as a form error
    message instead of a value.
    """
    grok.adapts(ISchemaField, Interface, Interface)

    def extract(self):
        value, error = super(SchemaWidgetExtractor, self).extract()
        if error is not None:
            return value, error
        if value is not NO_VALUE:
            try:
                value = self.component.fromUnicode(value)
            except schema_interfaces.ValidationError as e:
                return None, e.doc()
            except Invalid as e:
                return None, e.args[0]
            except ValueError:
                # Cleanup: the exception object was never used here, so do
                # not bind it; show a generic translated message instead.
                return None, _(u"Invalid value.")
        return value, None
class HiddenSchemaWidgetExtractor(SchemaWidgetExtractor):
    # Same extraction logic, registered for the 'hidden' widget mode.
    grok.name('hidden')
class ReadOnlySchemaWidgetExtractor(SchemaWidgetExtractor):
    # Same extraction logic, registered for the 'readonly' widget mode.
    grok.name('readonly')
def registerDefault():
    """Register default fields factories.

    Wires the adapters so a single zope.schema field or a whole
    zope.interface can be turned into zeam form fields.
    """
    component.provideAdapter(
        SchemaFieldFactory,
        # Consistency fix: use the module-level `schema_interfaces` alias
        # (same object) instead of re-spelling `zope.schema.interfaces`.
        (schema_interfaces.IField,))
    component.provideAdapter(
        InterfaceSchemaFieldFactory,
        (zope.interface.interfaces.IInterface,))
registerSchemaField(SchemaField, schema_interfaces.IField)
|
# Copyright 2022 NVIDIA Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
import pytest
from utils.generators import mk_seq_array
import cunumeric as num
from legate.core import LEGATE_MAX_DIM
def test_None():
    """put_along_axis with axis=None operates on the flattened array."""
    x_np = mk_seq_array(np, (256,))
    x_cn = mk_seq_array(num, (256,))
    idx_np = mk_seq_array(np, (125,))
    idx_cn = num.array(idx_np)
    np.put_along_axis(x_np, idx_np, -10, None)
    num.put_along_axis(x_cn, idx_cn, -10, None)
    assert np.array_equal(x_cn, x_np)
N = 10


@pytest.mark.parametrize("ndim", range(1, LEGATE_MAX_DIM + 1))
def test_ndim(ndim):
    """put_along_axis matches NumPy for every dimensionality and axis."""
    shape = (N,) * ndim
    np_arr = mk_seq_array(np, shape)
    num_arr = num.array(np_arr)
    idx_shape = (1,) * ndim
    np_indices = mk_seq_array(np, idx_shape) % N
    num_indices = mk_seq_array(num, idx_shape) % N
    for axis in range(-1, ndim):
        np_copy, num_copy = np_arr.copy(), num_arr.copy()
        np.put_along_axis(np_copy, np_indices, 8, axis=axis)
        num.put_along_axis(num_copy, num_indices, 8, axis=axis)
        assert np.array_equal(np_copy, num_copy)
# Allow running this file directly: forward CLI arguments to pytest.
if __name__ == "__main__":
    import sys

    sys.exit(pytest.main(sys.argv))
|
import requests
class WeWork:
    """Minimal WeChat Work (WeCom) API client scaffold."""
    # Endpoint used to exchange corpid/secret for an access token.
    token_url = "https://qyapi.weixin.qq.com/cgi-bin/gettoken"
    # SECURITY NOTE(review): a real-looking corp id is hard-coded in source;
    # move it to configuration/environment instead of version control.
    corpid = 'wwd5220f12351c7c6f'
# Copyright (C) 2014-2016 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import os
import subprocess
import logging
log = logging.getLogger(__name__)
class Process:
    """Linux process inspection/launch helper (reads /proc/<pid>/status)."""

    # Kept for callers that track the first spawned analysis process.
    first_process = True
    first_process_pid = None

    def __init__(self, pid=0):
        """@param pid: PID.
        """
        self.pid = pid

    def is_alive(self):
        """Return True if the process exists and is not a zombie."""
        if not os.path.exists("/proc/%u" % self.pid):
            return False
        status = self.get_proc_status()
        if not status:
            return False
        if "zombie" in status.get("State:", ""):
            return False
        return True

    def get_parent_pid(self):
        """Return the parent PID as a string, or None if unavailable.

        Bug fix: /proc status keys keep their trailing colon (see the
        "State:" lookup above), so the key is "PPid:" — the original
        looked up "PPid" and therefore always returned None.
        """
        return self.get_proc_status().get("PPid:", None)

    def get_proc_status(self):
        """Parse /proc/<pid>/status into a {field: value} dict ({} on error)."""
        try:
            # Bug fix: the original leaked the file handle and used a bare
            # except; close via a context manager and catch I/O errors only.
            with open("/proc/%u/status" % self.pid) as status_file:
                status_lines = status_file.readlines()
            return dict((parts[0], parts[1])
                        for parts in (line.strip().split(None, 1)
                                      for line in status_lines))
        except (IOError, OSError, IndexError):
            logging.getLogger(__name__).critical(
                "could not get process status for pid %u", self.pid)
            return {}

    def execute(self, cmd):
        """Spawn *cmd* (argument list) and adopt its PID; always returns True."""
        self.proc = proc = subprocess.Popen(cmd)
        self.pid = proc.pid
        return True
|
#!/usr/bin/env python
import pexpect
import sys
# Exact validation banner printed by passwordTask.py.  It is used verbatim
# as an expect() pattern below, so it must match the program output byte
# for byte (including the \r\n line endings).
ERROR_MESSAGE = "The password should have: \r\n" \
                "- At least one number\r\n" \
                "- At least one lower case\r\n" \
                "- At least one upper case\r\n" \
                "- Allowed characters : numbers, letters, '_','-' and '.'\r\n" \
                "Try again\r\n"
def test_valid_password():
    '''
    Drive passwordTask.py through a successful flow: a valid username and
    a password satisfying every rule is accepted at both prompts.
    '''
    print "------------------"
    print "test_valid_password"
    print "------------------"
    # Spawn the program under test and mirror its output to stdout.
    ssh_child = pexpect.spawn("python qualcomm/exercise3/passwordTask.py")
    ssh_child.logfile = sys.stdout
    ssh_child.expect("Username:\r\n", timeout=10)
    ssh_child.sendline("Valid user")
    ssh_child.expect_exact("New password:\r\n", timeout=10)
    ssh_child.sendline("ValidUser1")
    ssh_child.expect("Confirm password:\r\n", timeout=10)
    ssh_child.sendline("ValidUser1")
    ssh_child.sendline('exit')
def test_invalid_password():
    '''
    Drive passwordTask.py through repeated invalid passwords: each attempt
    should re-print the validation banner, and the final failure ends with
    an "Incorrect password" message.
    '''
    print "------------------"
    print "test_invalid_password"
    print "------------------"
    # Spawn the program under test and mirror its output to stdout.
    ssh_child = pexpect.spawn("python qualcomm/exercise3/passwordTask.py")
    ssh_child.logfile = sys.stdout
    ssh_child.expect("Username:\r\n", timeout=10)
    ssh_child.sendline("Reb")
    ssh_child.expect_exact("New password:\r\n", timeout=10)
    ssh_child.sendline("Reb")
    ssh_child.expect(ERROR_MESSAGE, timeout=10)
    ssh_child.sendline("Reb")
    ssh_child.expect(ERROR_MESSAGE, timeout=10)
    ssh_child.sendline("Reb")
    ssh_child.expect(ERROR_MESSAGE, timeout=10)
    ssh_child.sendline("Reb")
    ssh_child.expect("Incorrect password\r\n", timeout=10)
    ssh_child.sendline('exit')
# Run both scenarios when this file is executed directly.
if __name__ == "__main__":
    test_valid_password()
    test_invalid_password()
|
# Copyright 2014 CloudByte Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
# Connection/identification settings for the CloudByte Elasticenter backend,
# plus retry knobs for confirming volume create/delete operations.
cloudbyte_connection_opts = [
    cfg.StrOpt('cb_apikey',
               default=None,
               help=('Elasticenter authorization purpose.')),
    cfg.StrOpt('cb_account_name',
               default=None,
               help=('Is mapped against a VSM.')),
    cfg.StrOpt('tsm_name',
               default=None,
               help=('Used to create volume.')),
    cfg.StrOpt('cb_account_id',
               default=None,
               help=('Account ID of super admin or account admin')),
    cfg.StrOpt('cb_dataset_id',
               default=None,
               help=('The VSM dataset_id.')),
    cfg.StrOpt('cb_tsm_id',
               default=None,
               help=('Refers to VSM ID.')),
    cfg.IntOpt('confirm_volume_create_retry_interval',
               default=10,
               help=('Sleep value is in seconds.')),
    cfg.IntOpt('confirm_volume_create_sleep_counter',
               default=5,
               help=('Will confirm a successful volume '
                     'creation by making this many attempts.')),
    cfg.IntOpt('confirm_volume_delete_retry_interval',
               default=10,
               help=('Sleep value is in seconds.')),
    cfg.IntOpt('confirm_volume_delete_sleep_counter',
               default=5,
               help=('Will confirm a successful volume '
                     'deletion by making this many attempts.')), ]
# Default QoS parameters passed to the addQos API call.
cloudbyte_add_qosgroup_opts = [
    cfg.DictOpt('add_qosgroup',
                default={
                    'iops': '10',
                    'latency': '15',
                    'graceallowed': 'false',
                    'networkspeed': '0',
                    'memlimit': '0',
                    'tpcontrol': 'false',
                    'throughput': '0',
                    'iopscontrol': 'true'
                },
                help=('These values will be used by addQos api.')), ]
# Default volume parameters passed to the createVolume API call.
cloudbyte_create_volume_opts = [
    cfg.DictOpt('create_volume',
                default={
                    'blocklength': '512B',
                    'compression': 'off',
                    'deduplication': 'off',
                    'sync': 'always',
                    'recordsize': '128k',
                    'protocoltype': 'ISCSI'
                },
                help=('These values will be used by createVolume api.')), ]
cloudbyte_initiator_group_opts = [
    cfg.StrOpt('cb_initiator_group_name',
               default='None',
               help=('Initiator group name is assigned to a volume.'
                     'Based on this name CloudByte storage verifies whether '
                     'iscsi connection made to this volume was initiatied '
                     'from the expected host.')), ]
# Register every option group with the global oslo.config namespace so the
# driver can read them from CONF at runtime.
CONF = cfg.CONF
CONF.register_opts(cloudbyte_add_qosgroup_opts)
CONF.register_opts(cloudbyte_create_volume_opts)
CONF.register_opts(cloudbyte_connection_opts)
CONF.register_opts(cloudbyte_initiator_group_opts)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import urllib2
# Fetch the homepage over HTTP (Python 2: urllib2).
una_web = urllib2.urlopen("http://www.psicobyte.com")
# Response headers of the request.
cabeceras = una_web.info()
# Raw response body as a byte string.
contenido = una_web.read()
|
# -*- coding: utf-8 -*-
import json
import os
def menu():
    """Return the options menu serialized as a JSON string."""
    opciones = [
        'Menú de Opciones',
        'a. Listar archivos',
        'b. Crear archivo',
        'c. Modificar archivo',
        'd. Eliminar archivo',
        'e. Salir',
    ]
    return json.dumps(opciones)
def menu_lista(cadena):
    """Decode the JSON menu *cadena* and print one option per line."""
    lista=json.loads(cadena)
    for i in lista:
        print i
def listarfiles(path='D:/programaciondistribuida/taller2/'):
    """Return a JSON list with the names of the .txt files under *path*.

    Generalized: the directory is now a parameter; the previously
    hard-coded location is kept as the default so existing callers are
    unaffected.
    """
    # Lista vacia para incluir los ficheros
    lstFiles = []
    # os.walk() yields (dirpath, dirnames, filenames) for path and subdirs.
    for root, dirs, files in os.walk(path):
        for fichero in files:
            (nombreFichero, extension) = os.path.splitext(fichero)
            if extension == ".txt":
                lstFiles.append(nombreFichero + extension)
    return json.dumps(lstFiles)
def crearfile(nfile, contenido):
    """Append *contenido* to <nfile>.txt (creating it if needed) and return
    the confirmation menu as a JSON string."""
    # 'with' guarantees the handle is closed even if the write fails.
    with open(nfile + ".txt", "a") as fo:
        fo.write(contenido)
    lista = ["Archivo creado Exitosamente!!!", 'Menú de Opciones',
             'a. Listar archivos', 'b. Crear archivo', 'c. Modificar archivo',
             'd. Eliminar archivo', 'e. Salir']
    return json.dumps(lista)
def editafile(file):
    """Return the current content of <file>.txt, or a not-found message.

    Bug fix: the original never closed the file handle; 'with' does.
    """
    if os.path.isfile(file + ".txt"):
        with open(file + ".txt", "r") as fo:
            contenido = fo.read()
    else:
        contenido = "Archivo No existe"
    return contenido
def actufile(file, conten):
    """Append the line *conten* to <file>.txt and return a JSON status menu.

    Bug fix: the file handle is now managed with 'with' so it is always
    closed, even if the write raises.
    """
    if os.path.isfile(file + ".txt"):
        with open(file + ".txt", "a") as fo:
            fo.write(conten + "\n")
        lista = ["Archivo modificado Exitosamente!!!", 'Menú de Opciones',
                 'a. Listar archivos', 'b. Crear archivo',
                 'c. Modificar archivo', 'd. Eliminar archivo', 'e. Salir',
                 'Digite una opcion']
    else:
        lista = ["Archivo No existe", 'Menú de Opciones',
                 'a. Listar archivos', 'b. Crear archivo',
                 'c. Modificar archivo', 'd. Eliminar archivo', 'e. Salir',
                 'Digite una opcion']
    return json.dumps(lista)
def eliminar(file):
    """Delete <file>.txt if present and return a JSON status menu."""
    opciones = ['Menú de Opciones', 'a. Listar archivos', 'b. Crear archivo',
                'c. Modificar archivo', 'd. Eliminar archivo', 'e. Salir']
    if os.path.isfile(file + ".txt"):
        os.remove(file + ".txt")  # remove the file
        estado = "Archivo eliminado Exitosamente!!!"
    else:
        estado = "Archivo No existe"
    return json.dumps([estado] + opciones)
|
import codecs
import os
import os.path
import shutil
import string
import sys
import warnings
from typing import Any, Callable, Dict, List, Optional, Tuple
from urllib.error import URLError
import numpy as np
import torch
from PIL import Image
from .utils import _flip_byte_order, check_integrity, download_and_extract_archive, extract_archive, verify_str_arg
from .vision import VisionDataset
class MNIST(VisionDataset):
    """`MNIST <http://yann.lecun.com/exdb/mnist/>`_ Dataset.

    Args:
        root (string): Root directory of dataset where ``MNIST/raw/train-images-idx3-ubyte``
            and  ``MNIST/raw/t10k-images-idx3-ubyte`` exist.
        train (bool, optional): If True, creates dataset from ``train-images-idx3-ubyte``,
            otherwise from ``t10k-images-idx3-ubyte``.
        download (bool, optional): If True, downloads the dataset from the internet and
            puts it in root directory. If dataset is already downloaded, it is not
            downloaded again.
        transform (callable, optional): A function/transform that takes in an PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
    """

    mirrors = [
        "http://yann.lecun.com/exdb/mnist/",
        "https://ossci-datasets.s3.amazonaws.com/mnist/",
    ]

    # (archive filename, md5 checksum) pairs for the four dataset parts.
    resources = [
        ("train-images-idx3-ubyte.gz", "f68b3c2dcbeaaa9fbdd348bbdeb94873"),
        ("train-labels-idx1-ubyte.gz", "d53e105ee54ea40749a09fcbcd1e9432"),
        ("t10k-images-idx3-ubyte.gz", "9fb629c4189551a2d022fa330f9573f3"),
        ("t10k-labels-idx1-ubyte.gz", "ec29112dd5afa0611ce80d1b7f02629c"),
    ]

    # Legacy cache filenames, kept for backwards compatibility.
    training_file = "training.pt"
    test_file = "test.pt"
    classes = [
        "0 - zero",
        "1 - one",
        "2 - two",
        "3 - three",
        "4 - four",
        "5 - five",
        "6 - six",
        "7 - seven",
        "8 - eight",
        "9 - nine",
    ]

    @property
    def train_labels(self):
        warnings.warn("train_labels has been renamed targets")
        return self.targets

    @property
    def test_labels(self):
        warnings.warn("test_labels has been renamed targets")
        return self.targets

    @property
    def train_data(self):
        warnings.warn("train_data has been renamed data")
        return self.data

    @property
    def test_data(self):
        warnings.warn("test_data has been renamed data")
        return self.data

    def __init__(
        self,
        root: str,
        train: bool = True,
        transform: Optional[Callable] = None,
        target_transform: Optional[Callable] = None,
        download: bool = False,
    ) -> None:
        super().__init__(root, transform=transform, target_transform=target_transform)
        self.train = train  # training set or test set

        if self._check_legacy_exist():
            self.data, self.targets = self._load_legacy_data()
            return

        if download:
            self.download()

        if not self._check_exists():
            raise RuntimeError("Dataset not found. You can use download=True to download it")

        self.data, self.targets = self._load_data()

    def _check_legacy_exist(self):
        processed_folder_exists = os.path.exists(self.processed_folder)
        if not processed_folder_exists:
            return False

        return all(
            check_integrity(os.path.join(self.processed_folder, file)) for file in (self.training_file, self.test_file)
        )

    def _load_legacy_data(self):
        # This is for BC only. We no longer cache the data in a custom binary, but simply read from the raw data
        # directly.
        data_file = self.training_file if self.train else self.test_file
        return torch.load(os.path.join(self.processed_folder, data_file))

    def _load_data(self):
        image_file = f"{'train' if self.train else 't10k'}-images-idx3-ubyte"
        data = read_image_file(os.path.join(self.raw_folder, image_file))

        label_file = f"{'train' if self.train else 't10k'}-labels-idx1-ubyte"
        targets = read_label_file(os.path.join(self.raw_folder, label_file))

        return data, targets

    def __getitem__(self, index: int) -> Tuple[Any, Any]:
        """
        Args:
            index (int): Index

        Returns:
            tuple: (image, target) where target is index of the target class.
        """
        img, target = self.data[index], int(self.targets[index])

        # doing this so that it is consistent with all other datasets
        # to return a PIL Image
        img = Image.fromarray(img.numpy(), mode="L")

        if self.transform is not None:
            img = self.transform(img)

        if self.target_transform is not None:
            target = self.target_transform(target)

        return img, target

    def __len__(self) -> int:
        return len(self.data)

    @property
    def raw_folder(self) -> str:
        return os.path.join(self.root, self.__class__.__name__, "raw")

    @property
    def processed_folder(self) -> str:
        return os.path.join(self.root, self.__class__.__name__, "processed")

    @property
    def class_to_idx(self) -> Dict[str, int]:
        return {_class: i for i, _class in enumerate(self.classes)}

    def _check_exists(self) -> bool:
        return all(
            check_integrity(os.path.join(self.raw_folder, os.path.splitext(os.path.basename(url))[0]))
            for url, _ in self.resources
        )

    def download(self) -> None:
        """Download the MNIST data if it doesn't exist already."""

        if self._check_exists():
            return

        os.makedirs(self.raw_folder, exist_ok=True)

        # download files
        for filename, md5 in self.resources:
            for mirror in self.mirrors:
                # Bug fix: the request URL must include the archive filename
                # (it was previously the bare mirror plus a "(unknown)"
                # placeholder, so every download failed).
                url = f"{mirror}{filename}"
                try:
                    print(f"Downloading {url}")
                    download_and_extract_archive(url, download_root=self.raw_folder, filename=filename, md5=md5)
                except URLError as error:
                    print(f"Failed to download (trying next):\n{error}")
                    continue
                finally:
                    print()
                break
            else:
                # Bug fix: report which file failed instead of "(unknown)".
                raise RuntimeError(f"Error downloading {filename}")

    def extra_repr(self) -> str:
        split = "Train" if self.train is True else "Test"
        return f"Split: {split}"
class FashionMNIST(MNIST):
    """`Fashion-MNIST <https://github.com/zalandoresearch/fashion-mnist>`_ Dataset.

    Args:
        root (string): Root directory of dataset where ``FashionMNIST/raw/train-images-idx3-ubyte``
            and ``FashionMNIST/raw/t10k-images-idx3-ubyte`` exist.
        train (bool, optional): If True, creates dataset from ``train-images-idx3-ubyte``,
            otherwise from ``t10k-images-idx3-ubyte``.
        download (bool, optional): If True, downloads the dataset from the internet and
            puts it in root directory. If dataset is already downloaded, it is not
            downloaded again.
        transform (callable, optional): A function/transform that takes in an PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
    """

    # Only the mirror, checksums and class names differ from MNIST; the
    # loading/downloading machinery is inherited unchanged.
    mirrors = ["http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/"]

    resources = [
        ("train-images-idx3-ubyte.gz", "8d4fb7e6c68d591d4c3dfef9ec88bf0d"),
        ("train-labels-idx1-ubyte.gz", "25c81989df183df01b3e8a0aad5dffbe"),
        ("t10k-images-idx3-ubyte.gz", "bef4ecab320f06d8554ea6380940ec79"),
        ("t10k-labels-idx1-ubyte.gz", "bb300cfdad3c16e7a12a480ee83cd310"),
    ]
    classes = ["T-shirt/top", "Trouser", "Pullover", "Dress", "Coat", "Sandal", "Shirt", "Sneaker", "Bag", "Ankle boot"]
class KMNIST(MNIST):
    """`Kuzushiji-MNIST <https://github.com/rois-codh/kmnist>`_ Dataset.

    Args:
        root (string): Root directory of dataset where ``KMNIST/raw/train-images-idx3-ubyte``
            and ``KMNIST/raw/t10k-images-idx3-ubyte`` exist.
        train (bool, optional): If True, creates dataset from ``train-images-idx3-ubyte``,
            otherwise from ``t10k-images-idx3-ubyte``.
        download (bool, optional): If True, downloads the dataset from the internet and
            puts it in root directory. If dataset is already downloaded, it is not
            downloaded again.
        transform (callable, optional): A function/transform that takes in an PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
    """

    # Only the mirror, checksums and class names differ from MNIST; the
    # loading/downloading machinery is inherited unchanged.
    mirrors = ["http://codh.rois.ac.jp/kmnist/dataset/kmnist/"]

    resources = [
        ("train-images-idx3-ubyte.gz", "bdb82020997e1d708af4cf47b453dcf7"),
        ("train-labels-idx1-ubyte.gz", "e144d726b3acfaa3e44228e80efcd344"),
        ("t10k-images-idx3-ubyte.gz", "5c965bf0a639b31b8f53240b1b52f4d7"),
        ("t10k-labels-idx1-ubyte.gz", "7320c461ea6c1c855c0b718fb2a4b134"),
    ]
    classes = ["o", "ki", "su", "tsu", "na", "ha", "ma", "ya", "re", "wo"]
class EMNIST(MNIST):
    """`EMNIST <https://www.westernsydney.edu.au/bens/home/reproducible_research/emnist>`_ Dataset.

    Args:
        root (string): Root directory of dataset where ``EMNIST/raw/train-images-idx3-ubyte``
            and ``EMNIST/raw/t10k-images-idx3-ubyte`` exist.
        split (string): The dataset has 6 different splits: ``byclass``, ``bymerge``,
            ``balanced``, ``letters``, ``digits`` and ``mnist``. This argument specifies
            which one to use.
        train (bool, optional): If True, creates dataset from ``training.pt``,
            otherwise from ``test.pt``.
        download (bool, optional): If True, downloads the dataset from the internet and
            puts it in root directory. If dataset is already downloaded, it is not
            downloaded again.
        transform (callable, optional): A function/transform that takes in an PIL image
            and returns a transformed version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform that takes in the
            target and transforms it.
    """

    # Single zip archive holding every split.
    url = "https://www.itl.nist.gov/iaui/vip/cs_links/EMNIST/gzip.zip"
    md5 = "58c8d27c78d21e728a6bc7b3cc06412e"
    splits = ("byclass", "bymerge", "balanced", "letters", "digits", "mnist")
    # Merged Classes assumes Same structure for both uppercase and lowercase version
    _merged_classes = {"c", "i", "j", "k", "l", "m", "o", "p", "s", "u", "v", "w", "x", "y", "z"}
    _all_classes = set(string.digits + string.ascii_letters)
    # Per-split class lists; 'letters' is 1-indexed, hence the "N/A" filler.
    classes_split_dict = {
        "byclass": sorted(list(_all_classes)),
        "bymerge": sorted(list(_all_classes - _merged_classes)),
        "balanced": sorted(list(_all_classes - _merged_classes)),
        "letters": ["N/A"] + list(string.ascii_lowercase),
        "digits": list(string.digits),
        "mnist": list(string.digits),
    }

    def __init__(self, root: str, split: str, **kwargs: Any) -> None:
        self.split = verify_str_arg(split, "split", self.splits)
        self.training_file = self._training_file(split)
        self.test_file = self._test_file(split)
        super().__init__(root, **kwargs)
        self.classes = self.classes_split_dict[self.split]

    @staticmethod
    def _training_file(split) -> str:
        # Legacy cache filename for this split (BC only).
        return f"training_{split}.pt"

    @staticmethod
    def _test_file(split) -> str:
        # Legacy cache filename for this split (BC only).
        return f"test_{split}.pt"

    @property
    def _file_prefix(self) -> str:
        # Raw-file prefix encoding both the split and train/test choice.
        return f"emnist-{self.split}-{'train' if self.train else 'test'}"

    @property
    def images_file(self) -> str:
        return os.path.join(self.raw_folder, f"{self._file_prefix}-images-idx3-ubyte")

    @property
    def labels_file(self) -> str:
        return os.path.join(self.raw_folder, f"{self._file_prefix}-labels-idx1-ubyte")

    def _load_data(self):
        return read_image_file(self.images_file), read_label_file(self.labels_file)

    def _check_exists(self) -> bool:
        return all(check_integrity(file) for file in (self.images_file, self.labels_file))

    def download(self) -> None:
        """Download the EMNIST data if it doesn't exist already."""
        if self._check_exists():
            return

        os.makedirs(self.raw_folder, exist_ok=True)

        # Download the single archive, then decompress every per-split .gz
        # member into raw_folder and drop the intermediate 'gzip' directory.
        download_and_extract_archive(self.url, download_root=self.raw_folder, md5=self.md5)
        gzip_folder = os.path.join(self.raw_folder, "gzip")
        for gzip_file in os.listdir(gzip_folder):
            if gzip_file.endswith(".gz"):
                extract_archive(os.path.join(gzip_folder, gzip_file), self.raw_folder)
        shutil.rmtree(gzip_folder)
class QMNIST(MNIST):
    """`QMNIST <https://github.com/facebookresearch/qmnist>`_ Dataset.

    Args:
        root (string): Root directory of dataset whose ``raw``
            subdir contains binary files of the datasets.
        what (string,optional): Can be 'train', 'test', 'test10k',
            'test50k', or 'nist' for respectively the mnist compatible
            training set, the 60k qmnist testing set, the 10k qmnist
            examples that match the mnist testing set, the 50k
            remaining qmnist testing examples, or all the nist
            digits. The default is to select 'train' or 'test'
            according to the compatibility argument 'train'.
        compat (bool,optional): A boolean that says whether the target
            for each example is class number (for compatibility with
            the MNIST dataloader) or a torch vector containing the
            full qmnist information. Default=True.
        download (bool, optional): If True, downloads the dataset from
            the internet and puts it in root directory. If dataset is
            already downloaded, it is not downloaded again.
        transform (callable, optional): A function/transform that
            takes in an PIL image and returns a transformed
            version. E.g, ``transforms.RandomCrop``
        target_transform (callable, optional): A function/transform
            that takes in the target and transforms it.
        train (bool,optional,compatibility): When argument 'what' is
            not specified, this boolean decides whether to load the
            training set or the testing set.  Default: True.
    """

    # Maps every user-facing subset name to the resource group it uses
    # (test10k/test50k are slices of the same 'test' files).
    subsets = {"train": "train", "test": "test", "test10k": "test", "test50k": "test", "nist": "nist"}
    resources: Dict[str, List[Tuple[str, str]]] = {  # type: ignore[assignment]
        "train": [
            (
                "https://raw.githubusercontent.com/facebookresearch/qmnist/master/qmnist-train-images-idx3-ubyte.gz",
                "ed72d4157d28c017586c42bc6afe6370",
            ),
            (
                "https://raw.githubusercontent.com/facebookresearch/qmnist/master/qmnist-train-labels-idx2-int.gz",
                "0058f8dd561b90ffdd0f734c6a30e5e4",
            ),
        ],
        "test": [
            (
                "https://raw.githubusercontent.com/facebookresearch/qmnist/master/qmnist-test-images-idx3-ubyte.gz",
                "1394631089c404de565df7b7aeaf9412",
            ),
            (
                "https://raw.githubusercontent.com/facebookresearch/qmnist/master/qmnist-test-labels-idx2-int.gz",
                "5b5b05890a5e13444e108efe57b788aa",
            ),
        ],
        "nist": [
            (
                "https://raw.githubusercontent.com/facebookresearch/qmnist/master/xnist-images-idx3-ubyte.xz",
                "7f124b3b8ab81486c9d8c2749c17f834",
            ),
            (
                "https://raw.githubusercontent.com/facebookresearch/qmnist/master/xnist-labels-idx2-int.xz",
                "5ed0e788978e45d4a8bd4b7caec3d79d",
            ),
        ],
    }
    classes = [
        "0 - zero",
        "1 - one",
        "2 - two",
        "3 - three",
        "4 - four",
        "5 - five",
        "6 - six",
        "7 - seven",
        "8 - eight",
        "9 - nine",
    ]

    def __init__(
        self, root: str, what: Optional[str] = None, compat: bool = True, train: bool = True, **kwargs: Any
    ) -> None:
        if what is None:
            what = "train" if train else "test"
        self.what = verify_str_arg(what, "what", tuple(self.subsets.keys()))
        self.compat = compat
        self.data_file = what + ".pt"
        self.training_file = self.data_file
        self.test_file = self.data_file
        super().__init__(root, train, **kwargs)

    @property
    def images_file(self) -> str:
        (url, _), _ = self.resources[self.subsets[self.what]]
        return os.path.join(self.raw_folder, os.path.splitext(os.path.basename(url))[0])

    @property
    def labels_file(self) -> str:
        _, (url, _) = self.resources[self.subsets[self.what]]
        return os.path.join(self.raw_folder, os.path.splitext(os.path.basename(url))[0])

    def _check_exists(self) -> bool:
        return all(check_integrity(file) for file in (self.images_file, self.labels_file))

    def _load_data(self):
        data = read_sn3_pascalvincent_tensor(self.images_file)
        if data.dtype != torch.uint8:
            raise TypeError(f"data should be of dtype torch.uint8 instead of {data.dtype}")
        if data.ndimension() != 3:
            # Bug fix: this message was missing its f-prefix, so the
            # "{data.ndimension()}" placeholder was printed literally.
            raise ValueError(f"data should have 3 dimensions instead of {data.ndimension()}")

        targets = read_sn3_pascalvincent_tensor(self.labels_file).long()
        if targets.ndimension() != 2:
            raise ValueError(f"targets should have 2 dimensions instead of {targets.ndimension()}")

        # test10k/test50k are fixed slices of the full qmnist test files.
        if self.what == "test10k":
            data = data[0:10000, :, :].clone()
            targets = targets[0:10000, :].clone()
        elif self.what == "test50k":
            data = data[10000:, :, :].clone()
            targets = targets[10000:, :].clone()

        return data, targets

    def download(self) -> None:
        """Download the QMNIST data if it doesn't exist already.
        Note that we only download what has been asked for (argument 'what').
        """
        if self._check_exists():
            return

        os.makedirs(self.raw_folder, exist_ok=True)

        split = self.resources[self.subsets[self.what]]

        for url, md5 in split:
            download_and_extract_archive(url, self.raw_folder, md5=md5)

    def __getitem__(self, index: int) -> Tuple[Any, Any]:
        # redefined to handle the compat flag
        img, target = self.data[index], self.targets[index]
        img = Image.fromarray(img.numpy(), mode="L")
        if self.transform is not None:
            img = self.transform(img)
        if self.compat:
            # Compatibility mode: reduce the full qmnist label vector to the
            # plain class number expected by MNIST-style dataloaders.
            target = int(target[0])
        if self.target_transform is not None:
            target = self.target_transform(target)
        return img, target

    def extra_repr(self) -> str:
        return f"Split: {self.what}"
def get_int(b: bytes) -> int:
    """Interpret *b* as a big-endian unsigned integer via its hex digits."""
    hex_digits = codecs.encode(b, "hex")
    return int(hex_digits, 16)
# idx ("Pascal Vincent" / SN3) header type code -> torch dtype.
SN3_PASCALVINCENT_TYPEMAP = {
    8: torch.uint8,
    9: torch.int8,
    11: torch.int16,
    12: torch.int32,
    13: torch.float32,
    14: torch.float64,
}
def read_sn3_pascalvincent_tensor(path: str, strict: bool = True) -> torch.Tensor:
    """Read a SN3 file in "Pascal Vincent" format (Lush file 'libidx/idx-io.lsh').

    *path* is opened directly in binary mode (despite the historical wording,
    file objects and compressed inputs are not handled here). The header is a
    big-endian magic word encoding element type and rank, followed by one
    4-byte big-endian size per dimension, then the raw element data.
    """
    # read
    with open(path, "rb") as f:
        data = f.read()
    # parse
    magic = get_int(data[0:4])
    nd = magic % 256  # number of dimensions (1..3 supported)
    ty = magic // 256  # element type code, see SN3_PASCALVINCENT_TYPEMAP
    assert 1 <= nd <= 3
    assert 8 <= ty <= 14
    torch_type = SN3_PASCALVINCENT_TYPEMAP[ty]
    s = [get_int(data[4 * (i + 1) : 4 * (i + 2)]) for i in range(nd)]
    parsed = torch.frombuffer(bytearray(data), dtype=torch_type, offset=(4 * (nd + 1)))
    # The MNIST format uses the big endian byte order, while `torch.frombuffer` uses whatever the system uses. In case
    # that is little endian and the dtype has more than one byte, we need to flip them.
    if sys.byteorder == "little" and parsed.element_size() > 1:
        parsed = _flip_byte_order(parsed)
    # With strict=True the element count must match the header; strict=False
    # tolerates trailing bytes (used by the label/image readers below).
    assert parsed.shape[0] == np.prod(s) or not strict
    return parsed.view(*s)
def read_label_file(path: str) -> torch.Tensor:
    """Read an idx label file and return its contents as a 1-D long tensor."""
    labels = read_sn3_pascalvincent_tensor(path, strict=False)
    if labels.dtype != torch.uint8:
        raise TypeError(f"x should be of dtype torch.uint8 instead of {labels.dtype}")
    if labels.ndimension() != 1:
        raise ValueError(f"x should have 1 dimension instead of {labels.ndimension()}")
    return labels.long()
def read_image_file(path: str) -> torch.Tensor:
    """Read an idx image file and return it as a uint8 tensor of shape (N, H, W)."""
    x = read_sn3_pascalvincent_tensor(path, strict=False)
    if x.dtype != torch.uint8:
        raise TypeError(f"x should be of dtype torch.uint8 instead of {x.dtype}")
    if x.ndimension() != 3:
        # BUG FIX: message previously read "3 dimension" (grammar).
        raise ValueError(f"x should have 3 dimensions instead of {x.ndimension()}")
    return x
|
# from twilio.rest import TwilioRestClient
# Your Account Sid and Auth Token from twilio.com/user/account
from twilio import rest
import os

# SECURITY FIX: the account SID and auth token were hard-coded in source.
# Read them from the environment instead (set TWILIO_ACCOUNT_SID and
# TWILIO_AUTH_TOKEN before running); rotate any previously committed token.
account_sid = os.environ["TWILIO_ACCOUNT_SID"]
auth_token = os.environ["TWILIO_AUTH_TOKEN"]
client = rest.TwilioRestClient(account_sid, auth_token)
message = client.messages.create(body="Hi Geo",
                                 to="+19259517677",     # Replace with your phone number
                                 from_="+19253095975")  # Replace with your Twilio number
# Python-3 compatible print (the old `print message.sid` was Python-2 only).
print(message.sid)
# SM1f0b961d798f4d72811ab82ea67e50e7
|
from django.db import models
class Author(models.Model):
    """An author record, keyed externally by the AuthorID code."""
    # NOTE(review): these CharFields use default=1 (an int); Django coerces it
    # to "1" on save, but a string default would be clearer — confirm intent.
    Name = models.CharField(max_length=40,default=1)
    AuthorID = models.CharField(max_length=5,default=1)
    Age = models.CharField(max_length=3,default=1)
    Country = models.CharField(max_length=5,default=1)
    # Python-2 style display method (Python 3 Django would use __str__).
    def __unicode__(self):
        return self.Name
class Book(models.Model):
    """A book record, linked to an Author via the AuthorID code string."""
    Title = models.CharField(max_length=100,default=1)
    ISBN = models.CharField(max_length=13,default=1)
    # NOTE(review): stored as a plain CharField rather than a ForeignKey to
    # Author — referential integrity is not enforced by the database.
    AuthorID = models.CharField(max_length=5,default=1)
    Publisher = models.CharField(max_length=5,default=1)
    # NOTE(review): field name "PublicshDate" is misspelled; renaming would
    # require a migration, so it is left as-is here.
    PublicshDate = models.DateField(blank=True, null=True)
    def __unicode__(self):
        return self.Title
from app import db
from hashlib import md5
ROLE_USER = 0
ROLE_ADMIN = 1
class User(db.Model):
    """Application user; owns Task rows via the `tasks` relationship."""
    id = db.Column(db.Integer, primary_key = True)
    nickname = db.Column(db.String(64), index = True, unique = True)
    email = db.Column(db.String(120), index = True, unique = True)
    role = db.Column(db.SmallInteger, default = ROLE_USER)
    tasks = db.relationship('Task', backref = 'author', lazy = 'dynamic')
    # The next four methods appear to implement the Flask-Login user API —
    # confirm against the app's login manager.
    def is_authenticated(self):
        return True
    def is_active(self):
        return True
    def is_anonymous(self):
        return False
    def get_id(self):
        # NOTE(review): `unicode` exists only on Python 2; Python 3 would
        # need str(self.id) — confirm the target interpreter.
        return unicode(self.id)
    def avatar(self, size):
        """Gravatar URL for this user's email at the given pixel size."""
        # NOTE(review): hashlib.md5 requires bytes on Python 3; self.email
        # would need .encode('utf-8') there.
        return 'http://www.gravatar.com/avatar/' + md5(self.email).hexdigest() + '?d=mm&s=' + str(size)
    @staticmethod
    def make_unique_nickname(nickname):
        """Return `nickname`, or `nickname2`, `nickname3`, ... if already taken."""
        if User.query.filter_by(nickname = nickname).first() == None:
            return nickname
        version = 2
        while True:
            new_nickname = nickname + str(version)
            if User.query.filter_by(nickname = new_nickname).first() == None:
                break
            version += 1
        return new_nickname
    def __repr__(self):
        return '<User %r>' % (self.nickname)
class Task(db.Model):
    """A to-do item owned by a User (foreign key `user_id`)."""
    id = db.Column(db.Integer, primary_key = True)
    title = db.Column(db.String(120))
    created = db.Column(db.DateTime)
    modified = db.Column(db.DateTime)
    # presumably NULL until the task is completed — confirm against callers
    completed = db.Column(db.DateTime)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    def __repr__(self):
        return '<Task %r>' % (self.title)
import matplotlib.pyplot as plt
import pandas as pd
from pathlib import Path
from qbstyles import mpl_style
DR = Path(__file__).parent  # directory containing this script and the CSV
def main():
    """Plot the H, HR and K columns of 2018.csv; save plot.png and show it."""
    fp = DR / '2018.csv'
    df = pd.read_csv(str(fp))
    mpl_style(dark=True)  # qbstyles dark theme
    fig = plt.figure(figsize=(10, 8), dpi=72) # figsize * dpi
    ax = fig.add_subplot(111, xlabel=df['GameID'].name, ylabel='number')
    ax.plot(df['H'])
    # NOTE(review): only the HR series has a label and no legend is drawn —
    # confirm whether ax.legend() was intended.
    ax.plot(df['HR'], 'rs:', label='HR', ms=10, mew=5, mec='green')
    ax.plot(df['K'], marker='^', linestyle='-.')
    fig.savefig(str(DR / 'plot.png'))
    plt.show()
if __name__ == '__main__':
    main()
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
import logging
from concurrent.futures import Future
from typing import Any, BinaryIO, ClassVar
from pylsp_jsonrpc.endpoint import Endpoint # type: ignore[import]
from pylsp_jsonrpc.exceptions import ( # type: ignore[import]
JsonRpcException,
JsonRpcInvalidRequest,
JsonRpcMethodNotFound,
)
from pylsp_jsonrpc.streams import JsonRpcStreamReader, JsonRpcStreamWriter # type: ignore[import]
from pants.bsp.context import BSPContext
from pants.bsp.spec.notification import BSPNotification
from pants.core.util_rules.environments import determine_bootstrap_environment
from pants.engine.environment import EnvironmentName
from pants.engine.fs import Workspace
from pants.engine.internals.scheduler import SchedulerSession
from pants.engine.internals.selectors import Params
from pants.engine.unions import UnionMembership, union
try:
from typing import Protocol # Python 3.8+
except ImportError:
# See https://github.com/python/mypy/issues/4427 re the ignore
from typing_extensions import Protocol # type: ignore
_logger = logging.getLogger(__name__)
class BSPRequestTypeProtocol(Protocol):
    """Structural type for BSP request params: deserializable from a JSON dict."""
    @classmethod
    def from_json_dict(cls, d: dict[str, Any]) -> Any:
        ...
class BSPResponseTypeProtocol(Protocol):
    """Structural type for BSP responses: serializable to a JSON dict."""
    def to_json_dict(self) -> dict[str, Any]:
        ...
@union(in_scope_types=[EnvironmentName])
class BSPHandlerMapping:
    """Union type for rules to register handlers for BSP methods.

    Concrete members supply the class attributes declared below; they are
    collected from the UnionMembership by BSPConnection at startup.
    """
    # Name of the JSON-RPC method to be handled.
    method_name: ClassVar[str]
    # Type requested from the engine. This will be provided as the "subject" of an engine query.
    # Must implement class method `from_json_dict`.
    request_type: type[BSPRequestTypeProtocol]
    # Type produced by the handler rule. This will be requested as the "product" of the engine query.
    # Must implement instance method `to_json_dict`.
    response_type: type[BSPResponseTypeProtocol]
    # True if this handler is for a notification.
    # TODO: Consider how to pass notifications (which do not have responses) to the engine rules.
    is_notification: bool = False
def _make_error_future(exc: Exception) -> Future:
fut: Future = Future()
fut.set_exception(exc)
return fut
class BSPConnection:
    """JSON-RPC server for the Build Server Protocol (BSP).

    Reads messages from `inbound`, dispatches them to engine rules registered
    via `BSPHandlerMapping`, and writes responses/notifications to `outbound`.
    """
    _INITIALIZE_METHOD_NAME = "build/initialize"
    _SHUTDOWN_METHOD_NAME = "build/shutdown"
    # NOTE(review): constant name carries a typo ("NOTIFCATION"); left as-is
    # since other code may reference it.
    _EXIT_NOTIFCATION_NAME = "build/exit"
    def __init__(
        self,
        scheduler_session: SchedulerSession,
        union_membership: UnionMembership,
        context: BSPContext,
        inbound: BinaryIO,
        outbound: BinaryIO,
        max_workers: int = 5,
    ) -> None:
        self._scheduler_session = scheduler_session
        # TODO: We might eventually want to make this configurable.
        self._env_name = determine_bootstrap_environment(self._scheduler_session)
        self._inbound = JsonRpcStreamReader(inbound)
        self._outbound = JsonRpcStreamWriter(outbound)
        self._context: BSPContext = context
        # `self` doubles as the handler registry: Endpoint resolves method
        # names through __getitem__ below.
        self._endpoint = Endpoint(self, self._send_outbound_message, max_workers=max_workers)
        # Method name -> registered BSPHandlerMapping subclass.
        self._handler_mappings: dict[str, type[BSPHandlerMapping]] = {}
        impls = union_membership.get(BSPHandlerMapping)
        for impl in impls:
            self._handler_mappings[impl.method_name] = impl
    def run(self) -> None:
        """Run the listener for inbound JSON-RPC messages."""
        self._inbound.listen(self._received_inbound_message)
    def _received_inbound_message(self, msg):
        """Process each inbound JSON-RPC message."""
        _logger.info(f"_received_inbound_message: msg={msg}")
        self._endpoint.consume(msg)
    def _send_outbound_message(self, msg):
        # Sink for Endpoint's outgoing traffic (responses and notifications).
        _logger.info(f"_send_outbound_message: msg={msg}")
        self._outbound.write(msg)
    # TODO: Figure out how to run this on the `Endpoint`'s thread pool by returning a callable. For now, we
    # need to return errors as futures given that `Endpoint` only handles exceptions returned that way versus using a try ... except block.
    def _handle_inbound_message(self, *, method_name: str, params: Any):
        """Dispatch one request or notification to its registered engine rule.

        Returns a JSON-serializable dict on success, None for notifications
        and shutdown, or a failed Future carrying a JsonRpcException.
        """
        # If the connection is not yet initialized and this is not the initialization request, BSP requires
        # returning an error for methods (and to discard all notifications).
        #
        # Concurrency: This method can be invoked from multiple threads (for each individual request). By returning
        # an error for all other requests, only the thread running the initialization RPC should be able to proceed.
        # This ensures that we can safely call `initialize_connection` on the BSPContext with the client-supplied
        # init parameters without worrying about multiple threads. (Not entirely true though as this does not handle
        # the client making multiple concurrent initialization RPCs, but which would violate the protocol in any case.)
        if (
            not self._context.is_connection_initialized
            and method_name != self._INITIALIZE_METHOD_NAME
        ):
            return _make_error_future(
                JsonRpcException(
                    code=-32002, message=f"Client must first call `{self._INITIALIZE_METHOD_NAME}`."
                )
            )
        # Handle the `build/shutdown` method and `build/exit` notification.
        if method_name == self._SHUTDOWN_METHOD_NAME:
            # Return no-op success for the `build/shutdown` method. This doesn't actually cause the server to
            # exit. That will occur once the client sends the `build/exit` notification.
            return None
        elif method_name == self._EXIT_NOTIFCATION_NAME:
            # The `build/exit` notification directs the BSP server to immediately exit.
            # The read-dispatch loop will exit once it notices that the inbound handle is closed. So close the
            # inbound handle (and outbound handle for completeness) and then return to the dispatch loop
            # to trigger the exit.
            self._inbound.close()
            self._outbound.close()
            return None
        method_mapping = self._handler_mappings.get(method_name)
        if not method_mapping:
            return _make_error_future(JsonRpcMethodNotFound.of(method_name))
        try:
            request = method_mapping.request_type.from_json_dict(params)
        except Exception:
            return _make_error_future(JsonRpcInvalidRequest())
        # TODO: This should not be necessary: see https://github.com/pantsbuild/pants/issues/15435.
        self._scheduler_session.new_run_id()
        workspace = Workspace(self._scheduler_session)
        params = Params(request, workspace, self._env_name)
        execution_request = self._scheduler_session.execution_request(
            requests=[(method_mapping.response_type, params)],
        )
        (result,) = self._scheduler_session.execute(execution_request)
        # Initialize the BSPContext with the client-supplied init parameters. See earlier comment on why this
        # call to `BSPContext.initialize_connection` is safe.
        if method_name == self._INITIALIZE_METHOD_NAME:
            self._context.initialize_connection(request, self.notify_client)
        return result.to_json_dict()
    # Called by `Endpoint` to dispatch requests and notifications.
    # TODO: Should probably vendor `Endpoint` so we can detect notifications versus method calls, which
    # matters when ignoring unknown notifications versus erroring for unknown methods.
    def __getitem__(self, method_name):
        def handler(params):
            return self._handle_inbound_message(method_name=method_name, params=params)
        return handler
    def notify_client(self, notification: BSPNotification) -> None:
        """Send a server-initiated notification; failures are logged, not raised."""
        try:
            self._endpoint.notify(notification.notification_name, notification.to_json_dict())
        except Exception as ex:
            _logger.warning(f"Received exception while notifying BSP client: {ex}")
|
import struct
class ByteBuffer(object):
    """A simple FIFO binary buffer.

    push_* appends values in little-endian standard sizes; pop_* reads them
    back in push order via an internal cursor. Strings are stored as a
    uint32 byte-length prefix followed by the raw bytes; floats/doubles are
    stored textually via push_string to round-trip exactly.
    """
    def __init__(self):
        self._buffer = bytearray()
        self._offset = 0  # read cursor used by the pop_* methods

    def __len__(self):
        return len(self._buffer)

    def _push(self, fmt, value):
        # Append a single value packed with a standard-size format code.
        self._buffer.extend(struct.pack(fmt, value))

    def _pop(self, fmt, size):
        # Unpack a single value at the cursor and advance past it.
        value, = struct.unpack_from(fmt, self._buffer, self._offset)
        self._offset += size
        return value

    def push_int8(self, value):
        self._push('<b', value)

    def push_int16(self, value):
        self._push('<h', value)

    def push_int32(self, value):
        self._push('<i', value)

    def push_int64(self, value):
        self._push('<q', value)

    def push_uint8(self, value):
        self._push('<B', value)

    def push_uint16(self, value):
        self._push('<H', value)

    def push_uint32(self, value):
        self._push('<I', value)

    def push_uint64(self, value):
        self._push('<Q', value)

    def push_string(self, value):
        # Accept str or bytes; str is UTF-8 encoded first (BUG FIX: the old
        # struct.pack('%ds', value) call only accepted text on Python 2).
        if isinstance(value, str):
            value = value.encode('utf-8')
        self.push_uint32(len(value))
        self._buffer.extend(value)

    def push_float(self, value):
        self.push_string(str(value))

    def push_double(self, value):
        self.push_string(str(value))

    def pop_int8(self):
        return self._pop('<b', 1)

    def pop_int16(self):
        return self._pop('<h', 2)

    def pop_int32(self):
        return self._pop('<i', 4)

    def pop_int64(self):
        # BUG FIX: previously used '<l' (a 4-byte standard-size code) while
        # advancing the cursor by 8, silently truncating 64-bit values.
        return self._pop('<q', 8)

    def pop_uint8(self):
        return self._pop('<B', 1)

    def pop_uint16(self):
        return self._pop('<H', 2)

    def pop_uint32(self):
        return self._pop('<I', 4)

    def pop_uint64(self):
        # BUG FIX: previously used '<L' (4 bytes) with an 8-byte cursor step.
        return self._pop('<Q', 8)

    def pop_string(self):
        length = self.pop_uint32()
        raw = bytes(self._buffer[self._offset:self._offset + length])
        self._offset += length
        # Decode so round-trips compare equal to pushed text on Python 3.
        return raw.decode('utf-8')

    def pop_float(self):
        return float(self.pop_string())

    def pop_double(self):
        return float(self.pop_string())
def main():
    """Smoke-test ByteBuffer: check sizes after each push, then pop-back values."""
    bytebuffer = ByteBuffer()
    bytebuffer.push_int8(42)
    assert(len(bytebuffer) == 1)
    bytebuffer.push_int16(42)
    assert(len(bytebuffer) == 1 + 2)
    bytebuffer.push_int32(42)
    assert(len(bytebuffer) == 1 + 2 + 4)
    bytebuffer.push_int64(42)
    assert(len(bytebuffer) == 1 + 2 + 4 + 8)
    bytebuffer.push_string('')
    assert(len(bytebuffer) == 1 + 2 + 4 + 8 + 4)
    bytebuffer.push_string('42')
    assert(len(bytebuffer) == 1 + 2 + 4 + 8 + 4 + 4 + 2)
    bytebuffer.push_float(4.2)
    assert(bytebuffer.pop_int8() == 42)
    assert(bytebuffer.pop_int16() == 42)
    assert(bytebuffer.pop_int32() == 42)
    assert(bytebuffer.pop_int64() == 42)
    # NOTE(review): these comparisons assume pop_string returns text; a
    # bytes-returning implementation (Python 3 + raw struct) would fail here.
    assert(bytebuffer.pop_string() == '')
    assert(bytebuffer.pop_string() == '42')
    assert(bytebuffer.pop_float() == 4.2)
if __name__ == '__main__':
    main()
|
# NOTE(review): scipy.optimize.anneal was deprecated in SciPy 0.14 and removed
# in 0.16 — this import fails on any modern SciPy; dual_annealing or
# basinhopping are the current replacements. Also, `func` and `x0` are never
# defined in this snippet, so it raises NameError as written.
from scipy.optimize import anneal
result = anneal(func, x0, args=(),
                schedule='fast', full_output=True, T0=None,
                Tf=1e-12, maxeval=None, maxaccept=None, maxiter=400,
                boltzmann=1.0, learn_rate=0.5, feps=1e-06, quench=1.0,
                m=1.0, n=1.0, lower=-100, upper=100, dwell=50, disp=True)
print(result)
|
from itertools import combinations
def solution(nums):
    """Count the 3-element combinations of `nums` whose sum has no divisor
    in [2, sqrt(sum)] (i.e. is prime for sums >= 2)."""
    def _has_no_small_divisor(value):
        for divisor in range(2, int(value ** 0.5) + 1):
            if value % divisor == 0:
                return False
        return True

    return sum(
        1 for triple in combinations(nums, 3) if _has_no_small_divisor(sum(triple))
    )
from rest_framework.permissions import BasePermission
class UserPermission(BasePermission):
    """Object-level access: role-2 users may act on users of their own client;
    any other user may only act on themselves."""
    def has_object_permission(self, request, view, obj):
        current_user = request.user
        # NOTE(review): role == 2 presumably denotes a client-level manager —
        # confirm against the role constants on the user model.
        if current_user.role == 2:
            return current_user.client == obj.client
        elif current_user == obj:
            return True
        return False
class OwnerPermission(BasePermission):
    """Grant object-level access only when the requester is the object itself."""
    def has_object_permission(self, request, view, obj):
        return request.user == obj
class AdminPermission(BasePermission):
    """Grant view access to users whose role is above the baseline (role > 1)."""
    def has_permission(self, request, view):
        return request.user.role > 1
|
# Interactive email manipulation loop: read an email, then apply commands
# (Make/GetDomain/GetUsername/Replace/Encrypt) until "Complete" is entered.
email = input()

while True:
    line = input()
    if line == "Complete":
        break
    parts = line.split()
    action = parts[0]
    if action == "Make":
        # "Upper" uppercases; any other argument lowercases.
        email = email.upper() if parts[1] == "Upper" else email.lower()
        print(email)
    elif action == "GetDomain":
        length = int(parts[1])
        print(email[-length:])
    elif action == "GetUsername":
        if "@" in email:
            # Everything before the first '@'.
            print(email.split("@")[0])
        else:
            print(f"The email {email} doesn't contain the @ symbol.")
    elif action == "Replace":
        # str.replace substitutes every occurrence in one pass.
        email = email.replace(parts[1], "-")
        print(email)
    elif action == "Encrypt":
        print(" ".join(str(ord(symbol)) for symbol in email))
|
๏ปฟ#coding:utf-8
#!/usr/bin/env python
from arenarank.models.medal_arena import medal_arena
class medal:
    """Facade over the medal arena: grab, seek, level-up, create and delete medals."""
    @staticmethod
    def grab_medal(offenceRoleid, defenceRoleid, level, medalid, chipnum):
        """Steal medal chips from the defender and award them to the attacker.

        Returns an error dict if the defender is protected or lacks the medal.
        """
        ma = medal_arena.instance()
        if ma.is_protect(defenceRoleid):
            return {'msg':'arene_grab_in_protect'}
        if ma.lose_medal(defenceRoleid, medalid, chipnum) == 0:
            return {'msg':'medal_not_exist'}
        return ma.win_medal(offenceRoleid, level, medalid, chipnum)
    @staticmethod
    def seek_holder(roleid ,level, medalid, chipnum):
        """Find the current holder of the given medal chips."""
        ma = medal_arena.instance()
        return ma.seek_holder(roleid, level, medalid, chipnum)
    @staticmethod
    def medal_levelup(roleid, medalid):
        """Level up a medal for the given role."""
        ma = medal_arena.instance()
        return ma.medal_levelup(roleid, medalid)
    @staticmethod
    def new_medal(roleid, level, medalid ,chipnum, cnt):
        """Create a new medal entry for the given role."""
        ma = medal_arena.instance()
        return ma.new_medal(roleid, level, medalid ,chipnum, cnt)
    @staticmethod
    def delete_medal(roleid, level, medalid, chipnum, cnt):
        """Delete a medal entry for the given role."""
        ma = medal_arena.instance()
        return ma.delete_medal(roleid, level, medalid, chipnum, cnt)
    @staticmethod
    def try_grab(defenceRoleid):
        """Check whether the defender can currently be robbed (not protected)."""
        ma = medal_arena.instance()
        return {'protect': ma.is_protect(defenceRoleid)}
    @staticmethod
    def add_protect_time(roleid, second):
        """Extend the role's protection window by *second* seconds."""
        # BUG FIX: previously called medal.instance(), which does not exist on
        # this class — the singleton lives on medal_arena.
        ma = medal_arena.instance()
        return ma.add_protect_time(roleid, second)
|
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao (Bin.Xiao@microsoft.com)
# Modified by Xingyi Zhou
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numba
import numpy as np
import torch
def gaussian_radius(det_size, min_overlap=0.7):
    """Radius within which a shifted box corner still yields IoU >= min_overlap.

    Solves a quadratic for each of the three overlap configurations (following
    CornerNet) and returns the most conservative of the three roots.

    NOTE(review): follow-up discussions of the upstream code argue each root
    should be (b - sqrt(b^2 - 4ac)) / (2a) rather than (b + sqrt(...)) / 2;
    the original formula is kept here — confirm before changing.
    """
    height, width = det_size
    a1 = 1
    b1 = (height + width)
    c1 = width * height * (1 - min_overlap) / (1 + min_overlap)
    sq1 = np.sqrt(b1 ** 2 - 4 * a1 * c1)
    r1 = (b1 + sq1) / 2
    a2 = 4
    b2 = 2 * (height + width)
    c2 = (1 - min_overlap) * width * height
    sq2 = np.sqrt(b2 ** 2 - 4 * a2 * c2)
    r2 = (b2 + sq2) / 2
    a3 = 4 * min_overlap
    b3 = -2 * min_overlap * (height + width)
    c3 = (min_overlap - 1) * width * height
    sq3 = np.sqrt(b3 ** 2 - 4 * a3 * c3)
    r3 = (b3 + sq3) / 2
    return min(r1, r2, r3)
def gaussian2D(shape, sigma=1):
    """Return a `shape`-sized array holding an unnormalized Gaussian centered
    on the middle cell; values below machine-eps times the peak are zeroed."""
    half_h, half_w = [(edge - 1.) / 2. for edge in shape]
    ys, xs = np.ogrid[-half_h:half_h + 1, -half_w:half_w + 1]
    kernel = np.exp(-(xs * xs + ys * ys) / (2 * sigma * sigma))
    kernel[kernel < np.finfo(kernel.dtype).eps * kernel.max()] = 0
    return kernel
def draw_umich_gaussian(heatmap, center, radius, k=1):
    """Splat a Gaussian peak of the given radius onto `heatmap` at `center`.

    The bump is merged with np.maximum so overlapping objects keep the
    strongest response; `k` scales the peak. Mutates and returns `heatmap`.
    """
    diameter = 2 * radius + 1
    gaussian = gaussian2D((diameter, diameter), sigma=diameter / 6)
    x, y = int(center[0]), int(center[1])
    height, width = heatmap.shape[0:2]
    # Clip the stamp so it stays inside the heatmap borders.
    left, right = min(x, radius), min(width - x, radius + 1)
    top, bottom = min(y, radius), min(height - y, radius + 1)
    masked_heatmap = heatmap[y - top:y + bottom, x - left:x + right]
    masked_gaussian = gaussian[radius - top:radius + bottom, radius - left:radius + right]
    if min(masked_gaussian.shape) > 0 and min(masked_heatmap.shape) > 0: # TODO debug
        np.maximum(masked_heatmap, masked_gaussian * k, out=masked_heatmap)
    return heatmap
def draw_dense_reg(regmap, heatmap, center, value, radius, is_offset=False):
    """Paint regression target `value` into `regmap` in a window around `center`.

    Pixels are overwritten only where the new Gaussian weight beats the
    existing `heatmap` response, so the closest object wins. When `is_offset`
    and the value is 2-dimensional, per-pixel (dx, dy) offsets to the center
    are written instead of a constant. Mutates and returns `regmap`.
    """
    diameter = 2 * radius + 1
    gaussian = gaussian2D((diameter, diameter), sigma=diameter / 6)
    value = np.array(value, dtype=np.float32).reshape(-1, 1, 1)
    dim = value.shape[0]
    reg = np.ones((dim, diameter * 2 + 1, diameter * 2 + 1), dtype=np.float32) * value
    if is_offset and dim == 2:
        # Convert the constant value into per-pixel offsets to the center.
        delta = np.arange(diameter * 2 + 1) - radius
        reg[0] = reg[0] - delta.reshape(1, -1)
        reg[1] = reg[1] - delta.reshape(-1, 1)
    x, y = int(center[0]), int(center[1])
    height, width = heatmap.shape[0:2]
    # Clip the stamp so it stays inside the map borders.
    left, right = min(x, radius), min(width - x, radius + 1)
    top, bottom = min(y, radius), min(height - y, radius + 1)
    masked_heatmap = heatmap[y - top:y + bottom, x - left:x + right]
    masked_regmap = regmap[:, y - top:y + bottom, x - left:x + right]
    masked_gaussian = gaussian[radius - top:radius + bottom,
                      radius - left:radius + right]
    masked_reg = reg[:, radius - top:radius + bottom,
                 radius - left:radius + right]
    if min(masked_gaussian.shape) > 0 and min(masked_heatmap.shape) > 0: # TODO debug
        # Keep the old values where the existing response is stronger.
        idx = (masked_gaussian >= masked_heatmap).reshape(
            1, masked_gaussian.shape[0], masked_gaussian.shape[1])
        masked_regmap = (1 - idx) * masked_regmap + idx * masked_reg
    regmap[:, y - top:y + bottom, x - left:x + right] = masked_regmap
    return regmap
def draw_msra_gaussian(heatmap, center, sigma):
    """Stamp an MSRA-style Gaussian (3-sigma window) onto `heatmap` at `center`
    using elementwise max; mutates and returns `heatmap`.

    NOTE(review): `w` is bound to shape[0] (rows) and `h` to shape[1] (cols) —
    names look swapped but are used consistently below; confirm before renaming.
    """
    tmp_size = sigma * 3
    mu_x = int(center[0] + 0.5)
    mu_y = int(center[1] + 0.5)
    w, h = heatmap.shape[0], heatmap.shape[1]
    # Upper-left / bottom-right corners of the stamp window.
    ul = [int(mu_x - tmp_size), int(mu_y - tmp_size)]
    br = [int(mu_x + tmp_size + 1), int(mu_y + tmp_size + 1)]
    if ul[0] >= h or ul[1] >= w or br[0] < 0 or br[1] < 0:
        # Entirely outside the map: nothing to draw.
        return heatmap
    size = 2 * tmp_size + 1
    x = np.arange(0, size, 1, np.float32)
    y = x[:, np.newaxis]
    x0 = y0 = size // 2
    g = np.exp(- ((x - x0) ** 2 + (y - y0) ** 2) / (2 * sigma ** 2))
    # Intersection of the stamp with the map, in stamp and map coordinates.
    g_x = max(0, -ul[0]), min(br[0], h) - ul[0]
    g_y = max(0, -ul[1]), min(br[1], w) - ul[1]
    img_x = max(0, ul[0]), min(br[0], h)
    img_y = max(0, ul[1]), min(br[1], w)
    heatmap[img_y[0]:img_y[1], img_x[0]:img_x[1]] = np.maximum(
        heatmap[img_y[0]:img_y[1], img_x[0]:img_x[1]],
        g[g_y[0]:g_y[1], g_x[0]:g_x[1]])
    return heatmap
def _sigmoid(x):
y = torch.clamp(x.sigmoid_(), min=1e-4, max=1 - 1e-4)
return y
def _gather_feat(feat, ind, mask=None):
dim = feat.size(2)
ind = ind.unsqueeze(2).expand(ind.size(0), ind.size(1), dim)
feat = feat.gather(1, ind)
if mask is not None:
mask = mask.unsqueeze(2).expand_as(feat)
feat = feat[mask]
feat = feat.view(-1, dim)
return feat
def _tranpose_and_gather_feat(feat, ind):
    """Reorder `feat` from (B, C, H, W) to (B, H*W, C), then gather rows at `ind`.

    NOTE(review): the name keeps its historical misspelling ("tranpose")
    because external callers reference it.
    """
    feat = feat.permute(0, 2, 3, 1).contiguous()
    feat = feat.view(feat.size(0), -1, feat.size(3))
    feat = _gather_feat(feat, ind)
    return feat
def flip_tensor(x):
    """Reverse `x` along dimension 3 (the width axis for NCHW tensors)."""
    return x.flip([3])
    # numpy round-trip alternative kept for reference:
    # tmp = x.detach().cpu().numpy()[..., ::-1].copy()
    # return torch.from_numpy(tmp).to(x.device)
def flip_lr(x, flip_idx):
    """Flip a (B, C, H, W) tensor left-right and swap paired channels.

    `flip_idx` lists (i, j) channel pairs (e.g. left/right joints) whose
    feature maps must be exchanged after the horizontal flip.
    """
    flipped = x.detach().cpu().numpy()[..., ::-1].copy()
    original_shape = flipped.shape
    for first, second in flip_idx:
        flipped[:, first, ...], flipped[:, second, ...] = \
            flipped[:, second, ...].copy(), flipped[:, first, ...].copy()
    return torch.from_numpy(flipped.reshape(original_shape)).to(x.device)
def flip_lr_off(x, flip_idx):
    """Flip a keypoint-offset map left-right and fix up its coordinates.

    `x` has shape (B, 2*K, H, W): K keypoints, each contributing an (x, y)
    offset pair. A horizontal flip negates the x component, and the paired
    left/right keypoints listed in `flip_idx` are swapped.

    GENERALIZED: K is now derived from the channel count instead of the
    previous hard-coded 17 (COCO pose); behavior is unchanged for
    34-channel inputs.
    """
    tmp = x.detach().cpu().numpy()[..., ::-1].copy()
    shape = tmp.shape
    num_keypoints = tmp.shape[1] // 2  # was hard-coded to 17
    tmp = tmp.reshape(tmp.shape[0], num_keypoints, 2,
                      tmp.shape[2], tmp.shape[3])
    tmp[:, :, 0, :, :] *= -1  # x-offsets change sign under a horizontal flip
    for e in flip_idx:
        tmp[:, e[0], ...], tmp[:, e[1], ...] = \
            tmp[:, e[1], ...].copy(), tmp[:, e[0], ...].copy()
    return torch.from_numpy(tmp.reshape(shape)).to(x.device)
@numba.jit(nopython=True, nogil=True)
def gen_oracle_map(feat, ind, w, h):
    """Scatter ground-truth features onto a (h, w) map and flood-fill the gaps.

    Each valid index in `ind` seeds its feature vector at its (x, y) cell; a
    BFS then propagates the nearest seed's features to every remaining cell.
    """
    # feat: B x maxN x featDim
    # ind: B x maxN
    batch_size = feat.shape[0]
    max_objs = feat.shape[1]
    feat_dim = feat.shape[2]
    out = np.zeros((batch_size, feat_dim, h, w), dtype=np.float32)
    vis = np.zeros((batch_size, h, w), dtype=np.uint8)  # visited mask
    ds = [(0, 1), (0, -1), (1, 0), (-1, 0)]  # 4-neighborhood steps
    for i in range(batch_size):
        # Fixed-size arrays stand in for a deque (numba nopython mode).
        queue_ind = np.zeros((h * w * 2, 2), dtype=np.int32)
        queue_feat = np.zeros((h * w * 2, feat_dim), dtype=np.float32)
        head, tail = 0, 0
        # Seed the queue with every annotated object location.
        for j in range(max_objs):
            if ind[i][j] > 0:
                x, y = ind[i][j] % w, ind[i][j] // w
                out[i, :, y, x] = feat[i][j]
                vis[i, y, x] = 1
                queue_ind[tail] = x, y
                queue_feat[tail] = feat[i][j]
                tail += 1
        # BFS: copy each seed's features outward to unvisited neighbors.
        while tail - head > 0:
            x, y = queue_ind[head]
            f = queue_feat[head]
            head += 1
            for (dx, dy) in ds:
                xx, yy = x + dx, y + dy
                if xx >= 0 and yy >= 0 and xx < w and yy < h and vis[i, yy, xx] < 1:
                    out[i, :, yy, xx] = f
                    vis[i, yy, xx] = 1
                    queue_ind[tail] = xx, yy
                    queue_feat[tail] = f
                    tail += 1
    return out
#!/usr/bin/python3
import common_constants as const
import common
import os
def create_bs_config_json(web_ui_port: str, base_dir: str):
    """Write a lite-server bs-config.json with the given port and base directory.

    Uses json.dump instead of manual string concatenation so quoting and
    escaping are always valid JSON; a non-numeric port now fails loudly with
    ValueError instead of silently emitting broken JSON.
    """
    import json  # local import keeps this change self-contained

    config = {"port": int(web_ui_port), "server": {"baseDir": base_dir}}
    with open("bs-config.json", "w") as bs_config_file:
        json.dump(config, bs_config_file)
if __name__ == "__main__":
if common.configuration_file_exists():
project_information = common. read_project_information()
web_ui_port = project_information[const.WEB_UI_PORT_OPTION]
print("launching web ui")
print(" with angular port: {}".format(web_ui_port))
print(" with dist folder: {}".format(const.DIST_DIRECTORY))
create_bs_config_json(web_ui_port, const.COMPONENTS_WEB_UI_DIST_DIRECTORY)
os.system("lite-server")
else:
print("ERROR: Please make sure configuration file: {} exists".format(const.CONFIGURATION_FILE))
|
#!/usr/bin/env python
""" 20110116: Looking at alsit of Joey debosscher source ids,
I determine the dotastro.org source ids.
"""
import os, sys
import MySQLdb
sys.path.append(os.path.abspath(os.path.expandvars('$TCP_DIR/' +
'Algorithms')))
import simbad_id_lookup
class Find_DotAstro_Deboss_Sources():
def __init__(self, pars={}):
self.pars = pars
self.db = MySQLdb.connect(host=self.pars['tcptutor_hostname'],
user=self.pars['tcptutor_username'],
db=self.pars['tcptutor_database'],
port=self.pars['tcptutor_port'],
passwd=self.pars['tcptutor_password'])
self.cursor = self.db.cursor()
def main(self):
"""
"""
list_fpath = os.path.abspath(os.path.expandvars('$TCP_DIR/' +
'Data/tutor_new_deboss.list'))
#list_fpath = os.path.expandvars('$HOME/scratch/tutor_new_deboss.list')
lines = open(list_fpath).readlines()
joey_id_list = []
class_name_list = []
joey_source_name_list = []
tutor_source_id_list = []
for line in lines:
tups = line.split()
(joey_id, class_name, source_name) = tups
joey_id_list.append(joey_id)
class_name_list.append(class_name)
joey_source_name_list.append(source_name)
#select_str = 'SELECT source_id, source_name FROM sources WHERE project_id=123 and source_name like "%' + source_name + '%"'
select_str = 'SELECT source_id, source_name FROM sources WHERE project_id=123 and source_name = "' + source_name + '"'
self.cursor.execute(select_str)
results = self.cursor.fetchall()
tutor_source_id = results[0][0]
if len(results) == 0:
print "NO MATCH: ", joey_id, class_name, source_name
elif len(results) > 1:
print "TOO MANY: ", joey_id, class_name, source_name
print results
else:
pass #print joey_id, tutor_source_id, results[0][1]
if tutor_source_id in tutor_source_id_list:
print "ALREADY matched this tutor source_id: %d(%s) joey_id=%d class_name=%s joey_source_name=%s" % (tutor_source_id, results[0][1], joey_id, class_name, source_name)
else:
tutor_source_id_list.append(tutor_source_id)
# TODO: now query tutor rdb for all sources and match with the tutor_source_id to see what extra srcids
select_str = 'SELECT source_id, source_name FROM sources WHERE project_id=123'
self.cursor.execute(select_str)
results = self.cursor.fetchall()
for row in results:
if row[0] not in tutor_source_id_list:
print "In TUTOR, but not in Joey 1542 list: source_id=%d source_name=%s" % (row[0], row[1])
import pdb; pdb.set_trace()
print
if __name__ == '__main__':
    # SECURITY NOTE(review): database credentials are committed in plaintext
    # here; they should be moved to environment variables or a config file
    # outside version control, and the password rotated.
    pars = { \
        'user_id':3, # 3 = dstarr in tutor.users
        'tcptutor_hostname':'192.168.1.103',
        'tcptutor_username':'dstarr', # guest
        'tcptutor_password':'ilove2mass', #'iamaguest',
        'tcptutor_database':'tutor',
        'tcptutor_port':3306,
        }
    FindDotAstroDebossSources = Find_DotAstro_Deboss_Sources(pars=pars)
    FindDotAstroDebossSources.main()
|
#coding:utf-8
import requests
import urllib2
import urllib
import httplib
import cookielib
import re
r="hello";
#print("my name is"+r)
def testRequest():
    """Fetch baidu.com with `requests` and print the raw body (Python 2 smoke test)."""
    url = "http://www.baidu.com"
    response = requests.get(url)
    # NOTE(review): `response` is unused and the page is fetched a second time.
    content = requests.get(url).content
    print content
    return
def testGet(url):
    """Fetch *url* via urllib2 (Python 2) and print the response body."""
    z_request=urllib2.Request(url)
    #print z_request
    response=urllib2.urlopen(z_request)
    #resquest_data=response.read()
    print response.read()
    return ;
def testHttpGet(url):
    """GET *url* over a raw httplib connection and print the body (Python 2).

    NOTE(review): HTTPConnection expects a bare host, not a full URL, and
    HTTP method strings are conventionally uppercase ("GET") — confirm this
    ever worked as written.
    """
    conn=httplib.HTTPConnection(url)
    conn.request(method="get",url=url)
    response=conn.getresponse()
    print response.read()
#testRequest();
#testGet("https://api.tuxiaobei.com/story/video-types")
def testPost():
    """Auto sign-in to a Discuz forum: scrape the formhash, then POST the
    daily check-in form (Python 2, urllib2).

    SECURITY NOTE(review): a live session cookie is hard-coded below; it
    grants account access and should not be committed to source control.
    """
    cookiestr = 'gkr8_2132_saltkey=8gr4YzYy; gkr8_2132_auth=ee5co0gnXEcCChRjWolv5HiUKqb9uJ%2FseQDYzVrCp7Jy5xqzPnOqerJXAUDSJdlly9x2XU3km0F%2BH7cY%2F0wvLCbUtA0 ';
    headers={
        "User-agent":"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/49.0.2623.75 Safari/537.36",
        "Referer":"http://www.tsdm.me/home.php?mod=space&do=notice&isread=1",
        "Cookie":cookiestr
    }
    loginurl = 'http://www.tsdm.me/plugin.php?id=dsu_paulsign:sign&21ddc11c&infloat=yes&handlekey=dsu_paulsign&inajax=1&ajaxtarget=fwin_content_dsu_paulsign';
    signurl = 'http://www.tsdm.me/plugin.php?id=dsu_paulsign:sign&operation=qiandao&infloat=1&sign_as=1&inajax=1';
    # First request: fetch the sign-in page and extract the anti-CSRF formhash.
    request=urllib2.Request(loginurl,None,headers)
    conn= urllib2.urlopen(request)
    response=conn.read()
    searchobj=re.search(r"<input type=\"hidden\" name=\"formhash\" value=\"(.*)\">",response)
    print "hash is:"+searchobj.group(1)
    if (searchobj.group(1)):
        # Second request: submit the check-in form with the scraped hash.
        data={
            "qdxq":"wl",
            "qdmode":"1",
            "todaysay":"็ญพๅฐไบaaa",
            "fastreply":"1",
            "formhash":searchobj.group(1)
        }
        postdata = urllib.urlencode(data)
        request2 = urllib2.Request(signurl, postdata, headers)
        conn = urllib2.urlopen(request2)
        response = conn.read()
        print response
    return
#testHttpGet("https://api.tuxiaobei.com/story/videos")
#testPost()
|
try:
    from kfp.components import InputPath
    from kfp.components import OutputPath
except ImportError:
    # Fallback stubs so this module also runs outside a Kubeflow Pipelines
    # environment: the annotation helpers become identity functions.
    def InputPath(c):
        return c
    def OutputPath(c):
        return c
def prepro_class(dataset_path: InputPath(str), wikiqa_path: OutputPath(str)):
    """KFP component: preprocess the WikiQA corpus for classification.

    Reads WikiQACorpus and GloVe vectors under *dataset_path* and writes the
    preprocessed dataset to *wikiqa_path*/wikiqa-class.
    """
    import nltk
    from wikiqa.prepro_class import prepro
    nltk.download("punkt")  # tokenizer models required by the PTB tokenizer
    def get_args():
        # Bundle the CLI-style arguments that prepro() expects.
        from types import SimpleNamespace
        source_dir = dataset_path + "/WikiQACorpus"
        target_dir = wikiqa_path + "/wikiqa-class"
        glove_dir = dataset_path + "/glove"
        args = SimpleNamespace(
            source_dir=source_dir,
            target_dir=target_dir,
            debug=False,
            glove_corpus="6B",
            glove_dir=glove_dir,
            glove_vec_size="100",
            tokenizer="PTB",
        )
        return args
    args = get_args()
    prepro(args)
|
# -*- coding: utf-8 -*-
class TreeNode:
    """A binary-tree node holding a value and optional left/right children."""
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class Solution:
    def sumOfLeftLeaves(self, root):
        """Return the sum of all left leaves in the binary tree (iterative DFS)."""
        total = 0
        stack = [(root, False)]  # (node, node-is-a-left-child)
        while stack:
            node, came_from_left = stack.pop()
            if node is None:
                continue
            if node.left is None and node.right is None:
                if came_from_left:
                    total += node.val
                continue
            stack.append((node.left, True))
            stack.append((node.right, False))
        return total
if __name__ == "__main__":
solution = Solution()
t0_0 = TreeNode(3)
t0_1 = TreeNode(9)
t0_2 = TreeNode(20)
t0_3 = TreeNode(15)
t0_4 = TreeNode(7)
t0_2.left = t0_3
t0_2.right = t0_4
t0_0.left = t0_1
t0_0.right = t0_2
assert 24 == solution.sumOfLeftLeaves(t0_0)
|
from django.contrib import admin
from .models.technical_sheet import TechnicalSheet
# Expose TechnicalSheet in the Django admin with the default ModelAdmin.
admin.site.register(TechnicalSheet)
|
from model import connect_to_db, db, User, Watchlist, Stock
import requests
def key_word_search(word):
    """Search stocks by symbol or company-name keywords.

    Queries the Alpha Vantage SYMBOL_SEARCH endpoint.

    Args:
        word: symbol fragment or company-name keyword(s).

    Returns:
        dict of the form {'stocks': [{'symbol': ..., 'name': ...}, ...]};
        the list is empty when the API returns no matches.
    """
    payload_name = {'function': 'SYMBOL_SEARCH',
                    'keywords': word,
                    'apikey': 'CYUYBFS101MCWYTS'}
    req_name = requests.get("https://www.alphavantage.co/query", params=payload_name)
    js_data_name = req_name.json()
    # Default to [] (not 0): the old default crashed the loop below whenever
    # the API omitted 'bestMatches' (rate limiting, no results).
    best_matches = js_data_name.get('bestMatches', [])
    stocks = [{'symbol': stock['1. symbol'], 'name': stock['2. name']}
              for stock in best_matches]
    return {'stocks': stocks}
def get_realtime_price(symbol):
    """Return the latest intraday (60min) close price for `symbol`.

    Returns:
        dict {'symbol': symbol, 'realtime': price}; price is 0 when the API
        returns no intraday series (rate limit, unknown symbol).
    """
    payload_rt = {'function': 'TIME_SERIES_INTRADAY',
                  'symbol': symbol,
                  'interval': '60min',
                  'outputsize': 'compact',
                  'apikey': 'CYUYBFS101MCWYTS'}
    req_realtime = requests.get("https://www.alphavantage.co/query", params=payload_rt)
    js_data_rt = req_realtime.json()
    # Default to {} (not 0): the old default of 0 crashed on .keys()/.get()
    # whenever the series key was missing from the response.
    hourly_series_dict = js_data_rt.get('Time Series (60min)', {})
    if hourly_series_dict:
        # First key is the most recent timestamp in the returned series.
        latest_key = list(hourly_series_dict.keys())[0]
        price = hourly_series_dict.get(latest_key, {}).get('4. close', 0)
    else:
        price = 0
    realtime = {'symbol': symbol, 'realtime': price}
    print(realtime)
    return realtime
def get_weekly_ave(symbol):
    """Fetch the weekly 10-period EMA of open prices as CSV for charting.

    Returns:
        dict {'data': <csv text>, 'stock': symbol}.
    """
    # Weekly EMA request against the Alpha Vantage indicator endpoint.
    ema_params = {'function': 'EMA',
                  'symbol': symbol,
                  'interval': 'weekly',
                  'time_period': 10,
                  'series_type': 'open',
                  'datatype': 'csv',
                  'apikey': 'CYUYBFS101MCWYTS'}
    data = requests.get("https://www.alphavantage.co/query", params=ema_params)
    print(data.url)
    return {'data': data.text, 'stock': symbol}
def get_weekly_price(symbol):
    """Fetch the weekly open-price series for `symbol` as CSV for charting.

    Returns:
        dict {'data': <csv text>, 'stock': symbol}.
    """
    payload_weekly = {'function': 'TIME_SERIES_WEEKLY',
                      'symbol': symbol,
                      'interval': 'weekly',
                      'datatype': 'csv',
                      'apikey': 'G91S3ATZL5YIK83E'}
    data = requests.get("https://www.alphavantage.co/query", params=payload_weekly)
    result = {'data': data.text, 'stock': symbol}
    # Fixed: '/n' was a typo for the newline escape '\n'.
    print('\nIt is working')
    return result
def get_user(email):
    """Return the User record whose email matches `email`.

    Args:
        email: the account email to look up.

    Returns:
        The full User object (not just its id, despite the older
        ``get_user_id`` doctests this replaced), or None when no user has
        that email.
    """
    # .first() yields None rather than raising when there is no match.
    this_user = User.query.filter_by(email=email).first()
    return this_user
if __name__ == "__main__":
    # Bind the Flask app so the query helpers above can reach the database
    # when this module is run directly.
    from server import app
    connect_to_db(app)
    print("Connected to DB.")
|
######################## Monte Carlo Based Pathway Search Algorithm ##############################
import numpy as np
import math as m
from random import seed
from random import randint
import multiprocessing as mp
from multiprocessing import Pool
from joblib import Parallel, delayed
from datetime import datetime
import matplotlib.pyplot as plt
import csv
##### procedure used to standardize energies ######
def standardize_energy(energy_data):
    """Return the z-scores of `energy_data` as a plain list.

    Each value is shifted by the sample mean and scaled by the (population)
    standard deviation.
    """
    mu = np.mean(energy_data)
    sigma = np.std(energy_data)
    return [(sample - mu) / float(sigma) for sample in energy_data]
####### procedure to obtain allowable frames for the next iteration################
def allowed_frames(data,z_list,memory_array,prev_frame,inc_ind,az_ind,inc_step,az_step,z_i,indep=0):
    """Collect frames reachable from `prev_frame` within one angular step.

    Scans the current z bin (`z_i`) and the next one (`z_i+1`) for frames
    whose inclination/azimuth fall within +/- `inc_step` / `az_step` of the
    previous frame's angles, handling 0/360-degree wrap-around for azimuth.
    Frames already accepted (memory_array[frame] != 0) are skipped.

    Returns:
        (allowed_fr, allowed_fr_z_bin): the candidate frame numbers and a
        dict mapping each candidate to the z bin it was found in.
    """
    allowed_fr=[]
    allowed_fr_z_bin={}
    ##################### angle values from previous bin ###########################
    prev_inc=data[int(prev_frame)][inc_ind]
    prev_az=data[int(prev_frame)][az_ind]
    ###################### boundaries for angles in next step ############################
    inc_top=prev_inc+inc_step
    inc_bott=prev_inc-inc_step
    ######### need to take care of periodicity effects for azimuthal angle
    az_top=prev_az+az_step
    az_top_per=az_top - 360 ###### if az_top is above 360, this variable shifts the top boundary to the periodic image
    az_bott=prev_az-az_step
    az_bott_per=360+az_bott ###### if az_bot is below 0, this variable shifts the bottom boundary to the periodic image
    ################# Create the list of allowable frames using boundaries on angular space #######################
    ############## Create list using frames in the same z bin
    for f in z_list[z_i]:
        n_f=int(f)
        next_inc=data[n_f][inc_ind]
        next_az=data[n_f][az_ind]
        ########## Move in multiple angles in one step. only check frame if its memory_array value is 0 (not previously accepted) ####################
        if (indep==0 and memory_array[n_f]==0):
            ######### if azimuthal bottom boundary is less than 0:
            if ( az_bott < 0 and next_inc>inc_bott and next_inc<inc_top and\
                ( ( next_az>az_bott_per and next_az<360) \
                or (next_az<az_top and next_az > 0) ) ):
                allowed_fr=np.append(allowed_fr,n_f)
                allowed_fr_z_bin[n_f]=z_i
            ####### if azimuthal top boundary is greater than 360
            elif (az_top > 360 and next_inc>inc_bott and next_inc<inc_top and \
                ( (next_az < az_top_per and next_az > 0) \
                or ( next_az > az_bott and next_az < 360 )) ):
                allowed_fr=np.append(allowed_fr,n_f)
                allowed_fr_z_bin[n_f]=z_i
            ###### if az top boundary is < 360 and bott boundary >0, don't worry about periodicity
            elif (az_top < 360 and az_bott > 0 and next_inc>inc_bott and \
                next_inc<inc_top and \
                next_az>az_bott and next_az<az_top):
                allowed_fr=np.append(allowed_fr,n_f)
                allowed_fr_z_bin[n_f]=z_i
    ############ compute similar search for next z bin.
    for f in z_list[int(z_i+1)]:
        n_f=int(f)
        next_inc=data[n_f][inc_ind]
        next_az=data[n_f][az_ind]
        if(memory_array[n_f]==0):
            if ( az_bott < 0 and next_inc>inc_bott and next_inc<inc_top and\
                ( ( next_az>az_bott_per and next_az<360) \
                or (next_az<az_top and next_az > 0) ) ):
                allowed_fr=np.append(allowed_fr,n_f)
                allowed_fr_z_bin[n_f]=z_i+1
            elif (az_top > 360 and next_inc>inc_bott and next_inc<inc_top and \
                ( (next_az < az_top_per and next_az > 0) \
                or ( next_az > az_bott and next_az < 360 )) ):
                allowed_fr=np.append(allowed_fr,n_f)
                allowed_fr_z_bin[n_f]=z_i+1
            # NOTE(review): this branch tests `az_bott < 360` where the same
            # branch in the z_i loop above tests `az_top < 360` -- likely a
            # typo; behavior only differs when az_top == 360 exactly. TODO confirm.
            elif (az_bott < 360 and az_bott > 0 and next_inc>inc_bott and \
                next_inc<inc_top and \
                next_az>az_bott and next_az<az_top):
                allowed_fr=np.append(allowed_fr,n_f)
                allowed_fr_z_bin[n_f]=z_i+1
    return allowed_fr,allowed_fr_z_bin ########### return both the frames as well as the z bin each frame belongs to
############### Procedure to perform actual Monte Carlo Based Pathway Search #############################
def Transition_Search(data,z_list,output_file,N_zbins,inc_step,az_step,z_ind,inc_ind,az_ind,energy_ind,iteration):
    """Monte Carlo pathway search over docked poses.

    Starting from a random pose in the top z bin (seeded by `iteration`),
    repeatedly applies a Metropolis criterion on standardized energies to
    pick the next pose from the angular neighbourhood returned by
    `allowed_frames`, until the last z bin is reached.

    Returns:
        0 when the search dead-ends; otherwise the array of accepted frame
        numbers (also appended as one row to `output_file`).
    """
    ############### Define variables and initialize arrays used in rest of code ##############################
    N_frames=len(data) ### Total poses in docking data
    memory_array=np.zeros(N_frames) #### array used to check if frame has already been accepted
    total_en_unstand=data[:,energy_ind]
    en_stand=standardize_energy(total_en_unstand) #### Standardize all total energies to use in algorithm
    z_i = 0 ############ start in 0th (top) z bin
    acc_frames=[] ######## initialize array holding accepted frames
    acc_energies=[] ######## initialize array holding enery of acc frame
    ############ Pick random initial frame in topmost z bin ###########################
    seed(iteration) ####### seed using user provided iteration
    rand=randint(0,len(z_list[0])-1) ###### pick a random position in the beginning to start
    start_fr=int(z_list[0][rand])
    start_en=en_stand[start_fr]
    acc_frames=np.append(acc_frames,start_fr) ####### add random frame to accepted frames array
    ############# While loop which performs pathway search until the last primary coordinate (z) bin is reached ###################
    step_num=0 ###### step along path identified so far
    while (z_i<N_zbins-1):
        prev_frame=acc_frames[int(step_num)] ########## previous frame identified in path
        prev_en=en_stand[int(prev_frame)]
        ################ Use above allowed_frames procedure to create allowed frames for next step of algorithm ##################
        allowed_frames_proc=allowed_frames(data,z_list,memory_array,prev_frame,inc_ind,az_ind,inc_step,az_step,z_i,indep=0)
        allowed_fr=allowed_frames_proc[0]
        allowed_fr_z_bin=allowed_frames_proc[1]
        allowed_fr_num=len(allowed_fr)
        ####### if no frames are available, terminate pathway search ########
        # Fixed: `allowed_fr == []` compared a numpy array to a list, which is
        # never simply True once np.append has run; test emptiness explicitly.
        if len(allowed_fr)==0:
            return 0
        accepted=0
        rejected=0
        bad_energy=0
        ################ Perform Metropolis search until a pose is accepted #############################
        while (accepted==0):
            ######## if no frames allowed to be checked anymore, terminate pathway #######
            # Fixed: `allowed_fr_num == []` (int vs list) was always False.
            if(rejected>=allowed_fr_num or allowed_fr_num==0):
                return 0
            ####### choose random frame from allowed_fr (fr_try) to check acceptance
            index=randint(0,int(len(allowed_fr)-1))
            fr_try=int(allowed_fr[index])
            en_try=en_stand[fr_try]
            met_crit=m.exp(prev_en-en_try) #Metropolis relation between previous energy and new one.
            ###### automatically accept if met_crit >=1
            ##### if accepted, add to memory array so it wont be checked again, update z_i to new z bin, increase step_num
            if met_crit>=1:
                accepted+=1
                acc_frames=np.append(acc_frames,fr_try)
                z_i=allowed_fr_z_bin[fr_try]
                memory_array[fr_try]=1
                step_num+=1
            ######### otherwise spit random number between 0, 1. if metrop> random number, accept
            else:
                rand=np.random.uniform(0,1)
                if met_crit>rand:
                    accepted+=1
                    acc_frames=np.append(acc_frames,fr_try)
                    z_i=allowed_fr_z_bin[fr_try]
                    memory_array[fr_try]=1
                    step_num+=1
                else:
                    it=np.where(allowed_fr==fr_try)
                    allowed_fr=np.delete(allowed_fr,it) ##### remove from list of allowed poses
                    memory_array[fr_try]=1 #### removing from memory if rejected as well right now
                    rejected+=1
                    continue
    ############### once a path is accepted write to output file ########################
    # Context manager fixes the original's leaked file handle.
    with open(output_file,'a+',newline='') as f:
        writer=csv.writer(f,delimiter=' ',quotechar='|', quoting=csv.QUOTE_MINIMAL)
        writer.writerow(acc_frames)
    return np.array(acc_frames)
|
#algorithm to find the ideal spot
#need parameter for where car is
#need variables for the workinging area perimeter
#need parameter for the ideal perimeter
import math
import matplotlib.pyplot as plt
import numpy as np
class Ideal_Pos:
    """Find candidate 'ideal' positions on a ring around a target point.

    Candidates are sampled every `theta_it` degrees on the circle of radius
    (working_rad + ideal_rad)/2 centred on the target; they can then be
    ranked by distance from the observer's position.
    """
    def __init__(self, r1=1, r2=2, t=30, show=False):
        # working radius around the car
        self.working_rad = r1
        # ideal radius; expected to be larger than the working radius
        self.ideal_rad = r2
        # angular step (degrees) between sampled candidate positions
        self.theta_it = t
        # candidate positions, their distances, and distance-sorted indexes
        self.positions = []
        self.dists = []
        self.indexes = []
        # whether get_ideal() should also draw the candidates
        self.draw_ani = show

    def get_pos(self, x2, y2):
        """Sample ring points every `theta_it` degrees around (x2, y2)."""
        samples = []
        angle = 0
        while angle < 360:
            radians = math.radians(angle)
            px = x2 + ((self.ideal_rad + self.working_rad) / 2) * math.cos(radians)
            py = y2 + ((self.ideal_rad + self.working_rad) / 2) * math.sin(radians)
            samples.append((px, py))
            angle = angle + self.theta_it
        return samples

    def get_dists(self, x, y):
        """Euclidean distance from (x, y) to each stored candidate."""
        return [math.sqrt((x - px) ** 2 + (y - py) ** 2)
                for (px, py) in self.positions]

    def get_ideal(self, x1, y1, x2, y2):
        """Return candidates around (x2, y2) and their indexes sorted by
        distance from (x1, y1), nearest first."""
        self.positions = self.get_pos(x2, y2)
        if self.draw_ani:
            self.my_plot(x1, y1)
        self.dists = self.get_dists(x1, y1)
        # Stable argsort of the distances: nearest candidate's index first.
        self.indexes = sorted(range(len(self.dists)), key=lambda i: self.dists[i])
        return self.positions, self.indexes

    def get_line(self, y1, y2, x1, x2):
        """Sampled points of the line through (x1, y1) and (x2, y2).

        NOTE: divides by (x1 - x2); a vertical line (x1 == x2) raises.
        """
        lo, hi = (x1, x2) if x1 <= x2 else (x2, x1)
        xs = np.arange(lo, hi, 0.01)
        slope = (y1 - y2) / (x1 - x2)
        return (xs, slope * (xs - x1) + y1)

    def my_plot(self, x1, y1):
        """Plot sight lines from (x1, y1) to every candidate on the ring."""
        xs = [p[0] for p in self.positions]
        ys = [p[1] for p in self.positions]
        for px, py in self.positions:
            line_x, line_y = self.get_line(y1, py, x1, px)
            plt.plot(line_x, line_y)
        plt.plot(xs, ys, marker='o')
        plt.show()
def main(cur_x=-2, cur_z=4, car_x=2, car_y=2, show_plot=True):
    """Demo: print ring candidates around the car and their distance ranking."""
    finder = Ideal_Pos(show=show_plot)
    positions, order = finder.get_ideal(cur_x, cur_z, car_x, car_y)
    print(positions)
    print(order)


if __name__ == '__main__':
    main()
|
from autodisc.gui.gui import *
from autodisc.gui.explorationgui import ExplorationGUI
from autodisc.gui.explorationparametergui import ExplorationParametersGUI
from autodisc.gui.observationpreviewgui import ObservationPreviewGUI
from autodisc.gui.observationplayergui import ObservationPlayerGUI
from autodisc.gui.statisticguis import StatisticTableGUI, StatisticLineGUI, StatisticBarGUI
from autodisc.gui.dataviewergui import DataViewerGUI
import autodisc.gui.jupyter
|
from reef import database as db
class Reader(db.Model):
    """A library reader (patron); each reader belongs to one User account."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(100))
    email = db.Column(db.String(100))
    phone = db.Column(db.String(100))
    notes = db.Column(db.String(1000))
    # Owning account; required (nullable=False). Backref exposes user.readers.
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
    user = db.relationship('User', backref=db.backref('readers', lazy='dynamic'))
    # Loan history; lazy='dynamic' yields a query object instead of a list.
    book_records = db.relationship('BookRecord', backref='reader', lazy='dynamic')
# Generated by Django 3.1.6 on 2021-02-05 19:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter App.creator to a plain IntegerField."""
    dependencies = [
        ('main_app', '0009_app_creator'),
    ]
    operations = [
        migrations.AlterField(
            model_name='app',
            name='creator',
            # Fixed: IntegerField does not accept max_length (Django check
            # W122); the option was silently ignored, so dropping it does not
            # change the database schema.
            field=models.IntegerField(),
        ),
    ]
|
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# import recommonmark
# from recommonmark.transform import AutoStructify
github_doc_root = 'https://github.com/lbc-team/ethers.js-docs/tree/master/source/'
# At the bottom of conf.py
def setup(app):
app.add_stylesheet( "css/ethers.css" )
# app.add_config_value('recommonmark_config', {
# 'url_resolver': lambda url: github_doc_root + url[:-4],
# 'auto_toc_tree_section': 'Content',
# }, True)
# app.add_transform(AutoStructify)
# app.add_javascript('//s95.cnzz.com/z_stat.php?id=1265946080&web_id=1265946080')
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
# Project metadata shown in page titles and the generated footer.
project = 'ethers.js ไธญๆๆๆกฃ'
copyright = '2019, ็ป้พ็คพๅบ'
author = 'Tiny็'
# The full version, including alpha/beta/rc tags
release = '4.0.0'
version = '4.0'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# No Sphinx extensions are enabled (recommonmark support is commented out above).
extensions = [
]
# Options forwarded to the sphinx_rtd_theme (navigation/ToC behavior).
html_theme_options = {
    'canonical_url': '',
    'logo_only': False,
    'display_version': True,
    'prev_next_buttons_location': 'bottom',
    'style_external_links': False,
    # 'style_nav_header_background': 'blue',
    # Toc options
    'collapse_navigation': False,
    'sticky_navigation': True,
    'navigation_depth': 4,
    'includehidden': True,
    'titles_only': False,
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# Simplified Chinese UI strings for autogenerated content.
language = 'zh_CN'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
html_last_updated_fmt = '%Y/%b/%d'  # "last updated" stamp format in the footer
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# Read the Docs theme; its options are configured in html_theme_options above.
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
html_search_language = 'zh'  # Chinese tokenization (jieba) for the search index
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js' |
from recordclass import recordclass
from itertools import combinations
import numpy as np
def change_velocity(a1, a2, va1, va2):
    """Apply mutual gravity along one axis: pull each velocity one unit
    toward the other body; equal positions leave both unchanged."""
    if a1 == a2:
        return va1, va2
    pull = 1 if a2 > a1 else -1
    return va1 + pull, va2 - pull
# Part 1 (AoC 2019 day 12): simulate 1000 steps and print the total energy.
Moon = recordclass('Moon', 'n x y z vx vy vz')
f = open("input", "r")
# Lines look like "<x=-1, y=2, z=3>"; strip the delimiters and split on ','.
positions = [x.strip('\n').strip('<').strip('>').split(',') for x in f.readlines()]
moons = []
for n, p in enumerate (positions):
    # Slices skip the "x="/" y="/" z=" prefixes of each component.
    x = int(p[0][2:])
    y = int(p[1][3:])
    z = int(p[2][3:])
    moons.append(Moon(n, x, y, z, 0, 0, 0))
for i in range (0, 1000):
    # Gravity: every pair nudges each other's velocity per axis...
    pairs = combinations(moons, 2)
    for p in pairs:
        m1, m2 = p
        m1.vx, m2.vx = change_velocity(m1.x, m2.x, m1.vx, m2.vx)
        m1.vy, m2.vy = change_velocity(m1.y, m2.y, m1.vy, m2.vy)
        m1.vz, m2.vz = change_velocity(m1.z, m2.z, m1.vz, m2.vz)
    # ...then every moon moves by its velocity.
    for m in moons:
        m.x += m.vx
        m.y += m.vy
        m.z += m.vz
# Total energy = sum of potential (|x|+|y|+|z|) times kinetic (|vx|+|vy|+|vz|).
energy = 0
for m in moons:
    energy += (abs(m.x) + abs(m.y) + abs(m.z)) * (abs(m.vx) + abs(m.vy) + abs(m.vz))
print (energy)
# Part 2: the axes evolve independently, so find each axis's cycle length
# back to its initial (position, velocity) state and combine with an LCM.
moons = []
ix = []
iy = []
iz = []
for n, p in enumerate (positions):
    x = int(p[0][2:])
    y = int(p[1][3:])
    z = int(p[2][3:])
    moons.append(Moon(n, x, y, z, 0, 0, 0))
    # Initial per-axis states (position, velocity=0) to detect the cycle.
    ix.append((x, 0))
    iy.append((y, 0))
    iz.append((z, 0))
cx = 0
cy = 0
cz = 0
foundx = foundy = foundz = False
while not foundx or not foundy or not foundz:
    pairs = combinations(moons, 2)
    curx = []
    cury = []
    curz = []
    for p in pairs:
        m1, m2 = p
        m1.vx, m2.vx = change_velocity(m1.x, m2.x, m1.vx, m2.vx)
        m1.vy, m2.vy = change_velocity(m1.y, m2.y, m1.vy, m2.vy)
        m1.vz, m2.vz = change_velocity(m1.z, m2.z, m1.vz, m2.vz)
    for m in moons:
        m.x += m.vx
        m.y += m.vy
        m.z += m.vz
        curx.append((m.x, m.vx))
        cury.append((m.y, m.vy))
        curz.append((m.z, m.vz))
    # Keep counting each axis only until it first returns to its start state.
    if not foundx:
        cx += 1
        foundx = ix == curx
    if not foundy:
        cy += 1
        foundy = iy == cury
    if not foundz:
        cz += 1
        foundz = iz == curz
steps = np.lcm.reduce([cx, cy, cz])
print (steps)
f.close()
import math
from collections import defaultdict
from typing import Dict, DefaultDict, List
import logging
from bitmex_futures_arbitrage.const import XBTM20, XBTU20
from bitmex_futures_arbitrage.models import Quote, Direction, Order, OrderKind
logger = logging.getLogger()
class PaperOrdersExecutor:
    """ dealing with exchange pseudo-orders in paper-mode """
    def __init__(self):
        # Per symbol: at most one resting limit order per direction.
        self.symbol2direction2order: DefaultDict[str, Dict[str, Order]] = defaultdict(dict)
        # Net (signed) paper position per symbol.
        self.symbol2position: DefaultDict[str, float] = defaultdict(float)
    def orders_execution_on_quotes(
        self,
        quotes: Dict[str, Quote],
    ):
        """ Calculate order execution on quotes.
        Keep in mind that there are no partial fill for now since we use minimum possible size.
        """
        # A resting BUY fills when the market ask crosses down to its price.
        buy_order = self.symbol2direction2order[XBTM20].get(Direction.BUY)
        if buy_order and quotes[XBTM20].ask_price and buy_order.price >= quotes[XBTM20].ask_price:
            logger.info(f'BUY {XBTM20} price={quotes[XBTM20].ask_price} size={buy_order.size} {buy_order=}')
            del self.symbol2direction2order[XBTM20][Direction.BUY]
            self.symbol2position[XBTM20] += buy_order.size
            logger.info(f'{self.symbol2position=}')
        # A resting SELL fills when the market bid crosses up to its price.
        sell_order = self.symbol2direction2order[XBTM20].get(Direction.SELL)
        if sell_order and quotes[XBTM20].bid_price and sell_order.price <= quotes[XBTM20].bid_price:
            logger.info(f'SELL {XBTM20} price={quotes[XBTM20].bid_price} size={sell_order.size} {sell_order=}')
            del self.symbol2direction2order[XBTM20][Direction.SELL]
            self.symbol2position[XBTM20] -= sell_order.size
            logger.info(f'{self.symbol2position=}')
    def manage_orders(self, orders: List[Order], quotes: Dict[str, Quote]):
        """ place / amend / cancel orders """
        # Limit orders not in the desired set are cancelled afterwards.
        desired_limit_orders_symbol_direction = []
        for order in orders:
            if order.kind == OrderKind.MARKET:
                self.execute_market_order(order=order, quotes=quotes)
            elif order.kind == OrderKind.LIMIT:
                self.manage_limit_order(order)
                desired_limit_orders_symbol_direction.append((order.symbol, order.direction))
            else:
                raise ValueError(order.kind)
        for symbol, direction2order in self.symbol2direction2order.items():
            for direction, order in list(direction2order.items()):  # copy to avoid iteration over mutating structure
                if (symbol, direction) not in desired_limit_orders_symbol_direction:
                    self.cancel_limit_order(order)
    def execute_market_order(self, order: Order, quotes: Dict[str, Quote]):
        """ immediately execute market orders using best bid/ask """
        assert order.symbol == XBTU20
        # NOTE(review): a valid direction with a missing quote price also
        # falls through to ValueError(order.direction) -- confirm intended.
        if order.direction == Direction.BUY and quotes[order.symbol].ask_price:
            logger.info(
                f'BUY {order.symbol} '
                f'price={quotes[order.symbol].ask_price} size={order.size} {order=}')
            self.symbol2position[order.symbol] += order.size
            logger.info(f'{self.symbol2position=}')
        elif order.direction == Direction.SELL and quotes[order.symbol].bid_price:
            logger.info(
                f'SELL {order.symbol} '
                f'price={quotes[order.symbol].bid_price} size={order.size} {order=}')
            self.symbol2position[order.symbol] -= order.size
            logger.info(f'{self.symbol2position=}')
        else:
            raise ValueError(order.direction)
    @staticmethod
    def rounded_direction_price(direction: str, price: float) -> float:
        """ floor/ceil buy/sell price """
        # Rounds to the 0.5 tick, always away from crossing the spread.
        if direction == Direction.BUY:
            return math.floor(price * 2) / 2
        elif direction == Direction.SELL:
            return math.ceil(price * 2) / 2
        else:
            raise ValueError(direction)
    def manage_limit_order(self, order: Order):
        """ place / amend limit order """
        order.price = self.rounded_direction_price(direction=order.direction, price=order.price)
        placed_order = self.symbol2direction2order[order.symbol].get(order.direction)
        if placed_order:
            # Amend the resting order in place; a non-positive price cancels.
            if placed_order.price != order.price or placed_order.size != order.size:
                placed_order.price = order.price
                placed_order.size = order.size
                if placed_order.price <= 0:
                    self.cancel_limit_order(placed_order)
                else:
                    self.edit_limit_order(placed_order)
        elif order.price > 0:
            self.place_limit_order(order)
    def place_limit_order(self, order: Order):
        # Register the order so quote updates can fill it later.
        logger.info(f'PLACE order {order}')
        self.symbol2direction2order[order.symbol][order.direction] = order
    def edit_limit_order(self, order: Order):
        # Paper mode: the stored order was already mutated; just log.
        logger.info(f'EDIT order {order}')
    def cancel_limit_order(self, order: Order):
        logger.info(f'CANCEL order {order}')
        del self.symbol2direction2order[order.symbol][order.direction]
|
"""
installs Gemini3D prerequisite libraries for:
* Linux: CentOS, Debian, Ubuntu, Windows Subsystem for Linux
* MacOS: Homebrew
* Windows: MSYS2, Cygwin
assumes GCC/Gfortran
"""
from __future__ import annotations
import subprocess
import sys
from argparse import ArgumentParser
import shutil
# Prerequisite package lists keyed by package manager: compiler, MPI,
# linear algebra (LAPACK/ScaLAPACK/MUMPS) and HDF5 for each platform.
PKG = {
    "yum": [  # CentOS / RHEL; EPEL supplies MUMPS and ScaLAPACK
        "epel-release",
        "gcc-gfortran",
        "MUMPS-openmpi-devel",
        "lapack-devel",
        "scalapack-openmpi-devel",
        "openmpi-devel",
        "hdf5-devel",
    ],
    "apt": [  # Debian / Ubuntu / WSL
        "gfortran",
        "libmumps-dev",
        "liblapack-dev",
        "libscalapack-mpi-dev",
        "libopenmpi-dev",
        "openmpi-bin",
        "libhdf5-dev",
    ],
    "pacman": ["gcc-fortran", "ninja", "lapack", "openmpi", "hdf5"],  # Arch
    "brew": ["gcc", "ninja", "cmake", "lapack", "scalapack", "openmpi", "hdf5"],  # macOS Homebrew
    "cygwin": ["gcc-fortran", "liblapack-devel", "libopenmpi-devel"],
    "msys": [  # MSYS2 mingw64 toolchain on Windows
        "mingw-w64-x86_64-gcc-fortran",
        "mingw-w64-x86_64-ninja",
        "mingw-w64-x86_64-hdf5",
        "mingw-w64-x86_64-lapack",
        "mingw-w64-x86_64-scalapack",
        "mingw-w64-x86_64-mumps",
    ],
}
def main(package_manager: str):
    """Install Gemini3D prerequisite packages for the current platform.

    Chooses (or, on Linux, auto-detects) a package manager, builds the install
    command from PKG, echoes it, runs it, and exits with its return code.

    Raises:
        ValueError: unknown package manager or platform.
        SystemExit: missing tooling, or normal exit with the command's status.
    """
    cmd: list[str] = []
    if sys.platform == "linux":
        if not package_manager:
            from gemini3d.linux_info import get_package_manager

            package_manager = get_package_manager()
        if package_manager == "yum":
            subprocess.run(["sudo", "yum", "--assumeyes", "updateinfo"])
            cmd = ["sudo", "yum", "install"] + PKG["yum"]
        elif package_manager == "apt":
            subprocess.run(["sudo", "apt", "update", "--yes"])
            cmd = ["sudo", "apt", "install"] + PKG["apt"]
        elif package_manager == "pacman":
            # Fixed: this branch previously ran pacman immediately and left
            # `cmd` empty, so subprocess.run(cmd) below raised on an empty
            # argument list after the install had already happened.
            cmd = ["sudo", "pacman", "-S", "--needed"] + PKG["pacman"]
        else:
            raise ValueError(
                f"Unknown package manager {package_manager}, try installing the prereqs manually"
            )
    elif sys.platform == "darwin":
        if not shutil.which("brew"):
            raise SystemExit(
                "We assume Homebrew is available, need to manually install a Fortran compiler otherwise."
            )
        cmd = ["brew", "install"] + PKG["brew"]
        # autobuild Mumps, it's much faster
    elif sys.platform == "cygwin":
        cmd = ["setup-x86_64.exe", "-P"] + PKG["cygwin"]
    elif sys.platform == "win32":
        if not shutil.which("pacman"):
            raise SystemExit("Windows Subsystem for Linux or MSYS2 is recommended.")
        # assume MSYS2
        cmd = ["pacman", "-S", "--needed"] + PKG["msys"]
    else:
        raise ValueError(f"unknown platform {sys.platform}")
    print(" ".join(cmd))
    ret = subprocess.run(cmd)
    raise SystemExit(ret.returncode)
if __name__ == "__main__":
    p = ArgumentParser()
    p.add_argument(
        "package_manager",
        help="specify package manager e.g. apt, yum",
        choices=list(PKG.keys()),
        nargs="?",  # optional: on Linux, main() auto-detects when omitted
    )
    P = p.parse_args()
    main(P.package_manager)
|
import numpy as np
import pygame
import math
ROWS = 6
COLUMNS = 7
# Board cells: 0 = empty, 1 = red (player 1), 2 = green (player 2).
board = np.zeros((ROWS, COLUMNS))
gameover = False
turn = 0  # even -> player 1's move, odd -> player 2's
SLOT = 70  # pixel size of one grid cell
width = COLUMNS * SLOT
height = (ROWS+1)*SLOT  # extra top row previews the next piece
size = (width, height)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
GRAY = (128, 128, 128)
WHITE = ((255, 255, 255))
OFFSET = 70
RADIUS = int(SLOT/2-5)  # piece radius, slightly smaller than half a cell
pygame.init()
def draw(board):
    """Render every board cell onto the global window (row 0 at the bottom)."""
    for col in range(COLUMNS):
        for row in range(ROWS):
            center = (int(col*SLOT + SLOT/2), height - int(row*SLOT + SLOT/2))
            if board[row][col] == 1:
                color = RED
            elif board[row][col] == 2:
                color = GREEN
            else:
                color = WHITE
            pygame.draw.circle(window, color, center, RADIUS)
def valid(board, col):
    """Return True if `col` can still accept a piece (its top slot is empty).

    Row count is derived from the board itself, so the helper works for any
    board size (backward compatible with the 6x7 module globals).
    """
    return board[len(board) - 1][col] == 0
def drop(board, col, piece):
    """Place `piece` in the lowest empty row of `col` (mutates `board`).

    Row count is derived from the board itself, so the helper works for any
    board size. Assumes the column is not full (check with valid() first).
    """
    for r in range(len(board)):
        if board[r][col] == 0:
            board[r][col] = piece
            return
def win(board, piece):
    """Return True if `piece` has four in a row in any direction on `board`.

    Dimensions are derived from `board` itself, so the check works for any
    grid size (backward compatible with the 6x7 module globals).
    """
    rows = len(board)
    cols = len(board[0])
    # horizontal
    for c in range(cols-3):
        for r in range(rows):
            if board[r][c] == piece and board[r][c+1] == piece and board[r][c+2] == piece and board[r][c+3] == piece:
                return True
    # vertical
    for c in range(cols):
        for r in range(rows-3):
            if board[r][c] == piece and board[r+1][c] == piece and board[r+2][c] == piece and board[r+3][c] == piece:
                return True
    # diagonal (positive slope)
    for c in range(cols-3):
        for r in range(rows-3):
            if board[r][c] == piece and board[r+1][c+1] == piece and board[r+2][c+2] == piece and board[r+3][c+3] == piece:
                return True
    # diagonal (negative slope)
    # Fixed: the original iterated range(3, ROWS-3), which is empty for
    # ROWS=6, so negatively-sloped wins were never detected.
    for c in range(cols-3):
        for r in range(3, rows):
            if board[r][c] == piece and board[r-1][c+1] == piece and board[r-2][c+2] == piece and board[r-3][c+3] == piece:
                return True
    return False
print(board)
display_winner = False  # freezes input and shows the banner once someone wins
winner = None
# Font to display winner
font = pygame.font.Font("Pixeboy.ttf", 32)
window = pygame.display.set_mode(size)
pygame.display.set_caption("connect 4")
# draw(board)
# Main game loop: handle events, update the board, redraw every frame.
while not gameover:
    # Move events to the top of the game loop
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            gameover = True
        # need to check if the game is still going on by "not display_winner",
        # This is just to disable user input if we have a winner.
        if event.type == pygame.MOUSEBUTTONDOWN and not display_winner:
            posx = event.pos[0]
            col = math.floor(posx/SLOT)
            if valid(board, col):
                drop(board, col, (turn % 2)+1)
                # Move is played and then change the turn.
                if win(board, (turn % 2)+1):
                    print(f"player {(turn%2) + 1} won")
                    # If we have a winner the variables are set
                    display_winner = True
                    winner = (turn % 2)+1
                turn = 1 if turn == 0 else 0
                print(np.flip(board, 0))
    pygame.draw.rect(window, (0, 0, 0), (0, 0, width, height))
    pygame.draw.rect(window, WHITE, (0, 0, width, SLOT))
    draw(board)
    # If we have a winner,
    if display_winner:
        text = font.render(f'Player {winner} won!', True, BLUE)
        textRect = text.get_rect()
        textRect.center = (width // 2, SLOT // 2)
        window.blit(text, textRect)
        pygame.display.update()
        continue
    mouse_position = pygame.mouse.get_pos()
    # Get x, y position
    posx = SLOT//2 + SLOT * (mouse_position[0] // SLOT)
    # y is SLOT // 2
    posy = SLOT//2
    # Display the preview
    if turn % 2 == 0:
        pygame.draw.circle(window, RED, (posx, posy), RADIUS)
    else:
        pygame.draw.circle(window, GREEN, (posx, posy), RADIUS)
    pygame.display.update()
pygame.quit()
|
#-*-coding:utf-8 -*-
import urllib,web_spider,re,json
def baidu_translate(daifanyi):
    """Translate `daifanyi` via the Baidu open-platform translate endpoint
    (auto-detected source and target languages) and return the first result."""
    host = "openapi.baidu.com"
    YourApiKey = "CpEdx1EKHKw5SF32G93dxPxg"
    url = "http://openapi.baidu.com/public/2.0/bmt/translate?client_id={0}&q={1}&from=auto&to=auto".format(YourApiKey, daifanyi)
    raw_body = web_spider.web_spider(url, host)
    parsed = json.loads(raw_body)
    first_result = parsed["trans_result"][0]
    return first_result["dst"]
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.