hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 | count_classes int64 0 1.6M | score_classes float64 0 1 | count_generators int64 0 651k | score_generators float64 0 1 | count_decorators int64 0 990k | score_decorators float64 0 1 | count_async_functions int64 0 235k | score_async_functions float64 0 1 | count_documentation int64 0 1.04M | score_documentation float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
cae59e6b2b5461e084cfd5ef2de955cd7cc489f7 | 103 | py | Python | test/models/__init__.py | Tomos-Evans/garrison | 6a0eea6822f7a5a64e80852427d4576c9018d0b4 | [
"MIT"
] | 1 | 2018-10-20T15:53:15.000Z | 2018-10-20T15:53:15.000Z | test/models/__init__.py | Tomos-Evans/garrison | 6a0eea6822f7a5a64e80852427d4576c9018d0b4 | [
"MIT"
] | null | null | null | test/models/__init__.py | Tomos-Evans/garrison | 6a0eea6822f7a5a64e80852427d4576c9018d0b4 | [
"MIT"
] | null | null | null | from .ingredient import *
from .drink_component import *
from .drink import *
from .dispenser import *
| 20.6 | 30 | 0.76699 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
cae7b1ecc01e83c210f73c696d1cd08fb5490fc9 | 14,924 | py | Python | bb-master/sandbox/lib/python3.5/site-packages/buildbot/www/oauth2.py | Alecto3-D/testable-greeter | 09e8e488edfb7e46cf5867b2b5a6ebe0b1929f78 | [
"MIT"
] | 2 | 2017-07-11T18:56:27.000Z | 2017-07-28T14:01:12.000Z | bb-master/sandbox/lib/python3.5/site-packages/buildbot/www/oauth2.py | Alecto3-D/testable-greeter | 09e8e488edfb7e46cf5867b2b5a6ebe0b1929f78 | [
"MIT"
] | 1 | 2017-07-28T13:53:41.000Z | 2017-07-31T15:30:40.000Z | bb-master/sandbox/lib/python3.5/site-packages/buildbot/www/oauth2.py | Alecto3-D/testable-greeter | 09e8e488edfb7e46cf5867b2b5a6ebe0b1929f78 | [
"MIT"
] | null | null | null | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from future.moves.urllib.parse import parse_qs
from future.moves.urllib.parse import urlencode
from future.utils import iteritems
import json
import re
import textwrap
from posixpath import join
import jinja2
import requests
from twisted.internet import defer
from twisted.internet import threads
from buildbot import config
from buildbot.util import bytes2unicode
from buildbot.util.logger import Logger
from buildbot.www import auth
from buildbot.www import resource
log = Logger()
class OAuth2LoginResource(auth.LoginResource):
# disable reconfigResource calls
needsReconfig = False
def __init__(self, master, _auth):
auth.LoginResource.__init__(self, master)
self.auth = _auth
def render_POST(self, request):
return self.asyncRenderHelper(request, self.renderLogin)
@defer.inlineCallbacks
def renderLogin(self, request):
code = request.args.get(b"code", [""])[0]
token = request.args.get(b"token", [""])[0]
if not token and not code:
url = request.args.get("redirect", [None])[0]
url = yield self.auth.getLoginURL(url)
raise resource.Redirect(url)
else:
if not token:
details = yield self.auth.verifyCode(code)
else:
details = yield self.auth.acceptToken(token)
if self.auth.userInfoProvider is not None:
infos = yield self.auth.userInfoProvider.getUserInfo(details['username'])
details.update(infos)
session = request.getSession()
session.user_info = details
session.updateSession(request)
state = request.args.get("state", [""])[0]
if state:
for redirect in parse_qs(state).get('redirect', []):
raise resource.Redirect(self.auth.homeUri + "#" + redirect)
raise resource.Redirect(self.auth.homeUri)
class OAuth2Auth(auth.AuthBase):
name = 'oauth2'
getTokenUseAuthHeaders = False
authUri = None
tokenUri = None
grantType = 'authorization_code'
authUriAdditionalParams = {}
tokenUriAdditionalParams = {}
loginUri = None
homeUri = None
sslVerify = None
def __init__(self,
clientId, clientSecret, autologin=False, **kwargs):
auth.AuthBase.__init__(self, **kwargs)
self.clientId = clientId
self.clientSecret = clientSecret
self.autologin = autologin
def reconfigAuth(self, master, new_config):
self.master = master
self.loginUri = join(new_config.buildbotURL, "auth/login")
self.homeUri = new_config.buildbotURL
def getConfigDict(self):
return dict(name=self.name,
oauth2=True,
fa_icon=self.faIcon,
autologin=self.autologin
)
def getLoginResource(self):
return OAuth2LoginResource(self.master, self)
def getLoginURL(self, redirect_url):
"""
Returns the url to redirect the user to for user consent
"""
oauth_params = {'redirect_uri': self.loginUri,
'client_id': self.clientId, 'response_type': 'code'}
if redirect_url is not None:
oauth_params['state'] = urlencode(dict(redirect=redirect_url))
oauth_params.update(self.authUriAdditionalParams)
sorted_oauth_params = sorted(oauth_params.items(), key=lambda val: val[0])
return defer.succeed("%s?%s" % (self.authUri, urlencode(sorted_oauth_params)))
def createSessionFromToken(self, token):
s = requests.Session()
s.params = {'access_token': token['access_token']}
s.verify = self.sslVerify
return s
def get(self, session, path):
ret = session.get(self.resourceEndpoint + path)
return ret.json()
# If the user wants to authenticate directly with an access token they
# already have, go ahead and just directly accept an access_token from them.
def acceptToken(self, token):
def thd():
session = self.createSessionFromToken({'access_token': token})
return self.getUserInfoFromOAuthClient(session)
return threads.deferToThread(thd)
# based on https://github.com/maraujop/requests-oauth
# from Miguel Araujo, augmented to support header based clientSecret
# passing
def verifyCode(self, code):
# everything in deferToThread is not counted with trial --coverage :-(
def thd():
url = self.tokenUri
data = {'redirect_uri': self.loginUri, 'code': code,
'grant_type': self.grantType}
auth = None
if self.getTokenUseAuthHeaders:
auth = (self.clientId, self.clientSecret)
else:
data.update(
{'client_id': self.clientId, 'client_secret': self.clientSecret})
data.update(self.tokenUriAdditionalParams)
response = requests.post(
url, data=data, auth=auth, verify=self.sslVerify)
response.raise_for_status()
responseContent = bytes2unicode(response.content)
try:
content = json.loads(responseContent)
except ValueError:
content = parse_qs(responseContent)
for k, v in iteritems(content):
content[k] = v[0]
except TypeError:
content = responseContent
session = self.createSessionFromToken(content)
return self.getUserInfoFromOAuthClient(session)
return threads.deferToThread(thd)
def getUserInfoFromOAuthClient(self, c):
return {}
class GoogleAuth(OAuth2Auth):
name = "Google"
faIcon = "fa-google-plus"
resourceEndpoint = "https://www.googleapis.com/oauth2/v1"
authUri = 'https://accounts.google.com/o/oauth2/auth'
tokenUri = 'https://accounts.google.com/o/oauth2/token'
authUriAdditionalParams = dict(scope=" ".join([
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile'
]))
def getUserInfoFromOAuthClient(self, c):
data = self.get(c, '/userinfo')
return dict(full_name=data["name"],
username=data['email'].split("@")[0],
email=data["email"],
avatar_url=data["picture"])
class GitHubAuth(OAuth2Auth):
name = "GitHub"
faIcon = "fa-github"
authUri = 'https://github.com/login/oauth/authorize'
authUriAdditionalParams = {'scope': 'user:email read:org'}
tokenUri = 'https://github.com/login/oauth/access_token'
resourceEndpoint = 'https://api.github.com'
getUserTeamsGraphqlTpl = textwrap.dedent(r'''
{%- if organizations %}
query getOrgTeamMembership {
{%- for org_slug, org_name in organizations.items() %}
{{ org_slug }}: organization(login: "{{ org_name }}") {
teams(first: 100) {
edges {
node {
name,
slug
}
}
}
}
{%- endfor %}
}
{%- endif %}
''')
def __init__(self,
clientId, clientSecret, serverURL=None, autologin=False,
apiVersion=3, getTeamsMembership=False, debug=False,
**kwargs):
OAuth2Auth.__init__(self, clientId, clientSecret, autologin, **kwargs)
if serverURL is not None:
# setup for enterprise github
if serverURL.endswith("/"):
serverURL = serverURL[:-1]
self.authUri = '{0}/login/oauth/authorize'.format(serverURL)
self.tokenUri = '{0}/login/oauth/access_token'.format(serverURL)
self.serverURL = serverURL or self.resourceEndpoint
if apiVersion not in (3, 4):
config.error(
'GitHubAuth apiVersion must be 3 or 4 not {}'.format(
apiVersion))
self.apiVersion = apiVersion
if apiVersion == 3:
if getTeamsMembership is True:
config.error(
'Retrieving team membership information using GitHubAuth is only '
'possible using GitHub api v4.')
self.apiResourceEndpoint = '{0}/api/v3'.format(self.serverURL)
else:
self.apiResourceEndpoint = '{0}/graphql'.format(self.serverURL)
if getTeamsMembership:
# GraphQL name aliases must comply with /^[_a-zA-Z][_a-zA-Z0-9]*$/
self._orgname_slug_sub_re = re.compile(r'[^_a-zA-Z0-9]')
self.getUserTeamsGraphqlTplC = jinja2.Template(
self.getUserTeamsGraphqlTpl.strip())
self.getTeamsMembership = getTeamsMembership
self.debug = debug
def post(self, session, query):
if self.debug:
log.info('{klass} GraphQL POST Request: {endpoint} -> '
'DATA:\n----\n{data}\n----',
klass=self.__class__.__name__,
endpoint=self.apiResourceEndpoint,
data=query)
ret = session.post(self.apiResourceEndpoint, json={'query': query})
return ret.json()
def getUserInfoFromOAuthClient(self, c):
if self.apiVersion == 3:
return self.getUserInfoFromOAuthClient_v3(c)
return self.getUserInfoFromOAuthClient_v4(c)
def getUserInfoFromOAuthClient_v3(self, c):
user = self.get(c, '/user')
emails = self.get(c, '/user/emails')
for email in emails:
if email.get('primary', False):
user['email'] = email['email']
break
orgs = self.get(c, '/user/orgs')
return dict(full_name=user['name'],
email=user['email'],
username=user['login'],
groups=[org['login'] for org in orgs])
def getUserInfoFromOAuthClient_v4(self, c):
graphql_query = textwrap.dedent('''
query {
viewer {
email
login
name
organizations(first: 100) {
edges {
node {
login
}
}
}
}
}
''')
data = self.post(c, graphql_query.strip())
data = data['data']
if self.debug:
log.info('{klass} GraphQL Response: {response}',
klass=self.__class__.__name__,
response=data)
user_info = dict(full_name=data['viewer']['name'],
email=data['viewer']['email'],
username=data['viewer']['login'],
groups=[org['node']['login'] for org in
data['viewer']['organizations']['edges']])
if self.getTeamsMembership:
orgs_name_slug_mapping = dict(
[(self._orgname_slug_sub_re.sub('_', n), n)
for n in user_info['groups']])
graphql_query = self.getUserTeamsGraphqlTplC.render(
{'user_info': user_info,
'organizations': orgs_name_slug_mapping})
if graphql_query:
data = self.post(c, graphql_query)
if self.debug:
log.info('{klass} GraphQL Response: {response}',
klass=self.__class__.__name__,
response=data)
teams = set()
for org, team_data in data['data'].items():
for node in team_data['teams']['edges']:
# On github we can mentions organization teams like
# @org-name/team-name. Let's keep the team formatting
# identical with the inclusion of the organization
# since different organizations might share a common
# team name
teams.add('%s/%s' % (orgs_name_slug_mapping[org], node['node']['name']))
user_info['groups'].extend(sorted(teams))
if self.debug:
log.info('{klass} User Details: {user_info}',
klass=self.__class__.__name__,
user_info=user_info)
return user_info
class GitLabAuth(OAuth2Auth):
name = "GitLab"
faIcon = "fa-git"
def __init__(self, instanceUri, clientId, clientSecret, **kwargs):
uri = instanceUri.rstrip("/")
self.authUri = "%s/oauth/authorize" % uri
self.tokenUri = "%s/oauth/token" % uri
self.resourceEndpoint = "%s/api/v3" % uri
super(GitLabAuth, self).__init__(clientId, clientSecret, **kwargs)
def getUserInfoFromOAuthClient(self, c):
user = self.get(c, "/user")
groups = self.get(c, "/groups")
return dict(full_name=user["name"],
username=user["username"],
email=user["email"],
avatar_url=user["avatar_url"],
groups=[g["path"] for g in groups])
class BitbucketAuth(OAuth2Auth):
name = "Bitbucket"
faIcon = "fa-bitbucket"
authUri = 'https://bitbucket.org/site/oauth2/authorize'
tokenUri = 'https://bitbucket.org/site/oauth2/access_token'
resourceEndpoint = 'https://api.bitbucket.org/2.0'
def getUserInfoFromOAuthClient(self, c):
user = self.get(c, '/user')
emails = self.get(c, '/user/emails')
for email in emails["values"]:
if email.get('is_primary', False):
user['email'] = email['email']
break
orgs = self.get(c, '/teams?role=member')
return dict(full_name=user['display_name'],
email=user['email'],
username=user['username'],
groups=[org['username'] for org in orgs["values"]])
| 38.463918 | 96 | 0.577325 | 13,634 | 0.913562 | 1,086 | 0.072769 | 1,113 | 0.074578 | 0 | 0 | 4,053 | 0.271576 |
cae85af56f28cb944803582d2acd6f034c6fb314 | 2,281 | py | Python | bin/contentctl_project/contentctl_core/domain/entities/story.py | arjunkhunti-crest/security_content | 41e354485e5917d3366ef735a9c5b25a20d3b8cc | [
"Apache-2.0"
] | null | null | null | bin/contentctl_project/contentctl_core/domain/entities/story.py | arjunkhunti-crest/security_content | 41e354485e5917d3366ef735a9c5b25a20d3b8cc | [
"Apache-2.0"
] | null | null | null | bin/contentctl_project/contentctl_core/domain/entities/story.py | arjunkhunti-crest/security_content | 41e354485e5917d3366ef735a9c5b25a20d3b8cc | [
"Apache-2.0"
] | null | null | null | import string
import uuid
import requests
from pydantic import BaseModel, validator, ValidationError
from datetime import datetime
from bin.contentctl_project.contentctl_core.domain.entities.security_content_object import SecurityContentObject
from bin.contentctl_project.contentctl_core.domain.entities.story_tags import StoryTags
class Story(BaseModel, SecurityContentObject):
# story spec
name: str
id: str
version: int
date: str
author: str
description: str
narrative: str
references: list
tags: StoryTags
# enrichments
detection_names: list = None
investigation_names: list = None
baseline_names: list = None
author_company: str = None
author_name: str = None
detections: list = None
investigations: list = None
@validator('name')
def name_invalid_chars(cls, v):
invalidChars = set(string.punctuation.replace("-", ""))
if any(char in invalidChars for char in v):
raise ValueError('invalid chars used in name: ' + v)
return v
@validator('id')
def id_check(cls, v, values):
try:
uuid.UUID(str(v))
except:
raise ValueError('uuid is not valid: ' + values["name"])
return v
@validator('date')
def date_valid(cls, v, values):
try:
datetime.strptime(v, "%Y-%m-%d")
except:
raise ValueError('date is not in format YYYY-MM-DD: ' + values["name"])
return v
@validator('description', 'narrative')
def encode_error(cls, v, values, field):
try:
v.encode('ascii')
except UnicodeEncodeError:
raise ValueError('encoding error in ' + field.name + ': ' + values["name"])
return v
# @validator('references')
# def references_check(cls, v, values):
# for reference in v:
# try:
# get = requests.get(reference)
# if not get.status_code == 200:
# raise ValueError('Reference ' + reference + ' is not reachable: ' + values["name"])
# except requests.exceptions.RequestException as e:
# raise ValueError('Reference ' + reference + ' is not reachable: ' + values["name"])
# return v | 30.824324 | 112 | 0.615081 | 1,946 | 0.853135 | 0 | 0 | 942 | 0.412977 | 0 | 0 | 678 | 0.297238 |
cae8b2ce05a3582cc0f1a01c67f2c8e7637a486b | 99 | py | Python | staff/__init__.py | alvienzo720/Dep_Nadine | b23688aa87ba3cfe138f9b243eed3f50a74e1486 | [
"Apache-2.0"
] | 1 | 2019-08-15T00:10:38.000Z | 2019-08-15T00:10:38.000Z | staff/__init__.py | alvienzo720/Dep_Nadine | b23688aa87ba3cfe138f9b243eed3f50a74e1486 | [
"Apache-2.0"
] | 4 | 2021-03-19T16:10:13.000Z | 2022-03-12T00:55:50.000Z | staff/__init__.py | alvienzo720/Dep_Nadine | b23688aa87ba3cfe138f9b243eed3f50a74e1486 | [
"Apache-2.0"
] | 1 | 2020-02-24T08:23:45.000Z | 2020-02-24T08:23:45.000Z | """The Django app which provides member tracking and billing calculation for a coworking space."""
| 49.5 | 98 | 0.787879 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 98 | 0.989899 |
cae8e93abab06a2a41e95369a1d01056c15a4dc8 | 12,758 | py | Python | sedre.py | donerancl/sedre | ad87ea8db7508ffce0060e4aca3eb176fda9e329 | [
"MIT"
] | null | null | null | sedre.py | donerancl/sedre | ad87ea8db7508ffce0060e4aca3eb176fda9e329 | [
"MIT"
] | 1 | 2020-03-21T00:10:49.000Z | 2020-03-21T00:10:49.000Z | sedre.py | donerancl/sedre | ad87ea8db7508ffce0060e4aca3eb176fda9e329 | [
"MIT"
] | 1 | 2020-03-25T14:34:49.000Z | 2020-03-25T14:34:49.000Z | """Output file parser for quantum chemistry.
Intended to be easy, logical to use. Should be quickly incorporable
into workflow by high level functions and objects.
Requires only the Python standard library, sed, and sedre's submodules
Should be agnositic to Python 3.X version,
but was written in Spring 2019 so tested exclusively on Python 3.7.3
nstructions for adding a lookup file
Make a file in sedre/lookups/ with title <program_name>.py
look at an existing file for guidance, make sure to import common
write the lookup file. Section delims are passed directly to sed, so they
need to be formatted s.t. they work with sed (slightly unintuitive)
Energy, properties are regular expressions that only search within the
text sections produced by slicing the file into chunks at the section delimiters.
To sedre/lookups/_init_.py add a line:
from .<program_name> import lookup
In sedre.py add a line with the other elifs:
elif program is '<program_name>':
self.<program_name>()
Then add a function with the other program names:
def <program_name>(self):
self.lookup = copy.deepcopy(lookups.<program_name>.lookup)
###############################
#Data management and import
###############################
#load() converts the output file into two forms, str and list.
#str is easy to search with a global (not restricted to a block) regex
#list makes it easy to take the sections of the computations and search
#..only within a certain type. e.g. get HF dipole by just finding 'dipole'
#..w/in the HF section
###############################
"""
#Modules imported from Python Standard Library
#
import sys
import os
import subprocess as sp
import re
import json
import copy
from time import time
#Imports from
#Submodules
from . import lookups
from . import common
class Parser:
def __init__(self,filename='output.dat',program=None,indict=None):
assert type(filename) == str
assert (type(program) == str) or (program == None)
assert (type(indict) == dict) or (indict == None)
self.file = filename
self.program = program
self.content = {}
self.lookup = {}
self.data = {}
if program is None:
return
elif program is 'molpro':
self.molpro() #started
elif program is 'orca':
self.orca()
elif program is 'psi4':
self.psi4()
elif program is 'qchem':
self.qchem()
elif program is 'cfour':
self.cfour() #started, confusing
elif program is 'mrcc':
self.mrcc()
elif program is 'gaussian':
try:
pass
except:
raise ValueError('Gaussian is NOT supported.')
if indict is not None:
self.data = indict
elif self.load():
self.get_line_numbers(self.lookup,self.data)
#self.lines_to_blocks()
self.lines_to_blocks2(self.data['sections'])
#self.scrape()
self.scrape2(self.lookup['energy'],self.data['sections'],self.data['energy'])
self.scrape2(self.lookup['properties'],self.data['sections'],self.data['properties'])
else:
print('Problem loading file.\nNo data loaded.')
def molpro(self):
"""Grabs core MOLPRO regexes.
Written using version 2010.1.67 output files."""
self.lookup = copy.deepcopy(lookups.molpro.lookup) #need deepcopy s.t. two parser() instances for the same program
#can be open simultaneously
def cfour(self):
"Grabs core CFOUR regexes"
self.lookup = copy.deepcopy(lookups.cfour.lookup)
def orca(self):
"""Grabs core ORCA regexes.
Written using version 4.X output files."""
self.lookup = copy.deepcopy(lookups.orca.lookup)
def psi4(self):
"""Grabs core Psi4 regexes.
Written using version 1.2/1.3 output files."""
self.lookup = copy.deepcopy(lookups.psi4.lookup)
def mrcc(self):
"Grabs core MRCC regexes"
self.lookup = copy.deepcopy(lookups.mrcc.lookup)
def qchem(self):
"""Grabs core QChem regexes.
Written using version 5.0 output files."""
self.lookup = copy.deepcopy(lookups.qchem.lookup)
def look_for(self,rstr,lines=None):
"""looks for regular expression in self.content.
Inputs
rstr : regular expression to search for.
lines=None : line numbers to search between.
"""
if type(rstr) is not str:
return None
if lines is not None:
startline,endline = lines
temp = ''.join(self.content['list'][startline:endline])
#print(temp)
else:
temp = self.content['str']
result = re.findall(rstr,temp)
return result
def line_num(self,rstr,lines=None):
if lines is not None:
startline,endline = lines
temp = ''.join(self.content['list'][startline:endline])
else:
temp = self.content['str']
#result = common.line_num()
def load(self):
"""
Called first when opening a Parser object.
Loads file contents into self.content.
self.content['str'] is contents in raw string form.
self.contents['list'] is contents in list of lines.
"""
if os.path.isfile(self.file):
with open(self.file,'r') as f: self.content['str'] = f.read()
with open(self.file,'r') as f: self.content['list'] = f.readlines()
return True
else:
return False
def get_line_numbers(self,indict,outdict,clean=False):
"""
Uses lookup information (self.lookup) and contents (self.content)
to find line numbers for all regular expressions.
For sections, these are 'start' and 'end' lines to be processed
into sections of the file.
"""
if type(indict) != dict:
return
else:
for key in indict:
if key in outdict:
return
if type(indict[key]) == dict:
outdict[key] = {}
if key == 'energy':
ctemp = True
else:
ctemp = clean
self.get_line_numbers(indict[key],outdict[key],clean=ctemp) #recursive
elif type(indict[key]) == str:
outdict[key] = {}
if clean:
continue
else:
outdict[key]['ln'] = common.line_num(indict[key],self.file) #ln = line numbers
else:
return
def lines_to_blocks(self):
"""
###############################
#lines_to_blocks is called after load() and get_line_numbers()
#uses sed commands to take line information
#..and turn it into blocks.
###############################
"""
for key in self.lookup['sections']:
if self.data['sections'][key]['start']['ln'] is None:
self.data['sections'][key]['hits'] = None
continue
sections = []
for i in self.data['sections'][key]['start']['ln']:
if (len(sections) > 0) and (i <= sections[-1][1]):
continue
for j in self.data['sections'][key]['end']['ln']:
#find closest end after each start
if i < j:
sections.append((i,j))
break
self.data['sections'][key]['hits'] = sections
def lines_to_blocks2(self,indict):#,outdict):
"""
(experimental)
recursive version of sedre.Parser.lines_to_blocks() for upcoming
subsection feature implementation.
"""
if type(indict) != dict:
return
if ('start' in indict.keys()) and ('end' in indict.keys()):
#then this is a section
hits=[]
if (indict['start']['ln'] is not None) and (indict['end']['ln'] is not None):
for i in indict['start']['ln']:
#if (len(hits) > 0) and (i <= sections[-1][1]):
# continue
for j in indict['end']['ln']:
#find closest end after each start
if i < j:
hits.append((i,j))
break #break so we dont make more than one hit from each start match
indict['hits'] = hits
for key in indict:
#print(key)
if (indict[key] not in ('start','end')) and (type(indict[key]) == dict):
#then this is a subsection dict
#what to do?
self.lines_to_blocks2(indict[key])
pass
def scrape2(self,lookup,section,data,supersection=False):
# if (not supersection) and ('hits' not in section.keys()):
# """
# #then we're not a baby section, so we should have our own
# #list of hits!
# #shouldn't happen
# """
# return
# if 'hits' in section.keys():
# #then we are looking at a section of some kind
# print(section)
for attribute in lookup:
if attribute not in data:
data[attribute] = {}
if type(lookup[attribute]) == dict:
#then its either a subsection or a point of distinction
if attribute in section.keys() and 'hits' in section[attribute].keys():
#then its a subsection, and we don't need a supersection
self.scrape2(lookup[attribute],section[attribute],data[attribute]) #recursive
else:
#then its a point of distinction
self.scrape2(lookup[attribute],section,data[attribute]) #recursive with implicit supersection
elif type(lookup[attribute] == str) and ('hits' in section.keys()) and (len(section['hits']) > 0):
data[attribute]['vals'] = [] #make a list to hold numeric values
#then we should use the regex
for hit in section['hits']:
data[attribute]['vals'].append(self.look_for(lookup[attribute],lines=list(hit)))
else:
pass
def scrape(self,subset=['energy','properties']):
"""
###############################
#scrape() is called after lines_to_blocks()
#scrape() searches for energy and property regexes
#..within the appropriate blocks.
###############################
"""
for sub in subset:
for key in self.data['sections']:
hits = self.data['sections'][key]['hits']
if hits is not None and (key in self.data[sub].keys()):
for attribute in self.data[sub][key]:
if self.data[sub][key][attribute] is None:
self.data[sub][key][attribute] = {}
self.data[sub][key][attribute]['vals'] = []
rstr = self.lookup[sub][key][attribute]
if type(rstr) != str:
continue #TODO: need a way to read in subsections e.g. molpro MRCI corrections
for hit in hits:
#print(type(hit))
#print(type(sub))
#print(type(key))
#print(type(attribute))
#print(type(rstr))
#print(rstr)
self.data[sub][key][attribute]['vals'].append(self.look_for(rstr,lines=list(hit)))
def pickup(self):
"returns JSON dump"
return json.dumps(self.data)
def write(self,fname=None):
"""writes to a file
.sdf -> sedre dump file
fname=None : optional filename. If not given then
quickwrite_<datetime>.sdf"""
assert not os.path.isfile(fname)
if fname:
t = fname
else:
t = 'quickwrite_'+'dd'.join(str(time()).split('.'))
t = t + '.sdf'
with open(t,'w') as f: f.write(self.pickup())
| 37.304094 | 122 | 0.524769 | 10,976 | 0.860323 | 0 | 0 | 0 | 0 | 0 | 0 | 5,315 | 0.416601 |
cae9f9112bf10e3c5039ce94fd6acb072c0cb24e | 2,793 | py | Python | backend/api/migrations/0001_initial.py | INSRapperswil/nornir-web | 458e6b24bc373197044b4b7b5da74f16f93a9459 | [
"MIT"
] | 2 | 2021-06-01T08:33:04.000Z | 2021-08-20T04:22:39.000Z | backend/api/migrations/0001_initial.py | INSRapperswil/nornir-web | 458e6b24bc373197044b4b7b5da74f16f93a9459 | [
"MIT"
] | null | null | null | backend/api/migrations/0001_initial.py | INSRapperswil/nornir-web | 458e6b24bc373197044b4b7b5da74f16f93a9459 | [
"MIT"
] | null | null | null | # Generated by Django 3.1.2 on 2020-10-05 10:33
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Inventory',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('hosts_file', models.TextField()),
('groups_file', models.TextField()),
],
),
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('status', models.IntegerField(choices=[(1, 'Scheduled'), (2, 'Running'), (3, 'Finished'), (4, 'Failed')], default=1)),
('date_scheduled', models.DateTimeField(verbose_name='Date Scheduled')),
('date_started', models.DateTimeField(null=True, verbose_name='Date Started')),
('date_finished', models.DateTimeField(null=True, verbose_name='Date Finished')),
('variables', models.TextField()),
('input', models.TextField()),
('result', models.TextField()),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='JobTemplate',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('description', models.TextField()),
('file_path', models.TextField()),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='InventoryFilter',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('filter', models.TextField()),
('inventory', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.inventory')),
('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.task')),
],
),
]
| 45.786885 | 137 | 0.56928 | 2,626 | 0.940208 | 0 | 0 | 0 | 0 | 0 | 0 | 430 | 0.153956 |
caeaeb8e248d85efebefc42cea8e45e4fc2350e0 | 11,051 | py | Python | tests/docx/document_xml_tests.py | JoshBarr/python-mammoth | e24c3d90fee2bcc4c2c3ad47e38c0cbba42945d8 | [
"BSD-2-Clause"
] | 1 | 2015-08-15T21:53:03.000Z | 2015-08-15T21:53:03.000Z | tests/docx/document_xml_tests.py | JoshBarr/python-mammoth | e24c3d90fee2bcc4c2c3ad47e38c0cbba42945d8 | [
"BSD-2-Clause"
] | null | null | null | tests/docx/document_xml_tests.py | JoshBarr/python-mammoth | e24c3d90fee2bcc4c2c3ad47e38c0cbba42945d8 | [
"BSD-2-Clause"
] | null | null | null | import io
from nose.tools import istest, assert_equal
import funk
from mammoth import documents, results
from mammoth.docx.xmlparser import element as xml_element, text as xml_text
from mammoth.docx.document_xml import read_document_xml_element
from mammoth.docx.numbering_xml import Numbering
from mammoth.docx.relationships_xml import Relationships, Relationship
@istest
class ReadXmlElementTests(object):
    """Tests for read_document_xml_element: mapping docx XML elements onto
    mammoth document objects (text, runs, paragraphs, styles, numbering,
    hyperlinks, images) and the handling of ignored/unrecognised elements."""
    # --- plain text reading at each level of nesting ---
    @istest
    def text_from_text_element_is_read(self):
        element = _text_element("Hello!")
        assert_equal(documents.Text("Hello!"), _read_and_get_document_xml_element(element))
    @istest
    def can_read_text_within_run(self):
        element = _run_element_with_text("Hello!")
        assert_equal(
            documents.run([documents.Text("Hello!")]),
            _read_and_get_document_xml_element(element)
        )
    @istest
    def can_read_text_within_paragraph(self):
        element = _paragraph_element_with_text("Hello!")
        assert_equal(
            documents.paragraph([documents.run([documents.Text("Hello!")])]),
            _read_and_get_document_xml_element(element)
        )
    @istest
    def can_read_text_within_document(self):
        element = _document_element_with_text("Hello!")
        assert_equal(
            documents.Document([documents.paragraph([documents.run([documents.Text("Hello!")])])]),
            _read_and_get_document_xml_element(element)
        )
    # --- paragraph properties: style name and numbering ---
    @istest
    def paragraph_has_no_style_if_it_has_no_properties(self):
        element = xml_element("w:p")
        assert_equal(None, _read_and_get_document_xml_element(element).style_name)
    @istest
    def paragraph_has_style_name_read_from_paragraph_properties_if_present(self):
        style_xml = xml_element("w:pStyle", {"w:val": "Heading1"})
        properties_xml = xml_element("w:pPr", {}, [style_xml])
        paragraph_xml = xml_element("w:p", {}, [properties_xml])
        paragraph = _read_and_get_document_xml_element(paragraph_xml)
        assert_equal("Heading1", paragraph.style_name)
    @istest
    def paragraph_has_no_numbering_if_it_has_no_numbering_properties(self):
        element = xml_element("w:p")
        assert_equal(None, _read_and_get_document_xml_element(element).numbering)
    @istest
    def paragraph_has_numbering_properties_from_paragraph_properties_if_present(self):
        numbering_properties_xml = xml_element("w:numPr", {}, [
            xml_element("w:ilvl", {"w:val": "1"}),
            xml_element("w:numId", {"w:val": "42"}),
        ])
        properties_xml = xml_element("w:pPr", {}, [numbering_properties_xml])
        paragraph_xml = xml_element("w:p", {}, [properties_xml])
        numbering = Numbering({"42": {"1": documents.numbering_level("1", True)}})
        paragraph = _read_and_get_document_xml_element(paragraph_xml, numbering=numbering)
        assert_equal("1", paragraph.numbering.level_index)
        assert_equal(True, paragraph.numbering.is_ordered)
    # --- run properties: style name, bold, italic ---
    @istest
    def run_has_no_style_if_it_has_no_properties(self):
        element = xml_element("w:r")
        assert_equal(None, _read_and_get_document_xml_element(element).style_name)
    @istest
    def run_has_style_name_read_from_run_properties_if_present(self):
        style_xml = xml_element("w:rStyle", {"w:val": "Emphasis"})
        run = self._read_run_with_properties([style_xml])
        assert_equal("Emphasis", run.style_name)
    @istest
    def run_is_not_bold_if_bold_element_is_not_present(self):
        run = self._read_run_with_properties([])
        assert_equal(False, run.is_bold)
    @istest
    def run_is_bold_if_bold_element_is_present(self):
        run = self._read_run_with_properties([xml_element("w:b")])
        assert_equal(True, run.is_bold)
    @istest
    def run_is_not_italic_if_italic_element_is_not_present(self):
        run = self._read_run_with_properties([])
        assert_equal(False, run.is_italic)
    @istest
    def run_is_italic_if_italic_element_is_present(self):
        run = self._read_run_with_properties([xml_element("w:i")])
        assert_equal(True, run.is_italic)
    # Helper (not a test): wrap the given run properties in a w:r/w:rPr and
    # read the resulting run.
    def _read_run_with_properties(self, properties):
        properties_xml = xml_element("w:rPr", {}, properties)
        run_xml = xml_element("w:r", {}, [properties_xml])
        return _read_and_get_document_xml_element(run_xml)
    @istest
    def can_read_tab_element(self):
        element = xml_element("w:tab")
        tab = _read_and_get_document_xml_element(element)
        assert_equal(documents.tab(), tab)
    # --- container elements whose children are read transparently ---
    @istest
    def children_of_w_ins_are_converted_normally(self):
        element = xml_element("w:p", {}, [
            xml_element("w:ins", {}, [
                xml_element("w:r")
            ])
        ])
        assert_equal(
            documents.paragraph([documents.run([])]),
            _read_and_get_document_xml_element(element)
        )
    @istest
    def children_of_w_smart_tag_are_converted_normally(self):
        element = xml_element("w:p", {}, [
            xml_element("w:smartTag", {}, [
                xml_element("w:r")
            ])
        ])
        assert_equal(
            documents.paragraph([documents.run([])]),
            _read_and_get_document_xml_element(element)
        )
    # --- hyperlinks: resolved via the relationships table ---
    @istest
    def hyperlink_is_read_if_it_has_a_relationship_id(self):
        relationships = Relationships({
            "r42": Relationship(target="http://example.com")
        })
        run_element = xml_element("w:r")
        element = xml_element("w:hyperlink", {"r:id": "r42"}, [run_element])
        assert_equal(
            documents.hyperlink("http://example.com", [documents.run([])]),
            _read_and_get_document_xml_element(element, relationships=relationships)
        )
    @istest
    def hyperlink_is_ignored_if_it_does_not_have_a_relationship_id(self):
        run_element = xml_element("w:r")
        element = xml_element("w:hyperlink", {}, [run_element])
        assert_equal(
            [documents.run([])],
            _read_and_get_document_xml_element(element)
        )
    # --- pictures: image bytes and content type resolved through mocks ---
    @istest
    @funk.with_context
    def can_read_inline_pictures(self, context):
        drawing_element = _create_inline_image(
            relationship_id="rId5",
            description="It's a hat",
        )
        image_bytes = b"Not an image at all!"
        relationships = Relationships({
            "rId5": Relationship(target="media/hat.png")
        })
        docx_file = context.mock()
        funk.allows(docx_file).open("word/media/hat.png").returns(io.BytesIO(image_bytes))
        content_types = context.mock()
        funk.allows(content_types).find_content_type("word/media/hat.png").returns("image/png")
        image = _read_and_get_document_xml_element(
            drawing_element,
            content_types=content_types,
            relationships=relationships,
            docx_file=docx_file,
        )[0]
        assert_equal(documents.Image, type(image))
        assert_equal("It's a hat", image.alt_text)
        assert_equal("image/png", image.content_type)
        with image.open() as image_file:
            assert_equal(image_bytes, image_file.read())
    @istest
    @funk.with_context
    def can_read_anchored_pictures(self, context):
        drawing_element = _create_anchored_image(
            relationship_id="rId5",
            description="It's a hat",
        )
        image_bytes = b"Not an image at all!"
        relationships = Relationships({
            "rId5": Relationship(target="media/hat.png")
        })
        docx_file = context.mock()
        funk.allows(docx_file).open("word/media/hat.png").returns(io.BytesIO(image_bytes))
        content_types = context.mock()
        funk.allows(content_types).find_content_type("word/media/hat.png").returns("image/png")
        image = _read_and_get_document_xml_element(
            drawing_element,
            content_types=content_types,
            relationships=relationships,
            docx_file=docx_file,
        )[0]
        assert_equal(documents.Image, type(image))
        assert_equal("It's a hat", image.alt_text)
        assert_equal("image/png", image.content_type)
        with image.open() as image_file:
            assert_equal(image_bytes, image_file.read())
    # --- ignored and unrecognised elements ---
    @istest
    def ignored_elements_are_ignored_without_message(self):
        element = xml_element("w:bookmarkStart")
        result = read_document_xml_element(element)
        assert_equal(None, result.value)
        assert_equal([], result.messages)
    @istest
    def unrecognised_elements_emit_warning(self):
        element = xml_element("w:huh", {}, [])
        result = read_document_xml_element(element)
        expected_warning = results.warning("An unrecognised element was ignored: w:huh")
        assert_equal([expected_warning], result.messages)
    @istest
    def unrecognised_elements_are_ignored(self):
        element = xml_element("w:huh", {}, [])
        assert_equal(None, read_document_xml_element(element).value)
    @istest
    def unrecognised_children_are_ignored(self):
        element = xml_element("w:r", {}, [_text_element("Hello!"), xml_element("w:huh", {}, [])])
        assert_equal(
            documents.run([documents.Text("Hello!")]),
            read_document_xml_element(element).value
        )
def _read_and_get_document_xml_element(*args, **kwargs):
    """Read a document XML element and return its value, asserting that
    reading emitted no warning/error messages."""
    outcome = read_document_xml_element(*args, **kwargs)
    assert_equal([], outcome.messages)
    return outcome.value
def _document_element_with_text(text):
    """Build a minimal w:document/w:body tree containing one paragraph of text."""
    body = xml_element("w:body", {}, [_paragraph_element_with_text(text)])
    return xml_element("w:document", {}, [body])
def _paragraph_element_with_text(text):
    """Build a w:p element containing a single run with the given text."""
    run = _run_element_with_text(text)
    return xml_element("w:p", {}, [run])
def _run_element_with_text(text):
    """Build a w:r element containing a single text element."""
    text_element = _text_element(text)
    return xml_element("w:r", {}, [text_element])
def _text_element(value):
    """Build a w:t element wrapping a single text node."""
    text_node = xml_text(value)
    return xml_element("w:t", {}, [text_node])
def _create_inline_image(description, relationship_id):
    """Build a w:drawing element containing an inline (wp:inline) picture."""
    image_children = _create_image_elements(description, relationship_id)
    inline = xml_element("wp:inline", {}, image_children)
    return xml_element("w:drawing", {}, [inline])
def _create_anchored_image(description, relationship_id):
    """Build a w:drawing element containing an anchored (wp:anchor) picture."""
    image_children = _create_image_elements(description, relationship_id)
    anchor = xml_element("wp:anchor", {}, image_children)
    return xml_element("w:drawing", {}, [anchor])
def _create_image_elements(description, relationship_id):
    """Build the docPr/graphic children shared by inline and anchored images.

    The a:blip carries the relationship id pointing at the image part; the
    wp:docPr carries the alt-text description.
    """
    blip = xml_element("a:blip", {"r:embed": relationship_id})
    blip_fill = xml_element("pic:blipFill", {}, [blip])
    picture = xml_element("pic:pic", {}, [blip_fill])
    graphic_data = xml_element("a:graphicData", {}, [picture])
    doc_properties = xml_element("wp:docPr", {"descr": description})
    graphic = xml_element("a:graphic", {}, [graphic_data])
    return [doc_properties, graphic]
| 36.114379 | 99 | 0.64139 | 9,181 | 0.830785 | 0 | 0 | 9,189 | 0.831508 | 0 | 0 | 934 | 0.084517 |
caec913d6621cf7f754967667188e3d0cafa8eeb | 3,982 | py | Python | tests/primitives/high_precision_condition_slowtest.py | gift-surg/puma | 58beae3459a0c8d96adfe9af323e26868428df4d | [
"Apache-2.0"
] | null | null | null | tests/primitives/high_precision_condition_slowtest.py | gift-surg/puma | 58beae3459a0c8d96adfe9af323e26868428df4d | [
"Apache-2.0"
] | 13 | 2020-05-04T14:14:58.000Z | 2020-07-29T16:37:03.000Z | tests/primitives/high_precision_condition_slowtest.py | gift-surg/puma | 58beae3459a0c8d96adfe9af323e26868428df4d | [
"Apache-2.0"
] | null | null | null | import time
from threading import Thread
from unittest import TestCase
from puma.primitives import HighPrecisionCondition
from tests.github_issue_11_quickfix import skip_on_windows_until_github_issue_11_is_resolved
# Wait timeouts (seconds) exercised by the precision tests; several values
# are clustered around 14-18ms — presumably chosen to straddle a ~15ms
# scheduler granularity (TODO confirm).
TIMEOUTS = [0.0, 0.000001, 0.014, 0.015, 0.016, 0.017, 0.018, 0.3]
# Maximum allowed overshoot (seconds) beyond a requested wait timeout.
PRECISION = 0.005
# Delay (seconds) before the helper thread notifies in the notification tests.
TIMEOUT = 0.3
class HighPrecisionConditionTest(TestCase):
    # HighPrecisionCondition is derived from threading.Condition, and only overrides the wait() method. We only test the modified behaviour, not the base class's behaviour.
    @skip_on_windows_until_github_issue_11_is_resolved
    def test_if_times_out(self) -> None:
        self._test_wait_when_time_out(0.1)
    def test_if_notified_with_timeout(self) -> None:
        # Timeout far longer than the notification delay: wait() must return
        # when notified, not when the timeout expires.
        self._test_wait_when_notified(TIMEOUT * 3)
    def test_if_notified_timeout_is_none(self) -> None:
        # timeout=None means wait forever; a (wrongly implemented) default
        # timeout shorter than TIMEOUT would make wait() return False early.
        self._test_wait_when_notified(None)
    @skip_on_windows_until_github_issue_11_is_resolved
    def test_wait_timeout_precision(self) -> None:
        for timeout in TIMEOUTS:
            time.sleep(0.25)  # Try to reduce load on CPU so that test is less likely to fail on heavily loaded machine
            self._test_wait_when_time_out(timeout)
    @skip_on_windows_until_github_issue_11_is_resolved
    def test_wait_for_timeout_precision(self) -> None:
        for timeout in TIMEOUTS:
            time.sleep(0.25)  # Try to reduce load on CPU so that test is less likely to fail on heavily loaded machine
            self._test_wait_for_when_time_out(timeout)
    def _test_wait_when_notified(self, wait_timeout) -> None:
        """Shared body of the two notification tests (previously duplicated).

        Notify the condition from a helper thread after TIMEOUT seconds and
        check that wait(wait_timeout) returns True promptly once notified.
        wait_timeout is either None (wait forever) or a timeout much larger
        than TIMEOUT, so an early timeout would fail the assertions."""
        condition = HighPrecisionCondition()
        thread = Thread(target=self._sleep_then_notify, args=[TIMEOUT, condition])
        thread.start()
        try:
            with condition:
                t1 = time.perf_counter()
                ret = condition.wait(wait_timeout)
                t2 = time.perf_counter()
                self.assertTrue(ret)
                self.assertGreaterEqual(t2 - t1, TIMEOUT)  # Check the wait did not return until the condition was raised
                self.assertLess(t2 - t1, TIMEOUT * 2)  # Check that the wait returned promptly once the condition was raised
        finally:
            thread.join()
    def _test_wait_when_time_out(self, timeout: float) -> None:
        """Check wait(timeout) returns False and takes between timeout and
        timeout + PRECISION seconds when nothing notifies the condition."""
        condition = HighPrecisionCondition()
        with condition:
            t1 = time.perf_counter()
            ret = condition.wait(timeout)
            t2 = time.perf_counter()
        self.assertFalse(ret)
        self.assertGreaterEqual(t2 - t1, timeout, f"Took {t2 - t1} when timeout was {timeout}")
        self.assertLess(t2 - t1, timeout + PRECISION, f"Took {t2 - t1} when timeout was {timeout}")
    def _test_wait_for_when_time_out(self, timeout: float) -> None:
        """As _test_wait_when_time_out, but via wait_for() with an always-false
        predicate so the predicate can never end the wait early."""
        condition = HighPrecisionCondition()
        with condition:
            t1 = time.perf_counter()
            ret = condition.wait_for(lambda: False, timeout)
            t2 = time.perf_counter()
        self.assertFalse(ret)
        self.assertGreaterEqual(t2 - t1, timeout, f"Took {t2 - t1} when timeout was {timeout}")
        self.assertLess(t2 - t1, timeout + PRECISION, f"Took {t2 - t1} when timeout was {timeout}")
    @staticmethod
    def _sleep_then_notify(delay: float, condition: HighPrecisionCondition) -> None:
        # Runs on the helper thread: acquire the condition after `delay`
        # seconds and wake the waiting test thread.
        time.sleep(delay)
        with condition:
            condition.notify()
| 45.770115 | 172 | 0.659216 | 3,663 | 0.91989 | 0 | 0 | 931 | 0.233802 | 0 | 0 | 869 | 0.218232 |
caedfc9ca4d335bf9a6a0bb330a7d228fed7431b | 4,163 | py | Python | mouselight/visualizations/view_ng_mouselight_n5.py | pattonw/mouselight | 296e6df7d4e79776ed9f8533d17d937bb6866082 | [
"MIT"
] | null | null | null | mouselight/visualizations/view_ng_mouselight_n5.py | pattonw/mouselight | 296e6df7d4e79776ed9f8533d17d937bb6866082 | [
"MIT"
] | null | null | null | mouselight/visualizations/view_ng_mouselight_n5.py | pattonw/mouselight | 296e6df7d4e79776ed9f8533d17d937bb6866082 | [
"MIT"
] | null | null | null | from funlib.show.neuroglancer import add_layer, ScalePyramid
import argparse
import daisy
import glob
import neuroglancer
import numpy as np
import os
import webbrowser
from swc_parser import _parse_swc
from pathlib import Path
import itertools
import random
import logging
# Globally unique ids for neuroglancer annotations (must not repeat across layers).
ngid = itertools.count(start=1)
parser = argparse.ArgumentParser()
# NOTE(review): --file, --datasets and --synapses are parsed but never used in
# the rest of this script — confirm whether they are vestigial.
parser.add_argument(
    "--file", "-f", type=str, action="append", help="The path to the container to show"
)
parser.add_argument(
    "--datasets",
    "-d",
    type=str,
    nargs="+",
    action="append",
    help="The datasets in the container to show",
)
parser.add_argument(
    "--synapses",
    "-s",
    type=str,
    action="append",
    help="A numpy npz containing synapse annotations as stored by "
    "synful.gunpowder.ExtractSynapses",
)
parser.add_argument(
    "--time",
    "-t",
    type=int,
    action="store",
    dest="minutes",
    default=0,
    help="How long you want neuroglancer to stay available",
)
parser.add_argument(
    "--output",
    "-o",
    type=str,
    action="store",
    dest="log",
    default="",
    help="Where to output url to",
)
args = parser.parse_args()
print("passed in arguments: {}".format(args))
minutes = args.minutes
print("showing neuroglancer for {} minutes".format(minutes))
# Log to the requested file, or to stderr when no --output was given.
if args.log != "":
    logging.basicConfig(level=logging.INFO, filename=args.log)
else:
    logging.basicConfig(level=logging.INFO)
# Bind on all interfaces so the viewer URL is reachable from other machines.
neuroglancer.set_server_bind_address("0.0.0.0")
viewer = neuroglancer.Viewer()
# NOTE(review): this first swc_path is immediately overwritten by the
# assignment below — dead code; confirm which sample path is intended.
swc_path = Path(
    "/nrs/funke/mouselight-v2/2017-07-02",
    "consensus-neurons-with-machine-centerpoints-labelled-as-swcs/G-002.swc",
)
swc_path = Path(
    "/groups/mousebrainmicro/mousebrainmicro/cluster/2018-07-02/carver/augmented-with-skeleton-nodes-as-swcs/G-002.swc"
)
n5_path = Path(
    "/nrs/funke/mouselight-v2/2018-07-02",
    "consensus-neurons-with-machine-centerpoints-labelled-as-swcs-carved.n5/",
)
transform = Path("/nrs/mouselight/SAMPLES/2018-07-02/transform.txt")
def load_transform(transform_path: Path):
    """Parse a mouselight ``transform.txt`` file into origin and spacing.

    The file contains one ``name: value`` pair per line; the names used here
    are ``ox``/``oy``/``oz`` (origin), ``sx``/``sy``/``sz`` (spacing) and
    ``nl`` (number of pyramid levels).

    Improvements over the previous version: the file handle is closed
    (it was opened and never closed), blank lines are skipped, and stray
    whitespace around names no longer breaks the key lookup.

    :param transform_path: path to the transform file
    :return: tuple of (origin, spacing) numpy arrays
    :raises KeyError: if a required constant is missing from the file
    :raises ValueError: if a value cannot be parsed as a float
    """
    constants = {}
    with transform_path.open("r") as transform_file:
        for line in transform_file:
            line = line.strip()
            if not line:
                continue
            # partition (rather than split) tolerates values containing ':'
            variable, _, value = line.partition(":")
            constants[variable.strip()] = float(value)
    spacing = (
        np.array([constants["sx"], constants["sy"], constants["sz"]])
        / 2 ** (constants["nl"] - 1)
        / 1000
    )
    # Snap the origin onto the voxel grid, then convert to the same units as
    # the spacing (divide by 1000) — unchanged from the original computation.
    origin = spacing * (
        (np.array([constants["ox"], constants["oy"], constants["oz"]]) // spacing)
        / 1000
    )
    return origin, spacing
def swc_to_voxel_coords(swc_coord, origin, spacing):
    """Convert a swc-space coordinate into integer voxel indices."""
    relative = swc_coord - origin
    fractional_voxels = relative / spacing
    return np.round(fractional_voxels).astype(int)
# Build neuroglancer annotations from the consensus neuron swc.
neuron_graph = _parse_swc(swc_path)
origin, spacing = load_transform(transform)
voxel_size = spacing
# NOTE(review): voxel_size_rounded is never used below — dead assignment.
voxel_size_rounded = np.array((10, 3, 3)[::-1])
nodes = []
edges = []
print(len(neuron_graph.nodes))
# One ellipsoid per edge start plus one line per edge, capped after ~10000
# annotations to keep the viewer responsive.
for node_a, node_b in neuron_graph.edges:
    a = swc_to_voxel_coords(neuron_graph.nodes[node_a]["location"], origin, spacing)
    b = swc_to_voxel_coords(neuron_graph.nodes[node_b]["location"], origin, spacing)
    pos_u = a
    pos_v = b
    nodes.append(
        neuroglancer.EllipsoidAnnotation(
            center=pos_u, radii=(3, 3, 3) / voxel_size, id=next(ngid)
        )
    )
    edges.append(
        neuroglancer.LineAnnotation(point_a=pos_u, point_b=pos_v, id=next(ngid))
    )
    if len(nodes) > 10000:
        break
# Mark the endpoint of the last processed edge with a smaller ellipsoid.
# NOTE(review): if the graph has no edges, pos_v is unbound here and this
# raises NameError — confirm inputs always contain at least one edge.
nodes.append(
    neuroglancer.EllipsoidAnnotation(
        center=pos_v, radii=(1, 1, 1) / voxel_size, id=next(ngid)
    )
)
a = daisy.open_ds(str(n5_path.absolute()), "volume")
with viewer.txn() as s:
    add_layer(s, a, "volume", shader="rgb", c=[0, 0, 0])
with viewer.txn() as s:
    s.layers["edges"] = neuroglancer.AnnotationLayer(
        filter_by_segmentation=False, annotation_color="#add8e6", annotations=edges
    )
    s.layers["nodes"] = neuroglancer.AnnotationLayer(
        filter_by_segmentation=False, annotation_color="#ff00ff", annotations=nodes
    )
url = str(viewer)
logging.info(url)
import time
# Keep the viewer process alive for the requested number of minutes.
time.sleep(60 * minutes)
try:
    # For very short (or zero) durations, block until the user dismisses us.
    if minutes < 1:
        input("Press ENTER to exit:")
except (EOFError, KeyboardInterrupt):
    # Previously a bare `except:` swallowed every exception (including
    # SystemExit); only the expected "no stdin" / Ctrl-C cases exit quietly.
    pass
| 24.779762 | 119 | 0.665866 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 962 | 0.231083 |
caef5559516f4fd218345acf0eae5ad95febdf7a | 1,470 | py | Python | BERT_Custom/bert_sequence_tagger/metrics.py | gp201/BERT | 0479bfb4faf7fb107acb38bd1a0e27a23719aa92 | [
"MIT"
] | 1 | 2020-03-20T09:58:58.000Z | 2020-03-20T09:58:58.000Z | BERT_Custom/bert_sequence_tagger/metrics.py | gp201/BERT | 0479bfb4faf7fb107acb38bd1a0e27a23719aa92 | [
"MIT"
] | null | null | null | BERT_Custom/bert_sequence_tagger/metrics.py | gp201/BERT | 0479bfb4faf7fb107acb38bd1a0e27a23719aa92 | [
"MIT"
] | null | null | null | import itertools
from sklearn.metrics import f1_score as f1_score_sklearn
from seqeval.metrics import f1_score
from sklearn.metrics import classification_report
def f1_entity_level(*args, **kwargs):
    """Entity-level F1 via seqeval's f1_score: a prediction counts as correct
    only when the whole entity span and type match. Arguments are forwarded
    unchanged to seqeval.metrics.f1_score."""
    return f1_score(*args, **kwargs)
def f1_token_level(true_labels, predictions):
    """Micro-averaged token-level F1 over the flattened sequences, ignoring
    the '[PAD]' and 'O' labels."""
    flat_true = list(itertools.chain.from_iterable(true_labels))
    flat_pred = list(itertools.chain.from_iterable(predictions))
    labels = [label for label in set(flat_true) if label not in ('[PAD]', 'O')]
    return f1_score_sklearn(flat_true, flat_pred, average='micro', labels=labels)
def f1_per_token(true_labels, predictions):
    """Per-label precision/recall/F1 text report over the flattened token
    sequences, ignoring the '[PAD]' and 'O' labels.

    Fix: sklearn's classification_report signature is (y_true, y_pred); the
    arguments were previously passed the other way round, which silently
    swapped precision and recall in the report.
    """
    true_labels = list(itertools.chain(*true_labels))
    predictions = list(itertools.chain(*predictions))
    labels = list(set(true_labels) - {'[PAD]', 'O'})
    return classification_report(true_labels,
                                 predictions,
                                 labels=labels)
def f1_per_token_plot(true_labels, predictions):
    """As f1_per_token, but returns the report as a dict (output_dict=True)
    for programmatic use/plotting.

    Fix: sklearn's classification_report signature is (y_true, y_pred); the
    arguments were previously passed the other way round, which silently
    swapped precision and recall in the report.
    """
    true_labels = list(itertools.chain(*true_labels))
    predictions = list(itertools.chain(*predictions))
    labels = list(set(true_labels) - {'[PAD]', 'O'})
    return classification_report(true_labels,
                                 predictions,
                                 labels=labels,
                                 output_dict=True)
| 35 | 57 | 0.572109 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 37 | 0.02517 |
caef5626558b749af00c48af86762d3db322f006 | 15,501 | py | Python | mediagrains/testsignalgenerator.py | bbc/rd-apmm-python-lib-mediagrains | 84c9de511cc53418c277867eaf143f2cc8730d02 | [
"ECL-2.0",
"Apache-2.0"
] | 6 | 2018-03-26T23:49:34.000Z | 2021-12-23T10:06:09.000Z | mediagrains/testsignalgenerator.py | bbc/rd-apmm-python-lib-mediagrains | 84c9de511cc53418c277867eaf143f2cc8730d02 | [
"ECL-2.0",
"Apache-2.0"
] | 34 | 2018-03-21T16:45:10.000Z | 2022-03-28T13:27:34.000Z | mediagrains/testsignalgenerator.py | bbc/rd-apmm-python-lib-mediagrains | 84c9de511cc53418c277867eaf143f2cc8730d02 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | #
# Copyright 2018 British Broadcasting Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""\
The submodule of mediagrains which contains code for generating test video
grains.
This module is deprecated, please use mediagrains.patterngenerators instead.
"""
from fractions import Fraction
from mediatimestamp.immutable import TimeOffset, Timestamp
from copy import deepcopy
import struct
import fractions
from deprecated import deprecated
from . import AudioGrain
from .cogenums import CogFrameFormat, CogAudioFormat
from .patterngenerators.video import LumaSteps as LumaStepsPatternGenerator
from .patterngenerators.video import ColourBars as ColourBarsPatternGenerator
from .patterngenerators.audio import Tone as TonePatternGenerator
from .patterngenerators.audio import Silence as SilencePatternGenerator
# NOTE(review): AudioGrainsLoopingData is defined below but not listed here —
# confirm whether its omission from the public API is intentional.
__all__ = ["LumaSteps", "Tone1K", "Tone", "Silence", "ColourBars", "MovingBarOverlay"]
# Per-format pixel information used by MovingBarOverlay, keyed by
# CogFrameFormat. Tuple layout, with the three (offset, range) pairs in
# Y, U, V order:
#   (num_bytes_per_sample, (offset, range), (offset, range), (offset, range), active_bits_per_sample)
pixel_ranges = {
    CogFrameFormat.U8_444: (1, (16, 235-16), (128, 224), (128, 224), 8),
    CogFrameFormat.U8_422: (1, (16, 235-16), (128, 224), (128, 224), 8),
    CogFrameFormat.U8_420: (1, (16, 235-16), (128, 224), (128, 224), 8),
    CogFrameFormat.S16_444_10BIT: (2, (64, 940-64), (512, 896), (512, 896), 10),
    CogFrameFormat.S16_422_10BIT: (2, (64, 940-64), (512, 896), (512, 896), 10),
    CogFrameFormat.S16_420_10BIT: (2, (64, 940-64), (512, 896), (512, 896), 10),
    CogFrameFormat.S16_444_12BIT: (2, (256, 3760-256), (2048, 3584), (2048, 3584), 12),
    CogFrameFormat.S16_422_12BIT: (2, (256, 3760-256), (2048, 3584), (2048, 3584), 12),
    CogFrameFormat.S16_420_12BIT: (2, (256, 3760-256), (2048, 3584), (2048, 3584), 12),
    CogFrameFormat.S16_444: (2, (4096, 60160-4096), (32768, 57344), (32768, 57344), 16),
    CogFrameFormat.S16_422: (2, (4096, 60160-4096), (32768, 57344), (32768, 57344), 16),
    CogFrameFormat.S16_420: (2, (4096, 60160-4096), (32768, 57344), (32768, 57344), 16),
}
@deprecated(version="2.13.0", reason="Please use mediagrains.patterngenerators instead")
def LumaSteps(src_id, flow_id, width, height,
              rate=Fraction(25, 1),
              origin_timestamp=None,
              cog_frame_format=CogFrameFormat.U8_444,
              step=1):
    """Returns a generator for luma-step video grains in the specified format.

    :param src_id: source_id for grains
    :param flow_id: flow_id for grains
    :param width: width of grains
    :param height: height of grains
    :param rate: rate of grains
    :param origin_timestamp: the origin timestamp of the first grain (defaults to the current time)
    :param cog_frame_format: pixel format of the grains (default U8_444)
    :param step: The number of grains to increment by each time (values above 1 cause skipping)"""
    origin_timestamp = origin_timestamp if origin_timestamp is not None else Timestamp.get_time()
    yield from LumaStepsPatternGenerator(
        src_id,
        flow_id,
        width,
        height,
        rate=rate,
        cog_frame_format=cog_frame_format)[origin_timestamp::step]
@deprecated(version="2.13.0", reason="Please use mediagrains.patterngenerators instead")
def ColourBars(src_id, flow_id, width, height,
               intensity=0.75,
               rate=Fraction(25, 1),
               origin_timestamp=None,
               cog_frame_format=CogFrameFormat.U8_444,
               step=1):
    """Returns a generator for colour bar video grains in specified format.

    :param src_id: source_id for grains
    :param flow_id: flow_id for grains
    :param width: width of grains
    :param height: height of grains
    :param intensity: intensity of colour bars (usually 1.0 or 0.75)
    :param rate: rate of grains
    :param origin_timestamp: the origin timestamp of the first grain (defaults to the current time)
    :param cog_frame_format: pixel format of the grains (default U8_444)
    :param step: The number of grains to increment by each time (values above 1 cause skipping)"""
    origin_timestamp = origin_timestamp if origin_timestamp is not None else Timestamp.get_time()
    yield from ColourBarsPatternGenerator(
        src_id,
        flow_id,
        width,
        height,
        intensity=intensity,
        rate=rate,
        cog_frame_format=cog_frame_format
    )[origin_timestamp::step]
@deprecated(version="2.13.0", reason="Please use mediagrains.patterngenerators instead")
def MovingBarOverlay(grain_gen, height=100, speed=1.0):
    """Call this method and pass an iterable of video grains as the first parameter. This method will overlay a moving black bar onto the grains.

    :param grain_gen: An iterable which yields video grains
    :param height: The height of the bar in pixels
    :param speed: A floating point speed in pixels per frame
    :returns: A generator which yields video grains
    :raises ValueError: if the first grain's format is not in pixel_ranges
    """
    bar = None
    for grain in grain_gen:
        # Vertical subsampling factor of the chroma planes (ceiling division
        # of luma height by chroma height).
        v_subs = (grain.components[0].height + grain.components[1].height - 1)//grain.components[1].height
        if bar is None:
            if grain.format not in pixel_ranges:
                raise ValueError("Not a supported format for this generator")
            _bpp = pixel_ranges[grain.format][0]
            # Pre-render one copy of the bar per plane, built once from the
            # first grain: the Y plane is filled with the format's black
            # level and the chroma planes with their neutral values (the
            # offsets from pixel_ranges).
            bar = [bytearray(grain.components[0].width*_bpp * height),
                   bytearray(grain.components[1].width*_bpp * height // v_subs),
                   bytearray(grain.components[2].width*_bpp * height // v_subs)]
            for y in range(0, height):
                for x in range(0, grain.components[0].width):
                    bar[0][y*grain.components[0].width * _bpp + _bpp*x + 0] = pixel_ranges[grain.format][1][0] & 0xFF
                    if _bpp > 1:
                        # Two-byte samples are written low byte first, high byte second.
                        bar[0][y*grain.components[0].width * _bpp + _bpp*x + 1] = pixel_ranges[grain.format][1][0] >> 8
            for y in range(0, height // v_subs):
                for x in range(0, grain.components[1].width):
                    bar[1][y*grain.components[1].width * _bpp + _bpp*x + 0] = pixel_ranges[grain.format][2][0] & 0xFF
                    if _bpp > 1:
                        bar[1][y*grain.components[1].width * _bpp + _bpp*x + 1] = pixel_ranges[grain.format][2][0] >> 8
                    bar[2][y*grain.components[2].width * _bpp + _bpp*x + 0] = pixel_ranges[grain.format][3][0] & 0xFF
                    if _bpp > 1:
                        bar[2][y*grain.components[2].width * _bpp + _bpp*x + 1] = pixel_ranges[grain.format][3][0] >> 8
        # Top line of the bar for this grain: speed (pixels/frame) times the
        # grain's frame index at the stream's rate.
        fnum = int(speed*grain.origin_timestamp.to_count(grain.rate.numerator, grain.rate.denominator))
        # Splice the pre-rendered bar rows into the grain's planes in place,
        # wrapping around the bottom of each plane (modulo plane height).
        for y in range(0, height):
            grain.data[
                grain.components[0].offset + ((fnum + y) % grain.components[0].height)*grain.components[0].stride:
                grain.components[0].offset + ((fnum + y) % grain.components[0].height)*grain.components[0].stride + grain.components[0].width*_bpp] = (
                bar[0][y*grain.components[0].width * _bpp: (y+1)*grain.components[0].width * _bpp])
        for y in range(0, height // v_subs):
            grain.data[
                grain.components[1].offset + ((fnum//v_subs + y) % grain.components[1].height)*grain.components[1].stride:
                grain.components[1].offset + ((fnum//v_subs + y) % grain.components[1].height)*grain.components[1].stride + grain.components[1].width*_bpp] = (
                bar[1][y*grain.components[1].width * _bpp: (y+1)*grain.components[1].width * _bpp])
            grain.data[
                grain.components[2].offset + ((fnum//v_subs + y) % grain.components[2].height)*grain.components[2].stride:
                grain.components[2].offset + ((fnum//v_subs + y) % grain.components[2].height)*grain.components[2].stride + grain.components[2].width*_bpp] = (
                bar[2][y*grain.components[2].width * _bpp: (y+1)*grain.components[2].width * _bpp])
        yield grain
@deprecated(version="2.13.0", reason="Please use mediagrains.patterngenerators instead")
def Tone1K(src_id, flow_id,
           samples=1920,
           channels=1,
           origin_timestamp=None,
           cog_audio_format=CogAudioFormat.S16_INTERLEAVED,
           step=1,
           sample_rate=48000):
    """Convenience wrapper around Tone for a fixed 1kHz tone; all other
    parameters are forwarded unchanged."""
    return Tone(
        src_id,
        flow_id,
        1000,
        samples=samples,
        channels=channels,
        origin_timestamp=origin_timestamp,
        cog_audio_format=cog_audio_format,
        step=step,
        sample_rate=sample_rate,
    )
@deprecated(version="2.13.0", reason="Please use mediagrains.patterngenerators instead")
def Tone(src_id, flow_id,
         frequency,
         samples=1920,
         channels=1,
         origin_timestamp=None,
         cog_audio_format=CogAudioFormat.S16_INTERLEAVED,
         step=1,
         sample_rate=48000):
    """Returns a generator for audio grains containing a tone.

    :param src_id: source_id for grains
    :param flow_id: flow_id for grains
    :param frequency: tone frequency in Hz
    :param samples: number of samples per grain
    :param channels: number of audio channels
    :param origin_timestamp: origin timestamp of the first grain (defaults to the current time)
    :param cog_audio_format: audio format of the grains
    :param step: number of grains to advance each iteration (values above 1 cause skipping)
    :param sample_rate: sample rate in Hz"""
    origin_timestamp = origin_timestamp if origin_timestamp is not None else Timestamp.get_time()
    # The pattern generator is indexed from time zero; shift each grain's
    # timestamps so the first grain lands on origin_timestamp.
    for grain in TonePatternGenerator(
            src_id,
            flow_id,
            frequency=frequency,
            samples=samples,
            channels=channels,
            cog_audio_format=cog_audio_format,
            sample_rate=sample_rate
    )[Timestamp()::step]:
        grain.origin_timestamp = grain.origin_timestamp + origin_timestamp
        grain.sync_timestamp = grain.sync_timestamp + origin_timestamp
        yield grain
@deprecated(version="2.13.0", reason="Please use mediagrains.patterngenerators instead")
def Silence(src_id, flow_id,
            samples=1920,
            channels=1,
            origin_timestamp=None,
            cog_audio_format=CogAudioFormat.S16_INTERLEAVED,
            step=1,
            sample_rate=48000):
    """Returns a generator for silent audio grains.

    :param src_id: source_id for grains
    :param flow_id: flow_id for grains
    :param samples: number of samples per grain
    :param channels: number of audio channels
    :param origin_timestamp: origin timestamp of the first grain (defaults to the current time)
    :param cog_audio_format: audio format of the grains
    :param step: number of grains to advance each iteration (values above 1 cause skipping)
    :param sample_rate: sample rate in Hz"""
    origin_timestamp = origin_timestamp if origin_timestamp is not None else Timestamp.get_time()
    # The pattern generator is indexed from time zero; shift each grain's
    # timestamps so the first grain lands on origin_timestamp.
    for grain in SilencePatternGenerator(
            src_id,
            flow_id,
            samples=samples,
            channels=channels,
            cog_audio_format=cog_audio_format,
            sample_rate=sample_rate
    )[Timestamp()::step]:
        grain.origin_timestamp = grain.origin_timestamp + origin_timestamp
        grain.sync_timestamp = grain.sync_timestamp + origin_timestamp
        yield grain
@deprecated(version="2.13.0", reason="Please use mediagrains.patterngenerators instead")
def AudioGrainsLoopingData(src_id, flow_id,
                           sample_data,
                           samples=1920,
                           channels=1,
                           origin_timestamp=None,
                           cog_audio_format=CogAudioFormat.S16_INTERLEAVED,
                           step=1,
                           volume=0.5,
                           sample_rate=48000):
    """
    A generator which yields audio grains of a specified format using input
    data in the form of a list of floating point values that will be repeated
    as samples indefinitely.

    :param src_id: source_id for grains
    :param flow_id: flow_id for grains
    :param sample_data: list of floats (nominally in [-1.0, 1.0]) looped to fill the grains
    :param samples: number of samples per grain
    :param channels: number of audio channels
    :param origin_timestamp: origin timestamp of the first grain (defaults to the grain's own default)
    :param cog_audio_format: target audio format for the generated grains
    :param step: number of grains to advance each iteration (values above 1 cause skipping)
    :param volume: linear scale factor applied to sample_data
    :param sample_rate: sample rate in Hz
    :raises ValueError: if cog_audio_format is not one of the supported formats
    """
    data_samples = {}
    # Scale the normalised input samples into the integer (or float) range of
    # the requested format, and record the sample depth for struct packing.
    if cog_audio_format in [CogAudioFormat.S16_PLANES,
                            CogAudioFormat.S16_PAIRS,
                            CogAudioFormat.S16_INTERLEAVED]:
        formatted_sample_data = [round(x*volume*(1 << 15)) for x in sample_data]
        depth = 16
    elif cog_audio_format in [CogAudioFormat.S24_PLANES,
                              CogAudioFormat.S24_PAIRS,
                              CogAudioFormat.S24_INTERLEAVED]:
        formatted_sample_data = [round(x*volume*(1 << 23)) for x in sample_data]
        depth = 24
    elif cog_audio_format in [CogAudioFormat.S32_PLANES,
                              CogAudioFormat.S32_PAIRS,
                              CogAudioFormat.S32_INTERLEAVED]:
        formatted_sample_data = [round(x*volume*(1 << 31)) for x in sample_data]
        depth = 32
    elif cog_audio_format in [CogAudioFormat.FLOAT_PLANES,
                              CogAudioFormat.FLOAT_PAIRS,
                              CogAudioFormat.FLOAT_INTERLEAVED]:
        formatted_sample_data = [x*volume for x in sample_data]
        depth = 'f'
    elif cog_audio_format in [CogAudioFormat.DOUBLE_PLANES,
                              CogAudioFormat.DOUBLE_PAIRS,
                              CogAudioFormat.DOUBLE_INTERLEAVED]:
        formatted_sample_data = [x*volume for x in sample_data]
        depth = 'd'
    else:
        # Previously an unsupported format fell through this chain, leaving
        # formatted_sample_data/depth unbound and raising a confusing
        # NameError later; fail fast with a clear message instead.
        raise ValueError("Unsupported cog_audio_format: {!r}".format(cog_audio_format))
    # Determine the channel-layout family of the requested format.
    planes = False
    pairs = False
    interleaved = False
    if cog_audio_format in [CogAudioFormat.S16_PLANES,
                            CogAudioFormat.S24_PLANES,
                            CogAudioFormat.S32_PLANES,
                            CogAudioFormat.FLOAT_PLANES,
                            CogAudioFormat.DOUBLE_PLANES]:
        planes = True
    elif cog_audio_format in [CogAudioFormat.S16_PAIRS,
                              CogAudioFormat.S24_PAIRS,
                              CogAudioFormat.S32_PAIRS,
                              CogAudioFormat.FLOAT_PAIRS,
                              CogAudioFormat.DOUBLE_PAIRS]:
        pairs = True
    elif cog_audio_format in [CogAudioFormat.S16_INTERLEAVED,
                              CogAudioFormat.S24_INTERLEAVED,
                              CogAudioFormat.S32_INTERLEAVED,
                              CogAudioFormat.FLOAT_INTERLEAVED,
                              CogAudioFormat.DOUBLE_INTERLEAVED]:
        interleaved = True
    rate = fractions.Fraction(sample_rate, samples)
    duration = 1/rate
    # Template grain copied (deepcopy) for every yielded grain.
    ag = AudioGrain(src_id, flow_id,
                    origin_timestamp=origin_timestamp,
                    cog_audio_format=cog_audio_format,
                    samples=samples,
                    channels=channels,
                    rate=rate,
                    duration=duration,
                    sample_rate=sample_rate)
    origin_timestamp = ag.origin_timestamp
    ots = origin_timestamp
    offs = 0
    count = 0
    def make_samples(offs, samples, channels):
        # Pack `samples` samples per channel starting at offset `offs` into
        # the looping source data, laid out for the format family chosen above.
        line = [formatted_sample_data[n % len(formatted_sample_data)] for n in range(offs, offs+samples)]
        if planes:
            line = line * channels
        elif pairs:
            line = [x for x in line for _ in range(0, 2)] * (channels//2)
        elif interleaved:
            line = [x for x in line for _ in range(0, channels)]
        if depth == 16:
            return struct.pack('@' + ('h'*samples*channels), *line)
        elif depth == 24:
            # No native 24-bit struct code: pack as native 32-bit ints and keep
            # the low three bytes (assumes a little-endian host).
            return b''.join(struct.pack('@i', x)[:3] for x in line)
        elif depth == 32:
            return struct.pack('@' + ('i'*samples*channels), *line)
        elif depth == 'f':
            return struct.pack('@' + ('f'*samples*channels), *line)
        elif depth == 'd':
            return struct.pack('@' + ('d'*samples*channels), *line)
    while True:
        grain = deepcopy(ag)
        grain.origin_timestamp = ots
        grain.sync_timestamp = ots
        if offs not in data_samples:
            # Cache the packed payload per source offset: because the data
            # loops, a small number of distinct offsets recur indefinitely.
            data_samples[offs] = make_samples(offs, samples, channels)
        grain.data = bytearray(data_samples[offs][:grain.expected_length])
        yield grain
        offs = (offs + samples*step) % len(formatted_sample_data)
        count += samples*step
        ots = origin_timestamp + TimeOffset.from_count(count, sample_rate, 1)
| 43.788136 | 159 | 0.622928 | 0 | 0 | 11,761 | 0.758725 | 12,896 | 0.831946 | 0 | 0 | 2,930 | 0.18902 |
caf0139fdfe557f953aad0f3d0c2c6c1ddb373a5 | 1,091 | py | Python | catch.py | AlexLito666/my_first_project | 94898038bb6b0558328ee80791340d1ad3b92260 | [
"CC0-1.0"
] | null | null | null | catch.py | AlexLito666/my_first_project | 94898038bb6b0558328ee80791340d1ad3b92260 | [
"CC0-1.0"
] | null | null | null | catch.py | AlexLito666/my_first_project | 94898038bb6b0558328ee80791340d1ad3b92260 | [
"CC0-1.0"
] | null | null | null | from pygame import *
#создай окно игры
window = display.set_mode((700, 500))
display.set_caption("Догонялки")
#задай фон сцены
background = transform.scale(image.load("background.png"), (700, 500))
sprite1 = transform.scale(image.load('sprite1.png'), (100, 100))
sprite2 = transform.scale(image.load('sprite2.png'), (100, 100))
x1 = 100
y1 = 200
x2 = 200
y2 = 200
clock = time.Clock()
FPS = 60
game = True
while game:
window.blit(background,(0, 0))
window.blit(sprite1, (x1, y1))
window.blit(sprite2, (x2, y2))
key_pressed = key.get_pressed()
if key_pressed[K_UP]:
y1-=10
if key_pressed[K_DOWN]:
y1+=10
if key_pressed[K_LEFT]:
x1-=10
if key_pressed[K_RIGHT]:
x1+=10
if key_pressed[K_w]:
y2-=10
if key_pressed[K_s]:
y2+=10
if key_pressed[K_a]:
x2-=10
if key_pressed[K_d]:
x2+=10
#обработай событие «клик по кнопке "Закрыть окно"»
for e in event.get():
if e.type == QUIT:
game = False
display.update()
clock.tick(FPS)
| 18.491525 | 73 | 0.600367 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 213 | 0.182363 |
caf18d00a3de41fc29f984810d3ca6dd460764c6 | 4,731 | py | Python | tests/read/test_read_api_query_endpoint.py | NRCan/backdrop | ba45e21b81c09859310ca4be249b25016b8731dc | [
"MIT"
] | 9 | 2015-10-20T04:36:48.000Z | 2020-09-08T18:47:01.000Z | tests/read/test_read_api_query_endpoint.py | NRCan/backdrop | ba45e21b81c09859310ca4be249b25016b8731dc | [
"MIT"
] | 31 | 2015-01-11T11:57:05.000Z | 2021-03-24T10:52:33.000Z | tests/read/test_read_api_query_endpoint.py | alphagov/backdrop | 1256e5075d7e5a0e41afb0f0913a5f2c4bdb9ad8 | [
"MIT"
] | 4 | 2015-01-25T09:06:45.000Z | 2021-04-10T20:27:36.000Z | import unittest
import urllib
import datetime
from hamcrest import assert_that, is_
from mock import patch
import pytz
from backdrop.read import api
from backdrop.core.query import Query
from tests.support.performanceplatform_client import fake_data_set_exists
from tests.support.test_helpers import has_status, has_header
from warnings import warn
class NoneData(object):
def data(self):
return None
warn("This test is deprecated in favour of "
"tests.read.test_read_api_service_data_endpoint")
class QueryingApiTestCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
@fake_data_set_exists("foo", raw_queries_allowed=True)
@patch('backdrop.core.data_set.DataSet.execute_query')
def test_filter_by_query_is_executed(self, mock_query):
mock_query.return_value = NoneData()
self.app.get('/foo?filter_by=zombies:yes')
mock_query.assert_called_with(
Query.create(filter_by=[[u'zombies', u'yes']]))
@fake_data_set_exists("foo")
@patch('backdrop.core.data_set.DataSet.execute_query')
def test_group_by_query_is_executed(self, mock_query):
mock_query.return_value = NoneData()
self.app.get('/foo?group_by=zombies')
mock_query.assert_called_with(
Query.create(group_by=[u'zombies']))
@fake_data_set_exists("foo", raw_queries_allowed=True)
@patch('backdrop.core.data_set.DataSet.execute_query')
def test_query_with_start_and_end_is_executed(self, mock_query):
mock_query.return_value = NoneData()
expected_start_at = datetime.datetime(2012, 12, 5, 8, 12, 43,
tzinfo=pytz.UTC)
expected_end_at = datetime.datetime(2012, 12, 12, 8, 12, 43,
tzinfo=pytz.UTC)
self.app.get(
'/foo?start_at=' + urllib.quote("2012-12-05T08:12:43+00:00") +
'&end_at=' + urllib.quote("2012-12-12T08:12:43+00:00")
)
mock_query.assert_called_with(
Query.create(start_at=expected_start_at, end_at=expected_end_at))
@fake_data_set_exists("foo", raw_queries_allowed=True)
@patch('backdrop.core.data_set.DataSet.execute_query')
def test_sort_query_is_executed(self, mock_query):
mock_query.return_value = NoneData()
self.app.get(
'/foo?sort_by=value:ascending'
)
mock_query.assert_called_with(
Query.create(sort_by=["value", "ascending"]))
self.app.get(
'/foo?sort_by=value:descending'
)
mock_query.assert_called_with(
Query.create(sort_by=["value", "descending"]))
@fake_data_set_exists("data_set", queryable=False)
def test_returns_404_when_data_set_is_not_queryable(self):
response = self.app.get('/data_set')
assert_that(response, has_status(404))
@fake_data_set_exists("data_set", queryable=False)
def test_allow_origin_set_on_404(self):
response = self.app.get('/data_set')
assert_that(response, has_status(404))
assert_that(response, has_header('Access-Control-Allow-Origin', '*'))
@fake_data_set_exists("foo", raw_queries_allowed=True)
@patch('backdrop.core.data_set.DataSet.execute_query')
def test_allow_origin_set_on_500(self, mock_query):
mock_query.side_effect = StandardError('fail!')
response = self.app.get('/foo')
assert_that(response, has_status(500))
assert_that(response, has_header('Access-Control-Allow-Origin', '*'))
class PreflightChecksApiTestCase(unittest.TestCase):
def setUp(self):
self.app = api.app.test_client()
@fake_data_set_exists("data_set")
def test_cors_preflight_requests_have_empty_body(self):
response = self.app.open('/data_set', method='OPTIONS')
assert_that(response.status_code, is_(200))
assert_that(response.data, is_(""))
@fake_data_set_exists("data_set")
def test_cors_preflight_are_allowed_from_all_origins(self):
response = self.app.open('/data_set', method='OPTIONS')
assert_that(response, has_header('Access-Control-Allow-Origin', '*'))
@fake_data_set_exists("data_set")
def test_cors_preflight_result_cache(self):
response = self.app.open('/data_set', method='OPTIONS')
assert_that(response.headers['Access-Control-Max-Age'],
is_('86400'))
@fake_data_set_exists("data_set")
def test_cors_requests_can_cache_control(self):
response = self.app.open('/data_set', method='OPTIONS')
assert_that(response.headers['Access-Control-Allow-Headers'],
is_('cache-control, govuk-request-id, request-id'))
| 38.778689 | 77 | 0.682731 | 4,270 | 0.902558 | 0 | 0 | 3,920 | 0.828577 | 0 | 0 | 976 | 0.206299 |
caf2e3c5025891a382a27aff6769ca77937b4ee1 | 137 | py | Python | scripts/npc/autogen_kasandra.py | hsienjan/SideQuest-Server | 3e88debaf45615b759d999255908f99a15283695 | [
"MIT"
] | null | null | null | scripts/npc/autogen_kasandra.py | hsienjan/SideQuest-Server | 3e88debaf45615b759d999255908f99a15283695 | [
"MIT"
] | null | null | null | scripts/npc/autogen_kasandra.py | hsienjan/SideQuest-Server | 3e88debaf45615b759d999255908f99a15283695 | [
"MIT"
] | null | null | null | # Character field ID when accessed: 820000000
# ObjectID: 1000000
# ParentID: 9010010
# Object Position X: -449
# Object Position Y: 225
| 22.833333 | 45 | 0.751825 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 132 | 0.963504 |
caf371c53116ba86bb2553a759aef45ace9ab067 | 1,373 | py | Python | media_browser/imdb/_logging.py | arjun-namdeo/my_python | 43ce84d04d773faa0d3c8f2281393767855a8465 | [
"MIT"
] | null | null | null | media_browser/imdb/_logging.py | arjun-namdeo/my_python | 43ce84d04d773faa0d3c8f2281393767855a8465 | [
"MIT"
] | null | null | null | media_browser/imdb/_logging.py | arjun-namdeo/my_python | 43ce84d04d773faa0d3c8f2281393767855a8465 | [
"MIT"
] | null | null | null | """
_logging module (imdb package).
"""
import logging
LEVELS = {'debug': logging.DEBUG,
'info': logging.INFO,
'warn': logging.WARNING,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL}
imdbpyLogger = logging.getLogger('media_browser')
imdbpyStreamHandler = logging.StreamHandler()
imdbpyFormatter = logging.Formatter('%(asctime)s %(levelname)s [%(name)s]' \
' %(pathname)s:%(lineno)d: %(message)s')
imdbpyStreamHandler.setFormatter(imdbpyFormatter)
imdbpyLogger.addHandler(imdbpyStreamHandler)
def setLevel(level):
"""Set logging level for the main logger."""
level = level.lower().strip()
imdbpyLogger.setLevel(LEVELS.get(level, logging.NOTSET))
imdbpyLogger.log(imdbpyLogger.level, 'set logging threshold to "%s"',
logging.getLevelName(imdbpyLogger.level))
#imdbpyLogger.setLevel(logging.DEBUG)
# It can be an idea to have a single function to log and warn:
#import warnings
#def log_and_warn(msg, args=None, logger=None, level=None):
# """Log the message and issue a warning."""
# if logger is None:
# logger = imdbpyLogger
# if level is None:
# level = logging.WARNING
# if args is None:
# args = ()
# #warnings.warn(msg % args, stacklevel=0)
# logger.log(level, msg % args)
| 29.847826 | 76 | 0.656227 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 697 | 0.507647 |
caf433f35fafdcea95c489ab8322ae30c22ed4f7 | 786 | py | Python | simplecmr/utils.py | KMarkert/simple-cmr | 7c6500466715692f18482ea379572ef30b46b380 | [
"MIT"
] | 1 | 2020-04-10T17:08:35.000Z | 2020-04-10T17:08:35.000Z | simplecmr/utils.py | KMarkert/simple-cmr | 7c6500466715692f18482ea379572ef30b46b380 | [
"MIT"
] | null | null | null | simplecmr/utils.py | KMarkert/simple-cmr | 7c6500466715692f18482ea379572ef30b46b380 | [
"MIT"
] | null | null | null | import datetime
from collections import OrderedDict
def decode_date(string):
"""Decodes a date from a command line argument, returning datetime object".
Args:
string: See AssetSetCommand class comment for the allowable
date formats.v
Returns:
long, datetime object
Raises:
ValueError: if string does not conform to a legal date format.
"""
date_formats = ['%Y%m%d',
'%Y-%m-%d',
'%Y-%m-%dT%H:%M:%S',
'%Y-%m-%dT%H:%M:%S.%f']
for date_format in date_formats:
try:
dt = datetime.datetime.strptime(string, date_format)
return dt
except ValueError:
continue
raise ValueError('Invalid format for date: "%s".' % string)
| 28.071429 | 79 | 0.577608 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 397 | 0.505089 |
caf601b1c7832716a886f026f7b2515dd1da5282 | 1,976 | py | Python | .ipynb_checkpoints/data_cleaning-checkpoint.py | VivanVatsa/ds-salary-proj | b78b5498d2e3326799cae445ba47d6e6a5637ccb | [
"MIT"
] | 5 | 2020-12-30T06:17:19.000Z | 2021-12-14T21:26:57.000Z | .ipynb_checkpoints/data_cleaning-checkpoint.py | VivanVatsa/ds-salary-proj | b78b5498d2e3326799cae445ba47d6e6a5637ccb | [
"MIT"
] | null | null | null | .ipynb_checkpoints/data_cleaning-checkpoint.py | VivanVatsa/ds-salary-proj | b78b5498d2e3326799cae445ba47d6e6a5637ccb | [
"MIT"
] | 5 | 2021-01-19T14:06:34.000Z | 2022-02-25T15:57:16.000Z | # %%
# from numpy.core.fromnumeric import size
import pandas as pd
df = pd.read_csv("glassdoor_jobs.csv")
df = df[df["Salary Estimate"] != "-1"]
df
# TO DO IN DATA CLEANING
# -------------------
# salary parsing
# %%
salary = df["Salary Estimate"].apply(lambda x: x.split("(")[0])
# salary
minus_Kd = salary.apply(
lambda x: x.replace("K", "").replace("₹", "").replace(",", ""))
minus_Kd[0]
df["min_salary"] = minus_Kd.apply(lambda x: int(x.split("-")[0]))
# df
# #%%
# type(df["min_salary"])
# df["min_salary"].dtype
df["max_salary"] = minus_Kd.apply(lambda x: int((x.split("-")[1])))
# df
df["average-salary"] = (df.min_salary + df.max_salary) / 2
# df
df["currency"] = "LAKh"
# df
df
# company name text only
# %%
df["company_txt"] = df["Company Name"].apply(lambda x: x.split("\n")[0])
df
# state field
# %%
df.Location.value_counts()
# %%
# 2 ways to delete undesired column from the data frame
# 1.
# del df["Headquarters"]
# df = df.drop("Headquarters", 1)
df = df.drop("Competitors", 1)
df
# age of company
# %%
df["age"] = df.Founded.apply(lambda x: x if x < 1 else 2020 - x)
df
# parsing of job description (PYTHON)
# %%
# will check all job descriptions keyword - analysis
# python
df["analysis"] = df["Job Description"].apply(lambda x: 1
if "analysis" in x.lower() else 0)
df.analysis.value_counts()
# %%
df["Job Description"][0]
# df["hourly"] = df["Salary Estimate"].apply(lambda x: 1
# if "per hour" in x.lower() else 0)
# df
# %%
df
# df["employer_provided"] = df["Salary Estimate"].apply(lambda x: 1
# if "employer provided" in x.lower() else 0)
# df
# min_hr = minus_Kd.apply(lambda x: x.lower().replace("per hour". '').replace('employer provided salary:', ''))
# %%
# *df cleaned*
df_out = df
df_out
# %%
df_out.to_csv("GL_sal_data_cleaned.csv", index=False)
# %%
pd.read_csv("GL_sal_data_cleaned.csv")
# %%
| 22.712644 | 111 | 0.593623 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,213 | 0.613246 |
caf671b7d9859cff696a323490350293855a283d | 2,301 | py | Python | Ultraviz/src/log_handler.py | ultraleap/ultraleap-labs-ultraviz | d011724ccd46a76400a309795f34d3614f456520 | [
"Apache-2.0"
] | null | null | null | Ultraviz/src/log_handler.py | ultraleap/ultraleap-labs-ultraviz | d011724ccd46a76400a309795f34d3614f456520 | [
"Apache-2.0"
] | null | null | null | Ultraviz/src/log_handler.py | ultraleap/ultraleap-labs-ultraviz | d011724ccd46a76400a309795f34d3614f456520 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
# Class to handle reading of SDK Log Data as a pipe/fifo for
# Windows and Unix, to avoid big log files.
----------------------------------------------------------
"""
import platform
import os
import tempfile
IS_WINDOWS = platform.system().lower() == "windows"
if IS_WINDOWS:
try:
import win32pipe, win32file
except:
print("*** WARNING: PyWin dependencies not found for Windows - Please install via:\n\n pip3 install --user pywin32 \n")
class SDKLogPipeHandler(object):
def __init__(self, is_windows=True):
super(SDKLogPipeHandler, self).__init__()
# Do we need Unix/Windows setup?
self.isWindows = is_windows
# A default location to create a named pipe
# This shall be used instead of writing the SDK Log to a file.
if self.isWindows:
self.pipe_name = r'\\.\pipe\UHSDK'
else:
tmpdir = tempfile.mkdtemp()
self.pipe_name = os.path.join(tmpdir, 'myfifo')
self.namedPipe = None
self.xyzi_regex = r'\[(-?[0-9.]+),(-?[0-9.]+),(-?[0-9.]+)\] intensity (-?[0-9.]+)'
# Number of bytes to read from SDK Log on Windows
self.num_bytes = 64*1024
def setupNamedPipe(self):
# On Windows, we use the win32pipe module
if self.isWindows:
if not self.namedPipe:
self.namedPipe = win32pipe.CreateNamedPipe(self.pipe_name, win32pipe.PIPE_ACCESS_DUPLEX,
win32pipe.PIPE_TYPE_MESSAGE | win32pipe.PIPE_READMODE_MESSAGE | win32pipe.PIPE_WAIT,
win32pipe.PIPE_UNLIMITED_INSTANCES,
self.num_bytes,
self.num_bytes,
0,
None)
# Else, on Unix systems, use mkfifo
else:
if not self.namedPipe:
try:
self.namedPipe = os.mkfifo(self.pipe_name)
except:
print("EXCEPTION: Pipe for %s exists!" % self.pipe_name)
# win32pipe, Windows only methods
def connectToSDKPipe(self):
win32pipe.ConnectNamedPipe(self.namedPipe, None)
def getDataFromNamedPipe(self):
data = win32file.ReadFile(self.namedPipe, self.num_bytes)
return data
| 35.4 | 127 | 0.575837 | 1,801 | 0.782703 | 0 | 0 | 0 | 0 | 0 | 0 | 732 | 0.318123 |
caf7109a216b415b4600a19f6e38c0eecc9526de | 505 | py | Python | stackimpact/utils.py | timgates42/stackimpact-python | 4d0a415b790c89e7bee1d70216f948b7fec11540 | [
"BSD-3-Clause"
] | 742 | 2017-06-26T13:16:34.000Z | 2022-02-06T11:05:31.000Z | stackimpact/utils.py | gaecom/stackimpact-python | 4d0a415b790c89e7bee1d70216f948b7fec11540 | [
"BSD-3-Clause"
] | 7 | 2017-06-28T06:01:04.000Z | 2021-05-18T20:06:53.000Z | stackimpact/utils.py | gaecom/stackimpact-python | 4d0a415b790c89e7bee1d70216f948b7fec11540 | [
"BSD-3-Clause"
] | 30 | 2017-06-27T15:26:04.000Z | 2021-05-16T11:08:53.000Z |
import time
import uuid
import base64
import hashlib
def millis():
return int(round(time.time() * 1000))
def timestamp():
return int(time.time())
def base64_encode(s):
return base64.b64encode(s.encode('utf-8')).decode('utf-8')
def base64_decode(b):
return base64.b64decode(b).decode('utf-8')
def generate_uuid():
return str(uuid.uuid4())
def generate_sha1(text):
sha1_hash = hashlib.sha1()
sha1_hash.update(text.encode('utf-8'))
return sha1_hash.hexdigest()
| 14.852941 | 62 | 0.681188 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 28 | 0.055446 |
caf860b3185ef94fc3db805ba75414ec7fad05a1 | 174 | py | Python | example/app/views.py | aolkin/django-bootstrap-form | 5fff56f715bd9f2f29793f6a5a87baa1be25e409 | [
"BSD-3-Clause"
] | 324 | 2015-01-12T19:30:51.000Z | 2022-02-11T07:13:19.000Z | example/app/views.py | caumons/django-bootstrap-form | 62af3e076612a5d3b67ebc020c05a8db20e3fe62 | [
"BSD-3-Clause"
] | 37 | 2015-03-04T23:43:45.000Z | 2021-10-18T16:08:52.000Z | example/app/views.py | caumons/django-bootstrap-form | 62af3e076612a5d3b67ebc020c05a8db20e3fe62 | [
"BSD-3-Clause"
] | 132 | 2015-01-01T18:13:06.000Z | 2022-01-10T07:06:19.000Z | from django.shortcuts import render
from app.forms import ExampleForm
def index(request):
form = ExampleForm()
return render(request, 'index.html', {'form': form})
| 21.75 | 56 | 0.724138 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 0.103448 |
caf9fd42bc789c264cd6023256055e13907047ef | 6,519 | py | Python | yt_downloader_with_GUI.py | francecon/YT-AUDIO-DOWNLOADER | d437a112595b0e182dca1042a4e1524d94107c06 | [
"CNRI-Python"
] | null | null | null | yt_downloader_with_GUI.py | francecon/YT-AUDIO-DOWNLOADER | d437a112595b0e182dca1042a4e1524d94107c06 | [
"CNRI-Python"
] | null | null | null | yt_downloader_with_GUI.py | francecon/YT-AUDIO-DOWNLOADER | d437a112595b0e182dca1042a4e1524d94107c06 | [
"CNRI-Python"
] | null | null | null | import glob
import os
# os.environ["IMAGEIO_FFMPEG_EXE"] = "C:/ffmpeg-4.4.1-essentials_build/ffmpeg-4.4.1-essentials_build/bin"
import re
import sys
import urllib
from tkinter import (BOTH, RIGHT, YES, Button, Entry, Label, Listbox, Menu,
Scrollbar, StringVar, Tk, Y)
from tkinter import messagebox as m_box
import validators
from moviepy.editor import AudioFileClip
from pytube import YouTube, exceptions
import subprocess
ws = Tk()
ws.title('YT Downloader - Scarica le tue canzoni')
ws.geometry('1000x600')
ws.eval('tk::PlaceWindow . center')
if getattr(sys, 'frozen', False):
dirname = os.path.dirname(sys.executable)
elif __file__:
dirname = os.path.dirname(__file__)
# ws.iconbitmap(os.path.join(dirname, "icon", "icon.ico"))
### Center the window ###
#Same size will be defined in variable for center screen in Tk_Width and Tk_height
Tk_Width = 1000
Tk_Height = 600
#calculate coordination of screen and window form
x_Left = int(ws.winfo_screenwidth()/2 - Tk_Width/2)
y_Top = int(ws.winfo_screenheight()/2 - Tk_Height/2)
# Write following format for center screen
ws.geometry("+{}+{}".format(x_Left, y_Top))
###
def make_menu(w):
global the_menu
the_menu = Menu(w, tearoff=0)
the_menu.add_command(label="Taglia")
the_menu.add_command(label="Copia")
the_menu.add_command(label="Incolla")
def show_menu(e):
w = e.widget
the_menu.entryconfigure("Taglia",
command=lambda: w.event_generate("<<Cut>>"))
the_menu.entryconfigure("Copia",
command=lambda: w.event_generate("<<Copy>>"))
the_menu.entryconfigure("Incolla",
command=lambda: w.event_generate("<<Paste>>"))
the_menu.tk.call("tk_popup", the_menu, e.x_root, e.y_root)
def delSelected():
link_selected = lb.curselection()
if len(link_selected) == 0:
m_box.showerror("Error", "Nessun link selezionato")
for i in link_selected:
lb.delete(i)
def insert_link():
inserted_link = link.get()
inserted_link.replace(" ", "")
# check if inserted string is a valid url
if validators.url(inserted_link):
#check if the link is a YouTube link
try:
YouTube(inserted_link).check_availability()
list_of_urls = lb.get(0, 'end')
# check if the link was already inserted
if inserted_link not in list_of_urls:
lb.insert('end',inserted_link)
yt_link.delete(0,'end')
else:
yt_link.delete(0,'end')
m_box.showerror("Error", "Link YouTube già inserito!")
except exceptions.VideoUnavailable:
yt_link.delete(0,'end')
m_box.showerror("Error", "Link video YouTube non disponibile!\nInserisci un link di un video YouTube!")
except urllib.error.URLError:
yt_link.delete(0,'end')
m_box.showerror("Error", "Internet non disponibile")
else:
yt_link.delete(0,'end')
m_box.showerror("Error", "Inserisci un link valido!")
def download():
list_of_urls = lb.get(0, 'end')
if len(list_of_urls) == 0:
m_box.showerror("Error", "Nessun link inserito")
else:
answer=m_box.askyesnocancel("Richiesta", "Vuoi davvero scaricare tutte le canzoni?")
if answer:
if os.path.isdir(dirname+"/Canzoni_mp4"): #if Canzoni_mp4 esiste allora chiedi se vuole cancellare
answer=m_box.askyesnocancel("Richiesta", "Vuoi cancellare tutte le canzoni che ci sono nella cartella 'Canzoni_mp4'?")
if answer:
files = glob.glob('./Canzoni_mp4/*')
for f in files:
os.remove(f)
try:
for i in list_of_urls:
yt = YouTube(i)
title = yt.title
title = re.sub(r'[\\/*?:"<>|]',"-",title)
default_filename = title + ".mp4"
new_filename = title+'.mp3'
parent_dir = os.path.join(dirname, "Canzoni_mp4")
str = yt.streams.get_audio_only()
str.download(output_path=parent_dir,filename=default_filename,max_retries=10)
try:
subprocess.run([
'ffmpeg', '-y',
'-i', os.path.join(parent_dir, default_filename),
os.path.join(parent_dir, new_filename)
],shell=True)
# audioclip = AudioFileClip(os.path.join(parent_dir, default_filename))
# audioclip.write_audiofile(os.path.join(parent_dir, new_filename))
# audioclip.close()
files = glob.glob(parent_dir+'/*.mp4')
for f in files:
os.remove(f)
except:
files = glob.glob(parent_dir+'/*.mp4')
for f in files:
os.remove(f)
m_box.showerror("Error", "Errore di conversione da MP4 a MP3")
except:
m_box.showerror("Error", "Errore di download")
m_box.showinfo("Scaricato", "Ho scaricato tutto")
else:
pass
make_menu(ws)
show = Label(ws, anchor="w",fg ="#f5453c", text = 'Bentornato su "YT Downloader - Scarica le tue canzoni"', font = ("Serif", 14), padx = 0, pady = 10)
show.pack()
show = Label(ws, text = "Lista dei link delle canzoni che vuoi scaricare: ",
font = ("Times", 14), padx = 10, pady = 10)
show.pack()
lb = Listbox(ws, selectmode = "multiple")
scroll_one=Scrollbar(ws,command=lb.yview)
lb.configure(yscrollcommand=scroll_one.set)
lb.pack(padx = 20, pady = 0, expand = YES, fill = BOTH)
scroll_one.pack(side=RIGHT,fill=Y)
get_info = Label(ws, text="Inserisci il link della canzone che vuoi scaricare: ",
font = ("Times", 14), padx = 10, pady = 10)
get_info.pack()
link = StringVar()
yt_link = Entry(ws, width=60, textvariable=link)
yt_link.pack()
yt_link.bind_class("Entry", "<Button-3><ButtonRelease-3>", show_menu)
yt_link.focus()
Button(ws, text="Inserisci link", command=insert_link).pack()
Button(ws, text="Cancella link", command=delSelected).pack()
Button(ws, text="Scarica le canzoni", command=download, activeforeground =
"#f5453c").pack()
ws.mainloop()
| 35.237838 | 150 | 0.591042 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,814 | 0.278221 |
caf9fdaea0cfc21fcb1eeef6a8902847ee871447 | 177 | py | Python | sdap_ingest_manager/history_manager/__init__.py | tloubrieu-jpl/incubator-sdap-nexus-ingestion-manager | 1fedc94265056ea9f9f96e9851bfe885959893fd | [
"Apache-2.0"
] | null | null | null | sdap_ingest_manager/history_manager/__init__.py | tloubrieu-jpl/incubator-sdap-nexus-ingestion-manager | 1fedc94265056ea9f9f96e9851bfe885959893fd | [
"Apache-2.0"
] | 1 | 2020-06-08T18:12:42.000Z | 2020-06-09T02:47:47.000Z | sdap_ingest_manager/history_manager/__init__.py | tloubrieu-jpl/incubator-sdap-nexus-ingestion-manager | 1fedc94265056ea9f9f96e9851bfe885959893fd | [
"Apache-2.0"
] | null | null | null | from .util import md5sum_from_filepath
from .datasetingestionhistorysolr import DatasetIngestionHistorySolr
from .datasetingestionhistoryfile import DatasetIngestionHistoryFile
| 44.25 | 68 | 0.915254 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
cafb72709a31d2ac7f7d2f7eba103eb452019e09 | 867 | py | Python | SistemaPago/contact/views.py | cluco91/Django_SistemaPago | 5c1909af6bea9ff57c7252245049de6536bbcba1 | [
"MIT"
] | null | null | null | SistemaPago/contact/views.py | cluco91/Django_SistemaPago | 5c1909af6bea9ff57c7252245049de6536bbcba1 | [
"MIT"
] | null | null | null | SistemaPago/contact/views.py | cluco91/Django_SistemaPago | 5c1909af6bea9ff57c7252245049de6536bbcba1 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.core.mail import send_mail
from django.conf import settings
from .forms import contactForm
# Create your views here.
def contact(request):
title = 'Contacto'
form = contactForm(request.POST or None)
confirm_message = None
if form.is_valid():
name = form.cleaned_data['name']
comment = form.cleaned_data['comment']
subject = 'Mensaje desde MISITIO.com'
message = '%s %s' %(comment, name)
emailFrom = form.cleaned_data['email']
emailTo = [settings.EMAIL_HOST_USER]
send_mail(subject, message, emailFrom, emailTo, fail_silently=True)
title = 'Gracias!'
confirm_message = "Gracias por el mensaje. Nos contactaremos contigo más tarde."
form = None
context = {'title': title,'form': form, 'confirm_message': confirm_message, }
template = 'contact.html'
return render(request, template, context) | 32.111111 | 82 | 0.742791 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 208 | 0.239631 |
cafc588d566696ccb6cf77935803ead728bb260c | 2,674 | py | Python | Image Classification/CGIAR Crop Yield Prediction Challenge/omarhzm/fields (1).py | ZindiAfrica/Computer-Vision | bf4c00a0633506270dc6d07df938a100a10ee799 | [
"MIT"
] | null | null | null | Image Classification/CGIAR Crop Yield Prediction Challenge/omarhzm/fields (1).py | ZindiAfrica/Computer-Vision | bf4c00a0633506270dc6d07df938a100a10ee799 | [
"MIT"
] | null | null | null | Image Classification/CGIAR Crop Yield Prediction Challenge/omarhzm/fields (1).py | ZindiAfrica/Computer-Vision | bf4c00a0633506270dc6d07df938a100a10ee799 | [
"MIT"
] | null | null | null | import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
import glob, random
import sklearn
from sklearn.decomposition import PCA
from xgboost.sklearn import XGBRegressor
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import GradientBoostingRegressor,BaggingRegressor, RandomForestRegressor,VotingRegressor
from sklearn.linear_model import LinearRegression
from lightgbm import LGBMRegressor
import catboost
from catboost import CatBoostRegressor
from tqdm import tqdm
from sklearn.preprocessing import LabelEncoder
le = LabelEncoder()
#import warnings
#warnings.filterwarnings('ignore')
folder = os.path.dirname(os.path.abspath(__file__))
train_new = pd.read_csv(folder+'/Train.csv')
bands_of_interest = ['S2_B5', 'S2_B4', 'S2_B3', 'S2_B2', 'CLIM_pr', 'CLIM_soil']
band_names = [l.strip() for l in open(folder + '/band_names.txt', 'r').readlines()]
def process_train(fid, folder= folder+'/imtrain'):
fn = f'{folder}/{fid}.npy'
arr = np.load(fn)
values = {}
for month in range(12):
bns = [str(month) + '_' + b for b in bands_of_interest] # Bands of interest for this month
idxs = np.where(np.isin(band_names, bns)) # Index of these bands
vs = arr[idxs, 20, 20] # Sample the im at the center point
for bn, v in zip(bns, vs[0]):
values[bn] = v
return values
def process_test(fid, folder= folder+'/imtest'):
fn = f'{folder}/{fid}.npy'
arr = np.load(fn)
values = {}
for month in range(12):
bns = [str(month) + '_' + b for b in bands_of_interest] # Bands of interest for this month
idxs = np.where(np.isin(band_names, bns)) # Index of these bands
vs = arr[idxs, 20, 20] # Sample the im at the center point
for bn, v in zip(bns, vs[0]):
values[bn] = v
return values
# Make a new DF with the sampled values from each field
train_sampled = pd.DataFrame([process_train(fid) for fid in train_new['Field_ID'].values])
#MODEL
X = train_sampled.copy()
y = train_new['Yield'].values
print(X.head)
print(y)
X_train, X_test, y_train, y_test = train_test_split(X, y)
model=BaggingRegressor(CatBoostRegressor(silent=True),n_estimators=55)
model.fit(X_train, y_train)
print('Score:', mean_squared_error(y_test, model.predict(X_test), squared=False))
#SUBMITTING
ss = pd.read_csv(folder+'/SampleSubmission.csv')
test_sampled = pd.DataFrame([process_test(fid) for fid in ss['Field_ID'].values])
preds = model.predict(test_sampled)
ss['Yield'] = preds
ss.to_csv(folder+'/Sub.csv', index=False)
| 34.727273 | 111 | 0.715782 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 541 | 0.202319 |
cafd8ddcc741b3376fcc1b0b66c815390e2e9e6c | 4,977 | py | Python | parserAlejandroTejada.py | tej17584/proyecto3DisenoLenguajes | 6d358e53054322818fe0c02934f52e84d945b5f7 | [
"MIT"
] | null | null | null | parserAlejandroTejada.py | tej17584/proyecto3DisenoLenguajes | 6d358e53054322818fe0c02934f52e84d945b5f7 | [
"MIT"
] | null | null | null | parserAlejandroTejada.py | tej17584/proyecto3DisenoLenguajes | 6d358e53054322818fe0c02934f52e84d945b5f7 | [
"MIT"
] | null | null | null |
import pickle
class parserAlejandro():
def __init__(self) -> None:
self.tokensScaneados = "" # los tokens leidos
self.tokensScaneadosV2 = []
self.tokensMapeados = ""
self.lastToken = ""
self.lookAheadToken = ""
self.leerTokensAndMap()
self.Parser()
def leerTokensAndMap(self):
infile = open("arrayTokensLeidos", 'rb')
self.tokensScaneados = pickle.load(infile)
infile.close()
infile = open("diccionarioTokensMapeados", 'rb')
self.tokensMapeados = pickle.load(infile)
infile.close()
for llave, valor in self.tokensMapeados.items():
for x in self.tokensScaneados:
valoresToken = x.getAllValues()
if(llave == valoresToken[0]):
x.setNumeracion(valor)
elif(valoresToken[0] == "ERROR" and (valoresToken[1] == llave)):
x.setNumeracion(valor)
for x in range(len(self.tokensScaneados)):
if(self.tokensScaneados[x].getNumeracion() != ""):
self.tokensScaneadosV2.append(self.tokensScaneados[x])
def Expect(self, tokenId):
if(self.lookAheadToken.getNumeracion() == tokenId):
#print("llamare un nuevo token con tokenID: ", tokenId)
self.GetNewToken()
else:
self.printERROROnScreen(tokenId)
def GetNewToken(self):
self.lastToken = self.lookAheadToken
if(len(self.tokensScaneadosV2) > 0):
self.lookAheadToken = self.tokensScaneadosV2.pop(0)
else:
self.lookAheadToken = self.lookAheadToken
def getNumber(self):
if(self.lookAheadToken.getValor() != "+" and self.lookAheadToken.getValor() != "-" and self.lookAheadToken.getValor() != "*" and self.lookAheadToken.getValor() != "/" and self.lookAheadToken.getValor() != ";"):
return int(self.lastToken.getValor())
else:
return self.lastToken.getValor()
def getVar(self):
return self.lookAheadToken.getValor()
def Expr(self):
self.StatSeq()
def StatSeq(self):
while self.lookAheadToken.getNumeracion() == 5 or self.lookAheadToken.getNumeracion() == 2 or self.lookAheadToken.getNumeracion() == 8:
self.Stat()
self.Expect(3)
def Stat(self):
value = 0
value = self.Expression(value)
print("El Resultado de la operacion es: ", value)
def Expression(self, result):
result1, result2 = 0, 0
result1 = self.Term(result1)
while self.lookAheadToken.getNumeracion() == 4 or self.lookAheadToken.getNumeracion() == 5:
if(self.lookAheadToken.getNumeracion() == 4):
self.Expect(4)
result2 = self.Term(result2)
result1 = int(result1)
result2 = int(result2)
result1 += result2
elif(self.lookAheadToken.getNumeracion() == 5):
self.Expect(5)
result2 = self.Term(result2)
result1 -= result2
result = result1
return result
def Term(self, result):
result1, result2 = 1, 1
result1 = self.Factor(result1)
while self.lookAheadToken.getNumeracion() == 6 or self.lookAheadToken.getNumeracion() == 7:
if(self.lookAheadToken.getNumeracion() == 6):
self.Expect(6)
result2 = self.Factor(result2)
result1 = int(result1)
result2 = int(result2)
result1 *= result2
elif(self.lookAheadToken.getNumeracion() == 7):
self.Expect(7)
result2 = self.Factor(result2)
result1 = int(result1)
result2 = int(result2)
result1 /= result2
result = result1
return result
def Factor(self, result):
sign = 1
if(self.lookAheadToken.getNumeracion() == 5):
self.Expect(5)
sign = -1
if(self.lookAheadToken.getNumeracion() == 2):
result = self.Number(result)
elif(self.lookAheadToken.getNumeracion() == 8):
self.Expect(8)
result = self.Expression(result)
self.Expect(9)
result *= sign
return result
def Number(self, result):
self.Expect(2)
result = self.getNumber()
return result
def Parser(self):
self.GetNewToken()
self.Expr()
def printERROROnScreen(self, tokenId):
for x in self.tokensScaneadosV2:
if(x.getNumeracion() == tokenId):
if(x.getTipoToken() == "ERROR"):
errorPrint = x.getValor()
print(f'{errorPrint} expected')
elif(x.getTipoToken() != "ERROR"):
errorPrint = x.getTipoToken()
print(f'{errorPrint} expected')
obj = parserAlejandro()
| 32.960265 | 218 | 0.560378 | 4,933 | 0.991159 | 0 | 0 | 0 | 0 | 0 | 0 | 257 | 0.051638 |
caffd2175e5188949d3d8483e90d7b9cd449fd73 | 957 | py | Python | humidityModule.py | HarvardURC/MATE_ROV | 30f7dc3a1caa6532e5cd44e3029de5353e79d656 | [
"MIT"
] | 1 | 2018-04-09T00:57:10.000Z | 2018-04-09T00:57:10.000Z | humidityModule.py | HarvardURC/MATE_ROV | 30f7dc3a1caa6532e5cd44e3029de5353e79d656 | [
"MIT"
] | 27 | 2017-10-03T02:02:32.000Z | 2018-04-09T19:44:41.000Z | humidityModule.py | HarvardURC/MATE_ROV | 30f7dc3a1caa6532e5cd44e3029de5353e79d656 | [
"MIT"
] | 1 | 2017-10-27T01:39:31.000Z | 2017-10-27T01:39:31.000Z | #!/usr/bin/env python3
import os, random
import Adafruit_DHT
import robomodules as rm
from messages import *
# Connection endpoint for the robomodules server, overridable via env vars.
ADDRESS = os.environ.get("BIND_ADDRESS","localhost")
PORT = os.environ.get("BIND_PORT", 11297)
FREQUENCY = 10  # module tick rate (ticks per second)
SENSOR_PIN1 = 21  # GPIO pin the DHT22 humidity sensor is wired to
class HumidityModule(rm.ProtoModule):
    """Robomodules module that samples a DHT22 sensor and publishes humidity."""

    def __init__(self, addr, port):
        self.subscriptions = []
        super().__init__(addr, port, message_buffers, MsgType, FREQUENCY)
        self.sensor = Adafruit_DHT.DHT22

    def msg_received(self, msg, msg_type):
        # This module only publishes; incoming messages are ignored.
        return

    def tick(self):
        # Poll the sensor; read_retry yields (None, None) when it fails.
        humidity, temperature = Adafruit_DHT.read_retry(self.sensor, SENSOR_PIN1)
        if humidity is None or temperature is None:
            return
        reading = HumidityMsg()
        reading.humidity = humidity
        self.write(reading.SerializeToString(), MsgType.HUMIDITY_MSG)
def main():
    """Construct the humidity module and hand control to its event loop."""
    HumidityModule(ADDRESS, PORT).run()


if __name__ == "__main__":
    main()
| 25.184211 | 81 | 0.667712 | 604 | 0.631139 | 0 | 0 | 0 | 0 | 0 | 0 | 68 | 0.071055 |
1b003975472ca4e747fcb857f7a8e214a5857ca2 | 3,643 | py | Python | src/towerData.py | jahrmarkt/TowerGame | 7d15b72a2dd7bdd1bb31795129d1261c021e07e4 | [
"MIT"
] | null | null | null | src/towerData.py | jahrmarkt/TowerGame | 7d15b72a2dd7bdd1bb31795129d1261c021e07e4 | [
"MIT"
] | null | null | null | src/towerData.py | jahrmarkt/TowerGame | 7d15b72a2dd7bdd1bb31795129d1261c021e07e4 | [
"MIT"
] | null | null | null |
from enum import Enum
from pygame.sprite import *
# Initialise pygame and its audio mixer, then open the main game window.
pygame.init()
pygame.mixer.init()
size = (width, height) = 512, 704  # window dimensions in pixels
screen = pygame.display.set_mode(size)
def loadTower(n):
    """Return the tower sprite named *n*, loaded with per-pixel alpha."""
    return pygame.image.load(
        "assets/sprites/towers/{}.png".format(n)).convert_alpha()
def loadProjectile(n):
    """Return the projectile sprite named *n*, loaded with per-pixel alpha."""
    return pygame.image.load(
        "assets/sprites/projectiles/{}.png".format(n)).convert_alpha()
class BoostType(Enum):
    """Kinds of buffs a support tower can grant (see towerList entries)."""
    SPEED = 1
    DAMAGE = 2
    RANGE = 3
    DOUBLE = 4
class TowerType(Enum):
    """Identifiers for the tower varieties catalogued in towerList."""
    FIRE = 0
    ROCKY = 1
    YELLOW = 2
    GREEN = 3
    REDROOF = 4
    HIGH = 5
    HIGHBLUE = 6
    HIGHBROWN = 7
    ROCKYBOOST = 8
    LIBRARY = 9
class ProjectileType(Enum):
    """Identifiers for the projectile varieties catalogued in projectileList."""
    FIRE = 0
    LIGHTNING1 = 1
    LIGHTNING2 = 2
    ARROW = 3
    POISON = 4
    BOMB = 5
# Full-size tower sprites drawn on the play field.
spriteTowerFire = loadTower('fire')
spriteTowerRocky = loadTower('rocky')
spriteTowerYellow = loadTower('yellow')
spriteTowerGreen = loadTower('green')
spriteTowerRedroof = loadTower('redroof')
spriteTowerHigh = loadTower('high')
spriteTowerHighblue = loadTower('highblue')
spriteTowerHighbrown = loadTower('highbrown')
spriteTowerRockyboost = loadTower('rockyboost')
spriteTowerLibrary = loadTower('library')

# 64x64 menu icons: scaled-down copies of the tower sprites.
iconTowerFire = pygame.transform.scale(loadTower('fire'), (64,64))
iconTowerRocky = pygame.transform.scale(loadTower('rocky'), (64,64))
iconTowerYellow = pygame.transform.scale(loadTower('yellow'), (64,64))
iconTowerGreen = pygame.transform.scale(loadTower('green'), (64,64))
iconTowerRedroof = pygame.transform.scale(loadTower('redroof'), (64,64))
iconTowerHigh = pygame.transform.scale(loadTower('high'), (64,64))
iconTowerHighblue = pygame.transform.scale(loadTower('highblue'), (64,64))
iconTowerHighbrown = pygame.transform.scale(loadTower('highbrown'), (64,64))
iconTowerRockyboost = pygame.transform.scale(loadTower('rockyboost'), (64,64))
iconTowerLibrary = pygame.transform.scale(loadTower('library'), (64,64))

# Projectile sprites.
spriteProjectileFire = loadProjectile('fire')
spriteProjectileLightning1 = loadProjectile('lightning1')
spriteProjectileLightning2 = loadProjectile('lightning2')
spriteProjectileArrow = loadProjectile('arrow')
spriteProjectilePoison = loadProjectile('poison')
spriteProjectileBomb = loadProjectile('bomb')
# Tower catalogue keyed by TowerType. Tuple fields appear to be:
# (sprite, numeric stat, numeric stat, is_support_tower, projectile type,
#  boost type, menu icon) -- the two numeric fields are unlabelled here
# (presumably cost/range or similar); confirm against the tower code.
towerList = \
    { TowerType.FIRE : (spriteTowerFire, 100, 15, False, ProjectileType.FIRE, None, iconTowerFire),
      TowerType.ROCKY : (spriteTowerRocky, 150, 30, False, ProjectileType.ARROW, None, iconTowerRocky),
      TowerType.YELLOW : (spriteTowerYellow, 150, 30, False, ProjectileType.LIGHTNING1, None, iconTowerYellow),
      TowerType.GREEN : (spriteTowerGreen, 150, 30, False, ProjectileType.POISON, None, iconTowerGreen),
      TowerType.REDROOF : (spriteTowerRedroof, 150, 30, False, ProjectileType.ARROW, None, iconTowerRedroof),
      TowerType.HIGH : (spriteTowerHigh, 10, 10, True, None, BoostType.DAMAGE, iconTowerHigh),
      TowerType.HIGHBLUE : (spriteTowerHighblue, 10, 10, True, None, BoostType.RANGE, iconTowerHighblue),
      TowerType.HIGHBROWN : (spriteTowerHighbrown, 10, 10, True, None, BoostType.RANGE, iconTowerHighbrown),
      TowerType.ROCKYBOOST : (spriteTowerRockyboost, 10, 10, True, None, BoostType.DAMAGE, iconTowerRockyboost),
      TowerType.LIBRARY : (spriteTowerLibrary, 10, 10, True, None, BoostType.DOUBLE, iconTowerLibrary),
    }

# Projectile catalogue keyed by ProjectileType: (sprite, number, number) --
# the numeric fields are unlabelled (presumably speed/damage); confirm.
projectileList = \
    { ProjectileType.FIRE : (spriteProjectileFire, 3, 4, ),
      ProjectileType.LIGHTNING1 : (spriteProjectileLightning1, 2, 1, ),
      ProjectileType.LIGHTNING2 : (spriteProjectileLightning2, 2, 1, ),
      ProjectileType.ARROW : (spriteProjectileArrow, 2, 1, ),
      ProjectileType.POISON : (spriteProjectilePoison, 3, 1, ),
      ProjectileType.BOMB : (spriteProjectileBomb, 1, 1, ),
    }
| 35.368932 | 108 | 0.747461 | 329 | 0.09031 | 0 | 0 | 0 | 0 | 0 | 0 | 286 | 0.078507 |
1b0104d9632c8d080c1827f39f0a95094b98554a | 657 | py | Python | lib/iputils.py | cmusatyalab/PyEdgeSim | edc13488d66ec9582543e5f116c6611ef7b65d9d | [
"Apache-2.0"
] | null | null | null | lib/iputils.py | cmusatyalab/PyEdgeSim | edc13488d66ec9582543e5f116c6611ef7b65d9d | [
"Apache-2.0"
] | null | null | null | lib/iputils.py | cmusatyalab/PyEdgeSim | edc13488d66ec9582543e5f116c6611ef7b65d9d | [
"Apache-2.0"
] | null | null | null | import math
def ipno2ipadd(ipno):
    """Convert a 32-bit integer into dotted-quad IPv4 text (NaN -> "0.0.0.0")."""
    if math.isnan(ipno):
        ipno = 0
    # Extract the four octets from most to least significant.
    octets = [int(ipno / (256 ** shift)) % 256 for shift in (3, 2, 1, 0)]
    return "{}.{}.{}.{}".format(*octets)
def ipadd2ipno(ipadd):
    """Convert a dotted-quad IPv4 string into its 32-bit integer value."""
    # Unpacking enforces exactly four '.'-separated fields (ValueError otherwise).
    w, x, y, z = (int(part) for part in ipadd.split('.'))
    return ((w * 256 + x) * 256 + y) * 256 + z
def isip(strin):
    """Return True iff *strin* is a dotted-quad IPv4 address.

    Args:
        strin: candidate string.
    Returns:
        bool: True when the string has exactly four '.'-separated
        non-negative decimal octets, each in the range 0..255.
    """
    parts = strin.split('.')
    if len(parts) != 4:
        return False
    for part in parts:
        if not part.isdigit():
            return False
        # BUG FIX: str.isdigit() accepts characters such as superscripts
        # ("\u00b2") that int() rejects, so the original int(x) could raise
        # ValueError instead of returning False. Parse defensively.
        try:
            value = int(part)
        except ValueError:
            return False
        if value > 255:
            return False
    return True
| 24.333333 | 72 | 0.479452 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 19 | 0.028919 |
1b02966f6bd91456827cf2718b30f4933e7e5684 | 480 | py | Python | example_app/blog/models.py | yswtrue/taggit-selectize | 7c417e5179629414d2ef8ed000b1d14d5980da5b | [
"BSD-3-Clause"
] | 77 | 2015-02-15T23:57:39.000Z | 2021-06-04T06:32:41.000Z | example_app/blog/models.py | yswtrue/taggit-selectize | 7c417e5179629414d2ef8ed000b1d14d5980da5b | [
"BSD-3-Clause"
] | 38 | 2015-02-16T08:11:27.000Z | 2021-11-11T15:08:19.000Z | example_app/blog/models.py | yswtrue/taggit-selectize | 7c417e5179629414d2ef8ed000b1d14d5980da5b | [
"BSD-3-Clause"
] | 29 | 2016-01-25T21:55:44.000Z | 2021-11-09T00:19:45.000Z | from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from taggit_selectize.managers import TaggableManager
@python_2_unicode_compatible
class Blog(models.Model):
    """Example blog post model exercising taggit-selectize tags."""
    title = models.CharField(max_length=255)
    # Unique URL key for the post.
    slug = models.SlugField(editable=True, max_length=255, unique=True)
    body = models.TextField()
    # Set once when the row is created.
    date = models.DateTimeField(auto_now_add=True)
    # Tags managed through the taggit/selectize widget.
    tags = TaggableManager()

    def __str__(self):
        return self.title
| 28.235294 | 71 | 0.76875 | 302 | 0.629167 | 0 | 0 | 331 | 0.689583 | 0 | 0 | 0 | 0 |
1b02c52eb8e50d7a9e3fc36190eaa0db8578b01f | 2,627 | py | Python | site_scons/site_tools/mongo_test_list.py | benety/mongo | 203430ac9559f82ca01e3cbb3b0e09149fec0835 | [
"Apache-2.0"
] | null | null | null | site_scons/site_tools/mongo_test_list.py | benety/mongo | 203430ac9559f82ca01e3cbb3b0e09149fec0835 | [
"Apache-2.0"
] | null | null | null | site_scons/site_tools/mongo_test_list.py | benety/mongo | 203430ac9559f82ca01e3cbb3b0e09149fec0835 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 MongoDB Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
"""Pseudo-builders for building test lists for Resmoke"""
import SCons
from collections import defaultdict
TEST_REGISTRY = defaultdict(list)
def register_test(env, file, test):
    """Register *test* into the dictionary of tests for *file*'s path.

    Args:
        env: the SCons construction environment.
        file: target test-list file (node or path string).
        test: test binary node to record.
    """
    # Prefer the auto-installed copy of the test binary when auto-install is on.
    test_path = test
    if env.get("AUTO_INSTALL_ENABLED", False) and env.GetAutoInstalledFiles(test):
        test_path = env.GetAutoInstalledFiles(test)[0]
    if SCons.Util.is_String(file):
        file = env.File(file)
    # The test-list file must be regenerated whenever the test binary changes.
    env.Depends(file, test_path)
    file_name = file.path
    TEST_REGISTRY[file_name].append(test_path)
    env.GenerateTestExecutionAliases(test)
def test_list_builder_action(env, target, source):
    """Build a test list used by resmoke.py to execute binary tests.

    Writes one line per registered test (plus any explicit *source* nodes)
    into the first *target* file.
    """
    if SCons.Util.is_String(target[0]):
        filename = env.subst(target[0])
    else:
        filename = target[0].path
    # Normalise sources to plain paths.
    source = [env.File(s).path if SCons.Util.is_String(s) else s.path for s in source]
    with open(filename, "w") as ofile:
        tests = TEST_REGISTRY[filename]
        if source:
            tests.extend(source)
        for s in tests:
            ofile.write("{}\n".format(str(s)))
# SCons builder that emits the registered tests for a target into a list file.
TEST_LIST_BUILDER = SCons.Builder.Builder(
    action=SCons.Action.FunctionAction(
        test_list_builder_action, {"cmdstr": "Generating $TARGETS"},
    )
)
def exists(env):
    """SCons tool-existence hook: this tool is always available."""
    return True
def generate(env):
    """SCons tool entry point: expose the registry and wire up builders."""
    env["MONGO_TEST_REGISTRY"] = TEST_REGISTRY
    env.Append(BUILDERS={"TestList": TEST_LIST_BUILDER})
    env.AddMethod(register_test, "RegisterTest")
1b0516b8ceec71b4a22f3512c68394f69f6ca430 | 36 | py | Python | Pluto/Systems/__init__.py | n8vm/Foton | eacec2de9bf53d8fecff387b60604e6227baea28 | [
"MIT"
] | 10 | 2019-12-16T18:04:48.000Z | 2021-05-06T00:40:11.000Z | Pluto/Systems/__init__.py | natevm/Foton | eacec2de9bf53d8fecff387b60604e6227baea28 | [
"MIT"
] | 35 | 2019-01-29T21:57:44.000Z | 2019-04-29T02:40:20.000Z | Pluto/Systems/__init__.py | natevm/Foton | eacec2de9bf53d8fecff387b60604e6227baea28 | [
"MIT"
] | 1 | 2019-01-19T22:34:00.000Z | 2019-01-19T22:34:00.000Z | from Pluto.Systems.Systems import *
| 18 | 35 | 0.805556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
1b0551d9765111fa7a1ec6da6ab967658f426c99 | 3,659 | py | Python | MULTITASK_FILES/TSM_FILES/scripts/anns-stats.py | egoodman92/semi-supervised-surgery | 42f7af7e707e71ecd64b9f215fab5c07e2b71d70 | [
"MIT"
] | null | null | null | MULTITASK_FILES/TSM_FILES/scripts/anns-stats.py | egoodman92/semi-supervised-surgery | 42f7af7e707e71ecd64b9f215fab5c07e2b71d70 | [
"MIT"
] | null | null | null | MULTITASK_FILES/TSM_FILES/scripts/anns-stats.py | egoodman92/semi-supervised-surgery | 42f7af7e707e71ecd64b9f215fab5c07e2b71d70 | [
"MIT"
] | null | null | null | import pandas as pd
import click
import statistics
def stats(ann_path):
df = pd.read_csv(ann_path)
df = df.sort_values(by=['video_id', 'start_seconds'])
if not 'label' in df.columns:
df['label'] = df['labeler_2']
df.loc[df['label'] == 'none', 'label'] = 'background'
df.loc[df['labeler_2'].isnull(), 'label'] = df['labeler_1']
df.loc[~df['labeler_3'].isnull(), 'label'] = df['labeler_3']
print("Number of records before dedupe: %d" % len(df))
df.drop_duplicates(subset=["video_id", "start_seconds", "end_seconds", "label"],
keep='first', inplace=True)
print("Number of records after dedupe: %d" % len(df))
label_counts = {}
label_seconds = {}
label_minutes = {}
label_avg_length = {}
label_median_length = {}
labels = ['cutting', 'tying', 'suturing', 'background', 'abstain']
#labels = list(set(df['label']))
for label in labels:
label_counts[label] = len(df[df['label'] == label])
label_seconds[label] = sum(df[df['label'] == label]['duration'])
label_minutes[label] = round(sum(df[df['label'] == label]['duration']) / 60, 1)
label_avg_length[label] = round(label_seconds[label] / label_counts[label], 1)
label_median_length[label] = statistics.median(list(df[df['label'] == label]['duration']))
video_ids = list(set(df['video_id']))
window_length = 10
label_changes_in_10sec_window = {}
labels_sandwiched = {}
for video_id in video_ids:
stats_df = df[df['video_id'] == video_id]
duration = int(stats_df.iloc[len(stats_df)-1]['end_seconds'])
windows = int(duration / window_length)
for i in range(windows):
start = i * window_length
end = (i+1) * window_length
results = stats_df[(stats_df['start_seconds'] >= start) & (stats_df['start_seconds'] < end) |
(stats_df['end_seconds'] > start) & (stats_df['end_seconds'] <= end)]
results = results[~results['label'].isin(['background', 'abstain'])]
labels = list(results['label'])
label_changes = 0
middle_label = None
if len(labels) > 0:
last_label = labels[0]
for l in labels[1:]:
if l != last_label:
label_changes += 1
if label_changes == 1:
middle_label = l
last_label = l
if middle_label and label_changes > 1:
if not middle_label in labels_sandwiched:
labels_sandwiched[middle_label] = 0
labels_sandwiched[middle_label] += 1
if not label_changes in label_changes_in_10sec_window:
label_changes_in_10sec_window[label_changes] = 0
label_changes_in_10sec_window[label_changes] += 1
label_changes_perc = {}
for k, v in label_changes_in_10sec_window.items():
label_changes_perc[k] = v / sum(label_changes_in_10sec_window.values())
all_stats = {}
all_stats['# Annotations'] = label_counts
all_stats['Total Minutes'] = label_minutes
all_stats['Avg Length (s)'] = label_avg_length
all_stats['Median Length (s)'] = label_median_length
all_stats['# Sandwiched in 10s Window'] = labels_sandwiched
stats_df = pd.DataFrame(all_stats)
stats_df = stats_df.transpose()
print(stats_df)
return stats_df
@click.command()
@click.option('--ann-path', default='../annotations/v0.5.0-anns-smoothed.csv')
def start(ann_path):
    """CLI entry point: print annotation statistics for the given CSV."""
    stats(ann_path)


if __name__ == '__main__':
    start()
| 38.515789 | 105 | 0.598524 | 0 | 0 | 0 | 0 | 136 | 0.037169 | 0 | 0 | 682 | 0.18639 |
1b069eee129e69f7b50529f8283d98c09f05a638 | 836 | py | Python | architecture/structures/fsm.py | TEKERone/VuelaBot | c333ba213a91fd2297b5bd2ee393226dcbb39c01 | [
"MIT"
] | null | null | null | architecture/structures/fsm.py | TEKERone/VuelaBot | c333ba213a91fd2297b5bd2ee393226dcbb39c01 | [
"MIT"
] | null | null | null | architecture/structures/fsm.py | TEKERone/VuelaBot | c333ba213a91fd2297b5bd2ee393226dcbb39c01 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from architecture.structures.states import State
from architecture.sensors.parsing import NLParser
class FSM:
    """Finite state machine driving the bot's dialogue flow."""

    def __init__(self, current_state, intent_data='data/intents2.csv'):
        # Compatibility check: the initial state must be a State instance.
        assert isinstance(current_state, State)
        # Current state of the machine.
        self.current_state = current_state
        # Intent parser used to classify user input during transitions.
        self.parser = NLParser(intent_data)

    def run(self):
        """Execute the behaviour of the current state."""
        self.current_state.run()

    def transition(self, user_input):
        """Advance to the next state chosen from the parsed user input."""
        self.current_state = self.current_state.get_next_state(user_input,
                                                               self.parser)
1b070569440283ace41e531ed1fdfe7ceba09609 | 2,919 | py | Python | generate_pre-test_confis.py | DirkZomerdijk/status | 299aca6986c0b274500c40613151d55aa98d5f52 | [
"MIT"
] | null | null | null | generate_pre-test_confis.py | DirkZomerdijk/status | 299aca6986c0b274500c40613151d55aa98d5f52 | [
"MIT"
] | null | null | null | generate_pre-test_confis.py | DirkZomerdijk/status | 299aca6986c0b274500c40613151d55aa98d5f52 | [
"MIT"
] | null | null | null | #%%
import numpy as np
import matplotlib.pyplot as plt
import json
import copy
# chronic_threshold
# repeats
# time
# stress_max
# Fixed simulation settings shared by every generated config file. The *_w
# entries are per-group population weights (they sum to 1.0 -- presumably
# population shares; confirm against the model). "parameters" is filled in
# per sample in __main__ below.
global_settings = {
    "data_file": "clean",
    "save_folder": "pre-test\\",
    "status_type": "linear",
    "distance_measure": "euclidean",
    "scenario": "normal",
    # "dutch_w": 0.664,
    # "turkish_w": 0.075,
    # "moroccan_w": 0.13,
    # "ghanaian_w": 0.021,
    # "suriname_w": 0.11,
    "dutch_w": 0.664,
    "turkish_w": 0.075,
    "moroccan_w": 0.13,
    "ghanaian_w": 0.021,
    "suriname_w": 0.11,
    "parameters": {}
}

# Sampling ranges for the swept parameters: "range" is [low, high] and
# "type" selects float ("f", uniform) or integer ("i", randint) sampling.
param_range = {
    "similarity_min" : {"range": [0.1, 1], "type": "f"},
    "ses_noise" : {"range": [0, 4], "type": "i"},
    # "vul_param" : {"range": [0.1,1], "type": "f"},
    "psr_param" : {"range": [0.1,1], "type": "f"},
    "recover_param" : {"range": [0.001, 0.1], "type": "f"},
    "prestige_beta" : {"range": [0.005, 0.05], "type": "f"},
    "prestige_param" : {"range": [0.1,1], "type": "f"},
    "stressor_param" : {"range": [0.1,1], "type": "f"},
    "interactions": {"range": [1,3], "type": "i"},
    "coping_noise" : {"range": [0.01, 0.1], "type": "f"},
}
if __name__ == "__main__":
    samples = 500
    configs = []

    # Draw all parameter samples up front: one array of `samples` values per key.
    param_samples = {}
    for k in param_range.keys():
        typ = param_range[k]['type']
        # BUG FIX: compare string content with ==, not identity (`is`), which
        # only worked by accident of CPython small-string interning.
        if typ == "f":
            params = np.array(np.random.uniform(param_range[k]['range'][0], param_range[k]['range'][1], samples))
        else:
            # NOTE(review): np.random.randint excludes the upper bound, so
            # integer ranges never yield their listed maximum -- confirm intent.
            params = np.array(np.random.randint(param_range[k]['range'][0], param_range[k]['range'][1], samples))
        param_samples[k] = params

    # Build one config dict per sample: sampled parameters plus fixed run
    # settings, overlaid on a deep copy of global_settings.
    for i in range(samples):
        print(i)
        config_settings = copy.deepcopy(global_settings)
        for k in param_range.keys():
            config_settings['parameters'][k] = [param_samples[k][i].item()]

        config_settings['parameters']['repeats'] = [10]
        config_settings['parameters']['population_size'] = [502]
        config_settings['parameters']['chronic_threshold'] = [0.0001]
        config_settings['parameters']['stress_max'] = [10000]
        config_settings['parameters']['time'] = [500]
        config_settings['parameters']['job_nr'] = [i]
        config_settings['parameters']['vul_param'] = [0]
        configs.append(config_settings)

    # Write each config to configs/pre-test/sample_NNN.json (zero-padded).
    for i, config in enumerate(configs):
        filenr = "{0:03}".format(i)
        with open('configs/pre-test/sample_'+str(filenr)+'.json', 'w') as outfile:
            json.dump(config, outfile)
# np.random.uniform(similarity_base[0], similarity_base[1])
# np.random.uniform(ses_noise[0], ses_noise[1])
# np.random.uniform(vul_param[0], vul_param[1])
# np.random.uniform(psr_param[0], psr_param[1])
# np.random.uniform(prestige_beta[0], prestige_beta[1])
# np.random.uniform(prestige_param[0], prestige_param[1])
# np.random.uniform(stressor_param[0], stressor_param[1])
# %%
| 33.170455 | 113 | 0.584447 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,309 | 0.448441 |
1b084957ce54dfc38011de897df30991e0e6f6c8 | 901 | py | Python | checkov/terraform/checks/resource/gcp/GoogleComputeBootDiskEncryption.py | shimont/checkov | 470e4998f3a0287cdb80b75a898927027c42e16b | [
"Apache-2.0"
] | null | null | null | checkov/terraform/checks/resource/gcp/GoogleComputeBootDiskEncryption.py | shimont/checkov | 470e4998f3a0287cdb80b75a898927027c42e16b | [
"Apache-2.0"
] | null | null | null | checkov/terraform/checks/resource/gcp/GoogleComputeBootDiskEncryption.py | shimont/checkov | 470e4998f3a0287cdb80b75a898927027c42e16b | [
"Apache-2.0"
] | null | null | null | from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
from checkov.common.models.enums import CheckResult, CheckCategories
class GoogleComputeBootDiskEncryption(BaseResourceCheck):
    """Flags google_compute_instance resources whose boot disk has neither a
    customer-supplied key nor a KMS key."""

    def __init__(self):
        super().__init__(
            name="Ensure VM disks for critical VMs are encrypted with CustomerSupplied Encryption Keys (CSEK)",
            id="CKV_GCP_38",
            categories=[CheckCategories.ENCRYPTION],
            supported_resources=['google_compute_instance'],
        )

    def scan_resource_conf(self, conf):
        """Return PASSED when the boot disk carries an encryption key."""
        if 'boot_disk' not in conf:
            return CheckResult.FAILED
        boot_disk = conf['boot_disk'][0]
        if 'disk_encryption_key_raw' in boot_disk or 'kms_key_self_link' in boot_disk:
            return CheckResult.PASSED
        return CheckResult.FAILED


check = GoogleComputeBootDiskEncryption()
| 40.954545 | 112 | 0.738069 | 700 | 0.776915 | 0 | 0 | 0 | 0 | 0 | 0 | 207 | 0.229745 |
1b0ad8cc1c1a79ccf74d4af235e3eba0c62262a2 | 3,008 | py | Python | siren.py | idgmatrix/coin | 2f2df0614ed4fc866d4b7715ee206081e08b9424 | [
"MIT"
] | 84 | 2021-04-10T17:25:31.000Z | 2022-03-24T10:45:08.000Z | siren.py | idgmatrix/coin | 2f2df0614ed4fc866d4b7715ee206081e08b9424 | [
"MIT"
] | null | null | null | siren.py | idgmatrix/coin | 2f2df0614ed4fc866d4b7715ee206081e08b9424 | [
"MIT"
] | 13 | 2021-04-22T09:28:48.000Z | 2022-03-24T11:46:02.000Z | # Based on https://github.com/lucidrains/siren-pytorch
import torch
from torch import nn
from math import sqrt
class Sine(nn.Module):
    """Sine activation f(x) = sin(w0 * x) with configurable frequency w0.

    Args:
        w0 (float): Omega_0 parameter from SIREN paper.
    """
    def __init__(self, w0=1.):
        super().__init__()
        self.w0 = w0

    def forward(self, x):
        return (self.w0 * x).sin()
class SirenLayer(nn.Module):
    """Implements a single SIREN layer.

    Args:
        dim_in (int): Dimension of input.
        dim_out (int): Dimension of output.
        w0 (float): Omega_0 frequency passed to the Sine activation.
        c (float): c value from SIREN paper used for weight initialization.
        is_first (bool): Whether this is first layer of model.
        use_bias (bool): Whether the linear map carries a bias term.
        activation (torch.nn.Module): Activation function. If None, defaults to
            Sine activation.
    """
    def __init__(self, dim_in, dim_out, w0=30., c=6., is_first=False,
                 use_bias=True, activation=None):
        super().__init__()
        self.dim_in = dim_in
        self.is_first = is_first

        self.linear = nn.Linear(dim_in, dim_out, bias=use_bias)

        # Initialize layers following SIREN paper: the first layer uses the
        # wider 1/dim_in bound, later layers use sqrt(c/dim_in)/w0.
        w_std = (1 / dim_in) if self.is_first else (sqrt(c / dim_in) / w0)
        nn.init.uniform_(self.linear.weight, -w_std, w_std)
        if use_bias:
            nn.init.uniform_(self.linear.bias, -w_std, w_std)

        self.activation = Sine(w0) if activation is None else activation

    def forward(self, x):
        out = self.linear(x)
        out = self.activation(out)
        return out
class Siren(nn.Module):
    """SIREN model: a stack of sine-activated linear layers.

    Args:
        dim_in (int): Dimension of input.
        dim_hidden (int): Dimension of hidden layers.
        dim_out (int): Dimension of output.
        num_layers (int): Number of hidden layers (a final output layer is
            appended on top of these).
        w0 (float): Omega 0 from SIREN paper.
        w0_initial (float): Omega 0 for first layer.
        use_bias (bool): Whether the linear maps carry bias terms.
        final_activation (torch.nn.Module): Activation for the output layer;
            defaults to identity (raw linear output).
    """
    def __init__(self, dim_in, dim_hidden, dim_out, num_layers, w0=30.,
                 w0_initial=30., use_bias=True, final_activation=None):
        super().__init__()
        layers = []
        for ind in range(num_layers):
            is_first = ind == 0
            # The first layer gets its own omega_0 and maps from dim_in;
            # all subsequent layers map hidden -> hidden.
            layer_w0 = w0_initial if is_first else w0
            layer_dim_in = dim_in if is_first else dim_hidden

            layers.append(SirenLayer(
                dim_in=layer_dim_in,
                dim_out=dim_hidden,
                w0=layer_w0,
                use_bias=use_bias,
                is_first=is_first
            ))

        self.net = nn.Sequential(*layers)

        final_activation = nn.Identity() if final_activation is None else final_activation
        self.last_layer = SirenLayer(dim_in=dim_hidden, dim_out=dim_out, w0=w0,
                                     use_bias=use_bias, activation=final_activation)

    def forward(self, x):
        x = self.net(x)
        return self.last_layer(x)
| 31.663158 | 90 | 0.602061 | 2,888 | 0.960106 | 0 | 0 | 0 | 0 | 0 | 0 | 1,042 | 0.34641 |
1b0b5385379c866640e35c8a01868562a900ce1f | 8,606 | py | Python | asset/google/cloud/asset_v1/proto/asset_service_pb2_grpc.py | conwaychriscosmo/google-cloud-python | 8e7b7f8a5f4bb04d13f4d88ec3848f017faf834a | [
"Apache-2.0"
] | 1 | 2019-03-26T21:44:51.000Z | 2019-03-26T21:44:51.000Z | asset/google/cloud/asset_v1/proto/asset_service_pb2_grpc.py | conwaychriscosmo/google-cloud-python | 8e7b7f8a5f4bb04d13f4d88ec3848f017faf834a | [
"Apache-2.0"
] | 40 | 2019-07-16T10:04:48.000Z | 2020-01-20T09:04:59.000Z | asset/google/cloud/asset_v1/proto/asset_service_pb2_grpc.py | conwaychriscosmo/google-cloud-python | 8e7b7f8a5f4bb04d13f4d88ec3848f017faf834a | [
"Apache-2.0"
] | 2 | 2019-07-18T00:05:31.000Z | 2019-11-27T14:17:22.000Z | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.cloud.asset_v1.proto import (
asset_service_pb2 as google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2,
)
from google.longrunning import (
operations_pb2 as google_dot_longrunning_dot_operations__pb2,
)
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
# NOTE: generated by the gRPC Python protocol compiler plugin -- do not edit
# the RPC wiring by hand; regenerate from the .proto instead.
class AssetServiceStub(object):
    """Asset service definition.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # One unary-unary callable per AssetService RPC method, each bound to
        # the matching protobuf request serializer / response deserializer.
        self.ExportAssets = channel.unary_unary(
            "/google.cloud.asset.v1.AssetService/ExportAssets",
            request_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.ExportAssetsRequest.SerializeToString,
            response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString,
        )
        self.BatchGetAssetsHistory = channel.unary_unary(
            "/google.cloud.asset.v1.AssetService/BatchGetAssetsHistory",
            request_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.BatchGetAssetsHistoryRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.BatchGetAssetsHistoryResponse.FromString,
        )
        self.CreateFeed = channel.unary_unary(
            "/google.cloud.asset.v1.AssetService/CreateFeed",
            request_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.CreateFeedRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.Feed.FromString,
        )
        self.GetFeed = channel.unary_unary(
            "/google.cloud.asset.v1.AssetService/GetFeed",
            request_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.GetFeedRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.Feed.FromString,
        )
        self.ListFeeds = channel.unary_unary(
            "/google.cloud.asset.v1.AssetService/ListFeeds",
            request_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.ListFeedsRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.ListFeedsResponse.FromString,
        )
        self.UpdateFeed = channel.unary_unary(
            "/google.cloud.asset.v1.AssetService/UpdateFeed",
            request_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.UpdateFeedRequest.SerializeToString,
            response_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.Feed.FromString,
        )
        self.DeleteFeed = channel.unary_unary(
            "/google.cloud.asset.v1.AssetService/DeleteFeed",
            request_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.DeleteFeedRequest.SerializeToString,
            response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
        )
# Generated servicer base class: subclasses override the RPC methods below;
# every default implementation reports UNIMPLEMENTED to the client.
class AssetServiceServicer(object):
    """Asset service definition.
    """

    def ExportAssets(self, request, context):
        """Exports assets with time and resource types to a given Cloud Storage
        location. The output format is newline-delimited JSON.
        This API implements the [google.longrunning.Operation][google.longrunning.Operation] API allowing you
        to keep track of the export.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def BatchGetAssetsHistory(self, request, context):
        """Batch gets the update history of assets that overlap a time window.
        For RESOURCE content, this API outputs history with asset in both
        non-delete or deleted status.
        For IAM_POLICY content, this API outputs history when the asset and its
        attached IAM POLICY both exist. This can create gaps in the output history.
        If a specified asset does not exist, this API returns an INVALID_ARGUMENT
        error.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def CreateFeed(self, request, context):
        """Creates a feed in a parent project/folder/organization to listen to its
        asset updates.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def GetFeed(self, request, context):
        """Gets details about an asset feed.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def ListFeeds(self, request, context):
        """Lists all asset feeds in a parent project/folder/organization.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def UpdateFeed(self, request, context):
        """Updates an asset feed configuration.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def DeleteFeed(self, request, context):
        """Deletes an asset feed.
        """
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")
def add_AssetServiceServicer_to_server(servicer, server):
    # Generated registration helper: wires each servicer method into the gRPC
    # server with the matching request deserializer / response serializer.
    rpc_method_handlers = {
        "ExportAssets": grpc.unary_unary_rpc_method_handler(
            servicer.ExportAssets,
            request_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.ExportAssetsRequest.FromString,
            response_serializer=google_dot_longrunning_dot_operations__pb2.Operation.SerializeToString,
        ),
        "BatchGetAssetsHistory": grpc.unary_unary_rpc_method_handler(
            servicer.BatchGetAssetsHistory,
            request_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.BatchGetAssetsHistoryRequest.FromString,
            response_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.BatchGetAssetsHistoryResponse.SerializeToString,
        ),
        "CreateFeed": grpc.unary_unary_rpc_method_handler(
            servicer.CreateFeed,
            request_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.CreateFeedRequest.FromString,
            response_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.Feed.SerializeToString,
        ),
        "GetFeed": grpc.unary_unary_rpc_method_handler(
            servicer.GetFeed,
            request_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.GetFeedRequest.FromString,
            response_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.Feed.SerializeToString,
        ),
        "ListFeeds": grpc.unary_unary_rpc_method_handler(
            servicer.ListFeeds,
            request_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.ListFeedsRequest.FromString,
            response_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.ListFeedsResponse.SerializeToString,
        ),
        "UpdateFeed": grpc.unary_unary_rpc_method_handler(
            servicer.UpdateFeed,
            request_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.UpdateFeedRequest.FromString,
            response_serializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.Feed.SerializeToString,
        ),
        "DeleteFeed": grpc.unary_unary_rpc_method_handler(
            servicer.DeleteFeed,
            request_deserializer=google_dot_cloud_dot_asset__v1_dot_proto_dot_asset__service__pb2.DeleteFeedRequest.FromString,
            response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
        ),
    }
    # Register all handlers under the fully-qualified service name.
    generic_handler = grpc.method_handlers_generic_handler(
        "google.cloud.asset.v1.AssetService", rpc_method_handlers
    )
    server.add_generic_rpc_handlers((generic_handler,))
| 51.843373 | 145 | 0.757146 | 5,440 | 0.632117 | 0 | 0 | 0 | 0 | 0 | 0 | 2,028 | 0.23565 |
1b0b98eac31f986426335fe4a1b639f5a2b189be | 52 | py | Python | sat_datetime/__init__.py | junhg0211/sat-datetime | 519700011c8d17e33fd844b53a6e17563bb2abae | [
"MIT"
] | null | null | null | sat_datetime/__init__.py | junhg0211/sat-datetime | 519700011c8d17e33fd844b53a6e17563bb2abae | [
"MIT"
] | null | null | null | sat_datetime/__init__.py | junhg0211/sat-datetime | 519700011c8d17e33fd844b53a6e17563bb2abae | [
"MIT"
] | null | null | null | from .sat_datetime import SatDatetime, SatTimedelta
| 26 | 51 | 0.865385 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
1b0be51bf909125818059e14554ee8ddc0df6805 | 357 | py | Python | src/main/python/cobra/mit/_loader.py | tm0nk/cobra-j | daeb85250e63cb21e9c02ecc2acfdf8ba1e44d6b | [
"Apache-2.0"
] | null | null | null | src/main/python/cobra/mit/_loader.py | tm0nk/cobra-j | daeb85250e63cb21e9c02ecc2acfdf8ba1e44d6b | [
"Apache-2.0"
] | null | null | null | src/main/python/cobra/mit/_loader.py | tm0nk/cobra-j | daeb85250e63cb21e9c02ecc2acfdf8ba1e44d6b | [
"Apache-2.0"
] | null | null | null | # Copyright (c) '2015' Cisco Systems, Inc. All Rights Reserved
import importlib
class ClassLoader(object):
    """Resolve fully-qualified dotted names to the objects they denote."""

    @classmethod
    def loadClass(cls, fqClassName):
        """Import the module part of *fqClassName* and return its attribute.

        E.g. ``"cobra.mit.session.LoginSession"`` imports ``cobra.mit.session``
        and returns its ``LoginSession`` attribute.  Raises ``ValueError`` if
        the name contains no dot, ``ImportError``/``AttributeError`` if the
        module or attribute does not exist.
        """
        dotted = str(fqClassName)
        module_path, attr_name = dotted.rsplit('.', 1)
        target_module = importlib.import_module(module_path)
        return getattr(target_module, attr_name)
| 27.461538 | 62 | 0.694678 | 273 | 0.764706 | 0 | 0 | 242 | 0.677871 | 0 | 0 | 65 | 0.182073 |
1b0c11892ef3aa133d021c6ddd9006dc0cf33060 | 2,367 | py | Python | Knil Dungeon/Dialog.py | WexyR/KNIL | b81c5d4025d1f4e1607b0e948c4611cff0fdbc2e | [
"MIT"
] | null | null | null | Knil Dungeon/Dialog.py | WexyR/KNIL | b81c5d4025d1f4e1607b0e948c4611cff0fdbc2e | [
"MIT"
] | null | null | null | Knil Dungeon/Dialog.py | WexyR/KNIL | b81c5d4025d1f4e1607b0e948c4611cff0fdbc2e | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import Menu
import Game
import Tools
def dialog_box_create(ROWS, COLS, percent_size_rows=0.25):
    """Build the description dict for a dialog box.

    The box occupies the bottom ``percent_size_rows`` fraction of the screen.

    Args:
        ROWS: total number of screen rows.
        COLS: total number of screen columns.
        percent_size_rows: fraction of the screen height used by the box
            (must be a non-negative float).

    Returns:
        dict with keys ``x``/``y`` (top-left corner), ``xlen``/``ylen``
        (usable text area) and the original ``ROWS``/``COLS``.
    """
    assert isinstance(percent_size_rows, float) and 0 <= percent_size_rows
    top_row = int((ROWS - 1) * (1 - percent_size_rows)) + 1
    return {
        "ylen": int((ROWS - 1) * percent_size_rows),
        "xlen": COLS - 4,  # leave room for the box border
        "COLS": COLS,
        "ROWS": ROWS,
        "x": 1,
        "y": top_row,
    }
def run_dialog(d, txt, speaker=''):
    """Display *txt* inside the dialog box *d*, page by page.

    Lines longer than the box width are wrapped (hyphenated when a word
    does not fit), and the wrapped lines are split into pages of at most
    ``d["ylen"]`` lines.  A blank line (``'\\n\\n'``) in *txt* forces a page
    break.  When *speaker* is given, its name is shown in bold/underline at
    the top of every page, which costs one text line per page.
    Each page is rendered through a single-entry Menu (Menu.run presumably
    blocks until the player dismisses the page — see the Menu module).
    """
    ROWS, COLS = d["ROWS"], d["COLS"]  # NOTE(review): ROWS is unpacked but never used
    pages = txt.split('\n\n')  # a blank line marks an explicit page break
    for page in pages:
        # Re-flow every logical line to the box width; resize_line returns
        # the wrapped chunks with '\n' already appended to each of them.
        resized_txt_lst = [resized_line for line in page.splitlines() for resized_line in resize_line(line, d["xlen"], '\n')]
        # One line per page is reserved for the speaker header, if any.
        for t in range(0, len(resized_txt_lst), d["ylen"]-int(bool(speaker))):
            text = "".join(resized_txt_lst[t:t+d["ylen"]-int(bool(speaker))])
            if speaker:
                text = Tools.reformat('<bold><underlined>{0} :</>\n'.format(speaker) + text)  # speaker name on top of each page, bold and underlined
            m = Menu.create([[(text, lambda: None)]], d["x"], d["y"], COLS, d['ylen']+2, text_align="left")  # one-cell menu used as a text page
            Menu.run(m)
def resize_line(line, size, carac='', pile=None):
    """Recursively split *line* into chunks of at most *size* characters.

    Chunks are appended to *pile* (created on first call), each followed by
    *carac*.  The cut is made at the last space inside the window whenever
    possible; otherwise the word is hyphenated: the current chunk ends with
    ``-`` and the continuation starts with ``-``.

    Args:
        line: the text to split (a single logical line).
        size: maximum chunk width; must be > 3 so hyphenation has room.
        carac: separator appended after every chunk (e.g. ``'\\n'``).
        pile: accumulator used by the recursion; leave as ``None``.

    Returns:
        list: *pile*, containing the chunks in order.
    """
    if pile is None:
        pile = []  # a mutable default would be shared across calls
    # `unicode` only exists on Python 2; referencing it unconditionally
    # raised NameError on Python 3.
    try:
        string_types = (str, unicode)  # noqa: F821 -- Python 2
    except NameError:
        string_types = (str,)
    assert isinstance(line, string_types)
    assert isinstance(size, int) and size > 3
    assert isinstance(pile, list)
    if len(line) <= size:
        # Base case: the remainder fits in one chunk.
        pile.append(line + carac)
        return pile
    # Try to break at the last space within the first `size` characters.
    head_part, space, remainder = line[:size + 1].rpartition(' ')
    if space:
        pile.append(head_part + carac)
        rest = remainder + line[size + 1:]
    else:
        # No space in the window: hyphenate mid-word.
        pile.append(line[:size - 1] + "-" + carac)
        rest = "-" + line[size - 1:]
    # The original ended with two redundant `return pile` statements after
    # the recursive call; returning the recursion directly is equivalent
    # because `pile` is shared down the call chain.
    return resize_line(rest, size, carac, pile)
| 38.177419 | 173 | 0.606253 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 745 | 0.313025 |
1b0d7458eb7f29bf8431f38d4ac67830ed31e880 | 4,283 | py | Python | auth/oauth.py | sensi277/rebble-auth-py | 298b7b15f0835d8379e0ef132a129940061a9ca1 | [
"MIT"
] | null | null | null | auth/oauth.py | sensi277/rebble-auth-py | 298b7b15f0835d8379e0ef132a129940061a9ca1 | [
"MIT"
] | null | null | null | auth/oauth.py | sensi277/rebble-auth-py | 298b7b15f0835d8379e0ef132a129940061a9ca1 | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
import logging
from flask import Blueprint, abort, render_template, request, redirect
from flask_oauthlib.provider import OAuth2Provider
from oauthlib.common import generate_token as generate_random_token
from flask_login import current_user, login_required
from auth.login.base import demand_pebble
from .models import db, IssuedToken, AuthClient, User
from .redis import client as redis
import json
oauth_bp = Blueprint('oauth_bp', __name__)
oauth = OAuth2Provider()
class Grant:
    """An OAuth2 authorization-code grant, stored transiently in redis as JSON."""

    def __init__(self, client_id, code, user_id, scopes, redirect_uri):
        self.client_id = client_id
        self.code = code
        self.user_id = user_id
        self.scopes = scopes
        self.redirect_uri = redirect_uri

    @classmethod
    def redis_key(cls, client_id, code):
        """Redis key under which the grant for (client, code) is stored."""
        return f'grant-{client_id}-{code}'

    @property
    def key(self):
        """Redis key for this grant instance."""
        return self.redis_key(self.client_id, self.code)

    @property
    def user(self):
        """Load the granting user from the database (raises if missing)."""
        return User.query.filter_by(id=self.user_id).one()

    def delete(self):
        """Remove this grant from redis."""
        redis.delete(self.key)

    def serialise(self):
        """Encode the grant as UTF-8 JSON bytes for storage in redis."""
        payload = [self.client_id, self.code, self.user_id,
                   self.scopes, self.redirect_uri]
        return json.dumps(payload).encode('utf-8')

    @classmethod
    def deserialise(cls, serialised):
        """Rebuild a Grant from bytes previously produced by serialise()."""
        fields = json.loads(serialised.decode('utf-8'))
        return cls(*fields)
@oauth.grantgetter
def load_grant(client_id, code):
    """Load a previously stored authorization grant from redis.

    NOTE(review): once the 100-second TTL lapses, ``redis.get`` returns
    ``None`` and ``Grant.deserialise`` will fail on ``.decode`` — presumably
    the provider surfaces that as an invalid grant; confirm.
    """
    return Grant.deserialise(redis.get(Grant.redis_key(client_id, code)))
@oauth.grantsetter
def set_grant(client_id, code, request, *args, **kwargs):
    """Persist a new authorization grant for the logged-in user.

    The grant is stored in redis with a 100-second expiry (SETEX), long
    enough for the client to exchange the code for a token.  Returns None
    (and logs an error) if no user is logged in.
    """
    if not current_user.is_authenticated:
        logging.error("Tried to set a grant for a user who is not logged in!?")
        return None
    grant = Grant(client_id, code['code'], current_user.id, request.scopes, request.redirect_uri)
    redis.setex(grant.key, 100, grant.serialise())
    return grant
@oauth.tokengetter
def get_token(access_token=None, refresh_token=None):
    """Look up a bearer token by access token or refresh token.

    Returns an IssuedToken, or None when nothing matches.  For a legacy
    Pebble token an unsaved IssuedToken with fixed scopes is synthesised
    on the fly (see comment below).
    """
    if access_token:
        # There are two valid 'tokens': ones we've issued, and the Pebble token.
        # Because we don't actually store the pebble token as an issued token, we have to
        # check for it here and invent a token if it's the one we tried to use.
        token = IssuedToken.query.filter_by(access_token=access_token).one_or_none()
        if token:
            return token
        user = User.query.filter_by(pebble_token=access_token).one_or_none()
        if user:
            return IssuedToken(access_token=access_token, refresh_token=None, expires=None, client_id=None, user=user,
                               scopes=['pebble', 'pebble_token', 'profile'])
    elif refresh_token:
        return IssuedToken.query.filter_by(refresh_token=refresh_token).one_or_none()
@oauth.tokensetter
def set_token(token, request, *args, **kwargs):
    """Persist a freshly issued OAuth token and return the stored row.

    NOTE(review): ``token.get('expires_in')`` would make ``timedelta`` fail
    if the library ever omitted it — presumably it is always present since
    OAUTH2_PROVIDER_TOKEN_EXPIRES_IN is configured in init_app; confirm.
    """
    expires_in = token.get('expires_in')
    expires = datetime.utcnow() + timedelta(seconds=expires_in)
    scopes = token['scope'].split(' ')
    token = IssuedToken(access_token=token['access_token'], refresh_token=token['refresh_token'], expires=expires,
                        client_id=request.client.client_id, user_id=request.user.id,
                        scopes=scopes)
    db.session.add(token)
    db.session.commit()
    return token
@oauth.clientgetter
def get_client(client_id):
    """Return the registered OAuth client; ``.one()`` raises if unknown."""
    return AuthClient.query.filter_by(client_id=client_id).one()
@oauth_bp.route('/authorise', methods=['GET', 'POST'])
@login_required
@oauth.authorize_handler
def authorise(*args, **kwargs):
    """Authorization endpoint: returning True auto-approves every request
    from a logged-in user without showing a consent screen."""
    return True
@oauth_bp.route('/token', methods=['GET', 'POST'])
@oauth.token_handler
def access_token():
    """Token endpoint: the token_handler decorator builds the full response;
    returning None adds no extra fields to it."""
    return None
@oauth_bp.route('/error')
def oauth_error():
    """Render the OAuth error page (HTTP 400) from the query-string
    ``error`` / ``error_description`` parameters."""
    return render_template('oauth-error.html',
                           error=request.args.get('error', 'unknown'),
                           error_description=request.args.get('error_description', '')), 400
def generate_token(request, refresh_token=False):
    """Generate a random token string; the same generator is used for both
    access and refresh tokens (the ``refresh_token`` flag is ignored)."""
    return generate_random_token()
def init_app(app):
    """Wire the OAuth provider into the Flask app: configure token lifetime
    and error endpoint, register the blueprint under /oauth, and exempt it
    from CSRF protection (OAuth clients cannot supply CSRF tokens)."""
    app.config['OAUTH2_PROVIDER_TOKEN_EXPIRES_IN'] = 315576000  # 10 years
    app.config['OAUTH2_PROVIDER_ERROR_ENDPOINT'] = 'oauth_bp.oauth_error'
    oauth.init_app(app)
    app.register_blueprint(oauth_bp, url_prefix='/oauth')
    app.extensions['csrf'].exempt(oauth_bp)
1b0d87e9e2bba7df5791ad78a281a75b6788eda5 | 10,061 | py | Python | datasets/creator.py | ionicsolutions/kokolores | 179db18d384ce31645bcce3506924ac235723309 | [
"MIT"
] | 1 | 2021-06-15T14:38:45.000Z | 2021-06-15T14:38:45.000Z | datasets/creator.py | ionicsolutions/kokolores | 179db18d384ce31645bcce3506924ac235723309 | [
"MIT"
] | null | null | null | datasets/creator.py | ionicsolutions/kokolores | 179db18d384ce31645bcce3506924ac235723309 | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2018 Kilian Kluge
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import bz2
import itertools
import json
import logging
import os
import time
import pymysql
import toolforge
from reverts import RevertDetector
from utils import load_query
# Load SQL query for all manually approved revisions
# based on https://quarry.wmflabs.org/query/27156
MANUALLY_APPROVED = load_query("queries/manually_approved.sql")
# Load SQL query for potentially reverted revisions
# based on https://quarry.wmflabs.org/query/27161
POTENTIALLY_REVERTED = load_query("queries/potentially_reverted.sql")
# Load SQL query for all revisions of a page
# based on https://quarry.wmflabs.org/query/27173
ALL_REVISIONS = load_query("queries/all_revisions.sql")
# Load SQL query to fetch all approved revisions of a page
ALL_APPROVED = load_query("queries/all_approved.sql")
# Load SQL query for a randomized batch of pages with flagged revision data
RANDOM_BATCH = load_query("queries/random_batch.sql")
# Load SQL query to fetch all pages with flagged revision data within
# a certain period of time
TIMEFRAME_BATCH = load_query("queries/timeframe_batch.sql")
class Creator:
    """Creator for Flagged Revision datasets.

    Each dataset entry is a 3-tuple `(int, bool, int)` where the
    first entry is the `rev_id` of the revision in question,
    the second entry indicates whether the revision was manually
    approved or rejected, and the third entry is the `rev_id` of
    the preceding approved revision. The latter was approved
    either manually or automatically.

    Dataset creation will work for all wikis where a substantial
    amount of revisions have been reviewed through the FlaggedRevs
    extension.
    """

    START_DEFAULT = 20080507000000  # FlaggedRevs started on May 6th 2008
    STOP_DEFAULT = int(time.strftime("%Y%m%d%H%M%S", time.gmtime())) + 1000000

    def __init__(self, db="dewiki_p", revert_detector=None):
        """Connect to the wiki replica database and set up revert detection.

        :param db: name of the replica database, e.g. "dewiki_p".
        :param revert_detector: optional pre-configured RevertDetector;
                                a new one for `db` is created if omitted.
        """
        self.logger = logging.getLogger("kokolores.creator")
        self.start = self.START_DEFAULT
        # Bug fix: `stop` previously defaulted to START_DEFAULT as well,
        # which made the default time frame empty; STOP_DEFAULT (now plus
        # a safety margin) was defined but never used.
        self.stop = self.STOP_DEFAULT

        if revert_detector is None:
            self.revert_detector = RevertDetector(db)
        else:
            self.revert_detector = revert_detector

        self.conn = toolforge.connect(db,
                                      cursorclass=pymysql.cursors.DictCursor)

    def create(self, size=None, start=None, stop=None, batch_size=1000,
               fname=None, resume=True, keep_tmp=True):
        """Generate a new dataset.

        :param size: Total size of the data set.
        :param start: Start of the time period.
        :param stop: End of the time period.
        :param batch_size: Number of pages to retrieve data for at once.
        :param fname: If given, store the dataset in this file (bzipped JSON)
                      and store a temporary copy after each batch. Useful for
                      large datasets.
        :param resume: If `True`, look for temporary files and resume dataset
                       creation.
        :param keep_tmp: If `True`, do not delete temporary files after
                         dataset creation is completed.

        :type size: int
        :type batch_size: int
        :type fname: str
        :type resume: bool
        :type keep_tmp: bool

        :return: A dataset with `size` entries.
        """
        if start is None:
            self.start = self.START_DEFAULT
        else:
            self.start = start

        # Bug fix: the default for `stop` was START_DEFAULT, producing an
        # empty time frame; it must be STOP_DEFAULT (i.e. "now").
        if stop is None:
            self.stop = self.STOP_DEFAULT
        else:
            self.stop = stop

        if self.start > self.stop:
            raise ValueError("Start (%d) has to come before stop (%d)"
                             % (self.start, self.stop))

        # Resume from temporary files if both exist and resume is requested.
        if fname is not None and os.path.exists("%s.tmp" % fname) \
                and os.path.exists("%s.pages.tmp" % fname) and resume:
            dataset = self._load("%s.tmp" % fname)
            pages = self._load("%s.pages.tmp" % fname)
        else:
            dataset = []
            pages = []

        # Without a target size, walk the whole time frame; otherwise
        # sample random pages until `size` entries are collected.
        if size is None:
            get_batch = self._get_timeframe_batch
        else:
            get_batch = self._get_random_batch

        for batch in get_batch(batch_size):
            self.logger.info("Processing batch of %d pages", len(batch))
            # Skip pages that were already processed (e.g. after a resume).
            batch = [page_id for page_id in batch
                     if page_id not in pages]
            pages.extend(batch)

            for page_id in batch:
                dataset.extend(self._find_approved(page_id))
                dataset.extend(self._find_reverted(page_id))

            self.logger.info("Dataset has %d entries now.", len(dataset))

            if size is not None:
                if len(dataset) >= size:
                    dataset = dataset[:size]
                    break

            if fname is not None:
                # Checkpoint after every batch so long runs can resume.
                self._store("%s.tmp" % fname, dataset)
                self._store("%s.pages.tmp" % fname, pages)

        if fname is not None:
            self._store(fname, dataset)
            if not keep_tmp:
                os.remove("%s.tmp" % fname)
                os.remove("%s.pages.tmp" % fname)

        return dataset

    def _store(self, fname, data):
        """Write `data` to `fname` as bzip2-compressed JSON."""
        self.logger.info("Writing data to file %s", fname)
        with bz2.open(fname, "wt") as f:
            json.dump(data, f)

    def _load(self, fname):
        """Load bzip2-compressed JSON data from `fname`."""
        self.logger.info("Loading data from file %s", fname)
        with bz2.open(fname, "rt") as f:
            return json.load(f)

    def _get_random_batch(self, size=1000):
        """Yield an endless stream of random page-id batches."""
        self.logger.info("Get random batch of size %d", size)
        while True:
            with self.conn.cursor() as cursor:
                cursor.execute(RANDOM_BATCH, {"num": size})
                self.conn.commit()
                batch = [int(item["page_id"]) for item in cursor.fetchall()]
            self.logger.info("Prepared batch of %d pages", len(batch))
            yield batch

    def _get_timeframe_batch(self, size=1000):
        """Yield batches of all page ids with flagged-revision data in the
        configured time frame, paging through with OFFSET/LIMIT."""
        self.logger.info("Get all pages in timeframe %d to %d",
                         self.start, self.stop)
        for offset in itertools.count(step=size):
            with self.conn.cursor() as cursor:
                cursor.execute(TIMEFRAME_BATCH, {"start": self.start,
                                                 "stop": self.stop,
                                                 "offset": offset,
                                                 "number": size})
                self.conn.commit()
                batch = [int(item["page_id"]) for item in cursor.fetchall()]
            if len(batch) < 1:
                # No more pages in the time frame.
                break
            self.logger.info("Prepared timeframe batch of %d pages", len(batch))
            yield batch

    def _find_approved(self, page_id):
        """Find all manually approved revisions of a page."""
        with self.conn.cursor() as cursor:
            cursor.execute(MANUALLY_APPROVED, {"page_id": page_id,
                                               "start": self.start,
                                               "stop": self.stop})
            self.conn.commit()
            for item in cursor.fetchall():
                yield (item["rev_id"], True, item["rev_parent"])

    def _find_reverted(self, page_id):
        """Find all revisions of a page which were reverted (rejected).

        Returns a list of dataset entries produced by the RevertDetector
        from the page's candidate, full, and approved revision histories.
        """
        with self.conn.cursor() as cursor:
            # Find all revisions which were neither auto-approved
            # nor manually approved
            cursor.execute(POTENTIALLY_REVERTED, {"page_id": page_id,
                                                  "start": self.start,
                                                  "stop": self.stop})
            self.conn.commit()
            result = cursor.fetchall()
            candidates = [item["rev_id"] for item in result]

            if not candidates:
                return []

            # Retrieve all revisions of the page for revert detection
            cursor.execute(ALL_REVISIONS, {"page_id": page_id,
                                           "from_rev": min(candidates),
                                           "start": self.start,
                                           "stop": self.stop})
            self.conn.commit()
            result = cursor.fetchall()
            all_revisions = [item["rev_id"] for item in result]

            # Retrieve all approved revisions of the page
            cursor.execute(ALL_APPROVED, {"page_id": page_id,
                                          "from_rev": min(candidates),
                                          "start": self.start,
                                          "stop": self.stop})
            self.conn.commit()
            result = cursor.fetchall()
            all_approved_revisions = [item["rev_id"] for item in result]

        return self.revert_detector.get_all(all_revisions, candidates,
                                            all_approved_revisions)

    def __del__(self):
        self.logger.info("Closing database connection")
        self.conn.close()
| 40.405622 | 80 | 0.587516 | 7,880 | 0.783222 | 1,707 | 0.169665 | 0 | 0 | 0 | 0 | 4,158 | 0.413279 |
1b0d9662f2b05d2ad83bc0fe148544efb0eb5d9a | 2,492 | py | Python | 04-Working-With-Dataframes/4.Exercise_ Distinct Articles.py | RodriGonca/DP-203-Data-Engineer | ef8e81bd4bda1e285c2e43714368d46be3ad041b | [
"MIT"
] | null | null | null | 04-Working-With-Dataframes/4.Exercise_ Distinct Articles.py | RodriGonca/DP-203-Data-Engineer | ef8e81bd4bda1e285c2e43714368d46be3ad041b | [
"MIT"
] | null | null | null | 04-Working-With-Dataframes/4.Exercise_ Distinct Articles.py | RodriGonca/DP-203-Data-Engineer | ef8e81bd4bda1e285c2e43714368d46be3ad041b | [
"MIT"
] | null | null | null | # Databricks notebook source
# MAGIC %md
# MAGIC # Introduction to DataFrames Lab
# MAGIC ## Distinct Articles
# COMMAND ----------
# MAGIC %md
# MAGIC ## Instructions
# MAGIC
# MAGIC In the cell provided below, write the code necessary to count the number of distinct articles in our data set.
# MAGIC 0. Copy and paste all you like from the previous notebook.
# MAGIC 0. Read in our parquet files.
# MAGIC 0. Apply the necessary transformations.
# MAGIC 0. Assign the count to the variable `totalArticles`
# MAGIC 0. Run the last cell to verify that the data was loaded correctly.
# MAGIC
# MAGIC **Bonus**
# MAGIC
# MAGIC If you recall from the beginning of the previous notebook, the act of reading in our parquet files will trigger a job.
# MAGIC 0. Define a schema that matches the data we are working with.
# MAGIC 0. Update the read operation to use the schema.
# COMMAND ----------
# MAGIC %md
# MAGIC ## Getting Started
# MAGIC
# MAGIC Run the following cell to configure our "classroom."
# COMMAND ----------
# MAGIC %run "./Includes/Classroom-Setup"
# COMMAND ----------
# MAGIC %md
# MAGIC ## Show Your Work
# COMMAND ----------
# Resolve the lab's Azure blob source and register its SAS token with
# Spark so the storage account can be read directly.
(source, sasEntity, sasToken) = getAzureDataSource()
spark.conf.set(sasEntity, sasToken)
# Parquet staging directory holding the cleaned English pagecounts.
path = source + "/wikipedia/pagecounts/staging_parquet_en_only_clean/"
# COMMAND ----------
# TODO
# Replace <<FILL_IN>> with your code.
df = (spark # Our SparkSession & Entry Point
.read # Our DataFrameReader
<<FILL_IN>> # Read in the parquet files
<<FILL_IN>> # Reduce the columns to just the one
<<FILL_IN>> # Produce a unique set of values
)
totalArticles = df.<<FILL_IN>> # Identify the total number of records remaining.
print("Distinct Articles: {0:,}".format(totalArticles))
# COMMAND ----------
# MAGIC %md
# MAGIC ## Verify Your Work
# MAGIC Run the following cell to verify that your `DataFrame` was created properly.
# COMMAND ----------
# Known answer for this dataset; the assertion fails with both values shown.
expected = 1783138
assert totalArticles == expected, "Expected the total to be " + str(expected) + " but found " + str(totalArticles)
| 34.611111 | 126 | 0.6874 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,010 | 0.806581 |
1b0dc54f280a6c3892e4a75f5fe210b433cdd5a8 | 12,098 | py | Python | schema_validators.py | iamtemazhe/excel_API | e800aa6b19c56093765a2c155677b5f118a0fc2f | [
"MIT"
] | null | null | null | schema_validators.py | iamtemazhe/excel_API | e800aa6b19c56093765a2c155677b5f118a0fc2f | [
"MIT"
] | null | null | null | schema_validators.py | iamtemazhe/excel_API | e800aa6b19c56093765a2c155677b5f118a0fc2f | [
"MIT"
] | null | null | null | from re import compile as re_compile
from urllib.parse import parse_qs, quote_plus, urlencode, urlparse, urlunparse
from marshmallow import ValidationError
from .base_validators import BaseError, BaseValidator
class ValidError(BaseError):
    """Factory of marshmallow ``ValidationError``s with Russian user-facing
    messages.

    Error codes come from ``BaseError``'s ``get_empty``/``get_incorrect``/
    ``get_missing`` helpers; the ``True`` argument presumably marks the
    "required field" variants — confirm against BaseError.
    """

    @classmethod
    def field_is_missing(cls, field: str, field_name: str) -> ValidationError:
        """Alias: a missing field is reported like an empty required one."""
        return cls.required_field_is_empty(field, field_name)

    @classmethod
    def field_is_empty(cls, field: str, field_name: str,
            is_required: bool=False) -> ValidationError:
        """Error for an empty field; stricter wording when it is required."""
        if is_required:
            return cls.required_field_is_empty(field, field_name)
        else:
            return ValidationError(
                f'Поле "{field}" не может быть пустым.',
                field_name, field, error_code=cls.get_empty()
            )

    @classmethod
    def field_incorrect_value(cls, field: str, field_name: str,
            value = None) -> ValidationError:
        """Error for a field holding an invalid value (value shown if given)."""
        postfix = f': "{value}".' if value else '.'
        return ValidationError(
            f'Поле "{field}" содержит недопустимое значение{postfix}',
            field_name, field, error_code=cls.get_incorrect()
        )

    @classmethod
    def id_incorrect_value(cls, field: str, field_name: str,
            value) -> ValidationError:
        """Error for an identifier that is not a non-negative int/str number."""
        return ValidationError(
            (
                f'Поле "{field}" содержит недопустимое значение. '
                f'Должно быть: число >= 0 в формате int или str. '
                f'Имеется: {value}.'
            ),
            field_name, field, value=value, error_code=cls.get_incorrect(True)
        )

    @classmethod
    def obj_not_exist(cls, field: str, field_name: str,
            id: int or str) -> ValidationError:
        # NOTE(review): the `int or str` annotation evaluates to just `int`.
        """Error for a referenced object id that does not exist."""
        return ValidationError(
            f'{field_name} с id="{id}" не существует.',
            field_name, field, error_code=cls.get_missing()
        )

    @classmethod
    def obj_data_not_exist(cls, obj_name: str, obj_id,
            field_name: str, field: str, field_id) -> ValidationError:
        """Error for a nested reference (field of `obj`) that does not exist."""
        return ValidationError(
            (f'{obj_name} = "{obj_id}": '
             f'{field_name} с id = "{field_id}" не существует.'),
            field_name, field, error_code=cls.get_missing()
        )

    @classmethod
    def required_field_is_empty(cls, field: str,
            field_name: str) -> ValidationError:
        """Error for a required field that is present but empty."""
        return ValidationError(
            'Обязательный параметр не может быть пустым.',
            field_name, field, error_code=cls.get_empty(True)
        )

    @classmethod
    def required_field_is_missing(cls, field: str,
            field_name: str) -> ValidationError:
        """Error for a required field that is absent from the payload."""
        return ValidationError(
            'Отсутствует обязательный параметр.',
            field_name, field, error_code=cls.get_missing(True)
        )

    @classmethod
    def obj_is_already_exist(cls, field: str, obj_name: str,
            value) -> ValidationError:
        """Error for an attempt to create a duplicate object."""
        return ValidationError(
            (f'{obj_name} с {field}="{value}" уже существует. '
             'Запрещено добавлять дубликаты.'),
            obj_name, field
        )
class SchemaValidator(BaseValidator):
    """Field-level validators for request payloads.

    Each validator reads ``data[field]``, normalizes the value in place
    (``data[field]`` is overwritten with the parsed result) and returns
    the final value via ``data.get(field)``.  Invalid values raise a
    ``ValidError``-built ``ValidationError``.
    """

    @classmethod
    def is_field_exist(cls, data: dict, field: str, field_name: str) -> object:
        """Check that a required field exists and is non-empty.

        Args:
            data (dict): dict of payload fields.
            field (str): key of the field in the schema.
            field_name (str): display name of the field.

        Returns:
            object: the validated value.

        Raises:
            ValidError: if the field is missing or empty.
        """
        try:
            field_data = data[field]
            if field_data is None:
                raise ValidError.required_field_is_empty(field, field_name)
            if isinstance(field_data, str):
                field_data = cls.parse_string(field_data)
            # Falsy values are still allowed when they are digits (e.g. 0).
            if not field_data and not str(field_data).isdigit():
                raise ValidError.required_field_is_empty(field, field_name)
        except KeyError:
            raise ValidError.required_field_is_missing(field, field_name)
        else:
            data[field] = field_data
        return data.get(field)

    @classmethod
    def str_field(cls, data: dict, field: str, field_name: str,
            is_required: bool=False) -> str:
        """Validate a string field.

        Replaces the original value with the parsed string.

        Args:
            data (dict): dict of payload fields.
            field (str): key of the field in the schema.
            field_name (str): display name of the field.
            is_required (bool, optional): whether the field is mandatory
                (default: False).

        Returns:
            str: the validated value.

        Raises:
            ValidError: if the value is not a string (or is missing while
                required).
        """
        try:
            field_data = data[field]
            if field_data is None:
                raise KeyError
            # parse_string raises AttributeError for non-string input.
            field_data = cls.parse_string(field_data)
            if not field_data:
                raise KeyError
        except AttributeError:
            raise ValidError.field_incorrect_value(field, field_name,
                                                   data.get(field))
        except KeyError:
            if is_required:
                raise ValidError.required_field_is_missing(field, field_name)
        else:
            data[field] = field_data
        return data.get(field)

    @classmethod
    def url_field(cls, data: dict, field: str, field_name: str,
            is_required: bool=False) -> str:
        """Validate a URL field.

        Trailing slashes are stripped, the query string is re-encoded, and
        the result must match an ftp/http(s) URL pattern (Cyrillic domains
        allowed).  Replaces the original value with the normalized URL.

        Args:
            data (dict): dict of payload fields.
            field (str): key of the field in the schema.
            field_name (str): display name of the field.
            is_required (bool, optional): whether the field is mandatory
                (default: False).

        Returns:
            str: the validated value.

        Raises:
            ValidError: if the value is not a valid URL.
        """
        try:
            url = data[field]
            if url is None:
                raise KeyError
            url = cls.parse_string(url)
            if not url:
                raise KeyError
            # Drop all trailing slashes before validation.
            while url[-1] == '/':
                url = url[:-1]
            # A full URL (scheme://host) contains at least two slashes.
            if url.count('/') < 2:
                raise AttributeError
            try:
                parsed_url = urlparse(url)
            # NOTE(review): bare except maps any urlparse failure to
            # "incorrect value".
            except:
                raise AttributeError
            else:
                if parsed_url.query:
                    # Re-encode the query string in canonical form.
                    new_url = list(parsed_url)
                    new_url[4] = urlencode(parse_qs(parsed_url.query),
                                           quote_plus)
                    url = urlunparse(new_url)
            pattern = re_compile((
                r'(ftp|https?):\/\/(www\.)?'
                r'[^\s\\\/\*\^|&\!\?()\{\}\[\]:;\'"%$\+=`]{1,256}'
                r'\.[a-zA-Z0-9-а-яёА-ЯЁ()]{1,10}(:[0-9]{2,6})?(\/.*)?$'
            ))
            if not pattern.search(url):
                raise AttributeError
        except AttributeError:
            raise ValidError.field_incorrect_value(field, field_name,
                                                   data.get(field))
        except KeyError:
            if is_required:
                raise ValidError.required_field_is_missing(field, field_name)
        else:
            data[field] = url
        return data.get(field)

    @classmethod
    def url(cls, url: str) -> str:
        """Convenience wrapper: validate a bare URL string as required."""
        return cls.url_field({'url': url}, 'url', 'Ссылка', is_required = True)

    @classmethod
    def id_field(cls, data: dict, field: str, field_name: str,
            is_required: bool=False):
        """Validate a field holding one identifier or a collection of them.

        Scalars (str/int) must be a single non-negative integer; other
        iterables are deduplicated, sorted and written back in their
        original container type.

        Args:
            data (dict): dict of payload fields.
            field (str): key of the field in the schema.
            field_name (str): display name of the field.
            is_required (bool, optional): whether the field is mandatory
                (default: False).

        Returns:
            any: the validated value.

        Raises:
            ValidError: if any identifier is not a non-negative integer.
        """
        try:
            field_data = data[field]
            if field_data is None:
                raise KeyError
            # Remember the incoming container type to restore it on output.
            data_type = type(field_data)
            if isinstance(field_data, str):
                field_data = [int(field_data), ]
            elif isinstance(field_data, int):
                field_data = [field_data, ]
            else:
                field_data = set([int(v) for v in field_data])
            if not len(field_data):
                raise KeyError
            for v in field_data:
                if v < 0:
                    raise ValueError
        except (ValueError, TypeError):
            raise ValidError.id_incorrect_value(field, field_name,
                                                data.get(field))
        except KeyError:
            if is_required:
                raise ValidError.required_field_is_missing(field, field_name)
        else:
            if data_type in (str, int):
                data[field] = data_type(field_data.pop())
            else:
                data[field] = sorted(data_type(field_data))
        return data.get(field)

    @classmethod
    def class_field(cls, data: dict, field: str, field_name: str,
            FieldClass, is_required: bool=False, to_type: bool=False) -> dict:
        """Validate a field whose values must be keys of ``FieldClass.ALL``.

        Args:
            data (dict): dict containing the field.
            FieldClass (object): enumeration-like class with an ``ALL`` dict.
            field (str): key of the field in the schema.
            field_name (str): display name of the field.
            is_required (bool, optional): whether the field is mandatory
                (default: False).
            to_type (bool, optional): coerce values to the key type of
                ``FieldClass.ALL`` instead of the incoming type
                (default: False).

        Returns:
            dict: the updated value stored back into ``data``.

        Raises:
            ValidError: if a value is not a valid ``FieldClass`` key.
        """
        try:
            field_data = data[field]
            if field_data is None:
                raise KeyError
            # With to_type, an empty string explicitly resets the field.
            if to_type and field_data == '':
                data[field] = None
                raise KeyError
            data_type = type(field_data)
            # Coerce values to the type of FieldClass.ALL's keys.
            key_type = type(list(FieldClass.ALL.keys())[0])
            if data_type in (str, int):
                field_data = [key_type(field_data), ]
            else:
                field_data = set([key_type(v) for v in field_data])
            if not len(field_data):
                raise KeyError
            # Every value must be a known FieldClass key.
            try:
                [FieldClass.ALL[v] for v in field_data]
            except KeyError:
                raise ValueError
        except (ValueError, TypeError):
            raise ValidError.field_incorrect_value(field, field_name,
                                                   data.get(field))
        except KeyError:
            if is_required:
                raise ValidError.required_field_is_missing(field, field_name)
        else:
            if data_type in (str, int):
                if to_type:
                    data[field] = field_data.pop()
                else:
                    data[field] = data_type(field_data.pop())
            else:
                if to_type:
                    data[field] = sorted(list(field_data))
                else:
                    data[field] = sorted(data_type(field_data))
        return data.get(field)
| 33.41989 | 80 | 0.538684 | 13,282 | 0.983852 | 0 | 0 | 13,129 | 0.972519 | 0 | 0 | 4,764 | 0.352889 |
1b0f00c8a4f0b6ea6e4be35b7b0a28aaec35303d | 3,501 | py | Python | _CNDB/_OMP/Net_Credentials.py | CNDB/CNDB | 2e3a41111f604cf2f4f22a7c9370bb3f753e3e88 | [
"BSD-3-Clause"
] | null | null | null | _CNDB/_OMP/Net_Credentials.py | CNDB/CNDB | 2e3a41111f604cf2f4f22a7c9370bb3f753e3e88 | [
"BSD-3-Clause"
] | null | null | null | _CNDB/_OMP/Net_Credentials.py | CNDB/CNDB | 2e3a41111f604cf2f4f22a7c9370bb3f753e3e88 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2012-2014 Mag. Christian Tanzer All rights reserved
# Glasauergasse 32, A--1130 Wien, Austria. tanzer@swing.co.at
# #*** <License> ************************************************************#
# This module is part of the package CNDB.OMP.
#
# This module is licensed under the terms of the BSD 3-Clause License
# <http://www.c-tanzer.at/license/bsd_3c.html>.
# #*** </License> ***********************************************************#
#
#++
# Name
# CNDB.OMP.Net_Credentials
#
# Purpose
# Model credentials for a network interface
#
# Revision Dates
# 14-Mar-2012 (CT) Creation
# 6-Dec-2012 (RS) Add `belongs_to_node`, add `max_links`
# 15-May-2013 (CT) Replace `auto_cache` by `link_ref_attr_name`
# 20-May-2013 (CT) Set `_Net_Credentials_.left.link_ref_suffix` to `None`
# 13-Aug-2013 (CT) Add `key.typ`
# 30-Sep-2013 (CT) Mixin `Belongs_to_Node_Left`, not `Belongs_to_Node`
# 14-Apr-2014 (CT) Add mixin `Belongs_to_Net_Device_Left`
# ««revision-date»»···
#--
from __future__ import absolute_import, division, print_function, unicode_literals
from _MOM.import_MOM import *
from _MOM.import_MOM import _A_String_Ascii_
from _CNDB import CNDB
import _CNDB._OMP
import _CNDB._OMP.Net_Interface
import _CNDB._OMP.Belongs_to_Net_Device
import _CNDB._OMP.Belongs_to_Node
from _TFL.Regexp import Regexp, re
# MOM model wiring: partial base link with node/device ownership mixins.
_Ancestor_Essence = CNDB.OMP.Link1
_Mixin_1 = CNDB.OMP.Belongs_to_Node_Left
_Mixin_2 = CNDB.OMP.Belongs_to_Net_Device_Left

class _Net_Credentials_ (_Mixin_1, _Mixin_2, _Ancestor_Essence) :
    """Model credentials used by a Net_Interface, e.g., `802.1x`
       authentication for a wired interface, or WPA authentication for a WiFi
       interface.
    """

    is_partial = True

    class _Attributes \
            ( _Mixin_1._Attributes
            , _Mixin_2._Attributes
            , _Ancestor_Essence._Attributes
            ) :

        _Ancestor = _Ancestor_Essence._Attributes

        ### Primary attributes

        class left (_Ancestor.left) :
            """The network interface using these credentials."""

            role_type          = CNDB.OMP.Net_Interface
            role_name          = "interface"
            link_ref_attr_name = "credentials"
            link_ref_suffix    = None
            # At most one credentials object per interface.
            max_links          = 1

        # end class left

        ### *** BEWARE ***
        ### To ensure that a `Net_Interface` has only one `credentials`, no
        ### other essential primary key attributes must be defined here or by
        ### derived classes

    # end class _Attributes

# end class _Net_Credentials_
# Concrete credentials type derived from the partial base above.
_Ancestor_Essence = _Net_Credentials_

class WPA_Credentials (_Ancestor_Essence) :
    """Model credentials necessary for WPA authentication."""

    class _Attributes (_Ancestor_Essence._Attributes) :

        _Ancestor = _Ancestor_Essence._Attributes

        class key (Eval_Mixin, _A_String_Ascii_) :
            """Key used for WPA authentication."""

            kind       = Attr.Required
            max_length = 32
            typ        = "Key"

            ### allow characters up to "\xFF"
            _cooked_re = Regexp \
                ( "^[\x00-\xFF]*$"
                , re.VERBOSE
                )

        # end class key

    # end class _Attributes

# end class WPA_Credentials
# Export the module's public names into the CNDB.OMP namespace when imported.
if __name__ != "__main__" :
    CNDB.OMP._Export ("*")
### __END__ CNDB.OMP.Net_Credentials
| 30.443478 | 85 | 0.608683 | 1,757 | 0.500855 | 0 | 0 | 0 | 0 | 0 | 0 | 1,773 | 0.505416 |
1b1163b5c1c6d08f8b1ea7b7d3e6b064f710268e | 1,283 | py | Python | 52_最长不含重复字符的子字符串.py | wode1/-offer-python- | 22cb55b7a89f48c7d3686f0a7a822c16ff3976f1 | [
"Apache-2.0"
] | null | null | null | 52_最长不含重复字符的子字符串.py | wode1/-offer-python- | 22cb55b7a89f48c7d3686f0a7a822c16ff3976f1 | [
"Apache-2.0"
] | null | null | null | 52_最长不含重复字符的子字符串.py | wode1/-offer-python- | 22cb55b7a89f48c7d3686f0a7a822c16ff3976f1 | [
"Apache-2.0"
] | null | null | null | '''
从字符串中找出一个最长的不包含重复字符的子字符串,计算该最长子字符串的长度。假设字符串中只包含'a'~'z的字符。例如,在字
符串"arabcacfr"中,最长的不含重复字符的子字符串是"acfr",长度为4。
'''
class Solution:
    """Length of the longest substring without repeated characters.

    Example: in "arabcacfr" the longest duplicate-free substring is "acfr",
    so the answer is 4.
    """

    def lengthOfLongestSubstring(self, s):
        """Return the length of the longest substring of *s* with no repeats.

        Sliding window with a map of each character's last index: when the
        current character already occurs inside the window, the window's
        left edge jumps past that occurrence.  O(len(s)) time instead of
        the original O(n^2) `char in s[head:tail]` scan.
        """
        max_len = 0
        head = 0  # left edge of the current duplicate-free window
        last_seen = {}  # char -> index of its most recent occurrence
        for tail, ch in enumerate(s):
            if ch in last_seen and last_seen[ch] >= head:
                # ch repeats inside the window: move head past its last use.
                head = last_seen[ch] + 1
            last_seen[ch] = tail
            max_len = max(max_len, tail - head + 1)
        return max_len

    # The two misspelled variants below were byte-for-byte duplicates of the
    # method above; they are kept as delegating aliases for backward
    # compatibility with existing callers.
    def lengthOfLongestSubdtring1(self, s):
        """Deprecated alias of lengthOfLongestSubstring (kept for callers)."""
        return self.lengthOfLongestSubstring(s)

    def lengthOfLongestSubdtring2(self, s):
        """Deprecated alias of lengthOfLongestSubstring (kept for callers)."""
        return self.lengthOfLongestSubstring(s)
s=Solution()
print(s.lengthOfLongestSubdtring2("yyabcdabjcabceg")) # cdabj
| 12.578431 | 62 | 0.631333 | 1,292 | 0.763142 | 0 | 0 | 0 | 0 | 0 | 0 | 697 | 0.411695 |
1b12e4960541d556c4b0e852acd3330b2c80755e | 541 | py | Python | capybara_tw/__init__.py | capybara-translation/capybara-tw | 5b588b6a29c17f33a94b7fe1a368fc92ad383953 | [
"MIT"
] | null | null | null | capybara_tw/__init__.py | capybara-translation/capybara-tw | 5b588b6a29c17f33a94b7fe1a368fc92ad383953 | [
"MIT"
] | null | null | null | capybara_tw/__init__.py | capybara-translation/capybara-tw | 5b588b6a29c17f33a94b7fe1a368fc92ad383953 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import sys
from PyQt5.QtWidgets import QApplication
from capybara_tw.app import MainWindow
# Application metadata registered with Qt at startup (used by QSettings,
# window titles, etc.).
__version__ = '0.1'
__application_name__ = 'Capybara Translation Workbench'
__organization_name__ = 'Capybara Translation'
def run():
    """Create the Qt application, register its metadata, show the main
    window, and enter the event loop (exits the process when Qt quits)."""
    application = QApplication(sys.argv)
    application.setApplicationName(__application_name__)
    application.setOrganizationName(__organization_name__)
    application.setApplicationVersion(__version__)
    main_window = MainWindow()
    main_window.show()
    sys.exit(application.exec_())


if __name__ == '__main__':
    run()
| 23.521739 | 55 | 0.757856 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 91 | 0.168207 |
1b1460a957da9eb21eaa0d16c51c3543620bbed5 | 5,394 | py | Python | app/car/tests/test_cars_api.py | raszidzie/cars-api | f212d59722fe37f3b2fc7f7f9d064c2a614a9d20 | [
"MIT"
] | 1 | 2021-02-05T07:10:55.000Z | 2021-02-05T07:10:55.000Z | app/car/tests/test_cars_api.py | raszidzie/cars-api | f212d59722fe37f3b2fc7f7f9d064c2a614a9d20 | [
"MIT"
] | null | null | null | app/car/tests/test_cars_api.py | raszidzie/cars-api | f212d59722fe37f3b2fc7f7f9d064c2a614a9d20 | [
"MIT"
] | null | null | null | import requests
from django.urls import reverse
from django.test import TestCase
from django.db.models import Count
from django.shortcuts import get_object_or_404
from rest_framework import status
from rest_framework.test import APIClient
from core.models import Car, Rate
from car.serializers import CarSerializer, PopularCarSerializer
# Endpoints under test, resolved from the app's URL configuration.
CAR_URL = reverse('car:cars-list')
POPULAR_CAR_URL = reverse('car:popular-list')
# NHTSA vPIC endpoints that the API validates car makes/models against.
CAR_MAKE_EXTERNAL_API = 'https://vpic.nhtsa.dot.gov/api/vehicles/getallmakes?format=json'
CAR_MODEL_EXTERNAL_API = 'https://vpic.nhtsa.dot.gov/api/vehicles/getmodelsformake/{}?format=json'
def sample_car(**params):
    """Create and return a sample car; ``params`` override the defaults."""
    fields = {'make_name': 'HONDA', 'model_name': 'Accord', **params}
    return Car.objects.create(**fields)
class PublicCarApiTests(TestCase):
    """Test the publicly available cars API (no authentication required)."""
    def setUp(self):
        self.client = APIClient()
    def test_retrieve_car_list(self):
        """Test retrieving a list of cars"""
        sample_car()
        sample_car()
        res = self.client.get(CAR_URL)
        # Expected payload: all cars serialized in the view's ordering.
        cars = Car.objects.all().order_by('-make_name')
        serializer = CarSerializer(cars, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(res.data, serializer.data)
    def test_create_car_successful(self):
        """Test a new car creation was successful by checking External API"""
        # NOTE(review): this hits the real NHTSA API through the view —
        # network availability affects the outcome.
        payload = {
            'make_name': 'ASTON MARTIN',
            'model_name': 'V8 Vantage',
        }
        self.client.post(CAR_URL, payload)
        exists = Car.objects.filter(
            make_name=payload['make_name'],
            model_name=payload['model_name']
        ).exists()
        self.assertTrue(exists)
    def test_create_car_with_lowercase(self):
        """Test a new car creation with a lowercase make name"""
        payload = {
            'make_name': 'aston martin',
            'model_name': 'V8 Vantage',
        }
        res = self.client.post(CAR_URL, payload)
        self.assertEqual(res.status_code, status.HTTP_201_CREATED)
    def test_create_car_invalid(self):
        """Test a new car creation fails with missing fields"""
        payload = {'model_name': ''}
        res = self.client.post(CAR_URL, payload)
        self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
    def test_car_average_rate_value(self):
        """Test average rate value for a particular car"""
        car = sample_car()
        Rate.objects.create(car=car, rate=3)
        Rate.objects.create(car=car, rate=5)
        res = self.client.get(CAR_URL)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        # (3 + 5) / 2 == 4
        self.assertEqual(res.data[0]['rating'], 4)
    def test_car_default_rate_value(self):
        """Test default rate value for a car with no rates yet"""
        sample_car()
        res = self.client.get(CAR_URL)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(res.data[0]['rating'], 0)
    def test_retrieve_popular_cars(self):
        """Test retrieving popular cars ordered by number of rates"""
        car1 = sample_car(make_name="BMW", model_name="M4")
        car2 = sample_car(make_name="Mercedes", model_name="Benz")
        # car1 gets three rates, car2 two — car1 should rank first.
        Rate.objects.create(car=car1, rate=4)
        Rate.objects.create(car=car1, rate=2)
        Rate.objects.create(car=car1, rate=2)
        Rate.objects.create(car=car2, rate=3)
        Rate.objects.create(car=car2, rate=5)
        res = self.client.get(POPULAR_CAR_URL)
        ordered_queryset = Rate.objects.filter(car_id=car1.id).values('car').annotate(total_rates=Count('car'))
        popular_cars = [get_object_or_404(Car, id=item['car']) for item in ordered_queryset]
        serializer = PopularCarSerializer(popular_cars, many=True)
        self.assertEqual(res.status_code, status.HTTP_200_OK)
        self.assertEqual(res.data[0], serializer.data[0])
class ExternalCarApiTests(TestCase):
    """Tests for the external NHTSA vehicle API.

    These tests hit the live vPIC endpoints and therefore require network
    access.
    """
    def setUp(self):
        self.client = APIClient()
    def test_car_in_vehicles(self):
        """Test requested car exists inside the external API data"""
        payload = {
            'make_name': 'ASTON MARTIN',
            'model_name': 'V8 Vantage',
        }
        car_makes_res = requests.get(CAR_MAKE_EXTERNAL_API).json()
        # next(..., None) avoids an unhandled StopIteration if the make
        # is missing and lets us assert its presence explicitly.
        car_make = next((item for item in car_makes_res['Results']
                         if item["Make_Name"] == payload['make_name']), None)
        self.assertIsNotNone(car_make)
        car_models_res = requests.get(
            CAR_MODEL_EXTERNAL_API.format(car_make['Make_Name'])).json()
        car_model = next((item for item in car_models_res['Results']
                          if item["Model_Name"] == payload['model_name']), None)
        self.assertIsNotNone(car_model)
        self.assertIn(car_model['Make_Name'], payload['make_name'])
    def test_car_not_in_vehicles(self):
        """Test requested car does NOT exist inside the external API data"""
        payload = {
            'make_name': 'Test Make',
            'model_name': 'Test Model',
        }
        car_makes_res = requests.get(CAR_MAKE_EXTERNAL_API).json()
        # Previously a StopIteration from next() was swallowed and the
        # method returned early, so the assertion never ran; using a
        # default of None makes the absence check explicit.
        car_make = next((item for item in car_makes_res['Results']
                         if item["Make_Name"] == payload['make_name']), None)
        self.assertIsNone(car_make)
1b15ea98329556d9deeac5fd4430a13ee99a5e45 | 833 | py | Python | setup.py | 84KaliPleXon3/bni-api | 402b33b04b99481d9c2c152a19f1db14f8562826 | [
"MIT"
] | 9 | 2019-06-28T14:11:48.000Z | 2022-03-15T01:10:52.000Z | setup.py | p4kl0nc4t/bni-api | 402b33b04b99481d9c2c152a19f1db14f8562826 | [
"MIT"
] | 39 | 2019-06-28T15:41:39.000Z | 2020-05-17T20:37:46.000Z | setup.py | 84KaliPleXon3/bni-api | 402b33b04b99481d9c2c152a19f1db14f8562826 | [
"MIT"
] | 3 | 2019-07-04T14:37:02.000Z | 2019-10-02T13:39:20.000Z | import setuptools
import re
# Use the README as the PyPI long description.
with open('README.md', 'r') as fh:
    long_description = fh.read()
def get_version():
    """Extract the package version string from bni_api/__init__.py."""
    with open('bni_api/__init__.py') as init_file:
        source = init_file.read()
    matches = re.findall(r'__version__ = \'(.+?)\'', source)
    return matches[0]
# Package metadata for PyPI distribution.
setuptools.setup(
    name="bni_api",
    version=get_version(),
    author="loncat",
    author_email="me@lcat.dev",
    description=
    "A Python wrapper for some of BNI's internet banking functionalities.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/p4kl0nc4t/bni_api",
    packages=setuptools.find_packages(),
    install_requires=['requests', 'requests_html'],
    classifiers=[
        "Programming Language :: Python :: 3",
        # "License :: MIT License" is not a valid Trove classifier and is
        # rejected by PyPI; the official spelling is used instead.
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
) | 28.724138 | 75 | 0.654262 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 336 | 0.403361 |
1b1607f0ebb0c870703e285c64a547163919d409 | 1,184 | py | Python | example/controller/tests/testing/foo.py | donghak-shin/dp-tornado | 095bb293661af35cce5f917d8a2228d273489496 | [
"MIT"
] | 18 | 2015-04-07T14:28:39.000Z | 2020-02-08T14:03:38.000Z | example/controller/tests/testing/foo.py | donghak-shin/dp-tornado | 095bb293661af35cce5f917d8a2228d273489496 | [
"MIT"
] | 7 | 2016-10-05T05:14:06.000Z | 2021-05-20T02:07:22.000Z | example/controller/tests/testing/foo.py | donghak-shin/dp-tornado | 095bb293661af35cce5f917d8a2228d273489496 | [
"MIT"
] | 11 | 2015-12-15T09:49:39.000Z | 2021-09-06T18:38:21.000Z | # -*- coding: utf-8 -*-
from dp_tornado.engine.controller import Controller
class FooController(Controller):
def post(self):
"""
.. test::
expect(text='foo==bar', params={'foo': 'bar'})
expect(code=400, params={'foo': 'foo'})
expect(
code=200,
text='done')
!expect(code=400, text='foo==bar', params={'foo': 'baz'})
"""
if self.get_argument('foo') == 'bar':
return self.finish('foo==bar')
elif self.get_argument('foo') == 'foo':
return self.finish_with_error(400)
self.finish('done')
def get(self, a, b):
"""
.. test::
expect(text='30', args=(10, 20), params={'foo': 'bar'})
"""
a = int(a)
b = int(b)
self.finish(str(a + b))
def put(self, a, b):
"""
.. test::
expect(code=200, args=(10, 20))
expect(json={'a': 10, 'b': 20}, args=(10, 20))
"""
a = int(a)
b = int(b)
self.finish({
'a': a,
'b': b
})
| 23.215686 | 73 | 0.407095 | 1,103 | 0.931588 | 0 | 0 | 0 | 0 | 0 | 0 | 639 | 0.539696 |
1b16d8170d438512d59dc6d2288c379436227f3d | 9,668 | py | Python | pytorch3d/implicitron/dataset/dataset_zoo.py | JulianKnodt/pytorch3d | d28a50e70aae62355e6e52908d3172c5e9175ba8 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | pytorch3d/implicitron/dataset/dataset_zoo.py | JulianKnodt/pytorch3d | d28a50e70aae62355e6e52908d3172c5e9175ba8 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | pytorch3d/implicitron/dataset/dataset_zoo.py | JulianKnodt/pytorch3d | d28a50e70aae62355e6e52908d3172c5e9175ba8 | [
"MIT",
"BSD-3-Clause"
] | null | null | null | # Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import copy
import json
import os
from typing import Any, Dict, List, Optional, Sequence
from iopath.common.file_io import PathManager
from .implicitron_dataset import ImplicitronDataset, ImplicitronDatasetBase
from .utils import (
DATASET_TYPE_KNOWN,
DATASET_TYPE_TEST,
DATASET_TYPE_TRAIN,
DATASET_TYPE_UNKNOWN,
)
# TODO from dataset.dataset_configs import DATASET_CONFIGS
# Default per-frame loading options forwarded to the ImplicitronDataset
# constructor (see `aux_dataset_kwargs` in `dataset_zoo`).
DATASET_CONFIGS: Dict[str, Dict[str, Any]] = {
    "default": {
        "box_crop": True,
        "box_crop_context": 0.3,
        "image_width": 800,
        "image_height": 800,
        "remove_empty_masks": True,
    }
}
# fmt: off
# The CO3D object category names.
CO3D_CATEGORIES: List[str] = list(reversed([
    "baseballbat", "banana", "bicycle", "microwave", "tv",
    "cellphone", "toilet", "hairdryer", "couch", "kite", "pizza",
    "umbrella", "wineglass", "laptop",
    "hotdog", "stopsign", "frisbee", "baseballglove",
    "cup", "parkingmeter", "backpack", "toyplane", "toybus",
    "handbag", "chair", "keyboard", "car", "motorcycle",
    "carrot", "bottle", "sandwich", "remote", "bowl", "skateboard",
    "toaster", "mouse", "toytrain", "book", "toytruck",
    "orange", "broccoli", "plant", "teddybear",
    "suitcase", "bench", "ball", "cake",
    "vase", "hydrant", "apple", "donut",
]))
# fmt: on
# Default dataset location, taken from the CO3D_DATASET_ROOT env variable.
_CO3D_DATASET_ROOT: str = os.getenv("CO3D_DATASET_ROOT", "")
def dataset_zoo(
    dataset_name: str = "co3d_singlesequence",
    dataset_root: str = _CO3D_DATASET_ROOT,
    category: str = "DEFAULT",
    limit_to: int = -1,
    limit_sequences_to: int = -1,
    n_frames_per_sequence: int = -1,
    test_on_train: bool = False,
    load_point_clouds: bool = False,
    mask_images: bool = False,
    mask_depths: bool = False,
    restrict_sequence_name: Sequence[str] = (),
    test_restrict_sequence_id: int = -1,
    assert_single_seq: bool = False,
    only_test_set: bool = False,
    aux_dataset_kwargs: dict = DATASET_CONFIGS["default"],
    path_manager: Optional[PathManager] = None,
) -> Dict[str, ImplicitronDatasetBase]:
    """
    Generates the training / validation and testing dataset objects.
    Args:
        dataset_name: The name of the returned dataset.
        dataset_root: The root folder of the dataset.
        category: The object category of the dataset.
        limit_to: Limit the dataset to the first #limit_to frames.
        limit_sequences_to: Limit the dataset to the first
            #limit_sequences_to sequences.
        n_frames_per_sequence: Randomly sample #n_frames_per_sequence frames
            in each sequence.
        test_on_train: Construct validation and test datasets from
            the training subset.
        load_point_clouds: Enable returning scene point clouds from the dataset.
        mask_images: Mask the loaded images with segmentation masks.
        mask_depths: Mask the loaded depths with segmentation masks.
        restrict_sequence_name: Restrict the dataset sequences to the ones
            present in the given list of names.
        test_restrict_sequence_id: The ID of the loaded sequence.
            Active for dataset_name='co3d_singlesequence'.
        assert_single_seq: Assert that only frames from a single sequence
            are present in all generated datasets.
        only_test_set: Load only the test set.
        aux_dataset_kwargs: Specifies additional arguments to the
            ImplicitronDataset constructor call.
    Returns:
        datasets: A dictionary containing the
            `"dataset_subset_name": torch_dataset_object` key, value pairs.
    """
    # Copy the mutable arguments so the shared defaults are never mutated.
    restrict_sequence_name = tuple(restrict_sequence_name)
    aux_dataset_kwargs = dict(aux_dataset_kwargs)
    datasets = {}
    # TODO:
    # - implement loading multiple categories
    if dataset_name in ["co3d_singlesequence", "co3d_multisequence"]:
        # This maps the common names of the dataset subsets ("train"/"val"/"test")
        # to the names of the subsets in the CO3D dataset.
        set_names_mapping = _get_co3d_set_names_mapping(
            dataset_name,
            test_on_train,
            only_test_set,
        )
        # load the evaluation batches
        task = dataset_name.split("_")[-1]
        batch_indices_path = os.path.join(
            dataset_root,
            category,
            f"eval_batches_{task}.json",
        )
        if not os.path.isfile(batch_indices_path):
            # The batch indices file does not exist.
            # Most probably the user has not specified the root folder.
            raise ValueError("Please specify a correct dataset_root folder.")
        with open(batch_indices_path, "r") as f:
            eval_batch_index = json.load(f)
        if task == "singlesequence":
            assert (
                test_restrict_sequence_id is not None and test_restrict_sequence_id >= 0
            ), (
                "Please specify an integer id 'test_restrict_sequence_id'"
                + " of the sequence considered for 'singlesequence'"
                + " training and evaluation."
            )
            assert len(restrict_sequence_name) == 0, (
                "For the 'singlesequence' task, the restrict_sequence_name has"
                " to be unset while test_restrict_sequence_id has to be set to an"
                " integer defining the order of the evaluation sequence."
            )
            # a sort-stable set() equivalent:
            # (dict preserves insertion order, so duplicates collapse while
            # the first-seen order of sequence names is kept)
            eval_batches_sequence_names = list(
                {b[0][0]: None for b in eval_batch_index}.keys()
            )
            eval_sequence_name = eval_batches_sequence_names[test_restrict_sequence_id]
            # Keep only eval batches belonging to the selected sequence.
            eval_batch_index = [
                b for b in eval_batch_index if b[0][0] == eval_sequence_name
            ]
            # overwrite the restrict_sequence_name
            restrict_sequence_name = [eval_sequence_name]
        for dataset, subsets in set_names_mapping.items():
            frame_file = os.path.join(dataset_root, category, "frame_annotations.jgz")
            assert os.path.isfile(frame_file)
            sequence_file = os.path.join(
                dataset_root, category, "sequence_annotations.jgz"
            )
            assert os.path.isfile(sequence_file)
            subset_lists_file = os.path.join(dataset_root, category, "set_lists.json")
            assert os.path.isfile(subset_lists_file)
            # TODO: maybe directly in param list
            params = {
                **copy.deepcopy(aux_dataset_kwargs),
                "frame_annotations_file": frame_file,
                "sequence_annotations_file": sequence_file,
                "subset_lists_file": subset_lists_file,
                "dataset_root": dataset_root,
                "limit_to": limit_to,
                "limit_sequences_to": limit_sequences_to,
                # Frame subsampling only applies to the training split.
                "n_frames_per_sequence": n_frames_per_sequence
                if dataset == "train"
                else -1,
                "subsets": subsets,
                "load_point_clouds": load_point_clouds,
                "mask_images": mask_images,
                "mask_depths": mask_depths,
                "pick_sequence": restrict_sequence_name,
                "path_manager": path_manager,
            }
            datasets[dataset] = ImplicitronDataset(**params)
            if dataset == "test":
                if len(restrict_sequence_name) > 0:
                    eval_batch_index = [
                        b for b in eval_batch_index if b[0][0] in restrict_sequence_name
                    ]
                datasets[dataset].eval_batches = datasets[
                    dataset
                ].seq_frame_index_to_dataset_index(eval_batch_index)
        if assert_single_seq:
            # check theres only one sequence in all datasets
            assert (
                len(
                    {
                        e["frame_annotation"].sequence_name
                        for dset in datasets.values()
                        for e in dset.frame_annots
                    }
                )
                <= 1
            ), "Multiple sequences loaded but expected one"
    else:
        raise ValueError(f"Unsupported dataset: {dataset_name}")
    if test_on_train:
        # Reuse the train dataset object for val/test (evaluation on train).
        datasets["val"] = datasets["train"]
        datasets["test"] = datasets["train"]
    return datasets
def _get_co3d_set_names_mapping(
    dataset_name: str,
    test_on_train: bool,
    only_test: bool,
) -> Dict[str, List[str]]:
    """
    Returns the mapping of the common dataset subset names ("train"/"val"/"test")
    to the names of the corresponding subsets in the CO3D dataset
    ("test_known"/"test_unseen"/"train_known"/"train_unseen").
    """
    is_single_sequence = dataset_name == "co3d_singlesequence"
    mapping: Dict[str, List[str]] = {}
    if not only_test:
        # Single-sequence training reads from the CO3D "test" split.
        train_prefix = DATASET_TYPE_TEST if is_single_sequence else DATASET_TYPE_TRAIN
        mapping["train"] = [train_prefix + "_" + DATASET_TYPE_KNOWN]
    if not test_on_train:
        eval_prefixes = [DATASET_TYPE_TEST]
        if not is_single_sequence:
            eval_prefixes.append(DATASET_TYPE_TRAIN)
        eval_subsets = [
            prefix + "_" + suffix
            for prefix in eval_prefixes
            for suffix in (DATASET_TYPE_KNOWN, DATASET_TYPE_UNKNOWN)
        ]
        # val and test each get their own (independent) list.
        for subset in ("val", "test"):
            mapping[subset] = list(eval_subsets)
    return mapping
| 36.760456 | 88 | 0.608399 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,992 | 0.412909 |
1b1717b44e9148ae97616955dad12e1104067a0e | 1,147 | py | Python | sh/scripts/view_faucet_account_keys.py | goral09/stests | 4de26485535cadf1b708188a7133a976536ccba3 | [
"Apache-2.0"
] | 4 | 2020-03-10T15:28:17.000Z | 2021-10-02T11:41:17.000Z | sh/scripts/view_faucet_account_keys.py | goral09/stests | 4de26485535cadf1b708188a7133a976536ccba3 | [
"Apache-2.0"
] | 1 | 2020-03-25T11:31:44.000Z | 2020-03-25T11:31:44.000Z | sh/scripts/view_faucet_account_keys.py | goral09/stests | 4de26485535cadf1b708188a7133a976536ccba3 | [
"Apache-2.0"
] | 9 | 2020-02-25T18:43:42.000Z | 2021-08-10T17:08:42.000Z | import argparse
from stests.core import cache
from stests.core import factory
from stests.core.utils import args_validator
from stests.core.utils import cli as utils
from stests.core.utils import env
from arg_utils import get_network
# CLI argument parser.
ARGS = argparse.ArgumentParser("Displays a keys asssociated with a network's faucet account.")
# NOTE(review): the description string above contains typos ("a keys
# asssociated"); left unchanged here since it is a runtime string.
# CLI argument: network name.
ARGS.add_argument(
    "--net",
    default=env.get_network_name(),
    dest="network",
    help="Network name {type}{id}, e.g. nctl1.",
    type=args_validator.validate_network,
)
def main(args):
    """Entry point: log the faucet account's keys for the chosen network.
    :param args: Parsed CLI arguments.
    """
    # Resolve the network entity (including its faucet account) from args.
    network = get_network(args)
    utils.log(f"NETWORK: {network.name} -> faucet account-key = {network.faucet.account_key}")
    utils.log(f"NETWORK: {network.name} -> faucet account-hash = {network.faucet.account_hash}")
    utils.log(f"NETWORK: {network.name} -> faucet private-key = {network.faucet.private_key}")
    utils.log(f"NETWORK: {network.name} -> faucet public-key = {network.faucet.public_key}")
# Entry point.
if __name__ == '__main__':
    main(ARGS.parse_args())
| 27.97561 | 96 | 0.710549 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 575 | 0.501308 |
1b1778c3fb652ba615f85e01a3d409cb27dcc56d | 6,195 | py | Python | tvae/experiments/tvae_norb.py | anoniccv2021/Predictive-Coding-Topographic-VAE | 8a05c706dd9bce1399ac02440c1e50dd287c239e | [
"MIT"
] | null | null | null | tvae/experiments/tvae_norb.py | anoniccv2021/Predictive-Coding-Topographic-VAE | 8a05c706dd9bce1399ac02440c1e50dd287c239e | [
"MIT"
] | null | null | null | tvae/experiments/tvae_norb.py | anoniccv2021/Predictive-Coding-Topographic-VAE | 8a05c706dd9bce1399ac02440c1e50dd287c239e | [
"MIT"
] | null | null | null | import os
import wandb
from tqdm import tqdm
import torch
from torch import optim
from torch.optim.lr_scheduler import StepLR
from torch import nn
from torch.nn import functional as F
from tvae.data.small_norb import get_dataloader
from tvae.containers.tvae import TVAE
from tvae.models.mlp import MLP_Encoder_w_WN, MLP_Decoder_w_WN
from tvae.containers.encoder import Gaussian_Encoder
from tvae.containers.decoder import Bernoulli_Decoder
from tvae.containers.grouper import Chi_Squared_Capsules_from_Gaussian_1d, NonTopographic_Capsules1d
from tvae.utils.logging import configure_logging, get_dirs
from tvae.utils.train_loops import train_epoch, eval_epoch
def create_model(n_caps, cap_dim, mu_init, n_transforms, group_kernel, n_off_diag):
    """Build a Topographic VAE: two Gaussian encoders (z and u), a Bernoulli
    decoder, and a chi-squared capsule grouper whose topography is defined by
    a fixed 3d transposed convolution over the capsule grid.

    Args:
        n_caps: number of capsules.
        cap_dim: dimensionality of each capsule.
        mu_init: initial value for the grouper's mu parameter.
        n_transforms: number of transformations (temporal extent).
        group_kernel: (d, h, w) kernel size of the grouping convolution.
        n_off_diag: off-diagonal extent used by the grouper.
    """
    # Total latent dimensionality.
    s_dim = n_caps * cap_dim
    z_encoder = Gaussian_Encoder(MLP_Encoder_w_WN(s_dim=s_dim, n_cin=1, n_hw=48),
                                 loc=0.0, scale=1.0)
    u_encoder = Gaussian_Encoder(MLP_Encoder_w_WN(s_dim=s_dim, n_cin=1, n_hw=48),
                                 loc=0.0, scale=1.0)
    decoder = Bernoulli_Decoder(MLP_Decoder_w_WN(s_dim=s_dim, n_cout=1, n_hw=48))
    # The ConvTranspose3d padding together with the circular F.pad below
    # implements wrap-around (toroidal) neighbourhoods over the capsule grid.
    grouper = Chi_Squared_Capsules_from_Gaussian_1d(
                    nn.ConvTranspose3d(in_channels=1, out_channels=1,
                                      kernel_size=group_kernel,
                                      padding=(2*(group_kernel[0] // 2),
                                               2*(group_kernel[1] // 2),
                                               2*(group_kernel[2] // 2)),
                                      stride=(1,1,1), padding_mode='zeros', bias=False),
                    lambda x: F.pad(x, (group_kernel[2] // 2, group_kernel[2] // 2,
                                        group_kernel[1] // 2, group_kernel[1] // 2,
                                        group_kernel[0] // 2, group_kernel[0] // 2),
                                        mode='circular'),
                    n_caps=n_caps, cap_dim=cap_dim, n_transforms=n_transforms,
                    mu_init=mu_init, n_off_diag=n_off_diag)
    return TVAE(z_encoder, u_encoder, decoder, grouper)
def main():
    """Train a Topographic VAE on Small NORB, periodically evaluating and
    checkpointing to the wandb run directory."""
    # Hyperparameters and experiment configuration (logged to wandb).
    config = {
        'wandb_on': True,
        'lr': 1e-4,
        'momentum': 0.9,
        'batch_size': 8,
        'max_epochs': 10000,
        'eval_epochs': 5,
        'dataset': 'Small Norb',
        'seed': 1,
        'n_caps': 144,
        'cap_dim': 18,
        'n_transforms': 18,
        'max_n_objs': 25,
        'mu_init': 30.0,
        'n_off_diag': 1,
        'group_kernel': (13, 13, 1),
        'train_eq_loss': False,
        'n_is_samples': 10
        }
    name = 'TVAE_SmallNorb_1e-4_13x13-mu30_w_WN_144caps_25objs'
    config['savedir'], config['data_dir'], config['wandb_dir'] = get_dirs()
    # config['data_dir'] = '/home/akeller/repo/OMR/data/small_norb/'
    # NOTE(review): hard-coded machine-specific data path overrides get_dirs().
    config['data_dir'] = '/var/scratch/takeller/small_norb/'
    savepath = os.path.join(config['savedir'], name)
    train_loader, test_loader = get_dataloader(dir=config['data_dir'],
                                               max_n_objs=config['max_n_objs'],
                                               batch_size=config['batch_size'])
    model = create_model(n_caps=config['n_caps'], cap_dim=config['cap_dim'], mu_init=config['mu_init'],
                         n_transforms=config['n_transforms'], group_kernel=config['group_kernel'], n_off_diag=config['n_off_diag'])
    model.to('cuda')
    log, checkpoint_path = configure_logging(config, name, model)
    # Checkpoints are written into the active wandb run directory.
    checkpoint_path = os.path.join(wandb.run.dir, 'checkpoint.tar')
    # model.load_state_dict(torch.load(checkpoint_path))
    optimizer = optim.SGD(model.parameters(),
                          lr=config['lr'],
                          momentum=config['momentum'])
    # gamma=1.0: the scheduler is effectively a constant learning rate.
    scheduler = StepLR(optimizer, step_size=1, gamma=1.0)
    for e in range(config['max_epochs']):
        log('Epoch', e)
        total_loss, total_neg_logpx_z, total_kl, total_eq_loss, num_batches = train_epoch(model, optimizer,
                                                                 train_loader, log,
                                                                 savepath, e, eval_batches=3000,
                                                                 plot_weights=False,
                                                                 plot_fullcaptrav=False,
                                                                 wandb_on=config['wandb_on'])
        log("Epoch Avg Loss", total_loss / num_batches)
        log("Epoch Avg -LogP(x|z)", total_neg_logpx_z / num_batches)
        log("Epoch Avg KL", total_kl / num_batches)
        log("Epoch Avg EQ Loss", total_eq_loss / num_batches)
        scheduler.step()
        torch.save(model.state_dict(), checkpoint_path)
        if e % config['eval_epochs'] == 0:
            total_loss, total_neg_logpx_z, total_kl, total_is_estimate, total_eq_loss, num_batches = eval_epoch(model, test_loader, log, savepath, e,
                                                                        n_is_samples=config['n_is_samples'],
                                                                        plot_maxact=False,
                                                                        plot_class_selectivity=False,
                                                                        plot_cov=False,
                                                                        wandb_on=config['wandb_on'],
                                                                        plot_fullcaptrav=True)
            log("Val Avg Loss", total_loss / num_batches)
            log("Val Avg -LogP(x|z)", total_neg_logpx_z / num_batches)
            log("Val Avg KL", total_kl / num_batches)
            # NOTE(review): "Estiamte" typo kept — it is a runtime log key.
            log("Val IS Estiamte", total_is_estimate / num_batches)
            log("Val EQ Loss", total_eq_loss / num_batches)
if __name__ == '__main__':
    main()
| 48.398438 | 150 | 0.510734 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 823 | 0.132849 |
1b18107e731c97c78274e71e34a4e53422066da2 | 54 | py | Python | tests/mac-logs/examples/tag/tag.py | andrewp-as-is/mac-logs.py | 13f39b9541e775ef886fc3501dd04b2d06f4aa04 | [
"Unlicense"
] | 1 | 2019-01-14T14:34:59.000Z | 2019-01-14T14:34:59.000Z | tests/mac-logs/examples/tag/tag.py | looking-for-a-job/mac-logs.py | 13f39b9541e775ef886fc3501dd04b2d06f4aa04 | [
"Unlicense"
] | null | null | null | tests/mac-logs/examples/tag/tag.py | looking-for-a-job/mac-logs.py | 13f39b9541e775ef886fc3501dd04b2d06f4aa04 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python
import mac_logs
# Example/CLI usage: tag the macOS log files handled by mac_logs.
mac_logs.tag()
| 10.8 | 21 | 0.740741 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 21 | 0.388889 |
1b182cfc73f0ae464fec05f9d3d9dbf900f37a69 | 1,401 | py | Python | ann.py | ErikRichardS/text-generator | 057990ce790e45f3f32d4f7b789b205c60156418 | [
"MIT"
] | null | null | null | ann.py | ErikRichardS/text-generator | 057990ce790e45f3f32d4f7b789b205c60156418 | [
"MIT"
] | null | null | null | ann.py | ErikRichardS/text-generator | 057990ce790e45f3f32d4f7b789b205c60156418 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torch.nn.functional as F
import math
class RNN(nn.Module):
    """Token-level LSTM: embedding -> LSTM -> dropout + linear decoder.

    The whole module is moved onto the GPU at construction time.
    """

    def __init__(self, num_layers, hidden_size, input_size):
        super(RNN, self).__init__()
        self.num_layers = num_layers
        self.hidden_size = hidden_size
        # Token-id -> dense vector lookup; id 0 is the padding token.
        self.encoder = nn.Embedding(input_size, hidden_size, padding_idx=0)
        self.lstm = nn.LSTM(
            input_size=hidden_size,
            hidden_size=hidden_size,
            num_layers=num_layers,
            batch_first=False,
        )
        # Project hidden states back to vocabulary-sized logits.
        self.decoder = nn.Sequential(
            nn.Dropout(p=0.2),
            nn.Linear(hidden_size, input_size),
        )
        self.cuda()

    def forward(self, tokens, state):
        """Run the network; returns (logits, updated LSTM state)."""
        embedded = self.encoder(tokens)
        features, state = self.lstm(embedded, state)
        logits = self.decoder(features)
        return logits, state

    def init_state(self, sequence_length=32, cuda=True):
        """Return a fresh zeroed (h, c) state pair for the LSTM.

        NOTE(review): the CPU branch ignores ``sequence_length`` and always
        uses a middle dimension of 1 — presumably for single-sample
        generation; confirm before relying on it.
        """
        if cuda:
            return (torch.zeros(self.num_layers, sequence_length, self.hidden_size).cuda(),
                    torch.zeros(self.num_layers, sequence_length, self.hidden_size).cuda())
        return (torch.zeros(self.num_layers, 1, self.hidden_size),
                torch.zeros(self.num_layers, 1, self.hidden_size))
| 23.35 | 91 | 0.573876 | 1,307 | 0.932905 | 0 | 0 | 0 | 0 | 0 | 0 | 59 | 0.042113 |
1b18c3ce0af69bb346628f41fe72f124b56c49f7 | 3,349 | py | Python | camd3/infrastructure/component/tests/test_idgenerators.py | mamrhein/CAmD3 | d20f62295771a297c3fbb314beef314e5ec7a2b5 | [
"BSD-2-Clause"
] | null | null | null | camd3/infrastructure/component/tests/test_idgenerators.py | mamrhein/CAmD3 | d20f62295771a297c3fbb314beef314e5ec7a2b5 | [
"BSD-2-Clause"
] | null | null | null | camd3/infrastructure/component/tests/test_idgenerators.py | mamrhein/CAmD3 | d20f62295771a297c3fbb314beef314e5ec7a2b5 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python
# ----------------------------------------------------------------------------
# Name: test_idgenerators
# Purpose: Test driver for module idgenerators
#
# Author: Michael Amrhein (michael@adrhinum.de)
#
# Copyright: (c) Michael Amrhein
# ----------------------------------------------------------------------------
# $Source$
# $Revision$
"""Test driver for module idgenerators"""
import unittest
from uuid import UUID
from camd3.infrastructure.component.idfactories import (
uuid_generator, local_id_generator, local_num_id_generator)
class StringId(str):
    """String-valued identifier whose ``incr`` yields the next id."""

    def incr(self):
        """Return the successor id.

        An empty id becomes 'a'; a trailing 'z' appends 'a'; any other
        trailing character is bumped to the next code point.
        """
        if not self:
            return StringId('a')
        last = self[-1]
        if last == 'z':
            return StringId(self + 'a')
        return StringId(self[:-1] + chr(ord(last) + 1))
class Elem:
    """Minimal stand-in for a repository element: only carries an id."""
    def __init__(self, id):
        self.id = id
class IdGeneratorTest(unittest.TestCase):
    """Tests for the id generator factories in idfactories."""
    def testuuid_generator(self):
        """uuid_generator yields unique UUID instances."""
        idGen = uuid_generator()
        nIds = 10
        idMap = {next(idGen): i for i in range(nIds)}
        # assert that ids are unique:
        self.assertEqual(len(idMap), nIds)
        for id in idMap:
            self.assertTrue(isinstance(id, UUID))
    def testlocal_id_generator(self):
        """local_id_generator yields unique, incrementing StringIds and
        starts after the maximal id already present in the context."""
        context = []
        incr = lambda id: id.incr() if id else StringId().incr()
        idGen = local_id_generator(context, incr)
        nIds = 30
        idMap = {next(idGen): i for i in range(nIds)}
        # assert that ids are unique:
        self.assertEqual(len(idMap), nIds)
        for id in idMap:
            self.assertTrue(isinstance(id, StringId))
        id = next(idGen)
        self.assertEqual(incr(id), next(idGen))
        # With a non-empty context the generator continues after the
        # largest existing id.
        idStrs = ['za', 'zzx', 'e']
        context = [Elem(StringId(s)) for s in idStrs]
        idGen = local_id_generator(context, incr)
        self.assertEqual(StringId(max(idStrs)).incr(), next(idGen))
    def testlocal_num_id_generator(self):
        """local_num_id_generator yields consecutive ints, honours start
        values, and rejects a start value conflicting with the context."""
        nIds = 10
        # no context, no start value
        idGen = local_num_id_generator()
        idMap = {next(idGen): i for i in range(1, nIds + 1)}
        # assert that ids are unique:
        self.assertEqual(len(idMap), nIds)
        for id in idMap:
            self.assertEqual(id, idMap[id])
        # no context, start value given
        start = 17
        idGen = local_num_id_generator(start=start)
        idMap = {next(idGen): i for i in range(start, start + nIds)}
        # assert that ids are unique:
        self.assertEqual(len(idMap), nIds)
        for id in idMap:
            self.assertEqual(id, idMap[id])
        # context given, no start value
        ids = [7, 18, 5]
        maxId = max(ids)
        context = [Elem(id) for id in ids]
        idGen = local_num_id_generator(context)
        self.assertEqual(maxId + 1, next(idGen))
        # context given, start value given, but in conflict
        self.assertRaises(ValueError, local_num_id_generator, context, maxId)
        # context given, start value given, no conflict
        idGen = local_num_id_generator(context, maxId + 1)
        self.assertEqual(maxId + 1, next(idGen))
        # still incremental?
        lastId = next(idGen)
        self.assertEqual(lastId + 1, next(idGen))
if __name__ == '__main__':
    unittest.main()
| 31.59434 | 78 | 0.571215 | 2,705 | 0.807704 | 0 | 0 | 0 | 0 | 0 | 0 | 765 | 0.228426 |
1b1aae52aa5cff45e911037583b8775cf7575004 | 2,940 | py | Python | seahub/onlyoffice/views.py | jjzhang166/seahub | 8ced28759fc1e158196a7743eb149882451f9143 | [
"Apache-2.0"
] | null | null | null | seahub/onlyoffice/views.py | jjzhang166/seahub | 8ced28759fc1e158196a7743eb149882451f9143 | [
"Apache-2.0"
] | null | null | null | seahub/onlyoffice/views.py | jjzhang166/seahub | 8ced28759fc1e158196a7743eb149882451f9143 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2012-2017 Seafile Ltd.
import json
import logging
import os
import requests
import urllib2
from django.core.cache import cache
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
from seaserv import seafile_api
from .settings import VERIFY_ONLYOFFICE_CERTIFICATE
from seahub.utils import gen_file_upload_url
# Get an instance of a logger
logger = logging.getLogger(__name__)
@csrf_exempt
def onlyoffice_editor_callback(request):
    """Callback endpoint hit by the ONLYOFFICE document server.

    When the posted ``status`` is 2 (document ready for saving), downloads
    the edited document from the document server and writes it back into
    Seafile via the fileserver update API.  Always responds with
    ``{"error": 0}`` so the document server does not keep retrying.

    NOTE(review): the endpoint is csrf-exempt and performs no signature /
    token verification of the callback payload — presumably protected at
    another layer; confirm.
    """
    #request.body:
    # {"key":"Khirz6zTPdfd7","status":1,
    # "users":["uid-1488351242769"],
    # "actions":[{"type":1,"userid":"uid-1488351242769"}]}
    # "key":"Khirz6zTPdfd8","status":2,"url":"https://13.113.111.2/cache/files/Khirz6zTPdfd8_6379/output.docx/output.docx?md5=5oL0qGUqXw72D85f28JaFg==&expires=1488956681&disposition=attachment&ooname=output.docx","changesurl":"https://13.113.111.2/cache/files/Khirz6zTPdfd8_6379/changes.zip/changes.zip?md5=vx3VYwaPEOxtZDA_3yuVrg==&expires=1488956681&disposition=attachment&ooname=output.zip","history":{"serverVersion":"4.2.10","changes":[{"created":"2017-03-01 07:03:11","user":{"id":"uid-1488351774447","name":"Anonymous"}}]},"users":["uid-1488351774447"],"actions":[{"type":0,"userid":"uid-1488351774447"}]}
    logger.debug(request.body)
    if request.method != 'POST':
        return HttpResponse('{"error": 0}')
    post_data = json.loads(request.body)
    status = int(post_data.get('status', -1))
    if status == 2: # document is ready for saving
        # the link to the edited document to be saved with the document storage
        # service. The link is present when the status value is equal to 2 or 3 only.
        url = post_data.get('url')
        context = None
        if VERIFY_ONLYOFFICE_CERTIFICATE is False:
            import ssl
            # Skip TLS certificate verification (self-signed doc servers).
            context = ssl._create_unverified_context()
        try:
            file_content = urllib2.urlopen(url, context=context).read()
        except urllib2.URLError as e:
            # Download failed: log and still answer {"error": 0} below.
            logger.error(e)
        else:
            # update file
            doc_key = post_data.get('key')
            # Document metadata was cached under the editor key when the
            # editing session was opened.
            doc_info = json.loads(cache.get("ONLYOFFICE_%s" % doc_key))
            repo_id = doc_info['repo_id']
            file_path = doc_info['file_path']
            username = doc_info['username']
            update_token = seafile_api.get_fileserver_access_token(repo_id,
                'dummy', 'update', username)
            if not update_token:
                return HttpResponse('{"error": 0}')
            update_url = gen_file_upload_url(update_token, 'update-api')
            files = {
                'file': file_content,
                'file_name': os.path.basename(file_path),
                'target_file': file_path,
            }
            requests.post(update_url, files=files)
            logger.info('%s updated by %s' % (repo_id + file_path, username))
    return HttpResponse('{"error": 0}')
| 40.273973 | 611 | 0.652041 | 0 | 0 | 0 | 0 | 2,506 | 0.852381 | 0 | 0 | 1,187 | 0.403741 |
1b1acb3da87ff1fab8416f66740940a5554cf27f | 5,899 | py | Python | ProgrammingAssignments/pyretic/pyretic/tutorial/of_tutorial.py | Mahdi-Asaly/Coursera-SDN-Assignments | aac5d62f40c5283e296a0f87b7ec2de8986a8efc | [
"Intel"
] | null | null | null | ProgrammingAssignments/pyretic/pyretic/tutorial/of_tutorial.py | Mahdi-Asaly/Coursera-SDN-Assignments | aac5d62f40c5283e296a0f87b7ec2de8986a8efc | [
"Intel"
] | null | null | null | ProgrammingAssignments/pyretic/pyretic/tutorial/of_tutorial.py | Mahdi-Asaly/Coursera-SDN-Assignments | aac5d62f40c5283e296a0f87b7ec2de8986a8efc | [
"Intel"
] | null | null | null |
################################################################################
# The Pyretic Project #
# frenetic-lang.org/pyretic #
# author: Joshua Reich (jreich@cs.princeton.edu) #
################################################################################
# Licensed to the Pyretic Project by one or more contributors. See the #
# NOTICES file distributed with this work for additional information #
# regarding copyright and ownership. The Pyretic Project licenses this #
# file to you under the following license. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided the following conditions are met: #
# - Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# - Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer in #
# the documentation or other materials provided with the distribution. #
# - The names of the copyright holds and contributors may not be used to #
# endorse or promote products derived from this work without specific #
# prior written permission. #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT #
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the #
# LICENSE file distributed with this work for specific language governing #
# permissions and limitations under the License. #
################################################################################
################################################################################
# SETUP #
# ------------------------------------------------------------------- #
# mininet: mininet.sh --topo clique,4,4 (or other single subnet) #
################################################################################
from pyretic.lib.corelib import *
from pyretic.lib.std import *
from pyretic.lib.query import *
def act_like_hub():
    """Behave like a hub: flood every packet out of all ports on the
    network's minimum spanning tree, except the port it arrived on."""
    hub_policy = flood()
    return hub_policy
# we create a new dynamic policy class with the name "act_like_switch"
class act_like_switch(DynamicPolicy):
    """
    Implement switch-like behavior.
    """
    # NOTE: everything between the two "DELETE BOTH THIS LINE" markers below is
    # an inert triple-quoted string containing tutorial pseudocode -- it is not
    # executable and must be replaced with real Pyretic code by the student.
    """ # DELETE BOTH THIS LINE AND THE ONE BELOW TO START WORKING ON THE TUTORIAL #
    # Here's some psuedocode to start you off implementing a learning
    # switch. You'll need to rewrite it as real Python code using Pyretic predicates
    # and policies - all of which are defined and documented in pyretic/core/language.py
    def __init__(self):
        # Set up the initial forwarding behavior for your mac learning switch
        # Tip: set up a separate variable to track this
        self.forward = <some policy here>
        # hint, mac learning switches start off by flooding all packets
        # Set up a query that will receive new incoming packets
        self.query = <a packets query for the first packet w/ a given (srcmac,switch) pair>
        # Write a function to take each new packet p and update the forwarding policy
        # so subsequent incoming packets on this switch whose dstmac matches p's srcmac
        # (accessed like in a dictionary p['srcmac']), those packets will be forwarded out
        # p's inport (pyretic packets are located, so we access this value just like srcmac
        # - i.e., p['inport'])
        def learn_from_a_packet(pkt):
            # perhaps we want to print the incoming packet so we can see it
            print pkt
            # and we will need to set the forwarding policy
            self.forward = <....> # hint use the 'match' policy and either
                                  # if_(f,p1,p2) or
                                  # a combination of parallel and sequential composition
            # let's print the forwarding policy to see if it looks right
            print self.forward
            # and don't forget to update the dynamic policy to forward and query
            # (each dynamic policy has a member 'policy'
            # whenever this member is assigned, the dynamic policy updates itself)
            self.policy = <forwarding and query policies composed in parallel>
            # hint: 'P1 + P2' is shorthand for parallel composition of P1 and P2
            # 'P1 >> P2' is shorthand for sequential composition of P1 and P2
        # we need to make sure learn_from_a_packet is called back
        # every time our query sees a new packet
        self.query.register_callback(learn_from_a_packet)
        # finally, we initialize our dynamic policy
        super(act_like_switch,self).__init__(<the first value 'self.policy' should take>)
    """ # DELETE BOTH THIS LINE AND THE ONE ABOVE TO START WORKING ON THE TUTORIAL #
def main():
    """Entry point: return the policy that Pyretic should run.

    Part 0 runs the hub behavior; for Part 1, return act_like_switch()
    instead.
    """
    return act_like_hub()
    # return act_like_switch()   # Part 1 - basic mac learning module
| 55.650943 | 91 | 0.561282 | 2,635 | 0.446686 | 0 | 0 | 0 | 0 | 0 | 0 | 5,605 | 0.950161 |
1b1db678e9b628776e9c3b517be21f818fe07138 | 931 | py | Python | brambling/management/commands/update_tokens.py | j-po/django-brambling | be072903fbdecb94f1ec4680b717adc44e73c80b | [
"BSD-3-Clause"
] | null | null | null | brambling/management/commands/update_tokens.py | j-po/django-brambling | be072903fbdecb94f1ec4680b717adc44e73c80b | [
"BSD-3-Clause"
] | null | null | null | brambling/management/commands/update_tokens.py | j-po/django-brambling | be072903fbdecb94f1ec4680b717adc44e73c80b | [
"BSD-3-Clause"
] | null | null | null | from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from brambling.utils.payment import dwolla_update_tokens
class Command(BaseCommand):
    """Management command: refresh Dwolla tokens nearing expiry."""
    option_list = BaseCommand.option_list + (
        make_option(
            '--days',
            action='store',
            dest='days',
            default=15,
            help='Number of days ahead of time to update refresh tokens.'),
    )
    def handle(self, *args, **options):
        """Validate --days and run dwolla_update_tokens, reporting counts."""
        try:
            days = int(options['days'])
        except ValueError:
            raise CommandError("Days must be an integer value.")
        self.stdout.write("Updating dwolla tokens...")
        self.stdout.flush()
        count, test_count = dwolla_update_tokens(days)
        # NOTE(review): the variable/label pairing below looks swapped --
        # `count` is reported as "Test tokens" and `test_count` as "Live
        # tokens". Confirm against dwolla_update_tokens()'s return order
        # before changing anything.
        self.stdout.write("Test tokens updated: {}".format(count))
        self.stdout.write("Live tokens updated: {}".format(test_count))
        self.stdout.flush()
| 32.103448 | 75 | 0.62406 | 770 | 0.827068 | 0 | 0 | 0 | 0 | 0 | 0 | 192 | 0.20623 |
1b20193ab5a597fe5a739a07660e65af8a91d2ac | 385 | py | Python | scripts/shared.py | bozhnyukAlex/high-performance-graph-analysis-course | 13d4de4257a59d03c585896ee89011d6a4adc58f | [
"Apache-2.0"
] | 1 | 2021-11-14T01:04:17.000Z | 2021-11-14T01:04:17.000Z | scripts/shared.py | bozhnyukAlex/high-performance-graph-analysis-course | 13d4de4257a59d03c585896ee89011d6a4adc58f | [
"Apache-2.0"
] | 9 | 2021-09-01T12:01:23.000Z | 2021-11-25T12:25:10.000Z | scripts/shared.py | miloserdova-l/formal-lang-course | 042e1b381a8ea96a7602fb876e20cdb6506044ac | [
"Apache-2.0"
] | 12 | 2021-08-30T11:40:04.000Z | 2022-03-04T07:06:15.000Z | import os
import pathlib
# Repository layout anchors, resolved relative to this script's location.
ROOT = pathlib.Path(__file__).parent.parent
DOCS = ROOT / "docs"
TESTS = ROOT / "tests"


def configure_python_path():
    """Append the repository root to PYTHONPATH (creating it if unset).

    Uses ``os.pathsep`` so the list separator is correct on every
    platform (``:`` on POSIX, ``;`` on Windows); the previous hard-coded
    ``";"`` produced a broken PYTHONPATH on Linux/macOS.
    """
    python_path = os.getenv("PYTHONPATH")
    if python_path is None:
        os.environ["PYTHONPATH"] = str(ROOT)
    else:
        os.environ["PYTHONPATH"] += os.pathsep + str(ROOT)
    print("Configure python path: ", os.getenv("PYTHONPATH"))
| 22.647059 | 61 | 0.654545 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 89 | 0.231169 |
1b203175eee2fabf30e9c6b11be336bcd0773399 | 28,139 | py | Python | main.py | Ethan-Chew/toobox | 7f774a0eac12c9e6ceeb078474fdab6205247c29 | [
"MIT"
] | 4 | 2022-01-12T02:21:07.000Z | 2022-03-27T13:34:32.000Z | main.py | Ethan-Chew/toobox | 7f774a0eac12c9e6ceeb078474fdab6205247c29 | [
"MIT"
] | null | null | null | main.py | Ethan-Chew/toobox | 7f774a0eac12c9e6ceeb078474fdab6205247c29 | [
"MIT"
] | null | null | null | # Everyone did everything here
## JSON, Main UI, Keybinds, 'Backend' mostly by Ethan
## Refractoring, 'Backend' by Jerick
## Everything else + Starting Point by Granwyn
import tkinter as tk
from tkinter import ttk
import os
from tkinter import font
import json
from PIL import ImageTk, Image
from tkinter import Menu
import config
from toolsUI import *
from components.wrappedLabel import WrappingLabel
import os
import webbrowser
import tools.periodicTable as pt
# Paths of the files used by the app, resolved relative to this script.
ROOTDIR, _ =os.path.split(os.path.abspath(os.path.realpath(__file__)))
os. chdir(ROOTDIR)
# Hidden JSON file holding settings (font multiplier, theme, recently opened).
jsonData = os.path.join(ROOTDIR, '.data.json')
# App icons in the three platform formats (ico/icns/png).
appIconIcon = os.path.join(ROOTDIR,'src','images','AppIcon.ico')
appIconIcns = os.path.join(ROOTDIR,'src','images','AppIcon.icns')
appIconPng = os.path.join(ROOTDIR,'src','images','AppIcon.png')
appThemePath = os.path.join(ROOTDIR,"sun-valley.tcl")
# Maximum number of entries kept in the "recently opened" list.
_recentlength=10
FONT='TkDefaultFont'
# Checks for Font Size Change
def reload():
    """Re-read the settings JSON and refresh the global font multiplier.

    Called once at import time and again by run_func() so UI fonts pick
    up changes made on the Settings screen.
    """
    global fontMultiplier
    # Context manager guarantees the settings file is closed even if the
    # JSON is malformed (the original left the handle open on a decode error).
    with open(jsonData) as settings_file:
        extractedData = json.load(settings_file)
    fontMultiplier = float(extractedData["fontMultiplier"])
reload()
# Variables
# Maps a treeview label to the UI-builder callable (from toolsUI) that
# renders that tool. Labels missing here get the generic infoFrame instead.
functionalities = {
    "Settings"          : Settings,
    "Chemical Equation" : ChemicalEquation,
    "Rectangle/Square"  : Rectangle,
    "Ionic Equation"    : IonicEqn,
    "Salt Solubilities" : SaltSolubility,
    "Calculator"        : calculate,
    "Circle/Semicircle" : Circle,
    "Quadratic"         : SolveQuad,
    "Parallelogram"     : Parallelogram,
    "Trapezium"         : Trapezium,
    "Simultaneous"      : simsolver,
    "Triangle"          : triangle,
    "Equation of Circle": SolveCircle,
    "Periodic Table"    : periodicTable,
    "Pyramid"           : Pyramid,
    "Prism"             : Prism,
    "Sphere"            : Sphere,
}
# (parent id, item id, label) triples for the tools treeview; "" marks a
# top-level category row.
treeview_data = [
    ("", 1, "Chemistry"),
    (1, 2, "Periodic Table"),
    (1, 3, "Salt Solubilities"),
    (1, 4, "Chemical Equation"),
    (1, 5, "Ionic Equation"),
    ("", 6, "Mathematics"),
    (6, 7, "Calculator"),
    (6, 8, "Equations"),
    (8, 9, "Simultaneous"),
    (8, 10, "Quadratic"),
    (8, 11, "Cubic"),
    (8, 12, "Quartic"),
    (6, 13, "Matrices"),
    (6, 14, "Inequalities"),
    (6, 15, "Mensuration"),
    (15, 16, "Area"),
    (16, 17, "Rectangle/Square"),
    (16, 18, "Triangle"),
    (16, 19, "Parallelogram"),
    (16, 20, "Rhombus"),
    (16, 21, "Trapezium"),
    (16, 22, "Circle/Semicircle"),
    (15, 23, "Volume and Surface Area"),
    (23, 24, "Pyramid"),
    (24, 25, "Triangle-Based"),
    (24, 26, "Square-Based"),
    (24, 27, "Cone"),
    (23, 28, "Prism"),
    (29, 30, "Triangular Prism"),
    (29, 31, "Cylinder"),
    (29, 32, "Cuboid/Cube"),
    (23, 29, "Sphere"),
    (6, 33, "Percentage"),
    (6, 38, "Circles"),
    (38, 39, "Circle Properties"),
    (38, 40, "Equation of Circle"),
    ("", 41, "Settings"),
]
# Category labels picked out of treeview_data by *list position* (i-1), not by
# item id -- NOTE(review): the indices here would silently drift if rows were
# reordered; confirm intent before editing treeview_data.
TOPICS=[treeview_data[i-1][2] for i in [1,6,8,16,15,23,34,24,29]] # Add Items into Treeview
# Filled by App.setup_widgets() with every label actually inserted in the tree.
topics = []
class App(ttk.Frame):
    """Main Toobox application frame: tool treeview, home screen, keybinds."""
    def __init__(self, parent):
        """Build the whole UI inside the Tk root and register shortcuts."""
        # Greet the user with a desktop notification listing the shortcuts.
        self.notify("App Shortcuts for Toobox", "Use Control + H to go to the Home Screen and use Control + F to enter Full Screen, Escape Key to exit Full Screen.", "Boop")
        # Frames created during a session; clearScreen() unwinds this stack.
        self.screenlist = []
        # aSecret :) hehehehe -- runs the obfuscated easter-egg lambda `f`.
        if config.aSecret:
            f()
        self.check_recently_opened()
        # Initialize the app
        ttk.Frame.__init__(self)
        self.setup_menu()
        self.setup_widgets()
        # Variable to track if fullscreen
        self.fullScreen = False
        # Set Bindings/Shortcuts
        self.fullScreenBindings()
        self.goHome()
        # self.resetSettingsSC()
        config.currentlySelected = "Home"
def getInputs(self, event):
try:
text.grid_forget()
text.destroy()
except: pass
self.resFrame.destroy()
self.resFrame = self.addframe(self.mainFrame)
e = self.inputField.get().replace(" ", "")
l=pt.search(e)[:6]
newf=self.addframe(self.resFrame,borderwidth=1)
if len(l) > 0:
temp=WrappingLabel(newf, text="Atomic Number", font=(font,int(fontMultiplier*10)))
temp.grid(row=0, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Mass Number", font=(font,int(fontMultiplier*10)))
temp.grid(row=1, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Period"+", "+"Group", font=(font,int(fontMultiplier*10)))
temp.grid(row=2, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Symbol", font=(font,int(fontMultiplier*15), 'bold'))
temp.grid(row=3, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Element", font=(font,int(fontMultiplier*12), 'bold'))
temp.grid(row=4, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Atomic Mass" , font=(font,int(fontMultiplier*10)))
temp.grid(row=5, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text=", ".join(["Protons","Neutrons","Electrons"]), font=(font,int(fontMultiplier*10)))
temp.grid(row=6, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Atomic Radius" , font=(font,int(fontMultiplier*10)))
temp.grid(row=7, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Electron Shells" , font=(font,int(fontMultiplier*10)))
temp.grid(row=8, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Valence Electrons" , font=(font,int(fontMultiplier*10)))
temp.grid(row=9, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Electronic Configuration" , font=(font,int(fontMultiplier*10)))
temp.grid(row=10, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Isotopes" , font=(font,int(fontMultiplier*10)))
temp.grid(row=11, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text=" ".join(["[{}]".format("Phase"), ", ".join(["Melting Point", "Boiling Point"])]), font=(font,int(fontMultiplier*10)))
temp.grid(row=12, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Type" , font=(font,int(fontMultiplier*10)))
temp.grid(row=13, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Radioactive" , font=(font,int(fontMultiplier*10)))
temp.grid(row=14, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Natural" , font=(font,int(fontMultiplier*10)))
temp.grid(row=15, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Density" , font=(font,int(fontMultiplier*10)))
temp.grid(row=16, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Electronegativity" , font=(font,int(fontMultiplier*10)))
temp.grid(row=17, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="First Ionisation Energy" , font=(font,int(fontMultiplier*10)))
temp.grid(row=18, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Specific Heat Capacity / J⋅kg⁻¹⋅K⁻¹" , font=(font,int(fontMultiplier*10)))
temp.grid(row=19, column=0, sticky = tk.N+tk.E, padx=2)
temp=WrappingLabel(newf, text="Discovered" , font=(font,int(fontMultiplier*10)))
temp.grid(row=20, column=0, sticky = tk.N+tk.E, padx=2)
newf.grid(row=0, column=0, sticky = tk.N+tk.E, padx=2)
r=1
for i in l:
newf=self.addframe(self.resFrame,borderwidth=1)
temp=WrappingLabel(newf, text=int(pt.ELEMENTDATA["AtomicNumber"][i]), font=(font,int(fontMultiplier*10)))
temp.grid(row=0, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(int(pt.ELEMENTDATA["MassNumber"][i])), font=(font,int(fontMultiplier*10)))
temp.grid(row=1, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(int(pt.ELEMENTDATA["Period"][i]))+", "+str(int(pt.ELEMENTDATA["Group"][i])), font=(font,int(fontMultiplier*10)))
temp.grid(row=2, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(pt.ELEMENTDATA["Symbol"][i]), font=(font,int(fontMultiplier*15), 'bold'))
temp.grid(row=3, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(pt.ELEMENTDATA["Element"][i]), font=(font,int(fontMultiplier*12), 'bold'))
temp.grid(row=4, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(pt.ELEMENTDATA["AtomicMass"][i]) , font=(font,int(fontMultiplier*10)))
temp.grid(row=5, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=", ".join([str(int(pt.ELEMENTDATA[j][i])) for j in ["Protons","Neutrons","Electrons"]]), font=(font,int(fontMultiplier*10)))
temp.grid(row=6, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(pt.ELEMENTDATA["AtomicRadius"][i]).title(), font=(font,int(fontMultiplier*10)))
temp.grid(row=7, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(int(pt.ELEMENTDATA["Shells"][i])).title(), font=(font,int(fontMultiplier*10)))
temp.grid(row=8, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(int(pt.ELEMENTDATA["Valence"][i])).title(), font=(font,int(fontMultiplier*10)))
temp.grid(row=9, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(pt.ELEMENTDATA["Config"][i]), font=(font,int(fontMultiplier*10)))
temp.grid(row=10, column=0, sticky = tk.N+tk.W, padx=2)
iso = str(pt.ELEMENTDATA["Isotopes"][i])
temp=WrappingLabel(newf, text=str(int(float(iso))) if iso.replace('.','',1).isdigit() else "-", font=(font,int(fontMultiplier*10)))
temp.grid(row=11, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=" ".join(["[{}]".format(str(pt.ELEMENTDATA["Phase"][i]).title()), ", ".join([str(pt.ELEMENTDATA["MeltingPoint"][i]).title()+"K", str(pt.ELEMENTDATA["BoilingPoint"][i]).title()+"K"])]), font=(font,int(fontMultiplier*10)))
temp.grid(row=12, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(pt.ELEMENTDATA["Type"][i]).title(), font=(font,int(fontMultiplier*10)))
temp.grid(row=13, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text="Yes" if pt.ELEMENTDATA["Radioactive"][i] else "No", font=(font,int(fontMultiplier*10)))
temp.grid(row=14, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text="Yes" if pt.ELEMENTDATA["Natural"][i] else "No", font=(font,int(fontMultiplier*10)))
temp.grid(row=15, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(pt.ELEMENTDATA["Density"][i]).title(), font=(font,int(fontMultiplier*10)))
temp.grid(row=16, column=0, sticky = tk.N+tk.W, padx=2)
e = str(pt.ELEMENTDATA["Electronegativity"][i]).title()
temp=WrappingLabel(newf, text=e if e.replace('.','',1).isdigit() else "-", font=(font,int(fontMultiplier*10)))
temp.grid(row=17, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(pt.ELEMENTDATA["FirstIonization"][i]).title(), font=(font,int(fontMultiplier*10)))
temp.grid(row=18, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=str(pt.ELEMENTDATA["SpecificHeat"][i]).title(), font=(font,int(fontMultiplier*10)))
temp.grid(row=19, column=0, sticky = tk.N+tk.W, padx=2)
temp=WrappingLabel(newf, text=", ".join([str(pt.ELEMENTDATA["Discoverer"][i]).title(), str(pt.ELEMENTDATA["Year"][i]).title()]), font=(font,int(fontMultiplier*10)))
temp.grid(row=20, column=0, sticky = tk.N+tk.W, padx=2)
newf.grid(row=0, column=r, sticky = tk.N, padx=2)
r+=1
else:
text = WrappingLabel(self.mainFrame, text="Invalid Input. Please enter a valid Symbol, Element Name, Symbol, Atomic Number, or Number of Electrons, Protons or Neutrons.", font=(font,int(fontMultiplier*14)))
text.grid(row=1, column=0, padx=2,pady=2, sticky = tk.W+tk.E, columnspan=5)
# self.resFrame.grid(row=1, column=len(l)+1, rowspan=10, columnspan=10, padx=2)
self.resFrame.grid(row=1, column=0, rowspan=len(l)+1, columnspan=10, padx=2)
def check_recently_opened(self):
file = open(jsonData)
try:
data = json.load(file)
file.close()
if type(data["recentlyOpened"]) == list:
return
except Exception as e:
file.close()
file = open(jsonData, 'w')
json.dump({'fontMultiplier': float(1),'recentlyOpened': [], "default-theme": "dark"}, file)
file.close()
# Theme switching
def change_theme(self):
file = open(jsonData, "r")
data = json.load(file)
file.close()
file = open(jsonData, "w+")
if root.tk.call("ttk::style", "theme", "use") == "sun-valley-dark":
# Set light theme
data['default-theme'] = "light"
else:
# Set dark theme
data['default-theme'] = "dark"
root.tk.call("set_theme", data['default-theme'])
json.dump(data, file)
file.close()
# MacOS Menu Bar Buttons
    def setup_menu(self):
        """Build the window menu bar (File, Tools, Recently Opened).

        Rebuilt by run_func() after every tool launch so the Recently
        Opened menu stays current.
        """
        menubar = Menu(root)
        # File menu: just the Settings entry.
        file=Menu(menubar, tearoff=0)
        file.add_command(label="Settings", command=(lambda *args:self.run_func("Settings")))
        menubar.add_cascade(label="File", menu=file)
        # Tools menu: one entry per registered tool, alphabetical.
        tools=Menu(menubar, tearoff=0)
        for i in sorted(list(functionalities.keys())):
            # `i=i` default binds the current name (avoids the late-binding
            # closure pitfall where every entry would open the last tool).
            tools.add_command(label=i, command=lambda i=i: self.run_func(i))
        menubar.add_cascade(label="Tools", menu=tools)
        root.config(menu=menubar)
        # Recently Opened menu, read fresh from the settings file.
        romenu=Menu(menubar, tearoff=0)
        # NOTE: `file` here rebinds the File-menu variable (and shadows the
        # builtin) to a file handle -- intentional in the original code.
        file = open(jsonData)
        data = json.load(file)
        file.close()
        # set() deduplicates but loses the most-recent-first ordering.
        data = list(set(data['recentlyOpened']))
        for i in data:
            if i != "Home":
                romenu.add_command(label=i, command=lambda i=i: self.run_func(i))
            else:
                romenu.add_command(label=i, command=self.handleBackToHS)
        menubar.add_cascade(label="Recently Opened", menu=romenu)
# Keybinds :D
## Full Screen Toggle
def fullScreenBindings(self):
root.attributes("-fullscreen", self.fullScreen)
root.bind("<Control-f>", self.toggleFullScreen)
root.bind("<F11>", self.toggleFullScreen)
root.bind("<Escape>", self.quitFullScreen)
root.bind("<Control-,>", (lambda e: self.run_func("Settings")))
## Back to Home
    def goHome(self):
        """Bind Ctrl+H to return to the home screen."""
        root.bind("<Control-h>", self.handleBackToHS)
## Reset Settings
# def resetSettingsSC(self):
# root.bind("<Control-`>", self.resetSettings)
    def periodicTableScreen(self, yes):
        """Enable or disable the <Return> binding used by the Periodic
        Table search field.

        yes: True while the Periodic Table tool is the active screen.
        """
        if yes:
            root.bind("<Return>", self.getInputs)
        else:
            # NOTE(review): this re-binds <Return> and then immediately
            # unbinds it everywhere; the bind looks redundant -- confirm
            # intent before simplifying.
            root.bind("<Return>", (lambda e: self.periodicTableScreen(False)))
            root.unbind_all('<Return>')
def removeSelectedTreeView(self):
config.currentlySelected = "Home"
if len(self.treeview.selection()) > 0:
self.treeview.selection_remove(self.treeview.selection()[0])
    def handleBackToHS(self, event):
        """Keyboard/menu handler: deselect the treeview and show Home."""
        self.removeSelectedTreeView()
        self.showHomeScreen()
    def toggleFullScreen(self, event):
        """Flip fullscreen on/off (bound to Ctrl+F and F11)."""
        self.fullScreen = not self.fullScreen
        root.attributes("-fullscreen", self.fullScreen)
    def quitFullScreen(self, event):
        """Leave fullscreen (bound to Escape)."""
        self.fullScreen = False
        root.attributes("-fullscreen", self.fullScreen)
# Setup Widgets
    def setup_widgets(self):
        """Lay out the main window: a paned window with the tool treeview
        on the left and the home-screen canvas on the right."""
        # Panedwindow
        self.paned = ttk.PanedWindow(self, orient="horizontal")
        self.paned.pack(fill="both", expand=True, anchor="center")
        # Selection Pane
        self.pane_1 = ttk.Frame(self.paned, padding=5)
        self.paned.add(self.pane_1, weight=1)
        self.newpane = ttk.PanedWindow(self.pane_1, orient="horizontal")
        ## Treeview Label
        self.treeViewTopLab = WrappingLabel(self.newpane, text="Tools", font=(FONT, int(fontMultiplier*23), 'bold'))
        self.treeViewTopLab.pack(side="left",padx=5, anchor="w", fill="y")
        # Light/dark theme toggle switch.
        self.switch = ttk.Checkbutton(
            self.newpane, text="Change Theme", style="Switch.TCheckbutton", command=self.change_theme
        )
        self.switch.pack(side="right", padx=5, anchor="e", fill="y")
        self.newpane.pack(fill="x", anchor="n", pady=10)
        # Scrollbar
        self.scrollbar = ttk.Scrollbar(self.pane_1)
        self.scrollbar.pack(side="right", fill="y")
        # Treeview
        self.treeview = ttk.Treeview(
            self.pane_1,
            selectmode="browse",
            yscrollcommand=self.scrollbar.set,
            style="MainUI.Treeview",
            takefocus=False
        )
        self.treeview.bind("<<TreeviewSelect>>", self.on_tree_select)
        self.treeview.pack(expand=True, fill="both")
        self.scrollbar.config(command=self.treeview.yview)
        ## Treeview columns
        self.treeview.column("#0", anchor="w", minwidth=100)
        # Insert rows: only implemented tools, category roots, and the
        # hard-coded grouping ids are shown.
        for item in treeview_data:
            if item[2] in functionalities or item[0] == "" or item[1] in {8, 15, 16, 23, 24, 29, 34, 38, 41}:
                self.treeview.insert(
                    parent=item[0], index="end", iid=item[1], text=item[2]
                )
                topics.append(item[2])
                if item[0] == "" or item[1] in {8, 15, 16, 23, 24, 29, 34, 38, 41}:
                    self.treeview.item(item[1], open=True) # Open parents
        # Select and scroll
        self.treeview.see(1)
        # Home Screen UI
        ## Main Home Screen Frame
        self.homeScreen = ttk.Frame(self.paned, padding=5)
        self.paned.add(self.homeScreen, weight=10)
        self.notebook = tk.Canvas(self.homeScreen, highlightthickness=0)
        self.notebook.pack(fill="both", expand=True)
        ## Sizegrip
        self.sizegrip = ttk.Sizegrip(self)
        ## Show Home Screen
        self.showHomeScreen()
# Function to clear the screen of any Frames (Leaving root behind)
    def clearScreen(self):
        """Unmap every tracked frame on the right-hand side of the UI.

        Frames are popped from self.screenlist newest-first; any failure
        in the outer loop aborts it silently (broad except kept from the
        original code).
        """
        # Clear Right Side of the Screen
        try:
            for i in self.screenlist[::-1]:
                try:
                    i.pack_forget()
                    i.place_forget()
                except: pass
                self.screenlist.pop(-1)
        except:
            pass
        finally:
            # Tool screens may have created these attributes; the first one
            # that is missing raises AttributeError and skips the rest --
            # NOTE(review): confirm that partial cleanup here is acceptable.
            try:
                self.thingFrame.pack_forget()
                self.mainFrame.pack_forget()
                self.scrolly.pack_forget()
                self.scrollx.pack_forget()
            except: pass
def run_func(self, current):
reload()
file = open(jsonData)
data = json.load(file)
file.close()
config.currentlySelected = current
self.clearScreen()
# First in First out
if (len(data['recentlyOpened']) <= _recentlength):
if config.currentlySelected not in data['recentlyOpened']:
data['recentlyOpened'].insert(0, config.currentlySelected)
else:
data['recentlyOpened'].insert(0, config.currentlySelected)
data['recentlyOpened'].pop(_recentlength-1)
with open(jsonData, 'w') as f:
json.dump(data,f)
self.holdROItemFrame.pack_forget()
for ropenedItem in data:
self.ropenedItemBtn = ttk.Button(self.holdROItemFrame, text=ropenedItem, width=30)
self.ropenedItemBtn.pack(side="top", pady=2)
self.notebook.update()
if config.currentlySelected in functionalities and config.currentlySelected != "Home":
functionalities[config.currentlySelected](self)
else:
if config.currentlySelected != "Home":
infoFrame(self, config.currentlySelected)
self.setup_menu()
root.update()
# Function that will run when the an item in the tree is selected
    def on_tree_select(self, event):
        """Open the tool for the clicked treeview row; toggle the
        Periodic Table <Return> binding accordingly.

        The broad except swallows empty selections and any error raised
        while building the tool screen (kept as-is to preserve behavior).
        """
        try:
            self.run_func(self.treeview.item(self.treeview.selection()[0])['text'])
            self.periodicTableScreen(self.treeview.item(self.treeview.selection()[0])['text'] == "Periodic Table")
        except: pass
# Function to create a screen to our default parameters
def addframe(self,frame="",**args):
if frame == "":
frame=self.notebook
self.screenlist.append(ttk.Frame(frame,**args))
return self.screenlist[-1]
    def showHomeScreen(self):
        """Render the home screen: greeting, app logo/description,
        recently-opened shortcuts, and the credits section."""
        # Config
        config.currentlySelected = "Home"
        self.clearScreen()
        # Top Frame (Hello, xxx)
        self.welcomeFrame = self.addframe()
        self.welcomeFrame.pack(side="top", padx=25, pady=18, anchor="w")
        self.helloUserLab = WrappingLabel(self.welcomeFrame,text="Hello, {}".format(config.username), font=(FONT, int(fontMultiplier*50),'bold'))
        self.helloUserLab.pack(pady=2,fill="x")
        self.welcomeLab = WrappingLabel(self.welcomeFrame, text="Welcome to Toobox!",font=(FONT, int(fontMultiplier*15)))
        self.welcomeLab.pack(side="left", fill="x")
        # NOTE: welcomeLab2 is created but never packed in the original code.
        self.welcomeLab2 = WrappingLabel(self.welcomeFrame, text="Select a tool to get started!",font=(FONT, int(fontMultiplier*15)))
        # Toobox App Logo and App Description
        self.widthOfTooboxInfo = 200
        self.tooboxInfoFrame = self.addframe(width=self.widthOfTooboxInfo)
        self.tooboxInfoFrame.pack(side="left", padx=25, pady=18, anchor="w")
        # NOTE(review): Image.ANTIALIAS was removed in Pillow 10 (replaced by
        # Image.LANCZOS) -- confirm the pinned Pillow version.
        appIconImg = ImageTk.PhotoImage(Image.open(appIconPng).resize((self.widthOfTooboxInfo-20,self.widthOfTooboxInfo-20), Image.ANTIALIAS))
        self.imgPanel = WrappingLabel(self.tooboxInfoFrame, image=appIconImg)
        # Keep a reference so the PhotoImage is not garbage-collected.
        self.imgPanel.image = appIconImg
        self.appDescText = WrappingLabel(self.tooboxInfoFrame, font=(fontMultiplier*17), wraplength=self.widthOfTooboxInfo, justify="left" ,text="Toobox is an app is a Toolbox of different tools to help in your Academics. Toobox provides various tools for a wide range of topics and subjects that will definately help you while revising and studying.")
        self.appDescText.pack(side="bottom")
        self.imgPanel.pack(side="bottom", fill="both", expand="yes", pady=32)
        file = open(jsonData)
        data = json.load(file)
        file.close()
        # set() deduplicates the recent list (ordering is not preserved).
        data = list(set(data['recentlyOpened']))
        # Recently Opened
        self.recentlyOpenedFrame = self.addframe(width=self.widthOfTooboxInfo)
        self.recentlyOpenedFrame.pack(side="left", padx=20, pady=18, anchor="w")
        self.recentlyOpenedText = WrappingLabel(self.recentlyOpenedFrame, text="Recently Opened ({})".format(str(len(data[:3]))),font=(FONT, int(fontMultiplier*20), "bold"))
        self.recentlyOpenedText.pack(side="top", pady=3)
        self.screenlist.append(ttk.Frame(self.recentlyOpenedFrame))
        self.holdROItemFrame = self.screenlist[-1]
        self.holdROItemFrame.pack(side="top")
        if len(data) == 0:
            # Nothing opened yet -- the .format() call here is a no-op
            # (the string has no placeholder).
            self.noROText = WrappingLabel(self.recentlyOpenedFrame, text="You have not opened anything recently.".format(str(len(data[:3]))),font=(FONT, int(fontMultiplier*17)), wraplength=self.widthOfTooboxInfo)
            self.noROText.pack(side="top", pady=3, anchor="w")
        else:
            for i in range(len(data[:3])): # Show up to 3 recently-opened tools
                temp=str(data[i])
                # Default-arg binding captures the current name per button.
                def test(x=temp):
                    return self.run_func(str(x))
                self.ropenedItemBtn = ttk.Button(self.holdROItemFrame, text=temp, width=30, command=test)
                self.ropenedItemBtn.pack(side="top", pady=2)
        # Credits Section
        self.creditsFrame = self.addframe(width=self.widthOfTooboxInfo)
        self.creditsFrame.pack(side="left", padx=20, pady=18, anchor="w")
        self.creditsTitle = WrappingLabel(self.creditsFrame, text="Credits",font=(FONT, int(fontMultiplier*20), "bold"), justify="left")
        self.creditsTitle.pack(side="top", pady=3, anchor="w")
        self.developersHeader = WrappingLabel(self.creditsFrame, text="Developers:",font=(FONT, int(fontMultiplier*17), "bold"), justify="left")
        self.developersHeader.pack(side="top", anchor="w")
        self.developersList = WrappingLabel(self.creditsFrame, text="Ethan Chew, Jerick Seng, Granwyn Tan",font=(FONT, int(fontMultiplier*17)), justify="left")
        self.developersList.pack(side="top", pady=3, anchor="w")
        self.dtHeader = WrappingLabel(self.creditsFrame, text="Dependencies and Themes:",font=(FONT, int(fontMultiplier*17), "bold"), justify="left")
        self.dtHeader.pack(side="top", anchor="w")
        self.dtList = WrappingLabel(self.creditsFrame, text="chemlib, tkinter, numpy and the sun-valley Theme for tkinker",font=(FONT, int(fontMultiplier*17)), justify="left")
        self.dtList.pack(side="top", pady=3, anchor="w")
# Destroys and 'Quits' the app
    def _quit(self):
        """Stop the Tk mainloop and destroy the root window."""
        root.quit()
        root.destroy()
# Function to allow for sending of notifications through AppleScript
def notify(self, title, text, sound):
os.system("""
osascript -e 'display notification "{}" with title "{}" sound name "{}"'
""".format(text, title, sound))
    # It's a... secret :)
f=(lambda:exec("\x69\x6d\x70\x6f\x72\x74\x20\x77\x65\x62\x62\x72\x6f\x77\x73\x65\x72\x0a\x77\x65\x62\x62\x72\x6f\x77\x73\x65\x72\x2e\x6f\x70\x65\x6e\x5f\x6e\x65\x77\x28\x22\x68\x74\x74\x70\x73\x3a\x2f\x2f\x77\x77\x77\x2e\x79\x6f\x75\x74\x75\x62\x65\x2e\x63\x6f\x6d\x2f\x77\x61\x74\x63\x68\x3f\x76\x3d\x64\x51\x77\x34\x77\x39\x57\x67\x58\x63\x51\x22\x29"))
if __name__ == "__main__":
    # Application entry point: build the Tk root window and start the UI.
    root = tk.Tk()
    root.title("Toobox")
    # Simply set the theme
    root.tk.call("source", appThemePath)
    # Read the persisted settings to pick the default theme.
    file = open(jsonData, "r")
    data = json.load(file)
    file.close()
    root.tk.call("set_theme", data['default-theme'])
    # Set App Icon
    # root.iconbitmap(appIconIcns)
    img = tk.Image("photo", file=appIconPng)
    root.tk.call('wm','iconphoto', root._w, img)
    app = App(root)
    app.pack(fill="both", expand=True)
    # Lock the minimum size to the initial layout size.
    root.update()
    root.minsize(root.winfo_width(), root.winfo_height())
    # NOTE(review): positioning the window at (+screenwidth, +screenheight)
    # places it off-screen before maximizing -- confirm this is intentional.
    x_cordinate = root.winfo_screenwidth()
    y_cordinate = root.winfo_screenheight()
    root.geometry("+{}+{}".format(x_cordinate, y_cordinate))
    # 'zoomed' maximizes the window (Windows-specific state name).
    root.state('zoomed')
    root.mainloop()
| 45.312399 | 355 | 0.591457 | 23,311 | 0.828129 | 0 | 0 | 0 | 0 | 0 | 0 | 5,423 | 0.192653 |
1b206ae3f5c5fdb2c9e5d1ecca0c6fbb09923902 | 5,152 | py | Python | test/acceptance/tools/fabric_utils.py | FIWARE/cloud.PaaS | 3ddec91c2b0ef3baca1dd2e596373cf0d4d341e3 | [
"Apache-2.0"
] | null | null | null | test/acceptance/tools/fabric_utils.py | FIWARE/cloud.PaaS | 3ddec91c2b0ef3baca1dd2e596373cf0d4d341e3 | [
"Apache-2.0"
] | null | null | null | test/acceptance/tools/fabric_utils.py | FIWARE/cloud.PaaS | 3ddec91c2b0ef3baca1dd2e596373cf0d4d341e3 | [
"Apache-2.0"
] | 2 | 2016-08-22T16:03:25.000Z | 2018-03-05T23:28:55.000Z | # -*- coding: utf-8 -*-
# Copyright 2014 Telefonica Investigación y Desarrollo, S.A.U
#
# This file is part of FI-WARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = "Javier Fernández"
__email__ = "jfernandez@tcpsi.es"
__copyright__ = "Copyright 2015"
__license__ = " Apache License, Version 2.0"
__version__ = "1.0.0"
import logging
from fabric.api import env, hide, run, get
from fabric.tasks import execute
from fabric.contrib import files
from StringIO import StringIO
__logger__ = logging.getLogger("qautils")
FABRIC_ASSERT_RESULT = u'<local-only>'
class FabricAssertions():
    """Assertions executed on the remote host through Fabric."""

    @staticmethod
    def assert_file_exist(path):
        """Return True when *path* exists on the current remote host.

        :param path: absolute path to the remote file
        """
        return files.exists(path)

    @staticmethod
    def assert_content_in_file(path, expected_content):
        """Return True when *expected_content* occurs in the remote file at *path*.

        The file is downloaded into an in-memory buffer and searched locally.

        :param path: absolute path to the remote file
        :param expected_content: substring to look for
        """
        buffer = StringIO()
        get(path, buffer)
        return expected_content in buffer.getvalue()
class FabricUtils():
    """Thin wrapper around Fabric for running commands and file checks remotely."""

    def __init__(self, host_name, host_username, host_password=None, host_ssh_key=None):
        """
        Init Fabric client.  NOTE: this mutates Fabric's *global* ``env``,
        so the last constructed instance defines the target host.
        :param host_name (string): Hostname
        :param host_username (string): Username
        :param host_password (string): Password
        :param host_ssh_key (string): SSH private key file
        :return: None
        """
        __logger__.info("Init Fabric to execute remote commands in '%s'. Credentials: '%s/%s'; SSH Key file: '%s'",
                        host_name, host_username, host_password, host_ssh_key)
        env.host_string = host_name
        env.user = host_username
        env.password = host_password
        env.key_filename = host_ssh_key
        self.fabric_assertions = FabricAssertions()

    @staticmethod
    def execute_command(command):
        """
        Execute a shell command on the current remote host
        :param command (string): Command to be execute
        :return (string): Result of the remote execution or None if some problem happens
        """
        __logger__.debug("Executing remote command: '%s'", command)
        try:
            with hide('running', 'stdout'):
                result = run(command)
            __logger__.debug("Result of execution: \n%s", result)
            return result
        except Exception:
            # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit propagate.
            __logger__.error("Any problem executing command: '%s'", command)
            return None

    def file_exist(self, dir_path, file_name):
        """
        Fabric executor: Run method with assertion 'assert_file_exist' in the remote host
        :param dir_path (string): Path of the directory where file is located.
        :param file_name (string): File name
        :return (bool): True if file contains that content (dir: PROVISION_ROOT_PATH)
        """
        path = "{}/{}".format(dir_path, file_name)
        __logger__.debug("Checking if remote file exists: '%s'", path)
        with hide('running', 'stdout'):
            success = execute(self.fabric_assertions.assert_file_exist, path=path)
        # `execute` returns a dict keyed by host; local-only runs use this sentinel key.
        return success[FABRIC_ASSERT_RESULT]

    def content_in_file(self, dir_path, file_name, expected_content):
        """
        Fabric executor: Run method with assertion 'assert_content_in_file' on the remote host
        :param dir_path (string): Path of the directory where file is located.
        :param file_name (string): File name
        :param expected_content (string): String to be found in file
        :return (bool): True if file contains that content (dir: PROVISION_ROOT_PATH)
        """
        path = "{}/{}".format(dir_path, file_name)
        __logger__.debug("Checking if the content '%s' is in remote file: '%s'", expected_content, path)
        try:
            with hide('running', 'stdout'):
                success = execute(self.fabric_assertions.assert_content_in_file,
                                  path=path, expected_content=expected_content)
        except Exception:
            # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit propagate.
            __logger__.error("Problem when trying to access to remote file")
            return False
        return success[FABRIC_ASSERT_RESULT]
1b206be8b7765566c3651186ba151112e61310ba | 3,079 | py | Python | testbot/bot/bot.py | Carberra/discord.py-2.0-testing | 6143f62636ee712f468af2462060f09ff8e24730 | [
"BSD-3-Clause"
] | null | null | null | testbot/bot/bot.py | Carberra/discord.py-2.0-testing | 6143f62636ee712f468af2462060f09ff8e24730 | [
"BSD-3-Clause"
] | null | null | null | testbot/bot/bot.py | Carberra/discord.py-2.0-testing | 6143f62636ee712f468af2462060f09ff8e24730 | [
"BSD-3-Clause"
] | null | null | null | import traceback
from pathlib import Path
import discord
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from discord.ext import commands, fancyhelp
from pytz import utc
from testbot import __version__
MAIN_GUILD_ID = 845688627265536010
STDOUT_CHANNEL_ID = 845691044527210515
class Bot(commands.Bot):
    """Discord test bot: loads extensions, runs an APScheduler, reports to a stdout channel."""
    # NOTE(review): discord.py's commands.Bot exposes `extensions` as a property;
    # listing it in __slots__ and assigning `self.extensions` below may conflict
    # with the library -- confirm against the pinned discord.py version.
    __slots__ = ("ready", "extensions", "scheduler", "error_handler")
    def __init__(self) -> None:
        self.ready = False
        # Extension names = stem of every .py file under testbot/bot/extensions.
        self.extensions = [p.stem for p in Path(".").glob("./testbot/bot/extensions/*.py")]
        self.scheduler = AsyncIOScheduler()
        self.scheduler.configure(timezone=utc)
        self.error_handler = None
        super().__init__(
            command_prefix="-",
            case_insensitive=True,
            intents=discord.Intents.all(),
            help_command=fancyhelp.EmbeddedHelpCommand(),
            activity=discord.Activity(
                name=f"-help | Version {__version__}",
                type=discord.ActivityType.watching,
            ),
        )
    def setup(self) -> None:
        """Load every discovered extension module."""
        print("Running setup...")
        for ext in self.extensions:
            self.load_extension(f"testbot.bot.extensions.{ext}")
            print(f" `{ext}` extension loaded.")
    def run(self) -> None:
        """Read the token from ./secrets/token and start the bot (blocking)."""
        self.setup()
        with open("./secrets/token", mode="r", encoding="utf-8") as f:
            token = f.read()
        print("Running bot...")
        super().run(token, reconnect=True)
    async def close(self) -> None:
        """Announce shutdown, stop the scheduler, then close the connection."""
        print("Shutting down...")
        self.scheduler.shutdown()
        await self.stdout.send(f"Shutting down testbot v{__version__}.")
        await super().close()
    async def on_connect(self) -> None:
        print(f" Bot connected. DWSP latency: {self.latency * 1000:,.0f} ms")
    async def on_disconnect(self) -> None:
        print(f" Bot disconnected.")
    async def on_error(self, err: str, *args, **kwargs):
        # For command errors, args[0] is the invocation context -- notify the user.
        if err == "on_command_error":
            await args[0].send("Something went wrong.")
        traceback.print_exc()
    async def on_command_error(self, ctx: commands.Context, exc: Exception):
        # Delegated to the error-handler extension (set after load).
        await self.error_handler.command_error(ctx, exc)
    async def on_ready(self) -> None:
        # Guard: on_ready may fire again after reconnects; initialize only once.
        if self.ready:
            return
        self.guild = self.get_guild(MAIN_GUILD_ID)
        self.stdout = self.guild.get_channel(STDOUT_CHANNEL_ID)
        self.scheduler.start()
        print(f" Scheduler started ({len(self.scheduler.get_jobs()):,} job(s) scheduled)")
        await self.stdout.send(f"testbot v{__version__} is online!")
        self.ready = True
        print(" Bot ready!")
    async def on_message(self, message: discord.Message) -> None:
        # Ignore bots and DMs; everything else goes through command processing.
        if message.author.bot or isinstance(message.channel, discord.DMChannel):
            return
        await self.process_commands(message)
    async def process_commands(self, message: discord.Message) -> None:
        ctx = await self.get_context(message, cls=commands.Context)
        if ctx.command is None:
            return
        await self.invoke(ctx)
| 31.10101 | 91 | 0.626827 | 2,785 | 0.904514 | 0 | 0 | 0 | 0 | 1,560 | 0.506658 | 541 | 0.175706 |
1b2089f485f3812855dad084d1e839ff916e3e3b | 759 | py | Python | alembic/versions/8feba263d722_added_a_flag_to_commands_if_it_should_.py | smackedlol/pajbot | cc6d00e20fd0847f88e487937ac02d0011e05e67 | [
"MIT"
] | 1 | 2020-10-01T23:36:38.000Z | 2020-10-01T23:36:38.000Z | alembic/versions/8feba263d722_added_a_flag_to_commands_if_it_should_.py | smackedlol/pajbot | cc6d00e20fd0847f88e487937ac02d0011e05e67 | [
"MIT"
] | 1 | 2021-03-25T05:37:40.000Z | 2021-03-25T05:37:40.000Z | alembic/versions/8feba263d722_added_a_flag_to_commands_if_it_should_.py | leecopland/bullbot | 52e463293097b58084afb4f9f1d85b0656a67d44 | [
"MIT"
] | 1 | 2020-03-11T19:37:10.000Z | 2020-03-11T19:37:10.000Z | """Added a flag to commands if it should be run through the banphrases
Revision ID: 8feba263d722
Revises: a6f9b5c3ba83
Create Date: 2016-05-24 22:04:49.803097
"""
# revision identifiers, used by Alembic.
revision = '8feba263d722'
down_revision = 'a6f9b5c3ba83'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply migration: add NOT NULL boolean column `run_through_banphrases`
    (server default '0') to table `tb_command`."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('tb_command', sa.Column('run_through_banphrases', sa.Boolean(), server_default='0', nullable=False))
    ### end Alembic commands ###
def downgrade():
    """Revert migration: drop column `run_through_banphrases` from `tb_command`."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('tb_command', 'run_through_banphrases')
    ### end Alembic commands ###
| 26.172414 | 118 | 0.72332 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 481 | 0.633729 |
1b21ac02930b4e22bdd28cedc76ce3ebf51b7c69 | 650 | py | Python | hdri_encoding/metadata_encoding_utils.py | microsoft/ConfigNet | f16b9b52698b1fe588322fdc5d921746f68d0e9e | [
"MIT"
] | 82 | 2020-08-24T01:47:03.000Z | 2022-02-28T08:33:50.000Z | hdri_encoding/metadata_encoding_utils.py | microsoft/ConfigNet | f16b9b52698b1fe588322fdc5d921746f68d0e9e | [
"MIT"
] | 3 | 2020-09-18T07:21:00.000Z | 2022-02-09T23:42:27.000Z | hdri_encoding/metadata_encoding_utils.py | microsoft/ConfigNet | f16b9b52698b1fe588322fdc5d921746f68d0e9e | [
"MIT"
] | 16 | 2020-08-24T05:47:00.000Z | 2022-01-05T07:25:13.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
"""Common functions used by the metadata encoding scripts"""
import json
def load_metadata_dicts(metadata_files):
    """Read each JSON metadata file and return the parsed objects, in order.

    :param metadata_files: iterable of file paths
    :return: list of parsed JSON objects
    """
    parsed = []
    for metadata_path in metadata_files:
        with open(metadata_path, "r") as json_file:
            parsed.append(json.load(json_file))
    return parsed
def save_metadata_dicts(metadata_dicts, metadata_files):
    """Write each metadata object to its corresponding JSON file (pretty-printed).

    :param metadata_dicts: sequence of JSON-serializable objects
    :param metadata_files: sequence of target file paths, same length
    :raises ValueError: if the two sequences differ in length
    """
    # Raise instead of `assert`: assertions are stripped under `python -O`.
    if len(metadata_dicts) != len(metadata_files):
        raise ValueError("metadata_dicts and metadata_files must have the same length")
    # zip pairs each dict with its file path (idiomatic replacement of range(len(...))).
    for metadata, metadata_path in zip(metadata_dicts, metadata_files):
        with open(metadata_path, "w") as fp:
            json.dump(metadata, fp, indent=4)
| 34.210526 | 61 | 0.690769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 139 | 0.213846 |
1b227e9375b3dddeb348d11a30d06d878e3e08d5 | 11,700 | py | Python | code/segmentation/MELC_ImageProcessing.py | perlfloccri/DeepFLEX | 84d2d83f09acdb4dbc7c02735e071bf8d67ddafe | [
"MIT"
] | null | null | null | code/segmentation/MELC_ImageProcessing.py | perlfloccri/DeepFLEX | 84d2d83f09acdb4dbc7c02735e071bf8d67ddafe | [
"MIT"
] | null | null | null | code/segmentation/MELC_ImageProcessing.py | perlfloccri/DeepFLEX | 84d2d83f09acdb4dbc7c02735e071bf8d67ddafe | [
"MIT"
] | 1 | 2020-09-30T21:44:12.000Z | 2020-09-30T21:44:12.000Z | from MELC.utils.myDatasets import generate_workingRaw_from_raw, MELCStructureDataset
import numpy as np
import tifffile as tiff
from MELC.utils.registration_daria import register
import matplotlib.pyplot as plt
import cv2
from MELC.utils.Files import create_folder
from skimage import img_as_float, img_as_uint
from MELC.utils.f_transformations import filterLowFrequencies, visualize_frequencies
import glob
from os.path import join
from config import *
import argparse
SEPARATOR = '/'
def parse_args():
    """Build the CLI parser and return the parsed arguments (requires --path)."""
    arg_parser = argparse.ArgumentParser(description='Run Training of Mask R-CNN')
    arg_parser.add_argument('--path', dest='path', required=True,
                            help='Config file for training (and optionally testing)')
    return arg_parser.parse_args()
class MELCImageProcessing:
    """MELC run pipeline: registration, background correction and normalization.

    Constructing an instance immediately processes the whole run found at *path*:
    it builds the output folder tree, computes the brightfield correction images
    from the calibration data, and writes registered / background-corrected /
    normalized TIFFs (plus 16-bit visualization copies).
    """
    def __init__(self, path: str, melc_structure_generated: bool = True):
        """Load the MELC run at *path* and process all images.

        :param path: root directory of one MELC acquisition run
        :param melc_structure_generated: when False, first convert `raw` into
            the `w_raw` working structure
        """
        self._path = path
        self._path_registered_fluor = ''
        self._path_registered_bleach = ''
        self._path_registered_phase = ''
        self._path_registered_vis_fluor = ''
        self._path_registered_vis_bleach = ''
        self._path_registered_vis_phase = ''
        self._path_bg_corr = ''
        self._path_bg_corr_f = ''
        self._path_bg_corr_v_f = ''
        self._path_normalized_f = ''
        self._path_normalized_v_f = ''
        '''
        Extract MELC data and calibration data
        '''
        w_raw = self._path + SEPARATOR + 'w_raw'
        if not melc_structure_generated:
            generate_workingRaw_from_raw(self._path, w_raw)
        melc_dataset = MELCStructureDataset(w_raw)
        '''
        Sort by creation date
        '''
        # Acquisition order is encoded in the 'order_index' column of each table.
        self._melc_fluor = melc_dataset.fluor_pd.sort_values('order_index', ascending=True)
        self._melc_phase = melc_dataset.phase_pd.sort_values('order_index', ascending=True)
        self._melc_bleach = melc_dataset.bleach_pd.sort_values('order_index', ascending=True)
        self._melc_phasebleach = melc_dataset.phasebleach_pd.sort_values('order_index', ascending=True)
        self.create_folders()
        self._corrected_bf_im = self.generate_bg_correction_img()
        self.process_images()
    def create_folders(self):
        '''
        Create folders for registered images
        '''
        path_processed = join(self._path, 'processed')
        path_registered = join(path_processed, 'registered')
        self._path_registered_fluor = join(path_registered, 'fluor')
        self._path_registered_bleach = join(path_registered, 'bleach')
        self._path_registered_phase = join(path_registered, 'phase')
        self._path_registered_vis_fluor = join(path_registered, 'vis_fluor')
        self._path_registered_vis_bleach = join(path_registered, 'vis_bleach')
        self._path_registered_vis_phase = join(path_registered, 'vis_phase')
        create_folder(path_processed)
        create_folder(path_registered)
        create_folder(self._path_registered_fluor)
        create_folder(self._path_registered_bleach)
        create_folder(self._path_registered_phase)
        create_folder(self._path_registered_vis_fluor)
        create_folder(self._path_registered_vis_bleach)
        create_folder(self._path_registered_vis_phase)
        '''
        Create folders for background corrected images
        '''
        # NOTE: these paths keep a trailing SEPARATOR; filenames built later
        # also start with SEPARATOR, so paths contain a doubled separator
        # (harmless on POSIX/Windows).
        self._path_bg_corr = self._path + SEPARATOR + 'processed' + SEPARATOR + 'background_corr' + SEPARATOR
        self._path_bg_corr_f = self._path_bg_corr + 'fluor' + SEPARATOR
        self._path_bg_corr_v_f = self._path_bg_corr + 'vis_fluor' + SEPARATOR
        self._path_bg_corr_p = self._path_bg_corr + 'phase' + SEPARATOR
        self._path_bg_corr_v_p = self._path_bg_corr + 'vis_phase' + SEPARATOR
        create_folder(self._path_bg_corr)
        create_folder(self._path_bg_corr_f)
        create_folder(self._path_bg_corr_v_f)
        create_folder(self._path_bg_corr_p)
        create_folder(self._path_bg_corr_v_p)
        '''
        Create folders for normalized images
        '''
        path_normalized = self._path + SEPARATOR + 'processed' + SEPARATOR + 'normalized'
        self._path_normalized_f = path_normalized + SEPARATOR + 'fluor' + SEPARATOR
        self._path_normalized_v_f = path_normalized + SEPARATOR + 'vis_fluor' + SEPARATOR
        self._path_normalized_p = path_normalized + SEPARATOR + 'phase' + SEPARATOR
        self._path_normalized_v_p = path_normalized + SEPARATOR + 'vis_phase' + SEPARATOR
        create_folder(path_normalized)
        create_folder(self._path_normalized_f)
        create_folder(self._path_normalized_v_f)
        create_folder(self._path_normalized_p)
        create_folder(self._path_normalized_v_p)
    def generate_bg_correction_img(self):
        '''
        Create correction image for fluorescence and bleaching images
        '''
        brightfield_im = []
        darkframe_im = []
        # Index 0 = filter XF116-2, index 1 = filter XF111-2 (order matters below).
        filter_names = ['XF116-2', 'XF111-2']
        calibration_path = self._path + SEPARATOR +'w_raw' + SEPARATOR + 'calibration' + SEPARATOR
        brightfield_im.append(np.int16(tiff.imread(glob.glob(calibration_path + '*_cal_b001_5000_XF116-2_000.tif'))))
        brightfield_im.append(np.int16(tiff.imread(glob.glob(calibration_path + '*_cal_b001_5000_XF111-2_000.tif'))))
        darkframe_im.append(np.int16(tiff.imread(glob.glob(calibration_path + '*_cal_d001_5000_XF116-2_000.tif'))))
        darkframe_im.append(np.int16(tiff.imread(glob.glob(calibration_path + '*_cal_d001_5000_XF111-2_000.tif'))))
        # Correction image = brightfield minus darkframe, clamped at zero.
        corrected_brightfield_im = [(brightfield_im[i] - darkframe_im[i]) for i in range(len(filter_names))]
        corrected_brightfield_im[0][corrected_brightfield_im[0] <= 0] = 0
        corrected_brightfield_im[1][corrected_brightfield_im[1] <= 0] = 0
        return corrected_brightfield_im
    def process_images(self):
        '''
        Registration, background correction and normalization of images
        '''
        '''
        Registration
        '''
        # All images are registered against the Propidium-iodide phase image.
        ref_image = tiff.imread(glob.glob(self._path + SEPARATOR + 'w_raw' + SEPARATOR + 'phase' + SEPARATOR + '*_Propidium iodide_200_XF116*.tif'))
        # Pair bleach image i with the *next* fluorescence image (i+1).
        for i in range(0, (len(self._melc_fluor)-1)):
            pb_idx = np.where(self._melc_phasebleach['order_index'] == self._melc_bleach.iloc[i]['order_index'])[0][0]
            phasebleach_image = tiff.imread(self._melc_phasebleach.iloc[pb_idx]['path'])
            bleach_image = tiff.imread(self._melc_bleach.iloc[i]['path'])
            registered_bleach_image = register(ref_image, phasebleach_image, bleach_image)
            # Output filename: "<order_index>_<fid without trailing token>.tif".
            filename_bleach = SEPARATOR + str(int(self._melc_bleach.iloc[i]['order_index'])) + '_' + '_'.join(
                self._melc_bleach.iloc[i]['fid'].split('_')[:-1]) + '.tif'
            tiff.imsave(self._path_registered_bleach + filename_bleach, registered_bleach_image)
            save_vis_img(registered_bleach_image, self._path_registered_vis_bleach, filename_bleach)
            p_idx = np.where(self._melc_phase['order_index'] == self._melc_fluor.iloc[i+1]['order_index'])[0][0]
            phase_image = tiff.imread(self._melc_phase.iloc[p_idx]['path'])
            fluorescence_image = tiff.imread(self._melc_fluor.iloc[i+1]['path'])
            registered_phase_image = register(ref_image, phase_image, phase_image)
            registered_fluor_image = register(ref_image, phase_image, fluorescence_image)
            filename_fluor = SEPARATOR + str(int(self._melc_fluor.iloc[i+1]['order_index'])) + '_' + '_'.join(
                self._melc_fluor.iloc[i+1]['fid'].split('_')[:-1]) + '.tif'
            tiff.imsave(self._path_registered_fluor + filename_fluor, registered_fluor_image)
            # NOTE(review): this writes the *fluor* image into the phase folder;
            # `registered_phase_image` was likely intended -- confirm.
            tiff.imsave(self._path_registered_phase + filename_fluor, registered_fluor_image)
            save_vis_img(registered_fluor_image, self._path_registered_vis_fluor, filename_fluor)
            save_vis_img(registered_phase_image, self._path_registered_vis_phase, filename_fluor)
            '''
            Background Correction
            '''
            bleach = np.int16(registered_bleach_image)
            fluor = np.int16(registered_fluor_image)
            phase = np.int16(registered_phase_image)
            # Subtract the brightfield correction matching each image's filter.
            if self._melc_fluor.iloc[i+1]['filter'] == 'XF111-2':
                fluor -= self._corrected_bf_im[1]
                phase -= self._corrected_bf_im[1]
            else:
                fluor -= self._corrected_bf_im[0]
                phase -= self._corrected_bf_im[0]
            if self._melc_bleach.iloc[i]['filter'] == 'XF111-2':
                bleach -= self._corrected_bf_im[1]
            else:
                bleach -= self._corrected_bf_im[0]
            phase[phase < 0] = 0
            # Substraction of bleaching image
            fluor_wo_bg = fluor - bleach
            fluor_wo_bg[fluor_wo_bg < 0] = 0
            tiff.imsave(self._path_bg_corr_f + filename_fluor, fluor_wo_bg)
            save_vis_img(fluor_wo_bg, self._path_bg_corr_v_f, filename_fluor)
            tiff.imsave(self._path_bg_corr_p + filename_fluor, phase)
            save_vis_img(phase, self._path_bg_corr_v_p, filename_fluor)
            '''
            Normalization
            '''
            # NOTE(review): melc_normalization mutates its argument in place
            # (high-intensity cut-off) -- fluor_wo_bg/phase are not reused
            # afterwards, so this is currently harmless.
            fluor_wo_bg_normalized = melc_normalization(fluor_wo_bg)
            phase_bc_normalized = melc_normalization(phase)
            tiff.imsave(self._path_normalized_f + filename_fluor, fluor_wo_bg_normalized)
            save_vis_img(fluor_wo_bg_normalized, self._path_normalized_v_f, filename_fluor)
            tiff.imsave(self._path_normalized_p + filename_fluor, phase_bc_normalized)
            save_vis_img(phase_bc_normalized, self._path_normalized_v_p, filename_fluor)
def save_vis_img(img: np.ndarray, path: str, filename: str):
    """Contrast-stretch *img* for visualization and write it as a 16-bit TIFF.

    The 0.135th percentile of the interior (a 20-pixel border is ignored) is
    treated as background and subtracted; the 99.865th percentile becomes the
    new maximum; the result is clipped to [0, 1] and saved via img_as_uint.
    """
    scaled = img_as_float(img.astype(int))
    # Subtract the background level estimated from the interior region.
    scaled = scaled - np.percentile(scaled[20:-20, 20:-20], 0.135)
    peak = np.percentile(scaled[20:-20, 20:-20], 100 - 0.135)
    if not peak == 0.0:
        scaled /= peak  # normalize to 99.865% of max value
    # Clip out-of-range intensities into [0, 1].
    np.clip(scaled, 0, 1, out=scaled)
    tiff.imsave(path + filename, img_as_uint(scaled))
def melc_normalization(img: np.ndarray):
    """Cap outlier intensities and crop a 15-pixel border.

    Values above the 4th-largest pixel value are clamped to it (hot-pixel
    cut-off), then the central region ``[15:-15, 15:-15]`` is returned.

    Fix: the previous version clamped *img* in place, silently mutating the
    caller's array; we now operate on a copy and leave the input untouched.

    :param img: 2-D image array (at least 31x31 and 4 pixels)
    :return: view into the capped copy, cropped by 15 px on every side
    """
    capped = img.copy()
    # 4th-largest value over the whole image (descending flat sort).
    cutoff = np.sort(capped, axis=None)[::-1][3]
    capped[capped > cutoff] = cutoff  # cut off high intensities
    return capped[15:-15, 15:-15]
'''
For visualization and inspection of images
***Using normalization
registered_u8 = cv2.convertScaleAbs(registered_image, alpha=(255.0/65535.0))
kernel = np.ones((2, 2), np.float32)/4
mean_filtered_img = cv2.filter2D(registered_float, -1, kernel)
normalized_img = cv2.normalize(mean_filtered_img, None, 0, 255, cv2.NORM_MINMAX)
***Using FFT - cut 0.00001 percent of highest frequencies
images = []
images.append(registered_float)
visualize_frequencies(images)
pixels = registered_float.size
high_intensity_pixels = 3
percentage_non_artificial = 100-high_intensity_pixels/pixels
filtered_img = filterLowFrequencies(registered_float, percentage_non_artificial)
images.append(filtered_img)
visualize_frequencies(images)
***Plot histogram
hist = cv2.calcHist([registered_image], [0], None, [65535], [0, 65535])
plt.plot(hist)
plt.xticks(np.arange(0, 65535, step=2000))
plt.grid(True)
plt.yscale('log') # plt.xlim([0, 65535])
plt.show()
'''
if __name__ == '__main__':
    # CLI entry point: process the MELC run given via --path (raw -> w_raw conversion included).
    args = parse_args()
    MELCImageProcessing(args.path, melc_structure_generated=False)
    # raw_1 = r'G:\FORSCHUNG\LAB4\VISIOMICS\MELC\2019\3rdFinalPanel_18-6056\201912201349_1'
    # melc_processed_data = MELCImageProcessing(raw_1, melc_structure_generated=False)
    x = 0  # leftover debug anchor (no effect)
| 42.238267 | 148 | 0.685897 | 8,881 | 0.75906 | 0 | 0 | 0 | 0 | 0 | 0 | 2,678 | 0.228889 |
1b228384e19773cced14f8e402df3707f46aed14 | 429 | py | Python | examples/extremes.py | gimait/pycozmo | 601d9c09903b9300e8990723cae95974212afb09 | [
"MIT"
] | 1 | 2020-12-20T13:33:41.000Z | 2020-12-20T13:33:41.000Z | examples/extremes.py | solosito/pycozmo | 5d28118eb8f7a625ae4a66054dabf19b4fe27483 | [
"MIT"
] | null | null | null | examples/extremes.py | solosito/pycozmo | 5d28118eb8f7a625ae4a66054dabf19b4fe27483 | [
"MIT"
] | 1 | 2019-12-25T13:36:16.000Z | 2019-12-25T13:36:16.000Z | #!/usr/bin/env python
import time
import pycozmo
def pycozmo_program(cli: pycozmo.client.Client):
cli.set_head_angle(pycozmo.MAX_HEAD_ANGLE.radians)
time.sleep(1)
cli.set_head_angle(pycozmo.MIN_HEAD_ANGLE.radians)
time.sleep(1)
cli.set_lift_height(pycozmo.MAX_LIFT_HEIGHT.mm)
time.sleep(1)
cli.set_lift_height(pycozmo.MIN_LIFT_HEIGHT.mm)
time.sleep(1)
pycozmo.run_program(pycozmo_program)
| 19.5 | 54 | 0.757576 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 21 | 0.048951 |
1b24854fdff1c239a2ae1dd104ad4dbb9b57c601 | 1,668 | py | Python | ryu/tests/server-for-consistency-test.py | uiuc-srg/ryu | 2a597f812270ea9690269a20bf659f334c323eb6 | [
"Apache-2.0"
] | null | null | null | ryu/tests/server-for-consistency-test.py | uiuc-srg/ryu | 2a597f812270ea9690269a20bf659f334c323eb6 | [
"Apache-2.0"
] | null | null | null | ryu/tests/server-for-consistency-test.py | uiuc-srg/ryu | 2a597f812270ea9690269a20bf659f334c323eb6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
An echo server that uses select to handle multiple clients at a time.
Command-line parameter: port=listening port
"""
import select
import socket
import sys
# NOTE: Python 2 script (see the `except socket.error, e` syntax below).
if len(sys.argv)!=2:
    print("You need to specify a listening port!")
    sys.exit()
host = ''
port = int(sys.argv[1])
backlog = 5 # maximum number of queued connections
size = 1024 # buffer size
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind((host,port))
server.listen(backlog)
# Watched descriptors: the listening socket plus stdin (any input stops the server).
input = [server,sys.stdin]
running = 1
print("Press any key to stop the server...")
while running:
    # The Python select module allows an application to wait for input from multiple sockets at a time.
    inputready,outputready,exceptready = select.select(input,[],[])
    for s in inputready:
        if s == server:
            # handle the server socket
            client, address = server.accept()
            print("New client at "+address[0]+":"+str(address[1]))
            input.append(client)
        elif s == sys.stdin:
            # handle standard input
            junk = sys.stdin.readline()
            running = 0
        else:
            # handle all other sockets
            data = "[from h"+sys.argv[1][0]+"]: "
            #data = data+s.recv(size)
            # NOTE(review): `data` is a non-empty constant, so this condition is
            # always true and the `else` branch below is unreachable.
            if data:
                try:
                    #s.send(data)
                    print("recv: %d bytes" % len(s.recv(size)))
                except socket.error, e:
                    # Drop the client on a socket error (Python-2-only except syntax).
                    s.close()
                    input.remove(s)
                    break
            else:
                s.close()
                input.remove(s)
server.close()
| 27.344262 | 103 | 0.550959 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 536 | 0.321343 |
1b24a9c95de8992eb607fee932124e52311407d1 | 1,146 | py | Python | 2015/python/d07.py | eduellery/adventofcode | dccece0bf59bc241803edc99a6536062fe2714d1 | [
"MIT"
] | null | null | null | 2015/python/d07.py | eduellery/adventofcode | dccece0bf59bc241803edc99a6536062fe2714d1 | [
"MIT"
] | null | null | null | 2015/python/d07.py | eduellery/adventofcode | dccece0bf59bc241803edc99a6536062fe2714d1 | [
"MIT"
] | null | null | null | import re
# Parse the puzzle input: each line "ops -> wire" maps a wire name to its
# token list (e.g. "x AND y", "NOT x", or a bare value/wire).
lines = open('d07.in').read().split('\n')
calc = dict()
for line in lines:
    instruction = re.findall(r'(.*)\s->\s(\w*)', line)
    for ops, register in instruction:
        calc[register] = ops.strip().split(' ')
def calculate(register):
    """Recursively resolve wire *register* to its 16-bit value.

    Numeric tokens evaluate to themselves; otherwise the wiring table `calc`
    is consulted and the result is memoized in the module-level `results`.
    """
    try:
        return int(register)  # a bare number resolves to itself
    except ValueError:
        pass
    if register in results:
        return results[register]
    tokens = calc[register]
    if len(tokens) == 1:
        value = calculate(tokens[0])
    else:
        gate = tokens[-2]
        if gate == 'AND':
            value = calculate(tokens[0]) & calculate(tokens[2])
        elif gate == 'OR':
            value = calculate(tokens[0]) | calculate(tokens[2])
        elif gate == 'NOT':
            value = ~calculate(tokens[1]) & 0xffff  # mask to 16 bits
        elif gate == 'RSHIFT':
            value = calculate(tokens[0]) >> calculate(tokens[2])
        elif gate == 'LSHIFT':
            value = calculate(tokens[0]) << calculate(tokens[2])
    results[register] = value
    return results[register]
# Part 1: resolve wire 'a' from scratch.
results = dict()
original = calculate('a')
print('P1:', original)
# Part 2: reset the memo, pre-seed wire 'b' with the part-1 answer, re-resolve 'a'.
results = dict()
results['b'] = original
modified = calculate('a')
print('P2:', modified)
| 25.466667 | 58 | 0.519197 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 82 | 0.071553 |
1b296874bf5074952d7f78ae18699f62788825a3 | 17,739 | py | Python | extensions/logger.py | cobrab11/black1-bot | 47c1a80029d6183fc990960b422bb3155360702d | [
"Apache-2.0"
] | 3 | 2015-10-15T15:40:17.000Z | 2021-06-08T05:39:21.000Z | extensions/logger.py | cobrab11/black1-bot | 47c1a80029d6183fc990960b422bb3155360702d | [
"Apache-2.0"
] | 1 | 2019-04-06T11:54:56.000Z | 2019-04-07T00:57:49.000Z | extensions/logger.py | cobrab11/black1-bot | 47c1a80029d6183fc990960b422bb3155360702d | [
"Apache-2.0"
] | 3 | 2015-10-26T14:49:57.000Z | 2018-03-04T15:34:11.000Z | # BS mark.1-55
# /* coding: utf-8 */
# BlackSmith mark.1
# logger.py
# © 2011-2013 simpleApps (http://simpleapps.ru)
# Thanks to: WitcherGeralt (alkorgun@gmail.com)
# Logger plugin state files and per-chat caches.
logConfigFile = "dynamic/logstate.txt"
logCacheFile = "logcache.txt"
logThemes = {}
# Month names (index 1-12; index 0 unused) and weekday names (tm_wday 0-6,
# with Monday duplicated at index 7) -- Russian, used in log page headers.
Months, Days = ("", u"Январь", u"Февраль", u"Март", u"Апрель", u"Май", u"Июнь", u"Июль", u"Август", u"Сентябрь", u"Октябрь", u"Ноябрь", u"Декабрь"), (u"Понедельник", u"Вторник", u"Среда", u"Четверг", u"Пятница", u"Суббота", u"Воскресенье", u"Понедельник")
# Human-readable (Russian) labels for MUC affiliations.
logAfl = {
    "none": u"посетитель",
    "member": u"зарегистрированный пользователь",
    "admin": u"администратор",
    "owner": u"владелец"
}
# Human-readable (Russian) labels for MUC roles.
logRole = {
    "visitor": u"гость",
    "moderator": u"модератор",
    "participant": u"участник"
}
# Human-readable (Russian) labels for presence show values (None = online).
logStatus = {
    None: u"доступен",
    "xa": u"недоступен",
    "dnd": u"не беспокоить",
    "away": u"отсутствую",
    "chat": u"готов поболтать"
}
# Per-chat logger settings, nick colors, and write locks.
logCfg = {}
logNicks = {}
logSynchronize = {}
# Matches http/https/ftp URLs for linkification in log HTML.
logger_compile_link = re.compile("((http[s]?|ftp)://[^\s'\"<>]+)")
# Default HTML skeleton for a day's log page; %(date)s / %(chat)s are filled in.
DefaultLogHeader = u'''<!doctype html>
<html>
<head>
<title>%(date)s — %(chat)s</title>
<meta charset="utf-8">
<link rel="stylesheet" type="text/css" href="../../.theme/logger.css"/>
</head>
<body>
<div class="shadowed" align="right"><a href="http://simpleapps.ru/">BlackSmith Bot log file</a></div>
<div class="shadowed" align="center"><a href="xmpp:%(chat)s?join" title="Join to %(chat)s">%(chat)s</a><hr></div></hr>
<h3><div class="shadowed">%(date)s<hr></div></h3>
<div>
<tt>'''
# Global logger configuration ("timetype": "local" or "gmt").
LoggerCfg = {"theme": "LunnaCat", "enabled": False, "timetype": "local", "dir": "logs"}
# Last-seen subject per chat: {chat: {"body": ..., "time": ...}}.
Subjs = {}
def logGetDate(Time):
    """Return "prev||next" day strings ("YYYY/MM/DD") around the given struct_time.

    Used to build previous/next-day navigation for a log page.  On mktime
    failure all components become 0; a ValueError near the 2038 rollover
    falls back to the current date strings.
    """
    if LoggerCfg["timetype"] == "local":
        func = time.localtime
    else:
        func = time.gmtime
    get_date = lambda date: tuple(func(date))[:3]
    year, month, day = Time[:3]
    try:
        # Anchor at 06:00 so the +/- 86400s steps land safely on adjacent days.
        date = time.mktime(time.struct_time((year, month, day, 6, 0, 0, 0, 0, 0)))
    except Exception:
        year_p = month_p = day_p = year_n = month_n = day_n = 0
    else:
        try:
            year_p, month_p, day_p = get_date(date - 86400)
            year_n, month_n, day_n = get_date(date + 86400)
        except ValueError: ## Meet 2038 year!
            year_p, month_p, day_p = year_n, month_n, day_n =\
                [time.strftime(x) for x in ("%Y", "%m", "%d")] ## Just for fun.
            if year_p == "2038":
                Print("#-# Impossible! Bot works in 2038 year! Hello from 2013!", xmpp.debug.color_cyan) ## fuuuuuuuuuuuun!
    return "{0}/{1:02}/{2:02}||{3}/{4:02}/{5:02}".format(year_p, month_p, day_p, year_n, month_n, day_n)
def getLogFile(chat, Time):
    """Open (and return) today's log file handle for *chat*, or False.

    Appends to an existing day file, otherwise creates it from the chat's
    theme pattern (or DefaultLogHeader), closes the previously open day file,
    and writes the current subject if one is pending.
    """
    mon = "{0:02}".format(Time.tm_mon)
    logDir = chkFile("%s/%s/%d/%s" % (LoggerCfg["dir"], chat, Time.tm_year, mon))
    if not os.path.isdir(logDir):
        try:
            os.makedirs(logDir)
        except:
            # NOTE(review): bare except -- swallows everything; False signals "no log".
            return False
    # prev/next day strings are referenced by the pattern via vars().
    prev, next = logGetDate(Time).split("||")
    day = "{0:02}".format(Time.tm_mday)
    logFileName = "%s/%s.html" % (logDir, day)
    if os.path.isfile(logFileName):
        logFile = open(logFileName, "a")
        INFO["fw"] += 1  # stats: files written
    else:
        date = time.strftime("{0}, {1} %d, %Y".format(Days[Time.tm_wday], Months[Time.tm_mon]), Time)
        themeFile = chkFile("%s/%s/.theme/pattern.html" % (LoggerCfg["dir"], chat))
        if os.path.isfile(themeFile):
            pattern = read_file(themeFile)
        else:
            pattern = DefaultLogHeader
        # Close out the previous day's file with the HTML footer.
        exfile = logCfg[chat]["file"]
        if logFileName != exfile:
            if exfile and os.path.isfile(exfile):
                write_file(exfile, "\n</tt>\n</div>\n</body>\n</html>", "a")
            logCfg[chat]["file"] = logFileName
        logFile = open(logFileName, "w")
        INFO["fcr"] += 1  # stats: files created
        # Pattern placeholders (%(chat)s, %(date)s, ...) are filled from locals.
        logFile.write(pattern % vars())
        if chat in GROUPCHATS:
            if Subjs[chat]['time'] and Subjs[chat]['body']:
                # Re-announce the topic at the top of a new day, rate-limited to 20s.
                Time = time.time()
                if (Time - Subjs[chat]['time']) > 20:
                    Subjs[chat]['time'] = Time
                    logFile.write('<span class="topic">%s</span><br>' % logFormat(Subjs[chat]['body']))
                    #logWrite(chat, Subjs[chat]['body'].replace("\n", "<br>"), "subject")
    return logFile
def logFormat(body):
    """Escape *body* for HTML: linkify URLs, convert newlines and tabs."""
    escaped = xmpp.XMLescape(body)
    escaped = logger_compile_link.sub(lambda obj: "<a href=\"{0}\">{0}</a>".format(obj.group(0)), escaped) #'
    escaped = escaped.replace(chr(10), "<br>")
    # Tabs become four &nbsp; -- "	" would require a <pre> tag.
    return escaped.replace(chr(9), " " * 4)
def logWrite(chat, state, body, nick = None):
	"""Append one entry to `chat`'s HTML log.

	`state` selects the markup: "subject", "msg" (with optional `nick`),
	or any other string used directly as the CSS class of a status line.
	The timestamp source (GMT/local) comes from LoggerCfg["timetype"].
	"""
	if LoggerCfg["timetype"].lower() == "gmt":
		Time = time.gmtime()
	elif LoggerCfg["timetype"].lower() == "local":
		Time = time.localtime()
	# NOTE(review): any other "timetype" value leaves `Time` unbound and
	# raises NameError below -- config is presumably validated elsewhere.
	# One semaphore per chat keeps concurrent writers from interleaving.
	with logSynchronize[chat]:
		logFile = getLogFile(chat, Time)
		if logFile:
			timestamp = time.strftime("%H:%M:%S", Time)
			if nick: nick = xmpp.XMLescape(nick)
			body = logFormat(body)
			logFile.write(chr(10))
			if state == "subject":
				logFile.write('<a id="t{0}" href="#t{0}">[{0}]</a> <span class="topic">{1}</span><br>'.format(timestamp, body))
			elif state == "msg":
				if nick:
					nickColor = "nick%d" % coloredNick(chat, nick)
					# "/me" actions render as "*nick rest-of-line".
					if body.startswith("/me"):
						logFile.write('<span class="{0}"><a id="t{1}" href="#t{1}">[{1}]</a> *{2} {3}</span><br>'.format(nickColor, timestamp, nick, body[3:]))
					else:
						logFile.write('<span class="{0}"><a id="t{1}" href="#t{1}">[{1}]</a> <{2}></span> <span class="text">{3}</span><br>'.format(nickColor, timestamp, nick, body))
				else:
					# Nickless message: rendered as a status line.
					logFile.write('<span class="status"><a id="t{0}" href="#t{0}">[{0}]</a></span> '.format(timestamp))
					logFile.write('<span class="status">*** %s</span><br>' % (body))
			else:
				logFile.write('<span class="{0}"><a id="t{1}" href="#t{1}">[{1}]</a></span> '.format(state, timestamp))
				logFile.write('<span class="%s">%s</span><br>' % (state, body))
			logFile.close()
def coloredNick(chat, nick):
	"""Return a stable color index (1..20) for `nick` in `chat`.

	While fewer than 20 nicks are known, every nick gets a distinct,
	randomly picked free color; afterwards colors may repeat.
	"""
	known = logNicks[chat]
	if nick in known:
		return known[nick]
	if len(known) < 20:
		free = [c for c in range(1, 21) if c not in known.values()]
		color = random.choice(free)
	else:
		color = random.randrange(1, 21)
	known[nick] = color
	return color
def logWriteMessage(stanza, mType, source, body):
	"""Handler: log a public groupchat message.

	`source[1]` is the conference and `source[2]` the sender nick
	(forwarded to logWrite as its `nick` argument).
	"""
	chat = source[1]
	if chat not in GROUPCHATS:
		return
	if logCfg[chat]["enabled"] and mType == "public" and source[2]:
		logWrite(chat, "msg", body, source[2])
def logWriteSubject(chat, nick, subject, body):
	"""Handler: log a room subject change, throttled to one per 20 seconds."""
	if chat not in logCfg or not logCfg[chat]["enabled"]:
		return
	now = time.time()
	if (now - Subjs[chat]['time']) <= 20:
		return
	Subjs[chat] = {'body': body, 'time': now}
	line = body
	if nick:
		line = "%s set subject:\n%s" % (nick.strip(), subject.strip())
	logWrite(chat, "subject", line)
def logWriteJoined(chat, nick, afl, role, status, text):
	"""Handler: log a user joining `chat` (Russian log text).

	The message is %-formatted with vars(), so it pulls `nick`, `some`,
	`role`, `afl`, `status` and `text` straight from this local
	namespace -- renaming locals would change the output.
	"""
	if GROUPCHATS.has_key(chat) and logCfg[chat]["enabled"]:
		some = ""
		if logCfg[chat].get("jids"):
			# Append the real JID when it is known and not the room itself.
			jid = GROUPCHATS[chat].get(nick, {}).get("full_jid", "?@?/?")
			if not chat in jid:
				some = " (%(jid)s)" % vars()
		log = u"*** %(nick)s%(some)s заходит как %(role)s"
		if afl != "none":
			log += u" и %(afl)s"
		log += u" и теперь %(status)s"
		# Translate raw MUC affiliation/role/status into display text.
		afl, role, status = logAfl.get(afl, afl), logRole.get(role, role), logStatus.get(status, status)
		if text:
			log += " (%(text)s)"
		logWrite(chat, "join", log % vars())
def logWriteARole(chat, nick, aRole, reason):
	"""Handler: log a role/affiliation change in `chat`.

	`aRole` is a (role, affiliation) pair. The message is %-formatted
	with vars(), so local names are part of the output contract.
	"""
	if GROUPCHATS.has_key(chat) and logCfg[chat]["enabled"]:
		role, afl = aRole
		log = u"*** %(nick)s теперь %(role)s"
		if afl != "none":
			log += u" и %(afl)s"
		if reason:
			log += u" (%(reason)s)"
		# Map raw MUC values to display text before formatting.
		afl, role = logAfl.get(afl, ""), logRole.get(role, "")
		logWrite(chat, "role", log % vars())
def logWriteNickChange(stanza, chat, oldNick, nick):
	"""Handler: log a nick change in `chat` (Russian log text)."""
	if chat not in GROUPCHATS:
		return
	if logCfg[chat]["enabled"]:
		line = u'*** %s меняет ник на %s' % (oldNick, nick)
		logWrite(chat, "nick", line)
def logWriteStatusChange(chat, nick, status, priority, text):
	"""Handler: log a presence/status change (vars()-formatted text,
	so local names are part of the output contract)."""
	if GROUPCHATS.has_key(chat) and logCfg[chat]["enabled"]:
		log = u"*** %(nick)s теперь %(status)s"
		if text:
			log += " (%(text)s)"
		if priority:
			log += " [%s]" % priority
		# Map the raw status onto its display text before formatting.
		status = logStatus.get(status, "")
		logWrite(chat, "status", log % vars())
def logWriteLeave(chat, nick, reason, code):
	"""Handler: log a user leaving `chat`.

	`code` is the MUC status code: "307" = kicked, "301" = banned,
	anything else (or empty) is treated as an ordinary leave.
	"""
	if GROUPCHATS.has_key(chat) and logCfg[chat]["enabled"]:
		some = ""
		if logCfg[chat].get("jids"):
			# Append the real JID when known and not the room itself.
			jid = GROUPCHATS[chat].get(nick, {}).get("full_jid", "")
			if not chat in jid:
				some = " (%(jid)s)" % vars()
		# status_code_change(["full_jid"], chat, nick) #?!
		if code:
			if code == "307":
				if reason:
					logWrite(chat, "kick", u"*** %s%s выгнали из конференции (%s)" % (nick, some, reason))
				else:
					logWrite(chat, "kick", u"*** %s%s выгнали из конференции" % (nick, some))
			elif code == "301":
				if reason:
					logWrite(chat, "ban", u"*** %s%s запретили входить в данную конференцию (%s)" % (nick, some, reason))
				else:
					logWrite(chat, "ban", u"*** %s%s запретили входить в данную конференцию" % (nick, some))
		elif reason:
			logWrite(chat, "leave", u"*** %s%s выходит из конференции (%s)" % (nick, some, reason))
		else:
			logWrite(chat, "leave", u"*** %s%s выходит из конференции" % (nick, some))
def logFileInit(chat):
	"""Startup handler: load (or create) the per-chat logger cache.

	The cache file stores a repr()'d dict that is re-read with eval();
	it is trusted local state, but eval still executes whatever the
	file contains -- keep it out of reach of untrusted writers.
	"""
	cfg = {"theme": LoggerCfg["theme"], "enabled": False, "file": "", "jids": 0}
	Subjs[chat] = {'body': '', 'time': 0}
	if check_file(chat, logCacheFile, str(cfg)):
		cfg = eval(read_file("dynamic/%s/%s" % (chat, logCacheFile)))
	else:
		delivery(u"Внимание! Не удалось создать файл \"dynamic/%s/%s\"!" % (chat, logCacheFile))
	logCfg[chat] = cfg
	logNicks[chat] = {}
	logSynchronize[chat] = threading.Semaphore()
	# Install the configured theme on first use of an enabled chat.
	if not os.path.isdir(chkFile("%s/%s/.theme" % (LoggerCfg["dir"], chat))) and logThemes.has_key(cfg["theme"]):
		if logCfg[chat]["enabled"]:
			logThemeCopier(chat, cfg["theme"])
def init_logger():
	"""Plugin bootstrap: load the logger config, discover available
	themes and register all event/command handlers.

	Returns True when the logger is enabled and wired up; otherwise
	prints a notice and returns None (falsy).
	"""
	if initialize_file(logConfigFile, str(LoggerCfg)):
		LoggerCfg.update(eval(read_file(logConfigFile)))
		if LoggerCfg["enabled"]:
			if not os.path.isdir(LoggerCfg["dir"]):
				try:
					os.makedirs(LoggerCfg["dir"])
				except:
					pass
			# A theme is any subdirectory of static/logger/themes that
			# ships a logger.css file.
			Dir = "static/logger/themes"
			for Theme in os.listdir(Dir):
				path = "%s/%s" % (Dir, Theme)
				if os.path.isdir(path):
					if "logger.css" in os.listdir(path):
						logThemes[Theme] = path
			handler_register("01si", logFileInit)
			handler_register("04eh", logWriteJoined)
			handler_register("05eh", logWriteLeave)
			handler_register("01eh", logWriteMessage)
			handler_register("09eh", logWriteSubject)
			handler_register("07eh", logWriteARole)
			handler_register("06eh", logWriteNickChange)
			handler_register("08eh", logWriteStatusChange)
			command_handler(logSetState, 30, "logger")
			return True
		else:
			Print("\nCan't init %s, logger wasn't enabled." % logConfigFile, color2)
def logThemeCopier(chat, theme):
	"""(Re)install log theme `theme` for `chat`.

	Replaces <log dir>/<chat>/.theme with a fresh copy of the theme
	directory registered in logThemes. Raises KeyError for an unknown
	theme name (callers check logThemes first).
	"""
	# Imported lazily: theme copying is rare, so avoid a module-level
	# dependency. (The previous `del shutil` was a misleading no-op --
	# it only unbound the local name, it never unloaded the module.)
	import shutil
	themeDir = chkFile("%s/%s/.theme" % (LoggerCfg["dir"], chat))
	if os.path.exists(themeDir):
		shutil.rmtree(themeDir)
	shutil.copytree(logThemes[theme], themeDir)
def logSetStateMain(mType, source, argv):
	"""Admin command: control the logger globally.

	Sub-commands (first word of `argv`):
	  1/вкл      -- enable the logger and (re)initialize it
	  0/выкл     -- disable it and unregister every logger handler
	  тема/темы  -- set or list the default theme
	  папка      -- show or change the log directory
	  время      -- show or change the timestamp type (gmt/local)
	Without arguments it reports the current on/off state.
	"""
	if argv:
		argv = argv.split()
		a0 = (argv.pop(0)).lower()
		if a0 in ("1", u"вкл"):
			if not LoggerCfg["enabled"]:
				LoggerCfg["enabled"] = True
				write_file(logConfigFile, str(LoggerCfg))
				for chat in GROUPCHATS.keys():
					execute_handler(logFileInit, (chat,))
				if init_logger():
					reply(mType, source, u"Включил логгер.")
				else:
					reply(mType, source, "Something wrong")
			else:
				reply(mType, source, u"Уже включено.")
		elif a0 in ("0", u"выкл"):
			if LoggerCfg["enabled"]:
				LoggerCfg["enabled"] = False
				write_file(logConfigFile, str(LoggerCfg))
				# Unregister every logger event handler, matched by name.
				# NOTE(review): list.remove() inside "for handler in ..."
				# skips the element after the removed one; harmless while
				# at most one handler matches per list -- confirm.
				name = logWriteMessage.func_name
				for handler in Handlers["01eh"]:
					if name == handler.func_name:
						Handlers["01eh"].remove(handler)
				name = logWriteSubject.func_name
				for handler in Handlers["09eh"]:
					if name == handler.func_name:
						Handlers["09eh"].remove(handler)
				name = logWriteNickChange.func_name
				for handler in Handlers["06eh"]:
					if name == handler.func_name:
						Handlers["06eh"].remove(handler)
				name = logWriteStatusChange.func_name
				for handler in Handlers["08eh"]:
					if name == handler.func_name:
						Handlers["08eh"].remove(handler)
				name = logWriteARole.func_name
				for handler in Handlers["07eh"]:
					if name == handler.func_name:
						Handlers["07eh"].remove(handler)
				name = logWriteJoined.func_name
				for handler in Handlers["04eh"]:
					if name == handler.func_name:
						Handlers["04eh"].remove(handler)
				name = logWriteLeave.func_name
				for handler in Handlers["05eh"]:
					if name == handler.func_name:
						Handlers["05eh"].remove(handler)
				name = logFileInit.func_name
				# Also drop the per-chat "logger" command; its command
				# string lives in the help file.
				try:
					command = eval(read_file("help/logger").decode('utf-8'))[logSetState.func_name]["cmd"]
				except:
					delivery(u"Внимание! Не удалось загрузить файл помощи логгера.")
				else:
					del COMMAND_HANDLERS[command]
				for handler in Handlers["01si"]:
					if name == handler.func_name:
						Handlers["01si"].remove(handler)
				logCfg.clear()
				logSynchronize.clear()
				reply(mType, source, u"Выключил логгер.")
			else:
				reply(mType, source, u"Логгер вообще не включён.")
		# NOTE(review): "темы" below is a byte-string literal compared with
		# the unicode `a0`; on Python 2 a non-ASCII bytes/unicode comparison
		# never matches -- confirm and consider a u"" literal.
		elif a0 in (u"тема", "темы"):
			if argv:
				if logThemes.has_key(argv[0]):
					themeFile = "static/logger/themes/%s/name.txt" % LoggerCfg["theme"]
					if os.path.isfile(themeFile) and argv[0] == read_file(themeFile):
						reply(mType, source, u"Тема «%s» уже используется плагином." % argv[0])
					else:
						LoggerCfg["theme"] = argv[0]
						write_file(logConfigFile, str(LoggerCfg))
						reply(mType, source, u"Установил «%s» стандартной темой." % argv[0])
				else:
					reply(mType, source, u"Нет такой темы :(")
			else:
				ls = []
				for Numb, Theme in enumerate(logThemes.keys(), 1):
					ls.append("%d. %s." % (Numb , Theme))
				reply(mType, source, str.join(chr(10), ls))
		elif a0 == u"папка":
			if argv:
				LoggerCfg["dir"] = argv[0]
				logThemeCopier(source[1], "LunnaCat")
				write_file(logConfigFile, str(LoggerCfg))
				repl = u"Теперь логи будут храниться в папке «%s»." % argv[0]
			else:
				repl = u"Сейчас логи хрянятся в «%s»." % LoggerCfg["dir"]
			reply(mType, source, repl)
		elif a0 == u"время":
			if argv:
				if argv[0] in ("gmt", "local"):
					LoggerCfg["timetype"] = argv[0]
					write_file(logConfigFile, str(LoggerCfg))
					repl = u"Установил тип записи времени на «%s»." % argv[0]
					logWrite(source[1], "status", u"*** Установлен тип записи времени: %s" % argv[0])
				else:
					repl = u"Недопустимый тип. Доступные: local, gmt."
			else:
				repl = u"Сейчас установлен тип записи времени «%s»." % LoggerCfg["timetype"]
			reply(mType, source, repl)
		else:
			reply(mType, source, u"Что-то не то...")
	elif LoggerCfg["enabled"]:
		reply(mType, source, u"Сейчас логгер включён.")
	else:
		reply(mType, source, u"Сейчас логгер выключен.")
def logSetState(mType, source, argv):
	"""Per-chat command: control logging of the current conference.

	Sub-commands: 1/вкл (enable), 0/выкл (disable), тема/темы
	(set/list the chat's theme), жиды/жид (toggle recording of
	users' Jabber IDs). Without arguments it reports the chat's
	current logging state.
	"""
	if GROUPCHATS.has_key(source[1]):
		chat = source[1]
		if argv:
			argv = argv.split()
			a0 = (argv.pop(0)).lower()
			if a0 in ("1", u"вкл"):
				if not logCfg[chat]["enabled"]:
					logCfg[chat]["enabled"] = True
					write_file("dynamic/%s/%s" % (chat, logCacheFile), str(logCfg[chat]))
					reply(mType, source, u"Включил логирование «%s»." % chat)
				else:
					reply(mType, source, u"Уже включено.")
			elif a0 in ("0", u"выкл"):
				if logCfg[chat]["enabled"]:
					logCfg[chat]["enabled"] = False
					write_file("dynamic/%s/%s" % (chat, logCacheFile), str(logCfg[chat]))
					logWrite(chat, "status", u"*** Логирование конференции приостановлено")
					reply(mType, source, u"Выключил логирование «%s»." % chat)
				else:
					reply(mType, source, u"«%s» не логируется." % chat)
			# NOTE(review): "темы" here (and "жиды"/"жид" below) are
			# byte-string literals compared with the unicode `a0`; on
			# Python 2 a non-ASCII bytes/unicode comparison never
			# matches -- confirm and consider u"" literals.
			elif a0 in (u"тема", "темы"):
				if argv:
					if logThemes.has_key(argv[0]):
						themeFile = chkFile("%s/%s/.theme/name.txt" % (LoggerCfg["dir"], chat))
						if os.path.isfile(themeFile) and argv[0] == read_file(themeFile):
							reply(mType, source, u"Тема «%s» уже используется плагином." % argv[0])
						else:
							logCfg[chat]["theme"] = argv[0]
							write_file("dynamic/%s/%s" % (chat, logCacheFile), str(logCfg[chat]))
							logThemeCopier(chat, argv[0])
							repl = u"Установил тему «%s». Она вступит в силу "
							# A pattern.html already copied for today means the
							# new header only applies from tomorrow's file on.
							if os.path.exists(chkFile("%s/%s/.theme/pattern.html" % (LoggerCfg["dir"], chat))):
								repl += u"с завтрашнего дня."
							else:
								repl += u"немедленно."
							reply(mType, source, repl % argv[0])
					else:
						reply(mType, source, u"Нет такой темы :(.")
				else:
					repl = str()
					for num, thm in enumerate(logThemes.keys()):
						repl += "%d. %s.\n" % (num + 1, thm)
					reply(mType, source, repl)
			elif a0 in ("жиды", "жид"):
				if argv:
					if argv[0] == "1":
						if not logCfg[chat].get("jids"):
							logCfg[chat]["jids"] = 1
							write_file("dynamic/%s/%s" % (chat, logCacheFile), str(logCfg[chat]))
							reply(mType, source, "Теперь Jabber ID пользователей будут записываться в логи. Обратите внимание: уже записанные Jabber ID никуда не исчезнут даже после отключения данной опции.")
						else:
							reply(mType, source, "Эта опция уже включена.")
					elif argv[0] == "0":
						if logCfg[chat].get("jids"):
							logCfg[chat]["jids"] = 0
							write_file("dynamic/%s/%s" % (chat, logCacheFile), str(logCfg[chat]))
							reply(mType, source, "Больше Jabber ID пользователей записываться не будут. Обратите внимание: уже записанные Jabber ID никуда не исчезнут.")
						else:
							reply(mType, source, "Эта опция не включена.")
					else:
						reply(mType, source, "Неизвестный параметр.")
				else:
					reply(mType, source, u"Нет такого параметра.")
		elif logCfg[chat]["enabled"]:
			if logCfg[chat].get("jids"):
				jid_state = "включена"
			else:
				jid_state = "отключена"
			reply(mType, source, u"Сейчас логгер включён. Запись JabberID: %s. Тема, используемая плагином в текущей конференции, называется «%s»." % (jid_state, logCfg[chat]["theme"]))
		else:
			reply(mType, source, u"Сейчас комната не логируется.")
# Plugin wiring: run init_logger at startup stage "00si" and expose the
# global admin command handled by logSetStateMain (access level 100).
handler_register("00si", init_logger)
command_handler(logSetStateMain, 100, "logger")
1b2ad0ae61c78643d62af25d4ac99dd6250e8bf1 | 1,655 | py | Python | jupiter/use_cases/prm/person/remove.py | horia141/jupiter | 2c721d1d44e1cd2607ad9936e54a20ea254741dc | [
"MIT"
] | 15 | 2019-05-05T14:34:58.000Z | 2022-02-25T09:57:28.000Z | jupiter/use_cases/prm/person/remove.py | horia141/jupiter | 2c721d1d44e1cd2607ad9936e54a20ea254741dc | [
"MIT"
] | 3 | 2020-02-22T16:09:39.000Z | 2021-12-18T21:33:06.000Z | jupiter/use_cases/prm/person/remove.py | horia141/jupiter | 2c721d1d44e1cd2607ad9936e54a20ea254741dc | [
"MIT"
] | null | null | null | """Remove a person."""
import logging
from typing import Final
from jupiter.domain.inbox_tasks.infra.inbox_task_notion_manager import InboxTaskNotionManager
from jupiter.domain.prm.infra.prm_notion_manager import PrmNotionManager
from jupiter.domain.prm.service.remove_service import PersonRemoveService
from jupiter.domain.storage_engine import StorageEngine
from jupiter.framework.base.entity_id import EntityId
from jupiter.framework.use_case import UseCase
from jupiter.utils.time_provider import TimeProvider
LOGGER = logging.getLogger(__name__)
class PersonRemoveUseCase(UseCase[EntityId, None]):
    """Use case that removes a person identified by entity id."""

    _time_provider: Final[TimeProvider]
    _storage_engine: Final[StorageEngine]
    _inbox_task_notion_manager: Final[InboxTaskNotionManager]
    _prm_notion_manager: Final[PrmNotionManager]

    def __init__(
            self, time_provider: TimeProvider, storage_engine: StorageEngine,
            inbox_task_notion_manager: InboxTaskNotionManager,
            prm_notion_manager: PrmNotionManager) -> None:
        """Store the collaborators this use case delegates to."""
        self._prm_notion_manager = prm_notion_manager
        self._inbox_task_notion_manager = inbox_task_notion_manager
        self._storage_engine = storage_engine
        self._time_provider = time_provider

    def execute(self, args: EntityId) -> None:
        """Load the person by id and delegate removal to the service."""
        with self._storage_engine.get_unit_of_work() as uow:
            person = uow.person_repository.load_by_id(args)
        # The removal runs after the unit of work above is closed.
        remove_service = PersonRemoveService(
            self._storage_engine, self._prm_notion_manager,
            self._inbox_task_notion_manager)
        remove_service.do_it(person)
| 40.365854 | 109 | 0.767372 | 1,099 | 0.664048 | 0 | 0 | 0 | 0 | 0 | 0 | 115 | 0.069486 |
1b2c8ab1cb51ed50fafc8095cf24fef9233aa71a | 2,755 | py | Python | lib/durak/game/ai.py | maximpertsov/durak-sockets | 53e47980a5456f44df4c2f140d74504ae8ad43a6 | [
"MIT"
] | null | null | null | lib/durak/game/ai.py | maximpertsov/durak-sockets | 53e47980a5456f44df4c2f140d74504ae8ad43a6 | [
"MIT"
] | 1 | 2020-10-23T14:12:47.000Z | 2020-11-03T17:33:11.000Z | lib/durak/game/ai.py | maximpertsov/durak-sockets | 53e47980a5456f44df4c2f140d74504ae8ad43a6 | [
"MIT"
] | null | null | null | from itertools import chain
from random import choice
from lib.durak.exceptions import IllegalAction
class AI:
class CannotPerform(IllegalAction):
pass
def __init__(self, *, game):
self._game = game
def perform_action(self, *, player):
"""
Have the user perform a random action.
Do nothing if yielded.
"""
_player = self._player(player)
if self._player(_player) in self._game._yielded.get():
raise self.CannotPerform("Already yielded")
action_type, selected_action = choice(self._potential_actions(player=_player))
selected_action(player=_player)
return action_type
def _potential_actions(self, *, player):
# TODO: report action on event?
defending = player == self._game.defender
not_defending = not defending
return list(
chain(
[("attacked", self._attack)] * 3 * not_defending,
# self._pass_card,
[("defended", self._defend)] * 9 * defending,
[("gave_up", self._give_up)] * 1 * defending,
[("yielded_attack", self._yield_attack)] * 7 * not_defending,
)
)
def _attack(self, *, player):
"""
Throw a random, legal attack card
"""
try:
potential_cards = list(
set(self._game._legal_attacks._cards)
& set(self._player(player).cards())
)
card = choice(potential_cards)
self._game.legally_attack(player=player, cards=[card])
except (IllegalAction, IndexError):
raise self.CannotPerform
def _yield_attack(self, *, player):
try:
self._game.yield_attack(player=player)
except IllegalAction:
raise self.CannotPerform
def _defend(self, *, player):
"""
Defend randomly
"""
try:
base_card, potential_cards = choice(
list(self._game.legal_defenses._legal_defenses.items())
)
if not potential_cards:
self._game.give_up(player=player)
return
card = choice(list(potential_cards))
self._game.legally_defend(player=player, base_card=base_card, card=card)
except (IllegalAction, IndexError):
raise self.CannotPerform
def _give_up(self, *, player):
try:
self._game.give_up(player=player)
except IllegalAction:
raise self.CannotPerform
def serialize(self):
return [player.serialize() for player in self.ordered()]
def _player(self, player_or_id):
return self._game.player(player_or_id)
| 29.945652 | 86 | 0.57931 | 2,650 | 0.961887 | 0 | 0 | 0 | 0 | 0 | 0 | 301 | 0.109256 |
1b2cf5164131d83c30f0c82c36524c69f4744fce | 2,954 | py | Python | diatomic/grids.py | ihavalyova/DiAtomic | 7eb6394947574c19be9a307fd7706939abc35e3f | [
"BSD-3-Clause"
] | 3 | 2022-01-26T02:40:12.000Z | 2022-02-10T19:03:15.000Z | diatomic/grids.py | ihavalyova/DiAtomic | 7eb6394947574c19be9a307fd7706939abc35e3f | [
"BSD-3-Clause"
] | null | null | null | diatomic/grids.py | ihavalyova/DiAtomic | 7eb6394947574c19be9a307fd7706939abc35e3f | [
"BSD-3-Clause"
] | 1 | 2021-03-20T10:34:27.000Z | 2021-03-20T10:34:27.000Z | import numpy as np
from utils import C_bohr
__all__ = ['Grid']
class Grid:
def __init__(self, npoints, rgrid, solver='sinc', alpha=0.0, rbar=0.0):
self.ngrid = npoints
self.rmin = rgrid[0] / C_bohr
self.rmax = rgrid[1] / C_bohr
rbar = rbar / C_bohr
self.solver = solver.lower()
self.Gy = np.ones(self.ngrid)
self.Fy = np.zeros(self.ngrid)
if self.solver == 'sinc':
self.rgrid, self.rstep = self.generate_sinc_uniform_grid()
else:
self.rgrid, self.rstep = self.generate_fourier_uniform_grid()
if alpha > 0.0:
# mapping is allowed with sinc method only
self.solver = 'sinc'
self.rmin = self.get_grid_bounding_values(self.rmin, rbar, alpha)
self.rmax = self.get_grid_bounding_values(self.rmax, rbar, alpha)
self.rgrid, ygrid = self.generate_nonuniform_grid(alpha, rbar)
gy_power1 = np.power(1.0+ygrid, (1.0/alpha)-1.0)
gy_power2 = np.power(1.0-ygrid, (1.0/alpha)+1.0)
self.Gy = (2.0*rbar/alpha) * gy_power1 / gy_power2
fy_power = (np.power((1.0 - np.power(ygrid, 2)), 2))
self.Fy = (1.0 - (1.0/(alpha**2))) / fy_power
def get_grid_points(self):
return self.rgrid * C_bohr
def get_grid_bounding_values(self, rlimit, rbar, alpha):
return ((rlimit/rbar)**alpha - 1.0) / ((rlimit/rbar)**alpha + 1.0)
def generate_fourier_uniform_grid(self):
return np.linspace(
self.rmin, self.rmax, num=self.ngrid, endpoint=False, retstep=True
)
def generate_sinc_uniform_grid(self):
return np.linspace(
self.rmin, self.rmax, num=self.ngrid, endpoint=True, retstep=True
)
def calculate_sinc_basis_functions(self, r):
# numpy sinc function is defined as sin(pi*x)/(pi*x) where pi is
# used for normalization. Thus I do not need to multiply by pi
# for j in range(0, self.nch*self.ngrid):
for j in range(0, self.ngrid):
arg = (r - self.rgrid[j]) / self.rstep
# return one column from a matrix
return np.sinc(arg)
def generate_nonuniform_grid(self, alpha, rbar):
ystep = (self.rmax - self.rmin) / (self.ngrid - 1) # / ngrid - 1 ??
# ygrid = np.ogrid[self.rmin+ystep:self.rmax+ystep:ystep]
# ygrid = np.ogrid[self.rmin:self.rmax:ystep]
# ygrid = np.linspace(self.rmin, self.rmax, num=self.ngrid)
# ygrid = np.arange(self.rmin, self.rmax, step=ystep)
# ygrid = np.linspace(
# self.rmin, self.rmax, num=self.ngrid, endpoint=True
# )
ygrid = np.empty(self.ngrid)
for j in range(1, self.ngrid+1):
ygrid[j-1] = self.rmin + ystep*(j-1.0)
Ry = rbar * np.power((1.0+ygrid) / (1.0-ygrid), 1.0/alpha)
print(ygrid)
print(len(ygrid))
return Ry, ygrid
| 31.763441 | 78 | 0.585647 | 2,887 | 0.977319 | 0 | 0 | 0 | 0 | 0 | 0 | 574 | 0.194313 |
1b2e5bc5b7359f315c6568c59fbcd38f9b3397a5 | 948 | py | Python | string/tinyUrl.py | mengyangbai/leetcode | e7a6906ecc5bce665dec5d0f057b302a64d50f40 | [
"MIT"
] | null | null | null | string/tinyUrl.py | mengyangbai/leetcode | e7a6906ecc5bce665dec5d0f057b302a64d50f40 | [
"MIT"
] | null | null | null | string/tinyUrl.py | mengyangbai/leetcode | e7a6906ecc5bce665dec5d0f057b302a64d50f40 | [
"MIT"
] | null | null | null | import random
class Codec:
base62 = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
baseurl = 'http://tinyurl.com/'
tinyTolong = {'1':'1'}
longTotiny = {}
def encode(self, longUrl):
"""Encodes a URL to a shortened URL.
:type longUrl: str
:rtype: str
"""
if longUrl in Codec.longTotiny:
return Codec.longTotiny[longUrl]
shortUrl = '1'
while shortUrl in Codec.tinyTolong:
shortUrl = ''.join(random.choice(Codec.base62) for i in range(6))
shortUrl = Codec.baseurl + shortUrl
Codec.tinyTolong[shortUrl] = longUrl
Codec.longTotiny[longUrl] = shortUrl
return shortUrl
def decode(self, shortUrl):
"""Decodes a shortened URL to its original URL.
:type shortUrl: str
:rtype: str
"""
longUrl = Codec.tinyTolong[shortUrl]
return longUrl | 28.727273 | 77 | 0.597046 | 934 | 0.985232 | 0 | 0 | 0 | 0 | 0 | 0 | 316 | 0.333333 |
1b3057afe6cc042a2d06d8c6901dc21dbf505ca6 | 1,328 | py | Python | Lib/objc/MetalKit.py | kanishpatel/Pyto | feec7a1a54f635a6375fa7ede074ff35afbfbb95 | [
"MIT"
] | null | null | null | Lib/objc/MetalKit.py | kanishpatel/Pyto | feec7a1a54f635a6375fa7ede074ff35afbfbb95 | [
"MIT"
] | null | null | null | Lib/objc/MetalKit.py | kanishpatel/Pyto | feec7a1a54f635a6375fa7ede074ff35afbfbb95 | [
"MIT"
] | null | null | null | '''
Classes from the 'MetalKit' framework.
'''
try:
from rubicon.objc import ObjCClass
except ValueError:
def ObjCClass(name):
return None
def _Class(name):
try:
return ObjCClass(name)
except NameError:
return None
MTKViewDisplayLinkTarget = _Class('MTKViewDisplayLinkTarget')
MTKMeshBuffer = _Class('MTKMeshBuffer')
MTKTextureLoaderASTCHelper = _Class('MTKTextureLoaderASTCHelper')
MTKMeshBufferZone = _Class('MTKMeshBufferZone')
MTKMeshBufferHolder = _Class('MTKMeshBufferHolder')
MTKMesh = _Class('MTKMesh')
MTKSubmesh = _Class('MTKSubmesh')
MTKMeshBufferAllocator = _Class('MTKMeshBufferAllocator')
MTKOffscreenDrawable = _Class('MTKOffscreenDrawable')
MTKTextureLoader = _Class('MTKTextureLoader')
MTKTextureIOBufferAllocator = _Class('MTKTextureIOBufferAllocator')
MTKTextureIOBuffer = _Class('MTKTextureIOBuffer')
MTKTextureIOBufferMap = _Class('MTKTextureIOBufferMap')
MTKTextureUploader = _Class('MTKTextureUploader')
MTKTextureLoaderData = _Class('MTKTextureLoaderData')
MTKTextureLoaderPVR = _Class('MTKTextureLoaderPVR')
MTKTextureLoaderKTX = _Class('MTKTextureLoaderKTX')
MTKTextureLoaderImageIO = _Class('MTKTextureLoaderImageIO')
MTKTextureLoaderMDL = _Class('MTKTextureLoaderMDL')
MTKTextureLoaderPVR3 = _Class('MTKTextureLoaderPVR3')
MTKView = _Class('MTKView')
| 33.2 | 67 | 0.799699 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 473 | 0.356175 |
1b30bcf71204112edd6ec2cc41795827d523cf03 | 1,151 | py | Python | mzitu_page.py | swiftlc/python_meizi_reptile | aedb4d84b99a0663330cc4e9dc59e5cdc776296b | [
"MIT"
] | null | null | null | mzitu_page.py | swiftlc/python_meizi_reptile | aedb4d84b99a0663330cc4e9dc59e5cdc776296b | [
"MIT"
] | null | null | null | mzitu_page.py | swiftlc/python_meizi_reptile | aedb4d84b99a0663330cc4e9dc59e5cdc776296b | [
"MIT"
] | null | null | null | import requests
from bs4 import BeautifulSoup
import os
import shutil
import threading
import uuid
import time
import sys
import helper
import threadpool
import mzitu_frame
def __task(url):
    # Worker body for the thread pool: retry until DownloadImage
    # reports success (it returns a truthy value when done).
    #print(url)
    while not mzitu_frame.DownloadImage(url):pass
def DownloadImagePage(url):
    """Download every image set linked from one list page.

    Parses the page at `url`, queues each non-ad entry onto an 8-worker
    thread pool and waits for completion. Returns True on success and
    False when the page could not be fetched or parsed.
    """
    bs = helper.GetBs(url)
    #print(bs)
    if not bs:return False
    try:
        all_li = bs.select('ul[id="pins"]>li')
        pool = threadpool.ThreadPool(8)
        for li in all_li:
            if li.get('class') is None: # entries with a class are ads; skip them
                href = li.a.get('href')
                title = li.a.img.get('alt')
                #print(href,title)
                req = threadpool.WorkRequest(__task,[href])
                pool.putRequest(req)
        pool.wait()
        print(url,"整页下载完成")
        return True
    except Exception as ex:
        # Best-effort: any parsing/pool error fails this page only.
        print(ex)
        return False
if __name__ == "__main__":
    # The list-page URL can be hard-coded here or passed as argv[1].
    url = r''
    if not url:
        argv = sys.argv
        if len(argv) == 1:
            print("请输入page url")
            sys.exit(-1)
        url = argv[1]
    #print(url)
    print(DownloadImagePage(url))
    print("download over")
1b316ea404a1e338f174eb77075582820fee88dd | 21 | py | Python | vnpy/trader/dbHistory/historyDataFile/__init__.py | chenzj810/vnpy-stock | ca30eb309e38f9f916e9877538b98096303e0b60 | [
"MIT"
] | 2 | 2021-01-03T05:28:14.000Z | 2021-01-03T05:28:19.000Z | vnpy/trader/dbHistory/historyDataFile/__init__.py | chenzj810/vnpy | ca30eb309e38f9f916e9877538b98096303e0b60 | [
"MIT"
] | null | null | null | vnpy/trader/dbHistory/historyDataFile/__init__.py | chenzj810/vnpy | ca30eb309e38f9f916e9877538b98096303e0b60 | [
"MIT"
] | 1 | 2021-04-26T14:08:23.000Z | 2021-04-26T14:08:23.000Z | # encoding: UTF-8
| 7 | 18 | 0.571429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 0.857143 |
1b323fb4983d14a761e7a1012a9aa7ebe6dd9100 | 1,334 | py | Python | eden/cli/test/interp_test.py | jmswen/eden | 5e0b051703fa946cc77fc43004435ae6b20599a1 | [
"BSD-3-Clause"
] | null | null | null | eden/cli/test/interp_test.py | jmswen/eden | 5e0b051703fa946cc77fc43004435ae6b20599a1 | [
"BSD-3-Clause"
] | null | null | null | eden/cli/test/interp_test.py | jmswen/eden | 5e0b051703fa946cc77fc43004435ae6b20599a1 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python3
#
# Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
import configparser
import unittest
from .. import configinterpolator
class InterpolatorTest(unittest.TestCase):
    def test_basic_subs(self):
        """${VAR} placeholders expand from the defaults mapping, while a
        self-referencing default is left unexpanded."""
        defaults = {"USER": "wez", "RECURSIVE": "a${RECURSIVE}b"}
        interpolator = configinterpolator.EdenConfigInterpolator(defaults)
        parser = configparser.ConfigParser(interpolation=interpolator)
        parser.add_section("section")
        for option, raw_value in (
            ("user", "${USER}"),
            ("rec", "${RECURSIVE}"),
            ("simple", "value"),
        ):
            parser.set("section", option, raw_value)

        self.assertEqual("wez", parser.get("section", "user"))
        self.assertEqual("value", parser.get("section", "simple"))
        self.assertEqual("a${RECURSIVE}b", parser.get("section", "rec"))

        actual = {
            section: dict(parser.items(section))
            for section in parser.sections()
        }
        expect = {
            "section": {"user": "wez", "simple": "value", "rec": "a${RECURSIVE}b"}
        }
        self.assertEqual(expect, actual)
| 34.205128 | 82 | 0.64018 | 939 | 0.703898 | 0 | 0 | 0 | 0 | 0 | 0 | 565 | 0.423538 |
1b32d43c547f2def5bee763b8d7fd01add81e1cf | 8,247 | py | Python | tests/unit/connection_test.py | ArthurKamalov/scalyr-agent-2 | 8392568792fa06534ed9f9ef1892cf91577e3a62 | [
"Apache-2.0"
] | 67 | 2015-02-03T00:35:33.000Z | 2022-03-23T10:14:26.000Z | tests/unit/connection_test.py | kdelph23/scalyr-agent-2 | 6b975db59367d271eeba6a614ac40c7cb4205c41 | [
"Apache-2.0"
] | 578 | 2015-04-09T08:58:56.000Z | 2022-03-30T12:13:21.000Z | tests/unit/connection_test.py | kdelph23/scalyr-agent-2 | 6b975db59367d271eeba6a614ac40c7cb4205c41 | [
"Apache-2.0"
] | 58 | 2015-01-15T22:00:43.000Z | 2022-02-18T15:48:31.000Z | # Copyright 2014-2020 Scalyr Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------
#
from __future__ import absolute_import
from scalyr_agent import scalyr_init
scalyr_init()
import os
import sys
import ssl
import socket
from scalyr_agent.compat import PY26
from scalyr_agent.compat import PY_post_equal_279
from scalyr_agent.connection import ConnectionFactory
from scalyr_agent.connection import HTTPSConnectionWithTimeoutAndVerification
from scalyr_agent.test_base import ScalyrTestCase
# Certificate fixtures used to exercise the TLS verification paths.
BASE_DIR = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
CA_FILE = os.path.join(BASE_DIR, "fixtures/certs/ca_certs.crt")
INTERMEDIATE_CERTS_FILE = os.path.join(
    BASE_DIR, "fixtures/certs/intermediate_certs.pem"
)
SCALYR_COM_PEM_PATH = os.path.join(BASE_DIR, "fixtures/certs/scalyr_com.pem")
EXAMPLE_COM_PEM_PATH = os.path.join(BASE_DIR, "fixtures/certs/example_com.pem")
# Saved originals so tests can monkey-patch socket functions and restore them.
ORIGINAL_SOCKET_CREATE_CONNECTION = socket.create_connection
ORIGINAL_SOCKET_GETADDR_INFO = socket.getaddrinfo
class ScalyrNativeHttpConnectionTestCase(ScalyrTestCase):
    """Certificate and hostname verification tests for the native
    (use_requests=False) HTTPS connection, run against the live
    agent.scalyr.com endpoint (requires outbound network access).
    """

    def tearDown(self):
        # Restore socket.create_connection in case a test patched it.
        socket.create_connection = ORIGINAL_SOCKET_CREATE_CONNECTION

    def test_connect_valid_cert_and_hostname_success(self):
        # Matching certificate + hostname must yield a verified TLS socket.
        connection = self._get_connection_cls(server="https://agent.scalyr.com:443")
        conn = connection._ScalyrHttpConnection__connection  # pylint: disable=no-member
        self.assertTrue(isinstance(conn, HTTPSConnectionWithTimeoutAndVerification))
        if PY_post_equal_279:
            # Python >= 2.7.9: verification settings live on the SSLContext.
            self.assertEqual(conn.sock._context.verify_mode, ssl.CERT_REQUIRED)
            self.assertEqual(conn.sock._context.check_hostname, True)
        else:
            self.assertEqual(conn.sock.cert_reqs, ssl.CERT_REQUIRED)
            self.assertEqual(conn.sock.ca_certs, CA_FILE)

    def test_connect_valid_cert_invalid_hostname_failure(self):
        # TODO: Add the same tests but where we mock the host on system level (e.g. via
        # /etc/hosts entry)
        def mock_create_connection(address_pair, timeout, **kwargs):
            # We want to connect to the actual Scalyr agent endpoint, but actual host
            # specified in the config to be different
            assert address_pair[0] == "agent.invalid.scalyr.com"
            new_address_pair = ("agent.scalyr.com", address_pair[1])
            return ORIGINAL_SOCKET_CREATE_CONNECTION(
                new_address_pair, timeout, **kwargs
            )

        socket.create_connection = mock_create_connection

        try:
            # The SSL error message wording differs across Python versions.
            if sys.version_info >= (3, 7, 0):
                expected_msg = r"Original error: .*Hostname mismatch.*"  # NOQA
            else:
                expected_msg = (
                    r"Original error: hostname 'agent.invalid.scalyr.com' doesn't match either "
                    "of '\*.scalyr.com', 'scalyr.com'"
                )  # NOQA
            self.assertRaisesRegexp(
                Exception,
                expected_msg,
                self._get_connection_cls,
                server="https://agent.invalid.scalyr.com:443",
            )
        finally:
            socket.create_connection = ORIGINAL_SOCKET_CREATE_CONNECTION

    def test_connect_invalid_cert_failure(self):
        # example.com serves a certificate not signed by our test CA bundle.
        if PY26:
            # Under Python 2.6, error looks like this:
            # [Errno 1] _ssl.c:498: error:14090086:SSL
            # routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed
            expected_msg = r"certificate verify failed"
        else:
            expected_msg = r"Original error: \[SSL: CERTIFICATE_VERIFY_FAILED\]"

        self.assertRaisesRegexp(
            Exception,
            expected_msg,
            self._get_connection_cls,
            server="https://example.com:443",
        )

    def _get_connection_cls(self, server):
        # Build a native (non-requests) connection using the test CA bundle.
        connection = ConnectionFactory.connection(
            server=server,
            request_deadline=10,
            ca_file=CA_FILE,
            intermediate_certs_file=INTERMEDIATE_CERTS_FILE,
            headers={},
            use_requests=False,
            quiet=False,
            proxies=None,
        )
        return connection
class ScalyrRequestsHttpConnectionTestCase(ScalyrTestCase):
    """Certificate / hostname validation tests for the requests-based connection."""
    # NOTE: With the requests library, connection is established lazily on first request and
    # that's also when SSL handshake and cert + hostname validation happens
    def tearDown(self):
        # Safety net: restore the real getaddrinfo even if a test failed mid-patch.
        socket.getaddrinfo = ORIGINAL_SOCKET_GETADDR_INFO
    def test_connect_valid_cert_and_hostname_success(self):
        """Session/response are created lazily and populated after the first GET."""
        connection = self._get_connection_cls(server="https://agent.scalyr.com:443")
        # pylint: disable=no-member
        self.assertIsNone(connection._RequestsConnection__response)
        self.assertIsNone(connection._RequestsConnection__session)
        # pylint: enable=no-member
        connection._get("/")
        # pylint: disable=no-member
        self.assertTrue(connection._RequestsConnection__response)
        self.assertTrue(connection._RequestsConnection__session)
        # pylint: enable=no-member
    def test_connect_valid_cert_invalid_hostname_failure(self):
        """Valid cert but mismatching hostname must fail hostname verification.

        ``socket.getaddrinfo`` is monkey-patched so DNS resolution targets the
        real endpoint while the configured hostname stays invalid.
        """
        # TODO: Add the same tests but where we mock the host on system level (e.g. via
        # /etc/hosts entry)
        def mock_socket_getaddrinfo(host, *args, **kwargs):
            # We want to connect to the actual Scalyr agent endpoint, but actual host
            # specified in the config to be different
            assert host == "agent.invalid.scalyr.com"
            new_host = "agent.scalyr.com"
            return ORIGINAL_SOCKET_GETADDR_INFO(new_host, *args, **kwargs)
        socket.getaddrinfo = mock_socket_getaddrinfo
        try:
            connection = self._get_connection_cls(
                server="https://agent.invalid.scalyr.com:443",
            )
            # pylint: disable=no-member
            self.assertIsNone(connection._RequestsConnection__response)
            self.assertIsNone(connection._RequestsConnection__session)
            # pylint: enable=no-member
            expected_msg = r"hostname 'agent.invalid.scalyr.com' doesn't match either of '\*.scalyr.com', 'scalyr.com'"
            self.assertRaisesRegexp(
                Exception,
                expected_msg,
                connection.get,
                request_path="/",
            )
            # pylint: disable=no-member
            self.assertIsNone(connection._RequestsConnection__response)
            self.assertTrue(connection._RequestsConnection__session)
            # pylint: enable=no-member
        finally:
            connection._RequestsConnection__session = None
            # BUG FIX: this previously restored getaddrinfo with
            # ORIGINAL_SOCKET_CREATE_CONNECTION (the wrong saved original),
            # which would break every getaddrinfo call until tearDown ran.
            socket.getaddrinfo = ORIGINAL_SOCKET_GETADDR_INFO
    def test_connect_invalid_cert_failure(self):
        """A cert that our CA bundle did not sign must fail verification."""
        if PY26:
            # Under Python 2.6, error looks like this:
            # [Errno 1] _ssl.c:498: error:14090086:SSL
            # routines:SSL3_GET_SERVER_CERTIFICATE:certificate verify failed
            expected_msg = r"certificate verify failed"
        else:
            expected_msg = r"\[SSL: CERTIFICATE_VERIFY_FAILED\]"
        connection = self._get_connection_cls(server="https://example.com:443")
        self.assertRaisesRegexp(
            Exception,
            expected_msg,
            connection.get,
            request_path="/",
        )
    def _get_connection_cls(self, server):
        """Build a requests-based connection to *server* using the test CA bundle."""
        connection = ConnectionFactory.connection(
            server=server,
            request_deadline=10,
            ca_file=CA_FILE,
            intermediate_certs_file=INTERMEDIATE_CERTS_FILE,
            headers={},
            use_requests=True,
            quiet=False,
            proxies=None,
        )
        return connection
| 38.35814 | 119 | 0.659391 | 6,671 | 0.8089 | 0 | 0 | 0 | 0 | 0 | 0 | 2,572 | 0.311871 |
1b33c128befc050cac21642510f7cc293d57c338 | 3,365 | py | Python | tests/oauth1/client.py | ashleysommer/sanic-oauthlib | 9d25d5b34180678e2b4437efb7d6cd7e0a62b4ce | [
"BSD-3-Clause"
] | 2 | 2020-10-29T02:08:16.000Z | 2021-08-30T23:25:41.000Z | tests/oauth1/client.py | ashleysommer/sanic-oauthlib | 9d25d5b34180678e2b4437efb7d6cd7e0a62b4ce | [
"BSD-3-Clause"
] | 1 | 2020-10-29T02:07:33.000Z | 2020-10-29T09:34:24.000Z | tests/oauth1/client.py | ashleysommer/sanic-oauthlib | 9d25d5b34180678e2b4437efb7d6cd7e0a62b4ce | [
"BSD-3-Clause"
] | 2 | 2019-12-23T15:32:05.000Z | 2020-02-11T02:01:17.000Z | from inspect import isawaitable
from sanic import Sanic
from sanic.response import redirect, json, text
from sanic.exceptions import SanicException
from sanic_plugin_toolkit import SanicPluginRealm
from sanic_oauthlib.client import oauthclient
def create_oauth(app):
    """Register the OAuth client plugin on *app*'s plugin realm and return
    the plugin handle."""
    realm = SanicPluginRealm(app)
    try:
        oauth = realm.register_plugin(oauthclient)
    except ValueError as v:
        # NOTE(review): this unpacks the ValueError instance itself;
        # presumably sanic_plugin_toolkit raises a value carrying
        # (association, plugin) when the plugin is already registered --
        # confirm against the library before changing.
        _, oauth = v
    return oauth
def create_remote(app, oauth=None):
    """Create the 'dev' OAuth1 remote application pointing at the local
    provider on 127.0.0.1:5001; an OAuth plugin handle is created on demand
    when none is supplied."""
    oauth = oauth or create_oauth(app)
    provider = 'http://127.0.0.1:5001'
    settings = dict(
        consumer_key='dev',
        consumer_secret='devsecret',
        request_token_params={'realm': 'email'},
        base_url=provider + '/api/',
        request_token_url=provider + '/oauth/request_token',
        access_token_method='GET',
        access_token_url=provider + '/oauth/access_token',
        authorize_url=provider + '/oauth/authorize',
    )
    return oauth.remote_app('dev', **settings)
def create_client(app, oauth=None, remote=None):
    """Wire a full OAuth1 test client onto *app* and return the remote.

    Registers routes (/, /login, /logout, /authorized, /address,
    /method/<name>) plus a middleware that injects a shared in-memory
    session dict, and a tokengetter that serves tokens from that session.
    """
    if not oauth:
        oauth = create_oauth(app)
    if not remote:
        remote = create_remote(app, oauth)
    # Single module-lifetime session shared by every request.
    session = {}
    #TODO: make a better client session for test
    @app.middleware
    async def add_dummy_session(request):
        # Expose the shared session via the plugin's per-request context.
        context = oauth.context
        shared_context = oauth.context.shared
        shared_request_context = shared_context.request[id(request)]
        shared_request_context['session'] = session
    @app.route('/')
    async def index(request):
        # Authenticated: fetch the email resource; otherwise go log in.
        if 'dev_oauth' in session:
            ret = await remote.get('email')
            if isinstance(ret.data, dict):
                return json(ret.data)
            return str(ret.data)
        return redirect(app.url_for('login'))
    @app.route('/login')
    @remote.autoauthorize
    async def login(request, context):
        # autoauthorize expects the OAuth callback parameters as a dict.
        return {'callback': app.url_for('authorized', _external=True, _scheme='http')}
    @app.route('/logout')
    def logout(request):
        session.pop('dev_oauth', None)
        return redirect(app.url_for('index'))
    @app.route('/authorized')
    @remote.authorized_handler
    async def authorized(request, data, context):
        # data is None when the provider denied access.
        if data is None:
            return 'Access denied: error=%s' % (
                request.args['error']
            )
        # Query args arrive as lists; keep only the first value of each.
        resp = {k: v[0] for k, v in data.items()}
        if 'oauth_token' in resp:
            session['dev_oauth'] = resp
            return json(resp)
        return text(str(resp))
    @app.route('/address')
    async def address(request):
        ret = await remote.get('address/hangzhou')
        if ret.status not in (200, 201):
            raise SanicException(ret.data, status_code=ret.status)
        return text(ret.raw_data)
    @app.route('/method/<name>')
    async def method(request, name):
        # Invoke an arbitrary HTTP verb helper on the remote (get/post/...).
        func = getattr(remote, name)
        ret = func('method')
        if isawaitable(ret):
            ret = await ret
        return text(ret.raw_data)
    @remote.tokengetter
    async def get_oauth_token():
        if 'dev_oauth' in session:
            resp = session['dev_oauth']
            return resp['oauth_token'], resp['oauth_token_secret']
    return remote
if __name__ == '__main__':
    # Run the test client standalone against a provider on 127.0.0.1:5001.
    app = Sanic("test_main")
    create_client(app)
    app.run(host='localhost', port=8000, debug=True, auto_reload=False)
| 29.517544 | 86 | 0.626449 | 0 | 0 | 0 | 0 | 1,923 | 0.571471 | 1,556 | 0.462407 | 544 | 0.161664 |
1b34238a33e3602211d94b11d52414e9597c513e | 5,154 | py | Python | lph/IntentLearner.py | PhilippeMorere/learning-to-plan-hierarchically | 7342a835127a17160de69e12c0e682472d8c6f8f | [
"MIT"
] | 6 | 2019-07-09T06:47:09.000Z | 2021-01-25T15:43:41.000Z | lph/IntentLearner.py | PhilippeMorere/learning-to-plan-hierarchically | 7342a835127a17160de69e12c0e682472d8c6f8f | [
"MIT"
] | null | null | null | lph/IntentLearner.py | PhilippeMorere/learning-to-plan-hierarchically | 7342a835127a17160de69e12c0e682472d8c6f8f | [
"MIT"
] | 2 | 2019-07-09T06:47:14.000Z | 2019-07-18T03:41:23.000Z | from functools import reduce
from operator import add
from lph.IntentPlanner import IntentPlanner
from lph.Skill import SkillBase, Skill, PrimitiveSkill
from lph.utils import SparseState, Effect
class IntentLearner:
    """Agent that plans with a hierarchical skill base, executes skills,
    and learns new skills from its own transitions and from demonstrations."""
    def __init__(self, primitive_skills, planner_max_rec=3, verbose=1):
        """
        Initialize intent learner.
        :param primitive_skills: list of (PrimitiveSkill)
        :param planner_max_rec: planner recursion order (default: 3). Tougher
        skill curriculum requires higher planner recursion order.
        :param verbose: verbose level (default: 1)
        """
        self.skill_base = SkillBase(primitive_skills)
        self.planner = IntentPlanner(self.skill_base, planner_max_rec, verbose)
        # Cached plan currently being executed; None means "re-plan".
        self.current_plan = None
        self.verbose = verbose
    def plan(self, state, goal):
        """
        Generates a plan from state to goal.
        :param state: Starting state (dense)
        :param goal: Goal as (SparseState)
        :return: list of skills to execute (falls back to a single random
            skill when the planner fails)
        """
        s_start = SparseState.from_dense_state(state)
        if self.current_plan is None:
            try:
                self.current_plan = self.planner.plan(s_start, goal)
            except RuntimeError:
                # Planner recursion limit reached: act randomly to explore.
                self.current_plan = [self.skill_base.random_skill()]
                if self.verbose > 1:
                    print("Planner failed (low rec), random action.")
            return self.current_plan
        # If next skill's effect is already satisfied, remove it
        plan = self.current_plan
        while len(plan) > 0:
            skill, new_plan = IntentPlanner.behead(plan, randomness=False)
            if not skill.effect.end_state.matches(s_start):
                break
            plan = new_plan
        self.current_plan = plan
        # # If the next skill can't be executed, execute random action
        # if len(self.current_plan) > 0:
        #     first_skill = IntentPlanner.behead(plan)[0]
        #     if first_skill.fails_in(s_start):
        #         # self.current_plan = []
        #         # Random action
        #         self.current_plan = [self.skill_base.random_skill()]
        #         if self.verbose > 1:
        #             print("Random action2")
        #         return self.current_plan
        # If no plan left, try to plan again
        if len(self.current_plan) == 0:
            self.current_plan = None
            return self.plan(state, goal)
        return self.current_plan
    def update(self, state, executed_skill, effect):
        """
        Updates agent with latest intent and transition.
        :param state: starting dense state
        :param executed_skill: (PrimitiveSkill) executed by the agent
        :param effect: observed transition effect as (Effect)
        :return: None
        """
        # Assess if last executed skill was successful
        successful_execution = (effect == executed_skill.effect)
        # Update executed skill conditions
        executed_skill.update_conditions(state, successful_execution)
        # If fail, execute a random action next time, then re-plan completely
        if not successful_execution:
            self.current_plan = [self.skill_base.random_skill()]
        else:  # Remove first action from plan
            _, self.current_plan = IntentPlanner.behead(self.current_plan)
            if len(self.current_plan) == 0:
                self.current_plan = None
    def learn_demonstration(self, trajectory, goal):
        """
        Learn from demonstration.
        :param trajectory: (Trajectory)
        :param goal: demonstration goal as (SparseState)
        :return: None
        """
        # Identify if existing skills match the trajectory and goal
        s_start = SparseState.from_dense_state(trajectory.initial_state)
        effect = Effect.from_sparse_start_goal(s_start, goal)
        skills = self.skill_base.skills_from_effect(effect)
        s_init = SparseState.from_dense_state(trajectory.initial_state)
        candidate_skills = [s for s in skills if not s.fails_in(s_init)]
        # trajectory.refine()
        # skill_seq, seq_effect = trajectory.to_skill_seq(effect)
        # Combined effect of the demonstrated skill sequence.
        skill_seq = [n.skill for n in trajectory.nodes]
        seq_effect = reduce(add, [s.effect for s in skill_seq])
        # If none found, create a new skill
        if len(candidate_skills) == 0:
            # Learn new skill
            new_skill = Skill(seq_effect, skill_seq, trajectory.initial_state)
            self.skill_base.add_skill(new_skill)
        else:
            # Otherwise reinforce the existing (non-primitive) candidates.
            for skill in candidate_skills:
                if not isinstance(skill, PrimitiveSkill):
                    skill.add_successful_skill_seq(seq_effect, skill_seq,
                                                   trajectory.initial_state)
    def end_of_ep_update(self):
        """
        Performs updates at the end of each episode (such as refining skills).
        """
        for effect, skill_list in self.skill_base.all_skills.items():
            for skill in skill_list:
                if not isinstance(skill, PrimitiveSkill):
                    skill.refine(effect, self.skill_base)
| 40.582677 | 79 | 0.620489 | 4,955 | 0.961389 | 0 | 0 | 0 | 0 | 0 | 0 | 1,906 | 0.36981 |
1b34a6e317513d1c6eae487960fb1aa32926aad5 | 2,182 | py | Python | linear_ucb.py | nizamphoenix/Bandits | 83f6298e6339a897887b7bd30efe3d29f92c8509 | [
"MIT"
] | null | null | null | linear_ucb.py | nizamphoenix/Bandits | 83f6298e6339a897887b7bd30efe3d29f92c8509 | [
"MIT"
] | null | null | null | linear_ucb.py | nizamphoenix/Bandits | 83f6298e6339a897887b7bd30efe3d29f92c8509 | [
"MIT"
] | null | null | null | from mab import MAB
class LinUCB(MAB):
    """
    Contextual multi-armed bandit (LinUCB with disjoint linear models),
    implementing the algorithm in http://rob.schapire.net/papers/www10.pdf
    Arguments
    ========
    narms : int
        number of arms
    ndims : int
        number of dimensions for each arm's context i.e length of context vector for each arm
    alpha : float
        positive real explore-exploit parameter i.e. exploitation rate
    """
    def __init__(self, narms, ndims, alpha):
        self.narms = narms
        self.ndims = ndims
        self.alpha = float(alpha)
        # One ridge-regression model per arm:
        #   A[a]     : (ndims, ndims) Gram matrix, initialised to identity
        #   b[a]     : (ndims, 1) reward-weighted context sum
        #   theta[a] : (ndims, 1) coefficient estimate, theta = A^-1 b
        # BUG FIX: the first dimension must be narms (one entry per arm);
        # the original used ndims here, which only worked when narms == ndims.
        self.theta = np.zeros((self.narms, self.ndims, 1))
        self.A = np.array([np.identity(self.ndims) for col in range(1, narms + 1)])
        self.b = np.zeros((self.narms, self.ndims, 1))
    def play(self, tround, context):
        """Choose an arm (1-based) for round *tround*.

        :param tround: round index (kept for the MAB interface)
        :param context: flat array of narms * ndims features, one ndims-long
            context vector per arm
        :return: index of the chosen arm in 1..narms, ties broken uniformly
        """
        posterior = [0 for col in range(1, self.narms + 1)]
        # One row of context features per arm (was (ndims, ndims) before,
        # which silently assumed narms == ndims).
        context_matrix = context.reshape((self.narms, self.ndims))
        for arm in range(1, self.narms + 1):
            A_inv = inv(self.A[arm - 1])
            # Ridge-regression estimate theta = A^-1 b for this arm.
            self.theta[arm - 1] = np.dot(A_inv, self.b[arm - 1])
            X = context_matrix[arm - 1].reshape((self.ndims, 1))
            # Upper-confidence width sqrt(x^T A^-1 x).
            # (np.math was removed in NumPy 2.0; np.sqrt gives the same value.)
            stdev = float(np.sqrt(np.dot(np.dot(X.T, A_inv), X)))
            posterior[arm - 1] = (np.dot(self.theta[arm - 1].T, X)) + self.alpha * stdev
        # Break ties between maximal UCB scores uniformly at random.
        return np.random.choice([i for i, j in enumerate(posterior) if j == max(posterior)]) + 1
    def update(self, arm, reward, context):
        """Update the played arm's model with the observed reward.

        :param arm: 1-based index of the played arm
        :param reward: observed reward for that arm
        :param context: same flat narms * ndims feature array passed to play()
        """
        context_matrix = context.reshape((self.narms, self.ndims))
        X = context_matrix[arm - 1].reshape(self.ndims, 1)
        self.A[arm - 1] += np.dot(X, X.T)
        self.b[arm - 1] += reward * X
| 49.590909 | 164 | 0.63428 | 2,144 | 0.982585 | 0 | 0 | 0 | 0 | 0 | 0 | 912 | 0.417965 |
1b3af065e11ddefc9d69f8908fab887ea05f1e93 | 1,233 | py | Python | plugins_/open_package.py | calculuswhiz/PackageDev | 76fe412eefbc775f647591fbd2c526391aea98fc | [
"MIT"
] | null | null | null | plugins_/open_package.py | calculuswhiz/PackageDev | 76fe412eefbc775f647591fbd2c526391aea98fc | [
"MIT"
] | null | null | null | plugins_/open_package.py | calculuswhiz/PackageDev | 76fe412eefbc775f647591fbd2c526391aea98fc | [
"MIT"
] | null | null | null | import glob
import os
import sublime
import sublime_plugin
from .create_package import _open_folder_in_st, _is_override_package
# Public API of this module (the Sublime window command class).
__all__ = ('PackagedevOpenPackageCommand',)
# Suffix shown in the quick panel for packages overriding a shipped package.
OVERRIDE_SUFFIX = " [*Override*]"
def _list_normal_packages():
    """Yield ``(package_name, is_override)`` for every folder directly under
    the Sublime ``Packages`` directory."""
    # A trailing separator in the glob pattern restricts matches to folders.
    pattern = os.path.join(sublime.packages_path(), "*/", "")
    for folder in glob.glob(pattern):
        name = os.path.basename(folder.strip("\\/"))
        yield (name, _is_override_package(name))
class NameInputHandler(sublime_plugin.ListInputHandler):
    """Input handler that lets the user pick a package by name, with
    override packages tagged by OVERRIDE_SUFFIX."""
    def placeholder(self):
        return "Package"
    def list_items(self):
        """Return the alphabetically sorted package names for the quick panel."""
        # FIX: removed a stray debug print(packages) that spammed the Sublime
        # console on every invocation; sorted() already returns a list, so the
        # extra list() wrapper was dropped as well.
        packages = sorted(_list_normal_packages())
        return [name + (OVERRIDE_SUFFIX if override else "")
                for name, override in packages]
class PackagedevOpenPackageCommand(sublime_plugin.WindowCommand):
    """Window command that opens a chosen package folder in Sublime Text."""
    def input(self, args):
        # Ask the user which package to open.
        return NameInputHandler()
    def run(self, name):
        if not name:
            return
        # Strip the override marker that NameInputHandler may have appended.
        bare_name, _, _ = name.partition(OVERRIDE_SUFFIX)
        folder = os.path.join(sublime.packages_path(), bare_name)
        # TODO find a .sublime-project file and open that instead?
        _open_folder_in_st(folder)
| 26.234043 | 69 | 0.675588 | 747 | 0.605839 | 267 | 0.216545 | 0 | 0 | 0 | 0 | 125 | 0.101379 |
1b3b726fdd352738b9bb7f371ccb99a43f38c221 | 2,493 | py | Python | examples/demo/quiver.py | ContinuumIO/chaco | e4a42b91cb25ef7191fd465caaef2c3256fc668e | [
"BSD-3-Clause"
] | 3 | 2017-09-17T17:32:06.000Z | 2022-03-15T13:04:43.000Z | examples/demo/quiver.py | ContinuumIO/chaco | e4a42b91cb25ef7191fd465caaef2c3256fc668e | [
"BSD-3-Clause"
] | null | null | null | examples/demo/quiver.py | ContinuumIO/chaco | e4a42b91cb25ef7191fd465caaef2c3256fc668e | [
"BSD-3-Clause"
] | 5 | 2015-05-17T16:08:11.000Z | 2021-02-23T09:23:42.000Z | """
Draws a vector or "quiver" plot of a set of random points.
- Left-drag pans the plot.
- Mousewheel up and down zooms the plot in and out.
- Pressing "z" brings up the Zoom Box, and you can click-drag a rectangular
region to zoom. If you use a sequence of zoom boxes, pressing alt-left-arrow
and alt-right-arrow moves you forwards and backwards through the "zoom
history".
"""
# Major library imports
from numpy import array, sort
from numpy.random import random
# Enthought library imports
from enable.api import Component, ComponentEditor
from traits.api import HasTraits, Instance, Int
from traitsui.api import Item, View
# Chaco imports
from chaco.api import ArrayDataSource, MultiArrayDataSource, DataRange1D, \
LinearMapper, QuiverPlot, OverlayPlotContainer, add_default_grids, \
add_default_axes
from chaco.tools.api import PanTool, ZoomTool
class PlotExample(HasTraits):
    """Traits UI demo window holding a quiver plot of random points."""
    plot = Instance(Component)
    numpts = Int(400)
    vectorlen = Int(15)
    traits_view = View(Item('plot', editor=ComponentEditor(), show_label=False),
                       width=600, height=600)
    def _plot_default(self):
        """Build the default quiver plot wrapped in an overlay container."""
        n = self.numpts
        scale = self.vectorlen
        # Anchor points: x sorted ascending, y uniform random.
        x_points = sort(random(n))
        y_points = random(n)
        # One (dx, dy) vector per point, each component in [0, vectorlen).
        arrows = array((random(n) * scale, random(n) * scale)).T
        # Wrap the raw arrays in Chaco data sources so all vectors plot at once.
        index_src = ArrayDataSource(x_points, sort_order='ascending')
        value_src = ArrayDataSource(y_points)
        arrow_src = MultiArrayDataSource(arrows)
        # Data ranges and linear mappers for both axes.
        index_range = DataRange1D()
        index_range.add(index_src)
        value_range = DataRange1D()
        value_range.add(value_src)
        quiverplot = QuiverPlot(index=index_src, value=value_src,
                                vectors=arrow_src,
                                index_mapper=LinearMapper(range=index_range),
                                value_mapper=LinearMapper(range=value_range),
                                bgcolor="white")
        add_default_axes(quiverplot)
        add_default_grids(quiverplot)
        # Interactivity: pan with left-drag, zoom box via the "z" key / wheel.
        quiverplot.tools.append(PanTool(quiverplot, constrain_key="shift"))
        quiverplot.overlays.append(ZoomTool(quiverplot))
        return OverlayPlotContainer(quiverplot, padding=50)
# Module-level instance named "demo" so the Chaco demo framework can find it.
demo = PlotExample()
if __name__ == "__main__":
    demo.configure_traits()
| 31.556962 | 80 | 0.661853 | 1,527 | 0.612515 | 0 | 0 | 0 | 0 | 0 | 0 | 662 | 0.265544 |
1b3c0f287457855644bb8a0b8a4eb6a1990d11e6 | 2,005 | py | Python | resolwe/flow/executors/local/run.py | JureZmrzlikar/resolwe | 2c967b5fa06b6b7daeee88b3fca4cd19d10d99c3 | [
"Apache-2.0"
] | null | null | null | resolwe/flow/executors/local/run.py | JureZmrzlikar/resolwe | 2c967b5fa06b6b7daeee88b3fca4cd19d10d99c3 | [
"Apache-2.0"
] | null | null | null | resolwe/flow/executors/local/run.py | JureZmrzlikar/resolwe | 2c967b5fa06b6b7daeee88b3fca4cd19d10d99c3 | [
"Apache-2.0"
] | null | null | null | """.. Ignore pydocstyle D400.
.. autoclass:: resolwe.flow.executors.local.run.FlowExecutor
"""
import asyncio
import logging
import os
import shlex
from asyncio import subprocess
from ..run import BaseFlowExecutor
logger = logging.getLogger(__name__)
class FlowExecutor(BaseFlowExecutor):
    """Local dataflow executor proxy."""
    name = "local"
    def __init__(self, *args, **kwargs):
        """Initialize attributes."""
        super().__init__(*args, **kwargs)
        # Seconds to wait after SIGTERM before escalating to SIGKILL.
        self.kill_delay = 5
        self.proc = None
        self.stdout = None
        self.command = "/bin/bash"
    async def start(self):
        """Start process execution and return the shell's PID."""
        # Workaround for pylint issue #1469
        # (https://github.com/PyCQA/pylint/issues/1469).
        self.proc = await subprocess.create_subprocess_exec(
            *shlex.split(self.command),
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
        )
        self.stdout = self.proc.stdout
        return self.proc.pid
    async def run_script(self, script):
        """Execute the script and save results.

        The script is wrapped with tracing ("set -x"), brace-expansion off
        ("set +B") and a final "exit" so the shell terminates when done; it is
        then fed to the shell's stdin which is closed afterwards.
        """
        script = os.linesep.join(["set -x", "set +B", script, "exit"]) + os.linesep
        self.proc.stdin.write(script.encode("utf-8"))
        await self.proc.stdin.drain()
        self.proc.stdin.close()
    async def end(self):
        """Wait for process completion and return its exit code."""
        await self.proc.wait()
        return self.proc.returncode
    async def terminate(self):
        """Terminate a running script (SIGTERM, escalating to SIGKILL)."""
        try:
            self.proc.terminate()
        except ProcessLookupError:
            # Process has already been terminated. Log exception and continue.
            logger.exception("While terminating process with PID %s", self.proc.pid)
        try:
            await asyncio.wait_for(self.proc.wait(), self.kill_delay)
        except asyncio.TimeoutError:
            # BUG FIX: previously the TimeoutError raised by wait_for
            # propagated out of terminate(), so the SIGKILL escalation (and
            # super().terminate()) never ran when the process ignored SIGTERM.
            self.proc.kill()
            await self.proc.wait()
        await super().terminate()
| 27.465753 | 84 | 0.616958 | 1,747 | 0.871322 | 0 | 0 | 0 | 0 | 1,388 | 0.692269 | 528 | 0.263342 |
1b3cb28fe5d89cdbcd53ffd981cabf41b464fad3 | 11,974 | py | Python | affectnet.py | orena1/DAN | 49247ad0cad2a67057d184fa92d15fe2e7bb2cb6 | [
"MIT"
] | 50 | 2021-09-16T14:22:54.000Z | 2022-03-25T17:10:55.000Z | affectnet.py | orena1/DAN | 49247ad0cad2a67057d184fa92d15fe2e7bb2cb6 | [
"MIT"
] | 11 | 2021-09-23T06:40:50.000Z | 2022-03-25T03:11:45.000Z | affectnet.py | orena1/DAN | 49247ad0cad2a67057d184fa92d15fe2e7bb2cb6 | [
"MIT"
] | 16 | 2021-09-16T14:22:56.000Z | 2022-03-29T12:52:00.000Z | import os
import glob
from tqdm import tqdm
import argparse
from PIL import Image
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import torch.utils.data as data
from torchvision import transforms, datasets
from networks.dan import DAN
def parse_args():
    """Parse command-line options for AffectNet training and return the
    populated argparse namespace."""
    # (flag, type, default, help) for every supported option.
    specs = [
        ('--aff_path', str, 'datasets/AfectNet/', 'AfectNet dataset path.'),
        ('--batch_size', int, 256, 'Batch size.'),
        ('--lr', float, 0.0001, 'Initial learning rate for adam.'),
        ('--workers', int, 8, 'Number of data loading workers.'),
        ('--epochs', int, 40, 'Total training epochs.'),
        ('--num_head', int, 4, 'Number of attention head.'),
        ('--num_class', int, 8, 'Number of class.'),
    ]
    parser = argparse.ArgumentParser()
    for flag, typ, default, desc in specs:
        parser.add_argument(flag, type=typ, default=default, help=desc)
    return parser.parse_args()
class AffectNet(data.Dataset):
    """AffectNet dataset indexed from the on-disk annotation files.

    The (phase, image path, label) table is optionally cached as
    ``affectnet.csv`` inside *aff_path* so the expensive directory scan only
    has to run once.
    """
    def __init__(self, aff_path, phase, use_cache = True, transform = None):
        self.phase = phase
        self.transform = transform
        self.aff_path = aff_path
        if not use_cache:
            df = self.get_df()
        else:
            cache_path = os.path.join(aff_path, 'affectnet.csv')
            if os.path.exists(cache_path):
                df = pd.read_csv(cache_path)
            else:
                df = self.get_df()
                df.to_csv(cache_path)
        # Keep only the rows for the requested phase ('train' or 'val').
        self.data = df[df['phase'] == phase]
        self.file_paths = self.data.loc[:, 'img_path'].values
        self.label = self.data.loc[:, 'label'].values
        _, self.sample_counts = np.unique(self.label, return_counts=True)
        # print(f' distribution of {phase} samples: {self.sample_counts}')
    def get_df(self):
        """Scan train_set/ and val_set/ annotations and return a DataFrame
        with columns (phase, img_path, label)."""
        rows = []
        splits = (('train', os.path.join(self.aff_path, 'train_set/')),
                  ('val', os.path.join(self.aff_path, 'val_set/')))
        for split_name, split_path in splits:
            for anno in glob.glob(split_path + 'annotations/*_exp.npy'):
                idx = os.path.basename(anno).split('_')[0]
                img_path = os.path.join(split_path, f'images/{idx}.jpg')
                rows.append([split_name, img_path, int(np.load(anno))])
        return pd.DataFrame(data = rows, columns = ['phase', 'img_path', 'label'])
    def __len__(self):
        return len(self.file_paths)
    def __getitem__(self, idx):
        label = self.label[idx]
        image = Image.open(self.file_paths[idx]).convert('RGB')
        if self.transform is not None:
            image = self.transform(image)
        return image, label
class AffinityLoss(nn.Module):
    """Center-style affinity loss: pulls pooled features toward their own
    class center, normalised by the spread of the centers."""
    def __init__(self, device, num_class=8, feat_dim=512):
        super(AffinityLoss, self).__init__()
        self.num_class = num_class
        self.feat_dim = feat_dim
        self.gap = nn.AdaptiveAvgPool2d(1)
        self.device = device
        # Learnable per-class center vectors, shape (num_class, feat_dim).
        self.centers = nn.Parameter(torch.randn(self.num_class, self.feat_dim).to(device))
    def forward(self, x, labels):
        # x: feature maps pooled to (B, feat_dim); labels: (B,) class indices.
        feats = self.gap(x).view(x.size(0), -1)
        bsz = feats.size(0)
        # Squared distances ||f||^2 + ||c||^2 - 2 f.c via the expand/addmm_ trick.
        sq_f = torch.pow(feats, 2).sum(dim=1, keepdim=True).expand(bsz, self.num_class)
        sq_c = torch.pow(self.centers, 2).sum(dim=1, keepdim=True).expand(self.num_class, bsz).t()
        distmat = sq_f + sq_c
        distmat.addmm_(feats, self.centers.t(), beta=1, alpha=-2)
        # Mask out everything but each sample's own class column.
        classes = torch.arange(self.num_class).long().to(self.device)
        expanded = labels.unsqueeze(1).expand(bsz, self.num_class)
        mask = expanded.eq(classes.expand(bsz, self.num_class))
        dist = (distmat * mask.float()) / self.centers.var(dim=0).sum()
        return dist.clamp(min=1e-12, max=1e+12).sum() / bsz
class PartitionLoss(nn.Module):
    """Encourages attention heads to diverge: penalises low variance across
    the head dimension (dim 1); zero when there is a single head."""
    def __init__(self, ):
        super(PartitionLoss, self).__init__()
    def forward(self, x):
        head_count = x.size(1)
        if head_count <= 1:
            # Nothing to diversify with a single head.
            return 0
        spread = x.var(dim=1).mean()
        return torch.log(1 + head_count / spread)
class ImbalancedDatasetSampler(data.sampler.Sampler):
    """Sampler that draws indices with probability inversely proportional to
    each sample's class frequency, rebalancing skewed datasets."""
    def __init__(self, dataset, indices: list = None, num_samples: int = None):
        self.indices = indices if indices is not None else list(range(len(dataset)))
        self.num_samples = num_samples if num_samples is not None else len(self.indices)
        frame = pd.DataFrame()
        frame["label"] = self._get_labels(dataset)
        frame.index = self.indices
        frame = frame.sort_index()
        # weight(sample) = 1 / count(label of sample)
        per_label = frame["label"].value_counts()
        inv_freq = 1.0 / per_label[frame["label"]]
        self.weights = torch.DoubleTensor(inv_freq.to_list())
        # self.weights = self.weights.clamp(min=1e-5)
    def _get_labels(self, dataset):
        # Only torchvision ImageFolder datasets (possibly wrapped in a
        # Subset) are supported.
        if isinstance(dataset, datasets.ImageFolder):
            return [sample[1] for sample in dataset.imgs]
        if isinstance(dataset, torch.utils.data.Subset):
            return [dataset.dataset.imgs[i][1] for i in dataset.indices]
        raise NotImplementedError
    def __iter__(self):
        drawn = torch.multinomial(self.weights, self.num_samples, replacement=True)
        return (self.indices[i] for i in drawn)
    def __len__(self):
        return self.num_samples
def run_training():
    """Full AffectNet training loop for the DAN model.

    Trains with a class-balanced sampler and three combined losses
    (cross-entropy + affinity + partition), validates each epoch, and saves
    a checkpoint whenever validation accuracy clears a per-class-count
    threshold.
    """
    args = parse_args()
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    if torch.cuda.is_available():
        torch.backends.cudnn.benchmark = True
        torch.backends.cudnn.deterministic = True
        torch.backends.cudnn.enabled = True
    model = DAN(num_class=args.num_class, num_head=args.num_head)
    model.to(device)
    # Training augmentation: geometric jitter + erasing, ImageNet normalisation.
    data_transforms = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.RandomHorizontalFlip(),
        transforms.RandomApply([
            transforms.RandomAffine(20, scale=(0.8, 1), translate=(0.2, 0.2)),
        ], p=0.7),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225]),
        transforms.RandomErasing(),
    ])
    # train_dataset = AffectNet(args.aff_path, phase = 'train', transform = data_transforms) # loading dynamically
    train_dataset = datasets.ImageFolder(f'{args.aff_path}/train', transform = data_transforms) # loading statically
    if args.num_class == 7: # ignore the 8-th class
        idx = [i for i in range(len(train_dataset)) if train_dataset.imgs[i][1] != 7]
        train_dataset = data.Subset(train_dataset, idx)
    print('Whole train set size:', train_dataset.__len__())
    train_loader = torch.utils.data.DataLoader(train_dataset,
                                               batch_size = args.batch_size,
                                               num_workers = args.workers,
                                               sampler=ImbalancedDatasetSampler(train_dataset),
                                               shuffle = False,
                                               pin_memory = True)
    # Validation uses deterministic preprocessing only.
    data_transforms_val = transforms.Compose([
        transforms.Resize((224, 224)),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])])
    # val_dataset = AffectNet(args.aff_path, phase = 'val', transform = data_transforms_val) # loading dynamically
    val_dataset = datasets.ImageFolder(f'{args.aff_path}/val', transform = data_transforms_val) # loading statically
    if args.num_class == 7: # ignore the 8-th class
        idx = [i for i in range(len(val_dataset)) if val_dataset.imgs[i][1] != 7]
        val_dataset = data.Subset(val_dataset, idx)
    print('Validation set size:', val_dataset.__len__())
    val_loader = torch.utils.data.DataLoader(val_dataset,
                                             batch_size = args.batch_size,
                                             num_workers = args.workers,
                                             shuffle = False,
                                             pin_memory = True)
    # Losses: classification + feature affinity + head-diversity partition.
    criterion_cls = torch.nn.CrossEntropyLoss().to(device)
    criterion_af = AffinityLoss(device, num_class=args.num_class)
    criterion_pt = PartitionLoss()
    # The affinity loss owns learnable class centers, so optimise them too.
    params = list(model.parameters()) + list(criterion_af.parameters())
    optimizer = torch.optim.Adam(params,args.lr,weight_decay = 0)
    scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer, gamma = 0.6)
    best_acc = 0
    for epoch in tqdm(range(1, args.epochs + 1)):
        running_loss = 0.0
        correct_sum = 0
        iter_cnt = 0
        model.train()
        for (imgs, targets) in train_loader:
            iter_cnt += 1
            optimizer.zero_grad()
            imgs = imgs.to(device)
            targets = targets.to(device)
            out,feat,heads = model(imgs)
            loss = criterion_cls(out,targets) + criterion_af(feat,targets) + criterion_pt(heads)
            loss.backward()
            optimizer.step()
            running_loss += loss
            _, predicts = torch.max(out, 1)
            correct_num = torch.eq(predicts, targets).sum()
            correct_sum += correct_num
        acc = correct_sum.float() / float(train_dataset.__len__())
        running_loss = running_loss/iter_cnt
        tqdm.write('[Epoch %d] Training accuracy: %.4f. Loss: %.3f. LR %.6f' % (epoch, acc, running_loss,optimizer.param_groups[0]['lr']))
        # Per-epoch validation pass (no gradients).
        with torch.no_grad():
            running_loss = 0.0
            iter_cnt = 0
            bingo_cnt = 0
            sample_cnt = 0
            model.eval()
            for imgs, targets in val_loader:
                imgs = imgs.to(device)
                targets = targets.to(device)
                out,feat,heads = model(imgs)
                loss = criterion_cls(out,targets) + criterion_af(feat,targets) + criterion_pt(heads)
                running_loss += loss
                iter_cnt+=1
                _, predicts = torch.max(out, 1)
                correct_num = torch.eq(predicts,targets)
                bingo_cnt += correct_num.sum().cpu()
                sample_cnt += out.size(0)
            running_loss = running_loss/iter_cnt
            scheduler.step()
            acc = bingo_cnt.float()/float(sample_cnt)
            acc = np.around(acc.numpy(),4)
            best_acc = max(acc,best_acc)
            tqdm.write("[Epoch %d] Validation accuracy:%.4f. Loss:%.3f" % (epoch, acc, running_loss))
            tqdm.write("best_acc:" + str(best_acc))
            # Checkpoint whenever accuracy clears the per-setting threshold.
            if args.num_class == 7 and acc > 0.65:
                torch.save({'iter': epoch,
                            'model_state_dict': model.state_dict(),
                            'optimizer_state_dict': optimizer.state_dict(),},
                           os.path.join('checkpoints', "affecnet7_epoch"+str(epoch)+"_acc"+str(acc)+".pth"))
                tqdm.write('Model saved.')
            elif args.num_class == 8 and acc > 0.62:
                torch.save({'iter': epoch,
                            'model_state_dict': model.state_dict(),
                            'optimizer_state_dict': optimizer.state_dict(),},
                           os.path.join('checkpoints', "affecnet8_epoch"+str(epoch)+"_acc"+str(acc)+".pth"))
                tqdm.write('Model saved.')
if __name__ == "__main__":
run_training() | 38.133758 | 138 | 0.576416 | 4,638 | 0.387339 | 0 | 0 | 0 | 0 | 0 | 0 | 1,342 | 0.112076 |
1b3e671c11ff77239e8ab8e03baca3eb7d514c79 | 5,007 | py | Python | main.py | Dropout1337/ComboWatermarker | 6c1dfa9fb6f12dc2924268fcd91458b839796824 | [
"MIT"
] | 6 | 2020-09-20T22:23:26.000Z | 2021-06-30T16:14:25.000Z | main.py | Dropout1337/ComboWatermarker | 6c1dfa9fb6f12dc2924268fcd91458b839796824 | [
"MIT"
] | null | null | null | main.py | Dropout1337/ComboWatermarker | 6c1dfa9fb6f12dc2924268fcd91458b839796824 | [
"MIT"
] | null | null | null | import os
import keyboard
banner = '''\u001b[34m____ ____ _ _ ___ ____ _ _ _ ____ ___ ____ ____ _ _ ____ ____ _ _
| | | |\/| |__] | | | | | |__| | |___ |__/ |\/| |__| |__/ |_/
|___ |__| | | |__] |__| |_|_| | | | |___ | \ | | | | | \ | \_ \u001b[37m'''
class Watermark():
    """Reads email:password combos from Combos.txt and appends them to
    New_Combo.txt with a watermark suffix.

    NOTE: methods deliberately take no ``self`` because they are only ever
    invoked unbound as ``Watermark.X(...)`` from the __main__ block.
    """
    def _watermark(suffix):
        """Shared worker: copy every combo line with *suffix* appended,
        report the outcome, wait for a keypress and exit the process."""
        try:
            with open('Combos.txt', "r") as Combo_File:
                for line in Combo_File:
                    line = line.replace('\n', '')
                    Account_Combo = line.split(':')
                    email = Account_Combo[0]
                    # Raises IndexError on lines without a ':' separator,
                    # which is reported as an invalid-syntax combo below.
                    password = Account_Combo[1]
                    with open('New_Combo.txt', "a+") as f:
                        f.write(f'{email}:{password}{suffix}\n')
            print('\n[\u001b[34mWATERMARK\u001b[37m] Success')
            input()
            os._exit(0)
        except IndexError:
            print('\n[\u001b[34mWATERMARK\u001b[37m] Invalid Combo Syntax, Example dropout@gmail.com:FuckingNoLife123')
            input()
            os._exit(0)
    def All(type, author, checker):
        # BUG FIX: the original wrote "Type: {checker}" here, silently
        # discarding the user-supplied combo type.
        Watermark._watermark(f' | Type: {type} | Checked By: {author} | Checker: {checker}')
    def Type(type):
        Watermark._watermark(f' | Type: {type}')
    def Author(author):
        Watermark._watermark(f' | Checked By: {author}')
    def Checker(checker):
        Watermark._watermark(f' | Checker: {checker}')
if __name__ == "__main__":
    # Console setup: clear the screen and set the window title (Windows cmd).
    os.system('cls & title [Combo Watermarker] By Dropout')
    print(
        f'{banner}\n',
        '\n[\u001b[34m1\u001b[37m] All',
        '\n[\u001b[34m2\u001b[37m] Combo Type',
        '\n[\u001b[34m3\u001b[37m] Author',
        '\n[\u001b[34m4\u001b[37m] Checker'
    )
    # Poll the keyboard until the user presses 1-4, then prompt for the
    # relevant watermark fields and delegate to the matching Watermark method
    # (each of which exits the process when done).
    while True:
        try:
            if keyboard.is_pressed('1'):
                keyboard.write('\b')  # erase the digit echoed to the console
                cmd_type = input('\n\u001b[34m>\u001b[37m Combo Type: ')
                author = input('\u001b[34m>\u001b[37m Author: ')
                checker = input('\u001b[34m>\u001b[37m Checker: ')
                Watermark.All(cmd_type, author, checker)
                break
            elif keyboard.is_pressed('2'):
                keyboard.write('\b')
                cmd_type = input('\n\u001b[34m>\u001b[37m Combo Type: ')
                Watermark.Type(cmd_type)
                break
            elif keyboard.is_pressed('3'):
                keyboard.write('\b')
                author = input('\n\u001b[34m>\u001b[37m Author: ')
                Watermark.Author(author)
                break
            elif keyboard.is_pressed('4'):
                keyboard.write('\b')
                checker = input('\n\u001b[34m>\u001b[37m Checker: ')
                Watermark.Checker(checker)
                break
        except Exception:
            # keyboard.is_pressed can raise transiently (e.g. insufficient
            # privileges); keep polling instead of crashing.  Narrowed from a
            # bare except so Ctrl+C (KeyboardInterrupt) still exits the loop.
            continue
1b3eb815d7dc1334894f133246fe3bca46ed81e2 | 216,073 | py | Python | fixtures/webui_test.py | rombie/contrail-test | a68c71d6f282142501a7e2e889bbb232fdd82dc3 | [
"Apache-2.0"
] | null | null | null | fixtures/webui_test.py | rombie/contrail-test | a68c71d6f282142501a7e2e889bbb232fdd82dc3 | [
"Apache-2.0"
] | null | null | null | fixtures/webui_test.py | rombie/contrail-test | a68c71d6f282142501a7e2e889bbb232fdd82dc3 | [
"Apache-2.0"
] | null | null | null | from netaddr import IPNetwork
from selenium import webdriver
from pyvirtualdisplay import Display
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import WebDriverException
import time
import random
import fixtures
from ipam_test import *
from project_test import *
from util import *
from vnc_api.vnc_api import *
from netaddr import *
from time import sleep
from contrail_fixtures import *
from pyvirtualdisplay import Display
import inspect
import policy_test_utils
import threading
import sys
from webui_common import *
class WebuiTest:
def __init__(self, connections, inputs):
self.proj_check_flag = 0
self.inputs = inputs
self.connections = connections
self.logger = self.inputs.logger
self.browser = self.connections.browser
self.browser_openstack = self.connections.browser_openstack
self.delay = 10
self.frequency = 1
self.logger = inputs.logger
self.webui_common = WebuiCommon(self)
self.dash = "-" * 60
self.vnc_lib = connections.vnc_lib_fixture
def _click_if_element_found(self, element_name, elements_list):
for element in elements_list:
if element.text == element_name:
element.click()
# end _click_if_element_found
    def create_vn_in_webui(self, fixture):
        """Create virtual network ``fixture.vn_name`` through the WebUI
        (Configure -> Networking), unless quantum already knows about it.

        On success populates ``fixture.obj``, ``fixture.vn_id`` and
        ``fixture.vn_fq_name``.  Exits the whole process on any exception.
        """
        result = True
        try:
            fixture.obj = fixture.quantum_fixture.get_vn_obj_if_present(
                fixture.vn_name, fixture.project_id)
            if not fixture.obj:
                self.logger.info("Creating VN %s using webui..." %
                                 (fixture.vn_name))
                if not self.webui_common.click_configure_networks():
                    result = result and False
                self.webui_common.select_project(fixture.project_name)
                # Screenshot taken for debugging before opening the dialog.
                self.browser.get_screenshot_as_file(
                    'createVN' + self.webui_common.date_time_string() + '.png')
                self.webui_common.click_element(
                    self.browser, 'btnCreateVN', 'id')
                self.webui_common.wait_till_ajax_done(self.browser)
                txtVNName = self.webui_common.find_element(
                    self.browser, 'txtVNName', 'id')
                txtVNName.send_keys(fixture.vn_name)
                # A list of subnets adds one IPAM/IP-block row per subnet;
                # a single subnet fills one row via a slightly different path.
                if type(fixture.vn_subnets) is list:
                    for subnet in fixture.vn_subnets:
                        self.webui_common.click_element(
                            self.browser, 'btnCommonAddIpam', 'id')
                        self.webui_common.wait_till_ajax_done(self.browser)
                        self.webui_common.click_element(
                            self.browser, ['ipamTuples', 'select2-choice'], ['id', 'class'])
                        ipam_list = self.webui_common.find_element(
                            self.browser, ['select2-drop', 'li'], ['id', 'tag'], [1])
                        self.webui_common.wait_till_ajax_done(self.browser)
                        for ipam in ipam_list:
                            ipam_text = ipam.find_element_by_tag_name(
                                'div').text
                            # deliberate pause for the select2 dropdown to render
                            time.sleep(2)
                            if ipam_text.find(fixture.ipam_fq_name[2]) != -1:
                                ipam.click()
                                break
                        self.browser.find_element_by_xpath(
                            "//input[@placeholder = 'IP Block'] ").send_keys(subnet)
                else:
                    self.browser.find_element_by_id('btnCommonAddIpam').click()
                    self.browser.find_element_by_id(
                        "select2-drop-mask").click()
                    ipam_list = self.browser.find_element_by_id(
                        "select2-drop").find_element_by_tag_name('ul').find_elements_by_tag_name('li')
                    for ipam in ipam_list:
                        ipam_text = ipam.get_attribute("innerHTML")
                        # NOTE(review): compares against self.ipam_fq_name,
                        # which is not set in __init__ -- confirm this branch
                        # is exercised with that attribute populated.
                        if ipam_text == self.ipam_fq_name:
                            ipam.click()
                            break
                    self.browser.find_element_by_xpath(
                        "//input[@placeholder = 'IP Block'] ").send_keys(fixture.vn_subnets)
                self.browser.find_element_by_id('btnCreateVNOK').click()
                time.sleep(3)
                if not self.webui_common.check_error_msg("create VN"):
                    raise Exception("vn creation failed")
            else:
                fixture.already_present = True
                self.logger.info('VN %s already exists, skipping creation ' %
                                 (fixture.vn_name))
                self.logger.debug('VN %s exists, already there' %
                                  (fixture.vn_name))
                fixture.obj = fixture.quantum_fixture.get_vn_obj_if_present(
                    fixture.vn_name, fixture.project_id)
            fixture.vn_id = fixture.obj['network']['id']
            fixture.vn_fq_name = ':'.join(self.vnc_lib.id_to_fq_name(
                fixture.obj['network']['id']))
        except Exception as e:
            with fixture.lock:
                self.logger.exception(
                    "Got exception as %s while creating %s" % (e, fixture.vn_name))
            sys.exit(-1)
    # end create_vn_in_webui
    def create_dns_server_in_webui(self):
        """Create DNS server 'server1' (domain 'domain1') via the WebUI
        Configure -> DNS Servers page, selecting Round-Robin load balancing,
        TTL 300, and associating the IPAMs 'ipam1' and 'ipam_1'.

        Raises:
            Exception: if the WebUI reports an error after submit.
        """
        ass_ipam_list = ['ipam1', 'ipam_1']
        # NOTE(review): 'result' is read before any assignment in this method;
        # this raises NameError if the click fails -- confirm intended.
        if not self.webui_common.click_configure_dns_server():
            result = result and False
        WebDriverWait(self.browser, self.delay).until(
            lambda a: a.find_element_by_id('btnCreateDNSServer')).click()
        WebDriverWait(self.browser, self.delay).until(
            lambda a: a.find_element_by_id('txtDNSServerName')).send_keys('server1')
        WebDriverWait(self.browser, self.delay).until(
            lambda a: a.find_element_by_id('txtDomainName')).send_keys('domain1')
        # Open the DNS-forwarder autocomplete and pick 'default-domain:dnss'.
        self.browser.find_elements_by_class_name(
            'control-group')[2].find_element_by_tag_name('i').click()
        options = self.browser.find_element_by_class_name(
            'ui-autocomplete').find_elements_by_tag_name('li')
        for i in range(len(options)):
            if (options[i].find_element_by_tag_name('a').text == 'default-domain:dnss'):
                options[i].click()
                time.sleep(2)
        # Select the 'Round-Robin' record resolution order.
        self.browser.find_element_by_id(
            's2id_ddLoadBal').find_element_by_tag_name('a').click()
        rro_list = self.browser.find_element_by_id(
            'select2-drop').find_elements_by_tag_name('li')
        rro_opt_list = [element.find_element_by_tag_name('div')
                        for element in rro_list]
        for rros in rro_opt_list:
            rros_text = rros.text
            if rros_text == 'Round-Robin':
                rros.click()
                break
        WebDriverWait(self.browser, self.delay).until(
            lambda a: a.find_element_by_id('txtTimeLive')).send_keys('300')
        # Associate each IPAM from ass_ipam_list (under the 'admin' project).
        for ipam in range(len(ass_ipam_list)):
            self.browser.find_element_by_id(
                's2id_msIPams').find_element_by_tag_name('input').click()
            ipam_list = self.browser.find_element_by_id(
                'select2-drop').find_element_by_class_name('select2-results').find_elements_by_tag_name('li')
            ipam_opt_list = [element.find_element_by_tag_name('div')
                             for element in ipam_list]
            for ipams in ipam_opt_list:
                ipams_text = ipams.text
                if ipams_text == 'admin:' + ass_ipam_list[ipam]:
                    ipams.click()
                    break
        WebDriverWait(self.browser, self.delay).until(
            lambda a: a.find_element_by_id('btnCreateDNSServerOK')).click()
        if not self.webui_common.check_error_msg("create DNS"):
            raise Exception("DNS creation failed")
    # end create_dns_server_in_webui
    def create_dns_record_in_webui(self):
        """Create a DNS record via the WebUI Configure -> DNS Records page.

        Iterates the record-type dropdown and fills in hard-coded name/data
        values per record type; ultimately an 'NS (Delegation Record)' named
        'abc' delegating to 'default-domain:dns2' is created, with class
        'IN (Internet)' and TTL 300.

        Raises:
            Exception: if the WebUI reports an error after submit.
        """
        # NOTE(review): 'result' is read before any assignment in this method;
        # this raises NameError if the click fails -- confirm intended.
        if not self.webui_common.click_configure_dns_record():
            result = result and False
        WebDriverWait(self.browser, self.delay).until(
            lambda a: a.find_element_by_id('btnCreateDNSRecord')).click()
        self.browser.find_element_by_id(
            's2id_cmbRecordType').find_element_by_tag_name('a').click()
        type_list = self.browser.find_element_by_id(
            'select2-drop').find_elements_by_tag_name('li')
        type_opt_list = [element.find_element_by_tag_name('div')
                         for element in type_list]
        # Fill the name/data inputs appropriate to each record type seen in
        # the dropdown; the NS branch also picks the delegated DNS server.
        for types in type_opt_list:
            types_text = types.text
            if types_text == 'NS (Delegation Record)':
                types.click()
            if types_text == 'CNAME (Alias Record)':
                self.browser.find_element_by_id(
                    'txtRecordName').send_keys('abc')
                self.browser.find_element_by_id(
                    'txtRecordData').send_keys('bcd')
            if types_text == 'A (IP Address Record)':
                self.browser.find_element_by_id(
                    'txtRecordName').send_keys('abc')
                self.browser.find_element_by_id(
                    'txtRecordData').send_keys('189.32.3.2/21')
            if types_text == 'PTR (Reverse DNS Record)':
                self.browser.find_element_by_id(
                    'txtRecordName').send_keys('187.23.2.1/27')
                self.browser.find_element_by_id(
                    'txtRecordData').send_keys('bcd')
            if types_text == 'NS (Delegation Record)':
                self.browser.find_element_by_id(
                    'txtRecordName').send_keys('abc')
                self.browser.find_elements_by_class_name(
                    'control-group')[2].find_element_by_tag_name('i').click()
                dns_servers = self.browser.find_element_by_class_name(
                    'ui-autocomplete').find_elements_by_tag_name('li')
                for servers in range(len(dns_servers)):
                    if dns_servers[servers].find_element_by_tag_name('a').text == 'default-domain:' + 'dns2':
                        dns_servers[servers].find_element_by_tag_name(
                            'a').click()
                        break
                break
        self.browser.find_element_by_id(
            's2id_cmbRecordClass').find_element_by_tag_name('a').click()
        class_list = self.browser.find_element_by_id(
            'select2-drop').find_elements_by_tag_name('li')
        class_opt_list = [element.find_element_by_tag_name('div')
                          for element in class_list]
        for classes in class_opt_list:
            classes_text = classes.text
            if classes_text == 'IN (Internet)':
                classes.click()
                break
        self.browser.find_element_by_id('txtRecordTTL').send_keys('300')
        self.browser.find_element_by_id('btnAddDNSRecordOk').click()
        if not self.webui_common.check_error_msg("create DNS Record"):
            raise Exception("DNS Record creation failed")
    # end create_dns_record_in_webui
    def create_svc_template_in_webui(self, fixture):
        """Create service template ``fixture.st_name`` via the WebUI
        Configure -> Services -> Service Templates page.

        Fills service mode/type, image, per-interface shared-IP and
        static-route flags from ``fixture.if_list``, flavor, and the
        scaling checkbox when ``fixture.svc_scaling`` is set.

        Raises:
            Exception: if the WebUI reports an error after submit.
        """
        result = True
        if not self.webui_common.click_configure_service_template():
            result = result and False
        self.logger.info("Creating svc template %s using webui" %
                         (fixture.st_name))
        self.webui_common.click_element(
            self.browser, 'btnCreatesvcTemplate', 'id')
        self.webui_common.wait_till_ajax_done(self.browser)
        txt_temp_name = WebDriverWait(self.browser, self.delay).until(
            lambda a: a.find_element_by_id('txtTempName'))
        txt_temp_name.send_keys(fixture.st_name)
        # Select service mode (case-insensitive match on the dropdown text).
        self.browser.find_element_by_id(
            's2id_ddserMode').find_element_by_class_name('select2-choice').click()
        service_mode_list = self.browser.find_element_by_id(
            "select2-drop").find_elements_by_tag_name('li')
        for service_mode in service_mode_list:
            service_mode_text = service_mode.text
            if service_mode_text.lower() == fixture.svc_mode:
                service_mode.click()
                break
        # Select service type.
        self.browser.find_element_by_id(
            's2id_ddserType').find_element_by_class_name('select2-choice').click()
        service_type_list = self.browser.find_element_by_id(
            "select2-drop").find_elements_by_tag_name('li')
        for service_type in service_type_list:
            service_type_text = service_type.text
            if service_type_text.lower() == fixture.svc_type:
                service_type.click()
                break
        # Select the image used to launch the service instance VM.
        self.browser.find_element_by_id(
            's2id_ddImageName').find_element_by_class_name('select2-choice').click()
        image_name_list = self.browser.find_element_by_id(
            "select2-drop").find_elements_by_tag_name('li')
        for image_name in image_name_list:
            image_name_text = image_name.text
            if image_name_text.lower() == fixture.image_name:
                image_name.click()
                break
        static_route = self.browser.find_element_by_id(
            'widgetStaticRoutes').find_element_by_tag_name('i').click()
        # Each fixture.if_list entry is (interface-type, shared_ip,
        # static_routes); widget controls are laid out 3 per interface row,
        # hence the index * 3 arithmetic below.
        for index, intf_element in enumerate(fixture.if_list):
            intf_text = intf_element[0]
            shared_ip = intf_element[1]
            static_routes = intf_element[2]
            self.browser.find_element_by_id('btnCommonAddInterface').click()
            self.browser.find_element_by_id(
                'allInterface').find_elements_by_tag_name('i')[index * 3].click()
            if shared_ip:
                self.browser.find_element_by_id('allInterface').find_elements_by_tag_name(
                    'input')[index * 3 + 1].click()
            if static_routes:
                self.browser.find_element_by_id(
                    'allInterface').find_elements_by_tag_name('i')[index * 3 + 2].click()
            intf_types = self.browser.find_elements_by_class_name(
                'ui-autocomplete')[index].find_elements_by_class_name('ui-menu-item')
            intf_dropdown = [element.find_element_by_tag_name('a')
                             for element in intf_types]
            for intf in intf_dropdown:
                if intf.text.lower() == intf_text:
                    intf.click()
                    break
        # Select flavor (substring match on the dropdown entry text).
        self.browser.find_element_by_id(
            's2id_ddFlavors').find_element_by_class_name('select2-choice').click()
        flavors_list = self.browser.find_elements_by_xpath(
            "//span[@class = 'select2-match']/..")
        for flavor in flavors_list:
            flavor_text = flavor.text
            if flavor_text.find(fixture.flavor) != -1:
                flavor.click()
                break
        if fixture.svc_scaling:
            self.browser.find_element_by_id('chkServiceEnabeling').click()
        self.browser.find_element_by_id('btnCreateSTempOK').click()
        time.sleep(3)
        if not self.webui_common.check_error_msg("create service template"):
            raise Exception("service template creation failed")
    # end create_svc_template_in_webui
    def create_svc_instance_in_webui(self, fixture):
        """Create service instance ``fixture.si_name`` from template
        ``fixture.st_name`` via the WebUI Configure -> Services ->
        Service Instances page, then wait for it to come up.

        Raises:
            Exception: if the WebUI reports an error after submit.
        """
        result = True
        if not self.webui_common.click_configure_service_instance():
            result = result and False
        self.webui_common.select_project(fixture.project_name)
        self.logger.info("Creating svc instance %s using webui" %
                         (fixture.si_name))
        self.webui_common.click_element(
            self.browser, 'btnCreatesvcInstances', 'id')
        self.webui_common.wait_till_ajax_done(self.browser)
        txt_instance_name = WebDriverWait(self.browser, self.delay).until(
            lambda a: a.find_element_by_id('txtsvcInstanceName'))
        txt_instance_name.send_keys(fixture.si_name)
        # Pick the service template whose dropdown text contains st_name.
        self.browser.find_element_by_id(
            's2id_ddsvcTemplate').find_element_by_class_name('select2-choice').click()
        service_template_list = self.browser.find_element_by_id(
            'select2-drop').find_elements_by_tag_name('li')
        service_temp_list = [
            element.find_element_by_tag_name('div') for element in service_template_list]
        for service_temp in service_temp_list:
            service_temp_text = service_temp.text
            if service_temp_text.find(fixture.st_name) != -1:
                service_temp.click()
                break
        intfs = self.browser.find_element_by_id(
            'instanceDiv').find_elements_by_tag_name('a')
        self.browser.find_element_by_id('btnCreatesvcInstencesOK').click()
        time.sleep(3)
        if not self.webui_common.check_error_msg("create service instance"):
            raise Exception("service instance creation failed")
        # Allow time for the service instance VM to spawn before returning.
        time.sleep(30)
    # end create_svc_instance_in_webui
def create_ipam_in_webui(self, fixture):
result = True
ip_blocks = False
if not self.webui_common.click_configure_ipam():
result = result and False
self.webui_common.select_project(fixture.project_name)
self.logger.info("Creating ipam %s using webui" % (fixture.name))
WebDriverWait(self.browser, self.delay).until(
lambda a: a.find_element_by_id('btnCreateEditipam')).click()
self.webui_common.wait_till_ajax_done(self.browser)
WebDriverWait(self.browser, self.delay).until(
lambda a: a.find_element_by_id('txtIPAMName')).send_keys(fixture.name)
self.webui_common.wait_till_ajax_done(self.browser)
'''
self.browser.find_element_by_id('s2id_ddDNS').find_element_by_class_name('select2-choice').click()
dns_method_list = self.browser.find_element_by_id('select2-drop').find_elements_by_tag_name('li')
dns_list = [ element.find_element_by_tag_name('div') for element in dns_method_list]
for dns in dns_list :
dns_text = dns.text
if dns_text.find('Tenant') != -1 :
dns.click()
if dns_text == 'Tenant':
self.browser.find_element_by_id('txtdnsTenant').send_keys('189.23.2.3/21')
self.browser.find_element_by_id("txtNTPServer").send_keys('32.24.53.45/28')
self.browser.find_element_by_id("txtDomainName").send_keys('domain_1')
elif dns_text == 'Default' or dns.text == 'None':
self.browser.find_element_by_id("txtNTPServer").send_keys('32.24.53.45/28')
self.browser.find_element_by_id("txtDomainName").send_keys('domain_1')
elif dns_text == 'Virtual DNS':
self.browser.find_element_by_id('dnsvirtualBlock').find_element_by_tag_name('a').click()
self.webui_common.wait_till_ajax_done(self.browser)
virtual_dns_list = self.browser.find_element_by_id('select2-drop').find_elements_by_tag_name('li')
vdns_list = [ element.find_element_by_tag_name('div') for element in virtual_dns_list]
for vdns in vdns_list :
vdns_text = vdns.text
if vdns_text == 'default-domain:'+'dns':
vdns.click()
break
break
for net in range(len(net_list)):
self.browser.find_element_by_id("btnCommonAddVN").click()
self.browser.find_element_by_id('vnTuples').find_element_by_tag_name('a').click()
self.webui_common.wait_till_ajax_done(self.browser)
vn_list = self.browser.find_element_by_id('select2-drop').find_elements_by_tag_name('li')
virtual_net_list = [ element.find_element_by_tag_name('div') for element in vn_list]
for vns in virtual_net_list :
vn_text = vns.text
if vn_text == net_list[net] :
vns.click()
break
self.browser.find_element_by_xpath("//*[contains(@placeholder, 'IP Block')]").send_keys('187.23.2.'+str(net+1)+'/21')
'''
self.browser.find_element_by_id("btnCreateEditipamOK").click()
if not self.webui_common.check_error_msg("Create ipam"):
raise Exception("ipam creation failed")
# end create_ipam_in_webui
    def create_policy_in_webui(self, fixture):
        """Create network policy ``fixture.policy_name`` with the rules in
        ``fixture.rules_list`` via the WebUI Configure -> Policies page,
        unless quantum already knows about it.

        Each rule dict supplies simple_action, protocol, source_network,
        direction, dest_network and optional src_ports/dst_ports (int or
        list of ints).  Exits the process on any exception.
        """
        result = True
        line = 0
        try:
            fixture.policy_obj = fixture.quantum_fixture.get_policy_if_present(
                fixture.project_name, fixture.policy_name)
            if not fixture.policy_obj:
                self.logger.info("Creating policy %s using webui" %
                                 (fixture.policy_name))
                if not self.webui_common.click_configure_policies():
                    result = result and False
                self.webui_common.select_project(fixture.project_name)
                WebDriverWait(self.browser, self.delay).until(
                    lambda a: a.find_element_by_id('btnCreatePolicy')).click()
                time.sleep(2)
                # self.webui_common.wait_till_ajax_done(self.browser)
                WebDriverWait(self.browser, self.delay).until(
                    lambda a: a.find_element_by_id('txtPolicyName')).send_keys(fixture.policy_name)
                time.sleep(2)
                # self.webui_common.wait_till_ajax_done(self.browser)
                # 'lists' indexes into the page's ui-autocomplete widgets and
                # advances as each rule field is filled.
                lists = 0
                for rule in fixture.rules_list:
                    action = rule['simple_action']
                    protocol = rule['protocol']
                    source_net = rule['source_network']
                    direction = rule['direction']
                    dest_net = rule['dest_network']
                    if rule['src_ports']:
                        if type(rule['src_ports']) is list:
                            src_port = ','.join(str(num)
                                                for num in rule['src_ports'])
                        else:
                            src_port = str(rule['src_ports'])
                    if rule['dst_ports']:
                        if type(rule['dst_ports']) is list:
                            dst_port = ','.join(str(num)
                                                for num in rule['dst_ports'])
                        else:
                            dst_port = str(rule['dst_ports'])
                    self.browser.find_element_by_id('btnCommonAddRule').click()
                    self.webui_common.wait_till_ajax_done(self.browser)
                    controls = WebDriverWait(self.browser, self.delay).until(
                        lambda a: a.find_element_by_class_name('controls'))
                    rules = self.webui_common.find_element(
                        controls, ['ruleTuples', 'rule-item'], ['id', 'class'], [1])[line]
                    rules = rules.find_elements_by_css_selector(
                        "div[class$='pull-left']")
                    li = self.browser.find_elements_by_css_selector(
                        "ul[class^='ui-autocomplete']")
                    # NOTE(review): the loop variable below shadows the outer
                    # 'rule' dict; here it is a field index into the rule row
                    # (0=action, 1=protocol, 2=source, 3=direction, 4=dest).
                    for rule in range(len(rules)):
                        if rule == 3:
                            # Direction is a select2 dropdown, not an
                            # autocomplete input.
                            rules[rule].find_element_by_class_name(
                                'select2-container').find_element_by_tag_name('a').click()
                            direction_list = self.browser.find_element_by_id(
                                'select2-drop').find_elements_by_tag_name('li')
                            dir_list = [element.find_element_by_tag_name('div')
                                        for element in direction_list]
                            for directions in dir_list:
                                direction_text = directions.text
                                if direction_text == direction:
                                    directions.click()
                                    break
                            continue
                        rules[rule].find_element_by_class_name(
                            'add-on').find_element_by_class_name('icon-caret-down').click()
                        time.sleep(2)
                        # self.webui_common.wait_till_ajax_done(self.browser)
                        opt = li[lists].find_elements_by_tag_name('li')
                        if rule == 0:
                            self.sel(opt, action.upper())
                        elif rule == 1:
                            self.sel(opt, protocol.upper())
                        elif rule == 2:
                            self.sel(opt, source_net)
                            rule_items = self.webui_common.find_element(
                                controls, ['ruleTuples', 'rule-item'], ['id', 'class'], [1])[line]
                            rule_items.find_elements_by_class_name(
                                'span1')[2].find_element_by_tag_name('input').send_keys(src_port)
                            # controls.find_element_by_id('ruleTuples').find_elements_by_class_name('rule-item')[line].find_elements_by_class_name('span1')[2].find_element_by_tag_name('input').send_keys(src_port)
                        else:
                            self.sel(opt, dest_net)
                            controls.find_element_by_id('ruleTuples').find_elements_by_class_name(
                                'rule-item')[line].find_elements_by_class_name('span1')[4].find_element_by_tag_name('input').send_keys(dst_port)
                            break
                        lists = lists + 1
                    lists = lists + 1
                self.browser.find_element_by_id('btnCreatePolicyOK').click()
                self.webui_common.wait_till_ajax_done(self.browser)
                if not self.webui_common.check_error_msg("Create Policy"):
                    raise Exception("Policy creation failed")
                fixture.policy_obj = fixture.quantum_fixture.get_policy_if_present(
                    fixture.project_name, fixture.policy_name)
            else:
                fixture.already_present = True
                self.logger.info(
                    'Policy %s already exists, skipping creation ' %
                    (fixture.policy_name))
                self.logger.debug('Policy %s exists, already there' %
                                  (fixture.policy_name))
        except Exception as e:
            self.logger.exception("Got exception as %s while creating %s" %
                                  (e, fixture.policy_name))
            sys.exit(-1)
    def sel(self, opt, choice):
        """Click the autocomplete option in ``opt`` whose 'ui-corner-all'
        inner HTML equals ``choice``; returns after clicking the first match.

        The sleeps surrounding the click are deliberate pauses for the
        dropdown animation; the trailing ``continue`` is redundant but kept.
        """
        for i in range(len(opt)):
            option = opt[i].find_element_by_class_name(
                'ui-corner-all').get_attribute("innerHTML")
            if option == choice:
                btn = opt[i].find_element_by_class_name('ui-corner-all')
                time.sleep(1)
                btn.click()
                time.sleep(1)
                return
            continue
def policy_delete_in_webui(self, fixture):
if not self.webui_common.click_configure_policies():
result = result and False
rows = self.webui_common.get_rows()
for pol in range(len(rows)):
tdArry = rows[pol].find_elements_by_class_name('slick-cell')
if(len(tdArry) > 2):
if (tdArry[2].text == fixture.policy_name):
tdArry[0].find_element_by_tag_name('i').click()
self.webui_common.wait_till_ajax_done(self.browser)
rows = self.webui_common.get_rows()
ass_net = rows[
pol + 1].find_elements_by_class_name('row-fluid')[1].find_element_by_xpath("//div[@class='span11']").text.split()
if(ass_net[0] != '-'):
for net in range(len(ass_net)):
network.append(ass_net[net])
else:
print("No networks associated")
tdArry[5].find_element_by_tag_name('i').click()
self.browser.find_element_by_id(
'gridPolicy-action-menu-' + str(i)).find_elements_by_tag_name('li')[1].find_element_by_tag_name('a').click()
self.browser.find_element_by_id("btnRemovePopupOK").click()
self.webui_common.wait_till_ajax_done(self.browser)
if not self.webui_common.check_error_msg("Delete policy"):
raise Exception("Policy deletion failed")
self.logger.info("%s is deleted successfully using webui" %
(fixture.policy_name))
break
# end policy_delete_in_webui
def verify_analytics_nodes_ops_basic_data(self):
self.logger.info("Verifying analytics_node basic ops-data in Webui...")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_analytics_nodes():
result = result and False
rows = self.webui_common.get_rows()
analytics_nodes_list_ops = self.webui_common.get_collectors_list_ops()
result = True
for n in range(len(analytics_nodes_list_ops)):
ops_analytics_node_name = analytics_nodes_list_ops[n]['name']
self.logger.info("Vn host name %s exists in op server..checking if exists in webui as well" % (
ops_analytics_node_name))
if not self.webui_common.click_monitor_analytics_nodes():
result = result and False
rows = self.webui_common.get_rows()
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_analytics_node_name:
self.logger.info("Analytics_node name %s found in webui..going to match basic details.." % (
ops_analytics_node_name))
self.logger.debug(self.dash)
match_index = i
match_flag = 1
break
if not match_flag:
self.logger.error("Analytics_node name %s did not match in webui...not found in webui" % (
ops_analytics_node_name))
self.logger.debug(self.dash)
else:
self.logger.info("Click and retrieve analytics_node basic view details in webui for \
analytics_node-name %s " % (ops_analytics_node_name))
self.webui_common.click_monitor_analytics_nodes_basic(
match_index)
dom_basic_view = self.webui_common.get_basic_view_infra()
# special handling for overall node status value
node_status = self.browser.find_element_by_id('allItems').find_element_by_tag_name(
'p').get_attribute('innerHTML').replace('\n', '').strip()
for i, item in enumerate(dom_basic_view):
if item.get('key') == 'Overall Node Status':
dom_basic_view[i]['value'] = node_status
# filter analytics_node basic view details from opserver data
analytics_nodes_ops_data = self.webui_common.get_details(
analytics_nodes_list_ops[n]['href'])
ops_basic_data = []
host_name = analytics_nodes_list_ops[n]['name']
ip_address = analytics_nodes_ops_data.get(
'CollectorState').get('self_ip_list')
ip_address = ', '.join(ip_address)
generators_count = str(
len(analytics_nodes_ops_data.get('CollectorState').get('generator_infos')))
version = json.loads(analytics_nodes_ops_data.get('CollectorState').get('build_info')).get(
'build-info')[0].get('build-id')
version = self.webui_common.get_version_string(version)
module_cpu_info_len = len(
analytics_nodes_ops_data.get('ModuleCpuState').get('module_cpu_info'))
for i in range(module_cpu_info_len):
if analytics_nodes_ops_data.get('ModuleCpuState').get('module_cpu_info')[i][
'module_id'] == 'Collector':
cpu_mem_info_dict = analytics_nodes_ops_data.get(
'ModuleCpuState').get('module_cpu_info')[i]
break
cpu = self.webui_common.get_cpu_string(cpu_mem_info_dict)
memory = self.webui_common.get_memory_string(cpu_mem_info_dict)
modified_ops_data = []
process_state_list = analytics_nodes_ops_data.get(
'ModuleCpuState').get('process_state_list')
process_down_stop_time_dict = {}
process_up_start_time_dict = {}
exclude_process_list = [
'contrail-config-nodemgr', 'contrail-analytics-nodemgr', 'contrail-control-nodemgr', 'contrail-vrouter-nodemgr',
'openstack-nova-compute', 'contrail-svc-monitor', 'contrail-discovery:0', 'contrail-zookeeper', 'contrail-schema']
for i, item in enumerate(process_state_list):
if item['process_name'] == 'redis-query':
redis_query_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-qe':
contrail_qe_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-analytics-nodemgr':
contrail_analytics_nodemgr_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'redis-uve':
redis_uve_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-opserver':
contrail_opserver_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-collector':
contrail_collector_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
for k, v in process_down_stop_time_dict.items():
if k not in exclude_process_list:
reduced_process_keys_dict[k]=v
if not reduced_process_keys_dict:
for process in exclude_process_list:
process_up_start_time_dict.pop(process, None)
recent_time = min(process_up_start_time_dict.values())
overall_node_status_time = self.webui_common.get_node_status_string(
str(recent_time))
overall_node_status_string = [
'Up since ' + status for status in overall_node_status_time]
else:
overall_node_status_down_time = self.webui_common.get_node_status_string(
str(max(reduced_process_keys_dict.values())))
process_down_count = len(reduced_process_keys_dict)
overall_node_status_string = str(
process_down_count) + ' Process down'
modified_ops_data.extend(
[{'key': 'Hostname', 'value': host_name}, {'key': 'Generators', 'value': generators_count}, {'key': 'IP Address', 'value': ip_address}, {'key': 'CPU', 'value': cpu}, {'key': 'Memory', 'value': memory}, {'key': 'Version', 'value': version}, {'key': 'Collector', 'value': contrail_collector_string},
{'key': 'Query Engine', 'value': contrail_qe_string}, {'key': 'OpServer', 'value': contrail_opserver_string}, {'key': 'Redis Query', 'value': redis_query_string}, {'key': 'Redis UVE', 'value': redis_uve_string}, {'key': 'Overall Node Status', 'value': overall_node_status_string}])
if self.webui_common.match_ops_with_webui(modified_ops_data, dom_basic_view):
self.logger.info(
"Ops %s uves analytics_nodes basic view details data matched in webui" %
(ops_analytics_node_name))
else:
self.logger.error(
"Ops %s uves analytics_nodes basic view details data match failed in webui" %
(ops_analytics_node_name))
result = result and False
return result
# end verify_analytics_nodes_ops_basic_data_in_webui
def verify_config_nodes_ops_basic_data(self):
self.logger.info(
"Verifying config_node basic ops-data in Webui monitor->infra->Config Nodes->details(basic view)...")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_config_nodes():
result = result and False
rows = self.webui_common.get_rows()
config_nodes_list_ops = self.webui_common.get_config_nodes_list_ops()
result = True
for n in range(len(config_nodes_list_ops)):
ops_config_node_name = config_nodes_list_ops[n]['name']
self.logger.info("Vn host name %s exists in op server..checking if exists in webui as well" % (
ops_config_node_name))
if not self.webui_common.click_monitor_config_nodes():
result = result and False
rows = self.webui_common.get_rows()
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_config_node_name:
self.logger.info("Config_node name %s found in webui..going to match basic details..." % (
ops_config_node_name))
self.logger.debug(self.dash)
match_index = i
match_flag = 1
break
if not match_flag:
self.logger.error("Config_node name %s did not match in webui...not found in webui" % (
ops_config_node_name))
self.logger.debug(self.dash)
else:
self.logger.info("Click and retrieve config_node basic view details in webui for \
config_node-name %s " % (ops_config_node_name))
# filter config_node basic view details from opserver data
config_nodes_ops_data = self.webui_common.get_details(
config_nodes_list_ops[n]['href'])
self.webui_common.click_monitor_config_nodes_basic(match_index)
dom_basic_view = self.webui_common.get_basic_view_infra()
ops_basic_data = []
host_name = config_nodes_list_ops[n]['name']
ip_address = config_nodes_ops_data.get(
'ModuleCpuState').get('config_node_ip')
if not ip_address:
ip_address = '--'
else:
ip_address = ', '.join(ip_address)
process_state_list = config_nodes_ops_data.get(
'ModuleCpuState').get('process_state_list')
process_down_stop_time_dict = {}
process_up_start_time_dict = {}
exclude_process_list = [
'contrail-config-nodemgr', 'contrail-analytics-nodemgr', 'contrail-control-nodemgr', 'contrail-vrouter-nodemgr',
'openstack-nova-compute', 'contrail-svc-monitor', 'contrail-discovery:0', 'contrail-zookeeper', 'contrail-schema']
for i, item in enumerate(process_state_list):
if item['process_name'] == 'contrail-api:0':
api_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'ifmap':
ifmap_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-discovery:0':
discovery_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-schema':
schema_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-svc-monitor':
monitor_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
for k, v in process_down_stop_time_dict.items():
if k not in exclude_process_list:
reduced_process_keys_dict[k]=v
if not reduced_process_keys_dict:
for process in exclude_process_list:
process_up_start_time_dict.pop(process, None)
recent_time = max(process_up_start_time_dict.values())
overall_node_status_time = self.webui_common.get_node_status_string(
str(recent_time))
overall_node_status_string = [
'Up since ' + status for status in overall_node_status_time]
else:
overall_node_status_down_time = self.webui_common.get_node_status_string(
str(max(reduced_process_keys_dict.values())))
process_down_count = len(reduced_process_keys_dict)
overall_node_status_string = str(
process_down_count) + ' Process down'
# special handling for overall node status value
node_status = self.browser.find_element_by_id('allItems').find_element_by_tag_name(
'p').get_attribute('innerHTML').replace('\n', '').strip()
for i, item in enumerate(dom_basic_view):
if item.get('key') == 'Overall Node Status':
dom_basic_view[i]['value'] = node_status
version = config_nodes_ops_data.get(
'ModuleCpuState').get('build_info')
if not version:
version = '--'
else:
version = json.loads(config_nodes_ops_data.get('ModuleCpuState').get('build_info')).get(
'build-info')[0].get('build-id')
version = self.webui_common.get_version_string(version)
module_cpu_info_len = len(
config_nodes_ops_data.get('ModuleCpuState').get('module_cpu_info'))
cpu_mem_info_dict = {}
for i in range(module_cpu_info_len):
if config_nodes_ops_data.get('ModuleCpuState').get('module_cpu_info')[i][
'module_id'] == 'ApiServer':
cpu_mem_info_dict = config_nodes_ops_data.get(
'ModuleCpuState').get('module_cpu_info')[i]
break
if not cpu_mem_info_dict:
cpu = '--'
memory = '--'
else:
cpu = self.webui_common.get_cpu_string(cpu_mem_info_dict)
memory = self.webui_common.get_memory_string(
cpu_mem_info_dict)
modified_ops_data = []
generator_list = self.webui_common.get_generators_list_ops()
for element in generator_list:
if element['name'] == ops_config_node_name + ':Config:Contrail-Config-Nodemgr:0':
analytics_data = element['href']
generators_vrouters_data = self.webui_common.get_details(
element['href'])
analytics_data = generators_vrouters_data.get(
'ModuleClientState').get('client_info')
if analytics_data['status'] == 'Established':
analytics_primary_ip = analytics_data[
'primary'].split(':')[0] + ' (Up)'
modified_ops_data.extend(
[{'key': 'Hostname', 'value': host_name}, {'key': 'IP Address', 'value': ip_address}, {'key': 'CPU', 'value': cpu}, {'key': 'Memory', 'value': memory}, {'key': 'Version', 'value': version}, {'key': 'API Server', 'value': api_string},
{'key': 'Discovery', 'value': discovery_string}, {'key': 'Service Monitor', 'value': monitor_string}, {'key': 'Ifmap', 'value': ifmap_string}, {'key': 'Schema Transformer', 'value': schema_string}, {'key': 'Overall Node Status', 'value': overall_node_status_string}])
self.webui_common.match_ops_with_webui(
modified_ops_data, dom_basic_view)
if self.webui_common.match_ops_with_webui(modified_ops_data, dom_basic_view):
self.logger.info(
"Ops %s uves config_nodes basic view details data matched in webui" %
(ops_config_node_name))
else:
self.logger.error(
"Ops %s uves config_nodes basic view details data match failed in webui" % (ops_config_node_name))
result = result and False
return result
# end verify_config_nodes_ops_basic_data_in_webui
def verify_vrouter_ops_basic_data(self):
result = True
self.logger.info(
"Verifying vrouter basic ops-data in Webui monitor->infra->Virtual routers->details(basic view)...")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_vrouters():
result = result and False
rows = self.webui_common.get_rows()
vrouters_list_ops = self.webui_common.get_vrouters_list_ops()
for n in range(len(vrouters_list_ops)):
ops_vrouter_name = vrouters_list_ops[n]['name']
self.logger.info(
"Vn host name %s exists in op server..checking if exists in webui as well" %
(ops_vrouter_name))
if not self.webui_common.click_monitor_vrouters():
result = result and False
rows = self.webui_common.get_rows()
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_vrouter_name:
self.logger.info(
"Vrouter name %s found in webui..going to match basic details..." % (ops_vrouter_name))
self.logger.debug(self.dash)
match_index = i
match_flag = 1
break
if not match_flag:
self.logger.error(
"Vrouter name %s did not match in webui...not found in webui" % (ops_vrouter_name))
self.logger.debug(self.dash)
else:
self.logger.info(
"Click and retrieve vrouter basic view details in webui for vrouter-name %s " % (ops_vrouter_name))
self.webui_common.click_monitor_vrouters_basic(match_index)
dom_basic_view = self.webui_common.get_basic_view_infra()
# special handling for overall node status value
node_status = self.browser.find_element_by_id('allItems').find_element_by_tag_name(
'p').get_attribute('innerHTML').replace('\n', '').strip()
for i, item in enumerate(dom_basic_view):
if item.get('key') == 'Overall Node Status':
dom_basic_view[i]['value'] = node_status
# special handling for control nodes
control_nodes = self.browser.find_element_by_class_name(
'table-cell').text
for i, item in enumerate(dom_basic_view):
if item.get('key') == 'Control Nodes':
dom_basic_view[i]['value'] = control_nodes
# filter vrouter basic view details from opserver data
vrouters_ops_data = self.webui_common.get_details(
vrouters_list_ops[n]['href'])
ops_basic_data = []
host_name = vrouters_list_ops[n]['name']
ip_address = vrouters_ops_data.get(
'VrouterAgent').get('self_ip_list')[0]
version = json.loads(vrouters_ops_data.get('VrouterAgent').get('build_info')).get(
'build-info')[0].get('build-id')
version = version.split('-')
version = version[0] + ' (Build ' + version[1] + ')'
xmpp_messages = vrouters_ops_data.get(
'VrouterStatsAgent').get('xmpp_stats_list')
for i, item in enumerate(xmpp_messages):
if item['ip'] == ip_address:
xmpp_in_msgs = item['in_msgs']
xmpp_out_msgs = item['out_msgs']
xmpp_msgs_string = str(xmpp_in_msgs) + \
' In ' + \
str(xmpp_out_msgs) + ' Out'
break
total_flows = vrouters_ops_data.get(
'VrouterStatsAgent').get('total_flows')
active_flows = vrouters_ops_data.get(
'VrouterStatsAgent').get('active_flows')
flow_count_string = str(active_flows) + \
' Active, ' + \
str(total_flows) + ' Total'
if vrouters_ops_data.get('VrouterAgent').get('connected_networks'):
networks = str(
len(vrouters_ops_data.get('VrouterAgent').get('connected_networks')))
else:
networks = '--'
interfaces = str(vrouters_ops_data.get('VrouterAgent')
.get('total_interface_count'))
if vrouters_ops_data.get('VrouterAgent').get('virtual_machine_list'):
instances = str(
len(vrouters_ops_data.get('VrouterAgent').get('virtual_machine_list')))
else:
instances = '--'
cpu = vrouters_ops_data.get('VrouterStatsAgent').get(
'cpu_info').get('cpu_share')
cpu = str(round(cpu, 2)) + ' %'
memory = vrouters_ops_data.get('VrouterStatsAgent').get(
'cpu_info').get('meminfo').get('virt')
memory = memory / 1024.0
if memory < 1024:
memory = str(round(memory, 2)) + ' MB'
else:
memory = str(round(memory / 1024), 2) + ' GB'
last_log = vrouters_ops_data.get(
'VrouterAgent').get('total_interface_count')
modified_ops_data = []
process_state_list = vrouters_ops_data.get(
'VrouterStatsAgent').get('process_state_list')
process_down_stop_time_dict = {}
process_up_start_time_dict = {}
exclude_process_list = [
'contrail-config-nodemgr', 'contrail-analytics-nodemgr', 'contrail-control-nodemgr', 'contrail-vrouter-nodemgr',
'openstack-nova-compute', 'contrail-svc-monitor', 'contrail-discovery:0', 'contrail-zookeeper', 'contrail-schema']
for i, item in enumerate(process_state_list):
if item['process_name'] == 'contrail-vrouter':
contrail_vrouter_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-vrouter-nodemgr':
contrail_vrouter_nodemgr_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'openstack-nova-compute':
openstack_nova_compute_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
for k, v in process_down_stop_time_dict.items():
if k not in exclude_process_list:
reduced_process_keys_dict[k] = v
'''
if not reduced_process_keys_dict :
recent_time = max(process_up_start_time_dict.values())
overall_node_status_time = self.webui_common.get_node_status_string(str(recent_time))
overall_node_status_string = ['Up since ' + status for status in overall_node_status_time]
else:
overall_node_status_down_time = self.webui_common.get_node_status_string(str(max(reduced_process_keys_dict.values())))
overall_node_status_string = ['Down since ' + status for status in overall_node_status_down_time]
'''
if not reduced_process_keys_dict:
for process in exclude_process_list:
process_up_start_time_dict.pop(process, None)
recent_time = max(process_up_start_time_dict.values())
overall_node_status_time = self.webui_common.get_node_status_string(
str(recent_time))
overall_node_status_string = [
'Up since ' + status for status in overall_node_status_time]
else:
overall_node_status_down_time = self.webui_common.get_node_status_string(
str(max(reduced_process_keys_dict.values())))
process_down_count = len(reduced_process_keys_dict)
process_down_list = reduced_process_keys_dict.keys()
overall_node_status_string = str(
process_down_count) + ' Process down'
generator_list = self.webui_common.get_generators_list_ops()
for element in generator_list:
if element['name'] == ops_vrouter_name + ':Compute:VRouterAgent:0':
analytics_data = element['href']
break
generators_vrouters_data = self.webui_common.get_details(
element['href'])
analytics_data = generators_vrouters_data.get(
'ModuleClientState').get('client_info')
if analytics_data['status'] == 'Established':
analytics_primary_ip = analytics_data[
'primary'].split(':')[0] + ' (Up)'
tx_socket_bytes = analytics_data.get(
'tx_socket_stats').get('bytes')
tx_socket_size = self.webui_common.get_memory_string(
int(tx_socket_bytes))
analytics_msg_count = generators_vrouters_data.get(
'ModuleClientState').get('session_stats').get('num_send_msg')
offset = 5
analytics_msg_count_list = range(
int(analytics_msg_count) - offset, int(analytics_msg_count) + offset)
analytics_messages_string = [
str(count) + ' [' + str(size) + ']' for count in analytics_msg_count_list for size in tx_socket_size]
control_nodes_list = vrouters_ops_data.get(
'VrouterAgent').get('xmpp_peer_list')
control_nodes_string = ''
for node in control_nodes_list:
if node['status'] == True and node['primary'] == True:
control_ip = node['ip']
control_nodes_string = control_ip + '* (Up)'
index = control_nodes_list.index(node)
del control_nodes_list[index]
for node in control_nodes_list:
node_ip = node['ip']
if node['status'] == True:
control_nodes_string = control_nodes_string + \
', ' + node_ip + ' (Up)'
else:
control_nodes_string = control_nodes_string + \
', ' + node_ip + ' (Down)'
modified_ops_data.extend(
[{'key': 'Flow Count', 'value': flow_count_string}, {'key': 'Hostname', 'value': host_name}, {'key': 'IP Address', 'value': ip_address}, {'key': 'Networks', 'value': networks}, {'key': 'Instances', 'value': instances}, {'key': 'CPU', 'value': cpu}, {'key': 'Memory', 'value': memory}, {'key': 'Version', 'value': version},
{'key': 'vRouter Agent', 'value': contrail_vrouter_string}, {'key': 'Overall Node Status', 'value': overall_node_status_string}, {'key': 'Analytics Node', 'value': analytics_primary_ip}, {'key': 'Analytics Messages', 'value': analytics_messages_string}, {'key': 'Control Nodes', 'value': control_nodes_string}])
self.webui_common.match_ops_with_webui(
modified_ops_data, dom_basic_view)
if self.webui_common.match_ops_with_webui(modified_ops_data, dom_basic_view):
self.logger.info(
"Ops %s uves vrouters basic view details data matched in webui" % (ops_vrouter_name))
else:
self.logger.error(
"Ops %s uves vrouters basic view details data match failed in webui" % (ops_vrouter_name))
result = result and False
return result
# end verify_vrouter_ops_basic_data_in_webui
def verify_vrouter_ops_advance_data(self):
self.logger.info(
"Verifying vrouter Ops-data in Webui monitor->infra->Virtual Routers->details(advance view)......")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_vrouters():
result = result and False
rows = self.webui_common.get_rows()
vrouters_list_ops = self.webui_common.get_vrouters_list_ops()
result = True
for n in range(len(vrouters_list_ops)):
ops_vrouter_name = vrouters_list_ops[n]['name']
self.logger.info(
"Vn host name %s exists in op server..checking if exists in webui as well" %
(ops_vrouter_name))
if not self.webui_common.click_monitor_vrouters():
result = result and False
rows = self.webui_common.get_rows()
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_vrouter_name:
self.logger.info(
"Vrouter name %s found in webui..going to match advance details..." % (ops_vrouter_name))
self.logger.debug(self.dash)
match_index = i
match_flag = 1
break
if not match_flag:
self.logger.error(
"Vrouter name %s did not match in webui...not found in webui" % (ops_vrouter_name))
self.logger.debug(self.dash)
else:
self.logger.info(
"Click and retrieve vrouter advance details in webui for vrouter-name %s " % (ops_vrouter_name))
self.webui_common.click_monitor_vrouters_advance(match_index)
vrouters_ops_data = self.webui_common.get_details(
vrouters_list_ops[n]['href'])
dom_arry = self.webui_common.parse_advanced_view()
dom_arry_str = self.webui_common.get_advanced_view_str()
dom_arry_num = self.webui_common.get_advanced_view_num()
dom_arry_num_new = []
for item in dom_arry_num:
dom_arry_num_new.append(
{'key': item['key'].replace('\\', '"').replace(' ', ''), 'value': item['value']})
dom_arry_num = dom_arry_num_new
merged_arry = dom_arry + dom_arry_str + dom_arry_num
if vrouters_ops_data.has_key('VrouterStatsAgent'):
ops_data = vrouters_ops_data['VrouterStatsAgent']
history_del_list = [
'total_in_bandwidth_utilization', 'cpu_share', 'used_sys_mem',
'one_min_avg_cpuload', 'virt_mem', 'total_out_bandwidth_utilization']
for item in history_del_list:
if ops_data.get(item):
for element in ops_data.get(item):
if element.get('history-10'):
del element['history-10']
if element.get('s-3600-topvals'):
del element['s-3600-topvals']
modified_ops_data = []
self.webui_common.extract_keyvalue(
ops_data, modified_ops_data)
if vrouters_ops_data.has_key('VrouterAgent'):
ops_data_agent = vrouters_ops_data['VrouterAgent']
modified_ops_data_agent = []
self.webui_common.extract_keyvalue(
ops_data_agent, modified_ops_data_agent)
complete_ops_data = modified_ops_data + \
modified_ops_data_agent
for k in range(len(complete_ops_data)):
if type(complete_ops_data[k]['value']) is list:
for m in range(len(complete_ops_data[k]['value'])):
complete_ops_data[k]['value'][m] = str(
complete_ops_data[k]['value'][m])
elif type(complete_ops_data[k]['value']) is unicode:
complete_ops_data[k]['value'] = str(
complete_ops_data[k]['value'])
else:
complete_ops_data[k]['value'] = str(
complete_ops_data[k]['value'])
if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry):
self.logger.info(
"Ops %s uves virual networks advance view data matched in webui" % (ops_vrouter_name))
else:
self.logger.error(
"Ops %s uves virual networks advance data match failed in webui" % (ops_vrouter_name))
result = result and False
return result
# end verify_vrouter_ops_advance_data_in_webui
def verify_bgp_routers_ops_basic_data(self):
self.logger.info(
"Verifying Control Nodes basic ops-data in Webui monitor->infra->Control Nodes->details(basic view)......")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_control_nodes():
result = result and False
rows = self.webui_common.get_rows()
bgp_routers_list_ops = self.webui_common.get_bgp_routers_list_ops()
result = True
for n in range(len(bgp_routers_list_ops)):
ops_bgp_routers_name = bgp_routers_list_ops[n]['name']
self.logger.info("Control node host name %s exists in op server..checking if exists \
in webui as well" % (ops_bgp_routers_name))
if not self.webui_common.click_monitor_control_nodes():
result = result and False
rows = self.webui_common.get_rows()
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_bgp_routers_name:
self.logger.info("Bgp routers name %s found in webui..going to match basic details..." % (
ops_bgp_routers_name))
self.logger.debug(self.dash)
match_index = i
match_flag = 1
break
if not match_flag:
self.logger.error("Bgp routers name %s did not match in webui...not found in webui" % (
ops_bgp_routers_name))
self.logger.debug(self.dash)
else:
self.logger.info("Click and retrieve control nodes basic view details in webui for \
control node name %s " % (ops_bgp_routers_name))
self.webui_common.click_monitor_control_nodes_basic(
match_index)
dom_basic_view = self.webui_common.get_basic_view_infra()
# special handling for overall node status value
node_status = self.browser.find_element_by_id('allItems').find_element_by_tag_name(
'p').get_attribute('innerHTML').replace('\n', '').strip()
for i, item in enumerate(dom_basic_view):
if item.get('key') == 'Overall Node Status':
dom_basic_view[i]['value'] = node_status
# filter bgp_routers basic view details from opserver data
bgp_routers_ops_data = self.webui_common.get_details(
bgp_routers_list_ops[n]['href'])
ops_basic_data = []
host_name = bgp_routers_list_ops[n]['name']
ip_address = bgp_routers_ops_data.get(
'BgpRouterState').get('bgp_router_ip_list')[0]
if not ip_address:
ip_address = '--'
version = json.loads(bgp_routers_ops_data.get('BgpRouterState').get('build_info')).get(
'build-info')[0].get('build-id')
version = self.webui_common.get_version_string(version)
bgp_peers_string = 'BGP Peers: ' + \
str(bgp_routers_ops_data.get('BgpRouterState')
.get('num_bgp_peer')) + ' Total'
vrouters = 'vRouters: ' + \
str(bgp_routers_ops_data.get('BgpRouterState')
.get('num_up_xmpp_peer')) + ' Established in Sync'
cpu = bgp_routers_ops_data.get('BgpRouterState')
memory = bgp_routers_ops_data.get('BgpRouterState')
if not cpu:
cpu = '--'
memory = '--'
else:
cpu = self.webui_common.get_cpu_string(cpu)
memory = self.webui_common.get_memory_string(memory)
generator_list = self.webui_common.get_generators_list_ops()
for element in generator_list:
if element['name'] == ops_bgp_routers_name + ':Control:ControlNode:0':
analytics_data = element['href']
generators_vrouters_data = self.webui_common.get_details(
element['href'])
analytics_data = generators_vrouters_data.get(
'ModuleClientState').get('client_info')
if analytics_data['status'] == 'Established':
analytics_primary_ip = analytics_data[
'primary'].split(':')[0] + ' (Up)'
tx_socket_bytes = analytics_data.get(
'tx_socket_stats').get('bytes')
tx_socket_size = self.webui_common.get_memory_string(
int(tx_socket_bytes))
analytics_msg_count = generators_vrouters_data.get(
'ModuleClientState').get('session_stats').get('num_send_msg')
offset = 10
analytics_msg_count_list = range(
int(analytics_msg_count) - offset, int(analytics_msg_count) + offset)
analytics_messages_string = [
str(count) + ' [' + str(size) + ']' for count in analytics_msg_count_list for size in tx_socket_size]
ifmap_ip = bgp_routers_ops_data.get('BgpRouterState').get(
'ifmap_info').get('url').split(':')[0]
ifmap_connection_status = bgp_routers_ops_data.get(
'BgpRouterState').get('ifmap_info').get('connection_status')
ifmap_connection_status_change = bgp_routers_ops_data.get(
'BgpRouterState').get('ifmap_info').get('connection_status_change_at')
ifmap_connection_string = [ifmap_ip + ' (' + ifmap_connection_status + ' since ' + time +
')' for time in self.webui_common.get_node_status_string(ifmap_connection_status_change)]
process_state_list = bgp_routers_ops_data.get(
'BgpRouterState').get('process_state_list')
process_down_stop_time_dict = {}
process_up_start_time_dict = {}
exclude_process_list = [
'contrail-config-nodemgr', 'contrail-analytics-nodemgr', 'contrail-control-nodemgr', 'contrail-vrouter-nodemgr',
'openstack-nova-compute', 'contrail-svc-monitor', 'contrail-discovery:0', 'contrail-zookeeper', 'contrail-schema']
for i, item in enumerate(process_state_list):
if item['process_name'] == 'contrail-control':
control_node_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-control-nodemgr':
control_nodemgr_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-dns':
contrail_dns_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
if item['process_name'] == 'contrail-named':
contrail_named_string = self.webui_common.get_process_status_string(
item, process_down_stop_time_dict, process_up_start_time_dict)
for k, v in process_down_stop_time_dict.items():
if k not in exclude_process_list:
reduced_process_keys_dict[k] = v
if not reduced_process_keys_dict:
for process in exclude_process_list:
process_up_start_time_dict.pop(process, None)
recent_time = max(process_up_start_time_dict.values())
overall_node_status_time = self.webui_common.get_node_status_string(
str(recent_time))
overall_node_status_string = [
'Up since ' + status for status in overall_node_status_time]
else:
overall_node_status_down_time = self.webui_common.get_node_status_string(
str(max(reduced_process_keys_dict.values())))
process_down_list = reduced_process_keys_dict.keys()
process_down_count = len(reduced_process_keys_dict)
overall_node_status_string = str(
process_down_count) + ' Process down'
modified_ops_data = []
modified_ops_data.extend(
[{'key': 'Peers', 'value': bgp_peers_string}, {'key': 'Hostname', 'value': host_name}, {'key': 'IP Address', 'value': ip_address}, {'key': 'CPU', 'value': cpu}, {'key': 'Memory', 'value': memory}, {'key': 'Version', 'value': version}, {'key': 'Analytics Node',
'value': analytics_primary_ip}, {'key': 'Analytics Messages', 'value': analytics_messages_string}, {'key': 'Ifmap Connection', 'value': ifmap_connection_string}, {'key': 'Control Node', 'value': control_node_string}, {'key': 'Overall Node Status', 'value': overall_node_status_string}])
self.webui_common.match_ops_with_webui(
modified_ops_data, dom_basic_view)
if self.webui_common.match_ops_with_webui(modified_ops_data, dom_basic_view):
self.logger.info(
"Ops %s uves bgp_routers basic view details data matched in webui" %
(ops_bgp_routers_name))
else:
self.logger.error(
"Ops %s uves bgp_routers basic view details data match failed in webui" % (ops_bgp_routers_name))
result = result and False
return result
# end verify_bgp_routers_ops_basic_data_in_webui
def verify_bgp_routers_ops_advance_data(self):
self.logger.info(
"Verifying Control Nodes ops-data in Webui monitor->infra->Control Nodes->details(advance view)......")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_control_nodes():
result = result and False
rows = self.webui_common.get_rows()
bgp_routers_list_ops = self.webui_common.get_bgp_routers_list_ops()
result = True
for n in range(len(bgp_routers_list_ops)):
ops_bgp_router_name = bgp_routers_list_ops[n]['name']
self.logger.info(
"Bgp router %s exists in op server..checking if exists in webui " %
(ops_bgp_router_name))
self.logger.info(
"Clicking on bgp_routers in monitor page in Webui...")
if not self.webui_common.click_monitor_control_nodes():
result = result and False
rows = self.webui_common.get_rows()
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_bgp_router_name:
self.logger.info(
"Bgp router name %s found in webui..going to match advance details..." % (ops_bgp_router_name))
match_flag = 1
match_index = i
break
if not match_flag:
self.logger.error("Bgp router name %s not found in webui" %
(ops_bgp_router_name))
self.logger.debug(self.dash)
else:
self.logger.info(
"Click and retrieve bgp advance view details in webui for bgp router-name %s " %
(ops_bgp_router_name))
self.webui_common.click_monitor_control_nodes_advance(
match_index)
dom_arry = self.webui_common.parse_advanced_view()
dom_arry_str = self.webui_common.get_advanced_view_str()
dom_arry_num = self.webui_common.get_advanced_view_num()
dom_arry_num_new = []
for item in dom_arry_num:
dom_arry_num_new.append(
{'key': item['key'].replace('\\', '"').replace(' ', ''), 'value': item['value']})
dom_arry_num = dom_arry_num_new
merged_arry = dom_arry + dom_arry_str + dom_arry_num
bgp_routers_ops_data = self.webui_common.get_details(
bgp_routers_list_ops[n]['href'])
bgp_router_state_ops_data = bgp_routers_ops_data[
'BgpRouterState']
history_del_list = [
'total_in_bandwidth_utilization', 'cpu_share', 'used_sys_mem',
'one_min_avg_cpuload', 'virt_mem', 'total_out_bandwidth_utilization']
for item in history_del_list:
if bgp_router_state_ops_data.get(item):
for element in bgp_router_state_ops_data.get(item):
if element.get('history-10'):
del element['history-10']
if element.get('s-3600-topvals'):
del element['s-3600-topvals']
if bgp_routers_ops_data.has_key('BgpRouterState'):
bgp_router_state_ops_data = bgp_routers_ops_data[
'BgpRouterState']
modified_bgp_router_state_ops_data = []
self.webui_common.extract_keyvalue(
bgp_router_state_ops_data, modified_bgp_router_state_ops_data)
complete_ops_data = modified_bgp_router_state_ops_data
for k in range(len(complete_ops_data)):
if type(complete_ops_data[k]['value']) is list:
for m in range(len(complete_ops_data[k]['value'])):
complete_ops_data[k]['value'][m] = str(
complete_ops_data[k]['value'][m])
elif type(complete_ops_data[k]['value']) is unicode:
complete_ops_data[k]['value'] = str(
complete_ops_data[k]['value'])
else:
complete_ops_data[k]['value'] = str(
complete_ops_data[k]['value'])
if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry):
self.logger.info(
"Ops uves bgp router advanced view data matched in webui")
else:
self.logger.error(
"Ops uves bgp router advanced view bgp router match failed in webui")
result = result and False
return result
# end verify_bgp_routers_ops_advance_data_in_webui
def verify_analytics_nodes_ops_advance_data(self):
self.logger.info(
"Verifying analytics_nodes(collectors) ops-data in Webui monitor->infra->Analytics Nodes->details(advance view)......")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_analytics_nodes():
result = result and False
rows = self.webui_common.get_rows()
analytics_nodes_list_ops = self.webui_common.get_collectors_list_ops()
result = True
for n in range(len(analytics_nodes_list_ops)):
ops_analytics_node_name = analytics_nodes_list_ops[n]['name']
self.logger.info(
"Analytics node %s exists in op server..checking if exists in webui " %
(ops_analytics_node_name))
self.logger.info(
"Clicking on analytics_nodes in monitor page in Webui...")
if not self.webui_common.click_monitor_analytics_nodes():
result = result and False
rows = self.webui_common.get_rows()
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_analytics_node_name:
self.logger.info(
"Analytics node name %s found in webui..going to match advance details..." %
(ops_analytics_node_name))
match_flag = 1
match_index = i
break
if not match_flag:
self.logger.error("Analytics node name %s not found in webui" %
(ops_analytics_node_name))
self.logger.debug(self.dash)
else:
self.logger.info(
"Click and retrieve analytics advance view details in webui for analytics node-name %s " %
(ops_analytics_node_name))
self.webui_common.click_monitor_analytics_nodes_advance(
match_index)
analytics_nodes_ops_data = self.webui_common.get_details(
analytics_nodes_list_ops[n]['href'])
dom_arry = self.webui_common.parse_advanced_view()
dom_arry_str = self.webui_common.get_advanced_view_str()
dom_arry_num = self.webui_common.get_advanced_view_num()
dom_arry_num_new = []
for item in dom_arry_num:
dom_arry_num_new.append(
{'key': item['key'].replace('\\', '"').replace(' ', ''), 'value': item['value']})
dom_arry_num = dom_arry_num_new
merged_arry = dom_arry + dom_arry_str + dom_arry_num
modified_query_perf_info_ops_data = []
modified_module_cpu_state_ops_data = []
modified_analytics_cpu_state_ops_data = []
modified_collector_state_ops_data = []
history_del_list = [
'opserver_mem_virt', 'queryengine_cpu_share', 'opserver_cpu_share',
'collector_cpu_share', 'collector_mem_virt', 'queryengine_mem_virt', 'enq_delay']
if analytics_nodes_ops_data.has_key('QueryPerfInfo'):
query_perf_info_ops_data = analytics_nodes_ops_data[
'QueryPerfInfo']
for item in history_del_list:
if query_perf_info_ops_data.get(item):
for element in query_perf_info_ops_data.get(item):
if element.get('history-10'):
del element['history-10']
if element.get('s-3600-topvals'):
del element['s-3600-topvals']
if element.get('s-3600-summary'):
del element['s-3600-summary']
self.webui_common.extract_keyvalue(
query_perf_info_ops_data, modified_query_perf_info_ops_data)
if analytics_nodes_ops_data.has_key('ModuleCpuState'):
module_cpu_state_ops_data = analytics_nodes_ops_data[
'ModuleCpuState']
for item in history_del_list:
if module_cpu_state_ops_data.get(item):
for element in module_cpu_state_ops_data.get(item):
if element.get('history-10'):
del element['history-10']
if element.get('s-3600-topvals'):
del element['s-3600-topvals']
if element.get('s-3600-summary'):
del element['s-3600-summary']
self.webui_common.extract_keyvalue(
module_cpu_state_ops_data, modified_module_cpu_state_ops_data)
if analytics_nodes_ops_data.has_key('AnalyticsCpuState'):
analytics_cpu_state_ops_data = analytics_nodes_ops_data[
'AnalyticsCpuState']
modified_analytics_cpu_state_ops_data = []
self.webui_common.extract_keyvalue(
analytics_cpu_state_ops_data, modified_analytics_cpu_state_ops_data)
if analytics_nodes_ops_data.has_key('CollectorState'):
collector_state_ops_data = analytics_nodes_ops_data[
'CollectorState']
self.webui_common.extract_keyvalue(
collector_state_ops_data, modified_collector_state_ops_data)
complete_ops_data = modified_query_perf_info_ops_data + modified_module_cpu_state_ops_data + \
modified_analytics_cpu_state_ops_data + \
modified_collector_state_ops_data
for k in range(len(complete_ops_data)):
if type(complete_ops_data[k]['value']) is list:
for m in range(len(complete_ops_data[k]['value'])):
complete_ops_data[k]['value'][m] = str(
complete_ops_data[k]['value'][m])
elif type(complete_ops_data[k]['value']) is unicode:
complete_ops_data[k]['value'] = str(
complete_ops_data[k]['value'])
else:
complete_ops_data[k]['value'] = str(
complete_ops_data[k]['value'])
if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry):
self.logger.info(
"Ops uves analytics node advance view data matched in webui")
else:
self.logger.error(
"Ops uves analytics node match failed in webui")
result = result and False
return result
# end verify_analytics_nodes_ops_advance_data_in_webui
def verify_vm_ops_basic_data(self):
self.logger.info(
"Verifying VM basic ops-data in Webui monitor->Networking->instances summary(basic view)......")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_instances():
result = result and False
rows = self.webui_common.get_rows()
vm_list_ops = self.webui_common.get_vm_list_ops()
result = True
for k in range(len(vm_list_ops)):
ops_uuid = vm_list_ops[k]['name']
if not self.webui_common.click_monitor_instances():
result = result and False
rows = self.webui_common.get_rows()
self.logger.info(
"Vm uuid %s exists in op server..checking if exists in webui as well" % (ops_uuid))
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[2].text == ops_uuid:
self.logger.info(
"Vm uuid %s matched in webui..going to match basic view details..." % (ops_uuid))
self.logger.debug(self.dash)
match_index = i
match_flag = 1
vm_name = rows[i].find_elements_by_class_name(
'slick-cell')[1].text
break
if not match_flag:
self.logger.error(
"Uuid exists in opserver but uuid %s not found in webui..." % (ops_uuid))
self.logger.debug(self.dash)
else:
self.webui_common.click_monitor_instances_basic(match_index)
self.logger.info(
"Click and retrieve basic view details in webui for uuid %s " % (ops_uuid))
dom_arry_basic = self.webui_common.get_vm_basic_view()
len_dom_arry_basic = len(dom_arry_basic)
elements = self.browser.find_element_by_xpath(
"//*[contains(@id, 'basicDetails')]").find_elements_by_class_name('row-fluid')
len_elements = len(elements)
vm_ops_data = self.webui_common.get_details(
vm_list_ops[k]['href'])
complete_ops_data = []
if vm_ops_data.has_key('UveVirtualMachineAgent'):
# get vm interface basic details from opserver
ops_data_interface_list = vm_ops_data[
'UveVirtualMachineAgent']['interface_list']
for k in range(len(ops_data_interface_list)):
del ops_data_interface_list[k]['l2_active']
if ops_data_interface_list[k].get('floating_ips'):
fip_list = ops_data_interface_list[
k].get('floating_ips')
floating_ip = None
fip_list_len = len(fip_list)
for index, element in enumerate(fip_list):
ops_data_interface_list[k][
'floating_ips'] = element.get('ip_address')
ops_data_interface_list[k][
'floating_ip_pool'] = element.get('virtual_network')
# if index == 0:
# floating_ip = element.get('ip_address') + ' (' + element.get('virtual_network') + ')'
# else:
# floating_ip = floating_ip + ' , ' + element.get('ip_address') + ' (' + element.get('virtual_network') + ')'
#ops_data_interface_list[k]['floating_ips'] = floating_ip
modified_ops_data_interface_list = []
self.webui_common.extract_keyvalue(
ops_data_interface_list[k], modified_ops_data_interface_list)
complete_ops_data = complete_ops_data + \
modified_ops_data_interface_list
for t in range(len(complete_ops_data)):
if type(complete_ops_data[t]['value']) is list:
for m in range(len(complete_ops_data[t]['value'])):
complete_ops_data[t]['value'][m] = str(
complete_ops_data[t]['value'][m])
elif type(complete_ops_data[t]['value']) is unicode:
complete_ops_data[t]['value'] = str(
complete_ops_data[t]['value'])
else:
complete_ops_data[t]['value'] = str(
complete_ops_data[t]['value'])
# get vm basic interface details excluding basic interface
# details
dom_arry_intf = []
dom_arry_intf.insert(0, {'key': 'vm_name', 'value': vm_name})
# insert non interface elements in list
for i in range(len_dom_arry_basic):
element_key = elements[
i].find_elements_by_tag_name('div')[0].text
element_value = elements[
i].find_elements_by_tag_name('div')[1].text
dom_arry_intf.append(
{'key': element_key, 'value': element_value})
fip_rows_index = False
for i in range(len_dom_arry_basic + 1, len_elements):
if not fip_rows_index:
elements_key = elements[
len_dom_arry_basic].find_elements_by_tag_name('div')
else:
elements_key = elements[
fip_rows_index].find_elements_by_tag_name('div')
elements_value = elements[
i].find_elements_by_tag_name('div')
if not elements_value[0].text == 'Floating IPs':
for j in range(len(elements_key)):
if j == 2 and not fip_rows_index:
dom_arry_intf.append(
{'key': 'ip_address', 'value': elements_value[j].text.split('/')[0].strip()})
dom_arry_intf.append(
{'key': 'mac_address', 'value': elements_value[j].text.split('/')[1].strip()})
else:
dom_arry_intf.append(
{'key': elements_key[j].text, 'value': elements_value[j].text})
else:
fip_rows_index = i
continue
for element in complete_ops_data:
if element['key'] == 'name':
index = complete_ops_data.index(element)
del complete_ops_data[index]
if self.webui_common.match_ops_values_with_webui(complete_ops_data, dom_arry_intf):
self.logger.info(
"Ops vm uves basic view data matched in webui")
else:
self.logger.error(
"Ops vm uves basic data match failed in webui")
result = result and False
return result
# end verify_vm_ops_basic_data_in_webui
def verify_dashboard_details(self):
self.logger.info("Verifying dashboard details...")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_dashboard():
result = result and False
dashboard_node_details = self.browser.find_element_by_id(
'topStats').find_elements_by_class_name('infobox-data-number')
dashboard_data_details = self.browser.find_element_by_id(
'sparkLineStats').find_elements_by_class_name('infobox-data-number')
dashboard_system_details = self.browser.find_element_by_id(
'system-info-stat').find_elements_by_tag_name('li')
servers_ver = self.webui_common.find_element(
self.browser, ['system-info-stat', 'value'], ['id', 'class'], [1])
servers = servers_ver[0].text
version = servers_ver[1].text
dom_data = []
dom_data.append(
{'key': 'vrouters', 'value': dashboard_node_details[0].text})
dom_data.append(
{'key': 'control_nodes', 'value': dashboard_node_details[1].text})
dom_data.append(
{'key': 'analytics_nodes', 'value': dashboard_node_details[2].text})
dom_data.append(
{'key': 'config_nodes', 'value': dashboard_node_details[3].text})
dom_data.append(
{'key': 'instances', 'value': dashboard_data_details[0].text})
dom_data.append(
{'key': 'interfaces', 'value': dashboard_data_details[1].text})
dom_data.append(
{'key': 'virtual_networks', 'value': dashboard_data_details[2].text})
dom_data.append({'key': dashboard_system_details[0].find_element_by_class_name(
'key').text, 'value': dashboard_system_details[0].find_element_by_class_name('value').text})
dom_data.append({'key': dashboard_system_details[1].find_element_by_class_name(
'key').text, 'value': dashboard_system_details[1].find_element_by_class_name('value').text})
ops_servers = str(len(self.webui_common.get_config_nodes_list_ops()))
ops_version = self.webui_common.get_version()
self.webui_common.append_to_list(
dom_data, [('servers', servers), ('version', version)])
ops_dashborad_data = []
if not self.webui_common.click_configure_networks():
result = result and False
rows = self.webui_common.get_rows()
vrouter_total_vm = str(len(self.webui_common.get_vm_list_ops()))
total_vrouters = str(len(self.webui_common.get_vrouters_list_ops()))
total_control_nodes = str(
len(self.webui_common.get_bgp_routers_list_ops()))
total_analytics_nodes = str(
len(self.webui_common.get_collectors_list_ops()))
total_config_nodes = str(
len(self.webui_common.get_config_nodes_list_ops()))
vrouters_list_ops = self.webui_common.get_vrouters_list_ops()
interface_count = 0
vrouter_total_vn = 0
for index in range(len(vrouters_list_ops)):
vrouters_ops_data = self.webui_common.get_details(
vrouters_list_ops[index]['href'])
if vrouters_ops_data.get('VrouterAgent').get('total_interface_count'):
interface_count = interface_count + \
vrouters_ops_data.get('VrouterAgent').get(
'total_interface_count')
if vrouters_ops_data.get('VrouterAgent').get('connected_networks'):
vrouter_total_vn = vrouter_total_vn + \
(len(vrouters_ops_data.get('VrouterAgent')
.get('connected_networks')))
ops_dashborad_data.append({'key': 'vrouters', 'value': total_vrouters})
ops_dashborad_data.append(
{'key': 'control_nodes', 'value': total_control_nodes})
ops_dashborad_data.append(
{'key': 'analytics_nodes', 'value': total_analytics_nodes})
ops_dashborad_data.append(
{'key': 'config_nodes', 'value': total_config_nodes})
ops_dashborad_data.append(
{'key': 'instances', 'value': vrouter_total_vm})
ops_dashborad_data.append(
{'key': 'interfaces', 'value': str(interface_count)})
ops_dashborad_data.append(
{'key': 'virtual_networks', 'value': str(vrouter_total_vn)})
self.webui_common.append_to_list(
ops_dashborad_data, [('servers', ops_servers), ('version', ops_version)])
result = True
if self.webui_common.match_ops_with_webui(ops_dashborad_data, dom_data):
self.logger.info("Monitor dashborad details matched")
else:
self.logger.error("Monitor dashborad details not matched")
result = result and False
return result
# end verify_dashboard_details_in_webui
def verify_vn_ops_basic_data(self):
self.logger.info("Verifying VN basic ops-data in Webui...")
self.logger.debug(self.dash)
error = 0
if not self.webui_common.click_monitor_networks():
result = result and False
rows = self.webui_common.get_rows()
vn_list_ops = self.webui_common.get_vn_list_ops()
for k in range(len(vn_list_ops)):
ops_fq_name = vn_list_ops[k]['name']
if not self.webui_common.click_monitor_networks():
result = result and False
rows = self.webui_common.get_rows()
self.logger.info(
"Vn fq_name %s exists in op server..checking if exists in webui as well" % (ops_fq_name))
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[1].text == ops_fq_name:
self.logger.info(
"Vn fq_name %s matched in webui..going to match basic view details..." % (ops_fq_name))
self.logger.debug(self.dash)
match_index = i
match_flag = 1
vn_fq_name = rows[i].find_elements_by_class_name(
'slick-cell')[1].text
break
if not match_flag:
self.logger.error(
"Vn fq_name exists in opserver but %s not found in webui..." % (ops_fq_name))
self.logger.debug(self.dash)
else:
self.webui_common.click_monitor_networks_basic(match_index)
self.logger.info(
"Click and retrieve basic view details in webui for VN fq_name %s " % (ops_fq_name))
# get vn basic details excluding basic interface details
dom_arry_basic = self.webui_common.get_vm_basic_view()
len_dom_arry_basic = len(dom_arry_basic)
elements = self.browser.find_element_by_xpath(
"//*[contains(@id, 'basicDetails')]").find_elements_by_class_name('row-fluid')
len_elements = len(elements)
vn_ops_data = self.webui_common.get_details(
vn_list_ops[k]['href'])
complete_ops_data = []
ops_data_ingress = {'key':
'ingress_flow_count', 'value': str(0)}
ops_data_egress = {'key':
'egress_flow_count', 'value': str(0)}
ops_data_acl_rules = {'key':
'total_acl_rules', 'value': str(0)}
vn_name = ops_fq_name.split(':')[2]
ops_data_interfaces_count = {
'key': 'interface_list_count', 'value': str(0)}
if vn_ops_data.has_key('UveVirtualNetworkAgent'):
# creating a list of basic view items retrieved from
# opserver
ops_data_basic = vn_ops_data.get('UveVirtualNetworkAgent')
if ops_data_basic.get('ingress_flow_count'):
ops_data_ingress = {'key': 'ingress_flow_count',
'value': ops_data_basic.get('ingress_flow_count')}
if ops_data_basic.get('egress_flow_count'):
ops_data_egress = {'key': 'egress_flow_count',
'value': ops_data_basic.get('egress_flow_count')}
if ops_data_basic.get('total_acl_rules'):
ops_data_acl_rules = {
'key': 'total_acl_rules', 'value': ops_data_basic.get('total_acl_rules')}
if ops_data_basic.get('interface_list'):
ops_data_interfaces_count = {
'key': 'interface_list_count', 'value': len(ops_data_basic.get('interface_list'))}
if ops_data_basic.get('vrf_stats_list'):
vrf_stats_list = ops_data_basic['vrf_stats_list']
vrf_stats_list_new = [vrf['name']
for vrf in vrf_stats_list]
vrf_list_joined = ','.join(vrf_stats_list_new)
ops_data_vrf = {'key': 'vrf_stats_list',
'value': vrf_list_joined}
complete_ops_data.append(ops_data_vrf)
if ops_data_basic.get('acl'):
ops_data_acl = {'key': 'acl', 'value':
ops_data_basic.get('acl')}
complete_ops_data.append(ops_data_acl)
if ops_data_basic.get('virtualmachine_list'):
ops_data_instances = {'key': 'virtualmachine_list', 'value': ', '.join(
ops_data_basic.get('virtualmachine_list'))}
complete_ops_data.append(ops_data_instances)
complete_ops_data.extend(
[ops_data_ingress, ops_data_egress, ops_data_acl_rules, ops_data_interfaces_count])
if ops_fq_name.find('__link_local__') != -1 or ops_fq_name.find('default-virtual-network') != -1 or ops_fq_name.find('ip-fabric') != -1:
for i, item in enumerate(complete_ops_data):
if complete_ops_data[i]['key'] == 'vrf_stats_list':
del complete_ops_data[i]
if vn_ops_data.has_key('UveVirtualNetworkConfig'):
ops_data_basic = vn_ops_data.get('UveVirtualNetworkConfig')
if ops_data_basic.get('attached_policies'):
ops_data_policies = ops_data_basic.get(
'attached_policies')
if ops_data_policies:
pol_name_list = [pol['vnp_name']
for pol in ops_data_policies]
pol_list_joined = ', '.join(pol_name_list)
ops_data_policies = {
'key': 'attached_policies', 'value': pol_list_joined}
complete_ops_data.extend([ops_data_policies])
for t in range(len(complete_ops_data)):
if type(complete_ops_data[t]['value']) is list:
for m in range(len(complete_ops_data[t]['value'])):
complete_ops_data[t]['value'][m] = str(
complete_ops_data[t]['value'][m])
elif type(complete_ops_data[t]['value']) is unicode:
complete_ops_data[t]['value'] = str(
complete_ops_data[t]['value'])
else:
complete_ops_data[t]['value'] = str(
complete_ops_data[t]['value'])
if self.webui_common.match_ops_values_with_webui(complete_ops_data, dom_arry_basic):
self.logger.info(
"Ops uves virutal networks basic view data matched in webui")
else:
self.logger.error(
"Ops uves virutal networks basic view data match failed in webui")
error = 1
return not error
# end verify_vn_ops_basic_data_in_webui
def verify_config_nodes_ops_advance_data(self):
self.logger.info(
"Verifying config_nodes ops-data in Webui monitor->infra->Config Nodes->details(advance view)......")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_config_nodes():
result = result and False
rows = self.webui_common.get_rows()
config_nodes_list_ops = self.webui_common.get_config_nodes_list_ops()
result = True
for n in range(len(config_nodes_list_ops)):
ops_config_node_name = config_nodes_list_ops[n]['name']
self.logger.info(
"Config node host name %s exists in op server..checking if exists in webui as well" %
(ops_config_node_name))
if not self.webui_common.click_monitor_config_nodes():
result = result and False
rows = self.webui_common.get_rows()
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[0].text == ops_config_node_name:
self.logger.info(
"Config node name %s found in webui..going to match advance view details..." % (ops_config_node_name))
match_flag = 1
match_index = i
break
if not match_flag:
self.logger.error(
"Config node name %s did not match in webui...not found in webui" %
(ops_config_node_name))
self.logger.debug(self.dash)
else:
self.logger.info(
"Click and retrieve config nodes advance view details in webui for config node-name %s " %
(ops_config_node_name))
self.webui_common.click_monitor_config_nodes_advance(
match_index)
config_nodes_ops_data = self.webui_common.get_details(
config_nodes_list_ops[n]['href'])
dom_arry = self.webui_common.parse_advanced_view()
dom_arry_str = self.webui_common.get_advanced_view_str()
dom_arry_num = self.webui_common.get_advanced_view_num()
dom_arry_num_new = []
for item in dom_arry_num:
dom_arry_num_new.append(
{'key': item['key'].replace('\\', '"').replace(' ', ''), 'value': item['value']})
dom_arry_num = dom_arry_num_new
merged_arry = dom_arry + dom_arry_str + dom_arry_num
if config_nodes_ops_data.has_key('ModuleCpuState'):
ops_data = config_nodes_ops_data['ModuleCpuState']
history_del_list = [
'api_server_mem_virt', 'service_monitor_cpu_share', 'schema_xmer_mem_virt',
'service_monitor_mem_virt', 'api_server_cpu_share', 'schema_xmer_cpu_share']
for item in history_del_list:
if ops_data.get(item):
for element in ops_data.get(item):
if element.get('history-10'):
del element['history-10']
if element.get('s-3600-topvals'):
del element['s-3600-topvals']
modified_ops_data = []
self.webui_common.extract_keyvalue(
ops_data, modified_ops_data)
complete_ops_data = modified_ops_data
for k in range(len(complete_ops_data)):
if type(complete_ops_data[k]['value']) is list:
for m in range(len(complete_ops_data[k]['value'])):
complete_ops_data[k]['value'][m] = str(
complete_ops_data[k]['value'][m])
elif type(complete_ops_data[k]['value']) is unicode:
complete_ops_data[k]['value'] = str(
complete_ops_data[k]['value'])
else:
complete_ops_data[k]['value'] = str(
complete_ops_data[k]['value'])
if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry):
self.logger.info(
"Ops uves config nodes advance view data matched in webui")
else:
self.logger.error(
"Ops uves config nodes advance view data match failed in webui")
result = result and False
return result
# end verify_config_nodes_ops_advance_data_in_webui
def verify_vn_ops_advance_data(self):
self.logger.info(
"Verifying VN advance ops-data in Webui monitor->Networking->Networks Summary(basic view)......")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_networks():
result = result and False
rows = self.webui_common.get_rows()
vn_list_ops = self.webui_common.get_vn_list_ops()
result = True
for n in range(len(vn_list_ops)):
ops_fqname = vn_list_ops[n]['name']
self.logger.info(
"Vn fq name %s exists in op server..checking if exists in webui as well" % (ops_fqname))
if not self.webui_common.click_monitor_networks():
result = result and False
rows = self.webui_common.get_rows()
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[1].text == ops_fqname:
self.logger.info(
"Vn fq name %s found in webui..going to match advance view details..." % (ops_fqname))
self.logger.debug(self.dash)
match_index = i
match_flag = 1
break
if not match_flag:
self.logger.error(
"Vn fqname %s did not match in webui...not found in webui" % (ops_fqname))
self.logger.debug(self.dash)
else:
self.logger.info(
"Click and retrieve advance view details in webui for fqname %s " % (ops_fqname))
self.webui_common.click_monitor_networks_advance(match_index)
vn_ops_data = self.webui_common.get_details(
vn_list_ops[n]['href'])
dom_arry = self.webui_common.parse_advanced_view()
dom_arry_str = self.webui_common.get_advanced_view_str()
merged_arry = dom_arry + dom_arry_str
if vn_ops_data.has_key('UveVirtualNetworkConfig'):
ops_data = vn_ops_data['UveVirtualNetworkConfig']
modified_ops_data = []
self.webui_common.extract_keyvalue(
ops_data, modified_ops_data)
if vn_ops_data.has_key('UveVirtualNetworkAgent'):
ops_data_agent = vn_ops_data['UveVirtualNetworkAgent']
if 'udp_sport_bitmap' in ops_data_agent:
del ops_data_agent['udp_sport_bitmap']
if 'udp_dport_bitmap' in ops_data_agent:
del ops_data_agent['udp_dport_bitmap']
self.logger.info(
"VN details for %s got from ops server and going to match in webui : \n %s \n " %
(vn_list_ops[i]['href'], ops_data_agent))
modified_ops_data_agent = []
self.webui_common.extract_keyvalue(
ops_data_agent, modified_ops_data_agent)
complete_ops_data = modified_ops_data + \
modified_ops_data_agent
for k in range(len(complete_ops_data)):
if type(complete_ops_data[k]['value']) is list:
for m in range(len(complete_ops_data[k]['value'])):
complete_ops_data[k]['value'][m] = str(
complete_ops_data[k]['value'][m])
elif type(complete_ops_data[k]['value']) is unicode:
complete_ops_data[k]['value'] = str(
complete_ops_data[k]['value'])
else:
complete_ops_data[k]['value'] = str(
complete_ops_data[k]['value'])
if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry):
self.logger.info(
"Ops uves virtual networks advance view data matched in webui")
else:
self.logger.error(
"Ops uves virtual networks advance view data match failed in webui")
result = result and False
return result
# end verify_vn_ops_advance_data_in_webui
def verify_vm_ops_advance_data(self):
self.logger.info(
"Verifying VM ops-data in Webui monitor->Networking->instances->Instances summary(Advance view)......")
self.logger.debug(self.dash)
if not self.webui_common.click_monitor_instances():
result = result and False
rows = self.webui_common.get_rows()
vm_list_ops = self.webui_common.get_vm_list_ops()
result = True
for k in range(len(vm_list_ops)):
ops_uuid = vm_list_ops[k]['name']
if not self.webui_common.click_monitor_instances():
result = result and False
rows = self.webui_common.get_rows()
self.logger.info(
"Vm uuid %s exists in op server..checking if exists in webui as well" % (ops_uuid))
for i in range(len(rows)):
match_flag = 0
if rows[i].find_elements_by_class_name('slick-cell')[2].text == ops_uuid:
self.logger.info(
"Vm uuid %s matched in webui..going to match advance view details..." % (ops_uuid))
self.logger.debug(self.dash)
match_index = i
match_flag = 1
break
if not match_flag:
self.logger.error(
"Uuid exists in opserver but uuid %s not found in webui..." % (ops_uuid))
self.logger.debug(self.dash)
else:
self.webui_common.click_monitor_instances_advance(match_index)
self.logger.info(
"Click and retrieve advance view details in webui for uuid %s " % (ops_uuid))
dom_arry = self.webui_common.parse_advanced_view()
dom_arry_str = []
dom_arry_str = self.webui_common.get_advanced_view_str()
merged_arry = dom_arry + dom_arry_str
vm_ops_data = self.webui_common.get_details(
vm_list_ops[k]['href'])
if vm_ops_data.has_key('UveVirtualMachineAgent'):
ops_data = vm_ops_data['UveVirtualMachineAgent']
modified_ops_data = []
self.webui_common.extract_keyvalue(
ops_data, modified_ops_data)
complete_ops_data = modified_ops_data
for t in range(len(complete_ops_data)):
if type(complete_ops_data[t]['value']) is list:
for m in range(len(complete_ops_data[t]['value'])):
complete_ops_data[t]['value'][m] = str(
complete_ops_data[t]['value'][m])
elif type(complete_ops_data[t]['value']) is unicode:
complete_ops_data[t]['value'] = str(
complete_ops_data[t]['value'])
else:
complete_ops_data[t]['value'] = str(
complete_ops_data[t]['value'])
if self.webui_common.match_ops_with_webui(complete_ops_data, merged_arry):
self.logger.info(
"Ops vm uves advance view data matched in webui")
else:
self.logger.error(
"Ops vm uves advance data match failed in webui")
result = result and False
return result
# end verify_vm_ops_advance_data_in_webui
    def verify_vn_api_data(self):
        """Verify VN details from the API server against the webui
        Configure -> Networks grid and its expanded basic view.

        For each virtual network reported by the API server (the last
        three list entries are skipped — presumably built-in networks;
        TODO confirm), find the matching grid row, expand it, and compare
        attached policies, IP blocks, floating IP pools, route targets,
        host routes, forwarding mode and VxLAN identifier key by key.

        Returns:
            bool: True when every network matched, False otherwise.
        """
        self.logger.info(
            "Verifying VN api details in Webui config networks...")
        self.logger.debug(self.dash)
        result = True
        vn_list_api = self.webui_common.get_vn_list_api()
        for vns in range(len(vn_list_api['virtual-networks']) - 3):
            # Per-network accumulators for the expected (API-side) values.
            pol_list = []
            pol_list1 = []
            ip_block_list = []
            ip_block = []
            pool_list = []
            floating_pool = []
            route_target_list = []
            host_route_main = []
            api_fq_name = vn_list_api['virtual-networks'][vns]['fq_name'][2]
            self.webui_common.click_configure_networks()
            rows = self.webui_common.get_rows()
            self.logger.info(
                "Vn fq_name %s exists in api server..checking if exists in webui as well" % (api_fq_name))
            for i in range(len(rows)):
                match_flag = 0
                dom_arry_basic = []
                # Column layout assumed: div[2] = network name, div[3] =
                # attached policies, div[4] = IP blocks — TODO confirm.
                if rows[i].find_elements_by_tag_name('div')[2].text == api_fq_name:
                    self.logger.info(
                        "Vn fq_name %s matched in webui..going to match basic view details..." % (api_fq_name))
                    self.logger.debug(self.dash)
                    match_index = i
                    match_flag = 1
                    vn_fq_name = rows[
                        i].find_elements_by_tag_name('div')[2].text
                    policies = rows[i].find_elements_by_tag_name(
                        'div')[3].text.splitlines()
                    dom_arry_basic.append(
                        {'key': 'Attached Policies', 'value': policies})
                    dom_arry_basic.append(
                        {'key': 'Network', 'value': rows[i].find_elements_by_tag_name('div')[2].text})
                    dom_arry_basic.append(
                        {'key': 'ip_blocks_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[4].text.split()})
                    break
            if not match_flag:
                self.logger.error(
                    "Vn fq_name exists in apiserver but %s not found in webui..." % (api_fq_name))
                self.logger.debug(self.dash)
            else:
                self.webui_common.click_configure_networks_basic(match_index)
                # NOTE(review): 'rows' is re-fetched here, but rows[i] is
                # still indexed further below — assumes the grid ordering
                # is unchanged after expanding the row; verify.
                rows = self.webui_common.get_rows()
                self.logger.info(
                    "Click and retrieve basic view details in webui for VN fq_name %s " % (api_fq_name))
                # The expanded detail row sits directly below the matched
                # grid row (match_index + 1).
                rows_detail = rows[
                    match_index + 1].find_element_by_class_name('slick-row-detail-container').find_element_by_class_name('row-fluid').find_elements_by_tag_name('label')
                for detail in range(len(rows_detail)):
                    text1 = rows_detail[detail].text
                    # Each label's parent element holds "<label> <value>";
                    # strip the label text to isolate the value, with
                    # per-field splitting rules.
                    if text1 == 'Attached Network Policies':
                        poli = str(rows_detail[detail].find_element_by_xpath('..').text).replace(
                            text1, '').strip().split()
                        dom_arry_basic.append(
                            {'key': str(rows_detail[detail].text), 'value': poli})
                    elif text1 == 'IP Blocks' or text1 == 'Host Routes':
                        dom_arry_basic.append({'key': str(text1), 'value': str(
                            rows_detail[detail].find_element_by_xpath('..').text).replace(text1, '').strip().splitlines()})
                    elif text1 == 'Floating IP Pools':
                        pools = rows_detail[detail].find_element_by_xpath(
                            '..').text.replace(text1, '').strip().splitlines()
                        # Keep only the pool name (first token of each line).
                        for pool in range(len(pools)):
                            pool_list.append(pools[pool].split()[0])
                        dom_arry_basic.append(
                            {'key': text1, 'value': pool_list})
                    elif text1 == 'Route Targets':
                        dom_arry_basic.append({'key': str(text1), 'value': str(
                            rows_detail[detail].find_element_by_xpath('..').text).replace(text1, '').strip().split(', ')})
                    else:
                        dom_arry_basic.append({'key': str(text1), 'value': str(
                            rows_detail[detail].find_element_by_xpath('..').text).replace(text1, '').strip()})
                vn_api_data = self.webui_common.get_details(
                    vn_list_api['virtual-networks'][vns]['href'])
                complete_api_data = []
                if vn_api_data.has_key('virtual-network'):
                    api_data_basic = vn_api_data.get('virtual-network')
                    if api_data_basic.get('name'):
                        complete_api_data.append(
                            {'key': 'Network', 'value': api_data_basic['name']})
                    if api_data_basic.has_key('network_policy_refs'):
                        for ass_pol in range(len(api_data_basic['network_policy_refs'])):
                            pol_list.append(
                                str(api_data_basic['network_policy_refs'][ass_pol]['to'][2]))
                        # The grid truncates to two policies plus an
                        # "(N more)" suffix; mirror that here.
                        if len(pol_list) > 2:
                            for item in range(len(policies)):
                                for items in range(len(pol_list)):
                                    if policies[item] == pol_list[items]:
                                        pol_list1.append(pol_list[items])
                            pol_string = '(' + str(len(pol_list) - 2) + ' more)'
                            pol_list1.append(pol_string)
                        else:
                            pol_list1 = policies
                        complete_api_data.append(
                            {'key': 'Attached Network Policies', 'value': pol_list})
                        complete_api_data.append(
                            {'key': 'Attached Policies', 'value': pol_list1})
                    if api_data_basic.has_key('network_ipam_refs'):
                        # Build "ipam subnet/prefix gateway" strings; the
                        # default IPAM is shown with its full fq_name.
                        for ip in range(len(api_data_basic['network_ipam_refs'])):
                            dom_arry_basic.append(
                                {'key': 'Attached Policies', 'value': rows[i].find_elements_by_tag_name('div')[3].text.split()})
                            if(api_data_basic['network_ipam_refs'][ip]['to'][2]) == 'default-network-ipam':
                                for ip_sub in range(len(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'])):
                                    ip_block_list.append(str(api_data_basic['network_ipam_refs'][ip]['to'][0] + ':' + api_data_basic['network_ipam_refs'][ip]['to'][1] + ':' + api_data_basic['network_ipam_refs'][ip]['to'][2]) + ' ' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets']
                                                                                                                                                                                                                                          [ip_sub]['subnet']['ip_prefix']) + '/' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'][ip_sub]['subnet']['ip_prefix_len']) + ' ' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'][ip_sub]['default_gateway']))
                            else:
                                for ip_sub1 in range(len(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'])):
                                    ip_block_list.append(str(api_data_basic['network_ipam_refs'][ip]['to'][2]) + ' ' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'][ip_sub1]['subnet']['ip_prefix']) + '/' +
                                                         str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'][ip_sub1]['subnet']['ip_prefix_len']) + ' ' + str(api_data_basic['network_ipam_refs'][ip]['attr']['ipam_subnets'][ip_sub1]['default_gateway']))
                        # Grid cell shows at most two subnets + "(N more)".
                        if len(ip_block_list) > 2:
                            for ips in range(2):
                                ip_block.append(ip_block_list[ips].split()[1])
                            ip_string = '(' + \
                                str(len(ip_block_list) - 2) + ' more)'
                            ip_block.append(ip_string)
                        else:
                            for ips in range(len(ip_block_list)):
                                ip_block.append(ip_block_list[ips].split()[1])
                        complete_api_data.append(
                            {'key': 'IP Blocks', 'value': ip_block_list})
                        complete_api_data.append(
                            {'key': 'ip_blocks_grid_row', 'value': ip_block})
                    if api_data_basic.has_key('route_target_list'):
                        if api_data_basic['route_target_list'].has_key('route_target'):
                            # NOTE(review): .strip('target:') strips those
                            # characters from both ends, not the literal
                            # prefix — preserved as-is.
                            for route in range(len(api_data_basic['route_target_list']['route_target'])):
                                route_target_list.append(
                                    str(api_data_basic['route_target_list']['route_target'][route]).strip('target:'))
                            complete_api_data.append(
                                {'key': 'Route Targets', 'value': route_target_list})
                    if api_data_basic.has_key('floating_ip_pools'):
                        for fip in range(len(api_data_basic['floating_ip_pools'])):
                            floating_pool.append(
                                str(api_data_basic['floating_ip_pools'][fip]['to'][3]))
                        complete_api_data.append(
                            {'key': 'Floating IP Pools', 'value': floating_pool})
                    if api_data_basic.has_key('network_ipam_refs'):
                        # Build "ipam route1,route2" strings per IPAM that
                        # defines host routes.
                        for ipams in range(len(api_data_basic['network_ipam_refs'])):
                            if api_data_basic['network_ipam_refs'][ipams]['attr'].get('host_routes'):
                                if api_data_basic['network_ipam_refs'][ipams]['to'][2] == 'default-network-ipam':
                                    host_route_sub = []
                                    for host_route in range(len(api_data_basic['network_ipam_refs'][ipams]['attr']['host_routes']['route'])):
                                        host_route_sub.append(
                                            str(api_data_basic['network_ipam_refs'][ipams]['attr']['host_routes']['route'][host_route]['prefix']))
                                    host_route_string = ",".join(host_route_sub)
                                    host_route_main.append(str(api_data_basic['network_ipam_refs'][ipams]['to'][
                                                           0] + ':' + api_data_basic['network_ipam_refs'][ipams]['to'][1] + ':' + api_data_basic['network_ipam_refs'][ipams]['to'][2]) + ' ' + host_route_string)
                                else:
                                    host_route_sub = []
                                    for host_route1 in range(len(api_data_basic['network_ipam_refs'][ipams]['attr']['host_routes']['route'])):
                                        host_route_sub.append(
                                            str(api_data_basic['network_ipam_refs'][ipams]['attr']['host_routes']['route'][host_route1]['prefix']))
                                    host_route_string = ", ".join(host_route_sub)
                                    host_route_main.append(
                                        str(api_data_basic['network_ipam_refs'][ipams]['to'][2]) + ' ' + host_route_string)
                        if(len(host_route_main) > 0):
                            complete_api_data.append(
                                {'key': 'Host Routes', 'value': host_route_main})
                    if api_data_basic['virtual_network_properties'].has_key('forwarding_mode'):
                        forwarding_mode = api_data_basic[
                            'virtual_network_properties']['forwarding_mode']
                        # 'l2' renders as "L2 Only"; anything else as
                        # "L2 and L3" in the webui.
                        if forwarding_mode == 'l2':
                            forwarding_mode = forwarding_mode.title() + ' Only'
                        else:
                            forwarding_mode = 'L2 and L3'
                        complete_api_data.append(
                            {'key': 'Forwarding Mode', 'value': forwarding_mode})
                    if api_data_basic['virtual_network_properties'].has_key('vxlan_network_identifier'):
                        # The webui shows 'Automatic' where the API has None.
                        complete_api_data.append({'key': 'VxLAN Identifier', 'value': str(
                            api_data_basic['virtual_network_properties']['vxlan_network_identifier']).replace('None', 'Automatic')})
                if self.webui_common.match_ops_with_webui(complete_api_data, dom_arry_basic):
                    self.logger.info(
                        "Api virutal networks details matched in webui config networks")
                else:
                    self.logger.error(
                        "Api virutal networks details not match in webui config networks")
                    result = result and False
        return result
    # end verify_vn_api_basic_data_in_webui
    def verify_service_template_api_basic_data(self):
        """Verify service-template data from the API server in the WebUI.

        For every template listed by the API server, locate the matching row
        in the Configure > Services > Service Templates grid, expand its
        basic-details pane, rebuild the expected key/value pairs from the raw
        API object and compare the two sets.  Returns True only when every
        template matched.

        NOTE(review): the loop iterates range(len(...) - 1) with a temp + 1
        offset, i.e. entry 0 of 'service-templates' is skipped — presumably
        the built-in default template; confirm against the API listing.
        """
        self.logger.info("Verifying service template api-data in Webui...")
        self.logger.debug(self.dash)
        result = True
        service_temp_list_api = self.webui_common.get_service_template_list_api(
        )
        for temp in range(len(service_temp_list_api['service-templates']) - 1):
            interface_list = []
            api_fq_name = service_temp_list_api[
                'service-templates'][temp + 1]['fq_name'][1]
            self.webui_common.click_configure_service_template()
            rows = self.webui_common.get_rows()
            self.logger.info(
                "Service template fq_name %s exists in api server..checking if exists in webui as well" % (api_fq_name))
            # Scan the grid for the row whose Name column (div[2]) matches and
            # snapshot the displayed cells; '*_grid_row' keys are compared
            # separately from the expanded basic-view keys.
            for i in range(len(rows)):
                dom_arry_basic = []
                match_flag = 0
                j = 0
                if rows[i].find_elements_by_tag_name('div')[2].text == api_fq_name:
                    self.logger.info(
                        "Service template fq_name %s matched in webui..going to match basic view details..." % (api_fq_name))
                    self.logger.debug(self.dash)
                    match_index = i
                    match_flag = 1
                    dom_arry_basic.append(
                        {'key': 'Name_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[2].text})
                    dom_arry_basic.append(
                        {'key': 'Mode_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[3].text})
                    dom_arry_basic.append(
                        {'key': 'Type_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[4].text})
                    dom_arry_basic.append(
                        {'key': 'Scaling_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[5].text})
                    dom_arry_basic.append(
                        {'key': 'Interface_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[6].text})
                    dom_arry_basic.append(
                        {'key': 'Image_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[7].text})
                    dom_arry_basic.append(
                        {'key': 'Flavor_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[8].text})
                    break
            if not match_flag:
                self.logger.error(
                    "Service template fq_name exists in apiserver but %s not found in webui..." % (api_fq_name))
                self.logger.debug(self.dash)
            else:
                # Expand the matched row and harvest every label/value pair of
                # the basic-details pane.
                self.webui_common.click_configure_service_template_basic(
                    match_index)
                rows = self.webui_common.get_rows()
                self.logger.info(
                    "Click and retrieve basic view details in webui for service templatefq_name %s " % (api_fq_name))
                rows_detail = rows[
                    match_index + 1].find_element_by_class_name('slick-row-detail-container').find_element_by_class_name('row-fluid').find_elements_by_class_name('row-fluid')
                for detail in range(len(rows_detail)):
                    text1 = rows_detail[
                        detail].find_element_by_tag_name('label').text
                    # NOTE(review): both branches below are identical, so the
                    # 'Interface Type' special case currently has no effect.
                    if text1 == 'Interface Type':
                        dom_arry_basic.append(
                            {'key': str(text1), 'value': rows_detail[detail].find_element_by_class_name('span10').text})
                    else:
                        dom_arry_basic.append(
                            {'key': str(text1), 'value': rows_detail[detail].find_element_by_class_name('span10').text})
                # Rebuild the expected key/value list from the full API object.
                service_temp_api_data = self.webui_common.get_details(
                    service_temp_list_api['service-templates'][temp + 1]['href'])
                complete_api_data = []
                if service_temp_api_data.has_key('service-template'):
                    api_data_basic = service_temp_api_data.get(
                        'service-template')
                    if api_data_basic.has_key('fq_name'):
                        complete_api_data.append(
                            {'key': 'Template', 'value': str(api_data_basic['fq_name'][1])})
                        complete_api_data.append(
                            {'key': 'Name_grid_row', 'value': str(api_data_basic['fq_name'][1])})
                    if api_data_basic['service_template_properties'].has_key('service_mode'):
                        complete_api_data.append({'key': 'Mode', 'value': str(
                            api_data_basic['service_template_properties']['service_mode']).capitalize()})
                        complete_api_data.append({'key': 'Mode_grid_row', 'value': str(
                            api_data_basic['service_template_properties']['service_mode']).capitalize()})
                    if api_data_basic['service_template_properties'].has_key('service_type'):
                        complete_api_data.append({'key': 'Type', 'value': str(
                            api_data_basic['service_template_properties']['service_type']).title()})
                        complete_api_data.append({'key': 'Type_grid_row', 'value': str(
                            api_data_basic['service_template_properties']['service_type']).title()})
                    if api_data_basic['service_template_properties'].has_key('service_scaling'):
                        # The UI renders the boolean as Enabled/Disabled.
                        if api_data_basic['service_template_properties']['service_scaling'] == True:
                            complete_api_data.append({'key': 'Scaling', 'value': str(
                                api_data_basic['service_template_properties']['service_scaling']).replace('True', 'Enabled')})
                            complete_api_data.append({'key': 'Scaling_grid_row', 'value': str(
                                api_data_basic['service_template_properties']['service_scaling']).replace('True', 'Enabled')})
                        else:
                            complete_api_data.append({'key': 'Scaling', 'value': str(
                                api_data_basic['service_template_properties']['service_scaling']).replace('False', 'Disabled')})
                            complete_api_data.append({'key': 'Scaling_grid_row', 'value': str(
                                api_data_basic['service_template_properties']['service_scaling']).replace('False', 'Disabled')})
                    if api_data_basic['service_template_properties'].has_key('interface_type'):
                        # Render each interface as the UI does, e.g.
                        # "Left(Shared IP, Static Route)".
                        for interface in range(len(api_data_basic['service_template_properties']['interface_type'])):
                            if api_data_basic['service_template_properties']['interface_type'][interface]['shared_ip'] == True and api_data_basic['service_template_properties']['interface_type'][interface]['static_route_enable'] == True:
                                interface_type = api_data_basic['service_template_properties']['interface_type'][
                                    interface]['service_interface_type'].title() + '(' + 'Shared IP' + ', ' + 'Static Route' + ')'
                            elif api_data_basic['service_template_properties']['interface_type'][interface]['shared_ip'] == False and api_data_basic['service_template_properties']['interface_type'][interface]['static_route_enable'] == True:
                                interface_type = api_data_basic['service_template_properties']['interface_type'][
                                    interface]['service_interface_type'].title() + '(' + 'Static Route' + ')'
                            elif api_data_basic['service_template_properties']['interface_type'][interface]['shared_ip'] == True and api_data_basic['service_template_properties']['interface_type'][interface]['static_route_enable'] == False:
                                interface_type = api_data_basic['service_template_properties']['interface_type'][
                                    interface]['service_interface_type'].title() + '(' + 'Shared IP' + ')'
                            else:
                                interface_type = api_data_basic['service_template_properties'][
                                    'interface_type'][interface]['service_interface_type'].title()
                            interface_list.append(interface_type)
                        interface_string = ", ".join(interface_list)
                        complete_api_data.append(
                            {'key': 'Interface Type', 'value': interface_string})
                        complete_api_data.append(
                            {'key': 'Interface_grid_row', 'value': interface_string})
                    if api_data_basic['service_template_properties'].has_key('image_name'):
                        complete_api_data.append(
                            {'key': 'Image', 'value': str(api_data_basic['service_template_properties']['image_name'])})
                        complete_api_data.append({'key': 'Image_grid_row', 'value': str(
                            api_data_basic['service_template_properties']['image_name'])})
                    if api_data_basic.has_key('service_instance_back_refs'):
                        # Comma-separated "project:instance" list; '-' if none.
                        service_instances = api_data_basic[
                            'service_instance_back_refs']
                        si_text = ''
                        for index, si in enumerate(service_instances):
                            if index == 0:
                                si_text = si['to'][1] + ':' + si['to'][2]
                            else:
                                si_text = si_text + ', ' + \
                                    si['to'][1] + ':' + si['to'][2]
                        complete_api_data.append(
                            {'key': 'Instances', 'value': si_text})
                    else:
                        complete_api_data.append(
                            {'key': 'Instances', 'value': '-'})
                    if api_data_basic['service_template_properties'].has_key('flavor'):
                        complete_api_data.append(
                            {'key': 'Flavor', 'value': str(api_data_basic['service_template_properties']['flavor'])})
                        complete_api_data.append({'key': 'Flavor_grid_row', 'value': str(
                            api_data_basic['service_template_properties']['flavor'])})
                if self.webui_common.match_ops_with_webui(complete_api_data, dom_arry_basic):
                    self.logger.info(
                        "Api service templates details matched in webui")
                else:
                    self.logger.error(
                        "Api uves service templates details match failed in webui")
                    result = result and False
        return result
    # end verify_service_template_api_basic_data_in_webui
    def verify_floating_ip_api_data(self):
        """Verify floating-IP data from the API server in the WebUI.

        For every FIP known to the API server, select its project in the
        Configure FIP page, find the grid row whose UUID column matches,
        then compare the displayed cells against values rebuilt from the raw
        API object.  Returns True only when every FIP matched.
        """
        self.logger.info("Verifying FIP api-data in Webui...")
        self.logger.info(self.dash)
        result = True
        fip_list_api = self.webui_common.get_fip_list_api()
        for fips in range(len(fip_list_api['floating-ips'])):
            api_fq_id = fip_list_api['floating-ips'][fips]['uuid']
            self.webui_common.click_configure_fip()
            project_name = fip_list_api.get('floating-ips')[fips].get('fq_name')[1]
            self.webui_common.select_project(project_name)
            rows = self.webui_common.get_rows()
            self.logger.info(
                "fip fq_id %s exists in api server..checking if exists in webui as well" % (api_fq_id))
            for i in range(len(rows)):
                match_flag = 0
                j = 0
                # The UUID column is the 5th grid cell (div[4]).
                if rows[i].find_elements_by_tag_name('div')[4].text == api_fq_id:
                    self.logger.info(
                        "fip fq_id %s matched in webui..going to match basic view details now" % (api_fq_id))
                    self.logger.info(self.dash)
                    match_index = i
                    match_flag = 1
                    dom_arry_basic = []
                    dom_arry_basic.append(
                        {'key': 'IP Address', 'value': rows[i].find_elements_by_tag_name('div')[1].text})
                    dom_arry_basic.append(
                        {'key': 'Instance', 'value': rows[i].find_elements_by_tag_name('div')[2].text})
                    dom_arry_basic.append(
                        {'key': 'Floating IP and Pool', 'value': rows[i].find_elements_by_tag_name('div')[3].text})
                    dom_arry_basic.append(
                        {'key': 'UUID', 'value': rows[i].find_elements_by_tag_name('div')[4].text})
                    break
            if not match_flag:
                self.logger.error(
                    "fip fq_id exists in apiserver but %s not found in webui..." % (api_fq_id))
                self.logger.info(self.dash)
            else:
                fip_api_data = self.webui_common.get_details(
                    fip_list_api['floating-ips'][fips]['href'])
                complete_api_data = []
                if fip_api_data.has_key('floating-ip'):
                    # creating a list of basic view items retrieved from
                    # opserver
                    api_data_basic = fip_api_data.get('floating-ip')
                    if api_data_basic.get('floating_ip_address'):
                        complete_api_data.append(
                            {'key': 'IP Address', 'value': api_data_basic['floating_ip_address']})
                    if api_data_basic.get('virtual_machine_interface_refs'):
                        # Resolve the attached instance (if any) through the
                        # referenced virtual-machine-interface object.
                        vm_api_data = self.webui_common.get_details(
                            api_data_basic['virtual_machine_interface_refs'][0]['href'])
                        if vm_api_data.has_key('virtual-machine-interface'):
                            if vm_api_data['virtual-machine-interface'].get('virtual_machine_refs'):
                                complete_api_data.append(
                                    {'key': 'Instance', 'value': vm_api_data['virtual-machine-interface']['virtual_machine_refs'][0]['to']})
                    else:
                        complete_api_data.append(
                            {'key': 'Instance', 'value': '-'})
                    if api_data_basic.get('fq_name'):
                        # fq_name = [domain, project, network, pool, fip-name]
                        # — TODO confirm this layout against the VNC schema.
                        complete_api_data.append(
                            {'key': 'Floating IP and Pool', 'value': api_data_basic['fq_name'][2] + ':' + api_data_basic['fq_name'][3]})
                    if api_data_basic.get('fq_name'):
                        # Assumes the FIP object's own name (fq_name[4])
                        # equals the UUID shown in the grid — TODO confirm.
                        complete_api_data.append(
                            {'key': 'UUID', 'value': api_data_basic['fq_name'][4]})
                if self.webui_common.match_ops_with_webui(complete_api_data, dom_arry_basic):
                    self.logger.info("api fip data matched in webui")
                else:
                    self.logger.error("api fip data match failed in webui")
                    result = False
        return result
    # end verify_floating_ip_api_data_in_webui
    def verify_policy_api_data(self):
        """Verify network-policy data from the API server in the WebUI.

        For each policy, select its project in Configure > Policies, match
        the grid row by name, expand the basic-details pane, then rebuild the
        expected 'Associated Networks' and 'Rules' strings from the raw API
        object (including the UI's two-item-plus-"(N more)" grid truncation)
        and compare.  Returns True only when every policy matched.

        NOTE(review): the loop iterates range(len(...) - 1) with NO index
        offset, so the LAST policy returned by the API server is never
        checked — confirm whether this is intentional.
        """
        self.logger.info("Verifying policy details in Webui...")
        self.logger.debug(self.dash)
        result = True
        policy_list_api = self.webui_common.get_policy_list_api()
        for policy in range(len(policy_list_api['network-policys']) - 1):
            pol_list = []
            net_list = []
            service_list = []
            api_fq_name = policy_list_api[
                'network-policys'][policy]['fq_name'][2]
            project_name = policy_list_api[
                'network-policys'][policy]['fq_name'][1]
            self.webui_common.click_configure_policies()
            self.webui_common.select_project(project_name)
            rows = self.webui_common.get_rows()
            self.logger.info(
                "Policy fq_name %s exists in api server..checking if exists in webui as well" % (api_fq_name))
            # Match the grid row by the Policy name column (div[2]) and
            # snapshot the displayed networks/rules cells.
            for i in range(len(rows)):
                dom_arry_basic = []
                match_flag = 0
                detail = 0
                if rows[i].find_elements_by_tag_name('div')[2].text == api_fq_name:
                    self.logger.info(
                        "Policy fq_name %s matched in webui..going to match basic view details..." % (api_fq_name))
                    self.logger.debug(self.dash)
                    match_index = i
                    match_flag = 1
                    dom_arry_basic.append(
                        {'key': 'Policy', 'value': rows[i].find_elements_by_tag_name('div')[2].text})
                    net_grid_row_value = rows[i].find_elements_by_tag_name('div')[3].text.splitlines()
                    dom_arry_basic.append({'key':'Associated_Networks_grid_row','value': net_grid_row_value})
                    dom_arry_basic.append(
                        {'key': 'Rules_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[4].text.splitlines()})
                    break
            if not match_flag:
                self.logger.error(
                    "Policy fq_name exists in apiserver but %s not found in webui..." % (api_fq_name))
                self.logger.debug(self.dash)
            else:
                self.webui_common.click_configure_policies_basic(match_index)
                rows = self.webui_common.get_rows()
                self.logger.info(
                    "Click and retrieve basic view details in webui for policy fq_name %s " % (api_fq_name))
                rows_detail = rows[
                    match_index + 1].find_element_by_class_name('slick-row-detail-container').find_element_by_class_name('row-fluid').find_elements_by_class_name('row-fluid')
                # Steps by 2 — presumably every other 'row-fluid' element is a
                # spacer/value row; confirm against the rendered DOM.
                while(detail < len(rows_detail)):
                    text1 = rows_detail[
                        detail].find_element_by_tag_name('label').text
                    if text1 == 'Associated Networks':
                        dom_arry_basic.append(
                            {'key': str(text1), 'value': rows_detail[detail].find_element_by_class_name('span11').text.split()})
                    elif text1 == 'Rules':
                        dom_arry_basic.append({'key': str(text1), 'value': rows_detail[
                            detail].find_element_by_class_name('span11').text.splitlines()})
                    detail = detail + 2
                policy_api_data = self.webui_common.get_details(
                    policy_list_api['network-policys'][policy]['href'])
                complete_api_data = []
                if policy_api_data.has_key('network-policy'):
                    api_data_basic = policy_api_data.get('network-policy')
                    if api_data_basic.has_key('fq_name'):
                        complete_api_data.append(
                            {'key': 'Policy', 'value': api_data_basic['fq_name'][2]})
                    if api_data_basic.has_key('virtual_network_back_refs'):
                        # Networks in the same project are shown by bare name;
                        # others by their full colon-joined fq_name.
                        for net in range(len(api_data_basic['virtual_network_back_refs'])):
                            api_project = api_data_basic[
                                'virtual_network_back_refs'][net]['to'][1]
                            if project_name == api_project:
                                fq = api_data_basic[
                                    'virtual_network_back_refs'][net]['to'][2]
                            else:
                                fq = ':'.join(
                                    api_data_basic['virtual_network_back_refs'][net]['to'])
                            net_list.append(fq)
                        complete_api_data.append(
                            {'key': 'Associated Networks', 'value': net_list})
                        # Grid cell truncates to 2 entries plus "(N more)".
                        net_list_len = len(net_list)
                        if net_list_len > 2 :
                            net_list_grid_row = net_list[:2]
                            more_string = '(' + str(net_list_len-2) + ' more)'
                            net_list_grid_row.append(more_string)
                            complete_api_data.append({'key':'Associated_Networks_grid_row', 'value':net_list_grid_row})
                        else:
                            complete_api_data.append({'key':'Associated_Networks_grid_row', 'value':net_list})
                    if api_data_basic.has_key('network_policy_entries'):
                        # Rebuild each rule string exactly as the UI renders it.
                        for rules in range(len(api_data_basic['network_policy_entries']['policy_rule'])):
                            dst_ports = api_data_basic['network_policy_entries'][
                                'policy_rule'][rules]['dst_ports']
                            src_ports = api_data_basic['network_policy_entries'][
                                'policy_rule'][rules]['src_ports']
                            source_port = []
                            desti_port = []
                            # start_port == -1 means 'any'; equal start/end
                            # collapses to a single port, else 'start-end'.
                            if dst_ports[0]['start_port'] == -1:
                                desti_port = 'any'
                            else:
                                for item in dst_ports:
                                    if item['start_port'] == item['end_port']:
                                        desti_port.append(item['start_port'])
                                    else:
                                        port_range = str(item['start_port']) + \
                                            '-' + \
                                            str(item['end_port'])
                                        desti_port.append(port_range)
                                if type(desti_port) is list:
                                    desti_port = str(desti_port)
                                    desti_port = '[ ' + desti_port[1:-1] + ' ]'
                            if src_ports[0]['start_port'] == -1:
                                source_port = 'any'
                            else:
                                for item in src_ports:
                                    if item['start_port'] == item['end_port']:
                                        source_port.append(item['start_port'])
                                    else:
                                        port_range = str(item['start_port']) + \
                                            '-' + \
                                            str(item['end_port'])
                                        source_port.append(port_range)
                                if type(source_port) is list:
                                    source_port = str(source_port)
                                    source_port = '[ ' + source_port[1:-1] + ' ]'
                            # Well-known networks keep their full name; others
                            # are shown by bare VN name (third fq_name token).
                            api_src_vnet = api_data_basic['network_policy_entries'][
                                'policy_rule'][rules]['src_addresses'][0]['virtual_network']
                            api_dst_vnet = api_data_basic['network_policy_entries'][
                                'policy_rule'][rules]['dst_addresses'][0]['virtual_network']
                            api_vnet_match_list = [
                                'default-domain:default-project:default-virtual-network', 'any',
                                'default-domain:default-project:__link_local__', 'default-domain:default-project:ip-fabric']
                            if api_src_vnet in api_vnet_match_list:
                                source_network = api_src_vnet
                            else:
                                source_network = api_src_vnet.split(':')[2]
                            if api_dst_vnet in api_vnet_match_list:
                                dest_network = api_dst_vnet
                            else:
                                dest_network = api_dst_vnet.split(':')[2]
                            action_list = api_data_basic['network_policy_entries'][
                                'policy_rule'][rules]['action_list']
                            protocol = api_data_basic['network_policy_entries'][
                                'policy_rule'][rules]['protocol']
                            direction = api_data_basic['network_policy_entries'][
                                'policy_rule'][rules]['direction']
                            if action_list.get('apply_service'):
                                for service in range(len(action_list['apply_service'])):
                                    service_list.append(
                                        action_list['apply_service'][service])
                                service_string = ",".join(service_list)
                                policy_text = 'protocol' + ' ' + protocol + ' ' + 'network' + ' ' + source_network + ' ' + 'port' + ' ' + source_port + ' ' + \
                                    direction + ' ' + 'network' + ' ' + dest_network + ' ' + 'port' + \
                                    ' ' + desti_port + ' ' + \
                                    'apply_service' + ' ' + service_string
                                pol_list.append(policy_text)
                            else:
                                policy_text = action_list['simple_action'] + ' ' + 'protocol' + ' ' + protocol + ' ' + 'network' + ' ' + source_network + \
                                    ' ' + 'port' + ' ' + source_port + ' ' + direction + ' ' + \
                                    'network' + ' ' + dest_network + \
                                    ' ' + 'port' + ' ' + desti_port
                                pol_list.append(policy_text)
                        complete_api_data.append(
                            {'key': 'Rules', 'value': pol_list})
                        # Grid cell truncates to 2 rules plus "(N more)".
                        if len(pol_list) > 2:
                            more_count = len(pol_list) - 2
                            pol_list_grid_row = pol_list[:2]
                            more_text = '(' + str(more_count) + ' more)'
                            pol_list_grid_row.append(more_text)
                        else:
                            pol_list_grid_row = pol_list
                        complete_api_data.append(
                            {'key': 'Rules_grid_row', 'value': pol_list_grid_row})
                if self.webui_common.match_ops_with_webui(complete_api_data, dom_arry_basic):
                    self.logger.info("Api policy details matched in webui")
                else:
                    self.logger.error(
                        "Api policy details match failed in webui")
                    result = result and False
        return result
    # end verify_policy_api_basic_data_in_webui
    def verify_ipam_api_data(self):
        """Verify network-IPAM data from the API server in the WebUI.

        For each IPAM, select its project in the Configure IPAM page, match
        the grid row by name, expand the basic-details pane, then rebuild the
        expected DNS/NTP/Domain/IP-block strings from the raw API object and
        compare.  Returns True only when every IPAM matched.
        """
        self.logger.info("Verifying ipam details in Webui...")
        self.logger.debug(self.dash)
        result = True
        ipam_list_api = self.webui_common.get_ipam_list_api()
        for ipam in range(len(ipam_list_api['network-ipams'])):
            net_list = []
            api_fq_name = ipam_list_api['network-ipams'][ipam]['fq_name'][2]
            project_name = ipam_list_api['network-ipams'][ipam]['fq_name'][1]
            self.webui_common.click_configure_ipam()
            self.webui_common.select_project(project_name)
            rows = self.webui_common.get_rows()
            self.logger.info(
                "Ipam fq_name %s exists in api server..checking if exists in webui as well" % (api_fq_name))
            # Match the grid row by the Name column (div[2]) and snapshot the
            # displayed IP/DNS/NTP cells for later comparison.
            for i in range(len(rows)):
                match_flag = 0
                j = 0
                dom_arry_basic = []
                if rows[i].find_elements_by_tag_name('div')[2].text == api_fq_name:
                    self.logger.info(
                        "Ipam fq_name %s matched in webui..going to match basic view details..." % (api_fq_name))
                    self.logger.debug(self.dash)
                    match_index = i
                    match_flag = 1
                    # NOTE(review): ipam_fq_name is assigned but never used.
                    ipam_fq_name = rows[
                        i].find_elements_by_tag_name('div')[2].text
                    dom_arry_basic.append(
                        {'key': 'Name_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[2].text})
                    ip_grid_row_value = ' '.join(
                        rows[i].find_elements_by_tag_name('div')[3].text.splitlines())
                    dom_arry_basic.append(
                        {'key': 'IP_grid_row', 'value': ip_grid_row_value})
                    dom_arry_basic.append(
                        {'key': 'DNS_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[4].text})
                    dom_arry_basic.append(
                        {'key': 'NTP_grid_row', 'value': rows[i].find_elements_by_tag_name('div')[5].text})
                    break
            if not match_flag:
                self.logger.error(
                    "Ipam fq_name exists in apiserver but %s not found in webui..." % (api_fq_name))
                self.logger.debug(self.dash)
            else:
                self.webui_common.click_configure_ipam_basic(match_index)
                rows = self.webui_common.get_rows()
                self.logger.info(
                    "Click and retrieve basic view details in webui for ipam fq_name %s " % (api_fq_name))
                rows_detail = rows[
                    match_index + 1].find_element_by_class_name('slick-row-detail-container').find_element_by_class_name('row-fluid').find_elements_by_class_name('row-fluid')
                for detail in range(len(rows_detail)):
                    text1 = rows_detail[
                        detail].find_element_by_tag_name('label').text
                    # NOTE(review): both branches below are identical, so the
                    # 'IP Blocks' special case currently has no effect.
                    if text1 == 'IP Blocks':
                        dom_arry_basic.append(
                            {'key': str(text1), 'value': rows_detail[detail].find_element_by_class_name('span10').text})
                    else:
                        dom_arry_basic.append(
                            {'key': str(text1), 'value': rows_detail[detail].find_element_by_class_name('span10').text})
                ipam_api_data = self.webui_common.get_details(
                    ipam_list_api['network-ipams'][ipam]['href'])
                complete_api_data = []
                if ipam_api_data.has_key('network-ipam'):
                    api_data_basic = ipam_api_data.get('network-ipam')
                    if api_data_basic.has_key('fq_name'):
                        complete_api_data.append(
                            {'key': 'IPAM Name', 'value': str(api_data_basic['fq_name'][2])})
                        complete_api_data.append(
                            {'key': 'Name_grid_row', 'value': str(api_data_basic['fq_name'][2])})
                    # Map the API DNS method onto the strings the UI renders.
                    if api_data_basic.get('network_ipam_mgmt'):
                        if api_data_basic['network_ipam_mgmt'].get('ipam_dns_method'):
                            if api_data_basic['network_ipam_mgmt']['ipam_dns_method'] == 'default-dns-server':
                                complete_api_data.append(
                                    {'key': 'DNS Server', 'value': '-'})
                                complete_api_data.append(
                                    {'key': 'DNS_grid_row', 'value': '-'})
                            elif api_data_basic['network_ipam_mgmt']['ipam_dns_method'] == 'none':
                                complete_api_data.append(
                                    {'key': 'DNS Server', 'value': 'DNS Mode : None'})
                                complete_api_data.append(
                                    {'key': 'DNS_grid_row', 'value': 'DNS Mode : None'})
                            elif api_data_basic['network_ipam_mgmt']['ipam_dns_method'] == 'virtual-dns-server':
                                complete_api_data.append(
                                    {'key': 'DNS Server', 'value': 'Virtual DNS:' + ' ' + api_data_basic['network_ipam_mgmt']['ipam_dns_server']['virtual_dns_server_name']})
                                complete_api_data.append({'key': 'DNS_grid_row', 'value': 'Virtual DNS:' + ' ' +
                                                          api_data_basic['network_ipam_mgmt']['ipam_dns_server']['virtual_dns_server_name']})
                            elif api_data_basic['network_ipam_mgmt']['ipam_dns_method'] == 'tenant-dns-server':
                                dns_server_value = str(api_data_basic['network_ipam_mgmt']['ipam_dns_method']).split(
                                    '-')[0].title() + ' ' + 'Managed' + ' ' + 'DNS' + ':' + ' ' + str(api_data_basic['network_ipam_mgmt']['ipam_dns_server']['tenant_dns_server_address']['ip_address'][0])
                                complete_api_data.append(
                                    {'key': 'DNS Server', 'value': dns_server_value})
                                complete_api_data.append(
                                    {'key': 'DNS_grid_row', 'value': dns_server_value})
                        else:
                            complete_api_data.append(
                                {'key': 'DNS Server', 'value': '-'})
                            complete_api_data.append(
                                {'key': 'DNS_grid_row', 'value': '-'})
                    # DHCP options: with two entries the first is treated as
                    # the NTP server and the second as the domain name; a
                    # single entry is dispatched by its dhcp_option_name
                    # ('4' -> NTP Server, '15' -> Domain Name).
                    if api_data_basic.get('network_ipam_mgmt'):
                        if api_data_basic['network_ipam_mgmt'].get('dhcp_option_list'):
                            if api_data_basic['network_ipam_mgmt']['dhcp_option_list'].get('dhcp_option'):
                                if len(api_data_basic['network_ipam_mgmt']['dhcp_option_list']['dhcp_option']) > 1:
                                    ntp_server_value = str(api_data_basic['network_ipam_mgmt']['dhcp_option_list'][
                                                           'dhcp_option'][0]['dhcp_option_value'])
                                    complete_api_data.append({'key': 'Domain Name', 'value': str(
                                        api_data_basic['network_ipam_mgmt']['dhcp_option_list']['dhcp_option'][1]['dhcp_option_value'])})
                                    complete_api_data.append(
                                        {'key': 'NTP Server', 'value': ntp_server_value})
                                    complete_api_data.append(
                                        {'key': 'NTP_grid_row', 'value': ntp_server_value})
                                elif api_data_basic['network_ipam_mgmt']['dhcp_option_list']['dhcp_option'][0]['dhcp_option_name'] == '4':
                                    ntp_server_value = str(api_data_basic['network_ipam_mgmt']['dhcp_option_list'][
                                                           'dhcp_option'][0]['dhcp_option_value'])
                                    complete_api_data.append(
                                        {'key': 'NTP Server', 'value': ntp_server_value})
                                    complete_api_data.append(
                                        {'key': 'NTP_grid_row', 'value': ntp_server_value})
                                elif api_data_basic['network_ipam_mgmt']['dhcp_option_list']['dhcp_option'][0]['dhcp_option_name'] == '15':
                                    complete_api_data.append({'key': 'Domain Name', 'value': str(
                                        api_data_basic['network_ipam_mgmt']['dhcp_option_list']['dhcp_option'][0]['dhcp_option_value'])})
                        else:
                            complete_api_data.append(
                                {'key': 'NTP Server', 'value': '-'})
                            complete_api_data.append(
                                {'key': 'NTP_grid_row', 'value': '-'})
                            complete_api_data.append(
                                {'key': 'Domain Name', 'value': '-'})
                    else:
                        complete_api_data.append(
                            {'key': 'NTP Server', 'value': '-'})
                        complete_api_data.append(
                            {'key': 'NTP_grid_row', 'value': '-'})
                        complete_api_data.append(
                            {'key': 'Domain Name', 'value': '-'})
                    # IP blocks rendered as "net - prefix/len(gateway)";
                    # grid cell truncated to 2 entries plus "(N more )".
                    if api_data_basic.has_key('virtual_network_back_refs'):
                        for net in range(len(api_data_basic['virtual_network_back_refs'])):
                            for ip_sub in range(len(api_data_basic['virtual_network_back_refs'][net]['attr']['ipam_subnets'])):
                                api_project = api_data_basic[
                                    'virtual_network_back_refs'][net]['to'][1]
                                if project_name == api_project:
                                    fq = str(
                                        api_data_basic['virtual_network_back_refs'][net]['to'][2])
                                else:
                                    fq = ':'.join(
                                        api_data_basic['virtual_network_back_refs'][net]['to'])
                                ip_prefix = str(api_data_basic['virtual_network_back_refs'][net][
                                                'attr']['ipam_subnets'][ip_sub]['subnet']['ip_prefix'])
                                ip_prefix_len = str(api_data_basic['virtual_network_back_refs'][net]['attr'][
                                                    'ipam_subnets'][ip_sub]['subnet']['ip_prefix_len'])
                                default_gateway = str(api_data_basic['virtual_network_back_refs'][net][
                                                      'attr']['ipam_subnets'][ip_sub]['default_gateway'])
                                net_list.append(
                                    fq + ' - ' + ip_prefix + '/' + ip_prefix_len + '(' + default_gateway + ')')
                        net_string = ' '.join(net_list)
                        complete_api_data.append(
                            {'key': 'IP Blocks', 'value': net_string})
                        if len(net_list) > 2:
                            net_string_grid_row = ' '.join(
                                net_list[:2]) + ' (' + str(len(net_list) - 2) + ' more )'
                        else:
                            net_string_grid_row = net_string
                        complete_api_data.append(
                            {'key': 'IP_grid_row', 'value': net_string_grid_row})
                if self.webui_common.match_ops_with_webui(complete_api_data, dom_arry_basic):
                    self.logger.info(
                        "Api uves ipam basic view data matched in webui")
                else:
                    self.logger.error(
                        "Api uves ipam basic view data match failed in webui")
                    result = result and False
        return result
    # end verify_ipam_api_data_in_webui
def verify_vm_ops_data_in_webui(self, fixture):
self.logger.info("Verifying VN %s ops-data in Webui..." %
(fixture.vn_name))
vm_list = self.webui_common.get_vm_list_ops()
if not self.webui_common.click_monitor_instances():
result = result and False
rows = self.webui_common.get_rows()
if len(rows) != len(vm_list):
self.logger.error(" VM count in webui and opserver not matched ")
else:
self.logger.info(" VM count in webui and opserver matched")
for i in range(len(vm_list)):
vm_name = vm_list[i]['name']
# end verify_vm_ops_data_in_webui
    def verify_vn_ops_data_in_webui(self, fixture):
        """Verify VN ops/API data in the WebUI Monitor > Networks pages.

        NOTE(review): this method looks unfinished and several paths cannot
        run as written — to be confirmed/repaired:
        * 'result' is read before assignment ('result = result and False').
        * Two bare-name expression statements ('UveVirtualNetworkConfig',
          'total_acl_rules_ops') would raise NameError if those names are
          undefined; the has_key key 'UveVirtualNetwokConfig' is misspelled.
        * 'row', 'j' (at first use), 'floating_ip_length_api', 'fip_ui' and
          'ip_block' are referenced but never defined in this method.
        * Nothing is returned.
        """
        vn_list = self.webui_common.get_vn_list_ops(fixture)
        self.logger.info(
            "VN details for %s got from ops server and going to match in webui : " % (vn_list))
        if not self.webui_common.click_configure_networks():
            result = result and False
        rows = self.webui_common.get_rows()
        #rows = self.browser.find_element_by_id('gridVN').find_element_by_tag_name('tbody').find_elements_by_tag_name('tr')
        ln = len(vn_list)
        for i in range(ln):
            vn_name = vn_list[i]['name']
            details = self.webui_common.get_vn_details(vn_list[i]['href'])
            # NOTE(review): bare-name statement — no effect if defined,
            # NameError otherwise; looks like leftover/unfinished code.
            UveVirtualNetworkConfig
            if details.has_key('UveVirtualNetwokConfig'):
                total_acl_rules_ops
            if details.has_key('UveVirtualNetworkAgent'):
                UveVirtualNetworkAgent_dict = details['UveVirtualNetworkAgent']
                egress_flow_count_api = details[
                    'UveVirtualNetworkAgent']['egress_flow_count']
                ingress_flow_count_api = details[
                    'UveVirtualNetworkAgent']['ingress_flow_count']
                interface_list_count_api = len(
                    details['UveVirtualNetworkAgent']['interface_list_count'])
                total_acl_rules_count = details[
                    'UveVirtualNetworkAgent']['total_acl_rules']
            # NOTE(review): 'row', 'j', 'floating_ip_length_api' and 'fip_ui'
            # are all undefined here — this branch raises NameError if reached.
            if self.webui_common.check_element_exists_by_xpath(row[j + 1], "//label[contains(text(), 'Ingress Flows')]"):
                for n in range(floating_ip_length_api):
                    fip_api = details[
                        'virtual-network']['floating_ip_pools'][n]['to']
                    if fip_ui[n] == fip_api[3] + ' (' + fip_api[0] + ':' + fip_api[1] + ')':
                        self.logger.info(" Fip matched ")
            if not self.webui_common.click_monitor_networks():
                result = result and False
            # Locate the VN's row on the monitor page and expand its basic
            # details via the gear menu.
            for j in range(len(rows)):
                rows = self.browser.find_element_by_class_name('k-grid-content').find_element_by_tag_name(
                    'tbody').find_elements_by_tag_name('tr')
                fq_name = rows[j].find_elements_by_tag_name('a')[1].text
                if(fq_name == vn_list[i]['name']):
                    self.logger.info(" %s VN verified in monitor page " %
                                     (fq_name))
                    rows[j].find_elements_by_tag_name(
                        'td')[0].find_element_by_tag_name('a').click()
                    rows = self.webui_common.get_rows()
                    expanded_row = rows[
                        j + 1].find_element_by_class_name('inline row-fluid position-relative pull-right margin-0-5')
                    expanded_row.find_element_by_class_name(
                        'icon-cog icon-only bigger-110').click()
                    expanded_row.find_elements_by_tag_name('a')[1].click()
                    basicdetails_ui_data = rows[
                        j + 1].find_element_by_xpath("//*[contains(@id, 'basicDetails')]").find_elements_by_class_name("row-fluid")
                    ingress_ui = basicdetails_ui_data[0].text.split('\n')[1]
                    egress_ui = basicdetails_ui_data[1].text.split('\n')[1]
                    acl_ui = basicdetails_ui_data[2].text.split('\n')[1]
                    intf_ui = basicdetails_ui_data[3].text.split('\n')[1]
                    vrf_ui = basicdetails_ui_data[4].text.split('\n')[1]
                    break
            else:
                self.logger.error(" %s VN not found in monitor page " %
                                  (fq_name))
            details = self.webui_common.get_vn_details_api(vn_list[i]['href'])
            j = 0
            # Re-scan the grid and compare IP block, forwarding mode and
            # VxLAN id of the matched row against the API object.
            for j in range(len(rows)):
                if not self.webui_common.click_monitor_networks():
                    result = result and False
                rows = self.browser.find_element_by_class_name('k-grid-content').find_element_by_tag_name(
                    'tbody').find_elements_by_tag_name('tr')
                if (rows[j].find_elements_by_tag_name('td')[2].get_attribute('innerHTML') == details['virtual-network']['fq_name'][2]):
                    # NOTE(review): 'ip_block' is never defined in this method.
                    if rows[j].find_elements_by_tag_name('td')[4].text == ip_block:
                        self.logger.info("Ip blocks verified ")
                    rows[j].find_elements_by_tag_name(
                        'td')[0].find_element_by_tag_name('a').click()
                    rows = self.webui_common.get_rows()
                    ui_ip_block = rows[
                        j + 1].find_element_by_class_name('span11').text.split('\n')[1]
                    if (ui_ip_block.split(' ')[0] == ':'.join(details['virtual-network']['network_ipam_refs'][0]['to']) and ui_ip_block.split(' ')[1] == ip_block and ui_ip_block.split(' ')[2] == details['virtual-network']['network_ipam_refs'][0]['attr']['ipam_subnets'][0]['default_gateway']):
                        self.logger.info(
                            "Ip block and details matched in webui advance view details ")
                    else:
                        self.logger.error("Ip block not matched")
                    forwarding_mode = rows[
                        j + 1].find_elements_by_class_name('span2')[0].text.split('\n')[1]
                    vxlan = rows[
                        j + 1].find_elements_by_class_name('span2')[1].text.split('\n')[1]
                    network_dict = {'l2_l3': 'L2 and L3'}
                    if network_dict[details['virtual-network']['virtual_network_properties']['forwarding_mode']] == forwarding_mode:
                        self.logger.info(" Forwarding mode matched ")
                    else:
                        self.logger.error("Forwarding mode not matched ")
                    # API 'None' identifier is rendered as 'Automatic' in UI.
                    if details['virtual-network']['virtual_network_properties']['vxlan_network_identifier'] == None:
                        vxlan_api = 'Automatic'
                    else:
                        vxlan_api = details[
                            'virtual-network']['virtual_network_properties']['vxlan_network_identifier']
                    if vxlan_api == vxlan:
                        self.logger.info(" Vxlan matched ")
                    else:
                        self.logger.info(" Vxlan not matched ")
                    rows[j].find_elements_by_tag_name(
                        'td')[0].find_element_by_tag_name('a').click()
                    break
                # NOTE(review): 'j == range(len(rows))' compares an int to a
                # list — always False, so this branch is dead.
                elif (j == range(len(rows))):
                    self.logger.info(
                        "Vn name %s : %s is not matched in webui " %
                        (fixture.vn_name, details['virtual-network']['fq_name'][2]))
    # end verify_vn_ops_data_in_webui
def verify_vn_in_webui(self, fixture):
self.browser.get_screenshot_as_file('vm_verify.png')
if not self.webui_common.click_configure_networks():
result = result and False
time.sleep(2)
rows = self.webui_common.get_rows()
ln = len(rows)
vn_flag = 0
for i in range(len(rows)):
if (rows[i].find_elements_by_tag_name('div')[2].get_attribute('innerHTML') == fixture.vn_name and rows[i].find_elements_by_tag_name(
'div')[4].text == fixture.vn_subnets[0]):
vn_flag = 1
rows[i].find_elements_by_tag_name(
'div')[0].find_element_by_tag_name('i').click()
rows = self.webui_common.get_rows()
ip_blocks = rows[
i + 1].find_element_by_class_name('span11').text.split('\n')[1]
if (ip_blocks.split(' ')[0] == ':'.join(fixture.ipam_fq_name) and ip_blocks.split(' ')[1] == fixture.vn_subnets[0]):
self.logger.info(
"Vn name %s and ip block %s verified in configure page " %
(fixture.vn_name, fixture.vn_subnets))
else:
self.logger.error(
"Ip block details failed to verify in configure page %s " % (fixture.vn_subnets))
self.browser.get_screenshot_as_file(
'Verify_vn_configure_page_ip_block.png')
vn_flag = 0
break
if not self.webui_common.click_monitor_networks():
result = result and False
time.sleep(3)
rows = self.webui_common.get_rows()
vn_entry_flag = 0
for i in range(len(rows)):
fq_name = rows[i].find_elements_by_tag_name(
'div')[1].find_element_by_tag_name('div').text
if(fq_name == fixture.ipam_fq_name[0] + ":" + fixture.project_name + ":" + fixture.vn_name):
self.logger.info(" %s VN verified in monitor page " %
(fq_name))
vn_entry_flag = 1
break
if not vn_entry_flag:
self.logger.error("VN %s Verification failed in monitor page" %
(fixture.vn_name))
self.browser.get_screenshot_as_file('verify_vn_monitor_page.png')
if vn_entry_flag:
self.logger.info(
" VN %s and subnet verified in webui config and monitor pages" %
(fixture.vn_name))
# if self.webui_common.verify_uuid_table(fixture.vn_id):
# self.logger.info( "VN %s UUID verified in webui table " %(fixture.vn_name))
# else:
# self.logger.error( "VN %s UUID Verification failed in webui table " %(fixture.vn_name))
# self.browser.get_screenshot_as_file('verify_vn_configure_page_ip_block.png')
fixture.obj = fixture.quantum_fixture.get_vn_obj_if_present(
fixture.vn_name, fixture.project_id)
fq_type = 'virtual_network'
full_fq_name = fixture.vn_fq_name + ':' + fixture.vn_id
# if self.webui_common.verify_fq_name_table(full_fq_name, fq_type):
# self.logger.info( "fq_name %s found in fq Table for %s VN" %(fixture.vn_fq_name,fixture.vn_name))
# else:
# self.logger.error( "fq_name %s failed in fq Table for %s VN" %(fixture.vn_fq_name,fixture.vn_name))
# self.browser.get_screenshot_as_file('setting_page_configure_fq_name_error.png')
return True
# end verify_vn_in_webui
def vn_delete_in_webui(self, fixture):
result = True
self.browser.get_screenshot_as_file('vm_delete.png')
if not self.webui_common.click_configure_networks():
result = result and False
rows = self.webui_common.get_rows()
ln = len(rows)
for net in rows:
if (net.find_elements_by_tag_name('div')[2].text == fixture.vn_name):
net.find_elements_by_tag_name(
'div')[1].find_element_by_tag_name('input').click()
break
self.browser.find_element_by_id('btnDeleteVN').click()
self.webui_common.wait_till_ajax_done(self.browser)
time.sleep(2)
self.browser.find_element_by_id('btnCnfRemoveMainPopupOK').click()
self.logger.info("%s is deleted successfully using webui" %
(fixture.vn_name))
# end vn_delete_in_webui
def ipam_delete_in_webui(self, fixture):
if not self.webui_common.click_configure_ipam():
result = result and False
rows = self.webui_common.get_rows()
for ipam in range(len(rows)):
tdArry = rows[ipam].find_elements_by_class_name('slick-cell')
if (len(tdArry) > 2):
if (tdArry[2].text == fixture.name):
tdArry[1].find_element_by_tag_name('input').click()
self.browser.find_element_by_id(
'btnDeleteIpam').find_element_by_tag_name('i').click()
self.browser.find_element_by_id(
'btnCnfRemoveMainPopupOK').click()
if not self.webui_common.check_error_msg("Delete ipam"):
raise Exception("Ipam deletion failed")
break
self.webui_common.wait_till_ajax_done(self.browser)
self.logger.info(
"%s is deleted successfully using webui" % (name))
break
# end ipam_delete_in_webui
def service_template_delete_in_webui(self, fixture):
if not self.webui_common.click_configure_service_template():
result = result and False
rows = self.webui_common.get_rows()
for temp in range(len(rows)):
tdArry = rows[temp].find_elements_by_class_name('slick-cell')
if (len(tdArry) > 2):
if (tdArry[2].text == fixture.st_name):
tdArry[1].find_element_by_tag_name('input').click()
self.browser.find_element_by_id(
'btnDeletesvcTemplate').find_element_by_tag_name('i').click()
self.browser.find_element_by_id('btnCnfDelPopupOK').click()
if not self.webui_common.check_error_msg("Delete service template"):
raise Exception("Service template deletion failed")
break
self.webui_common.wait_till_ajax_done(self.browser)
self.logger.info("%s is deleted successfully using webui" %
(fixture.st_name))
break
# end service_template_delete_in_webui
def service_instance_delete_in_webui(self, fixture):
if not self.webui_common.click_configure_service_instance():
result = result and False
rows = self.webui_common.get_rows()
for inst in range(len(rows)):
tdArry = rows[inst].find_elements_by_class_name('slick-cell')
if (len(tdArry) > 2):
if (tdArry[2].text == fixture.si_name):
tdArry[1].find_element_by_tag_name('input').click()
self.browser.find_element_by_id(
'btnDeletesvcInstances').find_element_by_tag_name('i').click()
self.browser.find_element_by_id(
'btnCnfDelSInstPopupOK').click()
if not self.webui_common.check_error_msg("Delete service instance"):
raise Exception("Service instance deletion failed")
break
self.webui_common.wait_till_ajax_done(self.browser)
self.logger.info("%s is deleted successfully using webui" %
(fixture.si_name))
break
# end service_instance_delete_in_webui
def dns_server_delete(self, name):
if not self.webui_common.click_configure_dns_server():
result = result and False
rows = self.webui_common.get_rows()
for server in range(len(rows)):
tdArry = rows[server].find_elements_by_class_name('slick-cell')
if (len(tdArry) > 2):
if (tdArry[2].text == name):
tdArry[1].find_element_by_tag_name('input').click()
self.browser.find_element_by_id(
'btnDeleteDNSServer').click()
self.browser.find_element_by_id('btnCnfDelPopupOK').click()
if not self.webui_common.check_error_msg("Delete dns server"):
raise Exception("Dns server deletion failed")
break
self.webui_common.wait_till_ajax_done(self.browser)
self.logger.info(
"%s is deleted successfully using webui" % (name))
break
# end dns_server_delete_in_webui
def dns_record_delete(self, name):
if not self.webui_common.click_configure_dns_record():
result = result and False
rows = self.webui_common.get_rows()
for record in range(len(rows)):
tdArry = rows[record].find_elements_by_class_name('slick-cell')
if (len(tdArry) > 2):
if (tdArry[2].text == name):
tdArry[1].find_element_by_tag_name('input').click()
self.browser.find_element_by_id(
'btnDeleteDNSRecord').click()
self.browser.find_element_by_id(
'btnCnfDelMainPopupOK').click()
if not self.webui_common.check_error_msg("Delete dns record"):
raise Exception("Dns record deletion failed")
break
self.webui_common.wait_till_ajax_done(self.browser)
self.logger.info(
"%s is deleted successfully using webui" % (name))
break
# end dns_record_delete_in_webui
def create_vm_in_openstack(self, fixture):
try:
if not self.proj_check_flag:
WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_link_text('Project')).click()
time.sleep(3)
WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_css_selector('h4')).click()
WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_id('tenant_list')).click()
current_project = WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_css_selector('h3')).text
if not current_project == fixture.project_name:
WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_css_selector('h3')).click()
WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_link_text(fixture.project_name)).click()
self.webui_common.wait_till_ajax_done(
self.browser_openstack)
self.proj_check_flag = 1
WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_link_text('Project')).click()
self.webui_common.wait_till_ajax_done(self.browser_openstack)
instance = WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_link_text('Instances')).click()
self.webui_common.wait_till_ajax_done(self.browser_openstack)
fixture.image_name = 'ubuntu'
fixture.nova_fixture.get_image(image_name=fixture.image_name)
time.sleep(2)
launch_instance = WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_link_text('Launch Instance')).click()
self.webui_common.wait_till_ajax_done(self.browser_openstack)
self.logger.debug('Creating instance name %s with image name %s using openstack'
% (fixture.vm_name, fixture.image_name))
self.logger.info('Creating instance name %s with image name %s using openstack'
% (fixture.vm_name, fixture.image_name))
time.sleep(3)
self.browser_openstack.find_element_by_xpath(
"//select[@name='source_type']/option[contains(text(), 'image') or contains(text(),'Image')]").click()
self.webui_common.wait_till_ajax_done(self.browser_openstack)
self.browser_openstack.find_element_by_xpath(
"//select[@name='image_id']/option[contains(text(), '" + fixture.image_name + "')]").click()
WebDriverWait(self.browser_openstack, self.delay).until(lambda a: a.find_element_by_id(
'id_name')).send_keys(fixture.vm_name)
self.browser_openstack.find_element_by_xpath(
"//select[@name='flavor']/option[text()='m1.small']").click()
WebDriverWait(self.browser_openstack, self.delay).until(lambda a: a.find_element_by_xpath(
"//input[@value='Launch']")).click()
networks = WebDriverWait(self.browser_openstack, self.delay).until(lambda a: a.find_element_by_id
('available_network')).find_elements_by_tag_name('li')
for net in networks:
vn_match = net.text.split('(')[0]
if (vn_match == fixture.vn_name):
net.find_element_by_class_name('btn').click()
break
WebDriverWait(self.browser_openstack, self.delay).until(lambda a: a.find_element_by_xpath(
"//input[@value='Launch']")).click()
self.webui_common.wait_till_ajax_done(self.browser_openstack)
self.logger.debug('VM %s launched using openstack' %
(fixture.vm_name))
self.logger.info('Waiting for VM %s to come into active state' %
(fixture.vm_name))
time.sleep(10)
rows_os = self.browser_openstack.find_element_by_tag_name('form').find_element_by_tag_name(
'tbody').find_elements_by_tag_name('tr')
for i in range(len(rows_os)):
rows_os = self.browser_openstack.find_element_by_tag_name(
'form')
rows_os = WebDriverWait(rows_os, self.delay).until(
lambda a: a.find_element_by_tag_name('tbody'))
rows_os = WebDriverWait(rows_os, self.delay).until(
lambda a: a.find_elements_by_tag_name('tr'))
if(rows_os[i].find_elements_by_tag_name('td')[1].text == fixture.vm_name):
counter = 0
vm_active = False
while not vm_active:
vm_active_status1 = self.browser_openstack.find_element_by_tag_name('form').find_element_by_tag_name(
'tbody').find_elements_by_tag_name('tr')[i].find_elements_by_tag_name(
'td')[6].text
vm_active_status2 = self.browser_openstack.find_element_by_tag_name('form').find_element_by_tag_name(
'tbody').find_elements_by_tag_name('tr')[i].find_elements_by_tag_name('td')[5].text
if(vm_active_status1 == 'Active' or vm_active_status2 == 'Active'):
self.logger.info(
"%s status changed to Active in openstack" % (fixture.vm_name))
vm_active = True
time.sleep(5)
elif(vm_active_status1 == 'Error' or vm_active_status2 == 'Error'):
self.logger.error(
"%s state went into Error state in openstack" % (fixture.vm_name))
self.browser_openstack.get_screenshot_as_file(
'verify_vm_state_openstack_' + 'fixture.vm_name' + '.png')
return "Error"
else:
self.logger.info(
"%s state is not yet Active in openstack, waiting for more time..." % (fixture.vm_name))
counter = counter + 1
time.sleep(3)
self.browser_openstack.find_element_by_link_text(
'Instances').click()
self.webui_common.wait_till_ajax_done(
self.browser_openstack)
time.sleep(3)
if(counter >= 100):
fixuture.logger.error(
"VM %s failed to come into active state" % (fixture.vm_name))
self.browser_openstack.get_screenshot_as_file(
'verify_vm_not_active_openstack_' + 'fixture.vm_name' + '.png')
break
fixture.vm_obj = fixture.nova_fixture.get_vm_if_present(
fixture.vm_name, fixture.project_fixture.uuid)
fixture.vm_objs = fixture.nova_fixture.get_vm_list(
name_pattern=fixture.vm_name, project_id=fixture.project_fixture.uuid)
except ValueError:
self.logger.error('Error while creating VM %s with image name %s failed in openstack'
% (fixture.vm_name, fixture.image_name))
self.browser_openstack.get_screenshot_as_file(
'verify_vm_error_openstack_' + 'fixture.vm_name' + '.png')
# end create_vm_in_openstack
def vm_delete_in_openstack(self, fixture):
rows = self.browser_openstack.find_element_by_id('instances').find_element_by_tag_name(
'tbody').find_elements_by_tag_name('tr')
for instance in rows:
if fixture.vm_name == instance.find_element_by_tag_name('a').text:
instance.find_elements_by_tag_name(
'td')[0].find_element_by_tag_name('input').click()
break
ln = len(rows)
launch_instance = WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_id('instances__action_terminate')).click()
WebDriverWait(self.browser_openstack, self.delay).until(
lambda a: a.find_element_by_link_text('Terminate Instances')).click()
time.sleep(5)
self.logger.info("VM %s deleted successfully using openstack" %
(fixture.vm_name))
# end vm_delete_in_openstack
def verify_vm_in_webui(self, fixture):
result = True
try:
if not self.webui_common.click_monitor_instances():
result = result and False
rows = self.webui_common.get_rows()
ln = len(rows)
vm_flag = 0
for i in range(len(rows)):
rows_count = len(rows)
vm_name = rows[i].find_elements_by_class_name(
'slick-cell')[1].text
vm_uuid = rows[i].find_elements_by_class_name(
'slick-cell')[2].text
vm_vn = rows[i].find_elements_by_class_name(
'slick-cell')[3].text.split(' ')[0]
if(vm_name == fixture.vm_name and fixture.vm_obj.id == vm_uuid and fixture.vn_name == vm_vn):
self.logger.info(
"VM %s vm exists..will verify row expansion basic details" % (fixture.vm_name))
retry_count = 0
while True:
self.logger.debug("Count is" + str(retry_count))
if retry_count > 20:
self.logger.error('Vm details failed to load')
break
self.browser.find_element_by_xpath(
"//*[@id='mon_net_instances']").find_element_by_tag_name('a').click()
time.sleep(1)
rows = self.webui_common.get_rows()
rows[i].find_elements_by_tag_name(
'div')[0].find_element_by_tag_name('i').click()
try:
retry_count = retry_count + 1
rows = self.webui_common.get_rows()
rows[
i + 1].find_elements_by_class_name('row-fluid')[0].click()
self.webui_common.wait_till_ajax_done(self.browser)
break
except WebDriverException:
pass
rows = self.webui_common.get_rows()
row_details = rows[
i + 1].find_element_by_xpath("//*[contains(@id, 'basicDetails')]").find_elements_by_class_name('row-fluid')[5]
vm_status = row_details.find_elements_by_tag_name(
'div')[8].text
vm_ip_and_mac = row_details.find_elements_by_tag_name(
'div')[2].text
assert vm_status == 'Active'
assert vm_ip_and_mac.splitlines()[0] == fixture.vm_ip
vm_flag = 1
break
assert vm_flag, "VM name or VM uuid or VM ip or VM status verifications in WebUI for VM %s failed" % (
fixture.vm_name)
self.browser.get_screenshot_as_file('vm_create_check.png')
self.logger.info(
"Vm name,vm uuid,vm ip and vm status,vm network verification in WebUI for VM %s passed" %
(fixture.vm_name))
mon_net_networks = WebDriverWait(self.browser, self.delay).until(lambda a: a.find_element_by_id(
'mon_net_networks')).find_element_by_link_text('Networks').click()
time.sleep(4)
self.webui_common.wait_till_ajax_done(self.browser)
rows = self.webui_common.get_rows()
for i in range(len(rows)):
if(rows[i].find_elements_by_class_name('slick-cell')[1].text == fixture.vn_fq_name.split(':')[0] + ":" + fixture.project_name + ":" + fixture.vn_name):
rows[i].find_elements_by_tag_name(
'div')[0].find_element_by_tag_name('i').click()
time.sleep(2)
self.webui_common.wait_till_ajax_done(self.browser)
rows = self.webui_common.get_rows()
vm_ids = rows[
i + 1].find_element_by_xpath("//div[contains(@id, 'basicDetails')]").find_elements_by_class_name('row-fluid')[5].find_elements_by_tag_name('div')[1].text
if fixture.vm_id in vm_ids:
self.logger.info(
"Vm_id matched in webui monitor network basic details page %s" % (fixture.vn_name))
else:
self.logger.error(
"Vm_id not matched in webui monitor network basic details page %s" % (fixture.vm_name))
self.browser.get_screenshot_as_file(
'monitor_page_vm_id_not_match' + fixture.vm_name + fixture.vm_id + '.png')
result = result and False
break
# if self.webui_common.verify_uuid_table(fixture.vm_id):
# self.logger.info( "UUID %s found in UUID Table for %s VM" %(fixture.vm_name,fixture.vm_id))
# else:
# self.logger.error( "UUID %s failed in UUID Table for %s VM" %(fixture.vm_name,fixture.vm_id))
# fq_type='virtual_machine'
# full_fq_name=fixture.vm_id+":"+fixture.vm_id
# if self.webui_common.verify_fq_name_table(full_fq_name,fq_type):
# self.logger.info( "fq_name %s found in fq Table for %s VM" %(fixture.vm_id,fixture.vm_name))
# else:
# self.logger.error( "fq_name %s failed in fq Table for %s VM" %(fixture.vm_id,fixture.vm_name))
self.logger.info("VM verification in WebUI %s passed" %
(fixture.vm_name))
return result
except ValueError:
self.logger.error("vm %s test error " % (fixture.vm_name))
self.browser.get_screenshot_as_file(
'verify_vm_test_openstack_error' + 'fixture.vm_name' + '.png')
# end verify_vm_in_webui
def create_floatingip_pool_webui(self, fixture, pool_name, vn_name):
try:
if not self.webui_common.click_configure_networks():
result = result and False
self.webui_common.select_project(fixture.project_name)
rows = self.webui_common.get_rows()
self.logger.info("Creating floating ip pool %s using webui" %
(pool_name))
for net in rows:
if (net.find_elements_by_class_name('slick-cell')[2].get_attribute('innerHTML') == fixture.vn_name):
net.find_element_by_class_name('icon-cog').click()
self.webui_common.wait_till_ajax_done(self.browser)
time.sleep(3)
self.browser.find_element_by_class_name(
'tooltip-success').find_element_by_tag_name('i').click()
ip_text = net.find_element_by_xpath(
"//span[contains(text(), 'Floating IP Pools')]")
ip_text.find_element_by_xpath(
'..').find_element_by_tag_name('i').click()
route = self.browser.find_element_by_xpath(
"//div[@title='Add Floating IP Pool below']")
route.find_element_by_class_name('icon-plus').click()
self.webui_common.wait_till_ajax_done(self.browser)
self.browser.find_element_by_xpath(
"//input[@placeholder='Pool Name']").send_keys(fixture.pool_name)
self.browser.find_element_by_id(
'fipTuples').find_elements_by_tag_name('input')[1].click()
project_elements = self.browser.find_elements_by_xpath(
"//*[@class = 'select2-match']/..")
self._click_if_element_found(
fixture.project_name, project_elements)
self.webui_common.wait_till_ajax_done(self.browser)
self.browser.find_element_by_xpath(
"//button[@id = 'btnCreateVNOK']").click()
self.webui_common.wait_till_ajax_done(self.browser)
time.sleep(2)
if not self.webui_common.check_error_msg("Creating fip pool"):
raise Exception("Create fip pool failed")
self.logger.info("Fip pool %s created using webui" %
(fixture.pool_name))
break
except ValueError:
self.logger.error("Fip %s Error while creating floating ip pool " %
(fixture.pool_name))
# end create_floatingip_pool_webui
def create_and_assoc_fip_webui(self, fixture, fip_pool_vn_id, vm_id, vm_name, project=None):
try:
fixture.vm_name = vm_name
fixture.vm_id = vm_id
if not self.webui_common.click_configure_networks():
result = result and False
rows = self.webui_common.get_rows()
self.logger.info("Creating and associating fip %s using webui" %
(fip_pool_vn_id))
for net in rows:
if (net.find_elements_by_class_name('slick-cell')[2].get_attribute('innerHTML') == fixture.vn_name):
self.browser.find_element_by_xpath(
"//*[@id='config_net_fip']/a").click()
self.browser.get_screenshot_as_file('fip.png')
time.sleep(3)
self.browser.find_element_by_id('btnCreatefip').click()
self.webui_common.wait_till_ajax_done(self.browser)
time.sleep(1)
pool = self.browser.find_element_by_xpath("//div[@id='s2id_ddFipPool']").find_element_by_tag_name(
'a').click()
time.sleep(2)
self.webui_common.wait_till_ajax_done(self.browser)
fip = self.browser.find_element_by_id(
"select2-drop").find_elements_by_tag_name('li')
for i in range(len(fip)):
if fip[i].find_element_by_tag_name('div').get_attribute("innerHTML") == fixture.project_name + ':' + fixture.vn_name + ':' + fixture.pool_name:
fip[i].click()
self.browser.find_element_by_id('btnCreatefipOK').click()
if not self.webui_common.check_error_msg("Creating Fip"):
raise Exception("Create fip failed")
self.webui_common.wait_till_ajax_done(self.browser)
rows1 = self.webui_common.get_rows()
for element in rows1:
if element.find_elements_by_class_name('slick-cell')[3].get_attribute('innerHTML') == fixture.vn_name + ':' + fixture.pool_name:
element.find_element_by_class_name(
'icon-cog').click()
self.webui_common.wait_till_ajax_done(self.browser)
element.find_element_by_xpath(
"//a[@class='tooltip-success']").click()
self.webui_common.wait_till_ajax_done(self.browser)
break
pool = self.browser.find_element_by_xpath(
"//div[@id='s2id_ddAssociate']").find_element_by_tag_name('a').click()
time.sleep(1)
self.webui_common.wait_till_ajax_done(self.browser)
fip = self.browser.find_element_by_id(
"select2-drop").find_elements_by_tag_name('li')
for i in range(len(fip)):
if fip[i].find_element_by_tag_name('div').get_attribute("innerHTML").split(' ')[1] == vm_id:
fip[i].click()
self.browser.find_element_by_id(
'btnAssociatePopupOK').click()
self.webui_common.wait_till_ajax_done(self.browser)
if not self.webui_common.check_error_msg("Fip Associate"):
raise Exception("Fip association failed")
time.sleep(1)
break
except ValueError:
self.logger.info(
"Error while creating floating ip and associating it.")
# end create_and_assoc_fip_webui
def verify_fip_in_webui(self, fixture):
if not self.webui_common.click_configure_networks():
result = result and False
rows = WebDriverWait(self.browser, self.delay).until(lambda a: a.find_element_by_id(
'gridVN')).find_element_by_tag_name('tbody').find_elements_by_tag_name('tr')
for i in range(len(rows)):
vn_name = rows[i].find_elements_by_tag_name('td')[2].text
if vn_name == fixture.vn_name:
rows[i].find_elements_by_tag_name(
'td')[0].find_element_by_tag_name('a').click()
rows = self.webui_common.get_rows()
fip_check = rows[
i + 1].find_elements_by_xpath("//td/div/div/div")[1].text
if fip_check.split('\n')[1].split(' ')[0] == fixture.pool_name:
self.logger.info(
"Fip pool %s verified in WebUI configure network page" % (fixture.pool_name))
break
WebDriverWait(self.browser, self.delay).until(
lambda a: a.find_element_by_xpath("//*[@id='config_net_fip']/a")).click()
self.webui_common.wait_till_ajax_done(self.browser)
rows = self.browser.find_element_by_xpath(
"//div[@id='gridfip']/table/tbody").find_elements_by_tag_name('tr')
for i in range(len(rows)):
fip = rows[i].find_elements_by_tag_name('td')[3].text.split(':')[1]
vn = rows[i].find_elements_by_tag_name('td')[3].text.split(':')[0]
fip_ip = rows[i].find_elements_by_class_name('slick-cell')[1].text
if rows[i].find_elements_by_tag_name('td')[2].text == fixture.vm_id:
if vn == fixture.vn_name and fip == fixture.pool_name:
self.logger.info("Fip is found attached with vm %s " %
(fixture.vm_name))
self.logger.info("VM %s is found associated with FIP %s " %
(fixture.vm_name, fip))
else:
self.logger.info(
"Association of %s VM failed with FIP %s " %
(fixture.vm_name, fip))
break
if not self.webui_common.click_monitor_instances():
result = result and False
rows = self.browser.find_element_by_class_name('k-grid-content').find_element_by_tag_name(
'tbody').find_elements_by_tag_name('tr')
ln = len(rows)
vm_flag = 0
for i in range(len(rows)):
vm_name = rows[i].find_elements_by_tag_name(
'td')[1].find_element_by_tag_name('div').text
vm_uuid = rows[i].find_elements_by_tag_name('td')[2].text
vm_vn = rows[i].find_elements_by_tag_name(
'td')[3].text.split(' ')[0]
if(vm_name == fixture.vm_name and fixture.vm_id == vm_uuid and vm_vn == fixture.vn_name):
rows[i].find_elements_by_tag_name(
'td')[0].find_element_by_tag_name('a').click()
self.webui_common.wait_till_ajax_done(self.browser)
rows = self.browser.find_element_by_class_name('k-grid-content').find_element_by_tag_name(
'tbody').find_elements_by_tag_name('tr')
fip_check_vm = rows[i + 1].find_element_by_xpath("//*[contains(@id, 'basicDetails')]"
).find_elements_by_tag_name('div')[0].find_elements_by_tag_name('div')[1].text
if fip_check_vm.split(' ')[0] == fip_ip and fip_check_vm.split(
' ')[1] == '\(' + 'default-domain' + ':' + fixture.project_name + ':' + fixture.vn_name + '\)':
self.logger.info(
"FIP verified in monitor instance page for vm %s " % (fixture.vm_name))
else:
self.logger.info(
"FIP failed to verify in monitor instance page for vm %s" % (fixture.vm_name))
break
# end verify_fip_in_webui
def delete_fip_in_webui(self, fixture):
if not self.webui_common.click_configure_fip():
result = result and False
rows = self.browser.find_element_by_id('gridfip').find_element_by_tag_name(
'tbody').find_elements_by_tag_name('tr')
for net in rows:
if (net.find_elements_by_tag_name('td')[2].get_attribute('innerHTML') == fixture.vm_id):
net.find_elements_by_tag_name('td')[5].find_element_by_tag_name(
'div').find_element_by_tag_name('div').click()
self.webui_common.wait_till_ajax_done(self.browser)
net.find_element_by_xpath(
"//a[@class='tooltip-error']").click()
self.webui_common.wait_till_ajax_done(self.browser)
WebDriverWait(self.browser, self.delay).until(
lambda a: a.find_element_by_id('btnDisassociatePopupOK')).click()
self.webui_common.wait_till_ajax_done(self.browser)
self.webui_common.wait_till_ajax_done(self.browser)
rows = self.browser.find_element_by_id('gridfip').find_element_by_tag_name(
'tbody').find_elements_by_tag_name('tr')
for net in rows:
if (net.find_elements_by_tag_name('td')[3].get_attribute('innerHTML') == fixture.vn_name + ':' + fixture.pool_name):
net.find_elements_by_tag_name(
'td')[0].find_element_by_tag_name('input').click()
WebDriverWait(self.browser, self.delay).until(
lambda a: a.find_element_by_id('btnDeletefip')).click()
WebDriverWait(self.browser, self.delay).until(
lambda a: a.find_element_by_id('btnCnfReleasePopupOK')).click()
if not self.webui_common.click_configure_networks():
result = result and False
rows = self.webui_common.get_rows()
for net in rows:
if (net.find_elements_by_tag_name('td')[2].get_attribute('innerHTML') == fixture.vn_name):
net.find_element_by_class_name('dropdown-toggle').click()
net.find_elements_by_tag_name(
'li')[0].find_element_by_tag_name('a').click()
ip_text = net.find_element_by_xpath(
"//span[contains(text(), 'Floating IP Pools')]")
ip_text.find_element_by_xpath(
'..').find_element_by_tag_name('i').click()
pool_con = self.browser.find_element_by_id('fipTuples')
fip = pool_con.find_elements_by_xpath(
"//*[contains(@id, 'rule')]")
for pool in fip:
if(pool.find_element_by_tag_name('input').get_attribute('value') == fixture.pool_name):
pool.find_element_by_class_name(
'icon-minus').click()
self.browser.find_element_by_xpath(
"//button[@id = 'btnCreateVNOK']").click()
break
# end delete_fip_in_webui
| 60.406206 | 542 | 0.542983 | 215,459 | 0.997158 | 0 | 0 | 0 | 0 | 0 | 0 | 45,500 | 0.210577 |
1b416d5cad38f7b81e96e9dfe23519772715bfa6 | 2,064 | py | Python | generate_readme.py | mlazaric/Chebyshev | d55dc6f0a1ff2ac3e037476a9c3785ff6953c0a6 | [
"MIT"
] | 14 | 2018-12-05T18:02:42.000Z | 2022-02-09T08:16:23.000Z | generate_readme.py | mlazaric/Chebyshev | d55dc6f0a1ff2ac3e037476a9c3785ff6953c0a6 | [
"MIT"
] | 2 | 2018-12-21T19:07:23.000Z | 2022-01-10T02:55:37.000Z | generate_readme.py | mlazaric/Chebyshev | d55dc6f0a1ff2ac3e037476a9c3785ff6953c0a6 | [
"MIT"
] | null | null | null | #! /bin/python
import re
from sympy import exp, ln, sin, cos
from sympy.abc import x
from chebyshev.approximation import get_best_approximation
# Accumulators filled in by generate_for_readme() and rendered to stdout
# at the bottom of this script (TOC links, then per-function sections).
table_of_contents = []
function_entries = []
def _save_plot(plot, filename):
    """Render a (non-shown) sympy plot object to *filename* at 300 dpi."""
    backend = plot.backend(plot)
    backend.process_series()
    backend.fig.savefig(filename, dpi=300)


def generate_for_readme(approximation):
    """Record README entries and plot images for one approximation.

    Appends a (title, anchor) pair to the module-level ``table_of_contents``
    and a markdown section to ``function_entries``, and writes the
    approximation / absolute-error plots under ``images/``.
    """
    # Spaces and slashes are not filesystem friendly; replace them.
    function_filename = re.sub(r'( |/)', '_', str(approximation.function))
    approximation_plot_filename = f'images/{function_filename}_approximation.png'
    absolute_error_plot_filename = f'images/{function_filename}_absolute_error.png'
    # Anchor: drop '+', '/', '(' and ')' and turn spaces into '-' so the
    # TOC link matches the generated markdown heading id.
    table_of_contents.append((str(approximation.function),
                              re.sub(r'(\+|/|\(|\))', '', str(approximation.function))
                              .replace(' ', '-')))
    function_entries.append(f"""## `{approximation.function}`
Coefficients for `{approximation.function}` on the `[{approximation.interval[0]}, {approximation.interval[1]}]` interval:
{approximation.get_coeffs_as_table()}
Maximum error on that interval is `{approximation.get_error()}`
<img src="{approximation_plot_filename}" alt="{approximation_plot_filename}" width="50%"><img src="{absolute_error_plot_filename}" alt="{absolute_error_plot_filename}" width="50%">
""")
    # The two plots shared identical save logic; factored into _save_plot.
    _save_plot(approximation.plot_approximation(show=False),
               approximation_plot_filename)
    _save_plot(approximation.plot_absolute_error(show=False),
               absolute_error_plot_filename)
# Build README sections for each approximated function.
generate_for_readme(get_best_approximation(exp(x), (0, 1), 6, 7, point=0.5))
generate_for_readme(get_best_approximation(ln(1 + x), (0, 1), 6, 20))
generate_for_readme(get_best_approximation(sin(x) / x, (-1, 1), 8, 18))
generate_for_readme(get_best_approximation(cos(x), (-1, 1), 8, 18))
# Emit the README body (table of contents, then the per-function entries)
# to stdout; redirect to README.md when regenerating the docs.
print('## Approximated Functions\n')
for (function_name, function_link) in table_of_contents:
    print(f'* [`{function_name}`](#{function_link})')
print('\n')
for function_entry in function_entries:
    print(function_entry)
| 35.586207 | 180 | 0.718508 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 678 | 0.328488 |
1b42bdaa222371750f61ef24741923cb5ba6d385 | 3,392 | py | Python | tests/test_datasets.py | gau-nernst/CenterNet | bd6a21813a63310e3b1c77cefe24077b72d1092a | [
"MIT"
] | 47 | 2021-08-10T09:30:53.000Z | 2022-03-29T07:53:43.000Z | tests/test_datasets.py | gau-nernst/CenterNet | bd6a21813a63310e3b1c77cefe24077b72d1092a | [
"MIT"
] | 1 | 2021-08-07T13:46:49.000Z | 2021-08-07T13:46:49.000Z | tests/test_datasets.py | gau-nernst/CenterNet | bd6a21813a63310e3b1c77cefe24077b72d1092a | [
"MIT"
] | 6 | 2021-08-12T02:40:43.000Z | 2022-01-31T16:12:40.000Z | import random
import pytest
import numpy as np
import torch
from torch.utils.data import DataLoader
from centernet_lightning.datasets import COCODataset, VOCDataset, CrowdHumanDataset, MOTTrackingSequence, MOTTrackingDataset, KITTITrackingSequence, KITTITrackingDataset
from centernet_lightning.datasets.utils import get_default_detection_transforms, get_default_tracking_transforms, CollateDetection, CollateTracking
from centernet_lightning.datasets.builder import build_dataset, build_dataloader
def generate_detection_dataset_configs():
pass
def generate_tracking_dataset_configs():
pass
class TestDetectionDataset:
dataset_configs = generate_detection_dataset_configs()
def test_attributes(self, constructor, data_dir, split, name_to_label):
dataset = constructor(data_dir, split, name_to_label)
assert isinstance(len(dataset), int)
def test_get_item(self, constructor, data_dir, split, name_to_label):
dataset = constructor(data_dir, split, name_to_label)
for item in random.sample(dataset, 10):
assert isinstance(item["image"], np.ndarray)
assert item["image"].shape[-1] == 3
assert isinstance(item["bboxes"], list)
for box in item["bboxes"]:
assert len(box) == 4
for x in box:
assert 0 <= x <= 1
assert isinstance(item["labels"], list)
assert len(item["labels"]) == len(item["bboxes"])
transforms = get_default_detection_transforms()
dataset = constructor(data_dir, split, name_to_label, transforms=transforms)
for item in random.sample(dataset, 10):
assert isinstance(item["image"], torch.Tensor)
assert item["image"].shape[0] == 3
assert isinstance(item["bboxes"], tuple)
assert isinstance(item["labels"], tuple)
assert len(item["bboxes"]) == len(item["labels"])
def test_dataloader(self, constructor, data_dir, split, name_to_label):
batch_size = 4
transforms = get_default_detection_transforms()
collate_fn = CollateDetection()
dataset = constructor(data_dir, split, name_to_label, transforms=transforms)
dataloader = DataLoader(dataset, batch_size=batch_size, collate_fn=collate_fn)
batch = next(iter(dataloader))
img = batch["image"]
assert isinstance(img, torch.Tensor)
assert img.shape[0] == batch_size
bboxes = batch["bboxes"]
assert isinstance(bboxes, torch.Tensor)
assert bboxes.shape[0] == batch_size
assert bboxes.max() <= 1
assert bboxes.min() >= 0
labels = batch["labels"]
assert isinstance(labels, torch.Tensor)
assert labels.shape[0] == batch_size
mask = batch["mask"]
assert isinstance(mask, torch.Tensor)
assert mask.shape[0] == batch_size
for x in mask.view(-1):
assert x == 0 or x == 1
assert bboxes.shape[1] == labels.shape[1] == mask.shape[1]
def test_builder(self):
pass
class TestTrackingDataset:
    """Placeholder suite for the tracking datasets (MOT / KITTI).

    Mirrors TestDetectionDataset; all bodies are still to be implemented.
    """
    dataset_configs = generate_tracking_dataset_configs()
    def test_attributes(self):
        pass
    def test_get_item(self):
        pass
    def test_dataloader(self):
        pass
    def test_builder(self):
        pass
1b437fea1f8494157ef38e2b0fdd72c52d6e8cfe | 768 | py | Python | docs/run.py | imrehg/dash-bootstrap-components | 7cf43168808bb88b243e414168dc3bf196fefd84 | [
"Apache-2.0"
] | 1 | 2022-01-12T12:36:20.000Z | 2022-01-12T12:36:20.000Z | docs/run.py | imrehg/dash-bootstrap-components | 7cf43168808bb88b243e414168dc3bf196fefd84 | [
"Apache-2.0"
] | null | null | null | docs/run.py | imrehg/dash-bootstrap-components | 7cf43168808bb88b243e414168dc3bf196fefd84 | [
"Apache-2.0"
] | null | null | null | from werkzeug.middleware.dispatcher import DispatcherMiddleware
from components_page import register_apps as register_component_apps
from examples import register_apps as register_example_apps
from markdown_to_html import convert_all_markdown_files
from server import create_server
convert_all_markdown_files()
server = create_server()
component_routes = register_component_apps()
example_routes = register_example_apps()
routes = {**component_routes, **example_routes}
application = DispatcherMiddleware(
server, {slug: app.server for slug, app in routes.items()}
)
if __name__ == "__main__":
import os
from werkzeug.serving import run_simple
os.environ["DBC_DOCS_MODE"] = "dev"
run_simple("localhost", 8888, application, use_reloader=True)
| 30.72 | 68 | 0.807292 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 41 | 0.053385 |
1b44cd65539b9da5eb8666339aae04096641f0bd | 1,962 | py | Python | myia/operations/op_array_getitem_wrap.py | strint/myia | 3d00d3fb3df80ab7a264a724226c5f56c6ff1a8a | [
"MIT"
] | 222 | 2019-02-13T07:56:28.000Z | 2022-03-28T07:07:54.000Z | myia/operations/op_array_getitem_wrap.py | strint/myia | 3d00d3fb3df80ab7a264a724226c5f56c6ff1a8a | [
"MIT"
] | 107 | 2019-02-12T21:56:39.000Z | 2022-03-12T01:08:03.000Z | myia/operations/op_array_getitem_wrap.py | strint/myia | 3d00d3fb3df80ab7a264a724226c5f56c6ff1a8a | [
"MIT"
] | 27 | 2017-11-14T17:58:15.000Z | 2019-01-14T01:36:09.000Z | """Implementation of the 'array_getitem_wrap' operation."""
from ..lib import Slice, core, myia_static
from ..operations import array_getitem, reshape
def _dim_explicit(dim, dim_size):
if dim < 0:
dim = dim_size + dim
assert dim >= 0
return dim
@myia_static
def _build_slices(a_shp, item):
begin = ()
end = ()
stride = ()
remove_dims = ()
for adx, a in enumerate(a_shp):
if adx < len(item):
i = item[adx]
if isinstance(i, (slice, Slice)):
begin = begin + (
0 if i.start is None else _dim_explicit(i.start, a),
)
end = end + (a if i.stop is None else _dim_explicit(i.stop, a),)
stride = stride + (1 if i.step is None else i.step,)
remove_dims = remove_dims + (False,)
else:
begin = begin + (_dim_explicit(i, a),)
end = end + (_dim_explicit(i, a) + 1,)
stride = stride + (1,)
remove_dims = remove_dims + (True,)
else:
begin = begin + (0,)
end = end + (a,)
stride = stride + (1,)
remove_dims = remove_dims + (False,)
return begin, end, stride, remove_dims
@core
def array_getitem_wrap(array, item):
"""Implementation of `array_getitem`."""
if isinstance(item, tuple):
begin, end, stride, remove_dims = _build_slices(array.shape, item)
else:
begin, end, stride, remove_dims = _build_slices(array.shape, (item,))
ret = array_getitem(array, begin, end, stride)
final_shape = ()
for o, r in zip(ret.shape, remove_dims):
if not r:
final_shape = final_shape + (o,)
ret = reshape(ret, final_shape)
return ret
__operation_defaults__ = {
"name": "array_getitem_wrap",
"registered_name": "array_getitem_wrap",
"mapping": array_getitem_wrap,
"python_implementation": None,
}
| 30.184615 | 80 | 0.563201 | 0 | 0 | 0 | 0 | 1,508 | 0.768603 | 0 | 0 | 194 | 0.098879 |
1b4626f96dec1d654d95e876fe84b62e1e6877b4 | 1,426 | py | Python | fledgling/app/entity/task.py | PracticalCleanArchitecture/fledgling | ec2fdd8446c0ecb94bdd8f8cfaec4c264b546683 | [
"MIT"
] | 1 | 2021-08-28T16:26:44.000Z | 2021-08-28T16:26:44.000Z | fledgling/app/entity/task.py | PracticalCleanArchitecture/fledgling | ec2fdd8446c0ecb94bdd8f8cfaec4c264b546683 | [
"MIT"
] | null | null | null | fledgling/app/entity/task.py | PracticalCleanArchitecture/fledgling | ec2fdd8446c0ecb94bdd8f8cfaec4c264b546683 | [
"MIT"
] | null | null | null | # -*- coding: utf8 -*-
from abc import ABC, abstractmethod
from datetime import datetime
from enum import Enum
from typing import List, Optional, Tuple, Union
class TaskRepositoryError(Exception):
    """Raised when a task repository operation fails."""
    pass
class TaskStatus(Enum):
    """Lifecycle states of a task."""
    CREATED = 1
    FINISHED = 2
class Task:
    """Domain entity representing a task.

    Instances are normally created through the :meth:`new` factory; the
    plain constructor only initialises empty attributes.
    """

    def __init__(self):
        self.brief = None       # short description of the task
        self.id = None          # persistence identifier (None until stored)
        self.keywords = []      # free-form tags attached to the task
        self.status = None      # a TaskStatus member, or None if unknown

    @classmethod
    def new(cls, *, brief, id_=None, keywords: List[str] = None,
            status: TaskStatus = None) -> 'Task':
        """Factory constructor.

        Uses ``cls`` rather than the hard-coded class name so that
        subclasses calling ``Sub.new(...)`` get ``Sub`` instances.
        """
        instance = cls()
        instance.brief = brief
        instance.id = id_
        instance.keywords = keywords or []
        instance.status = status
        return instance

    def is_finished(self) -> bool:
        """Return True when the task has been marked finished."""
        return self.status == TaskStatus.FINISHED
class ITaskRepository(ABC):
    """Abstract persistence interface for :class:`Task` entities."""

    @abstractmethod
    def add(self, task: Task) -> Task:
        """Persist a new task and return the stored entity."""
        pass

    @abstractmethod
    def get_by_id(self, id_) -> Union[None, Task]:
        """
        Fetch the task with the given id, or ``None`` when it does not exist.
        """
        pass

    @abstractmethod
    def list(self, *, keyword: Optional[str] = None, page, per_page,
             plan_trigger_time: Optional[Tuple[datetime, datetime]] = None,
             status: Optional[int] = None,
             task_ids: Union[None, List[int]] = None):
        """
        List tasks matching the given filters, paginated by page/per_page.
        """
        pass

    @abstractmethod
    def remove(self, *, task_id: int):
        """Delete the task with the given id."""
        pass
| 22.634921 | 75 | 0.581346 | 1,281 | 0.882231 | 0 | 0 | 899 | 0.619146 | 0 | 0 | 115 | 0.079201 |
1b462ceaf5cd2d06ce934814a8e36a43af26e4ed | 977 | py | Python | turbustat/tests/test_vca.py | keflavich/TurbuStat | a6fac4c0d10473a74c62cce4a9c6a30773a955b1 | [
"MIT"
] | null | null | null | turbustat/tests/test_vca.py | keflavich/TurbuStat | a6fac4c0d10473a74c62cce4a9c6a30773a955b1 | [
"MIT"
] | null | null | null | turbustat/tests/test_vca.py | keflavich/TurbuStat | a6fac4c0d10473a74c62cce4a9c6a30773a955b1 | [
"MIT"
] | null | null | null | # Licensed under an MIT open source license - see LICENSE
'''
Test functions for VCA
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import VCA, VCA_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testVCA(TestCase):
    """Regression tests for the VCA statistic and its distance metric."""

    def setUp(self):
        # Shared fixture cubes provided by _testing_data.
        self.dataset1 = dataset1
        self.dataset2 = dataset2

    def test_VCA_method(self):
        # slice_sizes=[1.0] matches the setting used to generate the
        # precomputed reference values in computed_data.
        self.tester = VCA(dataset1["cube"][0],
                          dataset1["cube"][1],
                          slice_sizes=[1.0])
        self.tester.run()

        assert np.allclose(self.tester.ps1D, computed_data['vca_val'])

    def test_VCA_distance(self):
        # Compare against the stored regression value of the distance metric.
        self.tester_dist = \
            VCA_Distance(dataset1["cube"],
                         dataset2["cube"]).distance_metric()
        npt.assert_almost_equal(self.tester_dist.distance,
                                computed_distances['vca_distance'])
1b469269698aba27e175e7d85e9c7b4f767ceec3 | 7,254 | py | Python | ElectroWeakAnalysis/ZMuMu/python/ZMuMuCategoriesPlots_cff.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 6 | 2017-09-08T14:12:56.000Z | 2022-03-09T23:57:01.000Z | ElectroWeakAnalysis/ZMuMu/python/ZMuMuCategoriesPlots_cff.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 545 | 2017-09-19T17:10:19.000Z | 2022-03-07T16:55:27.000Z | ElectroWeakAnalysis/ZMuMu/python/ZMuMuCategoriesPlots_cff.py | SWuchterl/cmssw | 769b4a7ef81796579af7d626da6039dfa0347b8e | [
"Apache-2.0"
] | 14 | 2017-10-04T09:47:21.000Z | 2019-10-23T18:04:45.000Z | import FWCore.ParameterSet.Config as cms
from ElectroWeakAnalysis.ZMuMu.ZMuMuCategoriesSequences_cff import *
import copy
# Histogram definitions shared by every Z->mumu plot module below:
# the dimuon invariant mass and the leading/trailing muon transverse momenta.
zPlots = cms.PSet(
    histograms = cms.VPSet(
        cms.PSet(
            min = cms.untracked.double(0.0),
            max = cms.untracked.double(200.0),
            nbins = cms.untracked.int32(200),
            name = cms.untracked.string("zMass"),
            description = cms.untracked.string("Z mass [GeV/c^{2}]"),
            plotquantity = cms.untracked.string("mass")
        ),
        cms.PSet(
            min = cms.untracked.double(0.0),
            max = cms.untracked.double(200.0),
            nbins = cms.untracked.int32(200),
            name = cms.untracked.string("mu1Pt"),
            description = cms.untracked.string("Highest muon p_{t} [GeV/c]"),
            plotquantity = cms.untracked.string("max(daughter(0).pt,daughter(1).pt)")
        ),
        cms.PSet(
            min = cms.untracked.double(0.0),
            max = cms.untracked.double(200.0),
            nbins = cms.untracked.int32(200),
            name = cms.untracked.string("mu2Pt"),
            description = cms.untracked.string("Lowest muon p_{t} [GeV/c]"),
            plotquantity = cms.untracked.string("min(daughter(0).pt,daughter(1).pt)")
        )
    )
)
# ZMuMu at least 1 HLT + 2 track-iso (Shape)
# Loose-selection histogram analyzer.
goodZToMuMuPlotsLoose = cms.EDAnalyzer(
    "CandViewHistoAnalyzer",
    zPlots,
    src = cms.InputTag("goodZToMuMuAtLeast1HLTLoose")
)

# Same histograms for the tight selection; deep-copied below for every
# other category with only the `src` collection changed.
goodZToMuMuPlots = cms.EDAnalyzer(
    "CandViewHistoAnalyzer",
    zPlots,
    src = cms.InputTag("goodZToMuMuAtLeast1HLT")
)
## #### plot for loose cuts
## goodZToMuMuSequence.__iadd__(goodZToMuMuPlots)
## goodZToMuMuSequence.setLabel("goodZToMuMuAtLeast1HLT")
## #ZMuMu 2 HLT + 2 track-iso
## goodZToMuMu2HLTPlots = copy.deepcopy(goodZToMuMuPlots)
## goodZToMuMu2HLTPlots.src = cms.InputTag("goodZToMuMu2HLT")
## goodZToMuMu2HLTSequence.__iadd__(goodZToMuMu2HLTPlots)
## goodZToMuMu2HLTSequence.setLabel("goodZToMuMu2HLT")
## #ZMuMu 1 HLT + 2 track-iso
## goodZToMuMu1HLTPlots = copy.deepcopy(goodZToMuMuPlots)
## goodZToMuMu1HLTPlots.src = cms.InputTag("goodZToMuMu1HLT")
## goodZToMuMu1HLTSequence.__iadd__(goodZToMuMu1HLTPlots)
## #ZMuMu at least 1 HLT + at least 1 NON track-iso
## nonIsolatedZToMuMuPlots = copy.deepcopy(goodZToMuMuPlots)
## nonIsolatedZToMuMuPlots.src = cms.InputTag("nonIsolatedZToMuMuAtLeast1HLT")
## nonIsolatedZToMuMuSequence.__iadd__(nonIsolatedZToMuMuPlots)
## #ZMuMu at least 1 HLT + 1 NON track-iso
## oneNonIsolatedZToMuMuPlots = copy.deepcopy(goodZToMuMuPlots)
## oneNonIsolatedZToMuMuPlots.src = cms.InputTag("oneNonIsolatedZToMuMuAtLeast1HLT")
## oneNonIsolatedZToMuMuSequence.__iadd__(oneNonIsolatedZToMuMuPlots)
## #ZMuMu at least 1 HLT + 2 NON track-iso
## twoNonIsolatedZToMuMuPlots = copy.deepcopy(goodZToMuMuPlots)
## twoNonIsolatedZToMuMuPlots.src = cms.InputTag("twoNonIsolatedZToMuMuAtLeast1HLT")
## twoNonIsolatedZToMuMuSequence.__iadd__(twoNonIsolatedZToMuMuPlots)
## #ZMuSta First HLT + 2 track-iso
## goodZToMuMuOneStandAloneMuonPlots = copy.deepcopy(goodZToMuMuPlots)
## goodZToMuMuOneStandAloneMuonPlots.src = cms.InputTag("goodZToMuMuOneStandAloneMuonFirstHLT")
## goodZToMuMuOneStandAloneMuonSequence.__iadd__(goodZToMuMuOneStandAloneMuonPlots)
## #ZMuTk First HLT + 2 track-iso
## goodZToMuMuOneTrackPlots = copy.deepcopy(goodZToMuMuPlots)
## goodZToMuMuOneTrackPlots.src = cms.InputTag("goodZToMuMuOneTrackFirstHLT")
## goodZToMuMuOneTrackSequence.__iadd__(goodZToMuMuOneTrackPlots)
## #ZMuMu same charge
## goodZToMuMuSameChargeAtLeast1HLTPlots = copy.deepcopy(goodZToMuMuPlots)
## goodZToMuMuSameChargeAtLeast1HLTPlots.src = cms.InputTag("goodZToMuMuSameChargeAtLeast1HLT")
## goodZToMuMuSameChargeSequence.__iadd__(goodZToMuMuSameChargeAtLeast1HLTPlots)
## goodZToMuMuSameCharge2HLTPlots = copy.deepcopy(goodZToMuMuPlots)
## goodZToMuMuSameCharge2HLTPlots.src = cms.InputTag("goodZToMuMuSameCharge2HLT")
## goodZToMuMuSameCharge2HLTSequence.__iadd__(goodZToMuMuSameCharge2HLTPlots)
## goodZToMuMuSameCharge1HLTPlots = copy.deepcopy(goodZToMuMuPlots)
## goodZToMuMuSameCharge1HLTPlots.src = cms.InputTag("goodZToMuMuSameCharge1HLT")
## goodZToMuMuSameCharge1HLTSequence.__iadd__(goodZToMuMuSameCharge1HLTPlots)
#### plot for tight cuts
# Pattern repeated below: clone the base analyzer, point `src` at the
# category's candidate collection, and append the clone to that category's path.
goodZToMuMuPath.__iadd__(goodZToMuMuPlots)
goodZToMuMuPath.setLabel("goodZToMuMuAtLeast1HLT")

#ZMuMu 2 HLT + 2 track-iso
goodZToMuMu2HLTPlots = copy.deepcopy(goodZToMuMuPlots)
goodZToMuMu2HLTPlots.src = cms.InputTag("goodZToMuMu2HLT")
goodZToMuMu2HLTPath.__iadd__(goodZToMuMu2HLTPlots)
goodZToMuMu2HLTPath.setLabel("goodZToMuMu2HLT")

#ZMuMu 1 HLT + 2 track-iso
goodZToMuMu1HLTPlots= copy.deepcopy(goodZToMuMuPlots)
goodZToMuMu1HLTPlots.src = cms.InputTag("goodZToMuMu1HLT")
goodZToMuMu1HLTPath.__iadd__(goodZToMuMu1HLTPlots)

##### plot for AB and BB region
goodZToMuMuAB1HLTPlots= copy.deepcopy(goodZToMuMuPlots)
goodZToMuMuAB1HLTPlots.src = cms.InputTag("goodZToMuMuAB1HLT")
goodZToMuMuAB1HLTPath.__iadd__(goodZToMuMuAB1HLTPlots)

goodZToMuMuBB2HLTPlots= copy.deepcopy(goodZToMuMuPlots)
goodZToMuMuBB2HLTPlots.src = cms.InputTag("goodZToMuMuBB2HLT")
goodZToMuMuBB2HLTPath.__iadd__(goodZToMuMuBB2HLTPlots)

#ZMuMu at least 1 HLT + at least 1 NON track-iso
nonIsolatedZToMuMuPlots = copy.deepcopy(goodZToMuMuPlots)
nonIsolatedZToMuMuPlots.src = cms.InputTag("nonIsolatedZToMuMuAtLeast1HLT")
nonIsolatedZToMuMuPath.__iadd__(nonIsolatedZToMuMuPlots)

#ZMuMu at least 1 HLT + 1 NON track-iso
oneNonIsolatedZToMuMuPlots = copy.deepcopy(goodZToMuMuPlots)
oneNonIsolatedZToMuMuPlots.src = cms.InputTag("oneNonIsolatedZToMuMuAtLeast1HLT")
oneNonIsolatedZToMuMuPath.__iadd__(oneNonIsolatedZToMuMuPlots)

#ZMuMu at least 1 HLT + 2 NON track-iso
twoNonIsolatedZToMuMuPlots = copy.deepcopy(goodZToMuMuPlots)
twoNonIsolatedZToMuMuPlots.src = cms.InputTag("twoNonIsolatedZToMuMuAtLeast1HLT")
twoNonIsolatedZToMuMuPath.__iadd__(twoNonIsolatedZToMuMuPlots)

#ZMuSta global HLT + 2 track-iso
goodZToMuMuOneStandAloneMuonPlots = copy.deepcopy(goodZToMuMuPlots)
goodZToMuMuOneStandAloneMuonPlots.src = cms.InputTag("goodZToMuMuOneStandAloneMuonFirstHLT")
goodZToMuMuOneStandAloneMuonPath.__iadd__(goodZToMuMuOneStandAloneMuonPlots)

#ZMuTk First HLT + 2 track-iso
goodZToMuMuOneTrackPlots = copy.deepcopy(goodZToMuMuPlots)
goodZToMuMuOneTrackPlots.src = cms.InputTag("goodZToMuMuOneTrackFirstHLT")
goodZToMuMuOneTrackPath.__iadd__(goodZToMuMuOneTrackPlots)

#ZMuTkMu global HLT + 2 track-iso
goodZToMuMuOneTrackerMuonPlots = copy.deepcopy(goodZToMuMuPlots)
goodZToMuMuOneTrackerMuonPlots.src = cms.InputTag("goodZToMuMuOneTrackerMuonFirstHLT")
goodZToMuMuOneTrackerMuonPath.__iadd__(goodZToMuMuOneTrackerMuonPlots)

#ZMuMu same charge
goodZToMuMuSameChargeAtLeast1HLTPlots = copy.deepcopy(goodZToMuMuPlots)
goodZToMuMuSameChargeAtLeast1HLTPlots.src = cms.InputTag("goodZToMuMuSameChargeAtLeast1HLT")
goodZToMuMuSameChargePath.__iadd__(goodZToMuMuSameChargeAtLeast1HLTPlots)

goodZToMuMuSameCharge2HLTPlots = copy.deepcopy(goodZToMuMuPlots)
goodZToMuMuSameCharge2HLTPlots.src = cms.InputTag("goodZToMuMuSameCharge2HLT")
goodZToMuMuSameCharge2HLTPath.__iadd__(goodZToMuMuSameCharge2HLTPlots)

goodZToMuMuSameCharge1HLTPlots = copy.deepcopy(goodZToMuMuPlots)
goodZToMuMuSameCharge1HLTPlots.src = cms.InputTag("goodZToMuMuSameCharge1HLT")
goodZToMuMuSameCharge1HLTPath.__iadd__(goodZToMuMuSameCharge1HLTPlots)
1b4acf3e59a5b34f3e0546db195f5c9514fda9a2 | 1,385 | py | Python | pages/index.py | cshields143/unit2project | f8ac979137a421758b56452321886955d596b985 | [
"MIT"
] | null | null | null | pages/index.py | cshields143/unit2project | f8ac979137a421758b56452321886955d596b985 | [
"MIT"
] | null | null | null | pages/index.py | cshields143/unit2project | f8ac979137a421758b56452321886955d596b985 | [
"MIT"
] | null | null | null |
# Imports from 3rd party libraries
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import plotly.express as px
# Imports from this application
from app import app
# Page layout: a "Guess the QB!" game column — trigger button, prompt area,
# and a row holding the scoreboard, the answer options, and guess feedback.
# The id'd containers are filled by callbacks elsewhere in the app.
layout = dbc.Col([
    html.Button('Guess the QB!', id='trigger-game-round', style={'font-size':'2em','font-weight':'bold', 'text-transform':'uppercase', 'width':'10em', 'display':'block', 'margin':'0 auto'}),
    # Populated with the current round's prompt.
    html.Div(id='prompt-bucket', style={'font-size':'1.5em', 'width':'18em', 'margin':'1.5em auto'}),
    dbc.Row([
        dbc.Col([
            # 'Forget' clears the stored scores.
            html.Button('Forget', id='trigger-score-clear', style={'margin-bottom':'0.5em'}),
            html.Table([
                html.Thead([
                    html.Tr([
                        html.Th('Player', style={'padding':'0.25em 0.5em'}),
                        html.Th('Score', style={'padding':'0.25em 0.5em'})
                    ])
                ], style={'background':'#ddd', 'border-bottom':'1px solid #666'}),
                html.Tbody(id='score-output', style={'text-align':'center'})
            ])
        ]),
        dbc.Col(id='options-output', style={'font-size':'1.5em', 'text-align':'center'}),
        dbc.Col(id='guess-outputs', style={'font-size':'1.5em', 'text-transform':'uppercase', 'font-weight':'bold'})
    ])
])
1b4b3d3ab6ebe0ec3481ce7190fc80763629166b | 937 | py | Python | applications/tensorflow/contrastive_divergence_vae/utils/train_utils.py | Splendon/examples | ed4a8a01857b6ddca49559141acf5d0986eb01e1 | [
"MIT"
] | null | null | null | applications/tensorflow/contrastive_divergence_vae/utils/train_utils.py | Splendon/examples | ed4a8a01857b6ddca49559141acf5d0986eb01e1 | [
"MIT"
] | null | null | null | applications/tensorflow/contrastive_divergence_vae/utils/train_utils.py | Splendon/examples | ed4a8a01857b6ddca49559141acf5d0986eb01e1 | [
"MIT"
] | null | null | null | # Copyright 2019 Graphcore Ltd.
# coding=utf-8
import tensorflow as tf
from utils.optimisers import VcdRMSPropOptimizer
def vcd_lr_schedule(base_lr, current_step, total_steps, n_epoch, iter_timescale=15000, decay_factor=0.9):
    """Step-wise exponential learning-rate decay.

    Every ``iter_timescale`` iterations the rate is multiplied by
    ``decay_factor`` (default: lr <- lr * 0.9 every 15000 iterations).
    ``total_steps`` and ``n_epoch`` are accepted for interface
    compatibility but are not used by this schedule.
    """
    completed_decays = tf.cast(current_step // iter_timescale, tf.float32)
    return base_lr * decay_factor ** completed_decays
# Registry mapping optimiser name -> [optimiser class, keyword configuration].
optimiser_configs = {
    'vcd': [VcdRMSPropOptimizer,
            {'decay': 0.9,
             'epsilon': 1.,
             # Separate base rates for the encoder's mean/std heads and the decoder.
             'base_learning_rate': {'encoder': {'mean': 5e-4,
                                                'std': 2.5e-4},
                                    'decoder': 5e-4},
             # Called each step to produce the decayed learning rate.
             'learning_rate_func': vcd_lr_schedule}]
}
| 36.038462 | 105 | 0.499466 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 218 | 0.232657 |
1b4c36ef85d23630099a5b73f9ef9b5386a12e64 | 1,032 | py | Python | batch_jobs/ewas_predictions/python_caller_features.py | Deep-Learning-and-Aging/Scalars-based-models-and-XWAS-pipeline | f6913ce4ec1f6aed358ba27fdf575257f712c132 | [
"MIT"
] | null | null | null | batch_jobs/ewas_predictions/python_caller_features.py | Deep-Learning-and-Aging/Scalars-based-models-and-XWAS-pipeline | f6913ce4ec1f6aed358ba27fdf575257f712c132 | [
"MIT"
] | null | null | null | batch_jobs/ewas_predictions/python_caller_features.py | Deep-Learning-and-Aging/Scalars-based-models-and-XWAS-pipeline | f6913ce4ec1f6aed358ba27fdf575257f712c132 | [
"MIT"
] | null | null | null |
import sys
import os
# Make the Aging package importable on both the cluster (linux) and a dev Mac.
if sys.platform == 'linux':
    sys.path.append('/n/groups/patel/samuel/Aging')
elif sys.platform == 'darwin':
    sys.path.append('/Users/samuel/Desktop/Aging')

from aging.model.environment_predictor import EnvironmentPredictor

# Positional CLI arguments:
#   <name> <n_iter> <target_dataset> <input_dataset> <n_splits>
name = sys.argv[1]
n_iter = int(sys.argv[2])
target_dataset = sys.argv[3]
input_dataset = sys.argv[4]
n_splits = int(sys.argv[5])

hyperparameters = dict()
hyperparameters['name'] = name
hyperparameters['n_splits'] = n_splits
hyperparameters['n_iter'] = n_iter
hyperparameters['target_dataset'] = target_dataset
hyperparameters['input_dataset'] = input_dataset
print(hyperparameters)

# NOTE(review): the -1 arguments look like unused/sentinel slots of the
# predictor's constructor — confirm against EnvironmentPredictor's signature.
gp = EnvironmentPredictor(name, -1, n_splits, n_iter, target_dataset, input_dataset, -1)
print("Loading Dataset")
df = gp.load_dataset().dropna()
print("Dataset Loaded, optimizing hyper")
#df_scaled = gp.normalise_dataset(df)
feature_importance_cols = gp.feature_importance(df)
print("Feature importance over, saving file")
gp.save_features(feature_importance_cols)
print("task complete")
1b4e40b3667a2b8543c968bbe653fc8de682b7a8 | 99 | py | Python | packages/watchmen-pipeline-surface/src/watchmen_pipeline_surface/connectors/__init__.py | Indexical-Metrics-Measure-Advisory/watchmen | c54ec54d9f91034a38e51fd339ba66453d2c7a6d | [
"MIT"
] | null | null | null | packages/watchmen-pipeline-surface/src/watchmen_pipeline_surface/connectors/__init__.py | Indexical-Metrics-Measure-Advisory/watchmen | c54ec54d9f91034a38e51fd339ba66453d2c7a6d | [
"MIT"
] | null | null | null | packages/watchmen-pipeline-surface/src/watchmen_pipeline_surface/connectors/__init__.py | Indexical-Metrics-Measure-Advisory/watchmen | c54ec54d9f91034a38e51fd339ba66453d2c7a6d | [
"MIT"
] | null | null | null | from .kafka import init_kafka, KafkaSettings
from .rabbitmq import init_rabbitmq, RabbitmqSettings
| 33 | 53 | 0.858586 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
1b4f250eb95e76631c9784db08283d905057b857 | 4,454 | py | Python | test.py | KKN18/ViT-pytorch | 9c053579cf93c303afd817e8eacad3fbe0afb483 | [
"MIT"
] | null | null | null | test.py | KKN18/ViT-pytorch | 9c053579cf93c303afd817e8eacad3fbe0afb483 | [
"MIT"
] | null | null | null | test.py | KKN18/ViT-pytorch | 9c053579cf93c303afd817e8eacad3fbe0afb483 | [
"MIT"
] | null | null | null | # coding=utf-8
from __future__ import absolute_import, division, print_function
import logging
import argparse
import os
import random
import numpy as np
from datetime import timedelta
import torch
import torch.distributed as dist
from tqdm import tqdm
from torch.utils.tensorboard import SummaryWriter
from apex import amp
from apex.parallel import DistributedDataParallel as DDP
from models.modeling import VisionTransformer, CONFIGS
from utils.scheduler import WarmupLinearSchedule, WarmupCosineSchedule
from utils.data_utils import get_loader
from utils.dist_util import get_world_size
logger = logging.getLogger(__name__)
class AverageMeter(object):
    """Tracks the most recent value and a running (count-weighted) average."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics back to zero."""
        self.val, self.avg, self.sum, self.count = 0, 0, 0, 0

    def update(self, val, n=1):
        """Record ``val`` observed ``n`` times and refresh the running average."""
        self.count += n
        self.sum += n * val
        self.val = val
        self.avg = self.sum / self.count
def simple_accuracy(preds, labels):
    """Fraction of positions where ``preds`` equals ``labels`` element-wise."""
    matches = preds == labels
    return matches.mean()
def test(model, args):
    """Evaluate ``model`` on the test split.

    Computes the average cross-entropy loss and top-1 accuracy, prints them,
    and writes them to ``./test_result/<args.model>.txt``.

    Args:
        model: network returning a tuple whose first element is the logits.
        args: parsed CLI namespace (needs ``device``, ``local_rank``,
            ``model``, and the dataloader options consumed by get_loader).
    """
    _, test_loader = get_loader(args)
    eval_losses = AverageMeter()

    logger.info("***** Running Test *****")

    model.eval()
    all_preds, all_label = [], []
    epoch_iterator = tqdm(test_loader,
                          desc="Testing... (loss=X.X)",
                          bar_format="{l_bar}{r_bar}",
                          dynamic_ncols=True,
                          disable=args.local_rank not in [-1, 0])
    loss_fct = torch.nn.CrossEntropyLoss()
    for step, batch in enumerate(epoch_iterator):
        batch = tuple(t.to(args.device) for t in batch)
        x, y = batch
        with torch.no_grad():
            logits = model(x)[0]

            eval_loss = loss_fct(logits, y)
            eval_losses.update(eval_loss.item())

            preds = torch.argmax(logits, dim=-1)

        # Accumulate per-batch predictions/labels into single flat arrays.
        if len(all_preds) == 0:
            all_preds.append(preds.detach().cpu().numpy())
            all_label.append(y.detach().cpu().numpy())
        else:
            all_preds[0] = np.append(
                all_preds[0], preds.detach().cpu().numpy(), axis=0
            )
            all_label[0] = np.append(
                all_label[0], y.detach().cpu().numpy(), axis=0
            )
        epoch_iterator.set_description("Testing... (loss=%2.5f)" % eval_losses.val)

    all_preds, all_label = all_preds[0], all_label[0]
    accuracy = simple_accuracy(all_preds, all_label)

    print("\n")
    print("Testing Results")
    print("Test Loss: %2.5f" % eval_losses.avg)
    print("Test Accuracy: %2.5f" % accuracy)

    # Context manager ensures the file is closed even if a write raises
    # (the previous open()/close() pair leaked the handle on exceptions).
    with open("./test_result/" + args.model + ".txt", 'w') as f:
        f.write("##Testing Results##\n\n")
        f.write("Model name: %s\n" % args.model)
        f.write("Test Loss: %2.5f\n" % eval_losses.avg)
        f.write("Test Accuracy: %2.5f\n" % accuracy)

    return
def main():
    """Parse CLI options, load a fine-tuned ViT checkpoint, and run test()."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--local_rank", type=int, default=-1,
                        help="local_rank for distributed training on gpus")
    parser.add_argument("--img_size", default=224, type=int,
                        help="Resolution size")
    parser.add_argument("--dataset", choices=["cifar10", "cifar100"], default="cifar10",
                        help="Which downstream task.")
    parser.add_argument("--train_batch_size", default=512, type=int,
                        help="Total batch size for training.")
    parser.add_argument("--eval_batch_size", default=64, type=int,
                        help="Total batch size for eval.")
    parser.add_argument("--model_type", choices=["ViT-B_16", "ViT-B_32", "ViT-L_16",
                                                 "ViT-L_32", "ViT-H_14", "R50-ViT-B_16"],
                        default="ViT-B_16",
                        help="Which variant to use.")
    parser.add_argument("--model", type=str, default="ViT_1st")
    args = parser.parse_args()

    # NOTE(review): hard-coded CUDA device — this script will fail on
    # CPU-only hosts; confirm that is intended.
    device = torch.device("cuda")
    args.device = device
    config = CONFIGS[args.model_type]
    # cifar10 -> 10 classes, cifar100 -> 100 classes.
    num_classes = 10 if args.dataset == "cifar10" else 100
    model = VisionTransformer(config, args.img_size, zero_head=True, num_classes=num_classes)
    # Checkpoints are expected at ./output/<model>.bin.
    MODEL_PATH = "./output/" + args.model+".bin"
    model.load_state_dict(torch.load(MODEL_PATH))
    model.to(args.device)
    model.eval()
    test(model, args)

if __name__ == "__main__":
    main()
| 32.75 | 93 | 0.605299 | 389 | 0.087337 | 0 | 0 | 0 | 0 | 0 | 0 | 753 | 0.169062 |