# http://code.google.com/p/dexterity/issues/detail?id=234
from Acquisition import aq_inner
from zope.component import getUtility
from zope.intid.interfaces import IIntIds
from zope.security import checkPermission
from zc.relation.interfaces import ICatalog
from plone.multilingualbehavior.interfaces import IDexterityTranslatable
from Products.CMFCore.utils import getToolByName
from zope.component import queryAdapter
from plone.multilingual.interfaces import ITranslationManager
from plone.multilingual.interfaces import ILanguage
from Products.Archetypes.interfaces.referenceable import IReferenceable
from archetypes.multilingual.interfaces import IArchetypesTranslatable
from plone.multilingual.interfaces import ITranslatable
from AccessControl import getSecurityManager
from AccessControl import Unauthorized
def back_references(source_object, attribute_name):
""" Return back references from source object on specified attribute_name """
language_tool = getToolByName(source_object, 'portal_languages')
default_language = language_tool.getDefaultLanguage()
default_rels = []
# if this object is translatable, we should get the back relationship from the
# default language of this object
if ITranslatable.providedBy(source_object):
trans_manager = ITranslationManager(aq_inner(source_object))
default_lang_obj = trans_manager.get_translation(default_language)
if default_lang_obj:
default_rels = _back_references(default_lang_obj, attribute_name, source_object)
return list(set(default_rels + _back_references(source_object, attribute_name)))
def _back_references(source_object, attribute_name, translation=None):
catalog = getUtility(ICatalog)
intids = getUtility(IIntIds)
lang = queryAdapter(source_object, ILanguage).get_language()
if translation:
lang = queryAdapter(translation, ILanguage).get_language()
gsm = getSecurityManager()
result = []
for rel in catalog.findRelations({
'to_id': intids.getId(aq_inner(source_object)),
'from_attribute':attribute_name
}):
obj = intids.queryObject(rel.from_id)
if obj is not None and checkPermission('zope2.View', obj):
if ITranslatable.providedBy(obj):
trans_manager = ITranslationManager(aq_inner(obj))
try:
trans_obj = trans_manager.get_translation(lang)
                except Unauthorized:
continue
if trans_obj:
result.append(trans_obj)
continue
if gsm.checkPermission('zope2.View', obj):
result.append(obj)
return result
def at_back_references(source_object, relationship):
language_tool = getToolByName(source_object, 'portal_languages')
default_language = language_tool.getDefaultLanguage()
# if this object is translatable, we should get the back relationship from the
# default language of this object
default_rels = []
if ITranslatable.providedBy(source_object):
trans_manager = ITranslationManager(aq_inner(source_object))
default_lang_obj = trans_manager.get_translation(default_language)
if default_lang_obj:
default_rels = _at_back_references(default_lang_obj, relationship, source_object)
return list(set(default_rels + _at_back_references(source_object, relationship)))
def _at_back_references(source_object, relationship, translation=None):
lang = queryAdapter(source_object, ILanguage).get_language()
if translation:
lang = queryAdapter(translation, ILanguage).get_language()
refs = IReferenceable(source_object).getBRefs(relationship=relationship)
gsm = getSecurityManager()
result = []
for obj in refs:
if obj is not None:
if ITranslatable.providedBy(obj):
trans_manager = ITranslationManager(aq_inner(obj))
try:
trans_obj = trans_manager.get_translation(lang)
except Unauthorized:
continue
if trans_obj:
result.append(trans_obj)
continue
if gsm.checkPermission('zope2.View', obj):
result.append(obj)
return result
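# Hedged usage sketch (assumes a Plone site and content items whose schemas
# define these relation fields; the field names below are illustrative):
#   dexterity_refs = back_references(context, 'relatedItems')
#   archetypes_refs = at_back_references(context, 'relatesTo')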
|
number = int(input())
if number > 1:
    for i in range(2, number):
        if number % i == 0:
            print("nope")
            break
    else:
        # loop completed without finding a divisor, so the number is prime
        # (this also handles number == 2, where the loop body never runs)
        print("yep")
else:
    print("nope")
|
from .base import FunctionalTest
from selenium.webdriver.common.keys import Keys
class UserRegistrationTest(FunctionalTest):
def test_new_user_can_register_new_account(self):
        # open the home page
self.browser.get(self.server_url)
        # click the registration link on the home page
link_reg = self.browser.find_element_by_id('id_register_link')
link_reg.send_keys(Keys.ENTER)
        # the registration form is displayed
registration_url = self.browser.current_url
self.assertRegex(registration_url, '/accounts/register')
page_text = self.browser.find_element_by_tag_name('body').text
self.assertIn('Rejestracja', page_text)
        # enter the username, e-mail and password
inputbox = self.browser.find_element_by_id('id_username')
inputbox.send_keys('pawel')
inputbox = self.browser.find_element_by_id('id_email')
inputbox.send_keys('pawel@mockmyid.com')
inputbox = self.browser.find_element_by_id('id_password1')
inputbox.send_keys('pass123')
inputbox = self.browser.find_element_by_id('id_password2')
inputbox.send_keys('pass123')
submitbox = self.browser.find_element_by_id('id_register_user')
submitbox.send_keys(Keys.ENTER)
        # after entering valid data we are redirected to a page confirming successful registration
        # check that we were logged in automatically after registration:
        # the navigation bar shows the user's login and a logout link
registration_url = self.browser.current_url
self.assertRegex(registration_url, '/accounts/register/complete/')
page_text = self.browser.find_element_by_tag_name('body').text
self.assertIn('Rejestracja przebiegła pomyślnie', page_text)
self.assertIn('Zalogowano jako: pawel', page_text)
self.assertIn('Wyloguj', page_text)
|
import sys, glob
sys.path.append('../gen-py')
from SpellService import SpellService
from SpellService.ttypes import *
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
# parsing arguments
host = sys.argv[1]
port = int(sys.argv[2])  # the socket layer expects an integer port
words = sys.argv[3:]
spell_request=SpellRequest()
spell_request.to_check=words
try:
# Make socket
transport = TSocket.TSocket(host,port)
# Buffering is critical. Raw sockets are very slow
transport = TTransport.TBufferedTransport(transport)
# Wrap in a protocol
protocol = TBinaryProtocol.TBinaryProtocol(transport)
# Create a client to use the protocol encoder
client = SpellService.Client(protocol)
# Connect!
transport.open()
spell_response=client.spellcheck(spell_request)
#print spell_response
if hasattr(spell_response,'is_correct'):
output=[]
# converting bool to int
for i in spell_response.is_correct:
output.append(str(int(i)))
print " ".join(output)
# Close!
transport.close()
except Thrift.TException, tx:
print '%s' % (tx.message)
|
from flask import Flask, request, render_template
import requests
app = Flask(__name__)
@app.route('/')
def index():
return render_template('index.html')
@app.route('/search', methods=['POST'])
def search():
search = request.form['search']
if search == "":
return render_template('index.html')
else:
r = requests.get('https://en.wikipedia.org/w/api.php?action=opensearch&search='+search+'')
json_object = r.json()
url_links = []
title = []
para = []
        # opensearch returns parallel lists [query, titles, descriptions, links];
        # iterate over what actually came back instead of assuming 10 results
        for i in range(len(json_object[1])):
            url_links.append(json_object[3][i])
            title.append(json_object[1][i])
            para.append(json_object[2][i])
return render_template('search.html',url_links = url_links,title=title,para=para)
if __name__ == '__main__':
app.run(debug = True)
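# A safer way to build the query (sketch): let requests encode the parameters,
# so search terms with spaces or special characters don't break the URL:
#   r = requests.get('https://en.wikipedia.org/w/api.php',
#                    params={'action': 'opensearch', 'search': search})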
|
import gym
import torch.optim as optim
from dqn_learn import OptimizerSpec, dqn_learing
from utils.gym import get_env, get_wrapper_by_name
from utils.schedule import LinearSchedule
from utils.experiments_mgr import start_experiments_generator
from dqn_model_lrelu import DQNLRelu
from dqn_model import DQN
# Program parameters are set via the experiments_mgr
# (to support custom arguments and sampling from ranges)
def main(env, num_timesteps, experiment_config, experiment_name):
q_func = DQNLRelu if experiment_config['adv_model'] else DQN
def stopping_criterion(env):
        # the step count comes from the wrapped (Monitor) env, which may
        # differ from the number of steps in the underlying env
return get_wrapper_by_name(env, "Monitor").get_total_steps() >= num_timesteps
optimizer_spec = OptimizerSpec(
constructor=optim.RMSprop,
kwargs=dict(lr=experiment_config['lr'], alpha=experiment_config['alpha'], eps=experiment_config['eps']),
)
exploration_schedule = LinearSchedule(1000000, experiment_config['min_eps'])
dqn_learing(
experiment_name=experiment_name,
env=env,
q_func=q_func,
optimizer_spec=optimizer_spec,
exploration=exploration_schedule,
stopping_criterion=stopping_criterion,
replay_buffer_size=experiment_config['replay_size'],
batch_size=experiment_config['batch'],
gamma=experiment_config['gamma'],
learning_starts=experiment_config['learning_start'],
learning_freq=experiment_config['learning_freq'],
frame_history_len=experiment_config['frame_hist'],
target_update_freq=experiment_config['target_update_freq'],
output_path=experiment_config['output']
)
if __name__ == '__main__':
experiments_generator = start_experiments_generator()
# Get Atari games.
benchmark = gym.benchmark_spec('Atari40M')
for exp_num, (experiment_config, experiment_name) in enumerate(experiments_generator):
print('Beginning experiment: #' + str(exp_num+1) + ': ' + experiment_name)
# Change the index to select a different game.
assert 0 <= experiment_config['game'] < len(benchmark.tasks), 'Illegal Atari game id'
task = benchmark.tasks[experiment_config['game']] # By default - use Pong (id 3)
# Run training
seed = experiment_config['seed'] # By default - use a seed of zero (you may want to randomize the seed!)
env = get_env(task, seed)
# Take minimum between custom configuration and task configuration
        max_timesteps = min(experiment_config['max_steps'], task.max_timesteps)
        main(env, max_timesteps, experiment_config, experiment_name)
print('Ended experiment: #' + str(exp_num + 1) + ': ' + experiment_name)
|
from unittest import TestCase
from Queue import Queue
class TestQueue(TestCase):
def test_dequeue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
self.assertEqual(queue.dequeue(), 1)
self.assertEqual(queue.dequeue(), 2)
self.assertEqual(queue.dequeue(), 3)
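# The Queue module under test is local to this project and not shown here.
# A minimal FIFO sketch that would satisfy the test above (illustrative only,
# kept commented out so it does not shadow the real import):
#
# class Queue:
#     def __init__(self):
#         self._items = []
#     def enqueue(self, item):
#         self._items.append(item)
#     def dequeue(self):
#         return self._items.pop(0)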
|
S = input()
R = ""
for c in S:
if c in "aeiou":
R += c
print('S' if R == R[::-1] else 'N')
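# e.g. "banana" -> vowels "aaa" (a palindrome) -> prints 'S';
#      "persona" -> vowels "eoa" -> prints 'N'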
|
#!/usr/bin/env pypy3
from sys import stdin
from collections import deque
lines = [list(map(int, line.strip().split())) for line in stdin]
t = lines[0][0]
ln = 1
for _ in range(t):
n, a = lines[ln][0], lines[ln+1]
ln += 2
d = deque()
for x in a:
if len(d) == 0 or x < d[0]:
d.appendleft(x)
else:
d.append(x)
print(" ".join(map(str,d)))
|
T = [(1, 2), (3, 4), (5, 6)]
for (a, b) in T: # tuple assignment
print(a, b)
D = {'a': 1, 'b': 2, 'c': 3} # Dictionary
for key in D:
print(key, '=>', D[key]) # use dictionary keys iterator and index
L = list(D.items()) # make a list from the dictionary items
print(L)
for (key, values) in L: # iterate the list over keys and values
print(key, '=>', values)
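# the same key/value pairing without building an intermediate list:
for key, value in D.items():
    print(key, '=>', value)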
|
print('-' * 30)
print('Fibonacci sequence')
print('-' * 30)
termos = int(input('How many terms do you want to show? '))
print('~' * 30)
inicio = 3  # terms 1 and 2 are printed below, so the loop starts at term 3
t1 = 0
t2 = 1
print(f'{t1} → {t2}', end=' → ')
fim = termos
while inicio <= fim:
t3 = t1 + t2
print(t3, end=' → ')
t1 = t2
t2 = t3
inicio += 1
print('END')
|
array = [-2, 1, -3, 4, 6, 3]
new_array = []
for item in array:
if item % 2 != 0:
new_array.append(item)
array = new_array
print(array)
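# equivalent filter as a list comprehension:
# new_array = [item for item in array if item % 2 != 0]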
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that app bundles are built correctly.
"""
import TestGyp
import TestMac
import os
import plistlib
import subprocess
import sys
if sys.platform == 'darwin':
print "This test is currently disabled: https://crbug.com/483696."
sys.exit(0)
def ExpectEq(expected, actual):
if expected != actual:
print >>sys.stderr, 'Expected "%s", got "%s"' % (expected, actual)
test.fail_test()
def ls(path):
'''Returns a list of all files in a directory, relative to the directory.'''
result = []
for dirpath, _, files in os.walk(path):
for f in files:
result.append(os.path.join(dirpath, f)[len(path) + 1:])
return result
# Xcode support for asset catalogs was introduced in Xcode 6.0.
if sys.platform == 'darwin' and TestMac.Xcode.Version() >= '0600':
test_gyp_path = 'test-assets-catalog.gyp'
test_app_path = 'Test App Assets Catalog Gyp.app'
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
test.run_gyp(test_gyp_path, chdir='app-bundle')
test.build(test_gyp_path, test.ALL, chdir='app-bundle')
# Binary
test.built_file_must_exist(
os.path.join(test_app_path, 'Contents/MacOS/Test App Assets Catalog Gyp'),
chdir='app-bundle')
# Info.plist
info_plist = test.built_file_path(
os.path.join(test_app_path, 'Contents/Info.plist'),
chdir='app-bundle')
test.must_exist(info_plist)
test.must_contain(
info_plist,
'com.google.Test-App-Assets-Catalog-Gyp') # Variable expansion
  test.must_not_contain(info_plist, '${MACOSX_DEPLOYMENT_TARGET}')
if test.format != 'make':
# TODO: Synthesized plist entries aren't hooked up in the make generator.
machine = subprocess.check_output(['sw_vers', '-buildVersion']).rstrip('\n')
plist = plistlib.readPlist(info_plist)
ExpectEq(machine, plist['BuildMachineOSBuild'])
    version = TestMac.Xcode.SDKVersion()
    expected = 'macosx' + version
ExpectEq(expected, plist['DTSDKName'])
sdkbuild = TestMac.Xcode.SDKBuild()
if not sdkbuild:
# Above command doesn't work in Xcode 4.2.
sdkbuild = plist['BuildMachineOSBuild']
ExpectEq(sdkbuild, plist['DTSDKBuild'])
ExpectEq(TestMac.Xcode.Version(), plist['DTXcode'])
ExpectEq(TestMac.Xcode.Build(), plist['DTXcodeBuild'])
# Resources
strings_files = ['InfoPlist.strings', 'utf-16be.strings', 'utf-16le.strings']
for f in strings_files:
strings = test.built_file_path(
os.path.join(test_app_path, 'Contents/Resources/English.lproj', f),
chdir='app-bundle')
test.must_exist(strings)
    # Xcode writes UTF-16LE with a BOM.
contents = open(strings, 'rb').read()
if not contents.startswith('\xff\xfe' + '/* Localized'.encode('utf-16le')):
test.fail_test()
test.built_file_must_exist(
os.path.join(
test_app_path, 'Contents/Resources/English.lproj/MainMenu.nib'),
chdir='app-bundle')
  # make does not support .xcassets files
extra_content_files = []
if test.format != 'make':
extra_content_files = ['Contents/Resources/Assets.car']
for f in extra_content_files:
test.built_file_must_exist(
os.path.join(test_app_path, f),
chdir='app-bundle')
# Packaging
test.built_file_must_exist(
os.path.join(test_app_path, 'Contents/PkgInfo'),
chdir='app-bundle')
test.built_file_must_match(
os.path.join(test_app_path, 'Contents/PkgInfo'), 'APPLause',
chdir='app-bundle')
# Check that no other files get added to the bundle.
if set(ls(test.built_file_path(test_app_path, chdir='app-bundle'))) != \
set(['Contents/MacOS/Test App Assets Catalog Gyp',
'Contents/Info.plist',
'Contents/Resources/English.lproj/MainMenu.nib',
'Contents/PkgInfo',
] + extra_content_files +
[os.path.join('Contents/Resources/English.lproj', f)
for f in strings_files]):
test.fail_test()
test.pass_test()
|
#!/usr/bin/python
from PIL import Image
import os, sys
def resize(path, originDir, destDir, dimensionX, dimensionY):
dirs = os.listdir( path+originDir )
print(dirs)
for item in dirs:
if os.path.isfile(path+originDir+item):
im = Image.open(path+originDir+item)
f, e = os.path.splitext(path+originDir+item)
imResize = im.resize((dimensionX,dimensionY), Image.ANTIALIAS)
imResize.save(path + destDir + f[len(path+originDir):] + '_resized.jpg', 'JPEG', quality=100)
print(imResize)
        else:
            print("skipping non-file entry: " + item)
path = "../../KaggleCompetition/Resources/DataSets/"
originDir = 'train1/'
destDir = 'train2/'
resize(path, originDir, destDir, 32, 32)
|
from unittest import TestCase
from agrupa_numeros.agrupa import agrupa
class AgrupaTests(TestCase):
def test_retorna_vazio(self):
retorno = agrupa('')
self.assertEqual('', retorno)
def test_retorno_de_um_unico_numero(self):
retorno = agrupa('10')
self.assertEqual('[10]', retorno)
def test_retorno_de_dois_numeros(self):
retorno = agrupa('1, 2')
self.assertEqual('[1-2]', retorno)
def test_retorno_de_tres_numeros(self):
retorno = agrupa('1, 2, 3')
self.assertEqual('[1-3]', retorno)
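# The agrupa() implementation lives in agrupa_numeros.agrupa and is not shown
# here. A minimal sketch that would satisfy the four tests above (illustrative,
# kept commented out so it does not shadow the real import):
#
# def agrupa(texto):
#     if not texto:
#         return ''
#     numeros = [int(n) for n in texto.split(',')]
#     inicio = fim = numeros[0]
#     grupos = []
#     for n in numeros[1:]:
#         if n == fim + 1:
#             fim = n
#         else:
#             grupos.append((inicio, fim))
#             inicio = fim = n
#     grupos.append((inicio, fim))
#     return ', '.join('[%d]' % a if a == b else '[%d-%d]' % (a, b)
#                      for a, b in grupos)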
|
import time
import serial
import utils
class Fob(object):
def __init__(self):
self.ser1 = serial.Serial()
self.ser2 = serial.Serial()
self.fs = 100
self.all_data = (0, 0, 0, 0, 0, 0)
# ser1 is master, and ser2 is the only slave
# two birds are both connected to the host via RS-232
        # if only the master were connected to the host and the slave
        # connected to the master via FBB, the commands sent would need
        # to include FBB addressing; since I have two RS-232 cables,
        # I didn't bother with the FBB commands
self.ser1.port = 'COM1'
self.ser1.baudrate = 115200
self.ser1.open()
self.ser2.port = 'COM4'
self.ser2.baudrate = 115200
self.ser2.open()
# Hello birds
self.ser1.setRTS(True)
time.sleep(0.5)
self.ser1.setRTS(False)
time.sleep(0.5)
self.ser2.setRTS(True)
time.sleep(0.5)
self.ser2.setRTS(False)
time.sleep(0.5)
# Could be replaced by ser.flushInput
n = self.ser1.inWaiting()
if n > 0:
self.ser1.read(n)
n = self.ser2.inWaiting()
if n > 0:
self.ser2.read(n)
# auto-configure flock of birds
self.ser1.write('P') # command
self.ser1.write(chr(50)) # parameter 50
self.ser1.write(chr(2)) # number of birds
time.sleep(1)
self.ser2.write('Y')
time.sleep(1)
def get_posang(self):
self.ser2.write('B')
while self.ser2.inWaiting() < 12:
continue
raw_data = self.ser2.read(12)
self.all_data = utils.dataconvert(raw_data)
return self.all_data
def close(self):
self.ser1.close()
self.ser2.close()
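# Hedged usage sketch (assumes two birds wired to COM1/COM4 as above; the six
# returned values presumably encode position and orientation, per utils.dataconvert):
#   fob = Fob()
#   print(fob.get_posang())
#   fob.close()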
|
import datetime
import os
from typing import Dict, List, Any, Tuple, Union
import requests
from dateutil import parser
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
class BookingUser:
def __init__(self, base_url: str, class_type: str, user_info, logger):
self.base_url = base_url
self.class_type = class_type
self.name = user_info["name"]
self.user_email = user_info["email"]
self.password = user_info["password"]
self.user_preferences = user_info["preferences"][self.class_type]
self.auth_url = "auth/local"
self.classes_url = "api/class/gym/5fd7cff72eb93d371e0aa7de"
self.headers = {"Content-Type": "application/json"}
self.timeout = 5.0
self.user_id = None
self.logger = logger
def get_scheduled_classes(self) -> List[Dict[str, Any]]:
# return scheduled classes for a given user
booked_classes = []
url = os.path.join(self.base_url, f"api/users/{self.class_type}/upcoming")
r = requests.get(url, headers=self.headers, timeout=self.timeout)
r.raise_for_status()
for booked_class in r.json():
if booked_class["status"] == "active":
class_tmp = dict()
class_tmp["booking_id"] = booked_class["_id"]
booked_class = booked_class['class']
class_tmp["class_id"] = booked_class["_id"]
day = parser.parse(booked_class["classDate"])
class_tmp["classDate"] = (day.strftime("%Y-%m-%d"), day.strftime("%A"))
class_tmp["classTime"] = booked_class["classTime"]
booked_classes.append(class_tmp)
return booked_classes
def generate_candidates(self) -> List[Tuple[Union[str, List[str]]]]:
        # return the classes the user wants to book: they match a preference AND are not already scheduled
ndays = 8
candidate_days = self._generate_candidate_days(ndays)
scheduled_classes = self.get_scheduled_classes()
candidates = self._filter_days_to_schedule(candidate_days, scheduled_classes)
return candidates
def get_classes_to_schedule(self, candidates_class: List[Tuple[Union[str, List[str]]]]) -> Tuple[
List[Dict[str, Any]], List[Tuple[Union[str, List[str]]]]]:
        # receives the candidates already filtered by user preferences and by what is already scheduled;
        # it then checks those candidates for availability and returns the class information for any found
url = os.path.join(self.base_url, self.classes_url, self.class_type)
s = requests.Session()
retries = Retry(total=5, backoff_factor=1, status_forcelist=[ 502, 503, 504 ])
s.mount('http://', HTTPAdapter(max_retries=retries))
r = s.get(url, headers=self.headers, timeout=self.timeout)
r.raise_for_status()
bookings = r.json()
days_to_filter = [d[0] for d in candidates_class]
filtered_bookings = {}
classes_to_schedule = []
candidates_scheduled = []
for b in bookings:
d = parser.parse(b["_id"]).strftime("%Y-%m-%d")
if d in days_to_filter:
filtered_bookings[d] = b
for candidate in candidates_class:
day = candidate[0]
candidate_time = self._parse_hour(candidate[2])
required_spots = 1 + len(candidate[3])
if day in filtered_bookings.keys():
for real_class in filtered_bookings[day]["classes"]:
available_spots = real_class["limit"] - real_class["joinedUsers"]
real_class_time = self._parse_hour(real_class["classTime"])
if real_class_time > candidate_time:
break
if real_class_time == candidate_time:
cancel = False
for attendance in real_class["attendanceList"]:
if attendance["user"] == self.user_id and attendance["status"] == "cancelled":
self.logger.info(
f"The {self.class_type} for {day} at {candidate[2]} "
f"was cancelled by user - remove from scheduling")
cancel = True
candidates_scheduled.append(candidate)
break
if not real_class["active"]:
self.logger.info(f"The {self.class_type} for {day} at {candidate[2]} is not active - skip")
candidates_scheduled.append(candidate)
if not cancel and available_spots >= required_spots and real_class["active"]:
real_class["classDate"] = parser.parse(real_class["classDate"]).strftime("%Y-%m-%d")
classes_to_schedule.append(real_class)
candidates_scheduled.append(candidate)
candidates_not_available = [c for c in candidates_class if c not in candidates_scheduled]
return classes_to_schedule, candidates_not_available
def book_class(self, class_id: str):
url = os.path.join(self.base_url, "api/class", class_id)
data = f'{{"userId":"{self.user_id}","isSinglePayment":true}}'
r = requests.post(url, headers=self.headers, data=data, timeout=self.timeout)
r.raise_for_status()
def cancel_class(self, booking_id: str):
url = os.path.join(self.base_url, "api/attendance", booking_id, "cancel")
data = f'{{"userId":"{self.user_id}"}}'
r = requests.patch(url, headers=self.headers, data=data, timeout=self.timeout)
r.raise_for_status()
@staticmethod
def _generate_candidate_days(ndays: int) -> Dict[str, List[str]]:
# return dict of weekday: [str_date] based on ndays
candidate_days = {}
now = datetime.datetime.now()
for i in range(ndays):
day = now + datetime.timedelta(i)
weekday = day.strftime("%A")
if weekday in candidate_days:
candidate_days[weekday].append(day.strftime("%Y-%m-%d"))
else:
candidate_days[weekday] = [day.strftime("%Y-%m-%d")]
return candidate_days
def _filter_days_to_schedule(self, candidate_days: Dict[str, List[str]],
scheduled_classes: List[Dict[str, Any]]) -> List[Tuple[Union[str, List[str]]]]:
# filter days for each class regarding user preferences and classes already scheduled
class_candidates = []
now = datetime.datetime.now()
for day_p in self.user_preferences:
if day_p[0] in candidate_days.keys():
class_dates = candidate_days[day_p[0]]
class_hours = day_p[1]
candidate = [(class_date, day_p[0], hour, day_p[2])
for hour in class_hours for class_date in class_dates]
class_candidates.extend(candidate)
# after filtering candidates using the preferences, filter the ones already scheduled
filtered_candidates = []
for candidate in class_candidates:
scheduled_days = {(day["classDate"][0], self._parse_hour(day["classTime"].lower())) for day in
scheduled_classes}
candidate_date = candidate[0]
candidate_hour = self._parse_hour(candidate[2])
preference_datetime = parser.parse(candidate_date) + candidate_hour
if now < preference_datetime and (candidate_date, candidate_hour) not in scheduled_days:
filtered_candidates.append(candidate)
return filtered_candidates
def me(self):
url = os.path.join(self.base_url, "api/users/me")
r = requests.get(url, headers=self.headers, timeout=self.timeout)
r.raise_for_status()
response = r.json()
self.user_id = response["_id"]
def auth(self):
url = os.path.join(self.base_url, self.auth_url)
data = f'{{"email":"{self.user_email}","password":"{self.password}"}}'
r = requests.post(url, headers=self.headers, data=data, timeout=self.timeout)
r.raise_for_status()
response = r.json()
self.headers["Authorization"] = f"Bearer {response['token']}"
@staticmethod
def _parse_hour(hour_minutes: str):
hour = hour_minutes.split()
hour_int = int(hour[0].split(":")[0])
minutes_int = int(hour[0].split(":")[1])
time_of_day = hour[1].lower()
        if time_of_day == 'pm' and hour_int < 12:
            hour_int = hour_int + 12
        elif time_of_day == 'am' and hour_int == 12:
            hour_int = 0  # 12 am is midnight
        return datetime.timedelta(hours=hour_int, minutes=minutes_int)
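# Worked example of the 12-hour parsing above:
#   BookingUser._parse_hour("6:30 pm")  -> timedelta(hours=18, minutes=30)
#   BookingUser._parse_hour("12:15 am") -> timedelta(minutes=15)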
|
import os
import sys
#Calculate the path based on the location of the WSGI script.
apache_configuration= os.path.dirname(__file__) #e:\PythonWeb\code\voith_sales\Rail\apache_django_wsgi.conf
project = os.path.dirname(apache_configuration) #e:\PythonWeb\code\voith_sales\Rail
workspace = os.path.dirname(project) #e:\PythonWeb\code\voith_sales
sys.stdout = sys.stderr
sys.path.append(workspace)
print(workspace)
os.environ['DJANGO_SETTINGS_MODULE'] = "iexam.iexam.settings"
#import django
#django.setup()
#import django.core.handlers.wsgi
#application = django.core.handlers.wsgi.WSGIHandler()
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
import argparse
import json
from docqa.triviaqa.training_data import ExtractMultiParagraphsPerQuestion
from docqa.data_processing.preprocessed_corpus import PreprocessedData
from docqa.scripts.ablate_triviaqa import get_model
from docqa.text_preprocessor import WithIndicators
from docqa.data_processing.document_splitter import MergeParagraphs, ShallowOpenWebRanker
from docqa.data_processing.multi_paragraph_qa import StratifyParagraphsBuilder, \
StratifyParagraphSetsBuilder, RandomParagraphSetDatasetBuilder
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--input_file', required=True,
help="input file, e.g. train_data.pkl")
parser.add_argument('--output_train_file', required=True,
help="output train file, e.g. train_output.json")
parser.add_argument('--num_epoch', required=True, type=int,
help="num_epoch, e.g. 10")
args = parser.parse_args()
mode = "shared-norm"
model = get_model(100, 140, mode, WithIndicators())
extract = ExtractMultiParagraphsPerQuestion(MergeParagraphs(400), ShallowOpenWebRanker(16), model.preprocessor, intern=True)
oversample = [1] * 2
train = StratifyParagraphSetsBuilder(30, mode == "merge", True, oversample)
test = RandomParagraphSetDatasetBuilder(120, "merge" if mode == "merge" else "group", True, oversample)
data = PreprocessedData(None, extract, train, test, eval_on_verified=False)
data.load_preprocess(args.input_file)
outputs = []
training_data = data.get_train()
for i in range(args.num_epoch):
for batch in training_data.get_epoch():
last_qid = None
current = {"question_id": "", "question": [], "context": [], "answer_spans": [], "answer_text": []}
for instance in batch:
if last_qid and instance.question_id != last_qid:
outputs.append(current)
current = {"question": [], "context": [], "answer_spans": [], "answer_text": []}
if current["question"]:
assert(current["question"] == instance.question)
assert(current["answer_text"] == instance.answer.answer_text)
else:
current["question"] = instance.question
current["answer_text"] = instance.answer.answer_text
current["question_id"] = instance.question_id
current["context"].append(instance.context)
current["answer_spans"].append(instance.answer.answer_spans.tolist())
last_qid = instance.question_id
outputs.append(current)
with open(args.output_train_file, "w") as fout:
for output in outputs:
fout.write(json.dumps(output, ensure_ascii=False) + "\n")
|
# -*- coding: utf-8 -*-
from utils import get_img_urls, write_pdf
from datetime import datetime
import json
class Immobilier(object):
def __init__(self, item_url, data):
self.item_url = item_url
self.data = data
self.serialized_data = None
self.interest_data = None
self.url_img_list = []
self.description = ""
def ad_number(self):
p1 = self.item_url.rindex("/") + 1
p2 = self.item_url.index(".htm")
return self.item_url[p1:p2]
def serialize(self, image):
        # extract the ad data embedded in the page's script tag
body = self.data.find('body')
script_elt = str(body.findAll('script')[3])
begin = script_elt.index('{')
end = script_elt.rfind('}') + 1
object_data = script_elt[begin:end]
self.serialized_data = json.loads(object_data)['adview']
self.description = self.serialized_data['body']
self.interest_data = {
'annonce' : self.serialized_data['owner']['type'],
'publié le' : self.serialized_data['first_publication_date'],
'dernière modification le' : self.serialized_data['index_date'],
'photo disponible' : self.serialized_data['images']['nb_images'],
'prix' : str(self.serialized_data['price'][0])+" €",
}
        try:
            self.interest_data['surface'] = [d['value_label'] for d in self.serialized_data['attributes'] if d.get('key')=='square'][0]
        except (IndexError, KeyError):
            pass
        try:
            self.interest_data['nombre de pièces'] = [d['value'] for d in self.serialized_data['attributes'] if d.get('key')=='rooms'][0]
        except (IndexError, KeyError):
            pass
        try:
            self.interest_data['ges'] = [d['value_label'] for d in self.serialized_data['attributes'] if d.get('key')=='ges'][0]
        except (IndexError, KeyError):
            pass
        try:
            self.interest_data['nrj'] = [d['value_label'] for d in self.serialized_data['attributes'] if d.get('key')=='energy_rate'][0]
        except (IndexError, KeyError):
            pass
# get picture urls
if(image):
self.url_img_list = self.serialized_data['images']['urls']
def save(self, doc, args):
item_last_update = datetime.strptime(self.serialized_data['index_date'], '%Y-%m-%d %H:%M:%S')
if item_last_update > args.last_update:
if "viager" not in self.data.text.lower():
print(self.serialized_data['subject'])
# generate pdf
                return write_pdf(self, doc, args.image)
            else:
                print("viager")
                return
        else:
            return
    def __str__(self):
        return self.item_url
class Vehicule(object):
def __init__(self, item_url, data):
self.item_url = item_url
self.data = data
self.serialized_data = None
self.interest_data = None
self.url_img_list = []
self.description = ""
def ad_number(self):
p1 = self.item_url.rindex("/") + 1
p2 = self.item_url.index(".htm")
return self.item_url[p1:p2]
def serialize(self, image):
        # extract the ad data embedded in the page's script tag
body = self.data.find('body')
script_elt = str(body.findAll('script')[3])
begin = script_elt.index('{')
end = script_elt.rfind('}') + 1
object_data = script_elt[begin:end]
self.serialized_data = json.loads(object_data)['adview']
self.description = self.serialized_data['body']
self.interest_data = {
'annonce' : self.serialized_data['owner']['type'],
'publié le' : self.serialized_data['first_publication_date'],
'dernière modification le' : self.serialized_data['index_date'],
'photo disponible' : self.serialized_data['images']['nb_images'],
'prix' : str(self.serialized_data['price'][0])+" €",
}
        try:
            self.interest_data['année'] = [d['value_label'] for d in self.serialized_data['attributes'] if d.get('key')=='regdate'][0]
        except (IndexError, KeyError):
            pass
        try:
            self.interest_data['kilométrage'] = [d['value_label'] for d in self.serialized_data['attributes'] if d.get('key')=='mileage'][0]
        except (IndexError, KeyError):
            pass
        try:
            self.interest_data['Cylindrée'] = [d['value_label'] for d in self.serialized_data['attributes'] if d.get('key')=='cubic_capacity'][0]
        except (IndexError, KeyError):
            pass
# get picture urls
if(image):
self.url_img_list = self.serialized_data['images']['urls']
def save(self, doc, args):
item_last_update = datetime.strptime(self.serialized_data['index_date'], '%Y-%m-%d %H:%M:%S')
if item_last_update > args.last_update:
print(self.serialized_data['subject'])
# generate pdf
            return write_pdf(self, doc, args.image)
        else:
            return
    def __str__(self):
        return self.item_url
class General(object):
def __init__(self, item_url, data):
self.item_url = item_url
self.data = data
self.serialized_data = None
self.interest_data = None
self.url_img_list = []
self.description = ""
def ad_number(self):
p1 = self.item_url.rindex("/") + 1
p2 = self.item_url.index(".htm")
return self.item_url[p1:p2]
def serialize(self, image):
        # extract the ad data embedded in the page's script tag
body = self.data.find('body')
script_elt = str(body.findAll('script')[3])
begin = script_elt.index('{')
end = script_elt.rfind('}') + 1
object_data = script_elt[begin:end]
self.serialized_data = json.loads(object_data)['adview']
self.description = self.serialized_data['body']
self.interest_data = {
'annonce' : self.serialized_data['owner']['type'],
'publié le' : self.serialized_data['first_publication_date'],
'dernière modification le' : self.serialized_data['index_date'],
'photo disponible' : self.serialized_data['images']['nb_images'],
'prix' : str(self.serialized_data['price'][0])+" €",
}
# get picture urls
if(image):
self.url_img_list = self.serialized_data['images']['urls']
def save(self, doc, args):
item_last_update = datetime.strptime(self.serialized_data['index_date'], '%Y-%m-%d %H:%M:%S')
if item_last_update > args.last_update:
print(self.serialized_data['subject'])
# generate pdf
            return write_pdf(self, doc, args.image)
        else:
            return
    def __str__(self):
        return self.item_url
|
# -*- coding: UTF-8 -*-
import os,re,shutil,json,random,threading,socket,sys,time#,requests
import unreal
class StaticTextureMeshTask:
def __init__(self):
#self.MaterialLoaderData = 'I:\\svnDir\\ue422_epic_1\\Engine\\Plugins\\zhuohua\\CGGameWork\\Content\\MaterialLoaderData.xml'
self.MaterialLoaderData = r'I:\svnDir\ue425_epic\Engine\Plugins\Teamones\Content\MaterialLoaderData.xml'
#self.Material_Template = '/Game/Game/Game_Resources/MapsResources/PublicResources/Materiral_Template/PBR_Mat_Template/Mat_Master/PublicMetallic'
self.Material_Template = '/Game/ZHAssets/MaterialTemplate/RealisticTemplate/ScenceMaterial/PublicMetallic/PublicMetallic_Inst'
self.MaterialsTemplateArr = []
        # bones that get sockets when importing a character-type skeletal mesh
        self.Bone_list = [
            'ToseEnd_R', 'ToseEnd_L', 'Knee_R', 'Knee_L',
            'Spine1_M', 'Chest_M', 'Shoulder_R', 'Shoulder_L',
            'Elbow_R', 'Elbow_L', 'Wrist_R', 'Wrist_L',
            'MiddleFinger4_R', 'MiddleFinger4_L', 'Neck_M',
            'Eye_R', 'Eye_L', 'JawEnd_M',
        ]
# def CreateInstanceOfMaterial(materialFileName,newAssetName,destination_path,_textureTargetNameList,textureFileNameList):
def CreateInstanceOfMaterial(self, materialFileName, newAssetName, destination_path, textureFileNameList, material_template):
selectedAsset = unreal.load_asset(materialFileName)
# newAssetName = ""
# newAssetName = selectedAsset.get_name() + "_%s"
# "_%s_%d"
asset_import_task = unreal.AssetImportTask()
asset_import_task.set_editor_property("save", True)
asset_import_task.set_editor_property("automated", True)
asset_import_task.set_editor_property("replace_existing", True)
factory = unreal.MaterialInstanceConstantFactoryNew()
factory.set_editor_property("asset_import_task", asset_import_task)
factory.set_editor_property("create_new", True)
asset_tools = unreal.AssetToolsHelpers.get_asset_tools()
newAsset = asset_tools.create_asset(newAssetName, destination_path, None, factory) # add by chenganggui
unreal.MaterialEditingLibrary.set_material_instance_parent(newAsset, selectedAsset)# add by chenganggui
for textureFileName in textureFileNameList:
for Minslot in material_template['Minslots']:
if (textureFileName.find(Minslot) != -1) and newAssetName in textureFileName:
print('textureFileName is {}, Minslot is {}, newAsset is {}'.format(
textureFileName, Minslot, newAssetName
))
texture_asset = unreal.Texture.cast(unreal.load_asset(textureFileName))
unreal.MaterialEditingLibrary.set_material_instance_texture_parameter_value(newAsset, Minslot, texture_asset)
# for MaterialsTemplate in self.MaterialsTemplateArr:
# if (newAssetName.find(MaterialsTemplate['mat_Inst'].split("_")[0]) != -1):
# for textureFileName in textureFileNameList:
# # print "newAssetName::"+newAssetName+" MaterialsTemplate.mat_Inst::"+MaterialsTemplate.mat_Inst+" textureFileName::"+textureFileName+" "
# for Minslot in MaterialsTemplate['Minslots']:
#
# if (textureFileName.find(Minslot) != -1) and newAssetName[:-5] in textureFileName:
# # print('textureFileName is {} minslot is {}, newAssetName is {}'.format(
# # textureFileName,
# # Minslot,
# # newAssetName[:-5]
# # ))
# texture_asset = unreal.Texture.cast(unreal.load_asset(textureFileName))
# unreal.MaterialEditingLibrary.set_material_instance_texture_parameter_value(newAsset,
# Minslot,
# texture_asset)
unreal.EditorLoadingAndSavingUtils.save_dirty_packages(True, True)
def resetStaticMeshMaterial(self, package_path):
# def __init__(self, package_names=[], package_paths=[], object_paths=[], class_names=[], recursive_classes_exclusion_set=[], recursive_paths=False, recursive_classes=False, include_only_on_disk_assets=False):
filter_staticmesh = unreal.ARFilter([], [package_path], [], [unreal.StaticMesh.static_class().get_name()], [], True)
filter_materialIns = unreal.ARFilter([], [package_path], [],
[unreal.MaterialInstanceConstant.static_class().get_name()], [], True)
AssetRegistry = unreal.AssetRegistryHelpers().get_asset_registry()
MaterialInsDataArr = AssetRegistry.get_assets(filter_materialIns)
StaticMeshAssetDataArr = AssetRegistry.get_assets(filter_staticmesh)
print('MaterialInsDataArr len is {}, StaticMeshAssetDataArr is {}'.format(
len(MaterialInsDataArr), len(StaticMeshAssetDataArr)
))
for StaticMeshAssetData in StaticMeshAssetDataArr:
# print StaticMeshAssetData
StaticMeshStr = str(StaticMeshAssetData.package_name)
# print StaticMeshStr
StaticMeshAsset = unreal.StaticMesh.cast(unreal.load_asset(StaticMeshStr))
            if StaticMeshAsset is not None:
for MaterialInsData in MaterialInsDataArr:
# print MaterialInsData.asset_name
materialIndex = StaticMeshAsset.get_material_index(MaterialInsData.asset_name)
if (materialIndex != -1):
MaterialInsStr = str(MaterialInsData.package_name)
targetMaterial = unreal.MaterialInstance.cast(unreal.load_asset(MaterialInsStr))
StaticMeshAsset.set_material(materialIndex, targetMaterial)
                        print(MaterialInsStr)
# print materialIndex
def resetSkeletonMeshMaterial(self, package_path):
# def __init__(self, package_names=[], package_paths=[], object_paths=[], class_names=[], recursive_classes_exclusion_set=[], recursive_paths=False, recursive_classes=False, include_only_on_disk_assets=False):
filter_skeletalMesh = unreal.ARFilter([], [package_path], [], [unreal.SkeletalMesh.static_class().get_name()])
filter_materialIns = unreal.ARFilter([], [package_path], [],
[unreal.MaterialInstanceConstant.static_class().get_name()])
AssetRegistry = unreal.AssetRegistryHelpers().get_asset_registry()
MaterialInsDataArr = AssetRegistry.get_assets(filter_materialIns)
SkeletalMeshAssetDataArr = AssetRegistry.get_assets(filter_skeletalMesh)
for SkeletalMeshAssetData in SkeletalMeshAssetDataArr:
# print StaticMeshAssetData
SkeletalMeshStr = str(SkeletalMeshAssetData.package_name)
# print StaticMeshStr
SkeletalMeshAsset = unreal.SkeletalMesh.cast(unreal.load_asset(SkeletalMeshStr))
            if SkeletalMeshAsset is not None:
                materials = SkeletalMeshAsset.materials
for MaterialInsData in MaterialInsDataArr:
# print MaterialInsData.asset_name
materialIndex = SkeletalMeshAsset.get_material_index(MaterialInsData.asset_name)
if (materialIndex != -1):
MaterialInsStr = str(MaterialInsData.package_name)
targetMaterial = unreal.MaterialInstance.cast(unreal.load_asset(MaterialInsStr))
# SkeletalMeshAsset.set_material(materialIndex,targetMaterial)
                        # match by imported slot name and swap in the new material
                        for SkeletalMeshMaterialIndex in range(len(materials)):
                            if materials[SkeletalMeshMaterialIndex].imported_material_slot_name == MaterialInsData.asset_name:
                                targetSkeltalMaterial = unreal.SkeletalMaterial(
                                    targetMaterial,
                                    materials[SkeletalMeshMaterialIndex].material_slot_name,
                                    materials[SkeletalMeshMaterialIndex].uv_channel_data)
                                materials[SkeletalMeshMaterialIndex] = targetSkeltalMaterial
                        print(MaterialInsStr)
# print materialIndex
def buildImportTask(self, filename, destination_path, options=None):
task = unreal.AssetImportTask()
# task = unreal.AutomatedAssetImportData()
task.set_editor_property('automated', True)
task.set_editor_property('destination_name', '')
task.set_editor_property('destination_path', destination_path)
task.set_editor_property('filename', filename)
task.set_editor_property('replace_existing', True)
task.set_editor_property('save', True)
# task.set_editor_property('skip_read_only',True)
task.set_editor_property('options', options)
return task
def executeImportTasks(self, tasks):
unreal.AssetToolsHelpers.get_asset_tools().import_asset_tasks(tasks)
for task in tasks:
for path in task.get_editor_property('imported_object_paths'):
                print('Imported: %s' % path)
def buildTextureImportOptions(self):
options = unreal.TextureFactory()
options.set_editor_property('create_material', False)
return options
def buildStaticMeshImportOptions(self, bCombine_meshs=False):
options = unreal.FbxImportUI()
static_mesh_import_data = unreal.FbxStaticMeshImportData()
static_mesh_import_data.set_editor_property('combine_meshes', False)
if bCombine_meshs:
static_mesh_import_data.set_editor_property('combine_meshes', True)
options.set_editor_property('import_mesh', True)
options.set_editor_property('import_textures', False)
options.set_editor_property('import_materials', False)
options.set_editor_property('import_as_skeletal', False)
# options.static_mesh_import_data.set_edtitor_property('import_translation')
# options.static_mesh_import_data.set_edtitor_property('import_rotation')
# options.static_mesh_import_data.set_edtitor_property('import_uniform_scale')
options.set_editor_property('static_mesh_import_data', static_mesh_import_data)
# options.static_mesh_import_data.set_edtitor_property('generate_lightmap_u_v')
# options.static_mesh_import_data.set_edtitor_property('generate_lightmap')
return options
def buildSkeletalMeshImportOptions(self):
skeletal_mesh_import_data = unreal.FbxSkeletalMeshImportData()
skeletal_mesh_import_data.set_editor_property('update_skeleton_reference_pose', False)
skeletal_mesh_import_data.set_editor_property('import_meshes_in_bone_hierarchy', True)
skeletal_mesh_import_data.set_editor_property('use_t0_as_ref_pose', False)
skeletal_mesh_import_data.set_editor_property('preserve_smoothing_groups', True)
skeletal_mesh_import_data.set_editor_property('import_morph_targets', True)
import_translation = unreal.Vector(0, 0, 0)
skeletal_mesh_import_data.set_editor_property('import_translation', import_translation)
import_rotation = unreal.Rotator(0, 0, 0)
skeletal_mesh_import_data.set_editor_property('import_rotation', import_rotation)
skeletal_mesh_import_data.set_editor_property('import_uniform_scale', 1.0)
skeletal_mesh_import_data.set_editor_property('convert_scene', True)
skeletal_mesh_import_data.set_editor_property('force_front_x_axis', False)
skeletal_mesh_import_data.set_editor_property('convert_scene_unit', False)
# SkeletalMeshImportData->bImportAsScene = false;
options = unreal.FbxImportUI()
options.set_editor_property('skeletal_mesh_import_data', skeletal_mesh_import_data)
options.set_editor_property('import_mesh', True)
options.set_editor_property('import_textures', False)
options.set_editor_property('import_materials', False)
options.set_editor_property('import_as_skeletal', True)
options.set_editor_property('skeleton', None)
# options.skeletal_mesh_import_data.set_edtitor_property('import_translation')
# options.skeletal_mesh_import_data.set_edtitor_property('import_rotation')
# options.skeletal_mesh_import_data.set_edtitor_property('import_uniform_scale')
# options.skeletal_mesh_import_data.set_edtitor_property('combine_meshes',False)
# options.skeletal_mesh_import_data.set_edtitor_property('generate_lightmap_u_v')
# options.skeletal_mesh_import_data.set_edtitor_property('generate_lightmap')
return options
def refreshIMGAsset(self, IMGFileName):
# Get texture
texture_asset = unreal.Texture.cast(unreal.load_asset(IMGFileName))
        print('refreshing texture: ' + IMGFileName)
texture_asset_str = texture_asset.get_name()
if (texture_asset_str.find("BaseColor") != -1 or texture_asset_str.find("BentNormal") != -1):
texture_asset.srgb = True
else:
texture_asset.srgb = False
# unreal.ImportSubsystem.on_asset_reimport(texture_asset)
def importIMGAsset(self, IMGList, destination_path):
# print "pwd=" + os.path.abspath('.')
taskList = []
clearNameList = []
for IMG_FileName in IMGList:
taskList.append(self.buildImportTask(IMG_FileName, destination_path, self.buildTextureImportOptions()))
clearNameList.append(os.path.splitext(os.path.basename(IMG_FileName))[0])
self.executeImportTasks(taskList)
gamePath = destination_path.replace(".", "_")
for clearName in clearNameList:
texutureFileName = gamePath + "/" + clearName + ""
            print('textureFileName::: ' + texutureFileName + ' :::')
self.refreshIMGAsset(texutureFileName)
def importStaticMesh(self, MeshFileName, destination_path, bCombine_meshs = False):
taskList = []
taskList.append(self.buildImportTask(MeshFileName, destination_path, self.buildStaticMeshImportOptions(bCombine_meshs)))
self.executeImportTasks(taskList)
def importSkeletalMesh(self, MeshFileName, destination_path):
taskList = []
taskList.append(self.buildImportTask(MeshFileName, destination_path, self.buildSkeletalMeshImportOptions()))
self.executeImportTasks(taskList)
# def importMeshAsset():
# static_mesh_fbx = 'I:\\unrealwork\\test424BP\\pyscripts\\SM_StatocMesh.FBX'
# skeletal_mesh_fbx = 'I:\\unrealwork\\test424BP\\pyscripts\\SM_skeletal.FBX'
# static_mesh_task = buildImportTask(static_mesh_fbx,'/Game/StaticMeshes')
# skeletal_mesh_task = buildImportTask(skeletal_mesh_fbx,'/Game/SkeletalMeshes')
# executeImportTasks([static_mesh_task,skeletal_mesh_task])
def get_material_template(self, material):
print(material)
template_material_inst = material.split('_')
if len(template_material_inst) < 2 or template_material_inst[-1] != 'Inst':
return None, None
material_inst = template_material_inst[-2] + '_' + template_material_inst[-1]
package_path = '/Game/ZHAssets/MaterialTemplate/'
filter_staticmesh = unreal.ARFilter(
[], [package_path], [], [unreal.MaterialInstanceConstant.static_class().get_name()], [], True)
AssetRegistry = unreal.AssetRegistryHelpers().get_asset_registry()
MaterialInsDataArr = AssetRegistry.get_assets(filter_staticmesh)
for material in MaterialInsDataArr:
if str(material.package_name).split('/')[-1] == material_inst:
return str(material.package_name), material_inst
return None, None
def create_material_instance(self, Material_matName, Pic_destination_path, texArr):
template_material_inst = Material_matName.split('_')
if len(template_material_inst) < 3:
return
print('Material_matName : {}'.format(Material_matName))
material_inst = template_material_inst[-2] + '_' + template_material_inst[-1]
for material_template in self.MaterialsTemplateArr:
if material_template['mat_inst'] == material_inst:
print('create instance')
self.CreateInstanceOfMaterial(material_template['mat_inst_path'], Material_matName,
Pic_destination_path, texArr, material_template) # add by chenganggui
def add_slots(self, asset_path):
filter_skeletalmesh = unreal.ARFilter(
[], [asset_path], [], [unreal.SkeletalMesh.static_class().get_name()], [], True)
AssetRegistry = unreal.AssetRegistryHelpers().get_asset_registry()
MaterialInsDataArr = AssetRegistry.get_assets(filter_skeletalmesh)
print(len(MaterialInsDataArr))
for material in MaterialInsDataArr:
print('material.package_name is xxx{}'.format(
str(material.get_full_name()).split(' ')[1]
))
unreal.PythonCallLibrary.create_socket(str(material.get_full_name()).split(' ')[1], self.Bone_list)
def importAsset(self, targetDir):
import xml.etree.ElementTree as ET
root = ET.parse(targetDir + '\\Description.xml').getroot()
# print textureTargetNameList
textureTargetNameList = []
material_template_lists = list()
for elem in root.iter():
if (elem.tag == "Material"):
material = elem.attrib.get('matName')
material_template_path, material_template = self.get_material_template(material)
if not material_template:
continue
if material_template not in material_template_lists:
material_template_lists.append(material_template)
else:
continue
Minslots = set()
for elem_Inst in elem.iter():
for Pic in elem_Inst.iter():
Usage = Pic.attrib.get('Usage')
if Usage:
Minslots.add(Usage)
self.MaterialsTemplateArr.append(
{
'mat_inst' : material_template,
'mat_inst_path' : material_template_path,
'Minslots' : list(Minslots)
}
)
# for template in self.MaterialsTemplateArr:
# print('xx' + str(template))
# return
# targetDir = 'M:\\DLQ2\\asset_work\\props\\hw\\Model\\texture\\publish'
root = ET.parse(targetDir + '\\Description.xml').getroot()
picList = []
destination_path = "/Game" + targetDir.replace("\\", "/").split(":")[1]
importType = 0
# print os.path.exists('M:\\DLQ2\\asset_work\\props\\hw\\Model\\texture\\publish\\Description.xml')
# print root,root.tag, root.attrib
# for child_of_root in root:
# print child_of_root.tag, child_of_root.attrib
MeshFileName = ""
is_character_type = False
is_Combine_meshs = False
dict_static_type = {
'Character': False,
'Environment': False,
'Props': False
}
for elem in root.iter():
# print elem.tag, elem.attrib
if (elem.tag == "Pic"):
Pic_Path = elem.attrib.get('Path')
# print Pic_Path
picList.append(Pic_Path)
# destination_path = "/"+os.path.dirname(Pic_Path).replace('M:',"Game")
elif (elem.tag == "StaticMesh"):
MeshFileName = elem.attrib.get('Path')
print('MeshFileName is {}'.format(
MeshFileName
))
static_type = elem.attrib.get('AssetType')
dict_static_type[static_type] = True
importType = 1
elif (elem.tag == "SkeletalMesh"):
MeshFileName = elem.attrib.get('Path')
print('MeshFileName is {}'.format(
MeshFileName
))
if elem.attrib.get('AssetType') == 'character':
                is_character_type = True
importType = 2
# print "importType" + str(importType)
self.importIMGAsset(picList, destination_path) #// by chenganggui
EditorAssetLibrary = unreal.EditorAssetLibrary()
AssetRegistry = unreal.AssetRegistryHelpers().get_asset_registry()
# print "destination_path.replace" + destination_path
if not EditorAssetLibrary.does_directory_exist(destination_path):
EditorAssetLibrary.make_directory(destination_path)
AssetRegistryDataArr = AssetRegistry.get_assets_by_path(destination_path)
texArr = []
# print AssetRegistryDataArr
for AssetRegistryData in AssetRegistryDataArr:
# print AssetRegistryData.package_name
# print 'AssetRegistryData.package_name '+ AssetRegistryData.package_name
texArr.append(str(AssetRegistryData.package_name))
# print destination_path
# print texArr
for elem in root.iter():
if (elem.tag == "Material"):
Material_matName = elem.attrib.get('matName')
# print "Material_matName:: "+Material_matName
Material_TemplateList = self.Material_Template.split("/")
# print('Material_matName is {} '.format(Material_matName))
Pic_destination_path = destination_path
self.create_material_instance(Material_matName, Pic_destination_path, texArr) # add by chenganggui
# for f in os.listdir(targetDir):
# # print f
# if (f.find(".fbx") != -1):
# MeshFileName = targetDir + "\\" + f
# print MeshFileName
if MeshFileName == "":
            print('cannot find FBX file')
return
if importType == 1:
package_path = "/Game" + os.path.dirname(targetDir).replace("\\", "/").split(":")[1]
package_path = package_path.replace(".", "_")
# EditorAssetLibrary = unreal.EditorAssetLibrary()
# if (EditorAssetLibrary.does_directory_exist(package_path)):
# EditorAssetLibrary.delete_directory(package_path)
if dict_static_type['Environment']:
                print('not combining meshes' + '**' * 10)
self.importStaticMesh(MeshFileName, destination_path) # add by chenganggui
else:
                print('combining meshes' + '**' * 10)
self.importStaticMesh(MeshFileName, destination_path, bCombine_meshs=True) # add by chenganggui
print('package_path is {}'.format(package_path))
self.resetStaticMeshMaterial(
package_path) # '/Game/DLQ2/asset_work/props/hw/Model/texture/publish/image/hw_Texture_V01_fbx'
# unreal.EditorLoadingAndSavingUtils.save_dirty_packages(True, True)
elif importType == 2:
package_path = "/Game" + os.path.dirname(targetDir).replace("\\", "/").split(":")[1]
package_path = package_path.replace(".", "_")
# EditorAssetLibrary = unreal.EditorAssetLibrary()
# if (EditorAssetLibrary.does_directory_exist(package_path)):
# EditorAssetLibrary.delete_directory(package_path)
print('import skeletal mesh')
self.importSkeletalMesh(MeshFileName, destination_path)
# self.resetSkeletonMeshMaterial(
# package_path) # '/Game/DLQ2/asset_work/props/hw/Model/texture/publish/image/hw_Texture_V01_fbx'
unreal.EditorLoadingAndSavingUtils.save_dirty_packages(True, True)
        # if this is a character-type asset, add sockets to the skeletal mesh
if is_character_type:
print('this is is_character_type')
self.add_slots(package_path)
import_task = StaticTextureMeshTask()
# #dir = r'M:\DLQ2\asset_work\Scenes\zjynjgb\Model\texture\publish'
#dir = r'P:\TestProject\asset_work\Environment\zjynjgb\texture\Publish'
dir = r'P:\TestProject\asset_work\Character\luban\texture\Publish'
#
import_task.importAsset(dir)
|
# Copyright (C) 2011-2013 Claudio Guarnieri.
# Copyright (C) 2014-2018 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import datetime
import hashlib
import logging
import os
import pkgutil
import socket
import struct
import sys
import threading
import traceback
import urllib
import urllib2
import xmlrpclib
import zipfile
from lib.api.process import Process
from lib.common.abstracts import Package, Auxiliary
from lib.common.constants import SHUTDOWN_MUTEX
from lib.common.decide import dump_memory
from lib.common.defines import KERNEL32
from lib.exceptions.exceptions import CuckooError, CuckooDisableModule
from lib.common.hashing import hash_file
from lib.common.rand import random_string
from lib.common.results import upload_to_host
from lib.core.config import Config
from lib.core.ioctl import zer0m0n
from lib.core.packages import choose_package
from lib.core.pipe import PipeServer, PipeForwarder, PipeDispatcher
from lib.core.pipe import disconnect_pipes
from lib.core.privileges import grant_privilege
from lib.core.startup import init_logging, disconnect_logger, set_clock
from modules import auxiliary
log = logging.getLogger("analyzer")
class Files(object):
PROTECTED_NAMES = ()
def __init__(self):
self.files = {}
self.files_orig = {}
self.dumped = []
def is_protected_filename(self, file_name):
"""Return whether or not to inject into a process with this name."""
return file_name.lower() in self.PROTECTED_NAMES
def add_pid(self, filepath, pid, verbose=True):
"""Track a process identifier for this file."""
if not pid or filepath.lower() not in self.files:
return
if pid not in self.files[filepath.lower()]:
self.files[filepath.lower()].append(pid)
verbose and log.info("Added pid %s for %r", pid, filepath)
def add_file(self, filepath, pid=None):
"""Add filepath to the list of files and track the pid."""
if filepath.lower() not in self.files:
log.info(
"Added new file to list with pid %s and path %s",
pid, filepath.encode("utf8")
)
self.files[filepath.lower()] = []
self.files_orig[filepath.lower()] = filepath
self.add_pid(filepath, pid, verbose=False)
def dump_file(self, filepath):
"""Dump a file to the host."""
if not os.path.isfile(filepath):
log.warning("File at path %r does not exist, skip.", filepath)
return False
# Check whether we've already dumped this file - in that case skip it.
try:
sha256 = hash_file(hashlib.sha256, filepath)
if sha256 in self.dumped:
return
except IOError as e:
log.info("Error dumping file from path \"%s\": %s", filepath, e)
return
filename = "%s_%s" % (sha256[:16], os.path.basename(filepath))
upload_path = os.path.join("files", filename)
try:
upload_to_host(
# If available use the original filepath, the one that is
# not lowercased.
self.files_orig.get(filepath.lower(), filepath),
upload_path, self.files.get(filepath.lower(), [])
)
self.dumped.append(sha256)
except (IOError, socket.error) as e:
log.error(
"Unable to upload dropped file at path \"%s\": %s",
filepath, e
)
def delete_file(self, filepath, pid=None):
"""A file is about to removed and thus should be dumped right away."""
self.add_pid(filepath, pid)
self.dump_file(filepath)
# Remove the filepath from the files list.
self.files.pop(filepath.lower(), None)
self.files_orig.pop(filepath.lower(), None)
def move_file(self, oldfilepath, newfilepath, pid=None):
"""A file will be moved - track this change."""
self.add_pid(oldfilepath, pid)
if oldfilepath.lower() in self.files:
# Replace the entry with the new filepath.
self.files[newfilepath.lower()] = \
self.files.pop(oldfilepath.lower(), [])
def dump_files(self):
"""Dump all pending files."""
while self.files:
self.delete_file(self.files.keys()[0])
class ProcessList(object):
def __init__(self):
self.pids = []
self.pids_notrack = []
def add_pid(self, pid, track=True):
"""Add a process identifier to the process list.
Track determines whether the analyzer should be monitoring this
process, i.e., whether Cuckoo should wait for this process to finish.
"""
if int(pid) not in self.pids and int(pid) not in self.pids_notrack:
if track:
self.pids.append(int(pid))
else:
self.pids_notrack.append(int(pid))
def add_pids(self, pids):
"""Add one or more process identifiers to the process list."""
if isinstance(pids, (tuple, list)):
for pid in pids:
self.add_pid(pid)
else:
self.add_pid(pids)
def has_pid(self, pid, notrack=True):
"""Return whether or not this process identifier being tracked."""
if int(pid) in self.pids:
return True
if notrack and int(pid) in self.pids_notrack:
return True
return False
def remove_pid(self, pid):
"""Remove a process identifier from being tracked."""
if pid in self.pids:
self.pids.remove(pid)
if pid in self.pids_notrack:
self.pids_notrack.remove(pid)
class CommandPipeHandler(object):
"""Pipe Handler.
This class handles the notifications received through the Pipe Server and
decides what to do with them.
"""
ignore_list = dict(pid=[])
def __init__(self, analyzer):
self.analyzer = analyzer
self.tracked = {}
def _handle_debug(self, data):
"""Debug message from the monitor."""
log.debug(data)
def _handle_info(self, data):
"""Regular message from the monitor."""
log.info(data)
def _handle_warning(self, data):
"""Warning message from the monitor."""
log.warning(data)
def _handle_critical(self, data):
"""Critical message from the monitor."""
log.critical(data)
def _handle_loaded(self, data):
"""The monitor has loaded into a particular process."""
if not data or data.count(",") != 1:
log.warning("Received loaded command with incorrect parameters, "
"skipping it.")
return
pid, track = data.split(",")
if not pid.isdigit() or not track.isdigit():
log.warning("Received loaded command with incorrect parameters, "
"skipping it.")
return
self.analyzer.process_lock.acquire()
self.analyzer.process_list.add_pid(int(pid), track=int(track))
self.analyzer.process_lock.release()
log.debug("Loaded monitor into process with pid %s", pid)
def _handle_getpids(self, data):
"""Return the process identifiers of the agent and its parent
process."""
return struct.pack("II", self.analyzer.pid, self.analyzer.ppid)
def _inject_process(self, process_id, thread_id, mode):
"""Helper function for injecting the monitor into a process."""
        # We acquire the process lock in order to prevent the analyzer from
        # terminating the analysis while we are operating on the new process.
self.analyzer.process_lock.acquire()
# Set the current DLL to the default one provided at submission.
dll = self.analyzer.default_dll
if process_id in (self.analyzer.pid, self.analyzer.ppid):
if process_id not in self.ignore_list["pid"]:
log.warning("Received request to inject Cuckoo processes, "
"skipping it.")
self.ignore_list["pid"].append(process_id)
self.analyzer.process_lock.release()
return
        # We inject the process only if it's not being monitored already,
        # otherwise we would generate polluted logs (if it didn't crash
        # horribly to begin with).
if self.analyzer.process_list.has_pid(process_id):
# This pid is already on the notrack list, move it to the
# list of tracked pids.
if not self.analyzer.process_list.has_pid(process_id, notrack=False):
log.debug("Received request to inject pid=%d. It was already "
"on our notrack list, moving it to the track list.")
self.analyzer.process_list.remove_pid(process_id)
self.analyzer.process_list.add_pid(process_id)
self.ignore_list["pid"].append(process_id)
# Spit out an error once and just ignore it further on.
elif process_id not in self.ignore_list["pid"]:
self.ignore_list["pid"].append(process_id)
# We're done operating on the processes list, release the lock.
self.analyzer.process_lock.release()
return
# Open the process and inject the DLL. Hope it enjoys it.
proc = Process(pid=process_id, tid=thread_id)
filename = os.path.basename(proc.get_filepath())
if not self.analyzer.files.is_protected_filename(filename):
# Add the new process ID to the list of monitored processes.
self.analyzer.process_list.add_pid(process_id)
# We're done operating on the processes list,
# release the lock. Let the injection do its thing.
self.analyzer.process_lock.release()
# If we have both pid and tid, then we can use APC to inject.
if process_id and thread_id:
proc.inject(dll, apc=True, mode="%s" % mode)
else:
proc.inject(dll, apc=False, mode="%s" % mode)
log.info("Injected into process with pid %s and name %r",
proc.pid, filename)
def _handle_process(self, data):
"""Request for injection into a process."""
# Parse the process identifier.
if not data or not data.isdigit():
log.warning("Received PROCESS command from monitor with an "
"incorrect argument.")
return
return self._inject_process(int(data), None, 0)
def _handle_process2(self, data):
"""Request for injection into a process using APC."""
# Parse the process and thread identifier.
if not data or data.count(",") != 2:
log.warning("Received PROCESS2 command from monitor with an "
"incorrect argument.")
return
pid, tid, mode = data.split(",")
if not pid.isdigit() or not tid.isdigit() or not mode.isdigit():
log.warning("Received PROCESS2 command from monitor with an "
"incorrect argument.")
return
return self._inject_process(int(pid), int(tid), int(mode))
def _handle_file_new(self, data):
"""Notification of a new dropped file."""
self.analyzer.files.add_file(data.decode("utf8"), self.pid)
def _handle_file_del(self, data):
"""Notification of a file being removed (if it exists) - we have to
dump it before it's being removed."""
filepath = data.decode("utf8")
if os.path.exists(filepath):
self.analyzer.files.delete_file(filepath, self.pid)
def _handle_file_move(self, data):
"""A file is being moved - track these changes."""
if "::" not in data:
log.warning("Received FILE_MOVE command from monitor with an "
"incorrect argument.")
return
old_filepath, new_filepath = data.split("::", 1)
self.analyzer.files.move_file(
old_filepath.decode("utf8"), new_filepath.decode("utf8"), self.pid
)
def _handle_kill(self, data):
"""A process is being killed."""
if not data.isdigit():
log.warning("Received KILL command with an incorrect argument.")
return
if self.analyzer.config.options.get("procmemdump"):
dump_memory(int(data))
def _handle_dumpmem(self, data):
"""Dump the memory of a process as it is right now."""
if not data.isdigit():
log.warning("Received DUMPMEM command with an incorrect argument.")
return
dump_memory(int(data))
def _handle_dumpreqs(self, data):
if not data.isdigit():
log.warning("Received DUMPREQS command with an incorrect argument %r.", data)
return
pid = int(data)
if pid not in self.tracked:
log.warning("Received DUMPREQS command but there are no reqs for pid %d.", pid)
return
dumpreqs = self.tracked[pid].get("dumpreq", [])
for addr, length in dumpreqs:
log.debug("tracked dump req (%r, %r, %r)", pid, addr, length)
if not addr or not length:
continue
Process(pid=pid).dump_memory_block(int(addr), int(length))
def _handle_track(self, data):
if not data.count(":") == 2:
log.warning("Received TRACK command with an incorrect argument %r.", data)
return
pid, scope, params = data.split(":", 2)
pid = int(pid)
paramtuple = params.split(",")
if pid not in self.tracked:
self.tracked[pid] = {}
if scope not in self.tracked[pid]:
self.tracked[pid][scope] = []
self.tracked[pid][scope].append(paramtuple)
def dispatch(self, data):
response = "NOPE"
if not data or ":" not in data:
log.critical("Unknown command received from the monitor: %r",
data.strip())
else:
# Backwards compatibility (old syntax is, e.g., "FILE_NEW:" vs the
# new syntax, e.g., "1234:FILE_NEW:").
if data[0].isupper():
command, arguments = data.strip().split(":", 1)
self.pid = None
else:
self.pid, command, arguments = data.strip().split(":", 2)
fn = getattr(self, "_handle_%s" % command.lower(), None)
if not fn:
log.critical("Unknown command received from the monitor: %r",
data.strip())
else:
try:
response = fn(arguments)
except:
log.exception(
"Pipe command handler exception occurred (command "
"%s args %r).", command, arguments
)
return response
class Analyzer(object):
"""Cuckoo Windows Analyzer.
This class handles the initialization and execution of the analysis
procedure, including handling of the pipe server, the auxiliary modules and
the analysis packages.
"""
def __init__(self):
self.config = None
self.target = None
self.do_run = True
self.time_counter = 0
self.process_lock = threading.Lock()
self.default_dll = None
self.pid = os.getpid()
self.ppid = Process(pid=self.pid).get_parent_pid()
self.files = Files()
self.process_list = ProcessList()
self.package = None
self.reboot = []
def get_pipe_path(self, name):
"""Return \\\\.\\PIPE on Windows XP and \\??\\PIPE elsewhere."""
version = sys.getwindowsversion()
if version.major == 5 and version.minor == 1:
return "\\\\.\\PIPE\\%s" % name
return "\\??\\PIPE\\%s" % name
def prepare(self):
"""Prepare env for analysis."""
# Get SeDebugPrivilege for the Python process. It will be needed in
# order to perform the injections.
grant_privilege("SeDebugPrivilege")
grant_privilege("SeLoadDriverPrivilege")
# Initialize logging.
init_logging()
# Parse the analysis configuration file generated by the agent.
self.config = Config(cfg="analysis.conf")
# Pass the configuration through to the Process class.
Process.set_config(self.config)
# Set virtual machine clock.
set_clock(datetime.datetime.strptime(
self.config.clock, "%Y%m%dT%H:%M:%S"
))
# Set the default DLL to be used for this analysis.
self.default_dll = self.config.options.get("dll")
        # If a pipe name has not been set, generate a random one.
self.config.pipe = self.get_pipe_path(
self.config.options.get("pipe", random_string(16, 32))
)
# Generate a random name for the logging pipe server.
self.config.logpipe = self.get_pipe_path(random_string(16, 32))
# Initialize and start the Command Handler pipe server. This is going
# to be used for communicating with the monitored processes.
self.command_pipe = PipeServer(
PipeDispatcher, self.config.pipe, message=True,
dispatcher=CommandPipeHandler(self)
)
self.command_pipe.daemon = True
self.command_pipe.start()
# Initialize and start the Log Pipe Server - the log pipe server will
# open up a pipe that monitored processes will use to send logs to
# before they head off to the host machine.
destination = self.config.ip, self.config.port
self.log_pipe_server = PipeServer(
PipeForwarder, self.config.logpipe, destination=destination
)
self.log_pipe_server.daemon = True
self.log_pipe_server.start()
# We update the target according to its category. If it's a file, then
# we store the target path.
if self.config.category == "file":
self.target = os.path.join(
os.environ["TEMP"], self.config.file_name
)
elif self.config.category == "archive":
zip_path = os.path.join(os.environ["TEMP"], self.config.file_name)
zipfile.ZipFile(zip_path).extractall(os.environ["TEMP"])
self.target = os.path.join(
os.environ["TEMP"], self.config.options["filename"]
)
# If it's a URL, well.. we store the URL.
else:
self.target = self.config.target
def stop(self):
"""Allow an auxiliary module to stop the analysis."""
self.do_run = False
def complete(self):
"""End analysis."""
# Stop the Pipe Servers.
self.command_pipe.stop()
self.log_pipe_server.stop()
# Cleanly close remaining connections
disconnect_pipes()
disconnect_logger()
def run(self):
"""Run analysis.
@return: operation status.
"""
self.prepare()
self.path = os.getcwd()
log.debug("Starting analyzer from: %s", self.path)
log.debug("Pipe server name: %s", self.config.pipe)
log.debug("Log pipe server name: %s", self.config.logpipe)
# If no analysis package was specified at submission, we try to select
# one automatically.
if not self.config.package:
log.debug(
"No analysis package specified, trying to detect "
"it automagically."
)
# If the analysis target is a file, we choose the package according
# to the file format.
if self.config.category == "file":
package = choose_package(
self.config.file_type, self.config.file_name,
self.config.pe_exports.split(",")
)
            # If it's a URL, we'll just use the default Internet Explorer
            # package.
else:
package = "ie"
# If we weren't able to automatically determine the proper package,
# we need to abort the analysis.
if not package:
raise CuckooError("No valid package available for file "
"type: {0}".format(self.config.file_type))
log.info("Automatically selected analysis package \"%s\"", package)
# Otherwise just select the specified package.
else:
package = self.config.package
# Generate the package path.
package_name = "modules.packages.%s" % package
# Try to import the analysis package.
try:
__import__(package_name, globals(), locals(), ["dummy"], -1)
# If it fails, we need to abort the analysis.
except ImportError:
raise CuckooError("Unable to import package \"{0}\", does "
"not exist.".format(package_name))
# Initialize the package parent abstract.
Package()
# Enumerate the abstract subclasses.
try:
package_class = Package.__subclasses__()[0]
except IndexError as e:
raise CuckooError("Unable to select package class "
"(package={0}): {1}".format(package_name, e))
# Initialize the analysis package.
self.package = package_class(self.config.options, analyzer=self)
# Move the sample to the current working directory as provided by the
# task - one is able to override the starting path of the sample.
# E.g., for some samples it might be useful to run from %APPDATA%
# instead of %TEMP%.
if self.config.category == "file":
self.target = self.package.move_curdir(self.target)
# Initialize Auxiliary modules
Auxiliary()
prefix = auxiliary.__name__ + "."
for loader, name, ispkg in pkgutil.iter_modules(auxiliary.__path__, prefix):
if ispkg:
continue
# Import the auxiliary module.
try:
__import__(name, globals(), locals(), ["dummy"], -1)
except ImportError as e:
log.warning("Unable to import the auxiliary module "
"\"%s\": %s", name, e)
# Walk through the available auxiliary modules.
aux_enabled, aux_avail = [], []
for module in Auxiliary.__subclasses__():
# Try to start the auxiliary module.
try:
aux = module(options=self.config.options, analyzer=self)
aux_avail.append(aux)
aux.init()
aux.start()
except (NotImplementedError, AttributeError):
log.exception(
"Auxiliary module %s was not implemented", module.__name__
)
except CuckooDisableModule:
continue
except Exception as e:
log.exception(
"Cannot execute auxiliary module %s: %s",
module.__name__, e
)
else:
log.debug("Started auxiliary module %s",
module.__name__)
aux_enabled.append(aux)
# Inform zer0m0n of the ResultServer address.
zer0m0n.resultserver(self.config.ip, self.config.port)
# Forward the command pipe and logpipe names on to zer0m0n.
zer0m0n.cmdpipe(self.config.pipe)
zer0m0n.channel(self.config.logpipe)
# Hide the Cuckoo Analyzer & Cuckoo Agent.
zer0m0n.hidepid(self.pid)
zer0m0n.hidepid(self.ppid)
# Initialize zer0m0n with our compiled Yara rules.
zer0m0n.yarald("bin/rules.yarac")
# Propagate the requested dump interval, if set.
zer0m0n.dumpint(int(self.config.options.get("dumpint", "0")))
# Start analysis package. If for any reason, the execution of the
# analysis package fails, we have to abort the analysis.
pids = self.package.start(self.target)
# If the analysis package returned a list of process identifiers, we
# add them to the list of monitored processes and enable the process monitor.
if pids:
self.process_list.add_pids(pids)
pid_check = True
# If the package didn't return any process ID (for example in the case
# where the package isn't enabling any behavioral analysis), we don't
# enable the process monitor.
else:
log.info("No process IDs returned by the package, running "
"for the full timeout.")
pid_check = False
        # Check in the options whether the user enabled timeout enforcement.
        # If so, we need to override pid_check and disable the process monitor.
if self.config.enforce_timeout:
log.info("Enabled timeout enforce, running for the full timeout.")
pid_check = False
while self.do_run:
self.time_counter += 1
if self.time_counter == int(self.config.timeout):
log.info("Analysis timeout hit, terminating analysis.")
break
# If the process lock is locked, it means that something is
# operating on the list of monitored processes. Therefore we
# cannot proceed with the checks until the lock is released.
if self.process_lock.locked():
KERNEL32.Sleep(1000)
continue
try:
# If the process monitor is enabled we start checking whether
# the monitored processes are still alive.
if pid_check:
# We also track the PIDs provided by zer0m0n.
self.process_list.add_pids(zer0m0n.getpids())
for pid in self.process_list.pids:
if not Process(pid=pid).is_alive():
log.info("Process with pid %s has terminated", pid)
self.process_list.remove_pid(pid)
# If none of the monitored processes are still alive, we
# can terminate the analysis.
if not self.process_list.pids:
log.info("Process list is empty, "
"terminating analysis.")
break
# Update the list of monitored processes available to the
# analysis package. It could be used for internal
# operations within the module.
self.package.set_pids(self.process_list.pids)
try:
                    # The analysis packages are provided with a function that
                    # is executed at every loop iteration. If such function
                    # returns False, it means that it requested the analysis
                    # to be terminated.
if not self.package.check():
log.info("The analysis package requested the "
"termination of the analysis.")
break
# If the check() function of the package raised some exception
# we don't care, we can still proceed with the analysis but we
# throw a warning.
except Exception as e:
log.warning("The package \"%s\" check function raised "
"an exception: %s", package_name, e)
finally:
# Zzz.
KERNEL32.Sleep(1000)
if not self.do_run:
log.debug("The analyzer has been stopped on request by an "
"auxiliary module.")
# Create the shutdown mutex.
KERNEL32.CreateMutexA(None, False, SHUTDOWN_MUTEX)
try:
# Before shutting down the analysis, the package can perform some
# final operations through the finish() function.
self.package.finish()
except Exception as e:
log.warning("The package \"%s\" finish function raised an "
"exception: %s", package_name, e)
try:
# Upload files the package created to package_files in the
# results folder.
for path, name in self.package.package_files() or []:
upload_to_host(path, os.path.join("package_files", name))
except Exception as e:
log.warning("The package \"%s\" package_files function raised an "
"exception: %s", package_name, e)
# Terminate the Auxiliary modules.
for aux in aux_enabled:
try:
aux.stop()
except (NotImplementedError, AttributeError):
continue
except Exception as e:
log.warning("Cannot terminate auxiliary module %s: %s",
aux.__class__.__name__, e)
if self.config.terminate_processes:
# Try to terminate remaining active processes.
log.info("Terminating remaining processes before shutdown.")
for pid in self.process_list.pids:
proc = Process(pid=pid)
if proc.is_alive():
try:
proc.terminate()
except:
continue
# Run the finish callback of every available Auxiliary module.
for aux in aux_avail:
try:
aux.finish()
except (NotImplementedError, AttributeError):
continue
except Exception as e:
log.warning("Exception running finish callback of auxiliary "
"module %s: %s", aux.__class__.__name__, e)
# Dump all the notified files.
self.files.dump_files()
# Hell yeah.
log.info("Analysis completed.")
return True
if __name__ == "__main__":
success = False
error = ""
try:
# Initialize the main analyzer class.
analyzer = Analyzer()
# Run it and wait for the response.
success = analyzer.run()
data = {
"status": "complete",
"description": success,
}
# This is not likely to happen.
    except KeyboardInterrupt:
        error = "Keyboard Interrupt"
        data = {
            "status": "exception",
            "description": error,
        }
# If the analysis process encountered a critical error, it will raise a
# CuckooError exception, which will force the termination of the analysis.
# Notify the agent of the failure. Also catch unexpected exceptions.
except Exception as e:
# Store the error.
error_exc = traceback.format_exc()
error = "%s\n%s" % (e, error_exc)
# Just to be paranoid.
if len(log.handlers):
log.exception(error_exc)
else:
sys.stderr.write("{0}\n".format(error_exc))
data = {
"status": "exception",
"description": error_exc,
}
finally:
try:
# Let's invoke the completion procedure.
analyzer.complete()
except Exception as e:
complete_excp = traceback.format_exc()
data["status"] = "exception"
if "description" in data:
data["description"] += "%s\n%s" % (
data["description"], complete_excp
)
else:
data["description"] = complete_excp
# Report that we're finished. First try with the XML RPC thing and
# if that fails, attempt the new Agent.
try:
server = xmlrpclib.Server("http://127.0.0.1:8000")
server.complete(success, error, "unused_path")
except Exception as e:
urllib2.urlopen("http://127.0.0.1:8000/status",
urllib.urlencode(data)).read()
|
#encoding=UTF8
'''
Read the data to be actively monitored from redis and deserialize it,
read the corresponding data according to the configuration file,
then publish the monitored data through redis.
'''
import global_setting
import plugin_conf
import threading
import json
import time,sys
from conf import redis_connecter as redis
hostname='wan'
channel='main_queue'
def get_config_fromredis(host):
    host_configure='HostConfiguration::%s' % host  # key in redis; client and server must use the same one
monitor_list=redis.r.get(host_configure)
if monitor_list is not None:
monitor_list=json.loads(monitor_list)
else:
sys.exit('could not load the config data from the redis')
return monitor_list
def msg_publish(fun_from_redis):
data=fun_from_redis()
send_dict={'hostname':hostname,'data':data}
redis.r.publish(channel,json.dumps(send_dict))
if __name__=='__main__':
    # Fetch the data to be actively monitored from redis; it is a nested dict:
    '''
    config_from_redis = {
        'hostname': hostname,
        'services': {'cpu': [plugin_name, interval, last_run],
                     'load': [plugin_name, interval, last_run],
        }
    }
    '''
config_from_redis=get_config_fromredis(hostname)
while True:
for k,v in config_from_redis['services'].items():
interval=v[1]
last_run=v[2]
if (last_run+interval)<=time.time():
config_from_redis['services'][k][2]=time.time()
                plugin_data=getattr(plugin_conf,v[0])  # look up the plugin function by name
print plugin_data
p=threading.Thread(target=msg_publish,args=(plugin_data,))
p.start()
else:
next_time=interval-(time.time()-last_run)
print 'next running needs %s '% next_time
time.sleep(2)
    print 'the loop has finished'
|
p=print
a=A=b=''
i=26
while i:l=chr(123-i);L=chr(91-i);a+=l;A+=L;b+=L+l;i-=1
c='-'*23
p(A+a);p(b);p(c)
while b:p('| %-20s|'%' '.join(b[:10]));b=b[10:]
p(c)
d='apple','grape','lemon','olive'
for e in a:
if e in'aglo':b+='%s begins with %%s, '%d[i]%e;e=e+': ',d[i];i+=1
p(list(e))
p(b[:-2])
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
from twython import Twython
import requests
from cStringIO import StringIO
from django.core.files import File
# will be used to build tweet absolute url
TWEET_URL_TEMPLATE = "https://twitter.com/{user_name}/status/{tweet_id}/"
def get_credentials_from_file(file_path):
"""
Returns a dict with twitter api credentials. Reads them from json file.
:param file_path: str absolute path to credentials file
:return: dict with credentials
"""
# since this operation requires disk access it can be optimized
# with cache/memoization, should not be an issue for small projects
with open(file_path, 'rb') as credentials_file:
return json.load(credentials_file)
def get_twitter_api(credentials):
"""
Returns authenticated api to twitter.
:param credentials: dict with credentials, expected to be as following:
'app_key' - Consumer Key (API Key)
'app_secret' - Consumer Secret (API Secret)
'oauth_token' - Access Token
'oauth_token_secret' - Access Token Secret
based on Twitter Keys and Access tokens settings for an app
:return: twython.Twython
"""
twitter = Twython(**credentials)
# twitter = Twython(app_key=credentials['consumer_key'],
# app_secret=credentials['consumer_secret'],
# oauth_token=credentials['access_token'],
# oauth_token_secret=credentials['access_token_secret'])
return twitter
def get_image_from_url(image_url):
"""
Fetch the image from original_image_url and return Django file object.
:param image_url: str absolute url to image
:return: django.core.files.File
"""
response = requests.get(image_url)
file_name = image_url.split('/')[-1]
file_like = StringIO(response.content)
file_obj = File(file_like, name=file_name)
return file_obj
def get_original_image_url_from_tweet(tweet):
"""
Extracts the tweet photo image url from tweet data.
:param tweet: dict of the tweet provided by twitter API
:return: str original image url or None in case if something went wrong
"""
entities = tweet.get('entities', {})
media_entities = entities.get('media', [])
image_url = None
# media_entities is a list, iterate until we meet the type 'photo'
for media in media_entities:
if media.get('type') == 'photo':
image_url = media.get('media_url')
break
return image_url
def get_tweet_id(tweet):
"""
Extracts tweet id from tweet data received with twitter api.
:param tweet: dict with tweet data
:return: int tweet id
"""
return tweet.get('id')
def get_tweet_url(tweet):
"""
Builds tweet absolute url from tweet data received with twitter api.
:param tweet: dict with tweet data
:return: str tweet url
"""
user_info = tweet.get('user')
user_screen_name = user_info.get('screen_name')
tweet_id = get_tweet_id(tweet)
return TWEET_URL_TEMPLATE.format(user_name=user_screen_name,
tweet_id=tweet_id)
def search_tweets_by_hashtag(api, hash_tag, limit=100, since_id=None, image_only=True):
"""
Search twitter for tweets with specific hashtag, if image_only is true - will search
for tweets that have photos in it (twitter filtering).
:param api: twython api to access twitter (should be authenticated)
:param hash_tag: str hash tag for search
:param limit: int limit results to this number
    :param since_id: int tweet id; only return tweets more recent than this id
:param image_only: bool only search tweets with images
    :return: list of tweet status dicts
"""
# build the query to twitter, search for hashtag in any case, if image_only selected - add
# twitter filtering to the query based on twitter api documentation
# https://dev.twitter.com/rest/public/search (query operators)
query = '{hash_tag}{extra}'.format(
hash_tag=hash_tag,
extra=' filter:images' if image_only else '')
search_kwargs = {
'q': query,
'count': limit,
}
# limit the search with only recent items
if since_id:
search_kwargs['since_id'] = since_id
# query the api
search_results = api.search(**search_kwargs)
# search results will be a dict of 'search_metadata' and 'statuses', where statuses
# are actual twitter statuses (dict)
return search_results['statuses']
|
import cv2
import numpy as np
import pandas as pd
from tqdm import tqdm
def normalize(df, width, height):
"""Normalize the images in the given DataFrame.
Args:
df = [DataFrame] images as a Pandas DataFrame
width = [int] width of the raw images in pixels
height = [int] height of the raw images in pixels
Returns [ndarray]:
Images normalized in a (N, height, width) Numpy array.
"""
    # images are stored inverted, so flip them back
img_array = 255 - df.iloc[:, 1:].to_numpy()
# make use of full grayscale spectrum
img_min = img_array.min(axis=1, keepdims=True)
img_array = img_array - img_min
img_max = img_array.max(axis=1, keepdims=True)
img_array = img_array * (255 / img_max)
# remove low-intensity pixels
img_array[img_array < 26] = 0
return img_array.reshape((len(df), height, width)).astype(np.uint8)
def bounding_boxes(images, width, height):
"""Returns the bounding boxes around the relevant pixels.
Args:
images = [ndarray] the images as a (N, height, width) Numpy array
width = [int] width of the raw images in pixels
height = [int] height of the raw images in pixels
Returns [ndarray]:
Left x pixel, right x pixel, top y pixel, bottom y pixel of
the bounding box for each image.
"""
# remove lines at the boundary of the images
images = images[:, 5:-5, 5:-5]
images = np.pad(images, [(0,), (5,), (5,)], mode='constant')
# find columns and rows that have visible pixels
cols = np.any(images > 170, axis=1)
rows = np.any(images > 170, axis=2)
# find first and last pixels of columns and rows, respectively
xmin = np.argmax(cols, axis=1)
xmax = width - np.argmax(cols[:, ::-1], axis=1)
ymin = np.argmax(rows, axis=1)
ymax = height - np.argmax(rows[:, ::-1], axis=1)
# widen the bounding boxes if they are cropped too much
xmin = (xmin - 13) * (xmin > 13)
xmax = (xmax + 13 - width) * (xmax < width - 13) + width
# lengthen the bounding boxes if they are cropped too much
ymin = (ymin - 10) * (ymin > 10)
ymax = (ymax + 10 - height) * (ymax < height - 10) + height
return np.stack((xmin, xmax, ymin, ymax), axis=1)
def crop_pad_resize(images, bboxes, out_size, pad=16):
"""Crops, pads, and resizes the given images.
Args:
images = [ndarray] the images as (N, height, width) Numpy array
bboxes = [ndarray] the bounding boxes as a (N, 4) Numpy array
out_size = [int] the size of the output images in pixels
pad = [int] number of pixels to pad the bounding boxes
Returns [ndarray]:
Input images cropped, padded, and resized as
(N, out_size, out_size) Numpy ndarray.
"""
images_cropped_padded_resized = []
for img, (xmin, xmax, ymin, ymax) in zip(images, bboxes):
# crop the image
img_crop = img[ymin:ymax, xmin:xmax]
# compute length of square cropped image
width = xmax - xmin
height = ymax - ymin
length = max(width, height) + pad
# make sure that the aspect ratio is kept in resizing
padding = [((length - height) // 2,), ((length - width) // 2,)]
img_crop_pad = np.pad(img_crop, padding, mode='constant')
# resize image to standard resolution
img_crop_pad_resize = cv2.resize(img_crop_pad, (out_size, out_size))
images_cropped_padded_resized.append(img_crop_pad_resize)
return np.stack(images_cropped_padded_resized)
def preprocess(files, width, height, out_size, batch_size=512):
"""Preprocess the grapheme images in the given files.
Args:
files = [list] list of file paths to the parquet files with images
width = [int] width of the raw images in pixels
height = [int] height of the raw images in pixels
out_size = [int] the size of the output images in pixels
batch_size = [int] number of images to process at a time
Returns [ndarray]:
Preprocessed images in (N, out_size, out_size) Numpy ndarray.
"""
preprocessed_images = []
for file_name in files:
# read images from parquet file
df = pd.read_parquet(file_name)
for batch_idx in tqdm(range(0, len(df), batch_size)):
# select batch of images to process
batch = df.iloc[batch_idx:batch_idx + batch_size]
# process images
normalized_images = normalize(batch, width, height)
bboxes = bounding_boxes(normalized_images, width, height)
images = crop_pad_resize(normalized_images, bboxes, out_size)
preprocessed_images.append(images)
# put all preprocessed images in one big ndarray
return np.concatenate(preprocessed_images)
if __name__ == '__main__':
# preprocess training images
train_files = ['../kaggle/input/bengaliai-cv19/train_image_data_0.parquet',
'../kaggle/input/bengaliai-cv19/train_image_data_1.parquet',
'../kaggle/input/bengaliai-cv19/train_image_data_2.parquet',
'../kaggle/input/bengaliai-cv19/train_image_data_3.parquet']
preprocessed_train_images = preprocess(train_files, 236, 137, 128)
# determine mean and standard deviation for normalization purposes
mean = preprocessed_train_images.mean()
std = preprocessed_train_images.std()
print(f'Mean: {mean}\tStandard Deviation: {std}')
# save training images ndarray on storage for easy re-use
np.save(f'../train_image_data_{128}.npy', preprocessed_train_images)
|
# Property tax
# Calculation of assessment value and property tax
# Anatoli Penev
# 26.11.2017
# Main function
def main():
# Ask for the property's actual value.
property_value = float(input("Enter the property's actual value: "))
prop(property_value)
# Property value calculation
def prop(property_value):
# Calculate the property's assessment value
# using the actual value.
# assessment_value=actual_value*60%
assessment_value = property_value * 0.6
print('The assessment value is $%.2f' % assessment_value)
assessment(assessment_value)
# Assessment value calculation
def assessment(assessment_value):
# Calculate the property tax.
# Property_tax=$0.64 for every $100 of assessment_value.
property_tax = assessment_value * 0.0064
print('The property tax is $%.2f' % property_tax)
# Call the main function.
main()
|
#!/usr/bin/python3
import requests, base64, argparse, json
class Crypt(object):
def __init__(self, addr, keypair=None):
self.baseurl = addr
if keypair is None:
self.key = requests.get(self.baseurl+"/register").json()
else:
self.key = keypair
def encrypt(self, raw, key=None):
if key is None:
key = self.key['pub']
data = base64.b64encode(raw).decode()
enc = requests.post(self.baseurl+"/encrypt/"+key, data={
'data':data
}).content
res = base64.b64decode(enc)
return res
def decrypt(self, raw, key=None):
if key is None:
key = self.key['priv']
enc = base64.b64encode(raw).decode()
dec = requests.post(self.baseurl+"/decrypt/"+key, data={
'data':enc
}).content
res = base64.b64decode(dec)
return res
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"-u","--url",
help=("Server URL. Eg: https://cryptserver--marcusweinberger.repl.co"),
required=True)
parser.add_argument(
"-k","--key",
help=("Keyfile, if key is \"gen\" then a new keypair will be requested and output to the terminal. Eg: --key key.json"),
required=True)
parser.add_argument(
"-e","--encrypt",
help=("Encrypt a file and add \"encrypted.\" to the filename. Eg: --encrypt secrets.txt"))
parser.add_argument(
"-d","--decrypt",
help=("Decrypt a file. Eg: --decrypt encrypted.secrets.txt"))
return parser.parse_args()
def main(args):
url = args.url
if args.key == "gen":
keys = Crypt(url).key
print(json.dumps(keys))
return 0
key = json.loads(open(args.key,"r").read())
crypt = Crypt(url,keypair=key)
if not args.encrypt and not args.decrypt:
print("Neither --encrypt or --decrypt specified, exiting.")
return 1
if args.encrypt:
data = open(args.encrypt,"rb").read()
res = crypt.encrypt(data)
with open("encrypted."+args.encrypt,"wb") as f:
f.write(res)
return 0
if args.decrypt:
data = open(args.decrypt,"rb").read()
res = crypt.decrypt(data)
with open("decrypted."+args.decrypt,"wb") as f:
f.write(res)
return 0
return 0
if __name__ == "__main__":
exit(main(parse_args()))
|
property_key_map = {
"BIRTH_DATE": "birth_date",
"PATIENT_STATUS": "is_deceased",
"DEATH_DATE": "death_date",
"PRIMARY_ONC": "care_provider",
"PAT_ID": "client_id",
"PAT_LAST_NAME": "last_name",
"PAT_FIRST_NAME": "first_name",
"PAT_MIDDLE_NAME": "middle_name",
"PAT_TITLE": "prefix",
"PAT_NAME_SUFFIX": "suffix",
"SEX": "sex",
"LANGUAGE": "preferred_language",
"PATIENT_RACE": "race",
"ETHNIC_GROUP": "ethnicity",
"PRIMARY_ONC": "care_provider",
"EMAIL_ADDRESS": "email",
"MRN": "mrn",
"ADD_LINE_1": "street",
"ADD_LINE_2": "street2",
"CITY": "city",
"STATE": "state",
"ZIP": "postal_code",
"COUNTRY": "country",
"CONTACT_PHONE": "phone",
"DB_UPDATE_DTTM": "extract_date"
}
SEX = {
'Female': 'Female',
'Male': 'Male',
'Unknown': 'Unknown'
}
patient_status = {
'Alive': False,
'Deceased': True,
'N': False,
'Y': True
}
RACE = {
"American Indian or Alaskan Native": "American Indian or Alaska Native",
"Asian": "Asian",
"Black/African American": "Black or African American",
"DEACTIVE": "Not Provided",
"Native Hawaiian": "Native Hawaiian or Other Pacific Islander",
"Not Asked": "Not Provided",
"Other Pacific Islander": "Native Hawaiian or Other Pacific Islander",
"Patient Refused": "Not Provided",
"Unknown": "Other",
"White": "White",
"XXNative Hawaiian/Other Pacific Islander": "Native Hawaiian or Other Pacific Islander",
"": ""
}
ETHNICITY = {
"African American": "Non-Hispanic/Non-Latino",
"Hispanic/Latino Origin": "Hispanic/Latino",
"Not of Hispanic or Latino Origin": "Non-Hispanic/Non-Latino",
"Patient Refused": "Unknown",
"Not Asked": "Unknown",
"Unknown": "Unknown",
"": ''
}
LANGUAGES = {
"Abkhaz": "Abkhaz",
"Afar": "Afar",
"Afrikaans": "Afrikaans",
"Akan": "Akan",
"Albanian": "Albanian",
"Amharic": "Amharic",
"Arabic": "Arabic",
"Aragonese": "Aragonese",
"Armenian": "Armenian",
"Assamese": "Assamese",
"Avaric": "Avaric",
"Avestan": "Avestan",
"Aymara": "Aymara",
"Azerbaijani": "Azerbaijani",
"Bambara": "Bambara",
"Bashkir": "Bashkir",
"Basque": "Basque",
"Belarusian": "Belarusian",
"Bengali": "Bengali",
"Bihari": "Bihari",
"Bislama": "Bislama",
"Bosnian": "Bosnian",
"Breton": "Breton",
"Bulgarian": "Bulgarian",
"Burmese": "Burmese",
"Cambodian": "Khmer",
"Catalan (Valencian)": "Catalan",
"Chamorro": "Chamorro",
"Chechen": "Chechen",
"Chichewa (Nyanja)": "Chichewa",
"Chinese": "Chinese",
"Chinese-Cantonese": "Chinese",
"Chinese-Mandarin": "Chinese",
"Church Slavonic": "Old Church Slavonic",
"Chuvash": "Chuvash",
"Cornish": "Cornish",
"Corsican": "Corsican",
"Cree": "Cree",
"Croatian": "Croatian",
"Czech": "Czech",
"Danish": "Danish",
"Divehi (Maldivian)": "Divehi",
"Dutch": "Dutch",
"Dzongkha": "Dzongkha",
"English": "English",
"Esperanto": "Esperanto",
"Estonian": "Estonian",
"Ewe": "Ewe",
"Faroese": "Faroese",
"Farsi": "Persian",
"Farsi (persian)": "Persian",
"Fijian": "Fijian",
"Finnish": "Finnish",
"Flemish": "Dutch",
"French": "French",
"Fula": "Fula",
"Galician": "Galician",
"Ganda": "Ganda",
"Georgian": "Georgian",
"German": "German",
"Greek": "Greek",
"Gujarati": "Gujarati",
"Haitian": "Haitian",
"Hausa": "Hausa",
"Hebrew": "Hebrew",
"Herero": "Herero",
"Hindi": "Hindi",
"Hiri Motu": "Hiri Motu",
"Hungarian": "Hungarian",
"Icelandic": "Icelandic",
"Ido": "Ido",
"Igbo": "Igbo",
"Indonesian": "Indonesian",
"Interlingua": "Interlingua",
"Interlingue": "Interlingue",
"Inuktitut": "Inuktitut",
"Inupiaq": "Inupiaq",
"Irish": "Irish",
"Italian": "Italian",
"Japanese": "Japanese",
"Javanese": "Javanese",
"Kalaallisut": "Kalaallisut",
"Kannada": "Kannada",
"Kanuri": "Kanuri",
"Kashmiri": "Kashmiri",
"Kazakh": "Kazakh",
"Khmer": "Khmer",
"Kikuyu": "Kikuyu",
"Kinyarwanda": "Kinyarwanda",
"Kirundi": "Kirundi",
"Komi": "Komi",
"Kongo": "Kongo",
"Korean": "Korean",
"Kurdish": "Kurdish",
"Kwanyama": "Kwanyama",
"Kyrgyz": "Kyrgyz",
"Lao": "Lao",
"Latin": "Latin",
"Latvian": "Latvian",
"Limburgish": "Limburgish",
"Lingala": "Lingala",
"Lithuanian": "Lithuanian",
"Luba-Katanga": "Luba-Katanga",
"Luxembourgish": "Luxembourgish",
"Macedonian": "Macedonian",
"Malagasy": "Malagasy",
"Malay": "Malay",
"Malayalam": "Malayalam",
"Maltese": "Maltese",
"Manx": "Manx",
"Marathi": "Marathi",
"Marshallese": "Marshallese",
"Mongolian": "Mongolian",
"Nauru": "Nauruan",
"Navajo": "Navajo",
"Ndonga": "Ndonga",
"Nepali": "Nepali",
"North Ndebele": "Northern Ndebele",
"Northern Sami": "Northern Sami",
"Norwegian": "Norwegian",
"Norwegian Nynorsk": "Norwegian Nynorsk",
"Nuosu": "Nuosu",
"Occitan": "Occitan",
"Ojibwe": "Ojibwe",
"Oriya": "Oriya",
"Oromo": "Oromo",
"Ossetian": "Ossetian",
"Pakistani": "Urdu",
"Pashto": "Pashto",
"Polish": "Polish",
"Portuguese": "Portuguese",
"Punjabi": "Eastern Pujabi",
"Quechua": "Quechua",
"Romanian": "Romanian",
"Romansh": "Romansh",
"Russian": "Russian",
"Samoan": "Samoan",
"Sango": "Sango",
"Sanskrit": "Sanskrit",
"Sardinian": "Sardinian",
"Scottish": "Scottish Gaelic",
"Serbian": "Serbian",
"Shona": "Shona",
"Sindhi": "Sindhi",
"Sinhala": "Sinhalese",
"Slovak": "Slovak",
"Slovene": "Slovene",
"Somali": "Somali",
"South Azerbaijani": "Azerbaijani",
"South Ndebele": "Southern Ndebele",
"Southern Sotho": "Southern Sotho",
"Spanish": "Spanish",
"Sundanese": "Sundanese",
"Swahili": "Swahili",
"Swati": "Swati",
"Swedish": "Swedish",
"Tagalog": "Tagalog",
"Tahitian": "Tahitian",
"Tajik": "Tajik",
"Tamil": "Tamil",
"Tatar": "Tatar",
"Telugu": "Telugu",
"Thai": "Thai",
"Tibetan": "Tibetan",
"Tigrinya": "Tigrinya",
"Tonga": "Tonga",
"Tsonga": "Tsonga",
"Tswana": "Tswana",
"Turkish": "Turkish",
"Turkmen": "Turkmen",
"Twi": "Twi",
"Ukrainian": "Ukrainian",
"Urdu": "Urdu",
"Uyghur": "Uyghur",
"Uzbek": "Uzbek",
"Venda": "Venda",
"Vietnamese": "Vietnamese",
"Walloon": "Walloon",
"Welsh": "Welsh",
"Western Frisian": "Western Frisian",
"Wolof": "Wolof",
"Xhosa": "Xhosa",
"Yiddish": "Yiddish",
"Yoruba": "Yoruba",
"Zhuang": "Zhuang",
"Zulu": "Zulu",
"": ""
}
|
from webargs import ValidationError
def limit_length(lower=None, upper=None):
def validate(field):
length = len(field)
        return (lower <= length if lower is not None else True) and \
            (upper >= length if upper is not None else True)
return validate
def limit_value(lower=None, upper=None):
def validate(field):
value = field
        return (lower <= value if lower is not None else True) and \
            (upper >= value if upper is not None else True)
return validate
def in_(collection):
def validate(field):
return field in collection
return validate
def not_(fn):
def validate(field):
try:
result = fn(field)
return not bool(result)
except ValidationError:
return True
return validate
def or_(*fns):
def validate(field):
results = []
for fn in fns:
try:
results.append(fn(field))
except ValidationError:
results.append(False)
return any(results)
return validate
|
from folium import Map, Marker, Icon, PolyLine
def get_mex_map():
m = Map(
location=[23.0676883,-104.7929726],
zoom_start=5
)
return m
def set_locations_mex(locationsDict,htmlFileName):
mexMap = get_mex_map()
for key in locationsDict.keys():
if list(locationsDict.keys())[0] == key:
Marker(locationsDict[key],popup='<b>'+key.split('=> ')[1]+'</b>',
tooltip=key, icon=Icon(color='green')).add_to(mexMap)
elif list(locationsDict.keys())[-1] == key:
Marker(locationsDict[key],popup='<b>'+key.split('=> ')[1]+'</b>',
tooltip=key, icon=Icon(color='red')).add_to(mexMap)
else:
Marker(locationsDict[key], popup='<b>'+key.split('=> ')[1]+'</b>',
tooltip=key).add_to(mexMap)
PolyLine(list(locationsDict.values()),color="red", weight=2.5, opacity=1).add_to(mexMap)
mexMap.save(htmlFileName)
|
from typing import Dict
from .abstract import AbstractInterface
import json
class ProblemsInterface(AbstractInterface):
create_fields = ['title', 'description', 'max_cpu_time', 'max_real_time', 'max_memory', 'author', 'testcases']
retrieve_fields = ['id', 'title', 'description', 'max_cpu_time', 'max_real_time', 'max_memory', 'author', 'testcases']
update_fields = ['description', 'max_cpu_time', 'max_real_time', 'max_memory', 'testcases']
table_name = 'judge.PROBLEMS'
def create(self, **data: Dict):
if 'testcases' in data:
testcases = data.get('testcases')
data['testcases'] = json.dumps(testcases)
return super().create(**data)
def update(self, id: int, **data: Dict):
if 'testcases' in data:
testcases = data.get('testcases')
data['testcases'] = json.dumps(testcases)
return super().update(id, **data)
|
import requests
import webbrowser
headers = {'User-Agent':'Mozilla/5.0'}
payload = {'txtPlan': '454'}
session = requests.Session()
response = session.post('http://www.esar.alberta.ca/esarmain.aspx', headers=headers, data=payload)
# webbrowser.open() expects a URL string, not a Response object
webbrowser.open(response.url)
|
'''
Created on Jul 16, 2013
@author: emma
'''
import unittest # imports the unit test framework / ability to run as a pyunit test
from UnitTesting.page_objects.webdriver_wrapper import webdriver_wrapper
from UnitTesting.page_objects.homepage import homepage
class new_releases_fiction(unittest.TestCase):
def new_releases_fiction_test(self, webd_wrap):
page_homepage = homepage(webd_wrap)
page_homepage.get_page()
page_homepage.click_fiction_new_releases_link('ROMANCE',2)
page_homepage.click_fiction_new_releases_link('FICTION',1)
webd_wrap.close_the_browser()
def test_new_releases_fiction(self): #running x as a unit test
for browser in webdriver_wrapper._browsers:
self.new_releases_fiction_test(webdriver_wrapper(browser))
print "Module Complete", __name__
if __name__ == "__main__":
unittest.main()
|
from src.Algorithms.Nodes.abstract_node import AbstractNode
class DijkstraNode(AbstractNode):
def __init__(self, x, y, parent,distance):
super().__init__(x, y, parent)
self.__distance = distance
def get_distance(self):
return self.__distance
    def __lt__(self, other):
        # strict less-than: required for consistent heap/sort ordering
        return self.__distance < other.get_distance()
|
import sys
import pyaudio
import wave
import os
from socket import *
from header import *
if len(sys.argv) < 3:
print("Compile error : python record.py [minutes] [meters]")
exit(1)
FORMAT = pyaudio.paInt16
NODE = sys.argv[2]
seconds = int(sys.argv[1])
# open pyaudio
p = pyaudio.PyAudio()
stream = p.open(format = FORMAT,
channels = 1,
rate = RATE,
input = True,
input_device_index = 0,
output_device_index = 0,
frames_per_buffer = CHUNK
)
# start loop
print("Start recording...")
loop = 0
while loop < seconds:
try:
loop = loop+1
frames = []
# recording
for i in range(0, int(RATE/CHUNK*RECORD_SECONDS)):
data = stream.read(CHUNK, exception_on_overflow=False)
frames.append(data)
# record wave files
fileName = file_saver(str(NODE), frames, wave, p)
# send file
os.system('scp '+fileName+' gunhoo@192.168.123.6:~/Desktop/Drone-Tracking/server/data/ &')
# exception handle
except KeyboardInterrupt:
print("wait seconds to terminate...")
stream.stop_stream()
stream.close()
p.terminate()
break
|
import getpass
import os
import sys
class Who:
"""
Este Metodo detecta quien eres en el sesion actual,
Si detecta el user ROOT se detiene.
"""
user = getpass.getuser()
localhost = os.popen('hostname', 'r')
localhost = localhost.read()
sistema_operativo = os.name
def identificar(self, user, local_host):
        if user == 'root':
            print ''
            print 'User ' + user.upper() + ' detected!'
            print ''
            print 'This program does not require elevated privileges'
            print 'Switch users and restart'
            print ''
            sys.exit(0)
        else:
            print 'You are not running root'
            print ''
            print 'User: ' + user
            print 'Host: ' + local_host
    # Method to detect the OS; stops in the case of Windows.
    # Windows is not supported.
def sistema(self, SistemaOperativo):
        if SistemaOperativo == 'nt':
            print 'Windows system'
            print 'Operating system not supported!'
            print ''
            sys.exit(0)
        elif SistemaOperativo == 'posix':
            print 'Linux or a Unix variant'
        elif SistemaOperativo == 'mac':
            print 'Mac'
        else:
            print 'Operating system not recognized'
|
import eel
import pyowm
owm = pyowm.OWM('your token')
@eel.expose
def get_weather(place):
mgr = owm.weather_manager()
observation = mgr.weather_at_place(place)
w = observation.weather
temp = w.temperature('celsius')['temp']
# print("В городе " + place + " сейчас " + str(temp) + " градусов.")
return "В городе " + place + " сейчас " + str(temp) + " градусов."
eel.init(r'D:\Python\work_practice\project2_weather_app\web')
eel.start('main.html', size=(700,700))
# get_weather('New York, USA')
|
#!/usr/bin/env python3
"""save the best alignment along with the sequence's corresponding score. """
__appname__ = 'align_seqs'
__author__ = 'Zongyi Hu (zh2720@ic.ac.uk)'
__version__ = '0.0.1'
import sys
"""Two example sequences to match"""
# seq2 = "ATCGCCGGATTACGGG"
# seq1 = "CAATTCGGAT"
# Assign the longer sequence to s1, and the shorter to s2
# l1 is length of the longest, l2 that of the shortest
def longer_seq(seq1, seq2):
global l1
global l2
l1 = len(seq1)
l2 = len(seq2)
if l1 >= l2:
global s1
global s2
s1 = seq1
s2 = seq2
else:
s1 = seq2
s2 = seq1
l1, l2 = l2, l1 # swap the two lengths
"""A function that computes a score by returning the number of matches starting from arbitrary startpoint (chosen by user)"""
def calculate_score(s1, s2, l1, l2, startpoint):
matched = "" # to hold string displaying alignements
score = 0
for i in range(l2):
if (i + startpoint) < l1:
if s1[i + startpoint] == s2[i]: # if the bases match
matched = matched + "*"
score = score + 1
else:
matched = matched + "-"
# some formatted output
print("." * startpoint + matched)
print("." * startpoint + s2)
print(s1)
print(score)
print(" ")
return score
# Test the function with some example starting points:
# calculate_score(s1, s2, l1, l2, 0)
# calculate_score(s1, s2, l1, l2, 1)
# calculate_score(s1, s2, l1, l2, 5)
"""now try to find the best match (highest score) for the two sequences"""
def best_match(s1, s2, l1, l2):
my_best_align = None
my_best_score = -1
for i in range(l1):
z = calculate_score(s1, s2, l1, l2, i)
if z > my_best_score:
my_best_align = "." * i + s2
my_best_score = z
print(my_best_align)
print(s1)
print("Best score:", my_best_score)
#store a best alignment in a .txt file
g = open("../results/fasta_result.txt","w")
g.write("The best align is:" + "\n")
g.write(my_best_align + "\n")
g.write(s1)
g.write(f"The best score is: {my_best_score}" + "\n")
g.close()
"""main function"""
def main(argv):
with open("../data/sequence.csv") as f:
seq1 = f.readline()
seq2 = f.readline()
longer_seq(seq1, seq2)
best_match(s1, s2, l1, l2)
return 0
if __name__ == "__main__":
status = main(sys.argv)
sys.exit(status)
|
"""
1 ler os casos de teste Q (entrada)
2 laço de Q até i = 0 (para cada caso de teste)
3 armazenar em uma lista os total de primos gemeos no interval (X-Y) (processamento)
4 exibir a lista em um novo laço (saida)
"""
def eh_primo(n):
    # trial-division primality test
    if n < 2:
        return False
    for d in range(2, int(n ** 0.5) + 1):
        if n % d == 0:
            return False
    return True
def primos_intervalo(x, y):
    # collect every prime in the closed interval [x, y]
    return [i for i in range(x, y + 1) if eh_primo(i)]
def eh_primo_gem(N):
    # parse "X Y" and count twin primes: consecutive primes that differ by 2
    x, y = (int(v) for v in N.split())
    primos = primos_intervalo(x, y)
    return sum(1 for a, b in zip(primos, primos[1:]) if b - a == 2)
out = []
Q = int(input())
while Q > 0:
    N = input()
    out.append(eh_primo_gem(N))
    Q -= 1
for total in out:
    print(total)
|
from typing import Dict
class AvgCollecter():
def __init__(self, keys=None):
self.total = {k:0 for k in keys} if keys else {}
self.count = {k:0 for k in keys} if keys else {}
self.val = None
def __call__(self, result:Dict[str, float], reset=False):
for k, v in result.items():
if k not in self.total or reset:
self.total[k] = 0
self.count[k] = 0
self.total[k] += v
self.count[k] += 1
self.val = {k:self.total[k] / self.count[k] for k in self.total}
return self.val
class EMACollecter():
def __init__(self, alpha=0.98):
self.val = {}
self.alpha = alpha
def __call__(self, result:Dict[str, float]):
for k, v in result.items():
self.val[k] = self.val[k] * self.alpha + v * (1 - self.alpha) \
if k in self.val else v
return self.val
|
import numpy as np
class Layout:
"""
Layout(width, height, *anchorWidth, *anchorHeight, *fill)
Parameters
width int: layout width
height int: layout height
        anchorWidth int: layout width position from the parent
optional, default: 0
anchorHeight int: layout height position from the parent
optional, default: 0
fill str: layout background fill
optional, default: ' '
Most basic component of the whole renderer, components should inherit from this
class since it has the built in properties to write to the driver.
Properties:
ignoreOverflow bool: silently fails if trying to write outside of the shadowBuffer.
default: True
        underData: the shadowBuffer under the layout. Used internally for undrawing.
            default: None
parent: the parent layout or renderer.
default: None
isAttached: checks if the layout is attached to the parent
default: False
"""
def __init__(self, w, h, anchorw = 0, anchorh = 0, fill = ' '):
self.__dict__.update({
'width': w,
'height': h,
'anchorw': anchorw,
'anchorh': anchorh,
'fill': fill,
})
self.initalize()
self.ignoreOverflow = True
self.underData = None
self.parent = None
self.isAttached = False
def initalize(self):
self.cursorPos = [0, 0]
self.shadowBuff = [[self.fill for x in range(
self.width)] for y in range(self.height)]
self.renderables = []
self.components = []
def _is_overflow(self, y, x):
if y >= self.height or x >= self.width:
return True
return False
def write_char(self, data, direction='x', diffpos=None, CJK=False):
direction = 0 if direction == "x" else 1
pos = self.cursorPos if diffpos is None else diffpos
if direction == 1 and diffpos is None:
if self._is_overflow(*pos):
self.cursorPos = [0, pos[1] + 1]
else:
self.cursorPos = [pos[0] + 1, pos[1]]
elif direction == 0 and diffpos is None:
if self._is_overflow(*pos):
self.cursorPos = [pos[0] + 1, 0]
else:
self.cursorPos = [pos[0], pos[1] + 1]
try:
self.shadowBuff[pos[0]][pos[1]] = data
if CJK and direction == 0:
self.write_char('')
except:
if not self.ignoreOverflow:
                raise Exception(f'Attempting to write outside of buffer. {pos} -> {[self.height - 1, self.width - 1]}')
def write_string(self, data, direction='x', CJK=False):
for char in data:
self.write_char(char, direction, None, CJK)
def attachTo(self, parent):
parent.components.append(self)
getattr(self, 'onAttach', lambda x: True)(parent)
self.isAttached = True
def drawTo(self, driver):
if hasattr(self, 'beforeRender') and not self.isAttached:
self.beforeRender()
for rendable in self.renderables:
rendable(self)
for component in self.components:
getattr(component, 'beforeRender', lambda: True)()
component.drawTo(self)
try:
anchorHeight, anchorWidth = self.anchorh, self.anchorw
buffer = np.asarray(self.shadowBuff, dtype=np.dtype('<U32'))
parentBuffer = np.asarray(driver.shadowBuff, dtype=np.dtype('<U32'))
self.underData = parentBuffer[anchorHeight:anchorHeight +
buffer.shape[0], anchorWidth:anchorWidth + buffer.shape[1]].tolist()
parentBuffer[anchorHeight:anchorHeight + buffer.shape[0], anchorWidth:anchorWidth + buffer.shape[1]] = buffer
driver.shadowBuff = parentBuffer.tolist()
        except ValueError:
            raise Exception(f'Layout misfit: Parent{driver.width, driver.height}:S{np.asarray(driver.shadowBuff).shape} layout is smaller than Child{self.width, self.height}:S{np.asarray(self.shadowBuff).shape}.\nAnchors: {self.anchorh, self.anchorw}')
def draw(self):
if self.parent:
self.drawTo(self.parent)
self.parent.draw()
if hasattr(self, 'afterRender'):
self.afterRender()
for rendable in self.renderables:
rendable(self)
for component in self.components:
getattr(component, 'afterRender', lambda: True)()
def undrawTo(self, driver):
if self.underData is not None:
anchorHeight, anchorWidth = self.anchorh, self.anchorw
buffer = np.asarray(self.underData)
parentBuffer = np.asarray(driver.shadowBuff)
parentBuffer[anchorHeight:anchorHeight + buffer.shape[0],
anchorWidth: anchorWidth + buffer.shape[1]] = buffer
driver.shadowBuff = parentBuffer.tolist()
|
import sys
import math
import time
def merge(lista,p,q,r):
L, R,i,j = [],[],0,0
for a in range(p,q+1): L.append(lista[a])
for a in range(q+1,r+1): R.append(lista[a])
L.append(math.inf)
R.append(math.inf)
for k in range(p,r+1):
if (L[i] < R[j]):
lista[k] = L[i]
i += 1
else:
lista[k] = R[j]
j += 1
def mergeSort(lista,p,r):
if (p < r):
q = int((p+r)/2)
mergeSort(lista,p,q)
mergeSort(lista,q+1,r)
merge(lista,p,q,r)
def lerArquivo():
arquivo = 'instancias-num/' + sys.argv[1]
f = open(arquivo,'r')
conteudo = f.readlines()
entrada = []
for i in range(len(conteudo)): entrada.append(int(conteudo[i]))
return entrada
def escreveResultado(saida):
    arquivo = 'resposta-mergeSort-' + sys.argv[1]
    f = open(arquivo, 'w')
    res = []
    for i in range(len(saida)): res.append(str(saida[i])+'\n')
    f.writelines(res)
    f.close()
if __name__ == '__main__':
print("Lendo arquivo...")
entrada = lerArquivo()
print("Arquivo Lido!!")
print("\nProcessando...")
inicio,fim = 0, len(entrada)-1
start = time.time()
mergeSort(entrada,inicio,fim)
finish = time.time()
print("\nProcessado em: ",(finish - start), "s")
print("Escrevendo Arquivo...")
escreveResultado(entrada)
print("Concluído!")
|
import cv2
import numpy as np
import matplotlib.pyplot as plt
img1 = cv2.imread("Img1.png", cv2.IMREAD_GRAYSCALE)
img2 = cv2.imread("Img2.png", cv2.IMREAD_GRAYSCALE)
sift = cv2.xfeatures2d.SIFT_create()
kp1, des1 = sift.detectAndCompute(img1, None)
kp2, des2 = sift.detectAndCompute(img2, None)
# Here kp will be a list of keypoints and des is a numpy array of shape Number_of_Keypoints×128.
# img = cv2.drawKeypoints(img, kp, None, flags=cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
#
# cv2.imshow("Imagem", img)
# cv2.waitKey(0)
# cv2.destroyAllWindows()
bf = cv2.BFMatcher()
matches = bf.knnMatch(des1, des2, k=2)
# Apply ratio test
good = []
for m,n in matches:
if m.distance < 0.75*n.distance:
good.append([m])
# cv.drawMatchesKnn expects list of lists as matches.
img3 = cv2.drawMatchesKnn(img1, kp1, img2, kp2, good, None, flags=cv2.DrawMatchesFlags_NOT_DRAW_SINGLE_POINTS)
cv2.imshow("Resultado", img3)
cv2.waitKey(0)
cv2.destroyAllWindows()
# plt.imshow(img3), plt.show()
|
import sys
# This is how we can pass parameters to a Python file from the terminal.
# sys.argv[0] is the script name itself, so the real arguments start at index 1.
first_name = sys.argv[1]
last_name = sys.argv[2]
print(f'Hi, I\'m {first_name} {last_name}')
|
from django.contrib import admin
from sponsorapp.models import Sponsor
admin.site.register(Sponsor)
|
from antlr4 import *
from parser.parity_gameLexer import parity_gameLexer
from parser.parity_gameParser import parity_gameParser
from parser.parity_gameListener import parity_gameListener
from pathlib import Path
from typing import List, Set, Dict
from copy import deepcopy
import re
class ParsedNode:
def __init__(self):
self.identifier = 0
self.parity = 0
self.owner = 0
self.successors: Set[int] = set()
def __str__(self):
s = "Node %d (parity %d, owner %d) - " % (self.identifier, self.parity, self.owner)
s += "successors are " + str(self.successors)
return s
def add_successor(self, identifier):
self.successors.add(identifier)
class ParsedParityGame:
def __init__(self):
self.size: int = 0
self.nodes: Dict[int, ParsedNode] = {}
self.even: Set[int] = set()
self.odd: Set[int] = set()
self.parities: Dict[int, Set[int]] = {}
def __str__(self):
s = "Parity game of size %d\n" % self.size
for node in self.nodes.values():
s += "\t" + str(node) + "\n"
return s
def add_node(self, node):
self.nodes[node.identifier] = node
"""
populate the data structures of the parity game
"""
def populate(self):
for node in self.nodes.values():
if node.owner == 0:
self.even.add(node.identifier)
else:
self.odd.add(node.identifier)
if node.parity not in self.parities.keys():
self.parities[node.parity] = {node.identifier}
else:
self.parities[node.parity].add(node.identifier)
"""
create a subgame from the current parity game
- remove, the set of nodes to remove
"""
def create_subgame(self, remove: Set[int]):
new = ParsedParityGame()
new.size = self.size
for n in self.nodes.keys():
if n not in remove:
nn = deepcopy(self.nodes[n])
nn.successors -= remove
new.add_node(nn)
new.populate()
return new
class ParityGamePrintListener(parity_gameListener):
game = None
current_node = None
def enterNode(self, ctx: parity_gameParser.NodeContext):
self.current_node = ParsedNode()
def exitNode(self, ctx: parity_gameParser.NodeContext):
self.game.add_node(self.current_node)
def enterIdentifier(self, ctx: parity_gameParser.IdentifierContext):
self.current_node.identifier = int(ctx.getText())
def enterParity(self, ctx: parity_gameParser.ParityContext):
self.current_node.parity = int(ctx.getText())
def enterOwner(self, ctx: parity_gameParser.OwnerContext):
self.current_node.owner = int(ctx.getText())
def enterSuccessor(self, ctx: parity_gameParser.SuccessorContext):
self.current_node.add_successor(int(ctx.getText()))
def enterGame(self, ctx: parity_gameParser.GameContext):
self.game = ParsedParityGame()
    def enterPreamble(self, ctx: parity_gameParser.PreambleContext):
        self.game.size = int(str(ctx.NUMBER()))
class ParityGameParser:
@staticmethod
def parse_file(path):
txt = Path(path).read_text()
return ParityGameParser.parse_string(txt)
@staticmethod
    def parse_string(string):
        # Strip quoted labels/comments before handing the text to the lexer.
        string = re.sub(r'\[?".*?"\]?', '', string)
        return ParityGameParser.parse(InputStream(string))
@staticmethod
def parse(input_stream):
lexer = parity_gameLexer(input_stream)
stream = CommonTokenStream(lexer)
parser = parity_gameParser(stream)
tree = parser.game()
printer = ParityGamePrintListener()
walker = ParseTreeWalker()
walker.walk(printer, tree)
return printer.game
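
# --- Usage sketch (illustrative, not part of the original module) ---
# Assumes the ANTLR-generated parser modules are importable and that
# "example.pg" holds a pgsolver-style game description; both the file name
# and its presence are assumptions.
if __name__ == "__main__":
    game = ParityGameParser.parse_file("example.pg")
    print(game)
    # Drop node 0 and inspect the remaining subgame:
    print(game.create_subgame({0}))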
|
import filters
def main():
    # Ask which image the user wants to edit
    filename = input("Enter filename: ")
    # Load the image from the specified file
    img = filters.load_img(filename)
    # Apply filters
    newimg = filters.obamicon(img)
    # Save the filtered image, not the untouched original
    filters.save_img(newimg, "recolored.jpg")

main()
|
import typing
import mysql.connector
from classes import Student, Room
class Model:
def __init__(self, **kwargs):
self.create_db_and_tables(kwargs.get('init_script_path_sh') or './inits/init_script.sh')
        self.connection = mysql.connector.connect(
            user=kwargs.get('user') or 'root',
            password=kwargs.get('password') or 'root',
            host=kwargs.get('host') or '127.0.0.1',
            database=kwargs.get('database') or 'leverx_task4_db',
            auth_plugin=kwargs.get('auth_plugin') or 'mysql_native_password')
    @staticmethod
    def create_db_and_tables(init_script_path_sh: str):
        # Bootstrap the database and tables by running the init shell script.
        import os
        os.system(init_script_path_sh)
def insert_rooms(self, rooms: typing.Iterable[Room]):
my_cursor = self.connection.cursor()
sql = "INSERT INTO Rooms(id, name) VALUES (%s, %s) ON DUPLICATE KEY UPDATE name = VALUES(name)"
val = [
(room.id, room.name) for room in rooms
]
my_cursor.executemany(sql, val)
self.connection.commit()
def insert_students(self, students: typing.Iterable[Student]):
my_cursor = self.connection.cursor()
sql = "INSERT INTO Students (id, name, sex, birthday, room_id) VALUES (%s, %s, %s, %s, %s)\
ON DUPLICATE KEY UPDATE name = VALUES(name), sex = VALUES(sex), birthday = VALUES(birthday), room_id = VALUES(room_id)"
val = [
(s.id, s.name, s.sex, s.birthday, s.room) for s in students
]
my_cursor.executemany(sql, val)
self.connection.commit()
def select_rooms_with_count_students(self) -> typing.Iterable[typing.Tuple[str,int]]:
my_cursor = self.connection.cursor()
sql = "SELECT R.name, count(S.id) as students_in_room \
FROM Students as S \
INNER JOIN Rooms as R \
ON S.room_id = R.id \
GROUP BY R.id"
my_cursor.execute(sql)
return my_cursor.fetchall()
    def select_rooms_with_smallest_avg_birthday(self) -> typing.Iterable[str]:
        my_cursor = self.connection.cursor()
sql = "SELECT R.name \
FROM Students as S \
INNER JOIN Rooms as R ON S.room_id = R.id \
GROUP BY R.id \
ORDER BY avg(S.birthday) ASC \
LIMIT 5"
my_cursor.execute(sql)
return my_cursor.fetchall()
    def select_rooms_with_largest_date_difference(self) -> typing.Iterable[str]:
        my_cursor = self.connection.cursor()
        # DESC so the rooms with the largest birthday spread come first.
        sql = "SELECT R.name \
               FROM Students as S \
               INNER JOIN Rooms as R ON S.room_id = R.id \
               GROUP BY R.id \
               ORDER BY DATEDIFF(MAX(S.birthday), MIN(S.birthday)) DESC \
               LIMIT 5"
        my_cursor.execute(sql)
        return my_cursor.fetchall()
    def select_rooms_with_students_of_different_sex(self) -> typing.Iterable[str]:
my_cursor = self.connection.cursor()
sql = "SELECT R.name \
FROM Rooms as R \
WHERE (\
SELECT count(*) \
FROM Students as S1 \
WHERE S1.sex = 'M' and S1.room_id = R.id \
Group BY S1.room_id) \
< \
(SELECT count(*) \
FROM Students as S2 \
WHERE S2.room_id = R.id \
Group BY S2.room_id)"
my_cursor.execute(sql)
return my_cursor.fetchall()
def __del__(self):
self.connection.close()
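
# --- Usage sketch (illustrative, not part of the original module) ---
# Assumes a reachable MySQL server plus the init script referenced above.
if __name__ == '__main__':
    model = Model()  # falls back to the defaults declared in __init__
    for name, students_in_room in model.select_rooms_with_count_students():
        print(name, students_in_room)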
|
import inspect
import re
from functools import partial
from typing import Any, Callable, Dict, List, Mapping, Optional, get_type_hints
from falcon import HTTP_400, HTTP_415, HTTPError
from falcon import Response as FalconResponse
from falcon.routing.compiled import _FIELD_PATTERN as FALCON_FIELD_PATTERN
from .._pydantic import BaseModel, ValidationError
from .._types import ModelType
from ..response import Response
from .base import BasePlugin
class OpenAPI:
def __init__(self, spec: Mapping[str, str]):
self.spec = spec
def on_get(self, _: Any, resp: Any):
resp.media = self.spec
class DocPage:
def __init__(self, html: str, **kwargs: Any):
self.page = html.format(**kwargs)
def on_get(self, _: Any, resp: Any):
resp.content_type = "text/html"
resp.text = self.page
class OpenAPIAsgi(OpenAPI):
async def on_get(self, req: Any, resp: Any):
super().on_get(req, resp)
class DocPageAsgi(DocPage):
async def on_get(self, req: Any, resp: Any):
super().on_get(req, resp)
DOC_CLASS: List[str] = [
x.__name__ for x in (DocPage, OpenAPI, DocPageAsgi, OpenAPIAsgi)
]
HTTP_500: str = "500 Internal Service Response Validation Error"
class FalconPlugin(BasePlugin):
OPEN_API_ROUTE_CLASS = OpenAPI
DOC_PAGE_ROUTE_CLASS = DocPage
def __init__(self, spectree):
super().__init__(spectree)
self.FALCON_MEDIA_ERROR_CODE = (HTTP_400, HTTP_415)
# NOTE from `falcon.routing.compiled.CompiledRouterNode`
self.ESCAPE = r"[\.\(\)\[\]\?\$\*\+\^\|]"
self.ESCAPE_TO = r"\\\g<0>"
self.EXTRACT = r"{\2}"
# NOTE this regex is copied from werkzeug.routing._converter_args_re and
# modified to support only int args
self.INT_ARGS = re.compile(
r"""
((?P<name>\w+)\s*=\s*)?
(?P<value>\d+)\s*
""",
re.VERBOSE,
)
self.INT_ARGS_NAMES = ("num_digits", "min", "max")
def register_route(self, app: Any):
self.app = app
self.app.add_route(
self.config.spec_url, self.OPEN_API_ROUTE_CLASS(self.spectree.spec)
)
for ui in self.config.page_templates:
self.app.add_route(
f"/{self.config.path}/{ui}",
self.DOC_PAGE_ROUTE_CLASS(
self.config.page_templates[ui],
spec_url=self.config.spec_url,
spec_path=self.config.path,
**self.config.swagger_oauth2_config(),
),
)
def find_routes(self):
routes = []
def find_node(node):
if node.resource and node.resource.__class__.__name__ not in DOC_CLASS:
routes.append(node)
for child in node.children:
find_node(child)
for route in self.app._router._roots:
find_node(route)
return routes
def parse_func(self, route: Any) -> Dict[str, Any]:
return route.method_map.items()
def parse_path(self, route, path_parameter_descriptions):
subs, parameters = [], []
for segment in route.uri_template.strip("/").split("/"):
            # finditer returns an iterator (always truthy); materialize it so
            # the emptiness check below actually works.
            matches = list(FALCON_FIELD_PATTERN.finditer(segment))
            if not matches:
subs.append(segment)
continue
escaped = re.sub(self.ESCAPE, self.ESCAPE_TO, segment)
subs.append(FALCON_FIELD_PATTERN.sub(self.EXTRACT, escaped))
for field in matches:
variable, converter, argstr = [
field.group(name) for name in ("fname", "cname", "argstr")
]
if converter == "int":
if argstr is None:
argstr = ""
arg_values = [None, None, None]
for index, match in enumerate(self.INT_ARGS.finditer(argstr)):
name, value = match.group("name"), match.group("value")
if name:
index = self.INT_ARGS_NAMES.index(name)
arg_values[index] = value
                    num_digits, minimum, maximum = arg_values
                    schema = {
                        "type": "integer",
                        "format": f"int{num_digits}" if num_digits else "int32",
                    }
                    if minimum:
                        schema["minimum"] = int(minimum)
                    if maximum:
                        schema["maximum"] = int(maximum)
elif converter == "uuid":
schema = {"type": "string", "format": "uuid"}
elif converter == "dt":
schema = {
"type": "string",
"format": "date-time",
}
else:
# no converter specified or customized converters
schema = {"type": "string"}
description = (
path_parameter_descriptions.get(variable, "")
if path_parameter_descriptions
else ""
)
parameters.append(
{
"name": variable,
"in": "path",
"required": True,
"schema": schema,
"description": description,
}
)
return f'/{"/".join(subs)}', parameters
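    # For example, a Falcon template "/users/{id:int(3, min=1)}" is emitted as
    # the OpenAPI path "/users/{id}" with schema
    # {"type": "integer", "format": "int3", "minimum": 1}.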
def request_validation(self, req, query, json, form, headers, cookies):
if query:
req.context.query = query.parse_obj(req.params)
if headers:
req.context.headers = headers.parse_obj(req.headers)
if cookies:
req.context.cookies = cookies.parse_obj(req.cookies)
if json:
try:
media = req.media
except HTTPError as err:
if err.status not in self.FALCON_MEDIA_ERROR_CODE:
raise
media = None
req.context.json = json.parse_obj(media)
if form:
# TODO - possible to pass the BodyPart here?
# req_form = {x.name: x for x in req.get_media()}
req_form = {x.name: x.stream.read() for x in req.get_media()}
req.context.form = form.parse_obj(req_form)
def response_validation(
self,
response_spec: Optional[Response],
falcon_response: FalconResponse,
skip_validation: bool,
) -> None:
if response_spec and response_spec.has_model():
model = falcon_response.media
status = int(falcon_response.status[:3])
expect_model = response_spec.find_model(status)
if response_spec.expect_list_result(status) and isinstance(model, list):
expected_list_item_type = response_spec.get_expected_list_item_type(
status
)
if all(isinstance(entry, expected_list_item_type) for entry in model):
skip_validation = True
falcon_response.media = [
(entry.dict() if isinstance(entry, BaseModel) else entry)
for entry in model
]
elif expect_model and isinstance(falcon_response.media, expect_model):
falcon_response.media = model.dict()
skip_validation = True
if self._data_set_manually(falcon_response):
skip_validation = True
if expect_model and not skip_validation:
expect_model.parse_obj(falcon_response.media)
def validate(
self,
func: Callable,
query: Optional[ModelType],
json: Optional[ModelType],
form: Optional[ModelType],
headers: Optional[ModelType],
cookies: Optional[ModelType],
resp: Optional[Response],
before: Callable,
after: Callable,
validation_error_status: int,
skip_validation: bool,
*args: Any,
**kwargs: Any,
):
# falcon endpoint method arguments: (self, req, resp)
_self, _req, _resp = args[:3]
req_validation_error, resp_validation_error = None, None
try:
self.request_validation(_req, query, json, form, headers, cookies)
if self.config.annotations:
annotations = get_type_hints(func)
for name in ("query", "json", "form", "headers", "cookies"):
if annotations.get(name):
kwargs[name] = getattr(_req.context, name)
except ValidationError as err:
req_validation_error = err
_resp.status = f"{validation_error_status} Validation Error"
_resp.media = err.errors()
before(_req, _resp, req_validation_error, _self)
if req_validation_error:
return
func(*args, **kwargs)
try:
self.response_validation(
response_spec=resp,
falcon_response=_resp,
skip_validation=skip_validation,
)
except ValidationError as err:
resp_validation_error = err
_resp.status = HTTP_500
_resp.media = err.errors()
after(_req, _resp, resp_validation_error, _self)
def _data_set_manually(self, resp):
return (resp.text is not None or resp.data is not None) and resp.media is None
def bypass(self, func, method):
if isinstance(func, partial):
return True
return inspect.isfunction(func)
class FalconAsgiPlugin(FalconPlugin):
"""Light wrapper around default Falcon plug-in to support Falcon 3.0 ASGI apps"""
ASYNC = True
OPEN_API_ROUTE_CLASS = OpenAPIAsgi
DOC_PAGE_ROUTE_CLASS = DocPageAsgi
async def request_validation(self, req, query, json, form, headers, cookies):
if query:
req.context.query = query.parse_obj(req.params)
if headers:
req.context.headers = headers.parse_obj(req.headers)
if cookies:
req.context.cookies = cookies.parse_obj(req.cookies)
if json:
try:
media = await req.get_media()
except HTTPError as err:
if err.status not in self.FALCON_MEDIA_ERROR_CODE:
raise
media = None
req.context.json = json.parse_obj(media)
if form:
try:
form_data = await req.get_media()
except HTTPError as err:
if err.status not in self.FALCON_MEDIA_ERROR_CODE:
raise
req.context.form = None
else:
res_data = {}
async for x in form_data:
res_data[x.name] = x
await x.data # TODO - how to avoid this?
req.context.form = form.parse_obj(res_data)
async def validate(
self,
func: Callable,
query: Optional[ModelType],
json: Optional[ModelType],
form: Optional[ModelType],
headers: Optional[ModelType],
cookies: Optional[ModelType],
resp: Optional[Response],
before: Callable,
after: Callable,
validation_error_status: int,
skip_validation: bool,
*args: Any,
**kwargs: Any,
):
# falcon endpoint method arguments: (self, req, resp)
_self, _req, _resp = args[:3]
req_validation_error, resp_validation_error = None, None
try:
await self.request_validation(_req, query, json, form, headers, cookies)
if self.config.annotations:
annotations = get_type_hints(func)
for name in ("query", "json", "form", "headers", "cookies"):
if annotations.get(name):
kwargs[name] = getattr(_req.context, name)
except ValidationError as err:
req_validation_error = err
_resp.status = f"{validation_error_status} Validation Error"
_resp.media = err.errors()
before(_req, _resp, req_validation_error, _self)
if req_validation_error:
return
await func(*args, **kwargs)
try:
self.response_validation(
response_spec=resp,
falcon_response=_resp,
skip_validation=skip_validation,
)
except ValidationError as err:
resp_validation_error = err
_resp.status = HTTP_500
_resp.media = err.errors()
after(_req, _resp, resp_validation_error, _self)
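
# --- Usage sketch (illustrative, not part of this module) ---
# Typical wiring, assuming the public spectree API (SpecTree, Response) and a
# Falcon 3 app; the resource and model names are invented:
#
#     import falcon
#     from pydantic import BaseModel
#     from spectree import SpecTree, Response
#
#     spec = SpecTree("falcon")
#
#     class Profile(BaseModel):
#         name: str
#
#     class ProfileResource:
#         @spec.validate(json=Profile, resp=Response(HTTP_200=Profile))
#         def on_post(self, req, resp):
#             resp.media = req.context.json.dict()
#
#     app = falcon.App()
#     app.add_route("/profile", ProfileResource())
#     spec.register(app)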
|
# What you will learn
"""
Lists
Searching in Lists
Exception Handling
Slices
Ranges
For Loop
While Loop
#Lists
A list is a data type that holds an ordered collection of items.
The items can be of various data types.
You can even have lists of lists!
list_name = [item_1, item_2, item_N]
list_name = []
list_name[index]
"""
animals = ['man', 'bear', 'pig']
print(animals[0])
animals[0] = 'cat'
print(animals[0])
animals = ['man', 'bear', 'pig']
print(animals[-1])
print(animals[-2])
print(animals[-3])
animals = ['man', 'bear', 'pig']
animals.append('cow')
print(animals[-1])
animals = ['man', 'bear', 'pig']
animals.extend(['cow','duck'])
print(animals)
more_animals = ['horse', 'dog']
animals.extend(more_animals)
print(animals)
animals = ['man', 'bear', 'pig']
animals.insert(0, 'horse')
print(animals)
animals.insert(2, 'duck')
print(animals)
"""
#Slices
list[index1:index2]
list[:index2]
list[index1:]
"""
animals = ['man', 'bear', 'pig', 'cow', 'duck', 'horse']
some_animals = animals[1:4]
print('Some animals: {}'.format(some_animals))
first_two = animals[0:2]
print('First two animals: {}'.format(first_two))
first_two_again = animals[:2]
print('First two animals: {}'.format(first_two_again))
animals = ['man', 'bear', 'pig', 'cow', 'duck', 'horse']
last_two = animals[4:6]
print('Last two animals: {}'.format(last_two))
last_two_again = animals[-2:]
print('Last two animals: {}'.format(last_two_again))
part_of_a_horse = 'horse'[1:3]
print(part_of_a_horse)
"""
#Exception Handling
Finding an item in a list
"""
animals = ['man', 'bear', 'pig']
bear_index = animals.index('bear')
print(bear_index)
# animals = ['man', 'bear', 'pig']
# cat_index = animals.index('cat')
# print(cat_index)
"""
Traceback (most recent call last):
File "/Users/alisariboga/Desktop/python_course /Section_7/lists.py", line 95, in <module>
cat_index = animals.index('cat')
ValueError: 'cat' is not in list
"""
animals = ['man', 'bear', 'pig']
try:
cat_index = animals.index('cat')
except ValueError:
cat_index = 'No cats found'
print(cat_index)
#No cats found
"""
#loops
for item_variable in list_name:
# Code block
item_variable = list_name[0]
item_variable = list_name[1]
item_variable = list_name[N]
"""
animals = ['man', 'bear', 'pig']
for animal in animals:
print(animal.upper())
#MAN
#BEAR
#PIG
"""
#While Loop
while condition:
# Code block
"""
animals = ['man', 'bear', 'pig', 'cow', 'duck', 'horse']
index = 0
while index < len(animals):
print(animals[index])
index += 1
"""
man
bear
pig
cow
duck
horse
"""
# Sorting and Ranges
animals = ['man', 'bear', 'pig']
sorted_animals = sorted(animals)
print('Animals list: {}'.format(animals))
print('Sorted animals list: {}'.format(sorted_animals))
animals.sort()
print('Animals after sort method: {}'.format(animals))
"""
Animals list: ['man', 'bear', 'pig']
Sorted animals list: ['bear', 'man', 'pig']
Animals after sort method: ['bear', 'man', 'pig']
"""
animals = ['man', 'bear', 'pig']
more_animals = ['cow', 'duck', 'horse']
all_animals = animals + more_animals
print(all_animals)
#['man', 'bear', 'pig', 'cow', 'duck', 'horse']
animals = ['man', 'bear', 'pig']
print(len(animals))
animals.append('cow')
print(len(animals))
#3
#4
#Ranges
for number in range(3):
print(number)
#0
#1
#2
for number in range(1,3):
print(number)
#1
#2
for number in range(1, 10, 2):
print(number)
#1
#3
#5
#7
#9
animals = ['man', 'bear', 'pig', 'cow', 'duck', 'horse', 'dog']
for number in range(0, len(animals), 2):
print(animals[number])
"""
man
pig
duck
dog
"""
|
from django.contrib.auth.admin import GroupAdmin, UserAdmin
from django.contrib.auth.models import Group, User
from django.contrib.sites.models import Site
from django.contrib import admin
from django import forms
# Register your models here.
# from frontend.models import Article
from django_ace import AceWidget
from frontend.models import Article, Tag, Category, Poll
# wordwrap=False, width="500px", height="300px", showprintmargin=True
class ArticleForm(forms.ModelForm):
content_ru = forms.CharField(widget=AceWidget(mode='markdown', theme='twilight',
wordwrap=False, width='100%', height='300px'), required=False)
content_en = forms.CharField(widget=AceWidget(mode='markdown', theme='twilight',
wordwrap=False, width='100%', height='300px'), required=False)
    class Meta:
        model = Article
        fields = '__all__'
class AdminArticle(admin.ModelAdmin):
form = ArticleForm
list_display = ['id', 'since', 'title_ru', 'slug_ru', 'title_en', 'slug_en']
# list_editable = ['slug_ru', 'slug_en', 'title_ru', 'title_en']
list_editable = ['title_ru', 'slug_ru']
raw_id_fields = ['tag']
# related_lookup_fields = {'m2m': ['tag']}
autocomplete_lookup_fields = {
'm2m': ['tag'],
}
def admin_register(admin_instance):
admin_instance.register(Category, list_display=['id', 'title_ru', 'slug_ru', 'title_en'],
list_editable=['title_ru', 'slug_ru'])
# admin_instance.register(SimplePage, AdminSimplePage)
# admin_instance.register(Slider, AdminSlider)
# admin_instance.register(Order, AdminOrder)
#
# admin_instance.register(SiteTemplate, AdminSiteTemplate)
#
# admin_instance.register(SiteSettings, list_display=['id', 'name', 'value', 'value_txt', 'description'],
# list_editable=['name', 'value'])
# admin_instance.register(SiteTheme)
admin_instance.register(User, UserAdmin)
admin_instance.register(Group, GroupAdmin)
admin_instance.register(Site, list_display=['id', 'domain', 'name'], list_editable=['domain', 'name'])
admin_instance.register(Article, AdminArticle)
admin_instance.register(Tag, list_display=['id', 'title_ru', 'slug_ru', 'title_en', 'slug_en', 'num'],
list_editable=['title_ru', 'slug_ru',])
admin_instance.register(Poll, list_display=['id', 'title_ru', 'title_en', 'date_close', 'num_votes', 'state'],
list_editable=['title_ru', 'title_en', 'state'])
# admin_instance.register(Delivery, AdminDelivery)
# admin_instance.register(DeliveryGroup, AdminDeliveryGroup)
|
# Copyright 2010-2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
__all__ = ['getmaskingstatus']
import sys
import portage
from portage import eapi_is_supported, _eapi_is_deprecated
from portage.localization import _
from portage.package.ebuild.config import config
from portage.versions import catpkgsplit, _pkg_str
if sys.hexversion >= 0x3000000:
basestring = str
class _UnmaskHint(object):
__slots__ = ('key', 'value')
def __init__(self, key, value):
self.key = key
self.value = value
class _MaskReason(object):
__slots__ = ('category', 'message', 'unmask_hint')
def __init__(self, category, message, unmask_hint=None):
self.category = category
self.message = message
self.unmask_hint = unmask_hint
def getmaskingstatus(mycpv, settings=None, portdb=None, myrepo=None):
if settings is None:
settings = config(clone=portage.settings)
if portdb is None:
portdb = portage.portdb
    return [mreason.message for mreason in
            _getmaskingstatus(mycpv, settings, portdb, myrepo)]
def _getmaskingstatus(mycpv, settings, portdb, myrepo=None):
metadata = None
installed = False
if not isinstance(mycpv, basestring):
# emerge passed in a Package instance
pkg = mycpv
mycpv = pkg.cpv
metadata = pkg.metadata
installed = pkg.installed
if metadata is None:
db_keys = list(portdb._aux_cache_keys)
try:
metadata = dict(zip(db_keys, portdb.aux_get(mycpv, db_keys, myrepo=myrepo)))
except KeyError:
if not portdb.cpv_exists(mycpv):
raise
return [_MaskReason("corruption", "corruption")]
if "?" in metadata["LICENSE"]:
settings.setcpv(mycpv, mydb=metadata)
metadata["USE"] = settings["PORTAGE_USE"]
else:
metadata["USE"] = ""
try:
mycpv.slot
except AttributeError:
try:
mycpv = _pkg_str(mycpv, metadata=metadata, settings=settings)
except portage.exception.InvalidData:
raise ValueError(_("invalid CPV: %s") % mycpv)
rValue = []
# package.mask checking
if settings._getMaskAtom(mycpv, metadata):
rValue.append(_MaskReason("package.mask", "package.mask", _UnmaskHint("p_mask", None)))
# keywords checking
eapi = metadata["EAPI"]
mygroups = settings._getKeywords(mycpv, metadata)
licenses = metadata["LICENSE"]
properties = metadata["PROPERTIES"]
if not eapi_is_supported(eapi):
return [_MaskReason("EAPI", "EAPI %s" % eapi)]
elif _eapi_is_deprecated(eapi) and not installed:
return [_MaskReason("EAPI", "EAPI %s" % eapi)]
egroups = settings.configdict["backupenv"].get(
"ACCEPT_KEYWORDS", "").split()
global_accept_keywords = settings.get("ACCEPT_KEYWORDS", "")
pgroups = global_accept_keywords.split()
myarch = settings["ARCH"]
if pgroups and myarch not in pgroups:
"""For operating systems other than Linux, ARCH is not necessarily a
valid keyword."""
myarch = pgroups[0].lstrip("~")
# NOTE: This logic is copied from KeywordsManager.getMissingKeywords().
unmaskgroups = settings._keywords_manager.getPKeywords(mycpv,
metadata["SLOT"], metadata["repository"], global_accept_keywords)
pgroups.extend(unmaskgroups)
if unmaskgroups or egroups:
pgroups = settings._keywords_manager._getEgroups(egroups, pgroups)
else:
pgroups = set(pgroups)
kmask = "missing"
kmask_hint = None
if '**' in pgroups:
kmask = None
else:
for keyword in pgroups:
if keyword in mygroups:
kmask = None
break
    if kmask:
        for gp in mygroups:
            if gp == "*":
                kmask = None
                break
            elif gp == "-" + myarch and myarch in pgroups:
                kmask = "-" + myarch
                break
            elif gp == "~" + myarch and myarch in pgroups:
                kmask = "~" + myarch
                kmask_hint = _UnmaskHint("unstable keyword", kmask)
                break
if kmask == "missing":
kmask_hint = _UnmaskHint("unstable keyword", "**")
try:
missing_licenses = settings._getMissingLicenses(mycpv, metadata)
if missing_licenses:
allowed_tokens = set(["||", "(", ")"])
allowed_tokens.update(missing_licenses)
license_split = licenses.split()
license_split = [x for x in license_split \
if x in allowed_tokens]
msg = license_split[:]
msg.append("license(s)")
rValue.append(_MaskReason("LICENSE", " ".join(msg), _UnmaskHint("license", set(missing_licenses))))
except portage.exception.InvalidDependString as e:
rValue.append(_MaskReason("invalid", "LICENSE: "+str(e)))
try:
missing_properties = settings._getMissingProperties(mycpv, metadata)
if missing_properties:
allowed_tokens = set(["||", "(", ")"])
allowed_tokens.update(missing_properties)
properties_split = properties.split()
properties_split = [x for x in properties_split \
if x in allowed_tokens]
msg = properties_split[:]
msg.append("properties")
rValue.append(_MaskReason("PROPERTIES", " ".join(msg)))
except portage.exception.InvalidDependString as e:
rValue.append(_MaskReason("invalid", "PROPERTIES: "+str(e)))
# Only show KEYWORDS masks for installed packages
# if they're not masked for any other reason.
if kmask and (not installed or not rValue):
rValue.append(_MaskReason("KEYWORDS",
kmask + " keyword", unmask_hint=kmask_hint))
return rValue
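
# --- Usage sketch (illustrative, not part of the original module) ---
# Requires a configured Portage environment; the atom is only an example:
#
#     for reason in getmaskingstatus("sys-apps/portage-2.1.6"):
#         print(reason)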
|
# client.py --
# Runs in thread and communicates with the Glass Server.
import threading
import socket
import config
import time
import struct
import logging
import GlassProtocol
from variable import variables
class client_c(object):
#Overall client class
    def __init__(self):
        # Read the config file for the port and host of the GlassServer to connect to.
        self.port = int(config.client.config['GlassServer']['port'])
        self.host = config.client.config['GlassServer']['ip']
        self.client_thread = threading.Thread(target=self.run)
        self.go = True
        self.rx_count = 0
        self.VD_recv = False
    def stop(self):
        self.go = False
def start(self):
self.client_thread.start()
def run(self):
self.init_client()
count = 10
while self.go:
#Check to see if connected, if not try to connect to Glass Server
if not self.connected:
self.try_connect()
else:
if not self.AVsent: #If no AV sent then send add variables
self.AVsend(variables.list())
#Check send buffer
#self.send_data()
#self.parse_data(self)
GlassProtocol.sendrecv(self, self.sock)
if self.go:
GlassProtocol.parse_data(self)
#Check for server ping
if (time.time() - self.lastRXtime) > 10:
self.reset_connect()
            #Time delay, kept for testing only
#print count
#count = count -1
#time.sleep(0.01)
logging.info("Client Thread Ended %s", self.client_thread.name)
def init_client(self):
self.commands = ["PI","VD"]
self.connected = False
self.AVsent = False
self.send_buffer = ""
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
GlassProtocol.initialize(self)
    def try_connect(self):
        success = False
        try:
            self.sock.connect((self.host, self.port))
            success = True
        except socket.error as e:
            if e.errno == 111:  # Connection refused: server not up yet, retry later
                pass
            else:  # Something is wrong, a real error occurred
                logging.warning("Client: Socket Error %r", e)
                #self.go = False  # Quit connection
        if success:
            self.connected = True
            logging.info("Client: Connected %r:%r", self.host, self.port)
            self.lastRXtime = time.time()
            self.sock.setblocking(0)
        else:
            time.sleep(3)
    def reset_connect(self):
        # Reset the connection when no ping has been received from the server.
        logging.warning("Client: Resetting Connection No Data Received")
        #self.sock.shutdown(socket.SHUT_RDWR)
        self.sock.close()
        time.sleep(3)
        self.init_client()
def add_to_send(self, response):
#Takes data to send in list form
for id, data, desc in response: #Cycle through all data that needs to be sent.
length = len(data)
send_s = id + struct.pack("H",length) + data
self.send_buffer += send_s
def send_data(self):
if len(self.send_buffer) > 0:
#try:
self.sock.send(self.send_buffer)
logging.debug("Client: Send send_buffer %r" ,self.send_buffer)
self.send_buffer = "" #Empty send buffer
#except:
    def AVsend(self, var_list):
        # Send an "AV" (add variables) message listing every tracked variable id.
        data_out = ""
        if var_list:
            for i in var_list:
                data_out += struct.pack("H", i)
        if data_out != "":
            self.add_to_send([["AV", data_out, "Add variables"]])
            self.AVsent = True
def process_data(self, command_byte, command_data):
#print "Byte = %r Data = %r" %(command_byte, command_data)
data_len = len(command_data)
desc = "UNKNOWN"
if command_byte == "PI":
desc = "Ping from Server "
#No actions taken
logging.debug("Client: Ping for Server Received")
elif command_byte == "VD":
i=0
self.rx_count = self.rx_count + 1
self.VD_recv = True
logging.debug("Client: VD Recieved rx_count %r" , self.rx_count)
while i<data_len: #Loop through till data runs out.
#Get addr
addr = struct.unpack("H",command_data[i:i+2])[0]
i+=2
#Determine size of data value.
#Make sure it exists.
if variables.exists(addr):
#Get variable
var = variables.dict[addr]
#Determine size
size = var.pack_size
value_s = command_data[i:i+size]
i+= size
#Try to write to it, if var is not writable will get error.
#print addr, "%r" %value_s
v = struct.unpack(var.pack_format,value_s)[0]
#print "V= ",v, var.pack_format
var.setvalue(v)
#self.connection.SetVariable(addr,value_s)
format = "%" + var.format_s
value = format %v
desc = "Client Set Variable 0x%04X to " %(addr) + value
#print desc
logging.debug(desc)
elif command_byte == "SE": #Send Event
#Events don't repeat.. so certian events can be passed through server,
#with server not needing to know of them.
#If events repeat then server needs to know size of data of each event,
#for it to parse it correctly.
addr = struct.unpack("H", command_data[:2])[0]
data = command_data[2:] #Data is all data after addr.
#Send addr, and data to event obj on server.
#self.events.process(addr,data)
#To see if server is aware of event and needs to process it.
#Automatically forward event to all clients.
#*** STILL NEED TO DO ****
#self.log_comm('TX', command_byte, command_data, desc)
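
# --- Wire-format sketch (illustrative, not part of the original module) ---
# As parsed in process_data above, a "VD" payload is a sequence of
# (2-byte variable address, value packed per that variable's pack_format)
# pairs. Packing one float variable at a hypothetical address 0x0010:
#
#     payload = struct.pack("H", 0x0010) + struct.pack("f", 3.14)
#     self.add_to_send([["VD", payload, "Variable data"]])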
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\盛田昭夫\Desktop\IonTrap-WIPM-master\GUI_Material\QC2_0.ui'
#
# Created by: PyQt5 UI code generator 5.13.0
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(597, 888)
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
MainWindow.setFont(font)
MainWindow.setLayoutDirection(QtCore.Qt.LeftToRight)
MainWindow.setIconSize(QtCore.QSize(30, 30))
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.line = QtWidgets.QFrame(self.centralwidget)
self.line.setGeometry(QtCore.QRect(50, 80, 491, 20))
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.label_20 = QtWidgets.QLabel(self.centralwidget)
self.label_20.setGeometry(QtCore.QRect(50, 20, 491, 61))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_20.setFont(font)
self.label_20.setObjectName("label_20")
self.label_10 = QtWidgets.QLabel(self.centralwidget)
self.label_10.setGeometry(QtCore.QRect(60, 470, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
font.setStrikeOut(False)
self.label_10.setFont(font)
self.label_10.setObjectName("label_10")
self.label_33 = QtWidgets.QLabel(self.centralwidget)
self.label_33.setGeometry(QtCore.QRect(50, 100, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
font.setStrikeOut(False)
self.label_33.setFont(font)
self.label_33.setObjectName("label_33")
self.radioButton_rabi = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_rabi.setGeometry(QtCore.QRect(420, 540, 115, 19))
self.radioButton_rabi.setObjectName("radioButton_rabi")
self.radioButton_zeeman = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_zeeman.setGeometry(QtCore.QRect(420, 580, 115, 19))
self.radioButton_zeeman.setObjectName("radioButton_zeeman")
self.radioButton_cust = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_cust.setGeometry(QtCore.QRect(420, 620, 115, 19))
self.radioButton_cust.setObjectName("radioButton_cust")
self.pushButton = QtWidgets.QPushButton(self.centralwidget)
self.pushButton.setGeometry(QtCore.QRect(420, 700, 121, 111))
self.pushButton.setObjectName("pushButton")
self.radioButton_off = QtWidgets.QRadioButton(self.centralwidget)
self.radioButton_off.setGeometry(QtCore.QRect(420, 660, 115, 19))
self.radioButton_off.setChecked(True)
self.radioButton_off.setObjectName("radioButton_off")
self.Setting = QtWidgets.QTabWidget(self.centralwidget)
self.Setting.setGeometry(QtCore.QRect(50, 150, 491, 311))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Setting.setFont(font)
self.Setting.setObjectName("Setting")
self.DPL_Cooling = QtWidgets.QWidget()
self.DPL_Cooling.setObjectName("DPL_Cooling")
self.doubleSpinBox_DPL = QtWidgets.QDoubleSpinBox(self.DPL_Cooling)
self.doubleSpinBox_DPL.setGeometry(QtCore.QRect(140, 30, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_DPL.setFont(font)
self.doubleSpinBox_DPL.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_DPL.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_DPL.setMaximum(999999.99)
self.doubleSpinBox_DPL.setObjectName("doubleSpinBox_DPL")
self.label_36 = QtWidgets.QLabel(self.DPL_Cooling)
self.label_36.setGeometry(QtCore.QRect(260, 30, 31, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_36.setFont(font)
self.label_36.setObjectName("label_36")
self.label_37 = QtWidgets.QLabel(self.DPL_Cooling)
self.label_37.setGeometry(QtCore.QRect(20, 30, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_37.setFont(font)
self.label_37.setObjectName("label_37")
self.Laser_397_1 = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_397_1.setGeometry(QtCore.QRect(150, 160, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_397_1.setFont(font)
self.Laser_397_1.setChecked(True)
self.Laser_397_1.setObjectName("Laser_397_1")
self.Laser_397_2 = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_397_2.setGeometry(QtCore.QRect(230, 160, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_397_2.setFont(font)
self.Laser_397_2.setChecked(True)
self.Laser_397_2.setObjectName("Laser_397_2")
self.Laser_397_3 = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_397_3.setGeometry(QtCore.QRect(310, 160, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_397_3.setFont(font)
self.Laser_397_3.setChecked(True)
self.Laser_397_3.setObjectName("Laser_397_3")
self.Laser_397_main = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_397_main.setGeometry(QtCore.QRect(30, 160, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_397_main.setFont(font)
self.Laser_397_main.setChecked(True)
self.Laser_397_main.setObjectName("Laser_397_main")
self.Laser_866 = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_866.setGeometry(QtCore.QRect(30, 200, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_866.setFont(font)
self.Laser_866.setChecked(True)
self.Laser_866.setObjectName("Laser_866")
self.Laser_854 = QtWidgets.QCheckBox(self.DPL_Cooling)
self.Laser_854.setGeometry(QtCore.QRect(30, 240, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_854.setFont(font)
self.Laser_854.setChecked(True)
self.Laser_854.setObjectName("Laser_854")
self.line_2 = QtWidgets.QFrame(self.DPL_Cooling)
self.line_2.setGeometry(QtCore.QRect(20, 80, 451, 20))
self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.label_42 = QtWidgets.QLabel(self.DPL_Cooling)
self.label_42.setGeometry(QtCore.QRect(20, 100, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_42.setFont(font)
self.label_42.setObjectName("label_42")
self.line_4 = QtWidgets.QFrame(self.DPL_Cooling)
self.line_4.setGeometry(QtCore.QRect(120, 160, 20, 101))
self.line_4.setFrameShape(QtWidgets.QFrame.VLine)
self.line_4.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_4.setObjectName("line_4")
self.label_44 = QtWidgets.QLabel(self.DPL_Cooling)
self.label_44.setGeometry(QtCore.QRect(150, 200, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_44.setFont(font)
self.label_44.setObjectName("label_44")
self.label_45 = QtWidgets.QLabel(self.DPL_Cooling)
self.label_45.setGeometry(QtCore.QRect(150, 240, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_45.setFont(font)
self.label_45.setObjectName("label_45")
self.Setting.addTab(self.DPL_Cooling, "")
self.tab_6 = QtWidgets.QWidget()
self.tab_6.setObjectName("tab_6")
self.label_41 = QtWidgets.QLabel(self.tab_6)
self.label_41.setGeometry(QtCore.QRect(20, 30, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_41.setFont(font)
self.label_41.setObjectName("label_41")
self.label_40 = QtWidgets.QLabel(self.tab_6)
self.label_40.setGeometry(QtCore.QRect(260, 30, 31, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_40.setFont(font)
self.label_40.setObjectName("label_40")
self.doubleSpinBox_DPL_2 = QtWidgets.QDoubleSpinBox(self.tab_6)
self.doubleSpinBox_DPL_2.setGeometry(QtCore.QRect(140, 30, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_DPL_2.setFont(font)
self.doubleSpinBox_DPL_2.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_DPL_2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_DPL_2.setMaximum(999999.99)
self.doubleSpinBox_DPL_2.setObjectName("doubleSpinBox_DPL_2")
self.Laser_729_1 = QtWidgets.QCheckBox(self.tab_6)
self.Laser_729_1.setGeometry(QtCore.QRect(150, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_1.setFont(font)
self.Laser_729_1.setObjectName("Laser_729_1")
self.Laser_729_3 = QtWidgets.QCheckBox(self.tab_6)
self.Laser_729_3.setGeometry(QtCore.QRect(310, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_3.setFont(font)
self.Laser_729_3.setObjectName("Laser_729_3")
self.Laser_729_2 = QtWidgets.QCheckBox(self.tab_6)
self.Laser_729_2.setGeometry(QtCore.QRect(230, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_2.setFont(font)
self.Laser_729_2.setObjectName("Laser_729_2")
self.Laser_729_4 = QtWidgets.QCheckBox(self.tab_6)
self.Laser_729_4.setGeometry(QtCore.QRect(390, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_4.setFont(font)
self.Laser_729_4.setObjectName("Laser_729_4")
self.Laser_855 = QtWidgets.QCheckBox(self.tab_6)
self.Laser_855.setGeometry(QtCore.QRect(30, 200, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_855.setFont(font)
self.Laser_855.setCheckable(True)
self.Laser_855.setChecked(True)
self.Laser_855.setObjectName("Laser_855")
self.line_3 = QtWidgets.QFrame(self.tab_6)
self.line_3.setGeometry(QtCore.QRect(20, 80, 451, 20))
self.line_3.setFrameShape(QtWidgets.QFrame.HLine)
self.line_3.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_3.setObjectName("line_3")
self.Laser_729_all = QtWidgets.QCheckBox(self.tab_6)
self.Laser_729_all.setGeometry(QtCore.QRect(30, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setItalic(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_all.setFont(font)
self.Laser_729_all.setChecked(True)
self.Laser_729_all.setObjectName("Laser_729_all")
self.label_43 = QtWidgets.QLabel(self.tab_6)
self.label_43.setGeometry(QtCore.QRect(20, 100, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_43.setFont(font)
self.label_43.setObjectName("label_43")
self.line_5 = QtWidgets.QFrame(self.tab_6)
self.line_5.setGeometry(QtCore.QRect(120, 160, 20, 101))
self.line_5.setFrameShape(QtWidgets.QFrame.VLine)
self.line_5.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_5.setObjectName("line_5")
self.label_46 = QtWidgets.QLabel(self.tab_6)
self.label_46.setGeometry(QtCore.QRect(150, 200, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_46.setFont(font)
self.label_46.setObjectName("label_46")
self.Setting.addTab(self.tab_6, "")
self.tab_2 = QtWidgets.QWidget()
self.tab_2.setObjectName("tab_2")
self.doubleSpinBox_DPL_5 = QtWidgets.QDoubleSpinBox(self.tab_2)
self.doubleSpinBox_DPL_5.setGeometry(QtCore.QRect(140, 30, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_DPL_5.setFont(font)
self.doubleSpinBox_DPL_5.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_DPL_5.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_DPL_5.setMaximum(999999.99)
self.doubleSpinBox_DPL_5.setObjectName("doubleSpinBox_DPL_5")
self.label_53 = QtWidgets.QLabel(self.tab_2)
self.label_53.setGeometry(QtCore.QRect(20, 30, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.label_53.setFont(font)
self.label_53.setObjectName("label_53")
self.label_54 = QtWidgets.QLabel(self.tab_2)
self.label_54.setGeometry(QtCore.QRect(260, 30, 31, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_54.setFont(font)
self.label_54.setObjectName("label_54")
self.Setting.addTab(self.tab_2, "")
self.tab_7 = QtWidgets.QWidget()
self.tab_7.setObjectName("tab_7")
self.Laser_729_5 = QtWidgets.QCheckBox(self.tab_7)
self.Laser_729_5.setGeometry(QtCore.QRect(390, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_5.setFont(font)
self.Laser_729_5.setObjectName("Laser_729_5")
self.Laser_729_all_2 = QtWidgets.QCheckBox(self.tab_7)
self.Laser_729_all_2.setGeometry(QtCore.QRect(30, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setItalic(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_all_2.setFont(font)
self.Laser_729_all_2.setChecked(True)
self.Laser_729_all_2.setObjectName("Laser_729_all_2")
self.Laser_729_6 = QtWidgets.QCheckBox(self.tab_7)
self.Laser_729_6.setGeometry(QtCore.QRect(150, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_6.setFont(font)
self.Laser_729_6.setObjectName("Laser_729_6")
self.Laser_856 = QtWidgets.QCheckBox(self.tab_7)
self.Laser_856.setGeometry(QtCore.QRect(30, 200, 91, 19))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_856.setFont(font)
self.Laser_856.setChecked(True)
self.Laser_856.setObjectName("Laser_856")
self.line_6 = QtWidgets.QFrame(self.tab_7)
self.line_6.setGeometry(QtCore.QRect(120, 160, 20, 101))
self.line_6.setFrameShape(QtWidgets.QFrame.VLine)
self.line_6.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_6.setObjectName("line_6")
self.Laser_729_7 = QtWidgets.QCheckBox(self.tab_7)
self.Laser_729_7.setGeometry(QtCore.QRect(230, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_7.setFont(font)
self.Laser_729_7.setObjectName("Laser_729_7")
self.label_47 = QtWidgets.QLabel(self.tab_7)
self.label_47.setGeometry(QtCore.QRect(260, 30, 31, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_47.setFont(font)
self.label_47.setObjectName("label_47")
self.doubleSpinBox_DPL_3 = QtWidgets.QDoubleSpinBox(self.tab_7)
self.doubleSpinBox_DPL_3.setGeometry(QtCore.QRect(140, 30, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_DPL_3.setFont(font)
self.doubleSpinBox_DPL_3.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_DPL_3.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_DPL_3.setMaximum(999999.99)
self.doubleSpinBox_DPL_3.setObjectName("doubleSpinBox_DPL_3")
self.Laser_729_8 = QtWidgets.QCheckBox(self.tab_7)
self.Laser_729_8.setGeometry(QtCore.QRect(310, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.Laser_729_8.setFont(font)
self.Laser_729_8.setObjectName("Laser_729_8")
self.label_48 = QtWidgets.QLabel(self.tab_7)
self.label_48.setGeometry(QtCore.QRect(20, 30, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_48.setFont(font)
self.label_48.setObjectName("label_48")
self.label_49 = QtWidgets.QLabel(self.tab_7)
self.label_49.setGeometry(QtCore.QRect(150, 200, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_49.setFont(font)
self.label_49.setObjectName("label_49")
self.label_50 = QtWidgets.QLabel(self.tab_7)
self.label_50.setGeometry(QtCore.QRect(20, 100, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_50.setFont(font)
self.label_50.setObjectName("label_50")
self.line_7 = QtWidgets.QFrame(self.tab_7)
self.line_7.setGeometry(QtCore.QRect(20, 80, 451, 20))
self.line_7.setFrameShape(QtWidgets.QFrame.HLine)
self.line_7.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_7.setObjectName("line_7")
self.Setting.addTab(self.tab_7, "")
self.tab_8 = QtWidgets.QWidget()
self.tab_8.setObjectName("tab_8")
self.doubleSpinBox_DPL_4 = QtWidgets.QDoubleSpinBox(self.tab_8)
self.doubleSpinBox_DPL_4.setGeometry(QtCore.QRect(140, 30, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_DPL_4.setFont(font)
self.doubleSpinBox_DPL_4.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_DPL_4.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_DPL_4.setMaximum(999999.99)
self.doubleSpinBox_DPL_4.setObjectName("doubleSpinBox_DPL_4")
self.label_51 = QtWidgets.QLabel(self.tab_8)
self.label_51.setGeometry(QtCore.QRect(20, 30, 131, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setUnderline(True)
font.setWeight(50)
font.setStrikeOut(False)
self.label_51.setFont(font)
self.label_51.setObjectName("label_51")
self.label_52 = QtWidgets.QLabel(self.tab_8)
self.label_52.setGeometry(QtCore.QRect(260, 30, 31, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_52.setFont(font)
self.label_52.setObjectName("label_52")
self.Setting.addTab(self.tab_8, "")
self.tab_9 = QtWidgets.QWidget()
self.tab_9.setObjectName("tab_9")
self.label_55 = QtWidgets.QLabel(self.tab_9)
self.label_55.setGeometry(QtCore.QRect(20, 30, 421, 101))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_55.setFont(font)
self.label_55.setObjectName("label_55")
self.Setting.addTab(self.tab_9, "")
self.tabWidget_2 = QtWidgets.QTabWidget(self.centralwidget)
self.tabWidget_2.setGeometry(QtCore.QRect(50, 520, 341, 301))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.tabWidget_2.setFont(font)
self.tabWidget_2.setIconSize(QtCore.QSize(20, 20))
self.tabWidget_2.setUsesScrollButtons(False)
self.tabWidget_2.setObjectName("tabWidget_2")
self.tab_3 = QtWidgets.QWidget()
self.tab_3.setObjectName("tab_3")
self.label_18 = QtWidgets.QLabel(self.tab_3)
self.label_18.setGeometry(QtCore.QRect(230, 220, 41, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_18.setFont(font)
self.label_18.setObjectName("label_18")
self.label_5 = QtWidgets.QLabel(self.tab_3)
self.label_5.setGeometry(QtCore.QRect(260, 40, 31, 16))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_5.setFont(font)
self.label_5.setObjectName("label_5")
self.doubleSpinBox_rabistep = QtWidgets.QDoubleSpinBox(self.tab_3)
self.doubleSpinBox_rabistep.setGeometry(QtCore.QRect(140, 150, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_rabistep.setFont(font)
self.doubleSpinBox_rabistep.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_rabistep.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_rabistep.setMaximum(999999.99)
self.doubleSpinBox_rabistep.setObjectName("doubleSpinBox_rabistep")
self.label_9 = QtWidgets.QLabel(self.tab_3)
self.label_9.setGeometry(QtCore.QRect(20, 220, 91, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_9.setFont(font)
self.label_9.setObjectName("label_9")
self.doubleSpinBox_rabistart = QtWidgets.QDoubleSpinBox(self.tab_3)
self.doubleSpinBox_rabistart.setGeometry(QtCore.QRect(140, 30, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_rabistart.setFont(font)
self.doubleSpinBox_rabistart.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_rabistart.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_rabistart.setMaximum(999999.99)
self.doubleSpinBox_rabistart.setObjectName("doubleSpinBox_rabistart")
self.label_7 = QtWidgets.QLabel(self.tab_3)
self.label_7.setGeometry(QtCore.QRect(260, 160, 31, 16))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_7.setFont(font)
self.label_7.setObjectName("label_7")
self.label_4 = QtWidgets.QLabel(self.tab_3)
self.label_4.setGeometry(QtCore.QRect(20, 160, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_4.setFont(font)
self.label_4.setObjectName("label_4")
self.label_6 = QtWidgets.QLabel(self.tab_3)
self.label_6.setGeometry(QtCore.QRect(260, 100, 31, 16))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_6.setFont(font)
self.label_6.setObjectName("label_6")
self.doubleSpinBox_rabiend = QtWidgets.QDoubleSpinBox(self.tab_3)
self.doubleSpinBox_rabiend.setGeometry(QtCore.QRect(140, 90, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_rabiend.setFont(font)
self.doubleSpinBox_rabiend.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_rabiend.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_rabiend.setMaximum(999999.99)
self.doubleSpinBox_rabiend.setObjectName("doubleSpinBox_rabiend")
self.spinBox_rabirepeat = QtWidgets.QSpinBox(self.tab_3)
self.spinBox_rabirepeat.setGeometry(QtCore.QRect(140, 220, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.spinBox_rabirepeat.setFont(font)
self.spinBox_rabirepeat.setObjectName("spinBox_rabirepeat")
self.label_2 = QtWidgets.QLabel(self.tab_3)
self.label_2.setGeometry(QtCore.QRect(20, 40, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.tab_3)
self.label_3.setGeometry(QtCore.QRect(20, 100, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.tabWidget_2.addTab(self.tab_3, "")
self.tab_4 = QtWidgets.QWidget()
self.tab_4.setObjectName("tab_4")
self.label_19 = QtWidgets.QLabel(self.tab_4)
self.label_19.setGeometry(QtCore.QRect(270, 220, 41, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_19.setFont(font)
self.label_19.setObjectName("label_19")
self.spinBox_zeemanrepeat = QtWidgets.QSpinBox(self.tab_4)
self.spinBox_zeemanrepeat.setGeometry(QtCore.QRect(180, 220, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.spinBox_zeemanrepeat.setFont(font)
self.spinBox_zeemanrepeat.setObjectName("spinBox_zeemanrepeat")
self.label_12 = QtWidgets.QLabel(self.tab_4)
self.label_12.setGeometry(QtCore.QRect(300, 160, 31, 16))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_12.setFont(font)
self.label_12.setObjectName("label_12")
self.label_13 = QtWidgets.QLabel(self.tab_4)
self.label_13.setGeometry(QtCore.QRect(20, 220, 91, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_13.setFont(font)
self.label_13.setObjectName("label_13")
self.label_15 = QtWidgets.QLabel(self.tab_4)
self.label_15.setGeometry(QtCore.QRect(20, 160, 141, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_15.setFont(font)
self.label_15.setObjectName("label_15")
self.doubleSpinBox_zeemanend = QtWidgets.QDoubleSpinBox(self.tab_4)
self.doubleSpinBox_zeemanend.setGeometry(QtCore.QRect(180, 90, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_zeemanend.setFont(font)
self.doubleSpinBox_zeemanend.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_zeemanend.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_zeemanend.setMaximum(999999.99)
self.doubleSpinBox_zeemanend.setObjectName("doubleSpinBox_zeemanend")
self.doubleSpinBox_zeemanstep = QtWidgets.QDoubleSpinBox(self.tab_4)
self.doubleSpinBox_zeemanstep.setGeometry(QtCore.QRect(180, 150, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_zeemanstep.setFont(font)
self.doubleSpinBox_zeemanstep.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_zeemanstep.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_zeemanstep.setMaximum(999999.99)
self.doubleSpinBox_zeemanstep.setObjectName("doubleSpinBox_zeemanstep")
self.label_14 = QtWidgets.QLabel(self.tab_4)
self.label_14.setGeometry(QtCore.QRect(20, 100, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_14.setFont(font)
self.label_14.setObjectName("label_14")
self.doubleSpinBox_zeemanstart = QtWidgets.QDoubleSpinBox(self.tab_4)
self.doubleSpinBox_zeemanstart.setGeometry(QtCore.QRect(180, 30, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_zeemanstart.setFont(font)
self.doubleSpinBox_zeemanstart.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_zeemanstart.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_zeemanstart.setMaximum(999999.99)
self.doubleSpinBox_zeemanstart.setObjectName("doubleSpinBox_zeemanstart")
self.label_16 = QtWidgets.QLabel(self.tab_4)
self.label_16.setGeometry(QtCore.QRect(300, 40, 31, 16))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_16.setFont(font)
self.label_16.setObjectName("label_16")
self.label_17 = QtWidgets.QLabel(self.tab_4)
self.label_17.setGeometry(QtCore.QRect(300, 100, 31, 16))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_17.setFont(font)
self.label_17.setObjectName("label_17")
self.label_11 = QtWidgets.QLabel(self.tab_4)
self.label_11.setGeometry(QtCore.QRect(20, 40, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_11.setFont(font)
self.label_11.setObjectName("label_11")
self.tabWidget_2.addTab(self.tab_4, "")
self.tab_5 = QtWidgets.QWidget()
self.tab_5.setObjectName("tab_5")
self.label_28 = QtWidgets.QLabel(self.tab_5)
self.label_28.setGeometry(QtCore.QRect(260, 40, 31, 16))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_28.setFont(font)
self.label_28.setObjectName("label_28")
self.label_21 = QtWidgets.QLabel(self.tab_5)
self.label_21.setGeometry(QtCore.QRect(20, 100, 91, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_21.setFont(font)
self.label_21.setObjectName("label_21")
self.doubleSpinBox_custTime = QtWidgets.QDoubleSpinBox(self.tab_5)
self.doubleSpinBox_custTime.setGeometry(QtCore.QRect(140, 90, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_custTime.setFont(font)
self.doubleSpinBox_custTime.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_custTime.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_custTime.setMaximum(999999.99)
self.doubleSpinBox_custTime.setObjectName("doubleSpinBox_custTime")
self.doubleSpinBox_custF = QtWidgets.QDoubleSpinBox(self.tab_5)
self.doubleSpinBox_custF.setGeometry(QtCore.QRect(140, 30, 101, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.doubleSpinBox_custF.setFont(font)
self.doubleSpinBox_custF.setLayoutDirection(QtCore.Qt.LeftToRight)
self.doubleSpinBox_custF.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.doubleSpinBox_custF.setMaximum(999999.99)
self.doubleSpinBox_custF.setObjectName("doubleSpinBox_custF")
self.label_29 = QtWidgets.QLabel(self.tab_5)
self.label_29.setGeometry(QtCore.QRect(20, 160, 91, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_29.setFont(font)
self.label_29.setObjectName("label_29")
self.label_23 = QtWidgets.QLabel(self.tab_5)
self.label_23.setGeometry(QtCore.QRect(260, 100, 31, 16))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_23.setFont(font)
self.label_23.setObjectName("label_23")
self.spinBox_custreapeat = QtWidgets.QSpinBox(self.tab_5)
self.spinBox_custreapeat.setGeometry(QtCore.QRect(140, 160, 71, 31))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.spinBox_custreapeat.setFont(font)
self.spinBox_custreapeat.setObjectName("spinBox_custreapeat")
self.label_22 = QtWidgets.QLabel(self.tab_5)
self.label_22.setGeometry(QtCore.QRect(20, 40, 131, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_22.setFont(font)
self.label_22.setObjectName("label_22")
self.label_27 = QtWidgets.QLabel(self.tab_5)
self.label_27.setGeometry(QtCore.QRect(230, 165, 41, 21))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(9)
font.setBold(False)
font.setWeight(50)
font.setStrikeOut(False)
self.label_27.setFont(font)
self.label_27.setObjectName("label_27")
self.tabWidget_2.addTab(self.tab_5, "")
self.label_34 = QtWidgets.QLabel(self.centralwidget)
self.label_34.setGeometry(QtCore.QRect(490, 30, 51, 41))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
font.setStrikeOut(False)
self.label_34.setFont(font)
self.label_34.setObjectName("label_34")
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 597, 26))
self.menubar.setObjectName("menubar")
self.menuMain = QtWidgets.QMenu(self.menubar)
self.menuMain.setObjectName("menuMain")
self.menuAdvance = QtWidgets.QMenu(self.menubar)
self.menuAdvance.setObjectName("menuAdvance")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.actionreset = QtWidgets.QAction(MainWindow)
self.actionreset.setObjectName("actionreset")
self.actionClear_Settings = QtWidgets.QAction(MainWindow)
self.actionClear_Settings.setObjectName("actionClear_Settings")
self.actionClear_Data = QtWidgets.QAction(MainWindow)
self.actionClear_Data.setObjectName("actionClear_Data")
self.menuMain.addAction(self.actionreset)
self.menuMain.addAction(self.actionClear_Settings)
self.menuMain.addAction(self.actionClear_Data)
self.menubar.addAction(self.menuMain.menuAction())
self.menubar.addAction(self.menuAdvance.menuAction())
self.retranslateUi(MainWindow)
self.Setting.setCurrentIndex(0)
self.tabWidget_2.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.label_20.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:16pt; font-weight:600; color:#000000;\">Ion Trap QC Control System 2.0</span></p></body></html>"))
self.label_10.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:11pt; color:#002800;\">Mode</span></p></body></html>"))
self.label_33.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:11pt; font-weight:600; color:#002800;\">Basic Setting</span></p></body></html>"))
self.radioButton_rabi.setText(_translate("MainWindow", "Rabi Scan"))
self.radioButton_zeeman.setText(_translate("MainWindow", "Zeeman Scan"))
self.radioButton_cust.setText(_translate("MainWindow", "Customized"))
self.pushButton.setText(_translate("MainWindow", "Submit"))
self.radioButton_off.setText(_translate("MainWindow", "OFF"))
self.label_36.setText(_translate("MainWindow", "us"))
self.label_37.setText(_translate("MainWindow", "Doppler Cooling"))
self.Laser_397_1.setText(_translate("MainWindow", "397_1"))
self.Laser_397_2.setText(_translate("MainWindow", "397_2"))
self.Laser_397_3.setText(_translate("MainWindow", "397_3"))
self.Laser_397_main.setText(_translate("MainWindow", "397 Main"))
self.Laser_866.setText(_translate("MainWindow", "866"))
self.Laser_854.setText(_translate("MainWindow", "854"))
self.label_42.setText(_translate("MainWindow", "Laser Selected"))
self.label_44.setText(_translate("MainWindow", "NA"))
self.label_45.setText(_translate("MainWindow", "NA"))
self.Setting.setTabText(self.Setting.indexOf(self.DPL_Cooling), _translate("MainWindow", "DPL Cooling"))
self.label_41.setText(_translate("MainWindow", "Optical Pump"))
self.label_40.setText(_translate("MainWindow", "us"))
self.Laser_729_1.setText(_translate("MainWindow", "729_1"))
self.Laser_729_3.setText(_translate("MainWindow", "729_3"))
self.Laser_729_2.setText(_translate("MainWindow", "729_2"))
self.Laser_729_4.setText(_translate("MainWindow", "729_4"))
self.Laser_855.setText(_translate("MainWindow", "854 1"))
self.Laser_729_all.setText(_translate("MainWindow", "729 Main"))
self.label_43.setText(_translate("MainWindow", "Laser Selected"))
self.label_46.setText(_translate("MainWindow", "NA"))
self.Setting.setTabText(self.Setting.indexOf(self.tab_6), _translate("MainWindow", "Optical Pump"))
self.label_53.setText(_translate("MainWindow", "SB Cooling"))
self.label_54.setText(_translate("MainWindow", "us"))
self.Setting.setTabText(self.Setting.indexOf(self.tab_2), _translate("MainWindow", "SB Cooling"))
self.Laser_729_5.setText(_translate("MainWindow", "729_4"))
self.Laser_729_all_2.setText(_translate("MainWindow", "729 Main"))
self.Laser_729_6.setText(_translate("MainWindow", "729_1"))
self.Laser_856.setText(_translate("MainWindow", "854 1"))
self.Laser_729_7.setText(_translate("MainWindow", "729_2"))
self.label_47.setText(_translate("MainWindow", "us"))
self.Laser_729_8.setText(_translate("MainWindow", "729_3"))
        self.label_48.setText(_translate("MainWindow", "Operating"))
self.label_49.setText(_translate("MainWindow", "NA"))
self.label_50.setText(_translate("MainWindow", "Laser Selected"))
        self.Setting.setTabText(self.Setting.indexOf(self.tab_7), _translate("MainWindow", "Operating"))
self.label_51.setText(_translate("MainWindow", "Detecting"))
self.label_52.setText(_translate("MainWindow", "us"))
self.Setting.setTabText(self.Setting.indexOf(self.tab_8), _translate("MainWindow", "Detecting"))
        self.label_55.setText(_translate("MainWindow", "<html><head/><body><p>For now the operation period is coupled to the 50 Hz mains </p><p>frequency; if you want to change or cancel this, please edit </p><p>the code responsible for running (not this GUI file).</p></body></html>"))
self.Setting.setTabText(self.Setting.indexOf(self.tab_9), _translate("MainWindow", "Gap"))
self.label_18.setText(_translate("MainWindow", "times"))
self.label_5.setText(_translate("MainWindow", "us"))
self.label_9.setText(_translate("MainWindow", "Repeat"))
self.label_7.setText(_translate("MainWindow", "us"))
self.label_4.setText(_translate("MainWindow", "Time for Step"))
self.label_6.setText(_translate("MainWindow", "us"))
self.label_2.setText(_translate("MainWindow", "Time to Start"))
self.label_3.setText(_translate("MainWindow", "Time to End"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_3), _translate("MainWindow", "Rabi Scan"))
self.label_19.setText(_translate("MainWindow", "times"))
self.label_12.setText(_translate("MainWindow", "Hz"))
self.label_13.setText(_translate("MainWindow", "Repeat"))
self.label_15.setText(_translate("MainWindow", "Frequency for Step"))
self.label_14.setText(_translate("MainWindow", "Frequency of End"))
self.label_16.setText(_translate("MainWindow", "Hz"))
self.label_17.setText(_translate("MainWindow", "Hz"))
self.label_11.setText(_translate("MainWindow", "Frequency of Start"))
self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_4), _translate("MainWindow", "Zeeman Scan"))
self.label_28.setText(_translate("MainWindow", "Hz"))
self.label_21.setText(_translate("MainWindow", "Rabi Time"))
self.label_29.setText(_translate("MainWindow", "Repeat"))
self.label_23.setText(_translate("MainWindow", "us"))
self.label_22.setText(_translate("MainWindow", "Frequency "))
self.label_27.setText(_translate("MainWindow", "times"))
        self.tabWidget_2.setTabText(self.tabWidget_2.indexOf(self.tab_5), _translate("MainWindow", "Customized"))
self.label_34.setText(_translate("MainWindow", "<html><head/><body><p><span style=\" font-size:11pt; font-style:italic; color:#000000;\">WIPM </span></p></body></html>"))
self.menuMain.setTitle(_translate("MainWindow", "Main"))
self.menuAdvance.setTitle(_translate("MainWindow", "Advance"))
self.actionreset.setText(_translate("MainWindow", "Introduction"))
self.actionClear_Settings.setText(_translate("MainWindow", "Clear Settings"))
self.actionClear_Data.setText(_translate("MainWindow", "Clear Data"))
|
#!/usr/bin/python
# Copyright (c) Istituto Nazionale di Fisica Nucleare (INFN). 2006-2010.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import getopt
__author__ = "Elisabetta Ronchieri"
m={'-u':'used-space',
'-f':'free-space',
'-r':'reserved-space',
'-t':'total-space',
'-a':'available-space'}
s={'used-space':'',
'free-space':'',
'reserved-space':'',
'total-space':'',
'available-space':''}
if __name__=="__main__":
try:
opts, args = getopt.getopt(sys.argv[1:], "u:f:r:t:a:")
except getopt.GetoptError, err:
# print help information and exit:
print str(err)
sys.exit(2)
    for o, a in opts:
        if o in m.keys():
            # convert the value from GB (10**9 bytes) to GiB (2**30 bytes)
            s[m[o]] = int((int(a) * (1000 * 1000 * 1000)) / (1024 * 1024 * 1024))
        else:
            sys.exit(2)
    print s
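# A minimal usage sketch (the script name and values are hypothetical; each
# option takes a size in GB, printed after the GB-to-GiB conversion above):
#   python space_report.py -u 5 -f 10 -r 1 -t 20 -a 14
#   -> {'used-space': 4, 'free-space': 9, ...}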
|
#!/usr/bin/env python
'''
This is the main file that the meta-server calls when you want to
run just one robot at a time
'''
import roslib; roslib.load_manifest('br_swarm_rover')
import rospy
from sensor_msgs.msg import CompressedImage
from sensor_msgs.msg import Image
from std_msgs.msg import String
import br_cam
from br_control import RovCon
#from time import sleep
# meta_server.py creates the file where ROS shall write its network
# address, then the address is passed as an argument here
# TODO: may be able to do something directly like:
# python fibo.py <arguments> (on command line)
# import sys (inside script)
# fib(int(sys.argv[1]))
# to enter argument instead of using argparser
import argparse
parser = argparse.ArgumentParser('br_single_control')
parser.add_argument('file', type=str, default=None,
help='temporary file to store server uri')
parser.add_argument('robot_address', type=str, default=None,
help='address of NICs connect to robots')
arg = parser.parse_args()
if __name__ == '__main__':
try:
#TODO: change the local host part to a normal address
# for now the wanted address is exported manually in the
# .bashrc file
# initiate rover connection and video streaming
rover = RovCon(arg.robot_address)
rover_video = br_cam.RovCam(rover.return_data())
# publish robot camera data
pub = rospy.Publisher('/output/image_raw/compressed'+
arg.robot_address.split('.')[3], CompressedImage,
queue_size=100)
rospy.init_node('robot'+arg.robot_address.split('.')[3])
# distance = 0.5 # feet
# speed = 1 # foot/sec
# obtain published move command
#TODO: also obtain speed and distance
rospy.Subscriber("move", String, rover.set_move)
# thread to run the subscriber
from threading import Thread
spin_thread = Thread(target=lambda: rospy.spin())
spin_thread.start()
while not rospy.is_shutdown():
rospy.loginfo("before video")
buf = rover_video.receive_image()
pub.publish(buf) # publish CompressedImage
rospy.sleep(0.033) # manually given image frame rate
except rospy.ROSInterruptException:
rover.disconnect_rover()
rover_video.disconnect_video()
# pass
from sys import exit
exit()
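# A minimal usage sketch (the script filename and addresses are placeholders;
# in normal operation meta_server.py supplies both arguments):
#   python br_single_control.py /tmp/server_uri.txt 192.168.1.101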
|
# coding:utf-8
st = "Hello World"
# Substring membership operators: in, not in
print("He" in st)
print("She" not in st)
# Read part of the string via slicing
print(st[:6])
# Methods provided by the str type:
# find() searches from index 0 for the first occurrence of a substring and
# returns its index ('Hello' is found at index 0); it returns -1 when not found
print(st.find('Hello') >= 0)
# Capitalize the first letter
print("String: {0}, capitalized: {1}".format(st, st.capitalize()))
# Convert to lowercase
print("String: {0}, lowercase: {1}".format(st, st.lower()))
# Convert to uppercase
print("String: {0}, uppercase: {1}".format(st, st.upper()))
# Swap upper and lower case
print("String: {0}, case swapped: {1}".format(st, st.swapcase()))
# Split the string into a list on whitespace
print("String: {0}, as a list: {1}".format(st, st.split()))
# printf-style string formatting
print("%s's height is %d cm" % ("My brother", 180))
|
from collections import defaultdict
from cloudshell.shell.core.driver_context import AutoLoadAttribute, AutoLoadDetails, AutoLoadResource, ResourceCommandContext
class LegacyUtils(object):
def __init__(self):
self._datamodel_clss_dict = self.__generate_datamodel_classes_dict()
def migrate_autoload_details(self, autoload_details, context):
model_name = context.resource.model
root_name = context.resource.name
root = self.__create_resource_from_datamodel(model_name, root_name)
attributes = self.__create_attributes_dict(autoload_details.attributes)
self.__attach_attributes_to_resource(attributes, "", root)
        self.__build_sub_resources_hierarchy(root, autoload_details.resources, attributes)
return root
def __create_resource_from_datamodel(self, model_name, res_name):
return self._datamodel_clss_dict[model_name](res_name)
def __create_attributes_dict(self, attributes_lst):
d = defaultdict(list)
for attribute in attributes_lst:
d[attribute.relative_address].append(attribute)
return d
    def __build_sub_resources_hierarchy(self, root, sub_resources, attributes):
d = defaultdict(list)
for resource in sub_resources:
splitted = resource.relative_address.split("/")
parent = "" if len(splitted) == 1 else resource.relative_address.rsplit("/", 1)[0]
rank = len(splitted)
d[rank].append((parent, resource))
self.__set_models_hierarchy_recursively(d, 1, root, "", attributes)
    def __set_models_hierarchy_recursively(self, ranked, rank, manipulated_resource, resource_relative_addr, attributes):
        if rank not in ranked:  # no resources at this depth, recursion ends
            return
        for (parent, resource) in ranked[rank]:
            if parent == resource_relative_addr:
                sub_resource = self.__create_resource_from_datamodel(resource.model.replace(" ", ""), resource.name)
                self.__attach_attributes_to_resource(attributes, resource.relative_address, sub_resource)
                manipulated_resource.add_sub_resource(
                    self.__slice_parent_from_relative_path(parent, resource.relative_address), sub_resource
                )
                self.__set_models_hierarchy_recursively(ranked, rank + 1, sub_resource, resource.relative_address, attributes)
def __attach_attributes_to_resource(self, attributes, curr_relative_addr, resource):
for attribute in attributes[curr_relative_addr]:
setattr(resource, attribute.attribute_name.lower().replace(" ", "_"), attribute.attribute_value)
del attributes[curr_relative_addr]
    def __slice_parent_from_relative_path(self, parent, relative_addr):
        if parent == "":
            return relative_addr
        return relative_addr[len(parent) + 1:]  # + 1 so the separator is removed as well
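    # Illustration (hypothetical addresses): parent "1" with relative_addr "1/2"
    # yields "2"; the root resource (parent "") keeps the full address unchanged.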
def __generate_datamodel_classes_dict(self):
return dict(self.__collect_generated_classes())
def __collect_generated_classes(self):
import inspect
import sys
return inspect.getmembers(sys.modules[__name__], inspect.isclass)
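# A minimal usage sketch for LegacyUtils (hypothetical names; both arguments
# normally arrive via a CloudShell autoload command):
#   utils = LegacyUtils()
#   root = utils.migrate_autoload_details(autoload_details, context)
#   details = root.create_autoload_details()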
class Ixia_Chassis_Shell_2G(object):
def __init__(self, name):
""" """
self.attributes = {}
self.resources = {}
self._cloudshell_model_name = "Ixia Chassis Shell 2G"
self._name = name
def add_sub_resource(self, relative_path, sub_resource):
self.resources[relative_path] = sub_resource
@classmethod
def create_from_context(cls, context):
"""
        Creates an instance of Ixia_Chassis_Shell_2G from the given context
        :param context: cloudshell.shell.core.driver_context.ResourceCommandContext
        :type context: cloudshell.shell.core.driver_context.ResourceCommandContext
        :return:
        :rtype: Ixia_Chassis_Shell_2G
"""
result = Ixia_Chassis_Shell_2G(name=context.resource.name)
for attr in context.resource.attributes:
result.attributes[attr] = context.resource.attributes[attr]
return result
def create_autoload_details(self, relative_path=""):
"""
:param relative_path:
:type relative_path: str
        :return:
"""
resources = [
AutoLoadResource(
model=self.resources[r].cloudshell_model_name,
name=self.resources[r].name,
relative_address=self._get_relative_path(r, relative_path),
)
for r in self.resources
]
attributes = [AutoLoadAttribute(relative_path, a, self.attributes[a]) for a in self.attributes]
autoload_details = AutoLoadDetails(resources, attributes)
for r in self.resources:
curr_path = relative_path + "/" + r if relative_path else r
curr_auto_load_details = self.resources[r].create_autoload_details(curr_path)
autoload_details = self._merge_autoload_details(autoload_details, curr_auto_load_details)
return autoload_details
def _get_relative_path(self, child_path, parent_path):
"""
Combines relative path
        :param child_path: Path of a model within its parent model, e.g. 1
        :type child_path: str
        :param parent_path: Full path of the parent model, e.g. 1/1. Might be empty for the root model
        :type parent_path: str
        :return: Combined path
        :rtype: str
"""
return parent_path + "/" + child_path if parent_path else child_path
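    # e.g. _get_relative_path("2", "1") returns "1/2", while
    # _get_relative_path("1", "") returns "1" (values are illustrative).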
@staticmethod
def _merge_autoload_details(autoload_details1, autoload_details2):
"""
Merges two instances of AutoLoadDetails into the first one
:param autoload_details1:
:type autoload_details1: AutoLoadDetails
:param autoload_details2:
:type autoload_details2: AutoLoadDetails
:return:
        :rtype: AutoLoadDetails
"""
for attribute in autoload_details2.attributes:
autoload_details1.attributes.append(attribute)
for resource in autoload_details2.resources:
autoload_details1.resources.append(resource)
return autoload_details1
@property
def cloudshell_model_name(self):
"""
Returns the name of the Cloudshell model
:return:
"""
return "Ixia Chassis Shell 2G"
@property
def user(self):
"""
:rtype: str
"""
return self.attributes["Ixia Chassis Shell 2G.User"] if "Ixia Chassis Shell 2G.User" in self.attributes else None
@user.setter
def user(self, value):
"""
User with administrative privileges
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.User"] = value
@property
def password(self):
"""
:rtype: string
"""
return (
self.attributes["Ixia Chassis Shell 2G.Password"] if "Ixia Chassis Shell 2G.Password" in self.attributes else None
)
@password.setter
def password(self, value):
"""
Password
:type value: string
"""
self.attributes["Ixia Chassis Shell 2G.Password"] = value
@property
def controller_tcp_port(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.Controller TCP Port"]
if "Ixia Chassis Shell 2G.Controller TCP Port" in self.attributes
else None
)
@controller_tcp_port.setter
def controller_tcp_port(self, value):
"""
The TCP port of the traffic server. Relevant only in case an external server is configured. Default TCP port should be used if kept empty.
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.Controller TCP Port"] = value
@property
def controller_address(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.Controller Address"]
if "Ixia Chassis Shell 2G.Controller Address" in self.attributes
else None
)
@controller_address.setter
def controller_address(self, value):
"""
The IP address of the traffic server. Relevant only in case an external server is configured.
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.Controller Address"] = value
@property
def client_install_path(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.Client Install Path"]
if "Ixia Chassis Shell 2G.Client Install Path" in self.attributes
else None
)
@client_install_path.setter
def client_install_path(self, value):
"""
The path in which the traffic client is installed on the Execution Server. For example "C:/Program Files (x86)/Ixia/IxLoad/5.10-GA".
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.Client Install Path"] = value
@property
def power_management(self):
"""
:rtype: bool
"""
return (
self.attributes["Ixia Chassis Shell 2G.Power Management"]
if "Ixia Chassis Shell 2G.Power Management" in self.attributes
else None
)
@power_management.setter
def power_management(self, value=True):
"""
Used by the power management orchestration, if enabled, to determine whether to automatically manage the device power status. Enabled by default.
:type value: bool
"""
self.attributes["Ixia Chassis Shell 2G.Power Management"] = value
@property
def serial_number(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.Serial Number"]
if "Ixia Chassis Shell 2G.Serial Number" in self.attributes
else None
)
@serial_number.setter
def serial_number(self, value):
"""
The serial number of the resource.
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.Serial Number"] = value
@property
def server_description(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.Server Description"]
if "Ixia Chassis Shell 2G.Server Description" in self.attributes
else None
)
@server_description.setter
def server_description(self, value):
"""
        The full description of the server. Usually includes the OS, exact firmware version and additional characteristics of the device.
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.Server Description"] = value
@property
def name(self):
"""
:rtype: str
"""
return self._name
@name.setter
def name(self, value):
"""
:type value: str
"""
self._name = value
@property
def cloudshell_model_name(self):
"""
:rtype: str
"""
return self._cloudshell_model_name
@cloudshell_model_name.setter
def cloudshell_model_name(self, value):
"""
:type value: str
"""
self._cloudshell_model_name = value
@property
def model_name(self):
"""
:rtype: str
"""
return (
self.attributes["CS_TrafficGeneratorChassis.Model Name"]
if "CS_TrafficGeneratorChassis.Model Name" in self.attributes
else None
)
@model_name.setter
def model_name(self, value=""):
"""
The catalog name of the device model. This attribute will be displayed in CloudShell instead of the CloudShell model.
:type value: str
"""
self.attributes["CS_TrafficGeneratorChassis.Model Name"] = value
@property
def vendor(self):
"""
:rtype: str
"""
return (
self.attributes["CS_TrafficGeneratorChassis.Vendor"]
if "CS_TrafficGeneratorChassis.Vendor" in self.attributes
else None
)
@vendor.setter
def vendor(self, value):
"""
        The name of the device manufacturer.
:type value: str
"""
self.attributes["CS_TrafficGeneratorChassis.Vendor"] = value
@property
def version(self):
"""
:rtype: str
"""
return (
self.attributes["CS_TrafficGeneratorChassis.Version"]
if "CS_TrafficGeneratorChassis.Version" in self.attributes
else None
)
@version.setter
def version(self, value):
"""
The firmware version of the resource.
:type value: str
"""
self.attributes["CS_TrafficGeneratorChassis.Version"] = value
class GenericTrafficGeneratorModule(object):
def __init__(self, name):
""" """
self.attributes = {}
self.resources = {}
self._cloudshell_model_name = "Ixia Chassis Shell 2G.GenericTrafficGeneratorModule"
self._name = name
def add_sub_resource(self, relative_path, sub_resource):
self.resources[relative_path] = sub_resource
@classmethod
def create_from_context(cls, context):
"""
        Creates an instance of GenericTrafficGeneratorModule from the given context
        :param context: cloudshell.shell.core.driver_context.ResourceCommandContext
        :type context: cloudshell.shell.core.driver_context.ResourceCommandContext
        :return:
        :rtype: GenericTrafficGeneratorModule
"""
result = GenericTrafficGeneratorModule(name=context.resource.name)
for attr in context.resource.attributes:
result.attributes[attr] = context.resource.attributes[attr]
return result
def create_autoload_details(self, relative_path=""):
"""
:param relative_path:
:type relative_path: str
        :return:
"""
resources = [
AutoLoadResource(
model=self.resources[r].cloudshell_model_name,
name=self.resources[r].name,
relative_address=self._get_relative_path(r, relative_path),
)
for r in self.resources
]
attributes = [AutoLoadAttribute(relative_path, a, self.attributes[a]) for a in self.attributes]
autoload_details = AutoLoadDetails(resources, attributes)
for r in self.resources:
curr_path = relative_path + "/" + r if relative_path else r
curr_auto_load_details = self.resources[r].create_autoload_details(curr_path)
autoload_details = self._merge_autoload_details(autoload_details, curr_auto_load_details)
return autoload_details
def _get_relative_path(self, child_path, parent_path):
"""
Combines relative path
        :param child_path: Path of a model within its parent model, e.g. 1
        :type child_path: str
        :param parent_path: Full path of the parent model, e.g. 1/1. Might be empty for the root model
        :type parent_path: str
        :return: Combined path
        :rtype: str
"""
return parent_path + "/" + child_path if parent_path else child_path
@staticmethod
def _merge_autoload_details(autoload_details1, autoload_details2):
"""
Merges two instances of AutoLoadDetails into the first one
:param autoload_details1:
:type autoload_details1: AutoLoadDetails
:param autoload_details2:
:type autoload_details2: AutoLoadDetails
:return:
        :rtype: AutoLoadDetails
"""
for attribute in autoload_details2.attributes:
autoload_details1.attributes.append(attribute)
for resource in autoload_details2.resources:
autoload_details1.resources.append(resource)
return autoload_details1
@property
def cloudshell_model_name(self):
"""
Returns the name of the Cloudshell model
:return:
"""
return "GenericTrafficGeneratorModule"
@property
def version(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.GenericTrafficGeneratorModule.Version"]
if "Ixia Chassis Shell 2G.GenericTrafficGeneratorModule.Version" in self.attributes
else None
)
@version.setter
def version(self, value=""):
"""
The firmware version of the resource.
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.GenericTrafficGeneratorModule.Version"] = value
@property
def serial_number(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.GenericTrafficGeneratorModule.Serial Number"]
if "Ixia Chassis Shell 2G.GenericTrafficGeneratorModule.Serial Number" in self.attributes
else None
)
@serial_number.setter
def serial_number(self, value=""):
"""
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.GenericTrafficGeneratorModule.Serial Number"] = value
@property
def name(self):
"""
:rtype: str
"""
return self._name
@name.setter
def name(self, value):
"""
:type value: str
"""
self._name = value
@property
def cloudshell_model_name(self):
"""
:rtype: str
"""
return self._cloudshell_model_name
@cloudshell_model_name.setter
def cloudshell_model_name(self, value):
"""
:type value: str
"""
self._cloudshell_model_name = value
@property
def model_name(self):
"""
:rtype: str
"""
return (
self.attributes["CS_TrafficGeneratorModule.Model Name"]
if "CS_TrafficGeneratorModule.Model Name" in self.attributes
else None
)
@model_name.setter
def model_name(self, value=""):
"""
The catalog name of the device model. This attribute will be displayed in CloudShell instead of the CloudShell model.
:type value: str
"""
self.attributes["CS_TrafficGeneratorModule.Model Name"] = value
class GenericTrafficGeneratorPortGroup(object):
def __init__(self, name):
""" """
self.attributes = {}
self.resources = {}
self._cloudshell_model_name = "Ixia Chassis Shell 2G.GenericTrafficGeneratorPortGroup"
self._name = name
def add_sub_resource(self, relative_path, sub_resource):
self.resources[relative_path] = sub_resource
@classmethod
def create_from_context(cls, context):
"""
        Creates an instance of GenericTrafficGeneratorPortGroup from the given context
        :param context: cloudshell.shell.core.driver_context.ResourceCommandContext
        :type context: cloudshell.shell.core.driver_context.ResourceCommandContext
        :return:
        :rtype: GenericTrafficGeneratorPortGroup
"""
result = GenericTrafficGeneratorPortGroup(name=context.resource.name)
for attr in context.resource.attributes:
result.attributes[attr] = context.resource.attributes[attr]
return result
def create_autoload_details(self, relative_path=""):
"""
:param relative_path:
:type relative_path: str
        :return:
"""
resources = [
AutoLoadResource(
model=self.resources[r].cloudshell_model_name,
name=self.resources[r].name,
relative_address=self._get_relative_path(r, relative_path),
)
for r in self.resources
]
attributes = [AutoLoadAttribute(relative_path, a, self.attributes[a]) for a in self.attributes]
autoload_details = AutoLoadDetails(resources, attributes)
for r in self.resources:
curr_path = relative_path + "/" + r if relative_path else r
curr_auto_load_details = self.resources[r].create_autoload_details(curr_path)
autoload_details = self._merge_autoload_details(autoload_details, curr_auto_load_details)
return autoload_details
def _get_relative_path(self, child_path, parent_path):
"""
Combines relative path
        :param child_path: Path of a model within its parent model, e.g. 1
        :type child_path: str
        :param parent_path: Full path of the parent model, e.g. 1/1. Might be empty for the root model
        :type parent_path: str
        :return: Combined path
        :rtype: str
"""
return parent_path + "/" + child_path if parent_path else child_path
@staticmethod
def _merge_autoload_details(autoload_details1, autoload_details2):
"""
Merges two instances of AutoLoadDetails into the first one
:param autoload_details1:
:type autoload_details1: AutoLoadDetails
:param autoload_details2:
:type autoload_details2: AutoLoadDetails
:return:
        :rtype: AutoLoadDetails
"""
for attribute in autoload_details2.attributes:
autoload_details1.attributes.append(attribute)
for resource in autoload_details2.resources:
autoload_details1.resources.append(resource)
return autoload_details1
@property
def cloudshell_model_name(self):
"""
Returns the name of the Cloudshell model
:return:
"""
return "GenericTrafficGeneratorPortGroup"
@property
def name(self):
"""
:rtype: str
"""
return self._name
@name.setter
def name(self, value):
"""
:type value: str
"""
self._name = value
@property
def cloudshell_model_name(self):
"""
:rtype: str
"""
return self._cloudshell_model_name
@cloudshell_model_name.setter
def cloudshell_model_name(self, value):
"""
:type value: str
"""
self._cloudshell_model_name = value
class GenericTrafficGeneratorPort(object):
def __init__(self, name):
""" """
self.attributes = {}
self.resources = {}
self._cloudshell_model_name = "Ixia Chassis Shell 2G.GenericTrafficGeneratorPort"
self._name = name
def add_sub_resource(self, relative_path, sub_resource):
self.resources[relative_path] = sub_resource
@classmethod
def create_from_context(cls, context):
"""
        Creates an instance of GenericTrafficGeneratorPort from the given context
        :param context: cloudshell.shell.core.driver_context.ResourceCommandContext
        :type context: cloudshell.shell.core.driver_context.ResourceCommandContext
        :return:
        :rtype: GenericTrafficGeneratorPort
"""
result = GenericTrafficGeneratorPort(name=context.resource.name)
for attr in context.resource.attributes:
result.attributes[attr] = context.resource.attributes[attr]
return result
def create_autoload_details(self, relative_path=""):
"""
:param relative_path:
:type relative_path: str
        :return:
"""
resources = [
AutoLoadResource(
model=self.resources[r].cloudshell_model_name,
name=self.resources[r].name,
relative_address=self._get_relative_path(r, relative_path),
)
for r in self.resources
]
attributes = [AutoLoadAttribute(relative_path, a, self.attributes[a]) for a in self.attributes]
autoload_details = AutoLoadDetails(resources, attributes)
for r in self.resources:
curr_path = relative_path + "/" + r if relative_path else r
curr_auto_load_details = self.resources[r].create_autoload_details(curr_path)
autoload_details = self._merge_autoload_details(autoload_details, curr_auto_load_details)
return autoload_details
def _get_relative_path(self, child_path, parent_path):
"""
Combines relative path
        :param child_path: Path of a model within its parent model, e.g. 1
        :type child_path: str
        :param parent_path: Full path of the parent model, e.g. 1/1. Might be empty for the root model
        :type parent_path: str
        :return: Combined path
        :rtype: str
"""
return parent_path + "/" + child_path if parent_path else child_path
@staticmethod
def _merge_autoload_details(autoload_details1, autoload_details2):
"""
Merges two instances of AutoLoadDetails into the first one
:param autoload_details1:
:type autoload_details1: AutoLoadDetails
:param autoload_details2:
:type autoload_details2: AutoLoadDetails
:return:
        :rtype: AutoLoadDetails
"""
for attribute in autoload_details2.attributes:
autoload_details1.attributes.append(attribute)
for resource in autoload_details2.resources:
autoload_details1.resources.append(resource)
return autoload_details1
@property
def cloudshell_model_name(self):
"""
Returns the name of the Cloudshell model
:return:
"""
return "GenericTrafficGeneratorPort"
@property
def media_type(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.GenericTrafficGeneratorPort.Media Type"]
if "Ixia Chassis Shell 2G.GenericTrafficGeneratorPort.Media Type" in self.attributes
else None
)
@media_type.setter
def media_type(self, value):
"""
Interface media type. Possible values are Fiber and/or Copper (comma-separated).
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.GenericTrafficGeneratorPort.Media Type"] = value
@property
def name(self):
"""
:rtype: str
"""
return self._name
@name.setter
def name(self, value):
"""
:type value: str
"""
self._name = value
@property
def cloudshell_model_name(self):
"""
:rtype: str
"""
return self._cloudshell_model_name
@cloudshell_model_name.setter
def cloudshell_model_name(self, value):
"""
:type value: str
"""
self._cloudshell_model_name = value
@property
def max_speed(self):
"""
:rtype: str
"""
return (
self.attributes["CS_TrafficGeneratorPort.Max Speed"]
if "CS_TrafficGeneratorPort.Max Speed" in self.attributes
else None
)
@max_speed.setter
def max_speed(self, value):
"""
Max speed supported by the interface (default units - MB)
:type value: str
"""
self.attributes["CS_TrafficGeneratorPort.Max Speed"] = value
@property
def logical_name(self):
"""
:rtype: str
"""
return (
self.attributes["CS_TrafficGeneratorPort.Logical Name"]
if "CS_TrafficGeneratorPort.Logical Name" in self.attributes
else None
)
@logical_name.setter
def logical_name(self, value):
"""
        The port's logical name in the test configuration. If kept empty, allocation will be applied in the blueprint.
:type value: str
"""
self.attributes["CS_TrafficGeneratorPort.Logical Name"] = value
@property
def configured_controllers(self):
"""
:rtype: str
"""
return (
self.attributes["CS_TrafficGeneratorPort.Configured Controllers"]
if "CS_TrafficGeneratorPort.Configured Controllers" in self.attributes
else None
)
@configured_controllers.setter
def configured_controllers(self, value):
"""
        Specifies which controllers can be used with the ports (IxLoad controller, BP controller, etc.)
:type value: str
"""
self.attributes["CS_TrafficGeneratorPort.Configured Controllers"] = value
class GenericPowerPort(object):
def __init__(self, name):
""" """
self.attributes = {}
self.resources = {}
self._cloudshell_model_name = "Ixia Chassis Shell 2G.GenericPowerPort"
self._name = name
def add_sub_resource(self, relative_path, sub_resource):
self.resources[relative_path] = sub_resource
@classmethod
def create_from_context(cls, context):
"""
        Creates an instance of GenericPowerPort from the given context
        :param context: cloudshell.shell.core.driver_context.ResourceCommandContext
        :type context: cloudshell.shell.core.driver_context.ResourceCommandContext
        :return:
        :rtype: GenericPowerPort
"""
result = GenericPowerPort(name=context.resource.name)
for attr in context.resource.attributes:
result.attributes[attr] = context.resource.attributes[attr]
return result
def create_autoload_details(self, relative_path=""):
"""
:param relative_path:
:type relative_path: str
        :return:
"""
resources = [
AutoLoadResource(
model=self.resources[r].cloudshell_model_name,
name=self.resources[r].name,
relative_address=self._get_relative_path(r, relative_path),
)
for r in self.resources
]
attributes = [AutoLoadAttribute(relative_path, a, self.attributes[a]) for a in self.attributes]
autoload_details = AutoLoadDetails(resources, attributes)
for r in self.resources:
curr_path = relative_path + "/" + r if relative_path else r
curr_auto_load_details = self.resources[r].create_autoload_details(curr_path)
autoload_details = self._merge_autoload_details(autoload_details, curr_auto_load_details)
return autoload_details
def _get_relative_path(self, child_path, parent_path):
"""
Combines relative path
        :param child_path: Path of a model within its parent model, e.g. 1
        :type child_path: str
        :param parent_path: Full path of the parent model, e.g. 1/1. Might be empty for the root model
        :type parent_path: str
        :return: Combined path
        :rtype: str
"""
return parent_path + "/" + child_path if parent_path else child_path
@staticmethod
def _merge_autoload_details(autoload_details1, autoload_details2):
"""
Merges two instances of AutoLoadDetails into the first one
:param autoload_details1:
:type autoload_details1: AutoLoadDetails
:param autoload_details2:
:type autoload_details2: AutoLoadDetails
:return:
        :rtype: AutoLoadDetails
"""
for attribute in autoload_details2.attributes:
autoload_details1.attributes.append(attribute)
for resource in autoload_details2.resources:
autoload_details1.resources.append(resource)
return autoload_details1
@property
def cloudshell_model_name(self):
"""
Returns the name of the Cloudshell model
:return:
"""
return "GenericPowerPort"
@property
def model(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.GenericPowerPort.Model"]
if "Ixia Chassis Shell 2G.GenericPowerPort.Model" in self.attributes
else None
)
@model.setter
def model(self, value):
"""
The device model. This information is typically used for abstract resource filtering.
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.GenericPowerPort.Model"] = value
@property
def serial_number(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.GenericPowerPort.Serial Number"]
if "Ixia Chassis Shell 2G.GenericPowerPort.Serial Number" in self.attributes
else None
)
@serial_number.setter
def serial_number(self, value):
"""
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.GenericPowerPort.Serial Number"] = value
@property
def version(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.GenericPowerPort.Version"]
if "Ixia Chassis Shell 2G.GenericPowerPort.Version" in self.attributes
else None
)
@version.setter
def version(self, value):
"""
The firmware version of the resource.
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.GenericPowerPort.Version"] = value
@property
def port_description(self):
"""
:rtype: str
"""
return (
self.attributes["Ixia Chassis Shell 2G.GenericPowerPort.Port Description"]
if "Ixia Chassis Shell 2G.GenericPowerPort.Port Description" in self.attributes
else None
)
@port_description.setter
def port_description(self, value):
"""
The description of the port as configured in the device.
:type value: str
"""
self.attributes["Ixia Chassis Shell 2G.GenericPowerPort.Port Description"] = value
@property
def name(self):
"""
:rtype: str
"""
return self._name
@name.setter
def name(self, value):
"""
:type value: str
"""
self._name = value
@property
def cloudshell_model_name(self):
"""
:rtype: str
"""
return self._cloudshell_model_name
@cloudshell_model_name.setter
def cloudshell_model_name(self, value):
"""
:type value: str
"""
self._cloudshell_model_name = value
@property
def model_name(self):
"""
:rtype: str
"""
return self.attributes["CS_PowerPort.Model Name"] if "CS_PowerPort.Model Name" in self.attributes else None
@model_name.setter
def model_name(self, value=""):
"""
The catalog name of the device model. This attribute will be displayed in CloudShell instead of the CloudShell model.
:type value: str
"""
self.attributes["CS_PowerPort.Model Name"] = value
|
'''
Created on Jul 3, 2011
@author: kjell
'''
from random import random
from random import choice
import unittest
example_alphabet=['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']
def get_example_alphabet():
return example_alphabet
def generate_examples_for_base(base="dog", number_of_examples=100, poelap=0.03, poelenl=0.7, powlap=0.1, polmap=0.01, alphabet=example_alphabet):
'''
    Function that generates misspelled versions of a base, given probabilities
    defined by the parameters.
Parameters:
base = the base that the examples shall be generated for
poelap = probability of extra letter at position
poelenl = probability of extra letter equals neighbor letter
powlap = probability of wrong letter at position
polmap = probability of letter missing at position
number_of_examples = the number of examples that shall be generated
Returns:
A list of size number_of_examples containing versions of the base
'''
#Help functions:
def true_with_probability(probability):
return random() <= probability
def neighbors_at_position(base, position):
base_length = len(base)
if(position==0):
return [base[0]]
elif position < base_length:
return [base[position-1], base[position]]
else:
return [base[base_length-1]]
def actual_or_other(letter):
if(true_with_probability(polmap)):#Letter missing at position
return ""
else:
if(true_with_probability(powlap)):#Wrong letter at position
return choice(alphabet)
else:
return letter
def generate_example_for_base_from_pos(base,start_at_position=0):
if start_at_position > len(base):
return ""
else:
end = start_at_position == len(base)
char_at_pos = "" if end else actual_or_other(base[start_at_position])
rest = generate_example_for_base_from_pos(base,start_at_position+1)
if(true_with_probability(poelap)):#probability of extra letter
if(true_with_probability(poelenl)):#probability of extra letter equals to neighbor
neighbor = choice(neighbors_at_position(base, start_at_position))
return neighbor + char_at_pos + rest
else:
extra_letter = choice(alphabet)
return extra_letter + char_at_pos + rest
else:
return char_at_pos + rest
#Generate the examples
examples = []
for i in range(number_of_examples): #@UnusedVariable
examples.append(generate_example_for_base_from_pos(base))
return examples
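#Example (illustrative; the output is random per run): with the default
#probabilities most results stay close to the base, e.g.
#generate_examples_for_base("dog", number_of_examples=3) -> ['dog', 'dmog', 'dog']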
default_base_list = ["dog","cat","pig","love","hate",
"scala","python","summer","winter","night",
"daydream","nightmare","animal","happiness","sadness",
"tennis","feminism","fascism","socialism","capitalism"]
def generate_examples_for_bases(bases=default_base_list, number_of_examples=100, poelap=0.03, poelenl=0.7, powlap=0.1, polmap=0.01, alphabet=example_alphabet):
'''
Generate tuples for all bases in the list bases of the format:
(base, list of training examples for the bases)
See generate_examples_for_base for description of the rest of the parameters
'''
base_training_example_tuples = []
for base in bases:
base_training_example_tuples.append((base,generate_examples_for_base(base, number_of_examples, poelap, poelenl, powlap, polmap, alphabet)))
return base_training_example_tuples
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.test_base_']
unittest.main()
|
from .. import logger, parse_all_sections_symbols
from ..utils import wrap_script_entry_point
from optparse import OptionParser
import sys
from latex_symbol_manager.programs.collect.find_commands import find_all_commands
__all__ = ['lsm_extract_main']
usage = """
%prog -m main.tex -o compact.tex sources.tex ....
"""
def lsm_extract_main():
parser = OptionParser(usage)
parser.add_option("-m", "--main")
parser.add_option("-o", "--output")
(options, args) = parser.parse_args() # @UnusedVariable
main = options.main
out = options.output
sources = args
f = open(out, 'w')
try:
sections, symbols = parse_all_sections_symbols(sources)
logger.info('Loaded %d sections with %d symbols.\n' %
(len(sections), len(symbols)))
if not sections or not symbols:
raise Exception('Not enough data found.')
logger.info('Now looking for symbols')
commands = find_all_commands(main)
logger.info('I found %d commands' % len(commands))
done = set()
todo = set(commands)
while todo:
c = todo.pop()
if c in done:
continue
done.add(c)
if c in symbols:
logger.info('Found command %r' % c)
s = symbols[c]
f.write(s.tex_definition_short() + '\n')
todo.update(s.symbol_dependencies())
else:
logger.warning('Not found %r' % c)
    finally:
        f.close()
def main():
wrap_script_entry_point(lsm_extract_main, logger)
if __name__ == '__main__':
main()
|
# coding: utf-8
# Standard Python libraries
from pathlib import Path
from typing import Optional
# iprPy imports
from . import settings
from .tools import screen_input
def load_run_directory(name: Optional[str] = None):
"""
Loads a pre-defined run_directory from the settings file.
Parameters
----------
name : str, optional
The name assigned to a pre-defined run_directory.
Returns
-------
str
The path to the identified run_directory.
"""
# Ask for name if not given
if name is None:
run_directory_names = settings.list_run_directories
if len(run_directory_names) > 0:
print('Select a run_directory:')
for i, run_directory in enumerate(run_directory_names):
print(i+1, run_directory)
choice = screen_input(':')
try:
choice = int(choice)
except (ValueError, TypeError):
name = choice
else:
name = run_directory_names[choice-1]
else:
raise KeyError('No run_directories currently set')
try:
return Path(settings.run_directories[name])
except Exception as err:
raise ValueError(f'run_directory {name} not found') from err
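# A minimal usage sketch (the run_directory name 'master_1' is hypothetical;
# it must already be defined in the iprPy settings):
#   run_directory = load_run_directory('master_1')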
|
# -*- coding: utf-8 -*-
# @Time : 2020-05-22 09:27
# @Author : speeding_motor
from tensorflow import keras
import tensorflow as tf
from config import GRID_SIZE, BATCH_SIZE, ANCHOR_SIZE, ANCHORS, LAMBDA_COORD, LAMBDA_NOOBJ, LAMBDA_OBJ \
, THRESHOLD_IOU
from util.iou import IOU
class YoloLoss(keras.losses.Loss):
def __init__(self):
super(YoloLoss, self).__init__()
self.priors = tf.reshape(ANCHORS, [1, 1, 1, ANCHOR_SIZE, 2])
cell_x = tf.reshape(tf.tile(tf.range(GRID_SIZE), [GRID_SIZE]), shape=(1, GRID_SIZE, GRID_SIZE, 1, 1))
cell_y = tf.transpose(cell_x, perm=(0, 2, 1, 3, 4))
self.cell_grid = tf.tile(tf.concat([cell_x, cell_y], axis=-1), [BATCH_SIZE, 1, 1, ANCHOR_SIZE, 1])
def call(self, y_true, y_pred):
"""
it means there have box when the confidence of box > 0, confidence = IOU
"""
object_mask = tf.cast(y_true[..., 0], dtype=tf.float32) # whether have box or not[confidence, x, y, w, h]
object_mask_bool = tf.cast(object_mask, dtype=bool)
num_object_mask = tf.reduce_sum(object_mask)
pred_xy = tf.sigmoid(y_pred[..., 1:3]) + tf.cast(self.cell_grid, tf.float32)
pred_wh = tf.exp(y_pred[..., 3:5]) * self.priors
def coordinate_loss(true_boxs):
"""
coordinate loss contain the xy loss, wh loss
First: xy_loss ,sigmoid the predict xy, and then add the cell_grad,
notation: here we just need to compute the loss when the box have object ,if the box don't have box,
ignore the box coordinate loss
"""
object_mask_expand = tf.expand_dims(object_mask, axis=-1) # shape = [batch_size, h, w, anchor_num, 1]
true_xy = true_boxs[..., 1:3]
xy_loss = tf.reduce_sum(tf.square(true_xy - pred_xy) * object_mask_expand)
true_wh = true_boxs[..., 3:5]
wh_loss = tf.reduce_sum(tf.square(true_wh - pred_wh) * object_mask_expand)
return (xy_loss + wh_loss) * LAMBDA_COORD
def confidence_loss():
"""
true_conf: = iou between true_box and anchor box, iou(Intersection over union) wrong
true_conf = iou between true_box and pred_box, and then multiple the probability with object
conf_mask
"""
pred_conf = tf.sigmoid(y_pred[..., 0]) # adjust pred_conf to 0 ~ 1, shape = [batch_size, h, w, anchor_num]
pred_box = tf.concat([pred_xy, pred_wh], axis=-1)
# calculate the IOU between true_box and pred_box
# true_conf = IOU.iou(y_true[..., 1:5], pred_box) * y_true[..., 0]
ignore_mask = tf.TensorArray(dtype=tf.float32, size=1, dynamic_size=True)
def loop_body(b, ignore_mask):
""" get get iou between ground truth and pred_box """
true_box = tf.boolean_mask(y_true[b][..., 1: 5], object_mask_bool[b]) # shape = ()
true_box = tf.reshape(true_box, [1, 1, 1, -1, 4])
iou_scores = IOU.best_iou(true_box, pred_box[b]) # return the shape [13, 13, 5, len(true_box)]
best_ious = tf.reduce_max(iou_scores, axis=-1)
ignore_mask = ignore_mask.write(b, tf.cast(best_ious < THRESHOLD_IOU, dtype=tf.float32))
best_ious_debug = tf.boolean_mask(best_ious, best_ious > THRESHOLD_IOU)
return b + 1, ignore_mask
_, ignore_mask = tf.while_loop(lambda b, *args: b < BATCH_SIZE, loop_body, [0, ignore_mask])
ignore_mask = ignore_mask.stack()
obj_conf_loss = tf.reduce_sum(tf.square(1 - pred_conf) * object_mask)
noobj_conf_loss = tf.reduce_sum(tf.square(- pred_conf) * (1 - object_mask) * ignore_mask)
return obj_conf_loss * LAMBDA_OBJ + noobj_conf_loss * LAMBDA_NOOBJ
def class_loss():
true_class = y_true[..., 5:]
pred_class = y_pred[..., 5:]
loss_cell = tf.nn.softmax_cross_entropy_with_logits(true_class, pred_class, axis=-1)
c_loss = tf.reduce_sum(loss_cell * object_mask)
return c_loss
coord_loss = coordinate_loss(y_true[..., 0:5])
conf_loss = confidence_loss()
        cls_loss = class_loss()
        return (coord_loss + conf_loss + cls_loss) / BATCH_SIZE
# return conf_loss / BATCH_SIZE
if __name__ == '__main__':
loss = YoloLoss()
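    # Shape sketch (inferred from the indexing above, not from upstream code):
    # y_true and y_pred are [BATCH_SIZE, GRID_SIZE, GRID_SIZE, ANCHOR_SIZE,
    # 5 + num_classes], laid out as [confidence, x, y, w, h, class scores...].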
|
from backbone import *
import re
'''
Check that the CPU and memory usage of each node is within an acceptable range
'''
nodes = get_nodes_by_type('namenode')
nodes_data = get_nodes_by_type('datanode')
all_nodes = nodes + nodes_data
percentage = {}
for node in all_nodes:
for line in node.shellCmd('free').split('\n'):
if 'Mem:' in line :
search = re.search('Mem:\s+([0-9]+)\s+([0-9]+).* ',line)
total = search.group(1)
used = search.group(2)
percentage[node.getIp()] = float(float(used)/float(total))*100
else :
pass
for i in percentage :
logger.info('%s Memory utilization : %s' % (i,percentage[i]) )
if percentage[i] > 95 :
logger.error ( '%s Memory utilization : %s { THIS IS TOO HIGH }' % (i,percentage[i]) )
report.fail('%s Memory utilization : %s { THIS IS TOO HIGH }' % (i,percentage[i]) )
for node in all_nodes:
for line in node.shellCmd('mpstat').split('\n'):
if 'all' in line.lower() :
logger.info('%s CPU utilization : %s' % (node.getIp(),line.split()[2] ) )
if float(line.split()[2]) > 90 :
logger.error('%s CPU utilization : %s { THIS IS TOO HIGH }' % (node.getIp(),line.split()[2] ) )
report.fail('%s CPU utilization : %s { THIS IS TOO HIGH }' % (node.getIp(),line.split()[2] ))
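# Example 'free' output line matched by the regex above (illustrative numbers):
#   Mem:      16332912   15096900   ...   -> 15096900/16332912 ~= 92.4% used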
|
# Generated by Django 3.1.5 on 2021-01-27 12:42
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Members',
fields=[
('id', models.AutoField(primary_key=True, serialize=False)),
('userid', models.CharField(max_length=16, verbose_name='user_id')),
('userpassword', models.CharField(default='', max_length=16, null=True, verbose_name='user_password')),
('username', models.CharField(max_length=16, null=True, verbose_name='user_name')),
('usergender', models.CharField(default='', max_length=2, null=True, verbose_name='user_gender')),
('useremail', models.EmailField(max_length=128, verbose_name='user_email')),
('userphone', models.CharField(max_length=16, verbose_name='user_phone')),
('useraddress', models.CharField(default='', max_length=128, null=True, verbose_name='user_address')),
('created', models.DateTimeField(auto_now_add=True, verbose_name='register_time')),
('update', models.DateTimeField(auto_now_add=True, verbose_name='modify_time')),
],
options={
'verbose_name': 'World_Users',
'verbose_name_plural': 'World Users',
'db_table': 'world_users',
},
),
]
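# This initial migration is applied with Django's standard command (the app
# label 'members' is hypothetical):
#   python manage.py migrate members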
|
def number(lines):
return ['{}: {}'.format(i, a) for i, a in enumerate(lines, 1)]
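# Example: number(['alpha', 'beta']) returns ['1: alpha', '2: beta'].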
|
!ls
import pandas as pd
df=pd.read_csv("ov.tsv", sep="\t")
#df["normfactor"]=
len(df.columns)
for a in list(df["ID"]):
print(a)
df["normfac"]=df.sum(axis=1)
df["normfac"]=df["normfac"]/1000000
df["normfac"]
df[list(df)[:-1]]
df[df["ID"]=="120812A_mkdup_exonCov.tsv"][list(df)[:-1]].divide(10)
df[list(df)[:10]]
df[list(df)[:10]].divide(list(df["normfac"]), axis='rows')
for a in list(df["ID"]==""):
|
# Requires Python 3.4
import hashlib
import urllib.request
import urllib
import json
import base64
import message


def md5str(s):  # MD5 digest of a string
    m = hashlib.md5(s.encode(encoding="utf-8"))
    return m.hexdigest()


def md5(byte):  # MD5 digest of raw bytes
    return hashlib.md5(byte).hexdigest()


class DamatuApi():
    ID = message.damaId
    KEY = message.damaKey
    HOST = 'http://api.dama2.com:7766/app/'

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def getSign(self, param=b''):
        return (md5(bytes(self.KEY, encoding="utf8") + bytes(self.username, encoding="utf8") + param))[:8]

    def getPwd(self):
        return md5str(self.KEY + md5str(md5str(self.username) + md5str(self.password)))

    def post(self, path, params={}):
        data = urllib.parse.urlencode(params).encode('utf-8')
        url = self.HOST + path
        request = urllib.request.Request(url, data)
        return urllib.request.urlopen(request).read()

    # Upload a captcha for solving.
    # filePath: path to the captcha image, e.g. d:/1.jpg
    # type: captcha type code, see http://wiki.dama2.com/index.php?n=ApiDoc.Pricedesc
    # Returns the answer on success, or a negative error code on failure.
    def decode(self, filePath, type):
        f = open(filePath, 'rb')
        fdata = f.read()
        filedata = base64.b64encode(fdata)
        f.close()
        data = {'appID': self.ID,
                'user': self.username,
                'pwd': self.getPwd(),
                'type': type,
                'fileDataBase64': filedata,
                'sign': self.getSign(fdata)
                }
        res = self.post('d2File', data)
        res = str(res, encoding="utf-8")
        jres = json.loads(res)
        if jres['ret'] == 0:
            # the response JSON contains ret, id, result and cookie; take what you need
            result = ''
            str1 = jres['result']
            if str1.find('|') == -1:
                xy = str1.split(',')
                x = xy[0]
                y = str(int(xy[1]) - 28)
                result = x + ',' + y
            else:
                temp = str1.split('|')
                for t in temp:
                    xy = t.split(',')
                    x = xy[0]
                    y = str(int(xy[1]) - 20)
                    result += x + ',' + y + '|'
                result = result[:-1]
            return (result)
        else:
            return jres['ret']
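# Minimal usage sketch (hypothetical credentials and image path; the type code
# 200 is only a placeholder, pick the real one from the API docs linked above):
# dmt = DamatuApi('your_username', 'your_password')
# print(dmt.decode('captcha.jpg', 200))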
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
'''
Author: MK_Devil
Date: 2022-01-13 11:13:09
LastEditTime: 2022-01-14 11:51:42
LastEditors: MK_Devil
'''
import json

file_mat = open('material.txt', 'r+')
file_mat.seek(0, 2)  # jump to the end so new entries are appended
while True:
    get_str = input('Enter a name, a yes/no flag and four values separated by spaces; type exit to finish\n')
    if get_str == 'exit':
        break
    get_list = get_str.split(' ')
    print(get_list)
    save = input('Save? y/n: ')
    if save == 'y':
        # minimal sketch of the intended save: append the entry as one JSON line
        file_mat.write(json.dumps(get_list, ensure_ascii=False) + '\n')
file_mat.close()
|
import math
import sys
import os
#import scipy
#from astLib import astWCS
from kapteyn import wcs
import pyfits
class FoundStar:
pass
if (len(sys.argv) < 3 or len(sys.argv) > 4):
    print "Usage: convert_hstphot_list list.txt ref_raw_data_image.fits [ref_drizzled_SCI_image.fits]"
    sys.exit(1)
#run distortion correction
hst_distort_location="~/software/hstphot1.1/distort"
list_filename=sys.argv[1]
raw_data_ref_image_filename=sys.argv[2]
HLA_astrometry_corrected_image_filename=""
if (len(sys.argv) == 4):
    HLA_astrometry_corrected_image_filename = sys.argv[3]
####################################################################################################################
#Run the distortion correction program on the raw output from HSTphot
##Rotation of 0.28 degrees is prescribed by Holtzman1995 to adjust the pixel co-ords to the nominal WCS,
##This must be added as a command line parameter to distort.
rotation_suffix = ""
#rotation_suffix = str(0.28+135)
print "Running distortion correction (local / global)"
hst_distort_command = hst_distort_location +" "+ list_filename + " "+ "local_distorted_" + list_filename+" "+ "0 "+rotation_suffix
print hst_distort_command
os.system(hst_distort_command)
hst_distort_command = hst_distort_location +" "+ list_filename + " "+ "global_distorted_" + list_filename+" "+ "1 "+rotation_suffix
print hst_distort_command
os.system(hst_distort_command)
####################################################################################################################
##Load stars from local list
local_list_file = open("local_distorted_"+list_filename,'rb')
local_list_lines=local_list_file.readlines()
print len(local_list_lines)
local_pix_stars_list=[]
#
for i in range(0, len(local_list_lines)):
    if (local_list_lines[i][0] == '#'): continue
    tokens = local_list_lines[i].split()
    this_star = FoundStar()
    this_star.detector = int(tokens[0])
    this_star.x = float(tokens[1])
    this_star.y = float(tokens[2])
    this_star.chi = float(tokens[3])
    this_star.signal = float(tokens[4])
    this_star.sharpness = float(tokens[5])
    this_star.roundness = float(tokens[6])
    # this_star.major_axis = float(tokens[7])
    this_star.object_type = float(tokens[8])
    # this_star.filter_counts = float(tokens[9])
    # this_star.filter_bg = float(tokens[10])
    this_star.filter_flight_sys_mag = float(tokens[11])
    # this_star.filter_standard_mag = float(tokens[12])
    local_pix_stars_list += [this_star]
##Load stars from global list:
global_list_file = open("global_distorted_"+list_filename,'rb')
global_list_lines=global_list_file.readlines()
global_pix_stars_list=[]
#
for i in range(0, len(global_list_lines)):
    if (global_list_lines[i][0] == '#'): continue
    tokens = global_list_lines[i].split()
    this_star = FoundStar()
    this_star.detector = -1
    this_star.x = float(tokens[0])
    this_star.y = float(tokens[1])
    this_star.chi = float(tokens[2])
    this_star.signal = float(tokens[3])
    this_star.sharpness = float(tokens[4])
    this_star.roundness = float(tokens[5])
    this_star.object_type = float(tokens[8-1])
    this_star.filter_flight_sys_mag = float(tokens[11-1])
    global_pix_stars_list += [this_star]
####################################################################################################################
##Calculate CRPIX new position in global transformed co-ordinates
raw_hdulist = pyfits.open(raw_data_ref_image_filename)
raw_header = raw_hdulist[0].header
raw_CRPIX1 = raw_header["CRPIX1"]
raw_CRPIX2 = raw_header["CRPIX2"]
#convert to global distorted coords
print "Raw crpix1", raw_CRPIX1
#modify the header so it uses the corrected CRPIX values
distortion_corrected_header=raw_header.copy()
import global_PC_distort
transformed_crpix = global_PC_distort.transform_to_global_pixel_coords(0, raw_CRPIX1,raw_CRPIX2 )
#transformed_crpix = global_PC_distort.transform_to_global_pixel_coords(0, raw_CRPIX1,raw_CRPIX2, 0.28*180/math.pi )
distortion_corrected_header["CRPIX1"]=transformed_crpix[0]
distortion_corrected_header["CRPIX2"]=transformed_crpix[1]
print "Raw crpix1", raw_header["CRPIX1"]
print "corrected crpix1", distortion_corrected_header["CRPIX1"]
#now load it as a pixel / WCS transformation
distortion_corrected_map = (wcs.Projection(distortion_corrected_header)).sub(nsub=2)
##########################################################################
#Chip to chip adjustments are approximate alterations to the Holtzman 1995 solution as determined by eye, by comparison with drizzled mosaics from HLA in 2010
#NB the HLA mosaics presumably use the Anderson 2003 solution, hence the discrepancy.
#Ideally the distortion correction should be rewritten to match Anderson's, but this is a stop gap measure that produces aesthetically pleasing (And presumably, approximately correct) plots.
WF_to_PC_pix = 2.187 #approximate pixel size ratio
#chip_adjustments_x=[0, 0, 0, 0]
#chip_adjustments_y=[0, 0, 0, 0]
chip_adjustments_x=[0, 0.6*WF_to_PC_pix, 1.3*WF_to_PC_pix, 0]
chip_adjustments_y=[0, 1.8*WF_to_PC_pix, 2.37*WF_to_PC_pix, 1*WF_to_PC_pix]
####################################################################################
#Ok, now calculate shift in sky coordinates as read from header of astrometry adjusted drizzle file
astrometry_correction_delta_CRVAL=(0,0)
if(HLA_astrometry_corrected_image_filename!=""):
HLA_hdulist = pyfits.open(HLA_astrometry_corrected_image_filename)
HLA_header = HLA_hdulist[1].header
current_CRVAL = (float(HLA_header["CRVAL1"]), float(HLA_header["CRVAL2"]))
original_CRVAL = (float(HLA_header["O_CRVAL1"]), float(HLA_header["O_CRVAL2"]))
astrometry_correction_delta_CRVAL= (current_CRVAL[0]-original_CRVAL[0], current_CRVAL[1]-original_CRVAL[1])
pass
####################################################################################
#Apply adjustment for chipnum in pixel space, then transform to sky-coords,
#then apply adjustment for CRVAL shift as determined from HLA mosaic with updated astrometry
counter=0
global_WCS_stars_good_lists=[[],[],[],[]] ##4 empty lists, will hold tuples of sky co-ords
global_WCS_stars_reject_lists=[[],[],[],[]] ##4 empty lists
for star_num in range(len(global_pix_stars_list)):
    star = global_pix_stars_list[star_num]
    chip_num = local_pix_stars_list[star_num].detector
    if (global_pix_stars_list[star_num].chi != local_pix_stars_list[star_num].chi):
        print "List mismatch!"
        sys.exit()
    pixel_position = star.x + chip_adjustments_x[chip_num], star.y + chip_adjustments_y[chip_num]
    original_sky_position = distortion_corrected_map.toworld(pixel_position)
    corrected_sky_position = (original_sky_position[0] + astrometry_correction_delta_CRVAL[0], original_sky_position[1] + astrometry_correction_delta_CRVAL[1])
    # alternative selection cuts tried previously:
    # if(1):  ## pass all markers
    # if( (star.signal>50 and star.chi<1.5 and star.roundness<0.5) ):  # or (star.signal>35 and star.chi<1) ): # over 12 images
    # if( star.chi<5 and star.sharpness>-0.5 and star.sharpness<0.5 and star.filter_flight_sys_mag<18):
    if (star.filter_flight_sys_mag < 20):
        global_WCS_stars_good_lists[chip_num].append(corrected_sky_position)
    else:
        global_WCS_stars_reject_lists[chip_num].append(corrected_sky_position)
    print counter, "of", len(global_pix_stars_list), "(detector", chip_num, ");"
    counter += 1
##############################################################################################
#Output region files:
def output_ds9_region_file(output_filename, list_of_FK5_tuples, colour="green"):
    region_file = open(output_filename, 'w')
    file_header = "# Region file format: DS9 version 4.1" + "\n"
    file_header += "# Filename:" + "\n"
    file_header += "global color=" + colour + " dashlist=8 3 width=1 font=\"helvetica 10 normal\" select=1 highlite=1 dash=0" + "\n"
    file_header += "fk5" + "\n"
    region_file.write(file_header)
    for star_WCS_coords in list_of_FK5_tuples:
        output_line = "point(" + str(star_WCS_coords[0]) + "," + str(star_WCS_coords[1]) + ") # point=x" + "\n"
        region_file.write(output_line)
    region_file.close()
output_filename_base = "WCS_"+list_filename.rsplit('.',1)[0]
#reject_output_filename_base = "WCS_"+list_filename.rsplit('.',1)[0]+"_fail"
all_global_WCS_stars_good_list=[]
all_global_WCS_stars_reject_list=[]
for chip_index in range(len(global_WCS_stars_good_lists)):
    chip_list = global_WCS_stars_good_lists[chip_index]
    output_ds9_region_file(output_filename_base + "_chip" + str(chip_index) + "_pass.reg", chip_list, "green")
    all_global_WCS_stars_good_list.extend(chip_list)
output_ds9_region_file(output_filename_base+"_all_pass.reg", all_global_WCS_stars_good_list, "green")
for chip_index in range(len(global_WCS_stars_reject_lists)):
    chip_list = global_WCS_stars_reject_lists[chip_index]
    output_ds9_region_file(output_filename_base + "_chip" + str(chip_index) + "_fail.reg", chip_list, "red")
    all_global_WCS_stars_reject_list.extend(chip_list)
output_ds9_region_file(output_filename_base+"_all_fail.reg", all_global_WCS_stars_reject_list, "red")
print "Output",len(all_global_WCS_stars_good_list),"selected stars"
print "Output",len(all_global_WCS_stars_reject_list),"rejected stars"
|
#!/usr/bin/python
import sys
import pickle
sys.path.append("../tools/")
import pandas as pd
import matplotlib.pyplot
import pprint
pp = pprint.PrettyPrinter(indent=4)
from feature_format import featureFormat, targetFeatureSplit
from tester import dump_classifier_and_data
from sklearn import tree,metrics,cross_validation
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
from sklearn.grid_search import GridSearchCV
from sklearn.naive_bayes import GaussianNB
### Task 1: Select what features you'll use.
### features_list is a list of strings, each of which is a feature name.
### The first feature must be "poi".
features_list = ['poi','exercised_stock_options', 'total_stock_value', 'salary', 'deferred_income', 'expenses'] # You will need to use more features
with open("final_project_dataset.pkl", "r") as data_file:
data_dict = pickle.load(data_file)
print len(data_dict)  # 146 data points in total
# 18 persons of interest are labeled
poi_count = 0
for person_name in data_dict:
    if data_dict[person_name]["poi"] == 1:
        poi_count = poi_count + 1
print(poi_count)
# count missing ("NaN") values per feature
def count_nan(dataset):
    d = {}
    for person in dataset:
        for key, value in dataset[person].iteritems():
            if value == "NaN":
                if key in d:
                    d[key] += 1
                else:
                    d[key] = 1
    return d
print "* List of NaNs per feature:"
pp.pprint(count_nan(data_dict))
### Task 2: Remove outliers
# find the record with the largest bonus (skipping "NaN" placeholders)
max_bonus = 0
max_name = ""
for i in data_dict:
    if (data_dict[i]["bonus"] != "NaN" and data_dict[i]["bonus"] > max_bonus):
        max_bonus = data_dict[i]["bonus"]
        max_name = i
print "Outlier:", max_name
data_dict.pop("TOTAL", 0)  # remove the outlier
data_dict.pop("THE TRAVEL AGENCY IN THE PARK", 0)
data = data_dict.pop("LOCKHART EUGENE E", 0)
### Task 3: Create new feature(s)
def poi_email_ratio(from_poi_to_this_person, to_messages):
    if from_poi_to_this_person == 'NaN' or to_messages == 'NaN':
        to_poi_ratio = 0
    else:
        to_poi_ratio = float(from_poi_to_this_person) / to_messages
    return to_poi_ratio
# visualize the new features to estimate their impact on the algorithm
for employee, persons in data_dict.iteritems():
    if persons['from_this_person_to_poi'] == 'NaN' or persons['from_messages'] == 'NaN':
        persons['to_poi_ratio'] = 'NaN'
    else:
        persons['to_poi_ratio'] = float(persons['from_this_person_to_poi']) / float(persons['from_messages'])
    if persons['from_poi_to_this_person'] == 'NaN' or persons['to_messages'] == 'NaN':
        persons['from_poi_ratio'] = 'NaN'
    else:
        persons['from_poi_ratio'] = float(persons['from_poi_to_this_person']) / float(persons['to_messages'])
features = ["to_poi_ratio","from_poi_ratio","poi"]
data = featureFormat(data_dict, features)
for point in data:
    to_poi_ratio = point[0]
    from_poi_ratio = point[1]
    poi = point[2]
    if poi == 1:
        matplotlib.pyplot.scatter(to_poi_ratio, from_poi_ratio, c='r')
    if poi == 0:
        matplotlib.pyplot.scatter(to_poi_ratio, from_poi_ratio, c='b')
matplotlib.pyplot.xlabel("to_poi_ratio")
matplotlib.pyplot.ylabel("from_poi_ratio")
matplotlib.pyplot.show()
### Store to my_dataset for easy export below. Test the impact on algorithm performance.
my_dataset = data_dict
for key in my_dataset:
    my_dataset[key]['to_poi_ratio'] = poi_email_ratio(my_dataset[key]['from_poi_to_this_person'], my_dataset[key]['to_messages'])
# vary features_list to include/exclude the new feature and report recall and
# precision with tester.py to measure its impact (see the sketch below)
### Extract features and labels from dataset for local testing
data = featureFormat(my_dataset, features_list)
label, features = targetFeatureSplit(data)
features_train, features_test, labels_train, labels_test = train_test_split(features, label, test_size=0.1)
clf_addnew = tree.DecisionTreeClassifier()
clf_addnew = clf_addnew.fit(features_train, labels_train)
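# Sketch of the feature-impact comparison described above (assumption: the
# course starter kit's tester.py exposes test_classifier(clf, dataset, feature_list)):
# from tester import test_classifier
# test_classifier(clf_addnew, my_dataset, features_list + ['to_poi_ratio'])  # with the new feature
# test_classifier(clf_addnew, my_dataset, features_list)                     # without it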
### Task 4: Try a varity of classifiers
### Please name your classifier clf for easy export below.
# train baseline classifiers directly
# Naive Bayes
clf_bay = GaussianNB()
clf_bay.fit(features_train, labels_train)
# decision tree
clf_tree = tree.DecisionTreeClassifier()
clf_tree = clf_tree.fit(features_train, labels_train)
### Task 5: Tune your classifier to achieve better than .3 precision and recall
### using our testing script. Check the tester.py script in the final project
### folder for details on the evaluation method, especially the test_classifier
### function. Because of the small size of the dataset, the script uses
### stratified shuffle split cross validation. For more info:
### http://scikit-learn.org/stable/modules/generated/sklearn.cross_validation.StratifiedShuffleSplit.html
# automatic hyperparameter tuning for the decision tree
clf = DecisionTreeClassifier()
parameters = {'criterion': ('gini', 'entropy'), 'max_depth': [None, 2, 5, 10], 'min_samples_leaf': [1, 5, 10], 'max_leaf_nodes': [None, 5, 10, 20], 'min_samples_split': [2, 10, 20]}
grid = GridSearchCV(clf, parameters, scoring='f1')
clf = grid.fit(features_train, labels_train)
print "best estimator:",clf.best_estimator_
print "best score:", clf.best_score_
clf=clf.best_estimator_
# predict with the best parameters
clf = clf.fit(features_train, labels_train)
print data_dict
### Task 6: Dump your classifier, dataset, and features_list so anyone can
### check your results. You do not need to change anything below, but make sure
### that the version of poi_id.py that you submit can be run on its own and
### generates the necessary .pkl files for validating your results.
dump_classifier_and_data(clf, my_dataset, features_list)
|
class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next


class Solution:
    def reverseList(self, head: ListNode) -> ListNode:
        # push every value onto a stack, then pop to rebuild the list in reverse
        stack = []
        ans = ListNode(0)  # dummy head
        node = ans
        while head:
            stack.append(head.val)
            head = head.next
        while stack:
            node.next = ListNode(stack.pop())
            node = node.next
        return ans.next
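# The stack above costs O(n) extra space; a common O(1)-space alternative is
# in-place pointer reversal, shown here as a sketch:
def reverse_list_iterative(head: ListNode) -> ListNode:
    prev = None
    while head:
        nxt = head.next   # remember the remainder of the list
        head.next = prev  # point this node backwards
        prev, head = head, nxt
    return prev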
|
from django.urls import re_path
from election_snooper import views
urlpatterns = [
re_path(
r"^$", views.SnoopedElectionView.as_view(), name="snooped_election_view"
),
re_path(
r"^moderation_queue/$",
views.ModerationQueueView.as_view(),
name="election_moderation_queue",
),
]
|
# Generated by Django 2.2.6 on 2019-12-30 08:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('work', '0074_auto_20191230_0808'),
]
operations = [
migrations.AlterField(
model_name='dprqty',
name='dtr_100',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='dtr_25',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='dtr_63',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='hh_apl_free',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='hh_apl_not_free',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='hh_bpl',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='hh_bpl_metered',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='hh_metered',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='hh_unmetered',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='ht',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='ht_conductor',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='lt_1p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='lt_3p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='pole_9m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='pole_ht_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='dprqty',
name='pole_lt_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='dtr_100',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='dtr_25',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='dtr_63',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='hh_apl_free',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='hh_apl_not_free',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='hh_bpl',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='hh_bpl_metered',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='hh_metered',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='hh_unmetered',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='ht',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='ht_conductor',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='lt_1p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='lt_3p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='pole_9m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='pole_ht_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicaldprqty',
name='pole_lt_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqty',
name='dtr_100',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqty',
name='dtr_25',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqty',
name='dtr_63',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqty',
name='ht',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqty',
name='ht_conductor',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqty',
name='lt_1p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqty',
name='lt_3p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqty',
name='pole_9m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqty',
name='pole_ht_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqty',
name='pole_lt_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqtyextra',
name='dtr_100',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqtyextra',
name='dtr_25',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqtyextra',
name='dtr_63',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqtyextra',
name='ht',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqtyextra',
name='ht_conductor',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqtyextra',
name='lt_1p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqtyextra',
name='lt_3p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqtyextra',
name='pole_9m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqtyextra',
name='pole_ht_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalprogressqtyextra',
name='pole_lt_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalsurveyqty',
name='dtr_100',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalsurveyqty',
name='dtr_25',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalsurveyqty',
name='dtr_63',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalsurveyqty',
name='ht',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalsurveyqty',
name='ht_conductor',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalsurveyqty',
name='lt_1p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalsurveyqty',
name='lt_3p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='historicalsurveyqty',
name='pole_9m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalsurveyqty',
name='pole_ht_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='historicalsurveyqty',
name='pole_lt_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='loa',
name='dtr_100',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='loa',
name='dtr_25',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='loa',
name='dtr_63',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='loa',
name='ht',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='loa',
name='ht_conductor',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='loa',
name='lt_1p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='loa',
name='lt_3p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='loa',
name='pole_9m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='loa',
name='pole_ht_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='loa',
name='pole_lt_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqty',
name='dtr_100',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqty',
name='dtr_25',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqty',
name='dtr_63',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqty',
name='ht',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='progressqty',
name='ht_conductor',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='progressqty',
name='lt_1p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='progressqty',
name='lt_3p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='progressqty',
name='pole_9m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqty',
name='pole_ht_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqty',
name='pole_lt_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqtyextra',
name='dtr_100',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqtyextra',
name='dtr_25',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqtyextra',
name='dtr_63',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqtyextra',
name='ht',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='progressqtyextra',
name='ht_conductor',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='progressqtyextra',
name='lt_1p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='progressqtyextra',
name='lt_3p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='progressqtyextra',
name='pole_9m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqtyextra',
name='pole_ht_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='progressqtyextra',
name='pole_lt_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='surveyqty',
name='dtr_100',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='surveyqty',
name='dtr_25',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='surveyqty',
name='dtr_63',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='surveyqty',
name='ht',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='surveyqty',
name='ht_conductor',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='surveyqty',
name='lt_1p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='surveyqty',
name='lt_3p',
field=models.FloatField(default=0),
),
migrations.AlterField(
model_name='surveyqty',
name='pole_9m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='surveyqty',
name='pole_ht_8m',
field=models.IntegerField(default=0),
),
migrations.AlterField(
model_name='surveyqty',
name='pole_lt_8m',
field=models.IntegerField(default=0),
),
]
|
"""Core settings."""
import collections
from django import forms
from django.conf import settings
from django.contrib.auth import password_validation
from django.utils.translation import gettext as _, gettext_lazy
from modoboa.core.password_hashers import get_dovecot_schemes
from modoboa.core.password_hashers.base import PasswordHasher
from modoboa.lib import fields as lib_fields
from modoboa.lib.form_utils import (
HorizontalRadioSelect, SeparatorField, YesNoField
)
from modoboa.parameters import forms as param_forms, tools as param_tools
from . import constants
from . import sms_backends
def enabled_applications():
"""Return the list of installed extensions."""
from modoboa.core.extensions import exts_pool
result = [("user", _("User profile"))]
for extension in exts_pool.list_all():
if "topredirection_url" not in extension:
continue
result.append((extension["name"], extension["label"]))
return sorted(result, key=lambda e: e[0])
class GeneralParametersForm(param_forms.AdminParametersForm):
"""General parameters."""
app = "core"
sep1 = SeparatorField(label=gettext_lazy("Authentication"))
authentication_type = forms.ChoiceField(
label=gettext_lazy("Authentication type"),
choices=[("local", gettext_lazy("Local")),
("ldap", "LDAP")],
initial="local",
help_text=gettext_lazy("The backend used for authentication"),
widget=HorizontalRadioSelect()
)
password_scheme = forms.ChoiceField(
label=gettext_lazy("Default password scheme"),
choices=[],
initial="sha512crypt",
help_text=gettext_lazy("Scheme used to crypt mailbox passwords"),
)
rounds_number = forms.IntegerField(
label=gettext_lazy("Rounds"),
initial=70000,
help_text=gettext_lazy(
"Number of rounds to use (only used by sha256crypt and "
"sha512crypt). Must be between 1000 and 999999999, inclusive."
),
)
update_scheme = YesNoField(
label=gettext_lazy("Update password scheme at login"),
initial=True,
help_text=gettext_lazy(
"Update user password at login to use the default password scheme"
)
)
default_password = forms.CharField(
label=gettext_lazy("Default password"),
initial="password",
help_text=gettext_lazy(
"Default password for automatically created accounts.")
)
random_password_length = forms.IntegerField(
label=gettext_lazy("Random password length"),
min_value=8,
initial=8,
help_text=gettext_lazy(
"Length of randomly generated passwords.")
)
update_password_url = forms.URLField(
label=gettext_lazy("Update password service URL"),
initial="",
required=False,
help_text=gettext_lazy(
"The URL of an external page where users will be able"
" to update their password. It applies only to non local"
" users, ie. those automatically created after a successful"
" external authentication (LDAP, SMTP)."
)
)
password_recovery_msg = forms.CharField(
label=gettext_lazy("Password recovery announcement"),
initial="",
required=False,
widget=forms.widgets.Textarea(),
help_text=gettext_lazy(
"A temporary message that will be displayed on the "
"reset password page."
)
)
sms_password_recovery = YesNoField(
label=gettext_lazy("Enable password recovery by SMS"),
initial=False,
help_text=gettext_lazy(
"Enable password recovery by SMS for users who filled "
"a phone number."
)
)
sms_provider = forms.ChoiceField(
label=gettext_lazy("SMS provider"),
choices=constants.SMS_BACKENDS,
help_text=gettext_lazy(
"Choose a provider to send password recovery SMS"
),
required=False
)
# LDAP specific settings
ldap_sep = SeparatorField(label=gettext_lazy("LDAP settings"))
ldap_server_address = forms.CharField(
label=gettext_lazy("Server address"),
initial="localhost",
help_text=gettext_lazy(
"The IP address or the DNS name of the LDAP server"),
)
ldap_server_port = forms.IntegerField(
label=gettext_lazy("Server port"),
initial=389,
help_text=gettext_lazy("The TCP port number used by the LDAP server"),
)
ldap_enable_secondary_server = YesNoField(
label=gettext_lazy("Enable secondary server (fallback)"),
initial=False,
help_text=gettext_lazy(
"Enable a secondary LDAP server which will be used "
"if the primary one fails"
)
)
ldap_secondary_server_address = forms.CharField(
label=gettext_lazy("Secondary server address"),
initial="localhost",
help_text=gettext_lazy(
"The IP address or the DNS name of the seondary LDAP server"),
)
ldap_secondary_server_port = forms.IntegerField(
label=gettext_lazy("Secondary server port"),
initial=389,
help_text=gettext_lazy(
"The TCP port number used by the LDAP secondary server"),
)
ldap_secured = forms.ChoiceField(
label=gettext_lazy("Use a secured connection"),
choices=constants.LDAP_SECURE_MODES,
initial="none",
help_text=gettext_lazy(
"Use an SSL/STARTTLS connection to access the LDAP server")
)
ldap_is_active_directory = YesNoField(
label=gettext_lazy("Active Directory"),
initial=False,
help_text=gettext_lazy(
"Tell if the LDAP server is an Active Directory one")
)
ldap_admin_groups = forms.CharField(
label=gettext_lazy("Administrator groups"),
initial="",
help_text=gettext_lazy(
"Members of those LDAP Posix groups will be created as domain "
"administrators. Use ';' characters to separate groups."
),
required=False
)
ldap_group_type = forms.ChoiceField(
label=gettext_lazy("Group type"),
initial="posixgroup",
choices=constants.LDAP_GROUP_TYPES,
help_text=gettext_lazy(
"The LDAP group type to use with your directory."
)
)
ldap_groups_search_base = forms.CharField(
label=gettext_lazy("Groups search base"),
initial="",
help_text=gettext_lazy(
"The distinguished name of the search base used to find groups"
),
required=False
)
ldap_password_attribute = forms.CharField(
label=gettext_lazy("Password attribute"),
initial="userPassword",
help_text=gettext_lazy("The attribute used to store user passwords"),
)
# LDAP authentication settings
ldap_auth_sep = SeparatorField(
label=gettext_lazy("LDAP authentication settings"))
ldap_auth_method = forms.ChoiceField(
label=gettext_lazy("Authentication method"),
choices=[("searchbind", gettext_lazy("Search and bind")),
("directbind", gettext_lazy("Direct bind"))],
initial="searchbind",
help_text=gettext_lazy("Choose the authentication method to use"),
)
ldap_bind_dn = forms.CharField(
label=gettext_lazy("Bind DN"),
initial="",
help_text=gettext_lazy(
"The distinguished name to use when binding to the LDAP server. "
"Leave empty for an anonymous bind"
),
required=False,
)
ldap_bind_password = forms.CharField(
label=gettext_lazy("Bind password"),
initial="",
help_text=gettext_lazy(
"The password to use when binding to the LDAP server "
"(with 'Bind DN')"
),
widget=forms.PasswordInput(render_value=True),
required=False
)
ldap_search_base = forms.CharField(
label=gettext_lazy("Users search base"),
initial="",
help_text=gettext_lazy(
"The distinguished name of the search base used to find users"
),
required=False,
)
ldap_search_filter = forms.CharField(
label=gettext_lazy("Search filter"),
initial="(mail=%(user)s)",
help_text=gettext_lazy(
"An optional filter string (e.g. '(objectClass=person)'). "
"In order to be valid, it must be enclosed in parentheses."
),
required=False,
)
ldap_user_dn_template = forms.CharField(
label=gettext_lazy("User DN template"),
initial="",
help_text=gettext_lazy(
"The template used to construct a user's DN. It should contain "
"one placeholder (ie. %(user)s)"
),
required=False,
)
# LDAP sync. settings
ldap_sync_sep = SeparatorField(
label=gettext_lazy("LDAP synchronization settings"))
ldap_sync_bind_dn = forms.CharField(
label=gettext_lazy("Bind DN"),
initial="",
help_text=gettext_lazy(
"The distinguished name to use when binding to the LDAP server. "
"Leave empty for an anonymous bind"
),
required=False,
)
ldap_sync_bind_password = forms.CharField(
label=gettext_lazy("Bind password"),
initial="",
help_text=gettext_lazy(
"The password to use when binding to the LDAP server "
"(with 'Bind DN')"
),
widget=forms.PasswordInput(render_value=True),
required=False
)
ldap_enable_sync = YesNoField(
label=gettext_lazy("Enable export to LDAP"),
initial=False,
help_text=gettext_lazy(
"Enable automatic synchronization between local database and "
"LDAP directory")
)
ldap_sync_delete_remote_account = YesNoField(
label=gettext_lazy(
"Delete remote LDAP account when local account is deleted"
),
initial=False,
help_text=gettext_lazy(
"Delete remote LDAP account when local account is deleted, "
"otherwise it will be disabled."
)
)
ldap_sync_account_dn_template = forms.CharField(
label=gettext_lazy("Account DN template"),
initial="",
help_text=gettext_lazy(
"The template used to construct an account's DN. It should contain "
"one placeholder (ie. %(user)s)"
),
required=False
)
ldap_enable_import = YesNoField(
label=gettext_lazy("Enable import from LDAP"),
initial=False,
help_text=gettext_lazy(
"Enable account synchronization from LDAP directory to local "
"database"
)
)
ldap_import_search_base = forms.CharField(
label=gettext_lazy("Users search base"),
initial="",
help_text=gettext_lazy(
"The distinguished name of the search base used to find users"
),
required=False,
)
ldap_import_search_filter = forms.CharField(
label=gettext_lazy("Search filter"),
initial="(cn=*)",
help_text=gettext_lazy(
"An optional filter string (e.g. '(objectClass=person)'). "
"In order to be valid, it must be enclosed in parentheses."
),
required=False,
)
ldap_import_username_attr = forms.CharField(
label=gettext_lazy("Username attribute"),
initial="cn",
help_text=gettext_lazy(
"The name of the LDAP attribute where the username can be found."
),
)
ldap_dovecot_sync = YesNoField(
label=gettext_lazy("Enable Dovecot LDAP sync"),
initial=False,
help_text=gettext_lazy(
"LDAP authentication settings will be applied to Dovecot "
"configuration."
)
)
ldap_dovecot_conf_file = forms.CharField(
label=gettext_lazy("Dovecot LDAP config file"),
initial="/etc/dovecot/dovecot-modoboa.conf",
required=False,
help_text=gettext_lazy(
"Location of the configuration file which contains "
"Dovecot LDAP settings."
)
)
dash_sep = SeparatorField(label=gettext_lazy("Dashboard"))
rss_feed_url = forms.URLField(
label=gettext_lazy("Custom RSS feed"),
required=False,
help_text=gettext_lazy(
"Display custom RSS feed to resellers and domain administrators"
)
)
hide_features_widget = YesNoField(
label=gettext_lazy("Hide features widget"),
initial=False,
help_text=gettext_lazy(
"Hide features widget for resellers and domain administrators"
)
)
notif_sep = SeparatorField(label=gettext_lazy("Notifications"))
sender_address = lib_fields.UTF8EmailField(
label=_("Sender address"),
initial="noreply@yourdomain.test",
help_text=_(
"Email address used to send notifications."
)
)
api_sep = SeparatorField(label=gettext_lazy("Public API"))
enable_api_communication = YesNoField(
label=gettext_lazy("Enable communication"),
initial=True,
help_text=gettext_lazy(
"Enable communication with Modoboa public API")
)
check_new_versions = YesNoField(
label=gettext_lazy("Check new versions"),
initial=True,
help_text=gettext_lazy(
"Automatically checks if a newer version is available")
)
send_new_versions_email = YesNoField(
label=gettext_lazy("Send an email when new versions are found"),
initial=False,
help_text=gettext_lazy(
"Send an email to notify admins about new versions"
)
)
new_versions_email_rcpt = lib_fields.UTF8EmailField(
label=_("Recipient"),
initial="postmaster@yourdomain.test",
help_text=_(
"Recipient of new versions notification emails."
)
)
send_statistics = YesNoField(
label=gettext_lazy("Send statistics"),
initial=True,
help_text=gettext_lazy(
"Send statistics to Modoboa public API "
"(counters and used extensions)")
)
sep3 = SeparatorField(label=gettext_lazy("Miscellaneous"))
enable_inactive_accounts = YesNoField(
label=_("Enable inactive account tracking"),
initial=True,
help_text=_("Allow the administrator to set a threshold (in days) "
"beyond which an account is considered inactive "
"if the user hasn't logged in")
)
inactive_account_threshold = forms.IntegerField(
label=_("Inactive account threshold"),
initial=30,
help_text=_(
"An account with a last login date greater than this threshold "
"(in days) will be considered as inactive"
),
)
top_notifications_check_interval = forms.IntegerField(
label=_("Top notifications check interval"),
initial=30,
help_text=_(
"Interval between two top notification checks (in seconds)"
),
)
log_maximum_age = forms.IntegerField(
label=gettext_lazy("Maximum log record age"),
initial=365,
help_text=gettext_lazy("The maximum age in days of a log record"),
)
items_per_page = forms.IntegerField(
label=gettext_lazy("Items per page"),
initial=30,
help_text=gettext_lazy("Number of displayed items per page"),
)
default_top_redirection = forms.ChoiceField(
label=gettext_lazy("Default top redirection"),
choices=[],
initial="user",
help_text=gettext_lazy(
"The default redirection used when no application is specified"
),
)
# Visibility rules
visibility_rules = {
"ldap_secondary_server_address": "ldap_enable_secondary_server=True",
"ldap_secondary_server_port": "ldap_enable_secondary_server=True",
"ldap_auth_sep": "authentication_type=ldap",
"ldap_auth_method": "authentication_type=ldap",
"ldap_bind_dn": "ldap_auth_method=searchbind",
"ldap_bind_password": "ldap_auth_method=searchbind",
"ldap_search_base": "ldap_auth_method=searchbind",
"ldap_search_filter": "ldap_auth_method=searchbind",
"ldap_user_dn_template": "ldap_auth_method=directbind",
"ldap_admin_groups": "authentication_type=ldap",
"ldap_group_type": "authentication_type=ldap",
"ldap_groups_search_base": "authentication_type=ldap",
"ldap_sync_delete_remote_account": "ldap_enable_sync=True",
"ldap_sync_account_dn_template": "ldap_enable_sync=True",
"ldap_import_search_base": "ldap_enable_import=True",
"ldap_import_search_filter": "ldap_enable_import=True",
"ldap_import_username_attr": "ldap_enable_import=True",
"ldap_dovecot_conf_file": "ldap_dovecot_sync=True",
"check_new_versions": "enable_api_communication=True",
"send_statistics": "enable_api_communication=True",
"send_new_versions_email": "check_new_versions=True",
"new_versions_email_rcpt": "check_new_versions=True",
"sms_provider": "sms_password_recovery=True",
"inactive_account_threshold": "enable_inactive_accounts=True"
}
def __init__(self, *args, **kwargs):
super(GeneralParametersForm, self).__init__(*args, **kwargs)
self.fields["default_top_redirection"].choices = enabled_applications()
self._add_visibilty_rules(
sms_backends.get_all_backend_visibility_rules()
)
available_schemes = get_dovecot_schemes()
self.fields["password_scheme"].choices = [
(hasher.name, gettext_lazy(hasher.label))
for hasher in PasswordHasher.get_password_hashers()
if hasher().scheme in available_schemes
]
def _add_dynamic_fields(self):
new_fields = collections.OrderedDict()
for field, value in self.fields.items():
new_fields[field] = value
if field == "sms_provider":
sms_backend_fields = sms_backends.get_all_backend_settings()
for field, definition in sms_backend_fields.items():
new_fields[field] = definition["type"](
**definition["attrs"])
self.fields = new_fields
def clean_ldap_user_dn_template(self):
tpl = self.cleaned_data["ldap_user_dn_template"]
try:
tpl % {"user": "toto"}
except (KeyError, ValueError):
raise forms.ValidationError(_("Invalid syntax"))
return tpl
def clean_ldap_sync_account_dn_template(self):
tpl = self.cleaned_data["ldap_sync_account_dn_template"]
try:
tpl % {"user": "toto"}
except (KeyError, ValueError):
raise forms.ValidationError(_("Invalid syntax"))
return tpl
def clean_ldap_search_filter(self):
ldap_filter = self.cleaned_data["ldap_search_filter"]
try:
ldap_filter % {"user": "toto"}
except (KeyError, ValueError, TypeError):
raise forms.ValidationError(_("Invalid syntax"))
return ldap_filter
def clean_rounds_number(self):
value = self.cleaned_data["rounds_number"]
if value < 1000 or value > 999999999:
raise forms.ValidationError(_("Invalid rounds number"))
return value
def clean_default_password(self):
"""Check password complexity."""
value = self.cleaned_data["default_password"]
password_validation.validate_password(value)
return value
def clean(self):
"""Custom validation method
Depending on 'ldap_auth_method' value, we check for different
required parameters.
"""
super().clean()
cleaned_data = self.cleaned_data
if cleaned_data["sms_password_recovery"]:
provider = cleaned_data.get("sms_provider")
if provider:
sms_settings = sms_backends.get_backend_settings(provider)
if sms_settings:
for name in sms_settings.keys():
if not cleaned_data.get(name):
self.add_error(name, _("This field is required"))
else:
self.add_error("sms_provider", _("This field is required"))
if cleaned_data["authentication_type"] != "ldap":
return cleaned_data
if cleaned_data["ldap_auth_method"] == "searchbind":
required_fields = ["ldap_search_base", "ldap_search_filter"]
else:
required_fields = ["ldap_user_dn_template"]
for f in required_fields:
if f not in cleaned_data or cleaned_data[f] == u'':
self.add_error(f, _("This field is required"))
return cleaned_data
def _apply_ldap_settings(self, values, backend):
"""Apply configuration for given backend."""
import ldap
from django_auth_ldap.config import (
LDAPSearch, PosixGroupType, GroupOfNamesType,
ActiveDirectoryGroupType
)
if not hasattr(settings, backend.setting_fullname("USER_ATTR_MAP")):
setattr(settings, backend.setting_fullname("USER_ATTR_MAP"), {
"first_name": "givenName",
"email": "mail",
"last_name": "sn"
})
ldap_uri = "ldaps://" if values["ldap_secured"] == "ssl" else "ldap://"
ldap_uri += "%s:%s" % (
values[backend.srv_address_setting_name],
values[backend.srv_port_setting_name]
)
setattr(settings, backend.setting_fullname("SERVER_URI"), ldap_uri)
if values["ldap_secured"] == "starttls":
setattr(settings, backend.setting_fullname("START_TLS"), True)
if values["ldap_is_active_directory"]:
setattr(
settings, backend.setting_fullname("GROUP_TYPE"),
ActiveDirectoryGroupType()
)
searchfilter = "(objectClass=group)"
elif values["ldap_group_type"] == "groupofnames":
setattr(settings, backend.setting_fullname("GROUP_TYPE"),
GroupOfNamesType())
searchfilter = "(objectClass=groupOfNames)"
else:
setattr(settings, backend.setting_fullname("GROUP_TYPE"),
PosixGroupType())
searchfilter = "(objectClass=posixGroup)"
setattr(settings, backend.setting_fullname("GROUP_SEARCH"), LDAPSearch(
values["ldap_groups_search_base"], ldap.SCOPE_SUBTREE,
searchfilter
))
if values["ldap_auth_method"] == "searchbind":
setattr(settings, backend.setting_fullname("BIND_DN"),
values["ldap_bind_dn"])
setattr(
settings, backend.setting_fullname("BIND_PASSWORD"),
values["ldap_bind_password"]
)
search = LDAPSearch(
values["ldap_search_base"], ldap.SCOPE_SUBTREE,
values["ldap_search_filter"]
)
setattr(settings, backend.setting_fullname("USER_SEARCH"), search)
else:
setattr(
settings, backend.setting_fullname("USER_DN_TEMPLATE"),
values["ldap_user_dn_template"]
)
setattr(
settings,
backend.setting_fullname("BIND_AS_AUTHENTICATING_USER"), True)
if values["ldap_is_active_directory"]:
setting = backend.setting_fullname("GLOBAL_OPTIONS")
if not hasattr(settings, setting):
setattr(settings, setting, {
ldap.OPT_REFERRALS: False
})
else:
getattr(settings, setting)[ldap.OPT_REFERRALS] = False
def to_django_settings(self):
"""Apply LDAP related parameters to Django settings.
Doing so, we can use the django_auth_ldap module.
"""
try:
import ldap
ldap_available = True
except ImportError:
ldap_available = False
values = dict(param_tools.get_global_parameters("core"))
if not ldap_available or values["authentication_type"] != "ldap":
return
from modoboa.lib.authbackends import LDAPBackend
self._apply_ldap_settings(values, LDAPBackend)
if not values["ldap_enable_secondary_server"]:
return
from modoboa.lib.authbackends import LDAPSecondaryBackend
self._apply_ldap_settings(values, LDAPSecondaryBackend)
def save(self):
"""Extra save actions."""
super().save()
self.localconfig.need_dovecot_update = True
self.localconfig.save(update_fields=["need_dovecot_update"])
GLOBAL_PARAMETERS_STRUCT = collections.OrderedDict([
("authentication", {
"label": gettext_lazy("Authentication"),
"params": collections.OrderedDict([
("authentication_type", {
"label": gettext_lazy("Authentication type"),
"help_text": gettext_lazy(
"The backend used for authentication"),
}),
("password_scheme", {
"label": gettext_lazy("Default password scheme"),
"help_text": gettext_lazy(
"Scheme used to crypt mailbox passwords"),
}),
("rounds_number", {
"label": gettext_lazy("Rounds"),
"help_text": gettext_lazy(
"Number of rounds to use (only used by sha256crypt and "
"sha512crypt). Must be between 1000 and 999999999, "
"inclusive."
)
}),
("update_scheme", {
"label": gettext_lazy("Update password scheme at login"),
"help_text": gettext_lazy(
"Update user password at login to use the default "
"password scheme"
)
}),
("default_password", {
"label": gettext_lazy("Default password"),
"help_text": gettext_lazy(
"Default password for automatically created accounts.")
}),
("random_password_length", {
"label": gettext_lazy("Random password length"),
"help_text": gettext_lazy(
"Length of randomly generated passwords.")
}),
("update_password_url", {
"label": gettext_lazy("Update password service URL"),
"help_text": gettext_lazy(
"The URL of an external page where users will be able"
" to update their password. It applies only to non local"
" users, ie. those automatically created after a successful"
" external authentication (LDAP, SMTP)."
)
}),
("password_recovery_msg", {
"label": gettext_lazy("Password recovery announcement"),
"help_text": gettext_lazy(
"A temporary message that will be displayed on the "
"reset password page."
)
}),
("sms_password_recovery", {
"label": gettext_lazy("Enable password recovery by SMS"),
"help_text": gettext_lazy(
"Enable password recovery by SMS for users who filled "
"a phone number."
)
}),
("sms_provider", {
"label": gettext_lazy("SMS provider"),
"display": "sms_password_recovery=true",
"help_text": gettext_lazy(
"Choose a provider to send password recovery SMS"
)
}),
*sms_backends.get_all_backend_structures()
])
}),
("ldap", {
"label": _("LDAP"),
"params": collections.OrderedDict([
("ldap_server_address", {
"label": gettext_lazy("Server address"),
"help_text": gettext_lazy(
"The IP address or the DNS name of the LDAP server"),
}),
("ldap_server_port", {
"label": gettext_lazy("Server port"),
"help_text": gettext_lazy(
"The TCP port number used by the LDAP server")
}),
("ldap_enable_secondary_server", {
"label": gettext_lazy("Enable secondary server (fallback)"),
"help_text": gettext_lazy(
"Enable a secondary LDAP server which will be used "
"if the primary one fails"
)
}),
("ldap_secondary_server_address", {
"label": gettext_lazy("Secondary server address"),
"display": "ldap_enable_secondary_server=true",
"help_text": gettext_lazy(
"The IP address or the DNS name of the seondary LDAP server"
)
}),
("ldap_secondary_server_port", {
"label": gettext_lazy("Secondary server port"),
"display": "ldap_enable_secondary_server=true",
"help_text": gettext_lazy(
"The TCP port number used by the LDAP secondary server"
)
}),
("ldap_secured", {
"label": gettext_lazy("Use a secured connection"),
"help_text": gettext_lazy(
"Use an SSL/STARTTLS connection to access the LDAP server")
}),
("ldap_is_active_directory", {
"label": gettext_lazy("Active Directory"),
"help_text": gettext_lazy(
"Tell if the LDAP server is an Active Directory one")
}),
("ldap_admin_groups", {
"label": gettext_lazy("Administrator groups"),
"help_text": gettext_lazy(
"Members of those LDAP Posix groups will be created as "
"domain administrators. Use ';' characters to separate "
"groups."
)
}),
("ldap_group_type", {
"label": gettext_lazy("Group type"),
"help_text": gettext_lazy(
"The LDAP group type to use with your directory."
)
}),
("ldap_groups_search_base", {
"label": gettext_lazy("Groups search base"),
"help_text": gettext_lazy(
"The distinguished name of the search base used to find "
"groups"
)
}),
("ldap_password_attribute", {
"label": gettext_lazy("Password attribute"),
"help_text": gettext_lazy(
"The attribute used to store user passwords"),
}),
("ldap_auth_sep", {
"label": gettext_lazy("LDAP authentication settings"),
"display": "authentication_type=ldap",
"separator": True
}),
("ldap_auth_method", {
"label": gettext_lazy("Authentication method"),
"display": "authentication_type=ldap",
"help_text": gettext_lazy(
"Choose the authentication method to use"),
}),
("ldap_bind_dn", {
"label": gettext_lazy("Bind DN"),
"help_text": gettext_lazy(
"The distinguished name to use when binding to the LDAP "
"server. Leave empty for an anonymous bind"
),
"display": "authentication_type=ldap&ldap_auth_method=searchbind"
}),
("ldap_bind_password", {
"label": gettext_lazy("Bind password"),
"help_text": gettext_lazy(
"The password to use when binding to the LDAP server "
"(with 'Bind DN')"
),
"display": "authentication_type=ldap&ldap_auth_method=searchbind",
"password": True
}),
("ldap_search_base", {
"label": gettext_lazy("Users search base"),
"help_text": gettext_lazy(
"The distinguished name of the search base used to find "
"users"
),
"display": "authentication_type=ldap&ldap_auth_method=searchbind"
}),
("ldap_search_filter", {
"label": gettext_lazy("Search filter"),
"help_text": gettext_lazy(
"An optional filter string (e.g. '(objectClass=person)'). "
"In order to be valid, it must be enclosed in parentheses."
),
"display": "authentication_type=ldap&ldap_auth_method=searchbind"
}),
("ldap_user_dn_template", {
"label": gettext_lazy("User DN template"),
"help_text": gettext_lazy(
"The template used to construct a user's DN. It should "
"contain one placeholder (ie. %(user)s)"
),
"display": "authentication_type=ldap&ldap_auth_method=directbind"
}),
("ldap_sync_sep", {
"label": gettext_lazy("LDAP synchronization settings"),
"separator": True
}),
("ldap_sync_bind_dn", {
"label": gettext_lazy("Bind DN"),
"help_text": gettext_lazy(
"The distinguished name to use when binding to the LDAP server. "
"Leave empty for an anonymous bind"
)
}),
("ldap_sync_bind_password", {
"label": gettext_lazy("Bind password"),
"help_text": gettext_lazy(
"The password to use when binding to the LDAP server "
"(with 'Bind DN')"
),
"password": True
}),
("ldap_enable_sync", {
"label": gettext_lazy("Enable export to LDAP"),
"help_text": gettext_lazy(
"Enable automatic synchronization between local database "
"and LDAP directory"
)
}),
("ldap_sync_delete_remote_account", {
"label": gettext_lazy(
"Delete remote LDAP account when local account is deleted"),
"help_text": gettext_lazy(
"Delete remote LDAP account when local account is deleted, "
"otherwise it will be disabled."
),
"display": "ldap_enable_sync=true"
}),
("ldap_sync_account_dn_template", {
"label": gettext_lazy("Account DN template"),
"help_text": gettext_lazy(
"The template used to construct an account's DN. It should "
"contain one placeholder (ie. %(user)s)"
),
"display": "ldap_enable_sync=true"
}),
("ldap_enable_import", {
"label": gettext_lazy("Enable import from LDAP"),
"help_text": gettext_lazy(
"Enable account synchronization from LDAP directory to "
"local database"
)
}),
("ldap_import_search_base", {
"label": gettext_lazy("Users search base"),
"help_text": gettext_lazy(
"The distinguished name of the search base used to find "
"users"
),
"display": "ldap_enable_import=true"
}),
("ldap_import_search_filter", {
"label": gettext_lazy("Search filter"),
"help_text": gettext_lazy(
"An optional filter string (e.g. '(objectClass=person)'). "
"In order to be valid, it must be enclosed in parentheses."
),
"display": "ldap_enable_import=true"
}),
("ldap_import_username_attr", {
"label": gettext_lazy("Username attribute"),
"help_text": gettext_lazy(
"The name of the LDAP attribute where the username can be "
"found."
),
"display": "ldap_enable_import=true"
}),
("ldap_dovecot_sync", {
"label": gettext_lazy("Enable Dovecot LDAP sync"),
"help_text": gettext_lazy(
"LDAP authentication settings will be applied to Dovecot "
"configuration."
)
}),
("ldap_dovecot_conf_file", {
"label": gettext_lazy("Dovecot LDAP config file"),
"help_text": gettext_lazy(
"Location of the configuration file which contains "
"Dovecot LDAP settings."
),
"display": "ldap_dovecot_sync=true"
}),
])
}),
("dashboard", {
"label": gettext_lazy("Dashboard"),
"params": collections.OrderedDict([
("rss_feed_url", {
"label": gettext_lazy("Custom RSS feed"),
"help_text": gettext_lazy(
"Display custom RSS feed to resellers and domain "
"administrators"
)
}),
("hide_features_widget", {
"label": gettext_lazy("Hide features widget"),
"help_text": gettext_lazy(
"Hide features widget for resellers and domain "
"administrators"
)
}),
])
}),
("notifications", {
"label": gettext_lazy("Notifications"),
"params": collections.OrderedDict([
("sender_address", {
"label": gettext_lazy("Sender address"),
"help_text": gettext_lazy(
"Email address used to send notifications.")
})
])
}),
("api", {
"label": gettext_lazy("Public API"),
"params": collections.OrderedDict([
("enable_api_communication", {
"label": gettext_lazy("Enable communication"),
"help_text": gettext_lazy(
"Automatically checks if a newer version is available")
}),
("check_new_versions", {
"label": gettext_lazy("Check new versions"),
"display": "enable_api_communication=true",
"help_text": gettext_lazy(
"Automatically checks if a newer version is available")
}),
("send_new_versions_email", {
"label": gettext_lazy(
"Send an email when new versions are found"),
"display": "check_new_versions=true",
"help_text": gettext_lazy(
"Send an email to notify admins about new versions"
)
}),
("new_versions_email_rcpt", {
"label": gettext_lazy("Recipient"),
"display": "check_new_versions=true",
"help_text": gettext_lazy(
"Recipient of new versions notification emails."
)
}),
("send_statistics", {
"label": gettext_lazy("Send statistics"),
"display": "enable_api_communication=true",
"help_text": gettext_lazy(
"Send statistics to Modoboa public API "
"(counters and used extensions)")
})
])
}),
("misc", {
"label": gettext_lazy("Miscellaneous"),
"params": collections.OrderedDict([
("enable_inactive_accounts", {
"label": gettext_lazy("Enable inactive account tracking"),
"help_text": gettext_lazy(
"Allow the administrator to set a threshold (in days) "
"beyond which an account is considered inactive "
"if the user hasn't logged in"
),
}),
("inactive_account_threshold", {
"label": gettext_lazy("Inactive account threshold"),
"display": "enable_inactive_accounts=true",
"help_text": gettext_lazy(
"An account with a last login date greater than this "
"threshold (in days) will be considered as inactive"
)
}),
("top_notifications_check_interval", {
"label": gettext_lazy("Top notifications check interval"),
"help_text": gettext_lazy(
"Interval between two top notification checks (in seconds)"
),
}),
("log_maximum_age", {
"label": gettext_lazy("Maximum log record age"),
"help_text": gettext_lazy(
"The maximum age in days of a log record"),
}),
("items_per_page", {
"label": gettext_lazy("Items per page"),
"help_text": gettext_lazy("Number of displayed items per page")
}),
("default_top_redirection", {
"label": gettext_lazy("Default top redirection"),
"help_text": gettext_lazy(
"The default redirection used when no application is "
"specified"
),
})
])
})
])
|
import easygui
from tkinter import Tk
from tkinter.filedialog import askopenfilename
from PyPDF2 import PdfFileWriter, PdfFileReader
import PyPDF2
import re
from tkinter import messagebox
from tkinter import Text
import os
import calendar
import time
from config import configs
import base64
from datetime import datetime
def checkPath(dir):
    if not os.path.isdir(dir):
        os.mkdir(dir)
def subPath(dir):
    if '\\' in dir:
        return '\\'
    elif '/' in dir:
        return '/'
    # assumption: fall back to the platform separator when neither is present
    return os.sep
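# Example: subPath('C:\\Users\\a\\file.pdf') returns '\\' and
# subPath('/home/a/file.pdf') returns '/'.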
try:
filename = ""
Tk().withdraw()
filename = askopenfilename()
if(filename[-3:] == "pdf"):
inputpdf = PdfFileReader(open(filename, "rb"))
ts = calendar.timegm(time.gmtime())
passwd = configs.PASSWD
passwd = passwd.replace("x", "M")
passwd = passwd.replace("AA", "ZD")
passwd = passwd.replace("y", "N")
passwd = passwd[:-1]
passwd = base64.b64decode(passwd)
text = "Enter the password to enter GeeksforGeeks"
title = "Window Title GfG"
output = easygui.passwordbox(text, title)
        # compare str to str: the stored value is base64 text, so encode the
        # input the same way before comparing (the original compared str to bytes)
        if passwd.decode('UTF-8') == base64.b64encode(output.encode("utf-8")).decode("ascii"):
workpath = configs.PATH + str(ts)
checkPath(workpath)
for i in range(inputpdf.numPages):
output = PdfFileWriter()
output.addPage(inputpdf.getPage(i))
with open(workpath + subPath(workpath) + "%s.pdf" % i, "wb") as outputStream:
output.write(outputStream)
f = open(workpath + subPath(workpath) + "0.txt","w+")
now = datetime.now()
dt_string = now.strftime("%d/%m/%Y %H:%M:%S")
f.write(dt_string)
f.close()
messagebox.showinfo("doMestre_Folha", "Arquivo foi separado, iniciando processamento...")
pdf_file = open('/home/cleiton/Documents/pdf-a.pdf', 'rb')
read_pdf = PyPDF2.PdfFileReader(pdf_file)
number_of_pages = read_pdf.getNumPages()
page = read_pdf.getPage(0)
page_content = page.extractText()
parsed = ''.join(page_content)
print("Sem eliminar as quebras")
print(parsed)
parsed = re.sub('n', '', parsed)
print("Após eliminar as quebras")
print(parsed)
else:
messagebox.showinfo("doMestre_Folha", "Erro. Senha incorreta")
else:
messagebox.showinfo("doMestre_Folha", "Erro. Selecione um arquivo PDF")
except Exception:
    messagebox.showinfo("doMestre_Folha", "Error selecting file.")
|
# Upgrading Dart's SDK for HTML (blink IDLs).
#
# Typically this is done using the Dart WebCore branch (as it has to be
# staged to get most things working).
#
# Enlist in third_party/WebCore:
# > cd src/dart/third_party
# > rm -rf WebCore (NOTE: Normally detached head using gclient sync)
# > git clone https://github.com/dart-lang/webcore.git WebCore
#
# To update all *.idl, *.py, LICENSE files, and IDLExtendedAttributes.txt:
# > cd sdk
# > python3 tools/dom/scripts/idlsync.py
#
# Display blink files to delete, copy, update, and collisions to review:
# > python3 tools/dom/scripts/idlsync.py --check
#
# Bring over all blink files to dart/third_party/WebCore (*.py, *.idl, and
# IDLExtendedAttributes.txt):
# > python3 tools/dom/scripts/idlsync.py
#
# Update the DEPS file SHA for "WebCore_rev" with the committed changes of files
# in WebCore e.g., "WebCore_rev": "@NNNNNNNNNNNNNNNNNNNNNNNNN"
#
# Generate the sdk/*.dart files from the new IDLs and the Python IDL parsing
# code copied into dart/third_party/WebCore from src/third_party/WebKit (blink).
#
# > cd src/dart/tools/dom/script
# > ./go.sh
#
# Finally, commit the files in dart/third_party/WebCore.
import errno
import optparse
import os.path
import re
import requests
import subprocess
import sys
import time
from shutil import copyfile
# Dart DEPS file checked into the dart-lang/sdk master.
DEPS_GIT = "https://raw.githubusercontent.com/dart-lang/sdk/master/DEPS"
CHROME_TRUNK = "https://chromium.googlesource.com"
WEBKIT_SHA_PATTERN = r'"WebCore_rev": "(\S+)",'
# Chromium remote (GIT repository)
GIT_REMOTES_CHROMIUM = 'https://chromium.googlesource.com/chromium/src.git'
# location of this file
SOURCE_FILE_DIR = 'tools/dom/scripts'
WEBKIT_SOURCE = 'src/third_party/WebKit/Source'
WEBCORE_SOURCE = 'third_party/WebCore'
WEBKIT_BLINK_SOURCE = 'src/third_party/blink'
WEBCORE_BLINK_SOURCE = 'third_party/WebCore/blink'
# Never automatically git add bindings/IDLExtendedAttributes.txt; this file has
# been modified by Dart but is usually changed by WebKit blink too.
IDL_EXTENDED_ATTRIBUTES_FILE = 'IDLExtendedAttributes.txt'
# Don't automatically update, delete or add anything in this directory:
# bindings/dart/scripts
# The scripts in the above directory are the source for our Dart generators,
# which are driven from the blink IDL parser AST
DART_SDK_GENERATOR_SCRIPTS = 'bindings/dart/scripts'
# The __init__.py files allow Python to treat directories as packages. Used to
# allow Dart's Python scripts to interact with Chrome's IDL parsing scripts.
PYTHON_INITS = '__init__.py'
# sub directories containing IDLs (core and modules) from the base directory
# src/third_party/WebKit/Source
SUBDIRS = [
'bindings',
'core',
'modules',
]
IDL_EXT = '.idl'
PY_EXT = '.py'
LICENSE_FILE_PREFIX = 'LICENSE' # e.g., LICENSE-APPLE, etc.
# When copying a file from WebKit into WebCore, if this marker appears in the
# file then flag it as a special .py or .idl file that needs to be reviewed.
DART_CHANGES = ' FIXMEDART: '
# application options passed in.
options = None
warning_messages = []
# Is --dry_run passed in.
def isDryRun():
global options
return options['dry_run'] is not None
# Is --verbose passed in.
def isVerbose():
global options
return options['verbose'] is not None
# If --chromium= is specified then compute the directory of the Chromium
# source; otherwise fall back to the current working directory.
def chromiumDirectory():
global options
if options['chromium_dir'] is not None:
return os.path.expanduser(options['chromium_dir'])
    return os.getcwd()
def RunCommand(cmd, valid_exits=[0]):
"""Executes a shell command and return its stdout."""
if isVerbose():
print(' '.join(cmd))
pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = pipe.communicate()
if pipe.returncode in valid_exits:
return output[0]
else:
print(output[1])
print('FAILED. RET_CODE=%d' % pipe.returncode)
sys.exit(pipe.returncode)
# returns True if // FIXMEDART: is in the file.
def anyDartFixMe(filepath):
if os.path.exists(filepath):
data = open(filepath, 'r').read()
return data.find(DART_CHANGES) != -1
else:
return False
# Given a base_dir, compute the trailing directory portion after base_dir;
# returns the subpath from base_dir for the path passed in.
def subpath(path, base_dir):
dir_portion = ''
head = path
while True:
head, tail = os.path.split(head)
dir_portion = os.path.join(tail, dir_portion)
if head == base_dir or tail == '':
break
return dir_portion
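# Example (POSIX, illustrative): subpath('/a/b/core/html/File.idl', '/a/b')
# returns 'core/html/File.idl/' -- note the trailing separator left by the
# final os.path.join(tail, '') step.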
# Copy any file in source_dir (WebKit) to destination_dir (dart/third_party/WebCore)
# source_dir is the src/third_party/WebKit/Source location (blink)
# destination_dir is the src/dart/third_party/WebCore location
# returns idls_copied, py_copied, other_copied
def copy_files(source_dir, src_prefix, destination_dir):
original_cwd = os.getcwd()
try:
os.makedirs(destination_dir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
os.chdir(destination_dir)
idls = 0 # *.idl files copied
pys = 0 # *.py files copied
others = 0 # all other files copied
for (root, _, files) in os.walk(source_dir, topdown=False):
dir_portion = subpath(root, source_dir)
for f in files:
# Never automatically add any Dart generator scripts (these are the original
# sources in WebCore) from WebKit to WebCore.
if dir_portion != DART_SDK_GENERATOR_SCRIPTS:
if (f.endswith(IDL_EXT) or f == IDL_EXTENDED_ATTRIBUTES_FILE or
f.endswith(PY_EXT) or
f.startswith(LICENSE_FILE_PREFIX)):
if f.endswith(IDL_EXT):
idls += 1
elif f.endswith(PY_EXT):
pys += 1
else:
others += 1
src_file = os.path.join(root, f)
# Compute the destination path using sdk/third_party/WebCore
subdir_root = src_file[src_file.rfind(src_prefix) +
len(src_prefix):]
if subdir_root.startswith(os.path.sep):
subdir_root = subdir_root[1:]
dst_file = os.path.join(destination_dir, subdir_root)
                # Map src/third_party/WebKit/Source/* onto sdk/third_party/WebCore/*
destination = os.path.dirname(dst_file)
if not os.path.exists(destination):
os.makedirs(destination)
has_Dart_fix_me = anyDartFixMe(dst_file)
if not isDryRun():
copyfile(src_file, dst_file)
if isVerbose():
#print('...copying %s' % os.path.split(dst_file)[1])
print('...copying %s' % dst_file)
if f == IDL_EXTENDED_ATTRIBUTES_FILE:
warning_messages.append(dst_file)
else:
if has_Dart_fix_me:
warning_messages.append(dst_file)
if not (isDryRun() or has_Dart_fix_me):
# git add the file
RunCommand(['git', 'add', dst_file])
os.chdir(original_cwd)
return [idls, pys, others]
# Remove any file in webcore_dir that no longer exists in webkit_dir.
# webcore_dir: src/dart/third_party/WebCore location
# webkit_dir: src/third_party/WebKit/Source location (blink)
# Only the given subdir under webcore_dir is checked.
# Returns the list of files deleted.
def remove_obsolete_webcore_files(webcore_dir, webkit_dir, subdir):
files_to_delete = []
original_cwd = os.getcwd()
if os.path.exists(webcore_dir):
os.chdir(webcore_dir)
for (root, _, files) in os.walk(
os.path.join(webcore_dir, subdir), topdown=False):
dir_portion = subpath(root, webcore_dir)
for f in files:
                # Never automatically delete any Dart generator scripts (these are the
                # original sources in WebCore).
if dir_portion != DART_SDK_GENERATOR_SCRIPTS:
check_file = os.path.join(dir_portion, f)
check_file_full_path = os.path.join(webkit_dir, check_file)
if not os.path.exists(check_file_full_path) and \
not(check_file_full_path.endswith(PYTHON_INITS)):
if not isDryRun():
# Remove the file using git
RunCommand(['git', 'rm', check_file])
files_to_delete.append(check_file)
os.chdir(original_cwd)
return files_to_delete
def ParseOptions():
parser = optparse.OptionParser()
parser.add_option(
'--chromium',
'-c',
dest='chromium_dir',
action='store',
type='string',
        help='WebKit Chrome directory (e.g., --chromium=~/chrome63)',
default=None)
parser.add_option(
'--verbose',
'-v',
dest='verbose',
action='store_true',
help='Dump all information',
default=None)
parser.add_option(
'--dry_run',
'-d',
dest='dry_run',
action='store_true',
help='Display results without adding, updating or deleting any files',
default=None)
args, _ = parser.parse_args()
argOptions = {}
argOptions['chromium_dir'] = args.chromium_dir
argOptions['verbose'] = args.verbose
argOptions['dry_run'] = args.dry_run
return argOptions
# Fetch the DEPS file in src/dart/tools/deps/dartium.deps/DEPS from the git repo.
def GetDepsFromGit():
req = requests.get(DEPS_GIT)
return req.text
def ValidateGitRemotes():
#origin https://chromium.googlesource.com/dart/dartium/src.git (fetch)
remotes_list = RunCommand(['git', 'remote', '--verbose']).split()
if (len(remotes_list) > 2 and remotes_list[0] == 'origin' and
remotes_list[1] == GIT_REMOTES_CHROMIUM):
return True
print('ERROR: Unable to find dart/dartium/src repository %s' %
GIT_REMOTES_CHROMIUM)
return False
def getChromiumSHA():
cwd = os.getcwd()
chromiumDir = chromiumDirectory()
webkit_dir = os.path.join(chromiumDir, WEBKIT_SOURCE)
os.chdir(webkit_dir)
if ValidateGitRemotes():
chromium_sha = RunCommand(['git', 'log', '--format=format:%H', '-1'])
else:
chromium_sha = -1
os.chdir(cwd)
return chromium_sha
def getCurrentDartSHA():
cwd = os.getcwd()
if cwd.endswith('dart'):
# In src/dart
src_dir, _ = os.path.split(cwd)
elif cwd.endswith('sdk'):
src_dir = cwd
else:
src_dir = os.path.join(cwd, 'sdk')
os.chdir(src_dir)
if ValidateGitRemotes():
dart_sha = RunCommand(['git', 'log', '--format=format:%H', '-1'])
else:
dart_sha = -1
os.chdir(cwd)
return dart_sha
# Returns the SHA of the WebCore revision pinned in the DEPS file.
def GetDEPSWebCoreGitRevision(deps, component):
    """Returns the WebCore_rev SHA recorded in the DEPS file contents."""
    foundIt = re.search(WEBKIT_SHA_PATTERN, deps)
    # Drop the leading '@' to get the bare SHA of the WebCore changes.
    revision = foundIt.group(1)[1:]
    print('%s' % revision)
    return revision
def copy_subdir(src, src_prefix, dest, subdir):
idls_deleted = remove_obsolete_webcore_files(dest, src, subdir)
print("%s files removed in WebCore %s" % (idls_deleted.__len__(), subdir))
if isVerbose():
for delete_file in idls_deleted:
print(" %s" % delete_file)
idls_copied, py_copied, other_copied = copy_files(
os.path.join(src, subdir), src_prefix, dest)
if idls_copied > 0:
print("Copied %s IDLs to %s" % (idls_copied, subdir))
if py_copied > 0:
print("Copied %s PYs to %s" % (py_copied, subdir))
if other_copied > 0:
print("Copied %s other to %s\n" % (other_copied, subdir))
def main():
global options
options = ParseOptions()
current_dir = os.path.dirname(os.path.abspath(__file__))
if not current_dir.endswith(SOURCE_FILE_DIR):
        print('ERROR: idlsync.py not run in proper directory (%s)\n' %
              current_dir)
base_directory = current_dir[:current_dir.rfind(SOURCE_FILE_DIR)]
# Validate DEPS WebCore_rev SHA DOES NOT match the SHA of chromium master.
deps = GetDepsFromGit()
webcore_revision = GetDEPSWebCoreGitRevision(deps, 'webkit')
chromium_sha = getChromiumSHA()
if webcore_revision == chromium_sha:
print("ERROR: Nothing to update in WebCore, WebCore_rev SHA in DEPS "
"matches Chromium GIT master SHA in %s" % options['webkit_dir'])
return
start_time = time.time()
# Copy scripts from third_party/blink/tools to third_party/WebCore/blink/tools
#
# This also implies that the files:
# WebCore/bindings/scripts/code_generator_web_agent_api.py
# WebCore/bindings/scripts/utilities.py
#
# Need to have sys.path.append at beginning of the above files changed from:
#
# sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', '..',
# 'third_party', 'blink', 'tools'))
# to
#
# sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..',
# 'blink', 'tools'))
#
webkit_blink_dir = os.path.join(chromiumDirectory(), WEBKIT_BLINK_SOURCE)
webcore_blink_dir = os.path.join(base_directory, WEBCORE_BLINK_SOURCE)
copy_subdir(webkit_blink_dir, WEBKIT_BLINK_SOURCE, webcore_blink_dir, "")
chromium_webkit_dir = os.path.join(chromiumDirectory(), WEBKIT_SOURCE)
dart_webcore_dir = os.path.join(base_directory, WEBCORE_SOURCE)
for subdir in SUBDIRS:
copy_subdir(chromium_webkit_dir, WEBKIT_SOURCE, dart_webcore_dir,
subdir)
end_time = time.time()
    if warning_messages:
        print('WARNING: File(s) contain FIXMEDART and were NOT "git add"ed; please review:')
        for warning in warning_messages:
            print(' %s' % warning)
    print('\nidlsync completed in %s seconds' %
          round(end_time - start_time, 2))
if __name__ == '__main__':
sys.exit(main())
|
from collections.abc import Callable, Iterator, Mapping
from typing import Generic, TypeVar
from _typeshed import Incomplete, Self
_T = TypeVar("_T")
_U = TypeVar("_U")
_V = TypeVar("_V")
class AtlasView(Mapping[_T, dict[_U, _V]], Generic[_T, _U, _V]):
def __init__(self, d: Mapping[_T, dict[_U, _V]]) -> None: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[_T]: ...
def __getitem__(self, key: _T) -> dict[_U, _V]: ...
def copy(self: Self) -> Self: ...
class AdjacencyView(AtlasView[_T, _U, _V], Generic[_T, _U, _V]): ...
class MultiAdjacencyView(AdjacencyView[_T, _U, _V], Generic[_T, _U, _V]): ...
class UnionAtlas(Mapping[_T, dict[_U, _V]], Generic[_T, _U, _V]):
def __init__(
self, succ: AtlasView[_T, _U, _V], pred: AtlasView[_T, _U, _V]
) -> None: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[_T]: ...
def __getitem__(self, key: _T) -> dict[_U, _V]: ...
def copy(self: Self) -> Self: ...
class UnionAdjacency(Mapping[_T, dict[_U, _V]], Generic[_T, _U, _V]):
def __init__(
self, succ: AdjacencyView[_T, _U, _V], pred: AdjacencyView[_T, _U, _V]
) -> None: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[_T]: ...
def __getitem__(self, key: _T) -> dict[_U, _V]: ...
def copy(self: Self) -> Self: ...
class UnionMultiInner(UnionAtlas[_T, _U, _V], Generic[_T, _U, _V]): ...
class UnionMultiAdjacency(UnionAdjacency[_T, _U, _V], Generic[_T, _U, _V]): ...
class FilterAtlas(Mapping[_T, _U], Generic[_T, _U]):
NODE_OK: Callable[[_T], bool]
def __init__(self, d: Mapping[_T, _U], NODE_OK: Callable[[_T], bool]) -> None: ...
def __len__(self) -> int: ...
def __iter__(self) -> Iterator[_T]: ...
def __getitem__(self, key: _T) -> _U: ...
def copy(self: Self) -> Self: ...
class FilterAdjacency(Mapping[_T, Mapping[_U, _V]], Generic[_T, _U, _V]):
NODE_OK: Callable[[_T], bool]
EDGE_OK: Callable[[_T, _T], bool]
def __init__(
self,
d: Mapping[_T, Mapping[_U, _V]],
NODE_OK: Callable[[_T], bool],
EDGE_OK: Callable[[_T, _T], bool],
) -> None: ...
def __len__(self) -> Incomplete: ...
def __iter__(self) -> Incomplete: ...
def __getitem__(self, node: _T) -> FilterAtlas[_U, _V]: ...
def copy(self: Self) -> Self: ...
class FilterMultiInner(FilterAdjacency[_T, _U, _V], Generic[_T, _U, _V]): ...
class FilterMultiAdjacency(FilterAdjacency[_T, _U, _V], Generic[_T, _U, _V]): ...
|
import sdl2.ext
from tetris.configuration.Configuration import *
from tetris.configuration.Colors import *
class MenuState(object):
def __init__(self):
self.selected_index = None
self.buttons = []
def add_button(self, button):
self.buttons.append(button)
if self.selected_index is None:
self.selected_index = 0
    def next_button(self):
        # Python's % already yields a non-negative result for a positive modulus
        self.selected_index = (self.selected_index + 1) % len(self.buttons)
    def previous_button(self):
        self.selected_index = (self.selected_index - 1) % len(self.buttons)
class LabelState(object):
def __init__(self, display_text, text_size, text_color):
self.display_text = display_text
self.text_size = text_size
self.text_color = text_color
class ButtonState(object):
def __init__(self, display_text, text_size, unselected_color, selected_color, next_state_class):
self.display_text = display_text
self.text_size = text_size
self.unselected_color = unselected_color
self.selected_color = selected_color
self.next_state_class = next_state_class
class MenuLabel(sdl2.ext.Entity):
def __init__(self, world, label_state, pos=(0, 0)):
self.labelstate = label_state
factory = sdl2.ext.SpriteFactory(sdl2.ext.SOFTWARE)
font_manager = sdl2.ext.FontManager(font_path=font_path(), size=label_state.text_size, color=label_state.text_color)
self.sprite = factory.from_text(label_state.display_text, fontmanager=font_manager)
self.sprite.position = pos
class MenuButton(sdl2.ext.Entity):
def __init__(self, world, button_state, pos=(0,0)):
self.buttonstate = button_state
factory = sdl2.ext.SpriteFactory(sdl2.ext.SOFTWARE)
font_manager = sdl2.ext.FontManager(font_path=font_path(), size=button_state.text_size, color=button_state.unselected_color)
self.sprite = factory.from_text(button_state.display_text, fontmanager=font_manager)
self.sprite.position = pos
class TitleSprite(MenuLabel):
def __init__(self, world):
super(TitleSprite, self).__init__(world, LabelState("Pytris", 30, WHITE))
        self.sprite.position = (window_width()//2 - self.sprite.area[2]//2, 30)
|
# handle import here
import sys
sys.path.append('../../')
'''
SmsInput
'''
from Clean_Dataset.Utils.csv_util import csv_util
import pandas as pd
import numpy as np
def percentage(data):
'''data is a DataFrame'''
array = np.array(data)
call_count = 0
for row in array:
for value in row[3:]:
if not str(value) == "nan":
call_count += 1
break
length = len(array)
if length == 0:
return 0.0
return call_count / length
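# Illustrative example (hypothetical rows shaped [uid, start, end, v1, ...]):
# a row counts once when any value from column 3 onward is non-NaN, so
#   percentage(pd.DataFrame([[1, 0, 0, 'x', np.nan],
#                            [2, 0, 0, np.nan, np.nan]])) == 0.5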
sms = csv_util('../../StudentLife_Dataset/Inputs/sms/')
sms.readAll()
sms.process(percentage)
sms.writeToCsv(['uid', 'percentage'], 'sms.csv')
|
#
# This file is part of LUNA.
#
# Copyright (c) 2020 Great Scott Gadgets <info@greatscottgadgets.com>
# Copyright (c) 2020 Florent Kermarrec <florent@enjoy-digital.fr>
#
# Code based on ``usb3_pipe``.
# SPDX-License-Identifier: BSD-3-Clause
""" Scrambling and descrambling for USB3. """
import unittest
import operator
import functools
from amaranth import *
from .coding import COM, stream_word_matches_symbol
from ...stream import USBRawSuperSpeedStream
from ....test.utils import LunaSSGatewareTestCase, ss_domain_test_case
#
# Scrambling modules.
# See [USB3.2r1: Appendix B].
#
class ScramblerLFSR(Elaboratable):
""" Scrambler LFSR.
Linear feedback shift register used for USB3 scrambling.
Polynomial: X^16 + X^5 + X^4 + X^3 + 1
See [USB3.2: Appendix B]
Attributes
----------
clear: Signal(), input
Strobe; when high, resets the LFSR to its initial value.
advance: Signal(), input
Strobe; when high, the LFSR advances on each clock cycle.
value: Signal(32), output
The current value of the LFSR.
Parameters
----------
    initial_value: 16-bit int, optional
        The initial value for the LFSR. Optional; defaults to all 1's, per the USB3 spec.
"""
def __init__(self, initial_value=0xffff):
self._initial_value = initial_value
#
# I/O port
#
self.clear = Signal()
self.advance = Signal()
self.value = Signal(32)
def elaborate(self, platform):
m = Module()
next_value = Signal(16)
current_value = Signal(16, reset=self._initial_value)
def xor_bits(*indices):
bits = (current_value[i] for i in indices)
return functools.reduce(operator.__xor__, bits)
# Compute the next value in our internal LFSR state...
m.d.comb += next_value.eq(Cat(
xor_bits(0, 6, 8, 10), # 0
xor_bits(1, 7, 9, 11), # 1
xor_bits(2, 8, 10, 12), # 2
xor_bits(3, 6, 8, 9, 10, 11, 13), # 3
xor_bits(4, 6, 7, 8, 9, 11, 12, 14), # 4
xor_bits(5, 6, 7, 9, 12, 13, 15), # 5
xor_bits(0, 6, 7, 8, 10, 13, 14), # 6
xor_bits(1, 7, 8, 9, 11, 14, 15), # 7
xor_bits(0, 2, 8, 9, 10, 12, 15), # 8
xor_bits(1, 3, 9, 10, 11, 13), # 9
xor_bits(0, 2, 4, 10, 11, 12, 14), # 10
xor_bits(1, 3, 5, 11, 12, 13, 15), # 11
xor_bits(2, 4, 6, 12, 13, 14), # 12
xor_bits(3, 5, 7, 13, 14, 15), # 13
xor_bits(4, 6, 8, 14, 15), # 14
xor_bits(5, 7, 9, 15) # 15
))
# Compute the LFSR's current output.
m.d.comb += self.value.eq(Cat(
current_value[15],
current_value[14],
current_value[13],
current_value[12],
current_value[11],
current_value[10],
current_value[9],
current_value[8],
current_value[7],
current_value[6],
current_value[5],
xor_bits(4, 15),
xor_bits(3, 14, 15),
xor_bits(2, 13, 14, 15),
xor_bits(1, 12, 13, 14),
xor_bits(0, 11, 12, 13),
xor_bits(10, 11, 12, 15),
xor_bits(9, 10, 11, 14),
xor_bits(8, 9 , 10, 13),
xor_bits(7, 8 , 9, 12),
xor_bits(6, 7 , 8, 11),
xor_bits(5, 6 , 7, 10),
xor_bits(4, 5 , 6, 9, 15),
xor_bits(3, 4 , 5, 8, 14),
xor_bits(2, 3 , 4, 7, 13, 15),
xor_bits(1, 2 , 3, 6, 12, 14),
xor_bits(0, 1 , 2, 5, 11, 13, 15),
xor_bits(0, 1 , 4, 10, 12, 14),
xor_bits(0, 3 , 9, 11, 13),
xor_bits(2, 8 , 10, 12),
xor_bits(1, 7 , 9, 11),
xor_bits(0, 6 , 8, 10)
))
# If we have a reset, clear our LFSR.
with m.If(self.clear):
m.d.ss += current_value.eq(self._initial_value)
# Otherwise, advance when desired.
with m.Elif(self.advance):
m.d.ss += current_value.eq(next_value)
return m
class ScramblerLFSRTest(LunaSSGatewareTestCase):
FRAGMENT_UNDER_TEST = ScramblerLFSR
@ss_domain_test_case
def test_lfsr_stream(self):
# From the table of 8-bit encoded values, [USB3.2, Appendix B.1].
# We can continue this as long as we want to get more thorough testing,
# but for now, this is probably enough.
scrambled_sequence = [
0x14c017ff, 0x8202e7b2, 0xa6286e72, 0x8dbf6dbe, # Row 1 (0x00)
0xe6a740be, 0xb2e2d32c, 0x2a770207, 0xe0be34cd, # Row 2 (0x10)
0xb1245da7, 0x22bda19b, 0xd31d45d4, 0xee76ead7 # Row 3 (0x20)
]
yield self.dut.advance.eq(1)
yield
# Check that our LFSR produces each of our values in order.
for index, value in enumerate(scrambled_sequence):
self.assertEqual((yield self.dut.value), value, f"incorrect value at cycle {index}")
yield
class Scrambler(Elaboratable):
""" USB3-compliant data scrambler.
Scrambles the transmitted data stream to reduce EMI.
Attributes
----------
clear: Signal(), input
Strobe; when high, resets the scrambler to the start of its sequence.
enable: Signal(), input
When high, data scrambling is enabled. When low, data is passed through without scrambling.
    sink: USBRawSuperSpeedStream(), input stream
        The stream containing data to be scrambled.
    source: USBRawSuperSpeedStream(), output stream
        The stream carrying the scrambled output.
Parameters
----------
    initial_value: 16-bit int, optional
        The initial value for the LFSR. Optional.
"""
def __init__(self, initial_value=0x7dbd):
self._initial_value = initial_value
#
# I/O port
#
self.clear = Signal()
self.enable = Signal()
self.hold = Signal()
self.sink = USBRawSuperSpeedStream()
self.source = USBRawSuperSpeedStream()
# Debug signaling.
self.lfsr_state = Signal.like(self.source.data)
def elaborate(self, platform):
m = Module()
sink = self.sink
source = self.source
# Detect when we're sending a comma; which should reset our scrambling LFSR.
comma_present = stream_word_matches_symbol(sink, 0, symbol=COM)
# Create our inner LFSR, which should advance whenever our input streams do.
m.submodules.lfsr = lfsr = ScramblerLFSR(initial_value=self._initial_value)
m.d.comb += [
lfsr.clear .eq(self.clear | comma_present),
lfsr.advance .eq(sink.valid & source.ready & ~self.hold)
]
# Pass through non-scrambled signals directly.
m.d.comb += [
source.ctrl .eq(sink.ctrl),
source.valid .eq(sink.valid),
sink.ready .eq(source.ready)
]
# If we have any non-control words, scramble them by overriding our data assignment above
# with the relevant data word XOR'd with our LFSR value. Note that control words are -never-
# scrambled, per [USB3.2: Appendix B]
for i in range(4):
is_data_code = ~sink.ctrl[i]
lfsr_word = lfsr.value.word_select(i, 8)
with m.If(self.enable & is_data_code):
m.d.comb += source.data.word_select(i, 8).eq(sink.data.word_select(i, 8) ^ lfsr_word)
with m.Else():
m.d.comb += source.data.word_select(i, 8).eq(sink.data.word_select(i, 8))
# Connect up our debug outputs.
m.d.comb += [
self.lfsr_state.eq(lfsr.value)
]
return m
class Descrambler(Scrambler):
""" USB3-compliant data descrambler.
This module descrambles the received data stream. K-codes are not affected.
This module automatically resets itself whenever a COM alignment character is seen.
Attributes
----------
enable: Signal(), input
When high, data scrambling is enabled. When low, data is passed through without scrambling.
sink: USBRawSuperSpeedStream(), input stream
The stream containing data to be descrambled.
source: USBRawSuperSpeedStream(), output stream
        The stream carrying the descrambled output.
Parameters
----------
    initial_value: 16-bit int, optional
        The initial value for the LFSR. Optional.
"""
    def __init__(self, initial_value=0xffff):
        # The parent class already records the initial value.
        super().__init__(initial_value=initial_value)
if __name__ == "__main__":
unittest.main()
|
'''
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
'''
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTClient
import logging
import time
import argparse
import json
import datetime
AllowedActions = ['both', 'publish', 'subscribe']
PATH_TO_HOME='/home/pi/iot-temp/ping_service'
class PingService():
def __init__(self):
# Read in command-line parameters
parser = argparse.ArgumentParser()
parser.add_argument("-e", "--endpoint", action="store", default="a3cezb6rg1vyed-ats.iot.us-west-2.amazonaws.com", dest="host", help="Your AWS IoT custom endpoint")
parser.add_argument("-r", "--rootCA", action="store", default="{}/root-CA.crt".format(PATH_TO_HOME), dest="rootCAPath", help="Root CA file path")
parser.add_argument("-c", "--cert", action="store", default="{}/PL-student.cert.pem".format(PATH_TO_HOME), dest="certificatePath", help="Certificate file path")
parser.add_argument("-k", "--key", action="store", default="{}/PL-student.private.key".format(PATH_TO_HOME), dest="privateKeyPath", help="Private key file path")
parser.add_argument("-p", "--port", action="store", dest="port", type=int, help="Port number override")
parser.add_argument("-w", "--websocket", action="store_true", dest="useWebsocket", default=False,
help="Use MQTT over WebSocket")
parser.add_argument("-id", "--clientId", action="store", dest="clientId", default="pl19-99",
help="Targeted client id")
parser.add_argument("-t", "--topic", action="store", dest="topic", default="pl19/event", help="Event topic")
parser.add_argument("-m", "--mode", action="store", dest="mode", default="both",
help="Operation modes: %s"%str(AllowedActions))
parser.add_argument("-M", "--message", action="store", dest="message", default="Hello World!",
help="Message to publish")
args = parser.parse_args()
self.host = args.host
self.rootCAPath = args.rootCAPath
self.certificatePath = args.certificatePath
self.privateKeyPath = args.privateKeyPath
self.port = args.port
self.useWebsocket = args.useWebsocket
self.clientId = args.clientId
self.topic = args.topic
if args.mode not in AllowedActions:
parser.error("Unknown --mode option %s. Must be one of %s" % (args.mode, str(AllowedActions)))
exit(2)
if args.useWebsocket and args.certificatePath and args.privateKeyPath:
parser.error("X.509 cert authentication and WebSocket are mutual exclusive. Please pick one.")
exit(2)
if not args.useWebsocket and (not args.certificatePath or not args.privateKeyPath):
parser.error("Missing credentials for authentication.")
exit(2)
# Port defaults
if args.useWebsocket and not args.port: # When no port override for WebSocket, default to 443
self.port = 443
if not args.useWebsocket and not args.port: # When no port override for non-WebSocket, default to 8883
self.port = 8883
# Configure logging
streamHandler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
streamHandler.setFormatter(formatter)
# Init AWSIoTMQTTClient
        self.mqtt_client = AWSIoTMQTTClient(self.clientId)
self.mqtt_client.configureEndpoint(self.host, self.port)
self.mqtt_client.configureCredentials(self.rootCAPath, self.privateKeyPath, self.certificatePath)
# AWSIoTMQTTClient connection configuration
self.mqtt_client.configureAutoReconnectBackoffTime(1, 32, 20)
self.mqtt_client.configureOfflinePublishQueueing(-1) # Infinite offline Publish queueing
self.mqtt_client.configureDrainingFrequency(2) # Draining: 2 Hz
self.mqtt_client.configureConnectDisconnectTimeout(10) # 10 sec
self.mqtt_client.configureMQTTOperationTimeout(5) # 5 sec
def start(self):
self.mqtt_client.connect()
self.mqtt_client.subscribe("pl19/notification", 1, self.customCallback)
time.sleep(2)
while True:
time.sleep(5)
def replyToPing(self,sequence):
pingData = {}
pingData['sequence'] = sequence
pingData['message'] = "Ping response."
message = {}
message['device_mac'] = "b8:27:eb:f1:96:c4"
message['timestamp'] = str(datetime.datetime.now())
message['event_id'] = 1
message['event'] = pingData
messageJson = json.dumps(message)
self.mqtt_client.publishAsync("pl19/event", messageJson, 1)
print('Published topic %s: %s\n' % (self.topic, messageJson))
# Custom MQTT message callback
def customCallback(self,client, userdata, message):
print("Received a new message: ")
messageContent = json.loads(message.payload.decode('utf-8'))
messageData = messageContent['event']
print(messageContent)
print(messageData['message'])
print("Sequence ", messageData['sequence'])
print("from topic: ")
print(message.topic)
print("--------------\n\n")
if messageContent['event_id'] == 0:
            self.replyToPing(messageData['sequence'])
def run():
PS = PingService()
PS.start()
if __name__ == '__main__':
run()
|
# -*- coding: utf-8 -*-
from typing import List
class Solution:
def twoCitySchedCost(self, costs: List[List[int]]) -> int:
differences, result = [], 0
for a_cost, b_cost in costs:
differences.append(a_cost - b_cost)
result += a_cost
differences.sort(reverse=True)
return result - sum(differences[: len(costs) // 2])
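# Greedy rationale, worked on the test case below: pay a_cost for everyone
# (10 + 30 + 400 + 30 = 470), then refund the len(costs) // 2 largest
# (a_cost - b_cost) differences by sending those people to city B instead.
# Differences sorted descending are [350, 10, -10, -170]; refunding the top
# two gives 470 - (350 + 10) = 110.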
if __name__ == "__main__":
solution = Solution()
assert 110 == solution.twoCitySchedCost([[10, 20], [30, 200], [400, 50], [30, 20]])
|
import unittest
import numpy.testing as testing
import numpy as np
import hpgeom as hpg
import healsparse
class CoverageMapTestCase(unittest.TestCase):
def test_coverage_map_float(self):
"""
Test coverage_map functionality for floats
"""
nside_coverage = 16
nside_map = 512
# Number of non-masked pixels in the coverage map resolution
non_masked_px = 10.5
nfine = (nside_map//nside_coverage)**2
full_map = np.zeros(hpg.nside_to_npixel(nside_map)) + hpg.UNSEEN
full_map[0: int(non_masked_px*nfine)] = 1 + np.random.random(size=int(non_masked_px*nfine))
# Generate sparse map
sparse_map = healsparse.HealSparseMap(healpix_map=full_map, nside_coverage=nside_coverage)
# Build the "original" coverage map
cov_map_orig = self.compute_cov_map(nside_coverage, non_masked_px, nfine,
sparse_map._cov_map.bit_shift)
# Get the built coverage map
cov_map = sparse_map.coverage_map
# Test the coverage map generation and lookup
testing.assert_array_almost_equal(cov_map_orig, cov_map)
def test_coverage_map_int(self):
"""
Test coverage_map functionality for ints
"""
nside_coverage = 16
nside_map = 512
# Number of non-masked pixels in the coverage map resolution
non_masked_px = 10.5
nfine = (nside_map//nside_coverage)**2
sentinel = healsparse.utils.check_sentinel(np.int32, None)
full_map = np.zeros(hpg.nside_to_npixel(nside_map), dtype=np.int32) + sentinel
full_map[0: int(non_masked_px*nfine)] = 1
sparse_map = healsparse.HealSparseMap(healpix_map=full_map,
nside_coverage=nside_coverage,
sentinel=sentinel)
cov_map_orig = self.compute_cov_map(nside_coverage, non_masked_px, nfine,
sparse_map._cov_map.bit_shift)
cov_map = sparse_map.coverage_map
testing.assert_array_almost_equal(cov_map_orig, cov_map)
def test_coverage_map_recarray(self):
"""
Test coverage_map functionality for a recarray
"""
nside_coverage = 16
nside_map = 512
# Number of non-masked pixels in the coverage map resolution
non_masked_px = 10.5
nfine = (nside_map//nside_coverage)**2
dtype = [('a', np.float64),
('b', np.int32)]
sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map,
dtype, primary='a')
sparse_map.update_values_pix(np.arange(int(non_masked_px*nfine)),
np.ones(1, dtype=dtype))
cov_map_orig = self.compute_cov_map(nside_coverage, non_masked_px, nfine,
sparse_map._cov_map.bit_shift)
cov_map = sparse_map.coverage_map
testing.assert_array_almost_equal(cov_map_orig, cov_map)
def test_coverage_map_widemask(self):
"""
Test coverage_map functionality for wide masks
"""
nside_coverage = 16
nside_map = 512
# Number of non-masked pixels in the coverage map resolution
non_masked_px = 10.5
nfine = (nside_map//nside_coverage)**2
# Do a 1-byte wide
sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map,
healsparse.WIDE_MASK,
wide_mask_maxbits=2)
# Set bits in different columns
sparse_map.set_bits_pix(np.arange(int(non_masked_px*nfine)), [1])
cov_map_orig = self.compute_cov_map(nside_coverage, non_masked_px, nfine,
sparse_map._cov_map.bit_shift)
cov_map = sparse_map.coverage_map
testing.assert_array_almost_equal(cov_map_orig, cov_map)
# Do a 3-byte wide
sparse_map = healsparse.HealSparseMap.make_empty(nside_coverage, nside_map,
healsparse.WIDE_MASK,
wide_mask_maxbits=24)
# Set bits in different columns
sparse_map.set_bits_pix(np.arange(int(2*nfine)), [2])
sparse_map.set_bits_pix(np.arange(int(non_masked_px*nfine)), [20])
cov_map_orig = self.compute_cov_map(nside_coverage, non_masked_px, nfine,
sparse_map._cov_map.bit_shift)
cov_map = sparse_map.coverage_map
testing.assert_array_almost_equal(cov_map_orig, cov_map)
def compute_cov_map(self, nside_coverage, non_masked_px, nfine, bit_shift):
cov_map_orig = np.zeros(hpg.nside_to_npixel(nside_coverage), dtype=np.float64)
idx_cov = np.right_shift(np.arange(int(non_masked_px*nfine)), bit_shift)
unique_idx_cov = np.unique(idx_cov)
idx_counts = np.bincount(idx_cov, minlength=hpg.nside_to_npixel(nside_coverage)).astype(np.float64)
cov_map_orig[unique_idx_cov] = idx_counts[unique_idx_cov]/nfine
return cov_map_orig
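    # Worked example with the defaults above: nfine = (512 // 16)**2 = 1024
    # fine pixels per coverage pixel (so bit_shift is 10), and the
    # 10.5 * nfine set pixels fill coverage pixels 0-9 completely
    # (fraction 1.0) and pixel 10 halfway (fraction 0.5).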
def test_large_coverage_map_warning(self):
"""
Test coverage_map raises warning for large
values of nside_coverage
"""
nside_coverage = 256
nside_map = 512
        # Generate sparse map and check that it raises a warning
testing.assert_warns(ResourceWarning, healsparse.HealSparseMap.make_empty, nside_sparse=nside_map,
nside_coverage=nside_coverage, dtype=np.float32)
if __name__ == '__main__':
unittest.main()
|
"""Device Records Classes."""
from fmcapi.api_objects.apiclasstemplate import APIClassTemplate
from fmcapi.api_objects.policy_services.accesspolicies import AccessPolicies
from fmcapi.api_objects.status_services import TaskStatuses
import time
import logging
import warnings
class DeviceRecords(APIClassTemplate):
"""The DeviceRecords Object in the FMC."""
VALID_JSON_DATA = [
"id",
"name",
"type",
"hostName",
"natID",
"regKey",
"license_caps",
"accessPolicy",
]
    VALID_FOR_KWARGS = VALID_JSON_DATA + [
        "acp_name",
        "acp_id",
        "model",
        "modelId",
        "modelNumber",
        "modelType",
        "healthStatus",
        "healthPolicy",
        "version",
        "sw_version",
        "deviceGroup",
        "prohibitPacketTransfer",
        "keepLocalEvents",
        "ftdMode",
    ]
URL_SUFFIX = "/devices/devicerecords"
REQUIRED_FOR_POST = ["accessPolicy", "hostName", "regKey"]
REQUIRED_FOR_PUT = ["id"]
LICENSES = ["BASE", "MALWARE", "URLFilter", "THREAT", "VPN", "URL"]
def __init__(self, fmc, **kwargs):
"""
Initialize DeviceRecords object.
:param fmc (object): FMC object
:param **kwargs: Any other values passed during instantiation.
:return: None
"""
super().__init__(fmc, **kwargs)
logging.debug("In __init__() for DeviceRecords class.")
self.parse_kwargs(**kwargs)
def parse_kwargs(self, **kwargs):
"""
Parse the kwargs and set self variables to match.
:return: None
"""
super().parse_kwargs(**kwargs)
logging.debug("In parse_kwargs() for DeviceRecords class.")
if "acp_name" in kwargs:
self.acp(name=kwargs["acp_name"])
def licensing(self, action, name="BASE"):
"""
Associate licenses with this device record.
:param action: (str) 'add', 'remove', 'clear'
:param name: (str) Value from LICENSES constant.
:return: None
"""
logging.debug("In licensing() for DeviceRecords class.")
if action == "add":
if name in self.LICENSES:
if "license_caps" in self.__dict__:
self.license_caps.append(name)
self.license_caps = list(set(self.license_caps))
else:
self.license_caps = [name]
logging.info(f'License "{name}" added to this DeviceRecords object.')
else:
logging.warning(
f"{name} not found in {self.LICENSES}. Cannot add license to DeviceRecords."
)
elif action == "remove":
if name in self.LICENSES:
if "license_caps" in self.__dict__:
try:
self.license_caps.remove(name)
except ValueError:
logging.warning(
f"{name} is not assigned to this devicerecord thus cannot be removed."
)
logging.info(
f'License "{name}" removed from this DeviceRecords object.'
)
else:
logging.warning(
f"{name} is not assigned to this devicerecord thus cannot be removed."
)
else:
logging.warning(
f"{name} not found in {self.LICENSES}. Cannot remove license from DeviceRecords."
)
elif action == "clear":
if "license_caps" in self.__dict__:
del self.license_caps
logging.info("All licensing removed from this DeviceRecords object.")
def acp(self, name=""):
"""
Associate AccessPolicy with this device.
:param name: (str) Name of ACP.
:return: None
"""
logging.debug("In acp() for DeviceRecords class.")
acp = AccessPolicies(fmc=self.fmc)
acp.get(name=name)
if "id" in acp.__dict__:
self.accessPolicy = {"id": acp.id, "type": acp.type}
else:
logging.warning(
f"Access Control Policy {name} not found. Cannot set up accessPolicy for DeviceRecords."
)
def wait_for_task(self, task, wait_time=10):
"""
Pause configuration script and wait for device registration to complete.
:param task: (dict) task["id": (str)]
:param wait_time: (int) Seconds to wait before rechecking.
:return: None
"""
task_completed_states = ["Success", "SUCCESS", "COMPLETED"]
try:
status = TaskStatuses(fmc=self.fmc, id=task["id"])
current_status = status.get()
"""
Task Status for new device registration behaves differently than other tasks
On new device registration, a task is sent for the initial registration. After completion
the UUID is deleted without any change in task status. So we check to see if the object no longer exists
to assume the registration is complete. After registration, discovery of the device begins, but there is
no way to check for this with a task status. The device can't be modified during this time, but a new
device registration can begin.
OTOH, a device HA operation will update its status to "Success" on completion. Hence the two different
checks.
"""
while (
current_status["status"] is not None
and current_status["status"] not in task_completed_states
):
# Lot of inconsistencies with the type of data a task can return
if "taskType" in current_status.keys():
logging.info(
f"Task: {current_status['taskType']} {current_status['status']} {current_status['id']}"
)
time.sleep(wait_time)
current_status = status.get()
else:
logging.info(
f"Task: {current_status['status']} {current_status['id']}"
)
time.sleep(wait_time)
current_status = status.get()
logging.info(f"Task: {current_status['status']} {current_status['id']}")
except Exception as e:
logging.info(type(e), e)
def post(self, **kwargs):
"""POST to FMC API."""
logging.debug("In post() for DeviceRecords class.")
response = super().post(**kwargs)
# self.wait_for_task(task=response["metadata"]["task"], wait_time=30) # Doesn't work yet.
if "post_wait_time" in kwargs:
self.post_wait_time = kwargs["post_wait_time"]
else:
self.post_wait_time = 300
logging.info(
f"DeviceRecords registration task submitted. "
f"Waiting {self.post_wait_time} seconds for it to complete."
)
time.sleep(self.post_wait_time)
return response
class Device(DeviceRecords):
"""
Dispose of this Class after 20210101.
Use DeviceRecords() instead.
"""
def __init__(self, fmc, **kwargs):
warnings.resetwarnings()
warnings.warn("Deprecated: Device() should be called via DeviceRecords().")
super().__init__(fmc, **kwargs)
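# Minimal usage sketch (hypothetical names; assumes an authenticated
# fmcapi.FMC session bound to `fmc` and an existing ACP named 'Default ACP'):
#
#   device = DeviceRecords(fmc=fmc, hostName='10.0.0.1', regKey='secret',
#                          acp_name='Default ACP')
#   device.licensing(action='add', name='THREAT')
#   device.post(post_wait_time=300)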
|
"""
Compute LSH hash codes based on the provided functor on all or specific
descriptors from the configured index given a file-list of UUIDs.
When using an input file-list of UUIDs, we require that the UUIDs of
indexed descriptors be strings, or equality comparable to the UUIDs' string
representation.
We update a key-value store with the results of descriptor hash computation. We
assume the keys of the store are the integer hash values and the values of the
store are ``frozenset`` instances of descriptor UUIDs (hashable-type objects).
We also assume that no other source is concurrently modifying this key-value
store due to the need to modify the values of keys.
"""
import logging
import os
from smqtk.algorithms import (
get_lsh_functor_impls,
)
from smqtk.compute_functions import compute_hash_codes
from smqtk.representation import (
get_descriptor_index_impls,
get_key_value_store_impls,
)
from smqtk.utils import (
bin_utils,
plugin,
)
try:
from six.moves import cPickle as pickle
except ImportError:
import pickle
def uuids_for_processing(uuids, hash2uuids):
"""
Determine descriptor UUIDs that need processing based on what's already in
the given ``hash2uuids`` mapping, returning UUIDs that need processing.
:param uuids: Iterable of descriptor UUIDs.
    :type uuids: collections.Iterable[collections.Hashable]
:param hash2uuids: Existing mapping of computed hash codes to the UUIDs
of descriptors that generated the hash.
:type hash2uuids: smqtk.representation.KeyValueStore
:return: Iterator over UUIDs to process
:rtype: __generator[collections.Hashable]
"""
log = logging.getLogger(__name__)
already_there = frozenset(v for vs in hash2uuids.values() for v in vs)
skipped = 0
for uuid in uuids:
if uuid not in already_there:
yield uuid
else:
skipped += 1
log.debug("Skipped %d UUIDs already represented in previous hash table",
skipped)
def default_config():
return {
"utility": {
"report_interval": 1.0,
"use_multiprocessing": False,
},
"plugins": {
"descriptor_index":
plugin.make_config(get_descriptor_index_impls()),
"lsh_functor": plugin.make_config(get_lsh_functor_impls()),
"hash2uuid_kvstore":
plugin.make_config(get_key_value_store_impls()),
},
}
def cli_parser():
parser = bin_utils.basic_cli_parser(__doc__)
g_io = parser.add_argument_group("I/O")
g_io.add_argument("--uuids-list",
default=None, metavar="PATH",
help='Optional path to a file listing UUIDs of '
'descriptors to computed hash codes for. If '
'not provided we compute hash codes for all '
'descriptors in the configured descriptor index.')
return parser
def main():
args = cli_parser().parse_args()
config = bin_utils.utility_main_helper(default_config, args)
log = logging.getLogger(__name__)
#
# Load configuration contents
#
uuid_list_filepath = args.uuids_list
report_interval = config['utility']['report_interval']
use_multiprocessing = config['utility']['use_multiprocessing']
#
# Checking input parameters
#
if (uuid_list_filepath is not None) and \
not os.path.isfile(uuid_list_filepath):
raise ValueError("UUIDs list file does not exist!")
#
# Loading stuff
#
log.info("Loading descriptor index")
#: :type: smqtk.representation.DescriptorIndex
descriptor_index = plugin.from_plugin_config(
config['plugins']['descriptor_index'],
get_descriptor_index_impls()
)
log.info("Loading LSH functor")
#: :type: smqtk.algorithms.LshFunctor
lsh_functor = plugin.from_plugin_config(
config['plugins']['lsh_functor'],
get_lsh_functor_impls()
)
log.info("Loading Key/Value store")
#: :type: smqtk.representation.KeyValueStore
hash2uuids_kvstore = plugin.from_plugin_config(
config['plugins']['hash2uuid_kvstore'],
get_key_value_store_impls()
)
# Iterate either over what's in the file given, or everything in the
# configured index.
def iter_uuids():
if uuid_list_filepath:
log.info("Using UUIDs list file")
with open(uuid_list_filepath) as f:
for l in f:
yield l.strip()
else:
log.info("Using all UUIDs resent in descriptor index")
for k in descriptor_index.keys():
yield k
#
# Compute codes
#
log.info("Starting hash code computation")
kv_update = {}
for uuid, hash_int in \
compute_hash_codes(uuids_for_processing(iter_uuids(),
hash2uuids_kvstore),
descriptor_index, lsh_functor,
report_interval,
use_multiprocessing, True):
# Get original value in KV-store if not in update dict.
if hash_int not in kv_update:
kv_update[hash_int] = hash2uuids_kvstore.get(hash_int, frozenset())
kv_update[hash_int] |= frozenset([uuid])
if kv_update:
log.info("Updating KV store... (%d keys)" % len(kv_update))
hash2uuids_kvstore.add_many(kv_update)
log.info("Done")
if __name__ == '__main__':
main()
|
import numpy as np
import scipy.constants
import scipy.special
def getCoulombLogarithm(T, n):
"""
Calculates the Coulomb logarithm according to the formula given in
Wesson's book "Tokamaks".
:param float T: Plasma temperature (eV).
:param float n: Plasma density (m^-3).
"""
return 14.9 - 0.5*np.log(n / 1e20) + np.log(T / 1e3)
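# Worked example: for T = 1e3 eV and n = 1e20 m^-3 both logarithm terms
# vanish, so getCoulombLogarithm(1e3, 1e20) == 14.9.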
def getEc(T, n):
"""
Calculates the Connor-Hastie critical electric field, below which no
runaway electrons can be generated.
:param float T: Plasma temperature (eV).
:param float n: Plasma density (m^-3).
"""
logLambda = getCoulombLogarithm(T, n)
c = scipy.constants.c
e = scipy.constants.e
me = scipy.constants.m_e
eps0 = scipy.constants.epsilon_0
return (n*logLambda*e**3) / (4*np.pi*eps0**2 * me * c**2)
def getConnorHastieCriticalField(T, n): return getEc(T, n)
def getED(T, n):
"""
Calculates the Dreicer electric field at the given plasma temperature and
density, giving the electric field at which all electrons are accelerated
to the runaway region.
:param float T: Plasma temperature (eV).
:param float n: Plasma density (m^-3).
"""
c = scipy.constants.c
e = scipy.constants.e
me = scipy.constants.m_e
Ec = getEc(T, n)
return Ec * me * c**2 / (e*T)
def getDreicerElectricField(T, n): return getED(T, n)
def getTauEETh(T, n):
    """
    Calculates the thermal electron-electron collision time.
    :param float T: Plasma temperature (eV).
    :param float n: Plasma density (m^-3).
    """
    betaTh2 = getBetaThermal(T)**2
    return getTauEERel(T, n) * betaTh2*np.sqrt(betaTh2)
def getThermalElectronCollisionFrequency(T, n): return getTauEETh(T, n)
def getBetaThermal(T): return getNormalizedThermalSpeed(T)
def getNormalizedThermalSpeed(T):
"""
Calculates the thermal electron speed, v_{th}, normalized
to the speed of light in vacuum.
"""
return getThermalSpeed(T) / scipy.constants.c
def getThermalSpeed(T):
"""
Calculates the thermal electron speed, v_{th}, for the given
plasma temperature.
    :param float T: Electron temperature (eV).
"""
return np.sqrt(2*scipy.constants.e*T / scipy.constants.m_e)
def getTauEERel(T, n):
"""
    Calculates the relativistic electron-electron collision time.
:param float T: Plasma temperature (eV).
:param float n: Plasma density (m^-3).
"""
c = scipy.constants.c
e = scipy.constants.e
me = scipy.constants.m_e
r0 = scipy.constants.physical_constants['classical electron radius'][0]
C = 4*np.pi * r0**2 * c
logLambda = getCoulombLogarithm(T, n)
return 1/(logLambda * n * C)
def getRelativisticElectronCollisionFrequency(T, n): return getTauEERel(T, n)
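# Minimal usage sketch (illustrative values, not from the source):
#   T, n = 1e3, 1e20   # 1 keV plasma at 1e20 m^-3
#   print(getCoulombLogarithm(T, n))                   # ~14.9
#   print(getConnorHastieCriticalField(T, n))          # critical field (V/m)
#   print(getDreicerElectricField(T, n))               # Dreicer field (V/m)
#   print(getThermalElectronCollisionFrequency(T, n))  # thermal e-e time (s)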
|
l1 = int(input())
c1 = int(input())
l2 = int(input())
c2 = int(input())
a1 = (l1 * c1)
a2 = (l2 * c2)
print(a1 if a1 >= a2 else a2)
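# Example run: inputs 3, 4, 5, 2 give areas 12 and 10, so the program prints 12.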
|
###MODULES###
import numpy as np
import pandas as pd
import os, sys
import time as t
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.patches import Circle
from matplotlib.ticker import MaxNLocator
import pathlib
from matplotlib.colors import Normalize
from scipy import interpolate
norm = Normalize()
from resource import getrusage, RUSAGE_SELF
import random
import scipy.ndimage as ndimage
mpl.rcParams['axes.linewidth'] = 1.5 #set the value globally
mpl.rcParams['contour.negative_linestyle'] = 'solid'
#CONSTANTS
cwd_PYTHON = os.getcwd() + '/'
RHO = 1000.0
NX = 512
PERIOD = 0.1
RADIUSLARGE = 0.002
RADIUSSMALL = 0.5*RADIUSLARGE
maxR = 0.025/RADIUSLARGE
csfont = {'fontname':'Times New Roman'}
#System Arguments
config = sys.argv[1]
Re = sys.argv[2]#"2"
perNumber = int(sys.argv[3])#5
local = int(sys.argv[4])
minVal, maxVal = -6.0,6.0
dX = 2.0*maxR/(1.0*NX)
if local:
cwd_FIGS = cwd_PYTHON+"../../Figures/VorticityDetection/{0}/".format(config)
pathlib.Path(cwd_FIGS).mkdir(parents=True, exist_ok=True)
cwd_Re = cwd_PYTHON+'../../FieldData/TestField/'
cwd_POS = cwd_Re
else:
cwd_FIGS = cwd_PYTHON+'../Figures/Bifurcation/{0}/'.format(config)
pathlib.Path(cwd_FIGS).mkdir(parents=True, exist_ok=True)
cwd_Re = cwd_PYTHON+'../{0}/Re{1}/VTK/AVG/'.format(config,Re)
cwd_POS = cwd_PYTHON+'../PosData/{0}/Re{1}/'.format(config,Re)
# constructs a filepath for the pos data of Re = $Re
def pname(cwd):
return cwd+"pd.txt"
def GetPosData(cwd,time,config):
global RADIUSLARGE
data = pd.read_csv(pname(cwd),delimiter=' ')
if(config == 'V' or config == 'O'):
pos = data[data['time'] == time*2.0]
else:
pos = data[data['time'] == time]
pos = pos.reset_index(drop=True)
#Renormalize
pos['aXU'] /= RADIUSLARGE
pos['aXL'] /= RADIUSLARGE
pos['aYU'] /= RADIUSLARGE
pos['aYL'] /= RADIUSLARGE
pos['bXU'] /= RADIUSLARGE
pos['bXL'] /= RADIUSLARGE
pos['bYU'] /= RADIUSLARGE
pos['bYL'] /= RADIUSLARGE
return pos
def GetPosDataLength(cwd):
data = pd.read_csv(pname(cwd),delimiter=' ')
return len(data['time'])
def GetAvgFieldData(cwd,idx):
global RADIUSLARGE
#Load position data
#Columns
#mx.flat my.flat avgW.flat avgP.flat avgUx.flat avgUy.flat
fieldData = pd.read_csv(cwd+'AVG_%04d.csv'%idx,delimiter=' ')
print(fieldData.head())
    #Reshape field columns directly to Nx x Ny arrays (no intermediate lists)
    Nx, Ny = 1024, 1024
    mxArr = fieldData['mx'].to_numpy().reshape((Nx,Ny))/RADIUSLARGE
    myArr = fieldData['my'].to_numpy().reshape((Nx,Ny))/RADIUSLARGE
    WArr = fieldData['avgW'].to_numpy().reshape((Nx,Ny))
    PArr = fieldData['avgP'].to_numpy().reshape((Nx,Ny))
    UxArr = fieldData['avgUx'].to_numpy().reshape((Nx,Ny))/RADIUSLARGE
    UyArr = fieldData['avgUy'].to_numpy().reshape((Nx,Ny))/RADIUSLARGE
return (mxArr, myArr, WArr, PArr, UxArr, UyArr)
def AddDiscsToPlot(ax,pos):
#Add Discs
circle1 = Circle((pos.loc[0,'aXU_rot'], pos.loc[0,'aYU_rot']), 1.0, facecolor=(0.25,)*3,
linewidth=1,alpha=1.0,zorder=6)
ax.add_patch(circle1)
circle2 = Circle((pos.loc[0,'aXL_rot'], pos.loc[0,'aYL_rot']), 0.5, facecolor=(0.25,)*3,
linewidth=1,alpha=1.0,zorder=6)
ax.add_patch(circle2)
circle3 = Circle((pos.loc[0,'bXU_rot'], pos.loc[0,'bYU_rot']), 1.0, facecolor=(0.75,)*3,
linewidth=1,alpha=1.0,zorder=6)
ax.add_patch(circle3)
circle4 = Circle((pos.loc[0,'bXL_rot'], pos.loc[0,'bYL_rot']), 0.5, facecolor=(0.75,)*3,
linewidth=1,alpha=1.0,zorder=6)
ax.add_patch(circle4)
#Add Swimmer "springs"
ax.plot([pos.loc[0,'aXU_rot'],pos.loc[0,'aXL_rot']],
[pos.loc[0,'aYU_rot'],pos.loc[0,'aYL_rot']],
color=(0.25,)*3,linewidth=3,zorder=6)
ax.plot([pos.loc[0,'bXU_rot'],pos.loc[0,'bXL_rot']],
[pos.loc[0,'bYU_rot'],pos.loc[0,'bYL_rot']],
color=(0.75,)*3,linewidth=3,zorder=6)
return
def set_size(w,h, ax=None):
""" w, h: width, height in inches """
if not ax: ax=plt.gca()
l = ax.figure.subplotpars.left
r = ax.figure.subplotpars.right
t = ax.figure.subplotpars.top
b = ax.figure.subplotpars.bottom
figw = float(w)/(r-l)
figh = float(h)/(t-b)
ax.figure.set_size_inches(figw, figh)
return ax
def Rotate(xy, theta):
# https://en.wikipedia.org/wiki/Rotation_matrix#In_two_dimensions
#First Rotate based on Theta
#Allocate Arrays
rotationMatrix = np.zeros((2,2))
#Calculate rotation matrix
rotationMatrix[0,0] = np.cos(theta)
rotationMatrix[0,1] = -1.0*np.sin(theta)
rotationMatrix[1,0] = np.sin(theta)
rotationMatrix[1,1] = np.cos(theta)
return rotationMatrix.dot(xy)
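# Example: Rotate(np.array([1.0, 0.0]), np.pi / 2) is approximately [0, 1].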
def CalcLabAngle(pos):
#Find swimming axis (normal y-axis)
xU, xL = pos.loc[0,'aXU'], pos.loc[0,'aXL']
yU, yL = pos.loc[0,'aYU'], pos.loc[0,'aYL']
labX = xU - xL
labY = yU - yL
length = np.hypot(labX,labY)
normX = labX/length
normY = labY/length
#2) Calculate Theta
if(normX <= 0.0):
theta = np.arccos(normY)
else:
theta = -1.0*np.arccos(normY)+2.0*np.pi
print('theta = ',theta*180.0/np.pi)
return 2.0*np.pi - theta
def InterpolateToNewCoordinateSystem(x,y,mx,my,arrayUx,arrayUy,arrayW):
#Create a uniform mesh for the interpolated velocity vectors!
mx_new, my_new = np.meshgrid(x,y)
    #Interpolate Ux and Uy from the original cartesian coordinates to the new ones
    #Griddata
    print('About to interpolate field data')
print('peak memory = ',getrusage(RUSAGE_SELF).ru_maxrss)
sys.stdout.flush()
arrayUx_new=interpolate.griddata((mx.flatten(),my.flatten()),arrayUx.flatten() , (mx_new,my_new),method='linear')
print('X transformation complete')
print('peak memory = ',getrusage(RUSAGE_SELF).ru_maxrss)
sys.stdout.flush()
arrayUy_new=interpolate.griddata((mx.flatten(),my.flatten()),arrayUy.flatten() , (mx_new,my_new),method='linear')
print('Coordinate Transformation Complete!')
print('peak memory = ',getrusage(RUSAGE_SELF).ru_maxrss)
sys.stdout.flush()
arrayW_new=interpolate.griddata((mx.flatten(),my.flatten()),arrayW.flatten() , (mx_new,my_new),method='linear')
print('Vorticity Transformation Complete!')
print('peak memory = ',getrusage(RUSAGE_SELF).ru_maxrss)
sys.stdout.flush()
return (arrayUx_new,arrayUy_new, arrayW_new)
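#Hedged toy example of the griddata call pattern used above (illustrative only;
#the sample points and target grid are assumptions, not simulation data).
def _demo_griddata():
    pts = np.random.rand(200, 2)
    vals = pts[:, 0] + pts[:, 1]
    gx, gy = np.meshgrid(np.linspace(0.2, 0.8, 3), np.linspace(0.2, 0.8, 3))
    #Linear interpolation of z = x + y back onto a small regular grid
    return interpolate.griddata((pts[:, 0], pts[:, 1]), vals, (gx, gy), method='linear')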
def RotateSimulation(cwd,time,mx,my,Ux,Uy,W,pos):
global RADIUSLARGE
#Shift x and y by the CM location
xCM = 0.25*(pos.loc[0,'aXU'] + pos.loc[0,'bXU'] + pos.loc[0,'aXL'] + pos.loc[0,'bXL'])
yCM = 0.25*(pos.loc[0,'aYU'] + pos.loc[0,'bYU'] + pos.loc[0,'aYL'] + pos.loc[0,'bYL'])
#Do the same for mx and my
mx -= xCM
my -= yCM
#Shift pos data by xCM and yCM
pos['aXU'] -= xCM
pos['aXL'] -= xCM
pos['bXU'] -= xCM
pos['bXL'] -= xCM
pos['aYU'] -= yCM
pos['aYL'] -= yCM
pos['bYU'] -= yCM
pos['bYL'] -= yCM
#Rotate Reference frame by swimmer 1's axis
#Calculate Theta (Rotate by -Theta)
theta_rotate = CalcLabAngle(pos)
print('theta_rotate = ',theta_rotate*180.0/np.pi)
mxy = np.array([mx.flatten(),my.flatten()])
mxy_rot = np.zeros((2,1024*1024))
#Do the same for the U field
Uxy = np.array([Ux.flatten(),Uy.flatten()])
Uxy_rot = np.zeros((2,1024*1024))
for jdx in range(1024*1024):
mxy_rot[:,jdx] = Rotate(mxy[:,jdx],theta_rotate)
Uxy_rot[:,jdx] = Rotate(Uxy[:,jdx],theta_rotate)
mx_rot = mxy_rot[0,:].reshape((1024,1024))
my_rot = mxy_rot[1,:].reshape((1024,1024))
Ux_rot = Uxy_rot[0,:].reshape((1024,1024))
Uy_rot = Uxy_rot[1,:].reshape((1024,1024))
aU_pos = np.array([pos.loc[0,'aXU'],pos.loc[0,'aYU']])
aL_pos = np.array([pos.loc[0,'aXL'],pos.loc[0,'aYL']])
bU_pos = np.array([pos.loc[0,'bXU'],pos.loc[0,'bYU']])
bL_pos = np.array([pos.loc[0,'bXL'],pos.loc[0,'bYL']])
aU_rot = Rotate(aU_pos,theta_rotate)
print('aU = ',aU_pos)
print('aU_rot = ',aU_rot)
aL_rot = Rotate(aL_pos,theta_rotate)
bU_rot = Rotate(bU_pos,theta_rotate)
bL_rot = Rotate(bL_pos,theta_rotate)
pos['aXU_rot'], pos['aYU_rot'] = aU_rot[0], aU_rot[1]
pos['aXL_rot'], pos['aYL_rot'] = aL_rot[0], aL_rot[1]
pos['bXU_rot'], pos['bYU_rot'] = bU_rot[0], bU_rot[1]
pos['bXL_rot'], pos['bYL_rot'] = bL_rot[0], bL_rot[1]
#Interpolate onto a new coordinate system
x = np.linspace(-0.025/RADIUSLARGE,0.025/RADIUSLARGE,512)
y = np.linspace(-0.025/RADIUSLARGE,0.025/RADIUSLARGE,512)
mx_stream, my_stream = np.meshgrid(x,y)
interpUx, interpUy, interpW = InterpolateToNewCoordinateSystem(x,y,mx_rot,my_rot,Ux_rot,Uy_rot, W)
return (mx_stream.T, my_stream.T, interpW.T, interpUx.T, interpUy.T, pos)
def CalcPsi2D(fx,fy,NX,DX):
#From here, we are going to calculate the stream function
psi = np.zeros((NX,NX))
for idx in range(1,NX):
psi[idx,0] = psi[idx-1,0] - fy[idx,0]*DX
for idy in range(1,NX):
psi[:,idy] = psi[:,idy-1] + fx[:,idy]*DX
return psi
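#Hedged sanity check for CalcPsi2D (illustrative only; not called anywhere):
#for a uniform flow fx = U0, fy = 0 the column-wise integration above gives a
#stream function that grows linearly, psi[:, j] = U0*j*DX.
def _demo_calc_psi2d():
    NX, DX, U0 = 8, 0.1, 2.0
    fx = U0*np.ones((NX, NX))
    fy = np.zeros((NX, NX))
    psi = CalcPsi2D(fx, fy, NX, DX)
    assert np.allclose(psi[:, -1], U0*(NX - 1)*DX)
    return psi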
#Plot New mesh and interpolated velocity field Ux and Uy
def PlotAvgW(cwd,mx,my,W,Ux,Uy,pos,space,scale):
global FIGNUM, PERIOD,minVal,maxVal, Re, perNumber
#Here, we will visualize the velocity field on the new coordinate system
nRows, nCols = 1, 1
fig, ax = plt.subplots(nrows=nRows, ncols=nCols, num=0,figsize=(6,6),dpi=200)
#ax.set_title(r'Average Velocity Field',fontsize=12)
#Plot Streamlines
#Use two grids and combine them
UxT, UyT, WT = Ux.T, Uy.T, W.T
psi = CalcPsi2D(Ux,Uy,NX,dX)
print('psi.min() = ',psi.min())
print('psi.max() = ',psi.max())
sys.stdout.flush()
#Psi Contour
psi2 = ndimage.gaussian_filter(psi, sigma=5.0, order=0)
levels = MaxNLocator(nbins=21).tick_values(-1.0*max(abs(psi2.min()),psi2.max()), max(abs(psi2.min()),psi2.max()))
ax.contour(mx,my,psi2,colors='k',extend='both',levels=levels)
#PlotVorticity with imshow (interpolate to smooth)
ax.imshow(W.T,cmap='bwr',extent=(-1.0*maxR-0.5*dX,maxR+0.5*dX,
-1.0*maxR-0.5*dX,maxR+0.5*dX),
origin='lower',vmin=-1.0,vmax=1.0,interpolation='bilinear')
#Add swimmer
AddDiscsToPlot(ax,pos)
xmin = min(pos.loc[0,'aXU_rot'],pos.loc[0,'aXL_rot'],
pos.loc[0,'bXU_rot'],pos.loc[0,'bXL_rot'])
xmax = max(pos.loc[0,'aXU_rot'],pos.loc[0,'aXL_rot'],
pos.loc[0,'bXU_rot'],pos.loc[0,'bXL_rot'])
ymin = min(pos.loc[0,'aYU_rot'],pos.loc[0,'aYL_rot'],
pos.loc[0,'bYU_rot'],pos.loc[0,'bYL_rot'])
ymax = max(pos.loc[0,'aYU_rot'],pos.loc[0,'aYL_rot'],
pos.loc[0,'bYU_rot'],pos.loc[0,'bYL_rot'])
ax.axis([xmin-1.0,xmax+1.0,ymin-1.0,ymax+1.0])
fig.tight_layout()
fig.savefig(cwd+'W_{0}_Re{1}_per{2}_.png'.format(config,Re,perNumber))
fig.clf()
plt.close()
return
if __name__ == '__main__':
#Get AvgVel Field and Rotate Frame
#Save Vel Field as AvgUx and AvgUy
#READ ALL AVG FILES IN A SIMULATION DIRECTORY
#EXTRACT AVERAGE FIELD DATA INTO NUMPY ARRAYS
#PLOT AVERAGED FIELD DATA
#Simulation Parameters
#Extract Position Data
#Calculate # Periods
DUMP_INT = 20.0
nTime = GetPosDataLength(cwd_POS)
nPer = int(np.trunc(1.0*nTime/DUMP_INT))
#nPer = 2
#Paths to data and plots
cwd_DATA = cwd_Re
countPer = 0
for countPer in range(nPer):
if(countPer == perNumber):
AVGPlot = pathlib.Path(cwd_DATA+'AVG_%04d.csv'%countPer)
            if AVGPlot.exists():
                start = t.perf_counter()
#Get Avg Field Data
mx,my,avgW,avgP,avgUx,avgUy = GetAvgFieldData(cwd_DATA,countPer)
#Extract Position and Time Data
time = np.round(0.05 + countPer*PERIOD,2)
posData = GetPosData(cwd_POS,time,config)
#Plot Averaged Field Data
#Vorticity And Streamlines
mx,my,avgW,avgUx,avgUy,posData = RotateSimulation(cwd_PYTHON,time,mx,my,avgUx,avgUy,avgW,posData)
rotatedDict = {'mx':mx.flatten(),'my':my.flatten(),
'avgUx':avgUx.flatten(),'avgUy':avgUy.flatten(),
'avgW':avgW.flatten()
}
rotatedData = pd.DataFrame(data=rotatedDict)
rotatedData.to_csv(cwd_DATA+'AVGRot_%04d.csv'%countPer,index=False,sep=' ',float_format='%.5e')
posData.to_csv(cwd_POS+'pd_rot_%04d.csv'%countPer,index=False,sep=' ',float_format='%.5e')
                stend = t.perf_counter()
diff = stend - start
print('Time to run for 1 period = %.5fs'%diff)
sys.stdout.flush()
#Plot Flow Field Visual
PlotAvgW(cwd_FIGS,mx,my,avgW,avgUx,avgUy,posData,4,5)
|
#coding:utf-8
import os
from urllib.parse import quote
from flask import flash, url_for, redirect, render_template, request,\
current_app, session, make_response
from flask_login import login_user, logout_user, login_required,\
    current_user
from . import auth
from .auth_form import LoginForm, RegisterForm, ResetUsernameForm, \
ResetEmailForm, ResetPasswordForm, ForgetPasswordForm
from ..models import User, Role, User_Role_Relation, db, Follow
from ..email import send_email
from ..decorators import permission_required
@auth.route('/login',methods=['GET','POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.name.data).first()
if user is None:
            flash(u'Username does not exist; please check your input or register a new account')
return render_template('auth/login.html',form=form)
if not user.verify_password(form.password.data):
            flash(u'Incorrect password; please try again')
return render_template('auth/login.html',form=form)
login_user(user,form.remember_me.data)
        flash(u'Logged in successfully')
session['user_id'] = user.id
return redirect(request.args.get('next') or url_for('main.index'))
# response = make_response(redirect(request.args.get('next') or url_for('main.index')))
# response.set_cookie('user_id',str(user.id))
# return response
return render_template('auth/login.html',form=form)
@auth.route('/logout',methods=['GET'])
@login_required
def logout():
logout_user()
    flash(u'Logged out successfully')
response = make_response(redirect(url_for('main.index')))
response.delete_cookie('user_id')
return response
@auth.route('/register',methods=['GET','POST'])
def register():
form = RegisterForm()
if form.validate_on_submit():
user = User(username=form.name.data,
email=form.email.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
if form.email.data == os.environ['BLOG_ISLAND_MAIL_USERNAME']:
ship = User_Role_Relation(user_id=user.id,
role_id=Role.query.filter_by(rolename='Administrator').first().id,operate_id=user.id)
else:
ship = User_Role_Relation(user_id=user.id,
role_id=Role.query.filter_by(rolename='User').first().id,operate_id=user.id)
db.session.add(ship)
fans = Follow(star_id=user.id,fans_id=user.id)
db.session.add(fans)
token = user.generate_confirmation_token()
        send_email(form.email.data, u'New account email confirmation', 'auth/email/confirm', user=user, token=token)
        flash(u'Registration complete; a confirmation email has been sent to your inbox')
return redirect(url_for('main.index'))
return render_template('auth/register.html',form=form)
@auth.before_app_request
def keep_live():
user_id = session.get('user_id', None)
# user_id = request.cookies.get('user_id', None)
if not current_user.is_authenticated and user_id is not None:
login_user(User.query.get(int(user_id)))
@auth.before_app_request
def before_request():
if current_user.is_authenticated:
current_user.active()
        if not current_user.confirmed \
                and request.endpoint and not request.endpoint.startswith('auth.'):
            return redirect(url_for('auth.unconfirmed'))
@auth.route('/unconfirmed',methods=['GET'])
def unconfirmed():
if current_user.is_anonymous or current_user.confirmed:
return redirect(url_for('home.homepage',id=current_user.id))
return render_template('auth/unconfirmed.html')
@auth.route('/confirm',methods=['GET'])
@login_required
def resend_confirmation():
token = current_user.generate_confirmation_token()
    send_email(current_user.email, u'New account email confirmation',
               'auth/email/confirm', user=current_user, token=token)
    flash(u'A new confirmation email has been sent to your inbox')
return redirect(url_for('main.index'))
@auth.route('/confirm/<token>',methods=['GET'])
@login_required
def confirm(token):
if current_user.confirmed:
return redirect(url_for('main.index'))
if current_user.confirm(token):
        flash(u'Confirmation successful')
    else:
        flash(u'Expired or invalid confirmation link')
login_user(current_user)
return redirect(url_for('main.index'))
@auth.route('/reset_username',methods=['GET','POST'])
@login_required
@permission_required(Role.User)
def reset_username():
form = ResetUsernameForm()
if form.validate_on_submit():
user = User.query.filter_by(username=form.name.data).first()
if user is not None:
if user.username != current_user.username:
                flash(u'Username is already taken')
else:
                flash(u'The new username must be different from the current one; otherwise, cancel the change')
return redirect(url_for('auth.reset_username'))
current_user.username = form.name.data
db.session.add(current_user)
return redirect(url_for('home.homepage',id=current_user.id))
form.name.data = current_user.username
return render_template('auth/reset_username.html',form=form)
@auth.route('/reset_email',methods=['GET','POST'])
@login_required
@permission_required(Role.User)
def reset_email():
form = ResetEmailForm()
if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user is not None:
            if user.email != current_user.email:
                flash(u'Email is already taken')
            else:
                flash(u'The new email must be different from the current one; otherwise, cancel the change')
return redirect(url_for('auth.reset_email'))
token = current_user.generate_reset_token(form.email.data)
        send_email(form.email.data, u'Reset email confirmation',
                   'auth/email/confirm_reset_email', user=current_user, token=token)
        flash(u'A confirmation email for the address change has been sent to your inbox')
return redirect(url_for('home.homepage',id=current_user.id))
form.email.data = current_user.email
return render_template('auth/reset_email.html',form=form)
@auth.route('/confirm_reset_email/<token>',methods=['GET'])
@login_required
@permission_required(Role.User)
def confirm_reset_email(token):
if current_user.confirm_email(token):
        flash(u'Email updated successfully')
    else:
        flash(u'Expired or invalid link')
return redirect(url_for('home.homepage',id=current_user.id))
@auth.route('/reset_password',methods=['GET','POST'])
@login_required
@permission_required(Role.User)
def reset_password():
form = ResetPasswordForm()
if form.validate_on_submit():
if not current_user.verify_password(form.password.data):
            flash(u'Incorrect password')
return redirect(url_for('auth.reset_password'))
current_user.password = form.password1.data
db.session.add(current_user)
        flash(u'Password updated successfully')
return redirect(url_for('home.homepage',id=current_user.id))
return render_template('auth/reset_password.html',form=form)
@auth.route('/forget_password_sendemail',methods=['GET','POST'])
def forget_password_sendemail():
form = ForgetPasswordForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is None:
            flash(u'No account is registered with this email')
return redirect(url_for('auth.forget_password_sendemail'))
token = user.generate_reset_token(form.email.data,password=form.password.data)
        send_email(form.email.data, u'Forgotten password confirmation',
                   'auth/email/forget_password', user=user, token=token)
        flash(u'A password reset confirmation email has been sent to your inbox')
return redirect(url_for('main.index'))
return render_template('auth/forget_password_sendemail.html',form=form)
@auth.route('/forget_password_newpassword/<token>',methods=['GET','POST'])
def forget_password_newpassword(token):
if not User.confirm_reset_email(token):
        flash(u'Expired or invalid link')
    else:
        flash(u'Password updated successfully')
return redirect(url_for('main.index'))
@auth.route('/ask_for_lift_ban')
@login_required
def ask_for_lift_ban():
    send_email(current_app.config['BLOG_ISLAND_MAIL_SENDER'], u'User unban request',
               'auth/email/ask_for_lift_ban', user=current_user, sub=quote('Re: [Blog Island] User unban request'))
    flash(u'Your unban request has been submitted and will be handled within three business days; '
          u'if it has not been handled by then, please raise it in the complaints and feedback section')
current_user.ask_for_lift_ban = True
db.session.add(current_user)
return redirect(url_for('home.homepage',id=current_user.id))
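# Hedged usage sketch: this blueprint is normally registered in the app
# factory; the exact factory layout below is an assumption, not shown here.
#
#   from .auth import auth as auth_blueprint
#   app.register_blueprint(auth_blueprint, url_prefix='/auth')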
|
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.by import By
from utils import REQUESTS_WAIT_TIMEOUT
import string, random
class Page(object):
def __init__(self, driver):
self.driver = driver
def hover(self, element):
link = WebDriverWait(self.driver, 2).until(EC.visibility_of_element_located(element),'There is no element')
return ActionChains(self.driver).move_to_element(link).perform()
def click(self, element):
link = WebDriverWait(self.driver, 2).until(EC.element_to_be_clickable(element))
return link.click()
def validate_elements(self, element, index):
self.driver.implicitly_wait(2)
return self.driver.find_elements(*element)[index].text
def validate_element_attribute_value(self, element):
self.driver.implicitly_wait(2)
return self.driver.find_element(*element).get_attribute('value')
def find_element(self, element):
self.driver.implicitly_wait(2)
return self.driver.find_element(*element)
def find_elements(self, element):
self.driver.implicitly_wait(2)
return self.driver.find_elements(*element)
def validate_element(self, element):
self.driver.implicitly_wait(2)
return self.driver.find_element(*element).text
def element_should_not_exist(self, element):
try:
self.driver.implicitly_wait(2)
self.driver.find_element(*element)
except NoSuchElementException:
return True
        raise TypeError("The element exists on the page, but shouldn't")
def elements_should_not_exist(self, element, index):
try:
self.driver.find_elements(*element)[index]
except NoSuchElementException:
return True
        raise TypeError("The element exists on the page, but shouldn't")
def random_email(self, element):
randtext = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(15)])
random_email = "test+"+randtext+"@gmail.com"
self.email = random_email
return self.driver.find_element(*element).send_keys(random_email)
def random_name(self, element):
randtext = ''.join([random.choice(string.ascii_letters + string.digits) for n in range(10)])
return self.driver.find_element(*element).send_keys(randtext)
def random_digits(self, element):
randtext = ''.join([random.choice(string.digits) for n in range(10)])
return self.driver.find_element(*element).send_keys(randtext)
def send_text(self, element, text):
self.driver.implicitly_wait(5)
return self.driver.find_element(*element).send_keys(*text)
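# Hedged usage sketch: concrete page objects are expected to subclass Page and
# define locator tuples; the locator below is an assumption for illustration,
# not a selector from the real site under test.
class ExampleSignupPage(Page):
    EMAIL_INPUT = (By.CSS_SELECTOR, 'input[name="email"]')

    def fill_random_email(self):
        return self.random_email(self.EMAIL_INPUT)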
|
import web
urls = ('/(.*)', 'index'
)
videojsApp = web.application(urls, locals())
render = web.template.render('templates/videojs')
class index:
## create
def POST(self,key):
        return 'Not implemented yet!'
## delete
def DELETE(self,key):
        return 'Not implemented yet!'
## read
def GET(self,key):
param = web.input()
return render.player(param)
## update
def PUT(self,key):
        return 'Not implemented yet!'
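## run standalone
# Hedged usage sketch: a web.py application of this kind is typically served
# like this (it listens on port 8080 by default).
if __name__ == "__main__":
    videojsApp.run()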
|
# Generated by Django 3.1.7 on 2021-04-01 12:03
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('customers', '0001_initial'),
('cars', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Booking',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('date_of_hire', models.DateTimeField(auto_now_add=True)),
('date_of_finish', models.DateTimeField()),
('confirmed', models.BooleanField(default=False)),
('payed', models.BooleanField(default=False)),
('car', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cars.car')),
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='customers.customer')),
],
options={
'db_table': 'booking',
},
),
migrations.CreateModel(
name='BookingInvoice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('status', models.CharField(choices=[('pending', 'Pending'), ('final', 'Final')], max_length=32)),
('price', models.DecimalField(decimal_places=2, max_digits=10, validators=[django.core.validators.MinValueValidator(1.0)])),
('note', models.CharField(max_length=1024)),
('booking', models.OneToOneField(null=True, on_delete=django.db.models.deletion.SET_NULL, to='bookings.booking')),
],
options={
'db_table': 'invoice',
},
),
]
|
import json
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.middleware.csrf import get_token
from src.bin.lib import empty
class BaseResponse(object):
STATUS_REMARK_OK = "OK"
STATUS_REMARK_WARNING = "WARNING"
STATUS_REMARK_ERROR = "ERROR"
STYLE_GENERIC = "info"
STYLE_SUCCESS = "success"
STYLE_WARNING = "warning"
STYLE_ERROR = "danger"
RESPONSE_ERROR = "error"
RESPONSE_WARNING = "warning"
RESPONSE_SUCCESS = "success"
METHOD_LOGIN = "login"
METHOD_NEW = "new"
METHOD_UPDATE = "update"
METHOD_RETRIEVE = "retrieve"
METHOD_VIEW_MAIN = "view"
METHOD_VIEW_STATIC = "_vs"
METHOD_VIEW_INSTANCE = "_vi"
METHOD_VIEW_MODAL = "_p"
METHOD_VIEW_TABLE = "_vt"
METHOD_TABLE = "table"
METHOD_DELETE = "delete"
METHOD_VIEW_DELETE = "_vd"
METHOD_OPTIONS_AJAX = "options-ajax"
METHOD_SEARCH = "search"
METHOD_OPTIONS_STATIC = "options-static"
METHOD_LINKS = "links"
METHOD_COUNT = "count"
METHOD_META = "meta"
METHOD_PDF = "pdf"
METHOD_AUTHENTICATE = "authenticate"
METHOD_MISC = "miscellaneous"
METHOD_BLANK = "blank"
MESSAGE_AUTH_GRANTED = _("Authorization granted")
MESSAGE_LOGIN_SUCCESS = _("Login was successful")
MESSAGE_SUCCESS = _("Your entry was successful")
MESSAGE_SUCCESS_UPDATE = _("Your update was successful")
MESSAGE_SUCCESS_RETRIEVAL = _("Retrieved successful")
MESSAGE_DELETE = _("Deleted successfully")
MALFORMED_URL = 404, _("Malformed URL called by end-user")
GROUP_KEY_ERROR = 405, _("Group key index error. Please contact your system administrator")
POSSIBLE_HACK = 403, _("Possible malicious access, this call has been logged")
GET_EXCEPTION = 404, _("Uncaught error in get call, contact system administrator")
GET_EXCEPTION_UNDEFINED_AJAX = 405, _("Undefined ajax search function in main program")
GET_EXCEPTION_UNDEFINED_ATTRIBUTE = 405, _("Filter attribute error while querying data")
POST_EXCEPTION = 400, _("Uncaught error in post call, contact system administrator")
MULTIPLE_OBJECTS_RETURNED = 500, _("Returned multiple objects where one object was expected, critical error. "
"Please contact your system administrator")
PERMISSION_ERROR = 403, _("General permission error, contact system administrator to grant permissions for "
"the module you have attempted to access")
    LOGIN_ERROR = 400, _("Username and password combination does not exist in our records")
AUTHORIZATION_BEARER_ERROR = 401, _("No authorization bearer secret passed in header")
AUTHORIZATION_TOKEN_ERROR = 401, _("No authorization token passed in header or token has expired")
AUTHORIZATION_COOKIE_ERROR = 401, _("CSRF token failure")
PERMISSION_ALL = "all"
PERMISSION_MANAGE = "manage"
PERMISSION_CREATE = "create"
PERMISSION_UPDATE = "update"
PERMISSION_DELETE = "delete"
PERMISSION_VIEW = "view"
PERMISSION_DELETE_CONDITIONAL = "delete_conditional"
PERMISSION_DELETE_MINE = "delete_mine"
PERMISSION_MANAGE_CONDITIONAL = "manage_conditional"
PERMISSION_MANAGE_MINE = "manage_mine"
PERMISSION_RETRIEVE_CONDITIONAL = "view_conditional"
PERMISSION_RETRIEVE_MINE = "view_mine"
PERMISSION_RETRIEVE_ALL = "view_all"
PERMISSION_NO_AUTH = "no_authentication"
PERMISSION_NONE = None
PERMISSION_SUPER_ADMIN = "super_admin"
dump = {}
_status_remark = None
_warning = False
_success = False
_error = False
_error_detail = None
_route = None
_response = None
_method = None
_message = None
_delay = None
_js_command = None
_style = "info"
_error_code = None
_error_code_description = None
_data = None
_results = None
_template = None
_html_data = None
_misc = {}
_module = None
_request = None
_is_authenticated = True
def __init__(self, message=None, method=None, response=None, route=None, js_command=None,
style=None, template=None, html_data=None, request=None):
self._message = message
self._method = method
self._response = response
self._route = route
self._js_command = js_command
self._style = style or BaseResponse.STYLE_GENERIC
self._template = template
self._html_data = html_data
self._request = request
if response is not None:
if response == BaseResponse.RESPONSE_ERROR:
self._status_remark = BaseResponse.STATUS_REMARK_ERROR
self.error()
elif response == BaseResponse.RESPONSE_WARNING:
self._status_remark = BaseResponse.STATUS_REMARK_WARNING
self.warning()
else:
self._status_remark = BaseResponse.STATUS_REMARK_OK
self.success()
else:
self.success()
self._status_remark = BaseResponse.STATUS_REMARK_OK
def status_remark(self, status_remark):
self.dump["status_remark"] = status_remark
return self
def warning(self):
if empty(self._delay):
self._delay = 5000
self._style = BaseResponse.STYLE_WARNING
self._warning = True
return self
def error(self):
if empty(self._delay):
self._delay = 25000
self.status_remark(BaseResponse.STATUS_REMARK_ERROR)
self._style = BaseResponse.STYLE_ERROR
self._error = True
return self
def success(self):
if empty(self._delay):
self._delay = 3000
if empty(self._message) and self._method == BaseResponse.METHOD_DELETE:
self._message = BaseResponse.MESSAGE_DELETE
elif empty(self._message) and \
(self._method == BaseResponse.METHOD_RETRIEVE or self._method == BaseResponse.METHOD_TABLE):
self._message = BaseResponse.MESSAGE_SUCCESS_RETRIEVAL
elif empty(self._message) and self._method == BaseResponse.METHOD_UPDATE:
self._message = BaseResponse.MESSAGE_SUCCESS_UPDATE
elif empty(self._message) and self._method == BaseResponse.METHOD_NEW:
self._message = BaseResponse.MESSAGE_SUCCESS
elif empty(self._message) and self._method == BaseResponse.METHOD_LOGIN:
self._message = BaseResponse.MESSAGE_LOGIN_SUCCESS
self._success = True
return self
def js_command(self, js_command):
self._js_command = js_command
return self
def route_to(self, route):
self._route = route
return self
def message(self, message):
self._message = message
return self
def style(self, style):
self._style = style
return self
def error_code(self, error_code):
self._error_code = error_code[0]
self._error_code_description = error_code[1]
return self
def set_error_detail(self, error_detail):
self._error_detail = error_detail
return self
def auth_guard(self, val):
if not val:
self.error()
self._is_authenticated = False
return self
def append_data(self, data, **options):
self._data = data
try:
self._results = options["results"]
except KeyError:
pass
try:
self._data['extras'] = options["extras"]
except KeyError:
pass
try:
self._misc = options["misc"]
except KeyError:
pass
return self
def delay(self, delay):
self._delay = delay
return self
def data_to_dict(self):
try:
self._data = json.loads(self._data)
except (TypeError, ValueError):
pass
return self
def render(self):
self.data_to_dict()
self.dump = {
"statusRemark": self._status_remark,
"status": {
"warning": self._warning,
"error": self._error,
"success": self._success,
},
"message": self._message,
"method": self._method,
"delay": self._delay,
"jsCommand": self._js_command,
"style": self._style,
"route": self._route,
"systemError": {
"code": self._error_code,
"codeDescription": self._error_code_description,
"errorDetail": self._error_detail,
},
"data": self._data,
"results": self._results,
"module": self._module,
"i18n": "en-gb",
"misc": self._misc,
}
return self
def response_html(self):
from django.shortcuts import render
status_code = 200
# clean up error message
try:
if self.dump["status"]["error"] is True:
                if isinstance(self.dump["message"], tuple):
status_code = self.dump["message"][0]
self.dump["message"] = self.dump["message"][1]
else:
status_code = 400
except KeyError:
status_code = 400
# continue with rendering
if self._html_data:
if self._template:
return render(self._request, self._template, self._html_data)
else:
return HttpResponse(self._html_data["msg"])
else:
if not self._is_authenticated:
response = HttpResponse(self.response_json(), content_type='application/json')
response["Authorization"] = "not_authorized"
response["Access-Control-Expose-Headers"] = "Authorization"
else:
response = HttpResponse(self.response_json(), content_type='application/json')
response.status_code = status_code
return response
def response_json(self):
return json.dumps(self.dump, cls=DjangoJSONEncoder)
def response_raw(self):
return self.dump
def pre_response_delete(self, instance, message=None):
fqn = instance.get_fqn()
if message is None:
if settings.DEBUG:
message = _("You are about to delete <strong>%s</strong> from your records. "
"How do you want to proceed?") % fqn
else:
message = _("You are about to delete <strong>'%s'</strong> from your records. "
"How do you want to proceed?") % fqn
self.warning()
self._success = False
self._method = BaseResponse.METHOD_DELETE
self._message = message
return self
# authentication
def get_method_default_permission(self, method):
if method is None:
method = ""
if method == self.METHOD_RETRIEVE or method == self.METHOD_TABLE or method == self.METHOD_OPTIONS_STATIC \
or method == self.METHOD_OPTIONS_AJAX:
return self.PERMISSION_VIEW
elif method == self.METHOD_DELETE:
return self.PERMISSION_DELETE
elif method == self.METHOD_NEW:
return self.PERMISSION_CREATE
elif method == self.METHOD_UPDATE:
return self.PERMISSION_UPDATE
else:
return None
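# Hedged usage sketch: a view would typically chain the builder like this
# (the payload values below are assumptions for illustration only).
def example_view(request):
    return (BaseResponse(method=BaseResponse.METHOD_RETRIEVE, request=request)
            .append_data({"id": 1, "name": "demo"}, results=1)
            .render()
            .response_html())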
|
# This file provides a very simple "no sql database using python dictionaries"
# If you don't know SQL then you might consider something like this for this course
# We're not using a class here as we're roughly expecting this to be a singleton
# If you need to multithread this, a cheap and easy way is to stick it on its own bottle server on a different port
# Write a few dispatch methods and add routes
# A heads up, this code is for demonstration purposes; you might want to modify it for your own needs
# Currently it does basic insertions and lookups
class Table():
def __init__(self, table_name, *table_fields):
self.entries = []
self.fields = table_fields
self.name = table_name
def create_entry(self, data):
'''
Inserts an entry in the table
Doesn't do any type checking
'''
# Bare minimum, we'll check the number of fields
if len(data) != len(self.fields):
raise ValueError('Wrong number of fields for table')
self.entries.append(data)
return
def search_table(self, target_field_name, target_value):
'''
Search the table given a field name and a target value
Returns the first entry found that matches
'''
# Lazy search for matching entries
for entry in self.entries:
for field_name, value in zip(self.fields, entry):
if target_field_name == field_name and target_value == value:
return entry
# Nothing Found
return None
class DB():
'''
This is a singleton class that handles all the tables
You'll probably want to extend this with features like multiple lookups, and deletion
A method to write to and load from file might also be useful for your purposes
'''
def __init__(self):
self.tables = {}
# Setup your tables
self.add_table('users', "id", "username", "password")
return
def add_table(self, table_name, *table_fields):
'''
Adds a table to the database
'''
table = Table(table_name, *table_fields)
self.tables[table_name] = table
return
def search_table(self, table_name, target_field_name, target_value):
'''
Calls the search table method on an appropriate table
'''
return self.tables[table_name].search_table(target_field_name, target_value)
def create_table_entry(self, table_name, data):
'''
Calls the create entry method on the appropriate table
'''
return self.tables[table_name].create_entry(data)
# Our global database
# Invoke this as needed
database = DB()
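def _demo_database():
    '''
    Hedged usage sketch; the user tuple is an assumption for illustration
    '''
    database.create_table_entry('users', (1, 'alice', 'hunter2'))
    return database.search_table('users', 'username', 'alice')  # -> (1, 'alice', 'hunter2')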
|
import numpy as np
import tensorflow as tf
from pynput.keyboard import Key, Controller
import operator
import cv2
import sys, os
import time
import pyautogui
keyboard = Controller()
# Loading the model
jsonFile = open("model.json", "r")
modelJson = jsonFile.read()
jsonFile.close()
loadedModel = tf.keras.models.model_from_json(modelJson)
# load weights into new model
loadedModel.load_weights("model.h5")
cap = cv2.VideoCapture(0)
# Category dictionary
Category = {0: 'Zero', 1: 'One', 2: 'Two', 3: 'Three', 4: 'Four', 5: 'Five'}
while True:
_, frame = cap.read()
# mirror image
frame = cv2.flip(frame, 1)
# Coordinates of the ROI
m = int(0.5*frame.shape[1])
n = 10
m1 = frame.shape[1]-10
n1 = int(0.5*frame.shape[1])
# Drawing the ROI
cv2.rectangle(frame, (m-1, n-1), (m1+1, n1+1), (255,0,0) ,1)
# Extracting the ROI
ROI = frame[n:n1, m:m1]
# Resizing the ROI for prediction
ROI = cv2.resize(ROI, (64, 64))
ROI = cv2.cvtColor(ROI, cv2.COLOR_BGR2GRAY)
_, testImage = cv2.threshold(ROI, 123, 230, cv2.THRESH_BINARY)
cv2.imshow("test", testImage)
res = loadedModel.predict(testImage.reshape(1, 64, 64, 1))
prediction = {'Zero': res[0][0],
'One': res[0][1],
'Two': res[0][2],
'Three': res[0][3],
'Four': res[0][4],
'Five': res[0][5]}
# Sorting based on top prediction
prediction = sorted(prediction.items(), key=operator.itemgetter(1), reverse=True)
cv2.putText(frame, prediction[0][0], (10, 100), cv2.FONT_HERSHEY_PLAIN, 1, (0,255,255), 1)
cv2.putText(frame, "Zero-SWITCHWINDOW", (10, 120), cv2.FONT_HERSHEY_PLAIN, 1, (0,255,255), 1)
cv2.putText(frame, "One-SCROLLUP", (10, 140), cv2.FONT_HERSHEY_PLAIN, 1, (0,255,255), 1)
cv2.putText(frame, "Two-SCROLLDOWN", (10, 160), cv2.FONT_HERSHEY_PLAIN, 1, (0,255,255), 1)
cv2.putText(frame, "Four-SCREENSHOT", (10, 180), cv2.FONT_HERSHEY_PLAIN, 1, (0,255,255), 1)
cv2.putText(frame, "Five-PLAY/PAUSE", (10, 200), cv2.FONT_HERSHEY_PLAIN, 1, (0,255,255), 1)
# Displaying the predictions
if prediction[0][0] == 'Five':
keyboard.press(Key.space)
    if prediction[0][0] == 'Zero':
pyautogui.keyDown('alt')
pyautogui.keyDown('tab')
pyautogui.keyUp('tab')
pyautogui.keyUp('alt')
if prediction[0][0] =='One':
pyautogui.press('up')
if prediction[0][0]=='Two':
pyautogui.press('down')
if prediction[0][0]=='Three':
pass
if prediction[0][0]=='Four':
pyautogui.press('prtscr')
cv2.imshow("Frame", frame)
interrupt = cv2.waitKey(5)
if interrupt & 0xFF == 27: # esc key
break
cap.release()
cv2.destroyAllWindows()
|
#!/usr/bin/python
class Solution(object):
def canConstruct(self, ransomNote, magazine):
magaArray = []
for cha in magazine:
magaArray.append(cha)
for cha in ransomNote:
if cha not in magaArray:
return False
else:
magaArray.remove(cha)
return True
solu = Solution()
print(solu.canConstruct("aabb", "ab"))
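# Hedged alternative sketch: the same check with collections.Counter runs in
# O(len(ransomNote) + len(magazine)) instead of the quadratic list scan above.
from collections import Counter

def can_construct_counter(ransomNote, magazine):
    need, have = Counter(ransomNote), Counter(magazine)
    return all(have[cha] >= count for cha, count in need.items())

print(can_construct_counter("aabb", "ab"))  # False, matching the class above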
|
# -*- coding: utf-8 -*-
"""Tests for API signal handlers."""
import mock
import unittest
from webplatformcompat.models import Browser, Maturity
from webplatformcompat.signals import post_save_update_cache
from .base import TestCase
class TestDeleteSignal(TestCase):
def setUp(self):
patcher = mock.patch(
'webplatformcompat.signals.update_cache_for_instance')
self.login_user()
self.mocked_update_cache = patcher.start()
self.addCleanup(patcher.stop)
self.maturity = self.create(Maturity, slug='foo')
self.mocked_update_cache.reset_mock()
def test_delete(self):
pk = self.maturity.pk
self.maturity.delete()
self.mocked_update_cache.assert_called_once_with(
'Maturity', pk, self.maturity)
def test_delete_delayed(self):
self.maturity._delay_cache = True
self.maturity.delete()
self.mocked_update_cache.assert_not_called()
class TestSaveSignal(unittest.TestCase):
def setUp(self):
self.patcher = mock.patch(
'webplatformcompat.signals.update_cache_for_instance')
self.mocked_update_cache = self.patcher.start()
self.browser = Browser(id=666)
def tearDown(self):
self.patcher.stop()
def test_raw(self):
post_save_update_cache(Browser, self.browser, created=True, raw=True)
self.mocked_update_cache.assert_not_called()
def test_create(self):
post_save_update_cache(Browser, self.browser, created=True, raw=False)
self.mocked_update_cache.assert_called_once_with(
'Browser', 666, self.browser)
def test_create_delayed(self):
self.browser._delay_cache = True
post_save_update_cache(Browser, self.browser, created=True, raw=False)
self.mocked_update_cache.assert_not_called()
|
class Solution:
"""
@param num, a list of integer
@return an integer
"""
def longestConsecutive(self, num):
dataSet = set(num)
#for ie in num:
# dataSet.add(ie)
print("dataset is ",dataSet)
res = 0
for ie in set(dataSet):
print("tempset is ",set(dataSet),"res is ",res,)
#res = 1
dataSet.discard(ie)
#l = ie-1
data = [ie]
l= ie-1
while l in dataSet:
data.insert(0,l)
dataSet.discard(l)
l-=1
r = ie+1
while r in dataSet:
data.append(r)
dataSet.discard(r)
r=r+1
print("data is ",data)
res = max(res, r-l-1)
return res
data=[1,7,4,2,3,10,10,99,2,5,11,6,9,12,13,56,14,4715,88,15,16]
mySol = Solution()
res = mySol.longestConsecutive(data)
print("res is ",res)
'''
# O(n)
class Solution:
"""
@param num, a list of integer
@return an integer
"""
def longestConsecutive(self, num):
# write your code here
dict={}
for x in num:
dict[x] = 1
ans = 0
for x in num:
if x in dict:
len = 1
del dict[x]
l = x - 1
r = x + 1
while l in dict:
del dict[l]
l -= 1
len += 1
while r in dict:
del dict[r]
r += 1
len += 1
if ans < len:
ans = len
return ans
# O(nlogn)
class Solution:
# @param num, a list of integer
# @return an integer
def longestConsecutive(self, num):
num.sort()
l = num[0]
ans = 1
tmp = 1
for n in num:
if(n - l == 0):
continue;
elif(n - l == 1):
tmp += 1
else:
if tmp > ans:
ans = tmp
tmp = 1
l = n
if tmp > ans:
ans = tmp
return ans
'''
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 9 18:49:30 2018
@author: Haneen
"""
import kivy
from kivy.app import App
from kivy.uix.label import Label
from kivy.uix.widget import Widget
class CustomWidget(Widget):
pass
class CustomWidgetApp(App):
def build(self):
return CustomWidget()
num = CustomWidgetApp()
num.run()
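# Hedged note: by kivy's naming convention this app would also load a
# "customwidget.kv" file from the script's directory; no kv file is shown
# here, so the minimal sketch below is an assumption.
#
#   <CustomWidget>:
#       Label:
#           text: 'Hello from CustomWidget'
#           center: self.parent.center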
|