# This file is part of beets.
# Copyright 2019, Jack Wilsdon <jack.wilsdon@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Load SQLite extensions.
"""
from beets.dbcore import Database
from beets.plugins import BeetsPlugin
import sqlite3
class LoadExtPlugin(BeetsPlugin):
def __init__(self):
super().__init__()
if not Database.supports_extensions:
            self._log.warning('loadext is enabled but the current SQLite '
                              'installation does not support extensions')
return
self.register_listener('library_opened', self.library_opened)
def library_opened(self, lib):
for v in self.config:
ext = v.as_filename()
self._log.debug('loading extension {}', ext)
try:
lib.load_extension(ext)
except sqlite3.OperationalError as e:
self._log.error('failed to load extension {}: {}', ext, e)
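# Hedged usage sketch (not part of the original plugin): the raw sqlite3
# calls that extension loading ultimately builds on; the extension path is
# a hypothetical example.
if __name__ == '__main__':
    conn = sqlite3.connect(':memory:')
    conn.enable_load_extension(True)                 # must be enabled first
    conn.load_extension('/usr/lib/sqlite3/pcre.so')  # hypothetical path
    conn.enable_load_extension(False)                # re-disable once loaded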
|
import os
import sys
import subprocess
import shutil
import time
import fam
sys.path.insert(0, 'scripts')
sys.path.insert(0, os.path.join("tools", "families"))
sys.path.insert(0, os.path.join("tools", "trees"))
sys.path.insert(0, os.path.join("tools", "msa_edition"))
import saved_metrics
import experiments as exp
import msa_converter
import cut_node_names
import re
import run_ALE
def extract_observe_ALE(datadir, gene_trees, subst_model, observe_output_dir):
results_dir = os.path.join(observe_output_dir, "results")
for family in fam.get_families_list(datadir):
src = os.path.join(results_dir, family + ".newick.ale")
dest = fam.build_gene_tree_path(datadir, subst_model, family, "ccp-" + gene_trees)
shutil.copyfile(src, dest)
def run_observe_ALE(datadir, gene_trees, subst_model, cores):
run_name = "ale_observe_" + gene_trees
output_dir = fam.get_run_dir(datadir, subst_model, run_name)
observe_output_dir = os.path.join(output_dir, "observe")
exp.reset_dir(observe_output_dir)
commands = run_ALE.generate_ALE_observe_commands_file(datadir, gene_trees, subst_model, cores, observe_output_dir)
exp.run_with_scheduler(exp.ale_observe_exec, commands, "onecore", cores, observe_output_dir, run_name + "_ml_run.logs")
extract_observe_ALE(datadir, gene_trees, subst_model, observe_output_dir)
if __name__ == "__main__":
    min_args_number = 5
    if len(sys.argv) < min_args_number:
        print("Usage: python run_ALE.py datadir gene_trees subst_model cores")
        sys.exit(1)
datadir = sys.argv[1]
gene_trees = sys.argv[2]
subst_model = sys.argv[3]
cores = int(sys.argv[4])
run_observe_ALE(datadir, gene_trees, subst_model, cores)
|
from app import db, login
from flask_login import UserMixin
from flask import url_for, current_app
import base64
from datetime import datetime, timedelta
import os
import json
class PaginatedAPIMixin(object):
@staticmethod
def to_collection_dict(query, page, per_page, endpoint, **kwargs):
resources = query.paginate(page, per_page, False)
data = {
'items': [item.to_dict() for item in resources.items],
'_meta': {
'page': page,
'per_page': per_page,
'total_pages': resources.pages,
'total_items': resources.total
},
'_links': {
'self': url_for(endpoint, page=page, per_page=per_page,
**kwargs),
'next': url_for(endpoint, page=page + 1, per_page=per_page,
**kwargs) if resources.has_next else None,
'prev': url_for(endpoint, page=page - 1, per_page=per_page,
**kwargs) if resources.has_prev else None
}
}
return data
class User(PaginatedAPIMixin, UserMixin, db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(25), index=True, unique=True)
email = db.Column(db.String(30), index=True, unique=True)
password = db.Column(db.String(25))
lists = db.relationship('List', backref='author', lazy='dynamic')
token = db.Column(db.String(32), index=True, unique=True)
token_expiration = db.Column(db.DateTime)
def __repr__(self):
return '<User {}>'.format(self.username)
    def set_password(self, password):
        # NOTE: the password is stored in plain text here; a real app should
        # store a salted hash (e.g. werkzeug.security.generate_password_hash).
        self.password = password
    def check_password(self, password):
        return self.password == password
def get_lists(self):
return List.query.filter(List.user_id == self.id).order_by(List.id).all()
def to_dict(self):
data = {
'id': self.id,
'username': self.username,
'email': self.email,
'list_count': self.lists.count(),
'_links': {
'self': url_for('api.get_user', id=self.id),
'lists': url_for('api.get_lists_of_user', id=self.id)
}
}
return data
def from_dict(self, data, new_user=False):
for field in ['username', 'email']:
if field in data:
setattr(self, field, data[field])
if new_user and 'password' in data:
self.set_password(data['password'])
def get_token(self, expires_in=3600):
now = datetime.utcnow()
if self.token and self.token_expiration > now + timedelta(seconds=60):
return self.token
self.token = base64.b64encode(os.urandom(24)).decode('utf-8')
self.token_expiration = now + timedelta(seconds=expires_in)
db.session.add(self)
return self.token
def revoke_token(self):
self.token_expiration = datetime.utcnow() - timedelta(seconds=1)
@staticmethod
def check_token(token):
user = User.query.filter_by(token=token).first()
if user is None or user.token_expiration < datetime.utcnow():
return None
return user
@login.user_loader
def load_user(id):
return User.query.get(int(id))
class List(PaginatedAPIMixin, db.Model):
id = db.Column(db.Integer, primary_key=True)
listname = db.Column(db.String(30), index=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
items = db.relationship('Item', backref='dir', lazy='dynamic')
def __repr__(self):
return '<List {}>'.format(self.listname)
def get_items(self):
return Item.query.filter(Item.list_id == self.id).order_by(Item.id).all()
def to_dict(self):
data = {
'id': self.id,
'listname': self.listname,
'user_id': self.user_id,
'item_count': self.items.count(),
'_links': {
'self': url_for('api.get_list', id=self.id),
'items': url_for('api.get_items_of_list', id=self.id)
}
}
return data
def from_dict(self, data):
for field in ['listname']:
if field in data:
setattr(self, field, data[field])
class Item(PaginatedAPIMixin, db.Model):
id = db.Column(db.Integer, primary_key=True)
itemname = db.Column(db.String(30), index=True)
list_id = db.Column(db.Integer, db.ForeignKey('list.id'))
def __repr__(self):
return '<Item {}>'.format(self.itemname)
def to_dict(self):
data = {
'id': self.id,
'itemname': self.itemname,
'list_id': self.list_id,
'_links': {
'self': url_for('api.get_item', id=self.id)
}
}
return data
def from_dict(self, data):
for field in ['itemname']:
if field in data:
setattr(self, field, data[field])
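# Hedged usage sketch (not in the original file): how an API view might use
# PaginatedAPIMixin.to_collection_dict. The blueprint object 'bp' and the
# endpoint name 'api.get_users' are hypothetical.
#
# from flask import request, jsonify
#
# @bp.route('/users', methods=['GET'])
# def get_users():
#     page = request.args.get('page', 1, type=int)
#     per_page = min(request.args.get('per_page', 10, type=int), 100)
#     return jsonify(User.to_collection_dict(User.query, page, per_page,
#                                            'api.get_users'))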
|
import pandas as pd
import numpy as np
def get_labels():
    data_dir = 'training2017/'
    label = pd.read_csv(data_dir + 'REFERENCE.csv')
    label = label.values.tolist()  # convert the label dataframe to a list
# print(label)
classes = ['A','N','O','~']
y = np.array([])
file = []
for t in label:
# print(t[1])
position = classes.index(t[1])
y = np.append(y,position)
# y = np.append(y,t[1])
file.append(t[0])
# print(x)
y = y.astype(int)
return y,file
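if __name__ == '__main__':
    # Hedged usage sketch: expects training2017/REFERENCE.csv to be present.
    y, files = get_labels()
    print(y[:5], files[:5])  # first few encoded labels and record names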
|
def simple_Bayes(A, B_given_A, B):
    """
    A straightforward implementation of Bayes' formula.
    Parameters
    A: prior, P(A)
    B_given_A: likelihood, P(B|A)
    B: marginal likelihood, P(B)
    Returns the posterior P(A|B).
    """
    A_given_B = (A * B_given_A) / B
    return A_given_B
"""
Example 1: easy -- all of the terms are spelled out for us
Oh no! 50% of all rainy days start off cloudy!
But cloudy mornings are common (about 40% of days start cloudy)
And this is usually a dry month (only 3 of 30 days tend to be rainy, or 10%)
You are planning a picnic today, but the morning is cloudy
What is the chance of rain during the day?
"""
A = 0.1
B_given_A = 0.5
B = 0.4
simple_Bayes(A, B_given_A, B)
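# Worked check: (0.1 * 0.5) / 0.4 = 0.125, i.e. a 12.5% chance of rain
# given the cloudy morning.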
"""
Example 2: "A" With Two Cases
Hunter says she is itchy. There is a test for Allergy to Cats, but this test is not always right:
For people that really do have the allergy, the test says "Yes" 80% of the time
For people that do not have the allergy, the test says "Yes" 10% of the time ("false positive")
If 1% of the population have the allergy, and Hunter's test says "Yes", what are
the chances that Hunter really has the allergy?
allergy | positive test
"""
allergy = 0.01
no_allergy = 0.99
positive_given_allergy = 0.8
positive_given_no_allergy = 0.1
# To compute this we need to know all conditions that yield a positive test.
# Best to think about this in terms of a tree structure -- sum the
# probabilities along each mutually exclusive path that ends in a positive test.
positive = (positive_given_allergy * allergy) + (positive_given_no_allergy * no_allergy)
allergy_given_positive = (allergy * positive_given_allergy) / positive
allergy_given_positive
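# Worked check: positive = 0.8*0.01 + 0.1*0.99 = 0.107, so
# allergy_given_positive = 0.008 / 0.107 ≈ 0.0748 -- only about a 7.5%
# chance Hunter really has the allergy, despite the positive test.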
# Scratch calculations of the same (prior * likelihood) / evidence pattern:
(0.6 * 0.4) / 0.36
(0.4 * 0.3) / 0.24
(0.3 * 0.6) / 0.75
(0.24 * 0.42) / 0.35
(0.8 * 0.3) / 0.36
|
import numpy as np
# Input list for Convolutional code
K = 4 # constraint length
r = 2 # number of output bits per input bit
m = K-1 # memory size
# g = np.array([[1, 1, 1], [1, 1, 0]])
g = np.array([[1, 1, 0, 1], [1, 1, 1, 1]])
matlab_index = 1
rate_num = 3
rate_den = 4
puncturing_en = False
puncturing_matrix = np.array([[1, 1, 0], [1, 0, 1]])
data_length = 4
dout = np.array([1, 0, 1, 1], dtype=bool)
print(dout)
# shift_registers = np.zeros(K, dtype=bool)
shift_registers = np.array([1, 1, 0, 1])  # non-zero start state (test value); use zeros for a standard zero-state encoder
# including zero padding due to zero termination
cc_out = np.zeros((r, data_length))
# convolutional coding
for idx in range(data_length):
for mem in reversed(range(K)):
shift_registers[mem] = shift_registers[mem-1]
shift_registers[0] = dout[idx]
for jdx in range(r):
for kdx in range(K):
if g[jdx][kdx] != 0:
cc_out[jdx][idx] = np.logical_xor(cc_out[jdx][idx], shift_registers[kdx])
print(cc_out[0])
print(cc_out[1])
# puncturing
serialized_out = np.zeros(r * data_length)
cnt = 0
nrow_pmat, ncol_pmat = puncturing_matrix.shape
mark = 0
puncturing_per_output = nrow_pmat * ncol_pmat / r
for idx in range(data_length):
for jdx in range(r):
if puncturing_en:
if puncturing_matrix[jdx][mark] > 0:
serialized_out[cnt] = cc_out[jdx][idx]
cnt = cnt + 1
else:
# serialized_out[idx*r + jdx] = cc_out[jdx][idx]
serialized_out[cnt] = cc_out[jdx][idx]
cnt = cnt + 1
mark = int((mark + 1) % puncturing_per_output)
print(serialized_out)
|
import mymodulept2  # module that defines the 'employee' dict (not shown here)
a = mymodulept2.employee["salary"]
print(a)
|
from django import forms
from django.core.validators import validate_image_file_extension
class ImageUploadForm(forms.Form):
image = forms.ImageField(validators=[validate_image_file_extension])
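# Hedged usage sketch (not in the original file): validating an upload in a
# view; the template name, redirect target and handler are hypothetical.
#
# from django.shortcuts import render, redirect
#
# def upload_image(request):
#     if request.method == 'POST':
#         form = ImageUploadForm(request.POST, request.FILES)
#         if form.is_valid():
#             handle_upload(form.cleaned_data['image'])  # hypothetical handler
#             return redirect('success')
#     else:
#         form = ImageUploadForm()
#     return render(request, 'upload.html', {'form': form})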
|
n1 = int(input('Digite um número:'))
n2 = int(input('Digite outro número:'))
r = 0
while r != 5:
print('Escolha uma das opções abaixo')
r = int(input('[1] - Somar\n'
'[2] - Multiplicar\n'
'[3] - Maior\n'
'[4] - Novos números\n'
'[5] - Sair do programa\n'
'Sua escolha:'))
if r == 1:
soma = n1 + n2
print('A soma de {} com {} é {}'.format(n1,n2,soma))
elif r == 2:
mult = n1 * n2
print('A multiplicação de {} com {} é {}'.format(n1, n2, mult))
elif r == 3:
if n1 > n2:
print('{} é maior e {} é menor'.format(n1,n2))
        elif n2 > n1:
            print('{} é maior e {} é menor'.format(n2, n1))
elif n1 == n2:
print('Os números digitados são iguais')
elif r == 4:
print('Informe os números novamente:')
n1 = int(input('Digite um número:'))
n2 = int(input('Digite outro número:'))
elif r == 5:
print('finalizando...')
else:
        print('Opção inválida')
print('Programa finalizado com êxito, volte sempre')
|
""" Objects module to hold all the objects for the game
"""
import random
import pygame
from pygame.locals import *
import utils
import numpy as np
class Fish(object):
""" A fish
"""
TURN_SPEED = 0.2
ACCELERATION_AMOUNT = 1
MAX_SPEED = 10
MIN_SPEED = 0
SIGHT_ANGLE = 3.14159/2.0
def __init__(self):
self.surface = pygame.image.load("fish.bmp")
self.rotated_surface = self.surface
self.rectangle = self.surface.get_rect()
self.speed = 1
self.x = random.randint(1, pygame.display.get_surface().get_width())
        self.y = random.randint(1, pygame.display.get_surface().get_height())
        self.direction = random.uniform(0, 2 * np.pi)  # heading in radians (used with np.sin/np.cos below)
self.fitness = 0
def run(self):
""" The main method for the fish, should be run each game tick
"""
self.__handle_events()
# Rotate the fish
self.__rotate()
# Move the fish
self.__move()
# Draw the fish
pygame.display.get_surface().blit(self.rotated_surface, self.rectangle)
def __move(self):
dx, dy = utils.vector_to_delta_speed(self.direction, self.speed)
self.x += dx
self.y += dy
if self.x > pygame.display.get_surface().get_width():
self.x = pygame.display.get_surface().get_width()
if self.x < 0:
self.x = 0
if self.y > pygame.display.get_surface().get_height():
self.y = pygame.display.get_surface().get_height()
if self.y < 0:
self.y = 0
self.rectangle.x = round(self.x)
self.rectangle.y = round(self.y)
self.rectangle.clamp_ip(pygame.display.get_surface().get_rect())
def __rotate(self):
self.rotated_surface = pygame.transform.rotate(self.surface, utils.rads_to_degrees(self.direction))
def __accelerate(self):
""" Accelerate the fish
"""
if self.speed < self.MAX_SPEED:
self.speed += self.ACCELERATION_AMOUNT
def __decelerate(self):
""" Decelerate the fish
"""
if self.speed > self.MIN_SPEED:
self.speed -= self.ACCELERATION_AMOUNT
def __turn_left(self):
self.direction += self.TURN_SPEED
def __turn_right(self):
self.direction -= self.TURN_SPEED
def __handle_events(self):
for event in pygame.event.get():
if event.type == KEYDOWN:
if event.key == K_w:
self.__accelerate()
if event.key == K_s:
self.__decelerate()
if event.key == K_a:
self.__turn_left()
if event.key == K_d:
self.__turn_right()
def can_see(self, something):
""" Find out if the 'something' is in the line of sight of the fish.
'something' can be food, or another fish, or whatever.
'something' must have something.x and something.y defined.
Returns a boolean.
Expected usage is something like:
if (fish.can_see(food)):
fish.move_towards_food()
"""
relative_vector = [something.x - self.x, something.y - self.y]
fish_facing_vector = [-1.0 * np.sin(self.direction), np.cos(self.direction)]
angle_from_line_of_sight = utils.angle_between(relative_vector, fish_facing_vector)
if abs(angle_from_line_of_sight) < 0.5 * self.SIGHT_ANGLE:
can_see = True
else:
can_see = False
return can_see
class Food(object):
def __init__(self):
self.surface = pygame.image.load("food.bmp")
self.rectangle = self.surface.get_rect()
self.rectangle.x = random.randint(1, pygame.display.get_surface().get_width())
self.rectangle.y = random.randint(1, pygame.display.get_surface().get_height())
def run(self, fish):
if fish.rectangle.colliderect(self.rectangle):
self.get_eaten(fish)
pygame.display.get_surface().blit(self.surface, self.rectangle)
def get_eaten(self, fish):
fish.fitness += 1
self.__init__()
@property
def x(self):
return self.rectangle.x
@x.setter
def x(self, x_new):
self.rectangle.x = x_new
@property
def y(self):
return self.rectangle.y
@y.setter
def y(self, y_new):
self.rectangle.y = y_new
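# Hedged sketch of the helpers this module expects from utils (the real
# utils module is not shown; these are plausible implementations only):
#
# import numpy as np
#
# def vector_to_delta_speed(direction, speed):
#     """Decompose a heading (radians) and speed into (dx, dy)."""
#     return speed * np.cos(direction), speed * np.sin(direction)
#
# def rads_to_degrees(rads):
#     return rads * 180.0 / np.pi
#
# def angle_between(v1, v2):
#     """Unsigned angle between two 2D vectors, in radians."""
#     cos_theta = np.dot(v1, v2) / (np.linalg.norm(v1) * np.linalg.norm(v2))
#     return np.arccos(np.clip(cos_theta, -1.0, 1.0))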
|
"""Supervision scan directory management.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import abc
import errno
import logging
import os
import sys
import six
from treadmill import fs
from . import _service_base
from . import _utils
_LOGGER = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class ScanDir(object):
"""Models a service directory.
"""
__slots__ = (
'_dir',
'_control_dir',
'_services',
)
def __init__(self, directory):
self._dir = directory
self._control_dir = os.path.join(self._dir, self.control_dir_name())
self._services = None
@staticmethod
@abc.abstractmethod
def _create_service(svc_basedir, svc_name, svc_type, **kwargs):
"""Implementation specifc service object creation from service data.
"""
pass
@staticmethod
@abc.abstractmethod
def control_dir_name():
"""Gets the name of the svscan control directory.
"""
pass
def __repr__(self):
return '{type}({dir!r})'.format(
type=self.__class__.__name__,
dir=os.path.basename(self._dir),
)
def control_dir(self):
"""Gets the svscan control directory.
"""
return self._control_dir
@property
def directory(self):
"""Gets the service directory path.
"""
return self._dir
@property
def services(self):
"""Gets the services which this service directory is composed of.
"""
if self._services is None:
self._services = {}
try:
for name in os.listdir(self._dir):
if name[0] == '.':
continue
dir_path = os.path.join(self._dir, name)
if not os.path.isdir(dir_path):
continue
svc_data = _service_base.Service.read_dir(dir_path)
if svc_data is None:
continue
svc_type, svc_basedir, svc_name = svc_data
svc = self._create_service(
svc_basedir=svc_basedir,
svc_name=svc_name,
svc_type=svc_type
)
# Should never fail to create the svc object
self._services[svc.name] = svc
except OSError as err:
if err.errno == errno.ENOENT:
pass
else:
six.reraise(*sys.exc_info())
return self._services.copy()
def add_service(self, svc_name, svc_type, **kw_args):
"""Adds a service to the service directory.
"""
if self._services is None:
# Pre-warm the services dict
_s = self.services
svc = self._create_service(
svc_basedir=self._dir,
svc_name=svc_name,
svc_type=svc_type,
**kw_args
)
self._services[svc.name] = svc
return svc
def write(self):
"""Write down the service definition.
"""
fs.mkdir_safe(self._control_dir)
if self._services is not None:
for svc in self._services.values():
svc.write()
@classmethod
def add_ctrlfile_props(cls):
"""Define all the properties for slot'ed attributes.
"""
def _add_ctrlfile_prop(cls, attrib):
"""Add all the class properties for a given file attribute.
"""
attrib_filename = '%s_file' % attrib
def _getter(self):
"""Gets the svscan {filename} control script.
"""
if getattr(self, attrib) is None:
try:
setattr(
self,
attrib,
_utils.script_read(
getattr(self, attrib_filename)
)
)
except IOError as err:
                        if err.errno != errno.ENOENT:
raise
return getattr(self, attrib)
def _setter(self, new_script):
"""Sets the svscan {filename} control script.
"""
setattr(
self,
attrib,
new_script
)
_getter.__doc__ = _getter.__doc__.format(filename=attrib[1:])
attrib_prop = property(_getter)
_setter.__doc__ = _setter.__doc__.format(filename=attrib[1:])
attrib_prop = attrib_prop.setter(_setter)
setattr(cls, attrib[1:], attrib_prop)
for attrib in cls.__slots__:
_add_ctrlfile_prop(cls, attrib)
__all__ = (
'ScanDir',
)
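# Hedged usage sketch: a concrete implementation subclasses ScanDir and
# provides _create_service and control_dir_name; the names below are
# hypothetical.
#
# scandir = MyScanDir('/var/services')
# svc = scandir.add_service('web', svc_type='longrun')
# scandir.write()   # creates the control dir and writes every service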
|
from django.conf.urls import url
from . import views
app_name = 'market'
urlpatterns = [
# /classes/
url(r'^$', views.index, name='index'),
url(r'^makeoffer/$', views.makeoffer, name='makeoffer',),
#url(r'^offers/$', views.offer, name='offer'),
url(r'^makeoffer/(?P<subjectId>[0-9a-zA-Z]+)/$', views.createoffer, name='createoffer',),
url(r'^myoffers/$', views.myoffers, name='myoffers'),
url(r'^myoffers/deleteoffer/$', views.deleteOffer, name='deleteOffer'),
url(r'^offercreated/$', views.migrateOffer, name='migrateOffer'),
url(r'^alloffers/$', views.alloffers, name='alloffers'),
url(r'^alloffers/offeraccepted/$', views.offeraccepted, name='offeraccepted'),
url(r'^makeoffer/(?P<subjectId>[0-9a-zA-Z]+)/nochoice/', views.migrateOffer, name='migrateOffer'),
url(r'^timetable/$', views.timetable, name='timetable'),
url(r'^makeoffer/oneoffer/', views.oneoffer, name='oneoffer'),
]
|
from bottle import route, run, static_file, error, request, get
import os
import paste
# My current working directory
my_dir = os.getcwd()
# Route this API GET request to return invitation templates and query of the language.
# NOTE: Change the route in parenthesis to match your GET requests.
@route('/Brand/invitation_template')
def invite_static():
language = request.query.get('language')
filepath = 'invitation_template_' + language + '.txt'
my_root = os.path.join(my_dir, 'static')
return static_file(filepath, root=my_root)
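# e.g. GET http://<bottle webserver ip>:8080/Brand/invitation_template?language=en
# would serve static/invitation_template_en.txt (hypothetical language code).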
# My webbridge3 webapp branding route. This handles the API GET request for the archive zip file.
# The API sends: http://<bottle webserver ip>:8080/Brand/webapp/rebels.zip
@route('/Brand/webapp/rebels.zip')
def webapp_static():
filepath = 'rebels.zip'
my_root = os.path.join(my_dir, 'static')
return static_file(filepath, root=my_root)
# This route handles API GET requests for the call customization files for CMS.
# The API sends: http://14.49.18.252:8080/Brand/CallBranding
@get("/Brand/CallBranding/<filepath:re:.*\.(jpg|wav)>")
def callbrand_static(filepath):
my_root = os.path.join(my_dir, 'CallBranding')
return static_file(filepath, root=my_root)
# This responds with an error 404 not found if the request fails. NOTE: this is only seen in bottle logging. CMS
# will provide the default invitation template in callbridge if no file is returned by the bottle web framework.
@error(404)
def error404(error):
return 'Nothing here, sorry'
# run(server='paste') is added for multi-thread server
# run(host='0.0.0.0', port=8080, debug=True)
run(server='paste', host='0.0.0.0', port=8080, debug=True)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from add_pinyin_key import add_pinyin_key_to_bib_file
def main():
    parser = argparse.ArgumentParser(description='Add pinyin keys to Chinese bib entries.')
parser.add_argument('input_bib')
parser.add_argument('output_bib')
parser.add_argument('--common-strings', dest='using_common_strings', action='store_true', help='If your bibtex contains months defined as strings such as month = jan, you will need this option.')
# parser.add_argument('--no-common-strings', dest='using_common_strings', action='store_false', help='default')
    parser.set_defaults(using_common_strings=False)
args = parser.parse_args()
add_pinyin_key_to_bib_file(args.input_bib, args.output_bib, args.using_common_strings)
if __name__ == "__main__":
main()
|
# At some point you got tired of file names with spaces and decided to write
# a program that renames every file whose name contains spaces, replacing each
# group of spaces with an underscore "_".
# To start, write a program that reads a line and replaces each group of
# whitespace characters in it with an underscore.
# Input format:
# One line containing arbitrary characters, including whitespace.
# Output format:
# The transformed line.
# Sample Input 1:
# my file name.txt
# Sample Output 1:
# my_file_name.txt
# Sample Input 2:
# string with multi spaces
# Sample Output 2:
# string_with_multi_spaces
# Sample Input 3:
# single
# Sample Output 3:
# single
string = input().split()
print("_".join(string))
|
import os
import re
import sys
from pathlib import Path
from subprocess import run
from lib.grade import grade
from lib.runner import set_home_path, set_assignment_name
from lib.print import (is_in_quiet_mode, enter_quiet_mode, leave_quiet_mode, print_error,
print_message, print_usage)
DEFAULT_BULK_GRADE_DIRECTORY = os.path.abspath('./.repositories')
bulk_grade_mode = False
file_with_commit_links = None
bulk_grade_directory = DEFAULT_BULK_GRADE_DIRECTORY
def error(msg):
print_error(msg)
exit(1)
def parse_options(args, option_flags):
i = 0
options = list(map(lambda x: x[0], option_flags))
while len(args) > i and args[i][0] == '-':
if args[i] in options:
index = options.index(args[i])
if option_flags[index][2] is None:
option_flags[index][1]()
else:
i += 1
if len(args) > i:
option_flags[index][1](args[i])
else:
error('option flag "' + option_flags[index][0] +
'" needs an argument ' + option_flags[index][2])
else:
error('unknown option: ' + args[i])
i += 1
return args[i:]
def parse_assignment(args, assignments):
assignment_names = list(map(lambda x: x[0], assignments))
if len(args) == 0:
return None
if len(args) > 1:
error('only 1 assignment allowed')
if args[0] in assignment_names:
return assignments[assignment_names.index(args[0])]
error('unknown test: {}'.format(args))
def validate_options_for(assignment):
if not bulk_grade_mode and is_in_quiet_mode() and assignment is None:
error('please specify a assignment')
def check_assignment(assignment, base_test):
if assignment[3] != base_test:
base_test(mandatory=True)
set_assignment_name(assignment[2])
print_message('executing test \'{}\''.format(assignment[0]))
assignment[3]()
set_assignment_name('')
grade()
def enable_bulk_grader(file):
global bulk_grade_mode, file_with_commit_links
if not os.path.exists(file):
error('the file "' + file + '" does not exist')
if not os.path.isfile(file):
error('the path "' + file + '" is not a file')
bulk_grade_mode = True
file_with_commit_links = os.path.abspath(file)
def set_bulk_grade_directory(directory):
global bulk_grade_directory
bulk_grade_directory = os.path.abspath(directory)
def parse_commit_url(url):
matcher = re.match(
'^https://github.com/([^/]+)/([^/]+)/commit/([0-9a-f]+)$', url)
if matcher is None:
return None
else:
return {
'user': matcher.group(1),
'repo': matcher.group(2),
'commit': matcher.group(3)
}
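# e.g. parse_commit_url('https://github.com/alice/selfie/commit/3f2a1b')
#      -> {'user': 'alice', 'repo': 'selfie', 'commit': '3f2a1b'}  (hypothetical URL)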
def do_bulk_grading(assignment, base_test):
if not os.path.exists(bulk_grade_directory):
os.mkdir(bulk_grade_directory)
working_directory = os.getcwd()
os.chdir(bulk_grade_directory)
with open(file_with_commit_links, 'rt') as file:
for line in file.readlines():
info = parse_commit_url(line)
if info is None:
                print_message('"' + line.strip() + '" is not a valid github commit link')
continue
repo_id = '{}/{}'.format(info['user'], info['repo'])
print_message(repo_id + ': ', end='', loud=True)
clone_dir = os.path.join(bulk_grade_directory, repo_id)
if not os.path.exists(clone_dir):
status = os.system(
'git clone -q git@github.com:{} {} >/dev/null 2>&1'.format(repo_id, repo_id))
if status != 0:
print_message('error when cloning ' + repo_id, loud=True)
continue
os.chdir(clone_dir)
# remove all changes in local repository
os.system('git reset --hard -q >/dev/null 2>&1')
# fetch updates from github repository
os.system('git fetch -q >/dev/null 2>&1')
# change the local repository state using the commit ID
status = os.system(
'git checkout -q {} >/dev/null 2>&1'.format(info['commit']))
if status == 0:
if assignment is None:
print_message('updated', loud=True)
else:
print_message('')
check_assignment(assignment, base_test)
print_message('', loud=True)
else:
print_message(
'commit hash "{}" is not valid'.format(info['commit']))
os.chdir(bulk_grade_directory)
os.chdir(working_directory)
if bulk_grade_directory is DEFAULT_BULK_GRADE_DIRECTORY:
os.system('rm -rf {}'.format(bulk_grade_directory))
print_usage_flag = False
def set_print_usage():
global print_usage_flag
print_usage_flag = True
option_flags = [
('-q', enter_quiet_mode, None, 'only the grade is printed'),
('-h', set_print_usage, None, 'this help text'),
('-b', enable_bulk_grader, '<file>',
'bulk grade assignments defined by a file with github commit links'),
('-d', set_bulk_grade_directory, '<directory>',
'path where all bulk graded repositories should be saved')
]
def reset_state():
global bulk_grade_mode, bulk_grade_directory
global file_with_commit_links
global print_usage_flag
bulk_grade_mode = False
file_with_commit_links = None
bulk_grade_directory = DEFAULT_BULK_GRADE_DIRECTORY
set_assignment_name('')
print_usage_flag = False
leave_quiet_mode()
def process_arguments(argv, assignments):
try:
if len(argv) <= 1:
print_usage(option_flags, assignments)
exit()
set_home_path(Path(os.path.abspath(os.path.dirname(argv[0]))))
args = parse_options(argv[1:], option_flags)
assignment = parse_assignment(args, assignments)
validate_options_for(assignment)
if print_usage_flag:
print_usage(option_flags, assignments)
exit()
base_test = assignments[0][3]
if bulk_grade_mode:
do_bulk_grading(assignment, base_test)
else:
check_assignment(assignment, base_test)
finally:
reset_state()
|
# This is a .py file
|
# Student course-selection system
import pickle
import os
import hashlib
import time
import sys
class Admin(object):
"""docstring for Admin"""
def __init__(self, name):
super(Admin, self).__init__()
self.name = name
self.auth = 'admin'
def 创建课程(self):
with open('courses','ab') as f:
new_course = input('请输入新课程信息(课程名称,价格,周期,老师)。用","隔开。\n:').strip().split(',',3)
temp = Course(*new_course)
pickle.dump(temp,f)
def 创建学生账号(self):
with open('students','ab') as f:
new_student = input('请输入学生姓名及学号,用“,”隔开。\n:').strip().split(',',2)
temp = Student(*new_student)
pickle.dump(temp,f)
def __loadfile(self,file):
with open(file,mode='rb') as f:
while True:
try:
yield pickle.load(f)
except EOFError:
break
def 已有课程(self):
temp2 = self.__loadfile('courses')
for i in temp2:
print(i.name,i.price,i.period,i.teacher)
def 查看所有学生(self):
temp = self.__loadfile('students')
for i in temp:
print(i.name,i.id,i.course)
#def overview_all(self):
class Student(object):
"""docstring for Student"""
def __init__(self, name,id):
super(Student, self).__init__()
self.name = name
self.id = id
self.course = []
self.auth = 'student'
def __loadfile(self,file):
with open(file, mode='rb') as f:
while True:
try:
yield pickle.load(f)
except EOFError:
break
def 选课(self):
# target_dir = os.path.join(os.path.curdir , self.name)
# if not os.path.exists(target_dir):
# os.mkdir(target_dir)
# else:
choice = input('请输入你要选择的课程名称:').strip()
choice = choice.split(',',choice.count(','))
# target_file = os.path.join(target_dir,self.name + "courses")
        with open('students.bak','wb') as f2:
b = self.__loadfile('students')
for i in b:
if i.name==self.name:
i.course.append(choice)
pickle.dump(i,f2)
self.course.append(choice)
else:
pickle.dump(i,f2)
        os.remove('students')
        os.rename('students.bak','students')
def 查看可选课程(self):
temp = self.__loadfile('courses')
for i in temp:
print(i.name,i.price,i.period,i.teacher)
def 查看已选课程(self):
print(self.course)
class Course(object):
"""管理员创建课程用类"""
def __init__(self, name,price,period,teacher):
super(Course, self).__init__()
self.name = name
self.price = price
self.period = period
self.teacher = teacher
def register():
'''
注册
'''
count = 0
while count < 4:
username = input('请输入用户名(不是学号)(管理员请输入 admin):')
password = input('请输入密码(长度要在 6~14 个字符之间):')
if not username.strip().isdigit():
with open('user_msg.txt', encoding='utf-8', mode='r') as f1, open('user_msg.txt', encoding='utf-8',
mode='a') as f2:
lst1 = []
for line in f1:
lst1.append(line.strip().split('|')[0])
if username.strip() not in lst1 and (len(password.strip()) >= 6 and len(password.strip()) <= 14):
md5 = hashlib.md5()
md5.update(username.encode('utf-8'))
md5.update(password.encode('utf-8'))
ret = md5.hexdigest()
f2.write(username + '|' + ret + '\n')
# f2.write(username + '\n')
print(f'{username},恭喜您,注册成功!即将返回主界面!请登陆!')
time.sleep(0.5)
return True
elif username.strip() in lst1:
count += 1
print(f'用户名已存在!请重新注册。你还有{3 - count}次注册机会。')
time.sleep(0.5)
                elif len(password.strip()) < 6 or len(password.strip()) > 14:
count += 1
print(f'密码不符合要求!密码长度要在 6~14 个字符之间。请重新注册。你还有{3 - count}次注册机会。')
else:
count += 1
print(f'用户名不符合要求。只能含有字母或者数字,不能含有特殊字符。请重新注册。你还有{3 - count}次注册机会。')
def login():
'''
登录
'''
count = 0
while count < 4:
username = input('请输入用户名:')
password = input('请输入密码:')
md5 = hashlib.md5()
md5.update(username.encode('utf-8'))
md5.update(password.encode('utf-8'))
ret = md5.hexdigest()
with open('user_msg.txt', encoding='utf-8', mode='r') as f2:
for line in f2:
if line.strip().split('|')[1] == ret:
print('登陆成功!')
return username
else:
count += 1
print(f'用户名或密码错误!请重新登陆!你还有{3 - count}次机会。')
time.sleep(0.6)
def loadfile(file):
with open(file,mode='rb') as f:
while True:
try:
yield pickle.load(f)
except EOFError:
break
def run():
while True:
judge = input('欢迎来到选课界面:请选择要进行的操作:\n1.注册 2.登录 3.退出:').strip()
if judge=='1' or judge=='注册':
register()
elif judge=='2' or judge=='登录':
uid = login()
if uid=='admin':
with open('admin','rb') as f:
admin = pickle.load(f)
while True:
judge = input('管理员,您好!请选择操作:\n创建课程\n创建学生账号\n已有课程\n查看所有学生\n退出\n:').strip()
if judge=='退出':
print('回到初始界面!')
time.sleep(0.5)
break
elif hasattr(admin,judge):
if callable(getattr(admin,judge)):
getattr(admin,judge)()
time.sleep(0.5)
else:
print('输入错误')
time.sleep(0.5)
else:
temp = loadfile('students')
for i in temp:
if i.name==uid:
student = i
break
while True:
judge = input(f'{student.name}同学,您好!请选择操作:\n选课\n查看可选课程\n查看已选课程\n退出\n:').strip()
if judge == '退出':
print('回到初始界面!')
time.sleep(0.5)
break
elif hasattr(student, judge):
if callable(getattr(student, judge)):
getattr(student, judge)()
time.sleep(0.5)
else:
print('输入错误')
time.sleep(0.5)
elif judge=='3' or judge=='退出':
print('退出程序!')
break
if __name__ == '__main__':
run()
|
#!/usr/bin/python
import math

# Sieve of Eratosthenes: mark composites up to n, then scan for the
# 10001st prime and print it.
n = 1000000
arr = [True] * (n + 1)
count = 0
for i in range(2, int(math.sqrt(n)) + 1):
if arr[i]:
j = i * i
while j <= n:
arr[j] = False
j += i
for i in range(2, n + 1):
if arr[i]:
count += 1
if count == 10001:
print(i)
break
|
import random
import torch
import numpy as np
import yaml
import os
import requests
import secrets
import shutil
from typing import Any, Generator, Iterable, List, Mapping, Optional, Sequence, Sized, Union, Collection
from pathlib import Path
from urllib.parse import urlencode, parse_qs, urlsplit, urlunsplit, urlparse
from logging import getLogger
import tarfile
import zipfile
from hashlib import md5
import gzip
from tqdm import tqdm
log = getLogger(__name__)
def set_global_seeds(i):
torch.manual_seed(i)
torch.cuda.manual_seed_all(i)
random.seed(i)
np.random.seed(i)
def get_config(path):
with open(path, 'r') as stream:
        config = yaml.safe_load(stream)
return config
def get_version(filename):
n = filename.count('_')
if n == 0:
return filename
elif n==1:
return filename.split('_')[-1]
elif n==2:
return filename.split('_')[-2]
else:
        print("Can't find version")
def get_last_save(path):
if os.path.exists(path):
files = [get_version('.'.join(f.split('.')[:-1])) for f in os.listdir(path) if '.pt' in f]
numbers = []
for f in files:
            try:
                numbers.append(int(f))
            except ValueError:
                pass
if len(numbers) > 0:
return max(numbers)
else:
return 0
else:
return 0
def simple_download(url: str, destination: Union[Path, str]) -> None:
"""Download a file from URL to target location.
Displays a progress bar to the terminal during the download process.
Args:
url: The source URL.
destination: Path to the file destination (including file name).
"""
destination = Path(destination)
destination.parent.mkdir(parents=True, exist_ok=True)
log.info('Downloading from {} to {}'.format(url, destination))
    if url.startswith('s3://'):
        return s3_download(url, str(destination))  # s3_download is assumed to be defined elsewhere in this package
chunk_size = 32 * 1024
temporary = destination.with_suffix(destination.suffix + '.part')
headers = {'dp-token': _get_download_token()}
r = requests.get(url, stream=True, headers=headers)
if r.status_code != 200:
raise RuntimeError(f'Got status code {r.status_code} when trying to download {url}')
total_length = int(r.headers.get('content-length', 0))
if temporary.exists() and temporary.stat().st_size > total_length:
temporary.write_bytes(b'') # clearing temporary file when total_length is inconsistent
with temporary.open('ab') as f:
done = False
downloaded = f.tell()
if downloaded != 0:
log.warning(f'Found a partial download {temporary}')
with tqdm(initial=downloaded, total=total_length, unit='B', unit_scale=True) as pbar:
while not done:
if downloaded != 0:
log.warning(f'Download stopped abruptly, trying to resume from {downloaded} '
f'to reach {total_length}')
headers['Range'] = f'bytes={downloaded}-'
r = requests.get(url, headers=headers, stream=True)
if 'content-length' not in r.headers or \
total_length - downloaded != int(r.headers['content-length']):
raise RuntimeError(f'It looks like the server does not support resuming '
f'downloads.')
for chunk in r.iter_content(chunk_size=chunk_size):
if chunk: # filter out keep-alive new chunks
downloaded += len(chunk)
pbar.update(len(chunk))
f.write(chunk)
if downloaded >= total_length:
# Note that total_length is 0 if the server didn't return the content length,
# in this case we perform just one iteration and assume that we are done.
done = True
temporary.rename(destination)
def download(dest_file_path: Union[str, Path, List[Union[str, Path]]], source_url: str, force_download: bool = True) -> None:
"""Download a file from URL to one or several target locations.
Args:
dest_file_path: Path or list of paths to the file destination (including file name).
source_url: The source URL.
force_download: Download file if it already exists, or not.
"""
if isinstance(dest_file_path, list):
dest_file_paths = [Path(path) for path in dest_file_path]
else:
dest_file_paths = [Path(dest_file_path).absolute()]
if not force_download:
to_check = list(dest_file_paths)
dest_file_paths = []
for p in to_check:
if p.exists():
log.info(f'File already exists in {p}')
else:
dest_file_paths.append(p)
if dest_file_paths:
cache_dir = os.getenv('DP_CACHE_DIR')
cached_exists = False
if cache_dir:
first_dest_path = Path(cache_dir) / md5(source_url.encode('utf8')).hexdigest()[:15]
cached_exists = first_dest_path.exists()
else:
first_dest_path = dest_file_paths.pop()
if not cached_exists:
first_dest_path.parent.mkdir(parents=True, exist_ok=True)
simple_download(source_url, first_dest_path)
else:
log.info(f'Found cached {source_url} in {first_dest_path}')
for dest_path in dest_file_paths:
dest_path.parent.mkdir(parents=True, exist_ok=True)
shutil.copy(str(first_dest_path), str(dest_path))
def download_decompress(url: str,
download_path: Union[Path, str],
extract_paths: Optional[Union[List[Union[Path, str]], Path, str]] = None) -> None:
"""Download and extract .tar.gz or .gz file to one or several target locations.
The archive is deleted if extraction was successful.
Args:
url: URL for file downloading.
download_path: Path to the directory where downloaded file will be stored until the end of extraction.
extract_paths: Path or list of paths where contents of archive will be extracted.
"""
file_name = Path(urlparse(url).path).name
download_path = Path(download_path)
if extract_paths is None:
extract_paths = [download_path]
elif isinstance(extract_paths, list):
extract_paths = [Path(path) for path in extract_paths]
else:
extract_paths = [Path(extract_paths)]
cache_dir = os.getenv('DP_CACHE_DIR')
extracted = False
if cache_dir:
cache_dir = Path(cache_dir)
url_hash = md5(url.encode('utf8')).hexdigest()[:15]
arch_file_path = cache_dir / url_hash
extracted_path = cache_dir / (url_hash + '_extracted')
extracted = extracted_path.exists()
if not extracted and not arch_file_path.exists():
simple_download(url, arch_file_path)
else:
if extracted:
log.info(f'Found cached and extracted {url} in {extracted_path}')
else:
log.info(f'Found cached {url} in {arch_file_path}')
else:
arch_file_path = download_path / file_name
simple_download(url, arch_file_path)
extracted_path = extract_paths.pop()
if not extracted:
log.info('Extracting {} archive into {}'.format(arch_file_path, extracted_path))
extracted_path.mkdir(parents=True, exist_ok=True)
if file_name.endswith('.tar.gz'):
untar(arch_file_path, extracted_path)
elif file_name.endswith('.gz'):
ungzip(arch_file_path, extracted_path / Path(file_name).with_suffix('').name)
elif file_name.endswith('.zip'):
with zipfile.ZipFile(arch_file_path, 'r') as zip_ref:
zip_ref.extractall(extracted_path)
else:
raise RuntimeError(f'Trying to extract an unknown type of archive {file_name}')
if not cache_dir:
arch_file_path.unlink()
for extract_path in extract_paths:
for src in extracted_path.iterdir():
dest = extract_path / src.name
if src.is_dir():
_copytree(src, dest)
else:
extract_path.mkdir(parents=True, exist_ok=True)
shutil.copy(str(src), str(dest))
def _get_download_token() -> str:
"""Return a download token from ~/.deeppavlov/token file.
If token file does not exists, creates the file and writes to it a random URL-safe text string
containing 32 random bytes.
Returns:
32 byte URL-safe text string from ~/.deeppavlov/token.
"""
token_file = Path.home() / '.deeppavlov' / 'token'
if not token_file.exists():
if token_file.parent.is_file():
token_file.parent.unlink()
token_file.parent.mkdir(parents=True, exist_ok=True)
token_file.write_text(secrets.token_urlsafe(32), encoding='utf8')
return token_file.read_text(encoding='utf8').strip()
def untar(file_path: Union[Path, str], extract_folder: Optional[Union[Path, str]] = None) -> None:
"""Simple tar archive extractor.
Args:
file_path: Path to the tar file to be extracted.
extract_folder: Folder to which the files will be extracted.
"""
file_path = Path(file_path)
if extract_folder is None:
extract_folder = file_path.parent
extract_folder = Path(extract_folder)
tar = tarfile.open(file_path)
tar.extractall(extract_folder)
tar.close()
def ungzip(file_path: Union[Path, str], extract_path: Optional[Union[Path, str]] = None) -> None:
"""Simple .gz archive extractor.
Args:
file_path: Path to the gzip file to be extracted.
extract_path: Path where the file will be extracted.
"""
chunk_size = 16 * 1024
file_path = Path(file_path)
if extract_path is None:
extract_path = file_path.with_suffix('')
extract_path = Path(extract_path)
with gzip.open(file_path, 'rb') as fin, extract_path.open('wb') as fout:
while True:
block = fin.read(chunk_size)
if not block:
break
fout.write(block)
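if __name__ == '__main__':
    # Hedged usage sketch: the URL and destination are illustrative only.
    simple_download('http://example.com/sample.txt', 'downloads/sample.txt')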
|
"""
Week 3, Day 6: Kth Smallest Element in a BST
Given a binary search tree, write a function kthSmallest to find the kth smallest element in it.
Note: You may assume k is always valid, 1 ≤ k ≤ BST's total elements.
Hints
(1) Try to utilize the property of a BST.
(2) Try in-order traversal.
(3) What if you could modify the BST node's structure?
(4) The optimal runtime complexity is O(height of BST).
Examples
Input: root = [3,1,4,null,2], k = 1
3
/ \
1 4
\
2
Output: 1
---------------
Input: root = [5,3,6,2,4,null,null,1], k = 3
5
/ \
3 6
/ \
2 4
/
1
Output: 3
---------------
"""
from solutions.tree_node import TreeNode
class Solution:
def kthSmallest(self, root: TreeNode, k: int) -> int:
stack, node, val = [], root, None
while k:
if node:
stack.append(node)
node = node.left
continue
node = stack.pop()
k -= 1
val = node.val
node = node.right
return val
class SolutionV2:
def kthSmallest(self, root: TreeNode, k: int) -> int:
stack, node = [], root
while True:
while node:
stack.append(node)
node = node.left
node = stack.pop()
k -= 1
if not k:
return node.val
node = node.right
if __name__ == '__main__':
o = Solution()
root = TreeNode(42)
k = 1
expected = 42
print(o.kthSmallest(root, k) == expected)
root = TreeNode(3, left=TreeNode(1, right=TreeNode(2)), right=TreeNode(4))
k = 1
expected = 1
print(o.kthSmallest(root, k) == expected)
root = TreeNode(5, left=TreeNode(3, left=TreeNode(2, left=TreeNode(1)), right=TreeNode(4)), right=TreeNode(6))
k = 3
expected = 3
print(o.kthSmallest(root, k) == expected)
# last line of code
|
import numpy as np
import pandas as pd
import MARS # MARS (Multivariate Adaptive Regression Splines) regression class
import WindFarmGeneticToolbox # wind farm layout optimization using genetic algorithms classes
from datetime import datetime
import os
import pickle
# parameters for the genetic algorithm
elite_rate = 0.2
cross_rate = 0.6
random_rate = 0.5
mutate_rate = 0.1
# wind farm size, cells
rows = 21
cols = 21
cell_width = 77.0 * 2 # unit : m
#
N = 60 # number of wind turbines
pop_size = 100 # population size, number of individuals in a population
iteration = 3 # number of genetic algorithm iterations
# all data will be saved in the data folder
data_folder = "data"
if not os.path.exists(data_folder):
os.makedirs(data_folder)
# create an object of WindFarmGenetic
wfg = WindFarmGeneticToolbox.WindFarmGenetic(rows=rows, cols=cols, N=N, pop_size=pop_size,
iteration=iteration, cell_width=cell_width, elite_rate=elite_rate,
cross_rate=cross_rate, random_rate=random_rate, mutate_rate=mutate_rate)
# set wind distribution
# wind distribution is discrete (number of wind speeds) by (number of wind directions)
# wfg.init_4_direction_1_speed_12()
wfg.init_1_direction_1_N_speed_12()
################################################
# generate initial populations
################################################
init_pops_data_folder = "data/init_pops"
if not os.path.exists(init_pops_data_folder):
os.makedirs(init_pops_data_folder)
# n_init_pops : number of initial populations
n_init_pops = 60
for i in range(n_init_pops):
wfg.gen_init_pop()
wfg.save_init_pop("{}/init_{}.dat".format(init_pops_data_folder,i))
#############################################
# generate wind distribution surface
#############################################
wds_data_folder = "data/wds"
if not os.path.exists(wds_data_folder):
os.makedirs(wds_data_folder)
# mc : monte-carlo
n_mc_samples = 10000
# each layout is binary list and the length of the list is (rows*cols)
# 1 indicates there is a wind turbine in that cell
# 0 indicates there is no wind turbine in the cell
# in "mc_layout.dat", there are 'n_mc_samples' line and each line is a layout.
# generate 'n_mc_samples' layouts and save it in 'mc_layout.data' file
WindFarmGeneticToolbox.LayoutGridMCGenerator.gen_mc_grid(rows=rows, cols=cols, n=n_mc_samples, N=N,
lofname="{}/{}".format(wds_data_folder, "mc_layout.dat"))
# read layouts from 'mc_layout.dat' file
layouts = np.genfromtxt("{}/{}".format(wds_data_folder,"mc_layout.dat"), delimiter=" ", dtype=np.int32)
# generate dataset to build wind farm distribution surface
wfg.mc_gen_xy(rows=rows, cols=cols, layouts=layouts, n=n_mc_samples, N=N, xfname="{}/{}".format(wds_data_folder, "x.dat"),
yfname="{}/{}".format(wds_data_folder, "y.dat"))
# parameters for MARS regression method
n_variables = 2
n_points = rows * cols
n_candidate_knots = [rows, cols]
n_max_basis_functions = 100
n_max_interactions = 4
difference = 1.0e-3
x_original = pd.read_csv("{}/{}".format(wds_data_folder,"x.dat"), header=None, nrows=n_points, delim_whitespace=True)
x_original = x_original.values
y_original = pd.read_csv("{}/{}".format(wds_data_folder,"y.dat"), header=None, nrows=n_points, delim_whitespace=True)
y_original = y_original.values
mars = MARS.MARS(n_variables=n_variables, n_points=n_points, x=x_original, y=y_original,
n_candidate_knots=n_candidate_knots, n_max_basis_functions=n_max_basis_functions,
n_max_interactions=n_max_interactions, difference=difference)
mars.MARS_regress()
# save wind distribution model to 'wds.mars'
mars.save_mars_model_to_file()
with open("{}/{}".format(wds_data_folder,"wds.mars"), "wb") as mars_file:
pickle.dump(mars, mars_file)
# results folder
# adaptive_best_layouts_N60_9_20190422213718.dat : best layout for AGA of run index 9
# result_CGA_20190422213715.dat : run time and best eta for CGA method
results_data_folder = "data/results"
if not os.path.exists(results_data_folder):
os.makedirs(results_data_folder)
n_run_times = 3 # number of run times
# result_arr stores the best conversion efficiency of each run
result_arr = np.zeros((n_run_times, 2), dtype=np.float32)
# CGA method
CGA_results_data_folder = "{}/CGA".format(results_data_folder)
if not os.path.exists(CGA_results_data_folder):
os.makedirs(CGA_results_data_folder)
for i in range(0, n_run_times): # run times
print("run times {} ...".format(i))
wfg.load_init_pop("{}/init_{}.dat".format(init_pops_data_folder, i))
run_time, eta = wfg.conventional_genetic_alg(ind_time=i, result_folder=CGA_results_data_folder)
result_arr[i, 0] = run_time
result_arr[i, 1] = eta
time_stamp = datetime.now().strftime("%Y%m%d%H%M%S")
filename = "{}/result_CGA_{}.dat".format(CGA_results_data_folder, time_stamp)
np.savetxt(filename, result_arr, fmt='%f', delimiter=" ")
# AGA method
AGA_results_data_folder = "{}/AGA".format(results_data_folder)
if not os.path.exists(AGA_results_data_folder):
os.makedirs(AGA_results_data_folder)
for i in range(0, n_run_times): # run times
print("run times {} ...".format(i))
wfg.load_init_pop("{}/init_{}.dat".format(init_pops_data_folder, i))
run_time, eta = wfg.adaptive_genetic_alg(ind_time=i, result_folder=AGA_results_data_folder)
result_arr[i, 0] = run_time
result_arr[i, 1] = eta
time_stamp = datetime.now().strftime("%Y%m%d%H%M%S")
filename = "{}/result_AGA_{}.dat".format(AGA_results_data_folder, time_stamp)
np.savetxt(filename, result_arr, fmt='%f', delimiter=" ")
# SIGA method
SIGA_results_data_folder = "{}/SIGA".format(results_data_folder)
if not os.path.exists(SIGA_results_data_folder):
os.makedirs(SIGA_results_data_folder)
# wds_mars_file : wind distribution surface MARS model file
wds_mars_file = "{}/{}".format(wds_data_folder, "wds.mars")
for i in range(0, n_run_times): # run times
print("run times {} ...".format(i))
wfg.load_init_pop("{}/init_{}.dat".format(init_pops_data_folder, i))
run_time, eta = wfg.self_informed_genetic_alg(ind_time=i, result_folder=SIGA_results_data_folder,
wds_file=wds_mars_file)
result_arr[i, 0] = run_time
result_arr[i, 1] = eta
time_stamp = datetime.now().strftime("%Y%m%d%H%M%S")
filename = "{}/result_self_informed_{}.dat".format(SIGA_results_data_folder, time_stamp)
np.savetxt(filename, result_arr, fmt='%f', delimiter=" ")
|
from django.conf.urls import url
from diary import views
urlpatterns = [
url(r'^add/(?P<year>[0-9]{4})/(?P<month>[0-9]{2})/(?P<day>[0-9]{2})/$', views.diary_add, name='diary_add'),
url(r'^calendar/month/$', views.month_calendar, name='month_calendar'),
url(r'^(?P<year>[0-9]{4})/(?P<month>[0-9]{2})/(?P<day>[0-9]{2})/$',
views.diary_detail, name='diary_detail'),
]
|
## determine the colour values of a tennis ball
import cv2
# initialize webcam
cam = cv2.VideoCapture(0)
# define region of interest
x, y, w, h = 400, 400, 100, 100
# show the stream from the webcam
while cam.isOpened():
    # read a frame from the webcam
    ret, frame = cam.read()
    #frame = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    # select a region of interest at point (y, x) with dimensions w x h pixels
    region_of_interest = frame[y:y+h, x:x+w]
    # draw a rectangle into the frame
    cv2.rectangle(frame, (x, y), (x+w, y+h), (255, 255, 255), thickness=1)
    b, g, r = cv2.mean(region_of_interest)[:3]
    # display the frame
    cv2.imshow("frame", frame)
    # wait for a key press (important, otherwise the window is not shown)
    key = cv2.waitKey(1) & 0xff
    if key == 27:
        break
print("test")
|
import pytest
from rest_framework.exceptions import ValidationError
from api.users.v1.serializers import UserSignUpSerializer
from api.users.models import User
@pytest.fixture
def user_mock():
    # the tests below request 'user_mock' as an argument, so it must be a
    # pytest fixture rather than a module-level dict
    return {
        'email': 'test@test.gmail.com',
        'first_name': 'test',
        'last_name': 'test',
        'password': '12345678'
    }
@pytest.mark.django_db
class TestUserSignUpSerializer:
def test_valid_incoming_data(self, user_mock):
"""
Should return True when the incoming data to be deserialized is valid
"""
serializer = UserSignUpSerializer(data=user_mock)
assert serializer.is_valid()
@pytest.mark.parametrize('input_data, context', [
({'email': 123}, 'Invalid data'),
({'first_name': 'test'}, 'Missing fields')
])
def test_invalid_incoming_data(self, user_mock, input_data, context):
"""
Should raise ValidationError when the incoming data to be deserialized is invalid
"""
input_data = {**user_mock, **input_data} if context != 'Missing fields' else input_data
serializer = UserSignUpSerializer(data=input_data)
with pytest.raises(ValidationError):
serializer.is_valid(raise_exception=True)
def test_valid_serialized(self, regular_user, user_mock):
"""
Should return the respective serialized object
"""
serializer = UserSignUpSerializer(regular_user)
compare_attributes = ('id', 'email', 'first_name', 'last_name', 'created_at')
assert isinstance(serializer.data, dict)
for prop in compare_attributes:
# just makes sure that every attribute exist
assert prop in serializer.data
def test_create(self, mocker, regular_user, user_mock):
"""
Should call create_user custom method instead of the predefined one (create)
"""
serializer = UserSignUpSerializer(data=user_mock)
        mocker.patch.object(User.objects, 'create_user', return_value=regular_user)
mocker.spy(User.objects, 'create')
serializer.is_valid()
serializer.save()
User.objects.create_user.assert_called_once()
User.objects.create.assert_not_called()
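# Hedged sketch (would live in conftest.py): the 'regular_user' fixture these
# tests assume; the create_user arguments below are illustrative only.
#
# @pytest.fixture
# def regular_user(db):
#     return User.objects.create_user(email='regular@test.com', password='12345678',
#                                     first_name='regular', last_name='user')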
|
import time
import os
disk = 'C:\\'
path = 'Users\\Виктория\\Desktop\\Для Универа\\Второй курс\\Четвертый семестр\\Прога\\course_python\\'
file_name = str("the time is %s.txt" % (format(time.strftime("%Y_%m_%d-%H_%M_%S"))))
whole_path = os.path.join(disk, path, file_name)
start_time = time.time()
def setup():
with open(whole_path, 'w') as file_to_write:
file_to_write.write(summa(10101010291938, 5678697083))
def summa(a, b):
    total = int(a) + int(b)  # compute the sum so it can be reported alongside the timing
    t = ' It took %s seconds to solve this (sum = %s)' % (time.time() - start_time, total)
    return t
setup()
|
"""
A single layer model, essentially a regression model, to predict the steering images.
Set the variable 'AUGMENT' to determine if you want to run image augmentation.
If AUGMENT is set to be True, the script will flip the images horizontally in each batch and add it to the training data.
"""
import os
import cv2
from keras.layers import Flatten, Dense, Lambda
from sklearn.model_selection import train_test_split
import pandas as pd
from keras.models import Sequential
import numpy as np
AUGMENT=True
sample_folder = './sample behavioral cloning data/'
ch, row, col = 3, 160, 320
def load_samples_df(test_size):
    sample_df = pd.read_csv(os.path.join(sample_folder, 'driving_log.csv'), index_col=None)
train, val = train_test_split(sample_df, test_size=test_size)
return train, val
def generator(samples, batch_size=32, augment=False):
num_samples = len(samples)
dummy_seed = 1
while dummy_seed == 1: # Loop forever so the generator never terminates
# dummy_seed-=1
for offset in range(0, num_samples, batch_size):
batch_samples = samples.iloc[offset:min(offset + batch_size, num_samples), 0]
images = []
for batch_sample in batch_samples:
name = os.path.join(sample_folder, batch_sample)
center_image = cv2.imread(name)
# cv2.imwrite('test.jpg',center_image)
images.append(center_image)
# trim image to only see section with road
X_train = np.array(images).astype('float64')
y_train = samples.iloc[offset:min(offset + batch_size, num_samples), 3]
# X_train = preprocess_input(X_train)
            if augment == True:
                # flip the images horizontally: axis=2 is the width axis of the
                # (batch, height, width, channels) array; the steering sign flips too
                inv_X_train = np.flip(X_train, axis=2)
                inv_y_train = -y_train
X_train = np.concatenate((X_train, inv_X_train), axis=0)
y_train = np.concatenate((y_train, inv_y_train), axis=0)
# plt.imshow(X_train[0])
# plt.savefig("test2.jpg")
yield X_train, y_train
t, v = load_samples_df(test_size=0.2)
train_data_g = generator(t,augment=AUGMENT)
val_data_g = generator(v,augment=AUGMENT)
model = Sequential()
model.add(Lambda(lambda x: x / 255 - 0.5, input_shape=(row, col, ch)))
model.add(Flatten())
model.add(Dense(1))
model.compile(loss='mse', optimizer='adam')
hist = model.fit_generator(generator=train_data_g, samples_per_epoch=t.shape[0] * (1+int(AUGMENT)), nb_epoch=2,
validation_data=val_data_g, nb_val_samples=v.shape[0] * (1+int(AUGMENT)))
model.save("test_model.h5")
|
import maya.cmds as cmds
import maya.mel as mel
def rotateImage(objName, deg):
    for x in range(0, int(360 / deg)):
l = 'x'+str(x) + 'yna' + 'zna'
cmds.xform(objName, relative=True, rotation=(deg, 0, 0) )
screenShot(objName, l)
    for y in range(0, int(360 / deg)):
l = 'x'+str(x) + 'y'+str(y) + 'zna'
cmds.xform(objName, relative=True, rotation=(0, deg, 0) )
screenShot(objName, l)
    for z in range(0, int(360 / deg)):
l = 'x'+str(x) + 'y'+str(y) + 'z'+str(z)
cmds.xform(objName, relative=True, rotation=(0, 0, deg) )
screenShot(objName, l)
def screenShot(objName, l):
mel.eval('renderWindowRender redoPreviousRender renderView')
editor = 'renderView'
cmds.renderWindowEditor( editor, e=True,refresh = True,removeAllImages = True,writeImage=('D:\\test\\jklol\\'+'chair_test_'+str(l)))
s = cmds.ls(selection = True)
objName = s[0]
l = 'starting'
screenShot(objName, l)
rotateImage(objName, 45)
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:hua
from flask import Blueprint
# 1. Create the blueprint object
users_blue = Blueprint('users', __name__)
from . import views
|
# Cole Calhoun
# CSCI 164 Homework
# Feb 2, 2016
from __future__ import division, print_function
from math import exp, pi
import random
# Chapter 3
# 3)
def chapter3Problem3(n):
estimation = 0
for each in range(n):
randomInt = random.random()
estimation += exp(exp(randomInt))
print "Chapter 3, Problem 3\nThe estimation is: %s" % (estimation/n)
chapter3Problem3(100)
# 8)
def chapter3Problem8(n):
estimation = 0
for each in range(n):
randomInt = random.random()
randomInt *= (2 * pi)
estimation += exp(randomInt*randomInt)
print "\nChapter 3, Problem 8\nThe estimation is: %s" % (estimation/n)
chapter3Problem8(100)
# Chapter 4
# 1)
def problem1(n):
oneThird = 0
twoThirds = 0
counter = 0
    while counter < n:
testNumber = random.random()
if testNumber <= (1/3):
oneThird+=1
else:
twoThirds+=1
counter+=1
print "N = %s P1 = %s%% P2 = %s%%" % (n, (oneThird / n), (twoThirds / n))
print("\nChapter 4, Question 1:\n")
problem1(100)
problem1(1000)
problem1(10000)
# 3)
def problem3(n):
# X=1
One = 0
# X=2
Two = 0
# X=3
Three = 0
# X=4
Four = 0
counter = 0
    while counter < n:
testNumber = random.random()
# ASSIGN ALL VARIABLES TO THEIR EXPECTED PROPORTIONS
if testNumber <= (.3):
One+=1
elif testNumber >(.3) and testNumber <=(.5):
Two+=1
elif testNumber >(.5) and testNumber <=(.85):
Three += 1
elif testNumber >(.85) and testNumber <=(1):
Four +=1
counter+=1
print("\nChapter 4, Question 3:\n")
print "N = %s\nOne = %s%%\nTwo = %s%%\nThree = %s%%\nFour = %s%%\n" % (n, (One / n), (Two / n), (Three / n), (Four / n))
problem3(100)
# 7)
def problem7(n):
# INITIALIZE A DICTIONARY OF ALL POSSIBLE ROLLS WITH A VALUE OF FALSE
# WHEN THE ROLL IS ROLLED - CHANGE THE VALUE TO TRUE
# IF ALL VALUES IN THE DICTIONARY ARE TRUE -- ALL POSSIBLE ROLLS HAVE BEEN ROLLED
# COUNT HOW MANY ROLLS IT TOOK
# DO THIS FOR YOUR SAMPLE SIZE AND DIVIDE THE TOTAL NUMBER OF ROLLS BY THE SAMPLE SIZE
sumOfRolls = 0
for each in range(n):
numberOfRolls = 0
dictionary = {2: False, 3: False, 4: False, 5: False, 6: False, 7: False, 8: False, 9: False, 10: False, 11: False, 12: False}
possibilitiesMet = 0
allPossibilitesMet = False
while allPossibilitesMet == False:
numberOfRolls += 1
dice1 = random.randint(1,6)
dice2 = random.randint(1,6)
completeRoll = dice1 + dice2
if dictionary[completeRoll] == False:
dictionary[completeRoll] = True
possibilitiesMet = 0
for each in dictionary:
if dictionary[each] == False:
break
else:
possibilitiesMet += 1
if possibilitiesMet == 11:
sumOfRolls += numberOfRolls
allPossibilitesMet = True
break
print "\nChapter 4, Question 7:\n"
print "It took an average of %s rolls for all possibilities to be rolled out of %s tries" % (sumOfRolls/n, n)
problem7(1000)
|
#!/usr/bin/python3
from sys import argv
arg_len = len(argv)
if __name__ == "__main__":
if arg_len == 1:
print('0 arguments.')
else:
if arg_len == 2:
print('1 argument:')
else:
print('{:d} arguments:'.format(arg_len - 1))
for i in range(1, arg_len):
print('{:d}: {}'.format(i, argv[i]))
|
from .custom_driver import client, use_browser
import time
import json
from .utils import log
from .util_game import close_modal, check_resources, old_shortcut
from .village import open_village, open_city
from .settings import settings
def train_troops_thread(
browser: client, village: int, units: list, interval: int
) -> None:
# init delay
time.sleep(2)
with open(settings.units_path, "r") as f:
content = json.load(f)
while True:
# log("start training troops.")
if not start_training(browser, village, units, content):
log("village is too low of resources.")
# log("finished training troops.")
time.sleep(interval)
@use_browser
def start_training(
browser: client, village: int, units_train: list, content: dict
) -> bool:
open_village(browser, village)
open_city(browser)
tribe_id = browser.find('//*[@id="troopsStationed"]//li[contains(@class, "tribe")]')
tribe_id = tribe_id.get_attribute("tooltip-translate")
units_cost = [] # resources cost for every unit in units_train
total_units_cost_wood = [] # total wood cost for every unit in units_train
total_units_cost_clay = [] # total clay cost for every unit in units_train
total_units_cost_iron = [] # total iron cost for every unit in units_train
training_queue: dict = {} # dict for training queue
for tribe in content["tribe"]:
if tribe_id in tribe["tribeId"]:
for unit in tribe["units"]:
if unit["unitId"] in units_train:
units_cost.append(unit["trainingCost"])
training_cost_wood = unit["trainingCost"]["wood"]
training_cost_clay = unit["trainingCost"]["clay"]
training_cost_iron = unit["trainingCost"]["iron"]
total_units_cost_wood.append(training_cost_wood)
total_units_cost_clay.append(training_cost_clay)
total_units_cost_iron.append(training_cost_iron)
# initializing training_queue
training_queue[unit["unitTrain"]] = {}
training_queue[unit["unitTrain"]][unit["unitId"]] = {}
training_queue[unit["unitTrain"]][unit["unitId"]]["amount"] = 0
training_queue[unit["unitTrain"]][unit["unitId"]]["name"] = unit[
"unitName"
]
resources = check_resources(browser)
# training amount distributed by: less resources consumption per unit type
    training_amount = []  # smallest possible number of each unit type to train
training_amount_wood = []
training_amount_clay = []
training_amount_iron = []
for cost in total_units_cost_wood:
train_amount = resources["wood"] // (len(units_train) * cost)
training_amount_wood.append(train_amount)
for cost in total_units_cost_clay:
train_amount = resources["clay"] // (len(units_train) * cost)
training_amount_clay.append(train_amount)
for cost in total_units_cost_iron:
train_amount = resources["iron"] // (len(units_train) * cost)
training_amount_iron.append(train_amount)
# get the minimum possible troops to train
training_amount = list(
map(min, training_amount_wood, training_amount_clay, training_amount_iron)
)
if sum(training_amount) == 0:
return False
    # copy training_amount values into training_queue
    _iter = iter(training_amount)
for unit_train in training_queue:
for unit_id in training_queue[unit_train]:
training_queue[unit_train][unit_id]["amount"] = next(_iter)
total_training_cost = [] # amount of troop * units_cost
    _iter = iter(training_amount)  # fresh iterator over training_amount
for unit_cost in units_cost:
amount = next(_iter)
temp = {} # temporary dict
for _keys, _values in unit_cost.items():
temp[_keys] = _values * amount
total_training_cost.append(temp)
# Start training troops
index = 0
for unit_train in training_queue:
old_shortcut(browser, unit_train)
for unit_id in training_queue[unit_train]:
            # input amount from training_queue[unit_train][unit_id]
input_amount = training_queue[unit_train][unit_id]["amount"]
input_name = training_queue[unit_train][unit_id]["name"]
if input_amount == 0:
continue # Skip empty amount
log(
"training {} units of type {}".format(input_amount, input_name)
+ " with a cost of wood:{}, clay:{}, iron:{}".format(
total_training_cost[index]["wood"],
total_training_cost[index]["clay"],
total_training_cost[index]["iron"],
)
)
            # click the unit image matching unit_id
unit_type = "unitType{}".format(unit_id)
image_troop = browser.find(
"//div[@class='modalContent']//img[contains(@class, '{}')]".format(
unit_type
)
)
browser.click(image_troop, 1)
input_troop = browser.find('//div[@class="inputContainer"]')
input_troop = input_troop.find_element_by_xpath("./input")
input_troop.click()
input_troop.send_keys(input_amount)
browser.sleep(1.5)
# click train button
train_button = browser.find(
"//button[contains(@class, 'animate footerButton')]"
)
browser.click(train_button, 1)
browser.sleep(1.5)
index += 1
browser.sleep(1)
close_modal(browser)
return True
|
from sympy import *
from sympy.stats import Normal, density, E, variance
var("tau nact pact dt std damping scale tau_threshold act_threshold", real=True)
var("dt std alpha scale time_constant", real=True, positive=True)
noise = Normal("noise", 0, std)
alpha = 1 - exp(-dt/time_constant)
#nact = alpha*pact + (1 - alpha)*(atan((tau - tau_threshold)*scale) + noise)
#dact = (nact - pact).simplify()
#pprint(dact)
#nact = pact + (-damping*pact + atan((tau - tau_threshold)*scale) + noise)*dt
dact = -alpha*pact + atan((tau - tau_threshold)*scale)*dt + noise*sqrt(dt)
print(dact)
pprint(density(dact))
m = E(dact)
v = variance(dact).simplify()
print(m)
print(v)
|
import torch
import torch.nn as nn
from deep_depth_transfer.utils.math import generate_relative_transformation
import kornia
class GeometricRegistrationLoss(torch.nn.Module):
def __init__(self, registration_lambda, camera_matrix):
super().__init__()
self._loss = nn.L1Loss()
self._registration_lambda = registration_lambda
self.camera_matrix = camera_matrix
def generate_next_image(self, current_image, next_depth, transformation_from_next_to_current):
generated_next_image = kornia.warp_frame_depth(current_image,
next_depth,
transformation_from_next_to_current,
self.camera_matrix)
return generated_next_image
def generate_current_image(self, next_image, current_depth, transformation_from_current_to_next):
generated_current_image = kornia.warp_frame_depth(next_image,
current_depth,
transformation_from_current_to_next,
self.camera_matrix)
return generated_current_image
def forward(self, current_depth, next_depth, current_position, next_position, current_angle, next_angle):
transformation_from_current_to_next = generate_relative_transformation(current_position,
current_angle,
next_position,
next_angle)
transformation_from_next_to_current = generate_relative_transformation(next_position,
next_angle,
current_position,
current_angle)
generated_next_depth = self.generate_next_image(current_depth, next_depth,
transformation_from_next_to_current)
generated_current_depth = self.generate_current_image(next_depth, current_depth,
transformation_from_current_to_next)
loss_previous = self._loss(generated_current_depth, current_depth)
loss_next = self._loss(generated_next_depth, next_depth)
return (loss_previous + loss_next) / 2 * self._registration_lambda
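# A minimal usage sketch (hypothetical values, not from the original source):
# assuming camera_matrix is a batched (B, 3, 3) intrinsics tensor and the
# depths are (B, 1, H, W) tensors, matching kornia's warp_frame_depth inputs:
#
#   loss_fn = GeometricRegistrationLoss(registration_lambda=0.1, camera_matrix=K)
#   loss = loss_fn(current_depth, next_depth,
#                  current_position, next_position,
#                  current_angle, next_angle)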
|
print("this is good")
|
# Create a short text adventure that will call the user by their name.
# The text adventure should use standard text adventure commands ("l, n, s, e, i, etc.").
import string
import sys
class TextAdventure:
COMMANDS = ['go', 'take', 'drop', 'use', 'inspect', 'inventory', 'help']
INVENTORY_SIZE = 3
EXIT_CHECKPOINT = 5
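    # 'checkpoint' tuples are (required_checkpoint, next_checkpoint): a use
    # only fires when current_checkpoint equals the first value (None = any),
    # and then advances current_checkpoint to the second value.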
ITEMS = {
'berries' : {
'description' : 'A stalk of green, inch-wide berries that are squishy to the touch. You have no idea what kind they are.',
'uses' : {
'other' : {
'checkpoint' : (None, EXIT_CHECKPOINT),
'event_text' : "You pop a berry into your mouth and enjoy the tangy taste. Moments later, you clutch your throat as you die a slow and painful death. You really shouldn't go eating fruit you don't recognize."
}
}
},
'blowtorch' : {
'description' : 'This thing makes fire.',
'uses' : {
'house' : {
'checkpoint' : (1, 2),
'event_text' : "You squeeze the blowtorch trigger and the wooden table in front of you instantly bursts into flame. You evacuate the house in a timely manner and watch as it swiftly burns to the ground, leaving a smoking pile of rubble. You can make out a silhouette of something in the ruins, but the smoke is too thick to discern details or approach it.",
'update_description' : "Once the site of a quaint farmhouse, now a pile of rubble thanks to your careless misuse of fire. You can't approach the ruins due to the thick smoke."
},
'other' : {
'event_text' : 'A flame bursts from the tip of the blowtorch, but nothing around you looks worth setting on fire.'
}
}
},
'bottle' : {
'description' : 'A bottle you found on the beach. There seems to be something inside...',
'uses' : {
'other' : {
'event_text' : 'You shake the bottle and a rolled up piece of parchment falls out (obviously). You unroll the paper only to reveal a string of completely unrecognizable symbols. You return it to the bottle.'
}
}
},
'circuitboard' : {
'description' : 'A generic looking circuitboard to an unknown device',
'uses' : {
'other' : {
'event_text' : "With no power supply or compatible parts around, there's not much use for this."
}
}
},
'flux capacitor' : {
'description' : 'A high tech gizmo',
'uses' : {
'house' : {
'checkpoint' : (3, 4),
'event_text' : 'You haphazardly shove the flux capacitor into the empty slot and the machine hums to life. The interior glows, beckoning you to step inside',
'update_items' : 'mysterious machine'
},
'other' : {
'event_text' : 'How do you even use this thing?'
}
}
},
'hatchet' : {
'description' : 'A small hatchet with a sharp blade',
'uses' : {
'forestnorth' : {
'event_text' : "You chop away at a nearby tree. But this hatchet is small and you're no lumberjack, so you soon give up after making very little progress"
},
'forestsouth' : {
'event_text' : "You chop away at a nearby tree. But this hatchet is small and you're no lumberjack, so you soon give up after making very litttle progress"
},
'forest' : {
'event_text' : "You chop away at a nearby tree. But this hatchet is small and you're no lumberjack, so you soon give up up after making very litttle progress"
},
'other' : {
'event_text' : "There's not much use for a hatchet around here"
}
}
},
'metal scraps' : {
'description' : 'Some scraps from a mysterious wreckage in the forest.',
'uses' : {
'other' : {
'event_text' : "These scraps twisted and falling apart, probably useless for anything other than a modern art sculpture. And let's face it, you're no artist."
}
}
},
'mysterious machine' : {
'description' : 'A tall metallic cylinder just large enough for a single occupant. It emits a humming noise and pulses with a blue light',
'uses' : {
'other' : {
'checkpoint' : (4, EXIT_CHECKPOINT),
'event_text' : 'You step inside and flip a lever that is the only discernable control. The sound and light intensify until they are overwhelming and the whole machine is vibrating. You feel your feet leave the ground as you are whisked between dimensions to an unknown time and place.'
}
}
},
'shovel' : {
'description' : 'An ordinary shovel',
'uses' : {
'other' : {
'event_text' : 'You dig a hole. Good job!'
}
}
},
'thermal detonator' : {
'description' : 'A small metal orb with buttons and blinking lights.',
'uses' : {
'other' : {
'checkpoint' : (None, EXIT_CHECKPOINT),
'event_text' : "You press a button at random, and don't even have time to feel pain as you are incinerated by the explosion. That was not a very good idea."
}
}
},
'towel' : {
'description' : "If you want to survive out there, you've got to know where your towel is",
'uses' : {
'house' : {
'checkpoint' : (2, 3),
'event_text' : "You wrap the towel around your nose and mouth, creating a makeshift mask and allowing you to get closer to the smoking ruins. Your eyes burn, but you still manage to make out what looks like a futuristic phone booth, perhaps some kind of teleporter. You punch the big red button labled 'CAUTION', but the machine remains quiet and dark. Upon closer inspection, you find a panel open in the side with an empty slot for some kind of critical part.",
'update_description' : "Once the site of a quaint farmhouse, now a pile of rubble thanks to your careless misuse of fire. You can't approach the ruins due to the thick smoke.",
},
'other' : {
'event_text' : 'Your towel has innumerable uses, but none of them seem particularly applicable at the moment'
}
}
},
'warp drive' : {
'description' : 'An essential component for interstellar travel. If only you knew how to use it...',
'uses' : {
'other' : {
'event_text' : 'You look around, but fail to find a functional warp-drive enabled vehicle.'
}
}
}
}
def __init__(self):
self.locations = {
'hill' : {
'description' : 'You laboriously climb the steep hill and survey the land from the top. You stand at the north edge of the island, looking south. Along the coast to your left: a forest, and on the right: a beach. A field lies at the bottom of the hill in front of you, and beyond that a house.',
'items' : [],
'exits' : {
'n' : 'hill',
's' : 'field',
'e' : 'forestnorth',
'w' : 'beachnorth'
}
},
'field' : {
'description' : 'You enter a field. No crops are growing, but the land appears to have been worked recently. The plowed rows are not straight and orderly, but form a circular pattern seemingly radiating from the house to the south.',
'items' : ['hatchet', 'shovel'],
'exits' : {
'n' : 'hill',
's' : 'house',
'e' : 'forest',
'w' : 'beach'
}
},
'house' : {
'description' : 'You approach a small farmhouse. The exterior looks old and rotted, and seems to be abandoned. Upon entering, however, a plethora of advanced technology litters the table.',
'items' : ['thermal detonator', 'blowtorch', 'circuitboard'],
'exits' : {
'n' : 'field',
's' : 'house',
'e' : 'forestsouth',
'w' : 'beachsouth'
}
},
'forest' : {
'description' : 'You walk through the forest, noticing the bizarre collection of flora, most of which you have never seen before.',
'items' : ['berries'],
'exits' : {
'n' : 'forestnorth',
's' : 'forestsouth',
'e' : 'forest',
'w' : 'field'
}
},
'forestnorth' : {
'description' : 'You walk through the forest, noticing that you have yet to see a single animal, despite the flourishing plant life.',
'items' : [],
'exits' : {
'n' : 'forestnorth',
's' : 'forest',
'e' : 'forestnorth',
'w' : 'hill'
}
},
'forestsouth' : {
'description' : 'The trees give way to a small clearing, where a twisted metal wreckage lays partially buried in the ground. It looks like some kind of aerial vehicle, but its design and construction are completely unfamiliar to you. It is partially overgrown; clearly it has been here for a while.',
'items' : ['metal scraps', 'warp drive', 'flux capacitor'],
'exits' : {
'n' : 'forest',
's' : 'forestsouth',
'e' : 'forestsouth',
'w' : 'house'
}
},
'beach' : {
'description' : 'The beach where you woke up. The sand is pristine; no sign of a wreckage washed up with you.',
'items' : ['towel'],
'exits' : {
'n' : 'beachnorth',
's' : 'beachsouth',
'e' : 'field',
'w' : 'beach'
}
},
'beachnorth' : {
'description' : 'The west coast abruptly turns a corner and becomes the north coast. Looking behind you, you see that the coastline is completely, unnaturally straight.',
'items' : ['bottle'],
'exits' : {
'n' : 'beachnorth',
's' : 'beach',
'e' : 'hill',
'w' : 'beachnorth'
}
},
'beachsouth' : {
'description' : 'The west coast abruptly turns a corner and becomes the south coast. Looking behind you, you see that the coastline is completely, unnaturally straight.',
'items' : [],
'exits' : {
'n' : 'beach',
's' : 'beachsouth',
'e' : 'house',
'w' : 'beachsouth'
}
}
}
        self.help(None)
print "You wake up on a beach, with no memory of previous events. The shoreline stretches away to the north and south. To the east, the sand gives way to grass.\n"
self.current_location = 'beach'
self.current_checkpoint = 1
self.player_inventory = []
def help(self, none):
print "Commands"
print "========"
print "Go [direction]"
print "Inspect [item | surroundings(blank)]"
print "Take [item]"
print "Drop [item]"
print "Use [item]"
print "Inventory\n"
def go(self, direction):
if direction not in ['north', 'n', 'south', 's', 'east', 'e', 'west', 'w'] :
print "You can only go in valid cardinal directions (north/south/east/west)"
else :
direction = direction[0]
if self.locations[self.current_location]['exits'][direction] != self.current_location:
self.current_location = self.locations[self.current_location]['exits'][direction]
                self.inspect(None)
else:
print "The ocean stretches out in front of you as far as the eye can see. You don't fancy a swim so you look for another way."
def inspect(self, item):
if item:
if item in self.player_inventory or item in self.locations[self.current_location]['items']:
print self.ITEMS[item]['description']
else:
print "No %s around to inspect" % item
else:
print self.locations[self.current_location]['description']
print ("Items: " + ", ".join(self.locations[self.current_location]['items'])).title()
def take(self, item):
if item in self.ITEMS and item in self.locations[self.current_location]['items']:
if len(self.player_inventory) >= self.INVENTORY_SIZE:
print "Inventory is full. Use 'drop' to leave something here"
else:
print "You picked up the %s" % item
self.locations[self.current_location]['items'].remove(item)
self.player_inventory.append(item)
else:
print "There's no %s to take." % item
def drop(self, item):
if item not in self.player_inventory:
print "You don't have %s" % item
else:
self.player_inventory.remove(item)
self.locations[self.current_location]['items'].append(item)
def inventory(self, none):
for x in range(1, self.INVENTORY_SIZE+1):
print str(x) + ") " + ( self.player_inventory[x - 1].title() if len(self.player_inventory) >= x else "Empty" )
def use(self, item):
if item in self.player_inventory or item in self.locations[self.current_location]['items']:
if self.current_location in self.ITEMS[item]['uses']:
use_data = self.ITEMS[item]['uses'][self.current_location]
else:
use_data = self.ITEMS[item]['uses']['other']
if 'checkpoint' in use_data:
                if use_data['checkpoint'][0] == self.current_checkpoint or use_data['checkpoint'][0] is None:
print use_data['event_text']
self.current_checkpoint = use_data['checkpoint'][1]
if 'update_description' in use_data:
self.locations[self.current_location]['description'] = use_data['update_description']
if 'update_items' in use_data:
self.locations[self.current_location]['items'].append(use_data['update_items'])
else:
print self.ITEMS[item]['uses']['other']['event_text']
else:
print use_data['event_text']
else:
print "There's no %s to use!" % item
def run(self):
while(self.current_checkpoint != self.EXIT_CHECKPOINT):
input = string.lower(raw_input("\nWhat do you do?\n"))
tokens = input.split()
if len(tokens) == 0 or not tokens[0] in self.COMMANDS:
print "That is not a valid command. (type 'help' for a list)"
else:
                command = getattr(self, tokens[0].lower())
                command(" ".join(tokens[1:]).lower())
input = string.lower(raw_input("Game Over. Play again? (y/n) "))
return True if input.lower() == 'y' else False
while(True):
app = TextAdventure()
play_again = app.run()
if not play_again:
sys.exit(0)
|
#!python
import sys
import yaml
import dpath
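# Stamps a namespace onto every resource in a multi-document manifest, drops
# cluster-scoped kinds, and injects AMBASSADOR_* env vars into Deployments.
# Invocation: <script> <id-num> <input-manifest.yaml> <output-manifest.yaml>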
id_num = sys.argv[1]
namespace = 'test-010-%s' % id_num
manifest = list(yaml.safe_load_all(open(sys.argv[2], 'r')))
kinds_to_delete = {
    "ClusterRole": True,
    "ServiceAccount": True,
    "ClusterRoleBinding": True,
}
keep = []
for x in range(len(manifest)):
kind = dpath.util.get(manifest, "/%d/kind" % x)
name = dpath.util.get(manifest, "/%d/metadata/name" % x)
if kind in kinds_to_delete:
# print("Skipping %s %s" % (kind, name))
continue
# print("Adding namespace %s to %s %s" % (namespace, kind, name))
dpath.util.new(manifest, "/%d/metadata/namespace" % x, namespace)
if kind == 'Deployment':
# print("Setting environment for %s %s" % (kind, name))
path = "/%d/spec/template/spec/containers/0/env" % x
dpath.util.new(manifest, path, [
{
'name': 'AMBASSADOR_NAMESPACE',
'value': namespace
},
{
'name': 'AMBASSADOR_ID',
'value': 'ambassador-%s' % id_num
}
])
keep.append(manifest[x])
yaml.safe_dump_all(keep, open(sys.argv[3], "w"), default_flow_style=False)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, with_statement
from cuisine import package_ensure as ensure
from cuisine import package_install as install
from cuisine import package_update as update
from cuisine import package_upgrade as upgrade
from revolver import contextmanager as ctx
from revolver import file, server, core, decorator
ensure = decorator.multiargs(decorator.sudo(ensure))
install = decorator.multiargs(decorator.sudo(install))
update = decorator.multiargs(decorator.sudo(update))
upgrade = decorator.sudo(upgrade)
def is_installed(name):
with ctx.settings(warn_only=True):
res = core.run("dpkg -s %s" % name)
for line in res.splitlines():
if line.startswith("Status: "):
status = line[8:]
if "installed" in status.split(" "):
return True
return False
def install_ppa(name):
ensure("python-software-properties")
with ctx.cd("/etc/apt/sources.list.d"):
        name_normalized = name.replace("/", "-")
        source_list = "%s-%s.list" % (name_normalized, server.codename())
if not file.exists(source_list):
core.sudo("add-apt-repository ppa:%s" % name)
update()
|
from django.conf.urls import patterns, include, url
from extra.views import Update
urlpatterns = patterns('',
url(r'^(?P<pk>.+)/update/$', Update.as_view(), name='update'),
)
|
from .diary_add import *
from .diary_detail import *
from .month_calendar import *
|
#_*_coding:utf-8_*_
__author__ = 'Jorden Hai'
from sqlalchemy import create_engine,Table
from sqlalchemy.orm import sessionmaker
from conf import settings
engine = create_engine(settings.DB_CONN)
# engine = create_engine(settings.DB_CONN, echo=True)
# Create a session class bound to the engine; note that sessionmaker returns a
# class, not an instance.
SessionCls = sessionmaker(bind=engine)
session = SessionCls()
|
# -*- coding: utf-8 -*-
"""Tests for v1 API viewsets."""
from __future__ import unicode_literals
from datetime import datetime
from json import dumps, loads
from pytz import UTC
import mock
from webplatformcompat.history import Changeset
from webplatformcompat.models import Browser, Feature
from webplatformcompat.v1.viewsets import ViewFeaturesViewSet
from .base import APITestCase, NamespaceMixin
from ..test_viewsets import (
TestCascadeDeleteGeneric, TestUserBaseViewset, TestViewFeatureBaseViewset)
class TestBrowserViewset(APITestCase):
"""Test common viewset functionality through the browsers viewset."""
def test_get_browser_detail(self):
browser = self.create(
Browser,
slug='firefox',
name={'en': 'Firefox'},
note={'en': 'Uses Gecko for its web browser engine'})
url = self.api_reverse('browser-detail', pk=browser.pk)
response = self.client.get(url)
history_pk = browser.history.get().pk
expected_content = {
'browsers': {
'id': str(browser.pk),
'slug': 'firefox',
'name': {'en': 'Firefox'},
'note': {'en': 'Uses Gecko for its web browser engine'},
'links': {
'history': [str(history_pk)],
'history_current': str(history_pk),
'versions': [],
}
},
'links': {
'browsers.history': {
'href': (
'http://testserver/api/v1/historical_browsers/'
'{browsers.history}'),
'type': 'historical_browsers'
},
'browsers.history_current': {
'href': (
'http://testserver/api/v1/historical_browsers/'
'{browsers.history_current}'),
'type': 'historical_browsers'
},
'browsers.versions': {
'href': (
'http://testserver/api/v1/versions/'
'{browsers.versions}'),
'type': u'versions'
}
}
}
actual_content = loads(response.content.decode('utf-8'))
self.assertDataEqual(expected_content, actual_content)
def test_get_browser_list(self):
firefox = self.create(
Browser,
slug='firefox', name={'en': 'Firefox'},
note={'en': 'Uses Gecko for its web browser engine'})
chrome = self.create(Browser, slug='chrome', name={'en': 'Chrome'})
response = self.client.get(self.api_reverse('browser-list'))
firefox_history_id = str(firefox.history.get().pk)
chrome_history_id = str(chrome.history.get().pk)
expected_content = {
'browsers': [
{
'id': str(firefox.pk),
'slug': 'firefox',
'name': {'en': 'Firefox'},
'note': {'en': 'Uses Gecko for its web browser engine'},
'links': {
'history': [firefox_history_id],
'history_current': firefox_history_id,
'versions': [],
},
}, {
'id': '%s' % chrome.pk,
'slug': 'chrome',
'name': {'en': 'Chrome'},
'note': None,
'links': {
'history': [chrome_history_id],
'history_current': chrome_history_id,
'versions': [],
},
},
],
'links': {
'browsers.history': {
'href': (
'http://testserver/api/v1/historical_browsers/'
'{browsers.history}'),
'type': 'historical_browsers',
},
'browsers.history_current': {
'href': (
'http://testserver/api/v1/historical_browsers/'
'{browsers.history_current}'),
'type': 'historical_browsers',
},
'browsers.versions': {
'href': (
'http://testserver/api/v1/versions/'
'{browsers.versions}'),
'type': 'versions',
},
},
'meta': {
'pagination': {
'browsers': {
'count': 2,
'previous': None,
'next': None
},
},
},
}
actual_content = loads(response.content.decode('utf-8'))
self.assertDataEqual(expected_content, actual_content)
def test_get_browsable_api(self):
browser = self.create(Browser)
url = self.api_reverse('browser-list')
response = self.client.get(url, HTTP_ACCEPT='text/html')
history_pk = browser.history.get().pk
expected_data = {
'count': 1,
'previous': None,
'next': None,
'results': [{
'id': browser.pk,
'slug': '',
'name': None,
'note': None,
'history': [history_pk],
'history_current': history_pk,
'versions': [],
}]}
self.assertDataEqual(response.data, expected_data)
self.assertTrue(response['content-type'].startswith('text/html'))
def test_post_minimal(self):
self.login_user()
data = {'slug': 'firefox', 'name': '{"en": "Firefox"}'}
response = self.client.post(self.api_reverse('browser-list'), data)
self.assertEqual(201, response.status_code, response.data)
browser = Browser.objects.get()
history_pk = browser.history.get().pk
expected_data = {
'id': browser.pk,
'slug': 'firefox',
'name': {'en': 'Firefox'},
'note': None,
'history': [history_pk],
'history_current': history_pk,
'versions': [],
}
self.assertDataEqual(response.data, expected_data)
@mock.patch('webplatformcompat.signals.update_cache_for_instance')
def test_put_as_json_api(self, mock_update):
"""If content is application/vnd.api+json, put is partial."""
browser = self.create(
Browser, slug='browser', name={'en': 'Old Name'})
data = dumps({
'browsers': {
'name': {
'en': 'New Name'
}
}
})
url = self.api_reverse('browser-detail', pk=browser.pk)
mock_update.reset_mock()
response = self.client.put(
url, data=data, content_type='application/vnd.api+json')
self.assertEqual(200, response.status_code, response.data)
histories = browser.history.all()
expected_data = {
'id': browser.pk,
'slug': 'browser',
'name': {'en': 'New Name'},
'note': None,
'history': [h.pk for h in histories],
'history_current': histories[0].pk,
'versions': [],
}
self.assertDataEqual(response.data, expected_data)
mock_update.assert_has_calls([
mock.call('User', self.user.pk, mock.ANY),
mock.call('Browser', browser.pk, mock.ANY),
])
self.assertEqual(mock_update.call_count, 2)
@mock.patch('webplatformcompat.signals.update_cache_for_instance')
def test_put_in_changeset(self, mock_update):
browser = self.create(
Browser, slug='browser', name={'en': 'Old Name'})
changeset = Changeset.objects.create(user=self.user)
data = dumps({
'browsers': {
'name': {
'en': 'New Name'
}
}
})
url = self.api_reverse('browser-detail', pk=browser.pk)
url += '?use_changeset=%s' % changeset.pk
mock_update.reset_mock()
mock_update.side_effect = Exception('not called')
response = self.client.put(
url, data=data, content_type='application/vnd.api+json')
self.assertEqual(200, response.status_code, response.data)
def test_put_as_json(self):
"""If content is application/json, put is full put."""
browser = self.create(
Browser, slug='browser', name={'en': 'Old Name'})
data = {'name': '{"en": "New Name"}'}
url = self.api_reverse('browser-detail', pk=browser.pk)
response = self.client.put(url, data=data)
self.assertEqual(200, response.status_code, response.data)
histories = browser.history.all()
expected_data = {
'id': browser.pk,
'slug': 'browser',
'name': {'en': 'New Name'},
'note': None,
'history': [h.pk for h in histories],
'history_current': histories[0].pk,
'versions': [],
}
self.assertDataEqual(response.data, expected_data)
@mock.patch('webplatformcompat.signals.update_cache_for_instance')
def test_delete(self, mock_update):
self.login_user(groups=['change-resource', 'delete-resource'])
browser = self.create(Browser, slug='firesux', name={'en': 'Firesux'})
url = self.api_reverse('browser-detail', pk=browser.pk)
mock_update.reset_mock()
response = self.client.delete(url)
self.assertEqual(204, response.status_code, response.content)
self.assertFalse(Browser.objects.filter(pk=browser.pk).exists())
mock_update.assert_has_calls([
mock.call('User', self.user.pk, mock.ANY),
mock.call('Browser', browser.pk, mock.ANY),
])
self.assertEqual(mock_update.call_count, 2)
def test_delete_not_allowed(self):
self.login_user()
browser = self.create(
Browser, slug='browser', name={'en': 'Old Name'})
url = self.api_reverse('browser-detail', pk=browser.pk)
response = self.client.delete(url)
self.assertEqual(403, response.status_code)
expected_data = {
'detail': 'You do not have permission to perform this action.'
}
self.assertDataEqual(response.data, expected_data)
@mock.patch('webplatformcompat.signals.update_cache_for_instance')
def test_delete_in_changeset(self, mock_update):
self.login_user(groups=['change-resource', 'delete-resource'])
browser = self.create(
Browser, slug='internet_exploder',
name={'en': 'Internet Exploder'})
url = self.api_reverse('browser-detail', pk=browser.pk)
url += '?use_changeset=%d' % self.changeset.id
mock_update.reset_mock()
mock_update.side_effect = Exception('not called')
response = self.client.delete(url)
self.assertEqual(204, response.status_code, response.content)
self.assertFalse(Browser.objects.filter(pk=browser.pk).exists())
def test_options(self):
self.login_user()
browser = self.create(Browser)
url = self.api_reverse('browser-detail', pk=browser.pk)
response = self.client.options(url)
self.assertEqual(200, response.status_code, response.content)
expected_keys = {'actions', 'description', 'name', 'parses', 'renders'}
self.assertEqual(set(response.data.keys()), expected_keys)
class TestFeatureViewSet(APITestCase):
"""Test FeatureViewSet."""
def test_filter_by_slug(self):
parent = self.create(Feature, slug='parent', name={'en': 'Parent'})
feature = self.create(
Feature, slug='feature', parent=parent, name={'en': 'A Feature'})
self.create(Feature, slug='other', name={'en': 'Other'})
response = self.client.get(
self.api_reverse('feature-list'), {'slug': 'feature'})
self.assertEqual(200, response.status_code, response.data)
self.assertEqual(1, response.data['count'])
self.assertEqual(feature.id, response.data['results'][0]['id'])
def test_filter_by_parent(self):
parent = self.create(Feature, slug='parent', name={'en': 'Parent'})
feature = self.create(
Feature, slug='feature', parent=parent, name={'en': 'A Feature'})
self.create(Feature, slug='other', name={'en': 'Other'})
response = self.client.get(
self.api_reverse('feature-list'), {'parent': str(parent.id)})
self.assertEqual(200, response.status_code, response.data)
self.assertEqual(1, response.data['count'])
self.assertEqual(feature.id, response.data['results'][0]['id'])
def test_filter_by_no_parent(self):
parent = self.create(Feature, slug='parent', name={'en': 'Parent'})
self.create(
Feature, slug='feature', parent=parent, name={'en': 'The Feature'})
other = self.create(Feature, slug='other', name={'en': 'Other'})
response = self.client.get(
self.api_reverse('feature-list'), {'parent': ''})
self.assertEqual(200, response.status_code, response.data)
self.assertEqual(2, response.data['count'])
self.assertEqual(parent.id, response.data['results'][0]['id'])
self.assertEqual(other.id, response.data['results'][1]['id'])
class TestHistoricalBrowserViewset(APITestCase):
"""Test common historical viewset functionality through browsers."""
def test_get_historical_browser_detail(self):
browser = self.create(
Browser, slug='browser', name={'en': 'A Browser'},
_history_date=datetime(2014, 8, 25, 20, 50, 38, 868903, UTC))
history = browser.history.all()[0]
url = self.api_reverse('historicalbrowser-detail', pk=history.pk)
response = self.client.get(
url, HTTP_ACCEPT='application/vnd.api+json')
self.assertEqual(200, response.status_code, response.data)
expected_json = {
'historical_browsers': {
'id': str(history.pk),
'date': self.dt_json(browser._history_date),
'event': 'created',
'browsers': {
'id': str(browser.pk),
'slug': 'browser',
'name': {
'en': 'A Browser'
},
'note': None,
'links': {
'history_current': str(history.pk),
'versions': [],
},
},
'links': {
'browser': str(browser.pk),
'changeset': str(history.history_changeset_id),
}
},
'links': {
'historical_browsers.browser': {
'href': (
'http://testserver/api/v1/browsers/'
'{historical_browsers.browser}'),
'type': 'browsers'
},
'historical_browsers.changeset': {
'href': (
'http://testserver/api/v1/changesets/'
'{historical_browsers.changeset}'),
'type': 'changesets'
}
}
}
actual_json = loads(response.content.decode('utf-8'))
self.assertDataEqual(expected_json, actual_json)
class TestCascadeDelete(NamespaceMixin, TestCascadeDeleteGeneric):
"""Test cascading deletes."""
class TestUserViewset(NamespaceMixin, TestUserBaseViewset):
"""Test users/me UserViewSet."""
class TestViewFeatureViewset(NamespaceMixin, TestViewFeatureBaseViewset):
"""Test helper functions on ViewFeaturesViewSet."""
def setUp(self):
super(TestViewFeatureViewset, self).setUp()
self.view = ViewFeaturesViewSet()
|
#!/usr/bin/env python
#!coding:utf-8
import struct
files = "/home/quan/桌面/ts/live-2019-05-24_21-48-03.ts"
def Do(files,ff):
f = open(files,"r")
n = 0
while True:
_buf =f.read(188)
if not _buf:
print "已经读取完毕!!"
print "pcr时间为:%d" %pcr_time
return
buf = struct.unpack("B" * len(_buf), _buf)
if buf[0] != 0x47:
continue
if get_pid(buf[1],buf[2]) == 0:
continue
get_pmt_stream(buf)
return
def get_pid(b1,b2):
pid = (((b1 & 0b11111) << 8) | b2)
return pid
def get_payload_unit_start_indicator(b1):
payload_unit_start_indicator = (b1 >> 6) & 0b01
return payload_unit_start_indicator
def get_transport_error_indicator(b1):
transport_error_indicator = b1>>7
return transport_error_indicator
def get_transport_priority(b1):
transport_priority = (b1>>5)&0x1
return transport_priority
def get_transport_scrambling_control(b3):  # scrambling flag (00: not scrambled; other values: scrambled)
transport_scrambling_control = b3>>6
return transport_scrambling_control
def get_adaptation_field_control(b3):
adaptation_field_control = (b3 >>4) & 0b11
return adaptation_field_control
def get_continuity_counter(b3):
continuity_counter = b3 & 0b1111
return continuity_counter
def get_pmt_pid(buf):
payload_unit_start_indicator = get_payload_unit_start_indicator(buf[1])
#print "payload_unit_start_indicator = %d" % payload_unit_start_indicator
if payload_unit_start_indicator == 1:
program_number = (buf[13] <<8) | buf[14]
program_map_PID = ((buf[15]<<8) | buf[16]) &0b1111111111111
return program_number,program_map_PID
else:
program_number = (buf[12] << 8) | buf[13]
program_map_PID = ((buf[14] << 8) | buf[15]) & 0b1111111111111
return program_number, program_map_PID
def get_pmt_stream(buf):
payload_unit_start_indicator = get_payload_unit_start_indicator(buf[1])
streams_id = {}
if payload_unit_start_indicator ==1:
section_length = ((buf[6] & 0b1111)<<8) | buf[7]
        #n = (section_length - 13) / 5  # section_length counts the payload bytes that follow; subtracting the fixed fields gives the number of stream entries
        pcr_pid = ((buf[13] & 0b11111) << 8) | buf[14]
n =17
while 1:
stream_type = buf[n]
elementary_PID = ((buf[n+1] & 0b11111) << 8) | buf[n+2]
ES_info_length = (buf[n+3] & 0b1111) << 8 | buf[n+4]
streams_id[elementary_PID] = stream_type
n = ES_info_length + n+ 5
# print "elementrry_id = %d" %elementary_PID
# print "n= %d,es_info_len= %d" %(n,ES_info_length)
if (n + 4) > section_length:
break
else:
section_length = ((buf[5] & 0b1111) << 8) | buf[6]
        # n = (section_length - 13) / 5  # section_length counts the payload bytes that follow; subtracting the fixed fields gives the number of stream entries
        pcr_pid = ((buf[12] & 0b11111) << 8) | buf[13]
n = 16
while 1:
# print len(buf)
#for i in buf:
# print i
stream_type = buf[n]
elementary_PID = ((buf[n + 1] & 0b11111) << 8) | buf[n + 2]
ES_info_length = (buf[n + 3] & 0b1111) << 8 | buf[n + 4]
streams_id[elementary_PID] = stream_type
n += ES_info_length
if (n + 4) > section_length:
break
print streams_id
return
def get_pcr(ff):
    pcr_time = 0
    pcr_bash = 0
while True:
_buf = ff.read(188)
buf = struct.unpack("B" * len(_buf), _buf)
if not buf:
print "pcr 时间为 %f" %(pcr_time/90000)
return
#print 123
adaption_field_control = get_adaptation_field_control(buf[3])
pid = get_pid(buf[1],buf[2])
        # adaptation field present (0b10) or adaptation field + payload (0b11)
        if adaption_field_control == 0b10 or adaption_field_control == 0b11:
            PCR_flag = (buf[5] >> 4) & 0b1
            if PCR_flag == 1:
#print pid
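                # PCR base is a 33-bit counter at 90 kHz, spread across bytes
                # 6-10 of the adaptation field (the 27 MHz extension is ignored here)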
if not pcr_bash:
pcr_bash = (buf[6] << 25) | buf[7] << 17 | buf[8] << 9 | buf[9] <<1 | buf[10]>>7
continue
pcr_now = (buf[6] << 25) | buf[7] << 17 | buf[8] << 9 | buf[9] <<1 | buf[10]>>7
#print pcr_time
pcr_time = pcr_time + pcr_now -pcr_bash
pcr_bash = pcr_now
#print "pcr = %d" %pcr
return
#return pcr
if __name__ == '__main__':
with open("/home/quan/桌面/ts/live-2019-05-24_21-48-03.ts","r") as ff:
Do(files,ff)
|
import unittest
import vertica_python
from config.db import db_vertica
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy.engine.url import URL
def db_connect():
engine = create_engine(URL(**db_vertica), pool_recycle=900)
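    # Tell SQLAlchemy not to assume the driver returns accurate rowcounts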
engine.dialect.supports_sane_rowcount = False
return engine
engine = db_connect()
Session = sessionmaker(bind=engine)
from mmvizutil.pagination import Pagination
from mmvizutil.alchemy.pagination import PaginationMmQuery
from mmvizutil.db.query import (
Query as MmQuery
)
class TestPagination(unittest.TestCase):
    def test_pagination_db_query(self):
        session = Session()
        try:
query_count = MmQuery()
query_count.value = 'select count(*) from marketing.states'
query_count.parameters = {}
query_item = MmQuery()
query_item.value = 'select * from marketing.states limit :limit offset :offset'
query_item.parameters = {}
pagination_query = PaginationMmQuery(query_count, query_item, session)
pagination = Pagination(pagination_query, 5, 3) #items per page, range size
page = pagination.get_page(2)
print (page)
finally:
session.close()
|
from datetime import datetime, timedelta
import pymongo
from Utils import Utils
from dbmongo import Database
from geopy.distance import great_circle
class Map_Data:
def __init__(self, type=0, lat=0, lng=0, text=""):
self.type = type
self.lat = lat
self.lng = lng
self.text = text
class n1_MapaclassMaxPB:
def __init__(self, serial=0, dtsup=datetime(2099,1,1,1,1,1) ,dtinf=datetime(2010,1,1,1,1,1)):
self.serial = serial
self.dtinf = dtinf
self.dtsup = dtsup
self.gateways = []
def leitura(self):
StartDateInt = int(Utils().set_DatetimeToInt(self.dtinf))
EndDateInt = int(Utils().set_DatetimeToInt(self.dtsup))
json_query = {'dev_id': int(self.serial),'date_dev': {'$gte': self.dtinf, '$lte': self.dtsup}}
sortby =[['date_dev', pymongo.ASCENDING]]
print "pre db"
cursor = Database().getdata(json_query, sortby, collection="Pontos")
print cursor.count()
lines = []
first = True
if cursor.count() > 0:
for doc in cursor:
gprs = Utils().get_gprsfix(doc)
ignition = Utils().get_ignition(doc)
evento = Utils().get_event(doc)
pos = Utils().get_memoryindex(doc)
if first or lastPos != pos:
first = False
lastPos = pos
speed = Utils().get_speed(doc)
gps = Utils().get_gpsfix(doc)
date = Utils().get_devdate(doc)
# extPwr = Utils().get_extpower(doc)
# battery = Utils().get_battery(doc)
latitude = float(float(doc["lat"])/1000000)
longitude = float(float(doc["long"])/1000000)
stringInfo = "<b>Position</b>: " + str(pos) \
+ "<br><b>Event:</b> " + str(evento) \
+ "<br><b>Date:</b> " + str(date) \
+ "<br><b>Ignition: </b> " + str(ignition) \
+ "<br><b>Speed:</b> " + str(speed) \
+ "<br><b>GPS Fix:</b> " + str(gps) \
+ "<br><b>GPRS Fix:</b> " + str(gprs)
# + "<br><b>Battery:</b> " + str(battery) \
# + "<br><b>Ext Power:</b> " + str(extPwr)
type = self.getType(doc)
mapdata = Map_Data(type, latitude, longitude, stringInfo)
route_reconstruct = doc["route_rec"]
                if len(route_reconstruct) > 0:
for rec_cursor in route_reconstruct:
lat_rec = float(float(rec_cursor["lat"])/1000000)
long_rec = float(float(rec_cursor["long"])/1000000)
string_info_rec = "<b>Position : </b> " + str(pos) \
+ "<br><b>Date : </b> " + str(rec_cursor["date"]) \
+ "<br><b>Speed: </b> " + str(rec_cursor["speed"])
point_reconstruct = Map_Data(0, lat_rec, long_rec, string_info_rec)
                        lines.append(point_reconstruct.__dict__)
lines.append(mapdata.__dict__)
return lines
def getType(self, doc):
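        # Marker type encodes (accelerometer event, GPS fix, GPRS fix):
        # non-accelerometer events map to 1-4 and accelerometer events to 5-8,
        # ordered by (gps, gprs) = (F,F), (F,T), (T,F), (T,T).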
gps = Utils().get_gpsfix(doc)
gprs = Utils().get_gprsfix(doc)
if Utils().isAccelerometerEvent(doc) == False:
if gps == False and gprs == False:
type = 1
elif gps == False and gprs == True:
type = 2
elif gps == True and gprs == False:
type = 3
elif gps == True and gprs == True:
type = 4
else:
if gps == False and gprs == False:
type = 5
elif gps == False and gprs == True:
type = 6
elif gps == True and gprs == False:
type = 7
elif gps == True and gprs == True:
type = 8
return type
def isAccelerometerEvent(self, doc):
return False
|
import matplotlib.pyplot as plt
from pyrsa.vis import rdm_plot
from pyrsa.vis.colors import rdm_colormap
import pickle
import numpy as np
import scipy
import scipy.cluster.hierarchy as sch
def cluster_corr(corr_array, inplace=False):
"""
    Rearranges the correlation matrix, corr_array, so that groups of highly
    correlated variables are next to each other
Parameters
----------
corr_array : pandas.DataFrame or numpy.ndarray
a NxN correlation matrix
Returns
-------
pandas.DataFrame or numpy.ndarray
a NxN correlation matrix with the columns and rows rearranged
"""
pairwise_distances = sch.distance.pdist(corr_array)
linkage = sch.linkage(pairwise_distances, method='complete')
cluster_distance_threshold = pairwise_distances.max() / 2
idx_to_cluster_array = sch.fcluster(linkage, cluster_distance_threshold,
criterion='distance')
idx = np.argsort(idx_to_cluster_array)
if not inplace:
corr_array = corr_array.copy()
#if isinstance(corr_array, pd.DataFrame):
# return corr_array.iloc[idx, :].T.iloc[idx, :]
return corr_array[idx, :][:, idx]
##RDMs
#fmri data
with open("/home/ajay/Desktop/Learning_Rule_paperspace/kamitani_fmri_data/fmri_brain_area_rsa_.pkl", "rb") as f:
rsa_fmri = pickle.load(f)
rdm_fmri = rsa_fmri['V4'][0].get_matrices()
rdm_cluster = cluster_corr(rdm_fmri[0])
cmap = rdm_colormap()
order = np.argsort(rdm_fmri[0,0,:])[np.argsort(np.argsort(rdm_cluster[0,:]))]
with open("./kamitani_fmri_data/kamitani_category_names_sub1.txt", "rb") as f0: #Import category names
category_names = pickle.load(f0)
with open("stimuli_perception_order.txt", "wb") as p1: #Pickling
pickle.dump(order.tolist(), p1)
thing = [order < 40]
plt.imshow(rdm_cluster, cmap)
ax = plt.gca()
ax.set_xticks(np.arange(150))
ax.set_xticklabels([category_names[i] for i in order.tolist()])
ax.set_yticks(np.arange(150))
ax.set_yticklabels([category_names[i] for i in order.tolist()])
ax.set_yticks(ax.get_yticks()[::2])
#for label in ax.yaxis.get_ticklabels()[::1]:
# label.set_visible(False)
for label in ax.yaxis.get_ticklabels()[::2]:
label.set_visible(False)
ax.set_xticks([])
plt.title('fMRI')
# plt.savefig('/home/ajay/Documents/Learning_Rule/saved_plots/fMRI_RDM.png')
plt.show()
#GD
with open("/home/ajay/Desktop/gd_other/GD/GD_brain_area_rsa.pkl", "rb") as f:
rsa_GD = pickle.load(f)
rdm_GD = rsa_GD['V4'][0].get_matrices()
rdm_GD = rdm_GD[0]
rdm_GD = rdm_GD[:,order][order]
cmap = rdm_colormap()
plt.imshow(rdm_GD, cmap)
ax = plt.gca()
ax.set_xticks(np.arange(150))
ax.set_xticklabels([category_names[i] for i in order.tolist()])
ax.set_yticks(np.arange(150))
ax.set_yticklabels([category_names[i] for i in order.tolist()])
ax.set_yticks(ax.get_yticks()[::2])
#for label in ax.yaxis.get_ticklabels()[::1]:
# label.set_visible(False)
for label in ax.yaxis.get_ticklabels()[::2]:
label.set_visible(False)
ax.set_xticks([])
plt.title('Gradient Descent')
plt.savefig('/home/ajay/Documents/Learning_Rule/saved_plots/GD_RDM.png')
plt.show()
#Burstprop
with open("/home/ajay/Desktop/Learning_Rule_paperspace/Burstprop/final_model_data/Burstprop_brain_area_rsa.pkl", "rb") as f:
rsa_burst = pickle.load(f)
rdm_burst = rsa_burst['V4'][0].get_matrices()
rdm_burst = rdm_burst[0]
rdm_burst = rdm_burst[:,order][order]
cmap = rdm_colormap()
plt.imshow(rdm_burst, cmap)
ax = plt.gca()
ax.set_xticks(np.arange(150))
ax.set_xticklabels([category_names[i] for i in order.tolist()])
ax.set_yticks(np.arange(150))
ax.set_yticklabels([category_names[i] for i in order.tolist()])
ax.set_yticks(ax.get_yticks()[::2])
#for label in ax.yaxis.get_ticklabels()[::1]:
# label.set_visible(False)
for label in ax.yaxis.get_ticklabels()[::2]:
label.set_visible(False)
ax.set_xticks([])
plt.title('Burstprop')
plt.savefig('/home/ajay/Documents/Learning_Rule/saved_plots/Burstprop_RDM.png')
plt.show()
#Hebbian
with open("/home/ajay/Desktop/Learning_Rule_paperspace/Hebbian/final_model_data/Hebbian_brain_area_rsa.pkl", "rb") as f:
rsa_hebb = pickle.load(f)
rdm_hebb = rsa_hebb['V4'][0].get_matrices()
rdm_hebb = rdm_hebb[0]
rdm_hebb = rdm_hebb[:,order][order]
cmap = rdm_colormap()
plt.imshow(rdm_hebb, cmap)
ax = plt.gca()
ax.set_xticks(np.arange(150))
ax.set_xticklabels([category_names[i] for i in order.tolist()])
ax.set_yticks(np.arange(150))
ax.set_yticklabels([category_names[i] for i in order.tolist()])
ax.set_yticks(ax.get_yticks()[::2])
#for label in ax.yaxis.get_ticklabels()[::1]:
# label.set_visible(False)
for label in ax.yaxis.get_ticklabels()[::2]:
label.set_visible(False)
ax.set_xticks([])
plt.title('Hebbian')
plt.savefig('/home/ajay/Documents/Learning_Rule/saved_plots/Hebbian_RDM.png')
plt.show()
#RSA Bar Charts
rsa_GD_sim = np.empty([4])
for index, key in enumerate(rsa_GD):
rsa_GD_sim[index] = rsa_GD[key][1]
rsa_burst_sim = np.empty([4])
for key in rsa_burst.keys():
rsa_burst_sim[list(rsa_burst).index(key)] = rsa_burst[key][1]
rsa_hebb_sim = np.empty([4])
for key in rsa_hebb.keys():
rsa_hebb_sim[list(rsa_hebb).index(key)] = rsa_hebb[key][1]
fig, ax = plt.subplots()
ind = np.arange(4) # the x locations for the groups
width = 0.25 # the width of the bars
r1 = np.arange(len(rsa_GD_sim))
r2 = [x + width for x in r1]
r3 = [x + width for x in r2]
p1 = ax.bar(ind, rsa_GD_sim, width, bottom=0)
p2 = ax.bar(r2, rsa_burst_sim, width, bottom=0)
p3 = ax.bar(r3, rsa_hebb_sim, width, bottom=0)
ax.set_title('Similarity to brain activity')
ax.set_xticks([r + width for r in range(len(rsa_GD_sim))])
ax.set_xticklabels(('V1', 'V2', 'V3', 'V4'))
plt.xlabel('Brain Area', fontsize=12)
plt.ylabel('Cosine Similarity')
ax.legend((p1[0], p2[0], p3[0]), ('Gradient Descent', 'Burstprop', 'Hebbian'))
ax.autoscale_view()
plt.savefig('/home/ajay/Documents/Learning_Rule/saved_plots/similarity_ventral_bar.png')
plt.show()
|
from datetime import datetime
# dc = {'11.12.12': 1, '10.12.12': 2}
# #
# dc = dict(dc)
#
# a = max(dc.keys())
# #
# # print({k: v for k, v in dc.keys() if k == a})
# # print([k for k, v in dc.keys() if k == a])
#
# for k, i in dc.items():
# print(k, i)
#
# d = '2020-08-20'
# datetime_object = datetime.strptime(d, '%Y-%m-%d')
# datetime_object = datetime.strptime(d, '')
# print(datetime_object.year)
#
dc = {'11.12.12': 1}
for k, v in dc.items():
print(v)
|
# -*- coding: utf-8 -*-
import urllib2
import time
import urlparse
def download(url,retry=2):
# print "downloading %s" % url
header = {
'User-Agent':'Mozilla/5.0'
}
try:
req = urllib2.Request(url,headers=header)
html = urllib2.urlopen(req).read()
except urllib2.HTTPError as e:
print "download error: %s" % e.reason
html = None
if retry >0:
print e.code
if hasattr(e,'code') and 500 <= e.code < 600:
print e.code
return download(url,retry-1)
time.sleep(1)
return html
def writePage(filename,content):
content = unicode(content).encode('utf-8',errors='ignore')+"\n"
if ('Title_Is_None.txt' in filename):
with open(filename,'a') as file:
file.write(content)
else:
with open(filename,'wb+') as file:
file.write(content)
# get urls to be crawled
#:param alinks: list of tag 'a' href, dependent on implementation eg. bs4,lxml
def to_be_crawled_link(alinks,url_seed,url_root):
links_to_be_crawled=set()
if len(alinks)==0:
return links_to_be_crawled
print "len of alinks is %d" % len(alinks)
for link in alinks:
link = link.get('href')
        if link is not None and 'javascript:' not in link:
if link not in links_to_be_crawled:
realUrl = urlparse.urljoin(url_root,link)
links_to_be_crawled.add(realUrl)
return links_to_be_crawled
def to_be_crawled_links(alinks,count,url_root,url_seed):
url = url_seed % count
    links = to_be_crawled_link(alinks,url_root,url)
links.add(url)
return links
|
import sys
import getopt
import errno
import os
from pathlib import Path
# Total number of files in chosen datasets
total_files = 0
# Default score of all prediction
score = {}
# All prediction outputs by your algorithm
all_predictions = {}
# When predicting files outside of chosen datasets
class DatasetsNotChosenException(Exception):
pass
# When predicting the same file twice
class MultiplePredictionsFoundException(Exception):
pass
def AveragePrecision(prediction, solution):
# Loss function, more information in README
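    # e.g. prediction [4, 7, 2] with solution 7 scores 1/2 (found at rank 2)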
for index, pred in enumerate(prediction):
if(pred == solution):
return 1/(index+1)
return 0
# Count files
def countTasks(chosen_datasets):
count = 0
if(chosen_datasets):
for path_to_dataset in chosen_datasets:
for task in path_to_dataset.iterdir():
if(task.name.endswith(".txt") and task.name != "out.txt"):
count += 1
else:
path_to_datasets = Path(__file__).absolute().parent.parent / "Datasets"
for path_to_dataset in path_to_datasets.iterdir():
if(path_to_dataset.is_dir()):
for task in path_to_dataset.iterdir():
if(task.name.endswith(".txt") and task.name != "out.txt"):
count += 1
return count
# Init score for files, default score is 0 for each file
def initScore(chosen_datasets):
score = {}
if(chosen_datasets):
for path_to_dataset in chosen_datasets:
for task in path_to_dataset.iterdir():
if(task.name.endswith(".txt") and task.name != "out.txt"):
score[task.absolute()] = 0
else:
path_to_datasets = Path(__file__).absolute().parent.parent / "Datasets"
for path_to_dataset in path_to_datasets.iterdir():
if(path_to_dataset.is_dir()):
for task in path_to_dataset.iterdir():
if(task.name.endswith(".txt") and task.name != "out.txt"):
score[task.absolute()] = 0
return score
# Check the answers against the solution
def checkAnswers(prediction, path_to_task, chosen_datasets):
global score
# Get the solution
solutions_line = int(path_to_task.name.split(".")[0])
with open(path_to_task.parent / "out.txt", "r") as file:
for line, solution in enumerate(file):
if(line == solutions_line):
solution = int(solution)
break
    # Check if the task is inside the chosen datasets
if(chosen_datasets):
isInChosenDatasets = False
for path_to_dataset in chosen_datasets:
if(str(path_to_dataset.absolute()) in str(path_to_task.absolute())):
isInChosenDatasets = True
if(not isInChosenDatasets):
raise DatasetsNotChosenException(str(path_to_task) + " is outside of chosen datasets.")
# Check if the file is already predicted
if(path_to_task in all_predictions):
raise MultiplePredictionsFoundException("Multiple predictions to " + str(path_to_task))
# Stored for later use, maybe in verbose mode?
all_predictions[path_to_task] = prediction
# Calculate the loss and update the score
averagePrecision = AveragePrecision(prediction, solution)
score[path_to_task] = averagePrecision
# Print necessary statistics
def printStatistics(verbose):
global total_files, score
print("Total files: " + str(total_files))
print("MAP: " + str(sum(score.values())/(total_files*1.0)) + " (the higher, the better)")
def main():
global total_files, score
# Parse the options
verbose = False #TODO, verbose mode?
chosen_datasets = None
try:
opts, args = getopt.getopt(sys.argv[1:], "d:vh", ["datasets=","help"])
except getopt.GetoptError:
raise
for opt, arg in opts:
if opt == "-v":
verbose = True
elif opt in ("-d", "--datasets"):
chosen_datasets = arg.split(":")
chosen_datasets = [Path(path_to_dataset) for path_to_dataset in chosen_datasets]
elif opt in ("-h", "--help"):
print("usage evaluate.py [-vh] [-d path] [--datasets=path] [--help]")
print("-v for verbose output mode")
print("-d or --datasets= to evaluate on chosen datasets, must be absolute path, multiple paths should be seperated with ':'. Default is evaluating on all datasets")
sys.exit()
# Count total number of tasks, default is all tasks in Datasets/
total_files = countTasks(chosen_datasets)
    # Default score of 0 for each task, which is maximal loss
score = initScore(chosen_datasets)
    # Reading each prediction
for args in sys.stdin:
inputs = args.split()
path_to_task = Path(inputs[0])
prediction = inputs[1:]
if(not path_to_task.exists()):
raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path_to_task)
try:
prediction = [int(pos) for pos in prediction]
except ValueError:
            print(str(prediction) + " should only contain integers!")
raise
# Check the prediction
checkAnswers(prediction, path_to_task, chosen_datasets)
# Print statistics about your algorithm
printStatistics(verbose)
if __name__=="__main__":
main()
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
ALL_LAYERS_ENABLED = True
LAYERS = []
LAYERS.append({"shortName": "NCDC-L4LRblend-GLOB-AVHRR_OI", "envs": ("ALL",)})
LAYERS.append({"shortName": "SSH_alti_1deg_1mon", "envs": ("ALL",)})
LAYERS.append({"shortName": "SIacSubl_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "PHIBOT_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "SIhsnow_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "SIheff_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "oceFWflx_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "oceQnet_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "MXLDEPTH_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "SIatmQnt_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "oceSPflx_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "oceSPDep_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "SIarea_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "ETAN_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "sIceLoad_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "oceQsw_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "SIsnPrcp_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "DETADT2_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "TFLUX_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "SItflux_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "SFLUX_ECCO_version4_release1", "envs": ("ALL",)})
LAYERS.append({"shortName": "TELLUS_GRACE_MASCON_GRID_RL05_V1_LAND", "envs": ("ALL",)})
LAYERS.append({"shortName": "TELLUS_GRACE_MASCON_GRID_RL05_V1_OCEAN", "envs": ("ALL",)})
LAYERS.append({"shortName": "Sea_Surface_Anomalies", "envs": ("DEV",)})
LAYERS.append({"shortName": "JPL-L4_GHRSST-SSTfnd-MUR-GLOB-v02.0-fv04.1", "envs": ("ALL",)})
def isLayerEnabled(shortName, env):
if ALL_LAYERS_ENABLED:
return True
    if env is None:
        env = "PROD"
    env = env.upper()
    if env == "DEV":
        return True
    for layer in LAYERS:
        if layer["shortName"] == shortName and ("ALL" in layer["envs"] or env in layer["envs"]):
            return True
return False
if __name__ == "__main__":
print(isLayerEnabled("NCDC-L4LRblend-GLOB-AVHRR_OI", None))
print(isLayerEnabled("NCDC-L4LRblend-GLOB-AVHRR_OI", "PROD"))
print(isLayerEnabled("NCDC-L4LRblend-GLOB-AVHRR_OI", "SIT"))
print(isLayerEnabled("TFLUX_ECCO_version4_release1", None))
print(isLayerEnabled("TFLUX_ECCO_version4_release1", "PROD"))
print(isLayerEnabled("TFLUX_ECCO_version4_release1", "SIT"))
|
def factorial(n):
    if n <= 1:
        return 1
    return n * factorial(n - 1)
assert factorial(3) == 6
assert factorial(5) == 120
explain = 5 * 4 * 3 * 2 * 1
assert explain == factorial(5)
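# An equivalent iterative version, shown for comparison (avoids hitting the
# recursion limit for large n):
def factorial_iterative(n):
    result = 1
    for i in range(2, n + 1):
        result *= i
    return result

assert factorial_iterative(5) == factorial(5)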
|
from datetime import datetime, timedelta
import pendulum
from airflow import DAG
from airflow.contrib.operators.spark_submit_operator import SparkSubmitOperator
from airflow.models import Variable
local_tz = pendulum.timezone("Asia/Tehran")
default_args = {
'owner': 'mahdyne',
'depends_on_past': False,
'start_date': datetime(2020, 10, 10, tzinfo=local_tz),
'email': ['nematpour.ma@gmail.com'],
'email_on_failure': True,
'email_on_retry': True,
'retries': 0,
'retry_delay': timedelta(minutes=5)
}
dag = DAG(dag_id='flight_search_dag',
default_args=default_args,
catchup=False,
schedule_interval="0 * * * *")
pyspark_app_home = Variable.get("PYSPARK_APP_HOME")
flight_search_ingestion = SparkSubmitOperator(task_id='flight_search_ingestion',
conn_id='spark_local',
application=f'{pyspark_app_home}/spark/search_event_ingestor.py',
total_executor_cores=4,
packages="io.delta:delta-core_2.12:0.7.0,org.apache.spark:spark-sql-kafka-0-10_2.12:3.0.0",
executor_cores=2,
executor_memory='5g',
driver_memory='5g',
name='flight_search_ingestion',
execution_timeout=timedelta(minutes=10),
dag=dag
)
flight_search_waiting_time = SparkSubmitOperator(task_id='flight_search_waiting_time',
conn_id='spark_local',
application=f'{pyspark_app_home}/spark/flight_search_waiting_time.py',
total_executor_cores=4,
packages="io.delta:delta-core_2.12:0.7.0,org.postgresql:postgresql:42.2.9",
executor_cores=2,
executor_memory='10g',
driver_memory='10g',
name='flight_search_waiting_time',
execution_timeout=timedelta(minutes=10),
dag=dag
)
flight_nb_search = SparkSubmitOperator(task_id='flight_nb_search',
conn_id='spark_local',
application=f'{pyspark_app_home}/spark/nb_search.py',
total_executor_cores=4,
packages="io.delta:delta-core_2.12:0.7.0,org.postgresql:postgresql:42.2.9",
executor_cores=2,
executor_memory='10g',
driver_memory='10g',
name='flight_nb_search',
execution_timeout=timedelta(minutes=10),
dag=dag
)
flight_search_ingestion >> [flight_search_waiting_time, flight_nb_search]
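# The bitshift wiring above is shorthand for explicit dependency calls; an
# equivalent formulation with the same operators would be:
#   flight_search_ingestion.set_downstream(flight_search_waiting_time)
#   flight_search_ingestion.set_downstream(flight_nb_search)
# so both downstream tasks run only after the ingestion task succeeds.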
|
# -*- coding: utf-8 -*-
# @Author: Maximus
# @Date: 2018-03-19 19:08:39
# @Last Modified by: mom1
# @Last Modified time: 2018-04-27 11:19:28
import sublime
import sublime_plugin
import os
import time
import re
import json
import hashlib
import imp
import Default.history_list as History
import RSBIDE.external.symdb as symdb
import RSBIDE.common.ast_rsl as ast_rsl
import RSBIDE.common.settings as Settings
from subprocess import call
from RSBIDE.common.notice import *
from RSBIDE.common.tree import Tree
from RSBIDE.common.lint import Linter
from RSBIDE.common.config import config
from RSBIDE.common.RsbIde_print_panel import print_to_panel
from RSBIDE.common.async import async_worker, run_after_loading
ST2 = int(sublime.version()) < 3000
if ST2:
    try:
        sublime.error_message("RSBIDE Package Message:\n\nThis package does NOT work in Sublime Text 2.\n\nPlease use Sublime Text 3.")
    except Exception as e:
        try:
            sublime.message_dialog("RSBIDE Package Message:\n\nThis package does NOT work in Sublime Text 2.\n\nPlease use Sublime Text 3.")
        except Exception as e:
            pass
def posix(path):
return path.replace("\\", "/")
def is_file_index(file):
    return bool(file) and (file.endswith('.mac') or file.endswith('.xml'))
def is_RStyle_view(view):
    if 'R-Style' in (view.settings().get('syntax') or ''):
        return True
    return is_file_index(view.file_name())
def get_db(window):
if len(window.folders()) == 0:
return []
fold = window.folders()[0]
sublime_cache_path = sublime.cache_path()
tmp_folder = sublime_cache_path + "/RSBIDE/"
    if not os.path.isdir(tmp_folder):
        log('Cache folder not found. Creating it: %s' % tmp_folder)
        os.makedirs(tmp_folder)
hsh = hashlib.md5(fold.encode('utf-8'))
db = [os.path.join(tmp_folder, hsh.hexdigest() + '.cache_db')]
return db
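# For illustration (hypothetical folder): a window whose first folder is
# "C:/work/project" gets a single cache path of the form
#   <sublime cache>/RSBIDE/<md5 of the folder path>.cache_db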
def extent_reg(view, sel, mod=1):
if mod == 1: # class
dist_scope = 'source.mac meta.class.mac'
dist_scope_name = 'source.mac meta.class.mac entity.name.class.mac'
elif mod == 2: # macro
dist_scope = 'source.mac meta.macro.mac'
dist_scope_name = 'source.mac meta.macro.mac entity.name.function.mac'
regions = [i for i in view.find_by_selector(dist_scope) if i.contains(sel)]
if len(regions) == 0:
return None
region = regions[-1]
regions_name = [j for j in view.find_by_selector(dist_scope_name) if region.contains(j)]
region_name = regions_name[-1]
return (region, region_name)
def get_result(view, symbol):
def ret_format(file=view.file_name(), row=0, col=0, scope='', rowcol=None):
if isinstance(rowcol, tuple):
row, col = rowcol
return {'file': file, 'row': row, 'col': col, 'scope': scope}
window = sublime.active_window()
sel = view.sel()[0]
if sel.empty():
sel = view.word(sel)
if not symbol:
symbol = view.substr(sel).strip()
t = time.time()
if symbol.lower() == 'end' and view.match_selector(
sel.begin(),
'keyword.macro.end.mac, keyword.class.end.mac, keyword.if.end.mac, keyword.for.end.mac, keyword.while.end.mac'
):
meta = view.extract_scope(sel.begin() - 1)
res = ret_format(rowcol=view.rowcol(meta.begin()))
return [res]
elif view.match_selector(sel.begin(), 'import.file.mac'):
return [ret_format(f) for f in symdb.query_packages_info(symbol.lower())]
elif view.match_selector(sel.begin(), 'entity.other.inherited-class.mac'):
ret = window.lookup_symbol_in_index(symbol)
return [ret_format(cp[0], cp[2][0] - 1, cp[2][1] - 1) for cp in ret]
    # current context
    cur_class = extent_reg(view, sel)
    cur_macro = extent_reg(view, sel, 2)
    log('Current context', "%.3f" % (time.time() - t))
t = time.time()
    # prepare the generators
g_all = ast_rsl.generat_scope(
view,
'variable.declare.name.mac - (meta.class.mac, meta.macro.mac), entity.name.function.mac - meta.class.mac, entity.name.class.mac'
)
cls_symbols = ast_rsl.generat_scope(
view,
'meta.class.mac variable.declare.name.mac - meta.macro.mac, meta.class.mac entity.name.function.mac - (meta.macro.mac meta.macro.mac)'
)
cls_param_symbols = ast_rsl.generat_scope(view, 'variable.parameter.class.mac')
macro_symbols = ast_rsl.generat_scope(
view,
'meta.macro.mac & (variable.parameter.macro.mac, variable.declare.name.mac, meta.macro.mac meta.macro.mac entity.name.function.mac)'
)
    log('Generator setup', "%.3f" % (time.time() - t))
t = time.time()
    # global in the current file
    for ga in g_all:
        if view.substr(ga).lower() == symbol.lower():
            log('Global in current file', "%.3f" % (time.time() - t))
            return [ret_format(rowcol=view.rowcol(ga.begin()))]
    # in the current class
    if cur_class:
        for cs in cls_symbols:
            if cur_class[0].contains(cs) and view.substr(cs).lower() == symbol.lower():
                if cur_macro and view.substr(cur_macro[1]).lower() == symbol.lower():
                    break
                log('In current class', "%.3f" % (time.time() - t))
                return [ret_format(rowcol=view.rowcol(cs.begin()))]
    # in the class parameters
    if cur_class and cur_macro is None:
        for cps in cls_param_symbols:
            if cur_class[0].contains(cps) and view.substr(cps).lower() == symbol.lower():
                log('In class parameters', "%.3f" % (time.time() - t))
                return [ret_format(rowcol=view.rowcol(cps.begin()))]
    # in the current macro
    if cur_macro:
        for ms in macro_symbols:
            if cur_macro[0].contains(ms) and view.substr(ms).lower() == symbol.lower():
                log('In current macro', "%.3f" % (time.time() - t))
                return [ret_format(rowcol=view.rowcol(ms.begin()))]
    # in the parents
    if cur_class:
        find_symbols = symdb.query_parent_symbols_go(view.substr(cur_class[1]), symbol)
        if find_symbols and len(find_symbols) > 0:
            log('In parents', "%.3f" % (time.time() - t))
            return find_symbols
    # in the global scope
    ret = symdb.query_globals_in_packages_go(symdb.get_package(view.file_name()), symbol)
    log('In global scope', "%.3f" % (time.time() - t))
return ret
class GoToDefinitionCommand(sublime_plugin.WindowCommand):
window = sublime.active_window()
def run(self):
view = self.window.active_view()
self.view = view
if not is_RStyle_view(view):
return
sel = self.view.sel()[0]
if sel.empty():
sel = view.word(sel)
symbol = view.substr(sel).strip()
self.old_view = self.window.active_view()
self.curr_loc = sel.begin()
History.get_jump_history(self.window.id()).push_selection(view)
self.search(symbol)
def search(self, symbol):
def async_search():
if not update_settings():
return
results = get_result(self.view, symbol)
handle_results(results)
def handle_results(results):
if len(results) > 1:
self.ask_user_result(results)
elif results: # len(results) == 1
self.goto(results[0])
else:
sublime.status_message('Symbol "{0}" not found'.format(symbol))
sublime.set_timeout_async(async_search, 10)
def ask_user_result(self, results):
view = self.window.active_view()
self.view = view
self.last_viewed = None
def on_select(i, trans=False):
def add_reg():
if results[i]['row'] == 0 and results[i]['col'] == 0:
return
p = v.text_point(results[i]['row'], results[i]['col'])
dec_reg = v.word(p)
v.add_regions('rsbide_declare', [dec_reg], 'string', 'dot', sublime.DRAW_NO_FILL)
def clear_reg():
v.erase_regions('rsbide_declare')
flags = sublime.ENCODED_POSITION if not trans else sublime.ENCODED_POSITION | sublime.TRANSIENT
if self.last_viewed:
self.last_viewed.erase_regions('rsbide_declare')
if i > -1:
v = self.goto(results[i], flags)
self.last_viewed = v
if trans:
run_after_loading(v, add_reg)
else:
run_after_loading(v, clear_reg)
else:
self.window.focus_view(self.old_view)
self.old_view.show_at_center(self.curr_loc)
self.view.window().show_quick_panel(
list(map(self.format_result, results)),
on_select,
sublime.KEEP_OPEN_ON_FOCUS_LOST,
0,
lambda x: on_select(x, True)
)
def goto(self, result, flags=sublime.ENCODED_POSITION):
return self.view.window().open_file(
'{0}:{1}:{2}'.format(
result['file'],
result['row'] + 1,
result['col'] + 1
),
flags
)
def format_result(self, result):
rel_path = os.path.relpath(result['file'], self.view.window().folders()[0])
desc = result['scope']
if '.' in result['scope']:
desc = '{0} ({1})'.format(*result['scope'].split('.'))
return [
'{0} ({1})'.format(rel_path, result['row']),
desc
]
def is_visible(self):
return is_RStyle_view(self.window.active_view())
def description(self):
        return 'RSBIDE: Go to declaration\talt+g'
class PrintSignToPanelCommand(sublime_plugin.WindowCommand):
def run(self):
view = self.window.active_view()
self.view = view
self.db_doc = None
if not is_RStyle_view(view):
return
sel = self.view.sel()[0]
if sel.empty():
sel = view.word(sel)
symbol = view.substr(sel).strip()
self.search(symbol)
def search(self, symbol):
def async_search():
if not update_settings():
return
results = get_result(self.view, symbol)
handle_results(results)
def handle_results(results):
if results:
self.print_symbol(results[0])
elif self.find_in_doc(symbol):
self.print_symbol_doc(self.get_doc(symbol))
else:
sublime.status_message('Symbol "{0}" not found'.format(symbol))
sublime.set_timeout_async(async_search, 10)
def print_symbol(self, result):
if result['file'].lower() == self.view.file_name().lower():
source = self.view.substr(sublime.Region(0, self.view.size()))
else:
            source = open(result['file'], encoding='cp1251').read()
print_to_panel(self.view, source, showline=result['row'], region_mark=(result['row'], result['col']))
def print_symbol_doc(self, doc_string):
if not doc_string:
return
print_to_panel(self.view, doc_string, bDoc=True)
def get_db_doc(self, symbol):
lang = 'mac'
path_db = os.path.dirname(
os.path.abspath(__file__)) + "/dbHelp/%s.json" % lang
if not self.db_doc:
if os.path.exists(path_db):
self.db_doc = json.load(open(path_db))
else:
return
return self.db_doc.get(symbol.lower())
def find_in_doc(self, symbol):
return self.get_db_doc(symbol)
def get_doc(self, symbol):
found = self.get_db_doc(symbol)
if found:
menus = []
            # Title
            menus.append("Documentation for " + found["name"] + "\n" + "=" * max(len("Documentation for " + found["name"]), 40) + "\n")
# Syntax
menus.append(found["syntax"] + "\n")
# Parameters
for parameter in found["params"]:
menus.append(
"\t- " + parameter["name"] + ": " + parameter["descr"] + "\n")
# Description
menus.append("\n" + found["descr"] + "\n")
return ''.join(menus)
else:
return None
def is_visible(self):
return is_RStyle_view(self.window.active_view())
def description(self):
        return 'RSBIDE: Show declaration scope\talt+s'
class RSBIDEListener(sublime_plugin.EventListener):
previous_window = None
previous_project = None
def index_view(self, view):
if not is_file_index(view.file_name()):
return
db = get_db(view.window())
self.async_index_view(view.file_name(), db, view.window().folders())
@staticmethod
def async_index_view(file_name, databases, project_folders):
if not update_settings():
return
for dbi, database in enumerate(databases):
symdb.process_file(dbi, file_name)
log('Indexed', file_name)
symdb.commit()
def on_post_save_async(self, view):
self.index_view(view)
if Settings.proj_settings.get("LINT_ON_SAVE", True):
lint = Linter(view)
lint.start()
def on_modified_async(self, view):
Linter(view).erase_all_regions()
def on_activated(self, view):
window = view.window()
db = get_db(view.window())
if not window:
return False
if self.previous_project != db:
if self.previous_project is not None:
if not update_settings():
return
view.window().run_command('rebuild_cache', {'action': 'update'})
self.previous_window = sublime.active_window().id()
self.previous_project = db
        elif self.previous_window != sublime.active_window().id():
self.previous_window = sublime.active_window().id()
if not update_settings():
return
view.window().run_command('rebuild_cache', {'action': 'update'})
def intelige_end(self, view):
result = []
scope = view.scope_name(view.sel()[0].begin())
        if scope.strip().endswith(('meta.if.mac', 'meta.for.mac', 'meta.while.mac')):
result = [('end\trsl', 'end;')]
elif scope.strip().endswith('meta.macro.mac'):
result = [('End\trsl', 'End;')]
elif scope.strip().endswith('meta.class.mac'):
result = [('END\trsl', 'END;')]
return result
def on_query_completions(cls, view, prefix, locations):
if len(locations) != 1:
return []
if not is_file_index(view.file_name()):
return []
if not update_settings():
return
completions = []
sel = view.sel()[0]
t = time.time()
cur_class = extent_reg(view, sel)
cur_macro = extent_reg(view, sel, 2)
        log('Current context', "%.3f" % (time.time() - t))
if cur_class:
completions = [('this\tclass', 'this')]
if view.match_selector(
sel.begin() - 1,
'variable.declare.name.mac, entity.name.class.mac' +
', entity.name.function.mac, variable.parameter.macro.mac, variable.parameter.class.mac'
):
return ([], sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)
elif view.match_selector(sel.begin() - 1, 'macro-param.mac, class-param.mac'):
return list(cls.get_completions_always(view))
t = time.time()
if 'init' in [view.substr(view.word(sel)).lower(), prefix.lower()] and view.match_selector(sel.begin(), 'source.mac meta.class.mac'):
cls_parent = [parent for parent in view.find_by_selector('entity.other.inherited-class.mac') if cur_class[0].contains(parent)]
if cls_parent and len(cls_parent) > 0:
return (
[('Init' + view.substr(cls_parent[0]) + '\tparent', 'Init' + view.substr(cls_parent[0]))],
sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS
)
        # file-name completions
elif view.match_selector(sel.begin(), 'source.mac & (meta.import.mac, punctuation.definition.import.mac)'):
currImp = [view.substr(s).lower().strip() for s in view.find_by_selector('meta.import.mac import.file.mac')]
if view.file_name():
currImp += [os.path.splitext(os.path.basename(view.file_name().lower()))[0]]
files = [(p + '\tFiles', p) for p in symdb.query_packages(prefix.lower(), case=True) if p.lower() not in currImp]
            log('File completions', "%.3f" % (time.time() - t))
return (files, sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)
        # metadata completions
        elif view.match_selector(sel.begin(), 'source.mac string.quoted.double.mac'):
            isbac = view.match_selector(sel.begin(), 'string.quoted.sql.mac')
            if view.substr(sel.begin() - 1) == '.':
                line = view.line(sel.begin())
                bef_symbols = sublime.Region(line.begin(), sel.begin())
                il = 3
                word = ''
                while bef_symbols.size() >= il:
                    if view.match_selector(sel.begin() - il, 'constant.other.table-name.mac'):
                        word = view.extract_scope(sel.begin() - il)
                        word = re.sub(r'(\\")', '', view.substr(word))
                        break
                    il += 1
                log('Metadata completions (qualified)', "%.3f" % (time.time() - t))
                return (symdb.query_metadata_object(word), sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)
            else:
                log('Metadata completions (prefix)', "%.3f" % (time.time() - t))
                return (symdb.query_metadata(prefix, isbac), sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)
        # class completions
elif view.match_selector(sel.begin(), 'source.mac inherited-class.mac'):
if cur_class:
completions = [
(view.substr(s) + '\tclass', view.substr(s))
for s in view.find_by_selector('entity.name.class.mac') if view.substr(cur_class[1]) != view.substr(s)
]
else:
completions = [(view.substr(s) + '\tclass', view.substr(s)) for s in view.find_by_selector('entity.name.class.mac')]
completions += symdb.query_globals_class(symdb.get_package(view.file_name()), prefix)
            log('Class completions', "%.3f" % (time.time() - t))
return (completions, sublime.INHIBIT_WORD_COMPLETIONS | sublime.INHIBIT_EXPLICIT_COMPLETIONS)
t = time.time()
if sel.begin() == sel.end():
sel = view.word(sel)
        # from the current file, by context
g_all = ast_rsl.generat_scope(
view,
'variable.declare.name.mac - (meta.class.mac, meta.macro.mac)'
)
all_g_macros = ast_rsl.generat_scope(view, 'meta.macro.mac - (meta.class.mac)')
all_g_name_macros = ast_rsl.generat_scope(view, 'meta.macro.mac entity.name.function.mac - (meta.class.mac)')
all_cls_vars = ast_rsl.generat_scope(view, 'meta.class.mac variable.declare.name.mac - (meta.macro.mac, meta.class.mac meta.class.mac)')
g_macros = zip(all_g_macros, all_g_name_macros)
all_cls_macros = ast_rsl.generat_scope(view, 'meta.class.mac meta.macro.mac')
all_cls_macros_names = ast_rsl.generat_scope(view, 'meta.class.mac meta.macro.mac entity.name.function.mac')
g_class_names = ast_rsl.generat_scope(view, 'meta.class.mac entity.name.class.mac')
all_macro_params = ast_rsl.generat_scope(view, 'meta.macro.mac variable.parameter.macro.mac')
all_macro_vars = ast_rsl.generat_scope(view, 'meta.macro.mac variable.declare.name.mac')
all_g_param_macros = view.find_by_selector('variable.parameter.macro.mac - (meta.class.mac)')
        log('Generator setup', "%.3f" % (time.time() - t))
t = time.time()
for g in g_all:
g_scop = 'global'
if view.match_selector(g.begin(), 'meta.const.mac'):
g_scop = 'const'
elif view.match_selector(g.begin(), 'variable.declare.name.mac'):
g_scop = 'var'
completions += [(view.substr(g) + '\t' + g_scop, view.substr(g))]
        log('Global variables', "%.3f" % (time.time() - t))
t = time.time()
for clsn in g_class_names:
completions += [(view.substr(clsn) + '\t' + 'class', view.substr(clsn))]
        log('Global classes', "%.3f" % (time.time() - t))
t = time.time()
for g_m in g_macros:
g_param_macros = [gpm for gpm in all_g_param_macros if g_m[0].contains(gpm)]
hint = ", ".join(["${%s:%s}" % (k + 1, view.substr(v).strip()) for k, v in enumerate(g_param_macros)])
completions += [(view.substr(g_m[1]) + '\tmacro', view.substr(g_m[1]) + '(' + hint + ')')]
        log('Global macros', "%.3f" % (time.time() - t))
t = time.time()
cls_params = []
if cur_class:
cls_vars = [cv for cv in all_cls_vars if cur_class[0].contains(cv)]
cls_macros = [cl for cl in all_cls_macros if cur_class[0].contains(cl)]
cls_macros_names = [cmn for cmn in all_cls_macros_names if cur_class[0].contains(cmn)]
gen_mac = zip(cls_macros, cls_macros_names)
cls_params = [cp for cp in view.find_by_selector('meta.class.mac class-param.mac variable.parameter.class.mac') if cur_class[0].contains(cp)]
            log('Class setup', "%.3f" % (time.time() - t))
t = time.time()
for cls_var in cls_vars:
completions += [(view.substr(cls_var) + '\tvar in class', view.substr(cls_var))]
            log('Class variables', "%.3f" % (time.time() - t))
t = time.time()
for c_elem in gen_mac:
param_macros = [gpm for gpm in view.find_by_selector('meta.class.mac variable.parameter.macro.mac') if c_elem[0].contains(gpm)]
hint = ", ".join(["${%s:%s}" % (k + 1, view.substr(v).strip()) for k, v in enumerate(param_macros)])
completions += [(view.substr(c_elem[1]) + '\tmacro in class', view.substr(c_elem[1]) + '(' + hint + ')')]
            log('Class functions', "%.3f" % (time.time() - t))
t = time.time()
if cur_macro:
cls_params = []
param_macro = [(view.substr(pm) + '\tmacro param', view.substr(pm)) for pm in all_macro_params if cur_macro[0].contains(pm)]
vars_macro = [(view.substr(vm) + '\tvar in macro', view.substr(vm)) for vm in all_macro_vars if cur_macro[0].contains(vm)]
completions += param_macro
completions += vars_macro
            log('Current macro parameters', "%.3f" % (time.time() - t))
t = time.time()
for cls_param in cls_params:
completions += [(view.substr(cls_param) + '\tclass param', view.substr(cls_param))]
        log('Current class parameters', "%.3f" % (time.time() - t))
t = time.time()
        # from the parent; must happen here because of the sort order
        if cur_class:
            completions += symdb.query_parent_symbols(view.substr(cur_class[1]), prefix)
        log('From parents', "%.3f" % (time.time() - t))
t = time.time()
        # smart End
        completions += cls.intelige_end(view)
        log('Smart End', "%.3f" % (time.time() - t))
t = time.time()
        # autocomplete
        completions += cls.get_completions_always(view)
        log('Autocomplete', "%.3f" % (time.time() - t))
t = time.time()
        # from the global scope
        completions += symdb.query_globals_in_packages(symdb.get_package(view.file_name()), prefix)
        log('From global scope', "%.3f" % (time.time() - t))
return completions
def get_completions_always(self, view):
collections = sublime.find_resources('RSBIDE*.sublime-completions')
sel = view.sel()[0]
for collection_file in collections:
collection_res = sublime.decode_value(
sublime.load_resource(collection_file)
)
if view.match_selector(sel.begin(), collection_res.get('scope', 'source.mac')):
completions = collection_res.get('completions', [])
else:
continue
descr = collection_res.get('descr', '\trsl')
for completion in completions:
if 'trigger' in completion:
yield (completion['trigger'] + descr, completion['contents'])
else:
yield (completion + descr, completion)
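# Note on the resource format read by get_completions_always() above: each
# RSBIDE*.sublime-completions file is a JSON resource. A minimal example with
# illustrative entries (the 'descr' key is specific to this plugin; 'scope' and
# 'completions' are the standard Sublime Text keys):
#   {
#       "scope": "source.mac",
#       "descr": "\trsl",
#       "completions": [
#           {"trigger": "GetParm", "contents": "GetParm($1)"},
#           "plain_word"
#       ]
#   }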
class RebuildCacheCommand(sublime_plugin.WindowCommand):
index_in_progress = False
exclude_folders = []
def run(self, action='update'):
log('run')
if action == 'cancel':
self.__class__.index_in_progress = False
return
self.view = self.window.active_view()
if action == 'update':
rebuild = False
elif action == 'rebuild':
rebuild = True
else:
raise ValueError('action must be one of {"cancel", "update", '
'"rebuild"}')
self.__class__.index_in_progress = True
db = get_db(self.view.window())
async_worker.schedule(self.async_process_files,
db,
self.view.window().folders(), rebuild)
def is_enabled(self, action='update'):
self.view = self.window.active_view()
if action == 'cancel':
return self.index_in_progress
else:
return not self.index_in_progress
@classmethod
def async_process_files(cls, databases, project_folders, rebuild):
try:
cls.async_process_files_inner(databases, project_folders, rebuild)
finally:
cls.index_in_progress = False
    @classmethod
    def all_files_in_folders(cls, folder, base=None):
        base = base if base is not None else folder
        for test in cls.exclude_folders:
            if re.search('(?i)' + test, folder) is not None:
                return
        for x in os.listdir(folder):
            current_path = os.path.join(folder, x)
            if os.path.isfile(current_path):
                if not is_file_index(current_path):
                    continue
                yield posix(current_path)
            elif not x.startswith('.') and os.path.isdir(current_path):
                yield from cls.all_files_in_folders(current_path, base)
@classmethod
def async_process_files_inner(cls, databases, project_folders, rebuild):
if rebuild:
# Helper process should not reference files to be deleted.
imp.reload(symdb)
# Simply remove associated database files if build from scratch is
# requested.
for database in databases:
try:
os.remove(os.path.expandvars(database))
except OSError:
# Specified database file may not yet exist or is
# inaccessible.
pass
t = time.time()
if not update_settings():
return
cls.exclude_folders = Settings.proj_settings.get('EXCLUDE_FOLDERS', [])
for dbi, database in enumerate(databases):
symdb.begin_file_processing(dbi)
for folder in project_folders:
aLL_f = list(cls.all_files_in_folders(folder))
lf = len(aLL_f)
t1 = time.time()
for it, path in enumerate(aLL_f):
if not cls.index_in_progress:
symdb.end_file_processing(dbi)
symdb.commit()
sublime.status_message('Indexing canceled')
return
symdb.process_file(dbi, path)
p = it * 100 / lf
sublime.status_message(' RSL index %03.2f %%' % p)
if it > 0 and it % 1000 == 0:
symdb.commit()
if it > 0 and it % 100 == 0:
log(it, 'of', lf, "%.3f" % (time.time() - t1))
t1 = time.time()
symdb.end_file_processing(dbi)
symdb.commit()
sublime.status_message(' RSL index Done %.3f sec' % (time.time() - t))
log('Parse_ALL', "%.3f" % (time.time() - t))
imp.reload(symdb)
class LintThisViewCommand(sublime_plugin.WindowCommand):
def run(self):
view = self.window.active_view()
if not is_RStyle_view(view):
return
lint = Linter(view, force=True)
lint.start()
def is_visible(self):
view = self.window.active_view()
return is_RStyle_view(view) and Settings.proj_settings.get("LINT", True)
def description(self):
        return 'RSBIDE: Check coding conventions'
class PrintTreeImportCommand(sublime_plugin.WindowCommand):
def run(self):
view = self.window.active_view()
if not update_settings():
return
imports = symdb.query_imports(symdb.get_package(view.file_name()))
tree = Tree()
package = symdb.get_package(view.file_name(), True)
tree.add_node(package)
for node in imports:
tree.add_node(node[0], node[1])
v = self.window.new_file()
tree.display(package, view=v)
v.run_command('append', {'characters': "\n"})
def is_visible(self):
return is_RStyle_view(self.window.active_view())
def description(self):
        return 'RSBIDE: Import tree'
class StatusBarFunctionCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
if not is_RStyle_view(view):
return
self.settings = Settings.proj_settings
lint = Linter(view)
region = view.sel()[0]
mess_list = []
MessStat = ''
sep = ';'
cur_class = extent_reg(view, region)
cur_macro = extent_reg(view, region, 2)
lint_regions = [(j, lint.get_text_lint(i)) for i in lint.all_lint_regions() for j in view.get_regions(i)]
if len(lint_regions) > 0:
            MessStat = 'Lint issues found: %s total' % (len(lint_regions))
for x in lint_regions:
if x[0].intersects(region):
mess_list += [x[1]]
if len(mess_list) > 0:
MessStat = sep.join(mess_list)
elif self.settings.get("SHOW_CLASS_IN_STATUS", False):
if cur_class:
parent = [el for el in view.find_by_selector('entity.other.inherited-class.mac') if cur_class[0].contains(el)]
param = [p for p in view.find_by_selector('variable.parameter.class.mac') if cur_class[0].contains(p)]
sp = '(%s)' % (''.join([view.substr(i) for i in parent])) if len(parent) > 0 else ''
MessStat = 'class %s %s (%s)' % (sp, view.substr(cur_class[1]), ', '.join([view.substr(j) for j in param]))
if cur_macro:
if len(MessStat) > 0:
MessStat += ','
MessStat += ' macro: ' + view.substr(cur_macro[1])
view.set_status('rsbide_stat', MessStat)
class RunRsinitCommand(sublime_plugin.TextCommand):
currfolder = ""
def run(self, edit, action='file'):
def call_a():
log(call(['RSInit.exe', '-rsldebug', symdb.get_package(self.view.file_name()) + '.mac']))
if action != 'file':
return
os.chdir(self.currfolder)
sublime.set_timeout_async(call_a, 5)
def is_visible(self):
return os.path.lexists(os.path.join(self.currfolder, 'RSInit.exe'))
def is_enabled(self):
self.currfolder = sublime.expand_variables(
"$folder", sublime.active_window().extract_variables())
if is_RStyle_view(self.view):
return True
else:
return False
def description(self):
        return 'RSBIDE: Run/Debug file'
def plugin_loaded():
if not update_settings():
return
global_settings = sublime.load_settings(config["RSB_SETTINGS_FILE"])
global_settings.clear_on_change('RSBIDE_settings')
global_settings.add_on_change("RSBIDE_settings", update_settings)
def update_settings():
""" restart projectFiles with new plugin and project settings """
# update settings
db = get_db(sublime.active_window())
if len(db) == 0:
        debug('No database configured')
return False
symdb.set_databases(db)
if Settings:
global_settings = Settings.update()
settings = Settings.merge(global_settings, Settings.project(sublime.active_window()))
Settings.set_settings_project(settings)
symdb.set_settings(Settings)
return True
|
from PyQt5.QtWidgets import QWidget, QApplication, QMainWindow, QFrame, QDesktopWidget
from PyQt5.QtGui import QPainter, QColor, QPen, QBrush
from PyQt5.QtCore import Qt, QTimer, QTime
def drawLines(qp):
# print(self.t.elapsed())
pen = QPen(Qt.black, 2, Qt.SolidLine)
pen_dash = QPen(Qt.black, 2, Qt.DotLine)
# IM1 Vertical
qp.setPen(pen)
# qp.drawLine(270, 0, 270, 600)
#
# qp.drawLine(330, 0, 330, 600)
# qp.drawLine(300, 0, 300, 270)
# qp.drawLine(300, 330, 300, 600)
qp.setPen(pen_dash)
qp.drawLine(280, 330, 280, 600)
qp.drawLine(290, 330, 290, 600)
qp.drawLine(310, 330, 310, 600)
qp.drawLine(320, 330, 320, 600)
#
qp.drawLine(280, 0, 280, 270)
qp.drawLine(290, 0, 290, 270)
qp.drawLine(310, 0, 310, 270)
qp.drawLine(320, 0, 320, 270)
# IM1 Tropical
qp.setPen(pen)
# qp.drawLine(0, 270, 600, 270)
#
# qp.drawLine(0, 330, 600, 330)
# qp.drawLine(0, 300, 270, 300)
#
# qp.drawLine(330, 300, 600, 300)
qp.setPen(pen_dash)
qp.drawLine(0, 280, 270, 280)
qp.drawLine(0, 290, 270, 290)
qp.drawLine(0, 310, 270, 310)
qp.drawLine(0, 320, 270, 320)
#
qp.drawLine(330, 280, 600, 280)
qp.drawLine(330, 290, 600, 290)
qp.drawLine(330, 310, 600, 310)
qp.drawLine(330, 320, 600, 320)
# IM2 Vertical
qp.setPen(pen)
qp.drawLine(600, 0, 600, 600)
qp.drawLine(660, 0, 660, 600)
qp.drawLine(630, 0, 630, 270)
qp.drawLine(630, 330, 630, 600)
qp.setPen(pen_dash)
qp.drawLine(610, 0, 610, 270)
qp.drawLine(620, 0, 620, 270)
qp.drawLine(640, 0, 640, 270)
qp.drawLine(650, 0, 650, 270)
qp.drawLine(610, 330, 610, 600)
qp.drawLine(620, 330, 620, 600)
qp.drawLine(640, 330, 640, 600)
qp.drawLine(650, 330, 650, 600)
# IM2 Tropical
qp.setPen(pen)
qp.drawLine(600, 270, 930, 270)
qp.drawLine(600, 330, 930, 330)
qp.drawLine(660, 300, 930, 300)
# qp.setPen(pen_dash)
# qp.drawLine(660, 280, 930, 280)
# qp.drawLine(660, 290, 930, 290)
# qp.drawLine(660, 310, 930, 310)
# qp.drawLine(660, 320, 930, 320)
# IM3 Vertical
qp.setPen(pen)
qp.drawLine(270, 600, 270, 930)
qp.drawLine(330, 600, 330, 930)
    qp.drawLine(300, 660, 300, 930)
# qp.setPen(pen_dash)
# qp.drawLine(280, 660, 280, 930)
# qp.drawLine(290, 660, 290, 930)
# qp.drawLine(310, 660, 310, 930)
# qp.drawLine(320, 660, 320, 930)
# IM3 Tropical
qp.setPen(pen)
qp.drawLine(0, 600, 600, 600)
qp.drawLine(0, 660, 600, 660)
qp.drawLine(0, 630, 270, 630)
qp.drawLine(330, 630, 600, 630)
# qp.setPen(pen_dash)
# qp.drawLine(0, 610, 270, 610)
# qp.drawLine(0, 620, 270, 620)
# qp.drawLine(0, 640, 270, 640)
# qp.drawLine(0, 650, 270, 650)
#
# qp.drawLine(330, 610, 600, 610)
# qp.drawLine(330, 620, 600, 620)
# qp.drawLine(330, 640, 600, 640)
# qp.drawLine(330, 650, 600, 650)
# IM4 Vertical
qp.setPen(pen)
qp.drawLine(600, 600, 600, 930)
qp.drawLine(660, 600, 660, 930)
qp.drawLine(630, 660, 630, 930)
# qp.drawLine(630, 330, 630, 600)
# qp.setPen(pen_dash)
#
# qp.drawLine(610, 660, 610, 930)
# qp.drawLine(620, 660, 620, 930)
# qp.drawLine(640, 660, 640, 930)
# qp.drawLine(650, 660, 650, 930)
# qp.drawLine(610, 330, 610, 600)
# qp.drawLine(620, 330, 620, 600)
# qp.drawLine(640, 330, 640, 600)
# qp.drawLine(650, 330, 650, 600)
# IM4 Tropical
qp.setPen(pen)
qp.drawLine(600, 600, 930, 600)
qp.drawLine(600, 660, 930, 660)
qp.drawLine(660, 630, 930, 630)
# qp.setPen(pen_dash)
# qp.drawLine(660, 610, 930, 610)
# qp.drawLine(660, 620, 930, 620)
# qp.drawLine(660, 640, 930, 640)
# qp.drawLine(660, 650, 930, 650)
# # IM1 Vertical
# qp.setPen(pen)
# qp.drawLine(270, 0, 270, 560)
#
# qp.drawLine(290, 0, 290, 560)
# qp.drawLine(280, 0, 280, 270)
# qp.drawLine(280, 290, 280, 560)
#
# # IM1 Tropical
# qp.drawLine(0, 270, 560, 270)
#
# qp.drawLine(0, 280, 270, 280)
# qp.drawLine(0, 290, 270, 290)
#
# qp.drawLine(290, 280, 560, 280)
# qp.drawLine(270, 290, 560, 290)
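# A minimal harness (a sketch, not part of the original script): drawLines()
# needs an active QPainter, so a hypothetical QWidget subclass could call it
# from paintEvent like this.
class GridWidget(QWidget):
    def paintEvent(self, event):
        qp = QPainter(self)
        drawLines(qp)
        qp.end()


if __name__ == '__main__':
    import sys
    app = QApplication(sys.argv)
    widget = GridWidget()
    widget.resize(930, 930)
    widget.show()
    sys.exit(app.exec_())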
|
import matplotlib
import pandas as pd
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report, \
precision_recall_fscore_support
def beautify_treatment(treatment, treatment_dict):
return treatment_dict[treatment]
def get_input_diagnosis():
    return {'diagnosis': input("what's your diagnosis? "),
            'age': int(input("what's your age? ")),
            'sex': input("what's your gender? f/m ")}
def main():
treatment_df = pd.read_csv('people_medicine_data_large.csv')
treatment_df = remove_useless_columns(treatment_df)
    common_treatment = set(treatment_df['treatment'])
# common_diagnose = set.intersection(set(treatment_df['diagnosis']))
# print(common_diagnose)
treatment_dict = {treatment: i
for i, treatment in
zip(np.arange(len(common_treatment)),
common_treatment)}
treatment_df = treatment_df.replace({'treatment': treatment_dict})
x_data, labels = get_data_and_labels(treatment_df)
x_train, x_test, y_train, y_test = train_test_split(x_data, labels,
test_size=0.33,
random_state=42)
feature_columns_list, input_function, label_y = get_feature_columns_and_input_function(
treatment_df, treatment_dict, x_train, y_train)
model, y_test, predictions_trained = get_trained_model(feature_columns_list, input_function, label_y,
treatment_dict, x_test, y_test)
question_data = get_input_diagnosis()
x_test_question = pd.DataFrame([question_data])
    prediction_function_question = tf.estimator.inputs.pandas_input_fn(
        x=x_test_question, batch_size=x_test_question.shape[0], shuffle=False)
predictions = list(model.predict(input_fn=prediction_function_question))
res_treatment_dict = dict((v, k) for k, v in treatment_dict.items())
predictions_medicine = [res_treatment_dict[pred['class_ids'][0]]
for pred in predictions]
    precisions_achieved = get_percentage_accuracy(y_test, predictions_trained)
    precision = [precisions_achieved[pred['class_ids'][0]]
                 for pred in predictions]
print(predictions_medicine[0])
print('{}%'.format(precision[0]*100.00))
def get_trained_model(feature_columns_list, input_function, label_y,
treatment_dict, x_test, y_test):
print('values={} len={}'.format(treatment_dict.keys(), len(label_y)))
model = tf.estimator.LinearClassifier(
feature_columns=feature_columns_list, n_classes=len(label_y) + 1)
model.train(input_fn=input_function, steps=5000)
    prediction_function = tf.estimator.inputs.pandas_input_fn(
        x=x_test, batch_size=x_test.shape[0], shuffle=False)
predictions = list(model.predict(input_fn=prediction_function))
predictions = [pred['class_ids'][0] for pred in predictions]
print(classification_report(y_test, predictions))
return model, y_test, predictions
def get_feature_columns_and_input_function(treatment_df, treatment_dict,
x_train, y_train):
data_columns = treatment_df.columns.tolist()
numeric_columns = ['age']
data_columns = \
list(set(data_columns) - set(numeric_columns + ['treatment']))
numeric_features = {column: tf.feature_column.numeric_column(column) for
column in numeric_columns}
categorical_features = {
column: tf.feature_column.categorical_column_with_hash_bucket(
column,
hash_bucket_size=1000)
for column in data_columns}
numeric_features.update(categorical_features)
feature_columns = numeric_features
    feature_columns['sex'] = tf.feature_column.categorical_column_with_vocabulary_list(
        'sex', ['f', 'm'])
age = tf.feature_column.bucketized_column(
feature_columns['age'],
boundaries=[0, 20, 40, 60, 80, 100])
feature_columns['age'] = age
input_function = tf.estimator.inputs.pandas_input_fn(x=x_train,
y=y_train,
batch_size=100,
num_epochs=1000,
shuffle=True)
feature_columns_list = list(feature_columns.values())
label_y = treatment_dict.keys()
return feature_columns_list, input_function, label_y
def get_data_and_labels(treatment_df):
x_data = treatment_df.drop(['treatment'], axis=1)
labels = treatment_df['treatment']
return x_data, labels
def get_percentage_accuracy(y_true, y_pred):
    p, r, f1, s = precision_recall_fscore_support(y_true, y_pred,
                                                  labels=None,
                                                  average=None,
                                                  sample_weight=None)
    num_medicine = np.arange(len(p))
    return {i: precision
            for i, precision in zip(num_medicine, p)}
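# For example, if precision_recall_fscore_support returned per-class precisions
# p = [0.8, 0.5] for classes 0 and 1, the function above would return
# {0: 0.8, 1: 0.5}.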
def remove_useless_columns(treatment_df):
return treatment_df.drop(['person_instance', 'company'], axis=1)
if __name__ == '__main__':
import logging
logging.getLogger().setLevel(logging.INFO)
main()
|
from urllib.request import urlopen
from urllib.error import HTTPError
from urllib.error import URLError
from bs4 import BeautifulSoup
def main():
"""
Dealing with errors and exceptions such as HTTP and URL errors
:return:
"""
try:
html = urlopen("http://pythonscraping.com/pages/page1.html")
except HTTPError as e:
print(e)
except URLError as e:
print(e)
else:
print("Everything is okay!")
return None
if __name__ == "__main__":
main()
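# Note: BeautifulSoup is imported above but not used in this snippet; a natural
# follow-up (a sketch, assuming the request succeeded) would be:
#   bs = BeautifulSoup(html.read(), "html.parser")
#   print(bs.h1)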
|
import math

# read pairs "w h" until the terminating "0 0" line; for each pair print the
# larger of two candidate volumes, rounded to three decimal places
while True:
    IN = input().split(' ')
    w = int(IN[0])
    h = int(IN[1])
    if w == 0 and h == 0:
        break
    # first candidate volume
    if (2 * math.pi * h / 3) <= w:
        v1 = (math.pi * (h ** 3)) / 27
    else:
        r = w / (2 * math.pi)
        v1 = math.pi * (r ** 2) * (h - (2 * r))
    # second candidate volume: the radius is capped at w / 2
    r = h / (2 * (math.pi + 1))
    if 2 * r > w:
        r = w / 2
    v2 = math.pi * (r ** 2) * w
    print(round(max(v1, v2) * 1000) / 1000)
|
""" Script to check neutrons simulated with JUNO detsim (neutron initial momentum uniformly distributed from 0.001 MeV
to 30 MeV within radius of R<16m)
To check the delayed cut (delayed energy cut, neutron multiplicity cut, time cut and distance cut):
Four important things can be checked with this script:
    1. Efficiency of the time cut between prompt and delayed signal. In this case there is no prompt signal,
        but a prompt signal, if present, is assumed to lie at very early hittimes (around 0 ns).
2. Efficiency of the delayed energy cut:
There are some problems with the first investigation of the delayed energy cut (based on 1.9 MeV, 2.2 MeV and
        2.5 MeV gammas), because many delayed signals of NC events have too small nPE compared to the result of the
gamma simulation (see script OLD_check_delayed_energy.py and folder /output_gamma_2_2_MeV)
3. Efficiency of the neutron multiplicity cut (momentum of 0.001 MeV corresponds to minimum kinetic energy of a
neutron after inverse beta decay with a neutrino with initial energy of 10 MeV; momentum of 28 MeV corresponds
to the maximum kinetic energy a neutron can get after inverse beta decay with a neutrino with initial energy
of 115 MeV)
-> Can such neutrons produce further neutrons via scattering, which are also captured on H?
-> Is it possible that such a neutron is not captured by H?
These two questions are important for the efficiency of the neutron multiplicity cut (only 1 neutron capture
in time window 500 ns to 1 ms)
4. Efficiency of the distance cut between initial position (position, where prompt signal deposits its energy) and
position of the neutron capture. Both positions (initial position and start position of nCapture) must be
smeared
So, with this script, you can calculate the efficiency of the delayed cut for real IBD events.
The efficiency of the preselection, which is done in preselection_detsim_user.py, and the efficiency of the
delayed cut for IBD-like events (NC events) must be determined in another way.
"""
import datetime
import ROOT
import sys
import NC_background_functions
import numpy as np
from matplotlib import pyplot as plt
from scipy.optimize import curve_fit
# get the date and time, when the script was run:
date = datetime.datetime.now()
now = date.strftime("%Y-%m-%d %H:%M")
# set the path of the input files:
input_path = "/home/astro/blum/juno/atmoNC/data_NC/output_neutron_multiplicity/"
# set path, where results should be saved:
output_path = input_path + "results/"
# start file and stop file:
file_start = 0
file_end = 500
# file_end = 13
# number of events per file (for simulation with optical processes):
number_evts_per_file = 100
""" define time window and radius cut: """
# set time window of whole signal in ns (start at 200 ns to avoid the analysis of the small prompt signal):
min_time = 200
max_time = 1001000
# set maximum of time window of delayed signal (neutron capture window) in ns:
max_time_ncap = 1000000
# set time in ns, where the neutron capture signal should start:
time_limit = 500
# Set bin-width of hittime histogram in ns:
binwidth = 5.0
# set the radius for the volume cut in mm:
r_cut = 16000
# set the distance for the distance cut in mm:
distance_cut = 1500
""" thresholds and cuts for ncapture signal: """
# Set threshold of number of PE per bin for possible delayed signal (bin-width = 5 ns):
threshold1_del = 50
# set threshold2 of number of PEs per bin (signal peak is summed as long as nPE is above threshold2):
threshold2_del = 0
# min and max number of PE for delayed energy cut (delayed energy cut: 1.9 MeV / 0.0007483 = 2539 PE,
# 2.5 MeV / 0.0007384 = 3386 PE ~ 3400 PE; the lower bound below is set slightly looser at 2400 PE):
min_PE_delayed = 2400
max_PE_delayed = 3400
""" load position of the PMTs and corresponding PMT ID from file PMT_position.root: """
file_PMT_position = "/home/astro/blum/juno/atmoNC/PMT_information/PMT_position.root"
# array with PMT ID and corresponding x, y, z position in mm:
pmtID_pos_file, x_pos_pmt, y_pos_pmt, z_pos_pmt = NC_background_functions.get_pmt_position(file_PMT_position)
""" load 'time resolution' in ns of the 20 inch PMTs and corresponding PMT ID from file PmtData.root: """
file_PMT_time = "/home/astro/blum/juno/atmoNC/PMT_information/PmtData.root"
# array with PMT ID and corresponding sigma in ns:
pmtID_time_file, sigma_time_20inch = NC_background_functions.get_20inchpmt_tts(file_PMT_time)
# set TTS (FWHM) of the 3inch PMTs in ns:
tts_3inch = 5.0
# calculate time resolution (sigma) for the 3inch PMTs in ns:
sigma_time_3inch = tts_3inch / (2 * np.sqrt(2 * np.log(2)))
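# (for tts_3inch = 5.0 ns this gives sigma_time_3inch = 5.0 / 2.355 ~ 2.12 ns)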
# set effective speed of light in the liquid scintillator in mm/ns (see page 7 of c_effective_JUNO-doc-3144-v2.pdf in
# folder /home/astro/blum/PhD/paper/Pulse_Shape_Discrimination/). Effective refraction index in LS n_eff = 1.54.
# c/n_eff = 299792458 m / 1.54 s ~ 194670427 m/s = 194670427 * 10**(-6) mm/ns ~ 194.67 mm/ns:
c_effective = 194.67
""" preallocate variables: """
# preallocate the total number of events simulated:
number_of_events_total = 0
# preallocate the number of events inside fiducial volume (defined by r_cut) (equal to number of initial neutrons):
number_of_events = 0
# preallocate the number of events with at least one signal pulse in the delayed time window (from around 500 ns to
# max_time):
number_timecut_pass = 0
# preallocate the number of events with NO signal pulse in the delayed time window (from around 500 ns to max_time):
number_timecut_rejected = 0
# preallocate the number of events, where delayed pulse begins before time_limit:
number_timecut_rejected_min = 0
# preallocate the number of events, where delayed pulse ends after max_time_ncap:
number_timecut_rejected_max = 0
# preallocate number of events with at least one signal pulse that pass the delayed energy cut in the delayed time
# window (min_PE_delayed < E_d < max_PE_delayed):
number_delayed_energy_pass = 0
# preallocate number of events with NO signal pulse that pass the delayed energy cut in the delayed time window:
number_delayed_energy_rejected = 0
# preallocate number of events, where smeared energy pass the cut, but real energy would be rejected:
number_delayed_energy_toomuch = 0
# preallocate number of events, where smeared energy is rejected by cut, but real energy would pass the cut:
number_delayed_energy_tooless = 0
# preallocate number of events with only 1 signal pulse in delayed time window, that pass delayed energy cut:
number_n_mult_pass = 0
# preallocate number of events that do not pass neutron multiplicity cut:
number_n_mult_rejected = 0
# preallocate number of events with distance between reco. initial position to reco. nCapture position below
# distance_cut, that pass all cuts above:
number_distance_cut_pass = 0
# preallocate number of events, that pass all cuts above, except of the distance cut:
number_distance_cut_rejected = 0
# preallocate number of events with distance between real initial position to real nCapture position below distance cut,
# that pass all cuts above:
number_distance_cut_pass_MCtruth = 0
# preallocate number of events, that pass all cuts above, except the distance cut of the real MC truth position:
number_distance_cut_rejected_MCtruth = 0
number_pe0_qedep2 = 0
number_pe3000_qedep2 = 0
number_pe0_qedep5 = 0
number_pe6500_qedep5 = 0
number_pe15000_qedep11 = 0
number_no_ncapture = 0
# preallocate list, where number of pe (directly from nPhotons) is saved:
array_npe = []
# preallocate list, where number of pe from analyzing the corrected hittime distribution is saved:
array_npe_from_hittime = []
# preallocate list, where edep in MeV is saved:
array_edep = []
# preallocate list, where Qedep in MeV is saved:
array_Qedep = []
""" Analyze the file user_neutron_multiplicity_{}.root: """
# loop over files:
for filenumber in range(file_start, file_end+1, 1):
# load file:
rfile = ROOT.TFile(input_path + "user_neutron_multiplicity_{0:d}.root".format(filenumber))
# print("... read {0}...".format(rfile))
# get evt tree from TFile:
rtree_evt = rfile.Get("evt")
# get geninfo tree from TFile:
rtree_geninfo = rfile.Get("geninfo")
# get prmtrkdep tree from TFile:
rtree_prmtrkdep = rfile.Get("prmtrkdep")
# get nCapture tree from TFile:
rtree_ncapture = rfile.Get("nCapture")
# get the number of events in the 'geninfo' Tree:
num_evts = rtree_geninfo.GetEntries()
if num_evts != number_evts_per_file:
sys.exit("ERROR: number of events differ in file {0}".format(rfile))
# add num_evts to number_of_events_total:
number_of_events_total += num_evts
# loop over events:
for event in range(num_evts):
# set flag, that event passes time cut (time_limit <= nCaptureT <= max_time):
flag_pass_timecut = False
# get current event:
rtree_prmtrkdep.GetEntry(event)
# get deposit energy of initial neutron in MeV (neutrons deposit most of the energy while being captured):
Qedep_capture = float(rtree_prmtrkdep.GetBranch('Qedep').GetLeaf('Qedep').GetValue())
# get current event:
rtree_geninfo.GetEntry(event)
# get initial x,y,z position in mm:
x_init = float(rtree_geninfo.GetBranch('InitX').GetLeaf('InitX').GetValue())
y_init = float(rtree_geninfo.GetBranch('InitY').GetLeaf('InitY').GetValue())
z_init = float(rtree_geninfo.GetBranch('InitZ').GetLeaf('InitZ').GetValue())
# do vertex reconstruction with function position_smearing() for distance cut:
# Smear x,y and z position of the initial position (returns reconstructed position in mm)
# (for Qedep use random number from uniform distribution between 10 MeV and 100 MeV. This represents the prompt
# energy of a positron like in a real IBD event, since in user_neutron_multiplicity.root only the neutron is
# simulated):
Qedep_init = np.random.uniform(10, 100)
x_reco_init = NC_background_functions.position_smearing(x_init, Qedep_init)
y_reco_init = NC_background_functions.position_smearing(y_init, Qedep_init)
z_reco_init = NC_background_functions.position_smearing(z_init, Qedep_init)
# get nCapture tree:
rtree_ncapture.GetEntry(event)
# get number of neutron captures:
NeutronN = int(rtree_ncapture.GetBranch('NeutronN').GetLeaf('NeutronN').GetValue())
# set variables to zeros:
nCaptureT = 0.0
x_reco_ncapture = -17000
y_reco_ncapture = 0
z_reco_ncapture = 0
# check NeutronN:
if NeutronN < 1:
# no neutron capture in event:
number_no_ncapture += 1
print("-------------no neutron capture in event {0:d} in file {1}".format(event, rfile))
elif NeutronN > 1:
# more than one neutron capture in event:
print("+++++++++++++more than 1 neutron capture in event {0:d} in file {1}".format(event, rfile))
else:
# NeutronN == 1 -> 1 neutron capture in event.
# check if captured neutron was the initial neutron:
NeutronTrkid = int(rtree_ncapture.GetBranch('NeutronTrkid').GetLeaf('NeutronTrkid').GetValue(0))
            if NeutronTrkid != 1:
                print("captured neutron is not initial neutron (event = {0:d}, file {1})".format(event, rfile))
# check neutron capture time in ns:
nCaptureT = float(rtree_ncapture.GetBranch("NeutronCaptureT").GetLeaf("NeutronCaptureT").GetValue(0))
# get the start position of neutron capture in mm:
x_ncapture = float(rtree_ncapture.GetBranch("NCStartX").GetLeaf("NCStartX").GetValue(0))
y_ncapture = float(rtree_ncapture.GetBranch("NCStartY").GetLeaf("NCStartY").GetValue(0))
z_ncapture = float(rtree_ncapture.GetBranch("NCStartZ").GetLeaf("NCStartZ").GetValue(0))
# do vertex reconstruction of neutron capture position with function position_smearing():
x_reco_ncapture = NC_background_functions.position_smearing(x_ncapture, Qedep_capture)
y_reco_ncapture = NC_background_functions.position_smearing(y_ncapture, Qedep_capture)
z_reco_ncapture = NC_background_functions.position_smearing(z_ncapture, Qedep_capture)
# calculate distance of neutron capture to detector center in mm:
r_reco_ncapture = np.sqrt(x_reco_ncapture**2 + y_reco_ncapture**2 + z_reco_ncapture**2)
# check volume cut:
if r_reco_ncapture >= r_cut:
# event outside fiducial volume:
continue
else:
# event inside fiducial volume:
number_of_events += 1
""" calculate the real hittime distribution (time of flight correction with reconstructed position of neutron
capture and time smearing with TTS for each hit): """
# get current event:
rtree_evt.GetEntry(event)
# get number of photons:
n_photons = int(rtree_evt.GetBranch("nPhotons").GetLeaf("nPhotons").GetValue())
# preallocate empty array to build default hittime-histogram:
hittime_array = []
for index in range(n_photons):
# get number of PE per photon:
nPE = int(rtree_evt.GetBranch("nPE").GetLeaf("nPE").GetValue(index))
if nPE != 1:
# more than 1 PE per photon
sys.exit("ERROR: more than 1 PE per photon (event {0:d}, file {1})".format(event, rfile))
# get the pmtID of the hit PMT:
pmtID = int(rtree_evt.GetBranch('pmtID').GetLeaf('pmtID').GetValue(index))
""" time of flight correction: """
# get hittime of PMT from tree in ns:
hittime = float(rtree_evt.GetBranch('hitTime').GetLeaf('hitTime').GetValue(index))
# get position of the PMT with specific pmtID (pmtID is ascending number from 0 to 17738 (17739 large PMTs)
# and from 300000 to 336571 (36572 small PMTs)).
# For large PMTs -> For 20inch PMTs, the pmtID is equal to index of x,y,z_pos_pmt array.
# For small PMTs -> For 3inch PMTs, the pmtID - (300000 - 17739) is equal to index of x,y,z_pos_pmt array.
# check if PMT is 20 inch or 3inch (pmtID < 50000 means 20inch PMT):
if pmtID < 50000:
# 20inch PMT:
# get PMT position in mm from arrays:
x_pmt = x_pos_pmt[pmtID]
y_pmt = y_pos_pmt[pmtID]
z_pmt = z_pos_pmt[pmtID]
else:
# 3inch PMT:
# calculate index of pos_pmt array that correspond to pmtID of 3inch PMTs (for example:
# first small PMT: 300000-282261 = 17739, last small PMT: 336571-282261 = 54310)
index_3inch = pmtID - 282261
# get PMT position in mm from arrays:
x_pmt = x_pos_pmt[index_3inch]
y_pmt = y_pos_pmt[index_3inch]
z_pmt = z_pos_pmt[index_3inch]
# calculate distance between reconstructed position neutron capture and position of PMT (in mm):
distance_tof = np.sqrt((x_reco_ncapture - x_pmt)**2 + (y_reco_ncapture - y_pmt)**2 +
(z_reco_ncapture - z_pmt)**2)
# calculate time of flight in ns:
time_of_flight = distance_tof / c_effective
""" time resolution of PMT: """
# get time resolution of PMT with specific pmtID (pmtID is ascending number from 0 to 17738 (17739 large
# PMTs)) -> For 20inch PMTs, the pmtID is equal to index of sigma_time_20inch array.
# check if PMT is 20 inch or 3inch (pmtID < 50000 means 20inch PMT):
if pmtID < 50000:
# 20inch PMT:
# get time resolution (sigma) of PMT in ns from array:
sigma_pmt = sigma_time_20inch[pmtID]
else:
# 3inch PMT:
sigma_pmt = sigma_time_3inch
# consider time resolution of PMT by generating normal distributed random number with mu = hittime and
# sigma = sigma_pmt (only the hittime at the PMT must be smeared, not the time-of-flight):
hittime_tts = np.random.normal(hittime, sigma_pmt)
""" calculate the 'real' hittime of the photon in ns: """
hittime_real = hittime_tts - time_of_flight
# append real hittime to array:
hittime_array.append(hittime_real)
# analyze hittime distribution (the same way like in prompt_signal_preselected_evts.py):
# build histogram, where hittimes are saved:
# set bin-edges of hittime histogram in ns (whole time window from min_time to max_time):
bins_hittime = np.arange(min_time, max_time + 2 * binwidth, binwidth)
# build hittime histogram:
npe_per_hittime_all, bin_edges_hittime_all = np.histogram(hittime_array, bins_hittime)
# analyze the whole time window (not only neutron capture time window) for the time cut:
num_pulses, index_test1, num_of_ncaptures, begin_time_pulse, end_time_pulse = \
NC_background_functions.analyze_delayed_signal(npe_per_hittime_all, bin_edges_hittime_all, 0,
threshold1_del, threshold2_del, min_PE_delayed,
max_PE_delayed, event)
# do time cut:
if begin_time_pulse <= time_limit:
# pulse begins before time_limit:
number_timecut_rejected += 1
number_timecut_rejected_min += 1
        elif end_time_pulse >= max_time_ncap:
            # pulse ends after max_time_ncap:
number_timecut_rejected += 1
number_timecut_rejected_max += 1
else:
# event passes time cut:
flag_pass_timecut = True
number_timecut_pass += 1
        # compare the time cut based on begin_time_pulse and end_time_pulse with the time cut based on nCaptureT:
if begin_time_pulse != 0 and begin_time_pulse <= time_limit and time_limit < nCaptureT < max_time_ncap:
print("----- event is rejected by time cut (begin_pulse = {0:.0f} ns), but would pass nCaptureT "
"cut (nCaptureT = {1:.0f} ns)".format(begin_time_pulse, nCaptureT))
elif begin_time_pulse != 0 and time_limit < begin_time_pulse < max_time_ncap and nCaptureT <= time_limit:
print("+++++ event pass time cut (begin_pulse = {0:.0f} ns), but would be rejected by nCaptureT cut "
"(nCaptureT = {1:.0f} ns)".format(begin_time_pulse, nCaptureT))
if end_time_pulse != 2000000 and end_time_pulse >= max_time_ncap and time_limit < nCaptureT < max_time_ncap:
print("------------ event is rejected by time cut (end_pulse = {0:.0f} ns), but would pass nCaptureT cut "
"(nCaptureT = {1:.0f} ns)".format(end_time_pulse, nCaptureT))
elif end_time_pulse != 2000000 and time_limit < end_time_pulse < max_time_ncap and nCaptureT >= max_time_ncap:
print("++++++++++++ event pass time cut (end_pulse = {0:.0f} ns), but would be rejected by nCaptureT cut "
"(nCaptureT = {1:.0f} ns)".format(end_time_pulse, nCaptureT))
#################################################
# plt.plot(bin_edges_hittime_all[:-1], npe_per_hittime_all)
# plt.show()
        # get index of bins_hittime corresponding to time_limit:
index_time_limit = int((time_limit - min_time) / binwidth)
# get index of bins_hittime corresponding to max_time_ncap:
index_max_time_ncap = int((max_time_ncap - min_time) / binwidth)
# take only hittime histogram from index_time_limit to index_max_time_ncap:
bin_edges_hittime = bin_edges_hittime_all[index_time_limit:(index_max_time_ncap)]
npe_per_hittime = npe_per_hittime_all[index_time_limit:(index_max_time_ncap)]
index_test = 0
############################################
# plt.plot(bin_edges_hittime, npe_per_hittime)
# plt.show()
number_nCapture_pass_e_cut = 0
number_pe_ncapture = 0
number_nCapture_pass_e_cut_smeared = 0
number_pe_ncapture_smeared = 0
        # analyze neutron capture signal (num_n_captures: number of signals with correct energy in the event (0 or 1);
        # index_test: index where the analysis of the hittime histogram starts;
        # number_pe_ncapture: number of p.e. in the neutron capture signal peak):
while index_test < len(npe_per_hittime):
# analyze delayed signal until you reach the end of the time window:
num_n_captures, index_test, num_pe_ncapture, begin_pulse, end_pulse = \
NC_background_functions.analyze_delayed_signal(npe_per_hittime, bin_edges_hittime, index_test,
threshold1_del,
threshold2_del, min_PE_delayed, max_PE_delayed, event)
number_nCapture_pass_e_cut += num_n_captures
number_pe_ncapture += num_pe_ncapture
if number_pe_ncapture != 0:
# apply energy resolution on the number of pe from ncapture:
# get sigma for this certain number of pe:
sigma_nPE = NC_background_functions.energy_resolution_pe(number_pe_ncapture)
# generate normal distributed random number with mean = number_pe_ncapture and sigma = sigma_nPE:
number_pe_ncapture_smeared = np.random.normal(number_pe_ncapture, sigma_nPE)
# check if smeared energy would pass the delayed energy cut:
if min_PE_delayed < number_pe_ncapture_smeared < max_PE_delayed:
number_nCapture_pass_e_cut_smeared = 1
else:
number_nCapture_pass_e_cut_smeared = 0
# do event pass time cut:
if flag_pass_timecut:
            # check if there is at least 1 neutron capture that passes the delayed energy cut in the delayed time window:
if number_nCapture_pass_e_cut_smeared > 0:
number_delayed_energy_pass += 1
                # check if there is exactly 1 signal pulse in the delayed time window that passes the delayed energy cut:
if number_nCapture_pass_e_cut_smeared == 1:
# only 1 neutron capture in time window with correct energy:
number_n_mult_pass += 1
                    # check if the distance between the reco. initial position and the reco. nCapture position is
                    # below distance_cut for events that pass all cuts above (n-mult, time, energy):
# calculate distance between reco. initial position and reco. nCapture position:
distance = np.sqrt((x_reco_init - x_reco_ncapture) ** 2 + (y_reco_init - y_reco_ncapture) ** 2 +
(z_reco_init - z_reco_ncapture) ** 2)
if distance < distance_cut:
number_distance_cut_pass += 1
else:
number_distance_cut_rejected += 1
                    # check if the distance between the real initial position and the real nCapture position
                    # (from MC truth) is below distance_cut:
# calculate distance between real initial and real nCapture position:
distance_MCtruth = np.sqrt((x_init - x_ncapture)**2 + (y_init - y_ncapture)**2 +
(z_init - z_ncapture)**2)
if distance_MCtruth < distance_cut:
number_distance_cut_pass_MCtruth += 1
else:
number_distance_cut_rejected_MCtruth += 1
else:
number_n_mult_rejected += 1
else:
number_delayed_energy_rejected += 1
if number_nCapture_pass_e_cut_smeared == 0 and number_nCapture_pass_e_cut == 1:
# smeared energy is rejected, but real energy would pass:
number_delayed_energy_tooless += 1
elif number_nCapture_pass_e_cut_smeared == 1 and number_nCapture_pass_e_cut == 0:
# smeared energy pass, but real energy would be rejected:
number_delayed_energy_toomuch += 1
if 2 < Qedep_capture < 3 and number_pe_ncapture_smeared < 2:
number_pe0_qedep2 += 1
elif 2 < Qedep_capture < 3 and 2000 < number_pe_ncapture_smeared < 4000:
number_pe3000_qedep2 += 1
elif 4 < Qedep_capture < 6 and number_pe_ncapture_smeared < 2:
number_pe0_qedep5 += 1
elif 4 < Qedep_capture < 6 and 5000 < number_pe_ncapture_smeared < 8000:
number_pe6500_qedep5 += 1
elif 8 < Qedep_capture < 12 and 10000 < number_pe_ncapture_smeared < 20000:
number_pe15000_qedep11 += 1
else:
print("rfile {0}, event = {1:d}".format(rfile, event))
# append number_pe_ncapture to array:
array_npe_from_hittime.append(number_pe_ncapture_smeared)
# append values to list:
array_npe.append(n_photons)
array_Qedep.append(Qedep_capture)
print("\ntotal number of events = {0:d}".format(number_of_events_total))
print("\nnumber of events within volume (r < {0:.0f} mm) = {1:d}".format(r_cut, number_of_events))
print("\nnumber of events that pass the time cut = {0:d}".format(number_timecut_pass))
print("number of events that are rejected by time cut = {0:d}".format(number_timecut_rejected))
print("\nnumber of events that pass time and delayed energy cut (min={1:.0f}, max={2:.0f}) = {0:d}"
.format(number_delayed_energy_pass, min_PE_delayed, max_PE_delayed))
print("number of events that are rejected by delayed energy cut (but pass time cut) = {0:d}"
.format(number_delayed_energy_rejected))
print("number of events, that falsely pass the delayed energy cut (counted too much) (smeared energy pass, but real "
"energy is rejected) = {0:d}".format(number_delayed_energy_toomuch))
print("number of events, that are falsely rejected by delayed energy cut (counted too less) (smeared energy rejected, "
"but real energy pass) = {0:d}".format(number_delayed_energy_tooless))
print("\nnumber of events that pass the neutron multiplicity cut (and also time and energy cut) = {0:d}"
.format(number_n_mult_pass))
print("number of events that are rejected by neutron multiplicity cut (but pass time and energy cut) = {0:d}"
.format(number_n_mult_rejected))
print("\nnumber of events that pass distance cut (and also multiplicity, time and energy cut) = {0:d}"
.format(number_distance_cut_pass))
print("number of events that are rejected by distance cut (but pass multiplicity, time and energy cut) = {0:d}"
.format(number_distance_cut_rejected))
print("\nnumber of events that pass distance cut with MC truth position (and also multiplicity, time and energy cut) = "
"{0:d}".format(number_distance_cut_pass_MCtruth))
print("number of events that are rejected by distance cut with MC truth position (but pass multiplicity, time and "
"energy cut) = {0:d}".format(number_distance_cut_rejected_MCtruth))
print("\nnumber_pe0_qedep2 = {0:d}".format(number_pe0_qedep2))
print("number_pe3000_qedep2 = {0:d}".format(number_pe3000_qedep2))
print("number_pe0_qedep5 = {0:d}".format(number_pe0_qedep5))
print("number_pe6500_qedep5 = {0:d}".format(number_pe6500_qedep5))
print("number_pe15000_qedep11 = {0:d}".format(number_pe15000_qedep11))
print("\nnumber of events with 0 neutron captures = {0:d}".format(number_no_ncapture))
h1 = plt.figure(1, figsize=(15, 8))
plt.plot(array_npe_from_hittime, array_Qedep, "xb", label="entries = {0:.0f}".format(len(array_Qedep)))
plt.vlines(min_PE_delayed, ymin=0.0, ymax=11.0, colors="r", linestyles="dashed",
label="min. nPE = {0:.0f} PE".format(min_PE_delayed))
plt.vlines(max_PE_delayed, ymin=0.0, ymax=11.0, colors="r", linestyles="dotted",
label="max. nPE = {0:.0f} PE".format(max_PE_delayed))
plt.ylim(ymin=0.0, ymax=11.0)
plt.xlabel("number of p.e. per event (calculated from hittime distribution)")
plt.ylabel("visible energy (quenched deposited energy) in MeV")
plt.title("Correlation of number of p.e. to energy for captured neutrons in JUNO detector\n"
"(within time window between {0:.0f} ns and {1:.0f} ms)".format(time_limit, max_time/1000000))
plt.grid()
plt.legend()
h2 = plt.figure(2, figsize=(15, 8))
plt.plot(array_npe_from_hittime, array_Qedep, "xb", label="entries = {0:.0f}".format(number_pe3000_qedep2))
plt.vlines(min_PE_delayed, ymin=0.0, ymax=11.0, colors="r", linestyles="dashed",
label="min. nPE = {0:.0f} PE".format(min_PE_delayed))
plt.vlines(max_PE_delayed, ymin=0.0, ymax=11.0, colors="r", linestyles="dotted",
label="max. nPE = {0:.0f} PE".format(max_PE_delayed))
plt.xlim(xmin=2100, xmax=3600)
plt.ylim(ymin=1.5, ymax=3.0)
plt.xlabel("number of p.e. per event (calculated from hittime distribution)")
plt.ylabel("visible energy (quenched deposited energy) in MeV")
plt.title("Correlation of number of p.e. to energy for captured neutrons on H\n"
"(within time window between {0:.0f} ns and {1:.0f} ms)".format(time_limit, max_time/1000000))
plt.grid()
plt.legend()
plt.show()
|
#!/usr/bin/python3
# Filename : feibonashulie.py
# Author by : Lily
def recurfibo(n):
    # recursive function: returns the n-th Fibonacci number
if n <= 1:
return n
else:
return (recurfibo(n-1)) + recurfibo(n-2)
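# An iterative variant for comparison (an illustrative addition, not part of the
# original task): the naive recursion above is exponential in n, this is linear.
def iterfibo(n):
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a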
# get user input
nterms = int(input("How many terms do you want? "))
if nterms <= 0:
    print("Please enter a positive number")
else:
    print("Fibonacci sequence:")
for i in range(nterms):
print(recurfibo(i))
|
#!/usr/bin/env python
import random
import rospy
from turtlesim.srv import Spawn
def spawn_init_hunter():
rospy.init_node('spawn_init_hunter', anonymous=False)
rospy.wait_for_service('spawn')
try:
serv_func = rospy.ServiceProxy('spawn', Spawn)
response = serv_func(x=random.uniform(0, 10),
y=random.uniform(0, 10),
theta=0,
name='hunter')
except rospy.ServiceException as e:
print "Service call failed: %s" % e
if __name__ == '__main__':
spawn_init_hunter()
|
def gcd(x, y):
    # Euclidean algorithm: repeatedly replace (x, y) with (y, x % y)
    while y != 0:
        x, y = y, x % y
    return x
gcd1=gcd(12,30)
print(gcd1)
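# sanity check against the standard library (an illustrative addition):
import math
assert gcd(12, 30) == math.gcd(12, 30)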
|
from common.run_method import RunMethod
import allure
@allure.step("通用/资源/更新资源信息")
def src_updateSrcInfo_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "通用/资源/更新资源信息"
url = f"/service-public/src/updateSrcInfo"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("通用/资源/获取资源信息")
def src_getSrcInfo_get(params=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "通用/资源/获取资源信息"
url = f"/service-public/src/getSrcInfo"
res = RunMethod.run_request("GET", url, params=params, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("通用/资源/录入资源信息")
def src_recordSrcInfo_post(params=None, body=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "通用/资源/录入资源信息"
url = f"/service-public/src/recordSrcInfo"
res = RunMethod.run_request("POST", url, params=params, body=body, header=header, return_json=return_json, name=name, **kwargs)
return res
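# A hypothetical usage sketch (the parameter/body fields below are assumptions
# for illustration, not taken from the real API contract):
# info = src_getSrcInfo_get(params={"srcId": "123"})
# src_updateSrcInfo_post(body={"srcId": "123", "srcName": "new-name"})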
|
from urllib import request
from bs4 import BeautifulSoup
url = "http://www.baidu.com"
rsp = request.urlopen(url)
content = rsp.read()
soup = BeautifulSoup(content,'lxml')
# BeautifulSoup decodes the document to Unicode automatically
content = soup.prettify()
print("==" * 12)
print(soup.head)
print("==" * 12)
print(soup.meta)
print("==" * 12)
print(soup.link)
print(soup.link.name)
print(soup.link.attrs)
print(soup.link.attrs['type'])
soup.link.attrs['type'] = 'hahaha'
print(soup.link)
print("==" * 12)
print(soup.title.name)
print(soup.title.attrs)
print(soup.title.string)
print("==" * 12)
print(soup.name)
print(soup.attrs)
print('**'*20)
print(soup.name)
print("==" * 12)
for node in soup.head.contents:
if node.name == "meta":
print(node)
print("==" * 12)
|
import math
from typing import Optional, Tuple
import torch
from torch import nn, Tensor
from torch.nn import init
from torch.nn.modules.utils import _pair
from torch.nn.parameter import Parameter
from torchvision.extension import _assert_has_ops
from ..utils import _log_api_usage_once
def deform_conv2d(
input: Tensor,
offset: Tensor,
weight: Tensor,
bias: Optional[Tensor] = None,
stride: Tuple[int, int] = (1, 1),
padding: Tuple[int, int] = (0, 0),
dilation: Tuple[int, int] = (1, 1),
mask: Optional[Tensor] = None,
) -> Tensor:
r"""
Performs Deformable Convolution v2, described in
`Deformable ConvNets v2: More Deformable, Better Results
<https://arxiv.org/abs/1811.11168>`__ if :attr:`mask` is not ``None`` and
Performs Deformable Convolution, described in
`Deformable Convolutional Networks
<https://arxiv.org/abs/1703.06211>`__ if :attr:`mask` is ``None``.
Args:
input (Tensor[batch_size, in_channels, in_height, in_width]): input tensor
offset (Tensor[batch_size, 2 * offset_groups * kernel_height * kernel_width, out_height, out_width]):
offsets to be applied for each position in the convolution kernel.
weight (Tensor[out_channels, in_channels // groups, kernel_height, kernel_width]): convolution weights,
split into groups of size (in_channels // groups)
bias (Tensor[out_channels]): optional bias of shape (out_channels,). Default: None
stride (int or Tuple[int, int]): distance between convolution centers. Default: 1
padding (int or Tuple[int, int]): height/width of padding of zeroes around
each image. Default: 0
dilation (int or Tuple[int, int]): the spacing between kernel elements. Default: 1
mask (Tensor[batch_size, offset_groups * kernel_height * kernel_width, out_height, out_width]):
masks to be applied for each position in the convolution kernel. Default: None
Returns:
Tensor[batch_sz, out_channels, out_h, out_w]: result of convolution
Examples::
>>> input = torch.rand(4, 3, 10, 10)
>>> kh, kw = 3, 3
>>> weight = torch.rand(5, 3, kh, kw)
>>> # offset and mask should have the same spatial size as the output
>>> # of the convolution. In this case, for an input of 10, stride of 1
>>> # and kernel size of 3, without padding, the output size is 8
>>> offset = torch.rand(4, 2 * kh * kw, 8, 8)
>>> mask = torch.rand(4, kh * kw, 8, 8)
>>> out = deform_conv2d(input, offset, weight, mask=mask)
        >>> print(out.shape)
        torch.Size([4, 5, 8, 8])
"""
if not torch.jit.is_scripting() and not torch.jit.is_tracing():
_log_api_usage_once(deform_conv2d)
_assert_has_ops()
out_channels = weight.shape[0]
use_mask = mask is not None
if mask is None:
mask = torch.zeros((input.shape[0], 1), device=input.device, dtype=input.dtype)
if bias is None:
bias = torch.zeros(out_channels, device=input.device, dtype=input.dtype)
stride_h, stride_w = _pair(stride)
pad_h, pad_w = _pair(padding)
dil_h, dil_w = _pair(dilation)
weights_h, weights_w = weight.shape[-2:]
_, n_in_channels, _, _ = input.shape
n_offset_grps = offset.shape[1] // (2 * weights_h * weights_w)
n_weight_grps = n_in_channels // weight.shape[1]
if n_offset_grps == 0:
raise RuntimeError(
"the shape of the offset tensor at dimension 1 is not valid. It should "
"be a multiple of 2 * weight.size[2] * weight.size[3].\n"
f"Got offset.shape[1]={offset.shape[1]}, while 2 * weight.size[2] * weight.size[3]={2 * weights_h * weights_w}"
)
return torch.ops.torchvision.deform_conv2d(
input,
weight,
offset,
mask,
bias,
stride_h,
stride_w,
pad_h,
pad_w,
dil_h,
dil_w,
n_weight_grps,
n_offset_grps,
use_mask,
)
class DeformConv2d(nn.Module):
"""
See :func:`deform_conv2d`.
"""
def __init__(
self,
in_channels: int,
out_channels: int,
kernel_size: int,
stride: int = 1,
padding: int = 0,
dilation: int = 1,
groups: int = 1,
bias: bool = True,
):
super().__init__()
_log_api_usage_once(self)
if in_channels % groups != 0:
raise ValueError("in_channels must be divisible by groups")
if out_channels % groups != 0:
raise ValueError("out_channels must be divisible by groups")
self.in_channels = in_channels
self.out_channels = out_channels
self.kernel_size = _pair(kernel_size)
self.stride = _pair(stride)
self.padding = _pair(padding)
self.dilation = _pair(dilation)
self.groups = groups
self.weight = Parameter(
torch.empty(out_channels, in_channels // groups, self.kernel_size[0], self.kernel_size[1])
)
if bias:
self.bias = Parameter(torch.empty(out_channels))
else:
self.register_parameter("bias", None)
self.reset_parameters()
def reset_parameters(self) -> None:
init.kaiming_uniform_(self.weight, a=math.sqrt(5))
if self.bias is not None:
fan_in, _ = init._calculate_fan_in_and_fan_out(self.weight)
bound = 1 / math.sqrt(fan_in)
init.uniform_(self.bias, -bound, bound)
def forward(self, input: Tensor, offset: Tensor, mask: Optional[Tensor] = None) -> Tensor:
"""
Args:
input (Tensor[batch_size, in_channels, in_height, in_width]): input tensor
offset (Tensor[batch_size, 2 * offset_groups * kernel_height * kernel_width, out_height, out_width]):
offsets to be applied for each position in the convolution kernel.
mask (Tensor[batch_size, offset_groups * kernel_height * kernel_width, out_height, out_width]):
masks to be applied for each position in the convolution kernel.
"""
return deform_conv2d(
input,
offset,
self.weight,
self.bias,
stride=self.stride,
padding=self.padding,
dilation=self.dilation,
mask=mask,
)
def __repr__(self) -> str:
s = (
f"{self.__class__.__name__}("
f"{self.in_channels}"
f", {self.out_channels}"
f", kernel_size={self.kernel_size}"
f", stride={self.stride}"
)
s += f", padding={self.padding}" if self.padding != (0, 0) else ""
s += f", dilation={self.dilation}" if self.dilation != (1, 1) else ""
s += f", groups={self.groups}" if self.groups != 1 else ""
s += ", bias=False" if self.bias is None else ""
s += ")"
return s
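# A minimal usage sketch for the module API (shapes mirror the functional
# example in the docstring above; illustrative, not part of the original file):
# conv = DeformConv2d(3, 5, kernel_size=3)
# x = torch.rand(4, 3, 10, 10)
# offset = torch.rand(4, 2 * 3 * 3, 8, 8)   # 2 * kh * kw offset channels
# out = conv(x, offset)                     # -> torch.Size([4, 5, 8, 8])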
|
#!/usr/bin/python
from PyQt4.QtCore import * # Qt core
from PyQt4.QtGui import * # Qt GUI interface
from PyQt4.uic import * # ui files realizer
from PyQt4 import QtGui, uic
from brewtroller import *
from mash import *
from functools import *
from matplotlib.backends import qt_compat
use_pyside = qt_compat.QT_API == qt_compat.QT_API_PYSIDE
if use_pyside:
from PySide import QtGui, QtCore
else:
from PyQt4 import QtGui, QtCore
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
import matplotlib.pyplot as plt
# for example / remove later
from numpy import arange, sin, pi
# for example / end
import sys # system support
class TehFigure:
def __init__(self, layoutwidget):
self.figure = Figure(figsize=(5,4), dpi=100) # magic numbers
self.axes1=self.figure.add_subplot(111) # more magic
self.axes2=self.figure.add_subplot(111) # more magic
self.canvas = FigureCanvas(self.figure)
self.lines1, = self.axes1.plot([],[], '-')
self.lines2, = self.axes2.plot([],[], '-')
self.axes1.set_ylim(0, 100)
self.axes2.set_ylim(0, 100)
self.compute_initial_figure()
self.canvas.updateGeometry()
layoutwidget.addWidget(self.canvas)
def compute_initial_figure(self):
pass
def update_plot(self, xdata, ydata):
# we need to split this somehow.
#Update data (with the new _and_ the old points)
self.lines1.set_xdata(xdata["HLT"])
self.lines1.set_ydata(ydata["HLT"])
self.lines2.set_xdata(xdata["MLT"])
self.lines2.set_ydata(ydata["MLT"])
#Need both of these in order to rescale
self.axes1.relim()
self.axes1.autoscale_view()
self.axes2.relim()
self.axes2.autoscale_view()
#We need to draw *and* flush
self.figure.canvas.draw()
self.figure.canvas.flush_events()
def annotate(self, x, y, text):
self.axes1.annotate(text, xy=(x,y),xytext=(x,50),arrowprops=dict(facecolor='black', arrowstyle="->"))
UI_FILE = 'lodda.ui' # qt ui descriptor
class XTun(Tun):
setpointsaved = True
manualsetpoint = 0
def setSetpointManually(self, value):
self.setpointsaved = False
self.setPointWidget.display(value)
self.setPointWidget.setStyleSheet("QLCDNumber{color:blue;}")
self.manualsetpoint = value
def __init__(self, w, bt, myid, setpoint, temperature, setbutton, dial):
Tun.__init__(self,bt, myid)
self.setPointWidget = setpoint
self.dialWidget = dial
self.temperatureWidget = temperature
w.connect(dial,SIGNAL("valueChanged(int)"), partial(XTun.setSetpointManually,self))
def update(self):
if self.setpointsaved:
Tun.update(self)
if self.newsetpoint != self.setpoint:
self.setPointWidget.display(self.newsetpoint)
self.setPointWidget.setStyleSheet("QLCDNumber{color:green;}")
self.setpoint = self.newsetpoint
self.manualsetpoint = self.setpoint
# print(self.setpoint)
if (self.newtemperature < 200) and (self.newtemperature > -20): # disconnected onewire results in weird numbers.
if self.newtemperature != self.temperature:
self.temperatureWidget.setDecMode()
self.temperatureWidget.display(self.newtemperature)
self.temperatureWidget.setStyleSheet("QLCDNumber{color:red;}")
self.temperature = self.newtemperature
else:
self.temperatureWidget.setHexMode()
self.temperatureWidget.display(int("dead",16))
class XProgramStatus:
oldstep=255
xdata = {}
ydata = {}
xdata["HLT"] = []
ydata["HLT"] = []
xdata["MLT"] = []
ydata["MLT"] = []
def __init__(self, w, bt,stepWidgets,nextwidget,stopalarmwidget,plot):
self.BrewStep = BrewStep(bt)
self.stepWidgets = stepWidgets
nextwidget.clicked.connect(self.nextstep)
stopalarmwidget.clicked.connect(self.stopalarm)
self.plot = plot
self.bt = bt
def stopalarm(self):
# code to change the alarm indicator back to inactive
self.bt.stopAlarm()
def nextstep(self):
self.bt.advStep()
def update(self):
# if the brewstep is 255 the system is idle
brewstep = self.BrewStep.getStep()
# need to update the progress bars and display which step is active
fullstatus = self.bt.getFullStatus()
# print ("step" + str(brewstep))
print (fullstatus)
# put text on the active step
if self.oldstep != brewstep:
self.plot.annotate(fullstatus["timestamp"], float(fullstatus["MLT"]["temp"])/100, self.BrewStep.stepnames[brewstep])
for key in self.stepWidgets:
if key == brewstep:
self.stepWidgets[key].setTextVisible(True)
else:
self.stepWidgets[key].setTextVisible(False)
self.oldstep = brewstep
# update temperature plots
self.xdata["HLT"].append(fullstatus["timestamp"])
self.ydata["HLT"].append(float(fullstatus["HLT"]["temp"])/100)
self.xdata["MLT"].append(fullstatus["timestamp"])
self.ydata["MLT"].append(float(fullstatus["MLT"]["temp"])/100)
# print self.ydata
self.plot.update_plot(self.xdata, self.ydata)
# then figure out what the progress is.
# for steps that are timer controlled, we can use the timer
# for filling steps, we can use the volume (not implemented, I don't have autofill)
# for steps that are temperature controlled, it is more difficult ("what is 100% temperature compared to what")
        # being celsius-centric we define the range for 100 % as zero to the desired target temperature
# techdebt: we must be able to configure if we use the HLT or MLT for preheat - this assumes that it is a HLT
if brewstep==2: # preheat strike water
# get the temperature for the HLT
# get the program step temperature
# calculate and set percentage in progress bar
progress = (float(fullstatus["HLT"]["temp"]))/float(fullstatus["HLT"]["setpoint"])
#print (progress)
self.stepWidgets[brewstep].setValue(int(100*progress))
if brewstep==3: # dough in
progress = (float(fullstatus["MLT"]["temp"]))/float(fullstatus["MLT"]["setpoint"])
#print (progress)
self.stepWidgets[brewstep].setValue(int(100*progress))
if brewstep==6: # acid rest
if fullstatus["mashtimer"]["status"] == 1: #mashing
                progdata = self.bt.getProgram(1) # this needs fixing: program number is hard-coded
progress = (float(fullstatus["mashtimer"]["value"])/float(progdata[brewstep]["time"])/60000)
self.stepWidgets[brewstep].setValue(int(100*progress))
        if brewstep==8 or brewstep==9: # the saccharification steps
self.stopalarm() # turn off the alarms as soon as possible - it is annoying. I don't really do anything here anyway
# logic to move one step forward automatically for some parts.
# I personally do not care about:
# delay, refill, and will skip ahead over those
if brewstep==1 or brewstep==4 or brewstep==14:
self.stopalarm()
self.nextstep()
print "skipped step "+str(brewstep)
class MainWin(QtGui.QMainWindow):
def __init__(self,bt):
QtGui.QMainWindow.__init__(self)
self.bt = bt
# Set up the user interface from Designer.
self.ui = uic.loadUi(UI_FILE)
self.ui.show()
sc = TehFigure(self.ui.plotlayout)
self.HLT = XTun(self.ui, bt, 0, self.ui.HLTSet, self.ui.HLTTemp, self.ui.toggleHLT, self.ui.HLTdial)
self.MLT = XTun(self.ui, bt, 1, self.ui.MLTSet, self.ui.MLTTemp, self.ui.toggleMLT, self.ui.MLTdial)
stepwidgets = {
2: self.ui.progresspreheat,
5: self.ui.progressdoughin,
6: self.ui.progressacidrest,
7: self.ui.progressproteinrest,
8: self.ui.progressacc1,
9: self.ui.progressacc2,
10: self.ui.progressmashout
}
self.programstatus = XProgramStatus(self.ui, bt, stepwidgets,self.ui.nextProgStep,self.ui.stopAlarm,sc)
# init callbacks
# callback function
def updateui(self):
self.MLT.update()
self.HLT.update()
self.programstatus.update()
### main
# create a connection to the btnic daemon
bt = BrewTroller("http://10.168.0.129/cgi-bin/btnic.cgi")
app = QtGui.QApplication(sys.argv)
window = MainWin(bt)
# set a timer that updates the status every ten seconds
timer = QTimer()
timer.timeout.connect(window.updateui)
timer.start(10000)
sys.exit(app.exec_())
|
import torch.nn as nn
from model import CRNN, ConvNet, LSTM_FIRST, LSTM_FULL, LSTM_LAST
from dataset import generate_loaders
class Config():
def __init__(self, **kwargs):
self.data_folder = None
self.num_threads = None
self.learning_rate = None
self.batch_size = None
self.num_epochs = None
self.test_to_all_ratio = None
self.results_dir = None
self.model = None
self.loss_criterion = None
self.lstm_output = None
self.shuffle = None
self.thresholds = None
for key,value in kwargs.items():
self.__dict__[key] = value
print('\n'.join(["{}={}".format(p,self.__dict__[p]) for p in self.__dict__]))
self.train_loader,self.val_loader,self.test_loader = generate_loaders(self)
def get_loaders(self):
return self.train_loader,self.val_loader,self.test_loader
def use_all_lstm_frames(self):
return self.lstm_output == LSTM_FULL and isinstance(self.model, CRNN)
class Dropouts():
def __init__(self, input_dropout, conv_dropout, lstm_dropout):
self.input_dropout = nn.Dropout(input_dropout)
self.conv_dropout = nn.Dropout(conv_dropout)
self.lstm_dropout = nn.Dropout(lstm_dropout)
|
class Person: # define the Person class
    def __init__(self, first_name="", last_name=""):
self.first_name = first_name
self.last_name = last_name
person1 = Person("John", "Smith") # ❶
print(person1.first_name, person1.last_name)
person2 = Person() # ❷
person2.first_name = "Robert" # ❸
person2.last_name = "Johnson"
print(person2.first_name, person2.last_name)
|
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import os
import urllib.request
import json
def download_imgs(urls, folder_name):
'''
Given a list of image URLs, download them to the folder folder_name.
'''
folder_name = folder_name.replace(':', '-')
os.mkdir(folder_name)
for i in range(len(urls)):
file_name = urls[i].split('/')[-1].replace('%20', ' ')
file_path = folder_name + '/' + file_name
urllib.request.urlretrieve(urls[i], file_path)
SAVED_PDF = 'D:/Downloads/Organization Profile.pdf'
SAVE_FOLDER = 'D:/GH/FinancialInfoFromCharityPortal/pdf_saves/'
def print_and_save_as_pdf(temple_name, info_type):
driver.execute_script('window.print();')
os.rename(SAVED_PDF,
cur_org_folder + temple_name.replace(':', '') + info_type)
# os.remove(SAVED_PDF)
def process_temple(temple_name):
'''
Search for the temple_name in the Charity Portal.
Proceed to download the PDF images.
'''
searchField = driver.find_element_by_id("ctl00_PlaceHolderMain_txtSearch")
searchField.clear()
searchField.send_keys(temple_name)
searchBtn = driver.find_element_by_id("ctl00_PlaceHolderMain_btnSearch")
searchBtn.click()
mainHandle = driver.current_window_handle
print(mainHandle)
# confirm there is one and only one result for the search
resultsCount = driver.find_element_by_id(
"ctl00_PlaceHolderMain_lblSearchCount")
# assert "1 records found for" in resultsCount.text
# click on "View Details" button
# //*[@id="ctl00_PlaceHolderMain_lstSearchResults_ctrl0_lblNameOfOrg"]
found_church = False
for i in range(5):
church_id = 'ctl00_PlaceHolderMain_lstSearchResults_ctrl' + str(
i) + '_lblNameOfOrg'
churchLabel = driver.find_element_by_id(church_id)
if churchLabel.text.replace('.', '') == temple_name.replace('.', ''):
found_church = True
viewDetailedBtn = driver.find_element_by_id(
"ctl00_PlaceHolderMain_lstSearchResults_ctrl" + str(i) +
"_btnViewDetails")
viewDetailedBtn.click()
break
if not found_church:
print('search for church', temple_name, 'failed')
return
# viewDetailedBtn = driver.find_element_by_id(
# "ctl00_PlaceHolderMain_lstSearchResults_ctrl0_btnViewDetails")
# viewDetailedBtn.click()
# switch to the newly popped up window/tab
driver.switch_to.window(driver.window_handles[1])
# driver.switch_to_windows("Organisation Profile")
# print Organisation Profile as PDF
print_and_save_as_pdf(temple_name, '_OP.pdf')
# click on "Financial Information"
fiTab = driver.find_element_by_link_text("Financial Information")
print(fiTab.text)
fiTab.click()
    # dismiss the alert prompt
driver.switch_to.alert.accept()
# click on "Financial Information" again
fiTab = driver.find_element_by_link_text("Financial Information")
print(fiTab.text)
fiTab.click()
# print Financial Information as profile
print_and_save_as_pdf(temple_name, "_FI.pdf")
driver.close()
driver.switch_to.window(
driver.window_handles[0]) # get back to the search page
chrome_options = webdriver.ChromeOptions()
settings = {
"recentDestinations": [{
"id": "Save as PDF",
"origin": "local",
"account": "",
}],
"selectedDestinationId":
"Save as PDF",
"version":
2
}
prefs = {
'printing.print_preview_sticky_settings.appState': json.dumps(settings)
}
chrome_options.add_experimental_option('prefs', prefs)
chrome_options.add_argument('--kiosk-printing')
driver = webdriver.Chrome(options=chrome_options)
# set the implicit wait time to as long as 300 seconds
# so that an accidentally long loading time of the web site
# won't break the program
driver.implicitly_wait(300)
# Open the page and search for the temple
driver.get(
"https://www.charities.gov.sg/_layouts/MCYSCPSearch/MCYSCPSearchResultsPage.aspx"
)
organization_list_files = ['Hindusm.txt', 'Islam-oldlist.txt', 'Others.txt']
if not os.path.exists(SAVE_FOLDER):
os.mkdir(SAVE_FOLDER)
cur_org_folder = None
for list_file in organization_list_files:
with open(list_file, 'r') as temple_list:
cur_org_folder = SAVE_FOLDER + list_file[:-4] + '/'
if not os.path.exists(cur_org_folder):
os.mkdir(cur_org_folder)
print('cur_org_folder is:', cur_org_folder)
for temple in temple_list:
# ' '.join(temple.split()) to remove the trailing spaces and newline characters
process_temple(' '.join(temple.split()))
with open('print_as_pdf_done_' + list_file + '.txt', 'a+') as f:
f.write(temple)
# close the browser window
driver.close()
driver.quit()
|
#!/usr/bin/env python3
import asyncio
import logging
import argparse
from os import linesep
from functools import reduce
from typing import Union
import config
import sync
from filestructs import FileStat
def get_args():
"""Get arguments from command line"""
parser = argparse.ArgumentParser(description='Rsync like thingy')
parser.add_argument('--dry-run', required=False,
action='store_true', default=False, dest='dry_run',
help='Does not do anything on the destination filesystem')
parser.add_argument('-v', '--verbose', required=False,
action='count', default=0, dest='vlevel',
help='Log level')
parser.add_argument('--dump-delta', required=False,
default=None, metavar='DELTA_PATH', dest='delta_path',
help='If a filename is specified, the delta is dumped into the file as JSON')
parser.add_argument('--max-depth', required=False,
type=int, default=10, dest='max_depth',
help='Max depth for both local and remote')
parser.add_argument('--adb-batch-size', required=False,
type=int, default=5, dest='adb_batch_size',
help='Maximum number of adb push to run in parallel')
parser.add_argument('--command-batch-size', required=False,
type=int, default=100, dest='command_batch_size',
help='Maximum number of arguments / chained commands to run at once')
parser.add_argument('--delete', required=False,
action='store_true', default=False, dest='delete_files',
help='Creates a "perfect mirror" of local at remote')
parser.add_argument('source', help='The source')
parser.add_argument('destination', help='The destination')
return parser.parse_args()
def accumulate(a: Union[int, FileStat], b: FileStat) -> int:
return a.size + b.size if isinstance(a, FileStat) else a + b.size
async def run(args):
"""Runs everything"""
diff = None
try:
diff = await sync.get_diff(
args.source, args.destination,
max_depth=args.max_depth, delete_files=args.delete_files,
delta_path=args.delta_path)
except BaseException as e:
logging.error(e)
return
if args.delete_files and len(diff.remove) > 0:
(await sync.del_remote(diff)
if diff.direction == sync.SyncDirection.MACHINE_TO_PHONE else
await sync.del_local(diff))
else:
logging.info('No files to delete, hooray!')
if len(diff.upload) > 0:
(await sync.send_to_remote(diff)
if diff.direction == sync.SyncDirection.MACHINE_TO_PHONE else
await sync.send_to_local(diff))
else:
logging.info('No files to send to remote, hooray!')
total_transferred = reduce(accumulate, diff.upload, 0)
total_deleted = reduce(accumulate, diff.remove, 0)
print(linesep * 2)
logging.info(f'Transfer completed! {total_transferred} byte(s) transferred, {total_deleted} byte(s) deleted.')
logging.info(f'Deleted {len(diff.remove)} file(s) and sent {len(diff.upload)} file(s).')
def main():
"""Runs the thing that runs everything"""
args = get_args()
levels = [logging.INFO, logging.DEBUG]
level = levels[min(len(levels) - 1, args.vlevel)]
logging.basicConfig(level=level)
config.dry_run = args.dry_run
config.adb_batch_size = args.adb_batch_size
config.command_batch_size = args.command_batch_size
asyncio.run(run(args))
|
class TwoSum:
    def __init__(self):
        # value -> number of times it was added
        self.counts = {}
    """
    @param: number: An integer
    @return: nothing
    """
    def add(self, number):
        self.counts[number] = self.counts.get(number, 0) + 1
    """
    @param: value: An integer
    @return: Find if there exists any pair of numbers which sum is equal to the value.
    """
    def find(self, value):
        for num in self.counts:
            complement = value - num
            if complement in self.counts and (complement != num or self.counts[num] > 1):
                return True
        return False
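# a minimal usage sketch (an illustrative addition):
# ts = TwoSum(); ts.add(2); ts.add(7)
# ts.find(9)  -> True
# ts.find(4)  -> False (2 was only added once)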
|
# -*- coding: utf-8 -*-
import json
import yaml
my_list = []
my_list.append("YAML")
my_list.append("JSON")
my_list.append({})
my_list[-1]["Cisco"] = True
my_list[-1]["Platform"] = "C819HWD-E-K9"
with open("list_file.yml", "w") as f:
f.write(yaml.dump(my_list, default_flow_style=False))
with open("list_file.json", "w") as f:
f.write(json.dumps(my_list))
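# round-trip check (an illustrative addition): loading the files back should
# reproduce my_list exactly
with open("list_file.yml") as f:
    assert yaml.safe_load(f) == my_list
with open("list_file.json") as f:
    assert json.load(f) == my_list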
|
"""
Definition of TreeNode:
"""
class TreeNode:
    def __init__(self, val=None):
        self.val = val
        self.left, self.right = None, None
class Solution:
    """
    @param: root: A Tree
    @return: Preorder in ArrayList which contains node values.
    """
    def createTree(self):
        root = TreeNode(1)
        root.left = TreeNode(2)
        root.right = TreeNode(3)
        root.left.left = TreeNode(4)
        root.left.right = TreeNode(5)
        return root
    def intern(self, root):
        # recursive preorder traversal: root, then left subtree, then right subtree
        if root is None:
            return []
        left = self.intern(root.left)
        right = self.intern(root.right)
        return [root.val] + left + right
    def preorderTraversal(self, root):
        return self.intern(root)
    def maxDepth(self, root):
        return self.help2(root, 0)
    def help2(self, root, level):
        # depth of the subtree rooted at root, given the depth of its parent
        if root is None:
            return level
        level += 1
        left = self.help2(root.left, level)
        right = self.help2(root.right, level)
        return max(left, right)
mysolution = Solution()
treeRoot = mysolution.createTree()
print(mysolution.preorderTraversal(treeRoot))
print(mysolution.maxDepth(treeRoot))
|
#!/usr/bin/env python3
#
# Adapted from litex/litex/litex/tools/litex_sim.py
#
# Copyright (c) 2015-2020 Florent Kermarrec <florent@enjoy-digital.fr>
# Copyright (c) 2020 Antmicro <www.antmicro.com>
# Copyright (c) 2017 Pierre-Olivier Vauboin <po@lambdaconcept>
# SPDX-License-Identifier: BSD-2-Clause
import os
import sys
import argparse
from migen import *
from litex.build.generic_platform import *
from litex.build.sim import SimPlatform
from litex.build.sim.config import SimConfig
from litex.soc.integration.common import *
from litex.soc.integration.soc_core import *
from litex.soc.integration.builder import *
from litex.soc.integration.soc import *
from litex.soc.cores.bitbang import *
from litex.soc.cores.cpu import CPUS
# IOs ----------------------------------------------------------------------------------------------
_io = [
("sys_clk", 0, Pins(1)),
("sys_rst", 0, Pins(1)),
("serial", 0,
Subsignal("source_valid", Pins(1)),
Subsignal("source_ready", Pins(1)),
Subsignal("source_data", Pins(8)),
Subsignal("sink_valid", Pins(1)),
Subsignal("sink_ready", Pins(1)),
Subsignal("sink_data", Pins(8)),
)
]
# Platform -----------------------------------------------------------------------------------------
class Platform(SimPlatform):
def __init__(self):
SimPlatform.__init__(self, "SIM", _io)
# Simulation SoC -----------------------------------------------------------------------------------
class SimSoC(SoCCore):
def __init__(self, **kwargs):
platform = Platform()
sys_clk_freq = int(1e6)
# SoCCore ----------------------------------------------------------------------------------
SoCCore.__init__(self, platform, clk_freq=sys_clk_freq,
ident = "LiteX Simulation",
ident_version = True,
**kwargs)
# CRG --------------------------------------------------------------------------------------
self.submodules.crg = CRG(platform.request("sys_clk"))
platform.add_debug(self, reset=0)
# Build --------------------------------------------------------------------------------------------
def generate_gtkw_savefile(builder, vns, trace_fst):
from litex.build.sim import gtkwave as gtkw
dumpfile = os.path.join(builder.gateware_dir, "sim.{}".format("fst" if trace_fst else "vcd"))
savefile = os.path.join(builder.gateware_dir, "sim.gtkw")
soc = builder.soc
with gtkw.GTKWSave(vns, savefile=savefile, dumpfile=dumpfile) as save:
save.clocks()
save.fsm_states(soc)
save.add(soc.bus.slaves["main_ram"], mappers=[gtkw.wishbone_sorter(), gtkw.wishbone_colorer()])
def sim_args(parser):
builder_args(parser)
soc_core_args(parser)
parser.add_argument("--threads", default=1, help="Set number of threads (default=1)")
parser.add_argument("--ram-init", default=None, help="ram_init file")
parser.add_argument("--opt-level", default="O3", help="Compilation optimization level")
def main():
parser = argparse.ArgumentParser(description="Generic LiteX SoC Simulation")
sim_args(parser)
args = parser.parse_args()
soc_kwargs = soc_core_argdict(args)
builder_kwargs = builder_argdict(args)
sys_clk_freq = int(1e6)
sim_config = SimConfig()
sim_config.add_clocker("sys_clk", freq_hz=sys_clk_freq)
# Configuration --------------------------------------------------------------------------------
cpu = CPUS[soc_kwargs.get("cpu_type", "vexriscv")]
if soc_kwargs["uart_name"] == "serial":
soc_kwargs["uart_name"] = "sim"
sim_config.add_module("serial2console", "serial")
soc_kwargs["integrated_main_ram_size"] = 0x10000000 # 256 MB
if args.ram_init is not None:
soc_kwargs["integrated_main_ram_init"] = get_mem_data(args.ram_init, cpu.endianness)
# SoC ------------------------------------------------------------------------------------------
soc = SimSoC(**soc_kwargs)
soc.add_constant("ROM_BOOT_ADDRESS", 0x40000000)
# Build/Run ------------------------------------------------------------------------------------
builder_kwargs["csr_csv"] = "csr.csv"
builder = Builder(soc, **builder_kwargs)
builder.build(
run = False,
threads = args.threads,
sim_config = sim_config,
opt_level = args.opt_level
)
if __name__ == "__main__":
main()
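# Typical invocations (illustrative; the flags are those defined in sim_args
# above, and the script name is a placeholder):
#   python3 litex_sim_min.py --threads 2 --opt-level O2
#   python3 litex_sim_min.py --ram-init firmware.bin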
|
# prints 2**(n+1) - 2 for an integer n read from stdin (i.e. 2 + 4 + ... + 2**n)
print((2 << int(input())) - 2)
|
# -*- coding: utf-8 -*-
import pylast
import keys as keys
API_KEY = keys.API_KEY
API_SECRET = keys.API_SECRET
username = keys.username
password_hash = pylast.md5(keys.password_hash)
network = pylast.LastFMNetwork(api_key = API_KEY)
per = pylast.PERIOD_OVERALL
class CustomUser(pylast.User):
def __init__(self, *args, **kwargs):
super(CustomUser, self).__init__(*args, **kwargs)
    def choose_period():
        # menu for selecting the chart period; sets the module-level globals
        # (renamed from `input` so it no longer shadows the builtin)
        periods = {
            1: (pylast.PERIOD_OVERALL, "Overall"),
            2: (pylast.PERIOD_7DAYS, "7 days"),
            3: (pylast.PERIOD_1MONTH, "1 month"),
            4: (pylast.PERIOD_3MONTHS, "3 months"),
            5: (pylast.PERIOD_6MONTHS, "6 months"),
            6: (pylast.PERIOD_12MONTHS, "12 months"),
        }
        while True:
            num = int(input("Possible values: \n1. PERIOD_OVERALL \n2. PERIOD_7DAYS \n3. PERIOD_1MONTH \n4. PERIOD_3MONTHS \n5. PERIOD_6MONTHS \n6. PERIOD_12MONTHS\n"))
            if 1 <= num <= 6:
                global per
                global userChoice
                print('Value selected')
                per, label = periods[num]
                print(label)
                userChoice = label + " \n"
                break
            else:
                print('number out of range')
def _get_things(
self, method, thing, thing_type, params=None, cacheable=True
):
"""Returns a list of the most played thing_types by this thing."""
from pylast import TopItem, _extract, _number
doc = self._request(
self.ws_prefix + "." + method, cacheable, params)
toptracks_node = doc.getElementsByTagName('toptracks')[0]
total_pages = int(toptracks_node.getAttribute('totalPages'))
seq = []
for node in doc.getElementsByTagName(thing):
title = _extract(node, "name")
artist = _extract(node, "name", 1)
mbid = _extract(node, "mbid")
playcount = _number(_extract(node, "playcount"))
thing = thing_type(artist, title, self.network)
thing.mbid = mbid
seq.append(TopItem(thing, playcount))
return seq, total_pages
#print("Possible values: \no PERIOD_OVERALL \no PERIOD_7DAYS \no PERIOD_1MONTH \no PERIOD_3MONTHS o PERIOD_6MONTHS \no PERIOD_12MONTHS")
#type(per)
    choose_period()
def get_top_tracks(
self, period=per, limit=1, page=1, cacheable=True):
"""Returns the top tracks played by a user.
* period: The period of time. Possible values:
o PERIOD_OVERALL
o PERIOD_7DAYS
o PERIOD_1MONTH
o PERIOD_3MONTHS
o PERIOD_6MONTHS
o PERIOD_12MONTHS
"""
params = self._get_params()
params['period'] = period
params['page'] = page
if limit:
params['limit'] = limit
return self._get_things(
"getTopTracks", "track", pylast.Track, params, cacheable)
my_user = CustomUser('Parth_M', network)
params = my_user._get_params()
params['period'] = pylast.PERIOD_1MONTH
params['limit'] = 1
page = 1
results,total_pages = my_user.get_top_tracks(page=page)
print (total_pages)
file = open("output.txt","w")
#print(userChoice)
#file.write(userChoice)
while len(results) != 0:
for track in results:
#lean = str(track.item.title + " - " + track.item.artist + track.weight)
print (track.item.title, track.item.artist, track.weight)
file.write(track.item.title + " - " + str(track.item.artist) + '\n')
    page += 1
    if page == 31:
        break
    results, total_pages = my_user.get_top_tracks(page=page)
file.close()
|
import json
import logging
import uuid
from abc import abstractmethod
from typing import Callable, List, Set
import sbol3
import labop
import uml
from labop.execution_engine import ExecutionEngine
l = logging.getLogger(__file__)
l.setLevel(logging.ERROR)
class ExecutionIssue(object):
pass
class ExecutionWarning(ExecutionIssue):
pass
class ExecutionError(ExecutionIssue):
pass
@abstractmethod
def activity_node_enabled(
self: uml.ActivityNode,
engine: labop.ExecutionEngine,
tokens: List[labop.ActivityEdgeFlow],
):
"""Check whether all incoming edges have values defined by a token in tokens and that all value pin values are
defined.
Parameters
----------
self: node to be executed
tokens: current list of pending edge flows
Returns
-------
bool if self is enabled
"""
protocol = self.protocol()
incoming_controls = {
e for e in protocol.incoming_edges(self) if isinstance(e, uml.ControlFlow)
}
incoming_objects = {
e for e in protocol.incoming_edges(self) if isinstance(e, uml.ObjectFlow)
}
# Need all incoming control tokens
control_tokens = {t.edge.lookup() for t in tokens if t.edge}
if len(incoming_controls) == 0:
tokens_present = True
else:
tokens_present = len(control_tokens.intersection(incoming_controls)) == len(
incoming_controls
)
if hasattr(self, "inputs"):
required_inputs = [
p
for i in self.behavior.lookup().get_required_inputs()
for p in self.input_pins(i.property_value.name)
]
required_value_pins = {
p for p in required_inputs if isinstance(p, uml.ValuePin)
}
# Validate values, see #120
for pin in required_value_pins:
if pin.value is None:
raise ValueError(
f"{self.behavior.lookup().display_id} Action has no ValueSpecification for Pin {pin.name}"
)
required_input_pins = {
p for p in required_inputs if not isinstance(p, uml.ValuePin)
}
pins_with_tokens = {
t.token_source.lookup().node.lookup() for t in tokens if not t.edge
}
# pin_in_edges = { i.identity: [edge for edge in self.ex.protocol.lookup().incoming_edges(i)] for i in node.inputs}
# # Every required input pin has a token on each incoming edge
# all([
# all([
# any([
# any([flow.lookup().edge == in_edge.identity
# for flow in token.token_source.lookup().incoming_flows]) # flows are into pins
# for token in tokens ]) # tokens going from pins to activity
# for in_edge in pin_in_edges[pin.identity] ]) # in_edges are going into pins
# for pin in required_input_pins])
# parameter_names = {pv.parameter.lookup().property_value.name for pv in ex.parameter_values}
# pins_with_params = {p for p in required_input_pins if p.name in parameter_names}
# satisfied_pins = set(list(pins_with_params) + list(pins_with_tokens))
input_pins_satisfied = required_input_pins.issubset(pins_with_tokens)
value_pins_assigned = all({i.value for i in required_value_pins})
if engine.permissive:
return tokens_present
else:
return tokens_present and input_pins_satisfied and value_pins_assigned
else:
return tokens_present
uml.ActivityNode.enabled = activity_node_enabled
def activity_node_get_protocol(node: uml.ActivityNode) -> labop.Protocol:
"""Find protocol object that contains the node.
Parameters
----------
node: node in a protocol
Returns
-------
protocol containing node
"""
parent = node.get_parent()
if isinstance(parent, labop.Protocol):
return parent
elif not isinstance(parent, sbol3.TopLevel):
return parent.protocol()
else:
raise Exception(f"Cannot find protocol for node: {node}")
uml.ActivityNode.protocol = activity_node_get_protocol
def input_pin_enabled(
self: uml.InputPin,
engine: labop.ExecutionEngine,
tokens: List[labop.ActivityEdgeFlow],
):
protocol = self.protocol()
incoming_controls = {
e for e in protocol.incoming_edges(self) if isinstance(e, uml.ControlFlow)
}
incoming_objects = {
e for e in protocol.incoming_edges(self) if isinstance(e, uml.ObjectFlow)
}
assert len(incoming_controls) == 0 # Pins do not receive control flow
# # Every incoming edge has a token
tokens_present = all(
[
any(
[token.edge == in_edge.identity for token in tokens]
) # tokens going from pins to activity
for in_edge in incoming_objects
]
) # in_edges are going into pins
return tokens_present or engine.permissive
uml.InputPin.enabled = input_pin_enabled
def value_pin_enabled(
self: uml.InputPin,
engine: labop.ExecutionEngine,
tokens: List[labop.ActivityEdgeFlow],
):
protocol = self.protocol()
incoming_controls = {
e for e in protocol.incoming_edges(self) if isinstance(e, uml.ControlFlow)
}
incoming_objects = {
e for e in protocol.incoming_edges(self) if isinstance(e, uml.ObjectFlow)
}
assert (
len(incoming_controls) == 0 and len(incoming_objects) == 0
) # ValuePins do not receive flow
return True
uml.ValuePin.enabled = value_pin_enabled
def output_pin_enabled(
self: uml.InputPin,
engine: labop.ExecutionEngine,
tokens: List[labop.ActivityEdgeFlow],
):
return False
uml.OutputPin.enabled = output_pin_enabled
def fork_node_enabled(
self: uml.ForkNode,
engine: labop.ExecutionEngine,
tokens: List[labop.ActivityEdgeFlow],
):
protocol = self.protocol()
incoming_controls = {
e for e in protocol.incoming_edges(self) if isinstance(e, uml.ControlFlow)
}
incoming_objects = {
e for e in protocol.incoming_edges(self) if isinstance(e, uml.ObjectFlow)
}
assert (len(incoming_controls) + len(incoming_objects)) == 1 and len(
tokens
) < 2 # At least one flow and no more than one token
# Need at least one incoming control token
tokens_present = {t.edge.lookup() for t in tokens if t.edge} == incoming_objects
return tokens_present
uml.ForkNode.enabled = fork_node_enabled
def final_node_enabled(
self: uml.FinalNode,
engine: labop.ExecutionEngine,
tokens: List[labop.ActivityEdgeFlow],
):
"""
Check whether there exists at least one token on an incoming edge.
Parameters
----------
self : uml.FinalNode
Node to execute
engine : labop.ExecutionEngine
the engine executing the node
tokens : List[labop.ActivityEdgeFlow]
tokens offered to node
Returns
-------
bool
is the node enabled
"""
protocol = self.protocol()
token_present = (
len(
{t.edge.lookup() for t in tokens if t.edge}.intersection(
protocol.incoming_edges(self)
)
)
> 0
)
return token_present
uml.FinalNode.enabled = final_node_enabled
def activity_parameter_node_enabled(
self: uml.ActivityParameterNode,
engine: labop.ExecutionEngine,
tokens: List[labop.ActivityEdgeFlow],
):
# FIXME update for permissive case where object token is not present
return len(tokens) <= 2 and all([t.get_target() == self for t in tokens])
uml.ActivityParameterNode.enabled = activity_parameter_node_enabled
def initial_node_enabled(
self: uml.InitialNode,
engine: labop.ExecutionEngine,
tokens: List[labop.ActivityEdgeFlow],
):
return len(tokens) == 1 and tokens[0].get_target() == self
uml.InitialNode.enabled = initial_node_enabled
def merge_node_enabled(
self: uml.MergeNode,
engine: labop.ExecutionEngine,
tokens: List[labop.ActivityEdgeFlow],
):
protocol = self.protocol()
return {t.edge.lookup() for t in tokens if t.edge} == protocol.incoming_edges(self)
uml.MergeNode.enabled = merge_node_enabled
def decision_node_enabled(
self: uml.DecisionNode,
engine: labop.ExecutionEngine,
tokens: List[labop.ActivityEdgeFlow],
):
# Cases:
# - primary is control, input_flow, no decision_input
# - primary is control, decision_input flow,
# - primary is object, no decision_input
# - primary is object, decision_input
protocol = self.protocol()
primary_flow = self.get_primary_incoming_flow(protocol)
primary_token = None
try:
primary_token = next(t for t in tokens if t.edge.lookup() == primary_flow)
except StopIteration:
pass
decision_input_token = None
try:
decision_input_token = next(
t
for t in tokens
if isinstance(t.edge.lookup().source.lookup(), uml.OutputPin)
and t.edge.lookup().source.lookup().get_parent().behavior
== self.decision_input
)
except StopIteration:
pass
decision_input_flow_token = None
try:
decision_input_flow_token = next(
t for t in tokens if t.edge.lookup() == self.decision_input_flow
)
except StopIteration:
pass
if isinstance(primary_flow, uml.ControlFlow):
# Need either decision_input_flow (if no decision_input) or flow from decision_input
if hasattr(self, "decision_input") and self.decision_input:
# Get flow from decision_input return
return primary_token is not None and decision_input_token is not None
else:
# Get flow from decision_input_flow
return primary_token and decision_input_flow_token
else: # primary is an object flow
if hasattr(self, "decision_input") and self.decision_input:
# Get flow from decision_input return
return decision_input_token
else:
# Get flow from primary
return primary_token
uml.DecisionNode.enabled = decision_node_enabled
class ProtocolExecutionExtractor:
    def extract(self, record):
        # convert an ActivityNodeExecution or ActivityEdgeFlow into a
        # serializable record; overridden by subclasses
        pass
class JSONProtocolExecutionExtractor(ProtocolExecutionExtractor):
def __init__(self) -> None:
super().__init__()
self.extraction_map = {
uml.CallBehaviorAction: self.extract_call_behavior_action
}
def extract_call_behavior_action(self, token: labop.ActivityEdgeFlow):
return super().extract(token)
def extract(self, record: labop.ActivityNodeExecution):
behavior_str = (
record.node.lookup().behavior
if isinstance(record.node.lookup(), uml.CallBehaviorAction)
else (
(
record.node.lookup().get_parent().behavior,
record.node.lookup().name,
)
if isinstance(record.node.lookup(), uml.Pin)
else ""
)
)
record_str = f"{record.node} ({behavior_str})"
return record_str
class StringProtocolExecutionExtractor(ProtocolExecutionExtractor):
def extract(self, record: labop.ActivityNodeExecution):
behavior_str = (
record.node.lookup().behavior
if isinstance(record.node.lookup(), uml.CallBehaviorAction)
else (
(
record.node.lookup().get_parent().behavior,
record.node.lookup().name,
)
if isinstance(record.node.lookup(), uml.Pin)
else ""
)
)
record_str = f"{record.node} ({behavior_str})"
return record_str
def backtrace(
self,
stack=None,
extractor: ProtocolExecutionExtractor = JSONProtocolExecutionExtractor(),
):
stack = self.executions if stack is None else stack
if len(stack) == 0:
return set([]), []
else:
tail = stack[-1]
head = stack[:-1]
nodes, head = self.backtrace(stack=head)
nodes.add(tail.node.lookup())
head += [extractor.extract(tail)]
return nodes, head
labop.ProtocolExecution.backtrace = backtrace
def token_info(self: labop.ActivityEdgeFlow):
return {
"edge_type": (type(self.edge.lookup()) if self.edge else None),
"source": self.token_source.lookup().node.lookup().identity,
"target": self.get_target().identity,
"behavior": (
self.get_target().behavior
if isinstance(self.get_target(), uml.CallBehaviorAction)
else None
),
}
labop.ActivityEdgeFlow.info = token_info
def protocol_execution_to_json(self):
"""
Convert Protocol Execution to JSON
"""
p_json = self.backtrace(extractor=JSONProtocolExecutionExtractor())[1]
return json.dumps(p_json)
labop.ProtocolExecution.to_json = protocol_execution_to_json
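# Illustrative use of the patched methods (assuming `ex` is a completed
# labop.ProtocolExecution):
# nodes, trace = ex.backtrace()
# print(ex.to_json())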
def protocol_execution_unbound_inputs(self):
unbound_input_parameters = [
p.node.lookup().parameter.lookup().property_value
for p in self.executions
if isinstance(p.node.lookup(), uml.ActivityParameterNode)
and p.node.lookup().parameter.lookup().property_value.direction
== uml.PARAMETER_IN
and p.node.lookup().parameter.lookup().property_value
not in [pv.parameter.lookup().property_value for pv in self.parameter_values]
]
return unbound_input_parameters
labop.ProtocolExecution.unbound_inputs = protocol_execution_unbound_inputs
def protocol_execution_unbound_outputs(self):
unbound_output_parameters = [
p.node.lookup().parameter.lookup().property_value
for p in self.executions
if isinstance(p.node.lookup(), uml.ActivityParameterNode)
and p.node.lookup().parameter.lookup().property_value.direction
== uml.PARAMETER_OUT
and p.node.lookup().parameter.lookup().property_value
not in [pv.parameter.lookup().property_value for pv in self.parameter_values]
]
return unbound_output_parameters
labop.ProtocolExecution.unbound_outputs = protocol_execution_unbound_outputs
def activity_node_execute(
self: uml.ActivityNode,
engine: ExecutionEngine,
node_outputs: Callable = None,
) -> List[labop.ActivityEdgeFlow]:
"""Execute a node in an activity, consuming the incoming flows and recording execution and outgoing flows
Parameters
----------
self: node to be executed
engine: execution engine (for execution state and side-effects)
Returns
-------
updated list of pending edge flows
"""
# Extract the relevant set of incoming flow values
inputs = [t for t in engine.tokens if self == t.get_target()]
record = self.execute_callback(engine, inputs)
engine.ex.executions.append(record)
new_tokens = record.next_tokens(engine, node_outputs)
if record:
for specialization in engine.specializations:
try:
specialization.process(record, engine.ex)
except Exception as e:
if not engine.failsafe:
raise e
l.error(
f"Could Not Process {record.name if record.name else record.identity}: {e}"
)
# return updated token list
return new_tokens, inputs
uml.ActivityNode.execute = activity_node_execute
@abstractmethod
def activity_node_execute_callback(
self: uml.ActivityNode,
engine: ExecutionEngine,
inputs: List[labop.ActivityEdgeFlow],
) -> labop.ActivityNodeExecution:
raise ValueError(
f"Do not know how to execute node {self.identity} of type {self.type_uri}"
)
uml.ActivityNode.execute_callback = activity_node_execute_callback
def activity_node_execution_next_tokens(
self: labop.ActivityNodeExecution,
engine: ExecutionEngine,
node_outputs: Callable,
) -> List[labop.ActivityEdgeFlow]:
node = self.node.lookup()
protocol = node.protocol()
out_edges = [
e
for e in protocol.edges
if self.node == e.source or self.node == e.source.lookup().get_parent().identity
]
edge_tokens = node.next_tokens_callback(self, engine, out_edges, node_outputs)
if edge_tokens:
# Save tokens in the protocol execution
engine.ex.flows += edge_tokens
else:
pass
self.check_next_tokens(edge_tokens, node_outputs, engine.sample_format)
# # Assume that unlinked output pins are possible output parameters for the protocol
# if isinstance(self, labop.CallBehaviorExecution):
# output_pins = self.node.lookup().outputs
# unlinked_output_pins = [p for p in output_pins if p not in {e.source.lookup() for e in out_edges}]
# possible_output_parameter_values = [labop.ParameterValue(parameter=self.node.lookup().pin_parameter(p.name),
# value=self.get_value())
# for p in unlinked_output_pins]
# engine.ex.parameter_values.extend(possible_output_parameter_values)
return edge_tokens
labop.ActivityNodeExecution.next_tokens = activity_node_execution_next_tokens
def activity_node_execution_check_next_tokens(
self: labop.ActivityNodeExecution,
tokens: List[labop.ActivityEdgeFlow],
node_outputs: Callable,
sample_format: str,
):
pass
labop.ActivityNodeExecution.check_next_tokens = (
activity_node_execution_check_next_tokens
)
def call_behavior_execution_check_next_tokens(
self: labop.CallBehaviorExecution,
tokens: List[labop.ActivityEdgeFlow],
node_outputs: Callable,
sample_format: str,
):
# ## Add the output values to the call parameter-values
linked_parameters = []
if not isinstance(self.node.lookup().behavior.lookup(), labop.Protocol):
        # Protocol invocations use output values for the linkage from
        # protocol-input to subprotocol-input, so don't add them as an output
        # parameter-value
for token in tokens:
edge = token.edge.lookup()
if isinstance(edge, uml.ObjectFlow):
source = edge.source.lookup()
parameter = self.node.lookup().pin_parameter(source.name)
linked_parameters.append(parameter)
parameter_value = uml.literal(token.value.get_value(), reference=True)
pv = labop.ParameterValue(parameter=parameter, value=parameter_value)
self.call.lookup().parameter_values += [pv]
    # Assume that unlinked output pins map to the parameter values of the call
unlinked_output_parameters = [
p
for p in self.node.lookup().behavior.lookup().parameters
if p.property_value.direction == uml.PARAMETER_OUT
and p.property_value.name
not in {lp.property_value.name for lp in linked_parameters}
]
# Handle unlinked output pins by attaching them to the call
possible_output_parameter_values = []
for p in unlinked_output_parameters:
value = self.get_parameter_value(p.property_value, node_outputs, sample_format)
reference = hasattr(value, "document") and value.document is not None
possible_output_parameter_values.append(
labop.ParameterValue(
parameter=p,
value=uml.literal(value, reference=reference),
)
)
self.call.lookup().parameter_values.extend(possible_output_parameter_values)
    ### Check that parameters sharing the same name are sane:
# 1. unbounded parameters can appear 0+ times
# 2. unique parameters must not have duplicate values (unbounded, unique means no pair of values is the same)
# 3. required parameters are present
pin_sets = {}
for pv in self.call.lookup().parameter_values:
name = pv.parameter.lookup().property_value.name
value = pv.value.get_value() if pv.value else None
if name not in pin_sets:
pin_sets[name] = []
pin_sets[name].append(value)
for p in self.node.lookup().behavior.lookup().parameters:
param = p.property_value
if (
param.lower_value
and param.lower_value.value > 0
and param.name not in pin_sets
):
raise ValueError(
f"Parameter '{param.name}' is required, but does not appear as a pin"
)
elif param.name in pin_sets:
count = len(pin_sets[param.name])
unique_count = len(set(pin_sets[param.name]))
if param.is_unique:
if count != unique_count:
raise ValueError(
f"{param.name} has {count} values, but only {unique_count} are unique"
)
if (param.lower_value and param.lower_value.value > count) or (
param.upper_value and param.upper_value.value < count
):
raise ValueError(
f"{param.name} has {count} values, but expecting [{param.lower_value.value}, {param.upper_value.value}] values"
)
labop.CallBehaviorExecution.check_next_tokens = (
call_behavior_execution_check_next_tokens
)
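# A standalone sketch of the multiplicity rules enforced above (toy data
# structures, not labop objects): required parameters must appear, "unique"
# parameters must not repeat a value, and value counts must fall in [lower, upper].
def _check_pin_multiplicity_sketch(pin_sets, lower, upper, is_unique):
    """pin_sets: name -> list of supplied values; lower/upper: name -> bound."""
    for name, lo in lower.items():
        if lo > 0 and name not in pin_sets:
            raise ValueError(f"Parameter '{name}' is required, but does not appear as a pin")
    for name, values in pin_sets.items():
        count, unique_count = len(values), len(set(values))
        if is_unique.get(name) and count != unique_count:
            raise ValueError(f"{name} has {count} values, but only {unique_count} are unique")
        lo, hi = lower.get(name, 0), upper.get(name, count)
        if not (lo <= count <= hi):
            raise ValueError(f"{name} has {count} values, but expecting [{lo}, {hi}] values")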
def activity_node_next_tokens_callback(
self: uml.ActivityNode,
source: labop.ActivityNodeExecution,
engine: ExecutionEngine,
out_edges: List[uml.ActivityEdge],
node_outputs: Callable,
) -> List[labop.ActivityEdgeFlow]:
edge_tokens = []
for edge in out_edges:
try:
edge_value = source.get_value(edge, node_outputs, engine.sample_format)
except Exception as e:
if engine.permissive:
edge_value = uml.literal(str(e))
else:
raise e
edge_tokens.append(
labop.ActivityEdgeFlow(
edge=edge,
token_source=source,
value=edge_value,
)
)
return edge_tokens
uml.ActivityNode.next_tokens_callback = activity_node_next_tokens_callback
def activity_node_execution_get_parameter_value(
self: labop.ActivityNodeExecution,
parameter: uml.Parameter,
node_outputs: Callable,
sample_format: str,
):
if node_outputs:
value = node_outputs(self, parameter)
elif hasattr(self.node.lookup().behavior.lookup(), "compute_output"):
value = self.compute_output(parameter, sample_format)
else:
value = f"{parameter.name}"
return value
labop.ActivityNodeExecution.get_parameter_value = (
activity_node_execution_get_parameter_value
)
def activity_node_execution_get_value(
self: labop.ActivityNodeExecution,
edge: uml.ActivityEdge,
node_outputs: Callable,
sample_format: str,
):
value = ""
node = self.node.lookup()
reference = False
if isinstance(edge, uml.ControlFlow):
value = "uml.ControlFlow"
elif isinstance(edge, uml.ObjectFlow):
if (
isinstance(node, uml.ActivityParameterNode)
and node.parameter.lookup().property_value.direction == uml.PARAMETER_OUT
):
parameter = node.parameter.lookup().property_value
value = self.incoming_flows[0].lookup().value
reference = True
elif isinstance(node, uml.OutputPin):
call_node = node.get_parent()
parameter = call_node.pin_parameter(
edge.source.lookup().name
).property_value
value = self.incoming_flows[0].lookup().value
reference = True
else:
parameter = node.pin_parameter(edge.source.lookup().name).property_value
value = self.get_parameter_value(parameter, node_outputs, sample_format)
reference = isinstance(value, sbol3.Identified) and value.identity != None
value = uml.literal(value, reference=reference)
return value
labop.ActivityNodeExecution.get_value = activity_node_execution_get_value
def initial_node_execute_callback(
self: uml.InitialNode,
engine: ExecutionEngine,
inputs: List[labop.ActivityEdgeFlow],
) -> labop.ActivityNodeExecution:
non_call_edge_inputs = {
i for i in inputs if i.edge.lookup() not in engine.ex.activity_call_edge
}
if len(non_call_edge_inputs) != 0:
raise ValueError(
f"Initial node must have zero inputs, but {self.identity} had {len(inputs)}"
)
record = labop.ActivityNodeExecution(node=self, incoming_flows=inputs)
return record
uml.InitialNode.execute_callback = initial_node_execute_callback
def flow_final_node_execute_callback(
self: uml.FlowFinalNode,
engine: ExecutionEngine,
inputs: List[labop.ActivityEdgeFlow],
) -> labop.ActivityNodeExecution:
# FlowFinalNode consumes tokens, but does not emit
record = labop.ActivityNodeExecution(node=self, incoming_flows=inputs)
return record
uml.FlowFinalNode.execute_callback = flow_final_node_execute_callback
def get_calling_behavior_execution(
self: labop.ActivityNodeExecution,
visited: Set[labop.ActivityNodeExecution] = None,
) -> labop.ActivityNodeExecution:
"""Look for the InitialNode for the Activity including self and identify a Calling CallBehaviorExecution (if present)
Args:
self (labop.ActivityNodeExecution): current search node
Returns:
labop.CallBehaviorExecution: CallBehaviorExecution
"""
node = self.node.lookup()
if visited is None:
visited = set({})
if isinstance(node, uml.InitialNode):
# Check if there is a CallBehaviorExecution incoming_flow
try:
caller = next(
n.lookup().token_source.lookup()
for n in self.incoming_flows
if isinstance(
n.lookup().token_source.lookup(),
labop.CallBehaviorExecution,
)
)
except StopIteration:
return None
return caller
else:
for incoming_flow in self.incoming_flows:
parent_activity_node = incoming_flow.lookup().token_source.lookup()
if (
parent_activity_node
and (parent_activity_node not in visited)
and parent_activity_node.node.lookup().protocol() == node.protocol()
):
visited.add(parent_activity_node)
calling_behavior_execution = (
parent_activity_node.get_calling_behavior_execution(visited=visited)
)
if calling_behavior_execution:
return calling_behavior_execution
return None
labop.ActivityNodeExecution.get_calling_behavior_execution = (
get_calling_behavior_execution
)
def final_node_execute_callback(
self: uml.FinalNode,
engine: ExecutionEngine,
inputs: List[labop.ActivityEdgeFlow],
) -> labop.ActivityNodeExecution:
# FinalNode completes the activity
record = labop.ActivityNodeExecution(node=self, incoming_flows=inputs)
return record
uml.FinalNode.execute_callback = final_node_execute_callback
def call_behavior_execution_complete_subprotocol(
self: labop.CallBehaviorExecution,
engine: ExecutionEngine,
):
# Map of subprotocol output parameter name to token
subprotocol_output_tokens = {
t.token_source.lookup().node.lookup().parameter.lookup().property_value.name: t
for t in engine.tokens
if isinstance(t.token_source.lookup().node.lookup(), uml.ActivityParameterNode)
and self == t.token_source.lookup().get_calling_behavior_execution()
}
# Out edges of calling behavior that need tokens corresponding to the
# subprotocol output tokens
calling_behavior_node = self.node.lookup()
calling_behavior_out_edges = [
e
for e in calling_behavior_node.protocol().edges
if calling_behavior_node == e.source.lookup()
or calling_behavior_node == e.source.lookup().get_parent()
]
new_tokens = [
labop.ActivityEdgeFlow(
token_source=(
subprotocol_output_tokens[e.source.lookup().name].token_source.lookup()
if isinstance(e, uml.ObjectFlow)
else self
),
edge=e,
value=(
uml.literal(
subprotocol_output_tokens[e.source.lookup().name].value,
reference=True,
)
if isinstance(e, uml.ObjectFlow)
else uml.literal("uml.ControlFlow")
),
)
for e in calling_behavior_out_edges
]
# Remove output_tokens from tokens (consumed by return from subprotocol)
engine.tokens = [
t for t in engine.tokens if t not in subprotocol_output_tokens.values()
]
engine.blocked_nodes.remove(self)
return new_tokens
labop.CallBehaviorExecution.complete_subprotocol = (
call_behavior_execution_complete_subprotocol
)
def final_node_next_tokens_callback(
self: uml.FinalNode,
source: labop.ActivityNodeExecution,
engine: ExecutionEngine,
out_edges: List[uml.ActivityEdge],
node_outputs: Callable,
) -> List[labop.ActivityEdgeFlow]:
calling_behavior_execution = source.get_calling_behavior_execution()
if calling_behavior_execution:
new_tokens = calling_behavior_execution.complete_subprotocol(engine)
return new_tokens
else:
return []
uml.FinalNode.next_tokens_callback = final_node_next_tokens_callback
def fork_node_execute_callback(
self: uml.ForkNode,
engine: ExecutionEngine,
inputs: List[labop.ActivityEdgeFlow],
) -> labop.ActivityNodeExecution:
if len(inputs) != 1:
raise ValueError(
f"Fork node must have precisely one input, but {self.identity} had {len(inputs)}"
)
record = labop.ActivityNodeExecution(node=self, incoming_flows=inputs)
return record
uml.ForkNode.execute_callback = fork_node_execute_callback
def fork_node_next_tokens_callback(
self: uml.ForkNode,
source: labop.ActivityNodeExecution,
engine: ExecutionEngine,
out_edges: List[uml.ActivityEdge],
node_outputs: Callable,
) -> List[labop.ActivityEdgeFlow]:
[incoming_flow] = source.incoming_flows
incoming_value = incoming_flow.lookup().value
edge_tokens = [
labop.ActivityEdgeFlow(
edge=edge,
token_source=source,
value=uml.literal(incoming_value, reference=True),
)
for edge in out_edges
]
return edge_tokens
uml.ForkNode.next_tokens_callback = fork_node_next_tokens_callback
def control_node_execute_callback(
    self: uml.ControlNode,
engine: ExecutionEngine,
inputs: List[labop.ActivityEdgeFlow],
) -> labop.ActivityNodeExecution:
record = labop.ActivityNodeExecution(node=self, incoming_flows=inputs)
return record
uml.ControlNode.execute_callback = control_node_execute_callback
def decision_node_next_tokens_callback(
self: uml.DecisionNode,
source: labop.ActivityNodeExecution,
engine: ExecutionEngine,
out_edges: List[uml.ActivityEdge],
node_outputs: Callable,
) -> List[labop.ActivityEdgeFlow]:
try:
decision_input_flow_token = next(
t
for t in source.incoming_flows
if t.lookup().edge == self.decision_input_flow
).lookup()
decision_input_flow = decision_input_flow_token.edge.lookup()
decision_input_value = decision_input_flow_token.value
except StopIteration as e:
decision_input_flow_token = None
decision_input_value = None
decision_input_flow = None
try:
decision_input_return_token = next(
t
for t in source.incoming_flows
if isinstance(t.lookup().edge.lookup().source.lookup(), uml.OutputPin)
and t.lookup().token_source.lookup().node.lookup().behavior
== self.decision_input
).lookup()
decision_input_return_flow = decision_input_return_token.edge.lookup()
decision_input_return_value = decision_input_return_token.value
except StopIteration as e:
decision_input_return_token = None
decision_input_return_value = None
decision_input_return_flow = None
try:
primary_input_flow_token = next(
t
for t in source.incoming_flows
if t.lookup() != decision_input_flow_token
and t.lookup() != decision_input_return_token
).lookup()
primary_input_flow = primary_input_flow_token.edge.lookup()
primary_input_value = primary_input_flow_token.value
except StopIteration as e:
        primary_input_flow_token = None
        primary_input_flow = None
        primary_input_value = None
# Cases to evaluate guards of decision node:
# 1. primary_input_flow is ObjectFlow, no decision_input, no decision_input_flow:
# Use primary_input_flow token to decide if guard is satisfied
# 2. primary_input_flow is any, no decision_input, decision_input_flow present:
# Use decision_input_flow token to decide if guard is satisfied
# 3. primary_input_flow is ControlFlow, decision_input present, no decision_input_flow:
# Use decision_input return value to decide if guard is satisfied (decision_input has no params)
# 4. primary_input_flow is ControlFlow, decision_input present, decision_input_flow present:
# Use decision_input return value to decide if guard is satisfied (decision_input has decision_input_flow supplied parameter)
# 5. primary_input_flow is ObjectFlow, decision_input present, no decision_input_flow:
# Use decision_input return value to decide if guard is satisfied (decision_input has primary_input_flow supplied parameter)
# 6. primary_input_flow is ObjectFlow, decision_input present, decision_input_flow present:
# Use decision_input return value to decide if guard is satisfied (decision_input has primary_input_flow and decision_input_flow supplied parameters)
try:
else_edge = next(
edge for edge in out_edges if edge.guard.value == uml.DECISION_ELSE
)
except StopIteration as e:
else_edge = None
non_else_edges = [edge for edge in out_edges if edge != else_edge]
def satisfy_guard(value, guard):
if (value is None) or isinstance(value, uml.LiteralNull):
return (guard is None) or isinstance(guard, uml.LiteralNull)
elif (guard is None) or isinstance(guard, uml.LiteralNull):
return False
else:
if isinstance(value.value, str):
return value.value == str(guard.value)
else:
return value.value == guard.value
if hasattr(self, "decision_input") and self.decision_input:
# Cases: 3, 4, 5, 6
# The cases are combined because the cases refer to the inputs of the decision_input behavior
# use decision_input_value to eval guards
active_edges = [
edge
for edge in non_else_edges
if satisfy_guard(decision_input_return_value, edge.guard)
]
else:
# Cases: 1, 2
if decision_input_flow:
# Case 2
# use decision_input_flow_token to eval guards
active_edges = [
edge
for edge in non_else_edges
if satisfy_guard(decision_input_flow_token.value, edge.guard)
]
elif primary_input_flow and isinstance(primary_input_flow, uml.ObjectFlow):
# Case 1
# use primary_input_flow_token to eval guards
# Outgoing tokens are uml.ObjectFlow
active_edges = [
edge
for edge in non_else_edges
if satisfy_guard(primary_input_flow_token.value, edge.guard)
]
else:
raise Exception(
"ERROR: Cannot evaluate DecisionNode with no decision_input, no decision_input_flow, and a None or uml.ControlFlow primary_input"
)
assert else_edge or len(active_edges) > 0
if len(active_edges) > 0:
# FIXME always take first active edge, but could be different.
active_edge = active_edges[0]
else:
active_edge = else_edge
# Pick the value of the incoming_flow that corresponds to the primary_incoming edge
edge_tokens = [
labop.ActivityEdgeFlow(
edge=active_edge,
token_source=source,
value=uml.literal(primary_input_value),
)
]
return edge_tokens
uml.DecisionNode.next_tokens_callback = decision_node_next_tokens_callback
def activity_parameter_node_execute_callback(
self: uml.ActivityParameterNode,
engine: ExecutionEngine,
inputs: List[labop.ActivityEdgeFlow],
) -> labop.ActivityNodeExecution:
record = labop.ActivityNodeExecution(node=self, incoming_flows=inputs)
if self.parameter.lookup().property_value.direction == uml.PARAMETER_OUT:
try:
values = [
i.value.get_value()
for i in inputs
if isinstance(i.edge.lookup(), uml.ObjectFlow)
]
if len(values) == 1:
value = uml.literal(values[0], reference=True)
elif len(values) == 0:
value = uml.literal(self.parameter.lookup().property_value.name)
engine.ex.parameter_values += [
labop.ParameterValue(
parameter=self.parameter.lookup(),
value=value,
)
]
except Exception as e:
if not engine.permissive:
raise ValueError(
f"ActivityParameterNode execution for {self.identity} does not have an ObjectFlow token input present."
)
return record
uml.ActivityParameterNode.execute_callback = activity_parameter_node_execute_callback
def activity_parameter_node_next_tokens_callback(
self: uml.ActivityParameterNode,
source: labop.ActivityNodeExecution,
engine: ExecutionEngine,
out_edges: List[uml.ActivityEdge],
node_outputs: Callable,
) -> List[labop.ActivityEdgeFlow]:
if self.parameter.lookup().property_value.direction == uml.PARAMETER_IN:
try:
parameter_value = next(
pv.value
for pv in engine.ex.parameter_values
if pv.parameter == self.parameter
)
except StopIteration as e:
try:
parameter_value = self.parameter.lookup().property_value.default_value
except Exception as e:
raise Exception(
f"ERROR: Could not find input parameter {self.parameter.lookup().property_value.name} value and/or no default_value."
)
edge_tokens = [
labop.ActivityEdgeFlow(
edge=edge,
token_source=source,
value=uml.literal(value=parameter_value, reference=True),
)
for edge in out_edges
]
else:
calling_behavior_execution = source.get_calling_behavior_execution()
if calling_behavior_execution:
return_edge = uml.ObjectFlow(
source=self,
target=calling_behavior_execution.node.lookup().output_pin(
self.parameter.lookup().property_value.name
),
)
engine.ex.activity_call_edge += [return_edge]
edge_tokens = [
labop.ActivityEdgeFlow(
edge=return_edge,
token_source=source,
value=source.get_value(
return_edge, node_outputs, engine.sample_format
)
# uml.literal(source.incoming_flows[0].lookup().value)
)
]
else:
edge_tokens = []
return edge_tokens
uml.ActivityParameterNode.next_tokens_callback = (
activity_parameter_node_next_tokens_callback
)
def call_behavior_action_execute_callback(
self: uml.CallBehaviorAction,
engine: ExecutionEngine,
inputs: List[labop.ActivityEdgeFlow],
) -> labop.ActivityNodeExecution:
record = labop.CallBehaviorExecution(node=self, incoming_flows=inputs)
completed_normally = True
# Get the parameter values from input tokens for input pins
input_pin_values = {
token.token_source.lookup().node.lookup().identity: []
for token in inputs
if not token.edge
}
for token in inputs:
if not token.edge:
name = token.token_source.lookup().node.lookup().identity
input_pin_values[name].append(uml.literal(token.value, reference=True))
# Get Input value pins
value_pin_values = {}
# Validate Pin values, see #130
# Although enabled_activity_node method also validates Pin values,
# it only checks required Pins. This check is necessary to check optional Pins.
required_inputs = [
p
for i in self.behavior.lookup().get_required_inputs()
for p in self.input_pins(i.property_value.name)
]
for pin in [i for i in self.inputs if i.identity not in input_pin_values]:
value = pin.value if hasattr(pin, "value") else None
if value is None:
if pin in required_inputs:
completed_normally = False
if engine.permissive:
engine.issues[engine.ex.display_id].append(
ExecutionError(
f"{self.behavior.lookup().display_id} Action has no ValueSpecification for Pin {pin.name}"
)
)
value = uml.literal("Error")
else:
raise ValueError(
f"{self.behavior.lookup().display_id} Action has no ValueSpecification for Pin {pin.name}"
)
value_pin_values[pin.identity] = value
        # Check that the pin corresponds to an input parameter; this raises an exception if it does not exist.
parameter = self.pin_parameter(pin.name)
# Convert References
value_pin_values = {
k: [uml.literal(value=v.get_value(), reference=True)]
for k, v in value_pin_values.items()
if v is not None
}
pin_values = {**input_pin_values, **value_pin_values} # merge the dicts
parameter_values = [
labop.ParameterValue(
parameter=self.pin_parameter(pin.name),
value=value,
)
for pin in self.inputs
for value in (pin_values[pin.identity] if pin.identity in pin_values else [])
]
# parameter_values.sort(
# key=lambda x: engine.ex.document.find(x.parameter).index
# )
call = labop.BehaviorExecution(
f"execute_{engine.next_id()}",
parameter_values=parameter_values,
        completed_normally=completed_normally,
start_time=engine.get_current_time(), # TODO: remove str wrapper after sbol_factory #22 fixed
end_time=engine.get_current_time(), # TODO: remove str wrapper after sbol_factory #22 fixed
consumed_material=[],
) # FIXME handle materials
record.call = call
engine.ex.document.add(call)
return record
uml.CallBehaviorAction.execute_callback = call_behavior_action_execute_callback
def call_behavior_action_next_tokens_callback(
self: uml.CallBehaviorAction,
source: labop.ActivityNodeExecution,
engine: ExecutionEngine,
out_edges: List[uml.ActivityEdge],
node_outputs: Callable,
) -> List[labop.ActivityEdgeFlow]:
if isinstance(self.behavior.lookup(), labop.Protocol):
if engine.is_asynchronous:
# Push record onto blocked nodes to complete
engine.blocked_nodes.add(source)
# new_tokens are those corresponding to the subprotocol initiating_nodes
init_nodes = self.behavior.lookup().initiating_nodes()
def get_invocation_edge(r, n):
invocation = {}
value = None
if isinstance(n, uml.InitialNode):
try:
invocation["edge"] = uml.ControlFlow(source=r.node, target=n)
engine.ex.activity_call_edge += [invocation["edge"]]
source = next(
i
for i in r.incoming_flows
if hasattr(i.lookup(), "edge")
and i.lookup().edge
and isinstance(i.lookup().edge.lookup(), uml.ControlFlow)
)
invocation["value"] = uml.literal(
source.lookup().value, reference=True
)
except StopIteration as e:
pass
elif isinstance(n, uml.ActivityParameterNode):
# if ActivityParameterNode is a ValuePin of the calling behavior, then it won't be an incoming flow
source = self.input_pin(n.parameter.lookup().property_value.name)
invocation["edge"] = uml.ObjectFlow(source=source, target=n)
engine.ex.activity_call_edge += [invocation["edge"]]
# ex.protocol.lookup().edges.append(invocation['edge'])
if isinstance(source, uml.ValuePin):
invocation["value"] = uml.literal(source.value, reference=True)
else:
try:
source = next(
iter(
[
i
for i in r.incoming_flows
if i.lookup()
.token_source.lookup()
.node.lookup()
.name
== n.parameter.lookup().property_value.name
]
)
)
# invocation['edge'] = uml.ObjectFlow(source=source.lookup().token_source.lookup().node.lookup(), target=n)
# engine.ex.activity_call_edge += [invocation['edge']]
# ex.protocol.lookup().edges.append(invocation['edge'])
invocation["value"] = uml.literal(
source.lookup().value, reference=True
)
except StopIteration as e:
pass
return invocation
new_tokens = [
labop.ActivityEdgeFlow(
token_source=source,
**get_invocation_edge(source, init_node),
)
for init_node in init_nodes
]
# engine.ex.flows += new_tokens
if len(new_tokens) == 0:
            # Subprotocol has no body, so complete the CallBehaviorAction here; otherwise a FinalNode would have completed it.
new_tokens = source.complete_subprotocol(engine)
else: # is synchronous execution
# Execute subprotocol
self.execute(
self.behavior.lookup(),
engine.ex.association[0].agent.lookup(),
id=f"{engine.display_id}{uuid.uuid4()}".replace("-", "_"),
parameter_values=[],
)
else:
new_tokens = uml.ActivityNode.next_tokens_callback(
self, source, engine, out_edges, node_outputs
)
return new_tokens
uml.CallBehaviorAction.next_tokens_callback = call_behavior_action_next_tokens_callback
def pin_execute_callback(
self: uml.Pin, engine: ExecutionEngine, inputs: List[labop.ActivityEdgeFlow]
) -> labop.ActivityNodeExecution:
record = labop.ActivityNodeExecution(node=self, incoming_flows=inputs)
return record
uml.Pin.execute_callback = pin_execute_callback
def input_pin_next_tokens_callback(
self: uml.InputPin,
source: labop.ActivityNodeExecution,
engine: ExecutionEngine,
out_edges: List[uml.ActivityEdge],
node_outputs: Callable,
) -> List[labop.ActivityEdgeFlow]:
assert len(source.incoming_flows) == len(
engine.ex.protocol.lookup().incoming_edges(source.node.lookup())
)
incoming_flows = [f.lookup() for f in source.incoming_flows]
pin_values = [
uml.literal(value=incoming_flow.value, reference=True)
for incoming_flow in incoming_flows
]
edge_tokens = [
labop.ActivityEdgeFlow(edge=None, token_source=source, value=pin_value)
for pin_value in pin_values
]
return edge_tokens
uml.InputPin.next_tokens_callback = input_pin_next_tokens_callback
def activity_node_execution_get_token_source(
self: labop.ActivityNodeExecution,
parameter: uml.Parameter,
target: labop.ActivityNodeExecution = None,
) -> labop.CallBehaviorExecution:
# Get a ActivityNodeExecution that produced this token assigned to this ActivityNodeExecution parameter.
# The immediate predecessor will be the token_source
node = self.node.lookup()
print(self.identity + " " + node.identity + " param = " + str(parameter))
if (
isinstance(node, uml.InputPin)
or isinstance(node, uml.ForkNode)
or isinstance(node, uml.CallBehaviorAction)
):
main_target = target if target else self
for flow in self.incoming_flows:
source = flow.lookup().get_token_source(parameter, target=main_target)
if source:
return source
return None
else:
return self
labop.ActivityNodeExecution.get_token_source = activity_node_execution_get_token_source
def call_behavior_execution_get_token_source(
self: labop.CallBehaviorExecution,
parameter: uml.Parameter,
target: labop.ActivityNodeExecution = None,
) -> labop.CallBehaviorExecution:
node = self.node.lookup()
print(self.identity + " " + node.identity + " param = " + str(parameter))
if parameter:
return labop.ActivityNodeExecution.get_token_source(
self, parameter, target=target
)
else:
return self
labop.CallBehaviorExecution.get_token_source = call_behavior_execution_get_token_source
def activity_edge_flow_get_token_source(
self: labop.ActivityEdgeFlow,
parameter: uml.Parameter,
target: labop.ActivityNodeExecution = None,
) -> labop.CallBehaviorExecution:
node = self.token_source.lookup().node.lookup()
print(self.identity + " src = " + node.identity + " param = " + str(parameter))
if parameter and isinstance(node, uml.InputPin):
if node == target.node.lookup().input_pin(parameter.name):
return self.token_source.lookup().get_token_source(None, target=target)
else:
return None
elif not parameter:
return self.token_source.lookup().get_token_source(None, target=target)
else:
return None
labop.ActivityEdgeFlow.get_token_source = activity_edge_flow_get_token_source
|
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'test_midl_include_dirs',
'type': 'executable',
'sources': [
'hello.cc',
'subdir/foo.idl',
'subdir/bar.idl',
],
'midl_include_dirs': [
'subdir',
],
'msvs_settings': {
'VCMIDLTool': {
'OutputDirectory': '<(INTERMEDIATE_DIR)',
'DLLDataFileName': '$(InputName)_dlldata.h',
},
},
},
],
}
|
import os
# os.environ['PYSPARK_SUBMIT_ARGS'] = '\
# --packages org.apache.spark:spark-streaming-kafka-0-8_2.11:2.2.0,com.datastax.spark:spark-cassandra-connector_2.11:2.0.1 \
# pyspark-shell'
#
# from pyspark import SparkConf, SparkContext
# from pyspark.streaming import StreamingContext
# from pyspark.streaming.kafka import KafkaUtils
#
# from pyspark.sql import SQLContext, SparkSession, Row
# from pyspark.sql.types import *
import sys
import uuid
import logging
import datetime
import json
from cassandra.cluster import Cluster
from cassandra.query import ordered_dict_factory
from kafka import KafkaConsumer
from time import sleep
#logging.basicConfig(level=logging.DEBUG)
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/shared'))
from config import *
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/shared/predictions'))
from forecast import Forecast
WAIT_TIME_IN_SECOND = 60
class Streaming(object):
def run(self):
sleep(30)
consumer = self._load_kafka_consumer()
self._consume_message(consumer)
consumer.close()
def _consume_message(self, consumer):
for msg in consumer:
self._save(msg.value)
self._predict_weather()
def _save(self, data):
session = self._load_cassandra_session()
session.row_factory = ordered_dict_factory
unix_datetime = None if data.get('dt') is None else datetime.datetime.fromtimestamp(data.get('dt'))
session.execute('''
INSERT INTO {} (id, dt, dt_iso, measured_at, clouds_all, condition_id, condition_details, condition, city_name, city_id, temperature, temperature_max, temperature_min, rain_3h, snow_3h, wind_speed, wind_degree, humidity, pressure)
VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
'''.format(RAW_DATA_TABLE_NAME), (
str(uuid.uuid4()),
data.get('dt'),
unix_datetime,
unix_datetime,
data.get('clouds_all'),
data.get('condition_id'),
data.get('condition_details'),
data.get('condition'),
data.get('city_name'),
data.get('city_id'),
data.get('temperature'),
data.get('temperature_max'),
data.get('temperature_min'),
data.get('rain_3h'),
data.get('snow_3h'),
data.get('wind_speed'),
data.get('wind_degree'),
data.get('humidity'),
data.get('pressure')
)
)
logging.critical("save is done!--------------")
def _predict_weather(self):
forecast = Forecast(type="streaming")
forecast.preprocess()
forecast.fit()
prediction = forecast.predict()
logging.critical('result: {}'.format(prediction))
forecast.save()
def _load_kafka_consumer(self):
consumer = None
while consumer is None:
try:
consumer = KafkaConsumer(
STREAMING_DATA_TOPIC_NAME,
value_deserializer=lambda m: json.loads(m.decode('utf-8')),
bootstrap_servers=[BOOTSTRAP_SERVER]
)
return consumer
except Exception as e:
logging.critical(e)
sleep(5)
def _load_cassandra_session(self):
session = None
while session is None:
try:
cluster = Cluster([os.environ.get('CASSANDRA_PORT_9042_TCP_ADDR', 'localhost')],
port=int(os.environ.get('CASSANDRA_PORT_9042_TCP_PORT', 9042))
)
session = cluster.connect(KEYSPACE_NAME)
return session
except Exception as e:
logging.error(e)
sleep(5)
if __name__ == '__main__':
streaming = Streaming()
streaming.run()
|
# Written by: Michael Imhof
# Date: 01/31/2020
# Part of Udacity Self-Driving Car Nanodegree
# Advanced Lane Finding Project
# Imports
import numpy as np
import cv2
import pickle
class CameraCalibration(object):
"""Class to calibrate the camera and warp perspective."""
def __init__(self):
# Initialize internal attributes
pickle_dict = pickle.load(open("distortion_cal.p", "rb"))
self._mtx = pickle_dict["mtx"]
self._dist = pickle_dict["dist"]
# Source points from test_images/straight_lines2.jpg
self._imshape = (1280, 720)
offset = 200
# src = np.float32([[275, 679], [588, 455], [698, 455], [1042, 679]])
src = np.float32([[218, 720], [588, 455], [698, 455], [1119, 720]])
dst = np.float32([[offset, self._imshape[1]],
[offset, 0],
[self._imshape[0]-offset, 0],
[self._imshape[0]-offset, self._imshape[1]]])
self._warp_matrix = cv2.getPerspectiveTransform(src, dst)
self._unwarp_matrix = cv2.getPerspectiveTransform(dst, src)
def calibrate_distortion(self, images, num_corners=(9, 6)):
"""
Calculate the distortion of the camera based on provided chessboard
images.
:param images: Image filenames for use in calibration.
:param num_corners: Number of internal corners on the chessboard image.
"""
        # Prepare object points like (0,0,0), (1,0,0), (2,0,0) ..., (8,5,0),
        # sized from num_corners rather than hard-coding 9x6.
        objp = np.zeros((num_corners[0] * num_corners[1], 3), np.float32)
        objp[:, :2] = np.mgrid[0:num_corners[0], 0:num_corners[1]].T.reshape(-1, 2)
# Initialize arrays to hold object points and image points.
objpoints = []
imgpoints = []
# Step through the list and search for chessboard corners
image = None
for fname in images:
image = cv2.imread(fname)
# Convert to grayscale
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# Find the chessboard corners
ret, corners = cv2.findChessboardCorners(gray, num_corners, None)
# If found, add object points, image points
if ret:
objpoints.append(objp)
imgpoints.append(corners)
# Calculate distortion
_, self._mtx, self._dist, _, _ = cv2.calibrateCamera(
objpoints,
imgpoints,
image.shape[1::-1],
None,
None
)
def undistort_image(self, image):
return cv2.undistort(image, self._mtx, self._dist, None, self._mtx)
def warp_image(self, image):
return cv2.warpPerspective(image,
self._warp_matrix,
self._imshape)
def unwarp_image(self, image):
return cv2.warpPerspective(image,
self._unwarp_matrix,
self._imshape)
if __name__ == '__main__':
# Make a list of calibration images
# import glob
# image_filenames = glob.glob('camera_cal/calibration*.jpg')
# camera_cal = CameraCalibration()
# camera_cal.calibrate_distortion(image_filenames, num_corners=(9, 6))
# pickle_dict = {"mtx": camera_cal._mtx, "dist": camera_cal._dist}
# pickle.dump(pickle_dict, open("distortion_cal.p", "wb"))
from matplotlib import pyplot as plt
test_img = plt.imread('test_images/test1.jpg')
# test_img = plt.imread('camera_cal/calibration1.jpg')
camera_cal = CameraCalibration()
undist = camera_cal.undistort_image(test_img)
warped = camera_cal.warp_image(undist)
# plt.imsave('writeup_media/calibration_image_distorted.jpg', test_img)
# plt.imsave('writeup_media/calibration_image_undistorted.jpg', undist)
plt.imsave('writeup_media/test_image_distorted.jpg', test_img)
plt.imsave('writeup_media/test_image_undistorted.jpg', undist)
plt.imsave('writeup_media/test_image_warped.jpg', warped)
plt.figure(figsize=(16, 8))
plt.imshow(warped)
plt.show()
|
from scrapy.spiders import Spider
from scrapy.selector import Selector
from dirbot.items import Website
class DmozSpider(Spider):
name = "dmoz"
allowed_domains = ["https://www.pinterest.com/"]
start_urls = ["https://www.pinterest.com/Girl_in_Dublin/followers/"]
def __init__(self,*args,**kwargs):
super(DmozSpider, self).__init__( *args, **kwargs)
self.download_delay = 6.0
def parse(self, response):
"""
        The lines below are a spider contract. For more info see:
http://doc.scrapy.org/en/latest/topics/contracts.html
@url http://www.dmoz.org/Computers/Programming/Languages/Python/Resources/
@scrapes name
"""
#sel = Selector(response)
accounts = response.xpath('//a[@class="userWrapper"]')
items =[]
for user in accounts:
item = Website()
item['usr_name'] = user.xpath('./@href').extract()[0]
item['pins'] = user.xpath('.//p[@class="userStats"]/span[@class="value"]/text()').extract()[0]
item['followers'] = user.xpath('.//p[@class="userStats"]/span[@class="value"]/text()').extract()[1]
items.append(item)
return items
|
# -*- coding: utf-8 -*-
"""
Goal: provide a function that can download resources from the web
Input:
    a list of URLs
    a save path
Output:
    files saved under the given path
Requirements:
    show the download process, i.e. visualize progress from 0% to 100%
"""
# =====================================================
from six.moves import urllib
import os
from urllib.parse import quote
import string
import sys
import json
def download_and_extract(Fileinfo, save_dir):
"""根据给定的URL地址下载文件
Parameter:
filepath: list 文件的URL路径地址
save_dir: str 保存路径
Return:
None
"""
temp_dir = save_dir + ''
for fileinfo in Fileinfo:
filepath = []
j = 3
while (j < len(fileinfo)):
filepath.append(fileinfo[j])
j += 1
save_dir = temp_dir + '\\' + fileinfo[0] + '\\' + fileinfo[2]
try:
os.makedirs(save_dir + '\\' + fileinfo[1])
except Exception as e:
print("User" + fileinfo[0] + ' already has the case ' + fileinfo[1])
save_dir = save_dir + '\\' + fileinfo[1]
for url, index in zip(filepath, range(len(filepath))):
filename = url.split('/')[-1]
save_path = os.path.join(save_dir, filename)
# url = urllib.parse.quote_plus(url)
url = quote(url, safe=string.printable)
urllib.request.urlretrieve(url, save_path)
sys.stdout.write('\r>> Downloading %.1f%%' % (float(index + 1) / float(len(filepath)) * 100.0))
sys.stdout.flush()
print('\nSuccessfully downloaded')
Fileinfo=[]
# filePath=[]
# userId=[]
i = 0
with open('D:\\chengxu\\SoftwareEngineering\\probabilityTheory2\\possiBC\\GetAlgorithmCapabilityRowDATA\\group3_data.json', 'r', encoding='utf8')as fp:
json_data = json.load(fp)
for key in json_data:
caseList = list(json_data[key]['cases'])
# userId.append(str(json_data['user_id']))
        base_dir = 'D:\\chengxu\\SoftwareEngineering\\probabilityTheory2\\Algorithm\\' + str(json_data[key]['user_id'])
        os.makedirs(base_dir)
        # One sub-directory per problem category (directory names kept as in the data:
        # strings, linear lists, arrays, search, sorting, number ops, trees, graphs).
        for category in ['字符串', '线性表', '数组', '查找算法', '排序算法', '数字操作', '树结构', '图结构']:
            os.makedirs(base_dir + '\\' + category)
for case in caseList:
Fileinfo.append([])
Fileinfo[i].append(str(json_data[key]['user_id']))
Fileinfo[i].append(str(case['case_id']))
Fileinfo[i].append(str(case['case_type']))
endIndex= len(case['upload_records'])-1
Fileinfo[i].append(str(case['upload_records'][endIndex]['code_url']))
i += 1
# print(key+':'+str(json_data[key]))
# os.makedirs('E:\\SampleTest\\可')
download_and_extract(Fileinfo, 'D:\\chengxu\\SoftwareEngineering\\probabilityTheory2\\Algorithm')
|
#!/usr/bin/env python
import sys
sys.dont_write_bytecode = True
import litefs
litefs.test_server()
|
from csv import DictReader as dr
from django.core.management import BaseCommand
from ticketingsystem.models import Device, Customer
loaded_error_message = """
If you need to reload the device data from the CSV file,
delete the db.sqlite3 file to destroy the database.
Then do a new migration"""
class Command(BaseCommand):
# Show this when the user types help
help = "populates device model with data from devicelist.csv"
def handle(self, *args, **options):
if Device.objects.exists():
print('device data already loaded...exiting.')
print(loaded_error_message)
return
print("Creating device data")
        csv = open('./devicelist.csv', encoding='utf-8-sig')
reader = dr(csv)
for row in reader:
device = Device()
device.deviceMake = row['Make']
device.deviceModel = row['Model']
device.deviceType = row['Type']
device.description = row['Description']
device.save()
|
print("Hello Folks - This is Number guessing game:")
print("_________________")
import random
print("easy is between 1 and 25")
print("medium is between 1 and 100")
print("hard is between 1 and 200")
dif=input("Choose your difficulty by typing 'e' for easy 'm' for medium and 'h' for hard:-")
while True:
if dif !="e" or "m" or "h":
print("Please type again as you have typed an invalid text\n")
dif=input("Choose your difficulty by typing 'e' for easy 'm' for medium and 'h' for hard:-\n")
if dif =="e" or "m" or "h":
if dif=="e":
num=random.randint(1,25)
print("So you have chosen easy difficulty")
break
if dif=="m":
num=random.randint(1,100)
print("So you have chosen medium difficulty")
break
if dif=="h":
num=random.randint(1,200)
print("So you have chosen hard difficulty")
break
loopCounter=0
while True:
inputValue=int(input("Guess the number here-> "))
loopCounter=loopCounter+1
if num<inputValue:
print("Try lower")
if num>inputValue:
print("Try higher")
if num==inputValue:
print("Congratulations, you got it")
break
print("Your tries: ",loopCounter)
|
import requests
import lxml.etree
url = 'https://th.wikipedia.org/wiki/รายชื่อเทศบาลตำบลในประเทศไทย'
resp = requests.get(url)
content = resp.content
tree = lxml.etree.fromstring(content, parser=lxml.etree.HTMLParser())
xpath = '//*[@id="mw-content-text"]/div/table[position() >= 2]/tbody/tr/td[2 <= position() and position() <= 3]//a//text()'
tambon_list = tree.xpath(xpath)
for t_name in tambon_list:
print(t_name)
|
import abc
import numpy
import os
from smqtk.representation import SmqtkRepresentation
from smqtk.utils import plugin
from smqtk.utils import merge_dict
__author__ = "paul.tunison@kitware.com"
class DescriptorElement (SmqtkRepresentation, plugin.Pluggable):
"""
Abstract descriptor vector container.
This structure supports implementations that cache descriptor vectors on a
per-UUID basis.
UUIDs must maintain unique-ness when transformed into a string.
    Descriptor element equality is based on shared descriptor type and vector
equality. Two descriptor vectors that are generated by different types of
descriptor generator should not be considered the same (though, this may be
up for discussion).
Stored vectors should be effectively immutable.
"""
def __init__(self, type_str, uuid):
"""
Initialize a new descriptor element.
:param type_str: Type of descriptor. This is usually the name of the
content descriptor that generated this vector.
:type type_str: str
:param uuid: Unique ID reference of the descriptor.
:type uuid: collections.Hashable
"""
super(DescriptorElement, self).__init__()
self._type_label = type_str
self._uuid = uuid
def __hash__(self):
return hash(self.uuid())
def __eq__(self, other):
if isinstance(other, DescriptorElement):
return numpy.array_equal(self.vector(), other.vector())
return False
def __ne__(self, other):
return not (self == other)
def __repr__(self):
return "%s{type: %s, uuid: %s}" % (self.__class__.__name__, self.type(),
self.uuid())
def __getstate__(self):
return {
"_type_label": self._type_label,
"_uuid": self._uuid,
}
def __setstate__(self, state):
self._type_label = state['_type_label']
self._uuid = state['_uuid']
@classmethod
def get_default_config(cls):
"""
Generate and return a default configuration dictionary for this class.
This will be primarily used for generating what the configuration
dictionary would look like for this class without instantiating it.
By default, we observe what this class's constructor takes as arguments,
aside from the first two assumed positional arguments, turning those
argument names into configuration dictionary keys.
If any of those arguments have defaults, we will add those values into
the configuration dictionary appropriately.
The dictionary returned should only contain JSON compliant value types.
        It is not guaranteed that the configuration dictionary returned
from this method is valid for construction of an instance of this class.
:return: Default configuration dictionary for the class.
:rtype: dict
"""
# similar to parent impl, except we remove the ``type_str`` and ``uuid``
# configuration parameters as they are to be specified at runtime.
dc = super(DescriptorElement, cls).get_default_config()
# These parameters must be specified at construction time.
del dc['type_str'], dc['uuid']
return dc
# noinspection PyMethodOverriding
@classmethod
def from_config(cls, config_dict, type_str, uuid, merge_default=True):
"""
Instantiate a new instance of this class given the desired type, uuid,
and JSON-compliant configuration dictionary.
:param type_str: Type of descriptor. This is usually the name of the
content descriptor that generated this vector.
:type type_str: str
:param uuid: Unique ID reference of the descriptor.
:type uuid: collections.Hashable
:param config_dict: JSON compliant dictionary encapsulating
a configuration.
:type config_dict: dict
:param merge_default: Merge the given configuration on top of the
default provided by ``get_default_config``.
:type merge_default: bool
:return: Constructed instance from the provided config.
:rtype: DescriptorElement
"""
c = {}
merge_dict(c, config_dict)
c['type_str'] = type_str
c['uuid'] = uuid
return super(DescriptorElement, cls).from_config(c, merge_default)
def uuid(self):
"""
:return: Unique ID for this vector.
:rtype: collections.Hashable
"""
return self._uuid
def type(self):
"""
:return: Type label type of the DescriptorGenerator that generated this
vector.
:rtype: str
"""
return self._type_label
###
# Abstract methods
#
@abc.abstractmethod
def has_vector(self):
"""
:return: Whether or not this container current has a descriptor vector
stored.
:rtype: bool
"""
@abc.abstractmethod
def vector(self):
"""
:return: Get the stored descriptor vector as a numpy array. This returns
            None if there is no vector stored in this container.
:rtype: numpy.ndarray or None
"""
@abc.abstractmethod
def set_vector(self, new_vec):
"""
Set the contained vector.
If this container already stores a descriptor vector, this will
overwrite it.
:param new_vec: New vector to contain.
:type new_vec: numpy.ndarray
:returns: Self.
        :rtype: DescriptorElement
"""
from ._io import *
def get_descriptor_element_impls(reload_modules=False):
"""
Discover and return discovered ``DescriptorElement`` classes. Keys in the
returned map are the names of the discovered classes, and the paired values
are the actual class type objects.
We search for implementation classes in:
    - modules next to the file this function is defined in (ones that begin
with an alphanumeric character),
- python modules listed in the environment variable
``DESCRIPTOR_ELEMENT_PATH``
- This variable should contain a sequence of python module
specifications, separated by the platform specific PATH separator
character (``;`` for Windows, ``:`` for unix)
Within a module we first look for a helper variable by the name
``DESCRIPTOR_ELEMENT_CLASS``, which can either be a single class object or
an iterable of class objects, to be specifically exported. If the variable
is set to None, we skip that module and do not import anything. If the
variable is not present, we look at attributes defined in that module for
classes that descend from the given base class type. If none of the above
are found, or if an exception occurs, the module is skipped.
:param reload_modules: Explicitly reload discovered modules from source.
:type reload_modules: bool
:return: Map of discovered class object of type ``DescriptorElement``
whose keys are the string names of the classes.
:rtype: dict[str, type]
"""
this_dir = os.path.abspath(os.path.dirname(__file__))
env_var = "DESCRIPTOR_ELEMENT_PATH"
helper_var = "DESCRIPTOR_ELEMENT_CLASS"
return plugin.get_plugins(__name__, this_dir, env_var, helper_var,
DescriptorElement, reload_modules)
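# A minimal in-memory sketch of the abstract API above, for illustration only
# (not a registered plugin; assumes Pluggable.is_usable and
# SmqtkRepresentation.get_config are the only other abstract members to satisfy).
class _InMemoryDescriptorElement (DescriptorElement):

    @classmethod
    def is_usable(cls):
        return True

    def __init__(self, type_str, uuid):
        super(_InMemoryDescriptorElement, self).__init__(type_str, uuid)
        self._v = None

    def get_config(self):
        return {}

    def has_vector(self):
        return self._v is not None

    def vector(self):
        return self._v

    def set_vector(self, new_vec):
        # Copy on the way in so the stored vector stays effectively immutable,
        # as the class docstring requests.
        self._v = numpy.array(new_vec)
        return self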
|
from .. import Verb
from ..interface import Adapter
class FunctionAdapter(Adapter):
def __init__(self, function_pointer):
self.fn = function_pointer
def get_value(self, ctx : Verb) -> str:
return str(self.fn())
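# A minimal usage sketch: the adapter stringifies whatever the wrapped callable
# returns when the interpreter asks for the block's value (the `ctx` Verb is
# simply ignored by this adapter).
def _function_adapter_demo(ctx: Verb) -> str:
    adapter = FunctionAdapter(lambda: 42)
    return adapter.get_value(ctx)  # -> "42"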
|
"""
rabbitpy Specific Exceptions
"""
from pamqp import specification
class ActionException(Exception):
def __repr__(self):
return self.args[0]
class ChannelClosedException(Exception):
def __repr__(self):
return 'Can not perform RPC requests on a closed channel, you must ' \
'create a new channel'
class ConnectionBlockedWarning(Warning):
def __repr__(self):
return 'Will not write to a connection that RabbitMQ is throttling'
class ConnectionException(Exception):
def __repr__(self):
return 'Unable to connect to the remote server %r' % self.args
class ConnectionResetException(Exception):
def __repr__(self):
return 'Connection was reset at socket level'
class EmptyExchangeNameError(Exception):
def __repr__(self):
return 'You must specify an Exchange name'
class EmptyQueueNameError(Exception):
def __repr__(self):
return 'You must specify a Queue name'
class RemoteClosedChannelException(Exception):
def __repr__(self):
return 'Channel %i was closed by the remote server (%i): %s' % \
(self.args[0], self.args[1], self.args[2])
class RemoteClosedException(Exception):
def __repr__(self):
return 'Connection was closed by the remote server (%i): %s' % \
(self.args[0], self.args[1])
class MessageReturnedException(Exception):
def __repr__(self):
return 'Message %s was returned by RabbitMQ: (%s) %s' % \
(self.args[0], self.args[1], self.args[2])
class NoActiveTransactionError(Exception):
def __repr__(self):
return 'No active transaction for the request, channel closed'
class TooManyChannelsError(Exception):
def __repr__(self):
return 'The maximum amount of negotiated channels has been reached'
class UnexpectedResponseError(Exception):
def __repr__(self):
        return 'Received an unexpected response, expected %s, received %s' % \
(self.args[0], self.args[1])
# AMQP Exceptions
AMQPContentTooLarge = specification.AMQPContentTooLarge
AMQPNoRoute = specification.AMQPNoRoute
AMQPNoConsumers = specification.AMQPNoConsumers
AMQPConnectionForced = specification.AMQPConnectionForced
AMQPInvalidPath = specification.AMQPInvalidPath
AMQPAccessRefused = specification.AMQPAccessRefused
AMQPNotFound = specification.AMQPNotFound
AMQPResourceLocked = specification.AMQPResourceLocked
AMQPPreconditionFailed = specification.AMQPPreconditionFailed
AMQPFrameError = specification.AMQPFrameError
AMQPSyntaxError = specification.AMQPSyntaxError
AMQPCommandInvalid = specification.AMQPCommandInvalid
AMQPChannelError = specification.AMQPChannelError
AMQPUnexpectedFrame = specification.AMQPUnexpectedFrame
AMQPResourceError = specification.AMQPResourceError
AMQPNotAllowed = specification.AMQPNotAllowed
AMQPNotImplemented = specification.AMQPNotImplemented
AMQPInternalError = specification.AMQPInternalError
AMQP = {311: AMQPContentTooLarge,
312: AMQPNoRoute,
313: AMQPNoConsumers,
320: AMQPConnectionForced,
402: AMQPInvalidPath,
403: AMQPAccessRefused,
404: AMQPNotFound,
405: AMQPResourceLocked,
406: AMQPPreconditionFailed,
501: AMQPFrameError,
502: AMQPSyntaxError,
503: AMQPCommandInvalid,
504: AMQPChannelError,
505: AMQPUnexpectedFrame,
506: AMQPResourceError,
530: AMQPNotAllowed,
540: AMQPNotImplemented,
541: AMQPInternalError}
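# A minimal usage sketch (hypothetical helper, not part of the rabbitpy API):
# map a broker reply code to the exception class to raise, with a fallback for
# codes that are not in the table above.
def exception_for_reply_code(reply_code, default=AMQPInternalError):
    return AMQP.get(reply_code, default)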
|
from flaskr import jwt
from functools import wraps
from flask_jwt_extended import (
verify_jwt_in_request,
get_jwt_claims
)
from utils.errors import NotAuthorizedError
from utils.blacklist_helpers import is_token_revoked
# ROLES VALIDATIONS
# Define our callback function to check if a token has been revoked or not
@jwt.token_in_blacklist_loader
def check_if_token_revoked(decoded_token):
return is_token_revoked(decoded_token)
@jwt.user_claims_loader
def add_claims_to_access_token(identity):
return identity
def prohibitted(fn):
"""Here is a custom decorator that verifies the JWT is present in
the request, as well as insuring that this user has a role of
`noneone` in the access token.
"""
@wraps(fn)
def wrapper(*args, **kwargs):
raise NotAuthorizedError()
return wrapper
def admin_required(fn):
"""Here is a custom decorator that verifies the JWT is present in
    the request, as well as ensuring that this user has a role of
`superuser` in the access token.
"""
@wraps(fn)
def wrapper(*args, **kwargs):
verify_jwt_in_request()
claims = get_jwt_claims()
if claims.get('roles', None) \
and isinstance(claims['roles'], dict) \
and claims['roles'].get('is_superuser', None):
return fn(*args, **kwargs)
raise NotAuthorizedError()
return wrapper
def manager_required(fn):
"""Here is a custom decorator that verifies the JWT is present in
    the request, as well as ensuring that this user has a role of
`manager` or `superuser` in the access token.
"""
@wraps(fn)
def wrapper(*args, **kwargs):
verify_jwt_in_request()
claims = get_jwt_claims()
if claims.get('roles', None) \
and isinstance(claims['roles'], dict) \
and (claims['roles'].get('is_manager', None)
or claims['roles'].get('is_superuser', None)):
return fn(*args, **kwargs)
raise NotAuthorizedError()
return wrapper
def staff_required(fn):
"""Here is a custom decorator that verifies the JWT is present in
    the request, as well as ensuring that this user has a role of
`seller`, `staff` or `superuser` in the access token.
"""
@wraps(fn)
def wrapper(*args, **kwargs):
verify_jwt_in_request()
claims = get_jwt_claims()
if claims.get('roles', None) \
and isinstance(claims['roles'], dict) \
and (claims['roles'].get('is_seller', None)
or claims['roles'].get('is_superuser', None)
or claims['roles'].get('is_manager', None)):
return fn(*args, **kwargs)
raise NotAuthorizedError()
return wrapper
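# A minimal usage sketch (hypothetical app and route, for illustration only;
# assumes flask_jwt_extended has been initialized on the app): the role decorator
# stacks under @app.route, so the JWT and role checks run before the view body.
def _example_admin_route():
    from flask import Flask
    app = Flask(__name__)

    @app.route('/admin/dashboard')
    @admin_required
    def admin_dashboard():
        return {'status': 'ok'}

    return app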
|
import math
from collections import deque
from heapq import heappop, heappush
from math import inf
from typing import List
class Solution:
def networkBecomesIdle(self, edges: List[List[int]], patience: List[int]) -> int:
n = len(patience)
        def dijkstra(g: List[List[int]], src: int) -> List[int]:
dis = [inf] * n
dis[src] = 0
            # heap optimization (priority queue with lazy deletion)
q = [(0, src)]
while q:
d, x = heappop(q)
if dis[x] < d:
continue
for y in g[x]:
newd = 1 + dis[x]
if newd < dis[y]:
dis[y] = newd
heappush(q, (newd, y))
return dis
g = [[] for _ in range(n)]
for x, y in edges:
g[x].append(y)
g[y].append(x)
dis = dijkstra(g, 0)
res = -1
for i in range(1, n):
d = patience[i]
rtt = 2*dis[i]
resend_times = math.ceil(rtt / d) - 1
last_send = resend_times * d
finish_time = last_send + rtt + 1
res = max(res, finish_time)
return res
    # BFS variant: the graph is unweighted, so plain BFS yields the same distances
    def bfs(g: List[List[int]], src: int) -> List[int]:
dis = [inf] * n
dis[src] = 0
q = deque([src])
while q:
cur = q.popleft()
for x in g[cur]:
if dis[x] != inf:
continue
dis[x] = dis[cur] + 1
q.append(x)
return dis
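# A minimal worked example (added sketch, not part of the original solution):
# with edges [[0,1],[1,2]] and patience [0,2,1], node 1 has rtt 2 and never
# resends (idle at 0 + 2 + 1 = 3); node 2 has rtt 4 and resends
# ceil(4/1) - 1 = 3 times, the last at second 3 (idle at 3 + 4 + 1 = 8),
# so the network becomes idle at second 8.
if __name__ == "__main__":
    assert Solution().networkBecomesIdle([[0, 1], [1, 2]], [0, 2, 1]) == 8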
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import nntools as nt
# In[2]:
from models.architectures import *
import torch
import torch.nn as nn
from config import args
import os
from data_loader import get_loader
from torch.nn.utils.rnn import pack_padded_sequence
# In[3]:
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# In[4]:
encoder = args['encoder'](args['embed_size']).to(device)
decoder = args['decoder'](args['embed_size'], args['hidden_size'], args['vocab_size'], args['num_layers'], list(args['vocabulary'].keys()), args['glove_path'], args.get('max_sentence_length', 100), args.get('is_pretrained', True)).to(device)
# In[5]:
criterion = args['loss_criterion']
# In[6]:
params = list(list(encoder.parameters()) + list(decoder.parameters()))
# In[7]:
optimizer = torch.optim.Adam(params, lr=args['learning_rate'])
# In[8]:
stats_manager = nt.StatsManager()
# In[9]:
exp1 = nt.Experiment(encoder, decoder, device, criterion, optimizer, stats_manager,
output_dir=args['model_path'], perform_validation_during_training=True)
# In[10]:
import time
exp1.run(num_epochs=args['epochs'])
# exp1.load_bestmodel()
# exp1.encoder.eval()
# exp1.decoder.eval()
# a,b = exp1.evaluate(mode='test',generate=False,generate_mode='deterministic',temperature=1)
# print((a,b))
#a,b = exp1.evaluate(mode='test',generate=True,generate_mode='stochastic',temperature=0.2)
# time.sleep(30)
# exp1.load_bestmodel()
# exp1.encoder.eval()
# exp1.decoder.eval()
# exp1.evaluate(mode='test',generate=True,generate_mode='stochastic',temperature=0.1)
# time.sleep(30)
# exp1.load_bestmodel()
# exp1.encoder.eval()
# exp1.decoder.eval()
# exp1.evaluate(mode='test',generate=True,generate_mode='stochastic',temperature=0.5)
# time.sleep(30)
# exp1.load_bestmodel()
# exp1.encoder.eval()
# exp1.decoder.eval()
# exp1.evaluate(mode='test',generate=True,generate_mode='stochastic',temperature=1)
# time.sleep(30)
# exp1.load_bestmodel()
# exp1.encoder.eval()
# exp1.decoder.eval()
# exp1.evaluate(mode='test',generate=True,generate_mode='stochastic',temperature=1.5)
# time.sleep(30)
# exp1.load_bestmodel()
# exp1.encoder.eval()
# exp1.decoder.eval()
# exp1.evaluate(mode='test',generate=True,generate_mode='stochastic',temperature=2)
# time.sleep(30)
# In[ ]:
|
# test 1
cars = ["bwm","loslias","toyota","Audi"]
for car in cars:
if car.lower() == "audi":
print(car.upper())
else:
print(car.title())
'''
In Python, strings that differ only in case compare as unequal.
To compare case-insensitively, temporarily lowercase both sides with lower().
'''
# test 2
if car[2] != "2333":
print("yep")
else:
print("osh")
'''
Testing values for equality or inequality works the same way as above.
'''
# test 3
# Combining multiple conditions
# `and` requires both to hold; `or` requires either
if 2<=1 and 3>=2:
print("2<=1 and 3>=2")
elif 2<=1 or 3>=2:
print("2<=1 or 3>=2")
'''
Note that 'else if' is written as elif in Python.
The trailing else may be omitted.
Multiple independent if statements can be used side by side.
'''
# test 4
# Check whether a specific item is in a list/tuple
if "bwm" not in cars:
print("bwm is not in cars")
else:
print("bwm is in cars")
# test 5
bool_ = True
bool__ = False
print(bool_)
print(bool__)
'''
A boolean expression is either True or False.
Note that True and False must be capitalized.
'''
# test 6
# Check that the list is not empty
if cars:
print("列表不空")
else:
print("列表为空")
|
from common.run_method import RunMethod
import allure
@allure.step("小程序/订单/根据订单状态查询订单数量")
def order_api_inner_order_order_countByStatuses_get(params=None, header=None, return_json=True, **kwargs):
'''
    :param: query parameters appended to the URL
    :body: request body
    :return_json: whether to return the response as JSON (default: yes)
    :header: request header
    :host: target environment for the request
    :return: JSON response by default; with return_json=False, the raw response
'''
name = "小程序/订单/根据订单状态查询订单数量"
url = f"/service-order/order-api/inner-order/order/countByStatuses"
res = RunMethod.run_request("GET", url, params=params, header=header, return_json=return_json, name=name, **kwargs)
return res
@allure.step("极运营/系统设置/优惠券/优惠券查询服务")
def order_api_inner_order_queryCoupon_get(params=None, header=None, return_json=True, **kwargs):
'''
:param: url地址后面的参数
:body: 请求体
:return_json: 是否返回json格式的响应(默认是)
:header: 请求的header
:host: 请求的环境
:return: 默认json格式的响应, return_json=False返回原始响应
'''
name = "极运营/系统设置/优惠券/优惠券查询服务"
url = f"/service-order/order-api/inner-order/queryCoupon"
res = RunMethod.run_request("GET", url, params=params, header=header, return_json=return_json, name=name, **kwargs)
return res
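# Usage sketch (added; the parameter value is hypothetical, not taken from
# the original suite, and a live test environment is required):
#   res = order_api_inner_order_order_countByStatuses_get(params={"statuses": "1,2"})
#   print(res)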
|
class MyQueue:
    """A FIFO queue implemented with two stacks (lists)."""

    # initialize the main and buffer stacks
    def __init__(self, input=[]):
        self.main_stack = []
        self.buffer_stack = []
        self.enqueue(input)

    # enqueue a single value or a list of values
    def enqueue(self, input_list):
        # very basic error checking
        if input_list is None:
            print("ERROR: 'None' cannot be enqueued")
            return
        if not isinstance(input_list, list):
            input_list = [input_list]
        # pour the buffer back into the main stack so insertion order is kept
        if self.buffer_stack:
            self.main_stack = self.buffer_stack[:]
            self.main_stack.reverse()
            self.buffer_stack = []
        # add the new elements to the queue
        self.main_stack.extend(input_list)

    def dequeue(self):
        # save time by checking whether the queue front is already in the buffer
        if len(self.buffer_stack) + len(self.main_stack):
            if not self.buffer_stack:
                # move everything to the buffer, reversing stack order
                self.buffer_stack = self.main_stack[:]
                self.buffer_stack.reverse()
                self.main_stack = []
            return self.buffer_stack.pop()
        print("ERROR: attempted to dequeue an empty queue")

    def print_queue(self):
        print("FULL QUEUE: ")
        print("-> " + str(self.main_stack[::-1] + self.buffer_stack) + " ->")
        print("MAIN: ")
        print("-> " + str(self.main_stack[::-1]) + " ->")
        print("BUFFER: ")
        print("-> " + str(self.buffer_stack) + " ->")
        print("")
# test cases
x = MyQueue([1, 2, 3])
x.print_queue()
x.dequeue()
x.print_queue()
x.dequeue()
x.print_queue()
x.dequeue()
x.print_queue()
x.enqueue([-1, 4, 3])
x.print_queue()
x.dequeue()
x.print_queue()
x.enqueue(1)
x.print_queue()
x.enqueue(5)
x.print_queue()
x.dequeue()
x.print_queue()
x.dequeue()
x.print_queue()
x.dequeue()
x.print_queue()
x.dequeue()
x.print_queue()
|
print('X and 0 -> 2 real players')
print('')
board = [' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ']
def tabla():
#print(' %c | %c | %c ' % (board[1], board[2], board[3]))
print(board[0], ' |', board[1], '|', board[2])
print('___|___|___')
print(board[3], ' |', board[4], '|', board[5])
print('___|___|___')
print(board[6], ' |', board[7], '|', board[8])
print(' | | ')
print('')
# all eight winning lines: rows, columns and the two diagonals
WIN_LINES = [(0, 1, 2), (3, 4, 5), (6, 7, 8),
             (0, 3, 6), (1, 4, 7), (2, 5, 8),
             (0, 4, 8), (2, 4, 6)]
def castigator(jucator):
    # report (and announce) a win when `jucator` ("x" or "0") fills a line
    for a, b, c in WIN_LINES:
        if board[a] == board[b] == board[c] == jucator:
            print(jucator + " has won")
            return True
    return False
tabla()
bucla = 1
while bucla == 1:
    for jucator in ("x", "0"):
        print(jucator + "'s turn")
        # moves are entered as "x,5" or "0,5": symbol, comma, cell number 1-9
        alegere = input()
        parts = alegere.split(",")
        if len(parts) == 2 and parts[0] == jucator and parts[1].isdigit() and 1 <= int(parts[1]) <= 9:
            board[int(parts[1]) - 1] = jucator
            tabla()
            if castigator(jucator):
                bucla = 0
                break
print("yes")
|
lavaRoof = "lava2,{0},0,150,100,lava2.png\n"
lavaFloor = "lava,{0},600,150,100,lava.png\n"
ground = "layer2,{0},600,150,100,layer2.png\n"
roof = "layer3,{0},0,150,100,layer3.png\n"
blueCoin = "blueCoin,{0},{1},57,65,6,3,blueCoin.png\n"
blueDimond = "blueDimond,{0},{1},56,60,6,7,blueDimond.png\n"
redCoin ="redCoin,{0},{1},58,65,6,2,redCoin.png\n"
redDimond = "redDimond,{0},{1},54,60,6,10,redDimond.png\n"
goldCoin = "goldCoin,{0},{1},53,60,6,1,goldCoin.png\n"
smallRock = "smallRock,{0},{1},80,80,40,smallRock.png\n"
mediumRock = "mediumRock,{0},{1},110,110,55,mediumRock.png\n"
bigRock = "bigRock,{0},{1},140,140,70,bigRock.png\n"
flag = "flag,{0},{1},100,100,flag.png\n"
# p="Platform,{0},{1},200,52\n" # x, y
# g="Gomba,{0},{1},{2},35,70,70,5,gomba.png\n" # x, y, g
# s="Star,{0},{1},-1,20,40,40,6,star.png,{2},{3},False\n" # x,y,theta,radius
# m="Mario,{0},{1},{2},39,78,78,4,marioRun.png\n" #x, y, g
file = open("bear.csv","r")
lavafallStage=file.read().split("\n")[:]
file.close()
file = open("lavafall.csv","w")
y = 0
for line in lavafallStage:
line = line.split(",")
x = 0
for segment in line:
if segment == 'lava2':
file.write(lavaRoof.format(x))
elif segment == 'lava':
file.write(lavaFloor.format(x))
elif segment == 'layer2':
file.write(ground.format(x))
elif segment == 'layer3':
file.write(roof.format(x))
elif segment == 'blueCoin':
file.write(blueCoin.format(x,y))
elif segment == 'blueDimond':
file.write(blueDimond.format(x,y))
elif segment == 'redCoin':
file.write(redCoin.format(x,y))
elif segment == 'redDimond':
file.write(redDimond.format(x,y))
elif segment == 'goldCoin':
file.write(goldCoin.format(x,y))
elif segment == 'smallRock':
file.write(smallRock.format(x,y))
elif segment == 'mediumRock':
file.write(mediumRock.format(x,y))
elif segment == 'bigRock':
file.write(bigRock.format(x,y))
elif segment == 'flag':
file.write(flag.format(x,y))
x+=5
y+=5
file.close()
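# Worked example (added, with an assumed input row): a bear.csv line
# "layer3,,goldCoin" at y = 0 emits
#   layer3,0,0,150,100,layer3.png          (x = 0)
#   goldCoin,10,0,53,60,6,1,goldCoin.png   (x = 10; the empty cell advanced x by 5)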
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013-2015 Marcos Organizador de Negocios SRL http://marcos.do
# Write by Eneldo Serrata (eneldo@marcos.do)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, orm
from openerp import netsvc
from openerp.exceptions import ValidationError
from openerp.exceptions import except_orm
class account_cash_statement(orm.Model):
_inherit = "account.bank.statement"
def _all_lines_reconciled(self, cr, uid, ids, name, args, context=None):
        res = super(account_cash_statement, self)._all_lines_reconciled(cr, uid, ids, name, args, context=context)
        if context and context.get("journal_type", False) == "cash" and res:
for key in res.keys():
res[key] = True
return res
def journal_id_change(self, cr, uid, ids, journal_id):
is_cjc = self.pool.get("account.journal").browse(cr, uid, journal_id).is_cjc
return {"value": {"is_cjc": is_cjc}}
_columns = {
"is_cjc": fields.boolean("Control de caja chica", readonly=False),
'all_lines_reconciled': fields.function(_all_lines_reconciled, string='All lines reconciled', type='boolean'),
}
def create_invoice_wizard(self, cr, uid, ids, context=None):
view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'marcos_cjc', 'cjc_wizard_view_form')[1]
wizard = {
            'name': 'Petty cash expense',
'view_mode': 'form',
'view_id': False,
'views': [(view_id, 'form')],
'view_type': 'form',
'res_model': 'cjc.invoice.wizard',
'type': 'ir.actions.act_window',
'target': 'new',
'context': context
}
return wizard
def button_confirm_cash(self, cr, uid, ids, context=None):
statement = self.browse(cr, uid, ids)[0]
if statement.journal_id.is_cjc and context:
wf_service = netsvc.LocalService("workflow")
# invoiced = []
uninvoiced = []
for statement in self.browse(cr, uid, ids):
for line in statement.line_ids:
# if line.invoice_id:
# invoiced.append(line.invoice_id.id)
if not line.invoice_id and line.amount < 0:
uninvoiced.append(line)
# for inv_id in invoiced:
# wf_service.trg_validate(uid, 'account.invoice', inv_id, 'invoice_open', cr)
journal = statement.journal_id
minor_journal = journal.gastos_journal_id
minor_partner = minor_journal.special_partner
minor_product = minor_journal.special_product
vals = {}
vals.update({
u'account_id': journal.default_credit_account_id.id,
u'check_total': 0,
u'child_ids': [[6, False, []]],
                u'comment': "Minor expense generated from petty cash",
u'company_id': 1,
u'currency_id': journal.company_id.currency_id.id,
u'date_due': False,
u'date_invoice': statement.date,
u'fiscal_position': minor_partner.property_account_position.id,
u'internal_number': False,
u'journal_id': minor_journal.id,
u'message_follower_ids': False,
u'message_ids': False,
u'name': False,
u'ncf_required': False,
u'origin': statement.name,
u'parent_id': False,
u'partner_bank_id': False,
u'partner_id': minor_partner.id,
u'payment_term': False,
u'period_id': statement.period_id.id,
u'reference': False,
u'reference_type': "02",
u'supplier_invoice_number': False,
u'tax_line': [],
u'user_id': uid,
u'pay_to': statement.journal_id.pay_to.id,
u'invoice_line': []
})
if uninvoiced:
if not minor_product.property_account_expense.id and statement.journal_id.is_cjc:
raise ValidationError(u"En el diario de gasto menor seleccionado para esta caja chica "
u"el producto {} utilizado por defecto no tiene la cuenta de gasto asignada!".format(
minor_product.name))
line_ids = []
for line in uninvoiced:
line.account_id = journal.default_credit_account_id
line_ids.append(line.id)
line_list = [0, False]
line_dict = {}
line_dict.update({
u'account_analytic_id': False,
u'account_id': minor_product.property_account_expense.id,
u'asset_category_id': False,
u'discount': 0,
u'invoice_line_tax_id': [[6, False, [t.id for t in minor_product.supplier_taxes_id]]],
u'name': line.name,
u'price_unit': abs(line.amount),
u'product_id': minor_product.id,
u'quantity': 1,
u'uos_id': 1
})
line_list.append(line_dict)
vals["invoice_line"].append(line_list)
if statement.journal_id.is_cjc:
context = {u'default_type': u'in_invoice', u'journal_type': u'purchase', u"minor": True}
inv_id = self.pool.get("account.invoice").create(cr, uid, vals, context=context)
inv = self.pool.get("account.invoice").browse(cr, uid, inv_id)
inv.check_total = inv.amount_total
wf_service.trg_validate(uid, 'account.invoice', inv_id, 'invoice_open', cr)
self.pool.get("account.bank.statement.line").write(cr, uid, line_ids, {"invoice_id": inv_id, "partner_id": minor_partner.id, "ref": inv.number})
res = super(account_cash_statement, self).button_confirm_bank(cr, uid, ids, context=context)
if statement.journal_id.is_cjc and context:
for move in statement.move_line_ids:
if move.credit > 0:
self.pool.get("account.move.line").write(cr, uid, move.id,
{"partner_id": statement.journal_id.pay_to.id})
for statement in self.browse(cr, uid, ids):
for line in statement.line_ids:
number = line.invoice_id.number
account_id = line.account_id.id
partner_id = line.partner_id.id
cjc_journal = line.journal_id.id
inv_journal = line.invoice_id.journal_id.id
move_line_ids = []
move_line_ids += self.pool.get("account.move.line").search(cr, uid, [('ref', '=', number),
('account_id', '=', account_id),
('partner_id', '=', partner_id),
('journal_id', '=', cjc_journal),
('debit', '>', 0)
])
move_line_ids += self.pool.get("account.move.line").search(cr, uid, [('ref', '=', number),
('account_id', '=', account_id),
('partner_id', '=', partner_id),
('journal_id', '=', inv_journal),
('credit', '>', 0)
])
try:
self.pool.get("account.move.line.reconcile").trans_rec_reconcile_full(cr, uid, ids, {"active_ids": move_line_ids})
                except except_orm as e:
                    # swallow OpenERP's "entry already reconciled" error (matched by its Spanish message)
                    if e.value.startswith("El apunte ya est"):
                        pass
                    #else:
                    #    raise e
return res
class account_bank_statement_line(orm.Model):
_inherit = "account.bank.statement.line"
_columns = {
"invoice_id": fields.many2one("account.invoice", "Factura", copy=False)
}
# def unlink(self, cr, uid, ids, context=None):
# context = context or {}
# for line in self.browse(cr, uid, ids):
# if context.get("journal_type", False) == "cash" and line.invoice_id:
# self.pool.get("account.invoice").unlink(cr, uid, [line.invoice_id.id], context=context)
#
# return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)
def view_invoice(self, cr, uid, ids, context=None):
"""
Method to open create customer invoice form
"""
record = self.browse(cr, uid, ids, context=context)
view_ref = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'invoice_supplier_form')
view_id = view_ref[1] if view_ref else False
res = {
'type': 'ir.actions.act_window',
'name': 'Supplier Invoice',
'res_model': 'account.invoice',
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'target': 'new',
"res_id": record.invoice_id.id,
}
return res
|
# author zyyFTD
# Github: https://github.com/YuyangZhangFTD/zyy_ML-DL
"""
this code is for python3
"""
import tensorflow as tf                    # import tensorflow
c = tf.constant(1.5)                       # create a constant
x = tf.Variable(1.0, name="x")             # create a variable
add_op = tf.add(x, c)                      # create the add operation
assign_op = tf.assign(x, add_op)           # create the assign operation
init = tf.global_variables_initializer()   # op that initializes all variables
sess = tf.Session()                        # get a session object
sess.run(init)                             # run variable initialization
sess.run(assign_op)                        # run the assign operation
print(sess.run(x))                         # should print 2.5
sess.close()                               # close the session
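# For reference, a minimal sketch of the same computation in TensorFlow 2
# style (an assumption; the original targets the 1.x graph/session API):
#   import tensorflow as tf
#   c = tf.constant(1.5)
#   x = tf.Variable(1.0, name="x")
#   x.assign_add(c)      # eager execution: no session needed
#   print(x.numpy())     # prints 2.5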
|
import matplotlib.pyplot as plt
import numpy as np
SIZE = 5
plt.rcParams["figure.figsize"] = (SIZE,SIZE)
# Gravity
g = 9.8 # [m/s^2]
class Pendulum():
def __init__(self):
# Base Dimensions
self.base_width = 1 # [m]
self.base_height = 0.5 # [m]
# Wheel Radius
self.wheel_radius = 0.1 # [m]
# Ball Radius
self.ball_radius = 0.1 # [m]
# Bar Length
self.bar_length = 3 # [m]
# Cart Mass
self.M = 10 # [kg]
# Ball Mass
self.m = 2 # [kg]
# Time Step
self.dt = 0.1 # [s]
# Simulation
def model_matrix(self):
A = np.array([
[0, 1, 0, 0],
[0, 0, self.m * g / self.M, 0],
[0, 0, 0, 1],
[0, 0, g * (self.M + self.m) / (self.bar_length * self.M), 0]
])
B = np.array([
[0],
[1 / self.M],
[0],
[1 / (self.bar_length * self.M)]
])
return A, B
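    # Note (added): A and B are the cart-pole dynamics linearized about the
    # upright equilibrium (theta = 0) for the state [x, x_dot, theta, theta_dot]
    # and input u = horizontal force applied to the cart.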
def run_step(self, x, u):
A, B = self.model_matrix()
        x_dot = np.dot(A, x) + np.dot(B, u).reshape((4, 1))
        x += self.dt * x_dot
return x
def plot_pendulum(self,xt,theta, radians = True):
plt.figure("Simulation")
# Clear figure
plt.clf()
if not radians:
theta = np.radians(theta)
        # Model Coordinates
        base_xy = [-self.base_width/2, self.base_height/2]  # Base coordinates
        wr_xy = [self.base_width/2 - self.wheel_radius, abs(self.base_height/2-self.wheel_radius)]  # Right Wheel
        wl_xy = [-self.base_width/2 + self.wheel_radius, abs(self.base_height/2-self.wheel_radius)]  # Left Wheel
bar_xs = np.array([0, self.bar_length * np.sin(-theta)])
bar_ys = np.array([self.base_height, self.bar_length * np.cos(-theta) + self.base_height])
base_xy[0] += xt
wr_xy[0] += xt
wl_xy[0] += xt
bar_xs += [xt,xt]
# Model Shapes and Plot
# Cart
base = plt.Rectangle(base_xy, self.base_width, self.base_height,
fc="#073642",
ec="#2E3436")
wheel_r = plt.Circle(wr_xy, self.wheel_radius,
fc="#469EBD",
ec="#469EBD",
)
wheel_l = plt.Circle(wl_xy, self.wheel_radius,
fc="#469EBD",
ec="#469EBD",
)
plt.gca().add_patch(base)
plt.gca().add_patch(wheel_l)
plt.gca().add_patch(wheel_r)
# Pendulum
ball = plt.Circle((bar_xs[1], bar_ys[1]), self.ball_radius,
zorder=10,
fc="#FFA000",
)
pendulum_bar = plt.plot(bar_xs,bar_ys,
c="#CB1616",
lw=3,
)
plt.gca().add_patch(ball)
plt.axis("equal")
plt.xlim([-SIZE / 2 + xt, SIZE / 2 + xt])
plt.pause(0.001)
    # Parameter Update Functions
def update_cart_dims(self, width, height, wheel_radius):
if width <= 0 or height <= 0 or wheel_radius <= 0:
print("Please enter positive nonzero values.")
raise ValueError
self.base_height = height
self.base_width = width
self.wheel_radius = wheel_radius
def update_pendulum_dims(self, bar_length, ball_radius):
        if bar_length <= 0 or ball_radius <= 0:
            print("Values must be positive and nonzero.")
raise ValueError
self.bar_length = bar_length
self.ball_radius = ball_radius
def update_masses(self, cart_mass, ball_mass):
if cart_mass <= 0 or ball_mass <= 0:
print("Masses must be positive nonzero values.")
raise ValueError
self.M = cart_mass
self.m = ball_mass
if __name__ == "__main__":
    P = Pendulum()
    print(P.model_matrix())
    for i in range(10):
        P.plot_pendulum(i / 10, 0)
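    # A further usage sketch (added, not from the original): integrate the
    # linearized model forward from a small tilt with zero control input.
    state = np.array([[0.0], [0.0], [0.1], [0.0]])  # [x, x_dot, theta, theta_dot]
    for _ in range(50):
        state = P.run_step(state, np.array([0.0]))  # u = 0 (no control force)
        P.plot_pendulum(state[0, 0], state[2, 0])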
|
from django.shortcuts import render, redirect
from .forms.pizza_app.user import UserForm, UserLoginForm
from .forms.pizza_app.pizza import PizzaForm
from .forms.pizza_app.address import AddressForm
from django.contrib import messages
from pizza_app.models import *
from django.http import HttpResponse, JsonResponse
from random import randint
# Create your views here.
APP_NAME = 'pizza_app'
def home(request):
user = None
if 'logged_user' in request.session:
users = User.objects.filter(email=request.session['logged_user'])
if len(users) > 0:
user = users[0]
users_type = UserType.objects.filter(type='admin')
if len(users_type) > 0:
user_type = users_type[0]
else:
user_type = None
context = {
'my_pizzas' : Pizza.objects.filter(user=user),
'pizzas' : Pizza.objects.filter(user__in=User.objects.filter(user_type=user_type)),
'extras' : Extra.objects.all(),
}
return render(request, f'{APP_NAME}/index.html', context)
def error_404_view(request, exception):
print('ERROR 404')
return HttpResponse("Hola, esta página no está disponible o no es válida.")
def register(request):
if request.method == 'GET':
user_form = UserForm()
context = {
'user_form' : user_form,
}
return render(request, f'{APP_NAME}/register.html', context)
if request.method == 'POST':
users_type = UserType.objects.all()
if len(users_type) < 1:
type_admin = UserType.objects.create(name='Administrador', type='admin')
type_user = UserType.objects.create(name='Empleado', type='user')
type_client = UserType.objects.create(name='Cliente', type='client')
else:
type_client = UserType.objects.get(name='Cliente', type='client')
errors = User.objects.validator(request.POST)
if len(errors) > 0:
for key, value in errors.items():
messages.error(request, value)
context = {
'user_form' : UserForm(request.POST),
'user_login_form' : UserLoginForm(),
}
return render(request, f'{APP_NAME}/register.html', context)
if User.ifExists(request.POST['email']):
            messages.error(request, 'User already exists')
context = {
'user_form' : UserForm(request.POST),
'user_login_form' : UserLoginForm(),
}
return render(request, f'{APP_NAME}/register.html', context)
user_form = UserForm(request.POST)
if user_form.is_valid():
users = User.objects.all()
if len(users) > 0:
user = user_form.save(commit=False)
user.user_type = type_client
user.save()
else:
user = user_form.save(commit=False)
user.user_type = UserType.objects.get(type='admin')
user.save()
request.session['logged_user'] = user.email
request.session['logged_perfil'] = user.user_type.name
request.session['logged_user_name'] = user.first_name + ' ' + user.last_name
else:
context = {
'user_form' : UserForm(request.POST),
'user_login_form' : UserLoginForm(),
}
return render(request, f'{APP_NAME}/register.html', context)
return redirect('home')
def login(request):
if request.method == 'GET':
user_form = UserForm()
user_login_form = UserLoginForm()
context = {
'user_form' : user_form,
'user_login_form' : user_login_form,
}
return render(request, f'{APP_NAME}/login.html', context)
if request.method == 'POST':
loginForm = UserLoginForm(request.POST)
if loginForm.is_valid():
logged_user = loginForm.login(request.POST)
if logged_user:
request.session['logged_user_name'] = logged_user.first_name + ' ' + logged_user.last_name
request.session['logged_user'] = logged_user.email
request.session['logged_perfil'] = logged_user.user_type.name
print('logged_user: ', request.session['logged_user'])
return redirect('home')
else:
                messages.error(request, 'user does not exist or the password is invalid')
user_form = UserForm()
user_login_form = UserLoginForm(request.POST)
context = {
'user_form' : user_form,
'user_login_form' : user_login_form,
}
return render(request, f'{APP_NAME}/login.html', context)
def logout(request):
try:
del request.session['logged_user']
del request.session['logged_user_name']
del request.session['logged_perfil']
del request.session['carrito']
del request.session['total_carrito']
    except KeyError:
        # some of the session keys may not be set
        print('Error')
return redirect('home')
def create_pizza(request):
if 'logged_user' not in request.session:
return redirect(login)
else:
users = User.objects.filter(email=request.session['logged_user'])
if len(users) > 0:
user = users[0]
else:
return redirect(login)
if request.method == 'GET':
request.session['price'] = 0
context = {
'ingredients' : Ingredient.objects.all(),
'pizza_form' : PizzaForm()
}
return render(request, f'{APP_NAME}/create_pizza.html', context)
if request.method == 'POST':
print(request.POST)
errors = Pizza.objects.validator(request.POST)
if len(errors) > 0:
for key, value in errors.items():
messages.error(request, value)
context = {
'ingredients' : Ingredient.objects.all(),
'pizza' : request.POST,
}
return render(request, f'{APP_NAME}/create_pizza.html', context)
if 'name' not in request.POST or 'image' not in request.POST or 'discount' not in request.POST or 'special_price' not in request.POST or 'price' not in request.POST:
            messages.error(request, 'Missing information to create the pizza')
context = {
'ingredients' : Ingredient.objects.all(),
'pizza' : request.POST,
}
return render(request, f'{APP_NAME}/create_pizza.html', context)
        # collect every selected ingredient option and sum its price
price = 0
for key, value in request.POST.items():
print(key)
if 'Option' in key:
values = value.split('|')
id = values[0]
options = IngredientOption.objects.filter(id=id)
if len(options) > 0:
option = options[0]
price = price + option.price
if price <= 0:
            messages.error(request, 'Missing ingredient information')
context = {
'ingredients' : Ingredient.objects.all(),
'pizza' : request.POST,
}
return render(request, f'{APP_NAME}/create_pizza.html', context)
pizza = Pizza()
pizza.name = request.POST['name']
pizza.image = request.POST['image']
pizza.discount = request.POST['discount']
pizza.special_price = request.POST['special_price'] if 'special_price' in request.POST and request.POST['special_price'] != '' else 0
pizza.user = user
pizza.price = price
pizza.save()
for key, value in request.POST.items():
print(key)
if 'Option' in key:
values = value.split('|')
id = values[0]
options = IngredientOption.objects.filter(id=id)
if len(options) > 0:
option = options[0]
pizza.all_ingredients.add(option)
return redirect('home')
def get_price(request):
if request.method == 'POST':
print(request.POST)
request.session['price'] = 0
for key, value in request.POST.items():
print(key)
if 'Option' in key:
values = value.split('|')
id = values[0]
price = values[1]
options = IngredientOption.objects.filter(id=id)
if len(options) > 0:
option = options[0]
price = option.price
request.session['price'] = request.session['price'] + int(price)
price = request.session['price']
return JsonResponse({'price' : price})
return redirect(create_pizza)
def add_pizza(request, id_pizza):
if 'logged_user' not in request.session:
return redirect(login)
else:
users = User.objects.filter(email=request.session['logged_user'])
if len(users) > 0:
user = users[0]
else:
return redirect(login)
if request.method == 'GET':
pizzas = Pizza.objects.filter(id=id_pizza)
if len(pizzas) > 0:
pizza = pizzas[0]
#del request.session['carrito']
#del request.session['total_carrito']
carrito = []
total_carrito = 0
if 'carrito' in request.session:
carrito = request.session['carrito']
if 'total_carrito' in request.session:
total_carrito = request.session['total_carrito']
total_carrito += pizza.price
request.session['total_carrito'] = total_carrito
value = randint(1000000, 9999999)
carrito.append({'id_carrito': value, 'item': 'pizza', 'id': pizza.id, 'name': pizza.name, 'price': pizza.price, 'image': pizza.image})
request.session['carrito'] = carrito
print(carrito)
return redirect('home')
def add_extra(request, id_extra):
if 'logged_user' not in request.session:
return redirect(login)
else:
users = User.objects.filter(email=request.session['logged_user'])
if len(users) > 0:
user = users[0]
else:
return redirect(login)
if request.method == 'GET':
extras = Extra.objects.filter(id=id_extra)
if len(extras) > 0:
extra = extras[0]
#del request.session['carrito']
#del request.session['total_carrito']
carrito = []
total_carrito = 0
if 'carrito' in request.session:
carrito = request.session['carrito']
if 'total_carrito' in request.session:
total_carrito = request.session['total_carrito']
total_carrito += extra.price
request.session['total_carrito'] = total_carrito
value = randint(1000000, 9999999)
carrito.append({'id_carrito': value, 'item': 'extra', 'id': extra.id, 'name': extra.name, 'price': extra.price, 'image': extra.image})
request.session['carrito'] = carrito
print(carrito)
return redirect('home')
def ver_carrito(request):
if 'logged_user' not in request.session:
return redirect(login)
else:
users = User.objects.filter(email=request.session['logged_user'])
if len(users) > 0:
user = users[0]
else:
return redirect(login)
if request.method == 'GET':
tax = 0
delivery = 0
if 'carrito' in request.session:
carrito = request.session['carrito']
total_carrito = 0
for item in carrito:
total_carrito += item['price']
tax = round(total_carrito / 5)
delivery = round(total_carrito / 10)
request.session['tax'] = tax
request.session['delivery'] = delivery
request.session['total_carrito'] = total_carrito + tax + delivery
context = {
'items' : Pizza.objects.all(),
'addresses' : Address.objects.filter(user=user),
'orders' : Order.objects.filter(user=user),
}
return render(request, f'{APP_NAME}/carrito.html', context)
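# Worked example (added): for a cart totaling 10000, ver_carrito charges
# tax = round(10000 / 5) = 2000 and delivery = round(10000 / 10) = 1000,
# leaving total_carrito = 13000.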
def del_item_carrito(request, carrito_id):
if 'logged_user' not in request.session:
return redirect(login)
else:
users = User.objects.filter(email=request.session['logged_user'])
if len(users) > 0:
user = users[0]
else:
return redirect(login)
if request.method == 'GET':
items = []
if 'carrito' in request.session:
carrito = request.session['carrito']
del request.session['carrito']
del request.session['total_carrito']
total_carrito = 0
for item_carrito in carrito:
if item_carrito['id_carrito'] == int(carrito_id):
                    print('removing item')
else:
items.append(item_carrito)
total_carrito += item_carrito['price']
request.session['total_carrito'] = total_carrito
request.session['carrito'] = items
return redirect(ver_carrito)
def del_pizza(request, pizza_id):
if 'logged_user' not in request.session:
return redirect(login)
else:
users = User.objects.filter(email=request.session['logged_user'])
if len(users) > 0:
user = users[0]
else:
return redirect(login)
if request.method == 'GET':
pizzas = Pizza.objects.filter(id=pizza_id)
print(pizza_id)
print(pizzas)
if len(pizzas) > 0:
pizza = pizzas[0]
if pizza.user == user:
pizza.delete()
                messages.error(request, 'Pizza deleted!')
else:
                messages.error(request, 'This pizza cannot be deleted')
return redirect('home')
def create_address(request):
if 'logged_user' not in request.session:
return redirect(login)
else:
users = User.objects.filter(email=request.session['logged_user'])
if len(users) > 0:
user = users[0]
else:
return redirect(login)
regiones = Region.objects.all()
if len(regiones) < 1:
region = Region.objects.create(name='Metropolitana')
cities = City.objects.all()
if len(cities) < 1:
city = City.objects.create(name='Santiago', region=Region.objects.get(name='Metropolitana'))
comunas = Comuna.objects.all()
if len(comunas) < 1:
comuna = Comuna.objects.create(name='Santiago Centro', city=City.objects.get(name='Santiago'))
comuna = Comuna.objects.create(name='La Reina', city=City.objects.get(name='Santiago'))
comuna = Comuna.objects.create(name='Providencia', city=City.objects.get(name='Santiago'))
comuna = Comuna.objects.create(name='Las Condes', city=City.objects.get(name='Santiago'))
comuna = Comuna.objects.create(name='Recoleta', city=City.objects.get(name='Santiago'))
comuna = Comuna.objects.create(name='Estacion Central', city=City.objects.get(name='Santiago'))
comuna = Comuna.objects.create(name='La Florida', city=City.objects.get(name='Santiago'))
comuna = Comuna.objects.create(name='Puente Alto', city=City.objects.get(name='Santiago'))
if request.method == 'GET':
context = {
'address_form' : AddressForm(),
'addresses' : Address.objects.filter(user=user),
}
return render(request, f'{APP_NAME}/address.html', context)
if request.method == 'POST':
print(request.POST)
address_form = AddressForm(request.POST)
if address_form.is_valid():
address = address_form.save(commit=False)
address.user = user
address.save()
else:
context = {
'address_form' : AddressForm(request.POST)
}
return render(request, f'{APP_NAME}/address.html', context)
return redirect('ver_carrito')
def make_purchases(request):
if 'logged_user' not in request.session:
return redirect(login)
else:
users = User.objects.filter(email=request.session['logged_user'])
if len(users) > 0:
user = users[0]
else:
return redirect(login)
if request.method == 'POST':
print(request.POST)
if 'carrito' in request.session:
carrito = request.session['carrito']
if len(carrito) > 0:
if 'addresses' not in request.POST:
                    messages.error(request, 'Select a delivery address')
else:
addresses = Address.objects.filter(id=int(request.POST['addresses']))
if len(addresses) > 0:
address = addresses[0]
order = Order()
order.user = user
order.address = address
order.total = request.session['total_carrito']
order.total_discount = 0
order.fee_delivery = request.session['delivery']
order.tax = request.session['tax']
order.save()
for item_carrito in carrito:
print(item_carrito)
if item_carrito['item'] == 'pizza':
pizza = Pizza.objects.get(id=item_carrito['id'])
details = DetailPizzaOrder()
details.order = order
details.quantity = 1
details.save()
details.all_pizzas.add(pizza)
if item_carrito['item'] == 'extra':
extra = Extra.objects.get(id=item_carrito['id'])
details = DetailExtraOrder()
details.order = order
details.quantity = 1
details.save()
details.all_extras.add(extra)
del request.session['carrito']
del request.session['total_carrito']
del request.session['tax']
del request.session['delivery']
else:
                    messages.error(request, 'Select a delivery address')
            else:
                messages.error(request, 'There are no items to place the order')
        else:
            messages.error(request, 'There are no items to place the order')
return redirect('ver_carrito')
def make_pizzas_data(request):
    # the first registered user is an Administrator
users = User.objects.all()
if len(users) < 1:
        messages.error(request, 'There are no registered users; register to become the Administrator, then preload the data.')
return redirect('register_pizza_app')
else:
user = User.objects.get(user_type=UserType.objects.get(type='admin'))
    # Create the ingredients
ing = Ingredient.objects.create(name='Tamaño', price=2000, discount=False, special_price=0, optional=True, orden=0, multiple_option=False)
opt = IngredientOption.objects.create(option='Extra Grande', price=1200, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Familiar', price=1000, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Mediana', price=900, discount=False, special_price=0, orden=0, ingredient=ing)
# Tipo de Masa
ing = Ingredient.objects.create(name='Tipo de Masa', price=2000, discount=False, special_price=0, optional=True, orden=0, multiple_option=False)
opt = IngredientOption.objects.create(option='Tradicional', price=2000, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Delgada', price=1500, discount=False, special_price=0, orden=0, ingredient=ing)
# Carnes
ing = Ingredient.objects.create(name='Carnes', price=2000, discount=False, special_price=0, optional=False, orden=0, multiple_option=True)
opt = IngredientOption.objects.create(option='Tocino', price=800, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Jamón', price=800, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Peperoni', price=800, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Pollo', price=800, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Carne', price=800, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Lomito', price=800, discount=False, special_price=0, orden=0, ingredient=ing)
# Vegetales
ing = Ingredient.objects.create(name='Vegetales', price=2000, discount=False, special_price=0, optional=False, orden=0, multiple_option=True)
opt = IngredientOption.objects.create(option='Tomate', price=600, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Champiñon', price=600, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Choclo', price=600, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Cebolla', price=600, discount=False, special_price=0, orden=0, ingredient=ing)
opt = IngredientOption.objects.create(option='Aceituna', price=600, discount=False, special_price=0, orden=0, ingredient=ing)
    # Create the Napolitana pizza
pizza = Pizza.objects.create(name='Napolitana', image='napolitana.jpeg', user=user, price=0, discount=False, special_price=0)
ingredient1 = IngredientOption.objects.filter(ingredient=Ingredient.objects.get(name='Tipo de Masa')).get(option='Tradicional')
ingredient2 = IngredientOption.objects.filter(ingredient=Ingredient.objects.get(name='Tamaño')).get(option='Familiar')
ingredient3 = IngredientOption.objects.filter(ingredient=Ingredient.objects.get(name='Carnes')).get(option='Jamón')
ingredient4 = IngredientOption.objects.filter(ingredient=Ingredient.objects.get(name='Vegetales')).get(option='Tomate')
pizza.all_ingredients.add(ingredient1)
pizza.all_ingredients.add(ingredient2)
pizza.all_ingredients.add(ingredient3)
pizza.all_ingredients.add(ingredient4)
pizza.price = ingredient1.price + ingredient2.price + ingredient3.price + ingredient4.price
pizza.save()
    # Create the Italiana pizza
pizza = Pizza.objects.create(name='Italiana', image='italiana.jpeg', user=user, price=0, discount=False, special_price=0)
ingredient1 = IngredientOption.objects.filter(ingredient=Ingredient.objects.get(name='Tipo de Masa')).get(option='Tradicional')
ingredient2 = IngredientOption.objects.filter(ingredient=Ingredient.objects.get(name='Tamaño')).get(option='Familiar')
ingredient3 = IngredientOption.objects.filter(ingredient=Ingredient.objects.get(name='Carnes')).get(option='Peperoni')
ingredient4 = IngredientOption.objects.filter(ingredient=Ingredient.objects.get(name='Vegetales')).get(option='Champiñon')
ingredient5 = IngredientOption.objects.filter(ingredient=Ingredient.objects.get(name='Vegetales')).get(option='Cebolla')
pizza.all_ingredients.add(ingredient1)
pizza.all_ingredients.add(ingredient2)
pizza.all_ingredients.add(ingredient3)
pizza.all_ingredients.add(ingredient4)
pizza.all_ingredients.add(ingredient5)
pizza.price = ingredient1.price + ingredient2.price + ingredient3.price + ingredient4.price + ingredient5.price
pizza.save()
return redirect('home')
def make_extras_data(request):
users = User.objects.all()
if len(users) < 1:
        messages.error(request, 'There are no registered users; register to become the Administrator, then preload the data.')
return redirect('register_pizza_app')
extra1 = Extra.objects.create(name='Alitas de Pollo', price=4500, image='alitas.jpeg', discount=False, special_price=0)
extra2 = Extra.objects.create(name='Palitos de Ajo', price=2800, image='palitos.jpeg', discount=False, special_price=0)
return redirect('home')
def del_data(request):
    Order.objects.all().delete()
    Extra.objects.all().delete()
    Pizza.objects.all().delete()
    Ingredient.objects.all().delete()
    Region.objects.all().delete()
    Address.objects.all().delete()
    User.objects.all().delete()
    UserType.objects.all().delete()
del request.session['logged_user']
del request.session['logged_user_name']
del request.session['logged_perfil']
del request.session['carrito']
del request.session['total_carrito']
return redirect('home')
|