text
stringlengths
8
6.05M
from random import randint

from django.core.management.base import BaseCommand
from faker import Faker

from homepage.models import Student, Subject


class Command(BaseCommand):
    """
    Management command that generates fake Student rows and inserts them
    into the DB.

    By default 10 students are generated; the amount can be set with the
    argument '-a' or '--am'.
    """

    help = 'Generate and add new student(s) to the DB using Faker'

    def add_arguments(self, parser):
        # Number of students to generate (default 10).
        parser.add_argument('-a', '--am', type=int, default=10)

    def handle(self, *args, **options):
        faker = Faker()
        self.stdout.write('Start generating and inserting Students')
        # The subject is identical for every generated student, so fetch or
        # create it ONCE instead of hitting the DB on every loop iteration
        # (the original also logged 'Start inserting Students' per iteration).
        subject, _ = Subject.objects.get_or_create(title='Python')
        for _ in range(options['am']):
            student = Student()
            student.name = faker.first_name()
            student.surname = faker.last_name()
            student.age = randint(20, 40)
            student.sex = faker.simple_profile()['sex']
            student.address = faker.address()
            student.description = faker.text()
            student.birthday = faker.date_of_birth()
            student.email = faker.email()
            student.subject = subject
            student.save()
        self.stdout.write('End inserting Students')
from flask import jsonify, request, current_app, g, url_for
from ..models import Bucketlist, Item
from .authentication import auth
from .. import db
from . import api
from .pagination import paginate
from .response import unauthorized, forbidden, custom_response, not_found


# Bucketlist collection endpoint
@api.route('/bucketlist/', methods=['POST','GET'])
@auth.login_required
def bucketlists():
    """Handle GET and POST requests for the /v1/bucketlist endpoint.

    POST: creates a bucket list from the JSON key 'bucketlist_name',
          returning 201 and a success message.
    GET:  returns 200 and a paginated list of the current user's
          bucket lists.
    """
    if request.method == 'POST':
        name = request.json.get('bucketlist_name')
        bucketlist = Bucketlist(name=name)
        bucketlist.create()
        bucketlist.save()
        return custom_response("Successfully created {}".format(name),201)
    if request.method == 'GET':
        user = g.current_user
        # copy() gives a mutable dict of query-string args (name, limit, page)
        options = request.args.copy()
        # paginate handles both pagination and querying by name/limit
        bucket_lists = paginate(user.bucketlists,'api.bucketlists',options,Bucketlist)
        bucket_lists['created_by'] = user.email
        response = jsonify(bucket_lists)
        response.status_code = 200
        return response


# Single bucket list endpoint
@api.route('/bucketlist/<int:id>', methods=['GET','PUT','DELETE'])
@auth.login_required
def manage_bucketlist(id):
    """Fetch (GET), rename (PUT) or delete (DELETE) bucket list `id`.

    Returns 404 when the bucket list does not exist and 401 when it is
    not owned by the authenticated user.
    """
    bucketlist = Bucketlist.query.filter_by(id=id).first()
    if bucketlist is None:
        # checks for existence
        return not_found("Bucketlist not found")
    if g.current_user.id != bucketlist.user_id:
        # only the owner may access this bucket list
        return unauthorized("You Dont Have Access to this resouce")
    if request.method == 'GET':
        # serialize the bucket list together with its items
        bucketlist = bucketlist.to_json()
        response = jsonify(bucketlist)
        response.status_code = 200
        return response
    if request.method == 'PUT':
        # rename using the JSON property "bucketlist_name"
        # NOTE(review): 201 is usually reserved for creation — an update
        # would conventionally return 200; confirm before changing.
        name = request.json.get("bucketlist_name")
        bucketlist.edit(name)
        bucketlist.save()
        return custom_response("Successfully updated {}".format(name),201)
    if request.method == 'DELETE':
        # NOTE(review): a 204 response should carry no body per RFC 7231;
        # clients may never see this message — confirm intended status.
        bucketlist.delete()
        return custom_response("Sucessfully deleted {}".format(bucketlist.name),204)


# Bucketlist item endpoint
@api.route('/bucketlist/<int:id>/items', methods=['GET','POST'])
@auth.login_required
def bucketlist_items(id):
    """Handle the items of bucket list `id` (/v1/bucketlist/:id/items).

    POST: creates an item from the JSON key 'item_name' (201).
    GET:  returns 200 and the paginated items of the bucket list.
    """
    bucketlist = Bucketlist.query.filter_by(id=id).first()
    if bucketlist is None:
        # checks if the bucketlist is available
        return not_found("This bucket list is not available")
    if g.current_user.id != bucketlist.user_id:
        # only the owner may access items of this bucket list
        return unauthorized("You dont have Access to this resource")
    if request.method == 'POST':
        # create a new item that belongs to this bucket list
        name = request.json.get("item_name")
        item = Item(name=name)
        item.bucketlist_id = bucketlist.id
        item.save()
        return custom_response("Successfully created item {}".format(item.name), 201)
    if request.method == 'GET':
        options = request.args.copy()
        # paginated items under this bucket list
        bucket_items = paginate(bucketlist.items,'api.bucketlist_items',options,Item,id)
        bucket_items['name'] = bucketlist.name
        bucket_items['created_by'] = bucketlist.owned_by.email
        response = jsonify(bucket_items)
        response.status_code = 200
        return response
# Split a concatenated PEM CA bundle into one numbered file per
# certificate (0001.pem, 0002.pem, ...).  Certificates in the bundle are
# assumed to be separated by a blank line.
bundle_path = "../tls-ca-bundle.pem"

with open(bundle_path, 'r') as bundle_file:
    certificates = bundle_file.read().split('\n\n')

for index, certificate in enumerate(certificates, start=1):
    with open('%04d.pem' % index, 'w') as out_file:
        out_file.write(certificate)
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue May 28 14:51:10 2019 @author: kj22643 """ %reset import numpy as np import pandas as pd import os import scanpy as sc import seaborn as sns from plotnine import * path = '/Users/kj22643/Documents/Documents/231_Classifier_Project/data' #path = '/stor/scratch/Brock/231_10X_data/' os.chdir(path) sc.settings.figdir = 'KJ_plots' sc.set_figure_params(dpi_save=300) sc.settings.verbosity = 3 # Try loading in Eric's data frame to see if we can start to perform some # matrix operations on the gene expression data set. #%% adataEB = sc.read('231_all_samples_post-leiden-clustering.h5ad') survlins = adataEB.obs.loc[(adataEB.obs['sample'].isin(['107Aziz','113Aziz']))&(adataEB.obs.lineage!='nan'),'lineage'].unique().tolist() adataEB.obs.loc[adataEB.obs.lineage.isin(survlins)==False,'survivor'] = 'sens' adataEB.obs.loc[adataEB.obs.lineage.isin(survlins)==True,'survivor'] = 'res' print(adataEB.obs['survivor']) #%% Look at what the smaples are called to see what the pre-treatment sample is called print(adataEB.obs) #%% Take only the TP0 anndata object # made up 3182 cells (rows) with adata_subset = adataEB[adataEB.obs['sample']=='Doxneg', :] #%% View the subset # 3182 cells with 22191 genes print(adata_subset) #%% View just the survivor info print(adata_subset.obs['survivor']) # this #%% Make a data frame with the raw gene expression matrix # each column is a gene, each row is a cell df = pd.concat([adata_subset.obs['survivor'], pd.DataFrame(adata_subset.raw.X,index=adata_subset.obs.index, columns=adata_subset.var_names),],axis=1) #%% print(df) # 22192 columns corresponding to 22191 genes #%% labelsdf = df['survivor'] print(labelsdf) #%% genematdf = df.loc[:, df.columns != 'survivor'] print(genematdf) #%%
# Interactive text menu that dispatches to the LVM helper functions
# (lvm_1 .. lvm_9, text) defined in the local `lvm` module.  Each branch
# matches either descriptive keywords in the user's input or the bare
# menu number.
# load required modules
import os
import subprocess as sp
import getpass
import lvm
from lvm import *

while True:
    lvm()  # redraw the menu
    y = input("Tell me what I can do for you: ").lower()
    if (("view" in y) and (("volume" in y) or ("storage" in y) or ("block" in y) or ("harddisk" in y) or ("hd" in y)) or ("1" in y)):
        lvm_1()
    # FIX: this branch previously tested the always-truthy literal
    # ("create") instead of ("create" in y), so ANY input containing
    # "physical" or "pv" selected option 2 even without the word "create".
    elif (("create" in y) and (("physical" in y) or ("pv" in y)) or ("2" in y)):
        lvm_2()
    elif ((("view" in y) or ("detail" in y)) and (("pv" in y) or ("physical" in y)) or ("3" in y)):
        lvm_3()
    elif (("create" in y) and (("group" in y) or ("vg" in y)) or ("4" in y)):
        lvm_4()
    elif ((("view" in y) or ("detail" in y)) and (("vg" in y) or ("group" in y)) or ("5" in y)):
        lvm_5()
    elif (("create" in y) and (("logical" in y) or ("lv" in y)) or ("6" in y)):
        lvm_6()
    elif ((("view" in y) or ("detail" in y)) and (("lv" in y) or ("logical" in y)) or ("7" in y)):
        lvm_7()
    elif (("extend" in y) and (("logical" in y) or ("lv" in y)) or ("8" in y)):
        lvm_8()
    elif (("extend" in y) and (("group" in y) or ("vg" in y)) or ("9" in y)):
        lvm_9()
    elif (("exit" in y) or ("close" in y)):
        text("2")
        print("\nThankyou! Meet you next time :)\n")
        text("7")
        break
    else:
        print("\nI can't understand you! Seems like a wrong input")
        text("6")
    # Pause after every action before redrawing the menu.
    # NOTE(review): placement reconstructed from a collapsed source — this
    # pause may originally have lived inside the final else branch; confirm.
    input("\nPress ENTER to Continue...")
    text("7")
#!/usr/bin/env python3 import time import datetime import subprocess import RPi.GPIO as GPIO but_pin = 13 if __name__ == '__main__': try: print("setup") GPIO.setwarnings(False) GPIO.setmode(GPIO.BCM) GPIO.setup(but_pin, GPIO.IN) time.sleep(0.1) waspressed = False while GPIO.input(but_pin) == GPIO.HIGH: GPIO.setup(but_pin, GPIO.IN) waspressed = True print("waiting for initial released state") time.sleep(1) if waspressed: print("release has occured") while True: if GPIO.input(but_pin) == GPIO.HIGH: heldcnt = 0 heldlimit = 400 print("press") subprocess.run("sync") # flush cache to disk while GPIO.input(but_pin) == GPIO.HIGH and heldcnt < heldlimit: heldcnt += 1 time.sleep(0.01) if heldcnt >= heldlimit: print("shutdown") subprocess.run("shutdown now", shell=True) quit() while True: # do nothing, wait for shutdown time.sleep(1) else: print("short press") else: time.sleep(1) finally: try: print("cleanup") GPIO.cleanup() except: print("exception while cleaning up")
from RSBIDE.common.config import config

# Debug channels (first argument to debug()) that are never printed.
IGNORE = ["dbHelp"]


def log(*args):
    """Print *args* with the "RSB" prefix when logging is enabled."""
    if not config["LOG"]:
        return
    print("RSB\t", *args)


def debug(*args):
    """Print a debug message with the "RSB" prefix.

    Active only when config["DEBUG"] is exactly True and the first
    argument is not an ignored channel.
    """
    if config["DEBUG"] is True and args[0] not in IGNORE:
        print("RSB\t", *args)


def warn(*args):
    """Unconditionally print *args* with the warning prefix."""
    print("RSB -WARNING-\t", *args)
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""Demonstrates how instance __dict__, class __dict__ and __bases__
evolve as attributes are created by method calls and direct assignment."""


class superClass(object):
    """Base class whose hello() creates an instance attribute lazily."""

    def hello(self):
        # data1 only appears in the instance __dict__ after this call.
        self.data1 = 'spam'


class subClass(superClass):
    """Subclass that adds its own lazily-created attribute."""

    def hola(self):
        self.data2 = 'eggs'


if __name__ == "__main__":
    inst = subClass()
    print(inst.__dict__)
    print(inst.__class__)
    print(subClass.__bases__)
    print(superClass.__bases__)
    print('1----------------------------------------')
    inst.hello()
    print(inst.__dict__)
    print('2----------------------------------------')
    inst.hola()
    print(inst.__dict__)
    print('3----------------------------------------')
    print(subClass.__dict__.keys())
    print(superClass.__dict__.keys())
    print('4----------------------------------------')
    other = subClass()
    print(other.__dict__)
    print('5----------------------------------------')
    print(inst.data1, inst.__dict__['data1'])
    print('6----------------------------------------')
    inst.data3 = 'toast'
    print(inst.__dict__)
    print('7----------------------------------------')
    inst.__dict__['data3'] = 'ham'
    print(inst.data3)
import numpy as np


def xy(vect):
    """Elementwise product of component 0 and component 1 of *vect*
    along its third axis."""
    first = vect[:, :, 0]
    second = vect[:, :, 1]
    return first * second


def xz(vect):
    """Elementwise product of component 0 and component 2 of *vect*
    along its third axis."""
    first = vect[:, :, 0]
    third = vect[:, :, 2]
    return first * third


def inject_constant(row):
    """
    Silly function that returns 1.

    :param row: Not meaningful
    :return: Returns the int 1
    """
    return 1


def cos_t(freq):
    """Return a function computing cos(freq * last component) of its
    argument along the third axis."""
    def evaluate(vect):
        return np.cos(freq * vect[:, :, -1])

    return evaluate


def x_cubed(vect):
    """Cube of component 0 of *vect* along its third axis."""
    component = vect[:, :, 0]
    return component ** 3
# Basic Interactive Plot Example Requires `wrf-python` and `ipywidgets` %matplotlib inline import matplotlib.pyplot as plt import numpy as np from datetime import datetime from netCDF4 import Dataset from wrf import getvar, interplevel, ALL_TIMES from ipywidgets import interactive # Open a netCDF data file using xarray default engine and load the data into xarrays path = '/expanse/lustre/projects/uic406/snesbitt/em_quarter_ss/mp10/' ncfile = Dataset('wrfout_d01_0001-01-01_00:00:00') # Extract slices of lon and lat # Read in data from netCDF file. # Note that when we extract ``u`` and ``v`` from the file, # Extract the Geopotential Height and wind fields z = getvar(ncfile, "z", timeidx = ALL_TIMES) ua = getvar(ncfile, "ua", timeidx = ALL_TIMES) va = getvar(ncfile, "va", timeidx = ALL_TIMES) wa = getvar(ncfile, "wa", timeidx = ALL_TIMES) def plot_interact(tindex, level): ua_interp = interplevel(ua, z, level) va_interp = interplevel(va, z, level) wa_interp = interplevel(wa, z, level) fig1, ax1 = plt.subplots(figsize=(12, 10)) cb = ax1.contourf(z['west_east'].values, z['south_north'].values, wa_interp.isel(Time=tindex).values,levels=np.arange(-30,30,0.5),cmap='Spectral_r') Q = ax1.quiver(z['west_east'].values, z['south_north'].values, ua_interp.isel(Time=tindex).values, va_interp.isel(Time=tindex).values,pivot='middle',color='black', units='width',width=0.0007,headwidth=10) qk = ax1.quiverkey(Q, 0.92, .95, 5, r'$5 \frac{m}{s}$', labelpos='E', coordinates='figure') cb = plt.colorbar(cb, shrink=0.5, title='Vertical wind (m/s)') ax1.set_title('Vertical motion (m/s) and winds (m/s) at time='+str(tindex)+' and level='+str(level)) plt.tight_layout() plt.show() interactive_plot = interactive(plot_interact, tindex=(0, 13), level=(0, 20000, 500)) output = interactive_plot.children[-1] output.layout.height = '500px' interactive_plot plt.quiver(x,ua_interp.values, va_interp.values) x,y = np.meshgrid(z.west_east.values,z.south_north.values) x y
# Bot API token (placeholder — replace with the real token before running).
# NOTE(review): presumably a Telegram bot token, given the botan.io
# statistics token below — confirm against the bot setup docs.
TOKEN = 'TOKEN'
BOTAN_TOKEN = ''  # Optional: Add a botan.io token if you want bot statistics
"""Collection of "spells": functions that inspect the AST of their own
call site (via a ``frame_info`` injected by ``sorcery.core.spell``) to
offer magical conveniences such as unpacking dict keys into variables
named after those keys."""

from __future__ import generator_stop

import ast
import operator
import sys
import timeit as real_timeit
import unittest
from functools import lru_cache
from inspect import signature
from io import StringIO
from itertools import chain
from pprint import pprint
from textwrap import dedent

import wrapt
from littleutils import only

from sorcery.core import spell, node_names, node_name

# Sentinel distinguishing "no default supplied" from default=None.
_NO_DEFAULT = object()

PYPY = 'pypy' in sys.version.lower()


@spell
def assigned_names(frame_info):
    """
    Return a tuple of strings naming the targets being assigned to in
    the statement containing the call, e.g.::

        foo, bar = map(func, assigned_names())   # -> ('foo', 'bar')

    Targets may be plain variables, attributes, or subscripts with
    string-literal keys.  Only simple tuple unpacking is supported: no
    nesting, no stars, no chained assignment, no single-name target.
    """
    return frame_info.assigned_names()[0]


@spell
def unpack_keys(frame_info, x, default=_NO_DEFAULT, prefix=None, swapcase=False):
    """
    Unpack dict values into variables named after the keys::

        foo, bar = unpack_keys(d)        # foo = d['foo']; bar = d['bar']

    ``default`` makes missing keys yield that value instead of raising,
    ``prefix`` is prepended to each key (not swapcased), and
    ``swapcase`` swaps the case of the looked-up keys.  Also works as
    the iterable of a for loop (including comprehensions), unpacking
    each element in turn.  The rules of assigned_names apply.
    """
    if default is _NO_DEFAULT:
        getter = operator.getitem
    else:
        # Essentially dict.get, without relying on that method existing
        def getter(d, name):
            try:
                return d[name]
            except KeyError:
                return default

    return _unpack(frame_info, x, getter, prefix, swapcase)


@spell
def unpack_attrs(frame_info, x, default=_NO_DEFAULT, prefix=None, swapcase=False):
    """
    This is similar to unpack_keys, but for attributes::

        foo, bar = unpack_attrs(x)       # foo = x.foo; bar = x.bar
    """
    if default is _NO_DEFAULT:
        getter = getattr
    else:
        def getter(d, name):
            return getattr(d, name, default)

    return _unpack(frame_info, x, getter, prefix, swapcase)


def _unpack(frame_info, x, getter, prefix, swapcase):
    """Shared implementation of unpack_keys/unpack_attrs: map the
    assigned target names through getter(x, name)."""
    names, node = frame_info.assigned_names(allow_loops=True)

    def fix_name(n):
        # swapcase is applied to the name but never to the prefix.
        if swapcase:
            n = n.swapcase()
        if prefix:
            n = prefix + n
        return n

    if isinstance(node, ast.Assign):
        return [getter(x, fix_name(name)) for name in names]
    else:
        # for loop target: lazily unpack each element of the iterable x
        return ([getter(d, fix_name(name)) for name in names]
                for d in x)


@spell
def args_with_source(frame_info, *args):
    """
    Return a list of (source_code, value) pairs, one per argument::

        args_with_source(foo(), 1+2)  ->  [("foo()", foo()), ("1+2", 3)]
    """
    return [
        (frame_info.get_source(arg), value)
        for arg, value in zip(frame_info.call.args, args)
    ]


@spell
def dict_of(frame_info, *args, **kwargs):
    """
    Build a dict where positional arguments use their own names as keys::

        dict_of(spam, x.foo, y['bar'])
        # == dict(spam=spam, foo=x.foo, bar=y['bar'])

    Keyword arguments behave as in the normal dict constructor.
    *args unpacking is not allowed.
    """
    result = {
        # Slice from the end so injected frame_info & co. are skipped.
        node_name(arg): value
        for arg, value in zip(frame_info.call.args[-len(args):], args)
    }
    result.update(kwargs)
    return result


@spell
def print_args(frame_info, *args, file=None):
    """
    For each argument, print the source code of that argument and its
    (pretty-printed) value.  Returns the first argument.
    """
    for source, arg in args_with_source.at(frame_info)(*args):
        print(source + ' =', file=file)
        pprint(arg, stream=file)
        print(file=file)
    # Empty args -> returns () (falsy), else the first argument.
    return args and args[0]


@spell
def call_with_name(frame_info, func):
    """
    Inside a class body, generate delegating methods named after the
    assignment targets::

        foo, bar = call_with_name(generic)
        # foo(self, *a) == self.generic('foo', *a), etc.

    Only works for methods inside classes, not free functions.
    """
    def make_func(name):
        return lambda self, *args, **kwargs: func(self, name, *args, **kwargs)

    return [
        make_func(name)
        for name in frame_info.assigned_names()[0]
    ]


@spell
def delegate_to_attr(frame_info, attr_name):
    """
    Inside a class body, generate properties that forward to an
    attribute of the same name on ``self.<attr_name>``::

        foo, bar = delegate_to_attr('thing')
        # obj.foo == obj.thing.foo, obj.bar == obj.thing.bar
    """
    def make_func(name):
        return property(lambda self: getattr(getattr(self, attr_name), name))

    return [
        make_func(name)
        for name in frame_info.assigned_names()[0]
    ]


class _Nothing(object):
    """Absorbing placeholder used by maybe(): swallows a fixed number of
    attribute accesses / calls / subscripts, then yields None."""

    def __init__(self, count):
        # Number of chained operations left to absorb.
        self.__count = count

    def __getattribute__(self, item):
        if item == '_Nothing__count':
            return object.__getattribute__(self, item)
        return _Nothing.__op(self)

    def __op(self, *_args, **_kwargs):
        self.__count -= 1
        if self.__count == 0:
            return None
        return self

    __getitem__ = __call__ = __op


@spell
def maybe(frame_info, x):
    """
    Null-propagation: ``maybe(foo).bar()`` is ``foo.bar()`` when foo is
    not None, else None.  Any chain of attributes, subscripts or calls
    immediately to the right of maybe(...) is skipped when x is None;
    enclosing expressions are unaffected.
    """
    if x is not None:
        return x
    # Count how many chained operations follow the call in the AST so
    # _Nothing knows when to finally produce None.
    node = frame_info.call
    count = 0
    while True:
        parent = node.parent
        if not (isinstance(parent, ast.Attribute)
                or isinstance(parent, ast.Call) and parent.func is node
                or isinstance(parent, ast.Subscript) and parent.value is node):
            break
        count += 1
        node = parent
    if count == 0:
        return x
    return _Nothing(count)


if PYPY:
    # __getattribute__-based absorption doesn't work reliably on pypy.
    def maybe(_):
        raise NotImplementedError("maybe isn't supported on pypy`")


@spell
def select_from(frame_info, sql, params=(), cursor=None, where=None):
    """
    Build and run a SELECT whose column list comes from the assigned
    names at the call site::

        for foo, bar in select_from('my_table', where=[spam, thing]):
            ...

    - ``sql`` goes after FROM (usually just a table name).
    - ``where`` must be a list/tuple literal; its element names form an
      ``=``/``AND`` WHERE clause and its values become the parameters
      (don't also pass ``params`` or a WHERE clause in ``sql``).
    - ``cursor`` is pulled from the calling frame when not given.
    - In a loop all rows are yielded; in an assignment one row is
      fetched.  A single selected column is unpacked automatically.
    """
    if cursor is None:
        # Heuristic: find the one local/global whose type's MRO mentions
        # "cursor" and which has a callable execute().
        frame = frame_info.frame
        cursor = only(c for c in chain(frame.f_locals.values(),
                                       frame.f_globals.values())
                      if 'cursor' in str(type(c).__mro__).lower()
                      and callable(getattr(c, 'execute', None)))
    names, node = frame_info.assigned_names(allow_one=True, allow_loops=True)
    sql = 'SELECT %s FROM %s' % (', '.join(names), sql)
    if where:
        # Recover the names used in the `where=[...]` literal from the AST.
        where_arg = only(kw.value for kw in frame_info.call.keywords
                         if kw.arg == 'where')
        where_names = node_names(where_arg)
        assert len(where_names) == len(where)
        sql += ' WHERE ' + ' AND '.join('%s = ?' % name for name in where_names)
        assert params == ()
        params = where
    cursor.execute(sql, params)

    def unpack(row):
        # Single selected column -> return the bare value.
        if len(row) == 1:
            return row[0]
        else:
            return row

    if isinstance(node, ast.Assign):
        return unpack(cursor.fetchone())
    else:
        def vals():
            for row in cursor:
                yield unpack(row)
        return vals()


def magic_kwargs(func):
    """
    Decorator letting callers pass positional arguments that are turned
    into keyword arguments named after themselves::

        @magic_kwargs
        def func(**kwargs): ...
        func(foo, bar, spam=thing)   # == func(foo=foo, bar=bar, spam=thing)

    The function may not have optional positional parameters or *args.
    """
    # Count the leading positional parameters that must stay positional.
    args_count = 0
    for param in signature(func).parameters.values():
        if (param.kind == param.VAR_POSITIONAL or
                param.kind == param.POSITIONAL_OR_KEYWORD and
                param.default != param.empty):
            raise TypeError(
                'The type of the parameter %s is not allowed with @magic_kwargs'
                % param.name)
        if param.kind == param.POSITIONAL_OR_KEYWORD:
            args_count += 1

    @wrapt.decorator
    def wrapper(wrapped, instance, args, kwargs):
        frame_info, *args = args
        count = args_count - (instance is not None)  # account for self argument
        normal_args = args[:count]
        magic_args = args[count:]
        # Reuse dict_of at the ORIGINAL call site to name the magic args.
        full_kwargs = dict_of.at(frame_info)(*magic_args, **kwargs)
        return wrapped(*normal_args, **full_kwargs)

    return spell(wrapper(func))


@spell
def switch(frame_info, val, _cases, *, default=_NO_DEFAULT):
    """
    Lazily-evaluated switch emulating an if/elif chain::

        x = switch(val, lambda: {
            1: 1,
            {{ 2, bar() }}: spam(),          # multiple cases, one value
            dangerous_function(): spam() * 2
        }, default=-1)

    No dict is ever built: keys are evaluated in order until one
    compares equal to ``val`` (plain ==, no hashing), then only the
    matching value is evaluated.  ``{{ a, b }}`` (a set in a set) marks
    alternative keys for one value.  Without ``default`` a KeyError is
    raised when nothing matches.  Evaluation happens under
    ``from __future__ import generator_stop``.
    """
    frame = frame_info.frame
    # Compiled (and cached) key/value code objects from the lambda's AST.
    switcher = _switcher(frame_info.call.args[1], frame.f_code)

    def ev(k):
        return eval(k, frame.f_globals, frame.f_locals)

    def check(k):
        return ev(k) == val

    for key_code, value_code in switcher:
        if isinstance(key_code, tuple):
            # {{ a, b }} alternatives: lazily test each in order.
            test = any(map(check, key_code))
        else:
            test = check(key_code)
        if test:
            return ev(value_code)
    if default is _NO_DEFAULT:
        raise KeyError(val)
    else:
        return default


@lru_cache()
def _switcher(cases, f_code):
    """Validate the switch lambda and compile each dict key/value AST to
    a code object, caching per (lambda node, enclosing code)."""
    if not (isinstance(cases, ast.Lambda) and isinstance(cases.body, ast.Dict)):
        raise TypeError('The second argument to switch must be a lambda with no arguments '
                        'that returns a dictionary literal')

    def comp(node):
        return compile(ast.Expression(node), filename=f_code.co_filename, mode='eval')

    result = []
    for key, value in zip(cases.body.keys, cases.body.values):
        if (isinstance(key, ast.Set) and
                isinstance(key.elts[0], ast.Set)):
            # {{ a, b }} -> tuple of compiled alternative keys
            key_code = tuple(comp(k) for k in key.elts[0].elts)
        else:
            key_code = comp(key)
        result.append((key_code, comp(value)))
    return result


def _raise(e):
    # for tests
    raise e


class TimerWithExc(real_timeit.Timer):
    """timeit.Timer that primes linecache on failure so later tracebacks
    show the timed source lines."""

    def timeit(self, *args, **kwargs):
        try:
            return super().timeit(*args, **kwargs)
        except:
            # Sets up linecache for future tracebacks
            self.print_exc(StringIO())
            raise


@spell
def timeit(frame_info, repeat=5):
    """
    Compare the speed of the two branches of the enclosing if statement::

        if sorcery.timeit():
            result = min(nums)
        else:
            result = sorted(nums)[0]

    Uses the standard timeit machinery; auto-picks a big enough
    ``number``, asserts any ``result`` variable is equal in both
    branches, and prints per-round and best times.  Must be called at
    module top level (the module globals become the setup imports).
    """
    globs = frame_info.frame.f_globals
    if globs is not frame_info.frame.f_locals:
        _raise(ValueError('Must execute in global scope'))
    # Make every current global importable from the timed snippets.
    setup = 'from %s import %s\n' % (
        globs['__name__'],
        ', '.join(globs.keys()),
    )
    if_stmt = frame_info.call.parent
    stmts = [
        dedent('\n'.join(map(frame_info.get_source, lines)))
        for lines in [if_stmt.body, if_stmt.orelse]
    ]
    timers = [
        TimerWithExc(stmt, setup)
        for stmt in stmts
    ]

    # Check for exceptions
    for timer in timers:
        timer.timeit(1)

    # Compare results
    def get_result(stmt):
        ns = {}
        exec(setup + stmt, ns)
        return ns.get('result')

    unittest.TestCase('__init__').assertEqual(
        *map(get_result, stmts),
        '\n=====\nThe two methods yielded different results!'
    )

    # determine number so that 1 <= total time < 3
    number = 1
    for i in range(22):
        number = 3 ** i
        if timers[0].timeit(number) >= 1:
            break
    print('Number of trials:', number)
    print()

    def print_time(idx, el):
        print('Method {}: {:.3f}'.format(
            idx + 1, el))

    times = [[] for _ in timers]
    for _ in range(repeat):
        for i, timer in enumerate(timers):
            elapsed = timer.timeit(number)
            print_time(i, elapsed)
            times[i].append(elapsed)
        print()

    print('Best times:')
    print('-----------')
    for i, elapsed_list in enumerate(times):
        print_time(i, min(elapsed_list))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Print the contents of the current directory in a DOS-``dir``-like
format: modification time, directory flag, size in bytes and name."""
import os
from datetime import datetime

__author__ = 'Hanzhiyun'

pwd = os.getcwd()
print("Last Modified Size\Byte Name")
print('----------------------------------------------')
for entry in os.listdir(pwd):
    size_bytes = os.path.getsize(entry)
    modified = datetime.fromtimestamp(os.path.getmtime(entry)).strftime('%Y-%m-%d %H:%M')
    marker = '<DIR>' if os.path.isdir(entry) else ' '
    print("%s %s %12d %s" % (modified, marker, size_bytes, entry))
# Windows-style "press any key" pause; a no-op failure on other OSes.
os.system('pause')
""" This type stub file was generated by pyright. """ from typing import Any, Dict import apispec from flask.app import Flask from webargs.flaskparser import abort from .spec import APISpecMixin from .blueprint import Blueprint from .pagination import Page from .error_handler import ErrorHandlerMixin """Api extension initialization""" __version__ = "0.25.0" class Api(APISpecMixin, ErrorHandlerMixin): """Main class Provides helpers to build a REST API using Flask. :param Flask app: Flask application :param dict spec_kwargs: kwargs to pass to internal APISpec instance The ``spec_kwargs`` dictionary is passed as kwargs to the internal APISpec instance. **flask-smorest** adds a few parameters to the original parameters documented in :class:`apispec.APISpec <apispec.APISpec>`: :param apispec.BasePlugin flask_plugin: Flask plugin :param apispec.BasePlugin marshmallow_plugin: Marshmallow plugin :param list|tuple extra_plugins: List of additional ``BasePlugin`` instances :param str title: API title. Can also be passed as application parameter `API_TITLE`. :param str version: API version. Can also be passed as application parameter `API_VERSION`. :param str openapi_version: OpenAPI version. Can also be passed as application parameter `OPENAPI_VERSION`. This allows the user to override default Flask and marshmallow plugins. For more flexibility, additional spec kwargs can also be passed as app parameter `API_SPEC_OPTIONS`. """ spec: apispec.APISpec = ... def __init__(self, app=..., *, spec_kwargs: Dict[str, Any] = ...) -> None: ... def init_app(self, app: Flask, *, spec_kwargs: Dict[str, Any] = ...) -> None: """Initialize Api with application :param dict spec_kwargs: kwargs to pass to internal APISpec instance. Updates ``spec_kwargs`` passed in ``Api`` init. """ ... 
def register_blueprint(self, blp: Blueprint, **options: Dict[str, Any]) -> None: """Register a blueprint in the application Also registers documentation for the blueprint/resource :param Blueprint blp: Blueprint to register :param dict options: Keyword arguments overriding Blueprint defaults Must be called after app is initialized. """ ...
# 1-based alphabet position for every lowercase ASCII letter ('a' -> 1 ... 'z' -> 26).
dicts = {chr(code): code - ord('a') + 1 for code in range(ord('a'), ord('z') + 1)}


def letter_values(name):
    """Return the alphabet positions of the ASCII letters in *name*.

    Matching is case-insensitive, and characters with no entry in ``dicts``
    (spaces, hyphens, digits, ...) are skipped.  The original loop did
    ``dicts[i]`` directly and raised KeyError on any uppercase letter or
    space — i.e. on virtually every real name typed at the prompt.
    """
    return [dicts[ch] for ch in name.lower() if ch in dicts]


if __name__ == '__main__':
    your_input = input('Input your name here: ')
    print(your_input)
    numeric_value = letter_values(your_input)
    true_value = sum(numeric_value)
    print(numeric_value)
    print('This is the value of your name', true_value)
""" Write a python program which finds the maximum number from num1 to num2 (num2 inclusive) based on the following rules. Always num1 should be less than num2 Consider each number from num1 to num2 (num2 inclusive). Populate the number into a list, if the below conditions are satisfied Sum of the digits of the number is a multiple of 3 Number has only two digits Number is a multiple of 5 Display the maximum element from the list In case of any invalid data or if the list is empty, display -1. """ #PF-Assgn-28 def find_max(num1, num2): max_num=-1 l=[max_num] # Write your logic here if num1<num2: for i in range(num1,num2+1): if len(str(i))==2: sum=0 for num in str(i): sum=sum+int(num) if sum%3==0: if i%5==0: l.append(i) return max(l) #Provide different values for num1 and num2 and test your program. max_num=find_max(3,30) print(max_num)
# Python 2 / pylab figure-generation script: reads robot-morphology and GRN data
# files produced elsewhere and renders the paper figures into ../Images/.
# NOTE(review): star imports from numpy/pylab/scipy shadow builtins; kept as-is.
from numpy import *
from pylab import *
from scipy import *
from os.path import *

# Number of evolutionary runs per experimental condition.
numRuns = 500;


# Read Robot_Matrix.txt (one segment per line, 5 columns) into an (n, 5) array.
def Get_Matrix():
    fileName = 'Robot_Matrix.txt';
    f = open(fileName,'r');
    numSegments = 0;
    # First pass: count lines to size the matrix.
    for line in f.readlines():
        numSegments = numSegments + 1;
    f.close();
    M = zeros((numSegments,5),dtype='f');
    i = 0;
    # Second pass: parse the values.
    f = open(fileName,'r');
    for line in f.readlines():
        line = line.split();
        for j in range(0,len(line)):
            print i,j,line[j];
            M[i,j] = float(line[j]);
        i = i + 1;
    f.close();
    return M;


# Draw one robot pose: a segment per row of M, fading with time step k.
# Columns of M: parent index, length, angle, x, y (per the reads below).
def Draw_Robot(M,k,numSteps,markerSize):
    ln = size(M,0);
    wd = size(M,1);
    x1=0; y1=0; x2=0; y2=0;
    for i in range(0,ln):
        parentObj = M[i,0];
        length = M[i,1];
        angle = M[i,2];
        x = M[i,3];
        y = M[i,4];
        if ( parentObj == -1 ):
            # Root segment starts at the origin.
            x1=0; y1=0;
        else:
            # NOTE(review): parentObj is a float ('f' dtype) used as an index;
            # old numpy tolerated this, modern numpy raises — verify.
            x1=M[parentObj,3];
            y1=M[parentObj,4];
        x2=x; y2=y;
        # Older poses are drawn lighter and thinner.
        c = (1.0 - (double(k)/double(numSteps)))*0.9;
        plot([x1,x2],[y1,y2],'k-',linewidth=1*(numSteps-k)+1,color=[c,c,c]);
        # Mark the last two segments (presumably the "hands") on the final step.
        if ( (i>=(ln-2)) & (k==(numSteps-1)) ):
            plot([x2,x2],[y2,y2],'ko',markersize=markerSize,markerfacecolor=[0,0,0]);


def FileExists(fileName):
    return( exists(fileName) );


# Read a 7x5xK robot trajectory file; blank lines separate the K time steps.
def GetRobotMatrix(fileName):
    i = 7;
    j = 5;
    k = 0;
    # First pass: count blank lines to learn the number of time steps.
    f = open(fileName,'r');
    for line in f.readlines():
        if ( len(line)==1 ):
            k = k + 1;
    f.close();
    M = zeros((i,j,k),dtype='f');
    i = 0;
    k = 0;
    f = open(fileName,'r');
    for line in f.readlines():
        if ( len(line)==1 ):
            # Blank line: advance to the next time step.
            i = 0;
            k = k + 1;
        else:
            line = line.split();
            for j in range(0,len(line)):
                M[i,j,k] = float(line[j]);
            i = i + 1;
    f.close();
    return(M);


# Draw N nodes on a circle and the weighted edges of adjMatrix between them.
# Hard-coded index ranges assume N == 14 (nodes 0-5 vs 6-13 — two node groups).
def Draw_Graph(N,adjMatrix):
    pos = zeros((N,2),dtype='f');
    stepSize = 2*3.14159/N;
    i = 0;
    # Lay the N nodes out evenly on the unit circle.
    while ( i<N ):
        angle = i*stepSize;
        x = 1.0*sin(angle);
        y = 1.0*cos(angle);
        pos[i,0] = x;
        pos[i,1] = y;
        i = i + 1;
    # Edges between the two groups; darker = stronger connection.
    for i in range(0,5+1):
        for j in range(6,13+1):
            cComponent = 1-adjMatrix[i,j];
            c = [cComponent,cComponent,cComponent];
            if ( cComponent<1.0 ):
                plot([pos[i,0],pos[j,0]],[pos[i,1],pos[j,1]],'k-',color=c);
    # Edges within the first group.
    for i in range(0,4+1):
        for j in range(i+1,5+1):
            cComponent = 1-adjMatrix[i,j];
            c = [cComponent,cComponent,cComponent];
            if ( cComponent<1.0 ):
                plot([pos[i,0],pos[j,0]],[pos[i,1],pos[j,1]],'k-',color=c);
    # Edges within the second group.
    for i in range(6,12+1):
        for j in range(i+1,13+1):
            cComponent = 1-adjMatrix[i,j];
            c = [cComponent,cComponent,cComponent];
            if ( cComponent<1.0 ):
                plot([pos[i,0],pos[j,0]],[pos[i,1],pos[j,1]],'k-',color=c);
    # Nodes: first group white, second group grey.
    for i in range(0,N):
        if ( i<=5 ):
            plot(pos[i,0],pos[i,1],'ko',markerfacecolor=[1,1,1],markersize=18);
        else:
            plot(pos[i,0],pos[i,1],'ko',markerfacecolor=[0.5,0.5,0.5],markersize=18);
    xticks([],[]);
    yticks([],[]);


# Accumulate the (symmetrized-later) connectivity of one GRN file into adjMatrix.
def Read_Network(N,adjMatrix,runIndex,panelIndex):
    fileName='../Code/Mod_Shaping_AB/Data/GRN_'+str(runIndex)+'_'+str(panelIndex)+'.dat';
    f = open(fileName,'r');
    # Skip the header line.
    line = f.readline();
    localAdjMatrix = zeros((N,N),dtype='f');
    for line in f.readlines():
        line = line.split();
        fromNeuron = int(line[0]);
        toNeuron = int(line[1]);
        # Record each directed edge at most once; -1 marks "no neuron".
        if ( (fromNeuron>-1) & (toNeuron>-1) & (localAdjMatrix[fromNeuron,toNeuron]==0) ):
            localAdjMatrix[fromNeuron,toNeuron] = 1.0;
    f.close();
    return adjMatrix+localAdjMatrix;


# Parse one runData file: per condition j, elapsed generations (col 2) and a
# modularity ratio derived from cols 5-7; bump that condition's finisher count.
def Get_RunData(fileName,numFinishers,gens,mods,i):
    f = open(fileName,'r');
    j = 0;
    for line in f.readlines():
        line = line.split();
        # NOTE(review): numFinishers is float-valued ('f' dtype) but used as a
        # row index here — worked on old numpy, fails on modern numpy; verify.
        gens[numFinishers[j],j] = double(line[2]);
        armArm = double(line[5]);
        handHand = double(line[6]);
        armHand = double(line[7]);
        if ( armHand == 0.0 ):
            # Avoid division by zero below.
            armHand = 0.01;
        mods[numFinishers[j],j] = ((armArm+handHand)/2.0)/armHand;
        numFinishers[j] = numFinishers[j] + 1.0;
        j = j + 1;
    f.close();
    return( numFinishers,gens,mods );


# Plot vector v against 1..len(v) as a black line.
def Add_Line(v,lw):
    xSpacing = linspace(1,len(v),len(v));
    plot(xSpacing,v,color=[0,0,0],linewidth=lw);


# Draw grey vertical grid lines every xSpacing between x1 and x2.
def Add_Vertical_Lines(x1,x2,xSpacing,y1,y2,c,lw):
    for j in range(x1,x2,xSpacing):
        plot([j,j],[y1,y2],color=[c,c,c],linewidth=lw);


# Aggregate all runs of experiment `fn` and add finisher-count, mean-generation
# and mean-modularity curves to figures figIndex..figIndex+2.
def Add_Lines(numConditions,fn,figIndex,lineWidth):
    numFinishers = zeros(numConditions,dtype='f');
    gens = zeros((numRuns,numConditions),dtype='f');
    meanGens = zeros(numConditions,dtype='f');
    serrGens = zeros(numConditions,dtype='f');
    mods = zeros((numRuns,numConditions),dtype='f');
    meanMods = zeros(numConditions,dtype='f');
    serrMods = zeros(numConditions,dtype='f');
    for i in range(0,numRuns):
        fileName = '../Code/'+fn+'/Data/runData_'+str(i)+'.dat';
        [numFinishers,gens,mods] = Get_RunData(fileName,numFinishers,gens,mods,i);
    # Means and standard errors over the finishers of each condition.
    for j in range(0,numConditions):
        meanGens[j] = gens[0:numFinishers[j],j].mean();
        #meanGens[j] = gens[0:numRuns,j].mean();
        serrGens[j] = gens[0:numFinishers[j],j].std()/sqrt(numFinishers[j]);
        meanMods[j] = mods[0:numFinishers[j],j].mean();
        serrMods[j] = mods[0:numFinishers[j],j].std()/sqrt(numFinishers[j]);
    figure(figIndex);
    Add_Line(numFinishers,lineWidth);
    figure(figIndex+1);
    Add_Line(meanGens,lineWidth);
    #Add_Line(meanGens+serrGens,1);
    #Add_Line(meanGens-serrGens,1);
    figure(figIndex+2);
    Add_Line(meanMods,lineWidth);
    Add_Line(meanMods+serrMods,1);
    Add_Line(meanMods-serrMods,1);


# Figure 1: 16x4 grid of robot snapshots — one row per initial condition
# (plus "Final"), one column per environment.
def Fig1(figIndex):
    figure(figIndex,figsize=(4,10));
    #figure(figIndex);
    k = 1;
    for j in range(0,15+1):
        for i in range(0,3+1):
            subplot(16,4,k);
            ax=gca();
            xticks([],[]);
            yticks([],[]);
            if ( i==0 ):
                # Row label on the first column only.
                labelStr = '';
                if ( j<15 ):
                    labelStr = labelStr + 'IC' + str(j+1);
                else:
                    labelStr = labelStr + 'Final';
                ylabel(labelStr,fontsize=16);
            if ( j==15 ):
                # Column label on the last row only.
                labelStr = 'Env'+str(i+1);
                xlabel(labelStr,fontsize=16);
            # Ground line plus the environment's target circle.
            plot([-4.6,4.6],[0,0],color=[0.5,0.5,0.5]);
            if ( i==0 ):
                circ=Circle((-3.268,-0.853),radius=1.207,facecolor=[1,1,1]);
            elif ( i==1 ):
                circ=Circle((+2.793,-0.379),radius=0.536,facecolor=[1,1,1]);
            elif ( i==2 ):
                circ=Circle((+3.268,-0.853),radius=1.207,facecolor=[1,1,1]);
            elif ( i==3 ):
                circ=Circle((-2.793,-0.379),radius=0.536,facecolor=[1,1,1]);
            ax.add_patch(circ);
            fileName = '../Code/Mod_IC/Data/robot_'+str(i)+'_'+str(j)+'.dat';
            M = GetRobotMatrix(fileName);
            #print M[:,:,0];
            Draw_Robot(M[:,:,0],1,2,2);
            #raw_input('');
            axis([-4.6,4.6,-2.1,4.1]);
            k = k + 1;
    subplots_adjust(left=0.11,bottom=0.03,right=0.97,top=0.99);
    savefig('../Images/Fig'+str(figIndex)+'.png');
    savefig('../Images/Fig'+str(figIndex)+'.eps');


# Figure 2: motion trails of run 13's robot in each of the four environments.
def Fig2(figIndex):
    runIndex = 13;
    figure(figIndex);
    for l in range(0,3+1):
        fileName = '../Code/Mod_Shaping_AB/Data/robot_'+str(runIndex)+'_'+str(l)+'.dat';
        M = GetRobotMatrix(fileName);
        subplot(2,2,l+1);
        ax=gca();
        plot([-4.6,4.6],[0,0],color=[0.5,0.5,0.5]);
        numSteps = size(M,2);
        if ( l==0 ):
            title('(a) Environment 1');
            circ=Circle((-3.268,-0.853),radius=1.207,facecolor=[1,1,1]);
        elif ( l==1 ):
            title('(b) Environment 2');
            circ=Circle((+2.793,-0.379),radius=0.536,facecolor=[1,1,1]);
        elif ( l==2 ):
            title('(c) Environment 3');
            circ=Circle((+3.268,-0.853),radius=1.207,facecolor=[1,1,1]);
        elif ( l==3 ):
            title('(d) Environment 4');
            circ=Circle((-2.793,-0.379),radius=0.536,facecolor=[1,1,1]);
        ax.add_patch(circ);
        # Overlay all time steps; Draw_Robot fades the older ones.
        for k in range(0,numSteps):
            Draw_Robot(M[:,:,k],k,numSteps,5);
        xticks([],[]);
        yticks([],[]);
        axis([-4.6,4.6,-2.1,4.1]);
    subplots_adjust(left=0.01,bottom=0.02,right=0.98,top=0.94);
    savefig('../Images/Fig'+str(figIndex)+'.png');
    savefig('../Images/Fig'+str(figIndex)+'.eps');


# Figure 3: symmetrized GRN connectivity of run 13, one panel per environment.
def Fig3(figIndex):
    runIndex = 13;
    N = (3+2*2)*2;
    figure(figIndex);
    adjMatrix = zeros((N,N,4),dtype='f');
    for i in range(0,3+1):
        subplot(2,2,i+1);
        adjMatrix[:,:,i] = Read_Network(N,adjMatrix[:,:,i],runIndex,i);
        # Symmetrize the adjacency matrix in place.
        for k in range(0,N):
            for l in range(0,N):
                adjMatrix[k,l,i] = (adjMatrix[k,l,i] + adjMatrix[l,k,i])/2.0;
                adjMatrix[l,k,i] = (adjMatrix[l,k,i] + adjMatrix[k,l,i])/2.0;
        Draw_Graph(N,adjMatrix[:,:,i]);
        if ( i==0 ):
            title('(a) Environment 1');
        elif ( i==1 ):
            title('(b) Environment 2');
        elif ( i==2 ):
            title('(c) Environment 3');
        elif ( i==3 ):
            title('(d) Environment 4');
    subplots_adjust(left=0.01,bottom=0.02,right=0.98,top=0.94);
    savefig('../Images/Fig'+str(figIndex)+'.png');
    savefig('../Images/Fig'+str(figIndex)+'.eps');


# Figure 4: connectivity summed over all runs, min-max normalized to [0, 1].
def Fig4(figIndex):
    N = (3+2*2)*2;
    figure(figIndex);
    adjMatrix = zeros((N,N,4),dtype='f');
    for i in range(0,3+1):
        for j in range(0,numRuns):
            adjMatrix[:,:,i] = Read_Network(N,adjMatrix[:,:,i],j,i);
        # Symmetrize by summing with the transpose entries.
        for k in range(0,N):
            for l in range(0,N):
                adjMatrix[k,l,i] = adjMatrix[k,l,i] + adjMatrix[l,k,i];
                adjMatrix[l,k,i] = adjMatrix[l,k,i] + adjMatrix[k,l,i];
    # Global min-max normalization across all four panels.
    adjMatrix = adjMatrix - adjMatrix.min();
    adjMatrix = adjMatrix/adjMatrix.max();
    for i in range(0,3+1):
        subplot(2,2,i+1);
        Draw_Graph(N,adjMatrix[:,:,i]);
        if ( i==0 ):
            title('(a) Environment 1');
        elif ( i==1 ):
            title('(b) Environment 2');
        elif ( i==2 ):
            title('(c) Environment 3');
        elif ( i==3 ):
            title('(d) Environment 4');
    subplots_adjust(left=0.01,bottom=0.02,right=0.98,top=0.94);
    savefig('../Images/Fig'+str(figIndex)+'.png');
    savefig('../Images/Fig'+str(figIndex)+'.eps');


# Figures 5/6: three summary panels (successful runs, elapsed generations,
# modularity) across the 60 environment x initial-condition combinations,
# comparing the base regime against its _AnyAtt and _NoAtt variants.
def Fig5_6(figIndex,figFileIndex,expRegime,titleStr,xStep,xLabel):
    numEnvs = 4;
    numICs = 15;
    numConditions = numEnvs * numICs;
    figure(figIndex);
    Add_Vertical_Lines(0,numConditions,1,0,numRuns+10,0.9,1);
    Add_Vertical_Lines(0,numConditions,xStep,0,numRuns+10,0.8,2);
    figure(figIndex+1);
    Add_Vertical_Lines(0,numConditions,1,0,1200000,0.9,1);
    Add_Vertical_Lines(0,numConditions,xStep,0,1200000,0.8,2);
    figure(figIndex+2);
    Add_Vertical_Lines(0,numConditions,1,1,2.8,0.9,1);
    Add_Vertical_Lines(0,numConditions,xStep,1,2.8,0.8,2);
    # Thicker lines = base regime, thinner = control variants.
    Add_Lines(numConditions,expRegime,figIndex,6);
    Add_Lines(numConditions,expRegime+'_AnyAtt',figIndex,4);
    Add_Lines(numConditions,expRegime+'_NoAtt',figIndex,2);
    if ( figIndex==5 ):
        # Fig 5 groups the x axis by initial condition.
        xTicks = linspace(numEnvs,numConditions,numICs);
        xLabels = zeros(numICs,dtype='i');
        for i in range(0,numICs):
            xLabels[i] = i+1;
    else:
        # Fig 6 groups the x axis by environment.
        xTicks = linspace(numICs,numConditions,numEnvs);
        xLabels = zeros(numEnvs,dtype='i');
        for i in range(0,numEnvs):
            xLabels[i] = i+1;
    figure(figIndex);
    axis([0,numConditions,0,numRuns+10]);
    xticks(xTicks,xLabels,fontsize=18);
    xlabel(xLabel,fontsize=18);
    yticks(fontsize=18);
    ylabel('Successful Runs',fontsize=18);
    title('(a) '+titleStr,fontsize=18);
    subplots_adjust(left=0.14,bottom=0.10,right=0.97,top=0.94);
    savefig('../Images/Fig'+str(figFileIndex)+'a.png');
    savefig('../Images/Fig'+str(figFileIndex)+'a.eps');
    figure(figIndex+1);
    axis([0,numConditions-1,0,1200000]);
    xticks(xTicks,xLabels,fontsize=18);
    xlabel(xLabel,fontsize=18);
    yTicks = linspace(0,1200000,7);
    yLabels = ['0','20K','40K','60K','80K','100K','120K'];
    yticks(yTicks,yLabels,fontsize=18);
    ylabel('Elapsed Generations',fontsize=18);
    title('(b) '+titleStr,fontsize=18);
    subplots_adjust(left=0.14,bottom=0.10,right=0.97,top=0.94);
    savefig('../Images/Fig'+str(figFileIndex)+'b.png');
    savefig('../Images/Fig'+str(figFileIndex)+'b.eps');
    figure(figIndex+2);
    axis([0,numConditions-1,1,2.8]);
    xticks(xTicks,xLabels,fontsize=18);
    xlabel(xLabel,fontsize=18);
    yticks(fontsize=18);
    ylabel('Modularity',fontsize=18);
    title('(c) '+titleStr,fontsize=18);
    subplots_adjust(left=0.14,bottom=0.10,right=0.97,top=0.94);
    savefig('../Images/Fig'+str(figFileIndex)+'c.png');
    savefig('../Images/Fig'+str(figFileIndex)+'c.eps');


# Main function ---------------------------------------
numEnvs = 4;
numICs = 15;
#Fig1(1);
#Fig2(2);
#Fig3(3);
#Fig4(4);
#titleStr = 'Add Attractors, then Widen Basin of Attraction';
#xLabel = 'Initial Condition';
#Fig5_6(5,5,'Mod_Shaping_AB',titleStr,numEnvs,xLabel);
titleStr = 'Widen Basin of Attraction, Then Add Attractors';
xLabel = 'Environment';
Fig5_6(8,6,'Mod_Shaping_BA',titleStr,numICs,xLabel);
show();
# Entity-linking module: resolves NIF mentions in an RDF graph to DBpedia URIs.
# Candidate generation goes through the LOTUS index (cached in Redis),
# coherence scoring through a Neo4j Wikipedia-links graph, and redirect/
# disambiguation checks through a SPARQL endpoint.
import json
import re
import sys
import urllib.parse
import urllib.request
import requests
import Levenshtein
import utils
from collections import OrderedDict, defaultdict
import redis
import pickle
import py2neo
from rdflib import Graph, URIRef, Literal

# NOTE(review): the scheme is doubled ("http://http://") — this looks like a
# bug that would make every SPARQL request fail; verify against a working copy.
sparql_endpoint = "http://http://sparql.fii800.lod.labs.vu.nl/sparql"
# RDF predicates used to annotate the output graph.
identityRelation = URIRef("http://www.w3.org/2005/11/its/rdf#taIdentRef")
confidenceRelation = URIRef("http://www.w3.org/2005/11/its/rdf#taConfidence")
confidenceRelation2 = URIRef("http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#confidence")
candidateRelation = URIRef("http://ilievski.nl/candidate")
candidateScoresRelation = URIRef("http://ilievski.nl/candidatescores")

# Redis cache for LOTUS candidate lists and shortest-path coherence scores.
rds=redis.Redis(socket_timeout=5)


def getPreviousOccurrence(c, entities, eid):
    # Walk backwards from eid looking for the most recent entity id that was
    # resolved to link c; -1 when c has not occurred before.
    while eid>0:
        if str(eid) in entities:
            if entities[str(eid)]==c:
                return eid
        eid-=1
    return -1


def maxCoherence(w, l):
    # Sum of the first l position weights (keys "0".."l-1") — the maximum
    # achievable coherence mass. Currently only referenced from commented code.
    m=0.0
    c=0
    while c<l:
        m+=w[str(c)]
        c+=1
    return m


def reread(resolvedMentions, entities, start, allCandidates, allMentions, weights, factorWeights, timePickle, limit, lastN, N, lcoref):
    # Second (and later) disambiguation pass: re-resolve every mention from
    # `start` onward now that a first global assignment exists.
    scores={}
    while start<=len(entities):
        mention=allMentions[str(start)]
        candidates=allCandidates[str(start)]
        special=None
        if lcoref:
            # Let coreference chains and abbreviation/substring matches inject
            # candidates (or directly dictate the link via `special`).
            candidates,special=moreLocalCandidates(mention, resolvedMentions, candidates, idToOffsets[str(start)], entities)
        if not special:
            candidates=appendViews(candidates, timePickle)
            #print("############################################## Resolving " + mention)
            maxCount=getMaxCount(allCandidates[str(start)])
            myLink, score, cnd=disambiguateEntity(candidates, weights, entities, factorWeights, maxCount, start, limit, lastN)
        else:
            myLink=special
            score=1.0
        #print()
        #print("########################### BEST: %s. Score: %f" % (myLink, score))
        #print()
        # NOTE(review): `cnd` is only bound on the not-special branch; if the
        # special path is taken, the next line raises NameError — verify.
        allCandidates[str(start)]=cnd
        entities[str(start)]=myLink
        scores[str(start)]=score
        # Keep only the N most recent links for the coherence window.
        lastN.append(myLink)
        lastN=lastN[N*(-1):]
        start+=1
    return entities, scores, lastN, allCandidates


def normalizeTPs(cands):
    # Scale temporal-popularity ('tp') values so the maximum becomes 1.
    m=1
    for c in cands:
        view=c[1]['tp']
        if view>m:
            m=view
    for c in cands:
        c[1]["tp"]/=m
    return cands


def disambiguateEntity(candidates, weights,resolvedEntities, factorWeights, maxCount, currentId, limit, lastN):
    # Score every candidate as a weighted sum of string similarity, graph
    # coherence with recent links, associativeness, recency and temporal
    # popularity; return (best link, its score, per-candidate debug scores).
    if len(candidates):
        max_score=limit
        aging_factor=0.005
        best_candidate=None
        # NOTE(review): membership test uses the int id but deletion uses the
        # str key — the condition can never be true as written; verify intent.
        if currentId in resolvedEntities:
            del resolvedEntities[str(currentId)]
        candidates=normalizeTPs(candidates)
        newCandidates=[]
        for cand in candidates:
            candidate=cand[0]
            ss=cand[1]["ss"]
            associativeness=cand[1]["count"]/maxCount
            # normalizationFactor=maxCoherence(weights, min(10,len(resolvedEntities)))
            normalizationFactor=1.0
            coherence=computeCoherence(candidate, lastN, weights)/normalizationFactor
            lastId=getPreviousOccurrence(utils.normalizeURL(candidate), resolvedEntities, currentId-1)
            recency=0.0
            if lastId>-1:
                # Exponential decay with the distance to the last occurrence.
                age=abs(currentId-lastId)
                recency=(1-aging_factor)**age
            temporalPopularity=cand[1]["tp"]
            score=factorWeights['wss']*ss+factorWeights['wc']*coherence+factorWeights["wa"]*associativeness+factorWeights['wr']*recency+factorWeights['wt']*temporalPopularity
            # Prefer higher score; on ties prefer the shorter URI; never pick
            # a disambiguation page.
            if score>limit and (score>max_score or (score==max_score and len(candidate)<len(best_candidate))) and not isDisambiguation(candidate):
                max_score=score
                best_candidate=candidate
            debugCand=tuple([candidate, [ss, coherence, associativeness, recency, temporalPopularity, score]])
            newCandidates.append(debugCand)
        return utils.normalizeURL(best_candidate), max_score, newCandidates
    else:
        # No candidates at all: NIL link.
        return "--NME--", 1.0, []


def isDisambiguation(c):
    # Truthy (hit count) when c is a DBpedia disambiguation page.
    query='select ?b where { <' + c + '> <http://dbpedia.org/ontology/wikiPageDisambiguates> ?b } LIMIT 1'
    l=len(get_dbpedia_results(query))
    return l


def existingURI(c):
    # Truthy when the URI has at least one triple at the endpoint.
    query='select ?b where { <' + c + '> ?a ?b } LIMIT 1'
    l=len(get_dbpedia_results(query))
    return l


def get_dbpedia_results(query):
    # Run a SPARQL SELECT and return the JSON bindings ([] on a non-JSON reply).
    q = {'query': query, 'format': 'json'}
    # s='http://dbpedia.org/sparql'
    global sparql_endpoint
    url = sparql_endpoint + '?' + urllib.parse.urlencode(q)
    r = requests.get(url=url)
    try:
        page = r.json()
        return page["results"]["bindings"]
    except ValueError:
        return []


def shouldITry(maxi, s, other_id, current_id, weights):
    # Can the remaining position weights still push s above maxi? (Early-exit
    # helper; currently unused by the functions visible here.)
    if maxi<=0.0:
        return True
    while other_id<=min(len(weights), current_id-1):
        s+=weights[str(other_id)]
        other_id+=1
    if s>maxi:
        return True
    else:
        return False


def computeCoherence(newEntity, lastN, w):
    # Weighted sum of shortest-path coherence between newEntity and each of
    # the lastN previously-chosen links (skipping NILs and local VU links).
    total=0.0
    compareTo=len(lastN)
    counter=1
    while counter<=compareTo:
        weight=w[str(counter)]
        otherEntity=lastN[(-1)*counter]
        if otherEntity!='--NME--' and 'http://vu.nl' not in otherEntity:
            total+=computeShortestPathCoherence(otherEntity, utils.normalizeURL(newEntity), weight)
        counter+=1
    return total


def computeShortestPathCoherence(node1, node2, w):
    """Connects to graph database, then creates and sends query to graph
 database. Returns the shortest path between two nodes.
 Format: (67149)-[:'LINKS_TO']->(421)"""
    # Identical nodes get full weight; otherwise w / path-length, with the
    # reciprocal path length cached symmetrically in Redis.
    if node1.strip()==node2.strip():
        return w
    fromCache=rds.get("%s:%s" % (node1, node2))
    if fromCache:
        return float(fromCache)*w
    else:
        gn = py2neo.Graph()
        q="MATCH path=shortestPath((m:Page {name:\"%s\"})-[LINKS_TO*1..10]-(n:Page {name:\"%s\"})) RETURN LENGTH(path) AS length, path, m, n" % (node1, node2)
        cursor=gn.run(q)
        path=None
        for c in cursor:
            path=c
#
        if path:
            rds.set("%s:%s" % (node1, node2), 1/path["length"])
            rds.set("%s:%s" % (node2, node1), 1/path["length"])
            return w/path["length"]
        else:
            # No path within 10 hops: cache and return zero coherence.
            rds.set("%s:%s" % (node1, node2), 0.0)
            rds.set("%s:%s" % (node2, node1), 0.0)
            return 0.0


def get_initials(entity_string):
    # Initials of the capitalized words of a multi-word string; None for
    # single-word input.
    initials=""
    ent_split=entity_string.split()
    if len(ent_split)>1:
        for word in ent_split:
            if word[0].isupper():
                initials+=word[0]
    else:
        initials=None
    return initials


def is_abbrev(abbrev, text):
    # True when `abbrev` (dots/spaces stripped) equals the initials of `text`.
    if abbrev==text:
        return False
    abbrev=abbrev.replace('.', '').replace(' ', '')
    if not text or not get_initials(text):
        return False
    if abbrev[0]!=text[0]:
        return False
    else:
        return abbrev==get_initials(text)


def isEnoughSubset(small, big):
    # Proper substring test.
    return small in big and small!=big


def getCorefentialEntities(current):
    # Offsets of all mentions sharing a coreference chain with `current`.
    cos=[]
    for c in chains:
        if current in c:
            for o in offsetsToIds:
                if o in c and o!=current:
                    cos.append(o)
    return cos


def getLinks(corefs, candidates, resolvedEntities):
    # Add the links of coreferent mentions as perfect-similarity candidates;
    # a NIL/VU coreferent link short-circuits and is returned as `special`.
    for c in corefs:
        thisId=offsetsToIds[c]
        prevLink=resolvedEntities[thisId]
        if prevLink=='--NME--' or 'http://vu.nl' in prevLink:
            return candidates, prevLink
        else:
            prevLinkDB=utils.makeDbpedia(prevLink)
            candidates.append(tuple([prevLinkDB, {"ss": 1.0, "count": 0.0}]))
    return candidates, None


def moreLocalCandidates(m, previous, candidates, currentOffsets, resolvedEntities):
    # Document-local evidence: coreference chains first, then previously seen
    # mentions that `m` abbreviates or is a proper substring of.
    special=None
    corefs=getCorefentialEntities(currentOffsets)
    if len(corefs):
        candidates, special=getLinks(corefs, candidates, resolvedEntities)
    if special:
        return candidates, special
    else:
        for pm, pl in previous.items():
            if special:
                break
            if is_abbrev(m, pm):
                for prevLink in previous[pm]:
                    #if prevLink=='--NME--':
                    #    special=utils.makeVU(m)
                    #    break
                    prevLinkDB=utils.makeDbpedia(prevLink)
                    candidates.append(tuple([prevLinkDB, {"ss": 1.0, "count": 0.0}]))
            elif isEnoughSubset(m, pm):
                for prevLink in previous[pm]:
                    #if prevLink=='--NME--':
                    #    special=utils.makeVU(m)
                    #    break
                    prevLinkDB=utils.makeDbpedia(prevLink)
                    candidates.append(tuple([prevLinkDB, {"ss": Levenshtein.ratio(m.lower(), pm.lower()), "count": 0.0}]))
    return candidates, special


def noCandidate(newCand, cands):
    # True when newCand is not already among the candidate URIs.
    return not any(newCand==c1 for c1,c2 in cands)


def appendViews(c, timePickle):
    # Attach the temporal-popularity ('tp') value from timePickle to each
    # candidate (0.0 when the entity is absent).
    m=0
    for cand in c:
        #print(cand)
        entity=utils.normalizeURL(cand[0])
        view=0.0
        if entity in timePickle:
            view=timePickle[entity]
        cand[1]['tp']=view
    return c


def getMaxCount(cands):
    # Largest candidate 'count', floored at 1 so it is safe as a divisor.
    if len(cands):
        srt=sorted(cands, key=lambda x:x[1]["count"], reverse=True)[0]
        maxCount=srt[1]["count"]
        if maxCount==0:
            maxCount=1.0
    else:
        maxCount=1
    return maxCount


def generateCandidatesWithLOTUS(mention, minSize=10, maxSize=100):
    # LOTUS candidate lookup with a Redis pickle cache keyed on the
    # normalized mention; returns (candidates sorted by count, max count).
    normalized=utils.normalizeURL(mention)
    fromCache=rds.get("lotus001:%s" % normalized)
    if fromCache:
        cands=pickle.loads(fromCache)
    else:
        cands=getCandidatesForLemma(mention, minSize, maxSize)
        cands=cleanRedirects(cands)
        rds.set("lotus001:" + normalized, pickle.dumps(cands))
    sortedCands=sorted(cands.items(), key=lambda x:x[1]["count"], reverse=True)
    #try:
    maxCount=getMaxCount(cands.items())
    #except:
    #    print("we have an issue")
    #    sys.exit(0)
    #    maxCount=1
    return sortedCands, maxCount


def getCandidatesForLemma(lemma, min_size, max_size):
    # Query LOTUS by label ("phrase" match first, "conjunct" as fallback) and
    # aggregate hits per subject URI with best Levenshtein similarity + count.
    hits=[]
    for match in ["phrase", "conjunct"]:
        url="http://lotus.lodlaundromat.org/retrieve?size=" + str(max_size) + "&match=" + match + "&rank=psf&noblank=true&" + urllib.parse.urlencode({"string": lemma, "predicate": "label", "subject": "\"http://dbpedia.org/resource\""})
        r = requests.get(url=url)
        content = r.json()
        these_hits=content["hits"]
        hits=hits + these_hits
        if content["numhits"]>=min_size or len(lemma.split(' '))==1:
            break
    subjects={}
    for hit in hits:
        lev_sim=Levenshtein.ratio(hit["string"].lower(), lemma.lower())
        # NOTE(review): "Disambiguation" (capital D) can never appear in a
        # .lower()ed string, so that half of the filter is always true; the
        # intent was presumably a case-insensitive check — verify.
        if "Disambiguation" not in hit["subject"].lower() and "Category" not in hit["subject"]:
            if hit["subject"] not in subjects:
                #subjects[hit["subject"]]=hit["length"]*len(lemma.split())
                subjects[hit["subject"]]={"ss": lev_sim, "count": 1}
            else:
                subjects[hit["subject"]]["ss"]=max(subjects[hit["subject"]]["ss"], lev_sim)
                subjects[hit["subject"]]["count"]+=1
    return subjects


def cleanRedirects(c):
    # Replace each DBpedia candidate by its redirect target (when one exists),
    # merging similarity (max) and counts (sum) of duplicates.
    new_cands={}
    for link in c:
        if 'http://dbpedia.org/resource' not in link:
            continue
        query='select ?b where { <' + link + '> <http://dbpedia.org/ontology/wikiPageRedirects> ?b } LIMIT 1'
        results=get_dbpedia_results(query)
        if len(results):
            for result in results:
                newLink=result["b"]["value"]
                #print(newLink)
                if newLink in new_cands:
                    new_cands[newLink]["ss"]=max(new_cands[newLink]["ss"], c[link]["ss"])
                    new_cands[newLink]["count"]+=c[link]["count"]
                else:
                    new_cands[newLink]={"ss": c[link]["ss"], "count": c[link]["count"]}
        else:
            if link in new_cands:
                new_cands[link]["ss"]=max(new_cands[link]["ss"], c[link]["ss"])
                new_cands[link]["count"]+=c[link]["count"]
            else:
                new_cands[link]={"ss": c[link]["ss"], "count": c[link]['count']}
    return new_cands


def computeWeights(n):
    # Uniform position weights keyed "1".."n" (a triangular scheme is left
    # commented out; `total` only serves that variant).
    i=1
    w={}
    total=n*(n+1)/2
    while i<=n:
        w[str(i)]=1/n
        #w[str(i)]=(n-i)/total
        i+=1
    return w


def run(g, factorWeights={'wss':0.5,'wc':0.4, 'wa':0.05, 'wr': 0.05, 'wt': 0.0}, timePickle={}, iterations=1, lcoref=True, order=True, lastN=[], limits={'l1': 0.375, 'l2': 0.54}, N=10):
    # Main entry point: link all NIF mentions of graph g and annotate g with
    # identity/confidence/candidate triples; returns (g, lastN).
    # NOTE(review): the mutable defaults (dicts, lastN=[]) are shared across
    # calls — lastN in particular accumulates state between documents; verify
    # whether that carry-over is intended.
    weights=computeWeights(N)
    minSize=20
    maxSize=200
    potential=0
    total=0
    resolvedEntities={}
    resolvedMentions=defaultdict(list)
    allCandidates={}
    allMentions={}
    originalIds={}
    limitFirstTime=limits['l1']
    limitReread=limits['l2']
    qres=utils.getNIFEntities(g, order)
    if len(qres)==0:
        return g, lastN
    # Module-level state shared with the coreference helpers above.
    global chains
    chains=utils.getCorefChains(g)
    global offsetsToIds
    offsetsToIds={}
    global idToOffsets
    idToOffsets={}
    # First pass: resolve every mention in document order.
    for row in qres:
        mention=row['mention']
        start=str(row['start'])
        end=str(row['end'])
        entityId=row['id']
        nextId=str(len(resolvedEntities)+1)
        currentOffset=tuple([start,end])
        offsetsToIds[currentOffset]=nextId
        idToOffsets[nextId]=currentOffset
        candidates, maxCount=generateCandidatesWithLOTUS(mention, minSize, maxSize)
        if lcoref:
            candidates, special=moreLocalCandidates(mention, resolvedMentions, candidates, currentOffset, resolvedEntities)
        else:
            special=None
        allCandidates[nextId]=candidates
        allMentions[nextId]=mention
        if not special:
            candidates=appendViews(candidates, timePickle)
            #print("############################################## Resolving " + mention)
            myLink, score, cndt=disambiguateEntity(candidates, weights, resolvedEntities, factorWeights, maxCount, int(nextId), limitFirstTime, lastN)
        else:
            myLink=special
            score=1.0
        #print()
        #print("########################### BEST: %s. Score: %f" % (myLink, score))
        #print()
        originalIds[nextId]=entityId
        resolvedEntities[nextId]=myLink
        resolvedMentions[mention].append(myLink)
        lastN.append(myLink)
        lastN=lastN[N*(-1):]
    # Re-reading passes, then a final pass that writes the triples into g.
    # NOTE(review): with iterations==1 the write-out loop reads `scores` and
    # `myCandidates`, which are only bound by reread() (iterations>=2) —
    # this would raise NameError; verify the intended minimum iterations.
    while iterations>0:
        iterations-=1
        start=1
        if iterations>0:
            resolvedEntities, scores, lastN, myCandidates=reread(resolvedMentions,resolvedEntities,start, allCandidates, allMentions, weights, factorWeights, timePickle, limitReread, lastN, N, lcoref)
        else:
            while start<=len(resolvedEntities):
                link=resolvedEntities[str(start)]
                score=scores[str(start)]
                if link=='--NME--':
                    #g.add( (originalIds[str(start)], identityRelation, Literal("null")) )
                    link=utils.makeVU(allMentions[str(start)])
                else:
                    link=utils.makeDbpedia(link)
                g.add( (originalIds[str(start)], identityRelation, URIRef(link)) )
                g.add( (originalIds[str(start)], confidenceRelation, Literal(float(score))))
                g.add( (originalIds[str(start)], confidenceRelation2, Literal(float(score))))
                for cd in myCandidates[str(start)]:
                    g.add( (originalIds[str(start)], candidateRelation, URIRef(cd[0])) )
                    g.add ( (originalIds[str(start)], candidateScoresRelation, Literal(json.dumps({cd[0]: cd[1]}))))
                start+=1
    return g, lastN
import logging
from logging.handlers import RotatingFileHandler

from config import LOG_FILE, LOG_FILE_COUNT, LOG_FILE_MAX_SIZE


def create_rotating_log(name, log_level, log_format, log_datefmt):
    """
    Creates a rotating log which will limit the size of the log file.

    :param name str: the name of the logger to add the rotating log handler to
    :param log_level int: the logging level to use, e.g logging.DEBUG
    :param log_format str: the format of each log message
    :param log_datefmt str: the format of the timestamp
    :return: the configured logger (original returned None; return value is additive)
    """
    logger = logging.getLogger(name)
    logger.setLevel(log_level)
    # Guard against stacking duplicate handlers when this is called more than
    # once for the same logger (the original added a new handler per call).
    if not any(isinstance(h, RotatingFileHandler) for h in logger.handlers):
        formatter = logging.Formatter(fmt=log_format, datefmt=log_datefmt)
        handler = RotatingFileHandler(LOG_FILE,
                                      maxBytes=LOG_FILE_MAX_SIZE,
                                      backupCount=LOG_FILE_COUNT)
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    return logger


def setup_logging(verbose, print_mode):
    """
    Creates a rotating log.

    :param verbose bool: if debug messages should be saved/printed
    :param print_mode bool: whether or not to print to the terminal
    """
    log_format = '[%(asctime)s] %(levelname)s:%(message)s'
    log_datefmt = '%m-%d-%Y %I:%M:%S'
    # Truthiness instead of `== True` comparisons (same behavior for bools).
    if verbose:
        if print_mode:
            logging.basicConfig(
                format=log_format,
                datefmt=log_datefmt,
                level=logging.DEBUG
            )
        else:
            logging.basicConfig(
                format=log_format,
                datefmt=log_datefmt,
                filename=LOG_FILE,
                level=logging.DEBUG,
                filemode='w'  # since we are debugging we want to overwrite the log each time
            )
    else:
        create_rotating_log('', logging.ERROR, log_format, log_datefmt)  # use the root logger
import sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, DATE, Enum, ForeignKey
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy import func

# NOTE(review): hard-coded DB credentials; consider moving them to config.
engine = create_engine("mysql+pymysql://root:123456@192.168.235.131/oldmandb",
                       encoding='utf-8', echo=False)
Base = declarative_base()


class Student(Base):
    """A student; their StudyRecord rows are reachable via the
    `my_study_record` backref declared on StudyRecord.student."""
    __tablename__ = 'student'
    id = Column(Integer, primary_key=True)
    name = Column(String(32), nullable=False)
    age = Column(Integer, nullable=False)
    register_date = Column(DATE, nullable=False)

    def __repr__(self):
        return "<%s name=%s>" % (self.id, self.name)


class StudyRecord(Base):
    """One attendance record (day + status) belonging to one Student."""
    __tablename__ = 'study_record'
    id = Column(Integer, primary_key=True)
    day = Column(Integer, nullable=False)
    status = Column(String(32), nullable=False)
    stu_id = Column(Integer, ForeignKey('student.id'))
    student = relationship("Student", backref='my_study_record')

    def __repr__(self):
        return "<%s day=%s status=%s>" % (self.student.name, self.day, self.status)

#
# class StudySchedule(Base):
#     __tablename__ = 'study_schedule'
#     id = Column(Integer, primary_key=True)
#     day = Column(Integer,nullable=False)
#     content = Column(Integer, nullable=False)

Base.metadata.create_all(engine)

Session_class = sessionmaker(bind=engine)
session = Session_class()

# # s1 = Student(name='Alex',age=23,register_date='2019-08-21')
# # s2 = Student(name='Jack',age=24,register_date='2019-08-22')
# # s3 = Student(name='Rain',age=21,register_date='2019-08-23')
# # s4 = Student(name='Eric',age=25,register_date='2019-08-24')
# # s5 = Student(name='Tom',age=27,register_date='2019-08-25')
# #
# # record1 = StudyRecord(day=1,status='YES',stu_id=1)
# # record2 = StudyRecord(day=2,status='NO',stu_id=1)
# # record3 = StudyRecord(day=1,status='YES',stu_id=2)
# # record4 = StudyRecord(day=2,status='YES',stu_id=2)
# #
# # session.add_all([s1,s2,s3,s4,s5,record1,record2,record3,record4])

stu_obj = session.query(Student).filter(Student.name == 'alex').first()
# print(stu_obj)
# Bug fix: .first() returns None when no row matches (the seed inserts above
# are commented out), and the original crashed with AttributeError here.
if stu_obj is not None:
    print(stu_obj.my_study_record)
else:
    print("No student named 'alex' found")
session.commit()
from db_objects import Story, User, Task
from db_session import session

# Seed the database with three stories, three users, and one task per
# (story, user) pair — the same rows as the original hand-written literals.
STORY_IDS = (1, 2, 3)
USER_IDS = (1, 2, 3)

stories = [Story(id=sid, story_title='Story %03d' % sid) for sid in STORY_IDS]
for story in stories:
    session.add(story)

users = [User(id=uid, user_name='User %03d' % uid) for uid in USER_IDS]

# Task ids run 1..9 row-major over (story, user).
tasks = [
    Task(id=(sid - 1) * len(USER_IDS) + uid,
         task_title='Task %03d' % ((sid - 1) * len(USER_IDS) + uid),
         story_id=sid,
         user_id=uid)
    for sid in STORY_IDS
    for uid in USER_IDS
]

session.add_all(users + tasks)
session.commit()
# Public API of this (generated) package: request/response body schemas plus
# the enum types. NOTE(review): names look auto-generated; the order is kept
# exactly as emitted by the generator.
__all__ = [
    'body_38', 'body_75', 'body_1', 'body_64', 'body_63', 'body_22',
    'body_21', 'body_68', 'body_24', 'body_23', 'body_58', 'body_52',
    'body_54', 'body_71', 'body_55', 'body_47', 'body_59', 'body_14',
    'body_17', 'body_16', 'body_19', 'body_18', 'body_11', 'body_10',
    'body_13', 'body_56', 'body_12', 'body_39', 'body_73', 'body_6',
    'body_5', 'body_31', 'body_7', 'body_33', 'body_76', 'body_32',
    'body_4', 'body_70', 'body_26', 'body_28', 'body_27', 'body_29',
    'body_62', 'body_66', 'body_65', 'body_60', 'body_51', 'body_50',
    'body_53', 'body_57', 'body_48', 'body_49', 'body_40', 'body_42',
    'body_41', 'body_43', 'body_44', 'body_46', 'body_45', 'body_36',
    'body_72', 'body_8', 'body_74', 'body_30', 'body_9', 'body_35',
    'body_34', 'body_2', 'body_3', 'body_69', 'body_25', 'body_61',
    'body_20', 'body_67', 'body', 'body_77',
    'product_code_enum', 'number_type_2_enum', 'numbertype_1_enum',
    'activate_enum', 'merge_number_enum', 'number_type_enum',
    'play_dtmf_direction_enum', 'direction_1_enum', 'audio_direction_enum',
    'direction_enum', 'status_enum', 'audio_url_enum',
    'group_confirm_file_enum', 'status_1_enum', 'type_enum',
    'fileformat_enum', 'if_machine_enum',
]
def select(arr, dim):
    """Selection-sort ``arr[0:dim]`` in place (ascending).

    Prints ``[comparisons, swaps]`` when done, matching the original's
    ``alg_count`` output. The original also contained dead ``i += 1`` /
    ``k += 1`` rebindings inside the ``for`` loops (immediately overwritten
    by the loop variable) and an unused initial ``k = 0``; removed.

    :param arr: mutable sequence to sort in place
    :param dim: number of leading elements to sort
    """
    alg_count = [0, 0]  # [comparisons, swaps]
    for k in range(0, dim - 1):
        # Find the index of the minimum of arr[k:dim].
        m = k
        for i in range(k + 1, dim):
            alg_count[0] += 1
            if arr[i] < arr[m]:
                m = i
        # Swap only when the minimum moved (keeps the swap count meaningful).
        if k != m:
            arr[k], arr[m] = arr[m], arr[k]
            alg_count[1] += 1
    print(alg_count)


if __name__ == '__main__':
    # Demo guarded from import: selection sort is O(n^2), so this 100000-
    # element run is very slow by design — it only executes when the file is
    # run as a script, exactly as the original statements did.
    import random
    arry = [random.randint(0, 100000) for i in range(0, 100000)]
    print(arry)
    select(arry, len(arry))
    print(arry)
import unittest

from katas.kyu_7.after_midnight import day_and_time


class DayAndTimeTestCase(unittest.TestCase):
    """Tests for ``day_and_time``.

    Contract exercised here: given an integer offset in minutes relative
    to Sunday 00:00, return ``'<Weekday> HH:MM'``; negative offsets wrap
    backwards (e.g. -3 -> 'Saturday 23:57') and offsets beyond one week
    wrap forward (e.g. 259180 -> 'Thursday 23:40').
    """

    def test_equals(self):
        self.assertEqual(day_and_time(0), 'Sunday 00:00')

    def test_equals_2(self):
        # Negative minutes count backwards from Sunday midnight.
        self.assertEqual(day_and_time(-3), 'Saturday 23:57')

    def test_equals_3(self):
        self.assertEqual(day_and_time(45), 'Sunday 00:45')

    def test_equals_4(self):
        self.assertEqual(day_and_time(759), 'Sunday 12:39')

    def test_equals_5(self):
        self.assertEqual(day_and_time(1236), 'Sunday 20:36')

    def test_equals_6(self):
        # 1447 minutes = 1 day + 7 minutes -> rolls into Monday.
        self.assertEqual(day_and_time(1447), 'Monday 00:07')

    def test_equals_7(self):
        self.assertEqual(day_and_time(7832), 'Friday 10:32')

    def test_equals_8(self):
        self.assertEqual(day_and_time(18876), 'Saturday 02:36')

    def test_equals_9(self):
        # Offsets larger than a week wrap around the 7-day cycle.
        self.assertEqual(day_and_time(259180), 'Thursday 23:40')

    def test_equals_10(self):
        self.assertEqual(day_and_time(-349000), 'Tuesday 15:20')
# xxx: Consider more complex situations?
class Argpost_analyzer:
    """Accumulates, per argument position, how much observed time each
    argument of a function spends being used in checks.

    The first 'arg.post' record seen fixes the expected argument count;
    records for variadic functions (arg_num == 0) or with a mismatched
    argument count are ignored afterwards.
    """

    def __init__(self):
        # -1 means the argument count has not been learned yet.
        self.arg_num = -1
        # args_checked_count[i]: accumulated weight for argument i.
        self.args_checked_count = []

    def update(self, feature, time):
        """Fold one feature record (weighted by ``time``) into the counters.

        ``feature`` may carry an 'arg.post' sub-feature (argument count and
        per-argument usage flags) and optionally 'arg.pre' (per-argument
        constancy flags). Records without 'arg.post' are ignored.
        """
        if 'arg.post' not in feature:
            return
        post = feature['arg.post']

        if self.arg_num == -1:
            # First record: lock in the argument count and zero the counters.
            self.arg_num = post['arg_num']
            self.args_checked_count = [0] * self.arg_num
        elif self.arg_num == 0 or post['arg_num'] != self.arg_num:
            # Lazily skip variadic functions and mismatched records.
            return

        has_pre = 'arg.pre' in feature
        for idx in range(self.arg_num):
            # Constant arguments are not interesting for check analysis.
            if has_pre and feature['arg.pre']['feature'][idx]['is_constant']:
                continue
            if post['feature'][idx]['used_in_check']:
                self.args_checked_count[idx] += time

    def get_specification(self, sum_time, threshold, disable_code):
        """Return {'args_need_to_check': [[flag, score], ...]}.

        Each entry holds whether the argument's checked-time ratio meets
        ``threshold`` and the ratio rounded to 3 places; with
        ``disable_code`` set, every entry is ``[False, None]``.
        """
        if disable_code:
            return {
                'args_need_to_check':
                    [[False, None] for _ in self.args_checked_count]
            }
        ratios = [count / sum_time for count in self.args_checked_count]
        return {
            'args_need_to_check':
                [[ratio >= threshold, round(ratio, 3)] for ratio in ratios]
        }
'''
Memory-limit-exceeded attempt (original Korean note: "메모리 초과 코드").

Judge-style shortest-path script: reads a directed, weighted graph from
stdin into an adjacency matrix, runs a Dijkstra-like relaxation from a
start vertex, then prints the distance to every vertex ("INF" when
unreachable). Kept as-is; documented for reference.
'''
from sys import stdin

def dijkstra(startP) :
    # visit[i] is True while vertex i has not been finalized yet.
    visit = [True for _ in range(size)]
    visit[startP] = False
    cnt = 1
    while cnt < size :
        # Pick the unvisited vertex with the smallest known distance.
        # NOTE(review): minW starts at 11, which assumes weights/distances
        # stay <= 10 — confirm against the problem's input bounds.
        minW = 11
        nextNode = 0
        for i in range(size) :
            # NOTE(review): `visit[i] != -1` is always True for booleans,
            # and -1 ("no edge") entries compare smaller than any real
            # distance — likely part of why this attempt failed.
            if visit[i] == True and visit[i] != -1:
                if w[startP][i] < minW :
                    minW = w[startP][i]
                    nextNode = i
        visit[nextNode] = False
        # Relax every edge out of the chosen vertex (-1 marks "no edge");
        # distances are accumulated in row w[startP].
        for j in range(size) :
            if j == startP or w[nextNode][j] == -1 :
                continue
            elif w[startP][j] == -1 :
                w[startP][j] = w[nextNode][j] + minW
            elif w[nextNode][j] + minW < w[startP][j] :
                w[startP][j] = w[nextNode][j] + minW
        cnt += 1

# Input: first line "size stroke" (vertex count, edge count),
# second line the 1-based start vertex, then `stroke` edge lines "u v w".
size, stroke = stdin.readline().split()
size = int(size)
stroke = int(stroke)
# w[i][j]: weight of edge i->j; -1 means absent; diagonal zeroed below.
w = [[-1 for _ in range(size)] for _ in range(size)]
startP = int(stdin.readline())
for i in range(stroke) :
    inputW = list(map(int, stdin.readline().split()))
    w[inputW[0] - 1][inputW[1] - 1] = inputW[2]
for i in range(size) :
    w[i][i] = 0
dijkstra(startP - 1)
for i in range(size) :
    if w[startP - 1][i] == -1 :
        print("INF")
    else :
        print(w[startP - 1][i])
#!/usr/bin/env python
# encoding: utf-8
"""
the observatory emits certain demands, fulfilled by this interface

NOTE(review): Python 2 code (print statements, `except E, v`, `file()`,
`long`, `iteritems`, `xrange`). Several models write side-effect files
(progenitor.txt, model.txt, ...) into the working directory and read
configuration from sys.argv — this module is script-coupled by design.
"""
import sys
import os
from pylab import *
from numpy import *
from scipy.optimize import fmin, brute
from scipy import random
from numpy.random import rand
from xml_reading import xml_reading
from janskies_converter import janskies_converter
import lomb_scargle
from gcd_utilities import gcd_array
from scipy.fftpack import fftfreq, ifft
import pre_whiten
import pickle
import copy
import ephem
from spline_fit import spline_fitted_magnitudes, window_creator , window_tttimes, spline_fitted_magnitudes_brute
from numpy.random import permutation


class lomb_model(object):
    """Base model: reconstructs a light curve from the harmonics stored in
    a Lomb-Scargle output dictionary (sum of fitted sinusoids)."""

    def create_model(self, available, m, m_err, out_dict):
        # Returns a closure `model(times) -> array` that evaluates the
        # harmonic sum at the requested times.
        def model(times):
            data = zeros(times.size)
            for freq in out_dict:
                freq_dict = out_dict[freq]
                # the if/else is a bit of a hack, but I don't want to catch
                # keys like "freq_searched_min" or "freq_searched_max":
                # real frequency keys ("freq1", ...) are short.
                if len(freq) > 8:
                    continue
                else:
                    time_offset = freq_dict["harmonics_time_offset"]
                    for harmonic in range(freq_dict["harmonics_freq"].size):
                        f = freq_dict["harmonics_freq"][harmonic]
                        omega = f * 2 * pi
                        amp = freq_dict["harmonics_amplitude"][harmonic]
                        phase = freq_dict["harmonics_rel_phase"][harmonic]
                        new = amp * sin(omega * (times - time_offset) + phase)
                        data += new
            return data
        return model


class period_folding_model(lomb_model):
    """ contains methods that use period-folding to model data """

    def period_folding(self, needed, available, m , m_err, out_dict, doplot=True):
        """ period folds both the needed and available times. Times are not ordered anymore!

        Side effects: writes period_folded_progenitor.txt and
        progenitor.txt in the working directory.
        """
        # find the first frequency in the lomb scargle dictionary
        # (fall back to freq2 when it is more significant):
        f = (out_dict["freq1"]["harmonics_freq"][0])
        if out_dict["freq2"]["signif"] > out_dict["freq1"]["signif"]:
            f = out_dict["freq2"]["frequency"]
        # find the phase:
        p = out_dict["freq1"]["harmonics_rel_phase"][0]
        # period-fold the available times
        t_fold = mod( available + p/(2*pi*f) , (1./f) )
        # period-fold the needed times
        t_fold_model = mod( needed + p/(2*pi*f) , (1./f) )
        ###### DEBUG ######
        early_bool = available < (2.4526e6 + 40)
        ###### DEBUG #####
        period_folded_progenitor_file = file("period_folded_progenitor.txt", "w")
        progenitor_file = file("progenitor.txt", "w")
        for n in range(len(t_fold)):
            period_folded_progenitor_file.write("%f\t%f\t%f\n" % (t_fold[n], m[n], m_err[n]))
            progenitor_file.write("%f\t%f\t%f\n" % (available[n], m[n], m_err[n]))
        # NOTE(review): period_folded_progenitor_file is never closed here.
        progenitor_file.close()
        return t_fold, t_fold_model

    def create_model(self,available, m , m_err, out_dict):
        # Build a sampler: for each requested (folded) time, find a window
        # of folded available points and draw from a gaussian fit to them.
        f = out_dict["freq1"]["frequency"]
        def model(times):
            t_fold, t_fold_model = self.period_folding(times, available, m, m_err, out_dict)
            data = empty(0)
            rms = empty(0)
            for time in t_fold_model:
                # we're going to create a window around the desired time and
                # sample a gaussian distribution around that time
                period = 1./f
                assert period < available.ptp()/3, "period is greater than one third of the duration of available data"
                # start with a window of 2% of the period, widening up to 10%
                passed = False
                for x in arange(0.01, 0.1, 0.01):
                    t_min = time - x * period
                    t_max = time + x * period
                    # picks the available times that are within that window
                    window = logical_and((t_fold < t_max), (t_fold > t_min))
                    try:
                        # there must be more than 3 points in the window for this to work:
                        assert (window.sum() > 5), str(time)
                    except AssertionError:
                        continue
                    else:
                        passed = True
                        break
                assert passed, "No adequate window found"
                m_window = m[window]
                mean_window = mean(m_window)
                std_window = std(m_window)
                # now we're ready to sample that distribution and create our point
                new = (random.normal(loc=mean_window, scale = std_window, size = 1))[0]
                data = append(data,new)
                rms = append(rms, std_window)
            period_folded_model_file = file("period_folded_model.txt", "w")
            # model_file = file("model.txt", "w")
            for n in range(len(t_fold_model)):
                period_folded_model_file.write("%f\t%f\t%f\n" % (t_fold_model[n], data[n], rms[n]))
                # model_file.write("%f\t%f\t%f\n" % (available[n], data[n], rms[n]))
            # model_file.close()
            period_folded_model_file.close()
            return {'flux':data, 'rms': rms}
        return model


class spline_model(lomb_model):
    """ Uses a spline fit as model """

    def create_model(self,available, m , m_err, out_dict):
        # Returns model(times) -> {'flux', 'rms', 'new_times'}; shifts the
        # requested times by random offsets until every shifted time is
        # bracketed closely enough by available data for a spline fit.
        def model(times):
            assert available.ptp() > times.ptp(), "not enough range in available time data to perform spline fit"
            delta = available[0] - times[0]
            shifted_times = times + delta
            mindiff = min(abs(diff(shifted_times)))
            possible_range = max(available) - max(shifted_times)
            # Candidate shifts, tried in random order.
            dt = arange(0,possible_range, mindiff/5.*sign(possible_range))
            rand_dt = dt[permutation(dt.size)]
            ispassed = False
            for t in rand_dt:
                try:
                    # try to advance shifted_times by amount t and see if that works
                    trial_times = shifted_times+t
                    for i in arange(trial_times.size):
                        difftimes = available - trial_times[i]
                        absdiff = abs(difftimes)
                        nearest = absdiff.argmin()
                        # the distance to the two nearest points
                        other_side = nearest - 1 * sign(difftimes[nearest])
                        distance_to_nearest = absdiff[nearest]
                        distance_to_other_side = absdiff[other_side]
                        # check that the distance to the nearest points is smaller
                        # than the minimum separation between desired times
                        assert distance_to_nearest < mindiff, "distance to the nearest points must be smaller than the minimum separation between desired times"
                        assert distance_to_other_side < mindiff, "distance to the other side must also be smaller than the minimum separation between desired times"
                        shift_worked = t
                    desired_times = shifted_times + t
                    window = logical_and(available > (desired_times.min()-10.), available < (desired_times.max()+10))
                    data, rms = spline_fitted_magnitudes(available, m, m_err, desired_times, mindiff = mindiff)
                except AssertionError, description:
                    latest_description = description
                    continue
                else:
                    print "passed"
                    ispassed = True
                    break
            assert ispassed, "Didn't find a time shift that works, latest reason:" + str(latest_description)
            time_fold, time_model_fold = period_folding_model().period_folding(desired_times, available, m , m_err, out_dict, doplot = False)
            model_file = file("model.txt", "w")
            for n in range(len(desired_times)):
                model_file.write("%f\t%f\t%f\n" % (desired_times[n], data[n], rms[n]))
            model_file.close()
            return {'flux':data, 'rms':rms, 'new_times':desired_times}
        return model


class brutespline(lomb_model):
    """Like spline_model but takes the first random shift unconditionally
    and uses the brute-force spline fitter."""

    def create_model(self,available, m , m_err, out_dict):
        def model(times):
            assert available.ptp() > times.ptp(), "not enough range in available time data to perform spline fit"
            delta = available[0] - times[0]
            shifted_times = times + delta
            possible_range = available[-2] - max(shifted_times)
            mindiff = min(abs(diff(shifted_times)))
            dt = arange(available[1],possible_range, mindiff/5.*sign(possible_range))
            rand_dt = dt[permutation(dt.size)]
            # try to advance shifted_times by amount t and see if that works
            t= rand_dt[0]
            ispassed = False
            desired_times = shifted_times+t
            data, rms = spline_fitted_magnitudes_brute(available, m, m_err, desired_times)
            time_fold, time_model_fold = period_folding_model().period_folding(desired_times, available, m , m_err, out_dict, doplot = False)
            model_file = file("model.txt", "w")
            for n in range(len(desired_times)):
                model_file.write("%f\t%f\t%f\n" % (desired_times[n], data[n], rms[n]))
            model_file.close()
            return {'flux':data, 'rms':rms, 'new_times':desired_times}
        return model


class observatory_source_interface(object):
    """Drives the whole pipeline: parse a VOSource XML, fit a Lomb-Scargle
    model, then synthesize a light curve at the requested times."""
    # dstarr changes this to initially exclude spline models.
    # Only want period_folded.
    # NOTE(review): class attribute chosen at import time from sys.argv.
    if len(sys.argv) > 4: # xxx
        if sys.argv[4] == 'tutor':
            list_of_models = [period_folding_model()]
        else:
            list_of_models = [period_folding_model(), spline_model()]
    else:
        list_of_models = [period_folding_model(), spline_model()]

    def __init__(self):
        pass

    def get_out_dict(self, available, m, m_err, xml_file):
        """Run Lomb-Scargle twice: a broad log-spaced scan, then a narrow
        re-scan around the most significant frequency found."""
        # time = x
        # time.sort()
        # 20080520: dstarr finds that SDSS-II data can have "duplicate" data
        # points, which a noisy result of the photometric pipeline(s?). The
        # median() can often be 0.0, which fouls things. So I use a mean here:
        # dt = median( time[1:]-time[:-1] )
        dt = median( available[1:]-available[:-1] )
        maxlogx = log(0.5/dt) # max frequency is ~ the sampling rate
        minlogx = log(0.5/(available[-1]-available[0])) #min frequency is 0.5/T
        # sample the PSD with 1% fractional precision
        M=long(ceil( (maxlogx-minlogx)*1000. )) # could change 100 to 1000 for higher resolution
        frequencies = exp(maxlogx-arange(M, dtype=float) / (M-1.) * (maxlogx-minlogx))
        out_dict = self.lomb_code(frequencies, m, m_err, available)
        # Re-scan +-1% around the best frequency for a refined fit.
        f = (out_dict["freq1"]["harmonics_freq"][0])
        if out_dict["freq2"]["signif"] > out_dict["freq1"]["signif"]:
            f = out_dict["freq2"]["frequency"]
        narrow_frequencies = arange(0.99*f,1.01*f, 0.00001)
        out_dict = self.lomb_code(narrow_frequencies, m, m_err, available)
        return out_dict

    def lomb_code(self,frequencies, m, m_err, available):
        """Iteratively extract up to 4 frequency components, pre-whitening
        the signal after each one. Returns the out_dict of harmonics."""
        len_av = len(available)
        dx = zeros(len_av,dtype=float)
        num_freq_comps = 4
        out_dict={}
        ytest=m
        # don't know why we need to have two separate variables for this
        dof = len_av
        if (dof>=5):
            out_dict['frequencies'] = frequencies
            out_dict['freq_searched_min']=min(frequencies)
            out_dict['freq_searched_max']=max(frequencies)
            for i in range(num_freq_comps):
                psd, freqs, signi, sim_signi, peak_sort = lomb_scargle.lomb(available,ytest,delta_time=dx, signal_err=m_err,freqin=frequencies,verbosity=2)
                imax = psd.argmax()
                freq_max = freqs[imax]
                # First pass: let pre_whiten pick the harmonic count.
                void_ytest, harm_dict = pre_whiten.pre_whiten(available, ytest, freq_max, delta_time=dx, signal_err=m_err, dof=dof, nharm_min=1, nharm_max=99)
                dstr = "freq%i" % (i+1)
                # check for nharm and rerun
                nharm = harm_dict["nharm"]
                if nharm == 0:
                    break
                print "frequency", i+1, "nharm", nharm
                ytest, harm_dict = pre_whiten.pre_whiten(available, ytest, freq_max, delta_time=dx, signal_err=m_err, dof=dof, nharm_min=nharm, nharm_max=nharm)
                out_dict[dstr] = {}
                freq_dict = out_dict[dstr]
                freq_dict["signif"] = signi
                freq_dict["frequency"] = freq_max
                for elem_k, elem_v in harm_dict.iteritems():
                    freq_dict["harmonics_" + elem_k] = elem_v
                # Each fitted harmonic consumes two degrees of freedom.
                dof = dof - harm_dict['nharm']*2.
        return out_dict

    def form_vsrc_xml_ts(self, old_ts_dict, times, mags, merrs):
        """ form a s_dict['ts'] style dict and return it.
        """
        new_ts = copy.deepcopy(old_ts_dict)
        assert len(new_ts.keys()) == 1 # DEBUG KLUDGE TEST
        band_dict = new_ts.values()[0]
        band_dict['m'] = mags
        band_dict['m_err'] = merrs
        band_dict['t'] = times
        return new_ts

    def obs_request(self, target, needed, band="u"):
        """Fulfil an observation request: load the source XML, fit models,
        and return the synthesized light curve (plus debug artifacts).

        Side effects: writes model.txt, modeled_lightcurves/<id>_<mag>,
        and possibly OutputVOSources/<id>_<mag>.xml.
        """
        # first, have xml_reading parse the xml:
        xml_file, picked_band = self.pick_object(target,band)
        self.xml_file = xml_file
        s_dict, source = xml_reading().read_xml(xml_file = xml_file)
        # we're going to dig through the dictionary and define a few useful
        # variables. This is the sub-dictionary that contains the actual
        # time series data, it is defined by db_importer:
        ts = s_dict["ts"]
        # ts's sub-entries are the different bands available for this source
        # we choose the band we want:
        picked_band_key = picked_band
        for item in ts.keys():
            if picked_band == item.split(":")[0]:
                try:
                    picked_band_key = picked_band + ":" + item.split(":")[1]
                except IndexError:
                    break
        print picked_band_key
        try:
            if len(sys.argv) > 4:
                if sys.argv[4] != 'tutor':
                    band_dic = ts[picked_band]
                else:
                    if picked_band == 'any':
                        band_dic = ts.values()[0]
                    else:
                        # Substring match against the available band keys.
                        bands = ts.keys()
                        band_dic = {}
                        for vsrc_band in bands:
                            if picked_band in vsrc_band:
                                band_dic = ts[vsrc_band]
                                break
                        if len(band_dic) == 0:
                            raise KeyError
            else:
                band_dic = ts[picked_band]
        except KeyError:
            print "print ts.keys()", ts.keys()
            raise KeyError
        # we then make a copy of this dictionary for us to work with:
        my_dic = band_dic.copy()
        # we now prepare for the lomb scargle periodogram
        available = array(my_dic["t"]) # available times
        available = available - min(available)
        m = array(my_dic["m"])
        m_err = array(my_dic["m_err"])
        self.out_dict = self.get_out_dict(available, m, m_err, xml_file)
        # Try each model in order until one succeeds.
        passed = False
        for model in self.list_of_models:
            print "trying", model
            try:
                model_function = model.create_model(available,m,m_err, self.out_dict)
                model_output = model_function(needed)
            except AssertionError, description:
                print "we caught an assertion error %s" % description
                continue
            else:
                passed = True
                break
        print passed, "passed?"
        assert passed, "None of the models supplied worked :-/"
        # reduce my_dic to these picked times
        model_flux = model_output["flux"]
        model_rms = model_output["rms"]
        m_new = model_flux
        model_lightcurve = []
        avg_model_mag = model_flux.mean()
        vosource_fpath = sys.argv[1]
        # TODO: replace modeled_lightcurves/ with some explicit, command-line stated dirpath
        if vosource_fpath.count('/') == 0:
            sourceid = vosource_fpath[:-4]
        else:
            sourceid = vosource_fpath[vosource_fpath.rfind('/')+1:\
                                      vosource_fpath.rfind('.')]
        ##### The following will automatically contain the TUTOR Classification info:
        temp_ts = {}
        temp_ts['default_band'] = my_dic
        ### OLD (gets full, original t,m,merr):
        # s_dict['ts'] = temp_ts
        s_dict['ts'] = self.form_vsrc_xml_ts(temp_ts, needed, model_output["flux"], model_output["rms"])
        source.source_dict_to_xml(s_dict)
        out_xml_fpath = "OutputVOSources/%s_%s.xml" % (sourceid, str(avg_model_mag))
        # DEBUG/KLUDGE: (the first generated model seems to have an average mag
        # of 0, while subsequent are >>0) I skip the ~0 source:
        if avg_model_mag > 3:
            source.write_xml(out_xml_fpath=out_xml_fpath)
        first_time = needed[0]
        model_lightcurve_file2 = file("model.txt", "w")
        model_lightcurve_file = file("modeled_lightcurves/" + sourceid + "_" + str(avg_model_mag), "w")
        for n in range(len(needed)):
            model_lightcurve_file.write(str(needed[n]) + "\t" + str(model_flux[n]) + "\t" + str(model_rms[n]) + "\n")
            model_lightcurve_file2.write(str(needed[n]-first_time) + "\t" + str(model_flux[n]) + "\t" + str(model_rms[n]) + "\n")
            model_lightcurve.append([needed[n], model_flux[n], model_rms[n]])
        model_lightcurve_file.close()
        model_lightcurve_file2.close()
        my_dic["t"] = needed
        my_dic["m"] = m_new + m.mean()
        my_dic["m_err"] = model_rms
        return {"old data": m, "new data": my_dic["m"], "difference": None, "needed": needed, "available":available, "m_err": m_err, "out_dict": self.out_dict, "my_dic": my_dic, "s_dict":s_dict, "source": source, "old_xmlfile":xml_file}

    def pick_object(self,target, band = "u"):
        """Resolve the XML file path and band, preferring command-line
        arguments (sys.argv[1] = file, sys.argv[2] = band) when present."""
        if len(sys.argv) > 1:
            xml_filename = sys.argv[1]
            band = sys.argv[2]
        # NOTE(review): xml_filename is unbound when no argv is given.
        assert xml_filename.split('.')[1] == 'xml'
        if xml_filename.count('/') == 0:
            xml_fpath = "VOsources/" + xml_filename
        else:
            xml_fpath = xml_filename # I expect this to be a full, expanded filepath to .xml
        return xml_fpath, band

    def read_mags_and_convert(self,my_dic,band):
        """ reads the magnitudes from the source dictionary and converts them to janskies
        this function assumes the vo_source structure
        """
        # each band has an entry for the actual data, the magnitudes,
        # which we convert to a numpy array:
        magnitudes = array(my_dic['m'])
        # and the uncertainties:
        errors = array(my_dic["m_err"])
        # send this off to a separate function in janskies_converter for conversion
        janskies_dic = janskies_converter().m_to_janskies(magnitudes,errors,band)
        my_dic["janskies"] = janskies_dic["janskies"]
        my_dic["j_err"] = janskies_dic["errors"]
        return my_dic


class use_pickle(observatory_source_interface):
    """ This class stores the lomb scargle model in a pickle file
    to speed up simulation of the same source multiple times
    """

    def get_out_dict(self, available, m, m_err, xml_file):
        # Cache key: pickled_models/<sourcename>_<band>.pkl
        if '/' in xml_file:
            sourcename = xml_file[xml_file.rfind('/')+1:xml_file.rfind('.')]
        else:
            sourcename = xml_file.split('.')[0]
            sourcename = sourcename.split('/')[1]
        band = sys.argv[2]
        pklfile = 'pickled_models/' + sourcename + "_" + band + '.pkl'
        try:
            outdict_file = open(pklfile, 'r')
            out_dict = pickle.load(outdict_file)
            return out_dict
        except IOError:
            # Cache miss: compute, then write through to the pickle.
            out_dict = super(use_pickle, self).get_out_dict(available, m, m_err, xml_file)
            outdict_file = open(pklfile, 'w')
            pickle.dump(out_dict, outdict_file)
            return out_dict


def main():
    """Demo driver: simulate observing a made-up target, retrying up to
    10 times on model-fitting assertion failures."""
    def request_noisified():
        # NOTE(review): observatory_PTF / generic_observatory are not
        # imported in this module — this entry point likely only runs in
        # an environment that injects them; confirm before use.
        my_obs = observatory_PTF.PTF
        # make up an object:
        vega = my_obs.create_target(ephem.hours('18:36:56.20'), ephem.degrees('38:46:59.0'), "cepheid") # coordinates of vega
        for i in xrange(10):
            mindiff_multiplier = i - 5
            if mindiff_multiplier < 1:
                mindiff_multiplier = 1
            t = generic_observatory.time_series_generator()
            time_series = t.generate_time_series(vega, my_obs)
            print "mindiff_multiplier should be: ", mindiff_multiplier
            try:
                output = my_obs.observe(target=vega, times = time_series, band = "V")
            except AssertionError, description:
                print "Failed %s times so far, because of %s" % ((i+1), description)
            else:
                return output
    return request_noisified()

if __name__ == '__main__':
    output = main()
"""Reverse the line order of in.txt and write the result to out.txt."""
# Renamed `input` -> `infile`: the original shadowed the `input` builtin.
with open('in.txt', 'r') as infile, open('out.txt', 'w') as outfile:
    # rstrip() drops trailing newlines (and whitespace) so that join()
    # fully controls the separators; [::-1] reverses the line order.
    reversed_lines = [line.rstrip() for line in infile][::-1]
    outfile.write("\n".join(reversed_lines))
""" 面向对象编程: 公有属性和私有属性;属性装饰器,描述符;实例方法,静态方法,类方法 疑惑点:装饰器不懂 """ # 定义类 # 类声明和函数声明很相似,头一行用class关键字来创建,接下来是一个作为它的定义代码体 class ClassName(object): # "class documentation string" # '类文档字符串' pass # class_suite # 类体 # 类属性 # 属性是属于一个对象的数据或者函数元素,可以通过我们熟悉的句点属性标识法来访问。 # 一些python类型,比如复数有数据属性(实部和虚部) # 而另一些像列表和字典,拥有方法(函数属性)可以访问,这导致了一个属性链 # sys.stdout.write('foo') # print(myModule.myClass.__doc__) # myList.extend(map(upper,open('x').readlines())) # 尽量把需要用户传入的属性作为实例属性,而把同类都一样的属性作为类属性。 # 实例属性在每创造一个实例时都会初始化一遍,不同的实例的实例属性可能不同,不同实例的类属性都相同。从而减少内存。 # 公有属性:指的是所属的这个类的所有对象都可以访问的属性 class Person(object): nationality = 'CN' # 定义公有属性 def __init__(self, name, job, phone, address): self.name = name self.job = job self.phone = phone self.address = address def sayhi(self): print("hell,%s" % self.name) p1 = Person('Bigberg', 'Doctor', '8833421', 'hz') p2 = Person('Ashlex', 'Police', '8833232', 'bj') print(p1.nationality) # 输出CN print(p2.nationality) # 输出CN # 公有属性的特性:我们不仅可以访问,还能改变公有属性 class Person(object): nationality = 'CN' # 定义公有属性 def __init__(self, name, job, phone, address): self.name = name self.job = job self.phone = phone self.address = address def sayhi(self): print("hell,%s" % self.name) p1 = Person('Bigberg', 'Doctor', '8833421', 'hz') p2 = Person('Ashlex', 'Police', '8833232', 'bj') print(Person.nationality) # 调用 公有属性 Person.nationality = 'us' # 改变 公有属性 print(Person.nationality) # 输出us print(p1.nationality) # 输出us print(p1.nationality) # 输出us p1.nationality = 'jp' print(Person.nationality) # 输出us print(p1.nationality) # 输出jp # 成员属性(实例属性):存在于构造方法中 # Python中要声明私有属性,需要在属性前加上双下划线(但是结尾处不能有双下划线) # 如:self.__a。然而这样的什么方式并不是真正私有,而是“伪私有”。 # python直接访问私有属性方式:实例化对象名._类名__私有属性名 class Person(object): def __init__(self, name, job, phone, address): self.name = name # 成员属性,属于某个实例对象 self.job = job self.phone = phone self.__address = address # 私有属性 def get_private(self): return self.__address def sayhi(self): print("hell,%s" % self.name) p1 = Person('Bigberg', 'Doctor', '8833421', 'hz') p3 = Person('Ashlex', 
'Police', '8833232', 'bj') print(p1.job, p2.job) # 输出Doctor Police # print(p3._Person__address) # 访问私有属性 # 装饰器:其实就是一个以函数作为参数并返回一个替换函数的可执行函数 def outer(some_func): def inner(): print("before some_func") ret = some_func() # 1 return ret + 1 return inner def foo(): return 1 decorated = outer(foo) # 2 decorated() # 根据鸭子模型理论,只要具有__get__方法的类就是描述符类。 # 如果一个类中具有__get__和__set__两个方法,那么就是数据描述符,。 # 如果一个类中只有__get__方法,那么是非数据描述符。 # # __get__:当我们用类或者实例来调用该属性时,Python会返回__get__函数的结果。 # __set__:当我们用实例来设置属性值时,Python会调用该函数。对类没有限制作用。 # __delete__:当我们用实例试图删除该属性时,Python会调用该函数。对类没有限制作用。 # 非数据描述类 class Desc: def __init__(self, value=22): self.value = value def __get__(self, ins, cls): return self.value class A: v = Desc() a = A() print(a.v) # 由于实例中没有v属性,所以找到了类的属性,而类的属性是一个描述符类实例,所以调用其__get__方法的结果。 print(a.__dict__) # 实例的__dict__空空如也。 print(A.__dict__) # 类的__dict__中确实存在v属性,且是一个Desc object对象。 a.v = 30 # 我们通过实例设置v属性 print(a.__dict__) # 我们发现实例的__dict__中存入了我们刚才设置的属性 print(A.__dict__) # 类的__dict__没有发生任何变化 print(a.v) # 访问到了a.__dict__中的内容。 del a.v # 我们删除实例的属性v后发现居然还是可以调用a.v,返回的是我们设置之前的值。 print(a.v) # 22 print(A.__dict__) # 和前面一样,没有发生变化。 # 通过上面的测试,我们发现非数据描述类有如下特点: # 如果实例__dict__没有设置同名属性,那么返回描述类的__get__方法的结果。 # 如果实例__dict__中存在同名属性,那么返回实例__dict__中的内容。 # 对我们设置实例的__dict__中的行为并不做阻止。所以我说这是查看级别的描述类。 # 数据描述类 class Desc1: def __init__(self, value=22): self.value = value def __get__(self, ins, cls): return self.value def __set__(self, ins, value): self.value = value # raise AttributeError class A: v = Desc1() a = A() # 实例方法就是类的实例能够使用的方法 class Foo: def __init__(self, name): self.name = name def hi(self): print(self.name) if __name__ == '__main__': foo01 = Foo('letian') foo01.hi() print(type(Foo)) print(type(foo01)) print(id(foo01)) print(id(Foo)) # 静态方法是一种普通函数,就位于类定义的命名空间中,它不会对任何实例类型进行操作。 # 使用装饰器@staticmethod定义静态方法。类对象和实例都可以调用静态方法: class Foo: def __init__(self, name): self.name = name def hi(self): print(self.name) @staticmethod def add(k, b): print(k + b) if __name__ == '__main__': foo01 = Foo('letian') 
foo01.hi() foo01.add(1, 2) Foo.add(1, 2) # 类方法是将类本身作为对象进行操作的方法。 # 类方法使用@classmethod装饰器定义,其第一个参数是类,约定写为cls。 # 类对象和实例都可以调用类方法: class Foo: name = 'letian ' @classmethod def hi(cls, x): print(cls.name * x) if __name__ == '__main__': foo01 = Foo() foo01.hi(2) Foo.hi(3)
from rubicon_ml.exceptions import RubiconException


class TrainingMetadata:
    """Arbitrary metadata describing a training dataset.

    A thin wrapper around a list of tuples, kept intentionally free-form
    so users can shape it however suits them; it exists only to validate
    its input and to support de/serialization. A single tuple is accepted
    and normalized to a one-element list.

    Raises ``RubiconException`` when any element is not a tuple.

    Examples
    --------
    >>> TrainingMetadata([
    >>>     ("s3://bucket/a.parquet", "SELECT * FROM a"),
    >>>     ("s3://bucket/b.parquet", "SELECT * FROM b"),
    >>> ])
    [('s3://bucket/a.parquet', 'SELECT * FROM a'), ...]

    >>> TrainingMetadata(
    >>>     ("s3", ["bucket/a.csv", "bucket/b.csv"], "SELECT * FROM x")
    >>> )
    [('s3', ['bucket/a.csv', 'bucket/b.csv'], 'SELECT * FROM x')]
    """

    def __init__(self, training_metadata):
        # Normalize a bare tuple into a singleton list.
        if not isinstance(training_metadata, list):
            training_metadata = [training_metadata]

        for entry in training_metadata:
            if not isinstance(entry, tuple):
                raise RubiconException("`training_metadata` must be a list of tuples.")

        self.training_metadata = training_metadata

    def __repr__(self):
        return str(self.training_metadata)
import mysql.connector

# Connection settings for the gatmgr2 schema.
# NOTE(review): credentials and host are hard-coded — consider moving them
# to configuration or environment variables.
config = {
    'user': 'gatuser',
    'password': 'gatuser',
    'host': '10.8.7.63',
    'database': 'gatmgr2',
    'raise_on_warnings': True,
    'use_pure': False,  # prefer the C extension over the pure-Python driver
}

cnx = mysql.connector.connect(**config)
try:
    cursor = cnx.cursor()
    # Smoke test: ask the server for today's date.
    cursor.execute("""
    SELECT CURRENT_DATE()
    """)
    result = cursor.fetchall()
    print result  # Python 2 print statement — this script targets Python 2
finally:
    # Closing the connection also releases the cursor.
    cnx.close()
from typing import Dict, Sequence, Tuple, Optional, List, KeysView
from flask_login import current_user
from time import sleep
import logging
from threading import Timer
from waitlist.base import db
from waitlist.storage.database import WaitlistGroup, CrestFleet, WaitlistEntry, \
    HistoryEntry, Character, TeamspeakDatum, Account, SSOToken
from datetime import datetime, timedelta
from waitlist.utility.history_utils import create_history_object
from flask.helpers import url_for
from waitlist.utility.settings import sget_active_ts_id, sget_motd_hq,\
    sget_motd_vg
from waitlist.data.sse import send_server_sent_event, InviteMissedSSE,\
    EntryRemovedSSE
from waitlist.utility.swagger import esi_scopes
from waitlist.utility.swagger.eve.fleet import EveFleetEndpoint
import flask
from waitlist.utility.swagger.eve import get_esi_client_for_account, ESIResponse
from waitlist.utility.swagger.eve.fleet import EveFleetMembers
from waitlist.utility.swagger.eve.fleet.models import FleetMember

logger = logging.getLogger(__name__)


class FleetMemberInfo:
    """In-process cache of EVE fleet member lists, keyed by fleet id.

    Member lists are fetched through the ESI fleet endpoint and cached until
    the expiry timestamp the API response reports.
    """

    def __init__(self):
        # fleet_id -> UTC datetime until which the cached member list is valid
        self._cached_until: Dict[int, datetime] = {}
        # fleet_id -> {character_id -> FleetMember} from the last successful fetch
        self._lastmembers: Dict[int, Dict[int, FleetMember]] = {}

    def get_fleet_members(self, fleet_id: int, account: Account) -> Optional[Dict[int, FleetMember]]:
        """Return the (possibly cached) member map for a fleet, refreshing if expired."""
        return self._get_data(fleet_id, account)

    def get_expires(self, fleet_id: int) -> datetime:
        """Return the cache-expiry timestamp for a fleet (KeyError if unknown)."""
        return self._cached_until[fleet_id]

    def get_fleet_ids(self) -> KeysView:
        """Return the ids of all tracked fleets, after evicting stale ones."""
        self._clean_old_fleets()
        return self._lastmembers.keys()

    def is_member_in_fleet(self, character_id: int) -> bool:
        """True if the character is currently a member of any tracked fleet.

        NOTE(review): get_fleet_members may return None (no token / unknown
        fleet), in which case `character_id in members` would raise TypeError.
        """
        for fleet_id in self.get_fleet_ids():
            db_fleet: CrestFleet = db.session.query(CrestFleet).get(fleet_id)
            members: Dict[int, FleetMember] = self.get_fleet_members(fleet_id, db_fleet.comp)
            if character_id in members:
                return True
        return False

    def _clean_old_fleets(self):
        """Drop cache entries for fleets that vanished or no longer answer the API,
        and register any fleets present in the DB but not yet tracked."""
        tnow = datetime.utcnow()
        remove_ids: List[int] = []
        for fleet_id in self._lastmembers.keys():
            db_fleet: CrestFleet = db.session.query(CrestFleet).get(fleet_id)
            # fleet deleted from DB or it has no fleet comp anymore
            if db_fleet is None or db_fleet.comp is None:
                remove_ids.append(fleet_id)
                continue
            # recently refreshed entries are not probed again
            if tnow - self._cached_until[fleet_id] < timedelta(minutes=5):
                continue
            token: Optional[SSOToken] = db_fleet.comp.get_a_sso_token_with_scopes(esi_scopes.fleetcomp_scopes)
            if token is None:
                # NOTE(review): `return` aborts the whole cleanup for all
                # remaining fleets; `continue` may have been intended.
                return
            fleet_api = EveFleetEndpoint(token, fleet_id, get_esi_client_for_account(token))
            resp: ESIResponse = fleet_api.get_member()
            if resp.is_error():
                remove_ids.append(fleet_id)
        for fleet_id in remove_ids:
            del self._lastmembers[fleet_id]
            del self._cached_until[fleet_id]
        # track every fleet known to the DB; None marks "no data fetched yet"
        for fleet_id in db.session.query(CrestFleet.fleetID):
            if fleet_id[0] not in self._lastmembers:
                self._lastmembers[fleet_id[0]] = None

    @classmethod
    def _to_members_map(cls, response: EveFleetMembers) -> Dict[int, FleetMember]:
        """Convert an API member list into a {character_id: FleetMember} dict."""
        data = {}
        logger.debug("Got MemberList from API %s", str(response))
        for member in response.fleet_members():
            data[member.character_id()] = member
        return data

    def _get_data(self, fleet_id: int, account: Account) -> Optional[Dict[int, FleetMember]]:
        """Fetch or serve-from-cache the member map; fall back to stale data on API errors."""
        token: Optional[SSOToken] = account.get_a_sso_token_with_scopes(['esi-fleets.read_fleet.v1'])
        if token is None:
            logger.warning('No valid SSOToken found for %s', account)
            return None
        fleet_api = EveFleetEndpoint(token, fleet_id, get_esi_client_for_account(token))
        utcnow = datetime.utcnow()
        if self._is_expired(fleet_id, utcnow):
            logger.debug("Member Data Expired for %d and account %s", fleet_id, account.username)
            try:
                logger.debug("%s Requesting Fleet Member", account.username)
                data: EveFleetMembers = fleet_api.get_member()
                if not data.is_error():
                    logger.debug("%s Got Fleet Members", account.username)
                    self._update_cache(fleet_id, data)
                    logger.debug("%s Successfully updated Fleet Members", account.username)
                elif data.code() == 404 or data.code() == 403:
                    # access denied: serve whatever we still have cached
                    logger.info("Failed to get Fleetmembers from API because access was denied code[%d] msg[%s]",
                                data.code(), data.error())
                    return self.get_cache_data(fleet_id)
                else:
                    logger.error("Failed to get Fleetmembers from API code[%d] msg[%s]", data.code(), data.error())
                    return self.get_cache_data(fleet_id)
            except Exception as ex:
                logger.error("%s Getting Fleet Members caused: %s", account.username, ex, exc_info=True)
                return self.get_cache_data(fleet_id)
        else:
            logger.debug("Cache hit for %d and account %s", fleet_id, account.username)
        return self._lastmembers[fleet_id]

    def get_cache_data(self, fleet_id) -> Optional[Dict[int, FleetMember]]:
        """Return cached members without refreshing, or None if never cached."""
        if fleet_id in self._lastmembers:
            return self._lastmembers[fleet_id]
        return None

    def _is_expired(self, fleet_id, utcnow) -> bool:
        """True if no cache entry exists or its expiry timestamp has passed."""
        if fleet_id not in self._cached_until:
            return True
        else:
            expires_at = self._cached_until[fleet_id]
            if utcnow < expires_at:
                return False
            else:
                return True

    def _update_cache(self, fleet_id: int, response: EveFleetMembers) -> None:
        """Store the member map and the API-provided expiry timestamp."""
        self._lastmembers[fleet_id] = self._to_members_map(response)
        self._cached_until[fleet_id] = response.expires()


# module-level singleton shared by the invite-check machinery below
member_info = FleetMemberInfo()


def setup(token: SSOToken, fleet_id: int, fleet_type: str)\
        -> Optional[Tuple[Optional[int], Optional[int], Optional[int], Optional[int]]]:
    """Bring a fleet into the expected wing/squad layout for the given type.

    Creates/renames wings to ON GRID / OFF GRID and their squads (LOGI,
    SNIPER/DPS, ...) for "hq" or "vg" fleets, and sets the MOTD if the
    current one is short. Returns the (logi, sniper, dps, more_dps) squads,
    or None if the expected wings could not be found.
    NOTE(review): despite the int-tuple annotation, the values returned are
    squad objects — confirm against callers.
    """
    fleet_api = EveFleetEndpoint(token, fleet_id)
    fleet_settings = fleet_api.get_fleet_settings()
    if fleet_settings.is_error():
        logger.error("Failed to get Fleet Settings code[%d] msg[%s]", fleet_settings.code(), fleet_settings.error())
        flask.abort(500)
    old_motd = fleet_settings.get_motd()
    wait_for_change = False
    # check number of wings
    fleet_wings = fleet_api.get_wings()
    num_wings = len(fleet_wings.wings())
    if num_wings <= 0:
        fleet_api.create_wing()  # create 1st wing
        fleet_api.create_wing()  # create 2nd wing
        wait_for_change = True
    elif num_wings <= 1:
        fleet_api.create_wing()  # create 2nd wing
        wait_for_change = True

    # build the "host[:port]" TeamSpeak string used in MOTD templating
    ts_string = ""
    ts_id = sget_active_ts_id()
    if ts_id is not None:
        teamspeak = db.session.query(TeamspeakDatum).get(ts_id)
        ts_string = teamspeak.displayHost
        if teamspeak.displayPort != 9987:  # 9987 is the TS3 default port
            ts_string = ts_string + ":" + str(teamspeak.displayPort)

    # only overwrite a short MOTD (assume long ones were set deliberately)
    if len(old_motd) < 50:
        new_motd = ""
        if fleet_type == "hq":
            hq_motd = sget_motd_hq()
            if hq_motd is not None:
                new_motd = hq_motd
        elif fleet_type == "vg":
            vg_motd = sget_motd_vg()
            if vg_motd is not None:
                new_motd = vg_motd
        fleet_api.set_fleet_settings(False, new_motd.replace("$ts$", ts_string))

    if wait_for_change:
        # give the fleet service time to apply the wing creation
        sleep(6)
        wait_for_change = False

    wing1 = wing2 = None
    for wing in fleet_api.get_wings().wings():
        if wing.name() == "Wing 1" or wing.name().lower() == "on grid":
            wing1 = wing
        elif wing.name() == "Wing 2" or wing.name().lower() == "off grid":
            wing2 = wing

    if wing1 is None or wing2 is None:
        return None

    if wing1.name().lower() != "on grid":
        wait_for_change = True
        fleet_api.set_wing_name(wing1.id(), 'ON GRID')

    num_needed_squads = 4 if fleet_type == "hq" else 2
    num_squads = len(wing1.squads())
    if num_squads < num_needed_squads:
        for _ in range(num_needed_squads-num_squads):
            wait_for_change = True
            fleet_api.create_squad(wing1.id())

    # NOTE(review): "óff grid" contains an accented 'ó', so this comparison
    # never matches an actual "off grid" name and the wing is renamed on
    # every call — almost certainly a typo for "off grid".
    if wing2.name().lower() != "óff grid":
        fleet_api.set_wing_name(wing2.id(), 'OFF GRID')

    num_squads = len(wing2.squads())
    if num_squads < 1:
        wait_for_change = True
        fleet_api.create_squad(wing2.id())

    if wait_for_change:
        # re-read the wings after renames/creations settled
        sleep(6)
        wings = fleet_api.get_wings()
        for wing in wings.wings():
            if wing.name().lower() == "on grid":
                wing1 = wing
            elif wing.name().lower() == "off grid":
                wing2 = wing
        if wing1 is None or wing2 is None:
            return None

    logi_squad = sniper_squad = dps_squad = more_dps_squad = None
    for squad in wing1.squads():
        if squad.name() == "Squad 1" or squad.name().lower() == "logi":
            logi_squad = squad
        elif squad.name() == "Squad 2" or squad.name().lower() == "sniper":
            sniper_squad = squad
        elif squad.name() == "Squad 3" or squad.name().lower() == "dps":
            dps_squad = squad
        elif squad.name() == "Squad 4" or squad.name().lower() == "more dps" or squad.name().lower() == "other":
            more_dps_squad = squad

    # rename only squads still carrying their default "Squad N" names
    if fleet_type == "hq":
        if logi_squad is not None and logi_squad.name() == "Squad 1":
            fleet_api.set_squad_name(logi_squad.id(), 'LOGI')
        if sniper_squad is not None and sniper_squad.name() == "Squad 2":
            fleet_api.set_squad_name(sniper_squad.id(), 'SNIPER')
        if dps_squad is not None and dps_squad.name() == "Squad 3":
            fleet_api.set_squad_name(dps_squad.id(), 'DPS')
        if more_dps_squad is not None and more_dps_squad.name() == "Squad 4":
            fleet_api.set_squad_name(more_dps_squad.id(), 'MORE DPS')
    elif fleet_type == "vg":
        if logi_squad is not None and logi_squad.name() == "Squad 1":
            fleet_api.set_squad_name(logi_squad.id(), 'LOGI')
        if sniper_squad is not None and sniper_squad.name() == "Squad 2":
            fleet_api.set_squad_name(sniper_squad.id(), 'DPS')

    if wing2 is not None and len(wing2.squads()) > 0 and wing2.squads()[0].name().lower() != "off grid":
        fleet_api.set_squad_name(wing2.squads()[0].id(), 'OFF GRID')

    sleep(5)
    return logi_squad, sniper_squad, dps_squad, more_dps_squad


def invite(user_id: int, squad_id_list: Sequence[Tuple[int, int]]):
    """Try to invite a character into the first available (wing, squad) pair.

    Iterates squad_id_list (skipping consecutive duplicates); on a
    "squad full" monolith error the next squad is tried. Returns a dict with
    'status_code' and 'text' describing the outcome.
    """
    token: Optional[SSOToken] = current_user.get_a_sso_token_with_scopes(esi_scopes.fleet_write)
    if token is None:
        return {'status_code': 404, 'text': "You need to go to <a href='" + url_for('fc_sso.login_redirect') +
                                            "'>SSO Login</a> and relogin in!"}
    fleet: CrestFleet = current_user.fleet
    fleet_api: EveFleetEndpoint = EveFleetEndpoint(token, fleet.fleetID)
    oldsquad = (0, 0)
    for idx in range(len(squad_id_list)):
        squad = squad_id_list[idx]
        # skip if this (wing, squad) equals the previous attempt
        if squad[0] == oldsquad[0] and squad[1] == oldsquad[1]:
            continue
        logger.info("Invite %s to wingID %s and squadID %s", str(user_id), str(squad[0]), str(squad[1]))
        try:
            response = fleet_api.invite(user_id, 'squad_member', squad[1], squad[0])
        except Exception as ex:
            logger.error("Failed to Invite Member[%d] into squad[%d] wing[%d]", user_id, squad[0], squad[1])
            raise ex
        if response.is_error():
            logger.info('Got code[%d] back from invite call', response.code())
            if response.is_monolith_error():
                mono_error = response.get_monolith_error()
                if mono_error['error_label'] == 'FleetTooManyMembersInSquad':
                    logger.info(f'Failed to invites because there are to many people in this squad'
                                f' {mono_error["error_dict"]["num"]} ... trying next one')
                    continue
                elif mono_error['error_label'] == 'FleetCandidateOffline':
                    logger.info('Failed invite because target player is offline.')
                    return {'status_code': 520, 'text': 'They player you tried to invite was offline.'}
                elif mono_error['error_label'] == 'ContactOwnerUnreachable':
                    logger.info(f'Failed to invite {mono_error["error_dict"]["name"]}'
                                f' because he has the invitee blocked')
                    return {'status_code': 520, 'text': f'Could not invite {mono_error["error_dict"]["name"]}'
                                                        f' because he has you blocked or is otherwise unreachable.'}
                else:
                    logger.error(f'Failed invite because of monolith error {response.error()}')
                    return {'status_code': 520, 'text': f'Failed invite because of unhandled Monolith error '
                                                        f'{response.error()} please report this to the waitlist '
                                                        f'maintainer with the monolith message.'}
            elif response.code() == 404:
                return {'status_code': 404, 'text': "You need to go to <a href='" +
                                                    url_for('fc_sso.login_redirect') +
                                                    "'>SSO Login</a> and relogin in!"}
            else:
                return {'status_code': response.code(), 'text': response.error()}
        # NOTE(review): this also returns on the *success* path (non-error
        # response) with the 'Invite failed for unknown reason' text —
        # callers appear to key on status_code rather than the text.
        return {'status_code': response.code(), 'text': 'Invite failed for unknown reason'}

    logger.info("Failed to invite %d to a squad, because all squads are full!", user_id)
    return {'status_code': 403, 'text': 'Failed to invite person a squad, all squads are full!'}


def spawn_invite_check(character_id, group_id, fleet_id):
    """Start a 20s background timer that checks whether an invite was accepted."""
    logger.info(f"Spawning invite check timer for character_id={character_id} group_id={group_id}"
                f" and fleet_id={fleet_id}")
    timer_id = (character_id, group_id, fleet_id)
    if timer_id in check_timers:
        # this invite check is already running
        logger.info(f"There is already an invite check running for {timer_id}")
        return
    check_timers[timer_id] = 0
    t = Timer(20.0, check_invite_and_remove_timer, [character_id, group_id, fleet_id])
    t.start()
    logger.debug(f'Started timer for {timer_id}')


# (char_id, group_id, fleet_id) -> number of checks already performed
check_timers: Dict[Tuple[int, int, int], int] = dict()


def check_invite_and_remove_timer(char_id: int, group_id: int, fleet_id: int):
    """Timer callback: if the character joined the fleet, remove their waitlist
    entries (except the x-up list) and notify clients via SSE; otherwise retry
    every 20s up to max_runs, then record a missed invite."""
    try:
        logger.info(f"Check Invite and remove timer triggered for"
                    f" char_id={char_id}, group_id={group_id} and fleet_id={fleet_id}")
        max_runs: int = 4
        current_run: int = 1
        timer_id = (char_id, group_id, fleet_id)
        if timer_id in check_timers:
            current_run = check_timers[timer_id]+1
        check_timers[timer_id] = current_run
        logger.info(f"New Timer count={check_timers[timer_id]} for timer_id={timer_id}")
        # hold SSE till sending
        _events = []
        logger.info("Checking invite for charID[%d] groupID[%d] fleetID[%d] current_run[%d]", char_id, group_id,
                    fleet_id, current_run)
        group: WaitlistGroup = db.session.query(WaitlistGroup).get(group_id)
        crest_fleet = db.session.query(CrestFleet).get(fleet_id)
        # the fleet was deleted meanwhile or has no fleetcomp
        if group is None or crest_fleet is None or crest_fleet.comp is None:
            if group is None:
                logger.error("On Invitecheck group is None")
            if crest_fleet is None:
                logger.error("On Invitecheck crestFleet is None")
            elif crest_fleet.comp is None:
                logger.error("On Invitecheck FleetComp is None")
            db.session.remove()
            return
        member = member_info.get_fleet_members(fleet_id, crest_fleet.comp)
        character = db.session.query(Character).filter(Character.id == char_id).first()
        # all of this character's entries except the x-up list entry
        waitlist_entries = db.session.query(WaitlistEntry)\
            .filter((WaitlistEntry.user == char_id) &
                    (WaitlistEntry.waitlist_id != group.xuplist.id)
                    ).all()

        if member is not None and char_id in member:  # he is in the fleet
            logger.info("Member %s found in members", char_id)
            fittings = []
            for entry in waitlist_entries:
                fittings.extend(entry.fittings)
            for entry in waitlist_entries:
                event = EntryRemovedSSE(entry.waitlist.group.groupID, entry.waitlist_id, entry.id)
                _events.append(event)
            db.session.query(WaitlistEntry).filter(
                (WaitlistEntry.user == char_id) & (WaitlistEntry.waitlist_id != group.xuplist.id)
            ).delete()
            h_entry = create_history_object(char_id, HistoryEntry.EVENT_AUTO_RM_PL, None, fittings)
            h_entry.exref = group.groupID
            db.session.add(h_entry)
            db.session.commit()
            # only notify clients after the DB commit succeeded
            for event in _events:
                send_server_sent_event(event)
            logger.info("auto removed %s from %s waitlist.", character.eve_name, group.groupName)
            # we are done delete timer entry
            check_timers.pop(timer_id, None)
        else:
            logger.info("Character %d %s not found in fleetmembers", char_id, character.eve_name)
            if current_run == max_runs:  # he reached his invite timeout
                logger.info("Max Runs reached and Member %s not found in members", str(char_id))
                for entry in waitlist_entries:
                    entry.inviteCount += 1
                h_entry = create_history_object(char_id, HistoryEntry.EVENT_AUTO_CHECK_FAILED, None, None)
                h_entry.exref = group.groupID
                db.session.add(h_entry)
                db.session.commit()
                send_server_sent_event(InviteMissedSSE(group_id, char_id))
                logger.info("%s missed his invite", character.eve_name)
                # we are done delete the timer entry
                check_timers.pop(timer_id, None)
            else:
                # we want to wait some more, set up new timer
                logger.info('charID[%d] groupID[%d] fleetID[%d] %s was not in fleet this time, checking again in 20s',
                            char_id, group_id, fleet_id, character.eve_name)
                t = Timer(20.0, check_invite_and_remove_timer, [char_id, group_id, fleet_id])
                t.start()
        db.session.remove()
    except Exception:
        logger.exception("Some thing went wrong during invite check!")
import os


def rename_files(src_dir, dst_dir, prefix='aphids', start=1):
    """Move every file from src_dir into dst_dir, renaming to '<prefix><n>.jpg'.

    Files are numbered sequentially starting at `start`, in os.listdir()
    order (arbitrary). The destination directory is created if missing —
    the original script assumed it already existed and crashed otherwise.
    Returns the number of files moved.
    """
    os.makedirs(dst_dir, exist_ok=True)
    n = start
    for filename in os.listdir(src_dir):
        old_path = os.path.join(src_dir, filename)
        new_path = os.path.join(dst_dir, prefix + str(n) + '.jpg')
        os.rename(old_path, new_path)
        n += 1
    return n - start


if __name__ == '__main__':
    # Guarded so importing this module no longer performs the renames.
    path = os.getcwd() + '/trainingData/test/aphids/'
    print(path)
    newpath = os.getcwd() + '/trainingData/test/aphids_r/'
    rename_files(path, newpath)
import unittest

from list import List
from message import Message


class TestList(unittest.TestCase):
    """Unit tests for the typed ``List`` container.

    ``List`` is constructed with a one-character type code (numeric/string
    codes in ``'bBhHiIqQfds'``, plus ``'m'`` for message types with an
    accompanying Message class) and rejects values outside the declared
    element type.
    """

    def test_init(self):
        # every supported scalar type code must be accepted
        for t in 'bBhHiIqQfds':
            List(t)
        # 'm' requires a Message subclass as the payload type
        List('m', Message)
        # unknown type codes and non-Message payload types are rejected
        self.assertRaises(TypeError, lambda: List('x'))
        self.assertRaises(TypeError, lambda: List('m', bytearray))

    def test_setitem(self):
        l = List('b')
        l.append(23)
        # 129 does not fit a signed byte, so assignment must fail
        self.assertRaises(ValueError, l.__setitem__, 0, 129)
        l[0] = 42

    def test_append(self):
        l = List('i')
        l.append(42)
        # non-integer values are rejected
        self.assertRaises(ValueError, l.append, '42')

    def test_extend(self):
        l = List('i')
        l.extend((1, 2, 3, 4))
        # a single bad element fails the whole extend
        self.assertRaises(ValueError, l.extend, (1, '23', 4))

    def test_insert(self):
        l = List('q')
        l.insert(0, 42)
        # inserting past the end is accepted (presumably clamps like a
        # builtin list — TODO confirm against List's implementation)
        l.insert(42, 42)
        self.assertRaises(ValueError, l.insert, 23, '23')

    def test_string(self):
        # NOTE(review): no assertions here — apparently only checks that a
        # string-typed List can be constructed; consider asserting behaviour.
        l = List('s')


if __name__ == '__main__':
    unittest.main()
#! /usr/bin/env python3
# ---------------------------------------------------------------------------- #
#  solver.py                                                                   #
#                                                                              #
#  By - jacksonwb                                                              #
#  Created: Wednesday December 1969 4:00:00 pm                                 #
#  Modified: Saturday Aug 2019 7:09:06 pm                                      #
#  Modified By: jacksonwb                                                      #
# ---------------------------------------------------------------------------- #

import time
from src.generate import make_goal_puzzle
from src.check_solvable import is_solvable
from src.PQueue import PQueue

# ANSI terminal color escapes for status output
GREEN = "\033[32m"
RED = "\033[31m"
NO_COLOR = "\033[0m"


class PuzzleException(Exception):
    """Raised when the input puzzle is invalid (e.g. not solvable)."""
    pass


def generate_children(n_map, size):
    """Yield every board one sliding move away from ``n_map``.

    ``n_map`` is a tuple-of-tuples board of side length ``size``; each child
    is produced by swapping the blank (0) tile with an in-bounds orthogonal
    neighbour. Children are yielded as tuples so they stay hashable.
    """
    # [row, col] position of the blank (0) tile
    zero = [[i, row.index(0)] for i, row in enumerate(n_map) if row.count(0)][0]
    # candidate blank positions: right, left, down, up
    pos_lst = [[sum(y) for y in zip(zero, x)] for x in [[0,1], [0,-1], [1,0], [-1,0]]]
    for pos in pos_lst:
        if all(map(lambda x: x < size and x >= 0, pos)):
            # copy to mutable lists, swap blank with the neighbour, freeze back
            new_map = list([list(row) for row in n_map])
            new_map[zero[0]][zero[1]], new_map[pos[0]][pos[1]] = new_map[pos[0]][pos[1]], new_map[zero[0]][zero[1]]
            yield tuple(tuple(row) for row in new_map)


class Puzzle:
    """A*/greedy n-puzzle solver.

    Note the search direction: it *starts* from the generated goal board and
    searches toward the input board (``self.finish = in_map``), so following
    the parent chain from ``finish`` yields the path back to the goal.
    """

    def __init__(self, size, in_map, h_fn, greedy):
        self.size = size
        self.start = make_goal_puzzle(self.size)
        self.finish = in_map
        if not is_solvable(size, self.finish, self.start):
            raise PuzzleException(RED + "Not Solvable" + NO_COLOR)
        self.h_fn = h_fn
        # priority = g + h for A*; with greedy the g term is weighted to 0
        self.open_set = PQueue(key=lambda x: (self.g_val[x] if not greedy else 0) + self.h_fn(self.size, x, self.finish))
        self.closed_set = {}
        self.parent = dict([(self.start, None)])
        self.g_val = {}
        self.g_val[self.start] = 0

    def solve(self):
        """Run the search, recording timing; fills self.parent for playback."""
        self.start_time = time.process_time()
        self.open_set.push(self.start)
        while not self.open_set.is_empty():
            current = self.open_set.pop()
            self.closed_set[current] = current
            if current == self.finish:
                self.end_time = time.process_time()
                return
            for child in generate_children(current, self.size):
                if child in self.closed_set:
                    continue
                # every move costs 1
                tentative_g_val = self.g_val[current] + 1
                if not self.open_set.contains(child) or tentative_g_val < self.g_val[child]:
                    self.g_val[child] = tentative_g_val
                    self.parent[child] = current
                    self.open_set.push(child)

    def map_str(self, n_map):
        """Render a board with cells right-aligned to fit the widest tile number."""
        s = len(str(len(n_map) ** 2))
        return '\n'.join([''.join(map(lambda x: str(x).rjust(s + 1), row)) for row in n_map])

    def print_all_states(self, state):
        """Print every board from ``state`` back along the parent chain.

        NOTE(review): ``solution`` is accumulated but never used — it could
        be removed, or returned instead of printing.
        """
        solution = []
        while state:
            solution.append(state)
            print(self.map_str(state), '\n')
            state = self.parent[state]

    def print_solution(self):
        """Print the move sequence plus timing and search-complexity stats."""
        self.print_all_states(self.finish)
        print(GREEN, f"Solved in {self.end_time - self.start_time:.4f} seconds", NO_COLOR, sep='')
        print("Time complexity:", len(self.closed_set))
        print("Space Complexity:", len(self.g_val))
        print("Number of steps:", self.g_val[self.finish])
""" Write a function to compute the number of python files(.py extension) in a specified directory recursively. """ from os import listdir from os.path import isfile, join import sys count = 0 def count_pyfile(path): contents = listdir(path) for f in contents: if isfile(join(path, f)): if '.py' in f: global count count += 1 else: count_pyfile(join(path, f)) if len(sys.argv) < 2: print 'usage: python problem4.py path' else: count_pyfile(sys.argv[1]) print 'python files:', count
from odoo import models, fields


class tipoTipoOperacion(models.Model):
    """Catalog model for operation-type kinds (tipo de tipo de operación)."""

    _name = 'analisis_pic.tipotipooperacion'

    idtipoTipoOperacion = fields.Char('ID')  # external/legacy identifier
    codigo = fields.Char('Codigo')           # short code
    # Label typo fixed: 'Descipción' -> 'Descripción' (user-visible string)
    descripcion = fields.Char('Descripción')
# !/usr/bin/python # -*- coding: utf-8 -*- # - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - # # The MIT License (MIT) # # Copyright (c) 2017 Jorge A. Gomes (jorgegomes83 at hotmail dot com) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
#
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

from collections import namedtuple as nt
from ctypes import sizeof

import OpenGL.GL as GL


__all__ = [
    'DType',
    'DTypeInfo',
]


class DTypeInfo(nt("DTypeInfo", "name size byte_size gl_size, gl_type uniform format")):
    # Static description of one GLSL-compatible data type.
    # Fields:
    #   name      -- human-readable type name
    #   size      -- component count (total element count for matrix types)
    #   byte_size -- total size in bytes (component count * ctypes sizeof)
    #   gl_size   -- the OpenGL type enum (e.g. GL_FLOAT)
    #   gl_type   -- the ctypes/PyOpenGL scalar type
    #   uniform   -- the glUniform* entry point used to upload this type
    #   format    -- struct-module format string for packing values

    def load(self, *args):
        """Upload a value through this type's glUniform* entry point.

        NOTE(review): matrix setters (glUniformMatrix*) take
        (location, count, transpose, value) unlike the scalar/vector setters,
        so callers must pass the argument list matching self.uniform.
        """
        self.uniform(*args)


# One DTypeInfo per GLSL type, grouped as: integer scalars, 2/3/4-component
# integer vectors, float/double scalars and vectors, then float and double
# matrices.
# NOTE(review): GL_UNSIGNED_INT64 for the ulong types may require an
# extension on some platforms/bindings — confirm availability.
DType = nt(
    "DType",
    "bool byte short int ubyte ushort uint ulong "
    "bool_v2 byte_v2 short_v2 int_v2 ubyte_v2 ushort_v2 uint_v2 ulong_v2 "
    "bool_v3 byte_v3 short_v3 int_v3 ubyte_v3 ushort_v3 uint_v3 ulong_v3 "
    "bool_v4 byte_v4 short_v4 int_v4 ubyte_v4 ushort_v4 uint_v4 ulong_v4 "
    "float double "
    "float_v2 double_v2 "
    "float_v3 double_v3 "
    "float_v4 double_v4 "
    "float_m2 float_m23 float_m24 "
    "float_m3 float_m32 float_m34 "
    "float_m4 float_m42 float_m43 "
    "double_m2 double_m23 double_m24 "
    "double_m3 double_m32 double_m34 "
    "double_m4 double_m42 double_m43 "
)(
    # --- integer/boolean scalars ---
    DTypeInfo('bool', 1, sizeof(GL.GLboolean), GL.GL_BOOL, GL.GLboolean, GL.glUniform1ui, '?'),
    DTypeInfo('byte', 1, sizeof(GL.GLbyte), GL.GL_BYTE, GL.GLbyte, GL.glUniform1i, 'b'),
    DTypeInfo('short', 1, sizeof(GL.GLshort), GL.GL_SHORT, GL.GLshort, GL.glUniform1i, 'h'),
    DTypeInfo('int', 1, sizeof(GL.GLint), GL.GL_INT, GL.GLint, GL.glUniform1i, 'i'),
    DTypeInfo('ubyte', 1, sizeof(GL.GLubyte), GL.GL_UNSIGNED_BYTE, GL.GLubyte, GL.glUniform1ui, 'B'),
    DTypeInfo('ushort', 1, sizeof(GL.GLushort), GL.GL_UNSIGNED_SHORT, GL.GLushort, GL.glUniform1ui, 'H'),
    DTypeInfo('uint', 1, sizeof(GL.GLuint), GL.GL_UNSIGNED_INT, GL.GLuint, GL.glUniform1ui, 'I'),
    DTypeInfo('ulong', 1, sizeof(GL.GLulong), GL.GL_UNSIGNED_INT64, GL.GLulong, GL.glUniform1ui, 'L'),
    # --- 2-component integer/boolean vectors ---
    DTypeInfo('bool_v2', 2, 2 * sizeof(GL.GLboolean), GL.GL_BOOL, GL.GLboolean, GL.glUniform2ui, '??'),
    DTypeInfo('byte_v2', 2, 2 * sizeof(GL.GLbyte), GL.GL_BYTE, GL.GLbyte, GL.glUniform2i, 'bb'),
    DTypeInfo('short_v2', 2, 2 * sizeof(GL.GLshort), GL.GL_SHORT, GL.GLshort, GL.glUniform2i, 'hh'),
    DTypeInfo('int_v2', 2, 2 * sizeof(GL.GLint), GL.GL_INT, GL.GLint, GL.glUniform2i, 'ii'),
    DTypeInfo('ubyte_v2', 2, 2 * sizeof(GL.GLubyte), GL.GL_UNSIGNED_BYTE, GL.GLubyte, GL.glUniform2ui, 'BB'),
    DTypeInfo('ushort_v2', 2, 2 * sizeof(GL.GLushort), GL.GL_UNSIGNED_SHORT, GL.GLushort, GL.glUniform2ui, 'HH'),
    DTypeInfo('uint_v2', 2, 2 * sizeof(GL.GLuint), GL.GL_UNSIGNED_INT, GL.GLuint, GL.glUniform2ui, 'II'),
    DTypeInfo('ulong_v2', 2, 2 * sizeof(GL.GLulong), GL.GL_UNSIGNED_INT64, GL.GLulong, GL.glUniform2ui, 'LL'),
    # --- 3-component integer/boolean vectors ---
    DTypeInfo('bool_v3', 3, 3 * sizeof(GL.GLboolean), GL.GL_BOOL, GL.GLboolean, GL.glUniform3ui, '???'),
    DTypeInfo('byte_v3', 3, 3 * sizeof(GL.GLbyte), GL.GL_BYTE, GL.GLbyte, GL.glUniform3i, 'bbb'),
    DTypeInfo('short_v3', 3, 3 * sizeof(GL.GLshort), GL.GL_SHORT, GL.GLshort, GL.glUniform3i, 'hhh'),
    DTypeInfo('int_v3', 3, 3 * sizeof(GL.GLint), GL.GL_INT, GL.GLint, GL.glUniform3i, 'iii'),
    DTypeInfo('ubyte_v3', 3, 3 * sizeof(GL.GLubyte), GL.GL_UNSIGNED_BYTE, GL.GLubyte, GL.glUniform3ui, 'BBB'),
    DTypeInfo('ushort_v3', 3, 3 * sizeof(GL.GLushort), GL.GL_UNSIGNED_SHORT, GL.GLushort, GL.glUniform3ui, 'HHH'),
    DTypeInfo('uint_v3', 3, 3 * sizeof(GL.GLuint), GL.GL_UNSIGNED_INT, GL.GLuint, GL.glUniform3ui, 'III'),
    DTypeInfo('ulong_v3', 3, 3 * sizeof(GL.GLulong), GL.GL_UNSIGNED_INT64, GL.GLulong, GL.glUniform3ui, 'LLL'),
    # --- 4-component integer/boolean vectors ---
    DTypeInfo('bool_v4', 4, 4 * sizeof(GL.GLboolean), GL.GL_BOOL, GL.GLboolean, GL.glUniform4ui, '????'),
    DTypeInfo('byte_v4', 4, 4 * sizeof(GL.GLbyte), GL.GL_BYTE, GL.GLbyte, GL.glUniform4i, 'bbbb'),
    DTypeInfo('short_v4', 4, 4 * sizeof(GL.GLshort), GL.GL_SHORT, GL.GLshort, GL.glUniform4i, 'hhhh'),
    DTypeInfo('int_v4', 4, 4 * sizeof(GL.GLint), GL.GL_INT, GL.GLint, GL.glUniform4i, 'iiii'),
    DTypeInfo('ubyte_v4', 4, 4 * sizeof(GL.GLubyte), GL.GL_UNSIGNED_BYTE, GL.GLubyte, GL.glUniform4ui, 'BBBB'),
    DTypeInfo('ushort_v4', 4, 4 * sizeof(GL.GLushort), GL.GL_UNSIGNED_SHORT, GL.GLushort, GL.glUniform4ui, 'HHHH'),
    DTypeInfo('uint_v4', 4, 4 * sizeof(GL.GLuint), GL.GL_UNSIGNED_INT, GL.GLuint, GL.glUniform4ui, 'IIII'),
    DTypeInfo('ulong_v4', 4, 4 * sizeof(GL.GLulong), GL.GL_UNSIGNED_INT64, GL.GLulong, GL.glUniform4ui, 'LLLL'),
    # --- floating-point scalars ---
    DTypeInfo('float', 1, 1 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniform1f, 'f'),
    DTypeInfo('double', 1, 1 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniform1d, 'd'),
    # --- floating-point vectors ---
    DTypeInfo('float_v2', 2, 2 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniform2f, 'ff'),
    DTypeInfo('double_v2', 2, 2 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniform2d, 'dd'),
    DTypeInfo('float_v3', 3, 3 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniform3f, 'fff'),
    DTypeInfo('double_v3', 3, 3 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniform3d, 'ddd'),
    DTypeInfo('float_v4', 4, 4 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniform4f, 'ffff'),
    DTypeInfo('double_v4', 4, 4 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniform4d, 'dddd'),
    # --- float matrices (size = total element count, e.g. m23 = 2x3 = 6) ---
    DTypeInfo('float_m2', 4, 4 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniformMatrix2fv, '4f'),
    DTypeInfo('float_m23', 6, 6 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniformMatrix2x3fv, '6f'),
    DTypeInfo('float_m24', 8, 8 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniformMatrix2x4fv, '8f'),
    DTypeInfo('float_m3', 9, 9 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniformMatrix3fv, '9f'),
    DTypeInfo('float_m32', 6, 6 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniformMatrix3x2fv, '6f'),
    DTypeInfo('float_m34', 12, 12 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniformMatrix3x4fv, '12f'),
    DTypeInfo('float_m4', 16, 16 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniformMatrix4fv, '16f'),
    DTypeInfo('float_m42', 8, 8 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniformMatrix4x2fv, '8f'),
    DTypeInfo('float_m43', 12, 12 * sizeof(GL.GLfloat), GL.GL_FLOAT, GL.GLfloat, GL.glUniformMatrix4x3fv, '12f'),
    # --- double matrices ---
    DTypeInfo('double_m2', 4, 4 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniformMatrix2dv, '4d'),
    DTypeInfo('double_m23', 6, 6 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniformMatrix2x3dv, '6d'),
    DTypeInfo('double_m24', 8, 8 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniformMatrix2x4dv, '8d'),
    DTypeInfo('double_m3', 9, 9 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniformMatrix3dv, '9d'),
    DTypeInfo('double_m32', 6, 6 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniformMatrix3x2dv, '6d'),
    DTypeInfo('double_m34', 12, 12 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniformMatrix3x4dv, '12d'),
    DTypeInfo('double_m4', 16, 16 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniformMatrix4dv, '16d'),
    DTypeInfo('double_m42', 8, 8 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniformMatrix4x2dv, '8d'),
    DTypeInfo('double_m43', 12, 12 * sizeof(GL.GLdouble), GL.GL_DOUBLE, GL.GLdouble, GL.glUniformMatrix4x3dv, '12d'),
)
from django.contrib.auth.models import User
from django.db import models
from django.utils import timezone

from neighborhood.models import House, Neighborhood


class UserProfile(models.Model):
    """Extra per-user data attached one-to-one to Django's auth User."""

    user = models.OneToOneField(User, on_delete=models.CASCADE, primary_key=True)
    # on_delete made explicit (mandatory since Django 2.0); CASCADE matches
    # the implicit default behaviour of the original bare ForeignKey.
    house = models.ForeignKey(House, null=True, on_delete=models.CASCADE)
    join_date = models.DateField(default=timezone.now)
    bio = models.TextField(max_length=1000, default='Default biography')
    member_status = models.CharField(max_length=100, default='neighbor')
    # plain integer id, not a FK to Neighborhood — TODO confirm this is
    # intentional (the Neighborhood import suggests a relation was planned)
    neighborhood_id = models.IntegerField(null=True)

    def is_board_member(self):
        """Return True if this profile belongs to a board member."""
        return self.member_status == 'board-member'

    def __str__(self):
        return self.user.first_name + " " + self.user.last_name


class Activity(models.Model):
    """A timestamped record of a user action (defaults to a 'POST')."""

    id = models.AutoField(primary_key=True)
    date = models.DateTimeField(auto_now=True)
    activity_type = models.CharField(default='POST', max_length=30)
    # on_delete made explicit (mandatory since Django 2.0); CASCADE matches
    # the implicit default of the original bare ForeignKey.
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # id of the object this activity refers to; -1 means "none".
    # NOTE(review): a GenericForeignKey would be safer than a loose int id.
    assoc_obj_id = models.IntegerField(default=-1)
import requests
import datetime
import config
import json
import csv
import random

# Bulk price-update script for the Konnektive CRM API: reads rows of
# (purchaseId, customerId, email) from a CSV and issues one purchase/update
# call per row, setting each purchase to the fixed `price` below.
# The large commented-out sections are earlier variants (merchant-ID /
# card-BIN cascading, transaction lookup, CSV result logging) kept for
# reference.
# NOTE(review): `json` and `random` are only used by the commented-out code.

# MidList = [
#     '467',
#     '469',
#     '471',
#     '473',
#     '475'
# ]

# cardBinList = [
#     '402360',
#     '533317',
#     '403027',
#     '408443',
#     '402361',
#     '408089',
#     '489452',
#     '423067',
#     '403035',
#     '437863',
#     '416334',
#     '456542',
# ]

# the new price applied to every purchase in the input file
price = '85.80'

inputFilename = "update_price4.csv"
# _______________________________________________________________________________
inputFilePath = "C:\\Users\\GuestUser\\Desktop\\AF decline cascading\\"

# API credentials are read from the local config module
loginId = config.loginId
password = config.password

with open(inputFilePath + inputFilename, "r", encoding="latin-1") as f:
    lst = csv.reader(f, delimiter=',')
    for line in lst:
        # expected columns: purchaseId, customerId, customer email
        purchaseId = line[0]
        customerId = line[1]
        # NOTE(review): "costumer" is a typo for "customer"
        costumerEmail = line[2]

        # ListLen = len(MidList)-1
        # randomNumMID = random.randint(0, ListLen)
        # newMerchantId = MidList[randomNumMID]

        outputFilePath = "C:\\Users\\GuestUser\\Desktop\\AF decline cascading\\"
        outputFilename = "increase price4 " + datetime.date.today().strftime("%d.%m.%Y") + ".csv"

        # urlUpdatePurchase1 = 'https://api.konnektive.com/transactions/query/?' \
        #                      'loginId=' + \
        #                      loginId + \
        #                      '&password=' + \
        #                      password + \
        #                      '&customerId=' + \
        #                      customerId
        # #                    '&nextBillDate=' + \
        # #                    nextBillDate
        # r = requests.post(urlUpdatePurchase1)
        # responseUpdatePurchase = requests.post(urlUpdatePurchase1)
        # parseResponseUpdatePurchase = responseUpdatePurchase.json()
        # try:
        #     cardBin = parseResponseUpdatePurchase['message']['data'][0]['cardBin']
        #     customerPhone = parseResponseUpdatePurchase['message']['data'][0]['phoneNumber']
        # except:
        #     print('Incorrect input data for file line: ' + str(line))
        # print(customerId, cardBin)
        # cardBinListCross = cardBin in cardBinList
        # if cardBinListCross:

        # build the purchase/update request URL with credentials + new price
        urlUpdatePurchase1 = 'https://api.konnektive.com/purchase/update/?' \
                             'loginId=' + \
                             loginId + \
                             '&password=' + \
                             password + \
                             '&purchaseId=' + \
                             purchaseId + \
                             '&price=' + \
                             price
        # '&nextBillDate=' + \
        # nextBillDate

        # fire the update; the response is intentionally not checked here
        r = requests.post(urlUpdatePurchase1)

        # responseUpdatePurchase = requests.post(urlUpdatePurchase1)
        # parseResponseUpdatePurchase = responseUpdatePurchase.json()
        # resultApiCall1 = parseResponseUpdatePurchase['result']
        # messageApiCall1 = parseResponseUpdatePurchase['message']
        # with open(outputFilePath + outputFilename, "a", newline="") as file:
        #     stringWriter = costumerEmail, purchaseId, resultApiCall1, messageApiCall1, cardBin, \
        #                    customerPhone
        #     writer = csv.writer(file)
        #     writer.writerow(stringWriter)
        # print(costumerEmail, purchaseId, resultApiCall1, messageApiCall1, cardBin, customerPhone)

        # progress indicator: one email per processed row
        print(costumerEmail)
from django.db import models, transaction
from django.db.models.signals import pre_save
from django.dispatch import receiver
from django.utils import timezone
from project_apps.users.models import CustomUser as User
from sorl.thumbnail import get_thumbnail
from PIL import Image as pil_image


def user_directory_path(instance, filename):
    """Return the upload path for an image file.

    NOTE(review): ``instance`` is unused and every file lands under the
    same ``images/`` prefix — presumably this was meant to include a
    per-user directory; confirm before relying on it.
    """
    return "images/{0}/".format(filename)


class Image(models.Model):
    """An uploaded image owned by a user, with generated thumbnails."""

    title = models.CharField(max_length=125)
    description = models.CharField(max_length=250, null=True)
    image = models.ImageField(upload_to="images/")
    created_at = models.DateTimeField(default=timezone.now)
    owner = models.ForeignKey(User, on_delete=models.CASCADE)

    def get_available_thumbnails_sizes(self):
        """Return the thumbnail sizes the owner's plan allows (delegates to the user model)."""
        user = User.objects.filter(pk=self.owner.id).first()
        available_thumbnail_sizes = user.get_available_thumbnail_sizes()
        return available_thumbnail_sizes

    def make_thumbnail(self, thumbnail_height):
        """Render one thumbnail at ``thumbnail_height`` px (width keeps aspect) and persist it."""
        thumbnail_width = self.get_thumbnail_width(thumbnail_height)
        size = f"{thumbnail_width}x{thumbnail_height}"
        # sorl-thumbnail renders and caches the scaled file; we store its URL.
        img = get_thumbnail(self.image, size, quality=90)
        Thumbnail.objects.create(
            thumbnail=img.url, size=thumbnail_height, org_image=self
        )

    def get_thumbnail_width(self, thumb_height):
        """Return the width (px) that preserves the source aspect ratio at ``thumb_height``."""
        image = pil_image.open(self.image)
        img_width, img_height = image.size
        height_percent = thumb_height / float(img_height)
        thumb_width = int((float(img_width) * float(height_percent)))
        return thumb_width

    def delete_old_thumbnails(self):
        """Delete every stored Thumbnail row for this image."""
        thumbnails = Thumbnail.objects.filter(org_image=self.pk)
        if thumbnails:
            for thumb in thumbnails:
                thumb.delete()

    def generate_image_thumbnails(self, thumbnail_heights):
        """Replace all thumbnails with a fresh set, one per height in ``thumbnail_heights``."""
        self.delete_old_thumbnails()
        for height in thumbnail_heights:
            self.make_thumbnail(height)

    def get_image_thumbnails(self):
        """Return the queryset of thumbnails belonging to this image."""
        return Thumbnail.objects.filter(org_image=self.pk)


class Thumbnail(models.Model):
    """A single generated thumbnail: the rendered file's URL plus its height."""

    thumbnail = models.URLField(max_length=2000)
    size = models.CharField(max_length=125)
    org_image = models.ForeignKey(Image, on_delete=models.CASCADE)


@receiver(pre_save, sender=User)
@transaction.atomic
def make_new_thumbnails_if_plan_changed(sender, instance, **kwargs):
    """Regenerate all of a user's thumbnails when their plan changes.

    Runs before a User row is saved; compares the stored plan with the
    incoming one.  For a brand-new user there is no stored row, so the
    lookup raises DoesNotExist and we do nothing.
    """
    try:
        # Lock the existing row so concurrent saves don't race the comparison.
        obj = sender.objects.select_for_update().get(pk=instance.pk)
    except sender.DoesNotExist:
        pass
    else:
        if not obj.plan == instance.plan:  # Field has changed
            user_images = Image.objects.filter(owner=obj.pk)
            new_thumb_heights = instance.plan.get_available_thumbnail_sizes_list()
            for image in user_images:
                image.generate_image_thumbnails(new_thumb_heights)
# # 2019-10-21 # This is a (very) slightly modified version of the program posted at: # https://www.ohjelmointiputka.net/koodivinkit/25050 # by user Hassu. # from tkinter import * import random class Lotto(Frame): def __init__(self, master=None): Frame.__init__(self, master) self.pack() self.buildGUI() def buildGUI(self): # add a button in the GUI self.main_button = Button (self, width=20, bg="brown", fg="white", text="Draw the numbers", cursor="trek", command=self.respond) # make the button visible self.main_button.pack() # add a field where the numbers will be shown self.tulos = Label (self, bg="yellow", font=('times', 20, 'bold'), width=20, height=2) # make the field visible self.tulos.pack() def respond(self): # seven random integers from the range 1..40 numbers = random.sample(range(1, 40), 7) numbers.sort() # show the numbers self.tulos.config(text=numbers) program = Lotto() program.master.title("Are you ready for Lotto?") program.mainloop()
import unittest from katas.kyu_7.zebulans_nightmare import zebulansNightmare class ZebulanTestCase(unittest.TestCase): def test_equal_1(self): self.assertEqual(zebulansNightmare('camel_case'), 'camelCase') def test_equal_2(self): self.assertEqual(zebulansNightmare('mark_as_issue'), 'markAsIssue') def test_equal_3(self): self.assertEqual( zebulansNightmare('copy_paste_pep8'), 'copyPastePep8' ) def test_equal_4(self): self.assertEqual(zebulansNightmare('goto_next_kata'), 'gotoNextKata') def test_equal_5(self): self.assertEqual(zebulansNightmare('repeat'), 'repeat')
import tkinter as tk from tkinter import ttk class DownloaderInterface: def __init__(self, master): self.url = "" self.label = ttk.Label (master, text = "URL: ") self.entry = ttk.Entry (master, textvariable = self.url) self.entry.config (width = 100) self.entry.config (background = 'black') self.label.pack (side = tk.LEFT); self.entry.pack (side = tk.RIGHT); if __name__ == '__main__': root = tk.Tk() d = DownloaderInterface(root) root.mainloop()
###9.4 Write a program to read through the mbox-short.txt and figure out who has sent the greatest number of mail messages. The program looks for 'From ' lines and takes the second word of those lines as the person who sent the mail. The program creates a Python dictionary that maps the sender's mail address to a count of the number of times they appear in the file. After the dictionary is produced, the program reads through the dictionary using a maximum loop to find the most prolific committer. name = input("Enter file:") if len(name) < 1 : name = "mbox-short.txt" handle = open(name) lista = list() for line in handle: if not line.startswith("From:"): continue line = line.split() remitente = line[1] lista.append(remitente) #print(remitente) counts = dict() for palabra in lista: counts[palabra] = counts.get(palabra,0) + 1 ##histogram totalcuenta = None palabraRepetida = None for palabra,cuentaPalabra in counts.items(): if totalcuenta is None or cuentaPalabra>totalcuenta: totalcuenta = cuentaPalabra palabraRepetida = palabra print (palabraRepetida,totalcuenta)
# Extract audio features (MFCC / chroma / contrast / tonnetz / mel) from a
# directory tree of wav files and save them as .npy arrays per split.
# import keras
from sklearn.utils import shuffle
from tqdm import tqdm
import librosa
import librosa.display
import os
import numpy as np
import pandas as pd
import random
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore")

os.environ['CUDA_VISIBLE_DEVICES'] = '1'

# Dataset layout: <root>/{train,val,test}/<name>-<label>.<ext>
root_path = "F:/dataset/pig_voice_origion"
train = os.path.join(root_path, "train")
val = os.path.join(root_path, "val")
test = os.path.join(root_path, "test")


def get_features(path, f='MC'):
    """Load one audio file and return the feature matrix named by ``f``.

    f is one of "f1".."f5" (single features) or the stacked combinations
    "LMC", "MC", "MLMC".  Rows are feature bins, columns are frames.
    """
    y, sr = librosa.load(path)
    hop_l = 512 * 2
    f1 = librosa.feature.mfcc(y, sr, n_mfcc=60, hop_length=hop_l)
    f2 = librosa.feature.chroma_stft(y, sr, hop_length=hop_l)
    f3 = librosa.feature.spectral_contrast(y, sr, hop_length=hop_l)
    # Tonnetz is computed on the harmonic component only.
    f4 = librosa.effects.harmonic(y)
    f4 = librosa.feature.tonnetz(f4, sr, hop_length=hop_l)
    f5 = librosa.feature.melspectrogram(y, sr, n_mels=60, hop_length=hop_l)
    f5 = librosa.power_to_db(f5)
    # Stack the per-frame features row-wise into combined representations.
    LMC = np.vstack((f1, f2, f3, f4))
    MC = np.vstack((f5, f2, f3, f4))
    MLMC = np.vstack((f1, f5, f2, f3, f4))
    features = {"f1": f1, "f2": f2, "f3": f3, "f4": f4, "f5": f5,
                "LMC": LMC, "MC": MC, "MLMC": MLMC}
    return features[f]


def get_data(path, f='MLMC'):
    """Extract features for every file in ``path``.

    Returns (feature matrices, labels, per-class counts).  The label is
    parsed from the filename ("<name>-<label>.<ext>"); matrices shorter
    than ``wav_max`` frames are zero-padded on the right so all samples
    share the same width.
    """
    classnumber = [0, 0, 0, 0]  # four classes, counted by label index
    path_list = []
    label_list = []
    paths = os.listdir(path)
    # Deterministic shuffle so splits are reproducible.
    paths = shuffle(paths, random_state=21)
    for i in paths:
        path_list.append(os.path.join(path, i))
        label = int(i.split("-")[1].split(".")[0])
        label_list.append(label)
        classnumber[label] += 1
    datas = []
    for i in tqdm(path_list):
        datas.append(get_features(i, f))
    # Pad every sample to wav_max frames by appending zero columns.
    wav_max = 55
    for i in range(len(datas)):
        while (datas[i].shape[1] < wav_max):
            datas[i] = np.c_[datas[i], np.zeros(datas[i].shape[0])]
    return datas, label_list, classnumber


# Extract the chosen representation for each split and persist as .npy.
f = 'MC'
train_data, train_labels, train_classes = get_data(train, f=f)
print(np.array(train_data).shape, np.array(train_labels).shape, train_classes)
val_data, val_labels, val_classes = get_data(val, f=f)
print(np.array(val_data).shape, np.array(val_labels).shape, val_classes)
test_data, test_labels, test_classes = get_data(test, f=f)
print(np.array(test_data).shape, np.array(test_labels).shape, test_classes)

np.save("./"+f+"/"+"train_" + f + ".npy", np.array(train_data))
np.save("./"+f+"/"+"train_label.npy", np.array(train_labels))
np.save("./"+f+"/"+"val_" + f + ".npy", np.array(val_data))
np.save("./"+f+"/"+"val_label.npy", np.array(val_labels))
np.save("./"+f+"/"+"test_" + f + ".npy", np.array(test_data))
np.save("./"+f+"/"+"test_label.npy", np.array(test_labels))
#!/usr/bin/python from pyrosetta import * from design_protease import res_to_design, apply_constraints from glob import glob from os.path import basename, join opts='-mute core -mute protocols -mute basic -enzdes::cstfile ly104.cst -cst_fa_weight 1.0 -run:preserve_header' init(opts) cleaved = glob(join(basename('a_to_s_ly104_WT_CLEAVED_decoys'),"*best.pdb.gz")) uncleaved = glob(join(basename('a_to_s_ly104_WT_UNcleaved_decoys'),"*best.pdb.gz")) sf=create_score_function('ref2015_cst') des_res=res_to_design('ly104_WT.pdb',8,[72,154])[2] pep_res=list(range(197,208)) with open('res_enetgies.txt', 'w') as w: w.write('Cleaved\n') for i in cleaved: print(i) try: pose=apply_constraints(pose_from_pdb(i)) sf(pose) energies=str(pose.energies()).split('\n') if i == cleaved[0]: w.write(energies[0].lstrip() + '\n') w.write(basename(i) + '\nNeighbor residues\n') for j in des_res: w.write(energies[j].lstrip() + '\n') w.write('Peptide residues\n') for j in pep_res: w.write(energies[j].lstrip() + '\n') except: continue w.write('\nUncleaved\n') for i in uncleaved: print(i) pose=apply_constraints(pose_from_pdb(i)) sf(pose) energies=str(pose.energies()).split('\n') w.write(basename(i) + '\nNeighbor residues\n') for j in des_res: w.write(energies[j].lstrip() + '\n') w.write('Peptide residues\n') for j in pep_res: w.write(energies[j].lstrip() + '\n')
import sys import os import subprocess gpu = int(sys.argv[1]) mode = sys.argv[2] poisons_root_path = sys.argv[3] assert os.path.exists(poisons_root_path) assert mode in ['convex', 'mean'] assert gpu >= 0 and gpu <= 3 max_id = 50 i = gpu if i == 0: i = 4 while i <= max_id: if mode == 'convex': poisons_path = '{}/{}/poison_03999.pth'.format(poisons_root_path, i) elif mode == 'mean': poisons_path = '{}/{}/poison_01000.pth'.format(poisons_root_path, i) assert os.path.exists(poisons_path) cmd = 'bash launch/eval-attack-transfer-18-nodp.sh {} {} {} {}'.format(gpu, mode, i, poisons_path) print(cmd) subprocess.run(cmd.split()) i += 4
"""Import the named modules in this directory.""" from . import download from . import playback from . import playlist
from ..FeatureExtractor import InterExtractor import numpy from numpy import random from scipy import fftpack, stats, optimize try: from pylab import * except: pass from common_functions import * from dist_from_u_extractor import dist_from_u_extractor from wei_av_uncertainty_extractor import wei_av_uncertainty_extractor class stdvs_from_u_extractor(InterExtractor): active = True extname = 'stdvs_from_u' #extractor's name def extract(self): dist_from_u = self.fetch_extr('dist_from_u', returnall=True) dist = dist_from_u.result sd = self.fetch_extr('weighted_average', returnall=True).uncertainty # returns the uncertainty in the weighted average num = dist/sd # number of standard deviations from the weighted average uncer = dist_from_u.uncertainty/sd # scales the uncertainty (not sure this is correct) self.uncertainty = uncer return num
""" Commands for Dox. """ from ace.plugins.dox import utils from ace import config from os import walk, getcwd import os.path from ace.plugins.dox.uploader import upload_document, find_key, extract_keyfield from ace.plugins.dox.client import ping_library, get_content_keys, get_content_item def init(args): """ Initializes Dox. """ if not (args.content_type and args.body_field and args.key_field): raise ValueError('You must specify --content-type --body-field and --key-field') env = config.get_env() if not env.has_section('Dox'): env.add_section('Dox') env.set('Dox','content_type',args.content_type) env.set('Dox','body_field',args.body_field) env.set('Dox','key_field',args.key_field) config.write_env(env) print 'Dox environment initialized.' def upload(args): """ Upload command. """ utils.check_init() print 'Uploading documents...' keymap = utils.get_keymap() keyfields = utils.get_keyfields() for root, dirs, fields in walk(getcwd()): # TODO - .doxignore for name in files: if name.endswith('.md'): path = os.path.join(root,name) if utils.is_modified(path): key = find_key(path,keymap,keyfields) key, created = upload_document(path,args,key=key) utils.write_hash(path) if created: print 'Created new content item',key keymap[path] = key keyfield = extract_keyfield(path) print 'assigning key',key,'to keyfields',keyfields,'under keyfield',keyfield keyfields[keyfield] = key else: print name, 'not modified. Skipping.' utils.write_keyfields(keyfields) utils.write_keymap(keymap) def keyfields(args): """ Keyfields command. """ print 'Synchronizing keyfield cache.' keyfield_data = {} keys = get_content_keys() for key in keys: content_item = get_content_item(key) keyfield_data[content_item['data'][keyfield_name]] = key print 'Mapping',content_item['data'][keyfield_name],'to',key uitls.write_keyfields(keyfield_data) print 'Keyfield cache synchronized.' def clean(args): """ Clean command. """ print 'Cleaning out local file records. All local files eligible for upload.' 
utils.clean_hashes()
import numpy import statsmodels.sandbox.stats.multicomp import scipy.stats import sys file_name="TE_result_matrix.txt" ifile = open(file_name) line = ifile.readline() temp = line.split() gene_name=[] for i in range(len(temp)-1): gene_name.append(temp[i+1]) cutOff=0 sourceIndex=0 TEnetwork=[] source=[] TE=[] target=[] for line in ifile: temp = line.split() for targetIndex in range(len(temp)-1): if float(temp[targetIndex+1])>cutOff: source.append(gene_name[sourceIndex]) TE.append(float(temp[targetIndex+1])) target.append(gene_name[targetIndex]) sourceIndex=sourceIndex+1 ifile.close() TEzscore=(TE-numpy.mean(TE))/numpy.std(TE) TEpvalue=1-scipy.stats.norm.cdf(TEzscore) TEfdr=statsmodels.sandbox.stats.multicomp.multipletests(TEpvalue,alpha=0.05,method='fdr_bh') fdrCutoff=float(sys.argv[1]) ofile = open(file_name.replace(".txt",".fdr")+str(fdrCutoff)+".sif","w") for i in range(len(source)): if TEfdr[1][i]<fdrCutoff: ofile.write(source[i]+"\t"+str(TE[i])+"\t"+target[i]+"\n") ofile.close()
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ Created on Tue May 29 16:26:10 2018 @author: kieling """ import numpy as np from sklearn import datasets def sigmoid (soma): return 1 / (1 + np.exp(-soma)) def sigmoidDerivada(sig): return sig * (1 - sig) base = datasets.load_breast_cancer() entradas = base.data valoresSaidas = base.target saidas = np.empty([569,1],dtype=int) for i in range(569): saidas[i] = valoresSaidas[i] pesos0 = np.random.random((30,5)) pesos1 = np.random.random((5,1)) epocas = 100000 taxaAprendizagem = 20 momento = 1 for j in range(epocas): camadaEntrada = entradas #Ativaçao primeira camada somaSinapse0 = np.dot(camadaEntrada,pesos0) camadaOculta = sigmoid(somaSinapse0) #Ativaçao segunda camada somaSinapse1 = np.dot(camadaOculta,pesos1) camadaSaida = sigmoid(somaSinapse1) #Calculando o erro (cost function) erroCamadaSaida = saidas - camadaSaida ##encontra a media absoluta pois temos varias entradas tendo a media. ##diferente de quando e apenas um peso por entrada que apenas ## temos erro = esperado - saida_ativacao mediaAbsoluta = np.mean(np.abs(erroCamadaSaida)) print('Erro: '+str(mediaAbsoluta)) ##Derivada ##DeltaSaida ## ##Derivada para calcular o gradiente derivadaSaida = sigmoidDerivada(camadaSaida) ##DeltaSaida para calcular o gradiente deltaSaida = erroCamadaSaida * derivadaSaida #a transportadora colocar as colunas em uma linha transporta pesos1Transposta = pesos1.T deltaSaidaXPeso = deltaSaida.dot(pesos1Transposta) deltaCamadaOculta = deltaSaidaXPeso * sigmoidDerivada(camadaOculta) camadaOcultaTransposta = camadaOculta.T pesosNovo1 = camadaOcultaTransposta.dot(deltaSaida) pesos1 = (pesos1 * momento) + (pesosNovo1 * taxaAprendizagem) camadaEntradaTransposta = camadaEntrada.T pesosNovo0 = camadaEntradaTransposta.dot(deltaCamadaOculta) pesos0 = (pesos0 * momento) + (pesosNovo0 * taxaAprendizagem)
# Uniform wrappers around several RL environments (DeepMind Lab, VizDoom,
# PLE Catcher / RaycastMaze / Pixelcopter) exposing a common interface:
# restart_game / get_frame / process_frame / game_finished / make_action.
# NOTE(review): this module uses Python 2 idioms (os.fdopen(..., 'w', 0),
# integer division in pixel indices below).
#from vizdoom import *
import scipy
import numpy as np
from ple.games.catcher import Catcher
from ple.games.raycastmaze import RaycastMaze
from ple.games.pixelcopter_v2 import Pixelcopter_v2
from ple import PLE
from PIL import Image
import deepmind_lab
import random
import cv2

seed = 147
random.seed(seed)

import os
import sys
# Unbuffered stdout so log lines appear immediately.
unbuffered = os.fdopen(sys.stdout.fileno(), 'w', 0)
sys.stdout = unbuffered


class LabWrapper:
    """DeepMind Lab maze environment with position-visitation tracking."""

    def __init__(self, width):
        self.game = None
        self.actions = None
        self.width = width
        self.frames_played = 0
        self.max_game_len = 3000
        self.visitation_map = {}      # timestep -> DEBUG.POS.TRANS observation
        self.reward_positions = []    # positions where a reward was collected
        # Create game env
        self.game = self.set_lab_game_setup()
        # Reset game
        self.restart_game()
        self.top_down_view=None

    # NOTE(review): this stub is shadowed by the full construct_visitation_map
    # defined later in the class; the later definition wins at class creation.
    def construct_visitation_map(self):
        return None

    def _action(self,*entries):
        # Lab expects an int32 action vector.
        return np.array(entries, dtype=np.intc)

    def set_lab_game_setup(self):
        """Create the Lab level and define the discrete action set."""
        level = 'nav_maze_static_01'
        level = 'small_maze'  # overrides the line above
        #level = 'small_maze_multimap'
        env = deepmind_lab.Lab(
            level,
            ['RGB_INTERLACED', 'DEBUG.POS.TRANS', 'DEBUG.CAMERA.TOP_DOWN'],
            config={
                'fps': str(60),
                'width': str(self.width),
                'height': str(self.width)
            })
        self.actions = [
            self._action(-20, 0, 0, 0, 0, 0, 0),  # look_left
            self._action(20, 0, 0, 0, 0, 0, 0),   # look_right
            # _action( 0,  10, 0, 0, 0, 0, 0),    # look_up
            # _action( 0, -10, 0, 0, 0, 0, 0),    # look_down
            self._action(0, 0, -1, 0, 0, 0, 0),   # strafe_left
            self._action(0, 0, 1, 0, 0, 0, 0),    # strafe_right
            self._action(0, 0, 0, 1, 0, 0, 0),    # forward
            self._action(0, 0, 0, -1, 0, 0, 0),   # backward
            # _action( 0, 0, 0, 0, 1, 0, 0),      # fire
            # _action( 0, 0, 0, 0, 0, 1, 0),      # jump
            # _action( 0, 0, 0, 0, 0, 0, 1)       # crouch
        ]
        return env

    def restart_game(self):
        """Reset the episode and record the (engine-chosen) start position."""
        self.game.reset()
        self.frames_played = 0
        self.visitation_map[self.frames_played] = self.game.observations()[
            'DEBUG.POS.TRANS']
        #the starting point is random through env

    def process_frame(self, image):
        """Flatten a frame to a normalized (H*W*C, 1) float column."""
        image = image.astype(np.float32)
        #normalize
        image = np.reshape(image, [np.prod(image.shape), 1]) / 255.0
        return image

    def get_frame(self):
        """Return (grayscale frame, colour frame) for the current state."""
        colour_frame = self.game.observations()['RGB_INTERLACED']
        frame = cv2.cvtColor(
            colour_frame,
            cv2.COLOR_RGB2GRAY
        )
        #cv2.imshow('Image',self.last_frame)
        #cv2.waitKey(0)
        #cv2.destroyAllWindows()
        return frame, colour_frame

    def game_finished(self):
        """True when the frame budget is exhausted or the engine stopped."""
        if self.frames_played >= self.max_game_len:
            print('LOG: max frames')
            return True
        if not self.game.is_running():
            print('LOG: game ended from engine')
        return not self.game.is_running()

    def make_action(self, action_index):
        """Execute one action (repeated 4 engine steps); track positions/rewards."""
        #print ('ACTION index %d action %s'%(action_index,str(self.actions[action_index])))
        reward = self.game.step(self.actions[action_index], num_steps=4)
        self.frames_played += 1
        if self.game_finished():
            # Engine observations are unavailable after the episode ends;
            # extrapolate slightly from the previous recorded position.
            self.visitation_map[self.frames_played] = self.visitation_map[
                self.frames_played - 1] + (1,0,0)
        else:
            self.visitation_map[self.frames_played] = self.game.observations()['DEBUG.POS.TRANS']
        if reward > 0 and not self.game_finished():
            self.reward_positions.append(self.game.observations()[
                'DEBUG.POS.TRANS'])
        return reward

    def construct_visitation_map(self):
        """Render the recorded trajectory onto an RGBA image.

        Red circle = start, blue circle = end, purple circles (increasing
        opacity over time) = intermediate steps.
        """
        # mark rewards on map with red
        max_pos_x = 0
        max_pos_y = 0
        min_pos_x = 9999
        min_pos_y = 9999
        for position in self.visitation_map:
            (x,y,rot) = self.visitation_map[position]
            if x > max_pos_x:
                max_pos_x = x
            if x < min_pos_x:
                min_pos_x = x
            if y > max_pos_y:
                max_pos_y = y
            if y < min_pos_y:
                min_pos_y = y
        print('image of shape (%d,%d,%d)'%(483,483,4))
        print('bias x %d bias y %d'%(min_pos_x,min_pos_y))
        print('max pos x %d max pos y %d'%(max_pos_x, max_pos_y))
        # Fixed canvas (483+116 square); fully transparent white background.
        image = np.ones((int(483+116), int(483+116),4))
        image.fill(255)
        image[:,:,3].fill(0)
        # Older steps are drawn more transparent than recent ones.
        transparency = np.linspace(20,255,len(self.visitation_map))
        print('total steps %d '%len(self.visitation_map))
        step_color = (128,0,255)
        for timestep in range(len(self.visitation_map)):
            (x,y,z) = self.visitation_map[timestep]
            x = int(x)
            y = int(y)
            if timestep == 0 :
                #red is start point
                cv2.circle(image, (x,y), 7, (0,0,255,255), thickness=-1,
                           lineType=8, shift=0)
            elif timestep == len(self.visitation_map) - 1:
                # blue is end point
                cv2.circle(image, (x, y), 7, (255, 0 , 0,255), thickness=-1,
                           lineType=8, shift=0)
            else:
                cv2.circle(image, (x, y), 4,
                           (128,0,128,int(transparency[timestep])),
                           thickness=-1, lineType=8, shift=0)
        return image


''' Doom game class'''
class DoomWrapper:
    def __init__(self, width):
        '''
        @width : width of game window
        '''
        self.game = None
        self.max_game_len = 300
        # One-hot button combos: move_left / move_right / attack.
        self.actions = [[True, False, False], [False, True, False], [False, False, True]]
        self.width = width
        #Create game env
        self.game = self.set_doom_game_setup(self.max_game_len)

    def set_doom_game_setup(self, max_game_len):
        '''
        @max_game_len : maximum time steps allowed before terminating episode
        '''
        game = DoomGame()
        game.set_doom_scenario_path("basic.wad")  # This corresponds to the simple task we will pose our agent
        game.set_doom_map("map01")
        game.set_screen_resolution(ScreenResolution.RES_160X120)
        game.set_screen_format(ScreenFormat.GRAY8)
        game.set_render_hud(False)
        game.set_render_crosshair(False)
        game.set_render_weapon(True)
        game.set_render_decals(False)
        game.set_render_particles(False)
        game.add_available_button(Button.MOVE_LEFT)
        game.add_available_button(Button.MOVE_RIGHT)
        game.add_available_button(Button.ATTACK)
        game.add_available_game_variable(GameVariable.AMMO2)
        game.add_available_game_variable(GameVariable.POSITION_X)
        game.add_available_game_variable(GameVariable.POSITION_Y)
        game.set_episode_timeout(max_game_len)
        game.set_episode_start_time(10)
        game.set_window_visible(False)
        game.set_sound_enabled(False)
        game.set_living_reward(-1)
        game.set_mode(Mode.PLAYER)
        game.init()
        return game

    def restart_game(self):
        self.game.new_episode()

    def get_frame(self):
        frame = self.game.get_state().screen_buffer
        return frame

    ''' Processes Doom screen image to produce cropped and resized image.'''
    def process_frame(self, frame):
        processed = frame[10:-10, 30:-30]
        processed = scipy.misc.imresize(processed, [self.width, self.width])
        # also normalize
        processed = np.reshape(processed, [np.prod(processed.shape)]) / 255.0
        return processed

    def game_finished(self):
        return self.game.is_episode_finished()

    def make_action(self, action_index):
        # NOTE(review): ``self.env`` is never assigned in this class — the
        # attribute is ``self.game``; this raises AttributeError if called.
        reward = self.env.make_action(self.actions[action_index]) / 100.0
        return reward


''' PLE Catcher game '''
class CatcherWrapper:
    def __init__(self, width, lives = 1):
        '''
        @width : width of game window
        @lives : number of deaths before the episode terminates (death = pallet does not catch ball)
        '''
        self.width = width
        self.game = None
        self.actions = None
        self.max_game_len = 150
        self.visitation_map = {}   # timestep -> (player_x, 10)
        self.timer = 0
        self.coordinates = (0, 0)
        # Create game env
        catcher = Catcher(width=width, height=width,init_lives=lives)
        self.game = self.set_catcher_game_setup(catcher)

    def set_catcher_game_setup(self, game):
        p = PLE(game, display_screen=False)
        self.actions = p.getActionSet()
        p.init()
        return p

    def restart_game(self):
        """Reset state and randomize the start with 0-30 random actions."""
        self.visitation_map = {}
        self.timer = 0
        self.coordinates = (0, 0)
        self.game.reset_game()
        frame_skip = random.randint(0,30)
        #Randomize start
        for i in range(frame_skip):
            reward = self.make_action(random.choice(range(len(self.actions))))
        self.coordinates = (self.game.game.getGameState()['player_x'], 10)

    def get_frame(self):
        frame = self.game.getScreenGrayscale()
        color_frame = self.game.getScreenRGB()
        return frame, color_frame

    def process_frame(self, frame):
        #normalize
        processed = np.reshape(frame, [np.prod(frame.shape), 1]) / 255.0
        return processed

    def game_finished(self):
        return self.game.game_over()

    def make_action(self, action_index):
        reward = self.game.act(self.actions[action_index])
        #update visitation map
        self.coordinates = (self.game.game.getGameState()['player_x'], 10)
        self.visitation_map[self.timer] = self.coordinates
        self.timer += 1
        return reward

    def construct_visitation_map(self):
        """Render visited paddle positions onto a small RGBA strip.

        Repeated visits accumulate alpha; green marks the start column,
        blue the final one.
        """
        image = np.uint8(np.zeros((11,self.width, 4)))
        image = Image.fromarray(image)
        image = image.convert("RGBA")
        pixels = image.load()
        opacity = 100
        increase = 20
        for timestep in self.visitation_map:
            coordinate = self.visitation_map[timestep]
            if pixels[coordinate[0], coordinate[1]] == (0,0,0,0):
                pixels[coordinate[0],coordinate[1]] = (255,0,0, int(opacity))
            else:
                # Already visited: bump the alpha channel.
                value = tuple(sum(x) for x in zip(
                    pixels[coordinate[0], coordinate[1]],
                    (0, 0, 0, int(increase))))
                pixels[coordinate[0], coordinate[1]] = value
        #mark start and end positions
        # NOTE(review): ``coordinate[1]/2`` relies on Python 2 integer
        # division; under Python 3 a float index would raise TypeError.
        coordinate = self.visitation_map[0]
        pixels[coordinate[0], coordinate[1]/2] = (0,255,0,255)
        pixels[coordinate[0], coordinate[1]/2 - 1] = (0, 255, 0, 255)
        coordinate = self.visitation_map[len(self.visitation_map) - 1]
        # rewrite coordinate
        pixels[coordinate[0], coordinate[1]/2] = (0, 0, 255, 255)
        return image


#TODO GENERIC PLE WRAPPER
class RaycastMazeWrapper:
    def __init__(self, width):
        '''
        @width : width of game window
        '''
        self.game = None
        self.actions = None
        # Maximum 1000 steps in maze
        self.max_game_len = 500
        self.frames_no = 0
        # Create game env
        raycast = RaycastMaze(width=width, height=width, map_size=6)
        self.game = self.set_maze_game_setup(raycast)

    def set_maze_game_setup(self, game):
        '''
        @game : game instance
        '''
        p = PLE(game, display_screen=False)
        #In some games, doing nothing is a valid action
        #in a maze, it is not
        self.actions = p.getActionSet()[:-1]
        p.init()
        return p

    def restart_game(self):
        self.game.reset_game()
        frame_skip = random.randint(0, 30)
        # Randomize start
        for i in range(frame_skip):
            reward = self.make_action(random.choice(range(len(self.actions))))

    def get_frame(self):
        frame = self.game.getScreenGrayscale()
        color_frame = self.game.getScreenRGB()
        return frame, color_frame

    def process_frame(self, frame):
        '''
        @frame : frame to be processed
        '''
        # normalize
        processed = np.reshape(frame, [np.prod(frame.shape), 1]) / 255.0
        return processed

    def game_finished(self):
        return self.game.game_over()

    def make_action(self, action_index):
        '''
        @action_index : index of action
        '''
        reward = self.game.act(self.actions[action_index])
        return reward


class PixelcopterWrapper:
    def __init__(self, width):
        '''
        @width : width of game window
        '''
        self.game = None
        self.actions = None
        # Maximum 1000 steps in maze
        self.max_game_len = 300
        self.frames_no = 0
        # Create game env
        raycast = Pixelcopter_v2(width=width, height=width)
        self.game = self.set_maze_game_setup(raycast)

    def construct_visitation_map(self):
        # No trajectory tracking for this game.
        return None

    def set_maze_game_setup(self, game):
        '''
        @game : game instance
        '''
        p = PLE(game, display_screen=True)
        self.actions = p.getActionSet()
        p.init()
        return p

    def restart_game(self):
        self.game.reset_game()
        #don't randomize start since it will most likely end the game
        #frame_skip = random.randint(0, 30)
        # Randomize start
        #for i in range(frame_skip):
        #    reward = self.make_action(random.choice(range(len(self.actions))))

    def get_frame(self):
        frame = self.game.getScreenGrayscale()
        color_frame = self.game.getScreenRGB()
        return frame, color_frame

    def process_frame(self, frame):
        '''
        @frame : frame to be processed
        '''
        # normalize
        processed = np.reshape(frame, [np.prod(frame.shape), 1]) / 255.0
        return processed

    def game_finished(self):
        return self.game.game_over()

    def make_action(self, action_index):
        '''
        @action_index : index of action
        '''
        reward = self.game.act(self.actions[action_index])
        return reward


class GameWrapper:
    """Factory: construct the requested wrapper by name and tag it."""

    def __init__(self, game_name, window_width):
        '''
        @game_name : name of required game
        @window_width : width of window/image to be used
        '''
        self.game = None
        if game_name == 'Doom':
            self.game = DoomWrapper(window_width)
            self.game.name = 'Doom'
        if game_name == 'Catcher':
            self.game = CatcherWrapper(window_width)
            self.game.name = 'Catcher'
        if game_name == 'Maze':
            self.game = RaycastMazeWrapper(window_width)
            self.game.name = 'Maze'
        if game_name == 'Copter':
            self.game = PixelcopterWrapper(window_width)
            self.game.name = 'Copter'
        if game_name == 'LabMaze':
            self.game = LabWrapper(window_width)
            self.game.name = 'LabMaze'

    def get_game(self):
        return self.game
# Clustering of timeseries data import pandas as pd import numpy as np import matplotlib matplotlib.use('Agg') import matplotlib.pyplot as plt from sklearn.manifold import TSNE from sklearn.cluster import AgglomerativeClustering from sklearn.preprocessing import MinMaxScaler from VisualizationFunctions import plot_by_factor, AnalyzeClusters print('Reading in the data...') agg = pd.read_csv('sku_labels.csv') df = pd.read_csv('extracted_features.csv') df.dropna(axis=1, inplace=True) scale = MinMaxScaler() skus = df['id'] df.set_index('id', inplace=True) X = scale.fit_transform(df) names = df.columns print('Running Dimentionality Reduction...') dimred = TSNE(2) tsnes = dimred.fit_transform(X) #Merge tsne coordinates onto original df with sku_keys plot_df = pd.DataFrame(tsnes).join(df.reset_index()) #Merge above tsne and features table to sku_key and categories plot_df['sku_key'] = plot_df['id'].astype(int) agg['sku_key'] = agg['sku_key'].astype(int) plot_df = plot_df.merge(agg, how='left', on='sku_key') colors=['b', 'r', 'g', 'y', 'm', 'orange', 'gold', 'skyblue'] plot_by_factor(plot_df, 'sku_department', colors) #This is where the clusters come into play. here you have to read in #the files which contain clusters from the different methods and #assess them. colors = ['darkblue', 'tomato', 'orchid', 'darkorange', 'lime', 'gold', 'dodgerblue', 'pink', 'grey', 'darkgreen', 'y', 'slateblue', 'r', 'brown'] run_cont = ['sales', 'selling_price', 'avg_discount'] run_cats = ['sku_department', 'sku_subdepartment', 'sku_category', 'sku_subcategory'] product_sales = pd.read_csv('aggregate_products.csv') clusters = pd.read_csv('som_clusters.csv') clusters.columns = ['sku_key', 'cluster'] analyze = AnalyzeClusters() df_dict = analyze.make_dataset(product_sales, clusters) analyze.plot_cluster_continuous(df_dict, run_cont) analyze.plot_cluster_continuous_box(df_dict, run_cont) analyze.plot_cluster_categorical(df_dict, run_cats)
from shapely.geometry import Polygon


class Hotpoint:
    """A candidate region built from an OpenCV contour.

    Stores the contour as a shapely Polygon plus its area and rounded
    centroid (``self.x``, ``self.y``), and can generate rectangular search
    windows positioned relative to that centroid.
    """

    def __init__(self, contour):
        # ``contour`` is an OpenCV-style contour: an array of [[x, y]] points.
        self.polygon = Polygon([(p[0][0], p[0][1]) for p in contour])
        self.area = self.polygon.area
        x, y = self.polygon.centroid.coords.xy
        self.x = round(x[0])
        self.y = round(y[0])

    def __str__(self):
        return "c: ({}, {}) - a: {}".format(self.x, self.y, self.area)

    def is_good(self):
        # optimize: add more clauses to decide whether a hotpoint is good or not
        return True

    def __create_vertices(self, center_x, center_y, width, height):
        """Return the 4 corners of a width x height box centred on (center_x, center_y)."""
        p1 = [center_x - width / 2, center_y - height / 2]
        p2 = [center_x + width / 2, center_y - height / 2]
        p3 = [center_x + width / 2, center_y + height / 2]
        p4 = [center_x - width / 2, center_y + height / 2]
        return p1, p2, p3, p4

    def generate_window(self, scale=3) -> Polygon:
        """Return the hotpoint's bounding box scaled by ``scale``, centred on the centroid."""
        minx, miny, maxx, maxy = self.polygon.bounds
        width = (maxx - minx) * scale
        height = (maxy - miny) * scale
        # Reuse the shared corner builder instead of duplicating the maths
        # (the original repeated the four-corner computation inline).
        p1, p2, p3, p4 = self.__create_vertices(self.x, self.y, width, height)
        return Polygon([p1, p2, p3, p4])

    def generate_window_with_quadrant(self, width, height, quarter, scale=2):
        """Return a scaled window positioned by ``quarter``.

        :param width: width of the rectangle containing the first homography found
        :param height: height of the rectangle containing the first homography found
        :param quarter: position of the window w.r.t. the object:
            0 = centred on the hotpoint; 1..4 = window's centre placed on the
            scaled box's top-left / top-right / bottom-right / bottom-left
            corner respectively (see the ASCII diagrams in version history).
        :param scale: scale factor for the adaptive window
        :return: shapely Polygon window
        """
        width = scale * width
        height = scale * height
        minx = self.x - width / 2
        maxx = self.x + width / 2
        miny = self.y - height / 2
        maxy = self.y + height / 2
        # quarter -> centre of the generated window.
        centers = {
            0: (self.x, self.y),
            1: (minx, maxy),
            2: (maxx, maxy),
            3: (maxx, miny),
            4: (minx, miny),
        }
        if quarter not in centers:
            raise ValueError('Quadrante must be between 0 and 4')
        center_x, center_y = centers[quarter]
        p1, p2, p3, p4 = self.__create_vertices(center_x, center_y, width, height)
        return Polygon([p1, p2, p3, p4])

    def generate_window_with_chessboard(self, width, height, position, scale=2):
        """Return the cell of a 3x3 chessboard of scaled windows.

        -------------------------
        |   1   |   2   |   3   |
        -------------------------
        |   8   |   0   |   4   |
        -------------------------
        |   7   |   6   |   5   |
        -------------------------

        Position 0 is centred on the hotpoint; 1..8 are the neighbouring
        cells, each the same width x height, shifted by whole window sizes.
        """
        width = scale * width
        height = scale * height
        # position -> (column shift, row shift) in whole window units; this
        # replaces the original 9-way branch of hand-copied corner maths.
        offsets = {
            0: (0, 0), 1: (-1, -1), 2: (0, -1), 3: (1, -1), 4: (1, 0),
            5: (1, 1), 6: (0, 1), 7: (-1, 1), 8: (-1, 0),
        }
        if position not in offsets:
            raise ValueError('Position must be between 0 and 8')
        col, row = offsets[position]
        dx = col * width
        dy = row * height
        corners = self.__create_vertices(self.x, self.y, width, height)
        return Polygon([[px + dx, py + dy] for px, py in corners])
# Fetch the Thai-Wikipedia page listing subdistrict municipalities
# (เทศบาลตำบล) in Thailand and report the HTTP response object.
import requests

url = 'https://th.wikipedia.org/wiki/รายชื่อเทศบาลตำบลในประเทศไทย'

response = requests.get(url)
print(response)
# print(response.text[:5_000])
#v4 of the python matching algorithm
#we process fsc first in this version
#
# Matches rows of fundata.csv (comp1) against rows of ivd_univeris.csv
# (comp2), first by exact fundserv code, then by fuzzy name similarity,
# and writes the merged matches to mergeddata.csv.
# NOTE: Python 2 script (print statements, reader.next(), 'rb' CSV mode).

#packages used
import csv
from Levenshtein import *
import time


#this function checks whether NAV values are similar
#we use a band variable to determine the range we are ok with
#THIS FUNCTION HAS BEEN DEBUGGED
def NAVCheck(N1, N2):
    """Return abs(N1-N2) if N2 is within +/-2% of N1, else -1.

    N1/N2 arrive as CSV strings; an empty string on either side means
    "no NAV available" and yields -1 (treated as no match).
    """
    if N1 != "" and N2 != "":
        N1 = float(N1)
        N2 = float(N2)
    else:
        return -1
    # 2% tolerance band around N1 (strict inequalities on both sides).
    band = N1*0.02
    if N2 < N1 + band and N2 > N1 - band:
        return abs(N1-N2)
    else:
        return -1


#nMatch looks for a match for list a in the dataset dset (the entirety of dataset 2)
#it returns the best match
#note that it matches off string similarities
#THIS FUNCTION HAS BEEN DEBUGGED
def nMatch(a,dset):
    """Find the best fuzzy-name match for row *a* inside *dset*.

    A candidate must share the date column (a[7] == b[10]), have a similar
    NAV, and not already be claimed (module-global ``marked``).  Among
    candidates, the highest Levenshtein ratio of the name columns wins,
    subject to a 0.6 minimum.  On success, marks the chosen row and returns
    a + b + [nav_score, 1 - lev_ratio]; otherwise returns [].
    """
    levthresh = 0.6
    levscore = 1
    navscore = 1
    temp = []          # best candidate row seen so far
    count = 0          # index of b within dset (parallel to ``marked``)
    decide = 0         # index of the current best candidate
    for b in dset:
        cur_nav = NAVCheck(a[8],b[11])
        if a[7] == b[10] and cur_nav>=0 and marked[count] < 0:
            lratio = ratio(a[1],b[9])
            if len(temp) == 0:
                # First candidate: accept only above the threshold.
                if lratio > levthresh:
                    temp = b
                    levscore = 1-lratio
                    navscore = cur_nav
                    decide = count
            else:
                # Later candidates: must beat the current best ratio.
                if lratio > ratio(a[1],temp[9]):
                    temp = b
                    levscore = 1-lratio
                    navscore = cur_nav
                    decide = count
        count = count + 1
    if temp != []:
        nresult = a + temp
        marked[decide] = 1
        #we include their NAV score and lev score
        #the closer to 0, the more ideal the match
        nresult.append(navscore)
        nresult.append(levscore)
        return nresult
    else:
        return []


#this function just finds the fsc code/date match, and returns it if NAV is similar
#THIS FUNCTION IS DEBUGGED
def fscMatch(a,dset):
    """Find the first unclaimed row of *dset* with the same fundserv code
    (a[2] == b[6]) and date (a[7] == b[10]) as row *a*.

    The NAV-similarity requirement is waived when a[11] == 1 (the row was
    flagged as having 10+ samples of the same fund name).  Returns
    a + b + [nav_score, 0] on success (lev score is 0 for exact-code
    matches), else [].  Marks the matched row in module-global ``marked``.
    """
    count = 0
    decide = 0
    for b in dset:
        navscore = NAVCheck(a[8],b[11])
        if marked[count] < 0 and a[2] == b[6] and a[7] == b[10] and (a[11] == 1 or navscore >=0):
            #we add a NAV score, and the lev score is automatically 0
            nresult = a + b
            nresult.append(navscore)
            nresult.append(0)
            marked[count] = 1
            return nresult
        count = count + 1
    return []


#mark beginning of runtime
start_time = time.time()

#this step imports the datasets into lists comp1 (fundata) and comp2 (univeris)
fundataCSV = csv.reader(open('fundata.csv','rb'),delimiter=',',quotechar='"')
univerisCSV = csv.reader(open('ivd_univeris.csv','rb'),delimiter=',',quotechar='"')
'''
#test data ver
fundataCSV = csv.reader(open('test1.csv','rb'),delimiter=',',quotechar='"')
univerisCSV = csv.reader(open('test2.csv','rb'),delimiter=',',quotechar='"')
'''
# Skip the header row of each file (Python 2 reader API).
univerisCSV.next()
fundataCSV.next()

#comp1 is fundata
#comp2 is univeris
#fscs is fundservcodes of univeris data
comp1 = []
comp2 = []
fscs = []
for row in fundataCSV:
    comp1.append(row)
for row in univerisCSV:
    comp2.append(row)
    fscs.append(row[6])

#then we clean up structural problems in the strings
#we have a key that corresponds to the following problems
#problem 1: there are random stars in the strings of ivd_univeris
#problem 2: class/series must be exact match (end of string)
#problem 3: random abbreviations need exact match
#THIS CHUNK OF CODE IS DEBUGGED
swapoutCSV = csv.reader(open('swapout.csv','rb'),delimiter=',',quotechar='"')
swapout = []
for row in swapoutCSV:
    swapout.append(row)
# Apply every substitution pair to the name column of both datasets.
for row in comp1:
    for swap in swapout:
        row[1] = row[1].replace(swap[0],swap[1])
for row in comp2:
    for swap in swapout:
        row[9] = row[9].replace(swap[0],swap[1])

#now we need to make sure we get exact NAV matches for funds without a lot of entries
#for all the entries in column 11 (comp1) column 12 (comp2)
#1 means we have 10+ samples
#0 means we don't
# Looks at a +/-10-row window around each row and counts rows sharing the
# same fund name; rows near the file edges are flagged 1 unconditionally.
cnt = 0
len1 = len(comp1)
while cnt < len1:
    if cnt > 10 and cnt < len1-11:
        start = cnt - 10
        end = cnt + 10
        holder = []
        while start < end:
            holder.append(comp1[start][1])
            start = start + 1
        num = holder.count(comp1[cnt][1])
        if num >= 10:
            comp1[cnt].append(1)
        else:
            comp1[cnt].append(0)
    else:
        comp1[cnt].append(1)
    cnt = cnt + 1

#we also want to mark which slots are already taken in comp2
#-1 means not matched 1 means matched
global marked
marked = [-1]*len(comp2)
global c1taken
c1taken = [-1]*len(comp1)

#now we insert our results into our array called "final"
# Pass 1: exact fundserv-code matches (cheap and reliable), marking the
# comp1 rows consumed so pass 2 skips them.
final = []
count1 = 0
for s in comp1:
    print count1
    #check if fundservcode exists in comp1 and comp2
    #if so, send it into fscMatch
    if s[2] != "" and s[2] in fscs:
        fMed = fscMatch(s, comp2)
        if fMed != []:
            final.append(fMed)
            c1taken[count1] = 1
    count1 = count1 + 1

# Pass 2: fuzzy name matching for everything pass 1 did not claim.
count2 = 0
for s in comp1:
    print count2
    if c1taken[count2] < 0:
        nMed = nMatch(s,comp2)
        if nMed != []:
            final.append(nMed)
    count2 = count2 + 1

with open('mergeddata.csv','wb') as csvfile:
    merge = csv.writer(csvfile)
    for x in final:
        merge.writerow(x)

#mark endtime
end_time = time.time()
print("Elapsed time was %g seconds" % (end_time - start_time))
# URL configuration for the member app: maps the app root ('') to the
# view_member view, reversible by the name 'view_member'.
from django.urls import path
from . import views

urlpatterns = [
    path('', views.view_member, name='view_member')
]
# -*- coding: utf-8 -*- # Generated by Django 1.10 on 2017-11-01 14:09 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('mpm', '0005_musica_tem_imagem'), ] operations = [ migrations.CreateModel( name='Banner', fields=[ ('id', models.PositiveIntegerField(primary_key=True, serialize=False)), ('url', models.SlugField(max_length=255)), ('titulo', models.CharField(max_length=255)), ('img', models.CharField(max_length=255)), ], ), ]
##coding=utf8 """ EagleForce EmailBot Import Command -------------- from archives.emailbot import efa_client """ import smtplib from email.mime.text import MIMEText class Config(): """Email Account Configuration Class """ def __init__(self, smtpServer, port, acc, pwd): self.smtpServer = smtpServer self.port = port self.acc = acc self.pwd = pwd class EmailClient(): """Email Client Class """ def __init__(self, config): self.config = config self.server = smtplib.SMTP() def login(self): self.server.connect(host=self.config.smtpServer, port=self.config.port) self.server.login(self.config.acc, self.config.pwd) self.server.ehlo() # say hi to server def send_text(self, toAddr, subject, content): """Send simple pure text email to multiple recipients. No attachments, text only. Arguments --------- toAddr: list of recipient's email address subject: text content: text """ msg = MIMEText(content) msg["Subject"] = subject msg["From"] = self.config.acc msg["To"] = ", ".join(toAddr) self.server.sendmail(self.config.acc, toAddr, msg.as_string()) def quit(self): self.server.quit() config = Config( smtpServer="smtpout.secureserver.net", port=3535, acc="sanhe.hu@theeagleforce.net", pwd="EagleForce2014", ) efa_client = EmailClient(config=config) if __name__ == "__main__": import time print("send first") efa_client.login() efa_client.send_text(["sanhe.hu@theeagleforce.net"], "Greeting", "This is a test email. For testing my email bot.") efa_client.quit() time.sleep(60) print("send second") efa_client.login() efa_client.send_text(["sanhe.hu@theeagleforce.net"], "Greeting", "This is a test email. For testing my email bot.") efa_client.quit()
# One-shot TCP chat server: accepts a single client, then forks.
# The parent prints every line the client sends; the child forwards
# console input to the client.  Either direction ends on "salir".
import sys, os
from socket import *

# The listening port is required as the first command-line argument.
if(len(sys.argv)>1):
    port=int(sys.argv[1])
else:
    print("Unable to continue, port required as parameter")
    sys.exit(1)

listening_socket=socket(AF_INET,SOCK_STREAM)
listening_socket.bind(("",port))
listening_socket.listen(1)
accepted_socket, address=listening_socket.accept()

# Fork: parent handles receiving, child handles sending.
pid=os.fork()
if pid!=0:
    # Parent process: read client messages line by line.
    listening_socket.close()
    incoming_stream=accepted_socket.makefile("r")
    print("Server - Server is accepting client messages")
    while True:
        msg=incoming_stream.readline()
        if msg=="":
            # readline() returns "" once the peer closes the connection;
            # without this check the loop would spin forever after EOF.
            break
        print(msg)
        if msg=="salir\n":
            break
    incoming_stream.close()
    accepted_socket.close()
    print("Client disconnected: If the client is not already disconected, type 'Salir'")
    os.waitpid(pid,0)
else:
    # Child process: forward local console input to the client.
    listening_socket.close()
    outgoing_stream=accepted_socket.makefile("w")
    print("Server - Server alowed to send messagges to client")
    while True:
        msg=input()
        outgoing_stream.write(msg+"\n")
        outgoing_stream.flush()
        # BUGFIX: input() strips the trailing newline, so the original
        # comparison against "salir\n" could never match and the send
        # loop had no way to terminate.
        if msg=="salir":
            break
    outgoing_stream.close()
    accepted_socket.close()
    sys.exit(0)
# Copyright (c) 2014 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. { 'targets': [ { 'target_name': 'actions-test', 'type': 'none', 'actions': [ { 'action_name': 'first action (fails)', 'inputs': [ 'action_fail.py', ], 'outputs': [ 'ALWAYS_OUT_OF_DATE', ], 'action': [ 'python', '<@(_inputs)' ], 'msvs_cygwin_shell': 0, }, { 'action_name': 'second action (succeeds)', 'inputs': [ 'action_succeed.py', ], 'outputs': [ 'ALWAYS_OUT_OF_DATE', ], 'action': [ 'python', '<@(_inputs)' ], 'msvs_cygwin_shell': 0, }, ], }, ], }
from django.contrib import admin
from .models import *

# Register the chatbot models with the default admin site.
#
# BUGFIX: the previous code called ``admin.register(Model)`` as a plain
# function.  ``admin.register`` is a decorator *factory* (meant to be used
# as ``@admin.register(Model)`` above a ModelAdmin class); called directly,
# its return value was discarded and nothing was actually registered.
# ``admin.site.register`` performs the registration.
admin.site.register(intents)
admin.site.register(responses)
admin.site.register(conversations)
admin.site.register(intentSamples)
admin.site.register(responseSamples)
admin.site.register(conversationSamples)
# Windows helper that reads meeting times/URLs from Times.csv, starts OBS
# recording plus Chrome on the Zoom URL at each meeting's start time, and
# tears everything down at its end time.
import os, subprocess, time, csv, sys


def read_table():
    """Parse Times.csv into a list of meeting dicts.

    Each CSV row needs 'URL', 'From' and 'To' columns with HH:MM times;
    returns dicts of the form
    {'From': {'hour': h, 'min': m}, 'To': {...}, 'URL': url}.
    Exits the program if Times.csv is missing.
    """
    data = []
    if not os.path.isfile("Times.csv"):
        sys.exit("The File 'Times.csv' does not exist!")
    with open('Times.csv', newline='') as csvfile:
        reader = csv.DictReader(csvfile)
        for row in reader:
            url = row['URL']
            # Take only HH and MM even if seconds are present.
            hour_from, min_from = row['From'].split(":")[:2]
            hour_to, min_to = row['To'].split(":")[:2]
            dic = {'From': {'hour': int(hour_from), 'min': int(min_from)},
                   'To': {'hour': int(hour_to), 'min': int(min_to)},
                   'URL': url}
            print(dic)
            data.append(dic)
    return data


def check_time(times, reference):
    """Return True once local time has reached times[reference] (HH:MM).

    *reference* is 'From' or 'To'.
    """
    loctime = time.localtime()
    now_min = loctime.tm_min
    now_hour = loctime.tm_hour
    print("Time now: " + str(now_hour) + ":" + str(now_min))
    if now_hour > times[reference]['hour']:
        return True
    elif now_hour == times[reference]['hour'] and now_min >= times[reference]['min']:
        return True
    else:
        return False


def open_meeting(zoom_url):
    """Start OBS (recording, minimized) and open *zoom_url* in Chrome.

    Returns the OBS Popen handle so the caller can terminate it later;
    the Chrome handle is not needed afterwards.
    """
    print("Start obs ...")
    # OBS must be launched from its own directory to find its assets.
    os.chdir("C:/Program Files/obs-studio/bin/64bit")
    obs = subprocess.Popen("obs64.exe --startrecording --minimize-to-tray")
    print("Start the meeting ...")
    try:
        # try to open chrome as a 64 bit program
        chrome = subprocess.Popen("C:/Program Files/Google/Chrome/Application/chrome.exe --new-window " + zoom_url)
    except FileNotFoundError:
        # open chrome as a 32 bit program
        chrome = subprocess.Popen("C:/Program Files (x86)/Google/Chrome/Application/chrome.exe --new-window " + zoom_url)
    return obs


if __name__ == "__main__":
    print("Reading file")
    data = read_table()
    num_meetings = len(data)
    next_meeting = 0
    # Poll once a minute; meetings are handled strictly in file order.
    while(next_meeting < num_meetings):
        print("Proof the start of the meeting " + str(next_meeting))
        start = check_time(data[next_meeting], 'From')
        if start:
            print("starting meeting " + str(next_meeting))
            obs = open_meeting(data[next_meeting]['URL'])
            print("Proof the end of the meeting " + str(next_meeting))
            while(not check_time(data[next_meeting], 'To')):
                time.sleep(60)
            # Stop recording, give OBS a moment, then kill Zoom.
            subprocess.Popen.terminate(obs)
            time.sleep(5)
            os.system("TASKKILL /F /IM Zoom.exe")
            start = False
            next_meeting += 1
        time.sleep(60)
def check_email(e_string):
    """Heuristically validate an email-like string.

    Accepts the string when it has no spaces, contains '@' and '.',
    does not contain the sequence '@.', does not end with '.', and the
    last '.' comes at least two characters after the '@'.
    """
    # Reject anything failing the cheap containment checks up front.
    if (" " in e_string or "@" not in e_string
            or "." not in e_string or "@." in e_string):
        return False
    at_pos = e_string.find("@")
    dot_pos = e_string.rfind(".")
    # The final dot must sit strictly beyond '@' + 1 and not terminate
    # the string.
    return not e_string.endswith(".") and dot_pos > at_pos + 1
# Python 2 scraper for The Hindu's print-edition archive: builds the
# per-day archive URLs, collects article links from each day page, and
# extracts structured content from individual articles.
from bs4 import BeautifulSoup
import urllib2
import re
from datetime import date, timedelta
import warnings
import collections

warnings.filterwarnings("ignore", category=UserWarning, module='bs4')


def TH_Article_Content_Extractor(ArtURL):
    """
    This function takes in a hindu article url and returns a dictionary
    with following details:
        Title
        Caption
        Body
        Image URL
        Author
        Date time.
    returns a dictionary

    Each field is scraped independently (the page is re-fetched per field)
    and falls back to '' when the expected element is missing.
    """
    try:
        title = BeautifulSoup(urllib2.urlopen(ArtURL)).find_all("h1",{"class":re.compile("title")})[0].text.strip().encode('utf-8')
    except IndexError:
        title = ''
    try:
        caption = BeautifulSoup(urllib2.urlopen(ArtURL)).find_all("h2",{"class":re.compile("intro")})[0].text.strip().encode('utf-8')
    except IndexError:
        caption = ''
    try:
        body = BeautifulSoup(urllib2.urlopen(ArtURL)).find_all("div",{"id":re.compile("content-body-*")})[0].text.strip().encode('utf-8')
    except IndexError:
        body = ''
    try:
        # Swap the thumbnail size token for the larger 660px rendition.
        imgUrl = re.sub('FREE_215','FREE_660',BeautifulSoup(urllib2.urlopen(ArtURL)).find_all("img",{"class":re.compile("media-object adaptive placeholder lead-img")})[0]['data-proxy-image'])
    except IndexError:
        imgUrl = ''
    try:
        author = BeautifulSoup(urllib2.urlopen(ArtURL)).find_all("a",{"class":re.compile("auth-nm lnk|auth-nm no-lnk")})[0].text.strip().encode('utf-8')
    except IndexError:
        author = ''
    try:
        tmstmp = BeautifulSoup(urllib2.urlopen(ArtURL)).find_all("div",{"class":re.compile("ut-container")})[0].text.strip().encode('utf-8')
    except IndexError:
        tmstmp = ''
    row = {}
    row["Title"] = title
    row["Caption"] = caption
    row["Body"] = body
    row["ImgUrl"] = imgUrl
    row["Author"] = author
    row["DateTime"] = tmstmp
    return row


def TH_DayUrl_Generator():
    """
    This function takes in a date range and give a list of hindu daily
    urls from the hindu archives page ( print edition).
    returns a list

    The range is read interactively from stdin (raw_input) and is
    inclusive of both endpoints.
    """
    print "Enter Start and End Date details"
    start_year = int(raw_input("Enter the start date year: "))
    start_month = int(raw_input("Enter the start date month: "))
    start_day = int(raw_input("Enter the start date day: "))
    end_year = int(raw_input("Enter the end date year: "))
    end_month = int(raw_input("Enter the end date month: "))
    end_day = int(raw_input("Enter the end date day: "))
    DayUrls = []
    Start_Date = date(start_year, start_month, start_day) # start date
    End_Date = date(end_year, end_month, end_day) # end date
    delta = (End_Date-Start_Date)
    for i in range(delta.days + 1):
        # Archive URLs have the shape .../archive/print/YYYY/MM/DD/
        DayUrls.append('http://www.thehindu.com/archive/print/'+str((Start_Date + timedelta(days=i)).year)+'/'+str((Start_Date + timedelta(days=i)).strftime('%m'))+'/'+str((Start_Date + timedelta(days=i)).strftime('%d'))+'/')
    return DayUrls


def TH_Article_URL__Extractor(DayUrls_list):
    """
    This function takes in a daily hindu url list and gives all artilce
    links from that day along with the date.
    returns a list

    Actually returns a dict mapping article URL -> date string (digits
    extracted from the day URL).
    """
    ArtUrls = {}
    for i in range(len(DayUrls_list)):
        soup = BeautifulSoup(urllib2.urlopen(DayUrls_list[i]))
        soup.prettify()
        AllUrls = soup.findAll('a', href=True)
        # Keep only article pages (.ece) from the print edition section.
        Articles = [AllUrls[url]['href'] for url in range(len(AllUrls)) if '.ece' in AllUrls[url]['href'] and '/todays-paper/' in AllUrls[url]['href'] ]
        for j in range(len(Articles)):
            print str(re.sub('[^0-9]','',DayUrls_list[i])) +' '+Articles[j]
            ArtUrls[Articles[j].encode('utf-8')] = str(re.sub('[^0-9]','',DayUrls_list[i]))
    return ArtUrls

####Comments
#Used below commands on console to fix python encoding issue
#set PYTHONIOENCODING=UTF-8
#pip --version
from .people import PeopleViews


def create_people_views(service, router):
    """Factory: build a PeopleViews bound to *service* and *router*."""
    views = PeopleViews(service, router)
    return views
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('base', '0001_initial'), ('basketball', '0055_game_published'), ] operations = [ migrations.AddField( model_name='game', name='group', field=models.ForeignKey(on_delete=models.CASCADE, to='base.Group', blank=True, null=True), ), migrations.AddField( model_name='player', name='group', field=models.ForeignKey(on_delete=models.CASCADE, to='base.Group', blank=True, null=True), ), ]
#run.py
# Tiny CLI for the todo file: either start interactive entry ("run") or
# close a specific item ("close item N").
import todofile

f = todofile.TodoFile()

mode = input("Enter Mode: (\"run\",\n OR\n\"close item N\"): ")

if "close item" in mode:
    # BUGFIX: the command has the form "close item N", so the record id is
    # the *third* whitespace-separated token.  The previous code used
    # index 1, which is the literal word "item".
    record_id = mode.split(' ')[2]
    print(record_id)
    f.close_item(record_id=record_id)
elif mode == "run":
    f.new_entry()
#
# Copyright (C) 2018 University of Southern California.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# SENSS database bootstrap (Python 2).
#
# Usage: python <script> <mysql_root_password> <interface> <server|client|proxy>
#
# Creates the role-specific database and (re)creates its tables: each
# CREATE TABLE is attempted, and on failure (typically "already exists")
# the table is dropped and created again, so the script always leaves a
# fresh, empty schema.
#
# NOTE(review): the root password is passed on the command line (visible
# in the process list), and `type` shadows the Python builtin.
import MySQLdb
import sys

password=sys.argv[1]    # MySQL root password
interface=sys.argv[2]   # capture interface name, or the string "None"
type=sys.argv[3]        # role: "server", "client" or "proxy"

db=MySQLdb.connect(host="localhost",port=3306,user="root",passwd=password)
cur=db.cursor()

if type=="server":
    # Server role: database SENSS with CLIENT_LOGS, SERVER_LOGS,
    # CONSTANTS, THRESHOLDS and BLACKLIST tables.
    try:
        cur.execute("CREATE DATABASE SENSS")
        print "Database SENSS created"
    except:
        print "Database SENSS already exists"
    cur.execute("USE SENSS")
    try:
        cur.execute("CREATE TABLE `CLIENT_LOGS` (`id` bigint(20) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, `log_type` varchar(45) NOT NULL, `match_field` text, `packet_count` bigint(20) DEFAULT NULL, `byte_count` bigint(20) DEFAULT NULL, `speed` varchar(45) DEFAULT NULL, `flag` int(1) DEFAULT 0, `active` int(1) DEFAULT NULL, `frequency` int(11) DEFAULT 0, `end_time` int(15) DEFAULT 0,`threshold` int(15) DEFAULT 10, PRIMARY KEY (`id`))")
        print "Table CLIENT_LOGS created"
    except Exception as e:
        # Table exists (or другой failure): recreate it from scratch.
        print e
        print "Table CLIENT_LOGS already exists"
        cur.execute("DROP TABLE CLIENT_LOGS")
        cur.execute("CREATE TABLE `CLIENT_LOGS` (`id` bigint(20) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, `log_type` varchar(45) NOT NULL, `match_field` text, `packet_count` bigint(20) DEFAULT NULL, `byte_count` bigint(20) DEFAULT NULL, `speed` varchar(45) DEFAULT NULL, `flag` int(1) DEFAULT 0, `active` int(1) DEFAULT NULL, `frequency` int(11) DEFAULT 0, `end_time` int(15) DEFAULT 0,`threshold` int(15) DEFAULT 10, PRIMARY KEY (`id`))")
        print "Table CLIENT_LOGS created"
    try:
        cur.execute("CREATE TABLE `SERVER_LOGS` (`id` bigint(20) NOT NULL AUTO_INCREMENT, `request_type` varchar(45) NOT NULL,`as_name` varchar(45) NOT NULL, `match_field` text, `packet_count` bigint(20) DEFAULT NULL, `end_time` int(15) DEFAULT 0,`byte_count` bigint(20) DEFAULT NULL,valid_request INT DEFAULT NULL,prefix_allowed varchar(45) DEFAULT NULL, speed varchar(25) DEFAULT NULL,PRIMARY KEY (`id`))")
        print "Table SERVER_LOGS created"
    except Exception as e:
        print e
        print "Table SERVER_LOGS already exists"
        cur.execute("DROP TABLE SERVER_LOGS")
        cur.execute("CREATE TABLE `SERVER_LOGS` (`id` bigint(20) NOT NULL AUTO_INCREMENT, `request_type` varchar(45) NOT NULL,`as_name` varchar(45) NOT NULL, `match_field` text, `packet_count` bigint(20) DEFAULT NULL, `end_time` int(15) DEFAULT 0,`byte_count` bigint(20) DEFAULT NULL,valid_request INT DEFAULT NULL,prefix_allowed varchar(45) DEFAULT NULL, speed varchar(25) DEFAULT NULL,PRIMARY KEY (`id`))")
        print "Table SERVER_LOGS created"
    try:
        cur.execute("CREATE TABLE `CONSTANTS` (`id` int(11) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, `controller_url` VARCHAR(250) NOT NULL, `rule_capacity` INT NOT NULL,`fair_sharing` INT NOT NULL, `revoke_all` INT NOT NULL, `auth_type` VARCHAR(250) NOT NULL, PRIMARY KEY (`id`))")
        print "Table CONSTANTS created"
    except Exception as e:
        print e
        print "Table CONSTANTS already exists"
        cur.execute("DROP TABLE CONSTANTS")
        cur.execute("CREATE TABLE `CONSTANTS` (`id` int(11) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, `controller_url` VARCHAR(250) NOT NULL, `rule_capacity` INT NOT NULL,`fair_sharing` INT NOT NULL, `revoke_all` INT NOT NULL, `auth_type` VARCHAR(250) NOT NULL, PRIMARY KEY (`id`))")
        print "Table CONSTANTS created"
    try:
        cur.execute("CREATE TABLE `THRESHOLDS` (`as_name` varchar(45) NOT NULL, `used_filter_requests` INT NOT NULL, `max_filter_requests` INT NOT NULL, `used_monitoring_requests` INT NOT NULL, `max_monitoring_requests` INT NOT NULL,`block_monitoring` INT NOT NULL, `block_filtering` INT NOT NULL, `fair_sharing` INT NOT NULL, PRIMARY KEY (`as_name`))")
        print "Table THRESHOLDS created"
    except:
        print "Table THRESHOLDS already exists"
        cur.execute("DROP TABLE THRESHOLDS")
        cur.execute("CREATE TABLE `THRESHOLDS` (`as_name` varchar(45) NOT NULL, `used_filter_requests` INT NOT NULL, `max_filter_requests` INT NOT NULL, `used_monitoring_requests` INT NOT NULL, `max_monitoring_requests` INT NOT NULL,`block_monitoring` INT NOT NULL, `block_filtering` INT NOT NULL, `fair_sharing` INT NOT NULL, PRIMARY KEY (`as_name`))")
        print "Table THRESHOLDS created"
    try:
        cur.execute("CREATE TABLE `BLACKLIST` (`id` int(11) NOT NULL AUTO_INCREMENT, `signature` varchar(2500) NOT NULL, PRIMARY KEY(`id`))")
        print "Table BLACKLIST created"
    except:
        print "Table BLACKLIST already exists"
        cur.execute("DROP TABLE BLACKLIST")
        cur.execute("CREATE TABLE `BLACKLIST` (`id` int(11) NOT NULL AUTO_INCREMENT, `signature` varchar(2500) NOT NULL, PRIMARY KEY(`id`))")
        print "Table BLACKLIST created"
    cur.close()

# Fresh cursor for the client/proxy roles.
cur=db.cursor()
if type=="client":
    # Client role: database SENSS_CLIENT with AS_URLS, CLIENT_PROCESSES,
    # MONITORING_RULES, AMON_SENSS and CLIENT_LOGS tables.
    try:
        cur.execute("CREATE DATABASE SENSS_CLIENT")
        print "Database SENSS_CLIENT created"
    except:
        print "Database SENSS_CLIENT already exists"
    cur.execute("USE SENSS_CLIENT")
    try:
        cur.execute("CREATE TABLE `AS_URLS` (`id` int(11) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, `server_url` varchar(255) NOT NULL, `links_to` text, `self` int(1) DEFAULT 0, PRIMARY KEY (`id`))")
        print "Table AS_URLS created"
    except Exception as e:
        print e
        print "Table AS_URLS already exists"
        cur.execute("DROP TABLE AS_URLS")
        cur.execute("CREATE TABLE `AS_URLS` (`id` int(11) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, `server_url` varchar(255) NOT NULL, `links_to` text, `self` int(1) DEFAULT 0, PRIMARY KEY (`id`))")
        print "Table AS_URLS created"
    try:
        # NOTE(review): the recreate path below declares the columns in a
        # different order (interface before change_status) than this one —
        # verify which layout downstream code expects.
        cur.execute("CREATE TABLE `CLIENT_PROCESSES` (`id` int(11) NOT NULL AUTO_INCREMENT, `process_name` varchar(45) NOT NULL, `status` INT NOT NULL, `change_status` INT NOT NULL, `interface` varchar(45) NOT NULL, `pid` INT NOT NULL, PRIMARY KEY (`id`))")
        print "Table CLIENT_PROCESSES created"
    except Exception as e:
        print e
        print "Table CLIENT_PROCESSES already exists"
        cur.execute("DROP TABLE CLIENT_PROCESSES")
        cur.execute("CREATE TABLE `CLIENT_PROCESSES` (`id` int(11) NOT NULL AUTO_INCREMENT, `process_name` varchar(45) NOT NULL, `status` INT NOT NULL, `interface` VARCHAR(25) NOT NULL,`change_status` INT NOT NULL, `pid` INT NOT NULL, PRIMARY KEY (`id`))")
        print "Table CLIENT_PROCESSES created"
    # Seed the AMON SENSS monitoring process row.
    if interface!="None":
        cmd="INSERT INTO `CLIENT_PROCESSES` (`id`, `process_name`,`status`,`change_status`,`interface`, `pid`) VALUES (%s,'%s',%d,%d,'%s',%d)" % (0,"AMON SENSS",0,0, interface, 0)
    else:
        # NOTE(review): this branch lists 5 columns but only 4 %-placeholders
        # while supplying 5 arguments — the "%" formatting raises TypeError
        # ("not all arguments converted") before the INSERT ever runs.
        cmd="INSERT INTO `CLIENT_PROCESSES` (`id`, `process_name`,`status`, `change_status`, `pid`) VALUES (%s,'%s',%d,%d)" % (0,"AMON SENSS",0,0,0)
    cur.execute(cmd)
    db.commit()
    try:
        cur.execute("CREATE TABLE `MONITORING_RULES` (`id` int(11) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, `match_field` text, `frequency` int(5) DEFAULT 0, `end_time` int(15) DEFAULT 0, `monitor_id` bigint(20) DEFAULT 0,`type` text,`message` text, PRIMARY KEY (`id`))")
        print "Table MONITORING_RULES created"
    except Exception as e:
        print e
        print "Table MONITORING_RULES already exists"
        cur.execute("DROP TABLE MONITORING_RULES")
        cur.execute("CREATE TABLE `MONITORING_RULES` (`id` int(11) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, `match_field` text, `frequency` int(5) DEFAULT 0, `end_time` int(15) DEFAULT 0, `monitor_id` bigint(20) DEFAULT 0,`type` text,`message` text, PRIMARY KEY (`id`))")
        print "Table MONITORING_RULES created"
    try:
        cur.execute("CREATE TABLE `AMON_SENSS` (`id` int(11) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, `match_field` text, `frequency` int(5) DEFAULT 0, `monitor_duration` int(15) DEFAULT 0,`type` text, PRIMARY KEY (`id`))")
        print "Table AMON_SENSS created"
    except Exception as e:
        print e
        print "Table AMON_SENSS already exists"
        cur.execute("DROP TABLE AMON_SENSS")
        cur.execute("CREATE TABLE `AMON_SENSS` (`id` int(11) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, `match_field` text, `frequency` int(5) DEFAULT 0, `monitor_duration` int(15) DEFAULT 0,`type` text, PRIMARY KEY (`id`))")
        print "Table AMON_SENSS created"
    try:
        cur.execute("CREATE TABLE `CLIENT_LOGS` (`id` bigint(20) NOT NULL AUTO_INCREMENT, `request_type` varchar(45) NOT NULL,`as_name` varchar(45) NOT NULL, `match_field` text, `packet_count` bigint(20) DEFAULT NULL, `time` varchar(45) DEFAULT 0,`byte_count` bigint(20) DEFAULT NULL, speed varchar(2500) DEFAULT NULL,monitor_id int(5), PRIMARY KEY (`id`))")
        print "Table CLIENT_LOGS created"
    except Exception as e:
        print e
        print "Table CLIENT_LOGS already exists"
        cur.execute("DROP TABLE CLIENT_LOGS")
        cur.execute("CREATE TABLE `CLIENT_LOGS` (`id` bigint(20) NOT NULL AUTO_INCREMENT, `request_type` varchar(45) NOT NULL,`as_name` varchar(45) NOT NULL, `match_field` text, `packet_count` bigint(20) DEFAULT NULL, `time` varchar(45) DEFAULT 0,`byte_count` bigint(20) DEFAULT NULL, speed varchar(2500) DEFAULT NULL,monitor_id int(5), PRIMARY KEY (`id`))")
        print "Table CLIENT_LOGS created"

if type=="proxy":
    # Proxy role: database SENSS_PROXY with NONCES and PROXY_INFO tables.
    try:
        cur.execute("CREATE DATABASE SENSS_PROXY")
        print "Database SENSS_PROXY created"
    except:
        print "Database SENSS_PROXY already exists"
    cur.execute("USE SENSS_PROXY")
    try:
        cur.execute("CREATE TABLE `NONCES` (`id` bigint(20) NOT NULL AUTO_INCREMENT, `ip` text NOT NULL, `nonce` text NOT NULL, PRIMARY KEY (`id`))")
        print "Table nonces created"
    except Exception as e:
        print e
        print "Table NONCES already exists"
        cur.execute("DROP TABLE NONCES")
        cur.execute("CREATE TABLE `NONCES` (`id` bigint(20) NOT NULL AUTO_INCREMENT, `ip` text NOT NULL, `nonce` text NOT NULL, PRIMARY KEY (`id`))")
        print "Table NONCES created"
    try:
        cur.execute("CREATE TABLE `PROXY_INFO` (`id` int(11) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, PRIMARY KEY (`id`))")
        print "Table PROXY_INFO created"
    except Exception as e:
        print e
        print "Table PROXY_INFO already exists"
        cur.execute("DROP TABLE PROXY_INFO")
        cur.execute("CREATE TABLE `PROXY_INFO` (`id` int(11) NOT NULL AUTO_INCREMENT, `as_name` varchar(45) NOT NULL, PRIMARY KEY (`id`))")
        print "Table PROXY_INFO created"
#! /usr/bin/python3 """For each input line, decode into an array and print the smallest column sum. Lines are assumed to be 28x28 uint8 arrays, base64 encoded. """ import base64 import sys import numpy as np from PIL import Image # import matplotlib.pyplot as plt for line in sys.stdin: if not line.strip() or line.strip().startswith('#'): continue data = base64.b64decode(line.strip()) array = np.frombuffer(data, dtype=np.uint8).reshape((28, 28)).transpose() sums = array.sum(axis=0, dtype=np.int64) print(np.min(sums)) # Image.fromarray(array,mode="1").save("fromPy3.png") # doesn't show image # print(array) # print("fin1") Image.fromarray(array * 255, mode='L').save('asdf.bmp') # plt.imshow(array) #Needs to be in row,col order # plt.axis('off') # plt.savefig("fromPy4.png") # works! but takes ages, and is wrong size, and is rotated with row, col in wrong order print("fin2")
class Solution:
    def spiralOrder(self, matrix: List[List[int]]) -> List[int]:
        """Return the elements of *matrix* in clockwise spiral order.

        Walks right along the top row, down the right column, left along
        the bottom row, then up the left column, shrinking the bounds
        after each pass until every cell has been visited.

        Fixes over the previous version: an empty matrix now returns []
        (the declared return type is List[int]; it used to return None),
        and the direction variable no longer shadows the builtin ``dir``.
        """
        if not matrix:
            return []
        row_start, row_end = 0, len(matrix)
        col_start, col_end = 0, len(matrix[0])
        total = row_end * col_end
        res: List[int] = []
        direction = 0  # 0 = right, 1 = down, 2 = left, 3 = up
        while len(res) < total:
            if direction == 0:
                # Right along the current top row, then shrink the top.
                for i in range(col_start, col_end):
                    res.append(matrix[row_start][i])
                row_start += 1
            elif direction == 1:
                # Down the current rightmost column, then shrink the right.
                for i in range(row_start, row_end):
                    res.append(matrix[i][col_end - 1])
                col_end -= 1
            elif direction == 2:
                # Left along the current bottom row, then shrink the bottom.
                for i in range(col_end - 1, col_start - 1, -1):
                    res.append(matrix[row_end - 1][i])
                row_end -= 1
            else:
                # Up the current leftmost column, then shrink the left.
                for i in range(row_end - 1, row_start - 1, -1):
                    res.append(matrix[i][col_start])
                col_start += 1
            direction = (direction + 1) % 4
        return res
#-*- coding:utf8 -*- # Copyright (c) 2012 barriery # Python release: 3.7.0 import pymysql import json import sshtunnel import copy import datetime class DatabaseManager(object): def __init__(self, remote_ip, remote_usr, remote_pwd, database_usr, database_pwd, database_name): self.server = sshtunnel.SSHTunnelForwarder( (remote_ip, 22), ssh_username=remote_usr, ssh_password=remote_pwd, remote_bind_address=('localhost', 3306)) self.server.start() self.conn = pymysql.connect( user=database_usr, password=database_pwd, host='127.0.0.1', database=database_name, port=self.server.local_bind_port) def __del__(self): self.conn.close() self.server.stop() def queryNewestItems(self, table, timelabel, condition=None): with self.conn.cursor() as cursor: sql_cmd = f"select * from {table} where {timelabel}=" \ + f"(select max({timelabel}) from {table})" if condition is not None: sql_cmd += f" and {condition}" print(sql_cmd) cursor.execute(sql_cmd) data = cursor.fetchall() return data def queryItems(self, table, condition=None): with self.conn.cursor() as cursor: sql_cmd = f"select * from {table}" if condition: sql_cmd += f" where {condition}" print(sql_cmd) cursor.execute(sql_cmd) data = cursor.fetchall() return data def in_table(self, table, condition): with self.conn.cursor() as cursor: sql_cmd = f"select * from {table} where {condition}" cursor.execute(sql_cmd) data = cursor.fetchone() return data is not None def insert(self, table, params): with self.conn.cursor() as cursor: keys = None values = None for k, v in params.items(): keys = k if keys is None else "%s, %s"%(keys, k) values = "'%s'"%v if values is None else "%s, '%s'"%(values, v) sql_cmd = "insert into %s (%s) values (%s)" % (table, keys, values) cursor.execute(sql_cmd) self.conn.commit()
import functools
import threading


def synchronized(lock):
    """ Synchronization decorator: run the wrapped callable while
    holding *lock* (captured at decoration time). """
    def wrapper(f):
        @functools.wraps(f)
        def inner_wrapper(*args, **kw):
            with lock:
                return f(*args, **kw)
        return inner_wrapper
    return wrapper


class Singleton(type):
    """Metaclass implementing a thread-safe singleton via double-checked
    locking: an unlocked fast-path check in __call__, then a second check
    under the lock in _locked_call before constructing the instance.

    Note on attribute lookup: ``cls._instance`` initially resolves to the
    metaclass default (None); the assignment in _locked_call stores the
    instance on the concrete class itself, so each class using this
    metaclass gets its own singleton after first construction.
    """
    _lock = threading.Lock()   # shared lock guarding first construction
    _instance = None           # default seen before a class's first call

    def __call__(cls, *args, **kwargs):
        # Fast path: once the instance exists, no locking is needed.
        if cls._instance is None:
            cls._locked_call(*args, **kwargs)
        return cls._instance

    @synchronized(_lock)
    def _locked_call(cls, *args, **kwargs):
        # Re-check under the lock: another thread may have constructed
        # the instance between the fast-path check and lock acquisition.
        if cls._instance is None:
            cls._instance = super().__call__(*args, **kwargs)


class GameManager(metaclass=Singleton):
    # Singleton: GameManager() always yields the same instance.
    pass
# Count the perfect squares strictly between a and b (both endpoints
# excluded), reading "a b" from one line of standard input.
import math

count = 0
a, b = map(int, input().split())
for x in range(a + 1, b):
    # math.isqrt works in exact integer arithmetic.  The previous test,
    # math.sqrt(x) * math.sqrt(x) == x, relies on float rounding and can
    # misclassify values once sqrt's result is not exactly representable.
    if math.isqrt(x) ** 2 == x:
        count = count + 1
print(count)
from django.db import models # Create your models here. class Student(models.Model): name = models.CharField(max_length=25) age = models.PositiveIntegerField() course = models.CharField(max_length=25) date_of_birth = models.DateField() bio = models.TextField() def __str__(self): return self.name # class Book(models.Model): # title = models.CharField(max_length=300) # no_of_pages = models.IntegerField(default=10) # author = models.CharField(max_length=300) # student = models.OneToOneField(Student, on_delete=models.CASCADE) # body = models.TextField() # isbn = models.CharField(max_length=20, null=True, blank=True) # date = models.DateTimeField(auto_now_add=True) # def __str__(self): # return self.title class Book(models.Model): title = models.CharField(max_length=300) no_of_pages = models.IntegerField(default=10) author = models.CharField(max_length=300) student = models.ForeignKey(Student, on_delete=models.CASCADE, related_name="books") body = models.TextField() isbn = models.CharField(max_length=20, null=True, blank=True) date = models.DateTimeField(auto_now_add=True) def __str__(self): return self.title
'''
Author: Deepti Mahesh
Date: 31/10/2019
Undo Recovery
'''
import sys
import time
import random as rand
import os

# Module-level state shared by every routine below:
#   disk — variable name -> last durable value
#   logs — parsed log records (oldest first)
disk, logs = dict(), list()

def roll_complete():
    """Undo-recover with no checkpoint at all: scan the whole log backwards,
    restoring the old value of every write whose transaction never committed."""
    complete = []
    global logs
    mod_logs = logs[::-1]
    for line in mod_logs:
        not_T = line.split()[0]
        if line[0] == 'T':
            # A write record like "T1, A, 5": undo it unless T1 committed.
            string = line.replace(" ", "").split(',')
            if string[0] not in complete:
                disk[string[1]] = int(string[2])
        elif not_T == 'COMMIT':
            # Remember committed transactions so their writes are kept.
            complete.append(line.split()[1])

# Log indices of the <START CKPT> / <END CKPT> records (-1 = not seen).
checkpoint_start, checkpoint_end = -1, -1
numof_line = 1

def case_three():
    """Checkpoint END is present: only the log after START CKPT needs undoing."""
    complete = []
    global logs
    mod_logs = (logs[checkpoint_start + 1:])[::-1]
    for line in mod_logs:
        not_T = line.split()[0]
        if line[0] == 'T':
            string = line.replace(" ", "").split(',')
            if string[0] not in complete:
                disk[string[1]] = int(string[2])
        elif not_T == 'COMMIT':
            complete.append(line.split(' ')[1])

def rollback():
    """Dispatch on which checkpoint markers were seen in the log."""
    global checkpoint_start, checkpoint_end
    # An END before the matching START belongs to an older checkpoint; ignore it.
    if checkpoint_end < checkpoint_start:
        checkpoint_end = -1
    if checkpoint_start == -1:
        if checkpoint_end == -1:
            roll_complete()
        else:
            print("Error with checkpoints found")
    else:
        if checkpoint_end == -1:
            case_two()
        else:
            case_three()

def case_two():
    """START CKPT seen but no END: undo backwards until every transaction
    listed in the checkpoint record has been traced to its START."""
    global logs, disk
    # Pull the "(T1, T2, ...)" list out of the START CKPT record itself.
    mod_logs = logs[checkpoint_start]
    mod_logs = mod_logs[mod_logs.find('(') + 1:mod_logs.find(')')]
    list_trans = mod_logs.replace(' ', '').split(",")
    complete = []
    for line in logs[::-1]:
        not_t = line.split(' ')[0]
        if len(list_trans) == 0:
            # All checkpointed transactions accounted for — done.
            break
        else:
            if line[0] == 'T':
                string = line.replace(" ", "").split(',')
                if string[0] not in complete:
                    disk[string[1]] = int(string[2])
            elif not_t == 'COMMIT':
                complete.append(line.split(' ')[1])
            elif not_t == 'START':
                if (line.split())[1] != 'CKPT':
                    if line.split()[1] in list_trans:
                        list_trans.remove(line.split()[1])

def read_file(input_file):
    """Parse the input: first line holds the initial variable values, the rest
    are log records (stripped of their angle brackets before storing)."""
    global checkpoint_start, checkpoint_end, numof_line
    file = open(input_file)
    for line in file:
        if numof_line == 1:
            # First line: alternating "name value" pairs of initial disk state.
            variables = line.split()
            for i in range(len(variables)):
                if i % 2 != 0:
                    pass
                else:
                    disk[variables[i]] = int(variables[i + 1])
        else:
            inputs = line.strip()
            if inputs:
                # Strip the surrounding "<...>" (and trailing newline) markers.
                logs.append(line[1:-2])
                if line.find('CKPT') != -1:
                    if line.find('START') != -1 or line.find('END') != -1:
                        # NOTE(review): -3 offsets the header line and blank
                        # lines to give an index into ``logs`` — confirm against
                        # the expected input format.
                        checkpoint = numof_line - 3
                        if line.find('START') != -1:
                            checkpoint_start = checkpoint
                        if line.find('END') != -1:
                            checkpoint_end = checkpoint
        numof_line += 1

if __name__ == "__main__":
    # global logs
    input_file = sys.argv[1]
    output_file = open('20171212_2.txt', 'w')
    read_file(input_file)
    rollback()
    # Emit the recovered disk state as "name value" pairs in sorted order.
    string, s_disk = '', sorted(disk)
    for i in sorted(disk):
        string += i + ' ' + str(disk[i]) + ' '
    output_file.write(string[:-1] + '\n')
    output_file.close()
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
taken from https://github.com/Ankirama/python-timeular
"""
import requests
import json
from functools import wraps
from datetime import datetime
import logging

_log = logging.getLogger(__name__)
_log.addHandler(logging.StreamHandler())
_log.setLevel(logging.NOTSET)


def check_token(f):
    """Decorator: short-circuit to False when no access token is set yet."""
    @wraps(f)
    def wrapper(self, *args, **kwargs):
        if self._access_token is None:
            return False
        return f(self, *args, **kwargs)
    return wrapper


def get_current_time():
    """Current UTC time formatted as the API expects (ISO-8601, milliseconds).

    NOTE(review): ``datetime.utcnow`` is deprecated in recent Pythons; kept to
    preserve behavior.
    """
    return datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3]


class API(object):
    """Minimal REST helper shared by all Timeular endpoint wrappers."""

    _METHODS = ['get', 'post', 'patch', 'delete']
    _CLASS_STATUS_CODES = (200, 226)  # https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#2xx_Success
    _access_token = None
    _base_url = None

    def __init__(self, base_url, access_token=None):
        self._base_url = base_url
        self._access_token = access_token

    def _make_response(self, route='', method='get', json_data=None, need_auth=True, headers=None):
        """Issue an HTTP request and return the decoded JSON body.

        On a non-2xx status returns ``{"status_code": ..., "message": ...}``;
        on an unsupported method returns False.

        Bug fix: ``headers={}`` (a mutable default) used to be mutated below,
        leaking the Authorization header across calls; a fresh dict is now
        used on every call.  ``json_data`` likewise no longer shares a dict.
        """
        if method not in self._METHODS:
            _log.info('[%s] is not allowed' % method)
            return False
        url = self._base_url + route
        headers = dict(headers) if headers else {}
        if json_data is None:
            json_data = {}
        if need_auth:
            headers['Authorization'] = 'Bearer ' + self._access_token
        response = getattr(requests, method)(url, json=json_data, headers=headers)
        if response.status_code < self._CLASS_STATUS_CODES[0] or \
           response.status_code > self._CLASS_STATUS_CODES[1]:
            _log.info('code error: %d' % response.status_code)
            _log.info('[%s]: %s' % (url, response.text))
            return {"status_code": response.status_code, "message": response.text}
        return response.json()


class Timeular(API):
    """Top-level client: authenticates, then exposes the endpoint sub-APIs."""

    activities = None
    devices = None
    tracking = None
    time_entries = None
    tags_and_mentions = None
    _api_key = None
    _api_secret = None

    def __init__(self, api_key='', api_secret='', base_url='https://api.timeular.com/api/v2'):
        super(Timeular, self).__init__(base_url)
        self._api_key = api_key
        self._api_secret = api_secret
        if not self.get_access_token():
            raise ValueError('Check base_url and the route to get your access token')
        self.activities = Activities(base_url, self._access_token)
        self.devices = Devices(base_url, self._access_token)
        self.tracking = Tracking(base_url, self._access_token)
        self.time_entries = TimeEntries(base_url, self._access_token)
        self.tags_and_mentions = TagMentions(base_url, self._access_token)

    def set_api_key(self, api_key):
        self._api_key = api_key

    def set_api_secret(self, api_secret):
        self._api_secret = api_secret

    def get_access_token(self):
        """Exchange the key/secret for a bearer token; False on failure."""
        result = self._make_response('/developer/sign-in', method="post",
                                     json_data={'apiKey': self._api_key,
                                                'apiSecret': self._api_secret},
                                     need_auth=False)
        if "status_code" in result:
            return False
        self._access_token = result['token']
        return result

    @check_token
    def get_profile(self):
        return self._make_response('/user/profile')

    @check_token
    def get_integrations(self):
        return self._make_response('/integrations')

    @check_token
    def get_report(self, start_timestamp, stop_timestamp, timezone='Europe/Paris'):
        route = '/report/%s/%s?timezone=%s' % (str(start_timestamp), str(stop_timestamp), str(timezone))
        return self._make_response(route)


class TagMentions(API):
    """/tags-and-mentions endpoint."""

    _BASE_URL = '/tags-and-mentions'

    def __init__(self, base_url, access_token):
        super(TagMentions, self).__init__(base_url + self._BASE_URL, access_token)

    @check_token
    def get(self):
        return self._make_response()


class Activities(API):
    """/activities endpoint."""

    _BASE_URL = '/activities'

    def __init__(self, base_url, access_token):
        super(Activities, self).__init__(base_url + self._BASE_URL, access_token)

    @check_token
    def get(self):
        return self._make_response()

    @check_token
    def get_activity_id(self, id):
        """Return the activity dict matching ``id``, or None if absent."""
        activities = self._make_response()["activities"]
        for activity in activities:
            if activity["id"] == id:
                return activity

    @check_token
    def get_activitity_side(self, side):
        """Return the activity assigned to device side ``side``, or None."""
        activities = self._make_response()["activities"]
        for activity in activities:
            if activity["deviceSide"] == side:
                return activity

    @check_token
    def post(self, json):
        # Bug fix: _make_response has no ``json`` parameter — the payload must
        # be passed as ``json_data`` (the original raised TypeError here).
        return self._make_response(method='post', json_data=json)

    @check_token
    def patch(self, activity_id, json=None):
        route = '/%s' % str(activity_id)
        # Bug fix: same ``json`` -> ``json_data`` keyword correction as post().
        return self._make_response(route, method='patch', json_data=json)

    @check_token
    def delete(self, activity_id):
        route = '/%s' % str(activity_id)
        # Bug fix: the original passed ``json=json`` where ``json`` was the
        # stdlib *module*; a DELETE here carries no body.
        return self._make_response(route, method='delete')

    @check_token
    def post_device_side(self, activity_id, device_side):
        route = '/%s/device-side/%s' % (str(activity_id), str(device_side))
        return self._make_response(route, method='post')

    @check_token
    def delete_device_side(self, activity_id, device_side):
        route = '/%s/device-side/%s' % (str(activity_id), str(device_side))
        return self._make_response(route, method='delete')

    @check_token
    def get_tags_and_mentions(self, activity_id):
        route = '/%s' % (str(activity_id))
        return self._make_response(route)

    @check_token
    def get_archived(self):
        # NOTE(review): this *getter* issues a DELETE — looks wrong but is kept
        # as-is to preserve behavior; confirm against the Timeular API docs.
        return self._make_response('/archived-activities', method='delete')


class Devices(API):
    """/devices endpoint."""

    _BASE_URL = '/devices'

    def __init__(self, base_url, access_token):
        super(Devices, self).__init__(base_url + self._BASE_URL, access_token)

    @check_token
    def get(self):
        return self._make_response()

    @check_token
    def patch(self, device_serial, json=None):
        route = '/%s' % str(device_serial)
        return self._make_response(route, method='patch', json_data=json)

    @check_token
    def delete(self, device_serial):
        route = '/%s' % str(device_serial)
        return self._make_response(route, method='delete')

    @check_token
    def post_disabled(self, device_serial):
        route = '/%s/disabled' % str(device_serial)
        return self._make_response(route, method='post')

    @check_token
    def delete_disabled(self, device_serial):
        route = '/%s/disabled' % str(device_serial)
        return self._make_response(route, method='delete')

    @check_token
    def post_active(self, device_serial):
        route = '/%s/active' % str(device_serial)
        return self._make_response(route, method='post')

    @check_token
    def delete_active(self, device_serial):
        route = '/%s/active' % str(device_serial)
        return self._make_response(route, method='delete')


class Tracking(API):
    """/tracking endpoint."""

    _BASE_URL = '/tracking'

    def __init__(self, base_url, access_token):
        super(Tracking, self).__init__(base_url + self._BASE_URL, access_token)

    @check_token
    def get(self):
        return self._make_response()

    @check_token
    def post_start(self, activity_id):
        route = '/%s/start' % str(activity_id)
        # Renamed local (was ``datetime``) to stop shadowing the datetime class.
        timestamp = get_current_time()
        return self._make_response(route, method='post', json_data={'startedAt': timestamp})

    @check_token
    def patch(self, activity_id, json=None):
        route = '/%s' % str(activity_id)
        return self._make_response(route, method='patch', json_data=json)

    @check_token
    def post_stop(self, activity_id):
        route = '/%s/stop' % str(activity_id)
        timestamp = get_current_time()
        return self._make_response(route, method='post', json_data={'stoppedAt': timestamp})


class TimeEntries(API):
    """/time-entries endpoint."""

    _BASE_URL = '/time-entries'

    def __init__(self, base_url, access_token):
        super(TimeEntries, self).__init__(base_url + self._BASE_URL, access_token)

    @check_token
    def get_in_range(self, stopped_after, started_before):
        route = '/%s/%s' % (str(stopped_after), str(started_before))
        return self._make_response(route)

    @check_token
    def get_by_id(self, time_entry_id):
        route = '/%s' % str(time_entry_id)
        return self._make_response(route)

    @check_token
    def post(self, json):
        # Bug fix: ``json`` -> ``json_data`` keyword (original raised TypeError).
        return self._make_response(method='post', json_data=json)

    @check_token
    def patch(self, time_entry_id, json=None):
        route = '/%s' % str(time_entry_id)
        # Bug fix: ``json`` -> ``json_data`` keyword (original raised TypeError).
        return self._make_response(route, method='patch', json_data=json)

    @check_token
    def delete(self, time_entry_id):
        route = '/%s' % str(time_entry_id)
        return self._make_response(route, method='delete')
def fibonacci_terms(num):
    """Return the Fibonacci sequence values the original script printed.

    The original printed 0, 1 and then ``num`` further terms, i.e. ``num + 2``
    values in total; that count is preserved here.
    """
    terms = [0, 1]
    a, b = 0, 1
    for _ in range(num):
        terms.append(a + b)
        # Tuple-swap replaces the original three-statement temp-variable dance.
        a, b = b, a + b
    return terms


if __name__ == "__main__":
    # Main guard so importing the module (e.g. from tests) no longer blocks
    # on stdin; running it as a script prints exactly the same lines.
    num = int(input("Enter value of num :"))
    for term in fibonacci_terms(num):
        print(term)
def format_present_record(counter, row):
    """Build one tab-separated output record from a split occurrence row.

    ``row`` is a line of occurrences.txt split on tabs: row[0] is the record
    id, row[5]/row[6] are primary/fallback country fields, row[8] is the
    reference.  Returns the formatted line, or None when neither country
    field is populated (such rows are skipped).
    """
    country = row[5]
    if country == '':
        country = row[6]
    if country == '':
        return None
    record_id = row[0]  # renamed from ``id`` to avoid shadowing the builtin
    ref = row[8]
    return ('M' + str(counter) + '\t' + record_id + '\t\ttrue\t'
            + 'http://eol.org/schema/terms/Present' + '\t' + country
            + '\t\t\t\tCompiler: Anne E Thessen\t' + ref + '\n')


if __name__ == "__main__":
    # ``with`` fixes the original's leaked file handles, and the main guard
    # stops the conversion from running (and failing) at import time.
    counter = 7740
    with open('occurrences.txt', 'r') as in_file, open('present.txt', 'w') as out_file:
        for line in in_file:
            record = format_present_record(counter, line.split('\t'))
            if record is not None:
                out_file.write(record)
                counter = counter + 1
import numpy as np
import matplotlib.pyplot as plt

# Headless backend: figures are only saved to PDF, never shown.
plt.switch_backend('agg')

filename1 = 'SarahQDdata.dat'
filename2 = 'PA_final.dat'

# Map a (label, value) header pair in each file to one of 15 data series.
cases1 = {('6','1.0'):0,('6','0.28'):1,('6','0.1'):2,('12','1.0'):3,('12','0.28'):4,('12','0.1'):5,('20','1.0'):6,('20','0.28'):7,('20','0.1'):8,('30','1.0'):9,('30','0.28'):10,('30','0.1'):11,('42','1.0'):12,('42','0.28'):13,('42','0.1'):14}
cases2 = {('2','1.00'):0,('2','0.28'):1,('2','0.10'):2,('3','1.00'):3,('3','0.28'):4,('3','0.10'):5,('4','1.00'):6,('4','0.28'):7,('4','0.10'):8,('5','1.00'):9,('5','0.28'):10,('5','0.10'):11,('6','1.00'):12,('6','0.28'):13,('6','0.10'):14}
# NOTE(review): ``shells`` maps file-2 labels to file-1 labels but is never
# used below — confirm whether it was meant to reconcile the two key spaces.
shells = {'2':'6','3':'12','4':'20','5':'30','6':'42'}

# x/y pairs for each of the 15 series, one list per file.
x1 = [[] for i in range(15)]
y1 = [[] for i in range(15)]
x2 = [[] for i in range(15)]
y2 = [[] for i in range(15)]

# ---- File 1: "####"-prefixed header lines select the series; following
# data rows contribute (col 0, col 3) points until the next header.
ind = -1
for line in open(filename1):
    if(line == '\n'):
        continue
    elif(line.find('####') > -1):
        line = line.split()
        ind = cases1.get((line[1],line[2]), -1)
        #print(line,'ind = ',ind)
    else:
        if(ind == -1):
            # No recognized header seen yet — skip data rows.
            continue
        line = line.split()
        #print(line,len(line))
        if(len(line) < 4):
            continue
        else:
            #print(line[0],line[3])
            x1[ind].append(float(line[0]))
            y1[ind].append(float(line[3]))

# ---- File 2: every third line (starting at line index 2) carries both the
# series key (cols 1 and 4) and the data point (cols 0 and 5).
with open(filename2) as f2:
    data2 = f2.read()
data2 = data2.split('\n')
ind = -1
for num in range(2,len(data2)):
    if((num - 2)%3 == 0):
        line = data2[num].split()
        ind = cases2.get((line[1],line[4]), -1)
        print(line)
        print(line[0],line[4],'ind = ',ind)
        print(line[1],line[5])
        if(ind == -1):
            continue
        x2[ind].append(float(line[0]))
        y2[ind].append(float(line[5]))

#print(len(x1[0]),len(y1[0]),len(x2[0]),len(y2[0]))
#print(' '.join(map(str,x1[0])))
#print(' '.join(map(str,y1[0])))
#print(' '.join(map(str,x2[0])))
#print(' '.join(map(str,y2[0])))

# One figure per series, overlaying the curve from each file.
for num in range(15):
    plt.plot(x1[num],y1[num],x2[num],y2[num])
    plt.savefig('fig'+str(num+1)+'.pdf', format='pdf')
    plt.clf()
#plt.show()
# 前缀和 # 随机的范围是整个点的数量 # 根据值确定是在哪个矩形,然后根据矩形长宽,将点的值转为行和列 class Solution: def __init__(self, rects: List[List[int]]): self.rects = rects self.presum = [0] for a, b, x, y in rects: self.presum.append(self.presum[-1] + (x - a + 1) * (y - b + 1)) def pick(self) -> List[int]: rdm = random.randint(0, self.presum[-1] - 1) idx = bisect_right(self.presum, rdm) - 1 a, b, x, y = self.rects[idx] v = rdm - self.presum[idx] return [a + v % (w := x - a + 1), b + v // w] # Your Solution object will be instantiated and called as such: # obj = Solution(rects) # param_1 = obj.pick()
# Mount two Dash apps under one Flask app: requests to /app1 and /app2 are
# routed to the respective Dash servers, everything else to flask_app.
# NOTE(review): newer Werkzeug moved this class to
# werkzeug.middleware.dispatcher — confirm the pinned Werkzeug version.
from werkzeug.wsgi import DispatcherMiddleware

from dash_app1 import app as app1
from dash_app2 import app as app2
from flask_app import flask_app

application = DispatcherMiddleware(flask_app, {
    '/app1': app1.server,
    '/app2': app2.server,
})
import re


def checking_number_for_validating(phone_number):
    """Return True iff ``phone_number`` is exactly 10 digits starting 7/8/9.

    The original used ``re.search`` with ten explicit character classes plus a
    separate ``len(...) == 10`` check in the caller; ``fullmatch`` against
    ``[789]\\d{9}`` pins both the prefix and the length in one place.
    """
    return re.fullmatch(r'[789]\d{9}', phone_number) is not None


if __name__ == "__main__":
    # Main guard so the module can be imported (e.g. by tests) without
    # blocking on stdin; script output is unchanged.
    number_of_phones = int(input())
    for _ in range(number_of_phones):
        phone_number = input()
        if checking_number_for_validating(phone_number):
            print("YES")
        else:
            print("NO")
# Date: 03/09/2020
# Author: rohith mulumudy
# Description: fetches certificate data
import socket
from OpenSSL import SSL
from OpenSSL import crypto
from ssl import PROTOCOL_TLSv1
import json
import socks
from datetime import datetime
import threading
from display import Display


class Certificates:
    """Fetch TLS certificates (optionally full chains) from hosts and append
    them as JSON lines to a temp file; failures are logged to an error file.

    Designed to be driven by multiple worker threads calling
    ``get_and_store_certs``; a lock serializes file writes and progress output.
    """

    def __init__(self, cert_chain_flag=False, display_flag=False, display_rate=10,
                 err_file="error_hosts.txt", out_file="certs.json", tmp_file="certs_temp.json"):
        self.lock = threading.Lock()
        self.port = "443"
        # When True, store the whole presented chain instead of the leaf only.
        self.cert_chain_flag = cert_chain_flag
        self.err_file = err_file
        self.out_file = out_file
        self.tmp_file = tmp_file
        self.host_count = 0
        self.display_flag = display_flag
        self.display = Display()
        # Print progress every ``display_rate`` processed hosts.
        self.display_rate = display_rate

    def get_cert_sans(self, x509cert):
        """Return the subjectAltName extension as a string, with commas
        replaced by ';' so downstream CSV output is not broken."""
        san = ''
        ext_count = x509cert.get_extension_count()
        for i in range(0, ext_count):
            ext = x509cert.get_extension(i)
            if 'subjectAltName' in str(ext.get_short_name()):
                san = ext.__str__()
        # replace commas to not break csv output
        san = san.replace(',', ';')
        return san

    def get_cert_details(self, cert):
        """Flatten one pyOpenSSL X509 object into a JSON-serializable dict:
        issuer/subject fields, serial, signature algorithm, SANs, validity."""
        context = {}
        issuer = {}
        subject = {}
        issuer['countryName'] = cert.get_issuer().C
        issuer['stateOrProvinceName'] = cert.get_issuer().ST
        issuer['localityName'] = cert.get_issuer().L
        issuer['organizationName'] = cert.get_issuer().O
        issuer['organizationUnitName'] = cert.get_issuer().OU
        issuer['commonName'] = cert.get_issuer().CN
        issuer['emailAddress'] = cert.get_issuer().emailAddress
        context['issuer'] = issuer
        context['serialNumber'] = str(cert.get_serial_number())
        context['signatureAlgorithm'] = cert.get_signature_algorithm().decode()
        subject['countryName'] = cert.get_subject().C
        subject['stateOrProvinceName'] = cert.get_subject().ST
        subject['localityName'] = cert.get_subject().L
        subject['organizationName'] = cert.get_subject().O
        subject['organizationUnitName'] = cert.get_subject().OU
        subject['commonName'] = cert.get_subject().CN
        subject['emailAddress'] = cert.get_subject().emailAddress
        context['subject'] = subject
        context['version'] = cert.get_version()
        context['subjectNameHash'] = cert.subject_name_hash()
        context['san'] = self.get_cert_sans(cert)
        context['expired'] = cert.has_expired()
        # Valid from
        valid_from = datetime.strptime(cert.get_notBefore().decode('ascii'), '%Y%m%d%H%M%SZ')
        context['valid_from'] = valid_from.strftime('%Y-%m-%d')
        # Valid till
        valid_till = datetime.strptime(cert.get_notAfter().decode('ascii'), '%Y%m%d%H%M%SZ')
        context['valid_till'] = valid_till.strftime('%Y-%m-%d')
        # Validity days
        context['validity_days'] = (valid_till - valid_from).days
        # Validity in days from now
        now = datetime.now()
        context['days_left'] = (valid_till - now).days
        return context

    def get_certs(self, host):
        """TLS-handshake with ``host`` and return its peer certificate (or the
        full chain when cert_chain_flag is set); returns 0 on any failure."""
        # socks.setdefaultproxy(socks.PROXY_TYPE_HTTP, '172.16.2.30', 8080, True)
        # NOTE(review): this monkey-patches socket.socket process-wide on every
        # call — confirm this is intended rather than a one-time setup step.
        socket.socket = socks.socksocket
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # NOTE(review): PROTOCOL_TLSv1 is legacy; many modern hosts refuse
        # TLS 1.0 handshakes entirely.
        osobj = SSL.Context(PROTOCOL_TLSv1)
        try:
            sock.connect((host, int(self.port)))
            oscon = SSL.Connection(osobj, sock)
            # SNI so virtual-hosted servers present the right certificate.
            oscon.set_tlsext_host_name(host.encode())
            oscon.set_connect_state()
            oscon.do_handshake()
            if self.cert_chain_flag:
                certs = oscon.get_peer_cert_chain()
            else:
                certs = oscon.get_peer_certificate()
            sock.close()
            return certs
        except:
            # NOTE(review): bare except maps every failure (DNS, timeout,
            # handshake) to 0; callers rely on this sentinel.
            return 0

    def store_certs(self, host, cert, fp):
        """Append one {host, certificate} JSON line for a single leaf cert."""
        data = {}
        data['host'] = host
        data['certificate'] = self.get_cert_details(cert)
        fp.write(json.dumps(data))
        fp.write('\n')

    def store_cert_chain(self, host, certs, fp):
        """Append one JSON line holding the whole chain, keyed 1..n from leaf."""
        data = {}
        data['host'] = host
        data['certificate'] = {}
        for i, cert in enumerate(certs):
            data['certificate'][i + 1] = self.get_cert_details(cert)
        fp.write(json.dumps(data))
        fp.write('\n')

    def display_progress(self):
        # Called with self.lock held by get_and_store_certs.
        self.host_count += 1
        if self.host_count % self.display_rate == 0:
            self.display.progress(self.host_count)

    def get_and_store_certs(self, host):
        """Thread worker: fetch ``host``'s cert(s) and persist them; on any
        failure (including get_certs returning 0, which makes the store call
        raise) the host name is appended to the error file instead."""
        cert = self.get_certs(host)
        self.lock.acquire()
        # print(threading.current_thread().getName())
        try:
            if self.display_flag == True:
                self.display_progress()
            with open(self.tmp_file, 'a') as fp:
                if self.cert_chain_flag:
                    self.store_cert_chain(host, cert, fp)
                else:
                    self.store_certs(host, cert, fp)
        except:
            with open(self.err_file, 'a') as fp:
                fp.write(host + '\n')
        finally:
            self.lock.release()
import pathlib
from typing import Any, BinaryIO, Dict, List, Tuple, Union

import numpy as np
from torchdata.datapipes.iter import IterDataPipe, Mapper, UnBatcher
from torchvision.prototype.datasets.utils import Dataset, HttpResource, OnlineResource
from torchvision.prototype.datasets.utils._internal import hint_sharding, hint_shuffling, read_mat
from torchvision.prototype.tv_tensors import Label
from torchvision.tv_tensors import Image

from .._api import register_dataset, register_info

NAME = "svhn"


@register_info(NAME)
def _info() -> Dict[str, Any]:
    # SVHN categories are the ten digits, as strings "0".."9".
    return dict(categories=[str(c) for c in range(10)])


@register_dataset(NAME)
class SVHN(Dataset):
    """SVHN Dataset.
    homepage="http://ufldl.stanford.edu/housenumbers/",
    dependencies = scipy
    """

    def __init__(
        self,
        root: Union[str, pathlib.Path],
        *,
        split: str = "train",
        skip_integrity_check: bool = False,
    ) -> None:
        self._split = self._verify_str_arg(split, "split", {"train", "test", "extra"})
        self._categories = _info()["categories"]
        # scipy is needed by read_mat to parse the .mat archives.
        super().__init__(root, skip_integrity_check=skip_integrity_check, dependencies=("scipy",))

    # SHA-256 digests of the per-split .mat downloads.
    _CHECKSUMS = {
        "train": "435e94d69a87fde4fd4d7f3dd208dfc32cb6ae8af2240d066de1df7508d083b8",
        "test": "cdce80dfb2a2c4c6160906d0bd7c68ec5a99d7ca4831afa54f09182025b6a75b",
        "extra": "a133a4beb38a00fcdda90c9489e0c04f900b660ce8a316a5e854838379a71eb3",
    }

    def _resources(self) -> List[OnlineResource]:
        """One downloadable resource: the 32x32 .mat file for this split."""
        data = HttpResource(
            f"http://ufldl.stanford.edu/housenumbers/{self._split}_32x32.mat",
            sha256=self._CHECKSUMS[self._split],
        )
        return [data]

    def _read_images_and_labels(self, data: Tuple[str, BinaryIO]) -> List[Tuple[np.ndarray, np.ndarray]]:
        """Parse the .mat buffer into a list of (HWC image, label) pairs.

        The archive stores images as (H, W, C, N); the transpose moves the
        sample axis to the front before pairing with the labels.
        """
        _, buffer = data
        content = read_mat(buffer)
        return list(
            zip(
                content["X"].transpose((3, 0, 1, 2)),
                content["y"].squeeze(),
            )
        )

    def _prepare_sample(self, data: Tuple[np.ndarray, np.ndarray]) -> Dict[str, Any]:
        """Convert one (image, label) pair into the sample dict.

        Images become CHW tensors; the archive labels digits 1..10 with 10
        standing for "0", hence the ``% 10`` remapping.
        """
        image_array, label_array = data
        return dict(
            image=Image(image_array.transpose((2, 0, 1))),
            label=Label(int(label_array) % 10, categories=self._categories),
        )

    def _datapipe(self, resource_dps: List[IterDataPipe]) -> IterDataPipe[Dict[str, Any]]:
        """Decode the .mat, unbatch into samples, add shuffle/shard hints."""
        dp = resource_dps[0]
        dp = Mapper(dp, self._read_images_and_labels)
        dp = UnBatcher(dp)
        dp = hint_shuffling(dp)
        dp = hint_sharding(dp)
        return Mapper(dp, self._prepare_sample)

    def __len__(self) -> int:
        # Fixed, documented sample counts per split.
        return {
            "train": 73_257,
            "test": 26_032,
            "extra": 531_131,
        }[self._split]
import paq1.biblioratos
from paq3 import solteronas, secretaria
import estribillo

# Play the song's sections in order, interleaving the chorus (estribillo)
# with the given repeat counts.
paq1.biblioratos.biblioratos()
paq1.biblioratos.minifaldas()
estribillo.estribillo(2)
secretaria.secretaria()
estribillo.estribillo(1)
solteronas.solteronas()
estribillo.estribillo(2)
solteronas.solteronas()

print("")
print("Pruebas:")
# To check whether the underscore-prefixed function _doctor() can be called
# individually from outside its module:
#solteronas._doctor()  # executes the function (accessible via the module)
#_doctor()  # does NOT execute — the name was not imported into this namespace
# NLP pipeline walkthrough: sentence/word tokenization, stop-word removal,
# POS tagging, stemming/lemmatization, and named-entity recognition.
import nltk
from nltk import sent_tokenize
from nltk.corpus import stopwords
from nltk import word_tokenize
from nltk import pos_tag
import string

TEXT = "It was not clear that minds were changed. Certainly they were not inside the room, and most likely not elsewhere on Capitol Hill, where Republicans and Democrats were locked into their positions long ago. Nor were there any immediate signs that the hearing penetrated the general public. While major television networks broke into regular programming to carry it live, there was little sense of a riveted country putting everything aside to watch à la Watergate."

# Sentence tokenization
sentences = sent_tokenize(TEXT)
print("------- Original Text : \n" + TEXT + "\n\n")
print("------- Sentences : " + str(len(sentences)) + "\n")
i = 0
for sentence in sentences:
    i = i + 1
    print("-" + str(i) + " : " + sentence)

# Word Tokenization
# NOTE(review): ``sentence`` still holds the LAST sentence from the loop
# above, so everything from here on analyzes only that final sentence —
# confirm whether the whole TEXT was intended instead.
print("\n-------- Tokenizaion\n")
tokens = word_tokenize(sentence)
print("Tokens Counting : " + str(len(tokens)))
i = 0
for token in tokens:
    i = i + 1
    print("-" + str(i) + " : " + token)

# Removing Stop Words (English stop words plus punctuation characters)
print("\n-------- Stop Words\n")
stop_words = set(stopwords.words('english'))
stop_words = stop_words.union(string.punctuation)
cleanWords = [w for w in tokens if not w in stop_words]
print("------- Clean Words Count : " + str(len(cleanWords)))
i = 0
for word in cleanWords:
    i = i + 1
    print("-" + str(i) + " : " + word)

# POS Tag
print("\n-------- POS Tag\n")
taggedWords = pos_tag(cleanWords)
# Prints the Penn Treebank tag-set reference (and returns None, also printed).
print(nltk.help.upenn_tagset())
for taggedWord in taggedWords:
    print("- " + str(taggedWord))

# Stemming — three stemmers imported for comparison; only Snowball is
# instantiated and none is applied below.
from nltk.stem.lancaster import LancasterStemmer
from nltk.stem.porter import PorterStemmer
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer('english')

# Lematization — reduce each clean word to its lemma, treating it as a verb.
from nltk.stem import WordNetLemmatizer
lemmatizer = WordNetLemmatizer()
print("\n-------- Lematization\n")
for word in cleanWords:
    print("- " + word + " : " + lemmatizer.lemmatize(word, pos="v"))

# Named Entity Recognition NER — chunk each sentence's POS tags and render
# the entity tree in a GUI window (requires a display).
from nltk import ne_chunk, sent_tokenize, word_tokenize
sentences = sent_tokenize(TEXT)
for sentence in sentences:
    words = word_tokenize(sentence)
    tags = pos_tag(words)
    ner = ne_chunk(tags)
    ner.draw()
"""Tests for the PokeScan kata: ``info()`` must describe a Pokemon by an
element adjective (water->wet, fire->fiery, grass->grassy) and a strength
band (0-20 weak, 21-50 fair, 51+ strong), exercising both band boundaries."""
import unittest

from katas.beta.professor_oaks_trouble_new_pokedex_prototype import PokeScan


class PokeScanTestCase(unittest.TestCase):
    """One test per (element, strength-boundary) combination."""

    # --- strength 0: bottom of the "weak" band ---
    def test_equal_1(self):
        self.assertEqual(PokeScan('Squirtle', 0, 'water').info(),
                         'Squirtle, a wet and weak Pokemon.')

    def test_equal_2(self):
        self.assertEqual(PokeScan('Charmander', 0, 'fire').info(),
                         'Charmander, a fiery and weak Pokemon.')

    def test_equal_3(self):
        self.assertEqual(PokeScan('Bulbasaur', 0, 'grass').info(),
                         'Bulbasaur, a grassy and weak Pokemon.')

    # --- strength 20: top of the "weak" band (inclusive) ---
    def test_equal_4(self):
        self.assertEqual(PokeScan('Squirtle', 20, 'water').info(),
                         'Squirtle, a wet and weak Pokemon.')

    def test_equal_5(self):
        self.assertEqual(PokeScan('Charmander', 20, 'fire').info(),
                         'Charmander, a fiery and weak Pokemon.')

    def test_equal_6(self):
        self.assertEqual(PokeScan('Bulbasaur', 20, 'grass').info(),
                         'Bulbasaur, a grassy and weak Pokemon.')

    # --- strength 21: bottom of the "fair" band ---
    def test_equal_7(self):
        self.assertEqual(PokeScan('Squirtle', 21, 'water').info(),
                         'Squirtle, a wet and fair Pokemon.')

    def test_equal_8(self):
        self.assertEqual(PokeScan('Charmander', 21, 'fire').info(),
                         'Charmander, a fiery and fair Pokemon.')

    def test_equal_9(self):
        self.assertEqual(PokeScan('Bulbasaur', 21, 'grass').info(),
                         'Bulbasaur, a grassy and fair Pokemon.')

    # --- strength 50: top of the "fair" band (inclusive) ---
    def test_equal_10(self):
        self.assertEqual(PokeScan('Squirtle', 50, 'water').info(),
                         'Squirtle, a wet and fair Pokemon.')

    def test_equal_11(self):
        self.assertEqual(PokeScan('Charmander', 50, 'fire').info(),
                         'Charmander, a fiery and fair Pokemon.')

    def test_equal_12(self):
        self.assertEqual(PokeScan('Bulbasaur', 50, 'grass').info(),
                         'Bulbasaur, a grassy and fair Pokemon.')

    # --- strength 51: bottom of the "strong" band ---
    def test_equal_13(self):
        self.assertEqual(PokeScan('Squirtle', 51, 'water').info(),
                         'Squirtle, a wet and strong Pokemon.')

    def test_equal_14(self):
        self.assertEqual(PokeScan('Charmander', 51, 'fire').info(),
                         'Charmander, a fiery and strong Pokemon.')

    def test_equal_15(self):
        self.assertEqual(PokeScan('Bulbasaur', 51, 'grass').info(),
                         'Bulbasaur, a grassy and strong Pokemon.')

    # --- strength 100: maximum, still "strong" ---
    def test_equal_16(self):
        self.assertEqual(PokeScan('Squirtle', 100, 'water').info(),
                         'Squirtle, a wet and strong Pokemon.')

    def test_equal_17(self):
        self.assertEqual(PokeScan('Charmander', 100, 'fire').info(),
                         'Charmander, a fiery and strong Pokemon.')

    def test_equal_18(self):
        self.assertEqual(PokeScan('Bulbasaur', 100, 'grass').info(),
                         'Bulbasaur, a grassy and strong Pokemon.')
def mayusculas(password_upper):
    """True if the password contains at least one uppercase letter."""
    return any(char.isupper() for char in password_upper)


def minusculas(password_lower):
    """True if the password contains at least one lowercase letter."""
    return any(char.islower() for char in password_lower)


def numeros(password_number):
    """True if the password contains at least one digit."""
    return any(char.isdigit() for char in password_number)


def no_alfanumericos(password_noalpha):
    """True if the password is ENTIRELY alphanumeric.

    NOTE: despite the name, a True result means there is NO special
    character; the validator below rejects passwords for which this is True.
    The name is kept for backward compatibility with existing callers.
    """
    return password_noalpha.isalnum()


def espacio(password_espacio):
    """True if the password contains any whitespace character."""
    return any(char.isspace() for char in password_espacio)


def validacion_clave(password):
    """Validate a password: at least 8 chars, an uppercase letter, a lowercase
    letter, a digit, a non-alphanumeric character, and no whitespace.

    Returns True when valid, otherwise a Spanish error-message string (the
    mixed return type is kept so existing callers keep working).
    """
    # ``not f(x)`` / ``f(x)`` replace the original ``== False`` / ``== True``
    # comparisons; the accepted/rejected sets are identical.
    if (len(password) < 8
            or not mayusculas(password)
            or not minusculas(password)
            or espacio(password)
            or not numeros(password)
            or no_alfanumericos(password)):
        return "La contraseña elegida no es segura"
    return True


if __name__ == "__main__":
    # Main guard: importing this module (e.g. from tests) no longer enters
    # the interactive loop; running it as a script behaves as before.
    while True:
        clave = input("Ingrese clave a validar: ")
        print(validacion_clave(clave))
        continuar = input("Precione q para salir o cualquier tecla para continuar: ")
        continuar = continuar.lower()
        if (continuar == "q"):
            break
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import annotations

import pytest

from pants.backend.python.goals.pytest_runner import (
    _count_pytest_tests,
    validate_pytest_cov_included,
)
from pants.backend.python.subsystems.pytest import PyTest
from pants.backend.python.util_rules.interpreter_constraints import InterpreterConstraints
from pants.backend.python.util_rules.lockfile_metadata import PythonLockfileMetadataV3
from pants.backend.python.util_rules.pex import ReqStrings
from pants.backend.python.util_rules.pex_requirements import (
    LoadedLockfile,
    LoadedLockfileRequest,
    Lockfile,
    PexRequirements,
    Resolve,
)
from pants.engine.fs import DigestContents, FileContent
from pants.engine.internals.native_engine import EMPTY_DIGEST
from pants.testutil.option_util import create_subsystem
from pants.testutil.rule_runner import MockGet, run_rule_with_mocks
from pants.util.pip_requirement import PipRequirement

# Fixture: two top-level test functions -> counts as 2.
EXAMPLE_TEST1 = b"""
def test_foo():
    pass

def test_bar():
    pass
"""

# Fixture: only ``test_baz`` matches the ``test_`` prefix -> counts as 1
# (``testHelper`` lacks the underscore).
EXAMPLE_TEST2 = b"""
class TestStuff(TestCase):
    def test_baz():
        pass

    def testHelper():
        pass
"""


def test_count_pytest_tests_empty() -> None:
    """An empty file contains zero tests."""
    digest_contents = DigestContents([FileContent(path="tests/test_empty.py", content=b"")])
    test_count = _count_pytest_tests(digest_contents)
    assert test_count == 0


def test_count_pytest_tests_methods() -> None:
    """Top-level ``test_*`` functions are each counted."""
    digest_contents = DigestContents(
        [FileContent(path="tests/test_example1.py", content=EXAMPLE_TEST1)]
    )
    test_count = _count_pytest_tests(digest_contents)
    assert test_count == 2


def test_count_pytest_tests_in_class() -> None:
    """Only ``test_``-prefixed methods inside classes are counted."""
    digest_contents = DigestContents(
        [FileContent(path="tests/test_example1.py", content=EXAMPLE_TEST2)]
    )
    test_count = _count_pytest_tests(digest_contents)
    assert test_count == 1


def test_count_pytest_tests_multiple() -> None:
    """Counts are summed across all files in the digest."""
    digest_contents = DigestContents(
        [
            FileContent(path="tests/test_empty.py", content=b""),
            FileContent(path="tests/test_example1.py", content=EXAMPLE_TEST1),
            FileContent(path="tests/test_example2.py", content=EXAMPLE_TEST2),
        ]
    )
    test_count = _count_pytest_tests(digest_contents)
    assert test_count == 3


@pytest.mark.parametrize("entire_lockfile", [False, True])
def test_validate_pytest_cov_included(entire_lockfile: bool) -> None:
    """validate_pytest_cov_included must accept any spelling of pytest-cov
    and raise when it is absent, whether requirements come from the tool's
    explicit list or from the entire lockfile."""

    def validate(reqs: list[str]) -> None:
        # Build a PyTest subsystem whose requirements are either the explicit
        # list or empty (meaning: use the whole lockfile).
        if entire_lockfile:
            tool = create_subsystem(
                PyTest,
                lockfile="dummy.lock",
                install_from_resolve="dummy_resolve",
                requirements=[],
            )
        else:
            tool = create_subsystem(
                PyTest,
                lockfile="dummy.lock",
                install_from_resolve="dummy_resolve",
                requirements=reqs,
            )
        lockfile = Lockfile("dummy_url", "dummy_description_of_origin", "dummy_resolve")
        metadata = PythonLockfileMetadataV3(
            valid_for_interpreter_constraints=InterpreterConstraints(),
            requirements={PipRequirement.parse(req) for req in reqs},
            manylinux=None,
            requirement_constraints=set(),
            only_binary=set(),
            no_binary=set(),
        )
        loaded_lockfile = LoadedLockfile(EMPTY_DIGEST, "", metadata, 0, True, None, lockfile)
        # Drive the rule with mocked engine Gets instead of a full engine run.
        run_rule_with_mocks(
            validate_pytest_cov_included,
            rule_args=[tool],
            mock_gets=[
                MockGet(ReqStrings, (PexRequirements,), lambda x: ReqStrings(tuple(reqs))),
                MockGet(Lockfile, (Resolve,), lambda x: lockfile),
                MockGet(
                    LoadedLockfile,
                    (LoadedLockfileRequest,),
                    lambda x: loaded_lockfile if x.lockfile == lockfile else None,
                ),
            ],
        )

    # Canonicalize project name.
    validate(["PyTeST_cOV"])

    with pytest.raises(ValueError) as exc:
        validate([])
    assert "missing `pytest-cov`" in str(exc.value)

    with pytest.raises(ValueError) as exc:
        validate(["custom-plugin"])
    assert "missing `pytest-cov`" in str(exc.value)
# Sum the elements of a fixed list, printing the running total after each one.
lis = [10, 11, 12, 13, 14, 15, 16]

suml = 0
for value in lis:
    suml += value
    print(value, "____+___>", suml)
class Point3D(object):
    """A point in 3-D space with ``x``, ``y`` and ``z`` coordinates."""

    def __init__(self, x, y, z):
        self.x = x
        self.y = y
        self.z = z

    def __repr__(self):
        # Bug fix: __repr__ must *return* a string.  The original used a
        # Python 2 `print` statement here, so repr() yielded None (and the
        # print-statement syntax is an error on Python 3).
        return "(" + str(self.x) + ", " + str(self.y) + ", " + str(self.z) + ")"


myPoint = Point3D(1, 2, 3)
print(myPoint)  # Python 3 print function (was a Python 2 print statement)
=====================猜字游戏========================================= import random num = random.randint(0, 100) guess = 0 while True: num_input = input("请输入一个0到100的数字:") guess += 1 if not num_input.isdigit(): print("请输入数字") elif int(num_input) not in range(100): print("输入的数字必须介于0到100") else: if num == int(num_input): print("恭喜,猜对了!总共猜了%d次" % guess) break elif num > int(num_input): print("你输入的数字小了") elif num < int(num_input): print("你输入的数字大了") else: print("系统异常") ====================出行建议============================================ def __init__(self,input_daytime): self.input_daytime = input_daytime def search_visibility(self): visible_level = 0 if self.input_daytime =="daytime": visible_level = 2 if self.input_daytime =="night": visible_level = 9 return visible_level def search_temperature(self): temperature = 0 if self.input_daytime == "daytime": temperature = 26 if self.input_daytime == "night": temperature = 16 return temperature class OutAdvice(WeatherSearch): def __init__(self,input_daytime): WeatherSearch.__init__(self, input_daytime) def search_temperature(self): vehicle = "" if self.input_daytime == "daytime": vehicle = "bike" if self.input_daytime == "night": vehicle = "taxi" return vehicle def out_advice(self): visible_level = self.search_visibility() if visible_level == 2 : print("The weather is good,suitable for use %s."%self.search_temperature()) elif visible_level == 9 : print("The weather is bad,you should use %s."%self.search_temperature()) else : print("The weather is beyond my scope,I can not give you any advice") got = OutAdvice("daytime") got.out_advice() =====================装饰器应用============================================ import time user,passwd = 'alex','abc123' def auth(auth_type): print("auth func:",auth_type) def outer_wrapper(func): def wrapper(*args, **kwargs): print("wrapper func args:", *args, **kwargs) if auth_type == "local": username = input("Username:").strip() password = input("Password:").strip() if user == username and passwd == 
password: print("\033[32;1mUser has passed authentication\033[0m") res = func(*args, **kwargs) # from home print("---after authenticaion ") return res else: exit("\033[31;1mInvalid username or password\033[0m") elif auth_type == "ldap": print("搞毛线ldap,不会。。。。") return wrapper return outer_wrapper def index(): print("welcome to index page") @auth(auth_type="local") # home = wrapper() def home(): print("welcome to home page") return "from home" @auth(auth_type="ldap") def bbs(): print("welcome to bbs page") index() print(home()) #wrapper() bbs() ====================购物列表==================================== product_list = [ ('Iphone',5800), ('Mac Pro',9800), ('Bike',800), ('Watch',10600), ('Coffee',31), ('Alex Python',120), ] shopping_list = [] salary = input("Input your salary:") if salary.isdigit(): salary = int(salary) while True: for index,item in enumerate(product_list): #print(product_list.index(item),item) print(index,item) user_choice = input("选择要买嘛?>>>:") if user_choice.isdigit(): user_choice = int(user_choice) if user_choice < len(product_list) and user_choice >=0: p_item = product_list[user_choice] if p_item[1] <= salary: #买的起 shopping_list.append(p_item) salary -= p_item[1] print("Added %s into shopping cart,your current balance is \033[31;1m%s\033[0m" %(p_item,salary) ) else: print("\033[41;1m你的余额只剩[%s]啦,还买个毛线\033[0m" % salary) else: print("product code [%s] is not exist!"% user_choice) elif user_choice == 'q': print("--------shopping list------") for p in shopping_list: print(p) print("Your current balance:",salary) exit() else: print("invalid option") ====================三级菜单============================================= data = { '北京':{ "昌平":{ "沙河":["oldboy","test"], "天通苑":["链家地产","我爱我家"] }, "朝阳":{ "望京":["奔驰","陌陌"], "国贸":{"CICC","HP"}, "东直门":{"Advent","飞信"}, }, "海淀":{}, }, '山东':{ "德州":{}, "青岛":{}, "济南":{} }, '广东':{ "东莞":{}, "常熟":{}, "佛山":{}, }, } exit_flag = False while not exit_flag: for i in data: print(i) choice = input("选择进入1>>:") if choice in data: 
while not exit_flag: for i2 in data[choice]: print("\t",i2) choice2 = input("选择进入2>>:") if choice2 in data[choice]: while not exit_flag: for i3 in data[choice][choice2]: print("\t\t", i3) choice3 = input("选择进入3>>:") if choice3 in data[choice][choice2]: for i4 in data[choice][choice2][choice3]: print("\t\t",i4) choice4 = input("最后一层,按b返回>>:") if choice4 == "b": pass elif choice4 == "q": exit_flag = True if choice3 == "b": break elif choice3 == "q": exit_flag = True if choice2 == "b": break elif choice2 == "q": exit_flag = True ===================进度条============================================= import sys,time for i in range(20): sys.stdout.write("#") sys.stdout.flush() #写入后刷新内存 time.sleep(0.1) ====================xml============================================== <?xml version="1.0"?> <data> <country name="Liechtenstein"> <rank updated="yes">2</rank> <year>2008</year> <gdppc>141100</gdppc> <neighbor name="Austria" direction="E"/> <neighbor name="Switzerland" direction="W"/> </country> <country name="Singapore"> <rank updated="yes">5</rank> <year>2011</year> <gdppc>59900</gdppc> <neighbor name="Malaysia" direction="N"/> </country> <country name="Panama"> <rank updated="yes">69</rank> <year>2011</year> <gdppc>13600</gdppc> <neighbor name="Costa Rica" direction="W"/> <neighbor name="Colombia" direction="E"/> </country> </data> ===================configparser================================== [DEFAULT] ServerAliveInterval = 45 Compression = yes CompressionLevel = 9 ForwardX11 = yes [bitbucket.org] User = hg [topsecret.server.com] Port = 50022 ForwardX11 = no ====================继承:学校系统================================ class School(object): def __init__(self, name, addr): self.name = name self.addr = addr self.students = [] self.staffs = [] def enroll(self, stu_obj): print("为学员%s 办理注册手续" % stu_obj.name) self.students.append(stu_obj) def hire(self, staff_obj): self.staffs.append(staff_obj) print("雇佣新员工%s" % staff_obj.name) class SchoolMember(object): def __init__(self, 
name, age, sex): self.name = name self.age = age self.sex = sex def tell(self): pass class Teacher(SchoolMember): def __init__(self, name, age, sex, salary, course): super(Teacher, self).__init__(name, age, sex) self.salary = salary self.course = course def tell(self): print(''' ---- info of Teacher:%s ---- Name:%s Age:%s Sex:%s Salary:%s Course:%s ''' % (self.name, self.name, self.age, self.sex, self.salary, self.course)) def teach(self): print("%s is teaching course [%s]" % (self.name, self.course)) class Student(SchoolMember): def __init__(self, name, age, sex, stu_id, grade): super(Student, self).__init__(name, age, sex) self.stu_id = stu_id self.grade = grade def tell(self): print(''' ---- info of Student:%s ---- Name:%s Age:%s Sex:%s Stu_id:%s Grade:%s ''' % (self.name, self.name, self.age, self.sex, self.stu_id, self.grade)) def pay_tuition(self, amount): print("%s has paid tution for $%s" % (self.name, amount)) school = School("老男孩IT", "沙河") t1 = Teacher("Oldboy", 56, "MF", 200000, "Linux") t2 = Teacher("Alex", 22, "M", 3000, "PythonDevOps") s1 = Student("ChenRonghua", 36, "MF", 1001, "PythonDevOps") s2 = Student("徐良伟", 19, "M", 1002, "Linux") t1.tell() s1.tell() school.hire(t1) school.enroll(s1) school.enroll(s2) print(school.students) print(school.staffs) school.staffs[0].teach() for stu in school.students: stu.pay_tuition(5000)
#!/usr/bin/env python3
# Marcos del Cueto
# Import libraries
import math
import matplotlib.pyplot as plt
import numpy as np
from sklearn import preprocessing
from sklearn.model_selection import LeaveOneOut
from sklearn.metrics import mean_squared_error
from sklearn import neighbors
from matplotlib.ticker import (MultipleLocator)

# k value optimized in a previous run
best_k = 2

# Training set: 10 samples on x in [5, 6.8], y = exp(x)
list_x = [x for x in np.arange(5, 7, 0.2)]
list_y = [math.exp(x) for x in list_x]
# Convert to numpy arrays (x as a column vector for sklearn)
list_x = np.array(list_x).reshape(-1, 1)
list_y = np.array(list_y)

# Dense reference curve y = exp(x), for the dashed background line
function_x = np.arange(4.9, 7.0, 0.01)
function_y = [math.exp(x) for x in function_x]

# All 10 dataset points serve as training data
X_train = np.array(list_x).reshape(-1, 1)
y_train = np.array(list_y)
# Predict on a fine grid of intermediate x values
X_pred = np.arange(5.0, 6.81, 0.01).reshape(-1, 1)

# Standardize the descriptor (a formality with a single feature, but good practice)
scaler = preprocessing.StandardScaler().fit(X_train)
X_train_scaled = scaler.transform(X_train)
X_pred_scaled = scaler.transform(X_pred)

# Distance-weighted k-NN regressor: fit on the training set, predict the grid
knn = neighbors.KNeighborsRegressor(n_neighbors=best_k, weights='distance')
label_kNN = "k-NN regression (distance)"
y_pred = knn.fit(X_train_scaled, y_train).predict(X_pred_scaled)

# Reference curve (dashed) plus the training points
plt.plot(function_x, function_y, color='C0', linestyle='dashed', linewidth=1)
plt.scatter(list_x, list_y, color='C0', label='Training points')
# Overlay the k-NN predictions
plt.plot(X_pred, y_pred, color='C1', label=label_kNN)
plt.legend()
plt.xlabel('$x$', fontsize=15)
plt.ylabel('$y$', fontsize=15)
# Axis ticks and limits
plt.xticks(np.arange(5, 7, 0.2))
plt.xlim(4.92, 6.88)
plt.ylim(100, 1000)
# Minor ticks every 0.05 on the x axis
axes = plt.gca()
axes.xaxis.set_minor_locator(MultipleLocator(0.05))
# Write the figure to disk
file_name = 'Figure5.png'
plt.savefig(file_name, format='png', dpi=600)
plt.close()
"""Telegram bot for a two-player 'make words from a word' game.

Flow: players register with /reg (stored in data.csv), pick an opponent via
/users, then start with /play; one player defines the base word and the two
alternate proposing words built from its letters.  Words are validated with
the ``enchant`` English and Russian dictionaries.

NOTE(review): all game state (``ind_game``, ``current``, ``dict_word``,
``common_set``) lives in module-level globals, so the bot supports exactly
one concurrent game; the step-handler chaining below depends on this exact
statement order -- documented here, deliberately left unchanged.
"""
import telebot
import csv
import enchant
from telebot import types

import config

token = config.BOT_TOKEN
bot = telebot.TeleBot(token)

# Spell-check dictionaries used to validate proposed words.
dictionary = enchant.Dict("en_US")
dictionary_rus = enchant.Dict("ru_RU")

dict_word = {}          # letter -> available count in the base word
common_list_ind = []    # registered player chat ids (as read from data.csv)
common_list_names = []  # registered player names, parallel to common_list_ind
ind_game = []           # the two chat ids of the active game [challenger, opponent]
names_game = []         # the two player names, parallel to ind_game
current = 1             # index into ind_game of whose turn it is
word = ""               # the base word chosen via defining()
common_set = set()      # words already played this game


@bot.message_handler(content_types=['text'])
def start(message):
    """Top-level command dispatcher for /reg, /start, /users and /play."""
    if message.text == '/reg':
        bot.send_message(message.from_user.id, f"Nice to see you, {message.from_user.first_name}")
        bot.send_message(message.from_user.id, f"Enter '/users' to see the list of players")
        save_into_file(message)
    elif message.text == '/start':
        bot.send_message(message.from_user.id, f"Enter '/reg' to start working with bot")
    elif message.text == '/users':
        get_users(message)
    elif message.text == '/play':
        # /play is only valid once both game participants are known.
        if len(ind_game) == 2:
            bot.send_message(message.from_user.id, f"Enter starting word")
            bot.register_next_step_handler(message, defining)
        else:
            bot.send_message(message.from_user.id, f"The opponent wasn't chosen")


def get_users(message):
    """Send the numbered list of registered players and await a choice.

    Side effect: (re-)appends every player to the module-level
    ``common_list_ind`` / ``common_list_names`` caches.
    NOTE(review): repeated /users calls keep appending duplicates -- the
    numbering shown and the index used by start_game() stay consistent, but
    the lists grow unboundedly; confirm whether a reset was intended.
    """
    results = []
    str_to_print = ""
    count = 1
    with open('data.csv', 'r') as f:
        reader = csv.DictReader(f)
        for row in reader:
            results.append(row)
    for i in results:
        str_to_print = f"{str_to_print}{count}. {i.get('name')}\n"
        common_list_ind.append(i.get('id'))
        common_list_names.append(i.get('name'))
        count += 1
    bot.send_message(message.from_user.id, str_to_print)
    bot.send_message(message.from_user.id, "Please, choose a partner")
    bot.register_next_step_handler(message, start_game)


def start_game(message):
    """Parse the chosen opponent number, invite them, and set up the game.

    NOTE(review): on invalid input this re-registers itself as the next step
    handler but does NOT return, so execution falls through to the invite
    logic below with the default/previous ``ind`` -- left as-is, flagged for
    a follow-up fix.
    """
    ind = 1
    try:
        ind = int(message.text)
    except Exception:
        bot.send_message(message.from_user.id, 'Enter numbers')
        bot.register_next_step_handler(message, start_game)
    if ind < 1 or ind > len(common_list_ind):
        bot.send_message(message.from_user.id, 'There is no player with such number, try again')
        bot.register_next_step_handler(message, start_game)
    ind -= 1  # displayed numbers are 1-based; lists are 0-based
    # Send the opponent an inline "Play!" button that triggers callback_worker.
    keyboard = telebot.types.InlineKeyboardMarkup()
    key_yes = types.InlineKeyboardButton(text='Play!', callback_data='/play')
    keyboard.add(key_yes)
    question = f"Enter '/play' to start the game with {message.from_user.first_name}"
    bot.send_message(common_list_ind[ind], text=question, reply_markup=keyboard)
    # Record both participants: [challenger, chosen opponent].
    ind_game.append(message.from_user.id)
    ind_game.append(int(common_list_ind[ind]))
    names_game.append(message.from_user.first_name)
    names_game.append(common_list_names[ind])
    bot.register_next_step_handler(message, game)


def check_letters_in_word(input_word: str) -> bool:
    """Return True if ``input_word`` can be spelled from the base word's letters.

    Works on a copy of ``dict_word`` (letter -> remaining count) so repeated
    letters in the proposal are charged against the base word's multiplicity.
    """
    temp_dict = dict_word.copy()
    for i in input_word:
        if i not in temp_dict or temp_dict.get(i) == 0:
            return False
        else:
            temp_dict[i] -= 1
    return True


def check_existance(input_word: str) -> bool:
    """Return True if the word exists in the English or Russian dictionary."""
    return dictionary.check(input_word) or dictionary_rus.check(input_word)


def check_word(input_word: str, current: int) -> str:
    """Validate a proposed word; on success notify the opponent and return "ok".

    Returns a human-readable rejection message otherwise.  NOTE: the
    ``current`` parameter shadows the module-level global of the same name --
    here it is the caller-supplied turn index.
    """
    if input_word in common_set:
        return "This word was used, try again"
    if check_letters_in_word(input_word):
        if check_existance(input_word):
            bot.send_message(ind_game[1 - current], f"New word of your opponent is {input_word}")
            bot.send_message(ind_game[1 - current], f"It's your turn now")
            common_set.add(input_word)
            return "ok"
        else:
            return "Such word doesn't exist"
    return "It can't be here, check your input"


def game(message):
    """Handle one turn: /stop ends the game, otherwise validate the word.

    NOTE(review): the '/stop' branch does not return, so after announcing the
    result it still falls through to the turn-validation logic below --
    flagged, deliberately left unchanged.
    """
    inp = message.text.lower()
    global current
    if inp == '/stop':
        bot.send_message(message.from_user.id, "You lost the game")
        bot.send_message(ind_game[1 - current], "You won the game")
        bot.register_next_step_handler(message, start)
    # print(inp)
    if current == ind_game.index(message.from_user.id):
        result_string = check_word(inp, current)
        if result_string == "ok":
            current = 1 - current  # pass the turn to the other player
        else:
            bot.send_message(message.from_user.id, result_string)
    else:
        bot.send_message(message.from_user.id, f"It's not your turn now")
    bot.register_next_step_handler(message, game)


def defining(message):
    """Record the base word, build its letter-count map, and start the turns."""
    global word
    global current
    word = message.text
    bot.send_message(ind_game[0], f"The chosen word for game is {word}, it's your turn")
    list_word = list(word)
    set_word = set(list_word)
    global dict_word
    # dict_word maps each distinct letter to how many times it may be used.
    for letter in set_word:
        dict_word[letter] = list_word.count(letter)
    current = 1 - current
    bot.register_next_step_handler(message, game)


def save_into_file(message):
    """Append the sender to data.csv unless their id is already recorded.

    Creates the file with a header row on first use.  The duplicate check is
    a substring search over the stringified rows, keyed on the user id.
    """
    results = []
    try:
        with open('data.csv', 'r') as f:
            reader = csv.DictReader(f)
            for row in reader:
                results.append(row)
    except FileNotFoundError:
        with open('data.csv', 'w') as f:
            fieldnames = ['id', 'name', 'surname']
            writer = csv.DictWriter(f, fieldnames=fieldnames)
            writer.writeheader()
    results = str(results)
    if results.find(str(message.from_user.id)) == -1:
        common_dict = {"id": message.from_user.id, "name": message.from_user.first_name,
                       "surname": message.from_user.last_name}
        with open('data.csv', 'a') as f:
            fieldnames = ['id', 'name', 'surname']
            writer = csv.DictWriter(f, fieldnames=fieldnames)
            writer.writerow(common_dict)


@bot.callback_query_handler(func=lambda call: True)
def callback_worker(call):
    """Handle the inline 'Play!' button: start word definition if a game is set up."""
    if call.data == "/play" and len(ind_game) == 2:
        bot.send_message(call.message.chat.id, f"Enter starting word")
        bot.register_next_step_handler(call.message, defining)
    else:
        bot.send_message(call.message.chat.id, f"The opponent wasn't chosen")


# Block forever, processing updates.
bot.polling(none_stop=True, interval=0)
import sys
import os
# Bug fix: ``math.pow``/``math.sqrt`` are used in Section B below but
# ``math`` was never imported -- ``from numpy import *`` does not export
# the stdlib math module, so the original raised NameError there.
import math
from numpy import *
import numpy as np
import numpy.random
from sklearn.datasets import fetch_mldata
import sklearn.preprocessing
from numpy import linalg as LA
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import pandas as pd
from sklearn import svm
import matplotlib.patches as mpatches

# Load MNIST and keep only the two classes of interest (digits 0 and 8),
# relabelled to -1 / +1 for the SVM.
mnist = fetch_mldata('MNIST original')
data = mnist['data']
labels = mnist['target']

neg, pos = 0, 8
# Deterministic shuffles (seed 0) of the indices of the two classes, split
# into the standard MNIST train (first 60000) and test partitions.
train_idx = numpy.random.RandomState(0).permutation(
    where((labels[:60000] == neg) | (labels[:60000] == pos))[0])
test_idx = numpy.random.RandomState(0).permutation(where((labels[60000:] == neg) | (labels[60000:] == pos))[0])

# First 6000 shuffled train indices -> training set; the rest -> validation.
train_data_unscaled = data[train_idx[:6000], :].astype(float)
train_labels = (labels[train_idx[:6000]] == pos) * 2 - 1
validation_data_unscaled = data[train_idx[6000:], :].astype(float)
validation_labels = (labels[train_idx[6000:]] == pos) * 2 - 1
test_data_unscaled = data[60000 + test_idx, :].astype(float)
test_labels = (labels[60000 + test_idx] == pos) * 2 - 1

# Preprocessing: center each pixel (mean removal only, no scaling).
train_data = sklearn.preprocessing.scale(train_data_unscaled, axis=0, with_std=False)
validation_data = sklearn.preprocessing.scale(validation_data_unscaled, axis=0, with_std=False)
test_data = sklearn.preprocessing.scale(test_data_unscaled, axis=0, with_std=False)


def main(args):
    """Run sections A-D of the exercise.

    args: optional single-element list with the output directory for plots.
    Section A tunes the SGD step size eta_0 on the validation set, Section B
    tunes the regularization constant C, Section C trains with the best
    hyper-parameters and plots the weight vector, Section D reports test
    accuracy.
    """
    # output path:
    if len(args) == 1:
        output = args[0] + '/'
        if not os.path.exists(output):
            print("Path does not exist!")
            sys.exit(2)
    elif len(args) > 1:
        print("usage: Q3.py <output_path>")
        sys.exit(2)
    else:
        output = ''

    # Section A: sweep eta_0 in 1..100, keep the most accurate on validation.
    etas = [x for x in range(1, 101)]
    T = 1000
    C = 1.0
    best_eta0 = 0
    best_accuracy = 0
    acs = []
    for eta0 in etas:
        accuracy = accuracyCalc(1.0 * eta0, C, T, validation_data, validation_labels)
        acs.append(accuracy)
        if accuracy > best_accuracy:
            best_eta0 = eta0
            best_accuracy = accuracy
    print('The best eta_0 is: ', best_eta0, 'with accuracy: ', best_accuracy)
    plt.figure(1)
    plt.plot(etas, acs)
    # Raw strings: '\e' is an invalid escape sequence in a plain literal
    # (DeprecationWarning, SyntaxError in future Pythons); the bytes passed
    # to matplotlib's mathtext are unchanged.
    plt.xlabel(r'$\eta_{0}$ value')
    plt.ylabel('Accuracy')
    plt.title(r'Different $\eta_{0}$ values vs. their accuracy')
    img_save = output + 'Q3_Section_A'
    plt.savefig(img_save)

    # Section B: with the best eta_0 fixed, sweep C over powers of sqrt(10).
    C_list = [math.pow(math.sqrt(10), x) for x in range(-20, 22)]
    T = 1000
    best_C = 0
    best_accuracy = 0
    acs = []
    for C in C_list:
        accuracy = accuracyCalc(best_eta0, C, T, validation_data, validation_labels)
        acs.append(accuracy)
        if accuracy > best_accuracy:
            best_C = C
            best_accuracy = accuracy
    print('The best C is: ', best_C, 'with accuracy: ', best_accuracy)
    plt.figure(2)
    plt.plot(C_list, acs)
    plt.xscale('log')
    plt.xlabel('C value')
    plt.ylabel('Accuracy')
    plt.title('Different C values vs. their accuracy')
    img_save = output + 'Q3_Section_B'
    plt.savefig(img_save)

    # Section C: longer training run; visualize the weights as a 28x28 image.
    T = 20000
    w = ourSGDSVM(train_data, train_labels, best_C, best_eta0, T)
    plt.figure(3)
    plt.imshow(reshape(w, (28, 28)), interpolation='nearest', cmap='gray')
    plt.title('weight vector')
    img_save = output + 'Q3_Section_C_weight_vector'
    plt.savefig(img_save)

    # Section D: accuracy of the tuned classifier on the held-out test set.
    best_accuracy = accuracyCalc(best_eta0, best_C, T, test_data, test_labels)
    print('The best accuracy on test set is: ', best_accuracy)


def accuracyCalc(eta0, C, T, set, labels):
    """Average accuracy over 10 independent SGD trainings, evaluated on (set, labels).

    NOTE: the parameter name ``set`` shadows the builtin; kept to preserve the
    public signature for keyword callers.
    """
    s = 0.0
    for i in range(10):
        w = ourSGDSVM(train_data, train_labels, C, eta0, T)
        s += testAccuracy(w, set, labels)
    return 1.0 * s / 10


def testAccuracy(w, set, labels):
    """Fraction of samples in ``set`` whose sign(w . x) matches ``labels``.

    A prediction of exactly 0 is counted as correct, matching the original
    `< 0 -> wrong, else -> right` convention.
    """
    prediction = [np.dot(w, set[i]) for i in range(len(labels))]
    accuracy_for_validation = 1.0 * np.array(
        [0.0 if np.multiply(labels[i], prediction[i]) < 0 else 1.0 for i in range(len(labels))]).sum() / len(labels)
    return accuracy_for_validation


def ourSGDSVM(samples, labels, C, eta0, T):
    """Train a linear SVM with (sub)gradient SGD on the hinge loss.

    T iterations; at step t a random sample i is drawn, the step size is
    eta0 / t, and w is updated only when the margin y_i * (w . x_i) < 1.
    Returns the learned weight vector.
    """
    w = np.zeros(len(samples[0]), dtype='float64')
    for t in range(1, T + 1):
        i = np.random.randint(0, len(samples))
        prediction_result = False if np.multiply(labels[i], np.dot(w, samples[i])) < 1 else True
        eta_t = 1.0 * eta0 / t
        if not prediction_result:
            # Sub-gradient step on the regularized hinge loss.
            w = np.multiply((1 - eta_t), w) + np.multiply(eta_t * C * labels[i], samples[i])
    return w


if __name__ == '__main__':
    main(sys.argv[1:])
"""Demonstrate the stdlib ``random`` module: choice, sample, shuffle,
randint, random and getrandbits.

Fix: the original used Python 2 print *statements*, which are a SyntaxError
on Python 3; converted to print() calls with identical output values.
"""
import random

values = [1, 2, 3, 4, 5, 6]

# random.choice: one uniformly chosen element
print(random.choice(values))
print(random.choice(values))
print(random.choice(values))
print(random.choice(values))
print(random.choice(values))

# random.sample: two distinct elements in random order
print(random.sample(values, 2))
print(random.sample(values, 2))
print(random.sample(values, 2))
print(random.sample(values, 2))

# random.shuffle: in-place permutation of the list
random.shuffle(values)
print(values)
random.shuffle(values)
print(values)
random.shuffle(values)
print(values)

# random.randint: integer in [0, 10] inclusive
print(random.randint(0, 10))
print(random.randint(0, 10))
print(random.randint(0, 10))
print(random.randint(0, 10))
print(random.randint(0, 10))

# random.random: float in [0.0, 1.0)
print(random.random())
print(random.random())
print(random.random())
print(random.random())

# random.getrandbits: a 200-bit random integer
print(random.getrandbits(200))
from django.shortcuts import render, redirect

# Create your views here.
from django.views.generic.base import View
from django.contrib import messages, auth
from django.contrib.auth.models import User
from home.models import Category, Slider, Ad, Item, Brand, Cart, Contact
from django.core.mail import EmailMultiAlternatives


class BaseView(View):
    """Base for the site's class-based views; subclasses fill ``self.views``
    with template context.

    NOTE(review): ``views`` is a *class-level* mutable dict shared by every
    subclass and every request, so context keys leak between pages and users.
    Left unchanged because some templates may rely on the leaked keys --
    confirm and move to a per-request dict in a follow-up.
    """
    views = {}


class HomeView(BaseView):
    """Landing page: categories, sliders, the 8 ad slots and item groups."""

    def get(self, request):
        self.views['categories'] = Category.objects.all()
        self.views['sliders'] = Slider.objects.all()
        # One queryset per ad slot (rank 1..8) used by the template layout.
        self.views['ads1'] = Ad.objects.filter(rank=1)
        self.views['ads2'] = Ad.objects.filter(rank=2)
        self.views['ads3'] = Ad.objects.filter(rank=3)
        self.views['ads4'] = Ad.objects.filter(rank=4)
        self.views['ads5'] = Ad.objects.filter(rank=5)
        self.views['ads6'] = Ad.objects.filter(rank=6)
        self.views['ads7'] = Ad.objects.filter(rank=7)
        self.views['ads8'] = Ad.objects.filter(rank=8)
        self.views['items'] = Item.objects.all()
        self.views['new_items'] = Item.objects.filter(label='new')
        self.views['hot_items'] = Item.objects.filter(label='hot')
        self.views['sale_items'] = Item.objects.filter(label='sale')
        return render(request, 'index.html', self.views)


class ProductDetailView(BaseView):
    """Product page for the item identified by ``slug``, with related items."""

    def get(self, request, slug):
        category = Item.objects.get(slug=slug).category
        self.views['detail_item'] = Item.objects.filter(slug=slug)
        self.views['categories'] = Category.objects.all()
        self.views['brands'] = Brand.objects.all()
        self.views['related_item'] = Item.objects.filter(category=category)
        return render(request, 'product-detail.html', self.views)


class SearchView(BaseView):
    """Full-text-ish search over item descriptions via ?query=..."""

    def get(self, request):
        query = request.GET.get('query', None)
        if not query:
            return redirect("/")
        self.views['search_query'] = Item.objects.filter(
            description__icontains=query
        )
        self.views['searched_for'] = query
        return render(request, 'search.html', self.views)


class CategoryView(BaseView):
    """List all items belonging to the category identified by ``slug``."""

    def get(self, request, slug):
        cat = Category.objects.get(slug=slug).id
        self.views['category_items'] = Item.objects.filter(category=cat)
        # Bug fix: the original returned None (no return statement), which
        # makes Django raise "The view didn't return an HttpResponse".
        # Template name mirrors BrandView's 'brand.html' -- TODO confirm
        # 'category.html' exists in the project's templates.
        return render(request, 'category.html', self.views)


class BrandView(BaseView):
    """List all items of the brand whose slug arrives as ``name``."""

    def get(self, request, name):
        # Bug fix: the original filtered on an undefined variable ``slug``
        # (NameError on every request); the URL-captured value is ``name``.
        cat = Brand.objects.get(slug=name).id
        self.views['brand_items'] = Item.objects.filter(brand=cat)
        return render(request, 'brand.html', self.views)


def register(request):
    """Sign-up form handler: validate, create the user, flash a message."""
    if request.method == 'POST':
        first_name = request.POST['first_name']
        last_name = request.POST['last_name']
        username = request.POST['username']
        email = request.POST['email']
        password = request.POST['password']
        cpassword = request.POST['cpassword']
        if password == cpassword:
            if User.objects.filter(username=username).exists():
                messages.error(request, 'The username is already used.')
                return redirect('home:signup')
            elif User.objects.filter(email=email).exists():
                messages.error(request, 'The email is already used.')
                return redirect('home:signup')
            else:
                data = User.objects.create_user(
                    first_name=first_name,
                    last_name=last_name,
                    username=username,
                    email=email,
                    password=password
                )
                data.save()
                # Bug fix: the success confirmation was flashed at the
                # ``error`` level; use the success level instead.
                messages.success(request, 'You are signed up.')
                return redirect('home:signup')
        else:
            # Bug fix: corrected the garbled user-facing message
            # ("Password doest not match to each other.").
            messages.error(request, 'Passwords do not match.')
            return redirect('home:signup')
    return render(request, 'signup.html')


def signin(request):
    """Log-in form handler using Django's auth backend."""
    if request.method == "POST":
        username = request.POST['username']
        password = request.POST['password']
        user = auth.authenticate(username=username, password=password)
        if user is not None:
            auth.login(request, user)
            return redirect('/')
        else:
            messages.error(request, 'Username and password do not match.')
            return redirect('home:signin')
    return render(request, 'signin.html')


class ViewCart(BaseView):
    """Show the current user's cart (rows keyed by username)."""

    def get(self, request):
        self.views['carts'] = Cart.objects.filter(user=request.user.username)
        return render(request, 'cart.html', self.views)


def cart(request, slug):
    """Add one unit of item ``slug`` to the user's cart (create or increment).

    The line total uses the discounted price when one is set (> 0).
    """
    if Cart.objects.filter(slug=slug, user=request.user.username).exists():
        quantity = Cart.objects.get(slug=slug, user=request.user.username).quantity
        quantity = quantity + 1
        price = Item.objects.get(slug=slug).price
        discounted_price = Item.objects.get(slug=slug).discounted_price
        if discounted_price > 0:
            total = discounted_price * quantity
        else:
            total = price * quantity
        Cart.objects.filter(slug=slug, user=request.user.username).update(quantity=quantity, total=total)
    else:
        price = Item.objects.get(slug=slug).price
        discounted_price = Item.objects.get(slug=slug).discounted_price
        if discounted_price > 0:
            total = discounted_price
        else:
            total = price
        data = Cart.objects.create(
            user=request.user.username,
            slug=slug,
            item=Item.objects.filter(slug=slug)[0],
            total=total
        )
        data.save()
    return redirect('home:mycart')


def deletecart(request, slug):
    """Remove the cart row for item ``slug`` belonging to the current user."""
    if Cart.objects.filter(slug=slug, user=request.user.username).exists():
        Cart.objects.filter(slug=slug, user=request.user.username).delete()
        messages.success(request, 'The item is deleted')
    return redirect("home:mycart")


def delete_single_cart(request, slug):
    """Decrement the quantity of item ``slug`` in the user's cart by one.

    NOTE(review): quantity can reach 0 (or below) without removing the row --
    confirm whether it should delete the row at 0.
    """
    if Cart.objects.filter(slug=slug, user=request.user.username).exists():
        quantity = Cart.objects.get(slug=slug, user=request.user.username).quantity
        quantity = quantity - 1
        price = Item.objects.get(slug=slug).price
        discounted_price = Item.objects.get(slug=slug).discounted_price
        if discounted_price > 0:
            total = discounted_price * quantity
        else:
            total = price * quantity
        Cart.objects.filter(slug=slug, user=request.user.username).update(quantity=quantity, total=total)
    return redirect("home:mycart")


def contact(request):
    """Contact form handler: persist the message and e-mail it to the admin."""
    if request.method == 'POST':
        name = request.POST['name']
        email = request.POST['email']
        subject = request.POST['subject']
        message = request.POST['message']
        data = Contact.objects.create(
            name=name,
            email=email,
            subject=subject,
            message=message
        )
        data.save()
        # Bug fix: messages.success requires the request as its first
        # argument; the original call raised a TypeError on every submit.
        messages.success(request, "Message is submitted.")
        # Bug fix: closed the dangling <p> tag in the HTML alternative.
        html_content = f"<p> The customer having name {name} ,mail addrees {email} and subject {subject} has some message and the message is {message}</p>"
        msg = EmailMultiAlternatives(subject, message, 'aceaayush@gmail.com', ['aceraayush@gmail.com'])
        msg.attach_alternative(html_content, "text/html")
        msg.send()
    return render(request, 'contact.html')


# ---------------------------------------------API---------------------------------------------------------
from rest_framework import viewsets
from .serializers import *


class ItemViewSet(viewsets.ModelViewSet):
    """CRUD API for items."""
    queryset = Item.objects.all()
    serializer_class = ItemSerializer


class CartViewSet(viewsets.ModelViewSet):
    """CRUD API for cart rows."""
    queryset = Cart.objects.all()
    serializer_class = CartSerializer


from django.views.generic import View, DetailView
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import viewsets, generics
from rest_framework.filters import OrderingFilter, SearchFilter


class ItemFilterListView(generics.ListAPIView):
    """Read-only item list with filtering, ordering and search.

    NOTE(review): ``filter_fields`` is the legacy django-filter name
    (renamed ``filterset_fields``); kept for compatibility with the
    project's pinned version -- confirm before renaming.
    """
    queryset = Item.objects.all()
    serializer_class = ItemSerializer
    filter_backends = (DjangoFilterBackend, OrderingFilter, SearchFilter)
    filter_fields = ['id', 'title', 'price', 'label', 'category']
    ordering_fields = ['price', 'title', 'id']
    search_fields = ['title', 'description']