text stringlengths 8 6.05M |
|---|
import xml.etree.ElementTree as etree
class Corpus:
    """Load sentences from an OpenCorpora XML annotation file.

    Only the raw ``<source>`` text of each ``<sentence>`` element is kept;
    a sentence can then be printed by its index via :meth:`get_sentence`.
    """

    def __init__(self):
        # Raw sentence strings, in document order.
        self._sentences = []

    def load_corpus(self, path_to_file):
        """Parse the corpus file and collect every sentence's source text.

        :param path_to_file: path (or open file object) of the OpenCorpora XML.
        """
        tree = etree.parse(path_to_file)
        root = tree.getroot()
        for sentence in root.iter('sentence'):
            for source in sentence.iter('source'):
                self._sentences.append(source.text)
        # NOTE: the original also walked <token>/<g> elements here, but that
        # code only created throwaway Sentence/Wordform objects and mutated a
        # class attribute by accident; it had no observable effect and was
        # removed as dead, broken code.

    def get_sentence(self):
        """Ask the user for an index and print that sentence, or an error."""
        i = int(input('Введите номер предложения: '))
        if 0 <= i < len(self._sentences):
            print(self._sentences[i])
        else:
            print("Предложения с таким номером не существует.")
class Sentence:
    """A sentence: its raw source text plus the word forms it contains."""

    def __init__(self, sent):
        # Word forms are appended later, one per token.
        self.words = []
        self.sent = sent
class Wordform:
    """A single token: its surface text and grammatical tags."""

    def __init__(self):
        # Grammeme tags (e.g. part of speech) collected during parsing.
        self.grammems = []
        self.word = ''
# Demo: load a local OpenCorpora annotation dump and query it interactively.
# NOTE(review): the absolute Windows path below is machine-specific — this
# script only runs on the original author's computer as written.
corp = Corpus()
corp.load_corpus('C:\\Users\\Виктория\\Downloads\\annot.opcorpora.no_ambig.xml')
corp.get_sentence()
|
from django.db import models
from .Musica import Musica
class Categoria(models.Model):
    """Hierarchical music category (a category may have a parent category).

    The slug doubles as the primary key and as the URL segment.
    """

    slug = models.SlugField(primary_key=True, max_length=100)
    nome = models.CharField(max_length=255)
    descricao = models.CharField(max_length=500)
    # on_delete is mandatory from Django 2.0 on; CASCADE matches the implicit
    # default these ForeignKeys had before the argument became required.
    categoria_mae = models.ForeignKey("self", on_delete=models.CASCADE, blank=True, null=True)
    ordem = models.PositiveSmallIntegerField()
    banner_lateral = models.ForeignKey("Banner", on_delete=models.CASCADE, related_name="banner_lateral_cat", blank=True, null=True)
    banner_footer = models.ForeignKey("Banner", on_delete=models.CASCADE, related_name="banner_footer_cat", blank=True, null=True)

    class Meta:
        app_label = "mpm"

    def __str__(self):
        """Full name including ancestors, e.g. ``"Rock / Indie"``."""
        nome_completo = ""
        if self.categoria_mae:
            # str() recurses up the parent chain.
            nome_completo += str(self.categoria_mae) + " / "
        nome_completo += self.nome
        return nome_completo

    def get_musicas(self):
        """Return the queryset of songs tagged with this category."""
        return Musica.objects.filter(categorias__slug=self.slug)

    def get_filhas(self):
        """Return direct child categories ordered by their ``ordem`` field."""
        return Categoria.objects.filter(categoria_mae__slug=self.slug).order_by('ordem')

    def get_absolute_url(self):
        """Canonical URL for this category's song listing."""
        return "/musicas-de/%s/" % self.slug
|
from random import randint
from prac_08.unreliable_car import UnreliableCar
def main():
    """Drive an UnreliableCar a random distance and show its state before/after."""
    truck = UnreliableCar('Truck', 100, 80)
    print(truck)
    distance = randint(0, 100)
    truck.drive(distance)
    print(truck)
main() |
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 29 15:14:17 2019
@author: KelvinOX25
"""
import pyvisa
import time
import logging
import numpy as np
import struct
from qcodes import VisaInstrument, validators as vals
class Tektronix_AWG3252(VisaInstrument):
    """QCoDeS driver for the Tektronix AWG3252 used as a DC voltage source.

    Exposes a single ``V`` parameter that sets the channel-1 DC offset.
    """

    def __init__(self, name, address, **kw):
        super().__init__(name, address, **kw)
        self.add_parameter(
            'V',
            label='Voltage',
            unit='V',
            get_cmd=None,
            # SCPI offset command; {} is filled with the float setpoint.
            set_cmd=':SOUR1:VOLT:OFFS {}V;',
            vals=vals.Numbers(-4, 4),
            set_parser=float,
        )

    def init(self):
        """Reset the instrument and configure channel 1 as a 0 V DC output."""
        commands = (
            '*RST;',
            ':SOUR1:FUNC:SHAP DC;',
            ':SOUR1:VOLT:OFFS 0V;',
            ':OUTP1:IMP INF;',
            ':OUTP1:STAT on;',
        )
        for command in commands:
            self.write_raw(command)
            # Brief pause so the instrument settles between commands.
            time.sleep(0.2)
##Testing our codes
#from qcodes.instrument.base import Instrument
#try:
# Instrument.close_all()
#except KeyError:
# pass
#except NameError:
# pass
#
#gen = Tektronix_AWG3252('gen', 'TCPIP0::192.168.13.32::inst0::INSTR')
#gen.init()
#gen.V.set(0.324) |
from django.contrib import admin
from .models import Podcast
# Register your models here.
class PodcastAdmin(admin.ModelAdmin):
    """Admin configuration for Podcast: show all main fields in the changelist."""
    list_display = ('title', 'description', 'image', 'audio', 'completed')
admin.site.register(Podcast, PodcastAdmin)
|
import os
import gtk
import time
import gobject
import threading
import traceback
import envi.bits as e_bits
import envi.config as e_config
import vwidget
import vwidget.main as vw_main
import vwidget.views as vw_views
import vwidget.layout as vw_layout
import vwidget.memview as vw_memview
import vwidget.windows as vw_windows
import vwidget.vwvtrace as vw_vtrace
import vwidget.pydialog as vw_pydialog
import vwidget.vwvstruct as vw_vstruct
import vwidget.menubuilder as vw_menu
import vivisect
import vivisect.base as viv_base
import vivisect.vdbext as viv_vdbext
import vivisect.server as viv_server
import viv_views
import vivisect.reports as viv_reports
import vtrace
import vtrace.envitools as vt_envitools
import vdb
import vdb.gui as vdb_gui
from vivisect.const import *
from envi.threads import firethread
from vwidget.main import idlethread,idlethreadsync
def cmpoffset(x, y):
    """Three-way compare two tuples by their first element (the offset).

    Returns -1, 0, or 1 like the Python 2 builtin ``cmp``; the original used
    ``cmp`` directly, which does not exist in Python 3, so this uses the
    portable ``(a > b) - (a < b)`` idiom with identical results.
    """
    return (x[0] > y[0]) - (x[0] < y[0])
class VivWindow(vw_layout.LayoutWindow):
    """Base class for all vivisect GUI windows managed by the layout system.

    Subclass __init__ methods must set any attributes their vivBuildWindow()
    needs *before* delegating here, because this constructor calls it.
    """
    def __init__(self, vw, gui):
        vw_layout.LayoutWindow.__init__(self)
        self.vw = vw    # the vivisect workspace
        self.gui = gui  # the owning VivGui layout manager
        self.vivBuildWindow()

    def vivBuildWindow(self):
        # Overridden by subclasses to construct the widget contents.
        pass
class VivVaSetViewWindow(VivWindow):
    """Window displaying a single named VA set as a table."""
    def __init__(self, vw, gui, setname=None):
        # Set before VivWindow.__init__, which triggers vivBuildWindow().
        self.vasetname = setname
        self.vivview = None
        VivWindow.__init__(self, vw, gui)

    def getWindowState(self):
        # Layout persistence: the VA set name is the only saved state.
        return self.vasetname

    def setWindowState(self, state):
        """Restore the window to show the VA set named by *state*."""
        self.set_title("Va Set: %s" % state)
        self.vasetname = state
        self.vivview.va_set_name = state
        self.vivview.vwLoad()

    def vivBuildWindow(self):
        self.vivview = viv_views.VaSetView(self.vw, self.gui, self.vasetname)
        self.set_title("Va Set: %s" % self.vasetname)
        self.add(self.vivview)
class VivCallersWindow(VivWindow):
    """Window listing the callers of one function."""
    def __init__(self, vw, gui, funcva=None):
        # Set before VivWindow.__init__, which triggers vivBuildWindow().
        self.funcva = funcva
        VivWindow.__init__(self, vw, gui)

    def getWindowState(self):
        # Layout persistence: the function VA is the only saved state.
        return self.funcva

    def setWindowState(self, funcva):
        """Retarget the window at the function *funcva* and reload the view."""
        self.funcva = funcva
        self.vivview.funcva = funcva
        pstr = self.vw.arch.pointerString(self.funcva)
        self.set_title("Callers: %s" % pstr)
        self.vivview.vwLoad()

    def vivBuildWindow(self):
        self.vivview = viv_views.CallersView(self.vw, self.gui, self.funcva)
        if self.funcva != None:
            pstr = self.vw.arch.pointerString(self.funcva)
            self.set_title("Callers: %s" % pstr)
        self.add(self.vivview)
class InputDialog(gtk.Dialog):
    """Modal single-line text input dialog: a prompt label plus an entry box."""
    def __init__(self, parent):
        self.prompt = gtk.Label()
        buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OK,gtk.RESPONSE_OK)
        gtk.Dialog.__init__(self, "Input Required", parent, buttons=buttons)
        hb = gtk.HBox()
        hb.pack_start(self.prompt)
        self.entry = gtk.Entry()
        # Pressing Enter in the entry is equivalent to clicking OK.
        self.entry.connect("activate", self.entryActivate)
        hb.pack_start(self.entry)
        hb.show_all()
        self.vbox.pack_start(hb)

    def setPrompt(self, prompt):
        """Set the label text shown next to the entry box."""
        self.prompt.set_text(' %s' % prompt)

    def setDefault(self, default=None):
        """Pre-fill the entry; None means empty."""
        if default == None:
            default = ''
        self.entry.set_text(default)

    def entryActivate(self, *args):
        # Enter key handler: emit the OK response.
        self.response(gtk.RESPONSE_OK)

    def getInputText(self):
        """Run the dialog; return the entered text, or None if cancelled."""
        ret = None
        if self.run() == gtk.RESPONSE_OK:
            ret = self.entry.get_text()
        # Hide (not destroy) so the dialog can be reused.
        self.hide()
        return ret
class VivMemoryView(vw_memview.MemoryView):
    """Memory canvas with vivisect-specific hotkeys and right-click menus.

    All hk* handlers operate on self.selectva (the currently selected VA)
    and return silently when nothing is selected.
    """
    def __init__(self, vw, gui, memwin):
        self.vw = vw
        self.gui = gui
        self.memwin = memwin  # the enclosing window; used as dialog parent
        vw_memview.MemoryView.__init__(self, vw, syms=vw)
        # Adopt every renderer the workspace canvas already knows about.
        for name in vw.canvas.getRendererNames():
            self.addRenderer(name, vw.canvas.getRenderer(name))
        # Single-key bindings on the selected VA.
        self.registerHotKey(ord('c'), self.hkCode)
        self.registerHotKey(ord('s'), self.hkString)
        self.registerHotKey(ord('S'), self.hkStruct)
        self.registerHotKey(ord('p'), self.hkPointer)
        self.registerHotKey(ord('f'), self.hkFunc)
        self.registerHotKey(ord('u'), self.hkUnicode)
        self.registerHotKey(ord('x'), self.hkXrefs)
        self.registerHotKey(ord('n'), self.hkName)
        self.registerHotKey(ord('U'), self.hkUndef)
        self.registerHotKey(ord(';'), self.hkComment)
        self.registerHotKey(ord('B'), self.hkBookmark)
        self.registerHotKey(ord('G'), self.hkFuncGraph)

    def hkFuncGraph(self, *args):
        """Open the function graph for the function containing the selection."""
        if self.selectva == None:
            return
        fva = self.vw.getFunction(self.selectva)
        if fva == None:
            return
        self.popFunctionGraph(None, fva)

    def hkBookmark(self, *args):
        """Prompt for a name and bookmark the selected VA."""
        if self.selectva == None:
            return
        name = self.gui.getInputText("Bookmark Name:", parent=self.memwin)
        if name != None:
            self.vw.setVaSetRow("Bookmarks",(self.selectva, name))

    def hkFlow(self, *args):
        # Placeholder: no flow action implemented yet.
        if self.selectva == None:
            return

    def hkComment(self, *args):
        """Prompt for and attach a comment at the selected VA."""
        if self.selectva == None:
            return
        cmnt = self.gui.getInputText("Comment:", parent=self.memwin)
        if cmnt != None:
            self.vw.setComment(self.selectva, cmnt)

    def hkXrefs(self, *args):
        """Show a cross-references tab for the selected VA."""
        if self.selectva == None:
            return
        win = self.gui.presentWindow("VivXrefWindow")
        win.addXrefTab(self.selectva)

    def hkUndef(self, *args):
        """Delete the location definition at the selected VA (undefine)."""
        if self.selectva == None:
            return
        if self.vw.getLocation(self.selectva) == None:
            return
        self.vw.delLocation(self.selectva)

    def hkName(self, *args):
        """Prompt for and assign a symbolic name at the selected VA."""
        if self.selectva == None:
            return
        name = self.gui.getInputText("Name:", parent=self.memwin)
        if name != None:
            self.vw.makeName(self.selectva, name)

    def hkFunc(self, *args):
        """Mark the selected VA as a function entry (only on opcode locations)."""
        if self.selectva == None:
            return
        loc = self.vw.getLocation(self.selectva)
        if loc != None:
            if loc[L_LTYPE] != LOC_OP:
                return
        self.vw.makeFunction(self.selectva)

    def hkStruct(self, *args):
        """Apply a user-selected structure at the selected (undefined) VA."""
        if self.selectva == None:
            return
        if self.vw.getLocation(self.selectva) != None:
            return
        sname = vw_vstruct.selectStructure(self.vw.vsbuilder, parent=self.memwin)
        if sname == None:
            return
        self.vw.makeStructure(self.selectva, sname)

    def hkUnicode(self, *args):
        """Define a unicode string at the selected (undefined) VA."""
        if self.selectva == None:
            return
        if self.vw.getLocation(self.selectva) != None:
            return
        self.vw.makeUnicode(self.selectva)

    @firethread
    def hkCode(self, *args):
        # Runs in its own thread: disassembly from here may take a while.
        if self.selectva == None:
            return
        if self.vw.getLocation(self.selectva) != None:
            return
        self.vw.makeCode(self.selectva)

    def hkString(self, *args):
        """Define an ASCII string at the selected (undefined) VA."""
        if self.selectva == None:
            return
        if self.vw.getLocation(self.selectva) != None:
            return
        self.vw.makeString(self.selectva)

    def hkPointer(self, *args):
        """Define a pointer at the selected (undefined) VA."""
        if self.selectva == None:
            return
        if self.vw.getLocation(self.selectva) != None:
            return
        self.vw.makePointer(self.selectva)

    def renderMemory(self, va, size, rend=None):
        # We're a little special, when we get asked to render,
        # render twice the size, starting at va-size and then scroll
        # to the actual VA location.
        base = va-size
        map = self.vw.getMemoryMap(va)
        if map != None:
            mapbase, mapsize, mperm, mfile = map
            # Clamp to the start of the memory map so we never render
            # below mapped memory.
            if mapbase > base:
                base = mapbase
        vw_memview.MemoryView.renderMemory(self, base, size*2, rend=rend)
        # We need everything to think our last requested size was real
        self.lastsize = size

    def vwGetPopup(self, textview, menu, vwfaddr=None):
        """Build the right-click context menu for the current selection."""
        pos = 0
        va = self.selectva
        vwmenu = vwfaddr
        if vwmenu == None:
            vwmenu = vw_menu.FieldAdder(menu, splitchar='/')
        if va != None:
            vwmenu.addField('Rename (n)', self.hkName)
            vwmenu.addField('Comment (;)', self.hkComment)
            # Check for and add xrefs right click option
            for x,tova,xrtype,xrflag in self.vw.getXrefsTo(va):
                p = self.vw.arch.pointerString(x)
                locstr = "Undefined"
                loc = self.vw.getLocation(x)
                if loc != None:
                    locstr = self.vw.reprLocation(loc)
                # '.' is the menu path separator's sibling; avoid it in labels.
                locstr = locstr.replace('.','_')
                vwmenu.addField('Xrefs To/%s: %s' % (p, locstr[:32]), self.popSelectXref, (x,))
            for fromva,x,xrtype,xrflag in self.vw.getXrefsFrom(va):
                p = self.vw.arch.pointerString(x)
                locstr = "Undefined"
                loc = self.vw.getLocation(x)
                if loc != None:
                    locstr = self.vw.reprLocation(loc)
                locstr = locstr.replace('.','_')
                vwmenu.addField('Xrefs From/%s: %s' % (p, locstr[:32]), self.popSelectXref, (x,))
            fva = self.vw.getFunction(va)
            if fva != None:
                # Function-specific submenu (args, locals, emulation, graph...).
                fname = self.vw.getName(fva)
                vwmenu.addField('Function/%s' % fname, self.popGoto, (fva,))
                for i,(atype,aname) in enumerate(self.vw.getFunctionArgs(fva)):
                    vwmenu.addField('Function/Arguments/%s_%d' % (aname,i), self.popEditFuncArg, (fva, i, atype, aname))
                locals = self.vw.getFunctionLocals(fva)
                locals.sort(cmp=cmpoffset)
                for aoffset, atype, aname in locals:
                    vwmenu.addField('Function/Locals/%s_%d' % (aname,aoffset), self.popEditFuncLocal, (fva, aoffset, atype, aname))
                vwmenu.addField('Function/Edit...', self.popEditFunc, (fva,))
                vwmenu.addField('Function/Emulation/Emulate To Here', self.popEmulateFunc, (fva,va))
                vwmenu.addField('Function/Emulation/Show Emu State', self.popEmulateShow, (fva,va))
                vwmenu.addField('Function/Highlight', self.popHighlightFunc, (fva,))
                vwmenu.addField('Function/Show Callers', self.popShowCallers, (fva,))
                vwmenu.addField('Function/Graph (alpha)(G)', self.popFunctionGraph, (fva,))
            # FIXME function code flow to here with highlight
            loc = self.vw.getLocation(va)
            if loc == None:
                # Undefined bytes: offer all the "Make" actions.
                vwmenu.addField('Make/Code (c)', self.hkCode)
                vwmenu.addField('Make/Function (f)', self.hkFunc)
                vwmenu.addField('Make/String (s)', self.hkString)
                vwmenu.addField('Make/Pointer (p)', self.hkPointer)
                vwmenu.addField('Make/Unicode (u)', self.hkUnicode)
                vwmenu.addField('Make/Structure (S)', self.hkStruct)
            elif loc[L_LTYPE] == LOC_OP:
                op = self.vw.parseOpcode(va)
                for idx,oper in enumerate(op.opers):
                    # Give the option to switch ('hint') that you want
                    # the immediate operand displayed differently...
                    if oper.isImmed():
                        val = oper.getOperValue(op)
                        hval = e_bits.hex(val)
                        cval = val
                        r = []
                        # Decode the immediate's bytes as characters (LSB first).
                        while cval:
                            r.append(chr(cval & 0xff))
                            cval = cval >> 8
                        cstr = repr(''.join(r))
                        vwmenu.addField('Immediate/Decimal (%d)' % val, self.symHintThunk, (va, idx, str(val)))
                        vwmenu.addField('Immediate/Hex (%s)' % hval, self.symHintThunk, (va, idx, hval))
                        vwmenu.addField('Immediate/Chars (%s)' % cstr, self.symHintThunk, (va, idx, cstr))
                        names = self.vw.vsconsts.revLookup(val)
                        if names != None:
                            for name in names:
                                vwmenu.addField('Immediate/%s' % name, self.symHintThunk, (va, idx, name))
                if not self.vw.isFunction(va):
                    vwmenu.addField('Make/Function (f)', self.hkFunc)
        vwmenu.addField('Color Maps/Clear All...', self.popColorMap, (None, ))
        names = self.vw.getColorMaps()
        names.sort()
        for name in names:
            map = self.vw.getColorMap(name)
            vwmenu.addField('Color Maps/%s' % name, self.popColorMap, (map,))
        vwmenu.addField('Bookmark (B)', self.hkBookmark)

    def popEditFuncArg(self, item, fva, idx, atype, aname):
        """Menu handler: rename a function argument."""
        newname = self.gui.getInputText('Argument Name', default=aname)
        if newname != None:
            self.vw.setFunctionArg(fva, idx, atype, aname=newname, doprec=False)

    def popEditFuncLocal(self, item, fva, offset, atype, aname):
        """Menu handler: rename a function local variable."""
        newname = self.gui.getInputText('Local Name', default=aname)
        if newname != None:
            self.vw.setFunctionLocal(fva, offset, atype, newname)

    def popHighlightFunc(self, item, funcva):
        """Menu handler: color every location of a function yellow."""
        map = {}
        for cbva, cbsize, fva in self.vw.getFunctionBlocks(funcva):
            endva = cbva+cbsize
            # Walk each code block location by location.
            while cbva < endva:
                lva,lsize,ltype,linfo = self.vw.getLocation(cbva)
                map[lva] = "yellow"
                cbva+=lsize
        self.setColorMap(map)

    def popEditFunc(self, item, fva):
        """Menu handler: open the function editor window."""
        w = VivFunctionWindow(self.vw, self.gui, fva)
        self.gui.manageWindow(w)

    def popColorMap(self, item, map):
        # map of None clears all coloring.
        self.setColorMap(map)

    def popFunctionGraph(self, item, fva):
        """Menu handler: open the (alpha) function graph in its own thread."""
        import vivisect.gui.funcgraph as viv_funcgraph
        thr = threading.Thread(target=viv_funcgraph.makeFuncGraphWindow, args=(self.vw,fva))
        thr.setDaemon(True)
        thr.start()

    def popShowCallers(self, item, fva):
        """Menu handler: open the callers window for a function."""
        w = VivCallersWindow(self.vw, self.gui, fva)
        self.gui.manageWindow(w)

    @firethread
    def popEmulateFunc(self, item, fva, va):
        # Runs in its own thread: emulate from the function entry up to va,
        # then attach a Vdb GUI to the resulting trace.
        self.vw.vprint('Running emulator to: 0x%.8x' % (va,))
        emu = self.vw.getEmulator()
        emu.runFunction(fva, stopva=va)
        trace = vt_envitools.TraceEmulator(emu)
        db = vdb.Vdb(trace=trace)
        vdb_gui.VdbGui(db, ismain=False)
        db.onecmd('dis')
        db.vprint('Emulation Stepped To: 0x%.8x (in 0x%.8x)' % (va, fva))

    def symHintThunk(self, item, va, idx, hint):
        # Menu thunk: record the display hint for operand idx at va.
        self.vw.setSymHint(va, idx, hint)

    @firethread
    def popEmulateShow(self, item, fva, va):
        # Runs in its own thread: emulate to va, then print operand values
        # with "magic" (taint/symbolic) annotations to the workspace canvas.
        self.vw.vprint('Running emulator to: 0x%.8x' % (va,))
        emu = self.vw.getEmulator()
        emu.runFunction(fva, stopva=va)
        regs = emu.getRegisters()
        rnames = regs.keys()
        rnames.sort()
        self.vw.vprint("Showing Register/Magic State At: 0x%.8x" % va)
        op = self.vw.parseOpcode(va)
        self.vw.canvas.addVaText("0x%.8x: " % va, va)
        op.render(self.vw.canvas)
        self.vw.canvas.addText("\n")
        for i in xrange(len(op.opers)):
            o = op.opers[i]
            o.render(self.vw.canvas, op, i)
            self.vw.canvas.addText(" = ")
            oval = o.getOperValue(op, emu)
            mag = emu.getMagic(oval)
            base = "%.8x (%d)" % (oval,oval)
            if mag != None:
                # Show the operand relative to its nearest magic value.
                if mag.va > oval:
                    base += " %s - %d" % (repr(mag), mag.va - oval)
                else:
                    base += " %s + %d" % (repr(mag), oval - mag.va)
            self.vw.vprint(base)

    def popGoto(self, item, va):
        # Navigate this view to va.
        self.goto(va)

    def popSelectXref(self, item, va):
        # Navigate the whole GUI (memory window) to va.
        self.gui.goto(va)
class VivMemoryWindow(vw_memview.MemoryWindow, viv_base.VivEventCore):
    """Memory window that refreshes its canvas on workspace (VWE_*) events."""
    def __init__(self, vw, gui):
        self.vw = vw
        self.gui = gui
        canvas = VivMemoryView(vw, gui, self)
        vw_memview.MemoryWindow.__init__(self, canvas)
        viv_base.VivEventCore.__init__(self, vw)
        # Subscribe so the VWE_* handlers below get called.
        gui.addEventCore(self)

    def vwDestroy(self):
        # Unsubscribe on window teardown.
        self.gui.delEventCore(self)

    def _refreshFunction(self, fva):
        # Redraw every code block of the function fva.
        for cbva, cbsize, cbfva in self.vw.getFunctionBlocks(fva):
            self.canvas.refresh(cbva, cbsize)

    def VWE_SYMHINT(self, vw, event, einfo):
        va, idx, hint = einfo
        self.canvas.refresh(va, 1)

    def VWE_ADDLOCATION(self, vw, event, einfo):
        va,size,ltype,tinfo = einfo
        self.canvas.refresh(va, size)

    def VWE_DELLOCATION(self, vw, event, einfo):
        va,size,ltype,tinfo = einfo
        self.canvas.refresh(va, size)

    def VWE_ADDFUNCTION(self, vw, event, einfo):
        va,meta = einfo
        self.canvas.refresh(va, 1)

    def VWE_SETFUNCMETA(self, vw, event, einfo):
        fva, key, val = einfo
        self._refreshFunction(fva)

    def VWE_SETFUNCARGS(self, vw, event, einfo):
        fva, fargs = einfo
        self._refreshFunction(fva)

    def VWE_COMMENT(self, vw, event, einfo):
        va,cmnt = einfo
        self.canvas.refresh(va, 1)

    def VWE_SETNAME(self, vw, event, einfo):
        va,name = einfo
        self.canvas.refresh(va, 1)
        # A rename also changes how every referencing line renders.
        for fromva,tova,rtype,rflag in self.vw.getXrefsTo(va):
            self.canvas.refresh(fromva, 1)
class VivNaviWindow(VivWindow):
    """Navigation window: Functions / Imports / Exports views in a notebook."""
    def vivBuildWindow(self):
        self.set_title("Viv Navi")
        notebook = gtk.Notebook()
        self.funcview = viv_views.FunctionsView(self.vw, self.gui)
        self.exportsview = viv_views.ExportsView(self.vw, self.gui)
        self.importsview = viv_views.ImportsView(self.vw, self.gui)
        notebook.append_page(self.funcview, gtk.Label("Functions"))
        notebook.append_page(self.importsview, gtk.Label("Imports"))
        notebook.append_page(self.exportsview, gtk.Label("Exports"))
        self.add(notebook)
class VivDataWindow(VivWindow):
    """Data window: Files/Sections, Strings, and Structures views."""
    def vivBuildWindow(self):
        self.set_title("Viv Data")
        notebook = gtk.Notebook()
        self.filesview = viv_views.FilesView(self.vw, self.gui)
        self.stringsview = viv_views.StringsView(self.vw, self.gui)
        self.structsview = viv_views.StructsView(self.vw, self.gui)
        notebook.append_page(self.filesview, gtk.Label("Files/Sections"))
        notebook.append_page(self.stringsview, gtk.Label("Strings"))
        notebook.append_page(self.structsview, gtk.Label("Structures"))
        self.add(notebook)
class VivXrefWindow(VivWindow):
    """Window holding one cross-reference tab per inspected VA."""
    def vivBuildWindow(self):
        self.notebook = gtk.Notebook()
        self.add(self.notebook)

    def addXrefTab(self, va):
        """Append (and focus) a new tab showing xrefs for *va*."""
        view = viv_views.XrefView(self.vw, self.gui, va)
        l = gtk.Label(view.vwGetDisplayName())
        self.notebook.append_page(view, l)
        self.notebook.set_current_page(-1)
        l.show_all()
        view.show_all()

    def getWindowState(self):
        # Layout persistence: save the VA of every open tab.
        ret = []
        for i in range(self.notebook.get_n_pages()):
            p = self.notebook.get_nth_page(i)
            ret.append(p.xrefva)
        return ret

    def setWindowState(self, state):
        # Recreate one tab per saved VA.
        for va in state:
            self.addXrefTab(va)
#FIXME update on events
class VivSearchWindow(VivWindow):
    """Window holding one tab of results per memory-search pattern."""
    def vivBuildWindow(self):
        self.set_title("Search Results")
        self.notebook = gtk.Notebook()
        self.add(self.notebook)

    def addSearchPattern(self, pattern, res=None):
        """Add a results tab for *pattern*; runs the search if *res* is None.

        NOTE(review): pattern.encode('hex') is the Python 2 bytes-to-hex
        codec; this module is Python 2 / pygtk code throughout.
        """
        if res == None:
            res = self.vw.searchMemory(pattern)
        l = gtk.Label("Pattern: %s" % pattern.encode('hex')[:20])
        view = viv_views.SearchView(self.vw, self.gui, res)
        l.show_all()
        view.show_all()
        self.notebook.append_page(view, l)
class VivReportWindow(VivWindow):
    """Window displaying the results of one report module run."""
    def __init__(self, vw, gui, repmod, report=None):
        # Set before VivWindow.__init__, which triggers vivBuildWindow().
        self.repmod = repmod
        self.report = report
        VivWindow.__init__(self, vw, gui)
        self.lyt_restore = False # We don't want to be saved

    def vivBuildWindow(self):
        self.set_title("Report Results: %s" % self.repmod.__name__)
        self.reportview = viv_views.ReportView(self.vw, self.gui, self.repmod, self.report)
        self.add(self.reportview)
class VivFunctionWindow(VivWindow, viv_base.VivEventCore):
    """Function editor: parameters, calling convention, code blocks, metadata."""
    def __init__(self, vw, gui, fva):
        # Set before VivWindow.__init__, which triggers vivBuildWindow().
        self.fva = fva
        VivWindow.__init__(self, vw, gui)
        viv_base.VivEventCore.__init__(self,vw)
        gui.addEventCore(self)
        self.set_title('Editing: 0x%.8x %s' % (fva, vw.getName(fva)))

    def vwDestroy(self):
        # Unsubscribe from workspace events on teardown.
        self.gui.delEventCore(self)

    def vivBuildWindow(self):
        """Assemble the editor layout: params + calling convention on top,
        then code blocks, then function metadata."""
        vbox = gtk.VBox()
        hbox1 = gtk.HBox()
        h1_vbox1 = gtk.VBox()
        h1_vbox2 = gtk.VBox()
        h1_vbox1.pack_start(gtk.Label('Function Parameters'), expand=False)
        parmview = viv_views.VivFuncParamView(self.vw, self.gui, self.fva)
        h1_vbox1.pack_start(parmview)
        fcconv = self.vw.getFunctionMeta(self.fva, 'CallingConvention')
        matchidx = None
        h1_vbox2.pack_start(gtk.Label('Calling Convention'), expand=False)
        callconv = gtk.combo_box_new_text()
        callconv.connect('changed', self.cbCallConvChanged)
        conventions = self.vw.arch.getCallingConventions()
        for i,(name,obj) in enumerate(conventions):
            if name == fcconv:
                matchidx = i
            callconv.append_text(name)
        callconv.append_text('__unknown')
        if matchidx == None:
            # Set ourself to '__unknown'
            matchidx = len(conventions)
        callconv.set_active(matchidx)
        h1_vbox2.pack_start(callconv, expand=False)
        hbox1.pack_start(h1_vbox1)
        hbox1.pack_start(h1_vbox2)
        vbox.pack_start(hbox1)
        vbox.pack_start(gtk.Label('Function Code Blocks'), expand=False)
        cbview = viv_views.VivCodeBlockView(self.vw, self.gui, self.fva)
        vbox.pack_start(cbview)
        vbox.pack_start(gtk.Label('Function Meta Data'), expand=False)
        mview = viv_views.VivFuncMetaView(self.vw, self.gui, self.fva)
        vbox.pack_start(mview)
        vbox.show_all()
        self.add(vbox)
        self.resize(800, 600)
        self._initFunctionData()

    def getWindowState(self):
        # Layout persistence: the function VA is the only saved state.
        return self.fva

    def setWindowState(self, fva):
        self.fva = fva
        self._initFunctionData()

    def _initFunctionData(self):
        # Refresh the title from the (possibly renamed) function.
        name = self.vw.getName(self.fva)
        self.set_title('Function: 0x%.8x %s' % (self.fva, name))

    def cbCallConvChanged(self, cbox):
        """Combo handler: store the chosen calling convention on the function.

        NOTE(review): the combo's sentinel entry is '__unknown' (appended in
        vivBuildWindow) but this compares against 'Unknown', so selecting the
        sentinel stores the literal string '__unknown' instead of None —
        looks like a latent mismatch; confirm against vivisect's meta usage.
        """
        model = cbox.get_model()
        idx = cbox.get_active()
        cconv = model[idx][0]
        if cconv == 'Unknown':
            cconv = None
        self.vw.setFunctionMeta(self.fva, 'CallingConvention', cconv)
class VivMainWindow(vw_windows.MainWindow):
    """The main vivisect console window: menubar, CLI, and app lifecycle."""
    def __init__(self, vw, gui):
        self.vw = vw
        self.gui = gui
        # The viv workspace is cli/memobj/symboj
        vw_windows.MainWindow.__init__(self, vw, vw, syms=vw)
        self.connect('delete_event', self._mainDelete)
        self.set_title("Vivisect Console")
        # File menu.
        self.menubar.addField("_File._Open._Workspace", self.file_open_workspace)
        self.menubar.addField("_File._Open._Binary._PE", self.file_open_binary, args=("pe",))
        self.menubar.addField("_File._Open._Binary._Elf", self.file_open_binary, args=("elf",))
        self.menubar.addField("_File._Open._Binary._blob", self.file_open_binary, args=("blob",))
        self.menubar.addField("_File._Save", self.file_save)
        self.menubar.addField("_File.Save As", self.file_saveas)
        self.menubar.addField("_File._Quit", self.file_quit)
        #self.menubar.addField("Edit", None)
        # View menu.
        self.menubar.addField("_View._Memory", self.view_window, args=("VivMemoryWindow",))
        self.menubar.addField("_View._Navigation", self.view_window, args=("VivNaviWindow",))
        self.menubar.addField("_View._Data/Structures", self.view_window, args=("VivDataWindow",))
        # Search menu.
        self.menubar.addField("_Search._String (asdf)", self.search_string)
        self.menubar.addField("_Search._Hex (414243)", self.search_bytes)
        #FIXME endian
        self.menubar.addField("_Search._Expression (0xf0 + 20)", self.search_expression)
        # Share menu (collaborative analysis).
        self.menubar.addField('_Share._Share Workspace', self.share_workspace)
        self.menubar.addField('_Share._Connect To Workspace', self.share_connect)
        self.menubar.addField('_Share._Lead/Follow._Lead', self.share_lead)
        self.menubar.addField('_Share._Lead/Follow._Follow', self.share_follow)
        self.menubar.addField('_Share._Lead/Follow._Get Out Of The Way', self.share_outtheway)
        # Debug menu.
        self.menubar.addField('_Debug._Vdb', self.debug_local)
        self.menubar.addField('_Debug._Remote Vdb', self.debug_remote)
        # Tools menu.
        self.menubar.addField("_Tools._Python", self.tools_python)
        self.menubar.addField('_Tools._Structures._Add Namespace', self.tools_struct_addns)
        for descr, modname in viv_reports.listReportModules():
            self.menubar.addField("_Tools._Reports._%s" % descr, self.tools_reports, args=(modname,))
        self.menubar.addField("_Tools._Extended Analysis._Crypto Constants", self.tools_eanalysis, args=("vivisect.analysis.crypto.constants",))
        # Va Sets submenu is rebuilt dynamically on each select.
        i = self.menubar.addField("_Tools._Va Sets")
        i.set_submenu(gtk.Menu())
        i.connect("select", self.tools_vasets_select)
        i.show_all()

    def _mainDelete(self, *args):
        # Window-close handler: persist layout, tear down, and shut down.
        if self.vw.vivhome:
            lfile = os.path.join(self.vw.vivhome, "viv.lyt2")
            # NOTE(review): file() is the Python 2 builtin open().
            self.gui.saveLayoutFile(file(lfile,"wb"))
        self.gui.deleteAllWindows(omit=self)
        self.vw.shutdown.set()
        if self.gui.ismain:
            vw_main.shutdown()

    def share_workspace(self, item):
        """Start serving this workspace to remote collaborators."""
        self.vw.vprint('Sharing workspace...')
        daemon = viv_server.shareWorkspace(self.vw)
        self.vw.vprint('Workspace Listening Port: %d' % daemon.port)

    def share_connect(self, item):
        # Connecting now happens via the external vivclient command.
        self.vw.vprint('Viv now uses a client command... (to allow code sync)')
        self.vw.vprint('Use python vivclient <host> <port>')
        self.vw.vprint('Example: bash$ python vivclient 172.16.1.1 17384')

    def share_lead(self, item):
        # Broadcast our goto events to followers.
        self.vw.vprint('Leading Share GOTO...')
        self.gui.leadstate = LEADSTATE_LEAD

    def share_follow(self, item):
        # Follow goto events broadcast by the leader.
        self.vw.vprint('Following Share GOTO...')
        self.gui.leadstate = LEADSTATE_FOLLOW

    def share_outtheway(self, item):
        # Ignore goto broadcasts entirely.
        self.vw.vprint('Ignoring Share GOTO...')
        self.gui.leadstate = LEADSTATE_OUTTHEWAY

    def debug_local(self, item):
        """Pick a local process and attach vdb to it."""
        t = vtrace.getTrace()
        dia = vw_vtrace.SelectProcessDialog(t)
        pid = dia.selectProcess()
        viv_vdbext.runVdb(self.gui, pid=pid)

    def debug_remote(self, item):
        """Ask for a remote vdb server host, then run the local attach flow."""
        x = self.gui.getInputText('Remote Host:', parent=self, default='<vdb server>')
        if x == None:
            return
        # vtrace.remote redirects getTrace() at the remote server.
        vtrace.remote = x
        self.debug_local(item)

    def search_string(self, item):
        """Search memory for a literal string and show results."""
        x = self.gui.getInputText('String:', parent=self, default="invisigoth")
        if x != None:
            self.searchAndShow(x)

    def search_bytes(self, item):
        """Search memory for hex-encoded bytes and show results."""
        x = self.gui.getInputText('Hex Bytes:', parent=self, default="56565656")
        if x != None:
            # NOTE(review): str.decode('hex') is Python 2 only.
            self.searchAndShow(x.decode('hex'))

    def search_expression(self, item):
        # Not implemented yet.
        self.vw.vprint("FIXME search_expression")

    def searchAndShow(self, pattern):
        """Run a memory search and present the results window."""
        win = self.gui.presentWindow("VivSearchWindow")
        win.addSearchPattern(pattern)

    def tools_python(self, item):
        """Open an interactive Python dialog with workspace locals."""
        l = self.vw.getExpressionLocals()
        p = vw_pydialog.PyDialog(l)
        p.show()

    def tools_struct_addns(self, item):
        """Add a vstruct namespace chosen by the user."""
        n = vw_vstruct.selectNamespace(parent=self)
        if n != None:
            nsname, modname = n
            self.vw.addStructureModule(nsname, modname)

    def tools_eanalysis(self, mitem, modname):
        """Run an extended-analysis module in a background thread."""
        mod = self.vw.loadModule(modname)
        self.vw.vprint("Running: %s" % modname)
        thr = threading.Thread(target=mod.analyze, args=(self.vw,))
        thr.setDaemon(True)
        thr.start()

    def tools_vaset(self, mitem, setname):
        """Open a window for the chosen VA set."""
        w = VivVaSetViewWindow(self.vw, self.gui, setname)
        self.gui.manageWindow(w)

    def tools_vasets_select(self, mitem):
        # Rebuild the Va Sets submenu from the current workspace each time
        # it is opened, so new sets appear without a restart.
        subm = mitem.get_submenu()
        for n in subm.get_children():
            subm.remove(n)
        for name in self.vw.getVaSetNames():
            mi = gtk.MenuItem(label=name)
            mi.show()
            mi.connect("activate", self.tools_vaset, name)
            subm.append(mi)

    @firethread
    def runReport(self, rmod):
        # Background thread: run the report, then hop to the GUI thread.
        rep = rmod.report(self.vw)
        self.showReport(rmod, rep)

    @idlethread
    def showReport(self, rmod, rep):
        # GUI thread: present the report results window.
        self.vw.vprint("%d results" % len(rep))
        rwin = VivReportWindow(self.vw, self.gui, rmod, rep)
        self.gui.manageWindow(rwin)

    def tools_reports(self, menuitem, reportname):
        """Load and run a report module by name."""
        r = self.vw.loadModule(reportname)
        self.vw.vprint("Running: %s" % reportname)
        self.runReport(r)

    def file_quit(self, *args):
        self.gui.deleteAllWindows()

    def file_save(self, menuitem):
        # Quick save: only changed events, not a full rewrite.
        self.saveWorkspace(fullsave=False)

    @firethread
    def saveWorkspace(self, fullsave=True):
        # Background thread: saving can take a while on big workspaces.
        self.vw.vprint('Saving %s...' % (self.vw.getMeta('StorageName')))
        s = time.time()
        self.vw.saveWorkspace(fullsave=fullsave)
        e = time.time()
        self.vw.vprint('...save complete! (%d sec)' % (e-s))

    @firethread
    def loadBinaryFile(self, filename, fmtname):
        # Background thread: parse the binary, then kick off analysis.
        self.vw.vprint("Loading %s..." % filename)
        self.vw.loadFromFile(filename, fmtname=fmtname)
        self.analyzeWorkspace()

    @firethread
    def analyzeWorkspace(self):
        # Background thread: full workspace auto-analysis.
        self.vw.analyze()

    @firethread
    def loadWorkspace(self, wsname):
        # Background thread: load a saved workspace.
        self.vw.vprint('Loading %s...' % wsname)
        s = time.time()
        self.vw.loadWorkspace(wsname)
        e = time.time()
        self.vw.vprint('...load complete! (%d sec)' % (e-s))

    def file_saveas(self, menuitem):
        """Pick a new storage file, then do a full save."""
        buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_SAVE,gtk.RESPONSE_OK)
        f = gtk.FileChooserDialog("Open Vivisect Workspace", self, action=gtk.FILE_CHOOSER_ACTION_SAVE, buttons=buttons)
        if f.run() == gtk.RESPONSE_OK:
            self.vw.setMeta('StorageName', f.get_filename())
            self.saveWorkspace(fullsave=True)
        f.destroy()

    def file_open_workspace(self, menuitem):
        """Pick and load a saved workspace file."""
        buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK)
        f = gtk.FileChooserDialog("Open Vivisect Workspace", self, action=gtk.FILE_CHOOSER_ACTION_OPEN, buttons=buttons)
        if f.run() == gtk.RESPONSE_OK:
            self.loadWorkspace(f.get_filename())
        f.destroy()

    def file_open_binary(self, menuitem, parsemod):
        """Pick a binary and load it with the given parser ('pe'/'elf'/'blob')."""
        buttons=(gtk.STOCK_CANCEL,gtk.RESPONSE_CANCEL,gtk.STOCK_OPEN,gtk.RESPONSE_OK)
        f = gtk.FileChooserDialog("Open %s Binary" % parsemod, self, action=gtk.FILE_CHOOSER_ACTION_OPEN, buttons=buttons)
        if f.run() == gtk.RESPONSE_OK:
            self.loadBinaryFile(f.get_filename(), parsemod)
        f.destroy()

    def view_window(self, menuitem, winname):
        # Raise (or create) the named window.
        self.gui.presentWindow(winname)

    def getMainToolbar(self):
        return None # FIXME for now...
# Collaborative "follow the leader" goto modes (see VivGui.leadstate).
LEADSTATE_OUTTHEWAY = 0  # ignore goto broadcasts
LEADSTATE_LEAD = 1       # broadcast our goto events to followers
LEADSTATE_FOLLOW = 2     # follow goto events broadcast by the leader
class VivGui(vw_layout.LayoutManager, viv_base.VivEventDist):
    """Top-level GUI controller: window factory, layout persistence, and
    workspace-event dispatch for all vivisect windows."""
    def __init__(self, vw, ismain=True):
        self.vw = vw
        # Back-reference so workspace code can reach the GUI.
        self.vw._viv_gui = self
        self.leadstate = LEADSTATE_OUTTHEWAY
        self.ismain = ismain
        # Lazily-created, reused InputDialog (see getInputText).
        self.inputdia = None
        vw_layout.LayoutManager.__init__(self)
        viv_base.VivEventDist.__init__(self, vw)
        self.defgeom = (20,20,600,450)
        # Register every window class so layouts can recreate them by name.
        self.addWindowClass(VivMainWindow, args=(vw, self), defgeom=self.defgeom)
        self.addWindowClass(VivMemoryWindow, args=(vw, self), defgeom=self.defgeom)
        self.addWindowClass(VivVaSetViewWindow, args=(vw, self), defgeom=self.defgeom)
        self.addWindowClass(VivReportWindow, args=(vw, self), defgeom=self.defgeom)
        self.addWindowClass(VivCallersWindow, args=(vw, self), defgeom=self.defgeom)
        self.addWindowClass(VivNaviWindow, args=(vw, self), defgeom=self.defgeom)
        self.addWindowClass(VivDataWindow, args=(vw, self), defgeom=self.defgeom)
        self.addWindowClass(VivXrefWindow, args=(vw, self), defgeom=self.defgeom)
        self.addWindowClass(VivSearchWindow, args=(vw, self), defgeom=self.defgeom)
        # Fall back to a default pair of windows if no saved layout exists.
        if not self.loadDefaultLayout():
            self.createWindow('VivMainWindow')
            self.createWindow('VivMemoryWindow')
        self.getOrCreateWindow('VivMainWindow')

    @idlethread
    def _ve_fireEvent(self, event, edata):
        # Marshal all workspace events onto the GUI thread before dispatch.
        return viv_base.VivEventDist._ve_fireEvent(self, event, edata)

    @idlethreadsync
    def getInputText(self, prompt, parent=None, default=None):
        """
        Request a single line of input from the user. Specify "default"
        for the default text in the input box, and parent for the parent
        window of the dialog box.
        """
        if self.inputdia == None:
            self.inputdia = InputDialog(parent)
        self.inputdia.setPrompt(prompt)
        self.inputdia.setDefault(default)
        return self.inputdia.getInputText()

    def VWE_FOLLOWME(self, event, einfo):
        # Event handler: jump to the leader's location when following.
        if self.leadstate == LEADSTATE_FOLLOW:
            self.goto(einfo)

    @idlethread
    def goto(self, va):
        """Navigate the memory window to *va* (GUI thread)."""
        win = self.presentWindow('VivMemoryWindow')
        # If somebody uses GOTO and they are the leader, notify followers
        if self.leadstate == LEADSTATE_LEAD:
            self.vw._fireEvent(VWE_FOLLOWME, va)
        pstr = self.vw.arch.pointerString(va)
        oldva,oldsize,oldfmt = win.getWindowState()
        win.setWindowState((pstr, oldsize, oldfmt))

    @idlethread
    def setColorMap(self, cmap):
        """Apply a VA->color map to the memory window canvas (GUI thread)."""
        win = self.presentWindow('VivMemoryWindow')
        win.canvas.setColorMap(cmap)

    def loadDefaultLayout(self):
        """Load the saved window layout from vivhome; False if none exists.

        NOTE(review): file() is the Python 2 builtin open().
        """
        if self.vw.vivhome != None:
            lfile = os.path.join(self.vw.vivhome, "viv.lyt2")
            if os.path.exists(lfile):
                return self.loadLayoutFile(file(lfile, "rb"))
        return False

    def presentWindow(self, name):
        """Raise the named window, creating it first if necessary."""
        match = None
        for win in self.windows:
            if win.getWindowName() == name:
                match = win
                break
        if match == None:
            match = self.createWindow(name)
        match.present()
        return match

    def waitOnCli(self):
        # Block until the workspace signals shutdown.
        self.vw.shutdown.wait()
|
'''
Created on Aug 4, 2010
@author: avepoint
'''
from google.appengine.ext import db
from google.appengine.api import users
class MTGCard(db.Model):
    """App Engine datastore model for one Magic: The Gathering card.

    All fields are stored as strings; only the card name is required.
    NOTE(review): 'type' shadows the builtin, but it is the datastore
    property name and cannot be renamed without a schema change.
    """
    name = db.StringProperty(required=True)
    edition = db.StringProperty()
    color = db.StringProperty()
    type = db.StringProperty()
    rarity = db.StringProperty()
    cost = db.StringProperty()
    pt = db.StringProperty()      # power/toughness, e.g. "2/3" — presumably; confirm
    text = db.StringProperty()
    cmc = db.StringProperty()     # converted mana cost, kept as a string
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
from gbpclient.gbp.v2_0 import router as router_ext
from gbpclient.tests.unit import test_cli20
from openstackclient.network.v2 import router
from openstackclient.tests.unit.network.v2 import test_router
# Tests for router create for APIC extensions
#
class TestRouterCreate(test_router.TestRouter, test_cli20.CLITestV20Base):
    """Tests for 'router create' with the APIC contract extension options."""
    def setUp(self):
        super(TestRouterCreate, self).setUp()
        # Reuse the upstream fake router and stub the SDK create call.
        self.new_router = test_router.TestCreateRouter.new_router
        self.network.create_router = mock.Mock(return_value=self.new_router)
        self.cmd = router.CreateRouter(self.app, self.namespace)
    def test_create_default_options(self):
        # When the APIC options are omitted they must not appear in the
        # create request sent to the SDK.
        arglist = [
            self.new_router.name,
        ]
        verifylist = [
            ('name', self.new_router.name),
            ('apic_external_provided_contracts', None),
            ('apic_external_consumed_contracts', None),
        ]
        create_ext = router_ext.CreateAndSetRouterExtension(self.app)
        parsed_args = self.check_parser_ext(
            self.cmd, arglist, verifylist, create_ext)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.create_router.assert_called_once_with(**{
            'admin_state_up': True,
            'name': self.new_router.name,
        })
    def test_create_all_options(self):
        # Single contract values are forwarded as one-element lists under
        # the 'apic:' prefixed attribute names.
        arglist = [
            self.new_router.name,
            "--apic-external-provided-contracts", 'ptest1',
            "--apic-external-consumed-contracts", 'ctest1',
        ]
        verifylist = [
            ('name', self.new_router.name),
            ('apic_external_provided_contracts', 'ptest1'),
            ('apic_external_consumed_contracts', 'ctest1'),
        ]
        create_ext = router_ext.CreateAndSetRouterExtension(self.app)
        parsed_args = self.check_parser_ext(
            self.cmd, arglist, verifylist, create_ext)
        columns, data = self.cmd.take_action(parsed_args)
        self.network.create_router.assert_called_once_with(**{
            'admin_state_up': True,
            'name': self.new_router.name,
            'apic:external_provided_contracts': ['ptest1'],
            'apic:external_consumed_contracts': ['ctest1'],
        })
# Tests for router set for APIC extensions
#
class TestRouterSet(test_router.TestRouter, test_cli20.CLITestV20Base):
    """Tests for 'router set' with the APIC contract extension options."""
    # Fakes borrowed from the upstream openstackclient test suite.
    _network = test_router.TestSetRouter._network
    _subnet = test_router.TestSetRouter._subnet
    _router = test_router.TestSetRouter._router
    def setUp(self):
        super(TestRouterSet, self).setUp()
        # Stub every SDK call 'router set' may perform.
        self.network.router_add_gateway = mock.Mock()
        self.network.update_router = mock.Mock(return_value=None)
        self.network.set_tags = mock.Mock(return_value=None)
        self.network.find_router = mock.Mock(return_value=self._router)
        self.network.find_network = mock.Mock(return_value=self._network)
        self.network.find_subnet = mock.Mock(return_value=self._subnet)
        self.cmd = router.SetRouter(self.app, self.namespace)
    def test_set_no_options(self):
        # With no options at all, no update request may be issued.
        arglist = [
            self._router.name,
        ]
        verifylist = [
            ('router', self._router.name),
        ]
        set_ext = router_ext.CreateAndSetRouterExtension(self.app)
        parsed_args = self.check_parser_ext(
            self.cmd, arglist, verifylist, set_ext)
        result = self.cmd.take_action(parsed_args)
        self.assertFalse(self.network.update_router.called)
        self.assertFalse(self.network.set_tags.called)
        self.assertIsNone(result)
    def test_set_all_valid_options(self):
        # Comma-separated contract values are split into lists.
        arglist = [
            self._router.name,
            "--apic-external-provided-contracts", 'ptest1,ptest11',
            "--apic-external-consumed-contracts", 'ctest1,ctest11',
        ]
        verifylist = [
            ('router', self._router.name),
            ('apic_external_provided_contracts', 'ptest1,ptest11'),
            ('apic_external_consumed_contracts', 'ctest1,ctest11'),
        ]
        set_ext = router_ext.CreateAndSetRouterExtension(self.app)
        parsed_args = self.check_parser_ext(
            self.cmd, arglist, verifylist, set_ext)
        result = self.cmd.take_action(parsed_args)
        attrs = {
            'apic:external_provided_contracts': ['ptest1', 'ptest11'],
            'apic:external_consumed_contracts': ['ctest1', 'ctest11'],
        }
        self.network.update_router.assert_called_once_with(
            self._router, **attrs)
        self.assertIsNone(result)
|
"""
DCC XML Generator Functions
H3A
"""
import os
import dcc
import dccxml
def GenH3A_AEWBParams(handle, h3a_aewb_params, cls_id):
    """Parameter generator registered as FUNC_GENPARAMS for the H3A AEWB
    module; currently a no-op (the default config is emitted inline by
    GenH3A_AEWBXML)."""
    return
def GenH3A_AEWBXML(directory, filebase, params, h3a_aewb_params):
    """Generate the DCC XML file for the H3A AEWB (auto-exposure /
    auto-white-balance engine) configuration.

    The file is written to <directory>/<SENSOR>_<filebase>.xml, containing
    the iss_h3a_grid_size typedef plus one default use-case whose grid is
    derived from the sensor dimensions in *params*.
    """
    if not os.path.exists(directory):
        print ('Creating directory: %s\n' %directory)
        try:
            os.makedirs(directory)
        except OSError as err:
            # BUG FIX: the original called utils.error(...), but 'utils' is
            # never imported in this module, so any makedirs failure raised
            # NameError.  Report and continue (matching the skip=True
            # intent); OpenFile below fails loudly if the dir is unusable.
            print('%s: %s' % (err.strerror, err.filename))
    filename = os.path.join(directory, '%s_%s.xml' %(params['SENSOR'], filebase))
    print ('Creating XML File: %s\n' %filename )
    # Module table consumed by dccxml.GenHeader/GenFooter.
    module_params = {}
    module_params['NAME'] = 'VISS_H3A_AEWB_CFG'
    module_params['STRUCT_NAME'] = 'iss_h3a_grid_size'
    module_params['DCC_ID'] = dcc.DCC_ID_H3A_AEWB_CFG
    module_params['FUNC_GENPARAMS'] = GenH3A_AEWBParams
    handle = dccxml.OpenFile(filename)
    dccxml.GenHeader(handle, params, module_params)
    # Create the DCC Structure definition
    handle.write('        <!--=======================================================================-->\n')
    handle.write('        <typedef>\n')
    handle.write('          <%s type="struct">  \n' %module_params['STRUCT_NAME'])
    handle.write('            <enable            type="uint8">  </enable>            <!-- enable h3a aewb-->\n')
    handle.write('            <mode              type="uint8">  </mode>              <!-- 0 = SUM_SQ, 1=MINMAX, 2=SUM_ONLY-->\n')
    handle.write('            <v_start           type="uint16"> </v_start>           <!-- Paxel_0 Start Coordinate Y in Pixels -->\n')
    handle.write('            <h_start           type="uint16"> </h_start>           <!-- Paxel_0 Start Coordinate H in Pixels -->\n')
    handle.write('            <v_size            type="uint8">  </v_size>            <!-- Paxel Height in Pixels -->\n')
    handle.write('            <h_size            type="uint8">  </h_size>            <!-- Paxel Width in Pixels -->\n')
    handle.write('            <v_count           type="uint8">  </v_count>           <!-- number of vertical paxels -->\n')
    handle.write('            <h_count           type="uint8">  </h_count>           <!-- number of horizontal paxels -->\n')
    handle.write('            <v_skip            type="uint8">  </v_skip>            <!-- vertical subsampling factor (0-15) -->\n')
    handle.write('            <h_skip            type="uint8">  </h_skip>            <!-- horizontal subsampling factor (0-15) -->\n')
    handle.write('            <saturation_limit  type="uint16"> </saturation_limit>  <!-- saturation_limit (0-1023) -->\n')
    handle.write('            <blk_win_numlines  type="uint16"> </blk_win_numlines>  <!-- Win Height for the single black line of windows  (2-256)-->\n')
    handle.write('            <blk_row_vpos      type="uint16"> </blk_row_vpos>      <!-- Vertical Position of Black Row -->\n')
    handle.write('            <sum_shift         type="uint8">  </sum_shift>         <!-- Sum Shift (0-15) -->\n')
    handle.write('            <ALaw_En           type="uint8">  </ALaw_En>           <!-- A Law Enable (0/1) -->\n')
    handle.write('            <MedFilt_En        type="uint8">  </MedFilt_En>        <!-- Median Filter Enable (0/1) -->\n')
    handle.write('          </%s>  \n' %module_params['STRUCT_NAME'])
    handle.write('        </typedef>\n')
    handle.write('        <!--=======================================================================-->\n')
    #Default config is 32x16 windows with 2x2 skip
    #64 pixels are excluded at start and end in both H and V dimensions
    h_count = 32
    v_count = 16
    h_start = 64
    v_start = 64
    # Paxel sizes are forced even by the 2*(.../(2*count)) rounding.
    h_size = 2*((params['SENSOR_WIDTH'] - h_start-64)//(2*h_count))
    v_size = 2*((params['SENSOR_HEIGHT'] - v_start-64)//(2*v_count))
    blk_win_numlines = 2
    # Black row sits on the last blk_win_numlines lines of the sensor.
    blk_row_vpos = params['SENSOR_HEIGHT'] - blk_win_numlines
    v_skip = 2
    h_skip = 2
    handle.write('        <use_case val="65535">  \n')
    handle.write('          <n-space>\n')
    handle.write('            <region0 class="0">\n')
    handle.write('              <gain val="0" min="0" max="10240"> </gain>\n')
    handle.write('              <exposure val="1" min="0" max="10000000"> </exposure>\n')
    handle.write('              <colortemperature val="2" min="0" max="10000"> </colortemperature>\n')
    handle.write('            </region0>\n')
    handle.write('          </n-space>\n')
    handle.write('          <parameter_package>\n')
    handle.write('            <h3a_aewb_dcc type="iss_h3a_grid_size">\n')
    handle.write('              {\n')
    handle.write('                1,        // enable: u8\n')
    handle.write('                2,        // mode: u8\n')
    handle.write('                %d,        // v_start: u16\n' %v_start)
    handle.write('                %d,        // h_start: u16\n' %h_start)
    handle.write('                %d,        // v_size: u8\n' %v_size)
    handle.write('                %d,        // h_size: u8\n' %h_size)
    handle.write('                %d,        // v_count: u8\n' %v_count)
    handle.write('                %d,        // h_count: u8\n' %h_count)
    handle.write('                %d,        // v_skip: u8\n' %v_skip)
    handle.write('                %d,        // h_skip: u8\n' %h_skip)
    handle.write('                1000,     // saturation_limit: u16\n')
    handle.write('                %d,        // blk_win_numlines: u16\n' %blk_win_numlines)
    handle.write('                %d,        // blk_row_vpos: u16\n' %blk_row_vpos)
    handle.write('                2,        // Sum Shift: u8\n')
    handle.write('                0,        // ALaw_En: u8\n')
    handle.write('                0,        // MedFilt_En: u8\n')
    handle.write('              }\n')
    handle.write('            </h3a_aewb_dcc>\n')
    handle.write('          </parameter_package>\n')
    handle.write('        </use_case>  \n')
    dccxml.GenFooter(handle, module_params)
    dccxml.CloseFile(handle)
|
# 第 0013 题: 用 Python 写一个爬图片的程序,爬 这个链接里的日本妹子图片 :-)
import os
import time
from bs4 import BeautifulSoup
import urllib.request
import urllib
def download_pic(url_pic, local_pic):
    """Fetch the image at *url_pic* and store it at *local_pic*."""
    urllib.request.urlretrieve(url_pic, local_pic)
url = 'https://www.enterdesk.com/zhuomianbizhi/secaibizhi/'
headers = {'User-agent':'Mozilla/5.0 (Windows NT 6.2; WOW64; rv:22.0) Gecko/20100101 Firefox/22.0'}
r = urllib.request.Request(url, headers=headers)
response = urllib.request.urlopen(r, timeout=20)
html = response.read()
soup = BeautifulSoup(html, 'lxml')
a = soup.find_all('img')
# create the download directory if it does not exist yet
file_name = r'E:\pythonProgram\LittleProgram\picture'
if not os.path.exists(file_name):
    os.mkdir(file_name)
# enumerate replaces the original zip(a, range(len(a))) idiom
for j, i in enumerate(a):
    local_pic_path = os.path.join(file_name, str(j)+'.png')
    time.sleep(2)
    print('{0}/{1}'.format(j + 1, len(a)))
    print(i['src'])
    try:
        download_pic(i['src'], local_pic=local_pic_path)
    except Exception:
        # BUG FIX: was a bare 'except:', which also swallowed
        # KeyboardInterrupt and made the crawler impossible to stop.
        continue
print('下载完成')
|
import unittest
from katas.kyu_6.greatest_position_distance import greatest_distance
class GreatestDistanceTestCase(unittest.TestCase):
    """Unit tests for the kyu-6 greatest_distance kata."""
    def test_equals(self):
        # duplicate 2s are 3 positions apart
        self.assertEqual(greatest_distance([0, 2, 1, 2, 4, 1]), 3)
    def test_equals_2(self):
        self.assertEqual(greatest_distance([9, 7, 1, 2, 3, 7, 0, -1, -2]), 4)
    def test_equals_3(self):
        self.assertEqual(greatest_distance([0, 7, 0, 2, 3, 7, 0, -1, -2]), 6)
    def test_equals_4(self):
        # no duplicates at all -> distance 0
        self.assertEqual(greatest_distance([1, 2, 3, 4]), 0)
|
from flask import Flask, abort, flash, make_response, redirect, request, render_template, session, url_for
from functools import wraps
import json
import jwt
import logging
import os
import requests
import settings
import xml.etree.ElementTree as ET
# define our webapp
app = Flask(__name__)
# configuration is centralised in settings.py
app.secret_key = settings.SESSION_KEY
app.config['API_HOST'] = settings.API_HOST
app.config['AUTHORIZED_GROUP'] = settings.AUTHORIZED_GROUP
app.config['BIBS_API_KEY'] = settings.BIBS_API_KEY
app.config['USERS_API_KEY'] = settings.USERS_API_KEY
app.config['GET_BY_BARCODE'] = settings.GET_BY_BARCODE
app.config['USERS'] = settings.USERS
app.config['LOG_FILE'] = settings.LOG_FILE
# xpaths for fields that need updating -- maps a logical field name to its
# location inside an Alma item XML record
app.config['XPATH'] = {
    'alt_call' : './/item_data/alternative_call_number',
    'alt_call_type' : './/item_data/alternative_call_number_type',
    'barcode' : './/item_data/barcode',
    'int_note' : './/item_data/internal_note_1',
    'mms_id' : './/mms_id',
    'title' : './/title',
}
# audit log: one tab-separated line per successful field update
audit_log = logging.getLogger('audit')
audit_log.setLevel(logging.INFO)
file_handler = logging.FileHandler(app.config['LOG_FILE'])
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(logging.Formatter('%(asctime)s\t%(message)s'))
audit_log.addHandler(file_handler)
# login wrapper
def auth_required(f):
    """Decorator: only let logged-in users through.

    In the development environment a fake 'devuser' session is created
    instead; otherwise anonymous requests are redirected to the login page.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        if 'username' not in session:
            if app.config['ENV'] != 'development':
                return redirect(url_for('login'))
            session['username'] = 'devuser'
        return f(*args, **kwargs)
    return decorated
# error handlers
# BUG FIX: all three handlers were named page_not_found, so each def
# shadowed the previous one (flake8 F811); Flask had already registered
# them, but the module attributes collided.  Each now has a distinct name.
@app.errorhandler(403)
def forbidden(e):
    """Render the access-denied page."""
    return render_template('403.html'), 403
@app.errorhandler(404)
def page_not_found(e):
    """Render the not-found page."""
    return render_template('404.html'), 404
@app.errorhandler(500)
def internal_server_error(e):
    """Render the generic server-error page."""
    return render_template('500.html'), 500
# routes and controllers
@app.route('/')
@auth_required
def index():
    """Landing page with links to the update workflows."""
    page = render_template("index.html")
    return page
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Show the login form (GET) or authenticate against Alma (POST)."""
    if request.method != 'POST':
        return render_template('login.html')
    username = request.form['username']
    password = request.form['password']
    # Alma's users API answers 204 when the credentials check out.
    if _alma_authenticate(username, password) != 204:
        flash('User not found, check username and password')
        return redirect(url_for('login'))
    # Valid credentials are not enough: the account must be in the right group.
    if not _alma_authorize(username):
        return abort(403)
    session['username'] = username
    return redirect(url_for('index'))
@app.route('/logout')
def logout():
    """Drop the entire session and confirm the logout."""
    session.clear()
    return render_template('logout.html')
@app.route('/update-field/alt-call')
@auth_required
def update_alt_call():
    """Entry page for the Alternative Call Number workflow."""
    return render_template(
        'find_item.html',
        get_input_function='get_alt_call_input',
        field_name='Alternative Call Number',
    )
@app.route('/update-field/alt-call/get-input', methods=['POST'])
@auth_required
def get_alt_call_input():
    """Look up an item by barcode and show the alt-call-number input form.

    Returns the Alma API error text when the lookup fails, and a 500 page
    when the retrieved record is missing the expected fields.
    """
    field_name = 'Alternative Call Number'
    barcode = request.form['barcode']
    # try to find the item in alma or return an error
    try:
        item = _alma_get(app.config['API_HOST'] +
                         app.config['GET_BY_BARCODE'].format(barcode))
    except requests.exceptions.RequestException as e:
        return e.args[0]
    item_record = item.decode(encoding='utf-8')
    item_root = _parse_item(item_record)
    # grab some fields from retrieved item to show operator
    try:
        retrieved_barcode = item_root.find(app.config['XPATH']['barcode']).text
        retrieved_title = item_root.find(app.config['XPATH']['title']).text.strip('/')
    except Exception:
        # BUG FIX: was a bare 'except:', which also swallowed SystemExit /
        # KeyboardInterrupt; an AttributeError here means a field is missing.
        return abort(500)
    return render_template('get_input.html',
                           barcode=barcode,
                           field_name=field_name,
                           item=item_record,
                           update_function='update_alt_call_field',
                           retrieved_barcode=retrieved_barcode,
                           retrieved_title=retrieved_title)
@app.route('/update-field/alt-call/update', methods=['POST'])
@auth_required
def update_alt_call_field():
    """Apply the submitted alternative call number, then return to the form."""
    field = 'alt_call'
    barcode = request.form['barcode']
    new_val = request.form['new_val']
    updated_result = _update_field(request.form['item-record'], field, new_val)
    if not updated_result:
        return updated_result
    # save the result of the post transaction as a message to be displayed on the next page
    flash("Changed {} for {} to {}".format(field, barcode, new_val))
    return redirect(url_for('update_alt_call'))
# internal note update
@app.route('/update-field/int-note')
@auth_required
def update_int_note():
    """Entry page for the Internal Note workflow."""
    return render_template(
        'find_item.html',
        get_input_function='get_int_note_input',
        field_name='Internal Note',
    )
@app.route('/update-field/int-note/get-input', methods=['POST'])
@auth_required
def get_int_note_input():
    """Look up an item by barcode and show the internal-note input form.

    Returns the Alma API error text when the lookup fails, and a 500 page
    when the retrieved record is missing the expected fields.
    """
    field_name = 'Internal Note'
    barcode = request.form['barcode']
    # try to find the item in alma or return an error
    try:
        item = _alma_get(app.config['API_HOST'] +
                         app.config['GET_BY_BARCODE'].format(barcode))
    except requests.exceptions.RequestException as e:
        return e.args[0]
    item_record = item.decode(encoding='utf-8')
    item_root = _parse_item(item_record)
    # grab some fields from retrieved item to show operator
    try:
        retrieved_barcode = item_root.find(app.config['XPATH']['barcode']).text
        retrieved_title = item_root.find(app.config['XPATH']['title']).text
    except Exception:
        # BUG FIX: was a bare 'except:'; also return the abort() result for
        # consistency with get_alt_call_input (abort raises either way).
        return abort(500)
    return render_template('get_input.html',
                           barcode=barcode,
                           field_name=field_name,
                           item=item_record,
                           update_function='update_int_note_field',
                           retrieved_barcode=retrieved_barcode,
                           retrieved_title=retrieved_title)
@app.route('/update-field/int-note/update', methods=['POST'])
@auth_required
def update_int_note_field():
    """Apply the submitted internal note, then return to the form."""
    field = 'int_note'
    barcode = request.form['barcode']
    new_val = request.form['new_val']
    updated_result = _update_field(request.form['item-record'], field, new_val)
    if not updated_result:
        return updated_result
    # save the result of the post transaction as a message to be displayed on the next page
    flash("Changed {} for {} to {}".format(field, barcode, new_val))
    return redirect(url_for('update_int_note'))
def fetch(item):
    """Convenience wrapper: fetch the raw Alma item record for barcode *item*."""
    return _alma_get(app.config['API_HOST'] +
                     app.config['GET_BY_BARCODE'].format(item))
# local functions
def _alma_authenticate(username, password):
    """POST the password to the Alma users API and return the HTTP status.

    Callers treat 204 as a successful credential check.
    """
    resp = requests.post(
        app.config['API_HOST'] + app.config['USERS'].format(username),
        headers={
            'Authorization' : 'apikey ' + app.config['USERS_API_KEY'],
            'Exl-User-Pw' : password,
        })
    return resp.status_code
def _alma_authorize(username):
    """True when the user's Alma group matches AUTHORIZED_GROUP."""
    user = _alma_get(app.config['API_HOST'] +
                     app.config['USERS'].format(username),
                     api='users',
                     fmt='json')
    return user['user_group']['value'] == app.config['AUTHORIZED_GROUP']
def _alma_get(resource, params=None, api='bibs', fmt='xml'):
    """GET *resource* from the Alma API.

    *api* selects which API key is sent ('bibs' or 'users'; anything else
    aborts with 500).  fmt='json' returns parsed JSON, otherwise the raw
    response body is returned.  Raises for non-2xx responses.
    """
    keys = {
        'bibs': app.config['BIBS_API_KEY'],
        'users': app.config['USERS_API_KEY'],
    }
    if api not in keys:
        abort(500)
    params = params or {}
    params['apikey'] = keys[api]
    params['format'] = fmt
    resp = requests.get(resource, params=params)
    resp.raise_for_status()
    return resp.json() if fmt == 'json' else resp.content
def _alma_put(resource, payload=None, params=None, fmt='xml'):
    """PUT an XML *payload* to *resource* on the Alma API.

    fmt='json' returns parsed JSON, otherwise the raw response body.
    Raises for non-2xx responses.
    """
    payload = payload or {}
    params = params or {}
    params['format'] = fmt
    resp = requests.put(
        resource,
        headers={'Content-type': 'application/xml',
                 'Authorization' : 'apikey ' + app.config['BIBS_API_KEY']},
        params=params,
        data=payload)
    resp.raise_for_status()
    return resp.json() if fmt == 'json' else resp.content
def _parse_item(item_record):
'''
Returns Element tree from string
'''
try:
root = ET.fromstring(item_record)
return root
except ET.ParseError as e:
return e.args[0]
def _update_field(item_record, field, new_val):
    '''
    Update a field in a MARCXML item record.

    Pass the record as an XML string, the field name to be updated (must
    also be configured in the XPATH setting), and the new value.  Returns
    the Alma PUT response on success, or an error string on failure.
    '''
    item_root = _parse_item(item_record)
    # get id (used for the audit log entry below)
    mms_id = item_root.find(app.config['XPATH']['mms_id']).text
    # update field
    item_root.find(app.config['XPATH'][field]).text = new_val
    # if field is alt call, enforce alt call type
    if field == 'alt_call':
        try:
            # return value unused; the helper mutates item_root in place
            _enforce_call_type(item_root)
        except Exception:
            # BUG FIX: was a bare 'except:'; Exception still reports the
            # failure without swallowing SystemExit/KeyboardInterrupt
            return "could not enforce call type, aborting"
    #try to post the modified item record back up to Alma or return an error
    item_link = item_root.attrib['link']
    updated_item = ET.tostring(item_root, encoding="utf-8")
    try:
        result = _alma_put(item_link, payload=updated_item)
        audit_log.info('{operator}\t{mms_id}\t{type}\t{value}'.format(operator=session['username'],
                                                                      mms_id=mms_id,
                                                                      type=field,
                                                                      value=new_val))
        return result
    except requests.exceptions.RequestException as e:
        return e.args[0]
def _enforce_call_type(item_root):
    """Force the alternative call number type element to '8' (Other scheme)."""
    ac_type = item_root.find(app.config['XPATH']['alt_call_type'])
    ac_type.text = "8"
    ac_type.attrib['desc'] = "Other scheme"
# run this app (development server only; binds all interfaces)
if __name__ == "__main__":
    app.run(debug=True, host="0.0.0.0")
|
from ._title import Title
from plotly.graph_objs.histogram2d.colorbar import title
from ._tickformatstop import Tickformatstop
from ._tickfont import Tickfont
|
def get_password_level(pw):
    """Score a password from 0 (weak/invalid) to 3.

    Passwords shorter than 8 characters, or containing a space, score 0.
    Otherwise one point is awarded for each character class present:
    letters, digits, and anything else (specials).
    """
    if len(pw) < 8 or " " in pw:
        return 0
    has_letter = any(c.isalpha() for c in pw)
    has_digit = any(c.isnumeric() for c in pw)
    has_special = any(not c.isalpha() and not c.isnumeric() for c in pw)
    return int(has_letter) + int(has_digit) + int(has_special)
def main():
    """Prompt for a password and print its security level."""
    pw = input("Enter password: ")
    print("Security level:", get_password_level(pw))
if __name__ == "__main__":
    # Guard added: importing this module no longer triggers the prompt.
    main()
# -*- coding: utf-8 -*-
'''测试函数的定义和调用'''
# a function WITH a return value
def func01():
    """Print an English greeting and return a Chinese one."""
    greeting = '世界,你好!'
    print('hello world!')
    return greeting
# a function WITHOUT a return value
def func02():
    """Print a message; implicitly returns None."""
    print('good luck!')
# call the functions directly
func01()
func02()
print('------分割线-------')
# call and print the return values (func02 returns None)
print(func01())
print(func02())
# call repeatedly in a loop
print('------分割线-------')
for _ in range(2):
    print(func01())
    print(func02())
class Solution:
    def change(self, amount: int, coins: "list[int]") -> int:
        """Count the distinct coin combinations that sum to *amount*.

        Classic unbounded-knapsack DP over a (len(coins)+1) x (amount+1)
        table.  BUG FIX: the original annotated coins as List[int] without
        importing typing.List, so defining the class raised NameError; a
        string annotation avoids any import.
        """
        row = amount + 1
        column = len(coins) + 1
        # K[i][j]: ways to make amount j using only the first i coins
        K = [[-1 for x in range(row)] for y in range(column)]
        for j in range(row):
            K[0][j] = 0          # no coins -> no way to make a positive amount
        for i in range(column):
            K[i][0] = 1          # one way (the empty combination) to make 0
        for i in range(1, column):
            for j in range(1, row):
                if coins[i-1] <= j:
                    # either skip coin i-1, or use it (at least) once more
                    K[i][j] = K[i-1][j] + K[i][j - coins[i-1]]
                else:
                    K[i][j] = K[i-1][j]
        return K[-1][-1]
class Solution:
    def change(self, amount: int, coins: "list[int]") -> int:
        """Count the distinct coin combinations that sum to *amount*.

        Space-optimised 1-D DP: iterating coins in the outer loop counts
        each combination exactly once, regardless of coin order.
        BUG FIX: the original annotated coins as List[int] without importing
        typing.List, so defining the class raised NameError; a string
        annotation avoids any import.
        """
        ways = [0] * (amount + 1)
        ways[0] = 1  # the empty combination makes amount 0
        for denom in coins:
            for price in range(denom, amount + 1):
                ways[price] += ways[price - denom]
        return ways[amount]
|
def program():
    """Read quadruples from manin.txt and write matched values to manout.txt.

    Each input line holds four integers 'Ishr Clem IshrMan ClemMan'.  The
    sums/differences Ishr±IshrMan are compared (as strings, preserving the
    original behaviour) against Clem±ClemMan, and the first matching value
    is appended to the output file.
    """
    # BUG FIX: the original opened manout.txt and never closed it, so
    # buffered output could be lost; 'with' guarantees flush + close.
    with open("manout.txt", "w") as fr:
        with open("manin.txt", "r") as f:
            data = f.readlines()
        for line in data:
            words = line.split()
            Ishr = int(words[0])
            Clem = int(words[1])
            IshrMan = int(words[2])
            ClemMan = int(words[3])
            print(words)
            DIshManNeg = Ishr - IshrMan
            DIshManPos = Ishr + IshrMan
            DCleManNeg = Clem - ClemMan
            DCleManPos = Clem + ClemMan
            print(DCleManNeg)
            print(DIshManPos)
            DIshManNeg = str(DIshManNeg)
            DIshManPos = str(DIshManPos)
            DCleManPos = str(DCleManPos)
            DCleManNeg = str(DCleManNeg)
            if DIshManPos == DCleManNeg:
                fr.write(DIshManPos)
            elif DIshManNeg == DCleManPos:
                fr.write(DIshManNeg)
            elif DIshManPos == DCleManPos:
                fr.write(DIshManPos)
            elif DIshManNeg == DCleManNeg:
                fr.write(DIshManNeg)
if __name__ == "__main__":
    # Guard added so importing this module does not immediately run it.
    program()
|
from string import ascii_uppercase as az

class CaesarCipher(object):
    """Caesar cipher over the uppercase ASCII alphabet.

    BUG FIX: string.maketrans was removed in Python 3 (its import raised
    ImportError); translation tables are now built with str.maketrans.
    Characters outside A-Z pass through unchanged; input is uppercased.
    """
    def __init__(self, shift):
        # Alphabet rotated left by *shift* positions.
        self.shifted = az[shift:] + az[:shift]
        self.decode_trans = str.maketrans(self.shifted, az)
        self.encode_trans = str.maketrans(az, self.shifted)
    def decode(self, s):
        """Shift *s* back to plaintext."""
        return s.upper().translate(self.decode_trans)
    def encode(self, s):
        """Shift *s* forward to ciphertext."""
        return s.upper().translate(self.encode_trans)
|
import multiprocessing
import psutil
MEMORY_PER_JOB = 1024 * 1024 * 1024
def _calculate_jobs():
    """Parallel job count: capped by CPU count and by available RAM,
    allowing one job per MEMORY_PER_JOB bytes."""
    by_cpu = multiprocessing.cpu_count()
    by_mem = int(psutil.virtual_memory().available / MEMORY_PER_JOB)
    return min(by_cpu, by_mem)
def run(param):
    """Resolve the 'make_jobs' setting: an explicit config value wins,
    otherwise it is derived from the machine's resources."""
    options = param['config_option_attribute']
    if 'make_jobs' in options:
        jobs = options['make_jobs']
    else:
        jobs = _calculate_jobs()
    return {'make_jobs': jobs}
|
#!/usr/bin/python
# -*- coding: utf8 -*-
# auth : bluehdh0926@gmail.com, suck0818@gmail.com
# setting management json
import json, os, platform
class syncn(object):
    """Manage SyncN configuration stored as JSON next to the executable."""
    def __init__(self, path='', debug=False):
        """Load setting.json and syncn.json from the current directory.

        Missing files are reported but not fatal.  *path* is currently
        unused (kept for backward compatibility with existing callers).
        """
        try:
            self.debug = debug
            # Default so attribute access is safe when syncn.json is absent.
            self.option = {}
            # BUG FIX: the original concatenated a hard-coded '\\' separator,
            # so the files were never found on non-Windows systems;
            # os.path.join is portable and identical on Windows.
            self.path = os.path.join(os.getcwd(), "setting.json")
            self.pathOption = os.path.join(os.getcwd(), "syncn.json")
            if self.debug: print("exe location : {0}".format(self.path))
            if os.path.isfile(self.path):
                with open(self.path, 'r') as fp:
                    self.config = json.loads(fp.read())
            else:
                print("no have setting.json")
            if os.path.isfile(self.pathOption):
                with open(self.pathOption, 'r') as fp:
                    self.option = json.loads(fp.read())
            else:
                # BUG FIX: the original printed self.option here before it
                # was ever assigned, raising an error that the broad except
                # below silently reported.
                print("no have SyncN.json")
        except Exception as e:
            print(__file__, e)
    def readSetting(self):
        """Return the parsed contents of setting.json."""
        with open(self.path, 'r') as fp:
            return json.loads(fp.read())
    def writeSetting(self, data, mode='w'):
        """Serialise *data* (dict or pre-encoded string) to setting.json."""
        data = json.dumps(data) if isinstance(data, dict) else data
        # 'with' ensures the handle is flushed and closed (original leaked it)
        with open(self.path, mode) as setting:
            setting.write(data)
    def addSetting(self, key, value):
        """Insert or overwrite one key in setting.json and persist it."""
        rs = self.readSetting()
        rs[str(key)] = value
        return self.writeSetting(rs)
if __name__ == '__main__':
    import time
    # manual smoke test: add one key to setting.json
    test = syncn()
    test.addSetting("tmp", "asd")
    # print(test.config)
|
def is_pandigital(n):
    """True when both the last 9 and the first 9 digits of *n* are
    1-9 pandigital (each of 1..9 exactly once, no zeros)."""
    s = str(n)
    # check the tail (via the reversed iterator) and then the head
    for digits in (reversed(s), iter(s)):
        seen = set()
        for _ in range(9):
            try:
                d = next(digits)
            except StopIteration:
                return False  # fewer than 9 digits available
            if d == '0' or d in seen:
                return False
            seen.add(d)
    return True
fibo = [1, 1]
# Precompute half a million Fibonacci numbers up front: slow and memory
# heavy, but the first fully pandigital hit may come late in the sequence.
for i in range(2, 500000):
    fibo.append(fibo[i-1] + fibo[i-2])
print("Fibo computed")
# Scan for the first Fibonacci number whose head AND tail are pandigital.
for i,f in enumerate(fibo):
    if i % 1000 == 0:
        # progress marker every 1000 indices
        print(i)
    if is_pandigital(f):
        print(i, f)
        break
print(fibo[540])
print(is_pandigital(fibo[540]))
count=0
def is_palindrome(word):
    """
    Print TRUE/FALSE depending on whether *word* is a palindrome, and also
    return the result as a bool (the original returned None and its
    docstring wrongly claimed it returned the reversed word).

    :param word: string to test
    :return: True if *word* equals its reverse, else False
    """
    reverse = ""
    idx = -1
    # build the reversed string by negative indexing from the end
    for _ in word:
        reverse += word[idx]
        idx -= 1
    result = reverse == word
    # keep the original console output
    if result:
        print("TRUE")
    else:
        print("FALSE")
    return result
# demo calls: print TRUE/FALSE for each sample word
is_palindrome("abba")
is_palindrome("abab")
is_palindrome("tenet")
is_palindrome("banana")
is_palindrome("straw warts")
is_palindrome("a")
is_palindrome("")
#Advent of Code 2020 - Day8
import copy
def get_input(file):
    """Return the puzzle input at *file* as a list of lines.

    Splits on '\n' (not splitlines) so a trailing newline yields a final
    empty entry, matching the original behaviour.
    """
    with open(file) as handle:
        text = handle.read()
    return text.split('\n')
# puzzle inputs must sit next to this script
all_conditions = get_input('day8_input.txt')
all_conditions_test = get_input('day8_test.txt')
def day8(input_val):
    """Run the handheld's boot code (AoC 2020 day 8, part 1).

    Returns (accumulator, looped): looped is True when some instruction is
    about to execute a second time (infinite loop), False when execution
    runs off the end of the program.
    """
    accumulator = 0
    pc = 0
    seen = set()
    looped = False
    while pc < len(input_val):
        if pc in seen:
            looped = True
            break
        seen.add(pc)
        op, arg = input_val[pc].split(' ')
        if op == 'acc':
            accumulator += int(arg)
            pc += 1
        elif op == 'jmp':
            pc += int(arg)
        else:
            # 'nop': fall through to the next instruction
            pc += 1
    return accumulator, looped
def day8_pt2(input_val):
    """Find the single jmp<->nop swap that lets the program terminate
    (AoC 2020 day 8, part 2); returns the fixed program's accumulator."""
    swap = {'nop': 'jmp', 'jmp': 'nop'}
    for idx in range(len(input_val)):
        op, arg = input_val[idx].split(' ')
        candidate = copy.copy(input_val)
        if op in swap:
            candidate[idx] = swap[op] + ' ' + arg
        acc, looped = day8(candidate)
        if not looped:
            return acc
#print (day8(all_conditions))
# part 2 answer
print (day8_pt2(all_conditions))
import cmd
import connection
class pyccConsole(cmd.Cmd):
    """Interactive console for the pycc backend.

    Commands are handed to the logic thread through todoQueue + notifyEvent;
    most commands then wait (with a timeout) on the logic thread's
    syncRequestEvent for completion.
    """
    debug=True
    prompt = '> '
    def __init__(self, backendConnection, logicThread, todoQueue, notifyEvent, *args, **kargs):
        cmd.Cmd.__init__(self,*args,**kargs)
        self.backendConnection = backendConnection
        self.logicThread = logicThread
        # work items for the logic thread; notifyEvent wakes it up
        self.todoQueue = todoQueue
        self.notifyEvent = notifyEvent
    def completenames(self, text, *ignored):
        ''' add blank to commands for faster completion'''
        return [complete + ' ' for complete in cmd.Cmd.completenames(self, text, *ignored)]
    def do_status(self, args):
        ''' list information about open connections and other backend information'''
        self.todoQueue.put(('status', None))
        self.notifyEvent.set()
        # clear, then wait: the logic thread sets the event when done
        self.logicThread.syncRequestEvent.clear()
        if not self.logicThread.syncRequestEvent.wait(1):
            print('request timed out')
    def do_connectTo(self, args):
        ''' open connection to pycc (relay) server'''
        self.todoQueue.put(('connectTo', args))
        self.notifyEvent.set()
        self.logicThread.syncRequestEvent.clear()
        if not self.logicThread.syncRequestEvent.wait(1):
            print('request timed out')
    def do_shutdown(self, args):
        ''' shutdown pycc backend and exit console'''
        self.todoQueue.put(('shutdown', None))
        self.notifyEvent.set()
        if not self.logicThread.syncRequestEvent.wait(1):
            print('request timed out')
        # returning True tells cmd.Cmd to leave the command loop
        return True
    def emptyline(self):
        # override: an empty line does nothing (default repeats last command)
        pass
    def do_EOF(self, line):
        ''' close console'''
        return True
    def do_list(self, args):
        ''' ask the backend for its list (NOTE: docstring previously
        copy-pasted from sendMessage)'''
        self.todoQueue.put(('list', None))
        self.notifyEvent.set()
        self.logicThread.syncRequestEvent.clear()
        if not self.logicThread.syncRequestEvent.wait(0.2):
            print('request timed out')
    def do_sendMessage(self, args):
        ''' sends message to other chat user'''
        args = args.split(' ')
        if len(args) == 1: # no message
            # interactive multi-line input; a trailing backslash continues
            # the message on the next line
            message = ''
            readMore = True
            while readMore:
                newMessage = input('|')
                if newMessage:
                    if newMessage[-1] == '\\':
                        message += newMessage[0:-1] + '\n'
                    else:
                        message += newMessage + '\n'
                else:
                    readMore = False
        else:
            message = " ".join(args[1:])
        # args[0] is the recipient; strip the trailing newline of message
        self.todoQueue.put(('sendMessage', args[0], message[0:-1]))
        self.notifyEvent.set()
        self.logicThread.syncRequestEvent.clear()
        if not self.logicThread.syncRequestEvent.wait(0.2):
            print('request timed out')
    def complete_sendMessage(self, text, line, start_index, end_index):
        # tab-complete recipient names from the logic thread's account table
        try:
            self.logicThread.accountLock.acquire()
            if text:
                return [
                    useraccount + ' ' for useraccount in self.logicThread.accounts
                    if useraccount.startswith(text)
                ]
            else:
                return list(self.logicThread.accounts.keys())
        finally:
            self.logicThread.accountLock.release()
|
#Algorithme d'optimisation
import math as m
import time
import cvrp.const as const
import cvrp.utile as utile
import cvrp.learning as learn
import cvrp.route as route
import cvrp.linKernighan as LK
import cvrp.ejectionChain as EC
import cvrp.crossExchange as CE
import cvrp.ClarkeWright as CW
def gravity_center(route, inst):
    """Centroid (x, y) of the points of *route*, looked up in *inst*."""
    xg = 0
    yg = 0
    for stop in route:
        point = inst[stop]
        xg += point[0]
        yg += point[1]
    n = len(route)
    return (xg/n, yg/n)
def width(i, j, G):
    """Width contribution of edge (i, j) relative to the depot-to-G axis."""
    theta = m.acos(G[1]/utile.distance(G, (0, 0)))
    sin_t = m.sin(theta)
    cos_t = m.cos(theta)
    proj_i = (i[0]*sin_t, i[1]*cos_t)
    proj_j = (j[0]*sin_t, j[1]*cos_t)
    d_i = utile.distance(i, proj_i)
    d_j = utile.distance(j, proj_j)
    return abs(d_i - d_j)
def cost(i, j, p):
    """Edge length inflated by 10% per accumulated penalty unit *p*."""
    penalty_factor = 1 + 0.1*p
    return utile.distance(i, j)*penalty_factor
def depth(i, j):
    """Depot distance of the deeper endpoint of the edge (i, j)."""
    origin = (0, 0)
    return max(utile.distance(i, origin), utile.distance(j, origin))
def max_depth(inst):
    """Largest depot distance over all points of the instance (0 if empty)."""
    return max((utile.distance(p, (0, 0)) for p in inst), default=0)
def penalization_function(lw, lc, ld, max_d):
    """Build an edge-badness function b(i, j, G, p) weighting width (lw),
    penalised cost (lc) and relative depth (ld, normalised by max_d)."""
    def b(i, j, G, p):
        base = lw * width(i, j, G) + lc * cost(i, j, p)
        scaled = base * (depth(i, j)/max_d)**(ld/2)
        return scaled/(1 + p)
    return b
def bad_edge(b, p, routes, inst, fixed):
    """Return [badness, (i, j)] for the worst eligible edge of the solution.

    b     -- penalization function b(pt_i, pt_j, G, penalty)
    p     -- matrix of accumulated penalties per edge
    fixed -- edges (in either orientation) that must not be selected
    Edges touching the depot (customer index 0) are skipped.
    """
    cand = [0, (0, 0)]
    for r in routes:
        G = gravity_center(r, inst)
        for i in range(len(r)-1):
            pi = r[i]
            pj = r[i+1]
            b_ij = b(inst[pi], inst[pj], G, p[pi][pj])
            # keep the worst non-depot, non-fixed edge seen so far
            if b_ij > cand[0] and pi != 0 and pj != 0 and (pi, pj) not in fixed and (pj, pi) not in fixed:
                cand[0] = b_ij
                cand[1] = (pi, pj)
    return cand
def global_opti(solution, inst, demand,capacity, v, l):
    """One global improvement pass: for every edge of *solution*, try
    ejection chains + cross-exchange + Lin-Kernighan around it, and keep
    the cheapest solution found."""
    edges = learn.all_edges(solution)
    fixed_edges = []
    c_init = route.cost_sol(solution, inst, const.quality_cost)
    routes = route.copy_sol(solution)
    new_solution = route.copy_sol(routes)
    for e in edges:
        # random point on the edge used as the move's focal point
        cp = utile.rd_point(e, solution, inst)
        routes = EC.ejection_chain(l, cp, v, routes, inst,
                                   demand,capacity, fixed_edges, "DE")
        # drop routes reduced to depot-only (length 2)
        for i in routes:
            if len(i) == 2:
                routes = EC.reject(i, routes, v, inst, demand,capacity)
        for i in range(len(routes)):
            routes[i] = LK.LK(routes[i], inst)
        # apply cross-exchange
        routes = CE.cross_exchange(cp, v, routes, inst, demand,capacity, fixed_edges, "DE")
        # apply LK
        for i in range(len(routes)):
            routes[i] = LK.LK(routes[i], inst)
        c_final = route.cost_sol(routes, inst, const.quality_cost)
        # keep the new routes only when they improve on the best cost so far
        if c_init - c_final > 0:
            c_init = c_final
            new_solution = route.copy_sol(routes)
    return new_solution
def optimisation_heuristic(initial_routes, inst, demand,capacity, lam, mu, nu, l, max_d, v, fixed_edges):
    """Guided local search for the CVRP.

    Starting from a Clarke-Wright solution, repeatedly penalise the worst
    edge (per the current penalization function) and repair locally with
    ejection chains, cross-exchange and Lin-Kernighan.  The time budget
    scales with the instance size (len(demand)/8 seconds without
    improvement); the penalization function rotates every 5 stale seconds
    and the search restarts from the incumbent every 10.
    Returns (initial_routes, best_routes).
    """
    tps1 = time.time()
    # candidate penalization functions, rotated when the search stalls
    B = [penalization_function(1, 0, 0, max_d), penalization_function(1, 1, 0, max_d), penalization_function(
        1, 0, 1, max_d), penalization_function(1, 1, 1, max_d), penalization_function(0, 1, 0, max_d), penalization_function(0, 1, 1, max_d)]
    b_i = 0
    b = B[b_i]
    # p[i][j]: accumulated penalty of edge (i, j)
    p = [[0 for j in range(len(inst))] for i in range(len(inst))]
    # detailed_cust[i-1]: index of the route serving customer i
    detailed_cust = [0 for i in range(len(inst))]
    for r in range(len(initial_routes)):
        for i in initial_routes[r]:
            detailed_cust[i-1] = r
    initial_routes = CW.ClarkeWright(
        initial_routes, inst, demand,capacity, lam, mu, nu, detailed_cust)
    routes = route.copy_sol(initial_routes)
    # routes2 holds the incumbent (best) solution
    routes2 = route.copy_sol(routes)
    c_init = route.cost_sol(routes, inst,const.quality_cost)
    tps2 = time.time()
    tpsGS = time.time()
    tpsCH = time.time()
    while tps2-tps1 < len(demand)/8:
        # find the worst edge
        worst = bad_edge(b, p, routes, inst, fixed_edges)[1]
        p[worst[0]][worst[1]] += 1
        p[worst[1]][worst[0]] += 1
        # apply ejection-chain
        cp = utile.rd_point(worst, routes, inst)
        routes = EC.ejection_chain(l, cp, v, routes, inst,
                                   demand,capacity, fixed_edges, "RD")
        # drop routes reduced to depot-only (length 2)
        for i in routes:
            if len(i) == 2:
                routes = EC.reject(i, routes, v, inst, demand,capacity)
        for i in range(len(routes)):
            routes[i] = LK.LK(routes[i], inst)
        # apply cross-exchange
        routes = CE.cross_exchange(cp, v, routes, inst,
                                   demand,capacity, fixed_edges, "RD")
        # apply LK
        for i in range(len(routes)):
            routes[i] = LK.LK(routes[i], inst)
        #routes = global_opti(routes,inst,demand,v,l)
        c_final = route.cost_sol(routes, inst,const.quality_cost)
        if c_final < c_init:
            routes2 = route.copy_sol(routes) # new optimum
            for i in routes2:
                if len(i) == 2:
                    routes2 = EC.reject(i, routes2, v, inst, demand,capacity)
            c_init = route.cost_sol(routes2,inst,const.quality_cost)
            print(round(tps2-tps1,2), round(c_init,3))
            # an improvement resets every stall timer
            tps1 = time.time()
            tpsCH = time.time()
            tpsGS = time.time()
        if tps2-tpsGS > 10:
            # return to the last best solution, for gs iterations
            routes = route.copy_sol(routes2)
            tpsGS = time.time()
        if tps2-tpsCH > 5:
            # stalled for 5s: rotate to the next penalization function
            # and reset all penalties
            tpsCH = time.time()
            b_i += 1
            if b_i < len(B):
                b = B[b_i]
                p = [[0 for j in range(len(inst))]
                     for i in range(len(inst))]
            else:
                b_i = 0
                b = B[b_i]
                p = [[0 for j in range(len(inst))]
                     for i in range(len(inst))]
        tps2 = time.time()
    # final clean-up: drop empty/depot-only routes and re-optimise each tour
    for i in (routes2):
        if len(i) == 2:
            routes2 = EC.reject(i, routes2, v, inst, demand,capacity)
        if len(i) == 1:
            routes2.remove(i)
    for i in range(len(routes2)):
        routes2[i] = LK.LK(routes2[i], inst)
    return initial_routes, routes2
# libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Central-limit-theorem demo on Melbourne house prices: draw 100 samples of
# 1000 rows each and histogram the sample means of 'Price'.
df = pd.read_csv('MELBOURNE_HOUSE_PRICES_LESS.csv')

sample_mean = []
for iteration in range(100):
    df_subsample = df.sample(1000)
    sample_mean.append(df_subsample['Price'].mean())

plt.hist(sample_mean, bins=10, density=True, histtype='step')

population_mean = df['Price'].mean()
# BUG FIX: the original stored .std() (standard deviation) in a variable
# printed under the label 'Population variance'. Use .var() so the printed
# value matches its label.
population_variance = df['Price'].var()
print('Population mean:', population_mean)
print('Population variance:', population_variance)

mean_sample_mean = np.mean(sample_mean)
# Standard deviation of the sampling distribution (the standard error),
# renamed from the misleading 'mean_sample_variance'.
std_sample_mean = np.std(sample_mean)
print(std_sample_mean)

# How far the average of the sample means sits from the population mean.
difference_mean = population_mean - mean_sample_mean
print(difference_mean)

plt.title('Histogram of Sample Mean Price')
plt.savefig('out.png', dpi=128)
plt.close()
|
# -*- coding: utf-8 -*-
import scrapy
import os
import csv
class MyradiosSpider(scrapy.Spider):
    """Crawl my-radios.com: country index -> station pages -> (country, title) CSV rows."""
    name = 'myradios'
    allowed_domains = ['my-radios.com']
    start_urls = ['http://my-radios.com/']

    def parse(self, response):
        """Follow each country link on the front page, carrying the country name in meta."""
        datas = response.xpath('.//*[@class="list-inline intro-social-buttons"]/li').extract()
        for data in datas:
            sel = scrapy.Selector(text=data)
            link = sel.xpath('.//a/@href').extract_first()
            country = sel.xpath('.//a/span/text()').extract_first()
            yield scrapy.Request(response.urljoin(link), callback=self.getstations, meta={
                'country': country
            })

    def getstations(self, response):
        """Follow every station link on a country page, propagating the country."""
        links = response.xpath('.//*[@class="col-sm-3"]/a/@href').extract()
        for link in links:
            yield scrapy.Request(response.urljoin(link), callback=self.getdata, meta={
                'country': response.meta.get('country')
            })

    def getdata(self, response):
        """Append one (country, title) row to myradios.csv.

        BUG FIX: the original wrote the row TWICE when the CSV did not exist
        yet (once inside the `'myradios.csv' not in os.listdir(...)` branch
        and once unconditionally right after). Mode 'a' creates the file on
        demand, so no existence check is needed at all.
        """
        title = response.xpath('.//*[@class="col-sm-4"]/h1/text()').extract_first()
        row = [response.meta.get('country'), title]
        # newline='' avoids blank lines between rows on Windows (csv docs).
        with open("myradios.csv", "a", newline='') as f:
            csv.writer(f).writerow(row)
        print(row)
|
import unittest
from katas.kyu_7.alphabetize_by_nth_char import sort_it
class SortItTestCase(unittest.TestCase):
    """Unit tests for sort_it(): orders comma-separated words by their n-th character."""

    def test_equals(self):
        """Two words ordered by their 2nd character."""
        result = sort_it('bid, zag', 2)
        self.assertEqual(result, 'zag, bid')

    def test_equals_2(self):
        """Four words that differ only in the 2nd character."""
        expected = 'ball, bell, bill, bull'
        self.assertEqual(sort_it('bill, bell, ball, bull', 2), expected)

    def test_equals_3(self):
        """Ordering by the 3rd character."""
        expected = 'bee, dog, eel, cat'
        self.assertEqual(sort_it('cat, dog, eel, bee', 3), expected)
|
# Exercício 10.1 - Livro
class Televisao:
    """A television set (book exercise 10.1)."""

    def __init__(self):
        # Default state: powered off, on channel 0, with size/brand unset.
        self.ligada = False   # power state
        self.canal = 0        # current channel
        self.tamanho = 0      # screen size in inches
        self.marca = ''       # brand name
# Demo: describe two sample TVs on stdout.
# (The 'Samsnug' spelling is kept exactly as in the exercise data.)
t1 = Televisao()
t1.tamanho, t1.marca = 32, 'AOC'
print(f'TV {t1.marca} de {t1.tamanho} polegadas')

t2 = Televisao()
t2.tamanho, t2.marca = 42, 'Samsnug'
print(f'TV {t2.marca} de {t2.tamanho} polegadas')
|
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
# File: osc4py3/demos/speedudpcommon.py
# <pep8 compliant>
"""Common data for testing UDP transmission speed.
"""
MESSAGES_COUNT = 1000
IP = "127.0.0.1"
PORT = 6503
|
from flask import Flask, render_template
# WSGI application object; 'HelloApp' is the import name Flask uses to
# locate templates and static files.
app = Flask('HelloApp')
@app.route('/')
def helloWorld():
    """Render the landing page with a title and a greeting."""
    context = {
        'title': 'HELLOOOOOO WORLLLLLLD',
        'hello': 'Hello World!!!!!',
    }
    return render_template('layout.html', **context)
if __name__ == '__main__':
    # Debug mode enables the reloader and the interactive traceback page.
    app.run(debug=True)
|
from decodeNYTpage import decodeWebPage
def writeToFile(content):
    """Write *content* to NYT_webpages.txt, replacing any previous contents.

    UTF-8 is specified explicitly so the output does not depend on the
    platform's default locale encoding.
    """
    with open('NYT_webpages.txt', 'w', encoding='utf-8') as open_file:
        open_file.write(content)
if __name__=="__main__":
writeToFile(decodeWebPage())
|
#Area of rectangle
# Read the sides as integers (no validation: non-numeric input raises
# ValueError) and print length * breadth.
l=int(input("Enter length"))
b=int(input("Enter breadth"))
area=l*b
print("area is",area)
import datetime
from collections import OrderedDict
from django.db.models import Count, Q
from django.db.models.functions import TruncDate
from django.utils import timezone
from django.utils.translation import gettext as _
from colossus.apps.subscribers.constants import ActivityTypes
from colossus.apps.subscribers.models import Activity
class Chart:
    """Base class for chart configurations consumed by the front-end charting library."""

    def __init__(self, chart_type):
        # e.g. 'bar' or 'line'; exposed read-only via get_chart_type().
        self._chart_type = chart_type

    def get_chart_type(self):
        """Return the chart type string given at construction."""
        return self._chart_type

    def get_data(self):
        """Labels/datasets payload; concrete charts must implement this."""
        raise NotImplementedError

    def get_options(self):
        """Rendering options payload; concrete charts must implement this."""
        raise NotImplementedError

    def get_settings(self):
        """Assemble the complete settings dict handed to the charting library."""
        return {
            'type': self.get_chart_type(),
            'data': self.get_data(),
            'options': self.get_options(),
        }
class SubscriptionsSummaryChart(Chart):
    """
    Build the last 30 days subscriptions summary in the list summary page.
    The solid blue bars/line show the number of subscriptions, the dashed
    red line shows the number of unsubscriptions.
    """

    def __init__(self, mailing_list):
        super().__init__(chart_type='bar')
        self.mailing_list = mailing_list

    def get_data(self):
        """Return the Chart.js `data` payload: day labels plus two datasets."""
        thirty_days_ago = timezone.now() - datetime.timedelta(30)
        # Group-by query returning the counts for subscribe actions and
        # unsubscribe actions per day. Output format:
        # <QuerySet [
        #     {'trunc_date': datetime.date(2018, 6, 10), 'subscribed': 1, 'unsubscribed': 0},
        #     {'trunc_date': datetime.date(2018, 6, 11), 'subscribed': 3, 'unsubscribed': 2},
        # ]>
        subscribed_expression = Count('id', filter=Q(activity_type=ActivityTypes.SUBSCRIBED))
        unsubscribed_expression = Count('id', filter=Q(activity_type=ActivityTypes.UNSUBSCRIBED))
        activities = Activity.objects \
            .filter(subscriber__mailing_list=self.mailing_list, date__gte=thirty_days_ago) \
            .values(trunc_date=TruncDate('date')) \
            .annotate(subscribed=subscribed_expression, unsubscribed=unsubscribed_expression) \
            .order_by('trunc_date')

        # First initialize `series` with one zeroed slot per displayed day so
        # days without any activity still render an (empty) bar.
        # NOTE(review): '%-d' is glibc-specific and fails on Windows strftime.
        series = OrderedDict()
        for i in range(-1, 29):  # FIXME: There is an issue with current day, or something related to timezone
            date = timezone.now() - datetime.timedelta(i)
            key = date.strftime('%-d %b, %y')
            series[key] = {'sub': 0, 'unsub': 0, 'order': i}

        # Replace the zeroed entries with the actual counts from the queryset.
        for entry in activities:
            key = entry['trunc_date'].strftime('%-d %b, %y')
            # BUG FIX: the query window (everything since 30 days ago) is
            # wider than the 30 keys built above (tomorrow .. 28 days back),
            # so activity on the oldest one or two days raised KeyError here.
            # Days outside the displayed window are skipped instead.
            if key not in series:
                continue
            series[key]['sub'] = entry['subscribed']
            series[key]['unsub'] = entry['unsubscribed']

        # Flatten to parallel lists, sorted so the oldest day comes first.
        labels = list()
        subscriptions = list()
        unsubscriptions = list()
        for key, value in sorted(series.items(), key=lambda e: e[1]['order'], reverse=True):
            labels.append(key)
            subscriptions.append(value['sub'])
            unsubscriptions.append(value['unsub'])

        data = {
            'labels': labels,
            'datasets': [
                {
                    # BUG FIX: label typo ('Unsubscritions').
                    'label': _('Unsubscriptions'),
                    'borderColor': '#f25b69',
                    'backgroundColor': 'transparent',
                    'data': unsubscriptions,
                    'type': 'line',
                    'borderDash': [10, 5]
                },
                {
                    'label': _('Subscriptions'),
                    'borderColor': '#3a99fc',
                    'backgroundColor': '#3a99fc',
                    'data': subscriptions
                }
            ]
        }
        return data

    def get_options(self):
        """Chart.js options: force the y-axis to start at zero."""
        options = {
            'scales': {
                'yAxes': [{
                    'ticks': {
                        'beginAtZero': True
                    }
                }]
            }
        }
        return options
|
from distutils.core import setup
# NOTE(review): distutils is deprecated and removed in Python 3.12 —
# consider migrating to `from setuptools import setup`; confirm against
# the build toolchain this package targets.
setup(name='uff',
      version='0.6.3',
      description='uff',
      author='Nvidia',
      packages=['uff'],
      )
|
import requests
from bs4 import BeautifulSoup
import pandas as pd
from TickersList import tickers
import re
# Column names that are needed in the pandas dataframe
column_names = ['Ticker','Company Name','BusinessType','Date','Open','High','Low','Beta',
                'VWAP','Market Cap All Classes', 'Dividend',
                'Div Freq', 'P/E Ratio', 'EPS','Yield',
                'P/B Ratio','Exchange']

# One pre-sized column list per field, indexed in step with `tickers`.
tickerDict = {name: [None] * len(tickers) for name in column_names}

# Marker substring of a "dq-card" block -> output column it fills. A card
# matching several markers fills several columns, exactly like the original
# chain of independent `if` tests.
_CARD_FIELDS = {
    'Open': 'Open',
    'High': 'High',
    'Low': 'Low',
    'Beta': 'Beta',
    'VWAP': 'VWAP',
    'All Classes': 'Market Cap All Classes',
    'Div. Frequency': 'Div Freq',
    'P/E Ratio': 'P/E Ratio',
    'EPS': 'EPS',
    'Yield': 'Yield',
    'P/B Ratio': 'P/B Ratio',
}

# Extract the data from the web for each ticker.
for i, ticker in enumerate(tickers):
    tickerDict['Ticker'][i] = ticker
    results = requests.get('https://web.tmxmoney.com/quote.php?qm_symbol=' + ticker)
    soup = BeautifulSoup(results.content, 'lxml')

    # Company name and quote date live in the quote header block.
    compName = soup.find_all('div', class_="quote-company-name")
    tickerDict['Company Name'][i] = compName[0].find('h4').text
    dateTick = compName[0].find('p').text.split('|')
    # BUG FIX: regex patterns are raw strings now ('\s' was an invalid escape).
    tickerDict['Date'][i] = re.sub(r'\s', '', dateTick[1])

    # Most of the info is in the dq-card divs.
    for card in soup.find_all('div', class_="dq-card"):
        card_text = card.text
        for marker, column in _CARD_FIELDS.items():
            if marker in card_text:
                tickerDict[column][i] = card.find('strong').string
        if 'Dividend' in card_text:
            # \xa0 is a non-breaking space in the scraped markup.
            dividend = card.find('strong').string.replace("\xa0", " ")
            tickerDict['Dividend'][i] = dividend
        if 'Exchange' in card_text:
            tickerDict['Exchange'][i] = re.sub(r'\s', '', card.find('strong').string)

# setting up output for Pycharm
desired_width = 320
pd.set_option('display.width', desired_width)
pd.set_option('display.max_columns', 20)
# -- end
df_td = pd.DataFrame(tickerDict, columns=column_names)
print(df_td)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, with_statement
from revolver import command, file, package
from revolver import contextmanager as ctx
def install():
    """Install the stunnel package and enable the stunnel4 service.

    Edits /etc/default/stunnel4 in place (ENABLED=0 -> ENABLED=1) so the
    daemon starts on boot; the sed runs under sudo.
    """
    package.install('stunnel')
    with ctx.sudo():
        file.sed('/etc/default/stunnel4', 'ENABLED=0', 'ENABLED=1')
def ensure():
    """Install and enable stunnel only if the stunnel4 binary is not present."""
    if not command.exists('stunnel4'):
        install()
|
# Tiny guessing game: one guess, then one retry on failure.
numero_a_adivinar = 20
numero_del_usuario = int(input("ingresa un numero del 1 al 30"))
if numero_a_adivinar == numero_del_usuario:
    # BUG FIX: message typo ('mut bien').
    print("muy bien")
else:
    # Give feedback first, then ask for the second attempt.
    print("mal, intentalo devuelta")
    intento_dos = int(input("ingresa un numero del 1 al 30 2"))
    # BUG FIX: the original compared the FIRST guess again, so the second
    # attempt could never win.
    if numero_a_adivinar == intento_dos:
        print("muy bien")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def next_weekday(d, weekday):
    """Return the next date strictly after *d* falling on *weekday* (Mon=0..Sun=6).

    BUG FIX: the module only does `from datetime import *` (in the __main__
    guard), which binds the name `datetime` to the CLASS — so the original
    `datetime.timedelta(...)` raised AttributeError/NameError. Import
    timedelta locally so the function works under either import style.
    """
    from datetime import timedelta
    days_ahead = weekday - d.weekday()
    if days_ahead <= 0:  # Target day already happened this week
        days_ahead += 7
    return d + timedelta(days_ahead)
def newyear(datetime):
    """Greeting during the first three days of January, else False."""
    if datetime.month == 1 and datetime.day < 4:
        return "Happy New " + str(datetime.year)
    return False

def christmas(datetime):
    """Greeting on December 24th, else False."""
    return "Merry Christmas" if (datetime.month == 12 and datetime.day == 24) else False

def erstermai(datetime):
    """Labour Day (May 1st) message, else False."""
    return "Protest!" if (datetime.month == 5 and datetime.day == 1) else False

def towelday(datetime):
    """Towel Day (May 25th) message, else False."""
    return "Towel Day" if (datetime.month == 5 and datetime.day == 25) else False

def jazzday(datetime):
    """International Jazz Day (April 30th) message, else False."""
    return "Jazz?" if (datetime.month == 4 and datetime.day == 30) else False

def worldmusicday(datetime):
    """World Music Day (October 1st) message, else False."""
    return "World Music Day" if (datetime.month == 10 and datetime.day == 1) else False
def beerday(datetime):
    """Return "Beer" on the first Friday of August (International Beer Day), else False.

    BUG FIX: the original tested `(weekday()==4) == (day < 7)`, which is also
    True whenever BOTH sides are False — i.e. any non-Friday on or after the
    7th. It also used `day < 7`, missing the case where the first Friday is
    the 7th itself.
    """
    if datetime.month == 8 and datetime.weekday() == 4 and datetime.day <= 7:
        return "Beer"
    return False
def check_special(datetime):
    """
    Check if any special day is occurring and return its message, or "".

    Each checker returns a message string or False; the first truthy result
    wins. BUG FIX: the original called len() on filter(), which fails on
    Python 3 where filter() returns a lazy iterator; iterating the checkers
    directly works on both Python 2 and 3 and avoids evaluating all of them.
    """
    checkers = (
        newyear,
        christmas,
        erstermai,
        towelday,
        jazzday,
        worldmusicday,
        beerday,
    )
    for check in checkers:
        message = check(datetime)
        if message:
            return message
    return ""
if __name__ == "__main__":
from datetime import *
print check_special(datetime(2018, 8, 3, 18, 00))
|
import requests
import bs4
from bs4 import BeautifulSoup
import re
import pandas as pd
from time import sleep, time
from warnings import warn
import numpy as np
#From year 2000 to 2017
#Scraping the first four pages of each year
pages = [str(i) for i in range(1, 5)]
years_url = [str(i) for i in range(2000, 2018)]

# Accumulators for the scraped movie attributes.
start_time = time()
num_requests = 0
names = []
years = []
imdb_ratings = []
metascores = []
votes = []

# Hard cap on total page fetches. BUG FIX: the original `break` only left
# the inner page loop, so scraping quietly continued with the next year;
# the flag below stops the outer year loop as well.
MAX_REQUESTS = 72
limit_reached = False

#main body
for year_url in years_url:
    if limit_reached:
        break
    for page in pages:
        #request web contents
        response = requests.get('http://www.imdb.com/search/title?release_date=' + \
                                year_url + '&sort=num_votes,desc&page=' + page)
        #pause for a while, try not to be banned
        sleep(np.random.randint(10, 20))
        #print progress info
        num_requests += 1
        elapsed_time = time() - start_time
        print("request: {} frequency: {:.3f}".format(num_requests, num_requests/elapsed_time))
        if response.status_code != 200:
            warn("request: {} status: {}".format(num_requests, response.status_code))
        #stop scraping entirely once the request budget is exhausted
        if num_requests > MAX_REQUESTS:
            warn("number of requests exceed threshold!")
            limit_reached = True
            break
        #parse web content into beautiful soup object
        soup = BeautifulSoup(response.content, 'lxml')
        #find the minimum self-contained section with all the information we need
        movie_containers = soup.find_all('div', class_='lister-item mode-advanced')
        #loop through each section
        for container in movie_containers:
            #we need both IMDb ratings and Metascores
            if container.find('div', class_='ratings-metascore') is not None:
                #movie name
                names.append(container.h3.a.string)
                #release year (digits only)
                year = container.h3.find_all('span')[1].string
                years.append(int(re.sub("[^0-9]", "", year)))
                #IMDb rating
                imdb_ratings.append(float(container.strong.string))
                #Metascore
                metascores.append(int(container.find('div', class_='inline-block ratings-metascore').span.string))
                #vote count (digits only)
                vote = container.find('span', string="Votes:").find_next().string
                votes.append(int(re.sub("[^0-9]", "", vote)))

#build pandas dataframe
movies = pd.DataFrame(
    {'movie': names,
     'year': years,
     'imdb': imdb_ratings,
     'metascore': metascores,
     'vote': votes}
)
#take a peek
movies.head()
|
import json
from ..constantes import *
from util import *
def create_chart(conf, entries):
    """
    Update chart configuration and data in place, then return it.

    conf    -- Highcharts-style configuration dict (mutated and returned)
    entries -- ordered entry objects; items may be None (gaps are skipped)
    """
    # Fill each configured series' 'data' from the entries' field named by
    # the series' 'db' key.
    serie_index = 0
    for serie in conf['series']:
        data = []
        for entry in entries:
            if entry is not None:
                data.append(entry.datatolist(str(serie['db'])))
        conf['series'][serie_index]['data'] = data
        serie_index += 1
    """ Add PlotBands """
    # Scan runs of consecutive entries sharing the same phase and emit one
    # colored x-axis band per run.
    # NOTE(review): entries[n].phase is read BEFORE the `entries[n] is not
    # None` test in the outer condition — confirm entries cannot be None at
    # this point, otherwise the order should be swapped.
    plotBands = []
    last_entry = len(entries)-1
    n = 1
    while n < last_entry and\
        entries[n].phase is not None and\
        entries[n] is not None and\
        entries[n].next().phase is not None:
        begin = entries[n].dt
        phase = entries[n].phase
        n += 1
        # Advance to the end of the current same-phase run.
        while entries[n] is not None and\
            entries[n].phase is not None and\
            entries[n].phase == phase and\
            n < last_entry:
            n += 1
        end = entries[n].dt
        plotBand = {
            'color': PhaseColor[phase],
            'from': datetime_to_timestamp(begin),
            'to': datetime_to_timestamp(end)
        }
        plotBands.append(plotBand)
    conf['xAxis']['plotBands'] = plotBands
    """ Add Labels """
    # Condition strings presumably evaluated elsewhere against entry objects
    # (only the second is referenced by the commented-out block below).
    condition_flag_allumage = '((prec.phase is not None) and (prec.phase is not PHASE_ALLUMAGE))'
    condition_next_is_not_maintien = '((next.phase is not None) and (next.phase is not PHASE_MAINTIEN))'
    labels = json.loads(json.dumps(ChartLabel)) #make a deep copy of original object
    labels['name'] = 'Labels'
    for entry in entries:
        if entry is not None and entry.phase is not None:
            #Label Allumage ("ignition" marker) for entries carrying an event
            if entry.event is not None:
                data = {
                    "x": datetime_to_timestamp(entry.dt),
                    "title": 'Allumage'
                }
                labels['data'].append(data)
            """
            # Label Combustion
            if entry.phase == PHASE_COMBUSTION and\
            entry.prec() is not None and\
            entry.prec().phase is not PHASE_COMBUSTION and\
            entry.all_next_verify_condition(5, condition_next_is_not_maintien):
                data = {
                    "x": datetime_to_timestamp(entry.dt),
                    "title": 'Combustion'
                }
                labels['data'].append(data)
            """
    conf['series'].append(labels)
    """ Add Subtitle (plotbands legend) """
    #conf["subtitle"] = ChartLegend
    """ Add Title (date begin date end) """
    # With enough entries, show the covered date range in the subtitle.
    if len(entries) > 3:
        begin = pretty_date(entries[0].dt)
        end = pretty_date(entries[len(entries)-1].dt)
        #conf["title"]["text"] = 'Monitoring Chaudière du {0} au {1}'.format(begin, end)
        conf["title"]["text"] = 'Monitoring Chaudière'
        conf["subtitle"]["text"] = ' du {0} au {1}'.format(begin, end)
    else:
        conf["title"]["text"] = 'Monitoring Chaudière'
    """ Return new conf """
    return conf
#display.py
#***********************************************
#GlassClient RJGlass
#display.py -- Used to initalize the display and windows for pyglet
#
#***********************************************
import os
import logging
import pyglet
from pyglet.gl import *
from xml.etree.ElementTree import ElementTree
import gauge
class display_c(object):
    """Owns the pyglet window and the list of views parsed from an XML config file."""

    def __init__(self, parse_file=None):
        #Initalize the display
        # fullscreen if True, setup fullscreen windows
        # fullscreen if False, setup 1024x768 windows
        #window = pyglet.window.Window(fullscreen=True)
        self.platform = pyglet.window.get_platform()
        self.display = self.platform.get_default_display()
        self.screens = self.display.get_screens()
        self.parse_file = parse_file
        self.win = None #Leave None for now, will be created in self.parse_view_xml
        self.fps_display = pyglet.clock.ClockDisplay()
        self.view_l = [] #view list
        if parse_file != None:
            self.parse_view_xml(parse_file)
        #window_list = []
        #for screen in screens:
        #if fullscreen:
        #    win = pyglet.window.Window(fullscreen=True, display=display)
        #else:
        #    win = pyglet.window.Window(width = 1024, height = 768, display=display)
        #window_list.append(win)

    def parse_view_xml(self, parse_file):
        """Parse ./views/<parse_file>: create the window, then build views and gauges.

        NOTE(review): if the XML provides neither fullscreen='Y' nor a <size>
        element, self.win remains None and the @self.win.event decorator below
        raises — confirm config files always supply one of the two.
        NOTE(review): `map(...)` for size/pos yields a lazy iterator on
        Python 3; downstream gauge code may expect a list (Python 2 era code).
        """
        def xml_val(element, prev=None):
            # Return the element's text, or `prev` when the element is absent.
            if element == None:
                return prev
            else:
                return element.text
        #Parses xml config file
        full_parse = os.path.join(os.getcwd(),'views', parse_file)
        #print full_parse
        tree = ElementTree()
        tree.parse(full_parse)
        #Create/Adjust window (config is built but currently unused)
        config = pyglet.gl.Config(sample_buffers=1, samples=2)
        #Read fullscreen
        if 'Y' == xml_val(tree.find("fullscreen")):
            self.win = pyglet.window.Window(fullscreen=True, display = self.display)
        else:
            size = xml_val(tree.find("size"))
            if size:
                width,height = size.split(",")
                self.win = pyglet.window.Window(width = int(width), height = int(height), display=self.display, vsync=0)
        #print fs, size_x, size_y
        #Read views
        views = tree.findall('view')
        for view in views:
            view_name = xml_val(view.find('name'))
            view_i = view_c(view_name)
            #Read gauges
            gauges = view.findall('gauge')
            for g in gauges:
                #Compute size field (percentage * 100)
                size = xml_val(g.find('size'))
                if size:
                    size = map(lambda x:int(x), size.split(','))
                #Compute pos field (percentage * 100)
                pos = xml_val(g.find('pos'))
                if pos:
                    pos = map(lambda x:int(x), pos.split(','))
                #Folder field
                folder = xml_val(g.find('folder'))
                #name field
                name = xml_val(g.find('name'))
                #Import the gauge implementation dynamically: gauges.<name>.main.gauge
                i_name = 'gauges.' + name
                g = __import__(i_name, fromlist=['main'])
                gauge_i = g.main.gauge(size, pos, name,folder)
                #gauge_i = gauge.gauge_c(name,folder,size,pos)
                view_i.appendGauge(gauge_i)
            self.view_l.append(view_i)

        # Register the draw handler on the freshly created window.
        @self.win.event
        def on_draw():
            logging.debug("Display: Start on_draw")
            pyglet.gl.glClear(pyglet.gl.GL_COLOR_BUFFER_BIT)
            pyglet.gl.glLoadIdentity()
            glEnable(GL_LINE_SMOOTH)
            glEnable(GL_BLEND)
            #glBlendFunc(GL_SRC_ALPHA, GL_ZERO)
            glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
            glHint(GL_LINE_SMOOTH_HINT, GL_DONT_CARE)
            #Draw gauges of the first view only
            for g in self.view_l[0].gauges:
                logging.debug("Display: Drawing Guage %s", g.name)
                pyglet.gl.glPushMatrix()
                g.on_draw()
                pyglet.gl.glPopMatrix()
            logging.debug("Display: End Drawing Guages")
class view_c(object):
    """A named collection of gauges laid out together.

    A display cycles through a list of these views.
    """

    def __init__(self, name):
        self.name = name
        self.gauges = []

    def appendGauge(self, i):
        """Add one gauge to this view."""
        self.gauges.append(i)
|
#!/usr/bin/env python
try:
import urllib.request
from urllib.parse import urlparse
import bs4 as bs
import sys, subprocess, os, zipfile
except ModuleNotFoundError as e:
print("[*] Error: ", e)
sys.exit()
def search_subs():
    """Prompt for a movie name, list matching titles from yifysubtitles.com,
    and hand the selected movie's subtitle page URL to download()."""
    # entering 'movie name' into url
    url = "http://www.yifysubtitles.com/search?q="
    movie_name = input("[*] Search for: ")
    movie_fix = movie_name.lower().replace(" ", "+")
    # Getting connection to website
    movie_url = url + movie_fix
    source = urllib.request.urlopen(movie_url).read()
    soup = bs.BeautifulSoup(source, "html.parser")
    # Searches through a table of movies
    movie_dict = {}  # 'title (year)' -> subtitle page url
    movie_list = []
    tables = soup.findAll("li", {"class": "media media-movie-clickable"})
    for table in tables:
        # Goes to media-body of the container
        media_body = table.findAll("div", {"class":"media-body"})
        media_body = media_body[0]
        # Finding movie subtitle URL
        # finds movie/subtitle sub-link
        sub_link = media_body.findAll("a")
        sub_link = sub_link[0]
        link = sub_link.get("href")
        # parses the original url to rebuild an absolute link
        parse_obj = urlparse(movie_url)
        url = parse_obj.scheme + "://" + parse_obj.netloc
        sub_url = url + link
        # Finds the movie title
        movie_title = media_body.findAll("h3", {"itemprop":"name"})
        movie_title = movie_title[0]
        title = movie_title.string
        # Finds the year of the movie
        years = media_body.findAll("span", {"class":"movinfo-section"})[0]
        year = years.contents[0]
        movie_year = f"{title} ({year})"
        movie_dict[movie_year] = sub_url
    # copies dictionary items into a list (stable, indexable ordering)
    for m in movie_dict.items():
        movie_list.append(m)
    # enumerating list and prints titles
    for k, v in enumerate(movie_list):
        print(f"{k}) {v[0]}")
    select_url = input("\nSelect the movie: ")
    # Enumerates list again and allows selection from count
    # NOTE(review): non-numeric input raises ValueError; out-of-range input
    # silently selects nothing.
    for k, v in enumerate(movie_list):
        if int(select_url) == k:
            # v[1] is the movie url
            my_url = v[1]
            # movie_url is the function argument
            download(my_url)
def download(my_url):
    """Download the highest-rated English subtitle zip for the movie page at *my_url*.

    Side effect: chdir's into ./subtitles (creating it if missing) and leaves
    the process there before delegating extraction to zip_extraction().
    """
    # Opens The YIFY Subtitle Website To Be Read
    source = urllib.request.urlopen(my_url).read()
    soup = bs.BeautifulSoup(source, "html.parser")
    # Gets Movie Name
    title_sub = soup.title.string
    # Variable for Subtitle Table
    table_subs = soup.find("table", {"class": "table other-subs"}).findAll("tr")
    # The First List In The Table Is Useless (header row)
    useless = table_subs.pop(0)
    # Makes A List For The English Rows
    tabs = []
    # Searches Through The Table For The English Subtitles.
    for table in table_subs:
        if table.find("span", {"class": "sub-lang"}).string == "English":
            tabs.append(table)
    # Uses The First Subtitle Class Because It Is The Highest Rated Subtitle
    my_subs = tabs[0]
    # Finds The Subtitle URL
    sub_link = my_subs.findAll("a")[0]
    link = sub_link.get("href")
    # Makes A Variable For The Subtitle URL
    parse_obj = urlparse(my_url)
    url = parse_obj.scheme + "://" + parse_obj.netloc
    sub_url = url + link
    # Opens The Subtitle URL To Be Read
    source = urllib.request.urlopen(sub_url).read()
    soup = bs.BeautifulSoup(source, "html.parser")
    # Variable For The Zip URL
    zip_url = soup.findAll("a", {"class": "btn-icon download-subtitle"})
    z_url = zip_url[0]
    files = z_url.get("href")
    # Parses ONLY The File Name NOT The URL
    movie_url = urlparse(files)
    movie_path = movie_url.path
    movie = movie_path.replace("/subtitle/", "")
    # Ensure ./subtitles exists and make it the working directory.
    if not os.path.isdir("subtitles"):
        os.mkdir("subtitles")
        os.chdir("subtitles")
    else:
        os.chdir("subtitles")
    if not os.path.isfile(movie):
        print(f"\nDownloading {title_sub}...")
        subprocess.run(["curl", "-O", files])
        zip_extraction()
    else:
        print(f"\nAlready Exists: {title_sub}")
        zip_extraction()
def zip_extraction():
    """Extract every .srt from zip archives in the subtitles folder, then exit.

    BUG FIX: the original called sys.exit() whenever the current working
    directory was not the PARENT of 'subtitles'. download() chdir's into
    'subtitles' before calling this function, so on that path the process
    exited without extracting anything. The directory change is now
    best-effort instead of fatal.
    """
    if os.path.isdir("subtitles"):
        os.chdir("subtitles")
    for archive in os.listdir("."):
        if not zipfile.is_zipfile(archive):
            continue
        with zipfile.ZipFile(archive) as fzip:
            for member in fzip.namelist():
                if member.endswith(".srt"):
                    try:
                        print(f"\nExtracting: {member}")
                        fzip.extract(member)
                    except Exception:
                        # Extraction is best-effort; report and move on
                        # (narrowed from the original bare `except:`).
                        print("\nCould not extract files!")
        # Remove the archive once its subtitles have been extracted.
        os.remove(archive)
    sys.exit()
if __name__ == "__main__":
search_subs()
|
import random
# Number-guessing game: an initial guess plus one retry inside the loop,
# then a final win/lose verdict.
print('Welcome to the number guessing game')
number_to_guess = random.randint(1, 10)
number_of_tries = 1
guess = int(input('Please guess the number'))
while number_of_tries <= 1:
    print("chance", number_of_tries, "\n")
    if number_to_guess == guess:
        # BUG FIX: the original had no break here, so after a correct guess
        # it still printed 'Sorry! wrong number' and asked for another guess.
        # Stop the loop; the verdict below prints the win message.
        break
    elif guess < number_to_guess:
        print('your guess was lower than the number')
    else:
        print('your guess was higher than the number')
    print('Sorry! wrong number')
    guess = int(input('please guess again'))
    number_of_tries += 1
if number_to_guess == guess:
    print('Well done, you win!')
    print('You took', number_of_tries, 'goes to complete the game')
else:
    print('sorry you lose')
    print('the number you needed to guess was', number_to_guess)
print('Game over')
|
from panda3d.core import GeomVertexFormat, GeomVertexWriter, Vec4
from .Geometry import Geometry
from .PolygonView import PolygonView
class Polygon(Geometry):
    """A polygon geometry whose vertices all share one uniform color."""

    def __init__(self):
        Geometry.__init__(self, "polygon", GeomVertexFormat.getV3c4())
        self.vertices = []
        self.color = Vec4(1, 1, 1, 1)

    def addView(self, primitiveType, drawMask, viewHpr = None, state = None):
        """Attach a PolygonView that renders this geometry."""
        view = PolygonView(self, primitiveType, drawMask, viewHpr, state)
        return Geometry.addView(self, view)

    def setVertices(self, verts):
        """Replace the whole vertex list and rebuild the geometry."""
        self.vertices = verts
        self.generateGeometry()

    def addVertex(self, point):
        """Append a single vertex and rebuild the geometry."""
        self.vertices.append(point)
        self.generateGeometry()

    def setColor(self, color):
        """Change the uniform vertex color and rewrite the vertex buffer."""
        self.color = color
        self.generateVertices()

    def generateVertices(self):
        """Write one position + color row per vertex into the vertex buffer."""
        self.vertexBuffer.setNumRows(len(self.vertices))
        vwriter = GeomVertexWriter(self.vertexBuffer, "vertex")
        cwriter = GeomVertexWriter(self.vertexBuffer, "color")
        for vertex in self.vertices:
            vwriter.setData3f(vertex)
            cwriter.setData4f(self.color)
        Geometry.generateVertices(self)
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import logging
import sys
from gbpclient.gbp.v2_0 import groupbasedpolicy as gbp
from gbpclient.tests.unit import test_cli20
class CLITestV20ExternalSegmentJSON(test_cli20.CLITestV20Base):
    """CLI tests for the GBP external-segment commands.

    Covers create (mandatory/all params, route without nexthop), list, show,
    update (partial/all params, unsetting routes) and delete.
    """
    LOG = logging.getLogger(__name__)
    def setUp(self):
        super(CLITestV20ExternalSegmentJSON, self).setUp()
    def test_create_external_segment_with_mandatory_params(self):
        """external-segment-create with all mandatory params."""
        resource = 'external_segment'
        cmd = gbp.CreateExternalSegment(test_cli20.MyApp(sys.stdout), None)
        name = 'my-name'
        tenant_id = 'my-tenant'
        my_id = 'my-id'
        args = ['--tenant-id', tenant_id,
                name]
        position_names = ['name', ]
        position_values = [name, ]
        self._test_create_resource(resource, cmd, name, my_id, args,
                                   position_names, position_values,
                                   tenant_id=tenant_id)
    def test_create_external_segment_with_all_params(self):
        """external-segment-create with all params."""
        resource = 'external_segment'
        cmd = gbp.CreateExternalSegment(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        tenant_id = 'mytenant'
        description = 'My External Segment'
        my_id = 'someid'
        ip_version = '4'
        cidr = '192.168.0.0/24'
        # The CLI string form is expected to be parsed into a list of dicts.
        external_route = 'destination=172.16.1.0/24,nexthop=192.168.0.10'
        expected_external_routes = [{'destination': '172.16.1.0/24', 'nexthop':
                                     '192.168.0.10'}]
        port_address_translation = 'true'
        shared = 'true'
        args = ['--tenant-id', tenant_id,
                '--description', description,
                '--ip-version', ip_version,
                '--cidr', cidr,
                '--external-route', external_route,
                '--port-address-translation', port_address_translation,
                '--shared', shared,
                name]
        position_names = ['name', ]
        position_values = [name, ]
        self._test_create_resource(resource, cmd, name, my_id, args,
                                   position_names, position_values,
                                   tenant_id=tenant_id,
                                   description=description,
                                   ip_version=4,
                                   cidr=cidr,
                                   external_routes=expected_external_routes,
                                   port_address_translation=
                                   port_address_translation,
                                   shared=shared)
    def test_create_external_segment_with_external_route_no_nexthop(self):
        """external-segment-create with a route whose nexthop is omitted."""
        resource = 'external_segment'
        cmd = gbp.CreateExternalSegment(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        tenant_id = 'mytenant'
        my_id = 'someid'
        # 'nexthop' with no value should map to None in the parsed route.
        external_route = 'destination=172.16.1.0/24,nexthop'
        expected_external_routes = [{'destination': '172.16.1.0/24', 'nexthop':
                                     None}]
        args = ['--tenant-id', tenant_id,
                '--external-route', external_route,
                name]
        position_names = ['name', ]
        position_values = [name, ]
        self._test_create_resource(resource, cmd, name, my_id, args,
                                   position_names, position_values,
                                   tenant_id=tenant_id,
                                   external_routes=expected_external_routes)
    def test_list_external_segments(self):
        """external-segment-list."""
        resource = 'external_segments'
        cmd = gbp.ListExternalSegment(test_cli20.MyApp(sys.stdout), None)
        self._test_list_resources(resource, cmd, True)
    def test_show_external_segment_name(self):
        """external-segment-show."""
        resource = 'external_segment'
        cmd = gbp.ShowExternalSegment(test_cli20.MyApp(sys.stdout), None)
        args = ['--fields', 'id', self.test_id]
        self._test_show_resource(resource, cmd, self.test_id, args, ['id'])
    def test_update_external_segment(self):
        "external-segment-update myid --name myname --tags a b."
        resource = 'external_segment'
        cmd = gbp.UpdateExternalSegment(test_cli20.MyApp(sys.stdout), None)
        self._test_update_resource(resource, cmd, 'myid',
                                   ['myid', '--name', 'myname',
                                    '--tags', 'a', 'b'],
                                   {'name': 'myname', 'tags': ['a', 'b'], })
    def test_update_external_segment_with_all_params(self):
        """external-segment-update with every updatable field."""
        resource = 'external_segment'
        cmd = gbp.UpdateExternalSegment(test_cli20.MyApp(sys.stdout), None)
        name = 'myname'
        description = 'My External Segment'
        my_id = 'someid'
        external_route = 'destination=172.16.1.0/24,nexthop=192.168.0.10'
        expected_external_routes = [{'destination': '172.16.1.0/24', 'nexthop':
                                     '192.168.0.10'}]
        port_address_translation = 'true'
        shared = 'true'
        args = ['--name', name,
                '--description', description,
                '--external-route', external_route,
                '--port-address-translation', port_address_translation,
                '--shared', shared,
                my_id]
        params = {
            'name': name,
            'description': description,
            'external_routes': expected_external_routes,
            'port_address_translation': port_address_translation,
            'shared': shared
        }
        self._test_update_resource(resource, cmd, my_id, args, params)
    def test_update_external_segment_with_external_route_no_nexthop(self):
        """external-segment-update with a route whose nexthop is omitted."""
        resource = 'external_segment'
        cmd = gbp.UpdateExternalSegment(test_cli20.MyApp(sys.stdout), None)
        my_id = 'someid'
        external_route = 'destination=172.16.1.0/24,nexthop'
        expected_external_routes = [{'destination': '172.16.1.0/24', 'nexthop':
                                     None}]
        args = ['--external-route', external_route,
                my_id]
        params = {
            'external_routes': expected_external_routes,
        }
        self._test_update_resource(resource, cmd, my_id, args, params)
    def test_update_external_segment_with_unset_external_route(self):
        """external-segment-update with an empty route string clears the routes."""
        resource = 'external_segment'
        cmd = gbp.UpdateExternalSegment(test_cli20.MyApp(sys.stdout), None)
        my_id = 'someid'
        external_route = ''
        expected_external_routes = []
        args = ['--external-route', external_route,
                my_id]
        params = {
            'external_routes': expected_external_routes,
        }
        self._test_update_resource(resource, cmd, my_id, args, params)
    def test_delete_external_segment_name(self):
        """external-segment-delete."""
        resource = 'external_segment'
        cmd = gbp.DeleteExternalSegment(test_cli20.MyApp(sys.stdout), None)
        my_id = 'my-id'
        args = [my_id]
        self._test_delete_resource(resource, cmd, my_id, args)
|
from django.urls import path
from .views import *
# URL namespace for reversing, e.g. reverse('iot:iot_create').
app_name = 'iot'
urlpatterns = [
    path('create', create_iot, name='iot_create'),
    # AJAX validators presumably used by the create/update forms.
    path('validate-serial-no', validate_serial_no, name='validate_serial_no'),
    path('validate-plate-no', validate_plate_no, name='validate_plate_no'),
    path('iot-list',iot_list,name='iot_list'),
    # Data endpoint backing the list page (separate from the HTML view).
    path('iot-list-data',iot_list_data,name='iot_list_data'),
    # Devices are addressed by serial number, not primary key.
    path('delete/<str:serial_no>',delete, name='iot_delete'),
    path('update/<str:serial_no>',update, name='iot_update')
]
from flask import *
app = Flask(__name__)
# NOTE(review): hard-coded session secret — fine for local development, but in
# production this should come from configuration/environment so it is never
# committed to source control.
app.secret_key = 'my precious'
@app.route('/')
def home():
    """Render the landing page."""
    return render_template('home.html')
@app.route('/boiler_installation')
@app.route('/boiler_servicing')
@app.route('/plumbing')
@app.route('/bathroom_installation')
@app.route('/other_services')
def services():
    """Render the shared services template, choosing the mode from the URL rule."""
    mode_names = (
        "boiler_installation",
        "boiler_servicing",
        "plumbing",
        "bathroom_installation",
        "other_services",
    )
    # map each registered rule ("/name") back to its template mode ("name")
    modes = {"/" + name: name for name in mode_names}
    return render_template('services.html', mode=modes.get(request.url_rule.rule))
@app.route('/testimonials')
def testimonials():
    """Render the testimonials page."""
    return render_template('testimonials.html')
@app.route('/contact')
def contact():
    """Render the contact page."""
    return render_template('contact.html')
@app.errorhandler(404)
def page_not_found(e):
    """Serve the home page for unknown URLs, preserving the 404 status.

    The original handler returned ``home()`` directly, which responds with
    HTTP 200 — broken links then look successful to clients and crawlers.
    Returning a ``(body, status)`` tuple keeps the 404 code.
    """
    return render_template('home.html'), 404
if __name__ == '__main__':
    # Development server with the interactive debugger enabled; run(debug=True)
    # sets app.debug just like the original explicit assignment did.
    app.run(debug=True)
import time
from mock import patch
from nose.tools import assert_equals, assert_true, assert_not_in, assert_in, assert_not_equals
from ckanpackager.lib.statistics import CkanPackagerStatistics, statistics, extract_domain, \
anonymize_email, anonymize_kwargs
class TestStatistics(object):
    """Tests for CkanPackagerStatistics with anonymization disabled.

    setUp builds a fresh in-memory SQLite backend for every test, so the
    cases are independent of one another.
    """
    def setUp(self):
        """Create a statistics object"""
        self._d = CkanPackagerStatistics('sqlite:///:memory:', False)
    def test_log_request(self):
        """Test that requests are logged"""
        assert_equals(0, len(self._d.get_requests()))
        self._d.log_request('abcd', 'someone@example.com')
        assert_equals(1, len(self._d.get_requests()))
    def test_log_multiple_request(self):
        """Test that multiple requests are logged"""
        assert_equals(0, len(self._d.get_requests()))
        self._d.log_request('abcd', 'someone@example.com')
        self._d.log_request('abcd', 'someone@example.com')
        self._d.log_request('abcd', 'someone@example.com')
        assert_equals(3, len(self._d.get_requests()))
    def test_request_fields(self):
        """Ensure logged request fields contain expected data"""
        self._d.log_request('abcd', 'someone@example.com')
        requests = self._d.get_requests()
        assert_equals(1, len(requests))
        assert_equals('abcd', requests[0]['resource_id'])
        assert_equals('someone@example.com', requests[0]['email'])
        assert_equals('example.com', requests[0]['domain'])
        assert_equals(type(requests[0]['timestamp']), int)
        # For the stats, an hour precision is enough - and this test
        # is unlikely to take more time so this test should be good.
        assert_true(int(time.time()) - requests[0]['timestamp'] < 60*60)
    def test_log_error(self):
        """Test that errors are logged"""
        assert_equals(0, len(self._d.get_errors()))
        self._d.log_error('abcd', 'someone@example.com', 'it failed')
        assert_equals(1, len(self._d.get_errors()))
    def test_log_multiple_error(self):
        """Test that multiple errors are logged"""
        assert_equals(0, len(self._d.get_errors()))
        self._d.log_error('abcd', 'someone@example.com', 'it failed')
        self._d.log_error('abcd', 'someone@example.com', 'it failed')
        self._d.log_error('abcd', 'someone@example.com', 'it failed')
        assert_equals(3, len(self._d.get_errors()))
    def test_error_fields(self):
        """Test that logged error fields contain expected data"""
        self._d.log_error('abcd', 'someone@example.com', 'it failed')
        errors = self._d.get_errors()
        assert_equals(1, len(errors))
        assert_equals('abcd', errors[0]['resource_id'])
        assert_equals('someone@example.com', errors[0]['email'])
        assert_equals('it failed', errors[0]['message'])
        assert_equals(type(errors[0]['timestamp']), int)
        # For the stats, an hour precision is enough - and this test
        # is unlikely to take more time so this test should be good.
        assert_true(int(time.time()) - errors[0]['timestamp'] < 60*60)
    def test_overall_request_totals_updated(self):
        """Test that the overall request totals are updated"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('efgh', 'someone2@example.com')
        totals = self._d.get_totals()
        assert_equals(3, totals['*']['requests'])
    def test_overall_error_totals_updated(self):
        """Test that the overall error totals are updated"""
        self._d.log_error('abcd', 'someone1@example.com', 'it failed')
        self._d.log_error('abcd', 'someone1@example.com', 'it failed')
        self._d.log_error('abcd', 'someone2@example.com', 'it failed')
        self._d.log_error('efgh', 'someone3@example.com', 'it failed')
        totals = self._d.get_totals()
        assert_equals(4, totals['*']['errors'])
    def test_per_resource_request_totals_updated(self):
        """Test that per-resource request totals are updated"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('efgh', 'someone2@example.com')
        totals = self._d.get_totals()
        assert_equals(2, totals['abcd']['requests'])
    def test_per_resource_error_totals_updated(self):
        """Test that the per-resource error totals are updated"""
        self._d.log_error('abcd', 'someone1@example.com', 'it failed')
        self._d.log_error('abcd', 'someone1@example.com', 'it failed')
        self._d.log_error('abcd', 'someone2@example.com', 'it failed')
        self._d.log_error('efgh', 'someone3@example.com', 'it failed')
        totals = self._d.get_totals()
        assert_equals(3, totals['abcd']['errors'])
    def test_overall_unique_emails_updated(self):
        """Test that the overall number of unique emails are updated"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('efgh', 'someone2@example.com')
        totals = self._d.get_totals()
        assert_equals(2, totals['*']['emails'])
    def test_per_resource_unique_emails_updated(self):
        """Test that the per-resource number of unique emails are updated"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone2@example.com')
        self._d.log_request('efgh', 'someone3@example.com')
        totals = self._d.get_totals()
        assert_equals(2, totals['abcd']['emails'])
    def test_totals_dont_include_id(self):
        """Check that the totals returned don't include an id field"""
        self._d.log_request('abcd', 'someone1@example.com')
        totals = self._d.get_totals()
        assert_not_in('id', totals['*'])
        assert_not_in('resource_id', totals['*'])
        assert_not_in('id', totals['abcd'])
        assert_not_in('resource_id', totals['abcd'])
    def test_totals_return_all_resources(self):
        """Check that, unfiltered, get_totals returns entries for all resources"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('efgh', 'someone3@example.com')
        self._d.log_request('ijkl', 'someone3@example.com')
        totals = self._d.get_totals()
        assert_in('*', totals)
        assert_in('abcd', totals)
        assert_in('efgh', totals)
        assert_in('ijkl', totals)
    def test_totals_filters(self):
        """Check it's possible to filter the rows returned by get_totals"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone2@example.com')
        self._d.log_request('efgh', 'someone3@example.com')
        totals = self._d.get_totals(resource_id='abcd')
        assert_not_in('*', totals)
        assert_not_in('efgh', totals)
        assert_in('abcd', totals)
    def test_requests_dont_include_id(self):
        """Check that the requests returned don't include an id field"""
        self._d.log_request('abcd', 'someone1@example.com')
        requests = self._d.get_requests()
        assert_not_in('id', requests[0])
    def test_errors_dont_include_id(self):
        """Check that the errors returned don't include an id field"""
        self._d.log_error('abcd', 'someone1@example.com', 'borken')
        errors = self._d.get_errors()
        assert_not_in('id', errors[0])
    def test_requests_ordered_by_timestamp_desc(self):
        """Check that the returned requests are ordered by timestamp desc"""
        # sleep so the three rows get distinct (second-resolution) timestamps
        self._d.log_request('abcd', 'someone1@example.com')
        time.sleep(1)
        self._d.log_request('abcd', 'someone1@example.com')
        time.sleep(1)
        self._d.log_request('abcd', 'someone2@example.com')
        requests = self._d.get_requests()
        assert_true(requests[0]['timestamp'] > requests[1]['timestamp'])
        assert_true(requests[1]['timestamp'] > requests[2]['timestamp'])
    def test_errors_ordered_by_timestamp_desc(self):
        """Check that the returned errors are ordered by timestamp desc"""
        # sleep so the three rows get distinct (second-resolution) timestamps
        self._d.log_error('abcd', 'someone1@example.com', 'borken')
        time.sleep(1)
        self._d.log_error('abcd', 'someone1@example.com', 'borken')
        time.sleep(1)
        self._d.log_error('abcd', 'someone2@example.com', 'borken')
        errors = self._d.get_errors()
        assert_true(errors[0]['timestamp'] > errors[1]['timestamp'])
        assert_true(errors[1]['timestamp'] > errors[2]['timestamp'])
    def test_statistics_shortcut(self):
        """Check that the 'statistics' shortcut returns an object as expected"""
        o = statistics('sqlite:///:memory:', False)
        assert_equals(CkanPackagerStatistics, type(o))
class TestStatisticsAnonymized(object):
    """Tests for CkanPackagerStatistics with anonymization enabled.

    Mirrors TestStatistics, but the backend is created with anonymize=True,
    so stored email addresses should come back as their anonymized hash
    (precomputed in setUp as ``someone_hash``) while the domain stays in
    the clear.
    """
    def setUp(self):
        """
        Create a statistics object with anonymizing turned on.
        """
        self._d = CkanPackagerStatistics('sqlite:///:memory:', True)
        # expected stored value for 'someone@example.com'
        self.someone_hash = anonymize_email(u'someone@example.com')
    def test_log_request(self):
        """
        Test that requests are logged
        """
        assert_equals(0, len(self._d.get_requests()))
        self._d.log_request('abcd', 'someone@example.com')
        assert_equals(1, len(self._d.get_requests()))
    def test_log_multiple_request(self):
        """Test that multiple requests are logged"""
        assert_equals(0, len(self._d.get_requests()))
        self._d.log_request('abcd', 'someone@example.com')
        self._d.log_request('abcd', 'someone@example.com')
        self._d.log_request('abcd', 'someone@example.com')
        assert_equals(3, len(self._d.get_requests()))
    def test_request_fields(self):
        """Ensure logged request fields contain expected data"""
        self._d.log_request('abcd', 'someone@example.com')
        requests = self._d.get_requests()
        assert_equals(1, len(requests))
        assert_equals('abcd', requests[0]['resource_id'])
        # the email is stored anonymized; the domain is kept in the clear
        assert_equals(self.someone_hash, requests[0]['email'])
        assert_equals('example.com', requests[0]['domain'])
        assert_equals(type(requests[0]['timestamp']), int)
        # For the stats, an hour precision is enough - and this test
        # is unlikely to take more time so this test should be good.
        assert_true(int(time.time()) - requests[0]['timestamp'] < 60*60)
    def test_log_error(self):
        """Test that errors are logged"""
        assert_equals(0, len(self._d.get_errors()))
        self._d.log_error('abcd', 'someone@example.com', 'it failed')
        assert_equals(1, len(self._d.get_errors()))
    def test_log_multiple_error(self):
        """Test that multiple errors are logged"""
        assert_equals(0, len(self._d.get_errors()))
        self._d.log_error('abcd', 'someone@example.com', 'it failed')
        self._d.log_error('abcd', 'someone@example.com', 'it failed')
        self._d.log_error('abcd', 'someone@example.com', 'it failed')
        assert_equals(3, len(self._d.get_errors()))
    def test_error_fields(self):
        """Test that logged error fields contain expected data"""
        self._d.log_error('abcd', 'someone@example.com', 'it failed')
        errors = self._d.get_errors()
        assert_equals(1, len(errors))
        assert_equals('abcd', errors[0]['resource_id'])
        # the email is stored anonymized
        assert_equals(self.someone_hash, errors[0]['email'])
        assert_equals('it failed', errors[0]['message'])
        assert_equals(type(errors[0]['timestamp']), int)
        # For the stats, an hour precision is enough - and this test
        # is unlikely to take more time so this test should be good.
        assert_true(int(time.time()) - errors[0]['timestamp'] < 60*60)
    def test_overall_request_totals_updated(self):
        """Test that the overall request totals are updated"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('efgh', 'someone2@example.com')
        totals = self._d.get_totals()
        assert_equals(3, totals['*']['requests'])
    def test_overall_error_totals_updated(self):
        """Test that the overall error totals are updated"""
        self._d.log_error('abcd', 'someone1@example.com', 'it failed')
        self._d.log_error('abcd', 'someone1@example.com', 'it failed')
        self._d.log_error('abcd', 'someone2@example.com', 'it failed')
        self._d.log_error('efgh', 'someone3@example.com', 'it failed')
        totals = self._d.get_totals()
        assert_equals(4, totals['*']['errors'])
    def test_per_resource_request_totals_updated(self):
        """Test that per-resource request totals are updated"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('efgh', 'someone2@example.com')
        totals = self._d.get_totals()
        assert_equals(2, totals['abcd']['requests'])
    def test_per_resource_error_totals_updated(self):
        """Test that the per-resource error totals are updated"""
        self._d.log_error('abcd', 'someone1@example.com', 'it failed')
        self._d.log_error('abcd', 'someone1@example.com', 'it failed')
        self._d.log_error('abcd', 'someone2@example.com', 'it failed')
        self._d.log_error('efgh', 'someone3@example.com', 'it failed')
        totals = self._d.get_totals()
        assert_equals(3, totals['abcd']['errors'])
    def test_overall_unique_emails_updated(self):
        """Test that the overall number of unique emails are updated"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('efgh', 'someone2@example.com')
        totals = self._d.get_totals()
        assert_equals(2, totals['*']['emails'])
    def test_per_resource_unique_emails_updated(self):
        """Test that the per-resource number of unique emails are updated"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone2@example.com')
        self._d.log_request('efgh', 'someone3@example.com')
        totals = self._d.get_totals()
        assert_equals(2, totals['abcd']['emails'])
    def test_totals_dont_include_id(self):
        """Check that the totals returned don't include an id field"""
        self._d.log_request('abcd', 'someone1@example.com')
        totals = self._d.get_totals()
        assert_not_in('id', totals['*'])
        assert_not_in('resource_id', totals['*'])
        assert_not_in('id', totals['abcd'])
        assert_not_in('resource_id', totals['abcd'])
    def test_totals_return_all_resources(self):
        """Check that, unfiltered, get_totals returns entries for all resources"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('efgh', 'someone3@example.com')
        self._d.log_request('ijkl', 'someone3@example.com')
        totals = self._d.get_totals()
        assert_in('*', totals)
        assert_in('abcd', totals)
        assert_in('efgh', totals)
        assert_in('ijkl', totals)
    def test_totals_filters(self):
        """Check it's possible to filter the rows returned by get_totals"""
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone1@example.com')
        self._d.log_request('abcd', 'someone2@example.com')
        self._d.log_request('efgh', 'someone3@example.com')
        totals = self._d.get_totals(resource_id='abcd')
        assert_not_in('*', totals)
        assert_not_in('efgh', totals)
        assert_in('abcd', totals)
    def test_requests_dont_include_id(self):
        """Check that the requests returned don't include an id field"""
        self._d.log_request('abcd', 'someone1@example.com')
        requests = self._d.get_requests()
        assert_not_in('id', requests[0])
    def test_errors_dont_include_id(self):
        """Check that the errors returned don't include an id field"""
        self._d.log_error('abcd', 'someone1@example.com', 'borken')
        errors = self._d.get_errors()
        assert_not_in('id', errors[0])
    def test_requests_ordered_by_timestamp_desc(self):
        """Check that the returned requests are ordered by timestamp desc"""
        # sleep so the three rows get distinct (second-resolution) timestamps
        self._d.log_request('abcd', 'someone1@example.com')
        time.sleep(1)
        self._d.log_request('abcd', 'someone1@example.com')
        time.sleep(1)
        self._d.log_request('abcd', 'someone2@example.com')
        requests = self._d.get_requests()
        assert_true(requests[0]['timestamp'] > requests[1]['timestamp'])
        assert_true(requests[1]['timestamp'] > requests[2]['timestamp'])
    def test_errors_ordered_by_timestamp_desc(self):
        """Check that the returned errors are ordered by timestamp desc"""
        # sleep so the three rows get distinct (second-resolution) timestamps
        self._d.log_error('abcd', 'someone1@example.com', 'borken')
        time.sleep(1)
        self._d.log_error('abcd', 'someone1@example.com', 'borken')
        time.sleep(1)
        self._d.log_error('abcd', 'someone2@example.com', 'borken')
        errors = self._d.get_errors()
        assert_true(errors[0]['timestamp'] > errors[1]['timestamp'])
        assert_true(errors[1]['timestamp'] > errors[2]['timestamp'])
    def test_statistics_shortcut(self):
        """Check that the 'statistics' shortcut returns an object as expected"""
        # NOTE(review): this passes anonymize=False even though the class tests
        # anonymized mode — looks copy-pasted from TestStatistics; confirm intent.
        o = statistics('sqlite:///:memory:', False)
        assert_equals(CkanPackagerStatistics, type(o))
def test_extract_domain():
    """extract_domain returns everything after the first '@' in the address."""
    cases = [
        (u'someone@nhm.ac.uk', u'nhm.ac.uk'),
        # if no @ is present, just return the whole thing
        (u'someone', u'someone'),
        # if more than one @ is present, the "domain" starts at the first one
        (u'someone@@nhm.ac.uk', u'@nhm.ac.uk'),
        # if only a @ is present, return empty
        (u'@', u''),
        # if the @ is at the end of the string, return empty
        (u'aaa@', u''),
    ]
    for address, expected in cases:
        assert_equals(extract_domain(address), expected)
def test_anonymize_email():
    """anonymize_email is deterministic, case-insensitive and robust to odd domains."""
    # the same address always hashes to the same value
    assert_equals(anonymize_email(u'someone@nhm.ac.uk'),
                  anonymize_email(u'someone@nhm.ac.uk'))
    # case differences do not change the hash
    assert_equals(anonymize_email(u'SOMEONE@nhm.ac.uk'),
                  anonymize_email(u'someone@NHM.ac.uk'))
    # different local parts hash differently
    assert_not_equals(anonymize_email(u'someone@nhm.ac.uk'),
                      anonymize_email(u'someone_else@nhm.ac.uk'))
    # copes with an empty input
    anonymize_email(u'')
    # the domain is used as the salt, so check silly salts don't throw errors:
    # much longer than the 22 character salt bcrypt needs...
    anonymize_email(u'a@' + u'x' * 40)
    # ...and much shorter
    anonymize_email(u'a@')
    anonymize_email(u'a@x')
@patch(u'ckanpackager.lib.statistics.anonymize_email')
def test_anonymize_kwargs(mock_anonymize_email):
    """anonymize_kwargs hashes the 'email' value in place and leaves other keys alone."""
    hashed = u'hashed!'
    mock_anonymize_email.return_value = hashed
    # a lone email value is replaced by its hash
    kwargs = {u'email': u'someone@nhm.ac.uk'}
    anonymize_kwargs(kwargs)
    assert_equals(kwargs[u'email'], u'hashed!')
    # no email key: the dict is untouched
    kwargs = {}
    anonymize_kwargs(kwargs)
    assert_equals(kwargs, {})
    # other keys survive unchanged alongside the hashed email
    kwargs = {u'another': u'different_thing', u'email': u'someone@nhm.ac.uk'}
    anonymize_kwargs(kwargs)
    assert_equals(kwargs[u'email'], u'hashed!')
    assert_equals(kwargs[u'another'], u'different_thing')
    # a None email stays None
    kwargs = {u'email': None}
    anonymize_kwargs(kwargs)
    assert_equals(kwargs[u'email'], None)
|
"""Tree traversal problems
- DFS: Depth-firt search
- BFS: Breadth-first search
"""
if __name__ == '__main':
# Recursion will first find the last iteration, and then execute all bottom
# to the top.
#
# This is know as Depth-firt search (DFS), this method allocate all memory and
# then remove after the calculation is done for that iteration, following a
# stack (LIFO) distribution.
def factorial(n):
if n == 0:
return 1
else:
return n * factorial(n - 1)
# In the other hand, we have the Corecursion, that opposite to the former always
# start the iteration from the first node, and then proceed to the next one,
# until reachs a end (or not).
#
# This is the Breadth-first search (BFS), where we allocate the necessary memory
# for the calculation, and after is done, we release it, this follows a queue
# (FIFO) distribution.
#
# An analogy to this paradigm is infite thread that will be execute in a
# organized manner until reachs it's end, and as infite loope maybe doens't.
def factorials():
n, f = 0, 1
while True:
yield f
n, f = n + 1, f * (n + 1)
def n_factorials(k):
n, f = 0, 1
while n <= k:
yield f
n, f = n + 1, f * (n + 1)
def nth_factorial(k):
n, f = 0, 1
while n < k:
n, f = n + 1, f * (n + 1)
yield f
print 'iterate n factorials until reach 120 = 5!'
for i in factorials():
if i > 120:
break
else:
print(i)
five_factorials = n_factorials(5)
print 'n_factorials(5)'
# Print all factorial up to 5
for f in five_factorials:
print f
print 'n_factorials(5) second run'
# Never executed because generators are run only once
for f in five_factorials:
print f
print 'nth_factorial(5)'
print nth_factorial(5).next()
|
# Characters that contribute to the fold (hex letters only, both cases;
# decimal digits are deliberately excluded).
VALID = frozenset('abcdefABCDEF')


def fisHex(s):
    """XOR-fold the hex values of every a-f/A-F character in *s*.

    Returns 0 for a string containing no such characters.  Rewritten as an
    explicit loop because the bare ``reduce`` builtin only exists on
    Python 2; this version behaves identically on both Python 2 and 3.
    """
    acc = 0
    for ch in s:
        if ch in VALID:
            acc ^= int(ch, 16)
    return acc
|
class TreeNode:
    """Binary-tree node: a value plus left/right child links."""

    def __init__(self, x):
        self.val = x
        # Children start empty and are attached by the tree builder.
        self.left = self.right = None
def buildList(root):
    """Return the in-order traversal of *root* as a list.

    An empty (sub)tree contributes the sentinel string "bozo".  A leaf
    contributes just its value — its empty children are deliberately not
    expanded, so no sentinels appear around leaves.  ``is None`` replaces
    the original non-idiomatic ``== None`` comparisons.
    """
    if root is None:
        return ["bozo"]
    if root.left is None and root.right is None:
        return [root.val]
    # interior node: left subtree, then the value, then the right subtree
    result = buildList(root.left)
    result.append(root.val)
    result.extend(buildList(root.right))
    return result
|
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from pants.backend.kotlin.goals import debug_goals
def rules():
    """Expose the Kotlin debug-goal rules as this backend's rule set."""
    return debug_goals.rules()
|
from django import forms
from .models import Msg
class MsgForm(forms.ModelForm):
    """ModelForm for Msg exposing the user-editable name, title and text fields."""
    class Meta:
        model = Msg
        fields = ('name', 'title', 'text',)
|
# -*- coding: utf-8 -*-
class Solution:
    def transpose(self, A):
        """Return the transpose of matrix *A* as a new list of lists."""
        # zip(*A) pairs up the i-th element of every row, i.e. the columns.
        return [list(column) for column in zip(*A)]
if __name__ == "__main__":
solution = Solution()
assert [[1, 4, 7], [2, 5, 8], [3, 6, 9],] == solution.transpose(
[
[1, 2, 3],
[4, 5, 6],
[7, 8, 9],
]
)
|
# File: proj3.py
# Author: Maura Choudhary
# Date: 12/4/18
# Section: 20
# E-mail: maurac1@umbc.edu
# Description: This program allows a user to play or solve a sudoku puzzle
# Constants for the board
MIN_NUM = 1    # smallest legal cell value
MAX_NUM = 9    # largest legal cell value
EMPTY = 0      # sentinel for an unfilled cell
SEPARATOR = ","    # column separator in saved puzzle files
# Row/column numbers (1-based) grouped by 3x3 nonette band
BOX1 = [1, 2, 3]
BOX2 = [4, 5, 6]
BOX3 = [7, 8, 9]
# Constants for menus (single-letter user choices)
PLAY = "p"
SOLVE = "s"    # same letter as SAVE; the two appear in different menus
SAVE = "s"
UNDO = "u"
QUIT = "q"
YES = "y"
NO = "n"
# Constants for a move list: indices into a [row, column, number] move
MOVE_ROW = 0
MOVE_COLUMN = 1
MOVE_NUM = 2
# prettyPrint() prints the board with row and column labels,
# and spaces the board out so that it looks nice
# Input: board; the square 2d game board (of integers) to print
# Output: None; prints the board in a pretty way
def prettyPrint(board):
    # column headings and top border
    print("\n    1 2 3 | 4 5 6 | 7 8 9 ")
    print("  +-------+-------+-------+")
    for rowIdx in range(len(board)):
        # show empty (0) cells as underscores without mutating the board
        cells = ["_" if cell == 0 else cell for cell in board[rowIdx]]
        # render the row as three space-joined trios separated by " | ",
        # which matches the original format string exactly
        trios = [" ".join(str(c) for c in cells[k:k + 3]) for k in (0, 3, 6)]
        print(str(rowIdx + 1) + " | " + " | ".join(trios) + " |")
        # middle and bottom borders after every third row
        if (rowIdx + 1) % 3 == 0:
            print("  +-------+-------+-------+")
# savePuzzle() writes the contents of a sudoku puzzle out
# to a file in comma separated format
# Input: board; the square 2d puzzle (of integers) to write to a file
#        fileName; the name of the file to use for writing to
# Output: None; the file is written (and always closed, even on error —
#         the original leaked the handle if a write raised)
def savePuzzle(board, fileName):
    with open(fileName, "w") as ofp:
        for row in board:
            # join avoids the original build-then-trim of a trailing comma
            ofp.write(",".join(str(cell) for cell in row) + "\n")
# checkBoardFull() checks if the board is full
#
# Input: board; the square 2d puzzle of integers
# Output: full; True if no cell is empty (0), False otherwise
#         (the original header wrongly claimed it returned the solution)
def checkBoardFull(board):
    # every cell must be non-zero; all() short-circuits on the first empty
    # cell, unlike the original which always scanned the whole board
    return all(cell != 0 for row in board for cell in row)
# getValidString() gets a valid user input
#
# Input: options, prompt; list of valid options, prompt to display
# Output: choice; valid user choice
def getValidString(options, prompt):
    # keep prompting until the user picks one of the options
    while True:
        choice = input(prompt)
        if choice in options:
            return choice
        print("That is not one of the options.")
# getValidInt() gets a valid user integer
#
# Input: prompt; prompt to display
# Output: choice; valid user choice in the inclusive MIN_NUM..MAX_NUM range
def getValidInt(prompt):
    choice = int(input(prompt))
    # while the choice is not in the valid range
    while choice < MIN_NUM or choice > MAX_NUM:
        # original message said "greater than 1 and less than 9", which
        # contradicts the inclusive 1-9 range this loop actually accepts
        print("You must choose a number between 1 and 9.")
        choice = int(input(prompt))
    return choice
# makeMove() allows the user to place a number in a cell that
# meets Sudoku requirements
# Input: board, moves, correct, solution; the square 2d puzzle,
#        the list of moves, a boolean for whether correctness checking
#        is on, a square 2d list of the solution
# Output: None; lists passed by reference are appropriately altered
def makeMove(board, moves, correct, solution):
    # NOTE(review): moveMade is never read anywhere — candidate for removal
    moveMade = False
    # get a row
    numRow = getValidInt("Enter a row number (1-9): ")
    # get a column
    numColumn = getValidInt("Enter a column number (1-9): ")
    # get a number
    prompt = "Enter a number to put in cell ("+ str(numRow) +", "+ str(numColumn) +"): "
    num = getValidInt(prompt)
    # record the move 0-indexed: [row, column, number]
    move = [numRow - 1, numColumn - 1, num]
    print()
    # find possible errors for a move (row/column/nonette duplicates)
    errors = checkMove(board, numRow, numColumn, num)
    available = True
    # the target cell must currently be empty
    if board[numRow - 1][numColumn - 1] != 0:
        print("There's already a number there. Try again.")
        available = False
    # if correctness checking is on
    if correct and available:
        # perform correctness checking against the precomputed solution
        if correctCheck(move, solution):
            # check if it's a valid move
            if len(errors) == 0:
                board[numRow - 1][numColumn - 1] = num
                moves.append(move)
            # if it's not, print out the errors
            else:
                for i in range(len(errors)):
                    print(errors[i])
        # if the move isn't right and correctness checking is on
        # print out an error message
        else:
            msg = "OOPS! " + str(num) + " does not belong in position (" \
                  + str(numRow) + ", " + str(numColumn) + "): "
            print(msg)
    # if correctness checking is off and it's a valid move
    elif len(errors) == 0 and available:
        board[numRow - 1][numColumn - 1] = num
        moves.append(move)
    # checking off, cell free, but the move breaks a Sudoku rule
    elif available:
        for i in range(len(errors)):
            print(errors[i])
# checkMove() checks that a move is valid
#
# Input: board, rowNum, columnNum, num; the square 2d puzzle, the row where
#        the user is making a move (1-9), the column (1-9), the number
#        they wish to enter
# Output: errors; list of human-readable error strings, empty when the move
#         breaks no row, column or 3x3 nonette rule
def checkMove(board, rowNum, columnNum, num):
    errors = []
    # the row the move lands in
    row = list(board[rowNum - 1])
    # the column the move lands in
    column = [board[i][columnNum - 1] for i in range(len(board))]
    # top-left corner of the 3x3 nonette containing the move; computed
    # arithmetically — equivalent to the original BOX1/BOX2/BOX3 lookup
    # for all valid 1-9 inputs, without the table scans
    rowStart = 3 * ((rowNum - 1) // 3)
    columnStart = 3 * ((columnNum - 1) // 3)
    # (removed: commented-out dead code that re-checked cell occupancy here;
    # that check lives in makeMove)
    # check if the number is already in the nonette
    for i in range(rowStart, rowStart + 3):
        for j in range(columnStart, columnStart + 3):
            if board[i][j] == num:
                errors.append("The number " + str(num) +
                              " is already in that square.")
    # check if the number's already in the row
    if num in row:
        errors.append("The number " + str(num) + " is already in that row.")
    # check if the number's already in the column
    if num in column:
        errors.append("The number " + str(num) + " is already in that column.")
    # return the list of errors
    return errors
# correctCheck() performs correctness checking
#
# Input: move, solution; a [row, column, number] move list and the
#        2d puzzle solution
# Output: valid; boolean for whether or not it's a valid move
def correctCheck(move, solution):
    # the move is correct iff the solved board holds this number at that cell
    return solution[move[MOVE_ROW]][move[MOVE_COLUMN]] == move[MOVE_NUM]
# checkWin() compares the two boards to see if it has been solved
#
# Input: board, solution; the square 2d puzzle, the square 2d solution
# Output: win; boolean reporting if the user has won
def checkWin(board, solution):
    # the player has won exactly when every row matches the solved board
    for playerRow, solvedRow in zip(board, solution):
        if playerRow != solvedRow:
            return False
    return True
# makeNewBoard() makes a deep copy of the board
#
# Input: board; the square 2d puzzle (of integers)
# Output: copyBoard; the copy of the board
def makeNewBoard(board):
    # copy each row so edits to the new board never touch the original
    return [list(row) for row in board]
# solvePuzzle() solves the puzzle by recursive backtracking
#
# Input: solution, row, column; the square 2d puzzle (of integers) and the
#        cell to start from — both indices are recomputed below, so callers
#        conventionally pass 0, 0
# Output: solution; the solved puzzle, or [] when this branch has no solution
def solvePuzzle(solution, row, column):
    # work on a copy so failed branches never corrupt the caller's board
    solution = makeNewBoard(solution)
    # BASE CASE: If the board is full
    if checkBoardFull(solution):
        return solution
    # RECURSIVE CASE
    else:
        # Find the first open spot
        # NOTE(review): the row/column parameters are reset here, so the scan
        # always restarts from the top-left — correct, just extra work.
        row = 0
        column = 0
        while solution[row][column] != 0:
            # if at the end of the row
            if column == len(solution[0]) - 1:
                row += 1
                column = 0
            # otherwise in the middle of the row
            else:
                column += 1
        # loop through and try numbers 1-9
        for i in range(1, MAX_NUM + 1):
            # check if i is a valid move (checkMove takes 1-based indices)
            errors = checkMove(solution, (row + 1), (column + 1), i)
            if len(errors) == 0:
                # update board
                solution[row][column] = i
                # recursive call
                result = solvePuzzle(solution, row, column)
                if checkBoardFull(result) and len(result) != 0:
                    return result
        # no digit fits in this cell: signal failure to the caller
        result = []
        return result
def main():
    """Load a puzzle from a comma-separated file, then let the user either
    view the solution or play interactively (with optional correctness
    checking, saving and undo)."""
    # Create a 2d list of the puzzle; "with" closes the file even on a bad row
    # (the original left the handle open)
    fileName = input("Enter the file name of the puzzle you'd like to try: ")
    with open(fileName, "r") as puzzleFile:
        puzzleFileStrings = puzzleFile.readlines()
    puzzle = []
    for line in puzzleFileStrings:
        # each line is a comma-separated row of digits
        puzzle.append([int(cell) for cell in line.strip().split(SEPARATOR)])
    # display the board
    prettyPrint(puzzle)
    # solve the puzzle up front so both modes can use the solution
    puzzle2 = makeNewBoard(puzzle)
    solution = solvePuzzle(puzzle2, 0, 0)
    # ask the user if they want to play or just solve
    answer = input("Do you want to play the game (p) or just solve the puzzle (s): ")
    if answer == SOLVE:
        # display the solution
        prettyPrint(solution)
    elif answer == PLAY:
        # create a list to track the moves the user makes
        moves = []
        # ask if they want correctness checking
        # (renamed from 'correctCheck', which shadowed the module-level
        # correctCheck() function)
        useCorrectCheck = False
        correct = input("Would you like correctness checking (y/n): ")
        if correct == YES:
            useCorrectCheck = True
        end = False
        full = checkBoardFull(puzzle)
        # while the board is not full and the user doesn't quit:
        while not full and not end:
            # display the current board
            prettyPrint(puzzle)
            # Present the user with the menu choices
            options = [PLAY, SAVE, UNDO, QUIT]
            choice = getValidString(options, "play number (p), save (s), undo (u), quit (q): ")
            print()
            if choice == PLAY:
                # pass the correctness-checking flag straight through
                # (the original duplicated this call in an if/else)
                makeMove(puzzle, moves, useCorrectCheck, solution)
            elif choice == SAVE:
                fileName = input("Enter the file name you'd like to save to: ")
                savePuzzle(puzzle, fileName)
            elif choice == UNDO:
                if len(moves) == 0:
                    print("There are no moves to undo!")
                else:
                    # find the location of the last move
                    row = moves[len(moves) - 1][MOVE_ROW]
                    column = moves[len(moves) - 1][MOVE_COLUMN]
                    # change the place back to empty
                    puzzle[row][column] = 0
                    # remove the move from the list
                    num = moves[len(moves) - 1][MOVE_NUM]
                    msg = "Removed " + str(num) + " you played at position (" \
                          + str(row + 1) + ", " + str(column + 1) + ")."
                    print(msg)
                    moves.remove(moves[len(moves) - 1])
            elif choice == QUIT:
                end = True
                print("Good bye! Here is the final board: ")
            full = checkBoardFull(puzzle)
        prettyPrint(puzzle)
        if full:
            # the board is complete — report whether it matches the solution
            if checkWin(puzzle, solution):
                print("You win!")
            else:
                print("Sorry, you didn't solve it correctly.")
# Run the game only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
|
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from abc import ABCMeta, abstractmethod
from dataclasses import dataclass
from pathlib import Path
from textwrap import dedent
from typing import Any, Iterable, Optional, Tuple, Type, TypeVar
import pytest
from pants.base.specs import Specs
from pants.core.goals.fix import FixFilesRequest, FixTargetsRequest
from pants.core.goals.fmt import FmtFilesRequest, FmtTargetsRequest
from pants.core.goals.lint import (
AbstractLintRequest,
Lint,
LintFilesRequest,
LintResult,
LintSubsystem,
LintTargetsRequest,
Partitions,
lint,
)
from pants.core.util_rules.distdir import DistDir
from pants.core.util_rules.environments import EnvironmentNameRequest
from pants.core.util_rules.partitions import PartitionerType
from pants.engine.addresses import Address
from pants.engine.environment import EnvironmentName
from pants.engine.fs import PathGlobs, SpecsPaths, Workspace
from pants.engine.internals.native_engine import EMPTY_SNAPSHOT, Snapshot
from pants.engine.rules import QueryRule
from pants.engine.target import Field, FieldSet, FilteredTargets, MultipleSourcesField, Target
from pants.engine.unions import UnionMembership
from pants.option.option_types import SkipOption
from pants.option.subsystem import Subsystem
from pants.testutil.option_util import create_goal_subsystem
from pants.testutil.rule_runner import MockGet, RuleRunner, mock_console, run_rule_with_mocks
from pants.util.logging import LogLevel
from pants.util.meta import classproperty
# Type variable constraining the lint-request classes accepted by run_lint_rule.
_LintRequestT = TypeVar("_LintRequestT", bound=AbstractLintRequest)
# Minimal stand-in source field for the mock target below.
class MockMultipleSourcesField(MultipleSourcesField):
    pass
# A required field lets these tests exercise field-set construction with a
# non-default, mandatory field.
class MockRequiredField(Field):
    alias = "required"
    required = True
# The only target type used by the tests in this module.
class MockTarget(Target):
    alias = "mock_target"
    core_fields = (MockMultipleSourcesField, MockRequiredField)
# Field set consumed by every mock linter/formatter/fixer request below.
@dataclass(frozen=True)
class MockLinterFieldSet(FieldSet):
    required_fields = (MultipleSourcesField,)
    sources: MultipleSourcesField
    required: MockRequiredField
# Abstract base for all mock lint requests: each concrete subclass supplies a
# canned exit code and a way to turn partition elements into a LintResult.
class MockLintRequest(AbstractLintRequest, metaclass=ABCMeta):
    @staticmethod
    @abstractmethod
    def exit_code(_: Iterable[Address]) -> int:
        pass
    @classmethod
    @abstractmethod
    def get_lint_result(cls, elements: Iterable) -> LintResult:
        pass
class MockLintTargetsRequest(MockLintRequest, LintTargetsRequest):
    field_set_type = MockLinterFieldSet
    @classmethod
    def get_lint_result(cls, field_sets: Iterable[MockLinterFieldSet]) -> LintResult:
        # The exit code is a pure function of the batched field sets' addresses,
        # so tests can force success/failure by choosing target names.
        addresses = [field_set.address for field_set in field_sets]
        return LintResult(cls.exit_code(addresses), "", "", cls.tool_name)
# Always-passing linter (exit code 0).
class SuccessfulRequest(MockLintTargetsRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "Successful Linter"
    @classproperty
    def tool_id(cls) -> str:
        return "successfullinter"
    @staticmethod
    def exit_code(_: Iterable[Address]) -> int:
        return 0
# Always-failing linter (exit code 1).
class FailingRequest(MockLintTargetsRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "Failing Linter"
    @classproperty
    def tool_id(cls) -> str:
        return "failinglinter"
    @staticmethod
    def exit_code(_: Iterable[Address]) -> int:
        return 1
# Fails (exit 127) only when a target named "bad" is in the batch.
class ConditionallySucceedsRequest(MockLintTargetsRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "Conditionally Succeeds Linter"
    @classproperty
    def tool_id(cls) -> str:
        return "conditionallysucceedslinter"
    @staticmethod
    def exit_code(addresses: Iterable[Address]) -> int:
        if any(address.target_name == "bad" for address in addresses):
            return 127
        return 0
# Skipped by the partitioner (see mock_target_partitioner), so it never runs.
class SkippedRequest(MockLintTargetsRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "Skipped Linter"
    @classproperty
    def tool_id(cls) -> str:
        return "skippedlinter"
    @staticmethod
    def exit_code(_) -> int:
        return 0
# A field no mock target registers, so InvalidRequest matches no targets and
# the lint goal must no-op for it (see test_invalid_target_noops).
class InvalidField(MultipleSourcesField):
    pass
class InvalidFieldSet(MockLinterFieldSet):
    required_fields = (InvalidField,)
class InvalidRequest(MockLintTargetsRequest):
    field_set_type = InvalidFieldSet
    @classproperty
    def tool_name(cls) -> str:
        return "Invalid Linter"
    @classproperty
    def tool_id(cls) -> str:
        return "invalidlinter"
    @staticmethod
    def exit_code(_: Iterable[Address]) -> int:
        return -1
def _all_lint_requests() -> Iterable[type[MockLintRequest]]:
    """Yield every transitive subclass of MockLintRequest, depth-first."""
    pending = [MockLintRequest]
    while pending:
        current = pending.pop()
        children = current.__subclasses__()
        pending.extend(children)
        yield from children
def mock_target_partitioner(
    request: MockLintTargetsRequest.PartitionRequest,
) -> Partitions[MockLinterFieldSet, Any]:
    """Mock partitioner rule: one partition of all inputs, or none if skipped."""
    if type(request) is SkippedRequest.PartitionRequest:
        return Partitions()
    # Requests that need a snapshot operate on raw file paths, so their
    # partition elements are source globs rather than whole field sets.
    operates_on_paths = {
        getattr(cls, "PartitionRequest"): cls._requires_snapshot for cls in _all_lint_requests()
    }[type(request)]
    if operates_on_paths:
        return Partitions.single_partition(fs.sources.globs for fs in request.field_sets)
    return Partitions.single_partition(request.field_sets)
# File-based (non-target) linter; always succeeds.
class MockFilesRequest(MockLintRequest, LintFilesRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "Files Linter"
    @classproperty
    def tool_id(cls) -> str:
        return "fileslinter"
    @classmethod
    def get_lint_result(cls, files: Iterable[str]) -> LintResult:
        return LintResult(0, "", "", cls.tool_name)
# Partitioner for file-based lint requests: one partition of all files.
def mock_file_partitioner(request: MockFilesRequest.PartitionRequest) -> Partitions[str, Any]:
    return Partitions.single_partition(request.files)
# Dispatch a batch back to the request class that produced it.
def mock_lint_partition(request: Any) -> LintResult:
    request_type = {cls.Batch: cls for cls in _all_lint_requests()}[type(request)]
    return request_type.get_lint_result(request.elements)
# Formatters are also run (in check-only mode) by the lint goal; these mocks
# cover the success, failure, and BUILD-file cases of that path.
class MockFmtRequest(MockLintRequest, FmtTargetsRequest):
    field_set_type = MockLinterFieldSet
class SuccessfulFormatter(MockFmtRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "Successful Formatter"
    @classproperty
    def tool_id(cls) -> str:
        return "successfulformatter"
    @classmethod
    def get_lint_result(cls, field_sets: Iterable[MockLinterFieldSet]) -> LintResult:
        return LintResult(0, "", "", cls.tool_name)
class FailingFormatter(MockFmtRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "Failing Formatter"
    @classproperty
    def tool_id(cls) -> str:
        return "failingformatter"
    @classmethod
    def get_lint_result(cls, field_sets: Iterable[MockLinterFieldSet]) -> LintResult:
        return LintResult(1, "", "", cls.tool_name)
# A file-based formatter (e.g. for BUILD files); always succeeds.
class BuildFileFormatter(MockLintRequest, FmtFilesRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "Bob The BUILDer"
    @classproperty
    def tool_id(cls) -> str:
        return "bob"
    @classmethod
    def get_lint_result(cls, files: Iterable[str]) -> LintResult:
        return LintResult(0, "", "", cls.tool_name)
# Fixers mirror the formatter mocks above, for the `fix` integration of lint.
class MockFixRequest(MockLintRequest, FixTargetsRequest):
    field_set_type = MockLinterFieldSet
class SuccessfulFixer(MockFixRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "Successful Fixer"
    @classproperty
    def tool_id(cls) -> str:
        return "successfulfixer"
    @classmethod
    def get_lint_result(cls, field_sets: Iterable[MockLinterFieldSet]) -> LintResult:
        return LintResult(0, "", "", cls.tool_name)
class FailingFixer(MockFixRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "Failing Fixer"
    @classproperty
    def tool_id(cls) -> str:
        return "failingfixer"
    @classmethod
    def get_lint_result(cls, field_sets: Iterable[MockLinterFieldSet]) -> LintResult:
        return LintResult(1, "", "", cls.tool_name)
# A file-based fixer; always succeeds.
class BuildFileFixer(MockLintRequest, FixFilesRequest):
    @classproperty
    def tool_name(cls) -> str:
        return "BUILD Annually"
    @classproperty
    def tool_id(cls) -> str:
        return "buildannually"
    @classmethod
    def get_lint_result(cls, files: Iterable[str]) -> LintResult:
        return LintResult(0, "", "", cls.tool_name)
@pytest.fixture
def rule_runner() -> RuleRunner:
    """Provide a bare RuleRunner; the lint rule itself is driven with mocks."""
    return RuleRunner()
def make_target(address: Optional[Address] = None) -> Target:
    """Build a MockTarget with its required field populated."""
    return MockTarget(
        {MockRequiredField.alias: "present"}, address or Address("", target_name="tests")
    )
def run_lint_rule(
    rule_runner: RuleRunner,
    *,
    lint_request_types: Iterable[Type[_LintRequestT]],
    targets: list[Target],
    batch_size: int = 128,
    only: list[str] | None = None,
    skip_formatters: bool = False,
    skip_fixers: bool = False,
) -> Tuple[int, str]:
    """Run the `lint` goal rule with all engine interactions mocked out.

    Returns the goal's exit code and everything written to stderr. Asserts
    that nothing was written to stdout.
    """
    # Register each request type (and its Batch/PartitionRequest inner types)
    # exactly as the real union-membership machinery would.
    union_membership = UnionMembership(
        {
            AbstractLintRequest: lint_request_types,
            AbstractLintRequest.Batch: [rt.Batch for rt in lint_request_types],
            LintTargetsRequest.PartitionRequest: [
                rt.PartitionRequest
                for rt in lint_request_types
                if issubclass(rt, LintTargetsRequest)
            ],
            LintFilesRequest.PartitionRequest: [
                rt.PartitionRequest for rt in lint_request_types if issubclass(rt, LintFilesRequest)
            ],
        }
    )
    lint_subsystem = create_goal_subsystem(
        LintSubsystem,
        batch_size=batch_size,
        only=only or [],
        skip_formatters=skip_formatters,
        skip_fixers=skip_fixers,
    )
    with mock_console(rule_runner.options_bootstrapper) as (console, stdio_reader):
        result: Lint = run_rule_with_mocks(
            lint,
            rule_args=[
                console,
                Workspace(rule_runner.scheduler, _enforce_effects=False),
                Specs.empty(),
                lint_subsystem,
                union_membership,
                DistDir(relpath=Path("dist")),
            ],
            # Each MockGet intercepts one engine request the lint rule makes.
            mock_gets=[
                MockGet(
                    output_type=Partitions,
                    input_types=(LintTargetsRequest.PartitionRequest,),
                    mock=mock_target_partitioner,
                ),
                MockGet(
                    output_type=EnvironmentName,
                    input_types=(EnvironmentNameRequest,),
                    mock=lambda _: EnvironmentName(None),
                ),
                MockGet(
                    output_type=Partitions,
                    input_types=(LintFilesRequest.PartitionRequest,),
                    mock=mock_file_partitioner,
                ),
                MockGet(
                    output_type=LintResult,
                    input_types=(AbstractLintRequest.Batch,),
                    mock=mock_lint_partition,
                ),
                MockGet(
                    output_type=FilteredTargets,
                    input_types=(Specs,),
                    mock=lambda _: FilteredTargets(tuple(targets)),
                ),
                MockGet(
                    output_type=SpecsPaths,
                    input_types=(Specs,),
                    mock=lambda _: SpecsPaths(("f.txt", "BUILD"), ()),
                ),
                MockGet(
                    output_type=Snapshot,
                    input_types=(PathGlobs,),
                    mock=lambda _: EMPTY_SNAPSHOT,
                ),
            ],
            union_membership=union_membership,
        )
    # The summary must go to stderr only.
    assert not stdio_reader.get_stdout()
    return result.exit_code, stdio_reader.get_stderr()
def test_invalid_target_noops(rule_runner: RuleRunner) -> None:
    """A request whose field set matches no target must produce no output."""
    exit_code, stderr = run_lint_rule(
        rule_runner, lint_request_types=[InvalidRequest], targets=[make_target()]
    )
    assert exit_code == 0
    assert stderr == ""
def test_summary(rule_runner: RuleRunner) -> None:
    """Test that we render the summary correctly.
    This tests that we:
    * Merge multiple results belonging to the same linter (`--per-file-caching`).
    * Decide correctly between skipped, failed, and succeeded.
    """
    good_address = Address("", target_name="good")
    bad_address = Address("", target_name="bad")
    request_types = [
        ConditionallySucceedsRequest,
        FailingRequest,
        SkippedRequest,
        SuccessfulRequest,
        SuccessfulFormatter,
        FailingFormatter,
        BuildFileFormatter,
        SuccessfulFixer,
        FailingFixer,
        BuildFileFixer,
        MockFilesRequest,
    ]
    targets = [make_target(good_address), make_target(bad_address)]
    # Scenario 1: all tools enabled. The skipped linter must not appear.
    exit_code, stderr = run_lint_rule(
        rule_runner,
        lint_request_types=request_types,
        targets=targets,
    )
    assert exit_code == FailingRequest.exit_code([bad_address])
    assert stderr == dedent(
        """\
            ✓ BUILD Annually succeeded.
            ✓ Bob The BUILDer succeeded.
            ✕ Conditionally Succeeds Linter failed.
            ✕ Failing Fixer failed.
            ✕ Failing Formatter failed.
            ✕ Failing Linter failed.
            ✓ Files Linter succeeded.
            ✓ Successful Fixer succeeded.
            ✓ Successful Formatter succeeded.
            ✓ Successful Linter succeeded.

            (One or more formatters failed. Run `pants fmt` to fix.)
            (One or more fixers failed. Run `pants fix` to fix.)
            """
    )
    # Scenario 2: restrict to a subset of tools with `--only`.
    exit_code, stderr = run_lint_rule(
        rule_runner,
        lint_request_types=request_types,
        targets=targets,
        only=[
            FailingRequest.tool_id,
            MockFilesRequest.tool_id,
            FailingFormatter.tool_id,
            FailingFixer.tool_id,
            BuildFileFormatter.tool_id,
            BuildFileFixer.tool_id,
        ],
    )
    assert stderr == dedent(
        """\
            ✓ BUILD Annually succeeded.
            ✓ Bob The BUILDer succeeded.
            ✕ Failing Fixer failed.
            ✕ Failing Formatter failed.
            ✕ Failing Linter failed.
            ✓ Files Linter succeeded.

            (One or more formatters failed. Run `pants fmt` to fix.)
            (One or more fixers failed. Run `pants fix` to fix.)
            """
    )
    # Scenario 3: skip both formatters and fixers; only pure linters remain.
    exit_code, stderr = run_lint_rule(
        rule_runner,
        lint_request_types=request_types,
        targets=targets,
        skip_formatters=True,
        skip_fixers=True,
    )
    assert stderr == dedent(
        """\
            ✕ Conditionally Succeeds Linter failed.
            ✕ Failing Linter failed.
            ✓ Files Linter succeeded.
            ✓ Successful Linter succeeded.
            """
    )
    # Scenario 4: skip only fixers.
    exit_code, stderr = run_lint_rule(
        rule_runner,
        lint_request_types=request_types,
        targets=targets,
        skip_fixers=True,
    )
    assert stderr == dedent(
        """\
            ✓ Bob The BUILDer succeeded.
            ✕ Conditionally Succeeds Linter failed.
            ✕ Failing Formatter failed.
            ✕ Failing Linter failed.
            ✓ Files Linter succeeded.
            ✓ Successful Formatter succeeded.
            ✓ Successful Linter succeeded.

            (One or more formatters failed. Run `pants fmt` to fix.)
            """
    )
    # Scenario 5: skip only formatters.
    exit_code, stderr = run_lint_rule(
        rule_runner,
        lint_request_types=request_types,
        targets=targets,
        skip_formatters=True,
    )
    assert stderr == dedent(
        """\
            ✓ BUILD Annually succeeded.
            ✕ Conditionally Succeeds Linter failed.
            ✕ Failing Fixer failed.
            ✕ Failing Linter failed.
            ✓ Files Linter succeeded.
            ✓ Successful Fixer succeeded.
            ✓ Successful Linter succeeded.

            (One or more fixers failed. Run `pants fix` to fix.)
            """
    )
def test_default_single_partition_partitioner() -> None:
    """The DEFAULT_SINGLE_PARTITION partitioner groups all field sets into one
    partition, and honors the tool's `skip` option."""
    class KitchenSubsystem(Subsystem):
        options_scope = "kitchen"
        help = "a cookbook might help"
        name = "The Kitchen"
        skip = SkipOption("lint")
    class LintKitchenRequest(LintTargetsRequest):
        field_set_type = MockLinterFieldSet
        tool_subsystem = KitchenSubsystem
        partitioner_type = PartitionerType.DEFAULT_SINGLE_PARTITION
    rules = [
        *LintKitchenRequest._get_rules(),
        QueryRule(Partitions, [LintKitchenRequest.PartitionRequest]),
    ]
    rule_runner = RuleRunner(rules=rules)
    field_sets = (
        MockLinterFieldSet(
            Address("knife"),
            MultipleSourcesField(["knife"], Address("knife")),
            MockRequiredField("present", Address("")),
        ),
        MockLinterFieldSet(
            Address("bowl"),
            MultipleSourcesField(["bowl"], Address("bowl")),
            MockRequiredField("present", Address("")),
        ),
    )
    partitions = rule_runner.request(Partitions, [LintKitchenRequest.PartitionRequest(field_sets)])
    # Without skipping: exactly one partition containing every field set.
    assert len(partitions) == 1
    assert partitions[0].elements == field_sets
    # With --kitchen-skip: no partitions at all.
    rule_runner.set_options(["--kitchen-skip"])
    partitions = rule_runner.request(Partitions, [LintKitchenRequest.PartitionRequest(field_sets)])
    assert partitions == Partitions([])
@pytest.mark.parametrize("batch_size", [1, 32, 128, 1024])
def test_batched(rule_runner: RuleRunner, batch_size: int) -> None:
    """The summary must be identical regardless of the batch size used."""
    exit_code, stderr = run_lint_rule(
        rule_runner,
        lint_request_types=[
            ConditionallySucceedsRequest,
            FailingRequest,
            SkippedRequest,
            SuccessfulRequest,
        ],
        # 512 targets guarantees multiple batches at every parametrized size.
        targets=[make_target(Address("", target_name=f"good{i}")) for i in range(0, 512)],
        batch_size=batch_size,
    )
    assert exit_code == FailingRequest.exit_code([])
    assert stderr == dedent(
        """\
            ✓ Conditionally Succeeds Linter succeeded.
            ✕ Failing Linter failed.
            ✓ Successful Linter succeeded.
            """
    )
def test_streaming_output_success() -> None:
    """A zero exit code logs at INFO and renders as 'succeeded'."""
    result = LintResult(0, "stdout", "stderr", linter_name="linter")
    assert result.level() == LogLevel.INFO
    assert result.message() == dedent(
        """\
            linter succeeded.
            stdout
            stderr

            """
    )
def test_streaming_output_failure() -> None:
    """A non-zero exit code logs at ERROR and includes the exit code."""
    result = LintResult(18, "stdout", "stderr", linter_name="linter")
    assert result.level() == LogLevel.ERROR
    assert result.message() == dedent(
        """\
            linter failed (exit code 18).
            stdout
            stderr

            """
    )
def test_streaming_output_partitions() -> None:
    """A partition description is included on its own line when present."""
    result = LintResult(
        21, "stdout", "stderr", linter_name="linter", partition_description="ghc9.2"
    )
    assert result.level() == LogLevel.ERROR
    assert result.message() == dedent(
        """\
            linter failed (exit code 21).
            Partition: ghc9.2
            stdout
            stderr

            """
    )
|
from sys import argv
import random
# NOTE(review): `script` captures argv but is never used later in this chunk.
script = argv
# Total number of questions available in the pool (user-supplied at startup).
max_q = int(input("max_q available-->"))
# Shared question file handle, opened in append mode and never closed.
# NOTE(review): the name `file` shadows a historical builtin — consider renaming.
file=open("quess.txt", 'a')
def entry():
    """Top-level menu: face questions (1), assign answers (2), or exit."""
    global ques_no
    print("""Enter the option of operation which you want to do
    1.face questions 2.assign answers""")
    a = int(input("..."))
    if a == 1:
        # How many questions the user wants to be quizzed on this run.
        ques_no = int(input("Enter the no.of questions you want to face-->"))
        return file_write()
    elif a==2:
        assign_ans()
    else:
        # Any other choice terminates the program.
        exit(0)
def file_write():
    """Reset the question file and build the pool of question numbers 1..max_q.

    Side effects: empties quess.txt, rebinds the global `numbers`, then hands
    control to gettin_q().
    """
    global numbers
    global ques_no
    # Idiomatic replacement for the original manual counter loop.
    numbers = list(range(1, max_q + 1))  # question numbers are 1-based
    file.truncate(0)  # clear any previous contents of quess.txt
    print("DONE!!!")
    gettin_q()
def gettin_q():
    """Pick ques_no distinct random question numbers and pass them on.

    Bug fix: the index was hard-coded as random.randint(0, 5), which raises
    IndexError whenever max_q < 6 and can never select questions beyond the
    sixth when max_q > 6. We now sample over the whole pool. The original also
    counted duplicate draws toward ques_no and de-duplicated afterwards, so it
    could silently return fewer questions than requested; rejecting duplicates
    up front yields exactly ques_no distinct numbers.
    NOTE(review): assumes ques_no <= max_q, as the original implicitly did.
    """
    global numbers
    global used
    used = []
    while len(used) < ques_no:
        pick = numbers[random.randrange(len(numbers))]
        if pick not in used:
            used.append(pick)
    used.sort()
    print(used)
    gettin_a()
def gettin_a():
    """Reveal the stored answer for each selected question, one per keypress.

    Fix: the original opened ans.txt without ever closing it; a context
    manager now releases the handle promptly.
    """
    global used
    with open('ans.txt') as ans_file:
        ans = ans_file.read().splitlines()
    for q in used:
        input("To get the ans press enter:--->")
        print(ans[q - 1])  # question numbers are 1-based, the file is 0-based
def assign_ans():
    """Prompt for and store one answer per question (1..max_q) in ans.txt.

    Fixes: the file handle is now closed via a context manager, and the
    redundant truncate(0) is dropped — mode 'w' already truncates.
    """
    with open('ans.txt', 'w') as ans_file:
        for q in range(1, max_q + 1):
            print(f"Enter the ans for {q} ans")
            answ = input(">")
            ans_file.write(answ)
            ans_file.write('\n')
# Only launch the menu when run as a script.
# NOTE(review): the module still prompts for max_q at import time (top-level
# input call above); moving that under this guard would change global setup.
if __name__ == "__main__":
    entry()
|
# pylint: disable=g-bad-file-header
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Various function for graph editing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.graph_editor import select
from tensorflow.contrib.graph_editor import subgraph
from tensorflow.contrib.graph_editor import util
from tensorflow.python.ops import array_ops as tf_array_ops
def _check_graphs(*args):
"""Check that all the element in args belong to the same graph.
Args:
*args: a list of object with a obj.graph property.
Raises:
ValueError: if all the elements do not belong to the same graph.
"""
graph = None
for i, sgv in enumerate(args):
if graph is None and sgv.graph is not None:
graph = sgv.graph
elif sgv.graph is not None and sgv.graph != graph:
raise ValueError("Argument[{}]: Wrong graph!".format(i))
def _reroute_sgv_remap(sgv0, sgv1, mode):
  """Remap in place the inputs of two subgraph views to mimic the reroute.
  This function is meant to used by reroute_inputs only.
  Args:
    sgv0: the first subgraph to have its inputs remapped.
    sgv1: the second subgraph to have its inputs remapped.
    mode: reroute mode, see util.reroute_ts(...).
  Raises:
    TypeError: if svg0 or svg1 are not SubGraphView.
    ValueError: if sgv0 and sgv1 do not belong to the same graph.
  """
  a2b, b2a = util.RerouteMode.check(mode)
  if not isinstance(sgv0, subgraph.SubGraphView):
    raise TypeError("Expected a SubGraphView, got {}".format(type(sgv0)))
  if not isinstance(sgv1, subgraph.SubGraphView):
    raise TypeError("Expected a SubGraphView, got {}".format(type(sgv1)))
  _check_graphs(sgv0, sgv1)
  # Work on copies so the originals are only mutated at the very end, once the
  # whole remap has been computed.
  sgv0_ = sgv0.copy()
  sgv1_ = sgv1.copy()
  # pylint: disable=protected-access
  if a2b and b2a:
    # Swap mode: exchange input and passthrough tensors wholesale.
    (sgv0_._input_ts, sgv1_._input_ts) = (
        sgv1_._input_ts, sgv0_._input_ts)
    (sgv0_._passthrough_ts, sgv1_._passthrough_ts) = (
        sgv1_._passthrough_ts, sgv0_._passthrough_ts)
  elif a2b:
    # One-directional: copy (slice to avoid aliasing) from sgv0 into sgv1.
    sgv1_._input_ts = sgv0_._input_ts[:]
    sgv1_._passthrough_ts = sgv0_._passthrough_ts[:]
  elif b2a:
    sgv0_._input_ts = sgv1_._input_ts[:]
    sgv0_._passthrough_ts = sgv1_._passthrough_ts[:]
  # Update the passthrough outputs as well.
  def update_passthrough_outputs(a, b):
    # Outputs of b that were passthroughs of a must now point at b's
    # corresponding (remapped) input tensor.
    for i, t in enumerate(b._output_ts):
      if t in a._passthrough_ts:
        ii = a._input_ts.index(t)
        b._output_ts[i] = b._input_ts[ii]
  if a2b: update_passthrough_outputs(sgv0_, sgv1_)
  if b2a: update_passthrough_outputs(sgv1_, sgv0_)
  # in-place
  sgv0._assign_from(sgv0_)
  sgv1._assign_from(sgv1_)
def reroute_inputs(sgv0, sgv1, mode):
  """Re-route all the inputs of two subgraphs.
  Args:
    sgv0: the first subgraph to have its inputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    sgv1: the second subgraph to have its inputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    mode: reroute mode, see util.reroute_ts(...).
  Returns:
    Two new subgraph views with their inputs swapped.
      Note that sgv0 and sgv1 are also modified in place.
  Raises:
    StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv0 = subgraph.make_view(sgv0)
  sgv1 = subgraph.make_view(sgv1)
  _check_graphs(sgv0, sgv1)
  can_modify = sgv0.ops + sgv1.ops
  # also allow consumers of passthrough to be modified:
  can_modify += select.get_consuming_ops(sgv0.passthroughs)
  can_modify += select.get_consuming_ops(sgv1.passthroughs)
  # Rewire the actual graph tensors, then fix up the views' bookkeeping.
  util.reroute_ts(sgv0.inputs, sgv1.inputs, mode, can_modify=can_modify)
  _reroute_sgv_remap(sgv0, sgv1, mode)
  return sgv0, sgv1
# Thin convenience wrappers around reroute_inputs for each RerouteMode.
def swap_inputs(sgv0, sgv1):
  """Swap all the inputs of sgv0 and sgv1 (see reroute_inputs)."""
  return reroute_inputs(sgv0, sgv1, util.RerouteMode.swap)
def reroute_a2b_inputs(sgv0, sgv1):
  """Re-route all the inputs of sgv0 to sgv1 (see reroute_inputs)."""
  return reroute_inputs(sgv0, sgv1, util.RerouteMode.a2b)
def reroute_b2a_inputs(sgv0, sgv1):
  """Re-route all the inputs of sgv1 to sgv0 (see reroute_inputs)."""
  return reroute_inputs(sgv0, sgv1, util.RerouteMode.b2a)
def reroute_outputs(sgv0, sgv1, mode):
  """Re-route all the outputs of two operations.
  Args:
    sgv0: the first subgraph to have its outputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    sgv1: the second subgraph to have its outputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    mode: reroute mode, see util.reroute_ts(...).
  Returns:
    Two new subgraph views with their outputs swapped.
      Note that sgv0 and sgv1 are also modified in place.
  Raises:
    StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv0 = subgraph.make_view(sgv0)
  sgv1 = subgraph.make_view(sgv1)
  _check_graphs(sgv0, sgv1)
  # Unlike reroute_inputs, ops inside the views themselves must NOT be rewired.
  cannot_modify = sgv0.ops + sgv1.ops
  util.reroute_ts(sgv0.outputs, sgv1.outputs, mode, cannot_modify=cannot_modify)
  return sgv0, sgv1
# Thin convenience wrappers around reroute_outputs for each RerouteMode.
def swap_outputs(sgv0, sgv1):
  """Swap all the outputs of sgv0 and sgv1 (see reroute_outputs)."""
  return reroute_outputs(sgv0, sgv1, util.RerouteMode.swap)
def reroute_a2b_outputs(sgv0, sgv1):
  """Re-route all the outputs of sgv0 to sgv1 (see reroute_outputs)."""
  return reroute_outputs(sgv0, sgv1, util.RerouteMode.a2b)
def reroute_b2a_outputs(sgv0, sgv1):
  """Re-route all the outputs of sgv1 to sgv0 (see reroute_outputs)."""
  return reroute_outputs(sgv0, sgv1, util.RerouteMode.b2a)
def reroute(sgv0, sgv1, mode):
  """Re-route both the inputs and the outputs of the two subgraph views.
  This involves swapping all the inputs/ouputs of the two subgraph views.
  Args:
    sgv0: the first subgraph to be swapped. This argument is converted to a
      subgraph using the same rules than the function subgraph.make_view.
    sgv1: the second subgraph to be swapped. This argument is converted to a
      subgraph using the same rules than the function subgraph.make_view.
    mode: reroute mode, see util.reroute_ts(...).
  Returns:
    Two new subgraph views with their outputs and inputs swapped.
      Note that sgv0 and sgv1 are also modified in place.
  Raises:
    StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  # Outputs first, then inputs; both helpers mutate sgv0/sgv1 in place.
  reroute_outputs(sgv0, sgv1, mode)
  reroute_inputs(sgv0, sgv1, mode)
  return sgv0, sgv1
# Thin convenience wrappers around reroute for each RerouteMode.
def swap(sgv0, sgv1):
  """Swap the inputs and outputs of sgv1 to sgv0 (see reroute)."""
  return reroute(sgv0, sgv1, util.RerouteMode.swap)
def reroute_a2b(sgv0, sgv1):
  """Re-route the inputs and outputs of sgv0 to sgv1 (see reroute_outputs)."""
  return reroute(sgv0, sgv1, util.RerouteMode.a2b)
def reroute_b2a(sgv0, sgv1):
  """Re-route the inputs and outputs of sgv1 to sgv0 (see reroute_outputs)."""
  return reroute(sgv0, sgv1, util.RerouteMode.b2a)
def detach_inputs(sgv):
  """Detach the inputs of a subgraph view.
  Args:
    sgv: the subgraph view to be detached. This argument is converted to a
      subgraph using the same rules than the function subgraph.make_view.
  Returns:
    A new subgraph view of the detached subgraph.
      Note that sgv is also modified in place.
  Raises:
    StandardError: if sgv cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv = subgraph.make_view(sgv)
  with sgv.graph.as_default():
    # One placeholder per input tensor, matching its dtype, to stand in for
    # the detached producers.
    input_placeholders = [
        tf_array_ops.placeholder(dtype=input_t.dtype,
                                 name=util.placeholder_name(input_t))
        for input_t in sgv.inputs
    ]
  return swap_inputs(sgv, input_placeholders)
def detach_outputs(sgv):
  """Detach the outputs of a subgraph view.
  Args:
    sgv: the subgraph view to be detached. This argument is converted to a
      subgraph using the same rules than the function subgraph.make_view.
  Returns:
    A new subgraph view of the detached subgraph.
      Note that sgv is also modified in place.
  Raises:
    StandardError: if sgv cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv = subgraph.make_view(sgv)
  # only select outputs with consumers
  sgv_ = sgv.remap_outputs([output_id
                            for output_id, output_t in enumerate(sgv.outputs)
                            if output_t.consumers()])
  # create consumer subgraph and remap
  consumers_sgv = subgraph.SubGraphView(sgv_.consumers())
  consumers_sgv = consumers_sgv.remap_inputs(
      [input_id for input_id, input_t in enumerate(consumers_sgv.inputs)
       if input_t in sgv_.outputs])
  with sgv_.graph.as_default():
    # One placeholder per consumed output, to feed the detached consumers.
    output_placeholders = [
        util.make_placeholder_from_tensor(input_t)
        for input_t in consumers_sgv.inputs
    ]
  return swap_outputs(sgv_, output_placeholders)
def detach(sgv):
  """Detach both the inputs and the outputs of a subgraph view.
  Args:
    sgv: the subgraph view to be detached. This argument is converted to a
      subgraph using the same rules than the function subgraph.make_view.
  Returns:
    A new subgraph view of the detached subgraph.
      Note that sgv is also modified in place.
  Raises:
    StandardError: if sgv cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  # Each call mutates sgv in place; we keep only the placeholder views.
  _, detached_inputs = detach_inputs(sgv)
  _, detached_outputs = detach_outputs(sgv)
  return sgv, detached_inputs, detached_outputs
def connect(sgv0, sgv1, disconnect_first=False):
  """Connect the outputs of sgv0 to the inputs of sgv1.
  Args:
    sgv0: the first subgraph to have its outputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    sgv1: the second subgraph to have its outputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    disconnect_first: if True the current outputs of sgv0 are disconnected.
  Returns:
    Two new subgraph views (now connected). sgv0 and svg1 are also modified
      in place.
  Raises:
    StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv0 = subgraph.make_view(sgv0)
  sgv1 = subgraph.make_view(sgv1)
  _check_graphs(sgv0, sgv1)
  if disconnect_first:
    detach_outputs(sgv0)
  # Wrap sgv0's outputs as a passthrough view so they can feed sgv1's inputs.
  sgv0_outputs = subgraph.SubGraphView(passthrough_ts=sgv0.outputs)
  reroute_a2b_inputs(sgv0_outputs, sgv1)
  return sgv0, sgv1
def remove(sgv, reconnect_after=False):
  """Remove sgv and optionally reconnect its inputs and outputs.
  Args:
    sgv: the subgraph view to be removed. This argument is converted to a
      subgraph using the same rules than the function subgraph.make_view.
    reconnect_after: if False, the inputs and outputs of sgv are not
      reconnected after the removal.
  Returns:
    A new subgraph view of the removed subgraph.
      Note that sgv is also modified in place.
  Raises:
    StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv = subgraph.make_view(sgv)
  # Reconnection only makes sense if inputs and outputs are type-compatible.
  util.check_ts_compatibility(sgv.inputs, sgv.outputs)
  sgv, detached_inputs, detached_outputs = detach(sgv)
  if reconnect_after:
    connect(detached_inputs, detached_outputs)
  return sgv
|
from gi.repository import GObject
import xml
from Coordinates import Coordinates
class Action(GObject.Object):
#__metaclass__ = abc.ABCMeta
__gsignals__ = {
"abstract_signal" : (GObject.SIGNAL_RUN_FIRST, GObject.TYPE_NONE, ( GObject.TYPE_OBJECT,))
}
def __init__(self):
GObject.Object.__init__(self)
self.props = {}
self.props_var = {}
self.name = None
self.coords = Coordinates()
def get_properties(self):
"""Get Dictionary containing propserties
Variable value is returned if exists else concrete is returned"""
props = {}
for p in self.props.keys():
if self.props_var.has_key(p) and self.props_var[p] != None:
props[p] = self.props_var[p]
else:
props[p] = self.props[p]
return props
def get_var_properties(self):
"""Returns variable properties of the observation"""
props = {}
for p in self.props_var.keys():
if self.props_var[p] != None:
props[p] = self.props_var[p]
else:
props[p] = self.props[p]
return props
def get_concrete_properties(self):
"""Returns concrete properties, having concrete value not generalised"""
props = {}
for p in self.props.keys():
if self.props[p] != None:
props[p] = self.props[p]
else:
props[p] = self.props_var[p]
return props
def equals(self, a2,ignore = False):
"""Returns True if two actions are equal"""
if self.name != a2.name:
return False
props = self.get_concrete_properties(); props2 = a2.get_concrete_properties()
for p2 in props2.keys():
if not(props.has_key(p2)):
return False
if (type(props[p2]) == type(props2[p2]) == type(float())):
if abs(abs(props[p2]) - abs(props2[p2])) > 0:
return False
elif (props[p2] != props2[p2]):
return False
if ignore:
return True
if not self.coords.equals(a2.coords):
return False
return True
def similar(self, a2):
"""Returns True if two actions have same name"""
if a2.name != self.name:
return False
props = self.get_concrete_properties(); props2 = a2.get_concrete_properties()
for p2 in props2.keys():
if not(props.has_key(p2)):
return False
if props[p2] != props2[p2]:
return False
#print "Observation equals: ", props, props2
return True
def execute(self):
"""Execute action with abstract signal"""
#print "Emitting action signal:", self.name, self.props,self.coords.concrete_coords
self.emit("abstract_signal", self)
def repeat(self):
"""Emit abstract signal"""
self.emit("abstract_signal", self)
print "Repeating abstract signal with parameters: ", self.props, self.coords.concrete_coords
def set_concrete_prop(self, name, val):
"""Set value for the concrete property"""
try:
value = float(val)
except:
try:
value = int(val)
except:
value = str(val)
if name == "x" or name == "y":
self.coords.set_concrete_coords(name, value)
return
self.props[name] = value
#print "Setting in action: ", name, self.props[name], self.props_var[name]
def set_property_var(self, name, val):
"""Set value for the variable properties"""
try:
value = float(val)
except:
try:
value = int(val)
except:
value = str(val)
#print "@set_priop_val:", name, value
if name == "x" or name == "y":
self.coords.set_variable_coords(name, value)
return
self.props_var[name] = value
self.props[name] = None
def copy(self):
"""Create copy of the action"""
a2 = Action()
a2.name = str(self.name)
a2.props = self.props.copy()
a2.props_var = self.props_var.copy()
a2.coords = self.coords.copy()
#a2.connect("abstract_signal")
return a2
def to_string(self):
"""Returns action as string to print"""
act = "<action type='%s' name='%s' "%(str(type(self).__name__), self.name)
prop = self.get_properties()
for p in prop.keys():
act += " %s= '%s' "%(p, prop[p])
coords = self.coords.get_coords()
for prop in coords.keys():
act += "%s= '%s' "%(prop, coords[prop])
act += " /> \n"
return act
def to_concrete_string(self):
"""Returns action as string to print"""
act = "<action type='%s' name='%s' "%(str(type(self).__name__), self.name)
prop = self.get_concrete_properties()
for p in prop.keys():
act += " %s= '%s' "%(p, prop[p])
coords = self.coords.get_concrete_coords()
#print "Concrete Action:", self.get_concrete_properties(), self.coords.get_concrete_coords()
for prop in coords.keys():
act += "%s= '%s' "%(prop, coords[prop])
act += " /> \n"
return act
def to_xml(self):
"""Returns action in XML format"""
builder = ""
builder += "<action name='%s' "%(self.name) #str(type(self).__name__),
properties = self.get_properties()
for prop in properties.keys():
builder += "%s='%s' "%(prop, properties[prop])
coords = self.coords.get_coords()
for prop in coords.keys():
builder += "%s= '%s' "%(prop, coords[prop])
builder += " />\n"
return builder
def parse_node(self, node):
"""Create elememts of the action from XML format"""
for k in node.attrib.keys():
value = node.attrib[k]
if k == "type":
continue
if k == "name":
self.name = str(value)
elif k !="type":
if type(value) == type(str()):
if value[0] == '$':
self.set_property_var(k, value); continue
else:
self.set_concrete_prop(k, value); continue
else:
self.set_concrete_prop(k, value); continue
    def instantiate_var(self, variable, value):
        """Instantiates the property (variable) with given value.

        Looks up *variable* among the merged properties and coordinates.
        When the stored value is a plain '$var' placeholder, it is replaced
        by *value* directly; when it encodes an offset ('$var-3' / '$var+3'),
        the offset is applied to *value* and the absolute result is stored.
        """
        #print "@intantiate_var Instantiating: ",self.sensor_id, variable, value
        props = self.get_properties()
        coords = self.coords.get_coords()
        # Merge coordinates in so x/y placeholders are handled the same way.
        props.update(coords)
        #print "@intantiate_var Instantiating: ",props, self.coords.get_coords(), variable, value
        for p in props.keys():
            if p != variable:
                continue
            # Coerce the stored value: float first, then int, else string.
            try:
                p_val = float(props[p])
            except:
                try:
                    p_val = int(props[p])
                except:
                    p_val = str(props[p])
            #print "Values to be instantiated:", p, props[p], p_val, type(p_val) #!= type(str()) and "$" in p_val, (p_val == variable and len(p_val)<3) (len(p_val)>2 and p == variable)
            # Only string values containing a '$' placeholder are instantiated.
            if type(p_val) != type(str()) or not("$" in p_val):
                continue
            #print "$ sign found:", "$" in p_val, p_val
            #if not("$" in p_val):
            #    continue
            if (not "-" in p_val) and (not "+" in p_val):
                # Plain '$var' placeholder: substitute the value as-is.
                #print "Setting concrete_values: ", p, value
                self.set_concrete_prop(p, value)
            else:#elif(str(p_val) != str(variable) and len(p_val)>2):
                # Placeholder with an arithmetic offset, e.g. '$var-3'.
                #print "Property with function to be changed:", p, p_val, value
                # Offsets only make sense for numeric values.
                if type(value) != type(float()):
                    return
                # Find which operator the placeholder uses ('-' or '+').
                try:
                    sym = p_val[p_val.index("-")]
                except:
                    sym = p_val[p_val.index("+")]
                #sym = p_val[2]
                q = float(value); reply = None
                # s = "0%c"%p_val[3]; w = float(s)
                # Everything after the operator is the numeric offset.
                w = float(p_val[p_val.index(sym)+1:])
                if (sym =='-'):
                    reply = q-w
                else:
                    reply = q+w
                # The absolute value is stored as the concrete property.
                ss = abs(reply)
                #print "setting concrete Value with function created: ",p, ss
                self.set_concrete_prop(p, ss)
GObject.type_register(Action) |
from django.shortcuts import render, redirect, HttpResponseRedirect, reverse
from subreddit.models import Subreddit
from subreddit.forms import SubredditCreationForm
from reddituser.models import RedditUser
from post.models import Post
from subreddit.helper import random_subreddits, subreddit_search
import random
# FIX: this assignment used to be wrapped in 'if Subreddit.objects.all():',
# so on an empty database the name was never bound and every view that put
# 'search_subreddits' in its context raised NameError.  Django QuerySets are
# lazy, so assigning unconditionally is free here and the query only runs
# when a template actually iterates it (at request time, not import time).
search_subreddits = Subreddit.objects.all()
def subreddit_view(request, title, sort_by):
    """Render a subreddit page with its posts ordered per *sort_by*.

    sort_by == 'trending' ranks posts by the existing popularity key;
    anything else shows newest first.
    """
    subreddit = Subreddit.objects.get(title=title)
    subreddits = random_subreddits()
    members = subreddit.members.all()
    subreddit_filter = subreddit_search(request)
    is_member = request.user in subreddit.members.all()
    if sort_by == 'trending':
        # FIX: the original re-queried the same subreddit by title
        # ('current_subreddit') — reuse the one already fetched above.
        posts = sorted(
            Post.objects.filter(subreddit=subreddit),
            # Same ranking key as before: zero-popularity posts sort last.
            key=lambda p: 0 if p.getPopularity() == 0 else -1 / p.getPopularity())
    else:
        # Equivalent to order_by('created_at').reverse(): newest first.
        posts = Post.objects.filter(subreddit=subreddit).order_by('-created_at')
    context = {
        'subreddit': subreddit,
        'subreddits': subreddits,
        'members': members,
        'posts': posts,
        'sort_by': sort_by,
        'is_member': is_member,
        'subreddit_filter': subreddit_filter,
        'search_subreddits': search_subreddits,
    }
    return render(request, 'subreddit/subreddit.html', context)
def subreddit_creation_view(request):
    """Show the subreddit-creation form; create and redirect on a valid POST."""
    title = 'Create Subreddit'
    subreddit_filter = subreddit_search(request)
    form = SubredditCreationForm()
    if request.method == "POST":
        form = SubredditCreationForm(request.POST)
        if form.is_valid():
            cleaned = form.cleaned_data
            Subreddit.objects.create(
                title=cleaned['title'],
                about=cleaned['about'],
            )
            return redirect(f"/subreddit/page/{cleaned['title']}/recent")
    # GET, or POST with an invalid (re-bound) form.
    context = {
        'form': form,
        'title': title,
        'subreddit_filter': subreddit_filter,
        'search_subreddits': search_subreddits,
    }
    return render(request, 'subreddit/createsubreddit.html', context)
def subreddit_search_view(request):
    """Search subreddits and show a shuffled feed of posts from the matches."""
    subreddit_filter = subreddit_search(request)
    # Cap the result page at six matching subreddits.
    matched = subreddit_filter.qs[:6]
    posts = [post
             for sub in matched
             for post in Post.objects.filter(subreddit=sub)]
    random.shuffle(posts)
    context = {
        'subreddits': matched,
        'subreddit_filter': subreddit_filter,
        'posts': posts,
        'search_subreddits': search_subreddits,
    }
    return render(request, 'subreddit/search.html', context)
def subscribe(request, subreddit_id):
    """Toggle request.user's membership, then bounce back to the referring tab.

    The last non-empty path segment of the Referer URL is reused as the
    'sort_by' argument of the subreddit page we redirect to.
    """
    current_subreddit = Subreddit.objects.get(id=subreddit_id)
    members = current_subreddit.members
    if request.user in members.all():
        members.remove(request.user)
    else:
        members.add(request.user)
    # FIX: HTTP_REFERER is an optional header; the original called .split()
    # on None and crashed when it was absent.  Fall back to 'recent'.
    referer = request.META.get('HTTP_REFERER') or ''
    segments = [s for s in referer.split('/') if s]
    sort_by = segments[-1] if segments else 'recent'
    return HttpResponseRedirect(
        reverse('subreddit', kwargs={
            'title': current_subreddit.title,
            'sort_by': sort_by,
        }))
|
import pandas as pd
import numpy as np
import datetime
import math
from datetime import timedelta, date
# Sundays and holidays in March 2020 that never count as working days.
exclude_days = [date(2020, 3, 1), date(2020, 3, 8), date(2020, 3, 15),
                date(2020, 3, 22), date(2020, 3, 25), date(2020, 3, 29),
                date(2020, 3, 30), date(2020, 3, 31)]


def daterange(date1, date2):
    """Yield every date from date1 through date2 inclusive."""
    for n in range(int((date2 - date1).days) + 1):
        yield date1 + timedelta(n)


def working_days_allowed(addr_a, addr_b):
    """Return the first-attempt SLA (working days) for an address pair.

    Metro Manila <-> Metro Manila: 3 days; any pairing of Metro Manila and
    Luzon (including Luzon <-> Luzon): 5 days; everything else: 7 days.
    Matching is case-insensitive substring search, as in the original.
    """
    a = addr_a.lower()
    b = addr_b.lower()
    if "metro manila" in a and "metro manila" in b:
        return 3
    if (("luzon" in a) or ("metro manila" in a)) and \
       (("luzon" in b) or ("metro manila" in b)):
        return 5
    return 7


def count_working_days(start, end):
    """Count non-excluded days in the half-open-at-start range (start, end]."""
    return sum(1 for dt in daterange(start + timedelta(days=1), end)
               if dt not in exclude_days)


def to_date(epoch):
    """Convert an epoch-seconds value (possibly a float string) to a date."""
    return datetime.datetime.fromtimestamp(int(float(epoch))).date()


def classify_row(row):
    """Return [orderid, is_late] for one delivery record.

    row layout: [orderid, pickup_ts, attempt1_ts, attempt2_ts_or_NaN,
    address_a, address_b].  A delivery is on time when the first attempt is
    within the route SLA and any second attempt is within 3 working days of
    the first.  This replaces four copy-pasted branches that differed only
    in the SLA constant.
    """
    allowed = working_days_allowed(row[4], row[5])
    pick = to_date(row[1])
    attempt1 = to_date(row[2])
    count = count_working_days(pick, attempt1)
    if count <= allowed and math.isnan(row[3]):
        return [row[0], 0]          # on time, single attempt
    if count > allowed:
        return [row[0], 1]          # first attempt already late
    # First attempt on time but a second attempt exists: it gets 3 more
    # working days counted from the first attempt.
    attempt2 = to_date(row[3])
    second = count_working_days(attempt1, attempt2)
    return [row[0], 0 if second <= 3 else 1]


if __name__ == "__main__":
    deliveries_df = pd.read_csv("delivery_orders_march.csv")
    # NOTE: the original called deliveries_df.replace(np.nan, 0) without
    # assigning the result, so it had no effect; missing second attempts
    # stay NaN, which is what math.isnan() above relies on.
    deliveries_list = deliveries_df.values.tolist()
    final_output = [classify_row(row) for row in deliveries_list]
    final_df = pd.DataFrame(final_output, columns=['orderid', 'is_late'])
    final_df.to_csv('logistics2.csv', index=False)
import socket
import os
server_Host = 'localhost'
server_Port = 7343

client = socket.socket()
client.connect((server_Host, server_Port))

# Receive a file from the server chunk by chunk, appending every chunk to
# own.txt and overwriting limit.txt with the most recent chunk.
while True:
    print("------------------------")
    data = client.recv(124000)
    if not data:
        # FIX: recv() returns b'' once the server closes the connection; the
        # original looped forever and 'client.close()' was unreachable.
        break
    print(type(data))
    # FIX: the original opened both files on every iteration and then used
    # 'f.close' / 'f_l.close' without parentheses, so the handles were never
    # closed; 'with' guarantees closure.  Raw strings keep the Windows paths
    # literal regardless of backslash escapes.
    with open(r"D:\own.txt", 'ab+') as f:
        f.write(data)
    with open(r"D:\limit.txt", 'wb+') as f_l:
        f_l.write(data)
    print("------------------------")
client.close()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 12 01:25:40 2018
@author: ck807
"""
import os, glob
import cv2
import numpy as np
from keras.preprocessing.image import img_to_array
########################################################################################################################
def rgb2gray(rgb):
    """Collapse the first three channels of an RGB image to luminance.

    Uses the ITU-R BT.601 weights (0.299, 0.587, 0.114); output drops the
    channel axis.
    """
    weights = np.array([0.299, 0.587, 0.114])
    return rgb[..., :3] @ weights
print('Creating Expression Set')
i = 0
# Training images and their binary 9-landmark mask counterparts.
data_file = glob.glob('/local/data/chaitanya/attentionExp/images/train/*.png')
files = []
data_file_mask = glob.glob('/local/data/chaitanya/attentionExp/binaryLdmkImages/train9ldmk/*.png')
# Pre-allocated tensors: 192x192 RGB inputs and single-channel mask labels.
trainData1 = np.zeros((len(data_file),192, 192, 3))
trainLabel1 = np.zeros((len(data_file_mask), 192, 192, 1))
# Load every training image and remember its basename so the mask loop below
# can place each label at the row index of its matching image.
for f in (data_file):
    a=cv2.imread(f)
    trainData1[i,:,:,:] = a[:,:,:]
    base = os.path.basename("/local/data/chaitanya/attentionExp/images/train/" + f)
    fileName = os.path.splitext(base)[0]
    files.append(fileName)
    i += 1
# Masks are stored under the image name plus a '_depth' suffix; look up the
# image row and write the grayscale mask there so data/labels stay aligned.
for k in (data_file_mask):
    base = os.path.basename("/local/data/chaitanya/attentionExp/binaryLdmkImages/train9ldmk/" + k)
    fileName = os.path.splitext(base)[0]
    fileName = fileName + '_depth'
    index = files.index(fileName)
    image = cv2.imread(k)
    gray = rgb2gray(image)
    gray_image = img_to_array(gray)
    trainLabel1[index, :, :, :] = gray_image[:, :, :]
# Same two-pass procedure for the validation split.
i = 0
data_file_val = glob.glob('/local/data/chaitanya/attentionExp/images/val/*.png')
files_val = []
data_file_mask_val = glob.glob('/local/data/chaitanya/attentionExp/binaryLdmkImages/val9ldmk/*.png')
valData1 = np.zeros((len(data_file_val),192, 192, 3))
valLabel1 = np.zeros((len(data_file_mask_val), 192, 192, 1))
for f in (data_file_val):
    a=cv2.imread(f)
    valData1[i,:,:,:] = a[:,:,:]
    base = os.path.basename("/local/data/chaitanya/attentionExp/images/val/" + f)
    fileName = os.path.splitext(base)[0]
    files_val.append(fileName)
    i += 1
for k in (data_file_mask_val):
    base = os.path.basename("/local/data/chaitanya/attentionExp/binaryLdmkImages/val9ldmk/" + k)
    fileName = os.path.splitext(base)[0]
    fileName = fileName + '_depth'
    index = files_val.index(fileName)
    image = cv2.imread(k)
    gray = rgb2gray(image)
    gray_image = img_to_array(gray)
    valLabel1[index, :, :, :] = gray_image[:, :, :]
########################################################################################################################
print('Creating Neutral Set')
i = 0
# Second dataset (neutral expressions); identical two-pass layout to the
# 'Expression' block above, just different source directories.
data_filen = glob.glob('/local/data/chaitanya/attentionNeutral/images/train/*.png')
filesn = []
data_file_maskn = glob.glob('/local/data/chaitanya/attentionNeutral/binaryLdmkImages/train9ldmk/*.png')
trainData2 = np.zeros((len(data_filen),192, 192, 3))
trainLabel2 = np.zeros((len(data_file_maskn), 192, 192, 1))
# Pass 1: load images and record basenames for index lookup.
for f in (data_filen):
    a=cv2.imread(f)
    trainData2[i,:,:,:] = a[:,:,:]
    base = os.path.basename("/local/data/chaitanya/attentionNeutral/images/train/" + f)
    fileName = os.path.splitext(base)[0]
    filesn.append(fileName)
    i += 1
# Pass 2: align each '<image>_depth' mask with its image row.
for k in (data_file_maskn):
    base = os.path.basename("/local/data/chaitanya/attentionNeutral/binaryLdmkImages/train9ldmk/" + k)
    fileName = os.path.splitext(base)[0]
    fileName = fileName + '_depth'
    index = filesn.index(fileName)
    image = cv2.imread(k)
    gray = rgb2gray(image)
    gray_image = img_to_array(gray)
    trainLabel2[index, :, :, :] = gray_image[:, :, :]
# Validation split of the neutral dataset.
i = 0
data_file_valn = glob.glob('/local/data/chaitanya/attentionNeutral/images/val/*.png')
files_valn = []
data_file_mask_valn = glob.glob('/local/data/chaitanya/attentionNeutral/binaryLdmkImages/val9ldmk/*.png')
valData2 = np.zeros((len(data_file_valn),192, 192, 3))
valLabel2 = np.zeros((len(data_file_mask_valn), 192, 192, 1))
for f in (data_file_valn):
    a=cv2.imread(f)
    valData2[i,:,:,:] = a[:,:,:]
    base = os.path.basename("/local/data/chaitanya/attentionNeutral/images/val/" + f)
    fileName = os.path.splitext(base)[0]
    files_valn.append(fileName)
    i += 1
for k in (data_file_mask_valn):
    base = os.path.basename("/local/data/chaitanya/attentionNeutral/binaryLdmkImages/val9ldmk/" + k)
    fileName = os.path.splitext(base)[0]
    fileName = fileName + '_depth'
    index = files_valn.index(fileName)
    image = cv2.imread(k)
    gray = rgb2gray(image)
    gray_image = img_to_array(gray)
    valLabel2[index, :, :, :] = gray_image[:, :, :]
########################################################################################################################
print('Creating Concatenated Set')
# Stack the expression and neutral splits into single train/val tensors.
# FIX: the original pre-allocated np.zeros(16000, ...) / np.zeros(4000, ...)
# and sliced at hard-coded 8000/2000 boundaries, which silently breaks (or
# raises) whenever the directories do not contain exactly those counts.
# np.concatenate sizes itself from its inputs and is byte-identical when the
# counts match the old assumptions.
trainData = np.concatenate((trainData1, trainData2), axis=0)
trainLabel = np.concatenate((trainLabel1, trainLabel2), axis=0)
valData = np.concatenate((valData1, valData2), axis=0)
valLabel = np.concatenate((valLabel1, valLabel2), axis=0)
########################################################################################################################
print('Preprocessing and saving tensors')
# Standardize the inputs with the *training* mean/std; the validation split is
# normalized with the same statistics to avoid leakage.
trainData = trainData.astype('float32')
trainDataMean = np.mean(trainData)
trainDataStd = np.std(trainData)
trainData -= trainDataMean
trainData /= trainDataStd
# Masks are 0-255 images; scale them to [0, 1] for use as targets.
trainLabel = trainLabel.astype('float32')
trainLabel /= 255.
valData = valData.astype('float32')
valData -= trainDataMean
valData /= trainDataStd
valLabel = valLabel.astype('float32')
valLabel /= 255.
# Persist the four tensors for the training script.
np.save('train9DataAttention.npy',trainData)
np.save('train9MaskAttention.npy', trainLabel)
np.save('val9DataAttention.npy',valData)
np.save('val9MaskAttention.npy', valLabel)
########################################################################################################################
|
import numpy as np
import math
# Simulation parameters: temperature, number density, and the Lennard-Jones
# length/energy scales (reduced units).  Python 2 script.
T = 1.0
dens = 0.9
sigma=1
epsilon=1
print "hello world!"
def fill_init_pos(n, positions):
m = int((n/4)**(1.0/3)+0.01) #amount of unit cells per direction
print "m: ",m
a=(4.0/n)^(1./3)
#do things
positions[0]=[0.0,0.0,0.0]
positions[1]=[0.5,0.5,0.0]
positions[2]=[0.5,0.0,0.5]
positions[3]=[0.0,0.5,0.5]
counter=0
for i in xrange(0,m):
for j in xrange(0,m):
for k in xrange(0,m):
positions[counter:counter+4]=positions[0:4]+[i,j,k]
counter+=4
print positions[i*4:i*4+4]
return positions
def fill_init_mom(n, momenta):
    """Draw initial momenta from N(0, 5) independently per component.

    Fills the (n, 3) array column by column (x, y, z) and returns it.
    """
    mean, standdev = 0, 5
    for axis in range(3):
        momenta[:, axis] = np.random.normal(mean, standdev, n)
    return momenta
class particleClass:
    """Container for the positions, momenta and forces of n particles."""
    def __init__(self, n):
        # n: particle count; must be 4*m**3 for the FCC initializer.
        self.positions=np.zeros((n,3),dtype=float)
        self.momenta=np.zeros((n,3),dtype=float)
        self.forces=np.zeros((n,3),dtype=float)
        self.positions = fill_init_pos(n, self.positions)
        self.momenta = fill_init_mom(n, self.momenta)
    def show(self):
        """Print the current positions and momenta (debug aid)."""
        print "Positions: ", self.positions
        print "Momenta: ", self.momenta
def calc_force(particle1, particle2, sigma=1, epsilon=1):
    """Magnitude of the Lennard-Jones force between two 3D points.

    Generalized: sigma and epsilon are now keyword parameters defaulting to
    the module-level values (both 1), so existing two-argument calls behave
    identically while other parameter sets become possible.
    Raises ZeroDivisionError if the particles coincide.
    """
    deltax = particle2[0] - particle1[0]
    deltay = particle2[1] - particle1[1]
    deltaz = particle2[2] - particle1[2]
    r = math.sqrt(deltax**2 + deltay**2 + deltaz**2)
    # -dU/dr for U = 4*eps*((sigma/r)**12 - (sigma/r)**6).
    F = 4*epsilon*((12*sigma**12)/r**13 - (6*sigma**6)/r**7)
    return F
# Build an 864-particle system (6x6x6 FCC unit cells) and dump its state.
particles = particleClass(864)
particles.show()
|
#!/usr/bin/env python
import roslib
roslib.load_manifest('baxter_rr_bridge')
import rospy
import baxter_interface
from std_msgs.msg import Empty
import sys, argparse
import struct
import time
import RobotRaconteur as RR
import thread
import threading
import numpy
from geometry_msgs.msg import (
PoseStamped,
Pose,
Point,
Quaternion,
)
from std_msgs.msg import Header
from baxter_core_msgs.srv import (
SolvePositionIK,
SolvePositionIKRequest,
)
baxter_servicedef="""
#Service to provide simple interface to Baxter
service BaxterJoint_Interface
option version 0.4
object Baxter
property double[] joint_positions
property double[] joint_velocities
property double[] joint_torques
property double[] endeffector_positions
property double[] endeffector_orientations
property double[] endeffector_twists
property double[] endeffector_wrenches
function void setControlMode(uint8 mode)
function void setJointCommand(string limb, double[] command)
function void setPositionModeSpeed(double speed)
function double[] solveIKfast(double[] positions, double[] quaternions, string limb_choice)
end object
"""
class Baxter_impl(object):
    """Robot Raconteur service object wrapping the Baxter SDK.

    Caches joint and end-effector state in flat lists refreshed by
    background threads at ~100 Hz, and continuously streams the current
    joint command to the robot in the selected control mode
    (position / velocity / torque).
    """
    def __init__(self):
        print "Initializing Node"
        rospy.init_node('baxter_jointstates')
        print "Enabling Robot"
        rs = baxter_interface.RobotEnable()
        rs.enable()
        # Accept both long and short limb names from clients.
        self._valid_limb_names = {'left': 'left',
                                  'l': 'left',
                                  'right': 'right',
                                  'r': 'right'}
        # get information from the SDK
        self._left = baxter_interface.Limb('left')
        self._right = baxter_interface.Limb('right')
        self._l_jnames = self._left.joint_names()
        self._r_jnames = self._right.joint_names()
        # data initializations
        # Flat caches; layout: indices 0-6 left arm, 7-13 right arm.
        self._jointpos = [0]*14
        self._jointvel = [0]*14
        self._jointtor = [0]*14
        # End-effector caches: xyz per arm (6), wxyz quaternion per arm (8),
        # angular+linear twist per arm (12), torque+force wrench per arm (12).
        self._ee_pos = [0]*6
        self._ee_or = [0]*8
        self._ee_tw = [0]*12
        self._ee_wr = [0]*12
        self._l_joint_command = dict(zip(self._l_jnames,[0.0]*7))
        self._r_joint_command = dict(zip(self._r_jnames,[0.0]*7))
        self.MODE_POSITION = 0;
        self.MODE_VELOCITY = 1;
        self.MODE_TORQUE = 2;
        self._mode = self.MODE_POSITION
        # initial joint command is current pose
        self.readJointPositions()
        self.setJointCommand('left',self._jointpos[0:7])
        self.setJointCommand('right',self._jointpos[7:14])
        # Start background threads
        self._running = True
        self._t_joints = threading.Thread(target=self.jointspace_worker)
        self._t_joints.daemon = True
        self._t_joints.start()
        self._t_effector = threading.Thread(target=self.endeffector_worker)
        self._t_effector.daemon = True
        self._t_effector.start()
        self._t_command = threading.Thread(target=self.command_worker)
        self._t_command.daemon = True
        self._t_command.start()
    def close(self):
        """Stop the worker threads and leave any active control mode."""
        self._running = False
        self._t_joints.join()
        self._t_effector.join()
        self._t_command.join()
        if (self._mode != self.MODE_POSITION):
            self._left.exit_control_mode()
            self._right.exit_control_mode()
    @property
    def joint_positions(self):
        """Cached joint angles, left arm then right."""
        return self._jointpos
    @property
    def joint_velocities(self):
        """Cached joint velocities, left arm then right."""
        return self._jointvel
    @property
    def joint_torques(self):
        """Cached joint efforts, left arm then right."""
        return self._jointtor
    @property
    def endeffector_positions(self):
        """Cached end-effector xyz positions, left arm then right."""
        return self._ee_pos
    @property
    def endeffector_orientations(self):
        """Cached end-effector quaternions (w,x,y,z), left arm then right."""
        return self._ee_or
    @property
    def endeffector_twists(self):
        """Cached end-effector twists (angular then linear per arm)."""
        return self._ee_tw
    @property
    def endeffector_wrenches(self):
        """Cached end-effector wrenches (torque then force per arm)."""
        return self._ee_wr
    def readJointPositions(self):
        """Refresh self._jointpos from the SDK (skips an arm on empty reply)."""
        l_angles = self._left.joint_angles()
        r_angles = self._right.joint_angles()
        if l_angles:
            for i in xrange(0,len(self._l_jnames)):
                self._jointpos[i] = l_angles[self._l_jnames[i]]
        if r_angles:
            for i in xrange(0,len(self._r_jnames)):
                self._jointpos[i+7] = r_angles[self._r_jnames[i]]
    def readJointVelocities(self):
        """Refresh self._jointvel from the SDK (skips an arm on empty reply)."""
        l_velocities = self._left.joint_velocities()
        r_velocities = self._right.joint_velocities()
        if l_velocities:
            for i in xrange(0,len(self._l_jnames)):
                self._jointvel[i] = l_velocities[self._l_jnames[i]]
        if r_velocities:
            for i in xrange(0,len(self._r_jnames)):
                self._jointvel[i+7] = r_velocities[self._r_jnames[i]]
    def readJointTorques(self):
        """Refresh self._jointtor from the SDK (skips an arm on empty reply)."""
        l_efforts = self._left.joint_efforts()
        r_efforts = self._right.joint_efforts()
        if l_efforts:
            for i in xrange(0,len(self._l_jnames)):
                self._jointtor[i] = l_efforts[self._l_jnames[i]]
        if r_efforts:
            for i in xrange(0,len(self._r_jnames)):
                self._jointtor[i+7] = r_efforts[self._r_jnames[i]]
    def readEndEffectorPoses(self):
        """Refresh cached end-effector positions and orientations."""
        l_pose = self._left.endpoint_pose()
        if l_pose:
            self._ee_pos[0] = l_pose['position'].x
            self._ee_pos[1] = l_pose['position'].y
            self._ee_pos[2] = l_pose['position'].z
            self._ee_or[0] = l_pose['orientation'].w
            self._ee_or[1] = l_pose['orientation'].x
            self._ee_or[2] = l_pose['orientation'].y
            self._ee_or[3] = l_pose['orientation'].z
        r_pose = self._right.endpoint_pose()
        if r_pose:
            self._ee_pos[3] = r_pose['position'].x
            self._ee_pos[4] = r_pose['position'].y
            self._ee_pos[5] = r_pose['position'].z
            self._ee_or[4] = r_pose['orientation'].w
            self._ee_or[5] = r_pose['orientation'].x
            self._ee_or[6] = r_pose['orientation'].y
            self._ee_or[7] = r_pose['orientation'].z
    def readEndEffectorTwists(self):
        """Refresh cached end-effector twists (angular first, then linear)."""
        l_twist = self._left.endpoint_velocity()
        if l_twist:
            self._ee_tw[0] = l_twist['angular'].x
            self._ee_tw[1] = l_twist['angular'].y
            self._ee_tw[2] = l_twist['angular'].z
            self._ee_tw[3] = l_twist['linear'].x
            self._ee_tw[4] = l_twist['linear'].y
            self._ee_tw[5] = l_twist['linear'].z
        r_twist = self._right.endpoint_velocity()
        if r_twist:
            self._ee_tw[6] = r_twist['angular'].x
            self._ee_tw[7] = r_twist['angular'].y
            self._ee_tw[8] = r_twist['angular'].z
            self._ee_tw[9] = r_twist['linear'].x
            self._ee_tw[10] = r_twist['linear'].y
            self._ee_tw[11] = r_twist['linear'].z
    def readEndEffectorWrenches(self):
        """Refresh cached end-effector wrenches (torque first, then force)."""
        l_wrench = self._left.endpoint_effort()
        if l_wrench:
            self._ee_wr[0] = l_wrench['torque'].x
            self._ee_wr[1] = l_wrench['torque'].y
            self._ee_wr[2] = l_wrench['torque'].z
            self._ee_wr[3] = l_wrench['force'].x
            self._ee_wr[4] = l_wrench['force'].y
            self._ee_wr[5] = l_wrench['force'].z
        r_wrench = self._right.endpoint_effort()
        if r_wrench:
            self._ee_wr[6] = r_wrench['torque'].x
            self._ee_wr[7] = r_wrench['torque'].y
            self._ee_wr[8] = r_wrench['torque'].z
            self._ee_wr[9] = r_wrench['force'].x
            self._ee_wr[10] = r_wrench['force'].y
            self._ee_wr[11] = r_wrench['force'].z
    def setControlMode(self, mode):
        """Switch control mode; unknown modes are ignored.

        Entering a mode resets the joint command to a safe value (current
        pose for position mode, zeros for velocity/torque) so the command
        worker never replays a stale command in the new mode.
        """
        if mode != self.MODE_POSITION and \
           mode != self.MODE_VELOCITY and \
           mode != self.MODE_TORQUE:
            return
        if mode == self.MODE_POSITION:
            self._left.exit_control_mode()
            self._right.exit_control_mode()
            # set command to current joint positions
            self.setJointCommand('left',self._jointpos[0:7])
            self.setJointCommand('right',self._jointpos[7:14])
        elif mode == self.MODE_VELOCITY:
            # set command to zeros
            self.setJointCommand('left',[0]*7)
            self.setJointCommand('right',[0]*7)
        elif mode == self.MODE_TORQUE:
            # set command to zeros
            self.setJointCommand('left',[0]*7)
            self.setJointCommand('right',[0]*7)
        self._mode = mode
    # This function calls RSDK ikFast Service
    def solveIKfast(self, positions, quaternions, limb_choice):
        """Solve IK for one limb via the RSDK IK service.

        The requested pose is used for the chosen limb; the other limb's
        entry is filled with its current cached pose.  Returns the joint
        positions from the service response, or 1 on a service error.
        """
        ns = "ExternalTools/" + limb_choice + "/PositionKinematicsNode/IKService"
        iksvc = rospy.ServiceProxy(ns, SolvePositionIK)
        ikreq = SolvePositionIKRequest()
        hdr = Header(stamp=rospy.Time.now(), frame_id='base')
        poses = {}
        if (limb_choice == 'left' or limb_choice == 'l'):
            limb_choice = 'left'
            poses = {
                'left': PoseStamped(
                    header=hdr,
                    pose=Pose(
                        position = Point(
                            x = positions[0],
                            y = positions[1],
                            z = positions[2],
                        ),
                        orientation = Quaternion(
                            x = quaternions[1],
                            y = quaternions[2],
                            z = quaternions[3],
                            w = quaternions[0],
                        ),
                    ),
                ),
                'right': PoseStamped(
                    header=hdr,
                    pose=Pose(
                        position = Point(
                            x = self._ee_pos[3],
                            y = self._ee_pos[4],
                            z = self._ee_pos[5],
                        ),
                        orientation = Quaternion(
                            x = self._ee_or[5],
                            y = self._ee_or[6],
                            z = self._ee_or[7],
                            w = self._ee_or[4],
                        ),
                    ),
                ),
            }
        elif (limb_choice == 'right' or limb_choice == 'r'):
            limb_choice = 'right'
            poses = {
                'left': PoseStamped(
                    header=hdr,
                    pose=Pose(
                        position = Point(
                            x = self._ee_pos[0],
                            y = self._ee_pos[1],
                            z = self._ee_pos[2],
                        ),
                        orientation = Quaternion(
                            x = self._ee_or[1],
                            y = self._ee_or[2],
                            z = self._ee_or[3],
                            w = self._ee_or[0],
                        ),
                    ),
                ),
                'right': PoseStamped(
                    header=hdr,
                    pose=Pose(
                        position = Point(
                            x = positions[0],
                            y = positions[1],
                            z = positions[2],
                        ),
                        orientation = Quaternion(
                            x = quaternions[1],
                            y = quaternions[2],
                            z = quaternions[3],
                            w = quaternions[0],
                        ),
                    ),
                ),
            }
        else:
            print "Not a valid arm"
            return
        # begin the solvinng process
        ikreq.pose_stamp.append(poses[limb_choice])
        try:
            rospy.wait_for_service(ns, 5.0)
            resp = iksvc(ikreq)
        except (rospy.ServiceException, rospy.ROSException), e:
            rospy.logerr("Service call failed: %s" % (e,))
            return 1
        # Check if result valid, and type of seed ultimately used to get solution
        # convert rospy's string representation of uint8[]'s to int's
        resp_seeds = struct.unpack('<%dB' % len(resp.result_type),
                                   resp.result_type)
        seed_dict = {
                    ikreq.SEED_USER: 'User Provided Seed',
                    ikreq.SEED_CURRENT: 'Current Joint Angles',
                    ikreq.SEED_NS_MAP: 'Nullspace Setpoints',
                    }
        if (resp_seeds[0] != resp.RESULT_INVALID):
            seed_str = seed_dict.get(resp_seeds[0], 'None')
            print("SUCCESS - Valid Joint Solution Found from Seed Type: %s" %
                  (seed_str,))
            # Format solution into Limb API-compatible dictionary
            limb_joints = dict(zip(resp.joints[0].name, resp.joints[0].position))
            print "\nIK Joint Solution:\n", limb_joints
            print "------------------"
            print "Response Message:\n", resp
        # if no valid solution was found
        else:
            print("INVALID POSE - No Valid Joint Solution Found.")
        return resp.joints[0].position
    def setJointCommand(self, limb, command):
        """Store a 7-element command for one limb; unknown limb names ignored.

        The command worker thread picks the stored command up on its next
        cycle, interpreting it per the current control mode.
        """
        limb = limb.lower()
        if not limb in self._valid_limb_names.keys():
            return
        if self._valid_limb_names[limb] == 'left':
            for i in xrange(0,len(self._l_jnames)):
                self._l_joint_command[self._l_jnames[i]] = command[i]
        elif self._valid_limb_names[limb] == 'right':
            for i in xrange(0,len(self._r_jnames)):
                self._r_joint_command[self._r_jnames[i]] = command[i]
    def setPositionModeSpeed(self, speed):
        """Set position-mode speed ratio for both arms, clamped to [0, 1]."""
        if speed < 0.0:
            speed = 0.0
        elif speed > 1.0:
            speed = 1.0
        self._left.set_joint_position_speed(speed)
        self._right.set_joint_position_speed(speed)
    # worker function to request and update joint data for baxter
    # maintain 100 Hz read rate
    # TODO: INCORPORATE USER-DEFINED JOINT PUBLISH RATE
    def jointspace_worker(self):
        """Background loop: refresh joint caches roughly every 10 ms."""
        while self._running:
            t1 = time.time()
            self.readJointPositions()
            self.readJointVelocities()
            self.readJointTorques()
            while (time.time() - t1 < 0.01):
                # idle
                time.sleep(0.001)
    # worker function to request and update end effector data for baxter
    # Try to maintain 100 Hz operation
    def endeffector_worker(self):
        """Background loop: refresh end-effector caches roughly every 10 ms."""
        while self._running:
            t1 = time.time()
            self.readEndEffectorPoses()
            self.readEndEffectorTwists()
            self.readEndEffectorWrenches()
            while (time.time() - t1 < 0.01):
                # idle
                time.sleep(0.001)
    # worker function to continuously issue commands to baxter
    # Try to maintain 100 Hz operation
    # TODO: INCLUDE CLOCK JITTER CORRECTION
    def command_worker(self):
        """Background loop: stream the stored command in the active mode."""
        while self._running:
            t1 = time.time()
            if (self._mode == self.MODE_POSITION):
                self._left.set_joint_positions(self._l_joint_command)
                self._right.set_joint_positions(self._r_joint_command)
            elif (self._mode == self.MODE_VELOCITY):
                self._left.set_joint_velocities(self._l_joint_command)
                self._right.set_joint_velocities(self._r_joint_command)
            elif (self._mode == self.MODE_TORQUE):
                #self._supp_cuff_int_pubs['left'].publish()
                #self._supp_cuff_int_pubs['right'].publish()
                self._left.set_joint_torques(self._l_joint_command)
                self._right.set_joint_torques(self._r_joint_command)
            while (time.time() - t1 < 0.01):
                # idle
                time.sleep(0.001)
def main(argv):
    """Start the Baxter RobotRaconteur service and block until the user quits.

    argv: command-line arguments (without the program name); supports
    '--port' for the TCP port to host the service on (0 = auto).
    """
    # parse command line arguments
    parser = argparse.ArgumentParser(
                        description='Initialize Joint Controller.')
    parser.add_argument('--port', type=int, default = 0,
                        help='TCP port to host service on' + \
                             '(will auto-generate if not specified)')
    args = parser.parse_args(argv)
    #Enable numpy
    RR.RobotRaconteurNode.s.UseNumPy=True
    #Set the Node name
    RR.RobotRaconteurNode.s.NodeName="BaxterJointServer"
    #Initialize object
    baxter_obj = Baxter_impl()
    #Create transport, register it, and start the server
    print "Registering Transport"
    t = RR.TcpTransport()
    t.EnableNodeAnnounce(RR.IPNodeDiscoveryFlags_NODE_LOCAL |
                         RR.IPNodeDiscoveryFlags_LINK_LOCAL |
                         RR.IPNodeDiscoveryFlags_SITE_LOCAL)
    RR.RobotRaconteurNode.s.RegisterTransport(t)
    t.StartServer(args.port)
    port = args.port
    if (port == 0):
        port = t.GetListenPort()
    #Register the service type and the service
    print "Starting Service"
    RR.RobotRaconteurNode.s.RegisterServiceType(baxter_servicedef)
    RR.RobotRaconteurNode.s.RegisterService("Baxter",
                                            "BaxterJoint_Interface.Baxter",
                                            baxter_obj)
    print "Service started, connect via"
    print "tcp://localhost:" + str(port) + "/BaxterJointServer/Baxter"
    raw_input("press enter to quit...\r\n")
    baxter_obj.close()
    # This must be here to prevent segfault
    RR.RobotRaconteurNode.s.Shutdown()
# Script entry point: forward CLI args (minus the program name) to main().
if __name__ == '__main__':
    main(sys.argv[1:])
|
# 计算测试数据
import numpy as np
import src.utils as utils
import gc
# Score the test data: for every mall, load whichever stored model scored
# better (xgboost vs. random forest), rebuild the wifi/weekday/hour feature
# matrix for that mall's test rows, predict a shop per row, and write the
# results back to the database.
if __name__ == '__main__':
    malls = utils.get_malls()
    conn = utils.get_db_conn()
    cur = conn.cursor()
    i = 1
    for mall_id in malls:
        print(utils.get_time(), ' ','start handle mall ', mall_id)
        # Pick the better of the two stored models by their recorded scores.
        # NOTE(review): mall_id is interpolated into SQL directly; this is
        # only safe while mall ids come from the trusted utils.get_malls()
        # list -- confirm.
        sql = "select result from score_xgb where mall_id='{m}'".format(m=mall_id)
        cur.execute(sql)
        xgb_res= float(cur.fetchall()[0][0])  # assumes a score row exists -- TODO confirm
        sql = "select result from score_rf_1000 where mall_id='{m}'".format(m=mall_id)
        cur.execute(sql)
        rf_res= float(cur.fetchall()[0][0])
        if (xgb_res-rf_res)>0.005:
            model = utils.get_model_xgb(mall_id)
            print("{m} choose xgb".format(m=mall_id))
        else:
            model = utils.get_model_rf(mall_id)
            print("{m} choose RF".format(m=mall_id))
        # The utils loaders return 0 when no stored model exists.
        if model == 0:
            print('no model for mall ', mall_id)
            continue
        # All wifi SSIDs for this mall, sorted, fixing the feature order.
        sql = 'SELECT DISTINCT wifi_ssid FROM {m} ORDER BY wifi_ssid'.format(m=mall_id)
        cur.execute(sql)
        wifis = [r[0] for r in cur.fetchall()]
        # Three parallel matrices -- wifi signal, day-of-week one-hot and
        # hour-of-day one-hot -- are built row by row and horizontally
        # stacked into the final feature matrix at the end.
        matrix = []
        weight_conn = 1.5  # weight used when the wifi connection flag is true
        matrix_day = []
        weight_day = 3  # one-hot value for the day vector, e.g. [0, 0, 3, 0, 0, 0, 0]
        matrix_hour = []
        weight_hour = 3  # one-hot value for the 24-slot hour vector
        # Template vectors; copied (vec[:]) for every database row group.
        vec = [0] * len(wifis)
        vec_mod_day = [0] * 7
        vec_mod_hour = [0] * 24
        rows = []
        # Fetch every test row, ordered so all wifi readings that belong to
        # one row_id arrive consecutively.
        sql = "SELECT row_id,wifi_ssid,wifi_db,time_stamp,wifi_conn,DAYOFWEEK(time_stamp),HOUR(time_stamp),MINUTE(time_stamp) FROM data_test_final WHERE mall_id='%s' ORDER BY row_id,wifi_ssid " % mall_id
        cur.execute(sql)
        # Prime the accumulators with the first record.
        row = cur.fetchone()
        v = vec[:]
        vec_day = vec_mod_day[:]
        vec_day[ row[5] - 1 ] = weight_day
        vec_hour = vec_mod_hour[:]
        # Round to the nearest hour; 23:30 and later wraps to hour 0.
        hour = (row[6]+1) if row[7]>=30 else row[6]
        vec_hour[0 if hour > 23 else hour] = weight_hour
        row_id = row[0]
        if row[1] in wifis:
            v[wifis.index(row[1])] = utils.normal(row[2])
        for r in cur.fetchall():
            # A new row_id means the previous row's vectors are complete.
            if r[0] != row_id:
                matrix.append(v)
                matrix_day.append(vec_day)
                matrix_hour.append(vec_hour)
                rows.append(row_id)
                v = vec[:]
                vec_day = vec_mod_day[:]
                vec_day[r[5] - 1] = weight_day
                vec_hour = vec_mod_hour[:]
                hour = (r[6] + 1) if r[7] >= 30 else r[6]
                vec_hour[0 if hour > 23 else hour] = weight_hour
                row_id = r[0]
            if r[1] in wifis:
                v[wifis.index(r[1])] = utils.normal(r[2])
        # Flush the final row's vectors.
        matrix.append(v)
        matrix_day.append(vec_day)
        matrix_hour.append(vec_hour)
        rows.append(row_id)
        matrix = np.hstack([matrix_day,matrix_hour,matrix])
        result = model.predict(matrix)
        # Persist per-row predictions and mark the mall as handled.
        for r in range(0, len(rows)):
            sql = "INSERT INTO data_test_result VALUES ('{r}','{s}')".format(r = rows[r], s=result[r])
            cur.execute(sql)
        sql = "INSERT INTO data_test_handled SET mall_id='%s',handled=1 ON DUPLICATE KEY UPDATE handled=1" % mall_id
        cur.execute(sql)
        conn.commit()
        print(utils.get_time(), ' ',mall_id, ' handled done')
        print(i, ' handled.')
        i += 1
        # Hint the allocator to release the per-mall matrices.
        gc.collect()
# 之前的,只有wifi作为对象
# if __name__ == '__main__':
# malls = utils.get_malls()
# conn = utils.get_db_conn()
# cur = conn.cursor()
# i = 1
# for mall_id in malls:
# print(utils.get_time(), ' ','start handle mall ', mall_id)
# # 获取模型
# model = utils.get_model_xgb(mall_id)
# if model == 0:
# print('no model for mall ', mall_id)
# continue
# # 查出所有wifi,排序
# sql = 'SELECT DISTINCT wifi_ssid FROM {m} ORDER BY wifi_ssid'.format(m=mall_id)
# cur.execute(sql)
# wifis = [r[0] for r in cur.fetchall()]
# # 初始化数据矩阵和初始向量
# metrix = []
# vec = [0 for wifi in range(0, len(wifis))]
# rows = []
# # 查询所有数据
# sql = "SELECT row_id,wifi_ssid,wifi_db FROM data_test_final WHERE mall_id='%s' ORDER BY row_id,wifi_ssid " % mall_id
# cur.execute(sql)
# row = cur.fetchone()
# v = vec[:]
# row_id = row[0]
# if wifis.__contains__(row[1]):
# v[wifis.index(row[1])] = utils.normal(row[2])
# for r in cur.fetchall():
# # 根据是否与前一条row_id相同进行不同操作
# if r[0] != row_id:
# metrix.append(v)
# rows.append(row_id)
# v = vec[:]
# row_id = r[0]
# if wifis.__contains__(r[1]):
# v[wifis.index(r[1])] = utils.normal(r[2])
# metrix.append(v)
# rows.append(row_id)
# metrix = np.array(metrix)
# result = model.predict(metrix)
# # print(result)
# # print(cur.rowcount)
# # print(len(result))
# # print(len(rows))
# for r in range(0, len(rows)):
# sql = "INSERT INTO data_test_result VALUES ('{r}','{s}')".format(r = rows[r], s=result[r])
# cur.execute(sql)
# sql = "INSERT INTO data_test_handled SET mall_id='%s',handled=1" % mall_id
# cur.execute(sql)
# conn.commit()
# print(utils.get_time(), ' ',mall_id, ' handled done')
# print(i, ' malls handled.')
# i += 1
|
#!/usr/bin/python3.6
from collections import Counter
# Demo: character frequencies of a fixed string via collections.Counter.
set_of_string = "aaaasddddrrrww+++wwcccxxx+++"
my_counter = Counter(set_of_string)

# Frequency table, e.g. Counter({'+': 6, ...}); keys in first-seen order.
print(my_counter)

# Expand the counter back into individual characters, each repeated by its
# count and grouped by first appearance -- same as list(my_counter.elements()).
print([ch for ch, count in my_counter.items() for _ in range(count)])
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: trainer.py
# Author: Qian Ge <geqian1001@gmail.com>
import os
import scipy.misc
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
import matplotlib.patches as patches
class Trainer(object):
    """Drives training, validation and qualitative testing of a model that
    exposes its graph ops via get_train_op()/get_loss()/get_accuracy() and
    a `layers` dict (appears to be a recurrent attention model -- it samples
    glimpse locations; confirm against the model implementation).
    """
    def __init__(self, model, train_data, init_lr=1e-3):
        # model: object exposing the graph ops referenced below.
        # train_data: dataflow with next_batch_dict() / epochs_completed.
        # init_lr: learning rate fed into the graph on every step.
        self._model = model
        self._train_data = train_data
        self._lr = init_lr
        # Cache op handles once so sess.run() calls stay cheap to build.
        self._train_op = model.get_train_op()
        self._loss_op = model.get_loss()
        self._accuracy_op = model.get_accuracy()
        self._sample_loc_op = model.layers['loc_sample']
        self._pred_op = model.layers['pred']
        self._sum_op = model.get_summary()
        self._lr_op = model.cur_lr
        self.global_step = 0
    def train_epoch(self, sess, summary_writer=None):
        """Run one full training epoch.

        Prints running loss/accuracy every 100 steps and, when a writer is
        supplied, logs epoch-level scalars plus the model's own summaries.
        """
        self._model.set_is_training(True)
        # self._lr = np.maximum(self._lr * 0.97, 1e-4)
        cur_epoch = self._train_data.epochs_completed
        step = 0
        loss_sum = 0
        acc_sum = 0
        # The dataflow bumps epochs_completed when it wraps around.
        while cur_epoch == self._train_data.epochs_completed:
            self.global_step += 1
            step += 1
            batch_data = self._train_data.next_batch_dict()
            im = batch_data['data']
            label = batch_data['label']
            _, loss, acc, cur_lr, cur_summary = sess.run(
                [self._train_op, self._loss_op, self._accuracy_op, self._lr_op, self._sum_op],
                feed_dict={self._model.image: im,
                           self._model.label: label,
                           self._model.lr: self._lr})
            loss_sum += loss
            acc_sum += acc
            if step % 100 == 0:
                print('step: {}, loss: {:.4f}, accuracy: {:.4f}'
                      .format(self.global_step,
                              loss_sum * 1.0 / step,
                              acc_sum * 1.0 / step))
        print('epoch: {}, loss: {:.4f}, accuracy: {:.4f}, lr:{}'
              .format(cur_epoch,
                      loss_sum * 1.0 / step,
                      acc_sum * 1.0 / step, cur_lr))
        if summary_writer is not None:
            s = tf.Summary()
            s.value.add(tag='train_loss', simple_value=loss_sum * 1.0 / step)
            s.value.add(tag='train_accuracy', simple_value=acc_sum * 1.0 / step)
            summary_writer.add_summary(s, self.global_step)
            summary_writer.add_summary(cur_summary, self.global_step)
    def valid_epoch(self, sess, dataflow, batch_size):
        """Evaluate mean loss/accuracy over one full pass of *dataflow*,
        then restore training mode on the model."""
        self._model.set_is_training(False)
        dataflow.setup(epoch_val=0, batch_size=batch_size)
        step = 0
        loss_sum = 0
        acc_sum = 0
        while dataflow.epochs_completed == 0:
            step += 1
            batch_data = dataflow.next_batch_dict()
            loss, acc = sess.run(
                [self._loss_op, self._accuracy_op],
                feed_dict={self._model.image: batch_data['data'],
                           self._model.label: batch_data['label'],
                           })
            loss_sum += loss
            acc_sum += acc
        print('valid loss: {:.4f}, accuracy: {:.4f}'
              .format(loss_sum * 1.0 / step, acc_sum * 1.0 / step))
        self._model.set_is_training(True)
    def test_batch(self, sess, batch_data, unit_pixel, size, scale, save_path=''):
        """Visualize one batch: draw the sampled glimpse bounding boxes on
        each input image per step and save both the glimpse crops and the
        annotated figures under *save_path*.
        """
        def draw_bbx(ax, x, y):
            # cur_size is read from the enclosing loop at call time (late
            # binding), so the box matches the scale currently being drawn.
            rect = patches.Rectangle(
                (x, y), cur_size, cur_size, edgecolor='r', facecolor='none', linewidth=2)
            ax.add_patch(rect)
        self._model.set_is_training(False)
        test_im = batch_data['data']
        loc_list, pred, input_im, glimpses = sess.run(
            [self._sample_loc_op, self._pred_op, self._model.input_im,
             self._model.layers['retina_reprsent']],
            feed_dict={self._model.image: test_im,
                       self._model.label: batch_data['label'],
                       })
        pad_r = size * (2 ** (scale - 2))
        print(pad_r)
        im_size = input_im[0].shape[0]
        # Map normalized locations in [-1, 1] to padded-image pixel coords.
        loc_list = np.clip(np.array(loc_list), -1.0, 1.0)
        loc_list = loc_list * 1.0 * unit_pixel / (im_size / 2 + pad_r)
        loc_list = (loc_list + 1.0) * 1.0 / 2 * (im_size + pad_r * 2)
        offset = pad_r
        print(pred)
        for step_id, cur_loc in enumerate(loc_list):
            im_id = 0
            glimpse = glimpses[step_id]
            for im, loc, cur_glimpse in zip(input_im, cur_loc, glimpse):
                im_id += 1
                fig, ax = plt.subplots(1)
                ax.imshow(np.squeeze(im), cmap='gray')
                for scale_id in range(0, scale):
                    cur_size = size * 2 ** scale_id
                    side = cur_size * 1.0 / 2
                    x = loc[1] - side - offset
                    y = loc[0] - side - offset
                    draw_bbx(ax, x, y)
                # plt.show()
                for i in range(0, scale):
                    # NOTE(review): scipy.misc.imsave was removed in SciPy
                    # 1.2 -- confirm this project pins an older SciPy.
                    scipy.misc.imsave(
                        os.path.join(save_path,'im_{}_glimpse_{}_step_{}.png').format(im_id, i, step_id),
                        np.squeeze(cur_glimpse[:,:,i]))
                plt.savefig(os.path.join(
                    save_path,'im_{}_step_{}.png').format(im_id, step_id))
                plt.close(fig)
        self._model.set_is_training(True)
|
'''
Given a string s and a non-empty string p, find all the start indices of p's anagrams in s.
Strings consists of lowercase English letters only and the length of both strings s and p will not be larger than 20,100.
The order of output does not matter.
Example 1:
Input:
s: "cbaebabacd" p: "abc"
Output:
[0, 6]
Explanation:
The substring with start index = 0 is "cba", which is an anagram of "abc".
The substring with start index = 6 is "bac", which is an anagram of "abc".
Example 2:
Input:
s: "abab" p: "ab"
Output:
[0, 1, 2]
Explanation:
The substring with start index = 0 is "ab", which is an anagram of "ab".
The substring with start index = 1 is "ba", which is an anagram of "ab".
The substring with start index = 2 is "ab", which is an anagram of "ab".
'''
class Solution:
    def findAnagrams(self, s, p):
        """Return all start indices of anagrams of *p* inside *s*.

        Sliding-window multiset comparison: one Counter update per
        character of *s*. Fixes the original's missing `import collections`
        (which made every call raise NameError) and replaces the convoluted
        symmetric-difference equality test with a direct Counter comparison.

        :type s: str
        :type p: str
        :rtype: List[int]
        """
        from collections import Counter  # this file has no top-level imports

        need = Counter(p)
        window = Counter(s[:len(p)])
        output = []
        if window == need:
            output.append(0)
        # Slide the window one character at a time: add the entering char,
        # evict the one that fell out, then compare multisets.
        for i in range(len(p), len(s)):
            window[s[i]] += 1
            window -= Counter(s[i - len(p)])  # subtraction drops <= 0 counts
            if window == need:
                output.append(i - len(p) + 1)
        return output
|
"""empty message
Revision ID: d82dd1e7ad3c
Revises: 9bb2d69fbd7f
Create Date: 2018-10-26 10:44:16.979729
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd82dd1e7ad3c'
down_revision = '9bb2d69fbd7f'
branch_labels = None
depends_on = None
def upgrade():
    """Add the `users.is_active` boolean column."""
    # ### commands auto generated by Alembic - please adjust! ###
    # NOTE(review): nullable=False with no server_default fails on a table
    # that already contains rows -- confirm `users` is empty at this point.
    op.add_column('users', sa.Column('is_active', sa.Boolean(), nullable=False))
    # ### end Alembic commands ###
def downgrade():
    """Revert upgrade(): drop the `users.is_active` column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'is_active')
    # ### end Alembic commands ###
|
import pytest # noqa: F401 (imported but unused)
import numpy as np
from osmo_camera import tiff as module
from osmo_camera.constants import DNR_TO_TIFF_FACTOR
@pytest.mark.parametrize(
    "name, test_rgb_image",
    [
        (
            "Within [0, 1) DNR",
            # fmt: off
            np.array(
                [
                    [[0, 0, 0], [0.1, 0.2, 0.3]],
                    [[0.4, 0.5, 0.6], [0.7, 0.8, 0.9]]
                ]
            ),
            # fmt: on
        ),
        (
            "Within padded [-64, 64) DNR",
            np.array(
                [
                    [[-64, -64, -64], [-0.5, -0.5, -0.5]],
                    [[0.5, 0.5, 0.5], [63.999999, 63.999999, 63.999999]],
                ]
            ),
        ),
        (
            "Challenging high-precision fractions",
            np.array([[[1 / 3, 2 ** -25, 2 ** -29], [np.pi, np.e, -np.pi]]]),
        ),
    ],
)
def test_rgb_image_saved_to_tiff_file_and_loaded_retains_data(
    tmp_path, name, test_rgb_image
):
    """Round-trip check: saving an RGB image to TIFF and loading it back
    must reproduce the data within one quantization step.

    `name` exists only to label the parametrized cases in pytest output.
    """
    absolute_tolerance = (
        1 / DNR_TO_TIFF_FACTOR
    )  # quantization error when saving to and reading from an rgb image
    assert (
        absolute_tolerance == 2 ** -25
    )  # Reminder to take another look at this test if the constant changes
    # tmp_path provides an object of type PosixPath, tifffile expects a file path as a string
    tmp_filepath = str(tmp_path / "test_tiff.tiff")
    module.save.as_tiff(test_rgb_image, tmp_filepath)
    # read rgb_image from tmp tiff (should convert)
    actual_tmp_tiff_as_rgb_image = module.open.as_rgb(tmp_filepath)
    np.testing.assert_allclose(
        test_rgb_image, actual_tmp_tiff_as_rgb_image, atol=absolute_tolerance
    )
@pytest.mark.parametrize(
    "name, test_rgb_image",
    [
        (
            "Below min",
            np.array(
                [
                    [[0, 0, 0], [-0.5, -0.5, -0.5]],
                    [[0.5, 0.5, 0.5], [-64.01, -64.01, -64.01]],
                ]
            ),
        ),
        (
            "Above max",
            np.array(
                [
                    [[0, 0, 0], [-0.5, -0.5, -0.5]],
                    [[0.5, 0.5, 0.5], [64.01, 64.01, 64.01]],
                ]
            ),
        ),
    ],
)
def test_tiff_save_raises_if_image_out_of_range(tmp_path, name, test_rgb_image):
    """Values outside the representable range must raise instead of being
    silently truncated on save. `name` only labels the pytest cases."""
    # tmp_path provides an object of type PosixPath, tifffile expects a file path as a string
    tmp_filepath = str(tmp_path / "test_tiff.tiff")
    with pytest.raises(module.save.DataTruncationError):
        module.save.as_tiff(test_rgb_image, tmp_filepath)
|
print("Bonjour")
print("Ludovic")
print("fama")
print("Bazdar")
print("BOUGUERRA")
print("fama")
print("modif Lova")
print("aziz")
|
import facebook
class FacebookMessenger:
def __init__(self):
self.graph = facebook.GraphAPI(
"__REMOVED__")
def post_message(self, msg):
try:
self.graph.put_object("__REMOVED__", "feed", message=msg)
print "Facebook success"
except Exception as e:
print "Facebook update failed"
print e.message
|
'''
Why 0.1 * 10 == 1 evaluates to True despite 0.1 not being exactly representable
The exact value of decimal 0.1 can't be represented in 64-bit binary floating-point, so it gets rounded to the nearest representable value, which is 0.1000000000000000055511151231257827021181583404541015625.
However, while the exact value of 0.1000000000000000055511151231257827021181583404541015625 * 10 can be represented in binary, it would take more bits of precision than 64-bit binary floating-point has. The result also gets rounded to the nearest representable value, and it turns out the nearest representable value is exactly 1.0.
Basically, you have two rounding errors, and they happen to cancel.
'''
|
import os
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETER: a single string
RETURNS: the string with all / replaced with @ '''
def str_encode(string):
    """Return *string* with every "/" replaced by "@"."""
    return string.translate(str.maketrans({"/": "@"}))
''' PARAMETER: a single string
RETURNS: the string with all @ replaced with / '''
def str_decode(string):
    """Return *string* with every "@" replaced by "/" (inverse of encode)."""
    return string.translate(str.maketrans({"@": "/"}))
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETER: a full path
RETURNS: only the filename '''
def get_fname_from_path(path):
    """Return only the filename: everything after the last "/" or "@".

    Replaces the original reversed character scan with quadratic string
    concatenation by a pair of rfind() calls; returns the whole string
    when neither separator is present (same as before).
    """
    cut = max(path.rfind("/"), path.rfind("@"))
    return path[cut + 1:]
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETER: a single filename
RETURNS: the file without its extension '''
def remove_extension(filename):
    """Return *filename* without its final extension.

    Fixes a bug in the original countdown loop: when the name contained no
    dot, the counter reached 0 and `filename[:-1]` silently dropped the
    last character. A dot-less name is now returned unchanged.
    """
    base, sep, _ = filename.rpartition(".")
    # rpartition yields ('', '', filename) when there is no dot.
    return base if sep else filename
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETER: a single filename.
RETURNS: the file extension from that filename. '''
def get_single_extension(filename):
    """Return the lowercased file extension of *filename*.

    When the name contains no dot the entire name is returned lowercased,
    matching the original scan-from-the-end behaviour.
    """
    return filename.rsplit(".", 1)[-1].lower()
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETER: a directory path
RETURNS: a list of the immediate children of that directory '''
def get_immediate_subdirectories(a_dir):
    """Return the immediate child directories of *a_dir*, each joined to
    the parent path and suffixed with a trailing "/"."""
    return [
        os.path.join(a_dir, entry) + "/"
        for entry in os.listdir(a_dir)
        if os.path.isdir(os.path.join(a_dir, entry))
    ]
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETER: "char" a character to find
"string" a string to find the character in
RETURNS: index of "char" in "string". Returns length of "string"
if there are no instances of that character. '''
def find_char_from_end(string, char):
    """Return the index of the first occurrence of *char* scanning from the
    end of *string* (i.e. the last occurrence), or len(string) when the
    character does not appear.

    Fixes a NameError in the original, which indexed an undefined name
    `filename` instead of the `string` parameter.
    """
    pos = string.rfind(char)
    return pos if pos != -1 else len(string)
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETER: a string with a path
RETURNS: "/hello/no/" from argument "/hello/no/yes/"
NOTE: works with or without the trailing "/" '''
def remove_path_end(path):
    """Return *path* with its last component removed, keeping the final
    "/": "/hello/no/yes/" -> "/hello/no/". Works with or without a
    trailing "/"."""
    # Drop a single trailing separator so the scan finds the parent split.
    if path[-1] == "/":
        path = path[:-1]
    cut = path.rfind("/")
    if cut == -1:
        # No separator at all: mirror the original behaviour of keeping
        # just the first character.
        cut = 0
    return path[:cut + 1]
#=========1=========2=========3=========4=========5=========6=========7=
''' PARAMETER: a string with a directory
RETURNS: only the last folder name '''
def get_last_dir_from_path(path):
    """Return only the last folder name of *path* ("/" or "@" separated),
    ignoring one trailing separator if present."""
    if path[-1] in ("/", "@"):
        path = path[:-1]
    cut = max(path.rfind("/"), path.rfind("@"))
    return path[cut + 1:]
#=========1=========2=========3=========4=========5=========6=========7=
def main():
    """Entry point used only when this utility module is run directly."""
    print("This file is just for importing functions, don't run it. ")
if __name__ == "__main__":
    # stuff only to run when not called via 'import' here
    main()
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import calendar
import errno
import io
import json
import math
import os
import time
from subprocess import call
import boto3
import numpy as np
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
from dateutil.relativedelta import *
from . import colortables
import webservice.GenerateImageMRF as MRF
from webservice.algorithms.NexusCalcHandler import NexusCalcHandler as BaseHandler
from webservice.NexusHandler import nexus_handler
@nexus_handler
class MapFetchCalcHandler(BaseHandler):
    """Webservice handler that renders a dataset's tiles for one
    observation day into a global PNG map; generate() additionally batches
    monthly/daily images into MRF output and uploads a tarball to S3."""
    name = "MapFetchHandler"
    path = "/map"
    description = "Creates a map image"
    # Argument documentation consumed by the webservice framework.
    params = {
        "ds": {
            "name": "Dataset",
            "type": "string",
            "description": "A supported dataset shortname identifier"
        },
        "t": {
            "name": "Time",
            "type": "int",
            "description": "Data observation date"
        },
        "output": {
            "name": "Output Format",
            "type": "string",
            "description": "Output format. Use 'PNG' for this endpoint"
        },
        "min": {
            "name": "Minimum Value",
            "type": "float",
            "description": "Minimum value to use when computing color scales"
        },
        "max": {
            "name": "Maximum Value",
            "type": "float",
            "description": "Maximum value to use when computing color scales"
        },
        "ct": {
            "name": "Color Table",
            "type": "string",
            "description": "Identifier of a supported color table"
        },
        "interp": {
            "name": "Interpolation filter",
            "type": "string",
            "description": "Interpolation filter to use when rescaling image data. Can be 'nearest', 'lanczos', 'bilinear', or 'bicubic'."
        },
        "width": {
            "name": "Width",
            "type": "int",
            "description": "Output image width (max: 8192)"
        },
        "height": {
            "name": "Height",
            "type": "int",
            "description": "Output image height (max: 8192)"
        }
    }
    singleton = True
    # Cached "NO DATA" placeholder image; built once by __create_no_data().
    NO_DATA_IMAGE = None
    def __init__(self, tile_service_factory):
        BaseHandler.__init__(self, tile_service_factory)
    @staticmethod
    def __tile_to_image(img_data, tile, min, max, table, x_res, y_res):
        """Paint one tile's values into the target pixel store.

        `min`/`max` shadow the builtins here; they are the color-scale
        bounds. NOTE(review): callers pass img.getdata(), but the sequence
        PIL's Image.getdata() returns has no putpixel() method -- confirm
        this code path actually runs, or whether the Image itself should
        be passed instead.
        """
        width = len(tile.longitudes)
        height = len(tile.latitudes)
        # Fill masked entries with NaN so they are skipped below.
        d = np.ma.filled(tile.data[0], np.nan)
        for y in range(0, height):
            for x in range(0, width):
                value = d[y][x]
                # NOTE(review): `value != 0` also skips genuine zero
                # observations -- presumably 0 is a fill value; confirm.
                if not np.isnan(value) and value != 0:
                    lat = tile.latitudes[y]
                    lon = tile.longitudes[x]
                    # Lat/lon -> pixel coordinates on the global canvas.
                    pixel_y = int(math.floor(180.0 - ((lat + 90.0) * y_res)))
                    pixel_x = int(math.floor((lon + 180.0) * x_res))
                    # Clamp to the color-scale bounds, then scale to 0..255.
                    value = np.max((min, value))
                    value = np.min((max, value))
                    value255 = int(round((value - min) / (max - min) * 255.0))
                    rgba = MapFetchCalcHandler.__get_color(value255, table)
                    img_data.putpixel((pixel_x, pixel_y), (rgba[0], rgba[1], rgba[2], 255))
    @staticmethod
    def __translate_interpolation(interp):
        """Map a filter name to a PIL resampling constant; anything
        unrecognized (including the 'near' default used by generate())
        falls through to NEAREST."""
        if interp.upper() == "LANCZOS":
            return Image.LANCZOS
        elif interp.upper() == "BILINEAR":
            return Image.BILINEAR
        elif interp.upper() == "BICUBIC":
            return Image.BICUBIC
        else:
            return Image.NEAREST
    @staticmethod
    def __make_tile_img(tile):
        """Create a transparent RGBA image sized to one tile's grid."""
        width = len(tile.longitudes)
        height = len(tile.latitudes)
        img = Image.new("RGBA", (width, height), (0, 0, 0, 0))
        return img
    @staticmethod
    def __get_xy_resolution(tile):
        """Grid spacing (degrees per cell) from a tile's first two
        longitude/latitude entries."""
        x_res = abs(tile.longitudes[0] - tile.longitudes[1])
        y_res = abs(tile.latitudes[0] - tile.latitudes[1])
        return x_res, y_res
    @staticmethod
    def __create_global(nexus_tiles, stats, width=2048, height=1024, force_min=np.nan, force_max=np.nan,
                        table=colortables.grayscale, interpolation="nearest"):
        """Composite all tiles onto a 360x180 canvas, then resize to the
        requested output dimensions with the chosen resampling filter."""
        # Forced bounds (NaN means "not forced") override dataset stats.
        data_min = stats["minValue"] if np.isnan(force_min) else force_min
        data_max = stats["maxValue"] if np.isnan(force_max) else force_max
        x_res, y_res = MapFetchCalcHandler.__get_xy_resolution(nexus_tiles[0])
        # NOTE(review): the per-tile resolution computed above is
        # immediately overridden to 1 degree per pixel -- confirm the
        # override is intentional and the call above is dead.
        x_res = 1
        y_res = 1
        canvas_width = int(360.0 / x_res)
        canvas_height = int(180.0 / y_res)
        img = Image.new("RGBA", (canvas_width, canvas_height), (0, 0, 0, 0))
        img_data = img.getdata()
        for tile in nexus_tiles:
            MapFetchCalcHandler.__tile_to_image(img_data, tile, data_min, data_max, table, x_res, y_res)
        final_image = img.resize((width, height), MapFetchCalcHandler.__translate_interpolation(interpolation))
        return final_image
    @staticmethod
    def __get_color(value, table):
        """Linearly interpolate a 0..255 value into the color table and
        return an opaque (r, g, b, 255) tuple."""
        index = (float(value) / float(255)) * (len(table) - 1)
        prev = int(math.floor(index))
        next = int(math.ceil(index))
        f = index - prev
        prevColor = table[prev]
        nextColor = table[next]
        r = int(round(nextColor[0] * f + (prevColor[0] * (1.0 - f))))
        g = int(round(nextColor[1] * f + (prevColor[1] * (1.0 - f))))
        b = int(round(nextColor[2] * f + (prevColor[2] * (1.0 - f))))
        return (r, g, b, 255)
    @staticmethod
    def __colorize(img, table):
        """Recolor opaque pixels through the color table.

        NOTE(review): same getdata()/putpixel concern as __tile_to_image;
        this helper is not called by any method visible in this file.
        """
        data = img.getdata()
        for x in range(0, img.width):
            for y in range(0, img.height):
                if data[x + (y * img.width)][3] == 255:
                    value = data[x + (y * img.width)][0]
                    rgba = MapFetchCalcHandler.__get_color(value, table)
                    data.putpixel((x, y), (rgba[0], rgba[1], rgba[2], 255))
    @staticmethod
    def __create_no_data(width, height):
        """Return (and lazily cache) a transparent image tiled with a
        'NO DATA' watermark every 100 pixels."""
        if MapFetchCalcHandler.NO_DATA_IMAGE is None:
            img = Image.new("RGBA", (width, height), (0, 0, 0, 0))
            draw = ImageDraw.Draw(img)
            fnt = ImageFont.load_default()
            for x in range(10, width, 100):
                for y in range(10, height, 100):
                    draw.text((x, y), "NO DATA", (180, 180, 180), font=fnt)
            # NOTE(review): the cache ignores width/height, so later calls
            # with different dimensions reuse the first image -- confirm.
            MapFetchCalcHandler.NO_DATA_IMAGE = img
        return MapFetchCalcHandler.NO_DATA_IMAGE
    def calc(self, computeOptions, **args):
        """Handle one /map request: fetch tiles for the 24h window ending
        at 't', render them to PNG, and return an object whose toImage()
        yields the PNG bytes."""
        ds = computeOptions.get_argument("ds", None)
        dataTimeEnd = computeOptions.get_datetime_arg("t", None)
        if dataTimeEnd is None:
            raise Exception("Missing 't' option for time")
        dataTimeEnd = time.mktime(dataTimeEnd.timetuple())
        dataTimeStart = dataTimeEnd - 86400.0
        color_table_name = computeOptions.get_argument("ct", "smap")
        # Color tables are looked up by attribute name; an unknown name
        # raises KeyError here.
        color_table = colortables.__dict__[color_table_name]
        interpolation = computeOptions.get_argument("interp", "nearest")
        force_min = computeOptions.get_float_arg("min", np.nan)
        force_max = computeOptions.get_float_arg("max", np.nan)
        # Clamp requested dimensions to the documented 8192 maximum.
        width = np.min([8192, computeOptions.get_int_arg("width", 1024)])
        height = np.min([8192, computeOptions.get_int_arg("height", 512)])
        stats = self._get_tile_service().get_dataset_overall_stats(ds)
        daysinrange = self._get_tile_service().find_days_in_range_asc(-90.0, 90.0, -180.0, 180.0, ds, dataTimeStart,
                                                                      dataTimeEnd)
        if len(daysinrange) > 0:
            ds1_nexus_tiles = self._get_tile_service().get_tiles_bounded_by_box_at_time(-90.0, 90.0, -180.0, 180.0,
                                                                                        ds,
                                                                                        daysinrange[0])
            img = self.__create_global(ds1_nexus_tiles, stats, width, height, force_min, force_max, color_table,
                                       interpolation)
        else:
            img = self.__create_no_data(width, height)
        imgByteArr = io.BytesIO()
        img.save(imgByteArr, format='PNG')
        imgByteArr = imgByteArr.getvalue()
        class SimpleResult(object):
            # Minimal result wrapper: JSON output is unsupported for maps.
            def toJson(self):
                return json.dumps({"status": "Please specify output type as PNG."})
            def toImage(self):
                return imgByteArr
        return SimpleResult()
    def generate(self, ds, granule_name, prefix, ct, interp, _min, _max, width, height, time_interval):
        """Batch-render the granule's full time range into per-interval
        PNG/TIFF/MRF files, tar the result and upload it to S3.

        All None parameters fall back to the same defaults calc() uses;
        time_interval is 'day' for daily steps, anything else for monthly.
        """
        color_table_name = ct
        if ct is None:
            color_table_name = "smap"
        color_table = colortables.__dict__[color_table_name]
        interpolation = interp
        if interp is None:
            # "near" is not a recognized filter name, so this resolves to
            # NEAREST inside __translate_interpolation.
            interpolation = "near"
        force_min = _min
        force_max = _max
        if _min is None:
            force_min = np.nan
        if _max is None:
            force_max = np.nan
        temp_width = width
        temp_height = height
        if width is None:
            temp_width = 1024
        if height is None:
            temp_height = 512
        width = np.min([8192, temp_width])
        height = np.min([8192, temp_height])
        if time_interval == 'day':
            time_interval = relativedelta(days=+1)
        else:
            time_interval = relativedelta(months=+1)
        stats = self._get_tile_service().get_dataset_overall_stats(ds)
        start_time, end_time = self._get_tile_service().get_min_max_time_by_granule(ds, granule_name)
        MRF.create_all(ds, prefix)
        # Make a temporary directory for storing the .png and .tif files
        temp_dir = '/tmp/tmp/'
        try:
            os.makedirs(temp_dir)
        except OSError as e:
            # Pre-existing directory is fine; anything else is fatal.
            if e.errno != errno.EEXIST:
                raise
        while start_time <= end_time:
            one_interval_later = start_time + time_interval
            temp_end_time = one_interval_later - relativedelta(minutes=+1)  # prevent getting tiles for 2 intervals
            ds1_nexus_tiles = self._get_tile_service().find_tiles_in_box(-90.0, 90.0, -180.0, 180.0, ds, start_time,
                                                                         temp_end_time)
            if ds1_nexus_tiles is not None:
                img = self.__create_global(ds1_nexus_tiles, stats, width, height, force_min, force_max, color_table,
                                           interpolation)
            else:
                img = self.__create_no_data(width, height)
            imgByteArr = io.BytesIO()
            img.save(imgByteArr, format='PNG')
            imgByteArr = imgByteArr.getvalue()
            arr = str(start_time).split()  # arr[0] should contain a string of the date in format 'YYYY-MM-DD'
            fulldate = arr[0]
            temp_png = temp_dir + fulldate + '.png'
            temp_tif = temp_dir + fulldate + '.tif'
            # NOTE(review): file object is never closed explicitly (only
            # CPython refcounting closes it) -- consider a with-block.
            open(temp_png, 'wb').write(imgByteArr)
            arr = fulldate.split('-')
            year = arr[0]
            month = calendar.month_abbr[int(arr[1])]
            dt = month + '_' + year
            # PNG -> GeoTIFF -> geographic/arctic/antarctic MRFs; stop the
            # whole run on the first failing conversion step.
            retcode = MRF.png_to_tif(temp_png, temp_tif)
            if retcode == 0:
                retcode = MRF.geo_to_mrf(temp_tif, prefix, year, dt, ds)
            if retcode == 0:
                retcode = MRF.geo_to_arctic_mrf(temp_tif, prefix, year, dt, ds)
            if retcode == 0:
                retcode = MRF.geo_to_antarctic_mrf(temp_tif, prefix, year, dt, ds, interp)
            if retcode != 0:
                break
            start_time = one_interval_later
        tar_file = ds + '.tar.gz'
        retcode = call(["tar", "-zcvf", tar_file, ds])
        if retcode == 0:
            # Delete temporary files/folders if tar.gz is created successfully
            call(["rm", "-rf", ds])
            call(["rm", "-rf", temp_dir])
        else:
            print("Error creating tar.gz")
        # Upload the tar.gz file to the sea-level-mrf S3 bucket
        s3bucket = 'sea-level-mrf'
        s3client = boto3.client('s3')
        try:
            with open(tar_file, 'rb') as data:
                s3client.upload_fileobj(data, s3bucket, tar_file)
        except Exception as e:
            print(("Unable to add tar.gz to S3: \n" + str(e)))
        call(["rm", "-rf", tar_file])  # Delete the tar.gz from local storage
|
Python 3.7.4 (tags/v3.7.4:e09359112e, Jul 8 2019, 20:34:20) [MSC v.1916 64 bit (AMD64)] on win32
Type "help", "copyright", "credits" or "license()" for more information.
>>> a=(1,2,3,4,"nikhil")
>>> a[2]=5
Traceback (most recent call last):
File "<pyshell#1>", line 1, in <module>
a[2]=5
TypeError: 'tuple' object does not support item assignment
>>> a
(1, 2, 3, 4, 'nikhil')
>>> a[0]
1
>>> a[1]
2
>>> a[1:4]
(2, 3, 4)
>>> a[-1]
'nikhil'
>>> a[ : :-1]
('nikhil', 4, 3, 2, 1)
>>> a.index(3)
2
>>> a.index("nikhil")
4
>>> a.count(1)
1
>>>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-10-06 09:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add the `brand` CharField (default
    'Roboticia') to the `robot` model of app1."""
    dependencies = [
        ('app1', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='robot',
            name='brand',
            field=models.CharField(default='Roboticia', max_length=20),
        ),
    ]
|
#!/usr/bin/env python3
import sys
import json
from util.aoc import file_to_day
from util.input import load_data
def main(test=False):
    """Solve AoC 2021 day 18.

    Part 1: sum all snailfish numbers in input order and print the final
    magnitude. Part 2: print the largest magnitude obtainable from adding
    any ordered pair of distinct numbers (addition is not commutative).
    """
    numbers = [
        json.loads(line) for line in load_data(file_to_day(__file__), test)
    ]
    p1_res = numbers[0]
    for num in numbers[1:]:
        p1_res = add(p1_res, num)
    p2 = 0
    for num in numbers:
        for num2 in numbers:
            if num != num2:
                p2 = max(p2, magnitude(add(num, num2)))
    print("2021:18:1 =", magnitude(p1_res))
    print("2021:18:2 =", p2)
def add(n1, n2):
    """Snailfish addition: pair the two numbers, then fully reduce."""
    return reduce([n1, n2])
def reduce(n):
    """Fully reduce a snailfish number: explode while possible, then apply
    one split and start over, until neither action applies.

    (This local name is unrelated to functools.reduce.)
    """
    d1, n1 = explode(n)
    if d1:
        return reduce(n1)
    d2, n2 = split(n)
    if d2:
        return reduce(n2)
    return n2
def split(n):
    """Apply the leftmost snailfish split.

    A regular number >= 10 becomes the pair [floor(n/2), ceil(n/2)].
    Returns (changed, new_number); at most one split is performed per call.
    """
    if not isinstance(n, list):
        if n >= 10:
            return True, [n // 2, (n + 1) // 2]
        return False, n
    changed, left = split(n[0])
    if changed:
        return True, [left, n[1]]
    changed, right = split(n[1])
    return changed, [left, right]
def explode(n):
    """Perform the leftmost snailfish explode action on *n*.

    Works on a flat token stream: the number is serialized to a string,
    tokenized into brackets/commas/ints, and the first pair nested more
    than four deep is replaced by 0 with its halves added to the nearest
    int on each side. Returns (True, new_number) if an explode happened,
    otherwise (False, n).
    """
    ns = str(n)
    parts = []
    i = 0
    # Tokenize: '[' ',' ']' as single-char tokens, digit runs as ints,
    # spaces (from Python's list repr) skipped.
    while i < len(ns):
        if ns[i] in "[,]":
            parts.append(ns[i])
            i += 1
        elif ns[i] == " ":
            i += 1
        else:
            j = i
            while j < len(ns) and ns[j].isdigit():
                j += 1
            parts.append(int(ns[i:j]))
            i = j
    depth = 0
    for i, c in enumerate(parts):
        if c == "[":
            depth += 1
            if depth <= 4:
                continue
            # Tokens i..i+4 are '[', left, ',', right, ']' of the exploding
            # pair (a pair deeper than 4 holds two plain ints).
            left = parts[i + 1]
            right = parts[i + 3]
            left_i = None
            right_i = None
            # Nearest int token strictly before the pair, and first int
            # token after it.
            for j in range(len(parts)):
                if isinstance(parts[j], int) and j < i:
                    left_i = j
                elif (
                    isinstance(parts[j], int) and j > i + 3 and right_i is None
                ):
                    right_i = j
            # Add to the right neighbor before collapsing the pair, since
            # the slice below would shift right_i; left_i < i is unaffected.
            if right_i is not None:
                parts[right_i] += right
            parts = parts[:i] + [0] + parts[i + 5 :]
            if left_i is not None:
                parts[left_i] += left
            # Re-parse the token stream back into nested lists.
            return True, json.loads("".join([str(p) for p in parts]))
        elif c == "]":
            depth -= 1
    return False, n
def magnitude(n):
    """Magnitude of a snailfish number: 3 * left + 2 * right applied
    recursively; a plain number is its own magnitude."""
    if not isinstance(n, list):
        return n
    left, right = n
    return 3 * magnitude(left) + 2 * magnitude(right)
if __name__ == "__main__":
test = len(sys.argv) > 1 and sys.argv[1] == "test"
main(test)
|
#!/usr/bin/env python
import os
import sys
import subprocess as sp
# Feature-modality name -> descriptor dimension, passed to the launch file.
dimensions = \
{"merged":63, \
"colorNormalHist":45, \
"colorHSV":3, \
"colorRGB":3, \
"colorH":5, \
"colorS":5, \
"colorV":5, \
"colorHist":30, \
"normal":3, \
"normalX":5, \
"normalY":5, \
"normalZ":5, \
"normalHist":15, \
"fpfh":33, \
"colorHSVNormal":6, \
"colorRGBNormal":6, \
"colorLab":6, \
"colorLabHist":15, \
"colorLabNormalHist":30, \
"meanFPFHLabHist":48, \
"merge":0}
# Require all five positional arguments before doing any work.
if len(sys.argv) < 6:
    print("Usage: arg1 : a folder that contain archives")
    print("arg2 : method for generating saliency map")
    print("arg3 : modality")
    print("arg4 : number of iteration")
    print("arg5 : output file")
    sys.exit(1)
# Launch the classifier evaluation once per archive folder.
# NOTE(review): sys.argv[1] + folder concatenates without a separator, so
# arg1 must end with "/" -- confirm callers pass it that way.
for folder in os.listdir(sys.argv[1]) :
    print(folder)
    sp.call(["roslaunch","dream_babbling","classifier_eval.launch", \
        "archive_folder:="+ sys.argv[1] + folder, \
        "method:=" + sys.argv[2], \
        "modality:="+sys.argv[3], \
        "dimension:="+str(dimensions[sys.argv[3]]), \
        "number_of_iteration:=" + sys.argv[4], \
        "output_file:=" + sys.argv[5]])
#!/bin/env python3
# Level-specific solvers
from . import player
class Level_1_2:
    """Solver for levels 1 and 2: pacman seeks the food on a map with
    immobile ghosts. Deduplicates the turn loop that run() previously
    repeated verbatim under a '### FIX ME' marker."""

    def __init__(self, the_map):
        # Map object, queried for item positions by id.
        self._map = the_map
        # Pacman starts at the item-9 position(s) with score 0.
        self._pacman = player.Pacman(the_map.get_items(9), 0)
        # One Player per ghost position; a single off-board placeholder at
        # (-1, -1) when the map has none.
        ghost_list = the_map.get_items(2)
        if ghost_list:
            self._ghost = [player.Player(pos) for pos in ghost_list]
        else:
            self._ghost = [player.Player((-1, -1))]
        # NOTE(review): food is read from item id 2, the same id used for
        # the ghosts above -- confirm this is intentional.
        self._food = the_map.get_items(2)[0]
        # Turn queue for this level; ghosts cannot move, so only pacman
        # takes turns.
        self._turn_queue = [self._pacman]
        # Game state: 0 = running, 1 = game over, 2 = win.
        self._game_state = 0

    def update_game_state(self):
        """Update pacman's score and the win/lose state after a move."""
        if self._pacman.get_position() != self._food and self._pacman.get_position() not in self._map.get_items(2):
            self._pacman.update_score(False)
        if self._pacman.get_position() == self._food:
            self._pacman.update_score(True)
            self._game_state = 2
        if self._pacman.get_position() in self._map.get_items(3):
            self._game_state = 1

    def _advance_turns(self):
        """Give every queued player one turn; a player with no move ends
        the game as a win, and any state change stops the round early."""
        for each_player in self._turn_queue:
            loc_old = each_player.get_position()
            move = each_player.take_turn(self._map, self._food, False)
            if not move:
                self._game_state = 2
                break
            self._map.move_player(loc_old, move)
            self.update_game_state()
            if self._game_state != 0:
                break

    def run(self, steps=-1):
        """Play until the game ends (steps == -1) or for at most *steps*
        rounds; extra rounds after the game ends are no-ops."""
        if steps == -1:
            while self._game_state == 0:
                self._advance_turns()
        else:
            for _ in range(steps):
                if self._game_state == 0:
                    self._advance_turns()
|
#BEGIN_HEADER
import simplejson
import sys
import os
import ast
import glob
import json
import logging
import time
import subprocess
import threading, traceback
from collections import OrderedDict
from pprint import pprint
import script_util
import script_util2
from biokbase.workspace.client import Workspace
from biokbase.auth import Token
from os.path import isfile, join, exists
try:
from biokbase.HandleService.Client import HandleService
except:
from biokbase.AbstractHandle.Client import AbstractHandle as HandleService
import kb_cummerbundutils
class kb_cummerbundException(Exception):
    """Module-specific error carrying a plain message.

    Subclasses Exception (the original used BaseException, which generic
    `except Exception:` handlers do not catch -- BaseException is reserved
    for SystemExit/KeyboardInterrupt-style exits).
    """

    def __init__(self, msg):
        # Pass the message to the base class so standard traceback
        # formatting shows it, and keep the original .msg attribute.
        super(kb_cummerbundException, self).__init__(msg)
        self.msg = msg

    def __str__(self):
        return repr(self.msg)
#END_HEADER
class kb_cummerbund:
    '''
    Module Name:
    kb_cummerbund
    Module Description:
    A KBase module: kb_cummerbund
    '''

    ######## WARNING FOR GEVENT USERS #######
    # Since asynchronous IO can lead to methods - even the same method -
    # interrupting each other, you must be *very* careful when using global
    # state. A method could easily clobber the state set by another while
    # the latter method is running.
    #########################################
    #BEGIN_CLASS_HEADER
    __TEMP_DIR = 'temp'
    #END_CLASS_HEADER

    # config contains contents of config file in a hash or None if it couldn't
    # be found
    def __init__(self, config):
        #BEGIN_CONSTRUCTOR
        # Service endpoints and working directories come from the deploy
        # config; attributes are simply absent when a key is missing.
        if 'ws_url' in config:
            self.__WS_URL = config['ws_url']
        if 'shock_url' in config:
            self.__SHOCK_URL = config['shock_url']
        if 'hs_url' in config:
            self.__HS_URL = config['hs_url']
        if 'scratch' in config:
            self.__SCRATCH = config['scratch']
        if 'rscripts' in config:
            self.__RSCRIPTS = config['rscripts']
        # Logging: stream to stdout with UTC timestamps.
        self.__LOGGER = logging.getLogger('kb_cummerbund')
        if 'log_level' in config:
            self.__LOGGER.setLevel(config['log_level'])
        else:
            self.__LOGGER.setLevel(logging.INFO)
        streamHandler = logging.StreamHandler(sys.stdout)
        formatter = logging.Formatter("%(asctime)s - %(filename)s - %(lineno)d - %(levelname)s - %(message)s")
        formatter.converter = time.gmtime
        streamHandler.setFormatter(formatter)
        self.__LOGGER.addHandler(streamHandler)
        self.__LOGGER.info("Logger was set")
        #END_CONSTRUCTOR
        pass

    def generate_cummerbund_plots(self, ctx, cummerbundParams):
        """Generate the standard set of cummeRbund QC plots from a cuffdiff
        workspace object and save them as a cummerbund_output object.

        Returns the name of the saved output object.
        """
        # ctx is the context object
        # return variables are: returnVal
        #BEGIN generate_cummerbund_plots
        params = cummerbundParams
        returnVal = params['ws_cummerbund_output']

        # Set up workspace client
        user_token = ctx['token']
        ws_client = Workspace(url=self.__WS_URL, token=user_token)

        # Read the input cuffdiff workspace object json file and get
        # filehandle for cuffdiff tar file
        s_res = ws_client.get_objects([{
            'name': params['ws_cuffdiff_id'],
            'workspace': params['workspace_name']
        }])

        # Check if workspace has data
        if len(s_res) == 0:
            self.__LOGGER.info("Workspace did not return any objects")
            return returnVal

        cuffdiff_dir = script_util2.extract_cuffdiff_data(self.__LOGGER, self.__SHOCK_URL, self.__SCRATCH, s_res, user_token)
        self.__LOGGER.info("Cuffdiff folder = " + cuffdiff_dir)

        if (cuffdiff_dir is False):
            return returnVal

        # Run R script to run cummerbund json and update the cummerbund output json file
        # Prepare output object.
        outputobject = dict()

        # Prepare output plot list
        cummerbundplotset = []

        # List of plots to generate
        plotlist = [
            {'file': "dispersionplot.R",
             'title': "Dispersion plot",
             'description': "Dispersion plot is the quality measure of the data. It estimates deviation from threshold against counts in FPKM."},

            {'file': "fpkmscvplot.R",
             'title': "Genes CV plot",
             'description': "The squared coefficient of variation plot is a normalized measure of cross-replicate variability that can be useful for evaluating the quality of RNA-seq data."},

            {'file': "isoformscvplot.R",
             'title': "Isoform CV plot",
             'description': "The squared coefficient of variation plot is a normalized measure of cross-replicate variability that can be useful for evaluating the quality of RNA-seq data.Differences in CV2 can result in lower numbers of differentially expressed isoforms due to a higher degree of variability between replicate fpkm estimates."},

            {'file': "densityplot.R",
             'title': "Density plot",
             'description': "The density plot shows the distribution of FPKM scores across samples"},

            {'file': "csdensityrepplot.R",
             'title': "Replicates density plot",
             'description': "The replicates density plot shows the distribution of FPKM scores across sample replicates"},

            {'file': "boxplot.R",
             'title': "Box plots",
             'description': "The box plots show the FPKM distribution across samples."},

            {'file': "boxrepplot.R",
             'title': "Box plots of replicates",
             'description': "The box plots of replicates show the FPKM distribution across sample replicates."},

            {'file': "pairwisescatterplots.R",
             'title': "Pairwise scatter plots",
             'description': "The scatterplots show differences in gene expression between two samples. If two samples are identical, all genes will fall on the mid-line."},

            {'file': "volcanomatrixplot.R",
             'title': "Volcano matrix plots",
             'description': "Volcano matrix plot is a scatter plot that also identifies differentially expressed genes (by color) between samples based on log2 fold change cut off."},

            {'file': "pcaplot.R",
             'title': "PCA plot",
             'description': "Principal Component Analysis (PCA) is an informative approach for dimensionality reduction for exploring teh relationship between sample conditions."},

            {'file': "pcarepplot.R",
             'title': "PCA plot including replicates",
             'description': "Principal Component Analysis (PCA) is an informative approach for dimensionality reduction for exploring teh relationship between sample conditions including replicates."},

            {'file': "mdsplot.R",
             'title': "Multi-dimensional scaling plot",
             'description': "Multi-dimensional scaling plots are similar to PCA plots and useful for determining the major sources of variation in the dataset. "},

            {'file': "mdsrepplot.R",
             'title': "Multi-dimensional scaling plot including replicates",
             'description': "Multi-dimensional scaling plot including replicates are similar to PCA plots and useful for determining the major sources of variation in the dataset with replicates. These can be useful to determine any systematic bias that may be present between conditions."}
        ]

        #TODO.. Giving Rplot.pdf
        #    { 'file': "dendrogramplot.R",
        #      'title': "Dendrogram",
        #      'description': "Dendrogram based on the JS (Jensen-Shannon divergence) distance" },
        #
        #    { 'file': "dendrogramrepplot.R",
        #      'title': "Dendrogram including replicates",
        #      'description': "Dendrogram including replicates based on the JS (Jensen-Shannon divergence) distance" },

        # Iterate through the plotlist and generate the images and json files.
        for plot in plotlist:
            status = script_util2.rplotandupload(self.__LOGGER, self.__SCRATCH, self.__RSCRIPTS,
                                                 plot['file'], self.__SHOCK_URL, self.__HS_URL, user_token,
                                                 cummerbundplotset, plot['title'], plot['description'], cuffdiff_dir)
            if status == False:
                self.__LOGGER.info("Problem generating image and json file - " + plot["file"])

        # Populate the output object
        outputobject['cummerbundplotSet'] = cummerbundplotset
        #TODO: Need to figure out how to get rnaseq experiment id
        outputobject['rnaseq_experiment_id'] = "rnaseq_experiment_id"
        outputobject['cuffdiff_input_id'] = params['ws_cuffdiff_id']
        res = ws_client.save_objects({
            "workspace": params['workspace_name'],
            "objects": [{
                "type": "KBaseRNASeq.cummerbund_output",
                "data": outputobject,
                "name": params["ws_cummerbund_output"]}]
        })
        #END generate_cummerbund_plots

        # At some point might do deeper type checking...
        if not isinstance(returnVal, basestring):
            raise ValueError('Method generate_cummerbund_plots return value ' +
                             'returnVal is not type basestring as required.')
        # return the results
        return [returnVal]

    def create_expression_matrix(self, ctx, expressionMatrixParams):
        """Build an FPKM expression matrix (with or without replicates) from
        a cuffdiff workspace object and save it as an ExpressionMatrix.

        Returns the name of the saved matrix object.
        """
        # ctx is the context object
        # return variables are: returnVal
        #BEGIN create_expression_matrix
        params = expressionMatrixParams
        returnVal = params['ws_expression_matrix_id']

        # Set up workspace client
        user_token = ctx['token']
        workspace = params['workspace_name']
        ws_client = Workspace(url=self.__WS_URL, token=user_token)

        # Read the input cuffdiff workspace object json file and get
        # filehandle for cuffdiff tar file
        s_res = ws_client.get_objects([{
            'name': params['ws_cuffdiff_id'],
            'workspace': params['workspace_name']
        }])

        # Check if workspace has data
        if len(s_res) == 0:
            self.__LOGGER.info("Workspace did not return any objects")
            return returnVal

        # NOTE: the first assignment is immediately overwritten by
        # extract_cuffdiff_data; kept to preserve the original behavior.
        cuffdiff_dir = join(self.__SCRATCH, "cuffdiffData/cuffdiff")
        cuffdiff_dir = script_util2.extract_cuffdiff_data(self.__LOGGER, self.__SHOCK_URL, self.__SCRATCH, s_res, user_token)
        self.__LOGGER.info("Cuffdiff folder = " + cuffdiff_dir)

        if (cuffdiff_dir is False):
            return returnVal

        # Run R script to get fpkmgenematrix.R
        # Prepare output object.
        outjson = False
        #outjson = "repfpkmgenematrix.R.matrix.txt.json";
        if params['include_replicates'] == 0:
            scriptfile = "fpkmgenematrix.R"
            outjson = script_util2.generate_and_upload_expression_matrix(self.__LOGGER, self.__SCRATCH,
                                                                         self.__RSCRIPTS, scriptfile, self.__SHOCK_URL, self.__HS_URL, user_token,
                                                                         cuffdiff_dir, self.__WS_URL, workspace)
        else:
            scriptfile = "repfpkmgenematrix.R"
            outjson = script_util2.generate_and_upload_expression_matrix(self.__LOGGER, self.__SCRATCH,
                                                                         self.__RSCRIPTS, scriptfile, self.__SHOCK_URL, self.__HS_URL, user_token,
                                                                         cuffdiff_dir, self.__WS_URL, workspace)

        if outjson is False:
            self.__LOGGER.info("Creation of expression matrix failed")
            return returnVal

        # Tag the generated matrix with its type and source genome, then save.
        with open("{0}/{1}".format(self.__SCRATCH, outjson), 'r') as et:
            eo = json.load(et)
        eo['type'] = 'untransformed'
        genome_ref = s_res[0]['data']['analysis']['genome_id']
        eo['genome_ref'] = genome_ref
        self.__LOGGER.info(workspace + self.__SCRATCH + outjson + params['ws_expression_matrix_id'])
        ws_client.save_objects({'workspace': workspace,
                                'objects': [{'type': 'KBaseFeatureValues.ExpressionMatrix',
                                             'data': eo,
                                             'name': params['ws_expression_matrix_id']
                                             }]})
        #END create_expression_matrix

        # At some point might do deeper type checking...
        if not isinstance(returnVal, basestring):
            raise ValueError('Method create_expression_matrix return value ' +
                             'returnVal is not type basestring as required.')
        # return the results
        return [returnVal]

    def create_interactive_heatmap_de_genes(self, ctx, interactiveHeatmapParams):
        """Filter differentially expressed genes from a cuffdiff object and
        render an interactive heatmap, saved as an ExpressionMatrix.

        Returns the name of the saved matrix object.
        """
        # ctx is the context object
        # return variables are: returnVal
        #BEGIN create_interactive_heatmap_de_genes
        fparams = interactiveHeatmapParams
        # Bug fix: returnVal must be defined before the early
        # `return returnVal` below -- it was previously assigned only at the
        # very end of the method, so an empty workspace response raised
        # NameError instead of returning.
        returnVal = fparams['ws_expression_matrix_id']

        # Set up workspace client
        user_token = ctx['token']
        workspace = fparams['workspace_name']
        ws_client = Workspace(url=self.__WS_URL, token=user_token)

        # Bundle the service endpoints/credentials the helper scripts need.
        system_params = {}
        system_params['token'] = user_token
        system_params['ws_url'] = self.__WS_URL
        system_params['logger'] = self.__LOGGER
        system_params['shock_url'] = self.__SHOCK_URL
        system_params['hs_url'] = self.__HS_URL
        system_params['scratch'] = self.__SCRATCH
        system_params['rscripts'] = self.__RSCRIPTS
        system_params['workspace'] = workspace

        # Read the input cuffdiff workspace object json file and get
        # filehandle for cuffdiff tar file
        s_res = ws_client.get_objects([{
            'name': fparams['ws_cuffdiff_id'],
            'workspace': fparams['workspace_name']
        }])

        # Check if workspace has data
        if len(s_res) == 0:
            self.__LOGGER.info("Workspace did not return any objects")
            return returnVal

        # NOTE: the first assignment is immediately overwritten by
        # extract_cuffdiff_data; kept to preserve the original behavior.
        cuffdiff_dir = join(self.__SCRATCH, "cuffdiffData/cuffdiff")
        cuffdiff_dir = script_util2.extract_cuffdiff_data(self.__LOGGER, self.__SHOCK_URL, self.__SCRATCH, s_res, user_token)
        #cuffdiff_dir = "/kb/module/work/nnc/cuffdiff"
        self.__LOGGER.info("Cuffdiff folder = " + cuffdiff_dir)

        #if (cuffdiff_dir is False):
        #    return returnVal

        # Filter the differential-expression table, then extract the gene list.
        fparams['cuffdiff_dir'] = cuffdiff_dir
        fparams['infile'] = join(cuffdiff_dir, "gene_exp.diff")
        fparams['outfile'] = join(system_params['scratch'], "gene_exp.diff.filter")
        filtered_matrix = script_util2.filter_expression_matrix(fparams, system_params)
        self.__LOGGER.info("matrix is " + filtered_matrix)

        fparams['infile'] = join(system_params['scratch'], "gene_exp.diff.filter")
        fparams['outfile'] = join(system_params['scratch'], "gene_exp.diff.filter.genelist")
        genelist_filtered_matrix_file = script_util2.get_gene_list_from_filter_step(fparams)

        # Prepare output object.
        outjson = False

        # R-script parameters for the heatmap rendering.
        rparams = {}
        rparams['genelist'] = filtered_matrix
        rparams['cuffdiff_dir'] = fparams['cuffdiff_dir']
        rparams['outpng'] = join(system_params['scratch'], "heatmap.png")
        rparams['imageheight'] = 1600
        rparams['imagewidth'] = 800
        rparams['plotscript'] = join(system_params['rscripts'], "heatmapplotinteractive.R")
        rparams['include_replicates'] = 1
        rparams['outmatrix'] = join(system_params['scratch'], "outmatrix")
        roptstr_basic_heatmap_rep = script_util2.get_command_line_heatmap_basic(rparams)

        # Run R script to run cummerbund json and update the cummerbund output json file
        # Prepare output object.
        outputobject = dict()

        # Prepare output plot list
        cummerbundplotset = []

        # List of plots to generate
        plotlist = [
            {'roptstr': roptstr_basic_heatmap_rep,
             'title': "Heatmap",
             'description': "Heatmap",
             'exp': fparams['ws_expression_matrix_id']
             }
        ]
        fparams['cummerbundplotset'] = cummerbundplotset

        # Iterate through the plotlist and generate the images and json files.
        for plot in plotlist:
            fparams['title'] = plot['title']
            fparams['description'] = plot['description']
            status = script_util2.rplotanduploadinteractive(system_params, fparams, rparams, plot['roptstr'])
            if status == False:
                self.__LOGGER.info("Problem generating image and json file - " + plot["roptstr"])
            else:
                self.__LOGGER.info(status)
                outjson = status
                # Tag the generated matrix with its type and source genome,
                # then save it under the requested name.
                with open("{0}/{1}".format(self.__SCRATCH, outjson), 'r') as et2:
                    eo2 = json.load(et2)
                genome_ref = s_res[0]['data']['analysis']['genome_id']
                eo2['type'] = 'untransformed'
                eo2['genome_ref'] = genome_ref
                self.__LOGGER.info(workspace + self.__SCRATCH + outjson + plot['exp'])
                ws_client.save_objects({'workspace': workspace,
                                        'objects': [{'type': 'KBaseFeatureValues.ExpressionMatrix',
                                                     'data': eo2,
                                                     'name': plot['exp']
                                                     }]})
        #END create_interactive_heatmap_de_genes

        # At some point might do deeper type checking...
        if not isinstance(returnVal, basestring):
            raise ValueError('Method create_interactive_heatmap_de_genes return value ' +
                             'returnVal is not type basestring as required.')
        # return the results
        return [returnVal]
|
from Tested_Method.MethodToTest import working_function_2
from unittest.mock import patch,call
TESTED_MODULE = 'Tested_Method.MethodToTest'


# mocking just the public function
# Fix: @patch decorators are applied bottom-up, so the mock arguments must be
# declared in reverse decorator order: sendAPI (bottom patch) first, then
# get_element_2, then get_element_1. The original swapped the two
# get_element mocks, so each name was bound to the wrong mock object.
@patch(f'{TESTED_MODULE}.get_element_1', return_value=10)
@patch(f'{TESTED_MODULE}.get_element_2', return_value=5)
@patch(f'{TESTED_MODULE}.sendAPI')
def test_working_function__apply_division_of_number1_by_number2_and_send(
        mock_sendAPI, mock_get_element_2, mock_get_element_1):
    # given
    expected_calls = [
        call("Dest", 2),
        call("Dest1", 2),
        call("Dest2", 2),
        call("Dest3", 2),
    ]
    # when
    working_function_2()
    # then
    mock_sendAPI.assert_has_calls(expected_calls)
class Solution:
    """Maximum-subarray problem, solved with Kadane's algorithm."""

    def maxSubArray(self, nums):
        """Return the largest sum over all non-empty contiguous subarrays."""
        best = current = nums[0]
        for value in nums[1:]:
            # Either extend the running subarray or start fresh at `value`.
            current = max(value, current + value)
            best = max(best, current)
        return best
if __name__ == '__main__':
    # Quick manual check; swap in the longer sample below if desired.
    # sample = [-2, 1, -3, 4, -1, 2, 1, -5, 4]
    sample = [-2, 3, 2, -1]
    solver = Solution()
    print(solver.maxSubArray(sample))
|
#!/usr/bin/env python
#-*-coding:utf-8-*-
# @File:main.py
# @Author: Michael.liu
# @Date:2020/6/30 13:18
# @Desc: this code is ....
from .helper import *
from .decisiontree_model import *
from .gbdt_lr_model import *
import time
import argparse
from .config_xgb import *
from .xgboost_model import *
# Load the shared feature header once at import time so every model sees the
# same column layout.
head_path = './feature_head.txt'
head = load_head(head_path)
print(head)

if __name__ == '__main__':
    t_start = time.time()
    print("......开始训练.....")
    print("....start....")
    parser = argparse.ArgumentParser()
    parser.add_argument("--config_path", help="config path of model")
    args = parser.parse_args()
    ################################ DT ############################
    # decisiontTree_model = DecisionTreeModel(args)
    # decisiontTree_model.load_train_data(head)
    # decisiontTree_model.load_test_data(head)
    # decisiontTree_model.train(head[2:],head[0])
    ############################### gbdt ############################
    # gbdt_model = GBDT_LR_MODEl(args)
    # gbdt_model.load_train_data(head)
    # gbdt_model.load_test_data(head)
    # gbdt_model.train(head[0:],head[0])
    ############################## xgboost###########################
    # `params` comes from config_xgb (star import above); the original no-op
    # self-assignment `params = params` has been removed.
    train_data_path = 'format_train.txt'
    test_data_path = 'format_test.txt'
    target = 'relevent'
    ignore_list = ['qid']
    sep = ','
    xg = xgb_model(params=params)
    x_train, y_train, x_test, y_test = xg.load_data(train_data_path, test_data_path, target, sep, ignore_list)
    # DataFrame.info() prints its report itself and returns None, so the
    # original print(x_train.info()) emitted a stray "None" line.
    x_train.info()
    xg.fit(x_train, y_train, x_test, y_test, rounds=500)
    print("......训练结束.....,共耗费 %s " % (time.time()-t_start))
|
from setuptools import setup
# Package metadata, also exposed as module attributes.
__author__ = 'Kurt Rose'
__version__ = '0.1dev'
__contact__ = 'kurt@kurtrose.com'
__url__ = 'https://github.com/kurtbrose/relativity'
__license__ = 'MIT'


setup(
    name='relativity',
    version=__version__,
    description="Relational object sets.",
    # NOTE(review): this module has no docstring, so __doc__ is None here —
    # confirm a real long description was intended.
    long_description=__doc__,
    author=__author__,
    author_email=__contact__,
    url=__url__,
    packages=['relativity', 'relativity.tests'],
    include_package_data=True,
    zip_safe=False,
    license=__license__,
    platforms='any',
    classifiers=[
        'Topic :: Utilities',
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Libraries',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
    ],
)
|
from ctypes import *
from enum import Enum
# Device-enumeration flags (used by the driver's list-devices style calls).
FT_LIST_NUMBER_ONLY = 0x80000000
FT_LIST_BY_INDEX = 0x40000000
FT_LIST_ALL = 0x20000000
class FT_DEVICE(Enum):
    """FTDI device type identifiers."""
    FT_DEVICE_BM = 0
    FT_DEVICE_AM = 1
    FT_DEVICE_100AX = 2
    FT_DEVICE_UNKNOWN = 3
    FT_DEVICE_2232C = 4
    FT_DEVICE_232R = 5
    FT_DEVICE_2232H = 6
    FT_DEVICE_4232H = 7
    FT_DEVICE_232H = 8
    FT_DEVICE_X_SERIES = 9
    FT_DEVICE_4222H_0 = 10
    FT_DEVICE_4222H_1_2 = 11
    FT_DEVICE_4222H_3 = 12
    FT_DEVICE_4222_PROG = 13
    FT_DEVICE_900 = 14
    FT_DEVICE_930 = 15
    FT_DEVICE_UMFTPD3A = 16
class STATUS(Enum):
    """FTDI API status/return codes.

    Fix: the original class body repeated FT_OK through
    FT_INVALID_PARAMETER a second time at the end.  enum.Enum forbids
    reusing a member name (it raises "TypeError: Attempted to reuse key"
    while the class body is being built), so the module could not even be
    imported.  The duplicate members are removed here; the set of codes
    and their values are unchanged.
    """
    FT_OK = 0
    FT_INVALID_HANDLE = 1
    FT_DEVICE_NOT_FOUND = 2
    FT_DEVICE_NOT_OPENED = 3
    FT_IO_ERROR = 4
    FT_INSUFFICIENT_RESOURCES = 5
    FT_INVALID_PARAMETER = 6
    FT_INVALID_BAUD_RATE = 7
    FT_DEVICE_NOT_OPENED_FOR_ERASE = 8
    FT_DEVICE_NOT_OPENED_FOR_WRITE = 9
    FT_FAILED_TO_WRITE_DEVICE = 10
    FT_EEPROM_READ_FAILED = 11
    FT_EEPROM_WRITE_FAILED = 12
    FT_EEPROM_ERASE_FAILED = 13
    FT_EEPROM_NOT_PRESENT = 14
    FT_EEPROM_NOT_PROGRAMMED = 15
    FT_INVALID_ARGS = 16
    FT_NOT_SUPPORTED = 17
    FT_OTHER_ERROR = 18
    FT_DEVICE_LIST_NOT_READY = 19
# Opaque device handle used by the driver API calls.
FT_HANDLE = c_void_p
# Open-by selector flags.
FT_OPEN_BY_SERIAL_NUMBER = 1
FT_OPEN_BY_DESCRIPTION = 2
FT_OPEN_BY_LOCATION = 4
# Flow-control modes.
FT_FLOW_NONE = 0x0000
FT_FLOW_RTS_CTS = 0x0100
FT_FLOW_DTR_DSR = 0x0200
FT_FLOW_XON_XOFF = 0x0400
# Bit-mode selector values.
FT_BITMODE_RESET = 0x00
FT_BITMODE_ASYNC_BITBANG = 0x01
FT_BITMODE_MPSSE = 0x02
FT_BITMODE_SYNC_BITBANG = 0x04
FT_BITMODE_MCU_HOST = 0x08
FT_BITMODE_FAST_SERIAL = 0x10
FT_BITMODE_CBUS_BITBANG = 0x20
FT_BITMODE_SYNC_FIFO = 0x40
class FT_DEVICE_LIST_INFO_NODE(Structure):
    # ctypes mirror of the driver's device-info struct; field order and
    # sizes must match the C layout exactly, so do not reorder these.
    _fields_ = [
        ('Flags', c_int),
        ('Type', c_int),
        ('ID', c_int),
        ('LocID', c_int),
        ('SerialNumber', c_char * 16),
        ('Description', c_char * 64),
        ('ftHandle', FT_HANDLE),
    ]
# /*MPSSE Control Commands*/
MPSSE_CMD_SET_DATA_BITS_LOWBYTE = 0x80
MPSSE_CMD_SET_DATA_BITS_HIGHBYTE = 0x82
MPSSE_CMD_GET_DATA_BITS_LOWBYTE = 0x81
MPSSE_CMD_GET_DATA_BITS_HIGHBYTE = 0x83
MPSSE_CMD_SEND_IMMEDIATE = 0x87
MPSSE_CMD_ENABLE_3PHASE_CLOCKING = 0x8C
MPSSE_CMD_DISABLE_3PHASE_CLOCKING = 0x8D
MPSSE_CMD_ENABLE_DRIVE_ONLY_ZERO = 0x9E
# /*MPSSE Data Commands - bit mode - MSB first */
MPSSE_CMD_DATA_OUT_BITS_POS_EDGE = 0x12
MPSSE_CMD_DATA_OUT_BITS_NEG_EDGE = 0x13
MPSSE_CMD_DATA_IN_BITS_POS_EDGE = 0x22
MPSSE_CMD_DATA_IN_BITS_NEG_EDGE = 0x26
MPSSE_CMD_DATA_BITS_IN_POS_OUT_NEG_EDGE = 0x33
MPSSE_CMD_DATA_BITS_IN_NEG_OUT_POS_EDGE = 0x36
# /*MPSSE Data Commands - byte mode - MSB first * /
MPSSE_CMD_DATA_OUT_BYTES_POS_EDGE = 0x10
MPSSE_CMD_DATA_OUT_BYTES_NEG_EDGE = 0x11
MPSSE_CMD_DATA_IN_BYTES_POS_EDGE = 0x20
MPSSE_CMD_DATA_IN_BYTES_NEG_EDGE = 0x24
MPSSE_CMD_DATA_BYTES_IN_POS_OUT_NEG_EDGE = 0x31
MPSSE_CMD_DATA_BYTES_IN_NEG_OUT_POS_EDGE = 0x34
# /*SCL & SDA directions * /
DIRECTION_SCLIN_SDAIN = 0x10
DIRECTION_SCLOUT_SDAIN = 0x11
DIRECTION_SCLIN_SDAOUT = 0x12
DIRECTION_SCLOUT_SDAOUT = 0x13
# /*SCL & SDA values * /
VALUE_SCLLOW_SDALOW = 0x00
VALUE_SCLHIGH_SDALOW = 0x01
VALUE_SCLLOW_SDAHIGH = 0x02
VALUE_SCLHIGH_SDAHIGH = 0x03
# /*Data size in bits * /
DATA_SIZE_8BITS = 0x07
DATA_SIZE_1BIT = 0x00
'''/* The I2C master should actually drive the SDA line only when the output is LOW. It should
tristate the SDA line when the output should be high. This tristating the SDA line during high
output is supported only in FT232H chip. This feature is called DriveOnlyZero feature and is
enabled when the following bit is set in the options parameter in function I2C_Init * /'''
I2C_ENABLE_DRIVE_ONLY_ZERO = 0x0002
# /*clock*/
SET_LOW_BYTE_DATA_BITS_CMD = 0x80
GET_LOW_BYTE_DATA_BITS_CMD = 0x81
SET_HIGH_BYTE_DATA_BITS_CMD = 0x82
GET_HIGH_BYTE_DATA_BITS_CMD = 0x83
SET_CLOCK_FREQUENCY_CMD = 0x86
SET_LOW_BYTE_DATA_BITS_DATA = 0x13
SET_HIGH_BYTE_DATA_BITS_DATA = 0x0F
DISABLE_CLOCK_DIVIDE = 0x8A
ENABLE_CLOCK_DIVIDE = 0x8B
# I2C bus timing durations (units not stated here -- presumably clock
# ticks/loop counts used elsewhere; TODO confirm against the consumer code).
SLAVE_PREPARE_DURATION = 5
START_DURATION_1 = 10
START_DURATION_2 = 5
STOP_DURATION_1 = 10
STOP_DURATION_2 = 10
STOP_DURATION_3 = 10
RESTART_DURATION = 10
# START_DURATION_1 = 20
# START_DURATION_2 = 40
# STOP_DURATION_1 = 20
# STOP_DURATION_2 = 20
# STOP_DURATION_3 = 20
# RESTART_DURATION = 20
# I2C acknowledge bytes.
SEND_ACK = 0x00
SEND_NACK = 0x80
I2C_ADDRESS_READ_MASK = 0x01  # /*LSB 1 = Read*/
I2C_ADDRESS_WRITE_MASK = 0xFE  # /*LSB 0 = Write*/
SPI_DIRECTION = 0x0B
# Word Lengths
FT_BITS_8 = 8
FT_BITS_7 = 7
# Stop Bits
FT_STOP_BITS_1 = 0
FT_STOP_BITS_2 = 2
# Parity
FT_PARITY_NONE = 0
FT_PARITY_ODD = 1
FT_PARITY_EVEN = 2
FT_PARITY_MARK = 3
FT_PARITY_SPACE = 4
# Flow Control
# NOTE(review): these four FT_FLOW_* values duplicate identical definitions
# earlier in this module; harmless, but one copy could be dropped.
FT_FLOW_NONE = 0x0000
FT_FLOW_RTS_CTS = 0x0100
FT_FLOW_DTR_DSR = 0x0200
FT_FLOW_XON_XOFF = 0x0400
class FT_Exception(Exception):
    """Module-specific exception type for FTDI-related failures."""
if __name__ == '__main__':
    # Ad-hoc smoke check left by the author; earlier experiments kept below.
    # print(FT_DEVICE_LIST_INFO_NODE.ftHandle)
    # for dev in FT_DEVICE:
    #     print(dev, dev.value)
    # print((FT_DEVICE(8)))
    demo_byte = c_char(b'\x02')
    print(demo_byte)
|
import os
import json
import sys
from infraboxcli.log import logger
def init(_):
    """Scaffold a minimal InfraBox project in the current working directory.

    Creates infrabox.json and infrabox/test/Dockerfile, and appends
    InfraBox entries to .gitignore (only if one exists) and .dockerignore.
    Exits with status 1 when the project already appears initialized.

    Parameters
    ----------
    _ : object
        Unused CLI-args placeholder (kept for the command dispatcher).
    """
    p = os.getcwd()
    logger.info("Initializing %s" % p)

    infrabox_json = os.path.join(p, 'infrabox.json')
    if os.path.exists(infrabox_json):
        logger.error("%s already exists" % infrabox_json)
        sys.exit(1)

    infrabox_test = os.path.join(p, 'infrabox', 'test')
    dockerfile = os.path.join(infrabox_test, 'Dockerfile')
    if os.path.exists(dockerfile):
        logger.error("%s already exists" % dockerfile)
        sys.exit(1)

    logger.info("Creating infrabox.json")
    with open(infrabox_json, 'w+') as f:
        json.dump({
            "version": 1,
            "jobs": [{
                "name": "test",
                "type": "docker",
                "build_only": False,
                "resources": {"limits": {"memory": 1024, "cpu": 1}},
                "docker_file": "infrabox/test/Dockerfile"
            }]
        }, f, sort_keys=True, indent=4)

    logger.info("Creating infrabox/test/Dockerfile")
    # Fix: os.makedirs raises OSError when the directory already exists
    # (e.g. an empty infrabox/test/ left behind without a Dockerfile);
    # only create it when it is actually missing.
    if not os.path.isdir(infrabox_test):
        os.makedirs(infrabox_test)
    with open(dockerfile, 'w+') as f:
        f.write("""
FROM alpine
RUN adduser -S testuser
USER testuser
CMD echo "hello world"
""")

    # Only append to .gitignore when the repo already has one.
    gitignore = os.path.join(p, '.gitignore')
    if os.path.exists(gitignore):
        with open(gitignore, 'a') as f:
            f.write("\n.infrabox/")
            f.write("\n.infraboxsecrets.json")

    # .dockerignore is created (or appended to) unconditionally.
    dockerignore = os.path.join(p, '.dockerignore')
    with open(dockerignore, 'a') as f:
        f.write("\n.infrabox/")
        f.write("\n.infraboxsecrets.json")

    logger.info("Successfully initialized project")
    logger.info("Use 'infrabox run' to execute your jobs")
|
import time
from math import sqrt, tan, sin, cos, pi, ceil, floor, acos, atan, asin, degrees, radians, log, atan2, acos, asin
from random import *
import numpy
from pymclevel import alphaMaterials, MCSchematic, MCLevel, BoundingBox
from mcplatform import *
import Queue
import utilityFunctions
from helper import *
from ChunkAnalysis import *
def createRoad(level, box, startingChunk, endingChunk, roadWidth, material):
class RoadNode:
def __init__(self, x, z, goals, prior = None, additionalCost = 0):
self.x = x
self.z = z
#self.y = getGroundYPos(x, z)
self.medianY, self.stdDev = self.getYStats()
self.prior = prior
if prior is None:
self.g = 0
else:
self.g = prior.g + additionalCost
self.g += abs(self.medianY - prior.medianY)
self.g += self.stdDev
# Calculating h (heuristic)
self.h = 999999
for goal in goals:
estimateToGoal = max(abs(goal[0] - self.x), abs(goal[1] - self.z))
if estimateToGoal < self.h:
self.h = estimateToGoal
# Setting f (expected total cost to the closest goal)
self.f = self.g + self.h
if prior is None:
self.deltaX = 0
self.deltaZ = 0
else:
self.deltaX = self.x - prior.x
self.deltaZ = self.z - prior.z
self.waterFraction = self.countWater() / (roadWidth ** 2.0)
def countWater(self):
count = 0
for x in xrange(self.x, self.x + roadWidth):
for z in xrange(self.z, self.z + roadWidth):
y = getGroundYPos(x, z)
id = level.blockAt(x, y, z)
if id == 8 or id == 9: # If liquid water
count += 1
return count
def getYStats(self):
yPositions = []
for x in xrange(self.x, self.x + roadWidth):
for z in xrange(self.z, self.z + roadWidth):
yPositions.append(getGroundYPos(x, z))
return numpy.median(yPositions), numpy.std(yPositions)
@staticmethod
def getSuccessorAttributes():
# Successor attributes is just to make it easier to process successors (it lists deltaX, deltaZ, and cost from parent)
successorAttributes = [(-1, 0, 1), (0, -1, 1), (0, 1, 1), (1, 0, 1)]
if roadWidth > 1: # Can only move diagonally if road width is greater than 1
successorAttributes += [(-1, -1, 1.5), (-1, 1, 1.5), (1, -1, 1.5), (1, 1, 1.5)]
# Scaling successor attributes by the road width
for i in xrange(len(successorAttributes)):
successorAttributes[i] = (successorAttributes[i][0] * roadWidth, successorAttributes[i][1] * roadWidth, successorAttributes[i][2] * roadWidth)
return successorAttributes
def getSuccessors(self, successorAttributes, goals):
successors = []
for i in successorAttributes:
# If the successor is within the box's bounds
if box.minx <= (self.x + i[0]) < box.maxx - roadWidth and box.minz <= (self.z + i[1]) < box.maxz - roadWidth:
candidate = RoadNode(self.x + i[0], self.z + i[1], goals, self, i[2])
if (candidate.medianY - self.medianY) <= roadWidth:
if self.deltaX == 0 and self.deltaZ == 0:
successors.append(candidate)
# If self is mostly over water, only add candidates whos deltaX and deltaZ are equal to self's
elif self.waterFraction > 0.5:
if self.deltaX == candidate.deltaX and self.deltaZ == candidate.deltaZ:
successors.append(candidate)
# Can only go in a direction that is 45 degrees from the current direction
elif roadWidth == 1: # Unless the road width is only 1
successors.append(candidate)
else:
if self.deltaX == 0 and self.deltaZ > 0:
if candidate.deltaZ > 0:
successors.append(candidate)
elif self.deltaX > 0 and self.deltaZ > 0:
if candidate.deltaX >= 0 and candidate.deltaZ >= 0:
successors.append(candidate)
elif self.deltaX > 0 and self.deltaZ == 0:
if candidate.deltaX > 0:
successors.append(candidate)
elif self.deltaX > 0 and self.deltaZ < 0:
if candidate.deltaX >= 0 and candidate.deltaZ <= 0:
successors.append(candidate)
elif self.deltaX == 0 and self.deltaZ < 0:
if candidate.deltaZ < 0:
successors.append(candidate)
elif self.deltaX < 0 and self.deltaZ < 0:
if candidate.deltaX <= 0 and candidate.deltaZ <= 0:
successors.append(candidate)
elif self.deltaX < 0 and self.deltaZ == 0:
if candidate.deltaX < 0:
successors.append(candidate)
else:
if candidate.deltaX <= 0 and candidate.deltaZ >= 0:
successors.append(candidate)
return successors
def regeneratePath(self, path = []):
path.append(self)
if self.prior is None:
return path
else:
return self.prior.regeneratePath(path)
def __lt__(self, other):
return self.f < other.f
def __hash__(self):
return self.x + (512 * self.z)
def __eq__(self, other):
return self.x == other.x and self.z == other.z
def getPath():
successorAttributes = RoadNode.getSuccessorAttributes()
goals = []
goals.append((endingChunk.box.minx, endingChunk.box.minz))
goals.append((endingChunk.box.maxx, endingChunk.box.minz))
goals.append((endingChunk.box.minx, endingChunk.box.maxz))
goals.append((endingChunk.box.maxx, endingChunk.box.maxz))
originX = startingChunk.box.minx + 8 - (roadWidth / 2)
originZ = startingChunk.box.minz + 8 - (roadWidth / 2)
originY = getGroundYPos(originX, originZ)
openList = Queue.PriorityQueue()
openList.put(RoadNode(originX, originZ, goals))
closedSet = set()
foundGoal = False
while openList.qsize() > 0:
current = openList.get()
# If we already checked this node, fetch the next best node in the open list
# This check is necessary because when adding successors, we can't tell if it was already in the open list
# Therefore, we will check if a better candidate at its position was already processed
if current in closedSet:
continue
# Checking if a goal is within the road
for goal in goals:
if current.x <= goal[0] < current.x + roadWidth and current.z <= goal[1] < current.z + roadWidth:
foundGoal = True
break
if foundGoal:
break
# Adding current to the closed set
closedSet.add(current)
# Adding successors to the open list
successors = current.getSuccessors(successorAttributes, goals)
for i in successors:
if i not in closedSet:
openList.put(i)
if foundGoal:
return current.regeneratePath()
else:
return []
# Gets a list of all blocks along the path that will make up the road
def getCompletePathCoordinates(path):
    """Expand path nodes into the (x, y, z) block positions of the road.

    Each node covers a roadWidth x roadWidth square of columns; diagonal
    steps are then patched by filling the two triangular gaps between the
    squares so the road stays connected.  The y of every column follows the
    terrain via getGroundYPos.

    A large commented-out height-smoothing pass (lerping medianY between
    neighbouring nodes) was dead code and has been removed.
    """
    pathCoordinates = []
    for i in xrange(len(path)):
        for xOffset in xrange(roadWidth):
            for zOffset in xrange(roadWidth):
                x = path[i].x + xOffset
                z = path[i].z + zOffset
                y = getGroundYPos(x, z)
                pathCoordinates.append((x, y, z))
    # Determining road blocks between diagonal path coordinates
    for i in xrange(len(path) - 1):
        # If path[i] and path[i + 1] are diagonal from each other
        if path[i].x != path[i + 1].x and path[i].z != path[i + 1].z:
            # Getting the bounds of the 2x2 square containing the diagonal path coordinates
            minx = min(path[i].x, path[i + 1].x)
            maxx = max(path[i].x, path[i + 1].x)
            minz = min(path[i].z, path[i + 1].z)
            maxz = max(path[i].z, path[i + 1].z)
            maxx += roadWidth
            maxz += roadWidth
            # Diagonally along y = x line
            if (path[i + 1].x - path[i].x) == (path[i + 1].z - path[i].z):
                # Filling in the bottom right half of the top left box of the 2x2 square
                for x in xrange(minx + 1, minx + roadWidth):
                    for z in xrange(minz + roadWidth, maxz - ((minx + roadWidth) - x)):
                        y = getGroundYPos(x, z)
                        pathCoordinates.append((x, y, z))
                # Filling in the top left half of the bottom right box of the 2x2 square
                for x in xrange(minx + roadWidth, maxx - 1):
                    for z in xrange(minz + 1 + (x - (minx + roadWidth)), minz + roadWidth):
                        y = getGroundYPos(x, z)
                        pathCoordinates.append((x, y, z))
            # Diagonally along y = -x line
            else:
                # Filling in the top right half of the bottom left box of the 2x2 square
                for x in xrange(minx + 1, minx + roadWidth):
                    for z in xrange(minz + ((minx + roadWidth) - x), minz + roadWidth):
                        y = getGroundYPos(x, z)
                        pathCoordinates.append((x, y, z))
                # Filling in the bottom left half of the top right box of the 2x2 square
                for x in xrange(minx + roadWidth, maxx - 1):
                    for z in xrange(minz + roadWidth, maxz - 1 - (x - (minx + roadWidth))):
                        y = getGroundYPos(x, z)
                        pathCoordinates.append((x, y, z))
    return pathCoordinates
# Builds a road on each path coordinate
def constructRoadOnPath(pathCoordinates, material):
    """Place `material` at every road column and clear headroom above it."""
    for x, y, z in pathCoordinates:
        setBlock(level, material, x, y, z)
        # carve out 4 blocks of air above the road surface
        for dy in xrange(1, 5):
            setBlock(level, (0, 0), x, y + dy, z)
# Build the road: find the path, expand it to block coordinates, place blocks.
path = getPath()
pathCoordinates = getCompletePathCoordinates(path)
constructRoadOnPath(pathCoordinates, material)
# NOTE(review): the bare `return` implies these statements live inside an
# enclosing function (likely the filter's perform()); `material` presumably
# comes from that outer scope — confirm against the full file.
return path, pathCoordinates
|
from concurrent.futures import ProcessPoolExecutor , wait
import time
# NOTE(review): a ProcessPoolExecutor created at import time, outside the
# __main__ guard, is re-executed by every spawned child on platforms using
# the "spawn" start method (e.g. Windows) — confirm this script only runs
# where that is safe.
executor = ProcessPoolExecutor(max_workers=100)
def task(msg):
    """Pretend to do one second of work, logging start and end for `msg`."""
    print(f"{msg} start!")
    time.sleep(1)
    print(f"{msg} end!")
    outcome = f"{msg} done!"
    return outcome
def print_result(future):
    """Completion callback: fetch the finished future's value and print it."""
    print(future.result())
def main():
    """Fan 100 tasks out to the process pool, then wait for them to finish."""
    for i in range(100):
        future = executor.submit(task, "task" + str(i))
        future.add_done_callback(print_result)
    # shutdown() blocks until all submitted tasks have completed
    executor.shutdown()
if __name__ == "__main__":
    # Time the full fan-out: ~100 one-second tasks shared across the pool,
    # so the elapsed time shows the parallel speed-up.
    start = time.time()
    main()
    end = time.time()
    print(f"total delay:{end-start}")
|
# Demonstrates int -> str conversion: len() returns an int, which cannot be
# concatenated to a string until it is converted with str().
name_length = len(input("What is your name?\n"))
print(type(name_length))  # shows <class 'int'>
# explicit type conversion so string concatenation below works
name_length_text = str(name_length)
print("Your Name has " + name_length_text + " Characters.")
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 5 14:09:52 2019
@author: PC
"""
import pickle
import xlrd
import os
# Load a pickled model from disk.
def open_model(path):
    """Deserialise and return the model stored at `path`.

    NOTE: pickle executes code on load — only open model files from a
    trusted source.
    """
    with open(path, "rb") as f:
        return pickle.load(f)
# Load feature rows and their two label columns from an Excel workbook.
def load_data(file):
    """Read the first sheet of `file`.

    Every row's last two cells are integer labels; the rest are features.
    Returns (features, label_column_1, label_column_2).
    """
    book = xlrd.open_workbook(file)
    sheet = book.sheets()[0]
    features = []
    labels_a = []
    labels_b = []
    for row_idx in range(sheet.nrows):
        row = sheet.row_values(row_idx)
        features.append(row[:len(row) - 2])
        labels_a.append(int(row[-2]))
        labels_b.append(int(row[-1]))
    print(labels_b)
    return features, labels_a, labels_b
# Run the first-stage model over every sample.
def predict_result(model, data):
    """Predict a label for each row of `data`; prints and returns the list."""
    labels = []
    for sample in data:
        # the svm model requires 2-D input, hence the [sample] wrapper
        labels.append(model.predict([sample])[0])
    print(labels)
    return labels
# Compute accuracy, reporting every misclassified segment.
def score_acc(name, c_s_label, predict_label1):
    """Return the fraction of positions where prediction matches truth.

    Prints a (Chinese) message for each mismatched segment of audio `name`.
    """
    total = len(c_s_label)
    correct = 0
    # index-based loop kept deliberately: a shorter prediction list must
    # raise IndexError rather than be silently truncated
    for i in range(total):
        if c_s_label[i] == predict_label1[i]:
            correct += 1
        else:
            print(name+"音频的第%d个片段预测错误"%(i+1))
    return correct / total
# List the entries of a directory.
def file_dir(path):
    """Return the file names inside `path` (os.listdir order)."""
    return os.listdir(path)
# Decide which second-stage svm model classifies each sample.
def chose_model(path, data, predict_1):
    """Second-stage classification.

    `predict_1[i]` (1, 2 or anything else) selects which of the three model
    files in `path` classifies `data[i]`.  Returns the final label list.

    The original re-opened and un-pickled a model file on every iteration;
    models are now loaded lazily, at most once each (and only the ones a
    first-stage label actually selects, matching the original behaviour).
    """
    loaded = {}  # model index -> deserialised model (lazy cache)

    def _model(idx):
        # load-on-first-use keeps "never load an unused model" semantics
        if idx not in loaded:
            loaded[idx] = open_model(path[idx])
        return loaded[idx]

    results = []
    for i in range(len(predict_1)):
        if predict_1[i] == 1:
            model = _model(0)
        elif predict_1[i] == 2:
            model = _model(1)
        else:
            model = _model(2)
        results.append(model.predict([data[i]])[0])
    return results
if __name__=="__main__":
    # Path to the stage-1 (rock-type) model.
    model_path=r"C:\Users\PC\Desktop\三模型图片\svm三类93.5065\svm_1"
    path_list=[]
    # Stage-2 models: sedimentary, igneous and metamorphic rock classifiers.
    model_path1=r"C:\Users\PC\Desktop\三模型图片\123\svm_沉积岩\svm_chenjiyan"
    model_path2=r"C:\Users\PC\Desktop\三模型图片\123\svm_火成岩\svm_huochengyan"
    model_path3=r"C:\Users\PC\Desktop\三模型图片\123\svm_变质岩\svm_bianzhi"
    path_list.append(model_path1)
    path_list.append(model_path2)
    path_list.append(model_path3)
    # Directory of test spreadsheets, one file per audio clip.
    dir_path=r"C:\Users\PC\Desktop\三模型图片\v3txt\123"
    model=open_model(model_path)
    files=file_dir(dir_path)
    num_file=len(files)
    for i in range(num_file):
        file_split=files[i].split(".")
        initial_data,label1,label2=load_data(dir_path+os.sep+files[i])
        # Stage 1: coarse rock-type labels select the stage-2 model per sample.
        predict_label1=predict_result(model,initial_data)
        #print(predict_label1)
        jieguo_data=chose_model(path_list,initial_data,predict_label1)
        # Accuracy is measured against the second label column.
        accura=score_acc(file_split[0],label2,jieguo_data)
        print("最终-----{}------的测试结果准确率为{}---------------------".format(file_split[0],accura))
    print("完成所有的测试准确率测试")
class Solution:
    def isValidSudoku(self, board):
        """
        :type board: List[List[str]]
        :rtype: bool

        Valid iff no digit repeats within any row, column or 3x3 box;
        empty cells are '.'.  Uses sets rather than the original lists so
        each membership test is O(1) instead of O(n).
        https://www.cnblogs.com/zhuifengjingling/p/5277555.html
        """
        rows = [set() for _ in range(9)]
        cols = [set() for _ in range(9)]
        boxes = [set() for _ in range(9)]
        for i in range(9):
            for j in range(9):
                v = board[i][j]
                if v == '.':
                    continue
                k = i // 3 * 3 + j // 3  # index of the cell's 3x3 box
                if v in rows[i] or v in cols[j] or v in boxes[k]:
                    return False
                rows[i].add(v)
                cols[j].add(v)
                boxes[k].add(v)
        return True
import collections
# sp's Counter one-liner, fixed to run under Python 3.
class Solution_0:
    def isValidSudoku(self, board):
        """
        :type board: List[List[str]]
        :rtype: bool

        Counts every (value, unit) incidence; the board is valid iff no
        incidence occurs twice.  Two Python-3 fixes over the original:
        dict .values() is a view and must be materialised with list()
        before list concatenation, and / is true division, so the box key
        needs // to collapse each 3x3 block onto one coordinate.
        https://leetcode.com/problems/valid-sudoku/discuss/15460/1-7-lines-Python-4-solutions
        """
        return 1 == max(list(collections.Counter(
            x
            for i, row in enumerate(board)
            for j, col in enumerate(row)
            if col != '.'
            for x in ((col, i), (j, col), (i // 3, j // 3, col))
        ).values()) + [1])
class Solution_1:
    def isValidSudoku(self, board):
        """
        :type board: List[List[str]]
        :rtype: bool

        sp's set-based single pass.  Each filled cell records three keys:
        (value, row), (col, value) and (row//3, col//3, value); a repeated
        key means a duplicate in that unit.  The original used i/3, which
        under Python 3 true division yields a distinct float per cell and
        silently disabled the 3x3-box check — // restores it.
        https://leetcode.com/problems/valid-sudoku/discuss/15460/1-7-lines-Python-4-solutions
        """
        seen = set()
        return not any(x in seen or seen.add(x)
                       for i, row in enumerate(board)
                       for j, c in enumerate(row)
                       if c != '.'
                       for x in ((c, i), (j, c), (i // 3, j // 3, c)))
|
import argparse
from core.render_markdown import output_md
from core.render_html import output_html
from core.export_ddl import output_ddl_sql
if __name__ == '__main__':
    # CLI entry point: render database table structure in the chosen format
    # (the prog/usage/description strings are user-facing and stay as-is).
    parser = argparse.ArgumentParser(
        prog="输出数据库信息到markdown/html/pdf",
        usage="""
python main.py -t [type]
""",
        description="""
输出数据库表结构信息到markdown/html/pdf
""",
        epilog="That's all. Thank you for using it !",
        add_help=True
    )
    # -t/--type selects the output format; defaults to markdown
    parser.add_argument('-t', "--type", default='md', help="输出类型")
    args = parser.parse_args()
    output_type = args.type
    if output_type in ('md', 'markdown', 'MD', 'MARKDOWN'):
        output_md()
    elif output_type in ('html', 'HTML'):
        output_html()
    elif output_type in ('pdf', 'PDF'):
        print("TODO 暂未实现")  # pdf output not implemented yet
    elif output_type in ('ddl', "DDL"):
        output_ddl_sql()
    else:
        print("参数错误")  # unrecognised output type
|
#!/usr/bin/env python
# Python 2 utility: given a base address (argv[1], hex) and a "addr: gadget"
# list file (argv[2]), pick the first 8-byte-aligned gadget and print the
# qword index that reaches it from the base.  (1 << 64) + offset prints a
# possibly-negative offset as its unsigned 64-bit encoding.
import sys
base_addr = int(sys.argv[1], 16)
f = open(sys.argv[2], 'r') # gadgets
for line in f.readlines():
    target_str, gadget = line.split(':')
    target_addr = int(target_str, 16)
    # check alignment
    if target_addr % 8 != 0:
        continue
    # qword distance from base; Python 2 int / is floor division
    offset = (target_addr - base_addr) / 8
    print 'offset =', (1 << 64) + offset
    print 'gadget =', gadget.strip()
    print 'stack addr = %x' % (target_addr & 0xffffffff)
    break
|
#Leo Li
#Sep. 23
#This is a guessing game in which the player has to guess the number that the system automatically generates. The player has 7 chances, and the system makes different suggestions according to how far the user input is from the correct answer. When the game is finished, the player can choose to play again or end the game.
#On my honor, I have neither given nor received any unauthorized aid.
import random
import time
#---------------------------------------------------------------
def start():
    """Play one guessing-game session: 7 tries to guess a number in [200, 900].

    Hints scale with how far the guess is from the target; non-integer input
    is caught and re-prompted.  NOTE(review): replay is handled by recursing
    into start(); and once the inner loop is left after a declined replay,
    the outer `while True` keeps iterating with no prompt — confirm the
    intended way the program terminates.
    """
    x = random.randint(200, 900)#system chooses a number between 200 and 900
    y = 7  # number of lives
    print("\nA random number is being generated")
    print("\n......")
    time.sleep(1)#I used time delay function throughout the game to show that the system is "Processing", and just to make better user experience
    print("\n......")
    time.sleep(1)
    print("\nthe random number is now generated,")
    while True:
        try:
            while y>0: #y repersents number of lives remaining, and this while loops allows the program to keep running while the lives are greater than 0
                result = int(input("\nplease guess the number:\n\n>>"))
                if result == x: #If the user get the number right, the computer tell the user he/she wins, and if he/she wants to start another one
                    print("\nchekcking...")
                    time.sleep(1.5)
                    print("\nauthenticating...")
                    time.sleep(1.5)
                    print("\n......")
                    time.sleep(1)
                    print("\n......")
                    time.sleep(1)
                    print("\nThe number is correct! You win!")
                    z = int(input("\nPlay again?\n1)Yes\n2)No\n\n>>"))
                    if z == 1:
                        start()
                    break
                if result < x: #if the user input is less than the number, the program makes different suggestions based on the difference between the user input and the actual number
                    if x>(300+result):
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYour answer is far from being correct! It is way smaller than the correct answer")
                        y-=1
                        print("Lives remaining:", y)
                    elif x>(150+result):
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYour answer is much smaller than the number")
                        y-=1
                        print("Lives remaining:", y)
                    elif x>(100+result):
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYour answer is close to the number; it is smaller than the number by less than 150")
                        y-=1
                        print("Lives remaining:", y)
                    elif x>(50+result):
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYour answer is pretty close to the correct answer; it is smaller than the number by less than 100")
                        y-=1
                        print("Lives remaining:", y)
                    elif x>(20+result):
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYou are so close right now...; your answer is smaller than the number by less than 50")
                        y-=1
                        print("Lives remaining:", y)
                    elif x>(0.2+result):  # with integer guesses this catches any remaining result < x
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYou are so close! If you can just add a little bit more...")
                        y-=1
                        print("Lives remaining:", y)
                if result > x: #Same thing when the user input is greater than the number
                    if result>(300+x):
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYour answer is far from being correct! It is way larger than the correct answer")
                        y-=1
                        print("Lives remaining:", y)
                    elif result>(150+x):
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYour answer is much larger than the number")
                        y-=1
                        print("Lives remaining:", y)
                    elif result>(100+x):
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYour answer is close to the number; it is larger than the number by less than 150")
                        y-=1
                        print("Lives remaining:", y)
                    elif result>(50+x):
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYour answer is pretty close to the correct answer; it is larger than the number by less than 100")
                        y-=1
                        print("Lives remaining:", y)
                    elif result>(20+x):
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYou are so close right now...; your answer is larger than the number by less than 50")
                        y-=1
                        print("Lives remaining:", y)
                    elif result>(0.2+x):  # with integer guesses this catches any remaining result > x
                        print("\nchecking...")
                        time.sleep(1.5)
                        print("\n......")
                        time.sleep(1)
                        print("\n......")
                        time.sleep(1)
                        print("\nYou are so close! If you can just reduce it by a little bit...")
                        y-=1
                        print("Lives remaining:", y)
                if y == 0: #When the user used all their chances, the program tells the user the real answer, and if they want to start another one
                    print("\ncheking number of lives remaining......")
                    time.sleep(1.5)
                    print("\n......")
                    time.sleep(1)
                    print("\n......")
                    time.sleep(1)
                    print("\n......")
                    time.sleep(1)
                    print("\nlooks like you are dead...")
                    time.sleep(1)
                    print("\n......")
                    time.sleep(1)
                    print("\n......")
                    time.sleep(1)
                    print("The answer is",x)
                    time.sleep(1)
                    print("\ngame over")
                    z = int(input("\nPlay again?\n1)Yes\n2)No\n\n>>"))
                    if z == 1:
                        start()
                    break
        except ValueError: #The except and the big "while True" loop and "try:" together makes it possible that when the user doesn't enter an integer, the program would tell the user to enter an integer
            print("\n\nThat's not an integer, please try again")
# Kick off the first game session.
start()
def lineup_students(string):
    """Return the names in `string`, longest first; ties in reverse
    alphabetical (Z->A) order."""
    names = string.split()
    names.sort(key=lambda name: (len(name), name), reverse=True)
    return names
'''
Suzuki needs help lining up his students!
Today Suzuki will be interviewing his students to ensure they are progressing
in their training. He decided to schedule the interviews based on the length
of the students name in descending order. The students will line up and wait
for their turn.
You will be given a string of student names. Sort them and return a list of
names in descending order.
Here is an example input:
string = 'Tadashi Takahiro Takao Takashi Takayuki Takehiko Takeo Takeshi Takeshi'
Here is an example return from your function:
lst = ['Takehiko',
'Takayuki',
'Takahiro',
'Takeshi',
'Takeshi',
'Takashi',
'Tadashi',
'Takeo',
'Takao']
Names of equal length will be returned in reverse alphabetical order (Z->A) such that:
string = "xxa xxb xxc xxd xa xb xc xd"
Returns
['xxd', 'xxc', 'xxb', 'xxa', 'xd', 'xc', 'xb', 'xa']
'''
|
from __future__ import print_function, unicode_literals
from datetime import timedelta
from django.db import models
from django.db.models import Max, Sum, Count
from django.db.models.query import QuerySet
from django.utils import timezone
from generic_aggregation import generic_annotate
from hitcount.models import HitCount, Hit
# Note: Only import novel models inside method to prevent circular importing
class NovelManager(models.Manager):
    """Manager that hands out NovelQuerySet instances.

    NOTE(review): get_query_set is the pre-Django-1.6 spelling; newer
    Django expects get_queryset — confirm the Django version targeted.
    """
    def get_query_set(self):
        return NovelQuerySet(self.model, using=self._db)
class NovelQuerySet(QuerySet):
    """QuerySet for Novel with hit-count annotations and chapter helpers."""

    def with_hit_count(self):
        """Annotate each novel with the summed hits over all its chapters."""
        from .models import Chapter
        return generic_annotate(
            self,
            HitCount,
            Sum("volume__chapter__hitcount_object__hits"),
            alias="hit_count",
            force_rel_model=Chapter,
        )

    def with_hit_count_last_week(self):
        """Annotate each novel with the number of hits in the last 7 days.

        Counts individual Hit rows (rather than summing HitCount totals) and
        restricts them with a raw WHERE clause on Hit.created.
        NOTE(review): ops.value_to_db_datetime was removed in later Django
        versions (renamed adapt_datetimefield_value) — confirm compatibility.
        """
        from .models import Chapter
        from django.db import connections
        return generic_annotate(
            self,
            HitCount,
            Count("volume__chapter__hitcount_object__hit__created"),
            alias="hit_count_last_week",
            force_rel_model=Chapter,
            rel_slice_pos=-2,
        ).extra(where=[Hit._meta.db_table + ".created > %s"],
                params=[connections[self.db].ops.value_to_db_datetime(
                    timezone.now() - timedelta(days=7)
                )],
                )

    def execute_with_latest_chapter(self, exclude_novel_without_chapter=True):
        """Evaluate the queryset, attaching each novel's newest chapter.

        Avoids N+1 queries: one query for the novels (annotated with the max
        chapter id) and one bulk fetch for those chapters.
        """
        from .models import Chapter
        query = self.annotate(latest_chapter_id=Max("volume__chapter__id"))
        if exclude_novel_without_chapter:
            # The extra filter must apply before slicing, so the existing
            # LIMIT/OFFSET is saved, cleared, and restored afterwards.
            limit = query.query.low_mark, query.query.high_mark
            query.query.clear_limits()
            query = query.filter(latest_chapter_id__gt=0)
            query.query.low_mark, query.query.high_mark = limit
        result = list(query)
        chapter_ids = [x.latest_chapter_id for x in result]
        chapters = {
            x.pk: x for x in
            Chapter.objects.filter(pk__in=chapter_ids).
            select_related("volume")
        }
        for novel in result:
            if novel.latest_chapter_id:
                novel.latest_chapter = chapters[novel.latest_chapter_id]
        return result
|
"""Point 클래스는 2차원 평면의 점 (또는 2차원 벡터)을 나타내는 클래스이다.
* 필요한 멤버는 점의 x-좌표와 y-좌표이다."""
class Point:
    """A point (or 2-D vector) in the plane."""

    def __init__(self, x=0, y=0):
        # both coordinates default to the origin
        self.x, self.y = x, y

    def __str__(self):
        # printable form consumed by print() via the __str__ protocol
        return f"({self.x},{self.y})"
#생성함수(magic method 중 하나)__init__의 매개변수로 두 좌표 값을 받는다. default 좌표값은 0으로 정했다.(다른 default 값으로 지정해도 상관없다.)
#클래스의 모든 메쏘드의 첫 번째 매개변수는 이 메쏘드를 호출하는 객체를 나타내는 self이어야 한다.
# p = Point(1,2)
# print(p) # <__main__.Point object at 0x10976f910>
"""print(p)를 수행하면 객체 p를 프린트해야하는데, 구체적으로 어떤 내용을 출력해야 하는지 print 함수는 전혀 알지 못한다
print 함수에게 어떤 내용을 출력해야 하는지 알려줘야 한다. 그 역할을 하는 magic method가 __str__함수이다.
print(p)의 실행과정은 (1) p가 속한 클래스의 Point의 __str__ 함수가 정의되어 있다면 호출한다.
이 __str__함수는 출력용 문자열을 리턴하는 데, print 함수는 단순히 리턴된 문자열을 출력하는 역할이다.
만약, (2) __str__ 함수가 정의되어 있지 않다면, 해당 객체의 기본 정보(<__main__.Point object at 0x10976f910>)를 출력한다."""
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.