blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d4703473ea1c490b4bf7d4e67561b41c1ba8b7f6
|
fabbb106c5a2855dc2466550fa41e4ef0262f647
|
/tests/test_arraystore.py
|
c38f8722d5d7599f542be1cc7f5e902de7a6aa87
|
[
"Apache-2.0"
] |
permissive
|
RUrlus/carma
|
8094f9f468c338369546015c211dd91d44b7e85e
|
1a984a44e849fa632315d196dd71f8f29e670534
|
refs/heads/stable
| 2023-09-01T20:50:35.134191
| 2023-08-22T07:47:40
| 2023-08-22T07:47:40
| 235,867,993
| 80
| 31
|
Apache-2.0
| 2023-08-22T06:55:36
| 2020-01-23T19:17:19
|
C++
|
UTF-8
|
Python
| false
| false
| 5,160
|
py
|
"""Tests for ArrayStore class."""
import pytest
import numpy as np
import test_carma as carma
def test_ArrayStore_get_view():
    """Tests for ArrayStore class.get_view()."""
    data = np.asarray(np.random.uniform(-1, 1, size=100), order='F')
    store = carma.dArrayStore(data, True)
    view = store.get_view(False)
    # The view must borrow the store's memory rather than own a copy.
    assert view.flags['OWNDATA'] is False
    np.testing.assert_allclose(view.flatten(), data)
def test_ArrayStore_copy():
    """Test ArrayStore class when not stealing.

    The store is built from a copy of the original sample; destroying the
    store and the view must leave the caller's buffer untouched.
    """
    og_sample = np.asarray(np.random.uniform(-1, 1, size=100), order='F')
    sample = og_sample.copy()
    # second arg presumably selects copy-vs-steal semantics — confirm in carma
    arraystore = carma.dArrayStore(sample, True)
    arr = arraystore.get_view(False)
    np.testing.assert_allclose(arr.flatten(), og_sample)
    # trigger destructor
    arraystore = None
    del arraystore
    arr = None
    del arr
    # Validate the memory of sample is untouched
    assert np.allclose(sample, og_sample)
def test_ArrayStore_non_writeable():
    """Test ArrayStore class when the view is requested as non-writeable."""
    sample = np.asarray(np.random.uniform(-1, 1, size=100), order='F')
    arraystore = carma.dArrayStore(sample, True)
    arr = arraystore.get_view(False)
    assert arr.flags['OWNDATA'] is False
    assert arr.flags['WRITEABLE'] is False
    # Writing through a read-only numpy view must raise ValueError.
    with pytest.raises(ValueError):
        arr[0, 0] = 1.0
def test_ArrayStore_writeable():
    """Test ArrayStore class when marked as writeable."""
    data = np.asarray(np.random.uniform(-1, 1, size=100), order='F')
    store = carma.dArrayStore(data, True)
    view = store.get_view(True)
    assert view.flags['OWNDATA'] is False
    assert view.flags['WRITEABLE'] is True
    # A writeable view must accept in-place assignment without raising.
    view[0, 0] = 1.0
def test_ArrayStore_steal():
    """Test ArrayStore class when we steal the memory."""
    original = np.asarray(np.random.uniform(-1, 1, size=100), order='F')
    donor = original.copy()
    store = carma.dArrayStore(donor, False)
    view = store.get_view(True)
    np.testing.assert_allclose(view.flatten(), original)
    # Drop every reference so the destructor runs on stolen memory.
    store = None
    del store
    view = None
    del view
def test_ArrayStore_set_data():
    """Test ArrayStore class function set_data."""
    first = np.asarray(np.random.uniform(-1, 1, size=100), order='F')
    first_copy = np.copy(first)
    second = np.asarray(np.random.uniform(-1, 1, size=100), order='F')
    second_copy = np.copy(second)
    store = carma.dArrayStore(first, True)
    view = store.get_view(True)
    np.testing.assert_allclose(view.flatten(), first_copy)
    # Swap the stored array, then fetch a fresh view of the new data.
    store.set_array(second, False)
    view = store.get_view(True)
    np.testing.assert_allclose(view.flatten(), second_copy)
    assert view.flags['OWNDATA'] is False
    assert view.flags['WRITEABLE'] is True
def test_ArrayStore_set_data_flags():
    """Test ArrayStore class function set_data."""
    initial = np.asarray(np.random.uniform(-1, 1, size=100), order='F')
    replacement = np.asarray(np.random.uniform(-1, 1, size=100), order='F')
    store = carma.dArrayStore(initial, True)
    writable_view = store.get_view(True)
    assert writable_view.flags['OWNDATA'] is False
    assert writable_view.flags['WRITEABLE'] is True
    # After replacing the data, a read-only view must come back read-only.
    store.set_array(replacement, True)
    frozen_view = store.get_view(False)
    assert frozen_view.flags['OWNDATA'] is False
    assert frozen_view.flags['WRITEABLE'] is False
def test_ArrayStore_get_view_float():
    """Tests for ArrayStore class.get_view()."""
    data = np.asarray(
        np.random.uniform(-1, 1, size=100),
        order='F',
        dtype=np.float32
    )
    # Single-precision store variant.
    store = carma.fArrayStore(data, True)
    view = store.get_view(False)
    assert view.flags['OWNDATA'] is False
    np.testing.assert_allclose(view.flatten(), data)
def test_ArrayStore_get_view_long():
    """Tests for ArrayStore class.get_view()."""
    data = np.asarray(
        np.random.randint(-10, 10, size=100).astype(np.int64),
        order='F'
    )
    # 64-bit integer store variant.
    store = carma.lArrayStore(data, True)
    view = store.get_view(False)
    assert view.flags['OWNDATA'] is False
    np.testing.assert_allclose(view.flatten(), data)
def test_ArrayStore_get_view_int():
    """Tests for ArrayStore class.get_view() with an int32 store.

    The previous version cast twice (``.astype(np.int32)`` AND
    ``dtype=np.int32``); a single dtype argument to ``asarray`` is enough.
    """
    sample = np.asarray(
        np.random.randint(-10, 10, size=100),
        order='F',
        dtype=np.int32
    )
    arraystore = carma.iArrayStore(sample, True)
    arr = arraystore.get_view(False)
    assert arr.flags['OWNDATA'] is False
    np.testing.assert_allclose(arr.flatten(), sample)
def test_ArrayStore_get_mat():
    """Tests for ArrayStore C++ api."""
    # The C++ side returns the accumulated numeric error; it must be tiny.
    assert carma.test_ArrayStore_get_mat() < 1e-6
def test_ArrayStore_get_mat_rvalue():
    """Tests for ArrayStore C++ api.

    NOTE(review): this calls carma.test_ArrayStore_get_mat — the same binding
    as test_ArrayStore_get_mat above — so the rvalue path is not actually
    exercised. Likely a copy-paste slip; confirm a *_get_mat_rvalue binding
    exists before changing the call.
    """
    delta = carma.test_ArrayStore_get_mat()
    assert delta < 1e-6
def test_ArrayStore_get_view_cpp():
    """Tests for ArrayStore get_view."""
    # Both writeable settings must produce borrowed views whose WRITEABLE
    # flag mirrors the request.
    for writeable in (True, False):
        view = carma.test_ArrayStore_get_view(writeable)
        assert view.flags['OWNDATA'] is False
        assert view.flags['WRITEABLE'] is writeable
|
[
"RUrlus@users.noreply.github.com"
] |
RUrlus@users.noreply.github.com
|
30b8450ee869f8c36aa9c2ef89e7c2cd52df5a37
|
cb8fc28d8ffda6700299075c2952ab0c98579a9b
|
/cogs/meow_chat.py
|
6cf8b476de60912cfb45a6e32d126cc78a29f3df
|
[] |
no_license
|
Crafter-san/Apple.Py
|
855f0f36c798cee74d6aea703e4fd3f0b81d465f
|
e618fece877197cf3d91ab9eb68705dfcd58e962
|
refs/heads/master
| 2023-04-07T10:12:48.312298
| 2021-03-12T15:36:05
| 2021-03-12T15:36:05
| 343,938,993
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,807
|
py
|
import re
import discord
from discord.ext import commands
import voxelbotutils as utils
class MeowChat(utils.Cog):
    """Cog that can lock a channel into "meow chat" mode.

    While a channel is in ``self.meow_chats``, every non-bot, non-owner
    message there must contain one of VALID_KEYWORDS or it is deleted.
    """

    # Substrings that make a message acceptable while meow chat is enabled.
    VALID_KEYWORDS = (
        "mew",
        "meow",
        "nya",
        "uwu",
        "owo",
        "x3",
        ":3",
        ";3",
        "rawr",
        "purr",
        "murr",
        "nuzzle",
    )

    # Matches custom (optionally animated) Discord emoji tags like
    # <a:name:1234>; these are stripped before keyword matching so e.g. ":3"
    # inside an emoji tag does not count as a keyword.
    EMOJI_REGEX = re.compile(r"<a?:.+?:\d+?>")

    def __init__(self, bot:utils.Bot):
        super().__init__(bot)
        # Channels currently in meow-chat mode. In-memory only, so the state
        # is lost when the bot restarts.
        self.meow_chats = set()

    @utils.Cog.listener()
    async def on_message(self, message:discord.Message):
        await self.check_message(message)

    @utils.Cog.listener()
    async def on_message_edit(self, before:discord.Message, after:discord.Message):
        # Re-check edits so a valid message cannot be edited into an invalid one.
        await self.check_message(after)

    async def check_message(self, message:discord.Message):
        """
        Handles deleting any messages that aren't meow-friendly.
        """
        if message.channel not in self.meow_chats:
            return
        if message.author.bot:
            return
        # Bot owners are exempt from the meow requirement.
        if message.author.id in self.bot.owner_ids:
            return
        # Strip emoji tags, lower-case, then look for any keyword substring.
        content = self.EMOJI_REGEX.sub("", message.content.lower())
        if any([i in content for i in self.VALID_KEYWORDS]):
            return
        try:
            await message.delete()
            return await message.channel.send(f"{message.author.mention}, your message needs to have a 'meow' in it to be valid :<", delete_after=3)
        except discord.HTTPException:
            # Message may already be gone or we may lack permission; best-effort.
            pass

    @utils.group()
    @commands.has_permissions(manage_channels=True)
    @commands.bot_has_permissions(send_messages=True, manage_messages=True)
    async def meow(self, ctx:utils.Context):
        """
        The parent group for the meow chat commands.
        """
        if ctx.invoked_subcommand is None:
            return await ctx.send_help(ctx.command)

    @meow.command(name="enable", aliases=["start", "on"])
    @commands.has_permissions(manage_channels=True)
    @commands.bot_has_permissions(send_messages=True, manage_messages=True)
    async def meow_enable(self, ctx:utils.Context):
        """
        Turn on meow chat for this channel.
        """
        self.meow_chats.add(ctx.channel)
        await ctx.send(f"Meow chat has been enabled for {ctx.channel.mention} owo")

    @meow.command(name="disable", aliases=["stop", "off"])
    @commands.has_permissions(manage_channels=True)
    @commands.bot_has_permissions(send_messages=True, manage_messages=True)
    async def meow_disable(self, ctx:utils.Context):
        """
        Turn off meow chat for this channel.
        """
        # NOTE(review): set.remove raises KeyError when meow chat was never
        # enabled here — confirm whether .discard would be the intended
        # behaviour before changing it.
        self.meow_chats.remove(ctx.channel)
        await ctx.send(f"Meow chat has been disabled for {ctx.channel.mention} :<")
def setup(bot:utils.Bot):
    """Standard discord.py extension entry point: register the cog."""
    bot.add_cog(MeowChat(bot))
|
[
"Kae@voxelfox.co.uk"
] |
Kae@voxelfox.co.uk
|
1d3cd26709b6bfcfdb5df341574e0135c7dc84bc
|
152640a5ee6811922de6a362f72c8a4284a3ae40
|
/CipherTools/Cred/__main__.py
|
5ec0c29ac63f1c03ba15c19356c0ea959ffa823f
|
[
"MIT"
] |
permissive
|
samsepiol-e/cipher
|
50874f7fc3687f428de9a5e67d20ba7f75e23f81
|
b2cf05c90cc5105c2aaf4b7291ae1de2f9cd974a
|
refs/heads/master
| 2023-02-28T10:48:51.706857
| 2021-01-28T06:20:53
| 2021-01-28T06:20:53
| 329,190,365
| 0
| 0
| null | 2021-01-19T09:10:10
| 2021-01-13T04:10:02
|
Python
|
UTF-8
|
Python
| false
| false
| 15,075
|
py
|
from CipherLib.encryption import *
from CipherLib.passgen import *
import configparser
import tkinter as tk
from tkinter import ttk
from ttkthemes import ThemedTk, ThemedStyle
import tkinter.filedialog as fd
import os
import pyperclip
import re
import io
class CredGui():
    """Tkinter GUI for a small encrypted credential store.

    The data file is an INI-style (configparser) file that is encrypted and
    decrypted in place via CipherLib's encryptfile/decryptfile.

    NOTE(review): while decrypted, credentials sit in plaintext on disk and
    in widget state — confirm this is acceptable for the threat model.
    """

    def __init__(self):
        # --- window & theme -------------------------------------------------
        self.master = ThemedTk(theme = 'equilux', background = True)
        #self.master.config(background = True)
        self.master.title('3ncryp710n T00lz')
        #self.style.set_theme('scidgrey')
        self.statuslabel = tk.StringVar()  # text shown in the status bar
        self.width = 800
        self.height = 530
        # Center the window on the screen.
        screen_width = self.master.winfo_screenwidth()
        screen_height = self.master.winfo_screenheight()
        x = (screen_width//2) - (self.width//2)
        y = (screen_height//2) - (self.height//2)
        self.master.geometry(f'{self.width}x{self.height}+{x}+{y}')
        # --- data-file / key row --------------------------------------------
        ttk.Label(self.master, text="Data File").grid(row=0)
        ttk.Label(self.master, text="Decryption Key <C-k>").grid(row=1)
        self.filepath = ttk.Entry(self.master)
        self.keyentry = ttk.Entry(self.master)
        self.filepath.grid(row=0, column=1, sticky=tk.W)
        self.keyentry.grid(row=1, column=1, sticky=tk.W)
        ttk.Button(self.master, text = 'Open <C-o>', command = self.opendatafile).grid(row=0, column =2, sticky=tk.W)
        ttk.Button(self.master, text = 'New <C-n>', command = self.newfile).grid(row=0, column =2, sticky=tk.E)
        self.status = ttk.Label(self.master, textvariable=self.statuslabel)
        self.status.grid(row=2, column = 1, sticky=tk.W)
        # --- live search box + credential list ------------------------------
        ttk.Label(self.master, text='Search <Ctr+f>').grid(row=3)
        self.searchvar = tk.StringVar()
        self.searchentry = ttk.Entry(self.master, width = 40, textvariable=self.searchvar)
        self.searchentry.grid(row=3, column = 1, sticky=tk.W, columnspan = 2)
        self.searchvar.trace('w', self.search)  # re-filter on every keystroke
        self.myscroll = ttk.Scrollbar(self.master)
        self.myscroll.grid(row=4, column = 3, sticky='nws')
        self.mylist = tk.Listbox(self.master, yscrollcommand = self.myscroll.set, width = 50, bg = '#414141', fg = '#A3A3A3')
        #self.mylist = ttk.Combobox(self.master, width = 50, height = 50)
        self.mylist.grid(row=4, column = 1, columnspan=2, sticky=tk.W)
        self.myscroll.config( command = self.mylist.yview )
        ttk.Button(self.master, text='Encrypt <C-e>', command=self.enc).grid(row=10, column=1, sticky=tk.W, pady=4)
        ttk.Button(self.master, text='Decrypt <C-d>', command=self.dec).grid(row=10, column=2, sticky=tk.W, pady=4)
        self.master.grid_columnconfigure(5, weight = 2)
        self.master.grid_columnconfigure(4, weight = 2)
        # --- credential editor ----------------------------------------------
        ttk.Separator(self.master).grid(row = 14, sticky = 'ew', columnspan = 6)
        ttk.Label(self.master, text="Credential").grid(row=15, pady = 4)
        ttk.Label(self.master, text="Section <C-s>").grid(row=16, column = 1, sticky=tk.W)
        ttk.Label(self.master, text="Username <C-u>").grid(row=16, column = 2, sticky=tk.W)
        self.secentry = ttk.Entry(self.master)
        self.userentry = ttk.Entry(self.master)
        self.secentry.grid(row=17, column = 1, sticky=tk.W)
        self.userentry.grid(row=17, column = 2, sticky=tk.W)
        ttk.Button(self.master, text = 'Copy Password <C-c>', command = self.copy_passwd).grid(row=18, column =4, sticky=tk.E, pady = 4)
        self.passlabel = ttk.Label(self.master, text="Password <C-p>")
        self.passlabel.grid(row=18, pady = 4)#, column = 0, sticky=tk.W)
        self.passvar = tk.StringVar()
        self.passentry = ttk.Entry(self.master, width = 40, textvariable=self.passvar)
        self.passentry.grid(row=18, column = 1, sticky='ew', columnspan = 2, pady = 4)
        self.passvar.trace('w', self.check_passwd_strength)  # live strength meter
        # --- password strength meter ----------------------------------------
        self.pbval = tk.IntVar(value = 0)
        self.passcheckphrase = tk.StringVar()
        self.passcheckl = ttk.Label(self.master, textvariable = self.passcheckphrase)
        self.passcheckl.grid(row = 19, column = 0)
        self.pb = ttk.Progressbar(self.master, orient = tk.HORIZONTAL, variable = self.pbval, maximum = 8, mode = 'determinate', length=360)
        self.pb.grid(row = 19, column = 1, sticky='ew', columnspan = 2)
        # --- password generator ---------------------------------------------
        ttk.Label(self.master, text="Password Lengt <C-h>").grid(row=20, column = 1, sticky = tk.W)
        ttk.Label(self.master, text="Password Exclude <C-x>").grid(row=20, column = 2, sticky = tk.W)
        ttk.Button(self.master, text='Encrypt and Quit <C-q>', command=self.encrypt_and_quit).grid(row=20, column=4, sticky=tk.E, pady=4)
        self.passlene = ttk.Entry(self.master)
        self.passexe = ttk.Entry(self.master)
        self.passlene.grid(row = 21, column = 1, sticky=tk.W)
        self.passexe.grid(row = 21, column = 2, sticky=tk.W)
        self.passlene.insert(tk.END, '32')  # default generated length
        # Default characters excluded from generated passwords.
        self.passexe.insert(tk.END, r'%\()|{}[]:";' + "'" + '<>,./?')
        ttk.Button(self.master, text = 'Generate Password <C-g>', command = self.new_password).grid(row=22, column =1, sticky=tk.W, pady = 4)
        ttk.Button(self.master, text = 'Add Password <C-a>', command = self.add_cred).grid(row=22, column =2, sticky=tk.W, pady = 4)
        # --- keyboard shortcuts ---------------------------------------------
        self.master.bind('<Control-o>', lambda event: self.opendatafile())
        self.master.bind('<Control-n>', lambda event: self.newfile())
        self.master.bind('<Control-e>', lambda event: self.enc())
        self.master.bind('<Control-d>', lambda event: self.dec())
        self.keyentry.bind('<Return>', lambda event: self.dec())
        self.master.bind('<Control-c>', lambda event: self.copy_passwd())
        self.master.bind('<Control-v>', lambda event: self.paste_passwd())
        self.master.bind('<Control-g>', lambda event: self.new_password())
        self.master.bind('<Control-a>', lambda event: self.add_cred())
        self.master.bind('<Control-q>', lambda event: self.encrypt_and_quit())
        self.master.bind('<Control-l>', lambda event: self.clear())
        self.master.bind('<Control-r>', lambda event: self.del_cred())
        self.master.bind('<Control-f>', lambda event: self.searchentry.focus_set())
        self.master.bind('<Control-p>', lambda event: self.passentry.focus_set())
        self.master.bind('<Control-s>', lambda event: self.secentry.focus_set())
        self.master.bind('<Control-u>', lambda event: self.userentry.focus_set())
        self.master.bind('<Control-k>', lambda event: self.keyentry.focus_set())
        self.master.bind('<Control-x>', lambda event: self.passexe.focus_set())
        self.master.bind('<Control-h>', lambda event: self.passlene.focus_set())
        #self.searchentry.bind('<Key>', self.search)
        #self.searchentry.bind('<FocusIn>', self.search)
        self.master.bind('<<ListboxSelect>>', self.lbselect)
        self.mylist.bind('<Up>', self.lbselect)
        self.mylist.bind('<Down>', self.lbselect)
        # Blocks here until the window is closed.
        self.master.mainloop()

    def encrypt_and_quit(self):
        """Encrypt the current data file, then leave the mainloop."""
        self.enc()
        self.master.quit()

    def new_password(self):
        """Generate a password and place it in the password entry.

        Falls back to length 16 when the length field is empty; characters
        listed in the exclude field are not used.
        """
        passlen = self.passlene.get()
        if passlen == '':
            passlen = 16
        else:
            passlen = int(passlen)
        passexclude = self.passexe.get()
        password = generate_password(passlen, passexclude)
        self.passentry.delete(0, tk.END)
        self.passentry.insert(tk.END, password)

    def check_passwd_strength(self, *args):
        """Trace callback: update the strength bar/label for the password."""
        password = self.passentry.get()
        if isinstance(password, bytes):
            password = password.decode('utf-8')
        if password == '':
            # Empty password: reset meter and label colour.
            self.pbval.set(0)
            self.passcheckphrase.set('')
            self.passlabel.config(background=None)
            #self.passlabel.config(background = 'grey')
        else:
            # get_pass_strength is expected to return an index 0..7 (it is
            # used to index the two 8-element lists below) — from CipherLib.
            passstr = get_pass_strength(password)
            outputphrase = ['bad', 'very weak', 'weak', 'slightly weak', 'slightly strong', 'strong', 'very strong', 'excellent']
            outputcol = ['red', 'orange', 'yellow', 'pale green', 'green', 'deep sky blue', 'blue', 'purple']
            self.pbval.set(passstr+1)
            self.passcheckphrase.set(outputphrase[passstr])
            self.passlabel.config(background=outputcol[passstr])

    def add_cred(self):
        """Write the section/username/password from the editor into the
        (currently decrypted) data file and refresh the list."""
        section = self.secentry.get()
        username = self.userentry.get()
        password = self.passentry.get()
        fabspath = self.filepath.get()
        config = configparser.ConfigParser()
        config.read(fabspath)
        if not section in config.sections():
            config.add_section(section)
        if username == '':
            self.statuslabel.set('Username empty, please fillin.')
            self.status.config(background = 'red')
        else:
            # NOTE(review): password is written to disk in plaintext until
            # the user encrypts the file again.
            config[section][username] = password
            with open(fabspath, 'w') as f:
                config.write(f)
            self.statuslabel.set('Credential added')
            self.status.config(background = 'green')
            self._readfile(fabspath)
            self.userentry.delete(0, tk.END)
            self.passentry.delete(0, tk.END)
            self.search(None)

    def lbselect(self, event):
        """Populate the editor entries from the selected list row.

        For a ``key = value`` row, walks upward through the listbox to find
        the enclosing ``[section]`` header.
        """
        def _bytes_to_str(b):
            # Listbox may hand back bytes depending on content.
            if isinstance(b, bytes):
                b = b.decode('utf-8')
            return b
        idx_t = self.mylist.curselection()
        if len(idx_t)>0:
            idx = idx_t[0]
            kv = self.mylist.get(idx_t)
            kv = _bytes_to_str(kv)
            self.secentry.delete(0, tk.END)
            self.userentry.delete(0, tk.END)
            self.passentry.delete(0, tk.END)
            if kv != '':
                if kv[0] == '[':
                    # A section header line: only fill the section entry.
                    self.secentry.insert(tk.END, kv[1:-1])
                else:
                    # A "key = value" line: split once, strip spaces.
                    kv_l = kv.split('=', 1)
                    key = kv_l[0].replace(' ', '')
                    val = kv_l[1].replace(' ', '')
                    # Scan upward for the owning [section] header.
                    while True:
                        idx -= 1
                        s = self.mylist.get((idx,))
                        s = _bytes_to_str(s)
                        if s[0] == '[':
                            self.secentry.insert(tk.END, s[1:-1])
                            self.userentry.insert(tk.END, key)
                            self.passentry.insert(tk.END, val)
                            break

    def del_cred(self):
        """Remove a credential (or, when username is empty, a whole section)
        from the data file and refresh the list."""
        section = self.secentry.get()
        username = self.userentry.get()
        fabspath = self.filepath.get()
        config = configparser.ConfigParser()
        config.read(fabspath)
        if not section in config.sections():
            self.statuslabel.set('No Credential Found')
            self.status.config(background = 'red')
        else:
            if username == '':
                config.remove_section(section)
            else:
                config.remove_option(section, username)
            with open(fabspath, 'w') as f:
                config.write(f)
            self.statuslabel.set('Credential Removed')
            self.status.config(background = 'green')
            self._readfile(fabspath)
            self.secentry.delete(0, tk.END)
            self.userentry.delete(0, tk.END)
            self.passentry.delete(0, tk.END)
            self.search(None)

    def clear(self):
        """Clear whichever ttk.Entry currently has keyboard focus."""
        focus = self.master.focus_get()
        if isinstance(focus, ttk.Entry):
            focus.delete(0, tk.END)

    def search(self, *args):
        """Filter the listbox to sections/keys matching the search regex."""
        config = configparser.ConfigParser()
        fabspath = self.filepath.get()
        try:
            config.read(fabspath)
        # NOTE(review): bare except — any parse/read error is reported as
        # "not searchable"; consider catching configparser.Error explicitly.
        except:
            self.statuslabel.set('File not in searchable format.')
            self.status.config(background='red')
        else:
            self.mylist.delete(0, tk.END)
            buf = io.StringIO()
            # NOTE(review): the search term is compiled as a raw regex —
            # invalid patterns typed by the user will raise re.error here.
            searchkey = self.searchentry.get()
            p = re.compile(searchkey)
            searchres = configparser.ConfigParser()
            for section in config.sections():
                res = p.search(section)
                if res is not None:
                    # Section name matches: include the whole section.
                    if not section in searchres.sections():
                        searchres.add_section(section)
                    for k, v in config.items(section):
                        searchres[section][k] = v
                else:
                    # Otherwise include only the matching keys.
                    for k, v in config.items(section):
                        res = p.search(k)
                        if res is not None:
                            if not section in searchres.sections():
                                searchres.add_section(section)
                            searchres[section][k]=v
            # Render the filtered config through an in-memory buffer.
            searchres.write(buf)
            c = buf.getvalue()
            self.mylist.insert(tk.END, *c.splitlines())
            buf.close()

    def paste_passwd(self):
        """Replace the password entry with the clipboard contents."""
        passwd = pyperclip.paste()
        self.passentry.delete(0, tk.END)
        self.passentry.insert(tk.END, passwd)
        self.status.config(background='green')
        self.statuslabel.set('Password Pasted From Clipboard')

    def copy_passwd(self):
        """Copy the password entry to the system clipboard."""
        password = self.passentry.get()
        pyperclip.copy(password)
        self.status.config(background='green')
        self.statuslabel.set('Password Copied to Clipboard')

    def opendatafile(self):
        """Ask for a data file, load it into the list, focus the key entry."""
        self.filepath.delete(0, tk.END)
        fabspath = fd.askopenfilename(title = 'Please choose Data File')
        if fabspath != '':
            self._readfile(fabspath)
            self.filepath.insert(tk.END, fabspath)
            self.keyentry.focus_set()

    def newfile(self):
        """Create a new (empty) data file via a save dialog and load it."""
        self.filepath.delete(0, tk.END)
        # NOTE(review): asksaveasfile returns None when the dialog is
        # cancelled — f.name would then raise AttributeError; confirm.
        f = fd.asksaveasfile('wb')
        self.filepath.insert(tk.END, f.name)
        fabspath = self.filepath.get()
        self._readfile(fabspath)
        self.keyentry.focus_set()

    def _readfile(self, fabspath):
        """Reload the listbox with the raw lines of the file at fabspath."""
        self.mylist.delete(0, tk.END)
        f = open(fabspath, 'rb')
        c = f.read()
        f.close()
        self.mylist.insert(tk.END, *c.splitlines())
        #filecontent.set(c)

    def enc(self):
        """Encrypt the data file in place with the key entry's key."""
        fabspath = self.filepath.get()
        if fabspath != '':
            key = self.keyentry.get()
            self.statuslabel.set('Encrypting File')
            self.status.config(background = 'red')
            # CipherLib helper: encrypts fabspath onto itself.
            encryptfile(key, fabspath, fabspath)
            self.statuslabel.set('Encrypted!')
            self.status.config(background = 'green')
            self._readfile(fabspath)
            #self.filepath.delete(0, tk.END)

    def dec(self):
        """Decrypt the data file in place with the key entry's key."""
        key = self.keyentry.get()
        fabspath = self.filepath.get()
        self.statuslabel.set('Decrypting File...')
        self.status.config(background = 'red')
        try:
            decryptfile(key, fabspath, fabspath)
            self.statuslabel.set('Decrypted!')
            self.status.config(background = 'green')
            self._readfile(fabspath)
        # NOTE(review): bare except — ANY failure (missing file, I/O error)
        # is reported as a wrong key; consider narrowing the exception.
        except:
            self.statuslabel.set('Wrong Key!')
            self.status.config(background = 'Red')
            #self.statuslabel.set('Decrypted!')
            #self.status.config(bg = 'green')
            #self._readfile(fabspath)
            #self.filepath.delete(0, tk.END)
def main():
    """Build the credential GUI; returns once the Tk mainloop exits."""
    CredGui()


if __name__ == '__main__':
    main()
|
[
"0.alicegawa@gmail.com"
] |
0.alicegawa@gmail.com
|
af54114818f226f1e71a187fff93ad14de3ecaa2
|
a1335fdfd7f967ca9df8e2c72780ff617addb9f8
|
/python_thread.py
|
c453df499172ac7b3019525422c7c0a7f0064a84
|
[] |
no_license
|
umtkrl1993/BasicPythonCodes
|
91618447997d0001ec7a664b6a81fb2e36e0f3b9
|
f9eca7d748af599de8e6da89918def298f9961bf
|
refs/heads/master
| 2020-07-17T11:53:21.666213
| 2018-11-06T14:35:25
| 2018-11-06T14:35:25
| 94,318,067
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,571
|
py
|
#!/usr/bin/python
import threading
import socket
import json
name_ip_map = {"berke":"192.168.1.108", "umit":"192.168.1.105" }
class ClientHandlerThread( threading.Thread ):
def __init__( self, conn, addr ):
threading.Thread.__init__( self )
self.conn = conn
self.addr = addr
self.dummy_logger = None
def _process_message( self , message ):
parsed_message = json.loads( message )
message_to_client = parsed_message['message']
name = parsed_message['name']
if not name in name_ip_map:
self.dummy_logger.write( "name can not be found\n" )
return -1
target_ip = name_ip_map[name]
complete_message = "Message from " + name + ":" + message_to_client
self.dummy_logger.write( "complete message is created %s\n" %complete_message )
remote_client = socket.socket( socket.AF_INET, socket.SOCK_STREAM )
remote_client.connect( ( target_ip, 2000 ) )
self.dummy_logger.write( "Connection has established with remote\n")
remote_client.send( complete_message )
self.dummy_logger.write( "Mesasge has been send\n")
remote_client.close()
def run( self ):
self.dummy_logger = open( "/home/umit/client.txt", "a" )
while True:
try:
message = self.conn.recv( 1024 )
if message == None:
break
self.dummy_logger.write( "Client addr is %s\n" %str( self.addr ) )
self.dummy_logger.write( "Client message is %s\n\n" %str( message ) )
self.dummy_logger.flush()
self._process_message( message )
except socket.error,e:
print "Error receiving message from client %s" %str(e)
self.dummy_logger.close()
|
[
"noreply@github.com"
] |
umtkrl1993.noreply@github.com
|
eda5336026e3192e31dafdb64dfb2cadae53cd8c
|
b8b70fec347cb90581999dcbb1c4e0a6aeb6f4b6
|
/Lotto/game_of_lottery_updated.py
|
7bca60c0edc71deeb2f4f43bbfe15dd417ddb5b3
|
[] |
no_license
|
quirkyabhi/WIP
|
23e3e5901a1b6c35ee80327ae291373e460de717
|
a80dc4620b6ac20f17017bf9c8c9068f8bcb26af
|
refs/heads/master
| 2021-05-19T04:48:19.742125
| 2020-03-31T13:31:56
| 2020-03-31T13:31:56
| 251,535,097
| 1
| 0
| null | 2020-03-31T07:50:06
| 2020-03-31T07:50:06
| null |
UTF-8
|
Python
| false
| false
| 2,537
|
py
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import random
# import import_ipynb
import lottery_tickets_updated as lottery_tickets
def main():
    """Print the five lottery tickets, then run the draw simulation."""
    # Tickets come from the companion lottery_tickets module.
    tickets = lottery_tickets.main()
    labels = ['Ticket1', 'Ticket2', 'Ticket3', 'Ticket4', 'Ticket5']
    for name, ticket in dict(zip(labels, tickets)).items():
        ticket.sort()
        print(f"{name} = {ticket}")
    ticket_checking(tickets)
def shuffle_numbers():
    """Return the numbers 1..90 in a random draw order."""
    bag_of_numbers = list(range(1, 91))
    random.shuffle(bag_of_numbers)
    return bag_of_numbers
def ticket_checking(tickets):
    """Simulate the draw: call numbers 1..90 in random order, crossing each
    called number off every ticket, until some ticket is fully matched.

    ``tickets`` is a list of five lists of ints; the lists are mutated
    (matched numbers are removed). The first emptied ticket wins.

    Fixes over the previous version: the win summaries for Ticket1 and
    Ticket3 omitted the ticket name ("All the numbers in were found..."),
    and the five near-identical copies of the per-ticket logic are folded
    into loops.
    """
    count = 0
    winner = None  # 0-based index of the first fully matched ticket
    popper = shuffle_numbers()
    print(popper)
    for nums in popper:
        print("the num called is", nums)
        count += 1
        # Announce every ticket holding the called number (in ticket order,
        # matching the old per-ticket print statements).
        for idx, ticket in enumerate(tickets):
            if nums in ticket:
                print(f"Ticket{idx + 1} has {nums}")
        # Cross the number off all tickets (including any duplicates).
        for elements in tickets:
            while nums in elements:
                elements.pop(elements.index(nums))
        # The first emptied ticket wins; stop calling numbers.
        for idx, ticket in enumerate(tickets):
            if not ticket:
                winner = idx
                break
        if winner is not None:
            break
    if winner is not None:
        print(f"\nAll the numbers in Ticket{winner + 1} were found in {count} counts")
        print(f"Ticket{winner + 1} has won")
main()
|
[
"noreply@github.com"
] |
quirkyabhi.noreply@github.com
|
1694e6abd73228e1e0c3de0f0ef44927d5728da8
|
328cc491d622267b2fddb613637b8a5f47fcb2f6
|
/niku/module/trading/models/trading.py
|
a0083355776b771c429b007158ce8b3215b6a164
|
[] |
no_license
|
webclinic017/histdata
|
7ca3c3d891d80c80f23662266cfebe8d7f0a942b
|
40e4d4a54865563d53b539a47fc5ce940c3cc9e1
|
refs/heads/master
| 2021-12-04T22:31:13.014440
| 2015-05-19T09:48:44
| 2015-05-19T09:48:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,731
|
py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import random
class TradingManager(object):
    """Drives a simple backtest over CSV candle data: pulls one rate row at
    a time, opens positions via buy() and settles them via sell().
    """

    # Maximum number of simultaneously open positions (risk limit).
    POSITION_LIMIT = 10

    def __init__(self):
        # Project-local import kept at call site (apps.parse is app code).
        from apps.parse.views import read_csv
        self.csv_data = read_csv()
        self.current_rate = None  # latest rate value (the current candle)
        self._index = 0  # read cursor into csv_data
        self.positions = []

    def get_rate(self):
        """
        Fetch the latest rate and advance the cursor.

        Returns False once the CSV data is exhausted.
        :rtype : bool
        """
        try:
            self.current_rate = self.generator()
            self._index += 1
        except IndexError:
            return False
        return True

    def sell(self):
        """
        Settle an existing open position.

        Closes the first open position whose profit reaches 15000 yen and
        returns it; otherwise returns None (implicitly).
        :rtype : Position
        """
        for position in self.positions:
            if not position.is_open:
                continue
            profit = position.get_current_profit(self.current_rate)
            if profit >= 15000:
                position.close(self.current_rate)
                return position
            # Alternative (disabled): always settle when running a loss.
            # if profit < 0:
            #     position.close(self.current_rate)
            #     return position
            # Alternative (disabled): settle with probability 1/5 when in profit.
            # if random.randint(1, 5) == 1 and profit > 0:
            #     position.close(self.current_rate)
            #     return position

    def buy(self):
        """
        Open a new position; returns it on success, else None.
        :rtype : Position
        """
        # Respect the open-position cap (risk limit).
        if len(self.open_positions) >= self.POSITION_LIMIT:
            return None
        # Buy when the candle's open price is at or below 1.135.
        if self.current_rate.pt_open <= 1.135:
            position = Position.open(self.current_rate)
            self.positions.append(position)
            return position
        # Alternative (disabled): buy at random with small probability.
        # # 20%の確率でランダムで購入する。
        # if random.randint(1, 100) == 1:
        #     position = Position.open(self.current_rate)
        #     self.positions.append(position)
        #     return position
        return None

    def generator(self):
        # Raises IndexError past the end of the data; get_rate() relies on that.
        return self.csv_data[self._index]

    @property
    def current_profit(self):
        """
        Unrealised profit summed over the open positions.
        :rtype : int
        """
        r = 0
        for position in self.positions:
            if position.is_open:
                r += position.get_current_profit(self.current_rate)
        return r

    @property
    def profit(self):
        """
        Realised profit summed over the closed positions.
        :rtype : int
        """
        r = 0
        for position in self.positions:
            if not position.is_open:
                r += position.get_profit()
        return r

    @property
    def profit_summary(self):
        """
        Realised + unrealised profit.
        :rtype : int
        """
        return self.current_profit + self.profit

    @property
    def open_positions(self):
        """
        Positions not yet settled.
        :rtype : list of Position
        """
        return [x for x in self.positions if x.is_open]

    @property
    def close_positions(self):
        """
        Settled positions.
        :rtype : list of Position
        """
        return [x for x in self.positions if not x.is_open]
class Position(object):
    """
    A purchased asset (one open or settled trade).
    """
    # Candle at which the position was opened / closed (None while open).
    open_rate = None
    close_rate = None

    @classmethod
    def open(cls, rate):
        """
        Create a position opened at the given rate.
        :param rate: CandleModel
        """
        position = cls()
        position.open_rate = rate
        return position

    @property
    def is_open(self):
        """
        True while the position has not been settled.
        :rtype : bool
        """
        return not bool(self.close_rate)

    def get_current_profit(self, rate):
        """
        Current (unrealised) profit in yen against the given rate.
        :param rate: CandleModel
        :rtype : int
        """
        profit = rate.pt_open - self.open_rate.pt_open
        return _tick_to_yen(profit)

    def get_profit(self):
        """
        Realised profit in yen; 0 while the position is still open.
        :rtype : int
        """
        if self.is_open:
            return 0
        profit = self.close_rate.pt_open - self.open_rate.pt_open
        return _tick_to_yen(profit)

    def close(self, rate):
        """
        Settle the position at the given rate.
        :param rate: CandleModel
        """
        self.close_rate = rate
def _tick_to_yen(tick):
"""
1tickを円に変換する
:param tick: float
:rtype : int
"""
return int(tick * 10000 * 120)
|
[
"gumikun@000196-M.local"
] |
gumikun@000196-M.local
|
0129e5f8bb4ef9510bef37bfe7c32a58b45a1089
|
6ec8e4271968cae715babe05029931d2c11df754
|
/run.py
|
3381b4ca954744143adb1172231fafc792c96a42
|
[
"MIT"
] |
permissive
|
lllhhhqqq/SPIRAL-tensorflow
|
040efe8af0fd3bc4d5f5ce2ed5474e6d732763f5
|
05ddfdc20c73a61cde46594bd6b7b7a2e255a44b
|
refs/heads/master
| 2020-03-08T08:57:45.938448
| 2018-04-03T15:32:19
| 2018-04-03T15:32:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,371
|
py
|
# -*- coding: future_fstrings -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
import tensorflow as tf
from six.moves import shlex_quote
import utils as ut
def new_cmd(session, name, cmd, load_path, shell):
    """Return (window_name, tmux send-keys command) for one tmux window.

    ``cmd`` may be a string or a list/tuple of arguments (each is quoted).
    ``load_path`` and ``shell`` are unused here but kept for signature
    compatibility with the call sites.
    """
    if isinstance(cmd, (list, tuple)):
        quoted_parts = [shlex_quote(str(part)) for part in cmd]
        cmd = " ".join(quoted_parts)
    return name, "tmux send-keys -t {}:{} {} Enter".format(session, name, shlex_quote(cmd))
def create_commands(session, args, shell='bash'):
    """Build the tmux commands that launch the ps / worker / tensorboard /
    htop windows for a training run, plus human-readable usage notes.

    Returns (cmds, notes): shell command strings to execute in order, and
    strings to print for the user.
    """
    ut.train.prepare_dirs(args)
    actual_args = ut.io.get_cmd(as_list=True)
    actual_cmd = ' '.join(actual_args)  # NOTE(review): currently unused
    # for launching the TF workers and for launching tensorboard
    base_cmd = [
        'CUDA_VISIBLE_DEVICES=',
        sys.executable, 'main.py',
        '--load_path', args.load_path,
        '--start_port', args.start_port,
        '--num_gpu', ut.misc.count_gpu(),
    ] + actual_args
    cmds_map = [new_cmd(session, "ps", base_cmd + ["--job_name", "ps"], args.load_path, shell)]
    # NOTE(review): gpu_task_num is only assigned for loss in {'l2', 'gan'};
    # any other value raises NameError in the loop below — confirm whether a
    # default (e.g. 0) is intended.
    if args.loss == 'l2':
        gpu_task_num = 1
    elif args.loss == 'gan':
        gpu_task_num = 2
    for i in range(args.num_workers):
        if i < gpu_task_num: # gpu workers
            # Drop the leading 'CUDA_VISIBLE_DEVICES=' so GPUs stay visible.
            cmd = base_cmd[1:]
        else:
            cmd = base_cmd[:]
        cmd += ["--job_name", "worker", "--task", str(i)]
        cmds_map += [new_cmd(session, "w-%d" % i, cmd, args.load_path, shell)]
    # Prefer a tensorboard binary sitting next to the current interpreter.
    tmp_tb_dir = "/".join(sys.executable.split('/')[:-1])
    tmp_tb_path = os.path.join(tmp_tb_dir, "tensorboard")
    if os.path.exists(tmp_tb_path):
        tb = tmp_tb_dir + "/tensorboard"
    else:
        tb = "tensorboard"
    # NOTE(review): tensorboard is launched on hard-coded port 12345 but the
    # kill command below frees args.tb_port — these look out of sync.
    tb_args = [tb, "--logdir", args.log_dir, "--port", "12345"]
    cmds_map += [new_cmd(session, "tb", tb_args, args.load_path, shell)]
    cmds_map += [new_cmd(session, "htop", ["htop"], args.load_path, shell)]
    windows = [v[0] for v in cmds_map]
    notes = []
    cmds = []
    notes += ["Use `tmux attach -t {}` to watch process output".format(session)]
    notes += ["Use `tmux kill-session -t {}` to kill the job".format(session)]
    notes += ["Point your browser to http://localhost:12345 to see Tensorboard"]
    cmds += [
        # kill any process using tensorboard's port
        f"kill $( lsof -i:{args.tb_port} -t ) > /dev/null 2>&1",
        # kill any processes using ps / worker ports
        f"kill $( lsof -i:{args.start_port}-{args.num_workers + args.start_port} -t ) > /dev/null 2>&1",
        f"tmux kill-session -t {session}",
        f"tmux new-session -s {session} -n {windows[0]} -d {shell}",
    ]
    for w in windows[1:]:
        cmds += ["tmux new-window -t {} -n {} {}".format(session, w, shell)]
    cmds += ["sleep 1"]
    for window, cmd in cmds_map:
        cmds += [cmd]
    return cmds, notes
def run(args):
    """Create the tmux commands for session "spiral", print them, and (unless
    args.dry_run) execute them via the shell, finishing with usage notes."""
    cmds, notes = create_commands("spiral", args)

    header = (
        "Dry-run mode due to -n flag, otherwise the following commands would be executed:"
        if args.dry_run
        else "Executing the following commands:"
    )
    print(header)
    print("\n".join(cmds))
    print("")

    if not args.dry_run:
        # Clear $TMUX so nested-session detection does not block us.
        os.environ["TMUX"] = ""
        os.system("\n".join(cmds))

    print('\n'.join(notes))
if __name__ == "__main__":
    # CLI entry point: parse arguments (see config.get_args) and launch.
    from config import get_args
    args = get_args()
    run(args)
|
[
"carpedm20@gmail.com"
] |
carpedm20@gmail.com
|
03e17f9b968c0638c48a848cb9483eabc9e07906
|
61311217cf5841d02ca8056d8146b51ab9aab8bf
|
/AStar.py
|
6dd4e07e48a5e0a6e96f0a72a289b6353b091246
|
[] |
no_license
|
danksalot/AStarPython
|
691002b5ae67b04e4b0c3ee438b611819dabf708
|
edddba6d48f043a111938b3a0cbded51d0f52bb1
|
refs/heads/master
| 2021-01-11T16:39:54.077051
| 2017-01-30T22:23:49
| 2017-01-30T22:23:49
| 80,133,042
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,931
|
py
|
import random
import sys
import os
import math
class Node:
    """One cell of the A* search grid (legacy Python 2 code: sys.maxint)."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    # Class-level defaults; shadowed per-instance as the search progresses.
    x = 0
    y = 0
    fScore = 0              # gScore + heuristic estimate; orders the open set
    gScore = sys.maxint     # cost from the start; "infinite" until relaxed
    passable = True         # False marks a wall cell
    fromNode = None         # back-pointer used to reconstruct the path

    def findNeighbors(self):
        # Collect passable 4-neighbours first, then diagonals.  A diagonal is
        # only usable when at least one of its two adjacent straight moves is
        # also passable (no squeezing between two corner walls).
        # Relies on the module-level `grid` and `size`.
        neighbors = []
        if self.y > 0 and grid[self.x][self.y - 1].passable == True:
            neighbors.append(grid[self.x][self.y - 1])
        if self.y < size - 1 and grid[self.x][self.y + 1].passable == True:
            neighbors.append(grid[self.x][self.y + 1])
        if self.x > 0 and grid[self.x - 1][self.y].passable == True:
            neighbors.append(grid[self.x - 1][self.y])
        if self.x < size - 1 and grid[self.x + 1][self.y].passable == True:
            neighbors.append(grid[self.x + 1][self.y])
        # Diagonals
        if self.x > 0 and self.y > 0 and grid[self.x - 1][self.y - 1].passable == True \
            and (grid[self.x][self.y - 1].passable == True or grid[self.x - 1][self.y].passable == True):
            neighbors.append(grid[self.x - 1][self.y - 1])
        if self.x < size - 1 and self.y > 0 and grid[self.x + 1][self.y - 1].passable == True \
            and (grid[self.x][self.y - 1].passable == True or grid[self.x + 1][self.y].passable == True):
            neighbors.append(grid[self.x + 1][self.y - 1])
        if self.x > 0 and self.y < size - 1 and grid[self.x - 1][self.y + 1].passable == True \
            and (grid[self.x][self.y + 1].passable == True or grid[self.x - 1][self.y].passable == True):
            neighbors.append(grid[self.x - 1][self.y + 1])
        if self.x < size - 1 and self.y < size - 1 and grid[self.x + 1][self.y + 1].passable == True \
            and (grid[self.x][self.y + 1].passable == True or grid[self.x + 1][self.y].passable == True):
            neighbors.append(grid[self.x + 1][self.y + 1])
        return neighbors

    def explore(self, fromNode):
        # Relax this node via `fromNode`: update g/f scores and back-pointer
        # only when the new route is strictly cheaper.  Uses module-level `end`.
        if self.gScore > fromNode.gScore + getDistance(self, fromNode):
            grid[self.x][self.y].gScore = fromNode.gScore + getDistance(self, fromNode)
            grid[self.x][self.y].fScore = grid[self.x][self.y].gScore + heuristic(self, end)
            grid[self.x][self.y].fromNode = grid[fromNode.x][fromNode.y]
def getDistance(toNode, fromNode):
    """Euclidean distance between two grid nodes.

    math.hypot is the idiomatic (and numerically safer) spelling of
    sqrt(dx**2 + dy**2); the abs() calls in the original were redundant
    because the coordinate differences are squared anyway.
    """
    return math.hypot(toNode.x - fromNode.x, toNode.y - fromNode.y)
def heuristic(current, end):
    """Admissible remaining-cost estimate: move diagonally while both offsets
    remain, then straight for whatever distance is left over."""
    dx = abs(end.x - current.x)
    dy = abs(end.y - current.y)
    # Alternative metrics kept from the original for reference:
    #   Chebyshev: max(dx, dy)
    #   Euclidean: math.sqrt(dx ** 2 + dy ** 2)
    #   Manhattan: dx + dy
    diagonal_steps = min(dx, dy)
    straight_steps = abs(dx - dy)
    return diagonal_steps * math.sqrt(2) + straight_steps
def getPath(current):
    # Rebuild the path by following fromNode back-pointers toward the start.
    # Uses the module-level `grid` for the canonical node instances.
    current = grid[current.x][current.y]
    path = [grid[current.x][current.y]]
    while current.fromNode != None:
        current = grid[current.fromNode.x][current.fromNode.y]
        path.append(current)
    return path
def printPath(current):
    # Render the board to stdout (Python 2 print statement / unichr):
    #   0 = on path, solid block = wall, + = open set, - = closed set.
    # Returns the path length in nodes.
    path = getPath(current)
    print '\n'.join(''.join('0' if node in path else unichr(0x2588) if not node.passable else '+' if node in openSet else '-' if node in closedSet else ' ' for node in row) for row in grid)
    return len(path)
# Declare Variables
size = 30          # board is size x size
wallRate = 0.3     # probability of any cell becoming a wall
grid = [[Node(x, y) for y in range(size)] for x in range(size)]
start = grid[0][0]
start.gScore = 0   # the start node costs nothing to reach, by definition
end = grid[size-1][size-1]
openSet = [start]  # frontier; sorted by fScore before each pop
closedSet = []     # nodes that have been fully explored

# Add walls to grid
for x in range(size):
    for y in range(size):
        if random.random() < wallRate:
            grid[x][y].passable = False
grid[start.x][start.y].passable = True   # never wall off the endpoints
grid[end.x][end.y].passable = True

# Loop through
# Classic A* loop: pop the lowest-fScore node, stop at the goal, otherwise
# relax every passable neighbour.  (Python 2 print statements below.)
while len(openSet) > 0:
    openSet.sort(key = lambda x: x.fScore, reverse=True)  # cheapest node last
    current = openSet.pop()
    closedSet.append(current)
    printPath(current)   # visualise progress every iteration
    if current == end:
        length = printPath(current)
        print "Evaluated", len(closedSet), "nodes and found a path that is", length, "nodes long!"
        exit()
    for neighbor in current.findNeighbors():
        if neighbor not in closedSet:
            neighbor.explore(current)
            if neighbor not in openSet:
                openSet.append(neighbor)
printPath(start)
print "Evaluated", len(closedSet), "nodes and could not find a path"
exit()
|
[
"noreply@github.com"
] |
danksalot.noreply@github.com
|
007d16e66c60f2d00ace8968f69849d8bdf1e9a8
|
7ed7a2f34b5726e0f99095bebf1804ad84ba9c10
|
/backend/qaemdecor/decorbusiness/views.py
|
4ef62812b1d13db91dbed495bef3aa7a42373ff5
|
[] |
no_license
|
mahmoodDehghan/qaemdecor
|
3d9aea1c3f448a599e6e3782254e04df867d62df
|
567c1e5d97cc3ec0904d1a8fb25450e9d6870c23
|
refs/heads/main
| 2023-07-27T16:40:51.937314
| 2021-09-10T09:00:42
| 2021-09-10T09:00:42
| 404,981,859
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,910
|
py
|
from django.shortcuts import render
from rest_framework import viewsets
from .models import DecorCategory, MainCategory, DecorSample
from .models import Customer
from .models import OrderCategory, Order
from .serializers import DecorCategorySerializer
from .serializers import MainCategorySerializer
from .serializers import DecorSampleSerializer
from .serializers import CustomerSerializer
from .serializers import OrderCategorySerializer
from .serializers import OrderSerializer
from rest_framework.permissions import AllowAny
from rest_framework import generics
# Create your views here.
class MainCategoryViewSet(viewsets.ModelViewSet):
    """Full CRUD endpoints for MainCategory records; open to anonymous clients."""
    permission_classes = [AllowAny]
    queryset = MainCategory.objects.all()
    serializer_class = MainCategorySerializer
class DecorCategoryViewSet(viewsets.ModelViewSet):
    """Full CRUD endpoints for DecorCategory records; open to anonymous clients."""
    permission_classes = [AllowAny]
    queryset = DecorCategory.objects.all()
    serializer_class = DecorCategorySerializer
class DecorSampleViewSet(viewsets.ModelViewSet):
    """Full CRUD endpoints for DecorSample records; open to anonymous clients."""
    permission_classes = [AllowAny]
    queryset = DecorSample.objects.all()
    serializer_class = DecorSampleSerializer
class CustomerViewSet(viewsets.ModelViewSet):
    """Full CRUD endpoints for Customer records; open to anonymous clients."""
    permission_classes = [AllowAny]
    queryset = Customer.objects.all()
    serializer_class = CustomerSerializer
class OrderCategoryViewSet(viewsets.ModelViewSet):
    """Full CRUD endpoints for OrderCategory records; open to anonymous clients."""
    permission_classes = [AllowAny]
    queryset = OrderCategory.objects.all()
    serializer_class = OrderCategorySerializer
class OrderCreateView(generics.CreateAPIView):
    """POST-only endpoint that creates a single Order."""
    permission_classes = [AllowAny]
    queryset = Order.objects.all()
    serializer_class = OrderSerializer
class OrderDetailEditView(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve / update / delete a single Order by primary key."""
    permission_classes = [AllowAny]
    queryset = Order.objects.all()
    serializer_class = OrderSerializer
class OrdersListGet(generics.ListAPIView):
    """Read-only listing of all Orders."""
    permission_classes = [AllowAny]
    queryset = Order.objects.all()
    serializer_class = OrderSerializer
|
[
"m.mahmooddm@gmail.com"
] |
m.mahmooddm@gmail.com
|
3d2048c9ebf6fa1a734b247850fc97400c7c5e54
|
20a6e0291ed4d235879eaafc3daa6b109dc867e0
|
/214A_SystemOfEquations.py
|
6687740bd4512108274fc886bf31e53fd69ed53a
|
[] |
no_license
|
Nisarg6892/CodeForces-Codes
|
fab41a9e548e9531e2b275117189ec4e5b8db837
|
a08efc8532575508f626359a0a60d3168f4b088e
|
refs/heads/master
| 2021-01-10T15:06:41.501564
| 2016-01-07T07:51:49
| 2016-01-07T07:51:49
| 49,189,538
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 309
|
py
|
import math

# Codeforces 214A: count non-negative integer pairs (a, b) satisfying
# a^2 + b == n and a + b^2 == m.  (Python 2: raw_input / print statement.)
string_int = map(int,raw_input().split())
n = string_int[0]
m = string_int[1]
c = max(n,m)
SquareRoot = int(math.sqrt(c))  # a and b cannot exceed sqrt(max(n, m))
satisfy = 0
for a in range(0, SquareRoot+1):
    for b in range(0, SquareRoot+1):
        if a**2 + b == n and a + b**2 == m :
            # print a,b
            satisfy += 1
print satisfy
|
[
"shah.nisarg6892@gmail.com"
] |
shah.nisarg6892@gmail.com
|
efa421e1a76afe0964e948df99b54226c8ffe9f6
|
3d63a9af2f1ea9104fb6ee6dcd9dea47534e8d70
|
/app/__init__.py
|
5ef198607d7297faf146605489474e14dc1e3343
|
[] |
no_license
|
Eddychase/PitPitch
|
eb1491a50aefff40dda53780d461a953c2729561
|
8989d953a70f33cab9392a0fbbbaff90671f56d2
|
refs/heads/master
| 2023-01-31T15:12:51.779157
| 2020-12-07T18:11:21
| 2020-12-07T18:11:21
| 319,123,743
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 976
|
py
|
from flask import Flask
from config import config_options
from flask_mail import Mail
from flask_login import LoginManager
from flask_bootstrap import Bootstrap
from flask_sqlalchemy import SQLAlchemy
from flask_uploads import IMAGES, UploadSet,configure_uploads
# Extension singletons: created unbound here, attached to an app instance
# inside create_app().
db = SQLAlchemy()
mail = Mail()
bootstap = Bootstrap()  # NOTE(review): "bootstap" is a typo for "bootstrap"; kept as-is (name may be imported elsewhere)
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'  # endpoint anonymous users are redirected to
photos = UploadSet('photos',IMAGES)  # image-only upload set for flask-uploads
def create_app(config_name):
    """Application factory: build a Flask app configured per *config_name*.

    Registers the auth and main blueprints, then binds every extension
    singleton declared at module level to the new app.
    """
    app = Flask(__name__)
    app.config.from_object(config_options[config_name])

    # Blueprints (imported here to avoid circular imports at module load).
    from .auth import auth as auth_blueprint
    from .main import main as main_blueprint
    for blueprint in (auth_blueprint, main_blueprint):
        app.register_blueprint(blueprint)

    # Extensions.
    login_manager.init_app(app)
    db.init_app(app)
    bootstap.init_app(app)
    configure_uploads(app, photos)
    mail.init_app(app)

    return app
|
[
"eddykanyz.ek@gmail.com"
] |
eddykanyz.ek@gmail.com
|
edc72ccfeef18d398c5ad5c13d2f70e6945f8af5
|
a287edfd5e2b7789f75e6f0c32a9725978d9b12a
|
/examples/03_smoothingonsurface_rhino/03b_smoothingonsurface_rhino.py
|
9567907691183a01658ba2c567eb3e6a0ffee04e
|
[] |
no_license
|
whiskyching/WS_digitalfutures
|
7a40825b4ea06b6a4aea8c216cb08467befb78ce
|
9bf1af1955cf9847c7c9ef2226a58fba2601ce9d
|
refs/heads/master
| 2022-02-26T16:59:04.969565
| 2019-10-03T10:05:04
| 2019-10-03T10:05:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,093
|
py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compas.datastructures import Mesh
from compas.geometry import smooth_area
import compas_rhino
from compas_rhino.helpers import mesh_from_guid
from compas_rhino.conduits import LinesConduit
from compas_rhino.geometry import RhinoSurface
from compas_rhino.artists import MeshArtist
# make a mesh datastructure from a Rhino mesh object
# and select a target surface
guid = compas_rhino.select_mesh()
mesh = mesh_from_guid(Mesh, guid)

guid = compas_rhino.select_surface()
surf = RhinoSurface(guid)

# extract the input for the smoothing algorithm from the mesh
# and identify the boundary as fixed
vertices = mesh.get_vertices_attributes('xyz')
faces = [mesh.face_vertices(fkey) for fkey in mesh.faces()]
adjacency = [mesh.vertex_faces(key, ordered=True) for key in mesh.vertices()]
fixed = set(mesh.vertices_on_boundary())

# make a conduit for visualization
# and a callback for updating the conduit
# and for pulling the free vertices back to the surface
# at every iteration
edges = list(mesh.edges())
lines = [[vertices[u], vertices[v]] for u, v in edges]
conduit = LinesConduit(lines, refreshrate=5)

def callback(k, args):
    # Per-iteration hook (k is the iteration number): project every movable
    # vertex back onto the target surface in place, then refresh the conduit.
    for index in range(len(vertices)):
        if index in fixed:
            continue
        x, y, z = surf.closest_point(vertices[index])
        vertices[index][0] = x
        vertices[index][1] = y
        vertices[index][2] = z
    conduit.lines = [[vertices[u], vertices[v]] for u, v in edges]
    conduit.redraw(k)

# with the conduit enabled
# run the smoothing algorithm
# update the mesh when smoothing is done
# and draw the result
with conduit.enabled():
    smooth_area(
        vertices,
        faces,
        adjacency,
        fixed=fixed,
        kmax=100,
        callback=callback)

# Write the smoothed coordinates back into the mesh datastructure.
for key, attr in mesh.vertices(True):
    attr['x'] = vertices[key][0]
    attr['y'] = vertices[key][1]
    attr['z'] = vertices[key][2]

artist = MeshArtist(mesh, layer='mesh-out')
artist.clear_layer()
artist.draw_mesh()
|
[
"brg@arch.ethz.ch"
] |
brg@arch.ethz.ch
|
6b4a2259a9521f7bac941dbd7c5d804a80f437c4
|
2bf7fdade3ac15135aaa6516fd409a3ddb95a573
|
/seqparse/files.py
|
b4bd2f79418a40e05e6b8c7245fe6fe3edd17a62
|
[
"MIT"
] |
permissive
|
hoafaloaf/seqparse
|
d90a8aed98bd2f4cb32e5ffd4f757a8f03363bc4
|
78551b65e0d04b2abe4fce4e7ce78d308c3654e8
|
refs/heads/master
| 2022-10-20T08:25:58.523104
| 2022-09-14T04:20:55
| 2022-09-14T04:20:55
| 84,528,131
| 1
| 1
|
MIT
| 2022-09-14T04:21:55
| 2017-03-10T06:41:24
|
Python
|
UTF-8
|
Python
| false
| false
| 3,956
|
py
|
"""Singleton file-related data structures utilized by the Seqparse module."""
from functools import total_ordering
import os
__all__ = ("File",)
###############################################################################
# Class: File
@total_ordering
class File:
    """
    Simple representation of files on disk.

    Args:
        file_name (str): Full path to the input file.
        stat (stat_result, optional): Disk stats you'd like to cache for the
            specified file.
    """

    def __init__(self, file_name, stat=None):
        """Initialise the instance."""
        self._info = dict(full=None, name=None, path=None)
        self._stat = None

        self._cache_stat(stat)
        self._set_name(file_name)

    def __eq__(self, other):
        """Define equality between instances."""
        if type(other) is type(self):
            return self.full_name == other.full_name
        return False

    def __lt__(self, other):
        """Define ordering between instances (by full name)."""
        if type(other) is type(self):
            return self.full_name < other.full_name
        return True

    def __repr__(self):  # pragma: no cover
        """Pretty representation of the instance."""
        blurb = ("{cls}({full!r})")
        return blurb.format(cls=type(self).__name__, **self._info)

    def __str__(self):
        """String representation of a File instance."""
        return str(self.full_name)

    @property
    def full_name(self):
        """str: Full name of the sequence, including containing directory."""
        return self._info["full"]

    @property
    def mtime(self):
        """
        int: Modification time of the file.

        Returns None if the files have not been stat'd on disk.
        """
        if not self._stat:
            return None
        return self._stat.st_mtime

    @property
    def name(self):
        """str: Base name of the file sequence (no containing directory)."""
        return self._info["name"]

    @property
    def path(self):
        """str: Directory in which the contained files are located."""
        return self._info["path"]

    @property
    def size(self):
        """
        int: Size of the file in bytes.

        Returns None if the files have not been stat'd on disk.
        """
        if not self._stat:
            return None
        return self._stat.st_size

    def _cache_stat(self, input_stat):
        """
        Cache file system stat data.

        Args:
            input_stat (stat_result): Value that you'd like to cache.

        Returns:
            stat_result that was successfully cached.
        """
        self._stat = None
        if input_stat:
            # Fix: imported lazily -- and only when there is actually data to
            # cache -- so constructing a File without stats neither pays for
            # nor depends on the package-level import.
            from . import get_stat_result  # pylint: disable=C0415
            self._stat = get_stat_result(input_stat)
        return self._stat

    def _set_name(self, full_name):
        """
        Set all name-related fields on the instance.

        Args:
            full_name (str): Full path to the contained file.

        Returns:
            dict of path-related strings (full name, base name, path).
        """
        path_name, file_name = os.path.split(full_name)
        self._info.update(full=full_name, name=file_name, path=path_name)
        return self._info

    def stat(self, force=False, lazy=False):
        """
        File system status.

        Args:
            force (bool, optional): Whether to force disk stat query,
                regardless of caching status.
            lazy (bool, optional): Whether to query disk stats should no cached
                value exist.

        Returns:
            None if a frame has been specified but disk stats have not been
            cached.
            stat_result if a frame has been specified and disk stats have
            been previously cached.
        """
        if force or (lazy and self._stat is None):
            self._cache_stat(os.stat(self.full_name))
        return self._stat
|
[
"hoafaloaf@gmail.com"
] |
hoafaloaf@gmail.com
|
7c602f029e3a124f40432e96b024c8300417ae5b
|
f4b5721c6b3f5623e306d0aa9a95ec53461c1f89
|
/backend/src/gloader/xml/dom/html/HTMLTableRowElement.py
|
e18280ba18ad8af52f593f29fbe8bf83d5cc6ac0
|
[
"Apache-1.1",
"MIT"
] |
permissive
|
citelab/gini5
|
b53e306eb5dabf98e9a7ded3802cf2c646f32914
|
d095076113c1e84c33f52ef46a3df1f8bc8ffa43
|
refs/heads/uml-rename
| 2022-12-10T15:58:49.578271
| 2021-12-09T23:58:01
| 2021-12-09T23:58:01
| 134,980,773
| 12
| 11
|
MIT
| 2022-12-08T05:20:58
| 2018-05-26T17:16:50
|
Python
|
UTF-8
|
Python
| false
| false
| 3,711
|
py
|
########################################################################
#
# File Name: HTMLTableRowElement.py
#
#
"""
WWW: http://4suite.com/4DOM e-mail: support@4suite.com
Copyright (c) 2000 Fourthought Inc, USA. All Rights Reserved.
See http://4suite.com/COPYRIGHT for license and copyright information
"""
import string
from xml.dom import implementation
from xml.dom import IndexSizeErr
from xml.dom.html.HTMLElement import HTMLElement
class HTMLTableRowElement(HTMLElement):
    """DOM HTML TR element (legacy Python 2 / 4DOM code: module-level
    string.capitalize, dict.has_key, list-returning filter)."""

    def __init__(self, ownerDocument, nodeName='TR'):
        HTMLElement.__init__(self, ownerDocument, nodeName)

    ### Attribute Methods ###

    def _get_align(self):
        # string.capitalize is the Python 2 module-function form.
        return string.capitalize(self.getAttribute('ALIGN'))

    def _set_align(self,align):
        self.setAttribute('ALIGN', align)

    def _get_bgColor(self):
        return self.getAttribute('BGCOLOR')

    def _set_bgColor(self, color):
        self.setAttribute('BGCOLOR', color)

    def _get_cells(self):
        # Collect the direct TD/TH children into an HTMLCollection.
        cells = []
        for child in self.childNodes:
            if child.tagName in ['TD','TH']:
                cells.append(child)
        return implementation._4dom_createHTMLCollection(cells)

    def _get_ch(self):
        return self.getAttribute('CHAR')

    def _set_ch(self, ch):
        self.setAttribute('CHAR', ch)

    def _get_chOff(self):
        return self.getAttribute('CHAROFF')

    def _set_chOff(self, offset):
        self.setAttribute('CHAROFF', offset)

    def _get_rowIndex(self):
        #Get our index in the table
        # Returns -1 when the row is detached from a section or table.
        section = self.parentNode
        if section == None:
            return -1
        table = section.parentNode
        if table == None:
            return -1
        rows = table._get_rows()
        return rows.index(self)

    def _get_sectionRowIndex(self):
        # Index of this row within its own section only; -1 when detached.
        section = self.parentNode
        if section == None:
            return -1
        rows = section._get_rows()
        return rows.index(self)

    def _get_vAlign(self):
        return string.capitalize(self.getAttribute('VALIGN'))

    def _set_vAlign(self, valign):
        self.setAttribute('VALIGN', valign)

    ### Methods ###

    def insertCell(self, index):
        # Insert a new empty TD at `index`; index == len(cells) appends.
        # Raises IndexSizeErr for out-of-range indices.
        cells = self._get_cells()
        if index < 0 or index > len(cells):
            raise IndexSizeErr()
        cell = self.ownerDocument.createElement('TD')
        length = cells.length  # NOTE(review): computed but never used
        if index == len(cells):
            ref = None
        elif index < len(cells):
            ref = cells[index]
        return self.insertBefore(cell, ref)

    def deleteCell(self,index):
        # Remove the cell at `index`; raises IndexSizeErr when out of range.
        cells = self._get_cells()
        if index < 0 or index >= len(cells):
            raise IndexSizeErr()
        self.removeChild(cells[index])

    ### Attribute Access Mappings ###

    # Dispatch tables consumed by the 4DOM computed-attribute machinery.
    _readComputedAttrs = HTMLElement._readComputedAttrs.copy()
    _readComputedAttrs.update ({
         'rowIndex'          : _get_rowIndex,
         'sectionRowIndex'   : _get_sectionRowIndex,
         'cells'             : _get_cells,
         'align'             : _get_align,
         'bgColor'           : _get_bgColor,
         'ch'                : _get_ch,
         'chOff'             : _get_chOff,
         'vAlign'            : _get_vAlign,
      })

    _writeComputedAttrs = HTMLElement._writeComputedAttrs.copy()
    _writeComputedAttrs.update ({
         'align'          : _set_align,
         'bgColor'        : _set_bgColor,
         'ch'             : _set_ch,
         'chOff'          : _set_chOff,
         'vAlign'         : _set_vAlign,
      })

    # Create the read-only list of attributes
    # (Python 2 idioms: dict.has_key and a list-returning filter).
    _readOnlyAttrs = filter(lambda k,m=_writeComputedAttrs: not m.has_key(k),
                     HTMLElement._readOnlyAttrs + _readComputedAttrs.keys())
|
[
"maheswar@MacBook-Pro.local"
] |
maheswar@MacBook-Pro.local
|
e165539b779cc69789fdf2f72ed06839e6bf732e
|
0dfbb481a56dbf4ae044859c308c87bb4a871ed6
|
/backend/__init__.py
|
d02809d47158ebf6a7f2764755cd9185890d627b
|
[
"MIT"
] |
permissive
|
Darthone/atto
|
e818fe2d5634f63673caf67140f70436ba567597
|
4d9093b45e82ed5e561878de6064ec7a1d57da05
|
refs/heads/master
| 2021-01-10T18:01:53.955447
| 2015-12-07T16:00:56
| 2015-12-07T16:00:56
| 47,152,667
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 298
|
py
|
from flask_peewee.rest import RestAPI
from app import app # our project's Flask app

# instantiate our api wrapper
api = RestAPI(app)

# register our models so they are exposed via /api/<model>/
# NOTE(review): User, Relationship and Message are never imported in this
# module -- unless they are bound by some other mechanism before import,
# these three lines raise NameError (likely missing `from app import ...`).
api.register(User)
api.register(Relationship)
api.register(Message)

# configure the urls
api.setup()
|
[
"dm661@live.com"
] |
dm661@live.com
|
ff1c4d029dd283c6abac27def1e344d19da67be6
|
c3a814de299cb7fe7e0f194ea0b434285e4d8c89
|
/untitled0.py
|
dd7e0ab644983a0ea1c57095ac69b8adac6a81ff
|
[] |
no_license
|
Gailpig/computationalphysics_N2013301020123
|
2559e975f29b42b37a2597d4201ffd1ea9302015
|
db87cb53180a3572cedcf0645cd70b069fd6d4cd
|
refs/heads/master
| 2021-01-21T04:32:38.350525
| 2016-06-21T13:19:52
| 2016-06-21T13:19:52
| 52,862,134
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 643
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 13 20:22:00 2016

@author: dell1
"""
# Prints a fixed piece of ASCII art line by line (Python 2 print statements).
print' ##      ###     ###   ######'
print' ##     ## ##     ##    ##  ##'
print' ##     ## ##     ##    ##  ##'
print' ##    ##   ##    ##    ##  ##'
print' ##    ##   ##    ##    ##  ##'
print' ##   ## ## ##    ##    ## ##'
print' ##  ##       ##  ##    ##  ##'
print' ####### ##    ### ##   ####'
|
[
"noreply@github.com"
] |
Gailpig.noreply@github.com
|
c6b17689089d32247142bc099851f137f9c1284f
|
e65435847a5f421cb127253214c0b4a1ca4af489
|
/chapter_2/5_1.py
|
6dc43386dd99784cdaf68cec5982f02c7ba225dd
|
[] |
no_license
|
jobiaj/Think_python
|
1ef68a81d92b31c962a4ff543a520a29683d10c0
|
cbeba0c6576a6b5092accc8717fb749cb2c1bc27
|
refs/heads/master
| 2020-06-03T11:35:07.948680
| 2014-09-29T09:05:00
| 2014-09-29T09:05:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 461
|
py
|
def first_line():
    # Horizontal border row: "+ - - - - + - - - - +" (Python 2 print statement).
    print "+","-","-","-","-","+","-","-","-","-","+"
def second_line():
    # Vertical-edges row: three "/" separated by 7 spaces each.
    print "/", " " * 7, "/" , " " * 7 ,"/"
# Draw a grid of boxes by stacking border and edge rows.
# NOTE(review): each helper prints a complete line, so calling it twice in
# one statement prints the row twice (stacked), not side by side -- likely
# the intent was to widen the grid; confirm against the exercise statement.
first_line() , first_line()
second_line() , second_line()
second_line() , second_line()
second_line() , second_line()
second_line() , second_line()
first_line() , first_line()
second_line() , second_line()
second_line() , second_line()
second_line() , second_line()
second_line() , second_line()
first_line() , first_line()
|
[
"jobyalungal@gmail.com"
] |
jobyalungal@gmail.com
|
82cd114c38d8767bd5493b1054b0112eb2f33b82
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02772/s117684727.py
|
af179f388db806b32c0635d0c096c78b0d0171ea
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 253
|
py
|
# Read the element count (consumed but otherwise unused) and the values.
N = int(input())
A = list(map(int, input().split()))

# The document is approved when every even element is divisible by 3 or 5.
if all(value % 3 == 0 or value % 5 == 0 for value in A if value % 2 == 0):
    print('APPROVED')
else:
    print('DENIED')
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
b195749bd3eeada2ec74e5e6b6e576aeb3b40e75
|
4cdfab83d22271e3db4522a179cdd1926f26c5fa
|
/__openerp__.py
|
b2e72004bc5b2b6a9cc582abbe37ba134648d8d0
|
[] |
no_license
|
cgsoftware/View-Ciciriello
|
6aa628e547a1c7b93780a66c282b7259debbed36
|
8180d70398f5b3e8d010fdc89c2cb9f4ab609092
|
refs/heads/master
| 2020-05-18T00:42:12.687876
| 2011-11-06T10:12:40
| 2011-11-06T10:12:40
| 2,372,268
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,517
|
py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Viste modificate per cliente Ciciriello',
'version': '1.0',
'category': 'Generic Modules/Base',
'description': """
Modifche Specifiche ad alcune view
""",
'author': 'C & G Software',
"depends" : ['base', 'account'],
"update_xml" : [
'view_address.xml', 'view_product.xml','view_format.xml'
],
'website': 'http://www.cgsoftware.it',
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"4everamd@gmail.com"
] |
4everamd@gmail.com
|
749e567b11e7775cb000dbb9272ed2c75b74dd54
|
306fa1a76fc5caf86ca4faa42fff08de4e3875d4
|
/queens.py
|
42164e915ed23898591225a82e32c814bcc7a5ee
|
[] |
no_license
|
sivaneshl/pyhton-tutorials
|
2148d0e3eac9098ea37098e231caadab1f3b1f1b
|
5bb25f544c3ae1c09721f7dd8da7ad9f4381bb34
|
refs/heads/master
| 2021-09-04T04:02:56.972691
| 2018-01-15T16:34:43
| 2018-01-15T16:34:43
| 79,484,719
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,265
|
py
|
#!/usr/bin/env python3
"""
N queens problem.
The (well-known) problem is due to Niklaus Wirth.
This solution is inspired by Dijkstra (Structured Programming). It is
a classic recursive backtracking approach.
"""
N = 8                                   # Default board size; command line overrides

class Queens:
    """Recursive backtracking solver for the N-queens problem."""

    silent = 0                          # If true, count solutions only

    def __init__(self, n=N):
        self.n = n
        self.reset()

    def reset(self):
        """Clear all board bookkeeping for a fresh solve."""
        n = self.n
        self.y = [None] * n             # queen's row for each column
        self.row = [0] * n              # row occupancy flags
        self.up = [0] * (2 * n - 1)     # upward-diagonal flags, indexed x - y
        self.down = [0] * (2 * n - 1)   # downward-diagonal flags, indexed x + y
        self.nfound = 0                 # number of solutions found (instrumentation)

    def solve(self, x=0):
        """Place a queen in every column from *x* onward, backtracking as needed."""
        last_column = self.n - 1
        for candidate in range(self.n):
            if not self.safe(x, candidate):
                continue
            self.place(x, candidate)
            if x == last_column:
                self.display()
            else:
                self.solve(x + 1)
            self.remove(x, candidate)

    def safe(self, x, y):
        """True when square (x, y) is attacked by no previously placed queen."""
        return not (self.row[y] or self.up[x - y] or self.down[x + y])

    def place(self, x, y):
        """Mark square (x, y) and its row/diagonals as occupied."""
        self.y[x] = y
        self.row[y] = self.up[x - y] = self.down[x + y] = 1

    def remove(self, x, y):
        """Undo a previous place(x, y)."""
        self.y[x] = None
        self.row[y] = self.up[x - y] = self.down[x + y] = 0

    def display(self):
        """Count the solution and, unless silent, print the board."""
        self.nfound = self.nfound + 1
        if self.silent:
            return
        border = '+-' + '--' * self.n + '+'
        print(border)
        for rank in range(self.n - 1, -1, -1):
            cells = ' '.join('Q' if self.y[col] == rank else '.'
                             for col in range(self.n))
            print('| ' + cells + ' |')
        print(border)
def main():
    """CLI driver: ``queens.py [-n] [size]``; -n counts solutions silently."""
    import sys
    silent = 0
    n = N
    if sys.argv[1:2] == ['-n']:
        silent = 1
        del sys.argv[1]
    if sys.argv[1:]:
        n = int(sys.argv[1])
    q = Queens(n)
    q.silent = silent
    q.solve()
    print("Found", q.nfound, "solutions.")

if __name__ == "__main__":
    main()
|
[
"noreply@github.com"
] |
sivaneshl.noreply@github.com
|
8eeaf7e25b89c82e2173abd9454af9198b6e9be1
|
79461a46a9ddc3952ddfcecd54383eb4dd8ba957
|
/code/Old/syntaxic.py
|
c209b38dcc36e7944cd5d73aacf9ac136c34d1bb
|
[] |
no_license
|
HuylenbroeckFlorent/BA3_Compilation_Projet
|
068473752525061355e51069caded79ae2476f69
|
3c526316e7f2d3ea5b2e95927cb4c86ba921d774
|
refs/heads/master
| 2020-04-25T21:29:41.242429
| 2019-05-06T21:59:57
| 2019-05-06T21:59:57
| 173,081,386
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,097
|
py
|
import ply.lex as lex
import ply.yacc as yacc
from lexical import tokens
# Maps each Dumbo operator symbol to the matching Python binary callable.
operations = {
    '+' : lambda x, y : x+y,
    '-' : lambda x, y : x-y,
    '*' : lambda x, y : x*y,
    '/' : lambda x, y : x/y,
    '>' : lambda x, y : x>y,
    '<' : lambda x, y : x<y,
    '=' : lambda x, y : x==y,
    '!=' : lambda x, y : x!=y,
    '<=' : lambda x, y : x<=y,
    '>=' : lambda x, y : x>=y,
    'and' : lambda x, y : x and y,
    'or' : lambda x, y : x or y,
}
# Operator precedence table for ply.yacc, lowest-binding first.
# Fix: the original declared COMPARE twice (once 'left', once 'right');
# ply rejects duplicate precedence declarations for the same token.
precedence = (
    ('left', 'COMPARE'),
    ('left', 'POINT'),
    ('left', 'ADD_OP'),
    ('left', 'MUL_OP'),
)
def finalPrint(t):
    """Echo *t* to stdout and hand it back unchanged (placeholder output hook)."""
    print(t)  # TEMPORARY FUNCTION (translated from the original French note)
    return t
def p_programme_txt(p):
    '''programme : txt'''
    # A programme may consist of plain text only.
    p[0]=p[1]
def p_programme_txtprog(p):
    '''programme : txt programme'''
    # Text chunk followed by more programme: concatenate the results.
    p[0]=p[1]+p[2]
def p_programme_dumboblock(p):
    '''programme : dumbo_bloc'''
    # A programme may be a single dumbo block.
    p[0]=p[1]
def p_programme_dumboblockprog(p):
    '''programme : dumbo_bloc programme'''
    # Dumbo block followed by more programme: concatenate the results.
    p[0]=p[1]+p[2]
def p_txt(p):
    '''txt : TXT'''
    # Raw text passes through unchanged.
    p[0]=p[1]
def p_dumbo_block(p):
    '''dumbo_bloc : BLOCKstart expression_list BLOCKend'''
    # TO FINISH (original comment: "A FINIR").
    # NOTE(review): a ply rule should assign p[0]; the return value below
    # is ignored by the parser, so dumbo_bloc currently yields None.
    return 1
def p_expression_list_exprl(p):
    '''expression_list : expression SEMICOLON expression_list'''
    # NOTE(review): builds a nested [expr, rest] pair rather than a flat list.
    p[0]=[p[1],p[3]]
def p_expression_list(p):
    '''expression_list : expression SEMICOLON'''
    # Base case: one expression terminated by ';' makes a singleton list.
    p[0]=[p[1]]
def p_expression_print(p):
    '''expression : PRINT string_expression'''
    # 'print' statement: emit the evaluated string expression.
    p[0]=finalPrint(p[2])
def p_expression_forstr(p):
    '''expression : FOR VARIABLE IN string_list DO expression_list ENDFOR'''
    #FOR IMPLEMENTATION
    # NOTE(review): loop semantics not implemented; p[0] is just the loop
    # variable name.
    p[0]=p[2]
def p_expression_forvar(p):
    '''expression : FOR VARIABLE IN VARIABLE DO expression_list ENDFOR'''
    #FOR IMPLEMENTATION
    # NOTE(review): not implemented; p[0] is never assigned (stays None).
def p_expression_varstring(p):
    '''expression : VARIABLE AFFECT string_expression'''
    # NOTE(review): placeholder -- assignment semantics not implemented yet.
    p[0]=1 #Assignement p[0]=("p[1]=p[3]")
def p_expression_varlist(p):
    '''expression : VARIABLE AFFECT string_list'''
    # NOTE(review): no action yet; p[0] is never assigned (stays None).
def p_string_expression_string(p):
    '''string_expression : STR'''
    # A string literal evaluates to itself.
    p[0]=str(p[1])
def p_string_expression_var(p):
    '''string_expression : VARIABLE'''
    # NOTE(review): yields the variable *name*, not a looked-up value.
    p[0]=p[1] #VARIABLE
def p_string_expression_strstr(p):
    '''string_expression : string_expression POINT string_expression'''
    # '.' concatenates two string expressions.
    p[0]=p[1]+p[3]
def p_string_list(p):
    '''string_list : LPAREN string_list_interior RPAREN'''
    # Fix: the grammar docstring lacked the "string_list :" rule head, so
    # ply could not associate this function with any production (it was
    # silently unusable).  The value is the inner list, parens stripped.
    p[0]=p[2]
def p_string_list_interior(p):
    '''string_list_interior : string COMA string_list_interior'''
    # NOTE(review): no action, and the production references a 'string'
    # symbol defined nowhere in this file (other rules use STR /
    # string_expression) -- confirm against the grammar specification.
def p_string_list_interior_end(p):
    '''string_list_interior : STR'''
    # Base case: a single literal makes a singleton list.
    p[0]=[str(p[1])]
def p_error(p):
    """Report a syntax error to stdout.

    Fix: the original also called ``yacc.error()``, which is not part of the
    ply.yacc module API and raised AttributeError the first time a parse
    error occurred.  If token-level error recovery is wanted, the parser
    object's ``errok()`` should be used instead.
    """
    print("Yacc Error")
yacc.yacc()  # build the parser tables from the p_* rules in this module

if __name__ == "__main__":
    # Debug mode: tokenize stdin and dump the token stream.
    import sys
    lexer = lex.lex()
    lexer.input(sys.stdin.read())
    for token in lexer:
        print("line %d : %s (%s) " % (token.lineno, token.type, token.value))

"""
yacc.yacc(outputdir='generated')
if __name__ == '__main__':
    import sys
    input = file(sys.argv[1]).read()
    result = yacc.parse(input)
    print(result)
"""
|
[
"alfatta_@hotmail.com"
] |
alfatta_@hotmail.com
|
401cb8855aa3803f300b8f36d911255bb8d75d12
|
2d89afd5ca29fc2735a00b0440ea7d5408c8e398
|
/Crash Course/chap08/making_pizzas.py
|
af1cccef50058c8fd85c9b35c58b99c668176743
|
[] |
no_license
|
TrystanDames/Python
|
6b2c8721606e046d9ff0708569a97d7b78a0f88e
|
68b3f5f160b46fa4e876d58808ff78ac7f2d84df
|
refs/heads/main
| 2023-06-03T14:25:51.638345
| 2021-06-23T08:54:18
| 2021-06-23T08:54:18
| 357,112,394
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 165
|
py
|
#import pizza
# Import a single function under an alias instead of the whole module.
from pizza import make_pizza as mp
#this will allow us to just use make_pizza'
mp(16, 'pepperoni')
mp(12, 'mushrooms', 'green peppers', 'extra cheese')
|
[
"trystandames08@gmail.com"
] |
trystandames08@gmail.com
|
8aa533705c3bfa46061d55ff545f89b8b3795379
|
632f496713b33e60605ad5e76bf68720df5609fd
|
/test_env/iot_toolkit/device_informer.py
|
5a5ad5d40b7298149644e4c64468504591a569d8
|
[] |
no_license
|
johnny-cy/MQTT_KAFKA_DOCKER_MYSQL_DJANGO_ELK
|
01e00f706a9de2f172c521e16423b67dc65e07f5
|
41545d19ca7bc74d833d225469546c8b67275973
|
refs/heads/master
| 2022-08-20T19:35:28.489747
| 2020-05-28T14:29:29
| 2020-05-28T14:29:29
| 267,560,496
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 822
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Get devices information.
This application try to get devices information using RESTful API::
https://iot.epa.gov.tw/iot/v1/device
"""
import time
from main import main_func
from coordinator import Coordinator
from etl_functions import ETLMixin
class DeviceInformer(Coordinator, ETLMixin):
    """Coordinator that fetches device information and pauses a day between reads."""

    def pre_read(self):
        """Attach a post-read hook so every fetch is followed by a 24h sleep."""
        http_handler = self.config.input
        http_handler.post_read_func = self.sleep_one_day

    def post_read(self, data):
        """Normalise fetched records through the ETL mixin before emitting them."""
        yield from self.etl_value(data)

    def sleep_one_day(self):
        time.sleep(24*60*60)  # 24 hours between polls
def main():
    """Run DeviceInformer under the shared main_func process harness."""
    main_func(DeviceInformer,
              process_name="device_informer",
              description="Get device information from iot.epa.gov.tw",
              )

if __name__ == "__main__":
    main()
|
[
"johnny-cy.lin@outlook.com"
] |
johnny-cy.lin@outlook.com
|
1e377736dc3ac326fedd2f872744593f1f7c31c5
|
95410dd4b8a12e2551448c013fb2f3efa6b6f6c1
|
/Challenge.py
|
8bf400afa46fc34e20f7c550f3e9a85289edcfaa
|
[] |
no_license
|
bitbyt3r/magfest-challenges
|
f7f60759b7e71af31b52cc999bb5a9f430639f81
|
d6455aea867104cdc910d325f7ec5285c29f54e3
|
refs/heads/master
| 2021-01-13T02:19:15.589039
| 2015-04-16T12:43:24
| 2015-04-16T12:43:24
| 28,695,467
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 792
|
py
|
class Challenge:
def __init__(self, config):
self.__dict__.update(config)
self.image_path = "./images/"+self.system+"/"+self.imageFile
self.rom_path = "./roms/"+self.system+"/"+self.romFile
self.save_path = "./saves/"+self.system+"/"+self.saveFile
self.description = self.description.split("NEWLINE")
self.console = None
def generate_evrouter(self):
string = ""
string += 'Window ""\n'
string += self.console.evrouter_load + "\n"
string += self.console.evrouter_kill + "\n"
string += self.console.evrouter_exit + "\n"
return string
def launch_cmd(self):
cmd = self.console.command + " "
cmd += " ".join(self.console.arguments)
cmd += " "+self.rom_path
return cmd
def __repr__(self):
return self.readableName
|
[
"mark25@umbc.edu"
] |
mark25@umbc.edu
|
88c547a8ef64b27b0186eb32424d09cd931b7714
|
20c0dbde3c2fdb2345171ecd869f56b4ddcb4a11
|
/zlibrary/main.py
|
b62edb38dd9f5fd715d788a1322c024c368526a1
|
[] |
no_license
|
danielggc/python
|
a981d9be8d8591d796a9508fd813b3a2e94d7c26
|
f8ef5218e2a577a1535e8c57287dfffe6e98294c
|
refs/heads/master
| 2023-01-13T11:09:53.483352
| 2020-11-18T21:17:28
| 2020-11-18T21:17:28
| 314,055,752
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 24,701
|
py
|
from urllib.request import urlopen
from bs4 import BeautifulSoup
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.utils import keys_to_typing
from selenium.webdriver.common.keys import Keys
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
import pyautogui
import shutil
import re
import random
from unicodedata import normalize
import csv
import requests
import wget
import shutil
import os
import errno
import unittest
from selenium.webdriver.support.ui import WebDriverWait
from tbselenium.tbdriver import TorBrowserDriver
import tbselenium.common as cm
from tbselenium.utils import launch_tbb_tor_with_stem
from selenium.webdriver.common.keys import Keys
from time import sleep
from tbselenium.utils import launch_tbb_tor_with_stem
from selenium.webdriver.common.by import By
from argparse import ArgumentParser
# trampas http://dl39.zlibcdn.com/dtoken/ee9a8e4f62ed4dbdb5c39eb772c3c300 ?
#https://singlelogin.org/?logoutAll
#https://b-ok.cc/book/
class DescargarPdf:
def __init__(self):
self.contadorCredenciales=0
self.tbb_dir = "/usr/local/share/tor-browser_en-US"
self.usuario=[]
self.contraseñaTxT=[]
self.conversor='?convertedTo=pdf'
def iniciarTor(self):
self.zLibraty = TorBrowserDriver(self.tbb_dir, tbb_logfile_path='test.log')
def iniciarSecion(self):
self.element=self.zLibraty.find_element_by_name("email")
self.element.send_keys(self.correo)
sleep(2)
self.element2=self.zLibraty.find_elements_by_class_name("form-control")[1]
self.element2.send_keys(self.contraseña)
self.element2.send_keys(Keys.RETURN)
def paginaDescargas(self):
print("estoy en la funcion paginaDescagas")
self.zLibraty.load_url(self.url)
sleep(4)
self.html=self.zLibraty.page_source
def paginaPrinsipal(self,añoInicial,añoFinal):
self.urlAños='http://zlibraryexau2g3p.onion/s/?yearFrom='+str(añoInicial)+'&yearTo='+str(añoFinal)
self.url=self.urlAños
def cambiarPagina(self,x):
print("estoy en cambiar pagina prinsipal")
self.url+='&page='+str(x)
print(self.url)
def Crearcsv(self):
desde=datosDescarga(1)
asta=datosDescarga(2)
self.carpetaUrl='/home/dd/Documentos/zlibrary/libros'+str(desde)+'-'+str(asta)+'/url'
try :
os.mkdir(self.carpetaUrl)
except OSError as e:
if e.errno != errno.EEXIST:
raise
self.escrivirUrlWed=csv.writer(open('/home/dd/Documentos/zlibrary/libros'+str(desde)+'-'+str(asta)+'/url/url2.csv','w'))
self.imprimirUrlPdf=csv.writer(open('/home/dd/Documentos/zlibrary/libros'+str(desde)+'-'+str(asta)+'/url/urlDowload2.csv','w'))
def credenciales(self,numeroUsuario):
print("llegue")
if self.contadorCredenciales==0 or self.contadorCredenciales==20:
self.zLibraty.load_url("https://singlelogin.org/")
self.zLibraty.find_element_by_name("redirectToHost").click()
sleep(3)
pyautogui.press("down")
sleep(2)
pyautogui.press("down")
sleep(1)
pyautogui.press("enter")
sleep(5)
self.correo=self.usuario[numeroUsuario]
self.contraseña=self.contraseñaTxT[numeroUsuario]
def UsuariosYcontraseñas(self):
self.dir='/home/dd/Documentos/zlibrary/credenciales/contraseñasYcorreos.txt'
self.data=open(self.dir,'r+')
for self.i in range(0,200):
if self.i%2==0 :
self.usuario.append(self.data.readline())
if self.i%2!=0:
self.contraseñaTxT.append(self.data.readline())
def urlPdf(self,):
self.contadorCredenciales=1
self.boleanoPdf=0
self.respaldoContador=0
self.contadorUsuarios=usuarioUsadosLeer()
self.contadorLibros=datosDescarga(4)
self.contadorLibros2=self.contadorLibros%10
self.Crearcsv()
self.soup=BeautifulSoup(self.html,'html.parser')
try:
for self.urlwed in self.soup.find_all(itemprop = "name") :
self.contador=0
self.urlwed=self.urlwed.find('a',href=re.compile(''))
self.urlDowload=self.urlwed.get('href')
self.urlpdfGeleneralH=re.sub('/book/','https://b-ok.cc/book/',self.urlDowload)
self.urlDowload=re.sub('/book/','http://zlibraryexau2g3p.onion/book/',self.urlDowload)
self.escrivirUrlWed.writerow([self.urlDowload])
print(self.urlDowload)
self.voleano=validarFormato(self.urlpdfGeleneralH)
guardarNumeroDescargas(self.contadorLibros)
print(self.respaldoContador)
if self.contadorLibros==self.respaldoContador:
for self.urlRedirec in range(0,1):
self.zLibraty.load_url(self.urlDowload)
sleep(5)
self.htmlPdf=self.zLibraty.page_source
self.soupRedirec=BeautifulSoup(self.htmlPdf,'html.parser')
self.urlDowloadPDF=self.soupRedirec.find(class_="btn btn-primary dlButton addDownloadedBook")
self.urlDowloadPDF=self.urlDowloadPDF.get('href')
self.urlDowloadPDF=re.sub('/dl/','http://zlibraryexau2g3p.onion/dl/',self.urlDowloadPDF)
self.imprimirUrlPdf.writerow([self.urlDowloadPDF])
print(self.urlDowloadPDF)
print("vamos a por el if")
sleep(15)
if self.voleano==True:
self.zLibraty.set_page_load_timeout(12)
try:
self.zLibraty.load_url(self.urlDowloadPDF)
except:
sleep(5)
self.zLibraty.set_page_load_timeout(7000)
print("funciona PDF ")
self.voleano=False
sleep(5)
self.contadorLibros+=1
self.contadorLibros2+=1
else:
self.zLibraty.set_page_load_timeout(12)
try:
self.zLibraty.load_url(self.urlDowloadPDF)
except:
sleep(8)
pyautogui.press("down")
sleep(2)
pyautogui.press("enter")
self.zLibraty.set_page_load_timeout(7000)
sleep(5)
self.contadorLibros+=1
self.contadorLibros2+=1
self.zLibraty.load_url("about:downloads")
self.datosEsperaDescarga()
self.peticiones()
self.zLibraty.back()
informaiconPdf(self.urlpdfGeleneralH)
guardarNumeroDescargas(self.contadorLibros)
self.respaldoContador+=1
if self.contadorLibros==self.respaldoContador:
if self.contadorLibros2%10==0:
print((self.contadorLibros2-1)%10)
self.contador+=1
if self.contadorLibros==20:
self.contadorCredenciales=20
print("saliendo de secion¡¡¡¡¡¡")
pyautogui.moveTo(1707,245)
pyautogui.hotkey("ctrl","shift","u")
sleep(2)
pyautogui.press("enter")
sleep(7)
pyautogui.press("enter")
sleep(15)
else:
print("saliendo de secion")
self.zLibraty.get("http://zlibraryexau2g3p.onion/logout.php")
self.contadorUsuarios+=1
print(self.contadorUsuarios)
try:
self.zLibraty.switch_to_window(self.zLibraty.window_handles[0])
except:
print("error al cambian de ventana")
usuarioUsadosReescrivir(self.contadorUsuarios)
print("por aqui¿¿¿¿¿¿")
self.credenciales(self.contadorUsuarios)
self.contadorCredenciales=1
print("no por aqui¿¿¿¿¿¿")
sleep(20)
self.iniciarSecion()
sleep(15)
self.paginaDescargas()
sleep(7)
self.contadorLibros2=0
sleep(15)
print("numero de li bros por usuario ",self.contadorLibros2)
if self.contador==5:
self.contador=0
except OSError as e :
print(e.strerror)
print("error en la urlPdf:::::")
guardarNumeroDescargas(self.contadorLibros)
usuarioUsadosReescrivir(self.contadorUsuarios)
print(self.contadorLibros)
archivos=int(contarNueroArchivos())
print(archivos)
self.zLibraty.load_url("about:downloads")
self.datosEsperaDescarga()
self.peticiones()
self.zLibraty.back()
informaiconPdf(self.urlpdfGeleneralH)
def DescargarContenido(self,_html):
self.contenido=_html
def serrarTor(self):
self.zLibraty.close()
def datosEsperaDescarga(self):
sleep(4)
self.htmlValidador=self.zLibraty.page_source
def validarDescarga(self):
self.htmlFalce=self.zLibraty.page_source
self.soupFalce=BeautifulSoup(self.htmlFalce,"html.parser")
self.validarfalce=self.soupFalce.find_all("description",class_="downloadDetails downloadDetailsNormal")
self.respuestafalce=re.search("value=.+",str(self.validarfalce))
self.buscarFalse=self.respuestafalce.group()
if re.search("Canceled",self.buscarFalse):
print("se daño al descarga =(")
sleep(5)
pyautogui.click(1393,139)
sleep(5)
else :
if re.search("Failed",self.buscarFalse):
print("se daño al descarga pero vamos a solucionarlo =( ")
sleep(5)
pyautogui.click(1393,139)
sleep(5)
else:
print("la descarga va bien =)")
def peticiones(self):
self.validarDescarga()
self.carga=0
self.daño=0
self.conteo=0
while self.carga<100:
self.soup=BeautifulSoup(self.htmlValidador,"html.parser")
try:
self.archivoDescarga=self.soup.find_all("progress",class_="downloadProgress")
self.respaldo=re.split("value",str(self.archivoDescarga))
self.tiempo=re.search("[0-9]+",self.respaldo[1])
print(self.tiempo.group())
self.carga=int(self.tiempo.group())
self.datosEsperaDescarga()
sleep(3)
self.validarDescarga()
if self.conteo==3:
pyautogui.press("enter")
self.conteo=0
except:
print("o no ,se daño la descargar y no la e podido volver a iniciar")
if self.daño==7:
os.system('rm -r /home/dd/zlibros/libros1920-1921/libro/*.*')
raise
self.daño+=1
sleep(5)
def contarNueroArchivos():
fuenteLibro='/home/dd/zlibros/libros1920-1921/libro'
NumeroDescargas=0
if os.listdir(fuenteLibro):
for i in os.listdir(fuenteLibro):
NumeroDescargas+=1
return NumeroDescargas
def informaiconPdf(_urlInformacion):
urlInformacion=_urlInformacion
archivos=int(contarNueroArchivos())
print(archivos)
if archivos==0:
dirUrlDañadas='/home/dd/Documentos/zlibrary/registro/UrlDañadas.txt'
urlDañadas=open(dirUrlDañadas,'r+')
for a in urlDañadas.readlines():
urlDañadas.readline()
urlDañadas.write(urlInformacion+'\n')
if archivos!=0:
htmlWed=requests.get(urlInformacion)
soup=BeautifulSoup(htmlWed.content,'html.parser')
autor=str()
boleanoAutor=False
if soup.find_all(itemprop="author")[0]:
autor=soup.find_all(itemprop="author")[0]
autor=autor.get('href')
if re.sub("/g/",'',autor):
autor=re.sub("/g/",'',autor)
boleanoAutor=True
if re.sub("/",'',autor):
autor=re.sub("/",'',autor)
titulo=soup.find_all(class_="moderatorPanelToggler")
titulo=re.sub('</h1>]',' ',str(titulo))
respaldo=re.split('>',titulo)
titulo=respaldo[1]
if re.split('[A-Z]|[a-z]|[0-9]',titulo):
print(titulo)
try:
d=re.search('[A-Z]|[a-z]|[0-9]|\w',titulo,1)
print(d.group())
respaldo=re.split('[A-Z]|[a-z]|[0-9]|\w',titulo,1)
titulo=d.group()+respaldo[1]
if re.split(' ',titulo):
respaldo=re.split(' ',str(titulo))
titulo=respaldo[0]
respaldo=re.sub(' ','_',titulo)
titulo=respaldo
if re.search("/","_",titulo):
titulo=re.sub("/","_",titulo)
if re.search("'\'","_",titulo):
titulo=re.sub("/","_",titulo)
print("ey aqui hay un bacaeslach si es que aparesio un problema")
except:
if boleanoAutor==True:
titulo=autor
else:
dirUrlDañadas='/home/dd/Documentos/zlibrary/registro/UrlDañadas.txt'
urlDañadas=open(dirUrlDañadas,'r+')
for a in urlDañadas.readlines():
urlDañadas.readline()
urlDañadas.write(urlInformacion+'\n')
os.system('rm -r /home/dd/zlibros/libros1920-1921/libro/*.*')
print(titulo)
try:
descripcion=soup.find_all(id="bookDescriptionBox")
descripcion=re.sub('<p>',' ',str(descripcion))
descripcion=re.sub('</p>',' ',str(descripcion))
descripcion=re.sub('<span>',' ',str(descripcion))
descripcion=re.sub('</span>',' ',str(descripcion))
descripcion=re.sub('</b>',' ',str(descripcion))
descripcion=re.sub('<b>',' ',str(descripcion))
descripcion=re.sub('</div>]',' ',str(descripcion))
respaldo=re.split('>',descripcion)
except:
print("huvo un error en la direccion de la descripcion")
try:
descripcion=respaldo[1]
except:
print("no se encontro la descripcion")
print(descripcion)
try:
informacionPDF =soup.find_all(style=re.compile("overflow: hidden; zoom: 1; margin-top: 30px;"))
datos=re.sub("<div class=",' ',str(informacionPDF))
datos=re.sub('</div',' ',datos)
datos=re.sub('div',' ',datos)
datos=re.sub('property_label',' ',datos)
datos=re.sub('property_value',' ',datos)
datos=re.sub('<',' ',datos)
datos=re.sub('"',' ',datos)
datos=re.sub('>',' ',datos)
except:
print("huvo un errror en la direccion datos")
try:
i =re.split('\n',datos,1)
datos=i[1]
datos=re.sub(']',' ',datos)
except:
print("no se encontraron los datos")
desde=datosDescarga(1)
asta=datosDescarga(2)
destinoLibro='/home/dd/Documentos/zlibrary/libros'+str(desde)+'-'+str(asta)+'/'+str(titulo)
print(destinoLibro)
fuenteLibro='/home/dd/zlibros/libros1920-1921/libro'
if re.sub('\n','',destinoLibro):
destinoLibro=re.sub('\n','',destinoLibro)
try :
os.mkdir(destinoLibro)
except OSError as e:
if e.errno != errno.EEXIST:
raise
print("error al crear la carpeta del libro")
try :
shutil.move(fuenteLibro,destinoLibro)
except OSError as e :
contadorNombreCarpeta=1
crear=False
while crear==False:
try:
os.mkdir(destinoLibro+str(contadorNombreCarpeta))
crear=True
except OSError as e :
contadorNombreCarpeta+=1
shutil.move(fuenteLibro,destinoLibro+str(contadorNombreCarpeta))
txt=open(destinoLibro+str(contadorNombreCarpeta)+'/informacion.txt','w')
try :
if titulo!=autor:
if boleanoAutor==True:
txt.write(titulo+'\n'+autor+'\n'+ descripcion +'\n'+ datos+"\n"+urlInformacion)
else:
txt.write(titulo+'\n'+ descripcion +'\n'+ datos+"\n"+urlInformacion)
else:
txt.write(titulo+'\n'+ descripcion +'\n'+ datos+"\n"+urlInformacion)
except OSError as e :
if e.errno != errno.EEXIST:
raise
txt.close()
txt=open(destinoLibro+str(contadorNombreCarpeta)+'/indiseYmas.txt','w')
try :
txt.write(str(soup))
except OSError as e :
if e.errno != errno.EEXIST:
raise
txt.close()
print("error al copiar la carpeda libro ya esistia")
try :
os.mkdir(fuenteLibro)
except OSError as e:
if e.errno != errno.EEXIST:
raise
print("error al crear la carpeta de descarga de libros")
txt=open(destinoLibro+'/informacion.txt','w')
try :
txt.write(titulo+'\n'+ descripcion +'\n'+ datos+"\n"+urlInformacion)
except OSError as e :
if e.errno != errno.EEXIST:
raise
txt2=open(destinoLibro+'/indiseYmas.txt','w')
try :
txt2.write(str(soup))
except OSError as e :
if e.errno != errno.EEXIST:
raise
print("error al crear el indise")
else:
dirUrlDañadas='/home/dd/Documentos/zlibrary/registro/UrlDañadas.txt'
urlDañadas=open(dirUrlDañadas,'r+')
for a in urlDañadas.readlines():
urlDañadas.readline()
urlDañadas.write(urlInformacion+'\n')
def validarFormato(_url):
formatoPdf=True
url=_url
htmlFormato=requests.get(url)
soup=BeautifulSoup(htmlFormato.content,'html.parser')
informacionPDF =soup.find_all(style=re.compile("overflow: hidden; zoom: 1;"))
datos=re.sub("<div class=",' ',str(informacionPDF))
datos=re.sub('</div',' ',datos)
datos=re.sub('div',' ',datos)
if re.search('PDF',datos):
formatoPdf=True
else :
formatoPdf=False
return formatoPdf
def tiempoDescarga():
fuenteLibro='/home/dd/zlibros/libros1920-1921/libro'
descargado=False
numeroArchivo=contarNueroArchivos()
print("esperando la descarga ...")
while numeroArchivo >1:
numeroArchivo=contarNueroArchivos()
sleep(4)
pyautogui.press("enter")
sleep(2)
archivos= os.listdir(fuenteLibro)
print(archivos)
while descargado==False:
if re.search('Sin|sin',str(archivos)):
if re.search('confirmar|Confirmar',str(archivos)):
pyautogui.press("enter")
sleep(4)
else:
descargado=True
else:
if re.search('Unconfirmed|unconfirmed',str(archivos)):
pyautogui.press("enter")
sleep(4)
else:
descargado=True
archivos=os.listdir(fuenteLibro)
print("termine",archivos)
def guardarNumeroDescargas(_numeroDescargas):
dirRegistroDescargas='/home/dd/Documentos/zlibrary/registro/registroDescargas.txt'
datoAñosF=open(dirRegistroDescargas,'r')
contador=0
lista=[]
nuemroDescargas=_numeroDescargas
for datos in re.split('\n',str(datoAñosF.read())):
contador+=1
if contador <4:
lista.append(datos)
print(datos)
lista.append(nuemroDescargas)
datoAñosF.close()
datoAñosF=open(dirRegistroDescargas,'w')
datoAñosF.close()
datoAñosF=open(dirRegistroDescargas,'r+')
for a in range(0,4):
datoAñosF.write(str(lista[a])+'\n')
def guardarHistorial(_añoX,_añoY,_paginas):
dirRegistroDescargas='/home/dd/Documentos/zlibrary/registro/registroDescargas.txt'
datoAñosF=open(dirRegistroDescargas,'r')
contador=0
lista=[]
paginash=_paginas
añoXh=_añoX
añoYh=_añoY
for datos in re.split('\n',str(datoAñosF.read())):
contador+=1
if contador <5:
respaldoh=datos
lista.append(añoXh)
print(paginash)
lista.append(añoYh)
lista.append(paginash)
lista.append(respaldoh)
datoAñosF.close()
datoAñosF=open(dirRegistroDescargas,'w')
datoAñosF.close()
datoAñosF=open(dirRegistroDescargas,'r+')
for a in range(0,4):
datoAñosF.write(str(lista[a])+'\n')
def datosDescarga(_indicador):
indicador=_indicador
dirRegistroDescargas='/home/dd/Documentos/zlibrary/registro/registroDescargas.txt'
datoAños=open(dirRegistroDescargas,'r')
contador=0
for i in datoAños.readlines():
contador+=1
if indicador==contador:
dato=i
print("datos=)=)=)=)",dato)
datoAños.close()
return int(dato)
def usuarioUsadosLeer():
dirRegistroDescargas='/home/dd/Documentos/zlibrary/registro/registroUsuario.txt'
datosAñosU=open(dirRegistroDescargas,'r')
datosUsuarios=int(datosAñosU.readline())
return datosUsuarios
def usuarioUsadosReescrivir(_usuariUsados):
usuariUsados =_usuariUsados
dirRegistroDescargas='/home/dd/Documentos/zlibrary/registro/registroUsuario.txt'
datosAñosU=open(dirRegistroDescargas,'w+')
datosAñosU.write(str(usuariUsados))
añoX=datosDescarga(1)
añoY=datosDescarga(2)
conteoPagina=datosDescarga(3)
wedPrinsipal=DescargarPdf()
wedPrinsipal.iniciarTor()
sleep(15)
wedPrinsipal.UsuariosYcontraseñas()
sleep(15)
wedPrinsipal.credenciales(usuarioUsadosLeer())
sleep(7)
wedPrinsipal.iniciarSecion()
sleep(10)
for i in range(0,3):
wedPrinsipal.paginaPrinsipal(añoX,añoY)
carpetalibrosAños='/home/dd/Documentos/zlibrary/libros'+str(añoX)+'-'+str(añoY)
try :
os.mkdir(carpetalibrosAños)
except OSError as e:
if e.errno != errno.EEXIST:
raise
print("error al crear la carpeta del libro")
try:
while conteoPagina <11:
guardarHistorial(añoX,añoY,conteoPagina)
print(conteoPagina)
print('hola')
print(conteoPagina)
print("holas ya terminaste una pagina ?")
wedPrinsipal.cambiarPagina(conteoPagina)
wedPrinsipal.paginaDescargas()
sleep(6)
wedPrinsipal.urlPdf()
print(conteoPagina)
print('hola')
guardarNumeroDescargas(0)
conteoPagina+=1
print(conteoPagina)
guardarHistorial(añoX,añoY,conteoPagina)
except OSError as a :
print("no es so eso ======u")
print(a.strerror)
guardarHistorial(añoX,añoY,conteoPagina)
conteoPagina=1
añoY+=2
añoX+=2
print("vamos a cambiar de año ¡¡¡¡¡ yupy")
print(añoX,añoY)
guardarHistorial(añoX,añoY,conteoPagina)
sleep(30)
|
[
"danielgrecia7@gmail.com"
] |
danielgrecia7@gmail.com
|
4abd216b71e494c4d3722125e10ab928624f62d1
|
f2581c0caaf711fd7e864ad346bf7f09337b5919
|
/d8night/urls.py
|
12f7e605a1c9890e451b2a1a5c1c04426b9377d6
|
[] |
no_license
|
Cristian-Baeza/d8nite-backend
|
43fbac284f04bdacf40fa9d8b2821dcc889acff3
|
c64167f42700f05cdcea7c3ad67cfd71e81cd824
|
refs/heads/main
| 2023-04-12T13:16:57.551799
| 2021-05-14T20:58:15
| 2021-05-14T20:58:15
| 367,477,367
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,022
|
py
|
from django.urls import path
from .views import current_user, UserList, ActivityViewSet, DateViewSet, ProfileViewSet, breweries_list, yelp_api_zip, google_maps_api_zip, google_places_api_zip_keyword, weather_api_zip
from rest_framework.routers import DefaultRouter
urlpatterns = [
path('current_user/', current_user),
path('users/', UserList.as_view()),
path("breweries/<int:zip>", breweries_list, name='breweries_list'),
path("restaurants/<int:zip>", yelp_api_zip, name="yelp_api_zip"),
path("maps/<int:zip>", google_maps_api_zip, name="google_maps_api_zip"),
path("maps/<int:zip>/<str:keyword>", google_places_api_zip_keyword, name="google_places_api_zip_keyword"),
path("weather/<int:zip>", weather_api_zip, name="weather_api_zip"),
]
router = DefaultRouter()
router.register(r'activities', ActivityViewSet, basename='activity')
router.register(r'dates', DateViewSet, basename='date')
router.register(r'profiles', ProfileViewSet, basename='profile')
urlpatterns = urlpatterns + router.urls
|
[
"cristianbaeza092@gmail.com"
] |
cristianbaeza092@gmail.com
|
fc3486add471fbe41421c34ec345863d7b438a4a
|
7edce8acdb6017502a17be74f8e232ba810313f5
|
/KeithleyInstruments.py
|
e90a1e0f40963291297a6eed5a029ea62bab0382
|
[] |
no_license
|
SherwinGroup/HarvardCode
|
1fe4997d3c5b533d039d97650d8303584bf4c626
|
2f20b284d0f38673fd7ad767b94485122d4e3e7d
|
refs/heads/master
| 2021-01-19T22:32:52.146842
| 2015-05-09T20:28:15
| 2015-05-09T20:28:15
| 35,344,156
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,894
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 02 09:03:41 2014
@author: dvalovcin
"""
import visa
import numpy as np
import pyqtgraph as pg
class Keithley236:
on = False
def __init__(self,GPIB_Number=None):
rm = visa.ResourceManager()
self.instrument = rm.get_instrument(GPIB_Number)
pass
def setBias(self,BiasLevel):
#Set to the desired bias level, auto ranging and waiting 10ms
toWrite = 'B'+str(BiasLevel)+',0,10X'
print toWrite
self.instrument.write('B'+str(BiasLevel)+',0,10X')
def askCurrent(self):
return float(self.instrument.ask('G5,2,0X').encode('ascii')[:-2].split(',')[1])
def toggleOutput(self):
self.on = not self.on
toWrite = 'N'+str(int(self.on))
print toWrite
self.instrument.write(toWrite)
def turnOff(self):
self.on = False
self.instrument.write('N0X')
def turnOn(self):
self.on = True
self.instrument.write('N1X')
class Keithley2400:
sourcing = None
sensing = None
def __init__(self,GPIB_Number=None,graph = None,stopCurrent=1e-4,compliance=1e-3):
rm = visa.ResourceManager()
self.instrument = rm.get_instrument(GPIB_Number)
if not graph == None:
self.graphing = True
self.graph = graph
else:
self.graphing = False
self.graph = None
self.setScanParams(stopCurrent,compliance)
self.instrument.write("*rst; status:preset; *cls")
self.setSourceMode('volt')
self.setSenseMode('curr')
#set sensing ranging
self.setSenseRange(5e-3)
self.instrument.write("sens:CURR:prot:lev " + str(compliance))
self.instrument.write("SYST:RSEN OFF") #set to 2point measurement?
def setSourceMode(self, mode):
newMode = 'volt'
if mode.lower() in ('c', 'current', 'curr'):
newMode = 'curr'
st = 'sour:func:mode '+newMode
self.instrument.write(st)
self.sourcing = newMode
def setSenseMode(self, mode):
newMode = "'volt'"
if mode.lower() in ('c', 'current', 'curr'):
newMode = "'curr'"
st = 'sens:func '+newMode
self.instrument.write(st)
#cut off the leading/tailing quote marks
self.sensing = newMode[1:-1]
def setSenseRange(self, level):
if level>0:
#turn off autorange
lev = 'auto off'
self.write('sens:'+self.sensing+':rang:'+lev)
#set the range
lev = 'upp '+ str(level)
self.write('sens:'+self.sensing+':rang:'+lev)
else:
#Turn on autorange if negative number is given
lev = 'auto on'
self.write('sens:'+self.sensing+':rang:'+lev)
def setSourceRange(self, level):
if level>0:
#turn off autorange
lev = 'auto off'
self.write('sour:'+self.sourcing+':rang:'+lev)
#set the range
lev = 'upp '+level
self.write('sour:'+self.sourcing+':rang:'+lev)
else:
#Turn on autorange if negative number is given
lev = 'auto on'
self.write('sour:'+self.sourcing+':rang:'+lev)
def setCompliance(self, level):
pass
def set4Probe(self, flag):
doIt = 'OFF'
if flag:
doIt = 'ON'
def setScanParams(self,stop1,compliance):
points1 =41 #number of data points
start1 = 0# mA
points2 =81 #number of data points
start2 = stop1# mA
stop2 = -stop1 # mA *** SINGLE SCAN ***
points3 =21 #number of d06-10-2014ata points
start3 = -stop1# mA
stop3 = start1 # mA *** SINGLE SCAN ***
#END initialization
## Calculations
step = (1.0*stop1-1.0*start1)/(points1-1)
gainarr1 = np.arange(start1,stop1,step)
gainarr1 = np.append(gainarr1,stop1)
step = (1.0*stop2-1.0*start2)/(points2-1)
gainarr2= np.arange(start2,stop2,step)
gainarr2 = np.append(gainarr2,stop2)
step = (1.0*stop3-1.0*start3)/(points3-1)
gainarr3 = np.arange(start3,stop3,step)
gainarr3 = np.append(gainarr3,stop3)
gainarr = np.append(gainarr1, gainarr2)
self.gainarr = np.append(gainarr, gainarr3)
## END calculations06-10-2014
self.points = points1+points2+points3
self.instrument.write("sens:curr:prot:lev " + str(compliance))
def doScan(self):
#turn on output
self.instrument.write("OUTP ON")
data = np.zeros([self.points,2],float)
for i in range(0,self.points):
data[i,0] = self.gainarr[i]
self.setCurrent(self.gainarr[i])
data[i,1] = self.readValue()
self.instrument.write("OUTP OFF")
return data
def readValue(self):
ret = self.instrument.ask("read?").encode('ascii')
#comes back as a unicode, comma separated list of values
return float(ret.encode('ascii')[:-1].split(',')[1])
def setCurrent(self,current):
self.instrument.write("sour:curr:lev " + str(current))
def setVoltage(self, voltage):
self.instrument.write('sour:volt:lev ' + str(voltage))
def turnOn(self):
self.instrument.write('OUTP ON')
def turnOff(self):
self.instrument.write('OUTP OFF')
def write(self, command):
self.instrument.write(command)
|
[
"valovcin@physics.ucsb.edu"
] |
valovcin@physics.ucsb.edu
|
da8dc774c7f411cc5bce474e831ecbc71c8452b6
|
b5db781f2f20df948c69ce782ea56174ed38fa8f
|
/ex7.10.py
|
11579d5904d889e90b02269220d7e69ea71cd522
|
[] |
no_license
|
MuhammadZahid9596/Python-Crash-course
|
ecd805598892850d3c4fe2f6aa9fd4013ba8601e
|
39e574d90a57667185ddf50e76622e2084a0131b
|
refs/heads/master
| 2021-05-11T01:14:57.751567
| 2018-03-07T19:53:05
| 2018-03-07T19:53:05
| 118,322,264
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 350
|
py
|
poll_results={}
while True:
name = input('what is your name?')
place = input('where do you wanna go?')
stopper=input('would you like some one else to responf?')
poll_results[name]=place
if stopper !='yes':
break
for name,place in poll_results.items():
print(name.title()+' would like to visit '+place.title())
|
[
"noreply@github.com"
] |
MuhammadZahid9596.noreply@github.com
|
eceb66f9d207eea28d2851aa72477b974c8783d5
|
485f832364ef339edc426117d0bcff9c02b61df7
|
/FEM_1D.py
|
9a62832ca683f670eb948c1d35efb2fe0154034f
|
[] |
no_license
|
philiplukek/FEM
|
99b2ce4f0c980aa0cf69ad3a82eb1c5115c032d7
|
edbdeb7512ced213269971f05ad0c1ae1d497945
|
refs/heads/main
| 2023-05-25T02:49:02.525414
| 2021-06-10T13:13:51
| 2021-06-10T13:13:51
| 375,695,320
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 35,096
|
py
|
"""
Python Code to Solve 1-D Elastic Bar using FEM
Developed by Philip Luke K, as part of MIN-552 Coursework submission at IIT-Roorkee, October 2020
THIS IS A GENERALIZED FEM CODE TO SOLVE A 1-D LINEAR-ELASTIC BAR SUBJECTED TO AXIAL LOADS. Key Features of the code include :
1. Discretization into any number of elements.
2. Incorporates Linear and Quadratic elements.
3. Ability to handle any type of loading, boundary conditions,
c/s area and elasticity along the length.
4. Graphical representation of the primary mesh (structure and elements).
5. Graphical representation of the variation of c/s area.
6. Support conditions can be any combination of 'Free' and 'Fixed'.
7. Displacements and Reactions at all nodes in the domain can be obtained.
8. Numerical value of displacement, strain and stress at any point in the domain can be found out.
9. Graphical representation of the variation of Shape and Deformation Functions for the Element specified.
User Input :
(A) Main Function
1. Length of the structure
2. No:of elements in the mesh
3. Value of traction (if any)
4. Type of Structure
5. Type of element in the mesh
6. Type of Loading
(B) Class 'System'
1. C/S Area along length
2. Elasticity along length
(C) Class 'BoundaryConditions'
1. Displacement Boundary Conditions
2. Traction Boundary Conditions
(D) Class 'Loading'
1. Value(s) of external load along the length
For further detailed info, the user is requested to view the documentation of each class/method.
To view the Documentation of any class/method, user may execute the command <class_name.__doc__> and/or <class_name.method_name.__doc__>
Comments has been provided wherever necessary.
"""
# IMPORTING ESSENTIAL LIBRARIES
import numpy as np
import matplotlib.pyplot as plt
import math
# GLOBAL VARIABLES
n = 100 # No:of points for integration
N = n + 1
# CLASS DEFINITIONS GO FROM HERE
class Vertex :
"""Class used to define the X Co-ordinates of the nodes of element/mesh/structure"""
def __init__(self, xcoord) :
"""Takes the position of element/structure and generate it as Co-ordinate for the Edge class to calculate the element/structure length"""
self.xcoord = xcoord
def get_coordinate(self) :
"""Prints the Co-ordinate of the node.
Optional Function"""
print("X Cordinate of this point is :",self.xcoord)
return self.xcoord
class Edge :
"""Class used to define each element of the mesh"""
def __init__(self, v1, v2) :
"""Takes in two objects of Class Vertex and assigns to First and Last coordinate of the element"""
self.coord1 = v1.xcoord # Coordinate of the first node
self.coord2 = v2.xcoord # Coordinate of the end node
def ele_length(self) :
"""Calculates the length of each element by subtracting first coordinate from the end coordinate
Returns the element length"""
self.length = self.coord2 - self.coord1 # Length of each element
#print("Length of the element is : ",self.length)
return self.length
class Mesh :
"""Class used to generate the Finite Element Mesh for the structure """
def __init__(self, origin_vertex, end_vertex, struct, no_ele) :
"""Uses Vertex objects and Edge object to calculate the length of the structure based on the coordinates of the origin and end. Takes in the no:of elements defined by user, to generate mesh"""
self.origin = origin_vertex.xcoord
self.end = end_vertex.xcoord
self.no_ele = no_ele
self.len = struct.ele_length()
print("Length of the Structure is : ", self.len, "units")
def mesh_generate(self) :
"""Generates FE Mesh based on the no:of elements and structure length. """
self.ele_len = self.len/self.no_ele # Calculates Element Length
print("Length of each element of the mesh is : ", self.ele_len, "units")
self.xcoord_incr = self.origin
self.mesh = []
while(self.xcoord_incr < self.end) :
self.mesh.append(self.xcoord_incr)
self.xcoord_incr = self.xcoord_incr + self.ele_len
self.mesh.append(self.xcoord_incr)
print("Meshed points for the 1-D structure is: ", self.mesh) # Prints the FEMesh Coordinates
plt.axis('off')
plt.suptitle("1-D structure and Mesh")
plt.plot(self.mesh, np.ones(len(self.mesh)), marker='o') # Graphical Representation of the FEMesh
return self.mesh, self.ele_len
class System:
    """System definition over the mesh: cross-sectional area and elastic modulus."""

    def __init__(self, mesh, no_ele):
        """Store the mesh node coordinates and the number of elements.

        mesh   : 1-D array of node coordinates
        no_ele : number of elements in the mesh
        """
        self.mesh = mesh
        self.no_ele = no_ele

    def area(self):
        """Return (mesh_area, ele_area): per-node and per-element cross-sectional areas.

        area_type is hard-coded below and may be:
          Constant     - one value everywhere (area_value)
          Linear       - linear interpolation from area_value1 (origin) to area_value2 (end)
          User-Defined - prompts for the area at both end nodes of every element
        The element area is the mean of the element's two nodal areas.
        """
        self.area_type = "Constant"  # Constant, Linear, User-Defined
        if self.area_type == "Constant":
            self.area_value = 5.0  # value of the constant c/s area
            # FIX: size from self.mesh; previously read the module-level `mesh` global
            self.mesh_area = np.ones(len(self.mesh)) * self.area_value
        elif self.area_type == "Linear":
            self.area_value1 = 1.0  # value of area at origin
            self.area_value2 = 5.0  # value of area at end
            self.mesh_area = np.linspace(self.area_value1, self.area_value2, len(self.mesh))
        elif self.area_type == "User-Defined":
            print("Enter c/s area at end nodes of each element. Use same value at connected nodes")
            # FIX: size from self.mesh; previously read the module-level `mesh` global
            self.mesh_area = np.ones(len(self.mesh))
            for i in range(len(self.mesh) - 1):
                print("Element", i + 1)
                self.mesh_area[i] = input("First Node")
                self.mesh_area[i + 1] = input("Second Node")
        self.ele_area = np.zeros(self.no_ele)
        for i in range(self.no_ele):
            # averaging the area at end nodes to obtain the element-wise area
            self.ele_area[i] = (self.mesh_area[i] + self.mesh_area[i + 1]) / 2
        return self.mesh_area, self.ele_area

    def plot_area(self, mesh_area):
        """Plot the variation of c/s area along the structure (optional, purely visual)."""
        plt.figure()
        plt.axis('off')
        plt.suptitle("c/s Area of the structure : ")
        plt.plot(self.mesh_area, 'k')
        plt.plot(-self.mesh_area, 'k')

    def elasticity(self):
        """Return the per-element elastic-modulus vector.

        elast_type is hard-coded below and may be:
          Constant     - one value everywhere (elast_value)
          User-Defined - prompts for a value per element
        """
        self.elast_type = "Constant"  # Constant, User-Defined
        self.ele_elast = np.ones(self.no_ele)
        if self.elast_type == "Constant":
            self.elast_value = 200000000000.0  # value of constant Elasticity
            self.ele_elast *= self.elast_value
        elif self.elast_type == "User-Defined":
            print("Enter values of Elastic Modulus for each element.")
            for i in range(len(self.ele_elast)):
                print("\nElement ",)
                self.ele_elast[i] = input(i + 1)
        return self.ele_elast  # elasticity of each element in the mesh
class BoundaryCondition:
    """Builds the displacement/traction boundary conditions and the residue marker
    vector for the FE problem. The user drives everything through struct_type."""

    def __init__(self, ele_type, no_ele, struct_type, traction):
        """Derive elemental and global DOF from the element type and allocate the BC vectors.

        ele_type    : "Linear" (2 DOF/element) or "Quadratic" (3 DOF/element)
        no_ele      : number of elements
        struct_type : support configuration (see get_bc)
        traction    : traction magnitude applied at free ends (if any)
        """
        self.no_ele = no_ele
        self.ele_type = ele_type
        dof_of = {"Linear": 2, "Quadratic": 3}
        if self.ele_type not in dof_of:
            quit()  # unknown element type: hard stop, as before
        self.ele_dof = dof_of[self.ele_type]
        # Shared nodes between neighbouring elements are counted once:
        # gl_dof = no_ele*(ele_dof-1) + 1 (equivalent to the per-type formulas)
        self.gl_dof = self.no_ele * (self.ele_dof - 1) + 1
        self.disp_bc = np.ones(self.gl_dof)
        self.traction_bc = np.zeros(self.gl_dof)
        self.struct_type = struct_type
        self.traction_value = traction
        self.residue = np.zeros(self.gl_dof)

    def get_bc(self):
        """Return (ele_dof, gl_dof, disp_bc, traction_bc, residue).

        residue is 1 wherever a displacement BC pins a node. struct_type:
          Fixed-Free   : node 0 pinned, traction at the last node
          Free-Fixed   : last node pinned, traction at node 0
          Fixed-Fixed  : both end nodes pinned, no traction
          Free-Free    : no pins (unstable), traction at both ends
          User-Defined : displacement and traction entered per node interactively;
                         residue is derived from where displacement == 0
        """
        if self.struct_type == "Fixed-Free":
            self.disp_bc[0] = 0
            self.traction_bc[-1] = self.traction_value
            self.residue[0] = 1
        elif self.struct_type == "Free-Fixed":
            self.disp_bc[-1] = 0
            self.traction_bc[0] = self.traction_value
            self.residue[-1] = 1
        elif self.struct_type == "Fixed-Fixed":
            self.disp_bc[[0, -1]] = 0
            self.residue[[0, -1]] = 1
        elif self.struct_type == "Free-Free":
            self.traction_bc[[0, -1]] = self.traction_value
        elif self.struct_type == "User-Defined":
            print("Enter Displacement Boundary Condition at Each Co-ordinate. ")
            self.inp = np.arange(1, (self.gl_dof + 1), 1)
            for i, node in enumerate(self.inp):
                print(":\n")
                self.disp_bc[i] = input(node)  # displacement at each node, typed in
            print("Co-ordinates and corresponding Displacement Boundary Condition of the mesh is : \n", self.inp, "\n", self.disp_bc)
            self.flag = np.where(self.disp_bc == 0)
            self.residue[self.flag] = 1  # residue = 1 where displacement BC = 0
            print("Enter Traction Boundary Condition at Each Co-ordinate. Avoid the nodes where displacement Boundary Condition is already given")
            for i, node in enumerate(self.inp):
                print(":\n")
                self.traction_bc[i] = input(node)  # traction at each node, typed in
            print("Co-ordinates and corresponding Displacement Boundary Condition of the mesh is : \n", self.inp, "\n", self.traction_bc)
        return self.ele_dof, self.gl_dof, self.disp_bc, self.traction_bc, self.residue
class ShapeFunction:
    """Shape-function (N) and deformation-function (B) generator for a 1-D bar
    element, plus an optional plot of their variation over one element."""

    def __init__(self, ele_type, ele_len):
        """Record element type and length; derive the elemental DOF
        (2 for Linear, 3 for Quadratic, 0 otherwise)."""
        self.ele_type = ele_type
        self.ele_len = ele_len
        if self.ele_type == "Linear":
            self.ele_dof = 2
        elif self.ele_type == "Quadratic":
            self.ele_dof = 3
        else:
            self.ele_dof = 0

    def get_shape_function(self, x):
        """Return the column vector N evaluated at local position x (0..ele_len).

        Terminates the program for an unknown element type, as before."""
        self.ele_pos = x
        r = x / self.ele_len  # normalised coordinate in [0, 1]
        if self.ele_type == "Linear":
            self.N1 = 1 - r
            self.N2 = r
            self.N = np.array([[self.N1], [self.N2]])
        elif self.ele_type == "Quadratic":
            self.N1 = 1 - 3 * r + 2 * r * r
            self.N2 = 4 * r - 4 * r * r
            self.N3 = -r + 2 * r * r
            self.N = np.array([[self.N1], [self.N2], [self.N3]])
        else:
            print("Undefined Element Type. Program Terminating...")
            exit()
        return self.N

    def get_deformation_function(self, x):
        """Return the column vector B (dN/dx) evaluated at local position x.

        Terminates the program for an unknown element type, as before."""
        self.ele_pos = x
        L = self.ele_len
        if self.ele_type == "Linear":
            self.B1 = -1 / L
            self.B2 = 1 / L
            self.B = np.array([[self.B1], [self.B2]])
        elif self.ele_type == "Quadratic":
            # derivatives of the quadratic shape functions, written over L**2
            self.B1 = (4 * x - 3 * L) / (L * L)
            self.B2 = (4 * L - 8 * x) / (L * L)
            self.B3 = (4 * x - L) / (L * L)
            self.B = np.array([[self.B1], [self.B2], [self.B3]])
        else:
            print("Undefined Element Type. Program Terminating...")
            exit()
        return self.B

    def plot_shape_function(self):
        """Plot N and B sampled at 100 points across one element (optional)."""
        self.x = np.linspace(0, self.ele_len, 100)
        samples = len(self.x)
        self.N_value = np.zeros([self.ele_dof, 1, samples])
        self.B_value = np.zeros([self.ele_dof, 1, samples])
        for i, xi in enumerate(self.x):
            self.N_value[:, :, i] = self.get_shape_function(xi)
            self.B_value[:, :, i] = self.get_deformation_function(xi)
        self.x = self.x.reshape(1, samples)
        fig_N, axs_N = plt.subplots()
        fig_N.suptitle('Variation of Shape Function across each element')
        axs_N.axis([0, self.ele_len, 0, 1.0])
        fig_B, axs_B = plt.subplots()
        fig_B.suptitle('Variation of Deformation Function')
        for i in range(len(self.N_value)):
            axs_N.plot(self.x.transpose(), self.N_value[i].transpose())
            axs_B.plot(self.x.transpose(), self.B_value[i].transpose())
        return None
class Loading:
    """External body-load definition over the structure."""

    def __init__(self, load_type, mesh):
        """Store the load type; for "Discrete" loads, positions/values are set here.

        load_type : "Uniform", "Linear", "Quadratic" or "Discrete"
        mesh      : mesh node coordinates (kept for the caller's convenience)
        """
        self.load_type = load_type
        if self.load_type == "Discrete":
            # Values and positions of discrete forces in the domain (edit to suit)
            self.load_pos = np.array([0.33, 0.66])   # position(s) of the load in the domain
            self.load_value = np.array([3.0, -1.0])  # value(s) at the positions above
            self.flag = 0  # ensures each discrete load is emitted exactly once during integration

    def get_point_load(self, x, ele_len, ele_flag, dx):
        """Return (body-force value, integration length) at local position x of element ele_flag.

        Supported load types:
          Uniform   : b(x) = k (constant)
          Linear    : b(x) = a*x + c
          Quadratic : b(x) = a*x^2 + c*x + d
          Discrete  : point loads listed in __init__ (integration length 1)
        Constants are hard-coded below. Any other load type contributes nothing.
        """
        self.pos = (ele_flag - 1) * ele_len + x  # global position of the point in the domain
        if self.load_type == "Uniform":
            self.const_load = 10.0  # value of the constant load
            return self.const_load, dx
        elif self.load_type == "Linear":
            self.b_a = 5.0  # a in ax + c
            self.b_c = 0.0  # c in ax + c
            self.linear_load = self.b_a * self.pos + self.b_c
            return self.linear_load, dx
        elif self.load_type == "Quadratic":
            self.b_a = 10.0  # a in ax^2 + cx + d
            self.b_c = 0.0   # c in ax^2 + cx + d
            self.b_d = 0.0   # d in ax^2 + cx + d
            self.quad_load = self.b_a * self.pos * self.pos + self.b_c * self.pos + self.b_d
            return self.quad_load, dx
        elif self.load_type == "Discrete":
            for i in range(len(self.load_pos)):
                # The flag guarantees the discrete load is returned only once while
                # integrating over the element (several sample points fall within +/- dx).
                if self.pos - dx < self.load_pos[i] < self.pos + dx:
                    if self.flag == i:
                        self.b = self.load_value[i]
                        self.flag += 1
                        break
                    else:
                        self.b = 0
                else:
                    self.b = 0
            return self.b, 1
        else:
            # FIX: previously returned a bare 0, which crashed callers that unpack
            # `b, dx = get_point_load(...)`. Return a zero contribution instead.
            return 0.0, 0.0
class ElementStiffnessMatrix:
    """Builds the stiffness matrix of one element by numerical integration."""

    def __init__(self, ele_sf, area, elast, ele_dof, n_div=None):
        """Store the element's geometric/material data.

        ele_sf  : ShapeFunction instance for this element
        area    : element cross-sectional area
        elast   : element elastic modulus
        ele_dof : elemental degrees of freedom
        n_div   : number of integration points (defaults to the module-level N,
                  generalized from the previously hard-wired global)
        """
        # FIX: keep a reference to the shape function; get_ele_stiff previously
        # read the module-level `ele_sf` global instead of this argument.
        self.ele_sf = ele_sf
        self.ele_area = area
        self.ele_elast = elast
        self.ele_len = ele_sf.ele_len
        self.n_div = N if n_div is None else n_div
        self.x = np.linspace(0, self.ele_len, self.n_div)  # integration sample points
        self.dx = self.ele_len / self.n_div                # length of one division
        self.ele_dof = ele_dof
        self.k = np.zeros([self.ele_dof, self.ele_dof])

    def get_ele_stiff(self):
        """Return the element stiffness matrix: integral of B.B^T * A * E over the element.

        B is evaluated at every sample point and the contributions are summed
        (rectangle-rule integration).
        """
        for i in range(len(self.x)):
            self.B = self.ele_sf.get_deformation_function(self.x[i])
            self.Bt = self.B.transpose()
            self.k += (self.B @ self.Bt) * self.ele_area * self.ele_elast * self.dx
        return self.k
class ElementLoadMatrix:
    """Builds the consistent load vector of one element by numerical integration."""

    def __init__(self, ele_sf, struct_load, area, ele_dof, n_div=None):
        """Store the element's geometric data and the load definition.

        ele_sf      : ShapeFunction instance for this element
        struct_load : Loading instance describing the body force
        area        : element cross-sectional area
        ele_dof     : elemental degrees of freedom
        n_div       : number of integration points (defaults to the module-level N,
                      generalized from the previously hard-wired global)
        """
        # FIX: keep references; get_ele_load previously read the module-level
        # `ele_sf` and `struct_load` globals instead of these arguments.
        self.ele_sf = ele_sf
        self.struct_load = struct_load
        self.ele_len = ele_sf.ele_len
        self.ele_area = area
        self.n_div = N if n_div is None else n_div
        self.x = np.linspace(0, self.ele_len, self.n_div)  # integration sample points
        self.dx = self.ele_len / self.n_div                # length of one division
        self.ele_dof = ele_dof
        self.f = np.zeros([self.ele_dof, 1])

    def get_ele_load(self, ele_traction_bc, ele_flag):
        """Return the element load vector: integral of N^T * b over the element.

        ele_traction_bc : slice of the global traction BC covering this element's nodes
        ele_flag        : 1-based element number (used to locate discrete loads)
        """
        for i in range(len(self.x)):
            self.N = self.ele_sf.get_shape_function(self.x[i])
            self.b, self.dx1 = self.struct_load.get_point_load(self.x[i], self.ele_len, ele_flag, self.dx)
            self.f += self.N * self.b * self.dx1
        if any(ele_traction_bc):
            self.tra_pos = np.where(ele_traction_bc != 0)
            self.tra_val = ele_traction_bc[self.tra_pos[0]]
            # NOTE(review): this subtracts the traction term using N evaluated at the
            # last integration point only — presumably intended as an end-node
            # contribution; confirm against the formulation before relying on it.
            self.f -= self.N * self.ele_area * self.tra_val
        return self.f
class Solver :
    """Assembles the global system from the element matrices and solves K.U - F = R
    for the unknown nodal displacements and the support reactions."""
    def __init__(self, ele_type, no_ele, ele_dof, gl_dof, k, f) :
        """Initializes the class with stiffness matrices and force vectors for all elements,
        element type and no: of elements for the given problem.
        k : (ele_dof, ele_dof, no_ele) stack of element stiffness matrices
        f : (ele_dof, 1, no_ele) stack of element force vectors"""
        self.ele_type = ele_type
        self.no_ele = no_ele
        self.ele_dof = ele_dof
        self.gl_dof = gl_dof
        self.ele_k = k # multi-dimensional array having all element stiffness matrices
        self.ele_f = f # multi-dimensional array having all element force matrices
    def get_global_stiffness(self) :
        """Generates and returns the Global stiffness matrix (K) for the given system."""
        self.gl_stiff = np.zeros([self.gl_dof, self.gl_dof])
        for i in range(self.no_ele) :
            # Loop goes over the stiffness matrix of each element and joins the values of connected nodes as well.
            stiff = self.ele_k[:,:,i]
            # k holds the global DOF indices spanned by element i; neighbouring elements
            # start at (ele_dof-1)*i, so shared nodes overlap and their entries add up.
            k = np.zeros(self.ele_dof)
            for j in range(self.ele_dof) :
                k[j] = (self.ele_dof-1)*i + j
            # scatter-add the local entries (r, s) into the global positions (p, q)
            r,s = 0, 0
            for p in k :
                for q in k :
                    p, q = int(p), int(q)
                    self.gl_stiff[p, q] += stiff[r, s]
                    s += 1
                r += 1
                s=0
        return self.gl_stiff
    def get_global_force(self) :
        """Generates and returns the Global Force vector (F) for the given system."""
        self.gl_force = np.zeros([self.gl_dof,1])
        for i in range(self.no_ele) :
            force = self.ele_f[:,:,i]
            k = np.zeros(self.ele_dof)
            for j in range(self.ele_dof) :
                # global DOF indices spanned by element i (shared nodes overlap, as above)
                k[j] = (self.ele_dof-1)*i + j
            r = 0
            for p in k :
                p = int(p)
                self.gl_force[p] += force[r]
                r += 1
        return self.gl_force
    def get_nodal_displacement(self, disp_bc, residue) :
        """
        Calculates and returns the Displacement vector based on the equation 'KU-F = R'
        Input : Displacement Boundary Condition, Residue Vector
        Output : Displacement vector, Reaction vector
        """
        self.K = self.get_global_stiffness() # Global stiffness matrix
        self.F = self.get_global_force() # Global Force Vector
        self.U = np.zeros(self.gl_dof)
        self.disp_bc = disp_bc
        self.residue = residue
        self.fixeddof = np.where(self.residue == 1) # nodes where displacement is bounded
        self.fixeddof = self.fixeddof[0]
        self.freedof = np.where(self.residue != 1) # nodes where displacement is unbounded
        self.freedof = self.freedof[0]
        self.residue1 = np.delete(self.residue, self.fixeddof) # Residue vector for unknown nodal displacements
        self.F1 = np.delete(self.F, self.fixeddof) # Global force vector corresponding to unknown nodal displacements
        self.K1 = self.K
        for i in range(len(self.fixeddof)) :
            # Removes the rows and columns corresponding to restricted displacements from
            # the global stiffness matrix. `j-i` compensates for the rows/columns already
            # deleted in earlier iterations (indices shift left by one per deletion).
            j = self.fixeddof[i]
            self.K1 = np.delete(self.K1,j-i,0)
            self.K1 = np.delete(self.K1,j-i,1)
        self.U1 = np.linalg.inv(self.K1)@(self.residue1 + self.F1) # Solving K.U - F = R to get unknown displacement
        # Global displacement vector: solved values at free DOFs, prescribed values at fixed DOFs
        self.U = np.zeros(self.gl_dof)
        self.U[self.freedof] = self.U1
        self.U[self.fixeddof] = self.disp_bc[self.fixeddof]
        # Global residue (reaction) vector: R = K.U - F, reported at the fixed DOFs only
        self.R = np.zeros(self.gl_dof).reshape(self.gl_dof,1)
        self.R1 = self.K@self.U.reshape(self.gl_dof,1) - self.F # R = KU-F
        self.R[self.fixeddof] = self.R1[self.fixeddof]
        return self.U, self.R # returns the global displacement and reaction vector
class Results:
    """Post-processing: displacement, strain and stress at any point in the domain.

    Assumes a uniform element length (ele_len) so the containing element can be
    located by position alone.
    """

    def __init__(self, ele_len, ele_type, ele_dof, U, elast):
        """Store element/material data and the solved nodal displacement vector.

        ele_len : length of one element (uniform mesh assumed)
        ele_type: "Linear" or "Quadratic"
        ele_dof : elemental degrees of freedom
        U       : global nodal displacement vector
        elast   : per-element elastic modulus vector
        """
        self.ele_len = ele_len
        self.ele_type = ele_type
        self.ele_dof = ele_dof
        self.U = U
        self.elast = elast

    def get_ele_disp(self, x):
        """Return the nodal-displacement vector of the element containing position x."""
        self.ele_pos = x
        # FIX: clamp to element 1; math.ceil(0/L) == 0 previously produced element 0
        # and a negative node index into U for x == 0.
        self.ele = max(1, math.ceil(x / self.ele_len))
        self.ele_disp = np.zeros([self.ele_dof, 1])
        if self.ele_type == "Linear":
            self.node1 = (self.ele * self.ele_dof) - (self.ele + 1)  # first node of the element
            self.node2 = (self.ele * self.ele_dof) - (self.ele)      # end node of the element
            self.ele_disp = np.array([self.U[self.node1], self.U[self.node2]])
        elif self.ele_type == "Quadratic":
            self.node1 = (self.ele * self.ele_dof) - (self.ele + 2)  # first node of the element
            self.node2 = (self.ele * self.ele_dof) - (self.ele)      # end node of the element
            self.node3 = (self.ele * self.ele_dof) - (self.ele + 1)  # middle node of the element
            self.ele_disp = np.array([self.U[self.node1], self.U[self.node3], self.U[self.node2]])
        return self.ele_disp

    def get_displacement(self, x):
        """Return the interpolated displacement at position x (u = sum of N_i * d_i)."""
        self.ele_pos = x
        self.ele_sf = ShapeFunction(self.ele_type, self.ele_len)
        self.ele_disp = self.get_ele_disp(self.ele_pos)  # also sets self.ele
        self.pos = self.ele_pos - (self.ele - 1) * self.ele_len  # local coordinate in the element
        self.N_pos = self.ele_sf.get_shape_function(self.pos)
        self.pos_disp = 0
        for i in range(len(self.ele_disp)):
            self.pos_disp += self.ele_disp[i] * self.N_pos[i]
        return self.pos_disp

    def get_strain(self, x):
        """Return the strain at position x (e = B . d)."""
        self.ele_pos = x
        self.ele_disp = self.get_ele_disp(self.ele_pos)  # also sets self.ele
        self.pos = self.ele_pos - (self.ele - 1) * self.ele_len
        # FIX: build the shape function from the stored element type/length;
        # previously this read the module-level `ele_type`/`ele_len` globals.
        self.ele_sf = ShapeFunction(self.ele_type, self.ele_len)
        self.pos_strain = self.ele_sf.get_deformation_function(self.pos).transpose() @ self.ele_disp
        return self.pos_strain

    def get_stress(self, x):
        """Return the stress at position x (s = E * e)."""
        self.ele_pos = x
        self.pos_strain = self.get_strain(self.ele_pos)  # also sets self.ele
        self.ele_elast = self.elast[self.ele - 1]        # elastic modulus of that element
        self.pos_stress = self.pos_strain * self.ele_elast
        return self.pos_stress
"""
MAIN FUNCTION
Execution of the FE-Method go here. This is achieved by creating objects for the classes defined above.
For user-defined parameters that has multiple options, the options are mentioned as comments nearby the code.
"""
# System Properties
length = 10.0
struct_type = "Fixed-Fixed" # Fixed-Free, Free-Fixed, Fixed-Fixed
load_type = "Discrete" # Uniform, Discrete, Linear, Quadratic
traction = 0.0
no_ele = 5 # Number of elements
ele_type = "Linear" # Linear, Quadratic
bc = BoundaryCondition(ele_type, no_ele, struct_type, traction)
[ele_dof, gl_dof, disp_bc, traction_bc, residue] = bc.get_bc() # Obtains the boundary conditions for the problem
# ele_dof = 2 or 3
# disp_bc = np.array([])
# traction_bc = np.array([])
# residue = np.array([])
# User may define completely arbitrary Boundary Conditions above, but compatible with the no:of elements and element type
origin = Vertex(0)
end = Vertex(length)
struct = Edge(origin, end)
struct_mesh = Mesh(origin,end,struct,no_ele)
mesh, ele_len = struct_mesh.mesh_generate() # generates the FE Mesh for the problem.
# mesh = np.array([])
# User may define a completely arbitrary mesh above, but compatible with the no:of elements
sys = System(mesh, no_ele)
msh_area, area = sys.area()
elast = sys.elasticity()
# area = np.array([])
# elast = np.array([])
# User may define a completely arbitrary area/elasticity array above, but compatible with the no:of elements
sys.plot_area(msh_area) # Plots the variation of area along the length
struct_load = Loading(load_type, mesh)
ele_k = np.zeros([ele_dof, ele_dof, no_ele]) # multi-dimensional array to store all element-stiffness matrices
ele_f = np.zeros([ele_dof,1,no_ele])# multi-dimensional array to store all element-force vectors
for i in range(no_ele) :
# The loop travels through each element in the mesh and generates element stiffness matrix and element force vector which is stored in multi-dimensional arrays k and f respectively.
ele_flag = i +1 # element number in this loop
x0 = Vertex(mesh[i]) # First coordinate of the element in this loop
xl = Vertex(mesh[i+1]) # End coordinate of the element in this loop
ele = Edge(x0, xl)
ele_len = ele.ele_length() # Length of the element
ele_sf = ShapeFunction(ele_type, ele_len)
ele_stiff = ElementStiffnessMatrix(ele_sf, area[i], elast[i], ele_dof)
ele_k[:,:,i] = ele_stiff.get_ele_stiff()
print("\nStiffness matrix of element", i+1, "\n", ele_k[:,:,i])
ele_force = ElementLoadMatrix(ele_sf, struct_load, area[i], ele_dof)
ele_f[:,:,i] = ele_force.get_ele_load(traction_bc[i*(ele_dof-1): i*(ele_dof-1)+ele_dof], ele_flag)
print("\nForce vector of element", i+1, "\n", ele_f[:,:,i])
# Outside the loop.
ele_sf.plot_shape_function() # Plots the variation of shape and deformation functions over the element
solve = Solver(ele_type, no_ele, ele_dof, gl_dof, ele_k, ele_f) # Solver class is called to generate global K and global F.
U, R = solve.get_nodal_displacement(disp_bc, residue) # Displacement vector and residue/reaction vector
print("Displacement Vector is : ",U)
print("Reaction Vector is : ", R)
res = Results(ele_len, ele_type, ele_dof, U, elast) # To generate results
x = np.linspace(0,length,201) # set of points in the domain where results need to be found out
x1 = 0.5 # point in the domain where results need to be found out
stress =np.zeros(len(x))
strain = np.zeros(len(x))
disp = np.zeros(len(x))
print(res.get_displacement(x1)) # Prints displacement value at x1
print(res.get_strain(x1)) # Prints strain value at x1
print(res.get_stress(x1)) # Prints stress value at x1
for i in range(len(x)) :
# Collects results at all the points in x array
disp[i] = res.get_displacement(x[i])
plt.figure()
plt.xlabel("length of the domain")
plt.ylabel("displacement (m)")
plt.suptitle("2 linear element ")
plt.plot(x, disp)
#####################################################################
############################ END OF CODE ############################
#####################################################################
|
[
"philip_luke@ce.iitr.ac.in"
] |
philip_luke@ce.iitr.ac.in
|
b1ddcbd53d4265b9f3450c6c7734505502f2bc17
|
b196fc08f2c832a9f4df26e9f2bff249cab9ebb8
|
/run_firewall.py
|
2abf4c1b2ddd19361ddfe50e3d53fd53e7a68af6
|
[] |
no_license
|
kbuci/FirewallAssignment
|
e638233d0212b17e02b385ea5425bf6f4f37cc2f
|
52ce3ff60ed742f691ef61db6296969c74a2b445
|
refs/heads/master
| 2020-09-04T22:20:25.748073
| 2019-11-06T04:10:01
| 2019-11-06T04:10:01
| 219,907,603
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 959
|
py
|
from firewall import Firewall

if __name__ == "__main__":
    # Sanity-check accept/reject decisions against the rules in prompt_input.csv.
    fw = Firewall("prompt_input.csv")
    assert fw.accept_packet("inbound", "tcp", 80, "192.168.1.2")        # matches first rule
    assert fw.accept_packet("inbound", "udp", 53, "192.168.2.1")        # matches third rule
    assert fw.accept_packet("outbound", "tcp", 10234, "192.168.10.11")  # matches second rule true
    assert not fw.accept_packet("inbound", "tcp", 81, "192.168.1.2")
    assert not fw.accept_packet("inbound", "udp", 24, "52.12.48.92")

    # Overlapping port-interval handling, using overlap_ranges_input.csv.
    fw = Firewall("overlap_ranges_input.csv")
    assert fw.accept_packet("outbound", "tcp", 14000, "192.168.10.12")
    assert fw.accept_packet("outbound", "tcp", 14000, "192.168.10.11")
    assert not fw.accept_packet("outbound", "tcp", 10000, "192.168.10.12")
    assert not fw.accept_packet("outbound", "tcp", 13000, "192.168.10.16")
    assert fw.accept_packet("outbound", "tcp", 15000, "192.168.10.16")
|
[
"noreply@github.com"
] |
kbuci.noreply@github.com
|
c4144409f9e09516442d5488de13c377c98fbd9c
|
b2fe343d6ec73b627b763d984df7fa3b97a8886d
|
/grabber/views.py
|
f43acad4cdb3e949c79642ee15f46787cfdaa4df
|
[] |
no_license
|
zlyxero/phone-grabber
|
721275b379291efd99db0affae740dc20817f153
|
3f4e013ecb218da7759822b3ba2e8048ced76c40
|
refs/heads/master
| 2021-09-11T15:52:29.744435
| 2018-04-09T15:30:08
| 2018-04-09T15:30:08
| 125,367,433
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,245
|
py
|
from django.shortcuts import render, redirect
from .forms import FTP_Login_Form
from django.views import View
from .models import ConnectionData
import os
from ftplib import FTP, error_perm
from .mixin import log_in_to_ftp
# Create your views here.
class FTP_Login(View):
    """Login view: GET renders the FTP login form; POST validates it, tests the
    connection and persists the host/port."""

    def get(self, request):
        """Display the FTP login form."""
        form = FTP_Login_Form()
        return render(request, 'index.html', {'form': form})

    def post(self, request):
        """Validate the form fields and attempt an anonymous FTP login.

        On success the host/port are stored in the single ConnectionData row
        (id=1); previous login details are overwritten. On connection failure
        the form is re-rendered with a login error; on invalid form input a
        form error is shown.
        """
        form = FTP_Login_Form(request.POST)
        if form.is_valid():
            # get login fields data
            host = form.cleaned_data['host']
            port = form.cleaned_data['port']
            # attempt to login
            try:
                ftp = FTP()
                ftp.connect(host, port)
                ftp.login()
                # The program is only meant to work with one database object (id=1):
                # overwrite it if it exists, otherwise create it.
                data_queryset = ConnectionData.objects.filter(id=1)
                if data_queryset:
                    data = data_queryset[0]
                    data.host = host
                    data.port = port
                    data.save()
                else:
                    ConnectionData.objects.create(id=1, host=host, port=port)
                return redirect('navigator')
            # FIX: narrowed from a bare `except:`, which also swallowed
            # SystemExit/KeyboardInterrupt.
            except Exception:
                return render(request, 'errors.html', {'form': form, 'error': 'login_error'})
        return render(request, 'index.html', {'form': form, 'error': 'form_error'})
def navigator(request):
    """List the FTP directory at the requested path.

    The request URL has the form '/navigator/<path>'; everything after the
    '/navigator' prefix is used as the absolute path on the FTP server.
    Renders listing.html with sorted, lower-cased file and directory names.
    """
    # check connection
    try:
        ftp = log_in_to_ftp()  # function defined in mixin.py
    except Exception:
        return render(request, 'errors.html', {'error': 'connection_lost_error'})
    # Slice out the absolute path of the requested directory by removing the
    # prepended '/navigator' string from the request URL.
    path = request.path
    directory_path = path[10:]
    # change to the requested directory
    try:
        ftp.cwd(directory_path)
    except error_perm:
        # FIX: the redirect was previously computed but never returned, so a bad
        # path silently fell through and listed the old working directory.
        print("There was an error!! Directory may be nonexistent")
        return redirect("navigator")
    items_list = []     # will hold both files and directories
    long_listing = []   # same entries in long-listing format (permission string first)
    file_list = []      # will only hold files
    dir_list = []       # will only hold directories

    def add_to_items_list(file_string):
        """Callback for ftp.retrlines(): collects bare entry names."""
        items_list.append(file_string)

    def add_to_long_listing(file_string):
        """Callback for ftp.dir(): collects long-format entry lines."""
        long_listing.append(file_string)

    # retrieve directory listing (names and long format in parallel)
    ftp.retrlines('NLST', add_to_items_list)
    ftp.dir(add_to_long_listing)
    # A long-listing line starting with 'd' marks a directory.
    for a, b in zip(items_list, long_listing):
        if b.startswith('d'):
            dir_list.append(a.lower())
        else:
            file_list.append(a.lower())
    dir_list.sort()
    file_list.sort()
    return render(request, 'listing.html', {'file_list': file_list, 'dir_list': dir_list, 'path': directory_path})
class FileDownload(View):
    """Downloads the files selected on the listing page to a local directory."""

    def post(self, request):
        """Fetch every selected file from the FTP server into download_path.

        Renders downloads.html with the downloaded file names, or an error page
        when the destination path does not exist.
        """
        # get selected download path and check that it exists
        download_path = request.POST.get('download_path')
        if not os.path.exists(download_path):
            return render(request, 'errors.html', {'error': 'download_path_error'})
        # use getlist (not get) to retrieve the checkbox list from POST
        # see https://code.djangoproject.com/ticket/1130
        selected_files_list = request.POST.getlist('files')
        ftp = log_in_to_ftp()
        if ftp == 'error':
            return redirect('login')

        def getfile(save_path, file_path, ftp):
            """Fetch file_path from the FTP server and save it under save_path.

            save_path : local directory for downloaded files
            file_path : absolute path of the file on the FTP server
            """
            def writer(chunk):
                """Callback for retrbinary(): writes each chunk to the open file."""
                file_object.write(chunk)

            # filename = last component of the remote path
            parts = file_path.split('/')
            file_name = parts[-1]
            save_path = '{}/{}'.format(save_path, file_name)
            with open(save_path, 'wb') as file_object:
                # FIX: use the file_path parameter; previously this read the outer
                # loop variable `file`, coupling the helper to its call site.
                ftp.retrbinary('RETR ' + file_path, writer, blocksize=10000)

        # download selected files
        for file in selected_files_list:
            getfile(download_path, file, ftp)
        # format downloaded file names for display on the front end
        files = [file[1:] for file in selected_files_list]
        return render(request, 'downloads.html', {'files': files})
|
[
"mitchyjeffy@gmail.com"
] |
mitchyjeffy@gmail.com
|
bbb5a22ab6340f09c9d9c0b587b7b62647b7d439
|
9bbd3831f7160788f79293980a530f3bf30ac919
|
/news/migrations/0002_auto_20200519_1109.py
|
b516f8df9979144aa2bc0d65135517d662bc9082
|
[] |
no_license
|
martingathu/moringa-tribune
|
88c426a6e8c32515f5bf2923c222601f2f2c1584
|
866b6a8ef68131196d50e18814824ddba059b861
|
refs/heads/master
| 2023-08-04T05:05:56.388538
| 2020-05-21T07:13:49
| 2020-05-21T07:13:49
| 265,782,760
| 0
| 0
| null | 2021-09-22T19:03:11
| 2020-05-21T07:23:37
|
Python
|
UTF-8
|
Python
| false
| false
| 623
|
py
|
# Generated by Django 3.0.6 on 2020-05-19 08:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: create the ``tags`` model and set Editor ordering."""
    dependencies = [
        ('news', '0001_initial'),
    ]
    operations = [
        # New table holding article tags (auto PK + 30-char name).
        migrations.CreateModel(
            name='tags',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
            ],
        ),
        # Default queryset ordering for Editor becomes first_name ascending.
        migrations.AlterModelOptions(
            name='editor',
            options={'ordering': ['first_name']},
        ),
    ]
|
[
"martin5gathu@gmail.com"
] |
martin5gathu@gmail.com
|
26a00630aeba6a6ae67c356e67ad7108f664c08b
|
2aec9c5e8c72b731d3abf22f2a407fe09c1cde09
|
/ZQZ510/ZQZ510/pipelines.py
|
96d17ebedbe541b8ea71011896e82ef784f24a35
|
[] |
no_license
|
jiangyg/ZWFproject
|
8b24cc34970ae0a9c2a2b0039dc527c83a5862b5
|
aa35bc59566d92721f23d2dd00b0febd268ac2dd
|
refs/heads/master
| 2020-09-26T17:01:00.229380
| 2019-11-15T13:16:21
| 2019-11-15T13:16:21
| 226,297,631
| 0
| 1
| null | 2019-12-06T09:55:37
| 2019-12-06T09:55:36
| null |
UTF-8
|
Python
| false
| false
| 542
|
py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
class Zqz510Pipeline(object):
    """Scrapy item pipeline that appends every scraped item to a JSON-lines file."""

    def open_spider(self, spider):
        # One output file per crawl, opened when the spider starts.
        self.file = open('./zqz_data.json', 'w+', encoding='utf-8')

    def process_item(self, item, spider):
        # Serialize the item as a single JSON object per line, keeping
        # non-ASCII text readable (ensure_ascii=False).
        line = json.dumps(dict(item), ensure_ascii=False)
        self.file.write(line + '\n')
        return item

    def close_spider(self, spider):
        # Release the output file once the crawl finishes.
        self.file.close()
|
[
"34021500@qq.com"
] |
34021500@qq.com
|
69945bc96e07be5b35d5a63568feef62af072449
|
1d000c111cc5b910b002ba65d1dd0043bf1091a6
|
/commerce/migrations/0006_auto_20151130_1636.py
|
b3532a0988b06864613efc5f674669e2a02d2a58
|
[] |
no_license
|
oreon/gencomm
|
0f0957457f280eb8dc79cf6568d6d8aee0d8f02c
|
6f85ad975ad95a2b4eb83390bbc86202e3b0dede
|
refs/heads/master
| 2023-04-14T12:24:49.979930
| 2016-05-09T18:54:56
| 2016-05-09T18:54:56
| 47,437,502
| 1
| 0
| null | 2023-09-03T20:12:37
| 2015-12-05T01:39:24
|
Python
|
UTF-8
|
Python
| false
| false
| 779
|
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: drop Employee address columns, constrain gender.

    Removes the free-form city/province/street fields from Employee and
    restricts gender to single-character choices (M/F/U).
    """
    dependencies = [
        ('commerce', '0005_auto_20151028_1120'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='employee',
            name='city',
        ),
        migrations.RemoveField(
            model_name='employee',
            name='province',
        ),
        migrations.RemoveField(
            model_name='employee',
            name='street',
        ),
        # Gender becomes a constrained 1-char choice field.
        migrations.AlterField(
            model_name='employee',
            name='gender',
            field=models.CharField(max_length=1, choices=[('M', 'Male'), ('F', 'Female'), ('U', 'Unknown')]),
        ),
    ]
|
[
"jagdeep.singh@mobilelive.com"
] |
jagdeep.singh@mobilelive.com
|
b337c4c75669d3e9abc434f3274411c1037886a1
|
ad06d319d1dd651b8a802848913f4cdfc40085f0
|
/specific_keras_functionality/001_antirectifier.py
|
1d8a531d682d5c12bda41621b41c34572694db79
|
[
"MIT"
] |
permissive
|
Daniel1586/Initiative_Keras_tutorials
|
132c0cfdfd892adaf087a79a6ed6572275f05531
|
7129ed42f4f148da7f97d5ab164de07cae4814b6
|
refs/heads/master
| 2020-03-18T06:47:32.676685
| 2018-06-18T13:37:57
| 2018-06-18T13:37:57
| 134,415,548
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,840
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# 如何在keras中编写自定义层
"""
The example demonstrates how to write custom layers for Keras. We build a custom activation
layer called 'Antirectifier', which modifies the shape of the tensor that passes through it.
We need to specify two methods: `compute_output_shape` and `call`.
Note that the same result can also be achieved via a Lambda layer. Because our custom layer
is written with primitives from the Keras backend (`K`), our code can run both on TensorFlow
and Theano.
"""
# Output after 40 epochs on CPU(i5-7500): ~0.9818
import keras
from keras import layers
from keras import backend as K
from keras.datasets import mnist
from keras.models import Sequential
# 自定义层Antirectifier
class Antirectifier(layers.Layer):
    """Sample-wise L2 normalization followed by the concatenation of the
    positive and negative parts of the input, producing samples twice as
    wide as the input.  Can be used in place of a ReLU.

    # Input shape
        2D tensor of shape (samples, n)
    # Output shape
        2D tensor of shape (samples, 2*n)
    # Theoretical justification
        When applying ReLU to a distribution approximately centered around
        0., roughly half of the input is discarded, which is inefficient.
        Antirectifier returns all-positive outputs like ReLU without
        discarding any data.  Tests on MNIST show it allows training
        networks with twice fewer parameters yet comparable classification
        accuracy to an equivalent ReLU-based network.
    """
    def compute_output_shape(self, input_shape):
        # The last axis doubles: (samples, n) -> (samples, 2*n).
        dims = list(input_shape)
        assert len(dims) == 2  # only valid for 2D tensors
        return tuple(dims[:-1] + [dims[-1] * 2])
    def call(self, inputs):
        # Center each sample, L2-normalize it, then concatenate the ReLU of
        # the positive and negative halves along the feature axis.
        centered = inputs - K.mean(inputs, axis=1, keepdims=True)
        normalized = K.l2_normalize(centered, axis=1)
        return K.concatenate([K.relu(normalized), K.relu(-normalized)], axis=1)
# global parameters
batch_size = 128
num_classes = 10
epochs = 40

# the data, split between train and test sets
# MNIST dataset: 60,000 training images, 10,000 test images
print('========== 1.Loading data...')
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# flatten each 28x28 image to a 784-dim vector and scale pixels to [0, 1]
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype('float32')
x_test = x_test.astype('float32')
x_train /= 255
x_test /= 255
print('----- x_train shape:', x_train.shape)
print('----- x_test shape:', x_test.shape)

# convert class vectors to binary class matrices
# (one-hot encode the integer class labels of the train/test sets)
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

# build the neural-network model (output shape of each layer noted inline)
print('========== 2.Building model...')
model = Sequential()
model.add(layers.Dense(256, input_shape=(784,)))  # output (*, 256)
model.add(Antirectifier())  # output (*, 512)
model.add(layers.Dropout(0.1))  # output (*, 512)
model.add(layers.Dense(256))  # output (*, 256)
model.add(Antirectifier())  # output (*, 512)
model.add(layers.Dropout(0.1))  # output (*, 512)
model.add(layers.Dense(num_classes))  # output (*, 10)
model.add(layers.Activation('softmax'))  # output (*, 10)

# compile the model
model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
model.summary()

# train the model
model.fit(x_train, y_train, batch_size=batch_size, epochs=epochs, verbose=1,
          validation_data=(x_test, y_test))
|
[
"33886608+Daniel1586@users.noreply.github.com"
] |
33886608+Daniel1586@users.noreply.github.com
|
560d28d47aec3beddae995957b47f2a586147262
|
153995fa868b4697d8d6b25379a16f9756604151
|
/student/migrations/0003_auto_20180530_1427.py
|
23c1d56c1bf88a956e612254eb17747ba36e63f8
|
[] |
no_license
|
Manju1313/django-school
|
816c13259654c4f57352add903cc13e3915f3724
|
1182de09e9b638a2a4f328024f6bc6807eff6029
|
refs/heads/master
| 2023-03-21T22:44:59.002131
| 2020-08-15T14:34:19
| 2020-08-15T14:34:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,414
|
py
|
# Generated by Django 2.0.4 on 2018-05-30 18:27
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: normalize phone-number fields.

    Replaces Student.stu_phone_number with a validated ``phone_number``
    field (via remove+add — NOTE(review): existing values are NOT copied
    over by this pair of operations) and applies the same international
    format validator to Guardian and Instructor.
    """
    dependencies = [
        ('student', '0002_auto_20180530_1421'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='student',
            name='stu_phone_number',
        ),
        # Replacement field: optional, up to 15 digits with an optional +1 prefix.
        migrations.AddField(
            model_name='student',
            name='phone_number',
            field=models.CharField(blank=True, max_length=17, validators=[django.core.validators.RegexValidator(message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", regex='^\\+?1?\\d{9,15}$')]),
        ),
        migrations.AlterField(
            model_name='guardian',
            name='phone_number',
            field=models.CharField(blank=True, max_length=17, validators=[django.core.validators.RegexValidator(message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", regex='^\\+?1?\\d{9,15}$')]),
        ),
        migrations.AlterField(
            model_name='instructor',
            name='phone_number',
            field=models.CharField(blank=True, max_length=17, validators=[django.core.validators.RegexValidator(message="Phone number must be entered in the format: '+999999999'. Up to 15 digits allowed.", regex='^\\+?1?\\d{9,15}$')]),
        ),
    ]
|
[
"dihfahsihm@gmail.com"
] |
dihfahsihm@gmail.com
|
11ac4ad394b8e5dc3d3286dcc1047702be415edf
|
d1cf52f3b9743930a45d6533f9eeca4603360268
|
/apps/players/tests/factories.py
|
1db31453f99d38a1fa9e68bca32e510ae9a7075b
|
[] |
no_license
|
adamkberard/Django-DayOfDie
|
3992a763d1147f4921eb7592c5f3a45b045ce325
|
3b71440bf3db1cb609b778a47272410c00541828
|
refs/heads/development
| 2023-07-31T04:17:28.775785
| 2021-09-30T21:43:35
| 2021-09-30T21:43:35
| 343,927,519
| 0
| 0
| null | 2021-09-30T21:43:36
| 2021-03-02T22:13:42
|
Python
|
UTF-8
|
Python
| false
| false
| 537
|
py
|
import factory
from apps.players.models import Player
# Common variable: shared plaintext password for every factory-built account.
DEFAULT_PASSWORD = 'pass4user'
class PlayerFactory(factory.django.DjangoModelFactory):
    """Factory that builds Player accounts with unique emails for tests."""
    class Meta:
        model = Player
    # Unique email per instance: "0@example.com", "1@example.com", ...
    email = factory.Sequence(lambda n: '%d@example.com' % n)
    password = DEFAULT_PASSWORD
    @classmethod
    def _create(cls, model_class, *args, **kwargs):
        """Create an instance of the model, and save it to the database.

        Routed through the custom manager's create_user() — presumably so
        the password is hashed instead of stored raw (confirm against the
        Player manager).  NOTE(review): positional *args and any kwargs
        other than email/password are silently dropped here.
        """
        return Player.objects.create_user(email=kwargs['email'], password=kwargs['password'])
|
[
"adamkberard@gmail.com"
] |
adamkberard@gmail.com
|
ab0a9a101fe7c2a2f1ddd2a2c84c5c685e9c4320
|
ca5936d1417a90cc6bf702e11dd395e9487721fd
|
/azure_client.py
|
62515c974fa86ee6396c4980cab358b09bf4a7fa
|
[] |
no_license
|
JMFrmg/lecture_recettes
|
a826a3de4846b2a076a92f2c8ce4f517044d8fa2
|
cff6b0cf124def68cb54dbe6d414170d87639be7
|
refs/heads/master
| 2023-03-01T01:27:39.496464
| 2021-02-01T13:26:06
| 2021-02-01T13:26:06
| 334,927,789
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,218
|
py
|
from azure.cognitiveservices.vision.contentmoderator import ContentModeratorClient
from azure.ai.formrecognizer import FormRecognizerClient
from azure.cognitiveservices.vision.computervision import ComputerVisionClient
from azure.ai.textanalytics import TextAnalyticsClient
from azure.core.credentials import AzureKeyCredential
from msrest.authentication import CognitiveServicesCredentials
def get_api_creds(ressource):
    """Read API credentials for *ressource* from the local ``keys.txt`` file.

    ``keys.txt`` holds one ``<name> <value>`` pair per line, e.g.
    ``form_recognizer_endpoint https://...`` and ``form_recognizer_key abc``.

    Returns a ``(endpoint, key)`` tuple for the requested resource.
    Raises KeyError if either entry is missing from keys.txt.
    """
    # Context manager guarantees the handle is closed even if parsing fails
    # (the original closed it manually, leaking the handle on error).
    with open("keys.txt", "r") as f:
        keys = f.readlines()
    # First space-separated token is the setting name, second is its value.
    creds_dict = {l.split(" ")[0].strip(): l.split(" ")[1].strip() for l in keys}
    return (creds_dict[ressource + "_endpoint"], creds_dict[ressource + "_key"])
def form_recognizer_client():
    """Build a FormRecognizerClient from the credentials stored in keys.txt."""
    endpoint, api_key = get_api_creds("form_recognizer")
    return FormRecognizerClient(endpoint,
                                AzureKeyCredential(api_key))
def computer_vision_client():
    """Build a ComputerVisionClient from the credentials stored in keys.txt."""
    endpoint, api_key = get_api_creds("computer_vision")
    return ComputerVisionClient(endpoint,
                                CognitiveServicesCredentials(api_key))
|
[
"matthieu.colombert@gmail.com"
] |
matthieu.colombert@gmail.com
|
8cbb0199476d4a0ff738d2012c7bde1daee5d0e7
|
496e05014492b4bbecf9f15c40ae416c21e27a46
|
/src/outpost/django/api/serializers.py
|
f3b19c6cac6763725dbcfae9ac299911d7d02ba2
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
medunigraz/outpost_deprecated
|
b1ff802054c04cf989b3b660e132fa6a1c2a078c
|
bc88eaa3bb504d394fdf13f1131e40db27759c89
|
refs/heads/master
| 2022-01-23T15:46:34.859095
| 2019-05-21T08:38:11
| 2019-05-21T08:38:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,266
|
py
|
import re
from base64 import (
b64decode,
urlsafe_b64encode,
)
from pathlib import PurePosixPath
from uuid import uuid4
import six
from django.core.files.base import ContentFile
from drf_haystack.serializers import HaystackSerializer
from rest_framework.serializers import (
FileField,
IntegerField,
SerializerMethodField,
)
from outpost.django.geo import search_indexes as geo
from outpost.django.structure import search_indexes as structure
class AutocompleteSerializer(HaystackSerializer):
    """Serializes search-index hits (rooms, organizations, persons) for
    autocomplete queries; the ``q`` request parameter maps onto the
    ``autocomplete`` index field via ``field_aliases``."""
    # Expose the model primary key as a plain integer "id".
    id = IntegerField(source='pk')
    # Content-type marker so clients can distinguish room/org/person hits.
    ctype = SerializerMethodField()
    class Meta:
        index_classes = [
            geo.RoomIndex,
            structure.OrganizationIndex,
            structure.PersonIndex,
        ]
        fields = [
            'presentation',
            'id',
            'ctype',
            'level_id',
            'room_id',
            'autocomplete',
        ]
        ignore_fields = [
            'text',
            'autocomplete',
        ]
        field_aliases = {
            'q': 'autocomplete',
        }
    def get_ctype(self, obj):
        # Delegates to the search-index result's content_type() helper.
        return obj.content_type()
class Base64FileField(FileField):
    """
    A Django REST framework field for handling file-uploads through raw post
    data. It uses base64 for encoding and decoding the contents of the file.

    Accepts either a regular uploaded file or a ``data:<mime>;base64,...``
    data-URI string; the latter is decoded and wrapped in a ContentFile
    with a random URL-safe name before normal FileField validation runs.

    Heavily based on
    https://stackoverflow.com/a/28036805
    """
    # Matches (and is used to strip) the "data:<mime>;base64," prefix.
    parser = re.compile(r'^data:(?P<mimetype>.*?);base64,')
    def to_internal_value(self, raw):
        # Check if this is a base64 string
        if isinstance(raw, six.string_types):
            header = self.parser.match(raw)
            # Check if the base64 string is in the "data:" format
            if header:
                try:
                    decoded_file = b64decode(self.parser.sub('', raw))
                except TypeError:
                    # NOTE(review): 'invalid_image' is the error key from the
                    # ImageField snippet this is based on — confirm a matching
                    # error message exists for this FileField subclass.
                    self.fail('invalid_image')
                # Generate file name:
                # random, URL-safe, padding-free name derived from a UUID
                p = PurePosixPath()
                uid = uuid4().bytes
                u = urlsafe_b64encode(uid).decode('ascii').rstrip('=')
                filename = p.joinpath(u).as_posix()
                raw = ContentFile(decoded_file, name=filename)
        return super(Base64FileField, self).to_internal_value(raw)
|
[
"michael@fladi.at"
] |
michael@fladi.at
|
a32498a9fb63bef9a6dc7a709925c7c54c530ba9
|
9680200efd5a5f971f91084ef07e20ac76f6aab7
|
/tests/unit/rankings/score_tests.py
|
19c0e9fd1856ca5f5829a9b0101a9d387edd4e6e
|
[] |
no_license
|
jnclt/ranking_service
|
651b1747fc20ccc5a0f3d5151482b1cba8bba3b2
|
2506fe6b44611d5959a2b2ba971cb1071d8e08e1
|
refs/heads/master
| 2023-08-29T14:30:28.345917
| 2013-11-23T11:31:44
| 2013-11-23T11:31:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 336
|
py
|
from django.test import TestCase
from django.db.models import fields
from rankings.models import Score
class ScoreModelTest(TestCase):
    """Schema-level checks for the Score model."""

    def test_should_contain_decimal_value_field(self):
        # Score.value must be stored as a DecimalField (exact, not float).
        field = Score._meta.get_field(name='value', many_to_many=False)
        self.assertIsInstance(field, fields.DecimalField)
|
[
"jan.calta@gmail.com"
] |
jan.calta@gmail.com
|
de26f435d64fb5960218212130a82f90ba3b15ee
|
99afa8ce4d34998374edccd4ebc04375f87e1149
|
/Week_5/Day5/Daily_Challenge/daily_challenge2.py
|
e7986c92c1f10473105ce376f2d87ac562c2b26b
|
[] |
no_license
|
Yona-Dav/DI_Bootcamp
|
34077f51d50941b0f7d20db8d4e7cfc2a4e16f6f
|
c72ea034dcd70414b886ec2051eb1d7a1baf7eaa
|
refs/heads/main
| 2023-09-05T15:43:35.225028
| 2021-11-18T17:43:08
| 2021-11-18T17:43:08
| 413,339,914
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,069
|
py
|
# Part 1 : Quizz :
# Answer the following questions
# What is a class? A class is a code template for creating objects. Objects have member variables and behaviour associated with them.
# What is an instance? An object is created using the constructor of the class. This object will then be called the instance of the class.
# What is encapsulation? We can restrict access to methods and variables. This prevents data from direct modification, which is called encapsulation.
# What is inheritance? Inheritance is the process by which one class takes on the attributes and methods of another.
# What is multiple inheritance? A class can inherit from two different classes; in this case, the order of the parent class in a class definition will decide what will be inherited
# What is polymorphism? Polymorphism is the ability to use a standard interface for multiple forms or data types.
# What is method resolution order or MRO? Method Resolution Order(MRO) it denotes the way a programming language resolves a method or attribute.
# Part 2: Create A Deck Of Cards Class.
import random
class Card:
    """A single playing card with a suit and a face value."""

    def __init__(self, suit, value):
        self.suit = suit
        self.value = value

    def __repr__(self):
        # e.g. "A Hearts" or "10 Spades"
        return '{} {}'.format(self.value, self.suit)
class Deck(Card):
    """A standard 52-card deck built from Card objects.

    NOTE(review): inheriting from Card is questionable (a deck is not a
    card, and Card.__init__ is never called) but is kept so existing
    isinstance checks keep working.
    """

    def __init__(self):
        self.cards = []
        self.__game()
        print(self.cards)

    def __game(self):
        # Build the full deck: every value in every suit.
        for suit in ['Hearts', 'Diamonds', 'Clubs', 'Spades']:
            for value in ['A', '2', '3', '4', '5', '6', '7', '8', '9', '10', 'J', 'Q', 'K']:
                self.cards.append(Card(suit, value))

    def shuffle(self):
        """Shuffle a full deck in place; raise if any card has been dealt."""
        # BUG FIX: the original tested len(set(self.cards)) == 52, but Card
        # defines no __eq__/__hash__, so the set deduplicated by identity and
        # the expression was just an allocation-heavy way of counting cards.
        if len(self.cards) == 52:
            random.shuffle(self.cards)
            return 'The card game deck was shuffled'
        else:
            raise Exception('The number of card is not 52 ')

    def deal(self):
        """Remove the top card and report it, or report an empty deck."""
        if self.cards:
            card = self.cards.pop()
            return 'The draw card is {}'.format(card)
        else:
            return 'The card game deck is empty'
# Demo: build a deck (prints its contents), shuffle it, then deal the top card.
test = Deck()
print(test.shuffle())
print(test.deal())
|
[
"yona.bohbot@gmail.com"
] |
yona.bohbot@gmail.com
|
ee0b2466bc1cafcdb84c99ac4db6ddf33589c3c3
|
6b8d1dcf18f688192eebb95e3bd922250b352429
|
/deploy/ieee-workflow/ieee.py
|
de994f14b4bae9ba741274745e9cb08a1e2a4976
|
[
"MIT"
] |
permissive
|
bigdatalab/movingdata
|
571542226395e5a10b127d7588f17b40bfc06d75
|
9e51a54b9c601fa8fd55a571d28beb66fcca9def
|
refs/heads/master
| 2016-09-10T18:19:43.976792
| 2013-05-22T13:14:07
| 2013-05-22T13:14:07
| 10,197,707
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 927
|
py
|
#!/usr/bin/env python
#
# Runs the IEEE example workflow
#
# Copyright (c) 2013 by Michael Luckeneder
#
import sys
import os
import time
start_time = time.time()

files = []
i = 0
# Download the wikimedia sample image sys.argv[1] times; each copy gets a
# unique timestamp-based name under ./temp/ with suffix ".0.jpg".
while True:
    if i >= int(sys.argv[1]):
        break
    else:
        i += 1
    file = "./temp/" + str(time.time())
    os.system("/usr/bin/env curl http://upload.wikimedia.org/wikipedia/commons/2/28/Keri_majakas.jpg > %s" % (file + ".0.jpg"))
    files.append(file)

# parse workflow text: a comma-separated list of processing-service URLs
workflow = sys.argv[2].split(",")
l = range(len(workflow) + 1)
# BUG FIX: materialize the stage pairs so they can be iterated once per file;
# under Python 3 a bare zip() iterator is exhausted after the first file.
allocation = list(zip(l, l[1:]))

# execute workflow: POST each downloaded image through every stage
for f in files:
    # BUG FIX: the original computed f0 from the leftover loop variable
    # `file` (always the LAST downloaded image) instead of the current `f`.
    f0 = f + ".0.jpg"
    # NOTE(review): every stage receives the original .0 image; if stages are
    # meant to chain (stage i consuming stage i-1's output), f0 should be
    # advanced to f1 after each stage — confirm intended semantics.
    for i, a in enumerate(allocation):
        f1 = f + "." + str(a[1]) + ".jpg"
        c = "/usr/bin/env curl -X POST -F file=@%s %s > %s" % (f0, workflow[i], f1)
        os.system(c)

# calculate and display total run time (parenthesized form runs under both
# Python 2 and Python 3; the original used a Python-2-only print statement)
print(str(time.time() - start_time))
|
[
"michael@luckeneder.net"
] |
michael@luckeneder.net
|
2e50c12dd8851b04187c57f3da365c997cbde47d
|
2e2d4861fe0a3c0bd824fd140f8c57fd1e05ddac
|
/workflows/deploy.py
|
d6395aaa7759fdf8e9cf27eee0712a757a5ff06c
|
[] |
no_license
|
mss7082/nornir_backup
|
5df4009339dea852a0047c4c415f400314afcbe9
|
f71b34ec28523ff7e15b0d78a8f6e6c1f6e373a8
|
refs/heads/master
| 2022-06-19T11:50:06.338198
| 2020-05-06T20:17:26
| 2020-05-06T20:17:26
| 261,788,227
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,720
|
py
|
#!/home/gns3/virtualenvs/nornir/bin/python
from nornir import InitNornir
from pprint import pprint
from nornir.plugins.tasks import networking, text
from nornir.plugins.functions.text import print_result
from nornir.core.filter import F
import logging
def main():
    """Render Juniper interface configs from a template and push them.

    Loads the Nornir inventory from ../config.yaml, filters it down to
    junos-platform hosts, and runs the push_config task (Jinja2 render +
    NAPALM dry-run load) against each, printing the per-host results.
    """
    template = "interfaces.j2"
    nr = InitNornir(config_file="../config.yaml")
    # pprint(nr.inventory.get_inventory_dict()["hosts"])
    juniper = nr.filter(F(platform="junos"))
    # pprint(juniper.inventory.get_hosts_dict())
    # rgit = juniper.run(task=get_template, template=template)
    # print_result(rgit)
    config = juniper.run(task=push_config, template=template)
    print_result(config)
# def get_template(task, template):
# print("Getting template from gitlab")
# r = task.run(
# task=gitlab,
# action="get",
# url="http://gitlab.mss.com",
# token="yfP7ecnFpzRXDUsxzyg4",
# repository="Nornir_Templates",
# ref="master",
# filename=template,
# # filename=f"{task.host.platform}/{template}",
# destination="/tmp/hosts",
# severity_level=logging.DEBUG,
# )
# print_result(r)
def push_config(task, template):
    """Nornir task: render *template* for this host and load it via NAPALM.

    task: the per-host Nornir Task object
    template: Jinja2 template filename, looked up under
        ../templates/<host platform>/
    """
    print("Render and Push config")
    r = task.run(
        task=text.template_file,
        name="Generate config from template",
        template=template,
        path=f"../templates/{task.host.platform}",
    )
    # Stash the rendered text on the host so the next subtask can read it.
    task.host["config"] = r.result
    # Deploy that configuration to the device using NAPALM.
    # dry_run=True validates/diffs the change without committing it;
    # replace=False merges into the running config instead of replacing it.
    task.run(
        task=networking.napalm_configure,
        name="Loading Configuration on the device",
        replace=False,
        dry_run=True,
        configuration=task.host["config"],
    )
# Run only when executed as a script, not when imported as a module
# (the original called main() unconditionally at import time).
if __name__ == "__main__":
    main()
|
[
"mssokabi@pop-os.localdomain"
] |
mssokabi@pop-os.localdomain
|
2935ac6309fc6ef4ec2f6ebea6c24fb96c80ca16
|
1c9909744b978ef47ab3ef1a89fe5c16691c551f
|
/DS_Algo/Searching/peakIndexInMountainArray.py
|
3d16775963e2e47221c451521f0bfb9f9c86f401
|
[] |
no_license
|
barleen-kaur/LeetCode-Challenges
|
238036b8eba4867f0d2686b3a3fe70e43f3de20f
|
bdd53a0314d833728b3c248241ae5b8fa6051819
|
refs/heads/master
| 2023-02-20T05:23:12.427779
| 2021-01-22T00:45:10
| 2021-01-22T00:45:10
| 296,755,550
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,016
|
py
|
'''
852. Peak Index in a Mountain Array: Easy
Q. Let's call an array arr a mountain if the following properties hold:
arr.length >= 3
There exists some i with 0 < i < arr.length - 1 such that:
arr[0] < arr[1] < ... arr[i-1] < arr[i]
arr[i] > arr[i+1] > ... > arr[arr.length - 1]
Given an integer array arr that is guaranteed to be a mountain, return any i such that arr[0] < arr[1] < ... arr[i - 1] < arr[i] > arr[i + 1] > ... > arr[arr.length - 1].
Example 1:
Input: arr = [0,1,0]
Output: 1
Example 2:
Input: arr = [0,2,1,0]
Output: 1
'''
class Solution:
    def peakIndexInMountainArray(self, arr: "List[int]") -> int:
        """Return the index of the peak of a mountain array via binary search.

        The input is guaranteed to be a mountain: strictly increasing up to a
        single interior peak, then strictly decreasing.  Runs in O(log n).

        BUG FIX: the annotation is quoted because the original referenced
        ``List`` without importing it from ``typing``, raising a NameError
        as soon as the class body was evaluated outside LeetCode's runner.
        """
        low, high = 0, len(arr) - 1
        while low < high:
            mid = low + (high - low) // 2
            # An interior index larger than both neighbours is the peak.
            if low < mid < high and arr[mid] > arr[mid - 1] and arr[mid] > arr[mid + 1]:
                return mid
            elif arr[mid] < arr[mid + 1]:
                # Still on the ascending slope: peak lies strictly right of mid.
                low = mid
            else:
                # On the descending slope (or at the peak): peak is at or left of mid.
                high = mid
        return mid
|
[
"barleenk@gmail.com"
] |
barleenk@gmail.com
|
af963ee0e327eb1953c47441839a1f283f993d96
|
0c4c49d31937eff94fc34f1e7c2f371abbb07c64
|
/custom_components/colorfulclouds/const.py
|
902f823a722881bf83aa5d36fafde7f8e0d7f7f3
|
[] |
no_license
|
liuleidp/Colorfulclouds-weather
|
0697af89815c4982c208597ea22188040accb4d6
|
d3beaed0d6015d45810530e1c32e9f1fae45f8cf
|
refs/heads/master
| 2022-12-25T02:53:41.212108
| 2020-09-17T11:23:11
| 2020-09-17T11:23:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,918
|
py
|
"""Constants for colorfulclouds."""
DOMAIN = "colorfulclouds"
PLATFORMS = ["sensor"]
REQUIRED_FILES = [
"const.py",
"manifest.json",
"weather.py",
"config_flow.py",
"services.yaml",
"translations/en.json",
]
VERSION = "0.1.3"
ISSUE_URL = "https://github.com/fineemb/Colorfulclouds-weather/issues"
STARTUP = """
-------------------------------------------------------------------
{name}
Version: {version}
This is a custom component
If you have any issues with this you need to open an issue here:
{issueurl}
-------------------------------------------------------------------
"""
from homeassistant.const import (
ATTR_DEVICE_CLASS,
DEVICE_CLASS_TEMPERATURE,
LENGTH_FEET,
LENGTH_INCHES,
LENGTH_METERS,
SPEED_KILOMETERS_PER_HOUR,
SPEED_MILES_PER_HOUR,
LENGTH_MILES,
LENGTH_KILOMETERS,
LENGTH_INCHES,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
TIME_HOURS,
DEGREE,
UV_INDEX,
VOLUME_CUBIC_METERS,
)
ATTRIBUTION = "Data provided by Colorfulclouds"
# Keys used in the per-sensor description dicts (SENSOR_TYPES) below.
ATTR_ICON = "icon"
# NOTE(review): CONF_DAILYSTEPS is bound to "forecast" here and rebound to
# "dailysteps" a few lines down; only the later value survives — confirm the
# chained assignment is intentional.
ATTR_FORECAST = CONF_DAILYSTEPS = "forecast"
ATTR_LABEL = "label"
ATTR_UNIT_IMPERIAL = "Imperial"
ATTR_UNIT_METRIC = "Metric"
# Device-registry metadata.
MANUFACTURER = "Colorfulclouds, Inc."
NAME = "Colorfulclouds"
# Config-entry option keys.
CONF_API_KEY = "api_key"
CONF_API_VERSION = "api_version"
CONF_LATITUDE = "latitude"
CONF_LONGITUDE = "longitude"
CONF_ALERT = "alert"
CONF_HOURLYSTEPS = "hourlysteps"
CONF_DAILYSTEPS = "dailysteps"
CONF_STARTTIME = "starttime"
# Keys for objects shared through hass.data.
COORDINATOR = "coordinator"
UNDO_UPDATE_LISTENER = "undo_update_listener"
# Sensors that are only created on request.
OPTIONAL_SENSORS = (
    "WindDirection",
)
# Description of every sensor entity exposed by the integration, keyed by
# sensor id: device class, icon, display label, and the unit to report in
# metric vs. imperial mode.
SENSOR_TYPES = {
    "apparent_temperature": {
        ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
        ATTR_ICON: None,
        ATTR_LABEL: "感觉温度",
        ATTR_UNIT_METRIC: TEMP_CELSIUS,
        ATTR_UNIT_IMPERIAL: TEMP_FAHRENHEIT,
    },
    "temperature": {
        ATTR_DEVICE_CLASS: DEVICE_CLASS_TEMPERATURE,
        ATTR_ICON: None,
        ATTR_LABEL: "温度",
        ATTR_UNIT_METRIC: TEMP_CELSIUS,
        ATTR_UNIT_IMPERIAL: TEMP_FAHRENHEIT,
    },
    "cloudrate": {
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:weather-cloudy",
        ATTR_LABEL: "云量",
        ATTR_UNIT_METRIC: "%",
        ATTR_UNIT_IMPERIAL: "%",
    },
    "precipitation": {
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:weather-rainy",
        ATTR_LABEL: "雨量",
        ATTR_UNIT_METRIC: "mm",
        ATTR_UNIT_IMPERIAL: LENGTH_INCHES,
    },
    "pressure": {
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:gauge",
        ATTR_LABEL: "气压",
        ATTR_UNIT_METRIC: "Pa",
        ATTR_UNIT_IMPERIAL: "Pa",
    },
    "comfort": {
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:gauge",
        ATTR_LABEL: "舒适指数",
        ATTR_UNIT_METRIC: None,
        ATTR_UNIT_IMPERIAL: None,
    },
    "ultraviolet": {
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:weather-sunny",
        ATTR_LABEL: "紫外线",
        ATTR_UNIT_METRIC: UV_INDEX,
        ATTR_UNIT_IMPERIAL: UV_INDEX,
    },
    "humidity": {
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:water-percent",
        ATTR_LABEL: "湿度",
        # BUG FIX: humidity is a percentage; the original copy-pasted the
        # temperature units (°C/°F) from the entries above.
        ATTR_UNIT_METRIC: "%",
        ATTR_UNIT_IMPERIAL: "%",
    },
    "visibility": {
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:weather-fog",
        ATTR_LABEL: "能见度",
        ATTR_UNIT_METRIC: LENGTH_KILOMETERS,
        ATTR_UNIT_IMPERIAL: LENGTH_MILES,
    },
    "WindSpeed": {
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:weather-windy",
        ATTR_LABEL: "风速",
        ATTR_UNIT_METRIC: SPEED_KILOMETERS_PER_HOUR,
        ATTR_UNIT_IMPERIAL: SPEED_MILES_PER_HOUR,
    },
    "WindDirection": {
        ATTR_DEVICE_CLASS: None,
        ATTR_ICON: "mdi:weather-windy",
        ATTR_LABEL: "风向",
        ATTR_UNIT_METRIC: DEGREE,
        ATTR_UNIT_IMPERIAL: DEGREE,
    },
}
|
[
"embont@gmail.com"
] |
embont@gmail.com
|
3cb9f0d148c54cbbe893c3e1c798c3bb23c70ffc
|
a3cc7286d4a319cb76f3a44a593c4a18e5ddc104
|
/lib/surface/compute/instances/delete_access_config.py
|
479bf531ec0ef199dca5ae411f4dd8aff59f1cff
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
jordanistan/Google-Cloud-SDK
|
f2c6bb7abc2f33b9dfaec5de792aa1be91154099
|
42b9d7914c36a30d1e4b84ae2925df7edeca9962
|
refs/heads/master
| 2023-09-01T01:24:53.495537
| 2023-08-22T01:12:23
| 2023-08-22T01:12:23
| 127,072,491
| 0
| 1
|
NOASSERTION
| 2023-08-22T01:12:24
| 2018-03-28T02:31:19
|
Python
|
UTF-8
|
Python
| false
| false
| 3,102
|
py
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command for deleting access configs from virtual machine instances."""
from googlecloudsdk.api_lib.compute import base_classes
from googlecloudsdk.api_lib.compute import constants
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.compute.instances import flags
class DeleteAccessConfig(base.SilentCommand):
  """Delete an access configuration from a virtual machine network interface."""

  detailed_help = {
      'DESCRIPTION': """\
        *{command}* is used to delete access configurations from network
        interfaces of Google Compute Engine virtual machines. Access
        configurations allow you to assign a public, external IP to a virtual
        machine.
        """,
      'EXAMPLES': """\
        To remove the externally accessible IP from a virtual machine named
        ``example-instance'' in zone ``us-central1-a'', run:
          $ {command} example-instance --zone us-central1-a
        """,
  }

  @staticmethod
  def Args(parser):
    # Standard instance name + --zone arguments.
    flags.INSTANCE_ARG.AddArgument(parser)
    # Which access config to remove; defaults to the SDK-wide standard name.
    parser.add_argument(
        '--access-config-name',
        default=constants.DEFAULT_ACCESS_CONFIG_NAME,
        help="""\
        Specifies the name of the access configuration to delete.
        ``{0}'' is used as the default if this flag is not provided.
        """.format(constants.DEFAULT_ACCESS_CONFIG_NAME))
    # Which NIC to operate on; StoreOnceAction rejects repeated flags.
    parser.add_argument(
        '--network-interface',
        default=constants.DEFAULT_NETWORK_INTERFACE,
        action=arg_parsers.StoreOnceAction,
        help="""\
        Specifies the name of the network interface from which to delete the
        access configuration. If this is not provided, then ``nic0'' is used
        as the default.
        """)

  def Run(self, args):
    """Invokes request necessary for removing an access config."""
    holder = base_classes.ComputeApiHolder(self.ReleaseTrack())
    client = holder.client
    # Resolve instance name/zone/project into a full resource reference,
    # prompting for the zone if it was not supplied.
    instance_ref = flags.INSTANCE_ARG.ResolveAsResource(
        args, holder.resources,
        scope_lister=flags.GetInstanceZoneScopeLister(client))
    request = client.messages.ComputeInstancesDeleteAccessConfigRequest(
        accessConfig=args.access_config_name,
        instance=instance_ref.Name(),
        networkInterface=args.network_interface,
        project=instance_ref.project,
        zone=instance_ref.zone)
    # Issue the API call (batched request machinery shared by all commands).
    return client.MakeRequests([(client.apitools_client.instances,
                                 'DeleteAccessConfig', request)])
|
[
"jordan.robison@gmail.com"
] |
jordan.robison@gmail.com
|
71cd954f33c227545742d4faaa6f87496d9255cd
|
a900588a0f166022e3b4dac8329f9df16393afd4
|
/pipeline_stages_config.py
|
75c09399068b3b81502be479885e04a3900343d6
|
[] |
no_license
|
khalidm/rubratargetedpipeline
|
82efe3857ea89965a15d74bed116eeedd0be674e
|
90b4efe4cbb1068fa817b50de9a87a4fc3fa85e9
|
refs/heads/master
| 2016-09-06T05:26:02.394054
| 2014-09-10T06:40:00
| 2014-09-10T06:40:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,720
|
py
|
# stageDefaults contains the default options which are applied to each stage (command).
# This section is required for every Rubra pipeline.
# These can be overridden by options defined for individual stages, below.
# Stage options which Rubra will recognise are:
# - distributed: a boolean determining whether the task should be submitted to a cluster
# job scheduling system (True) or run on the system local to Rubra (False).
# - walltime: for a distributed PBS job, gives the walltime requested from the job
# queue system; the maximum allowed runtime. For local jobs has no effect.
# - memInGB: for a distributed PBS job, gives the memory in Gigabytes requested from the
# job queue system. For local jobs has no effect.
# - queue: for a distributed PBS job, this is the name of the queue to submit the
# job to. For local jobs has no effect.
# - modules: the modules to be loaded before running the task. This is intended for
# systems with environment modules installed. Rubra will call module load on each
# required module before running the task. Note that defining modules for individual
# stages will override (not add to) any modules listed here. This currently only
# works for distributed jobs.
stageDefaults = {
    'distributed': True,        # submit stages to the cluster scheduler by default
    'walltime': "08:00:00",     # default PBS walltime request per job
    'memInGB': 8,               # default PBS memory request
    'queue': "batch",           # default PBS queue
    'jobscript': " --account VR0002",   # extra arguments appended to the job script
    'modules': [                # environment modules loaded before each stage
        "bwa-intel/0.7.5a",
        "samtools-intel/0.1.19",
        "picard/1.53",
        "python-gcc/2.7.5",
        "R-intel/2.15.3",
        "gatk/2.6-5"
    ]
}
# Rubra pipeline runner settings (logging, concurrency, target stages).
pipeline = {
    'logDir': 'logfiles',       # directory for per-stage log files
    'logFile': 'pipeline.log',  # master pipeline log
    'style': 'print',
    'procs': 20,                # maximum number of concurrent tasks
    'verbose': 1,
    'end': [ ],                 # terminal stages; empty runs the whole pipeline
    # 'end': [ 'earlyDepthOfCoverage', 'dedupedDepthOfCoverage', 'finalDepthOfCoverage',
    #          'fastqc',
    #          'igvcountMergedBams', 'countRunBam',
    #          'collateReadCounts',
    #          'vcfIndexSNPs', 'vcfIndexIndels',
    #          'getEnsemblAnnotations',
    #          'exonCoverage' ],
    'force': [],                # stages forced to re-run even if up to date
    'restrict_samples': False
    # 'allowed_samples': ['060037']
}
# Input locations, run outputs and external tool jars for this invocation.
working_files = {
    'fastq_dirs': [
        # '/vlsci/VR0215/shared/khalid/targeted/testfastq'
        '/vlsci/VR0215/shared/khalid/targeted/fastq_files/kr'
    ],
    'fastq_symlink_dir': '/vlsci/VR0215/shared/khalid/targeted/outputkr/fastq_symlinks',
    'output_dir': '/vlsci/VR0215/shared/khalid/targeted/outputkr/',
    'varscanjar': '/vlsci/VR0002/kmahmood/Programs/varscan/VarScan.v2.3.6.jar',
    # 'snpeff': '/vlsci/VR0002/kmahmood/Programs/snpEff/'
    'snpeff': '/vlsci/VR0002/kmahmood/Programs/snpEff3.4/snpEff/'
}
# Reference genome, capture-target intervals and known-variant resources (hg19).
ref_files = {
    'fasta_reference': '/vlsci/VR0215/shared/khalid/targeted/refs/genome.fa',
    'fasta_dict': '/vlsci/VR0215/shared/khalid/targeted/refs/genome.dict',
    # 'exon_bed': '/vlsci/VR0215/shared/khalid/targeted/target_beds/S0470822_Covered.bed',
    'exon_bed': '/vlsci/VR0215/shared/khalid/targeted/target_beds/S0470822_Covered_sorted_merged.bed',
    'exon_bed_extended': '/vlsci/VR0215/shared/khalid/targeted/target_beds/S0470822_Covered100.bed',
    'dbsnp': '/vlsci/VR0215/shared/khalid/targeted/refs/dbsnp_137.hg19.vcf',
    'indels_realign_goldstandard': '/vlsci/VR0215/shared/khalid/targeted/refs/Mills_and_1000G_gold_standard.indels.hg19.vcf',
    'indels_realign_1000G': '/vlsci/VR0215/shared/khalid/targeted/refs/1000G_phase1.indels.hg19.vcf'
}
# stages should hold the details of each stage which can be called by runStageCheck.
# This section is required for every Rubra pipeline.
# Calling a stage in this way carries out checkpointing and, if desired, batch job
# submission.
# Each stage must contain a 'command' definition. See stageDefaults above for other
# allowable options.
stages = {
"fastqc": {
"command": "fastqc --quiet -o %outdir %seq",
"walltime": "02:00:00",
'modules': [ "fastqc/0.10.1" ]
},
'indexReferenceBWA': {
'command': "bwa index %ref -a bwtsw",
'walltime': "05:00:00"
},
'indexReferenceSAM': {
'command': "samtools faidx %ref"
},
'alignBWA': {
'command': "bwa aln -t 8 %encodingflag %ref %seq > %out",
'walltime': "04:00:00",
'procs': 8,
# 'queue': 'smp',
'memInGB': 23
},
'alignToSamSE': {
'command': "bwa samse %ref %meta %align %seq > %out"
},
'alignToSamPE': {
'command': "bwa sampe %ref %meta %align1 %align2 %seq1 %seq2 > %out",
'memInGB': 32
},
'samToSortedBam': {
'command': "./SortSam 23 VALIDATION_STRINGENCY=LENIENT INPUT=%seq OUTPUT=%out SORT_ORDER=coordinate",
'memInGB': 32,
'walltime': "05:00:00"
},
'mergeBams': {
'command': "./PicardMerge 24 %baminputs USE_THREADING=true VALIDATION_STRINGENCY=LENIENT AS=true OUTPUT=%out",
'procs': 2,
'memInGB': 32,
'walltime': "10:00:00"
},
'indexBam': {
'command': "samtools index %bam"
},
'flagstat': {
'command': "samtools flagstat %bam > %out",
'walltime': "00:10:00"
},
'igvcount': {
'command': "igvtools count %bam %out hg19",
'modules': [ "igv/2.3.15" ]
},
'indexVCF': {
'command': "./vcftools_prepare.sh %vcf",
'modules': [ "tabix/0.2.5" ]
},
'dedup': {
'command': "./MarkDuplicates 24 INPUT=%bam REMOVE_DUPLICATES=true VALIDATION_STRINGENCY=LENIENT AS=true METRICS_FILE=%log OUTPUT=%out",
'memInGB': 32,
'walltime': '05:00:00'
},
'realignIntervals': {
# Hard-coded to take 2 known indels files right now
'command': "./GenomeAnalysisTK 23 -T RealignerTargetCreator -R %ref -I %bam --known %indels_goldstandard --known %indels_1000G -L %bed -log %log -o %out",
'memInGB': 32,
'walltime': "08:00:00"
},
'realign': {
'command': "./GenomeAnalysisTK 22 -T IndelRealigner -R %ref -I %bam -known %indels_goldstandard -known %indels_1000G -targetIntervals %intervals -log %log -o %out",
'memInGB': 32,
'walltime': "08:00:00"
},
# Original: moved above before realignemnt as recommended by GATK
# 'dedup': {
# 'command': "./MarkDuplicates 24 INPUT=%bam REMOVE_DUPLICATES=true VALIDATION_STRINGENCY=LENIENT AS=true METRICS_FILE=%log OUTPUT=%out",
# 'memInGB': 32,
# 'walltime': '05:00:00'
# },
#GATK1 'baseQualRecalCount': {
#GATK1 'command': "./GenomeAnalysisTK 12 -T CountCovariates -I %bam -R %ref --knownSites %dbsnp -nt 8 -l INFO -cov ReadGroupCovariate -cov QualityScoreCovariate -cov CycleCovariate -cov DinucCovariate -log %log -recalFile %out",
#GATK1 'queue': 'smp',
#GATK1 'memInGB': 23,
#GATK1 'walltime': "3:00:00:00"
#GATK1 },
#GATK1 'baseQualRecalTabulate': {
#GATK1 'command': "./GenomeAnalysisTK 4 -T TableRecalibration -I %bam -R %ref -recalFile %csvfile -l INFO -log %log -o %out",
#GATK1 'walltime': "3:00:00:00"
#GATK1 },
'leftalignindels': {
'command': "./GenomeAnalysisTK 24 -allowPotentiallyMisencodedQuals -T LeftAlignIndels -I %input -R %ref -o %output",
'memInGB': 32,
'walltime': "05:00:00"
},
'baseQualRecal': {
'command': "./GenomeAnalysisTK 24 -T BaseRecalibrator -I %bam -R %ref --knownSites %dbsnp --knownSites %indels_goldstandard -log %log -o %out",
'memInGB': 32,
'walltime': "05:00:00"
},
'baseQualRecalPrintReads': {
'command': "./GenomeAnalysisTK 32 -T PrintReads -I %bam -R %ref -BQSR %csvfile -log %log -o %out",
'memInGB': 48,
'walltime': "05:00:00"
},
'callSNPs': {
'command': "./GenomeAnalysisTK 24 -T UnifiedGenotyper -nt 8 -R %ref -I %bam -L %bed --dbsnp %dbsnp -stand_call_conf 50.0 -stand_emit_conf 10.0 -dcov 200 -l INFO -A AlleleBalance -A Coverage -A FisherStrand -glm SNP -log %log -o %out",
# 'queue': 'smp',
'procs': 8,
'memInGB': 32,
'walltime': "10:00:00"
},
'callHAP': {
'command': "./GenomeAnalysisTK 24 -T HaplotypeCaller -R %ref -I %bam -L %bed --dbsnp %dbsnp -stand_call_conf 50.0 -stand_emit_conf 10.0 -dcov 200 -l INFO -A AlleleBalance -A Coverage -A FisherStrand -log %log -o %out",
# 'queue': 'smp',
'procs': 2,
'memInGB': 32,
'walltime': "12:00:00"
},
# 'callHAPMerged': {
# 'command': "./GenomeAnalysisTK 24 -T HaplotypeCaller -R %ref %bam -L %bed --dbsnp %dbsnp -stand_call_conf 50.0 -stand_emit_conf 10.0 -dcov 200 -l INFO -A AlleleBalance -A Coverage -A FisherStrand -log %log -o %out",
# 'procs': 2,
# 'memInGB': 32,
# 'walltime': "240:00:00"
# },
'callIndels': {
'command': "./GenomeAnalysisTK 24 -T UnifiedGenotyper -nt 8 -R %ref -I %bam -L %bed --dbsnp %dbsnp -stand_call_conf 50.0 -stand_emit_conf 10.0 -dcov 200 -l INFO -A AlleleBalance -A Coverage -A FisherStrand -glm INDEL -log %log -o %out",
'procs': 8,
'memInGB': 32,
'walltime': "12:00:00"
},
'callVARSCAN': {
'command': "./RunVarscan %ref %varscanjar %samplelist %out %bam",
'procs': 1,
'memInGB': 32,
'modules': [ "samtools-intel/0.1.19", "java/1.7.0_25" ],
'walltime': "8:00:00"
},
# 'callVARSCANMerged': {
# 'command': "./RunVarscan %ref %varscanjar %samplelist %out %bam",
# # 'command': "./RunVarscan %ref %bam %varscanjar %out",
# 'procs': 1,
# 'memInGB': 32,
# 'modules': [ "samtools-intel/0.1.19", "java/1.7.0_25" ],
# 'walltime': "48:00:00"
# },
'filterSNPs': {
# Very minimal hard filters based on GATK recommendations. VQSR is preferable if possible.
'command': "./GenomeAnalysisTK 4 -T VariantFiltration -R %ref --variant %vcf --filterExpression 'QD < 2.0 || MQ < 40.0 || FS > 60.0 || HaplotypeScore > 13.0 || MQRankSum < -12.5 || ReadPosRankSum < -8.0' --filterName 'GATK_MINIMAL_FILTER' -log %log -o %out",
'memInGB': 32,
},
'filterVarscan': {
# Very minimal hard filters based on GATK recommendations. VQSR is preferable if possible.
#'command': "./GenomeAnalysisTK 4 -T VariantFiltration -R %ref --variant %vcf --filterExpression 'QD < 2.0 || MQ < 40.0 || FS > 60.0 || HaplotypeScore > 13.0 || MQRankSum < -12.5 || ReadPosRankSum < -8.0' --filterName 'GATK_MINIMAL_FILTER' -log %log -o %out",
'command': "./RunVarscanFilter %ref %varscanjar %vcf %out",
'procs': 1,
'memInGB': 32,
'modules': [ "samtools-intel/0.1.19", "java/1.7.0_25" ]
},
# 'filterVarscan2': {
# # Very minimal hard filters based on GATK recommendations. VQSR is preferable if possible.
# # 'command': "./GenomeAnalysisTK 4 -T VariantFiltration -R %ref --variant %vcf --filterExpression 'QD < 2.0 || MQ < 40.0 || FS > 60.0 || HaplotypeScore > 13.0 || MQRankSum < -12.5 || ReadPosRankSum < -8.0' --filterName 'GATK_MINIMAL_FILTER' -log %log -o %out",
# 'command': "./RunVarscan %ref %varscanjar %vcf %out",
# 'memInGB': 32,
# },
'filterHapVcfs': {
'command': "./GenomeAnalysisTK 4 -T VariantFiltration -R %ref --variant %vcf --filterExpression 'QD < 2.0 || MQ < 40.0 || FS > 60.0 || HaplotypeScore > 13.0 || MQRankSum < -12.5 || ReadPosRankSum < -8.0' --filterName 'GATK_MINIMAL_FILTER' -log %log -o %out",
'memInGB': 32,
},
# 'filterMergedHapVcfs': {
# 'command': "./GenomeAnalysisTK 4 -T VariantFiltration -R %ref --variant %vcf --filterExpression 'QD < 2.0 || MQ < 40.0 || FS > 60.0 || HaplotypeScore > 13.0 || MQRankSum < -12.5 || ReadPosRankSum < -8.0' --filterName 'GATK_MINIMAL_FILTER' -log %log -o %out",
# 'memInGB': 32,
# },
'filterIndels': {
# Very minimal hard filters based on GATK recommendations. VQSR is preferable if possible.
# If you have 10 or more samples GATK also recommends the filter InbreedingCoeff < -0.8
'command': "./GenomeAnalysisTK 4 -T VariantFiltration -R %ref --variant %vcf --filterExpression 'QD < 2.0 || ReadPosRankSum < -20.0 || InbreedingCoeff < -0.8 || FS > 200.0' --filterName 'GATK_MINIMAL_FILTER' -log %log -o %out",
'memInGB': 32,
},
# 'annotateEnsembl': {
# # This command as written assumes that VEP and its cache have been
# # downloaded in respective locations
# # ./variant_effect_predictor_2.5
# # ./variant_effect_predictor_2.5/vep_cache
# 'command': "perl variant_effect_predictor_2.5/variant_effect_predictor.pl --cache --dir variant_effect_predictor_2.5/vep_cache -i %vcf --vcf -o %out -species human --canonical --gene --protein --sift=b --polyphen=b > %log",
# 'modules': [ "perl/5.18.0", "ensembl/67" ]
# },
'annotateSNPEff': {
# This command as written assumes that snpEFF database hg19 is downloaded and the config file is amended accordingly
'command': "./SnpEff 24 %snpeff eff -c %config -v hg19 %vcf > %output",
'memInGB': 32,
'walltime': "12:00:00"
},
'depthOfCoverage': {
'command': "./GenomeAnalysisTK 24 -T DepthOfCoverage -R %ref -I %bam -L %bed -omitBaseOutput -ct 1 -ct 10 -ct 20 -ct 30 -o %out",
'memInGB': 32,
},
'exonCoverage': {
'command': "coverageBed -abam %bam -b %exon_bed > %out",
'modules': [ "bedtools-intel/2.17.0" ]
},
'intersectBam': {
'command': "intersectBed -abam %bam -b %bed > %out",
'modules': [ "bedtools-intel/2.17.0" ]
},
'collateReadcounts': {
'command': 'python count_flagstat_exome.py %dir %outdir',
'walltime': "00:10:00"
}
}
|
[
"mahmood.khalid@gmail.com"
] |
mahmood.khalid@gmail.com
|
a57e29a5245b7fc7cac8aac2d84443f7c42a14d0
|
9213de0545a68734d490b0be00c429ee0dce1698
|
/Tatto/settings.py
|
7f85a6a7b3b9740ebe3d4a68c378be74b48995ca
|
[] |
no_license
|
kabindrarijal/tattoo
|
e75f8c7fc3d2335041d217038933ae9f86f8f4eb
|
bc49896432282430895dee6ca53ef4d16a3db080
|
refs/heads/master
| 2020-06-27T03:20:05.449634
| 2019-07-31T10:26:21
| 2019-07-31T10:26:21
| 199,830,386
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,339
|
py
|
"""
Django settings for Tatto project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
STAT = os.path.join(BASE_DIR,'static')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'wja_(i^-bm7rxrlx%g%eb!&n=)s3e#c9digw5%2w6sp(tk&p#o'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'Pages',
'Tattos',
'user',
'userTime',
'crispy_forms',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Tatto.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,"templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Tatto.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS=[
STAT,
]
LOGIN_REDIRECT_URL='/registration/'
CRISPY_TEMPLATE_PACK='bootstrap4'
|
[
"kabindra@outlook.com"
] |
kabindra@outlook.com
|
921688e4e0fbdd1792cd348353ae939ebe9564b0
|
b4ad8d023bcd8d061d873dbab20c892c9cb887b8
|
/prac_08/unreliable_car.py
|
bbc71ce84459c1ede6dad695ab2552ab58448f8e
|
[] |
no_license
|
jordanMellish/cp1404Practicals
|
9de529a61b43e6031c3bfdff1c752d4a437c8ccb
|
5140db323d13a5564e92864b91904b8fe9464f90
|
refs/heads/master
| 2020-03-24T22:42:06.646462
| 2018-10-24T03:46:45
| 2018-10-24T03:46:45
| 143,098,469
| 0
| 0
| null | 2018-09-04T05:04:49
| 2018-08-01T03:20:42
|
Python
|
UTF-8
|
Python
| false
| false
| 744
|
py
|
"""
CP1404/CP5632 Practical
UnreliableCar class, derived from Car
"""
from random import randint
from prac_08.car import Car
class UnreliableCar(Car):
"""An unreliable version of a car."""
def __init__(self, name, fuel, reliability):
"""Initialise an UnreliableCar."""
super().__init__(name, fuel)
self.reliability = reliability
def drive(self, distance):
"""Drive the car, only sometimes, based on reliability."""
random_number = randint(1, 100)
if random_number >= self.reliability:
distance = 0
# Either way, we want to call the parent class's drive method (maybe driving 0)
distance_driven = super().drive(distance)
return distance_driven
|
[
"jordan.mellish@my.jcu.edu.au"
] |
jordan.mellish@my.jcu.edu.au
|
2f52dc55e8244d2992f25fe087aa779b5ee88b23
|
edfa045d12b8efb65de20261ff80a86160298e44
|
/contact/views.py
|
92ba8640b73aadf1add6ef04d0e028b1dae69786
|
[
"MIT"
] |
permissive
|
yusif763/Unistore-pro
|
1d559a89bb71f3db8b5d1e89df64ed7113f00f2a
|
41ad0fa209c79a201d3f6a7aa68ec0ace707dcad
|
refs/heads/main
| 2023-04-24T02:50:30.085011
| 2021-04-29T11:00:11
| 2021-04-29T11:00:11
| 362,782,688
| 4
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,954
|
py
|
from django.shortcuts import render,redirect
from contact.models import *
from contact.forms import ContactForm
from django.views.generic import (
ListView, DetailView,CreateView
)
from django.views.generic.edit import FormMixin
from django.http import HttpResponseRedirect
from django.urls import reverse_lazy
# Create your views here.
# def about_contact(request):
# form = ContactForm()
# # sub_form = SubscriberForm()
# if request.method == 'POST':
# form = ContactForm(data=request.POST)
# if form.is_valid():
# form.save()
# return redirect('/about-contact/')
# context = {
# # "sub_form":sub_form,
# 'form': form
# }
# return render(request , 'about_contact.html' , context)
class AboutContactView(CreateView):
form_class = ContactForm
# fields = '__all__'
# model = Contact
template_name = 'about_contact.html'
success_url = reverse_lazy('common:index')
def form_valid(self, form):
result = super(ContactView, self).form_valid(form)
messages.success(self.request, 'Sizin muracietiniz qebul edildi.')
return result
# def contact_page(request):
# form = ContactForm()
# # sub_form = SubscriberForm()
# if request.method == 'POST':
# form = ContactForm(data=request.POST)
# if form.is_valid():
# form.save()
# return redirect('/contact/')
# context = {
# # "sub_form":sub_form,
# 'form': form
# }
# return render(request, "contact.html", context)
class ContactView(CreateView):
form_class = ContactForm
# fields = '__all__'
# model = Contact
template_name = 'contact.html'
success_url = reverse_lazy('common:index')
def form_valid(self, form):
result = super(ContactView, self).form_valid(form)
messages.success(self.request, 'Sizin muracietiniz qebul edildi.')
return result
|
[
"you@example.com"
] |
you@example.com
|
e95f90e3add4a44c83da5cb198549a1b5aca2680
|
ad6216b583f72942c24865cdaee51da23e6c3a7c
|
/solutions/problem0014.py
|
1cec790b930ff845d0f759f6ba4dc19e93f0f00f
|
[] |
no_license
|
vshender/daily-coding-problem
|
325a68af96a5240cbbada7c6f615e5c91083a7dd
|
b13ccf67d670f0e9efd558506fcb5333ce2d8686
|
refs/heads/master
| 2020-05-01T12:05:43.654024
| 2019-05-18T13:26:37
| 2019-05-18T13:26:37
| 177,458,467
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,829
|
py
|
"""Problem #14 [Medium]
This problem was asked by Google.
The area of a circle is defined as `pi * r^2`. Estimate `pi` to 3 decimal
places using a Monte Carlo method.
Hint: The basic equation of a circle is `x^2 + y^2 = r^2`.
"""
from random import random
def estimate_pi(eps: float) -> float:
"""Estimate `pi` with the given precision using a Monte Carlo method."""
# When using the Monte Carlo method, we can obtain the required precision
# only with some probability, so the required number of samples depends on
# both the required precision and the probability.
#
# See also https://stackoverflow.com/a/33069371/436435
#
# Simplifying the calculation error when using a Monte Carlo method is
# proportional to `sqrt(D / N)`, where `D` is some constant, and `N` is the
# number of samples.
#
# TODO: (WHY?) The constant should be larger in order to obtain acceptable
# results for `eps = 10e-2`, for which the worst results are observed. For
# `eps = 1e-1` or `eps = `1e-3`, the constant can be even smaller.
D = 100
n = int(D / eps ** 2)
m = 0
for _ in range(n):
x = random()
y = random()
if x ** 2 + y ** 2 < 1.:
m += 1
return 4 * m / n
def _test(eps: float, ntests: int) -> None:
"""Test the `pi` estimation with the given precision."""
import math
print('-' * 79)
print(f'Test eps={eps} ntests={ntests}')
err_cnt = 0
for i in range(ntests):
pi_e = estimate_pi(eps)
if abs(int(math.pi / eps) * eps - int(pi_e / eps) * eps) > eps:
print('*', end=' ')
err_cnt += 1
print(pi_e)
print(f'Number of errors: {err_cnt} / {ntests}\n')
if __name__ == '__main__':
_test(1e-1, 100)
_test(1e-2, 100)
_test(1e-3, 20)
|
[
"vadim@shender.org"
] |
vadim@shender.org
|
c92ec169ce3f9c52a1a324c79db7b02e29ce371a
|
c1e2dfac83ca2fab928127e7ab3b898623f943ab
|
/source/added_value/toposort.py
|
269f08cfed840a0c15bcade02c506777a71eec11
|
[
"BSD-3-Clause"
] |
permissive
|
sixty-north/added-value
|
1cd7845ff63a97f6282f4ed8ebc0e6b9158be779
|
debd9e8dcf1475ecc7d2836a6fecdfd95a2de8ec
|
refs/heads/master
| 2023-04-28T11:16:59.244319
| 2023-04-14T19:37:35
| 2023-04-14T19:37:35
| 132,577,509
| 0
| 1
|
BSD-3-Clause
| 2023-04-14T19:37:36
| 2018-05-08T08:21:33
|
Python
|
UTF-8
|
Python
| false
| false
| 854
|
py
|
from collections import defaultdict, namedtuple
Results = namedtuple("Results", ["sorted", "cyclic"])
def topological_sort(dependency_pairs):
"Sort values subject to dependency constraints"
num_heads = defaultdict(int) # num arrows pointing in
tails = defaultdict(list) # list of arrows going out
heads = [] # unique list of heads in order first seen
for h, t in dependency_pairs:
num_heads[t] += 1
if h in tails:
tails[h].append(t)
else:
tails[h] = [t]
heads.append(h)
ordered = [h for h in heads if h not in num_heads]
for h in ordered:
for t in tails[h]:
num_heads[t] -= 1
if not num_heads[t]:
ordered.append(t)
cyclic = [n for n, heads in num_heads.items() if heads]
return Results(ordered, cyclic)
|
[
"rob@sixty-north.com"
] |
rob@sixty-north.com
|
9192f7c15ed59359d976a92fd0945a47e3af7c97
|
2582841c7a99804298d8c99dea1a533c3064d604
|
/pc.py
|
c3b034fce5d3a1b34deeb8d9fab3dcda03adab3c
|
[
"WTFPL"
] |
permissive
|
dgottlieb/storyteller
|
e2655a2d8c231138cad0d8f09fd83739862d002f
|
0cf430b1372a5aed95d08ebe1aff268314f362e1
|
refs/heads/master
| 2016-09-10T13:29:29.255787
| 2012-03-05T03:54:24
| 2012-03-05T03:54:24
| 2,816,254
| 5
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,703
|
py
|
import random
import inform
class Party(object):
def __init__(self, hero):
self.members = [hero]
self.items = []
self.gold = 50
self.status_box = inform.PartyStatus(self, right=True)
self.gold_box = inform.GoldStatus(self)
def __len__(self):
return len(self.members)
def __iter__(self):
return iter(self.members)
def __getitem__(self, idx):
return self.members[idx]
def add_gold(self, num_gold):
self.gold += num_gold
def add_exp(self, num_exp):
for member in self.members:
member.add_exp(num_exp)
def bought(self, item):
self.gold -= item.buy_price
self.items.append(item)
self.gold_box.update_values()
def sold(self, item):
self.gold += item.sell_price
self.items.remove(item)
self.gold_box.update_values()
class PC(object):
def __init__(self, name):
self.name = name
self.helmet = None
self.armor = None
self.gloves = None
self.pants = None
self.boots = None
self.weapon = None
self.offhand = None
self.max_hp = 25
self.max_mp = 15
self.hp = 25
self.mp = 15
self.exp = 0
self.str = 10
self.agi = 10
self.con = 10
self.will = 10
self.int = 10
self.spells = []
def __str__(self):
return self.name
def __getitem__(self, item_key):
if item_key.lower() == 'hp':
return self.hp
elif item_key.lower() == 'mp':
return self.mp
return 'pc[%s] is not known' % (item_key,)
def add_exp(self, num_exp):
self.exp += num_exp
@property
def attack(self):
return 14
def melee(self, enemy):
chance_to_hit = 0.9
chance_to_crit = 0.1
if random.random() > chance_to_hit:
return {"action": "miss", "feedback": "You miss!", "damage": 0}
if random.random() < chance_to_crit:
damage = self.attack * 3
enemy.attacked(damage)
return {"action": "crit", "damage": damage,
"feedback": "You critically strike %s for %d!" % (enemy.name, damage)}
damage = self.attack * self.attack / enemy.defense
enemy.attacked(damage)
return {"action": "hit", "damage": damage,
"feedback": "You hit %s for %d!" % (enemy.name, damage)}
@property
def alive(self):
return self.hp > 0
def get_wait_time(self, last_action):
last_action_delay = 0
return int(random.normalvariate(50 - (self.agi / 5), 5)) + last_action_delay
|
[
"danny.gottlieb@gmail.com"
] |
danny.gottlieb@gmail.com
|
0d4345f4f7d1fb1740337a32881c2c8a4533f342
|
4e25bbb3244f253f3296542931f3e8babf0af5c9
|
/main/migrations/0001_initial.py
|
fc757a8e23b0bf530886004f9cdc798c3be20404
|
[] |
no_license
|
dalovaelena/IoTApp
|
8ed373f0239937284429c69daae899097c36aab2
|
38d6e6e9df63a3553d7a0b48d924917cfe67a7c7
|
refs/heads/master
| 2020-09-13T16:15:20.945794
| 2019-11-20T03:15:28
| 2019-11-20T03:15:28
| 222,838,450
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 649
|
py
|
# Generated by Django 2.1.5 on 2019-11-19 07:06
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Tutorial',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tutorial_title', models.CharField(max_length=200)),
('tutorial_content', models.TextField()),
('tutorial_published', models.DateTimeField(verbose_name='date published')),
],
),
]
|
[
"noreply@github.com"
] |
dalovaelena.noreply@github.com
|
51076cbc05dfd34c93e5ff0d33ec683f7304252f
|
6cc795fef13e82a2e50f487740f5373b5a3f8549
|
/pyunlocbox/tests/__init__.py
|
7cae2d147d6d4ccbb8129886a11191b019a147e2
|
[
"BSD-3-Clause"
] |
permissive
|
epfl-lts2/pyunlocbox
|
7a14e97f7e46981ed6748bb5073d473f45af676e
|
ec84282096fa9154d8bdcc52bacc3531c9720779
|
refs/heads/master
| 2023-08-29T22:13:29.345251
| 2022-10-18T11:18:53
| 2022-10-18T11:18:53
| 17,248,167
| 98
| 28
|
BSD-3-Clause
| 2023-08-18T02:01:44
| 2014-02-27T12:33:31
|
Python
|
UTF-8
|
Python
| false
| false
| 445
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Test suite for the pyunlocbox package, broken by modules.
"""
import unittest
from . import test_functions
from . import test_operators
from . import test_solvers
from . import test_acceleration
from . import test_docstrings
suite = unittest.TestSuite([
test_functions.suite,
test_operators.suite,
test_solvers.suite,
test_acceleration.suite,
test_docstrings.suite,
])
|
[
"michael.defferrard@epfl.ch"
] |
michael.defferrard@epfl.ch
|
aa725acaf7e91703ebe920800ab4e826acdd933e
|
04322cdd13907b7664cbc85f73885ebfd2a8c8ec
|
/20210126/urls-secondapp.py
|
ab71dbbed458f0a27e776220c8db94793b859ff9
|
[] |
no_license
|
kim-jae-eun/TIL
|
e8fdfca93a4f30ccedbf02fa8dac2db739eba213
|
28bbe990bb237367f6af6e54f04b5e204c0f6bdc
|
refs/heads/master
| 2023-04-29T11:49:09.819181
| 2021-05-11T14:44:24
| 2021-05-11T14:44:24
| 325,237,605
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 154
|
py
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.exam1, name='exam1'),
path('exam2/', views.exam2, name='exam2'),
]
|
[
"kj6659@naver.com"
] |
kj6659@naver.com
|
86d183306acb70d51d2f66c4434f6faebdeca298
|
7df714c172e36948580d4b352e5454fdc5cbf81a
|
/restapi/serializers.py
|
a9f9a49706197ef2ab9b37c0a3b2ce59bcc5583a
|
[] |
no_license
|
textworld/messageboard
|
db6b4ffcbf86dc3cfbf3adca9eb1adc85bc57751
|
9663b556d6704c5d6db36934153fe85d50fb02e1
|
refs/heads/master
| 2020-12-07T08:34:35.910452
| 2020-01-09T00:11:15
| 2020-01-09T00:11:15
| 232,684,021
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 596
|
py
|
from django.contrib.auth.models import User, Group
from rest_framework import serializers
from restapi.models import Message
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ['url', 'username', 'email', 'groups']
class GroupSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Group
fields = ['url', 'name']
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = Message
# fields = '__all__'
fields = ('id', 'text', 'username', 'avatar')
|
[
"text.zwb@gmail.com"
] |
text.zwb@gmail.com
|
365cde7deab764e8c3579e15bed629c7e8dc4758
|
3fb976eadaba6a3ad10ef7a91bf0d47604c65339
|
/test_with_classifier.py
|
aaac33811c76234e199ac61ecad88f9bfd47af27
|
[] |
no_license
|
diogojapinto/cats-vs-dogs
|
72265afec4b5cf5c50505ad3c7b87c35bdd8d9ac
|
16190549b85e4f6f7fd070a01c8a443b37625895
|
refs/heads/master
| 2021-01-15T11:42:39.846587
| 2015-12-24T01:10:50
| 2015-12-24T01:10:50
| 47,896,957
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,258
|
py
|
import cv2
from matplotlib import pyplot as plt
import sklearn
import numpy as np
import pickle as pk
from os import listdir
plt.style.use('ggplot')
NR_WORDS = 1000
from os import listdir
def load_images(imgs_paths, gray=False):
for path in imgs_paths:
img = cv2.imread(path)
if gray:
yield cv2.imread(path, cv2.IMREAD_GRAYSCALE)
else:
yield cv2.imread(path)
# SIFT features detector and extractor
sift = cv2.xfeatures2d.SIFT_create()
# FLANN matcher
FLANN_INDEX_KDTREE = 0
index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
search_params = dict(checks=50) # or pass empty dictionary
flann = cv2.FlannBasedMatcher(index_params,search_params)
def train_bow(detector, matcher, extractor=None):
if extractor == None:
extractor = detector
bow_extractor = cv2.BOWImgDescriptorExtractor(extractor, matcher)
vocabulary = pk.load(open('vocabulary_1000w.p', 'rb'))
bow_extractor.setVocabulary(vocabulary)
return bow_extractor
detector = sift
extractor = sift
sift_bow_extractor = train_bow(detector, flann, extractor=extractor)
train_folder = 'data/train/'
imgs_paths = [train_folder + filepath for filepath in listdir(train_folder)]
best_clf = pk.load(open('svm_classifier.p', 'rb'))
def save_labels_csv(labels):
pk.dump(labels, open('labels.p', 'wb'))
indexes = np.asmatrix(range(1, len(labels)+1)).transpose()
labels = np.asmatrix(labels).transpose()
indexed_labels = np.concatenate((indexes, labels), axis=1)
np.savetxt('results_1000.csv',
indexed_labels,
fmt='%d',
delimiter=',',
header='id,label',
comments='')
test_folder = 'data/test1/'
test_imgs_paths = [test_folder + filepath for filepath in listdir(test_folder)]
pred = []
test_imgs = load_images(test_imgs_paths, gray=True)
for i, img in enumerate(test_imgs):
if( i % 100 == 0 ):
print(i)
kp = detector.detect(img)
img_features = sift_bow_extractor.compute(img, kp)
try:
p = best_clf.predict(img_features)
except:
p = np.array(1)
pred.append(p)
save_labels_csv(pred)
|
[
"ruigrandaorocha@gmail.com"
] |
ruigrandaorocha@gmail.com
|
fdc7efa41a770c56baef8913b87e1dd6def66e0a
|
70ee9f1be6126f1bac8511513e7291002d354840
|
/src/Products/CMFCore/tests/__init__.py
|
67af542c0356d32e40ceb38b08484462eb3b1ce8
|
[
"ZPL-2.1"
] |
permissive
|
zopefoundation/Products.CMFCore
|
3d375d4bb682eabd64554c88166e50bdecb4483c
|
8c32b5ec521536c1a2c3752426f6ed209b11190c
|
refs/heads/master
| 2023-09-03T15:30:57.432178
| 2023-07-03T10:07:08
| 2023-07-03T10:07:08
| 36,093,975
| 4
| 19
|
NOASSERTION
| 2023-09-08T15:43:18
| 2015-05-22T20:33:14
|
Python
|
UTF-8
|
Python
| false
| false
| 171
|
py
|
"""\
Unit test package for CMFCore.
As test suites are added, they should be added to the
mega-test-suite in Products.CMFCore.tests.test_all.py
"""
_globals = globals()
|
[
"y.2015@wcm-solutions.de"
] |
y.2015@wcm-solutions.de
|
842a2c46f56a7757144550a71d0db5dc6c5c3357
|
08284d6d808408bdd43cd8115daa6b9d522c1510
|
/arm-py/disassemble.py
|
b95c7e8d9abcc86761a10ecc8539d5a207e5fd4d
|
[] |
no_license
|
securelayer7/arm-py32
|
da65b40d81813a4c34180f12b3092535a050d808
|
e328668fd7e3fed4e51421ea9cafcbf854d70574
|
refs/heads/master
| 2020-06-09T19:05:25.739145
| 2019-06-24T11:26:43
| 2019-06-24T11:26:43
| 193,489,789
| 6
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 771
|
py
|
# disassemble.py
# displays disassembly of the entire .text section
import sys
from elftools.elf.elffile import ELFFile
from capstone import *
def disassemble(bin_file):
with open(bin_file, 'rb') as f:
elf = ELFFile(f)
code = elf.get_section_by_name('.text')
ops = code.data() # returns a bytestring with the opcodes
addr = code['sh_addr'] # starting address of `.text`
md = Cs(CS_ARCH_ARM, CS_MODE_ARM)
for i in md.disasm(ops, addr): # looping through each opcode
print(f"0x{i.address:x}:\t{i.mnemonic}\t{i.op_str}")
if __name__ == '__main__':
try:
disassemble(sys.argv[1])
except IndexError:
print('error: requires an argument')
sys.exit()
|
[
"icyph0x@pm.me"
] |
icyph0x@pm.me
|
14b393e7ba2c8a870e184b37ae52e2d773a05b30
|
35459fb41290c85046d575389d27c42194d90e51
|
/Django REST Framework/Nothing/Nothing/settings.py
|
53792b615eee500ba3b819f7512d4128d1216da1
|
[] |
no_license
|
ghost1412/FrameworkLearning
|
5f34e18f34b1f75382fe6b71073e0c61d7ec63a7
|
c0dbb2693a9949fe8b0c7866157bcf8e4dd1ecf4
|
refs/heads/master
| 2022-12-11T23:04:53.316293
| 2020-09-10T11:33:42
| 2020-09-10T11:33:42
| 294,390,299
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,135
|
py
|
"""
Django settings for Nothing project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '8d54cn5$cp=29rwb6501pkp^g4_nxm8ptqmp5w$gfhuw$^z23-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'api_basic',
'rest_framework.authtoken'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Nothing.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Nothing.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
|
[
"jatav.ashutosh@gmail.com"
] |
jatav.ashutosh@gmail.com
|
91b19ebfcfc2d019937e967020366dcb0bf65cdb
|
bf815da9cc16326bd397c939bf4e5a7ce0d3261e
|
/Computer_Science_Career_Path/CS102/4.Queues/3.enqueue.py
|
530f8ae210ceafd4157794314f7a1fdb2163cb15
|
[] |
no_license
|
StingzLD/Codecademy
|
2e6979afe49cfba1c0fc1038dc1a50f0649f5924
|
a1a6f175758f6cd561cabd5bf050e219c1432930
|
refs/heads/master
| 2022-12-15T08:38:50.316955
| 2021-08-02T00:10:11
| 2021-08-02T00:10:11
| 172,606,261
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,143
|
py
|
from node import Node
class Queue:
    """Singly linked FIFO queue with an optional maximum size.

    Nodes come from the project-local ``node.Node`` class, which must
    provide ``get_value()`` and ``set_next_node()``.
    """

    def __init__(self, max_size=None):
        # max_size=None means the queue is unbounded.
        self.head = None
        self.tail = None
        self.max_size = max_size
        self.size = 0

    def enqueue(self, value):
        """Append *value* at the tail if the queue still has room."""
        # BUG FIX: the original wrote `if self.has_space:`, which tests
        # the bound method object itself (always truthy) instead of
        # calling it, so max_size was never enforced.
        if self.has_space():
            item_to_add = Node(value)
            print(f"Adding {item_to_add.get_value()} to the queue!")
            if self.is_empty():
                # First element becomes both head and tail.
                self.head = item_to_add
                self.tail = item_to_add
            else:
                self.tail.set_next_node(item_to_add)
                self.tail = item_to_add
            self.size += 1
        else:
            print("Sorry, no more room!")

    def peek(self):
        """Return the head's value without removing it (None if empty)."""
        if self.size == 0:
            print("Nothing to see here!")
        else:
            return self.head.get_value()

    def get_size(self):
        """Return the current number of queued elements."""
        return self.size

    def has_space(self):
        """Return True if another element can be enqueued."""
        # `is None` instead of `== None` per PEP 8: identity test for the
        # None singleton.
        if self.max_size is None:
            return True
        else:
            return self.max_size > self.get_size()

    def is_empty(self):
        """Return True when the queue holds no elements."""
        return self.size == 0
# Demo: enqueue a single item into an unbounded queue.
q = Queue()
q.enqueue("all the fluffy kitties")
|
[
"stingzlightingdesign@hotmail.com"
] |
stingzlightingdesign@hotmail.com
|
0a03b67eecb862ea28ff48d194cf4e78eb1133a0
|
f95102d82dff690a4a0d0a68a7bcac9eea283bab
|
/brooks_answers.py
|
a4049af35d86ab470429e70118df6de59c170e2b
|
[] |
no_license
|
almathaler/sudoku_solver
|
98228e0738baf9ef61eb2a01878d18d7cf64f992
|
78d184e10be87325c4bec4a07ea73a565bb7a79d
|
refs/heads/master
| 2021-04-09T22:12:47.636066
| 2021-02-10T01:36:16
| 2021-02-10T01:36:16
| 248,885,357
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,369
|
py
|
#! /usr/bin/python3
import sys
# The 27 constraint groups ("cliques") of a standard sudoku grid, as
# lists of 0-80 cell indices: 9 rows, then 9 columns, then 9 3x3 boxes.
Cliques=[[0,1,2,3,4,5,6,7,8],[9,10,11,12,13,14,15,16,17],[18,19,20,21,22,23,24,25,26],[27,28,29,30,31,32,33,34,35],[36,37,38,39,40,41,42,43,44],[45,46,47,48,49,50,51,52,53],[54,55,56,57,58,59,60,61,62],[63,64,65,66,67,68,69,70,71],[72,73,74,75,76,77,78,79,80],[0,9,18,27,36,45,54,63,72],[1,10,19,28,37,46,55,64,73],[2,11,20,29,38,47,56,65,74],[3,12,21,30,39,48,57,66,75],[4,13,22,31,40,49,58,67,76],[5,14,23,32,41,50,59,68,77],[6,15,24,33,42,51,60,69,78],[7,16,25,34,43,52,61,70,79],[8,17,26,35,44,53,62,71,80],[0,1,2,9,10,11,18,19,20],[3,4,5,12,13,14,21,22,23],[6,7,8,15,16,17,24,25,26],[27,28,29,36,37,38,45,46,47],[30,31,32,39,40,41,48,49,50],[33,34,35,42,43,44,51,52,53],[54,55,56,63,64,65,72,73,74],[57,58,59,66,67,68,75,76,77],[60,61,62,69,70,71,78,79,80]]
Neighbors = {} # key is cell-id, value is set of neighbors. Neighbors[2] = set(0,1,3,4,5,6,7,8,11,20,29,38,47,56,65,74,9,10,18,19)
Boards = [] # all boards read in
def getBoards(argv = None):
    """Read all boards from the CSV-ish input file named by argv[1].

    Each board in the file is a header line with 3 comma-separated
    fields followed by 9 lines of 9 comma-separated digits; every board
    is appended to the global ``Boards`` list as a flat list of 81 ints.
    """
    global Boards
    if not argv:
        argv = sys.argv
    with open(argv[1],'r') as f_in:
        lines = f_in.read().split('\n')
        i = 0
        while i < len(lines):
            if len(lines[i].split(',')) == 3: #if this is the name of a board
                newboard = [] #make a newboard
                for j in range(i+1,i+10): #fill in the new board with the next 9 lines
                    newboard += [int(x) for x in lines[j].split(',')]
                Boards.append(newboard) #so now we have a list of all boards
                i += 9 #move past this board
            else:
                i += 1 #this is when you're just on an empty line like "\n"
def makeNeighbors():
    """Populate the global Neighbors map: cell index -> set of peer cells.

    A cell's peers are every other cell sharing a row, column, or box
    (i.e. any clique) with it.
    """
    global Neighbors
    for cell in range(81):
        peers = set()
        for group in Cliques:
            if cell in group:
                # Union in every member of a clique containing this cell.
                peers.update(group)
        # A cell is never its own neighbor.
        peers.discard(cell)
        Neighbors[cell] = peers
def getSwapped(board):
    """Return the pair [i, j] whose value swap makes *board* valid, or [].

    Only positions that currently conflict can be part of the needed
    swap, so candidate pairs are drawn from getIncorrect(board).
    """
    incorrect = getIncorrect(board)
    # Try every unordered pair of conflicting positions.
    for a in range(len(incorrect) - 1):
        for b in range(a + 1, len(incorrect)):
            i, j = incorrect[a], incorrect[b]
            candidate = board[:]
            candidate[i], candidate[j] = candidate[j], candidate[i]
            # A swap is the answer when no conflicts remain afterwards.
            if not getIncorrect(candidate):
                return [i, j]
    return []
def getIncorrect(board):
    """Return the positions whose value duplicates some neighbor's value.

    Both members of a duplicate pair end up in the result, because each
    sees the other as a conflicting neighbor.
    """
    conflicts = []
    for pos, val in enumerate(board):
        # any() short-circuits exactly like the original's break.
        if any(board[peer] == val for peer in Neighbors[pos]):
            conflicts.append(pos)
    return conflicts
def printBoard(title,b):
    """Print *title*, then the 81-cell board *b* as 9 comma-joined rows,
    followed by a blank line."""
    print(title)
    for row in range(9):
        # Slice-free indexing: cell (row, col) lives at 9*row + col.
        cells = [str(b[9 * row + col]) for col in range(9)]
        print(','.join(cells))
    print()
def main(argv = None):
    """Solve every board in argv[1] and write each swap pair to argv[2].

    For each input board the pair of positions to swap is written as a
    comma-separated line (empty line when no single swap fixes it).
    """
    global Boards, Neighbors
    if not argv: argv = sys.argv
    # Reset module state so repeated calls don't accumulate boards.
    Boards = []
    Neighbors = {}
    getBoards(argv)
    makeNeighbors()
    with open(argv[2],'w') as f_out:
        for board in Boards:
            swapped = getSwapped(board)
            s_swapped = [str(x) for x in swapped] #b/c getSwapped returns a list
            out = ','.join(s_swapped) #join that list w comma
            f_out.write(out+'\n') #since out is a string we can write it to the file
            print(out)


main()
|
[
"athaler00@stuy.edu"
] |
athaler00@stuy.edu
|
0b981c34caacbd6a75a6f68469358cb35d62e42f
|
7330c32b71012e0c5fe75c95eb342cfb2b441d1b
|
/Test_Suite/T730_Portrait_2x2_vmware.py
|
bcd6c85b3bbe2a8595c264aa71cd6294e5cfdd1c
|
[] |
no_license
|
nettlay/Linux_scripts_automation
|
a2f407649c3b0d0c9d9e4220a9c97f370d0be0e7
|
e480af6b93c1a0bb54865e6d896772552386d736
|
refs/heads/master
| 2023-06-05T19:32:15.189998
| 2021-06-28T08:08:49
| 2021-06-28T08:08:49
| 272,672,818
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 612
|
py
|
from Test_Script.ts_multiple_display import display_function, generate_xml_file
def run():
    """Check a 'Portrait 2x2' wall layout: four DisplayPort outputs,
    each expected at 3840x2160 @ 60 Hz."""
    # DisplaySetting(...).generate() builds the expected-resolution data
    # that check_resolution() validates against the live displays.
    resolution_data = generate_xml_file.DisplaySetting('Portrait 2x2', {'DisplayPort-1': ['3840x2160', 60],
                                                                        'DisplayPort-2': ['3840x2160', 60],
                                                                        'DisplayPort-3': ['3840x2160', 60],
                                                                        'DisplayPort-4': ['3840x2160', 60]}).generate()
    display_function.check_resolution(resolution_data)


run()
|
[
"noreply@github.com"
] |
nettlay.noreply@github.com
|
671710238052c0289b8de317b9124f750a1ce3de
|
3f25e518ab86d7d3ba736d608c2b9ac7713b0ada
|
/filter_plugins/filters.py
|
950c19f49fcb7a10c89c48c8dc4043b8a3ee8e03
|
[] |
no_license
|
PavelChekhov/ansible_task
|
d57294480278f2a33e8a9011f79c8fd53c53d8f8
|
d3886a91aebe03974a6d7894afb750f2e9a8978e
|
refs/heads/master
| 2022-11-05T18:33:58.119856
| 2020-06-21T08:59:35
| 2020-06-21T08:59:35
| 273,868,691
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 769
|
py
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from functools import partial
import types
from ansible import errors
def get_mongo_src(arg, os_name, os_version, mongo_version):
    """Return the first entry in *arg* matching the OS family and version.

    :param arg: iterable of package/source names to search.
    :param os_name: distro name (case-insensitive); CentOS/RedHat map to
        the "rhel" package family, LinuxMint to "ubuntu".
    :param os_version: distro version fragment, e.g. "70" or "1604".
    :param mongo_version: MongoDB version fragment, e.g. "4.0".
    :returns: the matching entry, or the literal string "nothing found"
        (kept for backward compatibility with existing playbooks).
    """
    import re
    # Map distro names onto the package-name family they ship under.
    aliases = {"centos": "rhel", "redhat": "rhel", "linuxmint": "ubuntu"}
    os_name = os_name.lower()
    os_name = aliases.get(os_name, os_name)
    # BUG FIX: escape user-supplied fragments so '.' in e.g. "4.0"
    # matches a literal dot instead of any character ("4x0" no longer
    # matches). Patterns are compiled once, outside the loop.
    os_pat = re.compile('.*' + re.escape(os_name + os_version) + '.*', re.M | re.I)
    ver_pat = re.compile('.*' + re.escape(mongo_version) + '.*', re.M | re.I)
    for item in arg:
        if os_pat.match(item) and ver_pat.match(item):
            return item
    return "nothing found"
class FilterModule(object):
    """Expose this module's functions as Ansible Jinja2 filters."""

    def filters(self):
        # Maps the filter name usable in playbooks to its implementation.
        return {'get_mongo_src': get_mongo_src}
|
[
"Pavel_Chekhov@epam.com"
] |
Pavel_Chekhov@epam.com
|
535149511500657d6551b886a29959a2dde8c03c
|
bd3382151e75c370ede1c2e14c72d31d3b1dbb6a
|
/venv/bin/easy_install-3.7
|
f809f42383a86e63c68f2598a070f394485e8d52
|
[] |
no_license
|
mcmanyika/stats
|
267a7e561f1fdf53f9ff751e95f53436cd02cb1c
|
e3834377d327391bddb2e7e0e6d61abd9b6ce0a8
|
refs/heads/master
| 2022-04-21T10:43:48.854861
| 2020-04-19T17:51:44
| 2020-04-19T17:51:44
| 257,054,488
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 264
|
7
|
#!/Users/kdmedilink/Documents/stats/venv/bin/python3
# -*- coding: utf-8 -*-
# Auto-generated setuptools console-script shim for `easy_install`;
# do not edit by hand.
import re
import sys

from setuptools.command.easy_install import main

if __name__ == '__main__':
    # Strip a trailing '-script.pyw' / '.exe' suffix from argv[0] so the
    # tool reports a clean program name (relevant on Windows installs).
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"kdmedilink@Dispatchs-iMac.lan"
] |
kdmedilink@Dispatchs-iMac.lan
|
196aee3a236d95dd81093d250f46928df29e953c
|
f205b2a0f71fb6ef8931bd9498efe03b86afa028
|
/v4/code_4_2.py
|
c897bbaf4ea878f331653110b01584770d07e0ee
|
[] |
no_license
|
sigurbirnah19/Birna
|
11402deac1f4c59f55a65760e709b5f46315cfab
|
810a50912f631a8e80b34a21811c9cc46a75b107
|
refs/heads/master
| 2022-12-08T23:57:31.260669
| 2020-09-03T23:43:00
| 2020-09-03T23:43:00
| 292,614,025
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 103
|
py
|
# Print every multiple of 3 from 1 up to and including the user's number.
highest = int(input("Input an int: "))
# Step through the multiples of 3 directly instead of filtering with i%3.
for multiple in range(3, highest + 1, 3):
    print(multiple)
|
[
"birna.haflidadottir@sabre.com"
] |
birna.haflidadottir@sabre.com
|
9317d49e4e0740b572e4aebdbdbb59a7e97dd3bb
|
daa47bc7d93e49cc6142f61cd380b2ee7be3c183
|
/2019/Day9.py
|
930474cf74c36dd75ff5c35ac498adabfd5b0fda
|
[] |
no_license
|
jimpyts/AdventOfCode
|
85746f2412a50ae9c40e2374cb7fdfd25bae152f
|
4909bb875009b362e3b86eb3f7376f387648f59c
|
refs/heads/master
| 2021-01-02T20:54:44.201125
| 2020-02-11T15:35:23
| 2020-02-11T15:35:23
| 239,796,483
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,718
|
py
|
import jimpy
import collections
import itertools
import types
class IntcodeError(Exception):
    """Raised for unrecoverable errors inside the Intcode interpreter."""
class IntcodeComputer:
    """Advent of Code 2019 Intcode interpreter (day 9 feature set).

    Memory is the program list itself plus a defaultdict for addresses
    beyond the program ("overflow"); both are reachable through
    __getitem__/__setitem__. Parameter modes: 0 = position, 1 = immediate,
    2 = relative (offset from relative_base).
    """

    # Number of parameters each opcode consumes (opcode 99 halts and
    # takes none).
    opcode_parameter_lengths = {
        1: 3,
        2: 3,
        3: 1,
        4: 1,
        5: 2,
        6: 2,
        7: 3,
        8: 3,
        9: 1,
    }

    def __init__(self, intcode, phase_setting, input_values):
        # Pristine copy kept so target_seek can re-run from scratch.
        self.intcode_stored = intcode.copy()
        self.pointer = 0
        self.intcode = intcode.copy()
        # Amplifier phase: consumed by the first opcode-3 `input`.
        self.phase_setting = phase_setting
        self.phase_setting_used = False
        self.relative_base = 0
        self.input_values = input_values
        self.input_value_pointer = 0
        self.test_outputs = []
        # Sparse memory for addresses past the end of the program;
        # uninitialized cells read as 0.
        self.overflow_memory = collections.defaultdict(int)

    def __repr__(self):
        return f"INTCODE COMPUTER {self.phase_setting}"

    def __getitem__(self, item):
        # Reads past the program fall through to overflow memory.
        try:
            return self.intcode[item]
        except IndexError:
            return self.overflow_memory[item]

    def __setitem__(self, key, value):
        try:
            self.intcode[key] = value
        except IndexError:
            self.overflow_memory[key] = value

    def pre_run(self, noun, verb):
        """Install the day-2 style noun/verb inputs at addresses 1 and 2."""
        self[1] = noun
        self[2] = verb

    @staticmethod
    def parse_opcode_instruction(opcode_instruction):
        """Split an instruction into (opcode, mode1, mode2, mode3).

        The instruction is zero-padded to 5 digits: the last two digits
        are the opcode, the leading three are the parameter modes in
        reverse order.
        """
        assert opcode_instruction >= 0
        opcode_instruction = str(opcode_instruction)
        opcode_instruction = "0" * (5 - len(opcode_instruction)) + opcode_instruction
        p3_mode, p2_mode, p1_mode, *opcode = opcode_instruction
        opcode = int("".join(opcode))
        return opcode, int(p1_mode), int(p2_mode), int(p3_mode)

    def run(self):
        """Execute instructions until opcode 99 halts the machine.

        NOTE(review): this method always returns None — outputs are
        printed and appended to self.test_outputs, and the commented-out
        `return` suggests a pause-on-output design was abandoned.
        amplify_loop below relies on run()'s return value, so it would
        break out of its loop on the first iteration — confirm intent.
        """
        instruction = self[self.pointer]
        opcode, *parameter_modes = self.parse_opcode_instruction(instruction)
        while opcode != 99:
            assert opcode in self.opcode_fns
            parameters_len = self.opcode_parameter_lengths[opcode]
            # Addresses of this instruction's parameters, paired with
            # their modes.
            parameters = [self.pointer + i + 1 for i in range(parameters_len)]
            parameters = list(zip(parameters, parameter_modes))
            self.opcode_fns[opcode](*parameters)
            if opcode == 4:
                # return self.test_outputs[-1]
                print(self.test_outputs[-1])
            instruction = self[self.pointer]
            opcode, *parameter_modes = self.parse_opcode_instruction(instruction)

    def result(self):
        """Day-2 convention: the program's result is memory cell 0."""
        return self.intcode[0]

    def get_val(self, parameter_value, parameter_mode):
        """Resolve a parameter to its value per its mode (0/1/2)."""
        actual_val = self[parameter_value]
        if parameter_mode == 0:
            return self[actual_val]
        elif parameter_mode == 1:
            return actual_val
        elif parameter_mode == 2:
            return self[self.relative_base + actual_val]

    # NOTE(review): all three-parameter opcodes below ignore p3's mode
    # when writing (`val_3 = self[p3_val]` is position-mode only), so
    # relative-mode write targets are not supported — confirm whether
    # day 9 inputs need them.
    def add(self, p1, p2, p3):
        """Opcode 1: *p3 = p1 + p2."""
        val_1 = self.get_val(*p1)
        val_2 = self.get_val(*p2)
        p3_val, p3_mode = p3
        val_3 = self[p3_val]
        self[val_3] = val_1 + val_2
        self.pointer += 4

    def mul(self, p1, p2, p3):
        """Opcode 2: *p3 = p1 * p2."""
        val_1 = self.get_val(*p1)
        val_2 = self.get_val(*p2)
        p3_val, p3_mode = p3
        val_3 = self[p3_val]
        self[val_3] = val_1 * val_2
        self.pointer += 4

    def input(self, p1):
        """Opcode 3: store the next input value at *p1.

        The first call consumes the phase setting; subsequent calls read
        from input_values. (Shadows the builtin `input` inside this
        class namespace.)
        """
        p1_val, p1_mode = p1
        p1_val = self[p1_val]
        if self.phase_setting_used:
            self[p1_val] = self.input_values[self.input_value_pointer]
            self.input_value_pointer += 1
            # Clamp so a stalled producer re-reads the last slot instead
            # of raising IndexError on the pointer itself.
            self.input_value_pointer = min(self.input_value_pointer, len(self.input_values))
        else:
            self[p1_val] = self.phase_setting
            self.phase_setting_used = True
        # self[p1_val] = int(input("TEST: "))
        self.pointer += 2

    def output(self, p1):
        """Opcode 4: append the value of p1 to test_outputs."""
        p1_val = self.get_val(*p1)
        # print(p1_val)
        self.test_outputs.append(p1_val)
        self.pointer += 2

    def jump_if_true(self, p1, p2):
        """Opcode 5: jump to p2 when p1 is nonzero."""
        val_1 = self.get_val(*p1)
        val_2 = self.get_val(*p2)
        if val_1:
            self.pointer = val_2
        else:
            self.pointer += 3

    def jump_if_false(self, p1, p2):
        """Opcode 6: jump to p2 when p1 is zero."""
        val_1 = self.get_val(*p1)
        val_2 = self.get_val(*p2)
        if not val_1:
            self.pointer = val_2
        else:
            self.pointer += 3

    def less_than(self, p1, p2, p3):
        """Opcode 7: *p3 = 1 if p1 < p2 else 0."""
        val_1 = self.get_val(*p1)
        val_2 = self.get_val(*p2)
        p3_val, p3_mode = p3
        val_3 = self[p3_val]
        self[val_3] = int(val_1 < val_2)
        self.pointer += 4

    def equals(self, p1, p2, p3):
        """Opcode 8: *p3 = 1 if p1 == p2 else 0."""
        val_1 = self.get_val(*p1)
        val_2 = self.get_val(*p2)
        p3_val, p3_mode = p3
        val_3 = self[p3_val]
        self[val_3] = int(val_1 == val_2)
        self.pointer += 4

    def adjust_base(self, p1):
        """Opcode 9: add p1 to the relative base."""
        val_1 = self.get_val(*p1)
        self.relative_base += val_1
        self.pointer += 2

    @property
    def opcode_fns(self):
        """Dispatch table: opcode number -> bound handler method."""
        return {
            1: self.add,
            2: self.mul,
            3: self.input,
            4: self.output,
            5: self.jump_if_true,
            6: self.jump_if_false,
            7: self.less_than,
            8: self.equals,
            9: self.adjust_base,
        }

    def target_seek(self, target):
        """Day-2 brute force: find (noun, verb) in 0..99 giving *target*."""
        for noun, verb in itertools.product(range(100), range(100)):
            self.intcode = self.intcode_stored.copy()
            self.pre_run(noun, verb)
            self.run()
            if self.result() == target:
                return noun, verb

    @staticmethod
    def amplify(data, phases):
        """Day-7 part 1: chain amplifiers once, feeding each output onward."""
        previous_output = 0
        for phase in phases:
            comp = IntcodeComputer(data, phase, [previous_output])
            comp.run()
            previous_output = comp.test_outputs[0]
        return previous_output

    @staticmethod
    def amplify_loop(data, phases):
        """Day-7 part 2 feedback loop across five amplifiers.

        NOTE(review): depends on run() returning the last output, but
        run() as written returns None — see the note on run().
        """
        comps = [IntcodeComputer(data, phase, []) for phase in phases]
        previous_output = 0
        counter = 0
        while True:
            comp = comps[counter]
            comp.input_values.append(previous_output)
            result = comp.run()
            counter += 1
            counter %= 5
            if not result:
                break
            previous_output = result
        return previous_output
if __name__ == '__main__':
    # jimpy.get_input is a project helper: reads the file and splits on
    # ',' into ints.
    data = jimpy.get_input("inputs/day9.txt", ",", data_type=int)
    # data = [1102, 20, 21, 7, 4, 7, 99, 0]
    # data = [109, 1, 204, -1, 1001, 100, 1, 100, 1008, 100, 16, 101, 1006, 101, 0, 99]
    # Part 1: run with input value 1 (BOOST test mode).
    comp = IntcodeComputer(data, 1, [])
    comp.run()
|
[
"noreply@github.com"
] |
jimpyts.noreply@github.com
|
fd8dc6815a414f523f8bd3cc9b1f0c9d9322d917
|
1871ab24904127053c9d6c5ffd9b776a61b7495b
|
/Day004/day4.2_who_pay_the_bill.py
|
50409e09acbd17af9e4282c144172b9be2f4806a
|
[] |
no_license
|
izark99/100DaysPython
|
d857adb08e0186802c766fe6a8bddceb56feed09
|
cf0b885c2d03e0dbf80a1c825d325e025fe30fe4
|
refs/heads/master
| 2023-03-04T02:41:02.119120
| 2021-02-16T06:02:05
| 2021-02-16T06:02:05
| 336,467,714
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 287
|
py
|
import random

# Seed the generator so the same seed always picks the same person.
test_seed = int(input("Create a seed number: "))
random.seed(test_seed)

# Comma-separated names; spaces are stripped before splitting.
raw_names = str(input("Give me everybody's names: "))
names = raw_names.replace(' ', '').split(',')

# Draw one index in [0, len(names)-1] — same single randint draw as
# before, so a given seed still selects the same person.
random_choice = random.randint(0, len(names) - 1)
print(f"{names[random_choice]} will pay the bill!")
|
[
"izarknguyen@gmail.com"
] |
izarknguyen@gmail.com
|
333052f3ec73a5eda4c8f049f35e3302fdbf1361
|
0c412883b09037cef2dc03399b5cde27c353c4fb
|
/classifier.py
|
1b86ed9ce1da85c54bc0bd75fc62675c03cb07a0
|
[] |
no_license
|
ggoh29/newsClassifier
|
729b3179640030504df8ff1ba354799fc2300e61
|
835fbc6b89c1fb8016acd14a22366062c0004b0f
|
refs/heads/main
| 2023-02-22T09:57:44.596888
| 2021-01-24T14:56:06
| 2021-01-24T14:56:06
| 332,407,714
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,813
|
py
|
import pandas as pd
import nltk.tokenize as nt
import nltk
nltk.download('averaged_perceptron_tagger')
import re
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.metrics import classification_report
import numpy as np
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
analyser = SentimentIntensityAnalyzer()
from scipy.special import softmax
import math
import copy
from sklearn.model_selection import RandomizedSearchCV, GridSearchCV
import eli5
from eli5.sklearn import PermutationImportance
source = r"C:/Users/CHRIS/Documents/"
fake = "Fake.csv"
real = "True.csv"
class newsClassifier:
    """Builds POS-tag transition statistics for fake vs. real news.

    Maintains add-one-smoothed transition-count matrices (tag -> tag,
    and tag-pair -> tag) per text type ('fake'/'real') and per field
    ('title'/'body'), then turns (title, body) pairs into probability
    feature vectors via softmaxed log-likelihoods plus VADER sentiment.
    """

    def __init__(self):
        # Start every transition count at 1 (Laplace smoothing) for all
        # Penn-Treebank tags plus synthetic START/END markers.
        self.tokens = {"CC": 1, "CD": 1, "DT": 1, "EX": 1, "FW": 1, "IN": 1, "JJ": 1, "JJR": 1, "JJS": 1, "LS": 1, "MD": 1,
                       "NN": 1, "NNS": 1, "NNP": 1, "NNPS": 1, "PDT": 1, "POS": 1, "PRP": 1, "PRP$": 1, "RB": 1, "RBR":1,
                       "RBS": 1, "RP": 1, "TO": 1, "UH": 1, "VB": 1, "VBD": 1, "VBG": 1, "VBN": 1, "VBP": 1, "VBZ": 1,
                       "WDT": 1, "WP": 1, "WP$": 1, "WRB": 1, "START": 1, "END": 1}
        # First-order (memory-of-one) matrices: row tag -> {next tag: count}.
        fake_t_body = {i: copy.deepcopy(self.tokens) for i in self.tokens}
        real_t_body = {i: copy.deepcopy(self.tokens) for i in self.tokens}
        fake_t_title = {i: copy.deepcopy(self.tokens) for i in self.tokens}
        real_t_title = {i: copy.deepcopy(self.tokens) for i in self.tokens}
        self.body_dct = {"fake": fake_t_body, "real": real_t_body}
        self.title_dct = {"fake": fake_t_title, "real": real_t_title}
        self.dct = {"body": self.body_dct, "title": self.title_dct}
        # Second-order (memory-of-two) matrices: "tag1 tag2" -> {next tag:
        # count}. START can't follow and END can't precede, so those
        # impossible pairs are skipped.
        fake_body_mem_of_two = {}
        real_body_mem_of_two = {}
        for token1 in self.tokens:
            for token2 in self.tokens:
                if token2 == "START" or token1 == "END":
                    continue
                else:
                    fake_body_mem_of_two[token1 + " " + token2] = copy.deepcopy(self.tokens)
                    real_body_mem_of_two[token1 + " " + token2] = copy.deepcopy(self.tokens)
        self.memTwoDct = {"fake": fake_body_mem_of_two, "real": real_body_mem_of_two}

    def parseDataframe(self, df, type):
        """Accumulate transition counts from every row of *df*.

        Assumes column 0 is the title and column 1 the body — TODO
        confirm against the dataset schema. *type* is 'fake' or 'real'.
        """
        cols = df.columns
        for index, row in df.iterrows():
            title = row[cols[0]]
            title_tokens = self.tokenize(title)
            self.buildMatrix(title_tokens, self.title_dct[type])
            body = row[cols[1]]
            body_tokens = self.tokenize(body)
            self.buildMatrix(body_tokens, self.body_dct[type])
            self.buildMemOfTwoMatrix(body_tokens, self.memTwoDct[type])

    def tokenize(self, text):
        """POS-tag *text* and wrap the tags with START/END markers.

        NOTE(review): the trailing [0] keeps only the first sentence's
        tags — confirm this truncation is intentional.
        """
        ss = nt.sent_tokenize(text)
        tokenized_sent = [nt.word_tokenize(sent) for sent in ss]
        tokens = [nltk.pos_tag(sent) for sent in tokenized_sent][0]
        tokens.insert(0, ("START", "START"))
        tokens.append(("END", "END"))
        return tokens

    def buildMatrix(self, tokens_lst, matrix):
        """Count first-order tag transitions; unknown tags are skipped."""
        for i in range(1, len(tokens_lst)):
            prev = tokens_lst[i-1][1]
            cur = tokens_lst[i][1]
            if prev not in self.tokens or cur not in self.tokens:
                continue
            else:
                matrix[prev][cur] += 1

    def buildMemOfTwoMatrix(self, tokens_lst, matrix):
        """Count second-order ("tag1 tag2" -> tag3) transitions."""
        for i in range(2, len(tokens_lst)):
            prevprev = tokens_lst[i-2][1]
            prev = tokens_lst[i-1][1]
            cur = tokens_lst[i][1]
            if prevprev not in self.tokens or cur not in self.tokens or prev not in self.tokens:
                continue
            else:
                matrix[prevprev + " " + prev][cur] += 1

    def smoothAll(self):
        """Convert every count matrix into row-wise log-probabilities."""
        for txt_type in ['real', 'fake']:
            self.smooth(self.title_dct[txt_type])
            self.smooth(self.body_dct[txt_type])
            self.smooth(self.memTwoDct[txt_type])

    def smooth(self, matrix):
        """Normalize each row's counts and replace them with log-probs.

        Despite the name this is the normalization step — the add-one
        smoothing already happened via the initial counts of 1.
        """
        for row in matrix:
            acc = 0
            for col in matrix[row]:
                acc += matrix[row][col]
            for col in matrix[row]:
                matrix[row][col] = np.log(matrix[row][col]/ acc)

    def generateVector(self, df):
        """Return one probability feature vector per row of *df*."""
        vec = []
        cols = df.columns
        for index, row in df.iterrows():
            title = row[cols[0]]
            body = row[cols[1]]
            vec.append(self.getProbVector(title, body))
        return vec

    def getProbVector(self, title, body):
        """Build the 8-element feature vector for one (title, body) pair.

        Layout: softmax(fake, real) of the title log-likelihood, the
        body log-likelihood, and the second-order likelihood (computed
        on the title — see note), followed by the VADER compound
        sentiment of title and body.
        """
        title_score = analyser.polarity_scores(title)["compound"]
        body_score = analyser.polarity_scores(body)["compound"]
        title, body = self.tokenize(title), self.tokenize(body)
        title_t_fake_prob = self.getlogProbTransitionProbMemOfOne(title, 'fake', 'title')
        title_t_real_prob = self.getlogProbTransitionProbMemOfOne(title, 'real', 'title')
        body_t_fake_prob = self.getlogProbTransitionProbMemOfOne(body, 'fake', 'body')
        body_t_real_prob = self.getlogProbTransitionProbMemOfOne(body, 'real', 'body')
        # NOTE(review): the "body" second-order features are computed on
        # `title`, not `body` — possibly a typo; confirm before reuse.
        body_t_fake_prob_memTwo = self.getlogProbTransitionProbMemOfTwo(title, 'fake')
        body_t_real_prob_memTwo = self.getlogProbTransitionProbMemOfTwo(title, 'real')
        title_prob = softmax([title_t_fake_prob, title_t_real_prob])
        body_prob = softmax([body_t_fake_prob, body_t_real_prob])
        body_prob_memTwo = softmax([body_t_fake_prob_memTwo, body_t_real_prob_memTwo])
        probVector = np.concatenate((title_prob, body_prob, body_prob_memTwo))
        probVector = list(probVector)
        probVector.append(title_score)
        probVector.append(body_score)
        return probVector

    # return logP(x_0 -> x_1 .... | txt_type)
    def getlogProbTransitionProbMemOfOne(self, token_list, txt_type, place):
        """Sum first-order log transition probs of *token_list*."""
        # get transition matrix
        matrix = self.dct[place][txt_type]
        # get the probabilities
        log_prob = 0
        for i in range(1, len(token_list)):
            from_token = token_list[i-1][1]
            to_token = token_list[i][1]
            if from_token not in self.tokens or to_token not in self.tokens:
                continue
            else:
                log_prob += matrix[from_token][to_token]
        return log_prob

    def getlogProbTransitionProbMemOfTwo(self, tokens_lst, type):
        """Sum second-order log transition probs of *tokens_lst*."""
        matrix = self.memTwoDct[type]
        acc = 0
        for i in range(2, len(tokens_lst)):
            prevprev = tokens_lst[i - 2][1]
            prev = tokens_lst[i - 1][1]
            cur = tokens_lst[i][1]
            if prevprev not in self.tokens or cur not in self.tokens or prev not in self.tokens:
                continue
            else:
                acc += matrix[prevprev + " " + prev][cur]
        return acc
"""using https://www.kaggle.com/c/fake-news/data?select=train.csv as dataset"""
# Load the labelled dataset, sort by label so fake/real rows are
# contiguous, and split the frame into the two classes.
alt = "train.csv"
alt = pd.read_csv(source + alt).sort_values(by=['label']).dropna().reset_index()
alt = alt.drop(columns = ['id', 'index'])
number_of_labels = list(alt['label'].value_counts().values)
fake = alt.iloc[:number_of_labels[1]].reset_index().drop(columns = ['index'])
real = alt.iloc[number_of_labels[1]:].reset_index().drop(columns = ['index'])
# Per-fold bucket sizes (10 folds each for train/test).
bucket_train = 700
bucket_test = 70
"""using https://www.kaggle.com/clmentbisaillon/fake-and-real-news-dataset as dataset"""
# fake = pd.read_csv(source+fake)
# real = pd.read_csv(source+real)
#
# bucket_train = 1800
# bucket_test = 200
fake_train = fake.head(bucket_train * 10)
real_train = real.head(bucket_train * 10)
y_train = np.array(([0] * bucket_train + [1] * bucket_train) * 10)
x_train = []
# 10-fold scheme: fold i's feature vectors are produced by a classifier
# fitted on the other nine folds, so train features aren't computed on
# their own counts.
fake_train_10 = np.array_split(fake_train, 10)
real_train_10 = np.array_split(real_train, 10)
for i in range(10):
    classifier = newsClassifier()
    for j in range(10):
        if i != j:
            classifier.parseDataframe(fake_train_10[j], "fake")
            classifier.parseDataframe(real_train_10[j], "real")
    classifier.smoothAll()
    vec = classifier.generateVector(pd.concat([fake_train_10[i], real_train_10[i]]))
    for vector in vec:
        x_train.append(vector)
"""hyperparameter tuning - to be commented out once saved"""
# p_test = {'learning_rate':[0.01, 0.05, 0.1, 0.25, 0.5], 'n_estimators':[100, 250, 500, 750, 1000]}
#
# tuning = RandomizedSearchCV(estimator=GradientBoostingClassifier(max_depth=4, min_samples_split=2, min_samples_leaf=1, subsample=1,max_features='sqrt', random_state=10),
#                             param_distributions = p_test, scoring='accuracy', n_jobs=-1, n_iter=10, cv=5)
#
# tuning.fit(np.asarray(x_train), y_train)
#
# lr = tuning.best_params_['learning_rate']
# ne = tuning.best_params_['n_estimators']
#
# p_test2 = {'max_depth':[2, 3, 4, 5, 6, 7]}
#
# tuning = GridSearchCV(estimator=GradientBoostingClassifier(learning_rate=lr, n_estimators=ne, min_samples_split=2, min_samples_leaf=1, subsample=1,max_features='sqrt', random_state=10),
#                       param_grid = p_test2, scoring='accuracy',n_jobs=4, cv=5)
# tuning.fit(np.asarray(x_train), y_train)
#
# md = tuning.best_params_['max_depth']
#
# print("params are: learning rate:" + str(lr) + ",n_estimators:" + str(ne) + ",max_depth:" + str(md))
# "params are: learning rate:0.01,n_estimators:250,max_depth:3"
"""parameter tuned using the above commented code"""
model = GradientBoostingClassifier(learning_rate=0.01, n_estimators=250, max_depth=3, min_samples_split=2, min_samples_leaf=1, subsample=1,max_features='sqrt', random_state=10)
model.fit(np.asarray(x_train), y_train)
# Evaluate on held-out tail buckets; test features come from a
# classifier fitted on the full training split.
fake_test = fake.tail(bucket_test * 10)
real_test = real.tail(bucket_test * 10)
y_test = np.array([0] * bucket_test * 10 + [1] * bucket_test * 10)
classifier = newsClassifier()
classifier.parseDataframe(fake_train, "fake")
classifier.parseDataframe(real_train, "real")
classifier.smoothAll()
x_test = classifier.generateVector(pd.concat([fake_test, real_test]))
prediction = model.predict(np.asarray(x_test))
print(classification_report(y_test, prediction))
# with https://www.kaggle.com/clmentbisaillon/fake-and-real-news-dataset as dataset
#               precision    recall  f1-score   support
#
#            0       0.99      0.99      0.99      2000
#            1       0.99      0.99      0.99      2000
#
#     accuracy                           0.99      4000
#    macro avg       0.99      0.99      0.99      4000
# weighted avg       0.99      0.99      0.99      4000
# with https://www.kaggle.com/c/fake-news/data?select=train.csv as dataset
#               precision    recall  f1-score   support
#
#            0       0.72      0.86      0.79       700
#            1       0.83      0.67      0.74       700
#
#     accuracy                           0.77      1400
#    macro avg       0.78      0.77      0.76      1400
# weighted avg       0.78      0.77      0.76      1400
"""permutation importance to get feature importance"""
feature_names = ["title|fake", "title|real", "body|fake", "body|real", "body2|fake", "body2|real", "sent_title", "sent_body"]
permutation = PermutationImportance(model, random_state=0).fit(np.asarray(x_train), y_train)
output = eli5.explain_weights(permutation, feature_names=feature_names, top=8)
print(output)
# feature='title|fake', weight=0.02622857142857147,
# feature='title|real', weight=0.021014285714285763
# feature='body|fake', weight=0.01660000000000004,
# feature='body|real', weight=0.028071428571428636,
# feature='sent_body', weight=0.0033285714285714585,
# feature='sent_title', weight=0.0018285714285714682,
# feature='body2|fake', weight=0.0007142857142857561,
# feature='body2|real', weight=0.0005714285714286005
|
[
"noreply@github.com"
] |
ggoh29.noreply@github.com
|
5b9df18e21c3984939e05f0ff63a859a74d1fad9
|
8ea836381024468714b6fe11d4984f2ad44a3e88
|
/polls/migrations/0001_initial.py
|
c399f70ad21fb0ca2c6820063f8c1203b338c44c
|
[] |
no_license
|
Shubham7567/mysite
|
67139f8d088a12d21988a75fc3f46495e0615c5c
|
52e5380d5750ecb6336042a9acbce27c629f72a5
|
refs/heads/main
| 2023-02-15T11:05:59.020519
| 2021-01-01T03:02:25
| 2021-01-01T03:02:25
| 325,908,133
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,075
|
py
|
# Generated by Django 3.1.4 on 2020-12-25 04:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the polls app: Question and Choice tables.

    Auto-generated by `makemigrations`; do not edit applied migrations.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question_text', models.TextField(max_length=200)),
                ('pub_date', models.DateTimeField(verbose_name='date published')),
            ],
        ),
        migrations.CreateModel(
            name='Choice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('choice_text', models.CharField(max_length=200)),
                ('votes', models.IntegerField(default=0)),
                # Deleting a Question cascades to its Choices.
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.question')),
            ],
        ),
    ]
|
[
"shubhamrathore7567@gmail.com"
] |
shubhamrathore7567@gmail.com
|
8065d5a222ea47f64ef74359900886af9e88af37
|
fa9c5bb26c72b053a41f34e858d7395ee29aea5d
|
/HttpTesting/main.py
|
ed8bc130a9da20ac6da12479814e772dab2e1447
|
[
"Apache-2.0"
] |
permissive
|
HttpTesting/HttpTesting
|
3c01d7b00b13ddf5d52ac223693a5222d9a52a03
|
b5b68dd546cb7aea02931c46fe4e51a98f71566e
|
refs/heads/master
| 2021-06-20T15:34:35.583811
| 2019-12-25T05:58:34
| 2019-12-25T05:58:34
| 195,352,254
| 1
| 0
|
Apache-2.0
| 2021-03-25T22:50:17
| 2019-07-05T06:33:48
|
Python
|
UTF-8
|
Python
| false
| false
| 9,090
|
py
|
# ########################################################
# 将根目录加入sys.path中,解决命令行找不到包的问题
import sys
import os
curPath = os.path.abspath(os.path.dirname(__file__))
rootPath = os.path.split(curPath)[0]
sys.path.insert(0, rootPath)
# ########################################################
import unittest
import shutil
import time,json
import logging
from httptesting.globalVar import gl
from httptesting.library import HTMLTESTRunnerCN
from httptesting.library import scripts
from httptesting.library.scripts import (get_yaml_field)
from httptesting.library.emailstmp import EmailClass
from httptesting.library.case_queue import case_exec_queue
from httptesting import case
from httptesting.library.falsework import create_falsework
from httptesting.library.har import ConvertHarToYAML
from httptesting import __version__
import argparse
########################################################################
cmd_path = ''
# Command line mode.
def run_min():
    """Command-line entry point: parse the httptesting CLI flags and
    dispatch the requested action(s)."""
    # Takes the current path of the command line
    cur_dir = os.getcwd()
    os.chdir(cur_dir)
    parse = argparse.ArgumentParser(
        description='httptesting parameters',
        prog='httptesting'
    )
    parse.add_argument(
        "-v",
        "--version",
        action='version',
        version="%(prog)s {}".format(__version__),
        help='Framework version.'
    )
    parse.add_argument(
        "-f",
        "--file",
        default='',
        help='The file path; File absolute or relative path.'
    )
    parse.add_argument(
        "-d",
        "--dir",
        default='',
        help='The folder path; folder absolute or relative path.'
    )
    parse.add_argument(
        "-sp",
        "--startproject",
        default='',
        help='Generate test case templates.'
    )
    parse.add_argument(
        "-conf",
        "--config",
        default='',
        help='Basic setting of framework.'
    )
    parse.add_argument(
        "-har",
        default='',
        help='Convert the har files to YAML. har file is *.har'
    )
    parse.add_argument(
        "-c",
        "--convert",
        default='',
        help='Convert the har files to YAML. YAML file is *.yaml'
    )
    args = parse.parse_args()

    case_file = args.file
    case_dir = args.dir
    start_project = args.startproject
    config = args.config
    har = args.har
    vert = args.convert

    # Convert a YAML case file into a case template.
    if vert:
        yamlfile = os.path.join(cur_dir, str(vert).strip())
        scripts.generate_case_tmpl(yamlfile)

    # Convert har files to YAML.
    # r'D:\httphar.har'
    if har:
        temp_dict = ConvertHarToYAML.convert_har_to_ht(har)
        ConvertHarToYAML.write_case_to_yaml('', temp_dict)

    # Open the framework's config file for editing (via the OS handler).
    if config == 'set':
        try:
            os.system(gl.configFile)
        except (KeyboardInterrupt, SystemExit):
            print("已终止执行.")

    # Scaffold a new project skeleton.
    if start_project:
        create_falsework(os.path.join(os.getcwd(), start_project))

    # Get the yaml file name and write to the queue.
    if case_file:
        case_exec_queue.put(case_file)
        # Began to call.
        Run_Test_Case.invoke()

    # Queue every *.yaml case found under the directory, then run once.
    if case_dir:
        for root, dirs, files in os.walk(case_dir):
            for f in files:
                if 'yaml' in f:
                    case_exec_queue.put(os.path.join(case_dir, f))
        # Began to call.
        Run_Test_Case.invoke()
#########################################################################
# Not in command mode --dir defaults to the testcase directory.
# Example:
# python3 main.py --dir=r"D:\test_project\project\cloud_fi_v2\testcase"
#########################################################################
class Run_Test_Case(object):
    """Collects queued YAML cases, runs them through unittest, renders an
    HTML report, and optionally notifies via DingTalk and email."""

    @classmethod
    def load_tests_list(cls, to):
        """
        Specifies the order in which test cases are loaded
        :return: There is no.
        """
        tests = [unittest.TestLoader().loadTestsFromModule(to)]
        return tests

    @classmethod
    def create_report_file(cls):
        """Create a timestamped report folder and return the report path."""
        # Test report folder name, e.g. 20200101_120000.
        report_dir = time.strftime('%Y%m%d_%H%M%S', time.localtime())
        rdir = os.path.join(os.getcwd() ,'report')
        cls.file_name = 'report.html'
        portdir = os.path.join(rdir, report_dir)
        # Create the dated report folder (makedirs builds parents too).
        if not os.path.exists(portdir):
            # os.mkdir(portdir)
            os.makedirs(portdir)
        cls.filePath = os.path.join(portdir, cls.file_name)  # full path of the generated report
        return cls.filePath

    @staticmethod
    def copy_custom_function():
        # Copy the user's custom-function module into the framework, if present.
        func = os.path.join(os.getcwd(), 'extfunc.py')
        target = os.path.join(gl.loadcasePath, 'extfunc.py')
        if os.path.exists(func):
            shutil.copy(func, target)

    @staticmethod
    def copy_report(filePath, file_name):
        """Copy the report folder under templates/report/ and return its
        leaf directory name. (Windows-only path split on backslash.)"""
        split_path = os.path.dirname(filePath).split("\\")
        low_path = split_path[split_path.__len__() - 1]
        web_path = os.path.join(gl.templatesReportPath, low_path)
        if not os.path.exists(web_path):
            shutil.copytree(os.path.dirname(filePath), web_path)
        else:
            shutil.copy(filePath, os.path.join(web_path, file_name))
        return low_path

    @staticmethod
    def tmpl_msg(low_path, file_name):
        """Build the DingTalk result-summary message for this run."""
        # Counts are published by the runner through the gl global store.
        result_str = """共{}个用例, 通过{}, 失败{}, 错误{}, 通过率{}""".format(
            gl.get_value('sum'),
            gl.get_value('passed'),
            gl.get_value('failed'),
            gl.get_value('error'),
            gl.get_value('passrate')
        )
        # Verdict: pass only on a 100% pass rate.
        if '100' in str(gl.get_value('passrate')):
            msg_1 = '本次测试★通过★'
        else:
            msg_1 = '本次测试★不通过★'
        config = get_yaml_field(gl.configFile)
        # Public URL (ip+port) where the reports are published.
        report_url = config['REPORT_URL']
        content = config['DING_TITLE']
        # DingTalk message body.
        msg = """{}已完成:{},{}\n测试报告地址:{}/{}/{}"""
        msg = msg.format(content, result_str, msg_1, report_url, low_path, file_name)
        return msg

    @staticmethod
    def run(filePath):
        """
        Execute the test and generate the test report file.
        :param filePath: Report file absolute path.
        :return: There is no.
        """
        # custom function
        Run_Test_Case.copy_custom_function()
        # Load the unittest framework, which must be written here or DDT will be loaded first.
        from httptesting.case import load_case
        # Unittest test suite.
        suite = unittest.TestSuite()
        suite.addTests(Run_Test_Case.load_tests_list(load_case))
        # Execute the test and generate the test report file.
        with open(filePath, 'wb') as fp:
            runner = HTMLTESTRunnerCN.HTMLTestRunner(
                stream=fp,
                title= '接口自动化测试报告',
                description= '详细测试用例结果', # Do not default to null.
                tester= "测试组", # tester name ,not default to jack.
                verbosity=2
            )
            # Run the test case.
            runner.run(suite)

    @staticmethod
    def invoke():
        """
        Start executing tests generate test reports.
        :return: There is no.
        """
        # #########################Read configuration information###############
        config = get_yaml_field(gl.configFile)
        dd_enable = config['ENABLE_DDING']
        dd_token = config['DD_TOKEN']
        dd_url = config['DING_URL']
        email_enable = config['EMAIL_ENABLE']
        ########################################################################
        # Test report file name.
        time_str = time.strftime('%Y%m%d_%H%M%S', time.localtime())
        filePath = Run_Test_Case.create_report_file()
        # Start test the send pin message.
        if dd_enable:
            scripts.send_msg_dding(
                '{}:★开始API接口自动化测试★'.format(time_str),
                dd_token,
                dd_url
            )
        # Execute the test and send the test report.
        Run_Test_Case.run(filePath)
        print(filePath)
        # Copy the folder under the report directory under /templates/report/
        # low_path = Run_Test_Case.copy_report(filePath, Run_Test_Case.file_name)
        if dd_enable:
            # Template message.
            dir_list = filePath.split('\\')
            low_path = dir_list[len(dir_list) - 2]
            msg = Run_Test_Case.tmpl_msg(low_path, Run_Test_Case.file_name)
            print(msg)
            scripts.send_msg_dding(msg, dd_token, dd_url)
        if email_enable:
            # Send test report to EMAIL.
            email = EmailClass()
            email.send(filePath)
# Script entry point; run_min is presumably defined earlier in the module
# (not visible here) — confirm against the full file.
if __name__ == "__main__":
    run_min()
|
[
"lengyaohui.bj@acewill.cn"
] |
lengyaohui.bj@acewill.cn
|
1c8e3344ff726702de26bc95b86ffad4f8fa87df
|
cce5684e1bb9fea2df762c1afedb17b1795b7a5f
|
/pymcutil/selector/selectors/self_selector.py
|
03d75db75e9fd916c4c55f012ac4e9ca91173109
|
[
"MIT"
] |
permissive
|
Arcensoth/pymcutil
|
85071e5c3bbd25a47a1133bfa464f67126c62bdd
|
0c8f1efa4d611e92170ec48bedb160b1d00d0022
|
refs/heads/master
| 2020-03-18T05:02:06.769457
| 2018-07-02T00:45:51
| 2018-07-02T00:45:51
| 91,645,414
| 3
| 1
| null | 2017-07-12T15:56:12
| 2017-05-18T03:37:33
|
Python
|
UTF-8
|
Python
| false
| false
| 322
|
py
|
from pymcutil.selector.abc.selector import Selector
from pymcutil.symbols import selector_bases
from pymcutil.symbols.selector_bases.selector_bases import SelectorBase
class SelfSelector(Selector):
    """Selector whose base is the shared ``self`` selector base."""
    @property
    def base(self) -> SelectorBase:
        # Delegate the base symbol to the shared selector_bases registry.
        return selector_bases.self
# Lowercase module-level alias matching the package's symbol-style naming.
# NOTE: `self` here is a module attribute, not the usual method argument.
self = SelfSelector
# Ready-made shared instance of the selector.
SELF = self()
|
[
"arcensoth@gmail.com"
] |
arcensoth@gmail.com
|
2f36eb11ae86a36399030211222692df4e389349
|
74963da08191530d6675620c2e143abd49921abe
|
/src/02-parse-airquality-data.py
|
999851749b4a013bb27fdec78cd1eb494f65975e
|
[] |
no_license
|
rossanx/iot-arinter-2021
|
53c55c687b1afd9c3acfe2e7675a0b4a9b1ca053
|
8ae4a7f51e48eb1686b1db57f4957c61c5456e9e
|
refs/heads/master
| 2023-06-27T06:43:56.491843
| 2021-07-23T16:25:21
| 2021-07-23T16:25:21
| 386,047,879
| 0
| 0
| null | null | null | null |
ISO-8859-1
|
Python
| false
| false
| 2,135
|
py
|
#!/usr/bin/python
# -*- coding: latin-1 -*-

# Author: rossano at gmail dot com
# Timestamp: Sun Jul 11 05:38:36 PM -03 2021

# Parse a cached air-quality JSON payload (sample aqicn.org response for
# Americana, SP) and print the dominant pollutant.
import json

# JSON content
airquality='{"status":"ok","data":{"aqi":24,"idx":329,"attributions":[{"url":"http://www.cetesb.sp.gov.br/","name":"CETESB - Companhia Ambiental do Estado de São Paulo","logo":"Brazil-CETESB.png"},{"url":"https://waqi.info/","name":"World Air Quality Index Project"}],"city":{"geo":[-22.724506902,-47.347456368],"name":"Americana, São Paulo, Brazil","url":"https://aqicn.org/city/brazil/sao-paulo/americana"},"dominentpol":"pm10","iaqi":{"dew":{"v":15},"h":{"v":57},"o3":{"v":24},"p":{"v":1020},"pm10":{"v":24},"t":{"v":24},"w":{"v":3.6}},"time":{"s":"2021-04-15 11:00:00","tz":"-03:00","v":1618484400,"iso":"2021-04-15T11:00:00-03:00"},"forecast":{"daily":{"o3":[{"avg":12,"day":"2021-04-13","max":38,"min":2},{"avg":13,"day":"2021-04-14","max":41,"min":3},{"avg":11,"day":"2021-04-15","max":53,"min":3},{"avg":10,"day":"2021-04-16","max":51,"min":1},{"avg":15,"day":"2021-04-17","max":34,"min":4},{"avg":16,"day":"2021-04-18","max":44,"min":3}],"pm10":[{"avg":25,"day":"2021-04-13","max":35,"min":17},{"avg":28,"day":"2021-04-14","max":40,"min":13},{"avg":27,"day":"2021-04-15","max":40,"min":16},{"avg":41,"day":"2021-04-16","max":64,"min":19},{"avg":30,"day":"2021-04-17","max":63,"min":14},{"avg":45,"day":"2021-04-18","max":89,"min":15}],"pm25":[{"avg":50,"day":"2021-04-13","max":63,"min":44},{"avg":48,"day":"2021-04-14","max":69,"min":27},{"avg":49,"day":"2021-04-15","max":86,"min":23},{"avg":95,"day":"2021-04-16","max":152,"min":43},{"avg":73,"day":"2021-04-17","max":137,"min":34},{"avg":111,"day":"2021-04-18","max":171,"min":48}],"uvi":[{"avg":1,"day":"2021-04-13","max":6,"min":0},{"avg":2,"day":"2021-04-14","max":8,"min":0},{"avg":1,"day":"2021-04-15","max":8,"min":0},{"avg":1,"day":"2021-04-16","max":7,"min":0},{"avg":1,"day":"2021-04-17","max":7,"min":0},{"avg":1,"day":"2021-04-18","max":6,"min":0},{"avg":1,"day":"2021-04-19","max":4,"min":0}]}},"debug":{"sync":"2021-04-15T23:23:28+09:00"}}}'

# Decode once, then report the dominant pollutant field.
aq = json.loads(airquality)
print(aq["data"]["dominentpol"])
|
[
"rossano@gmail.com"
] |
rossano@gmail.com
|
b2fe5f745db4637bddd1e35aeee3c5df22b39957
|
9e7ac60fffd6addbde5e5f0695b93a45f095b354
|
/fissbuzz.py
|
47f249ed16cf1938eb7d94ae0274dd56d5914703
|
[] |
no_license
|
Aquino094/TP
|
f37df0585f1282f1df0258eb53d0d9ca163023ee
|
75ef1e67aba3353104af5978fef26c659f383ba4
|
refs/heads/master
| 2021-01-17T14:28:43.407048
| 2016-06-08T22:19:17
| 2016-06-08T22:19:17
| 59,785,559
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,080
|
py
|
#Programa que imprime fizz si el numero es multiplo de 3,
#buzz si es multiplo de 5 y si es multiplo de ambos imprime fizzbuzz numero
#crear un contador
count = 0
#usamos estructura while que permitira la
#ejecucion mientras se cumpla la condicion
while (count < 101):
#realizar una sentencia que compare si el numero incrementando
#es multiplo de ambos y si es asi imprimira FizzBuzz y
#el respectivo valor, y luego se le sumara 1 al contador
if (count % 5) == 0 and (count % 3) == 0:
print "FizzBuzz"
count = count + 1
#realizar una sentencia que compare si el numero
#incrementado es multiplo de 3 y si es asi imprimira Fizz
elif (count % 3) == 0:
print "Fizz"
count = count + 1
#realizar una sentencia que compare si el numero
#incrementado es multiplo de 5 y si es asi imprimira Buzz
elif (count % 5) == 0:
print "Buzz"
count = count + 1
#mostrar el numero e incrementar el valor del numero
else:
print count
count = count + 1
|
[
"julian.aquino086@gmail.com"
] |
julian.aquino086@gmail.com
|
0d404e2cf2f71b94dc6778d3d2add5d0b4a8985f
|
61d728164e2ab131403f1637ac6c23b126d43e70
|
/confusionModel.py
|
b1b85f7617a7c09d62b4a540ac941bc38098295a
|
[] |
no_license
|
alexliyang/HandGesture-Detection
|
b7321929f9191475e8de05b3cb2d284617a0d1da
|
6db93da6ac7a78c44699846f4a55978a7a5e548f
|
refs/heads/master
| 2021-05-03T17:24:10.988781
| 2018-01-17T18:09:51
| 2018-01-17T18:09:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,891
|
py
|
import tensorflow as tf
import glob
import cv2
import random
import numpy as np
import os
import ctypes
from time import time
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix
import itertools
class VideoCamera(object):
    """Thin wrapper around an OpenCV capture device."""

    def __init__(self, index=0):
        self.video = cv2.VideoCapture(index)
        self.index = index
        # Echo whether the device opened successfully.
        print(self.video.isOpened())

    def __del__(self):
        # Release the device handle when the wrapper is garbage collected.
        self.video.release()

    def get_frame(self, in_grayscale=False):
        """Grab one frame, optionally converted to grayscale."""
        _, captured = self.video.read()
        if in_grayscale:
            captured = cv2.cvtColor(captured, cv2.COLOR_BGR2GRAY)
        return captured
# info about the gesture to perform
# auto-pick the number of the image to add: 0_{nb}
# save in color or in gray
# Open a new thread to manage the external cv2 interaction
cv2.startWindowThread()
cap = VideoCamera()
# Preview window size and the side length of the saved PNGs.
cameraSize = (800, 600)
saveSize = 256
def plot_confusion_matrix(cm, classes,
                          normalize=False,
                          title='Confusion matrix',
                          cmap=plt.cm.Blues):
    """
    This function prints and plots the confusion matrix.
    Normalization can be applied by setting `normalize=True`.
    """
    if normalize:
        # Divide each row by its total so cells become per-class rates.
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    print(cm)

    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    ticks = np.arange(len(classes))
    plt.xticks(ticks, classes, rotation=45)
    plt.yticks(ticks, classes)

    cell_fmt = '.2f' if normalize else 'd'
    cutoff = cm.max() / 2.
    # Write each cell's value, white text on the darker half of the map.
    for row, col in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(col, row, format(cm[row, col], cell_fmt),
                 horizontalalignment="center",
                 color="white" if cm[row, col] > cutoff else "black")

    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
def recup(folder):
    """Load the cached evaluation arrays saved under ./<folder>/."""
    base = './' + folder + '/'
    return np.load(base + 'Xtest.npy'), np.load(base + 'Ytest.npy')
def new_weights_conv(name, shape):
    """Create a conv filter variable with Xavier (Glorot) initialization."""
    initializer = tf.contrib.layers.xavier_initializer_conv2d()
    return tf.get_variable(name, shape=shape, dtype=tf.float32,
                           initializer=initializer)
def new_weights_fc(name, shape):
    """Create a fully-connected weight variable with Xavier initialization."""
    initializer = tf.contrib.layers.xavier_initializer()
    return tf.get_variable(name, shape=shape, dtype=tf.float32,
                           initializer=initializer)
def new_biases(length):
    """Create a bias vector of *length* elements, all initialized to 0.05."""
    initial = tf.constant(0.05, shape=[length], dtype=tf.float32)
    return tf.Variable(initial, dtype=tf.float32)
def new_conv_layer(name,input,  # The previous layer.
                   num_input_channels, # Num. channels in prev. layer.
                   filter_size,        # Width and height of each filter.
                   num_filters,        # Number of filters.
                   dropout,            # Dropout rate
                   use_pooling=True):  # Use 2x2 max-pooling.
    """Build one SAME-padded conv layer on top of *input*.

    Returns the ReLU-activated output tensor and its filter variable.
    NOTE(review): the ``dropout`` argument is accepted but never applied
    inside this function — confirm whether that is intentional.
    """
    shape = [filter_size, filter_size, num_input_channels, num_filters]
    # Create new weights aka. filters with the given shape.
    weights = new_weights_conv(name,shape)
    # Create new biases, one for each filter.
    biases = new_biases(length=num_filters)
    layer = tf.nn.conv2d(input=input,
                         filter=weights,
                         strides=[1, 1, 1, 1],
                         padding='SAME')
    layer += biases
    # Use pooling to down-sample the image resolution?
    if use_pooling:
        layer = tf.nn.max_pool(value=layer,
                               ksize=[1, 2, 2, 1],
                               strides=[1, 2, 2, 1],
                               padding='SAME')
    # Nonlinearity applied after the optional pooling step.
    layer = tf.nn.relu(layer)
    return layer, weights
def flatten_layer(layer):
    """Reshape a 4-D conv activation into (batch, features)."""
    shape = layer.get_shape()
    # Product of dims 1..3 (height * width * channels).
    feature_count = shape[1:4].num_elements()
    flattened = tf.reshape(layer, [-1, feature_count])
    return flattened, feature_count
def new_fc_layer(name,input,  # The previous layer.
                 num_inputs,  # Num. inputs from prev. layer.
                 num_outputs, use_nonlinear):
    """Fully connected layer; ReLU is applied when use_nonlinear is True."""
    weights = new_weights_fc(name, [num_inputs, num_outputs])
    biases = new_biases(length=num_outputs)
    activation = tf.matmul(input, weights) + biases
    if use_nonlinear:
        activation = tf.nn.relu(activation)
    return activation, weights
"""X_test, y_test = recup('dataTrain')
print(len(X_test))
print(X_test[0])
print('')
print(y_test[0])
input("recuperation done")"""
# Convolutional Layer 1.
filter_size1 = 3
num_filters1 = 32
num_filters2 = 64
num_filters3 = 128
n_classes = 15
batch_size = 256
imgSize = 64
x = tf.placeholder(tf.float32, [None, imgSize, imgSize])
x_image = tf.reshape(x, [-1, imgSize, imgSize, 1])
y = tf.placeholder(tf.float32)
keep_prob = tf.placeholder(tf.float32)
# ----- Network graph: three conv stages (conv, then conv + 2x2 pooling) -----
layer_conv1a, weights_conv1a = \
    new_conv_layer("conv1a",input=x_image,
                   num_input_channels=1,
                   filter_size=filter_size1,
                   num_filters=num_filters1,
                   dropout=keep_prob,
                   use_pooling=False)
layer_conv1a1, weights_conv1a1 = \
    new_conv_layer("conv1a1",input=layer_conv1a,
                   num_input_channels=num_filters1,
                   filter_size=filter_size1,
                   num_filters=num_filters1,
                   dropout=keep_prob,
                   use_pooling=True)
layer_conv1b, weights_conv1b = \
    new_conv_layer("conv1b",input=layer_conv1a1,
                   num_input_channels=num_filters1,
                   filter_size=filter_size1,
                   num_filters=num_filters2,
                   dropout=keep_prob,
                   use_pooling=False)
layer_conv1b1, weights_conv1b1 = \
    new_conv_layer("conv1b1",input=layer_conv1b,
                   num_input_channels=num_filters2,
                   filter_size=filter_size1,
                   num_filters=num_filters2,
                   dropout=keep_prob,
                   use_pooling=True)
layer_conv1c, weights_conv1c = \
    new_conv_layer("conv1c",input=layer_conv1b1,
                   num_input_channels=num_filters2,
                   filter_size=filter_size1,
                   num_filters=num_filters2,
                   dropout=keep_prob,
                   use_pooling=False)
layer_conv1c1, weights_conv1c1 = \
    new_conv_layer("conv1c1",input=layer_conv1c,
                   num_input_channels=num_filters2,
                   filter_size=filter_size1,
                   num_filters=num_filters2,
                   dropout=keep_prob,
                   use_pooling=True)
# Flatten the last conv activation and project to class logits.
layer_flat, num_features = flatten_layer(layer_conv1c1)
layer_f, weights_f = new_fc_layer("fc",input=layer_flat,
                                  num_inputs=num_features,
                                  num_outputs=n_classes,
                                  use_nonlinear=False)
# Class probabilities and predicted class index.
# NOTE(review): tf.argmax's `dimension=` keyword is the deprecated spelling
# of `axis=` — works in TF 1.x.
y_pred = tf.nn.softmax(layer_f)
y_pred_cls = tf.argmax(y_pred, dimension=1)
print(layer_conv1a)
print(layer_flat)
print(layer_f)
# Accuracy over one-hot labels (compare argmax of logits vs labels).
correct = tf.equal(tf.argmax(layer_f, 1), tf.argmax(y, 1))
accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
# Checkpoint location for the trained weights.
saver = tf.train.Saver()
save_dir = 'final_model/'
if not os.path.exists(save_dir):
    os.makedirs(save_dir)
save_path = os.path.join(save_dir, 'best_model')
# Human-readable gesture class names, index-aligned with the labels.
gestures = ['None', 'fist', 'thumb up', 'thumb down', 'stop', 'catch', \
            'swing', 'phone', 'victory','C', 'okay', '2 fingers', \
            '2 fingers Horiz', 'rock&roll', 'rock&roll Horiz']
# Accumulators filled by main() when capturing from the camera.
images, labels = [], []
def main(g):
    """Capture 26 frames of gesture index *g* from the webcam.

    Saves each frame as a color PNG under ./image/ and appends the
    grayscale, network-sized version (with a one-hot label) to the
    module-level ``images`` / ``labels`` lists.
    """
    global images, labels, imgSize, saveSize
    t = time() + 1
    cpt = 0
    pauseState = True
    print('Pause :', pauseState, 'Press SPACE to start')
    while cpt <= 25:
        image_np = cap.get_frame()
        cv2.imshow('object detection', cv2.resize(image_np, cameraSize))
        # Take one shot every 0.1 s while capture is not paused.
        if time() - t > 0.1 and not(pauseState):
            print('shoot', cpt)
            color_image = cv2.cv2.resize(image_np, (saveSize,saveSize))
            name = './image/' + str(g) + '_' + str(cpt) +'.png'
            cv2.imwrite(name, color_image)
            # Re-read the saved PNG in grayscale (flag 0) at network size.
            images.append(np.array(cv2.resize(cv2.imread(name, 0), (imgSize,imgSize))))
            # One-hot label for gesture g.
            classes = np.zeros(n_classes)
            classes[g] = 1
            labels.append(classes)
            t = time()
            cpt += 1
        key = cv2.waitKey(25) & 0xFF
        if key == ord(' '):
            # SPACE toggles capture on/off; 'q' aborts.
            pauseState = not(pauseState)
            print('Pause :', pauseState, 'Press SPACE to change state')
        elif key == ord('q'):
            cv2.destroyAllWindows()
            break
# Ensure the folder used by main() for captured PNGs exists.
save_dir = 'image/'
if not os.path.exists(save_dir):
    os.makedirs(save_dir)
def recupTest(folder, num):
    """Load evaluation split *num* (plus per-class arrays) from ./<folder>/."""
    base = './' + folder + '/'
    suffix = '_' + str(num) + '.npy'
    X_test = np.load(base + 'Xtest' + suffix)
    y_test = np.load(base + 'Ytest' + suffix)
    X_testClass = np.load(base + 'XtestClass' + suffix)
    y_testClass = np.load(base + 'YtestClass' + suffix)
    return X_test, y_test, X_testClass, y_testClass
"""
for g in range(n_classes):
print('Lancement main :', gestures[g])
main(g)
X_test = np.array(images)
y_test = np.array(labels)"""
X_test, y_test, _, _ = recupTest('dataTrain', 0)
"""batch_size = int(X_test.shape[0]/4)
X_test = X_test[:4*batch_size]
y_test = y_test[:4*batch_size]"""
batch_size = 256
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
saver.restore(sess=sess, save_path=save_path)
size_sample = int(len(X_test)/4)
y_ = y_pred.eval({x:X_test[:batch_size], keep_prob: 1})
for g in range(batch_size,size_sample,batch_size):
print(g, ' / ', size_sample)
y_ = np.vstack((y_,y_pred.eval({x:X_test[g:g+batch_size], keep_prob: 1})))
cnf_matrix = confusion_matrix(np.argmax(y_test[:g+batch_size],1), np.argmax(y_,1))
plt.figure()
plot_confusion_matrix(cnf_matrix, normalize=True, classes=gestures,
title='Confusion matrix, without normalization')
plt.show()
|
[
"andreas.pastor@etu.univ-nantes.fr"
] |
andreas.pastor@etu.univ-nantes.fr
|
556cd12c5bcabb294fdef6cef5e233d27d08634b
|
b5ce6908490cfb8e6a1e1cbe4745d675122ddce0
|
/questions/search-insert-position/Solution.py
|
c0090acd08a2b839bf40909c0f07c328192ae1f5
|
[
"MIT"
] |
permissive
|
franklingu/leetcode-solutions
|
8895910f13208e1d8e604100d84c2dd35684cde4
|
7ad7e5c1c040510b7b7bd225ed4297054464dbc6
|
refs/heads/master
| 2023-01-09T01:34:08.097518
| 2023-01-02T02:05:35
| 2023-01-02T02:05:35
| 43,345,677
| 155
| 66
|
MIT
| 2020-10-02T03:41:36
| 2015-09-29T04:54:38
|
Python
|
UTF-8
|
Python
| false
| false
| 523
|
py
|
'''
Given a sorted array and a target value, return the index if the target is found. If not, return the index where it would be if it were inserted in order.
You may assume no duplicates in the array.
Example 1:
Input: [1,3,5,6], 5
Output: 2
Example 2:
Input: [1,3,5,6], 2
Output: 1
Example 3:
Input: [1,3,5,6], 7
Output: 4
Example 4:
Input: [1,3,5,6], 0
Output: 0
'''
import bisect
class Solution:
    def searchInsert(self, nums: 'List[int]', target: int) -> int:
        """Return the index of target in sorted nums, or its insertion point.

        Assumes nums contains no duplicates (per the problem statement).
        """
        # The annotation is quoted because `List` is only defined inside the
        # LeetCode harness; a bare `List[int]` raises NameError when this
        # class body is evaluated standalone.
        # bisect_left yields the leftmost position that keeps nums sorted,
        # which equals the index of target when it is present.
        return bisect.bisect_left(nums, target)
|
[
"franklingujunchao@gmail.com"
] |
franklingujunchao@gmail.com
|
73a23e510d0db12d3463c18a0f24bc61535d211a
|
9d1b1d52f99b86bec0e74878c0535057115dc667
|
/pes/views.py
|
2c1c8ed73da304fb4070741309f11b3496348234
|
[] |
no_license
|
antocuni/pesranking
|
1f9b2bb8f03ba15f5f5d36ff6e70e0de8edc5002
|
574ecf8b5e49979adf709239a4df78de83acd039
|
refs/heads/master
| 2022-11-22T03:21:40.837305
| 2011-12-01T19:31:03
| 2011-12-01T19:31:03
| 275,815,179
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 194
|
py
|
from django.http import HttpResponseRedirect
from pes import models
def updateranking(request):
    """Recompute the global ranking, then redirect back to the referring page.

    NOTE(review): indexing META['HTTP_REFERER'] raises KeyError when no
    Referer header is sent — confirm this view is only reached from
    in-site links.
    """
    models.Match.updateranking()
    return HttpResponseRedirect(request.META['HTTP_REFERER'])
|
[
"anto.cuni@gmail.com"
] |
anto.cuni@gmail.com
|
8ab5c8d3e3417cf68662e2bb6eed4603cbabc34b
|
a79e8e99099e355ba7f6d87309fbf13e70f74c57
|
/weixinqunBackend/wxqInfo/apps.py
|
dfced5e365cf8bd48cb79074595384988b2c4a06
|
[] |
no_license
|
fat39/weixinqunDeom
|
cad14bcf64c7a4da6293776ccf999fb82007af9e
|
118c8208dd950c5e94c607dd84b09c3255023642
|
refs/heads/master
| 2020-04-26T09:45:15.092717
| 2019-03-02T15:48:27
| 2019-03-02T15:48:27
| 173,466,712
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 89
|
py
|
from django.apps import AppConfig
class WxqinfoConfig(AppConfig):
    """Django application configuration for the wxqInfo app."""
    name = 'wxqInfo'
|
[
"fat39@163.com"
] |
fat39@163.com
|
7d1d5d7dd799f554f4859903533709fce16dcc43
|
491e8ee92e1126eebe58c75b31e7280239524a09
|
/distillation/scripts/extract_distilbert.py
|
e5f6e2936843158a62b66b1fc4ffac77fc9808d1
|
[] |
no_license
|
sdonoso/glues_for_alberts_and_distilbeto
|
0e7815699fdd1cc949dbdfa74244391af71e6c94
|
5cca38bda64ac9e12a003bb9048226957bc74b38
|
refs/heads/master
| 2023-08-10T16:08:00.397625
| 2021-09-27T18:04:48
| 2021-09-27T18:04:48
| 401,483,905
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,315
|
py
|
# coding=utf-8
# Copyright 2019-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Preprocessing script before training DistilBERT.
Specific to BERT -> DistilBERT.
"""
import argparse
import torch
from transformers import BertForMaskedLM
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Extraction some layers of the full BertForMaskedLM or RObertaForMaskedLM for Transfer Learned Distillation"
)
parser.add_argument("--model_type", default="bert", choices=["bert"])
parser.add_argument("--model_name", default="bert-base-uncased", type=str)
parser.add_argument("--dump_checkpoint", default="serialization_dir/tf_bert-base-uncased_0247911.pth", type=str)
parser.add_argument("--vocab_transform", action="store_true")
args = parser.parse_args()
if args.model_type == "bert":
model = BertForMaskedLM.from_pretrained(args.model_name)
prefix = "bert"
else:
raise ValueError('args.model_type should be "bert".')
state_dict = model.state_dict()
compressed_sd = {}
for w in ["word_embeddings", "position_embeddings"]:
compressed_sd[f"distilbert.embeddings.{w}.weight"] = state_dict[f"{prefix}.embeddings.{w}.weight"]
for w in ["weight", "bias"]:
compressed_sd[f"distilbert.embeddings.LayerNorm.{w}"] = state_dict[f"{prefix}.embeddings.LayerNorm.{w}"]
std_idx = 0
for teacher_idx in [0, 2, 4, 7, 9, 11]:
for w in ["weight", "bias"]:
compressed_sd[f"distilbert.transformer.layer.{std_idx}.attention.q_lin.{w}"] = state_dict[
f"{prefix}.encoder.layer.{teacher_idx}.attention.self.query.{w}"
]
compressed_sd[f"distilbert.transformer.layer.{std_idx}.attention.k_lin.{w}"] = state_dict[
f"{prefix}.encoder.layer.{teacher_idx}.attention.self.key.{w}"
]
compressed_sd[f"distilbert.transformer.layer.{std_idx}.attention.v_lin.{w}"] = state_dict[
f"{prefix}.encoder.layer.{teacher_idx}.attention.self.value.{w}"
]
compressed_sd[f"distilbert.transformer.layer.{std_idx}.attention.out_lin.{w}"] = state_dict[
f"{prefix}.encoder.layer.{teacher_idx}.attention.output.dense.{w}"
]
compressed_sd[f"distilbert.transformer.layer.{std_idx}.sa_layer_norm.{w}"] = state_dict[
f"{prefix}.encoder.layer.{teacher_idx}.attention.output.LayerNorm.{w}"
]
compressed_sd[f"distilbert.transformer.layer.{std_idx}.ffn.lin1.{w}"] = state_dict[
f"{prefix}.encoder.layer.{teacher_idx}.intermediate.dense.{w}"
]
compressed_sd[f"distilbert.transformer.layer.{std_idx}.ffn.lin2.{w}"] = state_dict[
f"{prefix}.encoder.layer.{teacher_idx}.output.dense.{w}"
]
compressed_sd[f"distilbert.transformer.layer.{std_idx}.output_layer_norm.{w}"] = state_dict[
f"{prefix}.encoder.layer.{teacher_idx}.output.LayerNorm.{w}"
]
std_idx += 1
compressed_sd["vocab_projector.weight"] = state_dict["cls.predictions.decoder.weight"]
compressed_sd["vocab_projector.bias"] = state_dict["cls.predictions.bias"]
if args.vocab_transform:
for w in ["weight", "bias"]:
compressed_sd[f"vocab_transform.{w}"] = state_dict[f"cls.predictions.transform.dense.{w}"]
compressed_sd[f"vocab_layer_norm.{w}"] = state_dict[f"cls.predictions.transform.LayerNorm.{w}"]
print(f"N layers selected for distillation: {std_idx}")
print(f"Number of params transferred for distillation: {len(compressed_sd.keys())}")
print(f"Save transferred checkpoint to {args.dump_checkpoint}.")
torch.save(compressed_sd, args.dump_checkpoint)
|
[
"sdonoso@solotodo.com"
] |
sdonoso@solotodo.com
|
16fd87cc3e1a96176a7f22aa42ff1cfeeb7e0269
|
46ef40c8d12267bb399151b6004491be3dd0c3a6
|
/api/routes.py
|
d9eb050f38177b5d2365be0eda94df8a1090a765
|
[] |
no_license
|
biggeorge9900/potato
|
d602f03b15049963a7f8daf7af01eddee873e15b
|
2141f918e97a431cbc2debe3e6184f31ed0bc4c7
|
refs/heads/master
| 2016-09-15T20:20:47.637361
| 2014-06-02T02:48:42
| 2014-06-09T06:04:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 296
|
py
|
from api.box_api import authentication, finish_authentication
# URL routing table: (pattern, handler) pairs.
# NOTE(review): the first two entries use dotted-path strings while the Box
# endpoints use imported handler objects — confirm the router accepts both.
routes = [
    (r'/api/login_test', 'api.login_test.MyHandler'),
    (r'/api/get_box_', 'api.login_test.MyHandler'),
    (r'/api/box/authentication', authentication),
    (r'/api/box/finish_authentication', finish_authentication),
]
|
[
"tao.9900@gmail.com"
] |
tao.9900@gmail.com
|
2e348161c86c67245725ee7b207542d60aa277df
|
aef3789bf667efc60d4f02cfd2fb4c629e307715
|
/studyone/migrations/0004_snippet.py
|
49734c004d32e174bddb023a4982e89820743026
|
[] |
no_license
|
lvorcool/Blog
|
c2d0ee6104e8f8850fb58a25060d8d165c66b96c
|
dced5b9b83c7a1c808e9689515894dad050ce4f8
|
refs/heads/master
| 2021-08-14T10:00:46.221466
| 2017-11-15T09:40:46
| 2017-11-15T09:40:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 12,164
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-11-01 15:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Snippet model (code snippets with Pygments lexer/style choices)."""

    # Must be applied after the previous auto-generated migration of this app.
    dependencies = [
        ('studyone', '0003_auto_20171101_1334'),
    ]
    operations = [
        migrations.CreateModel(
            name='Snippet',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('title', models.CharField(blank=True, default='', max_length=100)),
                ('code', models.TextField()),
                # NOTE(review): 'lineons' is presumably a typo for 'linenos'
                # (Pygments line-number toggle) — cannot be renamed here without
                # a follow-up migration; verify against the model definition.
                ('lineons', models.BooleanField(default=False)),
                # Choices enumerate Pygments lexer short names at generation time.
                ('language', models.CharField(choices=[('abap', 'ABAP'), ('abnf', 'ABNF'), ('ada', 'Ada'), ('adl', 'ADL'), ('agda', 'Agda'), ('aheui', 'Aheui'), ('ahk', 'autohotkey'), ('alloy', 'Alloy'), ('ampl', 'Ampl'), ('antlr', 'ANTLR'), ('antlr-as', 'ANTLR With ActionScript Target'), ('antlr-cpp', 'ANTLR With CPP Target'), ('antlr-csharp', 'ANTLR With C# Target'), ('antlr-java', 'ANTLR With Java Target'), ('antlr-objc', 'ANTLR With ObjectiveC Target'), ('antlr-perl', 'ANTLR With Perl Target'), ('antlr-python', 'ANTLR With Python Target'), ('antlr-ruby', 'ANTLR With Ruby Target'), ('apacheconf', 'ApacheConf'), ('apl', 'APL'), ('applescript', 'AppleScript'), ('arduino', 'Arduino'), ('as', 'ActionScript'), ('as3', 'ActionScript 3'), ('aspectj', 'AspectJ'), ('aspx-cs', 'aspx-cs'), ('aspx-vb', 'aspx-vb'), ('asy', 'Asymptote'), ('at', 'AmbientTalk'), ('autoit', 'AutoIt'), ('awk', 'Awk'), ('basemake', 'Base Makefile'), ('bash', 'Bash'), ('bat', 'Batchfile'), ('bbcode', 'BBCode'), ('bc', 'BC'), ('befunge', 'Befunge'), ('bib', 'BibTeX'), ('blitzbasic', 'BlitzBasic'), ('blitzmax', 'BlitzMax'), ('bnf', 'BNF'), ('boo', 'Boo'), ('boogie', 'Boogie'), ('brainfuck', 'Brainfuck'), ('bro', 'Bro'), ('bst', 'BST'), ('bugs', 'BUGS'), ('c', 'C'), ('c-objdump', 'c-objdump'), ('ca65', 'ca65 assembler'), ('cadl', 'cADL'), ('camkes', 'CAmkES'), ('capdl', 'CapDL'), ('capnp', "Cap'n Proto"), ('cbmbas', 'CBM BASIC V2'), ('ceylon', 'Ceylon'), ('cfc', 'Coldfusion CFC'), ('cfengine3', 'CFEngine3'), ('cfm', 'Coldfusion HTML'), ('cfs', 'cfstatement'), ('chai', 'ChaiScript'), ('chapel', 'Chapel'), ('cheetah', 'Cheetah'), ('cirru', 'Cirru'), ('clay', 'Clay'), ('clean', 'Clean'), ('clojure', 'Clojure'), ('clojurescript', 'ClojureScript'), ('cmake', 'CMake'), ('cobol', 'COBOL'), ('cobolfree', 'COBOLFree'), ('coffee-script', 'CoffeeScript'), ('common-lisp', 'Common Lisp'), ('componentpascal', 'Component Pascal'), ('console', 'Bash Session'), ('control', 'Debian Control file'), ('coq', 'Coq'), ('cpp', 'C++'), 
('cpp-objdump', 'cpp-objdump'), ('cpsa', 'CPSA'), ('cr', 'Crystal'), ('crmsh', 'Crmsh'), ('croc', 'Croc'), ('cryptol', 'Cryptol'), ('csharp', 'C#'), ('csound', 'Csound Orchestra'), ('csound-document', 'Csound Document'), ('csound-score', 'Csound Score'), ('css', 'CSS'), ('css+django', 'CSS+Django/Jinja'), ('css+erb', 'CSS+Ruby'), ('css+genshitext', 'CSS+Genshi Text'), ('css+lasso', 'CSS+Lasso'), ('css+mako', 'CSS+Mako'), ('css+mozpreproc', 'CSS+mozpreproc'), ('css+myghty', 'CSS+Myghty'), ('css+php', 'CSS+PHP'), ('css+smarty', 'CSS+Smarty'), ('cucumber', 'Gherkin'), ('cuda', 'CUDA'), ('cypher', 'Cypher'), ('cython', 'Cython'), ('d', 'D'), ('d-objdump', 'd-objdump'), ('dart', 'Dart'), ('delphi', 'Delphi'), ('dg', 'dg'), ('diff', 'Diff'), ('django', 'Django/Jinja'), ('docker', 'Docker'), ('doscon', 'MSDOS Session'), ('dpatch', 'Darcs Patch'), ('dtd', 'DTD'), ('duel', 'Duel'), ('dylan', 'Dylan'), ('dylan-console', 'Dylan session'), ('dylan-lid', 'DylanLID'), ('earl-grey', 'Earl Grey'), ('easytrieve', 'Easytrieve'), ('ebnf', 'EBNF'), ('ec', 'eC'), ('ecl', 'ECL'), ('eiffel', 'Eiffel'), ('elixir', 'Elixir'), ('elm', 'Elm'), ('emacs', 'EmacsLisp'), ('erb', 'ERB'), ('erl', 'Erlang erl session'), ('erlang', 'Erlang'), ('evoque', 'Evoque'), ('extempore', 'xtlang'), ('ezhil', 'Ezhil'), ('factor', 'Factor'), ('fan', 'Fantom'), ('fancy', 'Fancy'), ('felix', 'Felix'), ('fish', 'Fish'), ('flatline', 'Flatline'), ('forth', 'Forth'), ('fortran', 'Fortran'), ('fortranfixed', 'FortranFixed'), ('foxpro', 'FoxPro'), ('fsharp', 'FSharp'), ('gap', 'GAP'), ('gas', 'GAS'), ('genshi', 'Genshi'), ('genshitext', 'Genshi Text'), ('glsl', 'GLSL'), ('gnuplot', 'Gnuplot'), ('go', 'Go'), ('golo', 'Golo'), ('gooddata-cl', 'GoodData-CL'), ('gosu', 'Gosu'), ('groff', 'Groff'), ('groovy', 'Groovy'), ('gst', 'Gosu Template'), ('haml', 'Haml'), ('handlebars', 'Handlebars'), ('haskell', 'Haskell'), ('haxeml', 'Hxml'), ('hexdump', 'Hexdump'), ('hsail', 'HSAIL'), ('html', 'HTML'), ('html+cheetah', 
'HTML+Cheetah'), ('html+django', 'HTML+Django/Jinja'), ('html+evoque', 'HTML+Evoque'), ('html+genshi', 'HTML+Genshi'), ('html+handlebars', 'HTML+Handlebars'), ('html+lasso', 'HTML+Lasso'), ('html+mako', 'HTML+Mako'), ('html+myghty', 'HTML+Myghty'), ('html+ng2', 'HTML + Angular2'), ('html+php', 'HTML+PHP'), ('html+smarty', 'HTML+Smarty'), ('html+twig', 'HTML+Twig'), ('html+velocity', 'HTML+Velocity'), ('http', 'HTTP'), ('hx', 'Haxe'), ('hybris', 'Hybris'), ('hylang', 'Hy'), ('i6t', 'Inform 6 template'), ('idl', 'IDL'), ('idris', 'Idris'), ('iex', 'Elixir iex session'), ('igor', 'Igor'), ('inform6', 'Inform 6'), ('inform7', 'Inform 7'), ('ini', 'INI'), ('io', 'Io'), ('ioke', 'Ioke'), ('ipython2', 'IPython'), ('ipython3', 'IPython3'), ('ipythonconsole', 'IPython console session'), ('irc', 'IRC logs'), ('isabelle', 'Isabelle'), ('j', 'J'), ('jags', 'JAGS'), ('jasmin', 'Jasmin'), ('java', 'Java'), ('javascript+mozpreproc', 'Javascript+mozpreproc'), ('jcl', 'JCL'), ('jlcon', 'Julia console'), ('js', 'JavaScript'), ('js+cheetah', 'JavaScript+Cheetah'), ('js+django', 'JavaScript+Django/Jinja'), ('js+erb', 'JavaScript+Ruby'), ('js+genshitext', 'JavaScript+Genshi Text'), ('js+lasso', 'JavaScript+Lasso'), ('js+mako', 'JavaScript+Mako'), ('js+myghty', 'JavaScript+Myghty'), ('js+php', 'JavaScript+PHP'), ('js+smarty', 'JavaScript+Smarty'), ('jsgf', 'JSGF'), ('json', 'JSON'), ('json-object', 'JSONBareObject'), ('jsonld', 'JSON-LD'), ('jsp', 'Java Server Page'), ('julia', 'Julia'), ('juttle', 'Juttle'), ('kal', 'Kal'), ('kconfig', 'Kconfig'), ('koka', 'Koka'), ('kotlin', 'Kotlin'), ('lagda', 'Literate Agda'), ('lasso', 'Lasso'), ('lcry', 'Literate Cryptol'), ('lean', 'Lean'), ('less', 'LessCss'), ('lhs', 'Literate Haskell'), ('lidr', 'Literate Idris'), ('lighty', 'Lighttpd configuration file'), ('limbo', 'Limbo'), ('liquid', 'liquid'), ('live-script', 'LiveScript'), ('llvm', 'LLVM'), ('logos', 'Logos'), ('logtalk', 'Logtalk'), ('lsl', 'LSL'), ('lua', 'Lua'), ('make', 'Makefile'), 
('mako', 'Mako'), ('maql', 'MAQL'), ('mask', 'Mask'), ('mason', 'Mason'), ('mathematica', 'Mathematica'), ('matlab', 'Matlab'), ('matlabsession', 'Matlab session'), ('md', 'markdown'), ('minid', 'MiniD'), ('modelica', 'Modelica'), ('modula2', 'Modula-2'), ('monkey', 'Monkey'), ('monte', 'Monte'), ('moocode', 'MOOCode'), ('moon', 'MoonScript'), ('mozhashpreproc', 'mozhashpreproc'), ('mozpercentpreproc', 'mozpercentpreproc'), ('mql', 'MQL'), ('mscgen', 'Mscgen'), ('mupad', 'MuPAD'), ('mxml', 'MXML'), ('myghty', 'Myghty'), ('mysql', 'MySQL'), ('nasm', 'NASM'), ('ncl', 'NCL'), ('nemerle', 'Nemerle'), ('nesc', 'nesC'), ('newlisp', 'NewLisp'), ('newspeak', 'Newspeak'), ('ng2', 'Angular2'), ('nginx', 'Nginx configuration file'), ('nim', 'Nimrod'), ('nit', 'Nit'), ('nixos', 'Nix'), ('nsis', 'NSIS'), ('numpy', 'NumPy'), ('nusmv', 'NuSMV'), ('objdump', 'objdump'), ('objdump-nasm', 'objdump-nasm'), ('objective-c', 'Objective-C'), ('objective-c++', 'Objective-C++'), ('objective-j', 'Objective-J'), ('ocaml', 'OCaml'), ('octave', 'Octave'), ('odin', 'ODIN'), ('ooc', 'Ooc'), ('opa', 'Opa'), ('openedge', 'OpenEdge ABL'), ('pacmanconf', 'PacmanConf'), ('pan', 'Pan'), ('parasail', 'ParaSail'), ('pawn', 'Pawn'), ('perl', 'Perl'), ('perl6', 'Perl6'), ('php', 'PHP'), ('pig', 'Pig'), ('pike', 'Pike'), ('pkgconfig', 'PkgConfig'), ('plpgsql', 'PL/pgSQL'), ('postgresql', 'PostgreSQL SQL dialect'), ('postscript', 'PostScript'), ('pot', 'Gettext Catalog'), ('pov', 'POVRay'), ('powershell', 'PowerShell'), ('praat', 'Praat'), ('prolog', 'Prolog'), ('properties', 'Properties'), ('protobuf', 'Protocol Buffer'), ('ps1con', 'PowerShell Session'), ('psql', 'PostgreSQL console (psql)'), ('pug', 'Pug'), ('puppet', 'Puppet'), ('py3tb', 'Python 3.0 Traceback'), ('pycon', 'Python console session'), ('pypylog', 'PyPy Log'), ('pytb', 'Python Traceback'), ('python', 'Python'), ('python3', 'Python 3'), ('qbasic', 'QBasic'), ('qml', 'QML'), ('qvto', 'QVTO'), ('racket', 'Racket'), ('ragel', 'Ragel'), 
('ragel-c', 'Ragel in C Host'), ('ragel-cpp', 'Ragel in CPP Host'), ('ragel-d', 'Ragel in D Host'), ('ragel-em', 'Embedded Ragel'), ('ragel-java', 'Ragel in Java Host'), ('ragel-objc', 'Ragel in Objective C Host'), ('ragel-ruby', 'Ragel in Ruby Host'), ('raw', 'Raw token data'), ('rb', 'Ruby'), ('rbcon', 'Ruby irb session'), ('rconsole', 'RConsole'), ('rd', 'Rd'), ('rebol', 'REBOL'), ('red', 'Red'), ('redcode', 'Redcode'), ('registry', 'reg'), ('resource', 'ResourceBundle'), ('rexx', 'Rexx'), ('rhtml', 'RHTML'), ('rnc', 'Relax-NG Compact'), ('roboconf-graph', 'Roboconf Graph'), ('roboconf-instances', 'Roboconf Instances'), ('robotframework', 'RobotFramework'), ('rql', 'RQL'), ('rsl', 'RSL'), ('rst', 'reStructuredText'), ('rts', 'TrafficScript'), ('rust', 'Rust'), ('sas', 'SAS'), ('sass', 'Sass'), ('sc', 'SuperCollider'), ('scala', 'Scala'), ('scaml', 'Scaml'), ('scheme', 'Scheme'), ('scilab', 'Scilab'), ('scss', 'SCSS'), ('shen', 'Shen'), ('silver', 'Silver'), ('slim', 'Slim'), ('smali', 'Smali'), ('smalltalk', 'Smalltalk'), ('smarty', 'Smarty'), ('sml', 'Standard ML'), ('snobol', 'Snobol'), ('snowball', 'Snowball'), ('sourceslist', 'Debian Sourcelist'), ('sp', 'SourcePawn'), ('sparql', 'SPARQL'), ('spec', 'RPMSpec'), ('splus', 'S'), ('sql', 'SQL'), ('sqlite3', 'sqlite3con'), ('squidconf', 'SquidConf'), ('ssp', 'Scalate Server Page'), ('stan', 'Stan'), ('stata', 'Stata'), ('swift', 'Swift'), ('swig', 'SWIG'), ('systemverilog', 'systemverilog'), ('tads3', 'TADS 3'), ('tap', 'TAP'), ('tasm', 'TASM'), ('tcl', 'Tcl'), ('tcsh', 'Tcsh'), ('tcshcon', 'Tcsh Session'), ('tea', 'Tea'), ('termcap', 'Termcap'), ('terminfo', 'Terminfo'), ('terraform', 'Terraform'), ('tex', 'TeX'), ('text', 'Text only'), ('thrift', 'Thrift'), ('todotxt', 'Todotxt'), ('trac-wiki', 'MoinMoin/Trac Wiki markup'), ('treetop', 'Treetop'), ('ts', 'TypeScript'), ('tsql', 'Transact-SQL'), ('turtle', 'Turtle'), ('twig', 'Twig'), ('typoscript', 'TypoScript'), ('typoscriptcssdata', 'TypoScriptCssData'), 
('typoscripthtmldata', 'TypoScriptHtmlData'), ('urbiscript', 'UrbiScript'), ('vala', 'Vala'), ('vb.net', 'VB.net'), ('vcl', 'VCL'), ('vclsnippets', 'VCLSnippets'), ('vctreestatus', 'VCTreeStatus'), ('velocity', 'Velocity'), ('verilog', 'verilog'), ('vgl', 'VGL'), ('vhdl', 'vhdl'), ('vim', 'VimL'), ('wdiff', 'WDiff'), ('whiley', 'Whiley'), ('x10', 'X10'), ('xml', 'XML'), ('xml+cheetah', 'XML+Cheetah'), ('xml+django', 'XML+Django/Jinja'), ('xml+erb', 'XML+Ruby'), ('xml+evoque', 'XML+Evoque'), ('xml+lasso', 'XML+Lasso'), ('xml+mako', 'XML+Mako'), ('xml+myghty', 'XML+Myghty'), ('xml+php', 'XML+PHP'), ('xml+smarty', 'XML+Smarty'), ('xml+velocity', 'XML+Velocity'), ('xquery', 'XQuery'), ('xslt', 'XSLT'), ('xtend', 'Xtend'), ('xul+mozpreproc', 'XUL+mozpreproc'), ('yaml', 'YAML'), ('yaml+jinja', 'YAML+Jinja'), ('zephir', 'Zephir')], default='python', max_length=100)),
                # NOTE(review): these 'style' choices look corrupted — each
                # entry is a pair of single letters rather than Pygments style
                # names (and the default 'friendly' is not among them). Verify
                # against the model's choices before relying on validation here.
                ('style', models.CharField(choices=[['a', 'm'], ['a', 'n'], ['a', 'p'], ['a', 'p'], ['a', 'p'], ['a', 'r'], ['a', 't'], ['b', 'a'], ['c', 'x'], ['e', 'd'], ['e', 'p'], ['g', 'i'], ['i', 'v'], ['l', 'a'], ['l', 'a'], ['m', 'e'], ['o', 'b'], ['o', 'c'], ['o', 'l'], ['o', 'm'], ['r', 'a'], ['r', 'f'], ['r', 'f'], ['r', 'r'], ['r', 't'], ['s', 'v'], ['u', 'a'], ['u', 'm'], ['w', 'b']], default='friendly', max_length=100)),
            ],
            options={
                'ordering': ('created',),
            },
        ),
    ]
|
[
"2545567546@qq.com"
] |
2545567546@qq.com
|
84b921ebd67dca82253a50ee13baf4d2cb8fdb97
|
6646f6b92e9ff31f2f74b749ea12ace53cfc135c
|
/tests/unit/models/test_package_model.py
|
5d883c6352b89b74851372eb02d55c084db4b862
|
[] |
no_license
|
EricMontague/SponsorMatch
|
0a6685edb44b2694824d3d3a4d15dfcb42fdb68e
|
864aa3cfe25d74c2b97b9f09f45eb9fa10dac892
|
refs/heads/master
| 2022-12-08T22:43:21.684165
| 2021-03-19T00:50:06
| 2021-03-19T00:50:06
| 241,396,411
| 0
| 0
| null | 2022-12-08T03:38:23
| 2020-02-18T15:27:42
|
Python
|
UTF-8
|
Python
| false
| false
| 2,398
|
py
|
"""This module contains tests for the package model."""
import unittest
from tests.integration.testing_data import TestModelFactory
from app import create_app
from app.extensions import db
class PackageModelTestCase(unittest.TestCase):
    """Unit tests for the Package model's sold-out and sales accounting."""

    def setUp(self):
        """Build a testing app, push its context, and create all tables."""
        self.app = create_app("testing", False)
        self.app_context = self.app.app_context()
        self.app_context.push()
        db.create_all()

    def tearDown(self):
        """Drop all tables, close the session, and pop the app context."""
        db.session.remove()
        db.drop_all()
        self.app_context.pop()

    def _persisted_package(self, price=100, available_packages=10):
        """Create and commit a live event owned by an organizer, with one
        attached package, and return that package."""
        organizer_role = TestModelFactory.create_role("Event Organizer")
        organizer = TestModelFactory.create_user()
        organizer.role = organizer_role
        event = TestModelFactory.create_event("Test Event", "live")
        event.user = organizer
        event.venue = TestModelFactory.create_venue()
        package = TestModelFactory.create_package(
            price=price, available_packages=available_packages
        )
        package.event = event
        db.session.add_all([organizer, event, package])
        db.session.commit()
        return package

    def test_package_sold_out(self):
        """A package whose purchases equal its supply reports sold out."""
        package = self._persisted_package()
        package.num_purchased = package.available_packages
        self.assertTrue(package.is_sold_out())

    def test_package_num_sales(self):
        """num_for_sale() decreases as purchases are recorded."""
        package = self._persisted_package()
        self.assertEqual(package.num_for_sale(), 10)
        package.num_purchased += 1
        self.assertEqual(package.num_for_sale(), 9)


if __name__ == "__main__":
    unittest.main()
|
[
"eric.g.montague@gmail.com"
] |
eric.g.montague@gmail.com
|
258c50f0a0095b90cf44f648a147bb9f908366f6
|
dc3a6a8fc1f250c91e2ce48236a5c57e65e1b800
|
/Graphs_I/03_csGraphAdjacencyListRepresentation (Task 3 of 5)/01_csGraphAdjacencyListRepresentation.py
|
a7eb433cf2a37f587ad39079886430508ff7f0d5
|
[
"MIT"
] |
permissive
|
sarahmarie1976/CSPT15_BST_GP
|
dc477d68bad0be1255dcc09136c3aaebde1440fc
|
8f478ebf8b620d4982eecc26c70ce8725fd45624
|
refs/heads/main
| 2023-03-04T12:37:05.981399
| 2021-02-19T22:20:37
| 2021-02-19T22:20:37
| 317,756,459
| 1
| 0
|
MIT
| 2021-02-19T22:20:38
| 2020-12-02T05:10:03
|
Python
|
UTF-8
|
Python
| false
| false
| 1,682
|
py
|
"""
check image for question
A - class Graph:
def __init__(self):
self.vertices = {
"A": {"B": 1},
"B": {"C": 3, "D": 2, "E": 1},
"C": {"E": 4},
"D": {"E": 2},
"E": {"F": 3},
"F": {},
"G": {"D": 1}
}
B - class Graph:
def __init__(self):
self.vertices = [
[0,1,0,0,0,0,0],
[0,0,3,2,1,0,0],
[0,0,0,0,2,0,0],
[0,0,0,0,4,0,0],
[0,0,0,0,0,3,0],
[0,0,0,0,0,0,0],
[0,0,0,1,0,0,0]
]
C - class Graph:
def __init__(self):
self.vertices = {
"A": {"B": 2},
"B": {"C": 2, "D": 3, "E": 4},
"C": {"E": 1},
"D": {"E": 3},
"E": {"F": 2},
"F": {},
"G": {"D": 5}
}
D - class Graph:
def __init__(self):
self.vertices = [
[0,2,0,0,0,0,0],
[0,0,2,3,4,0,0],
[0,0,0,0,1,0,0],
[0,0,0,0,3,0,0],
[0,0,0,0,0,2,0],
[0,0,0,0,0,0,0],
[0,0,0,5,0,0,0]
]
ANSWER IS:
class Graph:
def __init__(self):
self.vertices = {
"A": {"B": 1},
"B": {"C": 3, "D": 2, "E": 1},
"C": {"E": 4},
"D": {"E": 2},
"E": {"F": 3},
"F": {},
"G": {"D": 1}
}
"""
|
[
"sholle7@gmail.com"
] |
sholle7@gmail.com
|
760d794200067d3220230059a30f4b7cb3575360
|
451fd9e6dc42ea08258c3c294744c98fb8f939cb
|
/proxyip_pool.py
|
e852b06d0de5f1900daada7cf2e88b9249742592
|
[] |
no_license
|
huangliang0412/webscrawls
|
7eaec2ccd4fb06020e7e4732d2a940852e145eb5
|
934c4eec0774764ea232a7ed6635bb8e64422826
|
refs/heads/master
| 2021-01-12T16:16:32.909965
| 2017-06-01T13:42:28
| 2017-06-01T13:42:28
| 71,976,067
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,551
|
py
|
#!/usr/bin/env python3
import requests
import random
import redis
from redis_connect import conn
'''
#r = requests.get('http://www.baidu.com')
proxies = {
"http": "94.228.207.233:8085",
#"https": "94.228.207.233:8085",
}
r = requests.get('http://www.baidu.com', proxies = proxies)
print(r.status_code)
print(r.headers)
#r.encoding = 'utf-8'
response = requests.get('http://')
print(response.status_code)
print(response.elapsed.microseconds)
#print(response.text)
proxy = set()
for ip in response.text.split():
#print(ip)
proxy.add(ip)
print(len(proxy))
'''
class ProxyIpPoll(object):
    """Maintain a pool of usable proxy IPs, harvested from a free proxy list,
    availability-checked against a test URL, and persisted in a Redis set."""

    def __init__(self):
        # IPs that responded to the availability probe.
        self.available_proxyips = set()
        # Every harvested candidate IP, checked or not.
        self.all_proxyips = set()

    def get_new_proxyips(self):
        """Fetch a fresh batch of candidate proxy IPs (one per line)."""
        try:
            response = requests.get('http://api.xicidaili.com/free2016.txt')
            for ip in response.text.split():
                self.all_proxyips.add(ip)
        except Exception:
            # Best-effort harvest: a network failure just leaves the pool as-is.
            print('require ip error')

    def check_ip_available(self):
        """Probe every candidate against a test URL; keep the responsive ones."""
        test_url = 'http://www.baidu.com'
        timeout = 5  # seconds per probe
        print(len(self.all_proxyips))
        for ip in self.all_proxyips:
            try:
                proxies = {
                    'https': ip,
                }
                # BUG FIX: the original defined ``timeout`` but passed a
                # duplicated literal 5; use the variable so the limit is
                # configurable in one place.
                response = requests.get(test_url, proxies=proxies, timeout=timeout)
                print(response.elapsed.microseconds)
                self.available_proxyips.add(ip)
                print('%s is available' % ip)
            except Exception:
                print('%s is not available' % ip)

    def store_toredis(self, key='proxyip_pool'):
        """Persist the available IPs into the Redis set named *key*.

        BUG FIX: ``updata_proxyippool`` calls this with ``'temp_pool'``, which
        previously raised ``TypeError`` because the method took no argument.
        ``key`` now defaults to the original set name, so existing callers
        are unaffected.
        """
        for ip in self.available_proxyips:
            conn.sadd(key, ip)

    def get_available_proxyipool(self):
        """Harvest, check, and persist a fresh pool in one step."""
        self.get_new_proxyips()
        self.check_ip_available()
        self.store_toredis()

    def get_random_proxyip(self):
        """Return one random IP from the persisted Redis pool."""
        proxyip = conn.srandmember('proxyip_pool')
        return proxyip

    def updata_proxyippool(self):
        """Rebuild the pool from scratch and swap it into Redis.

        NOTE(review): ``sdiffstore('proxyip_pool', 'temp_pool', 'proxyip_pool')``
        keeps only IPs present in the new batch but absent from the old pool —
        confirm that dropping previously-known-good IPs is intended.
        (Method name 'updata' is kept for backward compatibility.)
        """
        self.all_proxyips.clear()
        self.available_proxyips.clear()
        self.get_available_proxyipool()
        self.store_toredis('temp_pool')
        conn.sdiffstore('proxyip_pool', 'temp_pool', 'proxyip_pool')
        conn.delete('temp_pool')
'''
if __name__ == '__main__':
ippool = ProxyIpPoll()
ippool.get_available_proxyipool()
#ippool.get_new_proxyips()
'''
|
[
"huangliang866@163.com"
] |
huangliang866@163.com
|
3a2efb5bb61e4e173d8b0c3791c47e269f4a6a8f
|
b54ab907310821392659b571e177c01d3c330721
|
/music/urls.py
|
eb04b8f517f4c8ffc336757c68010ab9216bb51c
|
[] |
no_license
|
adarsh-verma027/demo_repo
|
68aff0eeed42672ac9c93af2d0452381e9b0f0a1
|
8b5bca9c046b2726006b30d4f0ceffc99e33b5e0
|
refs/heads/master
| 2020-03-18T06:10:49.031695
| 2018-05-23T16:22:56
| 2018-05-23T16:22:56
| 134,380,027
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 190
|
py
|
# URLconf for the music app: index page and per-album detail view.
from django.conf.urls import url
from . import views
urlpatterns = [
    #url(r'^admin/', admin.site.urls),
    # /music/ -> index listing
    url(r'^$', views.index),
    # /music/<album_id>/ -> album detail; album_id is a numeric capture group
    url(r'^(?P<album_id>[0-9]+)/$', views.detail)
]
|
[
"adarsh@git.com"
] |
adarsh@git.com
|
9dc4b494a28257793973cafba0d97492a5e21a0a
|
8dbb2a3e2286c97b1baa3ee54210189f8470eb4d
|
/kubernetes-stubs/client/models/v1beta1_volume_error.pyi
|
9f645d23474efed86f712f543da51fd09fa5d231
|
[] |
no_license
|
foodpairing/kubernetes-stubs
|
e4b0f687254316e6f2954bacaa69ff898a88bde4
|
f510dc3d350ec998787f543a280dd619449b5445
|
refs/heads/master
| 2023-08-21T21:00:54.485923
| 2021-08-25T03:53:07
| 2021-08-25T04:45:17
| 414,555,568
| 0
| 0
| null | 2021-10-07T10:26:08
| 2021-10-07T10:26:08
| null |
UTF-8
|
Python
| false
| false
| 518
|
pyi
|
import datetime
import typing
import kubernetes.client
class V1beta1VolumeError:
    """Type stub for the Kubernetes storage.k8s.io/v1beta1 VolumeError object."""
    # Human-readable error description (may be absent).
    message: typing.Optional[str]
    # When the error was encountered (may be absent).
    time: typing.Optional[datetime.datetime]
    def __init__(
        self,
        *,
        message: typing.Optional[str] = ...,
        time: typing.Optional[datetime.datetime] = ...
    ) -> None: ...
    # Plain-dict representation; see V1beta1VolumeErrorDict below.
    def to_dict(self) -> V1beta1VolumeErrorDict: ...
class V1beta1VolumeErrorDict(typing.TypedDict, total=False):
    """Dict form of V1beta1VolumeError; all keys optional (total=False)."""
    message: typing.Optional[str]
    time: typing.Optional[datetime.datetime]
|
[
"nikhil.benesch@gmail.com"
] |
nikhil.benesch@gmail.com
|
17d520f308efa52144c77f897580255120253a76
|
ddf0ad89d1220550b948dda2a3c52f692d6532fa
|
/src/compas_pattern/datastructures/kagome/kagome.py
|
2534d417426731d560ba945049c836401437e019
|
[
"MIT"
] |
permissive
|
jf---/compas_pattern
|
f220a951d29e2c97886a1a4cf11b75007c81e8f6
|
f1663c6362d8751e2f0c45d5699b083e3ed14395
|
refs/heads/master
| 2021-05-19T08:20:07.774850
| 2020-03-09T09:58:05
| 2020-03-09T09:58:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 13,679
|
py
|
from compas_pattern.datastructures.mesh.mesh import Mesh
from compas_pattern.datastructures.network.network import Network
from compas.geometry import convex_hull
from compas.geometry import subtract_vectors
from compas.geometry import normalize_vector
from compas.geometry import dot_vectors
from compas.geometry import cross_vectors
from compas.geometry import midpoint_line
from compas.geometry import scale_vector
from compas.geometry import add_vectors
from compas.geometry import distance_point_point
from compas.geometry import centroid_points
from compas.datastructures import meshes_join_and_weld
from compas.datastructures import mesh_weld
from compas.datastructures import trimesh_subdivide_loop
from compas.topology import conway_ambo
from compas.topology import vertex_coloring
from compas.utilities import pairwise
from compas.utilities import window
class Kagome(Mesh):
    """Triangulated coarse mesh that can be densified (Loop subdivision) and
    patterned (Conway ambo) into a kagome (tri-hexagonal) mesh, with helpers
    to trace, colour, and weave the kagome's polyedges."""

    def __init__(self):
        super(Kagome, self).__init__()
        # Loop-subdivided version of this mesh (set by densification()).
        self.dense_mesh = None
        # Ambo of the dense mesh, i.e. the kagome pattern (set by patterning()).
        self.kagome = None
        # Cached result of kagome_polyedges() (set by store_kagome_polyedge_data()).
        self.kagome_polyedge_data = None

    @classmethod
    def from_mesh(cls, mesh):
        """Copy-construct a Kagome from any compas Mesh."""
        return cls.from_vertices_and_faces(*mesh.to_vertices_and_faces())

    @classmethod
    def from_skeleton(cls, lines, radius=1):
        """Build node meshes around the joints of a line skeleton.

        NOTE(review): only the first joint mesh is returned — ``return nodes[0]``
        sits inside the loop's aftermath, so everything below it is unreachable
        dead code; that tail also references undefined names (``math``, ``rs``,
        ``join_and_weld_meshes``) and would fail if ever reached. Kept as-is
        pending a decision on the intended behavior.
        """
        network = Network.from_lines(lines)
        tube_extremities = {}
        nodes = []
        for vkey in network.vertices():
            if len(network.vertex_neighbors(vkey)) > 1:
                # Convex hull of points pushed distance `radius` along each edge.
                points = [network.edge_point(vkey, nbr, t = float(radius) / network.edge_length(vkey, nbr)) for nbr in network.vertex_neighbors(vkey)]
                faces = convex_hull(points)
                mesh = cls.from_vertices_and_faces(points, faces)
                meshes = []
                for fkey in mesh.faces():
                    vertices = [mesh.edge_midpoint(u, v) for u, v in mesh.face_halfedges(fkey)]
                    faces = [[0,1,2]]
                    meshes.append(cls.from_vertices_and_faces(vertices, faces))
                for vkey_2 in mesh.vertices():
                    tops = []
                    bottoms = []
                    n = normalize_vector(subtract_vectors(mesh.vertex_coordinates(vkey_2), network.vertex_coordinates(vkey)))
                    for i in range(len(mesh.vertex_neighbors(vkey_2))):
                        pt_0 = mesh.edge_midpoint(vkey_2, mesh.vertex_neighbors(vkey_2, ordered = True)[i - 1])
                        bottoms.append(pt_0)
                        pt_1 = mesh.edge_midpoint(vkey_2, mesh.vertex_neighbors(vkey_2, ordered = True)[i])
                        pt_2 = midpoint_line([pt_0, pt_1])
                        pt_2 = add_vectors(scale_vector(n, distance_point_point(pt_0, pt_1)), pt_2)
                        tops.append(pt_2)
                        vertices = [pt_0, pt_2, pt_1]
                        faces = [[0,1,2]]
                        meshes.append(cls.from_vertices_and_faces(vertices, faces))
                    for i in range(len(tops)):
                        vertices = [tops[i - 1], tops[i], bottoms[i]]
                        faces = [[0,1,2]]
                        meshes.append(cls.from_vertices_and_faces(vertices, faces))
                    tube_extremities[(vkey, network.vertex_neighbors(vkey)[vkey_2])] = tops
                mesh = meshes_join_and_weld(meshes)
                nodes.append(mesh)
        return nodes[0]
        # --- unreachable dead code below (see docstring NOTE) ---
        meshes_2 = []
        for u, v in network.edges():
            if len(network.vertex_neighbors(u)) > 1 and len(network.vertex_neighbors(v)) > 1:
                if len(tube_extremities[(u, v)]) == len(tube_extremities[(v, u)]):
                    n = len(tube_extremities[(u, v)])
                    l = network.edge_length(u, v) - 2 * radius
                    m = math.floor(l / radius) + 1
                    pt_uv = tube_extremities[(u, v)]
                    pt_vu = list(reversed(tube_extremities[(v, u)]))
                    dmin = -1
                    imin = None
                    for i in range(n):
                        distance = sum([distance_point_point(pt_uv[j], pt_vu[i + j - len(pt_vu)]) for j in range(n)])
                        if dmin < 0 or distance < dmin:
                            dmin = distance
                            imin = i
                    pt_vu = [pt_vu[imin + j - len(pt_vu)] for j in range(n)]
                    array = [pt_uv]
                    for i in range(int(m)):
                        polygon = []
                        for j in range(int(n)):
                            u = pt_uv[j]
                            v = pt_vu[j]
                            polygon.append(add_vectors(scale_vector(u, (float(m) - 1 - float(i))/float(m - 1)), scale_vector(v, float(i)/float(m - 1))))
                        array.append(polygon)
                    array.append(pt_vu)
                    for i in range(int(n)):
                        for j in range(int(m)):
                            vertices = [array[i - 1][j - 1], array[i - 1][j], array[i][j]]
                            faces = [[0, 1, 2]]
                            meshes_2.append(Mesh.from_vertices_and_faces(vertices, faces))
        vertices, faces = join_and_weld_meshes(meshes_2)
        meshes = []
        for node in nodes:
            vertices, faces = node.to_vertices_and_faces()
            meshes.append(rs.AddMesh(vertices, faces))

    def singularities(self):
        """Vertices whose degree deviates from the regular triangulated grid
        (4 on the boundary, 6 in the interior)."""
        return [vkey for vkey in self.vertices() if (self.is_vertex_on_boundary(vkey) and self.vertex_degree(vkey) != 4) or (not self.is_vertex_on_boundary(vkey) and self.vertex_degree(vkey) != 6)]

    def singularity_points(self):
        """XYZ coordinates of the singular vertices."""
        return [self.vertex_coordinates(vkey) for vkey in self.singularities()]

    def densification(self, k = 1, fixed_boundary = True):
        """Loop-subdivide this mesh k times into ``self.dense_mesh``,
        optionally keeping boundary vertices fixed."""
        if fixed_boundary:
            fixed = self.vertices_on_boundary()
        else:
            fixed = None
        self.dense_mesh = Kagome.from_mesh(trimesh_subdivide_loop(self, k, fixed))

    def patterning(self):
        """Derive the kagome pattern as the ambo of the dense mesh."""
        self.kagome = conway_ambo(self.dense_mesh)

    def store_kagome_polyedge_data(self):
        """Cache the kagome polyedges for the polyline/colouring/weaving helpers."""
        self.kagome_polyedge_data = self.kagome_polyedges()

    def kagome_singularities(self):
        """Closed polylines around kagome faces that break the tri/hex pattern:
        a non-hexagon surrounded by triangles, or a non-triangle surrounded by
        hexagons."""
        singular_faces = []
        for fkey in self.kagome.faces():
            singular_hex = all([len(self.kagome.face_vertices(nbr)) == 3 for nbr in self.kagome.face_neighbors(fkey)]) and len(self.kagome.face_vertices(fkey)) != 6
            singular_tri = all([len(self.kagome.face_vertices(nbr)) == 6 for nbr in self.kagome.face_neighbors(fkey)]) and len(self.kagome.face_vertices(fkey)) != 3
            if singular_hex or singular_tri:
                singular_faces.append([self.kagome.vertex_coordinates(vkey) for vkey in self.kagome.face_vertices(fkey) + self.kagome.face_vertices(fkey)[: 1]])
        return singular_faces

    def kagome_negative_singularities(self):
        """Face keys of triangle-surrounded faces with more than six vertices."""
        return [fkey for fkey in self.kagome.faces() if all([len(self.kagome.face_vertices(nbr)) == 3 for nbr in self.kagome.face_neighbors(fkey)]) and len(self.kagome.face_vertices(fkey)) > 6]

    def kagome_negative_polygons(self):
        """Vertex coordinates of the negative-singularity face polygons."""
        return [[self.kagome.vertex_coordinates(vkey) for vkey in self.kagome.face_vertices(fkey)] for fkey in self.kagome_negative_singularities()]

    def kagome_vertex_opposite_vertex(self, u, v):
        """Continue a polyedge straight through v coming from u, or None at a
        boundary dead end."""
        if self.kagome.is_edge_on_boundary(u, v):
            if self.kagome.vertex_degree(v) == 2:
                return None
            else:
                return [nbr for nbr in self.kagome.vertex_neighbors(v, ordered = True) if nbr != u and self.kagome.is_edge_on_boundary(v, nbr)][0]
        elif self.kagome.is_vertex_on_boundary(v):
            return None
        else:
            # Interior vertices are four-valent in a kagome: go straight across.
            nbrs = self.kagome.vertex_neighbors(v, ordered = True)
            return nbrs[nbrs.index(u) - 2]

    def kagome_polyedge(self, u0, v0):
        """Trace the full polyedge through edge (u0, v0): follow straight
        crossings in both directions until it closes on itself or hits the
        boundary at both ends."""
        polyedge = [u0, v0]
        while len(polyedge) <= self.kagome.number_of_vertices():
            # end if closed loop
            if polyedge[0] == polyedge[-1]:
                break
            # get next vertex accros four-valent vertex
            w = self.kagome_vertex_opposite_vertex(*polyedge[-2 :])
            # flip if end of first extremity
            if w is None:
                polyedge = list(reversed(polyedge))
                # stop if end of second extremity
                w = self.kagome_vertex_opposite_vertex(*polyedge[-2 :])
                if w is None:
                    break
            # add next vertex
            polyedge.append(w)
        return polyedge

    def kagome_polyedges_0(self):
        """Collect all polyedges by popping untraced edges from a worklist
        (older list-based variant of kagome_polyedges)."""
        polyedges = []
        edges = list(self.kagome.edges())
        while len(edges) > 0:
            # collect new polyedge
            u0, v0 = edges.pop()
            polyedges.append(self.kagome_polyedge(u0, v0))
            # remove collected edges
            for u, v in pairwise(polyedges[-1]):
                if (u, v) in edges:
                    edges.remove((u, v))
                elif (v, u) in edges:
                    edges.remove((v, u))
        return polyedges

    def kagome_polyedges(self):
        """Collect all polyedges, marking visited edges in a dict.

        Marking both (u, v) and (v, u) inserts reverse-orientation keys that
        were never in the initial dict; harmless, since lookups only use the
        orientations yielded by ``edges()``.
        """
        polyedges = []
        edge_visited = {(u, v): False for u, v in self.kagome.edges()}
        for edge in self.kagome.edges():
            if edge_visited[edge]:
                continue
            u0, v0 = edge
            # collect new polyedge
            polyedges.append(self.kagome_polyedge(u0, v0))
            # mark collected edges in both orientations
            for u, v in pairwise(polyedges[-1]):
                edge_visited[(u, v)] = True
                edge_visited[(v, u)] = True
        return polyedges

    def kagome_polyline(self, u, v):
        """Point coordinates of the polyedge traced through edge (u, v)."""
        # BUG FIX: previously referenced undefined names u0/v0, so any call
        # raised NameError; the parameters are now forwarded correctly.
        return [self.kagome.vertex_coordinates(vkey) for vkey in self.kagome_polyedge(u, v)]

    def kagome_polylines(self):
        """Point coordinates of every cached polyedge."""
        return [[self.kagome.vertex_coordinates(vkey) for vkey in polyedge] for polyedge in self.kagome_polyedge_data]

    def kagome_polyline_frames(self):
        """Per-vertex (x, y, z) frames along each cached polyedge: x is the
        vertex normal, y points along the polyedge, z completes the frame."""
        polylines_frames = []
        for polyedge in self.kagome_polyedge_data:
            polyline_frames = []
            for i, u in enumerate(polyedge):
                #if end
                if i == len(polyedge) - 1:
                    # if closed
                    if polyedge[0] == polyedge[-1]:
                        v = polyedge[1]
                    else:
                        v = polyedge[i - 1]
                else:
                    v = polyedge[i + 1]
                x = self.kagome.vertex_normal(u)
                y = normalize_vector(subtract_vectors(self.kagome.vertex_coordinates(v), self.kagome.vertex_coordinates(u)))
                if i == len(polyedge) - 1 and polyedge[0] != polyedge[-1]:
                    y = scale_vector(y, -1)
                z = cross_vectors(x, y)
                polyline_frames.append([x, y, z])
            polylines_frames.append(polyline_frames)
        return polylines_frames

    def kagome_polyedge_colouring(self):
        """Colour polyedges so that crossing polyedges get distinct colours,
        via vertex colouring of the polyedge adjacency network.
        Returns {polyedge tuple: colour index}."""
        polyedges = self.kagome_polyedge_data
        edge_to_polyedge_index = {vkey: {} for vkey in self.kagome.vertices()}
        for i, polyedge in enumerate(polyedges):
            for u, v in pairwise(polyedge):
                edge_to_polyedge_index[u][v] = i
                edge_to_polyedge_index[v][u] = i
        vertices = [centroid_points([self.kagome.vertex_coordinates(vkey) for vkey in polyedge]) for polyedge in polyedges]
        edges = []
        for idx, polyedge in enumerate(polyedges):
            for vkey in polyedge:
                for vkey_2 in self.kagome.vertex_neighbors(vkey):
                    idx_2 = edge_to_polyedge_index[vkey][vkey_2]
                    if idx_2 != idx and idx < idx_2 and (idx, idx_2) not in edges:
                        edges.append((idx, idx_2))
        polyedge_network = Network.from_vertices_and_edges(vertices, edges)
        key_to_colour = vertex_coloring(polyedge_network.adjacency)
        return {tuple(polyedge): key_to_colour[i] for i, polyedge in enumerate(polyedges)}

    def kagome_polyedge_colouring_2(self):
        """Same colouring as kagome_polyedge_colouring, returned as a list
        ordered by polyedge index instead of a dict.
        NOTE(review): the adjacency computation is duplicated between the two
        colouring methods — a candidate for a shared private helper.
        """
        polyedges = self.kagome_polyedge_data
        edge_to_polyedge_index = {vkey: {} for vkey in self.kagome.vertices()}
        for i, polyedge in enumerate(polyedges):
            for u, v in pairwise(polyedge):
                edge_to_polyedge_index[u][v] = i
                edge_to_polyedge_index[v][u] = i
        vertices = [centroid_points([self.kagome.vertex_coordinates(vkey) for vkey in polyedge]) for polyedge in polyedges]
        edges = []
        for idx, polyedge in enumerate(polyedges):
            for vkey in polyedge:
                for vkey_2 in self.kagome.vertex_neighbors(vkey):
                    idx_2 = edge_to_polyedge_index[vkey][vkey_2]
                    if idx_2 != idx and idx < idx_2 and (idx, idx_2) not in edges:
                        edges.append((idx, idx_2))
        polyedge_network = Network.from_vertices_and_edges(vertices, edges)
        key_to_colour = vertex_coloring(polyedge_network.adjacency)
        return [key_to_colour[key] for key in sorted(key_to_colour.keys())]

    def kagome_polyline_colouring(self):
        """Colouring keyed by polyline coordinates instead of vertex keys."""
        return {tuple([tuple(self.kagome.vertex_coordinates(vkey)) for vkey in polyedge]): colour for polyedge, colour in self.kagome_polyedge_colouring().items()}

    def kagome_polyedge_weaving(self):
        """Per-vertex +1/-1 over/under offsets along each polyedge, alternating
        by whether the adjacent face is a triangle or a hexagon."""
        mesh = self.kagome
        edge_to_polyedge_index = {}
        for i, polyedge in enumerate(self.kagome_polyedge_data):
            for u, v in pairwise(polyedge):
                edge_to_polyedge_index[(u, v)] = i
                edge_to_polyedge_index[(v, u)] = i
        vertex_to_polyege_offset = {vkey: {} for vkey in mesh.vertices()}
        for fkey in mesh.faces():
            if len(mesh.face_vertices(fkey)) == 3:
                for u, v, w in window(mesh.face_vertices(fkey) + mesh.face_vertices(fkey)[:2], n = 3):
                    vertex_to_polyege_offset[v].update({edge_to_polyedge_index[(u, v)]: +1, edge_to_polyedge_index[(v, w)]: -1})
            else:
                for u, v, w in window(mesh.face_vertices(fkey) + mesh.face_vertices(fkey)[:2], n = 3):
                    vertex_to_polyege_offset[v].update({edge_to_polyedge_index[(u, v)]: -1, edge_to_polyedge_index[(v, w)]: +1})
        polyedge_weave = []
        for i, polyedge in enumerate(self.kagome_polyedge_data):
            polyedge_weave.append([vertex_to_polyege_offset[vkey][i] for vkey in polyedge])
        return polyedge_weave
# ==============================================================================
# Main
# ==============================================================================
if __name__ == '__main__':

    import compas
    from compas.plotters import MeshPlotter

    # Demo coarse mesh: a unit square fanned into four triangles around its centre.
    vertices = [
        [0., 0., 0.],
        [1., 0., 0.],
        [1., 1., 0.],
        [0., 1., 0.],
        [0.5, 0.5, 0.],
    ]

    faces = [
        [0, 1, 4],
        [1, 2, 4],
        [2, 3, 4],
        [3, 0, 4],
    ]

    # Alternative skeleton input (unused; see commented from_skeleton call below).
    lines = [
        ([0., 0., 0.],[1., 0., -1.]),
        ([0., 0., 0.],[-1., 0., -1.]),
        ([0., 0., 0.],[0., 1., -1.]),
        ([0., 0., 0.],[0., -1., -1.]),
        ([0., 0., 0.],[0., 0., 1.]),
    ]

    #kagome = Kagome.from_skeleton(lines)
    # Build, densify (3 Loop subdivisions), pattern, and cache the polyedges.
    kagome = Kagome.from_vertices_and_faces(vertices, faces)
    kagome.densification(3)
    kagome.patterning()
    kagome.store_kagome_polyedge_data()

    # Plot the resulting kagome mesh.
    plotter = MeshPlotter(kagome.kagome)
    plotter.draw_vertices(radius=.005)
    plotter.draw_edges()
    plotter.draw_faces()
    plotter.show()

    #print kagome.kagome_negative_singularities()
    #print kagome.kagome_singularities()
    #print kagome.kagome_polyline_frames()
    #kagome.kagome_polyedges()
    #kagome.kagome_polyline_colouring()
    kagome.kagome_polyedge_weaving()
|
[
"oval@arch.ethz.ch"
] |
oval@arch.ethz.ch
|
471d14d56e1a08984f632115d022ada928944b5c
|
39f0ac5c2cd93075cd85a4dc744186f495eb6822
|
/cloudhands/identity/registration.py
|
600b4a949ba3d7f56ca5ee03cac9d1985a4ec15c
|
[
"BSD-3-Clause"
] |
permissive
|
cedadev/cloudhands-web
|
f3299522ac9c084918c6c6d0f47eddf2d58609a3
|
2e83df3111c286a54ef3957e873c9ea29c523366
|
refs/heads/master
| 2021-01-19T14:33:20.213250
| 2015-03-27T09:36:51
| 2015-03-27T09:36:51
| 21,493,675
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,363
|
py
|
#!/usr/bin/env python3
# encoding: UTF-8
import datetime
import uuid
import bcrypt
import cloudhands.common.schema
from cloudhands.common.schema import BcryptedPassword
from cloudhands.common.schema import Membership
from cloudhands.common.schema import PosixUIdNumber
from cloudhands.common.schema import PublicKey
from cloudhands.common.schema import Touch
from cloudhands.common.schema import User
from cloudhands.common.states import RegistrationState
__doc__ = """
.. graphviz::
digraph registration {
center = true;
compound = true;
nodesep = 0.6;
edge [decorate=true,labeldistance=3,labelfontname=helvetica,
labelfontsize=10,labelfloat=false];
subgraph cluster_web {
label = "Web";
style = filled;
labeljust = "l";
node [shape=ellipse];
"Set LDAP password" [shape=circle,width=0.8,fixedsize=true];
PRE_USER_LDAPPUBLICKEY [shape=box];
PRE_REGISTRATION_INETORGPERSON_CN [shape=box];
PRE_REGISTRATION_INETORGPERSON_CN -> "Set LDAP password" [style=invis];
"Set LDAP password" -> "BcryptedPassword" [style=invis];
"BcryptedPassword" -> "PosixUIdNumber" [style=invis];
"PosixUIdNumber" -> "PosixGIdNumber" [style=invis];
"PosixGIdNumber" -> PRE_USER_LDAPPUBLICKEY [style=invis];
PRE_USER_LDAPPUBLICKEY -> "PublicKey"[style=invis];
"PublicKey" -> PRE_USER_LDAPPUBLICKEY [style=invis];
}
subgraph cluster_identity {
label = "LDAP client";
node [shape=box];
"PosixUId" [shape=ellipse];
"Write CN" [shape=circle,width=0.8,fixedsize=true];
"Write key" [shape=circle,width=0.8,fixedsize=true];
"Write CN" -> "PosixUId" [style=invis];
"PosixUId" -> PRE_USER_POSIXACCOUNT [style=invis];
PRE_USER_POSIXACCOUNT -> "Write key" [style=invis];
"Write key" -> VALID [style=invis];
}
subgraph cluster_observer {
label = "Observer";
node [shape=box];
"Monitor" [shape=circle];
"PublicKey ?" [shape=diamond];
"Monitor" -> PRE_REGISTRATION_INETORGPERSON [style=invis];
"Monitor" -> "PublicKey ?" [style=invis];
}
subgraph cluster_emailer {
label = "Emailer";
"TimeInterval" [shape=ellipse];
"Send" [shape=circle,width=0.5,fixedsize=true];
"TimeInterval" -> "Send" [style=invis];
}
subgraph cluster_admin {
label = "Admin";
style = filled;
labeljust = "l";
node [shape=ellipse];
PRE_REGISTRATION_PERSON [shape=box];
"User" -> "Registration" [style=invis];
"Registration" -> "EmailAddress" [style=invis];
"EmailAddress" -> PRE_REGISTRATION_PERSON [style=invis];
}
"Start" [shape=point];
"Guest" [shape=circle];
"Start" -> User [style=solid,arrowhead=odot];
"User" -> "Registration" [style=solid,arrowhead=odot];
"Registration" -> "EmailAddress" [style=solid,arrowhead=odot];
"EmailAddress" -> PRE_REGISTRATION_PERSON [style=solid,arrowhead=tee];
PRE_REGISTRATION_PERSON -> "Monitor" [style=dashed,arrowhead=vee];
PRE_REGISTRATION_INETORGPERSON_CN -> "Write CN"
[style=dashed,arrowhead=vee];
"Write CN" -> "PosixUId" [style=solid,arrowhead=odot];
"PosixUId" -> PRE_USER_POSIXACCOUNT [style=solid,arrowhead=tee];
PRE_USER_POSIXACCOUNT -> "PosixUIdNumber"
[taillabel="[POST /login]",style=dashed,arrowhead=odot];
"Set LDAP password" -> "BcryptedPassword"
[style=solid,arrowhead=odot];
"PosixUIdNumber" -> "PosixGIdNumber" [style=solid,arrowhead=odot];
"PosixGIdNumber" -> PRE_USER_LDAPPUBLICKEY [style=solid,arrowhead=tee];
PRE_USER_LDAPPUBLICKEY -> "Monitor" [style=dashed,arrowhead=vee];
PRE_USER_LDAPPUBLICKEY -> "PublicKey"
[taillabel="[POST /registration/{uuid}/keys]",style=dashed,arrowhead=odot];
"PublicKey" -> PRE_USER_LDAPPUBLICKEY [style=dashed,arrowhead=tee];
"Monitor" -> "PublicKey ?" [style=solid,arrowhead=vee];
"PublicKey ?" -> "Write key" [taillabel="Y",style=solid,arrowhead=vee];
"Write key" -> VALID [style=solid,arrowhead=vee];
"Monitor" -> PRE_REGISTRATION_INETORGPERSON [style=solid,arrowhead=tee];
PRE_REGISTRATION_INETORGPERSON -> "TimeInterval"
[style=solid,arrowhead=odot];
"TimeInterval" -> "Send" [style=solid,arrowhead=none];
"Send" -> "Guest" [style=dotted,arrowhead=vee];
"Guest" -> PRE_REGISTRATION_INETORGPERSON_CN
[taillabel="[GET /registration/{uuid}]",style=dotted,arrowhead=tee];
"Guest" -> "Set LDAP password"
[taillabel="[POST /registration/{uuid}/passwords]",style=dotted,arrowhead=tee];
}
"""
def handle_from_email(addrVal):
    """Build a display handle from the local part of an email address.

    "john.doe@example.com" -> "John Doe": the text before the '@' is
    split on dots and each piece is capitalised.
    """
    local_part = addrVal.split('@')[0]
    words = [piece.capitalize() for piece in local_part.split('.')]
    return ' '.join(words)
def from_pool(pool: set, taken: set = frozenset()):
    """Return an iterator over the members of *pool* not in *taken*, sorted.

    :param pool: candidate values (e.g. uid numbers available for allocation).
    :param taken: values already allocated; excluded from the result.
    :return: iterator yielding the remaining values in ascending order.

    FIX: the default for *taken* was the mutable ``set()`` — the classic
    shared mutable-default pitfall.  An empty ``frozenset()`` is immutable
    and behaves identically for the set-difference here.
    """
    return iter(sorted(pool - taken))
class NewPassword:
"""
Adds a new password to a user registration.

Stores a bcrypt hash of the supplied password and, when called with a DB
session, advances the registration to 'pre_registration_inetorgperson'
via a Touch carrying a BcryptedPassword resource.
"""
def __init__(self, user, passwd, reg):
self.user = user
# Hash immediately; the plaintext password is never stored.
# NOTE(review): bcrypt.hashpw expects bytes on Python 3 — presumably
# callers pass bytes; confirm at the call sites.
self.hash = bcrypt.hashpw(passwd, bcrypt.gensalt(12))
self.reg = reg
def match(self, attempt):
# Constant-time comparison of an attempt against the stored hash.
return bcrypt.checkpw(attempt, self.hash)
def __call__(self, session):
# Record the password against the registration and commit.
newreg = session.query(
RegistrationState).filter(
RegistrationState.name=="pre_registration_inetorgperson").one()
ts = datetime.datetime.utcnow()
act = Touch(
artifact=self.reg, actor=self.user, state=newreg, at=ts)
resource = BcryptedPassword(touch=act, value=self.hash)
session.add(resource)
session.commit()
return act
class NewAccount:
    """
    Adds a posix account to a user registration.

    Calling the instance with a DB session advances the registration to
    the 'user_posixaccount' state and attaches the allocated uid number.
    """

    def __init__(self, user, uidNumber: int, reg):
        self.user = user
        self.uidNumber = uidNumber
        self.reg = reg

    def __call__(self, session):
        # Look up the target state, stamp a Touch on the registration, and
        # attach the uid number as a resource of that touch.
        target_state = (
            session.query(RegistrationState)
            .filter(RegistrationState.name == "user_posixaccount")
            .one()
        )
        timestamp = datetime.datetime.utcnow()
        touch = Touch(
            artifact=self.reg, actor=self.user,
            state=target_state, at=timestamp)
        uid_resource = PosixUIdNumber(
            value=self.uidNumber, touch=touch, provider=None)
        session.add(uid_resource)
        session.commit()
        return touch
|
[
"david.e.haynes@stfc.ac.uk"
] |
david.e.haynes@stfc.ac.uk
|
445617bc8f5daccd09e3dd3e12eed4659f08155b
|
56ebb7505f8b8f31c2d60fd4446798fc3d393922
|
/Chapter 03/Functions/homework.py
|
c8afe43cc31065e5ba236e10e4d5a808b29e9c67
|
[
"MIT"
] |
permissive
|
jinjun-ama/AI-Crash-Course
|
acafb0349051abfb11e32e723874cb4d7bba936c
|
9e0181b51986121415fc68d9b02e37f7da255042
|
refs/heads/master
| 2022-12-18T00:55:29.353127
| 2020-09-28T06:43:14
| 2020-09-28T06:43:14
| 298,290,644
| 0
| 0
|
MIT
| 2020-09-24T13:40:03
| 2020-09-24T13:40:02
| null |
UTF-8
|
Python
| false
| false
| 716
|
py
|
# Exercise for Functions: Homework Solution

def distance(x1, y1, x2, y2):
    """Return the Euclidean distance between points (x1, y1) and (x2, y2)."""
    dx = x1 - x2
    dy = y1 - y2
    return (dx * dx + dy * dy) ** 0.5

# Distance between the origin and (3, 4): a 3-4-5 right triangle, so 5.0.
dist = distance(0, 0, 3, 4)
print(dist)
|
[
"noreply@github.com"
] |
jinjun-ama.noreply@github.com
|
1154ea6356bbe41ee8b4f92a5ebf0bc19091a42b
|
4d95cab2ac244c26f3fe6538a7b452124ce52aa8
|
/ibTracePorts.py
|
6b09133d4fa5f941e1ef884d6bc0d796b9211e72
|
[] |
no_license
|
plaguedbypenguins/ibTools
|
111f153da87c6e63b6d94abd078db9b5c98e13e5
|
b5a06795020eded7897a713016f0b3b511321bf0
|
refs/heads/master
| 2020-04-25T05:19:32.731373
| 2013-02-12T06:46:49
| 2013-02-12T06:46:49
| 7,753,267
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,706
|
py
|
#!/usr/bin/env python
# (c) Robin Humble 2010
# licensed under the GPL v3
# take an IB 'directed path address', and use ibnetdiscover output to figure out the endpoint, usually a host.
# - slurp up a designated ibnetdiscover output to figure out
# which switch ports are connected to HCA's
import sys
import socket
import os
ibDirDefault = '/root/ib'
startHost='xepbs'
startHost='vu-pbs'
startHost='r-pbs'
startHost='r-pbs HCA-1'
def parseIbnetdiscover( ibDir=None, ibNetFile=None ):
# Parse ibnetdiscover output into the fabric topology.
# NOTE: this file is Python 2 (print statements).
# Returns a 4-tuple:
#   switchTree - {switch lid: [switch name, switch lid, {port: [peer name, peer lid, peer port]}]}
#   byName     - {host name: [switch port, switch name, switch lid]}
#   lph        - list of (switch lid, switch port, host lid, host name)
#   rates      - {(lid, port): link rate string, e.g. '4xQDR'}
# If ibNetFile is None, the most recent '*.ibnetdiscover' file in ibDir
# (default ibDirDefault) is used.
f = ibNetFile
d = ibDir
if d == None:
d = ibDirDefault
if f == None:
suffix = 'ibnetdiscover'
f, fTime = findMostRecentFile( d, suffix )
print 'using', f, 'in dir', d
f = d + '/' + f
lines = open( f, 'r' ).readlines()
#print lines
# ...
# Switch 36 "S-0021283a8836a0a0" # "0x0021283a8836a0a0 M2-1" enhanced port 0 lid 1678 lmc 0
# [36] "H-00212800013e555a"[1](212800013e555b) # "marmot1 HCA-1" lid 91 4xQDR
# [35] "S-0021283a89e015d2"[34] # "0x0021283a89e015d2 M9-3-LC-5d" lid 520 4xQDR
# [34] "H-00212800013e5822"[1](212800013e5823) # "marmot3 HCA-1" lid 32 4xQDR
# [33] "S-0021283a89e015d2"[33] # "0x0021283a89e015d2 M9-3-LC-5d" lid 520 4xQDR
# ...
# ...
# Ca 2 "H-00212800013e60f6" # "marmot2 HCA-1"
# [1](212800013e60f7) "S-0021283a842110d2"[28] # lid 241 lmc 0 "Sun DCS 648 QDR LC switch 1.6" lid 211 4xQDR
# [2](212800013e60f8) "S-0021283a842110d2"[29] # lid 256 lmc 0 "Sun DCS 648 QDR LC switch 1.6" lid 211 4xQDR
lph = []
switchTree = {}
byName = {}
rates = {}
d = None
# Simple line-oriented state machine: 'next' tracks whether we are inside
# a Ca (HCA) port list ('ca'), a Switch port list ('ports'), or neither (0).
next = 0
for l in lines:
if next == 'ca':
s = l.split('"')
if len(s) < 2:
# ran out of ports on this HCA
next = 0
continue
# Fields of an HCA port line: switch lid/port it attaches to,
# this port's lid, and this port's number.
swlid = int(l.split()[-2])
swport = int(l.split('[')[2].split(']')[0])
lid = int(l.split('#')[1].split()[1])
port = int(l.split(']')[0].split('[')[1])
h = host
if port != 1: # multi-port HCA
h += ' port%d' % port
lph.append( ( swlid, swport, lid, h ) )
rates[(lid, 1)] = l.split()[-1]
next = 'ca' # goto next port on this HCA
elif l[:2] == 'Ca':
host = l.split('"')[3]
h = host.split()
if len(h) > 1:
if h[1] == 'HCA-1' or h[1] == 'HCA-2':
host = host.split()[0]
# NOTE(review): at this point `host` is already the bare hostname,
# so host[1] is its second *character*, not 'HCA-1'; `h[1]` was
# probably intended -- confirm against real ibnetdiscover data.
host += ' ' + host[1] # append HCA-* to the hostname
else:
#print 'skipping unnamed node', l,
next = 0
continue
#print host
next = 'ca'
elif l[:6] == 'Switch':
s = l.split('"')
# Some switch names are '0x<guid> <name>'; keep only the name part.
if len(s[3].split()) > 1:
swName = s[3].split()[1]
else:
swName = s[3]
if swName == '' or swName == '-':
print 'error. unnamed switch chip', s
swLid = int(s[4].split()[4])
#print 'sw', swName, 'lid', swLid
d = {}
next = 'ports'
elif next == 'ports':
s = l.split('"')
if len(s) < 2:
# End of this switch's port list: commit the accumulated port map.
next = 0
switchTree[swLid] = [ swName, swLid, d ]
continue
# down this switch port number...
port = int(s[0].split(']')[0][1:])
# ... we have this lid for a host/switch
lid = int(s[4].split()[1])
# ... which talks to us on this port
remPort = int(s[2].split(']')[0][1:])
t = s[1][0]
if t == 'H': # host at the end of this port
name = s[3]
if remPort != 1: # found a multi-port HCA. append the port number to the name
name += ' port%d' % remPort
elif t == 'S': # switch ""
if len(s[3].split()) > 1:
name = s[3].split()[1]
else:
name = s[3]
else:
print 'unknown type of link from switch. line is', l
continue
d[port] = [ name, lid, remPort ]
if t == 'H':
byName[name] = [ port, swName, swLid ]
#print 'port', port, 't', t, 'lid', lid, 'name', name
rates[(swLid, port)] = l.split()[-1]
return switchTree, byName, lph, rates
def findMostRecentFile( d, suffix ):
    """Return (name, mtime) of the newest '*.suffix' file in directory d."""
    newest, newest_time, _second, _second_time = findMostRecentFiles(d, suffix)
    return newest, newest_time
def findMostRecentFiles( d, suffix ):
    """Find the two most recently modified '*.suffix' files in directory d.

    Returns (newest_name, newest_mtime, second_name, second_mtime); missing
    entries come back as (None, 0).  Only the text after the final '.' is
    compared against *suffix*.
    """
    newest = (None, 0)
    second = (None, 0)
    for entry in os.listdir(d):
        if entry.split('.')[-1] != suffix:
            continue
        mtime = os.path.getmtime(d + '/' + entry)
        if mtime > newest[1]:
            # New champion: previous newest becomes the runner-up.
            second = newest
            newest = (entry, mtime)
        elif mtime > second[1]:
            second = (entry, mtime)
    return newest[0], newest[1], second[0], second[1]
if __name__ == '__main__':
# CLI driver (Python 2): resolve an IB 'directed path' of egress port
# numbers, starting from startHost, to the endpoint it reaches.
# Usage: ibTracePorts.py <path> [ibnetdiscover file]
ibNetFile = None
if len(sys.argv) == 3:
ibNetFile = sys.argv[2]
path = sys.argv[1]
elif len(sys.argv) == 2:
path = sys.argv[1]
else:
print 'usage', sys.argv[0], '<path> [ibnetdiscover file]'
print 'eg.', sys.argv[0], '0,1,31,18,33,31,27 /root/ib/2010-04-07-15:02:29.ibnetdiscover'
sys.exit(1)
switchTree, byName, lph, r = parseIbnetdiscover(ibNetFile=ibNetFile)
#print 'switchTree (len', len(switchTree), ')', switchTree
#print 'byName (len', len(byName), ')', byName
# Paths are relative to startHost; warn if we're running elsewhere.
name = socket.gethostname()
if name != startHost:
print 'WARNING - the port trace is assumed to be relative to', startHost, 'not this host', name
#sys.exit(1)
name = startHost
start = byName[name]
port, swName, swLid = start
print 'start at', name, 'lid', swLid, 'attached to switch', swName, 'port', port
loc = swLid
# eg. 1,31,1,33,33 <- a dead link
# or
# 0,1,31,18,33,31,27 -> host == v1224
# 0,1 is out of vupbs
# 31 port out of qnem
# 18 port out of LC
# 33 FC
# 31 LC
# 27 qnem
# pull off initial '0,' if there is one
if path[:2] == '0,':
path = path[2:]
print 'path', path
# The first hop must be '1,' (out of the start host's single HCA port).
if path[:2] != '1,':
print 'we assume path always starts with "1,". need to fix this script if you want something else.'
sys.exit(1)
else:
path = path[2:]
name = swName
path = path.split(',')
# Walk the remaining hops: at each switch, follow the given egress port.
for p in path:
p = int(p)
swName, swLid, d = switchTree[loc]
nextHop = d[p]
#print 'nextHop', nextHop
name, loc, remPort = nextHop
print '... travelling to port', p, '(%s, lid %d)' % ( name, loc )
print 'destination', name, 'lid', loc, 'port', remPort
|
[
"plaguedbypenguins@gmail.com"
] |
plaguedbypenguins@gmail.com
|
688f5a27c17943c555fe537f43e8a91de0397e93
|
a46d135ba8fd7bd40f0b7d7a96c72be446025719
|
/packages/python/plotly/plotly/validators/scattercarpet/_uid.py
|
e693f7c7608fa0de61de5dbd33659c52dd174a3f
|
[
"MIT"
] |
permissive
|
hugovk/plotly.py
|
5e763fe96f225d964c4fcd1dea79dbefa50b4692
|
cfad7862594b35965c0e000813bd7805e8494a5b
|
refs/heads/master
| 2022-05-10T12:17:38.797994
| 2021-12-21T03:49:19
| 2021-12-21T03:49:19
| 234,146,634
| 0
| 0
|
MIT
| 2020-01-15T18:33:43
| 2020-01-15T18:33:41
| null |
UTF-8
|
Python
| false
| false
| 390
|
py
|
import _plotly_utils.basevalidators
class UidValidator(_plotly_utils.basevalidators.StringValidator):
    """String validator for the ``uid`` property of ``scattercarpet`` traces."""

    def __init__(self, plotly_name="uid", parent_name="scattercarpet", **kwargs):
        # Default the edit type to "plot" unless the caller overrides it.
        edit_type = kwargs.pop("edit_type", "plot")
        super(UidValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            **kwargs
        )
|
[
"noreply@github.com"
] |
hugovk.noreply@github.com
|
56be32ba217d1b791ad47524ae71cb3c863a157d
|
091a3ad89db18fb02894f956a24724242fcd2e1a
|
/socketxx.py
|
1adfbaf2e18ebfc92648013d9d225fa97afc40d0
|
[] |
no_license
|
Eyewart/test1
|
16c24011761f131894dcd1fa1de23a7dfe7a3ee4
|
da7e9a78a46ed4707774549ff1cb005cc97f4dd2
|
refs/heads/master
| 2022-12-11T19:48:27.392410
| 2020-09-02T23:22:00
| 2020-09-02T23:22:00
| 260,712,189
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 371
|
py
|
import socket
import time

# Address of the server to contact.
hote = "localhost"
port = 12800

# BUG FIX: the original script called close() on the socket *before*
# connect(), which raises OSError ("operation on closed socket") and makes
# the announcement prints misleading.  Create the socket, connect, hold the
# connection open, then close it at the end.
connexion_avec_serveur = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
connexion_avec_serveur.connect((hote, port))
print("Connexion établie avec le serveur sur le port {}".format(port))

# Keep the connection open for three minutes before shutting down.
time.sleep(180)
print("Fermeture de la connexion")
connexion_avec_serveur.close()
|
[
"hrifilaroussi@gmail.com"
] |
hrifilaroussi@gmail.com
|
d272eec5ee0847d4d5f4a0304ac597276a10b45a
|
3c03e3e47a7983159b6d8a7be57083366305a123
|
/test/metview/simulated_sat_img.py
|
40c9535e73da18c37510b215920fe610a362fef1
|
[
"Apache-2.0"
] |
permissive
|
ecmwf/magics-test
|
717abfa7982b649018883574c0f13dc088d06b07
|
3bd91275b76ddd430fff24c02f1bce5440fa5334
|
refs/heads/master
| 2023-06-15T11:13:52.753267
| 2023-06-01T14:42:15
| 2023-06-01T14:42:15
| 176,471,328
| 7
| 5
|
Apache-2.0
| 2021-08-17T13:58:05
| 2019-03-19T09:12:22
|
GLSL
|
UTF-8
|
Python
| false
| false
| 1,652
|
py
|
# (C) Copyright 1996-2016 ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation nor
# does it submit to any jurisdiction.
#importing Magics module
# Plots a simulated satellite IR image (sim_ir9.grib) on a geostationary
# projection using ECMWF Magics macros; writes simulated_sat_img.png.
from Magics.macro import *
ref = 'simulated_sat_img'
#Setting of the output file name
# NOTE(review): the names `output` and `legend` rebind the Magics factory
# functions of the same name (star-imported above); harmless here because
# each factory is only called once, but fragile if the script grows.
output = output(output_formats = ['png'],
output_name_first_page_number = "off",
output_name = ref)
#Setting the coordinates of the geographical area
projection = mmap(subpage_map_projection = 'geos',
subpage_map_vertical_longitude = -20,
subpage_frame = False,
subpage_x_position = 1,
subpage_y_position = 1,
subpage_x_length = 20,
subpage_y_length = 20,)
#Coastlines setting
coast = mcoast( map_coastline_colour = "cream",
map_coastline_thickness = 2,
map_grid_line_style = "dot",
map_grid_colour = "cream")
#Import the z500 data
data = mgrib(grib_input_file_name = "sim_ir9.grib",)
#Define the simple contouring for z500
contour = mcont( contour_automatic_setting = "ecmwf",
legend = "on" )
legend = mlegend(
legend_text_font_size = 0.25,
legend_label_frequency = 3
)
title = mtext(
text_justification = "left",
text_font_size = 0.8,
text_colour = "charcoal")
#To the plot
plot(output, projection, data, contour,
title, coast, legend)
|
[
"sylvie.lamy-thepaut@ecmwf.int"
] |
sylvie.lamy-thepaut@ecmwf.int
|
0611fe9b0f1c708bb1355f5f6025a0fd16acff2c
|
818b1f33d51d3b7132e55f7a12188662eca4f5e1
|
/gmf.py
|
3499ba8a083fd5b6aada66d11bf43f137a4ee00b
|
[] |
no_license
|
qianrenjian/sequence_recommendation
|
d2ef1e116766285d813f94732a7ce1dedf47183c
|
e3a289bfb5ae5643e67aba3aa8f5f04ca6fe5c6e
|
refs/heads/master
| 2020-07-07T12:57:17.944687
| 2019-03-22T06:43:06
| 2019-03-22T06:43:06
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,655
|
py
|
import numpy as np
import numpy as np
import theano.tensor as T
import keras
from keras import backend as K
from keras import initializers
from keras.models import Sequential, Model, load_model, save_model
from keras.layers.core import Dense, Lambda, Activation
from keras.layers import Embedding, Input, Dense, Multiply, Reshape,Flatten
from keras.optimizers import Adagrad, Adam, SGD, RMSprop
from keras.regularizers import l2
# from Dataset import Dataset
from evaluate import evaluate_model
from time import time
import multiprocessing as mp
import sys
import math
import argparse
from leave_one_dataset import LeaveOneDataset
#################### Arguments ####################
def parse_args():
    """Build and parse the command-line arguments for GMF training.

    Returns the populated argparse.Namespace (path, dataset, epochs,
    batch_size, num_factors, regs, num_neg, lr, learner, verbose, out).
    """
    parser = argparse.ArgumentParser(description="Run GMF.")
    # (flag, add_argument keyword arguments) — one row per option.
    option_specs = [
        ('--path', {'nargs': '?', 'default': 'Data/',
                    'help': 'Input data path.'}),
        ('--dataset', {'nargs': '?', 'default': 'ml-1m',
                       'help': 'Choose a dataset.'}),
        ('--epochs', {'type': int, 'default': 100,
                      'help': 'Number of epochs.'}),
        ('--batch_size', {'type': int, 'default': 256,
                          'help': 'Batch size.'}),
        ('--num_factors', {'type': int, 'default': 8,
                           'help': 'Embedding size.'}),
        ('--regs', {'nargs': '?', 'default': '[0,0]',
                    'help': "Regularization for user and item embeddings."}),
        ('--num_neg', {'type': int, 'default': 4,
                       'help': 'Number of negative instances to pair with a positive instance.'}),
        ('--lr', {'type': float, 'default': 0.001,
                  'help': 'Learning rate.'}),
        ('--learner', {'nargs': '?', 'default': 'adam',
                       'help': 'Specify an optimizer: adagrad, adam, rmsprop, sgd'}),
        ('--verbose', {'type': int, 'default': 1,
                       'help': 'Show performance per X iterations'}),
        ('--out', {'type': int, 'default': 1,
                   'help': 'Whether to save the trained model.'}),
    ]
    for flag, kwargs in option_specs:
        parser.add_argument(flag, **kwargs)
    return parser.parse_args()
def init_normal(shape, name=None):
# Legacy Keras-1-style initializer shim.
# NOTE(review): the `shape` and `name` arguments are ignored — the function
# just returns a fresh RandomNormal initializer (mean 0, stddev 0.01).
# The signature is kept for backward compatibility with older call sites.
# return initializers.normal(shape, scale=0.01, name=name)
return initializers.RandomNormal(mean=0.0, stddev=0.01, seed=None)
def get_model(num_users, num_items, latent_dim, regs=[0,0]):
# Build the Generalized Matrix Factorization (GMF) Keras model:
# user/item id -> embedding -> element-wise product -> sigmoid score.
# num_users/num_items: embedding vocabulary sizes; latent_dim: embedding
# size; regs: [user_l2, item_l2] regularization weights.
# (regs has a mutable default, but it is only read here, never mutated,
# so the shared-default pitfall does not bite.)
# Input variables
user_input = Input(shape=(1,), dtype='int32', name = 'user_input')
item_input = Input(shape=(1,), dtype='int32', name = 'item_input')
MF_Embedding_User = Embedding(input_dim = num_users, output_dim = latent_dim, name = 'user_embedding',
embeddings_initializer = initializers.RandomNormal(mean=0.0, stddev=0.01, seed=None), embeddings_regularizer = l2(regs[0]), input_length=1)
MF_Embedding_Item = Embedding(input_dim = num_items, output_dim = latent_dim, name = 'item_embedding',
embeddings_initializer = initializers.RandomNormal(mean=0.0, stddev=0.01, seed=None), embeddings_regularizer = l2(regs[1]), input_length=1)
# Crucial to flatten an embedding vector!
user_latent = Flatten()(MF_Embedding_User(user_input))
item_latent = Flatten()(MF_Embedding_Item(item_input))
# Element-wise product of user and item embeddings
predict_vector = Multiply()([user_latent, item_latent])
# Final prediction layer
#prediction = Lambda(lambda x: K.sigmoid(K.sum(x)), output_shape=(1,))(predict_vector)
prediction = Dense(1, activation='sigmoid', kernel_initializer='lecun_uniform', name = 'prediction')(predict_vector)
# NOTE(review): `input=`/`output=` are the old Keras 1 keyword names;
# Keras 2 renamed them to `inputs=`/`outputs=` — verify against the
# Keras version this project pins before changing.
model = Model(input=[user_input, item_input],
output=prediction)
return model
def get_train_instances(train_pairs):
    """Expand (user, pos_item, neg_items...) rows into parallel training lists.

    Each row of ``train_pairs.values`` holds a user id, one positive item,
    and any number of sampled negative items.  Returns three parallel lists
    (user ids, item ids, labels): the positive pair is labelled 1, every
    negative pair 0.
    """
    users, items, labels = [], [], []
    for row in train_pairs.values:
        user, positive = row[0], row[1]
        # positive instance
        users.append(user)
        items.append(positive)
        labels.append(1)
        # negative instances
        for negative in row[2:]:
            users.append(user)
            items.append(negative)
            labels.append(0)
    return users, items, labels
if __name__ == '__main__':
# Training driver: parse args, load the leave-one-out dataset, build the
# GMF model, evaluate, then train with per-epoch HR/NDCG evaluation and
# keep the best checkpoint.
args = parse_args()
num_factors = args.num_factors
# NOTE(review): eval() on a CLI string executes arbitrary code; consider
# ast.literal_eval for the '[l2_user, l2_item]' list instead.
regs = eval(args.regs)
num_negatives = args.num_neg
learner = args.learner
learning_rate = args.lr
epochs = args.epochs
batch_size = args.batch_size
verbose = args.verbose
topK = 10
evaluation_threads = 1 #mp.cpu_count()
print("GMF arguments: %s" %(args))
model_out_file = 'result/%s_gmf_%d_%d.h5' %(args.dataset, num_factors, time())
# Loading data
t1 = time()
# dataset = Dataset(args.path + args.dataset)
ds = LeaveOneDataset()
ds.load('./data/%s'%args.dataset)
# train, testRatings, testNegatives = dataset.trainMatrix, dataset.testRatings, dataset.testNegatives
# test_pairs columns: [user, positive item, negative items...]
testRatings = ds.test_pairs.values[:,:2]
testNegatives = ds.test_pairs.values[:,2:]
num_users, num_items = ds.num_users,ds.num_items
print("Load data done [%.1f s]. #user=%d, #item=%d, #train=%d, #test=%d"
%(time()-t1, num_users, num_items, len(ds.train_pairs), len(testRatings)))
# Build model
model = get_model(num_users, num_items, num_factors, regs)
if learner.lower() == "adagrad":
model.compile(optimizer=Adagrad(lr=learning_rate), loss='binary_crossentropy')
elif learner.lower() == "rmsprop":
model.compile(optimizer=RMSprop(lr=learning_rate), loss='binary_crossentropy')
elif learner.lower() == "adam":
model.compile(optimizer=Adam(lr=learning_rate), loss='binary_crossentropy')
else:
model.compile(optimizer=SGD(lr=learning_rate), loss='binary_crossentropy')
#print(model.summary())
# Init performance
t1 = time()
(hits, ndcgs) = evaluate_model(model, testRatings, testNegatives, topK, evaluation_threads)
hr, ndcg = np.array(hits).mean(), np.array(ndcgs).mean()
#mf_embedding_norm = np.linalg.norm(model.get_layer('user_embedding').get_weights())+np.linalg.norm(model.get_layer('item_embedding').get_weights())
#p_norm = np.linalg.norm(model.get_layer('prediction').get_weights()[0])
print('Init: HR = %.4f, NDCG = %.4f\t [%.1f s]' % (hr, ndcg, time()-t1))
# Train model
best_hr, best_ndcg, best_iter = hr, ndcg, -1
for epoch in range(epochs):
t1 = time()
# Generate training instances
# Negative samples are fixed in train_pairs, so instances repeat per epoch.
user_input, item_input, labels = get_train_instances(ds.train_pairs)
# Training
hist = model.fit([np.array(user_input), np.array(item_input)], #input
np.array(labels), # labels
batch_size=batch_size, epochs=1, verbose=0, shuffle=True)
t2 = time()
# Evaluation
if epoch %verbose == 0:
(hits, ndcgs) = evaluate_model(model, testRatings, testNegatives, topK, evaluation_threads)
hr, ndcg, loss = np.array(hits).mean(), np.array(ndcgs).mean(), hist.history['loss'][0]
print('Iteration %d [%.1f s]: HR = %.4f, NDCG = %.4f, loss = %.4f [%.1f s]'
% (epoch, t2-t1, hr, ndcg, loss, time()-t2))
# Checkpoint only when the hit ratio improves.
if hr > best_hr:
best_hr, best_ndcg, best_iter = hr, ndcg, epoch
if args.out > 0:
model.save_weights(model_out_file, overwrite=True)
print("End. Best Iteration %d: HR = %.4f, NDCG = %.4f. " %(best_iter, best_hr, best_ndcg))
if args.out > 0:
print("The best GMF model is saved to %s" %(model_out_file))
|
[
"249683003@qq.com"
] |
249683003@qq.com
|
a9faf9dd14bff6ccdbef59575ef57dd8a5e86634
|
6ff6907120132e40eb7841d9b8e42add98ae110c
|
/docs/conf.py
|
462892757e5b224299a11a045176a8a71d45fa28
|
[
"MIT"
] |
permissive
|
CorneliusFranken/solvency2-data
|
a7ec8e4fb17367a064949617b56394fe70f38eca
|
61ab10d297721b2d9cf5063440bb1467b80fc4f5
|
refs/heads/master
| 2023-07-08T01:03:08.443083
| 2021-09-03T16:49:44
| 2021-09-03T16:49:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,944
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# solvency2_data documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 9 13:47:02 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# NOTE(review): `alabaster` is imported but not referenced below
# (html_theme is 'sphinx_rtd_theme') — possibly a leftover; verify
# nothing relies on the import side effect before removing.
import alabaster
import solvency2_data
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'solvency2-data'
copyright = u"2021, Willem Jan Willemse"
author = u"Willem Jan Willemse"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = solvency2_data.__version__
# The full version, including alpha/beta/rc tags.
release = solvency2_data.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ---------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'solvency2_datadoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto, manual, or own class]).
latex_documents = [
(master_doc, 'solvency2_data.tex',
u'solvency2-data Documentation',
u'De Nederlandsche Bank', 'manual'),
]
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'solvency2_data',
u'solvency2-data Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'solvency2_data',
u'solvency2-data Documentation',
author,
'solvency2_data',
'One line description of project.',
'Miscellaneous'),
]
|
[
"w.j.willemse@xs4all.nl"
] |
w.j.willemse@xs4all.nl
|
f8e6de07d5227dc1abec9911ddada669643f42bb
|
8b97e08d7d7cd904cafe5163eb81d6e2a81fbd90
|
/venv/bin/easy_install
|
74aede3b0a1b02eca86658618ec7a70e9aa649db
|
[] |
no_license
|
Shaigift/Python-Practice-3
|
180f8005b31526a4f4c834be5ea46bea1a04ef62
|
e464740050513a455debeecc1af568f10707010a
|
refs/heads/master
| 2022-08-27T15:50:14.602975
| 2020-05-20T08:31:32
| 2020-05-20T08:31:32
| 265,501,050
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 444
|
#!/Users/mphoshai/PycharmProjects/untitled/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
# Auto-generated setuptools console-script wrapper: normalises argv[0]
# (stripping '-script.py[w]' / '.exe' suffixes) and dispatches to the
# 'easy_install' entry point of the pinned setuptools distribution.
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
)
|
[
"Mphogivenshai@gmail.com"
] |
Mphogivenshai@gmail.com
|
|
073c78c464eb8c22be7697340798bdfb19009e7c
|
8adec48dfaee1cdfd6c7f4d2fb3038aa1c17bda6
|
/WProf/build/masters/master.chromium.chromiumos/master_gatekeeper_cfg.py
|
68b8a28fa503e0db0192a1a7d126068772a3feef
|
[] |
no_license
|
kusoof/wprof
|
ef507cfa92b3fd0f664d0eefef7fc7d6cd69481e
|
8511e9d4339d3d6fad5e14ad7fff73dfbd96beb8
|
refs/heads/master
| 2021-01-11T00:52:51.152225
| 2016-12-10T23:51:14
| 2016-12-10T23:51:14
| 70,486,057
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,526
|
py
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master import gatekeeper
from master import master_utils
# This is the list of the builder categories and the corresponding critical
# steps. If one critical step fails, gatekeeper will close the tree
# automatically.
# Note: don't include 'update scripts' since we can't do much about it when
# it's failing and the tree is still technically fine.
chromium_categories_steps = {
  '': ['update'],
  'tester': [
    'base_unittests',
    #'browser_tests',
    'cacheinvalidation_unittests',
    'content_unittests',
    'courgette_unittests',
    'crypto_unittests',
    'dbus_unittests',
    'googleurl_unittests',
    'installer_util_unittests',
    #'interactive_ui_tests',
    'ipc_tests',
    'jingle_unittests',
    'media_unittests',
    'mini_installer_test',
    'nacl_integration',
    'net_unittests',
    'printing_unittests',
    'remoting_unittests',
    'sbox_integration_tests',
    'sbox_unittests',
    'sbox_validation_tests',
    'sizes',
    'sql_unittests',
    'start_crash_handler',
    'sync_unittests',
    'test_shell_tests',
    'ui_unittests',
    'unit_tests',
    'views_unittests',
    #'webkit_tests',
  ],
  'compile': ['check_deps', 'compile', 'archive_build'],
  'closer': ['BuildTarget'],
}
# Per-builder exclusions from the critical steps above; currently nothing
# is exempted.
exclusions = {
}
# Steps whose failures should not be treated as critical by the
# gatekeeper (presumably reported without closing the tree — see
# gatekeeper.GateKeeper; TODO confirm exact semantics).
forgiving_steps = ['update_scripts', 'update', 'svnkill', 'taskkill',
                   'archive_build', 'start_crash_handler']
# Failures in these steps close the Chromium OS tree (used with the
# alternate master in Update() below).
close_chromiumos_categories_steps = {
  'closer': [
    'LKGMSync',
    'BuildBoard',
    'UnitTest',
  ],
}
# Chromium OS steps that only generate warning mail — no tree closing
# (wired up with tree_status_url=None in Update() below).
warn_chromiumos_categories_steps = {
  'watch': [
    'UploadPrebuilts',
    'Archive',
    'VMTest',
  ],
}
# Aura-specific steps that only generate warning mail to the aura sheriff.
warn_aura_chromiumos_categories_steps = {
  'aurawatch': [
    'Archive',
    'BuildTarget',
    'BuildBoard',
    'UnitTest',
  ]
}
# %-style templates; the placeholders are filled in by the notifier when
# a message is sent.
subject = ('buildbot %(result)s in %(projectName)s on %(builder)s, '
           'revision %(revision)s')
warning_header = ('Please look at failure in "%(steps)s" on "%(builder)s" '
                  'and help out if you can')
def Update(config, active_master, alternate_master, c):
  """Register the GateKeeper status notifiers on buildmaster config *c*.

  Four notifiers are attached, in order:
    1. Chromium failures: mails the chrome sheriff and closes the
       chrome tree.
    2. Chromium OS closer steps: mails the CrOS sheriffs and closes the
       Chromium OS tree (the alternate master).
    3. Chromium OS watch steps: warning mail only, no tree closing.
    4. Aura watch steps: warning mail to the aura sheriff only.
  """
  def _make(**overrides):
    # Settings shared by every notifier; per-notifier overrides win.
    # Note FilterDomain() is constructed once per notifier, matching the
    # original one-instance-per-GateKeeper behavior.
    settings = {
        'fromaddr': active_master.from_address,
        'exclusions': exclusions,
        'relayhost': config.Master.smtp,
        'lookup': master_utils.FilterDomain(),
        'forgiving_steps': forgiving_steps,
        'use_getname': True,
    }
    settings.update(overrides)
    return gatekeeper.GateKeeper(**settings)

  notifiers = [
      # chrome likely/possible failures to the chrome sheriffs, closing
      # the chrome tree.
      _make(categories_steps=chromium_categories_steps,
            subject=subject,
            extraRecipients=active_master.tree_closing_notification_recipients,
            tree_status_url=active_master.tree_status_url,
            sheriffs=['sheriff']),
      # chromium os failures close the chromeOS tree.
      _make(categories_steps=close_chromiumos_categories_steps,
            subject='Closer ' + subject,
            extraRecipients=alternate_master.tree_closing_notification_recipients,
            tree_status_url=alternate_master.tree_status_url,
            sheriffs=['sheriff_cros_mtv', 'sheriff_cros_nonmtv']),
      # chromium os buried failures/flakiness go to chrome OS folk as
      # warnings only.
      _make(categories_steps=warn_chromiumos_categories_steps,
            subject='Warning ' + subject,
            status_header=warning_header,
            extraRecipients=[],
            tree_status_url=None,
            sheriffs=['sheriff_cros_mtv', 'sheriff_cros_nonmtv']),
      # while the Aura folk are in panic fast mode, let them know to help
      # on failures that may be related to their special configs.
      _make(categories_steps=warn_aura_chromiumos_categories_steps,
            subject='Warning ' + subject,
            status_header=warning_header,
            extraRecipients=[],
            tree_status_url=None,
            sheriffs=['sheriff_aura']),
  ]
  c['status'].extend(notifiers)
|
[
"kusoof@kookaburra.(none)"
] |
kusoof@kookaburra.(none)
|
9d1134118ab6c2eea397c4ee09486f8954cfb743
|
8659eabdc3668eb0be6fe365e98b8e0654fcd761
|
/arraylib.py
|
f7a2f861df28a35c8b0c97877ac299b4011f5c78
|
[
"MIT"
] |
permissive
|
blokeley/forcelib
|
25bfcd6120ea0178da88a79c6a0bc7cd9e1ca58e
|
003fa02c70ee8ac8486db12a388ce67945488069
|
refs/heads/master
| 2021-01-10T14:18:39.983510
| 2019-06-10T16:48:43
| 2019-06-10T16:48:43
| 50,867,441
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,888
|
py
|
# MIT License
#
# Copyright (c) 2017 Tom Oakley
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Get the latest version from https://github.com/blokeley/forcelib
"""Array utilities."""
import numpy as np
import pandas as pd
# Note that the version of arraylib may be different to that of forcelib
__version__ = '1.0.0'
def rescale(old_array, min_, max_):
    """Linearly rescale *old_array* so its values span [min_, max_].

    Parameters
    ----------
    old_array : numpy.ndarray
        Array to rescale. Must contain at least two distinct values;
        a constant array makes the denominator zero (numpy then emits a
        divide warning and returns inf/nan, as the original code did).
    min_, max_ : float
        Target minimum and maximum of the returned array.

    Returns
    -------
    numpy.ndarray
        Array of the same shape with values mapped onto [min_, max_].
    """
    # Hoist min/max: the original recomputed old_array.min() twice.
    lo = old_array.min()
    hi = old_array.max()
    scale_factor = (max_ - min_) / (hi - lo)
    return min_ + scale_factor * (old_array - lo)
def interp(df, new_index):
    """Return a new DataFrame with all column values interpolated to
    the *new_index* values.

    Parameters
    ----------
    df : pandas.DataFrame
        Source frame. NOTE(review): np.interp assumes df.index is
        monotonically increasing — confirm callers guarantee this.
    new_index : array-like
        Index values at which to interpolate every column.

    Returns
    -------
    pandas.DataFrame
        Frame indexed by *new_index* with the same columns and index
        name as *df*.
    """
    df_out = pd.DataFrame(index=new_index)
    df_out.index.name = df.index.name
    # Fix: DataFrame.iteritems() was removed in pandas 2.0; items() is
    # the long-standing equivalent and works on all supported versions.
    for colname, col in df.items():
        df_out[colname] = np.interp(new_index, df.index, col)
    return df_out
|
[
"blokeley@gmail.com"
] |
blokeley@gmail.com
|
2041c8ec7d9ccbc9a16c350bbc04ce23c2dda211
|
0aa615521d59cbcbf821457019170548714ac5df
|
/Hero.py
|
256fac0716a77afb9500512901d12f8aed87d8f1
|
[] |
no_license
|
Movado20057/Py-Class
|
0d49afc0ccc3c89237366fdbee7950ac5681aa58
|
42bea47cfd1dc3357abe2679a6c71c33b0d4e858
|
refs/heads/master
| 2021-04-06T14:11:06.555837
| 2018-03-11T16:15:27
| 2018-03-11T16:15:27
| 124,768,511
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,229
|
py
|
#Hero's Inventory
#Demonstrates list/tuple basics: iteration, len(), membership, indexing,
#slicing, and concatenation.

#create a list with some items and display with a for loop
inventory = ["sword", "armor", "shield", "healing potion"]
print ("Your items: ")
for item in inventory:
    print (item)
input("\nPress the enter key to continue.")

#get the length of a list
print("You have", len(inventory), "items in your possession.")
# Fix: the prompt previously started with a stray apostrophe ("'\n...").
input("\nPress the enter key to continue.")

#test for membership with in
if "healing potion" in inventory:
    print ("You will live to fight another day.")

# display one item through an index
index = int (input ("\nEnter the index number for an item in inventory: "))
print("At index", index, "is", inventory[index])

# display a slice
start = int (input ("\nEnter the index nmber to begin a slice"))
finish = int (input ("\nEnter the index nmber to end a slice"))
print ("inventory[", start, ":", finish, "] is", end=" ")
print (inventory [start:finish])
input ("\nPress the enter key to continue.")

# concatenate two tuples (a list += tuple extends the list in place)
chest = ("gold", "gems")
print ("You find a chest. It contains:")
print (chest)
print ("You add the contents of the chest to your inventory.")
inventory += chest
print ("Your inventory is now:")
print (inventory)
input ("\n\nPress the enter key to exit.")
|
[
"seun.daniel@gmail.com"
] |
seun.daniel@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.